From e01505d7585b7fa8e52d4b7517769a8e12ed3a86 Mon Sep 17 00:00:00 2001
From: david
Date: Sat, 2 Dec 2023 13:49:02 +1000
Subject: [PATCH 01/25] progress commit

---
 Dockerfile | 4 +-
 connegp-0.1.5-py3-none-any.whl | Bin 5042 -> 0 bytes
 connegp-0.1.6-py3-none-any.whl | Bin 0 -> 5052 bytes
 poetry.lock | 848 ++++++++---
 prez/app.py | 26 +-
 prez/config.py | 51 +-
 prez/dependencies.py | 50 +-
 prez/models/listing.py | 49 +-
 prez/models/object_item.py | 3 +
 prez/models/profiles_and_mediatypes.py | 45 +-
 prez/models/profiles_item.py | 17 +-
 prez/models/profiles_listings.py | 35 +-
 prez/models/search_method.py | 8 +-
 prez/queries/vocprez.py | 2 +-
 .../reference_data/context_ontologies/dcat.nq | 1342 +++++++++++++++++
 prez/reference_data/context_ontologies/rdf.nq | 127 ++
 .../new_endpoints/cql_endpoints.ttl | 17 +
 .../new_endpoints/ogc_endpoints.ttl | 58 +
 .../new_endpoints/spaceprez_endpoints.ttl | 87 ++
 .../new_endpoints/vocprez_endpoints.ttl | 115 ++
 prez/reference_data/prefixes/testing.ttl | 16 +
 prez/reference_data/prez_ns.py | 1 +
 .../profiles/catprez_default_profiles.ttl | 4 +-
 prez/reference_data/profiles/ogc_profile.ttl | 64 +
 .../profiles/prez_default_profiles.ttl | 38 +-
 .../profiles/spaceprez_default_profiles.ttl | 114 +-
 .../profiles/vocprez_default_profiles.ttl | 60 +-
 prez/routers/cql.py | 107 +-
 prez/routers/management.py | 21 +
 prez/routers/object.py | 2 -
 prez/routers/ogc_catprez.py | 117 ++
 prez/routers/spaceprez.py | 13 +-
 prez/routers/vocprez.py | 58 +-
 prez/services/app_service.py | 4 +-
 prez/services/generate_profiles.py | 51 +-
 prez/services/link_generation.py | 30 +-
 prez/services/listings.py | 108 +-
 prez/services/model_methods.py | 2 +
 prez/services/objects.py | 94 +-
 prez/sparql/methods.py | 3 +
 prez/sparql/objects_listings.py | 29 +-
 pyproject.toml | 4 +-
 temp/cql2sparql.py | 275 ++++
 temp/cql_sparql_reference.py | 31 +
 temp/default_cql_context.json | 24 +
 temp/grammar.py | 542 +++++++
 temp/shacl2sparql.py | 522 +++++++
 tests/conftest.py | 3 -
 .../expected_responses/catalog_anot.ttl | 6 +-
 tests/data/catprez/input/_system-catalog.ttl | 5 +
 tests/data/cql/input/example01.json | 7 +
 tests/data/cql/input/example02.json | 7 +
 tests/data/cql/input/example03.json | 7 +
 tests/data/cql/input/example05a.json | 19 +
 tests/data/cql/input/example05b.json | 7 +
 tests/data/cql/input/example06a.json | 26 +
 tests/data/cql/input/example06b.json | 33 +
 tests/data/cql/input/example07.json | 35 +
 .../expected_responses/dataset_anot.ttl | 15 +-
 .../dataset_listing_anot.ttl | 1 +
 .../expected_responses/feature_anot.ttl | 21 +-
 .../feature_collection_anot.ttl | 15 +-
 .../feature_collection_listing_anot.ttl | 37 +-
 .../feature_listing_anot.ttl | 23 +-
 tests/data/spaceprez/input/sandgate.ttl | 4 +
 tests/test_count.py | 84 --
 tests/test_cql.py | 54 +
 tests/test_endpoints_catprez.py | 118 --
 tests/test_endpoints_profiles.py | 78 -
 tests/test_endpoints_spaceprez.py | 146 --
 tests/test_endpoints_vocprez.py | 249 ---
 tests/test_search.py | 2 +-
 72 files changed, 4964 insertions(+), 1256 deletions(-)
 delete mode 100644 connegp-0.1.5-py3-none-any.whl
 create mode 100644 connegp-0.1.6-py3-none-any.whl
 create mode 100644 prez/reference_data/context_ontologies/dcat.nq
 create mode 100644 prez/reference_data/context_ontologies/rdf.nq
 create mode 100644 prez/reference_data/new_endpoints/cql_endpoints.ttl
 create mode 100644 prez/reference_data/new_endpoints/ogc_endpoints.ttl
 create mode 100644 prez/reference_data/new_endpoints/spaceprez_endpoints.ttl
 create mode 100644 prez/reference_data/new_endpoints/vocprez_endpoints.ttl
 create mode 100644 prez/reference_data/profiles/ogc_profile.ttl
 create mode 100644 prez/routers/ogc_catprez.py
 create mode 100644 temp/cql2sparql.py
 create mode 100644 temp/cql_sparql_reference.py
 create mode 100644 temp/default_cql_context.json
 create mode 100644 temp/grammar.py
 create mode 100644 temp/shacl2sparql.py
 delete mode 100644 tests/conftest.py
 create mode 100644 tests/data/cql/input/example01.json
 create mode 100644 tests/data/cql/input/example02.json
 create mode 100644 tests/data/cql/input/example03.json
 create mode 100644 tests/data/cql/input/example05a.json
 create mode 100644 tests/data/cql/input/example05b.json
 create mode 100644 tests/data/cql/input/example06a.json
 create mode 100644 tests/data/cql/input/example06b.json
 create mode 100644 tests/data/cql/input/example07.json
 delete mode 100644 tests/test_count.py
 create mode 100644 tests/test_cql.py
 delete mode 100644 tests/test_endpoints_catprez.py
 delete mode 100644 tests/test_endpoints_profiles.py
 delete mode 100644 tests/test_endpoints_spaceprez.py
 delete mode 100644 tests/test_endpoints_vocprez.py

diff --git a/Dockerfile b/Dockerfile
index fce68b24..aabdef4a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -31,7 +31,7 @@ RUN curl -sSL https://install.python-poetry.org | python && \
     chmod a+x /opt/poetry/bin/poetry
 
 WORKDIR /app
-COPY poetry.lock pyproject.toml connegp-0.1.5-py3-none-any.whl ./
+COPY poetry.lock pyproject.toml connegp-0.1.6-py3-none-any.whl ./
 RUN poetry install --only main --no-root --no-ansi
 
 FROM python:3.11-slim-buster
@@ -44,6 +44,8 @@ ENV PYTHONDONTWRITEBYTECODE=1 \
     PATH="/app/.venv/bin:$PATH"
 WORKDIR /app
 COPY ./prez /app/prez
+COPY ./temp /app/temp
+COPY ./rdf /app/rdf
 
 # copy the pyproject.toml as the application reads the version from here
 COPY pyproject.toml .
diff --git a/connegp-0.1.5-py3-none-any.whl b/connegp-0.1.5-py3-none-any.whl
deleted file mode 100644
index c23a7232203a2481928fb52b022d9786bf9ba543..0000000000000000000000000000000000000000
GIT binary patch
[base85-encoded binary payload of the deleted wheel omitted -- not human-readable]

diff --git a/poetry.lock b/poetry.lock
index d14ec4b4..9eeba6f8 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -2,13 +2,13 @@
 
 [[package]]
 name = "anyio"
-version = "4.0.0"
+version = "3.7.1"
 description = "High level compatibility layer for multiple asynchronous event loop implementations"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "anyio-4.0.0-py3-none-any.whl", hash = "sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f"},
-    {file = "anyio-4.0.0.tar.gz", hash = "sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a"},
+    {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"},
+    {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"},
 ]
 
 [package.dependencies]
@@ -16,9 +16,9 @@ idna = ">=2.8"
 sniffio = ">=1.1"
 
 [package.extras]
-doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)"]
-test = ["anyio[trio]", "coverage[toml] (>=7)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
-trio = ["trio (>=0.22)"]
+doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-jquery"]
+test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
+trio = ["trio (<0.22)"]
 
 [[package]]
 name = "async-lru"
@@ -64,15 +64,26 @@ d = ["aiohttp (>=3.7.4)"]
 jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
 uvloop = ["uvloop (>=0.15.2)"]
 
+[[package]]
+name = "cachetools"
+version = "5.3.2"
+description = "Extensible memoizing collections and decorators"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"},
+    {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"},
+]
+
 [[package]]
 name = "certifi"
-version = "2023.7.22"
+version = "2023.11.17"
 description = "Python package for providing Mozilla's CA Bundle."
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"},
-    {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"},
+    {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"},
+    {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"},
 ]
 
 [[package]]
@@ -80,7 +91,7 @@
 name = "cfgv"
 version = "3.4.0"
 description = "Validate configuration and produce human readable error messages."
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.7"
 files = [
     {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
     {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
@@ -88,86 +99,101 @@
 [[package]]
 name = "charset-normalizer"
-version = "3.2.0"
+version = "3.3.2"
 description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
 optional = false
 python-versions = ">=3.7.0"
 files = [
-    {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"},
-    {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"},
-    {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"},
-    {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"},
-    {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"},
-    {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"},
-    {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"},
-    {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"},
-    {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"},
-    {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"},
-    {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"},
-    {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"},
-    {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"},
-    {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"},
-    {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"},
-    {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"},
-    {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"},
-    {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"},
-    {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"},
-    {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"},
-    {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"},
-    {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"},
-    {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"},
-    {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"},
-    {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"},
-    {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"},
-    {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"},
-    {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"},
-    {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"},
-    {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"},
-    {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"},
-    {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"},
-    {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"},
-    {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"},
-    {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"},
-    {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"},
-    {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"},
-    {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"},
-    {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"},
-    {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"},
-    {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"},
-    {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"},
-    {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"},
-    {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"},
-    {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"},
-    {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"},
-    {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"},
-    {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"},
-    {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"},
-    {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"},
-    {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"},
-    {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"},
-    {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"},
-    {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"},
-    {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"},
-    {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"},
-    {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"},
-    {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"},
-    {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"},
-    {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"},
-    {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"},
-    {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"},
-    {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"},
-    {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"},
-    {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"},
-    {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"},
-    {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"},
-    {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"},
-    {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"},
-    {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"},
-    {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"},
-    {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"},
-    {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"},
-    {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"},
-    {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"},
+    {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
+    {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
 ]
 
 [[package]]
@@ -191,8 +217,8 @@ description = "Extended pickling support for Python objects"
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "cloudpickle-2.2.1-py3-none-any.whl", hash = "sha256:61f594d1f4c295fa5cd9014ceb3a1fc4a70b0de1164b94fbc2d854ccba056f9f"},
-    {file = "cloudpickle-2.2.1.tar.gz", hash = "sha256:d89684b8de9e34a2a43b3460fbca07d09d6e25ce858df4d5a44240403b6178f5"},
+    {file = "cloudpickle-3.0.0-py3-none-any.whl", hash = "sha256:246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7"},
+    {file = "cloudpickle-3.0.0.tar.gz", hash = "sha256:996d9a482c6fb4f33c1a35335cf8afd065d2a56e973270364840712d9131a882"},
 ]
 
 [[package]]
@@ -208,20 +234,20 @@ files = [
 
 [[package]]
 name = "connegp"
-version = "0.1.5"
+version = "0.1.6"
 description = "Content negotiation by profile"
 optional = false
 python-versions = ">=3.8,<4.0"
 files = [
-    {file = "connegp-0.1.5-py3-none-any.whl", hash = "sha256:9fe85c1f24f206c3be6773e54e09df3065fdef4d032922e896b5668cfa81e3d3"},
+    {file = "connegp-0.1.6-py3-none-any.whl", hash = "sha256:8d4f7f605d568032243e7cfa84c22bedae66e28651acb58af82b4b43d3de899f"},
 ]
 
 [package.dependencies]
-pydantic = ">=1.8.2,<2.0.0"
+pydantic = ">=1.8.2,<3.0.0"
 
 [package.source]
 type = "file"
-url = "connegp-0.1.5-py3-none-any.whl"
+url = "connegp-0.1.6-py3-none-any.whl"
 
 [[package]]
 name = "coverage"
@@ -300,50 +326,95 @@ files = [
 
 [[package]]
 name = "fastapi"
-version = "0.95.2"
+version = "0.104.1"
 description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "fastapi-0.95.2-py3-none-any.whl", hash = "sha256:d374dbc4ef2ad9b803899bd3360d34c534adc574546e25314ab72c0c4411749f"},
-    {file = "fastapi-0.95.2.tar.gz", hash = "sha256:4d9d3e8c71c73f11874bcf5e33626258d143252e329a01002f767306c64fb982"},
+    {file = "fastapi-0.104.1-py3-none-any.whl", hash = "sha256:752dc31160cdbd0436bb93bad51560b57e525cbb1d4bbf6f4904ceee75548241"},
+    {file = "fastapi-0.104.1.tar.gz", hash = "sha256:e5e4540a7c5e1dcfbbcf5b903c234feddcdcd881f191977a1c5dfd917487e7ae"},
 ]
 
 [package.dependencies]
-pydantic = ">=1.6.2,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1.7.3,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0"
+anyio = ">=3.7.1,<4.0.0"
+pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0"
 starlette = ">=0.27.0,<0.28.0"
+typing-extensions = ">=4.8.0"
 
 [package.extras]
-all = ["email-validator (>=1.1.1)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
-dev = ["pre-commit (>=2.17.0,<3.0.0)", "ruff (==0.0.138)", "uvicorn[standard] (>=0.12.0,<0.21.0)"]
-doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer-cli (>=0.0.13,<0.0.14)", "typer[all] (>=0.6.1,<0.8.0)"]
-test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==23.1.0)", "coverage[toml] (>=6.5.0,<8.0)", "databases[sqlite] (>=0.3.2,<0.7.0)", "email-validator (>=1.1.1,<2.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.23.0,<0.24.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.982)", "orjson (>=3.2.1,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=7.1.3,<8.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.7)", "pyyaml (>=5.3.1,<7.0.0)", "ruff (==0.0.138)", "sqlalchemy (>=1.3.18,<1.4.43)", "types-orjson (==3.6.2)", "types-ujson (==5.7.0.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"]
+all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
 
 [[package]]
 name = "filelock"
-version = "3.12.4"
+version = "3.13.1"
 description = "A platform independent file lock."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"},
-    {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"},
+    {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"},
+    {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"},
 ]
 
 [package.extras]
-docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"]
-testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"]
-typing = ["typing-extensions (>=4.7.1)"]
+docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
+typing = ["typing-extensions (>=4.8)"]
+
+[[package]]
+name = "frozendict"
+version = "2.3.8"
+description = "A simple immutable dictionary"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "frozendict-2.3.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d188d062084fba0e4bf32719ff7380b26c050b932ff164043ce82ab90587c52b"},
+    {file = "frozendict-2.3.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f2a4e818ac457f6354401dcb631527af25e5a20fcfc81e6b5054b45fc245caca"},
+    {file = "frozendict-2.3.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a506d807858fa961aaa5b48dab6154fdc6bd045bbe9310788bbff141bb42d13"},
+    {file = "frozendict-2.3.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:750632cc890d8ee9484fe6d31b261159144b6efacc08e1317fe46accd1410373"},
+    {file = "frozendict-2.3.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ee5fe2658a8ac9a57f748acaf563f6a47f80b8308cbf0a04fac0ba057d41f75"},
+    {file = "frozendict-2.3.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23c4bb46e6b8246e1e7e49b5593c2bc09221db0d8f31f7c092be8dfb42b9e620"},
+    {file = "frozendict-2.3.8-cp310-cp310-win_amd64.whl", hash = "sha256:c31abc8acea309b132dde441856829f6003a3d242da8b54bce4c0f2a3c8c63f0"},
+    {file = "frozendict-2.3.8-cp310-cp310-win_arm64.whl", hash = "sha256:9ea5520e85447ff8d4681e181941e482662817ccba921b7cb3f87922056d892a"},
+    {file = "frozendict-2.3.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f83fed36497af9562ead5e9fb8443224ba2781786bd3b92b1087cb7d0ff20135"},
+    {file = "frozendict-2.3.8-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e27c5c1d29d0eda7979253ec88abc239da1313b38f39f4b16984db3b3e482300"},
+    {file = "frozendict-2.3.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4c785de7f1a13f15963945f400656b18f057c2fc76c089dacf127a2bb188c03"},
+    {file = "frozendict-2.3.8-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8cf35ddd25513428ec152614def9696afb93ae5ec0eb54fa6aa6206eda77ac4c"},
+    {file = "frozendict-2.3.8-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ffc684773de7c88724788fa9787d0016fd75830412d58acbd9ed1a04762c675b"},
+    {file = "frozendict-2.3.8-cp36-cp36m-win_amd64.whl", hash = "sha256:4c258aab9c8488338634f2ec670ef049dbf0ab0e7a2fa9bc2c7b5009cb614801"},
+    {file = "frozendict-2.3.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47fc26468407fdeb428cfc89495b7921419e670355c21b383765482fdf6c5c14"},
+    {file = "frozendict-2.3.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ea638228692db2bf94bce40ea4b25f4077588497b516bd16576575560094bd9"},
+    {file = "frozendict-2.3.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a75bf87e76c4386caecdbdd02a99e53ad43a6b5c38fb3d5a634a9fc9ce41462"},
+    {file = "frozendict-2.3.8-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ed5a6c5c7a0f57269577c2a338a6002949aea21a23b7b7d06da7e7dced8b605b"},
+    {file = "frozendict-2.3.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d086440328a465dea9bef2dbad7548d75d1a0a0d21f43a08c03e1ec79ac5240e"},
+    {file = "frozendict-2.3.8-cp37-cp37m-win_amd64.whl", hash = "sha256:0bc4767e2f83db5b701c787e22380296977368b0c57e485ca71b2eedfa11c4a3"},
+    {file = "frozendict-2.3.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:638cf363d3cbca31a341503cf2219eac52a5f5140449676fae3d9644cd3c5487"},
+    {file = "frozendict-2.3.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b2fd8ce36277919b36e3c834d2389f3cd7ac068ae730c312671dd4439a5dd65"},
+    {file = "frozendict-2.3.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3957d52f1906b0c85f641a1911d214255873f6408ab4e5ad657cc27a247fb145"},
+    {file = "frozendict-2.3.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72cfe08ab8ae524e54848fa90b22d02c1b1ecfb3064438696bcaa4b953f18772"},
+    {file = "frozendict-2.3.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4742e76c4111bd09198d3ab66cef94be8506212311338f9182d6ef5f5cb60493"},
+    {file = "frozendict-2.3.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:313ed8d9ba6bac35d7635cd9580ee5721a0fb016f4d2d20f0efa05dbecbdb1be"},
+    {file = "frozendict-2.3.8-cp38-cp38-win_amd64.whl", hash = "sha256:d3c6ce943946c2a61501c8cf116fff0892d11dd579877eb36e2aea2c27fddfef"},
+    {file = "frozendict-2.3.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0f573dc4861dd7ec9e055c8cceaf45355e894e749f621f199aab7b311ac4bdb"},
+    {file = "frozendict-2.3.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b3435e5f1ca5ae68a5e95e64b09d6d5c645cadd6b87569a0b3019dd248c8d00"},
+    {file = "frozendict-2.3.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:145afd033ebfade28416093335261b8ec1af5cccc593482309e7add062ec8668"},
+    {file = "frozendict-2.3.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da98427de26b5a2865727947480cbb53860089c4d195baa29c539da811cea617"},
+    {file = "frozendict-2.3.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5e82befa7c385a668d569cebbebbdf49cee6fea4083f08e869a1b08cfb640a9f"},
+    {file = "frozendict-2.3.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:80abe81d36e889ceec665e06ec764a7638000fa3e7be09786ac4d3ddc64b76db"},
+    {file = "frozendict-2.3.8-cp39-cp39-win_amd64.whl", hash = "sha256:8ccc94ac781710db44e142e1a11ff9b31d02c032c01c6868d51fcbef73086225"},
+    {file = "frozendict-2.3.8-cp39-cp39-win_arm64.whl", hash = "sha256:e72dbc1bcc2203cef38d205f692396f5505921a5680f66aa9a7e8bb71fd38f28"},
+    {file = "frozendict-2.3.8-py311-none-any.whl", hash = "sha256:ba41a7ed019bd03b62d63ed3f8dea35b8243d1936f7c9ed4b5298ca45a01928e"},
+    {file = "frozendict-2.3.8.tar.gz", hash = "sha256:5526559eca8f1780a4ee5146896f59afc31435313560208dd394a3a5e537d3ff"},
+]
 
 [[package]]
 name = "geojson-rewind"
-version = "1.0.3"
+version = "1.1.0"
 description = "A Python library for enforcing polygon ring winding order in GeoJSON"
 optional = false
-python-versions = ">=3.6,<4.0"
+python-versions = ">=3.8"
 files = [
-    {file = "geojson-rewind-1.0.3.tar.gz", hash = "sha256:f9a0972992f20c863aa44f6f486dbb200ce3b95491aa92f35c51857353985d01"},
-    {file = "geojson_rewind-1.0.3-py3-none-any.whl", hash = "sha256:66d411c2ecbf8e7ad53d9fc62d92ba7a8f6fb033755eb7f9f3d822afff71b2b4"},
+    {file = "geojson_rewind-1.1.0-py3-none-any.whl", hash = "sha256:eb89989210f533c7797553fcf61c0cacdfbd247790812a0e13ac56ec454ea135"},
+    {file = "geojson_rewind-1.1.0.tar.gz", hash = "sha256:146600aa4bfa1d260bac1c498f11f118196cad2556d1754f0bc3df1d8fd902c6"},
 ]
 
 [[package]]
@@ -359,39 +430,40 @@ files = [
 
 [[package]]
 name = "httpcore"
-version = "0.18.0"
+version = "1.0.2"
 description = "A minimal low-level HTTP client."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "httpcore-0.18.0-py3-none-any.whl", hash = "sha256:adc5398ee0a476567bf87467063ee63584a8bce86078bf748e48754f60202ced"},
-    {file = "httpcore-0.18.0.tar.gz", hash = "sha256:13b5e5cd1dca1a6636a6aaea212b19f4f85cd88c366a2b82304181b769aab3c9"},
+    {file = "httpcore-1.0.2-py3-none-any.whl", hash = "sha256:096cc05bca73b8e459a1fc3dcf585148f63e534eae4339559c9b8a8d6399acc7"},
+    {file = "httpcore-1.0.2.tar.gz", hash = "sha256:9fc092e4799b26174648e54b74ed5f683132a464e95643b226e00c2ed2fa6535"},
 ]
 
 [package.dependencies]
-anyio = ">=3.0,<5.0"
 certifi = "*"
 h11 = ">=0.13,<0.15"
 sniffio = "==1.*"
 
 [package.extras]
+asyncio = ["anyio (>=4.0,<5.0)"]
 http2 = ["h2 (>=3,<5)"]
 socks = ["socksio (==1.*)"]
 
 [[package]]
 name = "httpx"
-version = "0.25.0"
+version = "0.25.1"
 description = "The next generation HTTP client."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "httpx-0.25.0-py3-none-any.whl", hash = "sha256:181ea7f8ba3a82578be86ef4171554dd45fec26a02556a744db029a0a27b7100"},
-    {file = "httpx-0.25.0.tar.gz", hash = "sha256:47ecda285389cb32bb2691cc6e069e3ab0205956f681c5b2ad2325719751d875"},
+    {file = "httpx-0.25.1-py3-none-any.whl", hash = "sha256:fec7d6cc5c27c578a391f7e87b9aa7d3d8fbcd034f6399f9f79b45bcc12a866a"},
+    {file = "httpx-0.25.1.tar.gz", hash = "sha256:ffd96d5cf901e63863d9f1b4b6807861dbea4d301613415d9e6e57ead15fc5d0"},
 ]
 
 [package.dependencies]
+anyio = "*"
 certifi = "*"
-httpcore = ">=0.18.0,<0.19.0"
+httpcore = "*"
 idna = "*"
 sniffio = "*"
 
@@ -403,13 +475,13 @@ socks = ["socksio (==1.*)"]
 
 [[package]]
 name = "identify"
-version = "2.5.28"
+version = "2.5.32"
 description = "File identification library for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "identify-2.5.28-py2.py3-none-any.whl", hash = "sha256:87816de144bf46d161bd5b3e8f5596b16cade3b80be537087334b26bc5c177f3"},
-    {file = "identify-2.5.28.tar.gz", hash = "sha256:94bb59643083ebd60dc996d043497479ee554381fbc5307763915cda49b0e78f"},
+    {file = "identify-2.5.32-py2.py3-none-any.whl", hash = "sha256:0b7656ef6cba81664b783352c73f8c24b39cf82f926f78f4550eda928e5e0545"},
+    {file = "identify-2.5.32.tar.gz", hash = "sha256:5d9979348ec1a21c768ae07e0a652924538e8bce67313a73cb0f681cf08ba407"},
 ]
 
 [package.extras]
@@ -468,6 +540,113 @@ MarkupSafe = ">=2.0"
 
 [package.extras]
 i18n = ["Babel (>=2.7)"]
 
+[[package]]
+name = "lxml"
+version = "4.9.3"
+description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" +files = [ + {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, + {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, + {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, + {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, + {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, + {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, + {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, + {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, + {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, + {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f"}, + {file = "lxml-4.9.3-cp310-cp310-win32.whl", hash = "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85"}, + {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, + {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, + {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, + {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6"}, + {file = "lxml-4.9.3-cp311-cp311-win32.whl", hash = "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305"}, + {file = "lxml-4.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc"}, + {file = "lxml-4.9.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4"}, + {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be"}, + {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13"}, + {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9"}, + {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5"}, + {file = "lxml-4.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8"}, + {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7"}, + {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2"}, + {file = "lxml-4.9.3-cp35-cp35m-win32.whl", hash = "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d"}, + {file = "lxml-4.9.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833"}, + {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, + {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287"}, + {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458"}, + {file = "lxml-4.9.3-cp36-cp36m-win32.whl", hash = "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477"}, + {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, + {file = 
"lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, + {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a"}, + {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02"}, + {file = "lxml-4.9.3-cp37-cp37m-win32.whl", hash = "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f"}, + {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, + {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40"}, + {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7"}, + {file = "lxml-4.9.3-cp38-cp38-win32.whl", hash = "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574"}, + {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, + {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432"}, + {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69"}, + {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50"}, + {file = "lxml-4.9.3-cp39-cp39-win32.whl", hash = "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2"}, + {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, + {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, + {file = 
"lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=0.29.35)"] + [[package]] name = "markdown-it-py" version = "3.0.0" @@ -597,6 +776,66 @@ files = [ [package.dependencies] setuptools = "*" +[[package]] +name = "numpy" +version = "1.26.2" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3703fc9258a4a122d17043e57b35e5ef1c5a5837c3db8be396c82e04c1cf9b0f"}, + {file = "numpy-1.26.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cc392fdcbd21d4be6ae1bb4475a03ce3b025cd49a9be5345d76d7585aea69440"}, + {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36340109af8da8805d8851ef1d74761b3b88e81a9bd80b290bbfed61bd2b4f75"}, + {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcc008217145b3d77abd3e4d5ef586e3bdfba8fe17940769f8aa09b99e856c00"}, + {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3ced40d4e9e18242f70dd02d739e44698df3dcb010d31f495ff00a31ef6014fe"}, + {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b272d4cecc32c9e19911891446b72e986157e6a1809b7b56518b4f3755267523"}, + {file = "numpy-1.26.2-cp310-cp310-win32.whl", hash = "sha256:22f8fc02fdbc829e7a8c578dd8d2e15a9074b630d4da29cda483337e300e3ee9"}, + {file = "numpy-1.26.2-cp310-cp310-win_amd64.whl", hash = "sha256:26c9d33f8e8b846d5a65dd068c14e04018d05533b348d9eaeef6c1bd787f9919"}, + {file = "numpy-1.26.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b96e7b9c624ef3ae2ae0e04fa9b460f6b9f17ad8b4bec6d7756510f1f6c0c841"}, + {file = "numpy-1.26.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aa18428111fb9a591d7a9cc1b48150097ba6a7e8299fb56bdf574df650e7d1f1"}, + {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06fa1ed84aa60ea6ef9f91ba57b5ed963c3729534e6e54055fc151fad0423f0a"}, + {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96ca5482c3dbdd051bcd1fce8034603d6ebfc125a7bd59f55b40d8f5d246832b"}, + {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:854ab91a2906ef29dc3925a064fcd365c7b4da743f84b123002f6139bcb3f8a7"}, + {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f43740ab089277d403aa07567be138fc2a89d4d9892d113b76153e0e412409f8"}, + {file = "numpy-1.26.2-cp311-cp311-win32.whl", hash = "sha256:a2bbc29fcb1771cd7b7425f98b05307776a6baf43035d3b80c4b0f29e9545186"}, + {file = "numpy-1.26.2-cp311-cp311-win_amd64.whl", hash = "sha256:2b3fca8a5b00184828d12b073af4d0fc5fdd94b1632c2477526f6bd7842d700d"}, + {file = "numpy-1.26.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a4cd6ed4a339c21f1d1b0fdf13426cb3b284555c27ac2f156dfdaaa7e16bfab0"}, + {file = "numpy-1.26.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5d5244aabd6ed7f312268b9247be47343a654ebea52a60f002dc70c769048e75"}, + {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a3cdb4d9c70e6b8c0814239ead47da00934666f668426fc6e94cce869e13fd7"}, + {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa317b2325f7aa0a9471663e6093c210cb2ae9c0ad824732b307d2c51983d5b6"}, + {file = 
"numpy-1.26.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:174a8880739c16c925799c018f3f55b8130c1f7c8e75ab0a6fa9d41cab092fd6"}, + {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f79b231bf5c16b1f39c7f4875e1ded36abee1591e98742b05d8a0fb55d8a3eec"}, + {file = "numpy-1.26.2-cp312-cp312-win32.whl", hash = "sha256:4a06263321dfd3598cacb252f51e521a8cb4b6df471bb12a7ee5cbab20ea9167"}, + {file = "numpy-1.26.2-cp312-cp312-win_amd64.whl", hash = "sha256:b04f5dc6b3efdaab541f7857351aac359e6ae3c126e2edb376929bd3b7f92d7e"}, + {file = "numpy-1.26.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4eb8df4bf8d3d90d091e0146f6c28492b0be84da3e409ebef54349f71ed271ef"}, + {file = "numpy-1.26.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a13860fdcd95de7cf58bd6f8bc5a5ef81c0b0625eb2c9a783948847abbef2c2"}, + {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64308ebc366a8ed63fd0bf426b6a9468060962f1a4339ab1074c228fa6ade8e3"}, + {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf8aab04a2c0e859da118f0b38617e5ee65d75b83795055fb66c0d5e9e9b818"}, + {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d73a3abcac238250091b11caef9ad12413dab01669511779bc9b29261dd50210"}, + {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b361d369fc7e5e1714cf827b731ca32bff8d411212fccd29ad98ad622449cc36"}, + {file = "numpy-1.26.2-cp39-cp39-win32.whl", hash = "sha256:bd3f0091e845164a20bd5a326860c840fe2af79fa12e0469a12768a3ec578d80"}, + {file = "numpy-1.26.2-cp39-cp39-win_amd64.whl", hash = "sha256:2beef57fb031dcc0dc8fa4fe297a742027b954949cabb52a2a376c144e5e6060"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1cc3d5029a30fb5f06704ad6b23b35e11309491c999838c31f124fee32107c79"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94cc3c222bb9fb5a12e334d0479b97bb2df446fbe622b470928f5284ffca3f8d"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe6b44fb8fcdf7eda4ef4461b97b3f63c466b27ab151bec2366db8b197387841"}, + {file = "numpy-1.26.2.tar.gz", hash = "sha256:f65738447676ab5777f11e6bbbdb8ce11b785e105f690bc45966574816b6d3ea"}, +] + +[[package]] +name = "oxrdflib" +version = "0.3.6" +description = "rdflib stores based on pyoxigraph" +optional = false +python-versions = ">=3.7" +files = [ + {file = "oxrdflib-0.3.6-py3-none-any.whl", hash = "sha256:a645a3e5ba86e0c8ff33f6429ca623fe01d93d30234c8f2ad1f553636b4b756a"}, + {file = "oxrdflib-0.3.6.tar.gz", hash = "sha256:50f675773b87dd656f1753e24bf3b92fde06ad9ae7e8c95629a7593521d0aa06"}, +] + +[package.dependencies] +pyoxigraph = ">=0.3.14,<0.4.0" +rdflib = ">=6.3,<8.0" + [[package]] name = "oxrdflib" version = "0.3.6" @@ -614,13 +853,13 @@ rdflib = ">=6.3,<8.0" [[package]] name = "packaging" -version = "23.1" +version = "23.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] 
[[package]] @@ -636,13 +875,13 @@ files = [ [[package]] name = "platformdirs" -version = "3.10.0" +version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, - {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, + {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, + {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, ] [package.extras] @@ -689,71 +928,192 @@ description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a1fcb59f2f355ec350073af41d927bf83a63b50e640f4dbaa01053a28b7a7718"}, - {file = "pydantic-1.10.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b7ccf02d7eb340b216ec33e53a3a629856afe1c6e0ef91d84a4e6f2fb2ca70fe"}, - {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fb2aa3ab3728d950bcc885a2e9eff6c8fc40bc0b7bb434e555c215491bcf48b"}, - {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:771735dc43cf8383959dc9b90aa281f0b6092321ca98677c5fb6125a6f56d58d"}, - {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca48477862372ac3770969b9d75f1bf66131d386dba79506c46d75e6b48c1e09"}, - {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5e7add47a5b5a40c49b3036d464e3c7802f8ae0d1e66035ea16aa5b7a3923ed"}, - {file = "pydantic-1.10.12-cp310-cp310-win_amd64.whl", hash = "sha256:e4129b528c6baa99a429f97ce733fff478ec955513630e61b49804b6cf9b224a"}, - {file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"}, - {file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"}, - {file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"}, - {file = "pydantic-1.10.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:549a8e3d81df0a85226963611950b12d2d334f214436a19537b2efed61b7639a"}, - {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598da88dfa127b666852bef6d0d796573a8cf5009ffd62104094a4fe39599565"}, - {file = 
"pydantic-1.10.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba5c4a8552bff16c61882db58544116d021d0b31ee7c66958d14cf386a5b5350"}, - {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79e6a11a07da7374f46970410b41d5e266f7f38f6a17a9c4823db80dadf4303"}, - {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab26038b8375581dc832a63c948f261ae0aa21f1d34c1293469f135fa92972a5"}, - {file = "pydantic-1.10.12-cp37-cp37m-win_amd64.whl", hash = "sha256:e0a16d274b588767602b7646fa05af2782576a6cf1022f4ba74cbb4db66f6ca8"}, - {file = "pydantic-1.10.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a9dfa722316f4acf4460afdf5d41d5246a80e249c7ff475c43a3a1e9d75cf62"}, - {file = "pydantic-1.10.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a73f489aebd0c2121ed974054cb2759af8a9f747de120acd2c3394cf84176ccb"}, - {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bcb8cbfccfcf02acb8f1a261143fab622831d9c0989707e0e659f77a18e0"}, - {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fcfb5296d7877af406ba1547dfde9943b1256d8928732267e2653c26938cd9c"}, - {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2f9a6fab5f82ada41d56b0602606a5506aab165ca54e52bc4545028382ef1c5d"}, - {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dea7adcc33d5d105896401a1f37d56b47d443a2b2605ff8a969a0ed5543f7e33"}, - {file = "pydantic-1.10.12-cp38-cp38-win_amd64.whl", hash = "sha256:1eb2085c13bce1612da8537b2d90f549c8cbb05c67e8f22854e201bde5d98a47"}, - {file = "pydantic-1.10.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef6c96b2baa2100ec91a4b428f80d8f28a3c9e53568219b6c298c1125572ebc6"}, - {file = "pydantic-1.10.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c076be61cd0177a8433c0adcb03475baf4ee91edf5a4e550161ad57fc90f523"}, - {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5a58feb9a39f481eda4d5ca220aa8b9d4f21a41274760b9bc66bfd72595b86"}, - {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5f805d2d5d0a41633651a73fa4ecdd0b3d7a49de4ec3fadf062fe16501ddbf1"}, - {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1289c180abd4bd4555bb927c42ee42abc3aee02b0fb2d1223fb7c6e5bef87dbe"}, - {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5d1197e462e0364906cbc19681605cb7c036f2475c899b6f296104ad42b9f5fb"}, - {file = "pydantic-1.10.12-cp39-cp39-win_amd64.whl", hash = "sha256:fdbdd1d630195689f325c9ef1a12900524dceb503b00a987663ff4f58669b93d"}, - {file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"}, - {file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"}, + {file = "pydantic-2.5.1-py3-none-any.whl", hash = "sha256:dc5244a8939e0d9a68f1f1b5f550b2e1c879912033b1becbedb315accc75441b"}, + {file = "pydantic-2.5.1.tar.gz", hash = "sha256:0b8be5413c06aadfbe56f6dc1d45c9ed25fd43264414c571135c97dd77c2bedb"}, ] [package.dependencies] -typing-extensions = ">=4.2.0" +annotated-types = ">=0.4.0" +pydantic-core = "2.14.3" +typing-extensions = ">=4.6.1" [package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] +email = 
["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.14.3" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic_core-2.14.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:ba44fad1d114539d6a1509966b20b74d2dec9a5b0ee12dd7fd0a1bb7b8785e5f"}, + {file = "pydantic_core-2.14.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4a70d23eedd88a6484aa79a732a90e36701048a1509078d1b59578ef0ea2cdf5"}, + {file = "pydantic_core-2.14.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cc24728a1a9cef497697e53b3d085fb4d3bc0ef1ef4d9b424d9cf808f52c146"}, + {file = "pydantic_core-2.14.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab4a2381005769a4af2ffddae74d769e8a4aae42e970596208ec6d615c6fb080"}, + {file = "pydantic_core-2.14.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:905a12bf088d6fa20e094f9a477bf84bd823651d8b8384f59bcd50eaa92e6a52"}, + {file = "pydantic_core-2.14.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:38aed5a1bbc3025859f56d6a32f6e53ca173283cb95348e03480f333b1091e7d"}, + {file = "pydantic_core-2.14.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1767bd3f6370458e60c1d3d7b1d9c2751cc1ad743434e8ec84625a610c8b9195"}, + {file = "pydantic_core-2.14.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7cb0c397f29688a5bd2c0dbd44451bc44ebb9b22babc90f97db5ec3e5bb69977"}, + {file = "pydantic_core-2.14.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9ff737f24b34ed26de62d481ef522f233d3c5927279f6b7229de9b0deb3f76b5"}, + {file = "pydantic_core-2.14.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a1a39fecb5f0b19faee9a8a8176c805ed78ce45d760259a4ff3d21a7daa4dfc1"}, + {file = "pydantic_core-2.14.3-cp310-none-win32.whl", hash = "sha256:ccbf355b7276593c68fa824030e68cb29f630c50e20cb11ebb0ee450ae6b3d08"}, + {file = "pydantic_core-2.14.3-cp310-none-win_amd64.whl", hash = "sha256:536e1f58419e1ec35f6d1310c88496f0d60e4f182cacb773d38076f66a60b149"}, + {file = "pydantic_core-2.14.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:f1f46700402312bdc31912f6fc17f5ecaaaa3bafe5487c48f07c800052736289"}, + {file = "pydantic_core-2.14.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:88ec906eb2d92420f5b074f59cf9e50b3bb44f3cb70e6512099fdd4d88c2f87c"}, + {file = "pydantic_core-2.14.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:056ea7cc3c92a7d2a14b5bc9c9fa14efa794d9f05b9794206d089d06d3433dc7"}, + {file = "pydantic_core-2.14.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:076edc972b68a66870cec41a4efdd72a6b655c4098a232314b02d2bfa3bfa157"}, + {file = "pydantic_core-2.14.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e71f666c3bf019f2490a47dddb44c3ccea2e69ac882f7495c68dc14d4065eac2"}, + {file = "pydantic_core-2.14.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f518eac285c9632be337323eef9824a856f2680f943a9b68ac41d5f5bad7df7c"}, + {file = "pydantic_core-2.14.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9dbab442a8d9ca918b4ed99db8d89d11b1f067a7dadb642476ad0889560dac79"}, + {file = "pydantic_core-2.14.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0653fb9fc2fa6787f2fa08631314ab7fc8070307bd344bf9471d1b7207c24623"}, + {file = "pydantic_core-2.14.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:c54af5069da58ea643ad34ff32fd6bc4eebb8ae0fef9821cd8919063e0aeeaab"}, + {file = "pydantic_core-2.14.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc956f78651778ec1ab105196e90e0e5f5275884793ab67c60938c75bcca3989"}, + {file = "pydantic_core-2.14.3-cp311-none-win32.whl", hash = "sha256:5b73441a1159f1fb37353aaefb9e801ab35a07dd93cb8177504b25a317f4215a"}, + {file = "pydantic_core-2.14.3-cp311-none-win_amd64.whl", hash = "sha256:7349f99f1ef8b940b309179733f2cad2e6037a29560f1b03fdc6aa6be0a8d03c"}, + {file = "pydantic_core-2.14.3-cp311-none-win_arm64.whl", hash = "sha256:ec79dbe23702795944d2ae4c6925e35a075b88acd0d20acde7c77a817ebbce94"}, + {file = "pydantic_core-2.14.3-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:8f5624f0f67f2b9ecaa812e1dfd2e35b256487566585160c6c19268bf2ffeccc"}, + {file = "pydantic_core-2.14.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6c2d118d1b6c9e2d577e215567eedbe11804c3aafa76d39ec1f8bc74e918fd07"}, + {file = "pydantic_core-2.14.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe863491664c6720d65ae438d4efaa5eca766565a53adb53bf14bc3246c72fe0"}, + {file = "pydantic_core-2.14.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:136bc7247e97a921a020abbd6ef3169af97569869cd6eff41b6a15a73c44ea9b"}, + {file = "pydantic_core-2.14.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aeafc7f5bbddc46213707266cadc94439bfa87ecf699444de8be044d6d6eb26f"}, + {file = "pydantic_core-2.14.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e16aaf788f1de5a85c8f8fcc9c1ca1dd7dd52b8ad30a7889ca31c7c7606615b8"}, + {file = "pydantic_core-2.14.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8fc652c354d3362e2932a79d5ac4bbd7170757a41a62c4fe0f057d29f10bebb"}, + {file = "pydantic_core-2.14.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f1b92e72babfd56585c75caf44f0b15258c58e6be23bc33f90885cebffde3400"}, + {file = "pydantic_core-2.14.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:75f3f534f33651b73f4d3a16d0254de096f43737d51e981478d580f4b006b427"}, + {file = "pydantic_core-2.14.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c9ffd823c46e05ef3eb28b821aa7bc501efa95ba8880b4a1380068e32c5bed47"}, + {file = "pydantic_core-2.14.3-cp312-none-win32.whl", hash = "sha256:12e05a76b223577a4696c76d7a6b36a0ccc491ffb3c6a8cf92d8001d93ddfd63"}, + {file = "pydantic_core-2.14.3-cp312-none-win_amd64.whl", hash = "sha256:1582f01eaf0537a696c846bea92082082b6bfc1103a88e777e983ea9fbdc2a0f"}, + {file = "pydantic_core-2.14.3-cp312-none-win_arm64.whl", hash = "sha256:96fb679c7ca12a512d36d01c174a4fbfd912b5535cc722eb2c010c7b44eceb8e"}, + {file = "pydantic_core-2.14.3-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:71ed769b58d44e0bc2701aa59eb199b6665c16e8a5b8b4a84db01f71580ec448"}, + {file = "pydantic_core-2.14.3-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:5402ee0f61e7798ea93a01b0489520f2abfd9b57b76b82c93714c4318c66ca06"}, + {file = "pydantic_core-2.14.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaab9dc009e22726c62fe3b850b797e7f0e7ba76d245284d1064081f512c7226"}, + {file = "pydantic_core-2.14.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:92486a04d54987054f8b4405a9af9d482e5100d6fe6374fc3303015983fc8bda"}, + {file = "pydantic_core-2.14.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:cf08b43d1d5d1678f295f0431a4a7e1707d4652576e1d0f8914b5e0213bfeee5"}, + {file = "pydantic_core-2.14.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8ca13480ce16daad0504be6ce893b0ee8ec34cd43b993b754198a89e2787f7e"}, + {file = "pydantic_core-2.14.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44afa3c18d45053fe8d8228950ee4c8eaf3b5a7f3b64963fdeac19b8342c987f"}, + {file = "pydantic_core-2.14.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56814b41486e2d712a8bc02a7b1f17b87fa30999d2323bbd13cf0e52296813a1"}, + {file = "pydantic_core-2.14.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c3dc2920cc96f9aa40c6dc54256e436cc95c0a15562eb7bd579e1811593c377e"}, + {file = "pydantic_core-2.14.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e483b8b913fcd3b48badec54185c150cb7ab0e6487914b84dc7cde2365e0c892"}, + {file = "pydantic_core-2.14.3-cp37-none-win32.whl", hash = "sha256:364dba61494e48f01ef50ae430e392f67ee1ee27e048daeda0e9d21c3ab2d609"}, + {file = "pydantic_core-2.14.3-cp37-none-win_amd64.whl", hash = "sha256:a402ae1066be594701ac45661278dc4a466fb684258d1a2c434de54971b006ca"}, + {file = "pydantic_core-2.14.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:10904368261e4509c091cbcc067e5a88b070ed9a10f7ad78f3029c175487490f"}, + {file = "pydantic_core-2.14.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:260692420028319e201b8649b13ac0988974eeafaaef95d0dfbf7120c38dc000"}, + {file = "pydantic_core-2.14.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c1bf1a7b05a65d3b37a9adea98e195e0081be6b17ca03a86f92aeb8b110f468"}, + {file = "pydantic_core-2.14.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d7abd17a838a52140e3aeca271054e321226f52df7e0a9f0da8f91ea123afe98"}, + {file = "pydantic_core-2.14.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5c51460ede609fbb4fa883a8fe16e749964ddb459966d0518991ec02eb8dfb9"}, + {file = "pydantic_core-2.14.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d06c78074646111fb01836585f1198367b17d57c9f427e07aaa9ff499003e58d"}, + {file = "pydantic_core-2.14.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af452e69446fadf247f18ac5d153b1f7e61ef708f23ce85d8c52833748c58075"}, + {file = "pydantic_core-2.14.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e3ad4968711fb379a67c8c755beb4dae8b721a83737737b7bcee27c05400b047"}, + {file = "pydantic_core-2.14.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c5ea0153482e5b4d601c25465771c7267c99fddf5d3f3bdc238ef930e6d051cf"}, + {file = "pydantic_core-2.14.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:96eb10ef8920990e703da348bb25fedb8b8653b5966e4e078e5be382b430f9e0"}, + {file = "pydantic_core-2.14.3-cp38-none-win32.whl", hash = "sha256:ea1498ce4491236d1cffa0eee9ad0968b6ecb0c1cd711699c5677fc689905f00"}, + {file = "pydantic_core-2.14.3-cp38-none-win_amd64.whl", hash = "sha256:2bc736725f9bd18a60eec0ed6ef9b06b9785454c8d0105f2be16e4d6274e63d0"}, + {file = "pydantic_core-2.14.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:1ea992659c03c3ea811d55fc0a997bec9dde863a617cc7b25cfde69ef32e55af"}, + {file = "pydantic_core-2.14.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d2b53e1f851a2b406bbb5ac58e16c4a5496038eddd856cc900278fa0da97f3fc"}, + {file = "pydantic_core-2.14.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0c7f8e8a7cf8e81ca7d44bea4f181783630959d41b4b51d2f74bc50f348a090f"}, + {file = "pydantic_core-2.14.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8d3b9c91eeb372a64ec6686c1402afd40cc20f61a0866850f7d989b6bf39a41a"}, + {file = "pydantic_core-2.14.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ef3e2e407e4cad2df3c89488a761ed1f1c33f3b826a2ea9a411b0a7d1cccf1b"}, + {file = "pydantic_core-2.14.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f86f20a9d5bee1a6ede0f2757b917bac6908cde0f5ad9fcb3606db1e2968bcf5"}, + {file = "pydantic_core-2.14.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61beaa79d392d44dc19d6f11ccd824d3cccb865c4372157c40b92533f8d76dd0"}, + {file = "pydantic_core-2.14.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d41df8e10b094640a6b234851b624b76a41552f637b9fb34dc720b9fe4ef3be4"}, + {file = "pydantic_core-2.14.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c08ac60c3caa31f825b5dbac47e4875bd4954d8f559650ad9e0b225eaf8ed0c"}, + {file = "pydantic_core-2.14.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d8b3932f1a369364606417ded5412c4ffb15bedbcf797c31317e55bd5d920e"}, + {file = "pydantic_core-2.14.3-cp39-none-win32.whl", hash = "sha256:caa94726791e316f0f63049ee00dff3b34a629b0d099f3b594770f7d0d8f1f56"}, + {file = "pydantic_core-2.14.3-cp39-none-win_amd64.whl", hash = "sha256:2494d20e4c22beac30150b4be3b8339bf2a02ab5580fa6553ca274bc08681a65"}, + {file = "pydantic_core-2.14.3-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:fe272a72c7ed29f84c42fedd2d06c2f9858dc0c00dae3b34ba15d6d8ae0fbaaf"}, + {file = "pydantic_core-2.14.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7e63a56eb7fdee1587d62f753ccd6d5fa24fbeea57a40d9d8beaef679a24bdd6"}, + {file = "pydantic_core-2.14.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7692f539a26265cece1e27e366df5b976a6db6b1f825a9e0466395b314ee48b"}, + {file = "pydantic_core-2.14.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af46f0b7a1342b49f208fed31f5a83b8495bb14b652f621e0a6787d2f10f24ee"}, + {file = "pydantic_core-2.14.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6e2f9d76c00e805d47f19c7a96a14e4135238a7551a18bfd89bb757993fd0933"}, + {file = "pydantic_core-2.14.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:de52ddfa6e10e892d00f747bf7135d7007302ad82e243cf16d89dd77b03b649d"}, + {file = "pydantic_core-2.14.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:38113856c7fad8c19be7ddd57df0c3e77b1b2336459cb03ee3903ce9d5e236ce"}, + {file = "pydantic_core-2.14.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:354db020b1f8f11207b35360b92d95725621eb92656725c849a61e4b550f4acc"}, + {file = "pydantic_core-2.14.3-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:76fc18653a5c95e5301a52d1b5afb27c9adc77175bf00f73e94f501caf0e05ad"}, + {file = "pydantic_core-2.14.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2646f8270f932d79ba61102a15ea19a50ae0d43b314e22b3f8f4b5fabbfa6e38"}, + {file = "pydantic_core-2.14.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37dad73a2f82975ed563d6a277fd9b50e5d9c79910c4aec787e2d63547202315"}, + {file = "pydantic_core-2.14.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:113752a55a8eaece2e4ac96bc8817f134c2c23477e477d085ba89e3aa0f4dc44"}, + 
{file = "pydantic_core-2.14.3-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:8488e973547e8fb1b4193fd9faf5236cf1b7cd5e9e6dc7ff6b4d9afdc4c720cb"}, + {file = "pydantic_core-2.14.3-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3d1dde10bd9962b1434053239b1d5490fc31a2b02d8950a5f731bc584c7a5a0f"}, + {file = "pydantic_core-2.14.3-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:2c83892c7bf92b91d30faca53bb8ea21f9d7e39f0ae4008ef2c2f91116d0464a"}, + {file = "pydantic_core-2.14.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:849cff945284c577c5f621d2df76ca7b60f803cc8663ff01b778ad0af0e39bb9"}, + {file = "pydantic_core-2.14.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa89919fbd8a553cd7d03bf23d5bc5deee622e1b5db572121287f0e64979476"}, + {file = "pydantic_core-2.14.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf15145b1f8056d12c67255cd3ce5d317cd4450d5ee747760d8d088d85d12a2d"}, + {file = "pydantic_core-2.14.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4cc6bb11f4e8e5ed91d78b9880774fbc0856cb226151b0a93b549c2b26a00c19"}, + {file = "pydantic_core-2.14.3-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:832d16f248ca0cc96929139734ec32d21c67669dcf8a9f3f733c85054429c012"}, + {file = "pydantic_core-2.14.3-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b02b5e1f54c3396c48b665050464803c23c685716eb5d82a1d81bf81b5230da4"}, + {file = "pydantic_core-2.14.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:1f2d4516c32255782153e858f9a900ca6deadfb217fd3fb21bb2b60b4e04d04d"}, + {file = "pydantic_core-2.14.3-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0a3e51c2be472b7867eb0c5d025b91400c2b73a0823b89d4303a9097e2ec6655"}, + {file = "pydantic_core-2.14.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:df33902464410a1f1a0411a235f0a34e7e129f12cb6340daca0f9d1390f5fe10"}, + {file = "pydantic_core-2.14.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27828f0227b54804aac6fb077b6bb48e640b5435fdd7fbf0c274093a7b78b69c"}, + {file = "pydantic_core-2.14.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e2979dc80246e18e348de51246d4c9b410186ffa3c50e77924bec436b1e36cb"}, + {file = "pydantic_core-2.14.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b28996872b48baf829ee75fa06998b607c66a4847ac838e6fd7473a6b2ab68e7"}, + {file = "pydantic_core-2.14.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ca55c9671bb637ce13d18ef352fd32ae7aba21b4402f300a63f1fb1fd18e0364"}, + {file = "pydantic_core-2.14.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:aecd5ed096b0e5d93fb0367fd8f417cef38ea30b786f2501f6c34eabd9062c38"}, + {file = "pydantic_core-2.14.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:44aaf1a07ad0824e407dafc637a852e9a44d94664293bbe7d8ee549c356c8882"}, + {file = "pydantic_core-2.14.3.tar.gz", hash = "sha256:3ad083df8fe342d4d8d00cc1d3c1a23f0dc84fce416eb301e69f1ddbbe124d3f"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydantic-settings" +version = "2.1.0" +description = "Settings management using Pydantic" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_settings-2.1.0-py3-none-any.whl", hash = "sha256:7621c0cb5d90d1140d2f0ef557bdf03573aac7035948109adf2574770b77605a"}, + {file = "pydantic_settings-2.1.0.tar.gz", hash = 
"sha256:26b1492e0a24755626ac5e6d715e9077ab7ad4fb5f19a8b7ed7011d52f36141c"}, +] + +[package.dependencies] +pydantic = ">=2.3.0" +python-dotenv = ">=0.21.0" [[package]] name = "pygments" -version = "2.16.1" +version = "2.17.1" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.7" files = [ - {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, - {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, + {file = "pygments-2.17.1-py3-none-any.whl", hash = "sha256:1b37f1b1e1bff2af52ecaf28cc601e2ef7077000b227a0675da25aef85784bc4"}, + {file = "pygments-2.17.1.tar.gz", hash = "sha256:e45a0e74bf9c530f564ca81b8952343be986a29f6afe7f5ad95c5f06b7bdf5e8"}, ] [package.extras] plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyld" +version = "2.0.3" +description = "Python implementation of the JSON-LD API" +optional = false +python-versions = "*" +files = [ + {file = "PyLD-2.0.3.tar.gz", hash = "sha256:287445f888c3a332ccbd20a14844c66c2fcbaeab3c99acd506a0788e2ebb2f82"}, +] + +[package.dependencies] +cachetools = "*" +frozendict = "*" +lxml = "*" + +[package.extras] +aiohttp = ["aiohttp"] +cachetools = ["cachetools"] +frozendict = ["frozendict"] +requests = ["requests"] [[package]] name = "pynvml" version = "11.5.0" description = "Python Bindings for the NVIDIA Management Library" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ {file = "pynvml-11.5.0-py3-none-any.whl", hash = "sha256:5cce014ac01b098d08f06178f86c37be409b80b2e903a5a03ce15eed60f55e25"}, {file = "pynvml-11.5.0.tar.gz", hash = "sha256:d027b21b95b1088b9fc278117f9f61b7c67f8e33a787e9f83f735f0f71ac32d0"}, @@ -761,34 +1121,28 @@ files = [ [[package]] name = "pyoxigraph" -version = "0.3.19" +version = "0.3.20" description = "Python bindings of Oxigraph, a SPARQL database and RDF toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "pyoxigraph-0.3.19-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:334d2e8f39745a29536485aa7534bcc0e0bd500f541d7b04fdc95d84e5dffa84"}, - {file = "pyoxigraph-0.3.19-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04e85e37bfa47f5c0d5eb8f2b3d787799ace63c3e84229b7f6db007f3236c41c"}, - {file = "pyoxigraph-0.3.19-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56702be11132d4d8e933d26dc26dbdd1d5fdac6394a8b8bc67d81157a24346b4"}, - {file = "pyoxigraph-0.3.19-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b66e4a9c7ce8137c003c0e2747141749a460910624583815f2a5cce95d8d20"}, - {file = "pyoxigraph-0.3.19-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0655dfbfb8946d4823956d9e82f53d21dd003fc69ac196ce317c502c58dc3c76"}, - {file = "pyoxigraph-0.3.19-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:41120a7455dc0547ccbdd6ddd14240eef85758a3d8bd3911b38ee5771643e004"}, - {file = "pyoxigraph-0.3.19-cp37-abi3-macosx_10_14_x86_64.macosx_11_0_arm64.macosx_10_14_universal2.whl", hash = "sha256:aee5d8b5e05cba9d3ea8f7e65bcdff228cf200358e0e520c97acea1c6a338fc8"}, - {file = "pyoxigraph-0.3.19-cp37-abi3-macosx_10_14_x86_64.whl", hash = "sha256:66244dffa3f97f49a2b27bf72faa9099a6b7d427711d71553432ef5abf1716f5"}, - {file = "pyoxigraph-0.3.19-cp37-abi3-macosx_11_0_arm64.whl", hash = 
"sha256:d3edd68f008bcdabeefe043370f069a6439a7ca8f12d026aef21a9fd1965373d"}, - {file = "pyoxigraph-0.3.19-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0574bf57068bf191fab21dd92f9725c6f482471a845094e41d8563f3400a6f8"}, - {file = "pyoxigraph-0.3.19-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41d3d2b785900e516b5c8218a614a08232cf4b32e29c3393fd0645775efb18"}, - {file = "pyoxigraph-0.3.19-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:4586446a2389d6339fd1f1b04ed1d29843dcfcf86c0fef9523b92c7ed84b8acb"}, - {file = "pyoxigraph-0.3.19-cp37-abi3-win_amd64.whl", hash = "sha256:0a1e2f4f4d65c39d94b5bb0295f57d41915d990e2e31cb3847123a34a7114aaf"}, - {file = "pyoxigraph-0.3.19-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19f221ed944347f99a0eae61a4d967ea0e252f323e2016a4976676a39486fa37"}, - {file = "pyoxigraph-0.3.19-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc91fc3b5b6c2553f0baf3960c81ac92b888d0e4f8f1843f055ab5325fa68021"}, - {file = "pyoxigraph-0.3.19-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ad06f9ff107683a349abeca0691a6f96b4eff0038057ce8cc4a0abfb0ffb786a"}, - {file = "pyoxigraph-0.3.19-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0e859f766b89100e7805a7be3fe5cbf2d455dbeaddbbea1db478de6c046fa0f"}, - {file = "pyoxigraph-0.3.19-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a1db8df6e5f38518ab4fe527240be6aff35cb080a9920b53376c97ff51839ef"}, - {file = "pyoxigraph-0.3.19-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:251804732f9d9bf50c4fcc60ea181b26c9ae0e578f9658d0daee7d27c281b22d"}, - {file = "pyoxigraph-0.3.19-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fb0c1b67514e27f34c6d1803427fc1fd5b05754588f3814f9a1fc21c11912f"}, - {file = "pyoxigraph-0.3.19-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72420b1432c0cb3fc7139a3adf4a60d98509b83dc5d130596a52cf1162afadb"}, - {file = "pyoxigraph-0.3.19-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fd018bf318c9d9db175fb732c9a342c40867c5703b0d204b9c9856a1efa8fb32"}, - {file = "pyoxigraph-0.3.19.tar.gz", hash = "sha256:bd59af65c5203a359eb8603876a137831eaf044badf48c5942a24730bd3760e6"}, + {file = "pyoxigraph-0.3.20-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb0011e782bbe8b209d942b1d89dd2beefffd2f01ae269766bb32e51b47e429e"}, + {file = "pyoxigraph-0.3.20-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:682f7ba4c2c857bf541af433b0c2a9e754147177f66750a26462c108fc4fba6e"}, + {file = "pyoxigraph-0.3.20-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74dfed6ca723e2cf695fa301729b4ed5fec575b60293189e73b28d28d85dcb40"}, + {file = "pyoxigraph-0.3.20-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0fd4300ca22e2d215848ea1069789103cd9edcf5b7467a72a4f303b1ad65a1f9"}, + {file = "pyoxigraph-0.3.20-cp37-abi3-macosx_10_14_x86_64.macosx_11_0_arm64.macosx_10_14_universal2.whl", hash = "sha256:7abdda38b6083be0b06163316470139e240881c708dcefe375139683ef2b712a"}, + {file = "pyoxigraph-0.3.20-cp37-abi3-macosx_10_14_x86_64.whl", hash = "sha256:1f04370c4fdb78d0bffc4629cddae688dbcdac0faf69da8fd9fd4bd10f07783b"}, + {file = "pyoxigraph-0.3.20-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:acc8070a206cd6ee51487813e7f3ef67862174081655e201fde0d7fc26c5e967"}, + {file = "pyoxigraph-0.3.20-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d5e646a6a3ee2f731496be26524f1749453c181b282e50b2f48cce22ece55199"}, + {file = "pyoxigraph-0.3.20-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c02d3c07ddb6f6bc9fbc2cf51665e0d2b50ac29ce6098d82a38e071dd8a475c9"}, + {file = "pyoxigraph-0.3.20-cp37-abi3-win_amd64.whl", hash = "sha256:aacbd03bbd363389b5d2b309183c799a3361b653063985f38b231d89f22a316a"}, + {file = "pyoxigraph-0.3.20-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e75182b23d4990f85d4eb93c8369c1b85e01adfbefbcb6a50352a2490338b68"}, + {file = "pyoxigraph-0.3.20-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4f212fa83e435ad2653f97ece23ca51b3e950ef849d818dd7c97e0c1e8d48654"}, + {file = "pyoxigraph-0.3.20-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e35f4beb86764eeaf7af9ed3322d05e136c4fe20d708cb420c31db994700f2ff"}, + {file = "pyoxigraph-0.3.20-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3a4e771f69ae304dbeb7b72522de727297c5b28311e781aa7e70fc9552cd4ae1"}, + {file = "pyoxigraph-0.3.20-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9977786ed74e69b41a2e587f70c5407db093a65e0fbfe03f635a6f223d6f5364"}, + {file = "pyoxigraph-0.3.20-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c413fd53e33e7c2df44728a414c266270708dbf1f96899931f95eb3512682e8a"}, + {file = "pyoxigraph-0.3.20.tar.gz", hash = "sha256:e7bae3552188c30df847123843f3ed7bcb3b68537dfa253bbdda2047fa5cc568"}, ] [[package]] @@ -807,13 +1161,13 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "7.4.2" +version = "7.4.3" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"}, - {file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"}, + {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, + {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, ] [package.dependencies] @@ -973,13 +1327,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.5.2" +version = "13.7.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.5.2-py3-none-any.whl", hash = "sha256:146a90b3b6b47cac4a73c12866a499e9817426423f57c5a66949c086191a8808"}, - {file = "rich-13.5.2.tar.gz", hash = "sha256:fb9d6c0a0f643c99eed3875b5377a184132ba9be4d61516a55273d3554d75a39"}, + {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, + {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, ] [package.dependencies] @@ -1022,20 +1376,77 @@ wheel = ">=0.36.1" [[package]] name = "setuptools" -version = "68.2.2" +version = "69.0.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, - {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, + {file = 
"setuptools-69.0.1-py3-none-any.whl", hash = "sha256:6875bbd06382d857b1b90cd07cee6a2df701a164f241095706b5192bc56c5c62"}, + {file = "setuptools-69.0.1.tar.gz", hash = "sha256:f25195d54deb649832182d6455bffba7ac3d8fe71d35185e738d2198a4310044"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +[[package]] +name = "shapely" +version = "2.0.2" +description = "Manipulation and analysis of geometric objects" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shapely-2.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6ca8cffbe84ddde8f52b297b53f8e0687bd31141abb2c373fd8a9f032df415d6"}, + {file = "shapely-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:baa14fc27771e180c06b499a0a7ba697c7988c7b2b6cba9a929a19a4d2762de3"}, + {file = "shapely-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:36480e32c434d168cdf2f5e9862c84aaf4d714a43a8465ae3ce8ff327f0affb7"}, + {file = "shapely-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ef753200cbffd4f652efb2c528c5474e5a14341a473994d90ad0606522a46a2"}, + {file = "shapely-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9a41ff4323fc9d6257759c26eb1cf3a61ebc7e611e024e6091f42977303fd3a"}, + {file = "shapely-2.0.2-cp310-cp310-win32.whl", hash = "sha256:72b5997272ae8c25f0fd5b3b967b3237e87fab7978b8d6cd5fa748770f0c5d68"}, + {file = "shapely-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:34eac2337cbd67650248761b140d2535855d21b969d76d76123317882d3a0c1a"}, + {file = "shapely-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5b0c052709c8a257c93b0d4943b0b7a3035f87e2d6a8ac9407b6a992d206422f"}, + {file = "shapely-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2d217e56ae067e87b4e1731d0dc62eebe887ced729ba5c2d4590e9e3e9fdbd88"}, + {file = "shapely-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94ac128ae2ab4edd0bffcd4e566411ea7bdc738aeaf92c32a8a836abad725f9f"}, + {file = "shapely-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa3ee28f5e63a130ec5af4dc3c4cb9c21c5788bb13c15e89190d163b14f9fb89"}, + {file = "shapely-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:737dba15011e5a9b54a8302f1748b62daa207c9bc06f820cd0ad32a041f1c6f2"}, + {file = 
"shapely-2.0.2-cp311-cp311-win32.whl", hash = "sha256:45ac6906cff0765455a7b49c1670af6e230c419507c13e2f75db638c8fc6f3bd"}, + {file = "shapely-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:dc9342fc82e374130db86a955c3c4525bfbf315a248af8277a913f30911bed9e"}, + {file = "shapely-2.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:06f193091a7c6112fc08dfd195a1e3846a64306f890b151fa8c63b3e3624202c"}, + {file = "shapely-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:eebe544df5c018134f3c23b6515877f7e4cd72851f88a8d0c18464f414d141a2"}, + {file = "shapely-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7e92e7c255f89f5cdf777690313311f422aa8ada9a3205b187113274e0135cd8"}, + {file = "shapely-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be46d5509b9251dd9087768eaf35a71360de6afac82ce87c636990a0871aa18b"}, + {file = "shapely-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5533a925d8e211d07636ffc2fdd9a7f9f13d54686d00577eeb11d16f00be9c4"}, + {file = "shapely-2.0.2-cp312-cp312-win32.whl", hash = "sha256:084b023dae8ad3d5b98acee9d3bf098fdf688eb0bb9b1401e8b075f6a627b611"}, + {file = "shapely-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:ea84d1cdbcf31e619d672b53c4532f06253894185ee7acb8ceb78f5f33cbe033"}, + {file = "shapely-2.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ed1e99702125e7baccf401830a3b94d810d5c70b329b765fe93451fe14cf565b"}, + {file = "shapely-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7d897e6bdc6bc64f7f65155dbbb30e49acaabbd0d9266b9b4041f87d6e52b3a"}, + {file = "shapely-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0521d76d1e8af01e712db71da9096b484f081e539d4f4a8c97342e7971d5e1b4"}, + {file = "shapely-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:5324be299d4c533ecfcfd43424dfd12f9428fd6f12cda38a4316da001d6ef0ea"}, + {file = "shapely-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:78128357a0cee573257a0c2c388d4b7bf13cb7dbe5b3fe5d26d45ebbe2a39e25"}, + {file = "shapely-2.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:87dc2be34ac3a3a4a319b963c507ac06682978a5e6c93d71917618b14f13066e"}, + {file = "shapely-2.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:42997ac806e4583dad51c80a32d38570fd9a3d4778f5e2c98f9090aa7db0fe91"}, + {file = "shapely-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ccfd5fa10a37e67dbafc601c1ddbcbbfef70d34c3f6b0efc866ddbdb55893a6c"}, + {file = "shapely-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7c95d3379ae3abb74058938a9fcbc478c6b2e28d20dace38f8b5c587dde90aa"}, + {file = "shapely-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a21353d28209fb0d8cc083e08ca53c52666e0d8a1f9bbe23b6063967d89ed24"}, + {file = "shapely-2.0.2-cp38-cp38-win32.whl", hash = "sha256:03e63a99dfe6bd3beb8d5f41ec2086585bb969991d603f9aeac335ad396a06d4"}, + {file = "shapely-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:c6fd29fbd9cd76350bd5cc14c49de394a31770aed02d74203e23b928f3d2f1aa"}, + {file = "shapely-2.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1f217d28ecb48e593beae20a0082a95bd9898d82d14b8fcb497edf6bff9a44d7"}, + {file = "shapely-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:394e5085b49334fd5b94fa89c086edfb39c3ecab7f669e8b2a4298b9d523b3a5"}, + {file = "shapely-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fd3ad17b64466a033848c26cb5b509625c87d07dcf39a1541461cacdb8f7e91c"}, + {file = 
"shapely-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d41a116fcad58048d7143ddb01285e1a8780df6dc1f56c3b1e1b7f12ed296651"}, + {file = "shapely-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dea9a0651333cf96ef5bb2035044e3ad6a54f87d90e50fe4c2636debf1b77abc"}, + {file = "shapely-2.0.2-cp39-cp39-win32.whl", hash = "sha256:b8eb0a92f7b8c74f9d8fdd1b40d395113f59bd8132ca1348ebcc1f5aece94b96"}, + {file = "shapely-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:794affd80ca0f2c536fc948a3afa90bd8fb61ebe37fe873483ae818e7f21def4"}, + {file = "shapely-2.0.2.tar.gz", hash = "sha256:1713cc04c171baffc5b259ba8531c58acc2a301707b7f021d88a15ed090649e7"}, +] + +[package.dependencies] +numpy = ">=1.14" + +[package.extras] +docs = ["matplotlib", "numpydoc (==1.1.*)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"] +test = ["pytest", "pytest-cov"] + [[package]] name = "six" version = "1.16.0" @@ -1052,7 +1463,7 @@ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, @@ -1063,7 +1474,7 @@ name = "starlette" version = "0.27.0" description = "The little ASGI library that shines." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ {file = "starlette-0.27.0-py3-none-any.whl", hash = "sha256:918416370e846586541235ccd38a474c08b80443ed31c578a418e2209b3eef91"}, {file = "starlette-0.27.0.tar.gz", hash = "sha256:6a6b0d042acb8d469a01eba54e9cda6cbd24ac602c4cd016723117d6a7e73b75"}, @@ -1093,24 +1504,23 @@ description = "Backported and Experimental Type Hints for Python 3.7+" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, ] [[package]] name = "urllib3" -version = "2.0.4" +version = "2.1.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false
python-versions = ">=3.7"
files = [
-    {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"},
-    {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"},
+    {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"},
+    {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"},
 ]
 
 [package.extras]
 brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
-secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"]
 socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
 zstd = ["zstandard (>=0.18.0)"]
 
@@ -1134,13 +1544,13 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)",
 
 [[package]]
 name = "virtualenv"
-version = "20.24.5"
+version = "20.24.6"
 description = "Virtual Python Environment builder"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "virtualenv-20.24.5-py3-none-any.whl", hash = "sha256:b80039f280f4919c77b30f1c23294ae357c4c8701042086e3fc005963e4e537b"},
-    {file = "virtualenv-20.24.5.tar.gz", hash = "sha256:e8361967f6da6fbdf1426483bfe9fca8287c242ac0bc30429905721cefbff752"},
+    {file = "virtualenv-20.24.6-py3-none-any.whl", hash = "sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381"},
+    {file = "virtualenv-20.24.6.tar.gz", hash = "sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af"},
 ]
 
 [package.dependencies]
@@ -1154,13 +1564,13 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess
 
 [[package]]
 name = "wheel"
-version = "0.41.2"
+version = "0.41.3"
description = "A built-package format for Python"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "wheel-0.41.2-py3-none-any.whl", hash = "sha256:75909db2664838d015e3d9139004ee16711748a52c8f336b52882266540215d8"},
-    {file = "wheel-0.41.2.tar.gz", hash = "sha256:0c5ac5ff2afb79ac23ab82bab027a0be7b5dbcf2e54dc50efe4bf507de1f7985"},
+    {file = "wheel-0.41.3-py3-none-any.whl", hash = "sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942"},
+    {file = "wheel-0.41.3.tar.gz", hash = "sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841"},
 ]
 
 [package.extras]
diff --git a/prez/app.py b/prez/app.py
index 01c0436a..c2dd201f 100644
--- a/prez/app.py
+++ b/prez/app.py
@@ -1,5 +1,4 @@
 import logging
-import os
 from textwrap import dedent
 
 import uvicorn
@@ -24,9 +23,10 @@
 )
 from prez.routers.catprez import router as catprez_router
 from prez.routers.cql import router as cql_router
 from prez.routers.identifier import router as identifier_router
 from prez.routers.management import router as management_router
 from prez.routers.object import router as object_router
+from prez.routers.ogc_catprez import router as ogc_router
 from prez.routers.profiles import router as profiles_router
 from prez.routers.search import router as search_router
 from prez.routers.spaceprez import router as spaceprez_router
@@ -64,7 +65,6 @@
     }
 )
 
-
 app.include_router(cql_router)
 app.include_router(management_router)
 app.include_router(object_router)
@@ -77,6 +77,10 @@
 app.include_router(vocprez_router)
 if "SpacePrez" in settings.prez_flavours:
     app.include_router(spaceprez_router)
+if "OGCPrez" in settings.prez_flavours:
+    app.include_router(ogc_router)
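+# NOTE: flavour-specific routers (CatPrez, VocPrez, SpacePrez, OGCPrez) are only
+# mounted when the corresponding flavour is enabled via settings.prez_flavours.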
app.include_router(identifier_router) @@ -100,16 +102,16 @@ async def add_cors_headers(request, call_next): ) -def prez_open_api_metadata(): - return get_openapi( - title=settings.prez_title, - version=settings.prez_version, - description=settings.prez_desc, - routes=app.routes, - ) - - -app.openapi = prez_open_api_metadata +# def prez_open_api_metadata(): +# return get_openapi( +# title=settings.prez_title, +# version=settings.prez_version, +# description=settings.prez_desc, +# routes=app.routes, +# ) +# +# +# app.openapi = prez_open_api_metadata @app.on_event("startup") diff --git a/prez/config.py b/prez/config.py index 88db3282..d2f9184a 100644 --- a/prez/config.py +++ b/prez/config.py @@ -1,9 +1,10 @@ from os import environ from pathlib import Path -from typing import Optional +from typing import Optional, List import toml -from pydantic import BaseSettings, root_validator +from pydantic import root_validator +from pydantic_settings import BaseSettings from rdflib import URIRef, DCTERMS, RDFS, SDO from rdflib.namespace import SKOS @@ -19,9 +20,6 @@ class Settings(BaseSettings): host: Prez' host domain name. Usually 'localhost' but could be anything port: The port Prez is made accessible on. Default is 8000, could be 80 or anything else that your system has permission to use system_uri: Documentation property. An IRI for the Prez system as a whole. This value appears in the landing page RDF delivered by Prez ('/') - top_level_classes: - collection_classes: - base_classes: log_level: log_output: prez_title: @@ -37,11 +35,8 @@ class Settings(BaseSettings): host: str = "localhost" port: int = 8000 curie_separator: str = ":" - system_uri: Optional[str] - top_level_classes: Optional[dict] - collection_classes: Optional[dict] + system_uri: Optional[str] = f"{protocol}://{host}:{port}" order_lists_by_label: bool = True - base_classes: Optional[dict] prez_flavours: Optional[list] = ["SpacePrez", "VocPrez", "CatPrez", "ProfilesPrez"] label_predicates = [SKOS.prefLabel, DCTERMS.title, RDFS.label, SDO.name] description_predicates = [SKOS.definition, DCTERMS.description, SDO.description] @@ -57,7 +52,7 @@ class Settings(BaseSettings): "A web framework API for delivering Linked Data. It provides read-only access to " "Knowledge Graph data which can be subset according to information profiles." 
 )
-    prez_version: Optional[str]
+    prez_version: Optional[str] = None
     disable_prefix_generation: bool = False
 
     @root_validator()
@@ -70,15 +65,34 @@ def get_version(cls, values):
             Path(Path(__file__).parent.parent) / "pyproject.toml"
         )["tool"]["poetry"]["version"]
 
         return values
-
-    @root_validator()
-    def set_system_uri(cls, values):
-        if not values.get("system_uri"):
-            values["system_uri"] = URIRef(
-                f"{values['protocol']}://{values['host']}:{values['port']}"
-            )
-        return values
+    # @root_validator()
+    # def check_endpoint_enabled(cls, values):
+    #     if not values.get("sparql_endpoint"):
+    #         raise ValueError(
+    #             'A SPARQL endpoint must be specified using the "SPARQL_ENDPOINT" environment variable'
+    #         )
+    #     return values
+    #
+    # @root_validator()
+    # def get_version(cls, values):
+    #     version = environ.get("PREZ_VERSION")
+    #     values["prez_version"] = version
+    #
+    #     if version is None or version == "":
+    #         values["prez_version"] = toml.load(
+    #             Path(Path(__file__).parent.parent) / "pyproject.toml"
+    #         )["tool"]["poetry"]["version"]
+    #
+    #     return values
+    #
+    # @root_validator()
+    # def set_system_uri(cls, values):
+    #     if not values.get("system_uri"):
+    #         values["system_uri"] = URIRef(
+    #             f"{values['protocol']}://{values['host']}:{values['port']}"
+    #         )
+    #     return values
 
 
 settings = Settings()
diff --git a/prez/dependencies.py b/prez/dependencies.py
index 72c7c123..a3779fe8 100644
--- a/prez/dependencies.py
+++ b/prez/dependencies.py
@@ -1,12 +1,16 @@
+import json
 from pathlib import Path
+from typing import Optional
+from pydantic import BaseModel
 import httpx
-from fastapi import Depends
+from fastapi import Depends, Request, HTTPException
 from pyoxigraph import Store
 
-from prez.cache import store, oxrdflib_store, system_store, profiles_graph_cache
+from prez.cache import store, oxrdflib_store
 from prez.config import settings
 from prez.sparql.methods import PyoxigraphRepo, RemoteSparqlRepo, OxrdflibRepo
+from temp.cql2sparql import CQLParser
 
 
 async def get_async_http_client():
@@ -22,10 +27,6 @@ def get_pyoxi_store():
     return store
 
 
-def get_system_store():
-    return system_store
-
-
 def get_oxrdflib_store():
     return oxrdflib_store
@@ -42,17 +43,6 @@ async def get_repo(
     return RemoteSparqlRepo(http_async_client)
 
 
-async def get_system_repo(
-    pyoxi_store: Store = Depends(get_system_store),
-):
-    """
-    A pyoxigraph Store with Prez system data including:
-    - Profiles
-    # TODO add and test other system data (endpoints etc.)
-    """
-    return PyoxigraphRepo(pyoxi_store)
-
-
 async def load_local_data_to_oxigraph(store: Store):
     """
     Loads all the data from the local data directory into the local SPARQL endpoint
@@ -61,10 +51,24 @@
         store.load(file.read_bytes(), "text/turtle")
 
 
-async def load_profile_data_to_oxigraph(store: Store):
-    """
-    Loads all the data from the local data directory into the local SPARQL endpoint
-    """
-    # TODO refactor to use the local files directly
-    graph_bytes = profiles_graph_cache.serialize(format="nt", encoding="utf-8")
-    store.load(graph_bytes, "application/n-triples")
+class CQLRequest(BaseModel):
+    cql: Optional[dict] = None
+
+
+async def cql_parser_dependency(request: Request):
+    """Parse a CQL JSON request body into CQL JSON-LD for downstream SPARQL generation."""
+    try:
+        body = await request.json()
+        # Load the default CQL JSON-LD context shipped with the application
+        context_path = Path(__file__).parent.parent / "temp" / "default_cql_context.json"
+        with context_path.open() as f:
+            context = json.load(f)
+        cql_parser = CQLParser(cql=body, context=context)
+        cql_parser.generate_jsonld()
+        return cql_parser.cql_json
+    except json.JSONDecodeError:
+        raise HTTPException(status_code=400, detail="Invalid JSON format.")
+    except Exception:  # TODO replace with the specific CQLParser parsing exception
+        raise HTTPException(
+            status_code=400, detail="Invalid CQL format: Parsing failed."
+        )
diff --git a/prez/models/listing.py b/prez/models/listing.py
index 7067ce01..3b39b4c0 100644
--- a/prez/models/listing.py
+++ b/prez/models/listing.py
@@ -1,6 +1,6 @@
 from typing import Optional, FrozenSet
 
-from pydantic import BaseModel, root_validator
+from pydantic import BaseModel, field_validator
 from rdflib import URIRef, Literal, XSD
 
 from prez.cache import endpoints_graph_cache
@@ -8,6 +8,9 @@
 
 
 class ListingModel(BaseModel):
+    class Config:
+        arbitrary_types_allowed = True
+
     uri: Optional[
         URIRef
     ] = None  # this is the URI of the focus object (if listing by membership)
@@ -20,25 +23,25 @@
     def __hash__(self):
         return hash(self.uri)
 
-    @root_validator
-    def populate(cls, values):
-        endpoint_uri_str = values.get("endpoint_uri")
-        if endpoint_uri_str:
-            endpoint_uri = URIRef(endpoint_uri_str)
-            values["classes"] = frozenset(
-                [
-                    klass
-                    for klass in endpoints_graph_cache.objects(
-                        endpoint_uri, ONT.deliversClasses, None
-                    )
-                ]
-            )
-            values["base_class"] = endpoints_graph_cache.value(
-                endpoint_uri, ONT.baseClass
-            )
-            tll_text = endpoints_graph_cache.value(endpoint_uri, ONT.isTopLevelEndpoint)
-            if tll_text == Literal("true", datatype=XSD.boolean):
-                values["top_level_listing"] = True
-            else:
-                values["top_level_listing"] = False
-            return values
+    # @field_validator():
+    # def populate(cls, values):
+    #     endpoint_uri_str = values.get("endpoint_uri")
+    #     if endpoint_uri_str:
+    #         endpoint_uri = URIRef(endpoint_uri_str)
+    #         values["classes"] = frozenset(
+    #             [
+    #                 klass
+    #                 for klass in endpoints_graph_cache.objects(
+    #                     endpoint_uri, ONT.deliversClasses, None
+    #                 )
+    #             ]
+    #         )
+    #         values["base_class"] = endpoints_graph_cache.value(
+    #             endpoint_uri, ONT.baseClass
+    #         )
+    #         tll_text = endpoints_graph_cache.value(endpoint_uri, ONT.isTopLevelEndpoint)
+    #         if tll_text == Literal("true", datatype=XSD.boolean):
+    #             values["top_level_listing"] = True
+    #         else:
+    #             values["top_level_listing"] = False
+    #     return values
diff --git a/prez/models/object_item.py b/prez/models/object_item.py
index 6b19862c..03921a69 100644
--- a/prez/models/object_item.py
+++ b/prez/models/object_item.py
@@ -12,6 +12,9 @@
 
 
 class ObjectItem(BaseModel):
+    class Config:
+        arbitrary_types_allowed = True
+
     uri: Optional[URIRef] = None
     classes: Optional[FrozenSet[URIRef]] = frozenset([PROF.Profile])
     selected_class: Optional[URIRef] = None
diff --git a/prez/models/profiles_and_mediatypes.py b/prez/models/profiles_and_mediatypes.py
index dad5c671..65d6b87b 100644
--- a/prez/models/profiles_and_mediatypes.py
+++ b/prez/models/profiles_and_mediatypes.py
@@ -1,6 +1,6 @@
 from typing import FrozenSet, Optional
 
-from pydantic import BaseModel, root_validator
+from pydantic import BaseModel, model_validator
 from rdflib import Namespace, URIRef
 from starlette.requests import Request
 
@@ -11,6 +11,9 @@
 
 
 class ProfilesMediatypesInfo(BaseModel):
+    class Config:
+        arbitrary_types_allowed = True
+
     request: Request  # TODO slim down once connegp is refactored so the whole request doesn't need to be passed through
     classes: FrozenSet[URIRef]
     req_profiles: Optional[str] = None
@@ -22,29 +25,29 @@
     profile_headers: Optional[str] = None
     avail_profile_uris: Optional[str] = None
 
-    @root_validator
-    def populate_requested_types(cls, values):
-        request = values.get("request")
+    @model_validator(mode="after")
+    def populate_requested_types(self):
+        request = self.request
         (
-            values["req_profiles"],
-            values["req_profiles_token"],
-            values["req_mediatypes"],
+            self.req_profiles,
+            self.req_profiles_token,
+            self.req_mediatypes,
         ) = get_requested_profile_and_mediatype(request)
-        return values
-
-    @root_validator
-    def populate_profile_and_mediatype(cls, values):
-        req_profiles = values.get("req_profiles")
-        req_profiles_token = values.get("req_profiles_token")
-        req_mediatypes = values.get("req_mediatypes")
-        classes = values.get("classes")
+        return self
+
+    @model_validator(mode="after")
+    def populate_profile_and_mediatype(self):
+        req_profiles = self.req_profiles
+        req_profiles_token = self.req_profiles_token
+        req_mediatypes = self.req_mediatypes
+        classes = self.classes
         (
-            values["profile"],
-            values["mediatype"],
-            values["selected_class"],
-            values["profile_headers"],
-            values["avail_profile_uris"],
+            self.profile,
+            self.mediatype,
+            self.selected_class,
+            self.profile_headers,
+            self.avail_profile_uris,
         ) = get_profiles_and_mediatypes(
             classes, req_profiles, req_profiles_token, req_mediatypes
         )
-        return values
+        return self
diff --git a/prez/models/profiles_item.py b/prez/models/profiles_item.py
index 7705233c..f4f97c53 100644
--- a/prez/models/profiles_item.py
+++ b/prez/models/profiles_item.py
@@ -13,6 +13,9 @@
 
 
 class ProfileItem(BaseModel):
+    class Config:
+        arbitrary_types_allowed = True
+
     uri: Optional[URIRef] = None
     classes: Optional[Set[URIRef]] = frozenset([PROF.Profile])
     id: Optional[str] = None
@@ -26,16 +29,16 @@
     def __hash__(self):
         return hash(self.uri)
 
-    @root_validator
-    def populate(cls, values):
-        uri = values.get("uri")
-        id = values.get("id")
+    # @root_validator
+    def populate(self):
+        uri = self.uri
+        id = self.id
         assert uri or id
         if id:
-            values["uri"] = get_uri_for_curie_id(id)
+            self.uri = get_uri_for_curie_id(id)
         elif uri:
-            values["id"] = get_curie_id_for_uri(uri)
-        q = f"""SELECT ?class {{ <{values["uri"]}> a ?class }}"""
+            self.id = get_curie_id_for_uri(uri)
+        q = f"""SELECT ?class {{ <{self.uri}> a ?class }}"""
         r = profiles_graph_cache.query(q)
         if len(r.bindings) > 0:
-            values["classes"] = frozenset([prof.get("class") for prof in r.bindings])
+            self.classes = frozenset([prof.get("class") for prof in r.bindings])
diff --git a/prez/models/profiles_listings.py b/prez/models/profiles_listings.py
index 0472f4f9..0694d40f 100644
--- a/prez/models/profiles_listings.py
+++ b/prez/models/profiles_listings.py
@@ -8,6 +8,9 @@ class ProfilesMembers(BaseModel):
+    class Config:
+        arbitrary_types_allowed = True
+
     url_path: str
     uri: Optional[URIRef] = None
     base_class: Optional[URIRef]
@@ -16,19 +19,19 @@
     link_constructor: Optional[str]
     top_level_listing: Optional[bool] = True
 
-    @root_validator
-    def populate(cls, values):
-        url_path = values.get("url_path")
-        if url_path.startswith("/v/"):
-            values["base_class"] = PREZ.VocPrezProfile
-            values["link_constructor"] = "/v/profiles"
-        elif url_path.startswith("/c/"):
-            values["base_class"] = PREZ.CatPrezProfile
-            values["link_constructor"] = "/c/profiles"
-        elif url_path.startswith("/s/"):
-            values["base_class"] = PREZ.SpacePrezProfile
-            values["link_constructor"] = "/s/profiles"
-        else:
-            values["base_class"] = PROF.Profile
-            values["link_constructor"] = "/profiles"
-        return values
+    # @root_validator
+    # def populate(cls, values):
+    #     url_path = values.get("url_path")
+    #     if url_path.startswith("/v/"):
+    #         values["base_class"] = PREZ.VocPrezProfile
+    #         values["link_constructor"] = "/v/profiles"
+    #     elif url_path.startswith("/c/"):
+    #         values["base_class"] = PREZ.CatPrezProfile
+    #         values["link_constructor"] = "/c/profiles"
+    #     elif url_path.startswith("/s/"):
+    #         values["base_class"] = PREZ.SpacePrezProfile
+    #         values["link_constructor"] = "/s/profiles"
+    #     else:
+    #         values["base_class"] = PROF.Profile
+    #         values["link_constructor"] = "/profiles"
+    #     return values
diff --git a/prez/models/search_method.py b/prez/models/search_method.py
index 2429c797..796dc1f9 100644
--- a/prez/models/search_method.py
+++ b/prez/models/search_method.py
@@ -5,18 +5,19 @@
-from pydantic import BaseConfig
 
-BaseConfig.arbitrary_types_allowed = True
 PREZ = Namespace("https://prez.dev/")
 
 
 class SearchMethod(BaseModel):
+    class Config:
+        arbitrary_types_allowed = True
+
     uri: URIRef = None
     identifier: Literal = None
     title: Literal = None
     template_query: Template = None
-    top_level_listing = False
-    search_query = True
+    top_level_listing: bool = False
+    search_query: bool = True
     selected_class: URIRef = None
     populated_query: str = None
     link_constructor: str = "/object?uri="
diff --git a/prez/queries/vocprez.py b/prez/queries/vocprez.py
index 8ef45b03..bfe0d1c1 100644
--- a/prez/queries/vocprez.py
+++ b/prez/queries/vocprez.py
@@ -41,7 +41,7 @@ def get_concept_scheme_query(iri: str, bnode_depth: int) -> str:
 
     {% for i in range(bnode_depth) %}
     ?o{{ i }} ?p{{ i + 1 }} ?o{{ i + 1 }} .
-        FILTER (isBlank(?0o))
+        FILTER (isBlank(?o0))
     {% endfor %}
 }
 """
diff --git a/prez/reference_data/context_ontologies/dcat.nq b/prez/reference_data/context_ontologies/dcat.nq
new file mode 100644
index 00000000..c1270034
--- /dev/null
+++ b/prez/reference_data/context_ontologies/dcat.nq
@@ -0,0 +1,1342 @@
+ .
+ .
+ "A curated collection of metadata about resources (e.g., datasets and data services in the context of a data catalog)."@en .
+ "En udvalgt og arrangeret samling af metadata om ressourcer (fx datasæt og datatjenester i kontekst af et datakatalog). "@da .
+ "Una colección curada de metadatos sobre recursos (por ejemplo, conjuntos de datos y servicios de datos en el contexto de un catálogo de datos)."@es .
+ "Una raccolta curata di metadati sulle risorse (ad es. sui dataset e relativi servizi nel contesto di cataloghi di dati)."@it .
+ "Une collection élaborée de métadonnées sur les jeux de données"@fr .
+ "Řízená kolekce metadat o datových sadách a datových službách"@cs .
+ "Μια επιμελημένη συλλογή μεταδεδομένων περί συνόλων δεδομένων"@el .
+ "مجموعة من توصيفات قوائم البيانات"@ar .
+ "データ・カタログは、データセットに関するキュレートされたメタデータの集合です。"@ja . + . + "Catalog"@en . + "Catalogo"@it . + "Catalogue"@fr . + "Catálogo"@es . + "Katalog"@cs . + "Katalog"@da . + "Κατάλογος"@el . + "فهرس قوائم البيانات"@ar . + "カタログ"@ja . + . + "A curated collection of metadata about resources (e.g., datasets and data services in the context of a data catalog)."@en . + "En samling af metadata om ressourcer (fx datasæt og datatjenester i kontekst af et datakatalog)."@da . + "Una colección curada de metadatos sobre recursos (por ejemplo, conjuntos de datos y servicios de datos en el contexto de un catálogo de datos)."@es . + "Una raccolta curata di metadati sulle risorse (ad es. sui dataset e relativi servizi nel contesto di cataloghi di dati)."@it . + "Une collection élaborée de métadonnées sur les jeux de données."@fr . + "Řízená kolekce metadat o datových sadách a datových službách."@cs . + "Μια επιμελημένη συλλογή μεταδεδομένων περί συνόλων δεδομένων."@el . + "مجموعة من توصيفات قوائم البيانات"@ar . + "データ・カタログは、データセットに関するキュレートされたメタデータの集合です。"@ja . + "English, Italian, Spanish definitions updated in this revision. Multilingual text not yet updated."@en . + "A web-based data catalog is typically represented as a single instance of this class."@en . + "Et webbaseret datakatalog repræsenteres typisk ved en enkelt instans af denne klasse."@da . + "Normalmente, un catalogo di dati nel web viene rappresentato come una singola istanza di questa classe."@it . + "Normalmente, un catálogo de datos disponible en la web se representa como una única instancia de esta clase."@es . + "Webový datový katalog je typicky reprezentován jako jedna instance této třídy."@cs . + "Συνήθως, ένας κατάλογος δεδομένων στον Παγκόσμιο Ιστό αναπαρίσταται ως ένα στιγμιότυπο αυτής της κλάσης."@el . + "通常、ウェブ・ベースのデータ・カタログは、このクラスの1つのインスタンスとして表わされます。"@ja . + . + . + "1つのデータセットを記述したデータ・カタログ内のレコード。"@ja . + "A record in a data catalog, describing the registration of a single dataset or data service."@en . + "En post i et datakatalog der beskriver registreringen af et enkelt datasæt eller en datatjeneste."@da . + "Un record in un catalogo di dati che descrive un singolo dataset o servizio di dati."@it . + "Un registre du catalogue ou une entrée du catalogue, décrivant un seul jeu de données."@fr . + "Un registro en un catálogo de datos que describe un solo conjunto de datos o un servicio de datos."@es . + "Záznam v datovém katalogu popisující jednu datovou sadu či datovou službu."@cs . + "Μία καταγραφή ενός καταλόγου, η οποία περιγράφει ένα συγκεκριμένο σύνολο δεδομένων."@el . + . + "Catalog Record"@en . + "Katalogizační záznam"@cs . + "Katalogpost"@da . + "Record di catalogo"@it . + "Registre du catalogue"@fr . + "Registro del catálogo"@es . + "Καταγραφή καταλόγου"@el . + "سجل"@ar . + "カタログ・レコード"@ja . + _:c14n5 . + _:c14n7 . + "1つのデータセットを記述したデータ・カタログ内のレコード。"@ja . + "A record in a data catalog, describing the registration of a single dataset or data service."@en . + "En post i et datakatalog der beskriver registreringen af et enkelt datasæt eller en datatjeneste."@da . + "Un record in un catalogo di dati che descrive un singolo dataset o servizio di dati."@it . + "Un registre du catalogue ou une entrée du catalogue, décrivant un seul jeu de données."@fr . + "Un registro en un catálogo de datos que describe un solo conjunto de datos o un servicio de datos."@es . + "Záznam v datovém katalogu popisující jednu datovou sadu či datovou službu."@cs . + "Μία καταγραφή ενός καταλόγου, η οποία περιγράφει ένα συγκεκριμένο σύνολο δεδομένων."@el . 
+ "English definition updated in this revision. Multilingual text not yet updated except the Spanish one and the Czech one and Italian one."@en . + "C'est une classe facultative et tous les catalogues ne l'utiliseront pas. Cette classe existe pour les catalogues ayant une distinction entre les métadonnées sur le jeu de données et les métadonnées sur une entrée du jeu de données dans le catalogue."@fr . + "Denne klasse er valgfri og ikke alle kataloger vil anvende denne klasse. Den kan anvendes i de kataloger hvor der skelnes mellem metadata om datasættet eller datatjenesten og metadata om selve posten til registreringen af datasættet eller datatjenesten i kataloget. Udgivelsesdatoen for datasættet afspejler for eksempel den dato hvor informationerne oprindeligt blev gjort tilgængelige af udgiveren, hvorimod udgivelsesdatoen for katalogposten er den dato hvor datasættet blev føjet til kataloget. I de tilfælde hvor de to datoer er forskellige eller hvor blot sidstnævnte er kendt, bør udgivelsesdatoen kun angives for katalogposten. Bemærk at W3Cs PROV ontologi gør til muligt at tilføje yderligere proveniensoplysninger eksempelvis om processen eller aktøren involveret i en given ændring af datasættet."@da . + "Esta clase es opcional y no todos los catálogos la utilizarán. Esta clase existe para catálogos que hacen una distinción entre los metadatos acerca de un conjunto de datos o un servicio de datos y los metadatos acerca de una entrada en ese conjunto de datos en el catálogo. Por ejemplo, la propiedad sobre la fecha de la publicación de los datos refleja la fecha en que la información fue originalmente publicada, mientras que la fecha de publicación del registro del catálogo es la fecha en que los datos se agregaron al mismo. En caso en que ambas fechas fueran diferentes, o en que sólo la fecha de publicación del registro del catálogo estuviera disponible, sólo debe especificarse en el registro del catálogo. Tengan en cuenta que la ontología PROV de W3C permite describir otra información sobre la proveniencia de los datos, como por ejemplo detalles del proceso y de los agentes involucrados en algún cambio específico a los datos."@es . + "Questa classe è opzionale e non tutti i cataloghi la utilizzeranno. Esiste per cataloghi in cui si opera una distinzione tra i metadati relativi al dataset ed i metadati relativi alla gestione del dataset nel catalogo. Ad esempio, la proprietà per indicare la data di pubblicazione del dataset rifletterà la data in cui l'informazione è stata originariamente messa a disposizione dalla casa editrice, mentre la data di pubblicazione per il record nel catalogo rifletterà la data in cui il dataset è stato aggiunto al catalogo. Nei casi dove solo quest'ultima sia nota, si utilizzerà esclusivamente la data di pubblicazione relativa al record del catalogo. Si noti che l'Ontologia W3C PROV permette di descrivere ulteriori informazioni sulla provenienza, quali i dettagli del processo, la procedura e l'agente coinvolto in una particolare modifica di un dataset."@it . + "Tato třída je volitelná a ne všechny katalogy ji využijí. Existuje pro katalogy, ve kterých se rozlišují metadata datové sady či datové služby a metadata o záznamu o datové sadě či datové službě v katalogu. Například datum publikace datové sady odráží datum, kdy byla datová sada původně zveřejněna poskytovatelem dat, zatímco datum publikace katalogizačního záznamu je datum zanesení datové sady do katalogu. 
V případech kdy se obě data liší, nebo je známo jen to druhé, by mělo být specifikováno jen datum publikace katalogizačního záznamu. Všimněte si, že ontologie W3C PROV umožňuje popsat další informace o původu jako například podrobnosti o procesu konkrétní změny datové sady a jeho účastnících."@cs . + "This class is optional and not all catalogs will use it. It exists for catalogs where a distinction is made between metadata about a dataset or data service and metadata about the entry for the dataset or data service in the catalog. For example, the publication date property of the dataset reflects the date when the information was originally made available by the publishing agency, while the publication date of the catalog record is the date when the dataset was added to the catalog. In cases where both dates differ, or where only the latter is known, the publication date should only be specified for the catalog record. Notice that the W3C PROV Ontology allows describing further provenance information such as the details of the process and the agent involved in a particular change to a dataset."@en . + "Αυτή η κλάση είναι προαιρετική και δεν χρησιμοποιείται από όλους τους καταλόγους. Υπάρχει για τις περιπτώσεις καταλόγων όπου γίνεται διαχωρισμός μεταξύ των μεταδεδομένων για το σύνολο των δεδομένων και των μεταδεδομένων για την καταγραφή του συνόλου δεδομένων εντός του καταλόγου. Για παράδειγμα, η ιδιότητα της ημερομηνίας δημοσίευσης του συνόλου δεδομένων δείχνει την ημερομηνία κατά την οποία οι πληροφορίες έγιναν διαθέσιμες από τον φορέα δημοσίευσης, ενώ η ημερομηνία δημοσίευσης της καταγραφής του καταλόγου δείχνει την ημερομηνία που το σύνολο δεδομένων προστέθηκε στον κατάλογο. Σε περιπτώσεις που οι δύο ημερομηνίες διαφέρουν, ή που μόνο η τελευταία είναι γνωστή, η ημερομηνία δημοσίευσης θα πρέπει να δίνεται για την καταγραφή του καταλόγου. Να σημειωθεί πως η οντολογία W3C PROV επιτρέπει την περιγραφή επιπλέον πληροφοριών ιστορικού όπως λεπτομέρειες για τη διαδικασία και τον δράστη που εμπλέκονται σε μία συγκεκριμένη αλλαγή εντός του συνόλου δεδομένων."@el . + "このクラスはオプションで、すべてのカタログがそれを用いるとは限りません。これは、データセットに関するメタデータとカタログ内のデータセットのエントリーに関するメタデータとで区別が行われるカタログのために存在しています。例えば、データセットの公開日プロパティーは、公開機関が情報を最初に利用可能とした日付を示しますが、カタログ・レコードの公開日は、データセットがカタログに追加された日付です。両方の日付が異っていたり、後者だけが分かっている場合は、カタログ・レコードに対してのみ公開日を指定すべきです。W3CのPROVオントロジー[prov-o]を用いれば、データセットに対する特定の変更に関連するプロセスやエージェントの詳細などの、さらに詳しい来歴情報の記述が可能となることに注意してください。"@ja . + . + "A site or end-point providing operations related to the discovery of, access to, or processing functions on, data or related resources."@en . + "Et websted eller endpoint der udstiller operationer relateret til opdagelse af, adgang til eller behandlende funktioner på data eller relaterede ressourcer."@da . + "Umístění či přístupový bod poskytující operace související s hledáním, přistupem k, či výkonem funkcí na datech či souvisejících zdrojích."@cs . + "Un sitio o end-point que provee operaciones relacionadas a funciones de descubrimiento, acceso, o procesamiento de datos o recursos relacionados."@es . + "Un sito o end-point che fornisce operazioni relative alla scoperta, all'accesso o all'elaborazione di funzioni su dati o risorse correlate."@it . + "Data service"@en . + "Datatjeneste"@da . + "Servicio de datos"@es . + "Servizio di dati"@it . + . + . + "Dataservice"@da . + "New class added in DCAT 2.0."@en . + "Nová třída přidaná ve verzi DCAT 2.0."@cs . + "Nueva clase añadida en DCAT 2.0."@es . + "Nuova classe aggiunta in DCAT 2.0."@it . + "Ny klasse tilføjet i DCAT 2.0."@da . 
+ "A site or end-point providing operations related to the discovery of, access to, or processing functions on, data or related resources."@en . + "Et site eller endpoint der udstiller operationer relateret til opdagelse af, adgang til eller behandlende funktioner på data eller relaterede ressourcer."@da . + "Umístění či přístupový bod poskytující operace související s hledáním, přistupem k, či výkonem funkcí na datech či souvisejících zdrojích."@cs . + "Un sitio o end-point que provee operaciones relacionadas a funciones de descubrimiento, acceso, o procesamiento de datos o recursos relacionados."@es . + "Un sito o end-point che fornisce operazioni relative alla scoperta, all'accesso o all'elaborazione di funzioni su dati o risorse correlate."@it . + "Datatjenestetypen kan indikeres ved hjælp af egenskaben dct:type. Værdien kan tages fra kontrollerede udfaldsrum såsom INSPIRE spatial data service vocabulary."@da . + "Druh služby může být indikován vlastností dct:type. Její hodnota může být z řízeného slovníku, kterým je například slovník typů prostorových datových služeb INSPIRE."@cs . + "El tipo de servicio puede indicarse usando la propiedad dct:type. Su valor puede provenir de un vocabulario controlado, como por ejemplo el vocabulario de servicios de datos espaciales de INSPIRE."@es . + "Hvis en dcat:DataService er bundet til en eller flere specifikke datasæt kan dette indikeres ved hjælp af egenskaben dcat:servesDataset. "@da . + "If a dcat:DataService is bound to one or more specified Datasets, they are indicated by the dcat:servesDataset property."@en . + "Il tipo di servizio può essere indicato usando la proprietà dct:type. Il suo valore può essere preso da un vocabolario controllato come il vocabolario dei tipi di servizi per dati spaziali di INSPIRE."@it . + "Pokud je dcat:DataService navázána na jednu či více Datových sad, jsou tyto indikovány vlstností dcat:servesDataset."@cs . + "Se un dcat:DataService è associato a uno o più Dataset specificati, questi sono indicati dalla proprietà dcat:serveDataset."@it . + "Si un dcat:DataService está asociado con uno o más conjuntos de datos especificados, dichos conjuntos de datos pueden indicarse con la propiedad dcat:servesDataset."@es . + "The kind of service can be indicated using the dct:type property. Its value may be taken from a controlled vocabulary such as the INSPIRE spatial data service type vocabulary."@en . + . + . + "1つのエージェントによって公開またはキュレートされ、1つ以上の形式でアクセスまたはダウンロードできるデータの集合。"@ja . + "A collection of data, published or curated by a single source, and available for access or download in one or more representations."@en . + "En samling af data, udgivet eller udvalgt og arrangeret af en enkelt kilde og som er til råde for adgang til eller download af i en eller flere repræsentationer."@da . + "Kolekce dat poskytovaná či řízená jedním zdrojem, která je k dispozici pro přístup či stažení v jednom či více formátech."@cs . + "Raccolta di dati, pubblicati o curati da un'unica fonte, disponibili per l'accesso o il download in uno o più formati."@it . + "Una colección de datos, publicados o conservados por una única fuente, y disponibles para ser accedidos o descargados en uno o más formatos."@es . + "Une collection de données, publiée ou élaborée par une seule source, et disponible pour accès ou téléchargement dans un ou plusieurs formats."@fr . + "Μία συλλογή από δεδομένα, δημοσιευμένη ή επιμελημένη από μία και μόνο πηγή, διαθέσιμη δε προς πρόσβαση ή μεταφόρτωση σε μία ή περισσότερες μορφές."@el . 
+ "قائمة بيانات منشورة أو مجموعة من قبل مصدر ما و متاح الوصول إليها أو تحميلها"@ar . + . + "Conjunto de datos"@es . + "Dataset"@en . + "Dataset"@it . + "Datasæt"@da . + "Datová sada"@cs . + "Jeu de données"@fr . + "Σύνολο Δεδομένων"@el . + "قائمة بيانات"@ar . + "データセット"@ja . + . + "Datasamling"@da . + "2018-02 - odstraněno tvrzení o podtřídě dctype:Dataset, jelikož rozsah dcat:Dataset zahrnuje několik dalších typů ze slovníku dctype."@cs . + "2018-02 - se eliminó el axioma de subclase con dctype:Dataset porque el alcance de dcat:Dataset incluye muchos otros tipos del vocabulario dctype."@es . + "2018-02 - sottoclasse di dctype:Dataset rimosso perché l'ambito di dcat:Dataset include diversi altri tipi dal vocabolario dctype."@it . + "2018-02 - subclass of dctype:Dataset removed because scope of dcat:Dataset includes several other types from the dctype vocabulary."@en . + "2018-02 - subklasse af dctype:Dataset fjernet da scope af dcat:Dataset omfatter flere forskellige typer fra dctype-vokabularet."@da . + "1つのエージェントによって公開またはキュレートされ、1つ以上の形式でアクセスまたはダウンロードできるデータの集合。"@ja . + "A collection of data, published or curated by a single source, and available for access or download in one or more represenations."@en . + "En samling a data, udgivet eller udvalgt og arrangeret af en enkelt kilde og som der er adgang til i en eller flere repræsentationer."@da . + "Kolekce dat poskytovaná či řízená jedním zdrojem, která je k dispozici pro přístup či stažení v jednom či více formátech."@cs . + "Raccolta di dati, pubblicati o curati da un'unica fonte, disponibili per l'accesso o il download in uno o più formati."@it . + "Una colección de datos, publicados o conservados por una única fuente, y disponibles para ser accedidos o descargados en uno o más formatos."@es . + "Une collection de données, publiée ou élaborée par une seule source, et disponible pour accès ou téléchargement dans un ou plusieurs formats."@fr . + "Μία συλλογή από δεδομένα, δημοσιευμένη ή επιμελημένη από μία και μόνο πηγή, διαθέσιμη δε προς πρόσβαση ή μεταφόρτωση σε μία ή περισσότερες μορφές."@el . + "قائمة بيانات منشورة أو مجموعة من قبل مصدر ما و متاح الوصول إليها أو تحميلها"@ar . + "2020-03-16 A new scopenote added and need to be translated"@en . + "Cette classe représente le jeu de données publié par le fournisseur de données. Dans les cas où une distinction est nécessaire entre le jeu de donénes et son entrée dans le catalogue, la classe registre de données peut être utilisée pour ce dernier."@fr . + "Denne klasse beskriver det konceptuelle datasæt. En eller flere repræsentationer kan være tilgængelige med forskellige skematiske opsætninger, formater eller serialiseringer."@da . + "Denne klasse repræsenterer det konkrete datasæt som det udgives af datasætleverandøren. I de tilfælde hvor det er nødvendigt at skelne mellem det konkrete datasæt og dets registrering i kataloget (fordi metadata såsom ændringsdato og vedligeholder er forskellige), så kan klassen katalogpost anvendes. "@da . + "Esta clase representa el conjunto de datos publicados. En los casos donde es necesario distinguir entre el conjunto de datos y su entrada en el catálogo de datos, se debe utilizar la clase 'registro del catálogo'."@es . + "Questa classe descrive il dataset dal punto di vista concettuale. Possono essere disponibili una o più rappresentazioni, con diversi layout e formati schematici o serializzazioni."@it . + "Questa classe rappresenta il dataset come pubblicato dall’editore. 
Nel caso in cui sia necessario operare una distinzione fra i metadati originali del dataset e il record dei metadati ad esso associato nel catalogo (ad esempio, per distinguere la data di modifica del dataset da quella del dataset nel catalogo) si può impiegare la classe catalog record."@it . + "Tato třída reprezentuje datovou sadu tak, jak je publikována poskytovatelem dat. V případě potřeby rozlišení datové sady a jejího katalogizačního záznamu (jelikož metadata jako datum modifikace se mohou lišit) pro něj může být použita třída \"katalogizační záznam\"."@cs . + "The notion of dataset in DCAT is broad and inclusive, with the intention of accommodating resource types arising from all communities. Data comes in many forms including numbers, text, pixels, imagery, sound and other multi-media, and potentially other types, any of which might be collected into a dataset."@en . + "This class describes the conceptual dataset. One or more representations might be available, with differing schematic layouts and formats or serializations."@en . + "This class represents the actual dataset as published by the dataset provider. In cases where a distinction between the actual dataset and its entry in the catalog is necessary (because metadata such as modification date and maintainer might differ), the catalog record class can be used for the latter."@en . + "Η κλάση αυτή αναπαριστά το σύνολο δεδομένων αυτό καθ'εαυτό, όπως έχει δημοσιευθεί από τον εκδότη. Σε περιπτώσεις όπου είναι απαραίτητος ο διαχωρισμός μεταξύ του συνόλου δεδομένων και της καταγραφής αυτού στον κατάλογο (γιατί μεταδεδομένα όπως η ημερομηνία αλλαγής και ο συντηρητής μπορεί να διαφέρουν) η κλάση της καταγραφής καταλόγου μπορεί να χρησιμοποιηθεί για το τελευταίο."@el . + "このクラスは、データセットの公開者が公開する実際のデータセットを表わします。カタログ内の実際のデータセットとそのエントリーとの区別が必要な場合(修正日と維持者などのメタデータが異なるかもしれないので)は、後者にcatalog recordというクラスを使用できます。"@ja . + . + . + "A specific representation of a dataset. A dataset might be available in multiple serializations that may differ in various ways, including natural language, media-type or format, schematic organization, temporal and spatial resolution, level of detail or profiles (which might specify any or all of the above)."@en . + "En specifik repræsentation af et datasæt. Et datasæt kan være tilgængelig i mange serialiseringer der kan variere på forskellige vis, herunder sprog, medietype eller format, systemorganisering, tidslig- og geografisk opløsning, detaljeringsniveau eller profiler (der kan specificere en eller flere af ovenstående)."@da . + "Konkrétní reprezentace datové sady. Datová sada může být dostupná v různých serializacích, které se mohou navzájem lišit různými způsoby, mimo jiné přirozeným jazykem, media-typem či formátem, schematickou organizací, časovým a prostorovým rozlišením, úrovní detailu či profily (které mohou specifikovat některé či všechny tyto rozdíly)."@cs . + "Rappresenta una forma disponibile e specifica del dataset. Ciascun dataset può essere disponibile in forme differenti, che possono rappresentare formati diversi o diversi punti di accesso per un dataset. Esempi di distribuzioni sono un file CSV scaricabile, una API o un RSS feed."@it . + "Représente une forme spécifique d'un jeu de données. Caque jeu de données peut être disponible sous différentes formes, celles-ci pouvant représenter différents formats du jeu de données ou différents endpoint. Des exemples de distribution sont des fichirs CSV, des API ou des flux RSS."@fr . + "Una representación específica de los datos. 
Cada conjunto de datos puede estar disponible en formas diferentes, las cuáles pueden variar en distintas formas, incluyendo el idioma, 'media-type' o formato, organización esquemática, resolución temporal y espacial, nivel de detalle o perfiles (que pueden especificar cualquiera o todas las diferencias anteriores)."@es . + "Αναπαριστά μία συγκεκριμένη διαθέσιμη μορφή ενός συνόλου δεδομένων. Κάθε σύνολο δεδομενων μπορεί να είναι διαθέσιμο σε διαφορετικές μορφές, οι μορφές αυτές μπορεί να αναπαριστούν διαφορετικές μορφές αρχείων ή διαφορετικά σημεία διάθεσης. Παραδείγματα διανομών συμπεριλαμβάνουν ένα μεταφορτώσιμο αρχείο μορφής CSV, ένα API ή ένα RSS feed."@el . + "شكل محدد لقائمة البيانات يمكن الوصول إليه. قائمة بيانات ما يمكن أن تكون متاحه باشكال و أنواع متعددة. ملف يمكن تحميله أو واجهة برمجية يمكن من خلالها الوصول إلى البيانات هي أمثلة على ذلك."@ar . + "データセットの特定の利用可能な形式を表わします。各データセットは、異なる形式で利用できることがあり、これらの形式は、データセットの異なる形式や、異なるエンドポイントを表わす可能性があります。配信の例には、ダウンロード可能なCSVファイル、API、RSSフィードが含まれます。"@ja . + . + "Distribuce"@cs . + "Distribución"@es . + "Distribution"@da . + "Distribution"@en . + "Distribution"@fr . + "Distribuzione"@it . + "Διανομή"@el . + "التوزيع"@ar . + "配信"@ja . + "Datadistribution"@da . + "Datamanifestation"@da . + "Datarepræsentation"@da . + "Dataudstilling"@da . + "A specific representation of a dataset. A dataset might be available in multiple serializations that may differ in various ways, including natural language, media-type or format, schematic organization, temporal and spatial resolution, level of detail or profiles (which might specify any or all of the above)."@en . + "En specifik repræsentation af et datasæt. Et datasæt kan være tilgængelig i mange serialiseringer der kan variere på forskellige vis, herunder sprog, medietype eller format, systemorganisering, tidslig- og geografisk opløsning, detaljeringsniveau eller profiler (der kan specificere en eller flere af ovenstående)."@da . + "Konkrétní reprezentace datové sady. Datová sada může být dostupná v různých serializacích, které se mohou navzájem lišit různými způsoby, mimo jiné přirozeným jazykem, media-typem či formátem, schematickou organizací, časovým a prostorovým rozlišením, úrovní detailu či profily (které mohou specifikovat některé či všechny tyto rozdíly)."@cs . + "Rappresenta una forma disponibile e specifica del dataset. Ciascun dataset può essere disponibile in forme differenti, che possono rappresentare formati diversi o diversi punti di accesso per un dataset. Esempi di distribuzioni sono un file CSV scaricabile, una API o un RSS feed."@it . + "Représente une forme spécifique d'un jeu de données. Caque jeu de données peut être disponible sous différentes formes, celles-ci pouvant représenter différents formats du jeu de données ou différents endpoint. Des exemples de distribution sont des fichirs CSV, des API ou des flux RSS."@fr . + "Una representación específica de los datos. Cada conjunto de datos puede estar disponible en formas diferentes, las cuáles pueden variar en distintas formas, incluyendo el idioma, 'media-type' o formato, organización esquemática, resolución temporal y espacial, nivel de detalle o perfiles (que pueden especificar cualquiera o todas las diferencias anteriores)."@es . + "Αναπαριστά μία συγκεκριμένη διαθέσιμη μορφή ενός συνόλου δεδομένων. Κάθε σύνολο δεδομενων μπορεί να είναι διαθέσιμο σε διαφορετικές μορφές, οι μορφές αυτές μπορεί να αναπαριστούν διαφορετικές μορφές αρχείων ή διαφορετικά σημεία διάθεσης. 
Παραδείγματα διανομών συμπεριλαμβάνουν ένα μεταφορτώσιμο αρχείο μορφής CSV, ένα API ή ένα RSS feed."@el . + "شكل محدد لقائمة البيانات يمكن الوصول إليه. قائمة بيانات ما يمكن أن تكون متاحه باشكال و أنواع متعددة. ملف يمكن تحميله أو واجهة برمجية يمكن من خلالها الوصول إلى البيانات هي أمثلة على ذلك."@ar . + "データセットの特定の利用可能な形式を表わします。各データセットは、異なる形式で利用できることがあり、これらの形式は、データセットの異なる形式や、異なるエンドポイントを表わす可能性があります。配信の例には、ダウンロード可能なCSVファイル、API、RSSフィードが含まれます。"@ja . + "Ceci représente une disponibilité générale du jeu de données, et implique qu'il n'existe pas d'information sur la méthode d'accès réelle des données, par exple, si c'est un lien de téléchargement direct ou à travers une page Web."@fr . + "Denne klasse repræsenterer datasættets overordnede tilgængelighed og giver ikke oplysninger om hvilken metode der kan anvendes til at få adgang til data, dvs. om adgang til datasættet realiseres ved direkte download, API eller via et websted. Anvendelsen af egenskaben dcat:downloadURL indikerer at distributionen kan downloades direkte."@da . + "Esta clase representa una disponibilidad general de un conjunto de datos, e implica que no existe información acerca del método de acceso real a los datos, i.e., si es un enlace de descarga directa o a través de una página Web."@es . + "Questa classe rappresenta una disponibilità generale di un dataset e non implica alcuna informazione sul metodo di accesso effettivo ai dati, ad esempio se si tratta di un accesso a download diretto, API, o attraverso una pagina Web. L'utilizzo della proprietà dcat:downloadURL indica distribuzioni direttamente scaricabili."@it . + "This represents a general availability of a dataset it implies no information about the actual access method of the data, i.e. whether by direct download, API, or through a Web page. The use of dcat:downloadURL property indicates directly downloadable distributions."@en . + "Toto popisuje obecnou dostupnost datové sady. Neimplikuje žádnou informaci o skutečné metodě přístupu k datům, tj. zda jsou přímo ke stažení, skrze API či přes webovou stránku. Použití vlastnosti dcat:downloadURL indikuje přímo stažitelné distribuce."@cs . + "Αυτό αναπαριστά μία γενική διαθεσιμότητα ενός συνόλου δεδομένων και δεν υπονοεί τίποτα περί του πραγματικού τρόπου πρόσβασης στα δεδομένα, αν είναι άμεσα μεταφορτώσιμα, μέσω API ή μέσω μίας ιστοσελίδας. Η χρήση της ιδιότητας dcat:downloadURL δείχνει μόνο άμεσα μεταφορτώσιμες διανομές."@el . + "これは、データセットの一般的な利用可能性を表わし、データの実際のアクセス方式に関する情報(つまり、直接ダウンロードなのか、APIなのか、ウェブページを介したものなのか)を意味しません。dcat:downloadURLプロパティーの使用は、直接ダウンロード可能な配信を意味します。"@ja . + . + "An association class for attaching additional information to a relationship between DCAT Resources."@en . + "Asociační třída pro připojení dodatečných informací ke vztahu mezi zdroji DCAT."@cs . + "En associationsklasse til brug for tilknytning af yderligere information til en relation mellem DCAT-ressourcer."@da . + "Una clase de asociación para adjuntar información adicional a una relación entre recursos DCAT."@es . + "Una classe di associazione per il collegamento di informazioni aggiuntive a una relazione tra le risorse DCAT."@it . + "Relación"@es . + "Relation"@da . + "Relationship"@en . + "Relazione"@it . + "Vztah"@cs . + "New class added in DCAT 2.0."@en . + "Nová třída přidaná ve verzi DCAT 2.0."@cs . + "Nueva clase añadida en DCAT 2.0."@es . + "Nuova classe aggiunta in DCAT 2.0."@it . + "Ny klasse i DCAT 2.0."@da . + "An association class for attaching additional information to a relationship between DCAT Resources."@en . 
+ "Asociační třída pro připojení dodatečných informací ke vztahu mezi zdroji DCAT."@cs . + "En associationsklasse til brug for tilknytning af yderligere information til en relation mellem DCAT-ressourcer."@da . + "Una clase de asociación para adjuntar información adicional a una relación entre recursos DCAT."@es . + "Una classe di associazione per il collegamento di informazioni aggiuntive a una relazione tra le risorse DCAT."@it . + "Anvendes til at karakterisere en relation mellem datasæt, og potentielt andre ressourcer, hvor relationen er kendt men ikke tilstrækkeligt beskrevet af de standardiserede egenskaber i Dublin Core (dct:hasPart, dct:isPartOf, dct:conformsTo, dct:isFormatOf, dct:hasFormat, dct:isVersionOf, dct:hasVersion, dct:replaces, dct:isReplacedBy, dct:references, dct:isReferencedBy, dct:requires, dct:isRequiredBy) eller PROV-O-egenskaber (prov:wasDerivedFrom, prov:wasInfluencedBy, prov:wasQuotedFrom, prov:wasRevisionOf, prov:hadPrimarySource, prov:alternateOf, prov:specializationOf)."@da . + "Používá se pro charakterizaci vztahu mezi datovými sadami a případně i jinými zdroji, kde druh vztahu je sice znám, ale není přiměřeně charakterizován standardními vlastnostmi slovníku Dublin Core (dct:hasPart, dct:isPartOf, dct:conformsTo, dct:isFormatOf, dct:hasFormat, dct:isVersionOf, dct:hasVersion, dct:replaces, dct:isReplacedBy, dct:references, dct:isReferencedBy, dct:requires, dct:isRequiredBy) či vlastnostmi slovníku PROV-O (prov:wasDerivedFrom, prov:wasInfluencedBy, prov:wasQuotedFrom, prov:wasRevisionOf, prov:hadPrimarySource, prov:alternateOf, prov:specializationOf)."@cs . + "Se usa para caracterizar la relación entre conjuntos de datos, y potencialmente otros recursos, donde la naturaleza de la relación se conoce pero no está caracterizada adecuadamente con propiedades del estándar 'Dublin Core' (dct:hasPart, dct:isPartOf, dct:conformsTo, dct:isFormatOf, dct:hasFormat, dct:isVersionOf, dct:hasVersion, dct:replaces, dct:isReplacedBy, dct:references, dct:isReferencedBy, dct:requires, dct:isRequiredBy) or PROV-O properties (prov:wasDerivedFrom, prov:wasInfluencedBy, prov:wasQuotedFrom, prov:wasRevisionOf, prov:hadPrimarySource, prov:alternateOf, prov:specializationOf)."@es . + "Use to characterize a relationship between datasets, and potentially other resources, where the nature of the relationship is known but is not adequately characterized by the standard Dublin Core properties (dct:hasPart, dct:isPartOf, dct:conformsTo, dct:isFormatOf, dct:hasFormat, dct:isVersionOf, dct:hasVersion, dct:replaces, dct:isReplacedBy, dct:references, dct:isReferencedBy, dct:requires, dct:isRequiredBy) or PROV-O properties (prov:wasDerivedFrom, prov:wasInfluencedBy, prov:wasQuotedFrom, prov:wasRevisionOf, prov:hadPrimarySource, prov:alternateOf, prov:specializationOf)."@en . + "Viene utilizzato per caratterizzare la relazione tra insiemi di dati, e potenzialmente altri tipi di risorse, nei casi in cui la natura della relazione è nota ma non adeguatamente caratterizzata dalle proprietà dello standard 'Dublin Core' (dct:hasPart, dct:isPartOf, dct:conformsTo, dct:isFormatOf, dct:hasFormat, dct:isVersionOf, dct:hasVersion, dct:replaces, dct:isReplacedBy, dct:references, dct:isReferencedBy, dct:require, dct:isRequiredBy) o dalle propietà fornite da PROV-O (prov:wasDerivedFrom, prov:wasInfluencedBy, prov:wasQuotedFrom, prov:wasRevisionOf, prov: hadPrimarySource, prov:alternateOf, prov:specializationOf)."@it . + . + "Recurso publicado o curado por un agente único."@es . 
+ "Resource published or curated by a single agent."@en . + "Ressource udgivet eller udvalgt og arrangeret af en enkelt aktør."@da . + "Risorsa pubblicata o curata da un singolo agente."@it . + "Zdroj publikovaný či řízený jediným činitelem."@cs . + "Catalogued resource"@en . + "Katalogiseret ressource"@da . + "Katalogizovaný zdroj"@cs . + "Recurso catalogado"@es . + "Risorsa catalogata"@it . + "New class added in DCAT 2.0."@en . + "Nová třída přidaná ve verzi DCAT 2.0."@cs . + "Nueva clase agregada en DCAT 2.0."@es . + "Nuova classe aggiunta in DCAT 2.0."@it . + "Ny klasse i DCAT 2.0."@da . + "Recurso publicado o curado por un agente único."@es . + "Resource published or curated by a single agent."@en . + "Ressource udgivet eller udvalgt og arrangeret af en enkelt aktør."@da . + "Risorsa pubblicata o curata da un singolo agente."@it . + "Zdroj publikovaný či řízený jediným činitelem."@cs . + "Klassen for alle katalogiserede ressourcer, den overordnede klasse for dcat:Dataset, dcat:DataService, dcat:Catalog og enhvert medlem af et dcat:Catalog. Denne klasse bærer egenskaber der gælder alle katalogiserede ressourcer, herunder dataset og datatjenester. Det anbefales kraftigt at mere specifikke subklasser oprettes. Når der beskrives ressourcer der ikke er dcat:Dataset eller dcat:DataService, anbefales det at oprette passende subklasser af dcat:Resource eller at dcat:Resource anvendes sammen med egenskaben dct:type til opmærkning med en specifik typeangivelse."@da . + "La clase de todos los recursos catalogados, la superclase de dcat:Dataset, dcat:DataService, dcat:Catalog y cualquier otro miembro de un dcat:Catalog. Esta clase tiene propiedades comunes a todos los recursos catalogados, incluyendo conjuntos de datos y servicios de datos. Se recomienda fuertemente que se use una clase más específica. Cuando se describe un recurso que no es un dcat:Dataset o dcat:DataService, se recomienda crear una sub-clase apropiada de dcat:Resource, o usar dcat:Resource con la propiedad dct:type to indicar el tipo específico."@es . + "La classe di tutte le risorse catalogate, la Superclasse di dcat:Dataset, dcat:DataService, dcat:Catalog e qualsiasi altro membro di dcat:Catalog. Questa classe porta proprietà comuni a tutte le risorse catalogate, inclusi set di dati e servizi dati. Si raccomanda vivamente di utilizzare una sottoclasse più specifica. Quando si descrive una risorsa che non è un dcat:Dataset o dcat:DataService, si raccomanda di creare una sottoclasse di dcat:Resource appropriata, o utilizzare dcat:Resource con la proprietà dct:type per indicare il tipo specifico."@it . + "The class of all catalogued resources, the Superclass of dcat:Dataset, dcat:DataService, dcat:Catalog and any other member of a dcat:Catalog. This class carries properties common to all catalogued resources, including datasets and data services. It is strongly recommended to use a more specific sub-class. When describing a resource which is not a dcat:Dataset or dcat:DataService, it is recommended to create a suitable sub-class of dcat:Resource, or use dcat:Resource with the dct:type property to indicate the specific type."@en . + "Třída všech katalogizovaných zdrojů, nadtřída dcat:Dataset, dcat:DataService, dcat:Catalog a všech ostatních členů dcat:Catalog. Tato třída nese vlastnosti společné všem katalogizovaným zdrojům včetně datových sad a datových služeb. Je silně doporučeno používat specifičtější podtřídy, pokud je to možné. 
Při popisu zdroje, který není ani dcat:Dataset, ani dcat:DataService se doporučuje vytvořit odpovídající podtřídu dcat:Resource a nebo použít dcat:Resource s vlastností dct:type pro určení konkrétního typu."@cs . + "dcat:Resource er et udvidelsespunkt der tillader oprettelsen af enhver type af kataloger. Yderligere subklasser kan defineres i en DCAT-profil eller i en applikation til kataloger med andre typer af ressourcer."@da . + "dcat:Resource es un punto de extensión que permite la definición de cualquier tipo de catálogo. Se pueden definir subclases adicionales en un perfil de DCAT o una aplicación para catálogos de otro tipo de recursos."@es . + "dcat:Resource is an extension point that enables the definition of any kind of catalog. Additional subclasses may be defined in a DCAT profile or application for catalogs of other kinds of resources."@en . + "dcat:Resource je bod pro rozšíření umožňující definici různých druhů katalogů. Další podtřídy lze definovat v profilech DCAT či aplikacích pro katalogy zdrojů jiných druhů."@cs . + "dcat:Resource è un punto di estensione che consente la definizione di qualsiasi tipo di catalogo. Sottoclassi aggiuntive possono essere definite in un profilo DCAT o in un'applicazione per cataloghi di altri tipi di risorse."@it . + . + "A role is the function of a resource or agent with respect to another resource, in the context of resource attribution or resource relationships."@en . + "En rolle er den funktion en ressource eller aktør har i forhold til en anden ressource, i forbindelse med ressourcekreditering eller ressourcerelationer."@da . + "Role je funkce zdroje či agenta ve vztahu k jinému zdroji, v kontextu přiřazení zdrojů či vztahů mezi zdroji."@cs . + "Un rol es la función de un recurso o agente con respecto a otro recurso, en el contexto de atribución del recurso o de las relaciones entre recursos."@es . + "Un ruolo è la funzione di una risorsa o di un agente rispetto ad un'altra risorsa, nel contesto dell'attribuzione delle risorse o delle relazioni tra risorse."@it . + "Rol"@es . + "Role"@cs . + "Role"@en . + "Rolle"@da . + "Ruolo"@it . + . + . + "New class added in DCAT 2.0."@en . + "Nová třída přidaná ve verzi DCAT 2.0."@cs . + "Nueva clase agregada en DCAT 2.0."@es . + "Nuova classe aggiunta in DCAT 2.0."@it . + "Ny klasse tilføjet i DCAT 2.0."@da . + "A role is the function of a resource or agent with respect to another resource, in the context of resource attribution or resource relationships."@en . + "En rolle er den funktion en ressource eller aktør har i forhold til en anden ressource, i forbindelse med ressourcekreditering eller ressourcerelationer."@da . + "Role je funkce zdroje či agenta ve vztahu k jinému zdroji, v kontextu přiřazení zdrojů či vztahů mezi zdroji."@cs . + "Un rol es la función de un recurso o agente con respecto a otro recurso, en el contexto de atribución del recurso o de las relaciones entre recursos."@es . + "Un ruolo è la funzione di una risorsa o di un agente rispetto ad un'altra risorsa, nel contesto dell'attribuzione delle risorse o delle relazioni tra risorse."@it . + "Incluída en DCAT para complementar prov:Role (cuyo uso está limitado a roles en el contexto de una actividad, ya que es el rango de prov:hadRole)."@es . + "Introdotta in DCAT per completare prov:Role (il cui uso è limitato ai ruoli nel contesto di un'attività, in conseguenza alla definizione del codominio di prov:hadRole)."@it .
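+ # Illustrative sketch (editorial comment, not part of the upstream DCAT data): the
+ # scope note above recommends typing a catalogued resource that is neither a
+ # dcat:Dataset nor a dcat:DataService via dct:type. In Turtle, with a hypothetical
+ # ex: namespace:
+ #   ex:map-sheet-42 a dcat:Resource ;
+ #       dct:type <http://purl.org/dc/dcmitype/StillImage> .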
+ "Introduced into DCAT to complement prov:Role (whose use is limited to roles in the context of an activity, as the range of prov:hadRole)."@en . + "Introduceret i DCAT for at supplere prov:Role (hvis anvendelse er begrænset til roller i forbindelse med en aktivitet, som er rækkevidde for prov:hadRole)."@da . + "Přidáno do DCAT pro doplnění třídy prov:Role (jejíž užití je omezeno na role v kontextu aktivit, jakožto obor hodnot vlastnosti prov:hadRole)."@cs . + "Anvendes i forbindelse med kvalificerede krediteringer til at angive aktørens rolle i forhold til en entitet. Det anbefales at værdierne styres som et kontrolleret udfaldsrum med aktørroller, såsom http://registry.it.csiro.au/def/isotc211/CI_RoleCode."@da . + "Anvendes i forbindelse med kvalificerede relationer til at specificere en entitets rolle i forhold til en anden entitet. Det anbefales at værdierne styres med et kontrolleret udfaldsrum for for entitetsroller såsom: ISO 19115 DS_AssociationTypeCode http://registry.it.csiro.au/def/isotc211/DS_AssociationTypeCode; IANA Registry of Link Relations https://www.iana.org/assignments/link-relation; DataCite metadata schema; MARC relators https://id.loc.gov/vocabulary/relators."@da . + "Použito v kvalifikovaném přiřazení pro specifikaci role Agenta ve vztahu k Entitě. Je doporučeno množinu hodnot spravovat jako řízený slovník rolí agentů, jako například http://registry.it.csiro.au/def/isotc211/CI_RoleCode."@cs . + "Použito v kvalifikovaném vztahu pro specifikaci role Entity ve vztahu k jiné Entitě. Je doporučeno množinu hodnot spravovat jako řízený slovník rolí entit, jako například ISO 19115 DS_AssociationTypeCode http://registry.it.csiro.au/def/isotc211/DS_AssociationTypeCode, IANA Registry of Link Relations https://www.iana.org/assignments/link-relation, DataCite metadata schema, či MARC relators https://id.loc.gov/vocabulary/relators."@cs . + "Se usa en una atribución cualificada para especificar el rol de un Agente con respecto a una Entidad. Se recomienda que los valores se administren como un vocabulario controlado de roles de agente, como por ejemplo http://registry.it.csiro.au/def/isotc211/CI_RoleCode."@es . + "Se usa en una relación cualificada para especificar el rol de una Entidad con respecto a otra Entidad. Se recomienda que los valores se administren como los valores de un vocabulario controlado de roles de entidad como por ejemplo: ISO 19115 DS_AssociationTypeCode http://registry.it.csiro.au/def/isotc211/DS_AssociationTypeCode; IANA Registry of Link Relations https://www.iana.org/assignments/link-relation; el esquema de metadatos de DataCite; MARC relators https://id.loc.gov/vocabulary/relators."@es . + "Used in a qualified-attribution to specify the role of an Agent with respect to an Entity. It is recommended that the values be managed as a controlled vocabulary of agent roles, such as http://registry.it.csiro.au/def/isotc211/CI_RoleCode."@en . + "Used in a qualified-relation to specify the role of an Entity with respect to another Entity. It is recommended that the values be managed as a controlled vocabulary of entity roles such as: ISO 19115 DS_AssociationTypeCode http://registry.it.csiro.au/def/isotc211/DS_AssociationTypeCode; IANA Registry of Link Relations https://www.iana.org/assignments/link-relation; DataCite metadata schema; MARC relators https://id.loc.gov/vocabulary/relators."@en . + "Utilizzato in un'attribuzione qualificata per specificare il ruolo di un agente rispetto a un'entità. 
Si consiglia di attribuire i valori considerando un vocabolario controllato dei ruoli dell'agente, ad esempio http://registry.it.csiro.au/def/isotc211/CI_RoleCode."@it . + "Utilizzato in una relazione qualificata per specificare il ruolo di un'entità rispetto a un'altra entità. Si raccomanda che il valore sia preso da un vocabolario controllato di ruoli di entità come ISO 19115 DS_AssociationTypeCode http://registry.it.csiro.au/def/isotc211/DS_AssociationTypeCode, IANA Registry of Link Relations https://www.iana.org/assignments/link-relation, DataCite metadata schema, o MARC relators https://id.loc.gov/vocabulary/relators."@it . + . + "A site or end-point that gives access to the distribution of the dataset."@en . + "Et websted eller endpoint der giver adgang til en repræsentation af datasættet."@da . + "Umístění či přístupový bod zpřístupňující distribuci datové sady."@cs . + "Un sitio o end-point que da acceso a la distribución de un conjunto de datos."@es . + "Un sito o end-point che dà accesso alla distribuzione del set di dati."@it . + "data access service"@en . + "dataadgangstjeneste"@da . + "servicio de acceso de datos"@es . + "servizio di accesso ai dati"@it . + "služba pro přístup k datům"@cs . + . + "New property added in DCAT 2.0."@en . + "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs . + "Nueva propiedad agregada en DCAT 2.0."@es . + "Nuova proprietà aggiunta in DCAT 2.0."@it . + "Ny egenskab tilføjet i DCAT 2.0."@da . + "A site or end-point that gives access to the distribution of the dataset."@en . + "Et websted eller endpoint der giver adgang til en repræsentation af datasættet."@da . + "Umístění či přístupový bod zpřístupňující distribuci datové sady."@cs . + "Un sitio o end-point que da acceso a la distribución de un conjunto de datos."@es . + "Un sito o end-point che dà accesso alla distribuzione del set di dati."@it . + . + . + "A URL of a resource that gives access to a distribution of the dataset. E.g. landing page, feed, SPARQL endpoint. Use for all cases except a simple download link, in which case downloadURL is preferred."@en . + "Ceci peut être tout type d'URL qui donne accès à une distribution du jeu de données. Par exemple, un lien à une page HTML contenant un lien au jeu de données, un Flux RSS, un point d'accès SPARQL. Utilisez le lorsque votre catalogue ne contient pas d'information sur quoi il est ou quand ce n'est pas téléchargeable."@fr . + "En URL for en ressource som giver adgang til en repræsentation af datsættet. Fx destinationsside, feed, SPARQL-endpoint. Anvendes i alle sammenhænge undtagen til angivelse af et simpelt download link hvor anvendelse af egenskaben downloadURL foretrækkes."@da . + "Puede ser cualquier tipo de URL que de acceso a una distribución del conjunto de datos, e.g., página de destino, descarga, URL feed, punto de acceso SPARQL. Esta propriedad se debe usar cuando su catálogo de datos no tiene información sobre donde está o cuando no se puede descargar."@es . + "URL zdroje, přes které je přístupná distribuce datové sady. Příkladem může být vstupní stránka, RSS kanál či SPARQL endpoint. Použijte ve všech případech kromě URL souboru ke stažení, pro které je lepší použít dcat:downloadURL."@cs . + "Un URL di una risorsa che consente di accedere a una distribuzione del set di dati. Per esempio, pagina di destinazione, feed, endpoint SPARQL. Da utilizzare per tutti i casi, tranne quando si tratta di un semplice link per il download nel qual caso è preferito downloadURL."@it . 
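+ # Illustrative sketch (editorial comment, not part of the upstream DCAT data): linking
+ # a distribution to the data access service described above; ex: is a hypothetical
+ # namespace.
+ #   ex:rainfall-dist a dcat:Distribution ;
+ #       dcat:accessService ex:rainfall-api .
+ #   ex:rainfall-api a dcat:DataService .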
+ "Μπορεί να είναι οποιουδήποτε είδους URL που δίνει πρόσβαση στη διανομή ενός συνόλου δεδομένων. Π.χ. ιστοσελίδα αρχικής πρόσβασης, μεταφόρτωση, feed URL, σημείο διάθεσης SPARQL. Να χρησιμοποιείται όταν ο κατάλογος δεν περιέχει πληροφορίες εαν πρόκειται ή όχι για μεταφορτώσιμο αρχείο."@el . + "أي رابط يتيح الوصول إلى البيانات. إذا كان الرابط هو ربط مباشر لملف يمكن تحميله استخدم الخاصية downloadURL"@ar . + "データセットの配信にアクセス権を与えるランディング・ページ、フィード、SPARQLエンドポイント、その他の種類の資源。"@ja . + . + . + "URL d'accès"@fr . + "URL de acceso"@es . + "URL πρόσβασης"@el . + "access address"@en . + "adgangsadresse"@da . + "indirizzo di accesso"@it . + "přístupová adresa"@cs . + "رابط وصول"@ar . + "アクセスURL"@ja . + . + _:c14n29 . + "adgangsURL"@da . + "A URL of a resource that gives access to a distribution of the dataset. E.g. landing page, feed, SPARQL endpoint. Use for all cases except a simple download link, in which case downloadURL is preferred."@en . + "Ceci peut être tout type d'URL qui donne accès à une distribution du jeu de données. Par exemple, un lien à une page HTML contenant un lien au jeu de données, un Flux RSS, un point d'accès SPARQL. Utilisez le lorsque votre catalogue ne contient pas d'information sur quoi il est ou quand ce n'est pas téléchargeable."@fr . + "En URL for en ressource som giver adgang til en repræsentation af datsættet. Fx destinationsside, feed, SPARQL-endpoint. Anvendes i alle sammenhænge undtagen til angivelse af et simpelt download link hvor anvendelse af egenskaben downloadURL foretrækkes."@da . + "Puede ser cualquier tipo de URL que de acceso a una distribución del conjunto de datos, e.g., página de destino, descarga, URL feed, punto de acceso SPARQL. Esta propriedad se debe usar cuando su catálogo de datos no tiene información sobre donde está o cuando no se puede descargar."@es . + "URL zdroje, přes které je přístupná distribuce datové sady. Příkladem může být vstupní stránka, RSS kanál či SPARQL endpoint. Použijte ve všech případech kromě URL souboru ke stažení, pro které je lepší použít dcat:downloadURL."@cs . + "Un URL di una risorsa che consente di accedere a una distribuzione del set di dati. Per esempio, pagina di destinazione, feed, endpoint SPARQL. Da utilizzare per tutti i casi, tranne quando si tratta di un semplice link per il download nel qual caso è preferito downloadURL."@it . + "Μπορεί να είναι οποιουδήποτε είδους URL που δίνει πρόσβαση στη διανομή ενός συνόλου δεδομένων. Π.χ. ιστοσελίδα αρχικής πρόσβασης, μεταφόρτωση, feed URL, σημείο διάθεσης SPARQL. Να χρησιμοποιείται όταν ο κατάλογος δεν περιέχει πληροφορίες εαν πρόκειται ή όχι για μεταφορτώσιμο αρχείο."@el . + "أي رابط يتيح الوصول إلى البيانات. إذا كان الرابط هو ربط مباشر لملف يمكن تحميله استخدم الخاصية downloadURL"@ar . + "データセットの配信にアクセス権を与えるランディング・ページ、フィード、SPARQLエンドポイント、その他の種類の資源。"@ja . + "Status: English Definition text modified by DCAT revision team, updated Italian and Czech translation provided, translations for other languages pending."@en . + "rdfs:label, rdfs:comment and skos:scopeNote have been modified. Non-english versions except for Italian must be updated."@en . + "El rango es una URL. Si la distribución es accesible solamente través de una página de destino (es decir, si no se conoce una URL de descarga directa), entonces el enlance a la página de destino debe ser duplicado como accessURL en la distribución."@es . + "Hvis en eller flere distributioner kun er tilgængelige via en destinationsside (dvs. 
en URL til direkte download er ikke kendt), så bør destinationssidelinket gentages som adgangsadresse for distributionen."@da . + "If the distribution(s) are accessible only through a landing page (i.e. direct download URLs are not known), then the landing page link should be duplicated as accessURL on a distribution."@en . + "La valeur est une URL. Si la distribution est accessible seulement au travers d'une page d'atterrissage (c'est-à-dire qu'on ignore l'URL de téléchargement direct), alors le lien à la page d'atterrissage doit être dupliqué comme accessURL sur la distribution."@fr . + "Pokud jsou distribuce přístupné pouze přes vstupní stránku (tj. URL pro přímé stažení nejsou známa), pak by URL přístupové stránky mělo být duplikováno ve vlastnosti distribuce accessURL."@cs . + "Se le distribuzioni sono accessibili solo attraverso una pagina web (ad esempio, gli URL per il download diretto non sono noti), allora il link della pagina web deve essere duplicato come accessURL sulla distribuzione."@it . + "Η τιμή είναι ένα URL. Αν η/οι διανομή/ές είναι προσβάσιμη/ες μόνο μέσω μίας ιστοσελίδας αρχικής πρόσβασης (δηλαδή αν δεν υπάρχουν γνωστές διευθύνσεις άμεσης μεταφόρτωσης), τότε ο σύνδεσμος της ιστοσελίδας αρχικής πρόσβασης πρέπει να αναπαραχθεί ως accessURL σε μία διανομή."@el . + "確実にダウンロードでない場合や、ダウンロードかどうかが不明である場合は、downloadURLではなく、accessURLを用いてください。ランディング・ページを通じてしか配信にアクセスできない場合(つまり、直接的なダウンロードURLが不明)は、配信におけるaccessURLとしてランディング・ページのリンクをコピーすべきです(SHOULD)。"@ja . + . + . + . + "bounding box"@da . + "bounding box"@en . + "cuadro delimitador"@es . + "ohraničení oblasti"@cs . + "quadro di delimitazione"@it . + . + "New property added in DCAT 2.0."@en . + "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs . + "Nuova proprietà aggiunta in DCAT 2.0."@it . + "Ny egenskab tilføjet i DCAT 2.0."@da . + "Propiedad nueva agregada en DCAT 2.0."@es . + "Den geografiske omskrevne firkant af en ressource."@da . + "El cuadro delimitador geográfico para un recurso."@es . + "Il riquadro di delimitazione geografica di una risorsa."@it . + "Ohraničení geografické oblasti zdroje."@cs . + "The geographic bounding box of a resource."@en . + "El rango de esta propiedad es intencionalmente genérico con el propósito de permitir distintas codificaciones geométricas. Por ejemplo, la geometría puede ser codificada como WKT (geosparql:wktLiteral [GeoSPARQL]) o [GML] (geosparql:asGML [GeoSPARQL])."@es . + "Il range di questa proprietà è volutamente generico, con lo scopo di consentire diverse codifiche geometriche. Ad esempio, la geometria potrebbe essere codificata con WKT (geosparql:wktLiteral [GeoSPARQL]) o [GML] (geosparql:asGML [GeoSPARQL])."@it . + "Obor hodnot této vlastnosti je úmyslně obecný, aby umožnil různé kódování geometrií. Geometrie by kupříkladu mohla být kódována jako WKT (geosparql:wktLiteral [GeoSPARQL]) či [GML] (geosparql:asGML [GeoSPARQL])."@cs . + "Rækkevidden for denne egenskab er bevidst generisk defineret med det formål at tillade forskellige kodninger af geometrier. Geometrien kan eksempelvis repræsenteres som WKT (geosparql:asWKT [GeoSPARQL]) eller [GML] (geosparql:asGML [GeoSPARQL])."@da . + "The range of this property is intentionally generic, with the purpose of allowing different geometry encodings. E.g., the geometry could be encoded as WKT (geosparql:wktLiteral [GeoSPARQL]) or [GML] (geosparql:asGML [GeoSPARQL])."@en . + . + . + "El tamaño de una distribución en bytes."@es . + "La dimensione di una distribuzione in byte."@it . + "La taille de la distribution en octets"@fr .
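+ # Illustrative sketch (editorial comment, not part of the upstream DCAT data): the
+ # dcat:bbox usage note above allows any geometry encoding; here a WKT literal, with a
+ # hypothetical ex: namespace and made-up coordinates.
+ #   ex:sandgate-area a dct:Location ;
+ #       dcat:bbox "POLYGON((153.06 -27.32, 153.10 -27.32, 153.10 -27.35, 153.06 -27.35, 153.06 -27.32))"^^geosparql:wktLiteral .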
+ "Størrelsen af en distributionen angivet i bytes."@da . + "The size of a distribution in bytes."@en . + "Velikost distribuce v bajtech."@cs . + "Το μέγεθος μιας διανομής σε bytes."@el . + "الحجم بالبايتات "@ar . + "バイトによる配信のサイズ。"@ja . + . + . + "byte size"@en . + "bytestørrelse"@da . + "dimensione in byte"@it . + "taille en octects"@fr . + "tamaño en bytes"@es . + "velikost v bajtech"@cs . + "μέγεθος σε bytes"@el . + "الحجم بالبايت"@ar . + "バイト・サイズ"@ja . + . + "El tamaño de una distribución en bytes."@es . + "La dimensione di una distribuzione in byte."@it . + "La taille de la distribution en octects."@fr . + "Størrelsen af en distribution angivet i bytes."@da . + "The size of a distribution in bytes."@en . + "Velikost distribuce v bajtech."@cs . + "Το μέγεθος μιας διανομής σε bytes."@el . + "الحجم بالبايتات "@ar . + "バイトによる配信のサイズ。"@ja . + "Bytestørrelsen kan approximeres hvis den præcise størrelse ikke er kendt. Værdien af dcat:byteSize bør angives som xsd:decimal."@da . + "El tamaño en bytes puede ser aproximado cuando se desconoce el tamaño exacto. El valor literal de dcat:byteSize debe tener tipo 'xsd:decimal'."@es . + "La dimensione in byte può essere approssimata quando non si conosce la dimensione precisa. Il valore di dcat:byteSize dovrebbe essere espresso come un xsd:decimal."@it . + "La taille en octects peut être approximative lorsque l'on ignore la taille réelle. La valeur littérale de dcat:byteSize doit être de type xsd:decimal."@fr . + "The size in bytes can be approximated when the precise size is not known. The literal value of dcat:byteSize should by typed as xsd:decimal."@en . + "Velikost v bajtech může být přibližná, pokud její přesná hodnota není známa. Literál s hodnotou dcat:byteSize by měl mít datový typ xsd:decimal."@cs . + "Το μέγεθος σε bytes μπορεί να προσεγγιστεί όταν η ακριβής τιμή δεν είναι γνωστή. Η τιμή της dcat:byteSize θα πρέπει να δίνεται με τύπο δεδομένων xsd:decimal."@el . + "الحجم يمكن أن يكون تقريبي إذا كان الحجم الدقيق غير معروف"@ar . + "正確なサイズが不明である場合、サイズは、バイトによる近似値を示すことができます。"@ja . + . + "A catalog whose contents are of interest in the context of this catalog."@en . + "Et katalog hvis indhold er relevant i forhold til det aktuelle katalog."@da . + "Katalog, jehož obsah je v kontextu tohoto katalogu zajímavý."@cs . + "Un catalogo i cui contenuti sono di interesse nel contesto di questo catalogo."@it . + "Un catálogo cuyo contenido es de interés en el contexto del catálogo que está siendo descripto."@es . + . + "catalog"@en . + "catalogo"@it . + "catálogo"@es . + "katalog"@cs . + "katalog"@da . + . + . + . + "har delkatalog"@da . + "New property added in DCAT 2.0."@en . + "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs . + "Nueva propiedad agregada en DCAT 2.0."@es . + "Nuova proprietà aggiunta in DCAT 2.0."@it . + "A catalog whose contents are of interest in the context of this catalog."@en . + "Et katalog hvis indhold er relevant i forhold til det aktuelle katalog."@da . + "Katalog, jehož obsah je v kontextu tohoto katalogu zajímavý."@cs . + "Un catalogo i cui contenuti sono di interesse nel contesto di questo catalogo."@it . + "Un catálogo cuyo contenido es de interés en el contexto del catálogo que está siendo descripto."@es . + . + . + . + "centroid"@cs . + "centroid"@en . + "centroide"@es . + "centroide"@it . + "geometrisk tyngdepunkt"@da . + . + "centroide"@da . + "New property added in DCAT 2.0."@en . + "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs . + "Nueva propiedad agregada en DCAT 2.0."@es . 
+ "Nuova proprietà aggiunta in DCAT 2.0."@it . + "Ny egenskab tilføjet i DCAT 2.0."@da . + "Det geometrisk tyngdepunkt (centroid) for en ressource."@da . + "El centro geográfico (centroide) de un recurso."@es . + "Geografický střed (centroid) zdroje."@cs . + "Il centro geografico (centroide) di una risorsa."@it . + "The geographic center (centroid) of a resource."@en . + "El rango de esta propiedad es intencionalmente genérico con el objetivo de permitir distintas codificaciones geométricas. Por ejemplo, la geometría puede codificarse como WKT (geosparql:wktLiteral [GeoSPARQL]) o [GML] (geosparql:asGML [GeoSPARQL])."@es . + "Il range di questa proprietà è volutamente generica, con lo scopo di consentire diverse codifiche geometriche. Ad esempio, la geometria potrebbe essere codificata con WKT (geosparql:wktLiteral [GeoSPARQL]) o [GML] (geosparql:asGML [GeoSPARQL])."@it . + "Obor hodnot této vlastnosti je úmyslně obecný, aby umožnil různé kódování geometrií. Geometrie by kupříkladu mohla být kódována jako WKT (geosparql:wktLiteral [GeoSPARQL]) či [GML] (geosparql:asGML [GeoSPARQL])."@cs . + "Rækkevidden for denne egenskab er bevidst generisk definere med det formål at tillade forskellige geokodninger. Geometrien kan eksempelvis repræsenteres som WKT (geosparql:asWKT [GeoSPARQL]) eller [GML] (geosparql:asGML [GeoSPARQL])."@da . + "The range of this property is intentionally generic, with the purpose of allowing different geometry encodings. E.g., the geometry could be encoded with as WKT (geosparql:wktLiteral [GeoSPARQL]) or [GML] (geosparql:asGML [GeoSPARQL])."@en . + . + . + "El formato de la distribución en el que los datos están en forma comprimida, e.g. para reducir el tamaño del archivo a bajar."@es . + "Formát komprese souboru, ve kterém jsou data poskytována v komprimované podobě, např. ke snížení velikosti souboru ke stažení."@cs . + "Il formato di compressione della distribuzione nel quale i dati sono in forma compressa, ad es. per ridurre le dimensioni del file da scaricare."@it . + "Kompressionsformatet for distributionen som indeholder data i et komprimeret format, fx for at reducere størrelsen af downloadfilen."@da . + "The compression format of the distribution in which the data is contained in a compressed form, e.g. to reduce the size of the downloadable file."@en . + . + . + "compression format"@en . + "formato de compresión"@es . + "formato di compressione"@it . + "formát komprese"@cs . + "kompressionsformat"@da . + . + . + "New property added in DCAT 2.0."@en . + "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs . + "Nueva propiedad agregada en DCAT 2.0."@es . + "Nuova proprietà aggiunta in DCAT 2.0."@it . + "Ny egenskab tilføjet i DCAT 2.0."@da . + "El formato de la distribución en el que los datos están en forma comprimida, e.g. para reducir el tamaño del archivo a bajar."@es . + "Formát komprese souboru, ve kterém jsou data poskytována v komprimované podobě, např. ke snížení velikosti souboru ke stažení."@cs . + "Il formato di compressione della distribuzione nel quale i dati sono in forma compressa, ad es. per ridurre le dimensioni del file da scaricare."@it . + "Kompressionsformatet for distributionen som indeholder data i et komprimeret format, fx for at reducere størrelsen af downloadfilen."@da . + "The compression format of the distribution in which the data is contained in a compressed form, e.g. to reduce the size of the downloadable file."@en . + "Denne egenskab kan anvendes når filerne i en distribution er blevet komprimeret, fx i en ZIP-fil. 
Formatet BØR udtrykkes ved en medietype som defineret i 'IANA media types registry', hvis der optræder en relevant medietype dér: https://www.iana.org/assignments/media-types/."@da . + "Esta propiedad se debe usar cuando los archivos de la distribución están comprimidos, por ejemplo en un archivo ZIP. El formato DEBERÍA expresarse usando un 'media type', tales como los definidos en el registro IANA de 'media types' https://www.iana.org/assignments/media-types/, si está disponibles."@es . + "Questa proprietà deve essere utilizzata quando i file nella distribuzione sono compressi, ad es. in un file ZIP. Il formato DOVREBBE essere espresso usando un tipo di media come definito dal registro dei tipi di media IANA https://www.iana.org/assignments/media-types/, se disponibile."@it . + "Tato vlastnost se použije, když jsou soubory v distribuci komprimovány, např. v ZIP souboru. Formát BY MĚL být vyjádřen pomocí typu média definovaného v registru IANA https://www.iana.org/assignments/media-types/, pokud existuje."@cs . + "This property is to be used when the files in the distribution are compressed, e.g. in a ZIP file. The format SHOULD be expressed using a media type as defined by IANA media types registry https://www.iana.org/assignments/media-types/, if available."@en . + . + . + "Información relevante de contacto para el recurso catalogado. Se recomienda el uso de vCard."@es . + "Informazioni di contatto rilevanti per la risorsa catalogata. Si raccomanda l'uso di vCard."@it . + "Relevant contact information for the catalogued resource. Use of vCard is recommended."@en . + "Relevante kontaktoplysninger for den katalogiserede ressource. Anvendelse af vCard anbefales."@da . + "Relevantní kontaktní informace pro katalogizovaný zdroj. Doporučuje se použít slovník VCard."@cs . + "Relie un jeu de données à une information de contact utile en utilisant VCard."@fr . + "Συνδέει ένα σύνολο δεδομένων με ένα σχετικό σημείο επικοινωνίας, μέσω VCard."@el . + "تربط قائمة البيانات بعنوان اتصال موصف باستخدام VCard"@ar . + "データセットを、VCardを用いて提供されている適切な連絡先情報にリンクします。"@ja . + . + "Punto de contacto"@es . + "contact point"@en . + "kontaktní bod"@cs . + "kontaktpunkt"@da . + "point de contact"@fr . + "punto di contatto"@it . + "σημείο επικοινωνίας"@el . + "عنوان اتصال"@ar . + "窓口"@ja . + . + "Información relevante de contacto para el recurso catalogado. Se recomienda el uso de vCard."@es . + "Informazioni di contatto rilevanti per la risorsa catalogata. Si raccomanda l'uso di vCard."@it . + "Relevant contact information for the catalogued resource. Use of vCard is recommended."@en . + "Relevante kontaktoplysninger for den katalogiserede ressource. Anvendelse af vCard anbefales."@da . + "Relevantní kontaktní informace pro katalogizovaný zdroj. Doporučuje se použít slovník VCard."@cs . + "Relie un jeu de données à une information de contact utile en utilisant VCard."@fr . + "Συνδέει ένα σύνολο δεδομένων με ένα σχετικό σημείο επικοινωνίας, μέσω VCard."@el . + "تربط قائمة البيانات بعنوان اتصال موصف باستخدام VCard"@ar . + "データセットを、VCardを用いて提供されている適切な連絡先情報にリンクします。"@ja . + "Status: English Definition text modified by DCAT revision team, Italian, Spanish and Czech translations provided, other translations pending."@en . + . + . + "A collection of data that is listed in the catalog."@en . + "En samling af data som er opført i kataloget."@da . + "Kolekce dat, která je katalogizována v katalogu."@cs . + "Relie un catalogue à un jeu de données faisant partie de ce catalogue."@fr . 
+ "Un conjunto de datos que se lista en el catálogo."@es . + "Una raccolta di dati che è elencata nel catalogo."@it . + "Συνδέει έναν κατάλογο με ένα σύνολο δεδομένων το οποίο ανήκει στον εν λόγω κατάλογο."@el . + "تربط الفهرس بقائمة بيانات ضمنه"@ar . + "カタログの一部であるデータセット。"@ja . + . + . + "conjunto de datos"@es . + "dataset"@en . + "dataset"@it . + "datasæt"@da . + "datová sada"@cs . + "jeu de données"@fr . + "σύνολο δεδομένων"@el . + "قائمة بيانات"@ar . + "データセット"@ja . + . + . + . + "datasamling"@da . + "har datasæt"@da . + "A collection of data that is listed in the catalog."@en . + "En samling af data som er opført i kataloget."@da . + "Kolekce dat, která je katalogizována v katalogu."@cs . + "Relie un catalogue à un jeu de données faisant partie de ce catalogue."@fr . + "Un conjunto de datos que se lista en el catálogo."@es . + "Una raccolta di dati che è elencata nel catalogo."@it . + "Συνδέει έναν κατάλογο με ένα σύνολο δεδομένων το οποίο ανήκει στον εν λόγω κατάλογο."@el . + "تربط الفهرس بقائمة بيانات ضمنه"@ar . + "カタログの一部であるデータセット。"@ja . + "Status: English Definition text modified by DCAT revision team, Italian, Spanish and Czech translation provided, other translations pending."@en . + . + . + "An available distribution of the dataset."@en . + "Connecte un jeu de données à des distributions disponibles."@fr . + "Dostupná distribuce datové sady."@cs . + "En tilgængelig repræsentation af datasættet."@da . + "Una distribución disponible del conjunto de datos."@es . + "Una distribuzione disponibile per il set di dati."@it . + "Συνδέει ένα σύνολο δεδομένων με μία από τις διαθέσιμες διανομές του."@el . + "تربط قائمة البيانات بطريقة أو بشكل يسمح الوصول الى البيانات"@ar . + "データセットを、その利用可能な配信に接続します。"@ja . + . + . + "distribuce"@cs . + "distribución"@es . + "distribution"@da . + "distribution"@en . + "distribution"@fr . + "distribuzione"@it . + "διανομή"@el . + "توزيع"@ar . + "データセット配信"@ja . + . + . + "har distribution"@da . + "An available distribution of the dataset."@en . + "Connecte un jeu de données à des distributions disponibles."@fr . + "Dostupná distribuce datové sady."@cs . + "En tilgængelig repræsentation af datasættet."@da . + "Una distribución disponible del conjunto de datos."@es . + "Una distribuzione disponibile per il set di dati."@it . + "Συνδέει ένα σύνολο δεδομένων με μία από τις διαθέσιμες διανομές του."@el . + "تربط قائمة البيانات بطريقة أو بشكل يسمح الوصول الى البيانات"@ar . + "データセットを、その利用可能な配信に接続します。"@ja . + "Status: English Definition text modified by DCAT revision team, translations pending (except for Italian, Spanish and Czech)."@en . + . + . + "Ceci est un lien direct à un fichier téléchargeable en un format donnée. Exple fichier CSV ou RDF. Le format est décrit par les propriétés de distribution dct:format et/ou dcat:mediaType."@fr . + "La URL de un archivo descargable en el formato dato. Por ejemplo, archivo CSV o archivo RDF. El formato se describe con las propiedades de la distribución dct:format y/o dcat:mediaType."@es . + "Questo è un link diretto al file scaricabile in un dato formato. E.g. un file CSV o un file RDF. Il formato è descritto dal dct:format e/o dal dcat:mediaType della distribuzione."@it . + "The URL of the downloadable file in a given format. E.g. CSV file or RDF file. The format is indicated by the distribution's dct:format and/or dcat:mediaType."@en . + "URL souboru ke stažení v daném formátu, například CSV nebo RDF soubor. Formát je popsán vlastností distribuce dct:format a/nebo dcat:mediaType."@cs . 
+ "URL til fil der kan downloades i et bestemt format. Fx en CSV-fil eller en RDF-fil. Formatet for distributionen angives ved hjælp af egenskaberne dct:format og/eller dcat:mediaType."@da . + "dcat:downloadURLはdcat:accessURLの特定の形式です。しかし、DCATプロファイルが非ダウンロード・ロケーションに対してのみaccessURLを用いる場合には、より強い分離を課すことを望む可能性があるため、この含意を強化しないように、DCATは、dcat:downloadURLをdcat:accessURLのサブプロパティーであると定義しません。"@ja . + "Είναι ένας σύνδεσμος άμεσης μεταφόρτωσης ενός αρχείου σε μια δεδομένη μορφή. Π.χ. ένα αρχείο CSV ή RDF. Η μορφη αρχείου περιγράφεται από τις ιδιότητες dct:format ή/και dcat:mediaType της διανομής."@el . + "رابط مباشر لملف يمكن تحميله. نوع الملف يتم توصيفه باستخدام الخاصية dct:format dcat:mediaType "@ar . + . + . + "URL de descarga"@es . + "URL de téléchargement"@fr . + "URL di scarico"@it . + "URL souboru ke stažení"@cs . + "URL μεταφόρτωσης"@el . + "download URL"@en . + "downloadURL"@da . + "رابط تحميل"@ar . + "ダウンロードURL"@ja . + . + "Ceci est un lien direct à un fichier téléchargeable en un format donnée. Exple fichier CSV ou RDF. Le format est décrit par les propriétés de distribution dct:format et/ou dcat:mediaType."@fr . + "La URL de un archivo descargable en el formato dato. Por ejemplo, archivo CSV o archivo RDF. El formato se describe con las propiedades de la distribución dct:format y/o dcat:mediaType."@es . + "Questo è un link diretto al file scaricabile in un dato formato. E.g. un file CSV o un file RDF. Il formato è descritto dal dct:format e/o dal dcat:mediaType della distribuzione."@it . + "The URL of the downloadable file in a given format. E.g. CSV file or RDF file. The format is indicated by the distribution's dct:format and/or dcat:mediaType."@en . + "URL souboru ke stažení v daném formátu, například CSV nebo RDF soubor. Formát je popsán vlastností distribuce dct:format a/nebo dcat:mediaType."@cs . + "URL til fil der kan downloades i et bestemt format. Fx en CSV-fil eller en RDF-fil. Formatet for distributionen angives ved hjælp af egenskaberne dct:format og/eller dcat:mediaType."@da . + "dcat:downloadURLはdcat:accessURLの特定の形式です。しかし、DCATプロファイルが非ダウンロード・ロケーションに対してのみaccessURLを用いる場合には、より強い分離を課すことを望む可能性があるため、この含意を強化しないように、DCATは、dcat:downloadURLをdcat:accessURLのサブプロパティーであると定義しません。"@ja . + "Είναι ένας σύνδεσμος άμεσης μεταφόρτωσης ενός αρχείου σε μια δεδομένη μορφή. Π.χ. ένα αρχείο CSV ή RDF. Η μορφη αρχείου περιγράφεται από τις ιδιότητες dct:format ή/και dcat:mediaType της διανομής."@el . + "رابط مباشر لملف يمكن تحميله. نوع الملف يتم توصيفه باستخدام الخاصية dct:format dcat:mediaType "@ar . + "Status: English Definition text modified by DCAT revision team, Italian, Spanish and Czech translation updated, other translations pending."@en . + "rdfs:label, rdfs:comment and/or skos:scopeNote have been modified. Non-english versions must be updated."@en . + "El valor es una URL."@es . + "La valeur est une URL."@fr . + "dcat:downloadURL BY MĚLA být použita pro adresu, ze které je distribuce přímo přístupná, typicky skrze požadavek HTTP Get."@cs . + "dcat:downloadURL BØR anvendes til angivelse af den adresse hvor distributionen er tilgængelig direkte, typisk gennem et HTTP Get request."@da . + "dcat:downloadURL DOVREBBE essere utilizzato per l'indirizzo a cui questa distribuzione è disponibile direttamente, in genere attraverso una richiesta Get HTTP."@it . + "dcat:downloadURL SHOULD be used for the address at which this distribution is available directly, typically through a HTTP Get request."@en . + "Η τιμή είναι ένα URL."@el . + . + . + . + "data di fine"@it . + "datum konce"@cs . + "end date"@en . 
+ "fecha final"@es . + "slutdato"@da . + . + "sluttidspunkt"@da . + "New property added in DCAT 2.0."@en . + "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs . + "Nueva propiedad agregada en DCAT 2.0."@es . + "Nuova proprietà aggiunta in DCAT 2.0."@it . + "Ny egenskab i DCAT 2.0."@da . + "El fin del período."@es . + "Konec doby trvání."@cs . + "La fine del periodo."@it . + "Slutningen på perioden."@da . + "The end of the period."@en . + "El rango de esta propiedad es intencionalmente genérico con el propósito de permitir distintos niveles de precisión temporal para especificar el fin del período. Por ejemplo, puede expresarse como una fecha (xsd:date), una fecha y un tiempo (xsd:dateTime), o un año (xsd:gYear)."@es . + "La range di questa proprietà è volutamente generico, con lo scopo di consentire diversi livelli di precisione temporale per specificare la fine di un periodo. Ad esempio, può essere espresso con una data (xsd:date), una data e un'ora (xsd:dateTime), o un anno (xsd:gYear)."@it . + "Obor hodnot této vlastnosti je úmyslně obecný, aby umožnil různé úrovně časového rozlišení pro specifikaci konce doby trvání. Ten může být kupříkladu vyjádřen datumem (xsd:date), datumem a časem (xsd:dateTime) či rokem (xsd:gYear)."@cs . + "Rækkeviden for denne egenskab er bevidst generisk defineret med det formål at tillade forskellige niveauer af tidslig præcision ifm. angivelse af slutdatoen for en periode. Den kan eksempelvis udtrykkes som en dato (xsd:date), en dato og et tidspunkt (xsd:dateTime), eller et årstal (xsd:gYear)."@da . + "The range of this property is intentionally generic, with the purpose of allowing different level of temporal precision for specifying the end of a period. E.g., it can be expressed with a date (xsd:date), a date and time (xsd:dateTime), or a year (xsd:gYear)."@en . + . + "A description of the service end-point, including its operations, parameters etc."@en . + "En beskrivelse af det pågældende tjenesteendpoint, inklusiv dets operationer, parametre etc."@da . + "Popis přístupového bodu služby včetně operací, parametrů apod."@cs . + "Una descripción del end-point del servicio, incluyendo sus operaciones, parámetros, etc."@es . + "Una descrizione dell'endpoint del servizio, incluse le sue operazioni, parametri, ecc."@it . + . + "descripción del end-point del servicio"@es . + "description of service end-point"@en . + "descrizione dell'endpoint del servizio"@it . + "endpointbeskrivelse"@da . + "popis přístupového bodu služby"@cs . + "New property in DCAT 2.0."@en . + "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs . + "Nueva propiedad agregada en DCAT 2.0."@en . + "Nuova proprietà in DCAT 2.0."@it . + "Ny egenskab i DCAT 2.0."@da . + "A description of the service end-point, including its operations, parameters etc."@en . + "En beskrivelse af det pågældende tjenesteendpoint, inklusiv dets operationer, parametre etc."@da . + "Popis přístupového bodu služby včetně operací, parametrů apod."@cs . + "Una descripción del end-point del servicio, incluyendo sus operaciones, parámetros, etc.."@es . + "Una descrizione dell'endpoint del servizio, incluse le sue operazioni, parametri, ecc."@it . + "An endpoint description may be expressed in a machine-readable form, such as an OpenAPI (Swagger) description, an OGC GetCapabilities response, a SPARQL Service Description, an OpenSearch or WSDL document, a Hydra API description, else in text or some other informal mode if a formal representation is not possible."@en . 
+ "En beskrivelse af et endpoint kan udtrykkes i et maskinlæsbart format, såsom OpenAPI (Swagger)-beskrivelser, et OGC GetCapabilities svar, en SPARQL tjenestebeskrivelse, en OpenSearch- eller et WSDL-dokument, en Hydra-API-beskrivelse, eller i tekstformat eller i et andet uformelt format, hvis en formel repræsentation ikke er mulig."@da . + "Endpointbeskrivelsen giver specifikke oplysninger om den konkrete endpointinstans, mens dct:conformsTo anvendes til at indikere den overordnede standard eller specifikation som endpointet er i overensstemmelse med."@da . + "La descripción del endpoint brinda detalles específicos de la instancia del endpoint, mientras que dct:conformsTo se usa para indicar el estándar general o especificación que implementa el endpoint."@es . + "La descrizione dell'endpoint fornisce dettagli specifici dell'istanza dell'endpoint reale, mentre dct:conformsTo viene utilizzato per indicare lo standard o le specifiche implementate dall'endpoint."@it . + "Popis přístupového bodu dává specifické detaily jeho konkrétní instance, zatímco dct:conformsTo indikuje obecný standard či specifikaci kterou přístupový bod implementuje."@cs . + "Popis přístupového bodu může být vyjádřen ve strojově čitelné formě, například jako popis OpenAPI (Swagger), odpověď služby OGC getCapabilities, pomocí slovníku SPARQL Service Description, jako OpenSearch či WSDL document, jako popis API dle slovníku Hydra, a nebo textově nebo jiným neformálním způsobem, pokud není možno použít formální reprezentaci."@cs . + "The endpoint description gives specific details of the actual endpoint instance, while dct:conformsTo is used to indicate the general standard or specification that the endpoint implements."@en . + "Una descripción del endpoint del servicio puede expresarse en un formato que la máquina puede interpretar, tal como una descripción basada en OpenAPI (Swagger), una respuesta OGC GetCapabilities, una descripción de un servicio SPARQL, un documento OpenSearch o WSDL, una descripción con la Hydra API, o en texto u otro modo informal si la representación formal no es posible."@es . + "Una descrizione dell'endpoint può essere espressa in un formato leggibile dalla macchina, come una descrizione OpenAPI (Swagger), una risposta GetCapabilities OGC, una descrizione del servizio SPARQL, un documento OpenSearch o WSDL, una descrizione API Hydra, o con del testo o qualche altra modalità informale se una rappresentazione formale non è possibile."@it . + . + "Kořenové umístění nebo hlavní přístupový bod služby (IRI přístupné přes Web)."@cs . + "La locazione principale o l'endpoint primario del servizio (un IRI risolvibile via web)."@it . + "La posición raíz o end-point principal del servicio (una IRI web)."@es . + "Rodplaceringen eller det primære endpoint for en tjeneste (en web-resolverbar IRI)."@da . + "The root location or primary endpoint of the service (a web-resolvable IRI)."@en . + . + "end-point del servicio"@es . + "end-point del servizio"@it . + "přístupový bod služby"@cs . + "service end-point"@en . + "tjenesteendpoint"@da . + . + "New property in DCAT 2.0."@en . + "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs . + "Nueva propiedad agregada en DCAT 2.0."@es . + "Nuova proprietà in DCAT 2.0."@it . + "Kořenové umístění nebo hlavní přístupový bod služby (IRI přístupné přes Web)."@cs . + "La locazione principale o l'endpoint primario del servizio (un IRI risolvibile via web)."@it . + "La posición raíz o end-point principal del servicio (una IRI web)."@es . 
+ "Rodplaceringen eller det primære endpoint for en tjeneste (en web-resolverbar IRI)."@da . + "The root location or primary endpoint of the service (a web-resolvable IRI)."@en . + . + "Den funktion en entitet eller aktør har i forhold til en anden ressource."@da . + "Funkce entity či agenta ve vztahu k jiné entitě či zdroji."@cs . + "La función de una entidad o agente con respecto a otra entidad o recurso."@es . + "La funzione di un'entità o un agente rispetto ad un'altra entità o risorsa."@it . + "The function of an entity or agent with respect to another entity or resource."@en . + _:c14n23 . + "haRuolo"@it . + "hadRole"@en . + "havde rolle"@da . + "sehraná role"@cs . + "tiene rol"@it . + . + "New property added in DCAT 2.0."@en . + "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs . + "Nueva propiedad agregada en DCAT 2.0."@es . + "Nuova proprietà aggiunta in DCAT 2.0."@it . + "Den funktion en entitet eller aktør har i forhold til en anden ressource."@da . + "Funkce entity či agenta ve vztahu k jiné entitě či zdroji."@cs . + "La función de una entidad o agente con respecto a otra entidad o recurso."@es . + "La funzione di un'entità o un agente rispetto ad un'altra entità o risorsa."@it . + "The function of an entity or agent with respect to another entity or resource."@en . + "Agregada en DCAT para complementar prov:hadRole (cuyo uso está limitado a roles en el contexto de una actividad, con dominio prov:Association."@es . + "Introdotta in DCAT per completare prov:hadRole (il cui uso è limitato ai ruoli nel contesto di un'attività, con il dominio di prov:Association."@it . + "Introduced into DCAT to complement prov:hadRole (whose use is limited to roles in the context of an activity, with the domain of prov:Association."@en . + "Introduceret i DCAT for at supplere prov:hadRole (hvis anvendelse er begrænset til roller i forbindelse med en aktivitet med domænet prov:Association)."@da . + "Přidáno do DCAT pro doplnění vlastnosti prov:hadRole (jejíž užití je omezeno na role v kontextu aktivity, s definičním oborem prov:Association)."@cs . + "Kan vendes ved kvalificerede krediteringer til at angive en aktørs rolle i forhold en entitet. Det anbefales at værdierne styres som et kontrolleret udfaldsrum med aktørroller, såsom http://registry.it.csiro.au/def/isotc211/CI_RoleCode."@da . + "May be used in a qualified-attribution to specify the role of an Agent with respect to an Entity. It is recommended that the value be taken from a controlled vocabulary of agent roles, such as http://registry.it.csiro.au/def/isotc211/CI_RoleCode."@en . + "May be used in a qualified-relation to specify the role of an Entity with respect to another Entity. It is recommended that the value be taken from a controlled vocabulary of entity roles such as: ISO 19115 DS_AssociationTypeCode http://registry.it.csiro.au/def/isotc211/DS_AssociationTypeCode; IANA Registry of Link Relations https://www.iana.org/assignments/link-relation; DataCite metadata schema; MARC relators https://id.loc.gov/vocabulary/relators."@en . + "Může být použito v kvalifikovaném přiřazení pro specifikaci role Agenta ve vztahu k Entitě. Je doporučeno hodnotu vybrat z řízeného slovníku rolí agentů, jako například http://registry.it.csiro.au/def/isotc211/CI_RoleCode."@cs . + "Může být použito v kvalifikovaném vztahu pro specifikaci role Entity ve vztahu k jiné Entitě. 
Je doporučeno použít hodnotu z řízeného slovníku rolí entit, jako například ISO 19115 DS_AssociationTypeCode http://registry.it.csiro.au/def/isotc211/DS_AssociationTypeCode, IANA Registry of Link Relations https://www.iana.org/assignments/link-relation, DataCite metadata schema, MARC relators https://id.loc.gov/vocabulary/relators."@cs . + "Puede usarse en una atribución cualificada para especificar el rol de un Agente con respecto a una Entidad. Se recomienda que el valor sea de un vocabulario controlado de roles de agentes, como por ejemplo http://registry.it.csiro.au/def/isotc211/CI_RoleCode."@es . + "Puede usarse en una relación cualificada para especificar el rol de una Entidad con respecto a otra Entidad. Se recomienda que su valor se tome de un vocabulario controlado de roles de entidades como por ejemplo: ISO 19115 DS_AssociationTypeCode http://registry.it.csiro.au/def/isotc211/DS_AssociationTypeCode; IANA Registry of Link Relations https://www.iana.org/assignments/link-relation; esquema de metadatos de DataCite; MARC relators https://id.loc.gov/vocabulary/relators."@es . + "Può essere utilizzata in una relazione qualificata per specificare il ruolo di un'entità rispetto a un'altra entità. Si raccomanda che il valore sia preso da un vocabolario controllato di ruoli di entità come ISO 19115 DS_AssociationTypeCode http://registry.it.csiro.au/def/isotc211/DS_AssociationTypeCode, IANA Registry of Link Relations https://www.iana.org/assignments/link-relation, DataCite metadata schema, o MARC relators https://id.loc.gov/vocabulary/relators."@it . + "Può essere utilizzato in un'attribuzione qualificata per specificare il ruolo di un agente rispetto a un'entità. Si raccomanda che il valore sia preso da un vocabolario controllato di ruoli di agente, come ad esempio http://registry.it.csiro.au/def/isotc211/CI_RoleCode."@it . + . + . + "A keyword or tag describing a resource."@en . + "Et nøgleord eller tag til beskrivelse af en ressource."@da . + "Klíčové slovo nebo značka popisující zdroj."@cs . + "Un mot-clé ou étiquette décrivant une ressource."@fr . + "Una palabra clave o etiqueta que describe un recurso."@es . + "Una parola chiave o un'etichetta per descrivere la risorsa."@it . + "Μία λέξη-κλειδί ή μία ετικέτα που περιγράφει το σύνολο δεδομένων."@el . + "كلمة مفتاحيه توصف قائمة البيانات"@ar . + "データセットを記述しているキーワードまたはタグ。"@ja . + . + "keyword"@en . + "klíčové slovo"@cs . + "mot-clés"@fr . + "nøgleord"@da . + "palabra clave"@es . + "parola chiave"@it . + "λέξη-κλειδί"@el . + "كلمة مفتاحية "@ar . + "キーワード/タグ"@ja . + . + . + "A keyword or tag describing a resource."@en . + "Et nøgleord eller tag til beskrivelse af en ressource."@da . + "Klíčové slovo nebo značka popisující zdroj."@cs . + "Un mot-clé ou étiquette décrivant une ressource."@fr . + "Una palabra clave o etiqueta que describe un recurso."@es . + "Una parola chiave o un'etichetta per descrivere la risorsa."@it . + "Μία λέξη-κλειδί ή μία ετικέτα που περιγράφει το σύνολο δεδομένων."@el . + "كلمة مفتاحيه توصف قائمة البيانات"@ar . + "データセットを記述しているキーワードまたはタグ。"@ja . + . + . + "A Web page that can be navigated to in a Web browser to gain access to the catalog, a dataset, its distributions and/or additional information."@en . + "En webside som der kan navigeres til i en webbrowser for at få adgang til kataloget, et datasæt, dets distributioner og/eller yderligere information."@da .
+ "Una pagina web che può essere navigata per ottenere l'accesso al catalogo, ad un dataset, alle distribuzioni del dataset e/o ad informazioni addizionali."@it . + "Una página web que puede ser visitada en un explorador Web para tener acceso el catálogo, un conjunto de datos, sus distribuciones y/o información adicional."@es . + "Une page Web accessible par un navigateur Web donnant accès au catalogue, un jeu de données, ses distributions et/ou des informations additionnelles."@fr . + "Webová stránka, na kterou lze pro získání přístupu ke katalogu, datové sadě, jejím distribucím a/nebo dalším informacím přistoupit webovým prohlížečem."@cs . + "Μία ιστοσελίδα πλοηγίσιμη μέσω ενός φυλλομετρητή (Web browser) που δίνει πρόσβαση στο σύνολο δεδομένων, τις διανομές αυτού ή/και επιπρόσθετες πληροφορίες."@el . + "صفحة وب يمكن من خلالها الوصول الى قائمة البيانات أو إلى معلومات إضافية متعلقة بها "@ar . + "データセット、その配信および(または)追加情報にアクセスするためにウエブ・ブラウザでナビゲートできるウェブページ。"@ja . + . + "destinationsside"@da . + "landing page"@en . + "page d'atterrissage"@fr . + "pagina di destinazione"@it . + "página de destino"@es . + "vstupní stránka"@cs . + "ιστοσελίδα αρχικής πρόσβασης"@el . + "صفحة وصول"@ar . + "ランディング・ページ"@ja . + . + . + "A Web page that can be navigated to in a Web browser to gain access to the catalog, a dataset, its distributions and/or additional information."@en . + "En webside som en webbrowser kan navigeres til for at få adgang til kataloget, et datasæt, dets distritbutioner og/eller yderligere information."@da . + "Una pagina web che può essere navigata per ottenere l'accesso al catalogo, ad un dataset, alle distribuzioni del dataset e/o ad informazioni addizionali."@it . + "Una página web que puede ser visitada en un explorador Web para tener acceso el catálogo, un conjunto de datos, sus distribuciones y/o información adicional."@es . + "Une page Web accessible par un navigateur Web donnant accès au catalogue, un jeu de données, ses distributions et/ou des informations additionnelles."@fr . + "Webová stránka, na kterou lze pro získání přístupu ke katalogu, datové sadě, jejím distribucím a/nebo dalším informacím přistoupit webovým prohlížečem."@cs . + "Μία ιστοσελίδα πλοηγίσιμη μέσω ενός φυλλομετρητή (Web browser) που δίνει πρόσβαση στο σύνολο δεδομένων, τις διανομές αυτού ή/και επιπρόσθετες πληροφορίες."@el . + "صفحة وب يمكن من خلالها الوصول الى قائمة البيانات أو إلى معلومات إضافية متعلقة بها "@ar . + "データセット、その配信および(または)追加情報にアクセスするためにウエブ・ブラウザでナビゲートできるウェブページ。"@ja . + "Hvis en eller flere distributioner kun er tilgængelige via en destinationsside (dvs. en URL til direkte download er ikke kendt), så bør destinationssidelinket gentages som adgangsadresse for en distribution."@da . + "If the distribution(s) are accessible only through a landing page (i.e. direct download URLs are not known), then the landing page link should be duplicated as accessURL on a distribution."@en . + "Pokud je distribuce dostupná pouze přes vstupní stránku, t.j. přímý URL odkaz ke stažení není znám, URL přístupové stránky by mělo být duplikováno ve vlastnosti distribuce accessURL."@cs . + "Se la distribuzione è accessibile solo attraverso una pagina di destinazione (cioè, un URL di download diretto non è noto), il link alla pagina di destinazione deve essere duplicato come accessURL sulla distribuzione."@it . 
+ "Si la distribución es accesible solamente través de una página de aterrizaje (i.e., no se conoce una URL de descarga directa), entonces el enlance a la página de aterrizaje debe ser duplicado como accessURL sobre la distribución."@es . + "Si la distribution est seulement accessible à travers une page d'atterrissage (exple. pas de connaissance d'URLS de téléchargement direct ), alors le lien de la page d'atterrissage doit être dupliqué comme accessURL sur la distribution."@fr . + "Αν η/οι διανομή/ές είναι προσβάσιμη/ες μόνο μέσω μίας ιστοσελίδας αρχικής πρόσβασης (δηλαδή αν δεν υπάρχουν γνωστές διευθύνσεις άμεσης μεταφόρτωσης), τότε ο σύνδεσμος της ιστοσελίδας αρχικής πρόσβασης πρέπει να αναπαραχθεί ως accessURL σε μία διανομή."@el . + "ランディング・ページを通じてしか配信にアクセスできない場合(つまり、直接的なダウンロードURLが不明)には、配信におけるaccessURLとしてランディング・ページのリンクをコピーすべきです(SHOULD)。"@ja . + . + . + "Cette propriété doit être utilisée quand c'est définit le type de média de la distribution en IANA, sinon dct:format DOIT être utilisé avec différentes valeurs."@fr . + "Esta propiedad debe ser usada cuando está definido el tipo de media de la distribución en IANA, de otra manera dct:format puede ser utilizado con diferentes valores"@es . + "Il tipo di media della distribuzione come definito da IANA"@it . + "Medietypen for distributionen som den er defineret af IANA."@da . + "The media type of the distribution as defined by IANA"@en . + "Typ média distribuce definovaný v IANA."@cs . + "Η ιδιότητα αυτή ΘΑ ΠΡΕΠΕΙ να χρησιμοποιείται όταν ο τύπος μέσου μίας διανομής είναι ορισμένος στο IANA, αλλιώς η ιδιότητα dct:format ΔΥΝΑΤΑΙ να χρησιμοποιηθεί με διαφορετικές τιμές."@el . + "يجب استخدام هذه الخاصية إذا كان نوع الملف معرف ضمن IANA"@ar . + "このプロパティーは、配信のメディア・タイプがIANAで定義されているときに使用すべきで(SHOULD)、そうでない場合には、dct:formatを様々な値と共に使用できます(MAY)。"@ja . + . + . + "media type"@en . + "medietype"@da . + "tipo de media"@es . + "tipo di media"@it . + "typ média"@cs . + "type de média"@fr . + "τύπος μέσου"@el . + "نوع الميديا"@ar . + "メディア・タイプ"@ja . + . + . + "Il range di dcat:mediaType è stato ristretto come parte della revisione di DCAT."@it . + "Obor hodnot dcat:mediaType byl zúžen v této revizi DCAT."@cs . + "The range of dcat:mediaType has been tightened as part of the revision of DCAT."@en . + "Cette propriété doit être utilisée quand c'est définit le type de média de la distribution en IANA, sinon dct:format DOIT être utilisé avec différentes valeurs."@fr . + "Esta propiedad debe ser usada cuando está definido el tipo de media de la distribución en IANA, de otra manera dct:format puede ser utilizado con diferentes valores."@es . + "Il tipo di media della distribuzione come definito da IANA."@it . + "Medietypen for distributionen som den er defineret af IANA."@da . + "The media type of the distribution as defined by IANA."@en . + "Typ média distribuce definovaný v IANA."@cs . + "Η ιδιότητα αυτή ΘΑ ΠΡΕΠΕΙ να χρησιμοποιείται όταν ο τύπος μέσου μίας διανομής είναι ορισμένος στο IANA, αλλιώς η ιδιότητα dct:format ΔΥΝΑΤΑΙ να χρησιμοποιηθεί με διαφορετικές τιμές."@el . + "يجب استخدام هذه الخاصية إذا كان نوع الملف معرف ضمن IANA"@ar . + "このプロパティーは、配信のメディア・タイプがIANAで定義されているときに使用すべきで(SHOULD)、そうでない場合には、dct:formatを様々な値と共に使用できます(MAY)。"@ja . + "Status: English Definition text modified by DCAT revision team, Italian and Czech translation provided, other translations pending. Note some inconsistency on def vs. usage."@en . 
+ "Denne egenskab BØR anvendes hvis distributionens medietype optræder i 'IANA media types registry' https://www.iana.org/assignments/media-types/, ellers KAN egenskaben dct:format anvendes med et andet udfaldsrum."@da . + "Esta propiedad DEBERÍA usarse cuando el 'media type' de la distribución está definido en el registro IANA de 'media types' https://www.iana.org/assignments/media-types/, de lo contrario, dct:format PUEDE usarse con distintos valores."@es . + "Questa proprietà DEVE essere usata quando il tipo di media della distribuzione è definito nel registro dei tipi di media IANA https://www.iana.org/assignments/media-types/, altrimenti dct:format PUO 'essere usato con differenti valori."@it . + "Tato vlastnost BY MĚLA být použita, je-li typ média distribuce definován v registru IANA https://www.iana.org/assignments/media-types/. V ostatních případech MŮŽE být použita vlastnost dct:format s jinými hodnotami."@cs . + "This property SHOULD be used when the media type of the distribution is defined in the IANA media types registry https://www.iana.org/assignments/media-types/, otherwise dct:format MAY be used with different values."@en . + . + . + "Balíčkový formát souboru, ve kterém je jeden či více souborů seskupeno dohromady, např. aby bylo možné stáhnout sadu souvisejících souborů naráz."@cs . + "El formato del archivo en que se agrupan uno o más archivos de datos, e.g. para permitir que un conjunto de archivos relacionados se bajen juntos."@es . + "Format til pakning af data med henblik på distribution af en eller flere relaterede datafiler der samles til en enhed med henblik på samlet distribution. "@da . + "Il formato di impacchettamento della distribuzione in cui uno o più file di dati sono raggruppati insieme, ad es. per abilitare un insieme di file correlati da scaricare insieme."@it . + "The package format of the distribution in which one or more data files are grouped together, e.g. to enable a set of related files to be downloaded together."@en . + . + . + "formato de empaquetado"@es . + "formato di impacchettamento"@it . + "formát balíčku"@cs . + "packaging format"@en . + "pakkeformat"@da . + . + . + "New property added in DCAT 2.0."@en . + "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs . + "Nueva propiedad agregada en DCAT 2.0."@es . + "Nuova proprietà aggiunta in DCAT 2.0."@it . + "Ny egenskab tilføjet i DCAT 2.0."@da . + "Balíčkový formát souboru, ve kterém je jeden či více souborů seskupeno dohromady, např. aby bylo možné stáhnout sadu souvisejících souborů naráz."@cs . + "El formato del archivo en que se agrupan uno o más archivos de datos, e.g. para permitir que un conjunto de archivos relacionados se bajen juntos."@es . + "Il formato di impacchettamento della distribuzione in cui uno o più file di dati sono raggruppati insieme, ad es. per abilitare un insieme di file correlati da scaricare insieme."@it . + "The package format of the distribution in which one or more data files are grouped together, e.g. to enable a set of related files to be downloaded together."@en . + "Denne egenskab kan anvendes hvis filerne i en distribution er pakket, fx i en TAR-fil, en Frictionless Data Package eller en Bagit-fil. Formatet BØR udtrykkes ved en medietype som defineret i 'IANA media types registry', hvis der optræder en relevant medietype dér: https://www.iana.org/assignments/media-types/."@da . + "Esta propiedad se debe usar cuando los archivos de la distribución están empaquetados, por ejemplo en un archivo TAR, Frictionless Data Package o Bagit. 
El formato DEBERÍA expresarse usando un 'media type', tales como los definidos en el registro IANA de 'media types' https://www.iana.org/assignments/media-types/, si está disponibles."@es . + "Questa proprietà deve essere utilizzata quando i file nella distribuzione sono impacchettati, ad esempio in un file TAR, Frictionless Data Package o Bagit. Il formato DOVREBBE essere espresso utilizzando un tipo di supporto come definito dal registro dei tipi di media IANA https://www.iana.org/assignments/media-types/, se disponibili."@it . + "Tato vlastnost se použije, když jsou soubory v distribuci zabaleny, např. v souboru TAR, v balíčku Frictionless Data Package nebo v souboru Bagit. Formát BY MĚL být vyjádřen pomocí typu média definovaného v registru IANA https://www.iana.org/assignments/media-types/, pokud existuje."@cs . + "This property to be used when the files in the distribution are packaged, e.g. in a TAR file, a Frictionless Data Package or a Bagit file. The format SHOULD be expressed using a media type as defined by IANA media types registry https://www.iana.org/assignments/media-types/, if available."@en . + . + "Enlace a una descripción de la relación con otro recurso."@es . + "Link a una descrizione di una relazione con un'altra risorsa."@it . + "Link to a description of a relationship with another resource."@en . + "Odkaz na popis vztahu s jiným zdrojem."@cs . + "Reference til en beskrivelse af en relation til en anden ressource."@da . + . + "Kvalificeret relation"@da . + "kvalifikovaný vztah"@cs . + "qualified relation"@en . + "relación calificada"@es . + "relazione qualificata"@it . + . + "New property added in DCAT 2.0."@en . + "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs . + "Nuova proprietà aggiunta in DCAT 2.0."@it . + "Ny egenskab tilføjet i DCAT 2.0."@da . + "Propiedad nueva añadida en DCAT 2.0."@es . + "Enlace a una descripción de la relación con otro recurso."@es . + "Link a una descrizione di una relazione con un'altra risorsa."@it . + "Link to a description of a relationship with another resource."@en . + "Odkaz na popis vztahu s jiným zdrojem."@cs . + "Reference til en beskrivelse af en relation til en anden ressource."@da . + "Introdotta in DCAT per integrare le altre relazioni qualificate di PROV."@it . + "Introduced into DCAT to complement the other PROV qualified relations. "@en . + "Introduceret i DCAT med henblik på at supplere de øvrige kvalificerede relationer fra PROV. "@da . + "Přidáno do DCAT k doplnění jiných kvalifikovaných vztahů ze slovníku PROV."@cs . + "Se incluyó en DCAT para complementar las relaciones calificadas disponibles en PROV."@es . + "Anvendes til at referere til en anden ressource hvor relationens betydning er kendt men ikke matcher en af de standardiserede egenskaber fra Dublin Core (dct:hasPart, dct:isPartOf, dct:conformsTo, dct:isFormatOf, dct:hasFormat, dct:isVersionOf, dct:hasVersion, dct:replaces, dct:isReplacedBy, dct:references, dct:isReferencedBy, dct:requires, dct:isRequiredBy) eller PROV-O-egenskaber (prov:wasDerivedFrom, prov:wasInfluencedBy, prov:wasQuotedFrom, prov:wasRevisionOf, prov:hadPrimarySource, prov:alternateOf, prov:specializationOf)."@da . 
+ "Použito pro odkazování na jiný zdroj, kde druh vztahu je znám, ale neodpovídá standardním vlastnostem ze slovníku Dublin Core (dct:hasPart, dct:isPartOf, dct:conformsTo, dct:isFormatOf, dct:hasFormat, dct:isVersionOf, dct:hasVersion, dct:replaces, dct:isReplacedBy, dct:references, dct:isReferencedBy, dct:requires, dct:isRequiredBy) či slovníku PROV-O (prov:wasDerivedFrom, prov:wasInfluencedBy, prov:wasQuotedFrom, prov:wasRevisionOf, prov:hadPrimarySource, prov:alternateOf, prov:specializationOf)."@cs . + "Se usa para asociar con otro recurso para el cuál la naturaleza de la relación es conocida pero no es ninguna de las propiedades que provee el estándar Dublin Core (dct:hasPart, dct:isPartOf, dct:conformsTo, dct:isFormatOf, dct:hasFormat, dct:isVersionOf, dct:hasVersion, dct:replaces, dct:isReplacedBy, dct:references, dct:isReferencedBy, dct:requires, dct:isRequiredBy) or PROV-O properties (prov:wasDerivedFrom, prov:wasInfluencedBy, prov:wasQuotedFrom, prov:wasRevisionOf, prov:hadPrimarySource, prov:alternateOf, prov:specializationOf)."@es . + "Used to link to another resource where the nature of the relationship is known but does not match one of the standard Dublin Core properties (dct:hasPart, dct:isPartOf, dct:conformsTo, dct:isFormatOf, dct:hasFormat, dct:isVersionOf, dct:hasVersion, dct:replaces, dct:isReplacedBy, dct:references, dct:isReferencedBy, dct:requires, dct:isRequiredBy) or PROV-O properties (prov:wasDerivedFrom, prov:wasInfluencedBy, prov:wasQuotedFrom, prov:wasRevisionOf, prov:hadPrimarySource, prov:alternateOf, prov:specializationOf)."@en . + "Viene utilizzato per associarsi a un'altra risorsa nei casi per i quali la natura della relazione è nota ma non è alcuna delle proprietà fornite dallo standard Dublin Core (dct:hasPart, dct:isPartOf, dct:conformsTo, dct:isFormatOf, dct:hasFormat , dct:isVersionOf, dct:hasVersion, dct:replaces, dct:isReplacedBy, dct:references, dct:isReferencedBy, dct:require, dct:isRequiredBy) o dalle proprietà fornite da PROV-O (prov:wasDerivedFrom, prov:wasInfluencedBy, prov:wasQuotedFrom , prov:wasRevisionOf, prov:hadPrimarySource, prov:alternateOf, prov:specializationOf)."@it . + . + . + "A record describing the registration of a single dataset or data service that is part of the catalog."@en . + "Describe la registración de un conjunto de datos o un servicio de datos en el catálogo."@es . + "En post der beskriver registreringen af et enkelt datasæt eller en datatjeneste som er opført i kataloget."@da . + "Propojuje katalog a jeho záznamy."@cs . + "Relie un catalogue à ses registres."@fr . + "Un record che descrive la registrazione di un singolo set di dati o di un servizio dati che fa parte del catalogo."@it . + "Záznam popisující registraci jedné datové sady či datové služby jakožto součásti katalogu."@cs . + "Συνδέει έναν κατάλογο με τις καταγραφές του."@el . + "تربط الفهرس بسجل ضمنه"@ar . + "カタログの一部であるカタログ・レコード。"@ja . + . + . + "post"@da . + "record"@en . + "record"@it . + "registre"@fr . + "registro"@es . + "záznam"@cs . + "καταγραφή"@el . + "سجل"@ar . + "カタログ・レコード"@ja . + . + "har post"@da . + "A record describing the registration of a single dataset or data service that is part of the catalog."@en . + "Describe la registración de un conjunto de datos o un servicio de datos en el catálogo."@es . + "En post der beskriver registreringen af et enkelt datasæt eller en datatjeneste som er opført i kataloget."@da . + "Propojuje katalog a jeho záznamy."@cs . + "Relie un catalogue à ses registres."@fr . 
+ "Un record che descrive la registrazione di un singolo set di dati o di un servizio dati che fa parte del catalogo."@it . + "Záznam popisující registraci jedné datové sady či datové služby jakožto součásti katalogu."@cs . + "Συνδέει έναν κατάλογο με τις καταγραφές του."@el . + "تربط الفهرس بسجل ضمنه"@ar . + "カタログの一部であるカタログ・レコード。"@ja . + "Status: English, Italian, Spanish and Czech Definitions modified by DCAT revision team, other translations pending."@en . + . + "A collection of data that this DataService can distribute."@en . + "En samling af data som denne datatjeneste kan distribuere."@da . + "Kolekce dat, kterou je tato Datová služba schopna poskytnout."@cs . + "Una colección de datos que este Servicio de Datos puede distribuir."@es . + "Una raccolta di dati che questo DataService può distribuire."@it . + . + "datatjeneste for datasæt"@da . + "poskytuje datovou sadu"@cs . + "provee conjunto de datos"@es . + "serve set di dati"@it . + "serves dataset"@en . + . + "distribuerer"@da . + "ekspederer"@da . + "udstiller"@da . + "New property in DCAT 2.0."@en . + "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs . + "Nueva propiedad agregada en DCAT 2.0."@es . + "Nuova proprietà in DCAT 2.0."@it . + "A collection of data that this DataService can distribute."@en . + "En samling af data som denne datatjeneste kan distribuere."@da . + "Kolekce dat, kterou je tato Datová služba schopna poskytnout."@cs . + "Una colección de datos que este Servicio de Datos puede distribuir."@es . + "Una raccolta di dati che questo DataService può distribuire."@it . + . + "A site or endpoint that is listed in the catalog."@en . + "Et websted eller et endpoint som er opført i kataloget."@da . + "Umístění či přístupový bod registrovaný v katalogu."@cs . + "Un sitio o 'endpoint' que está listado en el catálogo."@es . + "Un sito o endpoint elencato nel catalogo."@it . + . + "datatjeneste"@da . + "service"@en . + "servicio"@es . + "servizio"@it . + "služba"@cs . + . + . + . + "har datatjeneste"@da . + "New property added in DCAT 2.0."@en . + "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs . + "Nueva propiedad añadida en DCAT 2.0."@es . + "Nuova proprietà aggiunta in DCAT 2.0."@it . + "A site or endpoint that is listed in the catalog."@en . + "Et websted eller et endpoint som er opført i kataloget."@da . + "Umístění či přístupový bod registrovaný v katalogu."@cs . + "Un sitio o 'endpoint' que está listado en el catálogo."@es . + "Un sito o endpoint elencato nel catalogo."@it . + . + "mindste geografiske afstand som kan erkendes i et datasæt, målt i meter."@da . + "minimum spatial separation resolvable in a dataset, measured in meters."@en-US . + "minimum spatial separation resolvable in a dataset, measured in metres."@en-GB . + "minimální prostorový rozestup rozeznatelný v datové sadě, měřeno v metrech."@cs . + "mínima separacíon espacial disponible en un conjunto de datos, medida en metros."@es . + "separazione spaziale minima risolvibile in un set di dati, misurata in metri."@it . + "geografisk opløsning (meter)"@da . + "prostorové rozlišení (metry)"@cs . + "resolución espacial (metros)"@es . + "risoluzione spaziale (metros)"@it . + "spatial resolution (meters)"@en-US . + "spatial resolution (metres)"@en-GB . + . + "New property added in DCAT 2.0."@en . + "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs . + "Nueva propiedad añadida en DCAT 2.0."@es . + "Nuova proprietà aggiunta in DCAT 2.0."@it . + "Ny genskab tilføjet i DCAT 2.0."@da . + "mindste geografiske afstand som kan resolveres i et datasæt, målt i meter."@da . 
+ "minimum spatial separation resolvable in a dataset, measured in meters."@en-US . + "minimum spatial separation resolvable in a dataset, measured in metres."@en-GB . + "minimální prostorový rozestup rozeznatelný v datové sadě, měřeno v metrech."@cs . + "mínima separacíon espacial disponible en un conjunto de datos, medida en metros."@es . + "separazione spaziale minima risolvibile in un set di dati, misurata in metri."@it . + "Kan optræde i forbindelse med beskrivelse af datasættet eller datasætditributionen, så der er ikke angivet et domæne for egenskaben."@da . + "Might appear in the description of a Dataset or a Distribution, so no domain is specified."@en . + "Může se vyskytnout v popisu Datové sady nebo Distribuce, takže nebyl specifikován definiční obor."@cs . + "Alternative geografiske opløsninger kan leveres som forskellige datasætdistributioner."@da . + "Alternative spatial resolutions might be provided as different dataset distributions."@en . + "Distintas distribuciones de un conjunto de datos pueden tener resoluciones espaciales diferentes."@es . + "Hvis datasættet udgøres af et billede eller et grid, så bør dette svare til afstanden mellem elementerne. For andre typer af spatiale datasæt, vil denne egenskab typisk indikere den mindste afstand mellem elementerne i datasættet."@da . + "If the dataset is an image or grid this should correspond to the spacing of items. For other kinds of spatial dataset, this property will usually indicate the smallest distance between items in the dataset."@en . + "Pokud je datová sada obraz či mřížka, měla by tato vlastnost odpovídat rozestupu položek. Pro ostatní druhy prostorových datových sad bude tato vlastnost obvykle indikovat nejmenší vzdálenost mezi položkami této datové sady."@cs . + "Risoluzioni spaziali alternative possono essere fornite come diverse distribuzioni di set di dati."@it . + "Různá prostorová rozlišení mohou být poskytována jako různé distribuce datové sady."@cs . + "Se il set di dati è un'immagine o una griglia, questo dovrebbe corrispondere alla spaziatura degli elementi. Per altri tipi di set di dati spaziali, questa proprietà di solito indica la distanza minima tra gli elementi nel set di dati."@it . + "Si el conjunto de datos es una imágen o grilla, esta propiedad corresponde al espaciado de los elementos. Para otro tipo de conjunto de datos espaciales, esta propieda usualmente indica la menor distancia entre los elementos de dichos datos."@es . + . + . + . + "data di inizio"@it . + "datum začátku"@cs . + "start date"@en . + "startdato"@da . + . + "starttidspunkt"@da . + "New property added in DCAT 2.0."@en . + "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs . + "Nueva propiedad agregada en DCAT 2.0."@es . + "Nuova proprietà aggiunta in DCAT 2.0."@it . + "Ny egenskab tilføjet i DCAT 2.0."@da . + "El comienzo del período"@es . + "L'inizio del periodo"@it . + "Start på perioden."@da . + "The start of the period"@en . + "Začátek doby trvání"@cs . + "El rango de esta propiedad es intencionalmente genérico con el propósito de permitir distintos niveles de precisión temporal para especificar el comienzo de un período. Por ejemplo, puede expresarse como una fecha (xsd:date), una fecha y un tiempo (xsd:dateTime), o un año (xsd:gYear)."@es . + "Il range di questa proprietà è volutamente generico, con lo scopo di consentire diversi livelli di precisione temporale per specificare l'inizio di un periodo. Ad esempio, può essere espresso con una data (xsd:date), una data e un'ora (xsd:dateTime), o un anno (xsd:gYear)."@it . 
+ "Obor hodnot této vlastnosti je úmyslně obecný, aby umožnil různé úrovně časového rozlišení pro specifikaci začátku doby trvání. Ten může být kupříkladu vyjádřen datumem (xsd:date), datumem a časem (xsd:dateTime) či rokem (xsd:gYear)."@cs . + "Rækkeviden for denne egenskab er bevidst generisk defineret med det formål at tillade forskellige niveauer af tidslig præcision ifm. angivelse af startdatoen for en periode. Den kan eksempelvis udtrykkes som en dato (xsd:date), en dato og et tidspunkt (xsd:dateTime), eller et årstal (xsd:gYear)."@da . + "The range of this property is intentionally generic, with the purpose of allowing different level of temporal precision for specifying the start of a period. E.g., it can be expressed with a date (xsd:date), a date and time (xsd:dateTime), or a year (xsd:gYear)."@en . + . + "mindste tidsperiode der kan resolveres i datasættet."@da . + "minimum time period resolvable in a dataset."@en . + "minimální doba trvání rozlišitelná v datové sadě."@cs . + "periodo di tempo minimo risolvibile in un set di dati."@it . + "período de tiempo mínimo en el conjunto de datos."@es . + "resolución temporal"@es . + "risoluzione temporale"@it . + "temporal resolution"@en . + "tidslig opløsning"@da . + "časové rozlišení"@cs . + . + "New property added in DCAT 2.0."@en . + "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs . + "Nueva propiedad añadida en DCAT 2.0."@es . + "Nuova proprietà aggiunta in DCAT 2.0."@it . + "mindste tidsperiode der kan resolveres i datasættet."@da . + "minimum time period resolvable in a dataset."@en . + "minimální doba trvání rozlišitelná v datové sadě."@cs . + "periodo di tempo minimo risolvibile in un set di dati."@it . + "período de tiempo mínimo en el conjunto de datos."@es . + "Kan optræde i forbindelse med beskrivelse af datasættet eller datasætditributionen, så der er ikke angivet et domæne for egenskaben."@da . + "Might appear in the description of a Dataset or a Distribution, so no domain is specified."@en . + "Může se vyskytnout v popisu Datové sady nebo Distribuce, takže nebyl specifikován definiční obor."@cs . + "Alternative temporal resolutions might be provided as different dataset distributions."@en . + "Alternative tidslige opløsninger kan leveres som forskellige datasætdistributioner."@da . + "Distintas distribuciones del conjunto de datos pueden tener resoluciones temporales diferentes."@es . + "Hvis datasættet er en tidsserie, så bør denne egenskab svare til afstanden mellem elementerne i tidsserien. For andre typer af datasæt indikerer denne egenskab den mindste tidsforskel mellem elementer i datasættet."@da . + "If the dataset is a time-series this should correspond to the spacing of items in the series. For other kinds of dataset, this property will usually indicate the smallest time difference between items in the dataset."@en . + "Pokud je datová sada časovou řadou, měla by tato vlastnost odpovídat rozestupu položek v řadě. Pro ostatní druhy datových sad bude tato vlastnost obvykle indikovat nejmenší časovou vzdálenost mezi položkami této datové sady."@cs . + "Risoluzioni temporali alternative potrebbero essere fornite come diverse distribuzioni di set di dati."@it . + "Různá časová rozlišení mohou být poskytována jako různé distribuce datové sady."@cs . + "Se il set di dati è una serie temporale, questo dovrebbe corrispondere alla spaziatura degli elementi della serie. Per altri tipi di set di dati, questa proprietà di solito indica la più piccola differenza di tempo tra gli elementi nel set di dati."@it . 
+ "Si el conjunto de datos es una serie temporal, debe corresponder al espaciado de los elementos de la serie. Para otro tipo de conjuntos de datos, esta propiedad indicará usualmente la menor diferencia de tiempo entre elementos en el dataset."@es . + . + . + "A main category of the resource. A resource can have multiple themes."@en . + "Et centralt emne for ressourcen. En ressource kan have flere centrale emner."@da . + "Hlavní téma zdroje. Zdroj může mít více témat."@cs . + "La categoria principale della risorsa. Una risorsa può avere più temi."@it . + "La categoría principal del recurso. Un recurso puede tener varios temas."@es . + "La catégorie principale de la ressource. Une ressource peut avoir plusieurs thèmes."@fr . + "Η κύρια κατηγορία του συνόλου δεδομένων. Ένα σύνολο δεδομένων δύναται να έχει πολλαπλά θέματα."@el . + "التصنيف الرئيسي لقائمة البيانات. قائمة البيانات يمكن أن تملك أكثر من تصنيف رئيسي واحد."@ar . + "データセットの主要カテゴリー。データセットは複数のテーマを持つことができます。"@ja . + . + "emne"@da . + "tema"@es . + "tema"@it . + "theme"@en . + "thème"@fr . + "téma"@cs . + "Θέμα"@el . + "التصنيف"@ar . + "テーマ/カテゴリー"@ja . + . + . + "tema"@da . + "A main category of the resource. A resource can have multiple themes."@en . + "Et centralt emne for ressourcen. En ressource kan have flere centrale emner."@da . + "Hlavní téma zdroje. Zdroj může mít více témat."@cs . + "La categoria principale della risorsa. Una risorsa può avere più temi."@it . + "La categoría principal del recurso. Un recurso puede tener varios temas."@es . + "La catégorie principale de la ressource. Une ressource peut avoir plusieurs thèmes."@fr . + "Η κύρια κατηγορία του συνόλου δεδομένων. Ένα σύνολο δεδομένων δύναται να έχει πολλαπλά θέματα."@el . + "التصنيف الرئيسي لقائمة البيانات. قائمة البيانات يمكن أن تملك أكثر من تصنيف رئيسي واحد."@ar . + "データセットの主要カテゴリー。データセットは複数のテーマを持つことができます。"@ja . + "Status: English Definition text modified by DCAT revision team, all except for Italian and Czech translations are pending."@en . + "El conjunto de skos:Concepts utilizados para categorizar los recursos están organizados en un skos:ConceptScheme que describe todas las categorías y sus relaciones en el catálogo."@es . + "Il set di concetti skos usati per categorizzare le risorse sono organizzati in skos:ConceptScheme che descrive tutte le categorie e le loro relazioni nel catalogo."@it . + "Sada instancí třídy skos:Concept použitá pro kategorizaci zdrojů je organizována do schématu konceptů skos:ConceptScheme, které popisuje všechny kategorie v katalogu a jejich vztahy."@cs . + "Samlingen af begreber (skos:Concept) der anvendes til at emneinddele ressourcer organiseres i et begrebssystem (skos:ConceptScheme) som beskriver alle emnerne og deres relationer i kataloget."@da . + "The set of skos:Concepts used to categorize the resources are organized in a skos:ConceptScheme describing all the categories and their relations in the catalog."@en . + "Un ensemble de skos:Concepts utilisés pour catégoriser les ressources sont organisés en un skos:ConceptScheme décrivant toutes les catégories et ses relations dans le catalogue."@fr . + "Το σετ των skos:Concepts που χρησιμοποιείται για να κατηγοριοποιήσει τα σύνολα δεδομένων είναι οργανωμένο εντός ενός skos:ConceptScheme που περιγράφει όλες τις κατηγορίες και τις σχέσεις αυτών στον κατάλογο."@el . + "データセットを分類するために用いられるskos:Conceptの集合は、カタログのすべてのカテゴリーとそれらの関係を記述しているskos:ConceptSchemeで組織化されます。"@ja . + . + . + . + . + . 
+ "El sistema de organización del conocimiento utilizado para clasificar conjuntos de datos de catálogos."@es . + "Il sistema di organizzazione della conoscenza (KOS) usato per classificare i dataset del catalogo."@it . + "Le systhème d'ogranisation de connaissances utilisé pour classifier les jeux de données du catalogue."@fr . + "Systém organizace znalostí (KOS) použitý pro klasifikaci datových sad v katalogu."@cs . + "The knowledge organization system (KOS) used to classify catalog's datasets."@en . + "Vidensorganiseringssystem (KOS) som anvendes til at klassificere datasæt i kataloget."@da . + "Το σύστημα οργάνωσης γνώσης που χρησιμοποιείται για την κατηγοριοποίηση των συνόλων δεδομένων του καταλόγου."@el . + "لائحة التصنيفات المستخدمه لتصنيف قوائم البيانات ضمن الفهرس"@ar . + "カタログのデータセットを分類するために用いられる知識組織化体系(KOS;knowledge organization system)。"@ja . + . + . + "emnetaksonomi"@da . + "tassonomia dei temi"@it . + "taxonomie de thèmes"@fr . + "taxonomie témat"@cs . + "taxonomía de temas"@es . + "theme taxonomy"@en . + "Ταξινομία θεματικών κατηγοριών."@el . + "قائمة التصنيفات"@ar . + "テーマ"@ja . + . + "temataksonomi"@da . + "El sistema de organización del conocimiento utilizado para clasificar conjuntos de datos de catálogos."@es . + "Il sistema di organizzazione della conoscenza (KOS) usato per classificare i dataset del catalogo."@it . + "Le systhème d'ogranisation de connaissances utilisé pour classifier les jeux de données du catalogue."@fr . + "Systém organizace znalostí (KOS) použitý pro klasifikaci datových sad v katalogu."@cs . + "The knowledge organization system (KOS) used to classify catalog's datasets."@en . + "Vidensorganiseringssystem (KOS) som anvendes til at klassificere datasæt i kataloget."@da . + "Το σύστημα οργάνωσης γνώσης που χρησιμοποιείται για την κατηγοριοποίηση των συνόλων δεδομένων του καταλόγου."@el . + "لائحة التصنيفات المستخدمه لتصنيف قوائم البيانات ضمن الفهرس"@ar . + "カタログのデータセットを分類するために用いられる知識組織化体系(KOS;knowledge organization system)。"@ja . + "Det anbefales at taksonomien organiseres i et skos:ConceptScheme, skos:Collection, owl:Ontology eller lignende, som giver mulighed for at ethvert medlem af taksonomien kan forsynes med en IRI og udgives som linked-data."@da . + "It is recommended that the taxonomy is organized in a skos:ConceptScheme, skos:Collection, owl:Ontology or similar, which allows each member to be denoted by an IRI and published as linked-data."@en . + "Je doporučeno, aby byla taxonomie vyjádřena jako skos:ConceptScheme, skos:Collection, owl:Ontology nebo podobné, aby mohla být každá položka identifikována pomocí IRI a publikována jako propojená data."@cs . + "Se recomienda que la taxonomía se organice como un skos:ConceptScheme, skos:Collection, owl:Ontology o similar, los cuáles permiten que cada miembro se denote con una IRI y se publique como datos enlazados."@es . + "Si raccomanda che la tassonomia sia organizzata in uno skos:ConceptScheme, skos:Collection, owl:Ontology o simili, che permette ad ogni membro di essere indicato da un IRI e pubblicato come linked-data."@it . + _:c14n1 . + _:c14n11 . + _:c14n14 . + _:c14n15 . + _:c14n17 . + _:c14n18 . + _:c14n2 . + _:c14n20 . + _:c14n21 . + _:c14n22 . + _:c14n24 . + _:c14n25 . + _:c14n26 . + _:c14n28 . + _:c14n6 . + _:c14n9 . + _:c14n27 . + _:c14n8 . + . + "2012-04-24"^^ . + "2013-09-20"^^ . + "2013-11-28"^^ . + "2017-12-19"^^ . + "2019" . + "2020-11-30"^^ . + "2021-09-14"^^ . + . 
+ "DCAT er et RDF-vokabular som har til formål at understøtte interoperabilitet mellem datakataloger udgivet på nettet. Ved at anvende DCAT til at beskrive datasæt i datakataloger, kan udgivere øge findbarhed og gøre det gøre det lettere for applikationer at anvende metadata fra forskellige kataloger. Derudover understøttes decentraliseret udstilling af kataloger og fødererede datasætsøgninger på tværs af websider. Aggregerede DCAT-metadata kan fungere som fortegnelsesfiler der kan understøtte digital bevaring. DCAT er defineret på http://www.w3.org/TR/vocab-dcat/. Enhver forskel mellem det normative dokument og dette schema er en fejl i dette schema."@da . + "DCAT es un vocabulario RDF diseñado para facilitar la interoperabilidad entre catálogos de datos publicados en la Web. Utilizando DCAT para describir datos disponibles en catálogos se aumenta la posibilidad de que sean descubiertos y se permite que las aplicaciones consuman fácilmente los metadatos de varios catálogos."@es . + "DCAT est un vocabulaire développé pour faciliter l'interopérabilité entre les jeux de données publiées sur le Web. En utilisant DCAT pour décrire les jeux de données dans les catalogues de données, les fournisseurs de données augmentent leur découverte et permettent que les applications facilement les métadonnées de plusieurs catalogues. Il permet en plus la publication décentralisée des catalogues et facilitent la recherche fédérée des données entre plusieurs sites. Les métadonnées DCAT aggrégées peuvent servir comme un manifeste pour faciliter la préservation digitale des ressources. DCAT est définie à l'adresse http://www.w3.org/TR/vocab-dcat/. Une quelconque version de ce document normatif et ce vocabulaire est une erreur dans ce vocabulaire."@fr . + "DCAT is an RDF vocabulary designed to facilitate interoperability between data catalogs published on the Web. By using DCAT to describe datasets in data catalogs, publishers increase discoverability and enable applications easily to consume metadata from multiple catalogs. It further enables decentralized publishing of catalogs and facilitates federated dataset search across sites. Aggregated DCAT metadata can serve as a manifest file to facilitate digital preservation. DCAT is defined at http://www.w3.org/TR/vocab-dcat/. Any variance between that normative document and this schema is an error in this schema."@en . + "DCAT je RDF slovník navržený pro zprostředkování interoperability mezi datovými katalogy publikovanými na Webu. Poskytovatelé dat používáním slovníku DCAT pro popis datových sad v datových katalozích zvyšují jejich dohledatelnost a umožňují aplikacím konzumovat metadata z více katalogů. Dále je umožňena decentralizovaná publikace katalogů a federované dotazování na datové sady napříč katalogy. Agregovaná DCAT metadata mohou také sloužit jako průvodka umožňující digitální uchování informace. DCAT je definován na http://www.w3.org/TR/vocab-dcat/. Jakýkoliv nesoulad mezi odkazovaným dokumentem a tímto schématem je chybou v tomto schématu."@cs . + "DCAT è un vocabolario RDF progettato per facilitare l'interoperabilità tra i cataloghi di dati pubblicati nel Web. Utilizzando DCAT per descrivere i dataset nei cataloghi di dati, i fornitori migliorano la capacità di individuazione dei dati e abilitano le applicazioni al consumo di dati provenienti da cataloghi differenti. DCAT permette di decentralizzare la pubblicazione di cataloghi e facilita la ricerca federata dei dataset. 
L'aggregazione dei metadati federati può fungere da file manifesto per facilitare la conservazione digitale. DCAT è definito all'indirizzo http://www.w3.org/TR/vocab-dcat/. Qualsiasi scostamento tra tale definizione normativa e questo schema è da considerarsi un errore di questo schema."@it . + "DCATは、ウェブ上で公開されたデータ・カタログ間の相互運用性の促進を目的とするRDFの語彙です。このドキュメントでは、その利用のために、スキーマを定義し、例を提供します。データ・カタログ内のデータセットを記述するためにDCATを用いると、公開者が、発見可能性を増加させ、アプリケーションが複数のカタログのメタデータを容易に利用できるようになります。さらに、カタログの分散公開を可能にし、複数のサイトにまたがるデータセットの統合検索を促進します。集約されたDCATメタデータは、ディジタル保存を促進するためのマニフェスト・ファイルとして使用できます。"@ja . + "Το DCAT είναι ένα RDF λεξιλόγιο που σχεδιάσθηκε για να κάνει εφικτή τη διαλειτουργικότητα μεταξύ καταλόγων δεδομένων στον Παγκόσμιο Ιστό. Χρησιμοποιώντας το DCAT για την περιγραφή συνόλων δεδομένων, οι εκδότες αυτών αυξάνουν την ανακαλυψιμότητα και επιτρέπουν στις εφαρμογές την εύκολη κατανάλωση μεταδεδομένων από πολλαπλούς καταλόγους. Επιπλέον, δίνει τη δυνατότητα για αποκεντρωμένη έκδοση και διάθεση καταλόγων και επιτρέπει δυνατότητες ενοποιημένης αναζήτησης μεταξύ διαφορετικών πηγών. Συγκεντρωτικά μεταδεδομένα που έχουν περιγραφεί με το DCAT μπορούν να χρησιμοποιηθούν σαν ένα δηλωτικό αρχείο (manifest file) ώστε να διευκολύνουν την ψηφιακή συντήρηση."@el . + "هي أنطولوجية تسهل تبادل البيانات بين مختلف الفهارس على الوب. استخدام هذه الأنطولوجية يساعد على اكتشاف قوائم البيانات المنشورة على الوب و يمكن التطبيقات المختلفة من الاستفادة أتوماتيكيا من البيانات المتاحة من مختلف الفهارس."@ar . + "Datakatalogvokabular"@da . + "El vocabulario de catálogo de datos"@es . + "Il vocabolario del catalogo dei dati"@it . + "Le vocabulaire des jeux de données"@fr . + "Slovník pro datové katalogy"@cs . + "The data catalog vocabulary"@en . + "Το λεξιλόγιο των καταλόγων δεδομένων"@el . + "أنطولوجية فهارس قوائم البيانات"@ar . + "データ・カタログ語彙(DCAT)"@ja . + . + . + . + "Dette er en opdateret kopi af DCAT v. 2.0 som er tilgænglig på https://www.w3.org/ns/dcat.ttl"@da . + "Esta es una copia del vocabulario DCAT v2.0 disponible en https://www.w3.org/ns/dcat.ttl"@es . + "Questa è una copia aggiornata del vocabolario DCAT v2.0 disponibile in https://www.w3.org/ns/dcat.ttl"@en . + "This is an updated copy of v2.0 of the DCAT vocabulary, taken from https://www.w3.org/ns/dcat.ttl"@en . + "Toto je aktualizovaná kopie slovníku DCAT verze 2.0, převzatá z https://www.w3.org/ns/dcat.ttl"@cs . + "English language definitions updated in this revision in line with ED. Multilingual text unevenly updated."@en . + _:c14n0 . + . + "This axiom needed so that Protege loads DCAT2 without errors." . + . + "This axiom needed so that Protege loads DCAT2 without errors." . +_:c14n0 . +_:c14n0 "Government Linked Data WG" . +_:c14n1 . +_:c14n1 . +_:c14n1 "Jakub Klímek" . +_:c14n10 . +_:c14n10 "Refinitiv" . +_:c14n11 . +_:c14n11 "Ghislain Auguste Atemezing" . +_:c14n12 . +_:c14n12 . +_:c14n13 . +_:c14n13 "European Commission, DG DIGIT" . +_:c14n14 _:c14n13 . +_:c14n14 "Vassilios Peristeras" . +_:c14n15 "Martin Alvarez-Espinar" . +_:c14n16 . +_:c14n16 _:c14n12 . +_:c14n17 _:c14n10 . +_:c14n17 "David Browning" . +_:c14n18 "Boris Villazón-Terrazas" . +_:c14n19 . +_:c14n19 . +_:c14n2 . +_:c14n2 . +_:c14n2 . +_:c14n2 "Phil Archer" . +_:c14n20 . +_:c14n20 "Shuji Kamitsuna" . +_:c14n21 _:c14n3 . +_:c14n21 "Rufus Pollock" . +_:c14n22 . +_:c14n22 . +_:c14n22 . +_:c14n22 "Riccardo Albertoni" . +_:c14n23 . +_:c14n23 _:c14n16 . +_:c14n24 "Marios Meimaris" . +_:c14n25 . +_:c14n25 . +_:c14n25 "Makx Dekkers" . +_:c14n26 _:c14n30 . +_:c14n26 . +_:c14n26 . +_:c14n26 "Simon J D Cox" . 
+_:c14n26 . +_:c14n27 "John Erickson" . +_:c14n28 _:c14n4 . +_:c14n28 . +_:c14n28 . +_:c14n28 "Alejandra Gonzalez-Beltran" . +_:c14n29 . +_:c14n29 _:c14n19 . +_:c14n3 . +_:c14n3 "Open Knowledge Foundation" . +_:c14n30 . +_:c14n30 "Commonwealth Scientific and Industrial Research Organisation" . +_:c14n4 . +_:c14n4 "Science and Technology Facilities Council, UK" . +_:c14n5 . +_:c14n5 . +_:c14n5 . +_:c14n6 . +_:c14n6 . +_:c14n6 "Andrea Perego" . +_:c14n7 . +_:c14n7 "1"^^ . +_:c14n7 . +_:c14n8 . +_:c14n8 "Fadi Maali" . +_:c14n9 "Richard Cyganiak" . diff --git a/prez/reference_data/context_ontologies/rdf.nq b/prez/reference_data/context_ontologies/rdf.nq new file mode 100644 index 00000000..850501d8 --- /dev/null +++ b/prez/reference_data/context_ontologies/rdf.nq @@ -0,0 +1,127 @@ + "2019-12-16" . + "This is the RDF Schema for the RDF vocabulary terms in the RDF Namespace, defined in RDF 1.1 Concepts." . + "The RDF Concepts Vocabulary (RDF)" . + . + . + "The class of containers of alternatives." . + . + "Alt" . + . + . + "The class of unordered containers." . + . + "Bag" . + . + . + "A class representing a compound literal." . + . + "CompoundLiteral" . + . + . + . + "The datatype of RDF literals storing fragments of HTML content" . + . + "HTML" . + . + . + . + "The datatype of RDF literals storing JSON content." . + . + "JSON" . + . + . + . + "The class of RDF Lists." . + . + "List" . + . + . + "The class of plain (i.e. untyped) literal values, as used in RIF and OWL 2" . + . + "PlainLiteral" . + . + . + . + "The class of RDF properties." . + . + "Property" . + . + . + "The class of ordered containers." . + . + "Seq" . + . + . + "The class of RDF statements." . + . + "Statement" . + . + . + "The datatype of XML literal values." . + . + "XMLLiteral" . + . + . + "The base direction component of a CompoundLiteral." . + . + . + "direction" . + . + . + "The first item in the subject RDF list." . + . + . + "first" . + . + . + "The datatype of language-tagged string values" . + . + "langString" . + . + . + . + "The language component of a CompoundLiteral." . + . + . + "language" . + . + . + "The empty list, with no items in it. If the rest of a list is nil then the list has no more items in it." . + . + "nil" . + . + "The object of the subject RDF statement." . + . + . + "object" . + . + . + "The predicate of the subject RDF statement." . + . + . + "predicate" . + . + . + "The rest of the subject RDF list after the first item." . + . + . + "rest" . + . + . + "The subject of the subject RDF statement." . + . + . + "subject" . + . + . + "The subject is an instance of a class." . + . + . + "type" . + . + . + "Idiomatic property used for structured values." . + . + . + "value" . + . diff --git a/prez/reference_data/new_endpoints/cql_endpoints.ttl b/prez/reference_data/new_endpoints/cql_endpoints.ttl new file mode 100644 index 00000000..dfc84dd5 --- /dev/null +++ b/prez/reference_data/new_endpoints/cql_endpoints.ttl @@ -0,0 +1,17 @@ +PREFIX dcat: +PREFIX dcterms: +PREFIX endpoint: +PREFIX prez: +PREFIX ont: +PREFIX rdfs: +PREFIX sh: +PREFIX skos: + + +endpoint:cql a ont:ListingEndpoint ; + ont:endpointTemplate "/cql" ; + ont:deliversClasses prez:CQLObjectList ; # required to determine the correct profile for ConnegP + sh:targetClass rdfs:Class ; # required for query construction +. + +endpoint:cql ont:defaultLimit 20 ; ont:defaultOffset 0 . 
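The endpoint:cql declaration above is served by the reworked POST route in prez/routers/cql.py later in this patch. A minimal client-side sketch of calling it, assuming a local Prez instance, httpx purely as an illustrative HTTP client, and an OGC CQL2 JSON equality filter (the property name "title" and the value are illustrative, not part of the patch):

import httpx

# Client-side sketch only; not part of the patch. Assumes the CQL parser
# behind cql_parser_dependency accepts CQL2 JSON expressions like this one.
cql_filter = {
    "op": "=",
    "args": [{"property": "title"}, "Sandgate"],  # illustrative property/value
}

response = httpx.post(
    "http://localhost:8000/cql",          # assumed local instance
    json=cql_filter,
    params={"page": 1, "per_page": 20},   # mirrors ont:defaultLimit / ont:defaultOffset
)
print(response.status_code, response.headers.get("content-type"))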
diff --git a/prez/reference_data/new_endpoints/ogc_endpoints.ttl b/prez/reference_data/new_endpoints/ogc_endpoints.ttl
new file mode 100644
index 00000000..c1c8e6ad
--- /dev/null
+++ b/prez/reference_data/new_endpoints/ogc_endpoints.ttl
@@ -0,0 +1,58 @@
+PREFIX dcat: <http://www.w3.org/ns/dcat#>
+PREFIX dcterms: <http://purl.org/dc/terms/>
+PREFIX endpoint: <https://prez.dev/endpoint/>
+PREFIX prez: <https://prez.dev/>
+PREFIX ont: <https://prez.dev/ont/>
+PREFIX sh: <http://www.w3.org/ns/shacl#>
+PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
+
+endpoint:catalog-listing a ont:ListingEndpoint ;
+    ont:endpointTemplate "/catalogs" ;
+    ont:deliversClasses prez:CatalogList ; # required to determine the correct profile for ConnegP
+    sh:targetClass dcat:Catalog ; # required for query construction
+.
+
+endpoint:catalog-object a ont:ObjectEndpoint ;
+    sh:targetNode "$object" ;
+    ont:deliversClasses dcat:Catalog ; # required for link generation for objects
+    ont:endpointTemplate "/catalogs/$object" ;
+    ont:parentEndpoint endpoint:catalog-listing ;
+.
+
+endpoint:vocab-listing a ont:ListingEndpoint ;
+    ont:endpointTemplate "/catalogs/$parent_1/collections" ;
+    ont:deliversClasses prez:SchemesList ; # required to determine the correct profile for ConnegP
+    ont:ParentToFocusRelation dcterms:hasPart ;
+    sh:targetClass skos:ConceptScheme ; # required for query construction
+    ont:parentEndpoint endpoint:catalog-object ;
+.
+
+endpoint:vocab-object a ont:ObjectEndpoint ;
+    sh:targetNode "$object" ;
+    ont:deliversClasses skos:ConceptScheme ; # required for link generation for objects
+    ont:endpointTemplate "/catalogs/$parent_1/collections/$object" ;
+    ont:ParentToFocusRelation dcterms:hasPart ;
+    ont:parentEndpoint endpoint:vocab-listing ;
+.
+
+endpoint:concept-listing a ont:ListingEndpoint ;
+    ont:endpointTemplate "/catalogs/$parent_2/collections/$parent_1/items" ;
+    ont:deliversClasses prez:ConceptList ; # required to determine the correct profile for ConnegP
+    ont:ParentToFocusRelation skos:hasTopConcept ;
+    sh:targetClass skos:Concept ; # required for query construction
+    ont:parentEndpoint endpoint:vocab-object ;
+.
+
+endpoint:concept-object a ont:ObjectEndpoint ;
+    sh:targetNode "$object" ;
+    ont:deliversClasses skos:Concept ; # required for link generation for objects
+    ont:endpointTemplate "/catalogs/$parent_2/collections/$parent_1/items/$object" ;
+    ont:ParentToFocusRelation skos:hasTopConcept ;
+    ont:parentEndpoint endpoint:concept-listing ;
+.
+
+endpoint:catalog-listing ont:defaultLimit 20 ; ont:defaultOffset 0 .
+
+endpoint:vocab-listing ont:defaultLimit 20 ; ont:defaultOffset 0 .
+
+endpoint:concept-listing ont:defaultLimit 20 ; ont:defaultOffset 0 .
\ No newline at end of file
diff --git a/prez/reference_data/new_endpoints/spaceprez_endpoints.ttl b/prez/reference_data/new_endpoints/spaceprez_endpoints.ttl
new file mode 100644
index 00000000..c82b483f
--- /dev/null
+++ b/prez/reference_data/new_endpoints/spaceprez_endpoints.ttl
@@ -0,0 +1,87 @@
+PREFIX dcat: <http://www.w3.org/ns/dcat#>
+PREFIX endpoint: <https://prez.dev/endpoint/>
+PREFIX geo: <http://www.opengis.net/ont/geosparql#>
+PREFIX ont: <https://prez.dev/ont/>
+PREFIX prez: <https://prez.dev/>
+PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+PREFIX sh: <http://www.w3.org/ns/shacl#>
+PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
+
+endpoint:spaceprez-home a ont:Endpoint ;
+    ont:endpointTemplate "/s" ;
+.
+
+endpoint:dataset-listing a ont:ListingEndpoint ;
+    ont:deliversClasses prez:DatasetList ;
+    sh:targetClass dcat:Dataset ;
+    ont:isTopLevelEndpoint "true"^^xsd:boolean ;
+    ont:endpointTemplate "/s/datasets" ;
+.
+
+endpoint:dataset-object a ont:ObjectEndpoint ;
+    sh:targetNode "$object" ;
+    ont:parentEndpoint endpoint:dataset-listing ;
+    ont:deliversClasses dcat:Dataset ;
+    ont:endpointTemplate "/s/datasets/$object" ;
+.
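Each ont:endpointTemplate uses string.Template-style placeholders: $object for the focus node and $parent_1, $parent_2, ... for ancestors contributed by the ont:parentEndpoint chain. A sketch of how such a template could be filled in, assuming the placeholders follow string.Template syntax (the CURIEs below are hypothetical; Prez's real logic lives in prez/services/link_generation.py):

from string import Template

def fill_endpoint_template(template: str, obj: str, parents: list[str]) -> str:
    # parents[0] maps to $parent_1, parents[1] to $parent_2, and so on.
    mapping = {"object": obj}
    for i, parent in enumerate(parents, start=1):
        mapping[f"parent_{i}"] = parent
    return Template(template).substitute(mapping)

print(fill_endpoint_template(
    "/catalogs/$parent_2/collections/$parent_1/items/$object",
    obj="ex:concept-1",                      # hypothetical CURIEs
    parents=["ex:vocab-1", "ex:catalog-1"],
))
# -> /catalogs/ex:catalog-1/collections/ex:vocab-1/items/ex:concept-1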
+
+endpoint:feature-collection-listing a ont:ListingEndpoint ;
+    ont:parentEndpoint endpoint:dataset-object ;
+    sh:targetClass geo:FeatureCollection ;
+    ont:deliversClasses prez:FeatureCollectionList ;
+    ont:endpointTemplate "/s/datasets/$parent_1/collections" ;
+    ont:ParentToFocusRelation rdfs:member ;
+    sh:target [
+        sh:select """SELECT ?focus_node
+            WHERE {
+                $parent_1 rdfs:member ?focus_node .
+                ?focus_node rdfs:label ?label .
+            }
+            ORDER BY ?label
+            LIMIT $limit
+            OFFSET $offset
+            """ ] ;
+.
+
+endpoint:feature-collection-object a ont:ObjectEndpoint ;
+    sh:targetNode "$object" ;
+    ont:parentEndpoint endpoint:feature-collection-listing ;
+    ont:deliversClasses geo:FeatureCollection ;
+    ont:endpointTemplate "/s/datasets/$parent_1/collections/$object" ;
+    ont:ParentToFocusRelation rdfs:member ;
+.
+
+endpoint:feature-listing a ont:ListingEndpoint ;
+    ont:parentEndpoint endpoint:feature-collection-object ;
+    sh:targetClass geo:Feature ;
+    ont:deliversClasses prez:FeatureList ;
+    ont:endpointTemplate "/s/datasets/$parent_2/collections/$parent_1/items" ;
+    ont:ParentToFocusRelation rdfs:member ;
+    sh:target [
+        sh:select """SELECT ?focus_node
+            WHERE {
+                $parent_1 rdfs:member ?focus_node .
+                ?focus_node rdfs:label ?label .
+            }
+            ORDER BY ?label
+            LIMIT $limit
+            OFFSET $offset
+            """ ] ;
+.
+
+endpoint:feature-object a ont:ObjectEndpoint ;
+    sh:targetNode "$object" ;
+    ont:parentEndpoint endpoint:feature-listing ;
+    ont:deliversClasses geo:Feature ;
+    ont:endpointTemplate "/s/datasets/$parent_2/collections/$parent_1/items/$object" ;
+    ont:ParentToFocusRelation rdfs:member ;
+.
+
+endpoint:feature-listing ont:defaultLimit 20 ;
+    ont:defaultOffset 0 .
+
+endpoint:feature-collection-listing ont:defaultLimit 20 ;
+    ont:defaultOffset 0 .
+
+endpoint:dataset-listing ont:defaultLimit 20 ;
+    ont:defaultOffset 0 .
\ No newline at end of file
diff --git a/prez/reference_data/new_endpoints/vocprez_endpoints.ttl b/prez/reference_data/new_endpoints/vocprez_endpoints.ttl
new file mode 100644
index 00000000..1e9a3037
--- /dev/null
+++ b/prez/reference_data/new_endpoints/vocprez_endpoints.ttl
@@ -0,0 +1,115 @@
+PREFIX endpoint: <https://prez.dev/endpoint/>
+PREFIX ont: <https://prez.dev/ont/>
+PREFIX prez: <https://prez.dev/>
+PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+PREFIX sh: <http://www.w3.org/ns/shacl#>
+PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
+PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
+
+endpoint:vocprez-home a ont:Endpoint ;
+    ont:endpointTemplate "/v" ;
+.
+
+endpoint:collection-listing a ont:ListingEndpoint ;
+    ont:deliversClasses prez:VocPrezCollectionList ;
+    sh:targetClass skos:Collection ;
+    ont:endpointTemplate "/v/collection" ;
+.
+
+endpoint:collection-object a ont:ObjectEndpoint ;
+    sh:targetNode "$object" ;
+    ont:parentEndpoint endpoint:collection-listing ;
+    ont:deliversClasses skos:Collection ;
+    ont:endpointTemplate "/v/collection/$object" ;
+.
+
+endpoint:collection-concept a ont:ObjectEndpoint ;
+    sh:targetNode "$object" ;
+    ont:parentEndpoint endpoint:collection-object ;
+    ont:deliversClasses skos:Concept ;
+    ont:endpointTemplate "/v/collection/$parent_1/$object" ;
+    ont:ParentToFocusRelation skos:member ;
+.
+
+endpoint:vocabs-listing a ont:ListingEndpoint ;
+    ont:deliversClasses prez:SchemesList ;
+    sh:targetClass skos:ConceptScheme ;
+    ont:endpointTemplate "/v/vocab" ;
+.
+
+endpoint:vocab-object a ont:ObjectEndpoint ;
+    sh:targetNode "$object" ;
+    ont:parentEndpoint endpoint:vocabs-listing ;
+    ont:deliversClasses skos:ConceptScheme ;
+    ont:endpointTemplate "/v/vocab/$object" ;
+.
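The sh:target/sh:select queries on the listing endpoints above (and on cs-top-concepts and cs-children below) are templates instantiated per request. A sketch, under the assumption that $parent_1, $limit, and $offset are substituted string.Template-style with an IRI and paging values (Prez's real query construction lives in prez/services/listings.py; SPARQL prefixes are assumed to be injected separately):

from string import Template

SELECT_TEMPLATE = """SELECT ?focus_node
WHERE {
    $parent_1 rdfs:member ?focus_node .
    ?focus_node rdfs:label ?label .
}
ORDER BY ?label
LIMIT $limit
OFFSET $offset
"""

def instantiate(parent_uri: str, page: int, per_page: int) -> str:
    # Wrap the parent in <> so it is a valid IRI in the query body.
    return Template(SELECT_TEMPLATE).substitute(
        parent_1=f"<{parent_uri}>",
        limit=per_page,
        offset=(page - 1) * per_page,  # page 1 -> OFFSET 0
    )

print(instantiate("http://example.com/dataset/sandgate", page=2, per_page=20))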
+
+endpoint:vocab-concept a ont:ObjectEndpoint ;
+    sh:targetNode "$object" ;
+    ont:parentEndpoint endpoint:vocab-object ;
+    ont:deliversClasses skos:Concept ;
+    ont:endpointTemplate "/v/vocab/$parent_1/$object" ;
+    ont:FocusToParentRelation skos:inScheme ;
+.
+
+endpoint:cs-top-concepts a ont:ListingEndpoint ;
+    ont:deliversClasses skos:Concept ;
+    sh:rule [ sh:subject sh:this ;
+              sh:predicate prez:hasChildren ;
+              sh:object "?hasChildren" ] ;
+    sh:target [ sh:select """SELECT DISTINCT ?focus_node ?hasChildren
+        WHERE {
+            $parent_1 skos:hasTopConcept|^skos:isTopConceptOf ?focus_node .
+            ?focus_node skos:prefLabel ?label .
+            BIND(EXISTS{?focus_node skos:narrower|^skos:broader ?grandChildren} AS ?hasChildren)
+        }
+        ORDER BY ?label
+        LIMIT $limit
+        OFFSET $offset
+        """ ] ;
+    sh:targetClass skos:Concept ;
+    ont:endpointTemplate "/v/vocab/$parent_1/top-concepts" ;
+.
+
+endpoint:cs-children
+    a ont:ListingEndpoint ;
+    ont:deliversClasses skos:Concept ;
+    rdfs:comment """The concepts one level under top concepts. This query demonstrates how pagination of 2 items at a
+        time could work; the LIMIT is set to 3 so that we can determine whether further (i.e. >2) objects are available.""" ;
+    sh:rule
+        [
+            sh:object "?hasChildren" ;
+            sh:predicate prez:hasChildren ;
+            sh:subject sh:this ;
+        ] ;
+    sh:target
+        [
+            sh:select """SELECT DISTINCT ?focus_node ?hasChildren
+                WHERE {
+                    $parent_1 skos:narrower|^skos:broader ?focus_node .
+                    ?focus_node skos:prefLabel ?label .
+                    BIND(EXISTS{?focus_node skos:narrower|^skos:broader ?grandChildren} AS ?hasChildren)
+                }
+                ORDER BY ?label
+                LIMIT $limit
+                OFFSET $offset
+                """ ;
+        ] ;
+    sh:targetClass skos:Concept ;
+    ont:endpointTemplate "/v/vocab/$parent_2/$parent_1/narrowers" ;
+.
+
+ont:ListingEndpoint ont:defaultLimit 20 ;
+    ont:defaultOffset 0 .
+
+endpoint:vocabs-listing ont:defaultLimit 20 ;
+    ont:defaultOffset 0 .
+
+endpoint:cs-children ont:defaultLimit 20 ;
+    ont:defaultOffset 0 .
+
+endpoint:cs-top-concepts ont:defaultLimit 20 ;
+    ont:defaultOffset 0 .
+
+endpoint:collection-listing ont:defaultLimit 20 ;
+    ont:defaultOffset 0 .
\ No newline at end of file
diff --git a/prez/reference_data/prefixes/testing.ttl b/prez/reference_data/prefixes/testing.ttl
index f60c174a..8cc005a9 100644
--- a/prez/reference_data/prefixes/testing.ttl
+++ b/prez/reference_data/prefixes/testing.ttl
@@ -31,3 +31,19 @@ vann:preferredNamespaceUri ;
 [ vann:preferredNamespacePrefix "defn" ;
 vann:preferredNamespaceUri ;
 ] .
+
+[ vann:preferredNamespacePrefix "preztest" ;
+vann:preferredNamespaceUri ;
+] .
+
+[ vann:preferredNamespacePrefix "sys" ;
+vann:preferredNamespaceUri ;
+] .
+
+[ vann:preferredNamespacePrefix "sys" ;
+vann:preferredNamespaceUri ;
+] .
+
+[ vann:preferredNamespacePrefix "defn" ;
+vann:preferredNamespaceUri ;
+] .
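The rdfs:comment on endpoint:cs-children describes an over-fetch pagination pattern: ask the store for one more row than the page size and use the extra row only as a has-more signal, so no separate COUNT query is needed. A minimal sketch of that pattern (illustrative only, not Prez's actual implementation):

def page_of(rows: list, per_page: int) -> tuple[list, bool]:
    # rows is the result of a query with LIMIT per_page + 1.
    has_more = len(rows) > per_page
    return rows[:per_page], has_more

rows, has_more = page_of(["concept-a", "concept-b", "concept-c"], per_page=2)
# rows == ["concept-a", "concept-b"]; has_more is True, so a "next" link is warranted.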
diff --git a/prez/reference_data/prez_ns.py b/prez/reference_data/prez_ns.py index 25a7f5fd..0a5e1d0d 100644 --- a/prez/reference_data/prez_ns.py +++ b/prez/reference_data/prez_ns.py @@ -4,3 +4,4 @@ ONT = Namespace("https://prez.dev/ont/") ALTREXT = Namespace("http://www.w3.org/ns/dx/conneg/altr-ext#") REG = Namespace("http://purl.org/linked-data/registry#") +EP = Namespace("https://prez.dev/endpoint/") diff --git a/prez/reference_data/profiles/catprez_default_profiles.ttl b/prez/reference_data/profiles/catprez_default_profiles.ttl index 67b42b05..5fbeab41 100644 --- a/prez/reference_data/profiles/catprez_default_profiles.ttl +++ b/prez/reference_data/profiles/catprez_default_profiles.ttl @@ -11,6 +11,7 @@ PREFIX rdfs: PREFIX sh: PREFIX skos: PREFIX xsd: +PREFIX shext: prez:CatPrezProfile @@ -69,7 +70,8 @@ prez:CatPrezProfile altr-ext:focusToChild dcterms:hasPart ; altr-ext:relativeProperties dcterms:issued , dcterms:creator , dcterms:publisher ; ] -. + . + a prof:Profile , prez:CatPrezProfile ; diff --git a/prez/reference_data/profiles/ogc_profile.ttl b/prez/reference_data/profiles/ogc_profile.ttl new file mode 100644 index 00000000..b22c28cd --- /dev/null +++ b/prez/reference_data/profiles/ogc_profile.ttl @@ -0,0 +1,64 @@ +PREFIX altr-ext: +PREFIX dcat: +PREFIX dcterms: +PREFIX geo: +PREFIX owl: +PREFIX prez: +PREFIX prof: +PREFIX prov: +PREFIX rdf: +PREFIX rdfs: +PREFIX sh: +PREFIX skos: +PREFIX xsd: +PREFIX endpoint: +PREFIX shext: + + +prez:OGCProfile + a prof:Profile ; + dcterms:identifier "ogc"^^xsd:token ; + dcterms:description "A system profile for OGC Records conformant API" ; + dcterms:title "OGC Profile" ; + altr-ext:constrainsClass prez:CatPrez ; + altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; + altr-ext:hasNodeShape [ + a sh:NodeShape ; + sh:targetClass prez:CatalogList , prez:SchemesList , prez:ConceptsList ; + altr-ext:hasDefaultProfile prez:OGCListingProfile + ] , [ + a sh:NodeShape ; + sh:targetClass dcat:Catalog , skos:ConceptScheme , skos:Concept ; + altr-ext:hasDefaultProfile prez:OGCItemProfile + ] ; + . + +prez:OGCListingProfile + a prof:Profile , sh:NodeShape ; + dcterms:title "OGC Listing Profile" ; + altr-ext:hasResourceFormat + "application/ld+json" , + "application/anot+ld+json" , + "application/rdf+xml" , + "text/anot+turtle" , + "text/turtle" ; + altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; + altr-ext:constrainsClass prez:CatalogList , prez:SchemesList , prez:ConceptList ; + . + +prez:OGCItemProfile + a prof:Profile , sh:NodeShape ; + dcterms:title "OGC Item Profile" ; + altr-ext:hasResourceFormat + "application/ld+json" , + "application/anot+ld+json" , + "application/rdf+xml" , + "text/anot+turtle" , + "text/turtle" ; + altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; + sh:property [ + sh:path shext:allPredicateValues ; + ] ; + shext:bnode-depth 2 ; + altr-ext:constrainsClass dcat:Catalog , skos:ConceptScheme, skos:Concept ; + . diff --git a/prez/reference_data/profiles/prez_default_profiles.ttl b/prez/reference_data/profiles/prez_default_profiles.ttl index 71034b3b..3b3c7e43 100644 --- a/prez/reference_data/profiles/prez_default_profiles.ttl +++ b/prez/reference_data/profiles/prez_default_profiles.ttl @@ -8,6 +8,7 @@ PREFIX prof: PREFIX rdf: PREFIX rdfs: PREFIX sh: +PREFIX shext: PREFIX skos: PREFIX xsd: @@ -30,30 +31,20 @@ PREFIX xsd: dcterms:identifier "open" ; dcterms:description "An open profile which will return all direct properties for a resource." 
; dcterms:title "Open profile" ; - altr-ext:constrainsClass prez:SPARQLQuery , prez:SearchResult ; + altr-ext:constrainsClass prez:SPARQLQuery , prez:SearchResult , prez:CQLObjectList ; altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - altr-ext:hasResourceFormat "application/json" , + altr-ext:hasResourceFormat "application/ld+json" , "application/ld+json" , + "application/anot+ld+json" , "application/rdf+xml" , "text/anot+turtle" , "text/turtle" ; + sh:property [ + sh:path shext:allPredicateValues ; + ] ; . - - - a prof:Profile ; - dcterms:description "The OGC API Features specifies the behavior of Web APIs that provide access to features in a dataset in a manner independent of the underlying data store." ; - dcterms:identifier "oai"^^xsd:token ; - dcterms:title "OGC API Features" ; - altr-ext:constrainsClass - prez:Home ; - altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - altr-ext:hasResourceFormat - "text/anot+turtle" , - "application/geo+json" ; -. - altr-ext:alt-profile a prof:Profile ; dcterms:description "The representation of the resource that lists all other representations (profiles and Media Types)" ; @@ -62,6 +53,7 @@ altr-ext:alt-profile altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:hasResourceFormat "application/ld+json" , + "application/anot+ld+json" , "application/rdf+xml" , "text/anot+turtle" , "text/turtle" ; @@ -89,8 +81,11 @@ prez:profiles dcterms:identifier "profiles"^^xsd:token ; altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:hasResourceFormat - "application/json" , - "text/anot+turtle" ; + "application/ld+json" , + "application/anot+ld+json" , + "application/rdf+xml" , + "text/anot+turtle" , + "text/turtle" ; altr-ext:constrainsClass prof:Profile ; . @@ -113,11 +108,4 @@ prez:profiles "application/rdf+xml" , "text/anot+turtle" , "text/turtle" ; - altr-ext:hasNodeShape [ - a sh:NodeShape ; - sh:targetClass prez:ProfilesList ; - sh:path prez:link , - dcterms:title , - dcterms:description ; - ] . diff --git a/prez/reference_data/profiles/spaceprez_default_profiles.ttl b/prez/reference_data/profiles/spaceprez_default_profiles.ttl index 350fc09f..4040ef13 100644 --- a/prez/reference_data/profiles/spaceprez_default_profiles.ttl +++ b/prez/reference_data/profiles/spaceprez_default_profiles.ttl @@ -10,11 +10,11 @@ PREFIX rdfs: PREFIX sh: PREFIX skos: PREFIX xsd: +PREFIX shext: prez:SpacePrezProfile a prof:Profile ; - prez:supportedSearchMethod prez:exactMatch , prez:jenaFTName ; dcterms:identifier "spaceprez"^^xsd:token ; dcterms:description "A system profile for SpacePrez" ; skos:prefLabel "SpacePrez profile" ; @@ -27,26 +27,92 @@ prez:SpacePrezProfile ] , [ a sh:NodeShape ; sh:targetClass geo:FeatureCollection ; - altr-ext:hasDefaultProfile + altr-ext:hasDefaultProfile prez:FeatureCollectionProfile ] , [ a sh:NodeShape ; sh:targetClass geo:Feature ; - altr-ext:hasDefaultProfile + altr-ext:hasDefaultProfile prez:FeatureProfile ] , [ a sh:NodeShape ; sh:targetClass prez:DatasetList ; - altr-ext:hasDefaultProfile + altr-ext:hasDefaultProfile ] , [ a sh:NodeShape ; sh:targetClass prez:FeatureCollectionList ; - altr-ext:hasDefaultProfile + altr-ext:hasDefaultProfile prez:GeoListingProfile ] , [ a sh:NodeShape ; sh:targetClass prez:FeatureList ; - altr-ext:hasDefaultProfile + altr-ext:hasDefaultProfile prez:GeoListingProfile ] . 
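The altr-ext:hasNodeShape blocks above map each delivered class to a default profile for content negotiation: a node shape targets the class, and altr-ext:hasDefaultProfile names the profile used when the client does not request one. A simplified lookup, assuming the profile declarations are loaded into an rdflib Graph (Prez's actual resolution logic is more involved):

from rdflib import Graph, Namespace, URIRef

ALTREXT = Namespace("http://www.w3.org/ns/dx/conneg/altr-ext#")
SH = Namespace("http://www.w3.org/ns/shacl#")

def default_profile_for(profiles: Graph, klass: URIRef) -> URIRef | None:
    # Find node shapes targeting the class, then read off their default profile.
    for shape in profiles.subjects(SH.targetClass, klass):
        for profile in profiles.objects(shape, ALTREXT.hasDefaultProfile):
            return profile  # first match wins in this sketch
    return None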
+prez:FeatureCollectionProfile a prof:Profile ; + dcterms:description "A profile for GeoSPARQL FeatureCollections" ; + dcterms:identifier "geofc"^^xsd:token ; + dcterms:title "Feature Collection Profile" ; + altr-ext:constrainsClass geo:FeatureCollection ; + altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; + altr-ext:hasResourceFormat + "application/ld+json" , + "application/anot+ld+json" , + "application/rdf+xml" , + "text/anot+turtle" , + "text/turtle" ; + sh:targetClass geo:FeatureCollection ; + sh:property + [ + sh:maxCount 0 ; + sh:path rdfs:member ; + ] , + [ + sh:path [ sh:inversePath rdfs:member ] ; + ] ; + shext:bnode-depth 2 ; +. + +prez:FeatureProfile a prof:Profile ; + dcterms:description "A profile for GeoSPARQL Features" ; + dcterms:identifier "geofeat"^^xsd:token ; + dcterms:title "Feature Profile" ; + altr-ext:constrainsClass geo:Feature ; + altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; + altr-ext:hasResourceFormat + "application/ld+json" , + "application/anot+ld+json" , + "application/rdf+xml" , + "text/anot+turtle" , + "text/turtle" ; + sh:targetClass geo:Feature ; + sh:property + [ + sh:path [ sh:inversePath rdfs:member ] ; + ] , + [ + sh:path shext:allPredicateValues ; + ] ; + shext:bnode-depth 2 ; +. + + +prez:GeoListingProfile a prof:Profile ; + dcterms:description "A profile for listing GeoSPARQL Features and FeatureCollections" ; + dcterms:identifier "geolisting"^^xsd:token ; + dcterms:title "Geo Listing Profile" ; + altr-ext:constrainsClass prez:FeatureCollectionList , prez:FeatureList ; + altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; + altr-ext:hasResourceFormat + "application/ld+json" , + "application/anot+ld+json" , + "application/rdf+xml" , + "text/anot+turtle" , + "text/turtle" ; + sh:property + [ + sh:path [ sh:inversePath rdfs:member ] ; + ] +. + a prof:Profile , prez:SpacePrezProfile ; dcterms:description "An RDF/OWL vocabulary for representing spatial information" ; @@ -56,6 +122,7 @@ prez:SpacePrezProfile altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:hasResourceFormat "application/ld+json" , + "application/anot+ld+json" , "application/rdf+xml" , "text/anot+turtle" , "text/turtle" ; @@ -74,23 +141,12 @@ prez:SpacePrezProfile prez:FeatureList ; altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:hasResourceFormat + "application/ld+json" , + "application/anot+ld+json" , + "application/rdf+xml" , "text/anot+turtle" , + "text/turtle" , "application/geo+json" ; - altr-ext:hasNodeShape [ - a sh:NodeShape ; - sh:targetClass geo:FeatureCollection ; - sh:path rdf:type, - dcterms:identifier, - dcterms:title, - geo:hasBoundingBox, - dcterms:provenance, - rdfs:label, - dcterms:description ; - ] , - [ a sh:NodeShape ; - sh:targetClass geo:FeatureCollection , prez:FeatureCollectionList , prez:FeatureList ; - altr-ext:focusToChild rdfs:member ; - ] . 
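In the profiles above, sh:path [ sh:inversePath rdfs:member ] pulls in the triple that points at the focus node (its parent), while sh:maxCount 0 on the direct rdfs:member path excludes the potentially large member list from item views. A sketch of the SPARQL patterns the two path forms correspond to (an assumption about how the shapes are construed, not Prez's actual query builder):

def path_pattern(predicate: str, inverse: bool = False) -> str:
    if inverse:  # [ sh:inversePath <predicate> ]
        return f"?parent {predicate} ?focus_node ."
    return f"?focus_node {predicate} ?value ."

print(path_pattern("rdfs:member", inverse=True))
# -> ?parent rdfs:member ?focus_node .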
@@ -102,6 +158,7 @@ prez:SpacePrezProfile altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:hasResourceFormat "application/ld+json" , + "application/anot+ld+json" , "application/rdf+xml" , "text/anot+turtle" , "text/turtle" ; @@ -114,17 +171,18 @@ prez:SpacePrezProfile dcterms:title "DCAT" ; altr-ext:constrainsClass dcat:Catalog , - dcat:Dataset , - prez:DatasetList ; + dcat:Dataset ; altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:hasResourceFormat "application/ld+json" , + "application/anot+ld+json" , "application/rdf+xml" , "text/anot+turtle" , "text/turtle" ; - altr-ext:hasNodeShape [ - a sh:NodeShape ; - sh:targetClass dcat:Dataset ; - altr-ext:focusToChild rdfs:member ; - ] + altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; + sh:property [ + sh:path shext:allPredicateValues ; + ] ; + shext:bnode-depth 2 ; + altr-ext:constrainsClass dcat:Catalog , dcat:Dataset ; . diff --git a/prez/reference_data/profiles/vocprez_default_profiles.ttl b/prez/reference_data/profiles/vocprez_default_profiles.ttl index 86fc7372..f4220fd2 100644 --- a/prez/reference_data/profiles/vocprez_default_profiles.ttl +++ b/prez/reference_data/profiles/vocprez_default_profiles.ttl @@ -13,11 +13,11 @@ PREFIX skos: PREFIX reg: PREFIX xsd: PREFIX prov: +PREFIX shext: prez:VocPrezProfile a prof:Profile ; - prez:supportedSearchMethod prez:exactMatch , prez:skosPrefLabel , prez:skosWeighted , prez:jenaFTName ; dcterms:identifier "vocprez"^^xsd:token ; dcterms:description "A system profile for VocPrez" ; skos:prefLabel "VocPrez profile" ; @@ -26,7 +26,7 @@ prez:VocPrezProfile altr-ext:hasNodeShape [ a sh:NodeShape ; sh:targetClass prez:SchemesList ; - altr-ext:hasDefaultProfile + altr-ext:hasDefaultProfile ] , [ a sh:NodeShape ; sh:targetClass prez:VocPrezCollectionList ; @@ -96,36 +96,60 @@ prez:VocPrezProfile sh:targetClass skos:ConceptScheme ; altr-ext:childToFocus skos:inScheme ; ] ; - altr-ext:hasNodeShape [ - a sh:NodeShape ; - sh:targetClass skos:ConceptScheme ; - altr-ext:relativeProperties skos:broader , skos:narrower ; - ] ; altr-ext:hasNodeShape [ a sh:NodeShape ; sh:targetClass skos:Concept ; + sh:property [ + sh:path shext:allPredicateValues ; + ] ; altr-ext:focusToParent skos:inScheme ; ] ; altr-ext:hasNodeShape [ a sh:NodeShape ; sh:targetClass prez:SchemesList ; - sh:path dcterms:publisher, reg:status ; - sh:sequencePath ( - prov:qualifiedDerivation - prov:hadRole - ) ; - sh:sequencePath ( - prov:qualifiedDerivation - prov:entity - ) ; + sh:property [ + sh:minCount 0 ; + sh:path ( + sh:union ( + dcterms:publisher + reg:status + ) + ) + ] ] ; altr-ext:hasNodeShape [ a sh:NodeShape ; sh:targetClass prez:VocPrezCollectionList ; - sh:path skos:definition ; + sh:property [ + sh:minCount 0 ; + sh:path skos:definition ; + ] ; ] ; . + a prof:Profile , sh:NodeShape ; + dcterms:title "VocPub Schemes Listing Profile" ; + altr-ext:constrainsClass prez:SchemesList ; + altr-ext:hasResourceFormat "application/ld+json" , + "application/ld+json" , + "application/anot+ld+json" , + "application/rdf+xml" , + "text/anot+turtle" , + "text/turtle" ; + sh:property [ + sh:minCount 0 ; + sh:path ( + sh:union ( + dcterms:publisher + reg:status + ( prov:qualifiedDerivation prov:hadRole ) + ( prov:qualifiedDerivation prov:entity ) + ) + ) + ] + . + + a prof:Profile ; dcterms:description "Schema.org is a collaborative, community activity with a mission to create, maintain, and promote schemas for structured data on the Internet, on web pages, in email messages, and beyond. 
" ; @@ -138,6 +162,7 @@ prez:VocPrezProfile altr-ext:hasDefaultResourceFormat "text/turtle" ; altr-ext:hasResourceFormat "application/ld+json" , + "application/anot+ld+json" , "application/rdf+xml" , "text/anot+turtle" , "text/turtle" ; @@ -153,7 +178,6 @@ prez:VocPrezProfile prez:VocPrezCollectionList , skos:ConceptScheme , skos:Collection ; - altr-ext:hasLabelPredicate skos:prefLabel ; altr-ext:hasNodeShape [ a sh:NodeShape ; sh:targetClass skos:ConceptScheme ; diff --git a/prez/routers/cql.py b/prez/routers/cql.py index 9d014351..a678ffda 100644 --- a/prez/routers/cql.py +++ b/prez/routers/cql.py @@ -1,98 +1,27 @@ -from typing import Optional, List +from typing import Optional -from fastapi import APIRouter, Request -from fastapi import Form -from fastapi.responses import JSONResponse, RedirectResponse -from rdflib import Namespace -from prez.config import Settings +from fastapi import APIRouter, Request, Depends +from rdflib import URIRef -PREZ = Namespace("https://prez.dev/") +from prez.dependencies import get_repo, cql_parser_dependency +from prez.services.listings import listing_function_new +from prez.sparql.methods import Repo -router = APIRouter(tags=["CQL"]) +router = APIRouter(tags=["ogcrecords"]) -# # CQL search_methods -# if "SpacePrez" in settings.ENABLED_PREZS: -# dataset_sparql_result, collection_sparql_result = await asyncio.gather( -# list_datasets(), -# list_collections(), -# ) -# datasets = [ -# {"id": result["id"]["value"], "title": result["label"]["value"]} -# for result in dataset_sparql_result -# ] -# collections = [ -# {"id": result["id"]["value"], "title": result["label"]["value"]} -# for result in collection_sparql_result -# ] -# return - -# top-level queryables -@router.get( - "/queryables", - summary="List available query parameters for CQL search_methods globally", -) -async def queryables( - request: Request, -): - settings = Settings() - - """Returns a list of available properties to query against using CQL search_methods globally""" - content = { - "$schema": "https://json-schema.org/draft/2019-09/schema", - "$id": f"{request.url.remove_query_params(keys=request.query_params.keys())}", - "type": "object", - } - - properties = {key: value for key, value in settings.cql_props.items()} - for value in properties.values(): - value.pop("qname", None) - - content["properties"] = properties - content["title"] = settings.spaceprez_title - - if settings.spaceprez_desc != "": - content["description"] = settings.spaceprez_desc - - return JSONResponse(content=content) - - -# top-level CQL search_methods form -@router.get( - "/cql", - summary="Endpoint to POST CQL search_methods form data to", +@router.post( + path="/cql", + name="https://prez.dev/endpoint/cql", ) -async def cql( +async def cql_post_endpoint( request: Request, - title: Optional[str] = Form(None), - desc: Optional[str] = Form(None), - filter: Optional[str] = Form(None), - datasets: Optional[List[str]] = Form(None), - collections: Optional[List[str]] = Form(None), + parsed_cql: Optional[dict] = Depends(cql_parser_dependency), + page: int = 1, + per_page: int = 20, + repo: Repo = Depends(get_repo), ): - """Handles form data from a CQL search_methods form & redirects to /items containing the filter param""" - filter_params = [] - if title is not None: - filter_params.append(f'title LIKE "{title}"') - if desc is not None: - filter_params.append(f'desc LIKE "{desc}"') - if filter is not None: - filter_params.append(filter) - if datasets is not None: - d_set = set() - for d in datasets: - if "," in d: - 
d_set.update(d.split(","))
-            else:
-                d_set.add(d)
-    if collections is not None:
-        coll_set = set()
-        for coll in collections:
-            if "," in coll:
-                coll_set.update(coll.split(","))
-            else:
-                coll_set.add(coll)
-    return RedirectResponse(
-        url=f'/items?filter={" AND ".join(filter_params)}{"&dataset=" + ",".join(d_set) if datasets is not None else ""}{"&collection=" + ",".join(coll_set) if collections is not None else ""}',
-        status_code=302,
+    endpoint_uri = URIRef("https://prez.dev/endpoint/cql")
+    return await listing_function_new(
+        request, repo, endpoint_uri, page, per_page, parsed_cql
     )
diff --git a/prez/routers/management.py b/prez/routers/management.py
index 98478136..af93d78d 100644
--- a/prez/routers/management.py
+++ b/prez/routers/management.py
@@ -50,6 +50,11 @@ async def return_tbox_cache(request: Request):
     return await return_rdf(tbox_cache, mediatype, profile_headers={})
 
 
+@router.get("/health")
+async def health_check():
+    return {"status": "ok"}
+
+
 async def return_annotation_predicates():
     """
     Returns an RDF linked list of the annotation predicates used for labels, descriptions and provenance.
diff --git a/prez/routers/object.py b/prez/routers/object.py
index 18189dad..33dfcd4f 100644
--- a/prez/routers/object.py
+++ b/prez/routers/object.py
@@ -16,8 +16,6 @@ from prez.models.object_item import ObjectItem
 from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo
 from prez.queries.object import object_inbound_query, object_outbound_query
-from prez.reference_data.prez_ns import PREZ
-from prez.renderers.renderer import return_from_graph, return_profiles
 from prez.routers.identifier import get_iri_route
 from prez.services.curie_functions import get_curie_id_for_uri, get_uri_for_curie_id
 from prez.services.model_methods import get_classes
diff --git a/prez/routers/ogc_catprez.py b/prez/routers/ogc_catprez.py
new file mode 100644
index 00000000..9d8fd907
--- /dev/null
+++ b/prez/routers/ogc_catprez.py
@@ -0,0 +1,117 @@
+from typing import Optional
+
+from fastapi import APIRouter, Request, Depends
+from rdflib import URIRef
+
+from prez.dependencies import get_repo, cql_parser_dependency
+from prez.services.curie_functions import get_uri_for_curie_id
+from prez.services.listings import listing_function_new
+from prez.services.objects import object_function_new
+from prez.sparql.methods import Repo
+
+router = APIRouter(tags=["ogcrecords"])
+
+ogc_endpoints = {
+    "catalog-listing": "https://prez.dev/endpoint/ogcrecords/catalog-listing",
+    "catalog-object": "https://prez.dev/endpoint/ogcrecords/catalog-object",
+    "vocab-listing": "https://prez.dev/endpoint/ogcrecords/vocab-listing",
+    "vocab-object": "https://prez.dev/endpoint/ogcrecords/vocab-object",
+    "concept-listing": "https://prez.dev/endpoint/ogcrecords/concept-listing",
+    "concept-object": "https://prez.dev/endpoint/ogcrecords/concept-object",
+}
+
+
+@router.get(
+    "/catalogs",
+    summary="List Catalogs",
+    name=ogc_endpoints["catalog-listing"],
+)
+async def catalog_list(
+    request: Request,
+    page: Optional[int] = 1,
+    per_page: Optional[int] = 20,
+    repo: Repo = Depends(get_repo),
+):
+    endpoint_uri = URIRef(ogc_endpoints["catalog-listing"])
+    return await listing_function_new(request, repo, endpoint_uri, page, per_page)
+
+
+@router.get(
+    "/catalogs/{catalogId}/collections",
+    summary="List Vocabularies",
+    name=ogc_endpoints["vocab-listing"],
+)
+async def vocab_list(
+    request: Request,
+    page: Optional[int] = 1,
+    per_page: Optional[int] = 20,
+    repo: Repo = Depends(get_repo),
+):
+    endpoint_uri = URIRef(ogc_endpoints["vocab-listing"])
+    return await listing_function_new(request, repo, endpoint_uri, page, per_page)
+
+
+@router.get(
+    "/catalogs/{catalogId}/collections/{collectionId}/items",
+    summary="List Concepts",
+    name=ogc_endpoints["concept-listing"],
+)
+async def concept_list(
+    request: Request,
+    page: Optional[int] = 1,
+    per_page: Optional[int] = 20,
+    repo: Repo = Depends(get_repo),
+):
+    endpoint_uri = URIRef(ogc_endpoints["concept-listing"])
+    return await listing_function_new(request, repo, endpoint_uri, page, per_page)
+
+
+@router.get(
+    "/catalogs/{catalogId}",
+    summary="Catalog Object",
+    name=ogc_endpoints["catalog-object"],
+)
+async def catalog_object(
+    request: Request,
+    repo: Repo = Depends(get_repo),
+):
+    request_url = request.scope["path"]
+    endpoint_uri = URIRef(ogc_endpoints["catalog-object"])
+    object_uri = get_uri_for_curie_id(request.path_params["catalogId"])
+    return await object_function_new(
+        request, endpoint_uri, object_uri, request_url, repo
+    )
+
+
+@router.get(
+    "/catalogs/{catalogId}/collections/{collectionId}",
+    summary="Vocab Object",
+    name=ogc_endpoints["vocab-object"],
+)
+async def vocab_object(
+    request: Request,
+    repo: Repo = Depends(get_repo),
+):
+    request_url = request.scope["path"]
+    endpoint_uri = URIRef(ogc_endpoints["vocab-object"])
+    object_uri = get_uri_for_curie_id(request.path_params["collectionId"])
+    return await object_function_new(
+        request, endpoint_uri, object_uri, request_url, repo
+    )
+
+
+@router.get(
+    "/catalogs/{catalogId}/collections/{collectionId}/items/{itemId}",
+    summary="Concept Object",
+    name=ogc_endpoints["concept-object"],
+)
+async def concept_object(
+    request: Request,
+    repo: Repo = Depends(get_repo),
+):
+    request_url = request.scope["path"]
+    endpoint_uri = URIRef(ogc_endpoints["concept-object"])
+    object_uri = get_uri_for_curie_id(request.path_params["itemId"])
+    return await object_function_new(
+        request, endpoint_uri, object_uri, request_url, repo
+    )
diff --git a/prez/routers/spaceprez.py b/prez/routers/spaceprez.py
index ce20ae60..9002ab6a 100644
--- a/prez/routers/spaceprez.py
+++ b/prez/routers/spaceprez.py
@@ -11,6 +11,8 @@
 
 router = APIRouter(tags=["SpacePrez"])
 
+SP_EP = Namespace("https://prez.dev/endpoint/spaceprez/")
+
 
 @router.get("/s", summary="SpacePrez Home")
 async def spaceprez_profiles():
@@ -20,7 +22,7 @@ async def spaceprez_profiles():
 @router.get(
     "/s/datasets",
     summary="List Datasets",
-    name="https://prez.dev/endpoint/spaceprez/dataset-listing",
+    name=SP_EP["dataset-listing"],
 )
 async def list_datasets(
     request: Request,
@@ -36,7 +38,7 @@ async def list_datasets(
 @router.get(
     "/s/datasets/{dataset_curie}/collections",
     summary="List Feature Collections",
-    name="https://prez.dev/endpoint/spaceprez/feature-collection-listing",
+    name=SP_EP["feature-collection-listing"],
 )
 async def list_feature_collections(
     request: Request,
@@ -45,6 +47,7 @@ async def list_feature_collections(
     page: Optional[int] = 1,
     per_page: Optional[int] = 20,
 ):
+    endpoint_uri = SP_EP["feature-collection-listing"]
     dataset_uri = get_uri_for_curie_id(dataset_curie)
     return await listing_function(
         request=request,
@@ -58,7 +61,7 @@ async def list_feature_collections(
 @router.get(
     "/s/datasets/{dataset_curie}/collections/{collection_curie}/items",
     summary="List Features",
-    name="https://prez.dev/endpoint/spaceprez/feature-listing",
+    name=SP_EP["feature-listing"],
 )
 async def list_features(
     request: Request,
@@ -81,7 +84,7 @@ async def list_features(
 @router.get(
     "/s/datasets/{dataset_curie}",
     summary="Get Dataset",
-    name="https://prez.dev/endpoint/spaceprez/dataset",
+    name=SP_EP["dataset-object"],
 )
 async def dataset_item(
     request: Request,
@@ -94,7 +97,7 @@ async def dataset_item(
 @router.get(
     "/s/datasets/{dataset_curie}/collections/{collection_curie}",
     summary="Get Feature Collection",
-    name="https://prez.dev/endpoint/spaceprez/feature-collection",
+    name=SP_EP["feature-collection-object"],
 )
 async def feature_collection_item(
     request: Request,
diff --git a/prez/routers/vocprez.py b/prez/routers/vocprez.py
index a2e4fbe2..09c302fa 100644
--- a/prez/routers/vocprez.py
+++ b/prez/routers/vocprez.py
@@ -2,8 +2,7 @@
 
 from fastapi import APIRouter, Request
 from fastapi import Depends
-from fastapi.responses import RedirectResponse
-from rdflib import URIRef, SKOS
+from rdflib import URIRef
 from starlette.responses import PlainTextResponse
 
 from prez.bnode import get_bnode_depth
@@ -30,12 +30,44 @@
 
 log = logging.getLogger(__name__)
 
+vp_endpoints = {
+    "vocabs-listing": "https://prez.dev/endpoint/vocprez/vocabs-listing",
+    "collection-listing": "https://prez.dev/endpoint/vocprez/collection-listing",
"https://prez.dev/endpoint/vocprez/collection-listing", + "vocab-object": "https://prez.dev/endpoint/vocprez/vocab-object", + "collection-object": "https://prez.dev/endpoint/vocprez/collection-object", + "vocab-concept": "https://prez.dev/endpoint/vocprez/vocab-concept", + "collection-concept": "https://prez.dev/endpoint/vocprez/collection-concept", + "cs-children": "https://prez.dev/endpoint/vocprez/cs-children", + "cs-top-concepts": "https://prez.dev/endpoint/vocprez/cs-top-concepts", +} + @router.get("/v", summary="VocPrez Home") async def vocprez_home(): return PlainTextResponse("VocPrez Home") +@router.get( + "/v/vocab", + summary="List Vocabularies", + name=vp_endpoints["vocabs-listing"], +) +async def vocab_endpoint( + request: Request, + repo: Repo = Depends(get_repo), + page: int = 1, + per_page: int = 20, +): + endpoint_uri = URIRef(vp_endpoints["vocabs-listing"]) + return await listing_function_new( + request=request, + repo=repo, + endpoint_uri=endpoint_uri, + page=page, + per_page=per_page, + ) + + @router.get( "/v/vocab", summary="List Vocabularies", @@ -55,7 +87,7 @@ async def vocab_endpoint( @router.get( "/v/collection", summary="List Collections", - name="https://prez.dev/endpoint/vocprez/collection-listing", + name=vp_endpoints["collection-listing"], ) async def collection_endpoint( request: Request, @@ -86,7 +118,7 @@ async def vocprez_scheme( @router.get( "/v/vocab/{concept_scheme_curie}", summary="Get a SKOS Concept Scheme", - name="https://prez.dev/endpoint/vocprez/collection", + name=vp_endpoints["vocab-object"], response_class=StreamingTurtleAnnotatedResponse, responses={ 200: { @@ -133,6 +165,7 @@ async def concept_scheme_route( @router.get( "/v/vocab/{concept_scheme_curie}/top-concepts", + name=vp_endpoints["cs-top-concepts"], summary="Get a SKOS Concept Scheme's top concepts", response_class=StreamingTurtleAnnotatedResponse, responses={ @@ -141,11 +174,11 @@ async def concept_scheme_route( }, }, ) -async def concept_scheme_top_concepts_route( - request: Request, - concept_scheme_curie: str, - page: int = 1, - per_page: int = 20, +async def cs_top_concepts_endpoint( + request: Request, + repo: Repo = Depends(get_repo), + page: int = 1, + per_page: int = 20, repo: Repo = Depends(get_repo), ): """Get a SKOS Concept Scheme's top concepts. 
@@ -179,6 +212,7 @@ async def concept_scheme_top_concepts_route( @router.get( "/v/vocab/{concept_scheme_curie}/{concept_curie}/narrowers", + name=vp_endpoints["cs-children"], summary="Get a SKOS Concept's narrower concepts", response_class=StreamingTurtleAnnotatedResponse, responses={ @@ -222,7 +256,7 @@ async def concept_narrowers_route( @router.get( "/v/vocab/{concept_scheme_curie}/{concept_curie}", summary="Get a SKOS Concept", - name="https://prez.dev/endpoint/vocprez/vocab-concept", + name=vp_endpoints["vocab-concept"], response_class=StreamingTurtleAnnotatedResponse, responses={ 200: { @@ -243,7 +277,7 @@ async def concept_route( @router.get( "/v/collection/{collection_curie}", summary="Get Collection", - name="https://prez.dev/endpoint/vocprez/collection", + name=vp_endpoints["collection-object"], ) async def vocprez_collection( request: Request, @@ -256,7 +290,7 @@ async def vocprez_collection( @router.get( "/v/collection/{collection_curie}/{concept_curie}", summary="Get Concept", - name="https://prez.dev/endpoint/vocprez/collection-concept", + name=vp_endpoints["collection-concept"], ) async def vocprez_collection_concept( request: Request, diff --git a/prez/services/app_service.py b/prez/services/app_service.py index f4e9e6dc..ac82722a 100644 --- a/prez/services/app_service.py +++ b/prez/services/app_service.py @@ -138,7 +138,9 @@ async def add_prefixes_to_prefix_graph(repo: Repo): async def create_endpoints_graph(repo) -> Graph: flavours = ["CatPrez", "SpacePrez", "VocPrez"] added_anything = False - for f in (Path(__file__).parent.parent / "reference_data/endpoints").glob("*.ttl"): + for f in (Path(__file__).parent.parent / "reference_data/new_endpoints").glob( + "*.ttl" + ): # Check if file starts with any of the flavour prefixes matching_flavour = next( (flavour for flavour in flavours if f.name.startswith(flavour.lower())), diff --git a/prez/services/generate_profiles.py b/prez/services/generate_profiles.py index 3d17b0cf..59e01090 100644 --- a/prez/services/generate_profiles.py +++ b/prez/services/generate_profiles.py @@ -4,7 +4,7 @@ from rdflib import Graph, URIRef, RDF, PROF, Literal -from prez.cache import profiles_graph_cache +from prez.cache import profiles_graph_cache, prefix_graph from prez.config import settings from prez.models.model_exceptions import NoProfilesException from prez.reference_data.prez_ns import PREZ @@ -85,7 +85,9 @@ def get_profiles_and_mediatypes( query = select_profile_mediatype( classes, requested_profile, requested_profile_token, requested_mediatype ) + log.debug(f"ConnegP query: {query}") response = profiles_graph_cache.query(query) + log.debug(f"ConnegP response:{results_pretty_printer(response)}") if len(response.bindings[0]) == 0: raise NoProfilesException(classes) top_result = response.bindings[0] @@ -100,6 +102,53 @@ def get_profiles_and_mediatypes( return profile, mediatype, selected_class, profile_headers, avail_profile_uris +def results_pretty_printer(response): + # Calculate max width for each column, including the new "#" column + max_widths = [ + len(str(len(response.bindings))) + ] # length of the highest row number as a string + for header in response.vars: + max_width = max( + len(header.n3(prefix_graph.namespace_manager)), + max( + len( + row[header].n3(prefix_graph.namespace_manager) + if row[header] + else "" + ) + for row in response.bindings + ), + ) + max_widths.append(max_width) + + # Header row + header_row = "\n" + " | ".join( + ["#".ljust(max_widths[0])] + + [ + str(header).ljust(max_widths[i + 1]) + for i, header in 
enumerate(response.vars) + ] + ) + pp_string = header_row + "\n" + pp_string += ("-" * len(header_row)) + "\n" # Divider + + # Data rows + row_number = 1 + for row in response.bindings: + row_data = [str(row_number).ljust(max_widths[0])] + row_data += [ + ( + row[header].n3(prefix_graph.namespace_manager) if row[header] else "" + ).ljust(max_widths[i + 1]) + for i, header in enumerate(response.vars) + ] + formatted_row = " | ".join(row_data) + pp_string += formatted_row + "\n" + row_number += 1 + + return pp_string + + def generate_profiles_headers(selected_class, response, profile, mediatype): headers = { "Access-Control-Allow-Origin": "*", diff --git a/prez/services/link_generation.py b/prez/services/link_generation.py index 349b6713..3d327608 100644 --- a/prez/services/link_generation.py +++ b/prez/services/link_generation.py @@ -1,17 +1,45 @@ +import logging from string import Template from typing import FrozenSet -from rdflib import Graph, Literal, URIRef, DCTERMS +from rdflib import Graph, Literal, URIRef, DCTERMS, BNode from prez.cache import endpoints_graph_cache, links_ids_graph_cache from prez.reference_data.prez_ns import PREZ from prez.services.curie_functions import get_curie_id_for_uri +from prez.services.generate_profiles import results_pretty_printer from prez.services.model_methods import get_classes from prez.sparql.methods import Repo from prez.sparql.objects_listings import ( get_endpoint_template_queries, generate_relationship_query, ) +from temp.shacl2sparql import ONT + +log = logging.getLogger(__name__) + + +async def _add_prez_link_to_collection_page( + item_graph: Graph, item_uri: URIRef, request_url: str, endpoint_uri: URIRef +): + """ + 1. get the request's URL; this will be the URL of the current object page + 2. look up the endpoint that hasParentEndpoint the object endpoint in the endpoints graph cache + 3. take the fragment (suffix) of the endpoint template for the child endpoint identified in step 2 + 4. 
append the fragment to the URL from step 1 + """ + child_endpoint = endpoints_graph_cache.value( + predicate=ONT.parentEndpoint, object=endpoint_uri + ) + child_endpoint_template = endpoints_graph_cache.value( + subject=child_endpoint, predicate=ONT.endpointTemplate + ) + if child_endpoint_template: + last_part_of_url = child_endpoint_template.split("/")[-1] + collections_url = f"{request_url}/{last_part_of_url}" + bnode = BNode() + item_graph.add((item_uri, PREZ.members, bnode)) + item_graph.add((bnode, PREZ.link, Literal(collections_url))) async def _add_prez_links(graph: Graph, repo): diff --git a/prez/services/listings.py b/prez/services/listings.py index 5b993d21..9a6fb05a 100644 --- a/prez/services/listings.py +++ b/prez/services/listings.py @@ -1,16 +1,23 @@ +from typing import Optional + from fastapi import Request +from rdflib import SH from rdflib import URIRef, PROF -from prez.cache import profiles_graph_cache +from prez.cache import profiles_graph_cache, endpoints_graph_cache from prez.models.listing import ListingModel from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo +from prez.reference_data.prez_ns import ONT from prez.renderers.renderer import return_from_graph, return_profiles from prez.services.link_generation import _add_prez_links from prez.sparql.methods import Repo from prez.sparql.objects_listings import ( generate_listing_construct, - generate_listing_count_construct, + temp_listing_count, ) +from temp.cql2sparql import CQLParser +from temp.grammar import SubSelect +from temp.shacl2sparql import SHACLParser async def listing_function( @@ -50,7 +57,7 @@ async def listing_function( per_page=per_page, ordering_predicate=ordering_predicate, ) - count_query = generate_listing_count_construct(listing_item, endpoint_uri) + count_query = temp_listing_count(listing_item, endpoint_uri) if listing_item.selected_class in [ URIRef("https://prez.dev/ProfilesList"), PROF.Profile, @@ -70,3 +77,98 @@ async def listing_function( prof_and_mt_info.selected_class, repo, ) + + +async def listing_function_new( + request: Request, + repo: Repo, + endpoint_uri: URIRef, + page: int = 1, + per_page: int = 20, + parent_uri: Optional[URIRef] = None, + cql: dict = None, +): + # class is from endpoint definition. 
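+    # Overall flow (as implemented below): resolve the class the endpoint
+    # delivers, run profile/mediatype negotiation, translate the endpoint and
+    # profile SHACL definitions (plus any CQL filter) into a CONSTRUCT query,
+    # then send it together with a count query built from the same subselect.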
+ listing_class = endpoints_graph_cache.value(endpoint_uri, ONT.deliversClasses) + target_class = endpoints_graph_cache.value(endpoint_uri, SH.targetClass) + + prof_and_mt_info = ProfilesMediatypesInfo(request=request, classes=[listing_class]) + selected_class, selected_profile = ( + prof_and_mt_info.selected_class, + prof_and_mt_info.profile, + ) + + if prof_and_mt_info.profile == URIRef( + "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" + ): + return await return_profiles( + classes=frozenset(selected_class), prof_and_mt_info=prof_and_mt_info + ) + runtime_values = {"limit": per_page, "offset": (page - 1) * per_page, "parent_1": parent_uri} + shacl_parser = SHACLParser( + runtime_values, + endpoints_graph_cache, + profiles_graph_cache, + endpoint_uri, + selected_profile, + ) + + if cql: + cql_parser = CQLParser(cql_json=cql) + cql_parser.parse() + cql_select_ggps = cql_parser.ggps_inner_select + shacl_parser.additional_ggps = cql_select_ggps + + shacl_parser.generate_sparql() + query_str = shacl_parser.sparql + + # pull the subselect out of the query string + subselect = find_instances(shacl_parser.main_where_ggps, SubSelect)[0] # assume there's only one subselect + subselect.solution_modifier = None # remove the limit and offset from the subselect so that we can get a count + count_query = temp_listing_count(subselect, target_class) + + # if selected_class in [ + # URIRef("https://prez.dev/ProfilesList"), + # PROF.Profile, + # ]: + # list_graph = profiles_graph_cache.query(item_members_query).graph + # count_graph = profiles_graph_cache.query(count_query).graph + # item_graph = list_graph + count_graph + # else: + item_graph, _ = await repo.send_queries( + rdf_queries=[count_query, query_str], + tabular_queries=[], + ) + if "anot+" in prof_and_mt_info.mediatype: + await _add_prez_links(item_graph, repo) + return await return_from_graph( + item_graph, + prof_and_mt_info.mediatype, + selected_profile, + prof_and_mt_info.profile_headers, + prof_and_mt_info.selected_class, + repo, + ) + + +def find_instances(obj, cls): + found = [] + + # Check if the object itself is an instance of the class + if isinstance(obj, cls): + found.append(obj) + + # If the object is iterable, iterate and search recursively + elif isinstance(obj, dict): + for key, value in obj.items(): + found.extend(find_instances(value, cls)) + elif hasattr(obj, '__iter__') and not isinstance(obj, str): + for item in obj: + found.extend(find_instances(item, cls)) + + # If the object has attributes, search recursively in each + elif hasattr(obj, '__dict__'): + for key, value in obj.__dict__.items(): + found.extend(find_instances(value, cls)) + + return found diff --git a/prez/services/model_methods.py b/prez/services/model_methods.py index 6accd67f..b3965f5d 100644 --- a/prez/services/model_methods.py +++ b/prez/services/model_methods.py @@ -3,6 +3,8 @@ from prez.cache import endpoints_graph_cache from prez.sparql.methods import Repo +from prez.cache import endpoints_graph_cache +from prez.sparql.methods import Repo async def get_classes( uri: URIRef, repo: Repo, endpoint: URIRef = None diff --git a/prez/services/objects.py b/prez/services/objects.py index 9f975452..1dba764c 100644 --- a/prez/services/objects.py +++ b/prez/services/objects.py @@ -1,28 +1,33 @@ +import logging from typing import Optional -from fastapi import Depends -from fastapi import Request, HTTPException +from fastapi import HTTPException +from fastapi import Request from rdflib import URIRef -from prez.cache import profiles_graph_cache -from 
prez.config import settings -from prez.dependencies import get_repo +from prez.cache import profiles_graph_cache, endpoints_graph_cache from prez.models.object_item import ObjectItem from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo -from prez.reference_data.prez_ns import PREZ +from prez.reference_data.prez_ns import PREZ, EP from prez.renderers.renderer import return_from_graph, return_profiles from prez.services.curie_functions import get_uri_for_curie_id -from prez.services.model_methods import get_classes -from prez.services.link_generation import _add_prez_links -from prez.sparql.objects_listings import ( - generate_item_construct, - generate_listing_construct, +from prez.services.link_generation import ( + _add_prez_links, + _add_prez_link_to_collection_page, ) +from prez.services.model_methods import get_classes +from prez.sparql.methods import Repo +from prez.sparql.objects_listings import generate_item_construct +from prez.sparql.objects_listings import generate_listing_construct +from temp.shacl2sparql import SHACLParser + + +log = logging.getLogger(__name__) async def object_function( request: Request, - repo=Depends(get_repo), + repo: Repo, object_curie: Optional[str] = None, ): endpoint_uri = URIRef(request.scope["route"].name) @@ -39,7 +44,7 @@ async def object_function( else: raise HTTPException( status_code=400, - detail="The 'object_curie' is required for non-object endpoints", + detail="The 'uri' is required for non-object endpoints", ) klasses = await get_classes(uri=uri, repo=repo, endpoint=endpoint_uri) @@ -90,3 +95,66 @@ async def object_function( prof_and_mt_info.selected_class, repo, ) + + +async def object_function_new( + request: Request, + endpoint_uri: URIRef, + uri: URIRef, + request_url: str, + repo=Repo, +): + klasses = await get_classes(uri=uri, repo=repo, endpoint=endpoint_uri) + # ConnegP + prof_and_mt_info = ProfilesMediatypesInfo(request=request, classes=klasses) + # if we're on the object endpoint and a profile hasn't been requested, use the open profile + if (endpoint_uri == EP.object) and not ( + prof_and_mt_info.req_profiles or prof_and_mt_info.req_profiles_token + ): + prof_and_mt_info.selected_class = None + prof_and_mt_info.profile = PREZ["profile/open"] + # create the object with all required info + object_item = ObjectItem( # object item now does not need request + uri=uri, + classes=klasses, + profile=prof_and_mt_info.profile, + selected_class=prof_and_mt_info.selected_class, + ) + if prof_and_mt_info.profile == URIRef( + "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" + ): + return await return_profiles( + classes=frozenset(object_item.selected_class), + prof_and_mt_info=prof_and_mt_info, + repo=repo, + ) + runtime_values = {"object": uri} + shacl_parser = SHACLParser( + runtime_values, + endpoints_graph_cache, + profiles_graph_cache, + endpoint_uri, + prof_and_mt_info.profile, + ) + shacl_parser.generate_sparql() + query = shacl_parser.sparql + log.debug(f"Object Query: {query}") + + if object_item.selected_class == URIRef("http://www.w3.org/ns/dx/prof/Profile"): + item_graph = profiles_graph_cache.query(query).graph + else: + item_graph, _ = await repo.send_queries([query], []) + if "anot+" in prof_and_mt_info.mediatype: + if not endpoint_uri == EP.object: + await _add_prez_link_to_collection_page( + item_graph, uri, request_url, endpoint_uri + ) + await _add_prez_links(item_graph, repo) + return await return_from_graph( + item_graph, + prof_and_mt_info.mediatype, + object_item.profile, + 
prof_and_mt_info.profile_headers,
+        prof_and_mt_info.selected_class,
+        repo,
+    )
diff --git a/prez/sparql/methods.py b/prez/sparql/methods.py
index ab88ad5b..f12611d5 100644
--- a/prez/sparql/methods.py
+++ b/prez/sparql/methods.py
@@ -51,6 +51,9 @@ async def send_queries(
     def sparql(self, query: str, raw_headers: list[tuple[bytes, bytes]], method: str = "GET"):
         pass
 
+    @abstractmethod
+    async def tabular_query_to_table(self, query: str, context: URIRef = None):
+        pass
 
 class RemoteSparqlRepo(Repo):
     def __init__(self, async_client: httpx.AsyncClient):
diff --git a/prez/sparql/objects_listings.py b/prez/sparql/objects_listings.py
index 8594223a..5b3c365c 100644
--- a/prez/sparql/objects_listings.py
+++ b/prez/sparql/objects_listings.py
@@ -489,8 +489,12 @@ def get_annotations_from_tbox_cache(
     }
     # get all the annotations we can from the cache
     all = list(chain(*props_from_cache.values()))
+    default_language = settings.default_language
     for triple in all:
-        labels_from_cache.add(triple)
+        if triple[2].language == default_language:
+            labels_from_cache.add(triple)
+        elif triple[2].language is None:
+            labels_from_cache.add(triple)
     # the remaining terms are not in the cache; we need to query the SPARQL endpoint to attempt to get them
     uncached_props = {
         k: list(set(terms) - set(triple[0] for triple in v))
@@ -552,6 +556,20 @@ def generate_listing_count_construct(item: ListingModel, endpoint_uri: str):
     return query
 
 
+def temp_listing_count(subquery: SubSelect, klass):
+    """
+    TODO: Implement COUNT and other expressions in SPARQL grammar.
+    """
+    return f"""
+    PREFIX prez: <{PREZ}>
+    CONSTRUCT {{
+        {klass.n3()} prez:count ?count
+    }}
+    WHERE {{
+        SELECT (COUNT(?focus_node) as ?count) {{ {subquery} }}
+    }}"""
+
+
 def get_relevant_shape_bns_for_profile(selected_class, profile):
     """
     Gets the shape blank nodes URIs from the profiles graph for a given profile.
@@ -881,13 +899,13 @@ def generate_relationship_query(
     uri_str = f"<{uri}>"
     for i, relation in enumerate(relations):
         predicate, direction = relation
         parent = "?parent_" + str(i + 1)
         if predicate:
             if direction == URIRef("https://prez.dev/ont/ParentToFocusRelation"):
                 subquery += f"{parent} <{predicate}> {uri_str} .\n"
             else:  # assuming the direction is "focus_to_parent"
                 subquery += f"{uri_str} <{predicate}> {parent} .\n"
-        uri_str = parent
+            uri_str = parent
     subquery += "}}"
 
     subqueries.append(subquery)
diff --git a/pyproject.toml b/pyproject.toml
index 91363a86..b00e1881 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -9,11 +9,11 @@ python = "^3.11"
 uvicorn = "^0.21.1"
 httpx = "*"
 rdflib = "^6.3.1"
-connegp = { file = "connegp-0.1.5-py3-none-any.whl" }
+connegp = { file = "connegp-0.1.6-py3-none-any.whl" }
 async-lru = "^1.0.3"
 geojson-rewind = "^1.0.3"
 toml = "^0.10.2"
-fastapi = "^0.95.0"
+fastapi = "^0.104.0"
 python-multipart = "^0.0.6"
 jinja2 = "^3.1.2"
 oxrdflib = "^0.3.6"
diff --git a/temp/cql2sparql.py b/temp/cql2sparql.py
new file mode 100644
index 00000000..78a923e2
--- /dev/null
+++ b/temp/cql2sparql.py
@@ -0,0 +1,275 @@
+from typing import Generator
+
+from pyld import jsonld
+from rdflib import URIRef, Namespace, Variable, Literal
+from rdflib.namespace import GEO
+
+from temp.grammar import (
+    GroupOrUnionGraphPattern,
+    GroupGraphPatternSub,
+    TriplesBlock,
+    SimplifiedTriple,
+    GroupGraphPattern,
+    GraphPatternNotTriples,
+    Filter,
+    InlineDataOneVar,
+    InlineData,
+    DataBlock,
+    WhereClause,
+    ConstructTemplate,
+    SolutionModifier,
+    ConstructQuery,
+    ConstructTriples,
+)
+from temp.cql_sparql_reference import cql_sparql_spatial_mapping, cql_to_shapely_mapping
+
+CQL = Namespace("http://www.opengis.net/doc/IS/cql2/1.0/")
+
+
+class CQLParser:
+    def __init__(self, cql=None, context: dict = None, cql_json: dict = None):
+        self.ggps_inner_select = None
+        self.cql = cql
+        self.context = context
+        self.cql_json = cql_json
+        self.var_counter = 0
+        self.query_object = None
+        self.query_str = None
+        # self.prefixes = self.extract_prefixes(self.context)
+
+    def generate_jsonld(self):
+        combined = {"@context": self.context, **self.cql}
+        self.cql_json = jsonld.expand(combined, options={"base": "h"})[0]
+
+    def extract_prefixes(self, prefix_dict: dict) -> dict:
+        """
+        Extracts prefixes and their URIs from the dictionary and formats them for SPARQL queries.
+
+        :param prefix_dict: Dictionary containing prefixes and their URIs.
+        :return: Dictionary containing PREFIX statements for SPARQL queries.
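+
+        For example (illustrative): an entry {"skos": "http://www.w3.org/2004/02/skos/core#"}
+        is returned as {"skos": URIRef("http://www.w3.org/2004/02/skos/core#")};
+        the special keys "args", "op", "property" and "@version" are skipped.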
+ """ + sparql_prefixes = {} + + # Filtering out keys that don't correspond to prefixes or are special keys + special_keys = ["args", "op", "property", "@version"] + for prefix, entry in prefix_dict.items(): + if prefix not in special_keys and isinstance(entry, str): + sparql_prefixes[prefix] = URIRef(entry) + return sparql_prefixes + + def parse(self): + root = self.cql_json + self.ggps_inner_select = next(self.parse_logical_operators(root)) + where = WhereClause( + group_graph_pattern=GroupGraphPattern(content=self.ggps_inner_select) + ) + construct_template = ConstructTemplate( + construct_triples=ConstructTriples(triples=where.collect_triples()) + ) + solution_modifier = SolutionModifier() + self.query_object = ConstructQuery( + construct_template=construct_template, + where_clause=where, + solution_modifier=solution_modifier, + ) + self.query_str = "".join(part for part in self.query_object.render()) + + def parse_logical_operators( + self, element, existing_ggps=None + ) -> Generator[GroupGraphPatternSub, None, None]: + operator = element.get(str(CQL.operator))[0].get("@value") + args = element.get(str(CQL.args)) + + ggps = existing_ggps if existing_ggps is not None else GroupGraphPatternSub() + + if operator == "and": + and_components = [] + for arg in args: + # Process each argument and update the same ggps without yielding + list(self.parse_logical_operators(arg, ggps)) + # If a new ggps was created (not passed from outside), yield it + if existing_ggps is None: + yield ggps + + elif operator == "or": + # Collect components and then yield a GroupOrUnionGraphPattern + # ggps = existing_ggps if existing_ggps is not None else GroupGraphPatternSub() + or_components = [] + for arg in args: + # If the result is not a GroupGraphPatternSub, wrap it. 
component = next(self.parse_logical_operators(arg), None)
+                if isinstance(component, GroupGraphPatternSub):
+                    component = GroupGraphPattern(content=component)
+                or_components.append(component)
+
+            gougp = GroupOrUnionGraphPattern(group_graph_patterns=or_components)
+            gpnt = GraphPatternNotTriples(content=gougp)
+            if ggps.graph_patterns_or_triples_blocks:
+                ggps.graph_patterns_or_triples_blocks.append(gpnt)
+            else:
+                ggps.graph_patterns_or_triples_blocks = [gpnt]
+            if existing_ggps is None:
+                yield ggps
+
+        elif operator in ["<", "=", ">", "<=", ">="]:
+            yield from self._handle_comparison(operator, args, ggps)
+        elif operator == "like":
+            yield from self._handle_like(args, ggps)
+        elif operator in cql_sparql_spatial_mapping:
+            yield from self._handle_spatial(operator, args, ggps)
+        elif operator == "in":
+            yield from self._handle_in(args, ggps)
+        else:
+            raise NotImplementedError(f"Operator {operator} not implemented.")
+
+    def _add_triple(self, ggps, subject, predicate, object):
+        simple_triple = SimplifiedTriple(
+            subject=subject, predicate=predicate, object=object
+        )
+        if ggps.triples_block:
+            ggps.triples_block.triples.append(simple_triple)
+        else:
+            ggps.triples_block = TriplesBlock(triples=[simple_triple])
+
+    def _append_graph_pattern(self, ggps, graph_pattern):
+        if ggps.graph_patterns_or_triples_blocks:
+            ggps.graph_patterns_or_triples_blocks.append(graph_pattern)
+        else:
+            ggps.graph_patterns_or_triples_blocks = [graph_pattern]
+
+    def _handle_comparison(self, operator, args, existing_ggps=None):
+        self.var_counter += 1
+        ggps = existing_ggps if existing_ggps is not None else GroupGraphPatternSub()
+
+        prop = args[0].get(str(CQL.property))[0].get("@id")
+        inverse = False  # for inverse properties
+        if prop.startswith("^"):
+            prop = prop[1:]
+            inverse = True
+        value = args[1].get("@value")
+        subject = Variable("focus_node")
+        predicate = URIRef(prop)
+
+        object = Variable(f"var_{self.var_counter}")
+        if operator == "=":
+            inline_data_one_var = InlineDataOneVar(
+                variable=object, values=[Literal(value)]
+            )
+            gpnt = GraphPatternNotTriples(
+                content=InlineData(data_block=DataBlock(block=inline_data_one_var))
+            )
+            self._append_graph_pattern(ggps, gpnt)
+        else:
+            filter_clause = Filter(
+                variable=object, expression=operator, value=Literal(value)
+            )
+            self._append_graph_pattern(ggps, filter_clause)
+
+        if inverse:
+            self._add_triple(ggps, object, predicate, subject)
+        else:
+            self._add_triple(ggps, subject, predicate, object)
+
+        yield ggps
+
+    def _handle_like(self, args, existing_ggps=None):
+        self.var_counter += 1
+        ggps = existing_ggps if existing_ggps is not None else GroupGraphPatternSub()
+        prop = args[0].get(str(CQL.property))[0].get("@id")
+        inverse = False
+        if prop.startswith("^"):
+            prop = prop[1:]
+            inverse = True
+        value = (
+            args[1]
+            .get("@value")
+            .replace("%", ".*")
+            .replace("_", ".")
+            .replace("\\", "\\\\")
+        )
+
+        subject = Variable("focus_node")
+        predicate = URIRef(prop)
+        obj = Variable(f"var_{self.var_counter}")
+        if inverse:
+            self._add_triple(ggps, obj, predicate, subject)
+        else:
+            self._add_triple(ggps, subject, predicate, obj)
+        filter_clause = Filter(variable=obj, expression="regex", value=Literal(value))
+        self._append_graph_pattern(ggps, filter_clause)
+        yield ggps
+
+    def _handle_spatial(self, operator, args, existing_ggps=None):
+        self.var_counter += 1
+        ggps = existing_ggps if existing_ggps is not None else GroupGraphPatternSub()
+
+        coordinates_list = args[1].get("http://example.com/vocab/coordinates")
+        coordinates, geom_type =
self._extract_spatial_info(coordinates_list, args) + + if coordinates: + wkt = cql_to_shapely_mapping[geom_type](coordinates).wkt + subject = Variable("focus_node") + geom_bn_var = Variable("geom_bnode") + geom_lit_var = Variable("geom_var") + self._add_triple(ggps, subject, GEO.hasGeometry, geom_bn_var) + self._add_triple(ggps, geom_bn_var, GEO.asWKT, geom_lit_var) + spatial_filter = Filter( + variable=geom_lit_var, + expression=cql_sparql_spatial_mapping[operator], + value=Literal(wkt), + ) + self._append_graph_pattern(ggps, spatial_filter) + + yield ggps + + def _handle_in(self, args, existing_ggps=None): + self.var_counter += 1 + ggps = existing_ggps if existing_ggps is not None else GroupGraphPatternSub() + + prop = args[0].get(str(CQL.property))[0].get("@id") + inverse = False + if prop.startswith("^"): + prop = prop[1:] + inverse = True + literal_values = [item["@value"] for item in args if "@value" in item] + uri_values = [item["@id"] for item in args if "@id" in item] + rdflib_literal_values = [Literal(value) for value in literal_values] + rdflib_uri_values = [URIRef(value) for value in uri_values] + all_values = rdflib_literal_values + rdflib_uri_values + subject = Variable("focus_node") + predicate = URIRef(prop) + object = Variable(f"var_{self.var_counter}") + if inverse: + self._add_triple(ggps, object, predicate, subject) + else: + self._add_triple(ggps, subject, predicate, object) + inline_data_one_var = InlineDataOneVar(variable=object, values=all_values) + gpnt = GraphPatternNotTriples( + content=InlineData(data_block=DataBlock(block=inline_data_one_var)) + ) + self._append_graph_pattern(ggps, gpnt) + + yield ggps + + def _extract_spatial_info(self, coordinates_list, args): + coordinates = [] + geom_type = None + if coordinates_list: + coordinates = [ + (coordinates_list[i]["@value"], coordinates_list[i + 1]["@value"]) + for i in range(0, len(coordinates_list), 2) + ] + geom_type = args[1]["http://www.opengis.net/ont/sf#type"][0]["@value"] + bbox_list = args[1].get("http://example.com/vocab/bbox") + if bbox_list: + geom_type = "Polygon" + bbox_values = [item["@value"] for item in bbox_list] + if len(bbox_values) == 4: + coordinates = [ + (bbox_values[0], bbox_values[1]), + (bbox_values[0], bbox_values[3]), + (bbox_values[2], bbox_values[3]), + (bbox_values[2], bbox_values[1]), + (bbox_values[0], bbox_values[1]), + ] + return coordinates, geom_type diff --git a/temp/cql_sparql_reference.py b/temp/cql_sparql_reference.py new file mode 100644 index 00000000..23c4be3e --- /dev/null +++ b/temp/cql_sparql_reference.py @@ -0,0 +1,31 @@ +from rdflib import Namespace +from shapely import ( + Polygon, + MultiPolygon, + Point, + MultiPoint, + LineString, + MultiLineString, +) + +GEOF = Namespace("http://www.opengis.net/def/function/geosparql/") + +cql_sparql_spatial_mapping = { + "s_intersects": GEOF.sfIntersects, + "s_within": GEOF.sfWithin, + "s_contains": GEOF.sfContains, + "s_disjoint": GEOF.sfDisjoint, + "s_equals": GEOF.sfEquals, + "s_overlaps": GEOF.sfOverlaps, + "s_touches": GEOF.sfTouches, + "s_crosses": GEOF.sfCrosses, +} + +cql_to_shapely_mapping = { + "Polygon": Polygon, + "MultiPolygon": MultiPolygon, + "Point": Point, + "MultiPoint": MultiPoint, + "LineString": LineString, + "MultiLineString": MultiLineString, +} diff --git a/temp/default_cql_context.json b/temp/default_cql_context.json new file mode 100644 index 00000000..4ca742ca --- /dev/null +++ b/temp/default_cql_context.json @@ -0,0 +1,24 @@ +{ + "@version": 1.1, + "@base": "http://example.com/", + "@vocab": 
"http://example.com/vocab/", + "cql": "http://www.opengis.net/doc/IS/cql2/1.0/", + "sf": "http://www.opengis.net/ont/sf#", + "geo": "http://www.opengis.net/ont/geosparql#", + "landsat": "http://example.com/landsat/", + "ro": "http://example.com/ro/", + "args": { + "@container": "@set", + "@id": "cql:args" + }, + "property": { + "@type": "@id", + "@id": "cql:property" + }, + "op": { + "@id": "cql:operator" + }, + "type": { + "@id": "sf:type" + } +} diff --git a/temp/grammar.py b/temp/grammar.py new file mode 100644 index 00000000..6d64be9d --- /dev/null +++ b/temp/grammar.py @@ -0,0 +1,542 @@ +from __future__ import annotations + +from typing import List, Union, Optional, Generator + +from pydantic import BaseModel, field_validator +from rdflib import URIRef, Variable, BNode, Literal +from rdflib.plugins.sparql import prepareQuery +from rdflib.plugins.sparql.algebra import translateAlgebra + +from temp.cql_sparql_reference import cql_sparql_spatial_mapping + + +class SPARQLGrammarBase(BaseModel): + indent_level: int = 0 + + class Config: + arbitrary_types_allowed = True + + def __str__(self): + return "".join(part for part in self.render()) + + def __repr__(self): + return f"{self.__class__.__name__}({self})" + + def render(self): + raise NotImplementedError("Subclasses must implement this method.") + + def collect_triples(self) -> List[SimplifiedTriple]: + """ + Recursively collect SimplifiedTriple instances from this object. + """ + triples = [] + + # Iterate through all attributes of the object + for attribute_name in self.model_fields: + attribute_value = getattr(self, attribute_name) + + # Check if the attribute is a SimplifiedTriple and collect it + if isinstance(attribute_value, SimplifiedTriple): + triples.append(attribute_value) + + # If the attribute is a list, iterate through it and collect SimplifiedTriples + elif isinstance(attribute_value, list): + for item in attribute_value: + if isinstance(item, SimplifiedTriple): + triples.append(item) + # If the item is an instance of BaseClass, recurse into it + elif isinstance(item, SPARQLGrammarBase): + triples.extend(item.collect_triples()) + + # If the attribute is an instance of BaseClass, recurse into it + elif isinstance(attribute_value, SPARQLGrammarBase): + triples.extend(attribute_value.collect_triples()) + + # deduplicate + triples = list(set(triples)) + return triples + + +class SimplifiedTriple(SPARQLGrammarBase): + """A simplified implmementation the triple pattern matches in the SPARQL grammar, to avoid implementing many classes + such as TriplesSameSubjectPath""" + + subject: Union[URIRef, Variable, BNode] + predicate: Union[URIRef, Variable] + object: Union[URIRef, Literal, Variable, BNode] + + def render(self) -> Generator[str, None, None]: + yield f"\t{self.subject.n3()} {self.predicate.n3()} {self.object.n3()} ." 
+ + def __hash__(self): + return hash((self.subject, self.predicate, self.object)) + + +class TriplesBlock(SPARQLGrammarBase): + triples: List[SimplifiedTriple] = [] + + def render(self) -> Generator[str, None, None]: + for i, triple in enumerate(self.triples): + yield from triple.render() + if i < len(self.triples) - 1: # Check if it's not the last triple + yield "\n" + + +class InlineDataOneVar(SPARQLGrammarBase): + """ + https://www.w3.org/TR/sparql11-query/#rInlineDataOneVar + InlineDataOneVar ::= Var '{' DataBlockValue* '}' + """ + + variable: Variable + values: List[Union[URIRef, Literal]] + + def render(self) -> Generator[str, None, None]: + yield f"{self.variable.n3()} {{ " + yield " ".join(value.n3() for value in self.values) + yield " }" + + +class InlineDataFull(SPARQLGrammarBase): + """ + https://www.w3.org/TR/sparql11-query/#rInlineDataFull + ( NIL | '(' Var* ')' ) '{' ( '(' DataBlockValue* ')' | NIL )* '}' + """ + + variables: List[Variable] + values: List[List[Union[URIRef, Literal]]] + + def render(self) -> Generator[str, None, None]: + if self.vars: + yield "(" + yield " ".join(var.n3() for var in self.vars) + yield ") {" + else: + yield "{" + + if self.values_blocks is None: + yield "()" + else: + for values_block in self.values_blocks: + if values_block: + yield "(" + yield " ".join(value.n3() for value in values_block) + yield ")" + else: + yield "()" + yield "}" + + +class DataBlock(SPARQLGrammarBase): + block: Union[InlineDataOneVar, InlineDataFull] + + def render(self) -> Generator[str, None, None]: + yield from self.block.render() + + +class InlineData(SPARQLGrammarBase): + """ + https://www.w3.org/TR/sparql11-query/#rInlineData + InlineData ::= 'VALUES' DataBlock + """ + + data_block: DataBlock + + def render(self) -> Generator[str, None, None]: + yield "\n\tVALUES " + yield from self.data_block.render() + + +class ValuesClause(SPARQLGrammarBase): + """ + https://www.w3.org/TR/sparql11-query/#rValuesClause + ValuesClause ::= ( 'VALUES' DataBlock )? + """ + + data_block: Optional[DataBlock] + + def render(self) -> Generator[str, None, None]: + if self.data_block: + yield "\n\tVALUES " + yield from self.data_block.render() + + +class GraphPatternNotTriples(SPARQLGrammarBase): + """ + Partially implemented + https://www.w3.org/TR/sparql11-query/#rGraphPatternNotTriples + GraphPatternNotTriples ::= GroupOrUnionGraphPattern | OptionalGraphPattern | MinusGraphPattern | GraphGraphPattern | ServiceGraphPattern | Filter | Bind | InlineData + """ + + content: Union[GroupOrUnionGraphPattern, OptionalGraphPattern, Filter, InlineData] + + def render(self) -> Generator[str, None, None]: + yield from self.content.render() + + +class GroupGraphPatternSub(SPARQLGrammarBase): + """ + GraphPatternNotTriples partially implemented + https://www.w3.org/TR/sparql11-query/#rGroupGraphPatternSub + GroupGraphPatternSub ::= TriplesBlock? ( GraphPatternNotTriples '.'? TriplesBlock? )* + """ + + triples_block: Optional[TriplesBlock] = None + graph_patterns_or_triples_blocks: Optional[ + List[Union[GraphPatternNotTriples, TriplesBlock]] + ] = None + + def render(self) -> Generator[str, None, None]: + if self.triples_block: + yield from self.triples_block.render() + if self.graph_patterns_or_triples_blocks: + for item in self.graph_patterns_or_triples_blocks: + yield from item.render() + + def add_pattern(self, pattern): + if not isinstance(pattern, (TriplesBlock, GraphPatternNotTriples)): + raise TypeError( + "Pattern must be an instance of TriplesBlock or GraphPatternNotTriples." 
+ ) + if self.graph_patterns_or_triples_blocks is None: + self.graph_patterns_or_triples_blocks = [] + self.graph_patterns_or_triples_blocks.append(pattern) + + def add_triple(self, triple): + if not isinstance(triple, SimplifiedTriple): + raise TypeError("Triple must be an instance of SimplifiedTriple.") + if self.triples_block is None: + self.triples_block = TriplesBlock() + # prevent duplicates + if triple not in self.triples_block.triples: + self.triples_block.triples.append(triple) + + +# TODO future implementation below simplifies things to a single list, needs to be tested: + +# class GroupGraphPatternSub(SPARQLGrammarBase): +# """ +# GroupGraphPatternSub ::= TriplesBlock? (GraphPatternNotTriples '.'? TriplesBlock?)* +# """ +# patterns: Optional[List[Union[TriplesBlock, GraphPatternNotTriples]]] = None +# +# def render(self) -> Generator[str, None, None]: +# for pattern in self.patterns: +# yield from pattern.render() +# +# def append_triples(self, triples: TriplesBlock): +# # If the last item in the list is a TriplesBlock, append the triples to it +# if not self.patterns: +# self.patterns = [] +# if self.patterns and isinstance(self.patterns[-1], TriplesBlock): +# self.patterns[-1].append(triples) +# else: +# # Otherwise, add a new TriplesBlock to the list +# self.patterns.append(triples) + + +class SelectClause(SPARQLGrammarBase): + """ + https://www.w3.org/TR/sparql11-query/#rSelectClause + SelectClause ::= 'SELECT' ( 'DISTINCT' | 'REDUCED' )? ( ( Var | ( '(' Expression 'AS' Var ')' ) )+ | '*' ) + Simplified model excluding casting of variables (e.g. (?var AS ?alias)) + """ + + distinct: Optional[bool] = None + reduced: Optional[bool] = None + variables_or_all: Union[List[Variable], str] + + def render(self): + yield "SELECT" + if self.distinct: + yield " DISTINCT" + elif self.reduced: + yield " REDUCED" + if isinstance(self.variables_or_all, str): + yield " *" + else: + for var in self.variables_or_all: + yield f" {var.n3()}" + + +class SubSelect(SPARQLGrammarBase): + select_clause: SelectClause + where_clause: WhereClause + solution_modifier: Optional[SolutionModifier] = None + values_clause: Optional[ValuesClause] = None + + def render(self): + yield from self.select_clause.render() + yield from self.where_clause.render() + if self.solution_modifier: + yield from self.solution_modifier.render() + if self.values_clause: + yield from self.values_clause.render() + + +class SubSelectString(SubSelect): + """Inherits from the SubSelect class such that it can be used as a drop in replacement where a subselect is provided + as text, such as via sh:target / sh:select. NB by providing a subquery this way, the query cannot be validated. 
Use + of translateAlgebra will to some extent "validate" the query though, and will expand any prefixes known to RDFLib.""" + + select_clause: Optional[str] = None + where_clause: Optional[str] = None + select_string: str + + @field_validator("select_string") + def validate_and_transform_select_string(cls, v): + try: + return translateAlgebra(prepareQuery(v)) + except Exception as e: + # Handle exceptions from your translation function here + raise ValueError(f"Invalid Select Subquery: {e}") + + def render(self): + yield self.select_string + + +class GroupGraphPattern(SPARQLGrammarBase): + """ + https://www.w3.org/TR/sparql11-query/#rGroupGraphPattern + GroupGraphPattern ::= '{' ( SubSelect | GroupGraphPatternSub ) '}' + """ + + content: Union[SubSelect, GroupGraphPatternSub] + + def render(self) -> Generator[str, None, None]: + yield "{\n" + yield from self.content.render() + yield "\n}" + + +class Filter(SPARQLGrammarBase): + variable: Variable + expression: Union[URIRef, str] + value: Optional[Union[Literal, List[Union[URIRef, Literal]]]] = None + + def render(self) -> Generator[str, None, None]: + if self.expression in ["<", ">", "<=", ">="]: + yield f"\n\tFILTER({self.variable.n3()}{self.expression}{self.value.n3()})" + elif self.expression == "regex": + yield f"\n\tFILTER regex({self.variable.n3()}, {self.value.n3()})" + elif self.expression in cql_sparql_spatial_mapping.values(): + yield f"\n\tFILTER({self.expression.n3()}({self.variable.n3()}, {self.value.n3()}))" + elif self.expression == "NOT IN": + yield f'\n\tFILTER({self.variable.n3()} NOT IN({", ".join([value.n3() for value in self.value])}))' + elif self.expression == "ISBLANK": + yield f"\n\tFILTER(ISBLANK({self.variable.n3()}))" + + +class Bind(SPARQLGrammarBase): + """ + An incorrect implemenation of BIND so as to avoid implementing a lot of the Grammar + This is a simplified implementation that at present ONLY caters to the following kind of bind + BIND({ triple pattern } AS ?var + Ideally the whole SPARQL Grammar is implemented as per spec and convenience functions are created for common use + cases + + Bind ::= 'BIND' '(' Expression 'AS' Var ')' + https://www.w3.org/TR/sparql11-query/#rBind + """ + + expression: str + triple: SimplifiedTriple + var: Variable + + def render(self): + yield f"\n\tBIND({self.expression}{{ {self.triple.render()} }} AS {self.var.n3()})" + + +class OptionalGraphPattern(SPARQLGrammarBase): + """ + https://www.w3.org/TR/sparql11-query/#rOptionalGraphPattern + OptionalGraphPattern ::= 'OPTIONAL' GroupGraphPattern + """ + + group_graph_pattern: GroupGraphPattern + + def render(self) -> Generator[str, None, None]: + yield "\nOPTIONAL " + yield from self.group_graph_pattern.render() + + +class GroupOrUnionGraphPattern(SPARQLGrammarBase): + """ + For UNION statements + https://www.w3.org/TR/sparql11-query/#rGroupOrUnionGraphPattern + GroupOrUnionGraphPattern ::= GroupGraphPattern ( 'UNION' GroupGraphPattern )* + """ + + group_graph_patterns: List[GroupGraphPattern] + + def render(self) -> Generator[str, None, None]: + ggps_iter = iter(self.group_graph_patterns) + first_ggp = next(ggps_iter) + + yield "\n" + yield from first_ggp.render() + for ggp in ggps_iter: # UNION goes between 2:N group graph patterns + yield "\nUNION\n" + yield from ggp.render() + + +class LimitClause(SPARQLGrammarBase): + limit: int + + def render(self) -> Generator[str, None, None]: + yield f"LIMIT {self.limit}" + + +class OffsetClause(SPARQLGrammarBase): + offset: int + + def render(self) -> Generator[str, None, None]: + 
yield f"OFFSET {self.offset}" + + +class OrderCondition(SPARQLGrammarBase): + var: Variable + direction: Optional[str] = None + + def render(self): + if self.direction: + yield f"{self.direction}({self.var.n3()})" + else: + yield self.var.n3() + + +class OrderClause(SPARQLGrammarBase): + conditions: List[OrderCondition] + + def render(self): + yield "\nORDER BY " + yield " ".join( + part for condition in self.conditions for part in condition.render() + ) + + +class LimitOffsetClauses(SPARQLGrammarBase): + """ + Represents the LIMIT and OFFSET clauses in SPARQL queries. + According to the SPARQL grammar: + LimitOffsetClauses ::= LimitClause OffsetClause? | OffsetClause LimitClause? + """ + + limit_clause: Optional[LimitClause] = None + offset_clause: Optional[OffsetClause] = None + + def render(self) -> Generator[str, None, None]: + if self.limit_clause: + yield from self.limit_clause.render() + if self.offset_clause: + if self.limit_clause: + yield "\n" + yield from self.offset_clause.render() + + +class SolutionModifier(SPARQLGrammarBase): + """ + https://www.w3.org/TR/sparql11-query/#rSolutionModifier + SolutionModifier ::= GroupClause? HavingClause? OrderClause? LimitOffsetClauses? + """ + + order_by: Optional[OrderClause] = None + limit_offset: Optional[LimitOffsetClauses] = None + + # having: Optional[HavingClause] + # group_by: Optional[GroupClause] + + def render(self) -> str: + if self.order_by: + yield from self.order_by.render() + if self.limit_offset: + if self.order_by: + yield "\n" + yield from self.limit_offset.render() + + +class ConstructTriples(SPARQLGrammarBase): + """ + https://www.w3.org/TR/sparql11-query/#rConstructTriples + ConstructTriples ::= TriplesSameSubject ( '.' ConstructTriples? )? + + Simplified implementation that only accepts a list of SimplifiedTriples - avoids implementing the classes associated + with ; and , for TriplesSameSubject etc. in the SPARQL Grammar + """ + + triples: List[SimplifiedTriple] + + def render(self) -> Generator[str, None, None]: + for i, triple in enumerate(self.triples): + yield from triple.render() + if i < len(self.triples) - 1: # Check if it's not the last triple + yield "\n" + + +class ConstructTemplate(SPARQLGrammarBase): + """ + https://www.w3.org/TR/sparql11-query/#rConstructTemplate + ConstructTemplate ::= '{' ConstructTriples? '}' + """ + + construct_triples: ConstructTriples + + def render(self) -> Generator[str, None, None]: + yield "{\n" + yield from self.construct_triples.render() + yield "\n}" + + +class WhereClause(SPARQLGrammarBase): + """ + https://www.w3.org/TR/sparql11-query/#rWhereClause + WhereClause ::= 'WHERE'? GroupGraphPattern + """ + + group_graph_pattern: GroupGraphPattern + + def render(self) -> Generator[str, None, None]: + yield "\nWHERE " + yield from self.group_graph_pattern.render() + + +class ConstructQuery(SPARQLGrammarBase): + """ + https://www.w3.org/TR/sparql11-query/#rConstructQuery + ConstructQuery ::= 'CONSTRUCT' ( ConstructTemplate DatasetClause* WhereClause SolutionModifier | DatasetClause* 'WHERE' '{' TriplesTemplate? '}' SolutionModifier ) + + Currently simplified to only accept a ConstructTemplate, WhereClause, and SolutionModifier. 
+ """ + + construct_template: ConstructTemplate + where_clause: WhereClause + solution_modifier: SolutionModifier + + def render(self) -> Generator[str, None, None]: + yield "CONSTRUCT " + yield from self.construct_template.render() + yield from self.where_clause.render() + yield from self.solution_modifier.render() + + +# class DescriptionSPARQLQuery(SPARQLGrammarBase): +# # prolog: Prolog +# blocks: List[Union[SelectBlock, SPARQLComponent]] +# +# def render(self) -> Generator[str, None, None]: +# # yield from self.prolog.render() +# yield "\n\nCONSTRUCT {\n" +# for block in self.blocks: +# if isinstance(block, SelectBlock): +# yield "\t" + "\n\t".join(block.extract_triples()) +# else: +# yield from block.extract_triples() +# yield "\n}" +# # Join the parts produced by the generator into a string and then yield +# yield "\nWHERE {" +# for block in self.blocks: +# yield from block.render() +# yield "\n}" +# +# def render(self) -> str: +# return "".join(part for part in self.render()) diff --git a/temp/shacl2sparql.py b/temp/shacl2sparql.py new file mode 100644 index 00000000..f5f2f8e1 --- /dev/null +++ b/temp/shacl2sparql.py @@ -0,0 +1,522 @@ +import json +from pathlib import Path +from string import Template +from typing import Union, Optional + +from rdflib import URIRef, Variable, Namespace, Graph, SH, RDF, BNode, Literal +from rdflib.collection import Collection + +from temp.grammar import ( + TriplesBlock, + OptionalGraphPattern, + SolutionModifier, + GroupGraphPattern, + SimplifiedTriple, + SubSelect, + SubSelectString, + GroupOrUnionGraphPattern, + GroupGraphPatternSub, + GraphPatternNotTriples, + SelectClause, + WhereClause, + LimitClause, + OffsetClause, + LimitOffsetClauses, + InlineDataOneVar, + DataBlock, + InlineData, + ConstructTemplate, + ConstructTriples, + ConstructQuery, + Filter, +) + +ONT = Namespace("https://prez.dev/ont/") +ALTREXT = Namespace("http://www.w3.org/ns/dx/conneg/altr-ext#") +SHEXT = Namespace("http://example.com/shacl-extension#") + + +class SHACLParser: + def __init__( + self, + runtime_values: dict, + endpoint_graph: Graph, + profile_graph: Graph, + endpoint_uri: Optional[URIRef] = None, + profile_uri: Optional[URIRef] = None, + additional_ggps: Optional[GroupGraphPatternSub] = None, + ): + self.runtime_values = runtime_values + self.endpoint_graph: Graph = endpoint_graph + self.profile_graph: Graph = profile_graph + self.endpoint_uri: Optional[URIRef] = endpoint_uri + self.profile_uri: Optional[URIRef] = profile_uri + self.additional_ggps: Optional[GroupGraphPatternSub] = additional_ggps + + self.focus_node: Union[URIRef, Variable] = Variable("focus_node") + + self.sparql = None + self.results = None + + self.construct_triples = None + self.main_where_ggps = GroupGraphPatternSub() + self.sub_select_ggps = None + self.optional_patterns = None + self.where_patterns = None + + self.default_limit = None + self.default_offset = None + + self.runtime_vals_expanded = None + self.merged_runtime_and_default_vals = None + self._expand_runtime_vars() + self._merge_runtime_and_default_vars() + + def _expand_runtime_vars(self): + self.runtime_vals_expanded = {} + for k, v in self.runtime_values.items(): + if k in ["limit", "offset"]: + self.runtime_vals_expanded[k] = v + elif v: + self.runtime_vals_expanded[k] = URIRef(v).n3() + + def _merge_runtime_and_default_vars(self): + default_args = {"limit": self.default_limit, "offset": self.default_offset} + self.merged_runtime_and_default_vals = default_args | self.runtime_vals_expanded + + def generate_sparql(self): + 
""" + Generates SPARQL query from SHACL profile_graph. + """ + self.parse_endpoint_definition() + self.parse_profile() + self._generate_query() + + def _generate_query(self): + where = WhereClause( + group_graph_pattern=GroupGraphPattern(content=self.main_where_ggps) + ) + if self.construct_triples: + self.construct_triples.extend(where.collect_triples()) + else: + self.construct_triples = where.collect_triples() + self.construct_triples = list(set(self.construct_triples)) + construct_template = ConstructTemplate( + construct_triples=ConstructTriples(triples=self.construct_triples) + ) + solution_modifier = SolutionModifier() + query = ConstructQuery( + construct_template=construct_template, + where_clause=where, + solution_modifier=solution_modifier, + ) + query_str = "".join(part for part in query.render()) + self.sparql = query_str + + def parse_endpoint_definition(self): + """ + Either set the focus_node to a URIRef, if a target node is provided, or generate a triple pattern to get list items + Generates triples for the endpoint definition with runtime values substituted. + """ + # sparql targets + target_bn = list( + self.endpoint_graph.objects(subject=self.endpoint_uri, predicate=SH.target) + ) + target_nodes = list( + self.endpoint_graph.objects( + subject=self.endpoint_uri, predicate=SH.targetNode + ) + ) + target_classes = list( + self.endpoint_graph.objects( + subject=self.endpoint_uri, predicate=SH.targetClass + ) + ) + rule_nodes = list( + self.endpoint_graph.objects(subject=self.endpoint_uri, predicate=SH.rule) + ) + + # objects - just set the focus node. + if target_nodes: + target_node_var = str(target_nodes[0]) + target_node_val = target_node_var[1:] + target_uri = URIRef(self.runtime_values[target_node_val]) + self.focus_node = target_uri + + # rule nodes - for CONSTRUCT TRIPLES patterns. + if rule_nodes: + for rule_node in rule_nodes: + self._create_construct_triples_from_sh_rules(rule_node) + + # if it's a listing endpoint, get limit and offset if available, otherwise use defaults. + endpoint_type = self.get_endpoint_type() + if endpoint_type == ONT.ListingEndpoint: + # default limit and offset + self._set_default_limit_and_offset() + self._merge_runtime_and_default_vars() + + # sh:target / sh:select + if target_bn: + ggp = self.create_select_subquery_from_template(target_bn) + self._add_target_class(target_classes[0]) + self._add_ggp_to_main_ggps(ggp) + + # don't use the target class if there's a sh:target / sh:select #TODO confirm why this caused issues - duplicate + # pattern matches in the subquery? 
+ elif target_classes: + if ( + endpoint_type == ONT.ListingEndpoint + ): # ignore class for non listing at present + ggp = self.create_select_subquery_for_class_listing(target_classes) + self._add_ggp_to_main_ggps(ggp) + + def _add_ggp_to_main_ggps(self, ggp): + gorugp = GroupOrUnionGraphPattern(group_graph_patterns=[ggp]) + gpnt = GraphPatternNotTriples(content=gorugp) + self.main_where_ggps.add_pattern(gpnt) + + def _create_construct_triples_from_sh_rules(self, rule_node): + subject = self.endpoint_graph.value(subject=rule_node, predicate=SH.subject) + predicate = self.endpoint_graph.value(subject=rule_node, predicate=SH.predicate) + object = self.endpoint_graph.value(subject=rule_node, predicate=SH.object) + if subject == SH.this: + subject = self.focus_node + if isinstance(object, Literal): # assume it's a variable of the form ?xyz + object = Variable(str(object)[1:]) + triple = SimplifiedTriple(subject=subject, predicate=predicate, object=object) + if self.construct_triples: + self.construct_triples.append(triple) + else: + self.construct_triples = [triple] + + def create_select_subquery_for_class_listing(self, target_classes): + target_class_var = URIRef(target_classes[0]) + triples_block = TriplesBlock( + triples=[ + SimplifiedTriple( + subject=self.focus_node, predicate=RDF.type, object=target_class_var + ) + ] + ) + if self.additional_ggps: # for example from cql + ggps = GroupGraphPatternSub( + # triples_block=triples_block, # triples block from SHACL profile + graph_patterns_or_triples_blocks=[ + GraphPatternNotTriples( + content=GroupOrUnionGraphPattern( + group_graph_patterns=[ + GroupGraphPattern(content=self.additional_ggps) + ] + ) + ) + ] + ) + else: + ggps = GroupGraphPatternSub(triples_block=triples_block) + ggp = GroupGraphPattern(content=ggps) + sub_select_where = WhereClause(group_graph_pattern=ggp) + select_clause = SelectClause(variables_or_all="*") + limit = self.merged_runtime_and_default_vals["limit"] + offset = self.merged_runtime_and_default_vals["offset"] + if limit is not None and offset is not None: # int = 0 is boolean False + limit_clause = LimitClause(limit=limit) + offset_clause = OffsetClause(offset=offset) + limit_offset_clauses = LimitOffsetClauses( + limit_clause=limit_clause, offset_clause=offset_clause + ) + solution_modifier = SolutionModifier(limit_offset=limit_offset_clauses) + else: + solution_modifier = SolutionModifier() + ss = SubSelect( + select_clause=select_clause, + where_clause=sub_select_where, + solution_modifier=solution_modifier, + ) + ggp = GroupGraphPattern(content=ss) + return ggp + + def create_select_subquery_from_template(self, target_bn): + select_statement = Template( + str(self.endpoint_graph.value(target_bn[0], SH.select, default=None)) + ) + # expand any prefixes etc. in case the prefixes are not defined in the query this subquery is being inserted + # into. 
NB SHACL does provide a mechanism to declare prefixes used in SPARQL targets - this has not been
+        # implemented
+        substituted_query = select_statement.substitute(
+            self.merged_runtime_and_default_vals
+        )
+        sss = SubSelectString(select_string=substituted_query)
+        ggp = GroupGraphPattern(content=sss)
+        return ggp
+
+    def _set_default_limit_and_offset(self):
+        default_limit = list(
+            self.endpoint_graph.objects(
+                subject=self.endpoint_uri, predicate=ONT["defaultLimit"]
+            )
+        )
+        default_offset = list(
+            self.endpoint_graph.objects(
+                subject=self.endpoint_uri, predicate=ONT["defaultOffset"]
+            )
+        )
+        if not default_limit or not default_offset:
+            raise ValueError(
+                "Listing endpoint must have both a default limit and a default offset"
+            )
+        self.default_limit = int(default_limit[0])
+        self.default_offset = int(default_offset[0])
+
+    def get_endpoint_type(self):
+        endpoint_type = list(
+            self.endpoint_graph.objects(subject=self.endpoint_uri, predicate=RDF.type)
+        )
+        if not endpoint_type:
+            raise ValueError(
+                'Endpoint definition must have a type of either "https://prez.dev/ont/ListingEndpoint" '
+                'or "https://prez.dev/ont/ObjectEndpoint"'
+            )
+        endpoint_type = endpoint_type[0]
+        return endpoint_type
+
+    def parse_profile(self):
+        for i, property_node in enumerate(
+            self.profile_graph.objects(subject=self.profile_uri, predicate=SH.property)
+        ):
+            self._parse_property_shapes(property_node, i)
+        self._build_bnode_blocks()
+
+    def _add_target_class(self, target_class):
+        triples = [
+            SimplifiedTriple(
+                subject=self.focus_node, predicate=RDF.type, object=target_class
+            )
+        ]
+        if self.construct_triples:
+            self.construct_triples.extend(triples)
+        else:
+            self.construct_triples = triples
+
+    def _build_bnode_blocks(self):
+        bnode_depth = list(
+            self.profile_graph.objects(
+                subject=self.profile_uri, predicate=SHEXT["bnode-depth"]
+            )
+        )
+        if not bnode_depth:
+            return
+        # bnode-depth values are RDF Literals; cast before comparing, as
+        # comparing a Literal against a plain int is not reliable in RDFLib
+        bnode_depth = int(bnode_depth[0])
+        if bnode_depth == 0:
+            return
+        # RDFLib Variables are named without the leading "?"; n3() adds it
+        p1 = Variable("bn_p_1")
+        o1 = Variable("bn_o_1")
+        p2 = Variable("bn_p_2")
+        o2 = Variable("bn_o_2")
+        triples_block = TriplesBlock(
+            triples=[
+                SimplifiedTriple(subject=self.focus_node, predicate=p1, object=o1),
+                SimplifiedTriple(subject=o1, predicate=p2, object=o2),
+            ]
+        )
+        filter_block = Filter(variable=o1, expression="ISBLANK")
+        gpnt = GraphPatternNotTriples(content=filter_block)
+        ggps = GroupGraphPatternSub(
+            triples_block=triples_block, graph_patterns_or_triples_blocks=[gpnt]
+        )
+        ggp = GroupGraphPattern(content=ggps)
+        outer_opt = OptionalGraphPattern(group_graph_pattern=ggp)
+        container_gpnt = GraphPatternNotTriples(content=outer_opt)
+        container_ggps = GroupGraphPatternSub(
+            graph_patterns_or_triples_blocks=[container_gpnt]
+        )
+        container_ggp = GroupGraphPattern(content=container_ggps)
+
+        def process_bn_level(depth, max_depth, outer_ggps):
+            old_o_var = Variable(f"bn_o_{depth}")
+            new_p_var = Variable(f"bn_p_{depth + 1}")
+            new_o_var = Variable(f"bn_o_{depth + 1}")
+            triples_block = TriplesBlock(
+                triples=[
+                    SimplifiedTriple(
+                        subject=old_o_var, predicate=new_p_var, object=new_o_var
+                    )
+                ]
+            )
+            filter_block = Filter(variable=old_o_var, expression="ISBLANK")
+            gpnt = GraphPatternNotTriples(content=filter_block)
+            ggps = GroupGraphPatternSub(
+                triples_block=triples_block, graph_patterns_or_triples_blocks=[gpnt]
+            )
+            ggp = GroupGraphPattern(content=ggps)
+            opt = OptionalGraphPattern(group_graph_pattern=ggp)
+            # wrap in GraphPatternNotTriples, consistent with how optional
+            # patterns are added elsewhere in this class
+            outer_ggps.graph_patterns_or_triples_blocks.append(
+                GraphPatternNotTriples(content=opt)
+            )
+            if depth < max_depth:
+                process_bn_level(depth + 1, max_depth, ggps)
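+
+        # Illustrative output for bnode_depth = 2 (a sketch of the pattern
+        # this method assembles; exact whitespace will differ):
+        #
+        #   OPTIONAL {
+        #       ?focus_node ?bn_p_1 ?bn_o_1 .
+        #       ?bn_o_1 ?bn_p_2 ?bn_o_2 .
+        #       FILTER(ISBLANK(?bn_o_1))
+        #       OPTIONAL {
+        #           ?bn_o_2 ?bn_p_3 ?bn_o_3 .
+        #           FILTER(ISBLANK(?bn_o_2))
+        #       }
+        #   }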
+
+        if bnode_depth > 1:
+            process_bn_level(depth=2, max_depth=bnode_depth, outer_ggps=ggps)
+        self._add_ggp_to_main_ggps(container_ggp)
+
+    def _parse_property_shapes(self, property_node, i):
+        def process_path_object(path_object):
+            # if path_object == SHEXT.allPredicateValues:
+            #     predicates.append(Variable("preds"))
+            if isinstance(path_object, BNode):
+                predicate_objects_gen = self.profile_graph.predicate_objects(
+                    subject=path_object
+                )
+                bnode_pred, bnode_obj = next(predicate_objects_gen, (None, None))
+                if bnode_obj == SH.union:
+                    pass
+                elif bnode_pred == SH.inversePath:
+                    inverse_preds.append(bnode_obj)
+                elif bnode_pred == SH.alternativePath:
+                    predicates.extend(list(Collection(self.profile_graph, bnode_obj)))
+                else:  # sequence paths - no special predicate, just RDF list members
+                    predicates.append(
+                        tuple(Collection(self.profile_graph, path_object))
+                    )
+            else:  # a plain path specification restricting the predicate to a specific value
+                predicates.append(path_object)
+
+        inverse_preds = []
+        predicates = []
+        union_items = None
+        path_object = self.profile_graph.value(
+            subject=property_node, predicate=SH.path, default=None
+        )
+        if isinstance(path_object, BNode):
+            predicate_objects_gen = self.profile_graph.predicate_objects(
+                subject=path_object
+            )
+            bnode_pred, bnode_obj = next(predicate_objects_gen, (None, None))
+            if bnode_obj == SH.union:
+                union_list_bnode = list(Collection(self.profile_graph, path_object))[1]
+                union_items = list(Collection(self.profile_graph, union_list_bnode))
+
+        ggp_list = []
+        if union_items:
+            for item in union_items:
+                process_path_object(item)
+        else:
+            process_path_object(path_object)
+
+        if inverse_preds:
+            ggps_under_union = GroupGraphPatternSub()
+            ggps = ggps_under_union
+            ggp = GroupGraphPattern(content=ggps_under_union)
+            ggp_list.append(ggp)
+            self._add_inverse_preds(ggps, inverse_preds, i)
+        if predicates:
+            self._add_predicate_constraints(predicates, property_node, ggp_list)
+        self._add_object_constraints(ggp_list, property_node)
+        union = GroupOrUnionGraphPattern(group_graph_patterns=ggp_list)
+        gpnt = GraphPatternNotTriples(content=union)
+
+        min_count = int(
+            self.profile_graph.value(
+                subject=property_node, predicate=SH.minCount, default=1
+            )
+        )
+        if min_count == 0:  # add an OPTIONAL "wrapper" around the main GGPS
+            ggps_under_optional = GroupGraphPatternSub(
+                graph_patterns_or_triples_blocks=[gpnt]
+            )
+            ggp = GroupGraphPattern(content=ggps_under_optional)
+            optional = OptionalGraphPattern(group_graph_pattern=ggp)
+            gpnt = GraphPatternNotTriples(content=optional)
+        self.main_where_ggps.add_pattern(gpnt)
+
+    def _add_inverse_preds(self, ggps, inverse_preds, i):
+        if inverse_preds:
+            ggps.add_triple(
+                SimplifiedTriple(
+                    subject=Variable(f"inv_path_{i}"),
+                    predicate=Variable(f"inv_pred_{i}"),
+                    object=self.focus_node,
+                )
+            )
+            inline_data_one_var = InlineDataOneVar(
+                variable=Variable(f"inv_pred_{i}"), values=inverse_preds
+            )
+            data_block = DataBlock(block=inline_data_one_var)
+            inline_data = InlineData(data_block=data_block)
+            gpnt = GraphPatternNotTriples(content=inline_data)
+            # ggps_sub = GroupGraphPatternSub(graph_patterns_or_triples_blocks=[gpnt])
+            ggps.add_pattern(gpnt)
+
+    def _add_predicate_constraints(self, predicates, property_node, ggp_list):
+        # check for any sequence paths - process separately
+        sps = [p for p in predicates if isinstance(p, tuple)]
+        predicates = [p for p in predicates if not isinstance(p, tuple)]
+
+        for i, (pred1, pred2) in enumerate(sps):
+            t1 = SimplifiedTriple(
+                subject=self.focus_node,
+                predicate=pred1,
+                object=Variable(f"seq_obj_{i + 1}"),
+            )
+            t2 = SimplifiedTriple(
+                subject=Variable(f"seq_obj_{i + 1}"),
+                predicate=pred2,
+                object=Variable(f"seq_obj_terminal_{i + 1}"),
+            )
+            tb = TriplesBlock(triples=[t1, t2])
+            ggps = GroupGraphPatternSub(triples_block=tb)
+            ggp = GroupGraphPattern(content=ggps)
+            ggp_list.append(ggp)
+
+        # process direct path predicates
+        max_count = self.profile_graph.value(subject=property_node, predicate=SH.maxCount)
+        simplified_triple = SimplifiedTriple(
+            subject=self.focus_node,
+            predicate=Variable("preds"),
+            object=Variable("objs"),
+        )
+        tb = TriplesBlock(triples=[simplified_triple])
+        if predicates:
+            # filters must be added to all union statements
+            if max_count == Literal(0):
+                values_constraint = Filter(
+                    variable=Variable("preds"), expression="NOT IN", value=predicates
+                )
+                gpnt = GraphPatternNotTriples(content=values_constraint)
+                if ggp_list:
+                    for ggp in ggp_list:
+                        ggp.content.add_pattern(gpnt)
+                else:
+                    ggps = GroupGraphPatternSub(
+                        graph_patterns_or_triples_blocks=[gpnt, tb]
+                    )
+                    ggp = GroupGraphPattern(content=ggps)
+                    ggp_list.append(ggp)
+            elif SHEXT.allPredicateValues not in predicates:  # add VALUES clause
+                inline_data_one_var = InlineDataOneVar(
+                    variable=Variable("preds"), values=predicates
+                )
+                data_block = DataBlock(block=inline_data_one_var)
+                inline_data = InlineData(data_block=data_block)
+                gpnt = GraphPatternNotTriples(content=inline_data)
+                ggps = GroupGraphPatternSub(graph_patterns_or_triples_blocks=[gpnt, tb])
+                ggp = GroupGraphPattern(content=ggps)
+                ggp_list.append(ggp)
+            elif predicates == [SHEXT.allPredicateValues]:
+                ggps = GroupGraphPatternSub(triples_block=tb)
+                ggp = GroupGraphPattern(content=ggps)
+                ggp_list.append(ggp)
+
+    def _add_object_constraints(self, ggp_list, property_node):
+        value = self.profile_graph.value(
+            subject=property_node, predicate=SH.hasValue, default=None
+        )
+        values_bn = self.profile_graph.value(
+            subject=property_node, predicate=SH["in"], default=None
+        )
+        if value:  # a specific value
+            objects = [value]
+        elif values_bn:  # a set of values
+            c = Collection(self.profile_graph, values_bn)
+            objects = list(c)
+        if value or values_bn:
+            ggps = GroupGraphPatternSub()
+            ggp = GroupGraphPattern(content=ggps)
+            ggp_list.append(ggp)
+            inline_data_one_var = InlineDataOneVar(
+                variable=Variable("objs"), values=objects
+            )
+            data_block = DataBlock(block=inline_data_one_var)
+            inline_data = InlineData(data_block=data_block)
+            gpnt = GraphPatternNotTriples(content=inline_data)
+            ggps.add_pattern(gpnt)
diff --git a/tests/conftest.py b/tests/conftest.py
deleted file mode 100644
index de6414fb..00000000
--- a/tests/conftest.py
+++ /dev/null
@@ -1,3 +0,0 @@
-import os
-
-os.environ["SPARQL_REPO_TYPE"] = "pyoxigraph"
diff --git a/tests/data/catprez/expected_responses/catalog_anot.ttl b/tests/data/catprez/expected_responses/catalog_anot.ttl
index e1b3e004..1159a94d 100644
--- a/tests/data/catprez/expected_responses/catalog_anot.ttl
+++ b/tests/data/catprez/expected_responses/catalog_anot.ttl
@@ -50,7 +50,11 @@ prov:qualifiedAttribution rdfs:label "qualified attribution" .
     dcterms:created "2022-07-31"^^xsd:date ;
     dcterms:description """The Indigenous Data Network's demonstration catalogue of datasets. This catalogue contains records of datasets in Australia, most of which have some relation to indigenous Australia.
-The purpose of this catalogue is not to act as a master catalogue of indigenous data in Australia to demonstrate improved metadata models and rating systems for data and metadata in order to improve indigenous data governance. + rdfs:label "author"@en ; + dcterms:provenance "Presented in the original standard's codelist"@en ; + ns1:status ; + skos:definition "party who authored the resource" ; + skos:prefLabel "author"@en . The content of this catalogue conforms to the Indigenous Data Network's Catalogue Profile which is a profile of the DCAT, SKOS and PROV data models."""@en ; dcterms:hasPart , diff --git a/tests/data/catprez/input/_system-catalog.ttl b/tests/data/catprez/input/_system-catalog.ttl index c00da0de..8c1f0ec9 100644 --- a/tests/data/catprez/input/_system-catalog.ttl +++ b/tests/data/catprez/input/_system-catalog.ttl @@ -30,3 +30,8 @@ PREFIX xsd: a dcat:Resource . + + a dcat:Resource . + + a dcat:Resource . + diff --git a/tests/data/cql/input/example01.json b/tests/data/cql/input/example01.json new file mode 100644 index 00000000..b81dafdf --- /dev/null +++ b/tests/data/cql/input/example01.json @@ -0,0 +1,7 @@ +{ + "op": "=", + "args": [ + { "property": "scene_id" }, + "LC82030282019133LGN00" + ] +} diff --git a/tests/data/cql/input/example02.json b/tests/data/cql/input/example02.json new file mode 100644 index 00000000..ced9d24b --- /dev/null +++ b/tests/data/cql/input/example02.json @@ -0,0 +1,7 @@ +{ + "op": "like", + "args": [ + { "property": "eo:instrument" }, + "OLI%" + ] +} diff --git a/tests/data/cql/input/example03.json b/tests/data/cql/input/example03.json new file mode 100644 index 00000000..fc98d5e9 --- /dev/null +++ b/tests/data/cql/input/example03.json @@ -0,0 +1,7 @@ +{ + "op": "in", + "args": [ + { "property": "landsat:wrs_path" }, + [ "153", "154", "15X" ] + ] +} diff --git a/tests/data/cql/input/example05a.json b/tests/data/cql/input/example05a.json new file mode 100644 index 00000000..de561ead --- /dev/null +++ b/tests/data/cql/input/example05a.json @@ -0,0 +1,19 @@ +{ + "op": "or", + "args": [ + { + "op": "=", + "args": [ + { "property": "ro:cloud_cover" }, + 0.1 + ] + }, + { + "op": "=", + "args": [ + { "property": "ro:cloud_cover" }, + 0.2 + ] + } + ] +} diff --git a/tests/data/cql/input/example05b.json b/tests/data/cql/input/example05b.json new file mode 100644 index 00000000..aa0fa9a6 --- /dev/null +++ b/tests/data/cql/input/example05b.json @@ -0,0 +1,7 @@ +{ + "op": "in", + "args": [ + { "property": "ro:cloud_cover" }, + [ 0.1, 0.2 ] + ] +} diff --git a/tests/data/cql/input/example06a.json b/tests/data/cql/input/example06a.json new file mode 100644 index 00000000..8adeb429 --- /dev/null +++ b/tests/data/cql/input/example06a.json @@ -0,0 +1,26 @@ +{ + "op": "and", + "args": [ + { + "op": "between", + "args": [ + { "property": "eo:cloud_cover" }, + 0.1, 0.2 + ] + }, + { + "op": "=", + "args": [ + { "property": "landsat:wrs_row" }, + 28 + ] + }, + { + "op": "=", + "args": [ + { "property": "landsat:wrs_path" }, + 203 + ] + } + ] +} diff --git a/tests/data/cql/input/example06b.json b/tests/data/cql/input/example06b.json new file mode 100644 index 00000000..e91f8c71 --- /dev/null +++ b/tests/data/cql/input/example06b.json @@ -0,0 +1,33 @@ +{ + "op": "and", + "args": [ + { + "op": ">=", + "args": [ + { "property": "eo:cloud_cover" }, + 0.1 + ] + }, + { + "op": "<=", + "args": [ + { "property": "eo:cloud_cover" }, + 0.2 + ] + }, + { + "op": "=", + "args": [ + { "property": "landsat:wrs_row" }, + 28 + ] + }, + { + "op": "=", + "args": [ + { "property": 
"landsat:wrs_path" }, + 203 + ] + } + ] +} diff --git a/tests/data/cql/input/example07.json b/tests/data/cql/input/example07.json new file mode 100644 index 00000000..d3b9cd68 --- /dev/null +++ b/tests/data/cql/input/example07.json @@ -0,0 +1,35 @@ +{ + "op": "and", + "args": [ + { + "op": "like", + "args": [ + { "property": "eo:instrument" }, + "OLI%" + ] + }, + { + "op": "s_intersects", + "args": [ + { "property": "footprint" }, + { + "type": "Polygon", + "coordinates": [ + [ [ 43.5845, -79.5442 ], + [ 43.6079, -79.4893 ], + [ 43.5677, -79.4632 ], + [ 43.6129, -79.3925 ], + [ 43.6223, -79.3238 ], + [ 43.6576, -79.3163 ], + [ 43.7945, -79.1178 ], + [ 43.8144, -79.1542 ], + [ 43.8555, -79.1714 ], + [ 43.7509, -79.639 ], + [ 43.5845, -79.5442 ] + ] + ] + } + ] + } + ] +} diff --git a/tests/data/spaceprez/expected_responses/dataset_anot.ttl b/tests/data/spaceprez/expected_responses/dataset_anot.ttl index 6e6c7f63..e1d2600f 100644 --- a/tests/data/spaceprez/expected_responses/dataset_anot.ttl +++ b/tests/data/spaceprez/expected_responses/dataset_anot.ttl @@ -51,17 +51,23 @@ skos:prefLabel rdfs:label "preferred label"@en ; dcterms:title "Geofabric Contracted Catchments"@en ; prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments" . - dcterms:description "Sandgate area demo Facilities"@en ; + rdfs:label "Sandgate are demo Facilities"@en ; + dcterms:description "Sandgate area demo Facilities"@en ; + dcterms:identifier "sndgt:facilities"^^prez:identifier ; dcterms:identifier "sndgt:facilities"^^prez:identifier ; dcterms:title "Sandgate are demo Facilities"@en ; prez:link "/s/datasets/exds:sandgate/collections/sndgt:facilities" . - dcterms:description "Sandgate flooded areas"@en ; + rdfs:label "Sandgate flooded areas"@en ; + dcterms:description "Sandgate flooded areas"@en ; + dcterms:identifier "sndgt:floods"^^prez:identifier ; dcterms:identifier "sndgt:floods"^^prez:identifier ; dcterms:title "Sandgate flooded areas"@en ; prez:link "/s/datasets/exds:sandgate/collections/sndgt:floods" . - dcterms:description "Sandgate main roads"@en ; + rdfs:label "Sandgate main roads"@en ; + dcterms:description "Sandgate main roads"@en ; + dcterms:identifier "sndgt:roads"^^prez:identifier ; dcterms:identifier "sndgt:roads"^^prez:identifier ; dcterms:title "Sandgate main roads"@en ; prez:link "/s/datasets/exds:sandgate/collections/sndgt:roads" . @@ -69,5 +75,6 @@ skos:prefLabel rdfs:label "preferred label"@en ; geo:Geometry skos:definition "A coherent set of direct positions in space. The positions are held within a Spatial Reference System (SRS)."@en ; skos:prefLabel "Geometry"@en . -dcat:Dataset rdfs:label "Dataset"@en . +dcat:Dataset rdfs:label "Dataset"@en ; + skos:definition "A collection of data, published or curated by a single source, and available for access or download in one or more represenations."@en . diff --git a/tests/data/spaceprez/expected_responses/dataset_listing_anot.ttl b/tests/data/spaceprez/expected_responses/dataset_listing_anot.ttl index 0a557b75..97655a36 100644 --- a/tests/data/spaceprez/expected_responses/dataset_listing_anot.ttl +++ b/tests/data/spaceprez/expected_responses/dataset_listing_anot.ttl @@ -48,5 +48,6 @@ skos:prefLabel rdfs:label "preferred label"@en ; prez:link "/s/datasets/preztest:dataset" . dcat:Dataset rdfs:label "Dataset"@en ; + skos:definition "A collection of data, published or curated by a single source, and available for access or download in one or more represenations."@en ; prez:count 4 . 
diff --git a/tests/data/spaceprez/expected_responses/feature_anot.ttl b/tests/data/spaceprez/expected_responses/feature_anot.ttl index 35ae5c4e..9975acf5 100644 --- a/tests/data/spaceprez/expected_responses/feature_anot.ttl +++ b/tests/data/spaceprez/expected_responses/feature_anot.ttl @@ -10,7 +10,8 @@ dcterms:identifier "exds:sandgate"^^prez:identifier ; dcterms:title "Sandgate example dataset"@en . - dcterms:description "Hydrological catchments that are 'contracted', that is, guarenteed, to appear on multiple Geofabric surface hydrology data products"@en ; + rdfs:label "Geofabric Contracted Catchments"@en ; + dcterms:description "Hydrological catchments that are 'contracted', that is, guarenteed, to appear on multiple Geofabric surface hydrology data products"@en ; dcterms:identifier "sndgt:catchments"^^prez:identifier ; dcterms:title "Geofabric Contracted Catchments"@en . @@ -51,6 +52,24 @@ skos:definition rdfs:label "definition"@en ; skos:prefLabel rdfs:label "preferred label"@en ; skos:definition "The preferred lexical label for a resource, in a given language."@en . +rdfs:member rdfs:label "member" . + +skos:definition rdfs:label "definition"@en ; + skos:definition "A statement or formal explanation of the meaning of a concept."@en . + +skos:prefLabel rdfs:label "preferred label"@en ; + skos:definition "The preferred lexical label for a resource, in a given language."@en . + + a geo:Feature, + ; + rdfs:label "Contracted Catchment 12109444" ; + dcterms:identifier "cc12109444"^^xsd:token, + "sndgt:cc12109444"^^prez:identifier ; + geo:hasGeometry [ a geo:Geometry ; + geo:asGeoJSON "{\"type\": \"Polygon\", \"coordinates\": [[[153.06, -27.28], [153.06, -27.2825], [153.0625, -27.2825], [153.065, -27.2825], [153.065, -27.305], [153.0675, -27.305], [153.0675, -27.31], [153.07, -27.31], [153.07, -27.3125], [153.0725, -27.3125], [153.0725, -27.3175], [153.075, -27.3175], [153.075, -27.32], [153.0775, -27.32], [153.0775, -27.3225], [153.08, -27.3225], [153.085, -27.3225], [153.085, -27.325], [153.0875, -27.325], [153.0875, -27.33], [153.085, -27.33], [153.0825, -27.33], [153.0825, -27.3325], [153.085, -27.3325], [153.085, -27.335], [153.0875, -27.335], [153.09, -27.335], [153.09, -27.3375], [153.0925, -27.3375], [153.0925, -27.34], [153.0975, -27.34], [153.0975, -27.3425], [153.1025, -27.3425], [153.1025, -27.345], [153.1075, -27.345], [153.1075, -27.3475], [153.11, -27.3475], [153.1125, -27.3475], [153.115, -27.3475], [153.115, -27.35], [153.12, -27.35], [153.12, -27.3525], [153.125, -27.3525], [153.125, -27.355], [153.13, -27.355], [153.13, -27.3575], [153.135, -27.3575], [153.135, -27.36], [153.1375, -27.36], [153.1425, -27.36], [153.1475, -27.36], [153.1525, -27.36], [153.1525, -27.3625], [153.155, -27.3625], [153.155, -27.365], [153.1575, -27.365], [153.1575, -27.375], [153.16, -27.375], [153.16, -27.3775], [153.16, -27.38], [153.1575, -27.38], [153.155, -27.38], [153.155, -27.3825], [153.1525, -27.3825], [153.1525, -27.385], [153.15, -27.385], [153.15, -27.3875], [153.145, -27.3875], [153.145, -27.39], [153.1425, -27.39], [153.1425, -27.3925], [153.14, -27.3925], [153.14, -27.395], [153.14, -27.3975], [153.14, -27.4], [153.1375, -27.4], [153.1375, -27.4025], [153.135, -27.4025], [153.135, -27.405], [153.135, -27.4075], [153.135, -27.4125], [153.135, -27.415], [153.13, -27.415], [153.13, -27.4175], [153.1275, -27.4175], [153.1225, -27.4175], [153.1225, -27.42], [153.1175, -27.42], [153.1125, -27.42], [153.1125, -27.4175], [153.11, -27.4175], [153.11, -27.415], [153.1075, -27.415], 
[153.1075, -27.4125], [153.0975, -27.4125], [153.0975, -27.415], [153.0925, -27.415], [153.0875, -27.415], [153.085, -27.415], [153.08, -27.415], [153.08, -27.4125], [153.0775, -27.4125], [153.0775, -27.41], [153.075, -27.41], [153.075, -27.405], [153.07, -27.405], [153.07, -27.4025], [153.0675, -27.4025], [153.0675, -27.4], [153.065, -27.4], [153.065, -27.3975], [153.0625, -27.3975], [153.0625, -27.395], [153.06, -27.395], [153.06, -27.3925], [153.0275, -27.3925], [153.0275, -27.395], [153.025, -27.395], [153.025, -27.3975], [153.0175, -27.3975], [153.0175, -27.4], [153.0125, -27.4], [153.0125, -27.4025], [153.005, -27.4025], [153.005, -27.405], [153.0025, -27.405], [152.9975, -27.405], [152.9975, -27.4025], [152.9925, -27.4025], [152.9925, -27.4], [152.9875, -27.4], [152.9825, -27.4], [152.9825, -27.3975], [152.98, -27.3975], [152.98, -27.3925], [152.975, -27.3925], [152.975, -27.3875], [152.97, -27.3875], [152.96, -27.3875], [152.96, -27.39], [152.955, -27.39], [152.955, -27.3925], [152.945, -27.3925], [152.94, -27.3925], [152.9375, -27.3925], [152.9375, -27.39], [152.925, -27.39], [152.925, -27.385], [152.925, -27.3825], [152.93, -27.3825], [152.9325, -27.3825], [152.9325, -27.38], [152.9375, -27.38], [152.9375, -27.3825], [152.94, -27.3825], [152.94, -27.38], [152.9475, -27.38], [152.9475, -27.3825], [152.9525, -27.3825], [152.9525, -27.38], [152.965, -27.38], [152.9675, -27.38], [152.9675, -27.3775], [152.98, -27.3775], [152.98, -27.375], [152.9825, -27.375], [152.9825, -27.3725], [152.985, -27.3725], [152.985, -27.37], [152.9875, -27.37], [152.9875, -27.3675], [152.99, -27.3675], [152.99, -27.3625], [152.9925, -27.3625], [152.9925, -27.355], [152.995, -27.355], [152.995, -27.3525], [153, -27.3525], [153, -27.35], [153.005, -27.35], [153.01, -27.35], [153.01, -27.3475], [153.0175, -27.3475], [153.0175, -27.335], [153.02, -27.335], [153.02, -27.33], [153.0225, -27.33], [153.0225, -27.3275], [153.025, -27.3275], [153.025, -27.325], [153.0275, -27.325], [153.0275, -27.3225], [153.03, -27.3225], [153.03, -27.32], [153.0325, -27.32], [153.0325, -27.3175], [153.035, -27.3175], [153.035, -27.305], [153.0375, -27.305], [153.0375, -27.3], [153.04, -27.3], [153.04, -27.2975], [153.0425, -27.2975], [153.0425, -27.2825], [153.04, -27.2825], [153.04, -27.28], [153.0425, -27.28], [153.05, -27.28], [153.06, -27.28]]]}"^^geo:geoJSONLiteral ; + geo:asWKT "POLYGON ((153.06 -27.28, 153.06 -27.2825, 153.0625 -27.2825, 153.065 -27.2825, 153.065 -27.305, 153.0675 -27.305, 153.0675 -27.31, 153.07 -27.31, 153.07 -27.3125, 153.0725 -27.3125, 153.0725 -27.3175, 153.075 -27.3175, 153.075 -27.32, 153.0775 -27.32, 153.0775 -27.3225, 153.08 -27.3225, 153.085 -27.3225, 153.085 -27.325, 153.0875 -27.325, 153.0875 -27.33, 153.085 -27.33, 153.0825 -27.33, 153.0825 -27.3325, 153.085 -27.3325, 153.085 -27.335, 153.0875 -27.335, 153.09 -27.335, 153.09 -27.3375, 153.0925 -27.3375, 153.0925 -27.34, 153.0975 -27.34, 153.0975 -27.3425, 153.1025 -27.3425, 153.1025 -27.345, 153.1075 -27.345, 153.1075 -27.3475, 153.11 -27.3475, 153.1125 -27.3475, 153.115 -27.3475, 153.115 -27.35, 153.12 -27.35, 153.12 -27.3525, 153.125 -27.3525, 153.125 -27.355, 153.13 -27.355, 153.13 -27.3575, 153.135 -27.3575, 153.135 -27.36, 153.1375 -27.36, 153.1425 -27.36, 153.1475 -27.36, 153.1525 -27.36, 153.1525 -27.3625, 153.155 -27.3625, 153.155 -27.365, 153.1575 -27.365, 153.1575 -27.375, 153.16 -27.375, 153.16 -27.3775, 153.16 -27.38, 153.1575 -27.38, 153.155 -27.38, 153.155 -27.3825, 153.1525 -27.3825, 153.1525 -27.385, 153.15 -27.385, 153.15 
-27.3875, 153.145 -27.3875, 153.145 -27.39, 153.1425 -27.39, 153.1425 -27.3925, 153.14 -27.3925, 153.14 -27.395, 153.14 -27.3975, 153.14 -27.4, 153.1375 -27.4, 153.1375 -27.4025, 153.135 -27.4025, 153.135 -27.405, 153.135 -27.4075, 153.135 -27.4125, 153.135 -27.415, 153.13 -27.415, 153.13 -27.4175, 153.1275 -27.4175, 153.1225 -27.4175, 153.1225 -27.42, 153.1175 -27.42, 153.1125 -27.42, 153.1125 -27.4175, 153.11 -27.4175, 153.11 -27.415, 153.1075 -27.415, 153.1075 -27.4125, 153.0975 -27.4125, 153.0975 -27.415, 153.0925 -27.415, 153.0875 -27.415, 153.085 -27.415, 153.08 -27.415, 153.08 -27.4125, 153.0775 -27.4125, 153.0775 -27.41, 153.075 -27.41, 153.075 -27.405, 153.07 -27.405, 153.07 -27.4025, 153.0675 -27.4025, 153.0675 -27.4, 153.065 -27.4, 153.065 -27.3975, 153.0625 -27.3975, 153.0625 -27.395, 153.06 -27.395, 153.06 -27.3925, 153.0275 -27.3925, 153.0275 -27.395, 153.025 -27.395, 153.025 -27.3975, 153.0175 -27.3975, 153.0175 -27.4, 153.0125 -27.4, 153.0125 -27.4025, 153.005 -27.4025, 153.005 -27.405, 153.0025 -27.405, 152.9975 -27.405, 152.9975 -27.4025, 152.9925 -27.4025, 152.9925 -27.4, 152.9875 -27.4, 152.9825 -27.4, 152.9825 -27.3975, 152.98 -27.3975, 152.98 -27.3925, 152.975 -27.3925, 152.975 -27.3875, 152.97 -27.3875, 152.96 -27.3875, 152.96 -27.39, 152.955 -27.39, 152.955 -27.3925, 152.945 -27.3925, 152.94 -27.3925, 152.9375 -27.3925, 152.9375 -27.39, 152.925 -27.39, 152.925 -27.385, 152.925 -27.3825, 152.93 -27.3825, 152.9325 -27.3825, 152.9325 -27.38, 152.9375 -27.38, 152.9375 -27.3825, 152.94 -27.3825, 152.94 -27.38, 152.9475 -27.38, 152.9475 -27.3825, 152.9525 -27.3825, 152.9525 -27.38, 152.965 -27.38, 152.9675 -27.38, 152.9675 -27.3775, 152.98 -27.3775, 152.98 -27.375, 152.9825 -27.375, 152.9825 -27.3725, 152.985 -27.3725, 152.985 -27.37, 152.9875 -27.37, 152.9875 -27.3675, 152.99 -27.3675, 152.99 -27.3625, 152.9925 -27.3625, 152.9925 -27.355, 152.995 -27.355, 152.995 -27.3525, 153 -27.3525, 153 -27.35, 153.005 -27.35, 153.01 -27.35, 153.01 -27.3475, 153.0175 -27.3475, 153.0175 -27.335, 153.02 -27.335, 153.02 -27.33, 153.0225 -27.33, 153.0225 -27.3275, 153.025 -27.3275, 153.025 -27.325, 153.0275 -27.325, 153.0275 -27.3225, 153.03 -27.3225, 153.03 -27.32, 153.0325 -27.32, 153.0325 -27.3175, 153.035 -27.3175, 153.035 -27.305, 153.0375 -27.305, 153.0375 -27.3, 153.04 -27.3, 153.04 -27.2975, 153.0425 -27.2975, 153.0425 -27.2825, 153.04 -27.2825, 153.04 -27.28, 153.0425 -27.28, 153.05 -27.28, 153.06 -27.28))"^^geo:wktLiteral ] ; + prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments/items/sndgt:cc12109444" . + geo:Feature skos:definition "A discrete spatial phenomenon in a universe of discourse."@en ; skos:prefLabel "Feature"@en . diff --git a/tests/data/spaceprez/expected_responses/feature_collection_anot.ttl b/tests/data/spaceprez/expected_responses/feature_collection_anot.ttl index 41adf6a4..159b107d 100644 --- a/tests/data/spaceprez/expected_responses/feature_collection_anot.ttl +++ b/tests/data/spaceprez/expected_responses/feature_collection_anot.ttl @@ -8,18 +8,9 @@ dcterms:description "Example floods, roads, catchment and facilities in the Sandgate are"@en ; dcterms:identifier "exds:sandgate"^^prez:identifier ; - dcterms:title "Sandgate example dataset"@en . 
- - a geo:FeatureCollection ; - dcterms:description "Hydrological catchments that are 'contracted', that is, guarenteed, to appear on multiple Geofabric surface hydrology data products"@en ; - dcterms:identifier "catchments"^^xsd:token, - "sndgt:catchments"^^prez:identifier ; - dcterms:title "Geofabric Contracted Catchments"@en ; - geo:hasBoundingBox [ a geo:Geometry ; - geo:asWKT "POLYGON ((152.9075 -27.42,153.16 -27.42,153.16 -27.2775,152.9075 -27.2775,152.9075 -27.42))"^^geo:wktLiteral ] ; - rdfs:member , - ; - prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments" . + dcterms:title "Sandgate example dataset"@en ; + rdfs:member ; + prez:link "/s/datasets/exds:sandgate" . dcterms:description rdfs:label "Description"@en ; dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . diff --git a/tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl b/tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl index 0f7919fa..148a5e7f 100644 --- a/tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl +++ b/tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl @@ -1,6 +1,9 @@ @prefix dcterms: . +@prefix geo: . @prefix prez: . +@prefix rdf: . @prefix rdfs: . +@prefix skos: . @prefix xsd: . dcterms:description "Example floods, roads, catchment and facilities in the Sandgate are"@en ; @@ -10,7 +13,6 @@ , , ; - prez:count 4 ; prez:link "/s/datasets/exds:sandgate" . dcterms:description rdfs:label "Description"@en ; @@ -21,27 +23,52 @@ dcterms:identifier rdfs:label "Identifier"@en ; dcterms:title rdfs:label "Title"@en . +rdf:type rdfs:label "type" . + rdfs:label rdfs:label "label" . rdfs:member rdfs:label "member" . - dcterms:description "Hydrological catchments that are 'contracted', that is, guarenteed, to appear on multiple Geofabric surface hydrology data products"@en ; +skos:definition rdfs:label "definition"@en ; + skos:definition "A statement or formal explanation of the meaning of a concept."@en . + +skos:prefLabel rdfs:label "preferred label"@en ; + skos:definition "The preferred lexical label for a resource, in a given language."@en . + + a geo:FeatureCollection ; + rdfs:label "Geofabric Contracted Catchments"@en ; + dcterms:description "Hydrological catchments that are 'contracted', that is, guarenteed, to appear on multiple Geofabric surface hydrology data products"@en ; + dcterms:identifier "sndgt:catchments"^^prez:identifier ; dcterms:identifier "sndgt:catchments"^^prez:identifier ; dcterms:title "Geofabric Contracted Catchments"@en ; prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments" . - dcterms:description "Sandgate area demo Facilities"@en ; + a geo:FeatureCollection ; + rdfs:label "Sandgate are demo Facilities"@en ; + dcterms:description "Sandgate area demo Facilities"@en ; + dcterms:identifier "sndgt:facilities"^^prez:identifier ; dcterms:identifier "sndgt:facilities"^^prez:identifier ; dcterms:title "Sandgate are demo Facilities"@en ; prez:link "/s/datasets/exds:sandgate/collections/sndgt:facilities" . 
- dcterms:description "Sandgate flooded areas"@en ; + a geo:FeatureCollection ; + rdfs:label "Sandgate flooded areas"@en ; + dcterms:description "Sandgate flooded areas"@en ; + dcterms:identifier "sndgt:floods"^^prez:identifier ; dcterms:identifier "sndgt:floods"^^prez:identifier ; dcterms:title "Sandgate flooded areas"@en ; prez:link "/s/datasets/exds:sandgate/collections/sndgt:floods" . - dcterms:description "Sandgate main roads"@en ; + a geo:FeatureCollection ; + rdfs:label "Sandgate main roads"@en ; + dcterms:description "Sandgate main roads"@en ; + dcterms:identifier "sndgt:roads"^^prez:identifier ; dcterms:identifier "sndgt:roads"^^prez:identifier ; dcterms:title "Sandgate main roads"@en ; prez:link "/s/datasets/exds:sandgate/collections/sndgt:roads" . +geo:FeatureCollection skos:definition "A collection of individual Features."@en ; + skos:prefLabel "Feature Collection"@en ; + prez:count 4 . + + diff --git a/tests/data/spaceprez/expected_responses/feature_listing_anot.ttl b/tests/data/spaceprez/expected_responses/feature_listing_anot.ttl index cc0d1613..72aaa69d 100644 --- a/tests/data/spaceprez/expected_responses/feature_listing_anot.ttl +++ b/tests/data/spaceprez/expected_responses/feature_listing_anot.ttl @@ -1,18 +1,21 @@ @prefix dcterms: . +@prefix geo: . @prefix prez: . +@prefix rdf: . @prefix rdfs: . +@prefix skos: . @prefix xsd: . dcterms:description "Example floods, roads, catchment and facilities in the Sandgate are"@en ; dcterms:identifier "exds:sandgate"^^prez:identifier ; dcterms:title "Sandgate example dataset"@en . - dcterms:description "Hydrological catchments that are 'contracted', that is, guarenteed, to appear on multiple Geofabric surface hydrology data products"@en ; + rdfs:label "Geofabric Contracted Catchments"@en ; + dcterms:description "Hydrological catchments that are 'contracted', that is, guarenteed, to appear on multiple Geofabric surface hydrology data products"@en ; dcterms:identifier "sndgt:catchments"^^prez:identifier ; dcterms:title "Geofabric Contracted Catchments"@en ; rdfs:member , ; - prez:count 2 ; prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments" . dcterms:description rdfs:label "Description"@en ; @@ -23,15 +26,27 @@ dcterms:identifier rdfs:label "Identifier"@en ; dcterms:title rdfs:label "Title"@en . +rdf:type rdfs:label "type" . + rdfs:label rdfs:label "label" . rdfs:member rdfs:label "member" . - rdfs:label "Contracted Catchment 12109444" ; +skos:definition rdfs:label "definition"@en ; + skos:definition "A statement or formal explanation of the meaning of a concept."@en . + +skos:prefLabel rdfs:label "preferred label"@en ; + skos:definition "The preferred lexical label for a resource, in a given language."@en . + + a geo:Feature ; + rdfs:label "Contracted Catchment 12109444" ; + dcterms:identifier "sndgt:cc12109444"^^prez:identifier ; dcterms:identifier "sndgt:cc12109444"^^prez:identifier ; prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments/items/sndgt:cc12109444" . - rdfs:label "Contracted Catchment 12109445" ; + a geo:Feature ; + rdfs:label "Contracted Catchment 12109445" ; + dcterms:identifier "sndgt:cc12109445"^^prez:identifier ; dcterms:identifier "sndgt:cc12109445"^^prez:identifier ; prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments/items/sndgt:cc12109445" . 
diff --git a/tests/data/spaceprez/input/sandgate.ttl b/tests/data/spaceprez/input/sandgate.ttl index 0733b153..de42fbfb 100644 --- a/tests/data/spaceprez/input/sandgate.ttl +++ b/tests/data/spaceprez/input/sandgate.ttl @@ -27,6 +27,7 @@ sand:catchments dcterms:description "Hydrological catchments that are 'contracted', that is, guarenteed, to appear on multiple Geofabric surface hydrology data products"@en ; dcterms:identifier "catchments"^^xsd:token ; dcterms:title "Geofabric Contracted Catchments"@en ; + rdfs:label "Geofabric Contracted Catchments"@en ; rdfs:member sand:cc12109444 , sand:cc12109445 ; @@ -41,6 +42,7 @@ sand:facilities dcterms:description "Sandgate area demo Facilities"@en ; dcterms:identifier "facilities"^^xsd:token ; dcterms:title "Sandgate are demo Facilities"@en ; + rdfs:label "Sandgate are demo Facilities"@en ; rdfs:member sand:bhc , sand:bhca , @@ -63,6 +65,7 @@ sand:floods dcterms:description "Sandgate flooded areas"@en ; dcterms:identifier "floods"^^xsd:token ; dcterms:title "Sandgate flooded areas"@en ; + rdfs:label "Sandgate flooded areas"@en ; rdfs:member sand:f001 , sand:f023 , @@ -79,6 +82,7 @@ sand:roads dcterms:description "Sandgate main roads"@en ; dcterms:identifier "roads"^^xsd:token ; dcterms:title "Sandgate main roads"@en ; + rdfs:label "Sandgate main roads"@en ; rdfs:member sand:bt , sand:fp ; diff --git a/tests/test_count.py b/tests/test_count.py deleted file mode 100644 index c4dc4cc9..00000000 --- a/tests/test_count.py +++ /dev/null @@ -1,84 +0,0 @@ -from pathlib import Path - -import pytest -from fastapi.testclient import TestClient -from pyoxigraph.pyoxigraph import Store - -from prez.app import app -from prez.dependencies import get_repo -from prez.sparql.methods import Repo, PyoxigraphRepo - - -@pytest.fixture(scope="session") -def test_store() -> Store: - # Create a new pyoxigraph Store - store = Store() - - for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"): - store.load(file.read_bytes(), "text/turtle") - - return store - - -@pytest.fixture(scope="session") -def test_repo(test_store: Store) -> Repo: - # Create a PyoxigraphQuerySender using the test_store - return PyoxigraphRepo(test_store) - - -@pytest.fixture(scope="session") -def test_client(test_repo: Repo) -> TestClient: - # Override the dependency to use the test_repo - def override_get_repo(): - return test_repo - - app.dependency_overrides[get_repo] = override_get_repo - - with TestClient(app) as c: - yield c - - # Remove the override to ensure subsequent tests are unaffected - app.dependency_overrides.clear() - - -def get_curie(test_client: TestClient, iri: str) -> str: - response = test_client.get(f"/identifier/curie/{iri}") - if response.status_code != 200: - raise ValueError(f"Failed to retrieve curie for {iri}. 
{response.text}") - return response.text - - -@pytest.mark.parametrize( - "iri, inbound, outbound, count", - [ - [ - "http://linked.data.gov.au/def/borehole-purpose", - "http://www.w3.org/2004/02/skos/core#inScheme", - None, - 0, - ], - [ - "http://linked.data.gov.au/def/borehole-purpose-no-children", - "http://www.w3.org/2004/02/skos/core#inScheme", - None, - 0, - ], - [ - "http://linked.data.gov.au/def/borehole-purpose", - None, - "http://www.w3.org/2004/02/skos/core#hasTopConcept", - 0, - ], - ], -) -def test_count( - test_client: TestClient, - iri: str, - inbound: str | None, - outbound: str | None, - count: int, -): - curie = get_curie(test_client, iri) - params = {"curie": curie, "inbound": inbound, "outbound": outbound} - response = test_client.get(f"/count", params=params) - assert int(response.text) == count diff --git a/tests/test_cql.py b/tests/test_cql.py new file mode 100644 index 00000000..20ee6871 --- /dev/null +++ b/tests/test_cql.py @@ -0,0 +1,54 @@ +import json +from pathlib import Path + +import pytest +from fastapi.testclient import TestClient +from pyoxigraph.pyoxigraph import Store + +from prez.app import app +from prez.dependencies import get_repo +from prez.sparql.methods import Repo, PyoxigraphRepo + + +@pytest.fixture(scope="session") +def test_store() -> Store: + # Create a new pyoxigraph Store + store = Store() + + for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"): + store.load(file.read_bytes(), "text/turtle") + + return store + + +@pytest.fixture(scope="session") +def test_repo(test_store: Store) -> Repo: + # Create a PyoxigraphQuerySender using the test_store + return PyoxigraphRepo(test_store) + + +@pytest.fixture(scope="session") +def client(test_repo: Repo) -> TestClient: + # Override the dependency to use the test_repo + def override_get_repo(): + return test_repo + + app.dependency_overrides[get_repo] = override_get_repo + + with TestClient(app) as c: + yield c + + # Remove the override to ensure subsequent tests are unaffected + app.dependency_overrides.clear() + + +@pytest.mark.parametrize( + "cql_json_filename", + ["example01.json", "example02.json", "example03.json"], +) +def test_simple(client, cql_json_filename): + cql_json = Path(__file__).parent / f"data/cql/input/{cql_json_filename}" + cql_json_as_json = json.loads(cql_json.read_text()) + headers = {"content-type": "application/json"} + response = client.post("/cql", json=cql_json_as_json, headers=headers) + assert response.status_code == 200 diff --git a/tests/test_endpoints_catprez.py b/tests/test_endpoints_catprez.py deleted file mode 100644 index 0740434d..00000000 --- a/tests/test_endpoints_catprez.py +++ /dev/null @@ -1,118 +0,0 @@ -from pathlib import Path - -import pytest -from fastapi.testclient import TestClient -from pyoxigraph.pyoxigraph import Store -from rdflib import Graph, URIRef -from rdflib.namespace import RDF, DCAT - -from prez.app import app -from prez.dependencies import get_repo -from prez.sparql.methods import Repo, PyoxigraphRepo - - -@pytest.fixture(scope="session") -def test_store() -> Store: - # Create a new pyoxigraph Store - store = Store() - - for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"): - store.load(file.read_bytes(), "text/turtle") - - return store - - -@pytest.fixture(scope="session") -def test_repo(test_store: Store) -> Repo: - # Create a PyoxigraphQuerySender using the test_store - return PyoxigraphRepo(test_store) - - -@pytest.fixture(scope="session") -def client(test_repo: Repo) -> TestClient: - # Override the 
dependency to use the test_repo - def override_get_repo(): - return test_repo - - app.dependency_overrides[get_repo] = override_get_repo - - with TestClient(app) as c: - yield c - - # Remove the override to ensure subsequent tests are unaffected - app.dependency_overrides.clear() - - -@pytest.fixture(scope="session") -def a_catalog_link(client): - # get link for first catalog - r = client.get("/c/catalogs") - g = Graph().parse(data=r.text) - member_uri = g.value(None, RDF.type, DCAT.Catalog) - link = g.value(member_uri, URIRef(f"https://prez.dev/link", None)) - return link - - -@pytest.fixture(scope="session") -def a_resource_link(client, a_catalog_link): - r = client.get(a_catalog_link) - g = Graph().parse(data=r.text) - links = g.objects(subject=None, predicate=URIRef(f"https://prez.dev/link")) - for link in links: - if link != a_catalog_link: - return link - - -# @pytest.mark.xfail(reason="passes locally - setting to xfail pending test changes to pyoxigraph") -def test_catalog_listing_anot(client): - r = client.get(f"/c/catalogs?_mediatype=text/anot+turtle") - response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/catprez/expected_responses/catalog_listing_anot.ttl" - ) - assert response_graph.isomorphic(expected_graph), print( - f"Missing:{(expected_graph - response_graph).serialize()}" - f"Extra:{(response_graph - expected_graph).serialize()}" - ) - - -def test_catalog_anot(client, a_catalog_link): - r = client.get(f"/c/catalogs/pd:democat?_mediatype=text/anot+turtle") - response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/catprez/expected_responses/catalog_anot.ttl" - ) - assert response_graph.isomorphic(expected_graph), print( - f"Missing:{(expected_graph - response_graph).serialize()}" - f"Extra:{(response_graph - expected_graph).serialize()}" - ) - - -def test_resource_listing_anot(client, a_catalog_link): - r = client.get( - f"{a_catalog_link}/resources?_mediatype=text/anot+turtle&ordering-pred=http://purl.org/dc/terms/title" - ) - response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/catprez/expected_responses/resource_listing_anot.ttl" - ) - assert response_graph.isomorphic(expected_graph), print( - f"Missing:{(expected_graph - response_graph).serialize()}" - f"Extra:{(response_graph - expected_graph).serialize()}" - ) - - -def test_resource_anot(client, a_resource_link): - r = client.get(f"{a_resource_link}?_mediatype=text/anot+turtle") - response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/catprez/expected_responses/resource_anot.ttl" - ) - assert response_graph.isomorphic(expected_graph), print( - f"Missing:{(expected_graph - response_graph).serialize()}" - f"Extra:{(response_graph - expected_graph).serialize()}" - ) diff --git a/tests/test_endpoints_profiles.py b/tests/test_endpoints_profiles.py deleted file mode 100644 index 2dfcbe6c..00000000 --- a/tests/test_endpoints_profiles.py +++ /dev/null @@ -1,78 +0,0 @@ -from pathlib import Path - -import pytest -from fastapi.testclient import TestClient -from pyoxigraph.pyoxigraph import Store -from rdflib import Graph, URIRef -from rdflib.namespace import RDF, PROF - -from prez.app import app -from prez.dependencies import get_repo -from prez.sparql.methods import Repo, PyoxigraphRepo - - -@pytest.fixture(scope="session") -def test_store() -> Store: - # Create a 
new pyoxigraph Store - store = Store() - - for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"): - store.load(file.read_bytes(), "text/turtle") - - return store - - -@pytest.fixture(scope="session") -def test_repo(test_store: Store) -> Repo: - # Create a PyoxigraphQuerySender using the test_store - return PyoxigraphRepo(test_store) - - -@pytest.fixture(scope="session") -def client(test_repo: Repo) -> TestClient: - # Override the dependency to use the test_repo - def override_get_repo(): - return test_repo - - app.dependency_overrides[get_repo] = override_get_repo - - with TestClient(app) as c: - yield c - - # Remove the override to ensure subsequent tests are unaffected - app.dependency_overrides.clear() - - -def test_profile(client): - # check the example remote profile is loaded - r = client.get("/profiles") - g = Graph().parse(data=r.text) - assert (URIRef("https://prez.dev/profile/prez"), RDF.type, PROF.Profile) in g - - -def test_cp_profile(client): - # check the example remote profile is loaded - r = client.get("/profiles/prez:CatPrezProfile") - g = Graph().parse(data=r.text) - assert (URIRef("https://prez.dev/CatPrezProfile"), RDF.type, PROF.Profile) in g - - -def test_sp_profile(client): - # check the example remote profile is loaded - r = client.get("/profiles/prez:SpacePrezProfile") - g = Graph().parse(data=r.text) - assert (URIRef("https://prez.dev/SpacePrezProfile"), RDF.type, PROF.Profile) in g - - -def test_vp_profile(client): - # check the example remote profile is loaded - r = client.get("/profiles/prez:VocPrezProfile") - g = Graph().parse(data=r.text) - assert (URIRef("https://prez.dev/VocPrezProfile"), RDF.type, PROF.Profile) in g - - -def test_pp_profile(client): - # check the example remote profile is loaded - r = client.get("/profiles/prez:profiles") - g = Graph().parse(data=r.text) - assert (URIRef("https://prez.dev/profiles"), RDF.type, PROF.Profile) in g diff --git a/tests/test_endpoints_spaceprez.py b/tests/test_endpoints_spaceprez.py deleted file mode 100644 index 0dcb5ab8..00000000 --- a/tests/test_endpoints_spaceprez.py +++ /dev/null @@ -1,146 +0,0 @@ -from pathlib import Path - -import pytest -from fastapi.testclient import TestClient -from pyoxigraph.pyoxigraph import Store -from rdflib import Graph, URIRef -from rdflib.namespace import RDF, DCAT, RDFS - -from prez.app import app -from prez.dependencies import get_repo -from prez.sparql.methods import Repo, PyoxigraphRepo - - -@pytest.fixture(scope="session") -def test_store() -> Store: - # Create a new pyoxigraph Store - store = Store() - - for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"): - store.load(file.read_bytes(), "text/turtle") - - return store - - -@pytest.fixture(scope="session") -def test_repo(test_store: Store) -> Repo: - # Create a PyoxigraphQuerySender using the test_store - return PyoxigraphRepo(test_store) - - -@pytest.fixture(scope="session") -def client(test_repo: Repo) -> TestClient: - # Override the dependency to use the test_repo - def override_get_repo(): - return test_repo - - app.dependency_overrides[get_repo] = override_get_repo - - with TestClient(app) as c: - yield c - - # Remove the override to ensure subsequent tests are unaffected - app.dependency_overrides.clear() - - -@pytest.fixture(scope="session") -def a_dataset_link(client): - r = client.get("/s/datasets") - g = Graph().parse(data=r.text) - member_uri = g.value(None, RDF.type, DCAT.Dataset) - link = g.value(member_uri, URIRef(f"https://prez.dev/link", None)) - return link - - 
-@pytest.fixture(scope="session") -def an_fc_link(client, a_dataset_link): - r = client.get(f"{a_dataset_link}/collections") - g = Graph().parse(data=r.text) - member_uri = g.value( - URIRef("http://example.com/datasets/sandgate"), RDFS.member, None - ) - link = g.value(member_uri, URIRef(f"https://prez.dev/link", None)) - return link - - -@pytest.fixture(scope="session") -def a_feature_link(client, an_fc_link): - r = client.get(f"{an_fc_link}/items") - g = Graph().parse(data=r.text) - member_uri = g.value( - URIRef("http://example.com/datasets/sandgate/catchments"), RDFS.member, None - ) - link = g.value(member_uri, URIRef(f"https://prez.dev/link", None)) - return link - - -def test_dataset_anot(client, a_dataset_link): - r = client.get(f"{a_dataset_link}?_mediatype=text/anot+turtle") - response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/spaceprez/expected_responses/dataset_anot.ttl" - ) - assert response_graph.isomorphic(expected_graph), print( - f"Graph delta:{(expected_graph - response_graph).serialize()}" - ) - - -def test_feature_collection_anot(client, an_fc_link): - r = client.get(f"{an_fc_link}?_mediatype=text/anot+turtle") - response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/spaceprez/expected_responses/feature_collection_anot.ttl" - ) - assert response_graph.isomorphic(expected_graph), print( - f"Graph delta:{(expected_graph - response_graph).serialize()}" - ) - - -def test_feature_anot(client, a_feature_link): - r = client.get(f"{a_feature_link}?_mediatype=text/anot+turtle") - response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/spaceprez/expected_responses/feature_anot.ttl" - ) - assert response_graph.isomorphic(expected_graph), print( - f"Graph delta:{(expected_graph - response_graph).serialize()}" - ) - - -def test_dataset_listing_anot(client): - r = client.get("/s/datasets?_mediatype=text/anot+turtle") - response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/spaceprez/expected_responses/dataset_listing_anot.ttl" - ) - assert response_graph.isomorphic(expected_graph), print( - f"Graph delta:{(expected_graph - response_graph).serialize()}" - ) - - -def test_feature_collection_listing_anot(client, a_dataset_link): - r = client.get(f"{a_dataset_link}/collections?_mediatype=text/anot+turtle") - response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl" - ) - assert response_graph.isomorphic(expected_graph), print( - f"Graph delta:{(expected_graph - response_graph).serialize()}" - ) - - -def test_feature_listing_anot(client, an_fc_link): - r = client.get(f"{an_fc_link}/items?_mediatype=text/anot+turtle") - response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/spaceprez/expected_responses/feature_listing_anot.ttl" - ) - assert response_graph.isomorphic(expected_graph), print( - f"Graph delta:{(expected_graph - response_graph).serialize()}" - ) diff --git a/tests/test_endpoints_vocprez.py b/tests/test_endpoints_vocprez.py deleted file mode 100644 index 97ef532c..00000000 --- a/tests/test_endpoints_vocprez.py +++ /dev/null @@ -1,249 +0,0 @@ -from pathlib import Path - -import pytest -from fastapi.testclient import 
TestClient
-from pyoxigraph.pyoxigraph import Store
-from rdflib import Graph, URIRef
-from rdflib.compare import isomorphic
-
-from prez.app import app
-from prez.dependencies import get_repo
-from prez.sparql.methods import Repo, PyoxigraphRepo
-
-
-@pytest.fixture(scope="session")
-def test_store() -> Store:
-    # Create a new pyoxigraph Store
-    store = Store()
-
-    for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"):
-        store.load(file.read_bytes(), "text/turtle")
-
-    return store
-
-
-@pytest.fixture(scope="session")
-def test_repo(test_store: Store) -> Repo:
-    # Create a PyoxigraphRepo using the test_store
-    return PyoxigraphRepo(test_store)
-
-
-@pytest.fixture(scope="session")
-def test_client(test_repo: Repo) -> TestClient:
-    # Override the dependency to use the test_repo
-    def override_get_repo():
-        return test_repo
-
-    app.dependency_overrides[get_repo] = override_get_repo
-
-    with TestClient(app) as c:
-        yield c
-
-    # Remove the override to ensure subsequent tests are unaffected
-    app.dependency_overrides.clear()
-
-
-@pytest.fixture(scope="session")
-def links(test_client: TestClient):
-    r = test_client.get("/v/collection")
-    g = Graph().parse(data=r.text)
-    vocab_uri = URIRef("http://resource.geosciml.org/classifier/cgi/contacttype")
-    vocab_link = g.value(vocab_uri, URIRef("https://prez.dev/link"))
-    # vocab_uri = g.value(None, RDF.type, SKOS.ConceptScheme)
-    # vocab_link = g.value(member_uri, URIRef(f"https://prez.dev/link", None))
-    return vocab_link
-
-
-def get_curie(test_client: TestClient, iri: str) -> str:
-    response = test_client.get(f"/identifier/curie/{iri}")
-    if response.status_code != 200:
-        raise ValueError(f"Failed to retrieve curie for {iri}. {response.text}")
-    return response.text
-
-
-def test_vocab_listing(test_client: TestClient):
-    response = test_client.get(f"/v/vocab?_mediatype=text/anot+turtle")
-    response_graph = Graph().parse(data=response.text)
-    expected_graph = Graph().parse(
-        Path(__file__).parent
-        / "../tests/data/vocprez/expected_responses/vocab_listing_anot.ttl"
-    )
-    assert isomorphic(expected_graph, response_graph), print(
-        f"Missing triples\n{(expected_graph - response_graph).serialize()}",
-        f"Extra triples\n{(response_graph - expected_graph).serialize()}",
-    )
-
-
-@pytest.mark.xfail(
-    reason="oxigraph's DESCRIBE does not include blank nodes so the expected response is not what will "
-    "be returned - route should not need describe query"
-)
-@pytest.mark.parametrize(
-    "iri, expected_result_file, description",
-    [
-        [
-            "http://linked.data.gov.au/def2/borehole-purpose",
-            "concept_scheme_with_children.ttl",
-            "Return concept scheme and a prez:childrenCount of 8",
-        ],
-        [
-            "http://linked.data.gov.au/def2/borehole-purpose-no-children",
-            "concept_scheme_no_children.ttl",
-            "Return concept scheme and a prez:childrenCount of 0",
-        ],
-    ],
-)
-def test_concept_scheme(
-    test_client: TestClient, iri: str, expected_result_file: str, description: str
-):
-    curie = get_curie(test_client, iri)
-
-    response = test_client.get(f"/v/vocab/{curie}?_mediatype=text/anot+turtle")
-    response_graph = Graph(bind_namespaces="rdflib").parse(data=response.text)
-    expected_graph = Graph().parse(
-        Path(__file__).parent
-        / f"../tests/data/vocprez/expected_responses/{expected_result_file}"
-    )
-    assert isomorphic(expected_graph, response_graph), f"Failed test: {description}"
-
-
-# bedding surface works if stepped through - this is likely another case of the local SPARQL store not being able to
-# process the queries in parallel
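Every assertion in these deleted modules compares graphs with rdflib.compare.isomorphic (or Graph.isomorphic) rather than plain triple-set equality, because blank nodes receive fresh identifiers on every parse, so equivalent responses rarely match triple-for-triple. A small illustration with toy data, not prez fixtures:

from rdflib import Graph
from rdflib.compare import graph_diff, isomorphic

a = Graph().parse(data="_:b1 <http://ex/p> <http://ex/o> .", format="nt")
b = Graph().parse(data="_:b2 <http://ex/p> <http://ex/o> .", format="nt")

# Same shape, different blank-node labels: isomorphic ignores the labels.
assert isomorphic(a, b)

# graph_diff returns (in_both, only_in_first, only_in_second); serializing the
# two "only" graphs yields the missing/extra-triples failure messages used above.
in_both, only_in_a, only_in_b = graph_diff(a, b)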
-@pytest.mark.xfail(reason="query error + issue with oxigraph")
-@pytest.mark.parametrize(
-    "iri, expected_result_file, description",
-    [
-        [
-            "http://linked.data.gov.au/def2/borehole-purpose",
-            "concept_scheme_top_concepts_with_children.ttl",
-            "Return concept scheme and a prez:childrenCount of 8",
-        ],
-        [
-            "http://linked.data.gov.au/def2/borehole-purpose-no-children",
-            "empty.ttl",
-            "Return concept scheme and a prez:childrenCount of 0",
-        ],
-        [
-            "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure",
-            "beddingsurfacestructure_top_concepts.ttl",
-            "Top concepts have the correct annotation values for reg:status and color",
-        ],
-    ],
-)
-def test_concept_scheme_top_concepts(
-    test_client: TestClient, iri: str, expected_result_file: str, description: str
-):
-    curie = get_curie(test_client, iri)
-    response = test_client.get(
-        f"/v/vocab/{curie}/top-concepts?_mediatype=text/anot+turtle"
-    )
-    response_graph = Graph(bind_namespaces="rdflib").parse(data=response.text)
-    expected_graph = Graph().parse(
-        Path(__file__).parent
-        / f"../tests/data/vocprez/expected_responses/{expected_result_file}"
-    )
-    assert isomorphic(expected_graph, response_graph), f"Failed test: {description}"
-
-
-@pytest.mark.xfail(
-    reason="issue with oxigraph counting children that do not exist (giving childrenCount 1; should be 0)"
-)
-@pytest.mark.parametrize(
-    "concept_scheme_iri, concept_iri, expected_result_file, description",
-    [
-        [
-            "http://linked.data.gov.au/def2/borehole-purpose",
-            "http://linked.data.gov.au/def/borehole-purpose/coal",
-            "concept-with-2-narrower-concepts.ttl",
-            "Return concept with 2 narrower concepts.",
-        ],
-        [
-            "http://linked.data.gov.au/def2/borehole-purpose",
-            "http://linked.data.gov.au/def2/borehole-purpose/open-cut-coal-mining",
-            "empty.ttl",
-            "Return nothing, no children.",
-        ],
-    ],
-)
-def test_concept_narrowers(
-    test_client: TestClient,
-    concept_scheme_iri: str,
-    concept_iri: str,
-    expected_result_file: str,
-    description: str,
-):
-    concept_scheme_curie = get_curie(test_client, concept_scheme_iri)
-    concept_curie = get_curie(test_client, concept_iri)
-    response = test_client.get(
-        f"/v/vocab/{concept_scheme_curie}/{concept_curie}/narrowers?_mediatype=text/anot+turtle"
-    )
-    response_graph = Graph(bind_namespaces="rdflib").parse(data=response.text)
-    expected_graph = Graph().parse(
-        Path(__file__).parent
-        / f"../tests/data/vocprez/expected_responses/{expected_result_file}"
-    )
-    assert isomorphic(expected_graph, response_graph), f"Failed test: {description}"
-
-
-@pytest.mark.parametrize(
-    "concept_scheme_iri, concept_iri, expected_result_file, description",
-    [
-        # [
-        #     "http://linked.data.gov.au/def/borehole-purpose",
-        #     "http://linked.data.gov.au/def/borehole-purpose/coal",
-        #     "concept-coal.ttl",
-        #     "Return the coal concept and its properties.",
-        # ],
-        [
-            "http://linked.data.gov.au/def/borehole-purpose",
-            "http://linked.data.gov.au/def/borehole-purpose/open-cut-coal-mining",
-            "concept-open-cut-coal-mining.ttl",
-            "Return the open-cut-coal-mining concept and its properties.",
-        ],
-    ],
)
-def test_concept(
-    test_client: TestClient,
-    concept_scheme_iri: str,
-    concept_iri: str,
-    expected_result_file: str,
-    description: str,
-):
-    concept_scheme_curie = get_curie(test_client, concept_scheme_iri)
-    concept_curie = get_curie(test_client, concept_iri)
-    response = test_client.get(
-        f"/v/vocab/{concept_scheme_curie}/{concept_curie}?_mediatype=text/anot+turtle"
-    )
-    response_graph = Graph(bind_namespaces="rdflib").parse(data=response.text)
-
expected_graph = Graph().parse( - Path(__file__).parent - / f"../tests/data/vocprez/expected_responses/{expected_result_file}" - ) - assert isomorphic(expected_graph, response_graph) - - -def test_collection_listing(test_client: TestClient): - response = test_client.get(f"/v/collection?_mediatype=text/anot+turtle") - response_graph = Graph().parse(data=response.text, format="turtle") - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/vocprez/expected_responses/collection_listing_anot.ttl" - ) - assert isomorphic(expected_graph, response_graph) - - -# TODO figure out why this fails and yet when run via debugger, passes.. -def test_collection_listing_item(test_client: TestClient, links): - response = test_client.get("/v/collection/cgi:contacttype") - assert response.status_code == 200 - response_graph = Graph().parse(data=response.text, format="turtle") - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/vocprez/expected_responses/collection_listing_item.ttl" - ) - assert isomorphic(response_graph, expected_graph), print( - f"RESPONSE GRAPH\n{response_graph.serialize()}," - f"EXPECTED GRAPH\n{expected_graph.serialize()}", - f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", - f"EXTRA TRIPLES\n{(response_graph - expected_graph).serialize()}", - ) diff --git a/tests/test_search.py b/tests/test_search.py index 60e2fbde..f650a739 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -115,7 +115,7 @@ def test_search_filter_to_focus_multiple(client: TestClient): @pytest.mark.xfail( - reason="This generates a valid query that has been tested in Fuseki, which RDFLib struggles with" + reason="This generates a valid query that has been tested in Fuseki, which RDFLib and Pyoxigraph cannot run(!)" ) def test_search_focus_to_filter_multiple(client: TestClient): base_url = "/search" From f70c1200d3df6f2759238187eec8c58ea99a440c Mon Sep 17 00:00:00 2001 From: david Date: Wed, 6 Dec 2023 17:40:56 +1000 Subject: [PATCH 02/25] MVP working with all flavours --- Dockerfile | 2 +- poetry.lock | 471 +++++------ prez/app.py | 15 +- prez/config.py | 37 +- prez/dependencies.py | 40 +- prez/models/object_item.py | 34 + prez/models/profiles_item.py | 7 +- .../new_endpoints/cql_endpoints.ttl | 5 +- .../new_endpoints/ogc_catprez_endpoints.ttl | 81 ++ .../new_endpoints/ogc_endpoints.ttl | 58 -- ...points.ttl => ogc_spaceprez_endpoints.ttl} | 52 +- .../new_endpoints/ogc_vocprez_endpoints.ttl | 126 +++ .../new_endpoints/system_endpoints.ttl | 24 + ...oints.ttl => vocprez_endpoints.ttl.unused} | 25 +- .../profiles/_dd_to_refactor.ttl | 57 ++ .../profiles/catprez_default_profiles.ttl | 89 --- ...gc_profile.ttl => ogc_records_profile.ttl} | 41 +- .../profiles/prez_default_profiles.ttl | 76 +- .../profiles/spaceprez_default_profiles.ttl | 52 +- .../profiles/vocprez_default_profiles.ttl | 209 ----- prez/routers/catprez.py | 84 -- prez/routers/cql.py | 7 +- prez/routers/management.py | 16 - prez/routers/object.py | 26 +- prez/routers/ogc_catprez.py | 92 ++- prez/routers/ogc_spaceprez.py | 169 ++++ prez/routers/ogc_vocprez.py | 171 ++++ prez/routers/profiles.py | 40 +- prez/routers/search.py | 4 +- prez/routers/spaceprez.py | 123 --- prez/routers/sparql.py | 3 +- prez/routers/vocprez.py | 301 ------- prez/routers/vocprez.py.unused | 215 +++++ prez/services/link_generation.py | 35 +- prez/services/listings.py | 95 +-- prez/services/model_methods.py | 7 +- prez/services/objects.py | 83 +- prez/sparql/methods.py | 22 +- prez/sparql/objects_listings.py | 
45 +- prez/url.py | 6 +- pyproject.toml | 7 +- temp/cql2sparql.py | 5 +- temp/grammar.py | 740 ++++++++++++++++-- temp/shacl2sparql.py | 252 ++++-- test_data/catprez.ttl | 21 + test_data/spaceprez.ttl | 27 + test_data/vocprez.ttl | 37 + tests/conftest.py | 4 + .../expected_responses/catalog_anot.ttl | 205 ----- .../top_level_catalog_anot.ttl | 453 +++++++++++ ...ttl => top_level_catalog_listing_anot.ttl} | 21 +- tests/data/catprez/input/AAC-SA.ttl | 51 -- tests/data/catprez/input/_idn-ac.ttl | 23 - tests/data/catprez/input/_idn-dc.ttl | 25 - tests/data/catprez/input/_system-catalog.ttl | 37 - tests/data/catprez/input/agents.ttl | 133 ---- tests/data/catprez/input/catalog.ttl | 20 + tests/data/catprez/input/labels.ttl | 13 - tests/data/catprez/input/pd_democat.ttl | 716 ----------------- tests/data/cql/input/example06a.json | 26 - tests/data/cql/input/example08.json | 48 ++ tests/data/cql/input/example09.json | 7 + tests/data/cql/input/example10.json | 7 + tests/data/cql/input/example11.json | 7 + tests/data/cql/input/example12.json | 7 + tests/data/cql/input/example14.json | 7 + tests/data/cql/input/example15.json | 19 + tests/data/cql/input/example17.json | 31 + tests/data/cql/input/example29.json | 7 + tests/data/cql/input/example31.json | 7 + tests/data/cql/input/example32.json | 7 + tests/data/cql/input/example33.json | 7 + tests/data/cql/input/example34.json | 7 + tests/data/cql/input/example35.json | 7 + tests/data/cql/input/example39.json | 7 + .../expected_responses/dataset_anot.ttl | 9 +- .../expected_responses/feature_anot.ttl | 20 +- .../feature_collection_anot.ttl | 17 +- .../feature_collection_listing_anot.ttl | 5 - .../feature_listing_anot.ttl | 5 +- tests/test_count.py | 84 ++ tests/test_cql.py | 23 +- tests/test_endpoints_catprez.py | 139 ++++ tests/test_endpoints_ok.py | 100 +++ tests/test_endpoints_profiles.py | 71 ++ tests/test_endpoints_spaceprez.py | 165 ++++ tests/test_endpoints_vocprez.py | 276 +++++++ tests/test_object_listings.py | 32 - tests/test_search.py | 2 +- tests/test_sparql.py | 14 +- 90 files changed, 4010 insertions(+), 2995 deletions(-) create mode 100644 prez/reference_data/new_endpoints/ogc_catprez_endpoints.ttl delete mode 100644 prez/reference_data/new_endpoints/ogc_endpoints.ttl rename prez/reference_data/new_endpoints/{spaceprez_endpoints.ttl => ogc_spaceprez_endpoints.ttl} (63%) create mode 100644 prez/reference_data/new_endpoints/ogc_vocprez_endpoints.ttl create mode 100644 prez/reference_data/new_endpoints/system_endpoints.ttl rename prez/reference_data/new_endpoints/{vocprez_endpoints.ttl => vocprez_endpoints.ttl.unused} (86%) create mode 100644 prez/reference_data/profiles/_dd_to_refactor.ttl delete mode 100644 prez/reference_data/profiles/catprez_default_profiles.ttl rename prez/reference_data/profiles/{ogc_profile.ttl => ogc_records_profile.ttl} (64%) delete mode 100644 prez/reference_data/profiles/vocprez_default_profiles.ttl delete mode 100644 prez/routers/catprez.py create mode 100644 prez/routers/ogc_spaceprez.py create mode 100644 prez/routers/ogc_vocprez.py delete mode 100644 prez/routers/spaceprez.py delete mode 100644 prez/routers/vocprez.py create mode 100644 prez/routers/vocprez.py.unused create mode 100644 test_data/catprez.ttl create mode 100644 test_data/spaceprez.ttl create mode 100644 test_data/vocprez.ttl create mode 100644 tests/conftest.py delete mode 100644 tests/data/catprez/expected_responses/catalog_anot.ttl create mode 100644 tests/data/catprez/expected_responses/top_level_catalog_anot.ttl rename 
tests/data/catprez/expected_responses/{catalog_listing_anot.ttl => top_level_catalog_listing_anot.ttl} (81%) delete mode 100644 tests/data/catprez/input/AAC-SA.ttl delete mode 100644 tests/data/catprez/input/_idn-ac.ttl delete mode 100644 tests/data/catprez/input/_idn-dc.ttl delete mode 100644 tests/data/catprez/input/_system-catalog.ttl delete mode 100644 tests/data/catprez/input/agents.ttl create mode 100644 tests/data/catprez/input/catalog.ttl delete mode 100644 tests/data/catprez/input/labels.ttl delete mode 100644 tests/data/catprez/input/pd_democat.ttl delete mode 100644 tests/data/cql/input/example06a.json create mode 100644 tests/data/cql/input/example08.json create mode 100644 tests/data/cql/input/example09.json create mode 100644 tests/data/cql/input/example10.json create mode 100644 tests/data/cql/input/example11.json create mode 100644 tests/data/cql/input/example12.json create mode 100644 tests/data/cql/input/example14.json create mode 100644 tests/data/cql/input/example15.json create mode 100644 tests/data/cql/input/example17.json create mode 100644 tests/data/cql/input/example29.json create mode 100644 tests/data/cql/input/example31.json create mode 100644 tests/data/cql/input/example32.json create mode 100644 tests/data/cql/input/example33.json create mode 100644 tests/data/cql/input/example34.json create mode 100644 tests/data/cql/input/example35.json create mode 100644 tests/data/cql/input/example39.json create mode 100644 tests/test_count.py create mode 100644 tests/test_endpoints_catprez.py create mode 100644 tests/test_endpoints_ok.py create mode 100644 tests/test_endpoints_profiles.py create mode 100644 tests/test_endpoints_spaceprez.py create mode 100644 tests/test_endpoints_vocprez.py delete mode 100644 tests/test_object_listings.py diff --git a/Dockerfile b/Dockerfile index aabdef4a..9288b542 100644 --- a/Dockerfile +++ b/Dockerfile @@ -45,7 +45,7 @@ ENV PYTHONDONTWRITEBYTECODE=1 \ WORKDIR /app COPY ./prez /app/prez COPY ./temp /app/temp -COPY ./rdf /app/rdf +COPY ./test_data /app/test_data # copy the pyproject.toml as the application reads the version from here COPY pyproject.toml . diff --git a/poetry.lock b/poetry.lock index 9eeba6f8..47479f51 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,11 +1,38 @@ # This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
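Most of the poetry.lock churn below is routine version bumps and hash refreshes, but one entry is a genuine migration: pydantic moves from the 1.x line onto 2.5.2, whose model API is renamed relative to v1. A sketch of the calls the application code has to adopt under that migration; the Profile model here is hypothetical, not a prez class:

from pydantic import BaseModel


class Profile(BaseModel):
    uri: str
    label: str | None = None


p = Profile(uri="https://prez.dev/profile/prez")
p.model_dump()                        # v2 replacement for v1's p.dict()
p.model_dump_json()                   # v2 replacement for v1's p.json()
Profile.model_validate({"uri": "x"})  # v2 replacement for Profile.parse_obj(...)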
+[[package]] +name = "aiocache" +version = "0.12.2" +description = "multi backend asyncio cache" +optional = false +python-versions = "*" +files = [ + {file = "aiocache-0.12.2-py2.py3-none-any.whl", hash = "sha256:9b6fa30634ab0bfc3ecc44928a91ff07c6ea16d27d55469636b296ebc6eb5918"}, + {file = "aiocache-0.12.2.tar.gz", hash = "sha256:b41c9a145b050a5dcbae1599f847db6dd445193b1f3bd172d8e0fe0cb9e96684"}, +] + +[package.extras] +memcached = ["aiomcache (>=0.5.2)"] +msgpack = ["msgpack (>=0.5.5)"] +redis = ["redis (>=4.2.0)"] + +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + [[package]] name = "anyio" version = "3.7.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, @@ -91,7 +118,7 @@ name = "cfgv" version = "3.4.0" description = "Validate configuration and produce human readable error messages." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, @@ -212,10 +239,10 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "cloudpickle" -version = "2.2.1" -description = "Extended pickling support for Python objects" +version = "3.0.0" +description = "Pickler class to extend the standard pickle.Pickler functionality" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ {file = "cloudpickle-3.0.0-py3-none-any.whl", hash = "sha256:246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7"}, {file = "cloudpickle-3.0.0.tar.gz", hash = "sha256:996d9a482c6fb4f33c1a35335cf8afd065d2a56e973270364840712d9131a882"}, @@ -362,48 +389,48 @@ typing = ["typing-extensions (>=4.8)"] [[package]] name = "frozendict" -version = "2.3.8" +version = "2.3.10" description = "A simple immutable dictionary" optional = false python-versions = ">=3.6" files = [ - {file = "frozendict-2.3.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d188d062084fba0e4bf32719ff7380b26c050b932ff164043ce82ab90587c52b"}, - {file = "frozendict-2.3.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f2a4e818ac457f6354401dcb631527af25e5a20fcfc81e6b5054b45fc245caca"}, - {file = "frozendict-2.3.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a506d807858fa961aaa5b48dab6154fdc6bd045bbe9310788bbff141bb42d13"}, - {file = "frozendict-2.3.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:750632cc890d8ee9484fe6d31b261159144b6efacc08e1317fe46accd1410373"}, - {file = "frozendict-2.3.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ee5fe2658a8ac9a57f748acaf563f6a47f80b8308cbf0a04fac0ba057d41f75"}, - 
{file = "frozendict-2.3.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23c4bb46e6b8246e1e7e49b5593c2bc09221db0d8f31f7c092be8dfb42b9e620"}, - {file = "frozendict-2.3.8-cp310-cp310-win_amd64.whl", hash = "sha256:c31abc8acea309b132dde441856829f6003a3d242da8b54bce4c0f2a3c8c63f0"}, - {file = "frozendict-2.3.8-cp310-cp310-win_arm64.whl", hash = "sha256:9ea5520e85447ff8d4681e181941e482662817ccba921b7cb3f87922056d892a"}, - {file = "frozendict-2.3.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f83fed36497af9562ead5e9fb8443224ba2781786bd3b92b1087cb7d0ff20135"}, - {file = "frozendict-2.3.8-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e27c5c1d29d0eda7979253ec88abc239da1313b38f39f4b16984db3b3e482300"}, - {file = "frozendict-2.3.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4c785de7f1a13f15963945f400656b18f057c2fc76c089dacf127a2bb188c03"}, - {file = "frozendict-2.3.8-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8cf35ddd25513428ec152614def9696afb93ae5ec0eb54fa6aa6206eda77ac4c"}, - {file = "frozendict-2.3.8-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ffc684773de7c88724788fa9787d0016fd75830412d58acbd9ed1a04762c675b"}, - {file = "frozendict-2.3.8-cp36-cp36m-win_amd64.whl", hash = "sha256:4c258aab9c8488338634f2ec670ef049dbf0ab0e7a2fa9bc2c7b5009cb614801"}, - {file = "frozendict-2.3.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47fc26468407fdeb428cfc89495b7921419e670355c21b383765482fdf6c5c14"}, - {file = "frozendict-2.3.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ea638228692db2bf94bce40ea4b25f4077588497b516bd16576575560094bd9"}, - {file = "frozendict-2.3.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a75bf87e76c4386caecdbdd02a99e53ad43a6b5c38fb3d5a634a9fc9ce41462"}, - {file = "frozendict-2.3.8-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ed5a6c5c7a0f57269577c2a338a6002949aea21a23b7b7d06da7e7dced8b605b"}, - {file = "frozendict-2.3.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d086440328a465dea9bef2dbad7548d75d1a0a0d21f43a08c03e1ec79ac5240e"}, - {file = "frozendict-2.3.8-cp37-cp37m-win_amd64.whl", hash = "sha256:0bc4767e2f83db5b701c787e22380296977368b0c57e485ca71b2eedfa11c4a3"}, - {file = "frozendict-2.3.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:638cf363d3cbca31a341503cf2219eac52a5f5140449676fae3d9644cd3c5487"}, - {file = "frozendict-2.3.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b2fd8ce36277919b36e3c834d2389f3cd7ac068ae730c312671dd4439a5dd65"}, - {file = "frozendict-2.3.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3957d52f1906b0c85f641a1911d214255873f6408ab4e5ad657cc27a247fb145"}, - {file = "frozendict-2.3.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72cfe08ab8ae524e54848fa90b22d02c1b1ecfb3064438696bcaa4b953f18772"}, - {file = "frozendict-2.3.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4742e76c4111bd09198d3ab66cef94be8506212311338f9182d6ef5f5cb60493"}, - {file = "frozendict-2.3.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:313ed8d9ba6bac35d7635cd9580ee5721a0fb016f4d2d20f0efa05dbecbdb1be"}, - {file = "frozendict-2.3.8-cp38-cp38-win_amd64.whl", hash = "sha256:d3c6ce943946c2a61501c8cf116fff0892d11dd579877eb36e2aea2c27fddfef"}, - {file = "frozendict-2.3.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0f573dc4861dd7ec9e055c8cceaf45355e894e749f621f199aab7b311ac4bdb"}, - {file = 
"frozendict-2.3.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b3435e5f1ca5ae68a5e95e64b09d6d5c645cadd6b87569a0b3019dd248c8d00"}, - {file = "frozendict-2.3.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:145afd033ebfade28416093335261b8ec1af5cccc593482309e7add062ec8668"}, - {file = "frozendict-2.3.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da98427de26b5a2865727947480cbb53860089c4d195baa29c539da811cea617"}, - {file = "frozendict-2.3.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5e82befa7c385a668d569cebbebbdf49cee6fea4083f08e869a1b08cfb640a9f"}, - {file = "frozendict-2.3.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:80abe81d36e889ceec665e06ec764a7638000fa3e7be09786ac4d3ddc64b76db"}, - {file = "frozendict-2.3.8-cp39-cp39-win_amd64.whl", hash = "sha256:8ccc94ac781710db44e142e1a11ff9b31d02c032c01c6868d51fcbef73086225"}, - {file = "frozendict-2.3.8-cp39-cp39-win_arm64.whl", hash = "sha256:e72dbc1bcc2203cef38d205f692396f5505921a5680f66aa9a7e8bb71fd38f28"}, - {file = "frozendict-2.3.8-py311-none-any.whl", hash = "sha256:ba41a7ed019bd03b62d63ed3f8dea35b8243d1936f7c9ed4b5298ca45a01928e"}, - {file = "frozendict-2.3.8.tar.gz", hash = "sha256:5526559eca8f1780a4ee5146896f59afc31435313560208dd394a3a5e537d3ff"}, + {file = "frozendict-2.3.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df2d2afa5af41bfa09dc9d5a8e6d73ae39b677a8572200c65a5ea353387ffccd"}, + {file = "frozendict-2.3.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b10df7f5d8637b1af319434f99dc25ca6f5537e28b293e4c405ebfb4bf9581fa"}, + {file = "frozendict-2.3.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da22a3e873f365f97445c49afc1e6d5198ed6d172f3efaf0e9fde0edcca3cea1"}, + {file = "frozendict-2.3.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89218738e2122b50bf8a0444083dbe2de280402e9c2ef0929c0db0f93ff11271"}, + {file = "frozendict-2.3.10-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:aa11add43a71fd47523fbd011be5cc011df79e25ec0b0339fc0d728623aaa7ec"}, + {file = "frozendict-2.3.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:af267bd6d98cbc10580105dc76f28f7156856fa48a5bbcadd40edb85f93657ae"}, + {file = "frozendict-2.3.10-cp310-cp310-win_amd64.whl", hash = "sha256:c112024df64b8926a315d7e36b860967fcad8aae0c592b9f117589391373e893"}, + {file = "frozendict-2.3.10-cp310-cp310-win_arm64.whl", hash = "sha256:a0065db2bc76628853dd620bd08c1ca44ad0b711e92e89b4156493153add6f9d"}, + {file = "frozendict-2.3.10-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:93634af5a6d71762aebc7d78bdce92890b7e612588faf887c9eaf752dc7ccdb1"}, + {file = "frozendict-2.3.10-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b4d05e231dc1a2ec874f847fd7348cbee469555468efb875a89994ecde31a81"}, + {file = "frozendict-2.3.10-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d40d0644f19365fc6cc428db31c0f113fa550bd15920262f9d77ccf6556d87b"}, + {file = "frozendict-2.3.10-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:12b40526219f9583b30690011288bca4d6cce8724cda96b3c3ab08b67c5a7f09"}, + {file = "frozendict-2.3.10-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:6b552fffeba8e41b43ce10cc0fc467e048a7c9a71ae3241057510342132555b9"}, + {file = "frozendict-2.3.10-cp36-cp36m-win_amd64.whl", hash = "sha256:07208e4718cb70aa259ac886c19b96a4aad1cf00e9199f211746f738951bbf7c"}, + {file = "frozendict-2.3.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:e8bec6d11f7254e405290cb1b081caffa0c18b6aa779130da9a546349c56be83"}, + {file = "frozendict-2.3.10-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b089c7e8c95d8b043e82e7da26e165f4220d7310efaad5e94445db7e3bc8321e"}, + {file = "frozendict-2.3.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08a5829d708657c9d5ad58f4a7e4baa73a3d57290f9613bdd909d481fc203a3a"}, + {file = "frozendict-2.3.10-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1c015852dacf144dbeadf203673d8c714f788fcc2b810a36504994b3c4f5a436"}, + {file = "frozendict-2.3.10-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bb9f15a5ed924be2b1cb3654b7ea3b7bae265ff39e2b5784d42bd4a6e1353e45"}, + {file = "frozendict-2.3.10-cp37-cp37m-win_amd64.whl", hash = "sha256:809bb9c6c657bded925710a309bb2a2350bdbfdc9371df427f1a93cb8ab7ec3e"}, + {file = "frozendict-2.3.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ff7a9cca3a3a1e584349e859d028388bd96a5475f76721471b73797472c6db17"}, + {file = "frozendict-2.3.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cdd496933ddb428f3854bea9ffdce0245bb27c27909f663ad396409fb4dffb5"}, + {file = "frozendict-2.3.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9df392b655fadaa0174c1923e6205b30ad1ccca248e8e146e63a8147a355ee01"}, + {file = "frozendict-2.3.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7901828700f36fe12486705afe7afc5583434390c8f69b5419de1b6c566fb00d"}, + {file = "frozendict-2.3.10-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c9aa28ce48d848ee520409533fd0254de4caf025c5cf1b9f27c98c1dd8cf90aa"}, + {file = "frozendict-2.3.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0856af4f5b4288b2270e0b74078fad5cbaf4f799326b82183865f6f367008b2c"}, + {file = "frozendict-2.3.10-cp38-cp38-win_amd64.whl", hash = "sha256:ac41c671ff33cbefc0f06c4b2a630d18ab59f5256f45f57d5632252ae4a8c07a"}, + {file = "frozendict-2.3.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:893205dc5a4e5c4b24e5822ceb21ef14fed8ca4afae7ac688e2fc24294c85225"}, + {file = "frozendict-2.3.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e78c5ac5d71f3b73f07ff9d9e3cc32dfbf7954f2c57c2d0e1fe8f1600e980b40"}, + {file = "frozendict-2.3.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c4ca4cc42bc30b20476616411d4b49aae6084760b99251f1cbdfed879ae53ea"}, + {file = "frozendict-2.3.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c865962216f7cfd6dac8693f4de431a9d98a7225185ff23613ecd10c42423adc"}, + {file = "frozendict-2.3.10-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:99b2f47b292cc4d68f6679918e8e9e6dc5e816924d8369d07018be56b93fb20f"}, + {file = "frozendict-2.3.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e7abf4539b73c8e5680dd2fdbd19ca4fc3e2b2f3666f80f022217839bb859fd"}, + {file = "frozendict-2.3.10-cp39-cp39-win_amd64.whl", hash = "sha256:901e774629fc63f84d24b5e46b59de1eed22392ee98b7f92e694a127d541edac"}, + {file = "frozendict-2.3.10-cp39-cp39-win_arm64.whl", hash = "sha256:6f8681c0ffe92be9aba40c9b9960c48f0ae7f6ea585af2b93fc9542cc3865969"}, + {file = "frozendict-2.3.10-py3-none-any.whl", hash = "sha256:66cded65f144393b4226bda9fe9ac2f42451d2d603e8a486015744bb566a7008"}, + {file = "frozendict-2.3.10.tar.gz", hash = "sha256:aadc83510ce82751a0bb3575231f778bc37cbb373f5f05a52b888e26cbb92f79"}, ] [[package]] @@ -442,28 +469,28 @@ files = [ [package.dependencies] certifi = "*" h11 = ">=0.13,<0.15" -sniffio = "==1.*" [package.extras] asyncio = 
["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.23.0)"] [[package]] name = "httpx" -version = "0.25.1" +version = "0.25.2" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.25.1-py3-none-any.whl", hash = "sha256:fec7d6cc5c27c578a391f7e87b9aa7d3d8fbcd034f6399f9f79b45bcc12a866a"}, - {file = "httpx-0.25.1.tar.gz", hash = "sha256:ffd96d5cf901e63863d9f1b4b6807861dbea4d301613415d9e6e57ead15fc5d0"}, + {file = "httpx-0.25.2-py3-none-any.whl", hash = "sha256:a05d3d052d9b2dfce0e3896636467f8a5342fb2b902c819428e1ac65413ca118"}, + {file = "httpx-0.25.2.tar.gz", hash = "sha256:8b8fcaa0c8ea7b05edd69a094e63a2094c4efcb48129fb757361bc423c0ad9e8"}, ] [package.dependencies] anyio = "*" certifi = "*" -httpcore = "*" +httpcore = "==1.*" idna = "*" sniffio = "*" @@ -489,13 +516,13 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.4" +version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] [[package]] @@ -836,21 +863,6 @@ files = [ pyoxigraph = ">=0.3.14,<0.4.0" rdflib = ">=6.3,<8.0" -[[package]] -name = "oxrdflib" -version = "0.3.6" -description = "rdflib stores based on pyoxigraph" -optional = false -python-versions = ">=3.7" -files = [ - {file = "oxrdflib-0.3.6-py3-none-any.whl", hash = "sha256:a645a3e5ba86e0c8ff33f6429ca623fe01d93d30234c8f2ad1f553636b4b756a"}, - {file = "oxrdflib-0.3.6.tar.gz", hash = "sha256:50f675773b87dd656f1753e24bf3b92fde06ad9ae7e8c95629a7593521d0aa06"}, -] - -[package.dependencies] -pyoxigraph = ">=0.3.14,<0.4.0" -rdflib = ">=6.3,<8.0" - [[package]] name = "packaging" version = "23.2" @@ -875,13 +887,13 @@ files = [ [[package]] name = "platformdirs" -version = "3.11.0" +version = "4.0.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, - {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, + {file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"}, + {file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"}, ] [package.extras] @@ -923,18 +935,18 @@ virtualenv = ">=20.10.0" [[package]] name = "pydantic" -version = "1.10.12" -description = "Data validation and settings management using python type hints" +version = "2.5.2" +description = "Data validation using Python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-2.5.1-py3-none-any.whl", hash = "sha256:dc5244a8939e0d9a68f1f1b5f550b2e1c879912033b1becbedb315accc75441b"}, - {file = "pydantic-2.5.1.tar.gz", hash = "sha256:0b8be5413c06aadfbe56f6dc1d45c9ed25fd43264414c571135c97dd77c2bedb"}, + {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"}, + {file = "pydantic-2.5.2.tar.gz", hash = "sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.14.3" +pydantic-core = "2.14.5" typing-extensions = ">=4.6.1" [package.extras] @@ -942,116 +954,116 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.14.3" +version = "2.14.5" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic_core-2.14.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:ba44fad1d114539d6a1509966b20b74d2dec9a5b0ee12dd7fd0a1bb7b8785e5f"}, - {file = "pydantic_core-2.14.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4a70d23eedd88a6484aa79a732a90e36701048a1509078d1b59578ef0ea2cdf5"}, - {file = "pydantic_core-2.14.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cc24728a1a9cef497697e53b3d085fb4d3bc0ef1ef4d9b424d9cf808f52c146"}, - {file = "pydantic_core-2.14.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab4a2381005769a4af2ffddae74d769e8a4aae42e970596208ec6d615c6fb080"}, - {file = "pydantic_core-2.14.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:905a12bf088d6fa20e094f9a477bf84bd823651d8b8384f59bcd50eaa92e6a52"}, - {file = "pydantic_core-2.14.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:38aed5a1bbc3025859f56d6a32f6e53ca173283cb95348e03480f333b1091e7d"}, - {file = "pydantic_core-2.14.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1767bd3f6370458e60c1d3d7b1d9c2751cc1ad743434e8ec84625a610c8b9195"}, - {file = "pydantic_core-2.14.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7cb0c397f29688a5bd2c0dbd44451bc44ebb9b22babc90f97db5ec3e5bb69977"}, - {file = "pydantic_core-2.14.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9ff737f24b34ed26de62d481ef522f233d3c5927279f6b7229de9b0deb3f76b5"}, - {file = "pydantic_core-2.14.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a1a39fecb5f0b19faee9a8a8176c805ed78ce45d760259a4ff3d21a7daa4dfc1"}, - {file = "pydantic_core-2.14.3-cp310-none-win32.whl", hash = "sha256:ccbf355b7276593c68fa824030e68cb29f630c50e20cb11ebb0ee450ae6b3d08"}, - {file = 
"pydantic_core-2.14.3-cp310-none-win_amd64.whl", hash = "sha256:536e1f58419e1ec35f6d1310c88496f0d60e4f182cacb773d38076f66a60b149"}, - {file = "pydantic_core-2.14.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:f1f46700402312bdc31912f6fc17f5ecaaaa3bafe5487c48f07c800052736289"}, - {file = "pydantic_core-2.14.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:88ec906eb2d92420f5b074f59cf9e50b3bb44f3cb70e6512099fdd4d88c2f87c"}, - {file = "pydantic_core-2.14.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:056ea7cc3c92a7d2a14b5bc9c9fa14efa794d9f05b9794206d089d06d3433dc7"}, - {file = "pydantic_core-2.14.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:076edc972b68a66870cec41a4efdd72a6b655c4098a232314b02d2bfa3bfa157"}, - {file = "pydantic_core-2.14.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e71f666c3bf019f2490a47dddb44c3ccea2e69ac882f7495c68dc14d4065eac2"}, - {file = "pydantic_core-2.14.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f518eac285c9632be337323eef9824a856f2680f943a9b68ac41d5f5bad7df7c"}, - {file = "pydantic_core-2.14.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9dbab442a8d9ca918b4ed99db8d89d11b1f067a7dadb642476ad0889560dac79"}, - {file = "pydantic_core-2.14.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0653fb9fc2fa6787f2fa08631314ab7fc8070307bd344bf9471d1b7207c24623"}, - {file = "pydantic_core-2.14.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c54af5069da58ea643ad34ff32fd6bc4eebb8ae0fef9821cd8919063e0aeeaab"}, - {file = "pydantic_core-2.14.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc956f78651778ec1ab105196e90e0e5f5275884793ab67c60938c75bcca3989"}, - {file = "pydantic_core-2.14.3-cp311-none-win32.whl", hash = "sha256:5b73441a1159f1fb37353aaefb9e801ab35a07dd93cb8177504b25a317f4215a"}, - {file = "pydantic_core-2.14.3-cp311-none-win_amd64.whl", hash = "sha256:7349f99f1ef8b940b309179733f2cad2e6037a29560f1b03fdc6aa6be0a8d03c"}, - {file = "pydantic_core-2.14.3-cp311-none-win_arm64.whl", hash = "sha256:ec79dbe23702795944d2ae4c6925e35a075b88acd0d20acde7c77a817ebbce94"}, - {file = "pydantic_core-2.14.3-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:8f5624f0f67f2b9ecaa812e1dfd2e35b256487566585160c6c19268bf2ffeccc"}, - {file = "pydantic_core-2.14.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6c2d118d1b6c9e2d577e215567eedbe11804c3aafa76d39ec1f8bc74e918fd07"}, - {file = "pydantic_core-2.14.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe863491664c6720d65ae438d4efaa5eca766565a53adb53bf14bc3246c72fe0"}, - {file = "pydantic_core-2.14.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:136bc7247e97a921a020abbd6ef3169af97569869cd6eff41b6a15a73c44ea9b"}, - {file = "pydantic_core-2.14.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aeafc7f5bbddc46213707266cadc94439bfa87ecf699444de8be044d6d6eb26f"}, - {file = "pydantic_core-2.14.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e16aaf788f1de5a85c8f8fcc9c1ca1dd7dd52b8ad30a7889ca31c7c7606615b8"}, - {file = "pydantic_core-2.14.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8fc652c354d3362e2932a79d5ac4bbd7170757a41a62c4fe0f057d29f10bebb"}, - {file = "pydantic_core-2.14.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:f1b92e72babfd56585c75caf44f0b15258c58e6be23bc33f90885cebffde3400"}, - {file = "pydantic_core-2.14.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:75f3f534f33651b73f4d3a16d0254de096f43737d51e981478d580f4b006b427"}, - {file = "pydantic_core-2.14.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c9ffd823c46e05ef3eb28b821aa7bc501efa95ba8880b4a1380068e32c5bed47"}, - {file = "pydantic_core-2.14.3-cp312-none-win32.whl", hash = "sha256:12e05a76b223577a4696c76d7a6b36a0ccc491ffb3c6a8cf92d8001d93ddfd63"}, - {file = "pydantic_core-2.14.3-cp312-none-win_amd64.whl", hash = "sha256:1582f01eaf0537a696c846bea92082082b6bfc1103a88e777e983ea9fbdc2a0f"}, - {file = "pydantic_core-2.14.3-cp312-none-win_arm64.whl", hash = "sha256:96fb679c7ca12a512d36d01c174a4fbfd912b5535cc722eb2c010c7b44eceb8e"}, - {file = "pydantic_core-2.14.3-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:71ed769b58d44e0bc2701aa59eb199b6665c16e8a5b8b4a84db01f71580ec448"}, - {file = "pydantic_core-2.14.3-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:5402ee0f61e7798ea93a01b0489520f2abfd9b57b76b82c93714c4318c66ca06"}, - {file = "pydantic_core-2.14.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaab9dc009e22726c62fe3b850b797e7f0e7ba76d245284d1064081f512c7226"}, - {file = "pydantic_core-2.14.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:92486a04d54987054f8b4405a9af9d482e5100d6fe6374fc3303015983fc8bda"}, - {file = "pydantic_core-2.14.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cf08b43d1d5d1678f295f0431a4a7e1707d4652576e1d0f8914b5e0213bfeee5"}, - {file = "pydantic_core-2.14.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8ca13480ce16daad0504be6ce893b0ee8ec34cd43b993b754198a89e2787f7e"}, - {file = "pydantic_core-2.14.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44afa3c18d45053fe8d8228950ee4c8eaf3b5a7f3b64963fdeac19b8342c987f"}, - {file = "pydantic_core-2.14.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56814b41486e2d712a8bc02a7b1f17b87fa30999d2323bbd13cf0e52296813a1"}, - {file = "pydantic_core-2.14.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c3dc2920cc96f9aa40c6dc54256e436cc95c0a15562eb7bd579e1811593c377e"}, - {file = "pydantic_core-2.14.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e483b8b913fcd3b48badec54185c150cb7ab0e6487914b84dc7cde2365e0c892"}, - {file = "pydantic_core-2.14.3-cp37-none-win32.whl", hash = "sha256:364dba61494e48f01ef50ae430e392f67ee1ee27e048daeda0e9d21c3ab2d609"}, - {file = "pydantic_core-2.14.3-cp37-none-win_amd64.whl", hash = "sha256:a402ae1066be594701ac45661278dc4a466fb684258d1a2c434de54971b006ca"}, - {file = "pydantic_core-2.14.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:10904368261e4509c091cbcc067e5a88b070ed9a10f7ad78f3029c175487490f"}, - {file = "pydantic_core-2.14.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:260692420028319e201b8649b13ac0988974eeafaaef95d0dfbf7120c38dc000"}, - {file = "pydantic_core-2.14.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c1bf1a7b05a65d3b37a9adea98e195e0081be6b17ca03a86f92aeb8b110f468"}, - {file = "pydantic_core-2.14.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d7abd17a838a52140e3aeca271054e321226f52df7e0a9f0da8f91ea123afe98"}, - {file = "pydantic_core-2.14.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:a5c51460ede609fbb4fa883a8fe16e749964ddb459966d0518991ec02eb8dfb9"}, - {file = "pydantic_core-2.14.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d06c78074646111fb01836585f1198367b17d57c9f427e07aaa9ff499003e58d"}, - {file = "pydantic_core-2.14.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af452e69446fadf247f18ac5d153b1f7e61ef708f23ce85d8c52833748c58075"}, - {file = "pydantic_core-2.14.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e3ad4968711fb379a67c8c755beb4dae8b721a83737737b7bcee27c05400b047"}, - {file = "pydantic_core-2.14.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c5ea0153482e5b4d601c25465771c7267c99fddf5d3f3bdc238ef930e6d051cf"}, - {file = "pydantic_core-2.14.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:96eb10ef8920990e703da348bb25fedb8b8653b5966e4e078e5be382b430f9e0"}, - {file = "pydantic_core-2.14.3-cp38-none-win32.whl", hash = "sha256:ea1498ce4491236d1cffa0eee9ad0968b6ecb0c1cd711699c5677fc689905f00"}, - {file = "pydantic_core-2.14.3-cp38-none-win_amd64.whl", hash = "sha256:2bc736725f9bd18a60eec0ed6ef9b06b9785454c8d0105f2be16e4d6274e63d0"}, - {file = "pydantic_core-2.14.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:1ea992659c03c3ea811d55fc0a997bec9dde863a617cc7b25cfde69ef32e55af"}, - {file = "pydantic_core-2.14.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d2b53e1f851a2b406bbb5ac58e16c4a5496038eddd856cc900278fa0da97f3fc"}, - {file = "pydantic_core-2.14.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c7f8e8a7cf8e81ca7d44bea4f181783630959d41b4b51d2f74bc50f348a090f"}, - {file = "pydantic_core-2.14.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8d3b9c91eeb372a64ec6686c1402afd40cc20f61a0866850f7d989b6bf39a41a"}, - {file = "pydantic_core-2.14.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ef3e2e407e4cad2df3c89488a761ed1f1c33f3b826a2ea9a411b0a7d1cccf1b"}, - {file = "pydantic_core-2.14.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f86f20a9d5bee1a6ede0f2757b917bac6908cde0f5ad9fcb3606db1e2968bcf5"}, - {file = "pydantic_core-2.14.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61beaa79d392d44dc19d6f11ccd824d3cccb865c4372157c40b92533f8d76dd0"}, - {file = "pydantic_core-2.14.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d41df8e10b094640a6b234851b624b76a41552f637b9fb34dc720b9fe4ef3be4"}, - {file = "pydantic_core-2.14.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c08ac60c3caa31f825b5dbac47e4875bd4954d8f559650ad9e0b225eaf8ed0c"}, - {file = "pydantic_core-2.14.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d8b3932f1a369364606417ded5412c4ffb15bedbcf797c31317e55bd5d920e"}, - {file = "pydantic_core-2.14.3-cp39-none-win32.whl", hash = "sha256:caa94726791e316f0f63049ee00dff3b34a629b0d099f3b594770f7d0d8f1f56"}, - {file = "pydantic_core-2.14.3-cp39-none-win_amd64.whl", hash = "sha256:2494d20e4c22beac30150b4be3b8339bf2a02ab5580fa6553ca274bc08681a65"}, - {file = "pydantic_core-2.14.3-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:fe272a72c7ed29f84c42fedd2d06c2f9858dc0c00dae3b34ba15d6d8ae0fbaaf"}, - {file = "pydantic_core-2.14.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7e63a56eb7fdee1587d62f753ccd6d5fa24fbeea57a40d9d8beaef679a24bdd6"}, - {file = "pydantic_core-2.14.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b7692f539a26265cece1e27e366df5b976a6db6b1f825a9e0466395b314ee48b"}, - {file = "pydantic_core-2.14.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af46f0b7a1342b49f208fed31f5a83b8495bb14b652f621e0a6787d2f10f24ee"}, - {file = "pydantic_core-2.14.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6e2f9d76c00e805d47f19c7a96a14e4135238a7551a18bfd89bb757993fd0933"}, - {file = "pydantic_core-2.14.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:de52ddfa6e10e892d00f747bf7135d7007302ad82e243cf16d89dd77b03b649d"}, - {file = "pydantic_core-2.14.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:38113856c7fad8c19be7ddd57df0c3e77b1b2336459cb03ee3903ce9d5e236ce"}, - {file = "pydantic_core-2.14.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:354db020b1f8f11207b35360b92d95725621eb92656725c849a61e4b550f4acc"}, - {file = "pydantic_core-2.14.3-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:76fc18653a5c95e5301a52d1b5afb27c9adc77175bf00f73e94f501caf0e05ad"}, - {file = "pydantic_core-2.14.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2646f8270f932d79ba61102a15ea19a50ae0d43b314e22b3f8f4b5fabbfa6e38"}, - {file = "pydantic_core-2.14.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37dad73a2f82975ed563d6a277fd9b50e5d9c79910c4aec787e2d63547202315"}, - {file = "pydantic_core-2.14.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:113752a55a8eaece2e4ac96bc8817f134c2c23477e477d085ba89e3aa0f4dc44"}, - {file = "pydantic_core-2.14.3-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:8488e973547e8fb1b4193fd9faf5236cf1b7cd5e9e6dc7ff6b4d9afdc4c720cb"}, - {file = "pydantic_core-2.14.3-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3d1dde10bd9962b1434053239b1d5490fc31a2b02d8950a5f731bc584c7a5a0f"}, - {file = "pydantic_core-2.14.3-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:2c83892c7bf92b91d30faca53bb8ea21f9d7e39f0ae4008ef2c2f91116d0464a"}, - {file = "pydantic_core-2.14.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:849cff945284c577c5f621d2df76ca7b60f803cc8663ff01b778ad0af0e39bb9"}, - {file = "pydantic_core-2.14.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa89919fbd8a553cd7d03bf23d5bc5deee622e1b5db572121287f0e64979476"}, - {file = "pydantic_core-2.14.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf15145b1f8056d12c67255cd3ce5d317cd4450d5ee747760d8d088d85d12a2d"}, - {file = "pydantic_core-2.14.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4cc6bb11f4e8e5ed91d78b9880774fbc0856cb226151b0a93b549c2b26a00c19"}, - {file = "pydantic_core-2.14.3-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:832d16f248ca0cc96929139734ec32d21c67669dcf8a9f3f733c85054429c012"}, - {file = "pydantic_core-2.14.3-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b02b5e1f54c3396c48b665050464803c23c685716eb5d82a1d81bf81b5230da4"}, - {file = "pydantic_core-2.14.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:1f2d4516c32255782153e858f9a900ca6deadfb217fd3fb21bb2b60b4e04d04d"}, - {file = "pydantic_core-2.14.3-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0a3e51c2be472b7867eb0c5d025b91400c2b73a0823b89d4303a9097e2ec6655"}, - {file = "pydantic_core-2.14.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:df33902464410a1f1a0411a235f0a34e7e129f12cb6340daca0f9d1390f5fe10"}, - 
{file = "pydantic_core-2.14.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27828f0227b54804aac6fb077b6bb48e640b5435fdd7fbf0c274093a7b78b69c"}, - {file = "pydantic_core-2.14.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e2979dc80246e18e348de51246d4c9b410186ffa3c50e77924bec436b1e36cb"}, - {file = "pydantic_core-2.14.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b28996872b48baf829ee75fa06998b607c66a4847ac838e6fd7473a6b2ab68e7"}, - {file = "pydantic_core-2.14.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ca55c9671bb637ce13d18ef352fd32ae7aba21b4402f300a63f1fb1fd18e0364"}, - {file = "pydantic_core-2.14.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:aecd5ed096b0e5d93fb0367fd8f417cef38ea30b786f2501f6c34eabd9062c38"}, - {file = "pydantic_core-2.14.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:44aaf1a07ad0824e407dafc637a852e9a44d94664293bbe7d8ee549c356c8882"}, - {file = "pydantic_core-2.14.3.tar.gz", hash = "sha256:3ad083df8fe342d4d8d00cc1d3c1a23f0dc84fce416eb301e69f1ddbbe124d3f"}, + {file = "pydantic_core-2.14.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:7e88f5696153dc516ba6e79f82cc4747e87027205f0e02390c21f7cb3bd8abfd"}, + {file = "pydantic_core-2.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4641e8ad4efb697f38a9b64ca0523b557c7931c5f84e0fd377a9a3b05121f0de"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:774de879d212db5ce02dfbf5b0da9a0ea386aeba12b0b95674a4ce0593df3d07"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebb4e035e28f49b6f1a7032920bb9a0c064aedbbabe52c543343d39341a5b2a3"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b53e9ad053cd064f7e473a5f29b37fc4cc9dc6d35f341e6afc0155ea257fc911"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aa1768c151cf562a9992462239dfc356b3d1037cc5a3ac829bb7f3bda7cc1f9"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eac5c82fc632c599f4639a5886f96867ffced74458c7db61bc9a66ccb8ee3113"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae91f50ccc5810b2f1b6b858257c9ad2e08da70bf890dee02de1775a387c66"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6b9ff467ffbab9110e80e8c8de3bcfce8e8b0fd5661ac44a09ae5901668ba997"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61ea96a78378e3bd5a0be99b0e5ed00057b71f66115f5404d0dae4819f495093"}, + {file = "pydantic_core-2.14.5-cp310-none-win32.whl", hash = "sha256:bb4c2eda937a5e74c38a41b33d8c77220380a388d689bcdb9b187cf6224c9720"}, + {file = "pydantic_core-2.14.5-cp310-none-win_amd64.whl", hash = "sha256:b7851992faf25eac90bfcb7bfd19e1f5ffa00afd57daec8a0042e63c74a4551b"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4e40f2bd0d57dac3feb3a3aed50f17d83436c9e6b09b16af271b6230a2915459"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab1cdb0f14dc161ebc268c09db04d2c9e6f70027f3b42446fa11c153521c0e88"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7ea3a1c5bb40c93cad361b3e869b180ac174656120c42b9fadebf685d121b"}, + 
{file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60b7607753ba62cf0739177913b858140f11b8af72f22860c28eabb2f0a61937"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2248485b0322c75aee7565d95ad0e16f1c67403a470d02f94da7344184be770f"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:823fcc638f67035137a5cd3f1584a4542d35a951c3cc68c6ead1df7dac825c26"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96581cfefa9123accc465a5fd0cc833ac4d75d55cc30b633b402e00e7ced00a6"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a33324437018bf6ba1bb0f921788788641439e0ed654b233285b9c69704c27b4"}, + {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9bd18fee0923ca10f9a3ff67d4851c9d3e22b7bc63d1eddc12f439f436f2aada"}, + {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:853a2295c00f1d4429db4c0fb9475958543ee80cfd310814b5c0ef502de24dda"}, + {file = "pydantic_core-2.14.5-cp311-none-win32.whl", hash = "sha256:cb774298da62aea5c80a89bd58c40205ab4c2abf4834453b5de207d59d2e1651"}, + {file = "pydantic_core-2.14.5-cp311-none-win_amd64.whl", hash = "sha256:e87fc540c6cac7f29ede02e0f989d4233f88ad439c5cdee56f693cc9c1c78077"}, + {file = "pydantic_core-2.14.5-cp311-none-win_arm64.whl", hash = "sha256:57d52fa717ff445cb0a5ab5237db502e6be50809b43a596fb569630c665abddf"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e60f112ac88db9261ad3a52032ea46388378034f3279c643499edb982536a093"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e227c40c02fd873c2a73a98c1280c10315cbebe26734c196ef4514776120aeb"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0cbc7fff06a90bbd875cc201f94ef0ee3929dfbd5c55a06674b60857b8b85ed"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:103ef8d5b58596a731b690112819501ba1db7a36f4ee99f7892c40da02c3e189"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c949f04ecad823f81b1ba94e7d189d9dfb81edbb94ed3f8acfce41e682e48cef"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1452a1acdf914d194159439eb21e56b89aa903f2e1c65c60b9d874f9b950e5d"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4679d4c2b089e5ef89756bc73e1926745e995d76e11925e3e96a76d5fa51fc"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf9d3fe53b1ee360e2421be95e62ca9b3296bf3f2fb2d3b83ca49ad3f925835e"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:70f4b4851dbb500129681d04cc955be2a90b2248d69273a787dda120d5cf1f69"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:59986de5710ad9613ff61dd9b02bdd2f615f1a7052304b79cc8fa2eb4e336d2d"}, + {file = "pydantic_core-2.14.5-cp312-none-win32.whl", hash = "sha256:699156034181e2ce106c89ddb4b6504c30db8caa86e0c30de47b3e0654543260"}, + {file = "pydantic_core-2.14.5-cp312-none-win_amd64.whl", hash = "sha256:5baab5455c7a538ac7e8bf1feec4278a66436197592a9bed538160a2e7d11e36"}, + {file = 
"pydantic_core-2.14.5-cp312-none-win_arm64.whl", hash = "sha256:e47e9a08bcc04d20975b6434cc50bf82665fbc751bcce739d04a3120428f3e27"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:af36f36538418f3806048f3b242a1777e2540ff9efaa667c27da63d2749dbce0"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:45e95333b8418ded64745f14574aa9bfc212cb4fbeed7a687b0c6e53b5e188cd"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e47a76848f92529879ecfc417ff88a2806438f57be4a6a8bf2961e8f9ca9ec7"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d81e6987b27bc7d101c8597e1cd2bcaa2fee5e8e0f356735c7ed34368c471550"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34708cc82c330e303f4ce87758828ef6e457681b58ce0e921b6e97937dd1e2a3"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c1988019752138b974c28f43751528116bcceadad85f33a258869e641d753"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e4d090e73e0725b2904fdbdd8d73b8802ddd691ef9254577b708d413bf3006e"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5c7d5b5005f177764e96bd584d7bf28d6e26e96f2a541fdddb934c486e36fd59"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a71891847f0a73b1b9eb86d089baee301477abef45f7eaf303495cd1473613e4"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a717aef6971208f0851a2420b075338e33083111d92041157bbe0e2713b37325"}, + {file = "pydantic_core-2.14.5-cp37-none-win32.whl", hash = "sha256:de790a3b5aa2124b8b78ae5faa033937a72da8efe74b9231698b5a1dd9be3405"}, + {file = "pydantic_core-2.14.5-cp37-none-win_amd64.whl", hash = "sha256:6c327e9cd849b564b234da821236e6bcbe4f359a42ee05050dc79d8ed2a91588"}, + {file = "pydantic_core-2.14.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef98ca7d5995a82f43ec0ab39c4caf6a9b994cb0b53648ff61716370eadc43cf"}, + {file = "pydantic_core-2.14.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6eae413494a1c3f89055da7a5515f32e05ebc1a234c27674a6956755fb2236f"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcf4e6d85614f7a4956c2de5a56531f44efb973d2fe4a444d7251df5d5c4dcfd"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6637560562134b0e17de333d18e69e312e0458ee4455bdad12c37100b7cad706"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77fa384d8e118b3077cccfcaf91bf83c31fe4dc850b5e6ee3dc14dc3d61bdba1"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16e29bad40bcf97aac682a58861249ca9dcc57c3f6be22f506501833ddb8939c"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531f4b4252fac6ca476fbe0e6f60f16f5b65d3e6b583bc4d87645e4e5ddde331"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:074f3d86f081ce61414d2dc44901f4f83617329c6f3ab49d2bc6c96948b2c26b"}, + {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c2adbe22ab4babbca99c75c5d07aaf74f43c3195384ec07ccbd2f9e3bddaecec"}, + 
{file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0f6116a558fd06d1b7c2902d1c4cf64a5bd49d67c3540e61eccca93f41418124"}, + {file = "pydantic_core-2.14.5-cp38-none-win32.whl", hash = "sha256:fe0a5a1025eb797752136ac8b4fa21aa891e3d74fd340f864ff982d649691867"}, + {file = "pydantic_core-2.14.5-cp38-none-win_amd64.whl", hash = "sha256:079206491c435b60778cf2b0ee5fd645e61ffd6e70c47806c9ed51fc75af078d"}, + {file = "pydantic_core-2.14.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a6a16f4a527aae4f49c875da3cdc9508ac7eef26e7977952608610104244e1b7"}, + {file = "pydantic_core-2.14.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:abf058be9517dc877227ec3223f0300034bd0e9f53aebd63cf4456c8cb1e0863"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b08aae5013640a3bfa25a8eebbd95638ec3f4b2eaf6ed82cf0c7047133f03b"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2d97e906b4ff36eb464d52a3bc7d720bd6261f64bc4bcdbcd2c557c02081ed2"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3128e0bbc8c091ec4375a1828d6118bc20404883169ac95ffa8d983b293611e6"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88e74ab0cdd84ad0614e2750f903bb0d610cc8af2cc17f72c28163acfcf372a4"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c339dabd8ee15f8259ee0f202679b6324926e5bc9e9a40bf981ce77c038553db"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3387277f1bf659caf1724e1afe8ee7dbc9952a82d90f858ebb931880216ea955"}, + {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ba6b6b3846cfc10fdb4c971980a954e49d447cd215ed5a77ec8190bc93dd7bc5"}, + {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca61d858e4107ce5e1330a74724fe757fc7135190eb5ce5c9d0191729f033209"}, + {file = "pydantic_core-2.14.5-cp39-none-win32.whl", hash = "sha256:ec1e72d6412f7126eb7b2e3bfca42b15e6e389e1bc88ea0069d0cc1742f477c6"}, + {file = "pydantic_core-2.14.5-cp39-none-win_amd64.whl", hash = "sha256:c0b97ec434041827935044bbbe52b03d6018c2897349670ff8fe11ed24d1d4ab"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79e0a2cdbdc7af3f4aee3210b1172ab53d7ddb6a2d8c24119b5706e622b346d0"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:678265f7b14e138d9a541ddabbe033012a2953315739f8cfa6d754cc8063e8ca"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b15e855ae44f0c6341ceb74df61b606e11f1087e87dcb7482377374aac6abe"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09b0e985fbaf13e6b06a56d21694d12ebca6ce5414b9211edf6f17738d82b0f8"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ad873900297bb36e4b6b3f7029d88ff9829ecdc15d5cf20161775ce12306f8a"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2d0ae0d8670164e10accbeb31d5ad45adb71292032d0fdb9079912907f0085f4"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d37f8ec982ead9ba0a22a996129594938138a1503237b87318392a48882d50b7"}, + {file = 
"pydantic_core-2.14.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35613015f0ba7e14c29ac6c2483a657ec740e5ac5758d993fdd5870b07a61d8b"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab4ea451082e684198636565224bbb179575efc1658c48281b2c866bfd4ddf04"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ce601907e99ea5b4adb807ded3570ea62186b17f88e271569144e8cca4409c7"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70f947628e074bb2526ba1b151cee10e4c3b9670af4dbb4d73bc8a89445916b5"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4bc536201426451f06f044dfbf341c09f540b4ebdb9fd8d2c6164d733de5e634"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4791cf0f8c3104ac668797d8c514afb3431bc3305f5638add0ba1a5a37e0d88"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:038c9f763e650712b899f983076ce783175397c848da04985658e7628cbe873b"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:27548e16c79702f1e03f5628589c6057c9ae17c95b4c449de3c66b589ead0520"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97bee68898f3f4344eb02fec316db93d9700fb1e6a5b760ffa20d71d9a46ce3"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b759b77f5337b4ea024f03abc6464c9f35d9718de01cfe6bae9f2e139c397e"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:439c9afe34638ace43a49bf72d201e0ffc1a800295bed8420c2a9ca8d5e3dbb3"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ba39688799094c75ea8a16a6b544eb57b5b0f3328697084f3f2790892510d144"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ccd4d5702bb90b84df13bd491be8d900b92016c5a455b7e14630ad7449eb03f8"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:81982d78a45d1e5396819bbb4ece1fadfe5f079335dd28c4ab3427cd95389944"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:7f8210297b04e53bc3da35db08b7302a6a1f4889c79173af69b72ec9754796b8"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8c8a8812fe6f43a3a5b054af6ac2d7b8605c7bcab2804a8a7d68b53f3cd86e00"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:206ed23aecd67c71daf5c02c3cd19c0501b01ef3cbf7782db9e4e051426b3d0d"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2027d05c8aebe61d898d4cffd774840a9cb82ed356ba47a90d99ad768f39789"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40180930807ce806aa71eda5a5a5447abb6b6a3c0b4b3b1b1962651906484d68"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:615a0a4bff11c45eb3c1996ceed5bdaa2f7b432425253a7c2eed33bb86d80abc"}, + {file = 
"pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5e412d717366e0677ef767eac93566582518fe8be923361a5c204c1a62eaafe"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:513b07e99c0a267b1d954243845d8a833758a6726a3b5d8948306e3fe14675e3"}, + {file = "pydantic_core-2.14.5.tar.gz", hash = "sha256:6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71"}, ] [package.dependencies] @@ -1074,13 +1086,13 @@ python-dotenv = ">=0.21.0" [[package]] name = "pygments" -version = "2.17.1" +version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.7" files = [ - {file = "pygments-2.17.1-py3-none-any.whl", hash = "sha256:1b37f1b1e1bff2af52ecaf28cc601e2ef7077000b227a0675da25aef85784bc4"}, - {file = "pygments-2.17.1.tar.gz", hash = "sha256:e45a0e74bf9c530f564ca81b8952343be986a29f6afe7f5ad95c5f06b7bdf5e8"}, + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, ] [package.extras] @@ -1113,7 +1125,7 @@ name = "pynvml" version = "11.5.0" description = "Python Bindings for the NVIDIA Management Library" optional = false -python-versions = ">=3.8" +python-versions = ">=3.6" files = [ {file = "pynvml-11.5.0-py3-none-any.whl", hash = "sha256:5cce014ac01b098d08f06178f86c37be409b80b2e903a5a03ce15eed60f55e25"}, {file = "pynvml-11.5.0.tar.gz", hash = "sha256:d027b21b95b1088b9fc278117f9f61b7c67f8e33a787e9f83f735f0f71ac32d0"}, @@ -1121,28 +1133,41 @@ files = [ [[package]] name = "pyoxigraph" -version = "0.3.20" +version = "0.3.22" description = "Python bindings of Oxigraph, a SPARQL database and RDF toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "pyoxigraph-0.3.20-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb0011e782bbe8b209d942b1d89dd2beefffd2f01ae269766bb32e51b47e429e"}, - {file = "pyoxigraph-0.3.20-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:682f7ba4c2c857bf541af433b0c2a9e754147177f66750a26462c108fc4fba6e"}, - {file = "pyoxigraph-0.3.20-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74dfed6ca723e2cf695fa301729b4ed5fec575b60293189e73b28d28d85dcb40"}, - {file = "pyoxigraph-0.3.20-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0fd4300ca22e2d215848ea1069789103cd9edcf5b7467a72a4f303b1ad65a1f9"}, - {file = "pyoxigraph-0.3.20-cp37-abi3-macosx_10_14_x86_64.macosx_11_0_arm64.macosx_10_14_universal2.whl", hash = "sha256:7abdda38b6083be0b06163316470139e240881c708dcefe375139683ef2b712a"}, - {file = "pyoxigraph-0.3.20-cp37-abi3-macosx_10_14_x86_64.whl", hash = "sha256:1f04370c4fdb78d0bffc4629cddae688dbcdac0faf69da8fd9fd4bd10f07783b"}, - {file = "pyoxigraph-0.3.20-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:acc8070a206cd6ee51487813e7f3ef67862174081655e201fde0d7fc26c5e967"}, - {file = "pyoxigraph-0.3.20-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e646a6a3ee2f731496be26524f1749453c181b282e50b2f48cce22ece55199"}, - {file = "pyoxigraph-0.3.20-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c02d3c07ddb6f6bc9fbc2cf51665e0d2b50ac29ce6098d82a38e071dd8a475c9"}, - {file = "pyoxigraph-0.3.20-cp37-abi3-win_amd64.whl", hash = "sha256:aacbd03bbd363389b5d2b309183c799a3361b653063985f38b231d89f22a316a"}, - {file = 
"pyoxigraph-0.3.20-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e75182b23d4990f85d4eb93c8369c1b85e01adfbefbcb6a50352a2490338b68"}, - {file = "pyoxigraph-0.3.20-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4f212fa83e435ad2653f97ece23ca51b3e950ef849d818dd7c97e0c1e8d48654"}, - {file = "pyoxigraph-0.3.20-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e35f4beb86764eeaf7af9ed3322d05e136c4fe20d708cb420c31db994700f2ff"}, - {file = "pyoxigraph-0.3.20-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3a4e771f69ae304dbeb7b72522de727297c5b28311e781aa7e70fc9552cd4ae1"}, - {file = "pyoxigraph-0.3.20-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9977786ed74e69b41a2e587f70c5407db093a65e0fbfe03f635a6f223d6f5364"}, - {file = "pyoxigraph-0.3.20-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c413fd53e33e7c2df44728a414c266270708dbf1f96899931f95eb3512682e8a"}, - {file = "pyoxigraph-0.3.20.tar.gz", hash = "sha256:e7bae3552188c30df847123843f3ed7bcb3b68537dfa253bbdda2047fa5cc568"}, + {file = "pyoxigraph-0.3.22-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49609d3c8d6637193872181e8f9d8b85ae304b3d944b1d50a2e363bd4d3ad878"}, + {file = "pyoxigraph-0.3.22-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb0a0f2bd4348e9b92fbb92c71f449b7e42f6ac6fb67ce5797cbd8ab3b673c86"}, + {file = "pyoxigraph-0.3.22-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:95c43d3da6d43460368f0a5f4b497412b0d6509e55eb12245b0f173248118656"}, + {file = "pyoxigraph-0.3.22-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9d466025962895e67a7c4a4ba303fe23a911f99d2158f5f53eb50f56949125f"}, + {file = "pyoxigraph-0.3.22-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90dc1e4010e2011c5440b7a3832153a14f52257e12a90a0d7fc6ed16e88a7961"}, + {file = "pyoxigraph-0.3.22-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:469039b1ed6a31fef59b8b6c2ef5c836dd147944aa7120b4f4e6db4fd5abf60a"}, + {file = "pyoxigraph-0.3.22-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2baadd8dba65ff91bdcdf85e57d928806d94612b85da58d64526f0f1d5cd4df"}, + {file = "pyoxigraph-0.3.22-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f7e217e82e541f7df4697705c7cbfbd62e019c50786669647cb261445d75215"}, + {file = "pyoxigraph-0.3.22-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c99cd7d305a5f154d6fa7eca3a93b153ac94ad2a4aff6c404ec56db38d538ea4"}, + {file = "pyoxigraph-0.3.22-cp37-abi3-macosx_10_14_x86_64.macosx_11_0_arm64.macosx_10_14_universal2.whl", hash = "sha256:32d5630c9fb3d7b819a25401b3afdbd01dbfc9624b1519d41216622fe3af52e6"}, + {file = "pyoxigraph-0.3.22-cp37-abi3-macosx_10_14_x86_64.whl", hash = "sha256:6368f24bc236a6055171f4a80cb63b9ad76fcbdbcb4a3ef981eb6d86d8975c11"}, + {file = "pyoxigraph-0.3.22-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:821e1103cf1e8f12d0738cf1b2625c8374758e33075ca67161ead3669f53e4cb"}, + {file = "pyoxigraph-0.3.22-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630f1090d67d1199c86f358094289816e0c00a21000164cfe06499c8689f8b9e"}, + {file = "pyoxigraph-0.3.22-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1aca511243209005da32470bbfec9e023ac31095bbeaa8cedabe0a652adce38c"}, + {file = "pyoxigraph-0.3.22-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:60b7f13331b91827e2edfa8633ffb7e3bfc8630b708578fb0bc8d43c76754f20"}, + {file 
= "pyoxigraph-0.3.22-cp37-abi3-win_amd64.whl", hash = "sha256:9a4ffd8ce28c3e8ce888662e0d9e9155e5226ecd8cd967f3c46391cf266c4c1d"}, + {file = "pyoxigraph-0.3.22-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4b8fde463e507c394f5b165a7a2571fd74028a8b343c161d81f63eb83a7d7c7"}, + {file = "pyoxigraph-0.3.22-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6ad3d8037af4ab5b1de75999fd2ba1b93becf24a9ee5e46ea0ee20a4efe270b"}, + {file = "pyoxigraph-0.3.22-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9211b2a9d9f13875aec4acede8e1395ff617d64ac7cff0f80cbaf4c08fc8b648"}, + {file = "pyoxigraph-0.3.22-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00645cb370ebafc79cfecd08c5ac4656469af9ec450cb9207d94f6939e26ba0e"}, + {file = "pyoxigraph-0.3.22-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6d55de26adabe7d6fece9e1dad4556d648c4166ee79d65e4f7c64acd898656e"}, + {file = "pyoxigraph-0.3.22-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e2bebace02e29d1cf3bc324815058f50b2ff59980a02193280a89c905d8437ab"}, + {file = "pyoxigraph-0.3.22-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e363d0b788f870b1008bb75e41a31b01a6277d9a7cc028ed6534a23bba69e60"}, + {file = "pyoxigraph-0.3.22-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0508eb4515ce1b3c7548d3f9382c1b366f6602c2e01e9e036c20e730d8fece47"}, + {file = "pyoxigraph-0.3.22-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33ca01c1727e079af3335883d75e5390619e7d2ece813c8065ba1cbcd71d17a3"}, + {file = "pyoxigraph-0.3.22-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55322d5b9b852c4813c293575aa5e676cec19c617d0aad5ae7ce47c49b113f0b"}, + {file = "pyoxigraph-0.3.22-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3397138f3a6d2c3299250ebde2bca7c95a25b58b29009eb0b29c2f5d1438d954"}, + {file = "pyoxigraph-0.3.22-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1031f91a0e75c6cd3ae9008f2d5bcdd7b2832bc1354f40dcab04ef7957f1140b"}, + {file = "pyoxigraph-0.3.22-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16f44f28fff015d310840c9744cdaaa31f6c1a548918c2316873f10bba76e17f"}, + {file = "pyoxigraph-0.3.22.tar.gz", hash = "sha256:430b18cb3cec37b8c71cee0f70ea10601b9e479f1b8c364861660ae9f8629fd9"}, ] [[package]] @@ -1285,13 +1310,13 @@ files = [ [[package]] name = "rdflib" -version = "6.3.2" +version = "7.0.0" description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information." 
optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8.1,<4.0.0" files = [ - {file = "rdflib-6.3.2-py3-none-any.whl", hash = "sha256:36b4e74a32aa1e4fa7b8719876fb192f19ecd45ff932ea5ebbd2e417a0247e63"}, - {file = "rdflib-6.3.2.tar.gz", hash = "sha256:72af591ff704f4caacea7ecc0c5a9056b8553e0489dd4f35a9bc52dbd41522e0"}, + {file = "rdflib-7.0.0-py3-none-any.whl", hash = "sha256:0438920912a642c866a513de6fe8a0001bd86ef975057d6962c79ce4771687cd"}, + {file = "rdflib-7.0.0.tar.gz", hash = "sha256:9995eb8569428059b8c1affd26b25eac510d64f5043d9ce8c84e0d0036e995ae"}, ] [package.dependencies] @@ -1376,13 +1401,13 @@ wheel = ">=0.36.1" [[package]] name = "setuptools" -version = "69.0.1" +version = "69.0.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.1-py3-none-any.whl", hash = "sha256:6875bbd06382d857b1b90cd07cee6a2df701a164f241095706b5192bc56c5c62"}, - {file = "setuptools-69.0.1.tar.gz", hash = "sha256:f25195d54deb649832182d6455bffba7ac3d8fe71d35185e738d2198a4310044"}, + {file = "setuptools-69.0.2-py3-none-any.whl", hash = "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2"}, + {file = "setuptools-69.0.2.tar.gz", hash = "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"}, ] [package.extras] @@ -1463,7 +1488,7 @@ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, @@ -1474,7 +1499,7 @@ name = "starlette" version = "0.27.0" description = "The little ASGI library that shines." optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ {file = "starlette-0.27.0-py3-none-any.whl", hash = "sha256:918416370e846586541235ccd38a474c08b80443ed31c578a418e2209b3eef91"}, {file = "starlette-0.27.0.tar.gz", hash = "sha256:6a6b0d042acb8d469a01eba54e9cda6cbd24ac602c4cd016723117d6a7e73b75"}, @@ -1499,10 +1524,10 @@ files = [ [[package]] name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, @@ -1513,7 +1538,7 @@ name = "urllib3" version = "2.1.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, @@ -1544,19 +1569,19 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", [[package]] name = "virtualenv" -version = "20.24.6" +version = "20.25.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.24.6-py3-none-any.whl", hash = "sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381"}, - {file = "virtualenv-20.24.6.tar.gz", hash = "sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af"}, + {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, + {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, ] [package.dependencies] distlib = ">=0.3.7,<1" filelock = ">=3.12.2,<4" -platformdirs = ">=3.9.1,<4" +platformdirs = ">=3.9.1,<5" [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] @@ -1564,13 +1589,13 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "wheel" -version = "0.41.3" +version = "0.42.0" description = "A built-package format for Python" optional = false python-versions = ">=3.7" files = [ - {file = "wheel-0.41.3-py3-none-any.whl", hash = "sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942"}, - {file = "wheel-0.41.3.tar.gz", hash = "sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841"}, + {file = "wheel-0.42.0-py3-none-any.whl", hash = "sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d"}, + {file = "wheel-0.42.0.tar.gz", hash = "sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8"}, ] [package.extras] @@ -1579,4 +1604,4 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "40d4b5bc8d9efe4e318e045a36275e2a36f2ffe307aa7ddb3189fe10e68cb372" +content-hash = "67c2ad668e9468f8b6121615f92ac0e7fe4125ae47337670d14f52673d6f7896" diff --git a/prez/app.py b/prez/app.py index c2dd201f..bbb127be 100644 --- a/prez/app.py +++ b/prez/app.py @@ -3,7 +3,6 @@ import uvicorn from fastapi import FastAPI -from fastapi.openapi.utils import get_openapi from rdflib import Graph from starlette.middleware.cors import CORSMiddleware @@ -14,25 +13,23 @@ load_local_data_to_oxigraph, get_oxrdflib_store, get_system_store, - load_profile_data_to_oxigraph, + load_system_data_to_oxigraph, ) from prez.models.model_exceptions import ( ClassNotFoundException, URINotFoundException, NoProfilesException, ) -from prez.routers.catprez import router as catprez_router from prez.routers.cql import router as cql_router -from prez.routers.catprez import router as catprez_router from prez.routers.identifier import router as identifier_router from prez.routers.management import router as management_router from prez.routers.object import router as object_router -from prez.routers.ogc_catprez import router as ogc_router +from prez.routers.ogc_vocprez import router as vocprez_router +from 
prez.routers.ogc_spaceprez import router as spaceprez_router +from prez.routers.ogc_catprez import router as catprez_router from prez.routers.profiles import router as profiles_router from prez.routers.search import router as search_router -from prez.routers.spaceprez import router as spaceprez_router from prez.routers.sparql import router as sparql_router -from prez.routers.vocprez import router as vocprez_router from prez.services.app_service import ( healthcheck_sparql_endpoints, count_objects, @@ -77,8 +74,6 @@ app.include_router(vocprez_router) if "SpacePrez" in settings.prez_flavours: app.include_router(spaceprez_router) -if "OGCPrez" in settings.prez_flavours: - app.include_router(ogc_router) app.include_router(identifier_router) @@ -150,7 +145,7 @@ async def app_startup(): await add_common_context_ontologies_to_tbox_cache() app.state.pyoxi_system_store = get_system_store() - await load_profile_data_to_oxigraph(app.state.pyoxi_system_store) + await load_system_data_to_oxigraph(app.state.pyoxi_system_store) @app.on_event("shutdown") diff --git a/prez/config.py b/prez/config.py index d2f9184a..20fd72d8 100644 --- a/prez/config.py +++ b/prez/config.py @@ -30,7 +30,6 @@ class Settings(BaseSettings): sparql_endpoint: Optional[str] = None sparql_username: Optional[str] = None sparql_password: Optional[str] = None - sparql_auth: Optional[tuple] protocol: str = "http" host: str = "localhost" port: int = 8000 @@ -38,15 +37,23 @@ class Settings(BaseSettings): system_uri: Optional[str] = f"{protocol}://{host}:{port}" order_lists_by_label: bool = True prez_flavours: Optional[list] = ["SpacePrez", "VocPrez", "CatPrez", "ProfilesPrez"] - label_predicates = [SKOS.prefLabel, DCTERMS.title, RDFS.label, SDO.name] - description_predicates = [SKOS.definition, DCTERMS.description, SDO.description] - provenance_predicates = [DCTERMS.provenance] - other_predicates = [SDO.color, REG.status] - sparql_timeout = 30.0 + label_predicates: Optional[List[URIRef]] = [ + SKOS.prefLabel, + DCTERMS.title, + RDFS.label, + SDO.name, + ] + description_predicates: Optional[List[URIRef]] = [ + SKOS.definition, + DCTERMS.description, + SDO.description, + ] + provenance_predicates: Optional[List[URIRef]] = [DCTERMS.provenance] + other_predicates: Optional[List[URIRef]] = [SDO.color, REG.status] sparql_repo_type: str = "remote" - - log_level = "INFO" - log_output = "stdout" + sparql_timeout: int = 30 + log_level: str = "INFO" + log_output: str = "stdout" prez_title: Optional[str] = "Prez" prez_desc: Optional[str] = ( "A web framework API for delivering Linked Data. 
It provides read-only access to " ) prez_version: Optional[str] = None disable_prefix_generation: bool = False - - @root_validator() - def get_version(cls, values): - version = environ.get("PREZ_VERSION") - values["prez_version"] = version - - if version is None or version == "": - values["prez_version"] = toml.load( - Path(Path(__file__).parent.parent) / "pyproject.toml" - )["tool"]["poetry"]["version"] + default_language: str = "en" + local_rdf_dir: str = "rdf" # @root_validator() # def check_endpoint_enabled(cls, values): diff --git a/prez/dependencies.py b/prez/dependencies.py index a3779fe8..344f6c2d 100644 --- a/prez/dependencies.py +++ b/prez/dependencies.py @@ -1,14 +1,19 @@ import json -import urllib from pathlib import Path from typing import Optional -from pydantic import BaseModel import httpx from fastapi import Depends, Request, HTTPException +from pydantic import BaseModel from pyoxigraph import Store -from prez.cache import store, oxrdflib_store +from prez.cache import ( + store, + oxrdflib_store, + system_store, + profiles_graph_cache, + endpoints_graph_cache, +) from prez.config import settings from prez.sparql.methods import PyoxigraphRepo, RemoteSparqlRepo, OxrdflibRepo from temp.cql2sparql import CQLParser @@ -27,6 +32,10 @@ def get_pyoxi_store(): return store +def get_system_store(): + return system_store + + def get_oxrdflib_store(): return oxrdflib_store @@ -43,14 +52,37 @@ async def get_repo( return RemoteSparqlRepo(http_async_client) +async def get_system_repo( + pyoxi_store: Store = Depends(get_system_store), +): + """ + A pyoxigraph Store with Prez system data including: + - Profiles + # TODO add and test other system data (endpoints etc.) + """ + return PyoxigraphRepo(pyoxi_store) + + async def load_local_data_to_oxigraph(store: Store): """ Loads all the data from the local data directory into the local SPARQL endpoint """ - for file in (Path(__file__).parent.parent / "rdf").glob("*.ttl"): + for file in (Path(__file__).parent.parent / settings.local_rdf_dir).glob("*.ttl"): store.load(file.read_bytes(), "text/turtle") +async def load_system_data_to_oxigraph(store: Store): + """ + Loads Prez system data (profiles and endpoint definitions) from the in-memory graph caches into the system store + """ + # TODO refactor to use the local files directly + profiles_bytes = profiles_graph_cache.serialize(format="nt", encoding="utf-8") + store.load(profiles_bytes, "application/n-triples") + + endpoints_bytes = endpoints_graph_cache.serialize(format="nt", encoding="utf-8") + store.load(endpoints_bytes, "application/n-triples") + + class CQLRequest(BaseModel): cql: Optional[dict] diff --git a/prez/models/object_item.py b/prez/models/object_item.py index 03921a69..ff348ae6 100644 --- a/prez/models/object_item.py +++ b/prez/models/object_item.py @@ -23,3 +23,37 @@ class Config: def __hash__(self): return hash(self.uri) + + # @root_validator + # def populate(cls, values): + # values["top_level_listing"] = False # this class is for objects, not listings.
+ # uri_str = values.get("uri") + # endpoint_uri_str = values.get("endpoint_uri") + # if endpoint_uri_str: + # endpoint_uri = URIRef(endpoint_uri_str) + # values["classes"] = frozenset( + # [ + # klass + # for klass in endpoints_graph_cache.objects( + # endpoint_uri, ONT.deliversClasses, None + # ) + # ] + # ) + # values["base_class"] = endpoints_graph_cache.value( + # endpoint_uri, ONT.baseClass + # ) + # else: + # try: + # values["classes"] = frozenset( + # tup[1] for tup in get_classes([values["uri"]]) + # ) + # except ClassNotFoundException: + # # TODO return a generic DESCRIBE on the object - we can't use any of prez's profiles/endpoints to render + # # information about the object, but we can provide any RDF we have for it. + # pass + # if uri_str: + # values["uri"] = URIRef(uri_str) + # else: + # values["uri"] = get_uri_for_curie_id(values["uri"]) + # + # return values diff --git a/prez/models/profiles_item.py b/prez/models/profiles_item.py index f4f97c53..f29b1585 100644 --- a/prez/models/profiles_item.py +++ b/prez/models/profiles_item.py @@ -41,8 +41,7 @@ def populate(self): q = f"""SELECT ?class {{ <{self.uri}> a ?class }}""" r = profiles_graph_cache.query(q) if len(r.bindings) > 0: - values["classes"] = frozenset([prof.get("class") for prof in r.bindings]) - label = values.get("label") + self.classes = frozenset([prof.get("class") for prof in r.bindings]) + label = self.label if not label: - values["label"] = settings.label_predicates[0] - return values + self.label = settings.label_predicates[0] diff --git a/prez/reference_data/new_endpoints/cql_endpoints.ttl b/prez/reference_data/new_endpoints/cql_endpoints.ttl index dfc84dd5..cedf762e 100644 --- a/prez/reference_data/new_endpoints/cql_endpoints.ttl +++ b/prez/reference_data/new_endpoints/cql_endpoints.ttl @@ -6,12 +6,13 @@ PREFIX ont: PREFIX rdfs: PREFIX sh: PREFIX skos: +PREFIX shext: endpoint:cql a ont:ListingEndpoint ; ont:endpointTemplate "/cql" ; ont:deliversClasses prez:CQLObjectList ; # required to determine the correct profile for ConnegP sh:targetClass rdfs:Class ; # required for query construction + shext:limit 20 ; + shext:offset 0 ; . - -endpoint:cql ont:defaultLimit 20 ; ont:defaultOffset 0 . diff --git a/prez/reference_data/new_endpoints/ogc_catprez_endpoints.ttl b/prez/reference_data/new_endpoints/ogc_catprez_endpoints.ttl new file mode 100644 index 00000000..bccd8bf9 --- /dev/null +++ b/prez/reference_data/new_endpoints/ogc_catprez_endpoints.ttl @@ -0,0 +1,81 @@ +PREFIX dcat: +PREFIX dcterms: +PREFIX endpoint: +PREFIX prez: +PREFIX ont: +PREFIX sh: +prefix skos: +PREFIX shext: +PREFIX xsd: + + +endpoint:top-level-catalog-listing a ont:ListingEndpoint ; + ont:endpointTemplate "/c/catalogs" ; + ont:deliversClasses prez:CatalogList ; # required to determine the correct profile for ConnegP + sh:targetClass dcat:Catalog ; # required for query construction + shext:limit 20 ; + shext:offset 0 ; + sh:target [ + sh:select """SELECT ?focus_node + WHERE { + ?focus_node dcterms:hasPart ?child_catalog . + ?child_catalog a dcat:Catalog . + } + """ ] ; +. + +endpoint:top-level-catalog-object a ont:ObjectEndpoint ; + sh:targetNode "$object" ; + ont:deliversClasses dcat:Catalog ; # required for link generation for objects + ont:endpointTemplate "/c/catalogs/$object" ; + ont:parentEndpoint endpoint:top-level-catalog-listing ; +. 
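
[Editor's aside: a minimal sketch, not Prez's actual implementation, of how the listing declarations above could be consumed at request time. The helper name and example URI are hypothetical. The sh:select supplies the WHERE clause, $parent_N/$object slots are substituted per request, and the shext:limit/shext:offset annotations supply the pagination that was previously hardcoded in each query.]

from string import Template

def build_listing_query(select_template: str, limit: int, offset: int,
                        bindings: dict | None = None) -> str:
    # Fill $parent_N / $object slots (unknown slots are left intact), then
    # append pagination read from the shext:limit / shext:offset annotations.
    query = Template(select_template).safe_substitute(bindings or {})
    return f"{query}\nLIMIT {limit}\nOFFSET {offset}"

# sh:select of endpoint:lower-level-catalog-listing (defined next), with
# $parent_1 bound to a hypothetical top-level catalog URI; prefix
# declarations are assumed to be prepended separately.
lower_level = """SELECT ?focus_node
WHERE {
  $parent_1 dcterms:hasPart ?focus_node .
}"""
print(build_listing_query(lower_level, 20, 0,
                          {"parent_1": "<https://example.org/catalog/top>"}))
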
+ +endpoint:lower-level-catalog-listing a ont:ListingEndpoint ; + ont:endpointTemplate "/c/catalogs/$parent_1/collections" ; + ont:deliversClasses prez:CatalogList ; # required to determine the correct profile for ConnegP + ont:ParentToFocusRelation dcterms:hasPart ; + sh:targetClass dcat:Catalog ; # required for query construction + ont:parentEndpoint endpoint:top-level-catalog-object ; + shext:limit 20 ; + shext:offset 0 ; + sh:target [ + sh:select """SELECT ?focus_node + WHERE { + $parent_1 dcterms:hasPart ?focus_node . + } + """ ] ; +. + +endpoint:lower-level-catalog-object a ont:ObjectEndpoint ; + sh:targetNode "$object" ; + ont:deliversClasses dcat:Catalog ; # required for link generation for objects + ont:endpointTemplate "/c/catalogs/$parent_1/collections/$object" ; + ont:ParentToFocusRelation dcterms:hasPart ; + ont:parentEndpoint endpoint:lower-level-catalog-listing ; +. + +endpoint:resource-listing a ont:ListingEndpoint ; + ont:endpointTemplate "/c/catalogs/$parent_2/collections/$parent_1/items" ; + ont:deliversClasses prez:ResourceList ; # required to determine the correct profile for ConnegP + ont:ParentToFocusRelation dcterms:hasPart ; + sh:targetClass dcat:Resource ; # required for query construction + ont:parentEndpoint endpoint:lower-level-catalog-object ; + shext:limit 20 ; + shext:offset 0 ; + sh:target [ + sh:select """SELECT ?focus_node + WHERE { + $parent_1 dcterms:hasPart ?focus_node . + } + """ ] ; +. + +endpoint:resource-object a ont:ObjectEndpoint ; + sh:targetNode "$object" ; + ont:deliversClasses dcat:Resource ; # required for link generation for objects + ont:endpointTemplate "/c/catalogs/$parent_2/collections/$parent_1/items/$object" ; + ont:ParentToFocusRelation dcterms:hasPart ; + ont:parentEndpoint endpoint:resource-listing ; +. + diff --git a/prez/reference_data/new_endpoints/ogc_endpoints.ttl b/prez/reference_data/new_endpoints/ogc_endpoints.ttl deleted file mode 100644 index c1c8e6ad..00000000 --- a/prez/reference_data/new_endpoints/ogc_endpoints.ttl +++ /dev/null @@ -1,58 +0,0 @@ -PREFIX dcat: -PREFIX dcterms: -PREFIX endpoint: -PREFIX prez: -PREFIX ont: -PREFIX sh: -prefix skos: - -endpoint:catalog-listing a ont:ListingEndpoint ; - ont:endpointTemplate "/catalogs" ; - ont:deliversClasses prez:CatalogList ; # required to determine the correct profile for ConnegP - sh:targetClass dcat:Catalog ; # required for query construction -. - -endpoint:catalog-object a ont:ObjectEndpoint ; - sh:targetNode "$object" ; - ont:deliversClasses dcat:Catalog ; # required for link generation for objects - ont:endpointTemplate "/catalogs/$object" ; - ont:parentEndpoint endpoint:catalog-listing ; -. - -endpoint:vocab-listing a ont:ListingEndpoint ; - ont:endpointTemplate "/catalogs/$parent_1/collections" ; - ont:deliversClasses prez:SchemesList ; # required to determine the correct profile for ConnegP - ont:ParentToFocusRelation dcterms:hasPart ; - sh:targetClass skos:ConceptScheme ; # required for query construction - ont:parentEndpoint endpoint:catalog-object ; -. - -endpoint:vocab-object a ont:ObjectEndpoint ; - sh:targetNode "$object" ; - ont:deliversClasses skos:ConceptScheme ; # required for link generation for objects - ont:endpointTemplate "/catalogs/$parent_1/collections/$object" ; - ont:ParentToFocusRelation dcterms:hasPart ; - ont:parentEndpoint endpoint:vocab-listing ; -. 
- -endpoint:concept-listing a ont:ListingEndpoint ; - ont:endpointTemplate "/catalogs/$parent_2/collections/$parent_1/items" ; - ont:deliversClasses prez:ConceptList ; # required to determine the correct profile for ConnegP - ont:ParentToFocusRelation skos:hasTopConcept ; - sh:targetClass skos:Concept ; # required for query construction - ont:parentEndpoint endpoint:vocab-object ; -. - -endpoint:concept-object a ont:ObjectEndpoint ; - sh:targetNode "$object" ; - ont:deliversClasses skos:Concept ; # required for link generation for objects - ont:endpointTemplate "/catalogs/$parent_2/collections/$parent_1/items/$object" ; - ont:ParentToFocusRelation skos:hasTopConcept ; - ont:parentEndpoint endpoint:concept-listing ; -. - -endpoint:catalog-listing ont:defaultLimit 20 ; ont:defaultOffset 0 . - -endpoint:vocab-listing ont:defaultLimit 20 ; ont:defaultOffset 0 . - -endpoint:concept-listing ont:defaultLimit 20 ; ont:defaultOffset 0 . \ No newline at end of file diff --git a/prez/reference_data/new_endpoints/spaceprez_endpoints.ttl b/prez/reference_data/new_endpoints/ogc_spaceprez_endpoints.ttl similarity index 63% rename from prez/reference_data/new_endpoints/spaceprez_endpoints.ttl rename to prez/reference_data/new_endpoints/ogc_spaceprez_endpoints.ttl index c82b483f..97af374e 100644 --- a/prez/reference_data/new_endpoints/spaceprez_endpoints.ttl +++ b/prez/reference_data/new_endpoints/ogc_spaceprez_endpoints.ttl @@ -6,6 +6,7 @@ PREFIX prez: PREFIX rdfs: PREFIX sh: PREFIX xsd: +PREFIX shext: endpoint:spaceprez-home a ont:Endpoint ; ont:endpointTemplate "/s" ; @@ -15,31 +16,32 @@ endpoint:dataset-listing a ont:ListingEndpoint ; ont:deliversClasses prez:DatasetList ; sh:targetClass dcat:Dataset ; ont:isTopLevelEndpoint "true"^^xsd:boolean ; - ont:endpointTemplate "/s/datasets" ; + ont:endpointTemplate "/s/catalogs" ; + shext:limit 20 ; + shext:offset 0 ; . endpoint:dataset-object a ont:ObjectEndpoint ; sh:targetNode "$object" ; ont:parentEndpoint endpoint:dataset-listing ; ont:deliversClasses dcat:Dataset ; - ont:endpointTemplate "/s/datasets/$object" ; + ont:endpointTemplate "/s/catalogs/$object" ; . endpoint:feature-collection-listing a ont:ListingEndpoint ; - ont:parentEndpoint endpoint:dataset ; + ont:parentEndpoint endpoint:dataset-object ; sh:targetClass geo:FeatureCollection ; ont:deliversClasses prez:FeatureCollectionList ; - ont:endpointTemplate "/s/datasets/$parent_1/collections" ; + ont:endpointTemplate "/s/catalogs/$parent_1/collections" ; ont:ParentToFocusRelation rdfs:member ; + shext:limit 20 ; + shext:offset 0 ; + shext:orderBy [ sh:path rdfs:label ] ; sh:target [ sh:select """SELECT ?focus_node WHERE { $parent_1 rdfs:member ?focus_node . - ?focus_node rdfs:label ?label . } - ORDER BY ?label - LIMIT $limit - OFFSET $offset """ ] ; . @@ -47,7 +49,7 @@ endpoint:feature-collection-object a ont:ObjectEndpoint ; sh:targetNode "$object" ; ont:parentEndpoint endpoint:feature-collection-listing ; ont:deliversClasses geo:FeatureCollection ; - ont:endpointTemplate "/s/datasets/$parent_1/collections/$object" ; + ont:endpointTemplate "/s/catalogs/$parent_1/collections/$object" ; ont:ParentToFocusRelation rdfs:member ; . 
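
[Editor's aside: the hunks in this file move ORDER BY/LIMIT/OFFSET out of the inline sh:select queries and into shext:orderBy/shext:limit/shext:offset annotations. Below is a hedged sketch of reading those annotations back with rdflib; the shext namespace IRI is elided in this extract and assumed here, and the helper is illustrative only.]

from rdflib import Graph, Namespace, URIRef
from rdflib.namespace import SH

SHEXT = Namespace("http://example.com/shacl-extension#")  # assumed IRI

def pagination_clauses(endpoints: Graph, endpoint: URIRef) -> str:
    # Build the trailing solution-modifier clauses for a listing query from
    # the endpoint's annotations; the query body is expected to bind the
    # value of the shext:orderBy sh:path (rdfs:label here) as ?order_by_value.
    clauses = []
    order_by = endpoints.value(endpoint, SHEXT.orderBy)
    if order_by is not None:
        path = endpoints.value(order_by, SH.path)
        clauses.append(f"# ordered by values of <{path}>\nORDER BY ?order_by_value")
    limit = endpoints.value(endpoint, SHEXT.limit)
    if limit is not None:
        clauses.append(f"LIMIT {limit}")
    offset = endpoints.value(endpoint, SHEXT.offset)
    if offset is not None:
        clauses.append(f"OFFSET {offset}")
    return "\n".join(clauses)
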
@@ -55,33 +57,23 @@ endpoint:feature-listing a ont:ListingEndpoint ; ont:parentEndpoint endpoint:feature-collection-object ; sh:targetClass geo:Feature ; ont:deliversClasses prez:FeatureList ; - ont:endpointTemplate "/s/datasets/$parent_2/collections/$parent_1/items" ; + ont:endpointTemplate "/s/catalogs/$parent_2/collections/$parent_1/items" ; ont:ParentToFocusRelation rdfs:member ; - sh:target [ - sh:select """SELECT ?focus_node - WHERE { - $parent_1 rdfs:member ?focus_node . - ?focus_node rdfs:label ?label . - } - ORDER BY ?label - LIMIT $limit - OFFSET $offset - """ ] ; + shext:limit 20 ; + shext:offset 0 ; + shext:orderBy [ sh:path rdfs:label ] ; + sh:target [ + sh:select """SELECT ?focus_node + WHERE { + $parent_1 rdfs:member ?focus_node . + } + """ ] ; . endpoint:feature-object a ont:ObjectEndpoint ; sh:targetNode "$object" ; ont:parentEndpoint endpoint:feature-listing ; ont:deliversClasses geo:Feature ; - ont:endpointTemplate "/s/datasets/$parent_2/collections/$parent_1/items/$object" ; + ont:endpointTemplate "/s/catalogs/$parent_2/collections/$parent_1/items/$object" ; ont:ParentToFocusRelation rdfs:member ; . - -endpoint:feature-listing ont:defaultLimit 20 ; - ont:defaultOffset 0 . - -endpoint:feature-collection-listing ont:defaultLimit 20 ; - ont:defaultOffset 0 . - -endpoint:dataset-listing ont:defaultLimit 20 ; - ont:defaultOffset 0 . \ No newline at end of file diff --git a/prez/reference_data/new_endpoints/ogc_vocprez_endpoints.ttl b/prez/reference_data/new_endpoints/ogc_vocprez_endpoints.ttl new file mode 100644 index 00000000..5b93bc05 --- /dev/null +++ b/prez/reference_data/new_endpoints/ogc_vocprez_endpoints.ttl @@ -0,0 +1,126 @@ +PREFIX dcat: +PREFIX dcterms: +PREFIX endpoint: +PREFIX prez: +PREFIX ont: +PREFIX sh: +prefix skos: +PREFIX shext: +PREFIX xsd: +PREFIX rdfs: + + +endpoint:catalog-listing a ont:ListingEndpoint ; + ont:endpointTemplate "/v/catalogs" ; + ont:deliversClasses prez:CatalogList ; # required to determine the correct profile for ConnegP + sh:targetClass dcat:Catalog ; # required for query construction + shext:limit 20 ; + shext:offset 0 ; + sh:target [ + sh:select """SELECT ?focus_node + WHERE { + ?focus_node dcterms:hasPart ?child_vocab . + ?child_vocab a skos:ConceptScheme . + } + """ ] ; +. + +endpoint:catalog-object a ont:ObjectEndpoint ; + sh:targetNode "$object" ; + ont:deliversClasses dcat:Catalog ; # required for link generation for objects + ont:endpointTemplate "/v/catalogs/$object" ; + ont:parentEndpoint endpoint:catalog-listing ; +. + +endpoint:vocab-listing a ont:ListingEndpoint ; + ont:endpointTemplate "/v/catalogs/$parent_1/collections" ; + ont:deliversClasses prez:SchemesList ; # required to determine the correct profile for ConnegP + ont:ParentToFocusRelation dcterms:hasPart ; + sh:targetClass skos:ConceptScheme ; # required for query construction + ont:parentEndpoint endpoint:catalog-object ; + shext:limit 20 ; + shext:offset 0 ; + sh:target [ + sh:select """SELECT ?focus_node + WHERE { + $parent_1 dcterms:hasPart ?focus_node . + } + """ ] ; +. + +endpoint:vocab-object a ont:ObjectEndpoint ; + sh:targetNode "$object" ; + ont:deliversClasses skos:ConceptScheme ; # required for link generation for objects + ont:endpointTemplate "/v/catalogs/$parent_1/collections/$object" ; + ont:ParentToFocusRelation dcterms:hasPart ; + ont:parentEndpoint endpoint:vocab-listing ; +. 
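
[Editor's aside: the endpoint templates above drive link generation as well as routing. For an object, the $object slot takes the focus node's curie and each $parent_N slot is filled by walking ont:parentEndpoint and following the declared relation — dcterms:hasPart here, skos:inScheme for the concept endpoints that follow. A minimal sketch with hypothetical curies:]

from string import Template

def mint_link(template: str, object_curie: str, parent_curies: list) -> str:
    # parent_curies[0] fills $parent_1 (the immediate parent), and so on
    # up the ont:parentEndpoint chain.
    bindings = {"object": object_curie}
    for i, curie in enumerate(parent_curies, start=1):
        bindings[f"parent_{i}"] = curie
    return Template(template).substitute(bindings)

link = mint_link("/v/catalogs/$parent_1/collections/$object",
                 object_curie="ex:my-vocab", parent_curies=["ex:my-catalog"])
assert link == "/v/catalogs/ex:my-catalog/collections/ex:my-vocab"
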
+ +endpoint:concept-listing a ont:ListingEndpoint ; + ont:endpointTemplate "/v/catalogs/$parent_2/collections/$parent_1/items" ; + ont:deliversClasses prez:ConceptList ; # required to determine the correct profile for ConnegP + ont:FocusToParentRelation skos:inScheme ; + sh:targetClass skos:Concept ; # required for query construction + ont:parentEndpoint endpoint:vocab-object ; + shext:limit 20 ; + shext:offset 0 ; + sh:target [ + sh:select """SELECT ?focus_node + WHERE { + ?focus_node skos:inScheme $parent_1 . + } + """ ] ; +. + +endpoint:concept-object a ont:ObjectEndpoint ; + sh:targetNode "$object" ; + ont:deliversClasses skos:Concept ; # required for link generation for objects + ont:endpointTemplate "/v/catalogs/$parent_2/collections/$parent_1/items/$object" ; + ont:FocusToParentRelation skos:inScheme ; + ont:parentEndpoint endpoint:concept-listing ; +. + +endpoint:narrowers + a ont:ListingEndpoint ; + ont:parentEndpoint endpoint:concept-object ; + ont:deliversClasses skos:Concept ; + sh:rule + [ + sh:object "?hasChildren" ; + sh:predicate prez:hasChildren ; + sh:subject sh:this ; + ] ; + sh:target + [ + sh:select """SELECT DISTINCT ?focus_node ?hasChildren + WHERE { + $parent_1 skos:narrower|^skos:broader ?focus_node . + BIND(EXISTS{?focus_node skos:narrower|^skos:broader ?grandChildren} AS ?hasChildren) + } + """ ; + ] ; + shext:limit 20 ; + shext:offset 0 ; + shext:orderBy [ sh:path rdfs:label ] ; + sh:targetClass skos:Concept ; + ont:endpointTemplate "/v/catalogs/$parent_3/collections/$parent_2/items/$parent_1/narrowers" ; +. + +endpoint:top-concepts a ont:ListingEndpoint ; + ont:deliversClasses skos:Concept ; + ont:parentEndpoint endpoint:vocab-object ; + sh:rule [ sh:subject sh:this ; + sh:predicate prez:hasChildren ; + sh:object "?hasChildren" ] ; + sh:target [ sh:select """SELECT DISTINCT ?focus_node ?hasChildren + WHERE { + $parent_1 skos:hasTopConcept|^skos:isTopConceptOf ?focus_node . + BIND(EXISTS{?focus_node skos:narrower|^skos:broader ?grandChildren} AS ?hasChildren) + } + """ ] ; + shext:limit 20 ; + shext:offset 0 ; + shext:orderBy [ sh:path rdfs:label ] ; + sh:targetClass skos:Concept ; + ont:endpointTemplate "/v/catalogs/$parent_2/collections/$parent_1/top-concepts" ; +. \ No newline at end of file diff --git a/prez/reference_data/new_endpoints/system_endpoints.ttl b/prez/reference_data/new_endpoints/system_endpoints.ttl new file mode 100644 index 00000000..befc3be9 --- /dev/null +++ b/prez/reference_data/new_endpoints/system_endpoints.ttl @@ -0,0 +1,24 @@ +PREFIX dcat: +PREFIX endpoint: +PREFIX geo: +PREFIX ont: +PREFIX prez: +PREFIX rdfs: +PREFIX sh: +PREFIX xsd: +PREFIX prof: +PREFIX shext: + +endpoint:profiles-listing a ont:ListingEndpoint ; + ont:deliversClasses prez:ProfilesList ; + sh:targetClass prof:Profile ; + ont:endpointTemplate "/profiles" ; + shext:limit 20 ; + shext:offset 0 ; +. + +endpoint:profile-object a ont:ObjectEndpoint ; + sh:targetNode "$object" ; + ont:deliversClasses prof:Profile ; + ont:endpointTemplate "/profiles/$object" ; +. 
\ No newline at end of file diff --git a/prez/reference_data/new_endpoints/vocprez_endpoints.ttl b/prez/reference_data/new_endpoints/vocprez_endpoints.ttl.unused similarity index 86% rename from prez/reference_data/new_endpoints/vocprez_endpoints.ttl rename to prez/reference_data/new_endpoints/vocprez_endpoints.ttl.unused index 1e9a3037..6cb1edfd 100644 --- a/prez/reference_data/new_endpoints/vocprez_endpoints.ttl +++ b/prez/reference_data/new_endpoints/vocprez_endpoints.ttl.unused @@ -5,6 +5,7 @@ PREFIX rdfs: PREFIX sh: PREFIX skos: PREFIX xsd: +PREFIX shext: endpoint:vocprez-home a ont:Endpoint ; ont:endpointTemplate "/v" ; @@ -87,29 +88,13 @@ endpoint:cs-children sh:select """SELECT DISTINCT ?focus_node ?hasChildren WHERE { $parent_1 skos:narrower|^skos:broader ?focus_node . - ?focus_node skos:prefLabel ?label . BIND(EXISTS{?focus_node skos:narrower|^skos:broader ?grandChildren} AS ?hasChildren) - } - ORDER BY ?label - LIMIT $limit - OFFSET $offset + } """ ; ] ; + shext:limit 20 ; + shext:offset 0 ; + shext:orderBy [ sh:path skos:prefLabel ] ; sh:targetClass skos:Concept ; ont:endpointTemplate "/v/vocabs/$parent_2/$parent_1/narrowers" ; . - -ont:ListingEndpoint ont:defaultLimit 20 ; - ont:defaultOffset 0 . - -endpoint:vocabs-listing ont:defaultLimit 20 ; - ont:defaultOffset 0 . - -endpoint:cs-children ont:defaultLimit 20 ; - ont:defaultOffset 0 . - -endpoint:cs-top-concepts ont:defaultLimit 20 ; - ont:defaultOffset 0 . - -endpoint:collection-listing ont:defaultLimit 20 ; - ont:defaultOffset 0 . \ No newline at end of file diff --git a/prez/reference_data/profiles/_dd_to_refactor.ttl b/prez/reference_data/profiles/_dd_to_refactor.ttl new file mode 100644 index 00000000..1793dfb9 --- /dev/null +++ b/prez/reference_data/profiles/_dd_to_refactor.ttl @@ -0,0 +1,57 @@ +PREFIX altr-ext: +PREFIX dcat: +PREFIX dcterms: +PREFIX geo: +PREFIX owl: +PREFIX prez: +PREFIX prof: +PREFIX rdf: +PREFIX rdfs: +PREFIX schema: +PREFIX sh: +PREFIX skos: +PREFIX reg: +PREFIX xsd: +PREFIX prov: +PREFIX shext: + + + + a prof:Profile ; + dcterms:description "A simple data model to provide items for form drop-down lists. The basic information is an ID & name tuple and the optional extra value is an item's parent. For vocabularies, this is then URI, prefLabel or URI, prefLabel & broader Concept" ; + dcterms:identifier "dd"^^xsd:token ; + dcterms:title "Drop-Down List" ; + altr-ext:constrainsClass + prez:SchemesList , + prez:VocPrezCollectionList , + skos:ConceptScheme , + skos:Collection ; + altr-ext:hasNodeShape [ + a sh:NodeShape ; + sh:targetClass skos:ConceptScheme ; + altr-ext:childToFocus skos:inScheme ; + altr-ext:relativeProperties skos:broader ; + ] ; + altr-ext:hasNodeShape [ + a sh:NodeShape ; + sh:targetClass skos:Collection ; + altr-ext:focusToChild skos:member ; + altr-ext:relativeProperties skos:definition ; + ] ; + altr-ext:hasNodeShape [ + a sh:NodeShape ; + sh:targetClass prez:SchemesList ; + altr-ext:containerClass skos:ConceptScheme ; + altr-ext:relativeProperties skos:definition, dcterms:publisher, reg:status ; + ] ; + altr-ext:hasNodeShape [ + a sh:NodeShape ; + sh:targetClass prez:VocPrezCollectionList ; + altr-ext:containerClass skos:Collection ; + altr-ext:relativeProperties skos:definition, dcterms:publisher, reg:status ; + ] ; + altr-ext:hasDefaultResourceFormat "application/json" ; + altr-ext:hasResourceFormat + "application/json" , + "text/csv" +. 
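
[Editor's aside: a sketch of how a renderer could consume the node shapes in the drop-down profile above. The altr-ext namespace IRI is elided in this extract and assumed below; the helper is not Prez's API.]

from rdflib import Graph, Namespace, URIRef
from rdflib.namespace import SH

ALTR_EXT = Namespace("http://www.w3.org/ns/dx/conneg/altr-ext#")  # assumed IRI

def relative_properties(profiles: Graph, profile: URIRef, klass: URIRef) -> list:
    # Locate the profile's node shape targeting klass and return the extra
    # predicates (altr-ext:relativeProperties) it asks to have included.
    for shape in profiles.objects(profile, ALTR_EXT.hasNodeShape):
        if (shape, SH.targetClass, klass) in profiles:
            return list(profiles.objects(shape, ALTR_EXT.relativeProperties))
    return []
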
diff --git a/prez/reference_data/profiles/catprez_default_profiles.ttl b/prez/reference_data/profiles/catprez_default_profiles.ttl deleted file mode 100644 index 5fbeab41..00000000 --- a/prez/reference_data/profiles/catprez_default_profiles.ttl +++ /dev/null @@ -1,89 +0,0 @@ -PREFIX altr-ext: -PREFIX dcat: -PREFIX dcterms: -PREFIX geo: -PREFIX owl: -PREFIX prez: -PREFIX prof: -PREFIX prov: -PREFIX rdf: -PREFIX rdfs: -PREFIX sh: -PREFIX skos: -PREFIX xsd: -PREFIX shext: - - -prez:CatPrezProfile - a prof:Profile ; - prez:supportedSearchMethod prez:exactMatch , prez:jenaFTName ; - dcterms:identifier "catprez"^^xsd:token ; - dcterms:description "A system profile for CatPrez" ; - skos:prefLabel "CatPrez profile" ; - altr-ext:constrainsClass prez:CatPrez ; - altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - altr-ext:hasNodeShape [ - a sh:NodeShape ; - sh:targetClass prez:CatalogList ; - altr-ext:hasDefaultProfile - ] , [ - a sh:NodeShape ; - sh:targetClass dcat:Catalog ; - altr-ext:hasDefaultProfile - ] , [ - a sh:NodeShape ; - sh:targetClass dcat:Resource ; - altr-ext:hasDefaultProfile - ] , [ - a sh:NodeShape ; - sh:targetClass prez:ResourceList ; - altr-ext:hasDefaultProfile - ] - . - - - a prof:Profile , prez:CatPrezProfile ; - dcterms:description "Dataset Catalog Vocabulary (DCAT) is a W3C-authored RDF vocabulary designed to facilitate interoperability between data catalogs" ; - dcterms:identifier "dcat"^^xsd:token ; - dcterms:title "DCAT" ; - altr-ext:constrainsClass - dcat:Catalog , - dcat:Dataset , - dcat:Resource , - prez:CatalogList , - prez:ResourceList ; - altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - altr-ext:hasResourceFormat - "application/ld+json" , - "application/rdf+xml" , - "text/anot+turtle" , - "text/turtle" ; - altr-ext:hasNodeShape [ - a sh:NodeShape ; - sh:targetClass dcat:Catalog ; - altr-ext:exclude dcterms:hasPart ; - altr-ext:focusToChild dcterms:hasPart ; - ] , - [ - a sh:NodeShape ; - sh:targetClass prez:ResourceList ; - altr-ext:focusToChild dcterms:hasPart ; - altr-ext:relativeProperties dcterms:issued , dcterms:creator , dcterms:publisher ; - ] - . - - - - a prof:Profile , prez:CatPrezProfile ; - dcterms:description "Schema.org is a collaborative, community activity with a mission to create, maintain, and promote schemas for structured data on the Internet, on web pages, in email messages, and beyond. " ; - dcterms:identifier "sdo"^^xsd:token ; - dcterms:title "schema.org" ; - altr-ext:constrainsClass - skos:Dataset ; - altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - altr-ext:hasResourceFormat - "application/ld+json" , - "application/rdf+xml" , - "text/anot+turtle" , - "text/turtle" ; -. 
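
[Editor's aside: the deleted CatPrez profiles above and the OGC Records profile in the next hunk share one mechanism — a system profile maps each sh:targetClass to an altr-ext:hasDefaultProfile, which ConnegP falls back to when the client requests no profile. A hedged sketch of that lookup, with the same assumed altr-ext IRI as earlier and an illustrative function name:]

from rdflib import Graph, Namespace, URIRef
from rdflib.namespace import SH

ALTR_EXT = Namespace("http://www.w3.org/ns/dx/conneg/altr-ext#")  # assumed IRI

def default_profile_for_class(profiles: Graph, klass: URIRef):
    # Return the first default profile declared by a node shape targeting
    # klass, or None if no default is registered for that class.
    for shape in profiles.subjects(SH.targetClass, klass):
        default = profiles.value(shape, ALTR_EXT.hasDefaultProfile)
        if default is not None:
            return default
    return None
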
diff --git a/prez/reference_data/profiles/ogc_profile.ttl b/prez/reference_data/profiles/ogc_records_profile.ttl similarity index 64% rename from prez/reference_data/profiles/ogc_profile.ttl rename to prez/reference_data/profiles/ogc_records_profile.ttl index b22c28cd..1c135242 100644 --- a/prez/reference_data/profiles/ogc_profile.ttl +++ b/prez/reference_data/profiles/ogc_records_profile.ttl @@ -6,6 +6,7 @@ PREFIX owl: PREFIX prez: PREFIX prof: PREFIX prov: +PREFIX reg: PREFIX rdf: PREFIX rdfs: PREFIX sh: @@ -15,7 +16,7 @@ PREFIX endpoint: PREFIX shext: -prez:OGCProfile +prez:OGCRecordsProfile a prof:Profile ; dcterms:identifier "ogc"^^xsd:token ; dcterms:description "A system profile for OGC Records conformant API" ; @@ -24,13 +25,17 @@ prez:OGCProfile altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:hasNodeShape [ a sh:NodeShape ; - sh:targetClass prez:CatalogList , prez:SchemesList , prez:ConceptsList ; + sh:targetClass prez:CatalogList , prez:ConceptsList ; altr-ext:hasDefaultProfile prez:OGCListingProfile + ] , [ + a sh:NodeShape ; + sh:targetClass prez:SchemesList ; + altr-ext:hasDefaultProfile prez:OGCSchemesListProfile ] , [ a sh:NodeShape ; sh:targetClass dcat:Catalog , skos:ConceptScheme , skos:Concept ; altr-ext:hasDefaultProfile prez:OGCItemProfile - ] ; + ] . prez:OGCListingProfile @@ -46,6 +51,30 @@ prez:OGCListingProfile altr-ext:constrainsClass prez:CatalogList , prez:SchemesList , prez:ConceptList ; . +prez:OGCSchemesListProfile + a prof:Profile , sh:NodeShape ; + dcterms:title "OGC Concept Scheme Listing Profile" ; + altr-ext:hasResourceFormat + "application/ld+json" , + "application/anot+ld+json" , + "application/rdf+xml" , + "text/anot+turtle" , + "text/turtle" ; + altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; + altr-ext:constrainsClass prez:SchemesList ; + sh:property [ + sh:minCount 0 ; + sh:path ( + sh:union ( + dcterms:publisher + reg:status + ( prov:qualifiedDerivation prov:hadRole ) + ( prov:qualifiedDerivation prov:entity ) + ) + ) + ] + . + prez:OGCItemProfile a prof:Profile , sh:NodeShape ; dcterms:title "OGC Item Profile" ; @@ -58,7 +87,11 @@ prez:OGCItemProfile altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; sh:property [ sh:path shext:allPredicateValues ; + ] , + [ + sh:minCount 0 ; + sh:path [ sh:inversePath dcterms:hasPart ] ; ] ; shext:bnode-depth 2 ; - altr-ext:constrainsClass dcat:Catalog , skos:ConceptScheme, skos:Concept ; + altr-ext:constrainsClass dcat:Catalog , dcat:Resource , skos:ConceptScheme, skos:Concept ; . diff --git a/prez/reference_data/profiles/prez_default_profiles.ttl b/prez/reference_data/profiles/prez_default_profiles.ttl index 3b3c7e43..05a6e3cb 100644 --- a/prez/reference_data/profiles/prez_default_profiles.ttl +++ b/prez/reference_data/profiles/prez_default_profiles.ttl @@ -22,16 +22,16 @@ PREFIX xsd: altr-ext:hasNodeShape [ a sh:NodeShape ; sh:targetClass prez:SPARQLQuery ; - altr-ext:hasDefaultProfile + altr-ext:hasDefaultProfile ] . - + a prof:Profile ; - dcterms:identifier "open" ; - dcterms:description "An open profile which will return all direct properties for a resource." ; + dcterms:identifier "openobj"^^xsd:token ; + dcterms:description "An open profile for objects which will return all direct properties for a resource." 
; dcterms:title "Open profile" ; - altr-ext:constrainsClass prez:SPARQLQuery , prez:SearchResult , prez:CQLObjectList ; + altr-ext:constrainsClass prez:SPARQLQuery , prof:Profile ; altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:hasResourceFormat "application/ld+json" , "application/ld+json" , @@ -42,8 +42,30 @@ PREFIX xsd: sh:property [ sh:path shext:allPredicateValues ; ] ; + shext:bnode-depth 2 ; . + + a prof:Profile ; + dcterms:description "A very basic data model that lists the members of container objects only, i.e. not their other properties" ; + dcterms:identifier "mem"^^xsd:token ; + dcterms:title "Members" ; + altr-ext:constrainsClass prez:DatasetList , + prez:FeatureCollectionList , + prez:FeatureList , + prez:ProfilesList , + prez:SchemesList , + prez:VocPrezCollectionList , + prez:CatalogList , + prez:CQLObjectList ; + altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; + altr-ext:hasResourceFormat + "application/json" , + "application/ld+json" , + "application/rdf+xml" , + "text/anot+turtle" , + "text/turtle" ; +. altr-ext:alt-profile a prof:Profile ; @@ -74,38 +96,26 @@ altr-ext:alt-profile prez:VocPrezCollectionList ; . -prez:profiles - a prof:Profile ; - dcterms:title "Profiles" ; - dcterms:description "List of profiles" ; - dcterms:identifier "profiles"^^xsd:token ; - altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - altr-ext:hasResourceFormat - "application/ld+json" , - "application/anot+ld+json" , - "application/rdf+xml" , - "text/anot+turtle" , - "text/turtle" ; - altr-ext:constrainsClass prof:Profile ; -. - - a prof:Profile ; - dcterms:description "A very basic data model that lists the members of container objects only, i.e. not their other properties" ; - dcterms:identifier "mem"^^xsd:token ; - dcterms:title "Members" ; - altr-ext:constrainsClass prez:DatasetList , - prez:FeatureCollectionList , - prez:FeatureList , - prez:ProfilesList , - prez:SchemesList , - prez:VocPrezCollectionList , - prez:CatalogList ; + + + a prof:Profile , prez:CatPrezProfile ; + dcterms:description "Dataset Catalog Vocabulary (DCAT) is a W3C-authored RDF vocabulary designed to facilitate interoperability between data catalogs" ; + dcterms:identifier "dcat"^^xsd:token ; + dcterms:title "DCAT" ; + altr-ext:constrainsClass + dcat:Catalog , + dcat:Dataset , + dcat:Resource , + prez:CatalogList , + prez:ResourceList ; altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:hasResourceFormat - "application/json" , "application/ld+json" , "application/rdf+xml" , "text/anot+turtle" , "text/turtle" ; -. + sh:property [ + sh:path shext:allPredicateValues + ] ; + . \ No newline at end of file diff --git a/prez/reference_data/profiles/spaceprez_default_profiles.ttl b/prez/reference_data/profiles/spaceprez_default_profiles.ttl index 4040ef13..9e6a3c8a 100644 --- a/prez/reference_data/profiles/spaceprez_default_profiles.ttl +++ b/prez/reference_data/profiles/spaceprez_default_profiles.ttl @@ -113,57 +113,6 @@ prez:GeoListingProfile a prof:Profile ; ] . - - a prof:Profile , prez:SpacePrezProfile ; - dcterms:description "An RDF/OWL vocabulary for representing spatial information" ; - dcterms:identifier "geo"^^xsd:token ; - dcterms:title "GeoSPARQL" ; - altr-ext:constrainsClass geo:Feature ; - altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - altr-ext:hasResourceFormat - "application/ld+json" , - "application/anot+ld+json" , - "application/rdf+xml" , - "text/anot+turtle" , - "text/turtle" ; -. 
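The Members profile added above (identifier "mem") lists only the members of container objects, not their other properties. Assuming a deployment that honours connegp-style `_profile` and `_mediatype` query-string arguments, and a local instance on port 8000 (both assumptions, not part of this patch), a listing can be requested under it like so:

```python
# Hypothetical client call: fetch the catalog listing rendered with the
# "mem" (Members) profile as plain Turtle. Host/port and the query-string
# argument names are assumptions about the deployment, not this patch.
import httpx

response = httpx.get(
    "http://localhost:8000/c/catalogs",
    params={"_profile": "mem", "_mediatype": "text/turtle"},
)
print(response.text)
```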
-
-
-    a prof:Profile , prez:SpacePrezProfile ;
-    dcterms:description "The OGC API Features specifies the behavior of Web APIs that provide access to features in a dataset in a manner independent of the underlying data store." ;
-    dcterms:identifier "oai"^^xsd:token ;
-    dcterms:title "OGC API Features" ;
-    altr-ext:constrainsClass
-        dcat:Dataset ,
-        geo:FeatureCollection ,
-        geo:Feature ,
-        prez:FeatureCollectionList ,
-        prez:FeatureList ;
-    altr-ext:hasDefaultResourceFormat "text/anot+turtle" ;
-    altr-ext:hasResourceFormat
-        "application/ld+json" ,
-        "application/anot+ld+json" ,
-        "application/rdf+xml" ,
-        "text/anot+turtle" ,
-        "text/turtle" ,
-        "application/geo+json" ;
-.
-
-
-    a prof:Profile , prez:SpacePrezProfile ;
-    dcterms:description "A null profile of the Data Catalog Vocabulary (DCAT)" ;
-    dcterms:identifier "dcat"^^xsd:token ;
-    dcterms:title "DCAT" ;
-    altr-ext:constrainsClass dcat:Dataset ;
-    altr-ext:hasDefaultResourceFormat "text/anot+turtle" ;
-    altr-ext:hasResourceFormat
-        "application/ld+json" ,
-        "application/anot+ld+json" ,
-        "application/rdf+xml" ,
-        "text/anot+turtle" ,
-        "text/turtle" ;
-.
-
 
     a prof:Profile , prez:SpacePrezProfile ;
     dcterms:description "Dataset Catalog Vocabulary (DCAT) is a W3C-authored RDF vocabulary designed to facilitate interoperability between data catalogs" ;
@@ -186,3 +135,4 @@ prez:GeoListingProfile a prof:Profile ;
     shext:bnode-depth 2 ;
     altr-ext:constrainsClass dcat:Catalog , dcat:Dataset ;
     .
+
diff --git a/prez/reference_data/profiles/vocprez_default_profiles.ttl b/prez/reference_data/profiles/vocprez_default_profiles.ttl
deleted file mode 100644
index f4220fd2..00000000
--- a/prez/reference_data/profiles/vocprez_default_profiles.ttl
+++ /dev/null
@@ -1,209 +0,0 @@
-PREFIX altr-ext: <http://www.w3.org/ns/dx/conneg/altr-ext#>
-PREFIX dcat: <http://www.w3.org/ns/dcat#>
-PREFIX dcterms: <http://purl.org/dc/terms/>
-PREFIX geo: <http://www.opengis.net/ont/geosparql#>
-PREFIX owl: <http://www.w3.org/2002/07/owl#>
-PREFIX prez: <https://prez.dev/>
-PREFIX prof: <http://www.w3.org/ns/dx/prof/>
-PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
-PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
-PREFIX schema: <https://schema.org/>
-PREFIX sh: <http://www.w3.org/ns/shacl#>
-PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
-PREFIX reg: <http://purl.org/linked-data/registry#>
-PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
-PREFIX prov: <http://www.w3.org/ns/prov#>
-PREFIX shext: <http://example.com/shacl-extension#>
-
-
-prez:VocPrezProfile
-    a prof:Profile ;
-    dcterms:identifier "vocprez"^^xsd:token ;
-    dcterms:description "A system profile for VocPrez" ;
-    skos:prefLabel "VocPrez profile" ;
-    altr-ext:constrainsClass prez:VocPrez ;
-    altr-ext:hasDefaultResourceFormat "text/anot+turtle" ;
-    altr-ext:hasNodeShape [
-        a sh:NodeShape ;
-        sh:targetClass prez:SchemesList ;
-        altr-ext:hasDefaultProfile
-    ] , [
-        a sh:NodeShape ;
-        sh:targetClass prez:VocPrezCollectionList ;
-        altr-ext:hasDefaultProfile
-    ] , [
-        a sh:NodeShape ;
-        sh:targetClass skos:Collection ;
-        altr-ext:hasDefaultProfile
-    ] , [
-        a sh:NodeShape ;
-        sh:targetClass skos:Concept ;
-        altr-ext:hasDefaultProfile
-    ] , [
-        a sh:NodeShape ;
-        sh:targetClass skos:ConceptScheme ;
-        altr-ext:hasDefaultProfile
-    ]
-    .
-
-
-    a prof:Profile , prez:VocPrezProfile ;
-    dcterms:description "Dataset Catalog Vocabulary (DCAT) is a W3C-authored RDF vocabulary designed to facilitate interoperability between data catalogs" ;
-    dcterms:identifier "dcat"^^xsd:token ;
-    dcterms:title "DCAT" ;
-    altr-ext:constrainsClass
-        dcat:Catalog ,
-        dcat:Dataset ,
-        prez:VocPrezHome ;
-    altr-ext:hasDefaultResourceFormat "text/anot+turtle" ;
-    altr-ext:hasResourceFormat
-        "application/ld+json" ,
-        "application/rdf+xml" ,
-        "text/anot+turtle" ,
-        "text/turtle" ;
-.
-
-<https://w3id.org/profile/vocpub>
-    a prof:Profile , prez:VocPrezProfile ;
-    dcterms:description "This is a profile of the taxonomy data model SKOS - i.e. SKOS with additional constraints."
; - dcterms:identifier "vocpub"^^xsd:token ; - dcterms:title "VocPub" ; - altr-ext:otherAnnotationProps schema:color, reg:status ; - altr-ext:constrainsClass - skos:ConceptScheme , - skos:Concept , - skos:Collection , - prez:SchemesList , - prez:VocPrezCollectionList ; - altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - altr-ext:hasResourceFormat - "application/ld+json" , - "application/rdf+xml" , - "text/anot+turtle" , - "text/turtle" ; - altr-ext:hasNodeShape [ - a sh:NodeShape ; - sh:targetClass skos:ConceptScheme ; - altr-ext:focusToChild skos:hasTopConcept ; - ] ; - altr-ext:hasNodeShape [ - a sh:NodeShape ; - sh:targetClass skos:Collection ; - altr-ext:focusToChild skos:member ; - ] ; - altr-ext:hasNodeShape [ - a sh:NodeShape ; - sh:targetClass skos:ConceptScheme ; - altr-ext:childToFocus skos:inScheme ; - ] ; - altr-ext:hasNodeShape [ - a sh:NodeShape ; - sh:targetClass skos:Concept ; - sh:property [ - sh:path shext:allPredicateValues ; - ] ; - altr-ext:focusToParent skos:inScheme ; - ] ; - altr-ext:hasNodeShape [ - a sh:NodeShape ; - sh:targetClass prez:SchemesList ; - sh:property [ - sh:minCount 0 ; - sh:path ( - sh:union ( - dcterms:publisher - reg:status - ) - ) - ] - ] ; - altr-ext:hasNodeShape [ - a sh:NodeShape ; - sh:targetClass prez:VocPrezCollectionList ; - sh:property [ - sh:minCount 0 ; - sh:path skos:definition ; - ] ; - ] ; -. - - a prof:Profile , sh:NodeShape ; - dcterms:title "VocPub Schemes Listing Profile" ; - altr-ext:constrainsClass prez:SchemesList ; - altr-ext:hasResourceFormat "application/ld+json" , - "application/ld+json" , - "application/anot+ld+json" , - "application/rdf+xml" , - "text/anot+turtle" , - "text/turtle" ; - sh:property [ - sh:minCount 0 ; - sh:path ( - sh:union ( - dcterms:publisher - reg:status - ( prov:qualifiedDerivation prov:hadRole ) - ( prov:qualifiedDerivation prov:entity ) - ) - ) - ] - . - - - - a prof:Profile ; - dcterms:description "Schema.org is a collaborative, community activity with a mission to create, maintain, and promote schemas for structured data on the Internet, on web pages, in email messages, and beyond. " ; - dcterms:identifier "sdo"^^xsd:token ; - dcterms:title "schema.org" ; - altr-ext:constrainsClass - skos:Dataset , - skos:ConceptScheme , - skos:Collection ; - altr-ext:hasDefaultResourceFormat "text/turtle" ; - altr-ext:hasResourceFormat - "application/ld+json" , - "application/anot+ld+json" , - "application/rdf+xml" , - "text/anot+turtle" , - "text/turtle" ; -. - - - a prof:Profile ; - dcterms:description "A simple data model to provide items for form drop-down lists. The basic information is an ID & name tuple and the optional extra value is an item's parent. 
For vocabularies, this is then URI, prefLabel or URI, prefLabel & broader Concept" ; - dcterms:identifier "dd"^^xsd:token ; - dcterms:title "Drop-Down List" ; - altr-ext:constrainsClass - prez:SchemesList , - prez:VocPrezCollectionList , - skos:ConceptScheme , - skos:Collection ; - altr-ext:hasNodeShape [ - a sh:NodeShape ; - sh:targetClass skos:ConceptScheme ; - altr-ext:childToFocus skos:inScheme ; - altr-ext:relativeProperties skos:broader ; - ] ; - altr-ext:hasNodeShape [ - a sh:NodeShape ; - sh:targetClass skos:Collection ; - altr-ext:focusToChild skos:member ; - altr-ext:relativeProperties skos:definition ; - ] ; - altr-ext:hasNodeShape [ - a sh:NodeShape ; - sh:targetClass prez:SchemesList ; - altr-ext:containerClass skos:ConceptScheme ; - altr-ext:relativeProperties skos:definition, dcterms:publisher, reg:status ; - ] ; - altr-ext:hasNodeShape [ - a sh:NodeShape ; - sh:targetClass prez:VocPrezCollectionList ; - altr-ext:containerClass skos:Collection ; - altr-ext:relativeProperties skos:definition, dcterms:publisher, reg:status ; - ] ; - altr-ext:hasDefaultResourceFormat "application/json" ; - altr-ext:hasResourceFormat - "application/json" , - "text/csv" -. diff --git a/prez/routers/catprez.py b/prez/routers/catprez.py deleted file mode 100644 index 48bed64e..00000000 --- a/prez/routers/catprez.py +++ /dev/null @@ -1,84 +0,0 @@ -from typing import Optional - -from fastapi import APIRouter, Request, Depends -from starlette.responses import PlainTextResponse - -from prez.dependencies import get_repo -from prez.services.objects import object_function -from prez.services.listings import listing_function -from prez.services.curie_functions import get_uri_for_curie_id -from prez.sparql.methods import Repo - -router = APIRouter(tags=["CatPrez"]) - - -@router.get("/c", summary="CatPrez Home") -async def catprez_profiles(): - return PlainTextResponse("CatPrez Home") - - -@router.get( - "/c/catalogs", - summary="List Catalogs", - name="https://prez.dev/endpoint/catprez/catalog-listing", -) -async def catalog_list( - request: Request, - page: Optional[int] = 1, - per_page: Optional[int] = 20, - repo: Repo = Depends(get_repo), -): - return await listing_function( - request=request, page=page, per_page=per_page, repo=repo - ) - - -@router.get( - "/c/catalogs/{catalog_curie}/resources", - summary="List Resources", - name="https://prez.dev/endpoint/catprez/resource-listing", -) -async def resource_list( - request: Request, - catalog_curie: str, - repo: Repo = Depends(get_repo), - page: Optional[int] = 1, - per_page: Optional[int] = 20, -): - catalog_uri = get_uri_for_curie_id(catalog_curie) - return await listing_function( - request=request, - page=page, - per_page=per_page, - repo=repo, - uri=catalog_uri, - ) - - -@router.get( - "/c/catalogs/{catalog_curie}/resources/{resource_curie}", - summary="Get Resource", - name="https://prez.dev/endpoint/catprez/resource", -) -async def resource_item( - request: Request, - catalog_curie: str, - resource_curie: str, - repo: Repo = Depends(get_repo), -): - return await object_function( - request=request, object_curie=resource_curie, repo=repo - ) - - -@router.get( - "/c/catalogs/{catalog_curie}", - summary="Get Catalog", - name="https://prez.dev/endpoint/catprez/catalog", -) -async def catalog_item( - request: Request, - catalog_curie: str, - repo: Repo = Depends(get_repo), -): - return await object_function(request=request, object_curie=catalog_curie, repo=repo) diff --git a/prez/routers/cql.py b/prez/routers/cql.py index a678ffda..c4cafd82 100644 --- 
a/prez/routers/cql.py +++ b/prez/routers/cql.py @@ -23,5 +23,10 @@ async def cql_post_endpoint( ): endpoint_uri = URIRef("https://prez.dev/endpoint/cql") return await listing_function_new( - request, repo, endpoint_uri, page, per_page, parsed_cql + request=request, + repo=repo, + endpoint_uri=endpoint_uri, + page=page, + per_page=per_page, + cql=parsed_cql, ) diff --git a/prez/routers/management.py b/prez/routers/management.py index af93d78d..4614f77b 100644 --- a/prez/routers/management.py +++ b/prez/routers/management.py @@ -69,19 +69,3 @@ async def return_annotation_predicates(): Collection(g, description_list_bn, settings.description_predicates) Collection(g, provenance_list_bn, settings.provenance_predicates) return g - - -async def return_annotation_predicates(): - """ - Returns an RDF linked list of the annotation predicates used for labels, descriptions and provenance. - """ - g = Graph() - g.bind("prez", "https://prez.dev/") - label_list_bn, description_list_bn, provenance_list_bn = BNode(), BNode(), BNode() - g.add((PREZ.AnnotationPropertyList, PREZ.labelList, label_list_bn)) - g.add((PREZ.AnnotationPropertyList, PREZ.descriptionList, description_list_bn)) - g.add((PREZ.AnnotationPropertyList, PREZ.provenanceList, provenance_list_bn)) - Collection(g, label_list_bn, settings.label_predicates) - Collection(g, description_list_bn, settings.description_predicates) - Collection(g, provenance_list_bn, settings.provenance_predicates) - return g diff --git a/prez/routers/object.py b/prez/routers/object.py index 33dfcd4f..a357871d 100644 --- a/prez/routers/object.py +++ b/prez/routers/object.py @@ -1,33 +1,11 @@ -from string import Template -from typing import FrozenSet, Optional - from fastapi import APIRouter, Request, HTTPException, status, Query from fastapi import Depends -from rdflib import Graph, Literal, URIRef, PROF, DCTERMS from starlette.responses import PlainTextResponse -from prez.cache import ( - endpoints_graph_cache, - profiles_graph_cache, - links_ids_graph_cache, -) from prez.dependencies import get_repo -from prez.models.listing import ListingModel -from prez.models.object_item import ObjectItem -from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo from prez.queries.object import object_inbound_query, object_outbound_query from prez.routers.identifier import get_iri_route -from prez.services.curie_functions import get_curie_id_for_uri, get_uri_for_curie_id -from prez.services.model_methods import get_classes -from prez.services.objects import object_function -from prez.sparql.methods import Repo -from prez.sparql.objects_listings import ( - get_endpoint_template_queries, - generate_relationship_query, - generate_item_construct, - generate_listing_construct, - generate_listing_count_construct, -) +from prez.services.objects import object_function_new router = APIRouter(tags=["Object"]) @@ -92,4 +70,4 @@ async def count_route( @router.get("/object", summary="Object", name="https://prez.dev/endpoint/object") async def object_route(request: Request, repo=Depends(get_repo)): - return await object_function(request, repo=repo) + return await object_function_new(request, repo=repo) diff --git a/prez/routers/ogc_catprez.py b/prez/routers/ogc_catprez.py index 9d8fd907..c6ae279e 100644 --- a/prez/routers/ogc_catprez.py +++ b/prez/routers/ogc_catprez.py @@ -3,115 +3,129 @@ from fastapi import APIRouter, Request, Depends from rdflib import URIRef -from prez.dependencies import get_repo, cql_parser_dependency +from prez.dependencies import get_repo, 
cql_parser_dependency, get_system_repo from prez.services.curie_functions import get_uri_for_curie_id from prez.services.listings import listing_function_new from prez.services.objects import object_function_new from prez.sparql.methods import Repo -router = APIRouter(tags=["ogcrecords"]) +router = APIRouter(tags=["ogccatprez"]) ogc_endpoints = { - "catalog-listing": "https://prez.dev/endpoint/ogcrecords/catalog-listing", - "catalog-object": "https://prez.dev/endpoint/ogcrecords/catalog-object", - "vocab-listing": "https://prez.dev/endpoint/ogcrecords/vocab-listing", - "vocab-object": "https://prez.dev/endpoint/ogcrecords/vocab-object", - "concept-listing": "https://prez.dev/endpoint/ogcrecords/concept-listing", - "concept-object": "https://prez.dev/endpoint/ogcrecords/concept-object", + "top-level-catalog-listing": "https://prez.dev/endpoint/ogccatprez/top-level-catalog-listing", + "top-level-catalog-object": "https://prez.dev/endpoint/ogccatprez/top-level-catalog-object", + "lower-level-catalog-listing": "https://prez.dev/endpoint/ogccatprez/lower-level-catalog-listing", + "lower-level-catalog-object": "https://prez.dev/endpoint/ogccatprez/lower-level-catalog-object", + "resource-listing": "https://prez.dev/endpoint/ogccatprez/resource-listing", + "resource-object": "https://prez.dev/endpoint/ogccatprez/resource-object", } @router.get( - "/catalogs", - summary="List Catalogs", - name=ogc_endpoints["catalog-listing"], + "/c/catalogs", + summary="List Top Level Catalogs", + name=ogc_endpoints["top-level-catalog-listing"], ) async def catalog_list( request: Request, page: Optional[int] = 1, per_page: Optional[int] = 20, repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), ): - endpoint_uri = URIRef(ogc_endpoints["catalog-listing"]) - return await listing_function_new(request, repo, endpoint_uri, page, per_page) + endpoint_uri = URIRef(request.scope.get("route").name) + return await listing_function_new( + request, repo, system_repo, endpoint_uri, page, per_page + ) @router.get( - "/catalogs/{catalogId}/collections", - summary="List Vocabularies", - name=ogc_endpoints["vocab-listing"], + "/c/catalogs/{catalogId}/collections", + summary="List Lower Level Catalogs", + name=ogc_endpoints["lower-level-catalog-listing"], ) async def vocab_list( request: Request, page: Optional[int] = 1, per_page: Optional[int] = 20, repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), ): - endpoint_uri = URIRef(ogc_endpoints["vocab-listing"]) - return await listing_function_new(request, repo, endpoint_uri, page, per_page) + parent_uri = get_uri_for_curie_id(request.path_params["catalogId"]) + endpoint_uri = URIRef(request.scope.get("route").name) + return await listing_function_new( + request, repo, system_repo, endpoint_uri, page, per_page, parent_uri + ) @router.get( - "/catalogs/{catalogId}/collections/{collectionId}/items", - summary="List Concepts", - name=ogc_endpoints["concept-listing"], + "/c/catalogs/{catalogId}/collections/{collectionId}/items", + summary="List Resources", + name=ogc_endpoints["resource-listing"], ) -async def vocab_list( +async def concept_list( request: Request, page: Optional[int] = 1, per_page: Optional[int] = 20, repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), ): - endpoint_uri = URIRef(ogc_endpoints["concept-listing"]) - return await listing_function_new(request, repo, endpoint_uri, page, per_page) + parent_uri = get_uri_for_curie_id(request.path_params["collectionId"]) + endpoint_uri = 
URIRef(request.scope.get("route").name) + return await listing_function_new( + request, repo, system_repo, endpoint_uri, page, per_page, parent_uri + ) @router.get( - "/catalogs/{catalogId}", - summary="Catalog Object", - name=ogc_endpoints["catalog-object"], + "/c/catalogs/{catalogId}", + summary="Top Level Catalog Object", + name=ogc_endpoints["top-level-catalog-object"], ) async def catalog_object( request: Request, repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), ): request_url = request.scope["path"] - endpoint_uri = URIRef(ogc_endpoints["catalog-object"]) + endpoint_uri = URIRef(request.scope.get("route").name) object_uri = get_uri_for_curie_id(request.path_params["catalogId"]) return await object_function_new( - request, endpoint_uri, object_uri, request_url, repo + request, endpoint_uri, object_uri, request_url, repo, system_repo ) @router.get( - "/catalogs/{catalogId}/collections/{collectionId}", - summary="Vocab Object", - name=ogc_endpoints["vocab-object"], + "/c/catalogs/{catalogId}/collections/{collectionId}", + summary="Lower Level Catalog Object", + name=ogc_endpoints["lower-level-catalog-object"], ) async def catalog_object( request: Request, repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), ): request_url = request.scope["path"] - endpoint_uri = URIRef(ogc_endpoints["vocab-object"]) + endpoint_uri = URIRef(request.scope.get("route").name) object_uri = get_uri_for_curie_id(request.path_params["collectionId"]) return await object_function_new( - request, endpoint_uri, object_uri, request_url, repo + request, endpoint_uri, object_uri, request_url, repo, system_repo ) @router.get( - "/catalogs/{catalogId}/collections/{collectionId}/items/{itemId}", - summary="Concept Object", - name=ogc_endpoints["concept-object"], + "/c/catalogs/{catalogId}/collections/{collectionId}/items/{itemId}", + summary="Resource Object", + name=ogc_endpoints["resource-object"], ) async def catalog_object( request: Request, repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), ): request_url = request.scope["path"] - endpoint_uri = URIRef(ogc_endpoints["concept-object"]) + endpoint_uri = URIRef(request.scope.get("route").name) object_uri = get_uri_for_curie_id(request.path_params["itemId"]) return await object_function_new( - request, endpoint_uri, request_url, repo, object_uri + request, endpoint_uri, object_uri, request_url, repo, system_repo ) diff --git a/prez/routers/ogc_spaceprez.py b/prez/routers/ogc_spaceprez.py new file mode 100644 index 00000000..84b865a5 --- /dev/null +++ b/prez/routers/ogc_spaceprez.py @@ -0,0 +1,169 @@ +from typing import Optional + +from fastapi import APIRouter, Request, Depends +from rdflib import Namespace +from starlette.responses import PlainTextResponse + +from prez.dependencies import get_repo, get_system_repo +from prez.services.curie_functions import get_uri_for_curie_id +from prez.services.listings import listing_function_new +from prez.services.objects import object_function_new +from prez.sparql.methods import Repo + +router = APIRouter(tags=["SpacePrez"]) + +SP_EP = Namespace("https://prez.dev/endpoint/spaceprez/") + + +@router.get("/s", summary="SpacePrez Home") +async def spaceprez_profiles(): + return PlainTextResponse("SpacePrez Home") + + +@router.get( + "/s/catalogs", + summary="List Datasets", + name=SP_EP["dataset-listing"], +) +async def list_datasets( + request: Request, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), + page: 
Optional[int] = 1, + per_page: Optional[int] = 20, +): + endpoint_uri = SP_EP["dataset-listing"] + return await listing_function_new( + request=request, + repo=repo, + system_repo=system_repo, + endpoint_uri=endpoint_uri, + page=page, + per_page=per_page, + ) + + +@router.get( + "/s/catalogs/{dataset_curie}/collections", + summary="List Feature Collections", + name=SP_EP["feature-collection-listing"], +) +async def list_feature_collections( + request: Request, + dataset_curie: str, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), + page: Optional[int] = 1, + per_page: Optional[int] = 20, +): + endpoint_uri = SP_EP["feature-collection-listing"] + dataset_uri = get_uri_for_curie_id(dataset_curie) + return await listing_function_new( + request=request, + repo=repo, + system_repo=system_repo, + endpoint_uri=endpoint_uri, + page=page, + per_page=per_page, + parent_uri=dataset_uri, + ) + + +@router.get( + "/s/catalogs/{dataset_curie}/collections/{collection_curie}/items", + summary="List Features", + name=SP_EP["feature-listing"], +) +async def list_features( + request: Request, + dataset_curie: str, + collection_curie: str, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), + page: Optional[int] = 1, + per_page: Optional[int] = 20, +): + collection_uri = get_uri_for_curie_id(collection_curie) + endpoint_uri = SP_EP["feature-listing"] + return await listing_function_new( + request=request, + repo=repo, + system_repo=system_repo, + endpoint_uri=endpoint_uri, + page=page, + per_page=per_page, + parent_uri=collection_uri, + ) + + +@router.get( + "/s/catalogs/{dataset_curie}", summary="Get Dataset", name=SP_EP["dataset-object"] +) +async def dataset_item( + request: Request, + dataset_curie: str, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), +): + request_url = request.scope["path"] + endpoint_uri = SP_EP["dataset-object"] + dataset_uri = get_uri_for_curie_id(dataset_curie) + return await object_function_new( + request=request, + endpoint_uri=endpoint_uri, + uri=dataset_uri, + request_url=request_url, + repo=repo, + system_repo=system_repo, + ) + + +@router.get( + "/s/catalogs/{dataset_curie}/collections/{collection_curie}", + summary="Get Feature Collection", + name=SP_EP["feature-collection-object"], +) +async def feature_collection_item( + request: Request, + dataset_curie: str, + collection_curie: str, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), +): + request_url = request.scope["path"] + endpoint_uri = SP_EP["feature-collection-object"] + collection_uri = get_uri_for_curie_id(collection_curie) + return await object_function_new( + request=request, + endpoint_uri=endpoint_uri, + uri=collection_uri, + request_url=request_url, + repo=repo, + system_repo=system_repo, + ) + + +@router.get( + "/s/catalogs/{dataset_curie}/collections/{collection_curie}/items/{feature_curie}", + summary="Get Feature", + name="https://prez.dev/endpoint/spaceprez/feature", +) +async def feature_item( + request: Request, + dataset_curie: str, + collection_curie: str, + feature_curie: str, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), +): + request_url = request.scope["path"] + endpoint_uri = SP_EP["feature-object"] + feature_uri = get_uri_for_curie_id(feature_curie) + return await object_function_new( + request=request, + endpoint_uri=endpoint_uri, + uri=feature_uri, + request_url=request_url, + repo=repo, + system_repo=system_repo, + ) diff --git 
a/prez/routers/ogc_vocprez.py b/prez/routers/ogc_vocprez.py new file mode 100644 index 00000000..a2e9c33e --- /dev/null +++ b/prez/routers/ogc_vocprez.py @@ -0,0 +1,171 @@ +from typing import Optional + +from fastapi import APIRouter, Request, Depends +from rdflib import URIRef + +from prez.dependencies import get_repo, get_system_repo +from prez.services.curie_functions import get_uri_for_curie_id +from prez.services.listings import listing_function_new +from prez.services.objects import object_function_new +from prez.sparql.methods import Repo + +router = APIRouter(tags=["ogcvocprez"]) + +ogc_endpoints = { + "catalog-listing": "https://prez.dev/endpoint/ogcvocprez/catalog-listing", + "catalog-object": "https://prez.dev/endpoint/ogcvocprez/catalog-object", + "vocab-listing": "https://prez.dev/endpoint/ogcvocprez/vocab-listing", + "vocab-object": "https://prez.dev/endpoint/ogcvocprez/vocab-object", + "concept-listing": "https://prez.dev/endpoint/ogcvocprez/concept-listing", + "concept-object": "https://prez.dev/endpoint/ogcvocprez/concept-object", + "top-concepts": "https://prez.dev/endpoint/ogcvocprez/top-concepts", + "narrowers": "https://prez.dev/endpoint/ogcvocprez/narrowers", +} + + +@router.get( + "/v/catalogs", + summary="List Catalogs", + name=ogc_endpoints["catalog-listing"], +) +async def catalog_list( + request: Request, + page: Optional[int] = 1, + per_page: Optional[int] = 20, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), +): + endpoint_uri = URIRef(request.scope.get("route").name) + return await listing_function_new( + request, repo, system_repo, endpoint_uri, page, per_page + ) + + +@router.get( + "/v/catalogs/{catalogId}/collections", + summary="List Vocabularies", + name=ogc_endpoints["vocab-listing"], +) +async def vocab_list( + request: Request, + page: Optional[int] = 1, + per_page: Optional[int] = 20, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), +): + parent_uri = get_uri_for_curie_id(request.path_params["catalogId"]) + endpoint_uri = URIRef(request.scope.get("route").name) + return await listing_function_new( + request, repo, system_repo, endpoint_uri, page, per_page, parent_uri + ) + + +@router.get( + "/v/catalogs/{catalogId}/collections/{collectionId}/items", + summary="List Concepts", + name=ogc_endpoints["concept-listing"], +) +async def concept_list( + request: Request, + page: Optional[int] = 1, + per_page: Optional[int] = 20, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), +): + parent_uri = get_uri_for_curie_id(request.path_params["collectionId"]) + endpoint_uri = URIRef(request.scope.get("route").name) + return await listing_function_new( + request, repo, system_repo, endpoint_uri, page, per_page, parent_uri + ) + + +@router.get( + "/v/catalogs/{catalogId}/collections/{collectionId}/top-concepts", + summary="List Top Concepts", + name=ogc_endpoints["top-concepts"], +) +async def concept_list( + request: Request, + page: Optional[int] = 1, + per_page: Optional[int] = 20, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), +): + parent_uri = get_uri_for_curie_id(request.path_params["collectionId"]) + endpoint_uri = URIRef(request.scope.get("route").name) + return await listing_function_new( + request, repo, system_repo, endpoint_uri, page, per_page, parent_uri + ) + + +@router.get( + "/v/catalogs/{catalogId}/collections/{collectionId}/items/{itemId}/narrowers", + summary="List Narrower Concepts", + 
name=ogc_endpoints["narrowers"], +) +async def concept_list( + request: Request, + page: Optional[int] = 1, + per_page: Optional[int] = 20, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), +): + parent_uri = get_uri_for_curie_id(request.path_params["itemId"]) + endpoint_uri = URIRef(request.scope.get("route").name) + return await listing_function_new( + request, repo, system_repo, endpoint_uri, page, per_page, parent_uri + ) + + +@router.get( + "/v/catalogs/{catalogId}", + summary="Catalog Object", + name=ogc_endpoints["catalog-object"], +) +async def catalog_object( + request: Request, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), +): + request_url = request.scope["path"] + endpoint_uri = URIRef(request.scope.get("route").name) + object_uri = get_uri_for_curie_id(request.path_params["catalogId"]) + return await object_function_new( + request, endpoint_uri, object_uri, request_url, repo, system_repo + ) + + +@router.get( + "/v/catalogs/{catalogId}/collections/{collectionId}", + summary="Vocab Object", + name=ogc_endpoints["vocab-object"], +) +async def catalog_object( + request: Request, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), +): + request_url = request.scope["path"] + endpoint_uri = URIRef(request.scope.get("route").name) + object_uri = get_uri_for_curie_id(request.path_params["collectionId"]) + return await object_function_new( + request, endpoint_uri, object_uri, request_url, repo, system_repo + ) + + +@router.get( + "/v/catalogs/{catalogId}/collections/{collectionId}/items/{itemId}", + summary="Concept Object", + name=ogc_endpoints["concept-object"], +) +async def catalog_object( + request: Request, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), +): + request_url = request.scope["path"] + endpoint_uri = URIRef(request.scope.get("route").name) + object_uri = get_uri_for_curie_id(request.path_params["itemId"]) + return await object_function_new( + request, endpoint_uri, object_uri, request_url, repo, system_repo + ) diff --git a/prez/routers/profiles.py b/prez/routers/profiles.py index 0b87bbf5..440ddb25 100644 --- a/prez/routers/profiles.py +++ b/prez/routers/profiles.py @@ -1,8 +1,10 @@ from fastapi import APIRouter, Request, Depends -from prez.dependencies import get_repo, get_system_repo -from prez.services.objects import object_function -from prez.services.listings import listing_function +from prez.dependencies import get_system_repo +from prez.services.curie_functions import get_uri_for_curie_id +from prez.services.listings import listing_function_new +from prez.services.objects import object_function_new +from rdflib import URIRef router = APIRouter(tags=["Profiles"]) @@ -10,22 +12,22 @@ @router.get( "/profiles", summary="List Profiles", - name="https://prez.dev/endpoint/profiles-listing", + name="https://prez.dev/endpoint/system/profiles-listing", ) @router.get( "/s/profiles", summary="SpacePrez Profiles", - name="https://prez.dev/endpoint/spaceprez-profiles-listing", + name="https://prez.dev/endpoint/system/spaceprez-profiles-listing", ) @router.get( "/v/profiles", summary="VocPrez Profiles", - name="https://prez.dev/endpoint/vocprez-profiles-listing", + name="https://prez.dev/endpoint/system/vocprez-profiles-listing", ) @router.get( "/c/profiles", summary="CatPrez Profiles", - name="https://prez.dev/endpoint/catprez-profiles-listing", + name="https://prez.dev/endpoint/system/catprez-profiles-listing", ) async def profiles( request: 
Request, @@ -33,15 +35,31 @@ async def profiles( per_page: int = 20, repo=Depends(get_system_repo), ): - return await listing_function( - request=request, page=page, per_page=per_page, repo=repo + endpoint_uri = URIRef(request.scope.get("route").name) + return await listing_function_new( + request=request, + repo=repo, + system_repo=repo, + endpoint_uri=endpoint_uri, + page=page, + per_page=per_page, ) @router.get( "/profiles/{profile_curie}", summary="Profile", - name="https://prez.dev/endpoint/profile", + name="https://prez.dev/endpoint/system/profile-object", ) async def profile(request: Request, profile_curie: str, repo=Depends(get_system_repo)): - return await object_function(request, object_curie=profile_curie, repo=repo) + request_url = request.scope["path"] + endpoint_uri = URIRef(request.scope.get("route").name) + profile_uri = get_uri_for_curie_id(profile_curie) + return await object_function_new( + request=request, + endpoint_uri=endpoint_uri, + uri=profile_uri, + request_url=request_url, + repo=repo, + system_repo=repo, + ) diff --git a/prez/routers/search.py b/prez/routers/search.py index 29e01af3..272a9de6 100644 --- a/prez/routers/search.py +++ b/prez/routers/search.py @@ -71,7 +71,7 @@ async def search( ) full_query = generate_item_construct( - search_query, URIRef("https://prez.dev/profile/open") + search_query, URIRef("https://prez.dev/profile/open-object") ) graph, _ = await repo.send_queries([full_query], []) @@ -86,7 +86,7 @@ async def search( return await return_from_graph( graph, mediatype=prof_and_mt_info.mediatype, - profile=URIRef("https://prez.dev/profile/open"), + profile=URIRef("https://prez.dev/profile/open-object"), profile_headers=prof_and_mt_info.profile_headers, selected_class=prof_and_mt_info.selected_class, repo=repo, diff --git a/prez/routers/spaceprez.py b/prez/routers/spaceprez.py deleted file mode 100644 index 9002ab6a..00000000 --- a/prez/routers/spaceprez.py +++ /dev/null @@ -1,123 +0,0 @@ -from typing import Optional - -from fastapi import APIRouter, Request, Depends -from starlette.responses import PlainTextResponse - -from prez.dependencies import get_repo -from prez.services.objects import object_function -from prez.services.listings import listing_function -from prez.services.curie_functions import get_uri_for_curie_id -from prez.sparql.methods import Repo - -router = APIRouter(tags=["SpacePrez"]) - -SP_EP = Namespace("https://prez.dev/endpoint/spaceprez/") - - -@router.get("/s", summary="SpacePrez Home") -async def spaceprez_profiles(): - return PlainTextResponse("SpacePrez Home") - - -@router.get( - "/s/datasets", - summary="List Datasets", - name=SP_EP["dataset-listing"], -) -async def list_datasets( - request: Request, - repo: Repo = Depends(get_repo), - page: Optional[int] = 1, - per_page: Optional[int] = 20, -): - return await listing_function( - request=request, page=page, per_page=per_page, repo=repo - ) - - -@router.get( - "/s/datasets/{dataset_curie}/collections", - summary="List Feature Collections", - name=SP_EP["feature-collection-listing"], -) -async def list_feature_collections( - request: Request, - dataset_curie: str, - repo: Repo = Depends(get_repo), - page: Optional[int] = 1, - per_page: Optional[int] = 20, -): - endpoint_uri = SP_EP["feature-collection-listing"] - dataset_uri = get_uri_for_curie_id(dataset_curie) - return await listing_function( - request=request, - page=page, - per_page=per_page, - uri=dataset_uri, - repo=repo, - ) - - -@router.get( - "/s/datasets/{dataset_curie}/collections/{collection_curie}/items", - 
summary="List Features", - name=SP_EP["feature-listing"], -) -async def list_features( - request: Request, - dataset_curie: str, - collection_curie: str, - repo: Repo = Depends(get_repo), - page: Optional[int] = 1, - per_page: Optional[int] = 20, -): - collection_uri = get_uri_for_curie_id(collection_curie) - return await listing_function( - request=request, - page=page, - per_page=per_page, - uri=collection_uri, - repo=repo, - ) - - -@router.get( - "/s/datasets/{dataset_curie}", - summary="Get Dataset", - name=SP_EP["dataset-object"] -) -async def dataset_item( - request: Request, - dataset_curie: str, - repo: Repo = Depends(get_repo), -): - return await object_function(request, object_curie=dataset_curie, repo=repo) - - -@router.get( - "/s/datasets/{dataset_curie}/collections/{collection_curie}", - summary="Get Feature Collection", - name=SP_EP["feature-collection-object"], -) -async def feature_collection_item( - request: Request, - dataset_curie: str, - collection_curie: str, - repo: Repo = Depends(get_repo), -): - return await object_function(request, object_curie=collection_curie, repo=repo) - - -@router.get( - "/s/datasets/{dataset_curie}/collections/{collection_curie}/items/{feature_curie}", - summary="Get Feature", - name="https://prez.dev/endpoint/spaceprez/feature", -) -async def feature_item( - request: Request, - dataset_curie: str, - collection_curie: str, - feature_curie: str, - repo: Repo = Depends(get_repo), -): - return await object_function(request=request, object_curie=feature_curie, repo=repo) diff --git a/prez/routers/sparql.py b/prez/routers/sparql.py index 359343f9..82ad842b 100644 --- a/prez/routers/sparql.py +++ b/prez/routers/sparql.py @@ -58,8 +58,7 @@ async def sparql_endpoint( return JSONResponse(content=query_result) elif isinstance(query_result, Graph): return Response( - content=query_result.serialize(format="text/turtle"), - status_code=200 + content=query_result.serialize(format="text/turtle"), status_code=200 ) else: return StreamingResponse( diff --git a/prez/routers/vocprez.py b/prez/routers/vocprez.py deleted file mode 100644 index 09c302fa..00000000 --- a/prez/routers/vocprez.py +++ /dev/null @@ -1,301 +0,0 @@ -import logging - -from fastapi import APIRouter, Request -from fastapi import Depends -from fastapi import Depends -from rdflib import URIRef -from starlette.responses import PlainTextResponse - -from prez.bnode import get_bnode_depth -from prez.dependencies import get_repo -from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo -from prez.queries.vocprez import ( - get_concept_scheme_query, - get_concept_scheme_top_concepts_query, - get_concept_narrowers_query, -) -from prez.renderers.renderer import ( - return_from_graph, -) -from prez.response import StreamingTurtleAnnotatedResponse -from prez.routers.identifier import get_iri_route -from prez.services.objects import object_function -from prez.services.listings import listing_function -from prez.services.link_generation import _add_prez_links -from prez.services.curie_functions import get_curie_id_for_uri -from prez.sparql.methods import Repo -from prez.sparql.resource import get_resource - -router = APIRouter(tags=["VocPrez"]) - -log = logging.getLogger(__name__) - -vp_endpoints = { - "vocabs-listing": "https://prez.dev/endpoint/vocprez/vocabs-listing", - "collection-listing": "https://prez.dev/endpoint/vocprez/collection-listing", - "vocab-object": "https://prez.dev/endpoint/vocprez/vocab-object", - "collection-object": 
"https://prez.dev/endpoint/vocprez/collection-object", - "vocab-concept": "https://prez.dev/endpoint/vocprez/vocab-concept", - "collection-concept": "https://prez.dev/endpoint/vocprez/collection-concept", - "cs-children": "https://prez.dev/endpoint/vocprez/cs-children", - "cs-top-concepts": "https://prez.dev/endpoint/vocprez/cs-top-concepts", -} - - -@router.get("/v", summary="VocPrez Home") -async def vocprez_home(): - return PlainTextResponse("VocPrez Home") - - -@router.get( - "/v/vocab", - summary="List Vocabularies", - name=vp_endpoints["vocabs-listing"], -) -async def vocab_endpoint( - request: Request, - repo: Repo = Depends(get_repo), - page: int = 1, - per_page: int = 20, -): - endpoint_uri = URIRef(vp_endpoints["vocabs-listing"]) - return await listing_function_new( - request=request, - repo=repo, - endpoint_uri=endpoint_uri, - page=page, - per_page=per_page, - ) - - -@router.get( - "/v/vocab", - summary="List Vocabularies", - name="https://prez.dev/endpoint/vocprez/vocabs-listing", -) -async def vocab_endpoint( - request: Request, - repo: Repo = Depends(get_repo), - page: int = 1, - per_page: int = 20, -): - return await listing_function( - request=request, page=page, per_page=per_page, repo=repo - ) - - -@router.get( - "/v/collection", - summary="List Collections", - name=vp_endpoints["collection-listing"], -) -async def collection_endpoint( - request: Request, - repo: Repo = Depends(get_repo), - page: int = 1, - per_page: int = 20, -): - return await listing_function( - request=request, page=page, per_page=per_page, repo=repo - ) - - -@router.get( - "/v/vocab/{scheme_curie}/all", - summary="Get Concept Scheme and all its concepts", - name="https://prez.dev/endpoint/vocprez/vocab", -) -async def vocprez_scheme( - request: Request, scheme_curie: str, repo: Repo = Depends(get_repo) -): - """Get a SKOS Concept Scheme and all of its concepts. - - Note: This may be a very expensive operation depending on the size of the concept scheme. - """ - return await object_function(request, object_curie=scheme_curie, repo=repo) - - -@router.get( - "/v/vocab/{concept_scheme_curie}", - summary="Get a SKOS Concept Scheme", - name=vp_endpoints["vocab-object"], - response_class=StreamingTurtleAnnotatedResponse, - responses={ - 200: { - "content": {"text/turtle": {}}, - }, - }, -) -async def concept_scheme_route( - request: Request, - concept_scheme_curie: str, - repo: Repo = Depends(get_repo), -): - """Get a SKOS Concept Scheme. - - `prez:childrenCount` is an `xsd:integer` count of the number of top concepts for this Concept Scheme. - """ - profiles_mediatypes_info = ProfilesMediatypesInfo( - request=request, classes=frozenset([SKOS.ConceptScheme]) - ) - - if ( - str(profiles_mediatypes_info.mediatype) != "text/anot+turtle" - or str(profiles_mediatypes_info.mediatype) == "text/anot+turtle" - and str(profiles_mediatypes_info.profile) != "https://w3id.org/profile/vocpub" - ): - return RedirectResponse( - f"{request.url.path}/all{'?' 
if request.url.query else ''}{request.url.query}" - ) - - iri = get_iri_route(concept_scheme_curie) - resource = await get_resource(iri, repo) - bnode_depth = get_bnode_depth(iri, resource) - concept_scheme_query = get_concept_scheme_query(iri, bnode_depth) - item_graph, _ = await repo.send_queries([concept_scheme_query], []) - return await return_from_graph( - item_graph, - profiles_mediatypes_info.mediatype, - profiles_mediatypes_info.profile, - profiles_mediatypes_info.profile_headers, - profiles_mediatypes_info.selected_class, - repo, - ) - - -@router.get( - "/v/vocab/{concept_scheme_curie}/top-concepts", - name=vp_endpoints["cs-top-concepts"], - summary="Get a SKOS Concept Scheme's top concepts", - response_class=StreamingTurtleAnnotatedResponse, - responses={ - 200: { - "content": {"text/turtle": {}}, - }, - }, -) -async def cs_top_concepts_endpoint( - request: Request, - repo: Repo = Depends(get_repo), - page: int = 1, - per_page: int = 20, - repo: Repo = Depends(get_repo), -): - """Get a SKOS Concept Scheme's top concepts. - - `prez:childrenCount` is an `xsd:integer` count of the number of top concepts for this Concept Scheme. - """ - profiles_mediatypes_info = ProfilesMediatypesInfo( - request=request, classes=frozenset([SKOS.ConceptScheme]) - ) - - iri = get_iri_route(concept_scheme_curie) - concept_scheme_top_concepts_query = get_concept_scheme_top_concepts_query( - iri, page, per_page - ) - - graph, _ = await repo.send_queries([concept_scheme_top_concepts_query], []) - for concept in graph.objects(iri, SKOS.hasTopConcept): - if isinstance(concept, URIRef): - concept_curie = get_curie_id_for_uri(concept) - if "anot+" in profiles_mediatypes_info.mediatype: - await _add_prez_links(graph, repo) - return await return_from_graph( - graph, - profiles_mediatypes_info.mediatype, - profiles_mediatypes_info.profile, - profiles_mediatypes_info.profile_headers, - profiles_mediatypes_info.selected_class, - repo, - ) - - -@router.get( - "/v/vocab/{concept_scheme_curie}/{concept_curie}/narrowers", - name=vp_endpoints["cs-children"], - summary="Get a SKOS Concept's narrower concepts", - response_class=StreamingTurtleAnnotatedResponse, - responses={ - 200: { - "content": {"text/turtle": {}}, - }, - }, -) -async def concept_narrowers_route( - request: Request, - concept_scheme_curie: str, - concept_curie: str, - repo: Repo = Depends(get_repo), - page: int = 1, - per_page: int = 20, -): - """Get a SKOS Concept's narrower concepts. - - `prez:childrenCount` is an `xsd:integer` count of the number of narrower concepts for this concept. 
- """ - profiles_mediatypes_info = ProfilesMediatypesInfo( - request=request, classes=frozenset([SKOS.Concept]) - ) - - iri = get_iri_route(concept_curie) - concept_narrowers_query = get_concept_narrowers_query(iri, page, per_page) - - graph, _ = await repo.send_queries([concept_narrowers_query], []) - if "anot+" in profiles_mediatypes_info.mediatype: - await _add_prez_links(graph, repo) - return await return_from_graph( - graph, - profiles_mediatypes_info.mediatype, - profiles_mediatypes_info.profile, - profiles_mediatypes_info.profile_headers, - profiles_mediatypes_info.selected_class, - repo, - ) - - -@router.get( - "/v/vocab/{concept_scheme_curie}/{concept_curie}", - summary="Get a SKOS Concept", - name=vp_endpoints["vocab-concept"], - response_class=StreamingTurtleAnnotatedResponse, - responses={ - 200: { - "content": {"text/turtle": {}}, - }, - }, -) -async def concept_route( - request: Request, - concept_scheme_curie: str, - concept_curie: str, - repo: Repo = Depends(get_repo), -): - """Get a SKOS Concept.""" - return await object_function(request, object_curie=concept_curie, repo=repo) - - -@router.get( - "/v/collection/{collection_curie}", - summary="Get Collection", - name=vp_endpoints["collection-object"], -) -async def vocprez_collection( - request: Request, - collection_curie: str, - repo: Repo = Depends(get_repo), -): - return await object_function(request, object_curie=collection_curie, repo=repo) - - -@router.get( - "/v/collection/{collection_curie}/{concept_curie}", - summary="Get Concept", - name=vp_endpoints["collection-concept"], -) -async def vocprez_collection_concept( - request: Request, - collection_curie: str, - concept_curie: str, - repo: Repo = Depends(get_repo), -): - return await object_function(request, object_curie=concept_curie, repo=repo) diff --git a/prez/routers/vocprez.py.unused b/prez/routers/vocprez.py.unused new file mode 100644 index 00000000..dcc16e50 --- /dev/null +++ b/prez/routers/vocprez.py.unused @@ -0,0 +1,215 @@ +import logging + +from fastapi import APIRouter, Request +from fastapi import Depends +from rdflib import URIRef +from starlette.responses import PlainTextResponse + +from prez.dependencies import get_repo +from prez.response import StreamingTurtleAnnotatedResponse +from prez.services.curie_functions import get_uri_for_curie_id +from prez.services.listings import listing_function_new +from prez.services.objects import object_function_new +from prez.sparql.methods import Repo + +router = APIRouter(tags=["VocPrez"]) + +log = logging.getLogger(__name__) + +vp_endpoints = { + "vocabs-listing": "https://prez.dev/endpoint/vocprez/vocabs-listing", + "collection-listing": "https://prez.dev/endpoint/vocprez/collection-listing", + "vocab-object": "https://prez.dev/endpoint/vocprez/vocab-object", + "collection-object": "https://prez.dev/endpoint/vocprez/collection-object", + "vocab-concept": "https://prez.dev/endpoint/vocprez/vocab-concept", + "collection-concept": "https://prez.dev/endpoint/vocprez/collection-concept", + "cs-children": "https://prez.dev/endpoint/vocprez/cs-children", + "cs-top-concepts": "https://prez.dev/endpoint/vocprez/cs-top-concepts", +} + + +@router.get("/v", summary="VocPrez Home") +async def vocprez_home(): + return PlainTextResponse("VocPrez Home") + + +@router.get( + "/v/vocab", + summary="List Vocabularies", + name=vp_endpoints["vocabs-listing"], +) +async def vocab_endpoint( + request: Request, + repo: Repo = Depends(get_repo), + page: int = 1, + per_page: int = 20, +): + endpoint_uri = 
URIRef(vp_endpoints["vocabs-listing"]) + return await listing_function_new( + request=request, + repo=repo, + endpoint_uri=endpoint_uri, + page=page, + per_page=per_page, + ) + + +@router.get( + "/v/collection", + summary="List Collections", + name=vp_endpoints["collection-listing"], +) +async def collection_endpoint( + request: Request, + repo: Repo = Depends(get_repo), + page: int = 1, + per_page: int = 20, +): + endpoint_uri = URIRef(vp_endpoints["collection-listing"]) + return await listing_function_new( + request=request, + repo=repo, + endpoint_uri=endpoint_uri, + page=page, + per_page=per_page, + ) + + +@router.get( + "/v/vocab/{scheme_curie}/all", + summary="Get Concept Scheme and all its concepts", + name="https://prez.dev/endpoint/vocprez/vocab", +) +async def vocprez_scheme( + request: Request, scheme_curie: str, repo: Repo = Depends(get_repo) +): + """Get a SKOS Concept Scheme and all of its concepts. + + Note: This may be a very expensive operation depending on the size of the concept scheme. + """ + return await object_function_new(request, object_curie=scheme_curie, repo=repo) + + +@router.get( + "/v/vocab/{concept_scheme_curie}", + summary="Get a SKOS Concept Scheme", + name=vp_endpoints["vocab-object"], + response_class=StreamingTurtleAnnotatedResponse, + responses={ + 200: { + "content": {"text/turtle": {}}, + }, + }, +) +async def concept_scheme_route(request: Request, repo: Repo = Depends(get_repo)): + request_url = request.scope["path"] + endpoint_uri = URIRef(vp_endpoints["vocab-object"]) + object_uri = get_uri_for_curie_id(request.path_params["concept_scheme_curie"]) + return await object_function_new( + request, endpoint_uri, request_url, repo, object_uri + ) + + +@router.get( + "/v/vocab/{concept_scheme_curie}/top-concepts", + name=vp_endpoints["cs-top-concepts"], + summary="Get a SKOS Concept Scheme's top concepts", + response_class=StreamingTurtleAnnotatedResponse, + responses={ + 200: { + "content": {"text/turtle": {}}, + }, + }, +) +async def cs_top_concepts_endpoint( + request: Request, + repo: Repo = Depends(get_repo), + page: int = 1, + per_page: int = 20, +): + endpoint_uri = URIRef(vp_endpoints["cs-top-concepts"]) + return await listing_function_new( + request=request, + repo=repo, + endpoint_uri=endpoint_uri, + page=page, + per_page=per_page, + ) + + +@router.get( + "/v/vocab/{concept_scheme_curie}/{concept_curie}/narrowers", + name=vp_endpoints["cs-children"], + summary="Get a SKOS Concept's narrower concepts", + response_class=StreamingTurtleAnnotatedResponse, + responses={ + 200: { + "content": {"text/turtle": {}}, + }, + }, +) +async def cs_narrowers_endpoint( + request: Request, + concept_curie: str, + repo: Repo = Depends(get_repo), + page: int = 1, + per_page: int = 20, +): + endpoint_uri = URIRef(vp_endpoints["cs-children"]) + parent_uri = get_uri_for_curie_id(concept_curie) + return await listing_function_new( + request=request, + repo=repo, + endpoint_uri=endpoint_uri, + page=page, + per_page=per_page, + parent_uri=parent_uri, + ) + + +@router.get( + "/v/vocab/{concept_scheme_curie}/{concept_curie}", + summary="Get a SKOS Concept", + name=vp_endpoints["vocab-concept"], + response_class=StreamingTurtleAnnotatedResponse, + responses={ + 200: { + "content": {"text/turtle": {}}, + }, + }, +) +async def concept_scheme_route(request: Request, repo: Repo = Depends(get_repo)): + request_url = request.scope["path"] + endpoint_uri = URIRef(vp_endpoints["vocab-concept"]) + object_uri = get_uri_for_curie_id(request.path_params["concept_curie"]) + return 
await object_function_new(
+        request, endpoint_uri, request_url, repo, object_uri
+    )
+
+
+@router.get(
+    "/v/collection/{collection_curie}",
+    summary="Get Collection",
+    name=vp_endpoints["collection-object"],
+)
+async def collection_route(request: Request, repo: Repo = Depends(get_repo)):
+    request_url = request.scope["path"]
+    endpoint_uri = URIRef(vp_endpoints["collection-object"])
+    object_uri = get_uri_for_curie_id(request.path_params["collection_curie"])
+    return await object_function_new(
+        request, endpoint_uri, request_url, repo, object_uri
+    )
+
+
+@router.get(
+    "/v/collection/{collection_curie}/{concept_curie}",
+    summary="Get Concept",
+    name=vp_endpoints["collection-concept"],
+)
+async def collection_concept_route(request: Request, repo: Repo = Depends(get_repo)):
+    request_url = request.scope["path"]
+    endpoint_uri = URIRef(vp_endpoints["collection-concept"])
+    object_uri = get_uri_for_curie_id(request.path_params["concept_curie"])
+    return await object_function_new(
+        request, endpoint_uri, request_url, repo, object_uri
+    )
diff --git a/prez/services/link_generation.py b/prez/services/link_generation.py
index 3d327608..ef07556f 100644
--- a/prez/services/link_generation.py
+++ b/prez/services/link_generation.py
@@ -1,13 +1,15 @@
 import logging
+import time
 from string import Template
 from typing import FrozenSet
 
+from fastapi import Depends
 from rdflib import Graph, Literal, URIRef, DCTERMS, BNode
 
 from prez.cache import endpoints_graph_cache, links_ids_graph_cache
+from prez.dependencies import get_system_repo
 from prez.reference_data.prez_ns import PREZ
 from prez.services.curie_functions import get_curie_id_for_uri
-from prez.services.generate_profiles import results_pretty_printer
 from prez.services.model_methods import get_classes
 from prez.sparql.methods import Repo
 from prez.sparql.objects_listings import (
@@ -42,14 +44,18 @@ async def _add_prez_link_to_collection_page(
     item_graph.add((bnode, PREZ.link, Literal(collections_url)))
 
 
-async def _add_prez_links(graph: Graph, repo):
+async def _add_prez_links(graph: Graph, repo: Repo, system_repo: Repo):
     # get all URIRefs - if Prez can find a class and endpoint for them, an internal link will be generated.
uris = [uri for uri in graph.all_nodes() if isinstance(uri, URIRef)] + uri_to_klasses = {} for uri in uris: - await _create_internal_links_graph(uri, graph, repo) + uri_to_klasses[uri] = await get_classes(uri, repo) + for uri, klasses in uri_to_klasses.items(): + await _create_internal_links_graph(uri, graph, repo, klasses, system_repo) -async def _create_internal_links_graph(uri, graph, repo: Repo): + +async def _create_internal_links_graph(uri, graph, repo: Repo, klasses, system_repo): quads = list( links_ids_graph_cache.quads((None, None, None, uri)) ) # context required as not all triples that relate to links or identifiers for a particular object have that object's URI as the subject @@ -57,9 +63,10 @@ async def _create_internal_links_graph(uri, graph, repo: Repo): for quad in quads: graph.add(quad[:3]) else: - klasses = await get_classes(uri, repo) for klass in klasses: - endpoint_to_relations = get_endpoint_info_for_classes(frozenset([klass])) + endpoint_to_relations = await get_endpoint_info_for_classes( + frozenset([klass]), system_repo + ) relationship_query = generate_relationship_query(uri, endpoint_to_relations) if relationship_query: _, tabular_results = await repo.send_queries( @@ -72,7 +79,9 @@ async def _create_internal_links_graph(uri, graph, repo: Repo): links_ids_graph_cache.add(quad) # add the quad to the cache -def get_endpoint_info_for_classes(classes: FrozenSet[URIRef]) -> dict: +async def get_endpoint_info_for_classes( + classes: FrozenSet[URIRef], system_repo +) -> dict: """ Queries Prez's in memory reference data for endpoints to determine which endpoints are relevant for the classes an object has, along with information about "parent" objects included in the URL path for the object. This information @@ -80,13 +89,17 @@ def get_endpoint_info_for_classes(classes: FrozenSet[URIRef]) -> dict: and the predicate used for the relationship. 
""" endpoint_query = get_endpoint_template_queries(classes) - results = endpoints_graph_cache.query(endpoint_query) + results = await system_repo.send_queries([], [(None, endpoint_query)]) endpoint_to_relations = {} - if results.bindings != [{}]: - for result in results.bindings: - endpoint_template = result["endpoint_template"] + if results[1][0][1] != [{}]: + for result in results[1][0][1]: + endpoint_template = result["endpoint_template"]["value"] relation = result.get("relation_predicate") + if relation: + relation = URIRef(relation["value"]) direction = result.get("relation_direction") + if direction: + direction = URIRef(direction["value"]) if endpoint_template not in endpoint_to_relations: endpoint_to_relations[endpoint_template] = [(relation, direction)] else: diff --git a/prez/services/listings.py b/prez/services/listings.py index 9a6fb05a..e1b3c002 100644 --- a/prez/services/listings.py +++ b/prez/services/listings.py @@ -1,93 +1,38 @@ +import logging +import time from typing import Optional from fastapi import Request from rdflib import SH -from rdflib import URIRef, PROF +from rdflib import URIRef from prez.cache import profiles_graph_cache, endpoints_graph_cache -from prez.models.listing import ListingModel from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo from prez.reference_data.prez_ns import ONT from prez.renderers.renderer import return_from_graph, return_profiles from prez.services.link_generation import _add_prez_links from prez.sparql.methods import Repo from prez.sparql.objects_listings import ( - generate_listing_construct, temp_listing_count, ) from temp.cql2sparql import CQLParser from temp.grammar import SubSelect from temp.shacl2sparql import SHACLParser - -async def listing_function( - request: Request, - repo: Repo, - page: int = 1, - per_page: int = 20, - uri: str = None, -): - endpoint_uri = request.scope["route"].name - listing_item = ListingModel( - **request.path_params, - **request.query_params, - endpoint_uri=endpoint_uri, - uri=uri, - ) - prof_and_mt_info = ProfilesMediatypesInfo( - request=request, classes=listing_item.classes - ) - listing_item.selected_class = prof_and_mt_info.selected_class - listing_item.profile = prof_and_mt_info.profile - - if prof_and_mt_info.profile == URIRef( - "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" - ): - return await return_profiles( - classes=frozenset(listing_item.selected_class), - prof_and_mt_info=prof_and_mt_info, - repo=repo, - ) - - ordering_predicate = request.query_params.get("ordering-pred", None) - item_members_query = generate_listing_construct( - listing_item, - prof_and_mt_info.profile, - page=page, - per_page=per_page, - ordering_predicate=ordering_predicate, - ) - count_query = temp_listing_count(listing_item, endpoint_uri) - if listing_item.selected_class in [ - URIRef("https://prez.dev/ProfilesList"), - PROF.Profile, - ]: - list_graph = profiles_graph_cache.query(item_members_query).graph - count_graph = profiles_graph_cache.query(count_query).graph - item_graph = list_graph + count_graph - else: - item_graph, _ = await repo.send_queries([count_query, item_members_query], []) - if "anot+" in prof_and_mt_info.mediatype: - await _add_prez_links(item_graph, repo) - return await return_from_graph( - item_graph, - prof_and_mt_info.mediatype, - listing_item.profile, - prof_and_mt_info.profile_headers, - prof_and_mt_info.selected_class, - repo, - ) +log = logging.getLogger(__name__) async def listing_function_new( request: Request, repo: Repo, + system_repo: Repo, endpoint_uri: 
URIRef, page: int = 1, per_page: int = 20, parent_uri: Optional[URIRef] = None, cql: dict = None, ): + queries = [] # class is from endpoint definition. listing_class = endpoints_graph_cache.value(endpoint_uri, ONT.deliversClasses) target_class = endpoints_graph_cache.value(endpoint_uri, SH.targetClass) @@ -104,7 +49,11 @@ async def listing_function_new( return await return_profiles( classes=frozenset(selected_class), prof_and_mt_info=prof_and_mt_info ) - runtime_values = {"limit": per_page, "offset": (page - 1) * per_page, "parent_1": parent_uri} + runtime_values = { + "limit": per_page, + "offset": (page - 1) * per_page, + "parent_1": parent_uri, + } shacl_parser = SHACLParser( runtime_values, endpoints_graph_cache, @@ -120,12 +69,16 @@ async def listing_function_new( shacl_parser.additional_ggps = cql_select_ggps shacl_parser.generate_sparql() - query_str = shacl_parser.sparql + queries.append(shacl_parser.sparql) - # pull the subselect out of the query string - subselect = find_instances(shacl_parser.main_where_ggps, SubSelect)[0] # assume there's only one subselect - subselect.solution_modifier = None # remove the limit and offset from the subselect so that we can get a count - count_query = temp_listing_count(subselect, target_class) + # add a count query if it's an annotated mediatype + if "anot+" in prof_and_mt_info.mediatype: + # pull the subselect out of the query string + subselect = find_instances(shacl_parser.main_where_ggps, SubSelect)[ + 0 + ] # assume there's only one subselect + subselect.solution_modifier = None # remove the limit and offset from the subselect so that we can get a count + queries.append(temp_listing_count(subselect, target_class)) # if selected_class in [ # URIRef("https://prez.dev/ProfilesList"), @@ -136,11 +89,11 @@ async def listing_function_new( # item_graph = list_graph + count_graph # else: item_graph, _ = await repo.send_queries( - rdf_queries=[count_query, query_str], + rdf_queries=queries, tabular_queries=[], ) if "anot+" in prof_and_mt_info.mediatype: - await _add_prez_links(item_graph, repo) + await _add_prez_links(item_graph, repo, system_repo) return await return_from_graph( item_graph, prof_and_mt_info.mediatype, @@ -162,12 +115,12 @@ def find_instances(obj, cls): elif isinstance(obj, dict): for key, value in obj.items(): found.extend(find_instances(value, cls)) - elif hasattr(obj, '__iter__') and not isinstance(obj, str): + elif hasattr(obj, "__iter__") and not isinstance(obj, str): for item in obj: found.extend(find_instances(item, cls)) # If the object has attributes, search recursively in each - elif hasattr(obj, '__dict__'): + elif hasattr(obj, "__dict__"): for key, value in obj.__dict__.items(): found.extend(find_instances(value, cls)) diff --git a/prez/services/model_methods.py b/prez/services/model_methods.py index b3965f5d..950a813f 100644 --- a/prez/services/model_methods.py +++ b/prez/services/model_methods.py @@ -1,10 +1,11 @@ +import logging from rdflib import URIRef from prez.cache import endpoints_graph_cache from prez.sparql.methods import Repo -from prez.cache import endpoints_graph_cache -from prez.sparql.methods import Repo +log = logging.getLogger(__name__) + async def get_classes( uri: URIRef, repo: Repo, endpoint: URIRef = None @@ -16,7 +17,9 @@ async def get_classes( SELECT ?class {{ <{uri}> a ?class }} """ + # a = time.time() _, r = await repo.send_queries([], [(uri, q)]) + # log.debug(f"Time to query: {q}\n{time.time() - a}") tabular_result = r[0] # should only be one result - only one query sent if endpoint != 
URIRef("https://prez.dev/endpoint/object"): endpoint_classes = list( diff --git a/prez/services/objects.py b/prez/services/objects.py index 1dba764c..4e6e0107 100644 --- a/prez/services/objects.py +++ b/prez/services/objects.py @@ -1,7 +1,5 @@ import logging -from typing import Optional -from fastapi import HTTPException from fastapi import Request from rdflib import URIRef @@ -10,99 +8,24 @@ from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo from prez.reference_data.prez_ns import PREZ, EP from prez.renderers.renderer import return_from_graph, return_profiles -from prez.services.curie_functions import get_uri_for_curie_id from prez.services.link_generation import ( _add_prez_links, _add_prez_link_to_collection_page, ) from prez.services.model_methods import get_classes from prez.sparql.methods import Repo -from prez.sparql.objects_listings import generate_item_construct -from prez.sparql.objects_listings import generate_listing_construct from temp.shacl2sparql import SHACLParser - log = logging.getLogger(__name__) -async def object_function( - request: Request, - repo: Repo, - object_curie: Optional[str] = None, -): - endpoint_uri = URIRef(request.scope["route"].name) - if endpoint_uri == URIRef("https://prez.dev/endpoint/object"): - if not request.query_params.get("uri"): - raise HTTPException( - status_code=400, - detail="A URI for an object must be supplied on the /object endpoint, for example " - "/object?uri=https://an-object-uri", - ) - uri = URIRef(request.query_params.get("uri")) - elif object_curie: - uri = get_uri_for_curie_id(object_curie) - else: - raise HTTPException( - status_code=400, - detail="The 'uri' is required for non-object endpoints", - ) - - klasses = await get_classes(uri=uri, repo=repo, endpoint=endpoint_uri) - # ConnegP - needs improvement - prof_and_mt_info = ProfilesMediatypesInfo(request=request, classes=klasses) - # if we're on the object endpoint and a profile hasn't been requested, use the open profile - if (endpoint_uri == URIRef("https://prez.dev/endpoint/object")) and not ( - prof_and_mt_info.req_profiles or prof_and_mt_info.req_profiles_token - ): - prof_and_mt_info.selected_class = None - prof_and_mt_info.profile = PREZ["profile/open"] - # create the object with all required info - object_item = ObjectItem( # object item now does not need request - uri=uri, - classes=klasses, - profile=prof_and_mt_info.profile, - selected_class=prof_and_mt_info.selected_class, - ) - if prof_and_mt_info.profile == URIRef( - "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" - ): - return await return_profiles( - classes=frozenset(object_item.selected_class), - prof_and_mt_info=prof_and_mt_info, - repo=repo, - ) - - item_query = generate_item_construct(object_item, object_item.profile) - - ordering_predicate = request.query_params.get("ordering-pred", None) - item_members_query = generate_listing_construct( - object_item, prof_and_mt_info.profile, 1, 20, ordering_predicate - ) - if object_item.selected_class == URIRef("http://www.w3.org/ns/dx/prof/Profile"): - item_graph = profiles_graph_cache.query(item_query).graph - if item_members_query: - list_graph = profiles_graph_cache.query(item_members_query).graph - item_graph += list_graph - else: - item_graph, _ = await repo.send_queries([item_query, item_members_query], []) - if "anot+" in prof_and_mt_info.mediatype: - await _add_prez_links(item_graph, repo) - return await return_from_graph( - item_graph, - prof_and_mt_info.mediatype, - object_item.profile, - prof_and_mt_info.profile_headers, - 
prof_and_mt_info.selected_class, - repo, - ) - - async def object_function_new( request: Request, endpoint_uri: URIRef, uri: URIRef, request_url: str, - repo=Repo, + repo: Repo, + system_repo: Repo, ): klasses = await get_classes(uri=uri, repo=repo, endpoint=endpoint_uri) # ConnegP @@ -149,7 +72,7 @@ async def object_function_new( await _add_prez_link_to_collection_page( item_graph, uri, request_url, endpoint_uri ) - await _add_prez_links(item_graph, repo) + await _add_prez_links(item_graph, repo, system_repo) return await return_from_graph( item_graph, prof_and_mt_info.mediatype, diff --git a/prez/sparql/methods.py b/prez/sparql/methods.py index f12611d5..3ad0118b 100644 --- a/prez/sparql/methods.py +++ b/prez/sparql/methods.py @@ -1,5 +1,6 @@ import asyncio import logging +import time from abc import ABC, abstractmethod from typing import List from typing import Tuple @@ -27,7 +28,7 @@ async def tabular_query_to_table(self, query: str, context: URIRef = None): pass async def send_queries( - self, rdf_queries: List[str], tabular_queries: List[Tuple[URIRef, str]] = None + self, rdf_queries: List[str], tabular_queries: List[Tuple[URIRef, str]] = None ): # Common logic to send both query types in parallel results = await asyncio.gather( @@ -48,12 +49,11 @@ async def send_queries( return g, tabular_results @abstractmethod - def sparql(self, query: str, raw_headers: list[tuple[bytes, bytes]], method: str = "GET"): + def sparql( + self, query: str, raw_headers: list[tuple[bytes, bytes]], method: str = "GET" + ): pass - @abstractmethod - async def tabular_query_to_table(self, query: str, context: URIRef = None): - pass class RemoteSparqlRepo(Repo): def __init__(self, async_client: httpx.AsyncClient): @@ -90,8 +90,12 @@ async def tabular_query_to_table(self, query: str, context: URIRef = None): The optional context parameter allows an identifier to be supplied with the query, such that multiple results can be distinguished from each other. 
""" + a = time.time() + log.debug(msg=f"query sent:{a} || {context} || {query}") response = await self._send_query(query, "application/sparql-results+json") await response.aread() + log.debug(msg=f"response received || {context} {time.time()}") + log.debug(msg=f"time diff: {time.time() -a }") return context, response.json()["results"]["bindings"] async def sparql( @@ -118,7 +122,9 @@ class PyoxigraphRepo(Repo): def __init__(self, pyoxi_store: pyoxigraph.Store): self.pyoxi_store = pyoxi_store - def _handle_query_solution_results(self, results: pyoxigraph.QuerySolutions) -> dict: + def _handle_query_solution_results( + self, results: pyoxigraph.QuerySolutions + ) -> dict: """Organise the query results into format serializable by FastAPIs JSONResponse.""" variables = results.variables results_dict = {"head": {"vars": [v.value for v in results.variables]}} @@ -181,7 +187,9 @@ async def tabular_query_to_table(self, query: str, context: URIRef = None) -> li self._sync_tabular_query_to_table, query, context ) - async def sparql(self, query: str, raw_headers: list[tuple[bytes, bytes]], method: str = "") -> list | Graph | bool: + async def sparql( + self, query: str, raw_headers: list[tuple[bytes, bytes]], method: str = "" + ) -> list | Graph | bool: return self._sparql(query) @staticmethod diff --git a/prez/sparql/objects_listings.py b/prez/sparql/objects_listings.py index 5b3c365c..d856ad55 100644 --- a/prez/sparql/objects_listings.py +++ b/prez/sparql/objects_listings.py @@ -14,6 +14,7 @@ from prez.models.profiles_listings import ProfilesMembers from prez.reference_data.prez_ns import ONT from prez.services.curie_functions import get_uri_for_curie_id +from temp.grammar import SubSelect log = logging.getLogger(__name__) @@ -285,12 +286,6 @@ def generate_exclude_predicates(exclude_predicates): return "" -def generate_exclude_predicates(exclude_predicates): - if exclude_predicates: - return f"""FILTER(?p NOT IN ({chr(10).join([f"<{p}>" for p in exclude_predicates])}))""" - return "" - - def generate_inverse_predicates(inverse_predicates): """ Generates a SPARQL VALUES clause for a list of inverse predicates, of the form: @@ -497,10 +492,11 @@ def get_annotations_from_tbox_cache( all = list(chain(*props_from_cache.values())) default_language = settings.default_language for triple in all: - if triple[2].language == default_language: - labels_from_cache.add(triple) - elif triple[2].language is None: - labels_from_cache.add(triple) + if isinstance(triple[2], Literal): + if triple[2].language == default_language: + labels_from_cache.add(triple) + elif triple[2].language is None: + labels_from_cache.add(triple) # the remaining terms are not in the cache; we need to query the SPARQL endpoint to attempt to get them uncached_props = { k: list(set(terms) - set(triple[0] for triple in v)) @@ -572,7 +568,7 @@ def temp_listing_count(subquery: SubSelect, klass): {klass.n3()} prez:count ?count }} WHERE {{ - SELECT (COUNT(?focus_node) as ?count) {{ {subquery} }} + SELECT (COUNT(DISTINCT ?focus_node) as ?count) {{ {subquery} }} }}""" @@ -749,11 +745,11 @@ def select_profile_mediatype( the base class delivered by that API endpoint. 
The base classes delivered by each API endpoint are: SpacePrez: - /s/datasets -> prez:DatasetList - /s/datasets/{ds_id} -> dcat:Dataset - /s/datasets/{ds_id}/collections/{fc_id} -> geo:FeatureCollection - /s/datasets/{ds_id}/collections -> prez:FeatureCollectionList - /s/datasets/{ds_id}/collections/{fc_id}/features -> geo:Feature + /s/catalogs -> prez:DatasetList + /s/catalogs/{ds_id} -> dcat:Dataset + /s/catalogs/{ds_id}/collections/{fc_id} -> geo:FeatureCollection + /s/catalogs/{ds_id}/collections -> prez:FeatureCollectionList + /s/catalogs/{ds_id}/collections/{fc_id}/features -> geo:Feature VocPrez: /v/schemes -> skos:ConceptScheme @@ -797,9 +793,9 @@ def select_profile_mediatype( ?class rdfs:subClassOf* ?mid . ?mid rdfs:subClassOf* ?base_class . VALUES ?base_class {{ dcat:Dataset geo:FeatureCollection prez:FeatureCollectionList prez:FeatureList geo:Feature - skos:ConceptScheme skos:Concept skos:Collection prez:DatasetList prez:VocPrezCollectionList prez:SchemesList + skos:ConceptScheme skos:Concept prez:ConceptList skos:Collection prez:DatasetList prez:VocPrezCollectionList prez:SchemesList prez:CatalogList prez:ResourceList prez:ProfilesList dcat:Catalog dcat:Resource prof:Profile prez:SPARQLQuery - prez:SearchResult }} + prez:SearchResult prez:CQLObjectList }} ?profile altr-ext:constrainsClass ?class ; altr-ext:hasResourceFormat ?format ; dcterms:title ?title .\ @@ -861,14 +857,16 @@ def get_endpoint_template_queries(classes: FrozenSet[URIRef]): }} UNION {{ - ?endpoint ?relation_direction ?relation_predicate ; + ?parent_endpoint ?relation_direction ?relation_predicate . + ?endpoint ?ep_relation_direction ?ep_relation_predicate ; ont:endpointTemplate ?endpoint_template ; ont:deliversClasses ?classes . FILTER(?classes IN ({", ".join('<' + str(klass) + '>' for klass in classes)})) VALUES ?relation_direction {{ont:FocusToParentRelation ont:ParentToFocusRelation}} + VALUES ?ep_relation_direction {{ont:FocusToParentRelation ont:ParentToFocusRelation}} {{ SELECT ?parent_endpoint ?endpoint (count(?intermediate) as ?distance) {{ - ?endpoint ont:parentEndpoint+ ?intermediate ; + ?endpoint ont:parentEndpoint* ?intermediate ; ont:deliversClasses ?classes . ?intermediate ont:parentEndpoint* ?parent_endpoint . ?intermediate a ?intermediateEPClass . @@ -880,7 +878,7 @@ def get_endpoint_template_queries(classes: FrozenSet[URIRef]): }} }} - }} ORDER BY DESC(?distance) + }} ORDER BY ASC(?distance) """ return query @@ -892,7 +890,7 @@ def generate_relationship_query( Generates a SPARQL query of the form: SELECT * {{ SELECT ?endpoint ?parent_1 ?parent_2 WHERE { - BIND("/s/datasets/$parent_1/collections/$object" as ?endpoint) + BIND("/s/catalogs/$parent_1/collections/$object" as ?endpoint) ?parent_1 . 
}}}
     """
@@ -900,12 +898,13 @@
         return None
     subqueries = []
    for endpoint, relations in endpoint_to_relations.items():
-        subquery = f"""{{ SELECT ?endpoint {" ".join(["?parent_" + str(i + 1) for i, _ in enumerate(relations)])}
+        subquery = f"""{{ SELECT ?endpoint {" ".join(["?parent_" + str(i + 1) for i, pred in enumerate(relations)])}
         WHERE {{\n BIND("{endpoint}" as ?endpoint)\n"""
         uri_str = f"<{uri}>"
         for i, relation in enumerate(relations):
             predicate, direction = relation
             if predicate:
+                parent = "?parent_" + str(i + 1)  # match the ?parent_N variables in the SELECT clause above
                 if direction == URIRef("https://prez.dev/ont/ParentToFocusRelation"):
                     subquery += f"{parent} <{predicate}> {uri_str} .\n"
                 else:  # assuming the direction is "focus_to_parent"
diff --git a/prez/url.py b/prez/url.py
index 2d836081..0635d59a 100644
--- a/prez/url.py
+++ b/prez/url.py
@@ -10,11 +10,11 @@ def order_urls(order: list[str], values: list[str]):
     >>> preferred_order = [
     >>>     "/v/vocab",
     >>>     "/v/collection",
-    >>>     "/s/datasets",
+    >>>     "/s/catalogs",
     >>>     "/c/catalogs"
     >>> ]
     >>> urls = [
-    >>>     "/s/datasets/blah",
+    >>>     "/s/catalogs/blah",
     >>>     "/object/blah",
     >>>     "/v/collection/123",
     >>>     "/c/catalogs/321",
@@ -24,7 +24,7 @@ def order_urls(order: list[str], values: list[str]):
     >>> assert sorted_urls == [
     >>>     "/v/vocab/some-scheme",
     >>>     "/v/collection/123",
-    >>>     "/s/datasets/blah",
+    >>>     "/s/catalogs/blah",
     >>>     "/c/catalogs/321",
     >>>     "/object/blah"
     >>> ]
diff --git a/pyproject.toml b/pyproject.toml
index b00e1881..aac7de44 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -8,7 +8,7 @@ authors = ["Jamie Feiss", "Nicholas Car"]
diff --git a/temp/grammar.py b/temp/grammar.py
--- a/temp/grammar.py
+++ b/temp/grammar.py
@@ class SimplifiedTriple(SPARQLGrammarBase)
     def render(self) -> Generator[str, None, None]:
-        yield f"\t{self.subject.n3()} {self.predicate.n3()} {self.object.n3()} ."
+        yield from self.subject.render()
+        yield " "
+        yield from self.predicate.render()
+        yield " "
+        yield from self.object.render()
+        yield " ."
 
     def __hash__(self):
         return hash((self.subject, self.predicate, self.object))
@@ -88,12 +96,15 @@ class InlineDataOneVar(SPARQLGrammarBase):
     InlineDataOneVar ::= Var '{' DataBlockValue* '}'
     """
 
-    variable: Variable
-    values: List[Union[URIRef, Literal]]
+    variable: Var
+    values: List[Union[IRI, RDFLiteral]]
 
     def render(self) -> Generator[str, None, None]:
-        yield f"{self.variable.n3()} {{ "
-        yield " ".join(value.n3() for value in self.values)
+        yield from self.variable.render()
+        yield " { "
+        for value in self.values:
+            yield from value.render()
+            yield " "
         yield " }"
 
 
@@ -103,13 +114,15 @@ class InlineDataFull(SPARQLGrammarBase):
     ( NIL | '(' Var* ')' ) '{' ( '(' DataBlockValue* ')' | NIL )* '}'
     """
 
-    variables: List[Variable]
-    values: List[List[Union[URIRef, Literal]]]
+    vars: List[Var]
+    values: List[List[Union[IRI, RDFLiteral]]]
 
     def render(self) -> Generator[str, None, None]:
         if self.vars:
             yield "("
-            yield " ".join(var.n3() for var in self.vars)
+            for var in self.vars:
+                yield from var.render()
+                yield " "
             yield ") {"
         else:
             yield "{"
@@ -120,7 +133,9 @@ def render(self) -> Generator[str, None, None]:
-        for values_block in self.values_blocks:
+        for values_block in self.values:
             if values_block:
                 yield "("
-                yield " ".join(value.n3() for value in values_block)
+                for value in values_block:
+                    yield from value.render()
+                    yield " "
                 yield ")"
             else:
                 yield "()"
@@ -235,16 +250,37 @@ def add_triple(self, triple):
 # self.patterns.append(triples)
+# class SelectClause(SPARQLGrammarBase):
+#     """
+#     https://www.w3.org/TR/sparql11-query/#rSelectClause
+#     SelectClause ::= 'SELECT' ( 'DISTINCT' | 'REDUCED' )? ( ( Var | ( '(' Expression 'AS' Var ')' ) )+ | '*' )
+#     Simplified model excluding casting of variables (e.g. 
(?var AS ?alias)) +# """ +# +# distinct: Optional[bool] = None +# reduced: Optional[bool] = None +# variables_or_all: Union[List[Var], str] +# +# def render(self): +# yield "SELECT" +# if self.distinct: +# yield " DISTINCT" +# elif self.reduced: +# yield " REDUCED" +# if isinstance(self.variables_or_all, str): +# yield " *" +# else: +# for var in self.variables_or_all: +# yield from var.render() class SelectClause(SPARQLGrammarBase): """ https://www.w3.org/TR/sparql11-query/#rSelectClause SelectClause ::= 'SELECT' ( 'DISTINCT' | 'REDUCED' )? ( ( Var | ( '(' Expression 'AS' Var ')' ) )+ | '*' ) - Simplified model excluding casting of variables (e.g. (?var AS ?alias)) """ distinct: Optional[bool] = None reduced: Optional[bool] = None - variables_or_all: Union[List[Variable], str] + variables_or_all: Union[List[Union[Var, Tuple[Expression, Var]]], str] def render(self): yield "SELECT" @@ -255,11 +291,25 @@ def render(self): if isinstance(self.variables_or_all, str): yield " *" else: - for var in self.variables_or_all: - yield f" {var.n3()}" + for item in self.variables_or_all: + if isinstance(item, Var): + yield " " + yield from item.render() + elif isinstance(item, Tuple): + expression, as_var = item + yield " (" + yield from expression.render() + yield " AS " + yield from as_var.render() + yield ")" class SubSelect(SPARQLGrammarBase): + """ + https://www.w3.org/TR/sparql11-query/#rSubSelect + SubSelect ::= SelectClause WhereClause SolutionModifier ValuesClause + """ + select_clause: SelectClause where_clause: WhereClause solution_modifier: Optional[SolutionModifier] = None @@ -281,6 +331,7 @@ class SubSelectString(SubSelect): select_clause: Optional[str] = None where_clause: Optional[str] = None + solution_modifier: Optional[SolutionModifier] = None select_string: str @field_validator("select_string") @@ -288,11 +339,13 @@ def validate_and_transform_select_string(cls, v): try: return translateAlgebra(prepareQuery(v)) except Exception as e: - # Handle exceptions from your translation function here - raise ValueError(f"Invalid Select Subquery: {e}") + log.error(msg=f'Potential query issue, or RDFLib bug: "{str(e)}"') + return v def render(self): yield self.select_string + if self.solution_modifier: + yield from self.solution_modifier.render() class GroupGraphPattern(SPARQLGrammarBase): @@ -310,41 +363,141 @@ def render(self) -> Generator[str, None, None]: class Filter(SPARQLGrammarBase): - variable: Variable - expression: Union[URIRef, str] - value: Optional[Union[Literal, List[Union[URIRef, Literal]]]] = None + """ + Represents a SPARQL FILTER clause. 
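+    e.g. FILTER((?focus_node NOT IN (<https://example.com/ExcludedType>)))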
+    Filter ::= 'FILTER' Constraint
+    """
+
+    constraint: Constraint
 
     def render(self) -> Generator[str, None, None]:
-        if self.expression in ["<", ">", "<=", ">="]:
-            yield f"\n\tFILTER({self.variable.n3()}{self.expression}{self.value.n3()})"
-        elif self.expression == "regex":
-            yield f"\n\tFILTER regex({self.variable.n3()}, {self.value.n3()})"
-        elif self.expression in cql_sparql_spatial_mapping.values():
-            yield f"\n\tFILTER({self.expression.n3()}({self.variable.n3()}, {self.value.n3()}))"
-        elif self.expression == "NOT IN":
-            yield f'\n\tFILTER({self.variable.n3()} NOT IN({", ".join([value.n3() for value in self.value])}))'
-        elif self.expression == "ISBLANK":
-            yield f"\n\tFILTER(ISBLANK({self.variable.n3()}))"
+        yield "FILTER("
+        yield from self.constraint.render()
+        yield ")"
+
+    @classmethod
+    def filter_in(
+        cls, focus: Var, comparators: List[PrimaryExpression], not_in: bool = False
+    ) -> Filter:
+        """
+        Convenience method to create a FILTER clause to check if the focus is in/not in the list of comparators.
+        """
+        # Wrap the focus in a NumericExpression
+        numeric_left = NumericExpression(
+            additive_expression=AdditiveExpression(
+                base_expression=MultiplicativeExpression(
+                    base_expression=UnaryExpression(
+                        primary_expression=PrimaryExpression(content=focus)
+                    )
+                )
+            )
+        )
+        # Wrap each comparator in an Expression
+        comparator_exprs = [Expression.from_primary_expr(comp) for comp in comparators]
+        # Create the ExpressionList for IN/NOT IN
+        in_list = ExpressionList(expressions=comparator_exprs)
+        # Build the RelationalExpression for IN or NOT IN
+        relational_expr = RelationalExpression(
+            left=numeric_left, operator="NOT IN" if not_in else "IN", right=in_list
+        )
+        # Build the ValueLogical to wrap the RelationalExpression
+        value_logical = ValueLogical(relational_expression=relational_expr)
+        # Build the ConditionalAndExpression to wrap the ValueLogical
+        conditional_and_expr = ConditionalAndExpression(value_logicals=[value_logical])
+        # Build the ConditionalOrExpression to wrap the ConditionalAndExpression
+        conditional_or_expr = ConditionalOrExpression(
+            conditional_and_expressions=[conditional_and_expr]
+        )
+        expression = Expression(conditional_or_expression=conditional_or_expr)
+        # Create and return the Filter
+        bracketted_expr = BrackettedExpression(expression=expression)
+        return cls(constraint=Constraint(content=bracketted_expr))
+
+
+class Constraint(SPARQLGrammarBase):
+    """
+    Represents a SPARQL Constraint.
+    Constraint ::= BrackettedExpression | BuiltInCall | FunctionCall
+    """
+
+    content: Union[BrackettedExpression, BuiltInCall, FunctionCall]
+
+    def render(self) -> Generator[str, None, None]:
+        yield from self.content.render()
+
+
+class FunctionCall(SPARQLGrammarBase):
+    """
+    Represents a SPARQL FunctionCall.
+    FunctionCall ::= iri ArgList
+    """
+
+    iri: IRI
+    arg_list: ArgList
+
+    def render(self) -> Generator[str, None, None]:
+        yield from self.iri.render()
+        yield from self.arg_list.render()  # ArgList renders its own parentheses
+
+
+class ArgList(SPARQLGrammarBase):
+    """
+    Represents a SPARQL ArgList.
+    ArgList ::= NIL | '(' 'DISTINCT'? Expression ( ',' Expression )* ')'
+    """
+
+    expressions: Optional[List["Expression"]] = None
+    distinct: bool = False
+
+    def render(self) -> Generator[str, None, None]:
+        if not self.expressions:
+            yield "()"
+        else:
+            yield "("
+            if self.distinct:
+                yield "DISTINCT "
+            for i, expr in enumerate(self.expressions):
+                yield from expr.render()
+                if i < len(self.expressions) - 1:
+                    yield ", "
+            yield ")"
+
+
+# class Filter(SPARQLGrammarBase):
+#     variable: Var
+#     expression: Union[IRI, str]
+#     value: Optional[Union[RDFLiteral, List[Union[IRI, RDFLiteral]]]] = None
+#
+#     def render(self) -> Generator[str, None, None]:
+#         if self.expression in ["<", ">", "<=", ">="]:
+#             yield f"\n\tFILTER({self.variable.render()}{self.expression}{self.value.n3()})"
+#         elif self.expression == "regex":
+#             yield f"\n\tFILTER regex({self.variable.render()}, {self.value.n3()})"
+#         elif self.expression in cql_sparql_spatial_mapping.values():
+#             yield f"\n\tFILTER({self.expression.n3()}({self.variable.render()}, {self.value.n3()}))"
+#         elif self.expression == "NOT IN":
+#             yield f'\n\tFILTER({self.variable.render()} NOT IN({", ".join([value.n3() for value in self.value])}))'
+#         elif self.expression == "ISBLANK":
+#             yield f"\n\tFILTER(ISBLANK({self.variable.render()}))"
+
+
 class Bind(SPARQLGrammarBase):
     """
-    An incorrect implemenation of BIND so as to avoid implementing a lot of the Grammar
-    This is a simplified implementation that at present ONLY caters to the following kind of bind
-    BIND({ triple pattern } AS ?var
-    Ideally the whole SPARQL Grammar is implemented as per spec and convenience functions are created for common use
-    cases
     Bind ::= 'BIND' '(' Expression 'AS' Var ')'
     https://www.w3.org/TR/sparql11-query/#rBind
     """
 
-    expression: str
-    triple: SimplifiedTriple
-    var: Variable
+    expression: Expression
+    var: Var
 
-    def render(self):
-        yield f"\n\tBIND({self.expression}{{ {self.triple.render()} }} AS {self.var.n3()})"
+    def render(self) -> Generator[str, None, None]:
+        yield "BIND("
+        yield from self.expression.render()
+        yield " AS "
+        yield from self.var.render()
+        yield ")"
 
 
 class OptionalGraphPattern(SPARQLGrammarBase):
@@ -395,14 +548,16 @@ def render(self) -> Generator[str, None, None]:
 
 
 class OrderCondition(SPARQLGrammarBase):
-    var: Variable
+    var: Var
     direction: Optional[str] = None
 
     def render(self):
         if self.direction:
-            yield f"{self.direction}({self.var.n3()})"
+            yield f"{self.direction}("
+            yield from self.var.render()
+            yield ")"
         else:
-            yield self.var.n3()
+            yield from self.var.render()
 
 
 class OrderClause(SPARQLGrammarBase):
@@ -442,9 +597,8 @@ class SolutionModifier(SPARQLGrammarBase):
 
     order_by: Optional[OrderClause] = None
     limit_offset: Optional[LimitOffsetClauses] = None
-
-    # having: Optional[HavingClause]
-    # group_by: Optional[GroupClause]
+    group_by: Optional[GroupClause] = None
 
     def render(self) -> str:
+        if self.group_by:
+            yield from self.group_by.render()
         if self.order_by:
@@ -455,6 +609,45 @@ def render(self) -> str:
             yield from self.limit_offset.render()
 
 
+class GroupClause(SPARQLGrammarBase):
+    """
+    https://www.w3.org/TR/sparql11-query/#rGroupClause
+    GroupClause ::= 'GROUP' 'BY' GroupCondition+
+    """
+
+    group_conditions: List[GroupCondition]
+
+    def render(self) -> Generator[str, None, None]:
+        yield "\nGROUP BY "
+        for condition in self.group_conditions:
+            yield from condition.render()
+            yield " "
+
+
+class GroupCondition(SPARQLGrammarBase):
+    """
+    https://www.w3.org/TR/sparql11-query/#rGroupCondition
+    GroupCondition ::= BuiltInCall | FunctionCall | '(' Expression ( 'AS' Var )? ')' | Var
+    """
+
+    variable: Optional[Var] = None
+    expression: Optional[str] = None
+    as_variable: Optional[Var] = None
+
+    def render(self) -> Generator[str, None, None]:
+        if self.variable:
+            yield from self.variable.render()
+        elif self.expression:
+            yield f"({self.expression}"
+            if self.as_variable:
+                yield " AS "
+                yield from self.as_variable.render()
+            yield ")"
+        else:
+            raise ValueError(
+                "GroupCondition must have either a variable or an expression defined."
+            )
+
+
 class ConstructTriples(SPARQLGrammarBase):
     """
     https://www.w3.org/TR/sparql11-query/#rConstructTriples
@@ -519,24 +712,433 @@ def render(self) -> Generator[str, None, None]:
         yield from self.solution_modifier.render()
 
 
-# class DescriptionSPARQLQuery(SPARQLGrammarBase):
-#     # prolog: Prolog
-#     blocks: List[Union[SelectBlock, SPARQLComponent]]
-#
-#     def render(self) -> Generator[str, None, None]:
-#         # yield from self.prolog.render()
-#         yield "\n\nCONSTRUCT {\n"
-#         for block in self.blocks:
-#             if isinstance(block, SelectBlock):
-#                 yield "\t" + "\n\t".join(block.extract_triples())
-#             else:
-#                 yield from block.extract_triples()
-#         yield "\n}"
-#         # Join the parts produced by the generator into a string and then yield
-#         yield "\nWHERE {"
-#         for block in self.blocks:
-#             yield from block.render()
-#         yield "\n}"
-#
-#     def render(self) -> str:
-#         return "".join(part for part in self.render())
+class Var(SPARQLGrammarBase):
+    value: str
+
+    def render(self) -> Generator[str, None, None]:
+        yield Variable(self.value).n3()
+
+    def __hash__(self):
+        return hash(self.value)
+
+
+class BuiltInCall(SPARQLGrammarBase):
+    """
+    https://www.w3.org/TR/sparql11-query/#rBuiltInCall
+    """
+
+    other_expressions: Optional[Union[Aggregate, RegexExpression]] = None
+    function_name: Optional[str] = None
+    arguments: Optional[
+        List[Union[Expression, Var]]
+    ] = None  # TODO implement remaining argument types e.g. expression list
+
+    @field_validator("function_name")
+    def validate_function_name(cls, v):
+        implemented = ["URI", "STR", "CONCAT", "SHA256", "LCASE", "SUM", "isBLANK"]
+        if v not in implemented:
+            raise ValueError(f"{v} is not a valid SPARQL built-in function")
+        return v
+
+    def render(self) -> Generator[str, None, None]:
+        if self.other_expressions:
+            yield from self.other_expressions.render()
+        else:
+            yield f"{self.function_name}("
+            if self.arguments:
+                for i, arg in enumerate(self.arguments):
+                    yield from arg.render()
+                    if i < len(self.arguments) - 1:
+                        yield ", "
+            yield ")"
+
+    @classmethod
+    def create_with_one_expr(
+        cls, function_name: str, expression: PrimaryExpression
+    ) -> "BuiltInCall":
+        """
+        Convenience method for functions that take a single PrimaryExpression as an argument.
+        Delegates to create_with_n_expr for consistency in handling expressions.
+        """
+        return cls.create_with_n_expr(function_name, [expression])
+
+    @classmethod
+    def create_with_n_expr(
+        cls, function_name: str, expressions: List[PrimaryExpression]
+    ) -> "BuiltInCall":
+        """
+        Convenience method for functions that take a list of PrimaryExpressions as arguments.
+        Wraps each PrimaryExpression in an Expression.
+        """
+        wrapped_expressions = [Expression.from_primary_expr(pe) for pe in expressions]
+
+        # Create a BuiltInCall instance for the specified function with the list of wrapped expressions
+        return cls(function_name=function_name, arguments=wrapped_expressions)
+
+
+class Expression(SPARQLGrammarBase):
+    """
+    Expression ::= ConditionalOrExpression
+    """
+
+    conditional_or_expression: ConditionalOrExpression
+
+    def render(self) -> Generator[str, None, None]:
+        yield from self.conditional_or_expression.render()
+
+    @classmethod
+    def from_primary_expr(cls, primary_expression: PrimaryExpression) -> Expression:
+        """
+        Convenience method to create an Expression directly from a PrimaryExpression,
+        wrapping it in the full chain of intermediate expression types.
+        """
+        return cls(
+            conditional_or_expression=ConditionalOrExpression(
+                conditional_and_expressions=[
+                    ConditionalAndExpression(
+                        value_logicals=[
+                            ValueLogical(
+                                relational_expression=RelationalExpression(
+                                    left=NumericExpression(
+                                        additive_expression=AdditiveExpression(
+                                            base_expression=MultiplicativeExpression(
+                                                base_expression=UnaryExpression(
+                                                    primary_expression=primary_expression
+                                                )
+                                            )
+                                        )
+                                    )
+                                )
+                            )
+                        ]
+                    )
+                ]
+            )
+        )
+
+
+class RelationalExpression(SPARQLGrammarBase):
+    """
+    https://www.w3.org/TR/sparql11-query/#rRelationalExpression
+    RelationalExpression ::= NumericExpression ( '=' NumericExpression | '!=' NumericExpression | '<' NumericExpression | '>' NumericExpression | '<=' NumericExpression | '>=' NumericExpression | 'IN' ExpressionList | 'NOT' 'IN' ExpressionList )?
+    """
+
+    left: NumericExpression
+    operator: Optional[str] = None  # '=', '!=', '<', '>', '<=', '>=', 'IN', 'NOT IN'
+    right: Optional[Union[NumericExpression, ExpressionList]] = None
+    # expression_list: Optional[ExpressionList] = None #TODO implement expression list
+    not_in: bool = False  # To distinguish between 'IN' and 'NOT IN'
+
+    def render(self) -> Generator[str, None, None]:
+        yield from self.left.render()
+        if self.operator:
+            yield f" {self.operator} "
+        if self.right:
+            yield from self.right.render()
+        # elif self.expression_list:
+        #     if self.not_in:
+        #         yield " NOT IN "
+        #     else:
+        #         yield " IN "
+        #     yield from self.expression_list.render()
+
+
+class ValueLogical(SPARQLGrammarBase):
+    relational_expression: RelationalExpression
+
+    def render(self) -> Generator[str, None, None]:
+        yield from self.relational_expression.render()
+
+
+class AdditiveExpression(SPARQLGrammarBase):
+    """
+    https://www.w3.org/TR/sparql11-query/#rAdditiveExpression
+    AdditiveExpression ::= MultiplicativeExpression ( '+' MultiplicativeExpression | '-' MultiplicativeExpression | ( NumericLiteralPositive | NumericLiteralNegative ) ( ( '*' UnaryExpression ) | ( '/' UnaryExpression ) )* )*
+    #TODO implement NumericLiteralPositive, NumericLiteralNegative - these should be options in the additional expressions
+    """
+
+    base_expression: MultiplicativeExpression
+    additional_expressions: Optional[
+        List[Tuple[str, Union[MultiplicativeExpression, UnaryExpression]]]
+    ] = []
+
+    @field_validator("additional_expressions")
+    def validate_additional_expressions(cls, v):
+        for operator, _ in v or []:
+            if operator not in ["+", "-", "*", "/"]:
+                raise ValueError("Operator must be one of '+', '-', '*', or '/'")
+        return v
+
+    def render(self) -> Generator[str, None, None]:
+        yield from self.base_expression.render()
+        for operator, expression in self.additional_expressions:
+            yield f" {operator} "
+            yield from expression.render()
+
+
+class NumericExpression(SPARQLGrammarBase):
+    additive_expression: AdditiveExpression
+
+    def render(self) -> Generator[str, None, 
None]: + yield from self.additive_expression.render() + + +class ConditionalAndExpression(SPARQLGrammarBase): + """ + ConditionalAndExpression ::= ValueLogical ( '&&' ValueLogical )* + """ + + value_logicals: List[ValueLogical] + + def render(self) -> Generator[str, None, None]: + for i, value_logical in enumerate(self.value_logicals): + yield from value_logical.render() + if i < len(self.value_logicals) - 1: + yield " && " + + +class ConditionalOrExpression(SPARQLGrammarBase): + """ + ConditionalOrExpression ::= ConditionalAndExpression ( '||' ConditionalAndExpression )* + """ + + conditional_and_expressions: List[ConditionalAndExpression] + + def render(self) -> Generator[str, None, None]: + for i, conditional_and_expression in enumerate( + self.conditional_and_expressions + ): + yield from conditional_and_expression.render() + if i < len(self.conditional_and_expressions) - 1: + yield " || " + + +class NumericLiteral(SPARQLGrammarBase): + """ + not implemented properly - only does integer literals + """ + + value: float + + def render(self) -> Generator[str, None, None]: + yield str(int(self.value)) + + def __hash__(self): + return hash(self.value) + + +class BooleanLiteral(SPARQLGrammarBase): + value: bool + + def render(self) -> Generator[str, None, None]: + yield "true" if self.value else "false" + + +class RDFLiteral(SPARQLGrammarBase): + value: str + + def render(self) -> Generator[str, None, None]: + yield f'"{self.value}"' + + def __hash__(self): + return hash(self.value) + + +class GraphTerm(SPARQLGrammarBase): + """ + Represents a SPARQL GraphTerm. + GraphTerm ::= iri | RDFLiteral | NumericLiteral | BooleanLiteral | BlankNode | NIL + """ + + content: Union[IRI, RDFLiteral, NumericLiteral, BooleanLiteral, BlankNode] + + def render(self) -> Generator[str, None, None]: + if self.content == "NIL": + yield "()" + else: + yield from self.content.render() + + +class IRI(SPARQLGrammarBase): + """ + Represents a SPARQL iri. + iri ::= IRIREF | PrefixedName + """ + + value: Union[URIRef, str] + + def render(self) -> Generator[str, None, None]: + if isinstance(self.value, URIRef): + yield self.value.n3() + else: + yield self.value + + def __hash__(self): + return hash(self.value) + + +class BrackettedExpression(SPARQLGrammarBase): + expression: Expression + + def render(self) -> Generator[str, None, None]: + yield "(" + yield from self.expression.render() + yield ")" + + +class PrimaryExpression(SPARQLGrammarBase): + """ + PrimaryExpression ::= BrackettedExpression | BuiltInCall | iriOrFunction | RDFLiteral | NumericLiteral | BooleanLiteral | Var + """ + + content: Union[ + BrackettedExpression, + BuiltInCall, + IRIOrFunction, + RDFLiteral, + NumericLiteral, + BooleanLiteral, + Var, + ] + + def render(self) -> Generator[str, None, None]: + yield from self.content.render() + + +class IRIOrFunction(SPARQLGrammarBase): + """ + iriOrFunction ::= iri ArgList? 
+ """ + + iri: IRI + arg_list: Optional[ArgList] = None + + def render(self) -> Generator[str, None, None]: + yield from self.iri.render() + if self.arg_list: + yield "(" + yield from self.arg_list.render() + yield ")" + + +class UnaryExpression(SPARQLGrammarBase): + operator: Optional[str] = None # '!', '+', or '-' + primary_expression: PrimaryExpression + + def render(self) -> Generator[str, None, None]: + if self.operator: + yield f"{self.operator} " + yield from self.primary_expression.render() + + +class MultiplicativeExpression(SPARQLGrammarBase): + base_expression: UnaryExpression + additional_expressions: Optional[List[Tuple[str, UnaryExpression]]] = [] + + @field_validator("additional_expressions") + def validate_additional_expressions(cls, v): + if v[0] not in ["*", "/"]: + raise ValueError("Operator must be '*' or '/'") + return v + + def render(self) -> Generator[str, None, None]: + yield from self.base_expression.render() + for operator, expression in self.additional_expressions: + yield f" {operator} " + yield from expression.render() + + +class ExpressionList(SPARQLGrammarBase): + expressions: Optional[List[Expression]] = [] + + def render(self) -> Generator[str, None, None]: + if not self.expressions: + yield "()" + else: + yield "(" + for i, expression in enumerate(self.expressions): + yield from expression.render() + if i < len(self.expressions) - 1: + yield ", " + yield ")" + + +class Aggregate(SPARQLGrammarBase): + function_name: str # One of 'COUNT', 'SUM', 'MIN', 'MAX', 'AVG', 'SAMPLE', 'GROUP_CONCAT' + distinct: bool = False + expression: Optional[Expression] = None # '*' for COUNT, else Expression + separator: Optional[str] = None # Only used for GROUP_CONCAT + + def render(self) -> Generator[str, None, None]: + yield f"{self.function_name}(" + if self.distinct: + yield "DISTINCT " + + # For COUNT, '*' is a valid expression + if self.function_name == "COUNT" and self.expression is None: + yield "*" + elif self.expression is not None: + yield from self.expression.render() + + # Handle the separator for GROUP_CONCAT + if self.function_name == "GROUP_CONCAT" and self.separator is not None: + yield f" ; SEPARATOR='{self.separator}'" + + yield ")" + + +class RegexExpression(SPARQLGrammarBase): + """ + Represents a SPARQL REGEX expression. + REGEX(Expression, Expression, Expression) + """ + + text_expression: Expression + pattern_expression: Expression + flags_expression: Optional[Expression] = None + + def render(self) -> Generator[str, None, None]: + yield "REGEX(" + yield from self.text_expression.render() + yield ", " + yield from self.pattern_expression.render() + + if self.flags_expression: + yield ", " + yield from self.flags_expression.render() + + yield ")" + + +class BlankNode(SPARQLGrammarBase): + """ + BlankNode ::= BLANK_NODE_LABEL | ANON + """ + + value: Union[BlankNodeLabel, Anon] + + def render(self): + yield from self.value.render() + + def __hash__(self): + return hash(self.value) + + +class BlankNodeLabel(SPARQLGrammarBase): + """ + BLANK_NODE_LABEL ::= '_:' ( PN_CHARS_U | [0-9] ) ((PN_CHARS|'.')* PN_CHARS)? + """ + + part_1: str + part_2: Optional[str] = None + + def render(self): + yield "_:" + yield self.part_1 + if self.part_2: + yield self.part_2 + + +class Anon: + """ + ANON ::= '[' WS* ']' + https://www.w3.org/TR/sparql11-query/#rANON + """ + + # TODO not sure how to make this more useful - allow input of whitespace? 
+ def render(self): + yield "[]" diff --git a/temp/shacl2sparql.py b/temp/shacl2sparql.py index f5f2f8e1..91c33db1 100644 --- a/temp/shacl2sparql.py +++ b/temp/shacl2sparql.py @@ -1,7 +1,5 @@ -import json -from pathlib import Path from string import Template -from typing import Union, Optional +from typing import Union, Optional, List from rdflib import URIRef, Variable, Namespace, Graph, SH, RDF, BNode, Literal from rdflib.collection import Collection @@ -29,6 +27,17 @@ ConstructTriples, ConstructQuery, Filter, + OrderCondition, + OrderClause, + IRI, + Var, + Constraint, + BuiltInCall, + PrimaryExpression, + BrackettedExpression, + Expression, + RDFLiteral, + IRIOrFunction, ) ONT = Namespace("https://prez.dev/ont/") @@ -53,7 +62,7 @@ def __init__( self.profile_uri: Optional[URIRef] = profile_uri self.additional_ggps: Optional[GroupGraphPatternSub] = additional_ggps - self.focus_node: Union[URIRef, Variable] = Variable("focus_node") + self.focus_node: Union[IRI, Var] = Var(value="focus_node") self.sparql = None self.results = None @@ -66,6 +75,8 @@ def __init__( self.default_limit = None self.default_offset = None + self.default_order_by = None + self.default_order_by_desc = None self.runtime_vals_expanded = None self.merged_runtime_and_default_vals = None @@ -75,13 +86,19 @@ def __init__( def _expand_runtime_vars(self): self.runtime_vals_expanded = {} for k, v in self.runtime_values.items(): - if k in ["limit", "offset"]: + if k in ["limit", "offset", "term"]: self.runtime_vals_expanded[k] = v elif v: - self.runtime_vals_expanded[k] = URIRef(v).n3() + val = "".join(IRI(value=v).render()) + self.runtime_vals_expanded[k] = val def _merge_runtime_and_default_vars(self): - default_args = {"limit": self.default_limit, "offset": self.default_offset} + default_args = { + "limit": self.default_limit, + "offset": self.default_offset, + "order_by": self.default_order_by, + "order_by_desc": self.default_order_by_desc, + } self.merged_runtime_and_default_vals = default_args | self.runtime_vals_expanded def generate_sparql(self): @@ -140,7 +157,7 @@ def parse_endpoint_definition(self): if target_nodes: target_node_var = str(target_nodes[0]) target_node_val = target_node_var[1:] - target_uri = URIRef(self.runtime_values[target_node_val]) + target_uri = IRI(value=self.runtime_values[target_node_val]) self.focus_node = target_uri # rule nodes - for CONSTRUCT TRIPLES patterns. @@ -175,14 +192,44 @@ def _add_ggp_to_main_ggps(self, ggp): gpnt = GraphPatternNotTriples(content=gorugp) self.main_where_ggps.add_pattern(gpnt) + def sh_rule_type_conversion(self, items: List): + """ + Assumes Literals are actually Variables. + """ + new_items = [] + for item in items: + if isinstance(item, URIRef): + item = IRI(value=item) + elif isinstance(item, Literal): + item = Var(value=item[1:]) + new_items.append(item) + return new_items + def _create_construct_triples_from_sh_rules(self, rule_node): + """CONSTRUCT {?s ?p ?o} based on sh:rule [ sh:subject ... 
]""" subject = self.endpoint_graph.value(subject=rule_node, predicate=SH.subject) predicate = self.endpoint_graph.value(subject=rule_node, predicate=SH.predicate) object = self.endpoint_graph.value(subject=rule_node, predicate=SH.object) if subject == SH.this: subject = self.focus_node - if isinstance(object, Literal): # assume it's a variable of the form ?xyz - object = Variable(str(object)[1:]) + subject, predicate, object = self.sh_rule_type_conversion( + [subject, predicate, object] + ) + + # for item in subject, predicate, object: + # if isinstance(item, URIRef): + # item = IRI(value=item) + # elif isinstance(item, BNode): + # if subject == SH.this: + # if isinstance(self.focus_node, Var): + # subject = self.focus_node + # else: + # subject = IRI(value=self.focus_node) + # elif isinstance(subject, Literal): # assume it's a variable of the form ?xyz + # subject = Var(value=str(subject)[1:]) + # if isinstance(object, Literal): # assume it's a variable of the form ?xyz + # object = Var(value=str(object)[1:]) + triple = SimplifiedTriple(subject=subject, predicate=predicate, object=object) if self.construct_triples: self.construct_triples.append(triple) @@ -190,11 +237,13 @@ def _create_construct_triples_from_sh_rules(self, rule_node): self.construct_triples = [triple] def create_select_subquery_for_class_listing(self, target_classes): - target_class_var = URIRef(target_classes[0]) + target_class_var = IRI(value=target_classes[0]) triples_block = TriplesBlock( triples=[ SimplifiedTriple( - subject=self.focus_node, predicate=RDF.type, object=target_class_var + subject=self.focus_node, + predicate=IRI(value=RDF.type), + object=target_class_var, ) ] ) @@ -216,21 +265,13 @@ def create_select_subquery_for_class_listing(self, target_classes): ggp = GroupGraphPattern(content=ggps) sub_select_where = WhereClause(group_graph_pattern=ggp) select_clause = SelectClause(variables_or_all="*") - limit = self.merged_runtime_and_default_vals["limit"] - offset = self.merged_runtime_and_default_vals["offset"] - if limit is not None and offset is not None: # int = 0 is boolean False - limit_clause = LimitClause(limit=limit) - offset_clause = OffsetClause(offset=offset) - limit_offset_clauses = LimitOffsetClauses( - limit_clause=limit_clause, offset_clause=offset_clause - ) - solution_modifier = SolutionModifier(limit_offset=limit_offset_clauses) - else: - solution_modifier = SolutionModifier() + sol_mod, order_by_triple = self._create_focus_node_solution_modifier() + if order_by_triple: + ggps.add_triple(order_by_triple) ss = SubSelect( select_clause=select_clause, where_clause=sub_select_where, - solution_modifier=solution_modifier, + solution_modifier=sol_mod, ) ggp = GroupGraphPattern(content=ss) return ggp @@ -244,26 +285,88 @@ def create_select_subquery_from_template(self, target_bn): # implemented substituted_query = select_statement.substitute( self.merged_runtime_and_default_vals + ).rstrip() + sol_mod, order_by_triple = self._create_focus_node_solution_modifier() + if order_by_triple: # insert it before the end of the string, + order_by_triple_text = "".join(order_by_triple.render()) + substituted_query = ( + substituted_query[:-1] + f"{{{order_by_triple_text}}} }}" + ) + sss = SubSelectString( + select_string=substituted_query, solution_modifier=sol_mod ) - sss = SubSelectString(select_string=substituted_query) ggp = GroupGraphPattern(content=sss) return ggp + def _create_focus_node_solution_modifier(self): + order_clause = order_by_triple = None # order clause is optional + order_by_path = 
self.merged_runtime_and_default_vals.get("order_by") + if order_by_path: + direction = self.merged_runtime_and_default_vals.get("order_by_desc") + if direction: + direction = "DESC" + else: + direction = "ASC" + order_cond = OrderCondition( + var=Var(value="order_by_var"), direction=direction + ) + order_clause = OrderClause(conditions=[order_cond]) + order_by_triple = SimplifiedTriple( + subject=self.focus_node, + predicate=IRI(value=order_by_path[0]), + object=Var(value="order_by_var"), + ) + limit = int(self.merged_runtime_and_default_vals["limit"]) + offset = int(self.merged_runtime_and_default_vals["offset"]) + limit_clause = LimitClause(limit=limit) + offset_clause = OffsetClause(offset=offset) + limit_offset_clauses = LimitOffsetClauses( + limit_clause=limit_clause, offset_clause=offset_clause + ) + sol_mod = SolutionModifier( + order_by=order_clause, limit_offset=limit_offset_clauses + ) + return sol_mod, order_by_triple + def _set_default_limit_and_offset(self): default_limit = list( self.endpoint_graph.objects( - subject=self.endpoint_uri, predicate=ONT["defaultLimit"] + subject=self.endpoint_uri, predicate=SHEXT.limit ) ) default_offset = list( self.endpoint_graph.objects( - subject=self.endpoint_uri, predicate=ONT["defaultOffset"] + subject=self.endpoint_uri, predicate=SHEXT.offset ) ) + default_order_by = list( + self.endpoint_graph.objects( + subject=self.endpoint_uri, predicate=SHEXT.orderBy + ) + ) + if not default_limit or not default_offset: raise ValueError( "Listing endpoint must have both a default limit and a default offset" ) + + # Process each blank node in the default_order_by list + for blank_node in default_order_by: + # Extract sh:path + path = next(self.endpoint_graph.objects(blank_node, SH.path), None) + if not path: + continue # Skip if no sh:path is found + + # Check for sh:desc + desc_node = next(self.endpoint_graph.objects(blank_node, SHEXT.desc), None) + is_descending = ( + True if desc_node and (desc_node == Literal(True)) else False + ) + + # Add the configuration to the list + self.default_order_by = (path,) + self.default_order_by_desc = is_descending + self.default_limit = int(default_limit[0]) self.default_offset = int(default_offset[0]) @@ -289,7 +392,9 @@ def parse_profile(self): def _add_target_class(self, target_class): triples = [ SimplifiedTriple( - subject=self.focus_node, predicate=RDF.type, object=target_class + subject=self.focus_node, + predicate=IRI(value=RDF.type), + object=IRI(value=target_class), ) ] if self.construct_triples: @@ -298,22 +403,30 @@ def _add_target_class(self, target_class): self.construct_triples = triples def _build_bnode_blocks(self): - bnode_depth = list(self.profile_graph.objects(subject=self.profile_uri, predicate=SHEXT["bnode-depth"])) + bnode_depth = list( + self.profile_graph.objects( + subject=self.profile_uri, predicate=SHEXT["bnode-depth"] + ) + ) if not bnode_depth or bnode_depth == [0]: return else: bnode_depth = int(bnode_depth[0]) - p1 = Variable(f"?bn_p_1") - o1 = Variable(f"?bn_o_1") - p2 = Variable(f"?bn_p_2") - o2 = Variable(f"?bn_o_2") + p1 = Var(value="bn_p_1") + o1 = Var(value="bn_o_1") + p2 = Var(value="bn_p_2") + o2 = Var(value="bn_o_2") triples_block = TriplesBlock( triples=[ SimplifiedTriple(subject=self.focus_node, predicate=p1, object=o1), SimplifiedTriple(subject=o1, predicate=p2, object=o2), ] ) - filter_block = Filter(variable=o1, expression="ISBLANK") + o1_pe = PrimaryExpression(content=o1) + constraint = Constraint( + content=BuiltInCall.create_with_one_expr("isBLANK", o1_pe) + ) + 
filter_block = Filter(constraint=constraint) gpnt = GraphPatternNotTriples(content=filter_block) ggps = GroupGraphPatternSub( triples_block=triples_block, graph_patterns_or_triples_blocks=[gpnt] @@ -327,9 +440,9 @@ def _build_bnode_blocks(self): container_ggp = GroupGraphPattern(content=container_ggps) def process_bn_level(depth, max_depth, outer_ggps): - old_o_var = Variable(f"?bn_o_{depth}") - new_p_var = Variable(f"?bn_p_{depth + 1}") - new_o_var = Variable(f"?bn_o_{depth + 1}") + old_o_var = Var(value=f"bn_o_{depth}") + new_p_var = Var(value=f"bn_p_{depth + 1}") + new_o_var = Var(value=f"bn_o_{depth + 1}") triples_block = TriplesBlock( triples=[ SimplifiedTriple( @@ -337,7 +450,11 @@ def process_bn_level(depth, max_depth, outer_ggps): ) ] ) - filter_block = Filter(variable=old_o_var, expression="ISBLANK") + old_o_var_pe = PrimaryExpression(content=old_o_var) + constraint = Constraint( + content=BuiltInCall.create_with_one_expr("isBLANK", old_o_var_pe) + ) + filter_block = Filter(constraint=constraint) gpnt = GraphPatternNotTriples(content=filter_block) ggps = GroupGraphPatternSub( triples_block=triples_block, graph_patterns_or_triples_blocks=[gpnt] @@ -364,10 +481,10 @@ def process_path_object(path_object): if bnode_obj == SH.union: pass elif bnode_pred == SH.inversePath: - inverse_preds.append(bnode_obj) + inverse_preds.append(IRI(value=bnode_obj)) elif bnode_pred == SH.alternativePath: predicates.extend(list(Collection(self.profile_graph, bnode_obj))) - else: # "regular" paths - no special predicate, just list members + else: # sequence paths predicates.append( tuple(Collection(self.profile_graph, path_object)) ) @@ -426,13 +543,13 @@ def _add_inverse_preds(self, ggps, inverse_preds, i): if inverse_preds: ggps.add_triple( SimplifiedTriple( - subject=Variable(f"inv_path_{i}"), - predicate=Variable(f"inv_pred_{i}"), + subject=Var(value=f"inv_path_{i}"), + predicate=Var(value=f"inv_pred_{i}"), object=self.focus_node, ) ) inline_data_one_var = InlineDataOneVar( - variable=Variable(f"inv_pred_{i}"), values=inverse_preds + variable=Var(value=f"inv_pred_{i}"), values=inverse_preds ) data_block = DataBlock(block=inline_data_one_var) inline_data = InlineData(data_block=data_block) @@ -442,19 +559,21 @@ def _add_inverse_preds(self, ggps, inverse_preds, i): def _add_predicate_constraints(self, predicates, property_node, ggp_list): # check for any sequence paths - process separately - sps = [p for p in predicates if isinstance(p, tuple)] - predicates = [p for p in predicates if not isinstance(p, tuple)] + sps = [p for p in predicates if isinstance(p, tuple)] # convert to IRIs here + predicates = [ + IRI(value=p) for p in predicates if not isinstance(p, tuple) + ] # convert to IRIs below for i, (pred1, pred2) in enumerate(sps): t1 = SimplifiedTriple( subject=self.focus_node, - predicate=pred1, - object=Variable(f"seq_obj_{i + 1}"), + predicate=IRI(value=pred1), + object=Var(value=f"seq_obj_{i + 1}"), ) t2 = SimplifiedTriple( - subject=Variable(f"seq_obj_{i + 1}"), - predicate=pred2, - object=Variable(f"seq_obj_terminal{i + 1}"), + subject=Var(value=f"seq_obj_{i + 1}"), + predicate=IRI(value=pred2), + object=Var(value=f"seq_obj_terminal{i + 1}"), ) tb = TriplesBlock(triples=[t1, t2]) ggps = GroupGraphPatternSub(triples_block=tb) @@ -465,25 +584,33 @@ def _add_predicate_constraints(self, predicates, property_node, ggp_list): max = self.profile_graph.value(subject=property_node, predicate=SH.maxCount) simplified_triple = SimplifiedTriple( subject=self.focus_node, - predicate=Variable("preds"), - 
-            object=Variable("objs"),
+            predicate=Var(value="preds"),
+            object=Var(value="objs"),
         )
         tb = TriplesBlock(triples=[simplified_triple])
         if predicates:
-            # filters must be added to all union statements
             if max == Literal(0):
-                values_constraint = Filter(variable=Variable("preds"), expression="NOT IN", value=predicates)
+                values = [
+                    PrimaryExpression(content=IRIOrFunction(iri=p)) for p in predicates
+                ]
+                values_constraint = Filter.filter_in(
+                    focus=Var(value="preds"), comparators=values, not_in=True
+                )
                 gpnt = GraphPatternNotTriples(content=values_constraint)
                 if ggp_list:
                     for ggp in ggp_list:
                         ggp.content.add_pattern(gpnt)
                 else:
-                    ggps = GroupGraphPatternSub(graph_patterns_or_triples_blocks=[gpnt, tb])
+                    ggps = GroupGraphPatternSub(
+                        graph_patterns_or_triples_blocks=[gpnt, tb]
+                    )
                     ggp = GroupGraphPattern(content=ggps)
                     ggp_list.append(ggp)
-            elif SHEXT.allPredicateValues not in predicates:  # add VALUES clause
+            elif (
+                IRI(value=SHEXT.allPredicateValues) not in predicates
+            ):  # add VALUES clause
                 inline_data_one_var = InlineDataOneVar(
-                    variable=Variable("preds"), values=predicates
+                    variable=Var(value="preds"), values=predicates
                 )
                 data_block = DataBlock(block=inline_data_one_var)
                 inline_data = InlineData(data_block=data_block)
@@ -491,12 +618,11 @@ def _add_predicate_constraints(self, predicates, property_node, ggp_list):
                 ggps = GroupGraphPatternSub(graph_patterns_or_triples_blocks=[gpnt, tb])
                 ggp = GroupGraphPattern(content=ggps)
                 ggp_list.append(ggp)
-            elif predicates == [SHEXT.allPredicateValues]:
+            elif predicates == [IRI(value=SHEXT.allPredicateValues)]:
                 ggps = GroupGraphPatternSub(triples_block=tb)
                 ggp = GroupGraphPattern(content=ggps)
                 ggp_list.append(ggp)
-
     def _add_object_constrains(self, ggp_list, property_node):
         value = self.profile_graph.value(
             subject=property_node, predicate=SH.hasValue, default=None
@@ -513,8 +639,14 @@ def _add_object_constrains(self, ggp_list, property_node):
             ggps = GroupGraphPatternSub()
             ggp = GroupGraphPattern(content=ggps)
             ggp_list.append(ggp)
+        objs = []
+        for obj in objects:
+            if isinstance(obj, Literal):
+                objs.append(RDFLiteral(value=obj))
+            elif isinstance(obj, URIRef):
+                objs.append(IRI(value=obj))
         inline_data_one_var = InlineDataOneVar(
-            variable=Variable("objs"), values=objects
+            variable=Var(value="objs"), values=objs
         )
         data_block = DataBlock(block=inline_data_one_var)
         inline_data = InlineData(data_block=data_block)
diff --git a/test_data/catprez.ttl b/test_data/catprez.ttl
new file mode 100644
index 00000000..6cd0956b
--- /dev/null
+++ b/test_data/catprez.ttl
@@ -0,0 +1,21 @@
+PREFIX dcat: <http://www.w3.org/ns/dcat#>
+PREFIX dcterms: <http://purl.org/dc/terms/>
+PREFIX ex:
+PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+
+ex:TopLevelCatalog a dcat:Catalog ;
+    rdfs:label "Top level catalog" ;
+    dcterms:hasPart ex:LowerLevelCatalog ;
+    ex:property "top level catalog property" ;
+.
+
+ex:LowerLevelCatalog a dcat:Catalog ;
+    rdfs:label "Lower level catalog" ;
+    dcterms:hasPart ex:Resource ;
+    ex:property "lower level catalog property"
+.
+
+ex:Resource a dcat:Resource ;
+    rdfs:label "Resource" ;
+    ex:property "resource property" ;
+.
\ No newline at end of file
diff --git a/test_data/spaceprez.ttl b/test_data/spaceprez.ttl
new file mode 100644
index 00000000..446ac86e
--- /dev/null
+++ b/test_data/spaceprez.ttl
@@ -0,0 +1,27 @@
+PREFIX dcat: <http://www.w3.org/ns/dcat#>
+PREFIX dcterms: <http://purl.org/dc/terms/>
+PREFIX ex:
+PREFIX geo: <http://www.opengis.net/ont/geosparql#>
+PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+
+ex:Dataset a dcat:Dataset ;
+    rdfs:label "Dataset" ;
+    rdfs:member ex:FeatureCollection ;
+    ex:property "top level dataset property" ;
+.
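+
+# Editorial note (review comment, not load-bearing data): the stanzas below
+# complete this fixture's minimal hierarchy, Dataset > FeatureCollection >
+# Feature1/Feature2, with an ex:property literal at each level, presumably
+# for the tests introduced elsewhere in this patch to assert against.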
+
+ex:FeatureCollection a geo:FeatureCollection ;
+    rdfs:label "Geo Feature Collection" ;
+    rdfs:member ex:Feature1 , ex:Feature2 ;
+    ex:property "lower level feature collection property"
+.
+
+ex:Feature1 a geo:Feature ;
+    rdfs:label "Feature 1" ;
+    ex:property "feature property" ;
+.
+
+ex:Feature2 a geo:Feature ;
+    rdfs:label "Feature 2" ;
+    ex:property "feature property" ;
+.
\ No newline at end of file
diff --git a/test_data/vocprez.ttl b/test_data/vocprez.ttl
new file mode 100644
index 00000000..3fd7ea34
--- /dev/null
+++ b/test_data/vocprez.ttl
@@ -0,0 +1,37 @@
+PREFIX dcat: <http://www.w3.org/ns/dcat#>
+PREFIX dcterms: <http://purl.org/dc/terms/>
+PREFIX ex:
+PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
+
+ex:VocPrezCatalog a dcat:Catalog ;
+    rdfs:label "Catalog" ;
+    dcterms:hasPart ex:SchemingConceptScheme ;
+    ex:property "cataract" ;
+.
+
+ex:SchemingConceptScheme a skos:ConceptScheme ;
+    skos:prefLabel "The Scheming Concept Scheme" ;
+    skos:hasTopConcept ex:TopLevelConcept ;
+    ex:property "schemish conceptual property"
+.
+
+ex:TopLevelConcept a skos:Concept ;
+    skos:prefLabel "The toppiest of concepts" ;
+    ex:property "a property of the toppiest concept" ;
+    skos:narrower ex:SecondLevelConcept ;
+    skos:inScheme ex:SchemingConceptScheme ;
+.
+
+ex:SecondLevelConcept a skos:Concept ;
+    skos:prefLabel "A second level concept" ;
+    ex:property "a property of the second level concept" ;
+    skos:narrower ex:ThirdLevelConcept ;
+    skos:inScheme ex:SchemingConceptScheme ;
+.
+
+ex:ThirdLevelConcept a skos:Concept ;
+    skos:prefLabel "A third level concept" ;
+    ex:property "a property of the third level concept" ;
+    skos:inScheme ex:SchemingConceptScheme ;
+.
\ No newline at end of file
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 00000000..ba5929dd
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,4 @@
+import os
+
+os.environ["SPARQL_REPO_TYPE"] = "pyoxigraph"
+os.environ["LOG_LEVEL"] = "DEBUG"
diff --git a/tests/data/catprez/expected_responses/catalog_anot.ttl b/tests/data/catprez/expected_responses/catalog_anot.ttl
deleted file mode 100644
index 1159a94d..00000000
--- a/tests/data/catprez/expected_responses/catalog_anot.ttl
+++ /dev/null
@@ -1,205 +0,0 @@
-@prefix dcat: .
-@prefix dcterms: .
-@prefix ns1: .
-@prefix prez: .
-@prefix prov: .
-@prefix rdf: .
-@prefix rdfs: .
-@prefix schema: .
-@prefix skos: .
-@prefix xsd: .
-
-dcterms:created rdfs:label "Date Created"@en ;
-    dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en .
-
-dcterms:description rdfs:label "Description"@en ;
-    dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en .
-
-dcterms:hasPart rdfs:label "Has Part"@en ;
-    dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of Is Part Of."@en .
-
-dcterms:identifier rdfs:label "Identifier"@en ;
-    dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en .
- -dcterms:modified rdfs:label "Date Modified"@en ; - dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en . - -dcterms:provenance rdfs:label "Provenance"@en ; - dcterms:description "The statement may include a description of any changes successive custodians made to the resource."@en . - -dcterms:title rdfs:label "Title"@en . - -rdf:type rdfs:label "type" . - -rdfs:label rdfs:label "label" . - -skos:definition rdfs:label "definition"@en ; - skos:definition "A statement or formal explanation of the meaning of a concept."@en . - -skos:prefLabel rdfs:label "preferred label"@en ; - skos:definition "The preferred lexical label for a resource, in a given language."@en . - -dcat:hadRole rdfs:label "hadRole"@en . - -prov:agent rdfs:label "agent" . - -prov:qualifiedAttribution rdfs:label "qualified attribution" . - - a dcat:Catalog ; - rdfs:label "IDN Demonstration Catalogue" ; - dcterms:created "2022-07-31"^^xsd:date ; - dcterms:description """The Indigenous Data Network's demonstration catalogue of datasets. This catalogue contains records of datasets in Australia, most of which have some relation to indigenous Australia. - - rdfs:label "author"@en ; - dcterms:provenance "Presented in the original standard's codelist"@en ; - ns1:status ; - skos:definition "party who authored the resource" ; - skos:prefLabel "author"@en . - -The content of this catalogue conforms to the Indigenous Data Network's Catalogue Profile which is a profile of the DCAT, SKOS and PROV data models."""@en ; - dcterms:hasPart , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - ; - dcterms:identifier "democat"^^xsd:token, - "pd:democat"^^prez:identifier ; - dcterms:modified "2022-08-29"^^xsd:date ; - dcterms:title "IDN Demonstration Catalogue" ; - prov:qualifiedAttribution [ dcat:hadRole , - , - ; - prov:agent ] ; - prez:link "/c/catalogs/pd:democat" . - -schema:description rdfs:label "description" . - -schema:name rdfs:label "name" . - - rdfs:label "author"@en ; - dcterms:provenance "Presented in the original standard's codelist"@en ; - ns1:status ; - skos:definition "party who authored the resource" ; - skos:prefLabel "author"@en . - - rdfs:label "custodian"@en ; - dcterms:provenance "Presented in the original standard's codelist"@en ; - ns1:status ; - skos:definition "party that accepts accountability and responsibility for the resource and ensures appropriate care and maintenance of the resource" ; - skos:prefLabel "custodian"@en . - - rdfs:label "owner"@en ; - dcterms:provenance "Presented in the original standard's codelist"@en ; - ns1:status ; - skos:definition "party that owns the resource" ; - skos:prefLabel "owner"@en . - -dcat:Catalog rdfs:label "Catalog"@en . - - dcterms:description """This dataset has been developed by the Australian Government as an authoritative source of indigenous location names across Australia. It is sponsored by the Spatial Policy Branch within the Department of Communications and managed solely by the Department of Human Services. -The dataset is designed to support the accurate positioning, consistent reporting, and effective delivery of Australian Government programs and services to indigenous locations. -The dataset contains Preferred and Alternate names for indigenous locations where Australian Government programs and services have been, are being, or may be provided. 
The Preferred name will always default to a State or Territory jurisdiction's gazetted name so the term 'preferred' does not infer that this is the locally known name for the location. Similarly, locational details are aligned, where possible, with those published in State and Territory registers. -This dataset is NOT a complete listing of all locations at which indigenous people reside. Town and city names are not included in the dataset. The dataset contains names that represent indigenous communities, outstations, defined indigenous areas within a town or city or locations where services have been provided.""" ; - dcterms:title "Australian Government Indigenous Programs & Policy Locations (AGIL) dataset" . - - dcterms:description """This study contains time series of data of the Annual Aboriginal Census for Australia, Australian Capital Territory, New South Wales, Northern Territory, Queensland, South Australia, Tasmania, Victoria and Western Australia from 1921 to 1944. - -Special care notice: -Aboriginal and Torres Strait Islander people, researchers and other users should be aware that material in this dataset may contain material that is considered offensive. The data has been retained in its original format because it represents an evidential record of language, beliefs or other cultural situations at a point in time.""" ; - dcterms:identifier "pd:AAC-SA"^^prez:identifier ; - dcterms:title "Annual Aboriginal Census,1921-1944 - South Australia" ; - prez:link "/c/catalogs/pd:democat/resources/pd:AAC-SA" . - - dcterms:description "A 2020 review of First Nations Identified physical collections held by the ANU. Not published." ; - dcterms:title "2020 ANU First Nations Collections Review" . - - dcterms:description """The Aboriginal and Torres Strait Islander Community Profiles (ACPs) are tabulations giving key census characteristics of Aboriginal and Torres Strait Islander persons, families and dwellings, covering most topics on the 1991 Census of Population and Housing form. This profile is presented at the ATSIC Region level. - -The ACP consists of 29 tables which crosstabulate characteristics including gender, age, place of birth, religion, marital status, education, income, occupation and employment status.""" ; - dcterms:title "1991 Census of Population and Housing: Aboriginal and Torres Strait Islander Community Profile: ATSIC Regions" . - - dcterms:description """Austlang provides information about Indigenous Australian languages which has been assembled from referenced sources. -The dataset provided here includes the language names, each with a unique alpha-numeric code which functions as a stable identifier, alternative/variant names and spellings and the approximate location of each language variety.""" ; - dcterms:title "Austlang database." . - - dcterms:description """The Indigenous Protected Areas (IPA) programme has demonstrated successes across a broad range of outcome areas, effectively overcoming barriers to addressing Indigenous disadvantage and engaging Indigenous Australians in meaningful employment to achieve large scale conservation outcomes, thus aligning the interests of Indigenous Australians and the broader community. - -The Birriliburu & Matuwa Kurrara Kurrara (MKK) IPAs have provided an opportunity for Martu people to reconnect with and actively manage their traditional country. 
- -The two IPAs have proved a useful tool with which to leverage third party investment, through a joint management arrangement with the Western Australia (WA) Government, project specific funding from environmental NGOs and mutually beneficial partnerships with the private sector. - -Increased and diversified investment from a range of funding sources would meet the high demand for Ranger jobs and could deliver a more expansive programme of works, which would, in turn, increase the social, economic and cultural outcomes for Martu Rangers and Community Members.""" ; - dcterms:title "SRI Investment Analysis of the Birriliburu and Matuwa Kurrara Kurrara Indigenous Protected Areas (2016)" . - - dcterms:description "UTS has taken over this data, but needs help to turn it into an ongoing public database" ; - dcterms:title "Aboriginal Deaths and Injuries in Custody" . - - dcterms:description "(Torrens University). An earlier application with Marcia for AIATSIS funding was never considered." ; - dcterms:title "GDP and Genuine Progress Indicator" . - - dcterms:description "Land that is owned or managed by Australia’s Indigenous communities, or over which Indigenous people have use and rights, was compiled from information supplied by Australian, state and territory governments and other statutory authorities with Indigenous land and sea management interests." ; - dcterms:title "Indigenous Land and Sea Interests " . - - dcterms:description "Registered & Notified Indigenous Land Use Agreements – (as per s. 24BH(1)(a), s. 24CH and s. 24DI(1)(a)) across Australia, The Central Resource for Sharing and Enabling Environmental Data in NSW" ; - dcterms:title "Indigenous Land Use Agreement Boundaries with basic metadata and status" . - - dcterms:description "Printed catalog highlighting ANU Indigenous Research activities at the time of publication" ; - dcterms:title "Indigenous Research Compendium 2018" . - - dcterms:description "These are extensive paper records which Ian Anderson has proposed incorporating in a database. Negotiation is still needed." ; - dcterms:title "Tasmanian Aboriginal genealogies" . - - dcterms:description "NSW prison population data and quarterly custody reports" ; - dcterms:title "NSW Custody Statistics" . - - dcterms:description "This comprises records of about 70,000 Indigenous and 30,000 non-Indigenous people surveyed in the 1970s and 1980s. Some paper records are held at AIATSIS. Microfilms of others are at UNSW Archives. There have been preliminary discussions with AIATSIS, the National Library and former members of the Hollows team about a program to digitise the records. IDN staff/resources would be needed." ; - dcterms:title "The Fred Hollows Archive (National Trachoma and Eye Health Program)" . - - dcterms:description """Conference powerpoint presentation - -Case study in exemplary IDG. -- Survey of native title prescribed bodies corporate (PBCs) -- Collect data on PBCs’ capacity, capabilities, needs and aspirations to better inform policies that affect PBCs -- Started data collection May 2019, to finish in 3rd quarter 2019""" ; - dcterms:title "Prescribed bodies corporate (PBCs) Survey 2019" . - - dcterms:description """Aboriginal and Torres Strait Islander people are the Indigenous people of Australia. They are not one group, but comprise hundreds of groups that have their own distinct set of languages, histories and cultural traditions. - -AIHW reports and other products include information about Indigenous Australians, where data quality permits. 
Thus, information and statistics about Indigenous Australians can be found in most AIHW products. - -In December 2021, AIHW released the Regional Insights for Indigenous Communities (RIFIC). The aim of this website is to provide access to data at a regional level, to help communities set their priorities and participate in joint planning with government and service providers. - -AIHW products that focus specifically on Indigenous Australians are captured on this page.""" ; - dcterms:title "Regional Insights for Indigenous Communities" . - - dcterms:description "Access still to be negotiated with the Museum." ; - dcterms:title "The Sandra Smith Archive" . - - dcterms:description "Strong demand but controversial." ; - dcterms:title "Tindale/Horton map" . - - dcterms:description """TLCMap is a set of tools that work together for mapping Australian history and culture. - -Note that historical placenames in TLCmap is a HASS-I integration activity.""" ; - dcterms:title "Time Layered Cultural Map of Australia" . - - rdfs:label "Indigenous Data Network" ; - schema:description "The IDN is within the University of Melbourne. It was established in 2018 to support and coordinate the governance of Indigenous data for Aboriginal and Torres Strait Islander peoples and empower Aboriginal and Torres Strait Islander communities to decide their own local data priorities.", - "The Indigenous Data Network (IDN) was established in 2018 to support and coordinate the governance of Indigenous data for Aboriginal and Torres Strait Islander peoples and empower Aboriginal and Torres Strait Islander communities to decide their own local data priorities."@en ; - schema:name "Indigenous Data Network" . - diff --git a/tests/data/catprez/expected_responses/top_level_catalog_anot.ttl b/tests/data/catprez/expected_responses/top_level_catalog_anot.ttl new file mode 100644 index 00000000..e2278cde --- /dev/null +++ b/tests/data/catprez/expected_responses/top_level_catalog_anot.ttl @@ -0,0 +1,453 @@ +@prefix dcat: . +@prefix dcterms: . +@prefix prez: . +@prefix prov: . +@prefix rdf: . +@prefix rdfs: . +@prefix schema: . +@prefix skos: . +@prefix xsd: . + +dcterms:created rdfs:label "Date Created"@en ; + dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en . + +dcterms:description rdfs:label "Description"@en ; + dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . + +dcterms:hasPart rdfs:label "Has Part"@en ; + dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of Is Part Of."@en . + +dcterms:identifier rdfs:label "Identifier"@en ; + dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . + +dcterms:modified rdfs:label "Date Modified"@en ; + dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en . 
+ +dcterms:provenance rdfs:label "Provenance"@en ; + dcterms:description "The statement may include a description of any changes successive custodians made to the resource."@en . + +dcterms:title rdfs:label "Title"@en . + +rdf:type rdfs:label "type" . + +rdfs:label rdfs:label "label" . + +skos:definition rdfs:label "definition"@en ; + skos:definition "A statement or formal explanation of the meaning of a concept."@en . + +skos:prefLabel rdfs:label "preferred label"@en ; + skos:definition "The preferred lexical label for a resource, in a given language."@en . + +dcat:hadRole rdfs:label "hadRole"@en ; + skos:definition "The function of an entity or agent with respect to another entity or resource."@en . + +prov:agent rdfs:label "agent" . + +prov:qualifiedAttribution rdfs:label "qualified attribution" . + + a dcat:Catalog ; + rdfs:label "IDN Demonstration Catalogue" ; + dcterms:created "2022-07-31"^^xsd:date ; + dcterms:description """The Indigenous Data Network's demonstration catalogue of datasets. This catalogue contains records of datasets in Australia, most of which have some relation to indigenous Australia. + +The purpose of this catalogue is not to act as a master catalogue of indigenous data in Australia to demonstrate improved metadata models and rating systems for data and metadata in order to improve indigenous data governance. + +The content of this catalogue conforms to the Indigenous Data Network's Catalogue Profile which is a profile of the DCAT, SKOS and PROV data models."""@en ; + dcterms:hasPart , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + ; + dcterms:identifier "democat"^^xsd:token, + "pd:democat"^^prez:identifier ; + dcterms:modified "2022-08-29"^^xsd:date ; + dcterms:title "IDN Demonstration Catalogue" ; + prov:qualifiedAttribution [ dcat:hadRole , + , + ; + prov:agent ] ; + prez:link "/catalogs/pd:democat" ; + prez:members [ prez:link "/catalogs/pd:democat/collections" ] . + +schema:description rdfs:label "description" . + +schema:name rdfs:label "name" . + + rdfs:label "author"@en ; + dcterms:provenance "Presented in the original standard's codelist"@en ; + skos:definition "party who authored the resource" ; + skos:prefLabel "author"@en . + + rdfs:label "custodian"@en ; + dcterms:provenance "Presented in the original standard's codelist"@en ; + skos:definition "party that accepts accountability and responsibility for the resource and ensures appropriate care and maintenance of the resource" ; + skos:prefLabel "custodian"@en . + + rdfs:label "owner"@en ; + dcterms:provenance "Presented in the original standard's codelist"@en ; + skos:definition "party that owns the resource" ; + skos:prefLabel "owner"@en . + + dcterms:description """Needs to be integrated with KHRD. Negotiation required with State Library. + +Comprises Barwick's publications and conference papers; Barwick's PhD.; work with the Australian Institute of Aboriginal Studies and the Aboriginal History journal; work on major research projects; incoming and outgoing correspondence; reference material, and collected genealogies of Aboriginal Victorian families.""" ; + dcterms:title "The Diane Barwick Archive" . + +dcat:Catalog rdfs:label "Catalog"@en ; + skos:definition "A curated collection of metadata about resources (e.g., datasets and data services in the context of a data catalog)."@en . 
+ + dcterms:description """This dataset has been developed by the Australian Government as an authoritative source of indigenous location names across Australia. It is sponsored by the Spatial Policy Branch within the Department of Communications and managed solely by the Department of Human Services. +The dataset is designed to support the accurate positioning, consistent reporting, and effective delivery of Australian Government programs and services to indigenous locations. +The dataset contains Preferred and Alternate names for indigenous locations where Australian Government programs and services have been, are being, or may be provided. The Preferred name will always default to a State or Territory jurisdiction's gazetted name so the term 'preferred' does not infer that this is the locally known name for the location. Similarly, locational details are aligned, where possible, with those published in State and Territory registers. +This dataset is NOT a complete listing of all locations at which indigenous people reside. Town and city names are not included in the dataset. The dataset contains names that represent indigenous communities, outstations, defined indigenous areas within a town or city or locations where services have been provided.""" ; + dcterms:title "Australian Government Indigenous Programs & Policy Locations (AGIL) dataset" . + + dcterms:description """This study contains time series of data of the Annual Aboriginal Census for Australia, Australian Capital Territory, New South Wales, Northern Territory, Queensland, South Australia, Tasmania, Victoria and Western Australia from 1921 to 1944. + +Special care notice: +Aboriginal and Torres Strait Islander people, researchers and other users should be aware that material in this dataset may contain material that is considered offensive. The data has been retained in its original format because it represents an evidential record of language, beliefs or other cultural situations at a point in time.""" ; + dcterms:title "Annual Aboriginal Census,1921-1944 - Australia" . + + dcterms:description """This study contains time series of data of the Annual Aboriginal Census for Australia, Australian Capital Territory, New South Wales, Northern Territory, Queensland, South Australia, Tasmania, Victoria and Western Australia from 1921 to 1944. + +Special care notice: +Aboriginal and Torres Strait Islander people, researchers and other users should be aware that material in this dataset may contain material that is considered offensive. The data has been retained in its original format because it represents an evidential record of language, beliefs or other cultural situations at a point in time.""" ; + dcterms:title "Annual Aboriginal Census,1921-1944 - South Australia" . + + dcterms:description "Existing database at ANU" ; + dcterms:title "The Australian Dictionary of Biography" . + + dcterms:description "A database of Agents - Organisations & People - with roles relating to indigenous data" ; + dcterms:title "Indigenous Data Network's Agents DB" . + + dcterms:description "An Indigenous geography and gazetteer, including a Loc-I framework for tribal, language and community data. Requires developmental work in collaboration with Universities, ABS, AIHW, Geoscience Australia, AURIN etc etc." ; + dcterms:title "Indigenous Gazetteer" . + + dcterms:description "The Australian National University is home to many research collections of national and international significance. 
Material from the ANU Archives, ANU Classics Museum, ANU Library, Asia Pacific Map Collection and the Noel Butlin Archives Centre are being progressivley digitised and made available through this repository." ; + dcterms:title "ANU Archive and Library Collections - \"Indigenous\" Search" . + + dcterms:description "A 2020 review of First Nations Identified physical collections held by the ANU. Not published." ; + dcterms:title "2020 ANU First Nations Collections Review" . + + dcterms:description "The University's Open Research digital repository ecompasses a number of research collections which the wider community is free to browse." ; + dcterms:title "ANU Open Research Collections" . + + dcterms:description """The Australian National University, through its Open Research repository collects, maintains, preserves, promotes and disseminates its open access scholarly materials. + +Open Research holds a variety of scholarly publications including journal articles; books and book chapters; conference papers, posters and presentations; theses; creative works; photographs and much more in a number of collections and formats. The wider community is free to browse this material and all members of the ANU community (past and present) are encouraged to contribute their research.""" ; + dcterms:title "ANU Open Research Library - \"Indigenous\" Search (Thesis Library)" . + + dcterms:description "Publications, Ethics, Grants" ; + dcterms:title "ANU Research Information Enterprise System" . + + dcterms:description """Needs to be made fully maintainable, sustainable interoperable and web-accessible + +ATNS provides an online portal for people seeking information on agreements with Indigenous peoples. We aim to promote knowledge and transparency by capturing the range and variety of agreement making occurring in Australia and other parts of the world. + +We gather and review information from publicly available academic sources, online materials and documents provided by the organisations and agencies involved in agreement-making processes. No confidential material is published. """ ; + dcterms:title "The Agreements, Treaties and Negotiated Settlements Database" . + + dcterms:description """The Aboriginal and Torres Strait Islander Community Profiles (ACPs) are tabulations giving key census characteristics of Aboriginal and Torres Strait Islander persons, families and dwellings, covering most topics on the 1991 Census of Population and Housing form. This profile is presented at the Aboriginal Community level. +The ACP consists of 29 tables which crosstabulate characteristics including gender, age, place of birth, religion, marital status, education, income, occupation and employment status.""" ; + dcterms:title "1991 Census of Population and Housing: Aboriginal and Torres Strait Islander Community Profile: Aboriginal Community, ACT" . + + dcterms:description """The Aboriginal and Torres Strait Islander Community Profiles (ACPs) are tabulations giving key census characteristics of Aboriginal and Torres Strait Islander persons, families and dwellings, covering most topics on the 1991 Census of Population and Housing form. This profile is presented at the ATSIC Region level. + +The ACP consists of 29 tables which crosstabulate characteristics including gender, age, place of birth, religion, marital status, education, income, occupation and employment status.""" ; + dcterms:title "1991 Census of Population and Housing: Aboriginal and Torres Strait Islander Community Profile: ATSIC Regions" . 
+ + dcterms:description """The Aboriginal and Torres Strait Islander Community Profiles (ACPs) are tabulations giving key census characteristics of Aboriginal and Torres Strait Islander persons, families and dwellings, covering most topics on the 1991 Census of Population and Housing form. This profile is presented at the ATSIC Zone level. +The ACP consists of 29 tables which crosstabulate characteristics including gender, age, place of birth, religion, marital status, education, income, occupation and employment status.""" ; + dcterms:title "1991 Census of Population and Housing: Aboriginal and Torres Strait Islander Community Profile: ATSIC Zones" . + + dcterms:description "ATSIDA is a specialised trusted research data management facility, and thematic archive within the Australian Data Archive for Australian Aboriginal and Torres Strait Islander research data managed by the UTS Library. ATSIDA provides a transformational research platform working at the nexus of researchers, communities and other stakeholders in preserving and ensuring ethical access to research data related to Indigenous communities. ATSIDA works with universities, government and other organisations to increase Indigenous student and staff research capacity, support Indigenous researchers and those working with Indigenous research data. It engages with communities to manage appropriate access and return of digital materials.", + "The Aboriginal and Torres Strait Islander Data Archive at the Australian Data Archive and ANU Archives. This was specifically mentioned in the NCRIS Roadmap as an existing strength to be built on. It needs staff at the Data Archive to fully curate and digitise these collections and make them web-accessible." ; + dcterms:title "ABORIGINAL & TORRES STRAIT ISLANDER DATA ARCHIVE", + "The Aboriginal and Torres Strait Islander Data Archive at ADA, ANU" . + + dcterms:description "This looks like a mirror of the ADA archive. Many links are broken." ; + dcterms:title "The Aboriginal and Torres Strait Islander Data Archive at Jumbunna, UTS" . + + dcterms:description """Austlang provides information about Indigenous Australian languages which has been assembled from referenced sources. +The dataset provided here includes the language names, each with a unique alpha-numeric code which functions as a stable identifier, alternative/variant names and spellings and the approximate location of each language variety.""" ; + dcterms:title "Austlang database." . + + dcterms:description """The Indigenous Protected Areas (IPA) programme has demonstrated successes across a broad range of outcome areas, effectively overcoming barriers to addressing Indigenous disadvantage and engaging Indigenous Australians in meaningful employment to achieve large scale conservation outcomes, thus aligning the interests of Indigenous Australians and the broader community. + +The Birriliburu & Matuwa Kurrara Kurrara (MKK) IPAs have provided an opportunity for Martu people to reconnect with and actively manage their traditional country. + +The two IPAs have proved a useful tool with which to leverage third party investment, through a joint management arrangement with the Western Australia (WA) Government, project specific funding from environmental NGOs and mutually beneficial partnerships with the private sector. 
+ +Increased and diversified investment from a range of funding sources would meet the high demand for Ranger jobs and could deliver a more expansive programme of works, which would, in turn, increase the social, economic and cultural outcomes for Martu Rangers and Community Members.""" ; + dcterms:title "SRI Investment Analysis of the Birriliburu and Matuwa Kurrara Kurrara Indigenous Protected Areas (2016)" . + + dcterms:description "Historical population data and biographical records" ; + dcterms:title "Briscoe-Smith Archive" . + + dcterms:description """The Composite Gazetteer of Australia is a cloud-based system allowing users to easily discover, interrogate and download place names information from Australia and its external territories. It is developed as a partnership between contributing agencies of the Intergovernmental Committee on Surveying and Mapping (ICSM) and is built on modern infrastructure providing automated ingestion and validation, producing a composite dataset from the individual jurisdictional gazetteers. + +The place names database is a collection of jurisdictional data that is combined to create the Composite Gazetteer of Australia. Place name information is managed at a local level by jurisdictions. The place name database and the Composite Gazetteer of Australia are maintained by ICSM.""" ; + dcterms:title "Compound Gazetteer of Australia" . + + dcterms:description "The Cultural Heritage Parties dataset is the spatial representation of state-wide Aboriginal and Torres Strait Islander Native Title Party boundaries within Queensland as described under the Aboriginal Cultural Heritage Act 2003 and the Torres Strait Islander Cultural Heritage Act 2003 (the Acts)." ; + dcterms:title "Cultural Heritage Party boundaries - Queensland" . + + dcterms:description "Productivity Commissions data dashboard arising from the National Agreement on Closing the Gap." ; + dcterms:title "Closing the gap information repository" . + + dcterms:description "Norman B. Tindale ; tribal boundaries drawn by Winifred Mumford on a base map produced by the Division of National Mapping, Department of National Development, Canberra, Australia." ; + dcterms:title "Distribution of the Aboriginal Tribes of Australia (1940)" . + + dcterms:description "UTS has taken over this data, but needs help to turn it into an ongoing public database" ; + dcterms:title "Aboriginal Deaths and Injuries in Custody" . + + dcterms:description "Barry Hansen and Yothu Yindi Foundation have done extensive work on where the money goes in the NT. Needs to be a national database." ; + dcterms:title "Expenditure on Indigenous Advancement" . + + dcterms:description "(Torrens University). An earlier application with Marcia for AIATSIS funding was never considered." ; + dcterms:title "GDP and Genuine Progress Indicator" . + + dcterms:description "The Snapshot is an ongoing research project that links enterprises on Indigenous business registries to data held by the Australian Bureau of Statistics. It will enable us to track the industries, revenue, employment outcome and growth of Indigenous businesses. This report provides an unprecedented snapshot of the Indigenous business sector to help dismantle the many stereotypes and myths that have led to lost opportunities for Indigenous business growth. There is mention of an I-BLADE dataset." ; + dcterms:title "Indigenous Business Sector Snapshot 1.1 Indigenous Businesses Sector Snapshot Study, Insights from I-BLADE 1.0" . 
+ + dcterms:description "Land that is owned or managed by Australia’s Indigenous communities, or over which Indigenous people have use and rights, was compiled from information supplied by Australian, state and territory governments and other statutory authorities with Indigenous land and sea management interests." ; + dcterms:title "Indigenous Land and Sea Interests " . + + dcterms:description "Registered & Notified Indigenous Land Use Agreements – (as per s. 24BH(1)(a), s. 24CH and s. 24DI(1)(a)) across Australia, The Central Resource for Sharing and Enabling Environmental Data in NSW" ; + dcterms:title "Indigenous Land Use Agreement Boundaries with basic metadata and status" . + + dcterms:description "Printed catalog highlighting ANU Indigenous Research activities at the time of publication" ; + dcterms:title "Indigenous Research Compendium 2018" . + + dcterms:description """Various projects from $10 million Indigenous Research Fund administered by AIATSIS. +A number of projects are described p13-15 here. +One might expect a number of these would give rise to relevant data collections and information on methods. +Each of these projects should be catalogued? Or not?""" ; + dcterms:title "Indigenous Research Exchange/Knowledge Exchange Platform" . + + dcterms:description """Sandra Silcot has identified the steps required to make this fully maintainable and sustainable. +Koori Health Research Database (Janet McCalman) traces BDM of 7,800 Aboriginals in Victoria & New South Wales Australia from 19th Century to the present. It is built from Yggdrasil, an existing open-source web database application designed for large population studies of family history https://rdxx.org/notes.sandra/khrd/slides/khrd-apa2012-talk.pdf.html""" ; + dcterms:title "The Koori Health Research Database" . + + dcterms:description """The Mayi Kuwayu Study looks at how Aboriginal and Torres Strait Islander wellbeing is linked to things like connection to country, cultural practices, spirituality and language use. +Our research team follows a large number of Aboriginal and Torres Strait Islander people and asks about their culture and wellbeing. As a longitudinal study, we are surveying people and then ask them to take the same survey every few years, so that we can understand what influences changes over time. +This is the first time a national study of this type has been done and will provide an evidence base to allow for the creation of better policies and programs. +This study has been created by and for Aboriginal and Torres Strait Islander people. It is an Aboriginal and Torres Strait Islander controlled research resource. +The Mayi Kuwayu team are experienced at working closely with communities across Australia, and the study has majority Aboriginal and Torres Strait Islander staffing and study governance (decision making) structure.""" ; + dcterms:title "The National Study of Aboriginal and Torres Strait Islander Wellbeing" . + + dcterms:description "These are extensive paper records which Ian Anderson has proposed incorporating in a database. Negotiation is still needed." ; + dcterms:title "Tasmanian Aboriginal genealogies" . + + dcterms:description "The Historical Census and Colonial Data Archive (HCCDA) is an archive of Australian colonial census publications and reports covering the period from 1833 to 1901, the year of Australia's federation. The corpus includes 18,638 pages of text, and approximately 15000 tables, all with full digital images, text conversion and individually identified pages and tables. 
Please note that the archive contains colonial census reports, but not individual census returns." ; + dcterms:title "The Historical Census and Colonial Data Archive" . + + dcterms:description "Noongar Boodjar Language Centre (NBLC) in Perth have partnered with the Atlas of Living Australia to link Noongar-Wudjari language and knowledge for plants and animals to western science knowledge to create the Noongar-Wudjari Plant and Animal online Encyclopedia. This project focused on the Noongar-Wudjari clan, from the South coast of WA, and worked specifically with Wudjari knowledge holders - Lynette Knapp and Gail Yorkshire to record, preserve and share their ancestral language and knowledge about plants and animals. Knowledge and language for 90 plants and animals were collected and are now ready for publication through the Atlas of Living Australia (ala.org.au)." ; + dcterms:title "Noongar Boodjar Plants and Animals" . + + dcterms:description """We are making a national resource for Indigenous health and heritage, which is based on our collection of biological samples, genome data and documents from Indigenous communities in many parts of Australia. You can find out more about NCIG and its collections at ncig.anu.edu.au. + +Information in these collections tells two kinds of stories. + +We are working with Indigenous communities to decide how to tell the stories of the people who are represented in the collection. We do not make personal information available, but the website lets you know what collections we have and how to contact us if you want to know more. + +There is also the story about how the collection was made and how it can be useful to researchers and other people. + +This website helps to tell this second story by making some records and documents from the collection openly available. There is information about the people who collected the samples and made the records, why they carried out their studies, the places they visited and some of the results of their studies.""" ; + dcterms:title "National Centre for Indigenous Genomics data" . + + dcterms:description "NSW prison population data and quarterly custody reports" ; + dcterms:title "NSW Custody Statistics" . + + dcterms:description "Existing database at the National Library" ; + dcterms:title "People Australia" . + + dcterms:description "Databases held by the NNTT" ; + dcterms:title "Native Title Databases at the National Native Title Tribunal" . + + dcterms:description "This comprises records of about 70,000 Indigenous and 30,000 non-Indigenous people surveyed in the 1970s and 1980s. Some paper records are held at AIATSIS. Microfilms of others are at UNSW Archives. There have been preliminary discussions with AIATSIS, the National Library and former members of the Hollows team about a program to digitise the records. IDN staff/resources would be needed." ; + dcterms:title "The Fred Hollows Archive (National Trachoma and Eye Health Program)" . + + dcterms:description """Conference powerpoint presentation + +Case study in exemplary IDG. +- Survey of native title prescribed bodies corporate (PBCs) +- Collect data on PBCs’ capacity, capabilities, needs and aspirations to better inform policies that affect PBCs +- Started data collection May 2019, to finish in 3rd quarter 2019""" ; + dcterms:title "Prescribed bodies corporate (PBCs) Survey 2019" . + + dcterms:title "AG Productivity Commission - Report on Government Services: Indigenous Compendium reports 2005-2015" . 
+ + dcterms:description "This dataset is of police offences by Aboriginals in Western Australia" ; + dcterms:title "Police Offenses WA (Erin Mathews)" . + + dcterms:description """Aboriginal and Torres Strait Islander people are the Indigenous people of Australia. They are not one group, but comprise hundreds of groups that have their own distinct set of languages, histories and cultural traditions. + +AIHW reports and other products include information about Indigenous Australians, where data quality permits. Thus, information and statistics about Indigenous Australians can be found in most AIHW products. + +In December 2021, AIHW released the Regional Insights for Indigenous Communities (RIFIC). The aim of this website is to provide access to data at a regional level, to help communities set their priorities and participate in joint planning with government and service providers. + +AIHW products that focus specifically on Indigenous Australians are captured on this page.""" ; + dcterms:title "Regional Insights for Indigenous Communities" . + + dcterms:description """Data workbooks presenting the latest Social Health Atlases of Australia are available for the whole of Australia by Population Health Area, Local Government Area, and Primary Health Network, and by Indigenous Area for the Aboriginal & Torres Strait Islander population. Data are also available by Quintile of Socioeconomic Disadvantage of Area (current period and time series), and Remoteness Area (current period and time series), for both the whole population, and the Aboriginal & Torres Strait Islander population (current period only). + +These workbooks are derived from ABS Census data releases.""" ; + dcterms:title "Social Health Atlases of Australia" . + + dcterms:description "Summarises all available aerial survey data and metadata used to characterise the long-term distribution and abundance of magpie geese in the Northern Territory undertaken by different institutions and publically available in several journals (Appendix A). Summarised also are results from a PhD study (E. Ligtermoet) documenting the cultural harvesting values of magpie geese ascertained by interviews with Kakadu Traditional Owners (2011-2015)." ; + dcterms:title "Supplementary Material used to characterise the spatial and temporal dynamics of magpie goose populations in the Kakadu Region NT and their cultural harvesting values" . + + dcterms:description "The Minyumai Indigenous Protected Areas (IPA) has provided an opportunity for the Bandjalang clan to re-engage with culture and language through country. Through land and fire management work, Bandjalang traditional owners have seen the restoration of native plants and animals that were thought to have been lost. Their return serves as a powerful reminder of the resilience of the Bandjalang people and enables them to better understand themselves, their culture, and their place in the world. The IPA programme has demonstrated successes across a broad range of outcome areas, effectively overcoming barriers to addressing Indigenous disadvantage and engaging Indigenous Australians in meaningful employment to achieve large scale conservation outcomes, thus aligning the interests of Indigenous Australians and the broader community." ; + dcterms:title "Social Return on Investment analysis of the Minyumai Indigenous Protected Area" . + + dcterms:description "Access still to be negotiated with the Museum." ; + dcterms:title "The Sandra Smith Archive" . + + dcterms:description "Strong demand but controversial." 
; + dcterms:title "Tindale/Horton map" . + + dcterms:description """TLCMap is a set of tools that work together for mapping Australian history and culture. + +Note that historical placenames in TLCmap is a HASS-I integration activity.""" ; + dcterms:title "Time Layered Cultural Map of Australia" . + + dcterms:description """The Victorian Perinatal Data Collection (VPDC) is a population-based surveillance system that collects for analysis comprehensive information on the health of mothers and babies, in order to contribute to improvements in their health. + +The VPDC contains information on obstetric conditions, procedures and outcomes, neonatal morbidity and congenital anomalies relating to every birth in Victoria. + +This data is reported annually to the AIHW as part of the National Perinatal Data Collection managed by the AIHW. The AIHW produces the annual report Australia’s mothers and babies, using the National Perinatal Data Collection and other data.""" ; + dcterms:title "The Victorian Perinatal database" . + + dcterms:description """This was nominated by Sandra Eades. Investigation, documentation and negotiation needed. + +https://www.datalinkage-wa.org.au/dlb-services/derived-indigenous-status-flag/ ?""" ; + dcterms:title "Western Australia Linked Data" . + + dcterms:description "In 2012, the remote Aboriginal community of Wilcannia in western NSW hosted the first Australian pilot of a Cuban mass adult literacy campaign model known as Yes I Can. The aim was to investigate the appropriateness of this model in Aboriginal Australia. Building on an intensive community development process of ‘socialisation and mobilisation’, sixteen community members with very low literacy graduated from the basic literacy course, with the majority continuing on into post-literacy activities, further training and/or employment." ; + dcterms:title "Aboriginal adult literacy campaign - Wilcannia Pilot Project Final Evaluation Report" . + + dcterms:description """The Yawuru Knowing Our Community (YKC) Household Survey was commissioned by the Nyamba Buru Yawuru Board of Directors in December 2010. This report and associated data base are the property of the NBY Board. The report was designed and produced by The Kimberley Institute, Centre for Aboriginal Economic Policy Research at The Australian National University, and the Broome Aboriginal community. +In September 2010, the NBY Board resolved to undertake a comprehensive population survey of Broome to inform the Board’s investment strategy, particularly on social housing.""" ; + dcterms:title "Yawuru Knowing Our Community Household Survey" . + + dcterms:description """Yumi Sabe is an Australian Kriol term that translates to 'we know', or, 'we have the knowledge'. + +Yumi Sabe is an Indigenous Knowledge Exchange that helps Indigenous communities, researchers and policy makers to access and use data to inform and improve policies and programs and demonstrate the complexity and diversity of Aboriginal and Torres Strait Islander peoples', research and culture. + +This is a beta product that is still being refined and developed. Please contact us if you have any issues or feedback.""" ; + dcterms:title "Indigenous Research Exchange Platform" . 
+ + dcterms:description "The Australia's Indigenous land and forest estate (2020) is a continental spatial dataset that identifies and reports separately the individual attributes of Australia's Indigenous estate, namely the extent of land and forest over which Indigenous peoples and communities have ownership, management and co-management, or other special rights." ; + dcterms:title "Australia's Indigenous land and forest estate (2020)" . + + dcterms:description """Tandana is owned and managed by the National Aboriginal Cultural Institute Inc. It is Australia’s oldest Aboriginal-owned and managed multi-arts centre. +As Tandana is government funded it reports annually on the funding supplied and its distribution.""" ; + dcterms:title "Tandanya Annual Reporting Regulatory Data" . + + dcterms:description "Indigenous Areas (IAREs) are medium sized geographic areas built from whole Indigenous Locations. They are designed for the release and analysis of more detailed statistics for Aboriginal and Torres Strait Islander people. Whole Indigenous Areas aggregate to form Indigenous Regions."@en ; + dcterms:title "Indigenous Areas within the ASGS" . + + dcterms:description """This is a reference geospatial dataset developed by the Australian Bureau of Statistics which provides the most granular form of Indigenous Structure represented in the Australian Statistical Geography Standard (ASGS), currently at Edition 3 (2021). Indigenous Locations (ILOCs) are designed to allow the production and analysis of statistics relating to Aboriginal and Torres Strait Islander people with a high level of spatial accuracy, while also maintaining the confidentiality of individuals. It has been designed in consultation with the ABS Centre for Aboriginal and Torres Strait Islander Statistics to incorporate statistical and community requirements wherever possible. + +ILOCs are geographic areas built from whole Statistical Areas Level 1 (SA1s). They are designed to represent small Aboriginal and Torres Strait Islander communities (urban and rural) that are near each other or that share language, traditional borders or Native Title. They usually have a minimum population of about 90 people. In some cases, Indigenous Locations have a smaller Aboriginal and Torres Strait Islander population to meet statistical requirements or to better represent the local community. + +Where a community is too small for confidentiality requirements, it is combined with another, related population. Remaining Statistical Areas Level 1 are combined into larger areas, which will include a more dispersed Aboriginal and Torres Strait Islander population. + +In some cases, Aboriginal and Torres Strait Islander communities that are too small to be identified separately have been combined with other nearby and associated communities. This has resulted in some multi-part Indigenous Locations where related communities are represented as a single Indigenous Location but are geographically separate. This enables the release of Census of Population and Housing data and other data for Aboriginal and Torres Strait Islander communities in a meaningful way, while balancing confidentiality and statistical requirements. + +There are 1,139 ILOCs covering the whole of Australia without gaps or overlaps. Whole ILOCs aggregate to form Indigenous Areas (IAREs). Whole Indigenous Areas aggregate to form Indigenous Regions (IREGs). 
+ +Indigenous Locations are identified by eight-digit hierarchical codes consisting of a one-digit State or Territory identifier, followed by a two-digit Indigenous Region identifier, a three-digit Indigenous Area identifier and finally a two-digit Indigenous Location identifier. Within each Indigenous Area, Indigenous Location identifiers are unique. When change occurs, old codes are retired and the next available identifier is assigned. + +Shapefiles for Indigenous Locations and other components of the ABS's Indigenous Structure are available: https://www.abs.gov.au/statistics/standards/australian-statistical-geography-standard-asgs-edition-3/jul2021-jun2026/access-and-downloads/digital-boundary-files + +This catalog entry refers to the latest ASGS release. For all releases refer to the ABS: https://www.abs.gov.au/statistics/standards/australian-statistical-geography-standard-asgs-edition-3"""@en ; + dcterms:title "Indigenous Locations within the Australian Statistical Geography Standard (ASGS) Edition 3" . + + dcterms:description "Indigenous Regions (IREGs) are large geographic areas built from whole Indigenous Areas and are based on historical boundaries. The larger population of Indigenous Regions enables highly detailed analysis."@en ; + dcterms:title "Indigenous Regions within the ASGS" . + + rdfs:label "Indigenous Data Network" ; + schema:description "The IDN is within the University of Melbourne. It was established in 2018 to support and coordinate the governance of Indigenous data for Aboriginal and Torres Strait Islander peoples and empower Aboriginal and Torres Strait Islander communities to decide their own local data priorities.", + "The Indigenous Data Network (IDN) was established in 2018 to support and coordinate the governance of Indigenous data for Aboriginal and Torres Strait Islander peoples and empower Aboriginal and Torres Strait Islander communities to decide their own local data priorities."@en ; + schema:name "Indigenous Data Network" . + + dcterms:description """Aboriginal and Torres Strait Islander collections, including the Mountford-Sheard Collection INDIGENOUS COLLECTIONS +The State Library has a significant and developing amount of specialist material relating to Aboriginal and Torres Strait Islander people including the Mountford-Sheard Collection. +The papers of the Mountford-Sheard Collection which comprise an extensive collection of Charles P. Mountford's expedition journals, photographs, film, sound recordings, artworks, objects and research. The papers were compiled with the assistance and encouragement of friend and colleague Harold L Sheard. Mountford developed his appreciation of Australian Aboriginal people and their customs, beliefs and art over many years of expeditions, making it his life's work.""" ; + dcterms:title "Mountford-Sheard Collection" . + + dcterms:description "The Deebing Creek mission was founded by the Aboriginal Protection Society of Ipswich. Work started on the establishment of an Aboriginal mission at Deebing Creek around 1887. The correspondence records of the Home Secretary’s Office, Chief Protector of Aboriginals and the Southern Protector of Aboriginals Offices are a valuable source of information relating to Deebing Creek." ; + dcterms:title "Correspondence relating to Aboriginal and Torres Strait Islander people - Deebing Creek explanatory notes" . 
+
+ dcterms:description """This dataset details the Dedicated Indigenous Protected Areas (IPA) across Australia through the implementation of the Indigenous Protected Areas Programme. These boundaries are not legally binding.
+An Indigenous Protected Area (IPA) is an area of Indigenous-owned land or sea where traditional Indigenous owners have entered into an agreement with the Australian Government to promote biodiversity and cultural resource conservation, making up over half of Australia's National Reserve System.
+
+Further information can be found at the website below.
+
+https://www.awe.gov.au/agriculture-land/land/indigenous-protected-areas""" ;
+ dcterms:title "Indigenous Protected Areas (IPA) - Dedicated" .
+
diff --git a/tests/data/catprez/expected_responses/catalog_listing_anot.ttl b/tests/data/catprez/expected_responses/top_level_catalog_listing_anot.ttl
similarity index 81%
rename from tests/data/catprez/expected_responses/catalog_listing_anot.ttl
rename to tests/data/catprez/expected_responses/top_level_catalog_listing_anot.ttl
index 23b11e2e..e741a08d 100644
--- a/tests/data/catprez/expected_responses/catalog_listing_anot.ttl
+++ b/tests/data/catprez/expected_responses/top_level_catalog_listing_anot.ttl
@@ -3,6 +3,7 @@
 @prefix prez: .
 @prefix rdf: .
 @prefix rdfs: .
+@prefix skos: .
 @prefix xsd: .
 
 dcterms:description rdfs:label "Description"@en ;
@@ -17,6 +18,15 @@ rdf:type rdfs:label "type" .
 
 rdfs:label rdfs:label "label" .
 
+skos:definition rdfs:label "definition"@en ;
+ skos:definition "A statement or formal explanation of the meaning of a concept."@en .
+
+ a dcat:Catalog ;
+ rdfs:label "Container catalog Catalogue" ;
+ dcterms:description "container catalog to be used for testing" ;
+ dcterms:identifier "pd:container-catalog"^^prez:identifier ;
+ prez:link "/catalogs/pd:container-catalog" .
+
 a dcat:Catalog ;
 rdfs:label "IDN Demonstration Catalogue" ;
 dcterms:description """The Indigenous Data Network's demonstration catalogue of datasets. This catalogue contains records of datasets in Australia, most of which have some relation to indigenous Australia.
@@ -26,7 +36,7 @@ The purpose of this catalogue is not to act as a master catalogue of indigenous
 The content of this catalogue conforms to the Indigenous Data Network's Catalogue Profile which is a profile of the DCAT, SKOS and PROV data models."""@en ;
 dcterms:identifier "pd:democat"^^prez:identifier ;
 dcterms:title "IDN Demonstration Catalogue" ;
- prez:link "/c/catalogs/pd:democat" .
+ prez:link "/catalogs/pd:democat" .
 
 a dcat:Catalog ;
 dcterms:description """The Indigenous Data Network's catalogue of Agents. This catalogue contains instances of Agents - People and Organisations - related to the holding of indigenous data. This includes non-indigenous Agents
 
This catalogue extends on standard Agent information to include properties useful to understand the indigeneity of Agents: whether they are or not, or how much they are, indigenous"""@en ;
 dcterms:identifier "dtst:agents"^^prez:identifier ;
 dcterms:title "IDN Agents Catalogue" ;
- prez:link "/c/catalogs/dtst:agents" .
+ prez:link "/catalogs/dtst:agents" .
 
 a dcat:Catalog ;
 dcterms:description """The Indigenous Data Network's catalogue of datasets. This catalogue contains records of datasets in Australia, most of which have some relation to indigenous Australia.
@@ -44,14 +54,15 @@ The purpose of this catalogue is not to act as a master catalogue of indigenous The content of this catalogue conforms to the Indigenous Data Network's Catalogue Profile which is a profile of the DCAT, SKOS and PROV data models."""@en ; dcterms:identifier "dtst:democat"^^prez:identifier ; dcterms:title "IDN Datasets Catalogue" ; - prez:link "/c/catalogs/dtst:democat" . + prez:link "/catalogs/dtst:democat" . a dcat:Catalog ; dcterms:description "This is the system catalogue implemented by this instance of CatPrez that lists all its other Catalog instances"@en ; dcterms:identifier "sys:catprez"^^prez:identifier ; dcterms:title "CatPrez System Catalogue" ; - prez:link "/c/catalogs/sys:catprez" . + prez:link "/catalogs/sys:catprez" . dcat:Catalog rdfs:label "Catalog"@en ; - prez:count 4 . + skos:definition "A curated collection of metadata about resources (e.g., datasets and data services in the context of a data catalog)."@en ; + prez:count 5 . diff --git a/tests/data/catprez/input/AAC-SA.ttl b/tests/data/catprez/input/AAC-SA.ttl deleted file mode 100644 index f370e3b2..00000000 --- a/tests/data/catprez/input/AAC-SA.ttl +++ /dev/null @@ -1,51 +0,0 @@ -PREFIX dcat: -PREFIX dcterms: -PREFIX ex: -PREFIX iso: -PREFIX prov: -PREFIX xsd: - - - dcterms:hasPart ; -. - - - dcterms:hasPart ; -. - - - a dcat:Resource ; - dcterms:description """This study contains time series of data of the Annual Aboriginal Census for Australia, Australian Capital Territory, New South Wales, Northern Territory, Queensland, South Australia, Tasmania, Victoria and Western Australia from 1921 to 1944. - -Special care notice: -Aboriginal and Torres Strait Islander people, researchers and other users should be aware that material in this dataset may contain material that is considered offensive. The data has been retained in its original format because it represents an evidential record of language, beliefs or other cultural situations at a point in time.""" ; - dcterms:identifier "AAC-SA"^^xsd:token ; - dcterms:issued "2011-07-22"^^xsd:date ; - dcterms:license "All Rights Reserved" ; - dcterms:rights "Copyright © 2011, The Australian National University. All rights reserved." ; - dcterms:spatial - , - ; - dcterms:temporal "1921-1944" ; - dcterms:title "Annual Aboriginal Census,1921-1944 - South Australia" ; - dcterms:accessRights ; - dcat:accessURL "https://www.atsida.edu.au/archive/datasets/au.edu.anu.ada.ddi.20002-sa"^^xsd:anyURI ; - dcat:theme - , - ; - prov:qualifiedAttribution - [ - dcat:hadRole iso:originator ; - prov:agent "Gordon Briscoe, Len Smith" - ] , - [ - dcat:hadRole iso:rightsHolder ; - prov:agent - ] , - [ - dcat:hadRole iso:custodian ; - prov:agent "ATSIDA.1" - ] ; - ex:home "https://www.atsida.edu.au/" ; - ex:notes "The Annual Aboriginal Census is considered as a significant official source of Aboriginal population statistics. It was conducted annually in June from 1921 to 1944, exempting the war years between 1941 and 1944 in each State and Territory. The 1944 census was incomplete with New South Wales not taking part at all. Enumeration of Aboriginal populations was poor and difficulties in classification occurred. The Census was a collaboration of the Commonwealth Bureau of Census and Statistics who initiated the study, State and Territory Statisticians, the Protector of Aborigines, and local police officers who conducted the enumeration. The Annual Aboriginal Census is also referred to as the Annual Census of Aborigines and Police Census." ; -. 
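The expected-response changes above all follow one pattern: every prez:link drops the old per-subsystem prefix ("/c/catalogs/…") in favour of the flat "/catalogs/…" scheme, with the curie-style dcterms:identifier (e.g. "pd:democat") as the path segment. A rough sketch of that mapping, assuming a hypothetical helper name — the real route construction lives in prez's link-generation code, not in this function:

    def catalog_link(curie: str) -> str:
        """Illustrative only: build the post-patch listing link for a catalog.

        e.g. "pd:democat" -> "/catalogs/pd:democat"
        (pre-patch, these links were "/c/catalogs/pd:democat")
        """
        return f"/catalogs/{curie}"

The fixtures also bump prez:count from 4 to 5, reflecting the extra "pd:container-catalog" entry introduced in the renamed expected response.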
diff --git a/tests/data/catprez/input/_idn-ac.ttl b/tests/data/catprez/input/_idn-ac.ttl deleted file mode 100644 index 5752fb28..00000000 --- a/tests/data/catprez/input/_idn-ac.ttl +++ /dev/null @@ -1,23 +0,0 @@ -PREFIX dcat: -PREFIX dcterms: -PREFIX isoroles: -PREFIX prov: -PREFIX xsd: - - - a dcat:Catalog ; - dcterms:created "2022-08-15"^^xsd:date ; - dcterms:description """The Indigenous Data Network's catalogue of Agents. This catalogue contains instances of Agents - People and Organisations - related to the holding of indigenous data. This includes non-indigenous Agents - -This catalogue extends on standard Agent information to include properties useful to understand the indigeneity of Agents: whether they are or not, or how much they are, indigenous"""@en ; - dcterms:identifier "idnac"^^xsd:token ; - dcterms:modified "2022-08-15"^^xsd:date ; - dcterms:title "IDN Agents Catalogue" ; - prov:qualifiedAttribution [ - dcat:hadRole - isoroles:author , - isoroles:custodian , - isoroles:owner ; - prov:agent - ] ; -. diff --git a/tests/data/catprez/input/_idn-dc.ttl b/tests/data/catprez/input/_idn-dc.ttl deleted file mode 100644 index df21d2a7..00000000 --- a/tests/data/catprez/input/_idn-dc.ttl +++ /dev/null @@ -1,25 +0,0 @@ -PREFIX dcat: -PREFIX dcterms: -PREFIX isoroles: -PREFIX prov: -PREFIX xsd: - - - a dcat:Catalog ; - dcterms:created "2022-07-31"^^xsd:date ; - dcterms:description """The Indigenous Data Network's catalogue of datasets. This catalogue contains records of datasets in Australia, most of which have some relation to indigenous Australia. - -The purpose of this catalogue is not to act as a master catalogue of indigenous data in Australia to demonstrate improved metadata models and rating systems for data and metadata in order to improve indigenous data governance. - -The content of this catalogue conforms to the Indigenous Data Network's Catalogue Profile which is a profile of the DCAT, SKOS and PROV data models."""@en ; - dcterms:identifier "idndc"^^xsd:token ; - dcterms:modified "2022-08-29"^^xsd:date ; - dcterms:title "IDN Datasets Catalogue" ; - prov:qualifiedAttribution [ - dcat:hadRole - isoroles:author , - isoroles:custodian , - isoroles:owner ; - prov:agent - ] ; -. diff --git a/tests/data/catprez/input/_system-catalog.ttl b/tests/data/catprez/input/_system-catalog.ttl deleted file mode 100644 index 8c1f0ec9..00000000 --- a/tests/data/catprez/input/_system-catalog.ttl +++ /dev/null @@ -1,37 +0,0 @@ -PREFIX dcat: -PREFIX dcterms: -PREFIX idnth: -PREFIX idnroles: -PREFIX isoroles: -PREFIX owl: -PREFIX prov: -PREFIX sdo: -PREFIX skos: -PREFIX xsd: - - - a dcat:Catalog ; - dcterms:identifier "catprez"^^xsd:token ; - dcterms:title "CatPrez System Catalogue" ; - dcterms:description """This is the system catalogue implemented by this instance of CatPrez that lists all its other Catalog instances"""@en ; - dcterms:created "2022-08-03"^^xsd:date ; - dcterms:modified "2022-08-29"^^xsd:date ; - prov:qualifiedAttribution [ - a prov:Attribution; - prov:agent ; - dcat:hadRole isoroles:author , isoroles:owner , isoroles:custodian ; - ] ; - dcterms:hasPart - , - ; -. - - a dcat:Resource . - - a dcat:Resource . - - - a dcat:Resource . - - a dcat:Resource . 
- diff --git a/tests/data/catprez/input/agents.ttl b/tests/data/catprez/input/agents.ttl deleted file mode 100644 index 5d4fc709..00000000 --- a/tests/data/catprez/input/agents.ttl +++ /dev/null @@ -1,133 +0,0 @@ -PREFIX aarr: -PREFIX dcat: -PREFIX dcterms: -PREFIX democat: -PREFIX idncp: -PREFIX prov: -PREFIX rdfs: -PREFIX sdo: -PREFIX xsd: - - - a sdo:Organization ; - sdo:name "Indigenous Data Network" ; - rdfs:label "Indigenous Data Network" ; - sdo:description "The IDN is within the University of Melbourne. It was established in 2018 to support and coordinate the governance of Indigenous data for Aboriginal and Torres Strait Islander peoples and empower Aboriginal and Torres Strait Islander communities to decide their own local data priorities." ; - sdo:url "https://mspgh.unimelb.edu.au/centres-institutes/centre-for-health-equity/research-group/indigenous-data-network"^^xsd:anyURI ; - dcterms:type ; -. - - - a sdo:Person ; - dcterms:type sdo:Person ; - sdo:name "Nicholas J. Car"@en ; - rdfs:label "Nicholas J. Car"@en ; - sdo:email "nick@kurrawong.net"^^xsd:anyURI ; - dcat:relation [ - dcat:hadRole aarr:affiliateOf ; - prov:agent ; - ] ; - dcterms:type ; -. - - - a sdo:Person ; - sdo:name "Sandra Silcot"@en ; - rdfs:label "Sandra Silcot"@en ; - sdo:email "ssilcot@gmail.com"^^xsd:anyURI ; - dcat:relation [ - dcat:hadRole aarr:affiliateOf ; - prov:agent ; - ] ; -. - - - a sdo:Organization ; - sdo:name "KurrawongAI" ; - rdfs:label "KurrawongAI" ; - sdo:description "Kurrawong AI is a small, Artificial Intelligence, company in Australia specialising in Knowledge Graphs." ; - sdo:url "https://kurrawong.net"^^xsd:anyURI ; - sdo:identifier "31 353 542 036"^^idncp:abnId ; - dcterms:type ; -. - - - a sdo:Organization ; - sdo:name "Australian Federal Government" ; - rdfs:label "Australian Federal Government" ; - sdo:url "https://www.australia.gov.au"^^xsd:anyURI ; - dcterms:type ; -. - - - a sdo:Organization ; - sdo:name "Australian Bureau of Statistics" ; - rdfs:label "Australian Bureau of Statistics" ; - sdo:url "https://www.abs.gov.au"^^xsd:anyURI ; - dcterms:type ; -. - - - a sdo:Organization ; - dcat:relation [ - dcat:hadRole aarr:precursorOrganisation ; - prov:agent ; - ] ; - sdo:name "Services Australia" ; - rdfs:label "Services Australia" ; - sdo:url "https://www.servicesaustralia.gov.au"^^xsd:anyURI ; - dcterms:type ; -. - - - a sdo:Organization ; - sdo:identifier - "O-000880"^^idncp:agorId , - "CA-7853"^^idncp:crsId ; - dcat:relation [ - dcat:hadRole aarr:descendantOrganisation ; - prov:agent ; - ] ; - sdo:name "Department of Human Services" ; - rdfs:label "Department of Human Services" ; - sdo:url "https://www.humanservices.gov.au"^^xsd:anyURI ; - dcterms:type ; -. - - - a sdo:Organization ; - sdo:name "Australian Government Indigenous Locations Working Group 2007-2012" ; - rdfs:label "Australian Government Indigenous Locations Working Group 2007-2012" ; - dcat:relation [ - dcat:hadRole aarr:partOf ; - prov:agent ; # TODO: Find a better parent - ] ; - dcterms:type ; -. - - - a sdo:Organization ; - sdo:name "Australian Bureau of Statistics Centre of Aboriginal and Torres Strait Islander Statistics" ; - rdfs:label "Australian Bureau of Statistics Centre of Aboriginal and Torres Strait Islander Statistics" ; - dcat:relation [ - dcat:hadRole aarr:partOf ; - prov:agent ; - ] ; - dcterms:type ; - sdo:url "https://www.abs.gov.au/about/aboriginal-and-torres-strait-islander-peoples/aboriginal-and-torres-strait-islander-engagement"^^xsd:anyURI ; -. 
- - - a sdo:Organization ; - sdo:name "University of Technology Sydney" ; - rdfs:label "University of Technology Sydney" ; - sdo:url "https://www.uts.edu.au"^^xsd:anyURI ; - dcterms:type ; -. - - - a sdo:Organisation ; - sdo:name "National Library of Australia" ; - rdfs:label "National Library of Australia" ; - sdo:url "https://www.nla.gov.au/"^^xsd:anyURI ; -. diff --git a/tests/data/catprez/input/catalog.ttl b/tests/data/catprez/input/catalog.ttl new file mode 100644 index 00000000..0d0d01bf --- /dev/null +++ b/tests/data/catprez/input/catalog.ttl @@ -0,0 +1,20 @@ +PREFIX dcat: +PREFIX dcterms: +PREFIX ex: +PREFIX rdfs: + +ex:TopLevelCatalog a dcat:Catalog ; + rdfs:label "Top level catalog" ; + dcterms:hasPart ex:LowerLevelCatalog ; + ex:property "top level catalog property" ; + . + +ex:LowerLevelCatalog a dcat:Catalog ; + rdfs:label "Lower level catalog" ; + dcterms:hasPart ex:Resource ; + ex:property "lower level catalog property" . + +ex:Resource a dcat:Resource ; + rdfs:label "Resource" ; + ex:property "resource property" ; +. \ No newline at end of file diff --git a/tests/data/catprez/input/labels.ttl b/tests/data/catprez/input/labels.ttl deleted file mode 100644 index 51f3e6c3..00000000 --- a/tests/data/catprez/input/labels.ttl +++ /dev/null @@ -1,13 +0,0 @@ -PREFIX dcat: -PREFIX dcterms: -PREFIX geo: -PREFIX geofab: -PREFIX rdfs: -PREFIX sand: -PREFIX xsd: - - -dcterms:identifier rdfs:label "Identifier"@en ; - rdfs:comment "A unique identifier of the item." . - -dcat:Dataset rdfs:label "Dataset"@en . diff --git a/tests/data/catprez/input/pd_democat.ttl b/tests/data/catprez/input/pd_democat.ttl deleted file mode 100644 index 34c95b82..00000000 --- a/tests/data/catprez/input/pd_democat.ttl +++ /dev/null @@ -1,716 +0,0 @@ -@prefix dcat: . -@prefix dcterms: . -@prefix ns1: . -@prefix prez: . -@prefix prov: . -@prefix rdf: . -@prefix rdfs: . -@prefix schema: . -@prefix skos: . -@prefix xsd: . - -dcterms:created rdfs:label "Date Created"@en ; - dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en . - -dcterms:creator rdfs:label "Creator"@en ; - dcterms:description "Recommended practice is to identify the creator with a URI. If this is not possible or feasible, a literal value that identifies the creator may be provided."@en . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:hasPart rdfs:label "Has Part"@en ; - dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of Is Part Of."@en . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . - -dcterms:issued rdfs:label "Date Issued"@en ; - dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en . 
- -dcterms:modified rdfs:label "Date Modified"@en ; - dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en . - -dcterms:provenance rdfs:label "Provenance"@en ; - dcterms:description "The statement may include a description of any changes successive custodians made to the resource."@en . - -dcterms:publisher rdfs:label "Publisher"@en . - -dcterms:title rdfs:label "Title"@en . - -rdf:type rdfs:label "type" . - -rdfs:label rdfs:label "label" . - -skos:definition rdfs:label "definition"@en ; - skos:definition "A statement or formal explanation of the meaning of a concept."@en . - -skos:prefLabel rdfs:label "preferred label"@en ; - skos:definition "The preferred lexical label for a resource, in a given language."@en . - -dcat:hadRole rdfs:label "hadRole"@en . - -prov:agent rdfs:label "agent" . - -prov:qualifiedAttribution rdfs:label "qualified attribution" . - - a dcat:Catalog ; - rdfs:label "IDN Demonstration Catalogue" ; - dcterms:created "2022-07-31"^^xsd:date ; - dcterms:description """The Indigenous Data Network's demonstration catalogue of datasets. This catalogue contains records of datasets in Australia, most of which have some relation to indigenous Australia. - -The purpose of this catalogue is not to act as a master catalogue of indigenous data in Australia to demonstrate improved metadata models and rating systems for data and metadata in order to improve indigenous data governance. - -The content of this catalogue conforms to the Indigenous Data Network's Catalogue Profile which is a profile of the DCAT, SKOS and PROV data models."""@en ; - dcterms:hasPart , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - ; - dcterms:identifier "democat"^^xsd:token, - "pd:democat"^^prez:identifier ; - dcterms:modified "2022-08-29"^^xsd:date ; - dcterms:title "IDN Demonstration Catalogue" ; - prov:qualifiedAttribution [ dcat:hadRole , - , - ; - prov:agent ] ; - . - - rdfs:label "IDN Role Codes"@en ; - dcterms:identifier "vcb:idn-role-codes"^^prez:identifier ; - dcterms:provenance "Derived from the ISO 19115's CI Role Code vocabulary"@en ; - skos:definition "The Indigenous Data Network's vocabulary of the types of roles Agents - People and Organisations - play in relation to data."@en ; - skos:prefLabel "IDN Role Codes"@en . - - rdfs:label "ISO CI Role Code codes"@en ; - dcterms:identifier "dn-rl-cds:iso-roles"^^prez:identifier ; - dcterms:provenance "The list of Concepts from the original ISO CI Role Code vocabulary"@en ; - skos:definition "Codes from the original ISO CI Role Code codelist"@en ; - skos:prefLabel "ISO CI Role Code codes"@en . - -schema:name rdfs:label "name" . - - rdfs:label "author"@en ; - dcterms:provenance "Presented in the original standard's codelist"@en ; - ns1:status ; - skos:definition "party who authored the resource" ; - skos:prefLabel "author"@en ; - . - - rdfs:label "custodian"@en ; - dcterms:provenance "Presented in the original standard's codelist"@en ; - ns1:status ; - skos:definition "party that accepts accountability and responsibility for the resource and ensures appropriate care and maintenance of the resource" ; - skos:prefLabel "custodian"@en ; - . 
- - rdfs:label "owner"@en ; - dcterms:provenance "Presented in the original standard's codelist"@en ; - ns1:status ; - skos:definition "party that owns the resource" ; - skos:prefLabel "owner"@en ; - . - - - a dcat:Resource ; - dcterms:creator ; - dcterms:description """Needs to be integrated with KHRD. Negotiation required with State Library. - -Comprises Barwick's publications and conference papers; Barwick's PhD.; work with the Australian Institute of Aboriginal Studies and the Aboriginal History journal; work on major research projects; incoming and outgoing correspondence; reference material, and collected genealogies of Aboriginal Victorian families.""" ; - dcterms:issued "2007-01-10"^^xsd:date ; - dcterms:publisher ; - dcterms:title "The Diane Barwick Archive" ; -. - -dcat:Catalog rdfs:label "Catalog"@en . - - dcterms:creator ; - dcterms:description """This dataset has been developed by the Australian Government as an authoritative source of indigenous location names across Australia. It is sponsored by the Spatial Policy Branch within the Department of Communications and managed solely by the Department of Human Services. -The dataset is designed to support the accurate positioning, consistent reporting, and effective delivery of Australian Government programs and services to indigenous locations. -The dataset contains Preferred and Alternate names for indigenous locations where Australian Government programs and services have been, are being, or may be provided. The Preferred name will always default to a State or Territory jurisdiction's gazetted name so the term 'preferred' does not infer that this is the locally known name for the location. Similarly, locational details are aligned, where possible, with those published in State and Territory registers. -This dataset is NOT a complete listing of all locations at which indigenous people reside. Town and city names are not included in the dataset. The dataset contains names that represent indigenous communities, outstations, defined indigenous areas within a town or city or locations where services have been provided.""" ; - dcterms:issued "2013-12-02"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Australian Government Indigenous Programs & Policy Locations (AGIL) dataset" ; - . - - dcterms:creator ; - dcterms:description """This study contains time series of data of the Annual Aboriginal Census for Australia, Australian Capital Territory, New South Wales, Northern Territory, Queensland, South Australia, Tasmania, Victoria and Western Australia from 1921 to 1944. - -Special care notice: -Aboriginal and Torres Strait Islander people, researchers and other users should be aware that material in this dataset may contain material that is considered offensive. The data has been retained in its original format because it represents an evidential record of language, beliefs or other cultural situations at a point in time.""" ; - dcterms:issued "2011-07-22"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Annual Aboriginal Census,1921-1944 - Australia" ; - . - - a dcat:Resource ; - dcterms:creator ; - dcterms:description """This study contains time series of data of the Annual Aboriginal Census for Australia, Australian Capital Territory, New South Wales, Northern Territory, Queensland, South Australia, Tasmania, Victoria and Western Australia from 1921 to 1944. 
- -Special care notice: -Aboriginal and Torres Strait Islander people, researchers and other users should be aware that material in this dataset may contain material that is considered offensive. The data has been retained in its original format because it represents an evidential record of language, beliefs or other cultural situations at a point in time.""" ; - dcterms:issued "2011-07-22"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Annual Aboriginal Census,1921-1944 - South Australia" ; - . - - dcterms:description "Existing database at ANU" ; - dcterms:publisher ; - dcterms:title "The Australian Dictionary of Biography" ; -. - - dcterms:description "A database of Agents - Organisations & People - with roles relating to indigenous data" ; - dcterms:publisher ; - dcterms:title "Indigenous Data Network's Agents DB" ; -. - - dcterms:description "An Indigenous geography and gazetteer, including a Loc-I framework for tribal, language and community data. Requires developmental work in collaboration with Universities, ABS, AIHW, Geoscience Australia, AURIN etc etc." ; - dcterms:publisher , - , - ; - dcterms:title "Indigenous Gazetteer" ; - . - - dcterms:creator ; - dcterms:description "The Australian National University is home to many research collections of national and international significance. Material from the ANU Archives, ANU Classics Museum, ANU Library, Asia Pacific Map Collection and the Noel Butlin Archives Centre are being progressivley digitised and made available through this repository." ; - dcterms:publisher ; - dcterms:title "ANU Archive and Library Collections - \"Indigenous\" Search" ; - . - - dcterms:description "A 2020 review of First Nations Identified physical collections held by the ANU. Not published." ; - dcterms:publisher ; - dcterms:title "2020 ANU First Nations Collections Review" ; - . - - dcterms:description "The University's Open Research digital repository ecompasses a number of research collections which the wider community is free to browse." ; - dcterms:title "ANU Open Research Collections" ; - . - - dcterms:creator ; - dcterms:description """The Australian National University, through its Open Research repository collects, maintains, preserves, promotes and disseminates its open access scholarly materials. - -Open Research holds a variety of scholarly publications including journal articles; books and book chapters; conference papers, posters and presentations; theses; creative works; photographs and much more in a number of collections and formats. The wider community is free to browse this material and all members of the ANU community (past and present) are encouraged to contribute their research.""" ; - dcterms:issued "2016-05-19"^^xsd:date ; - dcterms:publisher ; - dcterms:title "ANU Open Research Library - \"Indigenous\" Search (Thesis Library)" ; -. - - dcterms:description "Publications, Ethics, Grants" ; - dcterms:publisher ; - dcterms:title "ANU Research Information Enterprise System" ; - . - - dcterms:creator ; - dcterms:description """Needs to be made fully maintainable, sustainable interoperable and web-accessible - -ATNS provides an online portal for people seeking information on agreements with Indigenous peoples. We aim to promote knowledge and transparency by capturing the range and variety of agreement making occurring in Australia and other parts of the world. 
- -We gather and review information from publicly available academic sources, online materials and documents provided by the organisations and agencies involved in agreement-making processes. No confidential material is published. """ ; - dcterms:issued "1905-07-11"^^xsd:date ; - dcterms:publisher ; - dcterms:title "The Agreements, Treaties and Negotiated Settlements Database" ; - . - - dcterms:creator ; - dcterms:description """The Aboriginal and Torres Strait Islander Community Profiles (ACPs) are tabulations giving key census characteristics of Aboriginal and Torres Strait Islander persons, families and dwellings, covering most topics on the 1991 Census of Population and Housing form. This profile is presented at the Aboriginal Community level. -The ACP consists of 29 tables which crosstabulate characteristics including gender, age, place of birth, religion, marital status, education, income, occupation and employment status.""" ; - dcterms:issued "2005-01-01"^^xsd:date ; - dcterms:publisher ; - dcterms:title "1991 Census of Population and Housing: Aboriginal and Torres Strait Islander Community Profile: Aboriginal Community, ACT" ; - . - - dcterms:creator ; - dcterms:description """The Aboriginal and Torres Strait Islander Community Profiles (ACPs) are tabulations giving key census characteristics of Aboriginal and Torres Strait Islander persons, families and dwellings, covering most topics on the 1991 Census of Population and Housing form. This profile is presented at the ATSIC Region level. - -The ACP consists of 29 tables which crosstabulate characteristics including gender, age, place of birth, religion, marital status, education, income, occupation and employment status.""" ; - dcterms:issued "2007-03-16"^^xsd:date ; - dcterms:publisher ; - dcterms:title "1991 Census of Population and Housing: Aboriginal and Torres Strait Islander Community Profile: ATSIC Regions" ; -. - - dcterms:creator ; - dcterms:description """The Aboriginal and Torres Strait Islander Community Profiles (ACPs) are tabulations giving key census characteristics of Aboriginal and Torres Strait Islander persons, families and dwellings, covering most topics on the 1991 Census of Population and Housing form. This profile is presented at the ATSIC Zone level. -The ACP consists of 29 tables which crosstabulate characteristics including gender, age, place of birth, religion, marital status, education, income, occupation and employment status.""" ; - dcterms:issued "2005-01-01"^^xsd:date ; - dcterms:publisher ; - dcterms:title "1991 Census of Population and Housing: Aboriginal and Torres Strait Islander Community Profile: ATSIC Zones" ; -. - - dcterms:creator ; - dcterms:description "ATSIDA is a specialised trusted research data management facility, and thematic archive within the Australian Data Archive for Australian Aboriginal and Torres Strait Islander research data managed by the UTS Library. ATSIDA provides a transformational research platform working at the nexus of researchers, communities and other stakeholders in preserving and ensuring ethical access to research data related to Indigenous communities. ATSIDA works with universities, government and other organisations to increase Indigenous student and staff research capacity, support Indigenous researchers and those working with Indigenous research data. It engages with communities to manage appropriate access and return of digital materials.", - "The Aboriginal and Torres Strait Islander Data Archive at the Australian Data Archive and ANU Archives. 
This was specifically mentioned in the NCRIS Roadmap as an existing strength to be built on. It needs staff at the Data Archive to fully curate and digitise these collections and make them web-accessible." ; - dcterms:issued "2008-01-01"^^xsd:date ; - dcterms:publisher , - ; - dcterms:title "ABORIGINAL & TORRES STRAIT ISLANDER DATA ARCHIVE", - "The Aboriginal and Torres Strait Islander Data Archive at ADA, ANU" ; -. - - dcterms:description "This looks like a mirror of the ADA archive. Many links are broken." ; - dcterms:publisher ; - dcterms:title "The Aboriginal and Torres Strait Islander Data Archive at Jumbunna, UTS" ; -. - - dcterms:creator ; - dcterms:description """Austlang provides information about Indigenous Australian languages which has been assembled from referenced sources. -The dataset provided here includes the language names, each with a unique alpha-numeric code which functions as a stable identifier, alternative/variant names and spellings and the approximate location of each language variety.""" ; - dcterms:publisher ; - dcterms:title "Austlang database." ; -. - - dcterms:creator ; - dcterms:description """The Indigenous Protected Areas (IPA) programme has demonstrated successes across a broad range of outcome areas, effectively overcoming barriers to addressing Indigenous disadvantage and engaging Indigenous Australians in meaningful employment to achieve large scale conservation outcomes, thus aligning the interests of Indigenous Australians and the broader community. - -The Birriliburu & Matuwa Kurrara Kurrara (MKK) IPAs have provided an opportunity for Martu people to reconnect with and actively manage their traditional country. - -The two IPAs have proved a useful tool with which to leverage third party investment, through a joint management arrangement with the Western Australia (WA) Government, project specific funding from environmental NGOs and mutually beneficial partnerships with the private sector. - -Increased and diversified investment from a range of funding sources would meet the high demand for Ranger jobs and could deliver a more expansive programme of works, which would, in turn, increase the social, economic and cultural outcomes for Martu Rangers and Community Members.""" ; - dcterms:issued "0601-01-01"^^xsd:date ; - dcterms:publisher ; - dcterms:title "SRI Investment Analysis of the Birriliburu and Matuwa Kurrara Kurrara Indigenous Protected Areas (2016)" ; -. - - dcterms:description "Historical population data and biographical records" ; - dcterms:publisher ; - dcterms:title "Briscoe-Smith Archive" ; -. - - dcterms:creator ; - dcterms:description """The Composite Gazetteer of Australia is a cloud-based system allowing users to easily discover, interrogate and download place names information from Australia and its external territories. It is developed as a partnership between contributing agencies of the Intergovernmental Committee on Surveying and Mapping (ICSM) and is built on modern infrastructure providing automated ingestion and validation, producing a composite dataset from the individual jurisdictional gazetteers. - -The place names database is a collection of jurisdictional data that is combined to create the Composite Gazetteer of Australia. Place name information is managed at a local level by jurisdictions. The place name database and the Composite Gazetteer of Australia are maintained by ICSM.""" ; - dcterms:issued "2018-01-02"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Compound Gazetteer of Australia" ; -. 
- - dcterms:creator ; - dcterms:description "The Cultural Heritage Parties dataset is the spatial representation of state-wide Aboriginal and Torres Strait Islander Native Title Party boundaries within Queensland as described under the Aboriginal Cultural Heritage Act 2003 and the Torres Strait Islander Cultural Heritage Act 2003 (the Acts)." ; - dcterms:issued "2022-08-08"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Cultural Heritage Party boundaries - Queensland" ; -. - - dcterms:description "Productivity Commissions data dashboard arising from the National Agreement on Closing the Gap." ; - dcterms:issued "2022-03-31"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Closing the gap information repository" ; -. - - dcterms:creator ; - dcterms:description "Norman B. Tindale ; tribal boundaries drawn by Winifred Mumford on a base map produced by the Division of National Mapping, Department of National Development, Canberra, Australia." ; - dcterms:issued "1974-01-01"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Distribution of the Aboriginal Tribes of Australia (1940)" ; -. - - dcterms:description "UTS has taken over this data, but needs help to turn it into an ongoing public database" ; - dcterms:publisher , - ; - dcterms:title "Aboriginal Deaths and Injuries in Custody" ; -. - - dcterms:description "Barry Hansen and Yothu Yindi Foundation have done extensive work on where the money goes in the NT. Needs to be a national database." ; - dcterms:publisher ; - dcterms:title "Expenditure on Indigenous Advancement" ; -. - - dcterms:description "(Torrens University). An earlier application with Marcia for AIATSIS funding was never considered." ; - dcterms:publisher ; - dcterms:title "GDP and Genuine Progress Indicator" ; -. - - dcterms:creator ; - dcterms:description "The Snapshot is an ongoing research project that links enterprises on Indigenous business registries to data held by the Australian Bureau of Statistics. It will enable us to track the industries, revenue, employment outcome and growth of Indigenous businesses. This report provides an unprecedented snapshot of the Indigenous business sector to help dismantle the many stereotypes and myths that have led to lost opportunities for Indigenous business growth. There is mention of an I-BLADE dataset." ; - dcterms:issued "2021-05-01"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Indigenous Business Sector Snapshot 1.1 Indigenous Businesses Sector Snapshot Study, Insights from I-BLADE 1.0" ; -. - - dcterms:creator ; - dcterms:description "Land that is owned or managed by Australia’s Indigenous communities, or over which Indigenous people have use and rights, was compiled from information supplied by Australian, state and territory governments and other statutory authorities with Indigenous land and sea management interests." ; - dcterms:issued "2019-04-03"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Indigenous Land and Sea Interests " ; -. - - dcterms:creator ; - dcterms:description "Registered & Notified Indigenous Land Use Agreements – (as per s. 24BH(1)(a), s. 24CH and s. 24DI(1)(a)) across Australia, The Central Resource for Sharing and Enabling Environmental Data in NSW" ; - dcterms:issued "2013-12-05"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Indigenous Land Use Agreement Boundaries with basic metadata and status" ; -. 
- - dcterms:description "Printed catalog highlighting ANU Indigenous Research activities at the time of publication" ; - dcterms:publisher ; - dcterms:title "Indigenous Research Compendium 2018" ; -. - - dcterms:description """Various projects from $10 million Indigenous Research Fund administered by AIATSIS. -A number of projects are described p13-15 here. -One might expect a number of these would give rise to relevant data collections and information on methods. -Each of these projects should be catalogued? Or not?""" ; - dcterms:publisher ; - dcterms:title "Indigenous Research Exchange/Knowledge Exchange Platform" ; -. - - dcterms:creator ; - dcterms:description """Sandra Silcot has identified the steps required to make this fully maintainable and sustainable. -Koori Health Research Database (Janet McCalman) traces BDM of 7,800 Aboriginals in Victoria & New South Wales Australia from 19th Century to the present. It is built from Yggdrasil, an existing open-source web database application designed for large population studies of family history https://rdxx.org/notes.sandra/khrd/slides/khrd-apa2012-talk.pdf.html""" ; - dcterms:publisher ; - dcterms:title "The Koori Health Research Database" ; -. - - dcterms:creator ; - dcterms:description """The Mayi Kuwayu Study looks at how Aboriginal and Torres Strait Islander wellbeing is linked to things like connection to country, cultural practices, spirituality and language use. -Our research team follows a large number of Aboriginal and Torres Strait Islander people and asks about their culture and wellbeing. As a longitudinal study, we are surveying people and then ask them to take the same survey every few years, so that we can understand what influences changes over time. -This is the first time a national study of this type has been done and will provide an evidence base to allow for the creation of better policies and programs. -This study has been created by and for Aboriginal and Torres Strait Islander people. It is an Aboriginal and Torres Strait Islander controlled research resource. -The Mayi Kuwayu team are experienced at working closely with communities across Australia, and the study has majority Aboriginal and Torres Strait Islander staffing and study governance (decision making) structure.""" ; - dcterms:issued "2018-01-01"^^xsd:date ; - dcterms:publisher ; - dcterms:title "The National Study of Aboriginal and Torres Strait Islander Wellbeing" ; -. - - dcterms:description "These are extensive paper records which Ian Anderson has proposed incorporating in a database. Negotiation is still needed." ; - dcterms:publisher ; - dcterms:title "Tasmanian Aboriginal genealogies" ; -. - - dcterms:creator ; - dcterms:description "The Historical Census and Colonial Data Archive (HCCDA) is an archive of Australian colonial census publications and reports covering the period from 1833 to 1901, the year of Australia's federation. The corpus includes 18,638 pages of text, and approximately 15000 tables, all with full digital images, text conversion and individually identified pages and tables. Please note that the archive contains colonial census reports, but not individual census returns." ; - dcterms:issued "1833-07-09"^^xsd:date ; - dcterms:publisher ; - dcterms:title "The Historical Census and Colonial Data Archive" ; -. 
- - dcterms:creator ; - dcterms:description "Noongar Boodjar Language Centre (NBLC) in Perth have partnered with the Atlas of Living Australia to link Noongar-Wudjari language and knowledge for plants and animals to western science knowledge to create the Noongar-Wudjari Plant and Animal online Encyclopedia. This project focused on the Noongar-Wudjari clan, from the South coast of WA, and worked specifically with Wudjari knowledge holders - Lynette Knapp and Gail Yorkshire to record, preserve and share their ancestral language and knowledge about plants and animals. Knowledge and language for 90 plants and animals were collected and are now ready for publication through the Atlas of Living Australia (ala.org.au)." ; - dcterms:publisher ; - dcterms:title "Noongar Boodjar Plants and Animals" ; -. - - dcterms:creator ; - dcterms:description """We are making a national resource for Indigenous health and heritage, which is based on our collection of biological samples, genome data and documents from Indigenous communities in many parts of Australia. You can find out more about NCIG and its collections at ncig.anu.edu.au. - -Information in these collections tells two kinds of stories. - -We are working with Indigenous communities to decide how to tell the stories of the people who are represented in the collection. We do not make personal information available, but the website lets you know what collections we have and how to contact us if you want to know more. - -There is also the story about how the collection was made and how it can be useful to researchers and other people. - -This website helps to tell this second story by making some records and documents from the collection openly available. There is information about the people who collected the samples and made the records, why they carried out their studies, the places they visited and some of the results of their studies.""" ; - dcterms:issued "2015-01-01"^^xsd:date ; - dcterms:publisher ; - dcterms:title "National Centre for Indigenous Genomics data" ; -. - - dcterms:creator ; - dcterms:description "NSW prison population data and quarterly custody reports" ; - dcterms:issued "2022-08-01"^^xsd:date ; - dcterms:publisher ; - dcterms:title "NSW Custody Statistics" ; -. - - dcterms:description "Existing database at the National Library" ; - dcterms:publisher ; - dcterms:title "People Australia" ; -. - - dcterms:description "Databases held by the NNTT" ; - dcterms:publisher ; - dcterms:title "Native Title Databases at the National Native Title Tribunal" ; -. - - dcterms:description "This comprises records of about 70,000 Indigenous and 30,000 non-Indigenous people surveyed in the 1970s and 1980s. Some paper records are held at AIATSIS. Microfilms of others are at UNSW Archives. There have been preliminary discussions with AIATSIS, the National Library and former members of the Hollows team about a program to digitise the records. IDN staff/resources would be needed." ; - dcterms:publisher , - ; - dcterms:title "The Fred Hollows Archive (National Trachoma and Eye Health Program)" ; -. - - dcterms:creator ; - dcterms:description """Conference powerpoint presentation - -Case study in exemplary IDG. 
-- Survey of native title prescribed bodies corporate (PBCs) -- Collect data on PBCs’ capacity, capabilities, needs and aspirations to better inform policies that affect PBCs -- Started data collection May 2019, to finish in 3rd quarter 2019""" ; - dcterms:issued "2019-07-03"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Prescribed bodies corporate (PBCs) Survey 2019" ; -. - - dcterms:publisher ; - dcterms:title "AG Productivity Commission - Report on Government Services: Indigenous Compendium reports 2005-2015" ; -. - - dcterms:description "This dataset is of police offences by Aboriginals in Western Australia" ; - dcterms:publisher ; - dcterms:title "Police Offenses WA (Erin Mathews)" ; -. - - dcterms:creator ; - dcterms:description """Aboriginal and Torres Strait Islander people are the Indigenous people of Australia. They are not one group, but comprise hundreds of groups that have their own distinct set of languages, histories and cultural traditions. - -AIHW reports and other products include information about Indigenous Australians, where data quality permits. Thus, information and statistics about Indigenous Australians can be found in most AIHW products. - -In December 2021, AIHW released the Regional Insights for Indigenous Communities (RIFIC). The aim of this website is to provide access to data at a regional level, to help communities set their priorities and participate in joint planning with government and service providers. - -AIHW products that focus specifically on Indigenous Australians are captured on this page.""" ; - dcterms:issued "1101-01-01"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Regional Insights for Indigenous Communities" ; -. - - dcterms:creator ; - dcterms:description """Data workbooks presenting the latest Social Health Atlases of Australia are available for the whole of Australia by Population Health Area, Local Government Area, and Primary Health Network, and by Indigenous Area for the Aboriginal & Torres Strait Islander population. Data are also available by Quintile of Socioeconomic Disadvantage of Area (current period and time series), and Remoteness Area (current period and time series), for both the whole population, and the Aboriginal & Torres Strait Islander population (current period only). - -These workbooks are derived from ABS Census data releases.""" ; - dcterms:issued "2022-06"^^xsd:gYearMonth ; - dcterms:publisher ; - dcterms:title "Social Health Atlases of Australia" ; -. - - dcterms:creator ; - dcterms:description "Summarises all available aerial survey data and metadata used to characterise the long-term distribution and abundance of magpie geese in the Northern Territory undertaken by different institutions and publically available in several journals (Appendix A). Summarised also are results from a PhD study (E. Ligtermoet) documenting the cultural harvesting values of magpie geese ascertained by interviews with Kakadu Traditional Owners (2011-2015)." ; - dcterms:issued "2016-12-15"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Supplementary Material used to characterise the spatial and temporal dynamics of magpie goose populations in the Kakadu Region NT and their cultural harvesting values" ; -. - - dcterms:creator ; - dcterms:description "The Minyumai Indigenous Protected Areas (IPA) has provided an opportunity for the Bandjalang clan to re-engage with culture and language through country. 
Through land and fire management work, Bandjalang traditional owners have seen the restoration of native plants and animals that were thought to have been lost. Their return serves as a powerful reminder of the resilience of the Bandjalang people and enables them to better understand themselves, their culture, and their place in the world. The IPA programme has demonstrated successes across a broad range of outcome areas, effectively overcoming barriers to addressing Indigenous disadvantage and engaging Indigenous Australians in meaningful employment to achieve large scale conservation outcomes, thus aligning the interests of Indigenous Australians and the broader community." ; - dcterms:issued "0601-01-01"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Social Return on Investment analysis of the Minyumai Indigenous Protected Area" ; -. - - dcterms:description "Access still to be negotiated with the Museum." ; - dcterms:publisher ; - dcterms:title "The Sandra Smith Archive" ; -. - - dcterms:description "Strong demand but controversial." ; - dcterms:publisher ; - dcterms:title "Tindale/Horton map" ; -. - - dcterms:description """TLCMap is a set of tools that work together for mapping Australian history and culture. - -Note that historical placenames in TLCmap is a HASS-I integration activity.""" ; - dcterms:publisher ; - dcterms:title "Time Layered Cultural Map of Australia" ; -. - - dcterms:creator ; - dcterms:description """The Victorian Perinatal Data Collection (VPDC) is a population-based surveillance system that collects for analysis comprehensive information on the health of mothers and babies, in order to contribute to improvements in their health. - -The VPDC contains information on obstetric conditions, procedures and outcomes, neonatal morbidity and congenital anomalies relating to every birth in Victoria. - -This data is reported annually to the AIHW as part of the National Perinatal Data Collection managed by the AIHW. The AIHW produces the annual report Australia’s mothers and babies, using the National Perinatal Data Collection and other data.""" ; - dcterms:issued "2022-01-07"^^xsd:date ; - dcterms:publisher ; - dcterms:title "The Victorian Perinatal database" ; -. - - dcterms:description """This was nominated by Sandra Eades. Investigation, documentation and negotiation needed. - -https://www.datalinkage-wa.org.au/dlb-services/derived-indigenous-status-flag/ ?""" ; - dcterms:title "Western Australia Linked Data" ; -. - - dcterms:creator ; - dcterms:description "In 2012, the remote Aboriginal community of Wilcannia in western NSW hosted the first Australian pilot of a Cuban mass adult literacy campaign model known as Yes I Can. The aim was to investigate the appropriateness of this model in Aboriginal Australia. Building on an intensive community development process of ‘socialisation and mobilisation’, sixteen community members with very low literacy graduated from the basic literacy course, with the majority continuing on into post-literacy activities, further training and/or employment." ; - dcterms:issued "2013-06-01"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Aboriginal adult literacy campaign - Wilcannia Pilot Project Final Evaluation Report" ; -. - - dcterms:creator ; - dcterms:description """The Yawuru Knowing Our Community (YKC) Household Survey was commissioned by the Nyamba Buru Yawuru Board of Directors in December 2010. This report and associated data base are the property of the NBY Board. 
The report was designed and produced by The Kimberley Institute, Centre for Aboriginal Economic Policy Research at The Australian National University, and the Broome Aboriginal community. -In September 2010, the NBY Board resolved to undertake a comprehensive population survey of Broome to inform the Board’s investment strategy, particularly on social housing.""" ; - dcterms:issued "2011-01-01"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Yawuru Knowing Our Community Household Survey" ; -. - - dcterms:creator ; - dcterms:description """Yumi Sabe is an Australian Kriol term that translates to 'we know', or, 'we have the knowledge'. - -Yumi Sabe is an Indigenous Knowledge Exchange that helps Indigenous communities, researchers and policy makers to access and use data to inform and improve policies and programs and demonstrate the complexity and diversity of Aboriginal and Torres Strait Islander peoples', research and culture. - -This is a beta product that is still being refined and developed. Please contact us if you have any issues or feedback.""" ; - dcterms:issued "2022-07-04"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Indigenous Research Exchange Platform" ; -. - - dcterms:creator ; - dcterms:description "The Australia's Indigenous land and forest estate (2020) is a continental spatial dataset that identifies and reports separately the individual attributes of Australia's Indigenous estate, namely the extent of land and forest over which Indigenous peoples and communities have ownership, management and co-management, or other special rights." ; - dcterms:issued "0301-01-01"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Australia's Indigenous land and forest estate (2020)" ; -. - - rdfs:label "Agreements Treaties and Negotiated Settlements" . - - rdfs:label "ATSIDA" ; - schema:name "ATSIDA.1" . - - rdfs:label "Indigenous Studies Unit" . - - dcterms:creator ; - dcterms:description """Tandana is owned and managed by the National Aboriginal Cultural Institute Inc. It is Australia’s oldest Aboriginal-owned and managed multi-arts centre. -As Tandana is government funded it reports annually on the funding supplied and its distribution.""" ; - dcterms:issued "2018-01-10"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Tandanya Annual Reporting Regulatory Data" ; -. - - dcterms:creator ; - dcterms:description "Indigenous Areas (IAREs) are medium sized geographic areas built from whole Indigenous Locations. They are designed for the release and analysis of more detailed statistics for Aboriginal and Torres Strait Islander people. Whole Indigenous Areas aggregate to form Indigenous Regions."@en ; - dcterms:issued "2021-10-06"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Indigenous Areas within the ASGS" ; -. - - dcterms:creator ; - dcterms:description """This is a reference geospatial dataset developed by the Australian Bureau of Statistics which provides the most granular form of Indigenous Structure represented in the Australian Statistical Geography Standard (ASGS), currently at Edition 3 (2021). Indigenous Locations (ILOCs) are designed to allow the production and analysis of statistics relating to Aboriginal and Torres Strait Islander people with a high level of spatial accuracy, while also maintaining the confidentiality of individuals. It has been designed in consultation with the ABS Centre for Aboriginal and Torres Strait Islander Statistics to incorporate statistical and community requirements wherever possible. 
- -ILOCs are geographic areas built from whole Statistical Areas Level 1 (SA1s). They are designed to represent small Aboriginal and Torres Strait Islander communities (urban and rural) that are near each other or that share language, traditional borders or Native Title. They usually have a minimum population of about 90 people. In some cases, Indigenous Locations have a smaller Aboriginal and Torres Strait Islander population to meet statistical requirements or to better represent the local community. - -Where a community is too small for confidentiality requirements, it is combined with another, related population. Remaining Statistical Areas Level 1 are combined into larger areas, which will include a more dispersed Aboriginal and Torres Strait Islander population. - -In some cases, Aboriginal and Torres Strait Islander communities that are too small to be identified separately have been combined with other nearby and associated communities. This has resulted in some multi-part Indigenous Locations where related communities are represented as a single Indigenous Location but are geographically separate. This enables the release of Census of Population and Housing data and other data for Aboriginal and Torres Strait Islander communities in a meaningful way, while balancing confidentiality and statistical requirements. - -There are 1,139 ILOCs covering the whole of Australia without gaps or overlaps. Whole ILOCs aggregate to form Indigenous Areas (IAREs). Whole Indigenous Areas aggregate to form Indigenous Regions (IREGs). - -Indigenous Locations are identified by eight-digit hierarchical codes consisting of a one-digit State or Territory identifier, followed by a two-digit Indigenous Region identifier, a three-digit Indigenous Area identifier and finally a two-digit Indigenous Location identifier. Within each Indigenous Area, Indigenous Location identifiers are unique. When change occurs, old codes are retired and the next available identifier is assigned. - -Shapefiles for Indigenous Locations and other components of the ABS's Indigenous Structure are available: https://www.abs.gov.au/statistics/standards/australian-statistical-geography-standard-asgs-edition-3/jul2021-jun2026/access-and-downloads/digital-boundary-files - -This catalog entry refers to the latest ASGS release. For all releases refer to the ABS: https://www.abs.gov.au/statistics/standards/australian-statistical-geography-standard-asgs-edition-3"""@en ; - dcterms:issued "2021-10-06"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Indigenous Locations within the Australian Statistical Geography Standard (ASGS) Edition 3" ; -. - - dcterms:creator ; - dcterms:description "Indigenous Regions (IREGs) are large geographic areas built from whole Indigenous Areas and are based on historical boundaries. The larger population of Indigenous Regions enables highly detailed analysis."@en ; - dcterms:issued "2021-10-06"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Indigenous Regions within the ASGS" ; -. - - schema:name "Services Australia" . - - rdfs:label "University of Melbourne" ; - schema:description "The University of Melbourne is a public research university located in Melbourne, Australia. Founded in 1853, it is Australia's second oldest university and the oldest in Victoria." ; - schema:name "The University of Melbourne" . - - schema:name "Marcia Langton" . 
-
- dcterms:description """Aboriginal and Torres Strait Islander collections, including the Mountford-Sheard Collection INDIGENOUS COLLECTIONS
-The State Library has a significant and developing amount of specialist material relating to Aboriginal and Torres Strait Islander people including the Mountford-Sheard Collection.
-The papers of the Mountford-Sheard Collection which comprise an extensive collection of Charles P. Mountford's expedition journals, photographs, film, sound recordings, artworks, objects and research. The papers were compiled with the assistance and encouragement of friend and colleague Harold L Sheard. Mountford developed his appreciation of Australian Aboriginal people and their customs, beliefs and art over many years of expeditions, making it his life's work.""" ;
- dcterms:title "Mountford-Sheard Collection" ;
-.
-
- dcterms:creator ;
- dcterms:description "The Deebing Creek mission was founded by the Aboriginal Protection Society of Ipswich. Work started on the establishment of an Aboriginal mission at Deebing Creek around 1887. The correspondence records of the Home Secretary’s Office, Chief Protector of Aboriginals and the Southern Protector of Aboriginals Offices are a valuable source of information relating to Deebing Creek." ;
- dcterms:issued "2501-01-01"^^xsd:date ;
- dcterms:publisher ;
- dcterms:title "Correspondence relating to Aboriginal and Torres Strait Islander people - Deebing Creek explanatory notes" ;
-.
-
- dcterms:creator ;
- dcterms:description """This dataset details the Dedicated Indigenous Protected Areas (IPA) across Australia through the implementation of the Indigenous Protected Areas Programme. These boundaries are not legally binding.
-An Indigenous Protected Area (IPA) is an area of Indigenous-owned land or sea where traditional Indigenous owners have entered into an agreement with the Australian Government to promote biodiversity and cultural resource conservation- making up over half of Australia's National Reserve System.
-
-Further information can be found at the website below.
-
-https://www.awe.gov.au/agriculture-land/land/indigenous-protected-areas""" ;
- dcterms:issued "2201-01-01"^^xsd:date ;
- dcterms:publisher ;
- dcterms:title "Indigenous Protected Areas (IPA) - Dedicated" ;
-.
-
- schema:name "Australian Government" .
-
- rdfs:label "Indigenous Data Network" ;
- schema:description "The IDN is within the University of Melbourne. It was established in 2018 to support and coordinate the governance of Indigenous data for Aboriginal and Torres Strait Islander peoples and empower Aboriginal and Torres Strait Islander communities to decide their own local data priorities.",
- "The Indigenous Data Network (IDN) was established in 2018 to support and coordinate the governance of Indigenous data for Aboriginal and Torres Strait Islander peoples and empower Aboriginal and Torres Strait Islander communities to decide their own local data priorities."@en ;
- schema:name "Indigenous Data Network" .
-
- schema:name "Australian Bureau of Statistics" .
-
- rdfs:label "AIATSIS" .
-
- rdfs:label "Australian National University" ;
- schema:description "ANU is a world-leading university in Australia’s capital. Excellence is embedded in our approach to research and education." ;
- schema:name "Australian National University" .
- diff --git a/tests/data/cql/input/example06a.json b/tests/data/cql/input/example06a.json deleted file mode 100644 index 8adeb429..00000000 --- a/tests/data/cql/input/example06a.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "op": "and", - "args": [ - { - "op": "between", - "args": [ - { "property": "eo:cloud_cover" }, - 0.1, 0.2 - ] - }, - { - "op": "=", - "args": [ - { "property": "landsat:wrs_row" }, - 28 - ] - }, - { - "op": "=", - "args": [ - { "property": "landsat:wrs_path" }, - 203 - ] - } - ] -} diff --git a/tests/data/cql/input/example08.json b/tests/data/cql/input/example08.json new file mode 100644 index 00000000..8eb14bd9 --- /dev/null +++ b/tests/data/cql/input/example08.json @@ -0,0 +1,48 @@ +{ + "op": "and", + "args": [ + { + "op": "=", + "args": [ + { "property": "beamMode" }, + "ScanSAR Narrow" + ] + }, + { + "op": "=", + "args": [ + { "property": "swathDirection" }, + "ascending" + ] + }, + { + "op": "=", + "args": [ + { "property": "polarization" }, + "HH+VV+HV+VH" + ] + }, + { + "op": "s_intersects", + "args": [ + { + "property": "footprint" + }, + { + "type": "Polygon", + "coordinates": [ + [ [ -77.117938, 38.93686 ], + [ -77.040604, 39.995648 ], + [ -76.910536, 38.892912 ], + [ -77.039359, 38.791753 ], + [ -77.047906, 38.841462 ], + [ -77.034183, 38.840655 ], + [ -77.033142, 38.85749 ], + [ -77.117938, 38.93686 ] + ] + ] + } + ] + } + ] +} diff --git a/tests/data/cql/input/example09.json b/tests/data/cql/input/example09.json new file mode 100644 index 00000000..60d0c83b --- /dev/null +++ b/tests/data/cql/input/example09.json @@ -0,0 +1,7 @@ +{ + "op": ">", + "args": [ + { "property": "floors" }, + 5 + ] +} diff --git a/tests/data/cql/input/example10.json b/tests/data/cql/input/example10.json new file mode 100644 index 00000000..8a0db022 --- /dev/null +++ b/tests/data/cql/input/example10.json @@ -0,0 +1,7 @@ +{ + "op": "<=", + "args": [ + { "property": "taxes" }, + 500 + ] +} diff --git a/tests/data/cql/input/example11.json b/tests/data/cql/input/example11.json new file mode 100644 index 00000000..d605fe18 --- /dev/null +++ b/tests/data/cql/input/example11.json @@ -0,0 +1,7 @@ +{ + "op": "like", + "args": [ + { "property": "owner" }, + "%Jones%" + ] +} diff --git a/tests/data/cql/input/example12.json b/tests/data/cql/input/example12.json new file mode 100644 index 00000000..e7bca46e --- /dev/null +++ b/tests/data/cql/input/example12.json @@ -0,0 +1,7 @@ +{ + "op": "like", + "args": [ + { "property": "owner" }, + "Mike%" + ] +} diff --git a/tests/data/cql/input/example14.json b/tests/data/cql/input/example14.json new file mode 100644 index 00000000..129dce86 --- /dev/null +++ b/tests/data/cql/input/example14.json @@ -0,0 +1,7 @@ +{ + "op": "=", + "args": [ + { "property": "swimming_pool" }, + true + ] +} diff --git a/tests/data/cql/input/example15.json b/tests/data/cql/input/example15.json new file mode 100644 index 00000000..8f7704c7 --- /dev/null +++ b/tests/data/cql/input/example15.json @@ -0,0 +1,19 @@ +{ + "op": "and", + "args": [ + { + "op": ">", + "args": [ + { "property": "floor" }, + 5 + ] + }, + { + "op": "=", + "args": [ + { "property": "swimming_pool" }, + true + ] + } + ] +} diff --git a/tests/data/cql/input/example17.json b/tests/data/cql/input/example17.json new file mode 100644 index 00000000..67b4bb45 --- /dev/null +++ b/tests/data/cql/input/example17.json @@ -0,0 +1,31 @@ +{ + "op": "or", + "args": [ + { + "op": "and", + "args": [ + { + "op": ">", + "args": [ + { "property": "floors" }, + 5 + ] + }, + { + "op": "=", + "args": [ + { "property": "material" }, 
+ "brick" + ] + } + ] + }, + { + "op": "=", + "args": [ + { "property": "swimming_pool" }, + true + ] + } + ] +} diff --git a/tests/data/cql/input/example29.json b/tests/data/cql/input/example29.json new file mode 100644 index 00000000..dbb44221 --- /dev/null +++ b/tests/data/cql/input/example29.json @@ -0,0 +1,7 @@ +{ + "op": "=", + "args": [ + { "property": "id" }, + "fa7e1920-9107-422d-a3db-c468cbc5d6df" + ] +} diff --git a/tests/data/cql/input/example31.json b/tests/data/cql/input/example31.json new file mode 100644 index 00000000..6c2c03a0 --- /dev/null +++ b/tests/data/cql/input/example31.json @@ -0,0 +1,7 @@ +{ + "op": "<", + "args": [ + { "property": "value" }, + 10 + ] +} diff --git a/tests/data/cql/input/example32.json b/tests/data/cql/input/example32.json new file mode 100644 index 00000000..ef1b6cad --- /dev/null +++ b/tests/data/cql/input/example32.json @@ -0,0 +1,7 @@ +{ + "op": ">", + "args": [ + { "property": "value" }, + 10 + ] +} diff --git a/tests/data/cql/input/example33.json b/tests/data/cql/input/example33.json new file mode 100644 index 00000000..12cfdc21 --- /dev/null +++ b/tests/data/cql/input/example33.json @@ -0,0 +1,7 @@ +{ + "op": "<=", + "args": [ + { "property": "value" }, + 10 + ] +} diff --git a/tests/data/cql/input/example34.json b/tests/data/cql/input/example34.json new file mode 100644 index 00000000..a8d6ca23 --- /dev/null +++ b/tests/data/cql/input/example34.json @@ -0,0 +1,7 @@ +{ + "op": ">=", + "args": [ + { "property": "value" }, + 10 + ] +} diff --git a/tests/data/cql/input/example35.json b/tests/data/cql/input/example35.json new file mode 100644 index 00000000..3b6fee0e --- /dev/null +++ b/tests/data/cql/input/example35.json @@ -0,0 +1,7 @@ +{ + "op": "like", + "args": [ + { "property": "name" }, + "foo%" + ] +} diff --git a/tests/data/cql/input/example39.json b/tests/data/cql/input/example39.json new file mode 100644 index 00000000..b24ff974 --- /dev/null +++ b/tests/data/cql/input/example39.json @@ -0,0 +1,7 @@ +{ + "op": "in", + "args": [ + { "property": "value" }, + [ 1.0, 2.0, 3.0 ] + ] +} diff --git a/tests/data/spaceprez/expected_responses/dataset_anot.ttl b/tests/data/spaceprez/expected_responses/dataset_anot.ttl index e1d2600f..1e367d30 100644 --- a/tests/data/spaceprez/expected_responses/dataset_anot.ttl +++ b/tests/data/spaceprez/expected_responses/dataset_anot.ttl @@ -18,7 +18,8 @@ , , ; - prez:link "/s/datasets/exds:sandgate" . + prez:link "/s/datasets/exds:sandgate" ; + prez:members [ prez:link "/s/datasets/exds:sandgate/collections" ] . dcterms:description rdfs:label "Description"@en ; dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . @@ -46,7 +47,8 @@ skos:definition rdfs:label "definition"@en ; skos:prefLabel rdfs:label "preferred label"@en ; skos:definition "The preferred lexical label for a resource, in a given language."@en . - dcterms:description "Hydrological catchments that are 'contracted', that is, guarenteed, to appear on multiple Geofabric surface hydrology data products"@en ; + rdfs:label "Geofabric Contracted Catchments"@en ; + dcterms:description "Hydrological catchments that are 'contracted', that is, guarenteed, to appear on multiple Geofabric surface hydrology data products"@en ; dcterms:identifier "sndgt:catchments"^^prez:identifier ; dcterms:title "Geofabric Contracted Catchments"@en ; prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments" . 
@@ -54,21 +56,18 @@ skos:prefLabel rdfs:label "preferred label"@en ; rdfs:label "Sandgate are demo Facilities"@en ; dcterms:description "Sandgate area demo Facilities"@en ; dcterms:identifier "sndgt:facilities"^^prez:identifier ; - dcterms:identifier "sndgt:facilities"^^prez:identifier ; dcterms:title "Sandgate are demo Facilities"@en ; prez:link "/s/datasets/exds:sandgate/collections/sndgt:facilities" . rdfs:label "Sandgate flooded areas"@en ; dcterms:description "Sandgate flooded areas"@en ; dcterms:identifier "sndgt:floods"^^prez:identifier ; - dcterms:identifier "sndgt:floods"^^prez:identifier ; dcterms:title "Sandgate flooded areas"@en ; prez:link "/s/datasets/exds:sandgate/collections/sndgt:floods" . rdfs:label "Sandgate main roads"@en ; dcterms:description "Sandgate main roads"@en ; dcterms:identifier "sndgt:roads"^^prez:identifier ; - dcterms:identifier "sndgt:roads"^^prez:identifier ; dcterms:title "Sandgate main roads"@en ; prez:link "/s/datasets/exds:sandgate/collections/sndgt:roads" . diff --git a/tests/data/spaceprez/expected_responses/feature_anot.ttl b/tests/data/spaceprez/expected_responses/feature_anot.ttl index 9975acf5..28d7f7de 100644 --- a/tests/data/spaceprez/expected_responses/feature_anot.ttl +++ b/tests/data/spaceprez/expected_responses/feature_anot.ttl @@ -13,17 +13,9 @@ rdfs:label "Geofabric Contracted Catchments"@en ; dcterms:description "Hydrological catchments that are 'contracted', that is, guarenteed, to appear on multiple Geofabric surface hydrology data products"@en ; dcterms:identifier "sndgt:catchments"^^prez:identifier ; - dcterms:title "Geofabric Contracted Catchments"@en . - - a geo:Feature, - ; - rdfs:label "Contracted Catchment 12109444" ; - dcterms:identifier "cc12109444"^^xsd:token, - "sndgt:cc12109444"^^prez:identifier ; - geo:hasGeometry [ a geo:Geometry ; - geo:asGeoJSON "{\"type\": \"Polygon\", \"coordinates\": [[[153.06, -27.28], [153.06, -27.2825], [153.0625, -27.2825], [153.065, -27.2825], [153.065, -27.305], [153.0675, -27.305], [153.0675, -27.31], [153.07, -27.31], [153.07, -27.3125], [153.0725, -27.3125], [153.0725, -27.3175], [153.075, -27.3175], [153.075, -27.32], [153.0775, -27.32], [153.0775, -27.3225], [153.08, -27.3225], [153.085, -27.3225], [153.085, -27.325], [153.0875, -27.325], [153.0875, -27.33], [153.085, -27.33], [153.0825, -27.33], [153.0825, -27.3325], [153.085, -27.3325], [153.085, -27.335], [153.0875, -27.335], [153.09, -27.335], [153.09, -27.3375], [153.0925, -27.3375], [153.0925, -27.34], [153.0975, -27.34], [153.0975, -27.3425], [153.1025, -27.3425], [153.1025, -27.345], [153.1075, -27.345], [153.1075, -27.3475], [153.11, -27.3475], [153.1125, -27.3475], [153.115, -27.3475], [153.115, -27.35], [153.12, -27.35], [153.12, -27.3525], [153.125, -27.3525], [153.125, -27.355], [153.13, -27.355], [153.13, -27.3575], [153.135, -27.3575], [153.135, -27.36], [153.1375, -27.36], [153.1425, -27.36], [153.1475, -27.36], [153.1525, -27.36], [153.1525, -27.3625], [153.155, -27.3625], [153.155, -27.365], [153.1575, -27.365], [153.1575, -27.375], [153.16, -27.375], [153.16, -27.3775], [153.16, -27.38], [153.1575, -27.38], [153.155, -27.38], [153.155, -27.3825], [153.1525, -27.3825], [153.1525, -27.385], [153.15, -27.385], [153.15, -27.3875], [153.145, -27.3875], [153.145, -27.39], [153.1425, -27.39], [153.1425, -27.3925], [153.14, -27.3925], [153.14, -27.395], [153.14, -27.3975], [153.14, -27.4], [153.1375, -27.4], [153.1375, -27.4025], [153.135, -27.4025], [153.135, -27.405], [153.135, -27.4075], [153.135, -27.4125], [153.135, 
-27.415], [153.13, -27.415], [153.13, -27.4175], [153.1275, -27.4175], [153.1225, -27.4175], [153.1225, -27.42], [153.1175, -27.42], [153.1125, -27.42], [153.1125, -27.4175], [153.11, -27.4175], [153.11, -27.415], [153.1075, -27.415], [153.1075, -27.4125], [153.0975, -27.4125], [153.0975, -27.415], [153.0925, -27.415], [153.0875, -27.415], [153.085, -27.415], [153.08, -27.415], [153.08, -27.4125], [153.0775, -27.4125], [153.0775, -27.41], [153.075, -27.41], [153.075, -27.405], [153.07, -27.405], [153.07, -27.4025], [153.0675, -27.4025], [153.0675, -27.4], [153.065, -27.4], [153.065, -27.3975], [153.0625, -27.3975], [153.0625, -27.395], [153.06, -27.395], [153.06, -27.3925], [153.0275, -27.3925], [153.0275, -27.395], [153.025, -27.395], [153.025, -27.3975], [153.0175, -27.3975], [153.0175, -27.4], [153.0125, -27.4], [153.0125, -27.4025], [153.005, -27.4025], [153.005, -27.405], [153.0025, -27.405], [152.9975, -27.405], [152.9975, -27.4025], [152.9925, -27.4025], [152.9925, -27.4], [152.9875, -27.4], [152.9825, -27.4], [152.9825, -27.3975], [152.98, -27.3975], [152.98, -27.3925], [152.975, -27.3925], [152.975, -27.3875], [152.97, -27.3875], [152.96, -27.3875], [152.96, -27.39], [152.955, -27.39], [152.955, -27.3925], [152.945, -27.3925], [152.94, -27.3925], [152.9375, -27.3925], [152.9375, -27.39], [152.925, -27.39], [152.925, -27.385], [152.925, -27.3825], [152.93, -27.3825], [152.9325, -27.3825], [152.9325, -27.38], [152.9375, -27.38], [152.9375, -27.3825], [152.94, -27.3825], [152.94, -27.38], [152.9475, -27.38], [152.9475, -27.3825], [152.9525, -27.3825], [152.9525, -27.38], [152.965, -27.38], [152.9675, -27.38], [152.9675, -27.3775], [152.98, -27.3775], [152.98, -27.375], [152.9825, -27.375], [152.9825, -27.3725], [152.985, -27.3725], [152.985, -27.37], [152.9875, -27.37], [152.9875, -27.3675], [152.99, -27.3675], [152.99, -27.3625], [152.9925, -27.3625], [152.9925, -27.355], [152.995, -27.355], [152.995, -27.3525], [153, -27.3525], [153, -27.35], [153.005, -27.35], [153.01, -27.35], [153.01, -27.3475], [153.0175, -27.3475], [153.0175, -27.335], [153.02, -27.335], [153.02, -27.33], [153.0225, -27.33], [153.0225, -27.3275], [153.025, -27.3275], [153.025, -27.325], [153.0275, -27.325], [153.0275, -27.3225], [153.03, -27.3225], [153.03, -27.32], [153.0325, -27.32], [153.0325, -27.3175], [153.035, -27.3175], [153.035, -27.305], [153.0375, -27.305], [153.0375, -27.3], [153.04, -27.3], [153.04, -27.2975], [153.0425, -27.2975], [153.0425, -27.2825], [153.04, -27.2825], [153.04, -27.28], [153.0425, -27.28], [153.05, -27.28], [153.06, -27.28]]]}"^^geo:geoJSONLiteral ; - geo:asWKT "POLYGON ((153.06 -27.28, 153.06 -27.2825, 153.0625 -27.2825, 153.065 -27.2825, 153.065 -27.305, 153.0675 -27.305, 153.0675 -27.31, 153.07 -27.31, 153.07 -27.3125, 153.0725 -27.3125, 153.0725 -27.3175, 153.075 -27.3175, 153.075 -27.32, 153.0775 -27.32, 153.0775 -27.3225, 153.08 -27.3225, 153.085 -27.3225, 153.085 -27.325, 153.0875 -27.325, 153.0875 -27.33, 153.085 -27.33, 153.0825 -27.33, 153.0825 -27.3325, 153.085 -27.3325, 153.085 -27.335, 153.0875 -27.335, 153.09 -27.335, 153.09 -27.3375, 153.0925 -27.3375, 153.0925 -27.34, 153.0975 -27.34, 153.0975 -27.3425, 153.1025 -27.3425, 153.1025 -27.345, 153.1075 -27.345, 153.1075 -27.3475, 153.11 -27.3475, 153.1125 -27.3475, 153.115 -27.3475, 153.115 -27.35, 153.12 -27.35, 153.12 -27.3525, 153.125 -27.3525, 153.125 -27.355, 153.13 -27.355, 153.13 -27.3575, 153.135 -27.3575, 153.135 -27.36, 153.1375 -27.36, 153.1425 -27.36, 153.1475 -27.36, 153.1525 -27.36, 153.1525 -27.3625, 
153.155 -27.3625, 153.155 -27.365, 153.1575 -27.365, 153.1575 -27.375, 153.16 -27.375, 153.16 -27.3775, 153.16 -27.38, 153.1575 -27.38, 153.155 -27.38, 153.155 -27.3825, 153.1525 -27.3825, 153.1525 -27.385, 153.15 -27.385, 153.15 -27.3875, 153.145 -27.3875, 153.145 -27.39, 153.1425 -27.39, 153.1425 -27.3925, 153.14 -27.3925, 153.14 -27.395, 153.14 -27.3975, 153.14 -27.4, 153.1375 -27.4, 153.1375 -27.4025, 153.135 -27.4025, 153.135 -27.405, 153.135 -27.4075, 153.135 -27.4125, 153.135 -27.415, 153.13 -27.415, 153.13 -27.4175, 153.1275 -27.4175, 153.1225 -27.4175, 153.1225 -27.42, 153.1175 -27.42, 153.1125 -27.42, 153.1125 -27.4175, 153.11 -27.4175, 153.11 -27.415, 153.1075 -27.415, 153.1075 -27.4125, 153.0975 -27.4125, 153.0975 -27.415, 153.0925 -27.415, 153.0875 -27.415, 153.085 -27.415, 153.08 -27.415, 153.08 -27.4125, 153.0775 -27.4125, 153.0775 -27.41, 153.075 -27.41, 153.075 -27.405, 153.07 -27.405, 153.07 -27.4025, 153.0675 -27.4025, 153.0675 -27.4, 153.065 -27.4, 153.065 -27.3975, 153.0625 -27.3975, 153.0625 -27.395, 153.06 -27.395, 153.06 -27.3925, 153.0275 -27.3925, 153.0275 -27.395, 153.025 -27.395, 153.025 -27.3975, 153.0175 -27.3975, 153.0175 -27.4, 153.0125 -27.4, 153.0125 -27.4025, 153.005 -27.4025, 153.005 -27.405, 153.0025 -27.405, 152.9975 -27.405, 152.9975 -27.4025, 152.9925 -27.4025, 152.9925 -27.4, 152.9875 -27.4, 152.9825 -27.4, 152.9825 -27.3975, 152.98 -27.3975, 152.98 -27.3925, 152.975 -27.3925, 152.975 -27.3875, 152.97 -27.3875, 152.96 -27.3875, 152.96 -27.39, 152.955 -27.39, 152.955 -27.3925, 152.945 -27.3925, 152.94 -27.3925, 152.9375 -27.3925, 152.9375 -27.39, 152.925 -27.39, 152.925 -27.385, 152.925 -27.3825, 152.93 -27.3825, 152.9325 -27.3825, 152.9325 -27.38, 152.9375 -27.38, 152.9375 -27.3825, 152.94 -27.3825, 152.94 -27.38, 152.9475 -27.38, 152.9475 -27.3825, 152.9525 -27.3825, 152.9525 -27.38, 152.965 -27.38, 152.9675 -27.38, 152.9675 -27.3775, 152.98 -27.3775, 152.98 -27.375, 152.9825 -27.375, 152.9825 -27.3725, 152.985 -27.3725, 152.985 -27.37, 152.9875 -27.37, 152.9875 -27.3675, 152.99 -27.3675, 152.99 -27.3625, 152.9925 -27.3625, 152.9925 -27.355, 152.995 -27.355, 152.995 -27.3525, 153 -27.3525, 153 -27.35, 153.005 -27.35, 153.01 -27.35, 153.01 -27.3475, 153.0175 -27.3475, 153.0175 -27.335, 153.02 -27.335, 153.02 -27.33, 153.0225 -27.33, 153.0225 -27.3275, 153.025 -27.3275, 153.025 -27.325, 153.0275 -27.325, 153.0275 -27.3225, 153.03 -27.3225, 153.03 -27.32, 153.0325 -27.32, 153.0325 -27.3175, 153.035 -27.3175, 153.035 -27.305, 153.0375 -27.305, 153.0375 -27.3, 153.04 -27.3, 153.04 -27.2975, 153.0425 -27.2975, 153.0425 -27.2825, 153.04 -27.2825, 153.04 -27.28, 153.0425 -27.28, 153.05 -27.28, 153.06 -27.28))"^^geo:wktLiteral ] ; - prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments/items/sndgt:cc12109444" . + dcterms:title "Geofabric Contracted Catchments"@en ; + rdfs:member ; + prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments" . dcterms:description rdfs:label "Description"@en ; dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . @@ -46,12 +38,6 @@ rdf:type rdfs:label "type" . rdfs:label rdfs:label "label" . -skos:definition rdfs:label "definition"@en ; - skos:definition "A statement or formal explanation of the meaning of a concept."@en . - -skos:prefLabel rdfs:label "preferred label"@en ; - skos:definition "The preferred lexical label for a resource, in a given language."@en . 
- rdfs:member rdfs:label "member" . skos:definition rdfs:label "definition"@en ; diff --git a/tests/data/spaceprez/expected_responses/feature_collection_anot.ttl b/tests/data/spaceprez/expected_responses/feature_collection_anot.ttl index 159b107d..04d39f4a 100644 --- a/tests/data/spaceprez/expected_responses/feature_collection_anot.ttl +++ b/tests/data/spaceprez/expected_responses/feature_collection_anot.ttl @@ -38,13 +38,16 @@ skos:definition rdfs:label "definition"@en ; skos:prefLabel rdfs:label "preferred label"@en ; skos:definition "The preferred lexical label for a resource, in a given language."@en . - rdfs:label "Contracted Catchment 12109444" ; - dcterms:identifier "sndgt:cc12109444"^^prez:identifier ; - prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments/items/sndgt:cc12109444" . - - rdfs:label "Contracted Catchment 12109445" ; - dcterms:identifier "sndgt:cc12109445"^^prez:identifier ; - prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments/items/sndgt:cc12109445" . + a geo:FeatureCollection ; + rdfs:label "Geofabric Contracted Catchments"@en ; + dcterms:description "Hydrological catchments that are 'contracted', that is, guarenteed, to appear on multiple Geofabric surface hydrology data products"@en ; + dcterms:identifier "catchments"^^xsd:token, + "sndgt:catchments"^^prez:identifier ; + dcterms:title "Geofabric Contracted Catchments"@en ; + geo:hasBoundingBox [ a geo:Geometry ; + geo:asWKT "POLYGON ((152.9075 -27.42,153.16 -27.42,153.16 -27.2775,152.9075 -27.2775,152.9075 -27.42))"^^geo:wktLiteral ] ; + prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments" ; + prez:members [ prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments/items" ] . geo:FeatureCollection skos:definition "A collection of individual Features."@en ; skos:prefLabel "Feature Collection"@en . diff --git a/tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl b/tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl index 148a5e7f..501351f6 100644 --- a/tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl +++ b/tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl @@ -39,7 +39,6 @@ skos:prefLabel rdfs:label "preferred label"@en ; rdfs:label "Geofabric Contracted Catchments"@en ; dcterms:description "Hydrological catchments that are 'contracted', that is, guarenteed, to appear on multiple Geofabric surface hydrology data products"@en ; dcterms:identifier "sndgt:catchments"^^prez:identifier ; - dcterms:identifier "sndgt:catchments"^^prez:identifier ; dcterms:title "Geofabric Contracted Catchments"@en ; prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments" . @@ -47,7 +46,6 @@ skos:prefLabel rdfs:label "preferred label"@en ; rdfs:label "Sandgate are demo Facilities"@en ; dcterms:description "Sandgate area demo Facilities"@en ; dcterms:identifier "sndgt:facilities"^^prez:identifier ; - dcterms:identifier "sndgt:facilities"^^prez:identifier ; dcterms:title "Sandgate are demo Facilities"@en ; prez:link "/s/datasets/exds:sandgate/collections/sndgt:facilities" . @@ -55,7 +53,6 @@ skos:prefLabel rdfs:label "preferred label"@en ; rdfs:label "Sandgate flooded areas"@en ; dcterms:description "Sandgate flooded areas"@en ; dcterms:identifier "sndgt:floods"^^prez:identifier ; - dcterms:identifier "sndgt:floods"^^prez:identifier ; dcterms:title "Sandgate flooded areas"@en ; prez:link "/s/datasets/exds:sandgate/collections/sndgt:floods" . 
@@ -63,7 +60,6 @@ skos:prefLabel rdfs:label "preferred label"@en ; rdfs:label "Sandgate main roads"@en ; dcterms:description "Sandgate main roads"@en ; dcterms:identifier "sndgt:roads"^^prez:identifier ; - dcterms:identifier "sndgt:roads"^^prez:identifier ; dcterms:title "Sandgate main roads"@en ; prez:link "/s/datasets/exds:sandgate/collections/sndgt:roads" . @@ -71,4 +67,3 @@ geo:FeatureCollection skos:definition "A collection of individual Features."@en skos:prefLabel "Feature Collection"@en ; prez:count 4 . - diff --git a/tests/data/spaceprez/expected_responses/feature_listing_anot.ttl b/tests/data/spaceprez/expected_responses/feature_listing_anot.ttl index 72aaa69d..75d60e88 100644 --- a/tests/data/spaceprez/expected_responses/feature_listing_anot.ttl +++ b/tests/data/spaceprez/expected_responses/feature_listing_anot.ttl @@ -41,12 +41,13 @@ skos:prefLabel rdfs:label "preferred label"@en ; a geo:Feature ; rdfs:label "Contracted Catchment 12109444" ; dcterms:identifier "sndgt:cc12109444"^^prez:identifier ; - dcterms:identifier "sndgt:cc12109444"^^prez:identifier ; prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments/items/sndgt:cc12109444" . a geo:Feature ; rdfs:label "Contracted Catchment 12109445" ; dcterms:identifier "sndgt:cc12109445"^^prez:identifier ; - dcterms:identifier "sndgt:cc12109445"^^prez:identifier ; prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments/items/sndgt:cc12109445" . +geo:Feature skos:definition "A discrete spatial phenomenon in a universe of discourse."@en ; + skos:prefLabel "Feature"@en ; + prez:count 2 . diff --git a/tests/test_count.py b/tests/test_count.py new file mode 100644 index 00000000..c4dc4cc9 --- /dev/null +++ b/tests/test_count.py @@ -0,0 +1,84 @@ +from pathlib import Path + +import pytest +from fastapi.testclient import TestClient +from pyoxigraph.pyoxigraph import Store + +from prez.app import app +from prez.dependencies import get_repo +from prez.sparql.methods import Repo, PyoxigraphRepo + + +@pytest.fixture(scope="session") +def test_store() -> Store: + # Create a new pyoxigraph Store + store = Store() + + for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"): + store.load(file.read_bytes(), "text/turtle") + + return store + + +@pytest.fixture(scope="session") +def test_repo(test_store: Store) -> Repo: + # Create a PyoxigraphQuerySender using the test_store + return PyoxigraphRepo(test_store) + + +@pytest.fixture(scope="session") +def test_client(test_repo: Repo) -> TestClient: + # Override the dependency to use the test_repo + def override_get_repo(): + return test_repo + + app.dependency_overrides[get_repo] = override_get_repo + + with TestClient(app) as c: + yield c + + # Remove the override to ensure subsequent tests are unaffected + app.dependency_overrides.clear() + + +def get_curie(test_client: TestClient, iri: str) -> str: + response = test_client.get(f"/identifier/curie/{iri}") + if response.status_code != 200: + raise ValueError(f"Failed to retrieve curie for {iri}. 
{response.text}") + return response.text + + +@pytest.mark.parametrize( + "iri, inbound, outbound, count", + [ + [ + "http://linked.data.gov.au/def/borehole-purpose", + "http://www.w3.org/2004/02/skos/core#inScheme", + None, + 0, + ], + [ + "http://linked.data.gov.au/def/borehole-purpose-no-children", + "http://www.w3.org/2004/02/skos/core#inScheme", + None, + 0, + ], + [ + "http://linked.data.gov.au/def/borehole-purpose", + None, + "http://www.w3.org/2004/02/skos/core#hasTopConcept", + 0, + ], + ], +) +def test_count( + test_client: TestClient, + iri: str, + inbound: str | None, + outbound: str | None, + count: int, +): + curie = get_curie(test_client, iri) + params = {"curie": curie, "inbound": inbound, "outbound": outbound} + response = test_client.get(f"/count", params=params) + assert int(response.text) == count diff --git a/tests/test_cql.py b/tests/test_cql.py index 20ee6871..0f065d60 100644 --- a/tests/test_cql.py +++ b/tests/test_cql.py @@ -44,7 +44,28 @@ def override_get_repo(): @pytest.mark.parametrize( "cql_json_filename", - ["example01.json", "example02.json", "example03.json"], + [ + "example01.json", + "example02.json", + "example03.json", + "example05a.json", + "example05b.json", + "example06b.json", + "example09.json", + "example10.json", + "example11.json", + "example12.json", + "example14.json", + "example15.json", + "example17.json", + "example29.json", + "example31.json", + "example32.json", + "example33.json", + "example34.json", + "example35.json", + "example39.json", + ], ) def test_simple(client, cql_json_filename): cql_json = Path(__file__).parent / f"data/cql/input/{cql_json_filename}" diff --git a/tests/test_endpoints_catprez.py b/tests/test_endpoints_catprez.py new file mode 100644 index 00000000..f2a96b65 --- /dev/null +++ b/tests/test_endpoints_catprez.py @@ -0,0 +1,139 @@ +import time +from pathlib import Path + +import pytest +from fastapi.testclient import TestClient +from pyoxigraph.pyoxigraph import Store +from rdflib import Graph, URIRef +from rdflib.namespace import RDF, DCAT +from rdflib.compare import isomorphic + +from prez.app import app +from prez.dependencies import get_repo +from prez.sparql.methods import Repo, PyoxigraphRepo + + +@pytest.fixture(scope="session") +def test_store() -> Store: + # Create a new pyoxigraph Store + store = Store() + + for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"): + store.load(file.read_bytes(), "text/turtle") + + return store + + +@pytest.fixture(scope="session") +def test_repo(test_store: Store) -> Repo: + # Create a PyoxigraphQuerySender using the test_store + return PyoxigraphRepo(test_store) + + +def wait_for_app_to_be_ready(client, timeout=10): + start_time = time.time() + while time.time() - start_time < timeout: + try: + response = client.get("/health") + if response.status_code == 200: + return + except Exception as e: + print(e) + time.sleep(0.5) + raise RuntimeError("App did not start within the specified timeout") + + +@pytest.fixture(scope="session") +def client(test_repo: Repo) -> TestClient: + # Override the dependency to use the test_repo + def override_get_repo(): + return test_repo + + app.dependency_overrides[get_repo] = override_get_repo + + with TestClient(app) as c: + wait_for_app_to_be_ready(c) + yield c + + # Remove the override to ensure subsequent tests are unaffected + app.dependency_overrides.clear() + + +@pytest.fixture(scope="session") +def a_catalog_link(client): + # get link for first catalog + r = client.get("/catalogs") + g = Graph().parse(data=r.text) + 
member_uri = g.value(None, RDF.type, DCAT.Catalog) + link = g.value(member_uri, URIRef(f"https://prez.dev/link", None)) + return link + + +@pytest.fixture(scope="session") +def a_resource_link(client, a_catalog_link): + r = client.get(a_catalog_link) + g = Graph().parse(data=r.text) + links = g.objects(subject=None, predicate=URIRef(f"https://prez.dev/link")) + for link in links: + if link != a_catalog_link: + return link + + +def test_catalog_listing_anot(client): + r = client.get( + f"/catalogs?_mediatype=text/anot+turtle&_profile=prez:OGCListingProfile" + ) + response_graph = Graph().parse(data=r.text) + expected_graph = Graph().parse( + Path(__file__).parent + / "../tests/data/catprez/expected_responses/top_level_catalog_listing_anot.ttl" + ) + assert isomorphic(response_graph, expected_graph), print( + f"RESPONSE GRAPH\n{response_graph.serialize()}," + f"EXPECTED GRAPH\n{expected_graph.serialize()}", + f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", + f"EXTRA TRIPLES\n{(response_graph - expected_graph).serialize()}", + ) + + +def test_catalog_anot(client, a_catalog_link): + r = client.get(f"{a_catalog_link}?_mediatype=text/anot+turtle") + response_graph = Graph().parse(data=r.text) + expected_graph = Graph().parse( + Path(__file__).parent + / "../tests/data/catprez/expected_responses/top_level_catalog_anot.ttl" + ) + assert isomorphic(response_graph, expected_graph), print( + f"RESPONSE GRAPH\n{response_graph.serialize()}," + f"EXPECTED GRAPH\n{expected_graph.serialize()}", + f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", + f"EXTRA TRIPLES\n{(response_graph - expected_graph).serialize()}", + ) + + +def test_resource_listing_anot(client, a_catalog_link): + r = client.get(f"{a_catalog_link}/collections?_mediatype=text/anot+turtle") + response_graph = Graph().parse(data=r.text) + expected_graph = Graph().parse( + Path(__file__).parent + / "../tests/data/catprez/expected_responses/resource_listing_anot.ttl" + ) + assert isomorphic(response_graph, expected_graph), print( + f"RESPONSE GRAPH\n{response_graph.serialize()}," + f"EXPECTED GRAPH\n{expected_graph.serialize()}", + f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", + f"EXTRA TRIPLES\n{(response_graph - expected_graph).serialize()}", + ) + + +def test_resource_anot(client, a_resource_link): + r = client.get(f"{a_resource_link}?_mediatype=text/anot+turtle") + response_graph = Graph().parse(data=r.text) + expected_graph = Graph().parse( + Path(__file__).parent + / "../tests/data/catprez/expected_responses/resource_anot.ttl" + ) + assert response_graph.isomorphic(expected_graph), print( + f"Missing:{(expected_graph - response_graph).serialize()}" + f"Extra:{(response_graph - expected_graph).serialize()}" + ) diff --git a/tests/test_endpoints_ok.py b/tests/test_endpoints_ok.py new file mode 100644 index 00000000..6ed71bdd --- /dev/null +++ b/tests/test_endpoints_ok.py @@ -0,0 +1,100 @@ +import logging +import time +from pathlib import Path +from typing import Optional, Set, Dict + +import pytest +from fastapi.testclient import TestClient +from pyoxigraph.pyoxigraph import Store +from rdflib import Graph + +from prez.app import app +from prez.dependencies import get_repo +from prez.reference_data.prez_ns import PREZ +from prez.sparql.methods import Repo, PyoxigraphRepo + +log = logging.getLogger(__name__) + + +@pytest.fixture(scope="session") +def test_store() -> Store: + # Create a new pyoxigraph Store + store = Store() + + for file in 
Path(__file__).parent.glob("../tests/data/*/input/*.ttl"):
+        store.load(file.read_bytes(), "text/turtle")
+
+    return store
+
+
+def wait_for_app_to_be_ready(client, timeout=10):
+    start_time = time.time()
+    while time.time() - start_time < timeout:
+        try:
+            response = client.get("/health")
+            if response.status_code == 200:
+                return
+        except Exception as e:
+            print(e)
+        time.sleep(0.5)
+    raise RuntimeError("App did not start within the specified timeout")
+
+
+@pytest.fixture(scope="session")
+def test_repo(test_store: Store) -> Repo:
+    # Create a PyoxigraphQuerySender using the test_store
+    return PyoxigraphRepo(test_store)
+
+
+@pytest.fixture(scope="session")
+def client(test_repo: Repo) -> TestClient:
+    # Override the dependency to use the test_repo
+    def override_get_repo():
+        return test_repo
+
+    app.dependency_overrides[get_repo] = override_get_repo
+
+    with TestClient(app) as c:
+        wait_for_app_to_be_ready(c)
+        yield c
+
+    # Remove the override to ensure subsequent tests are unaffected
+    app.dependency_overrides.clear()
+
+
+def test_catprez_links(client: TestClient, visited: Optional[Set] = None, link="/c/catalogs"):
+    if not visited:
+        visited = set()
+    response = client.get(link)
+    links_in_response = list(Graph().parse(data=response.text, format="turtle").objects(None, PREZ.link))
+    assert response.status_code == 200
+    for link in links_in_response:
+        if link not in visited:
+            visited.add(link)
+            test_catprez_links(client, visited, str(link))
+
+
+def test_vocprez_links(client: TestClient, visited: Optional[Set] = None, link="/v/catalogs"):
+    if not visited:
+        visited = set()
+    response = client.get(link)
+    links_in_response = list(Graph().parse(data=response.text, format="turtle").objects(None, PREZ.link))
+    assert response.status_code == 200
+    for link in links_in_response:
+        if link not in visited:
+            visited.add(link)
+            test_vocprez_links(client, visited, str(link))
+
+
+def test_spaceprez_links(client: TestClient, visited: Optional[Set] = None, link="/s/datasets"):
+    if not visited:
+        visited = set()
+    response = client.get(link)
+    links_in_response = list(Graph().parse(data=response.text, format="turtle").objects(None, PREZ.link))
+    assert response.status_code == 200
+    for link in links_in_response:
+        if link not in visited:
+            visited.add(link)
+            test_spaceprez_links(client, visited, str(link))
\ No newline at end of file
diff --git a/tests/test_endpoints_profiles.py b/tests/test_endpoints_profiles.py
new file mode 100644
index 00000000..d232b595
--- /dev/null
+++ b/tests/test_endpoints_profiles.py
@@ -0,0 +1,71 @@
+from pathlib import Path
+
+import pytest
+from fastapi.testclient import TestClient
+from pyoxigraph.pyoxigraph import Store
+from rdflib import Graph, URIRef
+from rdflib.namespace import RDF, PROF
+
+from prez.app import app
+from prez.dependencies import get_repo
+from prez.sparql.methods import Repo, PyoxigraphRepo
+
+
+@pytest.fixture(scope="session")
+def test_store() -> Store:
+    # Create a new pyoxigraph Store
+    store = Store()
+
+    for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"):
+        store.load(file.read_bytes(), "text/turtle")
+
+    return store
+
+
+@pytest.fixture(scope="session")
+def test_repo(test_store: Store) -> Repo:
+    # Create a PyoxigraphQuerySender using the test_store
+    return PyoxigraphRepo(test_store)
+
+
+@pytest.fixture(scope="session")
+def client(test_repo: Repo) -> TestClient:
+    # Override the dependency to use the test_repo
+    def override_get_repo():
+        return test_repo
+
+    app.dependency_overrides[get_repo] = 
override_get_repo
+
+    with TestClient(app) as c:
+        yield c
+
+    # Remove the override to ensure subsequent tests are unaffected
+    app.dependency_overrides.clear()
+
+
+def test_profile(client):
+    # check the prez profile is in the profiles listing
+    r = client.get("/profiles")
+    g = Graph().parse(data=r.text)
+    assert (URIRef("https://prez.dev/profile/prez"), RDF.type, PROF.Profile) in g
+
+
+def test_ogcprez_profile(client):
+    # check the OGC profile is loaded
+    r = client.get("/profiles/prez:OGCProfile")
+    g = Graph().parse(data=r.text)
+    assert (URIRef("https://prez.dev/OGCProfile"), RDF.type, PROF.Profile) in g
+
+
+def test_sp_profile(client):
+    # check the SpacePrez profile is loaded
+    r = client.get("/profiles/prez:SpacePrezProfile")
+    g = Graph().parse(data=r.text)
+    assert (URIRef("https://prez.dev/SpacePrezProfile"), RDF.type, PROF.Profile) in g
+
+
+def test_vp_profile(client):
+    # check the VocPrez profile is loaded
+    r = client.get("/profiles/prez:VocPrezProfile")
+    g = Graph().parse(data=r.text)
+    assert (URIRef("https://prez.dev/VocPrezProfile"), RDF.type, PROF.Profile) in g
diff --git a/tests/test_endpoints_spaceprez.py b/tests/test_endpoints_spaceprez.py
new file mode 100644
index 00000000..f740e6c5
--- /dev/null
+++ b/tests/test_endpoints_spaceprez.py
@@ -0,0 +1,165 @@
+from pathlib import Path
+
+import pytest
+from fastapi.testclient import TestClient
+from pyoxigraph.pyoxigraph import Store
+from rdflib import Graph, URIRef
+from rdflib.compare import isomorphic
+from rdflib.namespace import RDF, DCAT, RDFS
+
+from prez.app import app
+from prez.dependencies import get_repo
+from prez.sparql.methods import Repo, PyoxigraphRepo
+
+
+@pytest.fixture(scope="session")
+def test_store() -> Store:
+    # Create a new pyoxigraph Store
+    store = Store()
+
+    for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"):
+        store.load(file.read_bytes(), "text/turtle")
+
+    return store
+
+
+@pytest.fixture(scope="session")
+def test_repo(test_store: Store) -> Repo:
+    # Create a PyoxigraphQuerySender using the test_store
+    return PyoxigraphRepo(test_store)
+
+
+@pytest.fixture(scope="session")
+def client(test_repo: Repo) -> TestClient:
+    # Override the dependency to use the test_repo
+    def override_get_repo():
+        return test_repo
+
+    app.dependency_overrides[get_repo] = override_get_repo
+
+    with TestClient(app) as c:
+        yield c
+
+    # Remove the override to ensure subsequent tests are unaffected
+    app.dependency_overrides.clear()
+
+
+@pytest.fixture(scope="session")
+def a_dataset_link(client):
+    r = client.get("/s/datasets")
+    g = Graph().parse(data=r.text)
+    member_uri = g.value(None, RDF.type, DCAT.Dataset)
+    link = g.value(member_uri, URIRef(f"https://prez.dev/link", None))
+    return link
+
+
+@pytest.fixture(scope="session")
+def an_fc_link(client, a_dataset_link):
+    r = client.get(f"{a_dataset_link}/collections")
+    g = Graph().parse(data=r.text)
+    member_uri = g.value(
+        URIRef("http://example.com/datasets/sandgate"), RDFS.member, None
+    )
+    link = g.value(member_uri, URIRef(f"https://prez.dev/link", None))
+    return link
+
+
+@pytest.fixture(scope="session")
+def a_feature_link(client, an_fc_link):
+    r = client.get(f"{an_fc_link}/items")
+    g = Graph().parse(data=r.text)
+    member_uri = g.value(
+        URIRef("http://example.com/datasets/sandgate/catchments"), RDFS.member, None
+    )
+    link = g.value(member_uri, URIRef(f"https://prez.dev/link", None))
+    return link
+
+
+def test_dataset_anot(client, a_dataset_link):
+    r = 
client.get(f"{a_dataset_link}?_mediatype=text/anot+turtle") + response_graph = Graph().parse(data=r.text) + expected_graph = Graph().parse( + Path(__file__).parent + / "../tests/data/spaceprez/expected_responses/dataset_anot.ttl" + ) + assert isomorphic(response_graph, expected_graph), print( + f"RESPONSE GRAPH\n{response_graph.serialize()}," + f"EXPECTED GRAPH\n{expected_graph.serialize()}", + f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", + f"EXTRA TRIPLES\n{(response_graph - expected_graph).serialize()}", + ) + + +def test_feature_collection_anot(client, an_fc_link): + r = client.get(f"{an_fc_link}?_mediatype=text/anot+turtle") + response_graph = Graph().parse(data=r.text) + expected_graph = Graph().parse( + Path(__file__).parent + / "../tests/data/spaceprez/expected_responses/feature_collection_anot.ttl" + ) + assert isomorphic(response_graph, expected_graph), print( + f"RESPONSE GRAPH\n{response_graph.serialize()}," + f"EXPECTED GRAPH\n{expected_graph.serialize()}", + f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", + f"EXTRA TRIPLES\n{(response_graph - expected_graph).serialize()}", + ) + + +def test_feature_anot(client, a_feature_link): + r = client.get(f"{a_feature_link}?_mediatype=text/anot+turtle") + response_graph = Graph().parse(data=r.text) + expected_graph = Graph().parse( + Path(__file__).parent + / "../tests/data/spaceprez/expected_responses/feature_anot.ttl" + ) + assert isomorphic(response_graph, expected_graph), print( + f"RESPONSE GRAPH\n{response_graph.serialize()}," + f"EXPECTED GRAPH\n{expected_graph.serialize()}", + f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", + f"EXTRA TRIPLES\n{(response_graph - expected_graph).serialize()}", + ) + + +def test_dataset_listing_anot(client): + r = client.get("/s/datasets?_mediatype=text/anot+turtle") + response_graph = Graph().parse(data=r.text) + expected_graph = Graph().parse( + Path(__file__).parent + / "../tests/data/spaceprez/expected_responses/dataset_listing_anot.ttl" + ) + assert response_graph.isomorphic(expected_graph), print( + f"RESPONSE GRAPH\n{response_graph.serialize()}," + f"EXPECTED GRAPH\n{expected_graph.serialize()}", + f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", + f"EXTRA TRIPLES\n{(response_graph - expected_graph).serialize()}", + ) + + +def test_feature_collection_listing_anot(client, a_dataset_link): + r = client.get(f"{a_dataset_link}/collections?_mediatype=text/anot+turtle") + response_graph = Graph().parse(data=r.text) + expected_graph = Graph().parse( + Path(__file__).parent + / "../tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl" + ) + assert response_graph.isomorphic(expected_graph), print( + f"RESPONSE GRAPH\n{response_graph.serialize()}," + f"EXPECTED GRAPH\n{expected_graph.serialize()}", + f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", + f"EXTRA TRIPLES\n{(response_graph - expected_graph).serialize()}", + ) + + +def test_feature_listing_anot(client, an_fc_link): + r = client.get(f"{an_fc_link}/items?_mediatype=text/anot+turtle") + response_graph = Graph().parse(data=r.text) + expected_graph = Graph().parse( + Path(__file__).parent + / "../tests/data/spaceprez/expected_responses/feature_listing_anot.ttl" + ) + assert response_graph.isomorphic(expected_graph), print( + f"RESPONSE GRAPH\n{response_graph.serialize()}," + f"EXPECTED GRAPH\n{expected_graph.serialize()}", + f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", + f"EXTRA 
TRIPLES\n{(response_graph - expected_graph).serialize()}", + ) diff --git a/tests/test_endpoints_vocprez.py b/tests/test_endpoints_vocprez.py new file mode 100644 index 00000000..a39a32fd --- /dev/null +++ b/tests/test_endpoints_vocprez.py @@ -0,0 +1,276 @@ +import time +from pathlib import Path + +import pytest +from fastapi.testclient import TestClient +from pyoxigraph.pyoxigraph import Store +from rdflib import Graph, URIRef +from rdflib.compare import isomorphic + +from prez.app import app +from prez.dependencies import get_repo +from prez.sparql.methods import Repo, PyoxigraphRepo + + +@pytest.fixture(scope="session") +def test_store() -> Store: + # Create a new pyoxigraph Store + store = Store() + + for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"): + store.load(file.read_bytes(), "text/turtle") + + return store + + +@pytest.fixture(scope="session") +def test_repo(test_store: Store) -> Repo: + # Create a PyoxigraphQuerySender using the test_store + return PyoxigraphRepo(test_store) + + +def wait_for_app_to_be_ready(client, timeout=10): + start_time = time.time() + while time.time() - start_time < timeout: + try: + response = client.get("/health") + if response.status_code == 200: + return + except Exception as e: + print(e) + time.sleep(0.5) + raise RuntimeError("App did not start within the specified timeout") + + +@pytest.fixture(scope="session") +def test_client(test_repo: Repo) -> TestClient: + # Override the dependency to use the test_repo + def override_get_repo(): + return test_repo + + app.dependency_overrides[get_repo] = override_get_repo + + with TestClient(app) as c: + wait_for_app_to_be_ready(c) + yield c + + # Remove the override to ensure subsequent tests are unaffected + app.dependency_overrides.clear() + + +@pytest.fixture(scope="session") +def links(test_client: TestClient): + r = test_client.get("/v/collection") + g = Graph().parse(data=r.text) + vocab_uri = URIRef("http://resource.geosciml.org/classifier/cgi/contacttype") + vocab_link = g.value(vocab_uri, URIRef(f"https://prez.dev/link", None)) + # vocab_uri = g.value(None, RDF.type, SKOS.ConceptScheme) + # vocab_link = g.value(member_uri, URIRef(f"https://prez.dev/link", None)) + return vocab_link + + +def get_curie(test_client: TestClient, iri: str) -> str: + response = test_client.get(f"/identifier/curie/{iri}") + if response.status_code != 200: + raise ValueError(f"Failed to retrieve curie for {iri}. 
{response.text}") + return response.text + + +def test_vocab_listing(test_client: TestClient): + response = test_client.get( + f"/v/vocab?_mediatype=text/anot+turtle&_profile=vocpub:schemes-list" + ) + response_graph = Graph().parse(data=response.text) + expected_graph = Graph().parse( + Path(__file__).parent + / "../tests/data/vocprez/expected_responses/vocab_listing_anot.ttl" + ) + assert isomorphic(expected_graph, response_graph), print( + f"Missing triples\n{(expected_graph - response_graph).serialize()}", + f"Extra triples\n{(response_graph - expected_graph).serialize()}", + ) + + +# @pytest.mark.xfail( +# reason="oxigraph's DESCRIBE does not include blank nodes so the expected response is not what will " +# "be returned - route should not need describe query" +# ) +@pytest.mark.parametrize( + "iri, expected_result_file, description", + [ + [ + "http://linked.data.gov.au/def2/borehole-purpose", + "concept_scheme_with_children.ttl", + "Return concept scheme and a prez:childrenCount of 8", + ], + [ + "http://linked.data.gov.au/def2/borehole-purpose-no-children", + "concept_scheme_no_children.ttl", + "Return concept scheme and a prez:childrenCount of 0", + ], + ], +) +def test_concept_scheme( + test_client: TestClient, iri: str, expected_result_file: str, description: str +): + curie = get_curie(test_client, iri) + + response = test_client.get(f"/v/vocab/{curie}?_mediatype=text/anot+turtle") + response_graph = Graph(bind_namespaces="rdflib").parse(data=response.text) + expected_graph = Graph().parse( + Path(__file__).parent + / f"../tests/data/vocprez/expected_responses/{expected_result_file}" + ) + assert isomorphic(expected_graph, response_graph), f"Failed test: {description}" + + +# bedding surface works if stepped through - this will be another case of the local SPARQL store not being able to +# process the queries in parallel +@pytest.mark.xfail(reason="query error + issue with oxigraph") +@pytest.mark.parametrize( + "iri, expected_result_file, description", + [ + [ + "http://linked.data.gov.au/def2/borehole-purpose", + "concept_scheme_top_concepts_with_children.ttl", + "Return concept scheme and a prez:childrenCount of 8", + ], + [ + "http://linked.data.gov.au/def2/borehole-purpose-no-children", + "empty.ttl", + "Return concept scheme and a prez:childrenCount of 0", + ], + [ + "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure", + "beddingsurfacestructure_top_concepts.ttl", + "Top concepts have the correct annotation values for reg:status and color", + ], + ], +) +def test_concept_scheme_top_concepts( + test_client: TestClient, iri: str, expected_result_file: str, description: str +): + curie = get_curie(test_client, iri) + response = test_client.get( + f"/v/vocab/{curie}/top-concepts?_mediatype=text/anot+turtle" + ) + response_graph = Graph(bind_namespaces="rdflib").parse(data=response.text) + expected_graph = Graph().parse( + Path(__file__).parent + / f"../tests/data/vocprez/expected_responses/{expected_result_file}" + ) + assert isomorphic(expected_graph, response_graph), f"Failed test: {description}" + + +# @pytest.mark.xfail( +# reason="issue with oxigraph counting children that do not exist (giving childrenCount 1; should be 0)" +# ) +@pytest.mark.parametrize( + "concept_scheme_iri, concept_iri, expected_result_file, description", + [ + [ + "http://linked.data.gov.au/def2/borehole-purpose", + "http://linked.data.gov.au/def/borehole-purpose/coal", + "concept-with-2-narrower-concepts.ttl", + "Return concept with 2 narrower concepts.", + ], + [ + 
"http://linked.data.gov.au/def2/borehole-purpose", + "http://linked.data.gov.au/def2/borehole-purpose/open-cut-coal-mining", + "empty.ttl", + "Return nothing, no children.", + ], + ], +) +def test_concept_narrowers( + test_client: TestClient, + concept_scheme_iri: str, + concept_iri: str, + expected_result_file: str, + description: str, +): + concept_scheme_curie = get_curie(test_client, concept_scheme_iri) + concept_curie = get_curie(test_client, concept_iri) + response = test_client.get( + f"/v/vocab/{concept_scheme_curie}/{concept_curie}/narrowers?_mediatype=text/anot+turtle&_profile=https://w3id.org/profile/vocpub" + ) + response_graph = Graph(bind_namespaces="rdflib").parse(data=response.text) + expected_graph = Graph().parse( + Path(__file__).parent + / f"../tests/data/vocprez/expected_responses/{expected_result_file}" + ) + assert isomorphic(response_graph, expected_graph), print( + f"RESPONSE GRAPH\n{response_graph.serialize()}," + f"EXPECTED GRAPH\n{expected_graph.serialize()}", + f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", + f"EXTRA TRIPLES\n{(response_graph - expected_graph).serialize()}", + ) + + +@pytest.mark.parametrize( + "concept_scheme_iri, concept_iri, expected_result_file, description", + [ + # [ + # "http://linked.data.gov.au/def/borehole-purpose", + # "http://linked.data.gov.au/def/borehole-purpose/coal", + # "concept-coal.ttl", + # "Return the coal concept and its properties.", + # ], + [ + "http://linked.data.gov.au/def/borehole-purpose", + "http://linked.data.gov.au/def/borehole-purpose/open-cut-coal-mining", + "concept-open-cut-coal-mining.ttl", + "Return the open-cut-coal-mining concept and its properties.", + ], + ], +) +def test_concept( + test_client: TestClient, + concept_scheme_iri: str, + concept_iri: str, + expected_result_file: str, + description: str, +): + concept_scheme_curie = get_curie(test_client, concept_scheme_iri) + concept_curie = get_curie(test_client, concept_iri) + response = test_client.get( + f"/v/vocab/{concept_scheme_curie}/{concept_curie}?_mediatype=text/anot+turtle" + ) + response_graph = Graph(bind_namespaces="rdflib").parse(data=response.text) + expected_graph = Graph().parse( + Path(__file__).parent + / f"../tests/data/vocprez/expected_responses/{expected_result_file}" + ) + assert isomorphic(response_graph, expected_graph), print( + f"RESPONSE GRAPH\n{response_graph.serialize()}," + f"EXPECTED GRAPH\n{expected_graph.serialize()}", + f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", + f"EXTRA TRIPLES\n{(response_graph - expected_graph).serialize()}", + ) + + +def test_collection_listing(test_client: TestClient): + response = test_client.get(f"/v/collection?_mediatype=text/anot+turtle") + response_graph = Graph().parse(data=response.text, format="turtle") + expected_graph = Graph().parse( + Path(__file__).parent + / "../tests/data/vocprez/expected_responses/collection_listing_anot.ttl" + ) + assert isomorphic(expected_graph, response_graph) + + +# TODO figure out why this fails and yet when run via debugger, passes.. 
+def test_collection_listing_item(test_client: TestClient, links): + response = test_client.get("/v/collection/cgi:contacttype") + assert response.status_code == 200 + response_graph = Graph().parse(data=response.text, format="turtle") + expected_graph = Graph().parse( + Path(__file__).parent + / "../tests/data/vocprez/expected_responses/collection_listing_item.ttl" + ) + assert isomorphic(response_graph, expected_graph), print( + f"RESPONSE GRAPH\n{response_graph.serialize()}," + f"EXPECTED GRAPH\n{expected_graph.serialize()}", + f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", + f"EXTRA TRIPLES\n{(response_graph - expected_graph).serialize()}", + ) diff --git a/tests/test_object_listings.py b/tests/test_object_listings.py deleted file mode 100644 index be2d9b41..00000000 --- a/tests/test_object_listings.py +++ /dev/null @@ -1,32 +0,0 @@ -from rdflib.namespace import PROV - -from prez.sparql.objects_listings import generate_sequence_construct - - -def test_generate_sequence_construct() -> None: - sequence_construct, sequence_construct_where = generate_sequence_construct( - [ - [PROV.qualifiedDerivation, PROV.hadRole], - [PROV.qualifiedDerivation, PROV.entity], - ], - "?top_level_item", - ) - - expected_sequence_construct = """\t?top_level_item ?seq_o1_0 . -\t?seq_o1_0 ?seq_o2_0 .\t?top_level_item ?seq_o1_1 . -\t?seq_o1_1 ?seq_o2_1 .""" - - assert sequence_construct == expected_sequence_construct - - expected_sequence_construct_where = """\ -OPTIONAL { -\t?top_level_item ?seq_o1_0 . -\t?seq_o1_0 ?seq_o2_0 . -} -OPTIONAL { -\t?top_level_item ?seq_o1_1 . -\t?seq_o1_1 ?seq_o2_1 . -} -""" - - assert sequence_construct_where == expected_sequence_construct_where diff --git a/tests/test_search.py b/tests/test_search.py index f650a739..60e2fbde 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -115,7 +115,7 @@ def test_search_filter_to_focus_multiple(client: TestClient): @pytest.mark.xfail( - reason="This generates a valid query that has been tested in Fuseki, which RDFLib and Pyoxigraph cannot run(!)" + reason="This generates a valid query that has been tested in Fuseki, which RDFLib struggles with" ) def test_search_focus_to_filter_multiple(client: TestClient): base_url = "/search" diff --git a/tests/test_sparql.py b/tests/test_sparql.py index 53c4c921..dddd7682 100644 --- a/tests/test_sparql.py +++ b/tests/test_sparql.py @@ -43,15 +43,23 @@ def override_get_repo(): def test_select(client): """check that a valid select query returns a 200 response.""" - r = client.get("/sparql?query=SELECT%20*%0AWHERE%20%7B%0A%20%20%3Fs%20%3Fp%20%3Fo%0A%7D%20LIMIT%201") + r = client.get( + "/sparql?query=SELECT%20*%0AWHERE%20%7B%0A%20%20%3Fs%20%3Fp%20%3Fo%0A%7D%20LIMIT%201" + ) assert (r.status_code, 200) + def test_construct(client): """check that a valid construct query returns a 200 response.""" - r = client.get("/sparql?query=CONSTRUCT%20%7B%0A%20%20%3Fs%20%3Fp%20%3Fo%0A%7D%20WHERE%20%7B%0A%20%20%3Fs%20%3Fp%20%3Fo%0A%7D%20LIMIT%201") + r = client.get( + "/sparql?query=CONSTRUCT%20%7B%0A%20%20%3Fs%20%3Fp%20%3Fo%0A%7D%20WHERE%20%7B%0A%20%20%3Fs%20%3Fp%20%3Fo%0A%7D%20LIMIT%201" + ) assert (r.status_code, 200) + def test_ask(client): """check that a valid ask query returns a 200 response.""" - r = 
+    r = client.get(
+        "/sparql?query=PREFIX%20ex%3A%20%3Chttp%3A%2F%2Fexample.com%2Fdatasets%2F%3E%0APREFIX%20dcterms%3A%20%3Chttp%3A%2F%2Fpurl.org%2Fdc%2Fterms%2F%3E%0A%0AASK%0AWHERE%20%7B%0A%20%20%3Fsubject%20dcterms%3Atitle%20%3Ftitle%20.%0A%20%20FILTER%20CONTAINS(LCASE(%3Ftitle)%2C%20%22sandgate%22)%0A%7D"
+    )
     assert r.status_code == 200

From 21bde0012f3d06feb22c1fe5277a9b02962002a9 Mon Sep 17 00:00:00 2001
From: david
Date: Sat, 9 Dec 2023 01:19:45 +1000
Subject: [PATCH 03/25] MVP CQL

---
 prez/app.py                                   |    2 -
 prez/dependencies.py                          |   21 +-
 prez/models/__init__.py                       |    1 -
 prez/models/cql_query.py                      |    1 -
 prez/models/listing.py                        |   47 -
 prez/models/profiles_item.py                  |   47 -
 prez/models/profiles_listings.py              |   21 +-
 prez/models/search_method.py                  |   41 -
 prez/models/vocprez_listings.py               |   34 -
 prez/queries/vocprez.py                       |  192 ---
 .../endpoints/catprez_endpoints.ttl           |   40 -
 .../cql_endpoints.ttl                         |    1 -
 .../ogc_catprez_endpoints.ttl                 |    0
 .../ogc_spaceprez_endpoints.ttl               |    0
 .../ogc_vocprez_endpoints.ttl                 |    8 +-
 .../endpoints/profiles_endpoints.ttl          |   48 -
 .../endpoints/spaceprez_endpoints.ttl         |   54 -
 .../system_endpoints.ttl                      |   12 +
 .../endpoints/vocprez_endpoints.ttl           |   50 -
 .../vocprez_endpoints.ttl.unused              |    0
 .../profiles/{_dd_to_refactor.ttl => dd.ttl}  |   15 +-
 .../profiles/ogc_records_profile.ttl          |    2 +-
 .../profiles/prez_default_profiles.ttl        |   28 +-
 prez/renderers/renderer.py                    |   42 +-
 prez/routers/cql.py                           |   43 +-
 prez/routers/object.py                        |    4 +-
 prez/routers/ogc_catprez.py                   |   57 +-
 prez/routers/ogc_spaceprez.py                 |   29 +-
 prez/routers/ogc_vocprez.py                   |   84 +-
 prez/routers/profiles.py                      |    8 +-
 prez/routers/search.py                        |  126 +-
 prez/services/app_service.py                  |    4 +-
 prez/services/generate_profiles.py            |    2 +-
 prez/services/listings.py                     |   98 +-
 prez/services/model_methods.py                |    2 -
 prez/services/objects.py                      |   29 +-
 prez/services/search_methods.py               |   50 -
 prez/sparql/methods.py                        |    4 -
 prez/sparql/objects_listings.py               |  413 +------
 prez/sparql/search_query.py                   |  397 +++++++
 temp/cql2sparql.py                            |  113 +-
 temp/grammar.py                               | 1036 +++++++++--------
 temp/shacl2sparql.py                          |  144 ++-
 temp/test_search.py                           |   13 +
 test_data/catprez.ttl                         |   17 +
 test_data/vocprez.ttl                         |    2 +-
 tests/conftest.py                             |    2 +-
 .../expected_responses/dataset_anot.ttl       |   12 +-
 .../dataset_listing_anot.ttl                  |    8 +-
 .../expected_responses/feature_anot.ttl       |    4 +-
 .../feature_collection_anot.ttl               |    6 +-
 .../feature_collection_listing_anot.ttl       |   10 +-
 .../feature_listing_anot.ttl                  |    6 +-
 .../data/vocprez/input/catalog-of-vocabs.ttl  |   12 +
 tests/test_dd_profiles.py                     |    2 +-
 tests/test_endpoints_ok.py                    |   53 +-
 tests/test_endpoints_spaceprez.py             |    4 +-
 tests/test_search.py                          |    2 -
 58 files changed, 1557 insertions(+), 1946 deletions(-)
 delete mode 100644 prez/models/__init__.py
 delete mode 100644 prez/models/cql_query.py
 delete mode 100644 prez/models/listing.py
 delete mode 100644 prez/models/profiles_item.py
 delete mode 100644 prez/models/search_method.py
 delete mode 100644 prez/models/vocprez_listings.py
 delete mode 100644 prez/queries/vocprez.py
 delete mode 100644 prez/reference_data/endpoints/catprez_endpoints.ttl
 rename prez/reference_data/{new_endpoints => endpoints}/cql_endpoints.ttl (90%)
 rename prez/reference_data/{new_endpoints => endpoints}/ogc_catprez_endpoints.ttl (100%)
 rename
prez/reference_data/{new_endpoints => endpoints}/ogc_spaceprez_endpoints.ttl (100%) rename prez/reference_data/{new_endpoints => endpoints}/ogc_vocprez_endpoints.ttl (96%) delete mode 100644 prez/reference_data/endpoints/profiles_endpoints.ttl delete mode 100644 prez/reference_data/endpoints/spaceprez_endpoints.ttl rename prez/reference_data/{new_endpoints => endpoints}/system_endpoints.ttl (66%) delete mode 100644 prez/reference_data/endpoints/vocprez_endpoints.ttl rename prez/reference_data/{new_endpoints => endpoints}/vocprez_endpoints.ttl.unused (100%) rename prez/reference_data/profiles/{_dd_to_refactor.ttl => dd.ttl} (84%) delete mode 100644 prez/services/search_methods.py create mode 100644 prez/sparql/search_query.py create mode 100644 temp/test_search.py create mode 100644 tests/data/vocprez/input/catalog-of-vocabs.ttl diff --git a/prez/app.py b/prez/app.py index bbb127be..769ad17d 100644 --- a/prez/app.py +++ b/prez/app.py @@ -48,7 +48,6 @@ ) from prez.services.generate_profiles import create_profiles_graph from prez.services.prez_logging import setup_logger -from prez.services.search_methods import get_all_search_methods from prez.sparql.methods import RemoteSparqlRepo, PyoxigraphRepo, OxrdflibRepo app = FastAPI( @@ -137,7 +136,6 @@ async def app_startup(): ) await add_prefixes_to_prefix_graph(app.state.repo) - await get_all_search_methods(app.state.repo) await create_profiles_graph(app.state.repo) await create_endpoints_graph(app.state.repo) await count_objects(app.state.repo) diff --git a/prez/dependencies.py b/prez/dependencies.py index 344f6c2d..b2c91af7 100644 --- a/prez/dependencies.py +++ b/prez/dependencies.py @@ -87,7 +87,7 @@ class CQLRequest(BaseModel): cql: Optional[dict] -async def cql_parser_dependency(request: Request): +async def cql_post_parser_dependency(request: Request): try: body = await request.json() context = json.load( @@ -95,7 +95,24 @@ async def cql_parser_dependency(request: Request): ) cql_parser = CQLParser(cql=body, context=context) cql_parser.generate_jsonld() - return cql_parser.cql_json + return cql_parser + except json.JSONDecodeError: + raise HTTPException(status_code=400, detail="Invalid JSON format.") + except Exception as e: # Replace with your specific parsing exception + raise HTTPException( + status_code=400, detail="Invalid CQL format: Parsing failed." 
+ ) + + +async def cql_get_parser_dependency(request: Request): + try: + query = json.loads(request.query_params["q"]) + context = json.load( + (Path(__file__).parent.parent / "temp" / "default_cql_context.json").open() + ) + cql_parser = CQLParser(cql=query, context=context) + cql_parser.generate_jsonld() + return cql_parser except json.JSONDecodeError: raise HTTPException(status_code=400, detail="Invalid JSON format.") except Exception as e: # Replace with your specific parsing exception diff --git a/prez/models/__init__.py b/prez/models/__init__.py deleted file mode 100644 index 680c5e73..00000000 --- a/prez/models/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from prez.models.search_method import SearchMethod diff --git a/prez/models/cql_query.py b/prez/models/cql_query.py deleted file mode 100644 index 46409041..00000000 --- a/prez/models/cql_query.py +++ /dev/null @@ -1 +0,0 @@ -# TODO diff --git a/prez/models/listing.py b/prez/models/listing.py deleted file mode 100644 index 3b39b4c0..00000000 --- a/prez/models/listing.py +++ /dev/null @@ -1,47 +0,0 @@ -from typing import Optional, FrozenSet - -from pydantic import BaseModel, field_validator -from rdflib import URIRef, Literal, XSD - -from prez.cache import endpoints_graph_cache -from prez.reference_data.prez_ns import ONT - - -class ListingModel(BaseModel): - class Config: - arbitrary_types_allowed = True - - uri: Optional[ - URIRef - ] = None # this is the URI of the focus object (if listing by membership) - classes: Optional[FrozenSet[URIRef]] = None - endpoint_uri: Optional[URIRef] = None - selected_class: Optional[FrozenSet[URIRef]] = None - profile: Optional[URIRef] = None - top_level_listing: Optional[bool] = False - - def __hash__(self): - return hash(self.uri) - - # @field_validator(): - # def populate(cls, values): - # endpoint_uri_str = values.get("endpoint_uri") - # if endpoint_uri_str: - # endpoint_uri = URIRef(endpoint_uri_str) - # values["classes"] = frozenset( - # [ - # klass - # for klass in endpoints_graph_cache.objects( - # endpoint_uri, ONT.deliversClasses, None - # ) - # ] - # ) - # values["base_class"] = endpoints_graph_cache.value( - # endpoint_uri, ONT.baseClass - # ) - # tll_text = endpoints_graph_cache.value(endpoint_uri, ONT.isTopLevelEndpoint) - # if tll_text == Literal("true", datatype=XSD.boolean): - # values["top_level_listing"] = True - # else: - # values["top_level_listing"] = False - # return values diff --git a/prez/models/profiles_item.py b/prez/models/profiles_item.py deleted file mode 100644 index f29b1585..00000000 --- a/prez/models/profiles_item.py +++ /dev/null @@ -1,47 +0,0 @@ -from typing import Optional -from typing import Set - -from pydantic import BaseModel, root_validator -from rdflib import URIRef, PROF, Namespace - -from prez.cache import profiles_graph_cache -from prez.config import settings -from prez.services.curie_functions import get_uri_for_curie_id, get_curie_id_for_uri -from prez.services.model_methods import get_classes - -PREZ = Namespace("https://prez.dev/") - - -class ProfileItem(BaseModel): - class Config: - arbitrary_types_allowed = True - - uri: Optional[URIRef] = None - classes: Optional[Set[URIRef]] = frozenset([PROF.Profile]) - id: Optional[str] = None - link_constructor: str = "/profiles" - label: str = None - - # base_class: Optional[URIRef] = None - # url_path: Optional[str] = None - selected_class: Optional[URIRef] = None - - def __hash__(self): - return hash(self.uri) - - # @root_validator - def populate(self): - uri = self.uri - id = self.id - assert uri or id - if 
id: - self.uri = get_uri_for_curie_id(id) - elif uri: - self.id = get_curie_id_for_uri(uri) - q = f"""SELECT ?class {{ <{self.uri}> a ?class }}""" - r = profiles_graph_cache.query(q) - if len(r.bindings) > 0: - self.classes = frozenset([prof.get("class") for prof in r.bindings]) - label = self.label - if not label: - self.label = settings.label_predicates[0] diff --git a/prez/models/profiles_listings.py b/prez/models/profiles_listings.py index 0694d40f..a5d5750b 100644 --- a/prez/models/profiles_listings.py +++ b/prez/models/profiles_listings.py @@ -1,8 +1,8 @@ from typing import Optional, FrozenSet -from pydantic import BaseModel, root_validator +from pydantic import BaseModel from rdflib import Namespace -from rdflib.namespace import URIRef, PROF +from rdflib.namespace import URIRef PREZ = Namespace("https://prez.dev/") @@ -18,20 +18,3 @@ class Config: selected_class: Optional[URIRef] = None link_constructor: Optional[str] top_level_listing: Optional[bool] = True - - # @root_validator - # def populate(cls, values): - # url_path = values.get("url_path") - # if url_path.startswith("/v/"): - # values["base_class"] = PREZ.VocPrezProfile - # values["link_constructor"] = "/v/profiles" - # elif url_path.startswith("/c/"): - # values["base_class"] = PREZ.CatPrezProfile - # values["link_constructor"] = "/c/profiles" - # elif url_path.startswith("/s/"): - # values["base_class"] = PREZ.SpacePrezProfile - # values["link_constructor"] = "/s/profiles" - # else: - # values["base_class"] = PROF.Profile - # values["link_constructor"] = "/profiles" - # return values diff --git a/prez/models/search_method.py b/prez/models/search_method.py deleted file mode 100644 index 796dc1f9..00000000 --- a/prez/models/search_method.py +++ /dev/null @@ -1,41 +0,0 @@ -from string import Template - -from pydantic import BaseModel -from rdflib import URIRef, Namespace, Literal - -from pydantic import BaseConfig - - -PREZ = Namespace("https://prez.dev/") - - -class SearchMethod(BaseModel): - class Config: - arbitrary_types_allowed = True - - uri: URIRef = None - identifier: Literal = None - title: Literal = None - template_query: Template = None - top_level_listing: bool = False - search_query: bool = True - selected_class: URIRef = None - populated_query: str = None - link_constructor: str = "/object?uri=" - - def __hash__(self): - return hash(self.uri) - - def populate_query( - self, term, limit, offset, focus_to_filter, filter_to_focus, predicates - ): - self.populated_query = self.template_query.substitute( - { - "TERM": term, - "LIMIT": limit, - "OFFSET": offset, - "FOCUS_TO_FILTER": focus_to_filter, - "FILTER_TO_FOCUS": filter_to_focus, - "PREDICATES": predicates, - } - ) diff --git a/prez/models/vocprez_listings.py b/prez/models/vocprez_listings.py deleted file mode 100644 index 2bf2ff3c..00000000 --- a/prez/models/vocprez_listings.py +++ /dev/null @@ -1,34 +0,0 @@ -from typing import Optional, FrozenSet - -from pydantic import BaseModel, root_validator -from rdflib import Namespace -from rdflib.namespace import URIRef, SKOS - -PREZ = Namespace("https://prez.dev/") - - -class VocabMembers(BaseModel): - url_path: str - uri: Optional[URIRef] = None - base_class: Optional[URIRef] - classes: Optional[FrozenSet[URIRef]] - selected_class: Optional[URIRef] = None - link_constructor: Optional[str] - top_level_listing: Optional[bool] = True - - @root_validator - def populate(cls, values): - url_path = values.get("url_path") - if url_path == "/v/collection": - values["base_class"] = SKOS.Collection - values["link_constructor"] 
= "/v/collection"
-            values["classes"] = frozenset([PREZ.VocPrezCollectionList])
-        elif url_path == "/v/scheme":
-            values["base_class"] = SKOS.ConceptScheme
-            values["link_constructor"] = "/v/scheme"
-            values["classes"] = frozenset([PREZ.SchemesList])
-        elif url_path == "/v/vocab":
-            values["base_class"] = SKOS.ConceptScheme
-            values["link_constructor"] = "/v/vocab"
-            values["classes"] = frozenset([PREZ.SchemesList])
-        return values
diff --git a/prez/queries/vocprez.py b/prez/queries/vocprez.py
deleted file mode 100644
index bfe0d1c1..00000000
--- a/prez/queries/vocprez.py
+++ /dev/null
@@ -1,192 +0,0 @@
-from textwrap import dedent
-
-from jinja2 import Template
-
-
-def get_concept_scheme_query(iri: str, bnode_depth: int) -> str:
-    query = Template(
-        """
-        PREFIX prez: <https://prez.dev/>
-        PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
-
-        CONSTRUCT {
-            ?iri ?p ?o .
-
-            {% if bnode_depth > 0 +%}
-            ?iri ?p0 ?o0 .
-            {% endif %}
-
-            {% for i in range(bnode_depth) %}
-            ?o{{ i }} ?p{{ i + 1 }} ?o{{ i + 1 }} .
-            {% endfor %}
-
-            ?iri prez:childrenCount ?childrenCount .
-        }
-        WHERE {
-            BIND(<{{ iri }}> as ?iri)
-            ?iri ?p ?o .
-            FILTER (?p != skos:hasTopConcept)
-
-            {
-                SELECT (COUNT(?topConcept) AS ?childrenCount)
-                WHERE {
-                    BIND(<{{ iri }}> as ?iri)
-                    ?iri skos:hasTopConcept ?topConcept .
-                }
-            }
-
-            {% if bnode_depth > 0 %}
-            ?iri ?p0 ?o0 .
-            {% endif %}
-
-            {% for i in range(bnode_depth) %}
-            ?o{{ i }} ?p{{ i + 1 }} ?o{{ i + 1 }} .
-            FILTER (isBlank(?0o))
-            {% endfor %}
-        }
-        """
-    ).render(iri=iri, bnode_depth=bnode_depth)
-
-    return dedent(query)
-
-
-# TODO query appears to erroneously create TopConcepts where they don't exist - perhaps from the optional statements
-# see test_concept_scheme_top_concepts test w/ borehole-purpose-no-children
-def get_concept_scheme_top_concepts_query(iri: str, page: int, per_page: int) -> str:
-    query = Template(
-        """
-        PREFIX prez: <https://prez.dev/>
-        PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
-        PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
-        PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
-
-        CONSTRUCT {
-            ?concept skos:prefLabel ?label .
-            ?concept prez:childrenCount ?narrowerChildrenCount .
-            ?iri prez:childrenCount ?childrenCount .
-            ?iri skos:hasTopConcept ?concept .
-            ?iri rdf:type ?type .
-            ?concept rdf:type ?conceptType .
-        }
-        WHERE {
-            BIND(<{{ iri }}> as ?iri)
-            OPTIONAL {
-                ?iri skos:hasTopConcept ?concept .
-                ?concept skos:prefLabel ?label .
-            }
-            OPTIONAL {
-                ?concept skos:topConceptOf ?iri .
-                ?concept skos:prefLabel ?label .
-            }
-            ?iri rdf:type ?type .
-            ?concept rdf:type ?conceptType .
-
-            {
-                SELECT (COUNT(?childConcept) AS ?childrenCount)
-                WHERE {
-                    BIND(<{{ iri }}> as ?iri)
-                    ?iri skos:hasTopConcept ?childConcept .
-                }
-            }
-
-            {
-                SELECT ?concept ?label (COUNT(?narrowerConcept) AS ?narrowerChildrenCount)
-                WHERE {
-                    BIND(<{{ iri }}> as ?iri)
-
-                    OPTIONAL {
-                        ?iri skos:hasTopConcept ?concept .
-                        ?concept skos:prefLabel ?label .
-                    }
-                    OPTIONAL {
-                        ?concept skos:topConceptOf ?iri .
-                        ?concept skos:prefLabel ?label .
-                    }
-
-                    OPTIONAL {
-                        ?narrowerConcept skos:broader ?concept .
-                    }
-                    OPTIONAL {
-                        ?concept skos:narrower ?narrowerConcept .
-                    }
-                }
-                GROUP BY ?concept ?label
-                ORDER BY str(?label)
-                LIMIT {{ limit }}
-                OFFSET {{ offset }}
-            }
-        }
-        """
-    ).render(iri=iri, limit=per_page, offset=(page - 1) * per_page)
-
-    return dedent(query)
-
-
-def get_concept_narrowers_query(iri: str, page: int, per_page: int) -> str:
-    query = Template(
-        """
-        PREFIX prez: <https://prez.dev/>
-        PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
-        PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
-        PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
-
-        CONSTRUCT {
-            ?concept skos:prefLabel ?label .
-            ?concept prez:childrenCount ?narrowerChildrenCount .
-            ?iri prez:childrenCount ?childrenCount .
-            ?iri skos:narrower ?concept .
-            ?iri rdf:type ?type .
-            ?concept rdf:type ?conceptType .
-        }
-        WHERE {
-            BIND(<{{ iri }}> as ?iri)
-            OPTIONAL {
-                ?concept skos:broader ?iri .
-                ?concept skos:prefLabel ?label .
-            }
-            OPTIONAL {
-                ?iri skos:narrower ?concept .
-                ?concept skos:prefLabel ?label .
-            }
-            ?iri rdf:type ?type .
-            ?concept rdf:type ?conceptType .
-
-            {
-                SELECT (COUNT(?childConcept) AS ?childrenCount)
-                WHERE {
-                    BIND(<{{ iri }}> as ?iri)
-                    ?childConcept skos:broader ?iri .
-                }
-            }
-
-            {
-                SELECT ?concept ?label (COUNT(?narrowerConcept) AS ?narrowerChildrenCount)
-                WHERE {
-                    BIND(<{{ iri }}> as ?iri)
-
-                    OPTIONAL {
-                        ?concept skos:broader ?iri .
-                        ?concept skos:prefLabel ?label .
-                    }
-                    OPTIONAL {
-                        ?iri skos:narrower ?concept .
-                        ?concept skos:prefLabel ?label .
-                    }
-
-                    OPTIONAL {
-                        ?narrowerConcept skos:broader ?concept .
-                    }
-                    OPTIONAL {
-                        ?concept skos:narrower ?narrowerConcept .
-                    }
-                }
-                GROUP BY ?concept ?label
-                ORDER BY str(?label)
-                LIMIT {{ limit }}
-                OFFSET {{ offset }}
-            }
-        }
-        """
-    ).render(iri=iri, limit=per_page, offset=(page - 1) * per_page)
-
-    return dedent(query)
diff --git a/prez/reference_data/endpoints/catprez_endpoints.ttl b/prez/reference_data/endpoints/catprez_endpoints.ttl
deleted file mode 100644
index 83435292..00000000
--- a/prez/reference_data/endpoints/catprez_endpoints.ttl
+++ /dev/null
@@ -1,40 +0,0 @@
-PREFIX dcat: <http://www.w3.org/ns/dcat#>
-PREFIX dcterms: <http://purl.org/dc/terms/>
-PREFIX endpoint: <https://prez.dev/endpoint/>
-PREFIX ont: <https://prez.dev/ont/>
-PREFIX prez: <https://prez.dev/>
-PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
-PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
-PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
-
-endpoint:catprez-home a ont:Endpoint ;
-    ont:endpointTemplate "/c" ;
-.
-
-endpoint:catalog-listing a ont:ListingEndpoint ;
-    ont:deliversClasses prez:CatalogList ;
-    ont:isTopLevelEndpoint "true"^^xsd:boolean ;
-    ont:baseClass dcat:Catalog ;
-    ont:endpointTemplate "/c/catalogs" ;
-.
-
-endpoint:catalog a ont:ObjectEndpoint ;
-    ont:parentEndpoint endpoint:catalog-listing ;
-    ont:deliversClasses dcat:Catalog ;
-    ont:endpointTemplate "/c/catalogs/$object" ;
-.
-
-endpoint:resource-listing a ont:ListingEndpoint ;
-    ont:parentEndpoint endpoint:catalog ;
-    ont:deliversClasses prez:ResourceList ;
-    ont:baseClass dcat:Resource ;
-    ont:endpointTemplate "/c/catalogs/$parent_1/resources" ;
-    ont:ParentToFocusRelation dcterms:hasPart ;
-.
-
-endpoint:resource a ont:ObjectEndpoint ;
-    ont:parentEndpoint endpoint:resource-listing ;
-    ont:deliversClasses dcat:Resource ;
-    ont:endpointTemplate "/c/catalogs/$parent_1/resources/$object" ;
-    ont:ParentToFocusRelation dcterms:hasPart ;
-.
diff --git a/prez/reference_data/new_endpoints/cql_endpoints.ttl b/prez/reference_data/endpoints/cql_endpoints.ttl
similarity index 90%
rename from prez/reference_data/new_endpoints/cql_endpoints.ttl
rename to prez/reference_data/endpoints/cql_endpoints.ttl
index cedf762e..40414293 100644
--- a/prez/reference_data/new_endpoints/cql_endpoints.ttl
+++ b/prez/reference_data/endpoints/cql_endpoints.ttl
@@ -12,7 +12,6 @@ PREFIX shext: <http://example.com/shacl-extension#>
 endpoint:cql a ont:ListingEndpoint ;
     ont:endpointTemplate "/cql" ;
     ont:deliversClasses prez:CQLObjectList ; # required to determine the correct profile for ConnegP
-    sh:targetClass rdfs:Class ; # required for query construction
     shext:limit 20 ;
     shext:offset 0 ;
 .
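
For reference, a minimal sketch of exercising the /cql endpoint this file defines, once the patch is applied. The base URL, the httpx client, and the example CQL2 JSON filter are assumptions for illustration only; the GET variant matches cql_get_parser_dependency (filter read from the ?q= query parameter) and the POST variant matches cql_post_parser_dependency (filter read from the JSON body) — see tests/data/cql/input/ for real filter examples:

    import json

    import httpx  # assumed HTTP client; any client works

    # hypothetical CQL2 JSON equality filter
    cql = {"op": "=", "args": [{"property": "dcterms:identifier"}, "sandgate"]}

    # GET: filter passed as a JSON-encoded ?q= parameter
    r = httpx.get("http://localhost:8000/cql", params={"q": json.dumps(cql)})

    # POST: filter passed as the request body
    r = httpx.post("http://localhost:8000/cql", json=cql)
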
diff --git a/prez/reference_data/new_endpoints/ogc_catprez_endpoints.ttl b/prez/reference_data/endpoints/ogc_catprez_endpoints.ttl
similarity index 100%
rename from prez/reference_data/new_endpoints/ogc_catprez_endpoints.ttl
rename to prez/reference_data/endpoints/ogc_catprez_endpoints.ttl
diff --git a/prez/reference_data/new_endpoints/ogc_spaceprez_endpoints.ttl b/prez/reference_data/endpoints/ogc_spaceprez_endpoints.ttl
similarity index 100%
rename from prez/reference_data/new_endpoints/ogc_spaceprez_endpoints.ttl
rename to prez/reference_data/endpoints/ogc_spaceprez_endpoints.ttl
diff --git a/prez/reference_data/new_endpoints/ogc_vocprez_endpoints.ttl b/prez/reference_data/endpoints/ogc_vocprez_endpoints.ttl
similarity index 96%
rename from prez/reference_data/new_endpoints/ogc_vocprez_endpoints.ttl
rename to prez/reference_data/endpoints/ogc_vocprez_endpoints.ttl
index 5b93bc05..3c2b22cf 100644
--- a/prez/reference_data/new_endpoints/ogc_vocprez_endpoints.ttl
+++ b/prez/reference_data/endpoints/ogc_vocprez_endpoints.ttl
@@ -83,7 +83,7 @@ endpoint:concept-object a ont:ObjectEndpoint ;
 
 endpoint:narrowers a ont:ListingEndpoint ;
     ont:parentEndpoint endpoint:concept-object ;
-    ont:deliversClasses skos:Concept ;
+    ont:deliversClasses prez:ConceptList ;
     sh:rule [
         sh:object "?hasChildren" ;
@@ -101,13 +101,13 @@ endpoint:narrowers
             ] ;
     shext:limit 20 ;
     shext:offset 0 ;
-    shext:orderBy [ sh:path rdfs:label ] ;
+    shext:orderBy [ sh:path skos:prefLabel ] ;
     sh:targetClass skos:Concept ;
    ont:endpointTemplate "/v/catalogs/$parent_3/collections/$parent_2/items/$parent_1/narrowers" ;
 .
 
 endpoint:top-concepts a ont:ListingEndpoint ;
-    ont:deliversClasses skos:Concept ;
+    ont:deliversClasses prez:ConceptList ;
     ont:parentEndpoint endpoint:vocab-object ;
     sh:rule [ sh:subject sh:this ;
               sh:predicate prez:hasChildren ;
@@ -120,7 +120,7 @@ endpoint:top-concepts a ont:ListingEndpoint ;
               """ ] ;
     shext:limit 20 ;
     shext:offset 0 ;
-    shext:orderBy [ sh:path rdfs:label ] ;
+    shext:orderBy [ sh:path skos:prefLabel ] ;
     sh:targetClass skos:Concept ;
     ont:endpointTemplate "/v/catalogs/$parent_2/collections/$parent_1/top-concepts" ;
 .
\ No newline at end of file
diff --git a/prez/reference_data/endpoints/profiles_endpoints.ttl b/prez/reference_data/endpoints/profiles_endpoints.ttl
deleted file mode 100644
index 0464dd6c..00000000
--- a/prez/reference_data/endpoints/profiles_endpoints.ttl
+++ /dev/null
@@ -1,48 +0,0 @@
-PREFIX dcat: <http://www.w3.org/ns/dcat#>
-PREFIX dcterms: <http://purl.org/dc/terms/>
-PREFIX endpoint: <https://prez.dev/endpoint/>
-PREFIX ont: <https://prez.dev/ont/>
-PREFIX prez: <https://prez.dev/>
-PREFIX prof: <http://www.w3.org/ns/dx/prof/>
-PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
-PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
-PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
-
-endpoint:profiles-home a ont:Endpoint ;
-    ont:endpointTemplate "/profiles" ;
-.
-
-
-endpoint:profiles-listing a ont:Endpoint ;
-    ont:deliversClasses prez:ProfilesList ;
-    ont:isTopLevelEndpoint "true"^^xsd:boolean ;
-    ont:baseClass prof:Profile ;
-    ont:endpointTemplate "/profiles" ;
-.
-
-endpoint:profile a ont:Endpoint ;
-    ont:parentEndpoint endpoint:profiles-listing ;
-    ont:deliversClasses prof:Profile ;
-    ont:endpointTemplate "/profiles/$object" ;
-.
-
-endpoint:catprez-profiles-listing a ont:Endpoint ;
-    ont:deliversClasses prez:ProfilesList ;
-    ont:isTopLevelEndpoint "true"^^xsd:boolean ;
-    ont:baseClass prez:CatPrezProfile ;
-    ont:endpointTemplate "/c/profiles" ;
-.
-
-endpoint:spaceprez-profiles-listing a ont:Endpoint ;
-    ont:deliversClasses prez:ProfilesList ;
-    ont:isTopLevelEndpoint "true"^^xsd:boolean ;
-    ont:baseClass prez:SpacePrezProfile ;
-    ont:endpointTemplate "/s/profiles" ;
-.
-
-endpoint:vocprez-profiles-listing a ont:Endpoint ;
-    ont:deliversClasses prez:ProfilesList ;
-    ont:isTopLevelEndpoint "true"^^xsd:boolean ;
-    ont:baseClass prez:VocPrezProfile ;
-    ont:endpointTemplate "/s/profiles" ;
-.
diff --git a/prez/reference_data/endpoints/spaceprez_endpoints.ttl b/prez/reference_data/endpoints/spaceprez_endpoints.ttl
deleted file mode 100644
index c404fe34..00000000
--- a/prez/reference_data/endpoints/spaceprez_endpoints.ttl
+++ /dev/null
@@ -1,54 +0,0 @@
-PREFIX dcat: <http://www.w3.org/ns/dcat#>
-PREFIX endpoint: <https://prez.dev/endpoint/>
-PREFIX geo: <http://www.opengis.net/ont/geosparql#>
-PREFIX ont: <https://prez.dev/ont/>
-PREFIX prez: <https://prez.dev/>
-PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
-PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
-
-endpoint:spaceprez-home a ont:Endpoint ;
-    ont:endpointTemplate "/s" ;
-.
-
-endpoint:dataset-listing a ont:ListingEndpoint ;
-    ont:deliversClasses prez:DatasetList ;
-    ont:baseClass dcat:Dataset ;
-    ont:isTopLevelEndpoint "true"^^xsd:boolean ;
-    ont:endpointTemplate "/s/datasets" ;
-.
-
-endpoint:dataset a ont:ObjectEndpoint ;
-    ont:parentEndpoint endpoint:dataset-listing ;
-    ont:deliversClasses dcat:Dataset ;
-    ont:endpointTemplate "/s/datasets/$object" ;
-.
-
-endpoint:feature-collection-listing a ont:ListingEndpoint ;
-    ont:parentEndpoint endpoint:dataset ;
-    ont:baseClass geo:FeatureCollection ;
-    ont:deliversClasses prez:FeatureCollectionList ;
-    ont:endpointTemplate "/s/datasets/$parent_1/collections" ;
-    ont:ParentToFocusRelation rdfs:member ;
-.
-
-endpoint:feature-collection a ont:ObjectEndpoint ;
-    ont:parentEndpoint endpoint:feature-collection-listing ;
-    ont:deliversClasses geo:FeatureCollection ;
-    ont:endpointTemplate "/s/datasets/$parent_1/collections/$object" ;
-    ont:ParentToFocusRelation rdfs:member ;
-.
-
-endpoint:feature-listing a ont:ListingEndpoint ;
-    ont:parentEndpoint endpoint:feature-collection ;
-    ont:baseClass geo:Feature ;
-    ont:deliversClasses prez:FeatureList ;
-    ont:endpointTemplate "/s/datasets/$parent_2/collections/$parent_1/items" ;
-    ont:ParentToFocusRelation rdfs:member ;
-.
-
-endpoint:feature a ont:ObjectEndpoint ;
-    ont:parentEndpoint endpoint:feature-listing ;
-    ont:deliversClasses geo:Feature ;
-    ont:endpointTemplate "/s/datasets/$parent_2/collections/$parent_1/items/$object" ;
-    ont:ParentToFocusRelation rdfs:member ;
-.
diff --git a/prez/reference_data/new_endpoints/system_endpoints.ttl b/prez/reference_data/endpoints/system_endpoints.ttl
similarity index 66%
rename from prez/reference_data/new_endpoints/system_endpoints.ttl
rename to prez/reference_data/endpoints/system_endpoints.ttl
index befc3be9..969e1ee6 100644
--- a/prez/reference_data/new_endpoints/system_endpoints.ttl
+++ b/prez/reference_data/endpoints/system_endpoints.ttl
@@ -21,4 +21,16 @@ endpoint:profile-object a ont:ObjectEndpoint ;
     sh:targetNode "$object" ;
     ont:deliversClasses prof:Profile ;
     ont:endpointTemplate "/profiles/$object" ;
+.
+
+endpoint:alt-profiles-listing a ont:ListingEndpoint ;
+    ont:deliversClasses prez:ProfilesList ;
+    sh:targetClass prof:Profile ;
+    sh:target [ sh:select """SELECT ?focus_node
+        WHERE {
+            ?focus_node a prof:Profile ;
+                $selectedClass .
+        }""" ] ;
+    shext:limit 20 ;
+    shext:offset 0 ;
 .
\ No newline at end of file
diff --git a/prez/reference_data/endpoints/vocprez_endpoints.ttl b/prez/reference_data/endpoints/vocprez_endpoints.ttl
deleted file mode 100644
index 5b132657..00000000
--- a/prez/reference_data/endpoints/vocprez_endpoints.ttl
+++ /dev/null
@@ -1,50 +0,0 @@
-PREFIX endpoint: <https://prez.dev/endpoint/>
-PREFIX ont: <https://prez.dev/ont/>
-PREFIX prez: <https://prez.dev/>
-PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
-PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
-PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
-
-endpoint:vocprez-home a ont:Endpoint ;
-    ont:endpointTemplate "/v" ;
-.
-
-endpoint:collection-listing a ont:ListingEndpoint ;
-    ont:deliversClasses prez:VocPrezCollectionList ;
-    ont:baseClass skos:Collection ;
-    ont:isTopLevelEndpoint "true"^^xsd:boolean ;
-    ont:endpointTemplate "/v/collection" ;
-.
-
-endpoint:collection a ont:ObjectEndpoint ;
-    ont:parentEndpoint endpoint:collection-listing ;
-    ont:deliversClasses skos:Collection ;
-    ont:endpointTemplate "/v/collection/$object" ;
-.
-
-endpoint:collection-concept a ont:ObjectEndpoint ;
-    ont:parentEndpoint endpoint:collection ;
-    ont:deliversClasses skos:Concept ;
-    ont:endpointTemplate "/v/collection/$parent_1/$object" ;
-    ont:ParentToFocusRelation skos:member ;
-.
-
- endpoint:vocabs-listing a ont:ListingEndpoint ;
-    ont:deliversClasses prez:SchemesList ;
-    ont:baseClass skos:ConceptScheme ;
-    ont:isTopLevelEndpoint "true"^^xsd:boolean ;
-    ont:endpointTemplate "/v/vocab" ;
-.
-
-endpoint:vocab a ont:ObjectEndpoint ;
-    ont:parentEndpoint endpoint:vocabs-listing ;
-    ont:deliversClasses skos:ConceptScheme ;
-    ont:endpointTemplate "/v/vocab/$object" ;
-.
-
-endpoint:vocab-concept a ont:ObjectEndpoint ;
-    ont:parentEndpoint endpoint:vocab ;
-    ont:deliversClasses skos:Concept ;
-    ont:endpointTemplate "/v/vocab/$parent_1/$object" ;
-    ont:FocusToParentRelation skos:inScheme ;
-.
diff --git a/prez/reference_data/new_endpoints/vocprez_endpoints.ttl.unused b/prez/reference_data/endpoints/vocprez_endpoints.ttl.unused
similarity index 100%
rename from prez/reference_data/new_endpoints/vocprez_endpoints.ttl.unused
rename to prez/reference_data/endpoints/vocprez_endpoints.ttl.unused
diff --git a/prez/reference_data/profiles/_dd_to_refactor.ttl b/prez/reference_data/profiles/dd.ttl
similarity index 84%
rename from prez/reference_data/profiles/_dd_to_refactor.ttl
rename to prez/reference_data/profiles/dd.ttl
index 1793dfb9..1d774ffa 100644
--- a/prez/reference_data/profiles/_dd_to_refactor.ttl
+++ b/prez/reference_data/profiles/dd.ttl
@@ -22,10 +22,22 @@ PREFIX shext: <http://example.com/shacl-extension#>
     dcterms:identifier "dd"^^xsd:token ;
     dcterms:title "Drop-Down List" ;
     altr-ext:constrainsClass
+        prez:CatalogList ,
         prez:SchemesList ,
         prez:VocPrezCollectionList ,
+        dcat:Catalog ,
         skos:ConceptScheme ,
         skos:Collection ;
+    altr-ext:hasNodeShape [
+        a sh:NodeShape ;
+        sh:targetClass dcat:Catalog ;
+        altr-ext:focusToChild dcterms:hasPart ;
+    ] ;
+    altr-ext:hasNodeShape [
+        a sh:NodeShape ;
+        sh:targetClass prez:CatalogList ;
+        altr-ext:containerClass dcat:Catalog ;
+    ] ;
     altr-ext:hasNodeShape [
         a sh:NodeShape ;
         sh:targetClass skos:ConceptScheme ;
@@ -42,7 +54,7 @@
         a sh:NodeShape ;
         sh:targetClass prez:SchemesList ;
         altr-ext:containerClass skos:ConceptScheme ;
-        altr-ext:relativeProperties skos:definition, dcterms:publisher, reg:status ;
+        altr-ext:relativeProperties skos:definition, dcterms:publisher, reg:status , skos:prefLabel ;
     ] ;
     altr-ext:hasNodeShape [
         a sh:NodeShape ;
@@ -52,6 +64,7 @@
     ] ;
     altr-ext:hasDefaultResourceFormat "application/json" ;
     altr-ext:hasResourceFormat
+        "text/turtle" ,
         "application/json" ,
         "text/csv" .
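
The reworked drop-down profile now also covers catalogs. A usage sketch (base URL and loaded data are assumptions; "dd" is the profile's dcterms:identifier above, and JSON remains its default format, with text/csv and the newly added text/turtle as alternatives):

    import httpx  # assumed HTTP client

    r = httpx.get(
        "http://localhost:8000/c/catalogs",
        params={"_profile": "dd", "_mediatype": "application/json"},
    )
    items = r.json()  # flat entries suitable for populating a drop-down widget
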
diff --git a/prez/reference_data/profiles/ogc_records_profile.ttl b/prez/reference_data/profiles/ogc_records_profile.ttl
index 1c135242..fd34ab32 100644
--- a/prez/reference_data/profiles/ogc_records_profile.ttl
+++ b/prez/reference_data/profiles/ogc_records_profile.ttl
@@ -25,7 +25,7 @@ prez:OGCRecordsProfile
     altr-ext:hasDefaultResourceFormat "text/anot+turtle" ;
     altr-ext:hasNodeShape [
         a sh:NodeShape ;
-        sh:targetClass prez:CatalogList , prez:ConceptsList ;
+        sh:targetClass prez:CatalogList , prez:ConceptList ;
         altr-ext:hasDefaultProfile prez:OGCListingProfile
     ] , [
         a sh:NodeShape ;
diff --git a/prez/reference_data/profiles/prez_default_profiles.ttl b/prez/reference_data/profiles/prez_default_profiles.ttl
index 05a6e3cb..ea6a82d0 100644
--- a/prez/reference_data/profiles/prez_default_profiles.ttl
+++ b/prez/reference_data/profiles/prez_default_profiles.ttl
@@ -23,6 +23,10 @@ PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
         a sh:NodeShape ;
         sh:targetClass prez:SPARQLQuery ;
         altr-ext:hasDefaultProfile
+    ] , [
+        a sh:NodeShape ;
+        sh:targetClass prez:AltProfilesList ;
+        altr-ext:hasDefaultProfile
     ]
 .
@@ -31,7 +35,7 @@
     dcterms:identifier "openobj"^^xsd:token ;
     dcterms:description "An open profile for objects which will return all direct properties for a resource." ;
     dcterms:title "Open profile" ;
-    altr-ext:constrainsClass prez:SPARQLQuery , prof:Profile ;
+    altr-ext:constrainsClass prez:SPARQLQuery , prof:Profile , prez:SearchResult ;
     altr-ext:hasDefaultResourceFormat "text/anot+turtle" ;
     altr-ext:hasResourceFormat
         "application/ld+json" ,
@@ -68,7 +72,7 @@
 .
 
 altr-ext:alt-profile
-    a prof:Profile ;
+    a prof:Profile , sh:NodeShape ;
     dcterms:description "The representation of the resource that lists all other representations (profiles and Media Types)" ;
     dcterms:identifier "alt"^^xsd:token ;
     dcterms:title "Alternates Profile" ;
@@ -87,13 +91,27 @@
         dcat:Dataset ,
         prez:DatasetList ,
         dcat:Catalog ,
-        dcat:Resource ,
         prez:CatalogList ,
+        dcat:Resource ,
+        prez:ResourceList ,
         skos:ConceptScheme ,
+        prez:SchemesList ,
         skos:Concept ,
+        prez:ConceptList ,
         skos:Collection ,
-        prez:SchemesList ,
-        prez:VocPrezCollectionList ;
+        prez:VocPrezCollectionList ,
+        prez:AltProfilesList ;
+    sh:property [
+        sh:path (
+            sh:union (
+                altr-ext:hasResourceFormat
+                altr-ext:hasDefaultResourceFormat
+                dcterms:description
+                dcterms:title
+                dcterms:identifier
+            )
+        )
+    ] ;
 .
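
With altr-ext:alt-profile now also typed as a sh:NodeShape and constraining prez:AltProfilesList, alternate representations are served via the alt-profiles-listing endpoint added in system_endpoints.ttl above, rather than the return_profiles() helper deleted in the next hunk. A request sketch (base URL is an assumption; "alt" is the profile's dcterms:identifier):

    import httpx  # assumed HTTP client

    # lists the other profiles and media types available for this listing
    r = httpx.get(
        "http://localhost:8000/c/catalogs",
        params={"_profile": "alt", "_mediatype": "text/turtle"},
    )
    print(r.text)
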
diff --git a/prez/renderers/renderer.py b/prez/renderers/renderer.py index 6f4b2d84..f03ecd74 100644 --- a/prez/renderers/renderer.py +++ b/prez/renderers/renderer.py @@ -1,24 +1,18 @@ import io import json import logging -from typing import Optional from connegp import RDF_MEDIATYPES, RDF_SERIALIZER_TYPES_MAP from fastapi import status from fastapi.exceptions import HTTPException from fastapi.responses import StreamingResponse -from rdflib import Graph, URIRef, Namespace, RDF -from starlette.requests import Request -from starlette.responses import Response +from rdflib import Graph, URIRef, RDF -from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo -from prez.models.profiles_item import ProfileItem from prez.renderers.csv_renderer import render_csv_dropdown from prez.renderers.json_renderer import render_json_dropdown, NotFoundError from prez.services.curie_functions import get_curie_id_for_uri from prez.sparql.methods import Repo from prez.sparql.objects_listings import ( - generate_item_construct, get_annotation_properties, ) @@ -134,37 +128,3 @@ async def return_annotated_rdf( graph.bind("prez", "https://prez.dev/") return graph - - -async def return_profiles( - classes: frozenset, - repo: Repo, - request: Optional[Request] = None, - prof_and_mt_info: Optional[ProfilesMediatypesInfo] = None, -) -> Response: - from prez.cache import profiles_graph_cache - - if not prof_and_mt_info: - prof_and_mt_info = ProfilesMediatypesInfo(request=request, classes=classes) - if not request: - request = prof_and_mt_info.request - items = [ - ProfileItem(uri=str(uri), url_path=str(request.url.path)) - for uri in prof_and_mt_info.avail_profile_uris - ] - queries = [ - generate_item_construct(profile, URIRef("http://kurrawong.net/profile/prez")) - for profile in items - ] - g = Graph(bind_namespaces="rdflib") - g.bind("altr-ext", Namespace("http://www.w3.org/ns/dx/conneg/altr-ext#")) - for q in queries: - g += profiles_graph_cache.query(q) - return await return_from_graph( - g, - prof_and_mt_info.mediatype, - prof_and_mt_info.profile, - prof_and_mt_info.profile_headers, - prof_and_mt_info.selected_class, - repo, - ) diff --git a/prez/routers/cql.py b/prez/routers/cql.py index c4cafd82..e6a6ea5f 100644 --- a/prez/routers/cql.py +++ b/prez/routers/cql.py @@ -3,8 +3,13 @@ from fastapi import APIRouter, Request, Depends from rdflib import URIRef -from prez.dependencies import get_repo, cql_parser_dependency -from prez.services.listings import listing_function_new +from prez.dependencies import ( + get_repo, + cql_post_parser_dependency, + get_system_repo, + cql_get_parser_dependency, +) +from prez.services.listings import listing_function from prez.sparql.methods import Repo router = APIRouter(tags=["ogcrecords"]) @@ -12,21 +17,47 @@ @router.post( path="/cql", - name="https://prez.dev/endpoint/cql", + name="https://prez.dev/endpoint/cql-post", ) async def cql_post_endpoint( request: Request, - parsed_cql: Optional[dict] = Depends(cql_parser_dependency), + cql_parser: Optional[dict] = Depends(cql_post_parser_dependency), + page: int = 1, + per_page: int = 20, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), +): + endpoint_uri = URIRef("https://prez.dev/endpoint/cql") + return await listing_function( + request=request, + repo=repo, + system_repo=system_repo, + endpoint_uri=endpoint_uri, + page=page, + per_page=per_page, + cql_parser=cql_parser, + ) + + +@router.get( + path="/cql", + name="https://prez.dev/endpoint/cql-get", +) +async def cql_get_endpoint( + 
request: Request, + cql_parser: Optional[dict] = Depends(cql_get_parser_dependency), page: int = 1, per_page: int = 20, repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), ): endpoint_uri = URIRef("https://prez.dev/endpoint/cql") - return await listing_function_new( + return await listing_function( request=request, repo=repo, + system_repo=system_repo, endpoint_uri=endpoint_uri, page=page, per_page=per_page, - cql=parsed_cql, + cql_parser=cql_parser, ) diff --git a/prez/routers/object.py b/prez/routers/object.py index a357871d..664914d1 100644 --- a/prez/routers/object.py +++ b/prez/routers/object.py @@ -5,7 +5,7 @@ from prez.dependencies import get_repo from prez.queries.object import object_inbound_query, object_outbound_query from prez.routers.identifier import get_iri_route -from prez.services.objects import object_function_new +from prez.services.objects import object_function router = APIRouter(tags=["Object"]) @@ -70,4 +70,4 @@ async def count_route( @router.get("/object", summary="Object", name="https://prez.dev/endpoint/object") async def object_route(request: Request, repo=Depends(get_repo)): - return await object_function_new(request, repo=repo) + return await object_function(request, repo=repo) diff --git a/prez/routers/ogc_catprez.py b/prez/routers/ogc_catprez.py index c6ae279e..b4c5152b 100644 --- a/prez/routers/ogc_catprez.py +++ b/prez/routers/ogc_catprez.py @@ -1,12 +1,13 @@ from typing import Optional from fastapi import APIRouter, Request, Depends +from fastapi.responses import PlainTextResponse from rdflib import URIRef -from prez.dependencies import get_repo, cql_parser_dependency, get_system_repo +from prez.dependencies import get_repo, cql_post_parser_dependency, get_system_repo from prez.services.curie_functions import get_uri_for_curie_id -from prez.services.listings import listing_function_new -from prez.services.objects import object_function_new +from prez.services.listings import listing_function +from prez.services.objects import object_function from prez.sparql.methods import Repo router = APIRouter(tags=["ogccatprez"]) @@ -21,6 +22,11 @@ } +@router.get("/c", summary="CatPrez Home") +async def catprez_home(): + return PlainTextResponse("CatPrez Home") + + @router.get( "/c/catalogs", summary="List Top Level Catalogs", @@ -30,12 +36,20 @@ async def catalog_list( request: Request, page: Optional[int] = 1, per_page: Optional[int] = 20, + search_term: Optional[str] = None, repo: Repo = Depends(get_repo), system_repo: Repo = Depends(get_system_repo), ): + search_term = request.query_params.get("q") endpoint_uri = URIRef(request.scope.get("route").name) - return await listing_function_new( - request, repo, system_repo, endpoint_uri, page, per_page + return await listing_function( + request, + repo, + system_repo, + endpoint_uri, + page, + per_page, + search_term=search_term, ) @@ -48,13 +62,23 @@ async def vocab_list( request: Request, page: Optional[int] = 1, per_page: Optional[int] = 20, + search_term: Optional[str] = None, repo: Repo = Depends(get_repo), system_repo: Repo = Depends(get_system_repo), ): + search_term = request.query_params.get("q") + parent_uri = get_uri_for_curie_id(request.path_params["catalogId"]) endpoint_uri = URIRef(request.scope.get("route").name) - return await listing_function_new( - request, repo, system_repo, endpoint_uri, page, per_page, parent_uri + return await listing_function( + request, + repo, + system_repo, + endpoint_uri, + page, + per_page, + parent_uri, + search_term=search_term, ) @@ -67,13 +91,22 
@@ async def concept_list( request: Request, page: Optional[int] = 1, per_page: Optional[int] = 20, + search_term: Optional[str] = None, repo: Repo = Depends(get_repo), system_repo: Repo = Depends(get_system_repo), ): + search_term = request.query_params.get("q") parent_uri = get_uri_for_curie_id(request.path_params["collectionId"]) endpoint_uri = URIRef(request.scope.get("route").name) - return await listing_function_new( - request, repo, system_repo, endpoint_uri, page, per_page, parent_uri + return await listing_function( + request, + repo, + system_repo, + endpoint_uri, + page, + per_page, + parent_uri, + search_term=search_term, ) @@ -90,7 +123,7 @@ async def catalog_object( request_url = request.scope["path"] endpoint_uri = URIRef(request.scope.get("route").name) object_uri = get_uri_for_curie_id(request.path_params["catalogId"]) - return await object_function_new( + return await object_function( request, endpoint_uri, object_uri, request_url, repo, system_repo ) @@ -108,7 +141,7 @@ async def catalog_object( request_url = request.scope["path"] endpoint_uri = URIRef(request.scope.get("route").name) object_uri = get_uri_for_curie_id(request.path_params["collectionId"]) - return await object_function_new( + return await object_function( request, endpoint_uri, object_uri, request_url, repo, system_repo ) @@ -126,6 +159,6 @@ async def catalog_object( request_url = request.scope["path"] endpoint_uri = URIRef(request.scope.get("route").name) object_uri = get_uri_for_curie_id(request.path_params["itemId"]) - return await object_function_new( + return await object_function( request, endpoint_uri, object_uri, request_url, repo, system_repo ) diff --git a/prez/routers/ogc_spaceprez.py b/prez/routers/ogc_spaceprez.py index 84b865a5..2d1ab2f7 100644 --- a/prez/routers/ogc_spaceprez.py +++ b/prez/routers/ogc_spaceprez.py @@ -2,12 +2,12 @@ from fastapi import APIRouter, Request, Depends from rdflib import Namespace -from starlette.responses import PlainTextResponse +from fastapi.responses import PlainTextResponse from prez.dependencies import get_repo, get_system_repo from prez.services.curie_functions import get_uri_for_curie_id -from prez.services.listings import listing_function_new -from prez.services.objects import object_function_new +from prez.services.listings import listing_function +from prez.services.objects import object_function from prez.sparql.methods import Repo router = APIRouter(tags=["SpacePrez"]) @@ -16,7 +16,7 @@ @router.get("/s", summary="SpacePrez Home") -async def spaceprez_profiles(): +async def spaceprez_home(): return PlainTextResponse("SpacePrez Home") @@ -31,15 +31,18 @@ async def list_datasets( system_repo: Repo = Depends(get_system_repo), page: Optional[int] = 1, per_page: Optional[int] = 20, + search_term: Optional[str] = None, ): + search_term = request.query_params.get("q") endpoint_uri = SP_EP["dataset-listing"] - return await listing_function_new( + return await listing_function( request=request, repo=repo, system_repo=system_repo, endpoint_uri=endpoint_uri, page=page, per_page=per_page, + search_term=search_term, ) @@ -55,10 +58,12 @@ async def list_feature_collections( system_repo: Repo = Depends(get_system_repo), page: Optional[int] = 1, per_page: Optional[int] = 20, + search_term: Optional[str] = None, ): + search_term = request.query_params.get("q") endpoint_uri = SP_EP["feature-collection-listing"] dataset_uri = get_uri_for_curie_id(dataset_curie) - return await listing_function_new( + return await listing_function( request=request, repo=repo, 
system_repo=system_repo, @@ -66,6 +71,7 @@ async def list_feature_collections( page=page, per_page=per_page, parent_uri=dataset_uri, + search_term=search_term, ) @@ -82,10 +88,12 @@ async def list_features( system_repo: Repo = Depends(get_system_repo), page: Optional[int] = 1, per_page: Optional[int] = 20, + search_term: Optional[str] = None, ): + search_term = request.query_params.get("q") collection_uri = get_uri_for_curie_id(collection_curie) endpoint_uri = SP_EP["feature-listing"] - return await listing_function_new( + return await listing_function( request=request, repo=repo, system_repo=system_repo, @@ -93,6 +101,7 @@ async def list_features( page=page, per_page=per_page, parent_uri=collection_uri, + search_term=search_term, ) @@ -108,7 +117,7 @@ async def dataset_item( request_url = request.scope["path"] endpoint_uri = SP_EP["dataset-object"] dataset_uri = get_uri_for_curie_id(dataset_curie) - return await object_function_new( + return await object_function( request=request, endpoint_uri=endpoint_uri, uri=dataset_uri, @@ -133,7 +142,7 @@ async def feature_collection_item( request_url = request.scope["path"] endpoint_uri = SP_EP["feature-collection-object"] collection_uri = get_uri_for_curie_id(collection_curie) - return await object_function_new( + return await object_function( request=request, endpoint_uri=endpoint_uri, uri=collection_uri, @@ -159,7 +168,7 @@ async def feature_item( request_url = request.scope["path"] endpoint_uri = SP_EP["feature-object"] feature_uri = get_uri_for_curie_id(feature_curie) - return await object_function_new( + return await object_function( request=request, endpoint_uri=endpoint_uri, uri=feature_uri, diff --git a/prez/routers/ogc_vocprez.py b/prez/routers/ogc_vocprez.py index a2e9c33e..21661c76 100644 --- a/prez/routers/ogc_vocprez.py +++ b/prez/routers/ogc_vocprez.py @@ -1,12 +1,13 @@ from typing import Optional from fastapi import APIRouter, Request, Depends +from fastapi.responses import PlainTextResponse from rdflib import URIRef from prez.dependencies import get_repo, get_system_repo from prez.services.curie_functions import get_uri_for_curie_id -from prez.services.listings import listing_function_new -from prez.services.objects import object_function_new +from prez.services.listings import listing_function +from prez.services.objects import object_function from prez.sparql.methods import Repo router = APIRouter(tags=["ogcvocprez"]) @@ -32,12 +33,20 @@ async def catalog_list( request: Request, page: Optional[int] = 1, per_page: Optional[int] = 20, + search_term: Optional[str] = None, repo: Repo = Depends(get_repo), system_repo: Repo = Depends(get_system_repo), ): + search_term = request.query_params.get("q") endpoint_uri = URIRef(request.scope.get("route").name) - return await listing_function_new( - request, repo, system_repo, endpoint_uri, page, per_page + return await listing_function( + request, + repo, + system_repo, + endpoint_uri, + page, + per_page, + search_term=search_term, ) @@ -50,13 +59,22 @@ async def vocab_list( request: Request, page: Optional[int] = 1, per_page: Optional[int] = 20, + search_term: Optional[str] = None, repo: Repo = Depends(get_repo), system_repo: Repo = Depends(get_system_repo), ): + search_term = request.query_params.get("q") parent_uri = get_uri_for_curie_id(request.path_params["catalogId"]) endpoint_uri = URIRef(request.scope.get("route").name) - return await listing_function_new( - request, repo, system_repo, endpoint_uri, page, per_page, parent_uri + return await listing_function( + request, + repo, + 
system_repo, + endpoint_uri, + page, + per_page, + parent_uri, + search_term=search_term, ) @@ -69,13 +87,22 @@ async def concept_list( request: Request, page: Optional[int] = 1, per_page: Optional[int] = 20, + search_term: Optional[str] = None, repo: Repo = Depends(get_repo), system_repo: Repo = Depends(get_system_repo), ): + search_term = request.query_params.get("q") parent_uri = get_uri_for_curie_id(request.path_params["collectionId"]) endpoint_uri = URIRef(request.scope.get("route").name) - return await listing_function_new( - request, repo, system_repo, endpoint_uri, page, per_page, parent_uri + return await listing_function( + request, + repo, + system_repo, + endpoint_uri, + page, + per_page, + parent_uri, + search_term=search_term, ) @@ -84,36 +111,59 @@ async def concept_list( summary="List Top Concepts", name=ogc_endpoints["top-concepts"], ) -async def concept_list( +async def top_concepts( request: Request, page: Optional[int] = 1, per_page: Optional[int] = 20, + search_term: Optional[str] = None, repo: Repo = Depends(get_repo), system_repo: Repo = Depends(get_system_repo), ): + search_term = request.query_params.get("q") parent_uri = get_uri_for_curie_id(request.path_params["collectionId"]) endpoint_uri = URIRef(request.scope.get("route").name) - return await listing_function_new( - request, repo, system_repo, endpoint_uri, page, per_page, parent_uri + return await listing_function( + request, + repo, + system_repo, + endpoint_uri, + page, + per_page, + parent_uri, + search_term=search_term, ) +@router.get("/v", summary="VocPrez Home") +async def vocprez_home(): + return PlainTextResponse("VocPrez Home") + + @router.get( "/v/catalogs/{catalogId}/collections/{collectionId}/items/{itemId}/narrowers", summary="List Narrower Concepts", name=ogc_endpoints["narrowers"], ) -async def concept_list( +async def narrowers( request: Request, page: Optional[int] = 1, per_page: Optional[int] = 20, + search_term: Optional[str] = None, repo: Repo = Depends(get_repo), system_repo: Repo = Depends(get_system_repo), ): + search_term = request.query_params.get("q") parent_uri = get_uri_for_curie_id(request.path_params["itemId"]) endpoint_uri = URIRef(request.scope.get("route").name) - return await listing_function_new( - request, repo, system_repo, endpoint_uri, page, per_page, parent_uri + return await listing_function( + request, + repo, + system_repo, + endpoint_uri, + page, + per_page, + parent_uri, + search_term=search_term, ) @@ -130,7 +180,7 @@ async def catalog_object( request_url = request.scope["path"] endpoint_uri = URIRef(request.scope.get("route").name) object_uri = get_uri_for_curie_id(request.path_params["catalogId"]) - return await object_function_new( + return await object_function( request, endpoint_uri, object_uri, request_url, repo, system_repo ) @@ -148,7 +198,7 @@ async def catalog_object( request_url = request.scope["path"] endpoint_uri = URIRef(request.scope.get("route").name) object_uri = get_uri_for_curie_id(request.path_params["collectionId"]) - return await object_function_new( + return await object_function( request, endpoint_uri, object_uri, request_url, repo, system_repo ) @@ -166,6 +216,6 @@ async def catalog_object( request_url = request.scope["path"] endpoint_uri = URIRef(request.scope.get("route").name) object_uri = get_uri_for_curie_id(request.path_params["itemId"]) - return await object_function_new( + return await object_function( request, endpoint_uri, object_uri, request_url, repo, system_repo ) diff --git a/prez/routers/profiles.py 
b/prez/routers/profiles.py index 440ddb25..95b42cd9 100644 --- a/prez/routers/profiles.py +++ b/prez/routers/profiles.py @@ -2,8 +2,8 @@ from prez.dependencies import get_system_repo from prez.services.curie_functions import get_uri_for_curie_id -from prez.services.listings import listing_function_new -from prez.services.objects import object_function_new +from prez.services.listings import listing_function +from prez.services.objects import object_function from rdflib import URIRef router = APIRouter(tags=["Profiles"]) @@ -36,7 +36,7 @@ async def profiles( repo=Depends(get_system_repo), ): endpoint_uri = URIRef(request.scope.get("route").name) - return await listing_function_new( + return await listing_function( request=request, repo=repo, system_repo=repo, @@ -55,7 +55,7 @@ async def profile(request: Request, profile_curie: str, repo=Depends(get_system_ request_url = request.scope["path"] endpoint_uri = URIRef(request.scope.get("route").name) profile_uri = get_uri_for_curie_id(profile_curie) - return await object_function_new( + return await object_function( request=request, endpoint_uri=endpoint_uri, uri=profile_uri, diff --git a/prez/routers/search.py b/prez/routers/search.py index 272a9de6..b2590844 100644 --- a/prez/routers/search.py +++ b/prez/routers/search.py @@ -1,87 +1,52 @@ -import re - from fastapi import APIRouter, Request, Depends -from rdflib import Literal, URIRef -from starlette.responses import PlainTextResponse +from fastapi.responses import PlainTextResponse +from rdflib import URIRef, Literal +from rdflib.namespace import RDF -from prez.cache import search_methods from prez.config import settings from prez.dependencies import get_repo from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo from prez.reference_data.prez_ns import PREZ from prez.renderers.renderer import return_from_graph from prez.services.link_generation import _add_prez_links -from prez.services.curie_functions import get_uri_for_curie_id from prez.sparql.methods import Repo -from prez.sparql.objects_listings import generate_item_construct +from prez.sparql.search_query import SearchQuery router = APIRouter(tags=["Search"]) -@router.get("/search", summary="Global Search") +@router.get("/search", summary="Search") async def search( request: Request, repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_repo), ): - term = request.query_params.get("term") - limit = request.query_params.get("limit", 20) + term = request.query_params.get("q") + limit = request.query_params.get("limit", 10) offset = request.query_params.get("offset", 0) - foc_2_filt, filt_2_foc = extract_qsa_params(request.query_params) - if not term: - return PlainTextResponse( - status_code=400, - content="A search_methods term must be provided as a query string argument (?term=)", - ) - selected_method = determine_search_method(request) - if Literal(selected_method) not in search_methods.keys(): - return PlainTextResponse( - status_code=400, - content=f'Search method "{selected_method}" not found. Available methods are: ' - f"{', '.join([str(m) for m in search_methods.keys()])}", - ) - search_query = search_methods[Literal(selected_method)].copy() - filter_to_focus_str = "" - focus_to_filter_str = "" - - if filt_2_foc: - for idx, filter_pair in enumerate(filt_2_foc, start=1): - filter_values = " ".join(f"<{f}>" for f in filter_pair[1].split(",")) - filter_to_focus_str += f"""?filter_to_focus_{idx} <{filter_pair[0]}> ?search_result_uri. 
- VALUES ?filter_to_focus_{idx} {{ {filter_values} }}""" - - if foc_2_filt: - for idx, filter_pair in enumerate(foc_2_filt, start=1): - filter_values = " ".join(f"<{f}>" for f in filter_pair[1].split(",")) - focus_to_filter_str += f"""?search_result_uri <{filter_pair[0]}> ?focus_to_filter_{idx}. - VALUES ?focus_to_filter_{idx} {{ {filter_values} }}""" - - predicates = ( - settings.label_predicates - + settings.description_predicates - + settings.provenance_predicates - ) - predicates_sparql_string = " ".join(f"<{p}>" for p in predicates) - search_query.populate_query( - term, - limit, - offset, - filter_to_focus_str, - focus_to_filter_str, - predicates_sparql_string, - ) - - full_query = generate_item_construct( - search_query, URIRef("https://prez.dev/profile/open-object") - ) - - graph, _ = await repo.send_queries([full_query], []) - graph.bind("prez", "https://prez.dev/") + pred_vals = request.query_params.get("predicates", settings.label_predicates) + query = SearchQuery( + search_term=term, + limit=limit, + offset=offset, + pred_vals=pred_vals, + ).render() + graph, _ = await repo.send_queries([query], []) + + count = len(list(graph.subjects(RDF.type, PREZ.SearchResult))) + graph.add((PREZ.SearchResult, PREZ["count"], Literal(count))) prof_and_mt_info = ProfilesMediatypesInfo( request=request, classes=frozenset([PREZ.SearchResult]) ) + + req_mt = prof_and_mt_info.req_mediatypes + if req_mt: + if list(req_mt)[0] == "application/sparql-query": + return PlainTextResponse(query, media_type="application/sparql-query") + if "anot+" in prof_and_mt_info.mediatype: - await _add_prez_links(graph, repo) + await _add_prez_links(graph, repo, system_repo) return await return_from_graph( graph, @@ -91,42 +56,3 @@ async def search( selected_class=prof_and_mt_info.selected_class, repo=repo, ) - - -def extract_qsa_params(query_string_keys): - focus_to_filter = [] - filter_to_focus = [] - - for key in query_string_keys: - if "focus-to-filter[" in key: - predicate = re.search(r"\[(.*?)]", key).group(1) - val = query_string_keys[key] - if not predicate.startswith(("http://", "https://")): - predicate = get_uri_for_curie_id(predicate) - if not val.startswith(("http://", "https://")) and ":" in val: - val = get_uri_for_curie_id(val) - focus_to_filter.append((predicate, val)) - elif "filter-to-focus[" in key: - predicate = re.search(r"\[(.*?)]", key).group(1) - val = query_string_keys[key] - if not predicate.startswith(("http://", "https://")): - predicate = get_uri_for_curie_id(predicate) - if not val.startswith(("http://", "https://")) and ":" in val: - val = get_uri_for_curie_id(val) - filter_to_focus.append((predicate, val)) - - return focus_to_filter, filter_to_focus - - -def determine_search_method(request): - """Returns the search_methods method to use based on the request headers""" - specified_method = request.query_params.get("method") - if specified_method: - return specified_method - else: - return get_default_search_methods() - - -def get_default_search_methods(): - # TODO return from profiles - return "default" diff --git a/prez/services/app_service.py b/prez/services/app_service.py index ac82722a..f4e9e6dc 100644 --- a/prez/services/app_service.py +++ b/prez/services/app_service.py @@ -138,9 +138,7 @@ async def add_prefixes_to_prefix_graph(repo: Repo): async def create_endpoints_graph(repo) -> Graph: flavours = ["CatPrez", "SpacePrez", "VocPrez"] added_anything = False - for f in (Path(__file__).parent.parent / "reference_data/new_endpoints").glob( - "*.ttl" - ): + for f in 
(Path(__file__).parent.parent / "reference_data/endpoints").glob("*.ttl"): # Check if file starts with any of the flavour prefixes matching_flavour = next( (flavour for flavour in flavours if f.name.startswith(flavour.lower())), diff --git a/prez/services/generate_profiles.py b/prez/services/generate_profiles.py index 59e01090..0c17464a 100644 --- a/prez/services/generate_profiles.py +++ b/prez/services/generate_profiles.py @@ -87,7 +87,7 @@ def get_profiles_and_mediatypes( ) log.debug(f"ConnegP query: {query}") response = profiles_graph_cache.query(query) - log.debug(f"ConnegP response:{results_pretty_printer(response)}") + # log.debug(f"ConnegP response:{results_pretty_printer(response)}") if len(response.bindings[0]) == 0: raise NoProfilesException(classes) top_result = response.bindings[0] diff --git a/prez/services/listings.py b/prez/services/listings.py index e1b3c002..9ff914d7 100644 --- a/prez/services/listings.py +++ b/prez/services/listings.py @@ -1,20 +1,22 @@ import logging -import time from typing import Optional from fastapi import Request -from rdflib import SH -from rdflib import URIRef +from fastapi.responses import PlainTextResponse +from rdflib import URIRef, Literal +from rdflib.namespace import PROF, RDF, SH from prez.cache import profiles_graph_cache, endpoints_graph_cache +from prez.config import settings from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo -from prez.reference_data.prez_ns import ONT -from prez.renderers.renderer import return_from_graph, return_profiles +from prez.reference_data.prez_ns import ONT, PREZ +from prez.renderers.renderer import return_from_graph from prez.services.link_generation import _add_prez_links from prez.sparql.methods import Repo from prez.sparql.objects_listings import ( temp_listing_count, ) +from prez.sparql.search_query import SearchQuery from temp.cql2sparql import CQLParser from temp.grammar import SubSelect from temp.shacl2sparql import SHACLParser @@ -22,7 +24,7 @@ log = logging.getLogger(__name__) -async def listing_function_new( +async def listing_function( request: Request, repo: Repo, system_repo: Repo, @@ -30,7 +32,8 @@ async def listing_function_new( page: int = 1, per_page: int = 20, parent_uri: Optional[URIRef] = None, - cql: dict = None, + cql_parser: CQLParser = None, + search_term: Optional[str] = None, ): queries = [] # class is from endpoint definition. 
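
One behavioural note on the listing_function rework in the hunks that follow: when the requested media type is application/sparql-query, the generated query is returned as plain text instead of being executed, which is useful for debugging the SHACL-derived SPARQL. A sketch, with the base URL and endpoint path as assumptions:

    import httpx  # assumed HTTP client

    # returns the generated CONSTRUCT query itself, not its RDF results
    r = httpx.get(
        "http://localhost:8000/s/datasets",
        params={"_mediatype": "application/sparql-query"},
    )
    print(r.text)
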
@@ -43,17 +46,17 @@ async def listing_function_new( prof_and_mt_info.profile, ) + runtime_values = {} if prof_and_mt_info.profile == URIRef( "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" ): - return await return_profiles( - classes=frozenset(selected_class), prof_and_mt_info=prof_and_mt_info - ) - runtime_values = { - "limit": per_page, - "offset": (page - 1) * per_page, - "parent_1": parent_uri, - } + endpoint_uri = URIRef("https://prez.dev/endpoint/system/alt-profiles-listing") + runtime_values["selectedClass"] = listing_class + + runtime_values["limit"] = per_page + runtime_values["offset"] = (page - 1) * per_page + runtime_values["parent_1"] = parent_uri + shacl_parser = SHACLParser( runtime_values, endpoints_graph_cache, @@ -62,38 +65,63 @@ async def listing_function_new( selected_profile, ) - if cql: - cql_parser = CQLParser(cql_json=cql) + if cql_parser: cql_parser.parse() cql_select_ggps = cql_parser.ggps_inner_select shacl_parser.additional_ggps = cql_select_ggps shacl_parser.generate_sparql() - queries.append(shacl_parser.sparql) + main_query = shacl_parser.sparql + + if search_term: + subselect = find_instances(shacl_parser.main_where_ggps, SubSelect)[ + 0 + ] # assume there's only one subselect + search_query = SearchQuery( + search_term=search_term, + pred_vals=settings.label_predicates, + additional_ss=subselect, + limit=runtime_values["limit"], + offset=runtime_values["offset"], + ).render() + queries.append(search_query) + else: + queries.append(main_query) + req_mt = prof_and_mt_info.req_mediatypes + if req_mt: + if list(req_mt)[0] == "application/sparql-query": + return PlainTextResponse(queries[0], media_type="application/sparql-query") # add a count query if it's an annotated mediatype - if "anot+" in prof_and_mt_info.mediatype: + if "anot+" in prof_and_mt_info.mediatype and not search_term: # pull the subselect out of the query string subselect = find_instances(shacl_parser.main_where_ggps, SubSelect)[ 0 ] # assume there's only one subselect subselect.solution_modifier = None # remove the limit and offset from the subselect so that we can get a count - queries.append(temp_listing_count(subselect, target_class)) - - # if selected_class in [ - # URIRef("https://prez.dev/ProfilesList"), - # PROF.Profile, - # ]: - # list_graph = profiles_graph_cache.query(item_members_query).graph - # count_graph = profiles_graph_cache.query(count_query).graph - # item_graph = list_graph + count_graph - # else: - item_graph, _ = await repo.send_queries( - rdf_queries=queries, - tabular_queries=[], - ) - if "anot+" in prof_and_mt_info.mediatype: - await _add_prez_links(item_graph, repo, system_repo) + if prof_and_mt_info.profile == URIRef( + "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" + ): + count_class = PROF.Profile + else: + count_class = target_class + if count_class: # target_class may be unknown (None) for queries involving CQL + queries.append(temp_listing_count(subselect, count_class)) + + if prof_and_mt_info.profile == URIRef( + "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" + ): + item_graph, _ = await system_repo.send_queries(queries, []) + if "anot+" in prof_and_mt_info.mediatype: + await _add_prez_links(item_graph, system_repo, system_repo) + else: + item_graph, _ = await repo.send_queries(queries, []) + if "anot+" in prof_and_mt_info.mediatype: + await _add_prez_links(item_graph, repo, system_repo) + # count search results - hard to do in SPARQL as the SELECT part of the query is NOT aggregated + if search_term: + count = 
len(list(item_graph.subjects(RDF.type, PREZ.SearchResult))) + item_graph.add((PREZ.SearchResult, PREZ["count"], Literal(count))) return await return_from_graph( item_graph, prof_and_mt_info.mediatype, diff --git a/prez/services/model_methods.py b/prez/services/model_methods.py index 950a813f..02a3d0b4 100644 --- a/prez/services/model_methods.py +++ b/prez/services/model_methods.py @@ -17,9 +17,7 @@ async def get_classes( SELECT ?class {{ <{uri}> a ?class }} """ - # a = time.time() _, r = await repo.send_queries([], [(uri, q)]) - # log.debug(f"Time to query: {q}\n{time.time() - a}") tabular_result = r[0] # should only be one result - only one query sent if endpoint != URIRef("https://prez.dev/endpoint/object"): endpoint_classes = list( diff --git a/prez/services/objects.py b/prez/services/objects.py index 4e6e0107..3b03d0ac 100644 --- a/prez/services/objects.py +++ b/prez/services/objects.py @@ -1,13 +1,14 @@ import logging from fastapi import Request +from fastapi.responses import PlainTextResponse from rdflib import URIRef from prez.cache import profiles_graph_cache, endpoints_graph_cache from prez.models.object_item import ObjectItem from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo from prez.reference_data.prez_ns import PREZ, EP -from prez.renderers.renderer import return_from_graph, return_profiles +from prez.renderers.renderer import return_from_graph from prez.services.link_generation import ( _add_prez_links, _add_prez_link_to_collection_page, @@ -19,7 +20,7 @@ log = logging.getLogger(__name__) -async def object_function_new( +async def object_function( request: Request, endpoint_uri: URIRef, uri: URIRef, @@ -43,15 +44,16 @@ async def object_function_new( profile=prof_and_mt_info.profile, selected_class=prof_and_mt_info.selected_class, ) + + # handle alternate profiles + runtime_values = {} if prof_and_mt_info.profile == URIRef( "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" ): - return await return_profiles( - classes=frozenset(object_item.selected_class), - prof_and_mt_info=prof_and_mt_info, - repo=repo, - ) - runtime_values = {"object": uri} + endpoint_uri = URIRef("https://prez.dev/endpoint/system/alt-profiles-listing") + runtime_values["selectedClass"] = object_item.selected_class + + runtime_values["object"] = uri shacl_parser = SHACLParser( runtime_values, endpoints_graph_cache, @@ -61,10 +63,15 @@ async def object_function_new( ) shacl_parser.generate_sparql() query = shacl_parser.sparql - log.debug(f"Object Query: {query}") + req_mt = prof_and_mt_info.req_mediatypes + if req_mt: + if list(req_mt)[0] == "application/sparql-query": + return PlainTextResponse(query, media_type="application/sparql-query") - if object_item.selected_class == URIRef("http://www.w3.org/ns/dx/prof/Profile"): - item_graph = profiles_graph_cache.query(query).graph + if prof_and_mt_info.profile == URIRef( + "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" + ): + item_graph, _ = await system_repo.send_queries([query], []) else: item_graph, _ = await repo.send_queries([query], []) if "anot+" in prof_and_mt_info.mediatype: diff --git a/prez/services/search_methods.py b/prez/services/search_methods.py deleted file mode 100644 index c5d73117..00000000 --- a/prez/services/search_methods.py +++ /dev/null @@ -1,50 +0,0 @@ -import logging -from pathlib import Path -from string import Template - -from rdflib import Graph, RDF, DCTERMS, Literal, RDFS - -from prez.cache import search_methods -from prez.models import SearchMethod -from prez.reference_data.prez_ns import PREZ - -log 
= logging.getLogger(__name__) - - -async def get_all_search_methods(repo): - await get_local_search_methods() - await get_remote_search_methods(repo) - - -async def get_remote_search_methods(repo): - remote_search_methods_query = f""" - PREFIX prez: <{PREZ}> - CONSTRUCT {{?s ?p ?o}} - WHERE {{ ?s a prez:SearchMethod ; - ?p ?o . }} - """ - graph, _ = await repo.send_queries([remote_search_methods_query], []) - if len(graph) > 1: - await generate_search_methods(graph) - log.info(f"Remote search methods found and added.") - else: - log.info("No remote search methods found.") - - -async def get_local_search_methods(): - for f in (Path(__file__).parent.parent / "reference_data/search_methods").glob( - "*.ttl" - ): - g = Graph().parse(f, format="ttl") - await generate_search_methods(g) - - -async def generate_search_methods(g): - uri = g.value(None, RDF.type, PREZ.SearchMethod) - identifier = g.value(uri, DCTERMS.identifier, None) - title: Literal = g.value(uri, RDFS.label, None) - template_query = Template(g.value(uri, RDF.value, None)) - sm = SearchMethod( - uri=uri, identifier=identifier, title=title, template_query=template_query - ) - search_methods.update({identifier: sm}) diff --git a/prez/sparql/methods.py b/prez/sparql/methods.py index 3ad0118b..09b162f8 100644 --- a/prez/sparql/methods.py +++ b/prez/sparql/methods.py @@ -90,12 +90,8 @@ async def tabular_query_to_table(self, query: str, context: URIRef = None): The optional context parameter allows an identifier to be supplied with the query, such that multiple results can be distinguished from each other. """ - a = time.time() - log.debug(msg=f"query sent:{a} || {context} || {query}") response = await self._send_query(query, "application/sparql-results+json") await response.aread() - log.debug(msg=f"response received || {context} {time.time()}") - log.debug(msg=f"time diff: {time.time() -a }") return context, response.json()["results"]["bindings"] async def sparql( diff --git a/prez/sparql/objects_listings.py b/prez/sparql/objects_listings.py index d856ad55..e3010c85 100644 --- a/prez/sparql/objects_listings.py +++ b/prez/sparql/objects_listings.py @@ -1,18 +1,12 @@ import logging -from functools import lru_cache from itertools import chain from textwrap import dedent -from typing import List, Optional, Tuple, Dict, FrozenSet +from typing import List, Tuple, Dict, FrozenSet from rdflib import Graph, URIRef, Namespace, Literal -from prez.cache import endpoints_graph_cache, tbox_cache, profiles_graph_cache +from prez.cache import tbox_cache, profiles_graph_cache from prez.config import settings -from prez.models import SearchMethod -from prez.models.listing import ListingModel -from prez.models.profiles_item import ProfileItem -from prez.models.profiles_listings import ProfilesMembers -from prez.reference_data.prez_ns import ONT from prez.services.curie_functions import get_uri_for_curie_id from temp.grammar import SubSelect @@ -22,356 +16,6 @@ PREZ = Namespace("https://prez.dev/") -def generate_listing_construct( - focus_item, - profile: URIRef, - page: Optional[int] = 1, - per_page: Optional[int] = 20, - ordering_predicate: URIRef = None, -): - """ - For a given URI, finds items with the specified relation(s). - Generates a SPARQL construct query for a listing of items - """ - if not ordering_predicate: - ordering_predicate = settings.label_predicates[0] - - if isinstance(focus_item, (ProfilesMembers, ListingModel)): # listings can include - # "context" in the same way objects can, using include/exclude predicates etc. 
- ( - include_predicates, - exclude_predicates, - inverse_predicates, - sequence_predicates, - ) = get_item_predicates(profile, focus_item.selected_class) - else: # for objects, this context is already included in the separate "generate_item_construct" function, so these - # predicates are explicitly set to None here to avoid duplication. - include_predicates = ( - exclude_predicates - ) = inverse_predicates = sequence_predicates = None - ( - child_to_focus, - parent_to_focus, - focus_to_child, - focus_to_parent, - relative_properties, - ) = get_listing_predicates(profile, focus_item.selected_class) - if ( - focus_item.uri - # and not focus_item.top_level_listing # if it's a top level class we don't need a listing relation - we're - # # searching by class - and not child_to_focus - and not parent_to_focus - and not focus_to_child - and not focus_to_parent - # do not need to check relative properties - they will only be used if one of the other listing relations - # are defined - ): - log.warning( - f"Requested listing of objects related to {focus_item.uri}, however the profile {profile} does not" - f" define any listing relations for this for this class, for example focus to child." - ) - return None - uri_or_tl_item = ( - "?top_level_item" if focus_item.top_level_listing else f"<{focus_item.uri}>" - ) # set the focus - - # item to a variable if it's a top level listing (this will utilise "class based" listing, where objects are listed - # based on them being an instance of a class), else use the URI of the "parent" off of which members will be listed. - # TODO collapse this to an inline expression below; include change in both object and listing queries - sequence_construct, sequence_construct_where = generate_sequence_construct( - sequence_predicates, uri_or_tl_item - ) - query = dedent( - f""" - PREFIX dcterms: - PREFIX prez: - PREFIX rdf: - PREFIX rdfs: - PREFIX xsd: - PREFIX skos: - - CONSTRUCT {{ - {f'{uri_or_tl_item} a <{focus_item.base_class}> .{chr(10)}' if focus_item.top_level_listing else ""}\ - {sequence_construct} - {f'{uri_or_tl_item} ?focus_to_child ?child_item .{chr(10)}' if focus_to_child else ""}\ - {f'{uri_or_tl_item} ?focus_to_parent ?parent_item .{chr(10)}' if focus_to_parent else ""}\ - {f'?child_to_focus_s ?child_to_focus {uri_or_tl_item} .{chr(10)}' if child_to_focus else ""}\ - {f'?parent_to_focus_s ?parent_to_focus {uri_or_tl_item} .{chr(10)}' if parent_to_focus else ""}\ - {generate_relative_properties("construct", relative_properties, child_to_focus, parent_to_focus, - focus_to_child, focus_to_parent)}\ - {f"{uri_or_tl_item} ?p ?o ." if include_predicates else ""}\ - }} - WHERE {{ - {f'{uri_or_tl_item} a <{focus_item.base_class}> .{chr(10)}' if focus_item.top_level_listing else ""}\ - {f'OPTIONAL {{ {uri_or_tl_item} ?p ?o .' 
if include_predicates else ""}\ - {f'{generate_include_predicates(include_predicates)} }}' if include_predicates else ""} \ - {sequence_construct_where}\ - {generate_focus_to_x_predicates(uri_or_tl_item, focus_to_child, focus_to_parent)} \ - {generate_x_to_focus_predicates(uri_or_tl_item, child_to_focus, parent_to_focus)} {chr(10)} \ - {generate_relative_properties("select", relative_properties, child_to_focus, parent_to_focus, - focus_to_child, focus_to_parent)}\ - {{ - SELECT ?top_level_item ?child_item - WHERE {{ - {f'{uri_or_tl_item} a <{focus_item.base_class}> .{chr(10)}' if focus_item.top_level_listing else generate_focus_to_x_predicates(uri_or_tl_item, focus_to_child, focus_to_parent)}\ - - {f''' - OPTIONAL {{ - {f'{uri_or_tl_item} <{ordering_predicate}> ?label .' if focus_item.top_level_listing else ""} - }} - ''' if settings.order_lists_by_label else ""} - }} - {f''' - {'ORDER BY ASC(?label)' if ordering_predicate else "ORDER BY ?top_level_item"} - ''' if settings.order_lists_by_label else ""} - {f"LIMIT {per_page}{chr(10)}" - f"OFFSET {(page - 1) * per_page}" if page is not None and per_page is not None else ""} - }} - }} - - """ - ).strip() - - log.debug(f"Listing construct query for {focus_item} is:\n{query}") - return query - - -@lru_cache(maxsize=128) -def generate_item_construct(focus_item, profile: URIRef): - search_query = ( - True if isinstance(focus_item, SearchMethod) else False - ) # generates a listing of search results - ( - include_predicates, - exclude_predicates, - inverse_predicates, - sequence_predicates, - ) = get_item_predicates(profile, focus_item.selected_class) - bnode_depth = profiles_graph_cache.value( - profile, - ALTREXT.hasBNodeDepth, - None, - default=2, - ) - if search_query: - uri_or_search_item = "?search_result_uri" - else: - uri_or_search_item = f"<{focus_item.uri}>" - - sequence_construct, sequence_construct_where = generate_sequence_construct( - sequence_predicates, uri_or_search_item - ) - - construct_query = dedent( - f""" PREFIX dcterms: - PREFIX rdfs: - PREFIX prez: - CONSTRUCT {{ - {f'{search_query_construct()} {chr(10)}' if search_query else ""}\ - \t{uri_or_search_item} ?p ?o1 . - {sequence_construct} - {f'{chr(9)}?s ?inverse_predicate {uri_or_search_item} .' if inverse_predicates else ""} - {generate_bnode_construct(bnode_depth)} \ - \n}} - WHERE {{ - {{ {f'{focus_item.populated_query}' if search_query else ""} }} - {{ - {uri_or_search_item} ?p ?o1 . {chr(10)} \ - {f'?s ?inverse_predicate {uri_or_search_item}{chr(10)}' if inverse_predicates else chr(10)} \ - {generate_exclude_predicates(exclude_predicates)} \ - {generate_include_predicates(include_predicates)} \ - {generate_inverse_predicates(inverse_predicates)} \ - {generate_bnode_select(bnode_depth)}\ - }} - - UNION {{ - {sequence_construct_where}\ - }} - }} - """ - ) - log.debug(f"Item Construct query for {uri_or_search_item} is:\n{construct_query}") - return construct_query - - -def search_query_construct(): - return dedent( - f"""?hashID a prez:SearchResult ; - prez:searchResultWeight ?weight ; - prez:searchResultPredicate ?predicate ; - prez:searchResultMatch ?match ; - prez:searchResultURI ?search_result_uri .""" - ) - - -def generate_relative_properties( - construct_select, - relative_properties, - in_children, - in_parents, - out_children, - out_parents, -): - """ - Generate the relative properties construct or select for a listing query. - i.e. 
properties on nodes related to the focus item NOT the focus item itself - """ - if not relative_properties: - return "" - rel_string = "" - kvs = { - "ic": in_children, - "ip": in_parents, - "oc": out_children, - "op": out_parents, - } - other_kvs = { - "ic": "child_to_focus_s", - "ip": "parent_to_focus_s", - "oc": "child_item", - "op": "parent_item", - } - for k, v in kvs.items(): - if v: - if construct_select == "select": - rel_string += f"""OPTIONAL {{ """ - rel_string += f"""?{other_kvs[k]} ?rel_{k}_props ?rel_{k}_val .\n""" - if construct_select == "select": - rel_string += f"""VALUES ?rel_{k}_props {{ {" ".join('<' + str(pred) + '>' for pred in relative_properties)} }} }}\n""" - return rel_string - - -def generate_focus_to_x_predicates(uri_or_tl_item, focus_to_child, focus_to_parent): - where = "" - if focus_to_child: - where += f"""{uri_or_tl_item} ?focus_to_child ?child_item . - VALUES ?focus_to_child {{ {" ".join('<' + str(pred) + '>' for pred in focus_to_child)} }}\n""" - if focus_to_parent: - where += f"""{uri_or_tl_item} ?focus_to_parent ?parent_item . - VALUES ?focus_to_parent {{ {" ".join('<' + str(pred) + '>' for pred in focus_to_parent)} }}\n""" - # if not focus_to_child and not focus_to_parent: - # where += "VALUES ?focus_to_child {}\nVALUES ?focus_to_parent {}" - return where - - -def generate_x_to_focus_predicates(uri_or_tl_item, child_to_focus, parent_to_focus): - if not child_to_focus and not parent_to_focus: - return "" - where = "" - if child_to_focus: - where += f"""?child_to_focus_s ?child_to_focus {uri_or_tl_item} ; - VALUES ?child_to_focus {{ {" ".join('<' + str(pred) + '>' for pred in child_to_focus)} }}\n""" - if parent_to_focus: - where += f"""?parent_to_focus_s ?parent_to_focus {uri_or_tl_item} ; - VALUES ?parent_to_focus {{ {" ".join('<' + str(pred) + '>' for pred in parent_to_focus)} }}\n""" - # if not child_to_focus and not parent_to_focus: - # where += "VALUES ?child_to_focus {}\nVALUES ?parent_to_focus {}" - return where - - -def generate_include_predicates(include_predicates): - """ - Generates a SPARQL VALUES clause for a list of predicates, of the form: - VALUES ?p { } - """ - if include_predicates: - return f"""VALUES ?p{{\n{chr(10).join([f"<{p}>" for p in include_predicates])}\n}}""" - return "" - - -def generate_exclude_predicates(exclude_predicates): - if exclude_predicates: - return f"""FILTER(?p NOT IN ({chr(10).join([f"<{p}>" for p in exclude_predicates])}))""" - return "" - - -def generate_inverse_predicates(inverse_predicates): - """ - Generates a SPARQL VALUES clause for a list of inverse predicates, of the form: - VALUES ?inverse_predicate { } - """ - if inverse_predicates: - return f"""VALUES ?inverse_predicate{{\n{chr(10).join([f"<{p}>" for p in inverse_predicates])}\n}}""" - return "" - - -def _generate_sequence_construct(object_uri, sequence_predicates, path_n=0): - """ - Generates part of a SPARQL CONSTRUCT query for property paths, given a list of lists of property paths. - """ - if sequence_predicates: - all_sequence_construct = "" - for predicate_list in sequence_predicates: - construct_and_where = ( - f"\t{object_uri} <{predicate_list[0]}> ?seq_o1_{path_n} ." - ) - for i in range(1, len(predicate_list)): - construct_and_where += f"\n\t?seq_o{i}_{path_n} <{predicate_list[i]}> ?seq_o{i + 1}_{path_n} ." 
- all_sequence_construct += construct_and_where - return all_sequence_construct - return "" - - -def generate_sequence_construct( - sequence_predicates: list[list[URIRef]], uri_or_tl_item: str -) -> tuple[str, str]: - sequence_construct = "" - sequence_construct_where = "" - if sequence_predicates: - for i, sequence_predicate in enumerate(sequence_predicates): - seq_partial_str = "OPTIONAL {\n" - generate_sequence_construct_result: str = _generate_sequence_construct( - uri_or_tl_item, [sequence_predicate], i - ) - seq_partial_str += generate_sequence_construct_result - seq_partial_str += "\n}\n" - sequence_construct_where += seq_partial_str - sequence_construct += generate_sequence_construct_result - - return sequence_construct, sequence_construct_where - - -def generate_bnode_construct(depth): - """ - Generate the construct query for the bnodes, this is of the form: - ?o1 ?p2 ?o2 . - ?o2 ?p3 ?o3 . - ... - """ - return "\n" + "\n".join( - [f"\t?o{i + 1} ?p{i + 2} ?o{i + 2} ." for i in range(depth)] - ) - - -def generate_bnode_select(depth): - """ - Generates a SPARQL select string for bnodes to a given depth, of the form: - OPTIONAL { - FILTER(ISBLANK(?o1)) - ?o1 ?p2 ?o2 ; - OPTIONAL { - FILTER(ISBLANK(?o2)) - ?o2 ?p3 ?o3 ; - OPTIONAL { ... - } - } - } - """ - part_one = "\n".join( - [ - f"""{chr(9) * (i + 1)}OPTIONAL {{ -{chr(9) * (i + 2)}FILTER(ISBLANK(?o{i + 1})) -{chr(9) * (i + 2)}?o{i + 1} ?p{i + 2} ?o{i + 2} .""" - for i in range(depth) - ] - ) - part_two = "".join( - [f"{chr(10)}{chr(9) * (i + 1)}}}" for i in reversed(range(depth))] - ) - return part_one + part_two - - async def get_annotation_properties( item_graph: Graph, ): @@ -505,59 +149,6 @@ def get_annotations_from_tbox_cache( return uncached_props, labels_from_cache -# hit the count cache first, if it's not there, hit the SPARQL endpoint -def generate_listing_count_construct(item: ListingModel, endpoint_uri: str): - """ - Generates a SPARQL construct query to count either: - 1. the members of a collection, if a URI is given, or; - 2. the number of instances of a base class, given a base class. - """ - if not item.top_level_listing: - # count based on relation to a parent object - first find the relevant parent->child or child->parent relation - # from the endpoint definition. - p2f_relation = endpoints_graph_cache.value( - subject=URIRef(endpoint_uri), predicate=ONT.ParentToFocusRelation - ) - f2p_relation = endpoints_graph_cache.value( - subject=URIRef(endpoint_uri), predicate=ONT.FocusToParentRelation - ) - assert p2f_relation or f2p_relation, ( - f"Endpoint {endpoint_uri} does not have a parent to focus or focus to " - f"parent relation defined." - ) - p2f_statement = f"<{item.uri}> <{p2f_relation}> ?item ." if p2f_relation else "" - f2p_statement = f"?item <{f2p_relation}> <{item.uri}> ." if f2p_relation else "" - query = dedent( - f""" - PREFIX prez: - PREFIX rdfs: - - CONSTRUCT {{ <{item.uri}> prez:count ?count }} - WHERE {{ - SELECT (COUNT(?item) as ?count) - WHERE {{ - {p2f_statement} - {f2p_statement} - }} - }}""" - ).strip() - return query - else: # item.selected_class - query = dedent( - f""" - PREFIX prez: - - CONSTRUCT {{ <{item.base_class}> prez:count ?count }} - WHERE {{ - SELECT (COUNT(?item) as ?count) - WHERE {{ - ?item a <{item.base_class}> . - }} - }}""" - ).strip() - return query - - def temp_listing_count(subquery: SubSelect, klass): """ TODO: Implement COUNT and other expressions in SPARQL grammar. 
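
NOTE (reviewer sketch, illustrative only - not part of the diff): the new
prez/sparql/search_query.py below is the same builder that listing_function
above combines with an endpoint-derived SubSelect when a search_term is
supplied. Roughly (build_search_listing_query is a hypothetical helper name;
subselect is assumed to be extracted from SHACLParser as in listings.py):

    from prez.config import settings
    from prez.sparql.search_query import SearchQuery

    def build_search_listing_query(subselect, term: str, page: int, per_page: int) -> str:
        # the SubSelect scopes search results to the endpoint's focus nodes
        return SearchQuery(
            search_term=term,
            pred_vals=settings.label_predicates,
            additional_ss=subselect,
            limit=per_page,
            offset=(page - 1) * per_page,
        ).render()
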
diff --git a/prez/sparql/search_query.py b/prez/sparql/search_query.py new file mode 100644 index 00000000..639253ea --- /dev/null +++ b/prez/sparql/search_query.py @@ -0,0 +1,397 @@ +from typing import Optional, List, Union, Tuple + +from pydantic import BaseModel +from rdflib import RDF, URIRef + +from prez.reference_data.prez_ns import PREZ +from temp.grammar import ( + Var, + LANGTAG, + BooleanLiteral, + PrimaryExpression, + GroupGraphPattern, + GroupGraphPatternSub, + SimplifiedTriple, + Bind, + Expression, + GraphPatternNotTriples, + NumericLiteral, + BuiltInCall, + Filter, + RDFLiteral, + RegexExpression, + Constraint, + GroupOrUnionGraphPattern, + OptionalGraphPattern, + FunctionCall, + ArgList, + BrackettedExpression, + InlineData, + InlineDataOneVar, + DataBlock, + IRI, + SelectClause, + Aggregate, + SubSelect, + GroupClause, + GroupCondition, + SolutionModifier, + WhereClause, + OrderClause, + OrderCondition, + SubSelectString, + ConstructTemplate, + ConstructTriples, + ConstructQuery, + LimitClause, + LimitOffsetClauses, + OffsetClause, + DataBlockValue, +) + + +class SearchQuery(BaseModel): + class Config: + arbitrary_types_allowed = True + + search_term: str + pred_vals: List[URIRef] + additional_ss: Optional[SubSelect] = None + limit: int = 10 + offset: int = 0 + + sr_uri: Var = Var(value="focus_node") + pred: Var = Var(value="pred") + match: Var = Var(value="match") + weight: Var = Var(value="weight") + hashid: Var = Var(value="hashID") + w: Var = Var(value="w") + + @property + def sr_uri_pe(self): + return PrimaryExpression(content=self.sr_uri) + + @property + def pred_pe(self): + return PrimaryExpression(content=self.pred) + + @property + def match_pe(self): + return PrimaryExpression(content=self.match) + + @property + def weight_pe(self): + return PrimaryExpression(content=self.weight) + + @property + def w_pe(self): + return PrimaryExpression(content=self.w) + + @property + def inner_select_vars(self): + return { + "one": { + "weight_val": 100, + "function": "LCASE", + "prefix": "", + "case_insensitive": None, + }, + "two": { + "weight_val": 20, + "function": "REGEX", + "prefix": "^", + "case_insensitive": True, + }, + "three": { + "weight_val": 10, + "function": "REGEX", + "prefix": "", + "case_insensitive": True, + }, + } + + def render(self): + cq = self.create_construct_query() + return "".join(part for part in cq.render()) + + def create_construct_query(self): + cq = ConstructQuery( + construct_template=self.create_construct_template(), + where_clause=WhereClause( + group_graph_pattern=GroupGraphPattern( + content=self.create_outer_subselect() + ) + ), + solution_modifier=SolutionModifier(), + ) + return cq + + def create_construct_template(self): + """ + ?hashID a prez:SearchResult ; + prez:searchResultWeight ?weight ; + prez:searchResultPredicate ?predicate ; + prez:searchResultMatch ?match ; + prez:searchResultURI ?search_result_uri . 
+ """ + search_result_triples = [ + SimplifiedTriple( + subject=self.hashid, + predicate=IRI(value=PREZ.searchResultWeight), + object=self.weight, + ), + SimplifiedTriple( + subject=self.hashid, + predicate=IRI(value=PREZ.searchResultPredicate), + object=self.pred, + ), + SimplifiedTriple( + subject=self.hashid, + predicate=IRI(value=PREZ.searchResultMatch), + object=self.match, + ), + SimplifiedTriple( + subject=self.hashid, + predicate=IRI(value=PREZ.searchResultURI), + object=self.sr_uri, + ), + SimplifiedTriple( + subject=self.hashid, + predicate=IRI(value=RDF.type), + object=IRI(value=PREZ.SearchResult), + ), + ] + ct = ConstructTemplate( + construct_triples=ConstructTriples(triples=search_result_triples) + ) + return ct + + def create_outer_subselect(self): + outer_ss = SubSelect( + select_clause=self.create_outer_select_clause(), + where_clause=self.create_outer_where_clause(), + solution_modifier=self.create_solution_modifier(), + ) + return outer_ss + + def create_outer_select_clause(self): + """ + SELECT ?focus_node ?predicate ?match ?weight (URI(CONCAT("urn:hash:", SHA256(CONCAT(STR(?focus_node), STR(?predicate), STR(?match), STR(?weight))))) AS ?hashID) + """ + expressions = [self.sr_uri_pe, self.pred_pe, self.match_pe, self.weight_pe] + str_builtins = [BuiltInCall.create_with_one_expr("STR", e) for e in expressions] + str_expressions = [PrimaryExpression(content=b) for b in str_builtins] + inner_concat = BuiltInCall.create_with_n_expr("CONCAT", str_expressions) + sha256_expr = PrimaryExpression( + content=BuiltInCall.create_with_one_expr( + "SHA256", PrimaryExpression(content=inner_concat) + ) + ) + urn_literal = PrimaryExpression(content=RDFLiteral(value="urn:hash:")) + outer_concat = BuiltInCall.create_with_n_expr( + "CONCAT", [urn_literal, sha256_expr] + ) + uri_expr = BuiltInCall.create_with_one_expr( + "URI", PrimaryExpression(content=outer_concat) + ) + uri_pr_exp = PrimaryExpression(content=uri_expr) + uri_exp = Expression.from_primary_expr(uri_pr_exp) + sc = SelectClause( + variables_or_all=[ + self.sr_uri, + self.pred, + self.match, + self.weight, + (uri_exp, self.hashid), + ] + ) + return sc + + def create_outer_where_clause(self): + """Wrapper WHERE clause""" + inner_ss = self.create_inner_subselect() + inner_ggp = GroupGraphPattern(content=inner_ss) + outer_wc = WhereClause(group_graph_pattern=inner_ggp) + return outer_wc + + def create_solution_modifier(self): + """ORDER BY DESC(?weight)""" + ocond = OrderCondition(var=self.weight, direction="DESC") + oclause = OrderClause(conditions=[ocond]) + limit = LimitClause(limit=self.limit) + offset = OffsetClause(offset=self.offset) + loc = LimitOffsetClauses(limit_clause=limit, offset_clause=offset) + sm = SolutionModifier(order_by=oclause, limit_offset=loc) + return sm + + def create_inner_subselect(self): + inner_ss = SubSelect( + select_clause=self.create_inner_select_clause(), + where_clause=self.create_inner_where_clause(), + solution_modifier=self.create_group_by_solution_modifier(), + ) + return inner_ss + + def create_group_by_solution_modifier(self): + """ + GROUP BY ?focus_node ?predicate ?match + """ + gc_sr_uri = GroupCondition(condition=self.sr_uri) + gc_pred = GroupCondition(condition=self.pred) + gc_match = GroupCondition(condition=self.match) + gc = GroupClause(group_conditions=[gc_sr_uri, gc_pred, gc_match]) + sm = SolutionModifier(group_by=gc) + return sm + + def create_inner_select_clause(self): + """ + SELECT ?focus_node ?predicate ?match (SUM(?w) AS ?weight) + """ + pr_exp = 
PrimaryExpression(content=self.w) + exp = Expression.from_primary_expr(pr_exp) + sum_agg = Aggregate(function_name="SUM", expression=exp) + sum_bic = BuiltInCall(other_expressions=sum_agg) + sum_pr_exp = PrimaryExpression(content=sum_bic) + sum_exp = Expression.from_primary_expr(sum_pr_exp) + sc = SelectClause( + variables_or_all=[ + self.sr_uri, + self.pred, + self.match, + (sum_exp, self.weight), + ] + ) + return sc + + def create_inner_where_clause(self): + # outer group graph pattern sub + iri_pred_vals = [IRI(value=p) for p in self.pred_vals] + iri_db_vals = [DataBlockValue(value=p) for p in iri_pred_vals] + ildov = InlineDataOneVar(variable=self.pred, datablockvalues=iri_db_vals) + ild = InlineData(data_block=DataBlock(block=ildov)) + gpnt_ild = GraphPatternNotTriples(content=ild) + + # union statements + gougp = self.create_union_of_inner_ggps() + gpnt_gougp = GraphPatternNotTriples(content=gougp) + + outer_ggps = GroupGraphPatternSub( + graph_patterns_or_triples_blocks=[gpnt_ild, gpnt_gougp] + ) + outer_ggp = GroupGraphPattern(content=outer_ggps) + wc = WhereClause(group_graph_pattern=outer_ggp) + return wc + + def create_union_of_inner_ggps(self): + # inner group graph patterns (unioned statements) + inner_select_ggp_list = [] + for var_dict in self.inner_select_vars.values(): + inner_select_ggp_list.append(self.create_inner_ggp(**var_dict)) + gougp = GroupOrUnionGraphPattern(group_graph_patterns=inner_select_ggp_list) + return gougp + + def create_inner_ggp( + self, + weight_val: int, + function: str, + prefix: str, + case_insensitive: Optional[bool], + ) -> GroupGraphPattern: + ggp = GroupGraphPattern(content=GroupGraphPatternSub()) + + # triple pattern e.g. (?focus_node ?pred ?match) + ggp.content.add_triple( + SimplifiedTriple( + subject=self.sr_uri, + predicate=self.pred, + object=self.match, + ) + ) + + # add additional focus node selection e.g. from endpoint definitions + if self.additional_ss: + if isinstance(self.additional_ss, SubSelectString): + ss_ggp = GroupGraphPattern(content=self.additional_ss) + gougp = GroupOrUnionGraphPattern(group_graph_patterns=[ss_ggp]) + gpnt = GraphPatternNotTriples(content=gougp) + ggp.content.add_pattern(gpnt) + elif isinstance(self.additional_ss, SubSelect): + ss_ggps = self.additional_ss.where_clause.group_graph_pattern.content + ss_tb = ss_ggps.triples_block + ss_gpotb = ss_ggps.graph_patterns_or_triples_blocks + if ss_tb: + ggp.content.add_pattern(ss_tb) + if ss_gpotb: + for pattern in ss_gpotb: + ggp.content.add_pattern(pattern) + + # bind e.g. BIND(100 AS ?w) + bind_for_w = Bind( + expression=Expression.from_primary_expr( + PrimaryExpression(content=NumericLiteral(value=weight_val)) + ), + var=Var(value="w"), + ) + bind_gpnt = GraphPatternNotTriples(content=bind_for_w) + ggp.content.add_pattern(bind_gpnt) + + # FILTER (REGEX(?match, "^$term", "i")) + pe_st = PrimaryExpression(content=RDFLiteral(value=(prefix + self.search_term))) + if function == "REGEX": + e_ci = None + if case_insensitive: + pe_ci = PrimaryExpression(content=RDFLiteral(value="i")) + e_ci = Expression.from_primary_expr(pe_ci) + regex_expression = RegexExpression( + text_expression=Expression.from_primary_expr( + self.match_pe + ), # Expression for the text + pattern_expression=Expression.from_primary_expr(pe_st), # Search Term + flags_expression=e_ci, # Case insensitivity + ) + bic = BuiltInCall(other_expressions=regex_expression) + cons = Constraint(content=bic) + filter_expr = Filter(constraint=cons) + # filter e.g. 
FILTER(LCASE(?match) = "search term") + elif function == "LCASE": + bifc = BuiltInCall(function_name=function, arguments=[self.match]) + pe_focus = PrimaryExpression(content=bifc) + filter_expr = Filter.filter_relational( + focus=pe_focus, comparators=pe_st, operator="=" + ) + else: + raise ValueError("Only LCASE and REGEX handled at present") + filter_gpnt = GraphPatternNotTriples(content=filter_expr) + ggp.content.add_pattern(filter_gpnt) + return ggp + + +# if __name__ == "__main__": +# # additional_ss = SubSelectString(select_string="SELECT * {?focus_node a owl:Class}") +# sr_uri = Var(value="focus_node") +# additional_ss = SubSelect( +# select_clause=SelectClause(variables_or_all=[sr_uri]), +# where_clause=WhereClause( +# group_graph_pattern=GroupGraphPattern( +# content=GroupGraphPatternSub( +# triples_block=TriplesBlock( +# triples=[ +# SimplifiedTriple( +# subject=sr_uri, +# predicate=IRI( +# value="http://www.w3.org/1999/02/22-rdf-syntax-ns#type" +# ), +# object=IRI(value="http://www.w3.org/2002/07/owl#Class"), +# ) +# ] +# ) +# ) +# ) +# ), +# ) +# sq = SearchQuery( +# search_term="test", +# pred_vals=[RDFS.label], +# additional_ss=additional_ss, +# ).render() +# print(sq) diff --git a/temp/cql2sparql.py b/temp/cql2sparql.py index ca0081d6..9230fb1c 100644 --- a/temp/cql2sparql.py +++ b/temp/cql2sparql.py @@ -20,6 +20,17 @@ SolutionModifier, ConstructQuery, ConstructTriples, + Var, + IRI, + RDFLiteral, + PrimaryExpression, + RegexExpression, + Expression, + BuiltInCall, + Constraint, + FunctionCall, + NumericLiteral, + DataBlockValue, ) from temp.cql_sparql_reference import ( cql_sparql_spatial_mapping, @@ -32,7 +43,7 @@ class CQLParser: def __init__(self, cql=None, context: dict = None, cql_json: dict = None): self.ggps_inner_select = None - self.cql = cql + self.cql: dict = cql self.context = context self.cql_json = cql_json self.var_counter = 0 @@ -149,24 +160,30 @@ def _handle_comparison(self, operator, args, existing_ggps=None): if prop.startswith("^"): prop = prop[1:] inverse = True - value = args[1].get("@value") - subject = Variable("focus_node") - predicate = URIRef(prop) - - object = Variable(f"var_{self.var_counter}") + val = args[1].get("@value") + if isinstance(val, str): + value = RDFLiteral(value=val) + elif isinstance(val, (int, float)): + value = NumericLiteral(value=val) + subject = Var(value="focus_node") + predicate = IRI(value=prop) + + object = Var(value=f"var_{self.var_counter}") + object_pe = PrimaryExpression(content=object) if operator == "=": - inline_data_one_var = InlineDataOneVar( - variable=object, values=[Literal(value)] - ) + iri_db_vals = [DataBlockValue(value=value)] + ildov = InlineDataOneVar(variable=object, datablockvalues=iri_db_vals) gpnt = GraphPatternNotTriples( - content=InlineData(data_block=DataBlock(block=inline_data_one_var)) + content=InlineData(data_block=DataBlock(block=ildov)) ) self._append_graph_pattern(ggps, gpnt) else: - filter_clause = Filter( - variable=object, expression=operator, value=Literal(value) + value_pe = PrimaryExpression(content=value) + values_constraint = Filter.filter_relational( + focus=object_pe, comparators=value_pe, operator=operator ) - self._append_graph_pattern(ggps, filter_clause) + gpnt = GraphPatternNotTriples(content=values_constraint) + self._append_graph_pattern(ggps, gpnt) if inverse: self._add_triple(ggps, object, predicate, subject) @@ -191,15 +208,26 @@ def _handle_like(self, args, existing_ggps=None): .replace("\\", "\\\\") ) - subject = Variable("focus_node") - predicate = URIRef(prop) - 
obj = Variable(f"var_{self.var_counter}") + subject = Var(value="focus_node") + predicate = IRI(value=URIRef(prop)) + obj = Var(value=f"var_{self.var_counter}") if inverse: self._add_triple(ggps, obj, predicate, subject) else: self._add_triple(ggps, subject, predicate, obj) - filter_clause = Filter(variable=obj, expression="regex", value=Literal(value)) - self._append_graph_pattern(ggps, filter_clause) + + te = Expression.from_primary_expr( + primary_expression=PrimaryExpression(content=obj) + ) + pe = Expression.from_primary_expr( + primary_expression=PrimaryExpression(content=RDFLiteral(value=value)) + ) + re = RegexExpression(text_expression=te, pattern_expression=pe) + bic = BuiltInCall(other_expressions=re) + cons = Constraint(content=bic) + filter_expr = Filter(constraint=cons) + + self._append_graph_pattern(ggps, filter_expr) yield ggps def _handle_spatial(self, operator, args, existing_ggps=None): @@ -211,16 +239,22 @@ def _handle_spatial(self, operator, args, existing_ggps=None): if coordinates: wkt = cql_to_shapely_mapping[geom_type](coordinates).wkt - subject = Variable("focus_node") - geom_bn_var = Variable("geom_bnode") - geom_lit_var = Variable("geom_var") - self._add_triple(ggps, subject, GEO.hasGeometry, geom_bn_var) - self._add_triple(ggps, geom_bn_var, GEO.asWKT, geom_lit_var) - spatial_filter = Filter( - variable=geom_lit_var, - expression=cql_sparql_spatial_mapping[operator], - value=Literal(wkt), + subject = Var(value="focus_node") + geom_bn_var = Var(value="geom_bnode") + geom_lit_var = Var(value="geom_var") + self._add_triple(ggps, subject, IRI(value=GEO.hasGeometry), geom_bn_var) + self._add_triple(ggps, geom_bn_var, IRI(value=GEO.asWKT), geom_lit_var) + + geom_func_iri = IRI(value=cql_sparql_spatial_mapping[operator]) + geom_1_exp = Expression.from_primary_expr( + primary_expression=PrimaryExpression(content=geom_lit_var) + ) + geom_2_exp = Expression.from_primary_expr( + primary_expression=PrimaryExpression(content=RDFLiteral(value=wkt)) ) + fc = FunctionCall(iri=geom_func_iri, arg_list=[geom_1_exp, geom_2_exp]) + + spatial_filter = Filter(constraint=Constraint(content=fc)) self._append_graph_pattern(ggps, spatial_filter) yield ggps @@ -236,19 +270,28 @@ def _handle_in(self, args, existing_ggps=None): inverse = True literal_values = [item["@value"] for item in args if "@value" in item] uri_values = [item["@id"] for item in args if "@id" in item] - rdflib_literal_values = [Literal(value) for value in literal_values] - rdflib_uri_values = [URIRef(value) for value in uri_values] - all_values = rdflib_literal_values + rdflib_uri_values - subject = Variable("focus_node") - predicate = URIRef(prop) - object = Variable(f"var_{self.var_counter}") + grammar_literal_values = [] + for val in literal_values: + if isinstance(val, str): + value = RDFLiteral(value=val) + elif isinstance(val, (int, float)): + value = NumericLiteral(value=val) + grammar_literal_values.append(value) + grammar_uri_values = [IRI(value=URIRef(value)) for value in uri_values] + all_values = grammar_literal_values + grammar_uri_values + subject = Var(value="focus_node") + predicate = IRI(value=URIRef(prop)) + object = Var(value=f"var_{self.var_counter}") if inverse: self._add_triple(ggps, object, predicate, subject) else: self._add_triple(ggps, subject, predicate, object) - inline_data_one_var = InlineDataOneVar(variable=object, values=all_values) + + iri_db_vals = [DataBlockValue(value=p) for p in all_values] + ildov = InlineDataOneVar(variable=object, datablockvalues=iri_db_vals) + gpnt = 
GraphPatternNotTriples( - content=InlineData(data_block=DataBlock(block=inline_data_one_var)) + content=InlineData(data_block=DataBlock(block=ildov)) ) self._append_graph_pattern(ggps, gpnt) diff --git a/temp/grammar.py b/temp/grammar.py index 1979d8ca..67e88c75 100644 --- a/temp/grammar.py +++ b/temp/grammar.py @@ -1,14 +1,16 @@ from __future__ import annotations import logging +from decimal import Decimal from typing import List, Union, Optional, Generator, Tuple from pydantic import BaseModel, field_validator -from rdflib import URIRef, Variable, BNode, Literal +from rdflib import RDF +from rdflib import URIRef, Variable from rdflib.plugins.sparql import prepareQuery from rdflib.plugins.sparql.algebra import translateAlgebra -from temp.cql_sparql_reference import cql_sparql_spatial_mapping +from prez.reference_data.prez_ns import PREZ log = logging.getLogger(__name__) @@ -28,7 +30,7 @@ def __repr__(self): def render(self): raise NotImplementedError("Subclasses must implement this method.") - def collect_triples(self) -> List[SimplifiedTriple]: + def collect_triples(self) -> List["SimplifiedTriple"]: """ Recursively collect SimplifiedTriple instances from this object. """ @@ -60,6 +62,131 @@ def collect_triples(self) -> List[SimplifiedTriple]: return triples +class BlankNodeLabel(SPARQLGrammarBase): + """ + BLANK_NODE_LABEL ::= '_:' ( PN_CHARS_U | [0-9] ) ((PN_CHARS|'.')* PN_CHARS)? + """ + + part_1: str + part_2: Optional[str] = None + + def render(self): + yield "_:" + yield self.part_1 + if self.part_2: + yield self.part_2 + + +class Anon: + """ + ANON ::= '[' WS* ']' + https://www.w3.org/TR/sparql11-query/#rANON + """ + + # TODO not sure how to make this more useful - allow input of whitespace? + def render(self): + yield "[]" + + +class Var(SPARQLGrammarBase): + value: str + + def render(self) -> Generator[str, None, None]: + yield Variable(self.value).n3() + + def __hash__(self): + return hash(self.value) + + +class IRI(SPARQLGrammarBase): + """ + Represents a SPARQL iri. + iri ::= IRIREF | PrefixedName + """ + + value: Union[URIRef, str] + + def render(self) -> Generator[str, None, None]: + if isinstance(self.value, URIRef): + yield self.value.n3() + else: + yield "<" + yield self.value + yield ">" + + def __hash__(self): + return hash(self.value) + + +class BlankNode(SPARQLGrammarBase): + """ + BlankNode ::= BLANK_NODE_LABEL | ANON + """ + + value: Union["BlankNodeLabel", "Anon"] + + def render(self): + yield from self.value.render() + + def __hash__(self): + return hash(self.value) + + +class RDFLiteral(SPARQLGrammarBase): + """ + RDFLiteral ::= String ( LANGTAG | ( '^^' iri ) )? + """ + + value: str + langtag_or_datatype: Optional[Union[LANGTAG, IRI]] = None + + def render(self) -> Generator[str, None, None]: + yield f'"{self.value}"' + if self.langtag_or_datatype: + yield from self.langtag_or_datatype.render() + + def __hash__(self): + return hash(self.value) + + +class LANGTAG(SPARQLGrammarBase): + """ + LANGTAG ::= '@' [a-zA-Z]+ ('-' [a-zA-Z0-9]+)* + """ + + tag: str + subtag: Optional[str] = None + + def render(self) -> Generator[str, None, None]: + yield f"@{self.tag}" + if self.subtag: + yield f"-{self.subtag}" + + +class NIL(SPARQLGrammarBase): + """ + Represents a SPARQL NIL. 
+    NIL ::= '(' WS* ')'
+    """
+
+    def render(self) -> Generator[str, None, None]:
+        yield "()"
+
+
+class NumericLiteral(SPARQLGrammarBase):
+    """
+    not implemented properly - only does integer literals
+    """
+
+    value: Union[float, int, Decimal]
+
+    def render(self) -> Generator[str, None, None]:
+        yield str(self.value)
+
+    def __hash__(self):
+        return hash(self.value)
+
+
 class SimplifiedTriple(SPARQLGrammarBase):
     """A simplified implementation of the triple pattern matches in the SPARQL grammar, to avoid implementing many
     classes such as TriplesSameSubjectPath"""
@@ -90,6 +217,188 @@ def render(self) -> Generator[str, None, None]:
         yield "\n"
+
+
+class PrimaryExpression(SPARQLGrammarBase):
+    """
+    PrimaryExpression ::= BrackettedExpression | BuiltInCall | iriOrFunction | RDFLiteral | NumericLiteral | BooleanLiteral | Var
+    """
+
+    content: Union[
+        BrackettedExpression,
+        BuiltInCall,
+        IRIOrFunction,
+        RDFLiteral,
+        NumericLiteral,
+        BooleanLiteral,
+        Var,
+    ]
+
+    def render(self) -> Generator[str, None, None]:
+        yield from self.content.render()
+
+
+class UnaryExpression(SPARQLGrammarBase):
+    operator: Optional[str] = None  # '!', '+', or '-'
+    primary_expression: PrimaryExpression
+
+    def render(self) -> Generator[str, None, None]:
+        if self.operator:
+            yield f"{self.operator} "
+        yield from self.primary_expression.render()
+
+
+class MultiplicativeExpression(SPARQLGrammarBase):
+    base_expression: UnaryExpression
+    additional_expressions: Optional[List[Tuple[str, UnaryExpression]]] = []
+
+    @field_validator("additional_expressions")
+    def validate_additional_expressions(cls, v):
+        # v is a list of (operator, expression) tuples; validate each operator
+        for operator, _ in v:
+            if operator not in ["*", "/"]:
+                raise ValueError("Operator must be '*' or '/'")
+        return v
+
+    def render(self) -> Generator[str, None, None]:
+        yield from self.base_expression.render()
+        for operator, expression in self.additional_expressions:
+            yield f" {operator} "
+            yield from expression.render()
+
+
+class AdditiveExpression(SPARQLGrammarBase):
+    """
+    https://www.w3.org/TR/sparql11-query/#rAdditiveExpression
+    AdditiveExpression ::= MultiplicativeExpression ( '+' MultiplicativeExpression | '-' MultiplicativeExpression | ( NumericLiteralPositive | NumericLiteralNegative ) ( ( '*' UnaryExpression ) | ( '/' UnaryExpression ) )* )*
+    #TODO implement NumericLiteralPositive, NumericLiteralNegative - these should be options in the additional expressions
+    """
+
+    base_expression: "MultiplicativeExpression"
+    additional_expressions: Optional[
+        List[Tuple[str, Union[MultiplicativeExpression, UnaryExpression]]]
+    ] = []
+
+    @field_validator("additional_expressions")
+    def validate_additional_expressions(cls, v):
+        # v is a list of (operator, expression) tuples; validate each operator
+        for operator, _ in v:
+            if operator not in ["+", "-", "*", "/"]:
+                raise ValueError("Operator must be one of '+', '-', '*', or '/'")
+        return v
+
+    def render(self) -> Generator[str, None, None]:
+        yield from self.base_expression.render()
+        for operator, expression in self.additional_expressions:
+            yield f" {operator} "
+            yield from expression.render()
+
+
+class NumericExpression(SPARQLGrammarBase):
+    additive_expression: AdditiveExpression
+
+    def render(self) -> Generator[str, None, None]:
+        yield from self.additive_expression.render()
+
+
+class RelationalExpression(SPARQLGrammarBase):
+    """
+    https://www.w3.org/TR/sparql11-query/#rRelationalExpression
+    RelationalExpression ::= NumericExpression ( '=' NumericExpression | '!=' NumericExpression | '<' NumericExpression | '>' NumericExpression | '<=' NumericExpression | '>=' NumericExpression | 'IN' ExpressionList | 'NOT' 'IN' ExpressionList )?
+ """ + + left: NumericExpression + operator: Optional[str] = None # '=', '!=', '<', '>', '<=', '>=', 'IN' and 'NOT IN' + right: Optional[Union[NumericExpression, "ExpressionList"]] = None + + def render(self) -> Generator[str, None, None]: + yield from self.left.render() + if self.operator: + yield f" {self.operator} " + if self.right: + yield from self.right.render() + + +class ValueLogical(SPARQLGrammarBase): + relational_expression: RelationalExpression + + def render(self) -> Generator[str, None, None]: + yield from self.relational_expression.render() + + +class ConditionalAndExpression(SPARQLGrammarBase): + """ + ConditionalAndExpression ::= ValueLogical ( '&&' ValueLogical )* + """ + + value_logicals: List[ValueLogical] + + def render(self) -> Generator[str, None, None]: + for i, value_logical in enumerate(self.value_logicals): + yield from value_logical.render() + if i < len(self.value_logicals) - 1: + yield " && " + + +class ConditionalOrExpression(SPARQLGrammarBase): + """ + ConditionalOrExpression ::= ConditionalAndExpression ( '||' ConditionalAndExpression )* + """ + + conditional_and_expressions: List[ConditionalAndExpression] + + def render(self) -> Generator[str, None, None]: + for i, conditional_and_expression in enumerate( + self.conditional_and_expressions + ): + yield from conditional_and_expression.render() + if i < len(self.conditional_and_expressions) - 1: + yield " || " + + +class Expression(SPARQLGrammarBase): + """ + Expression ::= ConditionalOrExpression + """ + + conditional_or_expression: ConditionalOrExpression + + def render(self) -> Generator[str, None, None]: + yield from self.conditional_or_expression.render() + + @classmethod + def from_primary_expr(cls, primary_expression: PrimaryExpression) -> Expression: + """ + Convenience method to create an Expression directly from a Var, wrapped in a PrimaryExpression. 
+ """ + return cls( + conditional_or_expression=ConditionalOrExpression( + conditional_and_expressions=[ + ConditionalAndExpression( + value_logicals=[ + ValueLogical( + relational_expression=RelationalExpression( + left=NumericExpression( + additive_expression=AdditiveExpression( + base_expression=MultiplicativeExpression( + base_expression=UnaryExpression( + primary_expression=primary_expression + ) + ) + ) + ) + ) + ) + ] + ) + ] + ) + ) + + +class BrackettedExpression(SPARQLGrammarBase): + expression: Expression + + def render(self) -> Generator[str, None, None]: + yield "(" + yield from self.expression.render() + yield ")" + + class InlineDataOneVar(SPARQLGrammarBase): """ https://www.w3.org/TR/sparql11-query/#rInlineDataOneVar @@ -97,24 +406,39 @@ class InlineDataOneVar(SPARQLGrammarBase): """ variable: Var - values: List[Union[IRI, RDFLiteral]] + datablockvalues: List[Union[DataBlockValue]] def render(self) -> Generator[str, None, None]: yield from self.variable.render() yield "{ " - for value in self.values: + for value in self.datablockvalues: yield from value.render() yield " " yield " }" +class DataBlockValue(SPARQLGrammarBase): + """ + https://www.w3.org/TR/sparql11-query/#rDataBlockValue + DataBlockValue ::= iri | RDFLiteral | NumericLiteral | BooleanLiteral | 'UNDEF' + """ + + value: Union[IRI, RDFLiteral, NumericLiteral, BooleanLiteral, str] + + def render(self) -> Generator[str, None, None]: + if isinstance(self.value, str): + yield self.value + else: + yield from self.value.render() + + class InlineDataFull(SPARQLGrammarBase): """ https://www.w3.org/TR/sparql11-query/#rInlineDataFull ( NIL | '(' Var* ')' ) '{' ( '(' DataBlockValue* ')' | NIL )* '}' """ - vars: List[Var] + vars: Union[NIL, List[Var]] values: List[List[Union[IRI, RDFLiteral]]] def render(self) -> Generator[str, None, None]: @@ -183,7 +507,9 @@ class GraphPatternNotTriples(SPARQLGrammarBase): GraphPatternNotTriples ::= GroupOrUnionGraphPattern | OptionalGraphPattern | MinusGraphPattern | GraphGraphPattern | ServiceGraphPattern | Filter | Bind | InlineData """ - content: Union[GroupOrUnionGraphPattern, OptionalGraphPattern, Filter, InlineData] + content: Union[ + GroupOrUnionGraphPattern, OptionalGraphPattern, Filter, Bind, InlineData + ] def render(self) -> Generator[str, None, None]: yield from self.content.render() @@ -250,28 +576,6 @@ def add_triple(self, triple): # self.patterns.append(triples) -# class SelectClause(SPARQLGrammarBase): -# """ -# https://www.w3.org/TR/sparql11-query/#rSelectClause -# SelectClause ::= 'SELECT' ( 'DISTINCT' | 'REDUCED' )? ( ( Var | ( '(' Expression 'AS' Var ')' ) )+ | '*' ) -# Simplified model excluding casting of variables (e.g. 
(?var AS ?alias)) -# """ -# -# distinct: Optional[bool] = None -# reduced: Optional[bool] = None -# variables_or_all: Union[List[Var], str] -# -# def render(self): -# yield "SELECT" -# if self.distinct: -# yield " DISTINCT" -# elif self.reduced: -# yield " REDUCED" -# if isinstance(self.variables_or_all, str): -# yield " *" -# else: -# for var in self.variables_or_all: -# yield from var.render() class SelectClause(SPARQLGrammarBase): """ https://www.w3.org/TR/sparql11-query/#rSelectClause @@ -331,7 +635,7 @@ class SubSelectString(SubSelect): select_clause: Optional[str] = None where_clause: Optional[str] = None - solution_modifier: Optional[SolutionModifier] = None + solution_modifier: Optional["SolutionModifier"] = None select_string: str @field_validator("select_string") @@ -371,34 +675,50 @@ class Filter(SPARQLGrammarBase): constraint: Constraint def render(self) -> Generator[str, None, None]: - yield "FILTER(" + yield "FILTER " yield from self.constraint.render() - yield ")" @classmethod - def filter_in( - cls, focus: Var, comparators: List[PrimaryExpression], not_in: bool = False + def filter_relational( + cls, + focus: PrimaryExpression, + comparators: Union[PrimaryExpression, List[PrimaryExpression]], + operator: str, ) -> Filter: """ - Convenience method to create a FILTER clause to check if the focus is in/not in the list of comparators. + Convenience method to create a FILTER clause to compare the focus node to comparators. """ # Wrap the focus in an NumericExpression numeric_left = NumericExpression( additive_expression=AdditiveExpression( base_expression=MultiplicativeExpression( - base_expression=UnaryExpression( - primary_expression=PrimaryExpression(content=focus) - ) + base_expression=UnaryExpression(primary_expression=focus) ) ) ) - # Wrap each comparator in an Expression - comparator_exprs = [Expression.from_primary_expr(comp) for comp in comparators] - # Create the ExpressionList for IN/NOT IN - in_list = ExpressionList(expressions=comparator_exprs) - # Build the RelationalExpression for IN or NOT IN + # for operators in '=', '!=', '<', '>', '<=', '>=' + if isinstance(comparators, PrimaryExpression): + assert operator not in [ + "IN", + "NOT IN", + ], "an ExpressionList must be supplied for 'IN' or 'NOT IN'" + expression_rhs = NumericExpression( + additive_expression=AdditiveExpression( + base_expression=MultiplicativeExpression( + base_expression=UnaryExpression(primary_expression=comparators) + ) + ) + ) + else: # for operators 'IN' and 'NOT IN' + # Wrap each comparator in an Expression + assert operator in ["IN", "NOT IN"] + comparator_exprs = [ + Expression.from_primary_expr(comp) for comp in comparators + ] + expression_rhs = ExpressionList(expressions=comparator_exprs) + # Build the RelationalExpression relational_expr = RelationalExpression( - left=numeric_left, operator="NOT IN" if not_in else "IN", right=in_list + left=numeric_left, operator=operator, right=expression_rhs ) # Build the ValueLogical to wrap the RelationalExpression value_logical = ValueLogical(relational_expression=relational_expr) @@ -448,16 +768,16 @@ class ArgList(SPARQLGrammarBase): ArgList ::= NIL | '(' 'DISTINCT'? 
Expression ( ',' Expression )* ')'
     """
 
-    expressions: Optional[List["Expression"]]
+    expressions: Optional[Union[NIL, List[Expression]]]
     distinct: bool = False
 
-    def render(self) -> Generator[str, None, None]:
-        if not self.expressions:
-            yield "()"
+    def render(self) -> Generator[str, None, None]:
+        if isinstance(self.expressions, NIL):
+            yield from self.expressions.render()
         else:
             yield "("
             if self.distinct:
                 yield "DISTINCT "
             for i, expr in enumerate(self.expressions):
                 yield from expr.render()
                 if i < len(self.expressions) - 1:
@@ -465,24 +785,6 @@ def render(self) -> Generator[str, None, None]:
                 yield ", "
         yield ")"
 
 
-# class Filter(SPARQLGrammarBase):
-#     variable: Var
-#     expression: Union[IRI, str]
-#     value: Optional[Union[RDFLiteral, List[Union[IRI, RDFLiteral]]]] = None
-#
-#     def render(self) -> Generator[str, None, None]:
-#         if self.expression in ["<", ">", "<=", ">="]:
-#             yield f"\n\tFILTER({self.variable.render()}{self.expression}{self.value.n3()})"
-#         elif self.expression == "regex":
-#             yield f"\n\tFILTER regex({self.variable.render()}, {self.value.n3()})"
-#         elif self.expression in cql_sparql_spatial_mapping.values():
-#             yield f"\n\tFILTER({self.expression.n3()}({self.variable.render()}, {self.value.n3()}))"
-#         elif self.expression == "NOT IN":
-#             yield f'\n\tFILTER({self.variable.render()} NOT IN({", ".join([value.n3() for value in self.value])}))'
-#         elif self.expression == "ISBLANK":
-#             yield f"\n\tFILTER(ISBLANK({self.variable.render()}))"
-
-
 class Bind(SPARQLGrammarBase):
     """
     Bind ::= 'BIND' '(' Expression 'AS' Var ')'
@@ -548,6 +850,10 @@ def render(self) -> Generator[str, None, None]:
 
 
 class OrderCondition(SPARQLGrammarBase):
+    """
+    Default direction is ASC if not specified
+    """
+
     var: Var
     direction: Optional[str] = None
 
@@ -590,410 +896,194 @@ def render(self) -> Generator[str, None, None]:
 
 
 class SolutionModifier(SPARQLGrammarBase):
-    """
-    https://www.w3.org/TR/sparql11-query/#rSolutionModifier
-    SolutionModifier ::= GroupClause? HavingClause? OrderClause? LimitOffsetClauses?
-    """
-
-    order_by: Optional[OrderClause] = None
-    limit_offset: Optional[LimitOffsetClauses] = None
-    # having: Optional[HavingClause]
-    group_by: Optional[GroupClause] = None
-
-    def render(self) -> str:
-        if self.order_by:
-            yield from self.order_by.render()
-        if self.limit_offset:
-            if self.order_by:
-                yield "\n"
-            yield from self.limit_offset.render()
-
-
-class GroupClause(SPARQLGrammarBase):
-    """
-    https://www.w3.org/TR/sparql11-query/#rGroupClause
-    GroupClause ::= 'GROUP' 'BY' GroupCondition+
-    """
-
-    group_conditions: List[GroupCondition]
-
-    def render(self) -> Generator[str, None, None]:
-        yield "\nGROUP BY "
-        for condition in self.group_conditions:
-            yield from condition.render()
-
-
-class GroupCondition(SPARQLGrammarBase):
-    """
-    https://www.w3.org/TR/sparql11-query/#rGroupCondition
-    GroupCondition ::= BuiltInCall | FunctionCall | '(' Expression ( 'AS' Var )? ')' | Var
-    """
-
-    variable: Optional[Var] = None
-    expression: Optional[str] = None
-    as_variable: Optional[Var] = None
-
-    def render(self) -> Generator[str, None, None]:
-        if self.variable:
-            yield self.variable.render()
-        elif self.expression:
-            yield f"({self.expression}"
-            if self.as_variable:
-                yield f" AS {self.as_variable.render()})"
-            else:
-                yield ")"
-        else:
-            raise ValueError(
-                "GroupCondition must have either a variable or an expression defined."
-            )
-
-
-class ConstructTriples(SPARQLGrammarBase):
-    """
-    https://www.w3.org/TR/sparql11-query/#rConstructTriples
-    ConstructTriples ::= TriplesSameSubject ( '.' 
ConstructTriples? )? - - Simplified implementation that only accepts a list of SimplifiedTriples - avoids implementing the classes associated - with ; and , for TriplesSameSubject etc. in the SPARQL Grammar - """ - - triples: List[SimplifiedTriple] - - def render(self) -> Generator[str, None, None]: - for i, triple in enumerate(self.triples): - yield from triple.render() - if i < len(self.triples) - 1: # Check if it's not the last triple - yield "\n" - - -class ConstructTemplate(SPARQLGrammarBase): - """ - https://www.w3.org/TR/sparql11-query/#rConstructTemplate - ConstructTemplate ::= '{' ConstructTriples? '}' - """ - - construct_triples: ConstructTriples - - def render(self) -> Generator[str, None, None]: - yield "{\n" - yield from self.construct_triples.render() - yield "\n}" - - -class WhereClause(SPARQLGrammarBase): - """ - https://www.w3.org/TR/sparql11-query/#rWhereClause - WhereClause ::= 'WHERE'? GroupGraphPattern - """ - - group_graph_pattern: GroupGraphPattern - - def render(self) -> Generator[str, None, None]: - yield "\nWHERE " - yield from self.group_graph_pattern.render() - - -class ConstructQuery(SPARQLGrammarBase): - """ - https://www.w3.org/TR/sparql11-query/#rConstructQuery - ConstructQuery ::= 'CONSTRUCT' ( ConstructTemplate DatasetClause* WhereClause SolutionModifier | DatasetClause* 'WHERE' '{' TriplesTemplate? '}' SolutionModifier ) - - Currently simplified to only accept a ConstructTemplate, WhereClause, and SolutionModifier. - """ - - construct_template: ConstructTemplate - where_clause: WhereClause - solution_modifier: SolutionModifier - - def render(self) -> Generator[str, None, None]: - yield "CONSTRUCT " - yield from self.construct_template.render() - yield from self.where_clause.render() - yield from self.solution_modifier.render() - - -class Var(SPARQLGrammarBase): - value: str - - def render(self) -> Generator[str, None, None]: - yield Variable(self.value).n3() - - def __hash__(self): - return hash(self.value) - - -class BuiltInCall(SPARQLGrammarBase): - """ - https://www.w3.org/TR/sparql11-query/#rBuiltInCall - """ - - other_expressions: Optional[Union[Aggregate, RegexExpression]] = None - function_name: Optional[str] = None - arguments: Optional[ - List[Union[Expression, Var]] - ] = None # TODO implement remaining argument types e.g. expression list - - @field_validator("function_name") - def validate_function_name(cls, v): - implemented = ["URI", "STR", "CONCAT", "SHA256", "LCASE", "SUM", "isBLANK"] - if v not in implemented: - raise ValueError(f"{v} is not a valid SPARQL built-in function") - return v - - def render(self) -> Generator[str, None, None]: - yield f"{self.function_name}(" - for i, arg in enumerate(self.arguments): - yield from arg.render() - if i < len(self.arguments) - 1: - yield ", " - yield ")" - - @classmethod - def create_with_one_expr( - cls, function_name: str, expression: PrimaryExpression - ) -> "BuiltInCall": - """ - Convenience method for functions that take a single PrimaryExpression as an argument. - Uses create_with_expression_list for consistency in handling expressions. - """ - return cls.create_with_n_expr(function_name, [expression]) - - @classmethod - def create_with_n_expr( - cls, function_name: str, expressions: List[PrimaryExpression] - ) -> "BuiltInCall": - """ - Convenience method for functions that take a list of PrimaryExpressions as arguments. - Wraps each PrimaryExpression in an Expression. 
- """ - wrapped_expressions = [Expression.from_primary_expr(pe) for pe in expressions] - - # Create a BuiltInCall instance for the specified function with the list of wrapped expressions - return cls(function_name=function_name, arguments=wrapped_expressions) - - -class Expression(SPARQLGrammarBase): - """ - Expression ::= ConditionalOrExpression - """ - - conditional_or_expression: ConditionalOrExpression - - def render(self) -> Generator[str, None, None]: - yield from self.conditional_or_expression.render() - - @classmethod - def from_primary_expr(cls, primary_expression: PrimaryExpression) -> Expression: - """ - Convenience method to create an Expression directly from a Var, wrapped in a PrimaryExpression. - """ - return cls( - conditional_or_expression=ConditionalOrExpression( - conditional_and_expressions=[ - ConditionalAndExpression( - value_logicals=[ - ValueLogical( - relational_expression=RelationalExpression( - left=NumericExpression( - additive_expression=AdditiveExpression( - base_expression=MultiplicativeExpression( - base_expression=UnaryExpression( - primary_expression=primary_expression - ) - ) - ) - ) - ) - ) - ] - ) - ] - ) - ) - - -class RelationalExpression(SPARQLGrammarBase): - """ - https://www.w3.org/TR/sparql11-query/#rRelationalExpression - RelationalExpression ::= NumericExpression ( '=' NumericExpression | '!=' NumericExpression | '<' NumericExpression | '>' NumericExpression | '<=' NumericExpression | '>=' NumericExpression | 'IN' ExpressionList | 'NOT' 'IN' ExpressionList )? - """ - - left: NumericExpression - operator: Optional[str] = None # '=', '!=', '<', '>', '<=', '>=' - right: Optional[Union[NumericExpression, ExpressionList]] = None - # expression_list: Optional[ExpressionList] = None #TODO implement expression list - not_in: bool = False # To distinguish between 'IN' and 'NOT IN' - - def render(self) -> Generator[str, None, None]: - yield from self.left.render() - if self.operator: - yield f" {self.operator} " - if self.right: - yield from self.right.render() - # elif self.expression_list: - # if self.not_in: - # yield " NOT IN " - # else: - # yield " IN " - # yield from self.expression_list.render() - - -class ValueLogical(SPARQLGrammarBase): - relational_expression: RelationalExpression - - def render(self) -> Generator[str, None, None]: - yield from self.relational_expression.render() - - -class AdditiveExpression(SPARQLGrammarBase): - """ - https://www.w3.org/TR/sparql11-query/#rAdditiveExpression - AdditiveExpression ::= MultiplicativeExpression ( '+' MultiplicativeExpression | '-' MultiplicativeExpression | ( NumericLiteralPositive | NumericLiteralNegative ) ( ( '*' UnaryExpression ) | ( '/' UnaryExpression ) )* )* - #TODO implement NumericLiteralPositive, NumericLiteralNegative - these should be options in the additional expressions - """ - - base_expression: MultiplicativeExpression - additional_expressions: Optional[ - List[Tuple[str, Union[MultiplicativeExpression, UnaryExpression]]] - ] = [] - - @field_validator("additional_expressions") - def validate_additional_expressions(cls, v): - if v[0] not in ["+", "-", "*", "/"]: - raise ValueError("Operator must be one of '+', '-', '*', or '/'") - return v - - def render(self) -> Generator[str, None, None]: - yield from self.base_expression.render() - for operator, expression in self.additional_expressions: - yield f" {operator} " - yield from expression.render() - + """ + https://www.w3.org/TR/sparql11-query/#rSolutionModifier + SolutionModifier ::= GroupClause? HavingClause? OrderClause? 
LimitOffsetClauses? + """ -class NumericExpression(SPARQLGrammarBase): - additive_expression: AdditiveExpression + order_by: Optional[OrderClause] = None + limit_offset: Optional[LimitOffsetClauses] = None + # having: Optional[HavingClause] + group_by: Optional["GroupClause"] = None - def render(self) -> Generator[str, None, None]: - yield from self.additive_expression.render() + def render(self) -> str: + if self.order_by: + yield from self.order_by.render() + if self.limit_offset: + if self.order_by: + yield "\n" + yield from self.limit_offset.render() + if self.group_by: + yield from self.group_by.render() -class ConditionalAndExpression(SPARQLGrammarBase): +class GroupClause(SPARQLGrammarBase): """ - ConditionalAndExpression ::= ValueLogical ( '&&' ValueLogical )* + https://www.w3.org/TR/sparql11-query/#rGroupClause + GroupClause ::= 'GROUP' 'BY' GroupCondition+ """ - value_logicals: List[ValueLogical] + group_conditions: List["GroupCondition"] def render(self) -> Generator[str, None, None]: - for i, value_logical in enumerate(self.value_logicals): - yield from value_logical.render() - if i < len(self.value_logicals) - 1: - yield " && " + yield "\nGROUP BY " + for i, condition in enumerate(self.group_conditions): + yield from condition.render() + if i < len(self.group_conditions) - 1: # Check if it's not the last triple + yield " " -class ConditionalOrExpression(SPARQLGrammarBase): +class GroupCondition(SPARQLGrammarBase): """ - ConditionalOrExpression ::= ConditionalAndExpression ( '||' ConditionalAndExpression )* + https://www.w3.org/TR/sparql11-query/#rGroupCondition + GroupCondition ::= BuiltInCall | FunctionCall | '(' Expression ( 'AS' Var )? ')' | Var """ - conditional_and_expressions: List[ConditionalAndExpression] + condition: Union["BuiltInCall", FunctionCall, Tuple[Expression, Var], Var] def render(self) -> Generator[str, None, None]: - for i, conditional_and_expression in enumerate( - self.conditional_and_expressions - ): - yield from conditional_and_expression.render() - if i < len(self.conditional_and_expressions) - 1: - yield " || " + if isinstance(self.condition, Tuple): + yield "(" + yield from self.condition[0].render() + yield " AS " + yield from self.condition[1].render() + yield ")" + else: + yield from self.condition.render() -class NumericLiteral(SPARQLGrammarBase): +class ConstructTriples(SPARQLGrammarBase): """ - not implemented properly - only does integer literals + https://www.w3.org/TR/sparql11-query/#rConstructTriples + ConstructTriples ::= TriplesSameSubject ( '.' ConstructTriples? )? + + Simplified implementation that only accepts a list of SimplifiedTriples - avoids implementing the classes associated + with ; and , for TriplesSameSubject etc. in the SPARQL Grammar """ - value: float + triples: List[SimplifiedTriple] def render(self) -> Generator[str, None, None]: - yield str(int(self.value)) + for i, triple in enumerate(self.triples): + yield from triple.render() + if i < len(self.triples) - 1: # Check if it's not the last triple + yield "\n" - def __hash__(self): - return hash(self.value) +class ConstructTemplate(SPARQLGrammarBase): + """ + https://www.w3.org/TR/sparql11-query/#rConstructTemplate + ConstructTemplate ::= '{' ConstructTriples? 
'}' + """ -class BooleanLiteral(SPARQLGrammarBase): - value: bool + construct_triples: ConstructTriples def render(self) -> Generator[str, None, None]: - yield "true" if self.value else "false" + yield "{\n" + yield from self.construct_triples.render() + yield "\n}" -class RDFLiteral(SPARQLGrammarBase): - value: str +class WhereClause(SPARQLGrammarBase): + """ + https://www.w3.org/TR/sparql11-query/#rWhereClause + WhereClause ::= 'WHERE'? GroupGraphPattern + """ - def render(self) -> Generator[str, None, None]: - yield f'"{self.value}"' + group_graph_pattern: GroupGraphPattern - def __hash__(self): - return hash(self.value) + def render(self) -> Generator[str, None, None]: + yield "\nWHERE " + yield from self.group_graph_pattern.render() -class GraphTerm(SPARQLGrammarBase): +class ConstructQuery(SPARQLGrammarBase): """ - Represents a SPARQL GraphTerm. - GraphTerm ::= iri | RDFLiteral | NumericLiteral | BooleanLiteral | BlankNode | NIL + https://www.w3.org/TR/sparql11-query/#rConstructQuery + ConstructQuery ::= 'CONSTRUCT' ( ConstructTemplate DatasetClause* WhereClause SolutionModifier | DatasetClause* 'WHERE' '{' TriplesTemplate? '}' SolutionModifier ) + + Currently simplified to only accept a ConstructTemplate, WhereClause, and SolutionModifier. """ - content: Union[IRI, RDFLiteral, NumericLiteral, BooleanLiteral, BlankNode] + construct_template: ConstructTemplate + where_clause: WhereClause + solution_modifier: SolutionModifier def render(self) -> Generator[str, None, None]: - if self.content == "NIL": - yield "()" - else: - yield from self.content.render() + yield "CONSTRUCT " + yield from self.construct_template.render() + yield from self.where_clause.render() + yield from self.solution_modifier.render() -class IRI(SPARQLGrammarBase): +class BuiltInCall(SPARQLGrammarBase): """ - Represents a SPARQL iri. - iri ::= IRIREF | PrefixedName + https://www.w3.org/TR/sparql11-query/#rBuiltInCall """ - value: Union[URIRef, str] + other_expressions: Optional[Union[Aggregate, RegexExpression]] = None + function_name: Optional[str] = None + arguments: Optional[ + List[Union[Expression, Tuple[Expression], ExpressionList, Var, NIL]] + ] = None + + @field_validator("function_name") + def validate_function_name(cls, v): + implemented = ["URI", "STR", "CONCAT", "SHA256", "LCASE", "isBLANK"] + if v not in implemented: + raise ValueError(f"{v} is not a valid SPARQL built-in function") + return v def render(self) -> Generator[str, None, None]: - if isinstance(self.value, URIRef): - yield self.value.n3() + if self.other_expressions: + yield from self.other_expressions.render() else: - yield self.value + yield f"{self.function_name}(" + if self.arguments: + for i, arg in enumerate(self.arguments): + yield from arg.render() + if i < len(self.arguments) - 1: + yield ", " + yield ")" - def __hash__(self): - return hash(self.value) + @classmethod + def create_with_one_expr( + cls, function_name: str, expression: PrimaryExpression + ) -> "BuiltInCall": + """ + Convenience method for functions that take a single PrimaryExpression as an argument. + Uses create_with_expression_list for consistency in handling expressions. + """ + return cls.create_with_n_expr(function_name, [expression]) + + @classmethod + def create_with_n_expr( + cls, function_name: str, expressions: List[PrimaryExpression] + ) -> "BuiltInCall": + """ + Convenience method for functions that take a list of PrimaryExpressions as arguments. + Wraps each PrimaryExpression in an Expression. 
+ """ + wrapped_expressions = [Expression.from_primary_expr(pe) for pe in expressions] + + # Create a BuiltInCall instance for the specified function with the list of wrapped expressions + return cls(function_name=function_name, arguments=wrapped_expressions) -class BrackettedExpression(SPARQLGrammarBase): - expression: Expression +class BooleanLiteral(SPARQLGrammarBase): + value: bool def render(self) -> Generator[str, None, None]: - yield "(" - yield from self.expression.render() - yield ")" + yield "true" if self.value else "false" -class PrimaryExpression(SPARQLGrammarBase): +class GraphTerm(SPARQLGrammarBase): """ - PrimaryExpression ::= BrackettedExpression | BuiltInCall | iriOrFunction | RDFLiteral | NumericLiteral | BooleanLiteral | Var + Represents a SPARQL GraphTerm. + GraphTerm ::= iri | RDFLiteral | NumericLiteral | BooleanLiteral | BlankNode | NIL """ - content: Union[ - BrackettedExpression, - BuiltInCall, - IRIOrFunction, - RDFLiteral, - NumericLiteral, - BooleanLiteral, - Var, - ] + content: Union[IRI, RDFLiteral, NumericLiteral, BooleanLiteral, BlankNode, NIL] def render(self) -> Generator[str, None, None]: yield from self.content.render() @@ -1015,33 +1105,6 @@ def render(self) -> Generator[str, None, None]: yield ")" -class UnaryExpression(SPARQLGrammarBase): - operator: Optional[str] = None # '!', '+', or '-' - primary_expression: PrimaryExpression - - def render(self) -> Generator[str, None, None]: - if self.operator: - yield f"{self.operator} " - yield from self.primary_expression.render() - - -class MultiplicativeExpression(SPARQLGrammarBase): - base_expression: UnaryExpression - additional_expressions: Optional[List[Tuple[str, UnaryExpression]]] = [] - - @field_validator("additional_expressions") - def validate_additional_expressions(cls, v): - if v[0] not in ["*", "/"]: - raise ValueError("Operator must be '*' or '/'") - return v - - def render(self) -> Generator[str, None, None]: - yield from self.base_expression.render() - for operator, expression in self.additional_expressions: - yield f" {operator} " - yield from expression.render() - - class ExpressionList(SPARQLGrammarBase): expressions: Optional[List[Expression]] = [] @@ -1058,33 +1121,62 @@ def render(self) -> Generator[str, None, None]: class Aggregate(SPARQLGrammarBase): + """ + https://www.w3.org/TR/sparql11-query/#rAggregate + Aggregate ::= 'COUNT' '(' 'DISTINCT'? ( '*' | Expression ) ')' + | 'SUM' '(' 'DISTINCT'? Expression ')' + | 'MIN' '(' 'DISTINCT'? Expression ')' + | 'MAX' '(' 'DISTINCT'? Expression ')' + | 'AVG' '(' 'DISTINCT'? Expression ')' + | 'SAMPLE' '(' 'DISTINCT'? Expression ')' + | 'GROUP_CONCAT' '(' 'DISTINCT'? Expression ( ';' 'SEPARATOR' '=' String )? 
')' + """ + function_name: str # One of 'COUNT', 'SUM', 'MIN', 'MAX', 'AVG', 'SAMPLE', 'GROUP_CONCAT' - distinct: bool = False - expression: Optional[Expression] = None # '*' for COUNT, else Expression + distinct: Optional[bool] = None + expression: Optional[ + Union[str, Expression] + ] = None # '*' for COUNT, else Expression separator: Optional[str] = None # Only used for GROUP_CONCAT + @field_validator("function_name") + def validate_function_name(cls, v): + if v not in ["COUNT", "SUM", "MIN", "MAX", "AVG", "SAMPLE", "GROUP_CONCAT"]: + raise ValueError( + "Function name must be one of 'COUNT', 'SUM', 'MIN', 'MAX', 'AVG', 'SAMPLE', 'GROUP_CONCAT'" + ) + return v + + @field_validator("expression") + def validate_expression(cls, v): + if v == "*" and cls.function_name != "COUNT": + raise ValueError("'*' can only be used for COUNT") + return v + + @field_validator("separator") + def validate_separator(cls, v): + if cls.function_name != "GROUP_CONCAT": + raise ValueError("'SEPARATOR' can only be used for GROUP_CONCAT") + return v + def render(self) -> Generator[str, None, None]: yield f"{self.function_name}(" if self.distinct: yield "DISTINCT " - - # For COUNT, '*' is a valid expression - if self.function_name == "COUNT" and self.expression is None: + if self.expression == "*": yield "*" - elif self.expression is not None: + else: yield from self.expression.render() - # Handle the separator for GROUP_CONCAT - if self.function_name == "GROUP_CONCAT" and self.separator is not None: + if self.separator: yield f" ; SEPARATOR='{self.separator}'" - yield ")" class RegexExpression(SPARQLGrammarBase): """ - Represents a SPARQL REGEX expression. - REGEX(Expression, Expression, Expression) + https://www.w3.org/TR/sparql11-query/#rRegexExpression + RegexExpression ::= 'REGEX' '(' Expression ',' Expression ( ',' Expression )? ')' """ text_expression: Expression @@ -1102,43 +1194,3 @@ def render(self) -> Generator[str, None, None]: yield from self.flags_expression.render() yield ")" - - -class BlankNode(SPARQLGrammarBase): - """ - BlankNode ::= BLANK_NODE_LABEL | ANON - """ - - value: Union[BlankNodeLabel, Anon] - - def render(self): - yield from self.value.render() - - def __hash__(self): - return hash(self.value) - - -class BlankNodeLabel(SPARQLGrammarBase): - """ - BLANK_NODE_LABEL ::= '_:' ( PN_CHARS_U | [0-9] ) ((PN_CHARS|'.')* PN_CHARS)? - """ - - part_1: str - part_2: Optional[str] = None - - def render(self): - yield "_:" - yield self.part_1 - if self.part_2: - yield self.part_2 - - -class Anon: - """ - ANON ::= '[' WS* ']' - https://www.w3.org/TR/sparql11-query/#rANON - """ - - # TODO not sure how to make this more useful - allow input of whitespace? - def render(self): - yield "[]" diff --git a/temp/shacl2sparql.py b/temp/shacl2sparql.py index 91c33db1..2acb4e9e 100644 --- a/temp/shacl2sparql.py +++ b/temp/shacl2sparql.py @@ -38,6 +38,7 @@ Expression, RDFLiteral, IRIOrFunction, + DataBlockValue, ) ONT = Namespace("https://prez.dev/ont/") @@ -86,7 +87,7 @@ def __init__( def _expand_runtime_vars(self): self.runtime_vals_expanded = {} for k, v in self.runtime_values.items(): - if k in ["limit", "offset", "term"]: + if k in ["limit", "offset", "q"]: self.runtime_vals_expanded[k] = v elif v: val = "".join(IRI(value=v).render()) @@ -180,12 +181,12 @@ def parse_endpoint_definition(self): # don't use the target class if there's a sh:target / sh:select #TODO confirm why this caused issues - duplicate # pattern matches in the subquery? 
- elif target_classes: - if ( - endpoint_type == ONT.ListingEndpoint - ): # ignore class for non listing at present - ggp = self.create_select_subquery_for_class_listing(target_classes) - self._add_ggp_to_main_ggps(ggp) + # elif target_classes: + elif ( + endpoint_type == ONT.ListingEndpoint + ): # ignore class for non listing at present + ggp = self.create_select_subquery_for_class_listing(target_classes) + self._add_ggp_to_main_ggps(ggp) def _add_ggp_to_main_ggps(self, ggp): gorugp = GroupOrUnionGraphPattern(group_graph_patterns=[ggp]) @@ -216,61 +217,45 @@ def _create_construct_triples_from_sh_rules(self, rule_node): [subject, predicate, object] ) - # for item in subject, predicate, object: - # if isinstance(item, URIRef): - # item = IRI(value=item) - # elif isinstance(item, BNode): - # if subject == SH.this: - # if isinstance(self.focus_node, Var): - # subject = self.focus_node - # else: - # subject = IRI(value=self.focus_node) - # elif isinstance(subject, Literal): # assume it's a variable of the form ?xyz - # subject = Var(value=str(subject)[1:]) - # if isinstance(object, Literal): # assume it's a variable of the form ?xyz - # object = Var(value=str(object)[1:]) - triple = SimplifiedTriple(subject=subject, predicate=predicate, object=object) if self.construct_triples: self.construct_triples.append(triple) else: self.construct_triples = [triple] - def create_select_subquery_for_class_listing(self, target_classes): - target_class_var = IRI(value=target_classes[0]) - triples_block = TriplesBlock( - triples=[ - SimplifiedTriple( - subject=self.focus_node, - predicate=IRI(value=RDF.type), - object=target_class_var, - ) - ] - ) - if self.additional_ggps: # for example from cql - ggps = GroupGraphPatternSub( - # triples_block=triples_block, # triples block from SHACL profile - graph_patterns_or_triples_blocks=[ - GraphPatternNotTriples( - content=GroupOrUnionGraphPattern( - group_graph_patterns=[ - GroupGraphPattern(content=self.additional_ggps) - ] - ) + def create_select_subquery_for_class_listing(self, target_classes: Optional[List[URIRef]] = None): + ggp = GroupGraphPattern(content=GroupGraphPatternSub()) + + if target_classes: + target_class_var = IRI(value=target_classes[0]) + triples_block = TriplesBlock( + triples=[ + SimplifiedTriple( + subject=self.focus_node, + predicate=IRI(value=RDF.type), + object=target_class_var, ) ] ) + if self.additional_ggps: # for example from cql + gpnt = GraphPatternNotTriples( + content=GroupOrUnionGraphPattern( + group_graph_patterns=[ + GroupGraphPattern(content=self.additional_ggps) + ] + ) + ) + ggp.content.add_pattern(gpnt) else: - ggps = GroupGraphPatternSub(triples_block=triples_block) - ggp = GroupGraphPattern(content=ggps) - sub_select_where = WhereClause(group_graph_pattern=ggp) - select_clause = SelectClause(variables_or_all="*") + ggp.content.add_pattern(triples_block) + wc = WhereClause(group_graph_pattern=ggp) + sc = SelectClause(variables_or_all="*") sol_mod, order_by_triple = self._create_focus_node_solution_modifier() if order_by_triple: - ggps.add_triple(order_by_triple) + ggp.content.add_triple(order_by_triple) ss = SubSelect( - select_clause=select_clause, - where_clause=sub_select_where, + select_clause=sc, + where_clause=wc, solution_modifier=sol_mod, ) ggp = GroupGraphPattern(content=ss) @@ -299,6 +284,9 @@ def create_select_subquery_from_template(self, target_bn): return ggp def _create_focus_node_solution_modifier(self): + """ + Solution modifiers include LIMIT, OFFSET, ORDER BY clauses. 
+ """ order_clause = order_by_triple = None # order clause is optional order_by_path = self.merged_runtime_and_default_vals.get("order_by") if order_by_path: @@ -329,6 +317,9 @@ def _create_focus_node_solution_modifier(self): return sol_mod, order_by_triple def _set_default_limit_and_offset(self): + """ + Sets the default limit, offset, and ordering for a listing endpoint. + """ default_limit = list( self.endpoint_graph.objects( subject=self.endpoint_uri, predicate=SHEXT.limit @@ -470,29 +461,25 @@ def process_bn_level(depth, max_depth, outer_ggps): self._add_ggp_to_main_ggps(container_ggp) def _parse_property_shapes(self, property_node, i): - def process_path_object(path_object): - # if path_object == SHEXT.allPredicateValues: - # predicates.append(Variable("preds")) - if isinstance(path_object, BNode): - predicate_objects_gen = self.profile_graph.predicate_objects( - subject=path_object + def process_path_object(path_obj: Union[URIRef, BNode]): + if isinstance(path_obj, BNode): + pred_objects_gen = self.profile_graph.predicate_objects( + subject=path_obj ) - bnode_pred, bnode_obj = next(predicate_objects_gen, (None, None)) - if bnode_obj == SH.union: + bn_pred, bn_obj = next(pred_objects_gen, (None, None)) + if bn_obj == SH.union: pass - elif bnode_pred == SH.inversePath: - inverse_preds.append(IRI(value=bnode_obj)) - elif bnode_pred == SH.alternativePath: - predicates.extend(list(Collection(self.profile_graph, bnode_obj))) + elif bn_pred == SH.inversePath: + inverse_preds.append(IRI(value=bn_obj)) + elif bn_pred == SH.alternativePath: + predicates.extend(list(Collection(self.profile_graph, bn_obj))) else: # sequence paths - predicates.append( - tuple(Collection(self.profile_graph, path_object)) - ) + predicates.append(tuple(Collection(self.profile_graph, path_obj))) else: # a plain path specification to restrict the predicate to a specific value - predicates.append(path_object) + predicates.append(path_obj) - inverse_preds = [] - predicates = [] + inverse_preds = [] # list of IRIs + predicates = [] # list of IRIs union_items = None path_object = self.profile_graph.value( subject=property_node, predicate=SH.path, default=None @@ -539,7 +526,9 @@ def process_path_object(path_object): gpnt = GraphPatternNotTriples(content=optional) self.main_where_ggps.add_pattern(gpnt) - def _add_inverse_preds(self, ggps, inverse_preds, i): + def _add_inverse_preds( + self, ggps: GroupGraphPatternSub, inverse_preds: List[IRI], i + ): if inverse_preds: ggps.add_triple( SimplifiedTriple( @@ -548,13 +537,13 @@ def _add_inverse_preds(self, ggps, inverse_preds, i): object=self.focus_node, ) ) - inline_data_one_var = InlineDataOneVar( - variable=Var(value=f"inv_pred_{i}"), values=inverse_preds + dbv_list = [DataBlockValue(value=p) for p in inverse_preds] + ildov = InlineDataOneVar( + variable=Var(value=f"inv_pred_{i}"), datablockvalues=dbv_list ) - data_block = DataBlock(block=inline_data_one_var) + data_block = DataBlock(block=ildov) inline_data = InlineData(data_block=data_block) gpnt = GraphPatternNotTriples(content=inline_data) - # ggps_sub = GroupGraphPatternSub(graph_patterns_or_triples_blocks=[gpnt]) ggps.add_pattern(gpnt) def _add_predicate_constraints(self, predicates, property_node, ggp_list): @@ -593,8 +582,9 @@ def _add_predicate_constraints(self, predicates, property_node, ggp_list): values = [ PrimaryExpression(content=IRIOrFunction(iri=p)) for p in predicates ] - values_constraint = Filter.filter_in( - focus=Var(value="preds"), comparators=values, not_in=True + focus_pe = 
PrimaryExpression(content=Var(value="preds")) + values_constraint = Filter.filter_relational( + focus=focus_pe, comparators=values, operator="NOT IN" ) gpnt = GraphPatternNotTriples(content=values_constraint) if ggp_list: @@ -609,8 +599,9 @@ def _add_predicate_constraints(self, predicates, property_node, ggp_list): elif ( IRI(value=SHEXT.allPredicateValues) not in predicates ): # add VALUES clause + dbv_list = [DataBlockValue(value=p) for p in predicates] inline_data_one_var = InlineDataOneVar( - variable=Var(value="preds"), values=predicates + variable=Var(value="preds"), datablockvalues=dbv_list ) data_block = DataBlock(block=inline_data_one_var) inline_data = InlineData(data_block=data_block) @@ -645,8 +636,9 @@ def _add_object_constrains(self, ggp_list, property_node): objs.append(RDFLiteral(value=obj)) elif isinstance(obj, URIRef): objs.append(IRI(value=obj)) + dbv_list = [DataBlockValue(value=p) for p in objs] inline_data_one_var = InlineDataOneVar( - variable=Var(value="objs"), values=objs + variable=Var(value="objs"), datablockvalues=dbv_list ) data_block = DataBlock(block=inline_data_one_var) inline_data = InlineData(data_block=data_block) diff --git a/temp/test_search.py b/temp/test_search.py new file mode 100644 index 00000000..0255d2d9 --- /dev/null +++ b/temp/test_search.py @@ -0,0 +1,13 @@ +from rdflib import RDFS + +from prez.sparql.search_query import SearchQuery + +# from temp.grammar import SearchQuery + +test = SearchQuery( + search_term="test", + pred_vals=[RDFS.label], + limit=10, + offset=0, +).render() +print("") diff --git a/test_data/catprez.ttl b/test_data/catprez.ttl index 6cd0956b..f599aeed 100644 --- a/test_data/catprez.ttl +++ b/test_data/catprez.ttl @@ -18,4 +18,21 @@ ex:LowerLevelCatalog a dcat:Catalog ; ex:Resource a dcat:Resource ; rdfs:label "Resource" ; ex:property "resource property" ; +. + +ex:TopLevelCatalogTwo a dcat:Catalog ; + rdfs:label "amazing catalog" ; + dcterms:hasPart ex:LowerLevelCatalogTwo ; + ex:property "complete" ; +. + +ex:LowerLevelCatalogTwo a dcat:Catalog ; + rdfs:label "rightful" ; + dcterms:hasPart ex:ResourceTwo ; + ex:property "exposure" +. + +ex:ResourceTwo a dcat:Resource ; + rdfs:label "salty" ; + ex:property "proficient" ; . \ No newline at end of file diff --git a/test_data/vocprez.ttl b/test_data/vocprez.ttl index 3fd7ea34..b379711a 100644 --- a/test_data/vocprez.ttl +++ b/test_data/vocprez.ttl @@ -5,7 +5,7 @@ PREFIX rdfs: PREFIX skos: ex:VocPrezCatalog a dcat:Catalog ; - rdfs:label "Catalog" ; + rdfs:label "A Demo Catalog" ; dcterms:hasPart ex:SchemingConceptScheme ; ex:property "cataract" ; . diff --git a/tests/conftest.py b/tests/conftest.py index ba5929dd..a0c54127 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,4 @@ import os os.environ["SPARQL_REPO_TYPE"] = "pyoxigraph" -os.environ["LOG_LEVEL"] = "DEBUG" +# os.environ["LOG_LEVEL"] = "DEBUG" diff --git a/tests/data/spaceprez/expected_responses/dataset_anot.ttl b/tests/data/spaceprez/expected_responses/dataset_anot.ttl index 1e367d30..325d30a2 100644 --- a/tests/data/spaceprez/expected_responses/dataset_anot.ttl +++ b/tests/data/spaceprez/expected_responses/dataset_anot.ttl @@ -18,8 +18,8 @@ , , ; - prez:link "/s/datasets/exds:sandgate" ; - prez:members [ prez:link "/s/datasets/exds:sandgate/collections" ] . + prez:link "/s/catalogs/exds:sandgate" ; + prez:members [ prez:link "/s/catalogs/exds:sandgate/collections" ] . 
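For orientation, the grammar classes patched in temp/grammar.py above compose into query strings by chaining their render() generators. A minimal sketch, assuming the constructor signatures shown in this diff and the temp.grammar module path:

    from rdflib import RDF

    from temp.grammar import (
        ConstructQuery, ConstructTemplate, ConstructTriples, GroupGraphPattern,
        GroupGraphPatternSub, IRI, SimplifiedTriple, SolutionModifier,
        TriplesBlock, Var, WhereClause,
    )

    # One triple reused for both the CONSTRUCT template and the WHERE pattern.
    triple = SimplifiedTriple(
        subject=Var(value="focus_node"),
        predicate=IRI(value=RDF.type),
        object=Var(value="type"),
    )
    ggp = GroupGraphPattern(content=GroupGraphPatternSub())
    ggp.content.add_pattern(TriplesBlock(triples=[triple]))
    query = ConstructQuery(
        construct_template=ConstructTemplate(
            construct_triples=ConstructTriples(triples=[triple])
        ),
        where_clause=WhereClause(group_graph_pattern=ggp),
        solution_modifier=SolutionModifier(),  # all sub-clauses optional
    )
    print("".join(query.render()))  # CONSTRUCT { ... } WHERE { ... }

The exact whitespace depends on the render() implementations above; the join-over-generator idiom is the same one this patch uses elsewhere (e.g. "".join(IRI(value=v).render())).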
dcterms:description rdfs:label "Description"@en ; dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . @@ -51,25 +51,25 @@ skos:prefLabel rdfs:label "preferred label"@en ; dcterms:description "Hydrological catchments that are 'contracted', that is, guarenteed, to appear on multiple Geofabric surface hydrology data products"@en ; dcterms:identifier "sndgt:catchments"^^prez:identifier ; dcterms:title "Geofabric Contracted Catchments"@en ; - prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments" . + prez:link "/s/catalogs/exds:sandgate/collections/sndgt:catchments" . rdfs:label "Sandgate are demo Facilities"@en ; dcterms:description "Sandgate area demo Facilities"@en ; dcterms:identifier "sndgt:facilities"^^prez:identifier ; dcterms:title "Sandgate are demo Facilities"@en ; - prez:link "/s/datasets/exds:sandgate/collections/sndgt:facilities" . + prez:link "/s/catalogs/exds:sandgate/collections/sndgt:facilities" . rdfs:label "Sandgate flooded areas"@en ; dcterms:description "Sandgate flooded areas"@en ; dcterms:identifier "sndgt:floods"^^prez:identifier ; dcterms:title "Sandgate flooded areas"@en ; - prez:link "/s/datasets/exds:sandgate/collections/sndgt:floods" . + prez:link "/s/catalogs/exds:sandgate/collections/sndgt:floods" . rdfs:label "Sandgate main roads"@en ; dcterms:description "Sandgate main roads"@en ; dcterms:identifier "sndgt:roads"^^prez:identifier ; dcterms:title "Sandgate main roads"@en ; - prez:link "/s/datasets/exds:sandgate/collections/sndgt:roads" . + prez:link "/s/catalogs/exds:sandgate/collections/sndgt:roads" . geo:Geometry skos:definition "A coherent set of direct positions in space. The positions are held within a Spatial Reference System (SRS)."@en ; skos:prefLabel "Geometry"@en . diff --git a/tests/data/spaceprez/expected_responses/dataset_listing_anot.ttl b/tests/data/spaceprez/expected_responses/dataset_listing_anot.ttl index 97655a36..08b86eb5 100644 --- a/tests/data/spaceprez/expected_responses/dataset_listing_anot.ttl +++ b/tests/data/spaceprez/expected_responses/dataset_listing_anot.ttl @@ -10,7 +10,7 @@ dcterms:description "Example floods, roads, catchment and facilities in the Sandgate are"@en ; dcterms:identifier "exds:sandgate"^^prez:identifier ; dcterms:title "Sandgate example dataset"@en ; - prez:link "/s/datasets/exds:sandgate" . + prez:link "/s/catalogs/exds:sandgate" . dcterms:description rdfs:label "Description"@en ; dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . @@ -34,18 +34,18 @@ skos:prefLabel rdfs:label "preferred label"@en ; dcterms:description "The Australian national dataset of important hydrological features such as rivers, water bodies, aquifers and monitoring points"@en ; dcterms:identifier "ldgovau:geofabric"^^prez:identifier ; dcterms:title "Australian Hydrological Geospatial Fabric"@en ; - prez:link "/s/datasets/ldgovau:geofabric" . + prez:link "/s/catalogs/ldgovau:geofabric" . a dcat:Dataset ; dcterms:description "The Australian Geocoded National Address File (G-NAF) is Australia’s authoritative, geocoded address file. 
It is built and maintained by Geoscape Australia using authoritative government data.."@en ; dcterms:identifier "ldgovau:gnaf"^^prez:identifier ; dcterms:title "Geocoded National Address File"@en ; skos:prefLabel "Geocoded National Address File"@en ; - prez:link "/s/datasets/ldgovau:gnaf" . + prez:link "/s/catalogs/ldgovau:gnaf" . a dcat:Dataset ; dcterms:identifier "preztest:dataset"^^prez:identifier ; - prez:link "/s/datasets/preztest:dataset" . + prez:link "/s/catalogs/preztest:dataset" . dcat:Dataset rdfs:label "Dataset"@en ; skos:definition "A collection of data, published or curated by a single source, and available for access or download in one or more represenations."@en ; diff --git a/tests/data/spaceprez/expected_responses/feature_anot.ttl b/tests/data/spaceprez/expected_responses/feature_anot.ttl index 28d7f7de..f3495b07 100644 --- a/tests/data/spaceprez/expected_responses/feature_anot.ttl +++ b/tests/data/spaceprez/expected_responses/feature_anot.ttl @@ -15,7 +15,7 @@ dcterms:identifier "sndgt:catchments"^^prez:identifier ; dcterms:title "Geofabric Contracted Catchments"@en ; rdfs:member ; - prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments" . + prez:link "/s/catalogs/exds:sandgate/collections/sndgt:catchments" . dcterms:description rdfs:label "Description"@en ; dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . @@ -54,7 +54,7 @@ skos:prefLabel rdfs:label "preferred label"@en ; geo:hasGeometry [ a geo:Geometry ; geo:asGeoJSON "{\"type\": \"Polygon\", \"coordinates\": [[[153.06, -27.28], [153.06, -27.2825], [153.0625, -27.2825], [153.065, -27.2825], [153.065, -27.305], [153.0675, -27.305], [153.0675, -27.31], [153.07, -27.31], [153.07, -27.3125], [153.0725, -27.3125], [153.0725, -27.3175], [153.075, -27.3175], [153.075, -27.32], [153.0775, -27.32], [153.0775, -27.3225], [153.08, -27.3225], [153.085, -27.3225], [153.085, -27.325], [153.0875, -27.325], [153.0875, -27.33], [153.085, -27.33], [153.0825, -27.33], [153.0825, -27.3325], [153.085, -27.3325], [153.085, -27.335], [153.0875, -27.335], [153.09, -27.335], [153.09, -27.3375], [153.0925, -27.3375], [153.0925, -27.34], [153.0975, -27.34], [153.0975, -27.3425], [153.1025, -27.3425], [153.1025, -27.345], [153.1075, -27.345], [153.1075, -27.3475], [153.11, -27.3475], [153.1125, -27.3475], [153.115, -27.3475], [153.115, -27.35], [153.12, -27.35], [153.12, -27.3525], [153.125, -27.3525], [153.125, -27.355], [153.13, -27.355], [153.13, -27.3575], [153.135, -27.3575], [153.135, -27.36], [153.1375, -27.36], [153.1425, -27.36], [153.1475, -27.36], [153.1525, -27.36], [153.1525, -27.3625], [153.155, -27.3625], [153.155, -27.365], [153.1575, -27.365], [153.1575, -27.375], [153.16, -27.375], [153.16, -27.3775], [153.16, -27.38], [153.1575, -27.38], [153.155, -27.38], [153.155, -27.3825], [153.1525, -27.3825], [153.1525, -27.385], [153.15, -27.385], [153.15, -27.3875], [153.145, -27.3875], [153.145, -27.39], [153.1425, -27.39], [153.1425, -27.3925], [153.14, -27.3925], [153.14, -27.395], [153.14, -27.3975], [153.14, -27.4], [153.1375, -27.4], [153.1375, -27.4025], [153.135, -27.4025], [153.135, -27.405], [153.135, -27.4075], [153.135, -27.4125], [153.135, -27.415], [153.13, -27.415], [153.13, -27.4175], [153.1275, -27.4175], [153.1225, -27.4175], [153.1225, -27.42], [153.1175, -27.42], [153.1125, -27.42], [153.1125, -27.4175], [153.11, -27.4175], [153.11, -27.415], [153.1075, -27.415], [153.1075, 
-27.4125], [153.0975, -27.4125], [153.0975, -27.415], [153.0925, -27.415], [153.0875, -27.415], [153.085, -27.415], [153.08, -27.415], [153.08, -27.4125], [153.0775, -27.4125], [153.0775, -27.41], [153.075, -27.41], [153.075, -27.405], [153.07, -27.405], [153.07, -27.4025], [153.0675, -27.4025], [153.0675, -27.4], [153.065, -27.4], [153.065, -27.3975], [153.0625, -27.3975], [153.0625, -27.395], [153.06, -27.395], [153.06, -27.3925], [153.0275, -27.3925], [153.0275, -27.395], [153.025, -27.395], [153.025, -27.3975], [153.0175, -27.3975], [153.0175, -27.4], [153.0125, -27.4], [153.0125, -27.4025], [153.005, -27.4025], [153.005, -27.405], [153.0025, -27.405], [152.9975, -27.405], [152.9975, -27.4025], [152.9925, -27.4025], [152.9925, -27.4], [152.9875, -27.4], [152.9825, -27.4], [152.9825, -27.3975], [152.98, -27.3975], [152.98, -27.3925], [152.975, -27.3925], [152.975, -27.3875], [152.97, -27.3875], [152.96, -27.3875], [152.96, -27.39], [152.955, -27.39], [152.955, -27.3925], [152.945, -27.3925], [152.94, -27.3925], [152.9375, -27.3925], [152.9375, -27.39], [152.925, -27.39], [152.925, -27.385], [152.925, -27.3825], [152.93, -27.3825], [152.9325, -27.3825], [152.9325, -27.38], [152.9375, -27.38], [152.9375, -27.3825], [152.94, -27.3825], [152.94, -27.38], [152.9475, -27.38], [152.9475, -27.3825], [152.9525, -27.3825], [152.9525, -27.38], [152.965, -27.38], [152.9675, -27.38], [152.9675, -27.3775], [152.98, -27.3775], [152.98, -27.375], [152.9825, -27.375], [152.9825, -27.3725], [152.985, -27.3725], [152.985, -27.37], [152.9875, -27.37], [152.9875, -27.3675], [152.99, -27.3675], [152.99, -27.3625], [152.9925, -27.3625], [152.9925, -27.355], [152.995, -27.355], [152.995, -27.3525], [153, -27.3525], [153, -27.35], [153.005, -27.35], [153.01, -27.35], [153.01, -27.3475], [153.0175, -27.3475], [153.0175, -27.335], [153.02, -27.335], [153.02, -27.33], [153.0225, -27.33], [153.0225, -27.3275], [153.025, -27.3275], [153.025, -27.325], [153.0275, -27.325], [153.0275, -27.3225], [153.03, -27.3225], [153.03, -27.32], [153.0325, -27.32], [153.0325, -27.3175], [153.035, -27.3175], [153.035, -27.305], [153.0375, -27.305], [153.0375, -27.3], [153.04, -27.3], [153.04, -27.2975], [153.0425, -27.2975], [153.0425, -27.2825], [153.04, -27.2825], [153.04, -27.28], [153.0425, -27.28], [153.05, -27.28], [153.06, -27.28]]]}"^^geo:geoJSONLiteral ; geo:asWKT "POLYGON ((153.06 -27.28, 153.06 -27.2825, 153.0625 -27.2825, 153.065 -27.2825, 153.065 -27.305, 153.0675 -27.305, 153.0675 -27.31, 153.07 -27.31, 153.07 -27.3125, 153.0725 -27.3125, 153.0725 -27.3175, 153.075 -27.3175, 153.075 -27.32, 153.0775 -27.32, 153.0775 -27.3225, 153.08 -27.3225, 153.085 -27.3225, 153.085 -27.325, 153.0875 -27.325, 153.0875 -27.33, 153.085 -27.33, 153.0825 -27.33, 153.0825 -27.3325, 153.085 -27.3325, 153.085 -27.335, 153.0875 -27.335, 153.09 -27.335, 153.09 -27.3375, 153.0925 -27.3375, 153.0925 -27.34, 153.0975 -27.34, 153.0975 -27.3425, 153.1025 -27.3425, 153.1025 -27.345, 153.1075 -27.345, 153.1075 -27.3475, 153.11 -27.3475, 153.1125 -27.3475, 153.115 -27.3475, 153.115 -27.35, 153.12 -27.35, 153.12 -27.3525, 153.125 -27.3525, 153.125 -27.355, 153.13 -27.355, 153.13 -27.3575, 153.135 -27.3575, 153.135 -27.36, 153.1375 -27.36, 153.1425 -27.36, 153.1475 -27.36, 153.1525 -27.36, 153.1525 -27.3625, 153.155 -27.3625, 153.155 -27.365, 153.1575 -27.365, 153.1575 -27.375, 153.16 -27.375, 153.16 -27.3775, 153.16 -27.38, 153.1575 -27.38, 153.155 -27.38, 153.155 -27.3825, 153.1525 -27.3825, 153.1525 -27.385, 153.15 -27.385, 153.15 -27.3875, 153.145 
-27.3875, 153.145 -27.39, 153.1425 -27.39, 153.1425 -27.3925, 153.14 -27.3925, 153.14 -27.395, 153.14 -27.3975, 153.14 -27.4, 153.1375 -27.4, 153.1375 -27.4025, 153.135 -27.4025, 153.135 -27.405, 153.135 -27.4075, 153.135 -27.4125, 153.135 -27.415, 153.13 -27.415, 153.13 -27.4175, 153.1275 -27.4175, 153.1225 -27.4175, 153.1225 -27.42, 153.1175 -27.42, 153.1125 -27.42, 153.1125 -27.4175, 153.11 -27.4175, 153.11 -27.415, 153.1075 -27.415, 153.1075 -27.4125, 153.0975 -27.4125, 153.0975 -27.415, 153.0925 -27.415, 153.0875 -27.415, 153.085 -27.415, 153.08 -27.415, 153.08 -27.4125, 153.0775 -27.4125, 153.0775 -27.41, 153.075 -27.41, 153.075 -27.405, 153.07 -27.405, 153.07 -27.4025, 153.0675 -27.4025, 153.0675 -27.4, 153.065 -27.4, 153.065 -27.3975, 153.0625 -27.3975, 153.0625 -27.395, 153.06 -27.395, 153.06 -27.3925, 153.0275 -27.3925, 153.0275 -27.395, 153.025 -27.395, 153.025 -27.3975, 153.0175 -27.3975, 153.0175 -27.4, 153.0125 -27.4, 153.0125 -27.4025, 153.005 -27.4025, 153.005 -27.405, 153.0025 -27.405, 152.9975 -27.405, 152.9975 -27.4025, 152.9925 -27.4025, 152.9925 -27.4, 152.9875 -27.4, 152.9825 -27.4, 152.9825 -27.3975, 152.98 -27.3975, 152.98 -27.3925, 152.975 -27.3925, 152.975 -27.3875, 152.97 -27.3875, 152.96 -27.3875, 152.96 -27.39, 152.955 -27.39, 152.955 -27.3925, 152.945 -27.3925, 152.94 -27.3925, 152.9375 -27.3925, 152.9375 -27.39, 152.925 -27.39, 152.925 -27.385, 152.925 -27.3825, 152.93 -27.3825, 152.9325 -27.3825, 152.9325 -27.38, 152.9375 -27.38, 152.9375 -27.3825, 152.94 -27.3825, 152.94 -27.38, 152.9475 -27.38, 152.9475 -27.3825, 152.9525 -27.3825, 152.9525 -27.38, 152.965 -27.38, 152.9675 -27.38, 152.9675 -27.3775, 152.98 -27.3775, 152.98 -27.375, 152.9825 -27.375, 152.9825 -27.3725, 152.985 -27.3725, 152.985 -27.37, 152.9875 -27.37, 152.9875 -27.3675, 152.99 -27.3675, 152.99 -27.3625, 152.9925 -27.3625, 152.9925 -27.355, 152.995 -27.355, 152.995 -27.3525, 153 -27.3525, 153 -27.35, 153.005 -27.35, 153.01 -27.35, 153.01 -27.3475, 153.0175 -27.3475, 153.0175 -27.335, 153.02 -27.335, 153.02 -27.33, 153.0225 -27.33, 153.0225 -27.3275, 153.025 -27.3275, 153.025 -27.325, 153.0275 -27.325, 153.0275 -27.3225, 153.03 -27.3225, 153.03 -27.32, 153.0325 -27.32, 153.0325 -27.3175, 153.035 -27.3175, 153.035 -27.305, 153.0375 -27.305, 153.0375 -27.3, 153.04 -27.3, 153.04 -27.2975, 153.0425 -27.2975, 153.0425 -27.2825, 153.04 -27.2825, 153.04 -27.28, 153.0425 -27.28, 153.05 -27.28, 153.06 -27.28))"^^geo:wktLiteral ] ; - prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments/items/sndgt:cc12109444" . + prez:link "/s/catalogs/exds:sandgate/collections/sndgt:catchments/items/sndgt:cc12109444" . geo:Feature skos:definition "A discrete spatial phenomenon in a universe of discourse."@en ; skos:prefLabel "Feature"@en . diff --git a/tests/data/spaceprez/expected_responses/feature_collection_anot.ttl b/tests/data/spaceprez/expected_responses/feature_collection_anot.ttl index 04d39f4a..80ee4e30 100644 --- a/tests/data/spaceprez/expected_responses/feature_collection_anot.ttl +++ b/tests/data/spaceprez/expected_responses/feature_collection_anot.ttl @@ -10,7 +10,7 @@ dcterms:identifier "exds:sandgate"^^prez:identifier ; dcterms:title "Sandgate example dataset"@en ; rdfs:member ; - prez:link "/s/datasets/exds:sandgate" . + prez:link "/s/catalogs/exds:sandgate" . dcterms:description rdfs:label "Description"@en ; dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . 
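The shacl2sparql.py changes above stop passing raw terms to InlineDataOneVar and wrap each term in a DataBlockValue first. A minimal sketch of building a VALUES clause that way, assuming the temp.grammar classes as patched in this diff:

    from rdflib import RDFS

    from temp.grammar import (
        DataBlock, DataBlockValue, GraphPatternNotTriples, InlineData,
        InlineDataOneVar, IRI, Var,
    )

    predicates = [IRI(value=RDFS.label), IRI(value=RDFS.comment)]
    dbv_list = [DataBlockValue(value=p) for p in predicates]
    ildov = InlineDataOneVar(variable=Var(value="preds"), datablockvalues=dbv_list)
    gpnt = GraphPatternNotTriples(
        content=InlineData(data_block=DataBlock(block=ildov))
    )
    print("".join(gpnt.render()))
    # Expected shape: VALUES ?preds { <...rdf-schema#label> <...rdf-schema#comment> }

The same DataBlockValue wrapping is applied to inverse predicates and object constraints above, so all three call sites now feed InlineDataOneVar the same value type.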
@@ -46,8 +46,8 @@ skos:prefLabel rdfs:label "preferred label"@en ; dcterms:title "Geofabric Contracted Catchments"@en ; geo:hasBoundingBox [ a geo:Geometry ; geo:asWKT "POLYGON ((152.9075 -27.42,153.16 -27.42,153.16 -27.2775,152.9075 -27.2775,152.9075 -27.42))"^^geo:wktLiteral ] ; - prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments" ; - prez:members [ prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments/items" ] . + prez:link "/s/catalogs/exds:sandgate/collections/sndgt:catchments" ; + prez:members [ prez:link "/s/catalogs/exds:sandgate/collections/sndgt:catchments/items" ] . geo:FeatureCollection skos:definition "A collection of individual Features."@en ; skos:prefLabel "Feature Collection"@en . diff --git a/tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl b/tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl index 501351f6..b610456b 100644 --- a/tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl +++ b/tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl @@ -13,7 +13,7 @@ , , ; - prez:link "/s/datasets/exds:sandgate" . + prez:link "/s/catalogs/exds:sandgate" . dcterms:description rdfs:label "Description"@en ; dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . @@ -40,28 +40,28 @@ skos:prefLabel rdfs:label "preferred label"@en ; dcterms:description "Hydrological catchments that are 'contracted', that is, guarenteed, to appear on multiple Geofabric surface hydrology data products"@en ; dcterms:identifier "sndgt:catchments"^^prez:identifier ; dcterms:title "Geofabric Contracted Catchments"@en ; - prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments" . + prez:link "/s/catalogs/exds:sandgate/collections/sndgt:catchments" . a geo:FeatureCollection ; rdfs:label "Sandgate are demo Facilities"@en ; dcterms:description "Sandgate area demo Facilities"@en ; dcterms:identifier "sndgt:facilities"^^prez:identifier ; dcterms:title "Sandgate are demo Facilities"@en ; - prez:link "/s/datasets/exds:sandgate/collections/sndgt:facilities" . + prez:link "/s/catalogs/exds:sandgate/collections/sndgt:facilities" . a geo:FeatureCollection ; rdfs:label "Sandgate flooded areas"@en ; dcterms:description "Sandgate flooded areas"@en ; dcterms:identifier "sndgt:floods"^^prez:identifier ; dcterms:title "Sandgate flooded areas"@en ; - prez:link "/s/datasets/exds:sandgate/collections/sndgt:floods" . + prez:link "/s/catalogs/exds:sandgate/collections/sndgt:floods" . a geo:FeatureCollection ; rdfs:label "Sandgate main roads"@en ; dcterms:description "Sandgate main roads"@en ; dcterms:identifier "sndgt:roads"^^prez:identifier ; dcterms:title "Sandgate main roads"@en ; - prez:link "/s/datasets/exds:sandgate/collections/sndgt:roads" . + prez:link "/s/catalogs/exds:sandgate/collections/sndgt:roads" . 
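The BuiltInCall convenience constructors added in temp/grammar.py above exist so callers do not have to hand-build the ConditionalOrExpression chain. A short usage sketch, assuming the temp.grammar module path shown in this diff:

    from temp.grammar import BuiltInCall, PrimaryExpression, Var

    str_call = BuiltInCall.create_with_one_expr(
        "STR", PrimaryExpression(content=Var(value="focus_node"))
    )
    print("".join(str_call.render()))  # STR(?focus_node)

create_with_one_expr delegates to create_with_n_expr, which wraps each PrimaryExpression via Expression.from_primary_expr before constructing the BuiltInCall, as defined above.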
geo:FeatureCollection skos:definition "A collection of individual Features."@en ; skos:prefLabel "Feature Collection"@en ; diff --git a/tests/data/spaceprez/expected_responses/feature_listing_anot.ttl b/tests/data/spaceprez/expected_responses/feature_listing_anot.ttl index 75d60e88..8607dab8 100644 --- a/tests/data/spaceprez/expected_responses/feature_listing_anot.ttl +++ b/tests/data/spaceprez/expected_responses/feature_listing_anot.ttl @@ -16,7 +16,7 @@ dcterms:title "Geofabric Contracted Catchments"@en ; rdfs:member , ; - prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments" . + prez:link "/s/catalogs/exds:sandgate/collections/sndgt:catchments" . dcterms:description rdfs:label "Description"@en ; dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . @@ -41,12 +41,12 @@ skos:prefLabel rdfs:label "preferred label"@en ; a geo:Feature ; rdfs:label "Contracted Catchment 12109444" ; dcterms:identifier "sndgt:cc12109444"^^prez:identifier ; - prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments/items/sndgt:cc12109444" . + prez:link "/s/catalogs/exds:sandgate/collections/sndgt:catchments/items/sndgt:cc12109444" . a geo:Feature ; rdfs:label "Contracted Catchment 12109445" ; dcterms:identifier "sndgt:cc12109445"^^prez:identifier ; - prez:link "/s/datasets/exds:sandgate/collections/sndgt:catchments/items/sndgt:cc12109445" . + prez:link "/s/catalogs/exds:sandgate/collections/sndgt:catchments/items/sndgt:cc12109445" . geo:Feature skos:definition "A discrete spatial phenomenon in a universe of discourse."@en ; skos:prefLabel "Feature"@en ; diff --git a/tests/data/vocprez/input/catalog-of-vocabs.ttl b/tests/data/vocprez/input/catalog-of-vocabs.ttl new file mode 100644 index 00000000..3674424b --- /dev/null +++ b/tests/data/vocprez/input/catalog-of-vocabs.ttl @@ -0,0 +1,12 @@ +PREFIX dcat: +PREFIX dcterms: + + a dcat:Catalog ; + dcterms:hasPart , + , + , + , + , + , + ; + . 
\ No newline at end of file diff --git a/tests/test_dd_profiles.py b/tests/test_dd_profiles.py index 31c7e90b..194a63bf 100644 --- a/tests/test_dd_profiles.py +++ b/tests/test_dd_profiles.py @@ -47,7 +47,7 @@ def override_get_repo(): "url, mediatype, expected_data", [ [ - "/v/vocab?_profile=prfl:dd&_mediatype=", + "/v/catalogs/prez:vocprez-container-catalog/collections?_profile=prfl:dd&_mediatype=", "application/json", { "@context": { diff --git a/tests/test_endpoints_ok.py b/tests/test_endpoints_ok.py index 6ed71bdd..88a49e83 100644 --- a/tests/test_endpoints_ok.py +++ b/tests/test_endpoints_ok.py @@ -21,7 +21,7 @@ def test_store() -> Store: # Create a new pyoxigraph Store store = Store() - for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"): + for file in Path(__file__).parent.glob("../test_data/*.ttl"): store.load(file.read_bytes(), "text/turtle") return store @@ -62,39 +62,64 @@ def override_get_repo(): app.dependency_overrides.clear() -def test_catprez_links(client: TestClient, visited: Optional[Set] = None, link="/c/catalogs"): +def test_catprez_links( + client: TestClient, visited: Optional[Set] = None, link="/c/catalogs" +): if not visited: visited = set() response = client.get(link) - links_in_response = list(Graph().parse(data=response.text, format="turtle").objects(None, PREZ.link)) + g = Graph().parse(data=response.text, format="turtle") + links = list(g.objects(None, PREZ.link)) + member_bnode_list = list(g.objects(None, PREZ.members)) + if member_bnode_list: + member_bnode = member_bnode_list[0] + member_links = list(g.objects(member_bnode, PREZ.link)) + links.extend(member_links) assert response.status_code == 200 - for link in links_in_response: + for link in links: + print(link) if link not in visited: visited.add(link) test_catprez_links(client, visited, str(link)) - -def test_vocprez_links(client: TestClient, visited: Optional[Set] = None, link="/v/catalogs"): +def test_vocprez_links( + client: TestClient, visited: Optional[Set] = None, link="/v/catalogs" +): if not visited: visited = set() response = client.get(link) - links_in_response = list(Graph().parse(data=response.text, format="turtle").objects(None, PREZ.link)) + g = Graph().parse(data=response.text, format="turtle") + links = list(g.objects(None, PREZ.link)) + member_bnode_list = list(g.objects(None, PREZ.members)) + if member_bnode_list: + member_bnode = member_bnode_list[0] + member_links = list(g.objects(member_bnode, PREZ.link)) + links.extend(member_links) assert response.status_code == 200 - for link in links_in_response: + for link in links: + print(link) if link not in visited: visited.add(link) - test_catprez_links(client, visited, str(link)) - + test_vocprez_links(client, visited, str(link)) -def test_spaceprez_links(client: TestClient, visited: Optional[Set] = None, link="/s/datasets"): +def test_spaceprez_links( + client: TestClient, visited: Optional[Set] = None, link="/s/catalogs" +): if not visited: visited = set() response = client.get(link) - links_in_response = list(Graph().parse(data=response.text, format="turtle").objects(None, PREZ.link)) + g = Graph().parse(data=response.text, format="turtle") + links = list(g.objects(None, PREZ.link)) + member_bnode_list = list(g.objects(None, PREZ.members)) + if member_bnode_list: + member_bnode = member_bnode_list[0] + member_links = list(g.objects(member_bnode, PREZ.link)) + links.extend(member_links) assert response.status_code == 200 - for link in links_in_response: + for link in links: + print(link) if link not in visited: 
visited.add(link) - test_catprez_links(client, visited, str(link)) \ No newline at end of file + test_spaceprez_links(client, visited, str(link)) diff --git a/tests/test_endpoints_spaceprez.py b/tests/test_endpoints_spaceprez.py index f740e6c5..c387170d 100644 --- a/tests/test_endpoints_spaceprez.py +++ b/tests/test_endpoints_spaceprez.py @@ -46,7 +46,7 @@ def override_get_repo(): @pytest.fixture(scope="session") def a_dataset_link(client): - r = client.get("/s/datasets") + r = client.get("/s/catalogs") g = Graph().parse(data=r.text) member_uri = g.value(None, RDF.type, DCAT.Dataset) link = g.value(member_uri, URIRef(f"https://prez.dev/link", None)) @@ -121,7 +121,7 @@ def test_feature_anot(client, a_feature_link): def test_dataset_listing_anot(client): - r = client.get("/s/datasets?_mediatype=text/anot+turtle") + r = client.get("/s/catalogs?_mediatype=text/anot+turtle") response_graph = Graph().parse(data=r.text) expected_graph = Graph().parse( Path(__file__).parent diff --git a/tests/test_search.py b/tests/test_search.py index 60e2fbde..990fc012 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -9,8 +9,6 @@ from prez.app import app from prez.dependencies import get_repo -from prez.models.search_method import SearchMethod -from prez.routers.search import extract_qsa_params from prez.sparql.methods import Repo, PyoxigraphRepo From 380320d96e9d012904dca66cc945af3ee46153fe Mon Sep 17 00:00:00 2001 From: david Date: Mon, 11 Dec 2023 22:12:03 +1000 Subject: [PATCH 04/25] further ingetration --- demo/prez-v4-backend/config.ttl | 54 +++ demo/prez-v4-backend/docker-compose.yml | 33 ++ demo/prez-v4-backend/readme.md | 3 + poetry.lock | 54 ++- prez/dependencies.py | 33 +- .../context_ontologies/prez-ontology.nq | 21 + .../endpoints/cql_endpoints.ttl | 27 +- .../endpoints/ogc_catprez_endpoints.ttl | 8 +- .../endpoints/ogc_spaceprez_endpoints.ttl | 9 +- .../endpoints/ogc_vocprez_endpoints.ttl | 10 +- .../endpoints/system_endpoints.ttl | 7 +- .../endpoints/vocprez_endpoints.ttl.unused | 4 +- .../profiles/prez_default_profiles.ttl | 5 +- prez/routers/cql.py | 32 +- prez/routers/object.py | 24 +- prez/routers/ogc_spaceprez.py | 38 +- prez/routers/ogc_vocprez.py | 47 +- prez/services/app_service.py | 4 +- prez/services/listings.py | 6 +- prez/services/model_methods.py | 4 +- prez/services/objects.py | 12 +- prez/sparql/methods.py | 5 +- prez/sparql/objects_listings.py | 8 +- pyproject.toml | 3 +- temp/cql2sparql.py | 19 +- temp/grammar.py | 24 +- temp/shacl2sparql.py | 26 +- test_data/object_catalog_bblocks_catalog.ttl | 12 + test_data/object_vocab_api_bblocks.ttl | 38 ++ test_data/object_vocab_datatype_bblocks.ttl | 38 ++ test_data/object_vocab_parameter_bblocks.ttl | 61 +++ test_data/object_vocab_schema_bblocks.ttl | 414 +++++++++++++++++ test_data/sandgate.ttl | 296 ++++++++++++ tests/{test_cql.py => _test_cql.py} | 19 + .../top_level_catalog_anot.ttl | 439 +----------------- .../top_level_catalog_listing_anot.ttl | 53 +-- .../input/{catalog.ttl => catprez.ttl} | 24 +- tests/data/cql/input/geo_intersects.json | 35 ++ tests/test_endpoints_catprez.py | 60 +-- 39 files changed, 1375 insertions(+), 634 deletions(-) create mode 100644 demo/prez-v4-backend/config.ttl create mode 100644 demo/prez-v4-backend/docker-compose.yml create mode 100644 demo/prez-v4-backend/readme.md create mode 100644 prez/reference_data/context_ontologies/prez-ontology.nq create mode 100644 test_data/object_catalog_bblocks_catalog.ttl create mode 100644 test_data/object_vocab_api_bblocks.ttl create mode 100644 
test_data/object_vocab_datatype_bblocks.ttl create mode 100644 test_data/object_vocab_parameter_bblocks.ttl create mode 100644 test_data/object_vocab_schema_bblocks.ttl create mode 100644 test_data/sandgate.ttl rename tests/{test_cql.py => _test_cql.py} (73%) rename tests/data/catprez/input/{catalog.ttl => catprez.ttl} (52%) create mode 100644 tests/data/cql/input/geo_intersects.json diff --git a/demo/prez-v4-backend/config.ttl b/demo/prez-v4-backend/config.ttl new file mode 100644 index 00000000..91157cd0 --- /dev/null +++ b/demo/prez-v4-backend/config.ttl @@ -0,0 +1,54 @@ +## Licensed under the terms of http://www.apache.org/licenses/LICENSE-2.0 + +PREFIX : <#> +PREFIX fuseki: +PREFIX rdf: +PREFIX rdfs: +PREFIX ja: +PREFIX geosparql: + +[] rdf:type fuseki:Server ; + fuseki:services ( + :service + ) . + +:service rdf:type fuseki:Service ; + fuseki:name "dataset" ; + + fuseki:endpoint [ fuseki:operation fuseki:query ; ] ; + fuseki:endpoint [ + fuseki:operation fuseki:query ; + fuseki:name "sparql" + ]; + fuseki:endpoint [ + fuseki:operation fuseki:query ; + fuseki:name "query" + ] ; + fuseki:endpoint [ + fuseki:operation fuseki:gsp-r ; + fuseki:name "get" + ] ; + fuseki:dataset <#geo_ds> ; + . + +<#geo_ds> rdf:type geosparql:GeosparqlDataset ; + geosparql:dataset :dataset ; + geosparql:inference true ; + geosparql:queryRewrite true ; + geosparql:indexEnabled true ; + geosparql:applyDefaultGeometry true ; +. + +# Transactional in-memory dataset. +:dataset rdf:type ja:MemoryDataset ; + ## Optional load with data on start-up + ja:data "/rdf/catprez.ttl"; + ja:data "/rdf/vocprez.ttl"; + ja:data "/rdf/catprez.ttl"; + ja:data "/rdf/sandgate.ttl"; + ja:data "/rdf/object_catalog_bblocks_catalog.ttl"; + ja:data "/rdf/object_vocab_api_bblocks.ttl"; + ja:data "/rdf/object_vocab_datatype_bblocks.ttl"; + ja:data "/rdf/object_vocab_parameter_bblocks.ttl"; + ja:data "/rdf/object_vocab_schema_bblocks.ttl"; + . diff --git a/demo/prez-v4-backend/docker-compose.yml b/demo/prez-v4-backend/docker-compose.yml new file mode 100644 index 00000000..59fac4c1 --- /dev/null +++ b/demo/prez-v4-backend/docker-compose.yml @@ -0,0 +1,33 @@ +version: "3" +services: + + fuseki: + image: "ghcr.io/zazuko/fuseki-geosparql:v3.2.0" + ports: + - "3030:3030" + volumes: + - type: bind + source: config.ttl + target: /fuseki/config.ttl + - type: bind + source: ../../test_data + target: /rdf + environment: + ADMIN_PASSWORD: pw + healthcheck: + test: ["CMD-SHELL", "wget -qO- http://localhost:3030 || exit 1"] + interval: 5s + timeout: 10s + retries: 3 + + prez: + build: + context: ../../ + dockerfile: ./Dockerfile + ports: + - "8000:8000" + environment: + SPARQL_ENDPOINT: 'http://fuseki:3030/dataset' + depends_on: + fuseki: + condition: service_healthy diff --git a/demo/prez-v4-backend/readme.md b/demo/prez-v4-backend/readme.md new file mode 100644 index 00000000..a0048066 --- /dev/null +++ b/demo/prez-v4-backend/readme.md @@ -0,0 +1,3 @@ +This directory contains a docker compose file which will run the Prez backend and Fuseki GeoSPARQL together with some sample data. + +NB any data added to the test_data folder must also be specified in the fuseki config.ttl file. 
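The three crawlers in tests/test_endpoints_ok.py above now repeat the same prez:link extraction (top-level links plus links under the first prez:members blank node). A possible shared helper, hypothetical and not part of this patch, assuming rdflib and a PREZ namespace importable from prez.reference_data.prez_ns:

    from typing import List

    from rdflib import Graph
    from rdflib.term import Node

    from prez.reference_data.prez_ns import PREZ  # assumed import path


    def collect_links(g: Graph) -> List[Node]:
        """Gather prez:link objects, including those under prez:members bnodes."""
        links = list(g.objects(None, PREZ.link))
        for member_bnode in g.objects(None, PREZ.members):
            links.extend(g.objects(member_bnode, PREZ.link))
        return links

Unlike the inline versions, this loop covers every prez:members node rather than only the first, which is worth confirming against the intended crawl behaviour.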
\ No newline at end of file diff --git a/poetry.lock b/poetry.lock index 47479f51..a17ee71a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -502,13 +502,13 @@ socks = ["socksio (==1.*)"] [[package]] name = "identify" -version = "2.5.32" +version = "2.5.33" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.32-py2.py3-none-any.whl", hash = "sha256:0b7656ef6cba81664b783352c73f8c24b39cf82f926f78f4550eda928e5e0545"}, - {file = "identify-2.5.32.tar.gz", hash = "sha256:5d9979348ec1a21c768ae07e0a652924538e8bce67313a73cb0f681cf08ba407"}, + {file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"}, + {file = "identify-2.5.33.tar.gz", hash = "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"}, ] [package.extras] @@ -876,24 +876,24 @@ files = [ [[package]] name = "pathspec" -version = "0.11.2" +version = "0.12.0" description = "Utility library for gitignore style pattern matching of file paths." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, + {file = "pathspec-0.12.0-py3-none-any.whl", hash = "sha256:f1f8a7eab698c357945c85ed79715e014612b8584faebe209dca4558e2b09513"}, + {file = "pathspec-0.12.0.tar.gz", hash = "sha256:c57e16065a97b7beb175f13c84d27cb05f7b7315741c2fbd5de541042f4ea6e1"}, ] [[package]] name = "platformdirs" -version = "4.0.0" +version = "4.1.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"}, - {file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"}, + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, ] [package.extras] @@ -1140,28 +1140,35 @@ python-versions = ">=3.7" files = [ {file = "pyoxigraph-0.3.22-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49609d3c8d6637193872181e8f9d8b85ae304b3d944b1d50a2e363bd4d3ad878"}, {file = "pyoxigraph-0.3.22-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb0a0f2bd4348e9b92fbb92c71f449b7e42f6ac6fb67ce5797cbd8ab3b673c86"}, + {file = "pyoxigraph-0.3.22-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5e9cd5931488feb3bdd189094a746d2d0c05c5364a2d93a1b748d2bb91145ab8"}, {file = "pyoxigraph-0.3.22-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:95c43d3da6d43460368f0a5f4b497412b0d6509e55eb12245b0f173248118656"}, {file = "pyoxigraph-0.3.22-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9d466025962895e67a7c4a4ba303fe23a911f99d2158f5f53eb50f56949125f"}, {file = "pyoxigraph-0.3.22-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90dc1e4010e2011c5440b7a3832153a14f52257e12a90a0d7fc6ed16e88a7961"}, + {file = "pyoxigraph-0.3.22-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:10c02f543fa83338e93308cad7868137ccadffc3330827deebac715333070091"}, {file = "pyoxigraph-0.3.22-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:469039b1ed6a31fef59b8b6c2ef5c836dd147944aa7120b4f4e6db4fd5abf60a"}, {file = "pyoxigraph-0.3.22-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2baadd8dba65ff91bdcdf85e57d928806d94612b85da58d64526f0f1d5cd4df"}, {file = "pyoxigraph-0.3.22-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f7e217e82e541f7df4697705c7cbfbd62e019c50786669647cb261445d75215"}, + {file = "pyoxigraph-0.3.22-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:963bc825e34d7238bffb942572ac0e59a6512e7d33ec8f898f495964a8dac1de"}, {file = "pyoxigraph-0.3.22-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c99cd7d305a5f154d6fa7eca3a93b153ac94ad2a4aff6c404ec56db38d538ea4"}, {file = "pyoxigraph-0.3.22-cp37-abi3-macosx_10_14_x86_64.macosx_11_0_arm64.macosx_10_14_universal2.whl", hash = "sha256:32d5630c9fb3d7b819a25401b3afdbd01dbfc9624b1519d41216622fe3af52e6"}, {file = "pyoxigraph-0.3.22-cp37-abi3-macosx_10_14_x86_64.whl", hash = "sha256:6368f24bc236a6055171f4a80cb63b9ad76fcbdbcb4a3ef981eb6d86d8975c11"}, {file = "pyoxigraph-0.3.22-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:821e1103cf1e8f12d0738cf1b2625c8374758e33075ca67161ead3669f53e4cb"}, {file = "pyoxigraph-0.3.22-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630f1090d67d1199c86f358094289816e0c00a21000164cfe06499c8689f8b9e"}, {file = "pyoxigraph-0.3.22-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1aca511243209005da32470bbfec9e023ac31095bbeaa8cedabe0a652adce38c"}, + {file = "pyoxigraph-0.3.22-cp37-abi3-musllinux_1_1_aarch64.whl", hash = 
"sha256:ab329df388865afa9a934f1eac2e75264b220962a21bbcded6cb7ead96d1f1dd"}, {file = "pyoxigraph-0.3.22-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:60b7f13331b91827e2edfa8633ffb7e3bfc8630b708578fb0bc8d43c76754f20"}, {file = "pyoxigraph-0.3.22-cp37-abi3-win_amd64.whl", hash = "sha256:9a4ffd8ce28c3e8ce888662e0d9e9155e5226ecd8cd967f3c46391cf266c4c1d"}, {file = "pyoxigraph-0.3.22-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4b8fde463e507c394f5b165a7a2571fd74028a8b343c161d81f63eb83a7d7c7"}, {file = "pyoxigraph-0.3.22-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6ad3d8037af4ab5b1de75999fd2ba1b93becf24a9ee5e46ea0ee20a4efe270b"}, + {file = "pyoxigraph-0.3.22-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:26c229a061372b5c52f2b85f30fae028a69a8ba71654b402cc4099264d04ca58"}, {file = "pyoxigraph-0.3.22-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9211b2a9d9f13875aec4acede8e1395ff617d64ac7cff0f80cbaf4c08fc8b648"}, {file = "pyoxigraph-0.3.22-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00645cb370ebafc79cfecd08c5ac4656469af9ec450cb9207d94f6939e26ba0e"}, {file = "pyoxigraph-0.3.22-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6d55de26adabe7d6fece9e1dad4556d648c4166ee79d65e4f7c64acd898656e"}, + {file = "pyoxigraph-0.3.22-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1427e62704bce0a1bc03661efd4d6a7c85cf548824e5e48b17efb4509bd034ad"}, {file = "pyoxigraph-0.3.22-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e2bebace02e29d1cf3bc324815058f50b2ff59980a02193280a89c905d8437ab"}, {file = "pyoxigraph-0.3.22-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e363d0b788f870b1008bb75e41a31b01a6277d9a7cc028ed6534a23bba69e60"}, {file = "pyoxigraph-0.3.22-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0508eb4515ce1b3c7548d3f9382c1b366f6602c2e01e9e036c20e730d8fece47"}, + {file = "pyoxigraph-0.3.22-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:db64bdef54d5d1c0d51bec08d811cd1ff86c7608e24b9362523ff94fb3b46117"}, {file = "pyoxigraph-0.3.22-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33ca01c1727e079af3335883d75e5390619e7d2ece813c8065ba1cbcd71d17a3"}, {file = "pyoxigraph-0.3.22-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55322d5b9b852c4813c293575aa5e676cec19c617d0aad5ae7ce47c49b113f0b"}, {file = "pyoxigraph-0.3.22-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3397138f3a6d2c3299250ebde2bca7c95a25b58b29009eb0b29c2f5d1438d954"}, @@ -1329,6 +1336,23 @@ html = ["html5lib (>=1.0,<2.0)"] lxml = ["lxml (>=4.3.0,<5.0.0)"] networkx = ["networkx (>=2.0.0,<3.0.0)"] +[[package]] +name = "rdframe" +version = "0.1.1" +description = "" +optional = false +python-versions = ">=3.11,<4.0" +files = [ + {file = "rdframe-0.1.1-py3-none-any.whl", hash = "sha256:3a8c54da40aa9b57d8e1785d85e2013343038d4ef1f9255d2bc7cc0996301361"}, + {file = "rdframe-0.1.1.tar.gz", hash = "sha256:9e508518c64932abf05fe123f00aadc6f90d0d6fae446fd0b01cec0a40f49a7a"}, +] + +[package.dependencies] +pydantic = ">=2.5.2,<3.0.0" +pyld = ">=2.0.3,<3.0.0" +rdflib = ">=7.0.0,<8.0.0" +shapely = ">=2.0.2,<3.0.0" + [[package]] name = "requests" version = "2.31.0" @@ -1524,13 +1548,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.8.0" +version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" 
files = [ - {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, - {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, ] [[package]] @@ -1604,4 +1628,4 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "67c2ad668e9468f8b6121615f92ac0e7fe4125ae47337670d14f52673d6f7896" +content-hash = "85c668acf50861c99381889c5be9aef605635c1c9abf040529fd01ab9ee2a805" diff --git a/prez/dependencies.py b/prez/dependencies.py index b2c91af7..edf6ac6c 100644 --- a/prez/dependencies.py +++ b/prez/dependencies.py @@ -16,7 +16,7 @@ ) from prez.config import settings from prez.sparql.methods import PyoxigraphRepo, RemoteSparqlRepo, OxrdflibRepo -from temp.cql2sparql import CQLParser +from rdframe import CQLParser async def get_async_http_client(): @@ -105,17 +105,20 @@ async def cql_post_parser_dependency(request: Request): async def cql_get_parser_dependency(request: Request): - try: - query = json.loads(request.query_params["q"]) - context = json.load( - (Path(__file__).parent.parent / "temp" / "default_cql_context.json").open() - ) - cql_parser = CQLParser(cql=query, context=context) - cql_parser.generate_jsonld() - return cql_parser - except json.JSONDecodeError: - raise HTTPException(status_code=400, detail="Invalid JSON format.") - except Exception as e: # Replace with your specific parsing exception - raise HTTPException( - status_code=400, detail="Invalid CQL format: Parsing failed." - ) + if request.query_params.get("filter"): + try: + query = json.loads(request.query_params["filter"]) + context = json.load( + ( + Path(__file__).parent.parent / "temp" / "default_cql_context.json" + ).open() + ) + cql_parser = CQLParser(cql=query, context=context) + cql_parser.generate_jsonld() + return cql_parser + except json.JSONDecodeError: + raise HTTPException(status_code=400, detail="Invalid JSON format.") + except Exception as e: # Replace with your specific parsing exception + raise HTTPException( + status_code=400, detail="Invalid CQL format: Parsing failed." + ) diff --git a/prez/reference_data/context_ontologies/prez-ontology.nq b/prez/reference_data/context_ontologies/prez-ontology.nq new file mode 100644 index 00000000..65c6a480 --- /dev/null +++ b/prez/reference_data/context_ontologies/prez-ontology.nq @@ -0,0 +1,21 @@ + "link" . + "count" . + "members" . + "Matched Term" . + "Matched Predicate" . + "Search Result Weight" . + "Default Resource Format" . + "Has Resource Format" . + "Constrains Class" . + "Has Node Shape" . + "Has Default Profile" . + "All Predicate Values" . + "limit" . + "offset" . + "order by" . + "blank node depth" . + "endpoint template" . + "delivers classes" . + "parent endpoint" . + "parent to focus relation" . + "focus to parent relation" . 
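
With the cql_get_parser_dependency change above, CQL is now read from a `filter` query parameter and parsed only when one is supplied; a companion GET /queryables route (added below) lists the properties available to filter on. A hedged sketch of what a client request looks like, where the filter body is a made-up CQL2 JSON comparison in the style of the examples under tests/data/cql/input/:

# Sketch of a GET request to the CQL endpoint; the /cql route and the
# "filter" parameter come from the diffs in this patch, the filter body
# itself is hypothetical.
import json
import urllib.parse

cql_filter = {
    "op": "=",
    "args": [{"property": "http://purl.org/dc/terms/identifier"}, "bblocks"],
}
url = "http://localhost:8000/cql?filter=" + urllib.parse.quote(json.dumps(cql_filter))
print(url)  # paste into a browser or curl; omitting ?filter= returns an unfiltered listing
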
\ No newline at end of file diff --git a/prez/reference_data/endpoints/cql_endpoints.ttl b/prez/reference_data/endpoints/cql_endpoints.ttl index 40414293..6624213d 100644 --- a/prez/reference_data/endpoints/cql_endpoints.ttl +++ b/prez/reference_data/endpoints/cql_endpoints.ttl @@ -1,6 +1,6 @@ PREFIX dcat: PREFIX dcterms: -PREFIX endpoint: +PREFIX endpoint: PREFIX prez: PREFIX ont: PREFIX rdfs: @@ -10,8 +10,31 @@ PREFIX shext: endpoint:cql a ont:ListingEndpoint ; - ont:endpointTemplate "/cql" ; ont:deliversClasses prez:CQLObjectList ; # required to determine the correct profile for ConnegP shext:limit 20 ; shext:offset 0 ; . + +endpoint:queryables a ont:ListingEndpoint ; + sh:rule [ sh:subject "?focus_node" ; + sh:predicate ; + sh:object ] ; + ont:deliversClasses prez:QueryablesList ; + sh:target [ sh:select """SELECT DISTINCT ?focus_node + WHERE { + ?s a ?class ; + ?focus_node ?o . + VALUES ?class { + dcat:Catalog + dcat:Dataset + dcat:Resource + skos:ConceptScheme + skos:Collection + skos:Concept + geo:FeatureCollection + geo:Feature + } + }""" ] ; + shext:limit 100 ; + shext:offset 0 ; +. \ No newline at end of file diff --git a/prez/reference_data/endpoints/ogc_catprez_endpoints.ttl b/prez/reference_data/endpoints/ogc_catprez_endpoints.ttl index bccd8bf9..7f7a0557 100644 --- a/prez/reference_data/endpoints/ogc_catprez_endpoints.ttl +++ b/prez/reference_data/endpoints/ogc_catprez_endpoints.ttl @@ -34,7 +34,7 @@ endpoint:top-level-catalog-object a ont:ObjectEndpoint ; endpoint:lower-level-catalog-listing a ont:ListingEndpoint ; ont:endpointTemplate "/c/catalogs/$parent_1/collections" ; ont:deliversClasses prez:CatalogList ; # required to determine the correct profile for ConnegP - ont:ParentToFocusRelation dcterms:hasPart ; + ont:parentToFocusRelation dcterms:hasPart ; sh:targetClass dcat:Catalog ; # required for query construction ont:parentEndpoint endpoint:top-level-catalog-object ; shext:limit 20 ; @@ -51,14 +51,14 @@ endpoint:lower-level-catalog-object a ont:ObjectEndpoint ; sh:targetNode "$object" ; ont:deliversClasses dcat:Catalog ; # required for link generation for objects ont:endpointTemplate "/c/catalogs/$parent_1/collections/$object" ; - ont:ParentToFocusRelation dcterms:hasPart ; + ont:parentToFocusRelation dcterms:hasPart ; ont:parentEndpoint endpoint:lower-level-catalog-listing ; . endpoint:resource-listing a ont:ListingEndpoint ; ont:endpointTemplate "/c/catalogs/$parent_2/collections/$parent_1/items" ; ont:deliversClasses prez:ResourceList ; # required to determine the correct profile for ConnegP - ont:ParentToFocusRelation dcterms:hasPart ; + ont:parentToFocusRelation dcterms:hasPart ; sh:targetClass dcat:Resource ; # required for query construction ont:parentEndpoint endpoint:lower-level-catalog-object ; shext:limit 20 ; @@ -75,7 +75,7 @@ endpoint:resource-object a ont:ObjectEndpoint ; sh:targetNode "$object" ; ont:deliversClasses dcat:Resource ; # required for link generation for objects ont:endpointTemplate "/c/catalogs/$parent_2/collections/$parent_1/items/$object" ; - ont:ParentToFocusRelation dcterms:hasPart ; + ont:parentToFocusRelation dcterms:hasPart ; ont:parentEndpoint endpoint:resource-listing ; . 
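
The renaming of ont:ParentToFocusRelation / ont:FocusToParentRelation to lower camel case in these endpoint files has to line up with the query-generation code, which compares the predicate IRIs literally (see the objects_listings.py diff later in this patch). A simplified illustration of that direction handling, with names chosen for clarity:

# Illustrative reduction of generate_relationship_query: the direction
# predicate decides which side of the triple the focus node sits on.
from rdflib import URIRef

PARENT_TO_FOCUS = URIRef("https://prez.dev/ont/parentToFocusRelation")

def relation_triple(parent: str, predicate: URIRef, focus: str, direction: URIRef) -> str:
    if direction == PARENT_TO_FOCUS:  # e.g. dcterms:hasPart, rdfs:member
        return f"{parent} <{predicate}> {focus} ."
    return f"{focus} <{predicate}> {parent} ."  # e.g. skos:inScheme

print(relation_triple("?parent_1", URIRef("http://purl.org/dc/terms/hasPart"),
                      "?focus_node", PARENT_TO_FOCUS))
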
diff --git a/prez/reference_data/endpoints/ogc_spaceprez_endpoints.ttl b/prez/reference_data/endpoints/ogc_spaceprez_endpoints.ttl index 97af374e..9d53c519 100644 --- a/prez/reference_data/endpoints/ogc_spaceprez_endpoints.ttl +++ b/prez/reference_data/endpoints/ogc_spaceprez_endpoints.ttl @@ -15,7 +15,6 @@ endpoint:spaceprez-home a ont:Endpoint ; endpoint:dataset-listing a ont:ListingEndpoint ; ont:deliversClasses prez:DatasetList ; sh:targetClass dcat:Dataset ; - ont:isTopLevelEndpoint "true"^^xsd:boolean ; ont:endpointTemplate "/s/catalogs" ; shext:limit 20 ; shext:offset 0 ; @@ -33,7 +32,7 @@ endpoint:feature-collection-listing a ont:ListingEndpoint ; sh:targetClass geo:FeatureCollection ; ont:deliversClasses prez:FeatureCollectionList ; ont:endpointTemplate "/s/catalogs/$parent_1/collections" ; - ont:ParentToFocusRelation rdfs:member ; + ont:parentToFocusRelation rdfs:member ; shext:limit 20 ; shext:offset 0 ; shext:orderBy [ sh:path rdfs:label ] ; @@ -50,7 +49,7 @@ endpoint:feature-collection-object a ont:ObjectEndpoint ; ont:parentEndpoint endpoint:feature-collection-listing ; ont:deliversClasses geo:FeatureCollection ; ont:endpointTemplate "/s/catalogs/$parent_1/collections/$object" ; - ont:ParentToFocusRelation rdfs:member ; + ont:parentToFocusRelation rdfs:member ; . endpoint:feature-listing a ont:ListingEndpoint ; @@ -58,7 +57,7 @@ endpoint:feature-listing a ont:ListingEndpoint ; sh:targetClass geo:Feature ; ont:deliversClasses prez:FeatureList ; ont:endpointTemplate "/s/catalogs/$parent_2/collections/$parent_1/items" ; - ont:ParentToFocusRelation rdfs:member ; + ont:parentToFocusRelation rdfs:member ; shext:limit 20 ; shext:offset 0 ; shext:orderBy [ sh:path rdfs:label ] ; @@ -75,5 +74,5 @@ endpoint:feature-object a ont:ObjectEndpoint ; ont:parentEndpoint endpoint:feature-listing ; ont:deliversClasses geo:Feature ; ont:endpointTemplate "/s/catalogs/$parent_2/collections/$parent_1/items/$object" ; - ont:ParentToFocusRelation rdfs:member ; + ont:parentToFocusRelation rdfs:member ; . diff --git a/prez/reference_data/endpoints/ogc_vocprez_endpoints.ttl b/prez/reference_data/endpoints/ogc_vocprez_endpoints.ttl index 3c2b22cf..e1053b87 100644 --- a/prez/reference_data/endpoints/ogc_vocprez_endpoints.ttl +++ b/prez/reference_data/endpoints/ogc_vocprez_endpoints.ttl @@ -35,7 +35,7 @@ endpoint:catalog-object a ont:ObjectEndpoint ; endpoint:vocab-listing a ont:ListingEndpoint ; ont:endpointTemplate "/v/catalogs/$parent_1/collections" ; ont:deliversClasses prez:SchemesList ; # required to determine the correct profile for ConnegP - ont:ParentToFocusRelation dcterms:hasPart ; + ont:parentToFocusRelation dcterms:hasPart ; sh:targetClass skos:ConceptScheme ; # required for query construction ont:parentEndpoint endpoint:catalog-object ; shext:limit 20 ; @@ -52,14 +52,14 @@ endpoint:vocab-object a ont:ObjectEndpoint ; sh:targetNode "$object" ; ont:deliversClasses skos:ConceptScheme ; # required for link generation for objects ont:endpointTemplate "/v/catalogs/$parent_1/collections/$object" ; - ont:ParentToFocusRelation dcterms:hasPart ; + ont:parentToFocusRelation dcterms:hasPart ; ont:parentEndpoint endpoint:vocab-listing ; . 
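
The $parent_n and $object placeholders in ont:endpointTemplate appear to be ordinary Python string.Template variables (shacl2sparql.py imports Template from string, as shown later in this patch). A sketch of how a link would be minted, with made-up curie values:

# Hedged sketch: substituting an endpoint template; the curies below are
# illustrative only.
from string import Template

template = Template("/s/catalogs/$parent_2/collections/$parent_1/items/$object")
print(template.substitute(parent_2="exds:sandgate", parent_1="exds:facilities", object="exds:f001"))
# -> /s/catalogs/exds:sandgate/collections/exds:facilities/items/exds:f001
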
endpoint:concept-listing a ont:ListingEndpoint ; ont:endpointTemplate "/v/catalogs/$parent_2/collections/$parent_1/items" ; ont:deliversClasses prez:ConceptList ; # required to determine the correct profile for ConnegP - ont:FocusToParentRelation skos:inScheme ; + ont:focusToParentRelation skos:inScheme ; sh:targetClass skos:Concept ; # required for query construction ont:parentEndpoint endpoint:vocab-object ; shext:limit 20 ; @@ -76,7 +76,7 @@ endpoint:concept-object a ont:ObjectEndpoint ; sh:targetNode "$object" ; ont:deliversClasses skos:Concept ; # required for link generation for objects ont:endpointTemplate "/v/catalogs/$parent_2/collections/$parent_1/items/$object" ; - ont:FocusToParentRelation skos:inScheme ; + ont:focusToParentRelation skos:inScheme ; ont:parentEndpoint endpoint:concept-listing ; . @@ -103,7 +103,6 @@ endpoint:narrowers shext:offset 0 ; shext:orderBy [ sh:path skos:prefLabel ] ; sh:targetClass skos:Concept ; - ont:endpointTemplate "/v/catalogs/$parent_3/collections/$parent_2/items/$parent_1/narrowers" ; . endpoint:top-concepts a ont:ListingEndpoint ; @@ -122,5 +121,4 @@ endpoint:top-concepts a ont:ListingEndpoint ; shext:offset 0 ; shext:orderBy [ sh:path skos:prefLabel ] ; sh:targetClass skos:Concept ; - ont:endpointTemplate "/v/catalogs/$parent_2/collections/$parent_1/top-concepts" ; . \ No newline at end of file diff --git a/prez/reference_data/endpoints/system_endpoints.ttl b/prez/reference_data/endpoints/system_endpoints.ttl index 969e1ee6..56d32d82 100644 --- a/prez/reference_data/endpoints/system_endpoints.ttl +++ b/prez/reference_data/endpoints/system_endpoints.ttl @@ -7,6 +7,7 @@ PREFIX rdfs: PREFIX sh: PREFIX xsd: PREFIX prof: +PREFIX skos: PREFIX shext: endpoint:profiles-listing a ont:ListingEndpoint ; @@ -33,4 +34,8 @@ endpoint:alt-profiles-listing a ont:ListingEndpoint ; }""" ] ; shext:limit 20 ; shext:offset 0 ; -. \ No newline at end of file +. + +endpoint:object a ont:ObjectEndpoint ; + ont:deliversClasses prez:Object ; +. diff --git a/prez/reference_data/endpoints/vocprez_endpoints.ttl.unused b/prez/reference_data/endpoints/vocprez_endpoints.ttl.unused index 6cb1edfd..ccdaf4a3 100644 --- a/prez/reference_data/endpoints/vocprez_endpoints.ttl.unused +++ b/prez/reference_data/endpoints/vocprez_endpoints.ttl.unused @@ -29,7 +29,7 @@ endpoint:collection-concept a ont:ObjectEndpoint ; ont:parentEndpoint endpoint:collection-object ; ont:deliversClasses skos:Concept ; ont:endpointTemplate "/v/collection/$parent_1/$object" ; - ont:ParentToFocusRelation skos:member ; + ont:parentToFocusRelation skos:member ; . endpoint:vocabs-listing a ont:ListingEndpoint ; @@ -50,7 +50,7 @@ endpoint:vocab-concept a ont:ObjectEndpoint ; ont:parentEndpoint endpoint:vocab-object ; ont:deliversClasses skos:Concept ; ont:endpointTemplate "/v/vocab/$parent_1/$object" ; - ont:FocusToParentRelation skos:inScheme ; + ont:focusToParentRelation skos:inScheme ; . endpoint:cs-top-concepts a ont:ListingEndpoint ; diff --git a/prez/reference_data/profiles/prez_default_profiles.ttl b/prez/reference_data/profiles/prez_default_profiles.ttl index ea6a82d0..2e1b4e37 100644 --- a/prez/reference_data/profiles/prez_default_profiles.ttl +++ b/prez/reference_data/profiles/prez_default_profiles.ttl @@ -35,7 +35,7 @@ PREFIX xsd: dcterms:identifier "openobj"^^xsd:token ; dcterms:description "An open profile for objects which will return all direct properties for a resource." 
; dcterms:title "Open profile" ; - altr-ext:constrainsClass prez:SPARQLQuery , prof:Profile , prez:SearchResult ; + altr-ext:constrainsClass prez:SPARQLQuery , prof:Profile , prez:SearchResult , prez:Object ; altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:hasResourceFormat "application/ld+json" , "application/ld+json" , @@ -61,7 +61,8 @@ PREFIX xsd: prez:SchemesList , prez:VocPrezCollectionList , prez:CatalogList , - prez:CQLObjectList ; + prez:CQLObjectList , + prez:QueryablesList ; altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:hasResourceFormat "application/json" , diff --git a/prez/routers/cql.py b/prez/routers/cql.py index e6a6ea5f..00505ea8 100644 --- a/prez/routers/cql.py +++ b/prez/routers/cql.py @@ -17,7 +17,7 @@ @router.post( path="/cql", - name="https://prez.dev/endpoint/cql-post", + name="https://prez.dev/endpoint/cql/post", ) async def cql_post_endpoint( request: Request, @@ -27,7 +27,7 @@ async def cql_post_endpoint( repo: Repo = Depends(get_repo), system_repo: Repo = Depends(get_system_repo), ): - endpoint_uri = URIRef("https://prez.dev/endpoint/cql") + endpoint_uri = URIRef("https://prez.dev/endpoint/cql/post") return await listing_function( request=request, repo=repo, @@ -41,7 +41,7 @@ async def cql_post_endpoint( @router.get( path="/cql", - name="https://prez.dev/endpoint/cql-get", + name="https://prez.dev/endpoint/cql/get", ) async def cql_get_endpoint( request: Request, @@ -51,7 +51,31 @@ async def cql_get_endpoint( repo: Repo = Depends(get_repo), system_repo: Repo = Depends(get_system_repo), ): - endpoint_uri = URIRef("https://prez.dev/endpoint/cql") + endpoint_uri = URIRef("https://prez.dev/endpoint/cql/get") + return await listing_function( + request=request, + repo=repo, + system_repo=system_repo, + endpoint_uri=endpoint_uri, + page=page, + per_page=per_page, + cql_parser=cql_parser, + ) + + +@router.get( + path="/queryables", + name="https://prez.dev/endpoint/cql/queryables", +) +async def queryables_endpoint( + request: Request, + cql_parser: Optional[dict] = Depends(cql_get_parser_dependency), + page: int = 1, + per_page: int = 20, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), +): + endpoint_uri = URIRef("https://prez.dev/endpoint/cql/queryables") return await listing_function( request=request, repo=repo, diff --git a/prez/routers/object.py b/prez/routers/object.py index 664914d1..1cbb8d49 100644 --- a/prez/routers/object.py +++ b/prez/routers/object.py @@ -1,8 +1,8 @@ from fastapi import APIRouter, Request, HTTPException, status, Query from fastapi import Depends from starlette.responses import PlainTextResponse - -from prez.dependencies import get_repo +from rdflib import URIRef +from prez.dependencies import get_repo, get_system_repo from prez.queries.object import object_inbound_query, object_outbound_query from prez.routers.identifier import get_iri_route from prez.services.objects import object_function @@ -68,6 +68,20 @@ async def count_route( return row["count"]["value"] -@router.get("/object", summary="Object", name="https://prez.dev/endpoint/object") -async def object_route(request: Request, repo=Depends(get_repo)): - return await object_function(request, repo=repo) +@router.get("/object", summary="Object", name="https://prez.dev/endpoint/system/object") +async def object_route( + request: Request, + repo=Depends(get_repo), + system_repo=Depends(get_system_repo), +): + endpoint_uri = URIRef(request.scope.get("route").name) + uri = URIRef(request.query_params.get("uri")) + request_url 
= request.scope["path"] + return await object_function( + request=request, + endpoint_uri=endpoint_uri, + uri=uri, + request_url=request_url, + repo=repo, + system_repo=system_repo, + ) diff --git a/prez/routers/ogc_spaceprez.py b/prez/routers/ogc_spaceprez.py index 2d1ab2f7..54bfcd78 100644 --- a/prez/routers/ogc_spaceprez.py +++ b/prez/routers/ogc_spaceprez.py @@ -15,7 +15,43 @@ SP_EP = Namespace("https://prez.dev/endpoint/spaceprez/") -@router.get("/s", summary="SpacePrez Home") +@router.get( + "/s", + summary="SpacePrez Home", + openapi_extra={ + "parameters": [ + { + "name": "page", + "in": "query", + "required": False, + "schema": { + "anyOf": [{"type": "integer"}, {"type": "null"}], + "default": 1, + "title": "Page", + }, + }, + { + "name": "per_page", + "in": "query", + "required": False, + "schema": { + "anyOf": [{"type": "integer"}, {"type": "null"}], + "default": 20, + "title": "Per Page", + }, + }, + { + "name": "search_term", + "in": "path", + "required": False, + "schema": { + "anyOf": [{"type": "string"}, {"type": "null"}], + "title": "Search Term", + }, + }, + ] + }, +) async def spaceprez_home(): return PlainTextResponse("SpacePrez Home") diff --git a/prez/routers/ogc_vocprez.py b/prez/routers/ogc_vocprez.py index 21661c76..26906c60 100644 --- a/prez/routers/ogc_vocprez.py +++ b/prez/routers/ogc_vocprez.py @@ -4,7 +4,7 @@ from fastapi.responses import PlainTextResponse from rdflib import URIRef -from prez.dependencies import get_repo, get_system_repo +from prez.dependencies import get_repo, get_system_repo, cql_get_parser_dependency from prez.services.curie_functions import get_uri_for_curie_id from prez.services.listings import listing_function from prez.services.objects import object_function @@ -24,6 +24,11 @@ } +@router.get("/v", summary="VocPrez Home") +async def vocprez_home(): + return PlainTextResponse("VocPrez Home") + + @router.get( "/v/catalogs", summary="List Catalogs", @@ -36,6 +41,7 @@ async def catalog_list( search_term: Optional[str] = None, repo: Repo = Depends(get_repo), system_repo: Repo = Depends(get_system_repo), + cql_parser: Optional[dict] = Depends(cql_get_parser_dependency), ): search_term = request.query_params.get("q") endpoint_uri = URIRef(request.scope.get("route").name) @@ -46,6 +52,7 @@ async def catalog_list( endpoint_uri, page, per_page, + cql_parser=cql_parser, search_term=search_term, ) @@ -106,22 +113,15 @@ async def concept_list( ) -@router.get( - "/v/catalogs/{catalogId}/collections/{collectionId}/top-concepts", - summary="List Top Concepts", - name=ogc_endpoints["top-concepts"], -) async def top_concepts( request: Request, page: Optional[int] = 1, per_page: Optional[int] = 20, - search_term: Optional[str] = None, repo: Repo = Depends(get_repo), system_repo: Repo = Depends(get_system_repo), ): - search_term = request.query_params.get("q") parent_uri = get_uri_for_curie_id(request.path_params["collectionId"]) - endpoint_uri = URIRef(request.scope.get("route").name) + endpoint_uri = URIRef(ogc_endpoints["top-concepts"]) return await listing_function( request, repo, @@ -130,31 +130,18 @@ async def top_concepts( page, per_page, parent_uri, - search_term=search_term, ) -@router.get("/v", summary="VocPrez Home") -async def vocprez_home(): - return PlainTextResponse("VocPrez Home") - - -@router.get( - "/v/catalogs/{catalogId}/collections/{collectionId}/items/{itemId}/narrowers", - summary="List Narrower Concepts", - name=ogc_endpoints["narrowers"], -) async def narrowers( request: Request, page: Optional[int] = 1, per_page: Optional[int] = 
20, - search_term: Optional[str] = None, repo: Repo = Depends(get_repo), system_repo: Repo = Depends(get_system_repo), ): - search_term = request.query_params.get("q") parent_uri = get_uri_for_curie_id(request.path_params["itemId"]) - endpoint_uri = URIRef(request.scope.get("route").name) + endpoint_uri = URIRef(ogc_endpoints["narrowers"]) return await listing_function( request, repo, @@ -163,7 +150,6 @@ async def narrowers( page, per_page, parent_uri, - search_term=search_term, ) @@ -192,9 +178,15 @@ async def catalog_object( ) async def catalog_object( request: Request, + page: Optional[int] = 1, # for top-concepts + per_page: Optional[int] = 20, # for top-concepts repo: Repo = Depends(get_repo), system_repo: Repo = Depends(get_system_repo), ): + if "top-concepts" in request.query_params: + return await top_concepts( + request, page, per_page, repo=repo, system_repo=system_repo + ) request_url = request.scope["path"] endpoint_uri = URIRef(request.scope.get("route").name) object_uri = get_uri_for_curie_id(request.path_params["collectionId"]) @@ -210,9 +202,16 @@ async def catalog_object( ) async def catalog_object( request: Request, + page: Optional[int] = 1, # for narrowers + per_page: Optional[int] = 20, # for narrowers repo: Repo = Depends(get_repo), system_repo: Repo = Depends(get_system_repo), ): + # check if it's a narrowers path param + if "narrowers" in request.query_params: + return await narrowers( + request, page, per_page, repo=repo, system_repo=system_repo + ) request_url = request.scope["path"] endpoint_uri = URIRef(request.scope.get("route").name) object_uri = get_uri_for_curie_id(request.path_params["itemId"]) diff --git a/prez/services/app_service.py b/prez/services/app_service.py index f4e9e6dc..08da45aa 100644 --- a/prez/services/app_service.py +++ b/prez/services/app_service.py @@ -180,8 +180,8 @@ async def add_common_context_ontologies_to_tbox_cache(): g = Dataset(default_union=True) for file in ( Path(__file__).parent.parent / "reference_data/context_ontologies" - ).glob("*.nq"): - g.parse(file, format="nquads") + ).glob("*"): + g.parse(file) relevant_predicates = [ RDFS.label, DCTERMS.title, diff --git a/prez/services/listings.py b/prez/services/listings.py index 9ff914d7..c023d947 100644 --- a/prez/services/listings.py +++ b/prez/services/listings.py @@ -17,9 +17,9 @@ temp_listing_count, ) from prez.sparql.search_query import SearchQuery -from temp.cql2sparql import CQLParser -from temp.grammar import SubSelect -from temp.shacl2sparql import SHACLParser +from rdframe.grammar import SubSelect +from rdframe import SHACLParser +from rdframe import CQLParser log = logging.getLogger(__name__) diff --git a/prez/services/model_methods.py b/prez/services/model_methods.py index 02a3d0b4..04935b2c 100644 --- a/prez/services/model_methods.py +++ b/prez/services/model_methods.py @@ -19,7 +19,7 @@ async def get_classes( """ _, r = await repo.send_queries([], [(uri, q)]) tabular_result = r[0] # should only be one result - only one query sent - if endpoint != URIRef("https://prez.dev/endpoint/object"): + if endpoint != URIRef("https://prez.dev/endpoint/system/object"): endpoint_classes = list( endpoints_graph_cache.objects( subject=endpoint, @@ -32,5 +32,5 @@ async def get_classes( object_classes_delivered_by_endpoint.append(URIRef(c["class"]["value"])) classes = frozenset(object_classes_delivered_by_endpoint) else: - classes = frozenset([c["class"]["value"] for c in tabular_result[1]]) + classes = frozenset([URIRef(c["class"]["value"]) for c in tabular_result[1]]) return classes 
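
The URIRef wrapping added to get_classes above is more than cosmetic: rdflib term equality is type-strict, so a frozenset of plain strings never matches the URIRefs read from the endpoints graph. A quick illustration:

# rdflib Identifier equality is type-strict: a URIRef never equals the
# plain string spelling the same IRI, so sets of them differ too.
from rdflib import URIRef

print(URIRef("https://example.com/Thing") == "https://example.com/Thing")  # False
print(frozenset([URIRef("https://example.com/Thing")])
      == frozenset(["https://example.com/Thing"]))                         # False
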
diff --git a/prez/services/objects.py b/prez/services/objects.py
index 3b03d0ac..3e21b5a2 100644
--- a/prez/services/objects.py
+++ b/prez/services/objects.py
@@ -15,7 +15,7 @@
 )
 from prez.services.model_methods import get_classes
 from prez.sparql.methods import Repo
-from temp.shacl2sparql import SHACLParser
+from rdframe import SHACLParser

 log = logging.getLogger(__name__)

@@ -32,11 +32,11 @@ async def object_function(
     # ConnegP
     prof_and_mt_info = ProfilesMediatypesInfo(request=request, classes=klasses)
     # if we're on the object endpoint and a profile hasn't been requested, use the open profile
-    if (endpoint_uri == EP.object) and not (
-        prof_and_mt_info.req_profiles or prof_and_mt_info.req_profiles_token
-    ):
-        prof_and_mt_info.selected_class = None
-        prof_and_mt_info.profile = PREZ["profile/open"]
+    # if (endpoint_uri == EP.object) and not (
+    #     prof_and_mt_info.req_profiles or prof_and_mt_info.req_profiles_token
+    # ):
+    #     prof_and_mt_info.selected_class = None
+    #     prof_and_mt_info.profile = PREZ["profile/open"]
     # create the object with all required info
     object_item = ObjectItem(  # object item now does not need request
         uri=uri,
diff --git a/prez/sparql/methods.py b/prez/sparql/methods.py
index 09b162f8..beac6656 100644
--- a/prez/sparql/methods.py
+++ b/prez/sparql/methods.py
@@ -150,7 +150,11 @@ def _handle_query_triples_results(results: pyoxigraph.QueryTriples) -> Graph:
         return g.parse(data=ntriples, format="ntriples")

     def _sync_rdf_query_to_graph(self, query: str) -> Graph:
-        results = self.pyoxi_store.query(query)
+        try:
+            results = self.pyoxi_store.query(query)
+        except Exception as e:
+            print(e)
+            raise  # re-raise: without this, `results` is unbound on the next line
         result_graph = self._handle_query_triples_results(results)
         return result_graph

diff --git a/prez/sparql/objects_listings.py b/prez/sparql/objects_listings.py
index e3010c85..41bd263f 100644
--- a/prez/sparql/objects_listings.py
+++ b/prez/sparql/objects_listings.py
@@ -386,7 +386,7 @@ def select_profile_mediatype(
     VALUES ?base_class {{ dcat:Dataset geo:FeatureCollection prez:FeatureCollectionList prez:FeatureList geo:Feature skos:ConceptScheme skos:Concept prez:ConceptList skos:Collection prez:DatasetList prez:VocPrezCollectionList prez:SchemesList prez:CatalogList prez:ResourceList prez:ProfilesList dcat:Catalog dcat:Resource prof:Profile prez:SPARQLQuery
-    prez:SearchResult prez:CQLObjectList }}
+    prez:SearchResult prez:CQLObjectList prez:QueryablesList prez:Object }}
     ?profile altr-ext:constrainsClass ?class ;
              altr-ext:hasResourceFormat ?format ;
              dcterms:title ?title .\
@@ -453,8 +453,8 @@ def get_endpoint_template_queries(classes: FrozenSet[URIRef]):
         ont:endpointTemplate ?endpoint_template ;
         ont:deliversClasses ?classes .
FILTER(?classes IN ({", ".join('<' + str(klass) + '>' for klass in classes)})) - VALUES ?relation_direction {{ont:FocusToParentRelation ont:ParentToFocusRelation}} - VALUES ?ep_relation_direction {{ont:FocusToParentRelation ont:ParentToFocusRelation}} + VALUES ?relation_direction {{ont:focusToParentRelation ont:parentToFocusRelation}} + VALUES ?ep_relation_direction {{ont:focusToParentRelation ont:parentToFocusRelation}} {{ SELECT ?parent_endpoint ?endpoint (count(?intermediate) as ?distance) {{ ?endpoint ont:parentEndpoint* ?intermediate ; @@ -496,7 +496,7 @@ def generate_relationship_query( predicate, direction = relation if predicate: parent = "?parent_" + str(i) - if direction == URIRef("https://prez.dev/ont/ParentToFocusRelation"): + if direction == URIRef("https://prez.dev/ont/parentToFocusRelation"): subquery += f"{parent} <{predicate}> {uri_str} .\n" else: # assuming the direction is "focus_to_parent" subquery += f"{uri_str} <{predicate}> {parent} .\n" diff --git a/pyproject.toml b/pyproject.toml index aac7de44..97e5c6f0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ authors = ["Jamie Feiss ", "Nicholas Car List["SimplifiedTriple"]: + def collect_triples(self) -> List[SimplifiedTriple]: """ Recursively collect SimplifiedTriple instances from this object. """ @@ -83,7 +82,6 @@ class Anon: https://www.w3.org/TR/sparql11-query/#rANON """ - # TODO not sure how to make this more useful - allow input of whitespace? def render(self): yield "[]" @@ -138,12 +136,16 @@ class RDFLiteral(SPARQLGrammarBase): """ value: str - langtag_or_datatype: Optional[Union[LANGTAG, IRI]] = None + langtag: Optional[LANGTAG] = None + datatype: Optional[IRI] = None def render(self) -> Generator[str, None, None]: yield f'"{self.value}"' - if self.langtag_or_datatype: - yield from self.langtag_or_datatype.render() + if self.langtag: + yield from self.langtag.render() + elif self.datatype: + yield "^^" + yield from self.datatype.render() def __hash__(self): return hash(self.value) @@ -635,12 +637,13 @@ class SubSelectString(SubSelect): select_clause: Optional[str] = None where_clause: Optional[str] = None - solution_modifier: Optional["SolutionModifier"] = None + solution_modifier: Optional[SolutionModifier] = None select_string: str @field_validator("select_string") def validate_and_transform_select_string(cls, v): try: + translateAlgebra(prepareQuery(translateAlgebra(prepareQuery(v)))) return translateAlgebra(prepareQuery(v)) except Exception as e: log.error(msg=f'Potential query issue, or RDFLib bug: "{str(e)}"') @@ -748,6 +751,7 @@ def render(self) -> Generator[str, None, None]: class FunctionCall(SPARQLGrammarBase): """ + FunctionCall ::= iri ArgList Represents a SPARQL FunctionCall. 
FunctionCall ::= iri ArgList """ @@ -756,10 +760,8 @@ class FunctionCall(SPARQLGrammarBase): arg_list: ArgList def render(self) -> Generator[str, None, None]: - yield self.iri.render() - yield "(" + yield from self.iri.render() yield from self.arg_list.render() - yield ")" class ArgList(SPARQLGrammarBase): diff --git a/temp/shacl2sparql.py b/temp/shacl2sparql.py index 2acb4e9e..0b30cfee 100644 --- a/temp/shacl2sparql.py +++ b/temp/shacl2sparql.py @@ -1,5 +1,6 @@ from string import Template from typing import Union, Optional, List +import re from rdflib import URIRef, Variable, Namespace, Graph, SH, RDF, BNode, Literal from rdflib.collection import Collection @@ -176,8 +177,10 @@ def parse_endpoint_definition(self): # sh:target / sh:select if target_bn: ggp = self.create_select_subquery_from_template(target_bn) - self._add_target_class(target_classes[0]) self._add_ggp_to_main_ggps(ggp) + if target_classes: + self._add_target_class(target_classes[0]) + # don't use the target class if there's a sh:target / sh:select #TODO confirm why this caused issues - duplicate # pattern matches in the subquery? @@ -223,7 +226,9 @@ def _create_construct_triples_from_sh_rules(self, rule_node): else: self.construct_triples = [triple] - def create_select_subquery_for_class_listing(self, target_classes: Optional[List[URIRef]] = None): + def create_select_subquery_for_class_listing( + self, target_classes: Optional[List[URIRef]] = None + ): ggp = GroupGraphPattern(content=GroupGraphPatternSub()) if target_classes: @@ -277,12 +282,29 @@ def create_select_subquery_from_template(self, target_bn): substituted_query = ( substituted_query[:-1] + f"{{{order_by_triple_text}}} }}" ) + if self.additional_ggps: # for example from cql + additional_ggps_str = "".join( + part for part in self.additional_ggps.render() + ) + substituted_query = self.split_query(substituted_query, additional_ggps_str) sss = SubSelectString( select_string=substituted_query, solution_modifier=sol_mod ) ggp = GroupGraphPattern(content=sss) return ggp + def split_query(self, original_query, additional_ggps_str): + # Regex to match the entire structure: 'SELECT ?xxx { ... }' + pattern = r"(SELECT\s+[\?\w\s\(\)]+\s*\{)(.*?)(\}\s*)" + # Use re.split to split the query based on the pattern + parts = re.split(pattern, original_query, flags=re.DOTALL) + parts = [part for part in parts if part.strip()] + new_parts = [parts[0], additional_ggps_str] + if len(parts) > 1: + new_parts.extend(parts[1:]) + new_query = "".join(part for part in new_parts) + return new_query + def _create_focus_node_solution_modifier(self): """ Solution modifiers include LIMIT, OFFSET, ORDER BY clauses. diff --git a/test_data/object_catalog_bblocks_catalog.ttl b/test_data/object_catalog_bblocks_catalog.ttl new file mode 100644 index 00000000..86b4b547 --- /dev/null +++ b/test_data/object_catalog_bblocks_catalog.ttl @@ -0,0 +1,12 @@ +@prefix dcat: . +@prefix dcterms: . +@prefix vocab: . +@prefix catalog: . +@prefix prez: . + +catalog:bblocks + a dcat:Catalog ; + dcterms:identifier "bblocks" ; + dcterms:title "A catalog of Building Block Vocabularies" ; + dcterms:hasPart vocab:api , vocab:datatype , vocab:parameter , vocab:schema ; + . diff --git a/test_data/object_vocab_api_bblocks.ttl b/test_data/object_vocab_api_bblocks.ttl new file mode 100644 index 00000000..15cb9aa3 --- /dev/null +++ b/test_data/object_vocab_api_bblocks.ttl @@ -0,0 +1,38 @@ +@prefix bblocks: . +@prefix dct: . +@prefix prov: . +@prefix rdfs: . +@prefix schema: . +@prefix skos: . +@prefix xsd: . +@prefix vocab: . 
+@prefix prez: . + +vocab:api + a skos:ConceptScheme ; + skos:prefLabel "API Building Blocks" ; + skos:hasTopConcept bblocks:ogc.unstable.sosa ; + dct:identifier "api" ; + . + +bblocks:ogc.unstable.sosa a skos:Concept, + bblocks:Api ; + rdfs:label "Sensor, Observation, Sample, and Actuator (SOSA)" ; + dct:abstract "The SOSA (Sensor, Observation, Sample, and Actuator) ontology is a realisation of the Observations, Measurements and Sampling (OMS) Conceptual model" ; + dct:created "2023-04-13T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/index.json" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/index.md" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/" ] ; + dct:hasVersion "1.0" ; + dct:modified "2023-04-13"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:api ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status ; + . diff --git a/test_data/object_vocab_datatype_bblocks.ttl b/test_data/object_vocab_datatype_bblocks.ttl new file mode 100644 index 00000000..9651ba4c --- /dev/null +++ b/test_data/object_vocab_datatype_bblocks.ttl @@ -0,0 +1,38 @@ +@prefix bblocks: . +@prefix dct: . +@prefix prov: . +@prefix rdfs: . +@prefix schema: . +@prefix skos: . +@prefix xsd: . +@prefix vocab: . + +vocab:datatype + a skos:ConceptScheme ; + skos:prefLabel "Datatype Building Blocks" ; + skos:hasTopConcept bblocks:ogc.ogc-utils.iri-or-curie ; + dct:identifier "datatype" ; + . + +bblocks:ogc.ogc-utils.iri-or-curie a skos:Concept, + bblocks:Datatype ; + rdfs:label "IRI or CURIE" ; + dct:abstract "This Building Block defines a data type for a full IRI/URI or a CURIE (with or without a prefix)" ; + dct:created "2023-08-08T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/ogc-utils/iri-or-curie/index.md" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/ogc-utils/iri-or-curie/" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/ogc-utils/iri-or-curie/index.json" ] ; + dct:hasVersion "1.0" ; + dct:modified "2023-03-09"^^xsd:date ; + dct:source , + , + ; + skos:inScheme , vocab:datatype ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . diff --git a/test_data/object_vocab_parameter_bblocks.ttl b/test_data/object_vocab_parameter_bblocks.ttl new file mode 100644 index 00000000..23c920a7 --- /dev/null +++ b/test_data/object_vocab_parameter_bblocks.ttl @@ -0,0 +1,61 @@ +@prefix bblocks: . +@prefix dct: . +@prefix prov: . +@prefix rdfs: . +@prefix schema: . +@prefix skos: . +@prefix xsd: . +@prefix vocab: . + +vocab:parameter + a skos:ConceptScheme ; + skos:prefLabel "Parameter Building Blocks" ; + skos:hasTopConcept bblocks:ogc.geo.common.parameters.bbox , bblocks:ogc.geo.common.parameters.bbox-crs ; + dct:identifier "parameter" + . + +bblocks:ogc.geo.common.parameters.bbox a skos:Concept, + bblocks:Parameter ; + rdfs:label "bbox" ; + dct:abstract "The bbox query parameter provides a simple mechanism for filtering resources based on their location. 
It selects all resources that intersect a rectangle (map view) or box (including height information)." ; + dct:created "2022-05-24T13:51:38+00:00"^^xsd:dateTime ; + dct:description [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/common/parameters/bbox/index.json" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/common/parameters/bbox/" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/common/parameters/bbox/index.md" ] ; + dct:hasVersion "1.0" ; + dct:modified "2022-05-24"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:parameter ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.geo.common.parameters.bbox-crs a skos:Concept, + bblocks:Parameter ; + rdfs:label "bbox-crs" ; + dct:abstract "The bbox-crs query parameter can be used to assert the coordinate reference system that is used for the coordinate values of the bbox parameter." ; + dct:created "2022-07-05T01:01:01+02:00"^^xsd:dateTime ; + dct:description [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/common/parameters/bbox-crs/index.json" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/common/parameters/bbox-crs/index.md" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/common/parameters/bbox-crs/" ] ; + dct:hasVersion "1.0" ; + dct:modified "2022-07-05"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:parameter ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + + + + diff --git a/test_data/object_vocab_schema_bblocks.ttl b/test_data/object_vocab_schema_bblocks.ttl new file mode 100644 index 00000000..f71f849a --- /dev/null +++ b/test_data/object_vocab_schema_bblocks.ttl @@ -0,0 +1,414 @@ +@prefix bblocks: . +@prefix dct: . +@prefix prov: . +@prefix rdfs: . +@prefix schema: . +@prefix skos: . +@prefix xsd: . +@prefix vocab: . + +vocab:schema + a skos:ConceptScheme ; + dct:identifier "schema" ; + skos:prefLabel "Schema Building Blocks" ; + skos:hasTopConcept bblocks:ogc.unstable.sosa.examples.vectorObservation , + bblocks:ogc.unstable.sosa.examples.vectorObservationFeature , + bblocks:ogc.unstable.sosa.features.observation , + bblocks:ogc.unstable.sosa.features.observationCollection , + bblocks:ogc.unstable.sosa.properties.observation , + bblocks:ogc.unstable.sosa.properties.observationCollection , + bblocks:ogc.ogc-utils.json-link , + bblocks:ogc.geo.features.feature , + bblocks:ogc.geo.features.featureCollection , + bblocks:ogc.geo.geopose.advanced , + bblocks:ogc.geo.geopose.basic.quaternion , + bblocks:ogc.geo.geopose.basic.ypr , + bblocks:ogc.geo.json-fg.feature , + bblocks:ogc.geo.json-fg.feature-lenient , + bblocks:ogc.geo.json-fg.featureCollection , + bblocks:ogc.geo.json-fg.featureCollection-lenient , + bblocks:ogc.geo.common.data_types.bounding_box , + bblocks:ogc.geo.common.data_types.geojson + . 
+ + +bblocks:ogc.unstable.sosa.examples.vectorObservation a skos:Concept, + bblocks:Schema ; + rdfs:label "Example SOSA Vector Observation" ; + dct:abstract "This building block defines an example SOSA Vector Observation" ; + dct:created "2023-05-19T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/examples/vectorObservation/" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/examples/vectorObservation/index.json" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/examples/vectorObservation/index.md" ] ; + dct:hasVersion "1.0" ; + dct:modified "2023-05-19"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.unstable.sosa.examples.vectorObservationFeature a skos:Concept, + bblocks:Schema ; + rdfs:label "Example SOSA Vector Observation Feature" ; + dct:abstract "This building block defines an example SOSA Observation Feature for a Vector Observation" ; + dct:created "2023-05-19T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/examples/vectorObservationFeature/index.md" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/examples/vectorObservationFeature/index.json" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/examples/vectorObservationFeature/" ] ; + dct:hasVersion "1.0" ; + dct:modified "2023-05-19"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.unstable.sosa.features.observation a skos:Concept, + bblocks:Schema ; + rdfs:label "SOSA Observation Feature" ; + dct:abstract "This building blocks defines a GeoJSON feature containing a SOSA Observation" ; + dct:created "2023-05-18T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/features/observation/" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/features/observation/index.md" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/features/observation/index.json" ] ; + dct:hasVersion "1.0" ; + dct:modified "2023-05-18"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.unstable.sosa.features.observationCollection a skos:Concept, + bblocks:Schema ; + rdfs:label "SOSA ObservationCollection Feature" ; + dct:abstract "This building blocks defines an ObservationCollection Feature according to the SOSA/SSN v1.1 specification." 
; + dct:created "2023-04-13T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/features/observationCollection/index.json" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/features/observationCollection/" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/features/observationCollection/index.md" ] ; + dct:hasVersion "1.0" ; + dct:modified "2023-04-28"^^xsd:date ; + dct:source , + ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.unstable.sosa.properties.observation a skos:Concept, + bblocks:Schema ; + rdfs:label "SOSA Observation" ; + dct:abstract "This building block defines the set of properties for an observation according to the SOSA/SSN specification. These properties may be directly included into a root element of a JSON object or used in the properties container of a GeoJSON feature." ; + dct:created "2023-04-13T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/properties/observation/index.md" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/properties/observation/index.json" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/properties/observation/" ] ; + dct:hasVersion "1.0" ; + dct:modified "2023-04-13"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.unstable.sosa.properties.observationCollection a skos:Concept, + bblocks:Schema ; + rdfs:label "SOSA ObservationCollection" ; + dct:abstract "This building blocks defines an ObservationCollection according to the SOSA/SSN v1.1 specification." ; + dct:created "2023-04-13T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/properties/observationCollection/index.json" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/properties/observationCollection/index.md" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/properties/observationCollection/" ] ; + dct:hasVersion "1.0" ; + dct:modified "2023-04-28"^^xsd:date ; + dct:source , + ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.ogc-utils.json-link a skos:Concept, + bblocks:Schema ; + rdfs:label "JSON Link" ; + dct:abstract "Web linking is used to express relationships between resources. The JSON object representation of links described here is used consistently in OGC API’s." 
; + dct:created "2022-05-18T15:21:59+00:00"^^xsd:dateTime ; + dct:description [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/ogc-utils/json-link/index.json" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/ogc-utils/json-link/" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/ogc-utils/json-link/index.md" ] ; + dct:hasVersion "0.1" ; + dct:modified "2022-05-18"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.geo.features.feature a skos:Concept, + bblocks:Schema ; + rdfs:label "Feature" ; + dct:abstract "A feature. Every feature is a sub-resource of an OGC Collection." ; + dct:created "2023-05-24T14:56:51+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/features/feature/" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/features/feature/index.md" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/features/feature/index.json" ] ; + dct:hasVersion "1.0" ; + dct:modified "2023-05-15"^^xsd:date ; + dct:source , + ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.geo.features.featureCollection a skos:Concept, + bblocks:Schema ; + rdfs:label "Feature Collection" ; + dct:abstract "A collection of features." ; + dct:created "2023-06-26T14:56:51+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/features/featureCollection/index.md" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/features/featureCollection/" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/features/featureCollection/index.json" ] ; + dct:hasVersion "1.0" ; + dct:modified "2023-06-26"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.geo.geopose.advanced a skos:Concept, + bblocks:Schema ; + rdfs:label "GeoPose Advanced" ; + dct:abstract "Advanced GeoPose allowing flexible outer frame specification, quaternion orientation, and valid time." ; + dct:created "2023-07-13T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/geopose/advanced/index.json" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/geopose/advanced/index.md" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/geopose/advanced/" ] ; + dct:hasVersion "0.1" ; + dct:modified "2023-07-13"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . 
+ +bblocks:ogc.geo.geopose.basic.quaternion a skos:Concept, + bblocks:Schema ; + rdfs:label "GeoPose Basic-Quaternion" ; + dct:abstract "Basic GeoPose using quaternion to specify orientation" ; + dct:created "2023-07-13T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/geopose/basic/quaternion/index.md" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/geopose/basic/quaternion/index.json" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/geopose/basic/quaternion/" ] ; + dct:hasVersion "0.1" ; + dct:modified "2023-07-13"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.geo.geopose.basic.ypr a skos:Concept, + bblocks:Schema ; + rdfs:label "GeoPose Basic-YPR" ; + dct:abstract "Basic GeoPose using yaw, pitch, and roll to specify orientation" ; + dct:created "2023-03-15T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/geopose/basic/ypr/" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/geopose/basic/ypr/index.md" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/geopose/basic/ypr/index.json" ] ; + dct:hasVersion "0.1" ; + dct:modified "2023-07-13"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.geo.json-fg.feature a skos:Concept, + bblocks:Schema ; + rdfs:label "JSON-FG Feature" ; + dct:abstract "An OGC Features and Geometries JSON (JSON-FG) Feature, extending GeoJSON to support a limited set of additional capabilities that are out-of-scope for GeoJSON, but that are important for a variety of use cases involving feature data." ; + dct:created "2023-05-31T14:56:51+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/json-fg/feature/index.md" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/json-fg/feature/" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/json-fg/feature/index.json" ] ; + dct:hasVersion "0.1" ; + dct:modified "2023-05-31"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.geo.json-fg.feature-lenient a skos:Concept, + bblocks:Schema ; + rdfs:label "JSON-FG Feature - Lenient" ; + dct:abstract "An OGC Features and Geometries JSON (JSON-FG) Feature that does not require the \"time\" and \"place\" properties."
; + dct:created "2023-08-08T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/json-fg/feature-lenient/index.md" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/json-fg/feature-lenient/index.json" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/json-fg/feature-lenient/" ] ; + dct:hasVersion "0.1" ; + dct:modified "2023-08-08"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.geo.json-fg.featureCollection a skos:Concept, + bblocks:Schema ; + rdfs:label "JSON-FG Feature Collection" ; + dct:abstract "A collection of OGC Features and Geometries JSON (JSON-FG) Features, extending GeoJSON to support a limited set of additional capabilities that are out-of-scope for GeoJSON, but that are important for a variety of use cases involving feature data." ; + dct:created "2023-05-31T14:56:51+00:00"^^xsd:dateTime ; + dct:description [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/json-fg/featureCollection/index.json" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/json-fg/featureCollection/" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/json-fg/featureCollection/index.md" ] ; + dct:hasVersion "0.1" ; + dct:modified "2023-05-31"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.geo.json-fg.featureCollection-lenient a skos:Concept, + bblocks:Schema ; + rdfs:label "JSON-FG Feature Collection - Lenient" ; + dct:abstract "A collection of lenient OGC Features and Geometries JSON (JSON-FG) Features, that do not require the \"time\" and \"place\" properties" ; + dct:created "2023-08-08T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/json-fg/featureCollection-lenient/index.json" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/json-fg/featureCollection-lenient/index.md" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/json-fg/featureCollection-lenient/" ] ; + dct:hasVersion "0.1" ; + dct:modified "2023-08-08"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.geo.common.data_types.bounding_box a skos:Concept, + bblocks:Schema ; + rdfs:label "Bounding Box" ; + dct:abstract "The bounding box JSON object describes a simple spatial extent of a resource. For OGC API’s this could be a feature, a feature collection or a dataset, but it can be used in any JSON resource that wants to communicate its rough location. 
The extent is simple in that the bounding box does not describe the precise location and shape of the resource, but provides an axis-aligned approximation of the spatial extent that can be used as an initial test of whether two resources are potentially intersecting each other." ; + dct:created "2022-05-24T13:51:38+00:00"^^xsd:dateTime ; + dct:description [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/common/data_types/bounding_box/index.json" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/common/data_types/bounding_box/" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/common/data_types/bounding_box/index.md" ] ; + dct:hasVersion "1.0.1" ; + dct:modified "2023-03-09"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.geo.common.data_types.geojson a skos:Concept, + bblocks:Schema ; + rdfs:label "GeoJSON" ; + dct:abstract "A GeoJSON object" ; + dct:created "2023-05-24T14:56:51+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/common/data_types/geojson/" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/common/data_types/geojson/index.json" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/common/data_types/geojson/index.md" ] ; + dct:hasVersion "1.0" ; + dct:modified "2023-05-15"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . diff --git a/test_data/sandgate.ttl b/test_data/sandgate.ttl new file mode 100644 index 00000000..8def41e2 --- /dev/null +++ b/test_data/sandgate.ttl @@ -0,0 +1,296 @@ +PREFIX dcat: +PREFIX dcterms: +PREFIX geo: +PREFIX geofab: +PREFIX rdfs: +PREFIX sand: +PREFIX xsd: + + + a dcat:Dataset ; + dcterms:description "Example floods, roads, catchments and facilities in the Sandgate area"@en ; + dcterms:identifier "sandgate"^^xsd:token ; + dcterms:title "Sandgate example dataset"@en ; + rdfs:label "Sandgate example dataset"@en ; + rdfs:member + sand:catchments , + sand:facilities , + sand:floods , + sand:roads ; + geo:hasBoundingBox [ + a geo:Geometry ; + geo:asWKT "POLYGON ((152.9075 -27.42,153.16 -27.42,153.16 -27.2234024,152.9075 -27.2234024,152.9075 -27.42))"^^geo:wktLiteral + ] ; +. + +sand:catchments + a geo:FeatureCollection ; + dcterms:description "Hydrological catchments that are 'contracted', that is, guaranteed to appear on multiple Geofabric surface hydrology data products"@en ; + dcterms:identifier "catchments"^^xsd:token ; + dcterms:title "Geofabric Contracted Catchments"@en ; + rdfs:label "Geofabric Contracted Catchments"@en ; + rdfs:member + sand:cc12109444 , + sand:cc12109445 ; + geo:hasBoundingBox [ + a geo:Geometry ; + geo:asWKT "POLYGON ((152.9075 -27.42,153.16 -27.42,153.16 -27.2775,152.9075 -27.2775,152.9075 -27.42))"^^geo:wktLiteral + ] ; +.
+ +sand:facilities + a geo:FeatureCollection ; + dcterms:description "Sandgate area demo Facilities"@en ; + dcterms:identifier "facilities"^^xsd:token ; + dcterms:title "Sandgate area demo Facilities"@en ; + rdfs:label "Sandgate area demo Facilities"@en ; + rdfs:member + sand:bhc , + sand:bhca , + sand:bps , + sand:cpc , + sand:jcabi , + sand:rps , + sand:sac , + sand:sps , + sand:src , + sand:srca ; + geo:hasBoundingBox [ + a geo:Geometry ; + geo:asWKT "POLYGON ((153.0144819 -27.3506599,153.1143102 -27.3506599,153.1143102 -27.2234024,153.0144819 -27.2234024,153.0144819 -27.3506599))"^^geo:wktLiteral + ] ; +. + +sand:floods + a geo:FeatureCollection ; + dcterms:description "Sandgate flooded areas"@en ; + dcterms:identifier "floods"^^xsd:token ; + dcterms:title "Sandgate flooded areas"@en ; + rdfs:label "Sandgate flooded areas"@en ; + rdfs:member + sand:f001 , + sand:f023 , + sand:f332 , + sand:f632 ; + geo:hasBoundingBox [ + a geo:Geometry ; + geo:asWKT "POLYGON ((153.06307 -27.3151243,153.069877 -27.3151243,153.069877 -27.2859541,153.06307 -27.2859541,153.06307 -27.3151243))"^^geo:wktLiteral + ] ; +. + +sand:roads + a geo:FeatureCollection ; + dcterms:description "Sandgate main roads"@en ; + dcterms:identifier "roads"^^xsd:token ; + dcterms:title "Sandgate main roads"@en ; + rdfs:label "Sandgate main roads"@en ; + rdfs:member + sand:bt , + sand:fp ; + geo:hasBoundingBox [ + a geo:Geometry ; + geo:asWKT "POLYGON ((153.0617934 -27.3203138,153.0747569 -27.3203138,153.0747569 -27.2920918,153.0617934 -27.2920918,153.0617934 -27.3203138))"^^geo:wktLiteral + ] ; +. + +sand:bhc + a geo:Feature ; + rdfs:label "Brighton Health Campus Location" ; + dcterms:identifier "bhc"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0638169, -27.2897951]}"^^geo:geoJSONLiteral ; + geo:asWKT "POINT (153.0638169 -27.2897951)"^^geo:wktLiteral + ] ; +. + +sand:bhca + a geo:Feature ; + rdfs:label "Brighton Health Campus Area" ; + dcterms:identifier "bhca"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{\"type\": \"Polygon\", \"coordinates\": [[[153.063644,-27.2894036],[153.0635207,-27.2896229],[153.0631612,-27.2896182],[153.0631291,-27.289909],[153.0631559,-27.290338],[153.0644487,-27.2904858],[153.0645614,-27.2899185],[153.0648349,-27.2895324],[153.0648135,-27.2889174],[153.0637674,-27.2887362],[153.063644,-27.2894036]]]}"^^geo:geoJSONLiteral ; + geo:asWKT "POLYGON ((153.063644 -27.2894036, 153.0635207 -27.2896229, 153.0631612 -27.2896182, 153.0631291 -27.289909, 153.0631559 -27.290338, 153.0644487 -27.2904858, 153.0645614 -27.2899185, 153.0648349 -27.2895324, 153.0648135 -27.2889174, 153.0637674 -27.2887362, 153.063644 -27.2894036))"^^geo:wktLiteral + ] ; +. + +sand:bps + a geo:Feature ; + rdfs:label "Boondall Police Station" ; + dcterms:identifier "bps"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0536022, -27.3497934]}"^^geo:geoJSONLiteral ; + geo:asWKT "POINT (153.0536022 -27.3497934)"^^geo:wktLiteral + ] ; +.
+ +sand:bt + a geo:Feature ; + rdfs:label "Brighton Terrace" ; + dcterms:identifier "bt"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{ \"type\": \"LineString\", \"coordinates\": [ [ 153.06513, -27.3143431 ], [ 153.065881100000013, -27.3140285 ], [ 153.0653983, -27.3130466 ], [ 153.0652052, -27.3122745 ], [ 153.0651193, -27.3116453 ], [ 153.064550700000012, -27.3103202 ], [ 153.0641108, -27.3092526 ], [ 153.0637889, -27.3074031 ], [ 153.0631774, -27.3057253 ], [ 153.0628448, -27.3044573 ], [ 153.0627053, -27.3036565 ], [ 153.061847, -27.2988706 ], [ 153.0617934, -27.2952 ], [ 153.062168899999989, -27.2933312 ], [ 153.0622333, -27.2920918 ] ] }"^^geo:geoJSONLiteral ; + geo:asWKT "LINESTRING (153.06513 -27.3143431,153.0658811 -27.3140285,153.0653983 -27.3130466,153.0652052 -27.3122745,153.0651193 -27.3116453,153.0645507 -27.3103202,153.0641108 -27.3092526,153.0637889 -27.3074031,153.0631774 -27.3057253,153.0628448 -27.3044573,153.0627053 -27.3036565,153.061847 -27.2988706,153.0617934 -27.2952,153.0621689 -27.2933312,153.0622333 -27.2920918)"^^geo:wktLiteral + ] ; +. + +sand:cc12109444 + a + geo:Feature , + geofab:ContractedCatchment ; + rdfs:label "Contracted Catchment 12109444" ; + dcterms:identifier "cc12109444"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON '{"type": "Polygon", "coordinates": [[[153.06, -27.28], [153.06, -27.2825], [153.0625, -27.2825], [153.065, -27.2825], [153.065, -27.305], [153.0675, -27.305], [153.0675, -27.31], [153.07, -27.31], [153.07, -27.3125], [153.0725, -27.3125], [153.0725, -27.3175], [153.075, -27.3175], [153.075, -27.32], [153.0775, -27.32], [153.0775, -27.3225], [153.08, -27.3225], [153.085, -27.3225], [153.085, -27.325], [153.0875, -27.325], [153.0875, -27.33], [153.085, -27.33], [153.0825, -27.33], [153.0825, -27.3325], [153.085, -27.3325], [153.085, -27.335], [153.0875, -27.335], [153.09, -27.335], [153.09, -27.3375], [153.0925, -27.3375], [153.0925, -27.34], [153.0975, -27.34], [153.0975, -27.3425], [153.1025, -27.3425], [153.1025, -27.345], [153.1075, -27.345], [153.1075, -27.3475], [153.11, -27.3475], [153.1125, -27.3475], [153.115, -27.3475], [153.115, -27.35], [153.12, -27.35], [153.12, -27.3525], [153.125, -27.3525], [153.125, -27.355], [153.13, -27.355], [153.13, -27.3575], [153.135, -27.3575], [153.135, -27.36], [153.1375, -27.36], [153.1425, -27.36], [153.1475, -27.36], [153.1525, -27.36], [153.1525, -27.3625], [153.155, -27.3625], [153.155, -27.365], [153.1575, -27.365], [153.1575, -27.375], [153.16, -27.375], [153.16, -27.3775], [153.16, -27.38], [153.1575, -27.38], [153.155, -27.38], [153.155, -27.3825], [153.1525, -27.3825], [153.1525, -27.385], [153.15, -27.385], [153.15, -27.3875], [153.145, -27.3875], [153.145, -27.39], [153.1425, -27.39], [153.1425, -27.3925], [153.14, -27.3925], [153.14, -27.395], [153.14, -27.3975], [153.14, -27.4], [153.1375, -27.4], [153.1375, -27.4025], [153.135, -27.4025], [153.135, -27.405], [153.135, -27.4075], [153.135, -27.4125], [153.135, -27.415], [153.13, -27.415], [153.13, -27.4175], [153.1275, -27.4175], [153.1225, -27.4175], [153.1225, -27.42], [153.1175, -27.42], [153.1125, -27.42], [153.1125, -27.4175], [153.11, -27.4175], [153.11, -27.415], [153.1075, -27.415], [153.1075, -27.4125], [153.0975, -27.4125], [153.0975, -27.415], [153.0925, -27.415], [153.0875, -27.415], [153.085, -27.415], [153.08, -27.415], [153.08, -27.4125], [153.0775, -27.4125], [153.0775, -27.41], [153.075, -27.41], [153.075, -27.405], [153.07, -27.405], [153.07, -27.4025], [153.0675, 
-27.4025], [153.0675, -27.4], [153.065, -27.4], [153.065, -27.3975], [153.0625, -27.3975], [153.0625, -27.395], [153.06, -27.395], [153.06, -27.3925], [153.0275, -27.3925], [153.0275, -27.395], [153.025, -27.395], [153.025, -27.3975], [153.0175, -27.3975], [153.0175, -27.4], [153.0125, -27.4], [153.0125, -27.4025], [153.005, -27.4025], [153.005, -27.405], [153.0025, -27.405], [152.9975, -27.405], [152.9975, -27.4025], [152.9925, -27.4025], [152.9925, -27.4], [152.9875, -27.4], [152.9825, -27.4], [152.9825, -27.3975], [152.98, -27.3975], [152.98, -27.3925], [152.975, -27.3925], [152.975, -27.3875], [152.97, -27.3875], [152.96, -27.3875], [152.96, -27.39], [152.955, -27.39], [152.955, -27.3925], [152.945, -27.3925], [152.94, -27.3925], [152.9375, -27.3925], [152.9375, -27.39], [152.925, -27.39], [152.925, -27.385], [152.925, -27.3825], [152.93, -27.3825], [152.9325, -27.3825], [152.9325, -27.38], [152.9375, -27.38], [152.9375, -27.3825], [152.94, -27.3825], [152.94, -27.38], [152.9475, -27.38], [152.9475, -27.3825], [152.9525, -27.3825], [152.9525, -27.38], [152.965, -27.38], [152.9675, -27.38], [152.9675, -27.3775], [152.98, -27.3775], [152.98, -27.375], [152.9825, -27.375], [152.9825, -27.3725], [152.985, -27.3725], [152.985, -27.37], [152.9875, -27.37], [152.9875, -27.3675], [152.99, -27.3675], [152.99, -27.3625], [152.9925, -27.3625], [152.9925, -27.355], [152.995, -27.355], [152.995, -27.3525], [153, -27.3525], [153, -27.35], [153.005, -27.35], [153.01, -27.35], [153.01, -27.3475], [153.0175, -27.3475], [153.0175, -27.335], [153.02, -27.335], [153.02, -27.33], [153.0225, -27.33], [153.0225, -27.3275], [153.025, -27.3275], [153.025, -27.325], [153.0275, -27.325], [153.0275, -27.3225], [153.03, -27.3225], [153.03, -27.32], [153.0325, -27.32], [153.0325, -27.3175], [153.035, -27.3175], [153.035, -27.305], [153.0375, -27.305], [153.0375, -27.3], [153.04, -27.3], [153.04, -27.2975], [153.0425, -27.2975], [153.0425, -27.2825], [153.04, -27.2825], [153.04, -27.28], [153.0425, -27.28], [153.05, -27.28], [153.06, -27.28]]]}'^^geo:geoJSONLiteral ; + geo:asWKT "POLYGON ((153.06 -27.28, 153.06 -27.2825, 153.0625 -27.2825, 153.065 -27.2825, 153.065 -27.305, 153.0675 -27.305, 153.0675 -27.31, 153.07 -27.31, 153.07 -27.3125, 153.0725 -27.3125, 153.0725 -27.3175, 153.075 -27.3175, 153.075 -27.32, 153.0775 -27.32, 153.0775 -27.3225, 153.08 -27.3225, 153.085 -27.3225, 153.085 -27.325, 153.0875 -27.325, 153.0875 -27.33, 153.085 -27.33, 153.0825 -27.33, 153.0825 -27.3325, 153.085 -27.3325, 153.085 -27.335, 153.0875 -27.335, 153.09 -27.335, 153.09 -27.3375, 153.0925 -27.3375, 153.0925 -27.34, 153.0975 -27.34, 153.0975 -27.3425, 153.1025 -27.3425, 153.1025 -27.345, 153.1075 -27.345, 153.1075 -27.3475, 153.11 -27.3475, 153.1125 -27.3475, 153.115 -27.3475, 153.115 -27.35, 153.12 -27.35, 153.12 -27.3525, 153.125 -27.3525, 153.125 -27.355, 153.13 -27.355, 153.13 -27.3575, 153.135 -27.3575, 153.135 -27.36, 153.1375 -27.36, 153.1425 -27.36, 153.1475 -27.36, 153.1525 -27.36, 153.1525 -27.3625, 153.155 -27.3625, 153.155 -27.365, 153.1575 -27.365, 153.1575 -27.375, 153.16 -27.375, 153.16 -27.3775, 153.16 -27.38, 153.1575 -27.38, 153.155 -27.38, 153.155 -27.3825, 153.1525 -27.3825, 153.1525 -27.385, 153.15 -27.385, 153.15 -27.3875, 153.145 -27.3875, 153.145 -27.39, 153.1425 -27.39, 153.1425 -27.3925, 153.14 -27.3925, 153.14 -27.395, 153.14 -27.3975, 153.14 -27.4, 153.1375 -27.4, 153.1375 -27.4025, 153.135 -27.4025, 153.135 -27.405, 153.135 -27.4075, 153.135 -27.4125, 153.135 -27.415, 153.13 -27.415, 153.13 -27.4175, 
153.1275 -27.4175, 153.1225 -27.4175, 153.1225 -27.42, 153.1175 -27.42, 153.1125 -27.42, 153.1125 -27.4175, 153.11 -27.4175, 153.11 -27.415, 153.1075 -27.415, 153.1075 -27.4125, 153.0975 -27.4125, 153.0975 -27.415, 153.0925 -27.415, 153.0875 -27.415, 153.085 -27.415, 153.08 -27.415, 153.08 -27.4125, 153.0775 -27.4125, 153.0775 -27.41, 153.075 -27.41, 153.075 -27.405, 153.07 -27.405, 153.07 -27.4025, 153.0675 -27.4025, 153.0675 -27.4, 153.065 -27.4, 153.065 -27.3975, 153.0625 -27.3975, 153.0625 -27.395, 153.06 -27.395, 153.06 -27.3925, 153.0275 -27.3925, 153.0275 -27.395, 153.025 -27.395, 153.025 -27.3975, 153.0175 -27.3975, 153.0175 -27.4, 153.0125 -27.4, 153.0125 -27.4025, 153.005 -27.4025, 153.005 -27.405, 153.0025 -27.405, 152.9975 -27.405, 152.9975 -27.4025, 152.9925 -27.4025, 152.9925 -27.4, 152.9875 -27.4, 152.9825 -27.4, 152.9825 -27.3975, 152.98 -27.3975, 152.98 -27.3925, 152.975 -27.3925, 152.975 -27.3875, 152.97 -27.3875, 152.96 -27.3875, 152.96 -27.39, 152.955 -27.39, 152.955 -27.3925, 152.945 -27.3925, 152.94 -27.3925, 152.9375 -27.3925, 152.9375 -27.39, 152.925 -27.39, 152.925 -27.385, 152.925 -27.3825, 152.93 -27.3825, 152.9325 -27.3825, 152.9325 -27.38, 152.9375 -27.38, 152.9375 -27.3825, 152.94 -27.3825, 152.94 -27.38, 152.9475 -27.38, 152.9475 -27.3825, 152.9525 -27.3825, 152.9525 -27.38, 152.965 -27.38, 152.9675 -27.38, 152.9675 -27.3775, 152.98 -27.3775, 152.98 -27.375, 152.9825 -27.375, 152.9825 -27.3725, 152.985 -27.3725, 152.985 -27.37, 152.9875 -27.37, 152.9875 -27.3675, 152.99 -27.3675, 152.99 -27.3625, 152.9925 -27.3625, 152.9925 -27.355, 152.995 -27.355, 152.995 -27.3525, 153 -27.3525, 153 -27.35, 153.005 -27.35, 153.01 -27.35, 153.01 -27.3475, 153.0175 -27.3475, 153.0175 -27.335, 153.02 -27.335, 153.02 -27.33, 153.0225 -27.33, 153.0225 -27.3275, 153.025 -27.3275, 153.025 -27.325, 153.0275 -27.325, 153.0275 -27.3225, 153.03 -27.3225, 153.03 -27.32, 153.0325 -27.32, 153.0325 -27.3175, 153.035 -27.3175, 153.035 -27.305, 153.0375 -27.305, 153.0375 -27.3, 153.04 -27.3, 153.04 -27.2975, 153.0425 -27.2975, 153.0425 -27.2825, 153.04 -27.2825, 153.04 -27.28, 153.0425 -27.28, 153.05 -27.28, 153.06 -27.28))"^^geo:wktLiteral + ] ; +. 
+ +sand:cc12109445 + a + geo:Feature , + geofab:ContractedCatchment ; + rdfs:label "Contracted Catchment 12109445" ; + dcterms:identifier "cc12109445"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON '{"type": "Polygon", "coordinates": [[[153.0025, -27.2775], [153.0025, -27.28], [153.005, -27.28], [153.005, -27.285], [153.0075, -27.285], [153.015, -27.285], [153.015, -27.29], [153.0175, -27.29], [153.0175, -27.2925], [153.0175, -27.3025], [153.02, -27.3025], [153.02, -27.305], [153.0225, -27.305], [153.0225, -27.31], [153.0175, -27.31], [153.0175, -27.3125], [153.015, -27.3125], [153.015, -27.315], [153.015, -27.3175], [153.0175, -27.3175], [153.0175, -27.32], [153.02, -27.32], [153.02, -27.3225], [153.0275, -27.3225], [153.0275, -27.325], [153.025, -27.325], [153.025, -27.3275], [153.0225, -27.3275], [153.0225, -27.33], [153.02, -27.33], [153.02, -27.335], [153.0175, -27.335], [153.0175, -27.3475], [153.01, -27.3475], [153.01, -27.35], [153.005, -27.35], [153, -27.35], [153, -27.3525], [152.995, -27.3525], [152.995, -27.355], [152.9925, -27.355], [152.9925, -27.3625], [152.99, -27.3625], [152.99, -27.3675], [152.9875, -27.3675], [152.9875, -27.37], [152.985, -27.37], [152.985, -27.3725], [152.9825, -27.3725], [152.9825, -27.375], [152.98, -27.375], [152.98, -27.3775], [152.9675, -27.3775], [152.9675, -27.38], [152.965, -27.38], [152.9525, -27.38], [152.9525, -27.3825], [152.9475, -27.3825], [152.9475, -27.38], [152.94, -27.38], [152.94, -27.3825], [152.9375, -27.3825], [152.9375, -27.38], [152.9325, -27.38], [152.9325, -27.3825], [152.93, -27.3825], [152.925, -27.3825], [152.925, -27.385], [152.92, -27.385], [152.92, -27.3825], [152.9075, -27.3825], [152.9075, -27.38], [152.9075, -27.375], [152.9075, -27.3725], [152.915, -27.3725], [152.915, -27.37], [152.92, -27.37], [152.92, -27.3675], [152.9225, -27.3675], [152.9225, -27.365], [152.925, -27.365], [152.925, -27.3625], [152.9275, -27.3625], [152.9275, -27.36], [152.9275, -27.3575], [152.925, -27.3575], [152.925, -27.355], [152.9225, -27.355], [152.9225, -27.3525], [152.92, -27.3525], [152.92, -27.35], [152.9175, -27.35], [152.9175, -27.345], [152.92, -27.345], [152.92, -27.3325], [152.9175, -27.3325], [152.9175, -27.33], [152.915, -27.33], [152.915, -27.3275], [152.9125, -27.3275], [152.9125, -27.325], [152.9125, -27.3225], [152.9225, -27.3225], [152.9225, -27.32], [152.925, -27.32], [152.925, -27.3175], [152.9275, -27.3175], [152.9275, -27.315], [152.93, -27.315], [152.93, -27.3125], [152.9325, -27.3125], [152.9325, -27.31], [152.935, -27.31], [152.935, -27.305], [152.94, -27.305], [152.94, -27.3025], [152.9425, -27.3025], [152.9425, -27.3], [152.945, -27.3], [152.945, -27.2975], [152.95, -27.2975], [152.95, -27.295], [152.955, -27.295], [152.9575, -27.295], [152.9575, -27.2925], [152.96, -27.2925], [152.96, -27.29], [152.9625, -27.29], [152.9625, -27.2875], [152.9675, -27.2875], [152.9675, -27.285], [152.9725, -27.285], [152.9725, -27.2825], [152.9775, -27.2825], [152.9775, -27.28], [152.98, -27.28], [152.9925, -27.28], [152.9925, -27.2775], [152.9975, -27.2775], [153.0025, -27.2775]]]}'^^geo:geoJSONLiteral ; + geo:asWKT "POLYGON ((153.0025 -27.2775, 153.0025 -27.28, 153.005 -27.28, 153.005 -27.285, 153.0075 -27.285, 153.015 -27.285, 153.015 -27.29, 153.0175 -27.29, 153.0175 -27.2925, 153.0175 -27.3025, 153.02 -27.3025, 153.02 -27.305, 153.0225 -27.305, 153.0225 -27.31, 153.0175 -27.31, 153.0175 -27.3125, 153.015 -27.3125, 153.015 -27.315, 153.015 -27.3175, 153.0175 -27.3175, 153.0175 -27.32, 153.02 -27.32, 153.02 
-27.3225, 153.0275 -27.3225, 153.0275 -27.325, 153.025 -27.325, 153.025 -27.3275, 153.0225 -27.3275, 153.0225 -27.33, 153.02 -27.33, 153.02 -27.335, 153.0175 -27.335, 153.0175 -27.3475, 153.01 -27.3475, 153.01 -27.35, 153.005 -27.35, 153 -27.35, 153 -27.3525, 152.995 -27.3525, 152.995 -27.355, 152.9925 -27.355, 152.9925 -27.3625, 152.99 -27.3625, 152.99 -27.3675, 152.9875 -27.3675, 152.9875 -27.37, 152.985 -27.37, 152.985 -27.3725, 152.9825 -27.3725, 152.9825 -27.375, 152.98 -27.375, 152.98 -27.3775, 152.9675 -27.3775, 152.9675 -27.38, 152.965 -27.38, 152.9525 -27.38, 152.9525 -27.3825, 152.9475 -27.3825, 152.9475 -27.38, 152.94 -27.38, 152.94 -27.3825, 152.9375 -27.3825, 152.9375 -27.38, 152.9325 -27.38, 152.9325 -27.3825, 152.93 -27.3825, 152.925 -27.3825, 152.925 -27.385, 152.92 -27.385, 152.92 -27.3825, 152.9075 -27.3825, 152.9075 -27.38, 152.9075 -27.375, 152.9075 -27.3725, 152.915 -27.3725, 152.915 -27.37, 152.92 -27.37, 152.92 -27.3675, 152.9225 -27.3675, 152.9225 -27.365, 152.925 -27.365, 152.925 -27.3625, 152.9275 -27.3625, 152.9275 -27.36, 152.9275 -27.3575, 152.925 -27.3575, 152.925 -27.355, 152.9225 -27.355, 152.9225 -27.3525, 152.92 -27.3525, 152.92 -27.35, 152.9175 -27.35, 152.9175 -27.345, 152.92 -27.345, 152.92 -27.3325, 152.9175 -27.3325, 152.9175 -27.33, 152.915 -27.33, 152.915 -27.3275, 152.9125 -27.3275, 152.9125 -27.325, 152.9125 -27.3225, 152.9225 -27.3225, 152.9225 -27.32, 152.925 -27.32, 152.925 -27.3175, 152.9275 -27.3175, 152.9275 -27.315, 152.93 -27.315, 152.93 -27.3125, 152.9325 -27.3125, 152.9325 -27.31, 152.935 -27.31, 152.935 -27.305, 152.94 -27.305, 152.94 -27.3025, 152.9425 -27.3025, 152.9425 -27.3, 152.945 -27.3, 152.945 -27.2975, 152.95 -27.2975, 152.95 -27.295, 152.955 -27.295, 152.9575 -27.295, 152.9575 -27.2925, 152.96 -27.2925, 152.96 -27.29, 152.9625 -27.29, 152.9625 -27.2875, 152.9675 -27.2875, 152.9675 -27.285, 152.9725 -27.285, 152.9725 -27.2825, 152.9775 -27.2825, 152.9775 -27.28, 152.98 -27.28, 152.9925 -27.28, 152.9925 -27.2775, 152.9975 -27.2775, 153.0025 -27.2775))"^^geo:wktLiteral + ] ; +. + +sand:cpc + a geo:Feature ; + rdfs:label "Carseldine Police Station" ; + dcterms:identifier "cpc"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0144819, -27.3506599]}"^^geo:geoJSONLiteral ; + geo:asWKT "POINT (153.0144819 -27.3506599)"^^geo:wktLiteral + ] ; +. 
+ +sand:f001 + a geo:Feature ; + rdfs:label "Flood 001" ; + dcterms:identifier "f001"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{ \"type\": \"Polygon\", \"coordinates\": [ [ [ 153.064893899999987, -27.2909981 ], [ 153.0648081, -27.2911506 ], [ 153.064475499999986, -27.2912364 ], [ 153.064078599999988, -27.2912269 ], [ 153.0635636, -27.291265 ], [ 153.0633383, -27.2913604 ], [ 153.0632417, -27.2914462 ], [ 153.0631559, -27.2915701 ], [ 153.0630808, -27.2917036 ], [ 153.06307, -27.2917704 ], [ 153.0631773, -27.2918943 ], [ 153.0633168, -27.2920564 ], [ 153.0634241, -27.2921613 ], [ 153.063767399999989, -27.2921994 ], [ 153.0642824, -27.2922757 ], [ 153.064400400000011, -27.292371 ], [ 153.0644111, -27.2926761 ], [ 153.0643897, -27.2928764 ], [ 153.0643682, -27.2930766 ], [ 153.06434680000001, -27.2932196 ], [ 153.0642824, -27.2934675 ], [ 153.0642824, -27.2935628 ], [ 153.0643682, -27.2936391 ], [ 153.0647223, -27.2937345 ], [ 153.0648296, -27.293744 ], [ 153.064893899999987, -27.2909981 ] ] ] }"^^geo:geoJSONLiteral ; + geo:asWKT "POLYGON ((153.0648939 -27.2909981,153.0648081 -27.2911506,153.0644755 -27.2912364,153.0640786 -27.2912269,153.0635636 -27.291265,153.0633383 -27.2913604,153.0632417 -27.2914462,153.0631559 -27.2915701,153.0630808 -27.2917036,153.06307 -27.2917704,153.0631773 -27.2918943,153.0633168 -27.2920564,153.0634241 -27.2921613,153.0637674 -27.2921994,153.0642824 -27.2922757,153.0644004 -27.292371,153.0644111 -27.2926761,153.0643897 -27.2928764,153.0643682 -27.2930766,153.0643468 -27.2932196,153.0642824 -27.2934675,153.0642824 -27.2935628,153.0643682 -27.2936391,153.0647223 -27.2937345,153.0648296 -27.293744,153.0648939 -27.2909981))"^^geo:wktLiteral + ] ; +. + +sand:f023 + a geo:Feature ; + rdfs:label "Flood 023" ; + dcterms:identifier "f023"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{ \"type\": \"Polygon\", \"coordinates\": [ [ [ 153.06487820000001, -27.30059 ], [ 153.0648031, -27.301019 ], [ 153.0648138, -27.3012955 ], [ 153.0648889, -27.3015815 ], [ 153.0648567, -27.3016768 ], [ 153.064824499999986, -27.3018198 ], [ 153.0648138, -27.3020295 ], [ 153.064824499999986, -27.3022965 ], [ 153.0647387, -27.3024109 ], [ 153.0641808, -27.3024776 ], [ 153.063698, -27.3025634 ], [ 153.0634512, -27.3026302 ], [ 153.063419, -27.3027827 ], [ 153.063440500000013, -27.303002 ], [ 153.0634619, -27.303307 ], [ 153.063622900000013, -27.3034501 ], [ 153.0638696, -27.3034882 ], [ 153.0643095, -27.3035454 ], [ 153.0645456, -27.3036026 ], [ 153.0647923, -27.3037456 ], [ 153.0650176, -27.3039553 ], [ 153.0652, -27.3041174 ], [ 153.065318, -27.3042413 ], [ 153.0653931, -27.3045083 ], [ 153.0655112, -27.3047371 ], [ 153.065790099999987, -27.3050803 ], [ 153.0660476, -27.3052519 ], [ 153.0656935, -27.3037551 ], [ 153.0652215, -27.30243 ], [ 153.06487820000001, -27.30059 ] ] ] }"^^geo:geoJSONLiteral ; + geo:asWKT "POLYGON ((153.0648782 -27.30059,153.0648031 -27.301019,153.0648138 -27.3012955,153.0648889 -27.3015815,153.0648567 -27.3016768,153.0648245 -27.3018198,153.0648138 -27.3020295,153.0648245 -27.3022965,153.0647387 -27.3024109,153.0641808 -27.3024776,153.063698 -27.3025634,153.0634512 -27.3026302,153.063419 -27.3027827,153.0634405 -27.303002,153.0634619 -27.303307,153.0636229 -27.3034501,153.0638696 -27.3034882,153.0643095 -27.3035454,153.0645456 -27.3036026,153.0647923 -27.3037456,153.0650176 -27.3039553,153.0652 -27.3041174,153.065318 -27.3042413,153.0653931 -27.3045083,153.0655112 -27.3047371,153.0657901 -27.3050803,153.0660476 
-27.3052519,153.0656935 -27.3037551,153.0652215 -27.30243,153.0648782 -27.30059))"^^geo:wktLiteral + ] ; +. + +sand:f332 + a geo:Feature ; + rdfs:label "Flood 332" ; + dcterms:identifier "f332"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{ \"type\": \"Polygon\", \"coordinates\": [ [ [ 153.068289099999987, -27.3113685 ], [ 153.0681389, -27.3108346 ], [ 153.0676454, -27.3103961 ], [ 153.0673021, -27.3096144 ], [ 153.0670231, -27.3088708 ], [ 153.066615399999989, -27.3088327 ], [ 153.0659932, -27.3089662 ], [ 153.0656928, -27.3091568 ], [ 153.065564, -27.3095381 ], [ 153.0658215, -27.310377 ], [ 153.0659073, -27.3107774 ], [ 153.0660361, -27.3111587 ], [ 153.0665725, -27.3113685 ], [ 153.066744199999988, -27.3115973 ], [ 153.0674094, -27.3130272 ], [ 153.0676669, -27.3135419 ], [ 153.0680102, -27.3142473 ], [ 153.0685466, -27.3151243 ], [ 153.0693191, -27.3150862 ], [ 153.0698126, -27.3147049 ], [ 153.069877, -27.3145143 ], [ 153.06970530000001, -27.3140376 ], [ 153.0694479, -27.3134085 ], [ 153.069147500000014, -27.31297 ], [ 153.0688041, -27.3124552 ], [ 153.068375, -27.3120548 ], [ 153.068074599999989, -27.3117498 ], [ 153.068289099999987, -27.3113685 ] ] ] }"^^geo:geoJSONLiteral ; + geo:asWKT "POLYGON ((153.0682891 -27.3113685,153.0681389 -27.3108346,153.0676454 -27.3103961,153.0673021 -27.3096144,153.0670231 -27.3088708,153.0666154 -27.3088327,153.0659932 -27.3089662,153.0656928 -27.3091568,153.065564 -27.3095381,153.0658215 -27.310377,153.0659073 -27.3107774,153.0660361 -27.3111587,153.0665725 -27.3113685,153.0667442 -27.3115973,153.0674094 -27.3130272,153.0676669 -27.3135419,153.0680102 -27.3142473,153.0685466 -27.3151243,153.0693191 -27.3150862,153.0698126 -27.3147049,153.069877 -27.3145143,153.0697053 -27.3140376,153.0694479 -27.3134085,153.0691475 -27.31297,153.0688041 -27.3124552,153.068375 -27.3120548,153.0680746 -27.3117498,153.0682891 -27.3113685))"^^geo:wktLiteral + ] ; +. 
+ +sand:f632 + a geo:Feature ; + rdfs:label "Flood 632" ; + dcterms:identifier "f632"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{ \"type\": \"Polygon\", \"coordinates\": [ [ [ 153.0649154, -27.2906357 ], [ 153.0650656, -27.2892818 ], [ 153.0651407, -27.288233 ], [ 153.06513, -27.287413 ], [ 153.0650656, -27.2859541 ], [ 153.0649905, -27.2861353 ], [ 153.065012, -27.2863737 ], [ 153.065001200000012, -27.2868218 ], [ 153.0649583, -27.2871079 ], [ 153.0648296, -27.2873463 ], [ 153.0646472, -27.2873939 ], [ 153.064604300000013, -27.2875274 ], [ 153.0646365, -27.2877849 ], [ 153.0646686, -27.2879183 ], [ 153.0646686, -27.2882711 ], [ 153.0646365, -27.2885762 ], [ 153.0642609, -27.2886716 ], [ 153.0640678, -27.2888623 ], [ 153.064035600000011, -27.2890816 ], [ 153.064293099999986, -27.2894248 ], [ 153.064379, -27.2897204 ], [ 153.0642288, -27.2899206 ], [ 153.064057100000014, -27.2899969 ], [ 153.0639605, -27.2902353 ], [ 153.0639927, -27.2904069 ], [ 153.064110699999986, -27.2905309 ], [ 153.0642824, -27.2906644 ], [ 153.064497, -27.2907216 ], [ 153.064657899999986, -27.2907406 ], [ 153.064818800000012, -27.2907406 ], [ 153.0649154, -27.2906357 ] ] ] }"^^geo:geoJSONLiteral ; + geo:asWKT "POLYGON ((153.0649154 -27.2906357,153.0650656 -27.2892818,153.0651407 -27.288233,153.06513 -27.287413,153.0650656 -27.2859541,153.0649905 -27.2861353,153.065012 -27.2863737,153.0650012 -27.2868218,153.0649583 -27.2871079,153.0648296 -27.2873463,153.0646472 -27.2873939,153.0646043 -27.2875274,153.0646365 -27.2877849,153.0646686 -27.2879183,153.0646686 -27.2882711,153.0646365 -27.2885762,153.0642609 -27.2886716,153.0640678 -27.2888623,153.0640356 -27.2890816,153.0642931 -27.2894248,153.064379 -27.2897204,153.0642288 -27.2899206,153.0640571 -27.2899969,153.0639605 -27.2902353,153.0639927 -27.2904069,153.0641107 -27.2905309,153.0642824 -27.2906644,153.064497 -27.2907216,153.0646579 -27.2907406,153.0648188 -27.2907406,153.0649154 -27.2906357))"^^geo:wktLiteral + ] ; +. + +sand:fp + a geo:Feature ; + rdfs:label "Flinder Parade" ; + dcterms:identifier "fp"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{ \"type\": \"LineString\", \"coordinates\": [ [ 153.074756900000011, -27.3203138 ], [ 153.0727077, -27.3183121 ], [ 153.0715276, -27.3170824 ], [ 153.070519, -27.3157669 ], [ 153.0694891, -27.3143847 ], [ 153.067751, -27.311115 ], [ 153.0664635, -27.3072446 ], [ 153.0656267, -27.3047468 ], [ 153.065111699999989, -27.3031262 ], [ 153.0647898, -27.301677 ], [ 153.064510899999988, -27.3000372 ], [ 153.0644036, -27.2984546 ], [ 153.0643392, -27.2973296 ], [ 153.06459670000001, -27.2953656 ], [ 153.0646396, -27.2936494 ], [ 153.0644465, -27.2922764 ] ] }"^^geo:geoJSONLiteral ; + geo:asWKT "LINESTRING (153.0747569 -27.3203138,153.0727077 -27.3183121,153.0715276 -27.3170824,153.070519 -27.3157669,153.0694891 -27.3143847,153.067751 -27.311115,153.0664635 -27.3072446,153.0656267 -27.3047468,153.0651117 -27.3031262,153.0647898 -27.301677,153.0645109 -27.3000372,153.0644036 -27.2984546,153.0643392 -27.2973296,153.0645967 -27.2953656,153.0646396 -27.2936494,153.0644465 -27.2922764)"^^geo:wktLiteral + ] ; +. + +sand:jcabi + a geo:Feature ; + rdfs:label "Jacana Centre for Acquired Brain Injury" ; + dcterms:identifier "jcabi"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0632873, -27.2918652]}"^^geo:geoJSONLiteral ; + geo:asWKT "POINT (153.0632873 -27.2918652)"^^geo:wktLiteral + ] ; +. 
+ +sand:rps + a geo:Feature ; + rdfs:label "Redcliffe Police Station" ; + dcterms:identifier "rps"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.1143102, -27.2234024]}"^^geo:geoJSONLiteral ; + geo:asWKT "POINT (153.1143102 -27.2234024)"^^geo:wktLiteral + ] ; +. + +sand:sac + a geo:Feature ; + rdfs:label "Sandgate Aquatic Centre" ; + dcterms:identifier "sac"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0688897, -27.3122011]}"^^geo:geoJSONLiteral ; + geo:asWKT "POINT (153.0688897 -27.3122011)"^^geo:wktLiteral + ] ; +. + +sand:sps + a geo:Feature ; + rdfs:label "Sandgate Police Station" ; + dcterms:identifier "sps"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0677583, -27.318185]}"^^geo:geoJSONLiteral ; + geo:asWKT "POINT (153.0677583 -27.318185)"^^geo:wktLiteral + ] ; +. + +sand:src + a geo:Feature ; + rdfs:label "Sandgate Respite Centre" ; + dcterms:identifier "src"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0614757, -27.3111489]}"^^geo:geoJSONLiteral ; + geo:asWKT "POINT (153.0614757 -27.3111489)"^^geo:wktLiteral + ] ; +. + +sand:srca + a geo:Feature ; + rdfs:label "Sandgate Respite Centre Area" ; + dcterms:identifier "srca"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{\"type\": \"Polygon\", \"coordinates\": [[[153.0606281,-27.3096141], [153.0604564,-27.3105197], [153.0600487,-27.3109296], [153.0607354,-27.3127218], [153.063203,-27.3121212], [153.0621623,-27.3095187], [153.0617868,-27.3098333], [153.0606281,-27.3096141]]]}"^^geo:geoJSONLiteral ; + geo:asWKT "POLYGON ((153.0606281 -27.3096141, 153.0604564 -27.3105197, 153.0600487 -27.3109296, 153.0607354 -27.3127218, 153.063203 -27.3121212, 153.0621623 -27.3095187, 153.0617868 -27.3098333, 153.0606281 -27.3096141))"^^geo:wktLiteral + ] ; +. 
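Each geo:Geometry in the sandgate.ttl data above carries the same shape twice, once as a geo:asGeoJSON literal and once as a geo:asWKT literal, so the two serialisations can be cross-checked mechanically. A minimal sketch of such a check, assuming rdflib and shapely are installed and the file sits at test_data/sandgate.ttl as added by this patch (the script is illustrative, not part of the changeset):

# check_sandgate_geometries.py: illustrative sketch, assumes rdflib + shapely
import json

from rdflib import Graph, Namespace
from shapely import wkt
from shapely.geometry import shape

GEO = Namespace("http://www.opengis.net/ont/geosparql#")

g = Graph()
g.parse("test_data/sandgate.ttl", format="turtle")

checked = 0
for feature, geom in g.subject_objects(GEO.hasGeometry):
    geojson_lit = g.value(geom, GEO.asGeoJSON)
    wkt_lit = g.value(geom, GEO.asWKT)
    if geojson_lit is None or wkt_lit is None:
        continue  # defensive: skip any geometry missing either serialisation
    a = shape(json.loads(str(geojson_lit)))  # GeoJSON literal -> shapely geometry
    b = wkt.loads(str(wkt_lit))              # WKT literal -> shapely geometry
    # equals_exact with a tolerance absorbs the extra decimal places present in
    # some GeoJSON coordinates (e.g. 153.064550700000012 vs 153.0645507)
    assert a.equals_exact(b, tolerance=1e-7), f"serialisations disagree for {feature}"
    checked += 1

print(f"{checked} geometries checked")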
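The tests in the next diff exercise the new /cql route in both directions: POSTing a CQL JSON document as the request body, and GETting the same document URL-encoded into the filter query parameter. A sketch of the equivalent calls against a running instance, assuming a local server on port 8000, the httpx client, and a simple CQL2 equality filter standing in for the geo_intersects.json fixture the tests reference:

# illustrative sketch only: mirrors tests/_test_cql.py against a live server
# (assumes `pip install httpx` and a Prez instance at http://localhost:8000)
import json
from urllib.parse import quote_plus

import httpx

# a minimal CQL2 JSON comparison, used here purely as an example payload
cql = {"op": "=", "args": [{"property": "name"}, "Sandgate Police Station"]}

# POST: the CQL JSON document travels as the request body
r = httpx.post(
    "http://localhost:8000/cql",
    json=cql,
    headers={"content-type": "application/json"},
)
r.raise_for_status()

# GET: the same document, URL-encoded into the `filter` query parameter;
# `_mediatype=application/sparql-query` asks for the generated SPARQL
# rather than the query results
r = httpx.get(
    "http://localhost:8000/cql?filter="
    + quote_plus(json.dumps(cql))
    + "&_mediatype=application/sparql-query"
)
r.raise_for_status()
print(r.text)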
diff --git a/tests/test_cql.py b/tests/_test_cql.py similarity index 73% rename from tests/test_cql.py rename to tests/_test_cql.py index 0f065d60..c2e2f5e0 100644 --- a/tests/test_cql.py +++ b/tests/_test_cql.py @@ -8,6 +8,7 @@ from prez.app import app from prez.dependencies import get_repo from prez.sparql.methods import Repo, PyoxigraphRepo +from urllib.parse import quote_plus @pytest.fixture(scope="session") @@ -73,3 +74,21 @@ def test_simple(client, cql_json_filename): headers = {"content-type": "application/json"} response = client.post("/cql", json=cql_json_as_json, headers=headers) assert response.status_code == 200 + + +def test_intersects_post(client): + cql_json = Path(__file__).parent / "data/cql/input/geo_intersects.json" + cql_json_as_json = json.loads(cql_json.read_text()) + headers = {"content-type": "application/json"} + response = client.post("/cql", json=cql_json_as_json, headers=headers) + assert response.status_code == 200 + + +def test_intersects_get(client): + cql_json = Path(__file__).parent / "data/cql/input/geo_intersects.json" + cql_json_as_json = json.loads(cql_json.read_text()) + query_string = quote_plus(json.dumps(cql_json_as_json)) + response = client.get( + f"/cql?filter={query_string}&_mediatype=application/sparql-query" + ) + assert response.status_code == 200 diff --git a/tests/data/catprez/expected_responses/top_level_catalog_anot.ttl b/tests/data/catprez/expected_responses/top_level_catalog_anot.ttl index e2278cde..24547d3c 100644 --- a/tests/data/catprez/expected_responses/top_level_catalog_anot.ttl +++ b/tests/data/catprez/expected_responses/top_level_catalog_anot.ttl @@ -1,15 +1,10 @@ @prefix dcat: . @prefix dcterms: . +@prefix ns1: . @prefix prez: . -@prefix prov: . @prefix rdf: . @prefix rdfs: . -@prefix schema: . @prefix skos: . -@prefix xsd: . - -dcterms:created rdfs:label "Date Created"@en ; - dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en . dcterms:description rdfs:label "Description"@en ; dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . dcterms:hasPart rdfs:label "Has Part"@en ; dcterms:identifier rdfs:label "Identifier"@en ; dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . -dcterms:modified rdfs:label "Date Modified"@en ; - dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en . - -dcterms:provenance rdfs:label "Provenance"@en ; - dcterms:description "The statement may include a description of any changes successive custodians made to the resource."@en . - -dcterms:title rdfs:label "Title"@en . - rdf:type rdfs:label "type" . rdfs:label rdfs:label "label" . skos:definition rdfs:label "definition"@en ; skos:definition "A statement or formal explanation of the meaning of a concept."@en . -skos:prefLabel rdfs:label "preferred label"@en ; - skos:definition "The preferred lexical label for a resource, in a given language."@en .
- -dcat:hadRole rdfs:label "hadRole"@en ; - skos:definition "The function of an entity or agent with respect to another entity or resource."@en . - -prov:agent rdfs:label "agent" . - -prov:qualifiedAttribution rdfs:label "qualified attribution" . - - a dcat:Catalog ; - rdfs:label "IDN Demonstration Catalogue" ; - dcterms:created "2022-07-31"^^xsd:date ; - dcterms:description """The Indigenous Data Network's demonstration catalogue of datasets. This catalogue contains records of datasets in Australia, most of which have some relation to indigenous Australia. - -The purpose of this catalogue is not to act as a master catalogue of indigenous data in Australia to demonstrate improved metadata models and rating systems for data and metadata in order to improve indigenous data governance. - -The content of this catalogue conforms to the Indigenous Data Network's Catalogue Profile which is a profile of the DCAT, SKOS and PROV data models."""@en ; - dcterms:hasPart , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - ; - dcterms:identifier "democat"^^xsd:token, - "pd:democat"^^prez:identifier ; - dcterms:modified "2022-08-29"^^xsd:date ; - dcterms:title "IDN Demonstration Catalogue" ; - prov:qualifiedAttribution [ dcat:hadRole , - , - ; - prov:agent ] ; - prez:link "/catalogs/pd:democat" ; - prez:members [ prez:link "/catalogs/pd:democat/collections" ] . - -schema:description rdfs:label "description" . - -schema:name rdfs:label "name" . - - rdfs:label "author"@en ; - dcterms:provenance "Presented in the original standard's codelist"@en ; - skos:definition "party who authored the resource" ; - skos:prefLabel "author"@en . - - rdfs:label "custodian"@en ; - dcterms:provenance "Presented in the original standard's codelist"@en ; - skos:definition "party that accepts accountability and responsibility for the resource and ensures appropriate care and maintenance of the resource" ; - skos:prefLabel "custodian"@en . - - rdfs:label "owner"@en ; - dcterms:provenance "Presented in the original standard's codelist"@en ; - skos:definition "party that owns the resource" ; - skos:prefLabel "owner"@en . +ns1:TopLevelCatalog a dcat:Catalog ; + rdfs:label "Top level catalog" ; + dcterms:hasPart ns1:LowerLevelCatalog ; + dcterms:identifier "exm:TopLevelCatalog"^^prez:identifier ; + ns1:property "top level catalog property" ; + prez:link "/c/catalogs/exm:TopLevelCatalog", + "/v/catalogs/exm:TopLevelCatalog" ; + prez:members [ prez:link "/c/catalogs/exm:TopLevelCatalog/collections" ] . - dcterms:description """Needs to be integrated with KHRD. Negotiation required with State Library. +prez:link rdfs:label "link" . -Comprises Barwick's publications and conference papers; Barwick's PhD.; work with the Australian Institute of Aboriginal Studies and the Aboriginal History journal; work on major research projects; incoming and outgoing correspondence; reference material, and collected genealogies of Aboriginal Victorian families.""" ; - dcterms:title "The Diane Barwick Archive" . +prez:members rdfs:label "members" . dcat:Catalog rdfs:label "Catalog"@en ; skos:definition "A curated collection of metadata about resources (e.g., datasets and data services in the context of a data catalog)."@en . 
- dcterms:description """This dataset has been developed by the Australian Government as an authoritative source of indigenous location names across Australia. It is sponsored by the Spatial Policy Branch within the Department of Communications and managed solely by the Department of Human Services. -The dataset is designed to support the accurate positioning, consistent reporting, and effective delivery of Australian Government programs and services to indigenous locations. -The dataset contains Preferred and Alternate names for indigenous locations where Australian Government programs and services have been, are being, or may be provided. The Preferred name will always default to a State or Territory jurisdiction's gazetted name so the term 'preferred' does not infer that this is the locally known name for the location. Similarly, locational details are aligned, where possible, with those published in State and Territory registers. -This dataset is NOT a complete listing of all locations at which indigenous people reside. Town and city names are not included in the dataset. The dataset contains names that represent indigenous communities, outstations, defined indigenous areas within a town or city or locations where services have been provided.""" ; - dcterms:title "Australian Government Indigenous Programs & Policy Locations (AGIL) dataset" . - - dcterms:description """This study contains time series of data of the Annual Aboriginal Census for Australia, Australian Capital Territory, New South Wales, Northern Territory, Queensland, South Australia, Tasmania, Victoria and Western Australia from 1921 to 1944. - -Special care notice: -Aboriginal and Torres Strait Islander people, researchers and other users should be aware that material in this dataset may contain material that is considered offensive. The data has been retained in its original format because it represents an evidential record of language, beliefs or other cultural situations at a point in time.""" ; - dcterms:title "Annual Aboriginal Census,1921-1944 - Australia" . - - dcterms:description """This study contains time series of data of the Annual Aboriginal Census for Australia, Australian Capital Territory, New South Wales, Northern Territory, Queensland, South Australia, Tasmania, Victoria and Western Australia from 1921 to 1944. - -Special care notice: -Aboriginal and Torres Strait Islander people, researchers and other users should be aware that material in this dataset may contain material that is considered offensive. The data has been retained in its original format because it represents an evidential record of language, beliefs or other cultural situations at a point in time.""" ; - dcterms:title "Annual Aboriginal Census,1921-1944 - South Australia" . - - dcterms:description "Existing database at ANU" ; - dcterms:title "The Australian Dictionary of Biography" . - - dcterms:description "A database of Agents - Organisations & People - with roles relating to indigenous data" ; - dcterms:title "Indigenous Data Network's Agents DB" . - - dcterms:description "An Indigenous geography and gazetteer, including a Loc-I framework for tribal, language and community data. Requires developmental work in collaboration with Universities, ABS, AIHW, Geoscience Australia, AURIN etc etc." ; - dcterms:title "Indigenous Gazetteer" . - - dcterms:description "The Australian National University is home to many research collections of national and international significance. 
Material from the ANU Archives, ANU Classics Museum, ANU Library, Asia Pacific Map Collection and the Noel Butlin Archives Centre are being progressivley digitised and made available through this repository." ; - dcterms:title "ANU Archive and Library Collections - \"Indigenous\" Search" . - - dcterms:description "A 2020 review of First Nations Identified physical collections held by the ANU. Not published." ; - dcterms:title "2020 ANU First Nations Collections Review" . - - dcterms:description "The University's Open Research digital repository ecompasses a number of research collections which the wider community is free to browse." ; - dcterms:title "ANU Open Research Collections" . - - dcterms:description """The Australian National University, through its Open Research repository collects, maintains, preserves, promotes and disseminates its open access scholarly materials. - -Open Research holds a variety of scholarly publications including journal articles; books and book chapters; conference papers, posters and presentations; theses; creative works; photographs and much more in a number of collections and formats. The wider community is free to browse this material and all members of the ANU community (past and present) are encouraged to contribute their research.""" ; - dcterms:title "ANU Open Research Library - \"Indigenous\" Search (Thesis Library)" . - - dcterms:description "Publications, Ethics, Grants" ; - dcterms:title "ANU Research Information Enterprise System" . - - dcterms:description """Needs to be made fully maintainable, sustainable interoperable and web-accessible - -ATNS provides an online portal for people seeking information on agreements with Indigenous peoples. We aim to promote knowledge and transparency by capturing the range and variety of agreement making occurring in Australia and other parts of the world. - -We gather and review information from publicly available academic sources, online materials and documents provided by the organisations and agencies involved in agreement-making processes. No confidential material is published. """ ; - dcterms:title "The Agreements, Treaties and Negotiated Settlements Database" . - - dcterms:description """The Aboriginal and Torres Strait Islander Community Profiles (ACPs) are tabulations giving key census characteristics of Aboriginal and Torres Strait Islander persons, families and dwellings, covering most topics on the 1991 Census of Population and Housing form. This profile is presented at the Aboriginal Community level. -The ACP consists of 29 tables which crosstabulate characteristics including gender, age, place of birth, religion, marital status, education, income, occupation and employment status.""" ; - dcterms:title "1991 Census of Population and Housing: Aboriginal and Torres Strait Islander Community Profile: Aboriginal Community, ACT" . - - dcterms:description """The Aboriginal and Torres Strait Islander Community Profiles (ACPs) are tabulations giving key census characteristics of Aboriginal and Torres Strait Islander persons, families and dwellings, covering most topics on the 1991 Census of Population and Housing form. This profile is presented at the ATSIC Region level. - -The ACP consists of 29 tables which crosstabulate characteristics including gender, age, place of birth, religion, marital status, education, income, occupation and employment status.""" ; - dcterms:title "1991 Census of Population and Housing: Aboriginal and Torres Strait Islander Community Profile: ATSIC Regions" . 
- - dcterms:description """The Aboriginal and Torres Strait Islander Community Profiles (ACPs) are tabulations giving key census characteristics of Aboriginal and Torres Strait Islander persons, families and dwellings, covering most topics on the 1991 Census of Population and Housing form. This profile is presented at the ATSIC Zone level. -The ACP consists of 29 tables which crosstabulate characteristics including gender, age, place of birth, religion, marital status, education, income, occupation and employment status.""" ; - dcterms:title "1991 Census of Population and Housing: Aboriginal and Torres Strait Islander Community Profile: ATSIC Zones" . - - dcterms:description "ATSIDA is a specialised trusted research data management facility, and thematic archive within the Australian Data Archive for Australian Aboriginal and Torres Strait Islander research data managed by the UTS Library. ATSIDA provides a transformational research platform working at the nexus of researchers, communities and other stakeholders in preserving and ensuring ethical access to research data related to Indigenous communities. ATSIDA works with universities, government and other organisations to increase Indigenous student and staff research capacity, support Indigenous researchers and those working with Indigenous research data. It engages with communities to manage appropriate access and return of digital materials.", - "The Aboriginal and Torres Strait Islander Data Archive at the Australian Data Archive and ANU Archives. This was specifically mentioned in the NCRIS Roadmap as an existing strength to be built on. It needs staff at the Data Archive to fully curate and digitise these collections and make them web-accessible." ; - dcterms:title "ABORIGINAL & TORRES STRAIT ISLANDER DATA ARCHIVE", - "The Aboriginal and Torres Strait Islander Data Archive at ADA, ANU" . - - dcterms:description "This looks like a mirror of the ADA archive. Many links are broken." ; - dcterms:title "The Aboriginal and Torres Strait Islander Data Archive at Jumbunna, UTS" . - - dcterms:description """Austlang provides information about Indigenous Australian languages which has been assembled from referenced sources. -The dataset provided here includes the language names, each with a unique alpha-numeric code which functions as a stable identifier, alternative/variant names and spellings and the approximate location of each language variety.""" ; - dcterms:title "Austlang database." . - - dcterms:description """The Indigenous Protected Areas (IPA) programme has demonstrated successes across a broad range of outcome areas, effectively overcoming barriers to addressing Indigenous disadvantage and engaging Indigenous Australians in meaningful employment to achieve large scale conservation outcomes, thus aligning the interests of Indigenous Australians and the broader community. - -The Birriliburu & Matuwa Kurrara Kurrara (MKK) IPAs have provided an opportunity for Martu people to reconnect with and actively manage their traditional country. - -The two IPAs have proved a useful tool with which to leverage third party investment, through a joint management arrangement with the Western Australia (WA) Government, project specific funding from environmental NGOs and mutually beneficial partnerships with the private sector. 
- -Increased and diversified investment from a range of funding sources would meet the high demand for Ranger jobs and could deliver a more expansive programme of works, which would, in turn, increase the social, economic and cultural outcomes for Martu Rangers and Community Members.""" ; - dcterms:title "SRI Investment Analysis of the Birriliburu and Matuwa Kurrara Kurrara Indigenous Protected Areas (2016)" . - - dcterms:description "Historical population data and biographical records" ; - dcterms:title "Briscoe-Smith Archive" . - - dcterms:description """The Composite Gazetteer of Australia is a cloud-based system allowing users to easily discover, interrogate and download place names information from Australia and its external territories. It is developed as a partnership between contributing agencies of the Intergovernmental Committee on Surveying and Mapping (ICSM) and is built on modern infrastructure providing automated ingestion and validation, producing a composite dataset from the individual jurisdictional gazetteers. - -The place names database is a collection of jurisdictional data that is combined to create the Composite Gazetteer of Australia. Place name information is managed at a local level by jurisdictions. The place name database and the Composite Gazetteer of Australia are maintained by ICSM.""" ; - dcterms:title "Compound Gazetteer of Australia" . - - dcterms:description "The Cultural Heritage Parties dataset is the spatial representation of state-wide Aboriginal and Torres Strait Islander Native Title Party boundaries within Queensland as described under the Aboriginal Cultural Heritage Act 2003 and the Torres Strait Islander Cultural Heritage Act 2003 (the Acts)." ; - dcterms:title "Cultural Heritage Party boundaries - Queensland" . - - dcterms:description "Productivity Commissions data dashboard arising from the National Agreement on Closing the Gap." ; - dcterms:title "Closing the gap information repository" . - - dcterms:description "Norman B. Tindale ; tribal boundaries drawn by Winifred Mumford on a base map produced by the Division of National Mapping, Department of National Development, Canberra, Australia." ; - dcterms:title "Distribution of the Aboriginal Tribes of Australia (1940)" . - - dcterms:description "UTS has taken over this data, but needs help to turn it into an ongoing public database" ; - dcterms:title "Aboriginal Deaths and Injuries in Custody" . - - dcterms:description "Barry Hansen and Yothu Yindi Foundation have done extensive work on where the money goes in the NT. Needs to be a national database." ; - dcterms:title "Expenditure on Indigenous Advancement" . - - dcterms:description "(Torrens University). An earlier application with Marcia for AIATSIS funding was never considered." ; - dcterms:title "GDP and Genuine Progress Indicator" . - - dcterms:description "The Snapshot is an ongoing research project that links enterprises on Indigenous business registries to data held by the Australian Bureau of Statistics. It will enable us to track the industries, revenue, employment outcome and growth of Indigenous businesses. This report provides an unprecedented snapshot of the Indigenous business sector to help dismantle the many stereotypes and myths that have led to lost opportunities for Indigenous business growth. There is mention of an I-BLADE dataset." ; - dcterms:title "Indigenous Business Sector Snapshot 1.1 Indigenous Businesses Sector Snapshot Study, Insights from I-BLADE 1.0" . 
- - dcterms:description "Land that is owned or managed by Australia’s Indigenous communities, or over which Indigenous people have use and rights, was compiled from information supplied by Australian, state and territory governments and other statutory authorities with Indigenous land and sea management interests." ; - dcterms:title "Indigenous Land and Sea Interests " . - - dcterms:description "Registered & Notified Indigenous Land Use Agreements – (as per s. 24BH(1)(a), s. 24CH and s. 24DI(1)(a)) across Australia, The Central Resource for Sharing and Enabling Environmental Data in NSW" ; - dcterms:title "Indigenous Land Use Agreement Boundaries with basic metadata and status" . - - dcterms:description "Printed catalog highlighting ANU Indigenous Research activities at the time of publication" ; - dcterms:title "Indigenous Research Compendium 2018" . - - dcterms:description """Various projects from $10 million Indigenous Research Fund administered by AIATSIS. -A number of projects are described p13-15 here. -One might expect a number of these would give rise to relevant data collections and information on methods. -Each of these projects should be catalogued? Or not?""" ; - dcterms:title "Indigenous Research Exchange/Knowledge Exchange Platform" . - - dcterms:description """Sandra Silcot has identified the steps required to make this fully maintainable and sustainable. -Koori Health Research Database (Janet McCalman) traces BDM of 7,800 Aboriginals in Victoria & New South Wales Australia from 19th Century to the present. It is built from Yggdrasil, an existing open-source web database application designed for large population studies of family history https://rdxx.org/notes.sandra/khrd/slides/khrd-apa2012-talk.pdf.html""" ; - dcterms:title "The Koori Health Research Database" . - - dcterms:description """The Mayi Kuwayu Study looks at how Aboriginal and Torres Strait Islander wellbeing is linked to things like connection to country, cultural practices, spirituality and language use. -Our research team follows a large number of Aboriginal and Torres Strait Islander people and asks about their culture and wellbeing. As a longitudinal study, we are surveying people and then ask them to take the same survey every few years, so that we can understand what influences changes over time. -This is the first time a national study of this type has been done and will provide an evidence base to allow for the creation of better policies and programs. -This study has been created by and for Aboriginal and Torres Strait Islander people. It is an Aboriginal and Torres Strait Islander controlled research resource. -The Mayi Kuwayu team are experienced at working closely with communities across Australia, and the study has majority Aboriginal and Torres Strait Islander staffing and study governance (decision making) structure.""" ; - dcterms:title "The National Study of Aboriginal and Torres Strait Islander Wellbeing" . - - dcterms:description "These are extensive paper records which Ian Anderson has proposed incorporating in a database. Negotiation is still needed." ; - dcterms:title "Tasmanian Aboriginal genealogies" . - - dcterms:description "The Historical Census and Colonial Data Archive (HCCDA) is an archive of Australian colonial census publications and reports covering the period from 1833 to 1901, the year of Australia's federation. The corpus includes 18,638 pages of text, and approximately 15000 tables, all with full digital images, text conversion and individually identified pages and tables. 
Please note that the archive contains colonial census reports, but not individual census returns." ; - dcterms:title "The Historical Census and Colonial Data Archive" . - - dcterms:description "Noongar Boodjar Language Centre (NBLC) in Perth have partnered with the Atlas of Living Australia to link Noongar-Wudjari language and knowledge for plants and animals to western science knowledge to create the Noongar-Wudjari Plant and Animal online Encyclopedia. This project focused on the Noongar-Wudjari clan, from the South coast of WA, and worked specifically with Wudjari knowledge holders - Lynette Knapp and Gail Yorkshire to record, preserve and share their ancestral language and knowledge about plants and animals. Knowledge and language for 90 plants and animals were collected and are now ready for publication through the Atlas of Living Australia (ala.org.au)." ; - dcterms:title "Noongar Boodjar Plants and Animals" . - - dcterms:description """We are making a national resource for Indigenous health and heritage, which is based on our collection of biological samples, genome data and documents from Indigenous communities in many parts of Australia. You can find out more about NCIG and its collections at ncig.anu.edu.au. - -Information in these collections tells two kinds of stories. - -We are working with Indigenous communities to decide how to tell the stories of the people who are represented in the collection. We do not make personal information available, but the website lets you know what collections we have and how to contact us if you want to know more. - -There is also the story about how the collection was made and how it can be useful to researchers and other people. - -This website helps to tell this second story by making some records and documents from the collection openly available. There is information about the people who collected the samples and made the records, why they carried out their studies, the places they visited and some of the results of their studies.""" ; - dcterms:title "National Centre for Indigenous Genomics data" . - - dcterms:description "NSW prison population data and quarterly custody reports" ; - dcterms:title "NSW Custody Statistics" . - - dcterms:description "Existing database at the National Library" ; - dcterms:title "People Australia" . - - dcterms:description "Databases held by the NNTT" ; - dcterms:title "Native Title Databases at the National Native Title Tribunal" . - - dcterms:description "This comprises records of about 70,000 Indigenous and 30,000 non-Indigenous people surveyed in the 1970s and 1980s. Some paper records are held at AIATSIS. Microfilms of others are at UNSW Archives. There have been preliminary discussions with AIATSIS, the National Library and former members of the Hollows team about a program to digitise the records. IDN staff/resources would be needed." ; - dcterms:title "The Fred Hollows Archive (National Trachoma and Eye Health Program)" . - - dcterms:description """Conference powerpoint presentation - -Case study in exemplary IDG. -- Survey of native title prescribed bodies corporate (PBCs) -- Collect data on PBCs’ capacity, capabilities, needs and aspirations to better inform policies that affect PBCs -- Started data collection May 2019, to finish in 3rd quarter 2019""" ; - dcterms:title "Prescribed bodies corporate (PBCs) Survey 2019" . - - dcterms:title "AG Productivity Commission - Report on Government Services: Indigenous Compendium reports 2005-2015" . 
- - dcterms:description "This dataset is of police offences by Aboriginals in Western Australia" ; - dcterms:title "Police Offenses WA (Erin Mathews)" . - - dcterms:description """Aboriginal and Torres Strait Islander people are the Indigenous people of Australia. They are not one group, but comprise hundreds of groups that have their own distinct set of languages, histories and cultural traditions. - -AIHW reports and other products include information about Indigenous Australians, where data quality permits. Thus, information and statistics about Indigenous Australians can be found in most AIHW products. - -In December 2021, AIHW released the Regional Insights for Indigenous Communities (RIFIC). The aim of this website is to provide access to data at a regional level, to help communities set their priorities and participate in joint planning with government and service providers. - -AIHW products that focus specifically on Indigenous Australians are captured on this page.""" ; - dcterms:title "Regional Insights for Indigenous Communities" . - - dcterms:description """Data workbooks presenting the latest Social Health Atlases of Australia are available for the whole of Australia by Population Health Area, Local Government Area, and Primary Health Network, and by Indigenous Area for the Aboriginal & Torres Strait Islander population. Data are also available by Quintile of Socioeconomic Disadvantage of Area (current period and time series), and Remoteness Area (current period and time series), for both the whole population, and the Aboriginal & Torres Strait Islander population (current period only). - -These workbooks are derived from ABS Census data releases.""" ; - dcterms:title "Social Health Atlases of Australia" . - - dcterms:description "Summarises all available aerial survey data and metadata used to characterise the long-term distribution and abundance of magpie geese in the Northern Territory undertaken by different institutions and publically available in several journals (Appendix A). Summarised also are results from a PhD study (E. Ligtermoet) documenting the cultural harvesting values of magpie geese ascertained by interviews with Kakadu Traditional Owners (2011-2015)." ; - dcterms:title "Supplementary Material used to characterise the spatial and temporal dynamics of magpie goose populations in the Kakadu Region NT and their cultural harvesting values" . - - dcterms:description "The Minyumai Indigenous Protected Areas (IPA) has provided an opportunity for the Bandjalang clan to re-engage with culture and language through country. Through land and fire management work, Bandjalang traditional owners have seen the restoration of native plants and animals that were thought to have been lost. Their return serves as a powerful reminder of the resilience of the Bandjalang people and enables them to better understand themselves, their culture, and their place in the world. The IPA programme has demonstrated successes across a broad range of outcome areas, effectively overcoming barriers to addressing Indigenous disadvantage and engaging Indigenous Australians in meaningful employment to achieve large scale conservation outcomes, thus aligning the interests of Indigenous Australians and the broader community." ; - dcterms:title "Social Return on Investment analysis of the Minyumai Indigenous Protected Area" . - - dcterms:description "Access still to be negotiated with the Museum." ; - dcterms:title "The Sandra Smith Archive" . - - dcterms:description "Strong demand but controversial." 
; - dcterms:title "Tindale/Horton map" . - - dcterms:description """TLCMap is a set of tools that work together for mapping Australian history and culture. - -Note that historical placenames in TLCmap is a HASS-I integration activity.""" ; - dcterms:title "Time Layered Cultural Map of Australia" . - - dcterms:description """The Victorian Perinatal Data Collection (VPDC) is a population-based surveillance system that collects for analysis comprehensive information on the health of mothers and babies, in order to contribute to improvements in their health. - -The VPDC contains information on obstetric conditions, procedures and outcomes, neonatal morbidity and congenital anomalies relating to every birth in Victoria. - -This data is reported annually to the AIHW as part of the National Perinatal Data Collection managed by the AIHW. The AIHW produces the annual report Australia’s mothers and babies, using the National Perinatal Data Collection and other data.""" ; - dcterms:title "The Victorian Perinatal database" . - - dcterms:description """This was nominated by Sandra Eades. Investigation, documentation and negotiation needed. - -https://www.datalinkage-wa.org.au/dlb-services/derived-indigenous-status-flag/ ?""" ; - dcterms:title "Western Australia Linked Data" . - - dcterms:description "In 2012, the remote Aboriginal community of Wilcannia in western NSW hosted the first Australian pilot of a Cuban mass adult literacy campaign model known as Yes I Can. The aim was to investigate the appropriateness of this model in Aboriginal Australia. Building on an intensive community development process of ‘socialisation and mobilisation’, sixteen community members with very low literacy graduated from the basic literacy course, with the majority continuing on into post-literacy activities, further training and/or employment." ; - dcterms:title "Aboriginal adult literacy campaign - Wilcannia Pilot Project Final Evaluation Report" . - - dcterms:description """The Yawuru Knowing Our Community (YKC) Household Survey was commissioned by the Nyamba Buru Yawuru Board of Directors in December 2010. This report and associated data base are the property of the NBY Board. The report was designed and produced by The Kimberley Institute, Centre for Aboriginal Economic Policy Research at The Australian National University, and the Broome Aboriginal community. -In September 2010, the NBY Board resolved to undertake a comprehensive population survey of Broome to inform the Board’s investment strategy, particularly on social housing.""" ; - dcterms:title "Yawuru Knowing Our Community Household Survey" . - - dcterms:description """Yumi Sabe is an Australian Kriol term that translates to 'we know', or, 'we have the knowledge'. - -Yumi Sabe is an Indigenous Knowledge Exchange that helps Indigenous communities, researchers and policy makers to access and use data to inform and improve policies and programs and demonstrate the complexity and diversity of Aboriginal and Torres Strait Islander peoples', research and culture. - -This is a beta product that is still being refined and developed. Please contact us if you have any issues or feedback.""" ; - dcterms:title "Indigenous Research Exchange Platform" . 
- - dcterms:description "The Australia's Indigenous land and forest estate (2020) is a continental spatial dataset that identifies and reports separately the individual attributes of Australia's Indigenous estate, namely the extent of land and forest over which Indigenous peoples and communities have ownership, management and co-management, or other special rights." ; - dcterms:title "Australia's Indigenous land and forest estate (2020)" . - - dcterms:description """Tandana is owned and managed by the National Aboriginal Cultural Institute Inc. It is Australia’s oldest Aboriginal-owned and managed multi-arts centre. -As Tandana is government funded it reports annually on the funding supplied and its distribution.""" ; - dcterms:title "Tandanya Annual Reporting Regulatory Data" . - - dcterms:description "Indigenous Areas (IAREs) are medium sized geographic areas built from whole Indigenous Locations. They are designed for the release and analysis of more detailed statistics for Aboriginal and Torres Strait Islander people. Whole Indigenous Areas aggregate to form Indigenous Regions."@en ; - dcterms:title "Indigenous Areas within the ASGS" . - - dcterms:description """This is a reference geospatial dataset developed by the Australian Bureau of Statistics which provides the most granular form of Indigenous Structure represented in the Australian Statistical Geography Standard (ASGS), currently at Edition 3 (2021). Indigenous Locations (ILOCs) are designed to allow the production and analysis of statistics relating to Aboriginal and Torres Strait Islander people with a high level of spatial accuracy, while also maintaining the confidentiality of individuals. It has been designed in consultation with the ABS Centre for Aboriginal and Torres Strait Islander Statistics to incorporate statistical and community requirements wherever possible. - -ILOCs are geographic areas built from whole Statistical Areas Level 1 (SA1s). They are designed to represent small Aboriginal and Torres Strait Islander communities (urban and rural) that are near each other or that share language, traditional borders or Native Title. They usually have a minimum population of about 90 people. In some cases, Indigenous Locations have a smaller Aboriginal and Torres Strait Islander population to meet statistical requirements or to better represent the local community. - -Where a community is too small for confidentiality requirements, it is combined with another, related population. Remaining Statistical Areas Level 1 are combined into larger areas, which will include a more dispersed Aboriginal and Torres Strait Islander population. - -In some cases, Aboriginal and Torres Strait Islander communities that are too small to be identified separately have been combined with other nearby and associated communities. This has resulted in some multi-part Indigenous Locations where related communities are represented as a single Indigenous Location but are geographically separate. This enables the release of Census of Population and Housing data and other data for Aboriginal and Torres Strait Islander communities in a meaningful way, while balancing confidentiality and statistical requirements. - -There are 1,139 ILOCs covering the whole of Australia without gaps or overlaps. Whole ILOCs aggregate to form Indigenous Areas (IAREs). Whole Indigenous Areas aggregate to form Indigenous Regions (IREGs). 
- -Indigenous Locations are identified by eight-digit hierarchical codes consisting of a one-digit State or Territory identifier, followed by a two-digit Indigenous Region identifier, a three-digit Indigenous Area identifier and finally a two-digit Indigenous Location identifier. Within each Indigenous Area, Indigenous Location identifiers are unique. When change occurs, old codes are retired and the next available identifier is assigned. - -Shapefiles for Indigenous Locations and other components of the ABS's Indigenous Structure are available: https://www.abs.gov.au/statistics/standards/australian-statistical-geography-standard-asgs-edition-3/jul2021-jun2026/access-and-downloads/digital-boundary-files - -This catalog entry refers to the latest ASGS release. For all releases refer to the ABS: https://www.abs.gov.au/statistics/standards/australian-statistical-geography-standard-asgs-edition-3"""@en ; - dcterms:title "Indigenous Locations within the Australian Statistical Geography Standard (ASGS) Edition 3" . - - dcterms:description "Indigenous Regions (IREGs) are large geographic areas built from whole Indigenous Areas and are based on historical boundaries. The larger population of Indigenous Regions enables highly detailed analysis."@en ; - dcterms:title "Indigenous Regions within the ASGS" . - - rdfs:label "Indigenous Data Network" ; - schema:description "The IDN is within the University of Melbourne. It was established in 2018 to support and coordinate the governance of Indigenous data for Aboriginal and Torres Strait Islander peoples and empower Aboriginal and Torres Strait Islander communities to decide their own local data priorities.", - "The Indigenous Data Network (IDN) was established in 2018 to support and coordinate the governance of Indigenous data for Aboriginal and Torres Strait Islander peoples and empower Aboriginal and Torres Strait Islander communities to decide their own local data priorities."@en ; - schema:name "Indigenous Data Network" . - - dcterms:description """Aboriginal and Torres Strait Islander collections, including the Mountford-Sheard Collection -The State Library has a significant and developing amount of specialist material relating to Aboriginal and Torres Strait Islander people including the Mountford-Sheard Collection. -The papers of the Mountford-Sheard Collection comprise an extensive collection of Charles P. Mountford's expedition journals, photographs, film, sound recordings, artworks, objects and research. The papers were compiled with the assistance and encouragement of friend and colleague Harold L Sheard. Mountford developed his appreciation of Australian Aboriginal people and their customs, beliefs and art over many years of expeditions, making it his life's work.""" ; - dcterms:title "Mountford-Sheard Collection" . - - dcterms:description "The Deebing Creek mission was founded by the Aboriginal Protection Society of Ipswich. Work started on the establishment of an Aboriginal mission at Deebing Creek around 1887. The correspondence records of the Home Secretary’s Office, Chief Protector of Aboriginals and the Southern Protector of Aboriginals Offices are a valuable source of information relating to Deebing Creek." ; - dcterms:title "Correspondence relating to Aboriginal and Torres Strait Islander people - Deebing Creek explanatory notes" .
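The ABS entry above pins down the ILOC identifier layout exactly: eight digits, split 1 / 2 / 3 / 2 across the State or Territory, Indigenous Region (IREG), Indigenous Area (IARE) and Indigenous Location (ILOC) levels. A minimal Python sketch of that decomposition follows; it is illustrative only and not part of this patch, and the function name and the sample code "10102901" are made up.

def parse_iloc_code(code: str) -> dict:
    """Split an eight-digit ASGS Indigenous Location code into the
    hierarchical components described in the ABS entry above."""
    if len(code) != 8 or not code.isdigit():
        raise ValueError("ILOC codes are exactly eight digits")
    return {
        "state_or_territory": code[0],     # one-digit State/Territory identifier
        "indigenous_region": code[1:3],    # two-digit IREG identifier
        "indigenous_area": code[3:6],      # three-digit IARE identifier
        "indigenous_location": code[6:8],  # two-digit ILOC identifier
    }

print(parse_iloc_code("10102901"))  # hypothetical code, shown only to illustrate the split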
- - dcterms:description """This dataset details the Dedicated Indigenous Protected Areas (IPA) across Australia through the implementation of the Indigenous Protected Areas Programme. These boundaries are not legally binding. -An Indigenous Protected Area (IPA) is an area of Indigenous-owned land or sea where traditional Indigenous owners have entered into an agreement with the Australian Government to promote biodiversity and cultural resource conservation- making up over over half of Australia's National Reserve System. - -Further information can be found at the website below. - -https://www.awe.gov.au/agriculture-land/land/indigenous-protected-areas""" ; - dcterms:title "Indigenous Protected Areas (IPA) - Dedicated" . +ns1:LowerLevelCatalog rdfs:label "Lower level catalog" ; + dcterms:identifier "exm:LowerLevelCatalog"^^prez:identifier ; + prez:link "/c/catalogs/exm:LowerLevelCatalog", + "/c/catalogs/exm:TopLevelCatalog/collections/exm:LowerLevelCatalog", + "/v/catalogs/exm:LowerLevelCatalog" . diff --git a/tests/data/catprez/expected_responses/top_level_catalog_listing_anot.ttl b/tests/data/catprez/expected_responses/top_level_catalog_listing_anot.ttl index e741a08d..dd7bcefa 100644 --- a/tests/data/catprez/expected_responses/top_level_catalog_listing_anot.ttl +++ b/tests/data/catprez/expected_responses/top_level_catalog_listing_anot.ttl @@ -12,8 +12,6 @@ dcterms:description rdfs:label "Description"@en ; dcterms:identifier rdfs:label "Identifier"@en ; dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . -dcterms:title rdfs:label "Title"@en . - rdf:type rdfs:label "type" . rdfs:label rdfs:label "label" . @@ -21,48 +19,23 @@ rdfs:label rdfs:label "label" . skos:definition rdfs:label "definition"@en ; skos:definition "A statement or formal explanation of the meaning of a concept."@en . - a dcat:Catalog ; - rdfs:label "Container catalog Catalogue" ; - dcterms:description "container catalog to be used for testing" ; - dcterms:identifier "pd:container-catalog"^^prez:identifier ; - prez:link "/catalogs/pd:container-catalog" . - - a dcat:Catalog ; - rdfs:label "IDN Demonstration Catalogue" ; - dcterms:description """The Indigenous Data Network's demonstration catalogue of datasets. This catalogue contains records of datasets in Australia, most of which have some relation to indigenous Australia. - -The purpose of this catalogue is not to act as a master catalogue of indigenous data in Australia to demonstrate improved metadata models and rating systems for data and metadata in order to improve indigenous data governance. - -The content of this catalogue conforms to the Indigenous Data Network's Catalogue Profile which is a profile of the DCAT, SKOS and PROV data models."""@en ; - dcterms:identifier "pd:democat"^^prez:identifier ; - dcterms:title "IDN Demonstration Catalogue" ; - prez:link "/catalogs/pd:democat" . - - a dcat:Catalog ; - dcterms:description """The Indigenous Data Network's catalogue of Agents. This catalogue contains instances of Agents - People and Organisations - related to the holding of indigenous data. 
This includes non-indigenous Agents. - -This catalogue extends standard Agent information to include properties useful to understand the indigeneity of Agents: whether or not, or to what degree, they are indigenous"""@en ; - dcterms:identifier "dtst:agents"^^prez:identifier ; - dcterms:title "IDN Agents Catalogue" ; - prez:link "/catalogs/dtst:agents" . - - a dcat:Catalog ; - dcterms:description """The Indigenous Data Network's catalogue of datasets. This catalogue contains records of datasets in Australia, most of which have some relation to indigenous Australia. +<https://example.com/TopLevelCatalog> a dcat:Catalog ; + rdfs:label "Top level catalog" ; + dcterms:identifier "exm:TopLevelCatalog"^^prez:identifier ; + prez:link "/c/catalogs/exm:TopLevelCatalog", + "/v/catalogs/exm:TopLevelCatalog" . -The purpose of this catalogue is not to act as a master catalogue of indigenous data in Australia but to demonstrate improved metadata models and rating systems for data and metadata in order to improve indigenous data governance. +<https://example.com/TopLevelCatalogTwo> a dcat:Catalog ; + rdfs:label "amazing catalog" ; + dcterms:identifier "exm:TopLevelCatalogTwo"^^prez:identifier ; + prez:link "/c/catalogs/exm:TopLevelCatalogTwo", + "/v/catalogs/exm:TopLevelCatalogTwo" . -The content of this catalogue conforms to the Indigenous Data Network's Catalogue Profile which is a profile of the DCAT, SKOS and PROV data models."""@en ; - dcterms:identifier "dtst:democat"^^prez:identifier ; - dcterms:title "IDN Datasets Catalogue" ; - prez:link "/catalogs/dtst:democat" . +prez:count rdfs:label "count" . - a dcat:Catalog ; - dcterms:description "This is the system catalogue implemented by this instance of CatPrez that lists all its other Catalog instances"@en ; - dcterms:identifier "sys:catprez"^^prez:identifier ; - dcterms:title "CatPrez System Catalogue" ; - prez:link "/catalogs/sys:catprez" . +prez:link rdfs:label "link" . dcat:Catalog rdfs:label "Catalog"@en ; skos:definition "A curated collection of metadata about resources (e.g., datasets and data services in the context of a data catalog)."@en ; - prez:count 5 . + prez:count 2 . diff --git a/tests/data/catprez/input/catalog.ttl b/tests/data/catprez/input/catprez.ttl similarity index 52% rename from tests/data/catprez/input/catalog.ttl rename to tests/data/catprez/input/catprez.ttl index 0d0d01bf..f599aeed 100644 --- a/tests/data/catprez/input/catalog.ttl +++ b/tests/data/catprez/input/catprez.ttl @@ -1,20 +1,38 @@ PREFIX dcat: <http://www.w3.org/ns/dcat#> PREFIX dcterms: <http://purl.org/dc/terms/> -PREFIX ex: +PREFIX ex: <https://example.com/> PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> ex:TopLevelCatalog a dcat:Catalog ; rdfs:label "Top level catalog" ; dcterms:hasPart ex:LowerLevelCatalog ; ex:property "top level catalog property" ; - . +. ex:LowerLevelCatalog a dcat:Catalog ; rdfs:label "Lower level catalog" ; dcterms:hasPart ex:Resource ; - ex:property "lower level catalog property" . + ex:property "lower level catalog property" +. ex:Resource a dcat:Resource ; rdfs:label "Resource" ; ex:property "resource property" ; +. + +ex:TopLevelCatalogTwo a dcat:Catalog ; + rdfs:label "amazing catalog" ; + dcterms:hasPart ex:LowerLevelCatalogTwo ; + ex:property "complete" ; +. + +ex:LowerLevelCatalogTwo a dcat:Catalog ; + rdfs:label "rightful" ; + dcterms:hasPart ex:ResourceTwo ; + ex:property "exposure" +. + +ex:ResourceTwo a dcat:Resource ; + rdfs:label "salty" ; + ex:property "proficient" ; .
\ No newline at end of file diff --git a/tests/data/cql/input/geo_intersects.json b/tests/data/cql/input/geo_intersects.json new file mode 100644 index 00000000..56848d8f --- /dev/null +++ b/tests/data/cql/input/geo_intersects.json @@ -0,0 +1,35 @@ +{ + "op": "s_intersects", + "args": [ + { + "property": "http://www.w3.org/ns/shacl#this" + }, + { + "type": "Polygon", + "coordinates": [ + [ + [ + 153.03375, + -27.42 + ], + [ + 153.16, + -27.3217012 + ], + [ + 153.03375, + -27.2234024 + ], + [ + 152.9075, + -27.3217012 + ], + [ + 153.03375, + -27.42 + ] + ] + ] + } + ] +} \ No newline at end of file diff --git a/tests/test_endpoints_catprez.py b/tests/test_endpoints_catprez.py index f2a96b65..b022649f 100644 --- a/tests/test_endpoints_catprez.py +++ b/tests/test_endpoints_catprez.py @@ -62,7 +62,7 @@ def override_get_repo(): @pytest.fixture(scope="session") def a_catalog_link(client): # get link for first catalog - r = client.get("/catalogs") + r = client.get("/c/catalogs") g = Graph().parse(data=r.text) member_uri = g.value(None, RDF.type, DCAT.Catalog) link = g.value(member_uri, URIRef(f"https://prez.dev/link", None)) @@ -81,59 +81,27 @@ def a_resource_link(client, a_catalog_link): def test_catalog_listing_anot(client): r = client.get( - f"/catalogs?_mediatype=text/anot+turtle&_profile=prez:OGCListingProfile" + f"/c/catalogs?_mediatype=text/turtle&_profile=prez:OGCListingProfile" ) response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/catprez/expected_responses/top_level_catalog_listing_anot.ttl" - ) - assert isomorphic(response_graph, expected_graph), print( - f"RESPONSE GRAPH\n{response_graph.serialize()}," - f"EXPECTED GRAPH\n{expected_graph.serialize()}", - f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", - f"EXTRA TRIPLES\n{(response_graph - expected_graph).serialize()}", - ) + expected_response_1 = (URIRef("https://example.com/TopLevelCatalog"), RDF.type, DCAT.Catalog) + expected_response_2 = (URIRef("https://example.com/TopLevelCatalogTwo"), RDF.type, DCAT.Catalog) + assert next(response_graph.triples(expected_response_1)) + assert next(response_graph.triples(expected_response_2)) def test_catalog_anot(client, a_catalog_link): - r = client.get(f"{a_catalog_link}?_mediatype=text/anot+turtle") + r = client.get(f"{a_catalog_link}?_mediatype=text/turtle") response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/catprez/expected_responses/top_level_catalog_anot.ttl" - ) - assert isomorphic(response_graph, expected_graph), print( - f"RESPONSE GRAPH\n{response_graph.serialize()}," - f"EXPECTED GRAPH\n{expected_graph.serialize()}", - f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", - f"EXTRA TRIPLES\n{(response_graph - expected_graph).serialize()}", - ) + expected_response = (URIRef("https://example.com/TopLevelCatalog"), RDF.type, DCAT.Catalog) + assert next(response_graph.triples(expected_response)) -def test_resource_listing_anot(client, a_catalog_link): - r = client.get(f"{a_catalog_link}/collections?_mediatype=text/anot+turtle") +def test_lower_level_listing_anot(client, a_catalog_link): + r = client.get(f"{a_catalog_link}/collections?_mediatype=text/turtle") response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/catprez/expected_responses/resource_listing_anot.ttl" - ) - assert isomorphic(response_graph, expected_graph), print( - f"RESPONSE 
GRAPH\n{response_graph.serialize()}," - f"EXPECTED GRAPH\n{expected_graph.serialize()}", - f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", - f"EXTRA TRIPLES\n{(response_graph - expected_graph).serialize()}", - ) + expected_response = (URIRef("https://example.com/LowerLevelCatalog"), RDF.type, DCAT.Catalog) + assert next(response_graph.triples(expected_response)) + -def test_resource_anot(client, a_resource_link): - r = client.get(f"{a_resource_link}?_mediatype=text/anot+turtle") - response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/catprez/expected_responses/resource_anot.ttl" - ) - assert response_graph.isomorphic(expected_graph), print( - f"Missing:{(expected_graph - response_graph).serialize()}" - f"Extra:{(response_graph - expected_graph).serialize()}" - ) From 81a1b5e937e0b6fc3f73d0dbfcf231f8c36485be Mon Sep 17 00:00:00 2001 From: davidhabgood Date: Mon, 22 Jan 2024 10:38:53 +1000 Subject: [PATCH 05/25] changes towards a single set of endpoints --- .dockerignore | 0 .env-template | 0 .github/workflows/on_pr_to_main.yaml | 0 .github/workflows/on_push_to_feature.yaml | 0 .github/workflows/on_push_to_main.yaml | 0 .github/workflows/on_release.yaml | 0 .gitignore | 0 .pre-commit-config.yaml | 0 Dockerfile | 0 LICENSE | 0 README-Dev.md | 0 README.md | 0 changelog.md | 0 connegp-0.1.6-py3-none-any.whl | Bin demo/docker-compose.yml | 0 demo/prez-v4-backend/config.ttl | 0 demo/prez-v4-backend/docker-compose.yml | 0 demo/prez-v4-backend/readme.md | 0 dev/dev-config.ttl | 0 dev/dev-setup.py | 0 main.py | 0 poetry.lock | 0 poetry.toml | 0 prez-logo.png | Bin prez/app.py | 11 +- prez/bnode.py | 0 prez/cache.py | 0 prez/config.py | 1 - prez/dependencies.py | 0 prez/models/model_exceptions.py | 0 prez/models/object_item.py | 0 prez/models/profiles_and_mediatypes.py | 37 +- prez/models/profiles_listings.py | 0 prez/queries/identifier.py | 0 prez/queries/object.py | 0 .../reference_data/context_ontologies/dcat.nq | 0 .../context_ontologies/dcterms.nq | 0 prez/reference_data/context_ontologies/geo.nq | 0 .../context_ontologies/prez-ontology.nq | 0 prez/reference_data/context_ontologies/rdf.nq | 0 .../reference_data/context_ontologies/rdfs.nq | 0 .../context_ontologies/schema.nq | 0 .../schemaorg-current-https.nq | 0 .../reference_data/context_ontologies/skos.nq | 0 .../endpoints/cql_endpoints.ttl | 2 +- .../endpoints/extended_ogc_records.ttl | 85 ++++ ...ints.ttl => ogc_catprez_endpoints.ttl.old} | 0 ...ts.ttl => ogc_spaceprez_endpoints.ttl.old} | 0 ...ints.ttl => ogc_vocprez_endpoints.ttl.old} | 0 .../endpoints/system_endpoints.ttl | 0 .../endpoints/vocprez_endpoints.ttl.unused | 0 .../reference_data/prefixes/all.file.vann.ttl | 0 prez/reference_data/prefixes/standard.ttl | 0 prez/reference_data/prefixes/testing.ttl | 0 prez/reference_data/prez_ns.py | 0 prez/reference_data/profiles/dd.ttl | 0 .../profiles/ogc_records_profile.ttl | 20 +- .../profiles/prez_default_profiles.ttl | 0 .../profiles/spaceprez_default_profiles.ttl | 0 .../search_methods/search_default.ttl | 0 .../search_methods/search_exact.ttl | 0 .../search_methods/search_readme.md | 0 .../search_methods/search_skos_preflabel.ttl | 0 .../search_methods/search_skos_weighted.ttl | 0 prez/renderers/csv_renderer.py | 0 prez/renderers/json_renderer.py | 0 prez/renderers/renderer.py | 0 prez/response.py | 0 prez/routers/cql.py | 0 prez/routers/identifier.py | 0 prez/routers/management.py | 0 prez/routers/object.py | 6 +- .../routers/{ogc_catprez.py => 
ogc_router.py} | 36 +- ...{ogc_spaceprez.py => ogc_spaceprez.py.old} | 33 -- .../{ogc_vocprez.py => ogc_vocprez.py.old} | 0 prez/routers/profiles.py | 0 prez/routers/search.py | 5 +- prez/routers/sparql.py | 10 +- prez/routers/vocprez.py.unused | 0 prez/services/app_service.py | 21 - prez/services/connegp_service.py | 0 prez/services/cql_search.py | 0 prez/services/curie_functions.py | 0 prez/services/exception_catchers.py | 0 prez/services/generate_profiles.py | 44 +- prez/services/link_generation.py | 0 prez/services/listings.py | 6 +- prez/services/model_methods.py | 0 prez/services/objects.py | 5 +- prez/services/prez_logging.py | 0 prez/sparql/methods.py | 0 prez/sparql/objects_listings.py | 6 + prez/sparql/resource.py | 0 prez/sparql/search_query.py | 0 prez/url.py | 0 pyproject.toml | 0 temp/cql2sparql.py | 0 temp/cql_sparql_reference.py | 0 temp/default_cql_context.json | 0 temp/grammar.py | 0 temp/shacl2sparql.py | 1 - temp/test_search.py | 0 test_data/catprez.ttl | 0 test_data/object_catalog_bblocks_catalog.ttl | 12 - test_data/object_vocab_api_bblocks.ttl | 38 -- test_data/object_vocab_datatype_bblocks.ttl | 38 -- test_data/object_vocab_parameter_bblocks.ttl | 61 --- test_data/object_vocab_schema_bblocks.ttl | 414 ------------------ test_data/sandgate.ttl | 296 ------------- test_data/spaceprez.ttl | 0 test_data/vocprez.ttl | 0 tests/__init__.py | 0 tests/_test_cql.py | 0 tests/_test_curie_generation.py | 0 tests/conftest.py | 0 tests/data/bnode_depth/bnode_depth-1.ttl | 0 tests/data/bnode_depth/bnode_depth-2-2.ttl | 0 tests/data/bnode_depth/bnode_depth-2.ttl | 0 tests/data/bnode_depth/bnode_depth-4.ttl | 0 .../expected_responses/resource_anot.ttl | 0 .../resource_listing_anot.ttl | 0 .../top_level_catalog_anot.ttl | 0 .../top_level_catalog_listing_anot.ttl | 0 tests/data/catprez/input/catprez.ttl | 0 tests/data/cql/input/example01.json | 0 tests/data/cql/input/example02.json | 0 tests/data/cql/input/example03.json | 0 tests/data/cql/input/example05a.json | 0 tests/data/cql/input/example05b.json | 0 tests/data/cql/input/example06b.json | 0 tests/data/cql/input/example07.json | 0 tests/data/cql/input/example08.json | 0 tests/data/cql/input/example09.json | 0 tests/data/cql/input/example10.json | 0 tests/data/cql/input/example11.json | 0 tests/data/cql/input/example12.json | 0 tests/data/cql/input/example14.json | 0 tests/data/cql/input/example15.json | 0 tests/data/cql/input/example17.json | 0 tests/data/cql/input/example29.json | 0 tests/data/cql/input/example31.json | 0 tests/data/cql/input/example32.json | 0 tests/data/cql/input/example33.json | 0 tests/data/cql/input/example34.json | 0 tests/data/cql/input/example35.json | 0 tests/data/cql/input/example39.json | 0 tests/data/cql/input/geo_intersects.json | 0 tests/data/object/expected_responses/fc.ttl | 0 .../object/expected_responses/feature.ttl | 0 tests/data/profiles/remote_profile.ttl | 0 .../filter_to_focus_search.ttl | 0 .../focus_to_filter_search.ttl | 0 .../expected_responses/dataset_anot.ttl | 0 .../dataset_listing_anot.ttl | 0 .../expected_responses/feature_anot.ttl | 0 .../feature_collection_anot.ttl | 0 .../feature_collection_listing_anot.ttl | 0 .../feature_listing_anot.ttl | 0 .../data/spaceprez/input/geofabric_small.ttl | 0 tests/data/spaceprez/input/gnaf_small.ttl | 0 tests/data/spaceprez/input/labels.ttl | 0 .../data/spaceprez/input/multiple_object.ttl | 0 .../input/redirect-foaf-homepage.ttl | 0 tests/data/spaceprez/input/sandgate.ttl | 0 .../input/sandgate/catchments.geojson | 0 
.../input/sandgate/facilities.geojson | 0 .../spaceprez/input/sandgate/floods.geojson | 0 .../spaceprez/input/sandgate/roads.geojson | 0 .../spaceprez/input/sandgate/sandgate.json | 0 .../beddingsurfacestructure_top_concepts.ttl | 0 .../collection_listing_anot.ttl | 0 .../collection_listing_item.ttl | 0 .../expected_responses/concept-coal.ttl | 0 .../concept-open-cut-coal-mining.ttl | 0 .../concept-with-2-narrower-concepts.ttl | 0 .../expected_responses/concept_anot.ttl | 0 .../concept_scheme_no_children.ttl | 0 ...cept_scheme_top_concepts_with_children.ttl | 0 .../concept_scheme_with_children.ttl | 0 .../data/vocprez/expected_responses/empty.ttl | 0 .../expected_responses/vocab_listing_anot.ttl | 0 .../vocprez/input/absolute-collection.ttl | 0 tests/data/vocprez/input/alteration-types.ttl | 0 .../vocprez/input/beddingsurfacestructure.ttl | 0 .../input/borehole-purpose-no-children.ttl | 0 tests/data/vocprez/input/borehole-purpose.ttl | 0 .../data/vocprez/input/catalog-of-vocabs.ttl | 0 tests/data/vocprez/input/contacttype.ttl | 0 .../data/vocprez/input/dublin_core_terms.ttl | 0 tests/data/vocprez/input/reg-status.ttl | 0 .../vocprez/input/vocab-derivation-modes.ttl | 0 tests/test_bnode.py | 0 tests/test_count.py | 0 tests/test_curie_endpoint.py | 0 tests/test_dd_profiles.py | 0 tests/test_endpoints_cache.py | 0 tests/test_endpoints_catprez.py | 29 +- tests/test_endpoints_management.py | 0 tests/test_endpoints_object.py | 0 tests/test_endpoints_ok.py | 0 tests/test_endpoints_profiles.py | 0 tests/test_endpoints_spaceprez.py | 125 ++---- tests/test_endpoints_vocprez.py | 0 tests/test_redirect_endpoint.py | 0 tests/test_search.py | 0 tests/test_sparql.py | 0 206 files changed, 242 insertions(+), 1100 deletions(-) mode change 100644 => 100755 .dockerignore mode change 100644 => 100755 .env-template mode change 100644 => 100755 .github/workflows/on_pr_to_main.yaml mode change 100644 => 100755 .github/workflows/on_push_to_feature.yaml mode change 100644 => 100755 .github/workflows/on_push_to_main.yaml mode change 100644 => 100755 .github/workflows/on_release.yaml mode change 100644 => 100755 .gitignore mode change 100644 => 100755 .pre-commit-config.yaml mode change 100644 => 100755 Dockerfile mode change 100644 => 100755 LICENSE mode change 100644 => 100755 README-Dev.md mode change 100644 => 100755 README.md mode change 100644 => 100755 changelog.md mode change 100644 => 100755 connegp-0.1.6-py3-none-any.whl mode change 100644 => 100755 demo/docker-compose.yml mode change 100644 => 100755 demo/prez-v4-backend/config.ttl mode change 100644 => 100755 demo/prez-v4-backend/docker-compose.yml mode change 100644 => 100755 demo/prez-v4-backend/readme.md mode change 100644 => 100755 dev/dev-config.ttl mode change 100644 => 100755 dev/dev-setup.py mode change 100644 => 100755 main.py mode change 100644 => 100755 poetry.lock mode change 100644 => 100755 poetry.toml mode change 100644 => 100755 prez-logo.png mode change 100644 => 100755 prez/app.py mode change 100644 => 100755 prez/bnode.py mode change 100644 => 100755 prez/cache.py mode change 100644 => 100755 prez/config.py mode change 100644 => 100755 prez/dependencies.py mode change 100644 => 100755 prez/models/model_exceptions.py mode change 100644 => 100755 prez/models/object_item.py mode change 100644 => 100755 prez/models/profiles_and_mediatypes.py mode change 100644 => 100755 prez/models/profiles_listings.py mode change 100644 => 100755 prez/queries/identifier.py mode change 100644 => 100755 prez/queries/object.py mode change 100644 => 100755 
prez/reference_data/context_ontologies/dcat.nq mode change 100644 => 100755 prez/reference_data/context_ontologies/dcterms.nq mode change 100644 => 100755 prez/reference_data/context_ontologies/geo.nq mode change 100644 => 100755 prez/reference_data/context_ontologies/prez-ontology.nq mode change 100644 => 100755 prez/reference_data/context_ontologies/rdf.nq mode change 100644 => 100755 prez/reference_data/context_ontologies/rdfs.nq mode change 100644 => 100755 prez/reference_data/context_ontologies/schema.nq mode change 100644 => 100755 prez/reference_data/context_ontologies/schemaorg-current-https.nq mode change 100644 => 100755 prez/reference_data/context_ontologies/skos.nq mode change 100644 => 100755 prez/reference_data/endpoints/cql_endpoints.ttl create mode 100755 prez/reference_data/endpoints/extended_ogc_records.ttl rename prez/reference_data/endpoints/{ogc_catprez_endpoints.ttl => ogc_catprez_endpoints.ttl.old} (100%) mode change 100644 => 100755 rename prez/reference_data/endpoints/{ogc_spaceprez_endpoints.ttl => ogc_spaceprez_endpoints.ttl.old} (100%) mode change 100644 => 100755 rename prez/reference_data/endpoints/{ogc_vocprez_endpoints.ttl => ogc_vocprez_endpoints.ttl.old} (100%) mode change 100644 => 100755 mode change 100644 => 100755 prez/reference_data/endpoints/system_endpoints.ttl mode change 100644 => 100755 prez/reference_data/endpoints/vocprez_endpoints.ttl.unused mode change 100644 => 100755 prez/reference_data/prefixes/all.file.vann.ttl mode change 100644 => 100755 prez/reference_data/prefixes/standard.ttl mode change 100644 => 100755 prez/reference_data/prefixes/testing.ttl mode change 100644 => 100755 prez/reference_data/prez_ns.py mode change 100644 => 100755 prez/reference_data/profiles/dd.ttl mode change 100644 => 100755 prez/reference_data/profiles/ogc_records_profile.ttl mode change 100644 => 100755 prez/reference_data/profiles/prez_default_profiles.ttl mode change 100644 => 100755 prez/reference_data/profiles/spaceprez_default_profiles.ttl mode change 100644 => 100755 prez/reference_data/search_methods/search_default.ttl mode change 100644 => 100755 prez/reference_data/search_methods/search_exact.ttl mode change 100644 => 100755 prez/reference_data/search_methods/search_readme.md mode change 100644 => 100755 prez/reference_data/search_methods/search_skos_preflabel.ttl mode change 100644 => 100755 prez/reference_data/search_methods/search_skos_weighted.ttl mode change 100644 => 100755 prez/renderers/csv_renderer.py mode change 100644 => 100755 prez/renderers/json_renderer.py mode change 100644 => 100755 prez/renderers/renderer.py mode change 100644 => 100755 prez/response.py mode change 100644 => 100755 prez/routers/cql.py mode change 100644 => 100755 prez/routers/identifier.py mode change 100644 => 100755 prez/routers/management.py mode change 100644 => 100755 prez/routers/object.py rename prez/routers/{ogc_catprez.py => ogc_router.py} (78%) mode change 100644 => 100755 rename prez/routers/{ogc_spaceprez.py => ogc_spaceprez.py.old} (83%) mode change 100644 => 100755 rename prez/routers/{ogc_vocprez.py => ogc_vocprez.py.old} (100%) mode change 100644 => 100755 mode change 100644 => 100755 prez/routers/profiles.py mode change 100644 => 100755 prez/routers/search.py mode change 100644 => 100755 prez/routers/sparql.py mode change 100644 => 100755 prez/routers/vocprez.py.unused mode change 100644 => 100755 prez/services/app_service.py mode change 100644 => 100755 prez/services/connegp_service.py mode change 100644 => 100755 prez/services/cql_search.py mode 
change 100644 => 100755 prez/services/curie_functions.py mode change 100644 => 100755 prez/services/exception_catchers.py mode change 100644 => 100755 prez/services/generate_profiles.py mode change 100644 => 100755 prez/services/link_generation.py mode change 100644 => 100755 prez/services/listings.py mode change 100644 => 100755 prez/services/model_methods.py mode change 100644 => 100755 prez/services/objects.py mode change 100644 => 100755 prez/services/prez_logging.py mode change 100644 => 100755 prez/sparql/methods.py mode change 100644 => 100755 prez/sparql/objects_listings.py mode change 100644 => 100755 prez/sparql/resource.py mode change 100644 => 100755 prez/sparql/search_query.py mode change 100644 => 100755 prez/url.py mode change 100644 => 100755 pyproject.toml mode change 100644 => 100755 temp/cql2sparql.py mode change 100644 => 100755 temp/cql_sparql_reference.py mode change 100644 => 100755 temp/default_cql_context.json mode change 100644 => 100755 temp/grammar.py mode change 100644 => 100755 temp/shacl2sparql.py mode change 100644 => 100755 temp/test_search.py mode change 100644 => 100755 test_data/catprez.ttl delete mode 100644 test_data/object_catalog_bblocks_catalog.ttl delete mode 100644 test_data/object_vocab_api_bblocks.ttl delete mode 100644 test_data/object_vocab_datatype_bblocks.ttl delete mode 100644 test_data/object_vocab_parameter_bblocks.ttl delete mode 100644 test_data/object_vocab_schema_bblocks.ttl delete mode 100644 test_data/sandgate.ttl mode change 100644 => 100755 test_data/spaceprez.ttl mode change 100644 => 100755 test_data/vocprez.ttl mode change 100644 => 100755 tests/__init__.py mode change 100644 => 100755 tests/_test_cql.py mode change 100644 => 100755 tests/_test_curie_generation.py mode change 100644 => 100755 tests/conftest.py mode change 100644 => 100755 tests/data/bnode_depth/bnode_depth-1.ttl mode change 100644 => 100755 tests/data/bnode_depth/bnode_depth-2-2.ttl mode change 100644 => 100755 tests/data/bnode_depth/bnode_depth-2.ttl mode change 100644 => 100755 tests/data/bnode_depth/bnode_depth-4.ttl mode change 100644 => 100755 tests/data/catprez/expected_responses/resource_anot.ttl mode change 100644 => 100755 tests/data/catprez/expected_responses/resource_listing_anot.ttl mode change 100644 => 100755 tests/data/catprez/expected_responses/top_level_catalog_anot.ttl mode change 100644 => 100755 tests/data/catprez/expected_responses/top_level_catalog_listing_anot.ttl mode change 100644 => 100755 tests/data/catprez/input/catprez.ttl mode change 100644 => 100755 tests/data/cql/input/example01.json mode change 100644 => 100755 tests/data/cql/input/example02.json mode change 100644 => 100755 tests/data/cql/input/example03.json mode change 100644 => 100755 tests/data/cql/input/example05a.json mode change 100644 => 100755 tests/data/cql/input/example05b.json mode change 100644 => 100755 tests/data/cql/input/example06b.json mode change 100644 => 100755 tests/data/cql/input/example07.json mode change 100644 => 100755 tests/data/cql/input/example08.json mode change 100644 => 100755 tests/data/cql/input/example09.json mode change 100644 => 100755 tests/data/cql/input/example10.json mode change 100644 => 100755 tests/data/cql/input/example11.json mode change 100644 => 100755 tests/data/cql/input/example12.json mode change 100644 => 100755 tests/data/cql/input/example14.json mode change 100644 => 100755 tests/data/cql/input/example15.json mode change 100644 => 100755 tests/data/cql/input/example17.json mode change 100644 => 100755 
tests/data/cql/input/example29.json mode change 100644 => 100755 tests/data/cql/input/example31.json mode change 100644 => 100755 tests/data/cql/input/example32.json mode change 100644 => 100755 tests/data/cql/input/example33.json mode change 100644 => 100755 tests/data/cql/input/example34.json mode change 100644 => 100755 tests/data/cql/input/example35.json mode change 100644 => 100755 tests/data/cql/input/example39.json mode change 100644 => 100755 tests/data/cql/input/geo_intersects.json mode change 100644 => 100755 tests/data/object/expected_responses/fc.ttl mode change 100644 => 100755 tests/data/object/expected_responses/feature.ttl mode change 100644 => 100755 tests/data/profiles/remote_profile.ttl mode change 100644 => 100755 tests/data/search/expected_responses/filter_to_focus_search.ttl mode change 100644 => 100755 tests/data/search/expected_responses/focus_to_filter_search.ttl mode change 100644 => 100755 tests/data/spaceprez/expected_responses/dataset_anot.ttl mode change 100644 => 100755 tests/data/spaceprez/expected_responses/dataset_listing_anot.ttl mode change 100644 => 100755 tests/data/spaceprez/expected_responses/feature_anot.ttl mode change 100644 => 100755 tests/data/spaceprez/expected_responses/feature_collection_anot.ttl mode change 100644 => 100755 tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl mode change 100644 => 100755 tests/data/spaceprez/expected_responses/feature_listing_anot.ttl mode change 100644 => 100755 tests/data/spaceprez/input/geofabric_small.ttl mode change 100644 => 100755 tests/data/spaceprez/input/gnaf_small.ttl mode change 100644 => 100755 tests/data/spaceprez/input/labels.ttl mode change 100644 => 100755 tests/data/spaceprez/input/multiple_object.ttl mode change 100644 => 100755 tests/data/spaceprez/input/redirect-foaf-homepage.ttl mode change 100644 => 100755 tests/data/spaceprez/input/sandgate.ttl mode change 100644 => 100755 tests/data/spaceprez/input/sandgate/catchments.geojson mode change 100644 => 100755 tests/data/spaceprez/input/sandgate/facilities.geojson mode change 100644 => 100755 tests/data/spaceprez/input/sandgate/floods.geojson mode change 100644 => 100755 tests/data/spaceprez/input/sandgate/roads.geojson mode change 100644 => 100755 tests/data/spaceprez/input/sandgate/sandgate.json mode change 100644 => 100755 tests/data/vocprez/expected_responses/beddingsurfacestructure_top_concepts.ttl mode change 100644 => 100755 tests/data/vocprez/expected_responses/collection_listing_anot.ttl mode change 100644 => 100755 tests/data/vocprez/expected_responses/collection_listing_item.ttl mode change 100644 => 100755 tests/data/vocprez/expected_responses/concept-coal.ttl mode change 100644 => 100755 tests/data/vocprez/expected_responses/concept-open-cut-coal-mining.ttl mode change 100644 => 100755 tests/data/vocprez/expected_responses/concept-with-2-narrower-concepts.ttl mode change 100644 => 100755 tests/data/vocprez/expected_responses/concept_anot.ttl mode change 100644 => 100755 tests/data/vocprez/expected_responses/concept_scheme_no_children.ttl mode change 100644 => 100755 tests/data/vocprez/expected_responses/concept_scheme_top_concepts_with_children.ttl mode change 100644 => 100755 tests/data/vocprez/expected_responses/concept_scheme_with_children.ttl mode change 100644 => 100755 tests/data/vocprez/expected_responses/empty.ttl mode change 100644 => 100755 tests/data/vocprez/expected_responses/vocab_listing_anot.ttl mode change 100644 => 100755 tests/data/vocprez/input/absolute-collection.ttl mode change 
100644 => 100755 tests/data/vocprez/input/alteration-types.ttl mode change 100644 => 100755 tests/data/vocprez/input/beddingsurfacestructure.ttl mode change 100644 => 100755 tests/data/vocprez/input/borehole-purpose-no-children.ttl mode change 100644 => 100755 tests/data/vocprez/input/borehole-purpose.ttl mode change 100644 => 100755 tests/data/vocprez/input/catalog-of-vocabs.ttl mode change 100644 => 100755 tests/data/vocprez/input/contacttype.ttl mode change 100644 => 100755 tests/data/vocprez/input/dublin_core_terms.ttl mode change 100644 => 100755 tests/data/vocprez/input/reg-status.ttl mode change 100644 => 100755 tests/data/vocprez/input/vocab-derivation-modes.ttl mode change 100644 => 100755 tests/test_bnode.py mode change 100644 => 100755 tests/test_count.py mode change 100644 => 100755 tests/test_curie_endpoint.py mode change 100644 => 100755 tests/test_dd_profiles.py mode change 100644 => 100755 tests/test_endpoints_cache.py mode change 100644 => 100755 tests/test_endpoints_catprez.py mode change 100644 => 100755 tests/test_endpoints_management.py mode change 100644 => 100755 tests/test_endpoints_object.py mode change 100644 => 100755 tests/test_endpoints_ok.py mode change 100644 => 100755 tests/test_endpoints_profiles.py mode change 100644 => 100755 tests/test_endpoints_spaceprez.py mode change 100644 => 100755 tests/test_endpoints_vocprez.py mode change 100644 => 100755 tests/test_redirect_endpoint.py mode change 100644 => 100755 tests/test_search.py mode change 100644 => 100755 tests/test_sparql.py diff --git a/.dockerignore b/.dockerignore old mode 100644 new mode 100755 diff --git a/.env-template b/.env-template old mode 100644 new mode 100755 diff --git a/.github/workflows/on_pr_to_main.yaml b/.github/workflows/on_pr_to_main.yaml old mode 100644 new mode 100755 diff --git a/.github/workflows/on_push_to_feature.yaml b/.github/workflows/on_push_to_feature.yaml old mode 100644 new mode 100755 diff --git a/.github/workflows/on_push_to_main.yaml b/.github/workflows/on_push_to_main.yaml old mode 100644 new mode 100755 diff --git a/.github/workflows/on_release.yaml b/.github/workflows/on_release.yaml old mode 100644 new mode 100755 diff --git a/.gitignore b/.gitignore old mode 100644 new mode 100755 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml old mode 100644 new mode 100755 diff --git a/Dockerfile b/Dockerfile old mode 100644 new mode 100755 diff --git a/LICENSE b/LICENSE old mode 100644 new mode 100755 diff --git a/README-Dev.md b/README-Dev.md old mode 100644 new mode 100755 diff --git a/README.md b/README.md old mode 100644 new mode 100755 diff --git a/changelog.md b/changelog.md old mode 100644 new mode 100755 diff --git a/connegp-0.1.6-py3-none-any.whl b/connegp-0.1.6-py3-none-any.whl old mode 100644 new mode 100755 diff --git a/demo/docker-compose.yml b/demo/docker-compose.yml old mode 100644 new mode 100755 diff --git a/demo/prez-v4-backend/config.ttl b/demo/prez-v4-backend/config.ttl old mode 100644 new mode 100755 diff --git a/demo/prez-v4-backend/docker-compose.yml b/demo/prez-v4-backend/docker-compose.yml old mode 100644 new mode 100755 diff --git a/demo/prez-v4-backend/readme.md b/demo/prez-v4-backend/readme.md old mode 100644 new mode 100755 diff --git a/dev/dev-config.ttl b/dev/dev-config.ttl old mode 100644 new mode 100755 diff --git a/dev/dev-setup.py b/dev/dev-setup.py old mode 100644 new mode 100755 diff --git a/main.py b/main.py old mode 100644 new mode 100755 diff --git a/poetry.lock b/poetry.lock old mode 100644 new mode 100755 diff --git 
a/poetry.toml b/poetry.toml old mode 100644 new mode 100755 diff --git a/prez-logo.png b/prez-logo.png old mode 100644 new mode 100755 diff --git a/prez/app.py b/prez/app.py old mode 100644 new mode 100755 index 769ad17d..75332fe9 --- a/prez/app.py +++ b/prez/app.py @@ -24,9 +24,7 @@ from prez.routers.identifier import router as identifier_router from prez.routers.management import router as management_router from prez.routers.object import router as object_router -from prez.routers.ogc_vocprez import router as vocprez_router -from prez.routers.ogc_spaceprez import router as spaceprez_router -from prez.routers.ogc_catprez import router as catprez_router +from prez.routers.ogc_router import router as ogc_records_router from prez.routers.profiles import router as profiles_router from prez.routers.search import router as search_router from prez.routers.sparql import router as sparql_router @@ -67,12 +65,7 @@ app.include_router(sparql_router) app.include_router(search_router) app.include_router(profiles_router) -if "CatPrez" in settings.prez_flavours: - app.include_router(catprez_router) -if "VocPrez" in settings.prez_flavours: - app.include_router(vocprez_router) -if "SpacePrez" in settings.prez_flavours: - app.include_router(spaceprez_router) +app.include_router(ogc_records_router) app.include_router(identifier_router) diff --git a/prez/bnode.py b/prez/bnode.py old mode 100644 new mode 100755 diff --git a/prez/cache.py b/prez/cache.py old mode 100644 new mode 100755 diff --git a/prez/config.py b/prez/config.py old mode 100644 new mode 100755 index 20fd72d8..097e1dbd --- a/prez/config.py +++ b/prez/config.py @@ -36,7 +36,6 @@ class Settings(BaseSettings): curie_separator: str = ":" system_uri: Optional[str] = f"{protocol}://{host}:{port}" order_lists_by_label: bool = True - prez_flavours: Optional[list] = ["SpacePrez", "VocPrez", "CatPrez", "ProfilesPrez"] label_predicates: Optional[List[URIRef]] = [ SKOS.prefLabel, DCTERMS.title, diff --git a/prez/dependencies.py b/prez/dependencies.py old mode 100644 new mode 100755 diff --git a/prez/models/model_exceptions.py b/prez/models/model_exceptions.py old mode 100644 new mode 100755 diff --git a/prez/models/object_item.py b/prez/models/object_item.py old mode 100644 new mode 100755 diff --git a/prez/models/profiles_and_mediatypes.py b/prez/models/profiles_and_mediatypes.py old mode 100644 new mode 100755 index 65d6b87b..6572b5ed --- a/prez/models/profiles_and_mediatypes.py +++ b/prez/models/profiles_and_mediatypes.py @@ -6,6 +6,7 @@ from prez.services.generate_profiles import get_profiles_and_mediatypes from prez.services.connegp_service import get_requested_profile_and_mediatype +from prez.sparql.methods import Repo PREZ = Namespace("https://prez.dev/") @@ -16,6 +17,7 @@ class Config: request: Request # TODO slim down once connegp is refactored so the whole request doesn't need to be passed through classes: FrozenSet[URIRef] + system_repo: Repo req_profiles: Optional[str] = None req_profiles_token: Optional[str] = None req_mediatypes: Optional[FrozenSet] = None @@ -24,6 +26,7 @@ class Config: selected_class: Optional[URIRef] = None profile_headers: Optional[str] = None avail_profile_uris: Optional[str] = None + listing: Optional[bool] = False @model_validator(mode="after") def populate_requested_types(self): @@ -35,19 +38,21 @@ def populate_requested_types(self): ) = get_requested_profile_and_mediatype(request) return self - @model_validator(mode="after") - def populate_profile_and_mediatype(self): - req_profiles = self.req_profiles - 
req_profiles_token = self.req_profiles_token - req_mediatypes = self.req_mediatypes - classes = self.classes - ( - self.profile, - self.mediatype, - self.selected_class, - self.profile_headers, - self.avail_profile_uris, - ) = get_profiles_and_mediatypes( - classes, req_profiles, req_profiles_token, req_mediatypes - ) - return self +async def populate_profile_and_mediatype( + profiles_mediatypes_model: ProfilesMediatypesInfo, + system_repo: Repo +): + req_profiles = profiles_mediatypes_model.req_profiles + req_profiles_token = profiles_mediatypes_model.req_profiles_token + req_mediatypes = profiles_mediatypes_model.req_mediatypes + classes = profiles_mediatypes_model.classes + listing = profiles_mediatypes_model.listing + ( + profiles_mediatypes_model.profile, + profiles_mediatypes_model.mediatype, + profiles_mediatypes_model.selected_class, + profiles_mediatypes_model.profile_headers, + profiles_mediatypes_model.avail_profile_uris, + ) = await get_profiles_and_mediatypes( + classes, system_repo, req_profiles, req_profiles_token, req_mediatypes, listing + ) \ No newline at end of file diff --git a/prez/models/profiles_listings.py b/prez/models/profiles_listings.py old mode 100644 new mode 100755 diff --git a/prez/queries/identifier.py b/prez/queries/identifier.py old mode 100644 new mode 100755 diff --git a/prez/queries/object.py b/prez/queries/object.py old mode 100644 new mode 100755 diff --git a/prez/reference_data/context_ontologies/dcat.nq b/prez/reference_data/context_ontologies/dcat.nq old mode 100644 new mode 100755 diff --git a/prez/reference_data/context_ontologies/dcterms.nq b/prez/reference_data/context_ontologies/dcterms.nq old mode 100644 new mode 100755 diff --git a/prez/reference_data/context_ontologies/geo.nq b/prez/reference_data/context_ontologies/geo.nq old mode 100644 new mode 100755 diff --git a/prez/reference_data/context_ontologies/prez-ontology.nq b/prez/reference_data/context_ontologies/prez-ontology.nq old mode 100644 new mode 100755 diff --git a/prez/reference_data/context_ontologies/rdf.nq b/prez/reference_data/context_ontologies/rdf.nq old mode 100644 new mode 100755 diff --git a/prez/reference_data/context_ontologies/rdfs.nq b/prez/reference_data/context_ontologies/rdfs.nq old mode 100644 new mode 100755 diff --git a/prez/reference_data/context_ontologies/schema.nq b/prez/reference_data/context_ontologies/schema.nq old mode 100644 new mode 100755 diff --git a/prez/reference_data/context_ontologies/schemaorg-current-https.nq b/prez/reference_data/context_ontologies/schemaorg-current-https.nq old mode 100644 new mode 100755 diff --git a/prez/reference_data/context_ontologies/skos.nq b/prez/reference_data/context_ontologies/skos.nq old mode 100644 new mode 100755 diff --git a/prez/reference_data/endpoints/cql_endpoints.ttl b/prez/reference_data/endpoints/cql_endpoints.ttl old mode 100644 new mode 100755 index 6624213d..86fb76e1 --- a/prez/reference_data/endpoints/cql_endpoints.ttl +++ b/prez/reference_data/endpoints/cql_endpoints.ttl @@ -9,7 +9,7 @@ PREFIX skos: PREFIX shext: -endpoint:cql a ont:ListingEndpoint ; +endpoint:get a ont:ListingEndpoint ; ont:deliversClasses prez:CQLObjectList ; # required to determine the correct profile for ConnegP shext:limit 20 ; shext:offset 0 ; diff --git a/prez/reference_data/endpoints/extended_ogc_records.ttl b/prez/reference_data/endpoints/extended_ogc_records.ttl new file mode 100755 index 00000000..595ddc6e --- /dev/null +++ b/prez/reference_data/endpoints/extended_ogc_records.ttl @@ -0,0 +1,85 @@ +PREFIX dcat: +PREFIX 
dcterms: +PREFIX endpoint: +PREFIX geo: +PREFIX prez: +PREFIX ont: +PREFIX sh: +prefix skos: +PREFIX shext: +PREFIX xsd: + + +endpoint:top-level-catalog-listing a ont:ListingEndpoint ; + ont:endpointTemplate "/catalogs" ; + sh:targetClass dcat:Catalog ; # required for query construction + sh:target [ sh:select """ + SELECT ?focus_node + WHERE { + ?focus_node a ; + ?child . + ?child a ?child_class . + VALUES ?child_class { + + } + } + """ ] ; + shext:limit 20 ; + shext:offset 0 ; +. + +endpoint:top-level-catalog-object a ont:ObjectEndpoint ; + sh:targetNode "$object" ; + ont:deliversClasses dcat:Catalog ; # required for link generation for objects + ont:endpointTemplate "/catalogs/$object" ; + ont:parentEndpoint endpoint:top-level-catalog-listing ; +. + +endpoint:lower-level-catalog-listing a ont:ListingEndpoint ; + ont:endpointTemplate "/catalogs/$parent_1/collections" ; + ont:deliversClasses prez:CatalogList ; # required to determine the correct profile for ConnegP + ont:parentToFocusRelation dcterms:hasPart ; + sh:targetClass dcat:Catalog ; # required for query construction + ont:parentEndpoint endpoint:top-level-catalog-object ; + shext:limit 20 ; + shext:offset 0 ; + sh:target [ + sh:select """SELECT ?focus_node + WHERE { + $parent_1 dcterms:hasPart ?focus_node . + } + """ ] ; +. + +endpoint:lower-level-catalog-object a ont:ObjectEndpoint ; + sh:targetNode "$object" ; + ont:deliversClasses dcat:Catalog ; # required for link generation for objects + ont:endpointTemplate "/catalogs/$parent_1/collections/$object" ; + ont:parentToFocusRelation dcterms:hasPart ; + ont:parentEndpoint endpoint:lower-level-catalog-listing ; +. + +endpoint:resource-listing a ont:ListingEndpoint ; + ont:endpointTemplate "/catalogs/$parent_2/collections/$parent_1/items" ; + ont:deliversClasses prez:ResourceList ; # required to determine the correct profile for ConnegP + ont:parentToFocusRelation dcterms:hasPart ; + sh:targetClass dcat:Resource ; # required for query construction + ont:parentEndpoint endpoint:lower-level-catalog-object ; + shext:limit 20 ; + shext:offset 0 ; + sh:target [ + sh:select """SELECT ?focus_node + WHERE { + $parent_1 dcterms:hasPart ?focus_node . + } + """ ] ; +. + +endpoint:resource-object a ont:ObjectEndpoint ; + sh:targetNode "$object" ; + ont:deliversClasses dcat:Resource ; # required for link generation for objects + ont:endpointTemplate "/catalogs/$parent_2/collections/$parent_1/items/$object" ; + ont:parentToFocusRelation dcterms:hasPart ; + ont:parentEndpoint endpoint:resource-listing ; +. 
+ diff --git a/prez/reference_data/endpoints/ogc_catprez_endpoints.ttl b/prez/reference_data/endpoints/ogc_catprez_endpoints.ttl.old old mode 100644 new mode 100755 similarity index 100% rename from prez/reference_data/endpoints/ogc_catprez_endpoints.ttl rename to prez/reference_data/endpoints/ogc_catprez_endpoints.ttl.old diff --git a/prez/reference_data/endpoints/ogc_spaceprez_endpoints.ttl b/prez/reference_data/endpoints/ogc_spaceprez_endpoints.ttl.old old mode 100644 new mode 100755 similarity index 100% rename from prez/reference_data/endpoints/ogc_spaceprez_endpoints.ttl rename to prez/reference_data/endpoints/ogc_spaceprez_endpoints.ttl.old diff --git a/prez/reference_data/endpoints/ogc_vocprez_endpoints.ttl b/prez/reference_data/endpoints/ogc_vocprez_endpoints.ttl.old old mode 100644 new mode 100755 similarity index 100% rename from prez/reference_data/endpoints/ogc_vocprez_endpoints.ttl rename to prez/reference_data/endpoints/ogc_vocprez_endpoints.ttl.old diff --git a/prez/reference_data/endpoints/system_endpoints.ttl b/prez/reference_data/endpoints/system_endpoints.ttl old mode 100644 new mode 100755 diff --git a/prez/reference_data/endpoints/vocprez_endpoints.ttl.unused b/prez/reference_data/endpoints/vocprez_endpoints.ttl.unused old mode 100644 new mode 100755 diff --git a/prez/reference_data/prefixes/all.file.vann.ttl b/prez/reference_data/prefixes/all.file.vann.ttl old mode 100644 new mode 100755 diff --git a/prez/reference_data/prefixes/standard.ttl b/prez/reference_data/prefixes/standard.ttl old mode 100644 new mode 100755 diff --git a/prez/reference_data/prefixes/testing.ttl b/prez/reference_data/prefixes/testing.ttl old mode 100644 new mode 100755 diff --git a/prez/reference_data/prez_ns.py b/prez/reference_data/prez_ns.py old mode 100644 new mode 100755 diff --git a/prez/reference_data/profiles/dd.ttl b/prez/reference_data/profiles/dd.ttl old mode 100644 new mode 100755 diff --git a/prez/reference_data/profiles/ogc_records_profile.ttl b/prez/reference_data/profiles/ogc_records_profile.ttl old mode 100644 new mode 100755 index fd34ab32..a81997c7 --- a/prez/reference_data/profiles/ogc_records_profile.ttl +++ b/prez/reference_data/profiles/ogc_records_profile.ttl @@ -25,21 +25,21 @@ prez:OGCRecordsProfile altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:hasNodeShape [ a sh:NodeShape ; - sh:targetClass prez:CatalogList , prez:ConceptList ; + sh:targetClass dcat:Catalog , skos:Concept , geo:Feature , geo:FeatureCollection , skos:Collection ; altr-ext:hasDefaultProfile prez:OGCListingProfile ] , [ a sh:NodeShape ; sh:targetClass prez:SchemesList ; - altr-ext:hasDefaultProfile prez:OGCSchemesListProfile + altr-ext:hasDefaultProfile skos:ConceptScheme ] , [ a sh:NodeShape ; - sh:targetClass dcat:Catalog , skos:ConceptScheme , skos:Concept ; + sh:targetClass dcat:Catalog , skos:ConceptScheme , skos:Concept , geo:Feature , geo:FeatureCollection , skos:Collection ; altr-ext:hasDefaultProfile prez:OGCItemProfile ] . prez:OGCListingProfile - a prof:Profile , sh:NodeShape ; + a prof:Profile , prez:ListingProfile , sh:NodeShape ; dcterms:title "OGC Listing Profile" ; altr-ext:hasResourceFormat "application/ld+json" , @@ -48,11 +48,11 @@ prez:OGCListingProfile "text/anot+turtle" , "text/turtle" ; altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - altr-ext:constrainsClass prez:CatalogList , prez:SchemesList , prez:ConceptList ; + altr-ext:constrainsClass dcat:Catalog , skos:Collection , geo:Feature , geo:FeatureCollection , skos:Concept ; . 
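+# Informative: ConnegP resolves a default profile by matching the classes an
+# endpoint delivers against sh:targetClass in the altr-ext:hasNodeShape blocks
+# above, while altr-ext:constrainsClass limits which classes a profile may
+# render (see select_profile_mediatype). A new profile registered under this
+# scheme would follow the same pattern; a minimal sketch, using a hypothetical
+# ex: namespace:
+#
+# ex:MyListingProfile
+#     a prof:Profile , prez:ListingProfile , sh:NodeShape ;
+#     dcterms:title "Example Listing Profile" ;
+#     altr-ext:hasResourceFormat "text/turtle" ;
+#     altr-ext:hasDefaultResourceFormat "text/turtle" ;
+#     altr-ext:constrainsClass dcat:Catalog ;
+# .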
prez:OGCSchemesListProfile - a prof:Profile , sh:NodeShape ; + a prof:Profile , prez:ListingProfile , sh:NodeShape ; dcterms:title "OGC Concept Scheme Listing Profile" ; altr-ext:hasResourceFormat "application/ld+json" , @@ -61,7 +61,7 @@ prez:OGCSchemesListProfile "text/anot+turtle" , "text/turtle" ; altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - altr-ext:constrainsClass prez:SchemesList ; + altr-ext:constrainsClass skos:ConceptScheme ; sh:property [ sh:minCount 0 ; sh:path ( @@ -76,8 +76,8 @@ prez:OGCSchemesListProfile . prez:OGCItemProfile - a prof:Profile , sh:NodeShape ; - dcterms:title "OGC Item Profile" ; + a prof:Profile , prez:ObjectProfile , sh:NodeShape ; + dcterms:title "OGC Object Profile" ; altr-ext:hasResourceFormat "application/ld+json" , "application/anot+ld+json" , @@ -93,5 +93,5 @@ prez:OGCItemProfile sh:path [ sh:inversePath dcterms:hasPart ] ; ] ; shext:bnode-depth 2 ; - altr-ext:constrainsClass dcat:Catalog , dcat:Resource , skos:ConceptScheme, skos:Concept ; + altr-ext:constrainsClass dcat:Catalog , dcat:Resource , skos:ConceptScheme, skos:Collection , skos:Concept , geo:FeatureCollection , geo:Feature ; . diff --git a/prez/reference_data/profiles/prez_default_profiles.ttl b/prez/reference_data/profiles/prez_default_profiles.ttl old mode 100644 new mode 100755 diff --git a/prez/reference_data/profiles/spaceprez_default_profiles.ttl b/prez/reference_data/profiles/spaceprez_default_profiles.ttl old mode 100644 new mode 100755 diff --git a/prez/reference_data/search_methods/search_default.ttl b/prez/reference_data/search_methods/search_default.ttl old mode 100644 new mode 100755 diff --git a/prez/reference_data/search_methods/search_exact.ttl b/prez/reference_data/search_methods/search_exact.ttl old mode 100644 new mode 100755 diff --git a/prez/reference_data/search_methods/search_readme.md b/prez/reference_data/search_methods/search_readme.md old mode 100644 new mode 100755 diff --git a/prez/reference_data/search_methods/search_skos_preflabel.ttl b/prez/reference_data/search_methods/search_skos_preflabel.ttl old mode 100644 new mode 100755 diff --git a/prez/reference_data/search_methods/search_skos_weighted.ttl b/prez/reference_data/search_methods/search_skos_weighted.ttl old mode 100644 new mode 100755 diff --git a/prez/renderers/csv_renderer.py b/prez/renderers/csv_renderer.py old mode 100644 new mode 100755 diff --git a/prez/renderers/json_renderer.py b/prez/renderers/json_renderer.py old mode 100644 new mode 100755 diff --git a/prez/renderers/renderer.py b/prez/renderers/renderer.py old mode 100644 new mode 100755 diff --git a/prez/response.py b/prez/response.py old mode 100644 new mode 100755 diff --git a/prez/routers/cql.py b/prez/routers/cql.py old mode 100644 new mode 100755 diff --git a/prez/routers/identifier.py b/prez/routers/identifier.py old mode 100644 new mode 100755 diff --git a/prez/routers/management.py b/prez/routers/management.py old mode 100644 new mode 100755 diff --git a/prez/routers/object.py b/prez/routers/object.py old mode 100644 new mode 100755 index 1cbb8d49..395885a0 --- a/prez/routers/object.py +++ b/prez/routers/object.py @@ -70,9 +70,9 @@ async def count_route( @router.get("/object", summary="Object", name="https://prez.dev/endpoint/system/object") async def object_route( - request: Request, - repo=Depends(get_repo), - system_repo=Depends(get_system_repo), + request: Request, + repo=Depends(get_repo), + system_repo=Depends(get_system_repo), ): endpoint_uri = URIRef(request.scope.get("route").name) uri = 
URIRef(request.query_params.get("uri")) diff --git a/prez/routers/ogc_catprez.py b/prez/routers/ogc_router.py old mode 100644 new mode 100755 similarity index 78% rename from prez/routers/ogc_catprez.py rename to prez/routers/ogc_router.py index b4c5152b..4a449933 --- a/prez/routers/ogc_catprez.py +++ b/prez/routers/ogc_router.py @@ -1,34 +1,32 @@ from typing import Optional from fastapi import APIRouter, Request, Depends -from fastapi.responses import PlainTextResponse +from rdflib import Namespace from rdflib import URIRef -from prez.dependencies import get_repo, cql_post_parser_dependency, get_system_repo +from prez.dependencies import get_repo, get_system_repo from prez.services.curie_functions import get_uri_for_curie_id from prez.services.listings import listing_function from prez.services.objects import object_function from prez.sparql.methods import Repo +from prez.reference_data.prez_ns import PREZ router = APIRouter(tags=["ogccatprez"]) +OGCE = Namespace(PREZ["endpoint/extended-ogc-records/"]) + ogc_endpoints = { - "top-level-catalog-listing": "https://prez.dev/endpoint/ogccatprez/top-level-catalog-listing", - "top-level-catalog-object": "https://prez.dev/endpoint/ogccatprez/top-level-catalog-object", - "lower-level-catalog-listing": "https://prez.dev/endpoint/ogccatprez/lower-level-catalog-listing", - "lower-level-catalog-object": "https://prez.dev/endpoint/ogccatprez/lower-level-catalog-object", - "resource-listing": "https://prez.dev/endpoint/ogccatprez/resource-listing", - "resource-object": "https://prez.dev/endpoint/ogccatprez/resource-object", + "top-level-catalog-listing": OGCE["top-level-catalog-listing"], + "top-level-catalog-object": OGCE["top-level-catalog-object"], + "lower-level-catalog-listing": OGCE["lower-level-catalog-listing"], + "lower-level-catalog-object": OGCE["lower-level-catalog-object"], + "resource-listing": OGCE["resource-listing"], + "resource-object": OGCE["resource-object"], } -@router.get("/c", summary="CatPrez Home") -async def catprez_home(): - return PlainTextResponse("CatPrez Home") - - @router.get( - "/c/catalogs", + "/catalogs", summary="List Top Level Catalogs", name=ogc_endpoints["top-level-catalog-listing"], ) @@ -54,7 +52,7 @@ async def catalog_list( @router.get( - "/c/catalogs/{catalogId}/collections", + "/catalogs/{catalogId}/collections", summary="List Lower Level Catalogs", name=ogc_endpoints["lower-level-catalog-listing"], ) @@ -83,7 +81,7 @@ async def vocab_list( @router.get( - "/c/catalogs/{catalogId}/collections/{collectionId}/items", + "/catalogs/{catalogId}/collections/{collectionId}/items", summary="List Resources", name=ogc_endpoints["resource-listing"], ) @@ -111,7 +109,7 @@ async def concept_list( @router.get( - "/c/catalogs/{catalogId}", + "/catalogs/{catalogId}", summary="Top Level Catalog Object", name=ogc_endpoints["top-level-catalog-object"], ) @@ -129,7 +127,7 @@ async def catalog_object( @router.get( - "/c/catalogs/{catalogId}/collections/{collectionId}", + "/catalogs/{catalogId}/collections/{collectionId}", summary="Lower Level Catalog Object", name=ogc_endpoints["lower-level-catalog-object"], ) @@ -147,7 +145,7 @@ async def catalog_object( @router.get( - "/c/catalogs/{catalogId}/collections/{collectionId}/items/{itemId}", + "/catalogs/{catalogId}/collections/{collectionId}/items/{itemId}", summary="Resource Object", name=ogc_endpoints["resource-object"], ) diff --git a/prez/routers/ogc_spaceprez.py b/prez/routers/ogc_spaceprez.py.old old mode 100644 new mode 100755 similarity index 83% rename from 
prez/routers/ogc_spaceprez.py rename to prez/routers/ogc_spaceprez.py.old index 54bfcd78..929b5bda --- a/prez/routers/ogc_spaceprez.py +++ b/prez/routers/ogc_spaceprez.py.old @@ -18,39 +18,6 @@ @router.get( "/s", summary="SpacePrez Home", - openapi_extra={ - "parameters": [ - { - "name": "page", - "in": "query", - "required": False, - "schema": { - "anyOf": [{"type": "integer"}, {"type": "null"}], - "default": 1, - "title": "Page", - }, - }, - { - "name": "per_page", - "in": "query", - "required": False, - "schema": { - "anyOf": [{"type": "integer"}, {"type": "null"}], - "default": 20, - "title": "Per Page", - }, - }, - { - "name": "search_term", - "in": "path", - "required": False, - "schema": { - "anyOf": [{"type": "string"}, {"type": "null"}], - "title": "Search Term", - }, - }, - ] - }, ) async def spaceprez_home(): return PlainTextResponse("SpacePrez Home") diff --git a/prez/routers/ogc_vocprez.py b/prez/routers/ogc_vocprez.py.old old mode 100644 new mode 100755 similarity index 100% rename from prez/routers/ogc_vocprez.py rename to prez/routers/ogc_vocprez.py.old diff --git a/prez/routers/profiles.py b/prez/routers/profiles.py old mode 100644 new mode 100755 diff --git a/prez/routers/search.py b/prez/routers/search.py old mode 100644 new mode 100755 index b2590844..6b094fee --- a/prez/routers/search.py +++ b/prez/routers/search.py @@ -5,7 +5,7 @@ from prez.config import settings from prez.dependencies import get_repo -from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo +from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo, populate_profile_and_mediatype from prez.reference_data.prez_ns import PREZ from prez.renderers.renderer import return_from_graph from prez.services.link_generation import _add_prez_links @@ -37,8 +37,9 @@ async def search( graph.add((PREZ.SearchResult, PREZ["count"], Literal(count))) prof_and_mt_info = ProfilesMediatypesInfo( - request=request, classes=frozenset([PREZ.SearchResult]) + request=request, classes=frozenset([PREZ.SearchResult]), system_repo=system_repo ) + await populate_profile_and_mediatype(prof_and_mt_info, system_repo) req_mt = prof_and_mt_info.req_mediatypes if req_mt: diff --git a/prez/routers/sparql.py b/prez/routers/sparql.py old mode 100644 new mode 100755 index 82ad842b..eb788584 --- a/prez/routers/sparql.py +++ b/prez/routers/sparql.py @@ -8,8 +8,8 @@ from starlette.requests import Request from starlette.responses import StreamingResponse -from prez.dependencies import get_repo -from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo +from prez.dependencies import get_repo, get_system_repo +from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo, populate_profile_and_mediatype from prez.renderers.renderer import return_annotated_rdf from prez.sparql.methods import Repo @@ -26,6 +26,7 @@ async def sparql_endpoint( query: str, request: Request, repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), ): request_mediatype = request.headers.get("accept").split(",")[ 0 @@ -35,8 +36,11 @@ async def sparql_endpoint( # Intercept "+anot" mediatypes if "anot+" in request_mediatype: prof_and_mt_info = ProfilesMediatypesInfo( - request=request, classes=frozenset([PREZ.SPARQLQuery]) + request=request, + classes=frozenset([PREZ.SPARQLQuery]), + system_repo=system_repo, ) + await populate_profile_and_mediatype(prof_and_mt_info, system_repo) non_anot_mediatype = request_mediatype.replace("anot+", "") request._headers = Headers({**request.headers, "accept": 
non_anot_mediatype}) response = await repo.sparql(request) diff --git a/prez/routers/vocprez.py.unused b/prez/routers/vocprez.py.unused old mode 100644 new mode 100755 diff --git a/prez/services/app_service.py b/prez/services/app_service.py old mode 100644 new mode 100755 index 08da45aa..249cd5dc --- a/prez/services/app_service.py +++ b/prez/services/app_service.py @@ -59,27 +59,6 @@ async def count_objects(repo): async def populate_api_info(): - for prez in settings.prez_flavours: - bnode = BNode() - prez_system_graph.add( - (URIRef(settings.system_uri), PREZ.enabledPrezFlavour, bnode) - ) - prez_system_graph.add((bnode, RDF.type, PREZ[prez])) - # add links to prez subsystems - prez_system_graph.add((bnode, PREZ.link, Literal(f"/{prez[0].lower()}"))) - - # add links to search methods - sys_prof = profiles_graph_cache.value(None, ALTREXT.constrainsClass, PREZ[prez]) - if sys_prof: - search_methods = [ - sm - for sm in profiles_graph_cache.objects( - sys_prof, PREZ.supportedSearchMethod - ) - ] - for method in search_methods: - prez_system_graph.add((bnode, PREZ.availableSearchMethod, method)) - prez_system_graph.add( (URIRef(settings.system_uri), PREZ.version, Literal(settings.prez_version)) ) diff --git a/prez/services/connegp_service.py b/prez/services/connegp_service.py old mode 100644 new mode 100755 diff --git a/prez/services/cql_search.py b/prez/services/cql_search.py old mode 100644 new mode 100755 diff --git a/prez/services/curie_functions.py b/prez/services/curie_functions.py old mode 100644 new mode 100755 diff --git a/prez/services/exception_catchers.py b/prez/services/exception_catchers.py old mode 100644 new mode 100755 diff --git a/prez/services/generate_profiles.py b/prez/services/generate_profiles.py old mode 100644 new mode 100755 index 0c17464a..ee2c7f21 --- a/prez/services/generate_profiles.py +++ b/prez/services/generate_profiles.py @@ -9,6 +9,7 @@ from prez.models.model_exceptions import NoProfilesException from prez.reference_data.prez_ns import PREZ from prez.services.curie_functions import get_curie_id_for_uri +from prez.sparql.methods import Repo from prez.sparql.objects_listings import select_profile_mediatype log = logging.getLogger(__name__) @@ -20,18 +21,8 @@ async def create_profiles_graph(repo) -> Graph: ): # pytest imports app.py multiple times, so this is needed. Not sure why cache is # not cleared between calls return - flavours = ["CatPrez", "SpacePrez", "VocPrez"] for f in (Path(__file__).parent.parent / "reference_data/profiles").glob("*.ttl"): - # Check if file starts with any of the flavour prefixes - matching_flavour = next( - (flavour for flavour in flavours if f.name.startswith(flavour.lower())), - None, - ) - # If the file doesn't start with any specific flavour or the matching flavour is in settings.prez_flavours, parse it. 
- if not matching_flavour or ( - matching_flavour and matching_flavour in settings.prez_flavours - ): - profiles_graph_cache.parse(f) + profiles_graph_cache.parse(f) log.info("Prez default profiles loaded") remote_profiles_query = """ PREFIX dcat: @@ -76,25 +67,28 @@ async def create_profiles_graph(repo) -> Graph: # @lru_cache(maxsize=128) -def get_profiles_and_mediatypes( +async def get_profiles_and_mediatypes( classes: FrozenSet[URIRef], + system_repo: Repo, requested_profile: URIRef = None, requested_profile_token: str = None, requested_mediatype: URIRef = None, + listing: bool = False ): query = select_profile_mediatype( - classes, requested_profile, requested_profile_token, requested_mediatype + classes, requested_profile, requested_profile_token, requested_mediatype, listing ) log.debug(f"ConnegP query: {query}") - response = profiles_graph_cache.query(query) + # response = profiles_graph_cache.query(query) + response = await system_repo.send_queries([], [(None, query)]) # log.debug(f"ConnegP response:{results_pretty_printer(response)}") - if len(response.bindings[0]) == 0: + if response[1][0][1] == [{}]: raise NoProfilesException(classes) - top_result = response.bindings[0] + top_result = response[1][0][1][0] profile, mediatype, selected_class = ( - top_result["profile"], - top_result["format"], - top_result["class"], + URIRef(top_result["profile"]["value"]), + Literal(top_result["format"]["value"]), + URIRef(top_result["class"]["value"]), ) profile_headers, avail_profile_uris = generate_profiles_headers( selected_class, response, profile, mediatype @@ -155,8 +149,8 @@ def generate_profiles_headers(selected_class, response, profile, mediatype): "Content-Type": mediatype, } avail_profiles = set( - (get_curie_id_for_uri(i["profile"]), i["profile"], i["title"]) - for i in response.bindings + (get_curie_id_for_uri(i["profile"]["value"]), i["profile"]["value"], i["title"]["value"]) + for i in response[1][0][1] ) avail_profiles_headers = ", ".join( [ @@ -166,11 +160,11 @@ def generate_profiles_headers(selected_class, response, profile, mediatype): ) avail_mediatypes_headers = ", ".join( [ - f"""<{selected_class}?_profile={get_curie_id_for_uri(i["profile"])}&_mediatype={i["format"]}>; \ -rel="{"self" if i["profile"] == profile and i["format"] == mediatype else "alternate"}"; \ -type="{i["format"]}"; profile="{i["profile"]}"\ + f"""<{selected_class}?_profile={get_curie_id_for_uri(i["profile"]["value"])}&_mediatype={i["format"]["value"]}>; \ +rel="{"self" if i["profile"]["value"] == profile and i["format"]["value"] == mediatype else "alternate"}"; \ +type="{i["format"]["value"]}"; profile="{i["profile"]["value"]}"\ """ - for i in response.bindings + for i in response[1][0][1] ] ) headers["Link"] = ", ".join( diff --git a/prez/services/link_generation.py b/prez/services/link_generation.py old mode 100644 new mode 100755 diff --git a/prez/services/listings.py b/prez/services/listings.py old mode 100644 new mode 100755 index c023d947..ac8fd3fe --- a/prez/services/listings.py +++ b/prez/services/listings.py @@ -8,7 +8,7 @@ from prez.cache import profiles_graph_cache, endpoints_graph_cache from prez.config import settings -from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo +from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo, populate_profile_and_mediatype from prez.reference_data.prez_ns import ONT, PREZ from prez.renderers.renderer import return_from_graph from prez.services.link_generation import _add_prez_links @@ -37,10 +37,10 @@ async def 
listing_function( ): queries = [] # class is from endpoint definition. - listing_class = endpoints_graph_cache.value(endpoint_uri, ONT.deliversClasses) target_class = endpoints_graph_cache.value(endpoint_uri, SH.targetClass) - prof_and_mt_info = ProfilesMediatypesInfo(request=request, classes=[listing_class]) + prof_and_mt_info = ProfilesMediatypesInfo(request=request, classes=[target_class], system_repo=system_repo, listing=True) + await populate_profile_and_mediatype(prof_and_mt_info, system_repo) selected_class, selected_profile = ( prof_and_mt_info.selected_class, prof_and_mt_info.profile, diff --git a/prez/services/model_methods.py b/prez/services/model_methods.py old mode 100644 new mode 100755 diff --git a/prez/services/objects.py b/prez/services/objects.py old mode 100644 new mode 100755 index 3e21b5a2..fbb16034 --- a/prez/services/objects.py +++ b/prez/services/objects.py @@ -6,7 +6,7 @@ from prez.cache import profiles_graph_cache, endpoints_graph_cache from prez.models.object_item import ObjectItem -from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo +from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo, populate_profile_and_mediatype from prez.reference_data.prez_ns import PREZ, EP from prez.renderers.renderer import return_from_graph from prez.services.link_generation import ( @@ -30,7 +30,8 @@ async def object_function( ): klasses = await get_classes(uri=uri, repo=repo, endpoint=endpoint_uri) # ConnegP - prof_and_mt_info = ProfilesMediatypesInfo(request=request, classes=klasses) + prof_and_mt_info = ProfilesMediatypesInfo(request=request, classes=klasses, system_repo=system_repo) + await populate_profile_and_mediatype(prof_and_mt_info, system_repo) # if we're on the object endpoint and a profile hasn't been requested, use the open profile # if (endpoint_uri == EP.object) and not ( # prof_and_mt_info.req_profiles or prof_and_mt_info.req_profiles_token diff --git a/prez/services/prez_logging.py b/prez/services/prez_logging.py old mode 100644 new mode 100755 diff --git a/prez/sparql/methods.py b/prez/sparql/methods.py old mode 100644 new mode 100755 diff --git a/prez/sparql/objects_listings.py b/prez/sparql/objects_listings.py old mode 100644 new mode 100755 index 41bd263f..56b7353f --- a/prez/sparql/objects_listings.py +++ b/prez/sparql/objects_listings.py @@ -327,6 +327,7 @@ def select_profile_mediatype( requested_profile_uri: URIRef = None, requested_profile_token: str = None, requested_mediatypes: List[Tuple] = None, + listing: bool = False, ): """ Returns a SPARQL SELECT query which will determine the profile and mediatype to return based on user requests, @@ -364,6 +365,10 @@ def select_profile_mediatype( 4. If neither a profile nor mediatype is requested, the default profile for the most specific class is returned, with the default mediatype for that profile. 
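+    5. If the listing flag is set, only profiles typed prez:ListingProfile are
+       considered; otherwise only prez:ObjectProfile profiles are (the query
+       below adds a '?profile a <profile class>' constraint accordingly).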
""" + if listing: + profile_class = PREZ.ListingProfile + else: + profile_class = PREZ.ObjectProfile if requested_profile_token: requested_profile_uri = get_uri_for_curie_id(requested_profile_token) query = dedent( @@ -390,6 +395,7 @@ def select_profile_mediatype( ?profile altr-ext:constrainsClass ?class ; altr-ext:hasResourceFormat ?format ; dcterms:title ?title .\ + {f'?profile a {profile_class.n3()} .'} {f'BIND(?profile=<{requested_profile_uri}> as ?req_profile)' if requested_profile_uri else ''} BIND(EXISTS {{ ?shape sh:targetClass ?class ; altr-ext:hasDefaultProfile ?profile }} AS ?def_profile) diff --git a/prez/sparql/resource.py b/prez/sparql/resource.py old mode 100644 new mode 100755 diff --git a/prez/sparql/search_query.py b/prez/sparql/search_query.py old mode 100644 new mode 100755 diff --git a/prez/url.py b/prez/url.py old mode 100644 new mode 100755 diff --git a/pyproject.toml b/pyproject.toml old mode 100644 new mode 100755 diff --git a/temp/cql2sparql.py b/temp/cql2sparql.py old mode 100644 new mode 100755 diff --git a/temp/cql_sparql_reference.py b/temp/cql_sparql_reference.py old mode 100644 new mode 100755 diff --git a/temp/default_cql_context.json b/temp/default_cql_context.json old mode 100644 new mode 100755 diff --git a/temp/grammar.py b/temp/grammar.py old mode 100644 new mode 100755 diff --git a/temp/shacl2sparql.py b/temp/shacl2sparql.py old mode 100644 new mode 100755 index 0b30cfee..de39fadf --- a/temp/shacl2sparql.py +++ b/temp/shacl2sparql.py @@ -181,7 +181,6 @@ def parse_endpoint_definition(self): if target_classes: self._add_target_class(target_classes[0]) - # don't use the target class if there's a sh:target / sh:select #TODO confirm why this caused issues - duplicate # pattern matches in the subquery? # elif target_classes: diff --git a/temp/test_search.py b/temp/test_search.py old mode 100644 new mode 100755 diff --git a/test_data/catprez.ttl b/test_data/catprez.ttl old mode 100644 new mode 100755 diff --git a/test_data/object_catalog_bblocks_catalog.ttl b/test_data/object_catalog_bblocks_catalog.ttl deleted file mode 100644 index 86b4b547..00000000 --- a/test_data/object_catalog_bblocks_catalog.ttl +++ /dev/null @@ -1,12 +0,0 @@ -@prefix dcat: . -@prefix dcterms: . -@prefix vocab: . -@prefix catalog: . -@prefix prez: . - -catalog:bblocks - a dcat:Catalog ; - dcterms:identifier "bblocks" ; - dcterms:title "A catalog of Building Block Vocabularies" ; - dcterms:hasPart vocab:api , vocab:datatype , vocab:parameter , vocab:schema ; - . diff --git a/test_data/object_vocab_api_bblocks.ttl b/test_data/object_vocab_api_bblocks.ttl deleted file mode 100644 index 15cb9aa3..00000000 --- a/test_data/object_vocab_api_bblocks.ttl +++ /dev/null @@ -1,38 +0,0 @@ -@prefix bblocks: . -@prefix dct: . -@prefix prov: . -@prefix rdfs: . -@prefix schema: . -@prefix skos: . -@prefix xsd: . -@prefix vocab: . -@prefix prez: . - -vocab:api - a skos:ConceptScheme ; - skos:prefLabel "API Building Blocks" ; - skos:hasTopConcept bblocks:ogc.unstable.sosa ; - dct:identifier "api" ; - . 
- -bblocks:ogc.unstable.sosa a skos:Concept, - bblocks:Api ; - rdfs:label "Sensor, Observation, Sample, and Actuator (SOSA)" ; - dct:abstract "The SOSA (Sensor, Observation, Sample, and Actuator) ontology is a realisation of the Observations, Measurements and Sampling (OMS) Conceptual model" ; - dct:created "2023-04-13T00:00:00+00:00"^^xsd:dateTime ; - dct:description [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/index.json" ], - [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/index.md" ], - [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/" ] ; - dct:hasVersion "1.0" ; - dct:modified "2023-04-13"^^xsd:date ; - dct:source ; - skos:inScheme , vocab:api ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status ; - . diff --git a/test_data/object_vocab_datatype_bblocks.ttl b/test_data/object_vocab_datatype_bblocks.ttl deleted file mode 100644 index 9651ba4c..00000000 --- a/test_data/object_vocab_datatype_bblocks.ttl +++ /dev/null @@ -1,38 +0,0 @@ -@prefix bblocks: . -@prefix dct: . -@prefix prov: . -@prefix rdfs: . -@prefix schema: . -@prefix skos: . -@prefix xsd: . -@prefix vocab: . - -vocab:datatype - a skos:ConceptScheme ; - skos:prefLabel "Datatype Building Blocks" ; - skos:hasTopConcept bblocks:ogc.ogc-utils.iri-or-curie ; - dct:identifier "datatype" ; - . - -bblocks:ogc.ogc-utils.iri-or-curie a skos:Concept, - bblocks:Datatype ; - rdfs:label "IRI or CURIE" ; - dct:abstract "This Building Block defines a data type for a full IRI/URI or a CURIE (with or without a prefix)" ; - dct:created "2023-08-08T00:00:00+00:00"^^xsd:dateTime ; - dct:description [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/ogc-utils/iri-or-curie/index.md" ], - [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/ogc-utils/iri-or-curie/" ], - [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/ogc-utils/iri-or-curie/index.json" ] ; - dct:hasVersion "1.0" ; - dct:modified "2023-03-09"^^xsd:date ; - dct:source , - , - ; - skos:inScheme , vocab:datatype ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status . diff --git a/test_data/object_vocab_parameter_bblocks.ttl b/test_data/object_vocab_parameter_bblocks.ttl deleted file mode 100644 index 23c920a7..00000000 --- a/test_data/object_vocab_parameter_bblocks.ttl +++ /dev/null @@ -1,61 +0,0 @@ -@prefix bblocks: . -@prefix dct: . -@prefix prov: . -@prefix rdfs: . -@prefix schema: . -@prefix skos: . -@prefix xsd: . -@prefix vocab: . - -vocab:parameter - a skos:ConceptScheme ; - skos:prefLabel "Parameter Building Blocks" ; - skos:hasTopConcept bblocks:ogc.geo.common.parameters.bbox , bblocks:ogc.geo.common.parameters.bbox-crs ; - dct:identifier "parameter" - . - -bblocks:ogc.geo.common.parameters.bbox a skos:Concept, - bblocks:Parameter ; - rdfs:label "bbox" ; - dct:abstract "The bbox query parameter provides a simple mechanism for filtering resources based on their location. It selects all resources that intersect a rectangle (map view) or box (including height information)." 
; - dct:created "2022-05-24T13:51:38+00:00"^^xsd:dateTime ; - dct:description [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/common/parameters/bbox/index.json" ], - [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/common/parameters/bbox/" ], - [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/common/parameters/bbox/index.md" ] ; - dct:hasVersion "1.0" ; - dct:modified "2022-05-24"^^xsd:date ; - dct:source ; - skos:inScheme , vocab:parameter ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status . - -bblocks:ogc.geo.common.parameters.bbox-crs a skos:Concept, - bblocks:Parameter ; - rdfs:label "bbox-crs" ; - dct:abstract "The bbox-crs query parameter can be used to assert the coordinate reference system that is used for the coordinate values of the bbox parameter." ; - dct:created "2022-07-05T01:01:01+02:00"^^xsd:dateTime ; - dct:description [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/common/parameters/bbox-crs/index.json" ], - [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/common/parameters/bbox-crs/index.md" ], - [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/common/parameters/bbox-crs/" ] ; - dct:hasVersion "1.0" ; - dct:modified "2022-07-05"^^xsd:date ; - dct:source ; - skos:inScheme , vocab:parameter ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status . - - - - diff --git a/test_data/object_vocab_schema_bblocks.ttl b/test_data/object_vocab_schema_bblocks.ttl deleted file mode 100644 index f71f849a..00000000 --- a/test_data/object_vocab_schema_bblocks.ttl +++ /dev/null @@ -1,414 +0,0 @@ -@prefix bblocks: . -@prefix dct: . -@prefix prov: . -@prefix rdfs: . -@prefix schema: . -@prefix skos: . -@prefix xsd: . -@prefix vocab: . - -vocab:schema - a skos:ConceptScheme ; - dct:identifier "schema" ; - skos:prefLabel "Schema Building Blocks" ; - skos:hasTopConcept bblocks:ogc.unstable.sosa.examples.vectorObservation , - bblocks:ogc.unstable.sosa.examples.vectorObservationFeature , - bblocks:ogc.unstable.sosa.features.observation , - bblocks:ogc.unstable.sosa.features.observationCollection , - bblocks:ogc.unstable.sosa.properties.observation , - bblocks:ogc.unstable.sosa.properties.observationCollection , - bblocks:ogc.ogc-utils.json-link , - bblocks:ogc.geo.features.feature , - bblocks:ogc.geo.features.featureCollection , - bblocks:ogc.geo.geopose.advanced , - bblocks:ogc.geo.geopose.basic.quaternion , - bblocks:ogc.geo.geopose.basic.ypr , - bblocks:ogc.geo.json-fg.feature , - bblocks:ogc.geo.json-fg.feature-lenient , - bblocks:ogc.geo.json-fg.featureCollection , - bblocks:ogc.geo.json-fg.featureCollection-lenient , - bblocks:ogc.geo.common.data_types.bounding_box , - bblocks:ogc.geo.common.data_types.geojson - . 
- - -bblocks:ogc.unstable.sosa.examples.vectorObservation a skos:Concept, - bblocks:Schema ; - rdfs:label "Example SOSA Vector Observation" ; - dct:abstract "This building block defines an example SOSA Vector Observation" ; - dct:created "2023-05-19T00:00:00+00:00"^^xsd:dateTime ; - dct:description [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/examples/vectorObservation/" ], - [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/examples/vectorObservation/index.json" ], - [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/examples/vectorObservation/index.md" ] ; - dct:hasVersion "1.0" ; - dct:modified "2023-05-19"^^xsd:date ; - dct:source ; - skos:inScheme , vocab:schema ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status . - -bblocks:ogc.unstable.sosa.examples.vectorObservationFeature a skos:Concept, - bblocks:Schema ; - rdfs:label "Example SOSA Vector Observation Feature" ; - dct:abstract "This building block defines an example SOSA Observation Feature for a Vector Observation" ; - dct:created "2023-05-19T00:00:00+00:00"^^xsd:dateTime ; - dct:description [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/examples/vectorObservationFeature/index.md" ], - [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/examples/vectorObservationFeature/index.json" ], - [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/examples/vectorObservationFeature/" ] ; - dct:hasVersion "1.0" ; - dct:modified "2023-05-19"^^xsd:date ; - dct:source ; - skos:inScheme , vocab:schema ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status . - -bblocks:ogc.unstable.sosa.features.observation a skos:Concept, - bblocks:Schema ; - rdfs:label "SOSA Observation Feature" ; - dct:abstract "This building blocks defines a GeoJSON feature containing a SOSA Observation" ; - dct:created "2023-05-18T00:00:00+00:00"^^xsd:dateTime ; - dct:description [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/features/observation/" ], - [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/features/observation/index.md" ], - [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/features/observation/index.json" ] ; - dct:hasVersion "1.0" ; - dct:modified "2023-05-18"^^xsd:date ; - dct:source ; - skos:inScheme , vocab:schema ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status . - -bblocks:ogc.unstable.sosa.features.observationCollection a skos:Concept, - bblocks:Schema ; - rdfs:label "SOSA ObservationCollection Feature" ; - dct:abstract "This building blocks defines an ObservationCollection Feature according to the SOSA/SSN v1.1 specification." 
; - dct:created "2023-04-13T00:00:00+00:00"^^xsd:dateTime ; - dct:description [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/features/observationCollection/index.json" ], - [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/features/observationCollection/" ], - [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/features/observationCollection/index.md" ] ; - dct:hasVersion "1.0" ; - dct:modified "2023-04-28"^^xsd:date ; - dct:source , - ; - skos:inScheme , vocab:schema ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status . - -bblocks:ogc.unstable.sosa.properties.observation a skos:Concept, - bblocks:Schema ; - rdfs:label "SOSA Observation" ; - dct:abstract "This building block defines the set of properties for an observation according to the SOSA/SSN specification. These properties may be directly included into a root element of a JSON object or used in the properties container of a GeoJSON feature." ; - dct:created "2023-04-13T00:00:00+00:00"^^xsd:dateTime ; - dct:description [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/properties/observation/index.md" ], - [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/properties/observation/index.json" ], - [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/properties/observation/" ] ; - dct:hasVersion "1.0" ; - dct:modified "2023-04-13"^^xsd:date ; - dct:source ; - skos:inScheme , vocab:schema ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status . - -bblocks:ogc.unstable.sosa.properties.observationCollection a skos:Concept, - bblocks:Schema ; - rdfs:label "SOSA ObservationCollection" ; - dct:abstract "This building blocks defines an ObservationCollection according to the SOSA/SSN v1.1 specification." ; - dct:created "2023-04-13T00:00:00+00:00"^^xsd:dateTime ; - dct:description [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/properties/observationCollection/index.json" ], - [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/properties/observationCollection/index.md" ], - [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/properties/observationCollection/" ] ; - dct:hasVersion "1.0" ; - dct:modified "2023-04-28"^^xsd:date ; - dct:source , - ; - skos:inScheme , vocab:schema ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status . - -bblocks:ogc.ogc-utils.json-link a skos:Concept, - bblocks:Schema ; - rdfs:label "JSON Link" ; - dct:abstract "Web linking is used to express relationships between resources. The JSON object representation of links described here is used consistently in OGC API’s." 
; - dct:created "2022-05-18T15:21:59+00:00"^^xsd:dateTime ; - dct:description [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/ogc-utils/json-link/index.json" ], - [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/ogc-utils/json-link/" ], - [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/ogc-utils/json-link/index.md" ] ; - dct:hasVersion "0.1" ; - dct:modified "2022-05-18"^^xsd:date ; - dct:source ; - skos:inScheme , vocab:schema ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status . - -bblocks:ogc.geo.features.feature a skos:Concept, - bblocks:Schema ; - rdfs:label "Feature" ; - dct:abstract "A feature. Every feature is a sub-resource of an OGC Collection." ; - dct:created "2023-05-24T14:56:51+00:00"^^xsd:dateTime ; - dct:description [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/features/feature/" ], - [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/features/feature/index.md" ], - [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/features/feature/index.json" ] ; - dct:hasVersion "1.0" ; - dct:modified "2023-05-15"^^xsd:date ; - dct:source , - ; - skos:inScheme , vocab:schema ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status . - -bblocks:ogc.geo.features.featureCollection a skos:Concept, - bblocks:Schema ; - rdfs:label "Feature Collection" ; - dct:abstract "A collection of features." ; - dct:created "2023-06-26T14:56:51+00:00"^^xsd:dateTime ; - dct:description [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/features/featureCollection/index.md" ], - [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/features/featureCollection/" ], - [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/features/featureCollection/index.json" ] ; - dct:hasVersion "1.0" ; - dct:modified "2023-06-26"^^xsd:date ; - dct:source ; - skos:inScheme , vocab:schema ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status . - -bblocks:ogc.geo.geopose.advanced a skos:Concept, - bblocks:Schema ; - rdfs:label "GeoPose Advanced" ; - dct:abstract "Advanced GeoPose allowing flexible outer frame specification, quaternion orientation, and valid time." ; - dct:created "2023-07-13T00:00:00+00:00"^^xsd:dateTime ; - dct:description [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/geopose/advanced/index.json" ], - [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/geopose/advanced/index.md" ], - [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/geopose/advanced/" ] ; - dct:hasVersion "0.1" ; - dct:modified "2023-07-13"^^xsd:date ; - dct:source ; - skos:inScheme , vocab:schema ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status . 
- -bblocks:ogc.geo.geopose.basic.quaternion a skos:Concept, - bblocks:Schema ; - rdfs:label "GeoPose Basic-Quaternion" ; - dct:abstract "Basic GeoPose using quaternion to specify orientation" ; - dct:created "2023-07-13T00:00:00+00:00"^^xsd:dateTime ; - dct:description [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/geopose/basic/quaternion/index.md" ], - [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/geopose/basic/quaternion/index.json" ], - [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/geopose/basic/quaternion/" ] ; - dct:hasVersion "0.1" ; - dct:modified "2023-07-13"^^xsd:date ; - dct:source ; - skos:inScheme , vocab:schema ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status . - -bblocks:ogc.geo.geopose.basic.ypr a skos:Concept, - bblocks:Schema ; - rdfs:label "GeoPose Basic-YPR" ; - dct:abstract "Basic GeoPose using yaw, pitch, and roll to specify orientation" ; - dct:created "2023-03-15T00:00:00+00:00"^^xsd:dateTime ; - dct:description [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/geopose/basic/ypr/" ], - [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/geopose/basic/ypr/index.md" ], - [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/geopose/basic/ypr/index.json" ] ; - dct:hasVersion "0.1" ; - dct:modified "2023-07-13"^^xsd:date ; - dct:source ; - skos:inScheme , vocab:schema ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status . - -bblocks:ogc.geo.json-fg.feature a skos:Concept, - bblocks:Schema ; - rdfs:label "JSON-FG Feature" ; - dct:abstract "A OGC Features and Geometries JSON (JSON-FG) Feature, extending GeoJSON to support a limited set of additional capabilities that are out-of-scope for GeoJSON, but that are important for a variety of use cases involving feature data." ; - dct:created "2023-05-31T14:56:51+00:00"^^xsd:dateTime ; - dct:description [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/json-fg/feature/index.md" ], - [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/json-fg/feature/" ], - [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/json-fg/feature/index.json" ] ; - dct:hasVersion "0.1" ; - dct:modified "2023-05-31"^^xsd:date ; - dct:source ; - skos:inScheme , vocab:schema ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status . - -bblocks:ogc.geo.json-fg.feature-lenient a skos:Concept, - bblocks:Schema ; - rdfs:label "JSON-FG Feature - Lenient" ; - dct:abstract "A OGC Features and Geometries JSON (JSON-FG) Feature that does not require the \"time\" and \"place\" properties." 
; - dct:created "2023-08-08T00:00:00+00:00"^^xsd:dateTime ; - dct:description [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/json-fg/feature-lenient/index.md" ], - [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/json-fg/feature-lenient/index.json" ], - [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/json-fg/feature-lenient/" ] ; - dct:hasVersion "0.1" ; - dct:modified "2023-08-08"^^xsd:date ; - dct:source ; - skos:inScheme , vocab:schema ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status . - -bblocks:ogc.geo.json-fg.featureCollection a skos:Concept, - bblocks:Schema ; - rdfs:label "JSON-FG Feature Collection" ; - dct:abstract "A collection of OGC Features and Geometries JSON (JSON-FG) Features, extending GeoJSON to support a limited set of additional capabilities that are out-of-scope for GeoJSON, but that are important for a variety of use cases involving feature data." ; - dct:created "2023-05-31T14:56:51+00:00"^^xsd:dateTime ; - dct:description [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/json-fg/featureCollection/index.json" ], - [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/json-fg/featureCollection/" ], - [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/json-fg/featureCollection/index.md" ] ; - dct:hasVersion "0.1" ; - dct:modified "2023-05-31"^^xsd:date ; - dct:source ; - skos:inScheme , vocab:schema ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status . - -bblocks:ogc.geo.json-fg.featureCollection-lenient a skos:Concept, - bblocks:Schema ; - rdfs:label "JSON-FG Feature Collection - Lenient" ; - dct:abstract "A collection of lenient OGC Features and Geometries JSON (JSON-FG) Features, that do not require the \"time\" and \"place\" properties" ; - dct:created "2023-08-08T00:00:00+00:00"^^xsd:dateTime ; - dct:description [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/json-fg/featureCollection-lenient/index.json" ], - [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/json-fg/featureCollection-lenient/index.md" ], - [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/json-fg/featureCollection-lenient/" ] ; - dct:hasVersion "0.1" ; - dct:modified "2023-08-08"^^xsd:date ; - dct:source ; - skos:inScheme , vocab:schema ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status . - -bblocks:ogc.geo.common.data_types.bounding_box a skos:Concept, - bblocks:Schema ; - rdfs:label "Bounding Box" ; - dct:abstract "The bounding box JSON object describes a simple spatial extent of a resource. For OGC API’s this could be a feature, a feature collection or a dataset, but it can be used in any JSON resource that wants to communicate its rough location. 
The extent is simple in that the bounding box does not describe the precise location and shape of the resource, but provides an axis-aligned approximation of the spatial extent that can be used as an initial test whether two resources are potentially intersecting each other." ; - dct:created "2022-05-24T13:51:38+00:00"^^xsd:dateTime ; - dct:description [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/common/data_types/bounding_box/index.json" ], - [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/common/data_types/bounding_box/" ], - [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/common/data_types/bounding_box/index.md" ] ; - dct:hasVersion "1.0.1" ; - dct:modified "2023-03-09"^^xsd:date ; - dct:source ; - skos:inScheme , vocab:schema ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status . - -bblocks:ogc.geo.common.data_types.geojson a skos:Concept, - bblocks:Schema ; - rdfs:label "GeoJSON" ; - dct:abstract "A GeoJSON object" ; - dct:created "2023-05-24T14:56:51+00:00"^^xsd:dateTime ; - dct:description [ dct:format "text/html" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/common/data_types/geojson/" ], - [ dct:format "application/json" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/common/data_types/geojson/index.json" ], - [ dct:format "text/markdown" ; - rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/common/data_types/geojson/index.md" ] ; - dct:hasVersion "1.0" ; - dct:modified "2023-05-15"^^xsd:date ; - dct:source ; - skos:inScheme , vocab:schema ; - bblocks:hasJsonLdContext ; - bblocks:hasSchema , - ; - bblocks:scope ; - bblocks:status . diff --git a/test_data/sandgate.ttl b/test_data/sandgate.ttl deleted file mode 100644 index 8def41e2..00000000 --- a/test_data/sandgate.ttl +++ /dev/null @@ -1,296 +0,0 @@ -PREFIX dcat: -PREFIX dcterms: -PREFIX geo: -PREFIX geofab: -PREFIX rdfs: -PREFIX sand: -PREFIX xsd: - - - a dcat:Dataset ; - dcterms:description "Example floods, roads, catchment and facilities in the Sandgate are"@en ; - dcterms:identifier "sandgate"^^xsd:token ; - dcterms:title "Sandgate example dataset"@en ; - rdfs:label "Sandgate example dataset"@en ; - rdfs:member - sand:catchments , - sand:facilities , - sand:floods , - sand:roads ; - geo:hasBoundingBox [ - a geo:Geometry ; - geo:asWKT "POLYGON ((152.9075 -27.42,153.16 -27.42,153.16 -27.2234024,152.9075 -27.2234024,152.9075 -27.42))"^^geo:wktLiteral - ] ; -. - -sand:catchments - a geo:FeatureCollection ; - dcterms:description "Hydrological catchments that are 'contracted', that is, guarenteed, to appear on multiple Geofabric surface hydrology data products"@en ; - dcterms:identifier "catchments"^^xsd:token ; - dcterms:title "Geofabric Contracted Catchments"@en ; - rdfs:label "Geofabric Contracted Catchments"@en ; - rdfs:member - sand:cc12109444 , - sand:cc12109445 ; - geo:hasBoundingBox [ - a geo:Geometry ; - geo:asWKT "POLYGON ((152.9075 -27.42,153.16 -27.42,153.16 -27.2775,152.9075 -27.2775,152.9075 -27.42))"^^geo:wktLiteral - ] ; -. 
- -sand:facilities - a geo:FeatureCollection ; - dcterms:description "Sandgate area demo Facilities"@en ; - dcterms:identifier "facilities"^^xsd:token ; - dcterms:title "Sandgate area demo Facilities"@en ; - rdfs:label "Sandgate area demo Facilities"@en ; - rdfs:member - sand:bhc , - sand:bhca , - sand:bps , - sand:cpc , - sand:jcabi , - sand:rps , - sand:sac , - sand:sps , - sand:src , - sand:srca ; - geo:hasBoundingBox [ - a geo:Geometry ; - geo:asWKT "POLYGON ((153.0144819 -27.3506599,153.1143102 -27.3506599,153.1143102 -27.2234024,153.0144819 -27.2234024,153.0144819 -27.3506599))"^^geo:wktLiteral - ] ; -. - -sand:floods - a geo:FeatureCollection ; - dcterms:description "Sandgate flooded areas"@en ; - dcterms:identifier "floods"^^xsd:token ; - dcterms:title "Sandgate flooded areas"@en ; - rdfs:label "Sandgate flooded areas"@en ; - rdfs:member - sand:f001 , - sand:f023 , - sand:f332 , - sand:f632 ; - geo:hasBoundingBox [ - a geo:Geometry ; - geo:asWKT "POLYGON ((153.06307 -27.3151243,153.069877 -27.3151243,153.069877 -27.2859541,153.06307 -27.2859541,153.06307 -27.3151243))"^^geo:wktLiteral - ] ; -. - -sand:roads - a geo:FeatureCollection ; - dcterms:description "Sandgate main roads"@en ; - dcterms:identifier "roads"^^xsd:token ; - dcterms:title "Sandgate main roads"@en ; - rdfs:label "Sandgate main roads"@en ; - rdfs:member - sand:bt , - sand:fp ; - geo:hasBoundingBox [ - a geo:Geometry ; - geo:asWKT "POLYGON ((153.0617934 -27.3203138,153.0747569 -27.3203138,153.0747569 -27.2920918,153.0617934 -27.2920918,153.0617934 -27.3203138))"^^geo:wktLiteral - ] ; -. - -sand:bhc - a geo:Feature ; - rdfs:label "Brighton Health Campus Location" ; - dcterms:identifier "bhc"^^xsd:token ; - geo:hasGeometry [ - a geo:Geometry ; - geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0638169, -27.2897951]}"^^geo:geoJSONLiteral ; - geo:asWKT "POINT (153.0638169 -27.2897951)"^^geo:wktLiteral - ] ; -. - -sand:bhca - a geo:Feature ; - rdfs:label "Brighton Health Campus Area" ; - dcterms:identifier "bhca"^^xsd:token ; - geo:hasGeometry [ - a geo:Geometry ; - geo:asGeoJSON "{\"type\": \"Polygon\", \"coordinates\": [[[153.063644,-27.2894036],[153.0635207,-27.2896229],[153.0631612,-27.2896182],[153.0631291,-27.289909],[153.0631559,-27.290338],[153.0644487,-27.2904858],[153.0645614,-27.2899185],[153.0648349,-27.2895324],[153.0648135,-27.2889174],[153.0637674,-27.2887362],[153.063644,-27.2894036]]]}"^^geo:geoJSONLiteral ; - geo:asWKT "POLYGON ((153.063644 -27.2894036, 153.0635207 -27.2896229, 153.0631612 -27.2896182, 153.0631291 -27.289909, 153.0631559 -27.290338, 153.0644487 -27.2904858, 153.0645614 -27.2899185, 153.0648349 -27.2895324, 153.0648135 -27.2889174, 153.0637674 -27.2887362, 153.063644 -27.2894036))"^^geo:wktLiteral - ] ; -. - -sand:bps - a geo:Feature ; - rdfs:label "Boondal Police Station" ; - dcterms:identifier "bps"^^xsd:token ; - geo:hasGeometry [ - a geo:Geometry ; - geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0536022, -27.3497934]}"^^geo:geoJSONLiteral ; - geo:asWKT "POINT (153.0536022 -27.3497934)"^^geo:wktLiteral - ] ; -.
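Each feature in this file stores its geometry twice: once as a geo:geoJSONLiteral and once as a geo:wktLiteral, so GeoJSON consumers and GeoSPARQL queries can use the same data. A hedged sketch of reading both serializations with rdflib (the file path reflects this file's pre-deletion location; GEO is the rdflib namespace the updated tests below also import):

import json

from rdflib import Graph
from rdflib.namespace import GEO

g = Graph().parse("test_data/sandgate.ttl", format="turtle")
for feature, geom in g.subject_objects(GEO.hasGeometry):
    wkt = g.value(geom, GEO.asWKT)          # e.g. "POINT (153.0638169 -27.2897951)"
    geojson = g.value(geom, GEO.asGeoJSON)  # the same shape as a GeoJSON literal
    if geojson is not None:
        coords = json.loads(str(geojson))["coordinates"]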
- -sand:bt - a geo:Feature ; - rdfs:label "Brighton Terrace" ; - dcterms:identifier "bt"^^xsd:token ; - geo:hasGeometry [ - a geo:Geometry ; - geo:asGeoJSON "{ \"type\": \"LineString\", \"coordinates\": [ [ 153.06513, -27.3143431 ], [ 153.065881100000013, -27.3140285 ], [ 153.0653983, -27.3130466 ], [ 153.0652052, -27.3122745 ], [ 153.0651193, -27.3116453 ], [ 153.064550700000012, -27.3103202 ], [ 153.0641108, -27.3092526 ], [ 153.0637889, -27.3074031 ], [ 153.0631774, -27.3057253 ], [ 153.0628448, -27.3044573 ], [ 153.0627053, -27.3036565 ], [ 153.061847, -27.2988706 ], [ 153.0617934, -27.2952 ], [ 153.062168899999989, -27.2933312 ], [ 153.0622333, -27.2920918 ] ] }"^^geo:geoJSONLiteral ; - geo:asWKT "LINESTRING (153.06513 -27.3143431,153.0658811 -27.3140285,153.0653983 -27.3130466,153.0652052 -27.3122745,153.0651193 -27.3116453,153.0645507 -27.3103202,153.0641108 -27.3092526,153.0637889 -27.3074031,153.0631774 -27.3057253,153.0628448 -27.3044573,153.0627053 -27.3036565,153.061847 -27.2988706,153.0617934 -27.2952,153.0621689 -27.2933312,153.0622333 -27.2920918)"^^geo:wktLiteral - ] ; -. - -sand:cc12109444 - a - geo:Feature , - geofab:ContractedCatchment ; - rdfs:label "Contracted Catchment 12109444" ; - dcterms:identifier "cc12109444"^^xsd:token ; - geo:hasGeometry [ - a geo:Geometry ; - geo:asGeoJSON '{"type": "Polygon", "coordinates": [[[153.06, -27.28], [153.06, -27.2825], [153.0625, -27.2825], [153.065, -27.2825], [153.065, -27.305], [153.0675, -27.305], [153.0675, -27.31], [153.07, -27.31], [153.07, -27.3125], [153.0725, -27.3125], [153.0725, -27.3175], [153.075, -27.3175], [153.075, -27.32], [153.0775, -27.32], [153.0775, -27.3225], [153.08, -27.3225], [153.085, -27.3225], [153.085, -27.325], [153.0875, -27.325], [153.0875, -27.33], [153.085, -27.33], [153.0825, -27.33], [153.0825, -27.3325], [153.085, -27.3325], [153.085, -27.335], [153.0875, -27.335], [153.09, -27.335], [153.09, -27.3375], [153.0925, -27.3375], [153.0925, -27.34], [153.0975, -27.34], [153.0975, -27.3425], [153.1025, -27.3425], [153.1025, -27.345], [153.1075, -27.345], [153.1075, -27.3475], [153.11, -27.3475], [153.1125, -27.3475], [153.115, -27.3475], [153.115, -27.35], [153.12, -27.35], [153.12, -27.3525], [153.125, -27.3525], [153.125, -27.355], [153.13, -27.355], [153.13, -27.3575], [153.135, -27.3575], [153.135, -27.36], [153.1375, -27.36], [153.1425, -27.36], [153.1475, -27.36], [153.1525, -27.36], [153.1525, -27.3625], [153.155, -27.3625], [153.155, -27.365], [153.1575, -27.365], [153.1575, -27.375], [153.16, -27.375], [153.16, -27.3775], [153.16, -27.38], [153.1575, -27.38], [153.155, -27.38], [153.155, -27.3825], [153.1525, -27.3825], [153.1525, -27.385], [153.15, -27.385], [153.15, -27.3875], [153.145, -27.3875], [153.145, -27.39], [153.1425, -27.39], [153.1425, -27.3925], [153.14, -27.3925], [153.14, -27.395], [153.14, -27.3975], [153.14, -27.4], [153.1375, -27.4], [153.1375, -27.4025], [153.135, -27.4025], [153.135, -27.405], [153.135, -27.4075], [153.135, -27.4125], [153.135, -27.415], [153.13, -27.415], [153.13, -27.4175], [153.1275, -27.4175], [153.1225, -27.4175], [153.1225, -27.42], [153.1175, -27.42], [153.1125, -27.42], [153.1125, -27.4175], [153.11, -27.4175], [153.11, -27.415], [153.1075, -27.415], [153.1075, -27.4125], [153.0975, -27.4125], [153.0975, -27.415], [153.0925, -27.415], [153.0875, -27.415], [153.085, -27.415], [153.08, -27.415], [153.08, -27.4125], [153.0775, -27.4125], [153.0775, -27.41], [153.075, -27.41], [153.075, -27.405], [153.07, -27.405], [153.07, -27.4025], [153.0675, 
-27.4025], [153.0675, -27.4], [153.065, -27.4], [153.065, -27.3975], [153.0625, -27.3975], [153.0625, -27.395], [153.06, -27.395], [153.06, -27.3925], [153.0275, -27.3925], [153.0275, -27.395], [153.025, -27.395], [153.025, -27.3975], [153.0175, -27.3975], [153.0175, -27.4], [153.0125, -27.4], [153.0125, -27.4025], [153.005, -27.4025], [153.005, -27.405], [153.0025, -27.405], [152.9975, -27.405], [152.9975, -27.4025], [152.9925, -27.4025], [152.9925, -27.4], [152.9875, -27.4], [152.9825, -27.4], [152.9825, -27.3975], [152.98, -27.3975], [152.98, -27.3925], [152.975, -27.3925], [152.975, -27.3875], [152.97, -27.3875], [152.96, -27.3875], [152.96, -27.39], [152.955, -27.39], [152.955, -27.3925], [152.945, -27.3925], [152.94, -27.3925], [152.9375, -27.3925], [152.9375, -27.39], [152.925, -27.39], [152.925, -27.385], [152.925, -27.3825], [152.93, -27.3825], [152.9325, -27.3825], [152.9325, -27.38], [152.9375, -27.38], [152.9375, -27.3825], [152.94, -27.3825], [152.94, -27.38], [152.9475, -27.38], [152.9475, -27.3825], [152.9525, -27.3825], [152.9525, -27.38], [152.965, -27.38], [152.9675, -27.38], [152.9675, -27.3775], [152.98, -27.3775], [152.98, -27.375], [152.9825, -27.375], [152.9825, -27.3725], [152.985, -27.3725], [152.985, -27.37], [152.9875, -27.37], [152.9875, -27.3675], [152.99, -27.3675], [152.99, -27.3625], [152.9925, -27.3625], [152.9925, -27.355], [152.995, -27.355], [152.995, -27.3525], [153, -27.3525], [153, -27.35], [153.005, -27.35], [153.01, -27.35], [153.01, -27.3475], [153.0175, -27.3475], [153.0175, -27.335], [153.02, -27.335], [153.02, -27.33], [153.0225, -27.33], [153.0225, -27.3275], [153.025, -27.3275], [153.025, -27.325], [153.0275, -27.325], [153.0275, -27.3225], [153.03, -27.3225], [153.03, -27.32], [153.0325, -27.32], [153.0325, -27.3175], [153.035, -27.3175], [153.035, -27.305], [153.0375, -27.305], [153.0375, -27.3], [153.04, -27.3], [153.04, -27.2975], [153.0425, -27.2975], [153.0425, -27.2825], [153.04, -27.2825], [153.04, -27.28], [153.0425, -27.28], [153.05, -27.28], [153.06, -27.28]]]}'^^geo:geoJSONLiteral ; - geo:asWKT "POLYGON ((153.06 -27.28, 153.06 -27.2825, 153.0625 -27.2825, 153.065 -27.2825, 153.065 -27.305, 153.0675 -27.305, 153.0675 -27.31, 153.07 -27.31, 153.07 -27.3125, 153.0725 -27.3125, 153.0725 -27.3175, 153.075 -27.3175, 153.075 -27.32, 153.0775 -27.32, 153.0775 -27.3225, 153.08 -27.3225, 153.085 -27.3225, 153.085 -27.325, 153.0875 -27.325, 153.0875 -27.33, 153.085 -27.33, 153.0825 -27.33, 153.0825 -27.3325, 153.085 -27.3325, 153.085 -27.335, 153.0875 -27.335, 153.09 -27.335, 153.09 -27.3375, 153.0925 -27.3375, 153.0925 -27.34, 153.0975 -27.34, 153.0975 -27.3425, 153.1025 -27.3425, 153.1025 -27.345, 153.1075 -27.345, 153.1075 -27.3475, 153.11 -27.3475, 153.1125 -27.3475, 153.115 -27.3475, 153.115 -27.35, 153.12 -27.35, 153.12 -27.3525, 153.125 -27.3525, 153.125 -27.355, 153.13 -27.355, 153.13 -27.3575, 153.135 -27.3575, 153.135 -27.36, 153.1375 -27.36, 153.1425 -27.36, 153.1475 -27.36, 153.1525 -27.36, 153.1525 -27.3625, 153.155 -27.3625, 153.155 -27.365, 153.1575 -27.365, 153.1575 -27.375, 153.16 -27.375, 153.16 -27.3775, 153.16 -27.38, 153.1575 -27.38, 153.155 -27.38, 153.155 -27.3825, 153.1525 -27.3825, 153.1525 -27.385, 153.15 -27.385, 153.15 -27.3875, 153.145 -27.3875, 153.145 -27.39, 153.1425 -27.39, 153.1425 -27.3925, 153.14 -27.3925, 153.14 -27.395, 153.14 -27.3975, 153.14 -27.4, 153.1375 -27.4, 153.1375 -27.4025, 153.135 -27.4025, 153.135 -27.405, 153.135 -27.4075, 153.135 -27.4125, 153.135 -27.415, 153.13 -27.415, 153.13 -27.4175, 
153.1275 -27.4175, 153.1225 -27.4175, 153.1225 -27.42, 153.1175 -27.42, 153.1125 -27.42, 153.1125 -27.4175, 153.11 -27.4175, 153.11 -27.415, 153.1075 -27.415, 153.1075 -27.4125, 153.0975 -27.4125, 153.0975 -27.415, 153.0925 -27.415, 153.0875 -27.415, 153.085 -27.415, 153.08 -27.415, 153.08 -27.4125, 153.0775 -27.4125, 153.0775 -27.41, 153.075 -27.41, 153.075 -27.405, 153.07 -27.405, 153.07 -27.4025, 153.0675 -27.4025, 153.0675 -27.4, 153.065 -27.4, 153.065 -27.3975, 153.0625 -27.3975, 153.0625 -27.395, 153.06 -27.395, 153.06 -27.3925, 153.0275 -27.3925, 153.0275 -27.395, 153.025 -27.395, 153.025 -27.3975, 153.0175 -27.3975, 153.0175 -27.4, 153.0125 -27.4, 153.0125 -27.4025, 153.005 -27.4025, 153.005 -27.405, 153.0025 -27.405, 152.9975 -27.405, 152.9975 -27.4025, 152.9925 -27.4025, 152.9925 -27.4, 152.9875 -27.4, 152.9825 -27.4, 152.9825 -27.3975, 152.98 -27.3975, 152.98 -27.3925, 152.975 -27.3925, 152.975 -27.3875, 152.97 -27.3875, 152.96 -27.3875, 152.96 -27.39, 152.955 -27.39, 152.955 -27.3925, 152.945 -27.3925, 152.94 -27.3925, 152.9375 -27.3925, 152.9375 -27.39, 152.925 -27.39, 152.925 -27.385, 152.925 -27.3825, 152.93 -27.3825, 152.9325 -27.3825, 152.9325 -27.38, 152.9375 -27.38, 152.9375 -27.3825, 152.94 -27.3825, 152.94 -27.38, 152.9475 -27.38, 152.9475 -27.3825, 152.9525 -27.3825, 152.9525 -27.38, 152.965 -27.38, 152.9675 -27.38, 152.9675 -27.3775, 152.98 -27.3775, 152.98 -27.375, 152.9825 -27.375, 152.9825 -27.3725, 152.985 -27.3725, 152.985 -27.37, 152.9875 -27.37, 152.9875 -27.3675, 152.99 -27.3675, 152.99 -27.3625, 152.9925 -27.3625, 152.9925 -27.355, 152.995 -27.355, 152.995 -27.3525, 153 -27.3525, 153 -27.35, 153.005 -27.35, 153.01 -27.35, 153.01 -27.3475, 153.0175 -27.3475, 153.0175 -27.335, 153.02 -27.335, 153.02 -27.33, 153.0225 -27.33, 153.0225 -27.3275, 153.025 -27.3275, 153.025 -27.325, 153.0275 -27.325, 153.0275 -27.3225, 153.03 -27.3225, 153.03 -27.32, 153.0325 -27.32, 153.0325 -27.3175, 153.035 -27.3175, 153.035 -27.305, 153.0375 -27.305, 153.0375 -27.3, 153.04 -27.3, 153.04 -27.2975, 153.0425 -27.2975, 153.0425 -27.2825, 153.04 -27.2825, 153.04 -27.28, 153.0425 -27.28, 153.05 -27.28, 153.06 -27.28))"^^geo:wktLiteral - ] ; -. 
- -sand:cc12109445 - a - geo:Feature , - geofab:ContractedCatchment ; - rdfs:label "Contracted Catchment 12109445" ; - dcterms:identifier "cc12109445"^^xsd:token ; - geo:hasGeometry [ - a geo:Geometry ; - geo:asGeoJSON '{"type": "Polygon", "coordinates": [[[153.0025, -27.2775], [153.0025, -27.28], [153.005, -27.28], [153.005, -27.285], [153.0075, -27.285], [153.015, -27.285], [153.015, -27.29], [153.0175, -27.29], [153.0175, -27.2925], [153.0175, -27.3025], [153.02, -27.3025], [153.02, -27.305], [153.0225, -27.305], [153.0225, -27.31], [153.0175, -27.31], [153.0175, -27.3125], [153.015, -27.3125], [153.015, -27.315], [153.015, -27.3175], [153.0175, -27.3175], [153.0175, -27.32], [153.02, -27.32], [153.02, -27.3225], [153.0275, -27.3225], [153.0275, -27.325], [153.025, -27.325], [153.025, -27.3275], [153.0225, -27.3275], [153.0225, -27.33], [153.02, -27.33], [153.02, -27.335], [153.0175, -27.335], [153.0175, -27.3475], [153.01, -27.3475], [153.01, -27.35], [153.005, -27.35], [153, -27.35], [153, -27.3525], [152.995, -27.3525], [152.995, -27.355], [152.9925, -27.355], [152.9925, -27.3625], [152.99, -27.3625], [152.99, -27.3675], [152.9875, -27.3675], [152.9875, -27.37], [152.985, -27.37], [152.985, -27.3725], [152.9825, -27.3725], [152.9825, -27.375], [152.98, -27.375], [152.98, -27.3775], [152.9675, -27.3775], [152.9675, -27.38], [152.965, -27.38], [152.9525, -27.38], [152.9525, -27.3825], [152.9475, -27.3825], [152.9475, -27.38], [152.94, -27.38], [152.94, -27.3825], [152.9375, -27.3825], [152.9375, -27.38], [152.9325, -27.38], [152.9325, -27.3825], [152.93, -27.3825], [152.925, -27.3825], [152.925, -27.385], [152.92, -27.385], [152.92, -27.3825], [152.9075, -27.3825], [152.9075, -27.38], [152.9075, -27.375], [152.9075, -27.3725], [152.915, -27.3725], [152.915, -27.37], [152.92, -27.37], [152.92, -27.3675], [152.9225, -27.3675], [152.9225, -27.365], [152.925, -27.365], [152.925, -27.3625], [152.9275, -27.3625], [152.9275, -27.36], [152.9275, -27.3575], [152.925, -27.3575], [152.925, -27.355], [152.9225, -27.355], [152.9225, -27.3525], [152.92, -27.3525], [152.92, -27.35], [152.9175, -27.35], [152.9175, -27.345], [152.92, -27.345], [152.92, -27.3325], [152.9175, -27.3325], [152.9175, -27.33], [152.915, -27.33], [152.915, -27.3275], [152.9125, -27.3275], [152.9125, -27.325], [152.9125, -27.3225], [152.9225, -27.3225], [152.9225, -27.32], [152.925, -27.32], [152.925, -27.3175], [152.9275, -27.3175], [152.9275, -27.315], [152.93, -27.315], [152.93, -27.3125], [152.9325, -27.3125], [152.9325, -27.31], [152.935, -27.31], [152.935, -27.305], [152.94, -27.305], [152.94, -27.3025], [152.9425, -27.3025], [152.9425, -27.3], [152.945, -27.3], [152.945, -27.2975], [152.95, -27.2975], [152.95, -27.295], [152.955, -27.295], [152.9575, -27.295], [152.9575, -27.2925], [152.96, -27.2925], [152.96, -27.29], [152.9625, -27.29], [152.9625, -27.2875], [152.9675, -27.2875], [152.9675, -27.285], [152.9725, -27.285], [152.9725, -27.2825], [152.9775, -27.2825], [152.9775, -27.28], [152.98, -27.28], [152.9925, -27.28], [152.9925, -27.2775], [152.9975, -27.2775], [153.0025, -27.2775]]]}'^^geo:geoJSONLiteral ; - geo:asWKT "POLYGON ((153.0025 -27.2775, 153.0025 -27.28, 153.005 -27.28, 153.005 -27.285, 153.0075 -27.285, 153.015 -27.285, 153.015 -27.29, 153.0175 -27.29, 153.0175 -27.2925, 153.0175 -27.3025, 153.02 -27.3025, 153.02 -27.305, 153.0225 -27.305, 153.0225 -27.31, 153.0175 -27.31, 153.0175 -27.3125, 153.015 -27.3125, 153.015 -27.315, 153.015 -27.3175, 153.0175 -27.3175, 153.0175 -27.32, 153.02 -27.32, 153.02 
-27.3225, 153.0275 -27.3225, 153.0275 -27.325, 153.025 -27.325, 153.025 -27.3275, 153.0225 -27.3275, 153.0225 -27.33, 153.02 -27.33, 153.02 -27.335, 153.0175 -27.335, 153.0175 -27.3475, 153.01 -27.3475, 153.01 -27.35, 153.005 -27.35, 153 -27.35, 153 -27.3525, 152.995 -27.3525, 152.995 -27.355, 152.9925 -27.355, 152.9925 -27.3625, 152.99 -27.3625, 152.99 -27.3675, 152.9875 -27.3675, 152.9875 -27.37, 152.985 -27.37, 152.985 -27.3725, 152.9825 -27.3725, 152.9825 -27.375, 152.98 -27.375, 152.98 -27.3775, 152.9675 -27.3775, 152.9675 -27.38, 152.965 -27.38, 152.9525 -27.38, 152.9525 -27.3825, 152.9475 -27.3825, 152.9475 -27.38, 152.94 -27.38, 152.94 -27.3825, 152.9375 -27.3825, 152.9375 -27.38, 152.9325 -27.38, 152.9325 -27.3825, 152.93 -27.3825, 152.925 -27.3825, 152.925 -27.385, 152.92 -27.385, 152.92 -27.3825, 152.9075 -27.3825, 152.9075 -27.38, 152.9075 -27.375, 152.9075 -27.3725, 152.915 -27.3725, 152.915 -27.37, 152.92 -27.37, 152.92 -27.3675, 152.9225 -27.3675, 152.9225 -27.365, 152.925 -27.365, 152.925 -27.3625, 152.9275 -27.3625, 152.9275 -27.36, 152.9275 -27.3575, 152.925 -27.3575, 152.925 -27.355, 152.9225 -27.355, 152.9225 -27.3525, 152.92 -27.3525, 152.92 -27.35, 152.9175 -27.35, 152.9175 -27.345, 152.92 -27.345, 152.92 -27.3325, 152.9175 -27.3325, 152.9175 -27.33, 152.915 -27.33, 152.915 -27.3275, 152.9125 -27.3275, 152.9125 -27.325, 152.9125 -27.3225, 152.9225 -27.3225, 152.9225 -27.32, 152.925 -27.32, 152.925 -27.3175, 152.9275 -27.3175, 152.9275 -27.315, 152.93 -27.315, 152.93 -27.3125, 152.9325 -27.3125, 152.9325 -27.31, 152.935 -27.31, 152.935 -27.305, 152.94 -27.305, 152.94 -27.3025, 152.9425 -27.3025, 152.9425 -27.3, 152.945 -27.3, 152.945 -27.2975, 152.95 -27.2975, 152.95 -27.295, 152.955 -27.295, 152.9575 -27.295, 152.9575 -27.2925, 152.96 -27.2925, 152.96 -27.29, 152.9625 -27.29, 152.9625 -27.2875, 152.9675 -27.2875, 152.9675 -27.285, 152.9725 -27.285, 152.9725 -27.2825, 152.9775 -27.2825, 152.9775 -27.28, 152.98 -27.28, 152.9925 -27.28, 152.9925 -27.2775, 152.9975 -27.2775, 153.0025 -27.2775))"^^geo:wktLiteral - ] ; -. - -sand:cpc - a geo:Feature ; - rdfs:label "Carseldine Police Station" ; - dcterms:identifier "cpc"^^xsd:token ; - geo:hasGeometry [ - a geo:Geometry ; - geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0144819, -27.3506599]}"^^geo:geoJSONLiteral ; - geo:asWKT "POINT (153.0144819 -27.3506599)"^^geo:wktLiteral - ] ; -. 
- -sand:f001 - a geo:Feature ; - rdfs:label "Flood 001" ; - dcterms:identifier "f001"^^xsd:token ; - geo:hasGeometry [ - a geo:Geometry ; - geo:asGeoJSON "{ \"type\": \"Polygon\", \"coordinates\": [ [ [ 153.064893899999987, -27.2909981 ], [ 153.0648081, -27.2911506 ], [ 153.064475499999986, -27.2912364 ], [ 153.064078599999988, -27.2912269 ], [ 153.0635636, -27.291265 ], [ 153.0633383, -27.2913604 ], [ 153.0632417, -27.2914462 ], [ 153.0631559, -27.2915701 ], [ 153.0630808, -27.2917036 ], [ 153.06307, -27.2917704 ], [ 153.0631773, -27.2918943 ], [ 153.0633168, -27.2920564 ], [ 153.0634241, -27.2921613 ], [ 153.063767399999989, -27.2921994 ], [ 153.0642824, -27.2922757 ], [ 153.064400400000011, -27.292371 ], [ 153.0644111, -27.2926761 ], [ 153.0643897, -27.2928764 ], [ 153.0643682, -27.2930766 ], [ 153.06434680000001, -27.2932196 ], [ 153.0642824, -27.2934675 ], [ 153.0642824, -27.2935628 ], [ 153.0643682, -27.2936391 ], [ 153.0647223, -27.2937345 ], [ 153.0648296, -27.293744 ], [ 153.064893899999987, -27.2909981 ] ] ] }"^^geo:geoJSONLiteral ; - geo:asWKT "POLYGON ((153.0648939 -27.2909981,153.0648081 -27.2911506,153.0644755 -27.2912364,153.0640786 -27.2912269,153.0635636 -27.291265,153.0633383 -27.2913604,153.0632417 -27.2914462,153.0631559 -27.2915701,153.0630808 -27.2917036,153.06307 -27.2917704,153.0631773 -27.2918943,153.0633168 -27.2920564,153.0634241 -27.2921613,153.0637674 -27.2921994,153.0642824 -27.2922757,153.0644004 -27.292371,153.0644111 -27.2926761,153.0643897 -27.2928764,153.0643682 -27.2930766,153.0643468 -27.2932196,153.0642824 -27.2934675,153.0642824 -27.2935628,153.0643682 -27.2936391,153.0647223 -27.2937345,153.0648296 -27.293744,153.0648939 -27.2909981))"^^geo:wktLiteral - ] ; -. - -sand:f023 - a geo:Feature ; - rdfs:label "Flood 023" ; - dcterms:identifier "f023"^^xsd:token ; - geo:hasGeometry [ - a geo:Geometry ; - geo:asGeoJSON "{ \"type\": \"Polygon\", \"coordinates\": [ [ [ 153.06487820000001, -27.30059 ], [ 153.0648031, -27.301019 ], [ 153.0648138, -27.3012955 ], [ 153.0648889, -27.3015815 ], [ 153.0648567, -27.3016768 ], [ 153.064824499999986, -27.3018198 ], [ 153.0648138, -27.3020295 ], [ 153.064824499999986, -27.3022965 ], [ 153.0647387, -27.3024109 ], [ 153.0641808, -27.3024776 ], [ 153.063698, -27.3025634 ], [ 153.0634512, -27.3026302 ], [ 153.063419, -27.3027827 ], [ 153.063440500000013, -27.303002 ], [ 153.0634619, -27.303307 ], [ 153.063622900000013, -27.3034501 ], [ 153.0638696, -27.3034882 ], [ 153.0643095, -27.3035454 ], [ 153.0645456, -27.3036026 ], [ 153.0647923, -27.3037456 ], [ 153.0650176, -27.3039553 ], [ 153.0652, -27.3041174 ], [ 153.065318, -27.3042413 ], [ 153.0653931, -27.3045083 ], [ 153.0655112, -27.3047371 ], [ 153.065790099999987, -27.3050803 ], [ 153.0660476, -27.3052519 ], [ 153.0656935, -27.3037551 ], [ 153.0652215, -27.30243 ], [ 153.06487820000001, -27.30059 ] ] ] }"^^geo:geoJSONLiteral ; - geo:asWKT "POLYGON ((153.0648782 -27.30059,153.0648031 -27.301019,153.0648138 -27.3012955,153.0648889 -27.3015815,153.0648567 -27.3016768,153.0648245 -27.3018198,153.0648138 -27.3020295,153.0648245 -27.3022965,153.0647387 -27.3024109,153.0641808 -27.3024776,153.063698 -27.3025634,153.0634512 -27.3026302,153.063419 -27.3027827,153.0634405 -27.303002,153.0634619 -27.303307,153.0636229 -27.3034501,153.0638696 -27.3034882,153.0643095 -27.3035454,153.0645456 -27.3036026,153.0647923 -27.3037456,153.0650176 -27.3039553,153.0652 -27.3041174,153.065318 -27.3042413,153.0653931 -27.3045083,153.0655112 -27.3047371,153.0657901 -27.3050803,153.0660476 
-27.3052519,153.0656935 -27.3037551,153.0652215 -27.30243,153.0648782 -27.30059))"^^geo:wktLiteral - ] ; -. - -sand:f332 - a geo:Feature ; - rdfs:label "Flood 332" ; - dcterms:identifier "f332"^^xsd:token ; - geo:hasGeometry [ - a geo:Geometry ; - geo:asGeoJSON "{ \"type\": \"Polygon\", \"coordinates\": [ [ [ 153.068289099999987, -27.3113685 ], [ 153.0681389, -27.3108346 ], [ 153.0676454, -27.3103961 ], [ 153.0673021, -27.3096144 ], [ 153.0670231, -27.3088708 ], [ 153.066615399999989, -27.3088327 ], [ 153.0659932, -27.3089662 ], [ 153.0656928, -27.3091568 ], [ 153.065564, -27.3095381 ], [ 153.0658215, -27.310377 ], [ 153.0659073, -27.3107774 ], [ 153.0660361, -27.3111587 ], [ 153.0665725, -27.3113685 ], [ 153.066744199999988, -27.3115973 ], [ 153.0674094, -27.3130272 ], [ 153.0676669, -27.3135419 ], [ 153.0680102, -27.3142473 ], [ 153.0685466, -27.3151243 ], [ 153.0693191, -27.3150862 ], [ 153.0698126, -27.3147049 ], [ 153.069877, -27.3145143 ], [ 153.06970530000001, -27.3140376 ], [ 153.0694479, -27.3134085 ], [ 153.069147500000014, -27.31297 ], [ 153.0688041, -27.3124552 ], [ 153.068375, -27.3120548 ], [ 153.068074599999989, -27.3117498 ], [ 153.068289099999987, -27.3113685 ] ] ] }"^^geo:geoJSONLiteral ; - geo:asWKT "POLYGON ((153.0682891 -27.3113685,153.0681389 -27.3108346,153.0676454 -27.3103961,153.0673021 -27.3096144,153.0670231 -27.3088708,153.0666154 -27.3088327,153.0659932 -27.3089662,153.0656928 -27.3091568,153.065564 -27.3095381,153.0658215 -27.310377,153.0659073 -27.3107774,153.0660361 -27.3111587,153.0665725 -27.3113685,153.0667442 -27.3115973,153.0674094 -27.3130272,153.0676669 -27.3135419,153.0680102 -27.3142473,153.0685466 -27.3151243,153.0693191 -27.3150862,153.0698126 -27.3147049,153.069877 -27.3145143,153.0697053 -27.3140376,153.0694479 -27.3134085,153.0691475 -27.31297,153.0688041 -27.3124552,153.068375 -27.3120548,153.0680746 -27.3117498,153.0682891 -27.3113685))"^^geo:wktLiteral - ] ; -. 
- -sand:f632 - a geo:Feature ; - rdfs:label "Flood 632" ; - dcterms:identifier "f632"^^xsd:token ; - geo:hasGeometry [ - a geo:Geometry ; - geo:asGeoJSON "{ \"type\": \"Polygon\", \"coordinates\": [ [ [ 153.0649154, -27.2906357 ], [ 153.0650656, -27.2892818 ], [ 153.0651407, -27.288233 ], [ 153.06513, -27.287413 ], [ 153.0650656, -27.2859541 ], [ 153.0649905, -27.2861353 ], [ 153.065012, -27.2863737 ], [ 153.065001200000012, -27.2868218 ], [ 153.0649583, -27.2871079 ], [ 153.0648296, -27.2873463 ], [ 153.0646472, -27.2873939 ], [ 153.064604300000013, -27.2875274 ], [ 153.0646365, -27.2877849 ], [ 153.0646686, -27.2879183 ], [ 153.0646686, -27.2882711 ], [ 153.0646365, -27.2885762 ], [ 153.0642609, -27.2886716 ], [ 153.0640678, -27.2888623 ], [ 153.064035600000011, -27.2890816 ], [ 153.064293099999986, -27.2894248 ], [ 153.064379, -27.2897204 ], [ 153.0642288, -27.2899206 ], [ 153.064057100000014, -27.2899969 ], [ 153.0639605, -27.2902353 ], [ 153.0639927, -27.2904069 ], [ 153.064110699999986, -27.2905309 ], [ 153.0642824, -27.2906644 ], [ 153.064497, -27.2907216 ], [ 153.064657899999986, -27.2907406 ], [ 153.064818800000012, -27.2907406 ], [ 153.0649154, -27.2906357 ] ] ] }"^^geo:geoJSONLiteral ; - geo:asWKT "POLYGON ((153.0649154 -27.2906357,153.0650656 -27.2892818,153.0651407 -27.288233,153.06513 -27.287413,153.0650656 -27.2859541,153.0649905 -27.2861353,153.065012 -27.2863737,153.0650012 -27.2868218,153.0649583 -27.2871079,153.0648296 -27.2873463,153.0646472 -27.2873939,153.0646043 -27.2875274,153.0646365 -27.2877849,153.0646686 -27.2879183,153.0646686 -27.2882711,153.0646365 -27.2885762,153.0642609 -27.2886716,153.0640678 -27.2888623,153.0640356 -27.2890816,153.0642931 -27.2894248,153.064379 -27.2897204,153.0642288 -27.2899206,153.0640571 -27.2899969,153.0639605 -27.2902353,153.0639927 -27.2904069,153.0641107 -27.2905309,153.0642824 -27.2906644,153.064497 -27.2907216,153.0646579 -27.2907406,153.0648188 -27.2907406,153.0649154 -27.2906357))"^^geo:wktLiteral - ] ; -. - -sand:fp - a geo:Feature ; - rdfs:label "Flinder Parade" ; - dcterms:identifier "fp"^^xsd:token ; - geo:hasGeometry [ - a geo:Geometry ; - geo:asGeoJSON "{ \"type\": \"LineString\", \"coordinates\": [ [ 153.074756900000011, -27.3203138 ], [ 153.0727077, -27.3183121 ], [ 153.0715276, -27.3170824 ], [ 153.070519, -27.3157669 ], [ 153.0694891, -27.3143847 ], [ 153.067751, -27.311115 ], [ 153.0664635, -27.3072446 ], [ 153.0656267, -27.3047468 ], [ 153.065111699999989, -27.3031262 ], [ 153.0647898, -27.301677 ], [ 153.064510899999988, -27.3000372 ], [ 153.0644036, -27.2984546 ], [ 153.0643392, -27.2973296 ], [ 153.06459670000001, -27.2953656 ], [ 153.0646396, -27.2936494 ], [ 153.0644465, -27.2922764 ] ] }"^^geo:geoJSONLiteral ; - geo:asWKT "LINESTRING (153.0747569 -27.3203138,153.0727077 -27.3183121,153.0715276 -27.3170824,153.070519 -27.3157669,153.0694891 -27.3143847,153.067751 -27.311115,153.0664635 -27.3072446,153.0656267 -27.3047468,153.0651117 -27.3031262,153.0647898 -27.301677,153.0645109 -27.3000372,153.0644036 -27.2984546,153.0643392 -27.2973296,153.0645967 -27.2953656,153.0646396 -27.2936494,153.0644465 -27.2922764)"^^geo:wktLiteral - ] ; -. - -sand:jcabi - a geo:Feature ; - rdfs:label "Jacana Centre for Acquired Brain Injury" ; - dcterms:identifier "jcabi"^^xsd:token ; - geo:hasGeometry [ - a geo:Geometry ; - geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0632873, -27.2918652]}"^^geo:geoJSONLiteral ; - geo:asWKT "POINT (153.0632873 -27.2918652)"^^geo:wktLiteral - ] ; -. 
- -sand:rps - a geo:Feature ; - rdfs:label "Redcliffe Police Station" ; - dcterms:identifier "rps"^^xsd:token ; - geo:hasGeometry [ - a geo:Geometry ; - geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.1143102, -27.2234024]}"^^geo:geoJSONLiteral ; - geo:asWKT "POINT (153.1143102 -27.2234024)"^^geo:wktLiteral - ] ; -. - -sand:sac - a geo:Feature ; - rdfs:label "Sandgate Aquatic Centre" ; - dcterms:identifier "sac"^^xsd:token ; - geo:hasGeometry [ - a geo:Geometry ; - geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0688897, -27.3122011]}"^^geo:geoJSONLiteral ; - geo:asWKT "POINT (153.0688897 -27.3122011)"^^geo:wktLiteral - ] ; -. - -sand:sps - a geo:Feature ; - rdfs:label "Sandgate Police Station" ; - dcterms:identifier "sps"^^xsd:token ; - geo:hasGeometry [ - a geo:Geometry ; - geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0677583, -27.318185]}"^^geo:geoJSONLiteral ; - geo:asWKT "POINT (153.0677583 -27.318185)"^^geo:wktLiteral - ] ; -. - -sand:src - a geo:Feature ; - rdfs:label "Sandgate Respite Centre" ; - dcterms:identifier "src"^^xsd:token ; - geo:hasGeometry [ - a geo:Geometry ; - geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0614757, -27.3111489]}"^^geo:geoJSONLiteral ; - geo:asWKT "POINT (153.0614757 -27.3111489)"^^geo:wktLiteral - ] ; -. - -sand:srca - a geo:Feature ; - rdfs:label "Sandgate Respite Centre Area" ; - dcterms:identifier "srca"^^xsd:token ; - geo:hasGeometry [ - a geo:Geometry ; - geo:asGeoJSON "{\"type\": \"Polygon\", \"coordinates\": [[[153.0606281,-27.3096141], [153.0604564,-27.3105197], [153.0600487,-27.3109296], [153.0607354,-27.3127218], [153.063203,-27.3121212], [153.0621623,-27.3095187], [153.0617868,-27.3098333], [153.0606281,-27.3096141]]]}"^^geo:geoJSONLiteral ; - geo:asWKT "POLYGON ((153.0606281 -27.3096141, 153.0604564 -27.3105197, 153.0600487 -27.3109296, 153.0607354 -27.3127218, 153.063203 -27.3121212, 153.0621623 -27.3095187, 153.0617868 -27.3098333, 153.0606281 -27.3096141))"^^geo:wktLiteral - ] ; -. 
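This deletion is part of a broader test consolidation in these patches: the test fixtures below stop globbing tests/data/*/input/*.ttl and instead load a single consolidated file from test_data/ into a pyoxigraph Store. The fixture pattern, as it appears in the updated tests further down (a sketch; the session scope and the store.load call mirror the diff):

from pathlib import Path

import pytest
from pyoxigraph import Store


@pytest.fixture(scope="session")
def test_store() -> Store:
    # Create a new pyoxigraph Store holding only the consolidated sample data
    store = Store()
    for file in Path(__file__).parent.glob("../test_data/spaceprez.ttl"):
        store.load(file.read_bytes(), "text/turtle")
    return store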
diff --git a/test_data/spaceprez.ttl b/test_data/spaceprez.ttl old mode 100644 new mode 100755 diff --git a/test_data/vocprez.ttl b/test_data/vocprez.ttl old mode 100644 new mode 100755 diff --git a/tests/__init__.py b/tests/__init__.py old mode 100644 new mode 100755 diff --git a/tests/_test_cql.py b/tests/_test_cql.py old mode 100644 new mode 100755 diff --git a/tests/_test_curie_generation.py b/tests/_test_curie_generation.py old mode 100644 new mode 100755 diff --git a/tests/conftest.py b/tests/conftest.py old mode 100644 new mode 100755 diff --git a/tests/data/bnode_depth/bnode_depth-1.ttl b/tests/data/bnode_depth/bnode_depth-1.ttl old mode 100644 new mode 100755 diff --git a/tests/data/bnode_depth/bnode_depth-2-2.ttl b/tests/data/bnode_depth/bnode_depth-2-2.ttl old mode 100644 new mode 100755 diff --git a/tests/data/bnode_depth/bnode_depth-2.ttl b/tests/data/bnode_depth/bnode_depth-2.ttl old mode 100644 new mode 100755 diff --git a/tests/data/bnode_depth/bnode_depth-4.ttl b/tests/data/bnode_depth/bnode_depth-4.ttl old mode 100644 new mode 100755 diff --git a/tests/data/catprez/expected_responses/resource_anot.ttl b/tests/data/catprez/expected_responses/resource_anot.ttl old mode 100644 new mode 100755 diff --git a/tests/data/catprez/expected_responses/resource_listing_anot.ttl b/tests/data/catprez/expected_responses/resource_listing_anot.ttl old mode 100644 new mode 100755 diff --git a/tests/data/catprez/expected_responses/top_level_catalog_anot.ttl b/tests/data/catprez/expected_responses/top_level_catalog_anot.ttl old mode 100644 new mode 100755 diff --git a/tests/data/catprez/expected_responses/top_level_catalog_listing_anot.ttl b/tests/data/catprez/expected_responses/top_level_catalog_listing_anot.ttl old mode 100644 new mode 100755 diff --git a/tests/data/catprez/input/catprez.ttl b/tests/data/catprez/input/catprez.ttl old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/example01.json b/tests/data/cql/input/example01.json old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/example02.json b/tests/data/cql/input/example02.json old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/example03.json b/tests/data/cql/input/example03.json old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/example05a.json b/tests/data/cql/input/example05a.json old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/example05b.json b/tests/data/cql/input/example05b.json old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/example06b.json b/tests/data/cql/input/example06b.json old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/example07.json b/tests/data/cql/input/example07.json old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/example08.json b/tests/data/cql/input/example08.json old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/example09.json b/tests/data/cql/input/example09.json old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/example10.json b/tests/data/cql/input/example10.json old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/example11.json b/tests/data/cql/input/example11.json old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/example12.json b/tests/data/cql/input/example12.json old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/example14.json b/tests/data/cql/input/example14.json old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/example15.json b/tests/data/cql/input/example15.json old mode 100644 new 
mode 100755 diff --git a/tests/data/cql/input/example17.json b/tests/data/cql/input/example17.json old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/example29.json b/tests/data/cql/input/example29.json old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/example31.json b/tests/data/cql/input/example31.json old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/example32.json b/tests/data/cql/input/example32.json old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/example33.json b/tests/data/cql/input/example33.json old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/example34.json b/tests/data/cql/input/example34.json old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/example35.json b/tests/data/cql/input/example35.json old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/example39.json b/tests/data/cql/input/example39.json old mode 100644 new mode 100755 diff --git a/tests/data/cql/input/geo_intersects.json b/tests/data/cql/input/geo_intersects.json old mode 100644 new mode 100755 diff --git a/tests/data/object/expected_responses/fc.ttl b/tests/data/object/expected_responses/fc.ttl old mode 100644 new mode 100755 diff --git a/tests/data/object/expected_responses/feature.ttl b/tests/data/object/expected_responses/feature.ttl old mode 100644 new mode 100755 diff --git a/tests/data/profiles/remote_profile.ttl b/tests/data/profiles/remote_profile.ttl old mode 100644 new mode 100755 diff --git a/tests/data/search/expected_responses/filter_to_focus_search.ttl b/tests/data/search/expected_responses/filter_to_focus_search.ttl old mode 100644 new mode 100755 diff --git a/tests/data/search/expected_responses/focus_to_filter_search.ttl b/tests/data/search/expected_responses/focus_to_filter_search.ttl old mode 100644 new mode 100755 diff --git a/tests/data/spaceprez/expected_responses/dataset_anot.ttl b/tests/data/spaceprez/expected_responses/dataset_anot.ttl old mode 100644 new mode 100755 diff --git a/tests/data/spaceprez/expected_responses/dataset_listing_anot.ttl b/tests/data/spaceprez/expected_responses/dataset_listing_anot.ttl old mode 100644 new mode 100755 diff --git a/tests/data/spaceprez/expected_responses/feature_anot.ttl b/tests/data/spaceprez/expected_responses/feature_anot.ttl old mode 100644 new mode 100755 diff --git a/tests/data/spaceprez/expected_responses/feature_collection_anot.ttl b/tests/data/spaceprez/expected_responses/feature_collection_anot.ttl old mode 100644 new mode 100755 diff --git a/tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl b/tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl old mode 100644 new mode 100755 diff --git a/tests/data/spaceprez/expected_responses/feature_listing_anot.ttl b/tests/data/spaceprez/expected_responses/feature_listing_anot.ttl old mode 100644 new mode 100755 diff --git a/tests/data/spaceprez/input/geofabric_small.ttl b/tests/data/spaceprez/input/geofabric_small.ttl old mode 100644 new mode 100755 diff --git a/tests/data/spaceprez/input/gnaf_small.ttl b/tests/data/spaceprez/input/gnaf_small.ttl old mode 100644 new mode 100755 diff --git a/tests/data/spaceprez/input/labels.ttl b/tests/data/spaceprez/input/labels.ttl old mode 100644 new mode 100755 diff --git a/tests/data/spaceprez/input/multiple_object.ttl b/tests/data/spaceprez/input/multiple_object.ttl old mode 100644 new mode 100755 diff --git a/tests/data/spaceprez/input/redirect-foaf-homepage.ttl 
b/tests/data/spaceprez/input/redirect-foaf-homepage.ttl old mode 100644 new mode 100755 diff --git a/tests/data/spaceprez/input/sandgate.ttl b/tests/data/spaceprez/input/sandgate.ttl old mode 100644 new mode 100755 diff --git a/tests/data/spaceprez/input/sandgate/catchments.geojson b/tests/data/spaceprez/input/sandgate/catchments.geojson old mode 100644 new mode 100755 diff --git a/tests/data/spaceprez/input/sandgate/facilities.geojson b/tests/data/spaceprez/input/sandgate/facilities.geojson old mode 100644 new mode 100755 diff --git a/tests/data/spaceprez/input/sandgate/floods.geojson b/tests/data/spaceprez/input/sandgate/floods.geojson old mode 100644 new mode 100755 diff --git a/tests/data/spaceprez/input/sandgate/roads.geojson b/tests/data/spaceprez/input/sandgate/roads.geojson old mode 100644 new mode 100755 diff --git a/tests/data/spaceprez/input/sandgate/sandgate.json b/tests/data/spaceprez/input/sandgate/sandgate.json old mode 100644 new mode 100755 diff --git a/tests/data/vocprez/expected_responses/beddingsurfacestructure_top_concepts.ttl b/tests/data/vocprez/expected_responses/beddingsurfacestructure_top_concepts.ttl old mode 100644 new mode 100755 diff --git a/tests/data/vocprez/expected_responses/collection_listing_anot.ttl b/tests/data/vocprez/expected_responses/collection_listing_anot.ttl old mode 100644 new mode 100755 diff --git a/tests/data/vocprez/expected_responses/collection_listing_item.ttl b/tests/data/vocprez/expected_responses/collection_listing_item.ttl old mode 100644 new mode 100755 diff --git a/tests/data/vocprez/expected_responses/concept-coal.ttl b/tests/data/vocprez/expected_responses/concept-coal.ttl old mode 100644 new mode 100755 diff --git a/tests/data/vocprez/expected_responses/concept-open-cut-coal-mining.ttl b/tests/data/vocprez/expected_responses/concept-open-cut-coal-mining.ttl old mode 100644 new mode 100755 diff --git a/tests/data/vocprez/expected_responses/concept-with-2-narrower-concepts.ttl b/tests/data/vocprez/expected_responses/concept-with-2-narrower-concepts.ttl old mode 100644 new mode 100755 diff --git a/tests/data/vocprez/expected_responses/concept_anot.ttl b/tests/data/vocprez/expected_responses/concept_anot.ttl old mode 100644 new mode 100755 diff --git a/tests/data/vocprez/expected_responses/concept_scheme_no_children.ttl b/tests/data/vocprez/expected_responses/concept_scheme_no_children.ttl old mode 100644 new mode 100755 diff --git a/tests/data/vocprez/expected_responses/concept_scheme_top_concepts_with_children.ttl b/tests/data/vocprez/expected_responses/concept_scheme_top_concepts_with_children.ttl old mode 100644 new mode 100755 diff --git a/tests/data/vocprez/expected_responses/concept_scheme_with_children.ttl b/tests/data/vocprez/expected_responses/concept_scheme_with_children.ttl old mode 100644 new mode 100755 diff --git a/tests/data/vocprez/expected_responses/empty.ttl b/tests/data/vocprez/expected_responses/empty.ttl old mode 100644 new mode 100755 diff --git a/tests/data/vocprez/expected_responses/vocab_listing_anot.ttl b/tests/data/vocprez/expected_responses/vocab_listing_anot.ttl old mode 100644 new mode 100755 diff --git a/tests/data/vocprez/input/absolute-collection.ttl b/tests/data/vocprez/input/absolute-collection.ttl old mode 100644 new mode 100755 diff --git a/tests/data/vocprez/input/alteration-types.ttl b/tests/data/vocprez/input/alteration-types.ttl old mode 100644 new mode 100755 diff --git a/tests/data/vocprez/input/beddingsurfacestructure.ttl b/tests/data/vocprez/input/beddingsurfacestructure.ttl old mode 
100644 new mode 100755 diff --git a/tests/data/vocprez/input/borehole-purpose-no-children.ttl b/tests/data/vocprez/input/borehole-purpose-no-children.ttl old mode 100644 new mode 100755 diff --git a/tests/data/vocprez/input/borehole-purpose.ttl b/tests/data/vocprez/input/borehole-purpose.ttl old mode 100644 new mode 100755 diff --git a/tests/data/vocprez/input/catalog-of-vocabs.ttl b/tests/data/vocprez/input/catalog-of-vocabs.ttl old mode 100644 new mode 100755 diff --git a/tests/data/vocprez/input/contacttype.ttl b/tests/data/vocprez/input/contacttype.ttl old mode 100644 new mode 100755 diff --git a/tests/data/vocprez/input/dublin_core_terms.ttl b/tests/data/vocprez/input/dublin_core_terms.ttl old mode 100644 new mode 100755 diff --git a/tests/data/vocprez/input/reg-status.ttl b/tests/data/vocprez/input/reg-status.ttl old mode 100644 new mode 100755 diff --git a/tests/data/vocprez/input/vocab-derivation-modes.ttl b/tests/data/vocprez/input/vocab-derivation-modes.ttl old mode 100644 new mode 100755 diff --git a/tests/test_bnode.py b/tests/test_bnode.py old mode 100644 new mode 100755 diff --git a/tests/test_count.py b/tests/test_count.py old mode 100644 new mode 100755 diff --git a/tests/test_curie_endpoint.py b/tests/test_curie_endpoint.py old mode 100644 new mode 100755 diff --git a/tests/test_dd_profiles.py b/tests/test_dd_profiles.py old mode 100644 new mode 100755 diff --git a/tests/test_endpoints_cache.py b/tests/test_endpoints_cache.py old mode 100644 new mode 100755 diff --git a/tests/test_endpoints_catprez.py b/tests/test_endpoints_catprez.py old mode 100644 new mode 100755 index b022649f..8f0016cd --- a/tests/test_endpoints_catprez.py +++ b/tests/test_endpoints_catprez.py @@ -18,7 +18,7 @@ def test_store() -> Store: # Create a new pyoxigraph Store store = Store() - for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"): + for file in Path(__file__).parent.glob("../test_data/catprez.ttl"): store.load(file.read_bytes(), "text/turtle") return store @@ -84,8 +84,16 @@ def test_catalog_listing_anot(client): f"/c/catalogs?_mediatype=text/turtle&_profile=prez:OGCListingProfile" ) response_graph = Graph().parse(data=r.text) - expected_response_1 = (URIRef("https://example.com/TopLevelCatalog"), RDF.type, DCAT.Catalog) - expected_response_2 = (URIRef("https://example.com/TopLevelCatalogTwo"), RDF.type, DCAT.Catalog) + expected_response_1 = ( + URIRef("https://example.com/TopLevelCatalog"), + RDF.type, + DCAT.Catalog, + ) + expected_response_2 = ( + URIRef("https://example.com/TopLevelCatalogTwo"), + RDF.type, + DCAT.Catalog, + ) assert next(response_graph.triples(expected_response_1)) assert next(response_graph.triples(expected_response_2)) @@ -93,15 +101,20 @@ def test_catalog_listing_anot(client): def test_catalog_anot(client, a_catalog_link): r = client.get(f"{a_catalog_link}?_mediatype=text/turtle") response_graph = Graph().parse(data=r.text) - expected_response = (URIRef("https://example.com/TopLevelCatalog"), RDF.type, DCAT.Catalog) + expected_response = ( + URIRef("https://example.com/TopLevelCatalog"), + RDF.type, + DCAT.Catalog, + ) assert next(response_graph.triples(expected_response)) def test_lower_level_listing_anot(client, a_catalog_link): r = client.get(f"{a_catalog_link}/collections?_mediatype=text/turtle") response_graph = Graph().parse(data=r.text) - expected_response = (URIRef("https://example.com/LowerLevelCatalog"), RDF.type, DCAT.Catalog) + expected_response = ( + URIRef("https://example.com/LowerLevelCatalog"), + RDF.type, + DCAT.Catalog, + ) assert 
next(response_graph.triples(expected_response)) - - - diff --git a/tests/test_endpoints_management.py b/tests/test_endpoints_management.py old mode 100644 new mode 100755 diff --git a/tests/test_endpoints_object.py b/tests/test_endpoints_object.py old mode 100644 new mode 100755 diff --git a/tests/test_endpoints_ok.py b/tests/test_endpoints_ok.py old mode 100644 new mode 100755 diff --git a/tests/test_endpoints_profiles.py b/tests/test_endpoints_profiles.py old mode 100644 new mode 100755 diff --git a/tests/test_endpoints_spaceprez.py b/tests/test_endpoints_spaceprez.py old mode 100644 new mode 100755 index c387170d..3e1e6a07 --- a/tests/test_endpoints_spaceprez.py +++ b/tests/test_endpoints_spaceprez.py @@ -5,7 +5,7 @@ from pyoxigraph.pyoxigraph import Store from rdflib import Graph, URIRef from rdflib.compare import isomorphic -from rdflib.namespace import RDF, DCAT, RDFS +from rdflib.namespace import RDF, DCAT, RDFS, GEO from prez.app import app from prez.dependencies import get_repo @@ -17,7 +17,7 @@ def test_store() -> Store: # Create a new pyoxigraph Store store = Store() - for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"): + for file in Path(__file__).parent.glob("../test_data/spaceprez.ttl"): store.load(file.read_bytes(), "text/turtle") return store @@ -57,109 +57,66 @@ def a_dataset_link(client): def an_fc_link(client, a_dataset_link): r = client.get(f"{a_dataset_link}/collections") g = Graph().parse(data=r.text) - member_uri = g.value( - URIRef("http://example.com/datasets/sandgate"), RDFS.member, None - ) - link = g.value(member_uri, URIRef(f"https://prez.dev/link", None)) - return link + links = g.objects(subject=None, predicate=URIRef(f"https://prez.dev/link")) + for link in links: + if link != a_dataset_link: + return link @pytest.fixture(scope="session") def a_feature_link(client, an_fc_link): r = client.get(f"{an_fc_link}/items") g = Graph().parse(data=r.text) - member_uri = g.value( - URIRef("http://example.com/datasets/sandgate/catchments"), RDFS.member, None - ) - link = g.value(member_uri, URIRef(f"https://prez.dev/link", None)) - return link + links = g.objects(subject=None, predicate=URIRef(f"https://prez.dev/link")) + for link in links: + if link != an_fc_link: + return link def test_dataset_anot(client, a_dataset_link): - r = client.get(f"{a_dataset_link}?_mediatype=text/anot+turtle") - response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/spaceprez/expected_responses/dataset_anot.ttl" - ) - assert isomorphic(response_graph, expected_graph), print( - f"RESPONSE GRAPH\n{response_graph.serialize()}," - f"EXPECTED GRAPH\n{expected_graph.serialize()}", - f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", - f"EXTRA TRIPLES\n{(response_graph - expected_graph).serialize()}", - ) - - -def test_feature_collection_anot(client, an_fc_link): - r = client.get(f"{an_fc_link}?_mediatype=text/anot+turtle") + r = client.get(f"{a_dataset_link}?_mediatype=text/turtle") response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/spaceprez/expected_responses/feature_collection_anot.ttl" - ) - assert isomorphic(response_graph, expected_graph), print( - f"RESPONSE GRAPH\n{response_graph.serialize()}," - f"EXPECTED GRAPH\n{expected_graph.serialize()}", - f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", - f"EXTRA TRIPLES\n{(response_graph - expected_graph).serialize()}", + expected_response_1 = ( + 
URIRef("https://example.com/Dataset"), + RDF.type, + DCAT.Dataset, ) + assert next(response_graph.triples(expected_response_1)) -def test_feature_anot(client, a_feature_link): - r = client.get(f"{a_feature_link}?_mediatype=text/anot+turtle") - response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/spaceprez/expected_responses/feature_anot.ttl" - ) - assert isomorphic(response_graph, expected_graph), print( - f"RESPONSE GRAPH\n{response_graph.serialize()}," - f"EXPECTED GRAPH\n{expected_graph.serialize()}", - f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", - f"EXTRA TRIPLES\n{(response_graph - expected_graph).serialize()}", - ) - -def test_dataset_listing_anot(client): - r = client.get("/s/catalogs?_mediatype=text/anot+turtle") +def test_feature_collection(client, an_fc_link): + r = client.get(f"{an_fc_link}?_mediatype=text/turtle") response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/spaceprez/expected_responses/dataset_listing_anot.ttl" - ) - assert response_graph.isomorphic(expected_graph), print( - f"RESPONSE GRAPH\n{response_graph.serialize()}," - f"EXPECTED GRAPH\n{expected_graph.serialize()}", - f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", - f"EXTRA TRIPLES\n{(response_graph - expected_graph).serialize()}", + expected_response_1 = ( + URIRef("https://example.com/FeatureCollection"), + RDF.type, + GEO.FeatureCollection, ) + assert next(response_graph.triples(expected_response_1)) - -def test_feature_collection_listing_anot(client, a_dataset_link): - r = client.get(f"{a_dataset_link}/collections?_mediatype=text/anot+turtle") +def test_feature(client, a_feature_link): + r = client.get(f"{a_feature_link}?_mediatype=text/turtle") response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl" + expected_response_1 = ( + URIRef("https://example.com/Feature1"), + RDF.type, + GEO.Feature, ) - assert response_graph.isomorphic(expected_graph), print( - f"RESPONSE GRAPH\n{response_graph.serialize()}," - f"EXPECTED GRAPH\n{expected_graph.serialize()}", - f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", - f"EXTRA TRIPLES\n{(response_graph - expected_graph).serialize()}", - ) - + assert next(response_graph.triples(expected_response_1)) def test_feature_listing_anot(client, an_fc_link): - r = client.get(f"{an_fc_link}/items?_mediatype=text/anot+turtle") + r = client.get(f"{an_fc_link}/items?_mediatype=text/turtle") response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent - / "../tests/data/spaceprez/expected_responses/feature_listing_anot.ttl" + expected_response_1 = ( + URIRef("https://example.com/Feature1"), + RDF.type, + GEO.Feature, ) - assert response_graph.isomorphic(expected_graph), print( - f"RESPONSE GRAPH\n{response_graph.serialize()}," - f"EXPECTED GRAPH\n{expected_graph.serialize()}", - f"MISSING TRIPLES\n{(expected_graph - response_graph).serialize()}", - f"EXTRA TRIPLES\n{(response_graph - expected_graph).serialize()}", + expected_response_2 = ( + URIRef("https://example.com/Feature2"), + RDF.type, + GEO.Feature, ) + assert next(response_graph.triples(expected_response_1)) + assert next(response_graph.triples(expected_response_2)) \ No newline at end of file diff --git a/tests/test_endpoints_vocprez.py 
b/tests/test_endpoints_vocprez.py old mode 100644 new mode 100755 diff --git a/tests/test_redirect_endpoint.py b/tests/test_redirect_endpoint.py old mode 100644 new mode 100755 diff --git a/tests/test_search.py b/tests/test_search.py old mode 100644 new mode 100755 diff --git a/tests/test_sparql.py b/tests/test_sparql.py old mode 100644 new mode 100755 From 40799e7f27a4f1201c64e01548ab0855e09d1282 Mon Sep 17 00:00:00 2001 From: david Date: Wed, 24 Jan 2024 01:58:05 +1000 Subject: [PATCH 06/25] further integration --- .../endpoints/extended_ogc_records.ttl | 10 ++-- prez/services/link_generation.py | 11 +++- prez/services/model_methods.py | 28 ++++----- temp/shacl-nodeshapes2sparql.py | 58 +++++++++++++++++++ 4 files changed, 87 insertions(+), 20 deletions(-) create mode 100644 temp/shacl-nodeshapes2sparql.py diff --git a/prez/reference_data/endpoints/extended_ogc_records.ttl b/prez/reference_data/endpoints/extended_ogc_records.ttl index 595ddc6e..e18b5849 100755 --- a/prez/reference_data/endpoints/extended_ogc_records.ttl +++ b/prez/reference_data/endpoints/extended_ogc_records.ttl @@ -20,8 +20,7 @@ endpoint:top-level-catalog-listing a ont:ListingEndpoint ; ?child . ?child a ?child_class . VALUES ?child_class { - - } + } } """ ] ; shext:limit 20 ; @@ -30,7 +29,7 @@ endpoint:top-level-catalog-listing a ont:ListingEndpoint ; endpoint:top-level-catalog-object a ont:ObjectEndpoint ; sh:targetNode "$object" ; - ont:deliversClasses dcat:Catalog ; # required for link generation for objects + sh:targetClass dcat:Catalog ; # endpoint CAN render objects of this class. ont:endpointTemplate "/catalogs/$object" ; ont:parentEndpoint endpoint:top-level-catalog-listing ; . @@ -55,8 +54,9 @@ endpoint:lower-level-catalog-object a ont:ObjectEndpoint ; sh:targetNode "$object" ; ont:deliversClasses dcat:Catalog ; # required for link generation for objects ont:endpointTemplate "/catalogs/$parent_1/collections/$object" ; - ont:parentToFocusRelation dcterms:hasPart ; - ont:parentEndpoint endpoint:lower-level-catalog-listing ; + ont:endpointRelation [ + sh:path dcterms:hasPart + ]; .
endpoint:resource-listing a ont:ListingEndpoint ; diff --git a/prez/services/link_generation.py b/prez/services/link_generation.py index ef07556f..c124135d 100755 --- a/prez/services/link_generation.py +++ b/prez/services/link_generation.py @@ -52,7 +52,16 @@ async def _add_prez_links(graph: Graph, repo: Repo, system_repo: Repo): uri_to_klasses[uri] = await get_classes(uri, repo) for uri, klasses in uri_to_klasses.items(): - await _create_internal_links_graph(uri, graph, repo, klasses, system_repo) + await _new_link_generation(uri, repo, klasses, system_repo) + # await _create_internal_links_graph(uri, graph, repo, klasses, system_repo) + +async def _new_link_generation(uri, repo: Repo, klasses, system_repo): + # get the endpoints that can deliver the class + query = f"""SELECT ?ep WHERE + {{ ?ep a <{ONT.ObjectEndpoint}> }}""" + # if there's a link generation query for the endpoint, run it + + _, tabular_results = await repo.send_queries([], [(None, query)]) async def _create_internal_links_graph(uri, graph, repo: Repo, klasses, system_repo): diff --git a/prez/services/model_methods.py b/prez/services/model_methods.py index 04935b2c..e32d2e90 100755 --- a/prez/services/model_methods.py +++ b/prez/services/model_methods.py @@ -19,18 +19,18 @@ async def get_classes( """ _, r = await repo.send_queries([], [(uri, q)]) tabular_result = r[0] # should only be one result - only one query sent - if endpoint != URIRef("https://prez.dev/endpoint/system/object"): - endpoint_classes = list( - endpoints_graph_cache.objects( - subject=endpoint, - predicate=URIRef("https://prez.dev/ont/deliversClasses"), - ) - ) - object_classes_delivered_by_endpoint = [] - for c in tabular_result[1]: - if URIRef(c["class"]["value"]) in endpoint_classes: - object_classes_delivered_by_endpoint.append(URIRef(c["class"]["value"])) - classes = frozenset(object_classes_delivered_by_endpoint) - else: - classes = frozenset([URIRef(c["class"]["value"]) for c in tabular_result[1]]) + # if endpoint != URIRef("https://prez.dev/endpoint/system/object"): + # endpoint_classes = list( + # endpoints_graph_cache.objects( + # subject=endpoint, + # predicate=URIRef("https://prez.dev/ont/deliversClasses"), + # ) + # ) + # object_classes_delivered_by_endpoint = [] + # for c in tabular_result[1]: + # if URIRef(c["class"]["value"]) in endpoint_classes: + # object_classes_delivered_by_endpoint.append(URIRef(c["class"]["value"])) + # classes = frozenset(object_classes_delivered_by_endpoint) + # else: + classes = frozenset([URIRef(c["class"]["value"]) for c in tabular_result[1]]) return classes diff --git a/temp/shacl-nodeshapes2sparql.py b/temp/shacl-nodeshapes2sparql.py new file mode 100644 index 00000000..ccb65970 --- /dev/null +++ b/temp/shacl-nodeshapes2sparql.py @@ -0,0 +1,58 @@ +from __future__ import annotations + +from typing import List, Optional + +from pydantic import BaseModel +from rdflib import URIRef +from rdflib.namespace import SH + +from temp.grammar import IRI + + +class SHACL(BaseModel): + class Config: + arbitrary_types_allowed = True + + def from_graph(self, graph): + raise NotImplementedError("Subclasses must implement this method.") + + def to_grammar(self): + raise NotImplementedError("Subclasses must implement this method.") + + + class NodeShape(SHACL): + uri: URIRef + nodeTarget: Optional[URIRef] + classTarget: Optional[List[URIRef]] + subjectsOfTarget: Optional[URIRef] + objectsOfTarget: Optional[URIRef] + propertyShapes: Optional[List[PropertyShape]] + + def from_graph(self, graph): + self.nodeTarget = 
next(graph.objects(self.uri, SH.targetNode), None) + self.classTarget = list(graph.objects(self.uri, SH.targetClass)) + self.subjectsOfTarget = next(graph.value(self.uri, SH.targetSubjectsOf), None) + self.objectsOfTarget = next(graph.objects(self.uri, SH.targetObjectsOf), None) + self.propertyShapes = list(graph.objects(self.uri, SH.property)) + + def to_grammar(self): + if self.nodeTarget: + pass # do not need to add any specific triples or the like + if self.classTarget: + pass + if self.subjectsOfTarget: + pass + if self.objectsOfTarget: + pass + if self.propertyShapes: + pass + + def _process_node_target(self): + target_uri = IRI(value=self.nodeTarget) + + def _process_property_shapes(self, property_shapes): + pass + + +class PropertyShape(SHACL): + uri: URIRef From 21deaeee2e1f101afe26afa57aba68e484e2848c Mon Sep 17 00:00:00 2001 From: david Date: Wed, 24 Jan 2024 09:57:18 +1000 Subject: [PATCH 07/25] further integration --- temp/shacl-nodeshapes2sparql.py | 74 ++++++++++++++++++--------------- 1 file changed, 41 insertions(+), 33 deletions(-) diff --git a/temp/shacl-nodeshapes2sparql.py b/temp/shacl-nodeshapes2sparql.py index ccb65970..a9eafafb 100644 --- a/temp/shacl-nodeshapes2sparql.py +++ b/temp/shacl-nodeshapes2sparql.py @@ -4,9 +4,9 @@ from pydantic import BaseModel from rdflib import URIRef -from rdflib.namespace import SH +from rdflib.namespace import SH, RDF -from temp.grammar import IRI +from temp.grammar import IRI, SimplifiedTriple, TriplesBlock class SHACL(BaseModel): @@ -20,38 +20,46 @@ def to_grammar(self): raise NotImplementedError("Subclasses must implement this method.") - class NodeShape(SHACL): - uri: URIRef - nodeTarget: Optional[URIRef] - classTarget: Optional[List[URIRef]] - subjectsOfTarget: Optional[URIRef] - objectsOfTarget: Optional[URIRef] - propertyShapes: Optional[List[PropertyShape]] - - def from_graph(self, graph): - self.nodeTarget = next(graph.objects(self.uri, SH.targetNode), None) - self.classTarget = list(graph.objects(self.uri, SH.targetClass)) - self.subjectsOfTarget = next(graph.value(self.uri, SH.targetSubjectsOf), None) - self.objectsOfTarget = next(graph.objects(self.uri, SH.targetObjectsOf), None) - self.propertyShapes = list(graph.objects(self.uri, SH.property)) - - def to_grammar(self): - if self.nodeTarget: - pass # do not need to add any specific triples or the like - if self.classTarget: - pass - if self.subjectsOfTarget: - pass - if self.objectsOfTarget: - pass - if self.propertyShapes: - pass - - def _process_node_target(self): - target_uri = IRI(value=self.nodeTarget) - - def _process_property_shapes(self, property_shapes): +class NodeShape(SHACL): + uri: URIRef + nodeTarget: Optional[URIRef] + classTarget: Optional[List[URIRef]] + subjectsOfTarget: Optional[URIRef] + objectsOfTarget: Optional[URIRef] + propertyShapes: Optional[List[PropertyShape]] + _triples: Optional[List[SimplifiedTriple]] + + def from_graph(self, graph): + self.nodeTarget = next(graph.objects(self.uri, SH.targetNode), None) + self.classTarget = list(graph.objects(self.uri, SH.targetClass)) + self.subjectsOfTarget = next(graph.value(self.uri, SH.targetSubjectsOf), None) + self.objectsOfTarget = next(graph.objects(self.uri, SH.targetObjectsOf), None) + self.propertyShapes = list(graph.objects(self.uri, SH.property)) + + def to_grammar(self) -> TriplesBlock: + if self.nodeTarget: + pass # do not need to add any specific triples or the like + if self.classTarget: + self._process_class_target() + if self.subjectsOfTarget: pass + if self.objectsOfTarget: + pass + if
+            pass
+
+    def _process_class_target(self):
+        for klass in self.classTarget:
+            self._triples.append(
+                SimplifiedTriple(
+                    subject=self.focus_node,
+                    predicate=IRI(value=RDF.type),
+                    object=klass,
+                )
+            )
+
+    def _process_property_shapes(self, property_shapes):
+        pass
 
 
 class PropertyShape(SHACL):

From afbf20e2489ebe1254b26045384d61b07aa2275d Mon Sep 17 00:00:00 2001
From: davidhabgood
Date: Mon, 29 Jan 2024 09:15:34 +1000
Subject: [PATCH 08/25] incomplete changes

---
 prez/reference_data/endpoints/new.ttl |  97 +++++++++++++++
 prez/services/link_generation.py      |  10 +-
 temp/shacl-nodeshapes2sparql.py       | 167 ++++++++++++++++++++++++--
 temp/shacl2sparql.py                  |  46 +++++--
 4 files changed, 292 insertions(+), 28 deletions(-)
 create mode 100644 prez/reference_data/endpoints/new.ttl

diff --git a/prez/reference_data/endpoints/new.ttl b/prez/reference_data/endpoints/new.ttl
new file mode 100644
index 00000000..0d67f9d4
--- /dev/null
+++ b/prez/reference_data/endpoints/new.ttl
@@ -0,0 +1,97 @@
+@prefix sh: <http://www.w3.org/ns/shacl#> .
+@prefix dcterms: <http://purl.org/dc/terms/> .
+@prefix dcat: <http://www.w3.org/ns/dcat#> .
+@prefix geo: <http://www.opengis.net/ont/geosparql#> .
+@prefix ex: <http://example.org/ns#> .
+@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
+@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
+
+ex:TopLevelCatalogs
+    a sh:NodeShape ;
+    sh:targetClass dcat:Catalog ;
+    sh:targetSubjectsOf dcterms:hasPart ;
+    sh:property [
+        sh:path dcterms:hasPart ;
+        sh:or (
+            [ sh:class dcat:Catalog ]
+            [ sh:class geo:FeatureCollection ]
+            [ sh:class skos:ConceptScheme ]
+            [ sh:class skos:Collection ]
+        ) ;
+    ] .
+
+ex:FeatureCollectionListing
+    a sh:NodeShape ;
+    sh:targetClass geo:FeatureCollection ;
+    sh:property [
+        sh:path [ sh:inversePath dcterms:hasPart ] ;
+        sh:class dcat:Catalog ;
+    ] .
+
+ex:ConceptSchemeListing
+    a sh:NodeShape ;
+    sh:targetClass skos:ConceptScheme ;
+    sh:property [
+        sh:path [ sh:inversePath dcterms:hasPart ] ;
+        sh:class dcat:Catalog ;
+    ] .
+
+ex:CollectionListing
+    a sh:NodeShape ;
+    sh:targetClass skos:Collection ;
+    sh:property [
+        sh:path [ sh:inversePath dcterms:hasPart ] ;
+        sh:class dcat:Catalog ;
+    ] .
+
+ex:LowerLevelCatalogListing
+    a sh:NodeShape ;
+    sh:targetClass dcat:Catalog ;
+    sh:property [
+        sh:path [ sh:inversePath dcterms:hasPart ] ;
+        sh:class dcat:Catalog ;
+    ] .
+
+ex:FeatureListing
+    a sh:NodeShape ;
+    sh:targetClass geo:Feature ;
+    sh:property [
+        sh:path [ sh:inversePath dcterms:hasPart ] ;
+        sh:class geo:FeatureCollection ;
+    ] , [
+        sh:path ( [sh:inversePath rdfs:member ] [ sh:inversePath dcterms:hasPart ] );
+        sh:class dcat:Catalog ;
+    ] .
+
+ex:ConceptSchemeConceptListing
+    a sh:NodeShape ;
+    sh:targetClass skos:Concept ;
+    sh:property [
+        sh:path skos:inScheme ;
+        sh:class skos:ConceptScheme ;
+    ] , [
+        sh:path ( skos:inScheme [ sh:inversePath dcterms:hasPart ] );
+        sh:class dcat:Catalog ;
+    ] .
+
+ex:CollectionConceptListing
+    a sh:NodeShape ;
+    sh:targetClass skos:Concept ;
+    sh:property [
+        sh:path skos:inScheme ;
+        sh:class skos:Collection ;
+    ] , [
+        sh:path ( [ sh:inversePath skos:member ] [ sh:inversePath dcterms:hasPart ] );
+        sh:class dcat:Catalog ;
+    ] .
+
+ex:ResourceListing
+    a sh:NodeShape ;
+    sh:targetClass dcat:Resource ;
+    sh:property [
+        sh:path [ sh:inversePath dcterms:hasPart ] ;
+        sh:class dcat:Catalog ;
+    ] , [
+        sh:path ( [ sh:inversePath dcterms:hasPart ] [ sh:inversePath dcterms:hasPart ] );
+        sh:class dcat:Catalog ;
+    ] .
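Editor's note: a minimal, self-contained sketch (not part of the patch) of how a node-selection shape like ex:FeatureCollectionListing above can be read back with rdflib, which is essentially what NodeShape.from_graph in this commit does. The inlined Turtle and variable names are illustrative only; rdflib >= 6 is assumed for the bundled SH namespace.

    from rdflib import Graph, URIRef
    from rdflib.namespace import SH

    ttl = """
    @prefix sh: <http://www.w3.org/ns/shacl#> .
    @prefix dcterms: <http://purl.org/dc/terms/> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix geo: <http://www.opengis.net/ont/geosparql#> .
    @prefix ex: <http://example.org/ns#> .

    ex:FeatureCollectionListing
        a sh:NodeShape ;
        sh:targetClass geo:FeatureCollection ;
        sh:property [
            sh:path [ sh:inversePath dcterms:hasPart ] ;
            sh:class dcat:Catalog ;
        ] .
    """

    graph = Graph().parse(data=ttl, format="turtle")
    shape = URIRef("http://example.org/ns#FeatureCollectionListing")

    # Mirrors NodeShape.from_graph: read the target class and property shapes.
    target_classes = list(graph.objects(shape, SH.targetClass))
    property_shapes = list(graph.objects(shape, SH.property))
    assert target_classes == [URIRef("http://www.opengis.net/ont/geosparql#FeatureCollection")]
    assert len(property_shapes) == 1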
\ No newline at end of file diff --git a/prez/services/link_generation.py b/prez/services/link_generation.py index c124135d..ffe8e5ad 100755 --- a/prez/services/link_generation.py +++ b/prez/services/link_generation.py @@ -5,6 +5,7 @@ from fastapi import Depends from rdflib import Graph, Literal, URIRef, DCTERMS, BNode +from rdflib.namespace import SH from prez.cache import endpoints_graph_cache, links_ids_graph_cache from prez.dependencies import get_system_repo @@ -57,8 +58,13 @@ async def _add_prez_links(graph: Graph, repo: Repo, system_repo: Repo): async def _new_link_generation(uri, repo: Repo, klasses, system_repo): # get the endpoints that can deliver the class - query = f"""SELECT ?ep WHERE - {{ ?ep a <{ONT.ObjectEndpoint}> }}""" + # many node shapes to one endpoint; multiple node shapes can point to the endpoint + query = f"""SELECT ?nodeShape {{ ?nodeShape a {SH.NodeShape} ; + {SH.targetClass} ?klasses . + VALUES ?klasses {" ".join(["<" + klass.n3() + ">" for klass in klasses])} + }}""" + {" ".join(["<" + klass.n3() + ">" for klass in klasses])} + system_repo.send_queries() # if there's a link generation query for the endpoint, run it _, tabular_results = await repo.send_queries([], [(None, query)]) diff --git a/temp/shacl-nodeshapes2sparql.py b/temp/shacl-nodeshapes2sparql.py index a9eafafb..51c3e8a9 100644 --- a/temp/shacl-nodeshapes2sparql.py +++ b/temp/shacl-nodeshapes2sparql.py @@ -1,12 +1,13 @@ from __future__ import annotations -from typing import List, Optional +from typing import List, Optional, Union from pydantic import BaseModel -from rdflib import URIRef +from rdflib import URIRef, BNode, Graph from rdflib.namespace import SH, RDF -from temp.grammar import IRI, SimplifiedTriple, TriplesBlock +from temp.grammar import IRI, SimplifiedTriple, TriplesBlock, Var, SelectClause, GraphPatternNotTriples, InlineData, \ + DataBlock, InlineDataOneVar, DataBlockValue class SHACL(BaseModel): @@ -26,41 +27,181 @@ class NodeShape(SHACL): classTarget: Optional[List[URIRef]] subjectsOfTarget: Optional[URIRef] objectsOfTarget: Optional[URIRef] - propertyShapes: Optional[List[PropertyShape]] + propertyShapes: Optional[List[URIRef]] _triples: Optional[List[SimplifiedTriple]] - def from_graph(self, graph): + def from_shacl_graph(self, graph): # TODO this can be a SPARQL select against the system graph. 
self.nodeTarget = next(graph.objects(self.uri, SH.targetNode), None) self.classTarget = list(graph.objects(self.uri, SH.targetClass)) - self.subjectsOfTarget = next(graph.value(self.uri, SH.targetSubjectsOf), None) + self.subjectsOfTarget = next(graph.objects(self.uri, SH.targetSubjectsOf), None) self.objectsOfTarget = next(graph.objects(self.uri, SH.targetObjectsOf), None) self.propertyShapes = list(graph.objects(self.uri, SH.property)) - def to_grammar(self) -> TriplesBlock: + def to_listing_select(self) -> TriplesBlock: + focus_node = Var(value="focus_node") if self.nodeTarget: pass # do not need to add any specific triples or the like if self.classTarget: - self._process_class_target() + self._process_class_target(focus_node) if self.subjectsOfTarget: pass if self.objectsOfTarget: pass if self.propertyShapes: - pass + self._process_property_shapes() - def _process_class_target(self): + def to_link_select(self, focus_node) -> SelectClause: + + def _process_class_target(self, focus_node): for klass in self.classTarget: self._triples.append( SimplifiedTriple( - subject=self.focus_node, + subject=focus_node, predicate=IRI(value=RDF.type), object=klass, ) ) - def _process_property_shapes(self, property_shapes): - pass + def _process_subjects_of_target(self): + # ?focus_node pred ?obj - ?obj is constrained by e.g. sh:class in a property shape. + self._triples.append( + SimplifiedTriple( + subject=self.focus_node, + predicate=IRI(value=self.subjectsOfTarget), + object=Var(value="ValidationNode"), + ) + ) + + def _process_objects_of_target(self): + self._triples.append( + SimplifiedTriple( + subject=Var(value="ValidationNode"), + predicate=IRI(value=self.objectsOfTarget), + object=self.focus_node, + ) + ) + + def _process_property_shapes(self): + for shape in self.propertyShapes: + ps = PropertyShape(shape) + self._triples.append(ps.to_grammar) class PropertyShape(SHACL): + uri: URIRef # URI of the shape + graph: Graph # the graph containing the property shape + # inputs + property_paths: Optional[List[Union[URIRef, BNode]]] + or_klasses: List[URIRef] + # outputs + _st_list = Optional[List[SimplifiedTriple]] + _gpnt_list = Optional[List[GraphPatternNotTriples]] + _select_vars: Optional[List[Var]] + + def from_graph(self, graph): + _single_class = next(graph.objects(self.uri, SH["class"]), None) + if _single_class: + klasses = list(_single_class) + else: + # _multiple_classes = list(graph.objects(self.uri, SH["class"]), None) + klasses = _single_class # if _single_class else _multiple_classes + pass + # TODO logic for or statement + self.property_paths = list(graph.objects(self.uri, SH.path)) + + def to_grammar(self, focus_node: Union[Var, IRI]): + # focus node = URI when generating links; Variable when listing objects + # process class statements NB this is the class on validation nodes + # get the length of any property path chains; this is what the target class applies to. 
+ for pp in self.property_paths: + if isinstance(pp, BNode): + pred_objects_gen = self.profile_graph.predicate_objects( + subject=pp + ) + bn_pred, bn_obj = next(pred_objects_gen, (None, None)) + if bn_obj == SH.union: + pass + elif bn_pred == SH.inversePath: + inverse_preds.append(IRI(value=bn_obj)) + elif bn_pred == SH.alternativePath: + predicates.extend(list(Collection(self.profile_graph, bn_obj))) + else: # sequence paths + predicates.append(tuple(Collection(self.profile_graph, path_obj))) + else: # a plain path specification to restrict the predicate to a specific value + predicates.append(path_obj) + else: # a plain path specification to restrict the predicate to a specific value + predicates.append(path_obj) + + + if self.property_paths: + for property_path in self.property_paths: + if isinstance(property_path, URIRef): + # vanilla property path + self._st_list.append( + SimplifiedTriple( + subject=focus_node, + predicate=IRI(value=property_path), + object=Var(value="ValidationNode") + ) + ) + elif isinstance(property_path, BNode): + pred_objects_gen = self.profile_graph.predicate_objects( + subject=path_obj + ) + bn_pred, bn_obj = next(pred_objects_gen, (None, None)) + if bn_obj == SH.union: + pass + elif bn_pred == SH.inversePath: + inverse_preds.append(IRI(value=bn_obj)) + elif bn_pred == SH.alternativePath: + predicates.extend(list(Collection(self.profile_graph, bn_obj))) + else: # sequence paths + predicates.append(tuple(Collection(self.profile_graph, path_obj))) + else: # a plain path specification to restrict the predicate to a specific value + predicates.append(path_obj) + + if self.or_klasses: + if len(self.or_klasses) == 1: + self._st_list.append( + SimplifiedTriple( + subject=Var(value="ValidationNode"), + predicate=IRI(value=RDF.type), + object=IRI(value=self.or_klasses[0]) + ) + ) + else: + self._st_list.append( + SimplifiedTriple(value="ValidationNode"), + IRI(value=RDF.type), + Var(value="ValClasses") + ) + dbvs = [DataBlockValue(value=IRI(value=klass)) for klass in self.or_klasses] + self._gpnt_list.append( + GraphPatternNotTriples( + content=InlineData( + data_block=DataBlock( + block=InlineDataOneVar( + variable=Var(value="ValClasses"), + datablockvalues=dbvs + ) + ) + ) + ) + ) + + + + +class PropertyPath(SHACL): uri: URIRef + +class Path(SHACL): + focus_uri: Union[IRI, Var] + path_uri: URIRef + + def to_grammar(self): + return SimplifiedTriple(self.focus_uri, IRI(value=self.uri), Var(value="ValidationNode")) + +class InversePath(SHACL): + focus_uri: Union[IRI, Var] + inverse_uri: URIRef \ No newline at end of file diff --git a/temp/shacl2sparql.py b/temp/shacl2sparql.py index de39fadf..675a9f94 100755 --- a/temp/shacl2sparql.py +++ b/temp/shacl2sparql.py @@ -155,6 +155,12 @@ def parse_endpoint_definition(self): self.endpoint_graph.objects(subject=self.endpoint_uri, predicate=SH.rule) ) + target_subjects_of = list( + self.endpoint_graph.objects( + subject=self.endpoint_uri, predicate=SH.targetSubjectsOf + ) + ) + # objects - just set the focus node. if target_nodes: target_node_var = str(target_nodes[0]) @@ -181,12 +187,13 @@ def parse_endpoint_definition(self): if target_classes: self._add_target_class(target_classes[0]) + if target_subjects_of: + pass # TODO + # don't use the target class if there's a sh:target / sh:select #TODO confirm why this caused issues - duplicate # pattern matches in the subquery? 
# elif target_classes: - elif ( - endpoint_type == ONT.ListingEndpoint - ): # ignore class for non listing at present + if target_classes: ggp = self.create_select_subquery_for_class_listing(target_classes) self._add_ggp_to_main_ggps(ggp) @@ -226,21 +233,34 @@ def _create_construct_triples_from_sh_rules(self, rule_node): self.construct_triples = [triple] def create_select_subquery_for_class_listing( - self, target_classes: Optional[List[URIRef]] = None + self, + target_classes: Optional[List[URIRef]] = None, + target_subjects_of: Optional[URIRef] = None ): ggp = GroupGraphPattern(content=GroupGraphPatternSub()) + triples = [] if target_classes: target_class_var = IRI(value=target_classes[0]) - triples_block = TriplesBlock( - triples=[ - SimplifiedTriple( - subject=self.focus_node, - predicate=IRI(value=RDF.type), - object=target_class_var, - ) - ] + triples.append( + SimplifiedTriple( + subject=self.focus_node, + predicate=IRI(value=RDF.type), + object=target_class_var, + ) + ) + + if target_subjects_of: # typically used in conjunction with a sh:class statement to specify the class of the validation node. + triples.append( + SimplifiedTriple( + subject=self.focus_node, + predicate=target_subjects_of, + object=Var(value="ValidationNode") # better name? + ) ) + + triples_block = TriplesBlock(triples=triples) + if self.additional_ggps: # for example from cql gpnt = GraphPatternNotTriples( content=GroupOrUnionGraphPattern( @@ -510,7 +530,7 @@ def process_path_object(path_obj: Union[URIRef, BNode]): subject=path_object ) bnode_pred, bnode_obj = next(predicate_objects_gen, (None, None)) - if bnode_obj == SH.union: + if bnode_obj == SH.union: # TODO or sh:or ?? union_list_bnode = list(Collection(self.profile_graph, path_object))[1] union_items = list(Collection(self.profile_graph, union_list_bnode)) From b5a09a30d4ccc714ac91349d94673c67a0f14c56 Mon Sep 17 00:00:00 2001 From: david Date: Thu, 1 Feb 2024 10:26:43 +1000 Subject: [PATCH 09/25] progress --- ...s2sparql.py => shacl_nodeshapes2sparql.py} | 137 +++++++++++------- tests/data/nodeshapes/endpoints.ttl | 97 +++++++++++++ tests/test_shacl_parsing.py | 37 +++++ 3 files changed, 216 insertions(+), 55 deletions(-) rename temp/{shacl-nodeshapes2sparql.py => shacl_nodeshapes2sparql.py} (67%) create mode 100644 tests/data/nodeshapes/endpoints.ttl create mode 100755 tests/test_shacl_parsing.py diff --git a/temp/shacl-nodeshapes2sparql.py b/temp/shacl_nodeshapes2sparql.py similarity index 67% rename from temp/shacl-nodeshapes2sparql.py rename to temp/shacl_nodeshapes2sparql.py index 51c3e8a9..a349b493 100644 --- a/temp/shacl-nodeshapes2sparql.py +++ b/temp/shacl_nodeshapes2sparql.py @@ -23,37 +23,38 @@ def to_grammar(self): class NodeShape(SHACL): uri: URIRef - nodeTarget: Optional[URIRef] - classTarget: Optional[List[URIRef]] - subjectsOfTarget: Optional[URIRef] - objectsOfTarget: Optional[URIRef] - propertyShapes: Optional[List[URIRef]] - _triples: Optional[List[SimplifiedTriple]] - - def from_shacl_graph(self, graph): # TODO this can be a SPARQL select against the system graph. 
- self.nodeTarget = next(graph.objects(self.uri, SH.targetNode), None) - self.classTarget = list(graph.objects(self.uri, SH.targetClass)) - self.subjectsOfTarget = next(graph.objects(self.uri, SH.targetSubjectsOf), None) - self.objectsOfTarget = next(graph.objects(self.uri, SH.targetObjectsOf), None) + targetNode: Optional[URIRef] = None + targetClass: Optional[List[URIRef]] = None + targetSubjectsOf: Optional[URIRef] = None + targetObjectsOf: Optional[URIRef] = None + propertyShapes: Optional[List[URIRef]] = None + _triples: Optional[List[SimplifiedTriple]] = None + + def from_graph(self, graph): # TODO this can be a SPARQL select against the system graph. + self.targetNode = next(graph.objects(self.uri, SH.targetNode), None) + self.targetClass = list(graph.objects(self.uri, SH.targetClass)) + self.targetSubjectsOf = next(graph.objects(self.uri, SH.targetSubjectsOf), None) + self.targetObjectsOf = next(graph.objects(self.uri, SH.targetObjectsOf), None) self.propertyShapes = list(graph.objects(self.uri, SH.property)) def to_listing_select(self) -> TriplesBlock: focus_node = Var(value="focus_node") - if self.nodeTarget: + if self.targetNode: pass # do not need to add any specific triples or the like - if self.classTarget: + if self.targetClass: self._process_class_target(focus_node) - if self.subjectsOfTarget: + if self.targetSubjectsOf: pass - if self.objectsOfTarget: + if self.targetObjectsOf: pass if self.propertyShapes: self._process_property_shapes() def to_link_select(self, focus_node) -> SelectClause: + pass def _process_class_target(self, focus_node): - for klass in self.classTarget: + for klass in self.targetClass: self._triples.append( SimplifiedTriple( subject=focus_node, @@ -67,7 +68,7 @@ def _process_subjects_of_target(self): self._triples.append( SimplifiedTriple( subject=self.focus_node, - predicate=IRI(value=self.subjectsOfTarget), + predicate=IRI(value=self.targetSubjectsOf), object=Var(value="ValidationNode"), ) ) @@ -76,7 +77,7 @@ def _process_objects_of_target(self): self._triples.append( SimplifiedTriple( subject=Var(value="ValidationNode"), - predicate=IRI(value=self.objectsOfTarget), + predicate=IRI(value=self.targetObjectsOf), object=self.focus_node, ) ) @@ -88,51 +89,56 @@ def _process_property_shapes(self): class PropertyShape(SHACL): - uri: URIRef # URI of the shape - graph: Graph # the graph containing the property shape + uri: URIRef | BNode # URI of the shape + focus_node: Union[Var, IRI] = Var(value="focus_node") # inputs - property_paths: Optional[List[Union[URIRef, BNode]]] - or_klasses: List[URIRef] + property_paths: Optional[List[Union[URIRef, BNode]]] = None + or_klasses: Optional[List[URIRef]] = None # outputs - _st_list = Optional[List[SimplifiedTriple]] - _gpnt_list = Optional[List[GraphPatternNotTriples]] - _select_vars: Optional[List[Var]] + _st_list: Optional[List[SimplifiedTriple]] = None + _gpnt_list: Optional[List[GraphPatternNotTriples]] = None + _select_vars: Optional[List[Var]] = None def from_graph(self, graph): _single_class = next(graph.objects(self.uri, SH["class"]), None) if _single_class: - klasses = list(_single_class) + self.or_klasses = [_single_class] else: - # _multiple_classes = list(graph.objects(self.uri, SH["class"]), None) - klasses = _single_class # if _single_class else _multiple_classes pass + # _multiple_classes = list(graph.objects(self.uri, SH["class"]), None) # TODO logic for or statement self.property_paths = list(graph.objects(self.uri, SH.path)) - def to_grammar(self, focus_node: Union[Var, IRI]): + pp_asts = Or() + 
for pp in self.property_paths:
+            pp_asts.paths.append(self.process_property_path(pp, graph))
+
         # focus node = URI when generating links; Variable when listing objects
         # process class statements NB this is the class on validation nodes
         # get the length of any property path chains; this is what the target class applies to.
-
-        for pp in self.property_paths:
-            if isinstance(pp, BNode):
-                pred_objects_gen = self.profile_graph.predicate_objects(
-                    subject=pp
-                )
-                bn_pred, bn_obj = next(pred_objects_gen, (None, None))
-                if bn_obj == SH.union:
-                    pass
-                elif bn_pred == SH.inversePath:
-                    inverse_preds.append(IRI(value=bn_obj))
-                elif bn_pred == SH.alternativePath:
-                    predicates.extend(list(Collection(self.profile_graph, bn_obj)))
-                else:  # sequence paths
-                    predicates.append(tuple(Collection(self.profile_graph, path_obj)))
+
+    def _process_property_path(self, pp, graph):
+        if isinstance(pp, BNode):
+            pred_objects_gen = graph.predicate_objects(
+                subject=pp
+            )
+            bn_pred, bn_obj = next(pred_objects_gen, (None, None))
+            if bn_obj == SH.union:
+                pass
+            elif bn_pred == SH.inversePath:
+                inverse_preds.append(IRI(value=bn_obj))
+            elif bn_pred == SH.alternativePath:
+                predicates.extend(list(Collection(self.profile_graph, bn_obj)))
+            else:  # sequence paths
+                predicates.append(tuple(Collection(self.profile_graph, path_obj)))
         else:  # a plain path specification to restrict the predicate to a specific value
             predicates.append(path_obj)
+        return pp_ast
+
+
+    def to_grammar(self):
         if self.property_paths:
             for property_path in self.property_paths:
                 if isinstance(property_path, URIRef):
@@ -163,11 +169,11 @@ def to_grammar(self, focus_node: Union[Var, IRI]):
         if self.or_klasses:
             if len(self.or_klasses) == 1:
                 self._st_list.append(
-                SimplifiedTriple(
-                    subject=Var(value="ValidationNode"),
-                    predicate=IRI(value=RDF.type),
-                    object=IRI(value=self.or_klasses[0])
-                )
+                    SimplifiedTriple(
+                        subject=Var(value="ValidationNode"),
+                        predicate=IRI(value=RDF.type),
+                        object=IRI(value=self.or_klasses[0])
+                    )
                 )
             else:
                 self._st_list.append(
@@ -190,18 +196,39 @@ def to_grammar(self, focus_node: Union[Var, IRI]):
                 )
 
 
-
 class PropertyPath(SHACL):
     uri: URIRef
 
-class Path(SHACL):
+
+class Path(PropertyPath):
     focus_uri: Union[IRI, Var]
     path_uri: URIRef
 
     def to_grammar(self):
         return SimplifiedTriple(self.focus_uri, IRI(value=self.uri), Var(value="ValidationNode"))
 
+
+class SequencePath(SHACL):
+    uri: URIRef
+    paths: List[PropertyPath]
+
+    def from_graph(self, graph):
+        pass
+
+    def to_grammar(self):
+        pass
+
+
 class InversePath(SHACL):
     focus_uri: Union[IRI, Var]
-    inverse_uri: URIRef
\ No newline at end of file
+    inverse_path: URIRef
+    validation_node: Var
+
+
+class Or(SHACL):
+    paths: List[SHACL]
+    pass
+
+
+class And(SHACL):
+    pass
\ No newline at end of file
diff --git a/tests/data/nodeshapes/endpoints.ttl b/tests/data/nodeshapes/endpoints.ttl
new file mode 100644
index 00000000..0d67f9d4
--- /dev/null
+++ b/tests/data/nodeshapes/endpoints.ttl
@@ -0,0 +1,97 @@
+@prefix sh: <http://www.w3.org/ns/shacl#> .
+@prefix dcterms: <http://purl.org/dc/terms/> .
+@prefix dcat: <http://www.w3.org/ns/dcat#> .
+@prefix geo: <http://www.opengis.net/ont/geosparql#> .
+@prefix ex: <http://example.org/ns#> .
+@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
+@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
+
+ex:TopLevelCatalogs
+    a sh:NodeShape ;
+    sh:targetClass dcat:Catalog ;
+    sh:targetSubjectsOf dcterms:hasPart ;
+    sh:property [
+        sh:path dcterms:hasPart ;
+        sh:or (
+            [ sh:class dcat:Catalog ]
+            [ sh:class geo:FeatureCollection ]
+            [ sh:class skos:ConceptScheme ]
+            [ sh:class skos:Collection ]
+        ) ;
+    ] .
+ +ex:FeatureCollectionListing + a sh:NodeShape ; + sh:targetClass geo:FeatureCollection ; + sh:property [ + sh:path [ sh:inversePath dcterms:hasPart ] ; + sh:class dcat:Catalog ; + ] . + +ex:ConceptSchemeListing + a sh:NodeShape ; + sh:targetClass skos:ConceptScheme ; + sh:property [ + sh:path [ sh:inversePath dcterms:hasPart ] ; + sh:class dcat:Catalog ; + ] . + +ex:CollectionListing + a sh:NodeShape ; + sh:targetClass skos:Collection ; + sh:property [ + sh:path [ sh:inversePath dcterms:hasPart ] ; + sh:class dcat:Catalog ; + ] . + +ex:LowerLevelCatalogListing + a sh:NodeShape ; + sh:targetClass dcat:Catalog ; + sh:property [ + sh:path [ sh:inversePath dcterms:hasPart ] ; + sh:class dcat:Catalog ; + ] . + +ex:FeatureListing + a sh:NodeShape ; + sh:targetClass geo:Feature ; + sh:property [ + sh:path [ sh:inversePath dcterms:hasPart ] ; + sh:class geo:FeatureCollection ; + ] , [ + sh:path ( [sh:inversePath rdfs:member ] [ sh:inversePath dcterms:hasPart ] ); + sh:class dcat:Catalog ; + ] . + +ex:ConceptSchemeConceptListing + a sh:NodeShape ; + sh:targetClass skos:Concept ; + sh:property [ + sh:path skos:inScheme ; + sh:class skos:ConceptScheme ; + ] , [ + sh:path ( skos:inScheme [ sh:inversePath dcterms:hasPart ] ); + sh:class dcat:Catalog ; + ] . + +ex:CollectionConceptListing + a sh:NodeShape ; + sh:targetClass skos:Concept ; + sh:property [ + sh:path skos:inScheme ; + sh:class skos:Collection ; + ] , [ + sh:path ( [ sh:inversePath skos:member ] [ sh:inversePath dcterms:hasPart ] ); + sh:class dcat:Catalog ; + ] . + +ex:ResourceListing + a sh:NodeShape ; + sh:targetClass dcat:Resource ; + sh:property [ + sh:path [ sh:inversePath dcterms:hasPart ] ; + sh:class dcat:Catalog ; + ] , [ + sh:path ( [ sh:inversePath dcterms:hasPart ] [ sh:inversePath dcterms:hasPart ] ); + sh:class dcat:Catalog ; + ] . 
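Editor's note: the shapes in this test fixture exist to be translated into SPARQL, so a rough sketch of the WHERE pattern they imply may help (not part of the patch; plain strings are used here rather than Prez's grammar classes, whose exact API is not assumed). For ex:FeatureListing, [ sh:inversePath p ] from the focus node reads "?parent p ?focus_node", and a sequence path chains two such inverse steps up to the catalog; the ?path_node_N naming mirrors the variables adopted in the next commit.

    RDFS_MEMBER = "http://www.w3.org/2000/01/rdf-schema#member"
    DCT_HAS_PART = "http://purl.org/dc/terms/hasPart"

    def feature_listing_where() -> str:
        # sh:path ( [ sh:inversePath rdfs:member ] [ sh:inversePath dcterms:hasPart ] )
        # walks from the feature up to its collection, then up to the catalog.
        return (
            f"?path_node_1 <{RDFS_MEMBER}> ?focus_node .\n"
            f"?path_node_2 <{DCT_HAS_PART}> ?path_node_1 ."
        )

    print(feature_listing_where())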
\ No newline at end of file diff --git a/tests/test_shacl_parsing.py b/tests/test_shacl_parsing.py new file mode 100755 index 00000000..36c0e6f9 --- /dev/null +++ b/tests/test_shacl_parsing.py @@ -0,0 +1,37 @@ +from temp.shacl_nodeshapes2sparql import NodeShape, PropertyShape +from rdflib import Graph, URIRef +import pytest + +endpoints_graph = Graph().parse("tests/data/nodeshapes/endpoints.ttl", format="turtle") + + +@pytest.fixture +def property_shape(): + return endpoints_graph.value( + subject=URIRef("http://example.org/ns#FeatureCollectionListing"), + predicate=URIRef("http://www.w3.org/ns/shacl#property"), + ) + + +@pytest.mark.parametrize("nodeshape_uri", + [ + "http://example.org/ns#FeatureCollectionListing" + ]) +def test_nodeshape_parsing(nodeshape_uri): + ns = NodeShape(uri=URIRef(nodeshape_uri)) + ns.from_shacl_graph(endpoints_graph) + assert ns.targetClass == [URIRef("http://www.opengis.net/ont/geosparql#FeatureCollection")] + assert len(ns.propertyShapes) == 1 + + +def test_propertyshape_parsing(property_shape): + ps = PropertyShape(uri=property_shape) + ps.from_graph(graph=endpoints_graph) + print('') + + +def test_propertyshape_create_grammar(property_shape): + ps = PropertyShape(uri=property_shape) + ps.from_graph(graph=endpoints_graph) + ps.to_grammar() + assert True \ No newline at end of file From 43a171f08c15fd7adf1f02e770a8a62761c994c0 Mon Sep 17 00:00:00 2001 From: david Date: Thu, 8 Feb 2024 00:06:04 +1000 Subject: [PATCH 10/25] working main endpoints --- prez/config.py | 3 +- prez/models/object_item.py | 59 ---- prez/models/profiles_listings.py | 20 -- .../endpoints/endpoint_metadata.ttl | 34 +++ ...ttl => endpoint_node_selection_shapes.ttl} | 82 ++--- ...cords.ttl => extended_ogc_records.ttl.old} | 0 .../profiles/ogc_records_profile.ttl | 4 +- .../search_methods/search_default.ttl | 43 --- .../search_methods/search_exact.ttl | 16 - .../search_methods/search_readme.md | 67 ---- .../search_methods/search_skos_preflabel.ttl | 35 --- .../search_methods/search_skos_weighted.ttl | 63 ---- prez/routers/ogc_router.py | 68 ++--- prez/routers/ogc_spaceprez.py.old | 181 ----------- prez/routers/profiles.py | 16 +- prez/routers/search.py | 4 +- prez/services/link_generation.py | 225 ++++++-------- prez/services/listings.py | 182 ++++++++--- prez/services/objects.py | 49 ++- prez/sparql/count_query.py | 75 +++++ prez/sparql/methods.py | 2 +- prez/sparql/objects_listings.py | 8 +- prez/sparql/resource.py | 9 - prez/sparql/search_query.py | 58 +--- temp/cql2sparql.py | 4 +- temp/grammar/__init__.py | 74 +++++ temp/{ => grammar}/grammar.py | 9 +- temp/shacl2sparql.py | 289 ++++++------------ temp/shacl_node_selection.py | 238 +++++++++++++++ temp/shacl_nodeshapes2sparql.py | 234 -------------- tests/data/nodeshapes/endpoints.ttl | 97 ------ tests/test_shacl_parsing.py | 50 ++- 32 files changed, 889 insertions(+), 1409 deletions(-) delete mode 100755 prez/models/object_item.py delete mode 100755 prez/models/profiles_listings.py create mode 100644 prez/reference_data/endpoints/endpoint_metadata.ttl rename prez/reference_data/endpoints/{new.ttl => endpoint_node_selection_shapes.ttl} (61%) rename prez/reference_data/endpoints/{extended_ogc_records.ttl => extended_ogc_records.ttl.old} (100%) delete mode 100755 prez/reference_data/search_methods/search_default.ttl delete mode 100755 prez/reference_data/search_methods/search_exact.ttl delete mode 100755 prez/reference_data/search_methods/search_readme.md delete mode 100755 prez/reference_data/search_methods/search_skos_preflabel.ttl 
delete mode 100755 prez/reference_data/search_methods/search_skos_weighted.ttl delete mode 100755 prez/routers/ogc_spaceprez.py.old create mode 100755 prez/sparql/count_query.py delete mode 100755 prez/sparql/resource.py create mode 100644 temp/grammar/__init__.py rename temp/{ => grammar}/grammar.py (99%) create mode 100644 temp/shacl_node_selection.py delete mode 100644 temp/shacl_nodeshapes2sparql.py delete mode 100644 tests/data/nodeshapes/endpoints.ttl diff --git a/prez/config.py b/prez/config.py index 097e1dbd..86b10297 100755 --- a/prez/config.py +++ b/prez/config.py @@ -1,6 +1,6 @@ from os import environ from pathlib import Path -from typing import Optional, List +from typing import Optional, List, Tuple import toml from pydantic import root_validator @@ -62,6 +62,7 @@ class Settings(BaseSettings): disable_prefix_generation: bool = False default_language: str = "en" local_rdf_dir: str = "rdf" + endpoint_structure: Optional[Tuple[str, ...]] = ("catalogs", "collections", "items") # @root_validator() # def check_endpoint_enabled(cls, values): diff --git a/prez/models/object_item.py b/prez/models/object_item.py deleted file mode 100755 index ff348ae6..00000000 --- a/prez/models/object_item.py +++ /dev/null @@ -1,59 +0,0 @@ -from typing import Optional, FrozenSet, Tuple -from typing import Set - -from pydantic import BaseModel, root_validator -from rdflib import URIRef, PROF - -from prez.cache import endpoints_graph_cache -from prez.models.model_exceptions import ClassNotFoundException -from prez.reference_data.prez_ns import PREZ, ONT -from prez.services.curie_functions import get_uri_for_curie_id -from prez.services.model_methods import get_classes - - -class ObjectItem(BaseModel): - class Config: - arbitrary_types_allowed = True - - uri: Optional[URIRef] = None - classes: Optional[FrozenSet[URIRef]] = frozenset([PROF.Profile]) - selected_class: Optional[URIRef] = None - profile: Optional[URIRef] = None - top_level_listing: Optional[bool] = False - - def __hash__(self): - return hash(self.uri) - - # @root_validator - # def populate(cls, values): - # values["top_level_listing"] = False # this class is for objects, not listings. - # uri_str = values.get("uri") - # endpoint_uri_str = values.get("endpoint_uri") - # if endpoint_uri_str: - # endpoint_uri = URIRef(endpoint_uri_str) - # values["classes"] = frozenset( - # [ - # klass - # for klass in endpoints_graph_cache.objects( - # endpoint_uri, ONT.deliversClasses, None - # ) - # ] - # ) - # values["base_class"] = endpoints_graph_cache.value( - # endpoint_uri, ONT.baseClass - # ) - # else: - # try: - # values["classes"] = frozenset( - # tup[1] for tup in get_classes([values["uri"]]) - # ) - # except ClassNotFoundException: - # # TODO return a generic DESCRIBE on the object - we can't use any of prez's profiles/endpoints to render - # # information about the object, but we can provide any RDF we have for it. 
-            pass
-    #         if uri_str:
-    #             values["uri"] = URIRef(uri_str)
-    #         else:
-    #             values["uri"] = get_uri_for_curie_id(values["uri"])
-    #
-    #     return values
diff --git a/prez/models/profiles_listings.py b/prez/models/profiles_listings.py
deleted file mode 100755
index a5d5750b..00000000
--- a/prez/models/profiles_listings.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from typing import Optional, FrozenSet
-
-from pydantic import BaseModel
-from rdflib import Namespace
-from rdflib.namespace import URIRef
-
-PREZ = Namespace("https://prez.dev/")
-
-
-class ProfilesMembers(BaseModel):
-    class Config:
-        arbitrary_types_allowed = True
-
-    url_path: str
-    uri: Optional[URIRef] = None
-    base_class: Optional[URIRef]
-    classes: Optional[FrozenSet[URIRef]] = frozenset([PREZ.ProfilesList])
-    selected_class: Optional[URIRef] = None
-    link_constructor: Optional[str]
-    top_level_listing: Optional[bool] = True
diff --git a/prez/reference_data/endpoints/endpoint_metadata.ttl b/prez/reference_data/endpoints/endpoint_metadata.ttl
new file mode 100644
index 00000000..3eebae3c
--- /dev/null
+++ b/prez/reference_data/endpoints/endpoint_metadata.ttl
@@ -0,0 +1,34 @@
+@prefix ex: <http://example.org/ns#> .
+@prefix ogce: <https://prez.dev/endpoint/extended-ogc-records/> .
+@prefix ont: <https://prez.dev/ont/> .
+@prefix prez: <https://prez.dev/> .
+
+ogce:catalog-listing
+    a ont:ListingEndpoint ;
+    ont:relevantShapes ex:TopLevelCatalogs ;
+.
+
+ogce:catalog-object
+    a ont:ObjectEndpoint ;
+    ont:relevantShapes ex:TopLevelCatalogs ;
+.
+
+ogce:collection-listing
+    a ont:ListingEndpoint ;
+    ont:relevantShapes ex:Collections ;
+.
+
+ogce:collection-object
+    a ont:ObjectEndpoint ;
+    ont:relevantShapes ex:Collections ;
+.
+
+ogce:item-listing
+    a ont:ListingEndpoint ;
+    ont:relevantShapes ex:Feature , ex:ConceptSchemeConcept , ex:CollectionConcept , ex:Resource ;
+.
+
+ogce:item-object
+    a ont:ObjectEndpoint ;
+    ont:relevantShapes ex:Feature , ex:ConceptSchemeConcept , ex:CollectionConcept , ex:Resource ;
+.
\ No newline at end of file
diff --git a/prez/reference_data/endpoints/new.ttl b/prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl
similarity index 61%
rename from prez/reference_data/endpoints/new.ttl
rename to prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl
index 0d67f9d4..cf9abe11 100644
--- a/prez/reference_data/endpoints/new.ttl
+++ b/prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl
@@ -1,15 +1,17 @@
-@prefix sh: <http://www.w3.org/ns/shacl#> .
-@prefix dcterms: <http://purl.org/dc/terms/> .
+@prefix ont: <https://prez.dev/ont/> .
 @prefix dcat: <http://www.w3.org/ns/dcat#> .
-@prefix geo: <http://www.opengis.net/ont/geosparql#> .
+@prefix dcterms: <http://purl.org/dc/terms/> .
 @prefix ex: <http://example.org/ns#> .
-@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
+@prefix geo: <http://www.opengis.net/ont/geosparql#> .
+@prefix prez: <https://prez.dev/> .
 @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
+@prefix sh: <http://www.w3.org/ns/shacl#> .
+@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
 
 ex:TopLevelCatalogs
     a sh:NodeShape ;
+    ont:hierarchyLevel 1 ;
     sh:targetClass dcat:Catalog ;
-    sh:targetSubjectsOf dcterms:hasPart ;
     sh:property [
         sh:path dcterms:hasPart ;
         sh:or (
@@ -20,51 +22,57 @@ ex:TopLevelCatalogs
         ) ;
     ] .
 
-ex:FeatureCollectionListing
-    a sh:NodeShape ;
-    sh:targetClass geo:FeatureCollection ;
-    sh:property [
-        sh:path [ sh:inversePath dcterms:hasPart ] ;
-        sh:class dcat:Catalog ;
-    ] .
-
-ex:ConceptSchemeListing
+ex:Collections
     a sh:NodeShape ;
-    sh:targetClass skos:ConceptScheme ;
+    ont:hierarchyLevel 2 ;
+    sh:targetClass geo:FeatureCollection , skos:ConceptScheme , skos:Collection , dcat:Catalog ;
     sh:property [
         sh:path [ sh:inversePath dcterms:hasPart ] ;
         sh:class dcat:Catalog ;
     ] .
 
-ex:CollectionListing
-    a sh:NodeShape ;
-    sh:targetClass skos:Collection ;
-    sh:property [
-        sh:path [ sh:inversePath dcterms:hasPart ] ;
-        sh:class dcat:Catalog ;
-    ] .
-
-ex:LowerLevelCatalogListing
-    a sh:NodeShape ;
-    sh:targetClass dcat:Catalog ;
-    sh:property [
-        sh:path [ sh:inversePath dcterms:hasPart ] ;
-        sh:class dcat:Catalog ;
-    ] .
+#ex:ConceptScheme
+#    a sh:NodeShape ;
+#    ont:hierarchyLevel 2 ;
+#    sh:targetClass skos:ConceptScheme ;
+#    sh:property [
+#        sh:path [ sh:inversePath dcterms:hasPart ] ;
+#        sh:class dcat:Catalog ;
+#    ] .
+#
+#ex:Collection
+#    a sh:NodeShape ;
+#    ont:hierarchyLevel 2 ;
+#    sh:targetClass skos:Collection ;
+#    sh:property [
+#        sh:path [ sh:inversePath dcterms:hasPart ] ;
+#        sh:class dcat:Catalog ;
+#    ] .
+#
+#ex:LowerLevelCatalog
+#    a sh:NodeShape ;
+#    ont:hierarchyLevel 2 ;
+#    sh:targetClass dcat:Catalog ;
+#    sh:property [
+#        sh:path [ sh:inversePath dcterms:hasPart ] ;
+#        sh:class dcat:Catalog ;
+#    ] .
 
-ex:FeatureListing
+ex:Feature
     a sh:NodeShape ;
+    ont:hierarchyLevel 3 ;
     sh:targetClass geo:Feature ;
     sh:property [
-        sh:path [ sh:inversePath dcterms:hasPart ] ;
+        sh:path [ sh:inversePath rdfs:member ] ;
         sh:class geo:FeatureCollection ;
     ] , [
         sh:path ( [sh:inversePath rdfs:member ] [ sh:inversePath dcterms:hasPart ] );
         sh:class dcat:Catalog ;
     ] .
 
-ex:ConceptSchemeConceptListing
+ex:ConceptSchemeConcept
     a sh:NodeShape ;
+    ont:hierarchyLevel 3 ;
     sh:targetClass skos:Concept ;
     sh:property [
         sh:path skos:inScheme ;
@@ -74,19 +82,21 @@ ex:ConceptSchemeConceptListing
         sh:class dcat:Catalog ;
     ] .
 
-ex:CollectionConceptListing
+ex:CollectionConcept
     a sh:NodeShape ;
+    ont:hierarchyLevel 3 ;
     sh:targetClass skos:Concept ;
     sh:property [
-        sh:path skos:inScheme ;
+        sh:path [ sh:inversePath skos:member ] ;
         sh:class skos:Collection ;
     ] , [
         sh:path ( [ sh:inversePath skos:member ] [ sh:inversePath dcterms:hasPart ] );
         sh:class dcat:Catalog ;
     ] .
 
-ex:ResourceListing
+ex:Resource
     a sh:NodeShape ;
+    ont:hierarchyLevel 3 ;
     sh:targetClass dcat:Resource ;
     sh:property [
         sh:path [ sh:inversePath dcterms:hasPart ] ;
diff --git a/prez/reference_data/endpoints/extended_ogc_records.ttl b/prez/reference_data/endpoints/extended_ogc_records.ttl.old
similarity index 100%
rename from prez/reference_data/endpoints/extended_ogc_records.ttl
rename to prez/reference_data/endpoints/extended_ogc_records.ttl.old
diff --git a/prez/reference_data/profiles/ogc_records_profile.ttl b/prez/reference_data/profiles/ogc_records_profile.ttl
index a81997c7..5469b2d1 100755
--- a/prez/reference_data/profiles/ogc_records_profile.ttl
+++ b/prez/reference_data/profiles/ogc_records_profile.ttl
@@ -48,7 +48,9 @@ prez:OGCListingProfile
         "text/anot+turtle" ,
         "text/turtle" ;
     altr-ext:hasDefaultResourceFormat "text/anot+turtle" ;
-    altr-ext:constrainsClass dcat:Catalog , skos:Collection , geo:Feature , geo:FeatureCollection , skos:Concept ;
+    altr-ext:constrainsClass dcat:Catalog , skos:Collection , geo:Feature , geo:FeatureCollection , skos:Concept ,
+        dcat:Resource ;
+    sh:property [ sh:path rdf:type ]
     .
 
 prez:OGCSchemesListProfile
diff --git a/prez/reference_data/search_methods/search_default.ttl b/prez/reference_data/search_methods/search_default.ttl
deleted file mode 100755
index 82ffc515..00000000
--- a/prez/reference_data/search_methods/search_default.ttl
+++ /dev/null
@@ -1,43 +0,0 @@
-@prefix dcterms: <http://purl.org/dc/terms/> .
-@prefix prez: <https://prez.dev/> .
-@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
-@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
-
-prez:default
-    a prez:SearchMethod ;
-    dcterms:identifier "default" ;
-    rdfs:label "Prez Default Match Search"@en ;
-    rdfs:comment "A default method to search for objects in Prez" ;
-    rdf:value """
-    SELECT ?search_result_uri ?predicate ?match ?weight (URI(CONCAT("urn:hash:", SHA256(CONCAT(STR(?search_result_uri), STR(?predicate), STR(?match), STR(?weight))))) AS ?hashID)
-    WHERE {
-        SELECT ?search_result_uri ?predicate ?match (SUM(?w) AS ?weight)
-        WHERE
-        {
-            ?search_result_uri ?predicate ?match .
-            $FOCUS_TO_FILTER
-            $FILTER_TO_FOCUS
-            VALUES ?predicate { $PREDICATES }
-            {
-                ?search_result_uri ?predicate ?match .
-                BIND (100 AS ?w)
-                FILTER (LCASE(?match) = "$TERM")
-            }
-            UNION
-            {
-                ?search_result_uri ?predicate ?match .
-                BIND (20 AS ?w)
-                FILTER (REGEX(?match, "^$TERM", "i"))
-            }
-            UNION
-            {
-                ?search_result_uri ?predicate ?match .
-                BIND (10 AS ?w)
-                FILTER (REGEX(?match, "$TERM", "i"))
-            }
-        }
-        GROUP BY ?search_result_uri ?predicate ?match
-        ORDER BY DESC(?weight)
-        LIMIT $LIMIT OFFSET $OFFSET
-    }
-    """ .
diff --git a/prez/reference_data/search_methods/search_exact.ttl b/prez/reference_data/search_methods/search_exact.ttl
deleted file mode 100755
index bc2c7f4e..00000000
--- a/prez/reference_data/search_methods/search_exact.ttl
+++ /dev/null
@@ -1,16 +0,0 @@
-@prefix dcterms: <http://purl.org/dc/terms/> .
-@prefix prez: <https://prez.dev/> .
-@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
-@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
-
-prez:exactMatch a prez:SearchMethod ;
-    dcterms:identifier "exactMatch" ;
-    rdfs:label "Exact Object Match Search"@en ;
-    rdf:value """
-        SELECT DISTINCT ?search_result_uri ?predicate ?match
-        WHERE { ?search_result_uri ?predicate ?match .
-            $FOCUS_TO_FILTER
-            $FILTER_TO_FOCUS
-            FILTER REGEX(?match, "^$TERM$$", "i")
-        } LIMIT $LIMIT
-    """.
diff --git a/prez/reference_data/search_methods/search_readme.md b/prez/reference_data/search_methods/search_readme.md
deleted file mode 100755
index bb6200c9..00000000
--- a/prez/reference_data/search_methods/search_readme.md
+++ /dev/null
@@ -1,67 +0,0 @@
-## General info
-
-Search results are returned according to profiles. As such, profiles can be used to determine which properties,
-inbound/outbound links etc. are returned for a search result.
-
-## Adding search methods
-Search methods can be added in two different ways to Prez:
-
-1. Adding turtle files to the `prez/reference_data/search_methods` directory in the `prez` project. Prez will load these
-files on application startup.
-2. Adding remote search methods to a graph in Prez's backend. Prez will load these search methods on application
-startup, by looking within the `prez:systemGraph` graph, for instances of `prez:SearchMethod`.
-TODO: provide an update endpoint for adding search methods to the search methods dictionary - allowing instance data to
-be kept separate from system methods.
-
-## Defining search methods
-- Search methods must be RDF. They must be in turtle if added to the `prez/reference_data/search_methods` directory.
-- Declare a class of `prez:SearchMethod`
-- Have the following predicates, with objects as described:
-  - `dcterms:identifier` - a unique identifier for the search method used in the query string argument.
-  - `rdfs:label` - a name for the search method.
-  - `rdf:value` - a SPARQL SELECT query in the form described below:
-
-### SPARQL SELECT query format
-Search SPARQL queries MUST:
-
-- **use fully qualified URIs (i.e. no namespace prefixes are allowed). This is because simple string concatenation is used to insert the search query as a subquery in a query which gathers additional context for search results.**
-- return search results bound to the `?search_result_uri` variable. This is because the search method is used as a
-sub-select in an object listing query which expects this variable.
-- accept a search term using `$TERM` in the query. This will be substituted for the search term provided by the user.
-
-
-Search SPARQL queries SHOULD:
-
-- include a LIMIT clause by including `$LIMIT` to limit the number of results returned. Prez will default this limit to
-20 results if a LIMIT is not specified by users.
-
-Search SPARQL queries MAY:
-
-- return search results with the following variables bound:
-  - `?weight` - a float value representing the weight of the search result; with higher values being more relevant
-  - `?predicate` - the predicate (of the triple) the search result was found on. (Full text search can search across multiple predicates in
-  a group. This variable can be used to distinguish which predicate the search result was found on.)
-  - `?match` The (full) literal value of the object for the search result.
-
-Example query snippet:
-
-```sparql
-SELECT DISTINCT ?search_result_uri ?predicate ?match
-    WHERE { ?search_result_uri ?predicate ?match .
-        FILTER REGEX(?match, "^$TERM$$", "i")
-        }
-    } LIMIT $LIMIT
-```
-
-## Search Result Responses
-Search results are of the form:
-
-```turtle
-PREFIX prez: <https://prez.dev/>
-<search_result_uri> a prez:SearchResult ;
-    prez:weight <weight> ;
-    prez:predicate <predicate> ;
-    prez:match <match> ;
-    <predicate> <match> ;
-    ...
-```
diff --git a/prez/reference_data/search_methods/search_skos_preflabel.ttl b/prez/reference_data/search_methods/search_skos_preflabel.ttl
deleted file mode 100755
index ef1e293a..00000000
--- a/prez/reference_data/search_methods/search_skos_preflabel.ttl
+++ /dev/null
@@ -1,35 +0,0 @@
-@prefix dcterms: <http://purl.org/dc/terms/> .
-@prefix prez: <https://prez.dev/> .
-@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
-@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
-
-prez:skosWeighted a prez:SearchMethod ;
-    dcterms:identifier "skosPrefLabel" ;
-    rdfs:label "SKOS PrefLabel Search"@en ;
-    rdf:value """
-    {{
-    SELECT DISTINCT ?search_result_uri ?match (SUM(?w) AS ?weight)
-    WHERE {{
-        {{ # exact match on a prefLabel always wins
-            ?search_result_uri a <http://www.w3.org/2004/02/skos/core#Concept> ;
-                <http://www.w3.org/2004/02/skos/core#prefLabel> ?match ;
-                <http://www.w3.org/2004/02/skos/core#inScheme>|<http://www.w3.org/2004/02/skos/core#topConceptOf>|^<http://www.w3.org/2004/02/skos/core#hasTopConcept> ?cs .
-            $FOCUS_TO_FILTER
-            $FILTER_TO_FOCUS
-            BIND (50 AS ?w)
-            FILTER REGEX(?match, "^$TERM$$", "i")
-        }}
-        UNION
-        {{
-            ?search_result_uri a <http://www.w3.org/2004/02/skos/core#Concept> ;
-                <http://www.w3.org/2004/02/skos/core#prefLabel> ?match ;
-                <http://www.w3.org/2004/02/skos/core#inScheme>|<http://www.w3.org/2004/02/skos/core#topConceptOf>|^<http://www.w3.org/2004/02/skos/core#hasTopConcept> ?cs .
-            $FOCUS_TO_FILTER
-            $FILTER_TO_FOCUS
-            BIND (10 AS ?w)
-            FILTER REGEX(?match, "$TERM", "i")
-        }}
-    }}
-    GROUP BY ?cs ?search_result_uri ?match
-    }}
-    """.
diff --git a/prez/reference_data/search_methods/search_skos_weighted.ttl b/prez/reference_data/search_methods/search_skos_weighted.ttl
deleted file mode 100755
index c1241a91..00000000
--- a/prez/reference_data/search_methods/search_skos_weighted.ttl
+++ /dev/null
@@ -1,63 +0,0 @@
-@prefix dcterms: <http://purl.org/dc/terms/> .
-@prefix prez: <https://prez.dev/> .
-@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
-@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
-
-prez:skosWeighted a prez:SearchMethod ;
-    dcterms:identifier "skosWeighted" ;
-    rdfs:label "SKOS Weighted Search"@en ;
-    rdf:value """
-    {{
-    SELECT DISTINCT ?search_result_uri ?match (SUM(?w) AS ?weight)
-    WHERE {{
-        {{ # exact match on a prefLabel always wins
-            ?search_result_uri a <http://www.w3.org/2004/02/skos/core#Concept> ;
-                <http://www.w3.org/2004/02/skos/core#prefLabel> ?pl .
-            BIND (50 AS ?w)
-            $FOCUS_TO_FILTER
-            $FILTER_TO_FOCUS
-            FILTER REGEX(?pl, "^$TERM$$", "i")
-        }}
-        UNION
-        {{
-            ?search_result_uri a <http://www.w3.org/2004/02/skos/core#Concept> ;
-                <http://www.w3.org/2004/02/skos/core#prefLabel> ?pl .
-            BIND (10 AS ?w)
-            $FOCUS_TO_FILTER
-            $FILTER_TO_FOCUS
-            FILTER REGEX(?pl, "$TERM", "i")
-        }}
-        UNION
-        {{
-            ?search_result_uri a <http://www.w3.org/2004/02/skos/core#Concept> ;
-                <http://www.w3.org/2004/02/skos/core#altLabel> ?al ;
-                <http://www.w3.org/2004/02/skos/core#prefLabel> ?pl .
-            BIND (5 AS ?w)
-            $FOCUS_TO_FILTER
-            $FILTER_TO_FOCUS
-            FILTER REGEX(?al, "$TERM", "i")
-        }}
-        UNION
-        {{
-            ?search_result_uri a <http://www.w3.org/2004/02/skos/core#Concept> ;
-                <http://www.w3.org/2004/02/skos/core#hiddenLabel> ?hl ;
-                <http://www.w3.org/2004/02/skos/core#prefLabel> ?pl .
-            BIND (5 AS ?w)
-            $FOCUS_TO_FILTER
-            $FILTER_TO_FOCUS
-            FILTER REGEX(?hl, "$TERM", "i")
-        }}
-        UNION
-        {{
-            ?search_result_uri a <http://www.w3.org/2004/02/skos/core#Concept> ;
-                <http://www.w3.org/2004/02/skos/core#definition> ?d ;
-                <http://www.w3.org/2004/02/skos/core#prefLabel> ?pl .
-            BIND (1 AS ?w)
-            $FOCUS_TO_FILTER
-            $FILTER_TO_FOCUS
-            FILTER REGEX(?d, "$TERM", "i")
-        }}
-    }}
-    GROUP BY ?search_result_uri ?pl ?match
-    }}
-    """ .
diff --git a/prez/routers/ogc_router.py b/prez/routers/ogc_router.py
index 4a449933..5c52a366 100755
--- a/prez/routers/ogc_router.py
+++ b/prez/routers/ogc_router.py
@@ -10,25 +10,17 @@
 from prez.services.objects import object_function
 from prez.sparql.methods import Repo
 from prez.reference_data.prez_ns import PREZ
+from temp.grammar import IRI
 
 router = APIRouter(tags=["ogccatprez"])
 
 OGCE = Namespace(PREZ["endpoint/extended-ogc-records/"])
 
-ogc_endpoints = {
-    "top-level-catalog-listing": OGCE["top-level-catalog-listing"],
-    "top-level-catalog-object": OGCE["top-level-catalog-object"],
-    "lower-level-catalog-listing": OGCE["lower-level-catalog-listing"],
-    "lower-level-catalog-object": OGCE["lower-level-catalog-object"],
-    "resource-listing": OGCE["resource-listing"],
-    "resource-object": OGCE["resource-object"],
-}
-
 
 @router.get(
     "/catalogs",
-    summary="List Top Level Catalogs",
-    name=ogc_endpoints["top-level-catalog-listing"],
+    summary="Catalog Listing",
+    name=OGCE["catalog-listing"],
 )
 async def catalog_list(
     request: Request,
@@ -45,18 +37,19 @@ async def catalog_list(
         repo,
         system_repo,
         endpoint_uri,
-        page,
-        per_page,
+        hierarchy_level=1,
+        page=page,
+        per_page=per_page,
         search_term=search_term,
     )
 
 
 @router.get(
     "/catalogs/{catalogId}/collections",
-    summary="List Lower Level Catalogs",
-    name=ogc_endpoints["lower-level-catalog-listing"],
+    summary="Collection Listing",
+    name=OGCE["collection-listing"],
 )
-async def vocab_list(
+async def collection_listing(
     request: Request,
     page: Optional[int] = 1,
     per_page: Optional[int] = 20,
@@ -66,26 +59,28 @@
 ):
     search_term = request.query_params.get("q")
 
-    parent_uri = get_uri_for_curie_id(request.path_params["catalogId"])
+    path_node_1_uri = get_uri_for_curie_id(request.path_params["catalogId"])
     endpoint_uri = URIRef(request.scope.get("route").name)
     return await listing_function(
         request,
         repo,
         system_repo,
         endpoint_uri,
-        page,
-        per_page,
-        parent_uri,
+        hierarchy_level=2,
+        path_nodes={"path_node_1": IRI(value=path_node_1_uri)},
+        page=page,
+        per_page=per_page,
+        parent_uri=path_node_1_uri,
        search_term=search_term,
    )


@router.get(
    "/catalogs/{catalogId}/collections/{collectionId}/items",
-    summary="List Resources",
-    name=ogc_endpoints["resource-listing"],
+    summary="Item Listing",
+    name=OGCE["item-listing"],
)
-async def concept_list(
+async def item_listing(
    request: Request,
    page: Optional[int] = 1,
    per_page: Optional[int] = 20,
@@ -94,24 +89,27 @@
 ):
     search_term = request.query_params.get("q")
-    parent_uri = get_uri_for_curie_id(request.path_params["collectionId"])
+    path_node_1_uri = get_uri_for_curie_id(request.path_params["collectionId"])
+    path_node_2_uri = get_uri_for_curie_id(request.path_params["catalogId"])
     endpoint_uri = URIRef(request.scope.get("route").name)
     return await listing_function(
         request,
         repo,
         system_repo,
         endpoint_uri,
+        hierarchy_level=3,
+        path_nodes={"path_node_1": IRI(value=path_node_1_uri), "path_node_2": IRI(value=path_node_2_uri)},
+
page=page, + per_page=per_page, + parent_uri=path_node_1_uri, search_term=search_term, ) @router.get( "/catalogs/{catalogId}", - summary="Top Level Catalog Object", - name=ogc_endpoints["top-level-catalog-object"], + summary="Catalog Object", + name=OGCE["catalog-object"], ) async def catalog_object( request: Request, @@ -128,10 +126,10 @@ async def catalog_object( @router.get( "/catalogs/{catalogId}/collections/{collectionId}", - summary="Lower Level Catalog Object", - name=ogc_endpoints["lower-level-catalog-object"], + summary="Collection Object", + name=OGCE["collection-object"], ) -async def catalog_object( +async def collection_object( request: Request, repo: Repo = Depends(get_repo), system_repo: Repo = Depends(get_system_repo), @@ -146,10 +144,10 @@ async def catalog_object( @router.get( "/catalogs/{catalogId}/collections/{collectionId}/items/{itemId}", - summary="Resource Object", - name=ogc_endpoints["resource-object"], + summary="Item Object", + name=OGCE["item-object"], ) -async def catalog_object( +async def item_object( request: Request, repo: Repo = Depends(get_repo), system_repo: Repo = Depends(get_system_repo), diff --git a/prez/routers/ogc_spaceprez.py.old b/prez/routers/ogc_spaceprez.py.old deleted file mode 100755 index 929b5bda..00000000 --- a/prez/routers/ogc_spaceprez.py.old +++ /dev/null @@ -1,181 +0,0 @@ -from typing import Optional - -from fastapi import APIRouter, Request, Depends -from rdflib import Namespace -from fastapi.responses import PlainTextResponse - -from prez.dependencies import get_repo, get_system_repo -from prez.services.curie_functions import get_uri_for_curie_id -from prez.services.listings import listing_function -from prez.services.objects import object_function -from prez.sparql.methods import Repo - -router = APIRouter(tags=["SpacePrez"]) - -SP_EP = Namespace("https://prez.dev/endpoint/spaceprez/") - - -@router.get( - "/s", - summary="SpacePrez Home", -) -async def spaceprez_home(): - return PlainTextResponse("SpacePrez Home") - - -@router.get( - "/s/catalogs", - summary="List Datasets", - name=SP_EP["dataset-listing"], -) -async def list_datasets( - request: Request, - repo: Repo = Depends(get_repo), - system_repo: Repo = Depends(get_system_repo), - page: Optional[int] = 1, - per_page: Optional[int] = 20, - search_term: Optional[str] = None, -): - search_term = request.query_params.get("q") - endpoint_uri = SP_EP["dataset-listing"] - return await listing_function( - request=request, - repo=repo, - system_repo=system_repo, - endpoint_uri=endpoint_uri, - page=page, - per_page=per_page, - search_term=search_term, - ) - - -@router.get( - "/s/catalogs/{dataset_curie}/collections", - summary="List Feature Collections", - name=SP_EP["feature-collection-listing"], -) -async def list_feature_collections( - request: Request, - dataset_curie: str, - repo: Repo = Depends(get_repo), - system_repo: Repo = Depends(get_system_repo), - page: Optional[int] = 1, - per_page: Optional[int] = 20, - search_term: Optional[str] = None, -): - search_term = request.query_params.get("q") - endpoint_uri = SP_EP["feature-collection-listing"] - dataset_uri = get_uri_for_curie_id(dataset_curie) - return await listing_function( - request=request, - repo=repo, - system_repo=system_repo, - endpoint_uri=endpoint_uri, - page=page, - per_page=per_page, - parent_uri=dataset_uri, - search_term=search_term, - ) - - -@router.get( - "/s/catalogs/{dataset_curie}/collections/{collection_curie}/items", - summary="List Features", - name=SP_EP["feature-listing"], -) -async def 
list_features( - request: Request, - dataset_curie: str, - collection_curie: str, - repo: Repo = Depends(get_repo), - system_repo: Repo = Depends(get_system_repo), - page: Optional[int] = 1, - per_page: Optional[int] = 20, - search_term: Optional[str] = None, -): - search_term = request.query_params.get("q") - collection_uri = get_uri_for_curie_id(collection_curie) - endpoint_uri = SP_EP["feature-listing"] - return await listing_function( - request=request, - repo=repo, - system_repo=system_repo, - endpoint_uri=endpoint_uri, - page=page, - per_page=per_page, - parent_uri=collection_uri, - search_term=search_term, - ) - - -@router.get( - "/s/catalogs/{dataset_curie}", summary="Get Dataset", name=SP_EP["dataset-object"] -) -async def dataset_item( - request: Request, - dataset_curie: str, - repo: Repo = Depends(get_repo), - system_repo: Repo = Depends(get_system_repo), -): - request_url = request.scope["path"] - endpoint_uri = SP_EP["dataset-object"] - dataset_uri = get_uri_for_curie_id(dataset_curie) - return await object_function( - request=request, - endpoint_uri=endpoint_uri, - uri=dataset_uri, - request_url=request_url, - repo=repo, - system_repo=system_repo, - ) - - -@router.get( - "/s/catalogs/{dataset_curie}/collections/{collection_curie}", - summary="Get Feature Collection", - name=SP_EP["feature-collection-object"], -) -async def feature_collection_item( - request: Request, - dataset_curie: str, - collection_curie: str, - repo: Repo = Depends(get_repo), - system_repo: Repo = Depends(get_system_repo), -): - request_url = request.scope["path"] - endpoint_uri = SP_EP["feature-collection-object"] - collection_uri = get_uri_for_curie_id(collection_curie) - return await object_function( - request=request, - endpoint_uri=endpoint_uri, - uri=collection_uri, - request_url=request_url, - repo=repo, - system_repo=system_repo, - ) - - -@router.get( - "/s/catalogs/{dataset_curie}/collections/{collection_curie}/items/{feature_curie}", - summary="Get Feature", - name="https://prez.dev/endpoint/spaceprez/feature", -) -async def feature_item( - request: Request, - dataset_curie: str, - collection_curie: str, - feature_curie: str, - repo: Repo = Depends(get_repo), - system_repo: Repo = Depends(get_system_repo), -): - request_url = request.scope["path"] - endpoint_uri = SP_EP["feature-object"] - feature_uri = get_uri_for_curie_id(feature_curie) - return await object_function( - request=request, - endpoint_uri=endpoint_uri, - uri=feature_uri, - request_url=request_url, - repo=repo, - system_repo=system_repo, - ) diff --git a/prez/routers/profiles.py b/prez/routers/profiles.py index 95b42cd9..c83a29cc 100755 --- a/prez/routers/profiles.py +++ b/prez/routers/profiles.py @@ -14,21 +14,6 @@ summary="List Profiles", name="https://prez.dev/endpoint/system/profiles-listing", ) -@router.get( - "/s/profiles", - summary="SpacePrez Profiles", - name="https://prez.dev/endpoint/system/spaceprez-profiles-listing", -) -@router.get( - "/v/profiles", - summary="VocPrez Profiles", - name="https://prez.dev/endpoint/system/vocprez-profiles-listing", -) -@router.get( - "/c/profiles", - summary="CatPrez Profiles", - name="https://prez.dev/endpoint/system/catprez-profiles-listing", -) async def profiles( request: Request, page: int = 1, @@ -41,6 +26,7 @@ async def profiles( repo=repo, system_repo=repo, endpoint_uri=endpoint_uri, + hierarchy_level=1, page=page, per_page=per_page, ) diff --git a/prez/routers/search.py b/prez/routers/search.py index 6b094fee..a00b20ab 100755 --- a/prez/routers/search.py +++ 
b/prez/routers/search.py @@ -8,7 +8,7 @@ from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo, populate_profile_and_mediatype from prez.reference_data.prez_ns import PREZ from prez.renderers.renderer import return_from_graph -from prez.services.link_generation import _add_prez_links +from prez.services.link_generation import add_prez_links from prez.sparql.methods import Repo from prez.sparql.search_query import SearchQuery @@ -47,7 +47,7 @@ async def search( return PlainTextResponse(query, media_type="application/sparql-query") if "anot+" in prof_and_mt_info.mediatype: - await _add_prez_links(graph, repo, system_repo) + await add_prez_links(graph, repo) return await return_from_graph( graph, diff --git a/prez/services/link_generation.py b/prez/services/link_generation.py index ffe8e5ad..a72f12d3 100755 --- a/prez/services/link_generation.py +++ b/prez/services/link_generation.py @@ -1,51 +1,23 @@ import logging -import time from string import Template -from typing import FrozenSet -from fastapi import Depends from rdflib import Graph, Literal, URIRef, DCTERMS, BNode from rdflib.namespace import SH from prez.cache import endpoints_graph_cache, links_ids_graph_cache -from prez.dependencies import get_system_repo +from prez.config import settings +from prez.reference_data.prez_ns import ONT from prez.reference_data.prez_ns import PREZ from prez.services.curie_functions import get_curie_id_for_uri from prez.services.model_methods import get_classes from prez.sparql.methods import Repo -from prez.sparql.objects_listings import ( - get_endpoint_template_queries, - generate_relationship_query, -) -from temp.shacl2sparql import ONT +from temp.grammar import * +from temp.shacl_node_selection import NodeShape log = logging.getLogger(__name__) -async def _add_prez_link_to_collection_page( - item_graph: Graph, item_uri: URIRef, request_url: str, endpoint_uri: URIRef -): - """ - 1. get the request's URL; this will be the URL of the current object page - 2. look up the endpoint that hasParentEndpoint the object endpoint in the endpoints graph cache - 3. take the fragment (suffix) of the endpoint template for the child endpoint identified in step 2 - 4. append the fragment to the URL from step 1 - """ - child_endpoint = endpoints_graph_cache.value( - predicate=ONT.parentEndpoint, object=endpoint_uri - ) - child_endpoint_template = endpoints_graph_cache.value( - subject=child_endpoint, predicate=ONT.endpointTemplate - ) - if child_endpoint_template: - last_part_of_url = child_endpoint_template.split("/")[-1] - collections_url = f"{request_url}/{last_part_of_url}" - bnode = BNode() - item_graph.add((item_uri, PREZ.members, bnode)) - item_graph.add((bnode, PREZ.link, Literal(collections_url))) - - -async def _add_prez_links(graph: Graph, repo: Repo, system_repo: Repo): +async def add_prez_links(graph: Graph, repo: Repo): # get all URIRefs - if Prez can find a class and endpoint for them, an internal link will be generated. 
uris = [uri for uri in graph.all_nodes() if isinstance(uri, URIRef)] uri_to_klasses = {} @@ -53,117 +25,96 @@ async def _add_prez_links(graph: Graph, repo: Repo, system_repo: Repo): uri_to_klasses[uri] = await get_classes(uri, repo) for uri, klasses in uri_to_klasses.items(): - await _new_link_generation(uri, repo, klasses, system_repo) - # await _create_internal_links_graph(uri, graph, repo, klasses, system_repo) - -async def _new_link_generation(uri, repo: Repo, klasses, system_repo): - # get the endpoints that can deliver the class - # many node shapes to one endpoint; multiple node shapes can point to the endpoint - query = f"""SELECT ?nodeShape {{ ?nodeShape a {SH.NodeShape} ; - {SH.targetClass} ?klasses . - VALUES ?klasses {" ".join(["<" + klass.n3() + ">" for klass in klasses])} - }}""" - {" ".join(["<" + klass.n3() + ">" for klass in klasses])} - system_repo.send_queries() - # if there's a link generation query for the endpoint, run it + if klasses: # need class to know which endpoints can deliver the class + await _link_generation(uri, repo, klasses, graph) - _, tabular_results = await repo.send_queries([], [(None, query)]) - -async def _create_internal_links_graph(uri, graph, repo: Repo, klasses, system_repo): +async def _link_generation(uri: URIRef, repo: Repo, klasses, graph: Graph): + # check the cache quads = list( links_ids_graph_cache.quads((None, None, None, uri)) - ) # context required as not all triples that relate to links or identifiers for a particular object have that object's URI as the subject + ) # context required as not all triples that relate to links or identifiers for a particular object have that + # object's URI as the subject if quads: for quad in quads: graph.add(quad[:3]) - else: - for klass in klasses: - endpoint_to_relations = await get_endpoint_info_for_classes( - frozenset([klass]), system_repo - ) - relationship_query = generate_relationship_query(uri, endpoint_to_relations) - if relationship_query: - _, tabular_results = await repo.send_queries( - [], [(uri, relationship_query)] + # get the endpoints that can deliver the class + # many node shapes to one endpoint; multiple node shapes can point to the endpoint + else: # generate links + available_nodeshapes = [] + available_nodeshape_triples = list(endpoints_graph_cache.triples_choices((None, SH.targetClass, list(klasses)))) + if available_nodeshape_triples: + for ns, _, _ in available_nodeshape_triples: + available_nodeshapes.append( + NodeShape( + uri=ns, + graph=endpoints_graph_cache, + focus_node=IRI(value=uri), + ) ) - for _, result in tabular_results: - quads = generate_system_links_object(result, uri) - for quad in quads: - graph.add(quad[:3]) # just add the triple not the quad - links_ids_graph_cache.add(quad) # add the quad to the cache - - -async def get_endpoint_info_for_classes( - classes: FrozenSet[URIRef], system_repo -) -> dict: - """ - Queries Prez's in memory reference data for endpoints to determine which endpoints are relevant for the classes an - object has, along with information about "parent" objects included in the URL path for the object. This information - is whether the relationship in RDF is expected to be from the parent to the child, or from the child to the parent, - and the predicate used for the relationship. 
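+ # Each NodeShape above now carries the triple patterns and graph patterns needed to reach the focus node from its parent objects; e.g. a shape with + # sh:path [ sh:inversePath dcterms:hasPart ] renders as the pattern ?path_node_1 dcterms:hasPart <focus-node-uri> (illustrative prefix). + # One SELECT query per shape is built from these patterns below.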
- """ - endpoint_query = get_endpoint_template_queries(classes) - results = await system_repo.send_queries([], [(None, endpoint_query)]) - endpoint_to_relations = {} - if results[1][0][1] != [{}]: - for result in results[1][0][1]: - endpoint_template = result["endpoint_template"]["value"] - relation = result.get("relation_predicate") - if relation: - relation = URIRef(relation["value"]) - direction = result.get("relation_direction") - if direction: - direction = URIRef(direction["value"]) - if endpoint_template not in endpoint_to_relations: - endpoint_to_relations[endpoint_template] = [(relation, direction)] - else: - endpoint_to_relations[endpoint_template].append((relation, direction)) - return endpoint_to_relations - -def generate_system_links_object(relationship_results: list, object_uri: str): - """ - Generates system links for objects from the 'object' endpoint - relationship_results: a list of dictionaries, one per endpoint, each dictionary contains: - 1. an endpoint template with parameters denoted by `$` to be populated using python's string Template library - 2. the arguments to populate this endpoint template, as URIs. The get_curie_id_for_uri function is used to convert - these to curies. - """ - endpoints = [] - link_quads = [] - for endpoint_results in relationship_results: - endpoint_template = Template(endpoint_results["endpoint"]["value"]) - template_args = { - k: get_curie_id_for_uri(v["value"]) - for k, v in endpoint_results.items() - if k != "endpoint" - } | {"object": get_curie_id_for_uri(URIRef(object_uri))} - endpoints.append(endpoint_template.substitute(template_args)) - for endpoint in endpoints: - link_quads.append( - (URIRef(object_uri), PREZ["link"], Literal(endpoint), object_uri) - ) - for ep_result in relationship_results: - for k, v in ep_result.items(): - if k != "endpoint": - uri = URIRef(v["value"]) - curie = get_curie_id_for_uri(uri) - link_quads.append( - ( - uri, - DCTERMS.identifier, - Literal(curie, datatype=PREZ.identifier), - object_uri, - ) + link_queries = [] + for ns in available_nodeshapes: + link_queries.append( + ( + ns.uri, + "".join(SubSelect( + select_clause=SelectClause( + variables_or_all=ns.path_nodes.values()), + where_clause=WhereClause( + group_graph_pattern=GroupGraphPattern( + content=GroupGraphPatternSub( + triples_block=TriplesBlock( + triples=ns.triples_list + ), + graph_patterns_or_triples_blocks=ns.gpnt_list + ) + ) + ) + ).render()) ) - object_curie = get_curie_id_for_uri(object_uri) - link_quads.append( - ( - object_uri, - DCTERMS.identifier, - Literal(object_curie, datatype=PREZ.identifier), - object_uri, - ) - ) - return link_quads + ) + _, results = await repo.send_queries([], link_queries) + for result in results: + # if the list at tuple[1] > 0 then there's some result and a link should be generated. + # NB for top level links, there will be a result (the graph pattern matched) BUT the result will not form + # part of the link. e.g. ?path_node_1 will have result(s) but is not part of the link. 
+ for solution in result[1]: + # get the hierarchy level + hl = endpoints_graph_cache.value(predicate=ONT.hierarchyLevel, subject=result[0]) + if not hl: + raise ValueError( + f"Endpoint {result[0]} has no hierarchy level") # TODO validate endpoint nodes with SHACL + components = list(settings.endpoint_structure[:int(hl)]) + variables = reversed(["focus_node"] + [f"path_node_{i}" for i in range(1, len(components))]) + item_link_template = Template( + "".join([f"/{comp}/${pattern}" for comp, pattern in zip(components, variables)])) + curie_for_uri = get_curie_id_for_uri(uri) + sol_values = {k: get_curie_id_for_uri(v["value"]) for k, v in solution.items()} + object_link = item_link_template.substitute(sol_values | {"focus_node": curie_for_uri}) + members_link = None + if len(components) < len(list(settings.endpoint_structure)): + members_link = object_link + "/" + settings.endpoint_structure[len(components)] + + quads = [] + quads.append( + (uri, PREZ["link"], Literal(object_link), uri) + ) + quads.append( + (uri, DCTERMS.identifier, Literal(curie_for_uri, datatype=PREZ.identifier), uri) + ) + if members_link: + existing_members_link = list( + links_ids_graph_cache.quads((uri, PREZ["members"], None, uri)) + ) + if not existing_members_link: + members_bn = BNode() + quads.append( + (uri, PREZ["members"], members_bn, uri) + ) + quads.append( + (members_bn, PREZ["link"], Literal(members_link), uri) + ) + for quad in quads: + graph.add(quad[:3]) + links_ids_graph_cache.add(quad) diff --git a/prez/services/listings.py b/prez/services/listings.py index ac8fd3fe..45b1f538 100755 --- a/prez/services/listings.py +++ b/prez/services/listings.py @@ -1,45 +1,64 @@ +import copy import logging -from typing import Optional +from typing import Optional, Dict from fastapi import Request from fastapi.responses import PlainTextResponse from rdflib import URIRef, Literal -from rdflib.namespace import PROF, RDF, SH +from rdflib.namespace import RDF, SH +from rdframe import CQLParser from prez.cache import profiles_graph_cache, endpoints_graph_cache from prez.config import settings from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo, populate_profile_and_mediatype -from prez.reference_data.prez_ns import ONT, PREZ +from prez.reference_data.prez_ns import PREZ from prez.renderers.renderer import return_from_graph -from prez.services.link_generation import _add_prez_links +from prez.services.link_generation import add_prez_links +from prez.services.model_methods import get_classes +from prez.sparql.count_query import CountQuery from prez.sparql.methods import Repo -from prez.sparql.objects_listings import ( - temp_listing_count, -) from prez.sparql.search_query import SearchQuery -from rdframe.grammar import SubSelect -from rdframe import SHACLParser -from rdframe import CQLParser +from temp.grammar import * +# from rdframe.grammar import SubSelect +# from rdframe import PrezQueryConstructor +from temp.shacl2sparql import PrezQueryConstructor +from temp.shacl_node_selection import NodeShape log = logging.getLogger(__name__) async def listing_function( - request: Request, - repo: Repo, - system_repo: Repo, - endpoint_uri: URIRef, - page: int = 1, - per_page: int = 20, - parent_uri: Optional[URIRef] = None, - cql_parser: CQLParser = None, - search_term: Optional[str] = None, + request: Request, + repo: Repo, + system_repo: Repo, + endpoint_uri: URIRef, + hierarchy_level: int, + path_nodes: Dict[str, Var | IRI] = None, + page: int = 1, + per_page: int = 20, + parent_uri: Optional[URIRef] = None, + 
cql_parser: CQLParser = None, + search_term: Optional[str] = None, ): + """ + # determine the relevant node selection part of the query - from SHACL, CQL, Search + # determine the relevant profile for the query - from SHACL only + # gather relevant info for the node selection part of the query + # gather relevant info for the profile part of the query + # build the query + """ queries = [] - # class is from endpoint definition. - target_class = endpoints_graph_cache.value(endpoint_uri, SH.targetClass) + # determine possible SHACL node shapes for endpoint + node_selection_shape, target_classes = await determine_nodeshape( + endpoint_uri, hierarchy_level, parent_uri, path_nodes, repo, system_repo) + + if not path_nodes: + path_nodes = {} + ns = NodeShape(uri=node_selection_shape, graph=endpoints_graph_cache, path_nodes=path_nodes) - prof_and_mt_info = ProfilesMediatypesInfo(request=request, classes=[target_class], system_repo=system_repo, listing=True) + # determine the relevant profile + prof_and_mt_info = ProfilesMediatypesInfo(request=request, classes=target_classes, system_repo=system_repo, + listing=True) await populate_profile_and_mediatype(prof_and_mt_info, system_repo) selected_class, selected_profile = ( prof_and_mt_info.selected_class, @@ -48,35 +67,36 @@ async def listing_function( runtime_values = {} if prof_and_mt_info.profile == URIRef( - "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" + "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" ): endpoint_uri = URIRef("https://prez.dev/endpoint/system/alt-profiles-listing") - runtime_values["selectedClass"] = listing_class + runtime_values["selectedClass"] = prof_and_mt_info.selected_class runtime_values["limit"] = per_page runtime_values["offset"] = (page - 1) * per_page - runtime_values["parent_1"] = parent_uri - shacl_parser = SHACLParser( + query_constructor = PrezQueryConstructor( runtime_values, endpoints_graph_cache, profiles_graph_cache, - endpoint_uri, - selected_profile, + listing_or_object="listing", + endpoint_uri=endpoint_uri, + profile_uri=selected_profile, + node_selection_triples=ns.triples_list, + node_selection_gpnt=ns.gpnt_list, + target_class=target_classes ) if cql_parser: cql_parser.parse() cql_select_ggps = cql_parser.ggps_inner_select - shacl_parser.additional_ggps = cql_select_ggps + query_constructor.additional_ggps = cql_select_ggps - shacl_parser.generate_sparql() - main_query = shacl_parser.sparql + query_constructor.generate_sparql() + main_query = query_constructor.sparql if search_term: - subselect = find_instances(shacl_parser.main_where_ggps, SubSelect)[ - 0 - ] # assume there's only one subselect + subselect = query_constructor.inner_select search_query = SearchQuery( search_term=search_term, pred_vals=settings.label_predicates, @@ -94,30 +114,30 @@ async def listing_function( # add a count query if it's an annotated mediatype if "anot+" in prof_and_mt_info.mediatype and not search_term: - # pull the subselect out of the query string - subselect = find_instances(shacl_parser.main_where_ggps, SubSelect)[ - 0 - ] # assume there's only one subselect + subselect = copy.deepcopy(query_constructor.inner_select) subselect.solution_modifier = None # remove the limit and offset from the subselect so that we can get a count - if prof_and_mt_info.profile == URIRef( - "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" - ): - count_class = PROF.Profile - else: - count_class = target_class - if count_class: # target_class may be unknown (None) for queries involving CQL - 
queries.append(temp_listing_count(subselect, count_class)) + count_query = CountQuery(subselect=subselect).render() + queries.append(count_query) + + # if prof_and_mt_info.profile == URIRef( + # "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" + # ): + # count_class = PROF.Profile + # else: + # count_class = target_classes + # if count_class: # target_class may be unknown (None) for queries involving CQL + # queries.append(temp_listing_count(subselect, count_class)) if prof_and_mt_info.profile == URIRef( - "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" + "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" ): item_graph, _ = await system_repo.send_queries(queries, []) if "anot+" in prof_and_mt_info.mediatype: - await _add_prez_links(item_graph, system_repo, system_repo) + await add_prez_links(item_graph, system_repo) else: item_graph, _ = await repo.send_queries(queries, []) if "anot+" in prof_and_mt_info.mediatype: - await _add_prez_links(item_graph, repo, system_repo) + await add_prez_links(item_graph, repo) # count search results - hard to do in SPARQL as the SELECT part of the query is NOT aggregated if search_term: count = len(list(item_graph.subjects(RDF.type, PREZ.SearchResult))) @@ -132,6 +152,70 @@ ) +async def determine_nodeshape(endpoint_uri, hierarchy_level, parent_uri, path_nodes, repo, system_repo): + node_selection_shape = None + target_classes = [] + relevant_ns_query = f"""SELECT ?ns ?tc + WHERE {{ + {endpoint_uri.n3()} <https://prez.dev/ont/relevantShapes> ?ns . + ?ns <http://www.w3.org/ns/shacl#targetClass> ?tc ; + <https://prez.dev/ont/hierarchyLevel> {hierarchy_level} . + }}""" + _, r = await system_repo.send_queries([], [(parent_uri, relevant_ns_query)]) + tabular_results = r[0][1] + distinct_ns = set([result["ns"]["value"] for result in tabular_results]) + if len(distinct_ns) == 1: # only one possible node shape + node_selection_shape = URIRef(tabular_results[0]["ns"]["value"]) + target_classes = [URIRef(result["tc"]["value"]) for result in tabular_results] + elif len(distinct_ns) > 1: # more than one possible node shape + # try all of the available nodeshapes + nodeshapes = [NodeShape(uri=URIRef(ns), graph=endpoints_graph_cache, path_nodes=path_nodes) for ns in + distinct_ns] + + for ns in nodeshapes: + ns.gpnt_list.append( GraphPatternNotTriples(content=Bind( expression=Expression.from_primary_expr( PrimaryExpression(content=IRIOrFunction(iri=IRI(value=ns.uri))) ), var=Var(value="nodeshape"), ) ) ) + + ggps_list = [GroupGraphPattern( content=GroupGraphPatternSub( graph_patterns_or_triples_blocks=[ *ns.gpnt_list, TriplesBlock(triples=ns.triples_list), ] ) ) for ns in nodeshapes] + ss = SubSelect( select_clause=SelectClause( variables_or_all=[Var(value="nodeshape")]), where_clause=WhereClause( group_graph_pattern=GroupGraphPattern( content=GroupGraphPatternSub( graph_patterns_or_triples_blocks=[ GraphPatternNotTriples( content=GroupOrUnionGraphPattern( group_graph_patterns=ggps_list ) ) ] ) ) ), solution_modifier=SolutionModifier() ) + ss_query = "".join(ss.render()) + _, r = await repo.send_queries([], [(parent_uri, ss_query)]) + node_selection_shape = URIRef(r[0][1][0]["nodeshape"]["value"]) + target_classes = list(endpoints_graph_cache.objects(node_selection_shape, SH.targetClass)) + return node_selection_shape, target_classes + + def find_instances(obj, cls): found = [] diff --git a/prez/services/objects.py index fbb16034..8cbc3596 100755 --- a/prez/services/objects.py +++ b/prez/services/objects.py @@ -5,17 +5,14 @@ from rdflib import URIRef from 
prez.cache import profiles_graph_cache, endpoints_graph_cache -from prez.models.object_item import ObjectItem from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo, populate_profile_and_mediatype -from prez.reference_data.prez_ns import PREZ, EP +from prez.reference_data.prez_ns import EP from prez.renderers.renderer import return_from_graph -from prez.services.link_generation import ( - _add_prez_links, - _add_prez_link_to_collection_page, -) +from prez.services.link_generation import add_prez_links from prez.services.model_methods import get_classes from prez.sparql.methods import Repo -from rdframe import SHACLParser +from temp.grammar import IRI +from temp.shacl2sparql import PrezQueryConstructor log = logging.getLogger(__name__) @@ -32,19 +29,6 @@ async def object_function( # ConnegP prof_and_mt_info = ProfilesMediatypesInfo(request=request, classes=klasses, system_repo=system_repo) await populate_profile_and_mediatype(prof_and_mt_info, system_repo) - # if we're on the object endpoint and a profile hasn't been requested, use the open profile - # if (endpoint_uri == EP.object) and not ( - # prof_and_mt_info.req_profiles or prof_and_mt_info.req_profiles_token - # ): - # prof_and_mt_info.selected_class = None - # prof_and_mt_info.profile = PREZ["profile/open"] - # create the object with all required info - object_item = ObjectItem( # object item now does not need request - uri=uri, - classes=klasses, - profile=prof_and_mt_info.profile, - selected_class=prof_and_mt_info.selected_class, - ) # handle alternate profiles runtime_values = {} @@ -52,18 +36,21 @@ "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" ): endpoint_uri = URIRef("https://prez.dev/endpoint/system/alt-profiles-listing") - runtime_values["selectedClass"] = object_item.selected_class + # runtime_values["selectedClass"] = prof_and_mt_info.selected_class - runtime_values["object"] = uri - shacl_parser = SHACLParser( + # runtime_values["object"] = uri + query_constructor = PrezQueryConstructor( runtime_values, endpoints_graph_cache, profiles_graph_cache, - endpoint_uri, - prof_and_mt_info.profile, + listing_or_object="object", + focus_node=IRI(value=uri), + endpoint_uri=endpoint_uri, + profile_uri=prof_and_mt_info.profile, ) - shacl_parser.generate_sparql() - query = shacl_parser.sparql + query_constructor.generate_sparql() + query = query_constructor.sparql + req_mt = prof_and_mt_info.req_mediatypes if req_mt: if list(req_mt)[0] == "application/sparql-query": @@ -77,14 +64,12 @@ item_graph, _ = await repo.send_queries([query], []) if "anot+" in prof_and_mt_info.mediatype: - if not endpoint_uri == EP.object: - await _add_prez_link_to_collection_page( - item_graph, uri, request_url, endpoint_uri - ) - await _add_prez_links(item_graph, repo, system_repo) + await add_prez_links(item_graph, repo) return await return_from_graph( item_graph, prof_and_mt_info.mediatype, - object_item.profile, + prof_and_mt_info.profile, prof_and_mt_info.profile_headers, prof_and_mt_info.selected_class, repo, diff --git a/prez/sparql/count_query.py new file mode 100755 index 00000000..a45fb358 --- /dev/null +++ b/prez/sparql/count_query.py @@ -0,0 +1,75 @@ +from pydantic import BaseModel +from rdflib import RDF, BNode + +from prez.reference_data.prez_ns import PREZ +from temp.grammar import * + + +class CountQuery(BaseModel): + class Config: + arbitrary_types_allowed = True + + subselect: SubSelect + + def 
render(self): + cq = self.create_construct_query() + return "".join(part for part in cq.render()) + + def create_construct_query(self): + self.remove_limit_and_offset() + self.rebuild_select_clause() + cq = ConstructQuery( + construct_template=self.create_construct_template(), + where_clause=WhereClause( + group_graph_pattern=GroupGraphPattern( + content=self.subselect + ) + ), + solution_modifier=SolutionModifier() + ) + return cq + + def remove_limit_and_offset(self): + self.subselect.solution_modifier = None + + def rebuild_select_clause(self): + sc = SelectClause( + variables_or_all=[ + ( + Expression.from_primary_expr( + PrimaryExpression( + content=BuiltInCall( + other_expressions=Aggregate( + function_name="COUNT", + distinct=True, + expression=Expression.from_primary_expr( + PrimaryExpression( + content=Var( + value="focus_node") + ) + ) + ) + ) + ) + ), + Var(value="count") + ) + ] + ) + self.subselect.select_clause = sc + + def create_construct_template(self): + """Builds the CONSTRUCT template "[] prez:count ?count", pairing the count with a fresh blank node.""" + bn = BlankNode(value=BlankNodeLabel(part_1=BNode())) + count_triples = [ + SimplifiedTriple( + subject=bn, + predicate=IRI(value=PREZ["count"]), + object=Var(value="count"), + ) + ] + ct = ConstructTemplate( + construct_triples=ConstructTriples(triples=count_triples) + ) + return ct diff --git a/prez/sparql/methods.py index beac6656..e604e979 100755 --- a/prez/sparql/methods.py +++ b/prez/sparql/methods.py @@ -29,7 +29,7 @@ async def tabular_query_to_table(self, query: str, context: URIRef = None): async def send_queries( self, rdf_queries: List[str], tabular_queries: List[Tuple[URIRef, str]] = None - ): + ) -> Tuple[Graph, List]: # Common logic to send both query types in parallel results = await asyncio.gather( *[self.rdf_query_to_graph(query) for query in rdf_queries if query], diff --git a/prez/sparql/objects_listings.py index 56b7353f..b1419cd2 100755 --- a/prez/sparql/objects_listings.py +++ b/prez/sparql/objects_listings.py @@ -8,7 +8,7 @@ from prez.cache import tbox_cache, profiles_graph_cache from prez.config import settings from prez.services.curie_functions import get_uri_for_curie_id -from temp.grammar import SubSelect +from temp.grammar.grammar import SubSelect log = logging.getLogger(__name__) @@ -388,9 +388,9 @@ def select_profile_mediatype( VALUES ?class {{{" ".join('<' + str(klass) + '>' for klass in classes)}}} ?class rdfs:subClassOf* ?mid . ?mid rdfs:subClassOf* ?base_class .
- VALUES ?base_class {{ dcat:Dataset geo:FeatureCollection prez:FeatureCollectionList prez:FeatureList geo:Feature - skos:ConceptScheme skos:Concept prez:ConceptList skos:Collection prez:DatasetList prez:VocPrezCollectionList prez:SchemesList - prez:CatalogList prez:ResourceList prez:ProfilesList dcat:Catalog dcat:Resource prof:Profile prez:SPARQLQuery + VALUES ?base_class {{ dcat:Dataset geo:FeatureCollection geo:Feature + skos:ConceptScheme skos:Concept skos:Collection + prez:ProfilesList dcat:Catalog dcat:Resource prof:Profile prez:SPARQLQuery prez:SearchResult prez:CQLObjectList prez:QueryablesList prez:Object }} ?profile altr-ext:constrainsClass ?class ; altr-ext:hasResourceFormat ?format ; diff --git a/prez/sparql/resource.py b/prez/sparql/resource.py deleted file mode 100755 index c0cea689..00000000 --- a/prez/sparql/resource.py +++ /dev/null @@ -1,9 +0,0 @@ -from rdflib import Graph - -from prez.sparql.methods import Repo - - -async def get_resource(iri: str, repo: Repo) -> Graph: - query = f"""DESCRIBE <{iri}>""" - graph, _ = await repo.send_queries([query], []) - return graph diff --git a/prez/sparql/search_query.py b/prez/sparql/search_query.py index 639253ea..aea32c24 100755 --- a/prez/sparql/search_query.py +++ b/prez/sparql/search_query.py @@ -1,53 +1,10 @@ -from typing import Optional, List, Union, Tuple +from typing import Optional, List from pydantic import BaseModel from rdflib import RDF, URIRef from prez.reference_data.prez_ns import PREZ -from temp.grammar import ( - Var, - LANGTAG, - BooleanLiteral, - PrimaryExpression, - GroupGraphPattern, - GroupGraphPatternSub, - SimplifiedTriple, - Bind, - Expression, - GraphPatternNotTriples, - NumericLiteral, - BuiltInCall, - Filter, - RDFLiteral, - RegexExpression, - Constraint, - GroupOrUnionGraphPattern, - OptionalGraphPattern, - FunctionCall, - ArgList, - BrackettedExpression, - InlineData, - InlineDataOneVar, - DataBlock, - IRI, - SelectClause, - Aggregate, - SubSelect, - GroupClause, - GroupCondition, - SolutionModifier, - WhereClause, - OrderClause, - OrderCondition, - SubSelectString, - ConstructTemplate, - ConstructTriples, - ConstructQuery, - LimitClause, - LimitOffsetClauses, - OffsetClause, - DataBlockValue, -) +from temp.grammar import * class SearchQuery(BaseModel): @@ -291,11 +248,11 @@ def create_union_of_inner_ggps(self): return gougp def create_inner_ggp( - self, - weight_val: int, - function: str, - prefix: str, - case_insensitive: Optional[bool], + self, + weight_val: int, + function: str, + prefix: str, + case_insensitive: Optional[bool], ) -> GroupGraphPattern: ggp = GroupGraphPattern(content=GroupGraphPatternSub()) @@ -365,7 +322,6 @@ def create_inner_ggp( ggp.content.add_pattern(filter_gpnt) return ggp - # if __name__ == "__main__": # # additional_ss = SubSelectString(select_string="SELECT * {?focus_node a owl:Class}") # sr_uri = Var(value="focus_node") diff --git a/temp/cql2sparql.py b/temp/cql2sparql.py index f2e3b266..c0f616b2 100755 --- a/temp/cql2sparql.py +++ b/temp/cql2sparql.py @@ -1,10 +1,10 @@ from typing import Generator from pyld import jsonld -from rdflib import URIRef, Namespace, Variable, Literal +from rdflib import URIRef, Namespace from rdflib.namespace import GEO, SH -from temp.grammar import ( +from temp.grammar.grammar import ( GroupOrUnionGraphPattern, GroupGraphPatternSub, TriplesBlock, diff --git a/temp/grammar/__init__.py b/temp/grammar/__init__.py new file mode 100644 index 00000000..6b6bf28d --- /dev/null +++ b/temp/grammar/__init__.py @@ -0,0 +1,74 @@ +from .grammar import 
SPARQLGrammarBase, BlankNodeLabel, Anon, Var, IRI, BlankNode, RDFLiteral, LANGTAG, NIL, \ + NumericLiteral, SimplifiedTriple, TriplesBlock, PrimaryExpression, UnaryExpression, MultiplicativeExpression, \ + AdditiveExpression, NumericExpression, RelationalExpression, ValueLogical, ConditionalAndExpression, \ + ConditionalOrExpression, Expression, BrackettedExpression, InlineDataOneVar, DataBlockValue, InlineDataFull, \ + DataBlock, InlineData, ValuesClause, GraphPatternNotTriples, GroupGraphPatternSub, SelectClause, SubSelect, \ + SubSelectString, GroupGraphPattern, Filter, Constraint, FunctionCall, ArgList, Bind, OptionalGraphPattern, \ + GroupOrUnionGraphPattern, LimitClause, OffsetClause, OrderCondition, OrderClause, LimitOffsetClauses, \ + SolutionModifier, GroupClause, GroupCondition, ConstructTriples, ConstructTemplate, WhereClause, ConstructQuery, \ + BuiltInCall, BooleanLiteral, GraphTerm, IRIOrFunction, ExpressionList, Aggregate, RegexExpression, Tuple + +__all__ = [ + "Tuple", + "SPARQLGrammarBase", + "BlankNodeLabel", + "Anon", + "Var", + "IRI", + "BlankNode", + "RDFLiteral", + "LANGTAG", + "NIL", + "NumericLiteral", + "SimplifiedTriple", + "TriplesBlock", + "PrimaryExpression", + "UnaryExpression", + "MultiplicativeExpression", + "AdditiveExpression", + "NumericExpression", + "RelationalExpression", + "ValueLogical", + "ConditionalAndExpression", + "ConditionalOrExpression", + "Expression", + "BrackettedExpression", + "InlineDataOneVar", + "DataBlockValue", + "InlineDataFull", + "DataBlock", + "InlineData", + "ValuesClause", + "GraphPatternNotTriples", + "GroupGraphPatternSub", + "SelectClause", + "SubSelect", + "SubSelectString", + "GroupGraphPattern", + "Filter", + "Constraint", + "FunctionCall", + "ArgList", + "Bind", + "OptionalGraphPattern", + "GroupOrUnionGraphPattern", + "LimitClause", + "OffsetClause", + "OrderCondition", + "OrderClause", + "LimitOffsetClauses", + "SolutionModifier", + "GroupClause", + "GroupCondition", + "ConstructTriples", + "ConstructTemplate", + "WhereClause", + "ConstructQuery", + "BuiltInCall", + "BooleanLiteral", + "GraphTerm", + "IRIOrFunction", + "ExpressionList", + "Aggregate", + "RegexExpression" +] diff --git a/temp/grammar.py b/temp/grammar/grammar.py similarity index 99% rename from temp/grammar.py rename to temp/grammar/grammar.py index f866b2d5..92e53a97 100755 --- a/temp/grammar.py +++ b/temp/grammar/grammar.py @@ -121,7 +121,7 @@ class BlankNode(SPARQLGrammarBase): BlankNode ::= BLANK_NODE_LABEL | ANON """ - value: Union["BlankNodeLabel", "Anon"] + value: Union[BlankNodeLabel, Anon] def render(self): yield from self.value.render() @@ -536,14 +536,17 @@ def render(self) -> Generator[str, None, None]: for item in self.graph_patterns_or_triples_blocks: yield from item.render() - def add_pattern(self, pattern): + def add_pattern(self, pattern, prepend=False): if not isinstance(pattern, (TriplesBlock, GraphPatternNotTriples)): raise TypeError( "Pattern must be an instance of TriplesBlock or GraphPatternNotTriples." 
) if self.graph_patterns_or_triples_blocks is None: self.graph_patterns_or_triples_blocks = [] - self.graph_patterns_or_triples_blocks.append(pattern) + if prepend: + self.graph_patterns_or_triples_blocks.insert(0, pattern) + else: + self.graph_patterns_or_triples_blocks.append(pattern) def add_triple(self, triple): if not isinstance(triple, SimplifiedTriple): diff --git a/temp/shacl2sparql.py b/temp/shacl2sparql.py index 675a9f94..af0c128f 100755 --- a/temp/shacl2sparql.py +++ b/temp/shacl2sparql.py @@ -1,61 +1,32 @@ +import re from string import Template from typing import Union, Optional, List -import re -from rdflib import URIRef, Variable, Namespace, Graph, SH, RDF, BNode, Literal +from rdflib import URIRef, Namespace, Graph, SH, RDF, BNode, Literal from rdflib.collection import Collection -from temp.grammar import ( - TriplesBlock, - OptionalGraphPattern, - SolutionModifier, - GroupGraphPattern, - SimplifiedTriple, - SubSelect, - SubSelectString, - GroupOrUnionGraphPattern, - GroupGraphPatternSub, - GraphPatternNotTriples, - SelectClause, - WhereClause, - LimitClause, - OffsetClause, - LimitOffsetClauses, - InlineDataOneVar, - DataBlock, - InlineData, - ConstructTemplate, - ConstructTriples, - ConstructQuery, - Filter, - OrderCondition, - OrderClause, - IRI, - Var, - Constraint, - BuiltInCall, - PrimaryExpression, - BrackettedExpression, - Expression, - RDFLiteral, - IRIOrFunction, - DataBlockValue, -) +from temp.grammar import * ONT = Namespace("https://prez.dev/ont/") ALTREXT = Namespace("http://www.w3.org/ns/dx/conneg/altr-ext#") SHEXT = Namespace("http://example.com/shacl-extension#") -class SHACLParser: +class PrezQueryConstructor: def __init__( - self, - runtime_values: dict, - endpoint_graph: Graph, - profile_graph: Graph, - endpoint_uri: Optional[URIRef] = None, - profile_uri: Optional[URIRef] = None, - additional_ggps: Optional[GroupGraphPatternSub] = None, + self, + runtime_values: dict, + endpoint_graph: Graph, + profile_graph: Graph, + listing_or_object: str, + focus_node: Union[IRI, Var] = Var(value="focus_node"), + endpoint_uri: Optional[URIRef] = None, + profile_uri: Optional[URIRef] = None, + additional_ggps: Optional[GroupGraphPatternSub] = None, + node_selection_triples: Optional[List[SimplifiedTriple]] = None, + node_selection_gpnt: Optional[GraphPatternNotTriples] = None, + target_class: URIRef = None, + ): self.runtime_values = runtime_values self.endpoint_graph: Graph = endpoint_graph @@ -64,16 +35,14 @@ def __init__( self.profile_uri: Optional[URIRef] = profile_uri self.additional_ggps: Optional[GroupGraphPatternSub] = additional_ggps - self.focus_node: Union[IRI, Var] = Var(value="focus_node") + self.focus_node: Union[IRI, Var] = focus_node self.sparql = None self.results = None self.construct_triples = None self.main_where_ggps = GroupGraphPatternSub() - self.sub_select_ggps = None - self.optional_patterns = None - self.where_patterns = None + self.inner_select: Union[SubSelect, SubSelectString] = None self.default_limit = None self.default_offset = None @@ -85,6 +54,12 @@ def __init__( self._expand_runtime_vars() self._merge_runtime_and_default_vars() + self.node_selection_triples = node_selection_triples + self.node_selection_gpnt = node_selection_gpnt + + self.listing_or_object = listing_or_object + self.target_class = target_class + def _expand_runtime_vars(self): self.runtime_vals_expanded = {} for k, v in self.runtime_values.items(): @@ -105,9 +80,10 @@ def _merge_runtime_and_default_vars(self): def generate_sparql(self): """ - Generates SPARQL query 
from SHACL profile_graph. + Generates SPARQL query from Shape profile_graph. """ - self.parse_endpoint_definition() + if self.listing_or_object == "listing": + self.build_inner_select() self.parse_profile() self._generate_query() @@ -115,11 +91,19 @@ def _generate_query(self): where = WhereClause( group_graph_pattern=GroupGraphPattern(content=self.main_where_ggps) ) + if self.construct_triples: self.construct_triples.extend(where.collect_triples()) else: self.construct_triples = where.collect_triples() self.construct_triples = list(set(self.construct_triples)) + + if self.listing_or_object == "listing": + gpnt = GraphPatternNotTriples( + content=GroupOrUnionGraphPattern( + group_graph_patterns=[GroupGraphPattern(content=self.inner_select)])) + self.main_where_ggps.add_pattern(gpnt, prepend=True) + construct_template = ConstructTemplate( construct_triples=ConstructTriples(triples=self.construct_triples) ) @@ -132,70 +116,58 @@ def _generate_query(self): query_str = "".join(part for part in query.render()) self.sparql = query_str - def parse_endpoint_definition(self): + def build_inner_select(self): """ Either set the focus_node to a URIRef, if a target node is provided, or generate a triple pattern to get list items Generates triples for the endpoint definition with runtime values substituted. """ - # sparql targets + inner_select_ggps = GroupGraphPatternSub() + + self._set_limit_and_offset() + self._merge_runtime_and_default_vars() + + # sparql targets - for complex selection queries specified as strings target_bn = list( self.endpoint_graph.objects(subject=self.endpoint_uri, predicate=SH.target) ) - target_nodes = list( - self.endpoint_graph.objects( - subject=self.endpoint_uri, predicate=SH.targetNode - ) - ) - target_classes = list( - self.endpoint_graph.objects( - subject=self.endpoint_uri, predicate=SH.targetClass - ) - ) rule_nodes = list( self.endpoint_graph.objects(subject=self.endpoint_uri, predicate=SH.rule) ) - target_subjects_of = list( - self.endpoint_graph.objects( - subject=self.endpoint_uri, predicate=SH.targetSubjectsOf - ) - ) - - # objects - just set the focus node. - if target_nodes: - target_node_var = str(target_nodes[0]) - target_node_val = target_node_var[1:] - target_uri = IRI(value=self.runtime_values[target_node_val]) - self.focus_node = target_uri - - # rule nodes - for CONSTRUCT TRIPLES patterns. - if rule_nodes: - for rule_node in rule_nodes: - self._create_construct_triples_from_sh_rules(rule_node) - - # if it's a listing endpoint, get limit and offset if available, otherwise use defaults. - endpoint_type = self.get_endpoint_type() - if endpoint_type == ONT.ListingEndpoint: - # default limit and offset - self._set_default_limit_and_offset() - self._merge_runtime_and_default_vars() - # sh:target / sh:select if target_bn: - ggp = self.create_select_subquery_from_template(target_bn) - self._add_ggp_to_main_ggps(ggp) - if target_classes: - self._add_target_class(target_classes[0]) + sss = self.create_select_subquery_from_template(target_bn) + self.inner_select = sss + + # rule nodes - for CONSTRUCT TRIPLES patterns. 
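+ # (Illustrative, SHACL-AF style: a triple rule such as sh:subject sh:this ; sh:predicate rdf:type ; sh:object dcat:Dataset + # becomes a CONSTRUCT triple for the focus node; see _create_construct_triples_from_sh_rules. Values here are hypothetical.)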
+ if rule_nodes: + for rule_node in rule_nodes: + self._create_construct_triples_from_sh_rules(rule_node) - if target_subjects_of: - pass # TODO + else: + sol_mod, order_by_triple = self._create_focus_node_solution_modifier() + + self.inner_select = SubSelect( + select_clause=SelectClause( + variables_or_all=[self.focus_node]), + where_clause=WhereClause( + group_graph_pattern=GroupGraphPattern( + content=inner_select_ggps) + ), + solution_modifier=sol_mod + ) - # don't use the target class if there's a sh:target / sh:select #TODO confirm why this caused issues - duplicate - # pattern matches in the subquery? - # elif target_classes: - if target_classes: - ggp = self.create_select_subquery_for_class_listing(target_classes) - self._add_ggp_to_main_ggps(ggp) + if order_by_triple: + inner_select_ggps.add_triple(order_by_triple) + + # otherwise just use what is provided by the endpoint shapes + if self.node_selection_triples: + tb = TriplesBlock(triples=self.node_selection_triples) + inner_select_ggps.add_pattern(tb) + + if self.node_selection_gpnt: + for gpnt in self.node_selection_gpnt: + inner_select_ggps.add_pattern(gpnt) def _add_ggp_to_main_ggps(self, ggp): gorugp = GroupOrUnionGraphPattern(group_graph_patterns=[ggp]) @@ -232,65 +204,13 @@ def _create_construct_triples_from_sh_rules(self, rule_node): else: self.construct_triples = [triple] - def create_select_subquery_for_class_listing( - self, - target_classes: Optional[List[URIRef]] = None, - target_subjects_of: Optional[URIRef] = None - ): - ggp = GroupGraphPattern(content=GroupGraphPatternSub()) - triples = [] - - if target_classes: - target_class_var = IRI(value=target_classes[0]) - triples.append( - SimplifiedTriple( - subject=self.focus_node, - predicate=IRI(value=RDF.type), - object=target_class_var, - ) - ) - - if target_subjects_of: # typically used in conjunction with a sh:class statement to specify the class of the validation node. - triples.append( - SimplifiedTriple( - subject=self.focus_node, - predicate=target_subjects_of, - object=Var(value="ValidationNode") # better name? - ) - ) - - triples_block = TriplesBlock(triples=triples) - - if self.additional_ggps: # for example from cql - gpnt = GraphPatternNotTriples( - content=GroupOrUnionGraphPattern( - group_graph_patterns=[ - GroupGraphPattern(content=self.additional_ggps) - ] - ) - ) - ggp.content.add_pattern(gpnt) - else: - ggp.content.add_pattern(triples_block) - wc = WhereClause(group_graph_pattern=ggp) - sc = SelectClause(variables_or_all="*") - sol_mod, order_by_triple = self._create_focus_node_solution_modifier() - if order_by_triple: - ggp.content.add_triple(order_by_triple) - ss = SubSelect( - select_clause=sc, - where_clause=wc, - solution_modifier=sol_mod, - ) - ggp = GroupGraphPattern(content=ss) - return ggp def create_select_subquery_from_template(self, target_bn): select_statement = Template( str(self.endpoint_graph.value(target_bn[0], SH.select, default=None)) ) # expand any prefixes etc. in case the prefixes are not defined in the query this subquery is being inserted - # into. NB SHACL does provide a mechanism to declare prefixes used in SPARQL targets - this has not been + # into. 
NB Shape does provide a mechanism to declare prefixes used in SPARQL targets - this has not been # implemented substituted_query = select_statement.substitute( self.merged_runtime_and_default_vals @@ -299,7 +219,7 @@ def create_select_subquery_from_template(self, target_bn): if order_by_triple: # insert it before the end of the string, order_by_triple_text = "".join(order_by_triple.render()) substituted_query = ( - substituted_query[:-1] + f"{{{order_by_triple_text}}} }}" + substituted_query[:-1] + f"{{{order_by_triple_text}}} }}" ) if self.additional_ggps: # for example from cql additional_ggps_str = "".join( @@ -309,8 +229,7 @@ def create_select_subquery_from_template(self, target_bn): sss = SubSelectString( select_string=substituted_query, solution_modifier=sol_mod ) - ggp = GroupGraphPattern(content=sss) - return ggp + return sss def split_query(self, original_query, additional_ggps_str): # Regex to match the entire structure: 'SELECT ?xxx { ... }' @@ -357,19 +276,19 @@ def _create_focus_node_solution_modifier(self): ) return sol_mod, order_by_triple - def _set_default_limit_and_offset(self): + def _set_limit_and_offset(self): """ Sets the default limit, offset, and ordering for a listing endpoint. """ - default_limit = list( + default_limit = next( self.endpoint_graph.objects( subject=self.endpoint_uri, predicate=SHEXT.limit - ) + ), 20 ) - default_offset = list( + default_offset = next( self.endpoint_graph.objects( subject=self.endpoint_uri, predicate=SHEXT.offset - ) + ), 0 ) default_order_by = list( self.endpoint_graph.objects( @@ -377,10 +296,8 @@ def _set_default_limit_and_offset(self): ) ) - if not default_limit or not default_offset: - raise ValueError( - "Listing endpoint must have both a default limit and a default offset" - ) + self.default_limit = int(default_limit) + self.default_offset = int(default_offset) # Process each blank node in the default_order_by list for blank_node in default_order_by: @@ -399,41 +316,13 @@ def _set_default_limit_and_offset(self): self.default_order_by = (path,) self.default_order_by_desc = is_descending - self.default_limit = int(default_limit[0]) - self.default_offset = int(default_offset[0]) - - def get_endpoint_type(self): - endpoint_type = list( - self.endpoint_graph.objects(subject=self.endpoint_uri, predicate=RDF.type) - ) - if not endpoint_type: - raise ValueError( - 'Endpoint definition must have a type of either "https://prez.dev/ont/ListingEndpoint" ' - 'or "https://prez.dev/ont/ObjectEndpoint"' - ) - endpoint_type = endpoint_type[0] - return endpoint_type - def parse_profile(self): for i, property_node in enumerate( - self.profile_graph.objects(subject=self.profile_uri, predicate=SH.property) + self.profile_graph.objects(subject=self.profile_uri, predicate=SH.property) ): self._parse_property_shapes(property_node, i) self._build_bnode_blocks() - def _add_target_class(self, target_class): - triples = [ - SimplifiedTriple( - subject=self.focus_node, - predicate=IRI(value=RDF.type), - object=IRI(value=target_class), - ) - ] - if self.construct_triples: - self.construct_triples.extend(triples) - else: - self.construct_triples = triples - def _build_bnode_blocks(self): bnode_depth = list( self.profile_graph.objects( @@ -549,7 +438,7 @@ def process_path_object(path_obj: Union[URIRef, BNode]): self._add_inverse_preds(ggps, inverse_preds, i) if predicates: self._add_predicate_constraints(predicates, property_node, ggp_list) - self._add_object_constrains(ggp_list, property_node) + self._add_object_constraints(ggp_list, property_node) union = 
GroupOrUnionGraphPattern(group_graph_patterns=ggp_list) gpnt = GraphPatternNotTriples(content=union) @@ -568,7 +457,7 @@ def process_path_object(path_obj: Union[URIRef, BNode]): self.main_where_ggps.add_pattern(gpnt) def _add_inverse_preds( - self, ggps: GroupGraphPatternSub, inverse_preds: List[IRI], i + self, ggps: GroupGraphPatternSub, inverse_preds: List[IRI], i ): if inverse_preds: ggps.add_triple( @@ -619,7 +508,7 @@ def _add_predicate_constraints(self, predicates, property_node, ggp_list): ) tb = TriplesBlock(triples=[simplified_triple]) if predicates: - if max == Literal(0): + if max == Literal(0): # excluded predicates. values = [ PrimaryExpression(content=IRIOrFunction(iri=p)) for p in predicates ] @@ -638,7 +527,7 @@ def _add_predicate_constraints(self, predicates, property_node, ggp_list): ggp = GroupGraphPattern(content=ggps) ggp_list.append(ggp) elif ( - IRI(value=SHEXT.allPredicateValues) not in predicates + IRI(value=SHEXT.allPredicateValues) not in predicates ): # add VALUES clause dbv_list = [DataBlockValue(value=p) for p in predicates] inline_data_one_var = InlineDataOneVar( @@ -655,7 +544,7 @@ def _add_predicate_constraints(self, predicates, property_node, ggp_list): ggp = GroupGraphPattern(content=ggps) ggp_list.append(ggp) - def _add_object_constrains(self, ggp_list, property_node): + def _add_object_constraints(self, ggp_list, property_node): value = self.profile_graph.value( subject=property_node, predicate=SH.hasValue, default=None ) diff --git a/temp/shacl_node_selection.py b/temp/shacl_node_selection.py new file mode 100644 index 00000000..b9127c7e --- /dev/null +++ b/temp/shacl_node_selection.py @@ -0,0 +1,238 @@ +from __future__ import annotations + +from typing import List, Optional, Union, Any, Dict + +from pydantic import BaseModel +from rdflib import URIRef, BNode, Graph +from rdflib.collection import Collection +from rdflib.namespace import SH, RDF + +from temp.grammar import * + + +class Shape(BaseModel): + class Config: + arbitrary_types_allowed = True + + def __init__(self, **data: Any): + super().__init__(**data) + self.triples_list = [] + self.gpnt_list = [] + self.from_graph() + self.to_grammar() + + def from_graph(self): + raise NotImplementedError("Subclasses must implement this method.") + + def to_grammar(self): + raise NotImplementedError("Subclasses must implement this method.") + + +class NodeShape(Shape): + uri: URIRef + graph: Graph + focus_node: Var | IRI = Var(value="focus_node") + targetNode: Optional[URIRef] = None + targetClasses: Optional[List[URIRef]] = None + propertyShapesURIs: Optional[List[URIRef]] = None + propertyShapes: Optional[List[PropertyShape]] = None + triples_list: Optional[List[SimplifiedTriple]] = None + gpnt_list: Optional[List[GraphPatternNotTriples]] = None + path_nodes: Optional[Dict[str, Var | IRI]] = {} + + def from_graph(self): # TODO this can be a SPARQL select against the system graph. 
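+ # Illustrative node shape this method reads (Turtle, taken from the test data, so indicative only): + # ex:FeatureCollectionListing a sh:NodeShape ; + # sh:targetClass geo:FeatureCollection ; + # sh:property [ sh:path [ sh:inversePath dcterms:hasPart ] ; sh:class dcat:Catalog ] .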
+ self.targetNode = next(self.graph.objects(self.uri, SH.targetNode), None) + self.targetClasses = list(self.graph.objects(self.uri, SH.targetClass)) + self.propertyShapesURIs = list(self.graph.objects(self.uri, SH.property)) + self.propertyShapes = [PropertyShape( + uri=ps_uri, + graph=self.graph, + focus_node=self.focus_node, + path_nodes=self.path_nodes + ) for ps_uri in self.propertyShapesURIs] + + def to_grammar(self): + if self.targetNode: + pass # do not need to add any specific triples or the like + if self.targetClasses: + self._process_class_targets() + if self.propertyShapes: + self._process_property_shapes() + + def _process_class_targets(self): + if len(self.targetClasses) == 1: + self.triples_list.append( + SimplifiedTriple( + subject=self.focus_node, + predicate=IRI(value=RDF.type), + object=IRI(value=self.targetClasses[0]) + ) + ) + else: + self.triples_list.append( + SimplifiedTriple( + subject=self.focus_node, + predicate=IRI(value=RDF.type), + object=Var(value="focus_classes") + )) + dbvs = [DataBlockValue(value=IRI(value=klass)) for klass in self.targetClasses] + self.gpnt_list.append( + GraphPatternNotTriples( + content=InlineData( + data_block=DataBlock( + block=InlineDataOneVar( + variable=Var(value="focus_classes"), + datablockvalues=dbvs + ) + ) + ) + ) + ) + + def _process_property_shapes(self): + for shape in self.propertyShapes: + self.triples_list.extend(shape.triples_list) + self.gpnt_list.extend(shape.gpnt_list) + self.path_nodes = self.path_nodes | shape.path_nodes + # deduplicate + self.triples_list = list(set(self.triples_list)) + + +class PropertyShape(Shape): + uri: URIRef | BNode # URI of the shape + graph: Graph + focus_node: IRI | Var = Var(value="focus_node") + # inputs + property_paths: Optional[List[PropertyPath]] = None + or_klasses: Optional[List[URIRef]] = None + # outputs + grammar: Optional[GroupGraphPatternSub] = None + triples_list: Optional[List[SimplifiedTriple]] = None + gpnt_list: Optional[List[GraphPatternNotTriples]] = None + path_nodes: Optional[Dict[str, Var | IRI]] = {} + _select_vars: Optional[List[Var]] = None + + def from_graph(self): + self.property_paths = [] + _single_class = next(self.graph.objects(self.uri, SH["class"]), None) + if _single_class: + self.or_klasses = [URIRef(_single_class)] + + # look for sh:or statements and process classes from these. NB only sh:or / sh:class is handled at present.
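+ # e.g. sh:or ( [ sh:class dcat:Catalog ] [ sh:class skos:Collection ] ) collapses to or_klasses == [dcat:Catalog, skos:Collection] (illustrative values).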
+ or_classes = next(self.graph.objects(self.uri, SH["or"]), None) + if or_classes: + or_bns = list(Collection(self.graph, or_classes)) + or_triples = list(self.graph.triples_choices((or_bns, SH["class"], None))) + self.or_klasses = [URIRef(klass) for _, _, klass in or_triples] + + pp = next(self.graph.objects(self.uri, SH.path)) + if isinstance(pp, URIRef): + self.property_paths.append(Path(value=pp)) + elif isinstance(pp, BNode): + self._process_property_path(pp, self.graph) + + def _process_property_path(self, pp, graph): + if isinstance(pp, BNode): + pred_objects_gen = graph.predicate_objects( + subject=pp + ) + bn_pred, bn_obj = next(pred_objects_gen, (None, None)) + if bn_obj == SH.union: + pass + elif bn_pred == SH.inversePath: + self.property_paths.append(InversePath(value=bn_obj)) + # elif bn_pred == SH.alternativePath: + # predicates.extend(list(Collection(self.profile_graph, bn_obj))) + else: # sequence paths + paths = list(Collection(graph, pp)) + for path in paths: + self._process_property_path(path, graph) + + def to_grammar(self): + + # set up the path nodes - either from supplied values or set as variables + for i, property_path in enumerate(self.property_paths): + path_node_str = f"path_node_{i+1}" + if path_node_str not in self.path_nodes: + self.path_nodes[path_node_str] = Var(value=path_node_str) + + self.triples_list = [] + len_pp = len(self.property_paths) + # sh:class applies to the end of sequence paths + path_node_term = self.path_nodes[f"path_node_{len_pp}"] + + if self.or_klasses: + if len(self.or_klasses) == 1: + self.triples_list.append( + SimplifiedTriple( + subject=path_node_term, + predicate=IRI(value=RDF.type), + object=IRI(value=self.or_klasses[0]) + ) + ) + else: + self.triples_list.append( + SimplifiedTriple( + subject=path_node_term, + predicate=IRI(value=RDF.type), + object=Var(value=f"path_node_classes_{len_pp}") + )) + dbvs = [DataBlockValue(value=IRI(value=klass)) for klass in self.or_klasses] + self.gpnt_list.append( + GraphPatternNotTriples( + content=InlineData( + data_block=DataBlock( + block=InlineDataOneVar( + variable=Var(value=f"path_node_classes_{len_pp}"), + datablockvalues=dbvs + ) + ) + ) + ) + ) + + if self.property_paths: + for i, property_path in enumerate(self.property_paths): + + path_node_var = self.path_nodes[f"path_node_{i + 1}"] + if i == 0: + focus_or_path_node = self.focus_node + else: + focus_or_path_node = self.path_nodes[f"path_node_{i}"] + if isinstance(property_path, Path): + # vanilla property path + self.triples_list.append( + SimplifiedTriple( + subject=focus_or_path_node, + predicate=IRI(value=property_path.value), + object=path_node_var + ) + ) + elif isinstance(property_path, InversePath): + self.triples_list.append( + SimplifiedTriple( + subject=path_node_var, + predicate=IRI(value=property_path.value), + object=focus_or_path_node + ) + ) + + +class PropertyPath(BaseModel): + class Config: + arbitrary_types_allowed = True + + uri: Optional[URIRef] = None + + +class Path(PropertyPath): + value: URIRef + + +class SequencePath(PropertyPath): + value: List[PropertyPath] + + +class InversePath(PropertyPath): + value: URIRef diff --git a/temp/shacl_nodeshapes2sparql.py b/temp/shacl_nodeshapes2sparql.py deleted file mode 100644 index a349b493..00000000 --- a/temp/shacl_nodeshapes2sparql.py +++ /dev/null @@ -1,234 +0,0 @@ -from __future__ import annotations - -from typing import List, Optional, Union - -from pydantic import BaseModel -from rdflib import URIRef, BNode, Graph -from rdflib.namespace import SH, RDF - -from 
temp.grammar import IRI, SimplifiedTriple, TriplesBlock, Var, SelectClause, GraphPatternNotTriples, InlineData, \ - DataBlock, InlineDataOneVar, DataBlockValue - - -class SHACL(BaseModel): - class Config: - arbitrary_types_allowed = True - - def from_graph(self, graph): - raise NotImplementedError("Subclasses must implement this method.") - - def to_grammar(self): - raise NotImplementedError("Subclasses must implement this method.") - - -class NodeShape(SHACL): - uri: URIRef - targetNode: Optional[URIRef] = None - targetClass: Optional[List[URIRef]] = None - targetSubjectsOf: Optional[URIRef] = None - targetObjectsOf: Optional[URIRef] = None - propertyShapes: Optional[List[URIRef]] = None - _triples: Optional[List[SimplifiedTriple]] = None - - def from_graph(self, graph): # TODO this can be a SPARQL select against the system graph. - self.targetNode = next(graph.objects(self.uri, SH.targetNode), None) - self.targetClass = list(graph.objects(self.uri, SH.targetClass)) - self.targetSubjectsOf = next(graph.objects(self.uri, SH.targetSubjectsOf), None) - self.targetObjectsOf = next(graph.objects(self.uri, SH.targetObjectsOf), None) - self.propertyShapes = list(graph.objects(self.uri, SH.property)) - - def to_listing_select(self) -> TriplesBlock: - focus_node = Var(value="focus_node") - if self.targetNode: - pass # do not need to add any specific triples or the like - if self.targetClass: - self._process_class_target(focus_node) - if self.targetSubjectsOf: - pass - if self.targetObjectsOf: - pass - if self.propertyShapes: - self._process_property_shapes() - - def to_link_select(self, focus_node) -> SelectClause: - pass - - def _process_class_target(self, focus_node): - for klass in self.targetClass: - self._triples.append( - SimplifiedTriple( - subject=focus_node, - predicate=IRI(value=RDF.type), - object=klass, - ) - ) - - def _process_subjects_of_target(self): - # ?focus_node pred ?obj - ?obj is constrained by e.g. sh:class in a property shape. 
- self._triples.append( - SimplifiedTriple( - subject=self.focus_node, - predicate=IRI(value=self.targetSubjectsOf), - object=Var(value="ValidationNode"), - ) - ) - - def _process_objects_of_target(self): - self._triples.append( - SimplifiedTriple( - subject=Var(value="ValidationNode"), - predicate=IRI(value=self.targetObjectsOf), - object=self.focus_node, - ) - ) - - def _process_property_shapes(self): - for shape in self.propertyShapes: - ps = PropertyShape(shape) - self._triples.append(ps.to_grammar) - - -class PropertyShape(SHACL): - uri: URIRef | BNode # URI of the shape - focus_node: Union[Var, IRI] = Var(value="focus_node") - # inputs - property_paths: Optional[List[Union[URIRef, BNode]]] = None - or_klasses: Optional[List[URIRef]] = None - # outputs - _st_list: Optional[List[SimplifiedTriple]] = None - _gpnt_list: Optional[List[GraphPatternNotTriples]] = None - _select_vars: Optional[List[Var]] = None - - def from_graph(self, graph): - _single_class = next(graph.objects(self.uri, SH["class"]), None) - if _single_class: - self.or_klasses = [_single_class] - else: - pass - # _multiple_classes = list(graph.objects(self.uri, SH["class"]), None) - # TODO logic for or statement - self.property_paths = list(graph.objects(self.uri, SH.path)) - - pp_asts = Or() - for pp in self.property_paths: - pp_asts.paths.append(self.process_property_path(pp, graph)) - - # focus node = URI when generating links; Variable when listing objects - # process class statements NB this is the class on validation nodes - # get the length of any property path chains; this is what the target class applies to. - - def _process_property_path(self, pp, graph): - if isinstance(pp, BNode): - pred_objects_gen = graph.predicate_objects( - subject=pp - ) - bn_pred, bn_obj = next(pred_objects_gen, (None, None)) - if bn_obj == SH.union: - pass - elif bn_pred == SH.inversePath: - inverse_preds.append(IRI(value=bn_obj)) - elif bn_pred == SH.alternativePath: - predicates.extend(list(Collection(self.profile_graph, bn_obj))) - else: # sequence paths - predicates.append(tuple(Collection(self.profile_graph, path_obj))) - else: # a plain path specification to restrict the predicate to a specific value - predicates.append(path_obj) - return pp_ast - - - - - def to_grammar(self): - if self.property_paths: - for property_path in self.property_paths: - if isinstance(property_path, URIRef): - # vanilla property path - self._st_list.append( - SimplifiedTriple( - subject=focus_node, - predicate=IRI(value=property_path), - object=Var(value="ValidationNode") - ) - ) - elif isinstance(property_path, BNode): - pred_objects_gen = self.profile_graph.predicate_objects( - subject=path_obj - ) - bn_pred, bn_obj = next(pred_objects_gen, (None, None)) - if bn_obj == SH.union: - pass - elif bn_pred == SH.inversePath: - inverse_preds.append(IRI(value=bn_obj)) - elif bn_pred == SH.alternativePath: - predicates.extend(list(Collection(self.profile_graph, bn_obj))) - else: # sequence paths - predicates.append(tuple(Collection(self.profile_graph, path_obj))) - else: # a plain path specification to restrict the predicate to a specific value - predicates.append(path_obj) - - if self.or_klasses: - if len(self.or_klasses) == 1: - self._st_list.append( - SimplifiedTriple( - subject=Var(value="ValidationNode"), - predicate=IRI(value=RDF.type), - object=IRI(value=self.or_klasses[0]) - ) - ) - else: - self._st_list.append( - SimplifiedTriple(value="ValidationNode"), - IRI(value=RDF.type), - Var(value="ValClasses") - ) - dbvs = 
-
-
-class PropertyPath(SHACL):
-    uri: URIRef
-
-
-class Path(PropertyPath):
-    focus_uri: Union[IRI, Var]
-    path_uri: URIRef
-
-    def to_grammar(self):
-        return SimplifiedTriple(
-            subject=self.focus_uri,
-            predicate=IRI(value=self.path_uri),
-            object=Var(value="ValidationNode"),
-        )
-
-
-class SequencePath(SHACL):
-    uri: URIRef
-    paths: List[PropertyPath]
-
-    def from_graph(self, graph):
-        pass
-
-    def to_grammar(self):
-        pass
-
-
-class InversePath(SHACL):
-    focus_uri: Union[IRI, Var]
-    inverse_path: URIRef
-    validation_node: Var
-
-
-class Or(SHACL):
-    paths: List[SHACL]
-
-
-class And(SHACL):
-    pass
\ No newline at end of file
diff --git a/tests/data/nodeshapes/endpoints.ttl b/tests/data/nodeshapes/endpoints.ttl
deleted file mode 100644
index 0d67f9d4..00000000
--- a/tests/data/nodeshapes/endpoints.ttl
+++ /dev/null
@@ -1,97 +0,0 @@
-@prefix sh: <http://www.w3.org/ns/shacl#> .
-@prefix dcterms: <http://purl.org/dc/terms/> .
-@prefix dcat: <http://www.w3.org/ns/dcat#> .
-@prefix geo: <http://www.opengis.net/ont/geosparql#> .
-@prefix ex: <http://example.org/ns#> .
-@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
-@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
-
-ex:TopLevelCatalogs
-    a sh:NodeShape ;
-    sh:targetClass dcat:Catalog ;
-    sh:targetSubjectsOf dcterms:hasPart ;
-    sh:property [
-        sh:path dcterms:hasPart ;
-        sh:or (
-            [ sh:class dcat:Catalog ]
-            [ sh:class geo:FeatureCollection ]
-            [ sh:class skos:ConceptScheme ]
-            [ sh:class skos:Collection ]
-        ) ;
-    ] .
-
-ex:FeatureCollectionListing
-    a sh:NodeShape ;
-    sh:targetClass geo:FeatureCollection ;
-    sh:property [
-        sh:path [ sh:inversePath dcterms:hasPart ] ;
-        sh:class dcat:Catalog ;
-    ] .
-
-ex:ConceptSchemeListing
-    a sh:NodeShape ;
-    sh:targetClass skos:ConceptScheme ;
-    sh:property [
-        sh:path [ sh:inversePath dcterms:hasPart ] ;
-        sh:class dcat:Catalog ;
-    ] .
-
-ex:CollectionListing
-    a sh:NodeShape ;
-    sh:targetClass skos:Collection ;
-    sh:property [
-        sh:path [ sh:inversePath dcterms:hasPart ] ;
-        sh:class dcat:Catalog ;
-    ] .
-
-ex:LowerLevelCatalogListing
-    a sh:NodeShape ;
-    sh:targetClass dcat:Catalog ;
-    sh:property [
-        sh:path [ sh:inversePath dcterms:hasPart ] ;
-        sh:class dcat:Catalog ;
-    ] .
-
-ex:FeatureListing
-    a sh:NodeShape ;
-    sh:targetClass geo:Feature ;
-    sh:property [
-        sh:path [ sh:inversePath dcterms:hasPart ] ;
-        sh:class geo:FeatureCollection ;
-    ] , [
-        sh:path ( [ sh:inversePath rdfs:member ] [ sh:inversePath dcterms:hasPart ] ) ;
-        sh:class dcat:Catalog ;
-    ] .
-
-ex:ConceptSchemeConceptListing
-    a sh:NodeShape ;
-    sh:targetClass skos:Concept ;
-    sh:property [
-        sh:path skos:inScheme ;
-        sh:class skos:ConceptScheme ;
-    ] , [
-        sh:path ( skos:inScheme [ sh:inversePath dcterms:hasPart ] ) ;
-        sh:class dcat:Catalog ;
-    ] .
-
-ex:CollectionConceptListing
-    a sh:NodeShape ;
-    sh:targetClass skos:Concept ;
-    sh:property [
-        sh:path skos:inScheme ;
-        sh:class skos:Collection ;
-    ] , [
-        sh:path ( [ sh:inversePath skos:member ] [ sh:inversePath dcterms:hasPart ] ) ;
-        sh:class dcat:Catalog ;
-    ] .
-
-ex:ResourceListing
-    a sh:NodeShape ;
-    sh:targetClass dcat:Resource ;
-    sh:property [
-        sh:path [ sh:inversePath dcterms:hasPart ] ;
-        sh:class dcat:Catalog ;
-    ] , [
-        sh:path ( [ sh:inversePath dcterms:hasPart ] [ sh:inversePath dcterms:hasPart ] ) ;
-        sh:class dcat:Catalog ;
-    ] .
\ No newline at end of file
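
Read together with the classes above, a shape such as ex:FeatureCollectionListing (target class geo:FeatureCollection, an inverse dcterms:hasPart path whose value must be a dcat:Catalog) implies a listing query along these lines; hand-written here for illustration, so the generated form may differ:

# SPARQL equivalent of ex:FeatureCollectionListing, written out by hand.
LISTING_QUERY = """
SELECT ?focus_node
WHERE {
    ?focus_node a <http://www.opengis.net/ont/geosparql#FeatureCollection> .
    ?parent <http://purl.org/dc/terms/hasPart> ?focus_node .
    ?parent a <http://www.w3.org/ns/dcat#Catalog> .
}
"""
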
diff --git a/tests/test_shacl_parsing.py b/tests/test_shacl_parsing.py
index 36c0e6f9..78687162 100755
--- a/tests/test_shacl_parsing.py
+++ b/tests/test_shacl_parsing.py
@@ -5,12 +5,12 @@
 endpoints_graph = Graph().parse("tests/data/nodeshapes/endpoints.ttl", format="turtle")
 
-@pytest.fixture
-def property_shape():
-    return endpoints_graph.value(
-        subject=URIRef("http://example.org/ns#FeatureCollectionListing"),
-        predicate=URIRef("http://www.w3.org/ns/shacl#property"),
-    )
+# @pytest.fixture
+# def property_shape():
+#     return endpoints_graph.value(
+#         subject=URIRef("http://example.org/ns#ResourceListing"),
+#         predicate=URIRef("http://www.w3.org/ns/shacl#property"),
+#     )
 
 
 @pytest.mark.parametrize("nodeshape_uri",
@@ -18,20 +18,38 @@ def property_shape():
     [
         "http://example.org/ns#FeatureCollectionListing"
     ])
 def test_nodeshape_parsing(nodeshape_uri):
-    ns = NodeShape(uri=URIRef(nodeshape_uri))
-    ns.from_shacl_graph(endpoints_graph)
-    assert ns.targetClass == [URIRef("http://www.opengis.net/ont/geosparql#FeatureCollection")]
-    assert len(ns.propertyShapes) == 1
+    ns = NodeShape(uri=URIRef(nodeshape_uri), graph=endpoints_graph)
+    assert ns.targetClasses == [URIRef("http://www.opengis.net/ont/geosparql#FeatureCollection")]
+    assert len(ns.propertyShapesURIs) == 1
 
 
+@pytest.mark.parametrize("nodeshape_uri",
+    [
+        "http://example.org/ns#TopLevelCatalogs"
+        # "http://example.org/ns#FeatureListing"
+    ])
+def test_nodeshape_to_grammar(nodeshape_uri):
+    ns = NodeShape(uri=URIRef(nodeshape_uri), graph=endpoints_graph)
+    ns.to_grammar()
+    print('')
+
+
+@pytest.mark.parametrize("property_shape",
+    [
+        "http://example.org/ns#resourceListingPropertyShape2"
+    ])
 def test_propertyshape_parsing(property_shape):
-    ps = PropertyShape(uri=property_shape)
-    ps.from_graph(graph=endpoints_graph)
+    ps = PropertyShape(uri=URIRef(property_shape), graph=endpoints_graph)
+    ps.to_grammar()
     print('')
 
 
+@pytest.mark.parametrize("property_shape",
+    [
+        "http://example.org/ns#resourceListingPropertyShape2"
+    ])
 def test_propertyshape_create_grammar(property_shape):
-    ps = PropertyShape(uri=property_shape)
-    ps.from_graph(graph=endpoints_graph)
-    ps.to_grammar()
-    assert True
\ No newline at end of file
+    ps = PropertyShape(uri=URIRef(property_shape))
+    # ps.from_graph(graph=endpoints_graph)
+    # ps.to_grammar()
+    # assert True
\ No newline at end of file

From 7ead742bbafa0bd147e8adc2ac7540bd282f9f09 Mon Sep 17 00:00:00 2001
From: david
Date: Fri, 9 Feb 2024 17:58:12 +1000
Subject: [PATCH 11/25] Working profiles page

---
 poetry.lock                                   | 887 +++++++++---------
 .../endpoints/endpoint_metadata.ttl           |  13 +-
 .../endpoint_node_selection_shapes.ttl        |  36 +-
 prez/reference_data/prefixes/standard.ttl     |   2 +-
 .../profiles/ogc_records_profile.ttl          |  11 +-
 .../profiles/prez_default_profiles.ttl        |  23 +-
 prez/routers/profiles.py                      |   1 +
 prez/services/generate_profiles.py            |   2 +-
 prez/services/link_generation.py              | 162 ++--
 prez/services/listings.py                     |  63 +-
 prez/services/objects.py                      |   7 +-
 prez/sparql/search_query.py                   |  30 -
 pyproject.toml                                |  10 +-
 temp/shacl_node_selection.py                  |  60 +-
 14 files changed, 620 insertions(+), 687 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index a17ee71a..d812069a 100755
--- a/poetry.lock
+++ b/poetry.lock
@@ -29,13 +29,13 @@ files = [
 
 [[package]]
 name = "anyio"
-version = "3.7.1"
+version = "4.2.0"
 description = "High level compatibility layer for multiple asynchronous event loop implementations"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = 
"anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, - {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, + {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, + {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, ] [package.dependencies] @@ -43,19 +43,19 @@ idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-jquery"] -test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (<0.22)"] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] [[package]] name = "async-lru" -version = "1.0.3" -description = "Simple lru_cache for asyncio" +version = "2.0.4" +description = "Simple LRU cache for asyncio" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "async-lru-1.0.3.tar.gz", hash = "sha256:c2cb9b2915eb14e6cf3e717154b40f715bf90e596d73623677affd0d1fbcd32a"}, - {file = "async_lru-1.0.3-py3-none-any.whl", hash = "sha256:ea692c303feb6211ff260d230dae1583636f13e05c9ae616eada77855b7f415c"}, + {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, + {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, ] [[package]] @@ -104,13 +104,13 @@ files = [ [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -278,63 +278,63 @@ url = "connegp-0.1.6-py3-none-any.whl" [[package]] name = "coverage" -version = "7.3.2" +version = "7.4.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, - {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, - {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, - {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, - {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, - {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, - {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, - {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, - {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, - {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, - {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, - {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, - {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, - {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, - {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, - {file = 
"coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, - {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, - {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, - {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, - {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, - {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, - {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, + {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, + {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, + {file = 
"coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, + {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, + {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, + {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, + {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, + {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, + {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, + {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, + {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, + {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, + {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, ] [package.extras] @@ -342,34 +342,33 @@ toml = ["tomli"] [[package]] name = "distlib" -version = "0.3.7" +version = "0.3.8" description = "Distribution utilities" optional = false python-versions = "*" files = [ - {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, - {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = 
"sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, ] [[package]] name = "fastapi" -version = "0.104.1" +version = "0.109.2" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.104.1-py3-none-any.whl", hash = "sha256:752dc31160cdbd0436bb93bad51560b57e525cbb1d4bbf6f4904ceee75548241"}, - {file = "fastapi-0.104.1.tar.gz", hash = "sha256:e5e4540a7c5e1dcfbbcf5b903c234feddcdcd881f191977a1c5dfd917487e7ae"}, + {file = "fastapi-0.109.2-py3-none-any.whl", hash = "sha256:2c9bab24667293b501cad8dd388c05240c850b58ec5876ee3283c47d6e1e3a4d"}, + {file = "fastapi-0.109.2.tar.gz", hash = "sha256:f3817eac96fe4f65a2ebb4baa000f394e55f5fccdaf7f75250804bc58f354f73"}, ] [package.dependencies] -anyio = ">=3.7.1,<4.0.0" pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -starlette = ">=0.27.0,<0.28.0" +starlette = ">=0.36.3,<0.37.0" typing-extensions = ">=4.8.0" [package.extras] -all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] [[package]] name = "filelock" @@ -389,48 +388,47 @@ typing = ["typing-extensions (>=4.8)"] [[package]] name = "frozendict" -version = "2.3.10" +version = "2.4.0" description = "A simple immutable dictionary" optional = false python-versions = ">=3.6" files = [ - {file = "frozendict-2.3.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df2d2afa5af41bfa09dc9d5a8e6d73ae39b677a8572200c65a5ea353387ffccd"}, - {file = "frozendict-2.3.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b10df7f5d8637b1af319434f99dc25ca6f5537e28b293e4c405ebfb4bf9581fa"}, - {file = "frozendict-2.3.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da22a3e873f365f97445c49afc1e6d5198ed6d172f3efaf0e9fde0edcca3cea1"}, - {file = "frozendict-2.3.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89218738e2122b50bf8a0444083dbe2de280402e9c2ef0929c0db0f93ff11271"}, - {file = "frozendict-2.3.10-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:aa11add43a71fd47523fbd011be5cc011df79e25ec0b0339fc0d728623aaa7ec"}, - {file = "frozendict-2.3.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:af267bd6d98cbc10580105dc76f28f7156856fa48a5bbcadd40edb85f93657ae"}, - {file = "frozendict-2.3.10-cp310-cp310-win_amd64.whl", hash = "sha256:c112024df64b8926a315d7e36b860967fcad8aae0c592b9f117589391373e893"}, - {file = "frozendict-2.3.10-cp310-cp310-win_arm64.whl", hash = "sha256:a0065db2bc76628853dd620bd08c1ca44ad0b711e92e89b4156493153add6f9d"}, - {file = "frozendict-2.3.10-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:93634af5a6d71762aebc7d78bdce92890b7e612588faf887c9eaf752dc7ccdb1"}, - {file = 
"frozendict-2.3.10-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b4d05e231dc1a2ec874f847fd7348cbee469555468efb875a89994ecde31a81"}, - {file = "frozendict-2.3.10-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d40d0644f19365fc6cc428db31c0f113fa550bd15920262f9d77ccf6556d87b"}, - {file = "frozendict-2.3.10-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:12b40526219f9583b30690011288bca4d6cce8724cda96b3c3ab08b67c5a7f09"}, - {file = "frozendict-2.3.10-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:6b552fffeba8e41b43ce10cc0fc467e048a7c9a71ae3241057510342132555b9"}, - {file = "frozendict-2.3.10-cp36-cp36m-win_amd64.whl", hash = "sha256:07208e4718cb70aa259ac886c19b96a4aad1cf00e9199f211746f738951bbf7c"}, - {file = "frozendict-2.3.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e8bec6d11f7254e405290cb1b081caffa0c18b6aa779130da9a546349c56be83"}, - {file = "frozendict-2.3.10-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b089c7e8c95d8b043e82e7da26e165f4220d7310efaad5e94445db7e3bc8321e"}, - {file = "frozendict-2.3.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08a5829d708657c9d5ad58f4a7e4baa73a3d57290f9613bdd909d481fc203a3a"}, - {file = "frozendict-2.3.10-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1c015852dacf144dbeadf203673d8c714f788fcc2b810a36504994b3c4f5a436"}, - {file = "frozendict-2.3.10-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bb9f15a5ed924be2b1cb3654b7ea3b7bae265ff39e2b5784d42bd4a6e1353e45"}, - {file = "frozendict-2.3.10-cp37-cp37m-win_amd64.whl", hash = "sha256:809bb9c6c657bded925710a309bb2a2350bdbfdc9371df427f1a93cb8ab7ec3e"}, - {file = "frozendict-2.3.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ff7a9cca3a3a1e584349e859d028388bd96a5475f76721471b73797472c6db17"}, - {file = "frozendict-2.3.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cdd496933ddb428f3854bea9ffdce0245bb27c27909f663ad396409fb4dffb5"}, - {file = "frozendict-2.3.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9df392b655fadaa0174c1923e6205b30ad1ccca248e8e146e63a8147a355ee01"}, - {file = "frozendict-2.3.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7901828700f36fe12486705afe7afc5583434390c8f69b5419de1b6c566fb00d"}, - {file = "frozendict-2.3.10-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c9aa28ce48d848ee520409533fd0254de4caf025c5cf1b9f27c98c1dd8cf90aa"}, - {file = "frozendict-2.3.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0856af4f5b4288b2270e0b74078fad5cbaf4f799326b82183865f6f367008b2c"}, - {file = "frozendict-2.3.10-cp38-cp38-win_amd64.whl", hash = "sha256:ac41c671ff33cbefc0f06c4b2a630d18ab59f5256f45f57d5632252ae4a8c07a"}, - {file = "frozendict-2.3.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:893205dc5a4e5c4b24e5822ceb21ef14fed8ca4afae7ac688e2fc24294c85225"}, - {file = "frozendict-2.3.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e78c5ac5d71f3b73f07ff9d9e3cc32dfbf7954f2c57c2d0e1fe8f1600e980b40"}, - {file = "frozendict-2.3.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c4ca4cc42bc30b20476616411d4b49aae6084760b99251f1cbdfed879ae53ea"}, - {file = "frozendict-2.3.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c865962216f7cfd6dac8693f4de431a9d98a7225185ff23613ecd10c42423adc"}, - {file = "frozendict-2.3.10-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:99b2f47b292cc4d68f6679918e8e9e6dc5e816924d8369d07018be56b93fb20f"}, - {file = "frozendict-2.3.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e7abf4539b73c8e5680dd2fdbd19ca4fc3e2b2f3666f80f022217839bb859fd"}, - {file = "frozendict-2.3.10-cp39-cp39-win_amd64.whl", hash = "sha256:901e774629fc63f84d24b5e46b59de1eed22392ee98b7f92e694a127d541edac"}, - {file = "frozendict-2.3.10-cp39-cp39-win_arm64.whl", hash = "sha256:6f8681c0ffe92be9aba40c9b9960c48f0ae7f6ea585af2b93fc9542cc3865969"}, - {file = "frozendict-2.3.10-py3-none-any.whl", hash = "sha256:66cded65f144393b4226bda9fe9ac2f42451d2d603e8a486015744bb566a7008"}, - {file = "frozendict-2.3.10.tar.gz", hash = "sha256:aadc83510ce82751a0bb3575231f778bc37cbb373f5f05a52b888e26cbb92f79"}, + {file = "frozendict-2.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:475c65202a6f5421df8cacb8a2f29c5087134a0542b0540ae95fbf4db7af2ff9"}, + {file = "frozendict-2.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2607e82efdd2c277224a58bda3994d4cd48e49eff7fa31e404cf3066e8dbfeae"}, + {file = "frozendict-2.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fd4583194baabe100c135883017da76259a315d34e303eddf198541b7e02e44"}, + {file = "frozendict-2.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efca7281184b54f7abab6980cf25837b709f72ced62791f62dabcd7b184d958a"}, + {file = "frozendict-2.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fc4cba1ced988ce9020dfcaae6fe3f5521eebc00c5772b511aaf691b0be91e6"}, + {file = "frozendict-2.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8fab616e7c0fea2ac928f107c740bd9ba516fc083adfcd1c391d6bfc9164403d"}, + {file = "frozendict-2.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:09ba8ee37d260adde311b8eb4cd12bf27f64071242f736757ae6a11d331eb860"}, + {file = "frozendict-2.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:0615ed71570eec3cc96df063930ea6e563211efeeac86e3f3cc8bdfc9c9bfab7"}, + {file = "frozendict-2.4.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc754117a7d60ba8e55b3c39abd67f37fbc05dd63cdcb03d1717a382fe0a3421"}, + {file = "frozendict-2.4.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2804ea4bd2179bb33b99483cc8d69246630cc00632b9affe2914e8666f1cc7e5"}, + {file = "frozendict-2.4.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd4700c3f0aebdc8f4375c35590135794b1dbf2aca132f4756b584fa9910af2d"}, + {file = "frozendict-2.4.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:da4406d95c340e0b1cc43a3858fac729f52689325bcf61a9182eb94aff7451dc"}, + {file = "frozendict-2.4.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:1875e7b70a5724bf964354da8fd542240d2cead0d80053ac96bf4494ce3517fa"}, + {file = "frozendict-2.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a60f353496637ca21396289a7d969af1eb4ec4d11a7c37a0e7f25fc1761a0c97"}, + {file = "frozendict-2.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b666f9c6c8a9e794d2713a944b10a65480ff459579d75b5f686c75031c2c2dfc"}, + {file = "frozendict-2.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9d81fb396ea81fcba3b3dde4a4b51adcb74ff31632014fbfd030f8acd5a7292"}, + {file = "frozendict-2.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4925c8e82d2bd23d45996cd0827668a52b9c51103897c98ce409a763d0c00c61"}, + {file = "frozendict-2.4.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aa86325da6a6071284b4ed3d9d2cd9db068560aebad503b658d6a889a0575683"}, + {file = 
"frozendict-2.4.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5bb5b62d4e2bce12e91800496d94de41bec8f16e4d8a7b16e8f263676ae2031a"}, + {file = "frozendict-2.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3909df909516cfd7bcefd9a3003948970a12a50c5648d8bbddafcef171f2117f"}, + {file = "frozendict-2.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:204f2c5c10fc018d1ba8ccc67758aa83fe769c782547bd26dc250317a7ccba71"}, + {file = "frozendict-2.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8d1d269874c94b1ed2b6667e5e43dcf4541838019b1caa4c48f848ac73634df"}, + {file = "frozendict-2.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:809f1cffb602cf06e5186c69c0e3b74bec7a3684593145331f9aa2a65b5ba3b7"}, + {file = "frozendict-2.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b017cba5f73869b04c2977139ad08e57a7480de1e384c34193939698119baa1d"}, + {file = "frozendict-2.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0b75e5e231621dedaef88334997e79fbd137dd89895543d3862fe0220fc3572c"}, + {file = "frozendict-2.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:df3819a5d48ab3aae1548e62093d0111ad7c3b62ff9392421b7bbf149c08b629"}, + {file = "frozendict-2.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:42a9b33ccf9d417b22146e59803c53d5c39d7d9151d2df8df59c235f6a1a5ed7"}, + {file = "frozendict-2.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a3f51bfa64e0c4a6608e3f2878bab1211a6b3b197de6fa57151bbe73f1184457"}, + {file = "frozendict-2.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a1d232f092dc686e6ef23d436bde30f82c018f31cef1b89b31caef03814b1617"}, + {file = "frozendict-2.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e530658134e88607ff8c2c8934a07b2bb5e9fffab5045f127746f6542c6c77e"}, + {file = "frozendict-2.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23a52bbea30c9e35b89291273944393770fb031e522a172e3aff19b62cc50047"}, + {file = "frozendict-2.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f91acaff475d0ef0d3436b805c9b91fc627a6a8a281771a24f7ab7f458a0b34f"}, + {file = "frozendict-2.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:08d9c7c1aa92b94538b3a79c43999f999012e174588435f197794d5e5a80e0f5"}, + {file = "frozendict-2.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:05c5a77957ecba4286c7ab33861a8f4f2badc7ea86fc82b834fb360d3aa4c108"}, + {file = "frozendict-2.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:c8af8a6a39e0050d3f3193cda56c42b43534a9b3995c44241bb9527e3c3fd451"}, + {file = "frozendict-2.4.0.tar.gz", hash = "sha256:c26758198e403337933a92b01f417a8240c954f553e1d4b5e0f8e39d9c8e3f0a"}, ] [[package]] @@ -478,13 +476,13 @@ trio = ["trio (>=0.22.0,<0.23.0)"] [[package]] name = "httpx" -version = "0.25.2" +version = "0.26.0" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.25.2-py3-none-any.whl", hash = "sha256:a05d3d052d9b2dfce0e3896636467f8a5342fb2b902c819428e1ac65413ca118"}, - {file = "httpx-0.25.2.tar.gz", hash = "sha256:8b8fcaa0c8ea7b05edd69a094e63a2094c4efcb48129fb757361bc423c0ad9e8"}, + {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, + {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, ] [package.dependencies] @@ -552,13 +550,13 @@ six = "*" [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -569,110 +567,96 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "lxml" -version = "4.9.3" +version = "5.1.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" -files = [ - {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, - {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, - {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, - {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, - {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, - {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, - {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, - {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, - {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, - {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f"}, - {file = "lxml-4.9.3-cp310-cp310-win32.whl", hash = "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85"}, - {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, - {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, - {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, - {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6"}, - {file = "lxml-4.9.3-cp311-cp311-win32.whl", hash = "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305"}, - {file = "lxml-4.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc"}, - {file = "lxml-4.9.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4"}, - {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be"}, - {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13"}, - {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9"}, - {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5"}, - {file = "lxml-4.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8"}, - {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7"}, - {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2"}, - {file = "lxml-4.9.3-cp35-cp35m-win32.whl", hash = "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d"}, - {file = "lxml-4.9.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833"}, - {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = 
"sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, - {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287"}, - {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458"}, - {file = "lxml-4.9.3-cp36-cp36m-win32.whl", hash = "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477"}, - {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, - {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a"}, - {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02"}, - {file = "lxml-4.9.3-cp37-cp37m-win32.whl", hash = "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f"}, - {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, - {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40"}, - {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7"}, - {file = "lxml-4.9.3-cp38-cp38-win32.whl", hash = "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574"}, - {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, - {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432"}, - {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69"}, - {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50"}, - {file = "lxml-4.9.3-cp39-cp39-win32.whl", hash = "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2"}, - {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, - {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = 
"sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, - {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, +python-versions = ">=3.6" +files = [ + {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:704f5572ff473a5f897745abebc6df40f22d4133c1e0a1f124e4f2bd3330ff7e"}, + {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9d3c0f8567ffe7502d969c2c1b809892dc793b5d0665f602aad19895f8d508da"}, + {file = "lxml-5.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5fcfbebdb0c5d8d18b84118842f31965d59ee3e66996ac842e21f957eb76138c"}, + {file = "lxml-5.1.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f37c6d7106a9d6f0708d4e164b707037b7380fcd0b04c5bd9cae1fb46a856fb"}, + {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2befa20a13f1a75c751f47e00929fb3433d67eb9923c2c0b364de449121f447c"}, + {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22b7ee4c35f374e2c20337a95502057964d7e35b996b1c667b5c65c567d2252a"}, + {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf8443781533b8d37b295016a4b53c1494fa9a03573c09ca5104550c138d5c05"}, + {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:82bddf0e72cb2af3cbba7cec1d2fd11fda0de6be8f4492223d4a268713ef2147"}, + {file = "lxml-5.1.0-cp310-cp310-win32.whl", hash = "sha256:b66aa6357b265670bb574f050ffceefb98549c721cf28351b748be1ef9577d93"}, + {file = "lxml-5.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:4946e7f59b7b6a9e27bef34422f645e9a368cb2be11bf1ef3cafc39a1f6ba68d"}, + {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:14deca1460b4b0f6b01f1ddc9557704e8b365f55c63070463f6c18619ebf964f"}, + {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ed8c3d2cd329bf779b7ed38db176738f3f8be637bb395ce9629fc76f78afe3d4"}, + {file = 
"lxml-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:436a943c2900bb98123b06437cdd30580a61340fbdb7b28aaf345a459c19046a"}, + {file = "lxml-5.1.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acb6b2f96f60f70e7f34efe0c3ea34ca63f19ca63ce90019c6cbca6b676e81fa"}, + {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af8920ce4a55ff41167ddbc20077f5698c2e710ad3353d32a07d3264f3a2021e"}, + {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cfced4a069003d8913408e10ca8ed092c49a7f6cefee9bb74b6b3e860683b45"}, + {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9e5ac3437746189a9b4121db2a7b86056ac8786b12e88838696899328fc44bb2"}, + {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4c9bda132ad108b387c33fabfea47866af87f4ea6ffb79418004f0521e63204"}, + {file = "lxml-5.1.0-cp311-cp311-win32.whl", hash = "sha256:bc64d1b1dab08f679fb89c368f4c05693f58a9faf744c4d390d7ed1d8223869b"}, + {file = "lxml-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:a5ab722ae5a873d8dcee1f5f45ddd93c34210aed44ff2dc643b5025981908cda"}, + {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9aa543980ab1fbf1720969af1d99095a548ea42e00361e727c58a40832439114"}, + {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6f11b77ec0979f7e4dc5ae081325a2946f1fe424148d3945f943ceaede98adb8"}, + {file = "lxml-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a36c506e5f8aeb40680491d39ed94670487ce6614b9d27cabe45d94cd5d63e1e"}, + {file = "lxml-5.1.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f643ffd2669ffd4b5a3e9b41c909b72b2a1d5e4915da90a77e119b8d48ce867a"}, + {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16dd953fb719f0ffc5bc067428fc9e88f599e15723a85618c45847c96f11f431"}, + {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16018f7099245157564d7148165132c70adb272fb5a17c048ba70d9cc542a1a1"}, + {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:82cd34f1081ae4ea2ede3d52f71b7be313756e99b4b5f829f89b12da552d3aa3"}, + {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:19a1bc898ae9f06bccb7c3e1dfd73897ecbbd2c96afe9095a6026016e5ca97b8"}, + {file = "lxml-5.1.0-cp312-cp312-win32.whl", hash = "sha256:13521a321a25c641b9ea127ef478b580b5ec82aa2e9fc076c86169d161798b01"}, + {file = "lxml-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:1ad17c20e3666c035db502c78b86e58ff6b5991906e55bdbef94977700c72623"}, + {file = "lxml-5.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:24ef5a4631c0b6cceaf2dbca21687e29725b7c4e171f33a8f8ce23c12558ded1"}, + {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d2900b7f5318bc7ad8631d3d40190b95ef2aa8cc59473b73b294e4a55e9f30f"}, + {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:601f4a75797d7a770daed8b42b97cd1bb1ba18bd51a9382077a6a247a12aa38d"}, + {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4b68c961b5cc402cbd99cca5eb2547e46ce77260eb705f4d117fd9c3f932b95"}, + {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:afd825e30f8d1f521713a5669b63657bcfe5980a916c95855060048b88e1adb7"}, + 
{file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:262bc5f512a66b527d026518507e78c2f9c2bd9eb5c8aeeb9f0eb43fcb69dc67"}, + {file = "lxml-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:e856c1c7255c739434489ec9c8aa9cdf5179785d10ff20add308b5d673bed5cd"}, + {file = "lxml-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c7257171bb8d4432fe9d6fdde4d55fdbe663a63636a17f7f9aaba9bcb3153ad7"}, + {file = "lxml-5.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b9e240ae0ba96477682aa87899d94ddec1cc7926f9df29b1dd57b39e797d5ab5"}, + {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a96f02ba1bcd330807fc060ed91d1f7a20853da6dd449e5da4b09bfcc08fdcf5"}, + {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3898ae2b58eeafedfe99e542a17859017d72d7f6a63de0f04f99c2cb125936"}, + {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61c5a7edbd7c695e54fca029ceb351fc45cd8860119a0f83e48be44e1c464862"}, + {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3aeca824b38ca78d9ee2ab82bd9883083d0492d9d17df065ba3b94e88e4d7ee6"}, + {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8f52fe6859b9db71ee609b0c0a70fea5f1e71c3462ecf144ca800d3f434f0764"}, + {file = "lxml-5.1.0-cp37-cp37m-win32.whl", hash = "sha256:d42e3a3fc18acc88b838efded0e6ec3edf3e328a58c68fbd36a7263a874906c8"}, + {file = "lxml-5.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:eac68f96539b32fce2c9b47eb7c25bb2582bdaf1bbb360d25f564ee9e04c542b"}, + {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ae15347a88cf8af0949a9872b57a320d2605ae069bcdf047677318bc0bba45b1"}, + {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c26aab6ea9c54d3bed716b8851c8bfc40cb249b8e9880e250d1eddde9f709bf5"}, + {file = "lxml-5.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:342e95bddec3a698ac24378d61996b3ee5ba9acfeb253986002ac53c9a5f6f84"}, + {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:725e171e0b99a66ec8605ac77fa12239dbe061482ac854d25720e2294652eeaa"}, + {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d184e0d5c918cff04cdde9dbdf9600e960161d773666958c9d7b565ccc60c45"}, + {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:98f3f020a2b736566c707c8e034945c02aa94e124c24f77ca097c446f81b01f1"}, + {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d48fc57e7c1e3df57be5ae8614bab6d4e7b60f65c5457915c26892c41afc59e"}, + {file = "lxml-5.1.0-cp38-cp38-win32.whl", hash = "sha256:7ec465e6549ed97e9f1e5ed51c657c9ede767bc1c11552f7f4d022c4df4a977a"}, + {file = "lxml-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:b21b4031b53d25b0858d4e124f2f9131ffc1530431c6d1321805c90da78388d1"}, + {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:52427a7eadc98f9e62cb1368a5079ae826f94f05755d2d567d93ee1bc3ceb354"}, + {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6a2a2c724d97c1eb8cf966b16ca2915566a4904b9aad2ed9a09c748ffe14f969"}, + {file = "lxml-5.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:843b9c835580d52828d8f69ea4302537337a21e6b4f1ec711a52241ba4a824f3"}, + {file = "lxml-5.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9b99f564659cfa704a2dd82d0684207b1aadf7d02d33e54845f9fc78e06b7581"}, + {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f8b0c78e7aac24979ef09b7f50da871c2de2def043d468c4b41f512d831e912"}, + {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bcf86dfc8ff3e992fed847c077bd875d9e0ba2fa25d859c3a0f0f76f07f0c8d"}, + {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:49a9b4af45e8b925e1cd6f3b15bbba2c81e7dba6dce170c677c9cda547411e14"}, + {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:280f3edf15c2a967d923bcfb1f8f15337ad36f93525828b40a0f9d6c2ad24890"}, + {file = "lxml-5.1.0-cp39-cp39-win32.whl", hash = "sha256:ed7326563024b6e91fef6b6c7a1a2ff0a71b97793ac33dbbcf38f6005e51ff6e"}, + {file = "lxml-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:8d7b4beebb178e9183138f552238f7e6613162a42164233e2bda00cb3afac58f"}, + {file = "lxml-5.1.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9bd0ae7cc2b85320abd5e0abad5ccee5564ed5f0cc90245d2f9a8ef330a8deae"}, + {file = "lxml-5.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c1d679df4361408b628f42b26a5d62bd3e9ba7f0c0e7969f925021554755aa"}, + {file = "lxml-5.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2ad3a8ce9e8a767131061a22cd28fdffa3cd2dc193f399ff7b81777f3520e372"}, + {file = "lxml-5.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:304128394c9c22b6569eba2a6d98392b56fbdfbad58f83ea702530be80d0f9df"}, + {file = "lxml-5.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d74fcaf87132ffc0447b3c685a9f862ffb5b43e70ea6beec2fb8057d5d2a1fea"}, + {file = "lxml-5.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8cf5877f7ed384dabfdcc37922c3191bf27e55b498fecece9fd5c2c7aaa34c33"}, + {file = "lxml-5.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:877efb968c3d7eb2dad540b6cabf2f1d3c0fbf4b2d309a3c141f79c7e0061324"}, + {file = "lxml-5.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f14a4fb1c1c402a22e6a341a24c1341b4a3def81b41cd354386dcb795f83897"}, + {file = "lxml-5.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:25663d6e99659544ee8fe1b89b1a8c0aaa5e34b103fab124b17fa958c4a324a6"}, + {file = "lxml-5.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8b9f19df998761babaa7f09e6bc169294eefafd6149aaa272081cbddc7ba4ca3"}, + {file = "lxml-5.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e53d7e6a98b64fe54775d23a7c669763451340c3d44ad5e3a3b48a1efbdc96f"}, + {file = "lxml-5.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c3cd1fc1dc7c376c54440aeaaa0dcc803d2126732ff5c6b68ccd619f2e64be4f"}, + {file = "lxml-5.1.0.tar.gz", hash = "sha256:3eea6ed6e6c918e468e693c41ef07f3c3acc310b70ddd9cc72d9ef84bc9564ca"}, ] [package.extras] cssselect = ["cssselect (>=0.7)"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=0.29.35)"] +source = ["Cython (>=3.0.7)"] [[package]] name = "markdown-it-py" @@ -700,71 +684,71 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.3" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." 
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"},
-    {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"},
-    {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"},
-    {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"},
-    {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"},
-    {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"},
-    {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"},
-    {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"},
-    {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"},
-    {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"},
-    {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"},
-    {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"},
-    {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"},
-    {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"},
-    {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"},
-    {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"},
-    {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"},
-    {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"},
-    {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"},
-    {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"},
-    {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"},
-    {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"},
-    {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"},
-    {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"},
-    {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"},
-    {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"},
-    {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"},
-    {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"},
-    {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"},
-    {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"},
-    {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"},
-    {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"},
-    {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"},
-    {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"},
-    {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"},
-    {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"},
-    {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"},
-    {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"},
-    {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"},
-    {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"},
-    {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"},
-    {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"},
-    {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"},
-    {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"},
-    {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"},
-    {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"},
-    {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"},
-    {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"},
-    {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"},
-    {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"},
-    {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"},
-    {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"},
-    {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"},
-    {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"},
-    {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"},
-    {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"},
-    {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"},
-    {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"},
-    {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"},
-    {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"},
+    {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
 ]

 [[package]]
@@ -805,47 +789,47 @@ setuptools = "*"

 [[package]]
 name = "numpy"
-version = "1.26.2"
+version = "1.26.4"
 description = "Fundamental package for array computing in Python"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "numpy-1.26.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3703fc9258a4a122d17043e57b35e5ef1c5a5837c3db8be396c82e04c1cf9b0f"},
-    {file = "numpy-1.26.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cc392fdcbd21d4be6ae1bb4475a03ce3b025cd49a9be5345d76d7585aea69440"},
-    {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36340109af8da8805d8851ef1d74761b3b88e81a9bd80b290bbfed61bd2b4f75"},
-    {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcc008217145b3d77abd3e4d5ef586e3bdfba8fe17940769f8aa09b99e856c00"},
-    {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3ced40d4e9e18242f70dd02d739e44698df3dcb010d31f495ff00a31ef6014fe"},
-    {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b272d4cecc32c9e19911891446b72e986157e6a1809b7b56518b4f3755267523"},
-    {file = "numpy-1.26.2-cp310-cp310-win32.whl", hash = "sha256:22f8fc02fdbc829e7a8c578dd8d2e15a9074b630d4da29cda483337e300e3ee9"},
-    {file = "numpy-1.26.2-cp310-cp310-win_amd64.whl", hash = "sha256:26c9d33f8e8b846d5a65dd068c14e04018d05533b348d9eaeef6c1bd787f9919"},
-    {file = "numpy-1.26.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b96e7b9c624ef3ae2ae0e04fa9b460f6b9f17ad8b4bec6d7756510f1f6c0c841"},
-    {file = "numpy-1.26.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aa18428111fb9a591d7a9cc1b48150097ba6a7e8299fb56bdf574df650e7d1f1"},
-    {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06fa1ed84aa60ea6ef9f91ba57b5ed963c3729534e6e54055fc151fad0423f0a"},
-    {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96ca5482c3dbdd051bcd1fce8034603d6ebfc125a7bd59f55b40d8f5d246832b"},
-    {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:854ab91a2906ef29dc3925a064fcd365c7b4da743f84b123002f6139bcb3f8a7"},
-    {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f43740ab089277d403aa07567be138fc2a89d4d9892d113b76153e0e412409f8"},
-    {file = "numpy-1.26.2-cp311-cp311-win32.whl", hash = "sha256:a2bbc29fcb1771cd7b7425f98b05307776a6baf43035d3b80c4b0f29e9545186"},
-    {file = "numpy-1.26.2-cp311-cp311-win_amd64.whl", hash = "sha256:2b3fca8a5b00184828d12b073af4d0fc5fdd94b1632c2477526f6bd7842d700d"},
-    {file = "numpy-1.26.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a4cd6ed4a339c21f1d1b0fdf13426cb3b284555c27ac2f156dfdaaa7e16bfab0"},
-    {file = "numpy-1.26.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5d5244aabd6ed7f312268b9247be47343a654ebea52a60f002dc70c769048e75"},
-    {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a3cdb4d9c70e6b8c0814239ead47da00934666f668426fc6e94cce869e13fd7"},
-    {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa317b2325f7aa0a9471663e6093c210cb2ae9c0ad824732b307d2c51983d5b6"},
-    {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:174a8880739c16c925799c018f3f55b8130c1f7c8e75ab0a6fa9d41cab092fd6"},
-    {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f79b231bf5c16b1f39c7f4875e1ded36abee1591e98742b05d8a0fb55d8a3eec"},
-    {file = "numpy-1.26.2-cp312-cp312-win32.whl", hash = "sha256:4a06263321dfd3598cacb252f51e521a8cb4b6df471bb12a7ee5cbab20ea9167"},
-    {file = "numpy-1.26.2-cp312-cp312-win_amd64.whl", hash = "sha256:b04f5dc6b3efdaab541f7857351aac359e6ae3c126e2edb376929bd3b7f92d7e"},
-    {file = "numpy-1.26.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4eb8df4bf8d3d90d091e0146f6c28492b0be84da3e409ebef54349f71ed271ef"},
-    {file = "numpy-1.26.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a13860fdcd95de7cf58bd6f8bc5a5ef81c0b0625eb2c9a783948847abbef2c2"},
-    {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64308ebc366a8ed63fd0bf426b6a9468060962f1a4339ab1074c228fa6ade8e3"},
-    {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf8aab04a2c0e859da118f0b38617e5ee65d75b83795055fb66c0d5e9e9b818"},
-    {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d73a3abcac238250091b11caef9ad12413dab01669511779bc9b29261dd50210"},
-    {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b361d369fc7e5e1714cf827b731ca32bff8d411212fccd29ad98ad622449cc36"},
-    {file = "numpy-1.26.2-cp39-cp39-win32.whl", hash = "sha256:bd3f0091e845164a20bd5a326860c840fe2af79fa12e0469a12768a3ec578d80"},
-    {file = "numpy-1.26.2-cp39-cp39-win_amd64.whl", hash = "sha256:2beef57fb031dcc0dc8fa4fe297a742027b954949cabb52a2a376c144e5e6060"},
-    {file = "numpy-1.26.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1cc3d5029a30fb5f06704ad6b23b35e11309491c999838c31f124fee32107c79"},
-    {file = "numpy-1.26.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94cc3c222bb9fb5a12e334d0479b97bb2df446fbe622b470928f5284ffca3f8d"},
-    {file = "numpy-1.26.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe6b44fb8fcdf7eda4ef4461b97b3f63c466b27ab151bec2366db8b197387841"},
-    {file = "numpy-1.26.2.tar.gz", hash = "sha256:f65738447676ab5777f11e6bbbdb8ce11b785e105f690bc45966574816b6d3ea"},
+    {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"},
+    {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"},
+    {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"},
+    {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"},
+    {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"},
+    {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"},
+    {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"},
+    {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"},
+    {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"},
+    {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"},
+    {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"},
+    {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"},
+    {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"},
+    {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"},
+    {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"},
+    {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"},
+    {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"},
+    {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"},
+    {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"},
+    {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"},
+    {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"},
+    {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"},
+    {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"},
+    {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"},
+    {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"},
+    {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"},
+    {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"},
+    {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"},
+    {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"},
+    {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"},
+    {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"},
+    {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"},
+    {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"},
+    {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"},
+    {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"},
+    {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"},
 ]

 [[package]]
@@ -876,39 +860,39 @@ files = [

 [[package]]
 name = "pathspec"
-version = "0.12.0"
+version = "0.12.1"
 description = "Utility library for gitignore style pattern matching of file paths."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pathspec-0.12.0-py3-none-any.whl", hash = "sha256:f1f8a7eab698c357945c85ed79715e014612b8584faebe209dca4558e2b09513"},
-    {file = "pathspec-0.12.0.tar.gz", hash = "sha256:c57e16065a97b7beb175f13c84d27cb05f7b7315741c2fbd5de541042f4ea6e1"},
+    {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
+    {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
 ]

 [[package]]
 name = "platformdirs"
-version = "4.1.0"
+version = "4.2.0"
 description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"},
-    {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"},
+    {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"},
+    {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"},
 ]

 [package.extras]
-docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"]
-test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"]
+docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"]

 [[package]]
 name = "pluggy"
-version = "1.3.0"
+version = "1.4.0"
 description = "plugin and hook calling mechanisms for python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"},
-    {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"},
+    {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"},
+    {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"},
 ]

 [package.extras]
@@ -935,18 +919,18 @@ virtualenv = ">=20.10.0"

 [[package]]
 name = "pydantic"
-version = "2.5.2"
+version = "2.6.1"
 description = "Data validation using Python type hints"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"},
-    {file = "pydantic-2.5.2.tar.gz", hash = "sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"},
+    {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"},
+    {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"},
 ]

 [package.dependencies]
 annotated-types = ">=0.4.0"
-pydantic-core = "2.14.5"
+pydantic-core = "2.16.2"
 typing-extensions = ">=4.6.1"

 [package.extras]
@@ -954,116 +938,90 @@ email = ["email-validator (>=2.0.0)"]

 [[package]]
 name = "pydantic-core"
-version = "2.14.5"
+version = "2.16.2"
 description = ""
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "pydantic_core-2.14.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:7e88f5696153dc516ba6e79f82cc4747e87027205f0e02390c21f7cb3bd8abfd"},
-    {file = "pydantic_core-2.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4641e8ad4efb697f38a9b64ca0523b557c7931c5f84e0fd377a9a3b05121f0de"},
-    {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:774de879d212db5ce02dfbf5b0da9a0ea386aeba12b0b95674a4ce0593df3d07"},
-    {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebb4e035e28f49b6f1a7032920bb9a0c064aedbbabe52c543343d39341a5b2a3"},
-    {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b53e9ad053cd064f7e473a5f29b37fc4cc9dc6d35f341e6afc0155ea257fc911"},
-    {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aa1768c151cf562a9992462239dfc356b3d1037cc5a3ac829bb7f3bda7cc1f9"},
-    {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eac5c82fc632c599f4639a5886f96867ffced74458c7db61bc9a66ccb8ee3113"},
-    {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae91f50ccc5810b2f1b6b858257c9ad2e08da70bf890dee02de1775a387c66"},
-    {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6b9ff467ffbab9110e80e8c8de3bcfce8e8b0fd5661ac44a09ae5901668ba997"},
-    {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61ea96a78378e3bd5a0be99b0e5ed00057b71f66115f5404d0dae4819f495093"},
-    {file = "pydantic_core-2.14.5-cp310-none-win32.whl", hash = "sha256:bb4c2eda937a5e74c38a41b33d8c77220380a388d689bcdb9b187cf6224c9720"},
-    {file = "pydantic_core-2.14.5-cp310-none-win_amd64.whl", hash = "sha256:b7851992faf25eac90bfcb7bfd19e1f5ffa00afd57daec8a0042e63c74a4551b"},
-    {file = "pydantic_core-2.14.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4e40f2bd0d57dac3feb3a3aed50f17d83436c9e6b09b16af271b6230a2915459"},
-    {file = "pydantic_core-2.14.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab1cdb0f14dc161ebc268c09db04d2c9e6f70027f3b42446fa11c153521c0e88"},
-    {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7ea3a1c5bb40c93cad361b3e869b180ac174656120c42b9fadebf685d121b"},
-    {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60b7607753ba62cf0739177913b858140f11b8af72f22860c28eabb2f0a61937"},
-    {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2248485b0322c75aee7565d95ad0e16f1c67403a470d02f94da7344184be770f"},
-    {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:823fcc638f67035137a5cd3f1584a4542d35a951c3cc68c6ead1df7dac825c26"},
-    {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96581cfefa9123accc465a5fd0cc833ac4d75d55cc30b633b402e00e7ced00a6"},
-    {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a33324437018bf6ba1bb0f921788788641439e0ed654b233285b9c69704c27b4"},
-    {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9bd18fee0923ca10f9a3ff67d4851c9d3e22b7bc63d1eddc12f439f436f2aada"},
-    {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:853a2295c00f1d4429db4c0fb9475958543ee80cfd310814b5c0ef502de24dda"},
-    {file = "pydantic_core-2.14.5-cp311-none-win32.whl", hash = "sha256:cb774298da62aea5c80a89bd58c40205ab4c2abf4834453b5de207d59d2e1651"},
-    {file = "pydantic_core-2.14.5-cp311-none-win_amd64.whl", hash = "sha256:e87fc540c6cac7f29ede02e0f989d4233f88ad439c5cdee56f693cc9c1c78077"},
-    {file = "pydantic_core-2.14.5-cp311-none-win_arm64.whl", hash = "sha256:57d52fa717ff445cb0a5ab5237db502e6be50809b43a596fb569630c665abddf"},
-    {file = "pydantic_core-2.14.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e60f112ac88db9261ad3a52032ea46388378034f3279c643499edb982536a093"},
-    {file = "pydantic_core-2.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e227c40c02fd873c2a73a98c1280c10315cbebe26734c196ef4514776120aeb"},
-    {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0cbc7fff06a90bbd875cc201f94ef0ee3929dfbd5c55a06674b60857b8b85ed"},
-    {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:103ef8d5b58596a731b690112819501ba1db7a36f4ee99f7892c40da02c3e189"},
-    {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c949f04ecad823f81b1ba94e7d189d9dfb81edbb94ed3f8acfce41e682e48cef"},
-    {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1452a1acdf914d194159439eb21e56b89aa903f2e1c65c60b9d874f9b950e5d"},
-    {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4679d4c2b089e5ef89756bc73e1926745e995d76e11925e3e96a76d5fa51fc"},
-    {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf9d3fe53b1ee360e2421be95e62ca9b3296bf3f2fb2d3b83ca49ad3f925835e"},
-    {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:70f4b4851dbb500129681d04cc955be2a90b2248d69273a787dda120d5cf1f69"},
-    {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:59986de5710ad9613ff61dd9b02bdd2f615f1a7052304b79cc8fa2eb4e336d2d"},
-    {file = "pydantic_core-2.14.5-cp312-none-win32.whl", hash = "sha256:699156034181e2ce106c89ddb4b6504c30db8caa86e0c30de47b3e0654543260"},
-    {file = "pydantic_core-2.14.5-cp312-none-win_amd64.whl", hash = "sha256:5baab5455c7a538ac7e8bf1feec4278a66436197592a9bed538160a2e7d11e36"},
-    {file = "pydantic_core-2.14.5-cp312-none-win_arm64.whl", hash = "sha256:e47e9a08bcc04d20975b6434cc50bf82665fbc751bcce739d04a3120428f3e27"},
-    {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:af36f36538418f3806048f3b242a1777e2540ff9efaa667c27da63d2749dbce0"},
-    {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:45e95333b8418ded64745f14574aa9bfc212cb4fbeed7a687b0c6e53b5e188cd"},
-    {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e47a76848f92529879ecfc417ff88a2806438f57be4a6a8bf2961e8f9ca9ec7"},
-    {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d81e6987b27bc7d101c8597e1cd2bcaa2fee5e8e0f356735c7ed34368c471550"},
-    {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34708cc82c330e303f4ce87758828ef6e457681b58ce0e921b6e97937dd1e2a3"},
-    {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c1988019752138b974c28f43751528116bcceadad85f33a258869e641d753"},
-    {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e4d090e73e0725b2904fdbdd8d73b8802ddd691ef9254577b708d413bf3006e"},
-    {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5c7d5b5005f177764e96bd584d7bf28d6e26e96f2a541fdddb934c486e36fd59"},
-    {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a71891847f0a73b1b9eb86d089baee301477abef45f7eaf303495cd1473613e4"},
-    {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a717aef6971208f0851a2420b075338e33083111d92041157bbe0e2713b37325"},
-    {file = "pydantic_core-2.14.5-cp37-none-win32.whl", hash = "sha256:de790a3b5aa2124b8b78ae5faa033937a72da8efe74b9231698b5a1dd9be3405"},
-    {file = "pydantic_core-2.14.5-cp37-none-win_amd64.whl", hash = "sha256:6c327e9cd849b564b234da821236e6bcbe4f359a42ee05050dc79d8ed2a91588"},
-    {file = "pydantic_core-2.14.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef98ca7d5995a82f43ec0ab39c4caf6a9b994cb0b53648ff61716370eadc43cf"},
-    {file = "pydantic_core-2.14.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6eae413494a1c3f89055da7a5515f32e05ebc1a234c27674a6956755fb2236f"},
-    {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcf4e6d85614f7a4956c2de5a56531f44efb973d2fe4a444d7251df5d5c4dcfd"},
-    {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6637560562134b0e17de333d18e69e312e0458ee4455bdad12c37100b7cad706"},
-    {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77fa384d8e118b3077cccfcaf91bf83c31fe4dc850b5e6ee3dc14dc3d61bdba1"},
-    {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16e29bad40bcf97aac682a58861249ca9dcc57c3f6be22f506501833ddb8939c"},
-    {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531f4b4252fac6ca476fbe0e6f60f16f5b65d3e6b583bc4d87645e4e5ddde331"},
-    {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:074f3d86f081ce61414d2dc44901f4f83617329c6f3ab49d2bc6c96948b2c26b"},
-    {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c2adbe22ab4babbca99c75c5d07aaf74f43c3195384ec07ccbd2f9e3bddaecec"},
-    {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0f6116a558fd06d1b7c2902d1c4cf64a5bd49d67c3540e61eccca93f41418124"},
-    {file = "pydantic_core-2.14.5-cp38-none-win32.whl", hash = "sha256:fe0a5a1025eb797752136ac8b4fa21aa891e3d74fd340f864ff982d649691867"},
-    {file = "pydantic_core-2.14.5-cp38-none-win_amd64.whl", hash = "sha256:079206491c435b60778cf2b0ee5fd645e61ffd6e70c47806c9ed51fc75af078d"},
-    {file = "pydantic_core-2.14.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a6a16f4a527aae4f49c875da3cdc9508ac7eef26e7977952608610104244e1b7"},
-    {file = "pydantic_core-2.14.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:abf058be9517dc877227ec3223f0300034bd0e9f53aebd63cf4456c8cb1e0863"},
-    {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b08aae5013640a3bfa25a8eebbd95638ec3f4b2eaf6ed82cf0c7047133f03b"},
-    {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2d97e906b4ff36eb464d52a3bc7d720bd6261f64bc4bcdbcd2c557c02081ed2"},
-    {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3128e0bbc8c091ec4375a1828d6118bc20404883169ac95ffa8d983b293611e6"},
-    {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88e74ab0cdd84ad0614e2750f903bb0d610cc8af2cc17f72c28163acfcf372a4"},
-    {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c339dabd8ee15f8259ee0f202679b6324926e5bc9e9a40bf981ce77c038553db"},
-    {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3387277f1bf659caf1724e1afe8ee7dbc9952a82d90f858ebb931880216ea955"},
-    {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ba6b6b3846cfc10fdb4c971980a954e49d447cd215ed5a77ec8190bc93dd7bc5"},
-    {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca61d858e4107ce5e1330a74724fe757fc7135190eb5ce5c9d0191729f033209"},
-    {file = "pydantic_core-2.14.5-cp39-none-win32.whl", hash = "sha256:ec1e72d6412f7126eb7b2e3bfca42b15e6e389e1bc88ea0069d0cc1742f477c6"},
-    {file = "pydantic_core-2.14.5-cp39-none-win_amd64.whl", hash = "sha256:c0b97ec434041827935044bbbe52b03d6018c2897349670ff8fe11ed24d1d4ab"},
-    {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79e0a2cdbdc7af3f4aee3210b1172ab53d7ddb6a2d8c24119b5706e622b346d0"},
-    {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:678265f7b14e138d9a541ddabbe033012a2953315739f8cfa6d754cc8063e8ca"},
-    {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b15e855ae44f0c6341ceb74df61b606e11f1087e87dcb7482377374aac6abe"},
-    {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09b0e985fbaf13e6b06a56d21694d12ebca6ce5414b9211edf6f17738d82b0f8"},
-    {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ad873900297bb36e4b6b3f7029d88ff9829ecdc15d5cf20161775ce12306f8a"},
-    {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2d0ae0d8670164e10accbeb31d5ad45adb71292032d0fdb9079912907f0085f4"},
-    {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d37f8ec982ead9ba0a22a996129594938138a1503237b87318392a48882d50b7"},
-    {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35613015f0ba7e14c29ac6c2483a657ec740e5ac5758d993fdd5870b07a61d8b"},
-    {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab4ea451082e684198636565224bbb179575efc1658c48281b2c866bfd4ddf04"},
-    {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ce601907e99ea5b4adb807ded3570ea62186b17f88e271569144e8cca4409c7"},
-    {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18"},
-    {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70f947628e074bb2526ba1b151cee10e4c3b9670af4dbb4d73bc8a89445916b5"},
-    {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4bc536201426451f06f044dfbf341c09f540b4ebdb9fd8d2c6164d733de5e634"},
-    {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4791cf0f8c3104ac668797d8c514afb3431bc3305f5638add0ba1a5a37e0d88"},
-    {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:038c9f763e650712b899f983076ce783175397c848da04985658e7628cbe873b"},
-    {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:27548e16c79702f1e03f5628589c6057c9ae17c95b4c449de3c66b589ead0520"},
-    {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97bee68898f3f4344eb02fec316db93d9700fb1e6a5b760ffa20d71d9a46ce3"},
-    {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b759b77f5337b4ea024f03abc6464c9f35d9718de01cfe6bae9f2e139c397e"},
-    {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:439c9afe34638ace43a49bf72d201e0ffc1a800295bed8420c2a9ca8d5e3dbb3"},
-    {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ba39688799094c75ea8a16a6b544eb57b5b0f3328697084f3f2790892510d144"},
-    {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ccd4d5702bb90b84df13bd491be8d900b92016c5a455b7e14630ad7449eb03f8"},
-    {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:81982d78a45d1e5396819bbb4ece1fadfe5f079335dd28c4ab3427cd95389944"},
-    {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:7f8210297b04e53bc3da35db08b7302a6a1f4889c79173af69b72ec9754796b8"},
-    {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8c8a8812fe6f43a3a5b054af6ac2d7b8605c7bcab2804a8a7d68b53f3cd86e00"},
-    {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:206ed23aecd67c71daf5c02c3cd19c0501b01ef3cbf7782db9e4e051426b3d0d"},
-    {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2027d05c8aebe61d898d4cffd774840a9cb82ed356ba47a90d99ad768f39789"},
-    {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40180930807ce806aa71eda5a5a5447abb6b6a3c0b4b3b1b1962651906484d68"},
-    {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:615a0a4bff11c45eb3c1996ceed5bdaa2f7b432425253a7c2eed33bb86d80abc"},
-    {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5e412d717366e0677ef767eac93566582518fe8be923361a5c204c1a62eaafe"},
-    {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:513b07e99c0a267b1d954243845d8a833758a6726a3b5d8948306e3fe14675e3"},
-    {file = "pydantic_core-2.14.5.tar.gz", hash = "sha256:6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71"},
+    {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"},
+    {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"},
+    {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"},
+    {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"},
+    {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"},
+    {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"},
+    {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"},
+    {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"},
+    {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"},
+    {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"},
+    {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"},
+    {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"},
+    {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"},
+    {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"},
+    {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"},
+    {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"},
+    {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"},
+    {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"},
+    {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"},
+    {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"},
+    {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"},
+    {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"},
+    {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"},
+    {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"},
+    {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"},
+    {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"},
+    {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"},
+    {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"},
+    {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"},
+    {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash =
"sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, + {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, + {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, + {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, + {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, + {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, + {file 
= "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, + {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, + {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, + {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, ] [package.dependencies] @@ -1193,13 +1151,13 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "7.4.3" +version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] @@ -1230,13 +1188,13 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy [[package]] name = "python-dotenv" -version = "1.0.0" +version = "1.0.1" description = "Read key-value pairs from a .env file and set them as environment variables" optional = false python-versions = ">=3.8" files = [ - {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, - {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, ] [package.extras] @@ -1244,17 +1202,17 @@ cli = ["click (>=5.0)"] [[package]] name = "python-multipart" -version = "0.0.6" +version = "0.0.7" description = "A streaming multipart parser for Python" optional = false python-versions = ">=3.7" files = [ - {file = "python_multipart-0.0.6-py3-none-any.whl", hash = "sha256:ee698bab5ef148b0a760751c261902cd096e57e10558e11aca17646b74ee1c18"}, - {file = "python_multipart-0.0.6.tar.gz", hash = "sha256:e9925a80bb668529f1b67c7fdb0a5dacdd7cbfc6fb0bff3ea443fe22bdd62132"}, + {file = "python_multipart-0.0.7-py3-none-any.whl", hash = "sha256:b1fef9a53b74c795e2347daac8c54b252d9e0df9c619712691c1cc8021bd3c49"}, + {file = "python_multipart-0.0.7.tar.gz", hash = "sha256:288a6c39b06596c1b988bb6794c6fbc80e6c369e35e5062637df256bee0c9af9"}, ] [package.extras] -dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatch", "invoke (==1.7.3)", "more-itertools (==4.3.0)", "pbr (==4.3.0)", "pluggy (==1.0.0)", "py (==1.11.0)", "pytest (==7.2.0)", "pytest-cov (==4.0.0)", "pytest-timeout (==2.1.0)", "pyyaml (==5.1)"] +dev = ["atomicwrites 
(==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatch", "invoke (==2.2.0)", "more-itertools (==4.3.0)", "pbr (==4.3.0)", "pluggy (==1.0.0)", "py (==1.11.0)", "pytest (==7.2.0)", "pytest-cov (==4.0.0)", "pytest-timeout (==2.1.0)", "pyyaml (==5.1)"] [[package]] name = "pyyaml" @@ -1425,13 +1383,13 @@ wheel = ">=0.36.1" [[package]] name = "setuptools" -version = "69.0.2" +version = "69.0.3" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.2-py3-none-any.whl", hash = "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2"}, - {file = "setuptools-69.0.2.tar.gz", hash = "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"}, + {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, + {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, ] [package.extras] @@ -1520,20 +1478,20 @@ files = [ [[package]] name = "starlette" -version = "0.27.0" +version = "0.36.3" description = "The little ASGI library that shines." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "starlette-0.27.0-py3-none-any.whl", hash = "sha256:918416370e846586541235ccd38a474c08b80443ed31c578a418e2209b3eef91"}, - {file = "starlette-0.27.0.tar.gz", hash = "sha256:6a6b0d042acb8d469a01eba54e9cda6cbd24ac602c4cd016723117d6a7e73b75"}, + {file = "starlette-0.36.3-py3-none-any.whl", hash = "sha256:13d429aa93a61dc40bf503e8c801db1f1bca3dc706b10ef2434a36123568f044"}, + {file = "starlette-0.36.3.tar.gz", hash = "sha256:90a671733cfb35771d8cc605e0b679d23b992f8dcfad48cc60b38cb29aeb7080"}, ] [package.dependencies] anyio = ">=3.4.0,<5" [package.extras] -full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] [[package]] name = "toml" @@ -1559,29 +1517,30 @@ files = [ [[package]] name = "urllib3" -version = "2.1.0" +version = "2.2.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = "0.21.1" +version = "0.27.0.post1" description = "The lightning-fast ASGI server." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "uvicorn-0.21.1-py3-none-any.whl", hash = "sha256:e47cac98a6da10cd41e6fd036d472c6f58ede6c5dbee3dbee3ef7a100ed97742"}, - {file = "uvicorn-0.21.1.tar.gz", hash = "sha256:0fac9cb342ba099e0d582966005f3fdba5b0290579fed4a6266dc702ca7bb032"}, + {file = "uvicorn-0.27.0.post1-py3-none-any.whl", hash = "sha256:4b85ba02b8a20429b9b205d015cbeb788a12da527f731811b643fd739ef90d5f"}, + {file = "uvicorn-0.27.0.post1.tar.gz", hash = "sha256:54898fcd80c13ff1cd28bf77b04ec9dbd8ff60c5259b499b4b12bb0917f22907"}, ] [package.dependencies] @@ -1628,4 +1587,4 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "85c668acf50861c99381889c5be9aef605635c1c9abf040529fd01ab9ee2a805" +content-hash = "f9d999e3c9ac329c3cbd40277b79312a743182e39ef1ca1f2db89b48f9cb7db8" diff --git a/prez/reference_data/endpoints/endpoint_metadata.ttl b/prez/reference_data/endpoints/endpoint_metadata.ttl index 3eebae3c..98d97948 100644 --- a/prez/reference_data/endpoints/endpoint_metadata.ttl +++ b/prez/reference_data/endpoints/endpoint_metadata.ttl @@ -2,6 +2,17 @@ @prefix ogce: . @prefix ont: . @prefix prez: . +@prefix sys: . + +sys:profiles-listing + a ont:ListingEndpoint ; + ont:relevantShapes ex:Profiles ; + . + +sys:profiles-object + a ont:ObjectEndpoint ; + ont:relevantShapes ex:Profiles ; + . ogce:catalog-listing a ont:ListingEndpoint ; @@ -31,4 +42,4 @@ ogce:item-listing ogce:item-object a ont:ObjectEndpoint ; ont:relevantShapes ex:Feature , ex:ConceptSchemeConcept , ex:CollectionConcept , ex:Resource ; -. \ No newline at end of file +. diff --git a/prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl b/prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl index cf9abe11..58b17045 100644 --- a/prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl +++ b/prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl @@ -4,6 +4,7 @@ @prefix ex: . @prefix geo: . @prefix prez: . +@prefix prof: . @prefix rdfs: . @prefix sh: . @prefix skos: . @@ -31,33 +32,6 @@ ex:Collections sh:class dcat:Catalog ; ] . -#ex:ConceptScheme -# a sh:NodeShape ; -# ont:hierarchyLevel 2 ; -# sh:targetClass skos:ConceptScheme ; -# sh:property [ -# sh:path [ sh:inversePath dcterms:hasPart ] ; -# sh:class dcat:Catalog ; -# ] . -# -#ex:Collection -# a sh:NodeShape ; -# ont:hierarchyLevel 2 ; -# sh:targetClass skos:Collection ; -# sh:property [ -# sh:path [ sh:inversePath dcterms:hasPart ] ; -# sh:class dcat:Catalog ; -# ] . -# -#ex:LowerLevelCatalog -# a sh:NodeShape ; -# ont:hierarchyLevel 2 ; -# sh:targetClass dcat:Catalog ; -# sh:property [ -# sh:path [ sh:inversePath dcterms:hasPart ] ; -# sh:class dcat:Catalog ; -# ] . - ex:Feature a sh:NodeShape ; ont:hierarchyLevel 3 ; @@ -104,4 +78,10 @@ ex:Resource ] , [ sh:path ( [ sh:inversePath dcterms:hasPart ] [ sh:inversePath dcterms:hasPart ] ); sh:class dcat:Catalog ; - ] . \ No newline at end of file + ] . + +ex:Profiles + a sh:NodeShape ; + ont:hierarchyLevel 1 ; + sh:targetClass prof:Profile ; +. \ No newline at end of file diff --git a/prez/reference_data/prefixes/standard.ttl b/prez/reference_data/prefixes/standard.ttl index dbd2b56d..af227392 100755 --- a/prez/reference_data/prefixes/standard.ttl +++ b/prez/reference_data/prefixes/standard.ttl @@ -44,7 +44,7 @@ PREFIX sdoprof: vann:preferredNamespaceUri ] . 
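The remainder of this standard.ttl hunk renames the preferred prefix for the profiles namespace from "prezprof" to "profile". A minimal rdflib sketch of what a preferred prefix does to the CURIEs that surface in Prez links — the namespace and profile IRIs below are placeholders (the real namespace IRI is not shown in this hunk), and this is rdflib's generic qname logic, not Prez's own get_curie_id_for_uri:

```python
# Sketch only: the prefix a namespace is bound to determines the CURIE an
# IRI compacts to. "https://example.org/profile/" stands in for the real
# profiles namespace registered in standard.ttl.
from rdflib import Graph, URIRef

g = Graph()
g.bind("profile", "https://example.org/profile/", replace=True)

iri = URIRef("https://example.org/profile/open-object")  # hypothetical profile IRI
print(g.namespace_manager.qname(iri))
# -> "profile:open-object" (under the old binding this would compact to "prezprof:open-object")
```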
-[ vann:preferredNamespacePrefix "prezprof" ;
+[ vann:preferredNamespacePrefix "profile" ;
  vann:preferredNamespaceUri
] .
diff --git a/prez/reference_data/profiles/ogc_records_profile.ttl b/prez/reference_data/profiles/ogc_records_profile.ttl
index 5469b2d1..aa9e26b8 100755
--- a/prez/reference_data/profiles/ogc_records_profile.ttl
+++ b/prez/reference_data/profiles/ogc_records_profile.ttl
@@ -49,7 +49,7 @@ prez:OGCListingProfile
         "text/turtle" ;
     altr-ext:hasDefaultResourceFormat "text/anot+turtle" ;
     altr-ext:constrainsClass dcat:Catalog , skos:Collection , geo:Feature , geo:FeatureCollection , skos:Concept ,
-        dcat:Resource ;
+        dcat:Resource , prof:Profile ;
     sh:property [
         sh:path rdf:type
     ] .
@@ -95,5 +95,12 @@ prez:OGCItemProfile
             sh:path [ sh:inversePath dcterms:hasPart ] ;
         ] ;
     shext:bnode-depth 2 ;
-    altr-ext:constrainsClass dcat:Catalog , dcat:Resource , skos:ConceptScheme, skos:Collection , skos:Concept , geo:FeatureCollection , geo:Feature ;
+    altr-ext:constrainsClass dcat:Catalog ,
+        dcat:Resource ,
+        skos:ConceptScheme,
+        skos:Collection ,
+        skos:Concept ,
+        geo:FeatureCollection ,
+        geo:Feature ,
+        prof:Profile ;
 .
diff --git a/prez/reference_data/profiles/prez_default_profiles.ttl b/prez/reference_data/profiles/prez_default_profiles.ttl
index 2e1b4e37..887edaf7 100755
--- a/prez/reference_data/profiles/prez_default_profiles.ttl
+++ b/prez/reference_data/profiles/prez_default_profiles.ttl
@@ -54,13 +54,11 @@ PREFIX xsd:
     dcterms:description "A very basic data model that lists the members of container objects only, i.e. not their other properties" ;
     dcterms:identifier "mem"^^xsd:token ;
     dcterms:title "Members" ;
-    altr-ext:constrainsClass prez:DatasetList ,
-        prez:FeatureCollectionList ,
-        prez:FeatureList ,
-        prez:ProfilesList ,
-        prez:SchemesList ,
-        prez:VocPrezCollectionList ,
-        prez:CatalogList ,
+    altr-ext:constrainsClass geo:FeatureCollection ,
+        dcat:Dataset ,
+        dcat:Catalog ,
+        skos:ConceptScheme ,
+        skos:Collection ,
         prez:CQLObjectList ,
         prez:QueryablesList ;
     altr-ext:hasDefaultResourceFormat "text/anot+turtle" ;
@@ -86,22 +84,13 @@ altr-ext:alt-profile
         "text/turtle" ;
     altr-ext:constrainsClass geo:Feature ,
-        prez:FeatureList ,
         geo:FeatureCollection ,
-        prez:FeatureCollectionList ,
         dcat:Dataset ,
-        prez:DatasetList ,
         dcat:Catalog ,
-        prez:CatalogList ,
         dcat:Resource ,
-        prez:ResourceList ,
         skos:ConceptScheme ,
-        prez:SchemesList ,
         skos:Concept ,
-        prez:ConceptList ,
-        skos:Collection ,
-        prez:VocPrezCollectionList ,
-        prez:AltProfilesList ;
+        skos:Collection ;
     sh:property [
         sh:path (
             sh:union (
diff --git a/prez/routers/profiles.py b/prez/routers/profiles.py
index c83a29cc..bfc03481 100755
--- a/prez/routers/profiles.py
+++ b/prez/routers/profiles.py
@@ -29,6 +29,7 @@ async def profiles(
         hierarchy_level=1,
         page=page,
         per_page=per_page,
+        endpoint_structure=("profiles",)
     )
diff --git a/prez/services/generate_profiles.py b/prez/services/generate_profiles.py
index ee2c7f21..067b0678 100755
--- a/prez/services/generate_profiles.py
+++ b/prez/services/generate_profiles.py
@@ -82,7 +82,7 @@ async def get_profiles_and_mediatypes(
     # response = profiles_graph_cache.query(query)
     response = await system_repo.send_queries([], [(None, query)])
     # log.debug(f"ConnegP response:{results_pretty_printer(response)}")
-    if response[1][0][1] == [{}]:
         raise NoProfilesException(classes)
     top_result = response[1][0][1][0]
     profile, mediatype, selected_class = (
diff --git a/prez/services/link_generation.py b/prez/services/link_generation.py
index a72f12d3..c365e0b4 100755
--- a/prez/services/link_generation.py
+++ b/prez/services/link_generation.py
@@ -2,7 +2,7 @@
 from string import Template

 from rdflib import Graph, Literal, URIRef, DCTERMS, BNode
-from rdflib.namespace import SH
+from rdflib.namespace import SH, RDF

 from prez.cache import endpoints_graph_cache, links_ids_graph_cache
 from prez.config import settings
@@ -17,7 +17,7 @@
 log = logging.getLogger(__name__)


-async def add_prez_links(graph: Graph, repo: Repo):
+async def add_prez_links(graph: Graph, repo: Repo, endpoint_structure):
     # get all URIRefs - if Prez can find a class and endpoint for them, an internal link will be generated.
     uris = [uri for uri in graph.all_nodes() if isinstance(uri, URIRef)]
     uri_to_klasses = {}
@@ -26,10 +26,10 @@ async def add_prez_links(graph: Graph, repo: Repo):
     for uri, klasses in uri_to_klasses.items():
         if klasses:  # need class to know which endpoints can deliver the class
-            await _link_generation(uri, repo, klasses, graph)
+            await _link_generation(uri, repo, klasses, graph, endpoint_structure)


-async def _link_generation(uri: URIRef, repo: Repo, klasses, graph: Graph):
+async def _link_generation(uri: URIRef, repo: Repo, klasses, graph: Graph, endpoint_structure: str = settings.endpoint_structure):
     # check the cache
     quads = list(
         links_ids_graph_cache.quads((None, None, None, uri))
@@ -41,10 +41,32 @@ async def _link_generation(uri: URIRef, repo: Repo, klasses, graph: Graph):
         # get the endpoints that can deliver the class
         # many node shapes to one endpoint; multiple node shapes can point to the endpoint
     else:  # generate links
-        available_nodeshapes = []
-        available_nodeshape_triples = list(endpoints_graph_cache.triples_choices((None, SH.targetClass, list(klasses))))
-        if available_nodeshape_triples:
-            for ns, _, _ in available_nodeshape_triples:
+        available_nodeshapes = await get_nodeshapes_constraining_class(klasses, uri)
+        # run queries for available nodeshapes to get link components
+        for ns in available_nodeshapes:
+            if int(ns.hierarchy_level) > 1:
+                results = await get_link_components(available_nodeshapes, repo)
+                for result in results:
+                    # if the list at tuple[1] > 0 then there's some result and a link should be generated.
+                    # NB for top level links, there will be a result (the graph pattern matched) BUT the result will not form
+                    # part of the link. e.g. ?path_node_1 will have result(s) but is not part of the link.
+                    for solution in result[1]:
+                        # create link strings
+                        curie_for_uri, members_link, object_link = await create_link_strings(ns.hierarchy_level, solution, uri, endpoint_structure)
+                        # add links and identifiers to graph and cache
+                        await add_links_to_graph_and_cache(curie_for_uri, graph, members_link, object_link, uri)
+            else:
+                curie_for_uri, members_link, object_link = await create_link_strings(ns.hierarchy_level, {}, uri, endpoint_structure)
+                await add_links_to_graph_and_cache(curie_for_uri, graph, members_link, object_link, uri)
+
+
+async def get_nodeshapes_constraining_class(klasses, uri):
+    available_nodeshapes = []
+    available_nodeshape_uris = list(endpoints_graph_cache.subjects(predicate=RDF.type, object=SH.NodeShape))
+    available_nodeshape_triples = list(endpoints_graph_cache.triples_choices((None, SH.targetClass, list(klasses))))
+    if available_nodeshape_triples:
+        for ns, _, _ in available_nodeshape_triples:
+            if ns in available_nodeshape_uris:
                 available_nodeshapes.append(
                     NodeShape(
                         uri=ns,
@@ -52,69 +74,69 @@ async def _link_generation(uri: URIRef, repo: Repo, klasses, graph: Graph):
                         focus_node=IRI(value=uri),
                     )
                 )
+    return available_nodeshapes

-    link_queries = []
-    for ns in available_nodeshapes:
-        link_queries.append(
-            (
-                ns.uri,
-                "".join(SubSelect(
-                    select_clause=SelectClause(
-                        variables_or_all=ns.path_nodes.values()),
-                    where_clause=WhereClause(
-                        group_graph_pattern=GroupGraphPattern(
-                            content=GroupGraphPatternSub(
-                                triples_block=TriplesBlock(
-                                    triples=ns.triples_list
-                                ),
-                                graph_patterns_or_triples_blocks=ns.gpnt_list
-                            )
+
+async def add_links_to_graph_and_cache(curie_for_uri, graph, members_link, object_link, uri):
+    quads = []
+    quads.append(
+        (uri, PREZ["link"], Literal(object_link), uri)
+    )
+    quads.append(
+        (uri, DCTERMS.identifier, Literal(curie_for_uri, datatype=PREZ.identifier), uri)
+    )
+    if members_link:
+        existing_members_link = list(
+            links_ids_graph_cache.quads((uri, PREZ["members"], None, uri))
+        )
+        if not existing_members_link:
+            members_bn = BNode()
+            quads.append(
+                (uri, PREZ["members"], members_bn, uri)
+            )
+            quads.append(
+                (members_bn, PREZ["link"], Literal(members_link), uri)
+            )
+    for quad in quads:
+        graph.add(quad[:3])
+        links_ids_graph_cache.add(quad)
+
+
+async def create_link_strings(hierarchy_level, solution, uri, endpoint_structure):
+    components = list(endpoint_structure[:int(hierarchy_level)])
+    variables = reversed(["focus_node"] + [f"path_node_{i}" for i in range(1, len(components))])
+    item_link_template = Template(
+        "".join([f"/{comp}/${pattern}" for comp, pattern in zip(components, variables)]))
+    curie_for_uri = get_curie_id_for_uri(uri)
+    sol_values = {k: get_curie_id_for_uri(v["value"]) for k, v in solution.items()}
+    object_link = item_link_template.substitute(sol_values | {"focus_node": curie_for_uri})
+    members_link = None
+    if len(components) < len(list(endpoint_structure)):
+        members_link = object_link + "/" + endpoint_structure[len(components)]
+    return curie_for_uri, members_link, object_link
+
+
+async def get_link_components(available_nodeshapes, repo):
+    link_queries = []
+    for ns in available_nodeshapes:
+        link_queries.append(
+            (
+                ns.uri,
+                "".join(SubSelect(
+                    select_clause=SelectClause(
+                        variables_or_all=ns.path_nodes.values()),
+                    where_clause=WhereClause(
+                        group_graph_pattern=GroupGraphPattern(
+                            content=GroupGraphPatternSub(
+                                triples_block=TriplesBlock(
+                                    triples=ns.triples_list
+                                ),
+                                graph_patterns_or_triples_blocks=ns.gpnt_list
                            )
                        )
-                ).render())
-            )
-        )
-    _, results = await repo.send_queries([], link_queries)
-    for result in results:
-        # if the list at tuple[1] > 0 then there's some result and a link should be generated.
-        # NB for top level links, there will be a result (the graph pattern matched) BUT the result will not form
-        # part of the link. e.g. ?path_node_1 will have result(s) but is not part of the link.
-        for solution in result[1]:
-            # get the hierarchy level
-            hl = endpoints_graph_cache.value(predicate=ONT.hierarchyLevel, subject=result[0])
-            if not hl:
-                raise ValueError(
-                    f"Endpoint {result[0]} has no hierarchy level")  # TODO validate endpoint nodes with SHACL
-            components = list(settings.endpoint_structure[:int(hl)])
-            variables = reversed(["focus_node"] + [f"path_node_{i}" for i in range(1, len(components))])
-            item_link_template = Template(
-                "".join([f"/{comp}/${pattern}" for comp, pattern in zip(components, variables)]))
-            curie_for_uri = get_curie_id_for_uri(uri)
-            sol_values = {k: get_curie_id_for_uri(v["value"]) for k, v in solution.items()}
-            object_link = item_link_template.substitute(sol_values | {"focus_node": curie_for_uri})
-            members_link = None
-            if len(components) < len(list(settings.endpoint_structure)):
-                members_link = object_link + "/" + settings.endpoint_structure[len(components)]
-
-            quads = []
-            quads.append(
-                (uri, PREZ["link"], Literal(object_link), uri)
-            )
-            quads.append(
-                (uri, DCTERMS.identifier, Literal(curie_for_uri, datatype=PREZ.identifier), uri)
-            )
-            if members_link:
-                existing_members_link = list(
-                    links_ids_graph_cache.quads((uri, PREZ["members"], None, uri)) )
-                if not existing_members_link:
-                    members_bn = BNode()
-                    quads.append(
-                        (uri, PREZ["members"], members_bn, uri)
-                    )
-                    quads.append(
-                        (members_bn, PREZ["link"], Literal(members_link), uri)
-                    )
-            for quad in quads:
-                graph.add(quad[:3])
-                links_ids_graph_cache.add(quad)
+                ).render())
+            )
+        )
+    _, results = await repo.send_queries([], link_queries)
+    return results
diff --git a/prez/services/listings.py b/prez/services/listings.py
index 45b1f538..6f3b5f7c 100755
--- a/prez/services/listings.py
+++ b/prez/services/listings.py
@@ -39,6 +39,7 @@ async def listing_function(
     parent_uri: Optional[URIRef] = None,
     cql_parser: CQLParser = None,
     search_term: Optional[str] = None,
+    endpoint_structure: Tuple[str] = settings.endpoint_structure,
 ):
     """
     # determine the relevant node selection part of the query - from SHACL, CQL, Search
@@ -133,11 +134,11 @@ async def listing_function(
     ):
         item_graph, _ = await system_repo.send_queries(queries, [])
         if "anot+" in prof_and_mt_info.mediatype:
-            await add_prez_links(item_graph, system_repo)
+            await add_prez_links(item_graph, system_repo, endpoint_structure)
     else:
         item_graph, _ = await repo.send_queries(queries, [])
         if "anot+" in prof_and_mt_info.mediatype:
-            await add_prez_links(item_graph, repo)
+            await add_prez_links(item_graph, repo, endpoint_structure)
     # count search results - hard to do in SPARQL as the SELECT part of the query is NOT aggregated
     if search_term:
         count = len(list(item_graph.subjects(RDF.type, PREZ.SearchResult)))
@@ -169,53 +170,31 @@ async def determine_nodeshape(endpoint_uri, hierarchy_level, parent_uri, path_no
         target_classes = [URIRef(result["tc"]["value"]) for result in tabular_results]
     elif len(distinct_ns) > 1:  # more than one possible node shape
         # try all of the available nodeshapes
+        path_node_classes = {}
+        for pn, uri in path_nodes.items():
+            path_node_classes[pn] = await get_classes(URIRef(uri.value), repo)
         nodeshapes = [NodeShape(uri=URIRef(ns), graph=endpoints_graph_cache, path_nodes=path_nodes) for ns in
                       distinct_ns]
-
+        matching_nodeshapes = []
         for ns in nodeshapes:
-            ns.gpnt_list.append(
-                GraphPatternNotTriples(content=Bind(
-                    expression=Expression.from_primary_expr(
-                        PrimaryExpression(content=IRIOrFunction(iri=IRI(value=ns.uri)))
-                    ),
-                    var=Var(value="nodeshape"),
-                )
-                )
-            )
-
-        ggps_list = [GroupGraphPattern(
-            content=GroupGraphPatternSub(
-                graph_patterns_or_triples_blocks=[
-                    *ns.gpnt_list,
-                    TriplesBlock(triples=ns.triples_list),
-                ]
-            )
-        ) for ns in nodeshapes]
-        ss = SubSelect(
-            select_clause=SelectClause(
-                variables_or_all=[Var(value="nodeshape")]),
-            where_clause=WhereClause(
-                group_graph_pattern=GroupGraphPattern(
-                    content=GroupGraphPatternSub(
-                        graph_patterns_or_triples_blocks=[
-                            GraphPatternNotTriples(
-                                content=GroupOrUnionGraphPattern(
-                                    group_graph_patterns=ggps_list )
-                            )
-                        ]
-                    )
-                )
-            ),
-            solution_modifier=SolutionModifier()
-        )
-        ss_query = "".join(ss.render())
-        _, r = await repo.send_queries([], [(parent_uri, ss_query)])
-        node_selection_shape = URIRef(r[0][1][0]["nodeshape"]["value"])
+            match_all_keys = True  # Assume a match for all keys initially
+
+            for pn, klasses in path_node_classes.items():
+                # Check if all classes for this path node are in the ns.classes_at_len at this pn
+                if not all(klass in ns.classes_at_len.get(pn, []) for klass in klasses):
+                    match_all_keys = False  # Found a key where not all classes match
+                    break  # No need to check further for this ns
+
+            if match_all_keys:
+                matching_nodeshapes.append(ns)
+        # TODO logic if there is more than one nodeshape - current default nodeshapes will only return one.
+        node_selection_shape = matching_nodeshapes[0].uri
         target_classes = list(endpoints_graph_cache.objects(node_selection_shape, SH.targetClass))
     return node_selection_shape, target_classes
+
+
 def find_instances(obj, cls):
     found = []
diff --git a/prez/services/objects.py b/prez/services/objects.py
index 8cbc3596..b6807c47 100755
--- a/prez/services/objects.py
+++ b/prez/services/objects.py
@@ -1,10 +1,12 @@
 import logging
+from typing import Tuple

 from fastapi import Request
 from fastapi.responses import PlainTextResponse
 from rdflib import URIRef

 from prez.cache import profiles_graph_cache, endpoints_graph_cache
+from prez.config import settings
 from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo, populate_profile_and_mediatype
 from prez.reference_data.prez_ns import EP
 from prez.renderers.renderer import return_from_graph
@@ -24,6 +26,7 @@ async def object_function(
     request_url: str,
     repo: Repo,
     system_repo: Repo,
+    endpoint_structure: Tuple[str] = settings.endpoint_structure,
 ):
     klasses = await get_classes(uri=uri, repo=repo, endpoint=endpoint_uri)
     # ConnegP
@@ -64,8 +67,8 @@ async def object_function(
     item_graph, _ = await repo.send_queries([query], [])
     if "anot+" in prof_and_mt_info.mediatype:
         if not endpoint_uri == EP.object:
-            await add_prez_links(item_graph, repo)
-        await add_prez_links(item_graph, repo)
+            await add_prez_links(item_graph, repo, endpoint_structure)
+        await add_prez_links(item_graph, repo, endpoint_structure)
     return await return_from_graph(
         item_graph,
         prof_and_mt_info.mediatype,
diff --git a/prez/sparql/search_query.py b/prez/sparql/search_query.py
index aea32c24..9e5d126a 100755
--- a/prez/sparql/search_query.py
+++ b/prez/sparql/search_query.py
@@ -321,33 +321,3 @@ def create_inner_ggp(
     filter_gpnt = GraphPatternNotTriples(content=filter_expr)
     ggp.content.add_pattern(filter_gpnt)
     return ggp
-
-# if __name__ == "__main__":
owl:Class}") -# sr_uri = Var(value="focus_node") -# additional_ss = SubSelect( -# select_clause=SelectClause(variables_or_all=[sr_uri]), -# where_clause=WhereClause( -# group_graph_pattern=GroupGraphPattern( -# content=GroupGraphPatternSub( -# triples_block=TriplesBlock( -# triples=[ -# SimplifiedTriple( -# subject=sr_uri, -# predicate=IRI( -# value="http://www.w3.org/1999/02/22-rdf-syntax-ns#type" -# ), -# object=IRI(value="http://www.w3.org/2002/07/owl#Class"), -# ) -# ] -# ) -# ) -# ) -# ), -# ) -# sq = SearchQuery( -# search_term="test", -# pred_vals=[RDFS.label], -# additional_ss=additional_ss, -# ).render() -# print(sq) diff --git a/pyproject.toml b/pyproject.toml index 97e5c6f0..3a73e336 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,15 +6,15 @@ authors = ["Jamie Feiss ", "Nicholas Car 1: - if len(self.targetClasses) == 1: - self.triples_list.append( - SimplifiedTriple( - subject=self.focus_node, - predicate=IRI(value=RDF.type), - object=IRI(value=self.targetClasses[0]) - ) + if len(self.targetClasses) == 1: + self.triples_list.append( + SimplifiedTriple( + subject=self.focus_node, + predicate=IRI(value=RDF.type), + object=IRI(value=self.targetClasses[0]) ) - else: - self.triples_list.append( - SimplifiedTriple( - subject=self.focus_node, - predicate=IRI(value=RDF.type), - object=Var(value=f"focus_classes") - )) - dbvs = [DataBlockValue(value=IRI(value=klass)) for klass in self.targetClasses] - self.gpnt_list.append( - GraphPatternNotTriples( - content=InlineData( - data_block=DataBlock( - block=InlineDataOneVar( - variable=Var(value=f"focus_classes"), - datablockvalues=dbvs - ) + ) + elif len(self.targetClasses) > 1: + self.triples_list.append( + SimplifiedTriple( + subject=self.focus_node, + predicate=IRI(value=RDF.type), + object=Var(value=f"focus_classes") + )) + dbvs = [DataBlockValue(value=IRI(value=klass)) for klass in self.targetClasses] + self.gpnt_list.append( + GraphPatternNotTriples( + content=InlineData( + data_block=DataBlock( + block=InlineDataOneVar( + variable=Var(value=f"focus_classes"), + datablockvalues=dbvs ) ) ) ) + ) + else: + raise ValueError("No target classes found") def _process_property_shapes(self): for shape in self.propertyShapes: self.triples_list.extend(shape.triples_list) self.gpnt_list.extend(shape.gpnt_list) self.path_nodes = self.path_nodes | shape.path_nodes + self.classes_at_len = self.classes_at_len | shape.classes_at_len # deduplicate self.triples_list = list(set(self.triples_list)) @@ -111,6 +119,7 @@ class PropertyShape(Shape): triples_list: Optional[List[SimplifiedTriple]] = None gpnt_list: Optional[List[GraphPatternNotTriples]] = None path_nodes: Optional[Dict[str, Var | IRI]] = {} + classes_at_len: Optional[Dict[str, List[URIRef]]] = {} _select_vars: Optional[List[Var]] = None def from_graph(self): @@ -162,6 +171,9 @@ def to_grammar(self): # sh:class applies to the end of sequence paths path_node_term = self.path_nodes[f"path_node_{len_pp}"] + # useful for determining which endpoint property shape should be used when a request comes in on endpoint + self.classes_at_len[f"path_node_{len_pp}"] = self.or_klasses + if self.or_klasses: if len(self.or_klasses) == 1: self.triples_list.append( From 4f52e8da2e08daff585a8562533821f1295c2944 Mon Sep 17 00:00:00 2001 From: david Date: Mon, 12 Feb 2024 10:13:50 +1000 Subject: [PATCH 12/25] Update compose file --- demo/prez-v4-backend/docker-compose.yml | 2 +- test_data/object_catalog_bblocks_catalog.ttl | 12 + test_data/object_vocab_api_bblocks.ttl | 39 ++ 
test_data/object_vocab_datatype_bblocks.ttl | 38 ++ test_data/object_vocab_parameter_bblocks.ttl | 61 +++ test_data/object_vocab_schema_bblocks.ttl | 414 +++++++++++++++++++ test_data/sandgate.ttl | 295 +++++++++++++ 7 files changed, 860 insertions(+), 1 deletion(-) create mode 100644 test_data/object_catalog_bblocks_catalog.ttl create mode 100644 test_data/object_vocab_api_bblocks.ttl create mode 100644 test_data/object_vocab_datatype_bblocks.ttl create mode 100644 test_data/object_vocab_parameter_bblocks.ttl create mode 100644 test_data/object_vocab_schema_bblocks.ttl create mode 100755 test_data/sandgate.ttl diff --git a/demo/prez-v4-backend/docker-compose.yml b/demo/prez-v4-backend/docker-compose.yml index 59fac4c1..09930154 100755 --- a/demo/prez-v4-backend/docker-compose.yml +++ b/demo/prez-v4-backend/docker-compose.yml @@ -2,7 +2,7 @@ version: "3" services: fuseki: - image: "ghcr.io/zazuko/fuseki-geosparql:v3.2.0" + image: "ghcr.io/zazuko/fuseki-geosparql:v3.3.0" ports: - "3030:3030" volumes: diff --git a/test_data/object_catalog_bblocks_catalog.ttl b/test_data/object_catalog_bblocks_catalog.ttl new file mode 100644 index 00000000..86b4b547 --- /dev/null +++ b/test_data/object_catalog_bblocks_catalog.ttl @@ -0,0 +1,12 @@ +@prefix dcat: . +@prefix dcterms: . +@prefix vocab: . +@prefix catalog: . +@prefix prez: . + +catalog:bblocks + a dcat:Catalog ; + dcterms:identifier "bblocks" ; + dcterms:title "A catalog of Building Block Vocabularies" ; + dcterms:hasPart vocab:api , vocab:datatype , vocab:parameter , vocab:schema ; + . diff --git a/test_data/object_vocab_api_bblocks.ttl b/test_data/object_vocab_api_bblocks.ttl new file mode 100644 index 00000000..20a44635 --- /dev/null +++ b/test_data/object_vocab_api_bblocks.ttl @@ -0,0 +1,39 @@ +@prefix bblocks: . +@prefix dct: . +@prefix prov: . +@prefix rdfs: . +@prefix schema: . +@prefix skos: . +@prefix xsd: . +@prefix vocab: . +@prefix prez: . + +vocab:api + a skos:ConceptScheme ; + skos:prefLabel "API Building Blocks" ; + skos:hasTopConcept bblocks:ogc.unstable.sosa ; + dct:identifier "api" ; + . + +bblocks:ogc.unstable.sosa a skos:Concept, + bblocks:Api ; + rdfs:label "Sensor, Observation, Sample, and Actuator (SOSA)" ; + dct:abstract "The SOSA (Sensor, Observation, Sample, and Actuator) ontology is a realisation of the Observations, Measurements and Sampling (OMS) Conceptual model" ; + dct:created "2023-04-13T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/index.json" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/index.md" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/" ] ; + dct:hasVersion "1.0" ; + dct:modified "2023-04-13"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:api ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status ; + prez:link "/catalogs/bblocks/collections/api/items/ogc.unstable.sosa" ; + . diff --git a/test_data/object_vocab_datatype_bblocks.ttl b/test_data/object_vocab_datatype_bblocks.ttl new file mode 100644 index 00000000..9651ba4c --- /dev/null +++ b/test_data/object_vocab_datatype_bblocks.ttl @@ -0,0 +1,38 @@ +@prefix bblocks: . +@prefix dct: . +@prefix prov: . +@prefix rdfs: . +@prefix schema: . +@prefix skos: . +@prefix xsd: . +@prefix vocab: . 
+ +vocab:datatype + a skos:ConceptScheme ; + skos:prefLabel "Datatype Building Blocks" ; + skos:hasTopConcept bblocks:ogc.ogc-utils.iri-or-curie ; + dct:identifier "datatype" ; + . + +bblocks:ogc.ogc-utils.iri-or-curie a skos:Concept, + bblocks:Datatype ; + rdfs:label "IRI or CURIE" ; + dct:abstract "This Building Block defines a data type for a full IRI/URI or a CURIE (with or without a prefix)" ; + dct:created "2023-08-08T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/ogc-utils/iri-or-curie/index.md" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/ogc-utils/iri-or-curie/" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/ogc-utils/iri-or-curie/index.json" ] ; + dct:hasVersion "1.0" ; + dct:modified "2023-03-09"^^xsd:date ; + dct:source , + , + ; + skos:inScheme , vocab:datatype ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . diff --git a/test_data/object_vocab_parameter_bblocks.ttl b/test_data/object_vocab_parameter_bblocks.ttl new file mode 100644 index 00000000..23c920a7 --- /dev/null +++ b/test_data/object_vocab_parameter_bblocks.ttl @@ -0,0 +1,61 @@ +@prefix bblocks: . +@prefix dct: . +@prefix prov: . +@prefix rdfs: . +@prefix schema: . +@prefix skos: . +@prefix xsd: . +@prefix vocab: . + +vocab:parameter + a skos:ConceptScheme ; + skos:prefLabel "Parameter Building Blocks" ; + skos:hasTopConcept bblocks:ogc.geo.common.parameters.bbox , bblocks:ogc.geo.common.parameters.bbox-crs ; + dct:identifier "parameter" + . + +bblocks:ogc.geo.common.parameters.bbox a skos:Concept, + bblocks:Parameter ; + rdfs:label "bbox" ; + dct:abstract "The bbox query parameter provides a simple mechanism for filtering resources based on their location. It selects all resources that intersect a rectangle (map view) or box (including height information)." ; + dct:created "2022-05-24T13:51:38+00:00"^^xsd:dateTime ; + dct:description [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/common/parameters/bbox/index.json" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/common/parameters/bbox/" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/common/parameters/bbox/index.md" ] ; + dct:hasVersion "1.0" ; + dct:modified "2022-05-24"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:parameter ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.geo.common.parameters.bbox-crs a skos:Concept, + bblocks:Parameter ; + rdfs:label "bbox-crs" ; + dct:abstract "The bbox-crs query parameter can be used to assert the coordinate reference system that is used for the coordinate values of the bbox parameter." 
; + dct:created "2022-07-05T01:01:01+02:00"^^xsd:dateTime ; + dct:description [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/common/parameters/bbox-crs/index.json" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/common/parameters/bbox-crs/index.md" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/common/parameters/bbox-crs/" ] ; + dct:hasVersion "1.0" ; + dct:modified "2022-07-05"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:parameter ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + + + + diff --git a/test_data/object_vocab_schema_bblocks.ttl b/test_data/object_vocab_schema_bblocks.ttl new file mode 100644 index 00000000..f71f849a --- /dev/null +++ b/test_data/object_vocab_schema_bblocks.ttl @@ -0,0 +1,414 @@ +@prefix bblocks: . +@prefix dct: . +@prefix prov: . +@prefix rdfs: . +@prefix schema: . +@prefix skos: . +@prefix xsd: . +@prefix vocab: . + +vocab:schema + a skos:ConceptScheme ; + dct:identifier "schema" ; + skos:prefLabel "Schema Building Blocks" ; + skos:hasTopConcept bblocks:ogc.unstable.sosa.examples.vectorObservation , + bblocks:ogc.unstable.sosa.examples.vectorObservationFeature , + bblocks:ogc.unstable.sosa.features.observation , + bblocks:ogc.unstable.sosa.features.observationCollection , + bblocks:ogc.unstable.sosa.properties.observation , + bblocks:ogc.unstable.sosa.properties.observationCollection , + bblocks:ogc.ogc-utils.json-link , + bblocks:ogc.geo.features.feature , + bblocks:ogc.geo.features.featureCollection , + bblocks:ogc.geo.geopose.advanced , + bblocks:ogc.geo.geopose.basic.quaternion , + bblocks:ogc.geo.geopose.basic.ypr , + bblocks:ogc.geo.json-fg.feature , + bblocks:ogc.geo.json-fg.feature-lenient , + bblocks:ogc.geo.json-fg.featureCollection , + bblocks:ogc.geo.json-fg.featureCollection-lenient , + bblocks:ogc.geo.common.data_types.bounding_box , + bblocks:ogc.geo.common.data_types.geojson + . + + +bblocks:ogc.unstable.sosa.examples.vectorObservation a skos:Concept, + bblocks:Schema ; + rdfs:label "Example SOSA Vector Observation" ; + dct:abstract "This building block defines an example SOSA Vector Observation" ; + dct:created "2023-05-19T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/examples/vectorObservation/" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/examples/vectorObservation/index.json" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/examples/vectorObservation/index.md" ] ; + dct:hasVersion "1.0" ; + dct:modified "2023-05-19"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . 
+ +bblocks:ogc.unstable.sosa.examples.vectorObservationFeature a skos:Concept, + bblocks:Schema ; + rdfs:label "Example SOSA Vector Observation Feature" ; + dct:abstract "This building block defines an example SOSA Observation Feature for a Vector Observation" ; + dct:created "2023-05-19T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/examples/vectorObservationFeature/index.md" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/examples/vectorObservationFeature/index.json" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/examples/vectorObservationFeature/" ] ; + dct:hasVersion "1.0" ; + dct:modified "2023-05-19"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.unstable.sosa.features.observation a skos:Concept, + bblocks:Schema ; + rdfs:label "SOSA Observation Feature" ; + dct:abstract "This building blocks defines a GeoJSON feature containing a SOSA Observation" ; + dct:created "2023-05-18T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/features/observation/" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/features/observation/index.md" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/features/observation/index.json" ] ; + dct:hasVersion "1.0" ; + dct:modified "2023-05-18"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.unstable.sosa.features.observationCollection a skos:Concept, + bblocks:Schema ; + rdfs:label "SOSA ObservationCollection Feature" ; + dct:abstract "This building blocks defines an ObservationCollection Feature according to the SOSA/SSN v1.1 specification." ; + dct:created "2023-04-13T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/features/observationCollection/index.json" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/features/observationCollection/" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/features/observationCollection/index.md" ] ; + dct:hasVersion "1.0" ; + dct:modified "2023-04-28"^^xsd:date ; + dct:source , + ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.unstable.sosa.properties.observation a skos:Concept, + bblocks:Schema ; + rdfs:label "SOSA Observation" ; + dct:abstract "This building block defines the set of properties for an observation according to the SOSA/SSN specification. These properties may be directly included into a root element of a JSON object or used in the properties container of a GeoJSON feature." 
; + dct:created "2023-04-13T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/properties/observation/index.md" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/properties/observation/index.json" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/properties/observation/" ] ; + dct:hasVersion "1.0" ; + dct:modified "2023-04-13"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.unstable.sosa.properties.observationCollection a skos:Concept, + bblocks:Schema ; + rdfs:label "SOSA ObservationCollection" ; + dct:abstract "This building blocks defines an ObservationCollection according to the SOSA/SSN v1.1 specification." ; + dct:created "2023-04-13T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/properties/observationCollection/index.json" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/properties/observationCollection/index.md" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/properties/observationCollection/" ] ; + dct:hasVersion "1.0" ; + dct:modified "2023-04-28"^^xsd:date ; + dct:source , + ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.ogc-utils.json-link a skos:Concept, + bblocks:Schema ; + rdfs:label "JSON Link" ; + dct:abstract "Web linking is used to express relationships between resources. The JSON object representation of links described here is used consistently in OGC API’s." ; + dct:created "2022-05-18T15:21:59+00:00"^^xsd:dateTime ; + dct:description [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/ogc-utils/json-link/index.json" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/ogc-utils/json-link/" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/ogc-utils/json-link/index.md" ] ; + dct:hasVersion "0.1" ; + dct:modified "2022-05-18"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.geo.features.feature a skos:Concept, + bblocks:Schema ; + rdfs:label "Feature" ; + dct:abstract "A feature. Every feature is a sub-resource of an OGC Collection." 
; + dct:created "2023-05-24T14:56:51+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/features/feature/" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/features/feature/index.md" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/features/feature/index.json" ] ; + dct:hasVersion "1.0" ; + dct:modified "2023-05-15"^^xsd:date ; + dct:source , + ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.geo.features.featureCollection a skos:Concept, + bblocks:Schema ; + rdfs:label "Feature Collection" ; + dct:abstract "A collection of features." ; + dct:created "2023-06-26T14:56:51+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/features/featureCollection/index.md" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/features/featureCollection/" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/features/featureCollection/index.json" ] ; + dct:hasVersion "1.0" ; + dct:modified "2023-06-26"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.geo.geopose.advanced a skos:Concept, + bblocks:Schema ; + rdfs:label "GeoPose Advanced" ; + dct:abstract "Advanced GeoPose allowing flexible outer frame specification, quaternion orientation, and valid time." ; + dct:created "2023-07-13T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/geopose/advanced/index.json" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/geopose/advanced/index.md" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/geopose/advanced/" ] ; + dct:hasVersion "0.1" ; + dct:modified "2023-07-13"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . 
+ +bblocks:ogc.geo.geopose.basic.quaternion a skos:Concept, + bblocks:Schema ; + rdfs:label "GeoPose Basic-Quaternion" ; + dct:abstract "Basic GeoPose using quaternion to specify orientation" ; + dct:created "2023-07-13T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/geopose/basic/quaternion/index.md" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/geopose/basic/quaternion/index.json" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/geopose/basic/quaternion/" ] ; + dct:hasVersion "0.1" ; + dct:modified "2023-07-13"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.geo.geopose.basic.ypr a skos:Concept, + bblocks:Schema ; + rdfs:label "GeoPose Basic-YPR" ; + dct:abstract "Basic GeoPose using yaw, pitch, and roll to specify orientation" ; + dct:created "2023-03-15T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/geopose/basic/ypr/" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/geopose/basic/ypr/index.md" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/geopose/basic/ypr/index.json" ] ; + dct:hasVersion "0.1" ; + dct:modified "2023-07-13"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.geo.json-fg.feature a skos:Concept, + bblocks:Schema ; + rdfs:label "JSON-FG Feature" ; + dct:abstract "A OGC Features and Geometries JSON (JSON-FG) Feature, extending GeoJSON to support a limited set of additional capabilities that are out-of-scope for GeoJSON, but that are important for a variety of use cases involving feature data." ; + dct:created "2023-05-31T14:56:51+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/json-fg/feature/index.md" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/json-fg/feature/" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/json-fg/feature/index.json" ] ; + dct:hasVersion "0.1" ; + dct:modified "2023-05-31"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.geo.json-fg.feature-lenient a skos:Concept, + bblocks:Schema ; + rdfs:label "JSON-FG Feature - Lenient" ; + dct:abstract "A OGC Features and Geometries JSON (JSON-FG) Feature that does not require the \"time\" and \"place\" properties." 
; + dct:created "2023-08-08T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/json-fg/feature-lenient/index.md" ], + [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/json-fg/feature-lenient/index.json" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/json-fg/feature-lenient/" ] ; + dct:hasVersion "0.1" ; + dct:modified "2023-08-08"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.geo.json-fg.featureCollection a skos:Concept, + bblocks:Schema ; + rdfs:label "JSON-FG Feature Collection" ; + dct:abstract "A collection of OGC Features and Geometries JSON (JSON-FG) Features, extending GeoJSON to support a limited set of additional capabilities that are out-of-scope for GeoJSON, but that are important for a variety of use cases involving feature data." ; + dct:created "2023-05-31T14:56:51+00:00"^^xsd:dateTime ; + dct:description [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/json-fg/featureCollection/index.json" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/json-fg/featureCollection/" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/json-fg/featureCollection/index.md" ] ; + dct:hasVersion "0.1" ; + dct:modified "2023-05-31"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.geo.json-fg.featureCollection-lenient a skos:Concept, + bblocks:Schema ; + rdfs:label "JSON-FG Feature Collection - Lenient" ; + dct:abstract "A collection of lenient OGC Features and Geometries JSON (JSON-FG) Features, that do not require the \"time\" and \"place\" properties" ; + dct:created "2023-08-08T00:00:00+00:00"^^xsd:dateTime ; + dct:description [ dct:format "application/json" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/json-fg/featureCollection-lenient/index.json" ], + [ dct:format "text/markdown" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/json-fg/featureCollection-lenient/index.md" ], + [ dct:format "text/html" ; + rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/json-fg/featureCollection-lenient/" ] ; + dct:hasVersion "0.1" ; + dct:modified "2023-08-08"^^xsd:date ; + dct:source ; + skos:inScheme , vocab:schema ; + bblocks:hasJsonLdContext ; + bblocks:hasSchema , + ; + bblocks:scope ; + bblocks:status . + +bblocks:ogc.geo.common.data_types.bounding_box a skos:Concept, + bblocks:Schema ; + rdfs:label "Bounding Box" ; + dct:abstract "The bounding box JSON object describes a simple spatial extent of a resource. For OGC API’s this could be a feature, a feature collection or a dataset, but it can be used in any JSON resource that wants to communicate its rough location. 
The extent is simple in that the bounding box does not describe the precise location and shape of the resource, but provides an axis-aligned approximation of the spatial extent that can be used as an initial test whether two resources are potentially intersecting each other." ;
+    dct:created "2022-05-24T13:51:38+00:00"^^xsd:dateTime ;
+    dct:description [ dct:format "application/json" ;
+            rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/common/data_types/bounding_box/index.json" ],
+        [ dct:format "text/html" ;
+            rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/common/data_types/bounding_box/" ],
+        [ dct:format "text/markdown" ;
+            rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/common/data_types/bounding_box/index.md" ] ;
+    dct:hasVersion "1.0.1" ;
+    dct:modified "2023-03-09"^^xsd:date ;
+    dct:source ;
+    skos:inScheme , vocab:schema ;
+    bblocks:hasJsonLdContext ;
+    bblocks:hasSchema ,
+        ;
+    bblocks:scope ;
+    bblocks:status .
+
+bblocks:ogc.geo.common.data_types.geojson a skos:Concept,
+        bblocks:Schema ;
+    rdfs:label "GeoJSON" ;
+    dct:abstract "A GeoJSON object" ;
+    dct:created "2023-05-24T14:56:51+00:00"^^xsd:dateTime ;
+    dct:description [ dct:format "text/html" ;
+            rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/geo/common/data_types/geojson/" ],
+        [ dct:format "application/json" ;
+            rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/geo/common/data_types/geojson/index.json" ],
+        [ dct:format "text/markdown" ;
+            rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/geo/common/data_types/geojson/index.md" ] ;
+    dct:hasVersion "1.0" ;
+    dct:modified "2023-05-15"^^xsd:date ;
+    dct:source ;
+    skos:inScheme , vocab:schema ;
+    bblocks:hasJsonLdContext ;
+    bblocks:hasSchema ,
+        ;
+    bblocks:scope ;
+    bblocks:status .
diff --git a/test_data/sandgate.ttl b/test_data/sandgate.ttl
new file mode 100755
index 00000000..de42fbfb
--- /dev/null
+++ b/test_data/sandgate.ttl
@@ -0,0 +1,295 @@
+PREFIX dcat: <http://www.w3.org/ns/dcat#>
+PREFIX dcterms: <http://purl.org/dc/terms/>
+PREFIX geo: <http://www.opengis.net/ont/geosparql#>
+PREFIX geofab:
+PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+PREFIX sand:
+PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
+
+
+ a dcat:Dataset ;
+    dcterms:description "Example floods, roads, catchments and facilities in the Sandgate area"@en ;
+    dcterms:identifier "sandgate"^^xsd:token ;
+    dcterms:title "Sandgate example dataset"@en ;
+    rdfs:member
+        sand:catchments ,
+        sand:facilities ,
+        sand:floods ,
+        sand:roads ;
+    geo:hasBoundingBox [
+        a geo:Geometry ;
+        geo:asWKT "POLYGON ((152.9075 -27.42,153.16 -27.42,153.16 -27.2234024,152.9075 -27.2234024,152.9075 -27.42))"^^geo:wktLiteral
+    ] ;
+.
+
+sand:catchments
+    a geo:FeatureCollection ;
+    dcterms:description "Hydrological catchments that are 'contracted', that is, guaranteed to appear on multiple Geofabric surface hydrology data products"@en ;
+    dcterms:identifier "catchments"^^xsd:token ;
+    dcterms:title "Geofabric Contracted Catchments"@en ;
+    rdfs:label "Geofabric Contracted Catchments"@en ;
+    rdfs:member
+        sand:cc12109444 ,
+        sand:cc12109445 ;
+    geo:hasBoundingBox [
+        a geo:Geometry ;
+        geo:asWKT "POLYGON ((152.9075 -27.42,153.16 -27.42,153.16 -27.2775,152.9075 -27.2775,152.9075 -27.42))"^^geo:wktLiteral
+    ] ;
+.
+
+sand:facilities
+    a geo:FeatureCollection ;
+    dcterms:description "Sandgate area demo Facilities"@en ;
+    dcterms:identifier "facilities"^^xsd:token ;
+    dcterms:title "Sandgate area demo Facilities"@en ;
+    rdfs:label "Sandgate area demo Facilities"@en ;
+    rdfs:member
+        sand:bhc ,
+        sand:bhca ,
+        sand:bps ,
+        sand:cpc ,
+        sand:jcabi ,
+        sand:rps ,
+        sand:sac ,
+        sand:sps ,
+        sand:src ,
+        sand:srca ;
+    geo:hasBoundingBox [
+        a geo:Geometry ;
+        geo:asWKT "POLYGON ((153.0144819 -27.3506599,153.1143102 -27.3506599,153.1143102 -27.2234024,153.0144819 -27.2234024,153.0144819 -27.3506599))"^^geo:wktLiteral
+    ] ;
+.
+
+sand:floods
+    a geo:FeatureCollection ;
+    dcterms:description "Sandgate flooded areas"@en ;
+    dcterms:identifier "floods"^^xsd:token ;
+    dcterms:title "Sandgate flooded areas"@en ;
+    rdfs:label "Sandgate flooded areas"@en ;
+    rdfs:member
+        sand:f001 ,
+        sand:f023 ,
+        sand:f332 ,
+        sand:f632 ;
+    geo:hasBoundingBox [
+        a geo:Geometry ;
+        geo:asWKT "POLYGON ((153.06307 -27.3151243,153.069877 -27.3151243,153.069877 -27.2859541,153.06307 -27.2859541,153.06307 -27.3151243))"^^geo:wktLiteral
+    ] ;
+.
+
+sand:roads
+    a geo:FeatureCollection ;
+    dcterms:description "Sandgate main roads"@en ;
+    dcterms:identifier "roads"^^xsd:token ;
+    dcterms:title "Sandgate main roads"@en ;
+    rdfs:label "Sandgate main roads"@en ;
+    rdfs:member
+        sand:bt ,
+        sand:fp ;
+    geo:hasBoundingBox [
+        a geo:Geometry ;
+        geo:asWKT "POLYGON ((153.0617934 -27.3203138,153.0747569 -27.3203138,153.0747569 -27.2920918,153.0617934 -27.2920918,153.0617934 -27.3203138))"^^geo:wktLiteral
+    ] ;
+.
+
+sand:bhc
+    a geo:Feature ;
+    rdfs:label "Brighton Health Campus Location" ;
+    dcterms:identifier "bhc"^^xsd:token ;
+    geo:hasGeometry [
+        a geo:Geometry ;
+        geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0638169, -27.2897951]}"^^geo:geoJSONLiteral ;
+        geo:asWKT "POINT (153.0638169 -27.2897951)"^^geo:wktLiteral
+    ] ;
+.
+
+sand:bhca
+    a geo:Feature ;
+    rdfs:label "Brighton Health Campus Area" ;
+    dcterms:identifier "bhca"^^xsd:token ;
+    geo:hasGeometry [
+        a geo:Geometry ;
+        geo:asGeoJSON "{\"type\": \"Polygon\", \"coordinates\": [[[153.063644,-27.2894036],[153.0635207,-27.2896229],[153.0631612,-27.2896182],[153.0631291,-27.289909],[153.0631559,-27.290338],[153.0644487,-27.2904858],[153.0645614,-27.2899185],[153.0648349,-27.2895324],[153.0648135,-27.2889174],[153.0637674,-27.2887362],[153.063644,-27.2894036]]]}"^^geo:geoJSONLiteral ;
+        geo:asWKT "POLYGON ((153.063644 -27.2894036, 153.0635207 -27.2896229, 153.0631612 -27.2896182, 153.0631291 -27.289909, 153.0631559 -27.290338, 153.0644487 -27.2904858, 153.0645614 -27.2899185, 153.0648349 -27.2895324, 153.0648135 -27.2889174, 153.0637674 -27.2887362, 153.063644 -27.2894036))"^^geo:wktLiteral
+    ] ;
+.
+
+sand:bps
+    a geo:Feature ;
+    rdfs:label "Boondal Police Station" ;
+    dcterms:identifier "bps"^^xsd:token ;
+    geo:hasGeometry [
+        a geo:Geometry ;
+        geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0536022, -27.3497934]}"^^geo:geoJSONLiteral ;
+        geo:asWKT "POINT (153.0536022 -27.3497934)"^^geo:wktLiteral
+    ] ;
+.
+ +sand:bt + a geo:Feature ; + rdfs:label "Brighton Terrace" ; + dcterms:identifier "bt"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{ \"type\": \"LineString\", \"coordinates\": [ [ 153.06513, -27.3143431 ], [ 153.065881100000013, -27.3140285 ], [ 153.0653983, -27.3130466 ], [ 153.0652052, -27.3122745 ], [ 153.0651193, -27.3116453 ], [ 153.064550700000012, -27.3103202 ], [ 153.0641108, -27.3092526 ], [ 153.0637889, -27.3074031 ], [ 153.0631774, -27.3057253 ], [ 153.0628448, -27.3044573 ], [ 153.0627053, -27.3036565 ], [ 153.061847, -27.2988706 ], [ 153.0617934, -27.2952 ], [ 153.062168899999989, -27.2933312 ], [ 153.0622333, -27.2920918 ] ] }"^^geo:geoJSONLiteral ; + geo:asWKT "LINESTRING (153.06513 -27.3143431,153.0658811 -27.3140285,153.0653983 -27.3130466,153.0652052 -27.3122745,153.0651193 -27.3116453,153.0645507 -27.3103202,153.0641108 -27.3092526,153.0637889 -27.3074031,153.0631774 -27.3057253,153.0628448 -27.3044573,153.0627053 -27.3036565,153.061847 -27.2988706,153.0617934 -27.2952,153.0621689 -27.2933312,153.0622333 -27.2920918)"^^geo:wktLiteral + ] ; +. + +sand:cc12109444 + a + geo:Feature , + geofab:ContractedCatchment ; + rdfs:label "Contracted Catchment 12109444" ; + dcterms:identifier "cc12109444"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON '{"type": "Polygon", "coordinates": [[[153.06, -27.28], [153.06, -27.2825], [153.0625, -27.2825], [153.065, -27.2825], [153.065, -27.305], [153.0675, -27.305], [153.0675, -27.31], [153.07, -27.31], [153.07, -27.3125], [153.0725, -27.3125], [153.0725, -27.3175], [153.075, -27.3175], [153.075, -27.32], [153.0775, -27.32], [153.0775, -27.3225], [153.08, -27.3225], [153.085, -27.3225], [153.085, -27.325], [153.0875, -27.325], [153.0875, -27.33], [153.085, -27.33], [153.0825, -27.33], [153.0825, -27.3325], [153.085, -27.3325], [153.085, -27.335], [153.0875, -27.335], [153.09, -27.335], [153.09, -27.3375], [153.0925, -27.3375], [153.0925, -27.34], [153.0975, -27.34], [153.0975, -27.3425], [153.1025, -27.3425], [153.1025, -27.345], [153.1075, -27.345], [153.1075, -27.3475], [153.11, -27.3475], [153.1125, -27.3475], [153.115, -27.3475], [153.115, -27.35], [153.12, -27.35], [153.12, -27.3525], [153.125, -27.3525], [153.125, -27.355], [153.13, -27.355], [153.13, -27.3575], [153.135, -27.3575], [153.135, -27.36], [153.1375, -27.36], [153.1425, -27.36], [153.1475, -27.36], [153.1525, -27.36], [153.1525, -27.3625], [153.155, -27.3625], [153.155, -27.365], [153.1575, -27.365], [153.1575, -27.375], [153.16, -27.375], [153.16, -27.3775], [153.16, -27.38], [153.1575, -27.38], [153.155, -27.38], [153.155, -27.3825], [153.1525, -27.3825], [153.1525, -27.385], [153.15, -27.385], [153.15, -27.3875], [153.145, -27.3875], [153.145, -27.39], [153.1425, -27.39], [153.1425, -27.3925], [153.14, -27.3925], [153.14, -27.395], [153.14, -27.3975], [153.14, -27.4], [153.1375, -27.4], [153.1375, -27.4025], [153.135, -27.4025], [153.135, -27.405], [153.135, -27.4075], [153.135, -27.4125], [153.135, -27.415], [153.13, -27.415], [153.13, -27.4175], [153.1275, -27.4175], [153.1225, -27.4175], [153.1225, -27.42], [153.1175, -27.42], [153.1125, -27.42], [153.1125, -27.4175], [153.11, -27.4175], [153.11, -27.415], [153.1075, -27.415], [153.1075, -27.4125], [153.0975, -27.4125], [153.0975, -27.415], [153.0925, -27.415], [153.0875, -27.415], [153.085, -27.415], [153.08, -27.415], [153.08, -27.4125], [153.0775, -27.4125], [153.0775, -27.41], [153.075, -27.41], [153.075, -27.405], [153.07, -27.405], [153.07, -27.4025], [153.0675, 
-27.4025], [153.0675, -27.4], [153.065, -27.4], [153.065, -27.3975], [153.0625, -27.3975], [153.0625, -27.395], [153.06, -27.395], [153.06, -27.3925], [153.0275, -27.3925], [153.0275, -27.395], [153.025, -27.395], [153.025, -27.3975], [153.0175, -27.3975], [153.0175, -27.4], [153.0125, -27.4], [153.0125, -27.4025], [153.005, -27.4025], [153.005, -27.405], [153.0025, -27.405], [152.9975, -27.405], [152.9975, -27.4025], [152.9925, -27.4025], [152.9925, -27.4], [152.9875, -27.4], [152.9825, -27.4], [152.9825, -27.3975], [152.98, -27.3975], [152.98, -27.3925], [152.975, -27.3925], [152.975, -27.3875], [152.97, -27.3875], [152.96, -27.3875], [152.96, -27.39], [152.955, -27.39], [152.955, -27.3925], [152.945, -27.3925], [152.94, -27.3925], [152.9375, -27.3925], [152.9375, -27.39], [152.925, -27.39], [152.925, -27.385], [152.925, -27.3825], [152.93, -27.3825], [152.9325, -27.3825], [152.9325, -27.38], [152.9375, -27.38], [152.9375, -27.3825], [152.94, -27.3825], [152.94, -27.38], [152.9475, -27.38], [152.9475, -27.3825], [152.9525, -27.3825], [152.9525, -27.38], [152.965, -27.38], [152.9675, -27.38], [152.9675, -27.3775], [152.98, -27.3775], [152.98, -27.375], [152.9825, -27.375], [152.9825, -27.3725], [152.985, -27.3725], [152.985, -27.37], [152.9875, -27.37], [152.9875, -27.3675], [152.99, -27.3675], [152.99, -27.3625], [152.9925, -27.3625], [152.9925, -27.355], [152.995, -27.355], [152.995, -27.3525], [153, -27.3525], [153, -27.35], [153.005, -27.35], [153.01, -27.35], [153.01, -27.3475], [153.0175, -27.3475], [153.0175, -27.335], [153.02, -27.335], [153.02, -27.33], [153.0225, -27.33], [153.0225, -27.3275], [153.025, -27.3275], [153.025, -27.325], [153.0275, -27.325], [153.0275, -27.3225], [153.03, -27.3225], [153.03, -27.32], [153.0325, -27.32], [153.0325, -27.3175], [153.035, -27.3175], [153.035, -27.305], [153.0375, -27.305], [153.0375, -27.3], [153.04, -27.3], [153.04, -27.2975], [153.0425, -27.2975], [153.0425, -27.2825], [153.04, -27.2825], [153.04, -27.28], [153.0425, -27.28], [153.05, -27.28], [153.06, -27.28]]]}'^^geo:geoJSONLiteral ; + geo:asWKT "POLYGON ((153.06 -27.28, 153.06 -27.2825, 153.0625 -27.2825, 153.065 -27.2825, 153.065 -27.305, 153.0675 -27.305, 153.0675 -27.31, 153.07 -27.31, 153.07 -27.3125, 153.0725 -27.3125, 153.0725 -27.3175, 153.075 -27.3175, 153.075 -27.32, 153.0775 -27.32, 153.0775 -27.3225, 153.08 -27.3225, 153.085 -27.3225, 153.085 -27.325, 153.0875 -27.325, 153.0875 -27.33, 153.085 -27.33, 153.0825 -27.33, 153.0825 -27.3325, 153.085 -27.3325, 153.085 -27.335, 153.0875 -27.335, 153.09 -27.335, 153.09 -27.3375, 153.0925 -27.3375, 153.0925 -27.34, 153.0975 -27.34, 153.0975 -27.3425, 153.1025 -27.3425, 153.1025 -27.345, 153.1075 -27.345, 153.1075 -27.3475, 153.11 -27.3475, 153.1125 -27.3475, 153.115 -27.3475, 153.115 -27.35, 153.12 -27.35, 153.12 -27.3525, 153.125 -27.3525, 153.125 -27.355, 153.13 -27.355, 153.13 -27.3575, 153.135 -27.3575, 153.135 -27.36, 153.1375 -27.36, 153.1425 -27.36, 153.1475 -27.36, 153.1525 -27.36, 153.1525 -27.3625, 153.155 -27.3625, 153.155 -27.365, 153.1575 -27.365, 153.1575 -27.375, 153.16 -27.375, 153.16 -27.3775, 153.16 -27.38, 153.1575 -27.38, 153.155 -27.38, 153.155 -27.3825, 153.1525 -27.3825, 153.1525 -27.385, 153.15 -27.385, 153.15 -27.3875, 153.145 -27.3875, 153.145 -27.39, 153.1425 -27.39, 153.1425 -27.3925, 153.14 -27.3925, 153.14 -27.395, 153.14 -27.3975, 153.14 -27.4, 153.1375 -27.4, 153.1375 -27.4025, 153.135 -27.4025, 153.135 -27.405, 153.135 -27.4075, 153.135 -27.4125, 153.135 -27.415, 153.13 -27.415, 153.13 -27.4175, 
153.1275 -27.4175, 153.1225 -27.4175, 153.1225 -27.42, 153.1175 -27.42, 153.1125 -27.42, 153.1125 -27.4175, 153.11 -27.4175, 153.11 -27.415, 153.1075 -27.415, 153.1075 -27.4125, 153.0975 -27.4125, 153.0975 -27.415, 153.0925 -27.415, 153.0875 -27.415, 153.085 -27.415, 153.08 -27.415, 153.08 -27.4125, 153.0775 -27.4125, 153.0775 -27.41, 153.075 -27.41, 153.075 -27.405, 153.07 -27.405, 153.07 -27.4025, 153.0675 -27.4025, 153.0675 -27.4, 153.065 -27.4, 153.065 -27.3975, 153.0625 -27.3975, 153.0625 -27.395, 153.06 -27.395, 153.06 -27.3925, 153.0275 -27.3925, 153.0275 -27.395, 153.025 -27.395, 153.025 -27.3975, 153.0175 -27.3975, 153.0175 -27.4, 153.0125 -27.4, 153.0125 -27.4025, 153.005 -27.4025, 153.005 -27.405, 153.0025 -27.405, 152.9975 -27.405, 152.9975 -27.4025, 152.9925 -27.4025, 152.9925 -27.4, 152.9875 -27.4, 152.9825 -27.4, 152.9825 -27.3975, 152.98 -27.3975, 152.98 -27.3925, 152.975 -27.3925, 152.975 -27.3875, 152.97 -27.3875, 152.96 -27.3875, 152.96 -27.39, 152.955 -27.39, 152.955 -27.3925, 152.945 -27.3925, 152.94 -27.3925, 152.9375 -27.3925, 152.9375 -27.39, 152.925 -27.39, 152.925 -27.385, 152.925 -27.3825, 152.93 -27.3825, 152.9325 -27.3825, 152.9325 -27.38, 152.9375 -27.38, 152.9375 -27.3825, 152.94 -27.3825, 152.94 -27.38, 152.9475 -27.38, 152.9475 -27.3825, 152.9525 -27.3825, 152.9525 -27.38, 152.965 -27.38, 152.9675 -27.38, 152.9675 -27.3775, 152.98 -27.3775, 152.98 -27.375, 152.9825 -27.375, 152.9825 -27.3725, 152.985 -27.3725, 152.985 -27.37, 152.9875 -27.37, 152.9875 -27.3675, 152.99 -27.3675, 152.99 -27.3625, 152.9925 -27.3625, 152.9925 -27.355, 152.995 -27.355, 152.995 -27.3525, 153 -27.3525, 153 -27.35, 153.005 -27.35, 153.01 -27.35, 153.01 -27.3475, 153.0175 -27.3475, 153.0175 -27.335, 153.02 -27.335, 153.02 -27.33, 153.0225 -27.33, 153.0225 -27.3275, 153.025 -27.3275, 153.025 -27.325, 153.0275 -27.325, 153.0275 -27.3225, 153.03 -27.3225, 153.03 -27.32, 153.0325 -27.32, 153.0325 -27.3175, 153.035 -27.3175, 153.035 -27.305, 153.0375 -27.305, 153.0375 -27.3, 153.04 -27.3, 153.04 -27.2975, 153.0425 -27.2975, 153.0425 -27.2825, 153.04 -27.2825, 153.04 -27.28, 153.0425 -27.28, 153.05 -27.28, 153.06 -27.28))"^^geo:wktLiteral + ] ; +. 
+ +sand:cc12109445 + a + geo:Feature , + geofab:ContractedCatchment ; + rdfs:label "Contracted Catchment 12109445" ; + dcterms:identifier "cc12109445"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON '{"type": "Polygon", "coordinates": [[[153.0025, -27.2775], [153.0025, -27.28], [153.005, -27.28], [153.005, -27.285], [153.0075, -27.285], [153.015, -27.285], [153.015, -27.29], [153.0175, -27.29], [153.0175, -27.2925], [153.0175, -27.3025], [153.02, -27.3025], [153.02, -27.305], [153.0225, -27.305], [153.0225, -27.31], [153.0175, -27.31], [153.0175, -27.3125], [153.015, -27.3125], [153.015, -27.315], [153.015, -27.3175], [153.0175, -27.3175], [153.0175, -27.32], [153.02, -27.32], [153.02, -27.3225], [153.0275, -27.3225], [153.0275, -27.325], [153.025, -27.325], [153.025, -27.3275], [153.0225, -27.3275], [153.0225, -27.33], [153.02, -27.33], [153.02, -27.335], [153.0175, -27.335], [153.0175, -27.3475], [153.01, -27.3475], [153.01, -27.35], [153.005, -27.35], [153, -27.35], [153, -27.3525], [152.995, -27.3525], [152.995, -27.355], [152.9925, -27.355], [152.9925, -27.3625], [152.99, -27.3625], [152.99, -27.3675], [152.9875, -27.3675], [152.9875, -27.37], [152.985, -27.37], [152.985, -27.3725], [152.9825, -27.3725], [152.9825, -27.375], [152.98, -27.375], [152.98, -27.3775], [152.9675, -27.3775], [152.9675, -27.38], [152.965, -27.38], [152.9525, -27.38], [152.9525, -27.3825], [152.9475, -27.3825], [152.9475, -27.38], [152.94, -27.38], [152.94, -27.3825], [152.9375, -27.3825], [152.9375, -27.38], [152.9325, -27.38], [152.9325, -27.3825], [152.93, -27.3825], [152.925, -27.3825], [152.925, -27.385], [152.92, -27.385], [152.92, -27.3825], [152.9075, -27.3825], [152.9075, -27.38], [152.9075, -27.375], [152.9075, -27.3725], [152.915, -27.3725], [152.915, -27.37], [152.92, -27.37], [152.92, -27.3675], [152.9225, -27.3675], [152.9225, -27.365], [152.925, -27.365], [152.925, -27.3625], [152.9275, -27.3625], [152.9275, -27.36], [152.9275, -27.3575], [152.925, -27.3575], [152.925, -27.355], [152.9225, -27.355], [152.9225, -27.3525], [152.92, -27.3525], [152.92, -27.35], [152.9175, -27.35], [152.9175, -27.345], [152.92, -27.345], [152.92, -27.3325], [152.9175, -27.3325], [152.9175, -27.33], [152.915, -27.33], [152.915, -27.3275], [152.9125, -27.3275], [152.9125, -27.325], [152.9125, -27.3225], [152.9225, -27.3225], [152.9225, -27.32], [152.925, -27.32], [152.925, -27.3175], [152.9275, -27.3175], [152.9275, -27.315], [152.93, -27.315], [152.93, -27.3125], [152.9325, -27.3125], [152.9325, -27.31], [152.935, -27.31], [152.935, -27.305], [152.94, -27.305], [152.94, -27.3025], [152.9425, -27.3025], [152.9425, -27.3], [152.945, -27.3], [152.945, -27.2975], [152.95, -27.2975], [152.95, -27.295], [152.955, -27.295], [152.9575, -27.295], [152.9575, -27.2925], [152.96, -27.2925], [152.96, -27.29], [152.9625, -27.29], [152.9625, -27.2875], [152.9675, -27.2875], [152.9675, -27.285], [152.9725, -27.285], [152.9725, -27.2825], [152.9775, -27.2825], [152.9775, -27.28], [152.98, -27.28], [152.9925, -27.28], [152.9925, -27.2775], [152.9975, -27.2775], [153.0025, -27.2775]]]}'^^geo:geoJSONLiteral ; + geo:asWKT "POLYGON ((153.0025 -27.2775, 153.0025 -27.28, 153.005 -27.28, 153.005 -27.285, 153.0075 -27.285, 153.015 -27.285, 153.015 -27.29, 153.0175 -27.29, 153.0175 -27.2925, 153.0175 -27.3025, 153.02 -27.3025, 153.02 -27.305, 153.0225 -27.305, 153.0225 -27.31, 153.0175 -27.31, 153.0175 -27.3125, 153.015 -27.3125, 153.015 -27.315, 153.015 -27.3175, 153.0175 -27.3175, 153.0175 -27.32, 153.02 -27.32, 153.02 
-27.3225, 153.0275 -27.3225, 153.0275 -27.325, 153.025 -27.325, 153.025 -27.3275, 153.0225 -27.3275, 153.0225 -27.33, 153.02 -27.33, 153.02 -27.335, 153.0175 -27.335, 153.0175 -27.3475, 153.01 -27.3475, 153.01 -27.35, 153.005 -27.35, 153 -27.35, 153 -27.3525, 152.995 -27.3525, 152.995 -27.355, 152.9925 -27.355, 152.9925 -27.3625, 152.99 -27.3625, 152.99 -27.3675, 152.9875 -27.3675, 152.9875 -27.37, 152.985 -27.37, 152.985 -27.3725, 152.9825 -27.3725, 152.9825 -27.375, 152.98 -27.375, 152.98 -27.3775, 152.9675 -27.3775, 152.9675 -27.38, 152.965 -27.38, 152.9525 -27.38, 152.9525 -27.3825, 152.9475 -27.3825, 152.9475 -27.38, 152.94 -27.38, 152.94 -27.3825, 152.9375 -27.3825, 152.9375 -27.38, 152.9325 -27.38, 152.9325 -27.3825, 152.93 -27.3825, 152.925 -27.3825, 152.925 -27.385, 152.92 -27.385, 152.92 -27.3825, 152.9075 -27.3825, 152.9075 -27.38, 152.9075 -27.375, 152.9075 -27.3725, 152.915 -27.3725, 152.915 -27.37, 152.92 -27.37, 152.92 -27.3675, 152.9225 -27.3675, 152.9225 -27.365, 152.925 -27.365, 152.925 -27.3625, 152.9275 -27.3625, 152.9275 -27.36, 152.9275 -27.3575, 152.925 -27.3575, 152.925 -27.355, 152.9225 -27.355, 152.9225 -27.3525, 152.92 -27.3525, 152.92 -27.35, 152.9175 -27.35, 152.9175 -27.345, 152.92 -27.345, 152.92 -27.3325, 152.9175 -27.3325, 152.9175 -27.33, 152.915 -27.33, 152.915 -27.3275, 152.9125 -27.3275, 152.9125 -27.325, 152.9125 -27.3225, 152.9225 -27.3225, 152.9225 -27.32, 152.925 -27.32, 152.925 -27.3175, 152.9275 -27.3175, 152.9275 -27.315, 152.93 -27.315, 152.93 -27.3125, 152.9325 -27.3125, 152.9325 -27.31, 152.935 -27.31, 152.935 -27.305, 152.94 -27.305, 152.94 -27.3025, 152.9425 -27.3025, 152.9425 -27.3, 152.945 -27.3, 152.945 -27.2975, 152.95 -27.2975, 152.95 -27.295, 152.955 -27.295, 152.9575 -27.295, 152.9575 -27.2925, 152.96 -27.2925, 152.96 -27.29, 152.9625 -27.29, 152.9625 -27.2875, 152.9675 -27.2875, 152.9675 -27.285, 152.9725 -27.285, 152.9725 -27.2825, 152.9775 -27.2825, 152.9775 -27.28, 152.98 -27.28, 152.9925 -27.28, 152.9925 -27.2775, 152.9975 -27.2775, 153.0025 -27.2775))"^^geo:wktLiteral + ] ; +. + +sand:cpc + a geo:Feature ; + rdfs:label "Carseldine Police Station" ; + dcterms:identifier "cpc"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0144819, -27.3506599]}"^^geo:geoJSONLiteral ; + geo:asWKT "POINT (153.0144819 -27.3506599)"^^geo:wktLiteral + ] ; +. 
+ +sand:f001 + a geo:Feature ; + rdfs:label "Flood 001" ; + dcterms:identifier "f001"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{ \"type\": \"Polygon\", \"coordinates\": [ [ [ 153.064893899999987, -27.2909981 ], [ 153.0648081, -27.2911506 ], [ 153.064475499999986, -27.2912364 ], [ 153.064078599999988, -27.2912269 ], [ 153.0635636, -27.291265 ], [ 153.0633383, -27.2913604 ], [ 153.0632417, -27.2914462 ], [ 153.0631559, -27.2915701 ], [ 153.0630808, -27.2917036 ], [ 153.06307, -27.2917704 ], [ 153.0631773, -27.2918943 ], [ 153.0633168, -27.2920564 ], [ 153.0634241, -27.2921613 ], [ 153.063767399999989, -27.2921994 ], [ 153.0642824, -27.2922757 ], [ 153.064400400000011, -27.292371 ], [ 153.0644111, -27.2926761 ], [ 153.0643897, -27.2928764 ], [ 153.0643682, -27.2930766 ], [ 153.06434680000001, -27.2932196 ], [ 153.0642824, -27.2934675 ], [ 153.0642824, -27.2935628 ], [ 153.0643682, -27.2936391 ], [ 153.0647223, -27.2937345 ], [ 153.0648296, -27.293744 ], [ 153.064893899999987, -27.2909981 ] ] ] }"^^geo:geoJSONLiteral ; + geo:asWKT "POLYGON ((153.0648939 -27.2909981,153.0648081 -27.2911506,153.0644755 -27.2912364,153.0640786 -27.2912269,153.0635636 -27.291265,153.0633383 -27.2913604,153.0632417 -27.2914462,153.0631559 -27.2915701,153.0630808 -27.2917036,153.06307 -27.2917704,153.0631773 -27.2918943,153.0633168 -27.2920564,153.0634241 -27.2921613,153.0637674 -27.2921994,153.0642824 -27.2922757,153.0644004 -27.292371,153.0644111 -27.2926761,153.0643897 -27.2928764,153.0643682 -27.2930766,153.0643468 -27.2932196,153.0642824 -27.2934675,153.0642824 -27.2935628,153.0643682 -27.2936391,153.0647223 -27.2937345,153.0648296 -27.293744,153.0648939 -27.2909981))"^^geo:wktLiteral + ] ; +. + +sand:f023 + a geo:Feature ; + rdfs:label "Flood 023" ; + dcterms:identifier "f023"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{ \"type\": \"Polygon\", \"coordinates\": [ [ [ 153.06487820000001, -27.30059 ], [ 153.0648031, -27.301019 ], [ 153.0648138, -27.3012955 ], [ 153.0648889, -27.3015815 ], [ 153.0648567, -27.3016768 ], [ 153.064824499999986, -27.3018198 ], [ 153.0648138, -27.3020295 ], [ 153.064824499999986, -27.3022965 ], [ 153.0647387, -27.3024109 ], [ 153.0641808, -27.3024776 ], [ 153.063698, -27.3025634 ], [ 153.0634512, -27.3026302 ], [ 153.063419, -27.3027827 ], [ 153.063440500000013, -27.303002 ], [ 153.0634619, -27.303307 ], [ 153.063622900000013, -27.3034501 ], [ 153.0638696, -27.3034882 ], [ 153.0643095, -27.3035454 ], [ 153.0645456, -27.3036026 ], [ 153.0647923, -27.3037456 ], [ 153.0650176, -27.3039553 ], [ 153.0652, -27.3041174 ], [ 153.065318, -27.3042413 ], [ 153.0653931, -27.3045083 ], [ 153.0655112, -27.3047371 ], [ 153.065790099999987, -27.3050803 ], [ 153.0660476, -27.3052519 ], [ 153.0656935, -27.3037551 ], [ 153.0652215, -27.30243 ], [ 153.06487820000001, -27.30059 ] ] ] }"^^geo:geoJSONLiteral ; + geo:asWKT "POLYGON ((153.0648782 -27.30059,153.0648031 -27.301019,153.0648138 -27.3012955,153.0648889 -27.3015815,153.0648567 -27.3016768,153.0648245 -27.3018198,153.0648138 -27.3020295,153.0648245 -27.3022965,153.0647387 -27.3024109,153.0641808 -27.3024776,153.063698 -27.3025634,153.0634512 -27.3026302,153.063419 -27.3027827,153.0634405 -27.303002,153.0634619 -27.303307,153.0636229 -27.3034501,153.0638696 -27.3034882,153.0643095 -27.3035454,153.0645456 -27.3036026,153.0647923 -27.3037456,153.0650176 -27.3039553,153.0652 -27.3041174,153.065318 -27.3042413,153.0653931 -27.3045083,153.0655112 -27.3047371,153.0657901 -27.3050803,153.0660476 
-27.3052519,153.0656935 -27.3037551,153.0652215 -27.30243,153.0648782 -27.30059))"^^geo:wktLiteral + ] ; +. + +sand:f332 + a geo:Feature ; + rdfs:label "Flood 332" ; + dcterms:identifier "f332"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{ \"type\": \"Polygon\", \"coordinates\": [ [ [ 153.068289099999987, -27.3113685 ], [ 153.0681389, -27.3108346 ], [ 153.0676454, -27.3103961 ], [ 153.0673021, -27.3096144 ], [ 153.0670231, -27.3088708 ], [ 153.066615399999989, -27.3088327 ], [ 153.0659932, -27.3089662 ], [ 153.0656928, -27.3091568 ], [ 153.065564, -27.3095381 ], [ 153.0658215, -27.310377 ], [ 153.0659073, -27.3107774 ], [ 153.0660361, -27.3111587 ], [ 153.0665725, -27.3113685 ], [ 153.066744199999988, -27.3115973 ], [ 153.0674094, -27.3130272 ], [ 153.0676669, -27.3135419 ], [ 153.0680102, -27.3142473 ], [ 153.0685466, -27.3151243 ], [ 153.0693191, -27.3150862 ], [ 153.0698126, -27.3147049 ], [ 153.069877, -27.3145143 ], [ 153.06970530000001, -27.3140376 ], [ 153.0694479, -27.3134085 ], [ 153.069147500000014, -27.31297 ], [ 153.0688041, -27.3124552 ], [ 153.068375, -27.3120548 ], [ 153.068074599999989, -27.3117498 ], [ 153.068289099999987, -27.3113685 ] ] ] }"^^geo:geoJSONLiteral ; + geo:asWKT "POLYGON ((153.0682891 -27.3113685,153.0681389 -27.3108346,153.0676454 -27.3103961,153.0673021 -27.3096144,153.0670231 -27.3088708,153.0666154 -27.3088327,153.0659932 -27.3089662,153.0656928 -27.3091568,153.065564 -27.3095381,153.0658215 -27.310377,153.0659073 -27.3107774,153.0660361 -27.3111587,153.0665725 -27.3113685,153.0667442 -27.3115973,153.0674094 -27.3130272,153.0676669 -27.3135419,153.0680102 -27.3142473,153.0685466 -27.3151243,153.0693191 -27.3150862,153.0698126 -27.3147049,153.069877 -27.3145143,153.0697053 -27.3140376,153.0694479 -27.3134085,153.0691475 -27.31297,153.0688041 -27.3124552,153.068375 -27.3120548,153.0680746 -27.3117498,153.0682891 -27.3113685))"^^geo:wktLiteral + ] ; +. 
+ +sand:f632 + a geo:Feature ; + rdfs:label "Flood 632" ; + dcterms:identifier "f632"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{ \"type\": \"Polygon\", \"coordinates\": [ [ [ 153.0649154, -27.2906357 ], [ 153.0650656, -27.2892818 ], [ 153.0651407, -27.288233 ], [ 153.06513, -27.287413 ], [ 153.0650656, -27.2859541 ], [ 153.0649905, -27.2861353 ], [ 153.065012, -27.2863737 ], [ 153.065001200000012, -27.2868218 ], [ 153.0649583, -27.2871079 ], [ 153.0648296, -27.2873463 ], [ 153.0646472, -27.2873939 ], [ 153.064604300000013, -27.2875274 ], [ 153.0646365, -27.2877849 ], [ 153.0646686, -27.2879183 ], [ 153.0646686, -27.2882711 ], [ 153.0646365, -27.2885762 ], [ 153.0642609, -27.2886716 ], [ 153.0640678, -27.2888623 ], [ 153.064035600000011, -27.2890816 ], [ 153.064293099999986, -27.2894248 ], [ 153.064379, -27.2897204 ], [ 153.0642288, -27.2899206 ], [ 153.064057100000014, -27.2899969 ], [ 153.0639605, -27.2902353 ], [ 153.0639927, -27.2904069 ], [ 153.064110699999986, -27.2905309 ], [ 153.0642824, -27.2906644 ], [ 153.064497, -27.2907216 ], [ 153.064657899999986, -27.2907406 ], [ 153.064818800000012, -27.2907406 ], [ 153.0649154, -27.2906357 ] ] ] }"^^geo:geoJSONLiteral ; + geo:asWKT "POLYGON ((153.0649154 -27.2906357,153.0650656 -27.2892818,153.0651407 -27.288233,153.06513 -27.287413,153.0650656 -27.2859541,153.0649905 -27.2861353,153.065012 -27.2863737,153.0650012 -27.2868218,153.0649583 -27.2871079,153.0648296 -27.2873463,153.0646472 -27.2873939,153.0646043 -27.2875274,153.0646365 -27.2877849,153.0646686 -27.2879183,153.0646686 -27.2882711,153.0646365 -27.2885762,153.0642609 -27.2886716,153.0640678 -27.2888623,153.0640356 -27.2890816,153.0642931 -27.2894248,153.064379 -27.2897204,153.0642288 -27.2899206,153.0640571 -27.2899969,153.0639605 -27.2902353,153.0639927 -27.2904069,153.0641107 -27.2905309,153.0642824 -27.2906644,153.064497 -27.2907216,153.0646579 -27.2907406,153.0648188 -27.2907406,153.0649154 -27.2906357))"^^geo:wktLiteral + ] ; +. + +sand:fp + a geo:Feature ; + rdfs:label "Flinder Parade" ; + dcterms:identifier "fp"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{ \"type\": \"LineString\", \"coordinates\": [ [ 153.074756900000011, -27.3203138 ], [ 153.0727077, -27.3183121 ], [ 153.0715276, -27.3170824 ], [ 153.070519, -27.3157669 ], [ 153.0694891, -27.3143847 ], [ 153.067751, -27.311115 ], [ 153.0664635, -27.3072446 ], [ 153.0656267, -27.3047468 ], [ 153.065111699999989, -27.3031262 ], [ 153.0647898, -27.301677 ], [ 153.064510899999988, -27.3000372 ], [ 153.0644036, -27.2984546 ], [ 153.0643392, -27.2973296 ], [ 153.06459670000001, -27.2953656 ], [ 153.0646396, -27.2936494 ], [ 153.0644465, -27.2922764 ] ] }"^^geo:geoJSONLiteral ; + geo:asWKT "LINESTRING (153.0747569 -27.3203138,153.0727077 -27.3183121,153.0715276 -27.3170824,153.070519 -27.3157669,153.0694891 -27.3143847,153.067751 -27.311115,153.0664635 -27.3072446,153.0656267 -27.3047468,153.0651117 -27.3031262,153.0647898 -27.301677,153.0645109 -27.3000372,153.0644036 -27.2984546,153.0643392 -27.2973296,153.0645967 -27.2953656,153.0646396 -27.2936494,153.0644465 -27.2922764)"^^geo:wktLiteral + ] ; +. + +sand:jcabi + a geo:Feature ; + rdfs:label "Jacana Centre for Acquired Brain Injury" ; + dcterms:identifier "jcabi"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0632873, -27.2918652]}"^^geo:geoJSONLiteral ; + geo:asWKT "POINT (153.0632873 -27.2918652)"^^geo:wktLiteral + ] ; +. 
+ +sand:rps + a geo:Feature ; + rdfs:label "Redcliffe Police Station" ; + dcterms:identifier "rps"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.1143102, -27.2234024]}"^^geo:geoJSONLiteral ; + geo:asWKT "POINT (153.1143102 -27.2234024)"^^geo:wktLiteral + ] ; +. + +sand:sac + a geo:Feature ; + rdfs:label "Sandgate Aquatic Centre" ; + dcterms:identifier "sac"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0688897, -27.3122011]}"^^geo:geoJSONLiteral ; + geo:asWKT "POINT (153.0688897 -27.3122011)"^^geo:wktLiteral + ] ; +. + +sand:sps + a geo:Feature ; + rdfs:label "Sandgate Police Station" ; + dcterms:identifier "sps"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0677583, -27.318185]}"^^geo:geoJSONLiteral ; + geo:asWKT "POINT (153.0677583 -27.318185)"^^geo:wktLiteral + ] ; +. + +sand:src + a geo:Feature ; + rdfs:label "Sandgate Respite Centre" ; + dcterms:identifier "src"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0614757, -27.3111489]}"^^geo:geoJSONLiteral ; + geo:asWKT "POINT (153.0614757 -27.3111489)"^^geo:wktLiteral + ] ; +. + +sand:srca + a geo:Feature ; + rdfs:label "Sandgate Respite Centre Area" ; + dcterms:identifier "srca"^^xsd:token ; + geo:hasGeometry [ + a geo:Geometry ; + geo:asGeoJSON "{\"type\": \"Polygon\", \"coordinates\": [[[153.0606281,-27.3096141], [153.0604564,-27.3105197], [153.0600487,-27.3109296], [153.0607354,-27.3127218], [153.063203,-27.3121212], [153.0621623,-27.3095187], [153.0617868,-27.3098333], [153.0606281,-27.3096141]]]}"^^geo:geoJSONLiteral ; + geo:asWKT "POLYGON ((153.0606281 -27.3096141, 153.0604564 -27.3105197, 153.0600487 -27.3109296, 153.0607354 -27.3127218, 153.063203 -27.3121212, 153.0621623 -27.3095187, 153.0617868 -27.3098333, 153.0606281 -27.3096141))"^^geo:wktLiteral + ] ; +. 
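
For orientation: sandgate.ttl links the dataset to its four geo:FeatureCollection instances, and each collection to its geo:Feature members, via rdfs:member; every geometry carries parallel geo:asWKT and geo:asGeoJSON serialisations. A minimal SPARQL sketch (illustrative only, not part of the fixture or the patch) that walks this structure once the file is loaded into a store:

    PREFIX geo: <http://www.opengis.net/ont/geosparql#>
    PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>

    # list each feature per collection together with its WKT geometry
    SELECT ?collection ?feature ?wkt
    WHERE {
        ?collection a geo:FeatureCollection ;
                    rdfs:member ?feature .
        ?feature geo:hasGeometry/geo:asWKT ?wkt .
    }
    ORDER BY ?collection ?feature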
From 17b12dc5fe75d36aeef29f7b228366f0030d87c3 Mon Sep 17 00:00:00 2001 From: david Date: Wed, 14 Feb 2024 00:56:28 +1000 Subject: [PATCH 13/25] Reorganise repo --- prez/app.py | 2 +- prez/dependencies.py | 2 +- prez/models/profiles_and_mediatypes.py | 2 +- .../endpoints/cql_endpoints.ttl | 23 - .../endpoints/endpoint_metadata.ttl | 5 + .../endpoint_node_selection_shapes.ttl | 26 + .../endpoints/ogc_catprez_endpoints.ttl.old | 81 --- .../endpoints/ogc_spaceprez_endpoints.ttl.old | 78 --- .../endpoints/ogc_vocprez_endpoints.ttl.old | 124 ----- .../endpoints/vocprez_endpoints.ttl.unused | 100 ---- .../profiles/ogc_records_profile.ttl | 4 +- prez/renderers/json_renderer.py | 29 +- prez/renderers/renderer.py | 4 +- prez/repositories/__init__.py | 6 + prez/repositories/base.py | 61 ++ prez/repositories/oxrdflib.py | 44 ++ prez/repositories/pyoxigraph.py | 100 ++++ prez/repositories/remote_sparql.py | 70 +++ prez/routers/cql.py | 53 +- prez/routers/ogc_router.py | 2 +- prez/routers/search.py | 4 +- prez/routers/sparql.py | 2 +- prez/services/annotations.py | 148 +++++ prez/services/app_service.py | 9 +- prez/services/cql_search.py | 178 ------ prez/services/generate_profiles.py | 7 +- prez/services/link_generation.py | 54 +- prez/services/listings.py | 12 +- prez/services/objects.py | 6 +- .../classes.py} | 3 +- prez/services/query_generation/connegp.py | 123 ++++ .../query_generation/count.py} | 31 ++ .../services/query_generation}/cql2sparql.py | 33 +- .../query_generation}/cql_sparql_reference.py | 0 .../default_cql_context.json | 0 .../query_generation/search.py} | 0 .../query_generation}/shacl_node_selection.py | 2 +- .../services/query_generation/umbrella.py | 7 +- prez/sparql/methods.py | 237 -------- prez/sparql/objects_listings.py | 523 ------------------ temp/grammar/grammar.py | 23 +- temp/test_search.py | 13 - test_data/spaceprez.ttl | 9 +- tests/_test_cql.py | 2 +- tests/test_count.py | 2 +- tests/test_dd_profiles.py | 2 +- tests/test_endpoints_cache.py | 2 +- tests/test_endpoints_catprez.py | 14 +- tests/test_endpoints_management.py | 2 +- tests/test_endpoints_object.py | 2 +- tests/test_endpoints_ok.py | 52 +- tests/test_endpoints_profiles.py | 6 +- tests/test_endpoints_spaceprez.py | 34 +- tests/test_endpoints_vocprez.py | 2 +- ...arsing.py => test_node_selection_shacl.py} | 4 +- tests/test_redirect_endpoint.py | 2 +- tests/test_search.py | 2 +- tests/test_search_grammar.py | 238 ++++++++ tests/test_sparql.py | 2 +- 59 files changed, 1017 insertions(+), 1591 deletions(-) delete mode 100755 prez/reference_data/endpoints/ogc_catprez_endpoints.ttl.old delete mode 100755 prez/reference_data/endpoints/ogc_spaceprez_endpoints.ttl.old delete mode 100755 prez/reference_data/endpoints/ogc_vocprez_endpoints.ttl.old delete mode 100755 prez/reference_data/endpoints/vocprez_endpoints.ttl.unused create mode 100644 prez/repositories/__init__.py create mode 100755 prez/repositories/base.py create mode 100644 prez/repositories/oxrdflib.py create mode 100644 prez/repositories/pyoxigraph.py create mode 100644 prez/repositories/remote_sparql.py create mode 100755 prez/services/annotations.py delete mode 100755 prez/services/cql_search.py rename prez/services/{model_methods.py => query_generation/classes.py} (93%) create mode 100644 prez/services/query_generation/connegp.py rename prez/{sparql/count_query.py => services/query_generation/count.py} (66%) rename {temp => prez/services/query_generation}/cql2sparql.py (95%) rename {temp => prez/services/query_generation}/cql_sparql_reference.py (100%) 
rename {temp => prez/services/query_generation}/default_cql_context.json (100%) rename prez/{sparql/search_query.py => services/query_generation/search.py} (100%) rename {temp => prez/services/query_generation}/shacl_node_selection.py (99%) rename temp/shacl2sparql.py => prez/services/query_generation/umbrella.py (99%) delete mode 100755 prez/sparql/methods.py delete mode 100755 prez/sparql/objects_listings.py delete mode 100755 temp/test_search.py rename tests/{test_shacl_parsing.py => test_node_selection_shacl.py} (89%) create mode 100644 tests/test_search_grammar.py diff --git a/prez/app.py b/prez/app.py index 75332fe9..c3f26f11 100755 --- a/prez/app.py +++ b/prez/app.py @@ -46,7 +46,7 @@ ) from prez.services.generate_profiles import create_profiles_graph from prez.services.prez_logging import setup_logger -from prez.sparql.methods import RemoteSparqlRepo, PyoxigraphRepo, OxrdflibRepo +from prez.repositories import RemoteSparqlRepo, PyoxigraphRepo, OxrdflibRepo app = FastAPI( exception_handlers={ diff --git a/prez/dependencies.py b/prez/dependencies.py index edf6ac6c..eb4e1cc5 100755 --- a/prez/dependencies.py +++ b/prez/dependencies.py @@ -15,7 +15,7 @@ endpoints_graph_cache, ) from prez.config import settings -from prez.sparql.methods import PyoxigraphRepo, RemoteSparqlRepo, OxrdflibRepo +from prez.repositories import PyoxigraphRepo, RemoteSparqlRepo, OxrdflibRepo from rdframe import CQLParser diff --git a/prez/models/profiles_and_mediatypes.py b/prez/models/profiles_and_mediatypes.py index 6572b5ed..92f89148 100755 --- a/prez/models/profiles_and_mediatypes.py +++ b/prez/models/profiles_and_mediatypes.py @@ -6,7 +6,7 @@ from prez.services.generate_profiles import get_profiles_and_mediatypes from prez.services.connegp_service import get_requested_profile_and_mediatype -from prez.sparql.methods import Repo +from prez.repositories import Repo PREZ = Namespace("https://prez.dev/") diff --git a/prez/reference_data/endpoints/cql_endpoints.ttl b/prez/reference_data/endpoints/cql_endpoints.ttl index 86fb76e1..42e423f7 100755 --- a/prez/reference_data/endpoints/cql_endpoints.ttl +++ b/prez/reference_data/endpoints/cql_endpoints.ttl @@ -15,26 +15,3 @@ endpoint:get a ont:ListingEndpoint ; shext:offset 0 ; . -endpoint:queryables a ont:ListingEndpoint ; - sh:rule [ sh:subject "?focus_node" ; - sh:predicate ; - sh:object ] ; - ont:deliversClasses prez:QueryablesList ; - sh:target [ sh:select """SELECT DISTINCT ?focus_node - WHERE { - ?s a ?class ; - ?focus_node ?o . - VALUES ?class { - dcat:Catalog - dcat:Dataset - dcat:Resource - skos:ConceptScheme - skos:Collection - skos:Concept - geo:FeatureCollection - geo:Feature - } - }""" ] ; - shext:limit 100 ; - shext:offset 0 ; -. \ No newline at end of file diff --git a/prez/reference_data/endpoints/endpoint_metadata.ttl b/prez/reference_data/endpoints/endpoint_metadata.ttl index 98d97948..d08ad3b2 100644 --- a/prez/reference_data/endpoints/endpoint_metadata.ttl +++ b/prez/reference_data/endpoints/endpoint_metadata.ttl @@ -43,3 +43,8 @@ ogce:item-object a ont:ObjectEndpoint ; ont:relevantShapes ex:Feature , ex:ConceptSchemeConcept , ex:CollectionConcept , ex:Resource ; . + +ogce:cql-queryables + a ont:ListingEndpoint ; + ont:relevantShapes ex:queryables ; +. 
\ No newline at end of file diff --git a/prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl b/prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl index 58b17045..32e569fa 100644 --- a/prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl +++ b/prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl @@ -7,6 +7,7 @@ @prefix prof: . @prefix rdfs: . @prefix sh: . +@prefix shext: . @prefix skos: . ex:TopLevelCatalogs @@ -84,4 +85,29 @@ ex:Profiles a sh:NodeShape ; ont:hierarchyLevel 1 ; sh:targetClass prof:Profile ; +. + +ex:queryables a sh:NodeShape ; + ont:hierarchyLevel 1 ; + sh:rule [ sh:subject "?focus_node" ; + sh:predicate ; + sh:object ] ; + ont:deliversClasses prez:QueryablesList ; + sh:target [ sh:select """SELECT DISTINCT ?focus_node + WHERE { + ?s a ?class ; + ?focus_node ?o . + VALUES ?class { + dcat:Catalog + dcat:Dataset + dcat:Resource + skos:ConceptScheme + skos:Collection + skos:Concept + geo:FeatureCollection + geo:Feature + } + }""" ] ; + shext:limit 100 ; + shext:offset 0 ; . \ No newline at end of file diff --git a/prez/reference_data/endpoints/ogc_catprez_endpoints.ttl.old b/prez/reference_data/endpoints/ogc_catprez_endpoints.ttl.old deleted file mode 100755 index 7f7a0557..00000000 --- a/prez/reference_data/endpoints/ogc_catprez_endpoints.ttl.old +++ /dev/null @@ -1,81 +0,0 @@ -PREFIX dcat: -PREFIX dcterms: -PREFIX endpoint: -PREFIX prez: -PREFIX ont: -PREFIX sh: -prefix skos: -PREFIX shext: -PREFIX xsd: - - -endpoint:top-level-catalog-listing a ont:ListingEndpoint ; - ont:endpointTemplate "/c/catalogs" ; - ont:deliversClasses prez:CatalogList ; # required to determine the correct profile for ConnegP - sh:targetClass dcat:Catalog ; # required for query construction - shext:limit 20 ; - shext:offset 0 ; - sh:target [ - sh:select """SELECT ?focus_node - WHERE { - ?focus_node dcterms:hasPart ?child_catalog . - ?child_catalog a dcat:Catalog . - } - """ ] ; -. - -endpoint:top-level-catalog-object a ont:ObjectEndpoint ; - sh:targetNode "$object" ; - ont:deliversClasses dcat:Catalog ; # required for link generation for objects - ont:endpointTemplate "/c/catalogs/$object" ; - ont:parentEndpoint endpoint:top-level-catalog-listing ; -. - -endpoint:lower-level-catalog-listing a ont:ListingEndpoint ; - ont:endpointTemplate "/c/catalogs/$parent_1/collections" ; - ont:deliversClasses prez:CatalogList ; # required to determine the correct profile for ConnegP - ont:parentToFocusRelation dcterms:hasPart ; - sh:targetClass dcat:Catalog ; # required for query construction - ont:parentEndpoint endpoint:top-level-catalog-object ; - shext:limit 20 ; - shext:offset 0 ; - sh:target [ - sh:select """SELECT ?focus_node - WHERE { - $parent_1 dcterms:hasPart ?focus_node . - } - """ ] ; -. - -endpoint:lower-level-catalog-object a ont:ObjectEndpoint ; - sh:targetNode "$object" ; - ont:deliversClasses dcat:Catalog ; # required for link generation for objects - ont:endpointTemplate "/c/catalogs/$parent_1/collections/$object" ; - ont:parentToFocusRelation dcterms:hasPart ; - ont:parentEndpoint endpoint:lower-level-catalog-listing ; -. 
- -endpoint:resource-listing a ont:ListingEndpoint ; - ont:endpointTemplate "/c/catalogs/$parent_2/collections/$parent_1/items" ; - ont:deliversClasses prez:ResourceList ; # required to determine the correct profile for ConnegP - ont:parentToFocusRelation dcterms:hasPart ; - sh:targetClass dcat:Resource ; # required for query construction - ont:parentEndpoint endpoint:lower-level-catalog-object ; - shext:limit 20 ; - shext:offset 0 ; - sh:target [ - sh:select """SELECT ?focus_node - WHERE { - $parent_1 dcterms:hasPart ?focus_node . - } - """ ] ; -. - -endpoint:resource-object a ont:ObjectEndpoint ; - sh:targetNode "$object" ; - ont:deliversClasses dcat:Resource ; # required for link generation for objects - ont:endpointTemplate "/c/catalogs/$parent_2/collections/$parent_1/items/$object" ; - ont:parentToFocusRelation dcterms:hasPart ; - ont:parentEndpoint endpoint:resource-listing ; -. - diff --git a/prez/reference_data/endpoints/ogc_spaceprez_endpoints.ttl.old b/prez/reference_data/endpoints/ogc_spaceprez_endpoints.ttl.old deleted file mode 100755 index 9d53c519..00000000 --- a/prez/reference_data/endpoints/ogc_spaceprez_endpoints.ttl.old +++ /dev/null @@ -1,78 +0,0 @@ -PREFIX dcat: -PREFIX endpoint: -PREFIX geo: -PREFIX ont: -PREFIX prez: -PREFIX rdfs: -PREFIX sh: -PREFIX xsd: -PREFIX shext: - -endpoint:spaceprez-home a ont:Endpoint ; - ont:endpointTemplate "/s" ; -. - -endpoint:dataset-listing a ont:ListingEndpoint ; - ont:deliversClasses prez:DatasetList ; - sh:targetClass dcat:Dataset ; - ont:endpointTemplate "/s/catalogs" ; - shext:limit 20 ; - shext:offset 0 ; -. - -endpoint:dataset-object a ont:ObjectEndpoint ; - sh:targetNode "$object" ; - ont:parentEndpoint endpoint:dataset-listing ; - ont:deliversClasses dcat:Dataset ; - ont:endpointTemplate "/s/catalogs/$object" ; -. - -endpoint:feature-collection-listing a ont:ListingEndpoint ; - ont:parentEndpoint endpoint:dataset-object ; - sh:targetClass geo:FeatureCollection ; - ont:deliversClasses prez:FeatureCollectionList ; - ont:endpointTemplate "/s/catalogs/$parent_1/collections" ; - ont:parentToFocusRelation rdfs:member ; - shext:limit 20 ; - shext:offset 0 ; - shext:orderBy [ sh:path rdfs:label ] ; - sh:target [ - sh:select """SELECT ?focus_node - WHERE { - $parent_1 rdfs:member ?focus_node . - } - """ ] ; -. - -endpoint:feature-collection-object a ont:ObjectEndpoint ; - sh:targetNode "$object" ; - ont:parentEndpoint endpoint:feature-collection-listing ; - ont:deliversClasses geo:FeatureCollection ; - ont:endpointTemplate "/s/catalogs/$parent_1/collections/$object" ; - ont:parentToFocusRelation rdfs:member ; -. - -endpoint:feature-listing a ont:ListingEndpoint ; - ont:parentEndpoint endpoint:feature-collection-object ; - sh:targetClass geo:Feature ; - ont:deliversClasses prez:FeatureList ; - ont:endpointTemplate "/s/catalogs/$parent_2/collections/$parent_1/items" ; - ont:parentToFocusRelation rdfs:member ; - shext:limit 20 ; - shext:offset 0 ; - shext:orderBy [ sh:path rdfs:label ] ; - sh:target [ - sh:select """SELECT ?focus_node - WHERE { - $parent_1 rdfs:member ?focus_node . - } - """ ] ; -. - -endpoint:feature-object a ont:ObjectEndpoint ; - sh:targetNode "$object" ; - ont:parentEndpoint endpoint:feature-listing ; - ont:deliversClasses geo:Feature ; - ont:endpointTemplate "/s/catalogs/$parent_2/collections/$parent_1/items/$object" ; - ont:parentToFocusRelation rdfs:member ; -. 
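
A note on the *.ttl.old endpoint files being deleted here: their sh:target / sh:select queries are templates in which $parent_1 (and $parent_2) stand for the IRI of the parent object, while shext:limit and shext:offset supply the paging values. As a rough sketch only, the feature-listing target above, with its defaults and a hypothetical parent IRI, would resolve to something like:

    PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>

    SELECT ?focus_node
    WHERE {
        # <https://example.org/dataset/sandgate> is a hypothetical stand-in for $parent_1
        <https://example.org/dataset/sandgate> rdfs:member ?focus_node .
    }
    LIMIT 20   # from shext:limit
    OFFSET 0   # from shext:offset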
diff --git a/prez/reference_data/endpoints/ogc_vocprez_endpoints.ttl.old b/prez/reference_data/endpoints/ogc_vocprez_endpoints.ttl.old deleted file mode 100755 index e1053b87..00000000 --- a/prez/reference_data/endpoints/ogc_vocprez_endpoints.ttl.old +++ /dev/null @@ -1,124 +0,0 @@ -PREFIX dcat: -PREFIX dcterms: -PREFIX endpoint: -PREFIX prez: -PREFIX ont: -PREFIX sh: -prefix skos: -PREFIX shext: -PREFIX xsd: -PREFIX rdfs: - - -endpoint:catalog-listing a ont:ListingEndpoint ; - ont:endpointTemplate "/v/catalogs" ; - ont:deliversClasses prez:CatalogList ; # required to determine the correct profile for ConnegP - sh:targetClass dcat:Catalog ; # required for query construction - shext:limit 20 ; - shext:offset 0 ; - sh:target [ - sh:select """SELECT ?focus_node - WHERE { - ?focus_node dcterms:hasPart ?child_vocab . - ?child_vocab a skos:ConceptScheme . - } - """ ] ; -. - -endpoint:catalog-object a ont:ObjectEndpoint ; - sh:targetNode "$object" ; - ont:deliversClasses dcat:Catalog ; # required for link generation for objects - ont:endpointTemplate "/v/catalogs/$object" ; - ont:parentEndpoint endpoint:catalog-listing ; -. - -endpoint:vocab-listing a ont:ListingEndpoint ; - ont:endpointTemplate "/v/catalogs/$parent_1/collections" ; - ont:deliversClasses prez:SchemesList ; # required to determine the correct profile for ConnegP - ont:parentToFocusRelation dcterms:hasPart ; - sh:targetClass skos:ConceptScheme ; # required for query construction - ont:parentEndpoint endpoint:catalog-object ; - shext:limit 20 ; - shext:offset 0 ; - sh:target [ - sh:select """SELECT ?focus_node - WHERE { - $parent_1 dcterms:hasPart ?focus_node . - } - """ ] ; -. - -endpoint:vocab-object a ont:ObjectEndpoint ; - sh:targetNode "$object" ; - ont:deliversClasses skos:ConceptScheme ; # required for link generation for objects - ont:endpointTemplate "/v/catalogs/$parent_1/collections/$object" ; - ont:parentToFocusRelation dcterms:hasPart ; - ont:parentEndpoint endpoint:vocab-listing ; -. - -endpoint:concept-listing a ont:ListingEndpoint ; - ont:endpointTemplate "/v/catalogs/$parent_2/collections/$parent_1/items" ; - ont:deliversClasses prez:ConceptList ; # required to determine the correct profile for ConnegP - ont:focusToParentRelation skos:inScheme ; - sh:targetClass skos:Concept ; # required for query construction - ont:parentEndpoint endpoint:vocab-object ; - shext:limit 20 ; - shext:offset 0 ; - sh:target [ - sh:select """SELECT ?focus_node - WHERE { - ?focus_node skos:inScheme $parent_1 . - } - """ ] ; -. - -endpoint:concept-object a ont:ObjectEndpoint ; - sh:targetNode "$object" ; - ont:deliversClasses skos:Concept ; # required for link generation for objects - ont:endpointTemplate "/v/catalogs/$parent_2/collections/$parent_1/items/$object" ; - ont:focusToParentRelation skos:inScheme ; - ont:parentEndpoint endpoint:concept-listing ; -. - -endpoint:narrowers - a ont:ListingEndpoint ; - ont:parentEndpoint endpoint:concept-object ; - ont:deliversClasses prez:ConceptList ; - sh:rule - [ - sh:object "?hasChildren" ; - sh:predicate prez:hasChildren ; - sh:subject sh:this ; - ] ; - sh:target - [ - sh:select """SELECT DISTINCT ?focus_node ?hasChildren - WHERE { - $parent_1 skos:narrower|^skos:broader ?focus_node . - BIND(EXISTS{?focus_node skos:narrower|^skos:broader ?grandChildren} AS ?hasChildren) - } - """ ; - ] ; - shext:limit 20 ; - shext:offset 0 ; - shext:orderBy [ sh:path skos:prefLabel ] ; - sh:targetClass skos:Concept ; -. 
- -endpoint:top-concepts a ont:ListingEndpoint ; - ont:deliversClasses prez:ConceptList ; - ont:parentEndpoint endpoint:vocab-object ; - sh:rule [ sh:subject sh:this ; - sh:predicate prez:hasChildren ; - sh:object "?hasChildren" ] ; - sh:target [ sh:select """SELECT DISTINCT ?focus_node ?hasChildren - WHERE { - $parent_1 skos:hasTopConcept|^skos:isTopConceptOf ?focus_node . - BIND(EXISTS{?focus_node skos:narrower|^skos:broader ?grandChildren} AS ?hasChildren) - } - """ ] ; - shext:limit 20 ; - shext:offset 0 ; - shext:orderBy [ sh:path skos:prefLabel ] ; - sh:targetClass skos:Concept ; -. \ No newline at end of file diff --git a/prez/reference_data/endpoints/vocprez_endpoints.ttl.unused b/prez/reference_data/endpoints/vocprez_endpoints.ttl.unused deleted file mode 100755 index ccdaf4a3..00000000 --- a/prez/reference_data/endpoints/vocprez_endpoints.ttl.unused +++ /dev/null @@ -1,100 +0,0 @@ -PREFIX endpoint: -PREFIX ont: -PREFIX prez: -PREFIX rdfs: -PREFIX sh: -PREFIX skos: -PREFIX xsd: -PREFIX shext: - -endpoint:vocprez-home a ont:Endpoint ; - ont:endpointTemplate "/v" ; -. - -endpoint:collection-listing a ont:ListingEndpoint ; - ont:deliversClasses prez:VocPrezCollectionList ; - sh:targetClass skos:Collection ; - ont:endpointTemplate "/v/collection" ; -. - -endpoint:collection-object a ont:ObjectEndpoint ; - sh:targetNode "$object" ; - ont:parentEndpoint endpoint:collection-listing ; - ont:deliversClasses skos:Collection ; - ont:endpointTemplate "/v/collection/$object" ; -. - -endpoint:collection-concept a ont:ObjectEndpoint ; - sh:targetNode "$object" ; - ont:parentEndpoint endpoint:collection-object ; - ont:deliversClasses skos:Concept ; - ont:endpointTemplate "/v/collection/$parent_1/$object" ; - ont:parentToFocusRelation skos:member ; -. - - endpoint:vocabs-listing a ont:ListingEndpoint ; - ont:deliversClasses prez:SchemesList ; - sh:targetClass skos:ConceptScheme ; - ont:endpointTemplate "/v/vocab" ; -. - -endpoint:vocab-object a ont:ObjectEndpoint ; - sh:targetNode "$object" ; - ont:parentEndpoint endpoint:vocabs-listing ; - ont:deliversClasses skos:ConceptScheme ; - ont:endpointTemplate "/v/vocab/$object" ; -. - -endpoint:vocab-concept a ont:ObjectEndpoint ; - sh:targetNode "$object" ; - ont:parentEndpoint endpoint:vocab-object ; - ont:deliversClasses skos:Concept ; - ont:endpointTemplate "/v/vocab/$parent_1/$object" ; - ont:focusToParentRelation skos:inScheme ; -. - -endpoint:cs-top-concepts a ont:ListingEndpoint ; - ont:deliversClasses skos:Concept ; - sh:rule [ sh:subject sh:this ; - sh:predicate prez:hasChildren ; - sh:object "?hasChildren" ] ; - sh:target [ sh:select """SELECT DISTINCT ?focus_node ?hasChildren - WHERE { - $parent_1 skos:hasTopConcept|^skos:isTopConceptOf ?focus_node . - ?focus_node skos:prefLabel ?label . - BIND(EXISTS{?focus_node skos:narrower|^skos:broader ?grandChildren} AS ?hasChildren) - } - ORDER BY ?label - LIMIT $limit - OFFSET $offset - """ ] ; - sh:targetClass skos:Concept ; - ont:endpointTemplate "/v/vocab/$parent_1/top-concepts" ; -. - -endpoint:cs-children - a ont:ListingEndpoint ; - ont:deliversClasses skos:Concept ; - rdfs:comment """The concepts one level under top concepts. This query demonstrates how pagination of 2 items at a - time could work, the LIMIT is set to 3 such that we can determine if there are further (i.e. 
>2) objects available.""" ; - sh:rule - [ - sh:object "?hasChildren" ; - sh:predicate prez:hasChildren ; - sh:subject sh:this ; - ] ; - sh:target - [ - sh:select """SELECT DISTINCT ?focus_node ?hasChildren - WHERE { - $parent_1 skos:narrower|^skos:broader ?focus_node . - BIND(EXISTS{?focus_node skos:narrower|^skos:broader ?grandChildren} AS ?hasChildren) - } - """ ; - ] ; - shext:limit 20 ; - shext:offset 0 ; - shext:orderBy [ sh:path skos:prefLabel ] ; - sh:targetClass skos:Concept ; - ont:endpointTemplate "/v/vocabs/$parent_2/$parent_1/narrowers" ; -. diff --git a/prez/reference_data/profiles/ogc_records_profile.ttl b/prez/reference_data/profiles/ogc_records_profile.ttl index aa9e26b8..0b14c12e 100755 --- a/prez/reference_data/profiles/ogc_records_profile.ttl +++ b/prez/reference_data/profiles/ogc_records_profile.ttl @@ -29,8 +29,8 @@ prez:OGCRecordsProfile altr-ext:hasDefaultProfile prez:OGCListingProfile ] , [ a sh:NodeShape ; - sh:targetClass prez:SchemesList ; - altr-ext:hasDefaultProfile skos:ConceptScheme + sh:targetClass skos:ConceptScheme ; + altr-ext:hasDefaultProfile prez:OGCSchemesListProfile ] , [ a sh:NodeShape ; sh:targetClass dcat:Catalog , skos:ConceptScheme , skos:Concept , geo:Feature , geo:FeatureCollection , skos:Collection ; diff --git a/prez/renderers/json_renderer.py b/prez/renderers/json_renderer.py index 602c15f9..a0492dfc 100755 --- a/prez/renderers/json_renderer.py +++ b/prez/renderers/json_renderer.py @@ -1,11 +1,8 @@ -from itertools import chain - -from rdflib import Graph, URIRef, RDF, SH, Literal +from rdflib import Graph, URIRef, RDF, SH from rdflib.term import Node from prez.cache import profiles_graph_cache from prez.reference_data.prez_ns import ALTREXT -from prez.sparql.objects_listings import get_listing_predicates class NotFoundError(Exception): @@ -29,11 +26,11 @@ def _get_label_predicates(profile_graph: Graph, profile: URIRef) -> list[Node]: def _get_child_iris( - graph: Graph, - iri: Node, - child_to_focus_predicates: list[Node], - parent_to_focus_predicates: list[Node], - focus_to_child_predicates: list[Node], + graph: Graph, + iri: Node, + child_to_focus_predicates: list[Node], + parent_to_focus_predicates: list[Node], + focus_to_child_predicates: list[Node], ) -> list[Node]: children = [] for predicate in child_to_focus_predicates: @@ -55,7 +52,7 @@ def _get_child_iris( def create_graph_item( - iri: str, predicates: list[Node], graph: Graph, context: dict + iri: str, predicates: list[Node], graph: Graph, context: dict ) -> tuple[dict, dict]: item = {"iri": iri} for predicate in predicates: @@ -68,9 +65,9 @@ def create_graph_item( async def render_json_dropdown( - graph: Graph, - profile: URIRef, - selected_class: URIRef, + graph: Graph, + profile: URIRef, + selected_class: URIRef, ) -> dict: profile_graph = profiles_graph_cache.cbd(profile) @@ -90,9 +87,9 @@ async def render_json_dropdown( ) = get_listing_predicates(profile, selected_class) if ( - not child_to_focus_predicates - and not focus_to_parent_predicates - and not focus_to_child_predicates + not child_to_focus_predicates + and not focus_to_parent_predicates + and not focus_to_child_predicates ): # This is a listing view, e.g. /v/vocab. 
node_shape = profile_graph.value( diff --git a/prez/renderers/renderer.py b/prez/renderers/renderer.py index f03ecd74..8f87c850 100755 --- a/prez/renderers/renderer.py +++ b/prez/renderers/renderer.py @@ -11,8 +11,8 @@ from prez.renderers.csv_renderer import render_csv_dropdown from prez.renderers.json_renderer import render_json_dropdown, NotFoundError from prez.services.curie_functions import get_curie_id_for_uri -from prez.sparql.methods import Repo -from prez.sparql.objects_listings import ( +from prez.repositories import Repo +from prez.services.annotations import ( get_annotation_properties, ) diff --git a/prez/repositories/__init__.py b/prez/repositories/__init__.py new file mode 100644 index 00000000..b2acc6b2 --- /dev/null +++ b/prez/repositories/__init__.py @@ -0,0 +1,6 @@ +from .base import Repo +from .oxrdflib import OxrdflibRepo +from .pyoxigraph import PyoxigraphRepo +from .remote_sparql import RemoteSparqlRepo + +__all__ = ["Repo", "OxrdflibRepo", "PyoxigraphRepo", "RemoteSparqlRepo"] \ No newline at end of file diff --git a/prez/repositories/base.py b/prez/repositories/base.py new file mode 100755 index 00000000..a421d903 --- /dev/null +++ b/prez/repositories/base.py @@ -0,0 +1,61 @@ +import asyncio +import logging +import time +from abc import ABC, abstractmethod +from typing import List +from typing import Tuple +from urllib.parse import quote_plus + +import httpx +import pyoxigraph +from fastapi.concurrency import run_in_threadpool +from rdflib import Namespace, Graph, URIRef, Literal, BNode + +from prez.config import settings + +PREZ = Namespace("https://prez.dev/") + +log = logging.getLogger(__name__) + + +class Repo(ABC): + @abstractmethod + async def rdf_query_to_graph(self, query: str): + pass + + @abstractmethod + async def tabular_query_to_table(self, query: str, context: URIRef = None): + pass + + async def send_queries( + self, rdf_queries: List[str], tabular_queries: List[Tuple[URIRef, str]] = None + ) -> Tuple[Graph, List]: + # Common logic to send both query types in parallel + results = await asyncio.gather( + *[self.rdf_query_to_graph(query) for query in rdf_queries if query], + *[ + self.tabular_query_to_table(query, context) + for context, query in tabular_queries + if query + ], + ) + g = Graph() + tabular_results = [] + for result in results: + if isinstance(result, Graph): + g += result + else: + tabular_results.append(result) + return g, tabular_results + + @abstractmethod + def sparql( + self, query: str, raw_headers: list[tuple[bytes, bytes]], method: str = "GET" + ): + pass + + + + + + diff --git a/prez/repositories/oxrdflib.py b/prez/repositories/oxrdflib.py new file mode 100644 index 00000000..65d8cfd5 --- /dev/null +++ b/prez/repositories/oxrdflib.py @@ -0,0 +1,44 @@ +import logging + +from fastapi.concurrency import run_in_threadpool +from rdflib import Namespace, Graph, URIRef, Literal, BNode + +from prez.repositories.base import Repo + +PREZ = Namespace("https://prez.dev/") + +log = logging.getLogger(__name__) + + +class OxrdflibRepo(Repo): + def __init__(self, oxrdflib_graph: Graph): + self.oxrdflib_graph = oxrdflib_graph + + def _sync_rdf_query_to_graph(self, query: str) -> Graph: + results = self.oxrdflib_graph.query(query) + return results.graph + + def _sync_tabular_query_to_table(self, query: str, context: URIRef = None): + results = self.oxrdflib_graph.query(query) + reformatted_results = [] + for result in results: + reformatted_result = {} + for var in results.vars: + binding = result[var] + if binding: + str_type = 
self._str_type_for_rdflib_type(binding)
+                    reformatted_result[str(var)] = {"type": str_type, "value": binding}
+            reformatted_results.append(reformatted_result)
+        return context, reformatted_results
+
+    async def rdf_query_to_graph(self, query: str) -> Graph:
+        return await run_in_threadpool(self._sync_rdf_query_to_graph, query)
+
+    async def tabular_query_to_table(self, query: str, context: URIRef = None):
+        return await run_in_threadpool(
+            self._sync_tabular_query_to_table, query, context
+        )
+
+    def _str_type_for_rdflib_type(self, instance):
+        rdflib_type_map = {URIRef: "uri", BNode: "bnode", Literal: "literal"}
+        return rdflib_type_map[type(instance)]
diff --git a/prez/repositories/pyoxigraph.py b/prez/repositories/pyoxigraph.py
new file mode 100644
index 00000000..514f2baf
--- /dev/null
+++ b/prez/repositories/pyoxigraph.py
@@ -0,0 +1,101 @@
+import logging
+
+from fastapi.concurrency import run_in_threadpool
+from rdflib import Namespace, Graph, URIRef, Literal
+
+import pyoxigraph
+from prez.repositories.base import Repo
+
+PREZ = Namespace("https://prez.dev/")
+
+log = logging.getLogger(__name__)
+
+
+class PyoxigraphRepo(Repo):
+    def __init__(self, pyoxi_store: pyoxigraph.Store):
+        self.pyoxi_store = pyoxi_store
+
+    def _handle_query_solution_results(
+        self, results: pyoxigraph.QuerySolutions
+    ) -> dict:
+        """Organise the query results into a format serializable by FastAPI's JSONResponse."""
+        variables = results.variables
+        results_dict = {"head": {"vars": [v.value for v in results.variables]}}
+        results_list = []
+        for result in results:
+            result_dict = {}
+            for var in variables:
+                binding = result[var]
+                if binding:
+                    binding_type = self._pyoxi_result_type(binding)
+                    result_dict[str(var)[1:]] = {
+                        "type": binding_type,
+                        "value": binding.value,
+                    }
+            results_list.append(result_dict)
+        results_dict["results"] = {"bindings": results_list}
+        return results_dict
+
+    @staticmethod
+    def _handle_query_triples_results(results: pyoxigraph.QueryTriples) -> Graph:
+        """Parse the query results into a Graph object."""
+        ntriples = " .\n".join([str(r) for r in list(results)]) + " ."
+        g = Graph()
+        g.bind("prez", URIRef("https://prez.dev/"))
+        if ntriples == " .":
+            return g
+        return g.parse(data=ntriples, format="ntriples")
+
+    def _sync_rdf_query_to_graph(self, query: str) -> Graph:
+        try:
+            results = self.pyoxi_store.query(query)
+        except Exception as e:
+            log.exception(f"Pyoxigraph query failed: {e}")
+            raise
+        result_graph = self._handle_query_triples_results(results)
+        return result_graph
+
+    def _sync_tabular_query_to_table(self, query: str, context: URIRef = None) -> tuple:
+        results = self.pyoxi_store.query(query)
+        results_dict = self._handle_query_solution_results(results)
+        # only return the bindings from the results.
+        return context, results_dict["results"]["bindings"]
+
+    def _sparql(self, query: str) -> dict | Graph | bool:
+        """Submit a SPARQL query to the pyoxigraph store and return the formatted results."""
+        results = self.pyoxi_store.query(query)
+        if isinstance(results, pyoxigraph.QuerySolutions):  # a SELECT query result
+            results_dict = self._handle_query_solution_results(results)
+            return results_dict
+        elif isinstance(results, pyoxigraph.QueryTriples):  # a CONSTRUCT query result
+            result_graph = self._handle_query_triples_results(results)
+            return result_graph
+        elif isinstance(results, bool):  # an ASK query result
+            results_dict = {"head": {}, "boolean": results}
+            return results_dict
+        else:
+            raise TypeError(f"Unexpected result class {type(results)}")
+
+    async def rdf_query_to_graph(self, query: str) -> Graph:
+        return await run_in_threadpool(self._sync_rdf_query_to_graph, query)
+
+    async def tabular_query_to_table(self, query: str, context: URIRef = None) -> tuple:
+        return await run_in_threadpool(
+            self._sync_tabular_query_to_table, query, context
+        )
+
+    async def sparql(
+        self, query: str, raw_headers: list[tuple[bytes, bytes]], method: str = ""
+    ) -> dict | Graph | bool:
+        return self._sparql(query)
+
+    @staticmethod
+    def _pyoxi_result_type(term) -> str:
+        if isinstance(term, pyoxigraph.Literal):
+            return "literal"
+        elif isinstance(term, pyoxigraph.NamedNode):
+            return "uri"
+        elif isinstance(term, pyoxigraph.BlankNode):
+            return "bnode"
+        else:
+            raise ValueError(f"Unknown type: {type(term)}")
diff --git a/prez/repositories/remote_sparql.py b/prez/repositories/remote_sparql.py
new file mode 100644
index 00000000..4dd48732
--- /dev/null
+++ b/prez/repositories/remote_sparql.py
@@ -0,0 +1,71 @@
+import logging
+from urllib.parse import quote_plus
+
+import httpx
+from rdflib import Namespace, Graph, URIRef
+
+from prez.config import settings
+from prez.repositories.base import Repo
+
+PREZ = Namespace("https://prez.dev/")
+
+log = logging.getLogger(__name__)
+
+
+class RemoteSparqlRepo(Repo):
+    def __init__(self, async_client: httpx.AsyncClient):
+        self.async_client = async_client
+
+    async def _send_query(self, query: str, mediatype="text/turtle"):
+        """Sends a SPARQL query asynchronously.
+        Args: query: str: A SPARQL query to be sent asynchronously.
+        Returns: httpx.Response: A httpx.Response object
+        """
+        query_rq = self.async_client.build_request(
+            "POST",
+            url=settings.sparql_endpoint,
+            headers={"Accept": mediatype},
+            data={"query": query},
+        )
+        response = await self.async_client.send(query_rq, stream=True)
+        return response
+
+    async def rdf_query_to_graph(self, query: str) -> Graph:
+        """
+        Sends a SPARQL query asynchronously and parses the response into an RDFLib Graph.
+        Args: query: str: A SPARQL query to be sent asynchronously.
+        Returns: rdflib.Graph: An RDFLib Graph object
+        """
+        response = await self._send_query(query)
+        g = Graph()
+        await response.aread()
+        return g.parse(data=response.text, format="turtle")
+
+    async def tabular_query_to_table(self, query: str, context: URIRef = None):
+        """
+        Sends a SPARQL query asynchronously and parses the response into a table format.
+        The optional context parameter allows an identifier to be supplied with the query, such that multiple results can be
+        distinguished from each other.
+ """ + response = await self._send_query(query, "application/sparql-results+json") + await response.aread() + return context, response.json()["results"]["bindings"] + + async def sparql( + self, query: str, raw_headers: list[tuple[bytes, bytes]], method: str = "GET" + ): + """Sends a starlette Request object (containing a SPARQL query in the URL parameters) to a proxied SPARQL + endpoint.""" + # TODO: This only supports SPARQL GET requests because the query is sent as a query parameter. + + query_escaped_as_bytes = f"query={quote_plus(query)}".encode("utf-8") + + # TODO: Global app settings should be passed in as a function argument. + url = httpx.URL(url=settings.sparql_endpoint, query=query_escaped_as_bytes) + headers = [] + for header in raw_headers: + if header[0] != b"host": + headers.append(header) + headers.append((b"host", str(url.host).encode("utf-8"))) + rp_req = self.async_client.build_request(method, url, headers=headers) + return await self.async_client.send(rp_req, stream=True) diff --git a/prez/routers/cql.py b/prez/routers/cql.py index 00505ea8..7dcb9a2b 100755 --- a/prez/routers/cql.py +++ b/prez/routers/cql.py @@ -1,8 +1,10 @@ from typing import Optional from fastapi import APIRouter, Request, Depends -from rdflib import URIRef +from rdflib import Namespace +from rdflib.namespace import URIRef +from prez.reference_data.prez_ns import PREZ from prez.dependencies import ( get_repo, cql_post_parser_dependency, @@ -10,22 +12,24 @@ cql_get_parser_dependency, ) from prez.services.listings import listing_function -from prez.sparql.methods import Repo +from prez.repositories import Repo router = APIRouter(tags=["ogcrecords"]) +OGCE = Namespace(PREZ["endpoint/extended-ogc-records/"]) + @router.post( path="/cql", - name="https://prez.dev/endpoint/cql/post", + name=OGCE["cql-post"], ) async def cql_post_endpoint( - request: Request, - cql_parser: Optional[dict] = Depends(cql_post_parser_dependency), - page: int = 1, - per_page: int = 20, - repo: Repo = Depends(get_repo), - system_repo: Repo = Depends(get_system_repo), + request: Request, + cql_parser: Optional[dict] = Depends(cql_post_parser_dependency), + page: int = 1, + per_page: int = 20, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), ): endpoint_uri = URIRef("https://prez.dev/endpoint/cql/post") return await listing_function( @@ -41,15 +45,15 @@ async def cql_post_endpoint( @router.get( path="/cql", - name="https://prez.dev/endpoint/cql/get", + name=OGCE["cql-get"], ) async def cql_get_endpoint( - request: Request, - cql_parser: Optional[dict] = Depends(cql_get_parser_dependency), - page: int = 1, - per_page: int = 20, - repo: Repo = Depends(get_repo), - system_repo: Repo = Depends(get_system_repo), + request: Request, + cql_parser: Optional[dict] = Depends(cql_get_parser_dependency), + page: int = 1, + per_page: int = 20, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), ): endpoint_uri = URIRef("https://prez.dev/endpoint/cql/get") return await listing_function( @@ -65,22 +69,23 @@ async def cql_get_endpoint( @router.get( path="/queryables", - name="https://prez.dev/endpoint/cql/queryables", + name=OGCE["cql-queryables"], ) async def queryables_endpoint( - request: Request, - cql_parser: Optional[dict] = Depends(cql_get_parser_dependency), - page: int = 1, - per_page: int = 20, - repo: Repo = Depends(get_repo), - system_repo: Repo = Depends(get_system_repo), + request: Request, + cql_parser: Optional[dict] = Depends(cql_get_parser_dependency), + page: int = 1, 
+ per_page: int = 20, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), ): - endpoint_uri = URIRef("https://prez.dev/endpoint/cql/queryables") + endpoint_uri = URIRef(request.scope.get("route").name) return await listing_function( request=request, repo=repo, system_repo=system_repo, endpoint_uri=endpoint_uri, + hierarchy_level=1, page=page, per_page=per_page, cql_parser=cql_parser, diff --git a/prez/routers/ogc_router.py b/prez/routers/ogc_router.py index 5c52a366..52638976 100755 --- a/prez/routers/ogc_router.py +++ b/prez/routers/ogc_router.py @@ -8,7 +8,7 @@ from prez.services.curie_functions import get_uri_for_curie_id from prez.services.listings import listing_function from prez.services.objects import object_function -from prez.sparql.methods import Repo +from prez.repositories import Repo from prez.reference_data.prez_ns import PREZ from temp.grammar import IRI diff --git a/prez/routers/search.py b/prez/routers/search.py index a00b20ab..ff6930ab 100755 --- a/prez/routers/search.py +++ b/prez/routers/search.py @@ -9,8 +9,8 @@ from prez.reference_data.prez_ns import PREZ from prez.renderers.renderer import return_from_graph from prez.services.link_generation import add_prez_links -from prez.sparql.methods import Repo -from prez.sparql.search_query import SearchQuery +from prez.repositories import Repo +from prez.services.query_generation.search import SearchQuery router = APIRouter(tags=["Search"]) diff --git a/prez/routers/sparql.py b/prez/routers/sparql.py index eb788584..643cb67f 100755 --- a/prez/routers/sparql.py +++ b/prez/routers/sparql.py @@ -11,7 +11,7 @@ from prez.dependencies import get_repo, get_system_repo from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo, populate_profile_and_mediatype from prez.renderers.renderer import return_annotated_rdf -from prez.sparql.methods import Repo +from prez.repositories import Repo PREZ = Namespace("https://prez.dev/") diff --git a/prez/services/annotations.py b/prez/services/annotations.py new file mode 100755 index 00000000..271c0adf --- /dev/null +++ b/prez/services/annotations.py @@ -0,0 +1,148 @@ +import logging +from itertools import chain +from textwrap import dedent +from typing import List, Tuple + +from rdflib import Graph, URIRef, Namespace, Literal + +from prez.cache import tbox_cache +from prez.config import settings +from prez.services.curie_functions import get_uri_for_curie_id + +log = logging.getLogger(__name__) + +ALTREXT = Namespace("http://www.w3.org/ns/dx/conneg/altr-ext#") +PREZ = Namespace("https://prez.dev/") + + +async def get_annotation_properties( + item_graph: Graph, +): + """ + Gets annotation data used for HTML display. + This includes the label, description, and provenance, if available. + Note the following three default predicates are always included. This allows context, i.e. background ontologies, + which are often diverse in the predicates they use, to be aligned with the default predicates used by Prez. The full + range of predicates used can be manually included via profiles. 
+ """ + label_predicates = settings.label_predicates + description_predicates = settings.description_predicates + explanation_predicates = settings.provenance_predicates + other_predicates = settings.other_predicates + terms = ( + set(i for i in item_graph.predicates() if isinstance(i, URIRef)) + | set(i for i in item_graph.objects() if isinstance(i, URIRef)) + | set(i for i in item_graph.subjects() if isinstance(i, URIRef)) + ) + # TODO confirm caching of SUBJECT labels does not cause issues! this could be a lot of labels. Perhaps these are + # better separated and put in an LRU cache. Or it may not be worth the effort. + if not terms: + return None, Graph() + # read labels from the tbox cache, this should be the majority of labels + uncached_terms, labels_g = get_annotations_from_tbox_cache( + terms, + label_predicates, + description_predicates, + explanation_predicates, + other_predicates, + ) + + def other_predicates_statement(other_predicates, uncached_terms_other): + return f"""UNION + {{ + ?unannotated_term ?other_prop ?other . + VALUES ?other_prop {{ {" ".join('<' + str(pred) + '>' for pred in other_predicates)} }} + VALUES ?unannotated_term {{ {" ".join('<' + str(term) + '>' for term in uncached_terms_other)} + }} + }}""" + + queries_for_uncached = f"""CONSTRUCT {{ + ?unlabeled_term ?label_prop ?label . + ?undescribed_term ?desc_prop ?description . + ?unexplained_term ?expl_prop ?explanation . + ?unannotated_term ?other_prop ?other . + }} + WHERE {{ + {{ + ?unlabeled_term ?label_prop ?label . + VALUES ?label_prop {{ {" ".join('<' + str(pred) + '>' for pred in label_predicates)} }} + VALUES ?unlabeled_term {{ {" ".join('<' + str(term) + '>' for term in uncached_terms["labels"])} }} + FILTER(lang(?label) = "" || lang(?label) = "en" || lang(?label) = "en-AU") + }} + UNION + {{ + ?undescribed_term ?desc_prop ?description . + VALUES ?desc_prop {{ {" ".join('<' + str(pred) + '>' for pred in description_predicates)} }} + VALUES ?undescribed_term {{ {" ".join('<' + str(term) + '>' for term in uncached_terms["descriptions"])} + }} + }} + UNION + {{ + ?unexplained_term ?expl_prop ?explanation . 
+ VALUES ?expl_prop {{ {" ".join('<' + str(pred) + '>' for pred in explanation_predicates)} }} + VALUES ?unexplained_term {{ {" ".join('<' + str(term) + '>' for term in uncached_terms["provenance"])} + }} + }} + {other_predicates_statement(other_predicates, uncached_terms["other"]) if other_predicates else ""} + }}""" + return queries_for_uncached, labels_g + + +def get_annotations_from_tbox_cache( + terms: List[URIRef], label_props, description_props, explanation_props, other_props +): + """ + Gets labels from the TBox cache, returns a list of terms that were not found in the cache, and a graph of labels, + descriptions, and explanations + """ + labels_from_cache = Graph(bind_namespaces="rdflib") + terms_list = list(terms) + props_from_cache = { + "labels": list( + chain( + *( + tbox_cache.triples_choices((terms_list, prop, None)) + for prop in label_props + ) + ) + ), + "descriptions": list( + chain( + *( + tbox_cache.triples_choices((terms_list, prop, None)) + for prop in description_props + ) + ) + ), + "provenance": list( + chain( + *( + tbox_cache.triples_choices((terms_list, prop, None)) + for prop in explanation_props + ) + ) + ), + "other": list( + chain( + *( + tbox_cache.triples_choices((terms_list, prop, None)) + for prop in other_props + ) + ) + ), + } + # get all the annotations we can from the cache + all = list(chain(*props_from_cache.values())) + default_language = settings.default_language + for triple in all: + if isinstance(triple[2], Literal): + if triple[2].language == default_language: + labels_from_cache.add(triple) + elif triple[2].language is None: + labels_from_cache.add(triple) + # the remaining terms are not in the cache; we need to query the SPARQL endpoint to attempt to get them + uncached_props = { + k: list(set(terms) - set(triple[0] for triple in v)) + for k, v in props_from_cache.items() + } + return uncached_props, labels_from_cache diff --git a/prez/services/app_service.py b/prez/services/app_service.py index 249cd5dc..04a155a1 100755 --- a/prez/services/app_service.py +++ b/prez/services/app_service.py @@ -3,21 +3,20 @@ from pathlib import Path import httpx -from rdflib import URIRef, Literal, BNode, RDF, Graph, RDFS, DCTERMS, SDO, SKOS, Dataset +from rdflib import URIRef, Literal, Graph, RDFS, DCTERMS, SDO, SKOS, Dataset from prez.cache import ( prez_system_graph, - profiles_graph_cache, counts_graph, prefix_graph, endpoints_graph_cache, tbox_cache, ) from prez.config import settings -from prez.reference_data.prez_ns import PREZ, ALTREXT +from prez.reference_data.prez_ns import PREZ from prez.services.curie_functions import get_curie_id_for_uri -from prez.sparql.methods import Repo -from prez.sparql.objects_listings import startup_count_objects +from prez.repositories import Repo +from prez.services.query_generation.count import startup_count_objects log = logging.getLogger(__name__) diff --git a/prez/services/cql_search.py b/prez/services/cql_search.py deleted file mode 100755 index 17be1512..00000000 --- a/prez/services/cql_search.py +++ /dev/null @@ -1,178 +0,0 @@ -import re -from typing import Tuple - -from fastapi import HTTPException - - -class CQLSearch(object): - from prez.config import settings - - def __init__(self, cql_query: str, sparql_query: str) -> None: - self.cql_query = cql_query - self.sparql_query = sparql_query - - def _check_prop_exists(self, prop: str) -> bool: - return prop in settings.cql_props.keys() - - def _check_type(self, prop: str, val: str) -> bool: - prop_type = settings.cql_props[prop].get("type") - if prop_type is 
not None: - correct_type = False - match prop_type: - case "integer": - if re.match(r"(-|\+)?\d+", val): - correct_type = True - case "float": - if re.match(r"(-|\+)?\d+\.\d+", val): - correct_type = True - case "string": - if re.match(r'".+"', val): - correct_type = True - case _: # invalid prop type? - pass - return correct_type - else: - return True - - def _parse_eq_ops(self, f: str) -> str: - # validate - exps = re.findall( - r'(\w+)\s?(<>|<=|>=|=|<|>)\s?(".+"|\d+(?:\.\d+)?)', f, flags=re.IGNORECASE - ) - for prop, op, val in exps: - if not self._check_prop_exists(prop): - raise HTTPException( - status_code=400, - detail=f"{prop} is not a valid property. Please consult /queryables for the list of available properties.", - ) - if not self._check_type(prop, val): - raise HTTPException( - status_code=400, - detail=f"Invalid type for the property {prop}, which is of type {settings.cql_props[prop].get('type')}", - ) - - # string replace - return re.sub( - r'(\w+)\s?(<>|<=|>=|=|<|>)\s?(".+"|\d+(?:\.\d+)?)', - lambda x: f'?{x.group(1)} {"!=" if x.group(2) == "<>" else x.group(2)} {x.group(3)}', - f, - flags=re.IGNORECASE, - ) - - def _parse_between(self, f: str) -> str: - # validate - exps = re.findall( - r'(\w+) between (".+"|\d+(?:\.\d+)?) and (".+"|\d+(?:\.\d+)?)', - f, - flags=re.IGNORECASE, - ) - for prop, val1, val2 in exps: - if not self._check_prop_exists(prop): - raise HTTPException( - status_code=400, - detail=f"{prop} is not a valid property. Please consult /queryables for the list of available properties.", - ) - if not self._check_type(prop, val1) or not self._check_type(prop, val2): - raise HTTPException( - status_code=400, - detail=f"Invalid type for the property {prop}, which is of type {settings.cql_props[prop].get('type')}", - ) - - # string replace - return re.sub( - r'(\w+) between (".+"|\d+(?:\.\d+)?) and (".+"|\d+(?:\.\d+)?)', - r"(?\1 >= \2 && ?\1 <= \3)", - f, - flags=re.IGNORECASE, - ) - - def _parse_or(self, f: str) -> str: - return re.sub(r" or ", r" || ", f, flags=re.IGNORECASE) - - def _parse_and(self, f: str) -> str: - return re.sub(r" and ", r" && ", f, flags=re.IGNORECASE) - - def _parse_like(self, f: str) -> str: - # validate - exps = re.findall(r'(\w+) like (".+")', f, flags=re.IGNORECASE) - for prop, val in exps: - if not self._check_prop_exists(prop): - raise HTTPException( - status_code=400, - detail=f"{prop} is not a valid property. Please consult /queryables for the list of available properties.", - ) - if not self._check_type(prop, val): - raise HTTPException( - status_code=400, - detail=f"Invalid type for the property {prop}, which is of type {settings.cql_props[prop].get('type')}", - ) - - # string replace - return re.sub( - r'(\w+) like (".+")', r'regex(?\1, \2, "i" )', f, flags=re.IGNORECASE - ) - - def _parse_is(self, f: str) -> str: - return re.sub( - r"(\w+) is (not )?null", - # no longer using FILTER(EXISTS {?f qname ?prop}), which is in the spec - https://opengeospatial.github.io/ogc-geosparql/geosparql11/spec.html#_f_2_4_comparison_predicates - lambda x: f'{"!" if x.group(2) is None else ""}BOUND(?{x.group(1)})', - f, - flags=re.IGNORECASE, - ) - - def _parse_in(self, f: str) -> str: - # validate - exps = re.findall( - r'(\w+) (in) (\((?:(?:".+"|\d+),\s?)*(?:".+"|\d+)\))', - f, - flags=re.IGNORECASE, - ) - for prop, op, val in exps: - if not self._check_prop_exists(prop): - raise HTTPException( - status_code=400, - detail=f"{prop} is not a valid property. 
Please consult /queryables for the list of available properties.", - ) - for element in val.strip("()").split(","): - if not self._check_type(prop, element.strip()): - raise HTTPException( - status_code=400, - detail=f"Invalid type for the property {prop}, which is of type {settings.cql_props[prop].get('type')}", - ) - - # string replace - return re.sub( - r'(\w+) (in) (\((?:(?:".+"|\d+),\s?)*(?:".+"|\d+)\))', - r"?\1 \2 \3", - f, - flags=re.IGNORECASE, - ) - - def generate_query(self) -> Tuple[str, str, str]: - self.dataset_query = "" - - if self.datasets != "": - self.dataset_query = f""" - VALUES ?d_id {{{" ".join([f'"{d.strip()}"^^prez:slug' for d in self.datasets.split(',')])}}} - """ - - self.collection_query = "" - - if self.collections != "": - self.collection_query = f""" - VALUES ?coll_id {{{" ".join([f'"{coll.strip()}"^^prez:slug' for coll in self.collections.split(',')])}}} - """ - - # TODO run regex at once, then separately parse components - if self.filter != "": - self.filter = self._parse_eq_ops(self.filter) - self.filter = self._parse_between(self.filter) - self.filter = self._parse_or(self.filter) - self.filter = self._parse_and(self.filter) - self.filter = self._parse_like(self.filter) - self.filter = self._parse_is(self.filter) - self.filter = self._parse_in(self.filter) - - self.filter = f"FILTER({self.filter})" - return self.filter diff --git a/prez/services/generate_profiles.py b/prez/services/generate_profiles.py index 067b0678..8cf93ef4 100755 --- a/prez/services/generate_profiles.py +++ b/prez/services/generate_profiles.py @@ -5,12 +5,11 @@ from rdflib import Graph, URIRef, RDF, PROF, Literal from prez.cache import profiles_graph_cache, prefix_graph -from prez.config import settings from prez.models.model_exceptions import NoProfilesException from prez.reference_data.prez_ns import PREZ from prez.services.curie_functions import get_curie_id_for_uri -from prez.sparql.methods import Repo -from prez.sparql.objects_listings import select_profile_mediatype +from prez.repositories import Repo +from prez.services.query_generation.connegp import select_profile_mediatype log = logging.getLogger(__name__) @@ -66,7 +65,6 @@ async def create_profiles_graph(repo) -> Graph: _add_prez_profile_links() -# @lru_cache(maxsize=128) async def get_profiles_and_mediatypes( classes: FrozenSet[URIRef], system_repo: Repo, @@ -189,4 +187,3 @@ def _add_prez_profile_links(): Literal(f"/profiles/{get_curie_id_for_uri(profile)}"), ) ) - # profiles_graph_cache.__iadd__(g) diff --git a/prez/services/link_generation.py b/prez/services/link_generation.py index c365e0b4..1d89b4dc 100755 --- a/prez/services/link_generation.py +++ b/prez/services/link_generation.py @@ -6,13 +6,12 @@ from prez.cache import endpoints_graph_cache, links_ids_graph_cache from prez.config import settings -from prez.reference_data.prez_ns import ONT from prez.reference_data.prez_ns import PREZ from prez.services.curie_functions import get_curie_id_for_uri -from prez.services.model_methods import get_classes -from prez.sparql.methods import Repo +from prez.services.query_generation.classes import get_classes +from prez.repositories import Repo from temp.grammar import * -from temp.shacl_node_selection import NodeShape +from prez.services.query_generation.shacl_node_selection import NodeShape log = logging.getLogger(__name__) @@ -29,7 +28,8 @@ async def add_prez_links(graph: Graph, repo: Repo, endpoint_structure): await _link_generation(uri, repo, klasses, graph, endpoint_structure) -async def _link_generation(uri: URIRef, 
repo: Repo, klasses, graph: Graph, endpoint_structure: str = settings.endpoint_structure): +async def _link_generation(uri: URIRef, repo: Repo, klasses, graph: Graph, + endpoint_structure: str = settings.endpoint_structure): # check the cache quads = list( links_ids_graph_cache.quads((None, None, None, uri)) @@ -45,18 +45,21 @@ async def _link_generation(uri: URIRef, repo: Repo, klasses, graph: Graph, endpo # run queries for available nodeshapes to get link components for ns in available_nodeshapes: if int(ns.hierarchy_level) > 1: - results = await get_link_components(available_nodeshapes, repo) + results = await get_link_components(ns, repo) for result in results: # if the list at tuple[1] > 0 then there's some result and a link should be generated. # NB for top level links, there will be a result (the graph pattern matched) BUT the result will not form # part of the link. e.g. ?path_node_1 will have result(s) but is not part of the link. for solution in result[1]: # create link strings - curie_for_uri, members_link, object_link = await create_link_strings(ns.hierarchy_level, solution, uri, endpoint_structure) + curie_for_uri, members_link, object_link = await create_link_strings(ns.hierarchy_level, + solution, uri, + endpoint_structure) # add links and identifiers to graph and cache await add_links_to_graph_and_cache(curie_for_uri, graph, members_link, object_link, uri) else: - curie_for_uri, members_link, object_link = await create_link_strings(ns.hierarchy_level, {}, uri, endpoint_structure) + curie_for_uri, members_link, object_link = await create_link_strings(ns.hierarchy_level, {}, uri, + endpoint_structure) await add_links_to_graph_and_cache(curie_for_uri, graph, members_link, object_link, uri) @@ -116,27 +119,26 @@ async def create_link_strings(hierarchy_level, solution, uri, endpoint_structure return curie_for_uri, members_link, object_link -async def get_link_components(available_nodeshapes, repo): +async def get_link_components(ns, repo): link_queries = [] - for ns in available_nodeshapes: - link_queries.append( - ( - ns.uri, - "".join(SubSelect( - select_clause=SelectClause( - variables_or_all=ns.path_nodes.values()), - where_clause=WhereClause( - group_graph_pattern=GroupGraphPattern( - content=GroupGraphPatternSub( - triples_block=TriplesBlock( - triples=ns.triples_list - ), - graph_patterns_or_triples_blocks=ns.gpnt_list - ) + link_queries.append( + ( + ns.uri, + "".join(SubSelect( + select_clause=SelectClause( + variables_or_all=ns.path_nodes.values()), + where_clause=WhereClause( + group_graph_pattern=GroupGraphPattern( + content=GroupGraphPatternSub( + triples_block=TriplesBlock( + triples=ns.triples_list + ), + graph_patterns_or_triples_blocks=ns.gpnt_list ) ) - ).render()) - ) + ) + ).render()) ) + ) _, results = await repo.send_queries([], link_queries) return results diff --git a/prez/services/listings.py b/prez/services/listings.py index 6f3b5f7c..dbc0022e 100755 --- a/prez/services/listings.py +++ b/prez/services/listings.py @@ -14,15 +14,15 @@ from prez.reference_data.prez_ns import PREZ from prez.renderers.renderer import return_from_graph from prez.services.link_generation import add_prez_links -from prez.services.model_methods import get_classes -from prez.sparql.count_query import CountQuery -from prez.sparql.methods import Repo -from prez.sparql.search_query import SearchQuery +from prez.services.query_generation.classes import get_classes +from prez.services.query_generation.count import CountQuery +from prez.repositories import Repo +from 
prez.services.query_generation.search import SearchQuery from temp.grammar import * # from rdframe.grammar import SubSelect # from rdframe import PrezQueryConstructor -from temp.shacl2sparql import PrezQueryConstructor -from temp.shacl_node_selection import NodeShape +from prez.services.query_generation.umbrella import PrezQueryConstructor +from prez.services.query_generation.shacl_node_selection import NodeShape log = logging.getLogger(__name__) diff --git a/prez/services/objects.py b/prez/services/objects.py index b6807c47..714a8c6e 100755 --- a/prez/services/objects.py +++ b/prez/services/objects.py @@ -11,10 +11,10 @@ from prez.reference_data.prez_ns import EP from prez.renderers.renderer import return_from_graph from prez.services.link_generation import add_prez_links -from prez.services.model_methods import get_classes -from prez.sparql.methods import Repo +from prez.services.query_generation.classes import get_classes +from prez.repositories import Repo from temp.grammar import IRI -from temp.shacl2sparql import PrezQueryConstructor +from prez.services.query_generation.umbrella import PrezQueryConstructor log = logging.getLogger(__name__) diff --git a/prez/services/model_methods.py b/prez/services/query_generation/classes.py similarity index 93% rename from prez/services/model_methods.py rename to prez/services/query_generation/classes.py index e32d2e90..08fc3e60 100755 --- a/prez/services/model_methods.py +++ b/prez/services/query_generation/classes.py @@ -1,8 +1,7 @@ import logging from rdflib import URIRef -from prez.cache import endpoints_graph_cache -from prez.sparql.methods import Repo +from prez.repositories import Repo log = logging.getLogger(__name__) diff --git a/prez/services/query_generation/connegp.py b/prez/services/query_generation/connegp.py new file mode 100644 index 00000000..911aee4d --- /dev/null +++ b/prez/services/query_generation/connegp.py @@ -0,0 +1,123 @@ +import logging +from textwrap import dedent +from typing import List, Tuple + +from rdflib import URIRef, Namespace + +from prez.services.curie_functions import get_uri_for_curie_id + +log = logging.getLogger(__name__) + +ALTREXT = Namespace("http://www.w3.org/ns/dx/conneg/altr-ext#") +PREZ = Namespace("https://prez.dev/") + + +def select_profile_mediatype( + classes: List[URIRef], + requested_profile_uri: URIRef = None, + requested_profile_token: str = None, + requested_mediatypes: List[Tuple] = None, + listing: bool = False, +): + """ + Returns a SPARQL SELECT query which will determine the profile and mediatype to return based on user requests, + defaults, and the availability of these in profiles. + + NB: Most specific class refers to the rdfs:Class of an object which has the most specific rdfs:subClassOf links to + the base class delivered by that API endpoint. The base classes delivered by each API endpoint are: + + SpacePrez: + /s/catalogs -> prez:DatasetList + /s/catalogs/{ds_id} -> dcat:Dataset + /s/catalogs/{ds_id}/collections/{fc_id} -> geo:FeatureCollection + /s/catalogs/{ds_id}/collections -> prez:FeatureCollectionList + /s/catalogs/{ds_id}/collections/{fc_id}/features -> geo:Feature + + VocPrez: + /v/schemes -> skos:ConceptScheme + /v/collections -> skos:Collection + /v/schemes/{cs_id}/concepts -> skos:Concept + + CatPrez: + /c/catalogs -> dcat:Catalog + /c/catalogs/{cat_id}/datasets -> dcat:Dataset + + The following logic is used to determine the profile and mediatype to be returned: + + 1. 
If a profile and mediatype are requested, they are returned if a matching profile which has the requested + mediatype is found, otherwise the default profile for the most specific class is returned, with its default + mediatype. + 2. If a profile only is requested, if it can be found it is returned, otherwise the default profile for the most + specific class is returned. In both cases the default mediatype is returned. + 3. If a mediatype only is requested, the default profile for the most specific class is returned, and if the + requested mediatype is available for that profile, it is returned, otherwise the default mediatype for that profile + is returned. + 4. If neither a profile nor mediatype is requested, the default profile for the most specific class is returned, + with the default mediatype for that profile. + """ + if listing: + profile_class = PREZ.ListingProfile + else: + profile_class = PREZ.ObjectProfile + if requested_profile_token: + requested_profile_uri = get_uri_for_curie_id(requested_profile_token) + query = dedent( + f""" PREFIX altr-ext: + PREFIX dcat: + PREFIX dcterms: + PREFIX geo: + PREFIX prez: + PREFIX prof: + PREFIX rdfs: + PREFIX skos: + PREFIX sh: + + SELECT ?profile ?title ?class (count(?mid) as ?distance) ?req_profile ?def_profile ?format ?req_format ?def_format + + WHERE {{ + VALUES ?class {{{" ".join('<' + str(klass) + '>' for klass in classes)}}} + ?class rdfs:subClassOf* ?mid . + ?mid rdfs:subClassOf* ?base_class . + VALUES ?base_class {{ dcat:Dataset geo:FeatureCollection geo:Feature + skos:ConceptScheme skos:Concept skos:Collection + dcat:Catalog dcat:Resource prof:Profile prez:SPARQLQuery + prez:SearchResult prez:CQLObjectList prez:QueryablesList prez:Object }} + ?profile altr-ext:constrainsClass ?class ; + altr-ext:hasResourceFormat ?format ; + dcterms:title ?title .\ + {f'?profile a {profile_class.n3()} .'} + {f'BIND(?profile=<{requested_profile_uri}> as ?req_profile)' if requested_profile_uri else ''} + BIND(EXISTS {{ ?shape sh:targetClass ?class ; + altr-ext:hasDefaultProfile ?profile }} AS ?def_profile) + {generate_mediatype_if_statements(requested_mediatypes) if requested_mediatypes else ''} + BIND(EXISTS {{ ?profile altr-ext:hasDefaultResourceFormat ?format }} AS ?def_format) + }} + GROUP BY ?class ?profile ?req_profile ?def_profile ?format ?req_format ?def_format ?title + ORDER BY DESC(?req_profile) DESC(?distance) DESC(?def_profile) DESC(?req_format) DESC(?def_format)""" + ) + return query + + +def generate_mediatype_if_statements(requested_mediatypes: list): + """ + Generates a list of if statements which will be used to determine the mediatype to return based on user requests, + and the availability of these in profiles. + These are of the form: + BIND( + IF(?format="application/ld+json", "0.9", + IF(?format="text/html", "0.8", + IF(?format="image/apng", "0.7", ""))) AS ?req_format) + """ + # TODO ConnegP appears to return a tuple of q values and profiles for headers, and only profiles (no q values) if they + # are not specified in QSAs. 
+    if not isinstance(next(iter(requested_mediatypes)), tuple):
+        requested_mediatypes = [(1, mt) for mt in requested_mediatypes]
+
+    line_join = "," + "\n"
+    ifs = (
+        f"BIND(\n"
+        f"""{line_join.join([chr(9) + 'IF(?format="' + tup[1] + '", "' + str(tup[0]) + '"' for tup in requested_mediatypes])}"""
+        f""", ""{')' * len(requested_mediatypes)}\n"""
+        f"\tAS ?req_format)"
+    )
+    return ifs
diff --git a/prez/sparql/count_query.py b/prez/services/query_generation/count.py
similarity index 66%
rename from prez/sparql/count_query.py
rename to prez/services/query_generation/count.py
index a45fb358..97406deb 100755
--- a/prez/sparql/count_query.py
+++ b/prez/services/query_generation/count.py
@@ -16,6 +16,18 @@ def render(self):
         return "".join(part for part in cq.render())
 
     def create_construct_query(self):
+        """Calls lower-level functions and builds the overall query.
+        Query is of the form:
+        CONSTRUCT {
+            _:N9008750f9acb47c08dfc2c3ae72ede37 <https://prez.dev/count> ?count .
+        }
+        WHERE {
+            SELECT (COUNT(DISTINCT ?focus_node) AS ?count)
+            WHERE {
+                <<>>
+            }
+        }
+        """
         self.remove_limit_and_offset()
         self.rebuild_select_clause()
         cq = ConstructQuery(
@@ -30,9 +42,15 @@ def create_construct_query(self):
         return cq
 
     def remove_limit_and_offset(self):
+        """Removes the LIMIT and OFFSET clauses from the original subselect query,
+        such that the count of all member objects can be obtained."""
         self.subselect.solution_modifier = None
 
     def rebuild_select_clause(self):
+        """
+        Rebuilds the SELECT clause to retrieve the count of the focus node.
+        SELECT (COUNT(DISTINCT ?focus_node) AS ?count)
+        """
         sc = SelectClause(
             variables_or_all=[
                 (
@@ -60,6 +78,10 @@ def rebuild_select_clause(self):
 
     def create_construct_template(self):
         """
+        Generates a triple for the CONSTRUCT query of the form:
+        {
+            _:N38355498469c47c5bb1dfa5b34a73df0 <https://prez.dev/count> ?count .
+        }
         """
         bn = BlankNode(value=BlankNodeLabel(part_1=BNode()))
         search_result_triples = [
@@ -73,3 +95,12 @@ def create_construct_template(self):
             construct_triples=ConstructTriples(triples=search_result_triples)
         )
         return ct
+
+
+def startup_count_objects():
+    """
+    Retrieves hardcoded counts for collections in the repository (Feature Collections, Catalogs etc.)
+ """ + return f"""PREFIX prez: + CONSTRUCT {{ ?collection prez:count ?count }} + WHERE {{ ?collection prez:count ?count }}""" diff --git a/temp/cql2sparql.py b/prez/services/query_generation/cql2sparql.py similarity index 95% rename from temp/cql2sparql.py rename to prez/services/query_generation/cql2sparql.py index c0f616b2..18864ea3 100755 --- a/temp/cql2sparql.py +++ b/prez/services/query_generation/cql2sparql.py @@ -4,36 +4,9 @@ from rdflib import URIRef, Namespace from rdflib.namespace import GEO, SH -from temp.grammar.grammar import ( - GroupOrUnionGraphPattern, - GroupGraphPatternSub, - TriplesBlock, - SimplifiedTriple, - GroupGraphPattern, - GraphPatternNotTriples, - Filter, - InlineDataOneVar, - InlineData, - DataBlock, - WhereClause, - ConstructTemplate, - SolutionModifier, - ConstructQuery, - ConstructTriples, - Var, - IRI, - RDFLiteral, - PrimaryExpression, - RegexExpression, - Expression, - BuiltInCall, - Constraint, - FunctionCall, - NumericLiteral, - DataBlockValue, - ArgList, -) -from temp.cql_sparql_reference import ( +from temp.grammar import * + +from prez.services.query_generation.cql_sparql_reference import ( cql_sparql_spatial_mapping, cql_to_shapely_mapping, ) diff --git a/temp/cql_sparql_reference.py b/prez/services/query_generation/cql_sparql_reference.py similarity index 100% rename from temp/cql_sparql_reference.py rename to prez/services/query_generation/cql_sparql_reference.py diff --git a/temp/default_cql_context.json b/prez/services/query_generation/default_cql_context.json similarity index 100% rename from temp/default_cql_context.json rename to prez/services/query_generation/default_cql_context.json diff --git a/prez/sparql/search_query.py b/prez/services/query_generation/search.py similarity index 100% rename from prez/sparql/search_query.py rename to prez/services/query_generation/search.py diff --git a/temp/shacl_node_selection.py b/prez/services/query_generation/shacl_node_selection.py similarity index 99% rename from temp/shacl_node_selection.py rename to prez/services/query_generation/shacl_node_selection.py index 049b3305..0527a0df 100644 --- a/temp/shacl_node_selection.py +++ b/prez/services/query_generation/shacl_node_selection.py @@ -55,7 +55,7 @@ def from_graph(self): # TODO this can be a SPARQL select against the system gra ) for ps_uri in self.propertyShapesURIs] self.hierarchy_level = next(self.graph.objects(self.uri, ONT.hierarchyLevel), None) if not self.hierarchy_level: - print('') + raise ValueError("No hierarchy level found") def to_grammar(self): if self.targetNode: diff --git a/temp/shacl2sparql.py b/prez/services/query_generation/umbrella.py similarity index 99% rename from temp/shacl2sparql.py rename to prez/services/query_generation/umbrella.py index af0c128f..cd052c74 100755 --- a/temp/shacl2sparql.py +++ b/prez/services/query_generation/umbrella.py @@ -108,12 +108,11 @@ def _generate_query(self): construct_triples=ConstructTriples(triples=self.construct_triples) ) solution_modifier = SolutionModifier() - query = ConstructQuery( + query_str = ConstructQuery( construct_template=construct_template, where_clause=where, solution_modifier=solution_modifier, - ) - query_str = "".join(part for part in query.render()) + ).to_string() self.sparql = query_str def build_inner_select(self): @@ -217,7 +216,7 @@ def create_select_subquery_from_template(self, target_bn): ).rstrip() sol_mod, order_by_triple = self._create_focus_node_solution_modifier() if order_by_triple: # insert it before the end of the string, - order_by_triple_text = 
"".join(order_by_triple.render()) + order_by_triple_text = order_by_triple.to_string() substituted_query = ( substituted_query[:-1] + f"{{{order_by_triple_text}}} }}" ) diff --git a/prez/sparql/methods.py b/prez/sparql/methods.py deleted file mode 100755 index e604e979..00000000 --- a/prez/sparql/methods.py +++ /dev/null @@ -1,237 +0,0 @@ -import asyncio -import logging -import time -from abc import ABC, abstractmethod -from typing import List -from typing import Tuple -from urllib.parse import quote_plus - -import httpx -import pyoxigraph -from fastapi.concurrency import run_in_threadpool -from rdflib import Namespace, Graph, URIRef, Literal, BNode - -from prez.config import settings - -PREZ = Namespace("https://prez.dev/") - -log = logging.getLogger(__name__) - - -class Repo(ABC): - @abstractmethod - async def rdf_query_to_graph(self, query: str): - pass - - @abstractmethod - async def tabular_query_to_table(self, query: str, context: URIRef = None): - pass - - async def send_queries( - self, rdf_queries: List[str], tabular_queries: List[Tuple[URIRef, str]] = None - ) -> Tuple[Graph, List]: - # Common logic to send both query types in parallel - results = await asyncio.gather( - *[self.rdf_query_to_graph(query) for query in rdf_queries if query], - *[ - self.tabular_query_to_table(query, context) - for context, query in tabular_queries - if query - ], - ) - g = Graph() - tabular_results = [] - for result in results: - if isinstance(result, Graph): - g += result - else: - tabular_results.append(result) - return g, tabular_results - - @abstractmethod - def sparql( - self, query: str, raw_headers: list[tuple[bytes, bytes]], method: str = "GET" - ): - pass - - -class RemoteSparqlRepo(Repo): - def __init__(self, async_client: httpx.AsyncClient): - self.async_client = async_client - - async def _send_query(self, query: str, mediatype="text/turtle"): - """Sends a SPARQL query asynchronously. - Args: query: str: A SPARQL query to be sent asynchronously. - Returns: httpx.Response: A httpx.Response object - """ - query_rq = self.async_client.build_request( - "POST", - url=settings.sparql_endpoint, - headers={"Accept": mediatype}, - data={"query": query}, - ) - response = await self.async_client.send(query_rq, stream=True) - return response - - async def rdf_query_to_graph(self, query: str) -> Graph: - """ - Sends a SPARQL query asynchronously and parses the response into an RDFLib Graph. - Args: query: str: A SPARQL query to be sent asynchronously. - Returns: rdflib.Graph: An RDFLib Graph object - """ - response = await self._send_query(query) - g = Graph() - await response.aread() - return g.parse(data=response.text, format="turtle") - - async def tabular_query_to_table(self, query: str, context: URIRef = None): - """ - Sends a SPARQL query asynchronously and parses the response into a table format. - The optional context parameter allows an identifier to be supplied with the query, such that multiple results can be - distinguished from each other. - """ - response = await self._send_query(query, "application/sparql-results+json") - await response.aread() - return context, response.json()["results"]["bindings"] - - async def sparql( - self, query: str, raw_headers: list[tuple[bytes, bytes]], method: str = "GET" - ): - """Sends a starlette Request object (containing a SPARQL query in the URL parameters) to a proxied SPARQL - endpoint.""" - # TODO: This only supports SPARQL GET requests because the query is sent as a query parameter. 
-
-        query_escaped_as_bytes = f"query={quote_plus(query)}".encode("utf-8")
-
-        # TODO: Global app settings should be passed in as a function argument.
-        url = httpx.URL(url=settings.sparql_endpoint, query=query_escaped_as_bytes)
-        headers = []
-        for header in raw_headers:
-            if header[0] != b"host":
-                headers.append(header)
-        headers.append((b"host", str(url.host).encode("utf-8")))
-        rp_req = self.async_client.build_request(method, url, headers=headers)
-        return await self.async_client.send(rp_req, stream=True)
-
-
-class PyoxigraphRepo(Repo):
-    def __init__(self, pyoxi_store: pyoxigraph.Store):
-        self.pyoxi_store = pyoxi_store
-
-    def _handle_query_solution_results(
-        self, results: pyoxigraph.QuerySolutions
-    ) -> dict:
-        """Organise the query results into format serializable by FastAPIs JSONResponse."""
-        variables = results.variables
-        results_dict = {"head": {"vars": [v.value for v in results.variables]}}
-        results_list = []
-        for result in results:
-            result_dict = {}
-            for var in variables:
-                binding = result[var]
-                if binding:
-                    binding_type = self._pyoxi_result_type(binding)
-                    result_dict[str(var)[1:]] = {
-                        "type": binding_type,
-                        "value": binding.value,
-                    }
-            results_list.append(result_dict)
-        results_dict["results"] = {"bindings": results_list}
-        return results_dict
-
-    @staticmethod
-    def _handle_query_triples_results(results: pyoxigraph.QueryTriples) -> Graph:
-        """Parse the query results into a Graph object."""
-        ntriples = " .\n".join([str(r) for r in list(results)]) + " ."
-        g = Graph()
-        g.bind("prez", URIRef("https://prez.dev/"))
-        if ntriples == " .":
-            return g
-        return g.parse(data=ntriples, format="ntriples")
-
-    def _sync_rdf_query_to_graph(self, query: str) -> Graph:
-        try:
-            results = self.pyoxi_store.query(query)
-        except Exception as e:
-            print(e)
-        result_graph = self._handle_query_triples_results(results)
-        return result_graph
-
-    def _sync_tabular_query_to_table(self, query: str, context: URIRef = None) -> tuple:
-        results = self.pyoxi_store.query(query)
-        results_dict = self._handle_query_solution_results(results)
-        # only return the bindings from the results.
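For reviewers unfamiliar with the wire format: _handle_query_solution_results above assembles the SPARQL 1.1 Query Results JSON layout by hand, so the bindings sliced out of it on the next line are plain dicts. A single row binding ?s to an IRI comes out roughly as follows (the value itself is illustrative):

    [{"s": {"type": "uri", "value": "https://example.com/Dataset"}}]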
- return context, results_dict["results"]["bindings"] - - def _sparql(self, query: str) -> dict | Graph | bool: - """Submit a sparql query to the pyoxigraph store and return the formatted results.""" - results = self.pyoxi_store.query(query) - if isinstance(results, pyoxigraph.QuerySolutions): # a SELECT query result - results_dict = self._handle_query_solution_results(results) - return results_dict - elif isinstance(results, pyoxigraph.QueryTriples): # a CONSTRUCT query result - result_graph = self._handle_query_triples_results(results) - return result_graph - elif isinstance(results, bool): - results_dict = {"head": {}, "boolean": results} - return results_dict - else: - raise TypeError(f"Unexpected result class {type(results)}") - - async def rdf_query_to_graph(self, query: str) -> Graph: - return await run_in_threadpool(self._sync_rdf_query_to_graph, query) - - async def tabular_query_to_table(self, query: str, context: URIRef = None) -> list: - return await run_in_threadpool( - self._sync_tabular_query_to_table, query, context - ) - - async def sparql( - self, query: str, raw_headers: list[tuple[bytes, bytes]], method: str = "" - ) -> list | Graph | bool: - return self._sparql(query) - - @staticmethod - def _pyoxi_result_type(term) -> str: - if isinstance(term, pyoxigraph.Literal): - return "literal" - elif isinstance(term, pyoxigraph.NamedNode): - return "uri" - elif isinstance(term, pyoxigraph.BlankNode): - return "bnode" - else: - raise ValueError(f"Unknown type: {type(term)}") - - -class OxrdflibRepo(Repo): - def __init__(self, oxrdflib_graph: Graph): - self.oxrdflib_graph = oxrdflib_graph - - def _sync_rdf_query_to_graph(self, query: str) -> Graph: - results = self.oxrdflib_graph.query(query) - return results.graph - - def _sync_tabular_query_to_table(self, query: str, context: URIRef = None): - results = self.oxrdflib_graph.query(query) - reformatted_results = [] - for result in results: - reformatted_result = {} - for var in results.vars: - binding = result[var] - if binding: - str_type = self._str_type_for_rdflib_type(binding) - reformatted_result[str(var)] = {"type": str_type, "value": binding} - reformatted_results.append(reformatted_result) - return context, reformatted_results - - async def rdf_query_to_graph(self, query: str) -> Graph: - return await run_in_threadpool(self._sync_rdf_query_to_graph, query) - - async def tabular_query_to_table(self, query: str, context: URIRef = None): - return await run_in_threadpool( - self._sync_tabular_query_to_table, query, context - ) - - def _str_type_for_rdflib_type(self, instance): - map = {URIRef: "uri", BNode: "bnode", Literal: "literal"} - return map[type(instance)] diff --git a/prez/sparql/objects_listings.py b/prez/sparql/objects_listings.py deleted file mode 100755 index b1419cd2..00000000 --- a/prez/sparql/objects_listings.py +++ /dev/null @@ -1,523 +0,0 @@ -import logging -from itertools import chain -from textwrap import dedent -from typing import List, Tuple, Dict, FrozenSet - -from rdflib import Graph, URIRef, Namespace, Literal - -from prez.cache import tbox_cache, profiles_graph_cache -from prez.config import settings -from prez.services.curie_functions import get_uri_for_curie_id -from temp.grammar.grammar import SubSelect - -log = logging.getLogger(__name__) - -ALTREXT = Namespace("http://www.w3.org/ns/dx/conneg/altr-ext#") -PREZ = Namespace("https://prez.dev/") - - -async def get_annotation_properties( - item_graph: Graph, -): - """ - Gets annotation data used for HTML display. 
- This includes the label, description, and provenance, if available. - Note the following three default predicates are always included. This allows context, i.e. background ontologies, - which are often diverse in the predicates they use, to be aligned with the default predicates used by Prez. The full - range of predicates used can be manually included via profiles. - """ - label_predicates = settings.label_predicates - description_predicates = settings.description_predicates - explanation_predicates = settings.provenance_predicates - other_predicates = settings.other_predicates - terms = ( - set(i for i in item_graph.predicates() if isinstance(i, URIRef)) - | set(i for i in item_graph.objects() if isinstance(i, URIRef)) - | set(i for i in item_graph.subjects() if isinstance(i, URIRef)) - ) - # TODO confirm caching of SUBJECT labels does not cause issues! this could be a lot of labels. Perhaps these are - # better separated and put in an LRU cache. Or it may not be worth the effort. - if not terms: - return None, Graph() - # read labels from the tbox cache, this should be the majority of labels - uncached_terms, labels_g = get_annotations_from_tbox_cache( - terms, - label_predicates, - description_predicates, - explanation_predicates, - other_predicates, - ) - - def other_predicates_statement(other_predicates, uncached_terms_other): - return f"""UNION - {{ - ?unannotated_term ?other_prop ?other . - VALUES ?other_prop {{ {" ".join('<' + str(pred) + '>' for pred in other_predicates)} }} - VALUES ?unannotated_term {{ {" ".join('<' + str(term) + '>' for term in uncached_terms_other)} - }} - }}""" - - queries_for_uncached = f"""CONSTRUCT {{ - ?unlabeled_term ?label_prop ?label . - ?undescribed_term ?desc_prop ?description . - ?unexplained_term ?expl_prop ?explanation . - ?unannotated_term ?other_prop ?other . - }} - WHERE {{ - {{ - ?unlabeled_term ?label_prop ?label . - VALUES ?label_prop {{ {" ".join('<' + str(pred) + '>' for pred in label_predicates)} }} - VALUES ?unlabeled_term {{ {" ".join('<' + str(term) + '>' for term in uncached_terms["labels"])} }} - FILTER(lang(?label) = "" || lang(?label) = "en" || lang(?label) = "en-AU") - }} - UNION - {{ - ?undescribed_term ?desc_prop ?description . - VALUES ?desc_prop {{ {" ".join('<' + str(pred) + '>' for pred in description_predicates)} }} - VALUES ?undescribed_term {{ {" ".join('<' + str(term) + '>' for term in uncached_terms["descriptions"])} - }} - }} - UNION - {{ - ?unexplained_term ?expl_prop ?explanation . 
- VALUES ?expl_prop {{ {" ".join('<' + str(pred) + '>' for pred in explanation_predicates)} }} - VALUES ?unexplained_term {{ {" ".join('<' + str(term) + '>' for term in uncached_terms["provenance"])} - }} - }} - {other_predicates_statement(other_predicates, uncached_terms["other"]) if other_predicates else ""} - }}""" - return queries_for_uncached, labels_g - - -def get_annotations_from_tbox_cache( - terms: List[URIRef], label_props, description_props, explanation_props, other_props -): - """ - Gets labels from the TBox cache, returns a list of terms that were not found in the cache, and a graph of labels, - descriptions, and explanations - """ - labels_from_cache = Graph(bind_namespaces="rdflib") - terms_list = list(terms) - props_from_cache = { - "labels": list( - chain( - *( - tbox_cache.triples_choices((terms_list, prop, None)) - for prop in label_props - ) - ) - ), - "descriptions": list( - chain( - *( - tbox_cache.triples_choices((terms_list, prop, None)) - for prop in description_props - ) - ) - ), - "provenance": list( - chain( - *( - tbox_cache.triples_choices((terms_list, prop, None)) - for prop in explanation_props - ) - ) - ), - "other": list( - chain( - *( - tbox_cache.triples_choices((terms_list, prop, None)) - for prop in other_props - ) - ) - ), - } - # get all the annotations we can from the cache - all = list(chain(*props_from_cache.values())) - default_language = settings.default_language - for triple in all: - if isinstance(triple[2], Literal): - if triple[2].language == default_language: - labels_from_cache.add(triple) - elif triple[2].language is None: - labels_from_cache.add(triple) - # the remaining terms are not in the cache; we need to query the SPARQL endpoint to attempt to get them - uncached_props = { - k: list(set(terms) - set(triple[0] for triple in v)) - for k, v in props_from_cache.items() - } - return uncached_props, labels_from_cache - - -def temp_listing_count(subquery: SubSelect, klass): - """ - TODO: Implement COUNT and other expressions in SPARQL grammar. - """ - return f""" - PREFIX prez: <{PREZ}> - CONSTRUCT {{ - {klass.n3()} prez:count ?count - }} - WHERE {{ - SELECT (COUNT(DISTINCT ?focus_node) as ?count) {{ {subquery} }} - }}""" - - -def get_relevant_shape_bns_for_profile(selected_class, profile): - """ - Gets the shape blank nodes URIs from the profiles graph for a given profile. - """ - if not profile: - return None - shape_bns = list( - profiles_graph_cache.objects( - subject=profile, - predicate=ALTREXT.hasNodeShape, - ) - ) - if not shape_bns: - return None - relevant_shape_bns = [ - triple[0] - for triple in profiles_graph_cache.triples_choices( - ( - list(shape_bns), - URIRef("http://www.w3.org/ns/shacl#targetClass"), - selected_class, - ) - ) - ] - return relevant_shape_bns - - -def get_listing_predicates(profile, selected_class): - """ - Gets predicates relevant to listings of objects as specified in the profile. - This is used in two scenarios: - 1. "Collection" endpoints, for top level listing of objects of a particular type - 2. 
For a specific object, where it has members - The predicates retrieved from profiles are: - - child to focus, for example where the object of interest is a Concept Scheme, and is linked to Concept(s) via - the predicate skos:inScheme - - focus to child, for example where the object of interest is a Feature Collection, and is linked to Feature(s) - via the predicate rdfs:member - - parent to focus, for example where the object of interest is a Feature Collection, and is linked to Dataset(s) via - the predicate dcterms:hasPart - - focus to parents, for example where the object of interest is a Concept, and is linked to Concept Scheme(s) via - the predicate skos:inScheme - - relative properties, properties of the parent/child objects that should also be returned. For example, if the - focus object is a Concept Scheme, and the predicate skos:inScheme is used to link from Concept(s) (using - altr-ext:childToFocus) then specifying skos:broader as a relative property will cause the broader concepts to - be returned for each concept - """ - shape_bns = get_relevant_shape_bns_for_profile(selected_class, profile) - if not shape_bns: - return [], [], [], [], [] - child_to_focus = [ - i[2] - for i in profiles_graph_cache.triples_choices( - ( - shape_bns, - ALTREXT.childToFocus, - None, - ) - ) - ] - parent_to_focus = [ - i[2] - for i in profiles_graph_cache.triples_choices( - ( - shape_bns, - ALTREXT.parentToFocus, - None, - ) - ) - ] - focus_to_child = [ - i[2] - for i in profiles_graph_cache.triples_choices( - ( - shape_bns, - ALTREXT.focusToChild, - None, - ) - ) - ] - focus_to_parent = [ - i[2] - for i in profiles_graph_cache.triples_choices( - ( - shape_bns, - ALTREXT.focusToParent, - None, - ) - ) - ] - relative_properties = [ - i[2] - for i in profiles_graph_cache.triples_choices( - ( - shape_bns, - ALTREXT.relativeProperties, - None, - ) - ) - ] - return ( - child_to_focus, - parent_to_focus, - focus_to_child, - focus_to_parent, - relative_properties, - ) - - -def get_item_predicates(profile, selected_class): - """ - Gets any predicates specified in the profile, this includes: - - predicates to include. Uses sh:path - - predicates to exclude. Uses sh:path in conjunction with dash:hidden. - - inverse path predicates to include (inbound links to the object). Uses sh:inversePath. - - sequence path predicates to include, expressed as a list. Uses sh:sequencePath. - """ - shape_bns = get_relevant_shape_bns_for_profile(selected_class, profile) - if not shape_bns: - log.info( - f"No special predicates (include/exclude/inverse/sequence) found for class {selected_class} in profile " - f"{profile}. Default behaviour is to include all predicates, and blank nodes to a depth of two." 
- ) - return None, None, None, None - includes = [ - i[2] - for i in profiles_graph_cache.triples_choices( - (shape_bns, URIRef("http://www.w3.org/ns/shacl#path"), None) - ) - ] - excludes = [ - i[2] - for i in profiles_graph_cache.triples_choices( - (shape_bns, ALTREXT.exclude, None) - ) - ] - inverses = [ - i[2] - for i in profiles_graph_cache.triples_choices( - (shape_bns, URIRef("http://www.w3.org/ns/shacl#inversePath"), None) - ) - ] - _sequence_nodes = [ - i[2] - for i in profiles_graph_cache.triples_choices( - ( - shape_bns, - URIRef("http://www.w3.org/ns/shacl#sequencePath"), - None, - ) - ) - ] - sequence_paths = [ - [path_item for path_item in profiles_graph_cache.items(i)] - for i in _sequence_nodes - ] - return includes, excludes, inverses, sequence_paths - - -def select_profile_mediatype( - classes: List[URIRef], - requested_profile_uri: URIRef = None, - requested_profile_token: str = None, - requested_mediatypes: List[Tuple] = None, - listing: bool = False, -): - """ - Returns a SPARQL SELECT query which will determine the profile and mediatype to return based on user requests, - defaults, and the availability of these in profiles. - - NB: Most specific class refers to the rdfs:Class of an object which has the most specific rdfs:subClassOf links to - the base class delivered by that API endpoint. The base classes delivered by each API endpoint are: - - SpacePrez: - /s/catalogs -> prez:DatasetList - /s/catalogs/{ds_id} -> dcat:Dataset - /s/catalogs/{ds_id}/collections/{fc_id} -> geo:FeatureCollection - /s/catalogs/{ds_id}/collections -> prez:FeatureCollectionList - /s/catalogs/{ds_id}/collections/{fc_id}/features -> geo:Feature - - VocPrez: - /v/schemes -> skos:ConceptScheme - /v/collections -> skos:Collection - /v/schemes/{cs_id}/concepts -> skos:Concept - - CatPrez: - /c/catalogs -> dcat:Catalog - /c/catalogs/{cat_id}/datasets -> dcat:Dataset - - The following logic is used to determine the profile and mediatype to be returned: - - 1. If a profile and mediatype are requested, they are returned if a matching profile which has the requested - mediatype is found, otherwise the default profile for the most specific class is returned, with its default - mediatype. - 2. If a profile only is requested, if it can be found it is returned, otherwise the default profile for the most - specific class is returned. In both cases the default mediatype is returned. - 3. If a mediatype only is requested, the default profile for the most specific class is returned, and if the - requested mediatype is available for that profile, it is returned, otherwise the default mediatype for that profile - is returned. - 4. If neither a profile nor mediatype is requested, the default profile for the most specific class is returned, - with the default mediatype for that profile. - """ - if listing: - profile_class = PREZ.ListingProfile - else: - profile_class = PREZ.ObjectProfile - if requested_profile_token: - requested_profile_uri = get_uri_for_curie_id(requested_profile_token) - query = dedent( - f""" PREFIX altr-ext: - PREFIX dcat: - PREFIX dcterms: - PREFIX geo: - PREFIX prez: - PREFIX prof: - PREFIX rdfs: - PREFIX skos: - PREFIX sh: - - SELECT ?profile ?title ?class (count(?mid) as ?distance) ?req_profile ?def_profile ?format ?req_format ?def_format - - WHERE {{ - VALUES ?class {{{" ".join('<' + str(klass) + '>' for klass in classes)}}} - ?class rdfs:subClassOf* ?mid . - ?mid rdfs:subClassOf* ?base_class . 
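The two rdfs:subClassOf* patterns just above are what implement the "most specific class" rule described in the docstring: ?distance counts the ?mid classes between the requested class and a base class. With a hypothetical hierarchy ex:SpecialDataset rdfs:subClassOf ex:Dataset and ex:Dataset rdfs:subClassOf dcat:Dataset (the ex: terms are invented for illustration), ?mid matches all three classes for ex:SpecialDataset, giving ?distance = 3, but only one for dcat:Dataset itself, so the ORDER BY's DESC(?distance) term ranks profiles for the more specific class first, after any explicitly requested profile.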
- VALUES ?base_class {{ dcat:Dataset geo:FeatureCollection geo:Feature - skos:ConceptScheme skos:Concept skos:Collection - prez:ProfilesList dcat:Catalog dcat:Resource prof:Profile prez:SPARQLQuery - prez:SearchResult prez:CQLObjectList prez:QueryablesList prez:Object }} - ?profile altr-ext:constrainsClass ?class ; - altr-ext:hasResourceFormat ?format ; - dcterms:title ?title .\ - {f'?profile a {profile_class.n3()} .'} - {f'BIND(?profile=<{requested_profile_uri}> as ?req_profile)' if requested_profile_uri else ''} - BIND(EXISTS {{ ?shape sh:targetClass ?class ; - altr-ext:hasDefaultProfile ?profile }} AS ?def_profile) - {generate_mediatype_if_statements(requested_mediatypes) if requested_mediatypes else ''} - BIND(EXISTS {{ ?profile altr-ext:hasDefaultResourceFormat ?format }} AS ?def_format) - }} - GROUP BY ?class ?profile ?req_profile ?def_profile ?format ?req_format ?def_format ?title - ORDER BY DESC(?req_profile) DESC(?distance) DESC(?def_profile) DESC(?req_format) DESC(?def_format)""" - ) - return query - - -def generate_mediatype_if_statements(requested_mediatypes: list): - """ - Generates a list of if statements which will be used to determine the mediatype to return based on user requests, - and the availability of these in profiles. - These are of the form: - BIND( - IF(?format="application/ld+json", "0.9", - IF(?format="text/html", "0.8", - IF(?format="image/apng", "0.7", ""))) AS ?req_format) - """ - # TODO ConnegP appears to return a tuple of q values and profiles for headers, and only profiles (no q values) if they - # are not specified in QSAs. - if not isinstance(next(iter(requested_mediatypes)), tuple): - requested_mediatypes = [(1, mt) for mt in requested_mediatypes] - - line_join = "," + "\n" - ifs = ( - f"BIND(\n" - f"""{line_join.join({chr(9) + 'IF(?format="' + tup[1] + '", "' + str(tup[0]) + '"' for tup in requested_mediatypes})}""" - f""", ""{')' * len(requested_mediatypes)}\n""" - f"\tAS ?req_format)" - ) - return ifs - - -def get_endpoint_template_queries(classes: FrozenSet[URIRef]): - """ - NB the FILTER clause here should NOT be required but RDFLib has a bug (perhaps related to the +/* operators - - requires further investigation). Removing the FILTER clause will return too many results in instances where there - should be NO results - as if the VALUES ?classes clause is not used. - """ - query = f""" - PREFIX ont: - PREFIX xsd: - - SELECT ?endpoint ?parent_endpoint ?relation_direction ?relation_predicate ?endpoint_template ?distance - {{ - VALUES ?classes {{ {" ".join('<' + str(klass) + '>' for klass in classes)} }} - {{ - ?endpoint a ont:ObjectEndpoint ; - ont:endpointTemplate ?endpoint_template ; - ont:deliversClasses ?classes . - BIND("0"^^xsd:integer AS ?distance) - }} - UNION - {{ - ?parent_endpoint ?relation_direction ?relation_predicate . - ?endpoint ?ep_relation_direction ?ep_relation_predicate ; - ont:endpointTemplate ?endpoint_template ; - ont:deliversClasses ?classes . - FILTER(?classes IN ({", ".join('<' + str(klass) + '>' for klass in classes)})) - VALUES ?relation_direction {{ont:focusToParentRelation ont:parentToFocusRelation}} - VALUES ?ep_relation_direction {{ont:focusToParentRelation ont:parentToFocusRelation}} - {{ SELECT ?parent_endpoint ?endpoint (count(?intermediate) as ?distance) - {{ - ?endpoint ont:parentEndpoint* ?intermediate ; - ont:deliversClasses ?classes . - ?intermediate ont:parentEndpoint* ?parent_endpoint . - ?intermediate a ?intermediateEPClass . - ?parent_endpoint a ?parentEPClass . 
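The two VALUES clauses that follow pin both the intermediate and parent nodes of the ont:parentEndpoint* chain to ont:ObjectEndpoint, so only object endpoints contribute to the ?distance count that the outer ORDER BY ASC(?distance) uses to prefer the nearest parent endpoint.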
- VALUES ?intermediateEPClass {{ont:ObjectEndpoint}} - VALUES ?parentEPClass {{ont:ObjectEndpoint}} - }} - GROUP BY ?parent_endpoint ?endpoint - - }} - }} - }} ORDER BY ASC(?distance) - """ - return query - - -def generate_relationship_query( - uri: URIRef, endpoint_to_relations: Dict[URIRef, List[Tuple[URIRef, Literal]]] -): - """ - Generates a SPARQL query of the form: - SELECT * {{ SELECT ?endpoint ?parent_1 ?parent_2 - WHERE { - BIND("/s/catalogs/$parent_1/collections/$object" as ?endpoint) - ?parent_1 . - }}} - """ - if not endpoint_to_relations: - return None - subqueries = [] - for endpoint, relations in endpoint_to_relations.items(): - subquery = f"""{{ SELECT ?endpoint {" ".join(["?parent_" + str(i + 1) for i, pred in enumerate(relations)])} - WHERE {{\n BIND("{endpoint}" as ?endpoint)\n""" - uri_str = f"<{uri}>" - for i, relation in enumerate(relations): - predicate, direction = relation - if predicate: - parent = "?parent_" + str(i) - if direction == URIRef("https://prez.dev/ont/parentToFocusRelation"): - subquery += f"{parent} <{predicate}> {uri_str} .\n" - else: # assuming the direction is "focus_to_parent" - subquery += f"{uri_str} <{predicate}> {parent} .\n" - uri_str = parent - subquery += "}}" - subqueries.append(subquery) - - union_query = "SELECT * {" + " UNION ".join(subqueries) + "}" - return union_query - - -def startup_count_objects(): - """ - Retrieves hardcoded counts for collections in the dataset (feature collections, datasets etc.) - """ - return f"""PREFIX prez: -CONSTRUCT {{ ?collection prez:count ?count }} -WHERE {{ ?collection prez:count ?count }}""" diff --git a/temp/grammar/grammar.py b/temp/grammar/grammar.py index 92e53a97..5c1b4d20 100755 --- a/temp/grammar/grammar.py +++ b/temp/grammar/grammar.py @@ -5,12 +5,10 @@ from typing import List, Union, Optional, Generator, Tuple from pydantic import BaseModel, field_validator -from rdflib import URIRef, Variable, BNode, Literal +from rdflib import URIRef, Variable from rdflib.plugins.sparql import prepareQuery from rdflib.plugins.sparql.algebra import translateAlgebra -from prez.reference_data.prez_ns import PREZ - log = logging.getLogger(__name__) @@ -24,11 +22,14 @@ def __str__(self): return "".join(part for part in self.render()) def __repr__(self): - return f"{self.__class__.__name__}({self})" + return f"{self.__class__.__name__} ({self})" def render(self): raise NotImplementedError("Subclasses must implement this method.") + def to_string(self): + return self.__str__() + def collect_triples(self) -> List[SimplifiedTriple]: """ Recursively collect SimplifiedTriple instances from this object. @@ -345,7 +346,7 @@ class ConditionalOrExpression(SPARQLGrammarBase): def render(self) -> Generator[str, None, None]: for i, conditional_and_expression in enumerate( - self.conditional_and_expressions + self.conditional_and_expressions ): yield from conditional_and_expression.render() if i < len(self.conditional_and_expressions) - 1: @@ -686,10 +687,10 @@ def render(self) -> Generator[str, None, None]: @classmethod def filter_relational( - cls, - focus: PrimaryExpression, - comparators: Union[PrimaryExpression, List[PrimaryExpression]], - operator: str, + cls, + focus: PrimaryExpression, + comparators: Union[PrimaryExpression, List[PrimaryExpression]], + operator: str, ) -> Filter: """ Convenience method to create a FILTER clause to compare the focus node to comparators. 
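Filter.filter_relational, whose signature is reflowed in the hunk above, is exercised by the search-grammar tests added later in this series; condensed from tests/test_search_grammar.py, a call looks like:

    focus = PrimaryExpression(
        content=BuiltInCall(function_name="LCASE", arguments=[Var(value="match")])
    )
    term = PrimaryExpression(content=Var(value="search_term"))
    f = Filter.filter_relational(focus=focus, comparators=term, operator="=")
    # f.to_string() renders roughly as: FILTER (LCASE(?match) = ?search_term)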
@@ -1053,7 +1054,7 @@ def render(self) -> Generator[str, None, None]: @classmethod def create_with_one_expr( - cls, function_name: str, expression: PrimaryExpression + cls, function_name: str, expression: PrimaryExpression ) -> "BuiltInCall": """ Convenience method for functions that take a single PrimaryExpression as an argument. @@ -1063,7 +1064,7 @@ def create_with_one_expr( @classmethod def create_with_n_expr( - cls, function_name: str, expressions: List[PrimaryExpression] + cls, function_name: str, expressions: List[PrimaryExpression] ) -> "BuiltInCall": """ Convenience method for functions that take a list of PrimaryExpressions as arguments. diff --git a/temp/test_search.py b/temp/test_search.py deleted file mode 100755 index 0255d2d9..00000000 --- a/temp/test_search.py +++ /dev/null @@ -1,13 +0,0 @@ -from rdflib import RDFS - -from prez.sparql.search_query import SearchQuery - -# from temp.grammar import SearchQuery - -test = SearchQuery( - search_term="test", - pred_vals=[RDFS.label], - limit=10, - offset=0, -).render() -print("") diff --git a/test_data/spaceprez.ttl b/test_data/spaceprez.ttl index 446ac86e..380bb9f0 100755 --- a/test_data/spaceprez.ttl +++ b/test_data/spaceprez.ttl @@ -4,10 +4,11 @@ PREFIX ex: PREFIX geo: PREFIX rdfs: -ex:Dataset a dcat:Dataset ; - rdfs:label "Dataset" ; - rdfs:member ex:FeatureCollection ; - ex:property "top level dataset property" ; + +ex:SpacePrezCatalog a dcat:Catalog ; + dcterms:title "SpacePrez Catalog" ; + dcterms:description "A catalog of SpacePrez data" ; + dcterms:hasPart ex:FeatureCollection ; . ex:FeatureCollection a geo:FeatureCollection ; diff --git a/tests/_test_cql.py b/tests/_test_cql.py index c2e2f5e0..ee97f148 100755 --- a/tests/_test_cql.py +++ b/tests/_test_cql.py @@ -7,7 +7,7 @@ from prez.app import app from prez.dependencies import get_repo -from prez.sparql.methods import Repo, PyoxigraphRepo +from prez.repositories import Repo, PyoxigraphRepo from urllib.parse import quote_plus diff --git a/tests/test_count.py b/tests/test_count.py index c4dc4cc9..e7c0433d 100755 --- a/tests/test_count.py +++ b/tests/test_count.py @@ -6,7 +6,7 @@ from prez.app import app from prez.dependencies import get_repo -from prez.sparql.methods import Repo, PyoxigraphRepo +from prez.repositories import Repo, PyoxigraphRepo @pytest.fixture(scope="session") diff --git a/tests/test_dd_profiles.py b/tests/test_dd_profiles.py index 194a63bf..3707d3f4 100755 --- a/tests/test_dd_profiles.py +++ b/tests/test_dd_profiles.py @@ -8,7 +8,7 @@ from prez.app import app from prez.dependencies import get_repo -from prez.sparql.methods import Repo, PyoxigraphRepo +from prez.repositories import Repo, PyoxigraphRepo @pytest.fixture(scope="session") diff --git a/tests/test_endpoints_cache.py b/tests/test_endpoints_cache.py index b0f33b4b..56462677 100755 --- a/tests/test_endpoints_cache.py +++ b/tests/test_endpoints_cache.py @@ -7,7 +7,7 @@ from prez.app import app from prez.dependencies import get_repo -from prez.sparql.methods import Repo, PyoxigraphRepo +from prez.repositories import Repo, PyoxigraphRepo @pytest.fixture(scope="session") diff --git a/tests/test_endpoints_catprez.py b/tests/test_endpoints_catprez.py index 8f0016cd..c7f68a83 100755 --- a/tests/test_endpoints_catprez.py +++ b/tests/test_endpoints_catprez.py @@ -1,3 +1,4 @@ +import asyncio import time from pathlib import Path @@ -6,11 +7,10 @@ from pyoxigraph.pyoxigraph import Store from rdflib import Graph, URIRef from rdflib.namespace import RDF, DCAT -from rdflib.compare import isomorphic from 
prez.app import app from prez.dependencies import get_repo -from prez.sparql.methods import Repo, PyoxigraphRepo +from prez.repositories import Repo, PyoxigraphRepo @pytest.fixture(scope="session") @@ -18,8 +18,8 @@ def test_store() -> Store: # Create a new pyoxigraph Store store = Store() - for file in Path(__file__).parent.glob("../test_data/catprez.ttl"): - store.load(file.read_bytes(), "text/turtle") + file = Path("../test_data/catprez.ttl") + store.load(file.read_bytes(), "text/turtle") return store @@ -51,7 +51,7 @@ def override_get_repo(): app.dependency_overrides[get_repo] = override_get_repo - with TestClient(app) as c: + with TestClient(app, backend_options={'loop_factory': asyncio.new_event_loop}) as c: wait_for_app_to_be_ready(c) yield c @@ -62,7 +62,7 @@ def override_get_repo(): @pytest.fixture(scope="session") def a_catalog_link(client): # get link for first catalog - r = client.get("/c/catalogs") + r = client.get("/catalogs") g = Graph().parse(data=r.text) member_uri = g.value(None, RDF.type, DCAT.Catalog) link = g.value(member_uri, URIRef(f"https://prez.dev/link", None)) @@ -81,7 +81,7 @@ def a_resource_link(client, a_catalog_link): def test_catalog_listing_anot(client): r = client.get( - f"/c/catalogs?_mediatype=text/turtle&_profile=prez:OGCListingProfile" + f"/catalogs?_mediatype=text/turtle&_profile=prez:OGCListingProfile" ) response_graph = Graph().parse(data=r.text) expected_response_1 = ( diff --git a/tests/test_endpoints_management.py b/tests/test_endpoints_management.py index e9b83d75..6afeae72 100755 --- a/tests/test_endpoints_management.py +++ b/tests/test_endpoints_management.py @@ -8,7 +8,7 @@ from prez.app import app from prez.dependencies import get_repo from prez.reference_data.prez_ns import PREZ -from prez.sparql.methods import Repo, PyoxigraphRepo +from prez.repositories import Repo, PyoxigraphRepo @pytest.fixture(scope="session") diff --git a/tests/test_endpoints_object.py b/tests/test_endpoints_object.py index c32b4b31..4d6c9678 100755 --- a/tests/test_endpoints_object.py +++ b/tests/test_endpoints_object.py @@ -8,7 +8,7 @@ from prez.app import app from prez.dependencies import get_repo -from prez.sparql.methods import Repo, PyoxigraphRepo +from prez.repositories import Repo, PyoxigraphRepo @pytest.fixture(scope="session") diff --git a/tests/test_endpoints_ok.py b/tests/test_endpoints_ok.py index 88a49e83..799c913d 100755 --- a/tests/test_endpoints_ok.py +++ b/tests/test_endpoints_ok.py @@ -1,7 +1,7 @@ import logging import time from pathlib import Path -from typing import Optional, Set, Dict +from typing import Optional, Set import pytest from fastapi.testclient import TestClient @@ -11,7 +11,7 @@ from prez.app import app from prez.dependencies import get_repo from prez.reference_data.prez_ns import PREZ -from prez.sparql.methods import Repo, PyoxigraphRepo +from prez.repositories import Repo, PyoxigraphRepo log = logging.getLogger(__name__) @@ -62,8 +62,8 @@ def override_get_repo(): app.dependency_overrides.clear() -def test_catprez_links( - client: TestClient, visited: Optional[Set] = None, link="/c/catalogs" +def test_ogcprez_links( + client: TestClient, visited: Optional[Set] = None, link="/catalogs" ): if not visited: visited = set() @@ -80,46 +80,4 @@ def test_catprez_links( print(link) if link not in visited: visited.add(link) - test_catprez_links(client, visited, str(link)) - - -def test_vocprez_links( - client: TestClient, visited: Optional[Set] = None, link="/v/catalogs" -): - if not visited: - visited = set() - response = client.get(link) - g 
= Graph().parse(data=response.text, format="turtle") - links = list(g.objects(None, PREZ.link)) - member_bnode_list = list(g.objects(None, PREZ.members)) - if member_bnode_list: - member_bnode = member_bnode_list[0] - member_links = list(g.objects(member_bnode, PREZ.link)) - links.extend(member_links) - assert response.status_code == 200 - for link in links: - print(link) - if link not in visited: - visited.add(link) - test_vocprez_links(client, visited, str(link)) - - -def test_spaceprez_links( - client: TestClient, visited: Optional[Set] = None, link="/s/catalogs" -): - if not visited: - visited = set() - response = client.get(link) - g = Graph().parse(data=response.text, format="turtle") - links = list(g.objects(None, PREZ.link)) - member_bnode_list = list(g.objects(None, PREZ.members)) - if member_bnode_list: - member_bnode = member_bnode_list[0] - member_links = list(g.objects(member_bnode, PREZ.link)) - links.extend(member_links) - assert response.status_code == 200 - for link in links: - print(link) - if link not in visited: - visited.add(link) - test_spaceprez_links(client, visited, str(link)) + test_ogcprez_links(client, visited, str(link)) diff --git a/tests/test_endpoints_profiles.py b/tests/test_endpoints_profiles.py index d232b595..35fd78c8 100755 --- a/tests/test_endpoints_profiles.py +++ b/tests/test_endpoints_profiles.py @@ -8,7 +8,7 @@ from prez.app import app from prez.dependencies import get_repo -from prez.sparql.methods import Repo, PyoxigraphRepo +from prez.repositories import Repo, PyoxigraphRepo @pytest.fixture(scope="session") @@ -52,9 +52,9 @@ def test_profile(client): def test_ogcprez_profile(client): # check the example remote profile is loaded - r = client.get("/profiles/prez:OGCProfile") + r = client.get("/profiles/prez:OGCRecordsProfile") g = Graph().parse(data=r.text) - assert (URIRef("https://prez.dev/OGCProfile"), RDF.type, PROF.Profile) in g + assert (URIRef("https://prez.dev/OGCRecordsProfile"), RDF.type, PROF.Profile) in g def test_sp_profile(client): diff --git a/tests/test_endpoints_spaceprez.py b/tests/test_endpoints_spaceprez.py index 3e1e6a07..e3264bd6 100755 --- a/tests/test_endpoints_spaceprez.py +++ b/tests/test_endpoints_spaceprez.py @@ -1,24 +1,24 @@ +import asyncio from pathlib import Path import pytest from fastapi.testclient import TestClient from pyoxigraph.pyoxigraph import Store from rdflib import Graph, URIRef -from rdflib.compare import isomorphic -from rdflib.namespace import RDF, DCAT, RDFS, GEO +from rdflib.namespace import RDF, DCAT, GEO from prez.app import app from prez.dependencies import get_repo -from prez.sparql.methods import Repo, PyoxigraphRepo +from prez.repositories import Repo, PyoxigraphRepo @pytest.fixture(scope="session") def test_store() -> Store: # Create a new pyoxigraph Store store = Store() - - for file in Path(__file__).parent.glob("../test_data/spaceprez.ttl"): - store.load(file.read_bytes(), "text/turtle") + + file = Path("../test_data/spaceprez.ttl") + store.load(file.read_bytes(), "text/turtle") return store @@ -37,7 +37,7 @@ def override_get_repo(): app.dependency_overrides[get_repo] = override_get_repo - with TestClient(app) as c: + with TestClient(app, backend_options={'loop_factory': asyncio.new_event_loop}) as c: yield c # Remove the override to ensure subsequent tests are unaffected @@ -45,21 +45,21 @@ def override_get_repo(): @pytest.fixture(scope="session") -def a_dataset_link(client): - r = client.get("/s/catalogs") +def a_catalog_link(client): + r = client.get("/catalogs") g = 
Graph().parse(data=r.text) - member_uri = g.value(None, RDF.type, DCAT.Dataset) + member_uri = g.value(None, RDF.type, DCAT.Catalog) link = g.value(member_uri, URIRef(f"https://prez.dev/link", None)) return link @pytest.fixture(scope="session") -def an_fc_link(client, a_dataset_link): - r = client.get(f"{a_dataset_link}/collections") +def an_fc_link(client, a_catalog_link): + r = client.get(f"{a_catalog_link}/collections") g = Graph().parse(data=r.text) links = g.objects(subject=None, predicate=URIRef(f"https://prez.dev/link")) for link in links: - if link != a_dataset_link: + if link != a_catalog_link: return link @@ -73,13 +73,13 @@ def a_feature_link(client, an_fc_link): return link -def test_dataset_anot(client, a_dataset_link): - r = client.get(f"{a_dataset_link}?_mediatype=text/turtle") +def test_dataset_anot(client, a_catalog_link): + r = client.get(f"{a_catalog_link}?_mediatype=text/turtle") response_graph = Graph().parse(data=r.text) expected_response_1 = ( - URIRef("https://example.com/Dataset"), + URIRef("https://example.com/SpacePrezCatalog"), RDF.type, - DCAT.Dataset, + DCAT.Catalog, ) assert next(response_graph.triples(expected_response_1)) diff --git a/tests/test_endpoints_vocprez.py b/tests/test_endpoints_vocprez.py index a39a32fd..69222a35 100755 --- a/tests/test_endpoints_vocprez.py +++ b/tests/test_endpoints_vocprez.py @@ -9,7 +9,7 @@ from prez.app import app from prez.dependencies import get_repo -from prez.sparql.methods import Repo, PyoxigraphRepo +from prez.repositories import Repo, PyoxigraphRepo @pytest.fixture(scope="session") diff --git a/tests/test_shacl_parsing.py b/tests/test_node_selection_shacl.py similarity index 89% rename from tests/test_shacl_parsing.py rename to tests/test_node_selection_shacl.py index 78687162..29c533dc 100755 --- a/tests/test_shacl_parsing.py +++ b/tests/test_node_selection_shacl.py @@ -1,8 +1,8 @@ -from temp.shacl_nodeshapes2sparql import NodeShape, PropertyShape +from prez.services.query_generation.shacl_node_selection import NodeShape, PropertyShape from rdflib import Graph, URIRef import pytest -endpoints_graph = Graph().parse("tests/data/nodeshapes/endpoints.ttl", format="turtle") +endpoints_graph = Graph().parse("prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl", format="turtle") # @pytest.fixture diff --git a/tests/test_redirect_endpoint.py b/tests/test_redirect_endpoint.py index 92653193..1e66b14f 100755 --- a/tests/test_redirect_endpoint.py +++ b/tests/test_redirect_endpoint.py @@ -6,7 +6,7 @@ from prez.app import app from prez.dependencies import get_repo -from prez.sparql.methods import Repo, PyoxigraphRepo +from prez.repositories import Repo, PyoxigraphRepo @pytest.fixture(scope="session") diff --git a/tests/test_search.py b/tests/test_search.py index 990fc012..3d429002 100755 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -9,7 +9,7 @@ from prez.app import app from prez.dependencies import get_repo -from prez.sparql.methods import Repo, PyoxigraphRepo +from prez.repositories import Repo, PyoxigraphRepo @pytest.fixture(scope="session") diff --git a/tests/test_search_grammar.py b/tests/test_search_grammar.py new file mode 100644 index 00000000..9c104a29 --- /dev/null +++ b/tests/test_search_grammar.py @@ -0,0 +1,238 @@ +from rdflib import DCAT + +from prez.reference_data.prez_ns import PREZ +from temp.grammar.grammar import * + +""" +SELECT ?search_result_uri ?predicate ?match ?weight (URI(CONCAT("urn:hash:", SHA256(CONCAT(STR(?search_result_uri), STR(?predicate), STR(?match), STR(?weight))))) AS 
?hashID) + WHERE { + SELECT ?search_result_uri ?predicate ?match (SUM(?w) AS ?weight) + WHERE + { + ?search_result_uri ?predicate ?match . + VALUES ?predicate { $predicates } + { + ?search_result_uri ?predicate ?match . + BIND (100 AS ?w) + FILTER (LCASE(?match) = "$term") + } + UNION + { + ?search_result_uri ?predicate ?match . + BIND (20 AS ?w) + FILTER (REGEX(?match, "^$term", "i")) + } + UNION + { + ?search_result_uri ?predicate ?match . + BIND (10 AS ?w) + FILTER (REGEX(?match, "$term", "i")) + } + } + GROUP BY ?search_result_uri ?predicate ?match + } + ORDER BY DESC(?weight) +""" + +all_vars = { + "sr_uri": Var(value="search_result_uri"), + "pred": Var(value="predicate"), + "match": Var(value="match"), + "weight": Var(value="weight"), + "w": Var(value="w"), + "search_term": Var(value="search_term"), +} + + +def test_main(): + # Assuming that the classes are defined as per your previous message + + # Create the necessary variables + # Create the necessary variables as PrimaryExpressions wrapped in STR function calls + sr_uri = Var(value="search_result_uri") + pred = Var(value="predicate") + match = Var(value="match") + weight = Var(value="weight") + + str_sr_uri = PrimaryExpression( + content=BuiltInCall.create_with_one_expr( + "STR", PrimaryExpression(content=sr_uri) + ) + ) + str_pred = PrimaryExpression( + content=BuiltInCall.create_with_one_expr("STR", PrimaryExpression(content=pred)) + ) + str_match = PrimaryExpression( + content=BuiltInCall.create_with_one_expr( + "STR", PrimaryExpression(content=match) + ) + ) + str_weight = PrimaryExpression( + content=BuiltInCall.create_with_one_expr( + "STR", PrimaryExpression(content=weight) + ) + ) + + # Create the inner CONCAT function call with the STR-wrapped variables + inner_concat = BuiltInCall.create_with_n_expr( + "CONCAT", [str_sr_uri, str_pred, str_match, str_weight] + ) + + # Wrap the inner CONCAT in a PrimaryExpression for the SHA256 function call + sha256_expr = PrimaryExpression( + content=BuiltInCall.create_with_one_expr( + "SHA256", PrimaryExpression(content=inner_concat) + ) + ) + + # Create the outer CONCAT function call, including the "urn:hash:" literal + urn_literal = PrimaryExpression(content=RDFLiteral(value="urn:hash:")) + outer_concat = BuiltInCall.create_with_n_expr("CONCAT", [urn_literal, sha256_expr]) + + # Finally, create the URI function call + uri_expr = BuiltInCall.create_with_one_expr( + "URI", PrimaryExpression(content=outer_concat) + ) + + # Render the expression + print("".join(part for part in uri_expr.render())) + + +def test_primary_expression(): + # Create a PrimaryExpression + primary_expr = PrimaryExpression(content=Var(value="myVar")) + + # Use the convenience method to create a BuiltInCall with the PrimaryExpression + str_function_call = BuiltInCall.create_with_one_expr("STR", primary_expr) + + # Render the BuiltInCall + str_function_call.to_string() + + +def test_multiple_primary_expression(): + # Create a list of PrimaryExpressions + primary_expressions = [ + PrimaryExpression(content=Var(value="var1")), + PrimaryExpression(content=Var(value="var2")), + ] + + # Use the convenience method to create a BuiltInCall with the list of PrimaryExpressions + concat_function_call = BuiltInCall.create_with_n_expr("CONCAT", primary_expressions) + + # Render the BuiltInCall + concat_function_call.to_string() + + +def test_aggregate(): + # function_name: str # One of 'COUNT', 'SUM', 'MIN', 'MAX', 'AVG', 'SAMPLE', 'GROUP_CONCAT' + # distinct: bool = False + # expression: Optional[ + # Union[str, Expression] + 
# ] = None # '*' for COUNT, else Expression + # separator: Optional[str] = None # Only used for GROUP_CONCAT + """ + SUM(?w) + """ + pr_exp = PrimaryExpression(content=(all_vars["w"])) + exp = Expression.from_primary_expr(pr_exp) + count_expression = Aggregate(function_name="SUM", expression=exp) + print("".join(part for part in count_expression.render())) + + +def test_regex(): + # Example usage of RegexExpression + pe1 = PrimaryExpression(content=Var(value="textVar")) + pe2 = PrimaryExpression(content=RDFLiteral(value="^regexPattern")) + pe3 = PrimaryExpression(content=RDFLiteral(value="i")) + regex_expression = RegexExpression( + text_expression=Expression.from_primary_expr(pe1), # Expression for the text + pattern_expression=Expression.from_primary_expr( + pe2 + ), # Expression for the regex pattern + flags_expression=Expression.from_primary_expr( + pe3 + ), # Optional: Expression for regex flags + ) + + # Render the RegexExpression + print("".join(part for part in regex_expression.render())) + + +def test_first_part_search(): + # Variables for outer SELECT + + expressions = [PrimaryExpression(content=v) for v in all_vars.values()] + str_builtins = [BuiltInCall.create_with_one_expr("STR", e) for e in expressions] + str_expressions = [PrimaryExpression(content=b) for b in str_builtins] + urn_literal = PrimaryExpression(content=RDFLiteral(value="urn:hash:")) + all_expressions = [urn_literal] + str_expressions + uri_expr = BuiltInCall.create_with_n_expr("CONCAT", all_expressions) + print("".join(part for part in uri_expr.render())) + + +def test_inner_ggp_search(): + # inner where + # { + # ?search_result_uri ?predicate ?match. + # BIND(100 AS ?w) + # FILTER(LCASE(?match) = "$term") + # } + ggp = GroupGraphPattern(content=GroupGraphPatternSub()) + + # select + ggp.content.add_triple( + SimplifiedTriple( + subject=all_vars["sr_uri"], + predicate=all_vars["pred"], + object=all_vars["match"], + ) + ) + + # bind + bind_for_w = Bind( + expression=Expression.from_primary_expr( + PrimaryExpression(content=NumericLiteral(value="100")) + ), + var=Var(value="w"), + ) + bind_gpnt = GraphPatternNotTriples(content=bind_for_w) + ggp.content.add_pattern(bind_gpnt) + + # filter + bifc = BuiltInCall(function_name="LCASE", arguments=[all_vars["match"]]) + pe_focus = PrimaryExpression(content=bifc) + pe_st = PrimaryExpression(content=all_vars["search_term"]) + filter_expr = Filter.filter_relational( + focus=pe_focus, comparators=pe_st, operator="=" + ) + filter_gpnt = GraphPatternNotTriples(content=filter_expr) + ggp.content.add_pattern(filter_gpnt) + + print("".join(part for part in ggp.render())) + + +def test_count_query(): + subquery = """SELECT ?focus_node { ?focus_node a dcat:Dataset }""" + + klass = IRI(value=DCAT.Dataset) + # Assuming `klass` is an instance of IRI class and `PREZ` is a predefined IRI + count_iri = IRI(value=PREZ["count"]) # Replace with actual IRI + count_var = Var(value="count") + + construct_triples = ConstructTriples( + triples=[SimplifiedTriple(subject=klass, predicate=count_iri, object=count_var)] + ) + construct_template = ConstructTemplate(construct_triples=construct_triples) + # Assuming `subquery` is a string containing the subquery + subquery_str = SubSelectString(select_string=subquery) + ggp = GroupGraphPattern(content=subquery_str) + where_clause = WhereClause(group_graph_pattern=ggp) + construct_query = ConstructQuery( + construct_template=construct_template, + where_clause=where_clause, + solution_modifier=SolutionModifier(), # Assuming no specific modifiers + ) + + 
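+# For reference, the ConstructQuery assembled in test_count_query above should
+# serialise to roughly the following SPARQL (whitespace aside). Note the
+# sub-select is spliced in verbatim via SubSelectString, so the dcat: prefix
+# would still need to be declared by the caller for the query to parse:
+#
+#   CONSTRUCT { <http://www.w3.org/ns/dcat#Dataset> <https://prez.dev/count> ?count }
+#   WHERE { { SELECT ?focus_node { ?focus_node a dcat:Dataset } } }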
+if __name__ == "__main__": + test_regex() diff --git a/tests/test_sparql.py b/tests/test_sparql.py index dddd7682..4ad1f60c 100755 --- a/tests/test_sparql.py +++ b/tests/test_sparql.py @@ -6,7 +6,7 @@ from prez.app import app from prez.dependencies import get_repo -from prez.sparql.methods import Repo, PyoxigraphRepo +from prez.repositories import Repo, PyoxigraphRepo @pytest.fixture(scope="session") From 10848636d0bb08e3a97333f6b0916df13d6a4592 Mon Sep 17 00:00:00 2001 From: david Date: Mon, 19 Feb 2024 21:45:26 +1000 Subject: [PATCH 14/25] split out node selection --- prez/models/profiles_and_mediatypes.py | 6 +- .../cql/default_context.json} | 0 .../cql/geo_function_mapping.py} | 0 .../endpoints/endpoint_metadata.ttl | 5 + .../profiles/ogc_records_profile.ttl | 4 +- prez/renderers/json_renderer.py | 24 +-- prez/repositories/__init__.py | 2 +- prez/repositories/base.py | 15 +- prez/repositories/pyoxigraph.py | 4 +- prez/repositories/remote_sparql.py | 3 +- prez/routers/cql.py | 36 ++-- prez/routers/ogc_router.py | 5 +- prez/routers/profiles.py | 2 +- prez/routers/search.py | 100 ++++++----- prez/routers/sparql.py | 5 +- prez/services/annotations.py | 10 +- prez/services/generate_profiles.py | 29 ++-- prez/services/link_generation.py | 104 ++++++++---- prez/services/listings.py | 103 +++++++----- prez/services/objects.py | 17 +- prez/services/query_generation/classes.py | 56 ++++--- prez/services/query_generation/connegp.py | 10 +- prez/services/query_generation/count.py | 13 +- .../{cql2sparql.py => node_selection/cql.py} | 2 +- .../endpoint_shacl.py} | 83 ++++++--- .../{ => node_selection}/search.py | 10 +- prez/services/query_generation/umbrella.py | 157 ++++++++---------- temp/grammar/__init__.py | 75 +++++++-- temp/grammar/grammar.py | 14 +- ...profiles.py => TO_FIX_test_dd_profiles.py} | 0 ...ez.py => TO_FIX_test_endpoints_vocprez.py} | 0 .../{test_search.py => TO_FIX_test_search.py} | 0 tests/test_endpoints_catprez.py | 6 +- tests/test_endpoints_object.py | 41 ++--- tests/test_endpoints_spaceprez.py | 14 +- tests/test_node_selection_shacl.py | 52 +++--- 36 files changed, 573 insertions(+), 434 deletions(-) rename prez/{services/query_generation/default_cql_context.json => reference_data/cql/default_context.json} (100%) rename prez/{services/query_generation/cql_sparql_reference.py => reference_data/cql/geo_function_mapping.py} (100%) rename prez/services/query_generation/{cql2sparql.py => node_selection/cql.py} (99%) rename prez/services/query_generation/{shacl_node_selection.py => node_selection/endpoint_shacl.py} (79%) rename prez/services/query_generation/{ => node_selection}/search.py (98%) rename tests/{test_dd_profiles.py => TO_FIX_test_dd_profiles.py} (100%) rename tests/{test_endpoints_vocprez.py => TO_FIX_test_endpoints_vocprez.py} (100%) rename tests/{test_search.py => TO_FIX_test_search.py} (100%) diff --git a/prez/models/profiles_and_mediatypes.py b/prez/models/profiles_and_mediatypes.py index 92f89148..93349680 100755 --- a/prez/models/profiles_and_mediatypes.py +++ b/prez/models/profiles_and_mediatypes.py @@ -38,9 +38,9 @@ def populate_requested_types(self): ) = get_requested_profile_and_mediatype(request) return self + async def populate_profile_and_mediatype( - profiles_mediatypes_model: ProfilesMediatypesInfo, - system_repo: Repo + profiles_mediatypes_model: ProfilesMediatypesInfo, system_repo: Repo ): req_profiles = profiles_mediatypes_model.req_profiles req_profiles_token = profiles_mediatypes_model.req_profiles_token @@ -55,4 +55,4 @@ async def 
populate_profile_and_mediatype( profiles_mediatypes_model.avail_profile_uris, ) = await get_profiles_and_mediatypes( classes, system_repo, req_profiles, req_profiles_token, req_mediatypes, listing - ) \ No newline at end of file + ) diff --git a/prez/services/query_generation/default_cql_context.json b/prez/reference_data/cql/default_context.json similarity index 100% rename from prez/services/query_generation/default_cql_context.json rename to prez/reference_data/cql/default_context.json diff --git a/prez/services/query_generation/cql_sparql_reference.py b/prez/reference_data/cql/geo_function_mapping.py similarity index 100% rename from prez/services/query_generation/cql_sparql_reference.py rename to prez/reference_data/cql/geo_function_mapping.py diff --git a/prez/reference_data/endpoints/endpoint_metadata.ttl b/prez/reference_data/endpoints/endpoint_metadata.ttl index d08ad3b2..f3ad2b72 100644 --- a/prez/reference_data/endpoints/endpoint_metadata.ttl +++ b/prez/reference_data/endpoints/endpoint_metadata.ttl @@ -47,4 +47,9 @@ ogce:item-object ogce:cql-queryables a ont:ListingEndpoint ; ont:relevantShapes ex:queryables ; +. + +ogce:search + a ont:ListingEndpoint ; + ont:relevantShapes ex:search ; . \ No newline at end of file diff --git a/prez/reference_data/profiles/ogc_records_profile.ttl b/prez/reference_data/profiles/ogc_records_profile.ttl index 0b14c12e..10923704 100755 --- a/prez/reference_data/profiles/ogc_records_profile.ttl +++ b/prez/reference_data/profiles/ogc_records_profile.ttl @@ -25,7 +25,7 @@ prez:OGCRecordsProfile altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:hasNodeShape [ a sh:NodeShape ; - sh:targetClass dcat:Catalog , skos:Concept , geo:Feature , geo:FeatureCollection , skos:Collection ; + sh:targetClass dcat:Catalog , skos:Concept , geo:Feature , geo:FeatureCollection , skos:Collection , prez:SearchResult ; altr-ext:hasDefaultProfile prez:OGCListingProfile ] , [ a sh:NodeShape ; @@ -49,7 +49,7 @@ prez:OGCListingProfile "text/turtle" ; altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:constrainsClass dcat:Catalog , skos:Collection , geo:Feature , geo:FeatureCollection , skos:Concept , - dcat:Resource , prof:Profile ; + dcat:Resource , prof:Profile , prez:SearchResult ; sh:property [ sh:path rdf:type ] . 
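Taken together, the endpoint_metadata.ttl and profile additions above register search as an ordinary listing endpoint delivering prez:SearchResult, which is what lets the rewritten search router further down in this patch delegate to listing_function instead of hand-assembling a query. Condensed from prez/routers/search.py below, the handler body reduces to:

    endpoint_uri = URIRef(request.scope.get("route").name)  # resolves to OGCE["search"]
    return await listing_function(
        request, repo, system_repo, endpoint_uri,
        hierarchy_level=1, page=page, per_page=per_page,
        search_term=request.query_params.get("q"),
    )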
diff --git a/prez/renderers/json_renderer.py b/prez/renderers/json_renderer.py index a0492dfc..42c36377 100755 --- a/prez/renderers/json_renderer.py +++ b/prez/renderers/json_renderer.py @@ -26,11 +26,11 @@ def _get_label_predicates(profile_graph: Graph, profile: URIRef) -> list[Node]: def _get_child_iris( - graph: Graph, - iri: Node, - child_to_focus_predicates: list[Node], - parent_to_focus_predicates: list[Node], - focus_to_child_predicates: list[Node], + graph: Graph, + iri: Node, + child_to_focus_predicates: list[Node], + parent_to_focus_predicates: list[Node], + focus_to_child_predicates: list[Node], ) -> list[Node]: children = [] for predicate in child_to_focus_predicates: @@ -52,7 +52,7 @@ def _get_child_iris( def create_graph_item( - iri: str, predicates: list[Node], graph: Graph, context: dict + iri: str, predicates: list[Node], graph: Graph, context: dict ) -> tuple[dict, dict]: item = {"iri": iri} for predicate in predicates: @@ -65,9 +65,9 @@ def create_graph_item( async def render_json_dropdown( - graph: Graph, - profile: URIRef, - selected_class: URIRef, + graph: Graph, + profile: URIRef, + selected_class: URIRef, ) -> dict: profile_graph = profiles_graph_cache.cbd(profile) @@ -87,9 +87,9 @@ async def render_json_dropdown( ) = get_listing_predicates(profile, selected_class) if ( - not child_to_focus_predicates - and not focus_to_parent_predicates - and not focus_to_child_predicates + not child_to_focus_predicates + and not focus_to_parent_predicates + and not focus_to_child_predicates ): # This is a listing view, e.g. /v/vocab. node_shape = profile_graph.value( diff --git a/prez/repositories/__init__.py b/prez/repositories/__init__.py index b2acc6b2..42f4690f 100644 --- a/prez/repositories/__init__.py +++ b/prez/repositories/__init__.py @@ -3,4 +3,4 @@ from .pyoxigraph import PyoxigraphRepo from .remote_sparql import RemoteSparqlRepo -__all__ = ["Repo", "OxrdflibRepo", "PyoxigraphRepo", "RemoteSparqlRepo"] \ No newline at end of file +__all__ = ["Repo", "OxrdflibRepo", "PyoxigraphRepo", "RemoteSparqlRepo"] diff --git a/prez/repositories/base.py b/prez/repositories/base.py index a421d903..3e27a481 100755 --- a/prez/repositories/base.py +++ b/prez/repositories/base.py @@ -1,17 +1,10 @@ import asyncio import logging -import time from abc import ABC, abstractmethod from typing import List from typing import Tuple -from urllib.parse import quote_plus -import httpx -import pyoxigraph -from fastapi.concurrency import run_in_threadpool -from rdflib import Namespace, Graph, URIRef, Literal, BNode - -from prez.config import settings +from rdflib import Namespace, Graph, URIRef PREZ = Namespace("https://prez.dev/") @@ -53,9 +46,3 @@ def sparql( self, query: str, raw_headers: list[tuple[bytes, bytes]], method: str = "GET" ): pass - - - - - - diff --git a/prez/repositories/pyoxigraph.py b/prez/repositories/pyoxigraph.py index 514f2baf..ecfc9e06 100644 --- a/prez/repositories/pyoxigraph.py +++ b/prez/repositories/pyoxigraph.py @@ -16,7 +16,7 @@ def __init__(self, pyoxi_store: pyoxigraph.Store): self.pyoxi_store = pyoxi_store def _handle_query_solution_results( - self, results: pyoxigraph.QuerySolutions + self, results: pyoxigraph.QuerySolutions ) -> dict: """Organise the query results into format serializable by FastAPIs JSONResponse.""" variables = results.variables @@ -84,7 +84,7 @@ async def tabular_query_to_table(self, query: str, context: URIRef = None) -> li ) async def sparql( - self, query: str, raw_headers: list[tuple[bytes, bytes]], method: str = "" + self, query: str, 
raw_headers: list[tuple[bytes, bytes]], method: str = "" ) -> list | Graph | bool: return self._sparql(query) diff --git a/prez/repositories/remote_sparql.py b/prez/repositories/remote_sparql.py index 4dd48732..5053d268 100644 --- a/prez/repositories/remote_sparql.py +++ b/prez/repositories/remote_sparql.py @@ -1,4 +1,5 @@ import logging +from urllib.parse import quote_plus import httpx from rdflib import Namespace, Graph, URIRef @@ -51,7 +52,7 @@ async def tabular_query_to_table(self, query: str, context: URIRef = None): return context, response.json()["results"]["bindings"] async def sparql( - self, query: str, raw_headers: list[tuple[bytes, bytes]], method: str = "GET" + self, query: str, raw_headers: list[tuple[bytes, bytes]], method: str = "GET" ): """Sends a starlette Request object (containing a SPARQL query in the URL parameters) to a proxied SPARQL endpoint.""" diff --git a/prez/routers/cql.py b/prez/routers/cql.py index 7dcb9a2b..30d5f972 100755 --- a/prez/routers/cql.py +++ b/prez/routers/cql.py @@ -24,12 +24,12 @@ name=OGCE["cql-post"], ) async def cql_post_endpoint( - request: Request, - cql_parser: Optional[dict] = Depends(cql_post_parser_dependency), - page: int = 1, - per_page: int = 20, - repo: Repo = Depends(get_repo), - system_repo: Repo = Depends(get_system_repo), + request: Request, + cql_parser: Optional[dict] = Depends(cql_post_parser_dependency), + page: int = 1, + per_page: int = 20, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), ): endpoint_uri = URIRef("https://prez.dev/endpoint/cql/post") return await listing_function( @@ -48,12 +48,12 @@ async def cql_post_endpoint( name=OGCE["cql-get"], ) async def cql_get_endpoint( - request: Request, - cql_parser: Optional[dict] = Depends(cql_get_parser_dependency), - page: int = 1, - per_page: int = 20, - repo: Repo = Depends(get_repo), - system_repo: Repo = Depends(get_system_repo), + request: Request, + cql_parser: Optional[dict] = Depends(cql_get_parser_dependency), + page: int = 1, + per_page: int = 20, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), ): endpoint_uri = URIRef("https://prez.dev/endpoint/cql/get") return await listing_function( @@ -72,12 +72,12 @@ async def cql_get_endpoint( name=OGCE["cql-queryables"], ) async def queryables_endpoint( - request: Request, - cql_parser: Optional[dict] = Depends(cql_get_parser_dependency), - page: int = 1, - per_page: int = 20, - repo: Repo = Depends(get_repo), - system_repo: Repo = Depends(get_system_repo), + request: Request, + cql_parser: Optional[dict] = Depends(cql_get_parser_dependency), + page: int = 1, + per_page: int = 20, + repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), ): endpoint_uri = URIRef(request.scope.get("route").name) return await listing_function( diff --git a/prez/routers/ogc_router.py b/prez/routers/ogc_router.py index 52638976..9a6ed2c8 100755 --- a/prez/routers/ogc_router.py +++ b/prez/routers/ogc_router.py @@ -98,7 +98,10 @@ async def item_listing( system_repo, endpoint_uri, hierarchy_level=3, - path_nodes={"path_node_1": IRI(value=path_node_1_uri), "path_node_2": IRI(value=path_node_2_uri)}, + path_nodes={ + "path_node_1": IRI(value=path_node_1_uri), + "path_node_2": IRI(value=path_node_2_uri), + }, page=page, per_page=per_page, parent_uri=path_node_1_uri, diff --git a/prez/routers/profiles.py b/prez/routers/profiles.py index bfc03481..f46fff66 100755 --- a/prez/routers/profiles.py +++ b/prez/routers/profiles.py @@ -29,7 +29,7 @@ async def profiles( 
hierarchy_level=1, page=page, per_page=per_page, - endpoint_structure=("profiles",) + endpoint_structure=("profiles",), ) diff --git a/prez/routers/search.py b/prez/routers/search.py index ff6930ab..780dd41a 100755 --- a/prez/routers/search.py +++ b/prez/routers/search.py @@ -1,59 +1,77 @@ +from typing import Optional + from fastapi import APIRouter, Request, Depends -from fastapi.responses import PlainTextResponse -from rdflib import URIRef, Literal -from rdflib.namespace import RDF +from rdflib import URIRef +from rdflib.namespace import Namespace -from prez.config import settings from prez.dependencies import get_repo -from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo, populate_profile_and_mediatype from prez.reference_data.prez_ns import PREZ -from prez.renderers.renderer import return_from_graph -from prez.services.link_generation import add_prez_links from prez.repositories import Repo -from prez.services.query_generation.search import SearchQuery +from prez.services.listings import listing_function router = APIRouter(tags=["Search"]) +OGCE = Namespace(PREZ["endpoint/extended-ogc-records/"]) -@router.get("/search", summary="Search") +@router.get( + path="/search", + summary="Search", + name=OGCE["search"], +) async def search( request: Request, + page: Optional[int] = 1, + per_page: Optional[int] = 20, + search_term: Optional[str] = None, repo: Repo = Depends(get_repo), system_repo: Repo = Depends(get_repo), ): term = request.query_params.get("q") - limit = request.query_params.get("limit", 10) - offset = request.query_params.get("offset", 0) - pred_vals = request.query_params.get("predicates", settings.label_predicates) - query = SearchQuery( + endpoint_uri = URIRef(request.scope.get("route").name) + return await listing_function( + request, + repo, + system_repo, + endpoint_uri, + hierarchy_level=1, + page=page, + per_page=per_page, search_term=term, - limit=limit, - offset=offset, - pred_vals=pred_vals, - ).render() - graph, _ = await repo.send_queries([query], []) - - count = len(list(graph.subjects(RDF.type, PREZ.SearchResult))) - graph.add((PREZ.SearchResult, PREZ["count"], Literal(count))) - - prof_and_mt_info = ProfilesMediatypesInfo( - request=request, classes=frozenset([PREZ.SearchResult]), system_repo=system_repo ) - await populate_profile_and_mediatype(prof_and_mt_info, system_repo) - req_mt = prof_and_mt_info.req_mediatypes - if req_mt: - if list(req_mt)[0] == "application/sparql-query": - return PlainTextResponse(query, media_type="application/sparql-query") - - if "anot+" in prof_and_mt_info.mediatype: - await add_prez_links(graph, repo) - - return await return_from_graph( - graph, - mediatype=prof_and_mt_info.mediatype, - profile=URIRef("https://prez.dev/profile/open-object"), - profile_headers=prof_and_mt_info.profile_headers, - selected_class=prof_and_mt_info.selected_class, - repo=repo, - ) + # term = request.query_params.get("q") + # limit = request.query_params.get("limit", 10) + # offset = request.query_params.get("offset", 0) + # pred_vals = request.query_params.get("predicates", settings.label_predicates) + # query = SearchQuery( + # search_term=term, + # limit=limit, + # offset=offset, + # pred_vals=pred_vals, + # ).render() + # graph, _ = await repo.send_queries([query], []) + # + # count = len(list(graph.subjects(RDF.type, PREZ.SearchResult))) + # graph.add((PREZ.SearchResult, PREZ["count"], Literal(count))) + # + # prof_and_mt_info = ProfilesMediatypesInfo( + # request=request, classes=frozenset([PREZ.SearchResult]), 
system_repo=system_repo + # ) + # await populate_profile_and_mediatype(prof_and_mt_info, system_repo) + # + # req_mt = prof_and_mt_info.req_mediatypes + # if req_mt: + # if list(req_mt)[0] == "application/sparql-query": + # return PlainTextResponse(query, media_type="application/sparql-query") + # + # if "anot+" in prof_and_mt_info.mediatype: + # await add_prez_links(graph, repo) + # + # return await return_from_graph( + # graph, + # mediatype=prof_and_mt_info.mediatype, + # profile=URIRef("https://prez.dev/profile/open-object"), + # profile_headers=prof_and_mt_info.profile_headers, + # selected_class=prof_and_mt_info.selected_class, + # repo=repo, + # ) diff --git a/prez/routers/sparql.py b/prez/routers/sparql.py index 643cb67f..616f8277 100755 --- a/prez/routers/sparql.py +++ b/prez/routers/sparql.py @@ -9,7 +9,10 @@ from starlette.responses import StreamingResponse from prez.dependencies import get_repo, get_system_repo -from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo, populate_profile_and_mediatype +from prez.models.profiles_and_mediatypes import ( + ProfilesMediatypesInfo, + populate_profile_and_mediatype, +) from prez.renderers.renderer import return_annotated_rdf from prez.repositories import Repo diff --git a/prez/services/annotations.py b/prez/services/annotations.py index 271c0adf..14df3e80 100755 --- a/prez/services/annotations.py +++ b/prez/services/annotations.py @@ -16,7 +16,7 @@ async def get_annotation_properties( - item_graph: Graph, + item_graph: Graph, ): """ Gets annotation data used for HTML display. @@ -30,9 +30,9 @@ async def get_annotation_properties( explanation_predicates = settings.provenance_predicates other_predicates = settings.other_predicates terms = ( - set(i for i in item_graph.predicates() if isinstance(i, URIRef)) - | set(i for i in item_graph.objects() if isinstance(i, URIRef)) - | set(i for i in item_graph.subjects() if isinstance(i, URIRef)) + set(i for i in item_graph.predicates() if isinstance(i, URIRef)) + | set(i for i in item_graph.objects() if isinstance(i, URIRef)) + | set(i for i in item_graph.subjects() if isinstance(i, URIRef)) ) # TODO confirm caching of SUBJECT labels does not cause issues! this could be a lot of labels. Perhaps these are # better separated and put in an LRU cache. Or it may not be worth the effort. 
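(A bounded per-term cache is one way to address the TODO above; the sketch below is illustrative only and not part of this patch — fetch_label, its return type, and the repository fallback are assumptions:)

    from functools import lru_cache

    @lru_cache(maxsize=50_000)
    def fetch_label(term: str) -> str | None:
        # Hypothetical helper: consult the TBox cache for a label for `term`,
        # falling back to a repository query; lru_cache evicts least recently
        # used entries once maxsize is reached, bounding memory use.
        ...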
@@ -89,7 +89,7 @@ def other_predicates_statement(other_predicates, uncached_terms_other): def get_annotations_from_tbox_cache( - terms: List[URIRef], label_props, description_props, explanation_props, other_props + terms: List[URIRef], label_props, description_props, explanation_props, other_props ): """ Gets labels from the TBox cache, returns a list of terms that were not found in the cache, and a graph of labels, diff --git a/prez/services/generate_profiles.py b/prez/services/generate_profiles.py index 8cf93ef4..6443d590 100755 --- a/prez/services/generate_profiles.py +++ b/prez/services/generate_profiles.py @@ -61,8 +61,6 @@ async def create_profiles_graph(repo) -> Graph: log.info(f"Remote profile(s) found and added") else: log.info("No remote profiles found") - # add profiles internal links - _add_prez_profile_links() async def get_profiles_and_mediatypes( @@ -71,10 +69,14 @@ async def get_profiles_and_mediatypes( requested_profile: URIRef = None, requested_profile_token: str = None, requested_mediatype: URIRef = None, - listing: bool = False + listing: bool = False, ): query = select_profile_mediatype( - classes, requested_profile, requested_profile_token, requested_mediatype, listing + classes, + requested_profile, + requested_profile_token, + requested_mediatype, + listing, ) log.debug(f"ConnegP query: {query}") # response = profiles_graph_cache.query(query) @@ -147,7 +149,11 @@ def generate_profiles_headers(selected_class, response, profile, mediatype): "Content-Type": mediatype, } avail_profiles = set( - (get_curie_id_for_uri(i["profile"]["value"]), i["profile"]["value"], i["title"]["value"]) + ( + get_curie_id_for_uri(i["profile"]["value"]), + i["profile"]["value"], + i["title"]["value"], + ) for i in response[1][0][1] ) avail_profiles_headers = ", ".join( @@ -174,16 +180,3 @@ def generate_profiles_headers(selected_class, response, profile, mediatype): ) avail_profile_uris = [i[1] for i in avail_profiles] return headers, avail_profile_uris - - -def _add_prez_profile_links(): - for profile in profiles_graph_cache.subjects( - predicate=RDF.type, object=PROF.Profile - ): - profiles_graph_cache.add( - ( - profile, - PREZ["link"], - Literal(f"/profiles/{get_curie_id_for_uri(profile)}"), - ) - ) diff --git a/prez/services/link_generation.py b/prez/services/link_generation.py index 1d89b4dc..56cae86b 100755 --- a/prez/services/link_generation.py +++ b/prez/services/link_generation.py @@ -11,12 +11,15 @@ from prez.services.query_generation.classes import get_classes from prez.repositories import Repo from temp.grammar import * -from prez.services.query_generation.shacl_node_selection import NodeShape +from prez.services.query_generation.node_selection.endpoint_shacl import NodeShape log = logging.getLogger(__name__) async def add_prez_links(graph: Graph, repo: Repo, endpoint_structure): + """ + Adds internal links to the given graph for all URIRefs that have a class and endpoint associated with them. + """ # get all URIRefs - if Prez can find a class and endpoint for them, an internal link will be generated. 
uris = [uri for uri in graph.all_nodes() if isinstance(uri, URIRef)] uri_to_klasses = {} @@ -28,8 +31,16 @@ async def add_prez_links(graph: Graph, repo: Repo, endpoint_structure): await _link_generation(uri, repo, klasses, graph, endpoint_structure) -async def _link_generation(uri: URIRef, repo: Repo, klasses, graph: Graph, - endpoint_structure: str = settings.endpoint_structure): +async def _link_generation( + uri: URIRef, + repo: Repo, + klasses, + graph: Graph, + endpoint_structure: str = settings.endpoint_structure, +): + """ + Generates links for the given URI if it is not already cached. + """ # check the cache quads = list( links_ids_graph_cache.quads((None, None, None, uri)) @@ -52,21 +63,37 @@ async def _link_generation(uri: URIRef, repo: Repo, klasses, graph: Graph, # part of the link. e.g. ?path_node_1 will have result(s) but is not part of the link. for solution in result[1]: # create link strings - curie_for_uri, members_link, object_link = await create_link_strings(ns.hierarchy_level, - solution, uri, - endpoint_structure) + ( + curie_for_uri, + members_link, + object_link, + ) = await create_link_strings( + ns.hierarchy_level, solution, uri, endpoint_structure + ) # add links and identifiers to graph and cache - await add_links_to_graph_and_cache(curie_for_uri, graph, members_link, object_link, uri) + await add_links_to_graph_and_cache( + curie_for_uri, graph, members_link, object_link, uri + ) else: - curie_for_uri, members_link, object_link = await create_link_strings(ns.hierarchy_level, {}, uri, - endpoint_structure) - await add_links_to_graph_and_cache(curie_for_uri, graph, members_link, object_link, uri) + curie_for_uri, members_link, object_link = await create_link_strings( + ns.hierarchy_level, {}, uri, endpoint_structure + ) + await add_links_to_graph_and_cache( + curie_for_uri, graph, members_link, object_link, uri + ) async def get_nodeshapes_constraining_class(klasses, uri): + """ + Retrieves the node shapes that constrain the given classes. + """ available_nodeshapes = [] - available_nodeshape_uris = list(endpoints_graph_cache.subjects(predicate=RDF.type, object=SH.NodeShape)) - available_nodeshape_triples = list(endpoints_graph_cache.triples_choices((None, SH.targetClass, list(klasses)))) + available_nodeshape_uris = list( + endpoints_graph_cache.subjects(predicate=RDF.type, object=SH.NodeShape) + ) + available_nodeshape_triples = list( + endpoints_graph_cache.triples_choices((None, SH.targetClass, list(klasses))) + ) if available_nodeshape_triples: for ns, _, _ in available_nodeshape_triples: if ns in available_nodeshape_uris: @@ -80,11 +107,14 @@ async def get_nodeshapes_constraining_class(klasses, uri): return available_nodeshapes -async def add_links_to_graph_and_cache(curie_for_uri, graph, members_link, object_link, uri): +async def add_links_to_graph_and_cache( + curie_for_uri, graph, members_link, object_link, uri +): + """ + Adds links and identifiers to the given graph and cache. 
+ """ quads = [] - quads.append( - (uri, PREZ["link"], Literal(object_link), uri) - ) + quads.append((uri, PREZ["link"], Literal(object_link), uri)) quads.append( (uri, DCTERMS.identifier, Literal(curie_for_uri, datatype=PREZ.identifier), uri) ) @@ -94,25 +124,29 @@ async def add_links_to_graph_and_cache(curie_for_uri, graph, members_link, objec ) if not existing_members_link: members_bn = BNode() - quads.append( - (uri, PREZ["members"], members_bn, uri) - ) - quads.append( - (members_bn, PREZ["link"], Literal(members_link), uri) - ) + quads.append((uri, PREZ["members"], members_bn, uri)) + quads.append((members_bn, PREZ["link"], Literal(members_link), uri)) for quad in quads: graph.add(quad[:3]) links_ids_graph_cache.add(quad) async def create_link_strings(hierarchy_level, solution, uri, endpoint_structure): - components = list(endpoint_structure[:int(hierarchy_level)]) - variables = reversed(["focus_node"] + [f"path_node_{i}" for i in range(1, len(components))]) + """ + Creates link strings based on the hierarchy level and solution provided. + """ + components = list(endpoint_structure[: int(hierarchy_level)]) + variables = reversed( + ["focus_node"] + [f"path_node_{i}" for i in range(1, len(components))] + ) item_link_template = Template( - "".join([f"/{comp}/${pattern}" for comp, pattern in zip(components, variables)])) + "".join([f"/{comp}/${pattern}" for comp, pattern in zip(components, variables)]) + ) curie_for_uri = get_curie_id_for_uri(uri) sol_values = {k: get_curie_id_for_uri(v["value"]) for k, v in solution.items()} - object_link = item_link_template.substitute(sol_values | {"focus_node": curie_for_uri}) + object_link = item_link_template.substitute( + sol_values | {"focus_node": curie_for_uri} + ) members_link = None if len(components) < len(list(endpoint_structure)): members_link = object_link + "/" + endpoint_structure[len(components)] @@ -120,24 +154,24 @@ async def create_link_strings(hierarchy_level, solution, uri, endpoint_structure async def get_link_components(ns, repo): + """ + Retrieves link components for the given node shape. 
+ """ link_queries = [] link_queries.append( ( ns.uri, - "".join(SubSelect( - select_clause=SelectClause( - variables_or_all=ns.path_nodes.values()), + SubSelect( + select_clause=SelectClause(variables_or_all=ns.path_nodes.values()), where_clause=WhereClause( group_graph_pattern=GroupGraphPattern( content=GroupGraphPatternSub( - triples_block=TriplesBlock( - triples=ns.triples_list - ), - graph_patterns_or_triples_blocks=ns.gpnt_list + triples_block=TriplesBlock(triples=ns.triples_list), + graph_patterns_or_triples_blocks=ns.gpnt_list, ) ) - ) - ).render()) + ), + ).to_string(), ) ) _, results = await repo.send_queries([], link_queries) diff --git a/prez/services/listings.py b/prez/services/listings.py index dbc0022e..0e69c97d 100755 --- a/prez/services/listings.py +++ b/prez/services/listings.py @@ -10,36 +10,40 @@ from prez.cache import profiles_graph_cache, endpoints_graph_cache from prez.config import settings -from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo, populate_profile_and_mediatype +from prez.models.profiles_and_mediatypes import ( + ProfilesMediatypesInfo, + populate_profile_and_mediatype, +) from prez.reference_data.prez_ns import PREZ from prez.renderers.renderer import return_from_graph from prez.services.link_generation import add_prez_links from prez.services.query_generation.classes import get_classes from prez.services.query_generation.count import CountQuery from prez.repositories import Repo -from prez.services.query_generation.search import SearchQuery +from prez.services.query_generation.node_selection.search import SearchQuery from temp.grammar import * + # from rdframe.grammar import SubSelect # from rdframe import PrezQueryConstructor from prez.services.query_generation.umbrella import PrezQueryConstructor -from prez.services.query_generation.shacl_node_selection import NodeShape +from prez.services.query_generation.node_selection.endpoint_shacl import NodeShape log = logging.getLogger(__name__) async def listing_function( - request: Request, - repo: Repo, - system_repo: Repo, - endpoint_uri: URIRef, - hierarchy_level: int, - path_nodes: Dict[str, Var | IRI] = None, - page: int = 1, - per_page: int = 20, - parent_uri: Optional[URIRef] = None, - cql_parser: CQLParser = None, - search_term: Optional[str] = None, - endpoint_structure: Tuple[str] = settings.endpoint_structure, + request: Request, + repo: Repo, + system_repo: Repo, + endpoint_uri: URIRef, + hierarchy_level: int, + path_nodes: Dict[str, Var | IRI] = None, + page: int = 1, + per_page: int = 20, + parent_uri: Optional[URIRef] = None, + cql_parser: CQLParser = None, + search_term: Optional[str] = None, + endpoint_structure: Tuple[str] = settings.endpoint_structure, ): """ # determine the relevant node selection part of the query - from SHACL, CQL, Search @@ -51,15 +55,20 @@ async def listing_function( queries = [] # determine possible SHACL node shapes for endpoint node_selection_shape, target_classes = await determine_nodeshape( - endpoint_uri, hierarchy_level, parent_uri, path_nodes, repo, system_repo) + endpoint_uri, hierarchy_level, parent_uri, path_nodes, repo, system_repo + ) if not path_nodes: path_nodes = {} - ns = NodeShape(uri=node_selection_shape, graph=endpoints_graph_cache, path_nodes=path_nodes) + if node_selection_shape: + ns = NodeShape( + uri=node_selection_shape, graph=endpoints_graph_cache, path_nodes=path_nodes + ) # determine the relevant profile - prof_and_mt_info = ProfilesMediatypesInfo(request=request, classes=target_classes, system_repo=system_repo, - 
listing=True) + prof_and_mt_info = ProfilesMediatypesInfo( + request=request, classes=target_classes, system_repo=system_repo, listing=True + ) await populate_profile_and_mediatype(prof_and_mt_info, system_repo) selected_class, selected_profile = ( prof_and_mt_info.selected_class, @@ -68,7 +77,7 @@ async def listing_function( runtime_values = {} if prof_and_mt_info.profile == URIRef( - "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" + "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" ): endpoint_uri = URIRef("https://prez.dev/endpoint/system/alt-profiles-listing") runtime_values["selectedClass"] = prof_and_mt_info.selected_class @@ -76,23 +85,35 @@ async def listing_function( runtime_values["limit"] = per_page runtime_values["offset"] = (page - 1) * per_page + cql_triples_list = [] + cql_gpnt_list = [] + + if cql_parser: + cql_parser.parse() + cql_select_ggps = cql_parser.ggps_inner_select + + if cql_select_ggps.triples_block: + cql_triples_list = cql_select_ggps.triples_block.triples + if cql_select_ggps.graph_patterns_or_triples_blocks: + for pattern in cql_select_ggps.graph_patterns_or_triples_blocks: + if isinstance(pattern, TriplesBlock): + cql_triples_list += pattern.triples + elif isinstance(pattern, GraphPatternNotTriples): + cql_gpnt_list.append(pattern) + query_constructor = PrezQueryConstructor( - runtime_values, - endpoints_graph_cache, - profiles_graph_cache, + runtime_values=runtime_values, + endpoint_graph=endpoints_graph_cache, + profile_graph=profiles_graph_cache, listing_or_object="listing", endpoint_uri=endpoint_uri, profile_uri=selected_profile, - node_selection_triples=ns.triples_list, - node_selection_gpnt=ns.gpnt_list, - target_class=target_classes + endpoint_shacl_triples=ns.triples_list, + endpoint_shacl_gpnt=ns.gpnt_list, + cql_triples=cql_triples_list, + cql_gpnt=cql_gpnt_list, ) - if cql_parser: - cql_parser.parse() - cql_select_ggps = cql_parser.ggps_inner_select - query_constructor.additional_ggps = cql_select_ggps - query_constructor.generate_sparql() main_query = query_constructor.sparql @@ -130,7 +151,7 @@ async def listing_function( # queries.append(temp_listing_count(subselect, count_class)) if prof_and_mt_info.profile == URIRef( - "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" + "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" ): item_graph, _ = await system_repo.send_queries(queries, []) if "anot+" in prof_and_mt_info.mediatype: @@ -153,7 +174,9 @@ async def listing_function( ) -async def determine_nodeshape(endpoint_uri, hierarchy_level, parent_uri, path_nodes, repo, system_repo): +async def determine_nodeshape( + endpoint_uri, hierarchy_level, parent_uri, path_nodes, repo, system_repo +): node_selection_shape = None target_classes = [] relevant_ns_query = f"""SELECT ?ns ?tc @@ -173,8 +196,12 @@ async def determine_nodeshape(endpoint_uri, hierarchy_level, parent_uri, path_no path_node_classes = {} for pn, uri in path_nodes.items(): path_node_classes[pn] = await get_classes(URIRef(uri.value), repo) - nodeshapes = [NodeShape(uri=URIRef(ns), graph=endpoints_graph_cache, path_nodes=path_nodes) for ns in - distinct_ns] + nodeshapes = [ + NodeShape( + uri=URIRef(ns), graph=endpoints_graph_cache, path_nodes=path_nodes + ) + for ns in distinct_ns + ] matching_nodeshapes = [] for ns in nodeshapes: match_all_keys = True # Assume a match for all keys initially @@ -189,12 +216,12 @@ async def determine_nodeshape(endpoint_uri, hierarchy_level, parent_uri, path_no matching_nodeshapes.append(ns) # TODO logic if there is more than one nodeshape - 
current default nodeshapes will only return one. node_selection_shape = matching_nodeshapes[0].uri - target_classes = list(endpoints_graph_cache.objects(node_selection_shape, SH.targetClass)) + target_classes = list( + endpoints_graph_cache.objects(node_selection_shape, SH.targetClass) + ) return node_selection_shape, target_classes - - def find_instances(obj, cls): found = [] diff --git a/prez/services/objects.py b/prez/services/objects.py index 714a8c6e..9db29713 100755 --- a/prez/services/objects.py +++ b/prez/services/objects.py @@ -7,7 +7,10 @@ from prez.cache import profiles_graph_cache, endpoints_graph_cache from prez.config import settings -from prez.models.profiles_and_mediatypes import ProfilesMediatypesInfo, populate_profile_and_mediatype +from prez.models.profiles_and_mediatypes import ( + ProfilesMediatypesInfo, + populate_profile_and_mediatype, +) from prez.reference_data.prez_ns import EP from prez.renderers.renderer import return_from_graph from prez.services.link_generation import add_prez_links @@ -28,9 +31,11 @@ async def object_function( system_repo: Repo, endpoint_structure: Tuple[str] = settings.endpoint_structure, ): - klasses = await get_classes(uri=uri, repo=repo, endpoint=endpoint_uri) + klasses = await get_classes(uri=uri, repo=repo) # ConnegP - prof_and_mt_info = ProfilesMediatypesInfo(request=request, classes=klasses, system_repo=system_repo) + prof_and_mt_info = ProfilesMediatypesInfo( + request=request, classes=klasses, system_repo=system_repo + ) await populate_profile_and_mediatype(prof_and_mt_info, system_repo) # handle alternate profiles @@ -43,9 +48,9 @@ async def object_function( # runtime_values["object"] = uri query_constructor = PrezQueryConstructor( - runtime_values, - endpoints_graph_cache, - profiles_graph_cache, + runtime_values=runtime_values, + endpoint_graph=endpoints_graph_cache, + profile_graph=profiles_graph_cache, listing_or_object="object", focus_node=IRI(value=uri), endpoint_uri=endpoint_uri, diff --git a/prez/services/query_generation/classes.py b/prez/services/query_generation/classes.py index 08fc3e60..646df1aa 100755 --- a/prez/services/query_generation/classes.py +++ b/prez/services/query_generation/classes.py @@ -1,35 +1,47 @@ import logging from rdflib import URIRef +from rdflib.namespace import RDF from prez.repositories import Repo +from temp.grammar import ( + SelectClause, + Var, + SubSelect, + WhereClause, + GroupGraphPattern, + GroupGraphPatternSub, + TriplesBlock, + SimplifiedTriple, + IRI, +) log = logging.getLogger(__name__) -async def get_classes( - uri: URIRef, repo: Repo, endpoint: URIRef = None -) -> frozenset[URIRef]: +async def get_classes(uri: URIRef, repo: Repo) -> frozenset[URIRef]: """ - if endpoint is specified, only classes that the endpoint can deliver will be returned. 
+ Generates a query of the form:
+ SELECT ?class WHERE { <uri> rdf:type ?class }
"""
- q = f"""
- SELECT ?class
- {{ <{uri}> a ?class }}
- """
- _, r = await repo.send_queries([], [(uri, q)])
+ query = SubSelect(
+ select_clause=SelectClause(variables_or_all=[Var(value="class")]),
+ where_clause=WhereClause(
+ group_graph_pattern=GroupGraphPattern(
+ content=GroupGraphPatternSub(
+ triples_block=TriplesBlock(
+ triples=[
+ SimplifiedTriple(
+ subject=IRI(value=uri),
+ predicate=IRI(value=RDF.type),
+ object=Var(value="class"),
+ )
+ ]
+ )
+ )
+ )
+ ),
+ ).to_string()
+ _, r = await repo.send_queries([], [(uri, query)])
tabular_result = r[0] # should only be one result - only one query sent
- # if endpoint != URIRef("https://prez.dev/endpoint/system/object"):
- # endpoint_classes = list(
- # endpoints_graph_cache.objects(
- # subject=endpoint,
- # predicate=URIRef("https://prez.dev/ont/deliversClasses"),
- # )
- # )
- # object_classes_delivered_by_endpoint = []
- # for c in tabular_result[1]:
- # if URIRef(c["class"]["value"]) in endpoint_classes:
- # object_classes_delivered_by_endpoint.append(URIRef(c["class"]["value"]))
- # classes = frozenset(object_classes_delivered_by_endpoint)
- # else:
classes = frozenset([URIRef(c["class"]["value"]) for c in tabular_result[1]])
return classes
diff --git a/prez/services/query_generation/connegp.py b/prez/services/query_generation/connegp.py
index 911aee4d..20ab5f97 100644
--- a/prez/services/query_generation/connegp.py
+++ b/prez/services/query_generation/connegp.py
@@ -13,11 +13,11 @@


def select_profile_mediatype(
- classes: List[URIRef],
- requested_profile_uri: URIRef = None,
- requested_profile_token: str = None,
- requested_mediatypes: List[Tuple] = None,
- listing: bool = False,
+ classes: List[URIRef],
+ requested_profile_uri: URIRef = None,
+ requested_profile_token: str = None,
+ requested_mediatypes: List[Tuple] = None,
+ listing: bool = False,
):
"""
Returns a SPARQL SELECT query which will determine the profile and mediatype to return based on user requests,
diff --git a/prez/services/query_generation/count.py b/prez/services/query_generation/count.py
index 97406deb..a05b1f6e 100755
--- a/prez/services/query_generation/count.py
+++ b/prez/services/query_generation/count.py
@@ -33,11 +33,9 @@ def create_construct_query(self):
cq = ConstructQuery(
construct_template=self.create_construct_template(),
where_clause=WhereClause(
- group_graph_pattern=GroupGraphPattern(
- content=self.subselect
- )
+ group_graph_pattern=GroupGraphPattern(content=self.subselect)
),
- solution_modifier=SolutionModifier()
+ solution_modifier=SolutionModifier(),
)
return cq
@@ -62,15 +60,14 @@ def rebuild_select_clause(self):
distinct=True,
expression=Expression.from_primary_expr(
PrimaryExpression(
- content=Var(
- value="focus_node")
+ content=Var(value="focus_node")
)
- )
+ ),
)
)
)
),
- Var(value="count")
+ Var(value="count"),
)
]
)
diff --git a/prez/services/query_generation/cql2sparql.py b/prez/services/query_generation/node_selection/cql.py
similarity index 99%
rename from prez/services/query_generation/cql2sparql.py
rename to prez/services/query_generation/node_selection/cql.py
index 18864ea3..319898d6 100755
--- a/prez/services/query_generation/cql2sparql.py
+++ b/prez/services/query_generation/node_selection/cql.py
@@ -6,7 +6,7 @@

from temp.grammar import *

-from prez.services.query_generation.cql_sparql_reference import (
+from prez.reference_data.cql.geo_function_mapping import (
cql_sparql_spatial_mapping,
cql_to_shapely_mapping,
)
diff --git
a/prez/services/query_generation/shacl_node_selection.py b/prez/services/query_generation/node_selection/endpoint_shacl.py similarity index 79% rename from prez/services/query_generation/shacl_node_selection.py rename to prez/services/query_generation/node_selection/endpoint_shacl.py index 0527a0df..7252d9fb 100644 --- a/prez/services/query_generation/shacl_node_selection.py +++ b/prez/services/query_generation/node_selection/endpoint_shacl.py @@ -1,11 +1,13 @@ from __future__ import annotations +from string import Template from typing import List, Optional, Union, Any, Dict from pydantic import BaseModel from rdflib import URIRef, BNode, Graph from rdflib.collection import Collection from rdflib.namespace import SH, RDF +from rdflib.term import Node from prez.reference_data.prez_ns import ONT from temp.grammar import * @@ -34,26 +36,36 @@ class NodeShape(Shape): graph: Graph focus_node: Var | IRI = Var(value="focus_node") targetNode: Optional[URIRef] = None - targetClasses: Optional[List[URIRef]] = None - propertyShapesURIs: Optional[List[URIRef]] = None - propertyShapes: Optional[List[PropertyShape]] = None - triples_list: Optional[List[SimplifiedTriple]] = None - gpnt_list: Optional[List[GraphPatternNotTriples]] = None + targetClasses: Optional[List[Node]] = [] + propertyShapesURIs: Optional[List[Node]] = [] + target: Optional[Node] = None + rules: Optional[List[Node]] = [] + propertyShapes: Optional[List[PropertyShape]] = [] + triples_list: Optional[List[SimplifiedTriple]] = [] + gpnt_list: Optional[List[GraphPatternNotTriples]] = [] path_nodes: Optional[Dict[str, Var | IRI]] = {} classes_at_len: Optional[Dict[str, List[URIRef]]] = {} hierarchy_level: Optional[int] = None + select_template: Optional[str] = None def from_graph(self): # TODO this can be a SPARQL select against the system graph. self.targetNode = next(self.graph.objects(self.uri, SH.targetNode), None) self.targetClasses = list(self.graph.objects(self.uri, SH.targetClass)) self.propertyShapesURIs = list(self.graph.objects(self.uri, SH.property)) - self.propertyShapes = [PropertyShape( - uri=ps_uri, - graph=self.graph, - focus_node=self.focus_node, - path_nodes=self.path_nodes - ) for ps_uri in self.propertyShapesURIs] - self.hierarchy_level = next(self.graph.objects(self.uri, ONT.hierarchyLevel), None) + self.target = next(self.graph.objects(self.uri, SH.target), None) + self.rules = list(self.graph.objects(self.uri, SH.rule)) + self.propertyShapes = [ + PropertyShape( + uri=ps_uri, + graph=self.graph, + focus_node=self.focus_node, + path_nodes=self.path_nodes, + ) + for ps_uri in self.propertyShapesURIs + ] + self.hierarchy_level = next( + self.graph.objects(self.uri, ONT.hierarchyLevel), None + ) if not self.hierarchy_level: raise ValueError("No hierarchy level found") @@ -64,6 +76,11 @@ def to_grammar(self): self._process_class_targets() if self.propertyShapes: self._process_property_shapes() + if self.target: + self._process_target() + # rules used to construct triples only in the context of sh:target/sh:sparql at present. 
+ if self.rules:
+ self._process_rules()

def _process_class_targets(self):
if len(self.targetClasses) == 1:
@@ -71,7 +88,7 @@ def _process_class_targets(self):
SimplifiedTriple(
subject=self.focus_node,
predicate=IRI(value=RDF.type),
- object=IRI(value=self.targetClasses[0])
+ object=IRI(value=self.targetClasses[0]),
)
)
elif len(self.targetClasses) > 1:
@@ -79,16 +96,19 @@
SimplifiedTriple(
subject=self.focus_node,
predicate=IRI(value=RDF.type),
- object=Var(value=f"focus_classes")
- ))
- dbvs = [DataBlockValue(value=IRI(value=klass)) for klass in self.targetClasses]
+ object=Var(value=f"focus_classes"),
+ )
+ )
+ dbvs = [
+ DataBlockValue(value=IRI(value=klass)) for klass in self.targetClasses
+ ]
self.gpnt_list.append(
GraphPatternNotTriples(
content=InlineData(
data_block=DataBlock(
block=InlineDataOneVar(
variable=Var(value=f"focus_classes"),
- datablockvalues=dbvs
+ datablockvalues=dbvs,
)
)
)
@@ -106,6 +126,14 @@ def _process_property_shapes(self):
# deduplicate
self.triples_list = list(set(self.triples_list))

+ def _process_target(self):
+ self.select_template = Template(
+ str(self.graph.value(self.target, SH.select, default=None))
+ )
+
+ def _process_rules(self):
+ pass
+

class PropertyShape(Shape):
uri: URIRef | BNode # URI of the shape
@@ -143,9 +171,7 @@ def from_graph(self):

def _process_property_path(self, pp, graph):
if isinstance(pp, BNode):
- pred_objects_gen = graph.predicate_objects(
- subject=pp
- )
+ pred_objects_gen = graph.predicate_objects(subject=pp)
bn_pred, bn_obj = next(pred_objects_gen, (None, None))
if bn_obj == SH.union:
pass
@@ -180,7 +206,7 @@ def to_grammar(self):
SimplifiedTriple(
subject=path_node_term,
predicate=IRI(value=RDF.type),
- object=IRI(value=self.or_klasses[0])
+ object=IRI(value=self.or_klasses[0]),
)
)
else:
@@ -188,16 +214,19 @@
SimplifiedTriple(
subject=path_node_term,
predicate=IRI(value=RDF.type),
- object=Var(value=f"path_node_classes_{len_pp}")
- ))
- dbvs = [DataBlockValue(value=IRI(value=klass)) for klass in self.or_klasses]
+ object=Var(value=f"path_node_classes_{len_pp}"),
+ )
+ )
+ dbvs = [
+ DataBlockValue(value=IRI(value=klass)) for klass in self.or_klasses
+ ]
self.gpnt_list.append(
GraphPatternNotTriples(
content=InlineData(
data_block=DataBlock(
block=InlineDataOneVar(
variable=Var(value=f"path_node_classes_{len_pp}"),
- datablockvalues=dbvs
+ datablockvalues=dbvs,
)
)
)
@@ -218,7 +247,7 @@
SimplifiedTriple(
subject=focus_or_path_node,
predicate=IRI(value=property_path.value),
- object=path_node_var
+ object=path_node_var,
)
)
elif isinstance(property_path, InversePath):
@@ -226,7 +255,7 @@
SimplifiedTriple(
subject=path_node_var,
predicate=IRI(value=property_path.value),
- object=focus_or_path_node
+ object=focus_or_path_node,
)
)
diff --git a/prez/services/query_generation/search.py b/prez/services/query_generation/node_selection/search.py
similarity index 98%
rename from prez/services/query_generation/search.py
rename to prez/services/query_generation/node_selection/search.py
index 9e5d126a..acb0b171 100755
--- a/prez/services/query_generation/search.py
+++ b/prez/services/query_generation/node_selection/search.py
@@ -248,11 +248,11 @@ def create_union_of_inner_ggps(self):
return gougp

def create_inner_ggp(
- self,
- weight_val: int,
- function: str,
- prefix: str,
- case_insensitive: Optional[bool],
+ self,
+ weight_val: int,
+ function: str,
+ prefix: str,
+ case_insensitive: Optional[bool],
) ->
GroupGraphPattern: ggp = GroupGraphPattern(content=GroupGraphPatternSub()) diff --git a/prez/services/query_generation/umbrella.py b/prez/services/query_generation/umbrella.py index cd052c74..e1b38604 100755 --- a/prez/services/query_generation/umbrella.py +++ b/prez/services/query_generation/umbrella.py @@ -1,7 +1,7 @@ import re from string import Template -from typing import Union, Optional, List - +from typing import Union, Optional, List, Dict +from pydantic import BaseModel, field_validator from rdflib import URIRef, Namespace, Graph, SH, RDF, BNode, Literal from rdflib.collection import Collection @@ -12,56 +12,38 @@ SHEXT = Namespace("http://example.com/shacl-extension#") -class PrezQueryConstructor: - def __init__( - self, - runtime_values: dict, - endpoint_graph: Graph, - profile_graph: Graph, - listing_or_object: str, - focus_node: Union[IRI, Var] = Var(value="focus_node"), - endpoint_uri: Optional[URIRef] = None, - profile_uri: Optional[URIRef] = None, - additional_ggps: Optional[GroupGraphPatternSub] = None, - node_selection_triples: Optional[List[SimplifiedTriple]] = None, - node_selection_gpnt: Optional[GraphPatternNotTriples] = None, - target_class: URIRef = None, - - ): - self.runtime_values = runtime_values - self.endpoint_graph: Graph = endpoint_graph - self.profile_graph: Graph = profile_graph - self.endpoint_uri: Optional[URIRef] = endpoint_uri - self.profile_uri: Optional[URIRef] = profile_uri - self.additional_ggps: Optional[GroupGraphPatternSub] = additional_ggps - - self.focus_node: Union[IRI, Var] = focus_node - - self.sparql = None - self.results = None - - self.construct_triples = None - self.main_where_ggps = GroupGraphPatternSub() - self.inner_select: Union[SubSelect, SubSelectString] = None - - self.default_limit = None - self.default_offset = None - self.default_order_by = None - self.default_order_by_desc = None - - self.runtime_vals_expanded = None - self.merged_runtime_and_default_vals = None - self._expand_runtime_vars() - self._merge_runtime_and_default_vars() - - self.node_selection_triples = node_selection_triples - self.node_selection_gpnt = node_selection_gpnt - - self.listing_or_object = listing_or_object - self.target_class = target_class +class PrezQueryConstructor(BaseModel): + runtime_values: dict + endpoint_graph: Graph + profile_graph: Graph + listing_or_object: str + focus_node: Union[IRI, Var] = Var(value="focus_node") + endpoint_uri: Optional[URIRef] = None + profile_uri: Optional[URIRef] = None + + construct_triples: Optional[List[SimplifiedTriple]] = [] + main_where_ggps: Optional[GroupGraphPatternSub] = GroupGraphPatternSub() + inner_select: Optional[Union[SubSelect, SubSelectString]] = None + + endpoint_shacl_triples: Optional[List[SimplifiedTriple]] = [] + endpoint_shacl_gpnt: Optional[List[GraphPatternNotTriples]] = [] + cql_triples: Optional[List[SimplifiedTriple]] = [] + cql_gpnt: Optional[List[GraphPatternNotTriples]] = [] + select_template: Optional[Template] = None + sparql: Optional[str] = None + + # Additional fields + default_limit: Optional[int] = None + default_offset: Optional[int] = None + default_order_by: Optional[str] = None + default_order_by_desc: Optional[bool] = None + runtime_vals_expanded: Optional[Dict] = {} + merged_runtime_and_default_vals: Optional[Dict] = {} + + class Config: + arbitrary_types_allowed = True def _expand_runtime_vars(self): - self.runtime_vals_expanded = {} for k, v in self.runtime_values.items(): if k in ["limit", "offset", "q"]: self.runtime_vals_expanded[k] = v @@ -82,6 +64,7 @@ def 
generate_sparql(self): """ Generates SPARQL query from Shape profile_graph. """ + self._expand_runtime_vars() if self.listing_or_object == "listing": self.build_inner_select() self.parse_profile() @@ -101,7 +84,9 @@ def _generate_query(self): if self.listing_or_object == "listing": gpnt = GraphPatternNotTriples( content=GroupOrUnionGraphPattern( - group_graph_patterns=[GroupGraphPattern(content=self.inner_select)])) + group_graph_patterns=[GroupGraphPattern(content=self.inner_select)] + ) + ) self.main_where_ggps.add_pattern(gpnt, prepend=True) construct_template = ConstructTemplate( @@ -125,17 +110,15 @@ def build_inner_select(self): self._set_limit_and_offset() self._merge_runtime_and_default_vars() - # sparql targets - for complex selection queries specified as strings - target_bn = list( - self.endpoint_graph.objects(subject=self.endpoint_uri, predicate=SH.target) - ) rule_nodes = list( self.endpoint_graph.objects(subject=self.endpoint_uri, predicate=SH.rule) ) - # sh:target / sh:select - if target_bn: - sss = self.create_select_subquery_from_template(target_bn) + sol_mod, order_by_triple = self._create_focus_node_solution_modifier() + + if self.select_template: + # sh:target / sh:select + sss = self.create_select_subquery_from_template(sol_mod, order_by_triple) self.inner_select = sss # rule nodes - for CONSTRUCT TRIPLES patterns. @@ -144,28 +127,24 @@ def build_inner_select(self): self._create_construct_triples_from_sh_rules(rule_node) else: - sol_mod, order_by_triple = self._create_focus_node_solution_modifier() - self.inner_select = SubSelect( - select_clause=SelectClause( - variables_or_all=[self.focus_node]), + select_clause=SelectClause(variables_or_all=[self.focus_node]), where_clause=WhereClause( - group_graph_pattern=GroupGraphPattern( - content=inner_select_ggps) + group_graph_pattern=GroupGraphPattern(content=inner_select_ggps) ), - solution_modifier=sol_mod + solution_modifier=sol_mod, ) if order_by_triple: inner_select_ggps.add_triple(order_by_triple) # otherwise just use what is provided by the endpoint shapes - if self.node_selection_triples: - tb = TriplesBlock(triples=self.node_selection_triples) + if self.endpoint_shacl_triples: + tb = TriplesBlock(triples=self.endpoint_shacl_triples) inner_select_ggps.add_pattern(tb) - if self.node_selection_gpnt: - for gpnt in self.node_selection_gpnt: + if self.endpoint_shacl_gpnt: + for gpnt in self.endpoint_shacl_gpnt: inner_select_ggps.add_pattern(gpnt) def _add_ggp_to_main_ggps(self, ggp): @@ -203,40 +182,38 @@ def _create_construct_triples_from_sh_rules(self, rule_node): else: self.construct_triples = [triple] - - def create_select_subquery_from_template(self, target_bn): - select_statement = Template( - str(self.endpoint_graph.value(target_bn[0], SH.select, default=None)) - ) + def create_select_subquery_from_template(self, sol_mod, order_by_triple): # expand any prefixes etc. in case the prefixes are not defined in the query this subquery is being inserted - # into. NB Shape does provide a mechanism to declare prefixes used in SPARQL targets - this has not been + # into. 
NB Shape does provide a mechanism to declare prefixes used in SPARQL target - this has not been # implemented - substituted_query = select_statement.substitute( + substituted_query = self.select_template.substitute( self.merged_runtime_and_default_vals ).rstrip() - sol_mod, order_by_triple = self._create_focus_node_solution_modifier() if order_by_triple: # insert it before the end of the string, order_by_triple_text = order_by_triple.to_string() substituted_query = ( - substituted_query[:-1] + f"{{{order_by_triple_text}}} }}" + substituted_query[:-1] + f"{{{order_by_triple_text}}} }}" ) - if self.additional_ggps: # for example from cql - additional_ggps_str = "".join( - part for part in self.additional_ggps.render() + additional_strings = [] + if self.cql_triples: # for example from cql + additional_strings.append( + TriplesBlock(triples=self.cql_triples).to_string() ) - substituted_query = self.split_query(substituted_query, additional_ggps_str) + if self.cql_gpnt: + additional_strings.extend([gpnt.to_string() for gpnt in self.cql_gpnt]) + substituted_query = self.split_query(substituted_query, additional_strings) sss = SubSelectString( select_string=substituted_query, solution_modifier=sol_mod ) return sss - def split_query(self, original_query, additional_ggps_str): + def split_query(self, original_query, additional_strings: List[str]): # Regex to match the entire structure: 'SELECT ?xxx { ... }' pattern = r"(SELECT\s+[\?\w\s\(\)]+\s*\{)(.*?)(\}\s*)" # Use re.split to split the query based on the pattern parts = re.split(pattern, original_query, flags=re.DOTALL) parts = [part for part in parts if part.strip()] - new_parts = [parts[0], additional_ggps_str] + new_parts = [parts[0]] + additional_strings if len(parts) > 1: new_parts.extend(parts[1:]) new_query = "".join(part for part in new_parts) @@ -282,12 +259,14 @@ def _set_limit_and_offset(self): default_limit = next( self.endpoint_graph.objects( subject=self.endpoint_uri, predicate=SHEXT.limit - ), 20 + ), + 20, ) default_offset = next( self.endpoint_graph.objects( subject=self.endpoint_uri, predicate=SHEXT.offset - ), 0 + ), + 0, ) default_order_by = list( self.endpoint_graph.objects( @@ -317,7 +296,7 @@ def _set_limit_and_offset(self): def parse_profile(self): for i, property_node in enumerate( - self.profile_graph.objects(subject=self.profile_uri, predicate=SH.property) + self.profile_graph.objects(subject=self.profile_uri, predicate=SH.property) ): self._parse_property_shapes(property_node, i) self._build_bnode_blocks() @@ -456,7 +435,7 @@ def process_path_object(path_obj: Union[URIRef, BNode]): self.main_where_ggps.add_pattern(gpnt) def _add_inverse_preds( - self, ggps: GroupGraphPatternSub, inverse_preds: List[IRI], i + self, ggps: GroupGraphPatternSub, inverse_preds: List[IRI], i ): if inverse_preds: ggps.add_triple( @@ -526,7 +505,7 @@ def _add_predicate_constraints(self, predicates, property_node, ggp_list): ggp = GroupGraphPattern(content=ggps) ggp_list.append(ggp) elif ( - IRI(value=SHEXT.allPredicateValues) not in predicates + IRI(value=SHEXT.allPredicateValues) not in predicates ): # add VALUES clause dbv_list = [DataBlockValue(value=p) for p in predicates] inline_data_one_var = InlineDataOneVar( diff --git a/temp/grammar/__init__.py b/temp/grammar/__init__.py index 6b6bf28d..00e0b4fd 100644 --- a/temp/grammar/__init__.py +++ b/temp/grammar/__init__.py @@ -1,12 +1,67 @@ -from .grammar import SPARQLGrammarBase, BlankNodeLabel, Anon, Var, IRI, BlankNode, RDFLiteral, LANGTAG, NIL, \ - NumericLiteral, SimplifiedTriple, 
TriplesBlock, PrimaryExpression, UnaryExpression, MultiplicativeExpression, \ - AdditiveExpression, NumericExpression, RelationalExpression, ValueLogical, ConditionalAndExpression, \ - ConditionalOrExpression, Expression, BrackettedExpression, InlineDataOneVar, DataBlockValue, InlineDataFull, \ - DataBlock, InlineData, ValuesClause, GraphPatternNotTriples, GroupGraphPatternSub, SelectClause, SubSelect, \ - SubSelectString, GroupGraphPattern, Filter, Constraint, FunctionCall, ArgList, Bind, OptionalGraphPattern, \ - GroupOrUnionGraphPattern, LimitClause, OffsetClause, OrderCondition, OrderClause, LimitOffsetClauses, \ - SolutionModifier, GroupClause, GroupCondition, ConstructTriples, ConstructTemplate, WhereClause, ConstructQuery, \ - BuiltInCall, BooleanLiteral, GraphTerm, IRIOrFunction, ExpressionList, Aggregate, RegexExpression, Tuple +from .grammar import ( + SPARQLGrammarBase, + BlankNodeLabel, + Anon, + Var, + IRI, + BlankNode, + RDFLiteral, + LANGTAG, + NIL, + NumericLiteral, + SimplifiedTriple, + TriplesBlock, + PrimaryExpression, + UnaryExpression, + MultiplicativeExpression, + AdditiveExpression, + NumericExpression, + RelationalExpression, + ValueLogical, + ConditionalAndExpression, + ConditionalOrExpression, + Expression, + BrackettedExpression, + InlineDataOneVar, + DataBlockValue, + InlineDataFull, + DataBlock, + InlineData, + ValuesClause, + GraphPatternNotTriples, + GroupGraphPatternSub, + SelectClause, + SubSelect, + SubSelectString, + GroupGraphPattern, + Filter, + Constraint, + FunctionCall, + ArgList, + Bind, + OptionalGraphPattern, + GroupOrUnionGraphPattern, + LimitClause, + OffsetClause, + OrderCondition, + OrderClause, + LimitOffsetClauses, + SolutionModifier, + GroupClause, + GroupCondition, + ConstructTriples, + ConstructTemplate, + WhereClause, + ConstructQuery, + BuiltInCall, + BooleanLiteral, + GraphTerm, + IRIOrFunction, + ExpressionList, + Aggregate, + RegexExpression, + Tuple, +) __all__ = [ "Tuple", @@ -70,5 +125,5 @@ "IRIOrFunction", "ExpressionList", "Aggregate", - "RegexExpression" + "RegexExpression", ] diff --git a/temp/grammar/grammar.py b/temp/grammar/grammar.py index 5c1b4d20..9f4fb3a6 100755 --- a/temp/grammar/grammar.py +++ b/temp/grammar/grammar.py @@ -346,7 +346,7 @@ class ConditionalOrExpression(SPARQLGrammarBase): def render(self) -> Generator[str, None, None]: for i, conditional_and_expression in enumerate( - self.conditional_and_expressions + self.conditional_and_expressions ): yield from conditional_and_expression.render() if i < len(self.conditional_and_expressions) - 1: @@ -687,10 +687,10 @@ def render(self) -> Generator[str, None, None]: @classmethod def filter_relational( - cls, - focus: PrimaryExpression, - comparators: Union[PrimaryExpression, List[PrimaryExpression]], - operator: str, + cls, + focus: PrimaryExpression, + comparators: Union[PrimaryExpression, List[PrimaryExpression]], + operator: str, ) -> Filter: """ Convenience method to create a FILTER clause to compare the focus node to comparators. @@ -1054,7 +1054,7 @@ def render(self) -> Generator[str, None, None]: @classmethod def create_with_one_expr( - cls, function_name: str, expression: PrimaryExpression + cls, function_name: str, expression: PrimaryExpression ) -> "BuiltInCall": """ Convenience method for functions that take a single PrimaryExpression as an argument. 
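(Illustrative use of the convenience constructors above — a sketch, not part of this patch; it assumes RDFLiteral takes a value= keyword like the other term classes and that the constructors behave as their docstrings describe:)

    from temp.grammar import BuiltInCall, Filter, PrimaryExpression, RDFLiteral, Var

    focus = PrimaryExpression(content=Var(value="focus_node"))
    comparator = PrimaryExpression(content=RDFLiteral(value="example"))

    # A FILTER comparing the focus node to a literal,
    # rendering roughly as: FILTER (?focus_node != "example")
    flt = Filter.filter_relational(focus=focus, comparators=comparator, operator="!=")

    # A built-in call wrapping a single expression, e.g. LCASE(?focus_node)
    lcase = BuiltInCall.create_with_one_expr("LCASE", focus)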
@@ -1064,7 +1064,7 @@ def create_with_one_expr( @classmethod def create_with_n_expr( - cls, function_name: str, expressions: List[PrimaryExpression] + cls, function_name: str, expressions: List[PrimaryExpression] ) -> "BuiltInCall": """ Convenience method for functions that take a list of PrimaryExpressions as arguments. diff --git a/tests/test_dd_profiles.py b/tests/TO_FIX_test_dd_profiles.py similarity index 100% rename from tests/test_dd_profiles.py rename to tests/TO_FIX_test_dd_profiles.py diff --git a/tests/test_endpoints_vocprez.py b/tests/TO_FIX_test_endpoints_vocprez.py similarity index 100% rename from tests/test_endpoints_vocprez.py rename to tests/TO_FIX_test_endpoints_vocprez.py diff --git a/tests/test_search.py b/tests/TO_FIX_test_search.py similarity index 100% rename from tests/test_search.py rename to tests/TO_FIX_test_search.py diff --git a/tests/test_endpoints_catprez.py b/tests/test_endpoints_catprez.py index c7f68a83..1dcef5df 100755 --- a/tests/test_endpoints_catprez.py +++ b/tests/test_endpoints_catprez.py @@ -51,7 +51,7 @@ def override_get_repo(): app.dependency_overrides[get_repo] = override_get_repo - with TestClient(app, backend_options={'loop_factory': asyncio.new_event_loop}) as c: + with TestClient(app, backend_options={"loop_factory": asyncio.new_event_loop}) as c: wait_for_app_to_be_ready(c) yield c @@ -80,9 +80,7 @@ def a_resource_link(client, a_catalog_link): def test_catalog_listing_anot(client): - r = client.get( - f"/catalogs?_mediatype=text/turtle&_profile=prez:OGCListingProfile" - ) + r = client.get(f"/catalogs?_mediatype=text/turtle&_profile=prez:OGCListingProfile") response_graph = Graph().parse(data=r.text) expected_response_1 = ( URIRef("https://example.com/TopLevelCatalog"), diff --git a/tests/test_endpoints_object.py b/tests/test_endpoints_object.py index 4d6c9678..a2670989 100755 --- a/tests/test_endpoints_object.py +++ b/tests/test_endpoints_object.py @@ -1,10 +1,11 @@ +import asyncio from pathlib import Path import pytest from fastapi.testclient import TestClient from pyoxigraph.pyoxigraph import Store -from rdflib import Graph -from rdflib import RDF, DCAT +from rdflib import Graph, URIRef +from rdflib.namespace import RDF, GEO from prez.app import app from prez.dependencies import get_repo @@ -36,36 +37,21 @@ def override_get_repo(): app.dependency_overrides[get_repo] = override_get_repo - with TestClient(app) as c: + with TestClient(app, backend_options={"loop_factory": asyncio.new_event_loop}) as c: yield c # Remove the override to ensure subsequent tests are unaffected app.dependency_overrides.clear() -@pytest.fixture(scope="module") -def dataset_uri(test_client): - # get link for first dataset - r = test_client.get("/s/datasets") - g = Graph().parse(data=r.text) - return g.value(None, RDF.type, DCAT.Dataset) - - -def test_object_endpoint_sp_dataset(test_client, dataset_uri): - r = test_client.get(f"/object?uri={dataset_uri}") - assert r.status_code == 200 - - def test_feature_collection(test_client): r = test_client.get(f"/object?uri=https://test/feature-collection") response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent / "../tests/data/object/expected_responses/fc.ttl" - ) - assert response_graph.isomorphic(expected_graph), print( - f"""Expected-Response:{(expected_graph - response_graph).serialize()} - Response-Expected:{(expected_graph - response_graph).serialize()}""" - ) + assert ( + URIRef("https://test/feature-collection"), + RDF.type, + GEO.FeatureCollection, + ) in response_graph 
def test_feature(test_client): @@ -73,7 +59,8 @@ def test_feature(test_client): f"/object?uri=https://linked.data.gov.au/datasets/geofabric/hydroid/102208962" ) response_graph = Graph().parse(data=r.text) - expected_graph = Graph().parse( - Path(__file__).parent / "../tests/data/object/expected_responses/feature.ttl" - ) - assert response_graph.isomorphic(expected_graph) + assert ( + URIRef("https://linked.data.gov.au/datasets/geofabric/hydroid/102208962"), + RDF.type, + GEO.Feature, + ) in response_graph diff --git a/tests/test_endpoints_spaceprez.py b/tests/test_endpoints_spaceprez.py index e3264bd6..a53d828d 100755 --- a/tests/test_endpoints_spaceprez.py +++ b/tests/test_endpoints_spaceprez.py @@ -16,7 +16,7 @@ def test_store() -> Store: # Create a new pyoxigraph Store store = Store() - + file = Path("../test_data/spaceprez.ttl") store.load(file.read_bytes(), "text/turtle") @@ -37,7 +37,7 @@ def override_get_repo(): app.dependency_overrides[get_repo] = override_get_repo - with TestClient(app, backend_options={'loop_factory': asyncio.new_event_loop}) as c: + with TestClient(app, backend_options={"loop_factory": asyncio.new_event_loop}) as c: yield c # Remove the override to ensure subsequent tests are unaffected @@ -84,16 +84,15 @@ def test_dataset_anot(client, a_catalog_link): assert next(response_graph.triples(expected_response_1)) - def test_feature_collection(client, an_fc_link): r = client.get(f"{an_fc_link}?_mediatype=text/turtle") response_graph = Graph().parse(data=r.text) - expected_response_1 = ( + assert ( URIRef("https://example.com/FeatureCollection"), RDF.type, GEO.FeatureCollection, - ) - assert next(response_graph.triples(expected_response_1)) + ) in response_graph + def test_feature(client, a_feature_link): r = client.get(f"{a_feature_link}?_mediatype=text/turtle") @@ -105,6 +104,7 @@ def test_feature(client, a_feature_link): ) assert next(response_graph.triples(expected_response_1)) + def test_feature_listing_anot(client, an_fc_link): r = client.get(f"{an_fc_link}/items?_mediatype=text/turtle") response_graph = Graph().parse(data=r.text) @@ -119,4 +119,4 @@ def test_feature_listing_anot(client, an_fc_link): GEO.Feature, ) assert next(response_graph.triples(expected_response_1)) - assert next(response_graph.triples(expected_response_2)) \ No newline at end of file + assert next(response_graph.triples(expected_response_2)) diff --git a/tests/test_node_selection_shacl.py b/tests/test_node_selection_shacl.py index 29c533dc..4e0ed484 100755 --- a/tests/test_node_selection_shacl.py +++ b/tests/test_node_selection_shacl.py @@ -1,8 +1,13 @@ -from prez.services.query_generation.shacl_node_selection import NodeShape, PropertyShape +from prez.services.query_generation.node_selection.endpoint_shacl import ( + NodeShape, + PropertyShape, +) from rdflib import Graph, URIRef import pytest -endpoints_graph = Graph().parse("prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl", format="turtle") +endpoints_graph = Graph().parse( + "prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl", format="turtle" +) # @pytest.fixture @@ -13,43 +18,44 @@ # ) -@pytest.mark.parametrize("nodeshape_uri", - [ - "http://example.org/ns#FeatureCollectionListing" - ]) +@pytest.mark.parametrize( + "nodeshape_uri", ["http://example.org/ns#FeatureCollectionListing"] +) def test_nodeshape_parsing(nodeshape_uri): ns = NodeShape(uri=URIRef(nodeshape_uri), graph=endpoints_graph) - assert ns.targetClasses == [URIRef("http://www.opengis.net/ont/geosparql#FeatureCollection")] + assert 
ns.targetClasses == [ + URIRef("http://www.opengis.net/ont/geosparql#FeatureCollection") + ] assert len(ns.propertyShapesURIs) == 1 -@pytest.mark.parametrize("nodeshape_uri", - [ - "http://example.org/ns#TopLevelCatalogs" - # "http://example.org/ns#FeatureListing" - ]) +@pytest.mark.parametrize( + "nodeshape_uri", + [ + "http://example.org/ns#TopLevelCatalogs" + # "http://example.org/ns#FeatureListing" + ], +) def test_nodeshape_to_grammar(nodeshape_uri): ns = NodeShape(uri=URIRef(nodeshape_uri), graph=endpoints_graph) ns.to_grammar() - print('') + print("") -@pytest.mark.parametrize("property_shape", - [ - "http://example.org/ns#resourceListingPropertyShape2" - ]) +@pytest.mark.parametrize( + "property_shape", ["http://example.org/ns#resourceListingPropertyShape2"] +) def test_propertyshape_parsing(property_shape): ps = PropertyShape(uri=URIRef(property_shape), graph=endpoints_graph) ps.to_grammar() - print('') + print("") -@pytest.mark.parametrize("property_shape", - [ - "http://example.org/ns#resourceListingPropertyShape2" - ]) +@pytest.mark.parametrize( + "property_shape", ["http://example.org/ns#resourceListingPropertyShape2"] +) def test_propertyshape_create_grammar(property_shape): ps = PropertyShape(uri=URIRef(property_shape)) # ps.from_graph(graph=endpoints_graph) # ps.to_grammar() - # assert True \ No newline at end of file + # assert True From 75c285d1809154654fd6f1aca4d2b435cd442471 Mon Sep 17 00:00:00 2001 From: david Date: Thu, 22 Feb 2024 01:25:06 +1000 Subject: [PATCH 15/25] Updates --- poetry.lock | 157 +++++++++--------- prez/dependencies.py | 4 +- prez/models/profiles_and_mediatypes.py | 22 +-- .../endpoint_node_selection_shapes.ttl | 19 +++ .../profiles/ogc_records_profile.ttl | 4 +- .../profiles/prez_default_profiles.ttl | 31 +--- prez/routers/cql.py | 1 + prez/routers/ogc_router.py | 2 - prez/routers/search.py | 12 +- prez/services/generate_profiles.py | 1 - prez/services/listings.py | 72 ++++---- .../query_generation/node_selection/cql.py | 33 ++-- prez/services/query_generation/umbrella.py | 27 +-- pyproject.toml | 9 +- test_data/sandgate.ttl | 4 +- tests/test_alt_profiles.py | 101 +++++++++++ tests/test_node_selection_shacl.py | 42 +---- 17 files changed, 309 insertions(+), 232 deletions(-) create mode 100755 tests/test_alt_profiles.py diff --git a/poetry.lock b/poetry.lock index d812069a..82c21253 100755 --- a/poetry.lock +++ b/poetry.lock @@ -29,13 +29,13 @@ files = [ [[package]] name = "anyio" -version = "4.2.0" +version = "4.3.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, - {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, + {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, + {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, ] [package.dependencies] @@ -455,13 +455,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.2" +version = "1.0.3" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.2-py3-none-any.whl", hash = "sha256:096cc05bca73b8e459a1fc3dcf585148f63e534eae4339559c9b8a8d6399acc7"}, - {file = "httpcore-1.0.2.tar.gz", hash = "sha256:9fc092e4799b26174648e54b74ed5f683132a464e95643b226e00c2ed2fa6535"}, + {file = "httpcore-1.0.3-py3-none-any.whl", hash = "sha256:9a6a501c3099307d9fd76ac244e08503427679b1e81ceb1d922485e2f2462ad2"}, + {file = "httpcore-1.0.3.tar.gz", hash = "sha256:5c0f9546ad17dac4d0772b0808856eb616eb8b48ce94f49ed819fd6982a8a544"}, ] [package.dependencies] @@ -472,7 +472,7 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.23.0)"] +trio = ["trio (>=0.22.0,<0.24.0)"] [[package]] name = "httpx" @@ -500,13 +500,13 @@ socks = ["socksio (==1.*)"] [[package]] name = "identify" -version = "2.5.33" +version = "2.5.35" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"}, - {file = "identify-2.5.33.tar.gz", hash = "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"}, + {file = "identify-2.5.35-py2.py3-none-any.whl", hash = "sha256:c4de0081837b211594f8e877a6b4fad7ca32bbfc1a9307fdd61c28bfe923f13e"}, + {file = "identify-2.5.35.tar.gz", hash = "sha256:10a7ca245cfcd756a554a7288159f72ff105ad233c7c4b9c6f0f4d108f5f6791"}, ] [package.extras] @@ -1029,19 +1029,23 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pydantic-settings" -version = "2.1.0" +version = "2.2.0" description = "Settings management using Pydantic" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_settings-2.1.0-py3-none-any.whl", hash = "sha256:7621c0cb5d90d1140d2f0ef557bdf03573aac7035948109adf2574770b77605a"}, - {file = "pydantic_settings-2.1.0.tar.gz", hash = "sha256:26b1492e0a24755626ac5e6d715e9077ab7ad4fb5f19a8b7ed7011d52f36141c"}, + {file = "pydantic_settings-2.2.0-py3-none-any.whl", hash = "sha256:5f7bcaf9ad4419559dc5ac155c0324a9aeb2547c60471ee7c7d026f467a6b515"}, + {file = "pydantic_settings-2.2.0.tar.gz", hash = "sha256:648d0a76673e69c51278979cba2e83cf16a23d57519bfd7e553d1c3f37db5560"}, ] [package.dependencies] pydantic = ">=2.3.0" python-dotenv = ">=0.21.0" +[package.extras] +toml = ["tomlkit (>=0.12)"] +yaml = ["pyyaml (>=6.0.1)"] + [[package]] name = "pygments" version = "2.17.2" @@ -1059,12 +1063,13 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyld" -version = "2.0.3" +version = "2.0.4" description = "Python implementation of the JSON-LD API" optional = false python-versions = "*" files = [ - {file = "PyLD-2.0.3.tar.gz", hash = "sha256:287445f888c3a332ccbd20a14844c66c2fcbaeab3c99acd506a0788e2ebb2f82"}, + {file = "PyLD-2.0.4-py3-none-any.whl", hash = "sha256:6dab9905644616df33f8755489fc9b354ed7d832d387b7d1974b4fbd3b8d2a89"}, + {file = "PyLD-2.0.4.tar.gz", hash = "sha256:311e350f0dbc964311c79c28e86f84e195a81d06fef5a6f6ac2a4f6391ceeacc"}, ] [package.dependencies] @@ -1200,20 +1205,6 @@ files = [ [package.extras] cli = ["click (>=5.0)"] -[[package]] -name = "python-multipart" -version = "0.0.7" -description = "A streaming multipart parser for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "python_multipart-0.0.7-py3-none-any.whl", hash = "sha256:b1fef9a53b74c795e2347daac8c54b252d9e0df9c619712691c1cc8021bd3c49"}, - {file = 
"python_multipart-0.0.7.tar.gz", hash = "sha256:288a6c39b06596c1b988bb6794c6fbc80e6c369e35e5062637df256bee0c9af9"}, -] - -[package.extras] -dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatch", "invoke (==2.2.0)", "more-itertools (==4.3.0)", "pbr (==4.3.0)", "pluggy (==1.0.0)", "py (==1.11.0)", "pytest (==7.2.0)", "pytest-cov (==4.0.0)", "pytest-timeout (==2.1.0)", "pyyaml (==5.1)"] - [[package]] name = "pyyaml" version = "6.0.1" @@ -1383,72 +1374,72 @@ wheel = ">=0.36.1" [[package]] name = "setuptools" -version = "69.0.3" +version = "69.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, + {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, + {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "shapely" -version = "2.0.2" +version = "2.0.3" description = "Manipulation and analysis of geometric objects" optional = false python-versions = ">=3.7" files = [ - {file = "shapely-2.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6ca8cffbe84ddde8f52b297b53f8e0687bd31141abb2c373fd8a9f032df415d6"}, - {file = "shapely-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:baa14fc27771e180c06b499a0a7ba697c7988c7b2b6cba9a929a19a4d2762de3"}, - {file = "shapely-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:36480e32c434d168cdf2f5e9862c84aaf4d714a43a8465ae3ce8ff327f0affb7"}, - {file = "shapely-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ef753200cbffd4f652efb2c528c5474e5a14341a473994d90ad0606522a46a2"}, - {file = "shapely-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a9a41ff4323fc9d6257759c26eb1cf3a61ebc7e611e024e6091f42977303fd3a"}, - {file = "shapely-2.0.2-cp310-cp310-win32.whl", hash = "sha256:72b5997272ae8c25f0fd5b3b967b3237e87fab7978b8d6cd5fa748770f0c5d68"}, - {file = "shapely-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:34eac2337cbd67650248761b140d2535855d21b969d76d76123317882d3a0c1a"}, - {file = "shapely-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5b0c052709c8a257c93b0d4943b0b7a3035f87e2d6a8ac9407b6a992d206422f"}, - {file = "shapely-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2d217e56ae067e87b4e1731d0dc62eebe887ced729ba5c2d4590e9e3e9fdbd88"}, - {file = "shapely-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94ac128ae2ab4edd0bffcd4e566411ea7bdc738aeaf92c32a8a836abad725f9f"}, - {file = "shapely-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa3ee28f5e63a130ec5af4dc3c4cb9c21c5788bb13c15e89190d163b14f9fb89"}, - {file = "shapely-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:737dba15011e5a9b54a8302f1748b62daa207c9bc06f820cd0ad32a041f1c6f2"}, - {file = "shapely-2.0.2-cp311-cp311-win32.whl", hash = "sha256:45ac6906cff0765455a7b49c1670af6e230c419507c13e2f75db638c8fc6f3bd"}, - {file = "shapely-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:dc9342fc82e374130db86a955c3c4525bfbf315a248af8277a913f30911bed9e"}, - {file = "shapely-2.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:06f193091a7c6112fc08dfd195a1e3846a64306f890b151fa8c63b3e3624202c"}, - {file = "shapely-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:eebe544df5c018134f3c23b6515877f7e4cd72851f88a8d0c18464f414d141a2"}, - {file = "shapely-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7e92e7c255f89f5cdf777690313311f422aa8ada9a3205b187113274e0135cd8"}, - {file = "shapely-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be46d5509b9251dd9087768eaf35a71360de6afac82ce87c636990a0871aa18b"}, - {file = "shapely-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5533a925d8e211d07636ffc2fdd9a7f9f13d54686d00577eeb11d16f00be9c4"}, - {file = "shapely-2.0.2-cp312-cp312-win32.whl", hash = "sha256:084b023dae8ad3d5b98acee9d3bf098fdf688eb0bb9b1401e8b075f6a627b611"}, - {file = "shapely-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:ea84d1cdbcf31e619d672b53c4532f06253894185ee7acb8ceb78f5f33cbe033"}, - {file = "shapely-2.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ed1e99702125e7baccf401830a3b94d810d5c70b329b765fe93451fe14cf565b"}, - {file = "shapely-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7d897e6bdc6bc64f7f65155dbbb30e49acaabbd0d9266b9b4041f87d6e52b3a"}, - {file = "shapely-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0521d76d1e8af01e712db71da9096b484f081e539d4f4a8c97342e7971d5e1b4"}, - {file = "shapely-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:5324be299d4c533ecfcfd43424dfd12f9428fd6f12cda38a4316da001d6ef0ea"}, - {file = "shapely-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:78128357a0cee573257a0c2c388d4b7bf13cb7dbe5b3fe5d26d45ebbe2a39e25"}, - {file = "shapely-2.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:87dc2be34ac3a3a4a319b963c507ac06682978a5e6c93d71917618b14f13066e"}, - {file = "shapely-2.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:42997ac806e4583dad51c80a32d38570fd9a3d4778f5e2c98f9090aa7db0fe91"}, - {file = "shapely-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:ccfd5fa10a37e67dbafc601c1ddbcbbfef70d34c3f6b0efc866ddbdb55893a6c"}, - {file = "shapely-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7c95d3379ae3abb74058938a9fcbc478c6b2e28d20dace38f8b5c587dde90aa"}, - {file = "shapely-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a21353d28209fb0d8cc083e08ca53c52666e0d8a1f9bbe23b6063967d89ed24"}, - {file = "shapely-2.0.2-cp38-cp38-win32.whl", hash = "sha256:03e63a99dfe6bd3beb8d5f41ec2086585bb969991d603f9aeac335ad396a06d4"}, - {file = "shapely-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:c6fd29fbd9cd76350bd5cc14c49de394a31770aed02d74203e23b928f3d2f1aa"}, - {file = "shapely-2.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1f217d28ecb48e593beae20a0082a95bd9898d82d14b8fcb497edf6bff9a44d7"}, - {file = "shapely-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:394e5085b49334fd5b94fa89c086edfb39c3ecab7f669e8b2a4298b9d523b3a5"}, - {file = "shapely-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fd3ad17b64466a033848c26cb5b509625c87d07dcf39a1541461cacdb8f7e91c"}, - {file = "shapely-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d41a116fcad58048d7143ddb01285e1a8780df6dc1f56c3b1e1b7f12ed296651"}, - {file = "shapely-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dea9a0651333cf96ef5bb2035044e3ad6a54f87d90e50fe4c2636debf1b77abc"}, - {file = "shapely-2.0.2-cp39-cp39-win32.whl", hash = "sha256:b8eb0a92f7b8c74f9d8fdd1b40d395113f59bd8132ca1348ebcc1f5aece94b96"}, - {file = "shapely-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:794affd80ca0f2c536fc948a3afa90bd8fb61ebe37fe873483ae818e7f21def4"}, - {file = "shapely-2.0.2.tar.gz", hash = "sha256:1713cc04c171baffc5b259ba8531c58acc2a301707b7f021d88a15ed090649e7"}, + {file = "shapely-2.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:af7e9abe180b189431b0f490638281b43b84a33a960620e6b2e8d3e3458b61a1"}, + {file = "shapely-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98040462b36ced9671e266b95c326b97f41290d9d17504a1ee4dc313a7667b9c"}, + {file = "shapely-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:71eb736ef2843f23473c6e37f6180f90f0a35d740ab284321548edf4e55d9a52"}, + {file = "shapely-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:881eb9dbbb4a6419667e91fcb20313bfc1e67f53dbb392c6840ff04793571ed1"}, + {file = "shapely-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f10d2ccf0554fc0e39fad5886c839e47e207f99fdf09547bc687a2330efda35b"}, + {file = "shapely-2.0.3-cp310-cp310-win32.whl", hash = "sha256:6dfdc077a6fcaf74d3eab23a1ace5abc50c8bce56ac7747d25eab582c5a2990e"}, + {file = "shapely-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:64c5013dacd2d81b3bb12672098a0b2795c1bf8190cfc2980e380f5ef9d9e4d9"}, + {file = "shapely-2.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:56cee3e4e8159d6f2ce32e421445b8e23154fd02a0ac271d6a6c0b266a8e3cce"}, + {file = "shapely-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:619232c8276fded09527d2a9fd91a7885ff95c0ff9ecd5e3cb1e34fbb676e2ae"}, + {file = "shapely-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2a7d256db6f5b4b407dc0c98dd1b2fcf1c9c5814af9416e5498d0a2e4307a4b"}, + {file = "shapely-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e45f0c8cd4583647db3216d965d49363e6548c300c23fd7e57ce17a03f824034"}, + {file = 
"shapely-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13cb37d3826972a82748a450328fe02a931dcaed10e69a4d83cc20ba021bc85f"}, + {file = "shapely-2.0.3-cp311-cp311-win32.whl", hash = "sha256:9302d7011e3e376d25acd30d2d9e70d315d93f03cc748784af19b00988fc30b1"}, + {file = "shapely-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:6b464f2666b13902835f201f50e835f2f153f37741db88f68c7f3b932d3505fa"}, + {file = "shapely-2.0.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e86e7cb8e331a4850e0c2a8b2d66dc08d7a7b301b8d1d34a13060e3a5b4b3b55"}, + {file = "shapely-2.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c91981c99ade980fc49e41a544629751a0ccd769f39794ae913e53b07b2f78b9"}, + {file = "shapely-2.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd45d456983dc60a42c4db437496d3f08a4201fbf662b69779f535eb969660af"}, + {file = "shapely-2.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:882fb1ffc7577e88c1194f4f1757e277dc484ba096a3b94844319873d14b0f2d"}, + {file = "shapely-2.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9f2d93bff2ea52fa93245798cddb479766a18510ea9b93a4fb9755c79474889"}, + {file = "shapely-2.0.3-cp312-cp312-win32.whl", hash = "sha256:99abad1fd1303b35d991703432c9481e3242b7b3a393c186cfb02373bf604004"}, + {file = "shapely-2.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:6f555fe3304a1f40398977789bc4fe3c28a11173196df9ece1e15c5bc75a48db"}, + {file = "shapely-2.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a983cc418c1fa160b7d797cfef0e0c9f8c6d5871e83eae2c5793fce6a837fad9"}, + {file = "shapely-2.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18bddb8c327f392189a8d5d6b9a858945722d0bb95ccbd6a077b8e8fc4c7890d"}, + {file = "shapely-2.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:442f4dcf1eb58c5a4e3428d88e988ae153f97ab69a9f24e07bf4af8038536325"}, + {file = "shapely-2.0.3-cp37-cp37m-win32.whl", hash = "sha256:31a40b6e3ab00a4fd3a1d44efb2482278642572b8e0451abdc8e0634b787173e"}, + {file = "shapely-2.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:59b16976c2473fec85ce65cc9239bef97d4205ab3acead4e6cdcc72aee535679"}, + {file = "shapely-2.0.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:705efbce1950a31a55b1daa9c6ae1c34f1296de71ca8427974ec2f27d57554e3"}, + {file = "shapely-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:601c5c0058a6192df704cb889439f64994708563f57f99574798721e9777a44b"}, + {file = "shapely-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f24ecbb90a45c962b3b60d8d9a387272ed50dc010bfe605f1d16dfc94772d8a1"}, + {file = "shapely-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8c2a2989222c6062f7a0656e16276c01bb308bc7e5d999e54bf4e294ce62e76"}, + {file = "shapely-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42bceb9bceb3710a774ce04908fda0f28b291323da2688f928b3f213373b5aee"}, + {file = "shapely-2.0.3-cp38-cp38-win32.whl", hash = "sha256:54d925c9a311e4d109ec25f6a54a8bd92cc03481a34ae1a6a92c1fe6729b7e01"}, + {file = "shapely-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:300d203b480a4589adefff4c4af0b13919cd6d760ba3cbb1e56275210f96f654"}, + {file = "shapely-2.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:083d026e97b6c1f4a9bd2a9171c7692461092ed5375218170d91705550eecfd5"}, + {file = "shapely-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:27b6e1910094d93e9627f2664121e0e35613262fc037051680a08270f6058daf"}, + {file = 
"shapely-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:71b2de56a9e8c0e5920ae5ddb23b923490557ac50cb0b7fa752761bf4851acde"}, + {file = "shapely-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d279e56bbb68d218d63f3efc80c819cedcceef0e64efbf058a1df89dc57201b"}, + {file = "shapely-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88566d01a30f0453f7d038db46bc83ce125e38e47c5f6bfd4c9c287010e9bf74"}, + {file = "shapely-2.0.3-cp39-cp39-win32.whl", hash = "sha256:58afbba12c42c6ed44c4270bc0e22f3dadff5656d711b0ad335c315e02d04707"}, + {file = "shapely-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:5026b30433a70911979d390009261b8c4021ff87c7c3cbd825e62bb2ffa181bc"}, + {file = "shapely-2.0.3.tar.gz", hash = "sha256:4d65d0aa7910af71efa72fd6447e02a8e5dd44da81a983de9d736d6e6ccbe674"}, ] [package.dependencies] -numpy = ">=1.14" +numpy = ">=1.14,<2" [package.extras] docs = ["matplotlib", "numpydoc (==1.1.*)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"] @@ -1517,13 +1508,13 @@ files = [ [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1534,13 +1525,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = "0.27.0.post1" +version = "0.27.1" description = "The lightning-fast ASGI server." 
optional = false python-versions = ">=3.8" files = [ - {file = "uvicorn-0.27.0.post1-py3-none-any.whl", hash = "sha256:4b85ba02b8a20429b9b205d015cbeb788a12da527f731811b643fd739ef90d5f"}, - {file = "uvicorn-0.27.0.post1.tar.gz", hash = "sha256:54898fcd80c13ff1cd28bf77b04ec9dbd8ff60c5259b499b4b12bb0917f22907"}, + {file = "uvicorn-0.27.1-py3-none-any.whl", hash = "sha256:5c89da2f3895767472a35556e539fd59f7edbe9b1e9c0e1c99eebeadc61838e4"}, + {file = "uvicorn-0.27.1.tar.gz", hash = "sha256:3d9a267296243532db80c83a959a3400502165ade2c1338dea4e67915fd4745a"}, ] [package.dependencies] @@ -1587,4 +1578,4 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "f9d999e3c9ac329c3cbd40277b79312a743182e39ef1ca1f2db89b48f9cb7db8" +content-hash = "9e52b0cd2075bbbc6693b39f79b388a505e3744feeeb85c235b7f735afedc848" diff --git a/prez/dependencies.py b/prez/dependencies.py index eb4e1cc5..2b3ce22f 100755 --- a/prez/dependencies.py +++ b/prez/dependencies.py @@ -16,7 +16,7 @@ ) from prez.config import settings from prez.repositories import PyoxigraphRepo, RemoteSparqlRepo, OxrdflibRepo -from rdframe import CQLParser +from prez.services.query_generation.node_selection.cql import CQLParser async def get_async_http_client(): @@ -110,7 +110,7 @@ async def cql_get_parser_dependency(request: Request): query = json.loads(request.query_params["filter"]) context = json.load( ( - Path(__file__).parent.parent / "temp" / "default_cql_context.json" + Path(__file__).parent / "reference_data/cql/default_context.json" ).open() ) cql_parser = CQLParser(cql=query, context=context) diff --git a/prez/models/profiles_and_mediatypes.py b/prez/models/profiles_and_mediatypes.py index 93349680..fdaa646f 100755 --- a/prez/models/profiles_and_mediatypes.py +++ b/prez/models/profiles_and_mediatypes.py @@ -40,19 +40,19 @@ def populate_requested_types(self): async def populate_profile_and_mediatype( - profiles_mediatypes_model: ProfilesMediatypesInfo, system_repo: Repo + prof_model: ProfilesMediatypesInfo, system_repo: Repo ): - req_profiles = profiles_mediatypes_model.req_profiles - req_profiles_token = profiles_mediatypes_model.req_profiles_token - req_mediatypes = profiles_mediatypes_model.req_mediatypes - classes = profiles_mediatypes_model.classes - listing = profiles_mediatypes_model.listing + req_profiles = prof_model.req_profiles + req_profiles_token = prof_model.req_profiles_token + req_mediatypes = prof_model.req_mediatypes + classes = prof_model.classes + listing = prof_model.listing ( - profiles_mediatypes_model.profile, - profiles_mediatypes_model.mediatype, - profiles_mediatypes_model.selected_class, - profiles_mediatypes_model.profile_headers, - profiles_mediatypes_model.avail_profile_uris, + prof_model.profile, + prof_model.mediatype, + prof_model.selected_class, + prof_model.profile_headers, + prof_model.avail_profile_uris, ) = await get_profiles_and_mediatypes( classes, system_repo, req_profiles, req_profiles_token, req_mediatypes, listing ) diff --git a/prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl b/prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl index 32e569fa..fda11289 100644 --- a/prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl +++ b/prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl @@ -9,6 +9,7 @@ @prefix sh: . @prefix shext: . @prefix skos: . +@prefix altr-ext: . 
ex:TopLevelCatalogs a sh:NodeShape ; @@ -110,4 +111,22 @@ ex:queryables a sh:NodeShape ; }""" ] ; shext:limit 100 ; shext:offset 0 ; +. + +ex:AltProfilesForListing + a sh:NodeShape ; + ont:hierarchyLevel 1 ; + sh:targetClass prez:ListingProfile ; + sh:property [ + sh:path altr-ext:constrainsClass ; + ] +. + +ex:AltProfilesForObject + a sh:NodeShape ; + ont:hierarchyLevel 1 ; + sh:targetClass prez:ObjectProfile ; + sh:property [ + sh:path altr-ext:constrainsClass ; + ] . \ No newline at end of file diff --git a/prez/reference_data/profiles/ogc_records_profile.ttl b/prez/reference_data/profiles/ogc_records_profile.ttl index 10923704..333ef557 100755 --- a/prez/reference_data/profiles/ogc_records_profile.ttl +++ b/prez/reference_data/profiles/ogc_records_profile.ttl @@ -25,7 +25,7 @@ prez:OGCRecordsProfile altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:hasNodeShape [ a sh:NodeShape ; - sh:targetClass dcat:Catalog , skos:Concept , geo:Feature , geo:FeatureCollection , skos:Collection , prez:SearchResult ; + sh:targetClass dcat:Catalog , skos:Concept , geo:Feature , geo:FeatureCollection , skos:Collection , prez:SearchResult , prez:CQLObjectList ; altr-ext:hasDefaultProfile prez:OGCListingProfile ] , [ a sh:NodeShape ; @@ -49,7 +49,7 @@ prez:OGCListingProfile "text/turtle" ; altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:constrainsClass dcat:Catalog , skos:Collection , geo:Feature , geo:FeatureCollection , skos:Concept , - dcat:Resource , prof:Profile , prez:SearchResult ; + dcat:Resource , prof:Profile , prez:SearchResult , prez:CQLObjectList ; sh:property [ sh:path rdf:type ] . diff --git a/prez/reference_data/profiles/prez_default_profiles.ttl b/prez/reference_data/profiles/prez_default_profiles.ttl index 887edaf7..ececa07b 100755 --- a/prez/reference_data/profiles/prez_default_profiles.ttl +++ b/prez/reference_data/profiles/prez_default_profiles.ttl @@ -31,7 +31,7 @@ PREFIX xsd: . - a prof:Profile ; + a prof:Profile , prez:ObjectProfile ; dcterms:identifier "openobj"^^xsd:token ; dcterms:description "An open profile for objects which will return all direct properties for a resource." ; dcterms:title "Open profile" ; @@ -50,7 +50,7 @@ PREFIX xsd: . - a prof:Profile ; + a prof:Profile , prez:ListingProfile ; dcterms:description "A very basic data model that lists the members of container objects only, i.e. not their other properties" ; dcterms:identifier "mem"^^xsd:token ; dcterms:title "Members" ; @@ -71,7 +71,7 @@ PREFIX xsd: . altr-ext:alt-profile - a prof:Profile , sh:NodeShape ; + a prof:Profile , prez:ListingProfile , prez:ObjectProfile ; dcterms:description "The representation of the resource that lists all other representations (profiles and Media Types)" ; dcterms:identifier "alt"^^xsd:token ; dcterms:title "Alternates Profile" ; @@ -94,6 +94,7 @@ altr-ext:alt-profile sh:property [ sh:path ( sh:union ( + rdf:type altr-ext:hasResourceFormat altr-ext:hasDefaultResourceFormat dcterms:description @@ -103,27 +104,3 @@ altr-ext:alt-profile ) ] ; . 
- - - - - a prof:Profile , prez:CatPrezProfile ; - dcterms:description "Dataset Catalog Vocabulary (DCAT) is a W3C-authored RDF vocabulary designed to facilitate interoperability between data catalogs" ; - dcterms:identifier "dcat"^^xsd:token ; - dcterms:title "DCAT" ; - altr-ext:constrainsClass - dcat:Catalog , - dcat:Dataset , - dcat:Resource , - prez:CatalogList , - prez:ResourceList ; - altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - altr-ext:hasResourceFormat - "application/ld+json" , - "application/rdf+xml" , - "text/anot+turtle" , - "text/turtle" ; - sh:property [ - sh:path shext:allPredicateValues - ] ; - . \ No newline at end of file diff --git a/prez/routers/cql.py b/prez/routers/cql.py index 30d5f972..2d89a910 100755 --- a/prez/routers/cql.py +++ b/prez/routers/cql.py @@ -60,6 +60,7 @@ async def cql_get_endpoint( request=request, repo=repo, system_repo=system_repo, + hierarchy_level=1, endpoint_uri=endpoint_uri, page=page, per_page=per_page, diff --git a/prez/routers/ogc_router.py b/prez/routers/ogc_router.py index 9a6ed2c8..3f7488a7 100755 --- a/prez/routers/ogc_router.py +++ b/prez/routers/ogc_router.py @@ -70,7 +70,6 @@ async def collection_listing( path_nodes={"path_node_1": IRI(value=path_node_1_uri)}, page=page, per_page=per_page, - parent_uri=path_node_1_uri, search_term=search_term, ) @@ -104,7 +103,6 @@ async def item_listing( }, page=page, per_page=per_page, - parent_uri=path_node_1_uri, search_term=search_term, ) diff --git a/prez/routers/search.py b/prez/routers/search.py index 780dd41a..2830cdc8 100755 --- a/prez/routers/search.py +++ b/prez/routers/search.py @@ -4,7 +4,7 @@ from rdflib import URIRef from rdflib.namespace import Namespace -from prez.dependencies import get_repo +from prez.dependencies import get_repo, get_system_repo from prez.reference_data.prez_ns import PREZ from prez.repositories import Repo from prez.services.listings import listing_function @@ -24,15 +24,15 @@ async def search( per_page: Optional[int] = 20, search_term: Optional[str] = None, repo: Repo = Depends(get_repo), - system_repo: Repo = Depends(get_repo), + system_repo: Repo = Depends(get_system_repo), ): term = request.query_params.get("q") endpoint_uri = URIRef(request.scope.get("route").name) return await listing_function( - request, - repo, - system_repo, - endpoint_uri, + request=request, + repo=repo, + system_repo=system_repo, + endpoint_uri=endpoint_uri, hierarchy_level=1, page=page, per_page=per_page, diff --git a/prez/services/generate_profiles.py b/prez/services/generate_profiles.py index 6443d590..532a1785 100755 --- a/prez/services/generate_profiles.py +++ b/prez/services/generate_profiles.py @@ -79,7 +79,6 @@ async def get_profiles_and_mediatypes( listing, ) log.debug(f"ConnegP query: {query}") - # response = profiles_graph_cache.query(query) response = await system_repo.send_queries([], [(None, query)]) # log.debug(f"ConnegP response:{results_pretty_printer(response)}") if response[1][0][1] == []: diff --git a/prez/services/listings.py b/prez/services/listings.py index 0e69c97d..a8b6cc81 100755 --- a/prez/services/listings.py +++ b/prez/services/listings.py @@ -6,7 +6,6 @@ from fastapi.responses import PlainTextResponse from rdflib import URIRef, Literal from rdflib.namespace import RDF, SH -from rdframe import CQLParser from prez.cache import profiles_graph_cache, endpoints_graph_cache from prez.config import settings @@ -16,17 +15,18 @@ ) from prez.reference_data.prez_ns import PREZ from prez.renderers.renderer import return_from_graph +from prez.repositories 
import Repo from prez.services.link_generation import add_prez_links from prez.services.query_generation.classes import get_classes from prez.services.query_generation.count import CountQuery -from prez.repositories import Repo +from prez.services.query_generation.node_selection.endpoint_shacl import NodeShape from prez.services.query_generation.node_selection.search import SearchQuery -from temp.grammar import * +from prez.services.query_generation.node_selection.cql import CQLParser # from rdframe.grammar import SubSelect # from rdframe import PrezQueryConstructor from prez.services.query_generation.umbrella import PrezQueryConstructor -from prez.services.query_generation.node_selection.endpoint_shacl import NodeShape +from temp.grammar import * log = logging.getLogger(__name__) @@ -40,7 +40,6 @@ async def listing_function( path_nodes: Dict[str, Var | IRI] = None, page: int = 1, per_page: int = 20, - parent_uri: Optional[URIRef] = None, cql_parser: CQLParser = None, search_term: Optional[str] = None, endpoint_structure: Tuple[str] = settings.endpoint_structure, @@ -52,19 +51,20 @@ async def listing_function( # gather relevant info for the profile part of the query # build the query """ + if not path_nodes: + path_nodes = {} queries = [] # determine possible SHACL node shapes for endpoint - node_selection_shape, target_classes = await determine_nodeshape( - endpoint_uri, hierarchy_level, parent_uri, path_nodes, repo, system_repo + ns_triples, ns_gpnt, target_classes = await get_shacl_node_selection( + endpoint_uri, hierarchy_level, path_nodes, repo, system_repo ) - if not path_nodes: - path_nodes = {} - if node_selection_shape: - ns = NodeShape( - uri=node_selection_shape, graph=endpoints_graph_cache, path_nodes=path_nodes - ) - + if not target_classes: + # then there is no target class - i.e. 
it's a search *only* or CQL *only* query (not SHACL + CQL or SHACL + Search) + if cql_parser: + target_classes = frozenset([PREZ.CQLObjectList]) + elif search_term: + target_classes = frozenset([PREZ.SearchResult]) # determine the relevant profile prof_and_mt_info = ProfilesMediatypesInfo( request=request, classes=target_classes, system_repo=system_repo, listing=True @@ -79,6 +79,13 @@ async def listing_function( if prof_and_mt_info.profile == URIRef( "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" ): + ns = NodeShape( + uri=URIRef("http://example.org/ns#AltProfilesForListing"), + graph=endpoints_graph_cache, + path_nodes={"path_node_1": IRI(value=prof_and_mt_info.selected_class)} + ) + ns_triples = ns.triples_list + ns_gpnt = ns.gpnt_list endpoint_uri = URIRef("https://prez.dev/endpoint/system/alt-profiles-listing") runtime_values["selectedClass"] = prof_and_mt_info.selected_class @@ -108,8 +115,8 @@ async def listing_function( listing_or_object="listing", endpoint_uri=endpoint_uri, profile_uri=selected_profile, - endpoint_shacl_triples=ns.triples_list, - endpoint_shacl_gpnt=ns.gpnt_list, + endpoint_shacl_triples=ns_triples, + endpoint_shacl_gpnt=ns_gpnt, cql_triples=cql_triples_list, cql_gpnt=cql_gpnt_list, ) @@ -137,25 +144,15 @@ async def listing_function( # add a count query if it's an annotated mediatype if "anot+" in prof_and_mt_info.mediatype and not search_term: subselect = copy.deepcopy(query_constructor.inner_select) - subselect.solution_modifier = None # remove the limit and offset from the subselect so that we can get a count count_query = CountQuery(subselect=subselect).render() queries.append(count_query) - # if prof_and_mt_info.profile == URIRef( - # "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" - # ): - # count_class = PROF.Profile - # else: - # count_class = target_classes - # if count_class: # target_class may be unknown (None) for queries involving CQL - # queries.append(temp_listing_count(subselect, count_class)) - if prof_and_mt_info.profile == URIRef( "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" ): item_graph, _ = await system_repo.send_queries(queries, []) if "anot+" in prof_and_mt_info.mediatype: - await add_prez_links(item_graph, system_repo, endpoint_structure) + await add_prez_links(item_graph, system_repo, endpoint_structure=("profiles",)) else: item_graph, _ = await repo.send_queries(queries, []) if "anot+" in prof_and_mt_info.mediatype: @@ -174,9 +171,12 @@ async def listing_function( ) -async def determine_nodeshape( - endpoint_uri, hierarchy_level, parent_uri, path_nodes, repo, system_repo +async def get_shacl_node_selection( + endpoint_uri, hierarchy_level, path_nodes, repo, system_repo ): + """ + Determines the relevant nodeshape based on the endpoint, hierarchy level, and parent URI + """ node_selection_shape = None target_classes = [] relevant_ns_query = f"""SELECT ?ns ?tc @@ -185,7 +185,7 @@ async def determine_nodeshape( ?ns ?tc ; {hierarchy_level} . 
}}""" - _, r = await system_repo.send_queries([], [(parent_uri, relevant_ns_query)]) + _, r = await system_repo.send_queries([], [(None, relevant_ns_query)]) tabular_results = r[0][1] distinct_ns = set([result["ns"]["value"] for result in tabular_results]) if len(distinct_ns) == 1: # only one possible node shape @@ -219,7 +219,17 @@ async def determine_nodeshape( target_classes = list( endpoints_graph_cache.objects(node_selection_shape, SH.targetClass) ) - return node_selection_shape, target_classes + ns_triples = [] + ns_gpnt = [] + if not path_nodes: + path_nodes = {} + if node_selection_shape: + ns = NodeShape( + uri=node_selection_shape, graph=endpoints_graph_cache, path_nodes=path_nodes + ) + ns_triples = ns.triples_list + ns_gpnt = ns.gpnt_list + return ns_triples, ns_gpnt, target_classes def find_instances(obj, cls): diff --git a/prez/services/query_generation/node_selection/cql.py b/prez/services/query_generation/node_selection/cql.py index 319898d6..1dde3521 100755 --- a/prez/services/query_generation/node_selection/cql.py +++ b/prez/services/query_generation/node_selection/cql.py @@ -23,7 +23,6 @@ def __init__(self, cql=None, context: dict = None, cql_json: dict = None): self.var_counter = 0 self.query_object = None self.query_str = None - # self.prefixes = self.extract_prefixes(self.context) def generate_jsonld(self): combined = {"@context": self.context, **self.cql} @@ -60,7 +59,7 @@ def parse(self): where_clause=where, solution_modifier=solution_modifier, ) - self.query_str = "".join(part for part in self.query_object.render()) + self.query_str = self.query_object.to_string() def parse_logical_operators( self, element, existing_ggps=None @@ -119,11 +118,11 @@ def _add_triple(self, ggps, subject, predicate, object): else: ggps.triples_block = TriplesBlock(triples=[simple_triple]) - def _append_graph_pattern(self, ggps, graph_pattern): - if ggps.graph_patterns_or_triples_blocks: - ggps.graph_patterns_or_triples_blocks.append(graph_pattern) - else: - ggps.graph_patterns_or_triples_blocks = [graph_pattern] + # def _append_graph_pattern(self, ggps, graph_pattern): + # if ggps.graph_patterns_or_triples_blocks: + # ggps.graph_patterns_or_triples_blocks.append(graph_pattern) + # else: + # ggps.graph_patterns_or_triples_blocks = [graph_pattern] def _handle_comparison(self, operator, args, existing_ggps=None): self.var_counter += 1 @@ -150,14 +149,16 @@ def _handle_comparison(self, operator, args, existing_ggps=None): gpnt = GraphPatternNotTriples( content=InlineData(data_block=DataBlock(block=ildov)) ) - self._append_graph_pattern(ggps, gpnt) + ggps.add_pattern(gpnt) + # self._append_graph_pattern(ggps, gpnt) else: value_pe = PrimaryExpression(content=value) values_constraint = Filter.filter_relational( focus=object_pe, comparators=value_pe, operator=operator ) gpnt = GraphPatternNotTriples(content=values_constraint) - self._append_graph_pattern(ggps, gpnt) + ggps.add_pattern(gpnt) + # self._append_graph_pattern(ggps, gpnt) if inverse: self._add_triple(ggps, object, predicate, subject) @@ -200,8 +201,9 @@ def _handle_like(self, args, existing_ggps=None): bic = BuiltInCall(other_expressions=re) cons = Constraint(content=bic) filter_expr = Filter(constraint=cons) - - self._append_graph_pattern(ggps, filter_expr) + filter_gpnt = GraphPatternNotTriples(content=filter_expr) + ggps.add_pattern(filter_gpnt) + # self._append_graph_pattern(ggps, filter_expr) yield ggps def _handle_spatial(self, operator, args, existing_ggps=None): @@ -239,8 +241,9 @@ def _handle_spatial(self, operator, args, 
existing_ggps=None): fc = FunctionCall(iri=geom_func_iri, arg_list=arg_list) spatial_filter = Filter(constraint=Constraint(content=fc)) - self._append_graph_pattern(ggps, spatial_filter) - + filter_gpnt = GraphPatternNotTriples(content=spatial_filter) + ggps.add_pattern(filter_gpnt) + # self._append_graph_pattern(ggps, spatial_filter) yield ggps def _handle_in(self, args, existing_ggps=None): @@ -277,8 +280,8 @@ def _handle_in(self, args, existing_ggps=None): gpnt = GraphPatternNotTriples( content=InlineData(data_block=DataBlock(block=ildov)) ) - self._append_graph_pattern(ggps, gpnt) - + ggps.add_pattern(gpnt) + # self._append_graph_pattern(ggps, gpnt) yield ggps def _extract_spatial_info(self, coordinates_list, args): diff --git a/prez/services/query_generation/umbrella.py b/prez/services/query_generation/umbrella.py index e1b38604..23ab47b4 100755 --- a/prez/services/query_generation/umbrella.py +++ b/prez/services/query_generation/umbrella.py @@ -13,6 +13,9 @@ class PrezQueryConstructor(BaseModel): + class Config: + arbitrary_types_allowed = True + runtime_values: dict endpoint_graph: Graph profile_graph: Graph @@ -40,15 +43,14 @@ class PrezQueryConstructor(BaseModel): runtime_vals_expanded: Optional[Dict] = {} merged_runtime_and_default_vals: Optional[Dict] = {} - class Config: - arbitrary_types_allowed = True + def _expand_runtime_vars(self): for k, v in self.runtime_values.items(): if k in ["limit", "offset", "q"]: self.runtime_vals_expanded[k] = v elif v: - val = "".join(IRI(value=v).render()) + val = IRI(value=v).to_string() self.runtime_vals_expanded[k] = val def _merge_runtime_and_default_vars(self): @@ -139,18 +141,16 @@ def build_inner_select(self): inner_select_ggps.add_triple(order_by_triple) # otherwise just use what is provided by the endpoint shapes - if self.endpoint_shacl_triples: - tb = TriplesBlock(triples=self.endpoint_shacl_triples) + all_triples = self.endpoint_shacl_triples + self.cql_triples + if all_triples: + tb = TriplesBlock(triples=all_triples) inner_select_ggps.add_pattern(tb) - if self.endpoint_shacl_gpnt: - for gpnt in self.endpoint_shacl_gpnt: + all_gpnt = self.endpoint_shacl_gpnt + self.cql_gpnt + if all_gpnt: + for gpnt in all_gpnt: inner_select_ggps.add_pattern(gpnt) - def _add_ggp_to_main_ggps(self, ggp): - gorugp = GroupOrUnionGraphPattern(group_graph_patterns=[ggp]) - gpnt = GraphPatternNotTriples(content=gorugp) - self.main_where_ggps.add_pattern(gpnt) def sh_rule_type_conversion(self, items: List): """ @@ -366,7 +366,10 @@ def process_bn_level(depth, max_depth, outer_ggps): if bnode_depth > 1: process_bn_level(depth=2, max_depth=bnode_depth, outer_ggps=ggps) - self._add_ggp_to_main_ggps(container_ggp) + gorugp = GroupOrUnionGraphPattern(group_graph_patterns=[container_ggp]) + gpnt = GraphPatternNotTriples(content=gorugp) + self.main_where_ggps.add_pattern(gpnt) + def _parse_property_shapes(self, property_node, i): def process_path_object(path_obj: Union[URIRef, BNode]): diff --git a/pyproject.toml b/pyproject.toml index 3a73e336..428aacc0 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,7 @@ authors = ["Jamie Feiss ", "Nicholas Car PREFIX xsd: - a dcat:Dataset ; + a dcat:Catalog ; dcterms:description "Example floods, roads, catchment and facilities in the Sandgate are"@en ; dcterms:identifier "sandgate"^^xsd:token ; dcterms:title "Sandgate example dataset"@en ; - rdfs:member + dcterms:hasPart sand:catchments , sand:facilities , sand:floods , diff --git a/tests/test_alt_profiles.py b/tests/test_alt_profiles.py new file mode 100755 index 
00000000..32121a49
--- /dev/null
+++ b/tests/test_alt_profiles.py
@@ -0,0 +1,101 @@
+import asyncio
+import time
+from pathlib import Path
+
+import pytest
+from fastapi.testclient import TestClient
+from pyoxigraph.pyoxigraph import Store
+from rdflib import Graph, URIRef
+from rdflib.namespace import RDF, DCAT
+
+from prez.app import app
+from prez.dependencies import get_repo
+from prez.repositories import Repo, PyoxigraphRepo
+
+
+@pytest.fixture(scope="session")
+def test_store() -> Store:
+    # Create a new pyoxigraph Store
+    store = Store()
+
+    file = Path("../test_data/catprez.ttl")
+    store.load(file.read_bytes(), "text/turtle")
+
+    return store
+
+
+@pytest.fixture(scope="session")
+def test_repo(test_store: Store) -> Repo:
+    # Create a PyoxigraphQuerySender using the test_store
+    return PyoxigraphRepo(test_store)
+
+
+def wait_for_app_to_be_ready(client, timeout=10):
+    start_time = time.time()
+    while time.time() - start_time < timeout:
+        try:
+            response = client.get("/health")
+            if response.status_code == 200:
+                return
+        except Exception as e:
+            print(e)
+        time.sleep(0.5)
+    raise RuntimeError("App did not start within the specified timeout")
+
+
+@pytest.fixture(scope="session")
+def client(test_repo: Repo) -> TestClient:
+    # Override the dependency to use the test_repo
+    def override_get_repo():
+        return test_repo
+
+    app.dependency_overrides[get_repo] = override_get_repo
+
+    with TestClient(app, backend_options={"loop_factory": asyncio.new_event_loop}) as c:
+        wait_for_app_to_be_ready(c)
+        yield c
+
+    # Remove the override to ensure subsequent tests are unaffected
+    app.dependency_overrides.clear()
+
+
+@pytest.fixture(scope="session")
+def a_catalog_link(client):
+    # get link for first catalog
+    r = client.get("/catalogs")
+    g = Graph().parse(data=r.text)
+    member_uri = g.value(None, RDF.type, DCAT.Catalog)
+    link = g.value(member_uri, URIRef("https://prez.dev/link"))
+    return link
+
+
+@pytest.fixture(scope="session")
+def a_resource_link(client, a_catalog_link):
+    r = client.get(a_catalog_link)
+    g = Graph().parse(data=r.text)
+    links = g.objects(subject=None, predicate=URIRef("https://prez.dev/link"))
+    for link in links:
+        if link != a_catalog_link:
+            return link
+
+
+def test_listing_alt_profile(client):
+    r = client.get("/catalogs?_mediatype=text/turtle&_profile=altr-ext:alt-profile")
+    response_graph = Graph().parse(data=r.text)
+    assert (
+        URIRef("http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile"),
+        RDF.type,
+        URIRef("https://prez.dev/ListingProfile"),
+    ) in response_graph
+
+
+
+def test_object_alt_profile(client, a_catalog_link):
+    r = client.get(f"{a_catalog_link}?_mediatype=text/turtle&_profile=altr-ext:alt-profile")
+    response_graph = Graph().parse(data=r.text)
+    expected_response = (
+        URIRef("https://example.com/TopLevelCatalog"),
+        RDF.type,
+        DCAT.Catalog,
+    )
+    assert next(response_graph.triples(expected_response))
diff --git a/tests/test_node_selection_shacl.py b/tests/test_node_selection_shacl.py
index 4e0ed484..54659c26 100755
--- a/tests/test_node_selection_shacl.py
+++ b/tests/test_node_selection_shacl.py
@@ -1,30 +1,26 @@
+import pytest
+from rdflib import Graph, URIRef
+
 from prez.services.query_generation.node_selection.endpoint_shacl import (
     NodeShape,
     PropertyShape,
 )
-from rdflib import Graph, URIRef
-import pytest
 
 endpoints_graph = Graph().parse(
     "prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl", format="turtle"
 )
 
 
-# @pytest.fixture
-# def property_shape():
-#     return endpoints_graph.value(
-#         subject=URIRef("http://example.org/ns#ResourceListing"),
-#         predicate=URIRef("http://www.w3.org/ns/shacl#property"),
-#     )
-
-
 @pytest.mark.parametrize(
-    "nodeshape_uri", ["http://example.org/ns#FeatureCollectionListing"]
+    "nodeshape_uri", ["http://example.org/ns#Collections"]
 )
 def test_nodeshape_parsing(nodeshape_uri):
     ns = NodeShape(uri=URIRef(nodeshape_uri), graph=endpoints_graph)
     assert ns.targetClasses == [
-        URIRef("http://www.opengis.net/ont/geosparql#FeatureCollection")
+        URIRef("http://www.opengis.net/ont/geosparql#FeatureCollection"),
+        URIRef("http://www.w3.org/2004/02/skos/core#ConceptScheme"),
+        URIRef("http://www.w3.org/2004/02/skos/core#Collection"),
+        URIRef("http://www.w3.org/ns/dcat#Catalog"),
     ]
     assert len(ns.propertyShapesURIs) == 1
@@ -33,29 +29,9 @@
     "nodeshape_uri",
     [
-        "http://example.org/ns#TopLevelCatalogs"
-        # "http://example.org/ns#FeatureListing"
+        "http://example.org/ns#TopLevelCatalogs",
+        "http://example.org/ns#FeatureListing",
     ],
 )
 def test_nodeshape_to_grammar(nodeshape_uri):
     ns = NodeShape(uri=URIRef(nodeshape_uri), graph=endpoints_graph)
     ns.to_grammar()
-    print("")
-
-
-@pytest.mark.parametrize(
-    "property_shape", ["http://example.org/ns#resourceListingPropertyShape2"]
-)
-def test_propertyshape_parsing(property_shape):
-    ps = PropertyShape(uri=URIRef(property_shape), graph=endpoints_graph)
-    ps.to_grammar()
-    print("")
-
-
-@pytest.mark.parametrize(
-    "property_shape", ["http://example.org/ns#resourceListingPropertyShape2"]
-)
-def test_propertyshape_create_grammar(property_shape):
-    ps = PropertyShape(uri=URIRef(property_shape))
-    # ps.from_graph(graph=endpoints_graph)
-    # ps.to_grammar()
-    # assert True

From 5e0df3d90314c67fbc59ad1b2ac560e6dc8e30c1 Mon Sep 17 00:00:00 2001
From: david
Date: Mon, 4 Mar 2024 12:24:19 +1000
Subject: [PATCH 16/25] minimally working annotations using same repo abstraction

---
 prez/app.py                                   |  26 +--
 prez/cache.py                                 |   7 +
 prez/dependencies.py                          |  33 ++-
 prez/renderers/renderer.py                    |  66 ++----
 prez/repositories/base.py                     |   5 +-
 prez/routers/management.py                    |  39 ++--
 prez/services/annotations.py                  | 188 ++++++------------
 prez/services/app_service.py                  |  20 --
 prez/services/query_generation/annotations.py |  38 ++++
 temp/grammar/grammar.py                       |   2 +-
 10 files changed, 193 insertions(+), 231 deletions(-)
 create mode 100644 prez/services/query_generation/annotations.py

diff --git a/prez/app.py b/prez/app.py
index c3f26f11..fd74cfd4 100755
--- a/prez/app.py
+++ b/prez/app.py
@@ -1,11 +1,11 @@
 import logging
+import time
 from textwrap import dedent
 
 import uvicorn
-from fastapi import FastAPI
 from rdflib import Graph
 from starlette.middleware.cors import CORSMiddleware
-
+from fastapi import FastAPI
 from prez.config import settings
 from prez.dependencies import (
     get_async_http_client,
@@ -13,7 +13,7 @@
     load_local_data_to_oxigraph,
     get_oxrdflib_store,
     get_system_store,
-    load_system_data_to_oxigraph,
+    load_system_data_to_oxigraph, get_annotations_store, load_annotations_data_to_oxigraph,
 )
 from prez.models.model_exceptions import (
     ClassNotFoundException,
@@ -34,7 +34,7 @@
     create_endpoints_graph,
     populate_api_info,
     add_prefixes_to_prefix_graph,
-    add_common_context_ontologies_to_tbox_cache,
+    # add_common_context_ontologies_to_tbox_cache,
 )
 from prez.services.exception_catchers import (
     catch_400,
@@ -89,18 +89,6 @@ async def add_cors_headers(request, call_next):
 )
 
 
-# def prez_open_api_metadata():
-#     return get_openapi(
-#         title=settings.prez_title,
-#         version=settings.prez_version,
-#         description=settings.prez_desc,
-#         routes=app.routes,
-#     )
-#
-#
app.openapi = prez_open_api_metadata - - @app.on_event("startup") async def app_startup(): """ @@ -108,6 +96,7 @@ async def app_startup(): are available. Initial caching can be triggered within the try block. NB this function does not check that data is appropriately configured at the SPARQL endpoint(s), only that the SPARQL endpoint(s) are reachable. """ + a = time.time() setup_logger(settings) log = logging.getLogger("prez") log.info("Starting up") @@ -133,11 +122,14 @@ async def app_startup(): await create_endpoints_graph(app.state.repo) await count_objects(app.state.repo) await populate_api_info() - await add_common_context_ontologies_to_tbox_cache() app.state.pyoxi_system_store = get_system_store() await load_system_data_to_oxigraph(app.state.pyoxi_system_store) + app.state.pyoxi_annotations_store = get_annotations_store() + await load_annotations_data_to_oxigraph(app.state.pyoxi_annotations_store) + + log.info(f"Startup took {time.time() - a} seconds") @app.on_event("shutdown") async def app_shutdown(): diff --git a/prez/cache.py b/prez/cache.py index c8b0620a..ca93ee3c 100755 --- a/prez/cache.py +++ b/prez/cache.py @@ -1,7 +1,11 @@ from pyoxigraph.pyoxigraph import Store from rdflib import Graph, ConjunctiveGraph, Dataset +from aiocache import SimpleMemoryCache, Cache + +from prez.repositories import PyoxigraphRepo tbox_cache = Graph() +tbox_cache_aio = Cache.MEMORY profiles_graph_cache = ConjunctiveGraph() profiles_graph_cache.bind("prez", "https://prez.dev/") @@ -26,4 +30,7 @@ system_store = Store() +annotations_store = Store() +annotations_repo = PyoxigraphRepo(annotations_store) + oxrdflib_store = Graph(store="Oxigraph") diff --git a/prez/dependencies.py b/prez/dependencies.py index 2b3ce22f..e2a93e03 100755 --- a/prez/dependencies.py +++ b/prez/dependencies.py @@ -1,11 +1,10 @@ import json from pathlib import Path -from typing import Optional import httpx from fastapi import Depends, Request, HTTPException -from pydantic import BaseModel from pyoxigraph import Store +from rdflib import Dataset from prez.cache import ( store, @@ -13,6 +12,8 @@ system_store, profiles_graph_cache, endpoints_graph_cache, + annotations_store, + annotations_repo ) from prez.config import settings from prez.repositories import PyoxigraphRepo, RemoteSparqlRepo, OxrdflibRepo @@ -36,6 +37,10 @@ def get_system_store(): return system_store +def get_annotations_store(): + return annotations_store + + def get_oxrdflib_store(): return oxrdflib_store @@ -63,6 +68,14 @@ async def get_system_repo( return PyoxigraphRepo(pyoxi_store) +async def get_annotations_repo(): + """ + A pyoxigraph Store with labels, descriptions etc. 
from Context Ontologies + """ + return annotations_repo + + + async def load_local_data_to_oxigraph(store: Store): """ Loads all the data from the local data directory into the local SPARQL endpoint @@ -83,8 +96,20 @@ async def load_system_data_to_oxigraph(store: Store): store.load(endpoints_bytes, "application/n-triples") -class CQLRequest(BaseModel): - cql: Optional[dict] +async def load_annotations_data_to_oxigraph(store: Store): + """ + Loads all the data from the local data directory into the local SPARQL endpoint + """ + relevant_predicates = settings.label_predicates + settings.description_predicates + settings.provenance_predicates + raw_g = Dataset(default_union=True) + for file in (Path(__file__).parent / "reference_data/context_ontologies").glob("*"): + raw_g.parse(file) + relevant_g = Dataset(default_union=True) + relevant_triples = raw_g.triples_choices((None, relevant_predicates, None)) + for triple in relevant_triples: + relevant_g.add(triple) + file_bytes = relevant_g.serialize(format="nt", encoding="utf-8") + store.load(file_bytes, "application/n-triples") async def cql_post_parser_dependency(request: Request): diff --git a/prez/renderers/renderer.py b/prez/renderers/renderer.py index 8f87c850..2e9093f6 100755 --- a/prez/renderers/renderer.py +++ b/prez/renderers/renderer.py @@ -1,6 +1,7 @@ import io import json import logging +import time from connegp import RDF_MEDIATYPES, RDF_SERIALIZER_TYPES_MAP from fastapi import status @@ -10,22 +11,22 @@ from prez.renderers.csv_renderer import render_csv_dropdown from prez.renderers.json_renderer import render_json_dropdown, NotFoundError -from prez.services.curie_functions import get_curie_id_for_uri from prez.repositories import Repo from prez.services.annotations import ( get_annotation_properties, ) +from prez.services.curie_functions import get_curie_id_for_uri log = logging.getLogger(__name__) async def return_from_graph( - graph, - mediatype, - profile, - profile_headers, - selected_class: URIRef, - repo: Repo, + graph, + mediatype, + profile, + profile_headers, + selected_class: URIRef, + repo: Repo, ): profile_headers["Content-Disposition"] = "inline" @@ -64,7 +65,7 @@ async def return_from_graph( else: if "anot+" in mediatype: non_anot_mediatype = mediatype.replace("anot+", "") - graph = await return_annotated_rdf(graph, profile, repo) + graph = await return_annotated_rdf(graph, repo) content = io.BytesIO( graph.serialize(format=non_anot_mediatype, encoding="utf-8") ) @@ -88,43 +89,16 @@ async def return_rdf(graph, mediatype, profile_headers): return StreamingResponse(content=obj, media_type=mediatype, headers=profile_headers) -async def get_annotations_graph(graph, cache, repo): - queries_for_uncached, annotations_graph = await get_annotation_properties(graph) - - if queries_for_uncached is None: - anots_from_triplestore = Graph() - else: - anots_from_triplestore, _ = await repo.send_queries([queries_for_uncached], []) - - if len(anots_from_triplestore) > 1: - annotations_graph += anots_from_triplestore - cache += anots_from_triplestore - - return annotations_graph - - async def return_annotated_rdf( - graph: Graph, - profile, - repo, + graph: Graph, + repo, ) -> Graph: - from prez.cache import tbox_cache - - cache = tbox_cache - queries_for_uncached, annotations_graph = await get_annotation_properties(graph) - anots_from_triplestore, _ = await repo.send_queries([queries_for_uncached], []) - if len(anots_from_triplestore) > 0: - annotations_graph += anots_from_triplestore - cache += anots_from_triplestore - - 
previous_triples_count = len(graph) - - # Expand the graph with annotations specified in the profile until no new statements are added. - while True: - graph += await get_annotations_graph(graph, cache, repo) - if len(graph) == previous_triples_count: - break - previous_triples_count = len(graph) - - graph.bind("prez", "https://prez.dev/") - return graph + annotations_graph = await get_annotation_properties(graph, repo) + # previous_annotation_len = 0 + # current_annotation_len = len(annotations_graph) + # while current_annotation_len != previous_annotation_len: + # previous_annotation_len = current_annotation_len + # new_annotations = await get_annotation_properties(annotations_graph, repo) + # current_annotation_len = len(new_annotations) + # annotations_graph += new_annotations + return graph.__iadd__(annotations_graph) diff --git a/prez/repositories/base.py b/prez/repositories/base.py index 3e27a481..3f475a3a 100755 --- a/prez/repositories/base.py +++ b/prez/repositories/base.py @@ -1,11 +1,13 @@ import asyncio import logging +import time from abc import ABC, abstractmethod from typing import List from typing import Tuple from rdflib import Namespace, Graph, URIRef + PREZ = Namespace("https://prez.dev/") log = logging.getLogger(__name__) @@ -32,7 +34,8 @@ async def send_queries( if query ], ) - g = Graph() + from prez.cache import prefix_graph + g = Graph(namespace_manager=prefix_graph.namespace_manager) tabular_results = [] for result in results: if isinstance(result, Graph): diff --git a/prez/routers/management.py b/prez/routers/management.py index 4614f77b..6ccc30d1 100755 --- a/prez/routers/management.py +++ b/prez/routers/management.py @@ -7,13 +7,13 @@ from rdflib.collection import Collection from starlette.requests import Request from starlette.responses import PlainTextResponse - +from prez.services.annotations import process_term from prez.cache import endpoints_graph_cache -from prez.cache import tbox_cache +from prez.cache import tbox_cache, tbox_cache_aio from prez.config import settings from prez.reference_data.prez_ns import PREZ from prez.renderers.renderer import return_rdf -from prez.services.app_service import add_common_context_ontologies_to_tbox_cache +# from prez.services.app_service import add_common_context_ontologies_to_tbox_cache router = APIRouter(tags=["Management"]) log = logging.getLogger(__name__) @@ -36,18 +36,31 @@ async def index(): async def purge_tbox_cache(): """Purges the tbox cache, then re-adds annotations from common ontologies Prez has a copy of (reference_data/context_ontologies).""" - tbox_cache.remove((None, None, None)) - await add_common_context_ontologies_to_tbox_cache() - return PlainTextResponse("Tbox cache purged and reset to startup state") + cache = process_term.cache + cache_size = len(cache._cache) + result = await cache.clear() + if result: + return PlainTextResponse(f"{cache_size} terms removed from tbox cache.") + else: + return PlainTextResponse("Tbox cache already empty.") + + +# @router.get("/tbox-cache", summary="Show the Tbox Cache") +# async def return_tbox_cache(request: Request): +# """gets the mediatype from the request and returns the tbox cache in this mediatype""" +# mediatype = request.headers.get("Accept").split(",")[0] +# if not mediatype or mediatype not in RDF_MEDIATYPES: +# mediatype = "text/turtle" +# return await return_rdf(tbox_cache, mediatype, profile_headers={}) -@router.get("/tbox-cache", summary="Show the Tbox Cache") -async def return_tbox_cache(request: Request): - """gets the mediatype from the 
diff --git a/prez/routers/management.py b/prez/routers/management.py
index 4614f77b..6ccc30d1 100755
--- a/prez/routers/management.py
+++ b/prez/routers/management.py
@@ -7,13 +7,13 @@
 from rdflib.collection import Collection
 from starlette.requests import Request
 from starlette.responses import PlainTextResponse
-
+from prez.services.annotations import process_term
 from prez.cache import endpoints_graph_cache
-from prez.cache import tbox_cache
+from prez.cache import tbox_cache, tbox_cache_aio
 from prez.config import settings
 from prez.reference_data.prez_ns import PREZ
 from prez.renderers.renderer import return_rdf
-from prez.services.app_service import add_common_context_ontologies_to_tbox_cache
+# from prez.services.app_service import add_common_context_ontologies_to_tbox_cache
 
 router = APIRouter(tags=["Management"])
 log = logging.getLogger(__name__)
@@ -36,18 +36,31 @@ async def purge_tbox_cache():
-    """Purges the tbox cache, then re-adds annotations from common ontologies Prez has
-    a copy of (reference_data/context_ontologies)."""
-    tbox_cache.remove((None, None, None))
-    await add_common_context_ontologies_to_tbox_cache()
-    return PlainTextResponse("Tbox cache purged and reset to startup state")
+    """Purges the tbox (annotations) cache, removing all term annotations
+    cached from the triplestore and the bundled context ontologies."""
+    cache = process_term.cache
+    cache_size = len(cache._cache)
+    result = await cache.clear()
+    if result:
+        return PlainTextResponse(f"{cache_size} terms removed from tbox cache.")
+    else:
+        return PlainTextResponse("Tbox cache already empty.")
+
+
+# @router.get("/tbox-cache", summary="Show the Tbox Cache")
+# async def return_tbox_cache(request: Request):
+#     """gets the mediatype from the request and returns the tbox cache in this mediatype"""
+#     mediatype = request.headers.get("Accept").split(",")[0]
+#     if not mediatype or mediatype not in RDF_MEDIATYPES:
+#         mediatype = "text/turtle"
+#     return await return_rdf(tbox_cache, mediatype, profile_headers={})
 
 
-@router.get("/tbox-cache", summary="Show the Tbox Cache")
-async def return_tbox_cache(request: Request):
-    """gets the mediatype from the request and returns the tbox cache in this mediatype"""
-    mediatype = request.headers.get("Accept").split(",")[0]
-    if not mediatype or mediatype not in RDF_MEDIATYPES:
-        mediatype = "text/turtle"
-    return await return_rdf(tbox_cache, mediatype, profile_headers={})
+def unpack_cache():
+    pass
+    # Useful reference code for a future implementation:
+    # cache = process_term.cache
+    # serialized_data = cache._cache[rdflib.term.URIRef('https://example.com/TopLevelCatalogTwo')]
+    # deserialized_data = pickle.loads(serialized_data)
+    # print(f"{' '.join([t.n3() for t in list(*b)])} .\n")
 
 
 @router.get("/health")
diff --git a/prez/services/annotations.py b/prez/services/annotations.py
index 14df3e80..4abee06f 100755
--- a/prez/services/annotations.py
+++ b/prez/services/annotations.py
@@ -1,23 +1,68 @@
+import asyncio
 import logging
+import os
+import time
 from itertools import chain
-from textwrap import dedent
-from typing import List, Tuple
+from typing import List, FrozenSet, Tuple
 
+from aiocache.serializers import PickleSerializer
+from aiocache import cached
+from rdflib import Graph, URIRef, Literal, Dataset
+from rdflib.namespace import RDFS
 
-from rdflib import Graph, URIRef, Namespace, Literal
-
-from prez.cache import tbox_cache
+from prez.cache import tbox_cache, tbox_cache_aio
 from prez.config import settings
-from prez.services.curie_functions import get_uri_for_curie_id
+from prez.dependencies import get_annotations_repo
+from prez.reference_data.prez_ns import PREZ
+from prez.repositories import Repo
+from prez.services.query_generation.annotations import AnnotationsConstructQuery
+from temp.grammar import *
 
 log = logging.getLogger(__name__)
 
-ALTREXT = Namespace("http://www.w3.org/ns/dx/conneg/altr-ext#")
-PREZ = Namespace("https://prez.dev/")
+pred = IRI(value=URIRef("https://prez.dev/label"))
+
+
+async def process_terms(terms, repo) -> Graph:
+    """Gathers annotations for each term concurrently and merges them into a single graph."""
+    results = await asyncio.gather(*[process_term(term, repo) for term in terms])
+    triples = list(chain(*results))
+    annotations_g = Graph()
+    for triple in triples:
+        annotations_g.add(triple)
+    return annotations_g
+
+
+def term_based_key_builder(func, *args, **kwargs):
+    return args[0]
+
+
+@cached(cache=tbox_cache_aio, key_builder=term_based_key_builder, serializer=PickleSerializer())
+async def process_term(term, repo) -> FrozenSet[Tuple[URIRef, URIRef, Literal]]:
+    """
+    Gets annotations for an individual term; results are cached in tbox_cache_aio,
+    keyed on the term alone via term_based_key_builder.
+    """
+    log.info(f"Processing annotations for term {term}")
+    annotations_repo = await get_annotations_repo()
+    annotations_query = AnnotationsConstructQuery(
+        term=IRI(value=term),
+        construct_predicate=IRI(value=PREZ.label),  # TODO change to predicate map
+        select_predicates=[IRI(value=RDFS.label)]
+    ).to_string()
+    # query the local annotations store of context ontologies
+    context_results = await annotations_repo.send_queries(rdf_queries=[annotations_query], tabular_queries=[])
+    # query the data repo as well; results from both sources are merged
+    repo_results = await repo.send_queries(rdf_queries=[annotations_query], tabular_queries=[])
+    all_results = context_results[0] + repo_results[0]
+    cacheable_results = frozenset(all_results)
+    log.info(f"Processed term {term}, found {len(cacheable_results)} annotations.")
+    return cacheable_results
 
 
 async def get_annotation_properties(
-    item_graph: Graph,
-):
+    item_graph: Graph,
+    repo: Repo,
+) -> Graph:
     """
     Gets annotation data used for HTML display.
 
     This includes the label, description, and provenance, if available.
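Because term_based_key_builder returns args[0], entries in tbox_cache_aio are keyed on the term URI alone: the repo argument plays no part in the key, so whichever repo first resolves a term populates the entry that later calls reuse. A minimal sketch of the query process_term builds, mirroring its arguments with a hypothetical term (exact whitespace depends on the grammar's render()):

from rdflib import URIRef
from rdflib.namespace import RDFS

from prez.reference_data.prez_ns import PREZ
from prez.services.query_generation.annotations import AnnotationsConstructQuery
from temp.grammar import IRI

# For the hypothetical term below, to_string() yields the equivalent of:
#   CONSTRUCT { <https://example.com/thing> <https://prez.dev/label> ?annotation }
#   WHERE { <https://example.com/thing> <http://www.w3.org/2000/01/rdf-schema#label> ?annotation }
query = AnnotationsConstructQuery(
    term=IRI(value=URIRef("https://example.com/thing")),
    construct_predicate=IRI(value=PREZ.label),
    select_predicates=[IRI(value=RDFS.label)],
).to_string()
print(query)
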
@@ -25,124 +70,9 @@ async def get_annotation_properties( which are often diverse in the predicates they use, to be aligned with the default predicates used by Prez. The full range of predicates used can be manually included via profiles. """ - label_predicates = settings.label_predicates - description_predicates = settings.description_predicates - explanation_predicates = settings.provenance_predicates - other_predicates = settings.other_predicates - terms = ( - set(i for i in item_graph.predicates() if isinstance(i, URIRef)) - | set(i for i in item_graph.objects() if isinstance(i, URIRef)) - | set(i for i in item_graph.subjects() if isinstance(i, URIRef)) - ) - # TODO confirm caching of SUBJECT labels does not cause issues! this could be a lot of labels. Perhaps these are - # better separated and put in an LRU cache. Or it may not be worth the effort. + terms = set(term for term in item_graph.all_nodes() if isinstance(term, URIRef)) if not terms: - return None, Graph() - # read labels from the tbox cache, this should be the majority of labels - uncached_terms, labels_g = get_annotations_from_tbox_cache( - terms, - label_predicates, - description_predicates, - explanation_predicates, - other_predicates, - ) - - def other_predicates_statement(other_predicates, uncached_terms_other): - return f"""UNION - {{ - ?unannotated_term ?other_prop ?other . - VALUES ?other_prop {{ {" ".join('<' + str(pred) + '>' for pred in other_predicates)} }} - VALUES ?unannotated_term {{ {" ".join('<' + str(term) + '>' for term in uncached_terms_other)} - }} - }}""" + return Graph() - queries_for_uncached = f"""CONSTRUCT {{ - ?unlabeled_term ?label_prop ?label . - ?undescribed_term ?desc_prop ?description . - ?unexplained_term ?expl_prop ?explanation . - ?unannotated_term ?other_prop ?other . - }} - WHERE {{ - {{ - ?unlabeled_term ?label_prop ?label . - VALUES ?label_prop {{ {" ".join('<' + str(pred) + '>' for pred in label_predicates)} }} - VALUES ?unlabeled_term {{ {" ".join('<' + str(term) + '>' for term in uncached_terms["labels"])} }} - FILTER(lang(?label) = "" || lang(?label) = "en" || lang(?label) = "en-AU") - }} - UNION - {{ - ?undescribed_term ?desc_prop ?description . - VALUES ?desc_prop {{ {" ".join('<' + str(pred) + '>' for pred in description_predicates)} }} - VALUES ?undescribed_term {{ {" ".join('<' + str(term) + '>' for term in uncached_terms["descriptions"])} - }} - }} - UNION - {{ - ?unexplained_term ?expl_prop ?explanation . 
- VALUES ?expl_prop {{ {" ".join('<' + str(pred) + '>' for pred in explanation_predicates)} }} - VALUES ?unexplained_term {{ {" ".join('<' + str(term) + '>' for term in uncached_terms["provenance"])} - }} - }} - {other_predicates_statement(other_predicates, uncached_terms["other"]) if other_predicates else ""} - }}""" - return queries_for_uncached, labels_g - - -def get_annotations_from_tbox_cache( - terms: List[URIRef], label_props, description_props, explanation_props, other_props -): - """ - Gets labels from the TBox cache, returns a list of terms that were not found in the cache, and a graph of labels, - descriptions, and explanations - """ - labels_from_cache = Graph(bind_namespaces="rdflib") - terms_list = list(terms) - props_from_cache = { - "labels": list( - chain( - *( - tbox_cache.triples_choices((terms_list, prop, None)) - for prop in label_props - ) - ) - ), - "descriptions": list( - chain( - *( - tbox_cache.triples_choices((terms_list, prop, None)) - for prop in description_props - ) - ) - ), - "provenance": list( - chain( - *( - tbox_cache.triples_choices((terms_list, prop, None)) - for prop in explanation_props - ) - ) - ), - "other": list( - chain( - *( - tbox_cache.triples_choices((terms_list, prop, None)) - for prop in other_props - ) - ) - ), - } - # get all the annotations we can from the cache - all = list(chain(*props_from_cache.values())) - default_language = settings.default_language - for triple in all: - if isinstance(triple[2], Literal): - if triple[2].language == default_language: - labels_from_cache.add(triple) - elif triple[2].language is None: - labels_from_cache.add(triple) - # the remaining terms are not in the cache; we need to query the SPARQL endpoint to attempt to get them - uncached_props = { - k: list(set(terms) - set(triple[0] for triple in v)) - for k, v in props_from_cache.items() - } - return uncached_props, labels_from_cache + annotations_g = await process_terms(terms, repo) + return annotations_g diff --git a/prez/services/app_service.py b/prez/services/app_service.py index 04a155a1..17ae753e 100755 --- a/prez/services/app_service.py +++ b/prez/services/app_service.py @@ -152,23 +152,3 @@ async def get_remote_endpoint_definitions(repo): log.info(f"Remote endpoint definition(s) found and added") else: log.info("No remote endpoint definitions found") - - -async def add_common_context_ontologies_to_tbox_cache(): - g = Dataset(default_union=True) - for file in ( - Path(__file__).parent.parent / "reference_data/context_ontologies" - ).glob("*"): - g.parse(file) - relevant_predicates = [ - RDFS.label, - DCTERMS.title, - DCTERMS.description, - SDO.name, - SKOS.prefLabel, - SKOS.definition, - ] - triples = g.triples_choices((None, relevant_predicates, None)) - for triple in triples: - tbox_cache.add(triple) - log.info(f"Added {len(tbox_cache):,} triples from context ontologies to TBox cache") diff --git a/prez/services/query_generation/annotations.py b/prez/services/query_generation/annotations.py new file mode 100644 index 00000000..ce28d1dd --- /dev/null +++ b/prez/services/query_generation/annotations.py @@ -0,0 +1,38 @@ +from typing import List + +from temp.grammar import * + + +class AnnotationsConstructQuery(ConstructQuery): + def __init__( + self, + term: IRI, + construct_predicate: IRI, + select_predicates: List[IRI] + ): + construct_template = ConstructTemplate( + construct_triples=ConstructTriples( + triples=[SimplifiedTriple( + subject=term, + predicate=construct_predicate, + object=Var(value="annotation"))] + ) + ) + where_clause = 
WhereClause( + group_graph_pattern=GroupGraphPattern( + content=GroupGraphPatternSub( + graph_patterns_or_triples_blocks=[TriplesBlock( + triples=[SimplifiedTriple( + subject=term, + predicate=select_predicates[0], # Assuming a single select predicate for simplicity + object=Var(value="annotation"))] + )] + ) + ) + ) + solution_modifier = SolutionModifier() + super().__init__( + construct_template=construct_template, + where_clause=where_clause, + solution_modifier=solution_modifier + ) diff --git a/temp/grammar/grammar.py b/temp/grammar/grammar.py index 9f4fb3a6..060a9b61 100755 --- a/temp/grammar/grammar.py +++ b/temp/grammar/grammar.py @@ -945,7 +945,7 @@ class GroupCondition(SPARQLGrammarBase): GroupCondition ::= BuiltInCall | FunctionCall | '(' Expression ( 'AS' Var )? ')' | Var """ - condition: Union["BuiltInCall", FunctionCall, Tuple[Expression, Var], Var] + condition: Union[BuiltInCall, FunctionCall, Tuple[Expression, Var], Var] def render(self) -> Generator[str, None, None]: if isinstance(self.condition, Tuple): From d7b708e7532c403214f0c87cde2624573db453f3 Mon Sep 17 00:00:00 2001 From: Lawson Lewis Date: Tue, 20 Feb 2024 08:21:57 +1000 Subject: [PATCH 17/25] connegp refactor --- .gitignore | 4 + connegp-0.1.6-py3-none-any.whl | Bin 5052 -> 0 bytes poetry.lock | 343 +++++++++--------- prez/models/profiles_and_mediatypes.py | 58 --- prez/repositories/base.py | 2 +- prez/routers/sparql.py | 31 +- prez/services/connegp_service.py | 243 ++++++++++++- prez/services/generate_profiles.py | 128 +------ prez/services/listings.py | 68 ++-- prez/services/objects.py | 67 ++-- prez/services/query_generation/connegp.py | 123 ------- pyproject.toml | 1 - tests/data/profiles/ogc_records_profile.ttl | 106 ++++++ .../profiles/spaceprez_default_profiles.ttl | 138 +++++++ tests/test_connegp.py | 151 ++++++++ 15 files changed, 872 insertions(+), 591 deletions(-) delete mode 100755 connegp-0.1.6-py3-none-any.whl delete mode 100755 prez/models/profiles_and_mediatypes.py delete mode 100644 prez/services/query_generation/connegp.py create mode 100755 tests/data/profiles/ogc_records_profile.ttl create mode 100755 tests/data/profiles/spaceprez_default_profiles.ttl create mode 100644 tests/test_connegp.py diff --git a/.gitignore b/.gitignore index 02f11d33..434402ea 100755 --- a/.gitignore +++ b/.gitignore @@ -6,3 +6,7 @@ __pycache__/ .pytest_cache/ .env* !.env-template +http/ +/.python-version +http/ +rdf/ \ No newline at end of file diff --git a/connegp-0.1.6-py3-none-any.whl b/connegp-0.1.6-py3-none-any.whl deleted file mode 100755 index 1cc73c1f496f823f23e2b09aad796cd32cefe045..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 5052 zcmaJ_2Q*w;+aAOOQKN)tL$v5UI-~dAdl@wtT@a#|QAZikOSEXwdj=t+w;+UEy#^sd z#MM6e?*ILgtb6Y{Yp=b|TIYH8+0S#{cfWgUs$kqA0RRA4fFm<618$SjzUAvD8gu}F z_`0`&y1LrhyKz}qIJ!D|Sy*to`Nb!V!QOBaqW1k@<0;Z#@We={3oO&M#mk0x!$tOh zb4(24Tk{nzDe<4{o~t~E27TSk8Z1Mkm+zFlzvH6INV!rsJ$NKWE2AGVd?aco5Xm$1 z#_$bdRQt$4jf_^0WiZ(*agkvZl1n+U4o@zx=lbfrkrixXof~WHBS8LQ_*I1Se!wt! zXD9A^$2!T4U|J<$`xC<0V_+ctxZpM!A^w@v92m|te{>5IpW9A}re4@4y3mpEH2E36 z8~+|veB*={{yn5SNz|py!UIWWw|B8x%#tQ}dQd@6@N3Iwry;7wKf!OwFQ02X~qw$+fqZ)UkHfIC;>q6Sf- zV-!VMU!LX4Xp=nDYRdW}>Jwcp8cV282^OP#qS_)a}j==Aa zdV*$w+`xSLLP>*bxpKy2c~lNj;+#Y9VJLJ!ps?@|Wd^vCn>gm){J^Siczk3YxOeQY z(nK%39y-$GOrf)s5<}sdGl5AXG|$}z5z}3imn9LSPjA)^$9|fT;D4(cg!LH?1HFxe zi?%*5t14nrU1{jYQd2@LqnRY%D(li$D%4njE}6=2WKS?pAyn)X{8TqVJ?Bze9x-=( zR!?8~?=#aW%L~fEV<)DkMK}f5sdVdJ`+q<9;wjcSNRTJ_@sDS1F!!96KhGLJ+7IB? 
zfd+USf>;Lq$ahi82Wm&0lae+t2w-RW$nKPnBZcv0-H*eVepUM#1OCy5_` zzpWQx8RW}c& zeB3>?E@o2Ig@3z;QY~FJ-#*v_Twb+8l^upqDlq7#P^((5JtQ+PI1Qg+9dIB*>hgBn zDI7TV>NmU%(X8c4@Z6lY36rHv17p8V4aBH_n?7;~!C5~9XL zFV^M78xLxK9urz=%?XQqD)W)dqseAzepSJo*9ChNmMmI&>>nGB174_zzEgji=d2~m zD#kr%S1%MF`=L@l#d4sWEVADIKoyIpu|7!CU7Q!h10L?$qSS?p<%B*&LLOE$JFbo# zeKUfK*Xjam>UHJ$f~bX`;@M}?QV<>LthQ=x%(~4QF0TU+0sHL%Y%tP$!K5~YA%(3D z`iI{;A`bIcRjNSF>|c|^6_no#@EwM0-Bz-%ZKZTdPnIampH+lnxHE)>%#;1H zD?Dv)y+BpMRU;IJyb@h@aX#@?>2rbok~FmYY%DVDnwNNNjItP}x(V{B_&iexRdz$x z5JOC22W9kj?>Q$3w_oQX!AU1R9}?;Rw8a={S}2vy3N|}-pJv>O;De_*3SF7ZGWS3YU=cEg5tzK`s@_3ZK+lTGFiz75(PU{A zBUX)Q|Kog9)g%!f_@hS$ed3}{llx*VZJ^NGYZU5{51eXflXm*s<))?A^7tQqfM(mD ziw!2Z<@U|!Gt3*@E!C*ml?*{JkuRI)Xmh!`D!CESp|!lyp7(saWmwX*#U*K`&@7X= zxs*;v^Pa5A5vVHROp(hZ3ewo+R^ReL7OgqT<}!6L1443IjCJQWX>aRo%G$pY?1Sn> z%r&9(Ep189+1Lgmr7yH6i%HG2ad2k@`5hBE1skNA6DH+Q!VLXu!RoCM{xFUx@Q$Lo zO&J3}(U4X4K6eVU&@4OaYY6K+*IioPDpGdE5ZrdgXU;mBu-69OG z`1ELJgGE{-Av+=_i@HK*&5YQ8L-9d3H;^x8oH$Ov5I2N}7dWIIH>q&}hm<*8rIjiO zUNW%bRp=4j4cyUwhe?c{+?v>!>znqHIe{5v556kp@6ShJj+K{5!$5pxm?vwA$zL!| zOGH9Nk(|x!9HWD=jmp~6YWRb~Cg9E#&elPa@ zjnS41UOZ5cV_1b4N-@ftxaHv8Ca~Set7~uFs3=k?-P!nr{!Fc7Hi=Xdauz2^_k5V5 znlr++uYn6%2GG@_gU(9t`aW?&)vAw<{-IMiN0P;BY~>NoJUAm^o_)APrJgiOgl`tgKA9o(@gzim|Gs}t1WE7eup0WP4d$B7JLT82hJCY`fkpXx zoA63z+4f86jwn_ptE5kE@1zWIqI(^?-vHZ#T4pHGQp2*k2o56Ipf5q<d$f4d^I*rRuS(WwcrF%)ijxYD-Z&aGewn=8aPKXZ zjPwVw6=qOEGg7vNjNLfN1snXx-ffV60ATl}H_Ie11^67O;DG~KUzK9)d+(y^^-ZmF zJ%k+y)%&cNhUL{vh)$P0dK<|;lT9CCKsp(;U(G3ubQa%pe&#WqOHPw0C}}S!!8tP8 zRGMowYo>Ae!Ym;^91j|o04{Nb?zV68^Wr4}DOK9NqAY!7*i^jF&eb2)sjQXIdW#M} zhMh4bzT!H{qW8LY(KdwV>YAveW5%3`cJ>KqTb$d{0w7TWStFv9DVOf8X@ujo-D+VG z@%3%VX4IehHmzUkG4HHfJ~R(AB!Dol`XqWOtlqNFB46cim*2g`^YPL{1T~XE?x@E8 zV&#@$t4@lZ={D4B=Nq#kyeJ>+X@LS2QP@`|F_QTGN4Ag&QO-lO^x))0b zjO~GV{T@%-2j-9i>!Q9SP>wF$A+~I1-$l$wqT(|xH!_iXS*5yx)^S#Edt-Wv5L2QR z{1u)t-(G&2)?BA@0i9`$7~u!39fMzuyx11CYM07oRY*IIuB*0smP1GWwbB%d^WMAA)cv&t{ zy3xfIjYp2<+f4UUqwh+krui9>5QS3hYM@lk=aI5YB4GETa?;g?wqoDHqmND|iUNYF|@ z`NYXp{JHRu^tLY|C{VRsAQb10QNfvq&Oz((N@A>cb$9uiD@*fWw6uDSs+B?B;~~Qm z&0*;cO&1~cm=HE2Z=tnae&0gX`tusIvT4b)GG&20yABK!`-;T87aWgEGrKmFeb{{U zS%s`mX0oiDLT&H3=H${Hc&Tg2$hZu~oT}VHDVkge)zPse7{w8COh_-rp zKKPIa?lNI)H8{e?y^lU75JinPBkWO9lrM1+qlNgwkiZRpGq~+hQ*~vbTJl~et8iz< z>%d5{!1Uy@$9YZNtGZgQG?r*X31w8ETE8K)`Vq_jv>T#xgcxvr=hW6R%d(An#@-_t zsnta5FRBTf*x`ZE0K|1p5_tI+KF#U$G1B?r;Ll9DqG_3w)Q28E0e1R(`Xj2l$Y2^| z340=35F&jC>9O3r&+Y`ej4<~!_w&xe%}mZ`a&#(pcR)UK&R0dj^(sEDV@Su#mN> zLUy)g*?xzf+X&t$CT~CHAhchdIKv~vk-K)l`nr++@L8(TLoN2u!$H&jNI*g5sF zSOztQhLt$Jsi_U9vcF_k)`o{!JTSq+IE z?L8Xq@T%d*PXqiqZkaPqbjS60s@H$P&l?0c52&4^v+ebu-U&nHZ=`S|Prj0%J;IRy z`_g>G!f`c?p|WPy!N^d`P$D?m(-1Xp-8oh`}L^PrzMd zX8@NNhkiq-B*{02%7F`vo4N8BdmC`(rF@vap6kT*TtPP)b8vI=a0+rl96h}_99`|8 zTxxQ6-G zyl~Dp-7Zb;b$L!9Kb$de`r<=qaJV*I9L&zs}DDG`;_yHDJHwT$8^cVyV<42%;j^9 z{#l52vUy9Tc(3IHVdjN5cl<=*w%l@^y`wtyseL}~D^~8(T-71QSI^dsv<10GnGcOb zT0Xc4G_{z!+^?)f>iw1g#+EsDb+4r`yl&M0B8h>boSZ7&AQ&eYM2I>5y7T~@Sgxu= zsv$Po*yO2lQUkMxQjw+9_;tr8fTbWdU1WsAlVcmjJvLNB2{kJA8`|uq zQfivyBzT_X#U4`EQN#eA$Oj&vLs*cI#i^6gBB2KVJjLqC zFWfj*spZ-A3kZmA#zxO07rlwp$>v@8C!hmO(hahm*|>6E2OC?P?LRkexzMeKx~+dG z;y30L|A<4CbkT}SeTZZY0UbzcOW66%Pdr;ty+hM}8#}=H5mbjVZsNP#ThQ8Yzh_p$ zUfYwStjj`2F5gW@)bj%MQZB7t=%M7JV`Lv7QX7NWgn@X}-EZGgWW zmg2j5(mt;6h&oWJH&s>a#x$SJ$hh@hrwLWDio;??`NPY*rD>^?RxQ=0UY*pG(3s=n zDbqM(omDOX>8XJ)G-G4K&!?1Nb64csd^wvY;;D$3XUF*L#|VtCXA=$B%5e_^x3HzQ z4EXqu6UPBd>s*>DXy_yu|9t*(?cjf}qL3fm-<-z$PVoD2#b0m$peW?-^fw$w{z<)`}zf=64Pj4vr@NOt>vg^0h84I3}PKePQe*ng(_yU_d}RYOSkzfk=ZsG2I6z#s2pUEeI%p&j(2 H)&TqmtxK%P diff --git a/poetry.lock b/poetry.lock index 
82c21253..ca483b23 100755 --- a/poetry.lock +++ b/poetry.lock @@ -93,13 +93,13 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -259,82 +259,65 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -[[package]] -name = "connegp" -version = "0.1.6" -description = "Content negotiation by profile" -optional = false -python-versions = ">=3.8,<4.0" -files = [ - {file = "connegp-0.1.6-py3-none-any.whl", hash = "sha256:8d4f7f605d568032243e7cfa84c22bedae66e28651acb58af82b4b43d3de899f"}, -] - -[package.dependencies] -pydantic = ">=1.8.2,<3.0.0" - -[package.source] -type = "file" -url = "connegp-0.1.6-py3-none-any.whl" - [[package]] name = "coverage" -version = "7.4.1" +version = "7.4.3" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, - {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, - {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, - {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, - {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, - {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, - {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, - {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, - {file = 
"coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, - {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, - {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, - {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, - {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, - {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, - {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, + {file = "coverage-7.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8580b827d4746d47294c0e0b92854c85a92c2227927433998f0d3320ae8a71b6"}, + {file = "coverage-7.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:718187eeb9849fc6cc23e0d9b092bc2348821c5e1a901c9f8975df0bc785bfd4"}, + {file = "coverage-7.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:767b35c3a246bcb55b8044fd3a43b8cd553dd1f9f2c1eeb87a302b1f8daa0524"}, + {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae7f19afe0cce50039e2c782bff379c7e347cba335429678450b8fe81c4ef96d"}, + {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ba3a8aaed13770e970b3df46980cb068d1c24af1a1968b7818b69af8c4347efb"}, + {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ee866acc0861caebb4f2ab79f0b94dbfbdbfadc19f82e6e9c93930f74e11d7a0"}, + {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:506edb1dd49e13a2d4cac6a5173317b82a23c9d6e8df63efb4f0380de0fbccbc"}, + {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd6545d97c98a192c5ac995d21c894b581f1fd14cf389be90724d21808b657e2"}, + {file = "coverage-7.4.3-cp310-cp310-win32.whl", hash = "sha256:f6a09b360d67e589236a44f0c39218a8efba2593b6abdccc300a8862cffc2f94"}, + {file = "coverage-7.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:18d90523ce7553dd0b7e23cbb28865db23cddfd683a38fb224115f7826de78d0"}, + {file = "coverage-7.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbbe5e739d45a52f3200a771c6d2c7acf89eb2524890a4a3aa1a7fa0695d2a47"}, + {file = "coverage-7.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:489763b2d037b164846ebac0cbd368b8a4ca56385c4090807ff9fad817de4113"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451f433ad901b3bb00184d83fd83d135fb682d780b38af7944c9faeecb1e0bfe"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcc66e222cf4c719fe7722a403888b1f5e1682d1679bd780e2b26c18bb648cdc"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ec74cfef2d985e145baae90d9b1b32f85e1741b04cd967aaf9cfa84c1334f3"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:abbbd8093c5229c72d4c2926afaee0e6e3140de69d5dcd918b2921f2f0c8baba"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:35eb581efdacf7b7422af677b92170da4ef34500467381e805944a3201df2079"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8249b1c7334be8f8c3abcaaa996e1e4927b0e5a23b65f5bf6cfe3180d8ca7840"}, + {file = "coverage-7.4.3-cp311-cp311-win32.whl", hash = "sha256:cf30900aa1ba595312ae41978b95e256e419d8a823af79ce670835409fc02ad3"}, + {file = "coverage-7.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:18c7320695c949de11a351742ee001849912fd57e62a706d83dfc1581897fa2e"}, + {file = "coverage-7.4.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b51bfc348925e92a9bd9b2e48dad13431b57011fd1038f08316e6bf1df107d10"}, + {file = "coverage-7.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d6cdecaedea1ea9e033d8adf6a0ab11107b49571bbb9737175444cea6eb72328"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b2eccb883368f9e972e216c7b4c7c06cabda925b5f06dde0650281cb7666a30"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c00cdc8fa4e50e1cc1f941a7f2e3e0f26cb2a1233c9696f26963ff58445bac7"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4a8dd3dcf4cbd3165737358e4d7dfbd9d59902ad11e3b15eebb6393b0446e"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:062b0a75d9261e2f9c6d071753f7eef0fc9caf3a2c82d36d76667ba7b6470003"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ebe7c9e67a2d15fa97b77ea6571ce5e1e1f6b0db71d1d5e96f8d2bf134303c1d"}, + {file = 
"coverage-7.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c0a120238dd71c68484f02562f6d446d736adcc6ca0993712289b102705a9a3a"}, + {file = "coverage-7.4.3-cp312-cp312-win32.whl", hash = "sha256:37389611ba54fd6d278fde86eb2c013c8e50232e38f5c68235d09d0a3f8aa352"}, + {file = "coverage-7.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:d25b937a5d9ffa857d41be042b4238dd61db888533b53bc76dc082cb5a15e914"}, + {file = "coverage-7.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:28ca2098939eabab044ad68850aac8f8db6bf0b29bc7f2887d05889b17346454"}, + {file = "coverage-7.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:280459f0a03cecbe8800786cdc23067a8fc64c0bd51dc614008d9c36e1659d7e"}, + {file = "coverage-7.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0cdedd3500e0511eac1517bf560149764b7d8e65cb800d8bf1c63ebf39edd2"}, + {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9babb9466fe1da12417a4aed923e90124a534736de6201794a3aea9d98484e"}, + {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dec9de46a33cf2dd87a5254af095a409ea3bf952d85ad339751e7de6d962cde6"}, + {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:16bae383a9cc5abab9bb05c10a3e5a52e0a788325dc9ba8499e821885928968c"}, + {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2c854ce44e1ee31bda4e318af1dbcfc929026d12c5ed030095ad98197eeeaed0"}, + {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ce8c50520f57ec57aa21a63ea4f325c7b657386b3f02ccaedeccf9ebe27686e1"}, + {file = "coverage-7.4.3-cp38-cp38-win32.whl", hash = "sha256:708a3369dcf055c00ddeeaa2b20f0dd1ce664eeabde6623e516c5228b753654f"}, + {file = "coverage-7.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:1bf25fbca0c8d121a3e92a2a0555c7e5bc981aee5c3fdaf4bb7809f410f696b9"}, + {file = "coverage-7.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b253094dbe1b431d3a4ac2f053b6d7ede2664ac559705a704f621742e034f1f"}, + {file = "coverage-7.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77fbfc5720cceac9c200054b9fab50cb2a7d79660609200ab83f5db96162d20c"}, + {file = "coverage-7.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6679060424faa9c11808598504c3ab472de4531c571ab2befa32f4971835788e"}, + {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af154d617c875b52651dd8dd17a31270c495082f3d55f6128e7629658d63765"}, + {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8640f1fde5e1b8e3439fe482cdc2b0bb6c329f4bb161927c28d2e8879c6029ee"}, + {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:69b9f6f66c0af29642e73a520b6fed25ff9fd69a25975ebe6acb297234eda501"}, + {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0842571634f39016a6c03e9d4aba502be652a6e4455fadb73cd3a3a49173e38f"}, + {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a78ed23b08e8ab524551f52953a8a05d61c3a760781762aac49f8de6eede8c45"}, + {file = "coverage-7.4.3-cp39-cp39-win32.whl", hash = "sha256:c0524de3ff096e15fcbfe8f056fdb4ea0bf497d584454f344d59fce069d3e6e9"}, + {file = "coverage-7.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:0209a6369ccce576b43bb227dc8322d8ef9e323d089c6f3f26a597b09cb4d2aa"}, + {file = 
"coverage-7.4.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:7cbde573904625509a3f37b6fecea974e363460b556a627c60dc2f47e2fffa51"}, + {file = "coverage-7.4.3.tar.gz", hash = "sha256:276f6077a5c61447a48d133ed13e759c09e62aff0dc84274a68dc18660104d52"}, ] [package.extras] @@ -455,13 +438,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.3" +version = "1.0.4" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.3-py3-none-any.whl", hash = "sha256:9a6a501c3099307d9fd76ac244e08503427679b1e81ceb1d922485e2f2462ad2"}, - {file = "httpcore-1.0.3.tar.gz", hash = "sha256:5c0f9546ad17dac4d0772b0808856eb616eb8b48ce94f49ed819fd6982a8a544"}, + {file = "httpcore-1.0.4-py3-none-any.whl", hash = "sha256:ac418c1db41bade2ad53ae2f3834a3a0f5ae76b56cf5aa497d2d033384fc7d73"}, + {file = "httpcore-1.0.4.tar.gz", hash = "sha256:cb2839ccfcba0d2d3c1131d3c3e26dfc327326fbe7a5dc0dbfe9f6c9151bb022"}, ] [package.dependencies] @@ -472,7 +455,7 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.24.0)"] +trio = ["trio (>=0.22.0,<0.25.0)"] [[package]] name = "httpx" @@ -919,18 +902,18 @@ virtualenv = ">=20.10.0" [[package]] name = "pydantic" -version = "2.6.1" +version = "2.6.3" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, - {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, + {file = "pydantic-2.6.3-py3-none-any.whl", hash = "sha256:72c6034df47f46ccdf81869fddb81aade68056003900a8724a4f160700016a2a"}, + {file = "pydantic-2.6.3.tar.gz", hash = "sha256:e07805c4c7f5c6826e33a1d4c9d47950d7eaf34868e2690f8594d2e30241f11f"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.16.2" +pydantic-core = "2.16.3" typing-extensions = ">=4.6.1" [package.extras] @@ -938,90 +921,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.16.2" +version = "2.16.3" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, - {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", 
hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, - {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, - {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, - {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, - {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, - {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, - {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, - {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, - {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, - {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, - {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, - {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, - {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, - {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, - {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, - {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, - {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, - {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, - {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, - {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, - {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, - {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, - {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, - {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, - {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, - {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, - {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, - {file 
= "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, - {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, - {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, - {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, - {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, - {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, + {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, + {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, + {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, + {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, + {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, + {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, + {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, + {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, + {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, + {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, + {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, + {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = 
"sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, + {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, + {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, + {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, + {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, + {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = 
"sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, + {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, + {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, ] [package.dependencies] @@ -1029,13 +1012,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pydantic-settings" -version = "2.2.0" +version = "2.2.1" description = "Settings management using Pydantic" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_settings-2.2.0-py3-none-any.whl", hash = "sha256:5f7bcaf9ad4419559dc5ac155c0324a9aeb2547c60471ee7c7d026f467a6b515"}, - {file = "pydantic_settings-2.2.0.tar.gz", hash = "sha256:648d0a76673e69c51278979cba2e83cf16a23d57519bfd7e553d1c3f37db5560"}, + {file = 
"pydantic_settings-2.2.1-py3-none-any.whl", hash = "sha256:0235391d26db4d2190cb9b31051c4b46882d28a51533f97440867f012d4da091"}, + {file = "pydantic_settings-2.2.1.tar.gz", hash = "sha256:00b9f6a5e95553590434c0fa01ead0b216c3e10bc54ae02e37f359948643c5ed"}, ] [package.dependencies] @@ -1043,7 +1026,7 @@ pydantic = ">=2.3.0" python-dotenv = ">=0.21.0" [package.extras] -toml = ["tomlkit (>=0.12)"] +toml = ["tomli (>=2.0.1)"] yaml = ["pyyaml (>=6.0.1)"] [[package]] @@ -1374,19 +1357,19 @@ wheel = ">=0.36.1" [[package]] name = "setuptools" -version = "69.1.0" +version = "69.1.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, + {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "shapely" @@ -1458,13 +1441,13 @@ files = [ [[package]] name = "sniffio" -version = "1.3.0" +version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, + {file = "sniffio-1.3.1-py3-none-any.whl", hash = 
"sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] [[package]] @@ -1497,13 +1480,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -1543,13 +1526,13 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", [[package]] name = "virtualenv" -version = "20.25.0" +version = "20.25.1" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, - {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, + {file = "virtualenv-20.25.1-py3-none-any.whl", hash = "sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a"}, + {file = "virtualenv-20.25.1.tar.gz", hash = "sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197"}, ] [package.dependencies] @@ -1578,4 +1561,4 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "9e52b0cd2075bbbc6693b39f79b388a505e3744feeeb85c235b7f735afedc848" +content-hash = "86ae28eb5f2c4f08bc245ca34113f8d401778b377ca1050aa5d25565ff7ebe1e" diff --git a/prez/models/profiles_and_mediatypes.py b/prez/models/profiles_and_mediatypes.py deleted file mode 100755 index fdaa646f..00000000 --- a/prez/models/profiles_and_mediatypes.py +++ /dev/null @@ -1,58 +0,0 @@ -from typing import FrozenSet, Optional - -from pydantic import BaseModel, model_validator -from rdflib import Namespace, URIRef -from starlette.requests import Request - -from prez.services.generate_profiles import get_profiles_and_mediatypes -from prez.services.connegp_service import get_requested_profile_and_mediatype -from prez.repositories import Repo - -PREZ = Namespace("https://prez.dev/") - - -class ProfilesMediatypesInfo(BaseModel): - class Config: - arbitrary_types_allowed = True - - request: Request # TODO slim down once connegp is refactored so the whole request doesn't need to be passed through - classes: FrozenSet[URIRef] - system_repo: Repo - req_profiles: Optional[str] = None - req_profiles_token: Optional[str] = None - req_mediatypes: Optional[FrozenSet] = None - profile: Optional[URIRef] = None - mediatype: Optional[str] = None - selected_class: Optional[URIRef] = None - profile_headers: Optional[str] = None - avail_profile_uris: Optional[str] = None - listing: Optional[bool] = False - - @model_validator(mode="after") - def populate_requested_types(self): - request = self.request - ( - self.req_profiles, - self.req_profiles_token, - self.req_mediatypes, - ) = 
get_requested_profile_and_mediatype(request)
-        return self
-
-
-async def populate_profile_and_mediatype(
-    prof_model: ProfilesMediatypesInfo, system_repo: Repo
-):
-    req_profiles = prof_model.req_profiles
-    req_profiles_token = prof_model.req_profiles_token
-    req_mediatypes = prof_model.req_mediatypes
-    classes = prof_model.classes
-    listing = prof_model.listing
-    (
-        prof_model.profile,
-        prof_model.mediatype,
-        prof_model.selected_class,
-        prof_model.profile_headers,
-        prof_model.avail_profile_uris,
-    ) = await get_profiles_and_mediatypes(
-        classes, system_repo, req_profiles, req_profiles_token, req_mediatypes, listing
-    )
diff --git a/prez/repositories/base.py b/prez/repositories/base.py
index 3f475a3a..64dec54f 100755
--- a/prez/repositories/base.py
+++ b/prez/repositories/base.py
@@ -23,7 +23,7 @@
     async def tabular_query_to_table(self, query: str, context: URIRef = None):
         pass
     async def send_queries(
-        self, rdf_queries: List[str], tabular_queries: List[Tuple[URIRef, str]] = None
+        self, rdf_queries: List[str], tabular_queries: List[Tuple[URIRef | None, str]] = None
     ) -> Tuple[Graph, List]:
         # Common logic to send both query types in parallel
         results = await asyncio.gather(
diff --git a/prez/routers/sparql.py b/prez/routers/sparql.py
index 616f8277..71300b18 100755
--- a/prez/routers/sparql.py
+++ b/prez/routers/sparql.py
@@ -9,12 +9,9 @@
 from starlette.responses import StreamingResponse
 from prez.dependencies import get_repo, get_system_repo
-from prez.models.profiles_and_mediatypes import (
-    ProfilesMediatypesInfo,
-    populate_profile_and_mediatype,
-)
 from prez.renderers.renderer import return_annotated_rdf
 from prez.repositories import Repo
+from prez.services.connegp_service import NegotiatedPMTs
 PREZ = Namespace("https://prez.dev/")
@@ -31,33 +28,27 @@
 async def sparql_endpoint(
     repo: Repo = Depends(get_repo),
     system_repo: Repo = Depends(get_system_repo),
 ):
-    request_mediatype = request.headers.get("accept").split(",")[
-        0
-    ]  # can't default the MT where not provided as it could be
-    # graph (CONSTRUCT like queries) or tabular (SELECT queries)
-
-    # Intercept "+anot" mediatypes
-    if "anot+" in request_mediatype:
-        prof_and_mt_info = ProfilesMediatypesInfo(
-            request=request,
-            classes=frozenset([PREZ.SPARQLQuery]),
-            system_repo=system_repo,
-        )
-        await populate_profile_and_mediatype(prof_and_mt_info, system_repo)
-        non_anot_mediatype = request_mediatype.replace("anot+", "")
+    pmts = NegotiatedPMTs(**{
+        "headers": request.headers,
+        "params": request.query_params,
+        "classes": [PREZ.SPARQLQuery],
+        "system_repo": system_repo
+    })
+    await pmts.setup()
+    if pmts.requested_mediatypes is not None and "anot+" in pmts.requested_mediatypes[0][0]:
+        non_anot_mediatype = pmts.requested_mediatypes[0][0].replace("anot+", "")
         request._headers = Headers({**request.headers, "accept": non_anot_mediatype})
         response = await repo.sparql(request)
         await response.aread()
         g = Graph()
         g.parse(data=response.text, format=non_anot_mediatype)
-        graph = await return_annotated_rdf(g, prof_and_mt_info.profile)
+        graph = await return_annotated_rdf(g, pmts.selected["profile"])
         content = io.BytesIO(
             graph.serialize(format=non_anot_mediatype, encoding="utf-8")
         )
         return StreamingResponse(
             content=content,
             media_type=non_anot_mediatype,
-            headers=prof_and_mt_info.profile_headers,
+            headers=pmts.generate_response_headers()
         )
     else:
         query_result = await repo.sparql(query, request.headers.raw)
diff --git a/prez/services/connegp_service.py b/prez/services/connegp_service.py
index ce1cdbff..1053dfc8 100755
---
a/prez/services/connegp_service.py
+++ b/prez/services/connegp_service.py
@@ -1,15 +1,236 @@
-import time
+import logging
+import re
+from textwrap import dedent
-from connegp import Connegp
-from fastapi import Request
+from pydantic import BaseModel
+from rdflib import Graph, Namespace, URIRef
+from prez.models.model_exceptions import NoProfilesException
+from prez.repositories.base import Repo
+from prez.services.curie_functions import get_curie_id_for_uri, get_uri_for_curie_id
-def get_requested_profile_and_mediatype(request: Request):
-    """Return the requested profile and mediatype."""
+logger = logging.getLogger("prez")
-    c = Connegp(request)
-    return (
-        c.profile_uris_requested,
-        c.profile_tokens_requested,
-        frozenset(c.mediatypes_requested),
-    )
+
+
+class TokenError(Exception):
+    def __init__(self, *args):
+        super().__init__(*args)
+
+
+class NegotiatedPMTs(BaseModel):
+    """The Requested Profiles and Media Types as negotiated by the ConnegP standard.
+    See: https://w3c.github.io/dx-connegp/connegp/#introduction
+
+    Exposes the selected profile / media type as self.selected: dict
+    with keys:
+    - profile: URIRef
+    - title: str
+    - mediatype: str
+    - class: str
+
+    Response headers with alternate profiles / mediatypes can be generated by calling
+    the .generate_response_headers() method.
+    """
+    headers: dict
+    params: dict
+    classes: list[URIRef]
+    system_repo: Repo
+    listing: bool = False
+    default_weighting: float = 1.0
+    requested_profiles: list[tuple[str, float]] | None = None
+    requested_mediatypes: list[tuple[str, float]] | None = None
+    available: list[dict] | None = None
+    selected: dict | None = None
+
+    class Config:
+        arbitrary_types_allowed = True
+
+    async def setup(self) -> bool:
+        self.requested_profiles = await self._get_requested_profiles()
+        self.requested_mediatypes = await self._get_requested_mediatypes()
+        self.available = await self._get_available()
+        self.selected = await self._get_selected()
+        return True if self.selected else False
+
+    async def _resolve_token(self, token: str) -> str:
+        query_str: str = dedent("""
+            PREFIX dcterms: <http://purl.org/dc/terms/>
+            PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
+            PREFIX prof: <http://www.w3.org/ns/dx/prof/>
+
+            SELECT ?profile
+            WHERE {
+                ?profile a prof:Profile .
+                ?profile dcterms:identifier ?o .
+                FILTER(?o="<token>"^^xsd:token)
+            }
+        """.replace("<token>", token))
+        try:
+            _, results = await self.system_repo.send_queries([], [(None, query_str)])
+            result: str = results[0][1][0]["profile"]["value"]
+        except (KeyError, IndexError, ValueError):
+            raise TokenError(f"Token: '{token}' could not be resolved to URI")
+        uri = "<" + result + ">"
+        return uri
+
+    async def _tupilize(self, string: str, is_profile: bool = False) -> tuple[str, float]:
+        parts: list[str | float] = string.split("q=")  # split out the weighting
+        parts[0] = parts[0].strip(" ;")  # remove the separator character, and any whitespace characters
+        if is_profile and not re.search(r"^<.*>$", parts[0]):  # If it doesn't look like a URI ...
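+            # e.g. a bare token such as "dcat" is first looked up against profiles'
+            # dcterms:identifier values; failing that it is tried as a curie, and if
+            # both fail the profile is treated as invalid below: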
+            try:
+                parts[0] = await self._resolve_token(parts[0])  # then try to resolve the token to a URI
+            except TokenError as e:
+                logger.error(e.args[0])
+                try:  # if token resolution fails, try to resolve as a curie
+                    result = str(get_uri_for_curie_id(parts[0]))
+                    parts[0] = "<" + result + ">"
+                except ValueError as e:
+                    parts[0] = ""  # if curie resolution failed, then the profile is invalid
+                    logger.error(e.args[0])
+        if len(parts) == 1:
+            parts.append(self.default_weighting)  # If no weight given, set the default
+        else:
+            try:
+                parts[1] = float(parts[1])  # Type-check the separated weighting
+            except ValueError as e:
+                logger.debug(
+                    f"Could not cast q={parts[1]} as float. Defaulting to {self.default_weighting}. {e.args[0]}")
+        return parts[0], parts[1]
+
+    @staticmethod
+    def _prioritize(types: list[tuple[str, float]]) -> list[tuple[str, float]]:
+        return sorted(types, key=lambda x: x[1], reverse=True)
+
+    async def _get_requested_profiles(self) -> list[tuple[str, float]] | None:
+        raw_profiles: str = self.params.get("_profile", "")  # Prefer profiles declared in the QSA, as per the spec.
+        if not raw_profiles:
+            raw_profiles: str = self.headers.get("accept-profile", "")
+        if raw_profiles:
+            profiles: list = [await self._tupilize(profile, is_profile=True) for profile in raw_profiles.split(",")]
+            return self._prioritize(profiles)
+        return None
+
+    async def _get_requested_mediatypes(self) -> list[tuple[str, float]] | None:
+        raw_mediatypes: str = self.params.get("_media", "")  # Prefer mediatypes declared in the QSA, as per the spec.
+        if not raw_mediatypes:
+            raw_mediatypes: str = self.headers.get("accept", "")
+        if raw_mediatypes:
+            mediatypes: list = [await self._tupilize(mediatype) for mediatype in raw_mediatypes.split(",")]
+            return self._prioritize(mediatypes)
+        return None
+
+    async def _get_available(self) -> list[dict]:
+        query = self._compose_select_query()
+        repo_response = await self._do_query(query)
+        available = [
+            {
+                "profile": URIRef(result["profile"]["value"]),
+                "title": result["title"]["value"],
+                "mediatype": result["format"]["value"],
+                "class": result["class"]["value"]
+            } for result in repo_response[1][0][1]
+        ]
+        return available
+
+    async def _get_selected(self) -> dict:
+        return self.available[0]
+
+    def generate_response_headers(self) -> dict:
+        profile_uri = "<http://www.w3.org/ns/dx/prof/Profile>"
+        distinct_profiles = {(pmt["profile"], pmt["title"]) for pmt in self.available}
+        profile_header_links = ", ".join(
+            [f'<{self.selected["profile"]}>; rel="profile"'] +
+            [
+                f'{profile_uri}; rel="type"; title="{pmt[1]}"; token="{get_curie_id_for_uri(pmt[0])}"; anchor=<{pmt[0]}>'
+                for pmt in distinct_profiles
+            ]
+        )
+        mediatype_header_links = ", ".join(
+            [
+                f'<{self.selected["class"]}?_profile={get_curie_id_for_uri(pmt["profile"])}&_mediatype={pmt["mediatype"]}>; rel="{"self" if pmt == self.selected else "alternate"}"; type="{pmt["mediatype"]}"; profile="{pmt["profile"]}"'
+                for pmt in self.available
+            ]
+        )
+        headers = {
+            "Content-Type": self.selected["mediatype"],
+            "link": profile_header_links + ", " + mediatype_header_links
+        }
+        return headers
+
+    def _compose_select_query(self) -> str:
+        prez = Namespace("https://prez.dev/")
+        profile_class = prez.ListingProfile if self.listing else prez.ObjectProfile
+        try:
+            requested_profile = self.requested_profiles[0][0]  # TODO: handle multiple requested profiles
+        except TypeError as e:
+            requested_profile = None
+            logger.debug(f"{e}. Normally this just means no profiles were requested")
+
+        query = dedent(
+            f"""
+            PREFIX altr-ext: <http://www.w3.org/ns/dx/conneg/altr-ext#>
+            PREFIX dcat: <http://www.w3.org/ns/dcat#>
+            PREFIX dcterms: <http://purl.org/dc/terms/>
+            PREFIX geo: <http://www.opengis.net/ont/geosparql#>
+            PREFIX prez: <https://prez.dev/>
+            PREFIX prof: <http://www.w3.org/ns/dx/prof/>
+            PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+            PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
+            PREFIX sh: <http://www.w3.org/ns/shacl#>
+
+            SELECT ?profile ?title ?class (count(?mid) as ?distance) ?req_profile ?def_profile ?format ?req_format ?def_format
+
+            WHERE {{
+              VALUES ?class {{{" ".join('<' + str(klass) + '>' for klass in self.classes)}}}
+              ?class rdfs:subClassOf* ?mid .
+              ?mid rdfs:subClassOf* ?base_class .
+              VALUES ?base_class {{ dcat:Dataset geo:FeatureCollection geo:Feature
+              skos:ConceptScheme skos:Concept skos:Collection
+              dcat:Catalog dcat:Resource prof:Profile prez:SPARQLQuery
+              prez:SearchResult prez:CQLObjectList prez:QueryablesList prez:Object }}
+              ?profile altr-ext:constrainsClass ?class ;
+                       altr-ext:hasResourceFormat ?format ;
+                       dcterms:title ?title .\
+              {f'?profile a {profile_class.n3()} .'}
+              {f'BIND(?profile={requested_profile} as ?req_profile)' if requested_profile else ''}
+              BIND(EXISTS {{ ?shape sh:targetClass ?class ;
+                                    altr-ext:hasDefaultProfile ?profile }} AS ?def_profile)
+              {self._generate_mediatype_if_statements()}
+              BIND(EXISTS {{ ?profile altr-ext:hasDefaultResourceFormat ?format }} AS ?def_format)
+            }}
+            GROUP BY ?class ?profile ?req_profile ?def_profile ?format ?req_format ?def_format ?title
+            ORDER BY DESC(?req_profile) DESC(?distance) DESC(?def_profile) DESC(?req_format) DESC(?def_format)
+            """
+        )
+
+        logger.debug(f"ConnegP query: {query}")
+        return query
+
+    def _generate_mediatype_if_statements(self) -> str:
+        """
+        Generates a list of if statements used to determine the response mediatype based on user requests,
+        and the availability of these in profiles.
+        These are of the form:
+          BIND(
+            IF(?format="application/ld+json", "0.9",
+              IF(?format="text/html", "0.8",
+                IF(?format="image/apng", "0.7", ""))) AS ?req_format)
+        """
+        if not self.requested_mediatypes:
+            return ""
+        line_join = "," + "\n"
+        ifs = (
+            f"BIND(\n"
+            f"""{line_join.join(
+                {chr(9) + 'IF(?format="' + tup[0] + '", "' + str(tup[1]) + '"' for tup in self.requested_mediatypes}
            )}"""
+            f""", ""{')' * len(self.requested_mediatypes)}\n"""
+            f"\tAS ?req_format)"
+        )
+        return ifs
+
+    async def _do_query(self, query: str) -> tuple[Graph, list]:
+        response = await self.system_repo.send_queries([], [(None, query)])
+        if not response[1][0][1]:
+            raise NoProfilesException(self.classes)
+        return response
diff --git a/prez/services/generate_profiles.py b/prez/services/generate_profiles.py
index 532a1785..734122bf 100755
--- a/prez/services/generate_profiles.py
+++ b/prez/services/generate_profiles.py
@@ -1,15 +1,9 @@
 import logging
 from pathlib import Path
-from typing import FrozenSet
-from rdflib import Graph, URIRef, RDF, PROF, Literal
+from rdflib import Graph
-from prez.cache import profiles_graph_cache, prefix_graph
-from prez.models.model_exceptions import NoProfilesException
-from prez.reference_data.prez_ns import PREZ
-from prez.services.curie_functions import get_curie_id_for_uri
-from prez.repositories import Repo
-from prez.services.query_generation.connegp import select_profile_mediatype
+from prez.cache import profiles_graph_cache
 log = logging.getLogger(__name__)
@@ -61,121 +55,3 @@
 async def create_profiles_graph(repo) -> Graph:
         log.info(f"Remote profile(s) found and added")
     else:
         log.info("No remote profiles found")
-
-
-async def get_profiles_and_mediatypes(
-    classes: FrozenSet[URIRef],
-    system_repo: Repo,
-    requested_profile: URIRef = None,
-    requested_profile_token: str = None,
-
    requested_mediatype: URIRef = None,
-    listing: bool = False,
-):
-    query = select_profile_mediatype(
-        classes,
-        requested_profile,
-        requested_profile_token,
-        requested_mediatype,
-        listing,
-    )
-    log.debug(f"ConnegP query: {query}")
-    response = await system_repo.send_queries([], [(None, query)])
-    # log.debug(f"ConnegP response:{results_pretty_printer(response)}")
-    if response[1][0][1] == []:
-        raise NoProfilesException(classes)
-    top_result = response[1][0][1][0]
-    profile, mediatype, selected_class = (
-        URIRef(top_result["profile"]["value"]),
-        Literal(top_result["format"]["value"]),
-        URIRef(top_result["class"]["value"]),
-    )
-    profile_headers, avail_profile_uris = generate_profiles_headers(
-        selected_class, response, profile, mediatype
-    )
-    return profile, mediatype, selected_class, profile_headers, avail_profile_uris
-
-
-def results_pretty_printer(response):
-    # Calculate max width for each column, including the new "#" column
-    max_widths = [
-        len(str(len(response.bindings)))
-    ]  # length of the highest row number as a string
-    for header in response.vars:
-        max_width = max(
-            len(header.n3(prefix_graph.namespace_manager)),
-            max(
-                len(
-                    row[header].n3(prefix_graph.namespace_manager)
-                    if row[header]
-                    else ""
-                )
-                for row in response.bindings
-            ),
-        )
-        max_widths.append(max_width)
-
-    # Header row
-    header_row = "\n" + " | ".join(
-        ["#".ljust(max_widths[0])]
-        + [
-            str(header).ljust(max_widths[i + 1])
-            for i, header in enumerate(response.vars)
-        ]
-    )
-    pp_string = header_row + "\n"
-    pp_string += ("-" * len(header_row)) + "\n"  # Divider
-
-    # Data rows
-    row_number = 1
-    for row in response.bindings:
-        row_data = [str(row_number).ljust(max_widths[0])]
-        row_data += [
-            (
-                row[header].n3(prefix_graph.namespace_manager) if row[header] else ""
-            ).ljust(max_widths[i + 1])
-            for i, header in enumerate(response.vars)
-        ]
-        formatted_row = " | ".join(row_data)
-        pp_string += formatted_row + "\n"
-        row_number += 1
-
-    return pp_string
-
-
-def generate_profiles_headers(selected_class, response, profile, mediatype):
-    headers = {
-        "Access-Control-Allow-Origin": "*",
-        "Content-Type": mediatype,
-    }
-    avail_profiles = set(
-        (
-            get_curie_id_for_uri(i["profile"]["value"]),
-            i["profile"]["value"],
-            i["title"]["value"],
-        )
-        for i in response[1][0][1]
-    )
-    avail_profiles_headers = ", ".join(
-        [
-            f'<{i[1]}>; rel="type"; title="{i[2]}"; token="{i[0]}"; anchor=<{i[1]}>'
-            for i in avail_profiles
-        ]
-    )
-    avail_mediatypes_headers = ", ".join(
-        [
-            f"""<{selected_class}?_profile={get_curie_id_for_uri(i["profile"]["value"])}&_mediatype={i["format"]["value"]}>; \
-rel="{"self" if i["profile"]["value"] == profile and i["format"]["value"] == mediatype else "alternate"}"; \
-type="{i["format"]["value"]}"; profile="{i["profile"]["value"]}"\
-"""
-            for i in response[1][0][1]
-        ]
-    )
-    headers["Link"] = ", ".join(
-        [
-            f'<{profile}>; rel="profile"',
-            avail_profiles_headers,
-            avail_mediatypes_headers,
-        ]
-    )
-    avail_profile_uris = [i[1] for i in avail_profiles]
-    return headers, avail_profile_uris
diff --git a/prez/services/listings.py b/prez/services/listings.py
index a8b6cc81..073a82a2 100755
--- a/prez/services/listings.py
+++ b/prez/services/listings.py
@@ -9,13 +9,10 @@
 from prez.cache import profiles_graph_cache, endpoints_graph_cache
 from prez.config import settings
-from prez.models.profiles_and_mediatypes import (
-    ProfilesMediatypesInfo,
-    populate_profile_and_mediatype,
-)
 from prez.reference_data.prez_ns import PREZ
 from prez.renderers.renderer import return_from_graph
from prez.repositories import Repo +from prez.services.connegp_service import NegotiatedPMTs from prez.services.link_generation import add_prez_links from prez.services.query_generation.classes import get_classes from prez.services.query_generation.count import CountQuery @@ -41,8 +38,8 @@ async def listing_function( page: int = 1, per_page: int = 20, cql_parser: CQLParser = None, - search_term: Optional[str] = None, - endpoint_structure: Tuple[str] = settings.endpoint_structure, + search_term: Optional[str] = None, + endpoint_structure: Tuple[str] = settings.endpoint_structure, ): """ # determine the relevant node selection part of the query - from SHACL, CQL, Search @@ -66,28 +63,24 @@ async def listing_function( elif search_term: target_classes = frozenset([PREZ.SearchResult]) # determine the relevant profile - prof_and_mt_info = ProfilesMediatypesInfo( - request=request, classes=target_classes, system_repo=system_repo, listing=True - ) - await populate_profile_and_mediatype(prof_and_mt_info, system_repo) - selected_class, selected_profile = ( - prof_and_mt_info.selected_class, - prof_and_mt_info.profile, - ) + pmts = NegotiatedPMTs(headers= + request.headers, params=request.query_params, classes=target_classes, listing=True, + system_repo=system_repo) + success = await pmts.setup() + if not success: + log.error("ConnegP Error. NegotiatedPMTs.setup() was not successful") runtime_values = {} - if prof_and_mt_info.profile == URIRef( - "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" - ): + if pmts.selected["profile"] == URIRef("http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile"): ns = NodeShape( uri=URIRef("http://example.org/ns#AltProfilesForListing"), graph=endpoints_graph_cache, - path_nodes={"path_node_1": IRI(value=prof_and_mt_info.selected_class)} + path_nodes={"path_node_1": IRI(value=pmts.selected["class"])} ) ns_triples = ns.triples_list ns_gpnt = ns.gpnt_list endpoint_uri = URIRef("https://prez.dev/endpoint/system/alt-profiles-listing") - runtime_values["selectedClass"] = prof_and_mt_info.selected_class + runtime_values["selectedClass"] = pmts.selected["class"] runtime_values["limit"] = per_page runtime_values["offset"] = (page - 1) * per_page @@ -114,7 +107,7 @@ async def listing_function( profile_graph=profiles_graph_cache, listing_or_object="listing", endpoint_uri=endpoint_uri, - profile_uri=selected_profile, + profile_uri=pmts.selected["profile"], endpoint_shacl_triples=ns_triples, endpoint_shacl_gpnt=ns_gpnt, cql_triples=cql_triples_list, @@ -136,26 +129,31 @@ async def listing_function( queries.append(search_query) else: queries.append(main_query) - req_mt = prof_and_mt_info.req_mediatypes - if req_mt: - if list(req_mt)[0] == "application/sparql-query": - return PlainTextResponse(queries[0], media_type="application/sparql-query") + if pmts.requested_mediatypes is not None and pmts.requested_mediatypes[0][0] == "application/sparql-query": + return PlainTextResponse(queries[0], media_type="application/sparql-query") # add a count query if it's an annotated mediatype - if "anot+" in prof_and_mt_info.mediatype and not search_term: + if "anot+" in pmts.selected["mediatype"] and not search_term: subselect = copy.deepcopy(query_constructor.inner_select) count_query = CountQuery(subselect=subselect).render() queries.append(count_query) - if prof_and_mt_info.profile == URIRef( - "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" - ): + # if prof_and_mt_info.profile == URIRef( + # "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" + # ): + # count_class = PROF.Profile + 
# else: + # count_class = target_classes + # if count_class: # target_class may be unknown (None) for queries involving CQL + # queries.append(temp_listing_count(subselect, count_class)) + + if pmts.selected["profile"] == URIRef("http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile"): item_graph, _ = await system_repo.send_queries(queries, []) - if "anot+" in prof_and_mt_info.mediatype: + if "anot+" in pmts.selected["mediatype"]: await add_prez_links(item_graph, system_repo, endpoint_structure=("profiles",)) else: item_graph, _ = await repo.send_queries(queries, []) - if "anot+" in prof_and_mt_info.mediatype: + if "anot+" in pmts.selected["mediatype"]: await add_prez_links(item_graph, repo, endpoint_structure) # count search results - hard to do in SPARQL as the SELECT part of the query is NOT aggregated if search_term: @@ -163,16 +161,16 @@ async def listing_function( item_graph.add((PREZ.SearchResult, PREZ["count"], Literal(count))) return await return_from_graph( item_graph, - prof_and_mt_info.mediatype, - selected_profile, - prof_and_mt_info.profile_headers, - prof_and_mt_info.selected_class, + pmts.selected["mediatype"], + pmts.selected["profile"], + pmts.generate_response_headers(), + pmts.selected["class"], repo, ) async def get_shacl_node_selection( - endpoint_uri, hierarchy_level, path_nodes, repo, system_repo + endpoint_uri, hierarchy_level, path_nodes, repo, system_repo ): """ Determines the relevant nodeshape based on the endpoint, hierarchy level, and parent URI diff --git a/prez/services/objects.py b/prez/services/objects.py index 9db29713..6b0052f9 100755 --- a/prez/services/objects.py +++ b/prez/services/objects.py @@ -5,46 +5,42 @@ from fastapi.responses import PlainTextResponse from rdflib import URIRef -from prez.cache import profiles_graph_cache, endpoints_graph_cache +from prez.cache import endpoints_graph_cache, profiles_graph_cache from prez.config import settings -from prez.models.profiles_and_mediatypes import ( - ProfilesMediatypesInfo, - populate_profile_and_mediatype, -) from prez.reference_data.prez_ns import EP from prez.renderers.renderer import return_from_graph +from prez.repositories import Repo +from prez.services.connegp_service import NegotiatedPMTs from prez.services.link_generation import add_prez_links from prez.services.query_generation.classes import get_classes -from prez.repositories import Repo -from temp.grammar import IRI from prez.services.query_generation.umbrella import PrezQueryConstructor +from temp.grammar import IRI log = logging.getLogger(__name__) async def object_function( - request: Request, - endpoint_uri: URIRef, - uri: URIRef, - request_url: str, - repo: Repo, - system_repo: Repo, - endpoint_structure: Tuple[str] = settings.endpoint_structure, + request: Request, + endpoint_uri: URIRef, + uri: URIRef, + request_url: str, + repo: Repo, + system_repo: Repo, + endpoint_structure: Tuple[str] = settings.endpoint_structure, ): - klasses = await get_classes(uri=uri, repo=repo) - # ConnegP - prof_and_mt_info = ProfilesMediatypesInfo( - request=request, classes=klasses, system_repo=system_repo - ) - await populate_profile_and_mediatype(prof_and_mt_info, system_repo) + classes = await get_classes(uri=uri, repo=repo) + pmts = NegotiatedPMTs(headers= + request.headers, params=request.query_params, classes=classes, + system_repo=system_repo) + success = await pmts.setup() + if not success: + log.error("ConnegP Error. 
NegotiatedPMTs.setup() was not successful") # handle alternate profiles runtime_values = {} - if prof_and_mt_info.profile == URIRef( - "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" - ): + if pmts.selected["profile"] == URIRef("http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile"): endpoint_uri = URIRef("https://prez.dev/endpoint/system/alt-profiles-listing") - # runtime_values["selectedClass"] = prof_and_mt_info.selected_class + # runtime_values["selectedClass"] = prof_and_mt_info.selected_class # runtime_values["object"] = uri query_constructor = PrezQueryConstructor( @@ -54,31 +50,30 @@ async def object_function( listing_or_object="object", focus_node=IRI(value=uri), endpoint_uri=endpoint_uri, - profile_uri=prof_and_mt_info.profile, + profile_uri=pmts.selected["profile"], ) query_constructor.generate_sparql() query = query_constructor.sparql - req_mt = prof_and_mt_info.req_mediatypes - if req_mt: - if list(req_mt)[0] == "application/sparql-query": + try: + if pmts.requested_mediatypes[0][0] == "application/sparql-query": return PlainTextResponse(query, media_type="application/sparql-query") + except IndexError as e: + log.debug(e.args[0]) - if prof_and_mt_info.profile == URIRef( - "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" - ): + if pmts.selected["profile"] == URIRef("http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile"): item_graph, _ = await system_repo.send_queries([query], []) else: item_graph, _ = await repo.send_queries([query], []) - if "anot+" in prof_and_mt_info.mediatype: + if "anot+" in pmts.selected["mediatype"]: if not endpoint_uri == EP.object: await add_prez_links(item_graph, repo, endpoint_structure) await add_prez_links(item_graph, repo, endpoint_structure) return await return_from_graph( item_graph, - prof_and_mt_info.mediatype, - prof_and_mt_info.profile, - prof_and_mt_info.profile_headers, - prof_and_mt_info.selected_class, + pmts.selected["mediatype"], + pmts.selected["profile"], + pmts.generate_response_headers(), + pmts.selected["class"], repo, ) diff --git a/prez/services/query_generation/connegp.py b/prez/services/query_generation/connegp.py deleted file mode 100644 index 20ab5f97..00000000 --- a/prez/services/query_generation/connegp.py +++ /dev/null @@ -1,123 +0,0 @@ -import logging -from textwrap import dedent -from typing import List, Tuple - -from rdflib import URIRef, Namespace - -from prez.services.curie_functions import get_uri_for_curie_id - -log = logging.getLogger(__name__) - -ALTREXT = Namespace("http://www.w3.org/ns/dx/conneg/altr-ext#") -PREZ = Namespace("https://prez.dev/") - - -def select_profile_mediatype( - classes: List[URIRef], - requested_profile_uri: URIRef = None, - requested_profile_token: str = None, - requested_mediatypes: List[Tuple] = None, - listing: bool = False, -): - """ - Returns a SPARQL SELECT query which will determine the profile and mediatype to return based on user requests, - defaults, and the availability of these in profiles. - - NB: Most specific class refers to the rdfs:Class of an object which has the most specific rdfs:subClassOf links to - the base class delivered by that API endpoint. 
The base classes delivered by each API endpoint are:
-
-    SpacePrez:
-    /s/catalogs -> prez:DatasetList
-    /s/catalogs/{ds_id} -> dcat:Dataset
-    /s/catalogs/{ds_id}/collections/{fc_id} -> geo:FeatureCollection
-    /s/catalogs/{ds_id}/collections -> prez:FeatureCollectionList
-    /s/catalogs/{ds_id}/collections/{fc_id}/features -> geo:Feature
-
-    VocPrez:
-    /v/schemes -> skos:ConceptScheme
-    /v/collections -> skos:Collection
-    /v/schemes/{cs_id}/concepts -> skos:Concept
-
-    CatPrez:
-    /c/catalogs -> dcat:Catalog
-    /c/catalogs/{cat_id}/datasets -> dcat:Dataset
-
-    The following logic is used to determine the profile and mediatype to be returned:
-
-    1. If a profile and mediatype are requested, they are returned if a matching profile which has the requested
-    mediatype is found, otherwise the default profile for the most specific class is returned, with its default
-    mediatype.
-    2. If a profile only is requested, if it can be found it is returned, otherwise the default profile for the most
-    specific class is returned. In both cases the default mediatype is returned.
-    3. If a mediatype only is requested, the default profile for the most specific class is returned, and if the
-    requested mediatype is available for that profile, it is returned, otherwise the default mediatype for that profile
-    is returned.
-    4. If neither a profile nor mediatype is requested, the default profile for the most specific class is returned,
-    with the default mediatype for that profile.
-    """
-    if listing:
-        profile_class = PREZ.ListingProfile
-    else:
-        profile_class = PREZ.ObjectProfile
-    if requested_profile_token:
-        requested_profile_uri = get_uri_for_curie_id(requested_profile_token)
-    query = dedent(
-        f""" PREFIX altr-ext: <http://www.w3.org/ns/dx/conneg/altr-ext#>
-    PREFIX dcat: <http://www.w3.org/ns/dcat#>
-    PREFIX dcterms: <http://purl.org/dc/terms/>
-    PREFIX geo: <http://www.opengis.net/ont/geosparql#>
-    PREFIX prez: <https://prez.dev/>
-    PREFIX prof: <http://www.w3.org/ns/dx/prof/>
-    PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
-    PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
-    PREFIX sh: <http://www.w3.org/ns/shacl#>
-
-    SELECT ?profile ?title ?class (count(?mid) as ?distance) ?req_profile ?def_profile ?format ?req_format ?def_format
-
-    WHERE {{
-      VALUES ?class {{{" ".join('<' + str(klass) + '>' for klass in classes)}}}
-      ?class rdfs:subClassOf* ?mid .
-      ?mid rdfs:subClassOf* ?base_class .
-      VALUES ?base_class {{ dcat:Dataset geo:FeatureCollection geo:Feature
-      skos:ConceptScheme skos:Concept skos:Collection
-      dcat:Catalog dcat:Resource prof:Profile prez:SPARQLQuery
-      prez:SearchResult prez:CQLObjectList prez:QueryablesList prez:Object }}
-      ?profile altr-ext:constrainsClass ?class ;
-               altr-ext:hasResourceFormat ?format ;
-               dcterms:title ?title .\
-      {f'?profile a {profile_class.n3()} .'}
-      {f'BIND(?profile=<{requested_profile_uri}> as ?req_profile)' if requested_profile_uri else ''}
-      BIND(EXISTS {{ ?shape sh:targetClass ?class ;
-                            altr-ext:hasDefaultProfile ?profile }} AS ?def_profile)
-      {generate_mediatype_if_statements(requested_mediatypes) if requested_mediatypes else ''}
-      BIND(EXISTS {{ ?profile altr-ext:hasDefaultResourceFormat ?format }} AS ?def_format)
-    }}
-    GROUP BY ?class ?profile ?req_profile ?def_profile ?format ?req_format ?def_format ?title
-    ORDER BY DESC(?req_profile) DESC(?distance) DESC(?def_profile) DESC(?req_format) DESC(?def_format)"""
-    )
-    return query
-
-
-def generate_mediatype_if_statements(requested_mediatypes: list):
-    """
-    Generates a list of if statements which will be used to determine the mediatype to return based on user requests,
-    and the availability of these in profiles.
-    These are of the form:
-      BIND(
-        IF(?format="application/ld+json", "0.9",
-          IF(?format="text/html", "0.8",
-            IF(?format="image/apng", "0.7", ""))) AS ?req_format)
-    """
-    # TODO ConnegP appears to return a tuple of q values and profiles for headers, and only profiles (no q values) if they
-    are not specified in QSAs.
-    if not isinstance(next(iter(requested_mediatypes)), tuple):
-        requested_mediatypes = [(1, mt) for mt in requested_mediatypes]
-
-    line_join = "," + "\n"
-    ifs = (
-        f"BIND(\n"
-        f"""{line_join.join({chr(9) + 'IF(?format="' + tup[1] + '", "' + str(tup[0]) + '"' for tup in requested_mediatypes})}"""
-        f""", ""{')' * len(requested_mediatypes)}\n"""
-        f"\tAS ?req_format)"
-    )
-    return ifs
diff --git a/pyproject.toml b/pyproject.toml
index 428aacc0..f0547c3e 100755
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -9,7 +9,6 @@
 python = "^3.11"
 uvicorn = "^0.27.1"
 httpx = "^0.26.0"
 rdflib = "^7.0.0"
-connegp = { file = "connegp-0.1.6-py3-none-any.whl" }
 async-lru = "^2.0.4"
 geojson-rewind = "^1.0.3"
 toml = "^0.10.2"
diff --git a/tests/data/profiles/ogc_records_profile.ttl b/tests/data/profiles/ogc_records_profile.ttl
new file mode 100755
index 00000000..333ef557
--- /dev/null
+++ b/tests/data/profiles/ogc_records_profile.ttl
@@ -0,0 +1,106 @@
+PREFIX altr-ext: <http://www.w3.org/ns/dx/conneg/altr-ext#>
+PREFIX dcat: <http://www.w3.org/ns/dcat#>
+PREFIX dcterms: <http://purl.org/dc/terms/>
+PREFIX geo: <http://www.opengis.net/ont/geosparql#>
+PREFIX owl: <http://www.w3.org/2002/07/owl#>
+PREFIX prez: <https://prez.dev/>
+PREFIX prof: <http://www.w3.org/ns/dx/prof/>
+PREFIX prov: <http://www.w3.org/ns/prov#>
+PREFIX reg: <http://purl.org/linked-data/registry#>
+PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+PREFIX sh: <http://www.w3.org/ns/shacl#>
+PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
+PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
+PREFIX endpoint: <https://prez.dev/endpoint/>
+PREFIX shext: <http://example.com/shacl-extension#>
+
+
+prez:OGCRecordsProfile
+    a prof:Profile ;
+    dcterms:identifier "ogc"^^xsd:token ;
+    dcterms:description "A system profile for OGC Records conformant API" ;
+    dcterms:title "OGC Profile" ;
+    altr-ext:constrainsClass prez:CatPrez ;
+    altr-ext:hasDefaultResourceFormat "text/anot+turtle" ;
+    altr-ext:hasNodeShape [
+        a sh:NodeShape ;
+        sh:targetClass dcat:Catalog , skos:Concept , geo:Feature , geo:FeatureCollection , skos:Collection , prez:SearchResult , prez:CQLObjectList ;
+        altr-ext:hasDefaultProfile prez:OGCListingProfile
+    ] , [
+        a sh:NodeShape ;
+        sh:targetClass skos:ConceptScheme ;
+        altr-ext:hasDefaultProfile prez:OGCSchemesListProfile
+    ] , [
+        a sh:NodeShape ;
+        sh:targetClass dcat:Catalog , skos:ConceptScheme , skos:Concept , geo:Feature , geo:FeatureCollection , skos:Collection ;
+        altr-ext:hasDefaultProfile prez:OGCItemProfile
+    ]
+    .
+
+prez:OGCListingProfile
+    a prof:Profile , prez:ListingProfile , sh:NodeShape ;
+    dcterms:title "OGC Listing Profile" ;
+    altr-ext:hasResourceFormat
+        "application/ld+json" ,
+        "application/anot+ld+json" ,
+        "application/rdf+xml" ,
+        "text/anot+turtle" ,
+        "text/turtle" ;
+    altr-ext:hasDefaultResourceFormat "text/anot+turtle" ;
+    altr-ext:constrainsClass dcat:Catalog , skos:Collection , geo:Feature , geo:FeatureCollection , skos:Concept ,
+        dcat:Resource , prof:Profile , prez:SearchResult , prez:CQLObjectList ;
+    sh:property [ sh:path rdf:type ]
+    .
+
+prez:OGCSchemesListProfile
+    a prof:Profile , prez:ListingProfile , sh:NodeShape ;
+    dcterms:title "OGC Concept Scheme Listing Profile" ;
+    altr-ext:hasResourceFormat
+        "application/ld+json" ,
+        "application/anot+ld+json" ,
+        "application/rdf+xml" ,
+        "text/anot+turtle" ,
+        "text/turtle" ;
+    altr-ext:hasDefaultResourceFormat "text/anot+turtle" ;
+    altr-ext:constrainsClass skos:ConceptScheme ;
+    sh:property [
+        sh:minCount 0 ;
+        sh:path (
+            sh:union (
+                dcterms:publisher
+                reg:status
+                ( prov:qualifiedDerivation prov:hadRole )
+                ( prov:qualifiedDerivation prov:entity )
+            )
+        )
+    ]
+    .
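+# The sh:union path above enumerates the predicates (including the two-step
+# prov:qualifiedDerivation sequence paths) that this listing profile renders
+# for each concept scheme.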
+
+prez:OGCItemProfile
+    a prof:Profile , prez:ObjectProfile , sh:NodeShape ;
+    dcterms:title "OGC Object Profile" ;
+    altr-ext:hasResourceFormat
+        "application/ld+json" ,
+        "application/anot+ld+json" ,
+        "application/rdf+xml" ,
+        "text/anot+turtle" ,
+        "text/turtle" ;
+    altr-ext:hasDefaultResourceFormat "text/anot+turtle" ;
+    sh:property [
+        sh:path shext:allPredicateValues ;
+    ] ,
+    [
+        sh:minCount 0 ;
+        sh:path [ sh:inversePath dcterms:hasPart ] ;
+    ] ;
+    shext:bnode-depth 2 ;
+    altr-ext:constrainsClass dcat:Catalog ,
+        dcat:Resource ,
+        skos:ConceptScheme,
+        skos:Collection ,
+        skos:Concept ,
+        geo:FeatureCollection ,
+        geo:Feature ,
+        prof:Profile ;
+    .
diff --git a/tests/data/profiles/spaceprez_default_profiles.ttl b/tests/data/profiles/spaceprez_default_profiles.ttl
new file mode 100755
index 00000000..9e6a3c8a
--- /dev/null
+++ b/tests/data/profiles/spaceprez_default_profiles.ttl
@@ -0,0 +1,138 @@
+PREFIX altr-ext: <http://www.w3.org/ns/dx/conneg/altr-ext#>
+PREFIX dcat: <http://www.w3.org/ns/dcat#>
+PREFIX dcterms: <http://purl.org/dc/terms/>
+PREFIX geo: <http://www.opengis.net/ont/geosparql#>
+PREFIX owl: <http://www.w3.org/2002/07/owl#>
+PREFIX prez: <https://prez.dev/>
+PREFIX prof: <http://www.w3.org/ns/dx/prof/>
+PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+PREFIX sh: <http://www.w3.org/ns/shacl#>
+PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
+PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
+PREFIX shext: <http://example.com/shacl-extension#>
+
+
+prez:SpacePrezProfile
+    a prof:Profile ;
+    dcterms:identifier "spaceprez"^^xsd:token ;
+    dcterms:description "A system profile for SpacePrez" ;
+    skos:prefLabel "SpacePrez profile" ;
+    altr-ext:hasDefaultResourceFormat "text/anot+turtle" ;
+    altr-ext:constrainsClass prez:SpacePrez ;
+    altr-ext:hasNodeShape [
+        a sh:NodeShape ;
+        sh:targetClass dcat:Dataset ;
+        altr-ext:hasDefaultProfile <https://www.w3.org/TR/vocab-dcat/>
+    ] , [
+        a sh:NodeShape ;
+        sh:targetClass geo:FeatureCollection ;
+        altr-ext:hasDefaultProfile prez:FeatureCollectionProfile
+    ] , [
+        a sh:NodeShape ;
+        sh:targetClass geo:Feature ;
+        altr-ext:hasDefaultProfile prez:FeatureProfile
+    ] , [
+        a sh:NodeShape ;
+        sh:targetClass prez:DatasetList ;
+        altr-ext:hasDefaultProfile <https://www.w3.org/TR/vocab-dcat/>
+    ] , [
+        a sh:NodeShape ;
+        sh:targetClass prez:FeatureCollectionList ;
+        altr-ext:hasDefaultProfile prez:GeoListingProfile
+    ] , [
+        a sh:NodeShape ;
+        sh:targetClass prez:FeatureList ;
+        altr-ext:hasDefaultProfile prez:GeoListingProfile
+    ]
+.
+
+prez:FeatureCollectionProfile a prof:Profile ;
+    dcterms:description "A profile for GeoSPARQL FeatureCollections" ;
+    dcterms:identifier "geofc"^^xsd:token ;
+    dcterms:title "Feature Collection Profile" ;
+    altr-ext:constrainsClass geo:FeatureCollection ;
+    altr-ext:hasDefaultResourceFormat "text/anot+turtle" ;
+    altr-ext:hasResourceFormat
+        "application/ld+json" ,
+        "application/anot+ld+json" ,
+        "application/rdf+xml" ,
+        "text/anot+turtle" ,
+        "text/turtle" ;
+    sh:targetClass geo:FeatureCollection ;
+    sh:property
+        [
+            sh:maxCount 0 ;
+            sh:path rdfs:member ;
+        ] ,
+        [
+            sh:path [ sh:inversePath rdfs:member ] ;
+        ] ;
+    shext:bnode-depth 2 ;
+.
+
+prez:FeatureProfile a prof:Profile ;
+    dcterms:description "A profile for GeoSPARQL Features" ;
+    dcterms:identifier "geofeat"^^xsd:token ;
+    dcterms:title "Feature Profile" ;
+    altr-ext:constrainsClass geo:Feature ;
+    altr-ext:hasDefaultResourceFormat "text/anot+turtle" ;
+    altr-ext:hasResourceFormat
+        "application/ld+json" ,
+        "application/anot+ld+json" ,
+        "application/rdf+xml" ,
+        "text/anot+turtle" ,
+        "text/turtle" ;
+    sh:targetClass geo:Feature ;
+    sh:property
+        [
+            sh:path [ sh:inversePath rdfs:member ] ;
+        ] ,
+        [
+            sh:path shext:allPredicateValues ;
+        ] ;
+    shext:bnode-depth 2 ;
+.
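+# shext:allPredicateValues and shext:bnode-depth are Prez SHACL extensions: the
+# former selects every predicate/value pair on the focus node, the latter bounds
+# how deeply blank-node objects are expanded when rendering.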
+
+
+prez:GeoListingProfile a prof:Profile ;
+    dcterms:description "A profile for listing GeoSPARQL Features and FeatureCollections" ;
+    dcterms:identifier "geolisting"^^xsd:token ;
+    dcterms:title "Geo Listing Profile" ;
+    altr-ext:constrainsClass prez:FeatureCollectionList , prez:FeatureList ;
+    altr-ext:hasDefaultResourceFormat "text/anot+turtle" ;
+    altr-ext:hasResourceFormat
+        "application/ld+json" ,
+        "application/anot+ld+json" ,
+        "application/rdf+xml" ,
+        "text/anot+turtle" ,
+        "text/turtle" ;
+    sh:property
+        [
+            sh:path [ sh:inversePath rdfs:member ] ;
+        ]
+.
+
+<https://www.w3.org/TR/vocab-dcat/>
+    a prof:Profile , prez:SpacePrezProfile ;
+    dcterms:description "Dataset Catalog Vocabulary (DCAT) is a W3C-authored RDF vocabulary designed to facilitate interoperability between data catalogs" ;
+    dcterms:identifier "dcat"^^xsd:token ;
+    dcterms:title "DCAT" ;
+    altr-ext:constrainsClass
+        dcat:Catalog ,
+        dcat:Dataset ;
+    altr-ext:hasDefaultResourceFormat "text/anot+turtle" ;
+    altr-ext:hasResourceFormat
+        "application/ld+json" ,
+        "application/anot+ld+json" ,
+        "application/rdf+xml" ,
+        "text/anot+turtle" ,
+        "text/turtle" ;
+    altr-ext:hasDefaultResourceFormat "text/anot+turtle" ;
+    sh:property [
+        sh:path shext:allPredicateValues ;
+    ] ;
+    shext:bnode-depth 2 ;
+    altr-ext:constrainsClass dcat:Catalog , dcat:Dataset ;
+.
diff --git a/tests/test_connegp.py b/tests/test_connegp.py
new file mode 100644
index 00000000..d09316e6
--- /dev/null
+++ b/tests/test_connegp.py
@@ -0,0 +1,151 @@
+from pathlib import Path
+
+import pytest
+from pyoxigraph import Store
+from rdflib import URIRef
+
+from prez.app import app
+from prez.dependencies import get_repo
+from prez.repositories import PyoxigraphRepo, Repo
+from prez.services.connegp_service import NegotiatedPMTs
+
+
+@pytest.fixture(scope="session")
+def test_store() -> Store:
+    store = Store()
+    file = Path(__file__).parent / "data/profiles/ogc_records_profile.ttl"
+    store.load(file.read_bytes(), "text/turtle")
+    file = Path(__file__).parent / "data/profiles/spaceprez_default_profiles.ttl"
+    store.load(file.read_bytes(), "text/turtle")
+    return store
+
+
+@pytest.fixture(scope="session")
+def test_repo(test_store: Store) -> Repo:
+    return PyoxigraphRepo(test_store)
+
+
+@pytest.mark.parametrize(
+    "headers, params, classes, listing, expected_selected",
+    [
+        [
+            {},  # Test that profiles/mediatypes resolve to their defaults if not requested (object endpoint)
+            {},
+            [URIRef("http://www.w3.org/ns/dcat#Catalog")],
+            False,
+            {
+                "profile": URIRef("https://prez.dev/OGCItemProfile"),
+                "title": "OGC Object Profile",
+                "mediatype": "text/anot+turtle",
+                "class": "http://www.w3.org/ns/dcat#Catalog"
+            }
+        ],
+        [
+            {},  # Test that profiles/mediatypes resolve to their defaults if not requested (listing endpoint)
+            {},
+            [URIRef("http://www.w3.org/ns/dcat#Catalog")],
+            True,
+            {
+                "profile": URIRef("https://prez.dev/OGCListingProfile"),
+                "title": "OGC Listing Profile",
+                "mediatype": "text/anot+turtle",
+                "class": "http://www.w3.org/ns/dcat#Catalog"
+            }
+        ],
+        [
+            {"accept": "application/ld+json"},  # Test that a valid mediatype is resolved
+            {},
+            [URIRef("http://www.w3.org/ns/dcat#Catalog")],
+            False,
+            {
+                "profile": URIRef("https://prez.dev/OGCItemProfile"),
+                "title": "OGC Object Profile",
+                "mediatype": "application/ld+json",
+                "class": "http://www.w3.org/ns/dcat#Catalog"
+            }
+        ],
+        [
+            {"accept": "application/ld+json;q=0.7,text/turtle"},  # Test resolution of multiple mediatypes
+            {},
+            [URIRef("http://www.w3.org/ns/dcat#Catalog")],
+
            False,
            {
                "profile": URIRef("https://prez.dev/OGCItemProfile"),
                "title": "OGC Object Profile",
                "mediatype": "text/turtle",
                "class": "http://www.w3.org/ns/dcat#Catalog"
            }
+        ],
+        [
+            {},
+            {"_media": "application/ld+json"},  # Test mediatype resolution as QSA
+            [URIRef("http://www.w3.org/ns/dcat#Catalog")],
+            False,
+            {
+                "profile": URIRef("https://prez.dev/OGCItemProfile"),
+                "title": "OGC Object Profile",
+                "mediatype": "application/ld+json",
+                "class": "http://www.w3.org/ns/dcat#Catalog"
+            }
+        ],
+        [
+            {"accept": "text/turtle"},
+            {"_media": "application/ld+json"},  # Test QSA mediatype is preferred
+            [URIRef("http://www.w3.org/ns/dcat#Catalog")],
+            False,
+            {
+                "profile": URIRef("https://prez.dev/OGCItemProfile"),
+                "title": "OGC Object Profile",
+                "mediatype": "application/ld+json",
+                "class": "http://www.w3.org/ns/dcat#Catalog"
+            }
+        ],
+        [
+            {"accept-profile": "oogabooga"},  # Test that invalid profile is ignored
+            {},
+            [URIRef("http://www.w3.org/ns/dcat#Catalog")],
+            False,
+            {
+                "profile": URIRef("https://prez.dev/OGCItemProfile"),
+                "title": "OGC Object Profile",
+                "mediatype": "text/anot+turtle",
+                "class": "http://www.w3.org/ns/dcat#Catalog"
+            }
+        ],
+        [
+            {"accept": "oogabooga"},  # Test that invalid mediatype is ignored
+            {},
+            [URIRef("http://www.w3.org/ns/dcat#Catalog")],
+            False,
+            {
+                "profile": URIRef("https://prez.dev/OGCItemProfile"),
+                "title": "OGC Object Profile",
+                "mediatype": "text/anot+turtle",
+                "class": "http://www.w3.org/ns/dcat#Catalog"
+            }
+        ],
+        [
+            {"accept-profile": "<https://www.w3.org/TR/vocab-dcat/>"},  # Test that a valid profile is resolved
+            {},
+            [URIRef("http://www.w3.org/ns/dcat#Catalog")],
+            True,
+            {
+                "profile": URIRef("https://www.w3.org/TR/vocab-dcat/"),
+                "title": "DCAT",
+                "mediatype": "text/anot+turtle",
+                "class": "http://www.w3.org/ns/dcat#Catalog"
+            }
+        ],
+    ]
)
+@pytest.mark.asyncio
+async def test_connegp(headers, params, classes, listing, expected_selected, test_repo):
+    def override_get_repo():
+        return test_repo
+
+    app.dependency_overrides[get_repo] = override_get_repo
+    pmts = NegotiatedPMTs(headers=headers, params=params, classes=classes, listing=listing, system_repo=test_repo)
+    success = await pmts.setup()
+    assert success
+    assert pmts.selected == expected_selected
From baa03848444a353d3f701d7e1e728ef2c3414fa8 Mon Sep 17 00:00:00 2001
From: Lawson Lewis
Date: Mon, 4 Mar 2024 10:29:35 +1000
Subject: [PATCH 18/25] connegp refactor complete

- found some calls to the old connegp that needed removing.
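For reference, a minimal sketch of the new call pattern that replaces the removed
Connegp(request) usage (endpoint wiring elided; header/param values are illustrative
only):

    pmts = NegotiatedPMTs(
        headers=request.headers,  # e.g. {"accept": "application/ld+json;q=0.7,text/turtle"}
        params=request.query_params,  # QSA overrides win, e.g. {"_profile": "dcat", "_media": "text/turtle"}
        classes=[URIRef("http://www.w3.org/ns/dcat#Catalog")],
        system_repo=system_repo,
        listing=False,
    )
    success = await pmts.setup()  # populates .requested_*, .available and .selected
    if success:
        mediatype = pmts.selected["mediatype"]
        response_headers = pmts.generate_response_headers()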
--- prez/services/connegp_service.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/prez/services/connegp_service.py b/prez/services/connegp_service.py index 1053dfc8..79cc1354 100755 --- a/prez/services/connegp_service.py +++ b/prez/services/connegp_service.py @@ -11,6 +11,30 @@ logger = logging.getLogger("prez") +RDF_MEDIATYPES = [ + "text/turtle", + "application/rdf+xml", + "application/ld+json", + "application/n-triples", +] + +RDF_SERIALIZER_TYPES_MAP = { + "text/turtle": "turtle", + "text/n3": "n3", + "application/n-triples": "nt", + "application/ld+json": "json-ld", + "application/rdf+xml": "xml", + # Some common but incorrect mimetypes + "application/rdf": "xml", + "application/rdf xml": "xml", + "application/json": "json-ld", + "application/ld json": "json-ld", + "text/ttl": "turtle", + "text/ntriples": "nt", + "text/n-triples": "nt", + "text/plain": "nt", # text/plain is the old/deprecated mimetype for n-triples +} + class TokenError(Exception): def __init__(self, *args): From 2e612e35a987f150b32ca47a87ff894fa82a45a4 Mon Sep 17 00:00:00 2001 From: Lawson Lewis Date: Mon, 4 Mar 2024 10:43:52 +1000 Subject: [PATCH 19/25] fix incorrect relative file paths in test suite --- tests/test_alt_profiles.py | 2 +- tests/test_endpoints_catprez.py | 2 +- tests/test_endpoints_spaceprez.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_alt_profiles.py b/tests/test_alt_profiles.py index 32121a49..86f251b9 100755 --- a/tests/test_alt_profiles.py +++ b/tests/test_alt_profiles.py @@ -18,7 +18,7 @@ def test_store() -> Store: # Create a new pyoxigraph Store store = Store() - file = Path("../test_data/catprez.ttl") + file = Path(__file__).parent.parent / "test_data/catprez.ttl" store.load(file.read_bytes(), "text/turtle") return store diff --git a/tests/test_endpoints_catprez.py b/tests/test_endpoints_catprez.py index 1dcef5df..050b9fad 100755 --- a/tests/test_endpoints_catprez.py +++ b/tests/test_endpoints_catprez.py @@ -18,7 +18,7 @@ def test_store() -> Store: # Create a new pyoxigraph Store store = Store() - file = Path("../test_data/catprez.ttl") + file = Path(__file__).parent.parent / "test_data/catprez.ttl" store.load(file.read_bytes(), "text/turtle") return store diff --git a/tests/test_endpoints_spaceprez.py b/tests/test_endpoints_spaceprez.py index a53d828d..a0acf459 100755 --- a/tests/test_endpoints_spaceprez.py +++ b/tests/test_endpoints_spaceprez.py @@ -17,7 +17,7 @@ def test_store() -> Store: # Create a new pyoxigraph Store store = Store() - file = Path("../test_data/spaceprez.ttl") + file = Path(__file__).parent.parent / "test_data/spaceprez.ttl" store.load(file.read_bytes(), "text/turtle") return store From 8275dc202bc15b32713246e21efdefdde509b6c0 Mon Sep 17 00:00:00 2001 From: david Date: Mon, 4 Mar 2024 17:01:47 +1000 Subject: [PATCH 20/25] black all code and update imports --- poetry.lock | 20 +++--- prez/app.py | 10 ++- prez/cache.py | 2 +- prez/config.py | 8 --- prez/dependencies.py | 9 ++- prez/renderers/csv_renderer.py | 2 +- prez/renderers/renderer.py | 19 +++--- prez/repositories/base.py | 7 +- prez/repositories/pyoxigraph.py | 4 +- prez/routers/cql.py | 4 +- prez/routers/identifier.py | 2 +- prez/routers/management.py | 7 +- prez/routers/object.py | 3 +- prez/routers/ogc_router.py | 4 +- prez/routers/profiles.py | 2 +- prez/routers/sparql.py | 22 ++++--- prez/services/annotations.py | 37 ++++++----- prez/services/app_service.py | 7 +- prez/services/connegp_service.py | 65 ++++++++++++++----- 
prez/services/link_generation.py | 4 +- prez/services/listings.py | 40 +++++++----- prez/services/objects.py | 31 +++++---- prez/services/query_generation/annotations.py | 37 ++++++----- prez/services/query_generation/umbrella.py | 4 -- pyproject.toml | 4 +- tests/test_alt_profiles.py | 9 +-- tests/test_connegp.py | 58 ++++++++++------- tests/test_node_selection_shacl.py | 17 ++--- 28 files changed, 251 insertions(+), 187 deletions(-) diff --git a/poetry.lock b/poetry.lock index ca483b23..a96824bf 100755 --- a/poetry.lock +++ b/poetry.lock @@ -336,13 +336,13 @@ files = [ [[package]] name = "fastapi" -version = "0.109.2" +version = "0.110.0" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.109.2-py3-none-any.whl", hash = "sha256:2c9bab24667293b501cad8dd388c05240c850b58ec5876ee3283c47d6e1e3a4d"}, - {file = "fastapi-0.109.2.tar.gz", hash = "sha256:f3817eac96fe4f65a2ebb4baa000f394e55f5fccdaf7f75250804bc58f354f73"}, + {file = "fastapi-0.110.0-py3-none-any.whl", hash = "sha256:87a1f6fb632a218222c5984be540055346a8f5d8a68e8f6fb647b1dc9934de4b"}, + {file = "fastapi-0.110.0.tar.gz", hash = "sha256:266775f0dcc95af9d3ef39bad55cff525329a931d5fd51930aadd4f428bf7ff3"}, ] [package.dependencies] @@ -459,13 +459,13 @@ trio = ["trio (>=0.22.0,<0.25.0)"] [[package]] name = "httpx" -version = "0.26.0" +version = "0.27.0" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, - {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, ] [package.dependencies] @@ -1308,13 +1308,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.7.0" +version = "13.7.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, - {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, ] [package.dependencies] @@ -1561,4 +1561,4 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "86ae28eb5f2c4f08bc245ca34113f8d401778b377ca1050aa5d25565ff7ebe1e" +content-hash = "7fc8b112be822b96246a4f1639edd1cd5cb7543abf0eded604c7aaf7ed1ebdc8" diff --git a/prez/app.py b/prez/app.py index fd74cfd4..4a0e24a4 100755 --- a/prez/app.py +++ b/prez/app.py @@ -3,9 +3,10 @@ from textwrap import dedent import uvicorn +from fastapi import FastAPI from rdflib import Graph from starlette.middleware.cors import CORSMiddleware -from fastapi import FastAPI + from prez.config import settings from prez.dependencies import ( 
get_async_http_client, @@ -13,13 +14,16 @@ load_local_data_to_oxigraph, get_oxrdflib_store, get_system_store, - load_system_data_to_oxigraph, get_annotations_store, load_annotations_data_to_oxigraph, + load_system_data_to_oxigraph, + get_annotations_store, + load_annotations_data_to_oxigraph, ) from prez.models.model_exceptions import ( ClassNotFoundException, URINotFoundException, NoProfilesException, ) +from prez.repositories import RemoteSparqlRepo, PyoxigraphRepo, OxrdflibRepo from prez.routers.cql import router as cql_router from prez.routers.identifier import router as identifier_router from prez.routers.management import router as management_router @@ -46,7 +50,6 @@ ) from prez.services.generate_profiles import create_profiles_graph from prez.services.prez_logging import setup_logger -from prez.repositories import RemoteSparqlRepo, PyoxigraphRepo, OxrdflibRepo app = FastAPI( exception_handlers={ @@ -131,6 +134,7 @@ async def app_startup(): log.info(f"Startup took {time.time() - a} seconds") + @app.on_event("shutdown") async def app_shutdown(): """ diff --git a/prez/cache.py b/prez/cache.py index ca93ee3c..1413a52d 100755 --- a/prez/cache.py +++ b/prez/cache.py @@ -1,6 +1,6 @@ +from aiocache import Cache from pyoxigraph.pyoxigraph import Store from rdflib import Graph, ConjunctiveGraph, Dataset -from aiocache import SimpleMemoryCache, Cache from prez.repositories import PyoxigraphRepo diff --git a/prez/config.py b/prez/config.py index 86b10297..0362560a 100755 --- a/prez/config.py +++ b/prez/config.py @@ -83,14 +83,6 @@ class Settings(BaseSettings): # )["tool"]["poetry"]["version"] # # return values - # - # @root_validator() - # def set_system_uri(cls, values): - # if not values.get("system_uri"): - # values["system_uri"] = URIRef( - # f"{values['protocol']}://{values['host']}:{values['port']}" - # ) - # return values settings = Settings() diff --git a/prez/dependencies.py b/prez/dependencies.py index e2a93e03..cd88a751 100755 --- a/prez/dependencies.py +++ b/prez/dependencies.py @@ -13,7 +13,7 @@ profiles_graph_cache, endpoints_graph_cache, annotations_store, - annotations_repo + annotations_repo, ) from prez.config import settings from prez.repositories import PyoxigraphRepo, RemoteSparqlRepo, OxrdflibRepo @@ -75,7 +75,6 @@ async def get_annotations_repo(): return annotations_repo - async def load_local_data_to_oxigraph(store: Store): """ Loads all the data from the local data directory into the local SPARQL endpoint @@ -100,7 +99,11 @@ async def load_annotations_data_to_oxigraph(store: Store): """ Loads all the data from the local data directory into the local SPARQL endpoint """ - relevant_predicates = settings.label_predicates + settings.description_predicates + settings.provenance_predicates + relevant_predicates = ( + settings.label_predicates + + settings.description_predicates + + settings.provenance_predicates + ) raw_g = Dataset(default_union=True) for file in (Path(__file__).parent / "reference_data/context_ontologies").glob("*"): raw_g.parse(file) diff --git a/prez/renderers/csv_renderer.py b/prez/renderers/csv_renderer.py index 5510610b..3d001f94 100755 --- a/prez/renderers/csv_renderer.py +++ b/prez/renderers/csv_renderer.py @@ -1,5 +1,5 @@ -import io import csv +import io def render_csv_dropdown(rows: list[dict]) -> io.StringIO: diff --git a/prez/renderers/renderer.py b/prez/renderers/renderer.py index 2e9093f6..59be56e2 100755 --- a/prez/renderers/renderer.py +++ b/prez/renderers/renderer.py @@ -1,9 +1,7 @@ import io import json import logging -import time -from 
connegp import RDF_MEDIATYPES, RDF_SERIALIZER_TYPES_MAP from fastapi import status from fastapi.exceptions import HTTPException from fastapi.responses import StreamingResponse @@ -15,18 +13,19 @@ from prez.services.annotations import ( get_annotation_properties, ) +from prez.services.connegp_service import RDF_MEDIATYPES, RDF_SERIALIZER_TYPES_MAP from prez.services.curie_functions import get_curie_id_for_uri log = logging.getLogger(__name__) async def return_from_graph( - graph, - mediatype, - profile, - profile_headers, - selected_class: URIRef, - repo: Repo, + graph, + mediatype, + profile, + profile_headers, + selected_class: URIRef, + repo: Repo, ): profile_headers["Content-Disposition"] = "inline" @@ -90,8 +89,8 @@ async def return_rdf(graph, mediatype, profile_headers): async def return_annotated_rdf( - graph: Graph, - repo, + graph: Graph, + repo, ) -> Graph: annotations_graph = await get_annotation_properties(graph, repo) # previous_annotation_len = 0 diff --git a/prez/repositories/base.py b/prez/repositories/base.py index 64dec54f..51fdc522 100755 --- a/prez/repositories/base.py +++ b/prez/repositories/base.py @@ -1,13 +1,11 @@ import asyncio import logging -import time from abc import ABC, abstractmethod from typing import List from typing import Tuple from rdflib import Namespace, Graph, URIRef - PREZ = Namespace("https://prez.dev/") log = logging.getLogger(__name__) @@ -23,7 +21,9 @@ async def tabular_query_to_table(self, query: str, context: URIRef = None): pass async def send_queries( - self, rdf_queries: List[str], tabular_queries: List[Tuple[URIRef | None, str]] = None + self, + rdf_queries: List[str], + tabular_queries: List[Tuple[URIRef | None, str]] = None, ) -> Tuple[Graph, List]: # Common logic to send both query types in parallel results = await asyncio.gather( @@ -35,6 +35,7 @@ async def send_queries( ], ) from prez.cache import prefix_graph + g = Graph(namespace_manager=prefix_graph.namespace_manager) tabular_results = [] for result in results: diff --git a/prez/repositories/pyoxigraph.py b/prez/repositories/pyoxigraph.py index ecfc9e06..976ebfe4 100644 --- a/prez/repositories/pyoxigraph.py +++ b/prez/repositories/pyoxigraph.py @@ -1,9 +1,9 @@ import logging +import pyoxigraph from fastapi.concurrency import run_in_threadpool -from rdflib import Namespace, Graph, URIRef, Literal +from rdflib import Namespace, Graph, URIRef -import pyoxigraph from prez.repositories.base import Repo PREZ = Namespace("https://prez.dev/") diff --git a/prez/routers/cql.py b/prez/routers/cql.py index 2d89a910..39b006ad 100755 --- a/prez/routers/cql.py +++ b/prez/routers/cql.py @@ -4,15 +4,15 @@ from rdflib import Namespace from rdflib.namespace import URIRef -from prez.reference_data.prez_ns import PREZ from prez.dependencies import ( get_repo, cql_post_parser_dependency, get_system_repo, cql_get_parser_dependency, ) -from prez.services.listings import listing_function +from prez.reference_data.prez_ns import PREZ from prez.repositories import Repo +from prez.services.listings import listing_function router = APIRouter(tags=["ogcrecords"]) diff --git a/prez/routers/identifier.py b/prez/routers/identifier.py index f1c6cc0f..26921899 100755 --- a/prez/routers/identifier.py +++ b/prez/routers/identifier.py @@ -4,8 +4,8 @@ from rdflib.term import _is_valid_uri from prez.dependencies import get_repo -from prez.services.curie_functions import get_uri_for_curie_id, get_curie_id_for_uri from prez.queries.identifier import get_foaf_homepage_query +from prez.services.curie_functions import 
get_uri_for_curie_id, get_curie_id_for_uri router = APIRouter(tags=["Identifier Resolution"]) diff --git a/prez/routers/management.py b/prez/routers/management.py index 6ccc30d1..c757a4d6 100755 --- a/prez/routers/management.py +++ b/prez/routers/management.py @@ -1,18 +1,17 @@ import logging -from connegp import RDF_MEDIATYPES from fastapi import APIRouter from rdflib import BNode from rdflib import Graph, URIRef, Literal from rdflib.collection import Collection -from starlette.requests import Request from starlette.responses import PlainTextResponse -from prez.services.annotations import process_term + from prez.cache import endpoints_graph_cache -from prez.cache import tbox_cache, tbox_cache_aio from prez.config import settings from prez.reference_data.prez_ns import PREZ from prez.renderers.renderer import return_rdf +from prez.services.annotations import process_term + # from prez.services.app_service import add_common_context_ontologies_to_tbox_cache router = APIRouter(tags=["Management"]) diff --git a/prez/routers/object.py b/prez/routers/object.py index 395885a0..fdab0572 100755 --- a/prez/routers/object.py +++ b/prez/routers/object.py @@ -1,7 +1,8 @@ from fastapi import APIRouter, Request, HTTPException, status, Query from fastapi import Depends -from starlette.responses import PlainTextResponse from rdflib import URIRef +from starlette.responses import PlainTextResponse + from prez.dependencies import get_repo, get_system_repo from prez.queries.object import object_inbound_query, object_outbound_query from prez.routers.identifier import get_iri_route diff --git a/prez/routers/ogc_router.py b/prez/routers/ogc_router.py index 3f7488a7..33b0a3f6 100755 --- a/prez/routers/ogc_router.py +++ b/prez/routers/ogc_router.py @@ -5,11 +5,11 @@ from rdflib import URIRef from prez.dependencies import get_repo, get_system_repo +from prez.reference_data.prez_ns import PREZ +from prez.repositories import Repo from prez.services.curie_functions import get_uri_for_curie_id from prez.services.listings import listing_function from prez.services.objects import object_function -from prez.repositories import Repo -from prez.reference_data.prez_ns import PREZ from temp.grammar import IRI router = APIRouter(tags=["ogccatprez"]) diff --git a/prez/routers/profiles.py b/prez/routers/profiles.py index f46fff66..72446949 100755 --- a/prez/routers/profiles.py +++ b/prez/routers/profiles.py @@ -1,10 +1,10 @@ from fastapi import APIRouter, Request, Depends +from rdflib import URIRef from prez.dependencies import get_system_repo from prez.services.curie_functions import get_uri_for_curie_id from prez.services.listings import listing_function from prez.services.objects import object_function -from rdflib import URIRef router = APIRouter(tags=["Profiles"]) diff --git a/prez/routers/sparql.py b/prez/routers/sparql.py index 71300b18..6d017662 100755 --- a/prez/routers/sparql.py +++ b/prez/routers/sparql.py @@ -17,6 +17,7 @@ router = APIRouter(tags=["SPARQL"]) + # TODO: Split this into two routes on the same /sparql path. # One to handle SPARQL GET requests, the other for SPARQL POST requests. 
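The split flagged in the TODO above could look roughly like this (a sketch under assumptions: _handle_sparql is a hypothetical shared coroutine holding the current endpoint body, and only the way the query is extracted differs between the two methods):

    # Hypothetical sketch of the GET/POST split described in the TODO above.
    from fastapi import APIRouter, Request

    router = APIRouter(tags=["SPARQL"])

    @router.get("/sparql")
    async def sparql_get(request: Request, query: str):
        # Per the SPARQL Protocol, GET carries the query as a query parameter.
        return await _handle_sparql(request, query)

    @router.post("/sparql")
    async def sparql_post(request: Request):
        # POST carries the query in the request body.
        query = (await request.body()).decode("utf-8")
        return await _handle_sparql(request, query)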
@@ -28,13 +29,18 @@ async def sparql_endpoint( repo: Repo = Depends(get_repo), system_repo: Repo = Depends(get_system_repo), ): - pmts = NegotiatedPMTs(**{ - "headers": request.headers, - "params": request.query_params, - "classes": [PREZ.SPARQlQuery], - "system_repo": system_repo - }) - if pmts.requested_mediatypes is not None and "anot+" in pmts.requested_mediatypes[0][0]: + pmts = NegotiatedPMTs( + **{ + "headers": request.headers, + "params": request.query_params, + "classes": [PREZ.SPARQlQuery], + "system_repo": system_repo, + } + ) + if ( + pmts.requested_mediatypes is not None + and "anot+" in pmts.requested_mediatypes[0][0] + ): non_anot_mediatype = pmts.requested_mediatypes[0][0].replace("anot+", "") request._headers = Headers({**request.headers, "accept": non_anot_mediatype}) response = await repo.sparql(request) @@ -48,7 +54,7 @@ async def sparql_endpoint( return StreamingResponse( content=content, media_type=non_anot_mediatype, - headers=pmts.generate_response_headers() + headers=pmts.generate_response_headers(), ) else: query_result = await repo.sparql(query, request.headers.raw) diff --git a/prez/services/annotations.py b/prez/services/annotations.py index 4abee06f..fc89ebd6 100755 --- a/prez/services/annotations.py +++ b/prez/services/annotations.py @@ -1,16 +1,14 @@ import asyncio import logging -import os -import time from itertools import chain -from typing import List, FrozenSet -from aiocache.serializers import PickleSerializer +from typing import FrozenSet + from aiocache import cached -from rdflib import Graph, URIRef, Literal, Dataset +from aiocache.serializers import PickleSerializer +from rdflib import Graph, URIRef, Literal from rdflib.namespace import RDFS -from prez.cache import tbox_cache, tbox_cache_aio -from prez.config import settings +from prez.cache import tbox_cache_aio from prez.dependencies import get_annotations_repo from prez.reference_data.prez_ns import PREZ from prez.repositories import Repo @@ -23,8 +21,7 @@ async def process_terms(terms, repo) -> Graph: - """ - """ + """ """ results = await asyncio.gather(*[process_term(term, repo) for term in terms]) triples = list(chain(*results)) annotations_g = Graph() @@ -37,31 +34,37 @@ def term_based_key_builder(func, *args, **kwargs): return args[0] -@cached(cache=tbox_cache_aio, key_builder=term_based_key_builder, serializer=PickleSerializer()) +@cached( + cache=tbox_cache_aio, + key_builder=term_based_key_builder, + serializer=PickleSerializer(), +) async def process_term(term, repo) -> FrozenSet[Tuple[URIRef, URIRef, Literal]]: """ gets annotations for an individual term """ - log.info(f"Processing term within func {term}") annotations_repo = await get_annotations_repo() annotations_query = AnnotationsConstructQuery( term=IRI(value=term), construct_predicate=IRI(value=PREZ.label), # TODO change to predicate map - select_predicates=[IRI(value=RDFS.label)] + select_predicates=[IRI(value=RDFS.label)], ).to_string() # check the prez cache - context_results = await annotations_repo.send_queries(rdf_queries=[annotations_query], tabular_queries=[]) + context_results = await annotations_repo.send_queries( + rdf_queries=[annotations_query], tabular_queries=[] + ) # if not found, query the data repo - repo_results = await repo.send_queries(rdf_queries=[annotations_query], tabular_queries=[]) + repo_results = await repo.send_queries( + rdf_queries=[annotations_query], tabular_queries=[] + ) all_results = context_results[0] + repo_results[0] cacheable_results = frozenset(all_results) - log.info(f"Processed term 
{term}, found {len(cacheable_results)} annotations.") return cacheable_results async def get_annotation_properties( - item_graph: Graph, - repo: Repo, + item_graph: Graph, + repo: Repo, ) -> Graph: """ Gets annotation data used for HTML display. diff --git a/prez/services/app_service.py index 17ae753e..e434e351 100755 --- a/prez/services/app_service.py +++ b/prez/services/app_service.py @@ -3,19 +3,18 @@ from pathlib import Path import httpx -from rdflib import URIRef, Literal, Graph, RDFS, DCTERMS, SDO, SKOS, Dataset +from rdflib import URIRef, Literal, Graph from prez.cache import ( prez_system_graph, counts_graph, prefix_graph, endpoints_graph_cache, - tbox_cache, ) from prez.config import settings from prez.reference_data.prez_ns import PREZ -from prez.services.curie_functions import get_curie_id_for_uri from prez.repositories import Repo +from prez.services.curie_functions import get_curie_id_for_uri from prez.services.query_generation.count import startup_count_objects log = logging.getLogger(__name__) @@ -81,7 +80,7 @@ async def add_prefixes_to_prefix_graph(repo: Repo): prefix_graph.bind(str(prefix), namespace) # prefix_graph.bind(str(subject_objects[1]), subject_objects[0]) - log.info(f"{i+1:,} prefixes bound from file {f.name}") + log.info(f"{i + 1:,} prefixes bound from file {f.name}") log.info("Prefixes from local files added to prefix graph") if settings.disable_prefix_generation: diff --git a/prez/services/connegp_service.py index 79cc1354..918d730b 100755 --- a/prez/services/connegp_service.py +++ b/prez/services/connegp_service.py @@ -55,6 +55,7 @@ class NegotiatedPMTs(BaseModel): Response headers with alternate profiles / mediatypes can be generated by calling the .generate_response_headers() method. """ + headers: dict params: dict classes: list[URIRef] @@ -77,7 +78,8 @@ async def setup(self) -> bool: return True if self.selected else False async def _resolve_token(self, token: str) -> str: - query_str: str = dedent(""" + query_str: str = dedent( + """ PREFIX dcterms: <http://purl.org/dc/terms/> PREFIX xsd: <http://www.w3.org/2001/XMLSchema#> PREFIX prof: <http://www.w3.org/ns/dx/prof/> SELECT ?profile WHERE { ?profile a prof:Profile . ?profile dcterms:identifier ?o . FILTER(?o="<token>"^^xsd:token) } - """.replace("<token>", token)) + """.replace( + "<token>", token + ) ) try: _, results = await self.system_repo.send_queries([], [(None, query_str)]) result: str = results[0][1][0]["profile"]["value"] @@ -97,19 +102,29 @@ uri = "<" + result + ">" return uri - async def _tupilize(self, string: str, is_profile: bool = False) -> tuple[str, float]: + async def _tupilize( + self, string: str, is_profile: bool = False + ) -> tuple[str, float]: parts: list[str | float] = string.split("q=") # split out the weighting - parts[0] = parts[0].strip(" ;") # remove the seperator character, and any whitespace characters + parts[0] = parts[0].strip( + " ;" + ) # remove the seperator character, and any whitespace characters - if is_profile and not re.search(r"^<.*>$", parts[0]): # If it doesn't look like a URI ... + if is_profile and not re.search( + r"^<.*>$", parts[0] + ): # If it doesn't look like a URI ...
try: - parts[0] = await self._resolve_token(parts[0]) # then try to resolve the token to a URI + parts[0] = await self._resolve_token( + parts[0] + ) # then try to resolve the token to a URI except TokenError as e: logger.error(e.args[0]) try: # if token resolution fails, try to resolve as a curie result = str(get_uri_for_curie_id(parts[0])) parts[0] = "<" + result + ">" except ValueError as e: - parts[0] = "" # if curie resolution failed, then the profile is invalid + parts[ + 0 + ] = "" # if curie resolution failed, then the profile is invalid logger.error(e.args[0]) if len(parts) == 1: parts.append(self.default_weighting) # If no weight given, set the default @@ -118,7 +133,8 @@ async def _tupilize(self, string: str, is_profile: bool = False) -> tuple[str, f parts[1] = float(parts[1]) # Type-check the seperated weighting except ValueError as e: logger.debug( - f"Could not cast q={parts[1]} as float. Defaulting to {self.default_weighting}. {e.args[0]}") + f"Could not cast q={parts[1]} as float. Defaulting to {self.default_weighting}. {e.args[0]}" + ) return parts[0], parts[1] @staticmethod @@ -126,20 +142,30 @@ def _prioritize(types: list[tuple[str, float]]) -> list[tuple[str, float]]: return sorted(types, key=lambda x: x[1], reverse=True) async def _get_requested_profiles(self) -> list[tuple[str, float]] | None: - raw_profiles: str = self.params.get("_profile", "") # Prefer profiles declared in the QSA, as per the spec. + raw_profiles: str = self.params.get( + "_profile", "" + ) # Prefer profiles declared in the QSA, as per the spec. if not raw_profiles: raw_profiles: str = self.headers.get("accept-profile", "") if raw_profiles: - profiles: list = [await self._tupilize(profile, is_profile=True) for profile in raw_profiles.split(",")] + profiles: list = [ + await self._tupilize(profile, is_profile=True) + for profile in raw_profiles.split(",") + ] return self._prioritize(profiles) return None async def _get_requested_mediatypes(self) -> list[tuple[str, float]] | None: - raw_mediatypes: str = self.params.get("_media", "") # Prefer mediatypes declared in the QSA, as per the spec. + raw_mediatypes: str = self.params.get( + "_media", "" + ) # Prefer mediatypes declared in the QSA, as per the spec. 
if not raw_mediatypes: raw_mediatypes: str = self.headers.get("accept", "") if raw_mediatypes: - mediatypes: list = [await self._tupilize(mediatype) for mediatype in raw_mediatypes.split(",")] + mediatypes: list = [ + await self._tupilize(mediatype) + for mediatype in raw_mediatypes.split(",") + ] return self._prioritize(mediatypes) return None @@ -151,8 +177,9 @@ async def _get_available(self) -> list[dict]: "profile": URIRef(result["profile"]["value"]), "title": result["title"]["value"], "mediatype": result["format"]["value"], - "class": result["class"]["value"] - } for result in repo_response[1][0][1] + "class": result["class"]["value"], + } + for result in repo_response[1][0][1] ] return available @@ -163,8 +190,8 @@ def generate_response_headers(self) -> dict: profile_uri = "" distinct_profiles = {(pmt["profile"], pmt["title"]) for pmt in self.available} profile_header_links = ", ".join( - [f'<{self.selected["profile"]}>; rel="profile"'] + - [ + [f'<{self.selected["profile"]}>; rel="profile"'] + + [ f'{profile_uri}; rel="type"; title="{pmt[1]}"; token="{get_curie_id_for_uri(pmt[0])}"; anchor={pmt[0]}"' for pmt in distinct_profiles ] @@ -177,7 +204,7 @@ def generate_response_headers(self) -> dict: ) headers = { "Content-Type": self.selected["mediatype"], - "link": profile_header_links + mediatype_header_links + "link": profile_header_links + mediatype_header_links, } return headers @@ -185,7 +212,9 @@ def _compose_select_query(self) -> str: prez = Namespace("https://prez.dev/") profile_class = prez.ListingProfile if self.listing else prez.ObjectProfile try: - requested_profile = self.requested_profiles[0][0] # TODO: handle multiple requested profiles + requested_profile = self.requested_profiles[0][ + 0 + ] # TODO: handle multiple requested profiles except TypeError as e: requested_profile = None logger.debug(f"{e}. 
normally this just means no profiles were requested") diff --git a/prez/services/link_generation.py b/prez/services/link_generation.py index 56cae86b..27f788e1 100755 --- a/prez/services/link_generation.py +++ b/prez/services/link_generation.py @@ -7,11 +7,11 @@ from prez.cache import endpoints_graph_cache, links_ids_graph_cache from prez.config import settings from prez.reference_data.prez_ns import PREZ +from prez.repositories import Repo from prez.services.curie_functions import get_curie_id_for_uri from prez.services.query_generation.classes import get_classes -from prez.repositories import Repo -from temp.grammar import * from prez.services.query_generation.node_selection.endpoint_shacl import NodeShape +from temp.grammar import * log = logging.getLogger(__name__) diff --git a/prez/services/listings.py b/prez/services/listings.py index 073a82a2..1e7012ac 100755 --- a/prez/services/listings.py +++ b/prez/services/listings.py @@ -16,12 +16,9 @@ from prez.services.link_generation import add_prez_links from prez.services.query_generation.classes import get_classes from prez.services.query_generation.count import CountQuery +from prez.services.query_generation.node_selection.cql import CQLParser from prez.services.query_generation.node_selection.endpoint_shacl import NodeShape from prez.services.query_generation.node_selection.search import SearchQuery -from prez.services.query_generation.node_selection.cql import CQLParser - -# from rdframe.grammar import SubSelect -# from rdframe import PrezQueryConstructor from prez.services.query_generation.umbrella import PrezQueryConstructor from temp.grammar import * @@ -38,8 +35,8 @@ async def listing_function( page: int = 1, per_page: int = 20, cql_parser: CQLParser = None, - search_term: Optional[str] = None, - endpoint_structure: Tuple[str] = settings.endpoint_structure, + search_term: Optional[str] = None, + endpoint_structure: Tuple[str] = settings.endpoint_structure, ): """ # determine the relevant node selection part of the query - from SHACL, CQL, Search @@ -63,19 +60,25 @@ async def listing_function( elif search_term: target_classes = frozenset([PREZ.SearchResult]) # determine the relevant profile - pmts = NegotiatedPMTs(headers= - request.headers, params=request.query_params, classes=target_classes, listing=True, - system_repo=system_repo) + pmts = NegotiatedPMTs( + headers=request.headers, + params=request.query_params, + classes=target_classes, + listing=True, + system_repo=system_repo, + ) success = await pmts.setup() if not success: log.error("ConnegP Error. 
NegotiatedPMTs.setup() was not successful") runtime_values = {} - if pmts.selected["profile"] == URIRef("http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile"): + if pmts.selected["profile"] == URIRef( + "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" + ): ns = NodeShape( uri=URIRef("http://example.org/ns#AltProfilesForListing"), graph=endpoints_graph_cache, - path_nodes={"path_node_1": IRI(value=pmts.selected["class"])} + path_nodes={"path_node_1": IRI(value=pmts.selected["class"])}, ) ns_triples = ns.triples_list ns_gpnt = ns.gpnt_list @@ -129,7 +132,10 @@ async def listing_function( queries.append(search_query) else: queries.append(main_query) - if pmts.requested_mediatypes is not None and pmts.requested_mediatypes[0][0] == "application/sparql-query": + if ( + pmts.requested_mediatypes is not None + and pmts.requested_mediatypes[0][0] == "application/sparql-query" + ): return PlainTextResponse(queries[0], media_type="application/sparql-query") # add a count query if it's an annotated mediatype @@ -147,10 +153,14 @@ async def listing_function( # if count_class: # target_class may be unknown (None) for queries involving CQL # queries.append(temp_listing_count(subselect, count_class)) - if pmts.selected["profile"] == URIRef("http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile"): + if pmts.selected["profile"] == URIRef( + "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" + ): item_graph, _ = await system_repo.send_queries(queries, []) if "anot+" in pmts.selected["mediatype"]: - await add_prez_links(item_graph, system_repo, endpoint_structure=("profiles",)) + await add_prez_links( + item_graph, system_repo, endpoint_structure=("profiles",) + ) else: item_graph, _ = await repo.send_queries(queries, []) if "anot+" in pmts.selected["mediatype"]: @@ -170,7 +180,7 @@ async def listing_function( async def get_shacl_node_selection( - endpoint_uri, hierarchy_level, path_nodes, repo, system_repo + endpoint_uri, hierarchy_level, path_nodes, repo, system_repo ): """ Determines the relevant nodeshape based on the endpoint, hierarchy level, and parent URI diff --git a/prez/services/objects.py b/prez/services/objects.py index 6b0052f9..d60be71e 100755 --- a/prez/services/objects.py +++ b/prez/services/objects.py @@ -20,25 +20,30 @@ async def object_function( - request: Request, - endpoint_uri: URIRef, - uri: URIRef, - request_url: str, - repo: Repo, - system_repo: Repo, - endpoint_structure: Tuple[str] = settings.endpoint_structure, + request: Request, + endpoint_uri: URIRef, + uri: URIRef, + request_url: str, + repo: Repo, + system_repo: Repo, + endpoint_structure: Tuple[str] = settings.endpoint_structure, ): classes = await get_classes(uri=uri, repo=repo) - pmts = NegotiatedPMTs(headers= - request.headers, params=request.query_params, classes=classes, - system_repo=system_repo) + pmts = NegotiatedPMTs( + headers=request.headers, + params=request.query_params, + classes=classes, + system_repo=system_repo, + ) success = await pmts.setup() if not success: log.error("ConnegP Error. 
NegotiatedPMTs.setup() was not successful") # handle alternate profiles runtime_values = {} - if pmts.selected["profile"] == URIRef("http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile"): + if pmts.selected["profile"] == URIRef( + "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" + ): endpoint_uri = URIRef("https://prez.dev/endpoint/system/alt-profiles-listing") # runtime_values["selectedClass"] = prof_and_mt_info.selected_class @@ -61,7 +66,9 @@ async def object_function( except IndexError as e: log.debug(e.args[0]) - if pmts.selected["profile"] == URIRef("http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile"): + if pmts.selected["profile"] == URIRef( + "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" + ): item_graph, _ = await system_repo.send_queries([query], []) else: item_graph, _ = await repo.send_queries([query], []) diff --git a/prez/services/query_generation/annotations.py b/prez/services/query_generation/annotations.py index ce28d1dd..5aef8c2b 100644 --- a/prez/services/query_generation/annotations.py +++ b/prez/services/query_generation/annotations.py @@ -5,28 +5,35 @@ class AnnotationsConstructQuery(ConstructQuery): def __init__( - self, - term: IRI, - construct_predicate: IRI, - select_predicates: List[IRI] + self, term: IRI, construct_predicate: IRI, select_predicates: List[IRI] ): construct_template = ConstructTemplate( construct_triples=ConstructTriples( - triples=[SimplifiedTriple( - subject=term, - predicate=construct_predicate, - object=Var(value="annotation"))] + triples=[ + SimplifiedTriple( + subject=term, + predicate=construct_predicate, + object=Var(value="annotation"), + ) + ] ) ) where_clause = WhereClause( group_graph_pattern=GroupGraphPattern( content=GroupGraphPatternSub( - graph_patterns_or_triples_blocks=[TriplesBlock( - triples=[SimplifiedTriple( - subject=term, - predicate=select_predicates[0], # Assuming a single select predicate for simplicity - object=Var(value="annotation"))] - )] + graph_patterns_or_triples_blocks=[ + TriplesBlock( + triples=[ + SimplifiedTriple( + subject=term, + predicate=select_predicates[ + 0 + ], # Assuming a single select predicate for simplicity + object=Var(value="annotation"), + ) + ] + ) + ] ) ) ) @@ -34,5 +41,5 @@ def __init__( super().__init__( construct_template=construct_template, where_clause=where_clause, - solution_modifier=solution_modifier + solution_modifier=solution_modifier, ) diff --git a/prez/services/query_generation/umbrella.py b/prez/services/query_generation/umbrella.py index 23ab47b4..3c08aace 100755 --- a/prez/services/query_generation/umbrella.py +++ b/prez/services/query_generation/umbrella.py @@ -43,8 +43,6 @@ class Config: runtime_vals_expanded: Optional[Dict] = {} merged_runtime_and_default_vals: Optional[Dict] = {} - - def _expand_runtime_vars(self): for k, v in self.runtime_values.items(): if k in ["limit", "offset", "q"]: @@ -151,7 +149,6 @@ def build_inner_select(self): for gpnt in all_gpnt: inner_select_ggps.add_pattern(gpnt) - def sh_rule_type_conversion(self, items: List): """ Assumes Literals are actually Variables. 
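For reference, the AnnotationsConstructQuery reshaped in the prez/services/query_generation/annotations.py hunk above is driven by the service layer like so (a sketch mirroring the call in prez/services/annotations.py as of this patch; the subject URI is illustrative):

    # Sketch: building the annotations query for a single term.
    from rdflib.namespace import RDFS

    query_str = AnnotationsConstructQuery(
        term=IRI(value="https://example.com/thing"),  # hypothetical term
        construct_predicate=IRI(value=PREZ.label),
        select_predicates=[IRI(value=RDFS.label)],
    ).to_string()
    # Roughly: CONSTRUCT { <term> prez:label ?annotation }
    #          WHERE     { <term> rdfs:label ?annotation }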
@@ -370,7 +367,6 @@ def process_bn_level(depth, max_depth, outer_ggps): gpnt = GraphPatternNotTriples(content=gorugp) self.main_where_ggps.add_pattern(gpnt) - def _parse_property_shapes(self, property_node, i): def process_path_object(path_obj: Union[URIRef, BNode]): if isinstance(path_obj, BNode): diff --git a/pyproject.toml b/pyproject.toml index f0547c3e..e60993bd 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,12 +7,12 @@ authors = ["Jamie Feiss ", "Nicholas Car Repo: "profile": URIRef("https://prez.dev/OGCItemProfile"), "title": "OGC Object Profile", "mediatype": "text/anot+turtle", - "class": "http://www.w3.org/ns/dcat#Catalog" - } + "class": "http://www.w3.org/ns/dcat#Catalog", + }, ], [ {}, # Test that profiles/mediatypes resolve to their defaults if not requested (listing endpoint) @@ -50,11 +50,13 @@ def test_repo(test_store: Store) -> Repo: "profile": URIRef("https://prez.dev/OGCListingProfile"), "title": "OGC Listing Profile", "mediatype": "text/anot+turtle", - "class": "http://www.w3.org/ns/dcat#Catalog" - } + "class": "http://www.w3.org/ns/dcat#Catalog", + }, ], [ - {"accept": "application/ld+json"}, # Test that a valid mediatype is resolved + { + "accept": "application/ld+json" + }, # Test that a valid mediatype is resolved {}, [URIRef("http://www.w3.org/ns/dcat#Catalog")], False, @@ -62,11 +64,13 @@ def test_repo(test_store: Store) -> Repo: "profile": URIRef("https://prez.dev/OGCItemProfile"), "title": "OGC Object Profile", "mediatype": "application/ld+json", - "class": "http://www.w3.org/ns/dcat#Catalog" - } + "class": "http://www.w3.org/ns/dcat#Catalog", + }, ], [ - {"accept": "application/ld+json;q=0.7,text/turtle"}, # Test resolution of multiple mediatypes + { + "accept": "application/ld+json;q=0.7,text/turtle" + }, # Test resolution of multiple mediatypes {}, [URIRef("http://www.w3.org/ns/dcat#Catalog")], False, @@ -74,8 +78,8 @@ def test_repo(test_store: Store) -> Repo: "profile": URIRef("https://prez.dev/OGCItemProfile"), "title": "OGC Object Profile", "mediatype": "text/turtle", - "class": "http://www.w3.org/ns/dcat#Catalog" - } + "class": "http://www.w3.org/ns/dcat#Catalog", + }, ], [ {}, @@ -86,8 +90,8 @@ def test_repo(test_store: Store) -> Repo: "profile": URIRef("https://prez.dev/OGCItemProfile"), "title": "OGC Object Profile", "mediatype": "application/ld+json", - "class": "http://www.w3.org/ns/dcat#Catalog" - } + "class": "http://www.w3.org/ns/dcat#Catalog", + }, ], [ {"accept": "text/turtle"}, @@ -98,8 +102,8 @@ def test_repo(test_store: Store) -> Repo: "profile": URIRef("https://prez.dev/OGCItemProfile"), "title": "OGC Object Profile", "mediatype": "application/ld+json", - "class": "http://www.w3.org/ns/dcat#Catalog" - } + "class": "http://www.w3.org/ns/dcat#Catalog", + }, ], [ {"accept-profile": "oogabooga"}, # Test that invalid profile is ignored @@ -110,8 +114,8 @@ def test_repo(test_store: Store) -> Repo: "profile": URIRef("https://prez.dev/OGCItemProfile"), "title": "OGC Object Profile", "mediatype": "text/anot+turtle", - "class": "http://www.w3.org/ns/dcat#Catalog" - } + "class": "http://www.w3.org/ns/dcat#Catalog", + }, ], [ {"accept": "oogabooga"}, # Test that invalid mediatype is ignored @@ -122,11 +126,13 @@ def test_repo(test_store: Store) -> Repo: "profile": URIRef("https://prez.dev/OGCItemProfile"), "title": "OGC Object Profile", "mediatype": "text/anot+turtle", - "class": "http://www.w3.org/ns/dcat#Catalog" - } + "class": "http://www.w3.org/ns/dcat#Catalog", + }, ], [ - {"accept-profile": ""}, # Test that a valid profile is resolved + { 
+ "accept-profile": "" + }, # Test that a valid profile is resolved {}, [URIRef("http://www.w3.org/ns/dcat#Catalog")], True, @@ -134,10 +140,10 @@ def test_repo(test_store: Store) -> Repo: "profile": URIRef("https://www.w3.org/TR/vocab-dcat/"), "title": "DCAT", "mediatype": "text/anot+turtle", - "class": "http://www.w3.org/ns/dcat#Catalog" - } + "class": "http://www.w3.org/ns/dcat#Catalog", + }, ], - ] + ], ) @pytest.mark.asyncio async def test_connegp(headers, params, classes, listing, expected_selected, test_repo): @@ -145,7 +151,13 @@ def override_get_repo(): return test_repo app.dependency_overrides[get_repo] = override_get_repo - pmts = NegotiatedPMTs(headers=headers, params=params, classes=classes, listing=listing, system_repo=test_repo) + pmts = NegotiatedPMTs( + headers=headers, + params=params, + classes=classes, + listing=listing, + system_repo=test_repo, + ) success = await pmts.setup() assert success assert pmts.selected == expected_selected diff --git a/tests/test_node_selection_shacl.py b/tests/test_node_selection_shacl.py index 54659c26..173856e9 100755 --- a/tests/test_node_selection_shacl.py +++ b/tests/test_node_selection_shacl.py @@ -11,26 +11,21 @@ ) -@pytest.mark.parametrize( - "nodeshape_uri", ["http://example.org/ns#Collections"] -) +@pytest.mark.parametrize("nodeshape_uri", ["http://example.org/ns#Collections"]) def test_nodeshape_parsing(nodeshape_uri): ns = NodeShape(uri=URIRef(nodeshape_uri), graph=endpoints_graph) assert ns.targetClasses == [ - URIRef('http://www.opengis.net/ont/geosparql#FeatureCollection'), - URIRef('http://www.w3.org/2004/02/skos/core#ConceptScheme'), - URIRef('http://www.w3.org/2004/02/skos/core#Collection'), - URIRef('http://www.w3.org/ns/dcat#Catalog') + URIRef("http://www.opengis.net/ont/geosparql#FeatureCollection"), + URIRef("http://www.w3.org/2004/02/skos/core#ConceptScheme"), + URIRef("http://www.w3.org/2004/02/skos/core#Collection"), + URIRef("http://www.w3.org/ns/dcat#Catalog"), ] assert len(ns.propertyShapesURIs) == 1 @pytest.mark.parametrize( "nodeshape_uri", - [ - "http://example.org/ns#TopLevelCatalogs" - "http://example.org/ns#FeatureListing" - ], + ["http://example.org/ns#TopLevelCatalogs" "http://example.org/ns#FeatureListing"], ) def test_nodeshape_to_grammar(nodeshape_uri): ns = NodeShape(uri=URIRef(nodeshape_uri), graph=endpoints_graph) From 89781dd50e35973f8f55e238438c742e7e4a1a32 Mon Sep 17 00:00:00 2001 From: david Date: Thu, 7 Mar 2024 23:57:49 +1000 Subject: [PATCH 21/25] Clean up tests add language tag handling to annotations query --- Dockerfile | 2 +- prez/renderers/renderer.py | 20 ++- prez/repositories/base.py | 6 +- prez/routers/management.py | 32 ++-- prez/routers/sparql.py | 2 +- prez/services/annotations.py | 147 ++++++++++++++---- prez/services/link_generation.py | 12 ++ prez/services/listings.py | 5 + prez/services/objects.py | 1 + prez/services/query_generation/annotations.py | 107 +++++++++++-- temp/grammar/grammar.py | 75 +++++++-- .../ogc_records_profile.ttl | 0 .../spaceprez_default_profiles.ttl | 0 tests/conftest.py | 42 +++++ tests/test_alt_profiles.py | 46 ------ tests/test_connegp.py | 4 +- tests/test_count.py | 74 ++++----- tests/test_curie_endpoint.py | 24 +-- tests/test_endpoints_cache.py | 51 +----- tests/test_endpoints_catprez.py | 56 ------- tests/test_endpoints_management.py | 40 ----- tests/test_endpoints_object.py | 50 +----- tests/test_endpoints_profiles.py | 41 ----- tests/test_endpoints_spaceprez.py | 43 +---- tests/test_node_selection_shacl.py | 2 +- tests/test_redirect_endpoint.py | 
43 +---- tests/test_sparql.py | 43 ----- 27 files changed, 430 insertions(+), 538 deletions(-) rename {tests/data/profiles => test_data}/ogc_records_profile.ttl (100%) rename {tests/data/profiles => test_data}/spaceprez_default_profiles.ttl (100%) diff --git a/Dockerfile b/Dockerfile index 9288b542..f53383fc 100755 --- a/Dockerfile +++ b/Dockerfile @@ -31,7 +31,7 @@ RUN curl -sSL https://install.python-poetry.org | python && \ chmod a+x /opt/poetry/bin/poetry WORKDIR /app -COPY poetry.lock pyproject.toml connegp-0.1.6-py3-none-any.whl ./ +COPY poetry.lock pyproject.toml ./ RUN poetry install --only main --no-root --no-ansi FROM python:3.11-slim-buster diff --git a/prez/renderers/renderer.py b/prez/renderers/renderer.py index 59be56e2..2422e5d3 100755 --- a/prez/renderers/renderer.py +++ b/prez/renderers/renderer.py @@ -1,6 +1,7 @@ import io import json import logging +import time from fastapi import status from fastapi.exceptions import HTTPException @@ -26,6 +27,7 @@ async def return_from_graph( profile_headers, selected_class: URIRef, repo: Repo, + system_repo: Repo, ): profile_headers["Content-Disposition"] = "inline" @@ -64,7 +66,7 @@ async def return_from_graph( else: if "anot+" in mediatype: non_anot_mediatype = mediatype.replace("anot+", "") - graph = await return_annotated_rdf(graph, repo) + graph = await return_annotated_rdf(graph, repo, system_repo) content = io.BytesIO( graph.serialize(format=non_anot_mediatype, encoding="utf-8") ) @@ -77,7 +79,7 @@ async def return_from_graph( ) -async def return_rdf(graph, mediatype, profile_headers): +async def return_rdf(graph: Graph, mediatype, profile_headers): RDF_SERIALIZER_TYPES_MAP["text/anot+turtle"] = "turtle" obj = io.BytesIO( graph.serialize( @@ -90,14 +92,10 @@ async def return_rdf(graph, mediatype, profile_headers): async def return_annotated_rdf( graph: Graph, - repo, + repo: Repo, + system_repo: Repo, ) -> Graph: - annotations_graph = await get_annotation_properties(graph, repo) - # previous_annotation_len = 0 - # current_annotation_len = len(annotations_graph) - # while current_annotation_len != previous_annotation_len: - # previous_annotation_len = current_annotation_len - # new_annotations = await get_annotation_properties(annotations_graph, repo) - # current_annotation_len = len(new_annotations) - # annotations_graph += new_annotations + t_start = time.time() + annotations_graph = await get_annotation_properties(graph, repo, system_repo) + log.debug(f"Time to get annotations: {time.time() - t_start}") return graph.__iadd__(annotations_graph) diff --git a/prez/repositories/base.py b/prez/repositories/base.py index 51fdc522..14dfe073 100755 --- a/prez/repositories/base.py +++ b/prez/repositories/base.py @@ -34,9 +34,9 @@ async def send_queries( if query ], ) - from prez.cache import prefix_graph - - g = Graph(namespace_manager=prefix_graph.namespace_manager) + # from prez.cache import prefix_graph + # g = Graph(namespace_manager=prefix_graph.namespace_manager) #TODO find where else this can go. 
significantly degrades performance + g = Graph() tabular_results = [] for result in results: if isinstance(result, Graph): diff --git a/prez/routers/management.py b/prez/routers/management.py index c757a4d6..4e1f242c 100755 --- a/prez/routers/management.py +++ b/prez/routers/management.py @@ -10,9 +10,7 @@ from prez.config import settings from prez.reference_data.prez_ns import PREZ from prez.renderers.renderer import return_rdf -from prez.services.annotations import process_term -# from prez.services.app_service import add_common_context_ontologies_to_tbox_cache router = APIRouter(tags=["Management"]) log = logging.getLogger(__name__) @@ -31,17 +29,25 @@ async def index(): return await return_rdf(g, "text/turtle", profile_headers={}) -@router.get("/purge-tbox-cache", summary="Reset Tbox Cache") -async def purge_tbox_cache(): - """Purges the tbox cache, then re-adds annotations from common ontologies Prez has a copy of - (reference_data/context_ontologies).""" - cache = process_term.cache - cache_size = len(cache._cache) - result = await cache.clear() - if result: - return PlainTextResponse(f"{cache_size} terms removed from tbox cache.") - else: - return PlainTextResponse("Tbox cache already empty.") +# @router.get("/purge-tbox-cache", summary="Reset Tbox Cache") +# async def purge_tbox_cache(): +# """Purges the tbox cache, then re-adds annotations from common ontologies Prez has a copy of +# (reference_data/context_ontologies).""" +# cache = process_term.cache +# cache_size = len(cache._cache) +# test = await cache.multi_get( +# [ +# URIRef("https://prez.dev/profile/prez"), +# URIRef("https://example.com/TopLevelCatalogTwo"), +# URIRef("https://example.com/VocPrezCatalog"), +# URIRef("http://nonoenoenone"), +# ] +# ) +# result = await cache.clear() +# if result: +# return PlainTextResponse(f"{cache_size} terms removed from tbox cache.") +# else: +# return PlainTextResponse("Tbox cache already empty.") # @router.get("/tbox-cache", summary="Show the Tbox Cache") diff --git a/prez/routers/sparql.py b/prez/routers/sparql.py index 6d017662..5e5444a3 100755 --- a/prez/routers/sparql.py +++ b/prez/routers/sparql.py @@ -47,7 +47,7 @@ async def sparql_endpoint( await response.aread() g = Graph() g.parse(data=response.text, format=non_anot_mediatype) - graph = await return_annotated_rdf(g, pmts.selected["profile"]) + graph = await return_annotated_rdf(g, repo, system_repo) content = io.BytesIO( graph.serialize(format=non_anot_mediatype, encoding="utf-8") ) diff --git a/prez/services/annotations.py b/prez/services/annotations.py index fc89ebd6..64ca3860 100755 --- a/prez/services/annotations.py +++ b/prez/services/annotations.py @@ -1,70 +1,142 @@ -import asyncio import logging -from itertools import chain -from typing import FrozenSet +from typing import List, FrozenSet, Set -from aiocache import cached -from aiocache.serializers import PickleSerializer +from aiocache import caches from rdflib import Graph, URIRef, Literal -from rdflib.namespace import RDFS -from prez.cache import tbox_cache_aio from prez.dependencies import get_annotations_repo -from prez.reference_data.prez_ns import PREZ from prez.repositories import Repo -from prez.services.query_generation.annotations import AnnotationsConstructQuery +from prez.services.query_generation.annotations import ( + AnnotationsConstructQuery, +) from temp.grammar import * log = logging.getLogger(__name__) pred = IRI(value=URIRef("https://prez.dev/label")) +caches.set_config( + { + "default": { + "cache": "aiocache.SimpleMemoryCache", + "serializer": 
{"class": "aiocache.serializers.PickleSerializer"}, + } + } +) + -async def process_terms(terms, repo) -> Graph: - """ """ - results = await asyncio.gather(*[process_term(term, repo) for term in terms]) - triples = list(chain(*results)) +async def process_terms(terms_and_dtypes: Set[URIRef], repo: Repo, system_repo: Repo): + """ + This function processes the terms and their data types. It first retrieves the cached results for the given terms + and data types. Then, it processes the terms that are not cached. The results are added to a graph which is then + returned. + + Args: + terms_and_dtypes (set): A list of tuples where each tuple contains a term and its data type. + repo (Repo): An instance of the Repo class. + system_repo (Repo): An instance of the Repo class with the Prez system graph. + + Returns: + annotations_g (Graph): A graph containing the processed terms and their data types. + """ annotations_g = Graph() - for triple in triples: - annotations_g.add(triple) + cache = caches.get("default") # This always returns the SAME instance + results = await cache.multi_get(list(terms_and_dtypes)) + zipped = list(zip(terms_and_dtypes, results)) + + cached = [z for z in zipped if z[1] is not None] + await add_cached_entries(annotations_g, cached) + + uncached = [z[0] for z in zipped if z[1] is None] + if uncached: + await process_uncached_terms(uncached, repo, system_repo, annotations_g) + return annotations_g -def term_based_key_builder(func, *args, **kwargs): - return args[0] +async def add_cached_entries( + annotations_g: Graph, cached: List[Tuple[URIRef, FrozenSet[Tuple[URIRef, Literal]]]] +): + """ + This function adds the cached entries to the graph. It iterates over the cached entries and for each entry, + it extracts the subject and the frozenset of predicate-object pairs. Then, it adds the expanded triple + (subject, predicate, object) to the graph. + Args: + annotations_g (Graph): A graph to which the cached entries are added. + cached (list): A list of cached entries. -@cached( - cache=tbox_cache_aio, - key_builder=term_based_key_builder, - serializer=PickleSerializer(), -) -async def process_term(term, repo) -> FrozenSet[Tuple[URIRef, URIRef, Literal]]: + Returns: + None """ - gets annotations for an individual term + for triples in cached: + subject = triples[0] # Extract the subject from the current cached object + predicate_objects = triples[ + 1 + ] # Extract the frozenset of predicate-object pairs + # Iterate over each predicate-object pair in the frozenset + for pred, obj in predicate_objects: + # Add the expanded triple (subject, predicate, object) to 'annotations_g' + annotations_g.add((subject, pred, obj)) + + +async def process_uncached_terms( + terms: List[URIRef], repo: Repo, system_repo: Repo, annotations_g: Graph +): + """ + This function processes the terms that are not cached. It sends queries to the annotations repository and the + main repository to get the results for the uncached terms. The results are then added to the graph and also + cached for future use. + + Args: + terms (list): A list of terms that are not cached. + repo (Repo): An instance of the Repo class. + annotations_g (Graph): A graph to which the results are added. 
+ + Returns: + None """ annotations_repo = await get_annotations_repo() annotations_query = AnnotationsConstructQuery( - term=IRI(value=term), - construct_predicate=IRI(value=PREZ.label), # TODO change to predicate map - select_predicates=[IRI(value=RDFS.label)], + terms=[IRI(value=term) for term in terms] ).to_string() - # check the prez cache + context_results = await annotations_repo.send_queries( rdf_queries=[annotations_query], tabular_queries=[] ) - # if not found, query the data repo repo_results = await repo.send_queries( rdf_queries=[annotations_query], tabular_queries=[] ) - all_results = context_results[0] + repo_results[0] - cacheable_results = frozenset(all_results) - return cacheable_results + system_results = await system_repo.send_queries( + rdf_queries=[annotations_query], tabular_queries=[] + ) + + all_results = context_results[0] + repo_results[0] + system_results[0] + + # Initialize subjects_map with each term having an empty set to start with + subjects_map = {term: set() for term in terms} + + for s, p, o in all_results: + subjects_map[s].add((p, o)) + + # Prepare subjects_list, only converting to frozenset where there are actual results + subjects_list = [ + (subject, frozenset(po_pairs)) if po_pairs else (subject, frozenset()) + for subject, po_pairs in subjects_map.items() + ] + + # Cache the results + cache = caches.get("default") + await cache.multi_set(subjects_list) + + # Add all results to annotations_g + annotations_g += all_results async def get_annotation_properties( item_graph: Graph, repo: Repo, + system_repo: Repo, ) -> Graph: """ Gets annotation data used for HTML display. @@ -73,9 +145,16 @@ async def get_annotation_properties( which are often diverse in the predicates they use, to be aligned with the default predicates used by Prez. The full range of predicates used can be manually included via profiles. """ + # get all terms and datatypes for which we want to retrieve annotations terms = set(term for term in item_graph.all_nodes() if isinstance(term, URIRef)) - if not terms: + dtypes = set( + term.datatype + for term in item_graph.all_nodes() + if isinstance(term, Literal) and term.datatype + ) + terms_and_types = terms.union(dtypes) + if not terms_and_types: return Graph() - annotations_g = await process_terms(terms, repo) + annotations_g = await process_terms(terms_and_types, repo, system_repo) return annotations_g diff --git a/prez/services/link_generation.py b/prez/services/link_generation.py index 27f788e1..e269d585 100755 --- a/prez/services/link_generation.py +++ b/prez/services/link_generation.py @@ -1,4 +1,5 @@ import logging +import time from string import Template from rdflib import Graph, Literal, URIRef, DCTERMS, BNode @@ -20,6 +21,7 @@ async def add_prez_links(graph: Graph, repo: Repo, endpoint_structure): """ Adds internal links to the given graph for all URIRefs that have a class and endpoint associated with them. """ + t_start = time.time() # get all URIRefs - if Prez can find a class and endpoint for them, an internal link will be generated. 
uris = [uri for uri in graph.all_nodes() if isinstance(uri, URIRef)] uri_to_klasses = {} @@ -29,6 +31,7 @@ async def add_prez_links(graph: Graph, repo: Repo, endpoint_structure): for uri, klasses in uri_to_klasses.items(): if klasses: # need class to know which endpoints can deliver the class await _link_generation(uri, repo, klasses, graph, endpoint_structure) + log.debug(f"Time taken to add links: {time.time() - t_start}") async def _link_generation( @@ -156,6 +159,15 @@ async def create_link_strings(hierarchy_level, solution, uri, endpoint_structure async def get_link_components(ns, repo): """ Retrieves link components for the given node shape. + + Of the form: + SELECT ?path_node_1 + WHERE { + ?path_node_1 . + ?focus_classes . + ?path_node_1 . + VALUES ?focus_classes{ } + } """ link_queries = [] link_queries.append( diff --git a/prez/services/listings.py b/prez/services/listings.py index 1e7012ac..b6da4b15 100755 --- a/prez/services/listings.py +++ b/prez/services/listings.py @@ -176,6 +176,7 @@ async def listing_function( pmts.generate_response_headers(), pmts.selected["class"], repo, + system_repo, ) @@ -223,6 +224,10 @@ async def get_shacl_node_selection( if match_all_keys: matching_nodeshapes.append(ns) # TODO logic if there is more than one nodeshape - current default nodeshapes will only return one. + if not matching_nodeshapes: + raise ValueError( + "No matching nodeshapes found for the given path nodes and hierarchy level" + ) node_selection_shape = matching_nodeshapes[0].uri target_classes = list( endpoints_graph_cache.objects(node_selection_shape, SH.targetClass) diff --git a/prez/services/objects.py b/prez/services/objects.py index d60be71e..bef16ddb 100755 --- a/prez/services/objects.py +++ b/prez/services/objects.py @@ -83,4 +83,5 @@ async def object_function( pmts.generate_response_headers(), pmts.selected["class"], repo, + system_repo, ) diff --git a/prez/services/query_generation/annotations.py b/prez/services/query_generation/annotations.py index 5aef8c2b..36a23bb5 100644 --- a/prez/services/query_generation/annotations.py +++ b/prez/services/query_generation/annotations.py @@ -1,19 +1,80 @@ from typing import List +from prez.config import settings +from prez.reference_data.prez_ns import PREZ from temp.grammar import * class AnnotationsConstructQuery(ConstructQuery): - def __init__( - self, term: IRI, construct_predicate: IRI, select_predicates: List[IRI] - ): + def __init__(self, terms: List[IRI]): + # create terms VALUES clause + # e.g. VALUES ?term { ... } + term_var = Var(value="term") + terms_gpnt = GraphPatternNotTriples( + content=InlineData( + data_block=DataBlock( + block=InlineDataOneVar( + variable=term_var, + datablockvalues=[DataBlockValue(value=term) for term in terms], + ) + ) + ) + ) + + # create prez annotation to annotation properties VALUES clause + # e.g. VALUES ( ?prop ?prezAnotProp ) { (...) (...) } + + prez_anot_var = Var(value="prezAnotProp") + prop_var = Var(value="prop") + all_annotation_tuples = get_prez_annotation_tuples() + props_gpnt = GraphPatternNotTriples( + content=InlineData( + data_block=DataBlock( + block=InlineDataFull( + vars=[prop_var, prez_anot_var], + datablocks=[ + [ + DataBlockValue(value=IRI(value=prop)), + DataBlockValue(value=IRI(value=prez_prop)), + ] + for prop, prez_prop in all_annotation_tuples + ], + ) + ) + ) + ) + + # create a language filter + # e.g. 
FILTER (LANG(?annotation) IN ("en", "")) + anot_var = Var(value="annotation") + in_expr = Expression.create_in_expression( + left_primary_expression=PrimaryExpression( + content=BuiltInCall.create_with_one_expr( + function_name="LANG", + expression=PrimaryExpression(content=anot_var), + ) + ), + operator="IN", + right_primary_expressions=[ + PrimaryExpression(content=RDFLiteral(value=settings.default_language)), + PrimaryExpression(content=RDFLiteral(value="")), + ], + ) + + lang_filter_gpnt = GraphPatternNotTriples( + content=Filter( + constraint=Constraint(content=BrackettedExpression(expression=in_expr)) + ) + ) + + # create the main query components - construct and where clauses construct_template = ConstructTemplate( construct_triples=ConstructTriples( triples=[ SimplifiedTriple( - subject=term, - predicate=construct_predicate, - object=Var(value="annotation"), + subject=term_var, + predicate=prez_anot_var, + object=anot_var, ) ] ) @@ -22,17 +83,18 @@ def __init__( group_graph_pattern=GroupGraphPattern( content=GroupGraphPatternSub( graph_patterns_or_triples_blocks=[ + terms_gpnt, # VALUES ?term { ... } + props_gpnt, # VALUES ( ?prop ?prezAnotProp ) { (...) (...) } TriplesBlock( triples=[ - SimplifiedTriple( - subject=term, - predicate=select_predicates[ - 0 - ], # Assuming a single select predicate for simplicity - object=Var(value="annotation"), + SimplifiedTriple( # ?term ?prop ?annotation + subject=term_var, + predicate=prop_var, + object=anot_var, ) ] - ) + ), + lang_filter_gpnt, # FILTER (LANG(?annotation) IN ("en", "")) ] ) ) @@ -43,3 +105,22 @@ def __init__( where_clause=where_clause, solution_modifier=solution_modifier, ) + + +def get_prez_annotation_tuples(): + label_tuples = [ + (label_prop, PREZ.label) for label_prop in settings.label_predicates + ] + description_tuples = [ + (description_prop, PREZ.description) + for description_prop in settings.description_predicates + ] + provenance_tuples = [ + (provenance_prop, PREZ.provenance) + for provenance_prop in settings.provenance_predicates + ] + other_tuples = [ + (other_prop, PREZ.other) for other_prop in settings.other_predicates + ] + all_tuples = label_tuples + description_tuples + provenance_tuples + other_tuples + return all_tuples diff --git a/temp/grammar/grammar.py b/temp/grammar/grammar.py index 060a9b61..189dd0b1 100755 --- a/temp/grammar/grammar.py +++ b/temp/grammar/grammar.py @@ -4,7 +4,7 @@ from decimal import Decimal from typing import List, Union, Optional, Generator, Tuple -from pydantic import BaseModel, field_validator +from pydantic import BaseModel, field_validator, validator from rdflib import URIRef, Variable from rdflib.plugins.sparql import prepareQuery from rdflib.plugins.sparql.algebra import translateAlgebra @@ -306,7 +306,7 @@ class RelationalExpression(SPARQLGrammarBase): left: NumericExpression operator: Optional[str] = None # '=', '!=', '<', '>', '<=', '>=', 'IN' and 'NOT IN' - right: Optional[Union[NumericExpression, "ExpressionList"]] = None + right: Optional[Union[NumericExpression, ExpressionList]] = None def render(self) -> Generator[str, None, None]: yield from self.left.render() @@ -392,6 +392,45 @@ def from_primary_expr(cls, primary_expression: PrimaryExpression) -> Expression: ) ) + @classmethod + def create_in_expression( + cls, + left_primary_expression: PrimaryExpression, + operator: str, # "IN" or "NOT IN" + right_primary_expressions: List[PrimaryExpression], + ) -> Expression: + """ """ + return cls( + conditional_or_expression=ConditionalOrExpression( + 
conditional_and_expressions=[ + ConditionalAndExpression( + value_logicals=[ + ValueLogical( + relational_expression=RelationalExpression( + left=NumericExpression( + additive_expression=AdditiveExpression( + base_expression=MultiplicativeExpression( + base_expression=UnaryExpression( + primary_expression=left_primary_expression + ) + ) + ) + ), + operator=operator, + right=ExpressionList( + expressions=[ + Expression.from_primary_expr(expr) + for expr in right_primary_expressions + ] + ), + ) + ) + ] + ) + ] + ) + ) + class BrackettedExpression(SPARQLGrammarBase): expression: Expression @@ -428,6 +467,12 @@ class DataBlockValue(SPARQLGrammarBase): value: Union[IRI, RDFLiteral, NumericLiteral, BooleanLiteral, str] + @field_validator("value") + def check_string_is_undef(cls, v): + if isinstance(v, str) and v != "UNDEF": + raise ValueError("Only permitted string value is 'UNDEF'") + return v + def render(self) -> Generator[str, None, None]: if isinstance(self.value, str): yield self.value @@ -442,7 +487,7 @@ class InlineDataFull(SPARQLGrammarBase): """ vars: Union[NIL, List[Var]] - values: List[List[Union[IRI, RDFLiteral]]] + datablocks: List[Union[List[DataBlockValue], NIL]] def render(self) -> Generator[str, None, None]: if self.vars: @@ -454,16 +499,17 @@ def render(self) -> Generator[str, None, None]: else: yield "{" - if self.values_blocks is None: + if self.datablocks is None: yield "()" else: - for values_block in self.values_blocks: - if values_block: + for data_block in self.datablocks: + if data_block: yield "(" - for value in values_block: + for value in data_block: yield from value.render() yield " " yield ")" + yield "\n" else: yield "()" yield "}" @@ -1035,9 +1081,20 @@ class BuiltInCall(SPARQLGrammarBase): @field_validator("function_name") def validate_function_name(cls, v): - implemented = ["URI", "STR", "CONCAT", "SHA256", "LCASE", "isBLANK"] + implemented = [ + "URI", + "STR", + "CONCAT", + "SHA256", + "LCASE", + "isBLANK", + "LANG", + "LANGMATCHES", + ] if v not in implemented: - raise ValueError(f"{v} is not a valid SPARQL built-in function") + raise ValueError( + f"{v} is not a valid SPARQL built-in function or it is not implemented yet" + ) return v def render(self) -> Generator[str, None, None]: diff --git a/tests/data/profiles/ogc_records_profile.ttl b/test_data/ogc_records_profile.ttl similarity index 100% rename from tests/data/profiles/ogc_records_profile.ttl rename to test_data/ogc_records_profile.ttl diff --git a/tests/data/profiles/spaceprez_default_profiles.ttl b/test_data/spaceprez_default_profiles.ttl similarity index 100% rename from tests/data/profiles/spaceprez_default_profiles.ttl rename to test_data/spaceprez_default_profiles.ttl diff --git a/tests/conftest.py b/tests/conftest.py index a0c54127..2fb7a8af 100755 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,3 +2,45 @@ os.environ["SPARQL_REPO_TYPE"] = "pyoxigraph" # os.environ["LOG_LEVEL"] = "DEBUG" + +from pathlib import Path + +import pytest +from fastapi.testclient import TestClient +from pyoxigraph.pyoxigraph import Store + +from prez.app import app +from prez.dependencies import get_repo +from prez.repositories import Repo, PyoxigraphRepo + + +@pytest.fixture(scope="session") +def test_store() -> Store: + # Create a new pyoxigraph Store + store = Store() + + for file in Path(__file__).parent.glob("../test_data/*.ttl"): + store.load(file.read_bytes(), "text/turtle") + + return store + + +@pytest.fixture(scope="session") +def test_repo(test_store: Store) -> Repo: + # Create a 
PyoxigraphQuerySender using the test_store + return PyoxigraphRepo(test_store) + + +@pytest.fixture(scope="session") +def client(test_repo: Repo) -> TestClient: + # Override the dependency to use the test_repo + def override_get_repo(): + return test_repo + + app.dependency_overrides[get_repo] = override_get_repo + + with TestClient(app) as c: + yield c + + # Remove the override to ensure subsequent tests are unaffected + app.dependency_overrides.clear() diff --git a/tests/test_alt_profiles.py b/tests/test_alt_profiles.py index 7608b547..dcce339c 100755 --- a/tests/test_alt_profiles.py +++ b/tests/test_alt_profiles.py @@ -13,52 +13,6 @@ from prez.repositories import Repo, PyoxigraphRepo -@pytest.fixture(scope="session") -def test_store() -> Store: - # Create a new pyoxigraph Store - store = Store() - - file = Path(__file__).parent.parent / "test_data/catprez.ttl" - store.load(file.read_bytes(), "text/turtle") - - return store - - -@pytest.fixture(scope="session") -def test_repo(test_store: Store) -> Repo: - # Create a PyoxigraphQuerySender using the test_store - return PyoxigraphRepo(test_store) - - -def wait_for_app_to_be_ready(client, timeout=10): - start_time = time.time() - while time.time() - start_time < timeout: - try: - response = client.get("/health") - if response.status_code == 200: - return - except Exception as e: - print(e) - time.sleep(0.5) - raise RuntimeError("App did not start within the specified timeout") - - -@pytest.fixture(scope="session") -def client(test_repo: Repo) -> TestClient: - # Override the dependency to use the test_repo - def override_get_repo(): - return test_repo - - app.dependency_overrides[get_repo] = override_get_repo - - with TestClient(app, backend_options={"loop_factory": asyncio.new_event_loop}) as c: - wait_for_app_to_be_ready(c) - yield c - - # Remove the override to ensure subsequent tests are unaffected - app.dependency_overrides.clear() - - @pytest.fixture(scope="session") def a_catalog_link(client): # get link for first catalog diff --git a/tests/test_connegp.py b/tests/test_connegp.py index 0f1ae948..ef39f019 100644 --- a/tests/test_connegp.py +++ b/tests/test_connegp.py @@ -14,9 +14,9 @@ @pytest.fixture(scope="session") def test_store() -> Store: store = Store() - file = Path(__file__).parent / "data/profiles/ogc_records_profile.ttl" + file = Path(__file__).parent.parent / "test_data/ogc_records_profile.ttl" store.load(file.read_bytes(), "text/turtle") - file = Path(__file__).parent / "data/profiles/spaceprez_default_profiles.ttl" + file = Path(__file__).parent.parent / "test_data/spaceprez_default_profiles.ttl" store.load(file.read_bytes(), "text/turtle") return store diff --git a/tests/test_count.py b/tests/test_count.py index e7c0433d..cc2eff06 100755 --- a/tests/test_count.py +++ b/tests/test_count.py @@ -9,40 +9,40 @@ from prez.repositories import Repo, PyoxigraphRepo -@pytest.fixture(scope="session") -def test_store() -> Store: - # Create a new pyoxigraph Store - store = Store() - - for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"): - store.load(file.read_bytes(), "text/turtle") - - return store - - -@pytest.fixture(scope="session") -def test_repo(test_store: Store) -> Repo: - # Create a PyoxigraphQuerySender using the test_store - return PyoxigraphRepo(test_store) - - -@pytest.fixture(scope="session") -def test_client(test_repo: Repo) -> TestClient: - # Override the dependency to use the test_repo - def override_get_repo(): - return test_repo - - app.dependency_overrides[get_repo] = override_get_repo - - with 
TestClient(app) as c: - yield c - - # Remove the override to ensure subsequent tests are unaffected - app.dependency_overrides.clear() - - -def get_curie(test_client: TestClient, iri: str) -> str: - response = test_client.get(f"/identifier/curie/{iri}") +# @pytest.fixture(scope="session") +# def test_store() -> Store: +# # Create a new pyoxigraph Store +# store = Store() +# +# for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"): +# store.load(file.read_bytes(), "text/turtle") +# +# return store +# +# +# @pytest.fixture(scope="session") +# def test_repo(test_store: Store) -> Repo: +# # Create a PyoxigraphQuerySender using the test_store +# return PyoxigraphRepo(test_store) +# +# +# @pytest.fixture(scope="session") +# def client(test_repo: Repo) -> TestClient: +# # Override the dependency to use the test_repo +# def override_get_repo(): +# return test_repo +# +# app.dependency_overrides[get_repo] = override_get_repo +# +# with TestClient(app) as c: +# yield c +# +# # Remove the override to ensure subsequent tests are unaffected +# app.dependency_overrides.clear() + + +def get_curie(client: TestClient, iri: str) -> str: + response = client.get(f"/identifier/curie/{iri}") if response.status_code != 200: raise ValueError(f"Failed to retrieve curie for {iri}. {response.text}") return response.text @@ -72,13 +72,13 @@ def get_curie(test_client: TestClient, iri: str) -> str: ], ) def test_count( - test_client: TestClient, + client: TestClient, iri: str, inbound: str | None, outbound: str | None, count: int, ): - curie = get_curie(test_client, iri) + curie = get_curie(client, iri) params = {"curie": curie, "inbound": inbound, "outbound": outbound} - response = test_client.get(f"/count", params=params) + response = client.get(f"/count", params=params) assert int(response.text) == count diff --git a/tests/test_curie_endpoint.py b/tests/test_curie_endpoint.py index 2909374a..d8b7b354 100755 --- a/tests/test_curie_endpoint.py +++ b/tests/test_curie_endpoint.py @@ -4,18 +4,18 @@ from prez.app import app -@pytest.fixture -def client() -> TestClient: - testclient = TestClient(app) - - # Make a request for the following IRI to ensure - # the curie is available in the 'test_curie' test. - iri = "http://example.com/namespace/test" - response = testclient.get(f"/identifier/curie/{iri}") - assert response.status_code == 200 - assert response.text == "nmspc:test" - - return testclient +# @pytest.fixture +# def client() -> TestClient: +# testclient = TestClient(app) +# +# # Make a request for the following IRI to ensure +# # the curie is available in the 'test_curie' test. 
+# iri = "http://example.com/namespace/test" +# response = testclient.get(f"/identifier/curie/{iri}") +# assert response.status_code == 200 +# assert response.text == "nmspc:test" +# +# return testclient @pytest.mark.parametrize( diff --git a/tests/test_endpoints_cache.py b/tests/test_endpoints_cache.py index 56462677..88affe0e 100755 --- a/tests/test_endpoints_cache.py +++ b/tests/test_endpoints_cache.py @@ -1,55 +1,14 @@ -from pathlib import Path - -import pytest -from fastapi.testclient import TestClient -from pyoxigraph.pyoxigraph import Store from rdflib import Graph -from prez.app import app -from prez.dependencies import get_repo -from prez.repositories import Repo, PyoxigraphRepo - - -@pytest.fixture(scope="session") -def test_store() -> Store: - # Create a new pyoxigraph Store - store = Store() - - for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"): - store.load(file.read_bytes(), "text/turtle") - - return store - - -@pytest.fixture(scope="session") -def test_repo(test_store: Store) -> Repo: - # Create a PyoxigraphQuerySender using the test_store - return PyoxigraphRepo(test_store) - - -@pytest.fixture(scope="session") -def test_client(test_repo: Repo) -> TestClient: - # Override the dependency to use the test_repo - def override_get_repo(): - return test_repo - - app.dependency_overrides[get_repo] = override_get_repo - - with TestClient(app) as c: - yield c - - # Remove the override to ensure subsequent tests are unaffected - app.dependency_overrides.clear() - -def test_reset_cache(test_client): - test_client.get("/reset-tbox-cache") - r = test_client.get("/tbox-cache") +def test_reset_cache(client): + client.get("/reset-tbox-cache") + r = client.get("/tbox-cache") g = Graph().parse(data=r.text) assert len(g) > 6000 # cache expands as tests are run -def test_cache(test_client): - r = test_client.get("/tbox-cache") +def test_cache(client): + r = client.get("/tbox-cache") g = Graph().parse(data=r.text) assert len(g) > 6000 # cache expands as tests are run diff --git a/tests/test_endpoints_catprez.py b/tests/test_endpoints_catprez.py index 050b9fad..c6831127 100755 --- a/tests/test_endpoints_catprez.py +++ b/tests/test_endpoints_catprez.py @@ -1,63 +1,7 @@ -import asyncio -import time -from pathlib import Path - import pytest -from fastapi.testclient import TestClient -from pyoxigraph.pyoxigraph import Store from rdflib import Graph, URIRef from rdflib.namespace import RDF, DCAT -from prez.app import app -from prez.dependencies import get_repo -from prez.repositories import Repo, PyoxigraphRepo - - -@pytest.fixture(scope="session") -def test_store() -> Store: - # Create a new pyoxigraph Store - store = Store() - - file = Path(__file__).parent.parent / "test_data/catprez.ttl" - store.load(file.read_bytes(), "text/turtle") - - return store - - -@pytest.fixture(scope="session") -def test_repo(test_store: Store) -> Repo: - # Create a PyoxigraphQuerySender using the test_store - return PyoxigraphRepo(test_store) - - -def wait_for_app_to_be_ready(client, timeout=10): - start_time = time.time() - while time.time() - start_time < timeout: - try: - response = client.get("/health") - if response.status_code == 200: - return - except Exception as e: - print(e) - time.sleep(0.5) - raise RuntimeError("App did not start within the specified timeout") - - -@pytest.fixture(scope="session") -def client(test_repo: Repo) -> TestClient: - # Override the dependency to use the test_repo - def override_get_repo(): - return test_repo - - app.dependency_overrides[get_repo] = 
override_get_repo - - with TestClient(app, backend_options={"loop_factory": asyncio.new_event_loop}) as c: - wait_for_app_to_be_ready(c) - yield c - - # Remove the override to ensure subsequent tests are unaffected - app.dependency_overrides.clear() - @pytest.fixture(scope="session") def a_catalog_link(client): diff --git a/tests/test_endpoints_management.py b/tests/test_endpoints_management.py index 6afeae72..0f211e72 100755 --- a/tests/test_endpoints_management.py +++ b/tests/test_endpoints_management.py @@ -1,46 +1,6 @@ -from pathlib import Path - -import pytest -from fastapi.testclient import TestClient -from pyoxigraph.pyoxigraph import Store from rdflib import Graph -from prez.app import app -from prez.dependencies import get_repo from prez.reference_data.prez_ns import PREZ -from prez.repositories import Repo, PyoxigraphRepo - - -@pytest.fixture(scope="session") -def test_store() -> Store: - # Create a new pyoxigraph Store - store = Store() - - for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"): - store.load(file.read_bytes(), "text/turtle") - - return store - - -@pytest.fixture(scope="session") -def test_repo(test_store: Store) -> Repo: - # Create a PyoxigraphQuerySender using the test_store - return PyoxigraphRepo(test_store) - - -@pytest.fixture(scope="session") -def client(test_repo: Repo) -> TestClient: - # Override the dependency to use the test_repo - def override_get_repo(): - return test_repo - - app.dependency_overrides[get_repo] = override_get_repo - - with TestClient(app) as c: - yield c - - # Remove the override to ensure subsequent tests are unaffected - app.dependency_overrides.clear() def test_annotation_predicates(client): diff --git a/tests/test_endpoints_object.py b/tests/test_endpoints_object.py index a2670989..63a2f459 100755 --- a/tests/test_endpoints_object.py +++ b/tests/test_endpoints_object.py @@ -1,51 +1,9 @@ -import asyncio -from pathlib import Path - -import pytest -from fastapi.testclient import TestClient -from pyoxigraph.pyoxigraph import Store from rdflib import Graph, URIRef from rdflib.namespace import RDF, GEO -from prez.app import app -from prez.dependencies import get_repo -from prez.repositories import Repo, PyoxigraphRepo - - -@pytest.fixture(scope="session") -def test_store() -> Store: - # Create a new pyoxigraph Store - store = Store() - - for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"): - store.load(file.read_bytes(), "text/turtle") - - return store - - -@pytest.fixture(scope="session") -def test_repo(test_store: Store) -> Repo: - # Create a PyoxigraphQuerySender using the test_store - return PyoxigraphRepo(test_store) - - -@pytest.fixture(scope="session") -def test_client(test_repo: Repo) -> TestClient: - # Override the dependency to use the test_repo - def override_get_repo(): - return test_repo - - app.dependency_overrides[get_repo] = override_get_repo - - with TestClient(app, backend_options={"loop_factory": asyncio.new_event_loop}) as c: - yield c - - # Remove the override to ensure subsequent tests are unaffected - app.dependency_overrides.clear() - -def test_feature_collection(test_client): - r = test_client.get(f"/object?uri=https://test/feature-collection") +def test_feature_collection(client): + r = client.get(f"/object?uri=https://test/feature-collection") response_graph = Graph().parse(data=r.text) assert ( URIRef("https://test/feature-collection"), @@ -54,8 +12,8 @@ def test_feature_collection(test_client): ) in response_graph -def test_feature(test_client): - r = test_client.get( +def 
test_feature(client): + r = client.get( f"/object?uri=https://linked.data.gov.au/datasets/geofabric/hydroid/102208962" ) response_graph = Graph().parse(data=r.text) diff --git a/tests/test_endpoints_profiles.py b/tests/test_endpoints_profiles.py index 35fd78c8..b71d02ed 100755 --- a/tests/test_endpoints_profiles.py +++ b/tests/test_endpoints_profiles.py @@ -1,47 +1,6 @@ -from pathlib import Path - -import pytest -from fastapi.testclient import TestClient -from pyoxigraph.pyoxigraph import Store from rdflib import Graph, URIRef from rdflib.namespace import RDF, PROF -from prez.app import app -from prez.dependencies import get_repo -from prez.repositories import Repo, PyoxigraphRepo - - -@pytest.fixture(scope="session") -def test_store() -> Store: - # Create a new pyoxigraph Store - store = Store() - - for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"): - store.load(file.read_bytes(), "text/turtle") - - return store - - -@pytest.fixture(scope="session") -def test_repo(test_store: Store) -> Repo: - # Create a PyoxigraphQuerySender using the test_store - return PyoxigraphRepo(test_store) - - -@pytest.fixture(scope="session") -def client(test_repo: Repo) -> TestClient: - # Override the dependency to use the test_repo - def override_get_repo(): - return test_repo - - app.dependency_overrides[get_repo] = override_get_repo - - with TestClient(app) as c: - yield c - - # Remove the override to ensure subsequent tests are unaffected - app.dependency_overrides.clear() - def test_profile(client): # check the example remote profile is loaded diff --git a/tests/test_endpoints_spaceprez.py b/tests/test_endpoints_spaceprez.py index a0acf459..c3cb7f6b 100755 --- a/tests/test_endpoints_spaceprez.py +++ b/tests/test_endpoints_spaceprez.py @@ -1,54 +1,13 @@ -import asyncio -from pathlib import Path - import pytest -from fastapi.testclient import TestClient -from pyoxigraph.pyoxigraph import Store from rdflib import Graph, URIRef from rdflib.namespace import RDF, DCAT, GEO -from prez.app import app -from prez.dependencies import get_repo -from prez.repositories import Repo, PyoxigraphRepo - - -@pytest.fixture(scope="session") -def test_store() -> Store: - # Create a new pyoxigraph Store - store = Store() - - file = Path(__file__).parent.parent / "test_data/spaceprez.ttl" - store.load(file.read_bytes(), "text/turtle") - - return store - - -@pytest.fixture(scope="session") -def test_repo(test_store: Store) -> Repo: - # Create a PyoxigraphQuerySender using the test_store - return PyoxigraphRepo(test_store) - - -@pytest.fixture(scope="session") -def client(test_repo: Repo) -> TestClient: - # Override the dependency to use the test_repo - def override_get_repo(): - return test_repo - - app.dependency_overrides[get_repo] = override_get_repo - - with TestClient(app, backend_options={"loop_factory": asyncio.new_event_loop}) as c: - yield c - - # Remove the override to ensure subsequent tests are unaffected - app.dependency_overrides.clear() - @pytest.fixture(scope="session") def a_catalog_link(client): r = client.get("/catalogs") g = Graph().parse(data=r.text) - member_uri = g.value(None, RDF.type, DCAT.Catalog) + member_uri = URIRef("https://example.com/SpacePrezCatalog") link = g.value(member_uri, URIRef(f"https://prez.dev/link", None)) return link diff --git a/tests/test_node_selection_shacl.py b/tests/test_node_selection_shacl.py index 173856e9..5ea8fece 100755 --- a/tests/test_node_selection_shacl.py +++ b/tests/test_node_selection_shacl.py @@ -29,4 +29,4 @@ def 
test_nodeshape_parsing(nodeshape_uri): ) def test_nodeshape_to_grammar(nodeshape_uri): ns = NodeShape(uri=URIRef(nodeshape_uri), graph=endpoints_graph) - ns.to_grammar() + ns.to_string() diff --git a/tests/test_redirect_endpoint.py b/tests/test_redirect_endpoint.py index 1e66b14f..15d12b7b 100755 --- a/tests/test_redirect_endpoint.py +++ b/tests/test_redirect_endpoint.py @@ -1,44 +1,5 @@ -from pathlib import Path - import pytest from fastapi.testclient import TestClient -from pyoxigraph.pyoxigraph import Store - -from prez.app import app -from prez.dependencies import get_repo -from prez.repositories import Repo, PyoxigraphRepo - - -@pytest.fixture(scope="session") -def test_store() -> Store: - # Create a new pyoxigraph Store - store = Store() - - for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"): - store.load(file.read_bytes(), "text/turtle") - - return store - - -@pytest.fixture(scope="session") -def test_repo(test_store: Store) -> Repo: - # Create a PyoxigraphQuerySender using the test_store - return PyoxigraphRepo(test_store) - - -@pytest.fixture(scope="session") -def test_client(test_repo: Repo) -> TestClient: - # Override the dependency to use the test_repo - def override_get_repo(): - return test_repo - - app.dependency_overrides[get_repo] = override_get_repo - - with TestClient(app) as c: - yield c - - # Remove the override to ensure subsequent tests are unaffected - app.dependency_overrides.clear() @pytest.mark.parametrize( @@ -60,7 +21,7 @@ def override_get_repo(): ], ) def test_redirect_endpoint( - test_client: TestClient, + client: TestClient, iri: str, url: str, expected_response_code, @@ -68,7 +29,7 @@ def test_redirect_endpoint( ): params = {"iri": iri} headers = {"accept": accept_header_value} - response = test_client.get( + response = client.get( "/identifier/redirect", params=params, headers=headers, follow_redirects=False ) diff --git a/tests/test_sparql.py b/tests/test_sparql.py index 4ad1f60c..9cde80d1 100755 --- a/tests/test_sparql.py +++ b/tests/test_sparql.py @@ -1,46 +1,3 @@ -from pathlib import Path - -import pytest -from fastapi.testclient import TestClient -from pyoxigraph.pyoxigraph import Store - -from prez.app import app -from prez.dependencies import get_repo -from prez.repositories import Repo, PyoxigraphRepo - - -@pytest.fixture(scope="session") -def test_store() -> Store: - # Create a new pyoxigraph Store - store = Store() - - for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"): - store.load(file.read_bytes(), "text/turtle") - - return store - - -@pytest.fixture(scope="session") -def test_repo(test_store: Store) -> Repo: - # Create a PyoxigraphQuerySender using the test_store - return PyoxigraphRepo(test_store) - - -@pytest.fixture(scope="session") -def client(test_repo: Repo) -> TestClient: - # Override the dependency to use the test_repo - def override_get_repo(): - return test_repo - - app.dependency_overrides[get_repo] = override_get_repo - - with TestClient(app) as c: - yield c - - # Remove the override to ensure subsequent tests are unaffected - app.dependency_overrides.clear() - - def test_select(client): """check that a valid select query returns a 200 response.""" r = client.get( From 25283e23b2943dbc79b36421a2366d2c504f6d0b Mon Sep 17 00:00:00 2001 From: david Date: Fri, 15 Mar 2024 15:34:09 +1000 Subject: [PATCH 22/25] progress commit --- prez/cache.py | 18 +- .../profiles/ogc_records_profile.ttl | 4 +- prez/routers/identifier.py | 4 +- prez/routers/management.py | 65 +- prez/routers/object.py | 2 +- 
prez/routers/ogc_router.py | 12 +- prez/routers/ogc_vocprez.py.old | 14 +- prez/routers/profiles.py | 2 +- prez/routers/search.py | 37 - prez/routers/vocprez.py.unused | 10 +- prez/services/annotations.py | 12 - prez/services/app_service.py | 2 +- prez/services/connegp_service.py | 27 +- prez/services/curie_functions.py | 17 +- prez/services/link_generation.py | 2 +- prez/services/listings.py | 130 +-- prez/services/objects.py | 37 +- prez/services/query_generation/annotations.py | 34 +- prez/services/query_generation/count.py | 66 +- .../node_selection/endpoint_shacl.py | 4 +- .../bnode_depth-1.ttl | 0 .../bnode_depth-2-2.ttl | 0 .../bnode_depth-2.ttl | 0 .../bnode_depth-4.ttl | 0 .../cql/input/example01.json | 0 .../cql/input/example02.json | 0 .../cql/input/example03.json | 0 .../cql/input/example05a.json | 0 .../cql/input/example05b.json | 0 .../cql/input/example06b.json | 0 .../cql/input/example07.json | 0 .../cql/input/example08.json | 0 .../cql/input/example09.json | 0 .../cql/input/example10.json | 0 .../cql/input/example11.json | 0 .../cql/input/example12.json | 0 .../cql/input/example14.json | 0 .../cql/input/example15.json | 0 .../cql/input/example17.json | 0 .../cql/input/example29.json | 0 .../cql/input/example31.json | 0 .../cql/input/example32.json | 0 .../cql/input/example33.json | 0 .../cql/input/example34.json | 0 .../cql/input/example35.json | 0 .../cql/input/example39.json | 0 .../cql/input/geo_intersects.json | 0 test_data/object_vocab_api_bblocks.ttl | 1 - .../redirect-foaf-homepage.ttl | 0 tests/_test_curie_generation.py | 4 +- tests/conftest.py | 65 +- .../expected_responses/resource_anot.ttl | 57 -- .../resource_listing_anot.ttl | 201 ---- .../top_level_catalog_anot.ttl | 46 - .../top_level_catalog_listing_anot.ttl | 41 - tests/data/catprez/input/catprez.ttl | 38 - tests/data/object/expected_responses/fc.ttl | 58 -- .../object/expected_responses/feature.ttl | 59 -- tests/data/profiles/remote_profile.ttl | 19 - .../filter_to_focus_search.ttl | 84 -- .../focus_to_filter_search.ttl | 167 ---- .../expected_responses/dataset_anot.ttl | 79 -- .../dataset_listing_anot.ttl | 53 -- .../expected_responses/feature_anot.ttl | 64 -- .../feature_collection_anot.ttl | 57 -- .../feature_collection_listing_anot.ttl | 69 -- .../feature_listing_anot.ttl | 53 -- .../data/spaceprez/input/geofabric_small.ttl | 110 --- tests/data/spaceprez/input/gnaf_small.ttl | 318 ------- tests/data/spaceprez/input/labels.ttl | 13 - .../data/spaceprez/input/multiple_object.ttl | 30 - tests/data/spaceprez/input/sandgate.ttl | 295 ------ .../input/sandgate/catchments.geojson | 8 - .../input/sandgate/facilities.geojson | 16 - .../spaceprez/input/sandgate/floods.geojson | 10 - .../spaceprez/input/sandgate/roads.geojson | 8 - .../spaceprez/input/sandgate/sandgate.json | 53 -- .../beddingsurfacestructure_top_concepts.ttl | 186 ---- .../collection_listing_anot.ttl | 51 -- .../collection_listing_item.ttl | 375 -------- .../expected_responses/concept-coal.ttl | 35 - .../concept-open-cut-coal-mining.ttl | 67 -- .../concept-with-2-narrower-concepts.ttl | 64 -- .../expected_responses/concept_anot.ttl | 29 - .../concept_scheme_no_children.ttl | 49 - ...cept_scheme_top_concepts_with_children.ttl | 106 --- .../concept_scheme_with_children.ttl | 49 - .../data/vocprez/expected_responses/empty.ttl | 0 .../expected_responses/vocab_listing_anot.ttl | 119 --- .../vocprez/input/absolute-collection.ttl | 42 - tests/data/vocprez/input/alteration-types.ttl | 334 ------- .../vocprez/input/beddingsurfacestructure.ttl | 177 ---- 
.../input/borehole-purpose-no-children.ttl | 26 - tests/data/vocprez/input/borehole-purpose.ttl | 238 ----- .../data/vocprez/input/catalog-of-vocabs.ttl | 12 - tests/data/vocprez/input/contacttype.ttl | 565 ------------ .../data/vocprez/input/dublin_core_terms.ttl | 867 ------------------ tests/data/vocprez/input/reg-status.ttl | 213 ----- .../vocprez/input/vocab-derivation-modes.ttl | 127 --- tests/test_alt_profiles.py | 40 +- tests/test_bnode.py | 2 +- tests/test_connegp.py | 16 +- tests/test_count.py | 49 +- tests/test_curie_endpoint.py | 20 +- tests/test_endpoints_cache.py | 29 +- tests/test_endpoints_catprez.py | 29 +- tests/test_endpoints_object.py | 8 +- tests/test_endpoints_ok.py | 61 +- tests/test_endpoints_profiles.py | 6 - tests/test_endpoints_spaceprez.py | 50 +- tests/test_node_selection_shacl.py | 4 +- 111 files changed, 437 insertions(+), 6183 deletions(-) rename {tests/data/bnode_depth => test_data}/bnode_depth-1.ttl (100%) mode change 100755 => 100644 rename {tests/data/bnode_depth => test_data}/bnode_depth-2-2.ttl (100%) mode change 100755 => 100644 rename {tests/data/bnode_depth => test_data}/bnode_depth-2.ttl (100%) mode change 100755 => 100644 rename {tests/data/bnode_depth => test_data}/bnode_depth-4.ttl (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example01.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example02.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example03.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example05a.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example05b.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example06b.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example07.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example08.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example09.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example10.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example11.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example12.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example14.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example15.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example17.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example29.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example31.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example32.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example33.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example34.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example35.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/example39.json (100%) mode change 100755 => 100644 rename {tests/data => test_data}/cql/input/geo_intersects.json (100%) mode change 100755 => 100644 rename {tests/data/spaceprez/input => test_data}/redirect-foaf-homepage.ttl (100%) mode change 100755 
=> 100644 delete mode 100755 tests/data/catprez/expected_responses/resource_anot.ttl delete mode 100755 tests/data/catprez/expected_responses/resource_listing_anot.ttl delete mode 100755 tests/data/catprez/expected_responses/top_level_catalog_anot.ttl delete mode 100755 tests/data/catprez/expected_responses/top_level_catalog_listing_anot.ttl delete mode 100755 tests/data/catprez/input/catprez.ttl delete mode 100755 tests/data/object/expected_responses/fc.ttl delete mode 100755 tests/data/object/expected_responses/feature.ttl delete mode 100755 tests/data/profiles/remote_profile.ttl delete mode 100755 tests/data/search/expected_responses/filter_to_focus_search.ttl delete mode 100755 tests/data/search/expected_responses/focus_to_filter_search.ttl delete mode 100755 tests/data/spaceprez/expected_responses/dataset_anot.ttl delete mode 100755 tests/data/spaceprez/expected_responses/dataset_listing_anot.ttl delete mode 100755 tests/data/spaceprez/expected_responses/feature_anot.ttl delete mode 100755 tests/data/spaceprez/expected_responses/feature_collection_anot.ttl delete mode 100755 tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl delete mode 100755 tests/data/spaceprez/expected_responses/feature_listing_anot.ttl delete mode 100755 tests/data/spaceprez/input/geofabric_small.ttl delete mode 100755 tests/data/spaceprez/input/gnaf_small.ttl delete mode 100755 tests/data/spaceprez/input/labels.ttl delete mode 100755 tests/data/spaceprez/input/multiple_object.ttl delete mode 100755 tests/data/spaceprez/input/sandgate.ttl delete mode 100755 tests/data/spaceprez/input/sandgate/catchments.geojson delete mode 100755 tests/data/spaceprez/input/sandgate/facilities.geojson delete mode 100755 tests/data/spaceprez/input/sandgate/floods.geojson delete mode 100755 tests/data/spaceprez/input/sandgate/roads.geojson delete mode 100755 tests/data/spaceprez/input/sandgate/sandgate.json delete mode 100755 tests/data/vocprez/expected_responses/beddingsurfacestructure_top_concepts.ttl delete mode 100755 tests/data/vocprez/expected_responses/collection_listing_anot.ttl delete mode 100755 tests/data/vocprez/expected_responses/collection_listing_item.ttl delete mode 100755 tests/data/vocprez/expected_responses/concept-coal.ttl delete mode 100755 tests/data/vocprez/expected_responses/concept-open-cut-coal-mining.ttl delete mode 100755 tests/data/vocprez/expected_responses/concept-with-2-narrower-concepts.ttl delete mode 100755 tests/data/vocprez/expected_responses/concept_anot.ttl delete mode 100755 tests/data/vocprez/expected_responses/concept_scheme_no_children.ttl delete mode 100755 tests/data/vocprez/expected_responses/concept_scheme_top_concepts_with_children.ttl delete mode 100755 tests/data/vocprez/expected_responses/concept_scheme_with_children.ttl delete mode 100755 tests/data/vocprez/expected_responses/empty.ttl delete mode 100755 tests/data/vocprez/expected_responses/vocab_listing_anot.ttl delete mode 100755 tests/data/vocprez/input/absolute-collection.ttl delete mode 100755 tests/data/vocprez/input/alteration-types.ttl delete mode 100755 tests/data/vocprez/input/beddingsurfacestructure.ttl delete mode 100755 tests/data/vocprez/input/borehole-purpose-no-children.ttl delete mode 100755 tests/data/vocprez/input/borehole-purpose.ttl delete mode 100755 tests/data/vocprez/input/catalog-of-vocabs.ttl delete mode 100755 tests/data/vocprez/input/contacttype.ttl delete mode 100755 tests/data/vocprez/input/dublin_core_terms.ttl delete mode 100755 tests/data/vocprez/input/reg-status.ttl delete 
mode 100755 tests/data/vocprez/input/vocab-derivation-modes.ttl diff --git a/prez/cache.py b/prez/cache.py index 1413a52d..848c7397 100755 --- a/prez/cache.py +++ b/prez/cache.py @@ -1,12 +1,9 @@ -from aiocache import Cache +from aiocache import caches from pyoxigraph.pyoxigraph import Store from rdflib import Graph, ConjunctiveGraph, Dataset from prez.repositories import PyoxigraphRepo -tbox_cache = Graph() -tbox_cache_aio = Cache.MEMORY - profiles_graph_cache = ConjunctiveGraph() profiles_graph_cache.bind("prez", "https://prez.dev/") @@ -34,3 +31,16 @@ annotations_repo = PyoxigraphRepo(annotations_store) oxrdflib_store = Graph(store="Oxigraph") + +caches.set_config( + { + "default": { + "cache": "aiocache.SimpleMemoryCache", + "serializer": {"class": "aiocache.serializers.PickleSerializer"}, + }, + "curies": { + "cache": "aiocache.SimpleMemoryCache", + "serializer": {"class": "aiocache.serializers.PickleSerializer"}, + }, + } +) diff --git a/prez/reference_data/profiles/ogc_records_profile.ttl b/prez/reference_data/profiles/ogc_records_profile.ttl index 333ef557..8e973dea 100755 --- a/prez/reference_data/profiles/ogc_records_profile.ttl +++ b/prez/reference_data/profiles/ogc_records_profile.ttl @@ -25,7 +25,7 @@ prez:OGCRecordsProfile altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:hasNodeShape [ a sh:NodeShape ; - sh:targetClass dcat:Catalog , skos:Concept , geo:Feature , geo:FeatureCollection , skos:Collection , prez:SearchResult , prez:CQLObjectList ; + sh:targetClass dcat:Catalog , dcat:Resource , skos:Concept , geo:Feature , geo:FeatureCollection , skos:Collection , prez:SearchResult , prez:CQLObjectList ; altr-ext:hasDefaultProfile prez:OGCListingProfile ] , [ a sh:NodeShape ; @@ -33,7 +33,7 @@ prez:OGCRecordsProfile altr-ext:hasDefaultProfile prez:OGCSchemesListProfile ] , [ a sh:NodeShape ; - sh:targetClass dcat:Catalog , skos:ConceptScheme , skos:Concept , geo:Feature , geo:FeatureCollection , skos:Collection ; + sh:targetClass dcat:Catalog , dcat:Resource , skos:ConceptScheme , skos:Concept , geo:Feature , geo:FeatureCollection , skos:Collection ; altr-ext:hasDefaultProfile prez:OGCItemProfile ] . diff --git a/prez/routers/identifier.py b/prez/routers/identifier.py index 26921899..3d66a6ae 100755 --- a/prez/routers/identifier.py +++ b/prez/routers/identifier.py @@ -74,9 +74,9 @@ def get_curie_route(iri: str): status.HTTP_500_INTERNAL_SERVER_ERROR: {"content": {"application/json": {}}}, }, ) -def get_iri_route(curie: str): +async def get_iri_route(curie: str): try: - return get_uri_for_curie_id(curie) + return await get_uri_for_curie_id(curie) except ValueError as err: raise HTTPException( status.HTTP_400_BAD_REQUEST, f"Invalid input '{curie}'. 
{err}" diff --git a/prez/routers/management.py b/prez/routers/management.py index 4e1f242c..a51324ab 100755 --- a/prez/routers/management.py +++ b/prez/routers/management.py @@ -1,16 +1,19 @@ import logging +import pickle +from aiocache import caches from fastapi import APIRouter from rdflib import BNode from rdflib import Graph, URIRef, Literal from rdflib.collection import Collection +from starlette.requests import Request from starlette.responses import PlainTextResponse from prez.cache import endpoints_graph_cache from prez.config import settings from prez.reference_data.prez_ns import PREZ from prez.renderers.renderer import return_rdf - +from prez.services.connegp_service import RDF_MEDIATYPES router = APIRouter(tags=["Management"]) log = logging.getLogger(__name__) @@ -29,34 +32,38 @@ async def index(): return await return_rdf(g, "text/turtle", profile_headers={}) -# @router.get("/purge-tbox-cache", summary="Reset Tbox Cache") -# async def purge_tbox_cache(): -# """Purges the tbox cache, then re-adds annotations from common ontologies Prez has a copy of -# (reference_data/context_ontologies).""" -# cache = process_term.cache -# cache_size = len(cache._cache) -# test = await cache.multi_get( -# [ -# URIRef("https://prez.dev/profile/prez"), -# URIRef("https://example.com/TopLevelCatalogTwo"), -# URIRef("https://example.com/VocPrezCatalog"), -# URIRef("http://nonoenoenone"), -# ] -# ) -# result = await cache.clear() -# if result: -# return PlainTextResponse(f"{cache_size} terms removed from tbox cache.") -# else: -# return PlainTextResponse("Tbox cache already empty.") - - -# @router.get("/tbox-cache", summary="Show the Tbox Cache") -# async def return_tbox_cache(request: Request): -# """gets the mediatype from the request and returns the tbox cache in this mediatype""" -# mediatype = request.headers.get("Accept").split(",")[0] -# if not mediatype or mediatype not in RDF_MEDIATYPES: -# mediatype = "text/turtle" -# return await return_rdf(tbox_cache, mediatype, profile_headers={}) +@router.get("/purge-tbox-cache", summary="Reset Tbox Cache") +async def purge_tbox_cache(): + """Purges the tbox cache, then re-adds annotations from common ontologies Prez has a copy of + (reference_data/context_ontologies).""" + cache = caches.get("default") + cache_size = len(cache._cache) + result = await cache.clear() + if result and cache_size > 0: + return PlainTextResponse(f"{cache_size} terms removed from tbox cache.") + elif result and cache_size == 0: + return PlainTextResponse("Tbox cache already empty.") + elif not result: + raise Exception("Internal Error: Tbox cache not purged.") + + +@router.get("/tbox-cache", summary="Show the Tbox Cache") +async def return_tbox_cache(request: Request): + """gets the mediatype from the request and returns the tbox cache in this mediatype""" + mediatype = request.headers.get("Accept").split(",")[0] + if not mediatype or mediatype not in RDF_MEDIATYPES: + mediatype = "text/turtle" + cache = caches.get("default") + cache_g = Graph() + cache_dict = cache._cache + for subject, pred_obj_bytes in cache_dict.items(): + # use pickle to deserialize the pred_obj_bytes + pred_obj = pickle.loads(pred_obj_bytes) + for pred, obj in pred_obj: + if pred_obj: # cache entry for a URI can be empty - i.e. 
no annotations found for URI + # Add the expanded triple (subject, predicate, object) to 'annotations_g' + cache_g.add((subject, pred, obj)) + return await return_rdf(cache_g, mediatype, profile_headers={}) def unpack_cache(): diff --git a/prez/routers/object.py b/prez/routers/object.py index fdab0572..af6c64e8 100755 --- a/prez/routers/object.py +++ b/prez/routers/object.py @@ -43,7 +43,7 @@ async def count_route( repo=Depends(get_repo), ): """Get an Object's statements count based on the inbound or outbound predicate""" - iri = get_iri_route(curie) + iri = await get_iri_route(curie) if inbound is None and outbound is None: raise HTTPException( diff --git a/prez/routers/ogc_router.py b/prez/routers/ogc_router.py index 33b0a3f6..5462e44e 100755 --- a/prez/routers/ogc_router.py +++ b/prez/routers/ogc_router.py @@ -59,7 +59,7 @@ async def collection_listing( ): search_term = request.query_params.get("q") - path_node_1_uri = get_uri_for_curie_id(request.path_params["catalogId"]) + path_node_1_uri = await get_uri_for_curie_id(request.path_params["catalogId"]) endpoint_uri = URIRef(request.scope.get("route").name) return await listing_function( request, @@ -88,8 +88,8 @@ async def item_listing( system_repo: Repo = Depends(get_system_repo), ): search_term = request.query_params.get("q") - path_node_1_uri = get_uri_for_curie_id(request.path_params["collectionId"]) - path_node_2_uri = get_uri_for_curie_id(request.path_params["catalogId"]) + path_node_1_uri = await get_uri_for_curie_id(request.path_params["collectionId"]) + path_node_2_uri = await get_uri_for_curie_id(request.path_params["catalogId"]) endpoint_uri = URIRef(request.scope.get("route").name) return await listing_function( request, @@ -119,7 +119,7 @@ async def catalog_object( ): request_url = request.scope["path"] endpoint_uri = URIRef(request.scope.get("route").name) - object_uri = get_uri_for_curie_id(request.path_params["catalogId"]) + object_uri = await get_uri_for_curie_id(request.path_params["catalogId"]) return await object_function( request, endpoint_uri, object_uri, request_url, repo, system_repo ) @@ -137,7 +137,7 @@ async def collection_object( ): request_url = request.scope["path"] endpoint_uri = URIRef(request.scope.get("route").name) - object_uri = get_uri_for_curie_id(request.path_params["collectionId"]) + object_uri = await get_uri_for_curie_id(request.path_params["collectionId"]) return await object_function( request, endpoint_uri, object_uri, request_url, repo, system_repo ) @@ -155,7 +155,7 @@ async def item_object( ): request_url = request.scope["path"] endpoint_uri = URIRef(request.scope.get("route").name) - object_uri = get_uri_for_curie_id(request.path_params["itemId"]) + object_uri = await get_uri_for_curie_id(request.path_params["itemId"]) return await object_function( request, endpoint_uri, object_uri, request_url, repo, system_repo ) diff --git a/prez/routers/ogc_vocprez.py.old b/prez/routers/ogc_vocprez.py.old index 26906c60..508d818d 100755 --- a/prez/routers/ogc_vocprez.py.old +++ b/prez/routers/ogc_vocprez.py.old @@ -71,7 +71,7 @@ async def vocab_list( system_repo: Repo = Depends(get_system_repo), ): search_term = request.query_params.get("q") - parent_uri = get_uri_for_curie_id(request.path_params["catalogId"]) + parent_uri = await get_uri_for_curie_id(request.path_params["catalogId"]) endpoint_uri = URIRef(request.scope.get("route").name) return await listing_function( request, @@ -99,7 +99,7 @@ async def concept_list( system_repo: Repo = Depends(get_system_repo), ): search_term = 
request.query_params.get("q") - parent_uri = get_uri_for_curie_id(request.path_params["collectionId"]) + parent_uri = await get_uri_for_curie_id(request.path_params["collectionId"]) endpoint_uri = URIRef(request.scope.get("route").name) return await listing_function( request, @@ -120,7 +120,7 @@ async def top_concepts( repo: Repo = Depends(get_repo), system_repo: Repo = Depends(get_system_repo), ): - parent_uri = get_uri_for_curie_id(request.path_params["collectionId"]) + parent_uri = await get_uri_for_curie_id(request.path_params["collectionId"]) endpoint_uri = URIRef(ogc_endpoints["top-concepts"]) return await listing_function( request, @@ -140,7 +140,7 @@ async def narrowers( repo: Repo = Depends(get_repo), system_repo: Repo = Depends(get_system_repo), ): - parent_uri = get_uri_for_curie_id(request.path_params["itemId"]) + parent_uri = await get_uri_for_curie_id(request.path_params["itemId"]) endpoint_uri = URIRef(ogc_endpoints["narrowers"]) return await listing_function( request, @@ -165,7 +165,7 @@ async def catalog_object( ): request_url = request.scope["path"] endpoint_uri = URIRef(request.scope.get("route").name) - object_uri = get_uri_for_curie_id(request.path_params["catalogId"]) + object_uri = await get_uri_for_curie_id(request.path_params["catalogId"]) return await object_function( request, endpoint_uri, object_uri, request_url, repo, system_repo ) @@ -189,7 +189,7 @@ async def catalog_object( ) request_url = request.scope["path"] endpoint_uri = URIRef(request.scope.get("route").name) - object_uri = get_uri_for_curie_id(request.path_params["collectionId"]) + object_uri = await get_uri_for_curie_id(request.path_params["collectionId"]) return await object_function( request, endpoint_uri, object_uri, request_url, repo, system_repo ) @@ -214,7 +214,7 @@ async def catalog_object( ) request_url = request.scope["path"] endpoint_uri = URIRef(request.scope.get("route").name) - object_uri = get_uri_for_curie_id(request.path_params["itemId"]) + object_uri = await get_uri_for_curie_id(request.path_params["itemId"]) return await object_function( request, endpoint_uri, object_uri, request_url, repo, system_repo ) diff --git a/prez/routers/profiles.py b/prez/routers/profiles.py index 72446949..fdfb8b3b 100755 --- a/prez/routers/profiles.py +++ b/prez/routers/profiles.py @@ -41,7 +41,7 @@ async def profiles( async def profile(request: Request, profile_curie: str, repo=Depends(get_system_repo)): request_url = request.scope["path"] endpoint_uri = URIRef(request.scope.get("route").name) - profile_uri = get_uri_for_curie_id(profile_curie) + profile_uri = await get_uri_for_curie_id(profile_curie) return await object_function( request=request, endpoint_uri=endpoint_uri, diff --git a/prez/routers/search.py b/prez/routers/search.py index 2830cdc8..a2dc451c 100755 --- a/prez/routers/search.py +++ b/prez/routers/search.py @@ -38,40 +38,3 @@ async def search( per_page=per_page, search_term=term, ) - - # term = request.query_params.get("q") - # limit = request.query_params.get("limit", 10) - # offset = request.query_params.get("offset", 0) - # pred_vals = request.query_params.get("predicates", settings.label_predicates) - # query = SearchQuery( - # search_term=term, - # limit=limit, - # offset=offset, - # pred_vals=pred_vals, - # ).render() - # graph, _ = await repo.send_queries([query], []) - # - # count = len(list(graph.subjects(RDF.type, PREZ.SearchResult))) - # graph.add((PREZ.SearchResult, PREZ["count"], Literal(count))) - # - # prof_and_mt_info = ProfilesMediatypesInfo( - # request=request, 
classes=frozenset([PREZ.SearchResult]), system_repo=system_repo - # ) - # await populate_profile_and_mediatype(prof_and_mt_info, system_repo) - # - # req_mt = prof_and_mt_info.req_mediatypes - # if req_mt: - # if list(req_mt)[0] == "application/sparql-query": - # return PlainTextResponse(query, media_type="application/sparql-query") - # - # if "anot+" in prof_and_mt_info.mediatype: - # await add_prez_links(graph, repo) - # - # return await return_from_graph( - # graph, - # mediatype=prof_and_mt_info.mediatype, - # profile=URIRef("https://prez.dev/profile/open-object"), - # profile_headers=prof_and_mt_info.profile_headers, - # selected_class=prof_and_mt_info.selected_class, - # repo=repo, - # ) diff --git a/prez/routers/vocprez.py.unused b/prez/routers/vocprez.py.unused index dcc16e50..f95a8b4a 100755 --- a/prez/routers/vocprez.py.unused +++ b/prez/routers/vocprez.py.unused @@ -104,7 +104,7 @@ async def vocprez_scheme( async def concept_scheme_route(request: Request, repo: Repo = Depends(get_repo)): request_url = request.scope["path"] endpoint_uri = URIRef(vp_endpoints["vocab-object"]) - object_uri = get_uri_for_curie_id(request.path_params["concept_scheme_curie"]) + object_uri = await get_uri_for_curie_id(request.path_params["concept_scheme_curie"]) return await object_function_new( request, endpoint_uri, request_url, repo, object_uri ) @@ -156,7 +156,7 @@ async def cs_narrowers_endpoint( per_page: int = 20, ): endpoint_uri = URIRef(vp_endpoints["cs-children"]) - parent_uri = get_uri_for_curie_id(concept_curie) + parent_uri = await get_uri_for_curie_id(concept_curie) return await listing_function_new( request=request, repo=repo, @@ -181,7 +181,7 @@ async def cs_narrowers_endpoint( async def concept_scheme_route(request: Request, repo: Repo = Depends(get_repo)): request_url = request.scope["path"] endpoint_uri = URIRef(vp_endpoints["vocab-concept"]) - object_uri = get_uri_for_curie_id(request.path_params["concept_curie"]) + object_uri = await get_uri_for_curie_id(request.path_params["concept_curie"]) return await object_function_new( request, endpoint_uri, request_url, repo, object_uri ) @@ -195,7 +195,7 @@ async def concept_scheme_route(request: Request, repo: Repo = Depends(get_repo)) async def concept_scheme_route(request: Request, repo: Repo = Depends(get_repo)): request_url = request.scope["path"] endpoint_uri = URIRef(vp_endpoints["concept-object"]) - object_uri = get_uri_for_curie_id(request.path_params["concept_curie"]) + object_uri = await get_uri_for_curie_id(request.path_params["concept_curie"]) return await object_function_new( request, endpoint_uri, request_url, repo, object_uri ) @@ -209,7 +209,7 @@ async def concept_scheme_route(request: Request, repo: Repo = Depends(get_repo)) async def concept_scheme_route(request: Request, repo: Repo = Depends(get_repo)): request_url = request.scope["path"] endpoint_uri = URIRef(vp_endpoints["collection-concept"]) - object_uri = get_uri_for_curie_id(request.path_params["concept_curie"]) + object_uri = await get_uri_for_curie_id(request.path_params["concept_curie"]) return await object_function_new( request, endpoint_uri, request_url, repo, object_uri ) diff --git a/prez/services/annotations.py b/prez/services/annotations.py index 64ca3860..96c1f05c 100755 --- a/prez/services/annotations.py +++ b/prez/services/annotations.py @@ -3,7 +3,6 @@ from aiocache import caches from rdflib import Graph, URIRef, Literal - from prez.dependencies import get_annotations_repo from prez.repositories import Repo from 
prez.services.query_generation.annotations import (
@@ -13,17 +12,6 @@
log = logging.getLogger(__name__)

-pred = IRI(value=URIRef("https://prez.dev/label"))
-
-caches.set_config(
-    {
-        "default": {
-            "cache": "aiocache.SimpleMemoryCache",
-            "serializer": {"class": "aiocache.serializers.PickleSerializer"},
-        }
-    }
-)
-
async def process_terms(terms_and_dtypes: Set[URIRef], repo: Repo, system_repo: Repo):
    """
diff --git a/prez/services/app_service.py b/prez/services/app_service.py
index e434e351..98f46a75 100755
--- a/prez/services/app_service.py
+++ b/prez/services/app_service.py
@@ -84,7 +84,7 @@ async def add_prefixes_to_prefix_graph(repo: Repo):
    log.info("Prefixes from local files added to prefix graph")

    if settings.disable_prefix_generation:
-        log.info("DISABLE_PREFIX_GENERATION set to false. Skipping prefix generation.")
+        log.info("DISABLE_PREFIX_GENERATION set to true. Skipping prefix generation.")
    else:
        query = """
        SELECT DISTINCT ?iri
diff --git a/prez/services/connegp_service.py b/prez/services/connegp_service.py
index 918d730b..c41d0336 100755
--- a/prez/services/connegp_service.py
+++ b/prez/services/connegp_service.py
@@ -9,7 +9,7 @@
from prez.repositories.base import Repo
from prez.services.curie_functions import get_curie_id_for_uri, get_uri_for_curie_id

-logger = logging.getLogger("prez")
+log = logging.getLogger(__name__)

RDF_MEDIATYPES = [
    "text/turtle",
@@ -70,12 +70,11 @@ class NegotiatedPMTs(BaseModel):
    class Config:
        arbitrary_types_allowed = True

-    async def setup(self) -> bool:
+    async def setup(self):
        self.requested_profiles = await self._get_requested_profiles()
        self.requested_mediatypes = await self._get_requested_mediatypes()
        self.available = await self._get_available()
-        self.selected = await self._get_selected()
-        return True if self.selected else False
+        self.selected = self.available[0]

    async def _resolve_token(self, token: str) -> str:
        query_str: str = dedent(
@@ -117,22 +116,23 @@ async def _tupilize(
                parts[0]
            )  # then try to resolve the token to a URI
        except TokenError as e:
-            logger.error(e.args[0])
+            log.error(e.args[0])
            try:  # if token resolution fails, try to resolve as a curie
-                result = str(get_uri_for_curie_id(parts[0]))
+                result = await get_uri_for_curie_id(parts[0])
+                result = str(result)
                parts[0] = "<" + result + ">"
            except ValueError as e:
                parts[
                    0
                ] = ""  # if curie resolution failed, then the profile is invalid
-                logger.error(e.args[0])
+                log.error(e.args[0])
        if len(parts) == 1:
            parts.append(self.default_weighting)  # If no weight given, set the default
        else:
            try:
                parts[1] = float(parts[1])  # Type-check the separated weighting
            except ValueError as e:
-                logger.debug(
+                log.debug(
                    f"Could not cast q={parts[1]} as float. Defaulting to {self.default_weighting}. {e.args[0]}"
                )
        return parts[0], parts[1]
@@ -157,7 +157,7 @@ async def _get_requested_profiles(self) -> list[tuple[str, float]] | None:

    async def _get_requested_mediatypes(self) -> list[tuple[str, float]] | None:
        raw_mediatypes: str = self.params.get(
-            "_media", ""
+            "_mediatype", ""
        )  # Prefer mediatypes declared in the QSA, as per the spec.
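        # Illustrative example: a value of `text/anot+turtle;q=0.9,text/turtle` here
        # would end up, after splitting on "," and weighting each part via _tupilize
        # above, as [("text/anot+turtle", 0.9), ("text/turtle", self.default_weighting)],
        # the second entry receiving the default weighting because it carries no q-value.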
        if not raw_mediatypes:
            raw_mediatypes: str = self.headers.get("accept", "")
@@ -181,11 +181,10 @@ async def _get_available(self) -> list[dict]:
            }
            for result in repo_response[1][0][1]
        ]
+        if not available:
+            raise NoProfilesException(self.classes)
        return available

-    async def _get_selected(self) -> dict:
-        return self.available[0]
-
    def generate_response_headers(self) -> dict:
        profile_uri = ""
        distinct_profiles = {(pmt["profile"], pmt["title"]) for pmt in self.available}
@@ -217,7 +216,7 @@ def _compose_select_query(self) -> str:
            ]  # TODO: handle multiple requested profiles
        except TypeError as e:
            requested_profile = None
-            logger.debug(f"{e}. normally this just means no profiles were requested")
+            log.debug(f"{e}. normally this just means no profiles were requested")

        query = dedent(
            f"""
@@ -255,8 +254,6 @@ def _compose_select_query(self) -> str:
            ORDER BY DESC(?req_profile) DESC(?distance) DESC(?def_profile) DESC(?req_format) DESC(?def_format)
            """
        )
-
-        logger.debug(f"ConnegP query: {query}")
        return query

    def _generate_mediatype_if_statements(self) -> str:
diff --git a/prez/services/curie_functions.py b/prez/services/curie_functions.py
index b9b44ccd..1b20460a 100755
--- a/prez/services/curie_functions.py
+++ b/prez/services/curie_functions.py
@@ -1,10 +1,12 @@
import logging
from urllib.parse import urlparse

+from aiocache.serializers import PickleSerializer
from rdflib import URIRef

from prez.cache import prefix_graph
from prez.config import settings
+from aiocache import cached, Cache, caches

log = logging.getLogger(__name__)
@@ -85,10 +87,17 @@ def get_curie_id_for_uri(uri: URIRef) -> str:
    return f"{qname[0]}{separator}{qname[2]}"


-def get_uri_for_curie_id(curie_id: str):
+async def get_uri_for_curie_id(curie_id: str):
    """
    Returns a URI for a given CURIE id with the specified separator
    """
-    separator = settings.curie_separator
-    curie = curie_id.replace(separator, ":")
-    return prefix_graph.namespace_manager.expand_curie(curie)
+    curie_cache = caches.get("curies")
+    result = await curie_cache.get(curie_id)
+    if result:
+        return result
+    else:
+        separator = settings.curie_separator
+        curie = curie_id.replace(separator, ":")
+        uri = prefix_graph.namespace_manager.expand_curie(curie)
+        await curie_cache.set(curie_id, uri)  # set() is a coroutine; without await the cache is never populated
+        return uri
diff --git a/prez/services/link_generation.py b/prez/services/link_generation.py
index e269d585..603598b8 100755
--- a/prez/services/link_generation.py
+++ b/prez/services/link_generation.py
@@ -121,7 +121,7 @@ async def add_links_to_graph_and_cache(
        quads.append(
            (uri, DCTERMS.identifier, Literal(curie_for_uri, datatype=PREZ.identifier), uri)
        )
-        if members_link:
+        if members_link:  # TODO: need to confirm the link value doesn't match the existing link value, as multiple endpoints can deliver the same class/have different links for the same URI
            existing_members_link = list(
                links_ids_graph_cache.quads((uri, PREZ["members"], None, uri))
            )
diff --git a/prez/services/listings.py b/prez/services/listings.py
index b6da4b15..5f00c034 100755
--- a/prez/services/listings.py
+++ b/prez/services/listings.py
@@ -9,7 +9,7 @@
from prez.cache import profiles_graph_cache, endpoints_graph_cache
from prez.config import settings
-from prez.reference_data.prez_ns import PREZ
+from prez.reference_data.prez_ns import PREZ, ALTREXT, ONT
from prez.renderers.renderer import return_from_graph
from prez.repositories import Repo
from prez.services.connegp_service import NegotiatedPMTs
@@ -26,17 +26,17 @@ async def listing_function(
-        request: Request,
-        repo: Repo,
-        system_repo: Repo,
-
diff --git a/prez/services/curie_functions.py b/prez/services/curie_functions.py
index b9b44ccd..1b20460a 100755
--- a/prez/services/curie_functions.py
+++ b/prez/services/curie_functions.py
@@ -1,10 +1,11 @@
 import logging
 from urllib.parse import urlparse
 
+from aiocache import caches
 from rdflib import URIRef
 
 from prez.cache import prefix_graph
 from prez.config import settings
 
 log = logging.getLogger(__name__)
@@ -85,10 +87,17 @@ def get_curie_id_for_uri(uri: URIRef) -> str:
     return f"{qname[0]}{separator}{qname[2]}"
 
 
-def get_uri_for_curie_id(curie_id: str):
+async def get_uri_for_curie_id(curie_id: str):
     """
     Returns a URI for a given CURIE id with the specified separator
     """
-    separator = settings.curie_separator
-    curie = curie_id.replace(separator, ":")
-    return prefix_graph.namespace_manager.expand_curie(curie)
+    curie_cache = caches.get("curies")
+    result = await curie_cache.get(curie_id)
+    if result:
+        return result
+    else:
+        separator = settings.curie_separator
+        curie = curie_id.replace(separator, ":")
+        uri = prefix_graph.namespace_manager.expand_curie(curie)
+        await curie_cache.set(curie_id, uri)
+        return uri
diff --git a/prez/services/link_generation.py b/prez/services/link_generation.py
index e269d585..603598b8 100755
--- a/prez/services/link_generation.py
+++ b/prez/services/link_generation.py
@@ -121,7 +121,7 @@ async def add_links_to_graph_and_cache(
         quads.append(
             (uri, DCTERMS.identifier, Literal(curie_for_uri, datatype=PREZ.identifier), uri)
         )
-        if members_link:
+        if members_link:  # TODO: need to confirm the link value doesn't match the existing link value, as multiple endpoints can deliver the same class / have different links for the same URI
             existing_members_link = list(
                 links_ids_graph_cache.quads((uri, PREZ["members"], None, uri))
             )
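
get_uri_for_curie_id is now an async read-through cache. The module-level caches.set_config(...) removed from annotations.py above presumably registers the "curies" alias somewhere at startup (not shown in this patch). A minimal sketch of the same pattern against a bare in-memory aiocache Cache; expand() here is a toy stand-in for prefix_graph.namespace_manager.expand_curie:

# Sketch only: read-through caching with aiocache, assuming a toy prefix map.
import asyncio

from aiocache import Cache

cache = Cache(Cache.MEMORY)  # stands in for caches.get("curies")

PREFIXES = {"skos": "http://www.w3.org/2004/02/skos/core#"}


def expand(curie: str) -> str:
    prefix, _, local = curie.partition(":")
    return PREFIXES[prefix] + local


async def get_uri(curie_id: str) -> str:
    cached = await cache.get(curie_id)  # cache hit: skip expansion
    if cached:
        return cached
    uri = expand(curie_id)
    await cache.set(curie_id, uri)  # set() is a coroutine, so it must be awaited
    return uri


print(asyncio.run(get_uri("skos:prefLabel")))
# http://www.w3.org/2004/02/skos/core#prefLabel
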
involving CQL - # queries.append(temp_listing_count(subselect, count_class)) - - if pmts.selected["profile"] == URIRef( - "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile" - ): - item_graph, _ = await system_repo.send_queries(queries, []) - if "anot+" in pmts.selected["mediatype"]: - await add_prez_links( - item_graph, system_repo, endpoint_structure=("profiles",) - ) + if pmts.selected["profile"] == ALTREXT["alt-profile"]: + query_repo = system_repo + endpoint_structure = ("profiles",) else: - item_graph, _ = await repo.send_queries(queries, []) - if "anot+" in pmts.selected["mediatype"]: - await add_prez_links(item_graph, repo, endpoint_structure) + query_repo = repo + endpoint_structure = endpoint_structure + + item_graph, _ = await query_repo.send_queries(queries, []) + if "anot+" in pmts.selected["mediatype"]: + await add_prez_links( # TODO can this go under return_from_graph? + item_graph, query_repo, endpoint_structure + ) + # count search results - hard to do in SPARQL as the SELECT part of the query is NOT aggregated if search_term: count = len(list(item_graph.subjects(RDF.type, PREZ.SearchResult))) @@ -180,8 +151,41 @@ async def listing_function( ) +async def handle_cql(cql_gpnt_list, cql_parser, cql_triples_list): + cql_parser.parse() + cql_select_ggps = cql_parser.ggps_inner_select + if cql_select_ggps.triples_block: + cql_triples_list = cql_select_ggps.triples_block.triples + if cql_select_ggps.graph_patterns_or_triples_blocks: + for pattern in cql_select_ggps.graph_patterns_or_triples_blocks: + if isinstance(pattern, TriplesBlock): + cql_triples_list += pattern.triples + elif isinstance(pattern, GraphPatternNotTriples): + cql_gpnt_list.append(pattern) + return cql_triples_list + + +async def handle_alternate_profile(current_endpoint_uri, pmts, runtime_values): + # determine whether we are displaying alternate profiles for a LISTING or OBJECT + ep_type = list(endpoints_graph_cache.objects(current_endpoint_uri, RDF.type)) + if ONT["ObjectEndpoint"] in ep_type: + nodeshape_uri = URIRef("http://example.org/ns#AltProfilesForObject") + elif ONT["ListingEndpoint"] in ep_type: + nodeshape_uri = URIRef("http://example.org/ns#AltProfilesForListing") + ns = NodeShape( + uri=nodeshape_uri, + graph=endpoints_graph_cache, + path_nodes={"path_node_1": IRI(value=pmts.selected["class"])}, + ) + ns_triples = ns.triples_list + ns_gpnt = ns.gpnt_list + new_endpoint_uri = URIRef("https://prez.dev/endpoint/system/alt-profiles-listing") + runtime_values["selectedClass"] = pmts.selected["class"] + return new_endpoint_uri, ns_gpnt, ns_triples + + async def get_shacl_node_selection( - endpoint_uri, hierarchy_level, path_nodes, repo, system_repo + endpoint_uri, hierarchy_level, path_nodes, repo, system_repo ): """ Determines the relevant nodeshape based on the endpoint, hierarchy level, and parent URI diff --git a/prez/services/objects.py b/prez/services/objects.py index bef16ddb..3359ba22 100755 --- a/prez/services/objects.py +++ b/prez/services/objects.py @@ -7,11 +7,12 @@ from prez.cache import endpoints_graph_cache, profiles_graph_cache from prez.config import settings -from prez.reference_data.prez_ns import EP +from prez.reference_data.prez_ns import EP, ALTREXT from prez.renderers.renderer import return_from_graph from prez.repositories import Repo from prez.services.connegp_service import NegotiatedPMTs from prez.services.link_generation import add_prez_links +from prez.services.listings import listing_function from prez.services.query_generation.classes import get_classes from 
diff --git a/prez/services/objects.py b/prez/services/objects.py
index bef16ddb..3359ba22 100755
--- a/prez/services/objects.py
+++ b/prez/services/objects.py
@@ -7,11 +7,12 @@
 from prez.cache import endpoints_graph_cache, profiles_graph_cache
 from prez.config import settings
-from prez.reference_data.prez_ns import EP
+from prez.reference_data.prez_ns import EP, ALTREXT
 from prez.renderers.renderer import return_from_graph
 from prez.repositories import Repo
 from prez.services.connegp_service import NegotiatedPMTs
 from prez.services.link_generation import add_prez_links
+from prez.services.listings import listing_function
 from prez.services.query_generation.classes import get_classes
 from prez.services.query_generation.umbrella import PrezQueryConstructor
 from temp.grammar import IRI
@@ -35,27 +36,32 @@ async def object_function(
         classes=classes,
         system_repo=system_repo,
     )
-    success = await pmts.setup()
-    if not success:
-        log.error("ConnegP Error. NegotiatedPMTs.setup() was not successful")
+    await pmts.setup()
 
     # handle alternate profiles
-    runtime_values = {}
-    if pmts.selected["profile"] == URIRef(
-        "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile"
-    ):
-        endpoint_uri = URIRef("https://prez.dev/endpoint/system/alt-profiles-listing")
-        # runtime_values["selectedClass"] = prof_and_mt_info.selected_class
+    if pmts.selected["profile"] == ALTREXT["alt-profile"]:
+        return await listing_function(
+            request=request,
+            repo=repo,
+            system_repo=system_repo,
+            endpoint_uri=endpoint_uri,
+            hierarchy_level=1,
+        )
 
-    # runtime_values["object"] = uri
+    runtime_values = {}
+    listing_or_object = "object"
+    ns_gpnt = []
+    ns_triples = []
     query_constructor = PrezQueryConstructor(
         runtime_values=runtime_values,
         endpoint_graph=endpoints_graph_cache,
         profile_graph=profiles_graph_cache,
-        listing_or_object="object",
+        listing_or_object=listing_or_object,
         focus_node=IRI(value=uri),
         endpoint_uri=endpoint_uri,
         profile_uri=pmts.selected["profile"],
+        endpoint_shacl_triples=ns_triples,
+        endpoint_shacl_gpnt=ns_gpnt,
     )
     query_constructor.generate_sparql()
     query = query_constructor.sparql
@@ -66,12 +72,7 @@ async def object_function(
     except IndexError as e:
         log.debug(e.args[0])
 
-    if pmts.selected["profile"] == URIRef(
-        "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile"
-    ):
-        item_graph, _ = await system_repo.send_queries([query], [])
-    else:
-        item_graph, _ = await repo.send_queries([query], [])
+    item_graph, _ = await repo.send_queries([query], [])
     if "anot+" in pmts.selected["mediatype"]:
         if not endpoint_uri == EP.object:
             await add_prez_links(item_graph, repo, endpoint_structure)
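
With this change, alternate-profile requests against an object endpoint are no longer special-cased inside object_function; they are delegated wholesale to listing_function, which lists the available profiles. A runnable toy of that dispatch shape; apart from the altr-ext profile URI, all names below are stand-ins rather than Prez's API:

# Sketch only: the alt-profile delegation pattern, reduced to toy coroutines.
import asyncio

ALT_PROFILE = "http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile"


async def listing_function(hierarchy_level: int) -> str:
    return f"alt-profiles listing (hierarchy level {hierarchy_level})"


async def object_function(selected_profile: str) -> str:
    if selected_profile == ALT_PROFILE:
        # alternate profiles are rendered as a listing, not as an object
        return await listing_function(hierarchy_level=1)
    return "object response"


assert asyncio.run(object_function(ALT_PROFILE)).startswith("alt-profiles listing")
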
diff --git a/prez/services/query_generation/annotations.py b/prez/services/query_generation/annotations.py
index 36a23bb5..cfbd7f90 100644
--- a/prez/services/query_generation/annotations.py
+++ b/prez/services/query_generation/annotations.py
@@ -47,23 +47,27 @@ def __init__(self, terms: List[IRI]):
         # create a language filter
         # e.g. FILTER (LANG(?annotation) IN ("en", ""))
         anot_var = Var(value="annotation")
-        in_expr = Expression.create_in_expression(
-            left_primary_expression=PrimaryExpression(
-                content=BuiltInCall.create_with_one_expr(
-                    function_name="LANG",
-                    expression=PrimaryExpression(content=anot_var),
-                )
-            ),
-            operator="IN",
-            right_primary_expressions=[
-                PrimaryExpression(content=RDFLiteral(value=settings.default_language)),
-                PrimaryExpression(content=RDFLiteral(value="")),
-            ],
-        )
-
         lang_filter_gpnt = GraphPatternNotTriples(
             content=Filter(
-                constraint=Constraint(content=BrackettedExpression(expression=in_expr))
+                constraint=Constraint(
+                    content=BrackettedExpression(
+                        expression=Expression.create_in_expression(
+                            left_primary_expression=PrimaryExpression(
+                                content=BuiltInCall.create_with_one_expr(
+                                    function_name="LANG",
+                                    expression=PrimaryExpression(content=anot_var),
+                                )
+                            ),
+                            operator="IN",
+                            right_primary_expressions=[
+                                PrimaryExpression(
+                                    content=RDFLiteral(value=settings.default_language)
+                                ),
+                                PrimaryExpression(content=RDFLiteral(value="")),
+                            ],
+                        )
+                    )
+                )
             )
         )
diff --git a/prez/services/query_generation/count.py b/prez/services/query_generation/count.py
index a05b1f6e..18f02c9a 100755
--- a/prez/services/query_generation/count.py
+++ b/prez/services/query_generation/count.py
@@ -1,5 +1,5 @@
 from pydantic import BaseModel
-from rdflib import RDF, BNode
+from rdflib import BNode
 
 from prez.reference_data.prez_ns import PREZ
 from temp.grammar import *
@@ -13,7 +13,7 @@ class Config:
 
     def render(self):
         cq = self.create_construct_query()
-        return "".join(part for part in cq.render())
+        return cq
 
     def create_construct_query(self):
         """Calls lower level functions and builds the overall query.
@@ -94,6 +94,68 @@ def create_construct_template(self):
         return ct
 
 
+class CountQueryV2(ConstructQuery):
+    """Query is of the form:
+    CONSTRUCT {
+        _:N9008750f9acb47c08dfc2c3ae72ede37 <https://prez.dev/count> ?count .
+    }
+    WHERE {
+        SELECT (COUNT(DISTINCT ?focus_node) AS ?count)
+        WHERE {
+            <<original subselect WHERE clause>>
+        }
+    }
+    """
+
+    def __init__(self, original_subselect: SubSelect):
+        # Construct Template
+        construct_template = ConstructTemplate(
+            construct_triples=ConstructTriples(
+                triples=[
+                    SimplifiedTriple(
+                        subject=BNode(),
+                        predicate=IRI(value="https://prez.dev/count"),
+                        object=Var(value="count"),
+                    )
+                ]
+            )
+        )
+
+        # Rebuild the SELECT clause in the new SubSelect to retrieve the count of the focus node
+        count_expression = Expression.from_primary_expr(
+            PrimaryExpression(
+                content=BuiltInCall(
+                    other_expressions=Aggregate(
+                        function_name="COUNT",
+                        distinct=True,
+                        expression=Expression.from_primary_expr(
+                            PrimaryExpression(content=Var(value="focus_node"))
+                        ),
+                    )
+                )
+            )
+        )
+
+        # Where Clause using the new SubSelect
+        where_clause = WhereClause(
+            group_graph_pattern=GroupGraphPattern(
+                content=SubSelect(
+                    select_clause=SelectClause(
+                        variables_or_all=[(count_expression, Var(value="count"))],
+                    ),
+                    where_clause=original_subselect.where_clause,
+                    values_clause=original_subselect.values_clause,
+                    solution_modifier=SolutionModifier(),
+                )
+            )
+        )
+        # Initialize the base ConstructQuery
+        super().__init__(
+            construct_template=construct_template,
+            where_clause=where_clause,
+        )
+
+
 def startup_count_objects():
     """
     Retrieves hardcoded counts for collections in the repository (Feature Collections, Catalogs etc.)
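
For reference, the SPARQL that CountQueryV2 is intended to produce, written out for a hypothetical subselect whose focus nodes are dcat:Catalog instances. Only the inner WHERE content varies; the CONSTRUCT wrapper and the COUNT(DISTINCT ?focus_node) projection are fixed:

# Sketch only: the target query shape, shown as a plain string for an
# assumed dcat:Catalog subselect.
count_query = """
CONSTRUCT { _:count_bn <https://prez.dev/count> ?count }
WHERE {
    SELECT (COUNT(DISTINCT ?focus_node) AS ?count)
    WHERE { ?focus_node a <http://www.w3.org/ns/dcat#Catalog> }
}
"""
print(count_query)
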
diff --git a/prez/services/query_generation/node_selection/endpoint_shacl.py b/prez/services/query_generation/node_selection/endpoint_shacl.py
index 7252d9fb..ea78eb2f 100644
--- a/prez/services/query_generation/node_selection/endpoint_shacl.py
+++ b/prez/services/query_generation/node_selection/endpoint_shacl.py
@@ -170,7 +170,9 @@ def from_graph(self):
             self._process_property_path(pp, self.graph)
 
     def _process_property_path(self, pp, graph):
-        if isinstance(pp, BNode):
+        if isinstance(pp, URIRef):
+            self.property_paths.append(Path(value=pp))
+        elif isinstance(pp, BNode):
             pred_objects_gen = graph.predicate_objects(subject=pp)
             bn_pred, bn_obj = next(pred_objects_gen, (None, None))
             if bn_obj == SH.union:
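
_process_property_path now handles a bare predicate (a URIRef) as a property path of length one, while blank nodes continue to be unpacked as composite paths (unions, inverses, sequences). A reduced illustration of the branch, with a toy tuple standing in for the temp.grammar Path class:

# Sketch only: the URIRef/BNode dispatch, using plain rdflib terms.
from rdflib import BNode, URIRef


def process_property_path(pp, property_paths: list) -> None:
    if isinstance(pp, URIRef):
        property_paths.append(("path", pp))  # bare predicate: length-one path
    elif isinstance(pp, BNode):
        property_paths.append(("composite", pp))  # sh:union / inverse / sequence


paths = []
process_property_path(URIRef("http://purl.org/dc/terms/hasPart"), paths)
process_property_path(BNode(), paths)
assert [kind for kind, _ in paths] == ["path", "composite"]
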
diff --git a/tests/data/bnode_depth/bnode_depth-1.ttl b/test_data/bnode_depth-1.ttl
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/bnode_depth/bnode_depth-1.ttl
rename to test_data/bnode_depth-1.ttl
diff --git a/tests/data/bnode_depth/bnode_depth-2-2.ttl b/test_data/bnode_depth-2-2.ttl
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/bnode_depth/bnode_depth-2-2.ttl
rename to test_data/bnode_depth-2-2.ttl
diff --git a/tests/data/bnode_depth/bnode_depth-2.ttl b/test_data/bnode_depth-2.ttl
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/bnode_depth/bnode_depth-2.ttl
rename to test_data/bnode_depth-2.ttl
diff --git a/tests/data/bnode_depth/bnode_depth-4.ttl b/test_data/bnode_depth-4.ttl
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/bnode_depth/bnode_depth-4.ttl
rename to test_data/bnode_depth-4.ttl
diff --git a/tests/data/cql/input/example01.json b/test_data/cql/input/example01.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example01.json
rename to test_data/cql/input/example01.json
diff --git a/tests/data/cql/input/example02.json b/test_data/cql/input/example02.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example02.json
rename to test_data/cql/input/example02.json
diff --git a/tests/data/cql/input/example03.json b/test_data/cql/input/example03.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example03.json
rename to test_data/cql/input/example03.json
diff --git a/tests/data/cql/input/example05a.json b/test_data/cql/input/example05a.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example05a.json
rename to test_data/cql/input/example05a.json
diff --git a/tests/data/cql/input/example05b.json b/test_data/cql/input/example05b.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example05b.json
rename to test_data/cql/input/example05b.json
diff --git a/tests/data/cql/input/example06b.json b/test_data/cql/input/example06b.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example06b.json
rename to test_data/cql/input/example06b.json
diff --git a/tests/data/cql/input/example07.json b/test_data/cql/input/example07.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example07.json
rename to test_data/cql/input/example07.json
diff --git a/tests/data/cql/input/example08.json b/test_data/cql/input/example08.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example08.json
rename to test_data/cql/input/example08.json
diff --git a/tests/data/cql/input/example09.json b/test_data/cql/input/example09.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example09.json
rename to test_data/cql/input/example09.json
diff --git a/tests/data/cql/input/example10.json b/test_data/cql/input/example10.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example10.json
rename to test_data/cql/input/example10.json
diff --git a/tests/data/cql/input/example11.json b/test_data/cql/input/example11.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example11.json
rename to test_data/cql/input/example11.json
diff --git a/tests/data/cql/input/example12.json b/test_data/cql/input/example12.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example12.json
rename to test_data/cql/input/example12.json
diff --git a/tests/data/cql/input/example14.json b/test_data/cql/input/example14.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example14.json
rename to test_data/cql/input/example14.json
diff --git a/tests/data/cql/input/example15.json b/test_data/cql/input/example15.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example15.json
rename to test_data/cql/input/example15.json
diff --git a/tests/data/cql/input/example17.json b/test_data/cql/input/example17.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example17.json
rename to test_data/cql/input/example17.json
diff --git a/tests/data/cql/input/example29.json b/test_data/cql/input/example29.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example29.json
rename to test_data/cql/input/example29.json
diff --git a/tests/data/cql/input/example31.json b/test_data/cql/input/example31.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example31.json
rename to test_data/cql/input/example31.json
diff --git a/tests/data/cql/input/example32.json b/test_data/cql/input/example32.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example32.json
rename to test_data/cql/input/example32.json
diff --git a/tests/data/cql/input/example33.json b/test_data/cql/input/example33.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example33.json
rename to test_data/cql/input/example33.json
diff --git a/tests/data/cql/input/example34.json b/test_data/cql/input/example34.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example34.json
rename to test_data/cql/input/example34.json
diff --git a/tests/data/cql/input/example35.json b/test_data/cql/input/example35.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example35.json
rename to test_data/cql/input/example35.json
diff --git a/tests/data/cql/input/example39.json b/test_data/cql/input/example39.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/example39.json
rename to test_data/cql/input/example39.json
diff --git a/tests/data/cql/input/geo_intersects.json b/test_data/cql/input/geo_intersects.json
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/cql/input/geo_intersects.json
rename to test_data/cql/input/geo_intersects.json
diff --git a/test_data/object_vocab_api_bblocks.ttl b/test_data/object_vocab_api_bblocks.ttl
index 20a44635..15cb9aa3 100644
--- a/test_data/object_vocab_api_bblocks.ttl
+++ b/test_data/object_vocab_api_bblocks.ttl
@@ -35,5 +35,4 @@ bblocks:ogc.unstable.sosa a skos:Concept,
         ;
     bblocks:scope ;
     bblocks:status ;
-    prez:link "/catalogs/bblocks/collections/api/items/ogc.unstable.sosa" ;
 .
diff --git a/tests/data/spaceprez/input/redirect-foaf-homepage.ttl b/test_data/redirect-foaf-homepage.ttl
old mode 100755
new mode 100644
similarity index 100%
rename from tests/data/spaceprez/input/redirect-foaf-homepage.ttl
rename to test_data/redirect-foaf-homepage.ttl
diff --git a/tests/_test_curie_generation.py b/tests/_test_curie_generation.py
index 79c2164e..5c3d3d19 100755
--- a/tests/_test_curie_generation.py
+++ b/tests/_test_curie_generation.py
@@ -5934,7 +5934,7 @@ def test_get_curie_id_for_uri_negative():
         assert get_curie_id_for_uri(uri)
 
 
-def test_get_uri_for_curie_id():
-    assert get_uri_for_curie_id(curie_id="skos:prefLabel") == URIRef(
+async def test_get_uri_for_curie_id():
+    assert await get_uri_for_curie_id(curie_id="skos:prefLabel") == URIRef(
         "http://www.w3.org/2004/02/skos/core#prefLabel"
     )
diff --git a/tests/conftest.py b/tests/conftest.py
index 2fb7a8af..3be7a303 100755
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,4 +1,10 @@
+import logging
 import os
+from typing import Optional, Set
+
+from prez.reference_data.prez_ns import PREZ
+
+from rdflib import Graph, URIRef
 
 os.environ["SPARQL_REPO_TYPE"] = "pyoxigraph"
 # os.environ["LOG_LEVEL"] = "DEBUG"
@@ -14,7 +20,7 @@
 from prez.repositories import Repo, PyoxigraphRepo
 
 
-@pytest.fixture(scope="session")
+@pytest.fixture(scope="module")
 def test_store() -> Store:
     # Create a new pyoxigraph Store
     store = Store()
@@ -25,13 +31,13 @@ def test_store() -> Store:
     return store
 
 
-@pytest.fixture(scope="session")
+@pytest.fixture(scope="module")
 def test_repo(test_store: Store) -> Repo:
     # Create a PyoxigraphQuerySender using the test_store
     return PyoxigraphRepo(test_store)
 
 
-@pytest.fixture(scope="session")
+@pytest.fixture(scope="module")
 def client(test_repo: Repo) -> TestClient:
     # Override the dependency to use the test_repo
     def override_get_repo():
@@ -44,3 +50,56 @@ def override_get_repo():
 
     # Remove the override to ensure subsequent tests are unaffected
     app.dependency_overrides.clear()
+
+
+@pytest.fixture()
+def a_catalog_link(client):
+    r = client.get("/catalogs")
+    g = Graph().parse(data=r.text)
+    member_uri = URIRef("https://example.com/SpacePrezCatalog")
+    link = g.value(member_uri, URIRef("https://prez.dev/link"), None)
+    return link
+
+
+@pytest.fixture()
+def an_fc_link(client, a_catalog_link):
+    r = client.get(f"{a_catalog_link}/collections")
+    g = Graph().parse(data=r.text)
+    links = g.objects(subject=None, predicate=URIRef("https://prez.dev/link"))
+    for link in links:
+        if link != a_catalog_link:
+            return link
+
+
+@pytest.fixture()
+def a_feature_link(client, an_fc_link):
+    r = client.get(f"{an_fc_link}/items")
+    g = Graph().parse(data=r.text)
+    links = g.objects(subject=None, predicate=URIRef("https://prez.dev/link"))
+    for link in links:
+        if link != an_fc_link:
+            return link
+
+
+@pytest.fixture()
+def a_top_level_catalog_link(client):
+    # get link for first catalog
+    r = client.get("/catalogs")
+    g = Graph().parse(data=r.text)
+    member_uri = URIRef("https://example.com/TopLevelCatalog")
+    link = g.value(member_uri, URIRef("https://prez.dev/link"), None)
+    return link
+
+
+@pytest.fixture()
+def a_resource_link(client,
a_top_level_catalog_link): + r = client.get(a_top_level_catalog_link) + g = Graph().parse(data=r.text) + links = g.objects(subject=None, predicate=URIRef(f"https://prez.dev/link")) + for link in links: + if link != a_top_level_catalog_link: + return link + + + + diff --git a/tests/data/catprez/expected_responses/resource_anot.ttl b/tests/data/catprez/expected_responses/resource_anot.ttl deleted file mode 100755 index 0ab4b07a..00000000 --- a/tests/data/catprez/expected_responses/resource_anot.ttl +++ /dev/null @@ -1,57 +0,0 @@ -@prefix dcat: . -@prefix dcterms: . -@prefix prez: . -@prefix rdf: . -@prefix rdfs: . -@prefix schema: . -@prefix xsd: . - -dcterms:creator rdfs:label "Creator"@en ; - dcterms:description "Recommended practice is to identify the creator with a URI. If this is not possible or feasible, a literal value that identifies the creator may be provided."@en . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . - -dcterms:issued rdfs:label "Date Issued"@en ; - dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en . - -dcterms:publisher rdfs:label "Publisher"@en . - -dcterms:title rdfs:label "Title"@en . - -rdf:type rdfs:label "type" . - -rdfs:label rdfs:label "label" . - - a dcat:Resource ; - dcterms:creator ; - dcterms:description """This study contains time series of data of the Annual Aboriginal Census for Australia, Australian Capital Territory, New South Wales, Northern Territory, Queensland, South Australia, Tasmania, Victoria and Western Australia from 1921 to 1944. - -Special care notice: -Aboriginal and Torres Strait Islander people, researchers and other users should be aware that material in this dataset may contain material that is considered offensive. The data has been retained in its original format because it represents an evidential record of language, beliefs or other cultural situations at a point in time.""" ; - dcterms:identifier "pd:AAC-SA"^^prez:identifier ; - dcterms:issued "2011-07-22"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Annual Aboriginal Census,1921-1944 - South Australia" ; - prez:link "/c/catalogs/pd:democat/resources/pd:AAC-SA" . - - rdfs:label "IDN Demonstration Catalogue" ; - dcterms:description """The Indigenous Data Network's demonstration catalogue of datasets. This catalogue contains records of datasets in Australia, most of which have some relation to indigenous Australia. - -The purpose of this catalogue is not to act as a master catalogue of indigenous data in Australia to demonstrate improved metadata models and rating systems for data and metadata in order to improve indigenous data governance. - -The content of this catalogue conforms to the Indigenous Data Network's Catalogue Profile which is a profile of the DCAT, SKOS and PROV data models."""@en ; - dcterms:identifier "pd:democat"^^prez:identifier ; - dcterms:title "IDN Demonstration Catalogue" . 
- -schema:description rdfs:label "description" . - -schema:name rdfs:label "name" . - - rdfs:label "Australian National University" ; - schema:description "ANU is a world-leading university in Australia’s capital. Excellence is embedded in our approach to research and education." ; - schema:name "Australian National University" . - diff --git a/tests/data/catprez/expected_responses/resource_listing_anot.ttl b/tests/data/catprez/expected_responses/resource_listing_anot.ttl deleted file mode 100755 index 71167d57..00000000 --- a/tests/data/catprez/expected_responses/resource_listing_anot.ttl +++ /dev/null @@ -1,201 +0,0 @@ -@prefix dcterms: . -@prefix prez: . -@prefix rdfs: . -@prefix schema: . -@prefix xsd: . - -dcterms:creator rdfs:label "Creator"@en ; - dcterms:description "Recommended practice is to identify the creator with a URI. If this is not possible or feasible, a literal value that identifies the creator may be provided."@en . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:hasPart rdfs:label "Has Part"@en ; - dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of Is Part Of."@en . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . - -dcterms:issued rdfs:label "Date Issued"@en ; - dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en . - -dcterms:publisher rdfs:label "Publisher"@en . - -dcterms:title rdfs:label "Title"@en . - -rdfs:label rdfs:label "label" . - - rdfs:label "IDN Demonstration Catalogue" ; - dcterms:description """The Indigenous Data Network's demonstration catalogue of datasets. This catalogue contains records of datasets in Australia, most of which have some relation to indigenous Australia. - -The purpose of this catalogue is not to act as a master catalogue of indigenous data in Australia to demonstrate improved metadata models and rating systems for data and metadata in order to improve indigenous data governance. - -The content of this catalogue conforms to the Indigenous Data Network's Catalogue Profile which is a profile of the DCAT, SKOS and PROV data models."""@en ; - dcterms:hasPart , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - ; - dcterms:identifier "pd:democat"^^prez:identifier ; - dcterms:title "IDN Demonstration Catalogue" ; - prez:count 67 ; - prez:link "/c/catalogs/pd:democat" . - -schema:description rdfs:label "description" . - -schema:name rdfs:label "name" . - - dcterms:creator ; - dcterms:description """This dataset has been developed by the Australian Government as an authoritative source of indigenous location names across Australia. It is sponsored by the Spatial Policy Branch within the Department of Communications and managed solely by the Department of Human Services. 
-The dataset is designed to support the accurate positioning, consistent reporting, and effective delivery of Australian Government programs and services to indigenous locations. -The dataset contains Preferred and Alternate names for indigenous locations where Australian Government programs and services have been, are being, or may be provided. The Preferred name will always default to a State or Territory jurisdiction's gazetted name so the term 'preferred' does not infer that this is the locally known name for the location. Similarly, locational details are aligned, where possible, with those published in State and Territory registers. -This dataset is NOT a complete listing of all locations at which indigenous people reside. Town and city names are not included in the dataset. The dataset contains names that represent indigenous communities, outstations, defined indigenous areas within a town or city or locations where services have been provided.""" ; - dcterms:issued "2013-12-02"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Australian Government Indigenous Programs & Policy Locations (AGIL) dataset" . - - dcterms:creator ; - dcterms:description """This study contains time series of data of the Annual Aboriginal Census for Australia, Australian Capital Territory, New South Wales, Northern Territory, Queensland, South Australia, Tasmania, Victoria and Western Australia from 1921 to 1944. - -Special care notice: -Aboriginal and Torres Strait Islander people, researchers and other users should be aware that material in this dataset may contain material that is considered offensive. The data has been retained in its original format because it represents an evidential record of language, beliefs or other cultural situations at a point in time.""" ; - dcterms:identifier "pd:AAC-SA"^^prez:identifier ; - dcterms:issued "2011-07-22"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Annual Aboriginal Census,1921-1944 - South Australia" ; - prez:link "/c/catalogs/pd:democat/resources/pd:AAC-SA" . - - dcterms:description "A 2020 review of First Nations Identified physical collections held by the ANU. Not published." ; - dcterms:publisher ; - dcterms:title "2020 ANU First Nations Collections Review" . - - dcterms:creator ; - dcterms:description """The Aboriginal and Torres Strait Islander Community Profiles (ACPs) are tabulations giving key census characteristics of Aboriginal and Torres Strait Islander persons, families and dwellings, covering most topics on the 1991 Census of Population and Housing form. This profile is presented at the ATSIC Region level. - -The ACP consists of 29 tables which crosstabulate characteristics including gender, age, place of birth, religion, marital status, education, income, occupation and employment status.""" ; - dcterms:issued "2007-03-16"^^xsd:date ; - dcterms:publisher ; - dcterms:title "1991 Census of Population and Housing: Aboriginal and Torres Strait Islander Community Profile: ATSIC Regions" . - - dcterms:creator ; - dcterms:description """Austlang provides information about Indigenous Australian languages which has been assembled from referenced sources. -The dataset provided here includes the language names, each with a unique alpha-numeric code which functions as a stable identifier, alternative/variant names and spellings and the approximate location of each language variety.""" ; - dcterms:publisher ; - dcterms:title "Austlang database." . 
- - dcterms:creator ; - dcterms:description """The Indigenous Protected Areas (IPA) programme has demonstrated successes across a broad range of outcome areas, effectively overcoming barriers to addressing Indigenous disadvantage and engaging Indigenous Australians in meaningful employment to achieve large scale conservation outcomes, thus aligning the interests of Indigenous Australians and the broader community. - -The Birriliburu & Matuwa Kurrara Kurrara (MKK) IPAs have provided an opportunity for Martu people to reconnect with and actively manage their traditional country. - -The two IPAs have proved a useful tool with which to leverage third party investment, through a joint management arrangement with the Western Australia (WA) Government, project specific funding from environmental NGOs and mutually beneficial partnerships with the private sector. - -Increased and diversified investment from a range of funding sources would meet the high demand for Ranger jobs and could deliver a more expansive programme of works, which would, in turn, increase the social, economic and cultural outcomes for Martu Rangers and Community Members.""" ; - dcterms:issued "0601-01-01"^^xsd:date ; - dcterms:publisher ; - dcterms:title "SRI Investment Analysis of the Birriliburu and Matuwa Kurrara Kurrara Indigenous Protected Areas (2016)" . - - dcterms:description "UTS has taken over this data, but needs help to turn it into an ongoing public database" ; - dcterms:publisher , - ; - dcterms:title "Aboriginal Deaths and Injuries in Custody" . - - dcterms:description "(Torrens University). An earlier application with Marcia for AIATSIS funding was never considered." ; - dcterms:publisher ; - dcterms:title "GDP and Genuine Progress Indicator" . - - dcterms:creator ; - dcterms:description "Land that is owned or managed by Australia’s Indigenous communities, or over which Indigenous people have use and rights, was compiled from information supplied by Australian, state and territory governments and other statutory authorities with Indigenous land and sea management interests." ; - dcterms:issued "2019-04-03"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Indigenous Land and Sea Interests " . - - dcterms:creator ; - dcterms:description "Registered & Notified Indigenous Land Use Agreements – (as per s. 24BH(1)(a), s. 24CH and s. 24DI(1)(a)) across Australia, The Central Resource for Sharing and Enabling Environmental Data in NSW" ; - dcterms:issued "2013-12-05"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Indigenous Land Use Agreement Boundaries with basic metadata and status" . - - dcterms:description "Printed catalog highlighting ANU Indigenous Research activities at the time of publication" ; - dcterms:publisher ; - dcterms:title "Indigenous Research Compendium 2018" . - - dcterms:description "These are extensive paper records which Ian Anderson has proposed incorporating in a database. Negotiation is still needed." ; - dcterms:publisher ; - dcterms:title "Tasmanian Aboriginal genealogies" . - - dcterms:creator ; - dcterms:description "NSW prison population data and quarterly custody reports" ; - dcterms:issued "2022-08-01"^^xsd:date ; - dcterms:publisher ; - dcterms:title "NSW Custody Statistics" . - - dcterms:description "This comprises records of about 70,000 Indigenous and 30,000 non-Indigenous people surveyed in the 1970s and 1980s. Some paper records are held at AIATSIS. Microfilms of others are at UNSW Archives. 
There have been preliminary discussions with AIATSIS, the National Library and former members of the Hollows team about a program to digitise the records. IDN staff/resources would be needed." ; - dcterms:publisher , - ; - dcterms:title "The Fred Hollows Archive (National Trachoma and Eye Health Program)" . - - dcterms:creator ; - dcterms:description """Conference powerpoint presentation - -Case study in exemplary IDG. -- Survey of native title prescribed bodies corporate (PBCs) -- Collect data on PBCs’ capacity, capabilities, needs and aspirations to better inform policies that affect PBCs -- Started data collection May 2019, to finish in 3rd quarter 2019""" ; - dcterms:issued "2019-07-03"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Prescribed bodies corporate (PBCs) Survey 2019" . - - dcterms:creator ; - dcterms:description """Aboriginal and Torres Strait Islander people are the Indigenous people of Australia. They are not one group, but comprise hundreds of groups that have their own distinct set of languages, histories and cultural traditions. - -AIHW reports and other products include information about Indigenous Australians, where data quality permits. Thus, information and statistics about Indigenous Australians can be found in most AIHW products. - -In December 2021, AIHW released the Regional Insights for Indigenous Communities (RIFIC). The aim of this website is to provide access to data at a regional level, to help communities set their priorities and participate in joint planning with government and service providers. - -AIHW products that focus specifically on Indigenous Australians are captured on this page.""" ; - dcterms:issued "1101-01-01"^^xsd:date ; - dcterms:publisher ; - dcterms:title "Regional Insights for Indigenous Communities" . - - dcterms:description "Access still to be negotiated with the Museum." ; - dcterms:publisher ; - dcterms:title "The Sandra Smith Archive" . - - dcterms:description "Strong demand but controversial." ; - dcterms:publisher ; - dcterms:title "Tindale/Horton map" . - - dcterms:description """TLCMap is a set of tools that work together for mapping Australian history and culture. - -Note that historical placenames in TLCmap is a HASS-I integration activity.""" ; - dcterms:publisher ; - dcterms:title "Time Layered Cultural Map of Australia" . - - rdfs:label "Services Australia" ; - schema:name "Services Australia" . - - rdfs:label "Australian Federal Government" ; - schema:name "Australian Government" . - - rdfs:label "Australian National University" ; - schema:description "ANU is a world-leading university in Australia’s capital. Excellence is embedded in our approach to research and education." ; - schema:name "Australian National University" . - - rdfs:label "AIATSIS" . - diff --git a/tests/data/catprez/expected_responses/top_level_catalog_anot.ttl b/tests/data/catprez/expected_responses/top_level_catalog_anot.ttl deleted file mode 100755 index 24547d3c..00000000 --- a/tests/data/catprez/expected_responses/top_level_catalog_anot.ttl +++ /dev/null @@ -1,46 +0,0 @@ -@prefix dcat: . -@prefix dcterms: . -@prefix ns1: . -@prefix prez: . -@prefix rdf: . -@prefix rdfs: . -@prefix skos: . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:hasPart rdfs:label "Has Part"@en ; - dcterms:description "This property is intended to be used with non-literal values. 
This property is an inverse property of Is Part Of."@en . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . - -rdf:type rdfs:label "type" . - -rdfs:label rdfs:label "label" . - -skos:definition rdfs:label "definition"@en ; - skos:definition "A statement or formal explanation of the meaning of a concept."@en . - -ns1:TopLevelCatalog a dcat:Catalog ; - rdfs:label "Top level catalog" ; - dcterms:hasPart ns1:LowerLevelCatalog ; - dcterms:identifier "exm:TopLevelCatalog"^^prez:identifier ; - ns1:property "top level catalog property" ; - prez:link "/c/catalogs/exm:TopLevelCatalog", - "/v/catalogs/exm:TopLevelCatalog" ; - prez:members [ prez:link "/c/catalogs/exm:TopLevelCatalog/collections" ] . - -prez:link rdfs:label "link" . - -prez:members rdfs:label "members" . - -dcat:Catalog rdfs:label "Catalog"@en ; - skos:definition "A curated collection of metadata about resources (e.g., datasets and data services in the context of a data catalog)."@en . - -ns1:LowerLevelCatalog rdfs:label "Lower level catalog" ; - dcterms:identifier "exm:LowerLevelCatalog"^^prez:identifier ; - prez:link "/c/catalogs/exm:LowerLevelCatalog", - "/c/catalogs/exm:TopLevelCatalog/collections/exm:LowerLevelCatalog", - "/v/catalogs/exm:LowerLevelCatalog" . - diff --git a/tests/data/catprez/expected_responses/top_level_catalog_listing_anot.ttl b/tests/data/catprez/expected_responses/top_level_catalog_listing_anot.ttl deleted file mode 100755 index dd7bcefa..00000000 --- a/tests/data/catprez/expected_responses/top_level_catalog_listing_anot.ttl +++ /dev/null @@ -1,41 +0,0 @@ -@prefix dcat: . -@prefix dcterms: . -@prefix prez: . -@prefix rdf: . -@prefix rdfs: . -@prefix skos: . -@prefix xsd: . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . - -rdf:type rdfs:label "type" . - -rdfs:label rdfs:label "label" . - -skos:definition rdfs:label "definition"@en ; - skos:definition "A statement or formal explanation of the meaning of a concept."@en . - - a dcat:Catalog ; - rdfs:label "Top level catalog" ; - dcterms:identifier "exm:TopLevelCatalog"^^prez:identifier ; - prez:link "/c/catalogs/exm:TopLevelCatalog", - "/v/catalogs/exm:TopLevelCatalog" . - - a dcat:Catalog ; - rdfs:label "amazing catalog" ; - dcterms:identifier "exm:TopLevelCatalogTwo"^^prez:identifier ; - prez:link "/c/catalogs/exm:TopLevelCatalogTwo", - "/v/catalogs/exm:TopLevelCatalogTwo" . - -prez:count rdfs:label "count" . - -prez:link rdfs:label "link" . - -dcat:Catalog rdfs:label "Catalog"@en ; - skos:definition "A curated collection of metadata about resources (e.g., datasets and data services in the context of a data catalog)."@en ; - prez:count 2 . 
- diff --git a/tests/data/catprez/input/catprez.ttl b/tests/data/catprez/input/catprez.ttl deleted file mode 100755 index f599aeed..00000000 --- a/tests/data/catprez/input/catprez.ttl +++ /dev/null @@ -1,38 +0,0 @@ -PREFIX dcat: -PREFIX dcterms: -PREFIX ex: -PREFIX rdfs: - -ex:TopLevelCatalog a dcat:Catalog ; - rdfs:label "Top level catalog" ; - dcterms:hasPart ex:LowerLevelCatalog ; - ex:property "top level catalog property" ; -. - -ex:LowerLevelCatalog a dcat:Catalog ; - rdfs:label "Lower level catalog" ; - dcterms:hasPart ex:Resource ; - ex:property "lower level catalog property" -. - -ex:Resource a dcat:Resource ; - rdfs:label "Resource" ; - ex:property "resource property" ; -. - -ex:TopLevelCatalogTwo a dcat:Catalog ; - rdfs:label "amazing catalog" ; - dcterms:hasPart ex:LowerLevelCatalogTwo ; - ex:property "complete" ; -. - -ex:LowerLevelCatalogTwo a dcat:Catalog ; - rdfs:label "rightful" ; - dcterms:hasPart ex:ResourceTwo ; - ex:property "exposure" -. - -ex:ResourceTwo a dcat:Resource ; - rdfs:label "salty" ; - ex:property "proficient" ; -. \ No newline at end of file diff --git a/tests/data/object/expected_responses/fc.ttl b/tests/data/object/expected_responses/fc.ttl deleted file mode 100755 index 059b7f64..00000000 --- a/tests/data/object/expected_responses/fc.ttl +++ /dev/null @@ -1,58 +0,0 @@ -@prefix dcterms: . -@prefix geo: . -@prefix ns1: . -@prefix prez: . -@prefix rdf: . -@prefix rdfs: . -@prefix skos: . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . - -dcterms:provenance rdfs:label "Provenance"@en ; - dcterms:description "The statement may include a description of any changes successive custodians made to the resource."@en . - - dcterms:identifier "cgi:contacttype"^^prez:identifier ; - dcterms:provenance "this vocabulary" ; - skos:definition "All Concepts in this vocabulary" ; - skos:prefLabel "Contact Type - All Concepts"@en . - - dcterms:identifier "2016.01:contacttype"^^prez:identifier ; - dcterms:provenance "Original set of terms from the GeosciML standard" ; - skos:definition "This scheme describes the concept space for Contact Type concepts, as defined by the IUGS Commission for Geoscience Information (CGI) Geoscience Terminology Working Group. By extension, it includes all concepts in this conceptScheme, as well as concepts in any previous versions of the scheme. Designed for use in the contactType property in GeoSciML Contact elements."@en ; - skos:prefLabel "Contact Type"@en . - -rdf:type rdfs:label "type" . - -rdfs:label rdfs:label "label" . - -rdfs:member rdfs:label "member" . - -skos:definition rdfs:label "definition"@en ; - skos:definition "A statement or formal explanation of the meaning of a concept."@en . - -skos:prefLabel rdfs:label "preferred label"@en ; - skos:definition "The preferred lexical label for a resource, in a given language."@en . - - dcterms:identifier "preztest:dataset"^^prez:identifier . 
- - a geo:FeatureCollection ; - dcterms:identifier "preztest:feature-collection"^^prez:identifier ; - rdfs:member ; - prez:link "/s/datasets/preztest:dataset/collections/preztest:feature-collection" . - - dcterms:identifier "cntcttyp:alteration_facies_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "A metasomatic facies contact separating rocks that have undergone alteration of a particular facies from those that have undergone metasomatism of another facies. Alteration is a kind of metasomatism that does not introduce economically important minerals."@en ; - skos:prefLabel "alteration facies contact"@en ; - prez:link "/s/datasets/preztest:dataset/collections/preztest:feature-collection/items/cntcttyp:alteration_facies_contact", - "/v/collection/cgi:contacttype/cntcttyp:alteration_facies_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:alteration_facies_contact" . - -geo:FeatureCollection skos:definition "A collection of individual Features."@en ; - skos:prefLabel "Feature Collection"@en . - diff --git a/tests/data/object/expected_responses/feature.ttl b/tests/data/object/expected_responses/feature.ttl deleted file mode 100755 index fc02e124..00000000 --- a/tests/data/object/expected_responses/feature.ttl +++ /dev/null @@ -1,59 +0,0 @@ -@prefix dcterms: . -@prefix geo: . -@prefix prez: . -@prefix rdf: . -@prefix rdfs: . -@prefix skos: . -@prefix xsd: . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . - -dcterms:title rdfs:label "Title"@en . - -dcterms:type rdfs:label "Type"@en ; - dcterms:description "Recommended practice is to use a controlled vocabulary such as the DCMI Type Vocabulary [[DCMI-TYPE](http://dublincore.org/documents/dcmi-type-vocabulary/)]. To describe the file format, physical medium, or dimensions of the resource, use the property Format."@en . - -geo:asWKT skos:definition "The WKT serialization of a Geometry"@en ; - skos:prefLabel "as WKT"@en . - -geo:hasGeometry skos:definition "A spatial representation for a given Feature."@en ; - skos:prefLabel "has geometry"@en . - -geo:hasMetricArea skos:definition "The area of a Spatial Object in square meters."@en ; - skos:prefLabel "has area in square meters"@en . - -rdf:type rdfs:label "type" . - -rdfs:label rdfs:label "label" . - -skos:definition rdfs:label "definition"@en ; - skos:definition "A statement or formal explanation of the meaning of a concept."@en . - -skos:prefLabel rdfs:label "preferred label"@en ; - skos:definition "The preferred lexical label for a resource, in a given language."@en . - - dcterms:description "The Australian national dataset of important hydrological features such as rivers, water bodies, aquifers and monitoring points"@en ; - dcterms:identifier "ldgovau:geofabric"^^prez:identifier ; - dcterms:title "Australian Hydrological Geospatial Fabric"@en . 
- - dcterms:description "Contracted Catchments hydrological catchments designed to build stable reporting regions"@en ; - dcterms:identifier "geofab:catchments"^^prez:identifier ; - dcterms:title "Contracted Catchments"@en . - - a geo:Feature, - ; - dcterms:identifier "102208962"^^xsd:token, - "hydrd:102208962"^^prez:identifier ; - dcterms:title "Contracted Catchment 102208962" ; - dcterms:type ; - geo:hasGeometry [ geo:asWKT "MULTIPOLYGON (((122.23180562900006 -17.564583177999964, 122.23208340700012 -17.564583177999964, 122.23208340700012 -17.56486095599996, 122.23180562900006 -17.56486095599996, 122.23180562900006 -17.564583177999964)), ((122.23180562900006 -17.564583177999964, 122.23152785200011 -17.564583177999964, 122.23152785200011 -17.564305399999967, 122.23180562900006 -17.564305399999967, 122.23180562900006 -17.564583177999964)), ((122.23152785200011 -17.564305399999967, 122.23125007400006 -17.564305399999967, 122.23125007400006 -17.56402762199997, 122.23152785200011 -17.56402762199997, 122.23152785200011 -17.564305399999967)), ((122.23125007400006 -17.56402762199997, 122.22902785200006 -17.56402762199997, 122.22902785200006 -17.564305399999967, 122.22875007400012 -17.564305399999967, 122.22875007400012 -17.564583177999964, 122.22847229600006 -17.564583177999964, 122.22847229600006 -17.56486095599996, 122.22819451800001 -17.56486095599996, 122.22819451800001 -17.56513873299997, 122.22791674000007 -17.56513873299997, 122.22791674000007 -17.565416510999967, 122.22763896300012 -17.565416510999967, 122.22763896300012 -17.565694288999964, 122.22736118500006 -17.565694288999964, 122.22736118500006 -17.56597206699996, 122.22708340700001 -17.56597206699996, 122.22708340700001 -17.56624984499996, 122.22680562900007 -17.56624984499996, 122.22680562900007 -17.566527621999967, 122.22291674000007 -17.566527621999967, 122.22291674000007 -17.566805399999964, 122.22263896300001 -17.566805399999964, 122.22263896300001 -17.56708317799996, 122.22236118500007 -17.56708317799996, 122.22236118500007 -17.56736095599996, 122.22208340700001 -17.56736095599996, 122.22208340700001 -17.567638732999967, 122.22180562900007 -17.567638732999967, 122.22180562900007 -17.567916510999964, 122.22152785200001 -17.567916510999964, 122.22152785200001 -17.568194288999962, 122.22125007400007 -17.568194288999962, 122.22125007400007 -17.56847206699996, 122.22097229600001 -17.56847206699996, 122.22097229600001 -17.568749844999957, 122.22069451800007 -17.568749844999957, 122.22069451800007 -17.569027621999965, 122.22041674000002 -17.569027621999965, 122.22041674000002 -17.569305399999962, 122.22013896300007 -17.569305399999962, 122.22013896300007 -17.56958317799996, 122.21986118500001 -17.56958317799996, 122.21986118500001 -17.569860955999957, 122.21958340700007 -17.569860955999957, 122.21958340700007 -17.570138732999965, 122.21930562900002 -17.570138732999965, 122.21930562900002 -17.570416510999962, 122.21902785200007 -17.570416510999962, 122.21902785200007 -17.57069428899996, 122.21875007400001 -17.57069428899996, 122.21875007400001 -17.570972066999957, 122.21208340700002 -17.570972066999957, 122.21208340700002 -17.571249844999954, 122.21180562900008 -17.571249844999954, 122.21180562900008 -17.571527621999962, 122.21152785100003 -17.571527621999962, 122.21152785100003 -17.57180539999996, 122.21125007400008 -17.57180539999996, 122.21125007400008 -17.572083177999957, 122.21097229600002 -17.572083177999957, 122.21097229600002 -17.572360955999955, 122.21069451800008 -17.572360955999955, 122.21069451800008 -17.572638732999962, 
122.21041674000003 -17.572638732999962, 122.21041674000003 -17.57291651099996, 122.21013896300008 -17.57291651099996, 122.21013896300008 -17.573194288999957, 122.20986118500002 -17.573194288999957, 122.20986118500002 -17.573472066999955, 122.20958340700008 -17.573472066999955, 122.20958340700008 -17.573749844999952, 122.20930562900003 -17.573749844999952, 122.20930562900003 -17.57402762199996, 122.20902785100009 -17.57402762199996, 122.20902785100009 -17.574305399999957, 122.20875007400002 -17.574305399999957, 122.20875007400002 -17.574583177999955, 122.20847229600008 -17.574583177999955, 122.20847229600008 -17.574860955999952, 122.20819451800003 -17.574860955999952, 122.20819451800003 -17.57513873299996, 122.20791674000009 -17.57513873299996, 122.20791674000009 -17.575416510999958, 122.20763896300002 -17.575416510999958, 122.20763896300002 -17.575694288999955, 122.20736118500008 -17.575694288999955, 122.20736118500008 -17.575972066999952, 122.20708340700003 -17.575972066999952, 122.20708340700003 -17.57624984499995, 122.20680562900009 -17.57624984499995, 122.20680562900009 -17.576527621999958, 122.20652785100003 -17.576527621999958, 122.20652785100003 -17.576805399999955, 122.20625007400008 -17.576805399999955, 122.20625007400008 -17.577083177999953, 122.20597229600003 -17.577083177999953, 122.20597229600003 -17.57736095599995, 122.20569451800009 -17.57736095599995, 122.20569451800009 -17.577638733999947, 122.20541674000003 -17.577638733999947, 122.20541674000003 -17.577916510999955, 122.20513896300008 -17.577916510999955, 122.20513896300008 -17.578194288999953, 122.20486118500003 -17.578194288999953, 122.20486118500003 -17.57847206699995, 122.20430562900003 -17.57847206699995, 122.20430562900003 -17.578749844999948, 122.20402785100009 -17.578749844999948, 122.20402785100009 -17.579027621999955, 122.20375007400003 -17.579027621999955, 122.20375007400003 -17.579305399999953, 122.20347229600009 -17.579305399999953, 122.20347229600009 -17.57958317799995, 122.2001389620001 -17.57958317799995, 122.2001389620001 -17.579860955999948, 122.19986118500003 -17.579860955999948, 122.19986118500003 -17.580138733999945, 122.19958340700009 -17.580138733999945, 122.19958340700009 -17.580416510999953, 122.19930562900004 -17.580416510999953, 122.19930562900004 -17.58069428899995, 122.1990278510001 -17.58069428899995, 122.1990278510001 -17.580972066999948, 122.19875007400003 -17.580972066999948, 122.19875007400003 -17.581249844999945, 122.19847229600009 -17.581249844999945, 122.19847229600009 -17.581527621999953, 122.19819451800004 -17.581527621999953, 122.19819451800004 -17.58180539999995, 122.1979167400001 -17.58180539999995, 122.1979167400001 -17.582083177999948, 122.19763896200004 -17.582083177999948, 122.19763896200004 -17.582360955999945, 122.19736118500009 -17.582360955999945, 122.19736118500009 -17.582638733999943, 122.19708340700004 -17.582638733999943, 122.19708340700004 -17.58291651099995, 122.19652785100004 -17.58291651099995, 122.19652785100004 -17.583194288999948, 122.19597229600004 -17.583194288999948, 122.19597229600004 -17.583472066999946, 122.19152785100005 -17.583472066999946, 122.19152785100005 -17.583749844999943, 122.1912500740001 -17.583749844999943, 122.1912500740001 -17.58402762199995, 122.19097229600004 -17.58402762199995, 122.19097229600004 -17.58430539999995, 122.1906945180001 -17.58430539999995, 122.1906945180001 -17.584583177999946, 122.19041674000005 -17.584583177999946, 122.19041674000005 -17.584860955999943, 122.18986118500004 -17.584860955999943, 122.18986118500004 
-17.58513873399994, 122.1868056290001 -17.58513873399994, 122.1868056290001 -17.584860955999943, 122.18597229600005 -17.584860955999943, 122.18597229600005 -17.584583177999946, 122.18541674000005 -17.584583177999946, 122.18541674000005 -17.58430539999995, 122.18513896200011 -17.58430539999995, 122.18513896200011 -17.584583177999946, 122.18541674000005 -17.584583177999946, 122.18541674000005 -17.584860955999943, 122.1856945180001 -17.584860955999943, 122.1856945180001 -17.58986095599994, 122.18541674000005 -17.58986095599994, 122.18541674000005 -17.59097206699994, 122.18513896200011 -17.59097206699994, 122.18513896200011 -17.591249844999936, 122.18513896200011 -17.591527622999934, 122.18541674000005 -17.591527622999934, 122.1856945180001 -17.591527622999934, 122.1856945180001 -17.59180539999994, 122.18597229600005 -17.59180539999994, 122.18597229600005 -17.59208317799994, 122.18625007300011 -17.59208317799994, 122.18625007300011 -17.592360955999936, 122.18652785100005 -17.592360955999936, 122.18652785100005 -17.592638733999934, 122.1868056290001 -17.592638733999934, 122.1868056290001 -17.59291651099994, 122.18708340700005 -17.59291651099994, 122.18708340700005 -17.59430539999994, 122.18708340700005 -17.594583177999937, 122.18708340700005 -17.594860955999934, 122.1868056290001 -17.594860955999934, 122.1868056290001 -17.59513873399993, 122.1868056290001 -17.59541651099994, 122.1868056290001 -17.595694288999937, 122.1868056290001 -17.595972066999934, 122.1868056290001 -17.59624984499993, 122.18652785100005 -17.59624984499993, 122.18652785100005 -17.59652762299993, 122.18652785100005 -17.596805399999937, 122.18652785100005 -17.597083177999934, 122.18625007300011 -17.597083177999934, 122.18625007300011 -17.59736095599993, 122.18597229600005 -17.59736095599993, 122.18597229600005 -17.59763873399993, 122.18597229600005 -17.597916510999937, 122.1856945180001 -17.597916510999937, 122.1856945180001 -17.598194288999935, 122.18541674000005 -17.598194288999935, 122.18541674000005 -17.598472066999932, 122.18541674000005 -17.59874984499993, 122.18541674000005 -17.599027622999927, 122.18513896200011 -17.599027622999927, 122.18513896200011 -17.599305399999935, 122.18513896200011 -17.599583177999932, 122.18513896200011 -17.59986095599993, 122.18486118500005 -17.59986095599993, 122.18486118500005 -17.600138733999927, 122.18486118500005 -17.600416510999935, 122.18486118500005 -17.600694288999932, 122.18486118500005 -17.60097206699993, 122.1845834070001 -17.60097206699993, 122.1845834070001 -17.601249844999927, 122.18430562900005 -17.601249844999927, 122.18430562900005 -17.60152762299998, 122.18402785100011 -17.60152762299998, 122.18402785100011 -17.601805399999932, 122.18375007300006 -17.601805399999932, 122.18375007300006 -17.60208317799993, 122.1834722960001 -17.60208317799993, 122.1834722960001 -17.602360955999927, 122.18319451800005 -17.602360955999927, 122.18319451800005 -17.60263873399998, 122.18291674000011 -17.60263873399998, 122.18291674000011 -17.602916510999933, 122.18263896200006 -17.602916510999933, 122.18263896200006 -17.60319428899993, 122.1823611850001 -17.60319428899993, 122.1823611850001 -17.603472066999927, 122.18208340700005 -17.603472066999927, 122.18208340700005 -17.60374984499998, 122.18180562900011 -17.60374984499998, 122.18180562900011 -17.60402762299998, 122.18152785100006 -17.60402762299998, 122.18152785100006 -17.60430539999993, 122.18125007300011 -17.60430539999993, 122.18125007300011 -17.604583177999928, 122.18097229600005 -17.604583177999928, 122.18097229600005 
-17.604860955999982, 122.18069451800011 -17.604860955999982, 122.18069451800011 -17.60513873399998, 122.18041674000006 -17.60513873399998, 122.18041674000006 -17.605416511999977, 122.18013896200011 -17.605416511999977, 122.18013896200011 -17.605694288999928, 122.17986118500005 -17.605694288999928, 122.17986118500005 -17.605972066999982, 122.17958340700011 -17.605972066999982, 122.17958340700011 -17.60624984499998, 122.17930562900005 -17.60624984499998, 122.17930562900005 -17.606527622999977, 122.17902785100011 -17.606527622999977, 122.17902785100011 -17.606805399999928, 122.17875007300006 -17.606805399999928, 122.17875007300006 -17.607083177999982, 122.17847229600011 -17.607083177999982, 122.17847229600011 -17.60736095599998, 122.17819451800005 -17.60736095599998, 122.17819451800005 -17.607638733999977, 122.17791674000011 -17.607638733999977, 122.17791674000011 -17.607916511999974, 122.17763896200006 -17.607916511999974, 122.17763896200006 -17.608194288999982, 122.17736118500011 -17.608194288999982, 122.17736118500011 -17.60847206699998, 122.17708340700005 -17.60847206699998, 122.17708340700005 -17.608749844999977, 122.17680562900011 -17.608749844999977, 122.17680562900011 -17.609027622999974, 122.17652785100006 -17.609027622999974, 122.17652785100006 -17.608749844999977, 122.17652785100006 -17.60847206699998, 122.176250073 -17.60847206699998, 122.176250073 -17.608194288999982, 122.176250073 -17.607916511999974, 122.176250073 -17.607638733999977, 122.17597229600005 -17.607638733999977, 122.17597229600005 -17.60736095599998, 122.17597229600005 -17.607083177999982, 122.17597229600005 -17.606805399999928, 122.17569451800011 -17.606805399999928, 122.17569451800011 -17.606527622999977, 122.17569451800011 -17.60624984499998, 122.17569451800011 -17.605972066999982, 122.17569451800011 -17.605694288999928, 122.175138962 -17.605694288999928, 122.175138962 -17.605416511999977, 122.17486118500005 -17.605416511999977, 122.17486118500005 -17.604860955999982, 122.17458340700011 -17.604860955999982, 122.17458340700011 -17.604583177999928, 122.17458340700011 -17.60430539999993, 122.17430562900006 -17.60430539999993, 122.17430562900006 -17.60402762299998, 122.17430562900006 -17.60374984499998, 122.17402785100012 -17.60374984499998, 122.17402785100012 -17.603472066999927, 122.17402785100012 -17.60319428899993, 122.17402785100012 -17.60208317799993, 122.17375007300006 -17.60208317799993, 122.17375007300006 -17.601805399999932, 122.17375007300006 -17.60152762299998, 122.17375007300006 -17.601249844999927, 122.17347229600011 -17.601249844999927, 122.17347229600011 -17.60097206699993, 122.17347229600011 -17.600694288999932, 122.17319451800006 -17.600694288999932, 122.17319451800006 -17.600416510999935, 122.17291674000012 -17.600416510999935, 122.17291674000012 -17.600138733999927, 122.17263896200006 -17.600138733999927, 122.17263896200006 -17.59986095599993, 122.17263896200006 -17.599583177999932, 122.17236118400001 -17.599583177999932, 122.17236118400001 -17.599305399999935, 122.17208340700006 -17.599305399999935, 122.17208340700006 -17.599027622999927, 122.17180562900012 -17.599027622999927, 122.17180562900012 -17.59874984499993, 122.17152785100006 -17.59874984499993, 122.17152785100006 -17.598472066999932, 122.17152785100006 -17.598194288999935, 122.17152785100006 -17.597916510999937, 122.17152785100006 -17.59763873399993, 122.17125007300001 -17.59763873399993, 122.17125007300001 -17.59736095599993, 122.17125007300001 -17.597083177999934, 122.17097229600006 -17.597083177999934, 122.17097229600006 
-17.59652762299993, 122.17069451800012 -17.59652762299993, 122.17069451800012 -17.59624984499993, 122.17041674000006 -17.59624984499993, 122.17041674000006 -17.595972066999934, 122.17013896200001 -17.595972066999934, 122.17013896200001 -17.595694288999937, 122.16958340700012 -17.595694288999937, 122.16958340700012 -17.59541651099994, 122.16930562900006 -17.59541651099994, 122.16930562900006 -17.59513873399993, 122.16902785100001 -17.59513873399993, 122.16902785100001 -17.594860955999934, 122.16902785100001 -17.594583177999937, 122.16902785100001 -17.59430539999994, 122.16875007300007 -17.59430539999994, 122.16875007300007 -17.59402762299993, 122.16847229600012 -17.59402762299993, 122.16847229600012 -17.593749844999934, 122.16819451800006 -17.593749844999934, 122.16819451800006 -17.593472066999936, 122.16819451800006 -17.59319428899994, 122.16791674000001 -17.59319428899994, 122.16791674000001 -17.59291651099994, 122.16791674000001 -17.592638733999934, 122.16791674000001 -17.592360955999936, 122.16791674000001 -17.59208317799994, 122.16819451800006 -17.59208317799994, 122.16819451800006 -17.59180539999994, 122.16847229600012 -17.59180539999994, 122.16847229600012 -17.591527622999934, 122.16819451800006 -17.591527622999934, 122.16819451800006 -17.591249844999936, 122.16791674000001 -17.591249844999936, 122.16791674000001 -17.59097206699994, 122.16763896200007 -17.59097206699994, 122.16763896200007 -17.59069428899994, 122.16763896200007 -17.590416510999944, 122.16736118400001 -17.590416510999944, 122.16736118400001 -17.590138733999936, 122.16708340700006 -17.590138733999936, 122.16680562900001 -17.590138733999936, 122.16680562900001 -17.58986095599994, 122.16652785100007 -17.58986095599994, 122.16652785100007 -17.58958317799994, 122.16652785100007 -17.589305399999944, 122.16652785100007 -17.589027621999946, 122.16652785100007 -17.58874984499994, 122.16625007300001 -17.58874984499994, 122.16625007300001 -17.589027621999946, 122.16597229600006 -17.589027621999946, 122.16569451800001 -17.589027621999946, 122.16541674000007 -17.589027621999946, 122.16513896200001 -17.589027621999946, 122.16513896200001 -17.58874984499994, 122.16486118400007 -17.58874984499994, 122.16486118400007 -17.58847206699994, 122.16458340700001 -17.58847206699994, 122.16430562900007 -17.58847206699994, 122.16430562900007 -17.588194288999944, 122.16430562900007 -17.587916510999946, 122.16402785100001 -17.587916510999946, 122.16402785100001 -17.58763873399994, 122.16430562900007 -17.58763873399994, 122.16430562900007 -17.58736095599994, 122.16430562900007 -17.587083177999943, 122.16430562900007 -17.586805399999946, 122.16402785100001 -17.586805399999946, 122.16402785100001 -17.58652762199995, 122.16375007300007 -17.58652762199995, 122.16375007300007 -17.58624984499994, 122.16347229600001 -17.58624984499994, 122.16347229600001 -17.585972066999943, 122.16347229600001 -17.585694288999946, 122.16319451800007 -17.585694288999946, 122.16319451800007 -17.58541651099995, 122.16291674000001 -17.58541651099995, 122.16291674000001 -17.58513873399994, 122.16263896200007 -17.58513873399994, 122.16263896200007 -17.584860955999943, 122.16263896200007 -17.584583177999946, 122.16263896200007 -17.58430539999995, 122.16236118400002 -17.58430539999995, 122.16208340700007 -17.58430539999995, 122.16208340700007 -17.58402762199995, 122.16180562900001 -17.58402762199995, 122.16152785100007 -17.58402762199995, 122.16152785100007 -17.583749844999943, 122.16125007300002 -17.583749844999943, 122.16097229600007 -17.583749844999943, 122.16069451800001 
-17.583749844999943, 122.16069451800001 -17.583472066999946, 122.16041674000007 -17.583472066999946, 122.16013896200002 -17.583472066999946, 122.16013896200002 -17.583194288999948, 122.15986118400008 -17.583194288999948, 122.15986118400008 -17.58291651099995, 122.15902785100002 -17.58291651099995, 122.15902785100002 -17.582638733999943, 122.15875007300008 -17.582638733999943, 122.15875007300008 -17.582360955999945, 122.15847229500002 -17.582360955999945, 122.15847229500002 -17.582638733999943, 122.15791674000002 -17.582638733999943, 122.15763896200008 -17.582638733999943, 122.15763896200008 -17.58291651099995, 122.15736118400002 -17.58291651099995, 122.15708340700007 -17.58291651099995, 122.15708340700007 -17.582638733999943, 122.15680562900002 -17.582638733999943, 122.15680562900002 -17.582360955999945, 122.15652785100008 -17.582360955999945, 122.15652785100008 -17.582083177999948, 122.15652785100008 -17.58180539999995, 122.15625007300002 -17.58180539999995, 122.15625007300002 -17.581249844999945, 122.15597229500008 -17.581249844999945, 122.15597229500008 -17.580972066999948, 122.15541674000008 -17.580972066999948, 122.15541674000008 -17.58069428899995, 122.15513896200002 -17.58069428899995, 122.15458340700002 -17.58069428899995, 122.15458340700002 -17.580416510999953, 122.15430562900008 -17.580416510999953, 122.15347229500003 -17.580416510999953, 122.15347229500003 -17.580138733999945, 122.15291674000002 -17.580138733999945, 122.15291674000002 -17.579860955999948, 122.15263896200008 -17.579860955999948, 122.15263896200008 -17.57958317799995, 122.15236118400003 -17.57958317799995, 122.15236118400003 -17.579305399999953, 122.15208340700008 -17.579305399999953, 122.15208340700008 -17.578749844999948, 122.15180562900002 -17.578749844999948, 122.15180562900002 -17.57847206699995, 122.15152785100008 -17.57847206699995, 122.15152785100008 -17.577916510999955, 122.15125007300003 -17.577916510999955, 122.15125007300003 -17.57513873299996, 122.15125007300003 -17.574860955999952, 122.15125007300003 -17.574583177999955, 122.15125007300003 -17.574305399999957, 122.15125007300003 -17.57180539999996, 122.15097229500009 -17.57180539999996, 122.15097229500009 -17.57069428899996, 122.15069451800002 -17.57069428899996, 122.15069451800002 -17.56958317799996, 122.15041674000008 -17.56958317799996, 122.15041674000008 -17.569305399999962, 122.15041674000008 -17.568749844999957, 122.15013896200003 -17.568749844999957, 122.15013896200003 -17.567916510999964, 122.14986118400009 -17.567916510999964, 122.14986118400009 -17.56736095599996, 122.14958340700002 -17.56736095599996, 122.14958340700002 -17.566805399999964, 122.14930562900008 -17.566805399999964, 122.14930562900008 -17.566527621999967, 122.14902785100003 -17.566527621999967, 122.14902785100003 -17.56597206699996, 122.14875007300009 -17.56597206699996, 122.14875007300009 -17.565694288999964, 122.14847229500003 -17.565694288999964, 122.14847229500003 -17.56513873299997, 122.14819451800008 -17.56513873299997, 122.14819451800008 -17.56486095599996, 122.14763896200009 -17.56486095599996, 122.14763896200009 -17.564583177999964, 122.14736118400003 -17.564583177999964, 122.14736118400003 -17.564305399999967, 122.14708340700008 -17.564305399999967, 122.14708340700008 -17.56402762199997, 122.14680562900003 -17.56402762199997, 122.14680562900003 -17.56374984499996, 122.14652785100009 -17.56374984499996, 122.14652785100009 -17.563472066999964, 122.14625007300003 -17.563472066999964, 122.14625007300003 -17.563194288999966, 122.14625007300003 -17.56291651099997, 
122.14625007300003 -17.56263873299997, 122.14625007300003 -17.562360955999964, 122.14597229500009 -17.562360955999964, 122.14597229500009 -17.56180539999997, 122.14569451800003 -17.56180539999997, 122.14569451800003 -17.559860955999966, 122.14597229500009 -17.559860955999966, 122.14597229500009 -17.55958317799997, 122.14597229500009 -17.559027621999974, 122.14597229500009 -17.558749843999976, 122.14625007300003 -17.558749843999976, 122.14625007300003 -17.55847206699997, 122.14625007300003 -17.55819428899997, 122.14652785100009 -17.55819428899997, 122.14652785100009 -17.557916510999974, 122.14680562900003 -17.557916510999974, 122.14680562900003 -17.556805399999973, 122.14708340700008 -17.556805399999973, 122.14708340700008 -17.550972066999975, 122.15013896200003 -17.550972066999975, 122.15013896200003 -17.551249843999926, 122.15041674000008 -17.551249843999926, 122.15041674000008 -17.550694288999978, 122.15069451800002 -17.550694288999978, 122.15069451800002 -17.549583177999978, 122.15097229500009 -17.549583177999978, 122.15097229500009 -17.54874984399993, 122.15125007300003 -17.54874984399993, 122.15125007300003 -17.547916510999983, 122.15152785100008 -17.547916510999983, 122.15152785100008 -17.545416510999928, 122.15125007300003 -17.545416510999928, 122.15125007300003 -17.54402762199993, 122.15097229500009 -17.54402762199993, 122.15097229500009 -17.539027621999935, 122.15541674000008 -17.539027621999935, 122.15541674000008 -17.538749843999938, 122.15736118400002 -17.538749843999938, 122.15736118400002 -17.53847206699993, 122.16486118400007 -17.53847206699993, 122.16486118400007 -17.538749843999938, 122.16513896200001 -17.538749843999938, 122.16513896200001 -17.539027621999935, 122.16569451800001 -17.539027621999935, 122.16569451800001 -17.539305399999932, 122.16597229600006 -17.539305399999932, 122.16597229600006 -17.53958317799993, 122.16680562900001 -17.53958317799993, 122.16680562900001 -17.539860954999938, 122.175138962 -17.539860954999938, 122.175138962 -17.53958317799993, 122.17597229600005 -17.53958317799993, 122.17597229600005 -17.539305399999932, 122.17652785100006 -17.539305399999932, 122.17652785100006 -17.539027621999935, 122.17708340700005 -17.539027621999935, 122.17708340700005 -17.538749843999938, 122.17986118500005 -17.538749843999938, 122.17986118500005 -17.539027621999935, 122.18013896200011 -17.539027621999935, 122.18013896200011 -17.539305399999932, 122.18041674000006 -17.539305399999932, 122.18041674000006 -17.53958317799993, 122.18097229600005 -17.53958317799993, 122.18097229600005 -17.539860954999938, 122.18125007300011 -17.539860954999938, 122.18125007300011 -17.540138732999935, 122.18180562900011 -17.540138732999935, 122.18180562900011 -17.540416510999933, 122.1823611850001 -17.540416510999933, 122.1823611850001 -17.54069428899993, 122.18291674000011 -17.54069428899993, 122.18291674000011 -17.540972066999927, 122.1834722960001 -17.540972066999927, 122.1834722960001 -17.541249843999935, 122.18402785100011 -17.541249843999935, 122.18402785100011 -17.541527621999933, 122.18430562900005 -17.541527621999933, 122.18430562900005 -17.54180539999993, 122.1845834070001 -17.54180539999993, 122.1845834070001 -17.542360954999936, 122.18486118500005 -17.542360954999936, 122.18486118500005 -17.542638732999933, 122.18513896200011 -17.542638732999933, 122.18513896200011 -17.54291651099993, 122.18541674000005 -17.54291651099993, 122.18541674000005 -17.543472066999982, 122.1856945180001 -17.543472066999982, 122.1856945180001 -17.543749843999933, 122.18597229600005 
-17.543749843999933, 122.18597229600005 -17.54402762199993, 122.18625007300011 -17.54402762199993, 122.18625007300011 -17.544305399999928, 122.18652785100005 -17.544305399999928, 122.18652785100005 -17.544583177999982, 122.18708340700005 -17.544583177999982, 122.18708340700005 -17.544860954999933, 122.1879167400001 -17.544860954999933, 122.1879167400001 -17.54513873299993, 122.1884722960001 -17.54513873299993, 122.1884722960001 -17.545416510999928, 122.1890278510001 -17.545416510999928, 122.1890278510001 -17.545694288999982, 122.18986118500004 -17.545694288999982, 122.18986118500004 -17.54597206699998, 122.19986118500003 -17.54597206699998, 122.19986118500003 -17.54624984399993, 122.20041674000004 -17.54624984399993, 122.20041674000004 -17.546527621999928, 122.20069451800009 -17.546527621999928, 122.20069451800009 -17.546805399999982, 122.20097229600003 -17.546805399999982, 122.20097229600003 -17.54708317799998, 122.20125007400009 -17.54708317799998, 122.20125007400009 -17.54736095499993, 122.20152785100004 -17.54736095499993, 122.20152785100004 -17.54763873299993, 122.20180562900009 -17.54763873299993, 122.20180562900009 -17.547916510999983, 122.20208340700003 -17.547916510999983, 122.20208340700003 -17.54819428899998, 122.20236118500009 -17.54819428899998, 122.20236118500009 -17.548472066999977, 122.20375007400003 -17.548472066999977, 122.20375007400003 -17.54874984399993, 122.20458340700009 -17.54874984399993, 122.20458340700009 -17.549027621999983, 122.20513896300008 -17.549027621999983, 122.20513896300008 -17.54930539999998, 122.20541674000003 -17.54930539999998, 122.20541674000003 -17.549583177999978, 122.20597229600003 -17.549583177999978, 122.20597229600003 -17.549860955999975, 122.20625007400008 -17.549860955999975, 122.20625007400008 -17.550138732999926, 122.20652785100003 -17.550138732999926, 122.20652785100003 -17.55041651099998, 122.20680562900009 -17.55041651099998, 122.20680562900009 -17.550694288999978, 122.20708340700003 -17.550694288999978, 122.20708340700003 -17.550972066999975, 122.20736118500008 -17.550972066999975, 122.20736118500008 -17.551249843999926, 122.20763896300002 -17.551249843999926, 122.20763896300002 -17.55152762199998, 122.20791674000009 -17.55152762199998, 122.20791674000009 -17.551805399999978, 122.20819451800003 -17.551805399999978, 122.20819451800003 -17.552083177999975, 122.20847229600008 -17.552083177999975, 122.20847229600008 -17.552360955999973, 122.20875007400002 -17.552360955999973, 122.20875007400002 -17.55263873299998, 122.20902785100009 -17.55263873299998, 122.20902785100009 -17.552916510999978, 122.20930562900003 -17.552916510999978, 122.20930562900003 -17.553194288999975, 122.20958340700008 -17.553194288999975, 122.20958340700008 -17.553472066999973, 122.21013896300008 -17.553472066999973, 122.21013896300008 -17.55374984399998, 122.21041674000003 -17.55374984399998, 122.21041674000003 -17.55402762199998, 122.21069451800008 -17.55402762199998, 122.21069451800008 -17.554305399999976, 122.21097229600002 -17.554305399999976, 122.21097229600002 -17.554583177999973, 122.21125007400008 -17.554583177999973, 122.21125007400008 -17.55486095599997, 122.21152785100003 -17.55486095599997, 122.21152785100003 -17.55513873299998, 122.21180562900008 -17.55513873299998, 122.21180562900008 -17.555416510999976, 122.21208340700002 -17.555416510999976, 122.21208340700002 -17.555694288999973, 122.21236118500008 -17.555694288999973, 122.21236118500008 -17.55597206699997, 122.21319451800002 -17.55597206699997, 122.21319451800002 -17.55624984399998, 
122.21402785100008 -17.55624984399998, 122.21402785100008 -17.556527621999976, 122.21486118500002 -17.556527621999976, 122.21486118500002 -17.556805399999973, 122.21541674000002 -17.556805399999973, 122.21541674000002 -17.55708317799997, 122.21569451800008 -17.55708317799997, 122.21569451800008 -17.557360955999968, 122.21625007400007 -17.557360955999968, 122.21625007400007 -17.557638732999976, 122.21680562900008 -17.557638732999976, 122.21680562900008 -17.557916510999974, 122.21736118500007 -17.557916510999974, 122.21736118500007 -17.55819428899997, 122.21791674000008 -17.55819428899997, 122.21791674000008 -17.55847206699997, 122.21819451800002 -17.55847206699997, 122.21819451800002 -17.558749843999976, 122.21847229600007 -17.558749843999976, 122.21847229600007 -17.559027621999974, 122.21875007400001 -17.559027621999974, 122.21875007400001 -17.55930539999997, 122.21930562900002 -17.55930539999997, 122.21930562900002 -17.55958317799997, 122.21958340700007 -17.55958317799997, 122.21958340700007 -17.559860955999966, 122.22041674000002 -17.559860955999966, 122.22041674000002 -17.560138732999974, 122.22097229600001 -17.560138732999974, 122.22097229600001 -17.56041651099997, 122.22152785200001 -17.56041651099997, 122.22152785200001 -17.56069428899997, 122.22180562900007 -17.56069428899997, 122.22180562900007 -17.560972066999966, 122.22208340700001 -17.560972066999966, 122.22208340700001 -17.561249843999974, 122.22236118500007 -17.561249843999974, 122.22236118500007 -17.56152762199997, 122.22263896300001 -17.56152762199997, 122.22263896300001 -17.56180539999997, 122.22291674000007 -17.56180539999997, 122.22291674000007 -17.562083177999966, 122.22319451800001 -17.562083177999966, 122.22319451800001 -17.562360955999964, 122.22347229600007 -17.562360955999964, 122.22347229600007 -17.56263873299997, 122.22375007400001 -17.56263873299997, 122.22375007400001 -17.56291651099997, 122.22402785200006 -17.56291651099997, 122.22402785200006 -17.563194288999966, 122.22430562900001 -17.563194288999966, 122.22430562900001 -17.563472066999964, 122.23097229600012 -17.563472066999964, 122.23097229600012 -17.56374984499996, 122.23125007400006 -17.56374984499996, 122.23125007400006 -17.56402762199997)))"^^geo:wktLiteral ] ; - geo:hasMetricArea 3.455107e+07 ; - prez:link "/s/datasets/ldgovau:geofabric/collections/geofab:catchments/items/hydrd:102208962" . - -geo:Feature skos:definition "A discrete spatial phenomenon in a universe of discourse."@en ; - skos:prefLabel "Feature"@en . - diff --git a/tests/data/profiles/remote_profile.ttl b/tests/data/profiles/remote_profile.ttl deleted file mode 100755 index dba42432..00000000 --- a/tests/data/profiles/remote_profile.ttl +++ /dev/null @@ -1,19 +0,0 @@ -PREFIX altr-ext: -PREFIX dcat: -PREFIX dcterms: -PREFIX geo: -PREFIX owl: -PREFIX prez: -PREFIX prof: -PREFIX rdf: -PREFIX rdfs: -PREFIX sh: -PREFIX skos: -PREFIX xsd: - - - a prof:Profile , prez:SpacePrezProfile ; - dcterms:description "an example profile" ; - dcterms:identifier "exprof"^^xsd:token ; - dcterms:title "Example" ; - . diff --git a/tests/data/search/expected_responses/filter_to_focus_search.ttl b/tests/data/search/expected_responses/filter_to_focus_search.ttl deleted file mode 100755 index dc2006b7..00000000 --- a/tests/data/search/expected_responses/filter_to_focus_search.ttl +++ /dev/null @@ -1,84 +0,0 @@ -@prefix dcterms: . -@prefix ns1: . -@prefix prez: . -@prefix rdf: . -@prefix rdfs: . -@prefix schema: . -@prefix skos: . -@prefix xsd: . 
- - dcterms:identifier "brhl-prps:pggd"^^prez:identifier ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - skos:definition "Borehole purposes applicable to regulatory notification forms."@en ; - skos:prefLabel "PGGD selection"@en . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . - -dcterms:provenance rdfs:label "Provenance"@en ; - dcterms:description "The statement may include a description of any changes successive custodians made to the resource."@en . - -rdf:type rdfs:label "type" . - -rdfs:isDefinedBy rdfs:label "isDefinedBy" . - -rdfs:label rdfs:label "label" . - -skos:altLabel rdfs:label "alternative label"@en ; - skos:definition "An alternative lexical label for a resource."@en . - -skos:inScheme rdfs:label "is in scheme"@en ; - skos:definition "Relates a resource (for example a concept) to a concept scheme in which it is included."@en . - -skos:topConceptOf rdfs:label "is top concept in scheme"@en ; - skos:definition "Relates a concept to the concept scheme that it is a top level concept of."@en . - -schema:color rdfs:label "color" . - - a prez:SearchResult ; - prez:searchResultMatch "Greenhouse Gas Storage"@en ; - prez:searchResultPredicate skos:prefLabel ; - prez:searchResultURI ; - prez:searchResultWeight 10 . - - a prez:SearchResult ; - prez:searchResultMatch "Wells and bores drilled under permits governed by the Queensland Greenhouse Gas Storage Act 2009"@en ; - prez:searchResultPredicate skos:definition ; - prez:searchResultURI ; - prez:searchResultWeight 10 . - -skos:Concept rdfs:label "Concept"@en ; - skos:definition "An idea or notion; a unit of thought."@en . - -skos:definition rdfs:label "definition"@en ; - skos:definition "A statement or formal explanation of the meaning of a concept."@en . - -skos:prefLabel rdfs:label "preferred label"@en ; - skos:definition "The preferred lexical label for a resource, in a given language."@en . - - skos:definition "An entry that is seen as having a reasonable measure of stability, may be used to mark the full adoption of a previously 'experimental' entry."@en ; - skos:prefLabel "stable"@en ; - schema:color "#2e8c09" . - - a skos:Concept ; - dcterms:identifier "brhl-prps:greenhouse-gas-storage"^^prez:identifier ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - rdfs:isDefinedBy ; - skos:altLabel "GHG"@en ; - skos:definition "Wells and bores drilled under permits governed by the Queensland Greenhouse Gas Storage Act 2009"@en ; - skos:inScheme ; - skos:prefLabel "Greenhouse Gas Storage"@en ; - skos:topConceptOf ; - prez:link "/v/collection/brhl-prps:pggd/brhl-prps:greenhouse-gas-storage", - "/v/vocab/def2:borehole-purpose/brhl-prps:greenhouse-gas-storage" . 
- - dcterms:identifier "def2:borehole-purpose"^^prez:identifier ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - ns1:status ; - skos:definition "The primary purpose of a borehole based on the legislative State Act and/or the resources industry sector."@en ; - skos:prefLabel "Borehole Purpose"@en ; - prez:link "/v/vocab/def2:borehole-purpose" . - diff --git a/tests/data/search/expected_responses/focus_to_filter_search.ttl b/tests/data/search/expected_responses/focus_to_filter_search.ttl deleted file mode 100755 index 0247dad4..00000000 --- a/tests/data/search/expected_responses/focus_to_filter_search.ttl +++ /dev/null @@ -1,167 +0,0 @@ -@prefix dcterms: . -@prefix geo: . -@prefix ns1: . -@prefix prez: . -@prefix rdf: . -@prefix rdfs: . -@prefix skos: . -@prefix xsd: . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . - -dcterms:provenance rdfs:label "Provenance"@en ; - dcterms:description "The statement may include a description of any changes successive custodians made to the resource."@en . - - dcterms:identifier "cgi:contacttype"^^prez:identifier ; - dcterms:provenance "this vocabulary" ; - skos:definition "All Concepts in this vocabulary" ; - skos:prefLabel "Contact Type - All Concepts"@en . - -rdf:type rdfs:label "type" . - -rdfs:isDefinedBy rdfs:label "isDefinedBy" . - -rdfs:label rdfs:label "label" . - -skos:broader rdfs:label "has broader"@en ; - skos:definition "Relates a concept to a concept that is more general in meaning."@en . - -skos:inScheme rdfs:label "is in scheme"@en ; - skos:definition "Relates a resource (for example a concept) to a concept scheme in which it is included."@en . - - dcterms:identifier "preztest:dataset"^^prez:identifier . - - dcterms:identifier "preztest:feature-collection"^^prez:identifier . - - a prez:SearchResult ; - prez:searchResultMatch "metasomatic facies contact"@en ; - prez:searchResultPredicate skos:prefLabel ; - prez:searchResultURI ; - prez:searchResultWeight 10 . - - a prez:SearchResult ; - prez:searchResultMatch "A metasomatic facies contact separating rocks that have undergone alteration of a particular facies from those that have undergone metasomatism of another facies. Alteration is a kind of metasomatism that does not introduce economically important minerals."@en ; - prez:searchResultPredicate skos:definition ; - prez:searchResultURI ; - prez:searchResultWeight 10 . - - a prez:SearchResult ; - prez:searchResultMatch "A metasomatic facies contact separating rocks which have been mineralised and contain a particular mineral assemblage from those which contain a different assemblage. Mineralization is a kind of metasomatism that introduces ecomomically important minerals."@en ; - prez:searchResultPredicate skos:definition ; - prez:searchResultURI ; - prez:searchResultWeight 10 . 
- - a prez:SearchResult ; - prez:searchResultMatch "A metamorphic contact separating rocks that have undergone metasomatism of a particular facies from those that have undergone metasomatism of another facies. Metasomatism is distinguished from metamorphism by significant changes in bulk chemistry of the affected rock."@en ; - prez:searchResultPredicate skos:definition ; - prez:searchResultURI ; - prez:searchResultWeight 10 . - - a prez:SearchResult ; - prez:searchResultMatch "metamorphic facies contact"@en ; - prez:searchResultPredicate skos:prefLabel ; - prez:searchResultURI ; - prez:searchResultWeight 10 . - - a prez:SearchResult ; - prez:searchResultMatch "mineralisation assemblage contact"@en ; - prez:searchResultPredicate skos:prefLabel ; - prez:searchResultURI ; - prez:searchResultWeight 10 . - - a prez:SearchResult ; - prez:searchResultMatch "A metamorphic contact separating rocks that have undergone metamorphism of a particular facies from those that have undergone metamorphism of another facies."@en ; - prez:searchResultPredicate skos:definition ; - prez:searchResultURI ; - prez:searchResultWeight 10 . - - a prez:SearchResult ; - prez:searchResultMatch "alteration facies contact"@en ; - prez:searchResultPredicate skos:prefLabel ; - prez:searchResultURI ; - prez:searchResultWeight 10 . - -geo:Feature skos:definition "A discrete spatial phenomenon in a universe of discourse."@en ; - skos:prefLabel "Feature"@en . - - a geo:Feature, - skos:Concept ; - dcterms:identifier "alteration_facies_contact"^^xsd:token, - "cntcttyp:alteration_facies_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - rdfs:isDefinedBy ; - skos:broader ; - skos:definition "A metasomatic facies contact separating rocks that have undergone alteration of a particular facies from those that have undergone metasomatism of another facies. Alteration is a kind of metasomatism that does not introduce economically important minerals."@en ; - skos:inScheme ; - skos:prefLabel "alteration facies contact"@en ; - prez:link "/s/datasets/preztest:dataset/collections/preztest:feature-collection/items/cntcttyp:alteration_facies_contact", - "/v/collection/cgi:contacttype/cntcttyp:alteration_facies_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:alteration_facies_contact" . - - a skos:Concept ; - dcterms:identifier "metamorphic_facies_contact"^^xsd:token, - "cntcttyp:metamorphic_facies_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - rdfs:isDefinedBy ; - skos:broader ; - skos:definition "A metamorphic contact separating rocks that have undergone metamorphism of a particular facies from those that have undergone metamorphism of another facies."@en ; - skos:inScheme ; - skos:prefLabel "metamorphic facies contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:metamorphic_facies_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:metamorphic_facies_contact" . - - a skos:Concept ; - dcterms:identifier "metasomatic_facies_contact"^^xsd:token, - "cntcttyp:metasomatic_facies_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - rdfs:isDefinedBy ; - skos:broader ; - skos:definition "A metamorphic contact separating rocks that have undergone metasomatism of a particular facies from those that have undergone metasomatism of another facies. 
Metasomatism is distinguished from metamorphism by significant changes in bulk chemistry of the affected rock."@en ; - skos:inScheme ; - skos:prefLabel "metasomatic facies contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:metasomatic_facies_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:metasomatic_facies_contact" . - - a skos:Concept ; - dcterms:identifier "mineralisation_assemblage_contact"^^xsd:token, - "cntcttyp:mineralisation_assemblage_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - rdfs:isDefinedBy ; - skos:broader ; - skos:definition "A metasomatic facies contact separating rocks which have been mineralised and contain a particular mineral assemblage from those which contain a different assemblage. Mineralization is a kind of metasomatism that introduces ecomomically important minerals."@en ; - skos:inScheme ; - skos:prefLabel "mineralisation assemblage contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:mineralisation_assemblage_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:mineralisation_assemblage_contact" . - - dcterms:identifier "cntcttyp:metamorphic_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "Lithogenetic contact separating rocks that have different lithologic properties related to metamorphism, metasomatism, alteration, or mineralization. Generally separates metamorphic rock bodies, but may separate metamorphosed (broadly speaking) and non-metamorphosed rock."@en ; - skos:prefLabel "metamorphic contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:metamorphic_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:metamorphic_contact" . - -skos:Concept rdfs:label "Concept"@en ; - skos:definition "An idea or notion; a unit of thought."@en . - -skos:definition rdfs:label "definition"@en ; - skos:definition "A statement or formal explanation of the meaning of a concept."@en . - -skos:prefLabel rdfs:label "preferred label"@en ; - skos:definition "The preferred lexical label for a resource, in a given language."@en . - - dcterms:identifier "2016.01:contacttype"^^prez:identifier ; - dcterms:provenance "Original set of terms from the GeosciML standard" ; - skos:definition "This scheme describes the concept space for Contact Type concepts, as defined by the IUGS Commission for Geoscience Information (CGI) Geoscience Terminology Working Group. By extension, it includes all concepts in this conceptScheme, as well as concepts in any previous versions of the scheme. Designed for use in the contactType property in GeoSciML Contact elements."@en ; - skos:prefLabel "Contact Type"@en ; - prez:link "/v/vocab/2016.01:contacttype" . - diff --git a/tests/data/spaceprez/expected_responses/dataset_anot.ttl b/tests/data/spaceprez/expected_responses/dataset_anot.ttl deleted file mode 100755 index 325d30a2..00000000 --- a/tests/data/spaceprez/expected_responses/dataset_anot.ttl +++ /dev/null @@ -1,79 +0,0 @@ -@prefix dcat: . -@prefix dcterms: . -@prefix geo: . -@prefix prez: . -@prefix rdf: . -@prefix rdfs: . -@prefix skos: . -@prefix xsd: . 
- - a dcat:Dataset ; - dcterms:description "Example floods, roads, catchment and facilities in the Sandgate area"@en ; - dcterms:identifier "sandgate"^^xsd:token, - "exds:sandgate"^^prez:identifier ; - dcterms:title "Sandgate example dataset"@en ; - geo:hasBoundingBox [ a geo:Geometry ; - geo:asWKT "POLYGON ((152.9075 -27.42,153.16 -27.42,153.16 -27.2234024,152.9075 -27.2234024,152.9075 -27.42))"^^geo:wktLiteral ] ; - rdfs:member , - , - , - ; - prez:link "/s/catalogs/exds:sandgate" ; - prez:members [ prez:link "/s/catalogs/exds:sandgate/collections" ] . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . - -dcterms:title rdfs:label "Title"@en . - -geo:asWKT skos:definition "The WKT serialization of a Geometry"@en ; - skos:prefLabel "as WKT"@en . - -geo:hasBoundingBox skos:definition "The minimum or smallest bounding or enclosing box of a given Feature."@en ; - skos:prefLabel "has bounding box"@en . - -rdf:type rdfs:label "type" . - -rdfs:label rdfs:label "label" . - -rdfs:member rdfs:label "member" . - -skos:definition rdfs:label "definition"@en ; - skos:definition "A statement or formal explanation of the meaning of a concept."@en . - -skos:prefLabel rdfs:label "preferred label"@en ; - skos:definition "The preferred lexical label for a resource, in a given language."@en . - - rdfs:label "Geofabric Contracted Catchments"@en ; - dcterms:description "Hydrological catchments that are 'contracted', that is, guaranteed, to appear on multiple Geofabric surface hydrology data products"@en ; - dcterms:identifier "sndgt:catchments"^^prez:identifier ; - dcterms:title "Geofabric Contracted Catchments"@en ; - prez:link "/s/catalogs/exds:sandgate/collections/sndgt:catchments" . - - rdfs:label "Sandgate area demo Facilities"@en ; - dcterms:description "Sandgate area demo Facilities"@en ; - dcterms:identifier "sndgt:facilities"^^prez:identifier ; - dcterms:title "Sandgate area demo Facilities"@en ; - prez:link "/s/catalogs/exds:sandgate/collections/sndgt:facilities" . - - rdfs:label "Sandgate flooded areas"@en ; - dcterms:description "Sandgate flooded areas"@en ; - dcterms:identifier "sndgt:floods"^^prez:identifier ; - dcterms:title "Sandgate flooded areas"@en ; - prez:link "/s/catalogs/exds:sandgate/collections/sndgt:floods" . - - rdfs:label "Sandgate main roads"@en ; - dcterms:description "Sandgate main roads"@en ; - dcterms:identifier "sndgt:roads"^^prez:identifier ; - dcterms:title "Sandgate main roads"@en ; - prez:link "/s/catalogs/exds:sandgate/collections/sndgt:roads" . - -geo:Geometry skos:definition "A coherent set of direct positions in space. The positions are held within a Spatial Reference System (SRS)."@en ; - skos:prefLabel "Geometry"@en . - -dcat:Dataset rdfs:label "Dataset"@en ; - skos:definition "A collection of data, published or curated by a single source, and available for access or download in one or more representations."@en .
- diff --git a/tests/data/spaceprez/expected_responses/dataset_listing_anot.ttl b/tests/data/spaceprez/expected_responses/dataset_listing_anot.ttl deleted file mode 100755 index 08b86eb5..00000000 --- a/tests/data/spaceprez/expected_responses/dataset_listing_anot.ttl +++ /dev/null @@ -1,53 +0,0 @@ -@prefix dcat: . -@prefix dcterms: . -@prefix prez: . -@prefix rdf: . -@prefix rdfs: . -@prefix skos: . -@prefix xsd: . - - a dcat:Dataset ; - dcterms:description "Example floods, roads, catchment and facilities in the Sandgate area"@en ; - dcterms:identifier "exds:sandgate"^^prez:identifier ; - dcterms:title "Sandgate example dataset"@en ; - prez:link "/s/catalogs/exds:sandgate" . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . - -dcterms:title rdfs:label "Title"@en . - -rdf:type rdfs:label "type" . - -rdfs:label rdfs:label "label" . - -skos:definition rdfs:label "definition"@en ; - skos:definition "A statement or formal explanation of the meaning of a concept."@en . - -skos:prefLabel rdfs:label "preferred label"@en ; - skos:definition "The preferred lexical label for a resource, in a given language."@en . - - a dcat:Dataset ; - dcterms:description "The Australian national dataset of important hydrological features such as rivers, water bodies, aquifers and monitoring points"@en ; - dcterms:identifier "ldgovau:geofabric"^^prez:identifier ; - dcterms:title "Australian Hydrological Geospatial Fabric"@en ; - prez:link "/s/catalogs/ldgovau:geofabric" . - - a dcat:Dataset ; - dcterms:description "The Australian Geocoded National Address File (G-NAF) is Australia’s authoritative, geocoded address file. It is built and maintained by Geoscape Australia using authoritative government data."@en ; - dcterms:identifier "ldgovau:gnaf"^^prez:identifier ; - dcterms:title "Geocoded National Address File"@en ; - skos:prefLabel "Geocoded National Address File"@en ; - prez:link "/s/catalogs/ldgovau:gnaf" . - - a dcat:Dataset ; - dcterms:identifier "preztest:dataset"^^prez:identifier ; - prez:link "/s/catalogs/preztest:dataset" . - -dcat:Dataset rdfs:label "Dataset"@en ; - skos:definition "A collection of data, published or curated by a single source, and available for access or download in one or more representations."@en ; - prez:count 4 . - diff --git a/tests/data/spaceprez/expected_responses/feature_anot.ttl b/tests/data/spaceprez/expected_responses/feature_anot.ttl deleted file mode 100755 index f3495b07..00000000 --- a/tests/data/spaceprez/expected_responses/feature_anot.ttl +++ /dev/null @@ -1,64 +0,0 @@ -@prefix dcterms: . -@prefix geo: . -@prefix prez: . -@prefix rdf: . -@prefix rdfs: . -@prefix skos: . -@prefix xsd: . - - dcterms:description "Example floods, roads, catchment and facilities in the Sandgate area"@en ; - dcterms:identifier "exds:sandgate"^^prez:identifier ; - dcterms:title "Sandgate example dataset"@en .
- - rdfs:label "Geofabric Contracted Catchments"@en ; - dcterms:description "Hydrological catchments that are 'contracted', that is, guaranteed, to appear on multiple Geofabric surface hydrology data products"@en ; - dcterms:identifier "sndgt:catchments"^^prez:identifier ; - dcterms:title "Geofabric Contracted Catchments"@en ; - rdfs:member ; - prez:link "/s/catalogs/exds:sandgate/collections/sndgt:catchments" . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . - -dcterms:title rdfs:label "Title"@en . - -geo:asGeoJSON skos:definition "The GeoJSON serialization of a Geometry"@en ; - skos:prefLabel "as GeoJSON"@en . - -geo:asWKT skos:definition "The WKT serialization of a Geometry"@en ; - skos:prefLabel "as WKT"@en . - -geo:hasGeometry skos:definition "A spatial representation for a given Feature."@en ; - skos:prefLabel "has geometry"@en . - -rdf:type rdfs:label "type" . - -rdfs:label rdfs:label "label" . - -rdfs:member rdfs:label "member" . - -skos:definition rdfs:label "definition"@en ; - skos:definition "A statement or formal explanation of the meaning of a concept."@en . - -skos:prefLabel rdfs:label "preferred label"@en ; - skos:definition "The preferred lexical label for a resource, in a given language."@en .
- - a geo:Feature, - ; - rdfs:label "Contracted Catchment 12109444" ; - dcterms:identifier "cc12109444"^^xsd:token, - "sndgt:cc12109444"^^prez:identifier ; - geo:hasGeometry [ a geo:Geometry ; - geo:asGeoJSON "{\"type\": \"Polygon\", \"coordinates\": [[[153.06, -27.28], [153.06, -27.2825], [153.0625, -27.2825], [153.065, -27.2825], [153.065, -27.305], [153.0675, -27.305], [153.0675, -27.31], [153.07, -27.31], [153.07, -27.3125], [153.0725, -27.3125], [153.0725, -27.3175], [153.075, -27.3175], [153.075, -27.32], [153.0775, -27.32], [153.0775, -27.3225], [153.08, -27.3225], [153.085, -27.3225], [153.085, -27.325], [153.0875, -27.325], [153.0875, -27.33], [153.085, -27.33], [153.0825, -27.33], [153.0825, -27.3325], [153.085, -27.3325], [153.085, -27.335], [153.0875, -27.335], [153.09, -27.335], [153.09, -27.3375], [153.0925, -27.3375], [153.0925, -27.34], [153.0975, -27.34], [153.0975, -27.3425], [153.1025, -27.3425], [153.1025, -27.345], [153.1075, -27.345], [153.1075, -27.3475], [153.11, -27.3475], [153.1125, -27.3475], [153.115, -27.3475], [153.115, -27.35], [153.12, -27.35], [153.12, -27.3525], [153.125, -27.3525], [153.125, -27.355], [153.13, -27.355], [153.13, -27.3575], [153.135, -27.3575], [153.135, -27.36], [153.1375, -27.36], [153.1425, -27.36], [153.1475, -27.36], [153.1525, -27.36], [153.1525, -27.3625], [153.155, -27.3625], [153.155, -27.365], [153.1575, -27.365], [153.1575, -27.375], [153.16, -27.375], [153.16, -27.3775], [153.16, -27.38], [153.1575, -27.38], [153.155, -27.38], [153.155, -27.3825], [153.1525, -27.3825], [153.1525, -27.385], [153.15, -27.385], [153.15, -27.3875], [153.145, -27.3875], [153.145, -27.39], [153.1425, -27.39], [153.1425, -27.3925], [153.14, -27.3925], [153.14, -27.395], [153.14, -27.3975], [153.14, -27.4], [153.1375, -27.4], [153.1375, -27.4025], [153.135, -27.4025], [153.135, -27.405], [153.135, -27.4075], [153.135, -27.4125], [153.135, -27.415], [153.13, -27.415], [153.13, -27.4175], [153.1275, -27.4175], [153.1225, -27.4175], [153.1225, -27.42], [153.1175, -27.42], [153.1125, -27.42], [153.1125, -27.4175], [153.11, -27.4175], [153.11, -27.415], [153.1075, -27.415], [153.1075, -27.4125], [153.0975, -27.4125], [153.0975, -27.415], [153.0925, -27.415], [153.0875, -27.415], [153.085, -27.415], [153.08, -27.415], [153.08, -27.4125], [153.0775, -27.4125], [153.0775, -27.41], [153.075, -27.41], [153.075, -27.405], [153.07, -27.405], [153.07, -27.4025], [153.0675, -27.4025], [153.0675, -27.4], [153.065, -27.4], [153.065, -27.3975], [153.0625, -27.3975], [153.0625, -27.395], [153.06, -27.395], [153.06, -27.3925], [153.0275, -27.3925], [153.0275, -27.395], [153.025, -27.395], [153.025, -27.3975], [153.0175, -27.3975], [153.0175, -27.4], [153.0125, -27.4], [153.0125, -27.4025], [153.005, -27.4025], [153.005, -27.405], [153.0025, -27.405], [152.9975, -27.405], [152.9975, -27.4025], [152.9925, -27.4025], [152.9925, -27.4], [152.9875, -27.4], [152.9825, -27.4], [152.9825, -27.3975], [152.98, -27.3975], [152.98, -27.3925], [152.975, -27.3925], [152.975, -27.3875], [152.97, -27.3875], [152.96, -27.3875], [152.96, -27.39], [152.955, -27.39], [152.955, -27.3925], [152.945, -27.3925], [152.94, -27.3925], [152.9375, -27.3925], [152.9375, -27.39], [152.925, -27.39], [152.925, -27.385], [152.925, -27.3825], [152.93, -27.3825], [152.9325, -27.3825], [152.9325, -27.38], [152.9375, -27.38], [152.9375, -27.3825], [152.94, -27.3825], [152.94, -27.38], [152.9475, -27.38], [152.9475, -27.3825], [152.9525, -27.3825], [152.9525, -27.38], [152.965, -27.38], [152.9675, -27.38], 
[152.9675, -27.3775], [152.98, -27.3775], [152.98, -27.375], [152.9825, -27.375], [152.9825, -27.3725], [152.985, -27.3725], [152.985, -27.37], [152.9875, -27.37], [152.9875, -27.3675], [152.99, -27.3675], [152.99, -27.3625], [152.9925, -27.3625], [152.9925, -27.355], [152.995, -27.355], [152.995, -27.3525], [153, -27.3525], [153, -27.35], [153.005, -27.35], [153.01, -27.35], [153.01, -27.3475], [153.0175, -27.3475], [153.0175, -27.335], [153.02, -27.335], [153.02, -27.33], [153.0225, -27.33], [153.0225, -27.3275], [153.025, -27.3275], [153.025, -27.325], [153.0275, -27.325], [153.0275, -27.3225], [153.03, -27.3225], [153.03, -27.32], [153.0325, -27.32], [153.0325, -27.3175], [153.035, -27.3175], [153.035, -27.305], [153.0375, -27.305], [153.0375, -27.3], [153.04, -27.3], [153.04, -27.2975], [153.0425, -27.2975], [153.0425, -27.2825], [153.04, -27.2825], [153.04, -27.28], [153.0425, -27.28], [153.05, -27.28], [153.06, -27.28]]]}"^^geo:geoJSONLiteral ; - geo:asWKT "POLYGON ((153.06 -27.28, 153.06 -27.2825, 153.0625 -27.2825, 153.065 -27.2825, 153.065 -27.305, 153.0675 -27.305, 153.0675 -27.31, 153.07 -27.31, 153.07 -27.3125, 153.0725 -27.3125, 153.0725 -27.3175, 153.075 -27.3175, 153.075 -27.32, 153.0775 -27.32, 153.0775 -27.3225, 153.08 -27.3225, 153.085 -27.3225, 153.085 -27.325, 153.0875 -27.325, 153.0875 -27.33, 153.085 -27.33, 153.0825 -27.33, 153.0825 -27.3325, 153.085 -27.3325, 153.085 -27.335, 153.0875 -27.335, 153.09 -27.335, 153.09 -27.3375, 153.0925 -27.3375, 153.0925 -27.34, 153.0975 -27.34, 153.0975 -27.3425, 153.1025 -27.3425, 153.1025 -27.345, 153.1075 -27.345, 153.1075 -27.3475, 153.11 -27.3475, 153.1125 -27.3475, 153.115 -27.3475, 153.115 -27.35, 153.12 -27.35, 153.12 -27.3525, 153.125 -27.3525, 153.125 -27.355, 153.13 -27.355, 153.13 -27.3575, 153.135 -27.3575, 153.135 -27.36, 153.1375 -27.36, 153.1425 -27.36, 153.1475 -27.36, 153.1525 -27.36, 153.1525 -27.3625, 153.155 -27.3625, 153.155 -27.365, 153.1575 -27.365, 153.1575 -27.375, 153.16 -27.375, 153.16 -27.3775, 153.16 -27.38, 153.1575 -27.38, 153.155 -27.38, 153.155 -27.3825, 153.1525 -27.3825, 153.1525 -27.385, 153.15 -27.385, 153.15 -27.3875, 153.145 -27.3875, 153.145 -27.39, 153.1425 -27.39, 153.1425 -27.3925, 153.14 -27.3925, 153.14 -27.395, 153.14 -27.3975, 153.14 -27.4, 153.1375 -27.4, 153.1375 -27.4025, 153.135 -27.4025, 153.135 -27.405, 153.135 -27.4075, 153.135 -27.4125, 153.135 -27.415, 153.13 -27.415, 153.13 -27.4175, 153.1275 -27.4175, 153.1225 -27.4175, 153.1225 -27.42, 153.1175 -27.42, 153.1125 -27.42, 153.1125 -27.4175, 153.11 -27.4175, 153.11 -27.415, 153.1075 -27.415, 153.1075 -27.4125, 153.0975 -27.4125, 153.0975 -27.415, 153.0925 -27.415, 153.0875 -27.415, 153.085 -27.415, 153.08 -27.415, 153.08 -27.4125, 153.0775 -27.4125, 153.0775 -27.41, 153.075 -27.41, 153.075 -27.405, 153.07 -27.405, 153.07 -27.4025, 153.0675 -27.4025, 153.0675 -27.4, 153.065 -27.4, 153.065 -27.3975, 153.0625 -27.3975, 153.0625 -27.395, 153.06 -27.395, 153.06 -27.3925, 153.0275 -27.3925, 153.0275 -27.395, 153.025 -27.395, 153.025 -27.3975, 153.0175 -27.3975, 153.0175 -27.4, 153.0125 -27.4, 153.0125 -27.4025, 153.005 -27.4025, 153.005 -27.405, 153.0025 -27.405, 152.9975 -27.405, 152.9975 -27.4025, 152.9925 -27.4025, 152.9925 -27.4, 152.9875 -27.4, 152.9825 -27.4, 152.9825 -27.3975, 152.98 -27.3975, 152.98 -27.3925, 152.975 -27.3925, 152.975 -27.3875, 152.97 -27.3875, 152.96 -27.3875, 152.96 -27.39, 152.955 -27.39, 152.955 -27.3925, 152.945 -27.3925, 152.94 -27.3925, 152.9375 -27.3925, 152.9375 -27.39, 152.925 -27.39, 152.925 
-27.385, 152.925 -27.3825, 152.93 -27.3825, 152.9325 -27.3825, 152.9325 -27.38, 152.9375 -27.38, 152.9375 -27.3825, 152.94 -27.3825, 152.94 -27.38, 152.9475 -27.38, 152.9475 -27.3825, 152.9525 -27.3825, 152.9525 -27.38, 152.965 -27.38, 152.9675 -27.38, 152.9675 -27.3775, 152.98 -27.3775, 152.98 -27.375, 152.9825 -27.375, 152.9825 -27.3725, 152.985 -27.3725, 152.985 -27.37, 152.9875 -27.37, 152.9875 -27.3675, 152.99 -27.3675, 152.99 -27.3625, 152.9925 -27.3625, 152.9925 -27.355, 152.995 -27.355, 152.995 -27.3525, 153 -27.3525, 153 -27.35, 153.005 -27.35, 153.01 -27.35, 153.01 -27.3475, 153.0175 -27.3475, 153.0175 -27.335, 153.02 -27.335, 153.02 -27.33, 153.0225 -27.33, 153.0225 -27.3275, 153.025 -27.3275, 153.025 -27.325, 153.0275 -27.325, 153.0275 -27.3225, 153.03 -27.3225, 153.03 -27.32, 153.0325 -27.32, 153.0325 -27.3175, 153.035 -27.3175, 153.035 -27.305, 153.0375 -27.305, 153.0375 -27.3, 153.04 -27.3, 153.04 -27.2975, 153.0425 -27.2975, 153.0425 -27.2825, 153.04 -27.2825, 153.04 -27.28, 153.0425 -27.28, 153.05 -27.28, 153.06 -27.28))"^^geo:wktLiteral ] ; - prez:link "/s/catalogs/exds:sandgate/collections/sndgt:catchments/items/sndgt:cc12109444" . - -geo:Feature skos:definition "A discrete spatial phenomenon in a universe of discourse."@en ; - skos:prefLabel "Feature"@en . - -geo:Geometry skos:definition "A coherent set of direct positions in space. The positions are held within a Spatial Reference System (SRS)."@en ; - skos:prefLabel "Geometry"@en . - diff --git a/tests/data/spaceprez/expected_responses/feature_collection_anot.ttl b/tests/data/spaceprez/expected_responses/feature_collection_anot.ttl deleted file mode 100755 index 80ee4e30..00000000 --- a/tests/data/spaceprez/expected_responses/feature_collection_anot.ttl +++ /dev/null @@ -1,57 +0,0 @@ -@prefix dcterms: . -@prefix geo: . -@prefix prez: . -@prefix rdf: . -@prefix rdfs: . -@prefix skos: . -@prefix xsd: . - - dcterms:description "Example floods, roads, catchment and facilities in the Sandgate area"@en ; - dcterms:identifier "exds:sandgate"^^prez:identifier ; - dcterms:title "Sandgate example dataset"@en ; - rdfs:member ; - prez:link "/s/catalogs/exds:sandgate" . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . - -dcterms:title rdfs:label "Title"@en . - -geo:asWKT skos:definition "The WKT serialization of a Geometry"@en ; - skos:prefLabel "as WKT"@en . - -geo:hasBoundingBox skos:definition "The minimum or smallest bounding or enclosing box of a given Feature."@en ; - skos:prefLabel "has bounding box"@en . - -rdf:type rdfs:label "type" . - -rdfs:label rdfs:label "label" . - -rdfs:member rdfs:label "member" . - -skos:definition rdfs:label "definition"@en ; - skos:definition "A statement or formal explanation of the meaning of a concept."@en . - -skos:prefLabel rdfs:label "preferred label"@en ; - skos:definition "The preferred lexical label for a resource, in a given language."@en .
-
- a geo:FeatureCollection ;
- rdfs:label "Geofabric Contracted Catchments"@en ;
- dcterms:description "Hydrological catchments that are 'contracted', that is, guaranteed, to appear on multiple Geofabric surface hydrology data products"@en ;
- dcterms:identifier "catchments"^^xsd:token,
- "sndgt:catchments"^^prez:identifier ;
- dcterms:title "Geofabric Contracted Catchments"@en ;
- geo:hasBoundingBox [ a geo:Geometry ;
- geo:asWKT "POLYGON ((152.9075 -27.42,153.16 -27.42,153.16 -27.2775,152.9075 -27.2775,152.9075 -27.42))"^^geo:wktLiteral ] ;
- prez:link "/s/catalogs/exds:sandgate/collections/sndgt:catchments" ;
- prez:members [ prez:link "/s/catalogs/exds:sandgate/collections/sndgt:catchments/items" ] .
-
-geo:FeatureCollection skos:definition "A collection of individual Features."@en ;
- skos:prefLabel "Feature Collection"@en .
-
-geo:Geometry skos:definition "A coherent set of direct positions in space. The positions are held within a Spatial Reference System (SRS)."@en ;
- skos:prefLabel "Geometry"@en .
-
diff --git a/tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl b/tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl
deleted file mode 100755
index b610456b..00000000
--- a/tests/data/spaceprez/expected_responses/feature_collection_listing_anot.ttl
+++ /dev/null
@@ -1,69 +0,0 @@
-@prefix dcterms: <http://purl.org/dc/terms/> .
-@prefix geo: <http://www.opengis.net/ont/geosparql#> .
-@prefix prez: <https://prez.dev/> .
-@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
-@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
-@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
-@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
-
- dcterms:description "Example floods, roads, catchment and facilities in the Sandgate area"@en ;
- dcterms:identifier "exds:sandgate"^^prez:identifier ;
- dcterms:title "Sandgate example dataset"@en ;
- rdfs:member ,
- ,
- ,
- ;
- prez:link "/s/catalogs/exds:sandgate" .
-
-dcterms:description rdfs:label "Description"@en ;
- dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en .
-
-dcterms:identifier rdfs:label "Identifier"@en ;
- dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en .
-
-dcterms:title rdfs:label "Title"@en .
-
-rdf:type rdfs:label "type" .
-
-rdfs:label rdfs:label "label" .
-
-rdfs:member rdfs:label "member" .
-
-skos:definition rdfs:label "definition"@en ;
- skos:definition "A statement or formal explanation of the meaning of a concept."@en .
-
-skos:prefLabel rdfs:label "preferred label"@en ;
- skos:definition "The preferred lexical label for a resource, in a given language."@en .
-
- a geo:FeatureCollection ;
- rdfs:label "Geofabric Contracted Catchments"@en ;
- dcterms:description "Hydrological catchments that are 'contracted', that is, guaranteed, to appear on multiple Geofabric surface hydrology data products"@en ;
- dcterms:identifier "sndgt:catchments"^^prez:identifier ;
- dcterms:title "Geofabric Contracted Catchments"@en ;
- prez:link "/s/catalogs/exds:sandgate/collections/sndgt:catchments" .
-
- a geo:FeatureCollection ;
- rdfs:label "Sandgate area demo Facilities"@en ;
- dcterms:description "Sandgate area demo Facilities"@en ;
- dcterms:identifier "sndgt:facilities"^^prez:identifier ;
- dcterms:title "Sandgate area demo Facilities"@en ;
- prez:link "/s/catalogs/exds:sandgate/collections/sndgt:facilities" .
-
- a geo:FeatureCollection ;
- rdfs:label "Sandgate flooded areas"@en ;
- dcterms:description "Sandgate flooded areas"@en ;
- dcterms:identifier "sndgt:floods"^^prez:identifier ;
- dcterms:title "Sandgate flooded areas"@en ;
- prez:link "/s/catalogs/exds:sandgate/collections/sndgt:floods" .
-
- a geo:FeatureCollection ;
- rdfs:label "Sandgate main roads"@en ;
- dcterms:description "Sandgate main roads"@en ;
- dcterms:identifier "sndgt:roads"^^prez:identifier ;
- dcterms:title "Sandgate main roads"@en ;
- prez:link "/s/catalogs/exds:sandgate/collections/sndgt:roads" .
-
-geo:FeatureCollection skos:definition "A collection of individual Features."@en ;
- skos:prefLabel "Feature Collection"@en ;
- prez:count 4 .
-
diff --git a/tests/data/spaceprez/expected_responses/feature_listing_anot.ttl b/tests/data/spaceprez/expected_responses/feature_listing_anot.ttl
deleted file mode 100755
index 8607dab8..00000000
--- a/tests/data/spaceprez/expected_responses/feature_listing_anot.ttl
+++ /dev/null
@@ -1,53 +0,0 @@
-@prefix dcterms: <http://purl.org/dc/terms/> .
-@prefix geo: <http://www.opengis.net/ont/geosparql#> .
-@prefix prez: <https://prez.dev/> .
-@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
-@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
-@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
-@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
-
- dcterms:description "Example floods, roads, catchment and facilities in the Sandgate area"@en ;
- dcterms:identifier "exds:sandgate"^^prez:identifier ;
- dcterms:title "Sandgate example dataset"@en .
-
- rdfs:label "Geofabric Contracted Catchments"@en ;
- dcterms:description "Hydrological catchments that are 'contracted', that is, guaranteed, to appear on multiple Geofabric surface hydrology data products"@en ;
- dcterms:identifier "sndgt:catchments"^^prez:identifier ;
- dcterms:title "Geofabric Contracted Catchments"@en ;
- rdfs:member ,
- ;
- prez:link "/s/catalogs/exds:sandgate/collections/sndgt:catchments" .
-
-dcterms:description rdfs:label "Description"@en ;
- dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en .
-
-dcterms:identifier rdfs:label "Identifier"@en ;
- dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en .
-
-dcterms:title rdfs:label "Title"@en .
-
-rdf:type rdfs:label "type" .
-
-rdfs:label rdfs:label "label" .
-
-rdfs:member rdfs:label "member" .
-
-skos:definition rdfs:label "definition"@en ;
- skos:definition "A statement or formal explanation of the meaning of a concept."@en .
-
-skos:prefLabel rdfs:label "preferred label"@en ;
- skos:definition "The preferred lexical label for a resource, in a given language."@en .
-
- a geo:Feature ;
- rdfs:label "Contracted Catchment 12109444" ;
- dcterms:identifier "sndgt:cc12109444"^^prez:identifier ;
- prez:link "/s/catalogs/exds:sandgate/collections/sndgt:catchments/items/sndgt:cc12109444" .
-
- a geo:Feature ;
- rdfs:label "Contracted Catchment 12109445" ;
- dcterms:identifier "sndgt:cc12109445"^^prez:identifier ;
- prez:link "/s/catalogs/exds:sandgate/collections/sndgt:catchments/items/sndgt:cc12109445" .
-
-geo:Feature skos:definition "A discrete spatial phenomenon in a universe of discourse."@en ;
- skos:prefLabel "Feature"@en ;
- prez:count 2 .
diff --git a/tests/data/spaceprez/input/geofabric_small.ttl b/tests/data/spaceprez/input/geofabric_small.ttl deleted file mode 100755 index faa52915..00000000 --- a/tests/data/spaceprez/input/geofabric_small.ttl +++ /dev/null @@ -1,110 +0,0 @@ -@prefix ahgf: . -@prefix dcat: . -@prefix dcterms: . -@prefix geo: . -@prefix owl: . -@prefix prov: . -@prefix rdf: . -@prefix rdfs: . -@prefix vcard: . -@prefix xsd: . - - - a dcat:Dataset ; - dcterms:identifier "geofabric"^^xsd:token ; - dcterms:title "Australian Hydrological Geospatial Fabric"@en ; - dcterms:description "The Australian national dataset of important hydrological features such as rivers, water bodies, aquifers and monitoring points"@en ; - rdfs:member - ; - dcterms:source "http://www.bom.gov.au/water/geofabric/download.shtml"^^xsd:anyURI ; - geo:hasBoundingBox [ - a geo:Geometry ; - geo:asWKT "POLYGON ((112 -44, 112 -10, 154 -10, 154 -44, 112 -44))"^^geo:wktLiteral ; - ] ; -. - - - a geo:FeatureCollection ; - dcterms:identifier "catchments"^^xsd:token ; - dcterms:title "Contracted Catchments"@en ; - dcterms:description "Contracted Catchments hydrological catchments designed to build stable reporting regions"@en ; - dcterms:isPartOf ; - geo:hasBoundingBox [ - a geo:Geometry ; - geo:asWKT "POLYGON ((112 -44, 112 -10, 154 -10, 154 -44, 112 -44))"^^geo:wktLiteral ; - ] ; - rdfs:member - , - , - , - , - ; -. - - a geo:Feature, - ahgf:ContractedCatchment ; - dcterms:identifier "102208961"^^xsd:token ; - dcterms:title "Fake Catchment Name" ; - dcterms:type ahgf:NonContractedArea ; - geo:hasGeometry [ geo:asWKT "MULTIPOLYGON (((122.2548611850001 -17.495138732999976, 122.2548611850001 -17.495416510999974, 122.25541674100009 -17.495416510999974, 122.25541674100009 -17.49569428799998, 122.25625007400004 -17.49569428799998, 122.25625007400004 -17.49597206599998, 122.25680563000003 -17.49597206599998, 122.25680563000003 -17.496249843999976, 122.25736118500004 -17.496249843999976, 122.25736118500004 -17.496527621999974, 122.25763896300009 -17.496527621999974, 122.25763896300009 -17.49680539999997, 122.25819451900009 -17.49680539999997, 122.25819451900009 -17.49708317699998, 122.25847229600004 -17.49708317699998, 122.25847229600004 -17.497360954999976, 122.25930563000009 -17.497360954999976, 122.25930563000009 -17.497638732999974, 122.26013896300003 -17.497638732999974, 122.26013896300003 -17.49791651099997, 122.26069451900003 -17.49791651099997, 122.26069451900003 -17.49819428799998, 122.26097229600009 -17.49819428799998, 122.26097229600009 -17.498472065999977, 122.26125007400003 -17.498472065999977, 122.26125007400003 -17.498749843999974, 122.26180563000003 -17.498749843999974, 122.26180563000003 -17.49902762199997, 122.26208340700009 -17.49902762199997, 122.26208340700009 -17.49930539999997, 122.26236118500003 -17.49930539999997, 122.26236118500003 -17.499583176999977, 122.26347229600003 -17.499583176999977, 122.26347229600003 -17.499860954999974, 122.26541674100008 -17.499860954999974, 122.26541674100008 -17.50013873299997, 122.26736118500003 -17.50013873299997, 122.26736118500003 -17.50041651099997, 122.26791674100002 -17.50041651099997, 122.26791674100002 -17.500694287999977, 122.26819451900008 -17.500694287999977, 122.26819451900008 -17.500972065999974, 122.26847229600003 -17.500972065999974, 122.26847229600003 -17.50124984399997, 122.26902785200002 -17.50124984399997, 122.26902785200002 -17.50152762199997, 122.26958340700003 -17.50152762199997, 122.26958340700003 -17.501805399999967, 122.26986118500008 -17.501805399999967, 
122.26986118500008 -17.502083176999975, 122.27013896300002 -17.502083176999975, 122.27013896300002 -17.502360954999972, 122.27069451900002 -17.502360954999972, 122.27069451900002 -17.50263873299997, 122.27097229600008 -17.50263873299997, 122.27097229600008 -17.502916510999967, 122.27152785200008 -17.502916510999967, 122.27152785200008 -17.503194287999975, 122.27791674100001 -17.503194287999975, 122.27791674100001 -17.503472065999972, 122.27847229600002 -17.503472065999972, 122.27847229600002 -17.50374984399997, 122.28402785200001 -17.50374984399997, 122.28402785200001 -17.504027621999967, 122.28708340800006 -17.504027621999967, 122.28736118500001 -17.504027621999967, 122.28736118500001 -17.50374984399997, 122.28791674100012 -17.50374984399997, 122.28791674100012 -17.503472065999972, 122.28819451900006 -17.503472065999972, 122.28819451900006 -17.503194287999975, 122.28847229700011 -17.503194287999975, 122.28847229700011 -17.502916510999967, 122.28875007400006 -17.502916510999967, 122.28875007400006 -17.50263873299997, 122.28902785200012 -17.50263873299997, 122.28902785200012 -17.502360954999972, 122.28930563000006 -17.502360954999972, 122.28930563000006 -17.502083176999975, 122.28958340800011 -17.502083176999975, 122.28958340800011 -17.501805399999967, 122.28986118500006 -17.501805399999967, 122.28986118500006 -17.50152762199997, 122.29013896300012 -17.50152762199997, 122.29013896300012 -17.50124984399997, 122.29041674100006 -17.50124984399997, 122.29041674100006 -17.500972065999974, 122.29069451900011 -17.500972065999974, 122.29069451900011 -17.500694287999977, 122.29097229700005 -17.500694287999977, 122.29097229700005 -17.50041651099997, 122.29125007400012 -17.50041651099997, 122.29125007400012 -17.50013873299997, 122.29152785200006 -17.50013873299997, 122.29152785200006 -17.499860954999974, 122.29180563000011 -17.499860954999974, 122.29180563000011 -17.499583176999977, 122.29208340800005 -17.499583176999977, 122.29208340800005 -17.49930539999997, 122.29236118500012 -17.49930539999997, 122.29236118500012 -17.49902762199997, 122.29263896300006 -17.49902762199997, 122.29263896300006 -17.498749843999974, 122.29347229700011 -17.498749843999974, 122.29347229700011 -17.49902762199997, 122.29375007400006 -17.49902762199997, 122.29375007400006 -17.49930539999997, 122.29430563000005 -17.49930539999997, 122.29430563000005 -17.499583176999977, 122.29458340800011 -17.499583176999977, 122.29458340800011 -17.499860954999974, 122.29513896300011 -17.499860954999974, 122.29513896300011 -17.50013873299997, 122.29541674100005 -17.50013873299997, 122.29541674100005 -17.50041651099997, 122.29597229700005 -17.50041651099997, 122.29597229700005 -17.500694287999977, 122.29986118600004 -17.500694287999977, 122.29986118600004 -17.50041651099997, 122.30319451900004 -17.50041651099997, 122.30319451900004 -17.500694287999977, 122.30375007400005 -17.500694287999977, 122.30375007400005 -17.500972065999974, 122.30430563000004 -17.500972065999974, 122.30430563000004 -17.50124984399997, 122.3045834080001 -17.50124984399997, 122.3051389630001 -17.50124984399997, 122.3051389630001 -17.50152762199997, 122.3062500740001 -17.50152762199997, 122.3062500740001 -17.501805399999967, 122.3079167410001 -17.501805399999967, 122.3079167410001 -17.50152762199997, 122.3084722970001 -17.50152762199997, 122.3084722970001 -17.50124984399997, 122.30930563000004 -17.50124984399997, 122.30930563000004 -17.500972065999974, 122.31041674100004 -17.500972065999974, 122.31041674100004 -17.500694287999977, 122.31152785200004 -17.500694287999977, 
122.31152785200004 -17.50041651099997, 122.31208340800003 -17.50041651099997, 122.31208340800003 -17.50013873299997, 122.3129167410001 -17.50013873299997, 122.3129167410001 -17.499860954999974, 122.31319451900004 -17.499860954999974, 122.31319451900004 -17.499583176999977, 122.3140278520001 -17.499583176999977, 122.3140278520001 -17.49930539999997, 122.32069451900009 -17.49930539999997, 122.32069451900009 -17.499583176999977, 122.32152785200003 -17.499583176999977, 122.32152785200003 -17.499860954999974, 122.32458340800008 -17.499860954999974, 122.32458340800008 -17.50013873299997, 122.32958340800008 -17.50013873299997, 122.32958340800008 -17.499860954999974, 122.33013896400007 -17.499860954999974, 122.33013896400007 -17.50041651099997, 122.33041674100002 -17.50041651099997, 122.33041674100002 -17.506527621999965, 122.33013896400007 -17.506527621999965, 122.33013896400007 -17.507360954999967, 122.32986118600002 -17.507360954999967, 122.32986118600002 -17.507916510999962, 122.32958340800008 -17.507916510999962, 122.32958340800008 -17.510972065999965, 122.32930563000002 -17.510972065999965, 122.32930563000002 -17.512083176999965, 122.32902785200008 -17.512083176999965, 122.32902785200008 -17.514027621999958, 122.32875007500002 -17.514027621999958, 122.32875007500002 -17.515416510999955, 122.32847229700008 -17.515416510999955, 122.32847229700008 -17.515694288999953, 122.32819451900002 -17.515694288999953, 122.32819451900002 -17.51597206599996, 122.32791674100008 -17.51597206599996, 122.32791674100008 -17.516249843999958, 122.32763896400002 -17.516249843999958, 122.32763896400002 -17.516527621999955, 122.32736118600008 -17.516527621999955, 122.32736118600008 -17.516805399999953, 122.32708340800002 -17.516805399999953, 122.32708340800002 -17.51708317699996, 122.32680563000008 -17.51708317699996, 122.32680563000008 -17.51736095499996, 122.32652785200003 -17.51736095499996, 122.32652785200003 -17.517638732999956, 122.32625007500008 -17.517638732999956, 122.32625007500008 -17.517916510999953, 122.32597229700002 -17.517916510999953, 122.32597229700002 -17.51819428899995, 122.32569451900008 -17.51819428899995, 122.32569451900008 -17.51847206599996, 122.32541674100003 -17.51847206599996, 122.32541674100003 -17.518749843999956, 122.32513896300009 -17.518749843999956, 122.32513896300009 -17.519027621999953, 122.32458340800008 -17.519027621999953, 122.32458340800008 -17.51930539999995, 122.32430563000003 -17.51930539999995, 122.32430563000003 -17.51958317699996, 122.32375007500002 -17.51958317699996, 122.32375007500002 -17.519860954999956, 122.32319451900003 -17.519860954999956, 122.32319451900003 -17.520138732999953, 122.32291674100009 -17.520138732999953, 122.32291674100009 -17.52041651099995, 122.32263896300003 -17.52041651099995, 122.32263896300003 -17.520694288999948, 122.32236118600008 -17.520694288999948, 122.32236118600008 -17.520972065999956, 122.32208340800003 -17.520972065999956, 122.32208340800003 -17.52180539999995, 122.32180563000009 -17.52180539999995, 122.32180563000009 -17.524583177999943, 122.32152785200003 -17.524583177999943, 122.32152785200003 -17.525416510999946, 122.32125007500008 -17.525416510999946, 122.32125007500008 -17.52847206599995, 122.32097229700003 -17.52847206599995, 122.32097229700003 -17.528749843999947, 122.32069451900009 -17.528749843999947, 122.32069451900009 -17.52930539999994, 122.32041674100003 -17.52930539999994, 122.32041674100003 -17.529860954999947, 122.32013896300009 -17.529860954999947, 122.32013896300009 -17.530138732999944, 122.31986118600003 
-17.530138732999944, 122.31986118600003 -17.53041651099994, 122.31958340800009 -17.53041651099994, 122.31958340800009 -17.53069428899994, 122.31930563000003 -17.53069428899994, 122.31930563000003 -17.531249843999944, 122.31902785200009 -17.531249843999944, 122.31902785200009 -17.531527621999942, 122.31875007500003 -17.531527621999942, 122.31875007500003 -17.532083177999937, 122.31847229700008 -17.532083177999937, 122.31847229700008 -17.53291651099994, 122.31819451900003 -17.53291651099994, 122.31819451900003 -17.533472065999945, 122.31791674100009 -17.533472065999945, 122.31791674100009 -17.534305399999937, 122.31763896300004 -17.534305399999937, 122.31763896300004 -17.534860954999942, 122.31736118600008 -17.534860954999942, 122.31736118600008 -17.535416510999937, 122.31708340800003 -17.535416510999937, 122.31708340800003 -17.535972066999932, 122.31680563000009 -17.535972066999932, 122.31680563000009 -17.536527621999937, 122.31652785200004 -17.536527621999937, 122.31652785200004 -17.536805399999935, 122.31625007500008 -17.536805399999935, 122.31625007500008 -17.537083177999932, 122.31597229700003 -17.537083177999932, 122.31597229700003 -17.53736095499994, 122.31569451900009 -17.53736095499994, 122.31569451900009 -17.537638732999937, 122.31541674100004 -17.537638732999937, 122.31541674100004 -17.537916510999935, 122.3151389630001 -17.537916510999935, 122.3151389630001 -17.538194288999932, 122.31486118600003 -17.538194288999932, 122.31486118600003 -17.53847206699993, 122.31458340800009 -17.53847206699993, 122.31458340800009 -17.538749843999938, 122.31430563000004 -17.538749843999938, 122.31430563000004 -17.539027621999935, 122.3140278520001 -17.539027621999935, 122.3140278520001 -17.539305399999932, 122.31375007500003 -17.539305399999932, 122.31375007500003 -17.53958317799993, 122.31347229700009 -17.53958317799993, 122.31347229700009 -17.539860954999938, 122.31319451900004 -17.539860954999938, 122.31319451900004 -17.540138732999935, 122.3129167410001 -17.540138732999935, 122.3129167410001 -17.540416510999933, 122.31263896300004 -17.540416510999933, 122.31263896300004 -17.54069428899993, 122.31208340800003 -17.54069428899993, 122.31208340800003 -17.540972066999927, 122.31097229700003 -17.540972066999927, 122.31097229700003 -17.541249843999935, 122.3095834080001 -17.541249843999935, 122.3095834080001 -17.541527621999933, 122.30819451900004 -17.541527621999933, 122.30819451900004 -17.54180539999993, 122.3073611860001 -17.54180539999993, 122.3073611860001 -17.542083177999928, 122.3068056300001 -17.542083177999928, 122.3068056300001 -17.542360954999936, 122.3062500740001 -17.542360954999936, 122.3062500740001 -17.542638732999933, 122.3056945190001 -17.542638732999933, 122.3056945190001 -17.54291651099993, 122.30541674100004 -17.54291651099993, 122.30541674100004 -17.543194288999928, 122.3051389630001 -17.543194288999928, 122.3051389630001 -17.543472066999982, 122.30486118600004 -17.543472066999982, 122.30486118600004 -17.543749843999933, 122.3045834080001 -17.543749843999933, 122.3045834080001 -17.54402762199993, 122.30430563000004 -17.54402762199993, 122.30430563000004 -17.544305399999928, 122.3040278520001 -17.544305399999928, 122.3040278520001 -17.544583177999982, 122.3034722970001 -17.544583177999982, 122.3034722970001 -17.544860954999933, 122.30263896300005 -17.544860954999933, 122.30263896300005 -17.54513873299993, 122.30125007400011 -17.54513873299993, 122.30125007400011 -17.545416510999928, 122.30041674100005 -17.545416510999928, 122.30041674100005 -17.545694288999982, 122.29986118600004 
-17.545694288999982, 122.29986118600004 -17.54597206699998, 122.29875007400005 -17.54597206699998, 122.29875007400005 -17.54624984399993, 122.29736118500011 -17.54624984399993, 122.29736118500011 -17.546527621999928, 122.29652785200005 -17.546527621999928, 122.29652785200005 -17.546805399999982, 122.29597229700005 -17.546805399999982, 122.29597229700005 -17.54708317799998, 122.29541674100005 -17.54708317799998, 122.29541674100005 -17.54736095499993, 122.29513896300011 -17.54736095499993, 122.29513896300011 -17.54763873299993, 122.29458340800011 -17.54763873299993, 122.29458340800011 -17.547916510999983, 122.29430563000005 -17.547916510999983, 122.29430563000005 -17.54819428899998, 122.29402785200011 -17.54819428899998, 122.29402785200011 -17.548472066999977, 122.29375007400006 -17.548472066999977, 122.29375007400006 -17.54874984399993, 122.29347229700011 -17.54874984399993, 122.29347229700011 -17.549027621999983, 122.29319451900005 -17.549027621999983, 122.29319451900005 -17.54930539999998, 122.29291674100011 -17.54930539999998, 122.29291674100011 -17.549583177999978, 122.29263896300006 -17.549583177999978, 122.29263896300006 -17.549860955999975, 122.29236118500012 -17.549860955999975, 122.29236118500012 -17.550138732999926, 122.29208340800005 -17.550138732999926, 122.29208340800005 -17.55041651099998, 122.29180563000011 -17.55041651099998, 122.29180563000011 -17.550694288999978, 122.29152785200006 -17.550694288999978, 122.29152785200006 -17.550972066999975, 122.29125007400012 -17.550972066999975, 122.29125007400012 -17.551249843999926, 122.29097229700005 -17.551249843999926, 122.29097229700005 -17.55152762199998, 122.28791674100012 -17.55152762199998, 122.28791674100012 -17.551805399999978, 122.28680563000012 -17.551805399999978, 122.28680563000012 -17.552083177999975, 122.28625007400001 -17.552083177999975, 122.28625007400001 -17.552360955999973, 122.28541674100006 -17.552360955999973, 122.28541674100006 -17.55263873299998, 122.28430563000006 -17.55263873299998, 122.28430563000006 -17.552916510999978, 122.28291674100001 -17.552916510999978, 122.28291674100001 -17.553194288999975, 122.28236118500001 -17.553194288999975, 122.28236118500001 -17.553472066999973, 122.28208340800006 -17.553472066999973, 122.28208340800006 -17.55374984399998, 122.28152785200007 -17.55374984399998, 122.28152785200007 -17.55402762199998, 122.28125007400001 -17.55402762199998, 122.28125007400001 -17.554305399999976, 122.28041674100007 -17.554305399999976, 122.28041674100007 -17.554583177999973, 122.28013896300001 -17.554583177999973, 122.28013896300001 -17.55486095599997, 122.27986118500007 -17.55486095599997, 122.27986118500007 -17.55513873299998, 122.27958340800001 -17.55513873299998, 122.27958340800001 -17.555416510999976, 122.27930563000007 -17.555416510999976, 122.27930563000007 -17.555694288999973, 122.27902785200001 -17.555694288999973, 122.27902785200001 -17.55597206699997, 122.27875007400007 -17.55597206699997, 122.27875007400007 -17.55624984399998, 122.27847229600002 -17.55624984399998, 122.27847229600002 -17.556527621999976, 122.27819451900007 -17.556527621999976, 122.27819451900007 -17.556805399999973, 122.27791674100001 -17.556805399999973, 122.27791674100001 -17.55708317799997, 122.27375007400008 -17.55708317799997, 122.27375007400008 -17.557360955999968, 122.27319451900007 -17.557360955999968, 122.27319451900007 -17.557638732999976, 122.27291674100002 -17.557638732999976, 122.27291674100002 -17.557916510999974, 122.27263896300008 -17.557916510999974, 122.27263896300008 -17.55819428899997, 
122.26930563000008 -17.55819428899997, 122.26930563000008 -17.55847206699997, 122.26819451900008 -17.55847206699997, 122.26819451900008 -17.558749843999976, 122.26541674100008 -17.558749843999976, 122.26541674100008 -17.559027621999974, 122.26430563000008 -17.559027621999974, 122.26430563000008 -17.55930539999997, 122.25902785200003 -17.55930539999997, 122.25902785200003 -17.559027621999974, 122.25625007400004 -17.559027621999974, 122.25625007400004 -17.55930539999997, 122.25430563000009 -17.55930539999997, 122.25430563000009 -17.55958317799997, 122.2531945180001 -17.55958317799997, 122.2531945180001 -17.559860955999966, 122.2526389630001 -17.559860955999966, 122.2526389630001 -17.560138732999974, 122.25180563000004 -17.560138732999974, 122.25180563000004 -17.56041651099997, 122.25069451800005 -17.56041651099997, 122.25069451800005 -17.56069428899997, 122.2493056300001 -17.56069428899997, 122.2493056300001 -17.560972066999966, 122.2487500740001 -17.560972066999966, 122.2487500740001 -17.561249843999974, 122.24347229600005 -17.561249843999974, 122.24347229600005 -17.560972066999966, 122.24236118500005 -17.560972066999966, 122.24236118500005 -17.56069428899997, 122.23930562900011 -17.56069428899997, 122.23930562900011 -17.560972066999966, 122.23902785200005 -17.560972066999966, 122.23902785200005 -17.561249843999974, 122.23875007400011 -17.561249843999974, 122.23875007400011 -17.56152762199997, 122.23847229600005 -17.56152762199997, 122.23847229600005 -17.562083177999966, 122.23819451800011 -17.562083177999966, 122.23819451800011 -17.562360955999964, 122.23791674100005 -17.562360955999964, 122.23791674100005 -17.56263873299997, 122.23763896300011 -17.56263873299997, 122.23763896300011 -17.56291651099997, 122.23736118500005 -17.56291651099997, 122.23736118500005 -17.563194288999966, 122.23708340700011 -17.563194288999966, 122.23708340700011 -17.563472066999964, 122.23680562900006 -17.563472066999964, 122.23680562900006 -17.56374984499996, 122.23652785200011 -17.56374984499996, 122.23652785200011 -17.56402762199997, 122.23625007400005 -17.56402762199997, 122.23625007400005 -17.564305399999967, 122.23597229600011 -17.564305399999967, 122.23597229600011 -17.564583177999964, 122.23569451800006 -17.564583177999964, 122.23569451800006 -17.56486095599996, 122.23513896300005 -17.56486095599996, 122.23513896300005 -17.56513873299997, 122.23236118500006 -17.56513873299997, 122.23236118500006 -17.56486095599996, 122.23208340700012 -17.56486095599996, 122.23208340700012 -17.564583177999964, 122.23180562900006 -17.564583177999964, 122.23180562900006 -17.564305399999967, 122.23152785200011 -17.564305399999967, 122.23152785200011 -17.56402762199997, 122.23125007400006 -17.56402762199997, 122.23125007400006 -17.56374984499996, 122.23097229600012 -17.56374984499996, 122.23097229600012 -17.563472066999964, 122.22430562900001 -17.563472066999964, 122.22430562900001 -17.563194288999966, 122.22402785200006 -17.563194288999966, 122.22402785200006 -17.56291651099997, 122.22375007400001 -17.56291651099997, 122.22375007400001 -17.56263873299997, 122.22347229600007 -17.56263873299997, 122.22347229600007 -17.562360955999964, 122.22319451800001 -17.562360955999964, 122.22319451800001 -17.562083177999966, 122.22291674000007 -17.562083177999966, 122.22291674000007 -17.56180539999997, 122.22263896300001 -17.56180539999997, 122.22263896300001 -17.56152762199997, 122.22236118500007 -17.56152762199997, 122.22236118500007 -17.561249843999974, 122.22208340700001 -17.561249843999974, 122.22208340700001 -17.560972066999966, 
122.22180562900007 -17.560972066999966, 122.22180562900007 -17.56069428899997, 122.22152785200001 -17.56069428899997, 122.22152785200001 -17.56041651099997, 122.22097229600001 -17.56041651099997, 122.22097229600001 -17.560138732999974, 122.22041674000002 -17.560138732999974, 122.22041674000002 -17.559860955999966, 122.21958340700007 -17.559860955999966, 122.21958340700007 -17.55958317799997, 122.21930562900002 -17.55958317799997, 122.21930562900002 -17.55930539999997, 122.21875007400001 -17.55930539999997, 122.21875007400001 -17.559027621999974, 122.21847229600007 -17.559027621999974, 122.21847229600007 -17.558749843999976, 122.21819451800002 -17.558749843999976, 122.21819451800002 -17.55847206699997, 122.21791674000008 -17.55847206699997, 122.21791674000008 -17.55819428899997, 122.21736118500007 -17.55819428899997, 122.21736118500007 -17.557916510999974, 122.21680562900008 -17.557916510999974, 122.21680562900008 -17.557638732999976, 122.21625007400007 -17.557638732999976, 122.21625007400007 -17.557360955999968, 122.21569451800008 -17.557360955999968, 122.21569451800008 -17.55708317799997, 122.21541674000002 -17.55708317799997, 122.21541674000002 -17.556805399999973, 122.21486118500002 -17.556805399999973, 122.21486118500002 -17.556527621999976, 122.21402785100008 -17.556527621999976, 122.21402785100008 -17.55624984399998, 122.21319451800002 -17.55624984399998, 122.21319451800002 -17.55597206699997, 122.21236118500008 -17.55597206699997, 122.21236118500008 -17.555694288999973, 122.21208340700002 -17.555694288999973, 122.21208340700002 -17.555416510999976, 122.21180562900008 -17.555416510999976, 122.21180562900008 -17.55513873299998, 122.21152785100003 -17.55513873299998, 122.21152785100003 -17.55486095599997, 122.21125007400008 -17.55486095599997, 122.21125007400008 -17.554583177999973, 122.21097229600002 -17.554583177999973, 122.21097229600002 -17.554305399999976, 122.21069451800008 -17.554305399999976, 122.21069451800008 -17.55402762199998, 122.21041674000003 -17.55402762199998, 122.21041674000003 -17.55374984399998, 122.21013896300008 -17.55374984399998, 122.21013896300008 -17.553472066999973, 122.20958340700008 -17.553472066999973, 122.20958340700008 -17.553194288999975, 122.20930562900003 -17.553194288999975, 122.20930562900003 -17.552916510999978, 122.20902785100009 -17.552916510999978, 122.20902785100009 -17.55263873299998, 122.20875007400002 -17.55263873299998, 122.20875007400002 -17.552360955999973, 122.20847229600008 -17.552360955999973, 122.20847229600008 -17.552083177999975, 122.20819451800003 -17.552083177999975, 122.20819451800003 -17.551805399999978, 122.20791674000009 -17.551805399999978, 122.20791674000009 -17.55152762199998, 122.20763896300002 -17.55152762199998, 122.20763896300002 -17.551249843999926, 122.20736118500008 -17.551249843999926, 122.20736118500008 -17.550972066999975, 122.20708340700003 -17.550972066999975, 122.20708340700003 -17.550694288999978, 122.20680562900009 -17.550694288999978, 122.20680562900009 -17.55041651099998, 122.20652785100003 -17.55041651099998, 122.20652785100003 -17.550138732999926, 122.20625007400008 -17.550138732999926, 122.20625007400008 -17.549860955999975, 122.20597229600003 -17.549860955999975, 122.20597229600003 -17.549583177999978, 122.20541674000003 -17.549583177999978, 122.20541674000003 -17.54930539999998, 122.20513896300008 -17.54930539999998, 122.20513896300008 -17.549027621999983, 122.20458340700009 -17.549027621999983, 122.20458340700009 -17.54874984399993, 122.20375007400003 -17.54874984399993, 122.20375007400003 
-17.548472066999977, 122.20236118500009 -17.548472066999977, 122.20236118500009 -17.54819428899998, 122.20208340700003 -17.54819428899998, 122.20208340700003 -17.547916510999983, 122.20180562900009 -17.547916510999983, 122.20180562900009 -17.54763873299993, 122.20152785100004 -17.54763873299993, 122.20152785100004 -17.54736095499993, 122.20125007400009 -17.54736095499993, 122.20125007400009 -17.54708317799998, 122.20097229600003 -17.54708317799998, 122.20097229600003 -17.546805399999982, 122.20069451800009 -17.546805399999982, 122.20069451800009 -17.546527621999928, 122.20041674000004 -17.546527621999928, 122.20041674000004 -17.54624984399993, 122.19986118500003 -17.54624984399993, 122.19986118500003 -17.54597206699998, 122.18986118500004 -17.54597206699998, 122.18986118500004 -17.545694288999982, 122.1890278510001 -17.545694288999982, 122.1890278510001 -17.545416510999928, 122.1884722960001 -17.545416510999928, 122.1884722960001 -17.54513873299993, 122.1879167400001 -17.54513873299993, 122.1879167400001 -17.544860954999933, 122.18708340700005 -17.544860954999933, 122.18708340700005 -17.544583177999982, 122.18652785100005 -17.544583177999982, 122.18652785100005 -17.544305399999928, 122.18625007300011 -17.544305399999928, 122.18625007300011 -17.54402762199993, 122.18597229600005 -17.54402762199993, 122.18597229600005 -17.543749843999933, 122.1856945180001 -17.543749843999933, 122.1856945180001 -17.543472066999982, 122.18541674000005 -17.543472066999982, 122.18541674000005 -17.54291651099993, 122.18513896200011 -17.54291651099993, 122.18513896200011 -17.542638732999933, 122.18486118500005 -17.542638732999933, 122.18486118500005 -17.542360954999936, 122.1845834070001 -17.542360954999936, 122.1845834070001 -17.54180539999993, 122.18430562900005 -17.54180539999993, 122.18430562900005 -17.541527621999933, 122.18402785100011 -17.541527621999933, 122.18402785100011 -17.541249843999935, 122.1834722960001 -17.541249843999935, 122.1834722960001 -17.540972066999927, 122.18291674000011 -17.540972066999927, 122.18291674000011 -17.54069428899993, 122.1823611850001 -17.54069428899993, 122.1823611850001 -17.540416510999933, 122.18180562900011 -17.540416510999933, 122.18180562900011 -17.540138732999935, 122.18125007300011 -17.540138732999935, 122.18125007300011 -17.539860954999938, 122.18097229600005 -17.539860954999938, 122.18097229600005 -17.53958317799993, 122.18041674000006 -17.53958317799993, 122.18041674000006 -17.539305399999932, 122.18013896200011 -17.539305399999932, 122.18013896200011 -17.539027621999935, 122.17986118500005 -17.539027621999935, 122.17986118500005 -17.538749843999938, 122.17708340700005 -17.538749843999938, 122.17708340700005 -17.539027621999935, 122.17652785100006 -17.539027621999935, 122.17652785100006 -17.539305399999932, 122.17597229600005 -17.539305399999932, 122.17597229600005 -17.53958317799993, 122.175138962 -17.53958317799993, 122.175138962 -17.539860954999938, 122.16680562900001 -17.539860954999938, 122.16680562900001 -17.53958317799993, 122.16652785100007 -17.53958317799993, 122.16597229600006 -17.53958317799993, 122.16597229600006 -17.539305399999932, 122.16569451800001 -17.539305399999932, 122.16569451800001 -17.539027621999935, 122.16513896200001 -17.539027621999935, 122.16513896200001 -17.538749843999938, 122.16486118400007 -17.538749843999938, 122.16486118400007 -17.53847206699993, 122.15736118400002 -17.53847206699993, 122.15736118400002 -17.538749843999938, 122.15541674000008 -17.538749843999938, 122.15541674000008 -17.539027621999935, 122.15097229500009 
-17.539027621999935, 122.15097229500009 -17.539305399999932, 122.15097229500009 -17.542360954999936, 122.15097229500009 -17.54402762199993, 122.15125007300003 -17.54402762199993, 122.15125007300003 -17.545416510999928, 122.15152785100008 -17.545416510999928, 122.15152785100008 -17.547916510999983, 122.15125007300003 -17.547916510999983, 122.15125007300003 -17.54874984399993, 122.15097229500009 -17.54874984399993, 122.15097229500009 -17.549583177999978, 122.15069451800002 -17.549583177999978, 122.15069451800002 -17.550694288999978, 122.15041674000008 -17.550694288999978, 122.15041674000008 -17.551249843999926, 122.15013896200003 -17.551249843999926, 122.15013896200003 -17.550972066999975, 122.14736118400003 -17.550972066999975, 122.14736118400003 -17.54291651099993, 122.14708340700008 -17.54291651099993, 122.14708340700008 -17.539305399999932, 122.14736118400003 -17.539305399999932, 122.14736118400003 -17.536805399999935, 122.14763896200009 -17.536805399999935, 122.14763896200009 -17.536527621999937, 122.14736118400003 -17.536527621999937, 122.14736118400003 -17.53513873299994, 122.14708340700008 -17.53513873299994, 122.14708340700008 -17.53402762199994, 122.14680562900003 -17.53402762199994, 122.14680562900003 -17.533194288999937, 122.14652785100009 -17.533194288999937, 122.14652785100009 -17.53291651099994, 122.14652785100009 -17.532638732999942, 122.14652785100009 -17.532083177999937, 122.14625007300003 -17.532083177999937, 122.14625007300003 -17.53180539999994, 122.14625007300003 -17.531527621999942, 122.14652785100009 -17.531527621999942, 122.14652785100009 -17.531249843999944, 122.14652785100009 -17.528749843999947, 122.14625007300003 -17.528749843999947, 122.14625007300003 -17.526805399999944, 122.14597229500009 -17.526805399999944, 122.14597229500009 -17.52513873299995, 122.14569451800003 -17.52513873299995, 122.14569451800003 -17.523472065999954, 122.14597229500009 -17.523472065999954, 122.14597229500009 -17.522360954999954, 122.14569451800003 -17.522360954999954, 122.14569451800003 -17.51958317699996, 122.14541674000009 -17.51958317699996, 122.14541674000009 -17.518749843999956, 122.14513896200003 -17.518749843999956, 122.14513896200003 -17.517916510999953, 122.14486118400009 -17.517916510999953, 122.14486118400009 -17.516805399999953, 122.14763896200009 -17.516805399999953, 122.14763896200009 -17.51708317699996, 122.14791674000003 -17.51708317699996, 122.14791674000003 -17.51736095499996, 122.14819451800008 -17.51736095499996, 122.14819451800008 -17.517916510999953, 122.14847229500003 -17.517916510999953, 122.14847229500003 -17.518749843999956, 122.14847229500003 -17.51930539999995, 122.14847229500003 -17.51958317699996, 122.14847229500003 -17.519860954999956, 122.14875007300009 -17.519860954999956, 122.14875007300009 -17.52041651099995, 122.14902785100003 -17.52041651099995, 122.14902785100003 -17.521249843999954, 122.14930562900008 -17.521249843999954, 122.14930562900008 -17.52291651099995, 122.14930562900008 -17.524583177999943, 122.14930562900008 -17.52486095499995, 122.14958340700002 -17.52486095499995, 122.14958340700002 -17.52597206599995, 122.14958340700002 -17.52624984399995, 122.14986118400009 -17.52624984399995, 122.14986118400009 -17.526805399999944, 122.14986118400009 -17.52708317799994, 122.15013896200003 -17.52708317799994, 122.15013896200003 -17.526805399999944, 122.15375007300008 -17.526805399999944, 122.15375007300008 -17.526527621999946, 122.15486118400008 -17.526527621999946, 122.15486118400008 -17.52624984399995, 122.15652785100008 -17.52624984399995, 
122.15652785100008 -17.52597206599995, 122.15986118400008 -17.52597206599995, 122.15986118400008 -17.525694288999944, 122.16041674000007 -17.525694288999944, 122.16041674000007 -17.525416510999946, 122.16097229600007 -17.525416510999946, 122.16097229600007 -17.52513873299995, 122.16208340700007 -17.52513873299995, 122.16208340700007 -17.52486095499995, 122.16347229600001 -17.52486095499995, 122.16347229600001 -17.524583177999943, 122.17069451800012 -17.524583177999943, 122.17069451800012 -17.524305399999946, 122.17097229600006 -17.524305399999946, 122.17097229600006 -17.52402762199995, 122.17125007300001 -17.52402762199995, 122.17125007300001 -17.52374984399995, 122.17152785100006 -17.52374984399995, 122.17152785100006 -17.523472065999954, 122.17180562900012 -17.523472065999954, 122.17180562900012 -17.523194288999946, 122.17208340700006 -17.523194288999946, 122.17208340700006 -17.52291651099995, 122.17236118400001 -17.52291651099995, 122.17236118400001 -17.52263873299995, 122.17291674000012 -17.52263873299995, 122.17291674000012 -17.522360954999954, 122.17458340700011 -17.522360954999954, 122.17458340700011 -17.522083177999946, 122.175138962 -17.522083177999946, 122.175138962 -17.52180539999995, 122.17541674000006 -17.52180539999995, 122.17541674000006 -17.52152762199995, 122.17569451800011 -17.52152762199995, 122.17569451800011 -17.521249843999954, 122.17597229600005 -17.521249843999954, 122.17597229600005 -17.520972065999956, 122.176250073 -17.520972065999956, 122.176250073 -17.520694288999948, 122.17652785100006 -17.520694288999948, 122.17652785100006 -17.52041651099995, 122.17680562900011 -17.52041651099995, 122.17680562900011 -17.520138732999953, 122.17708340700005 -17.520138732999953, 122.17708340700005 -17.519860954999956, 122.17736118500011 -17.519860954999956, 122.17736118500011 -17.51958317699996, 122.17763896200006 -17.51958317699996, 122.17763896200006 -17.51930539999995, 122.17791674000011 -17.51930539999995, 122.17791674000011 -17.519027621999953, 122.17819451800005 -17.519027621999953, 122.17819451800005 -17.518749843999956, 122.17847229600011 -17.518749843999956, 122.17847229600011 -17.51847206599996, 122.17875007300006 -17.51847206599996, 122.17875007300006 -17.51819428899995, 122.17902785100011 -17.51819428899995, 122.17902785100011 -17.517916510999953, 122.17930562900005 -17.517916510999953, 122.17930562900005 -17.517638732999956, 122.17958340700011 -17.517638732999956, 122.17958340700011 -17.51736095499996, 122.17986118500005 -17.51736095499996, 122.17986118500005 -17.51708317699996, 122.18013896200011 -17.51708317699996, 122.18013896200011 -17.516805399999953, 122.18125007300011 -17.516805399999953, 122.18125007300011 -17.516527621999955, 122.18208340700005 -17.516527621999955, 122.18208340700005 -17.516249843999958, 122.18319451800005 -17.516249843999958, 122.18319451800005 -17.51597206599996, 122.18402785100011 -17.51597206599996, 122.18402785100011 -17.515694288999953, 122.1845834070001 -17.515694288999953, 122.1845834070001 -17.515416510999955, 122.18513896200011 -17.515416510999955, 122.18513896200011 -17.515138732999958, 122.18986118500004 -17.515138732999958, 122.18986118500004 -17.515416510999955, 122.19041674000005 -17.515416510999955, 122.19041674000005 -17.515694288999953, 122.1912500740001 -17.515694288999953, 122.1912500740001 -17.51597206599996, 122.19208340700004 -17.51597206599996, 122.19208340700004 -17.516249843999958, 122.19541674000004 -17.516249843999958, 122.19541674000004 -17.516527621999955, 122.19625007400009 -17.516527621999955, 
122.19625007400009 -17.516805399999953, 122.1968056290001 -17.516805399999953, 122.1968056290001 -17.51708317699996, 122.19763896200004 -17.51708317699996, 122.19763896200004 -17.51736095499996, 122.20902785100009 -17.51736095499996, 122.20902785100009 -17.51708317699996, 122.20930562900003 -17.51708317699996, 122.20930562900003 -17.516805399999953, 122.20958340700008 -17.516805399999953, 122.20958340700008 -17.516527621999955, 122.21013896300008 -17.516527621999955, 122.21013896300008 -17.516249843999958, 122.21069451800008 -17.516249843999958, 122.21069451800008 -17.51597206599996, 122.21125007400008 -17.51597206599996, 122.21125007400008 -17.515694288999953, 122.21152785100003 -17.515694288999953, 122.21152785100003 -17.515416510999955, 122.21180562900008 -17.515416510999955, 122.21180562900008 -17.515138732999958, 122.21208340700002 -17.515138732999958, 122.21208340700002 -17.51486095499996, 122.21236118500008 -17.51486095499996, 122.21236118500008 -17.514583176999963, 122.21263896300002 -17.514583176999963, 122.21263896300002 -17.514305399999955, 122.21291674000008 -17.514305399999955, 122.21291674000008 -17.514027621999958, 122.21319451800002 -17.514027621999958, 122.21319451800002 -17.51374984399996, 122.21347229600008 -17.51374984399996, 122.21347229600008 -17.513472065999963, 122.21375007400002 -17.513472065999963, 122.21375007400002 -17.513194288999955, 122.21402785100008 -17.513194288999955, 122.21402785100008 -17.512916510999958, 122.21430562900002 -17.512916510999958, 122.21430562900002 -17.51263873299996, 122.21458340700008 -17.51263873299996, 122.21458340700008 -17.512360954999963, 122.21486118500002 -17.512360954999963, 122.21486118500002 -17.512083176999965, 122.21513896300007 -17.512083176999965, 122.21513896300007 -17.511805399999957, 122.21541674000002 -17.511805399999957, 122.21541674000002 -17.51152762199996, 122.21569451800008 -17.51152762199996, 122.21569451800008 -17.511249843999963, 122.21597229600002 -17.511249843999963, 122.21597229600002 -17.510972065999965, 122.21625007400007 -17.510972065999965, 122.21625007400007 -17.510694288999957, 122.21652785200001 -17.510694288999957, 122.21652785200001 -17.51041651099996, 122.21680562900008 -17.51041651099996, 122.21680562900008 -17.510138732999962, 122.21791674000008 -17.510138732999962, 122.21791674000008 -17.509860954999965, 122.21902785200007 -17.509860954999965, 122.21902785200007 -17.509583176999968, 122.21958340700007 -17.509583176999968, 122.21958340700007 -17.50930539999996, 122.21986118500001 -17.50930539999996, 122.21986118500001 -17.509027621999962, 122.22013896300007 -17.509027621999962, 122.22013896300007 -17.508749843999965, 122.22041674000002 -17.508749843999965, 122.22041674000002 -17.508472065999968, 122.22069451800007 -17.508472065999968, 122.22069451800007 -17.50819428899996, 122.22097229600001 -17.50819428899996, 122.22097229600001 -17.507916510999962, 122.22125007400007 -17.507916510999962, 122.22125007400007 -17.507638732999965, 122.22152785200001 -17.507638732999965, 122.22152785200001 -17.507360954999967, 122.22180562900007 -17.507360954999967, 122.22180562900007 -17.50708317699997, 122.22208340700001 -17.50708317699997, 122.22208340700001 -17.506805399999962, 122.22708340700001 -17.506805399999962, 122.22708340700001 -17.506527621999965, 122.22763896300012 -17.506527621999965, 122.22763896300012 -17.506249843999967, 122.22791674000007 -17.506249843999967, 122.22791674000007 -17.50597206599997, 122.22847229600006 -17.50597206599997, 122.22847229600006 -17.505694287999972, 122.23680562900006 
-17.505694287999972, 122.23680562900006 -17.505416510999964, 122.23736118500005 -17.505416510999964, 122.23736118500005 -17.505138732999967, 122.23763896300011 -17.505138732999967, 122.23763896300011 -17.50486095499997, 122.23791674100005 -17.50486095499997, 122.23791674100005 -17.504583176999972, 122.23819451800011 -17.504583176999972, 122.23819451800011 -17.504305399999964, 122.23847229600005 -17.504305399999964, 122.23847229600005 -17.504027621999967, 122.23902785200005 -17.504027621999967, 122.23902785200005 -17.50374984399997, 122.23930562900011 -17.50374984399997, 122.23930562900011 -17.503472065999972, 122.23958340700005 -17.503472065999972, 122.23958340700005 -17.503194287999975, 122.23986118500011 -17.503194287999975, 122.23986118500011 -17.502916510999967, 122.24013896300005 -17.502916510999967, 122.24013896300005 -17.50263873299997, 122.2404167410001 -17.50263873299997, 122.2404167410001 -17.502360954999972, 122.24069451800005 -17.502360954999972, 122.24069451800005 -17.502083176999975, 122.24097229600011 -17.502083176999975, 122.24097229600011 -17.501805399999967, 122.24125007400005 -17.501805399999967, 122.24125007400005 -17.50152762199997, 122.2415278520001 -17.50152762199997, 122.2415278520001 -17.50124984399997, 122.24180562900005 -17.50124984399997, 122.24180562900005 -17.500972065999974, 122.24208340700011 -17.500972065999974, 122.24208340700011 -17.500694287999977, 122.24236118500005 -17.500694287999977, 122.24236118500005 -17.50041651099997, 122.2426389630001 -17.50041651099997, 122.2426389630001 -17.50013873299997, 122.24291674100004 -17.50013873299997, 122.24291674100004 -17.499860954999974, 122.24319451800011 -17.499860954999974, 122.24319451800011 -17.499583176999977, 122.24347229600005 -17.499583176999977, 122.24347229600005 -17.49930539999997, 122.2437500740001 -17.49930539999997, 122.2437500740001 -17.49902762199997, 122.24402785200004 -17.49902762199997, 122.24402785200004 -17.498749843999974, 122.2443056300001 -17.498749843999974, 122.2443056300001 -17.498472065999977, 122.24458340700005 -17.498472065999977, 122.24458340700005 -17.49819428799998, 122.2448611850001 -17.49819428799998, 122.2448611850001 -17.49791651099997, 122.24513896300004 -17.49791651099997, 122.24513896300004 -17.497638732999974, 122.2454167410001 -17.497638732999974, 122.2454167410001 -17.497360954999976, 122.24569451800005 -17.497360954999976, 122.24569451800005 -17.49708317699998, 122.2459722960001 -17.49708317699998, 122.2459722960001 -17.49680539999997, 122.24625007400005 -17.49680539999997, 122.24625007400005 -17.496527621999974, 122.2465278520001 -17.496527621999974, 122.2465278520001 -17.496249843999976, 122.24680563000004 -17.496249843999976, 122.24680563000004 -17.49597206599998, 122.2470834070001 -17.49597206599998, 122.2470834070001 -17.49569428799998, 122.24736118500005 -17.49569428799998, 122.24736118500005 -17.495416510999974, 122.2476389630001 -17.495416510999974, 122.2476389630001 -17.495138732999976, 122.2548611850001 -17.495138732999976)))"^^geo:wktLiteral ] ; - geo:hasMetricArea 8.701201e+07 . 
- - a geo:Feature, - ahgf:ContractedCatchment ; - dcterms:identifier "102208962"^^xsd:token ; - dcterms:title "Contracted Catchment 102208962" ; - dcterms:type ahgf:NonContractedArea ; - geo:hasGeometry [ geo:asWKT "MULTIPOLYGON (((122.23180562900006 -17.564583177999964, 122.23208340700012 -17.564583177999964, 122.23208340700012 -17.56486095599996, 122.23180562900006 -17.56486095599996, 122.23180562900006 -17.564583177999964)), ((122.23180562900006 -17.564583177999964, 122.23152785200011 -17.564583177999964, 122.23152785200011 -17.564305399999967, 122.23180562900006 -17.564305399999967, 122.23180562900006 -17.564583177999964)), ((122.23152785200011 -17.564305399999967, 122.23125007400006 -17.564305399999967, 122.23125007400006 -17.56402762199997, 122.23152785200011 -17.56402762199997, 122.23152785200011 -17.564305399999967)), ((122.23125007400006 -17.56402762199997, 122.22902785200006 -17.56402762199997, 122.22902785200006 -17.564305399999967, 122.22875007400012 -17.564305399999967, 122.22875007400012 -17.564583177999964, 122.22847229600006 -17.564583177999964, 122.22847229600006 -17.56486095599996, 122.22819451800001 -17.56486095599996, 122.22819451800001 -17.56513873299997, 122.22791674000007 -17.56513873299997, 122.22791674000007 -17.565416510999967, 122.22763896300012 -17.565416510999967, 122.22763896300012 -17.565694288999964, 122.22736118500006 -17.565694288999964, 122.22736118500006 -17.56597206699996, 122.22708340700001 -17.56597206699996, 122.22708340700001 -17.56624984499996, 122.22680562900007 -17.56624984499996, 122.22680562900007 -17.566527621999967, 122.22291674000007 -17.566527621999967, 122.22291674000007 -17.566805399999964, 122.22263896300001 -17.566805399999964, 122.22263896300001 -17.56708317799996, 122.22236118500007 -17.56708317799996, 122.22236118500007 -17.56736095599996, 122.22208340700001 -17.56736095599996, 122.22208340700001 -17.567638732999967, 122.22180562900007 -17.567638732999967, 122.22180562900007 -17.567916510999964, 122.22152785200001 -17.567916510999964, 122.22152785200001 -17.568194288999962, 122.22125007400007 -17.568194288999962, 122.22125007400007 -17.56847206699996, 122.22097229600001 -17.56847206699996, 122.22097229600001 -17.568749844999957, 122.22069451800007 -17.568749844999957, 122.22069451800007 -17.569027621999965, 122.22041674000002 -17.569027621999965, 122.22041674000002 -17.569305399999962, 122.22013896300007 -17.569305399999962, 122.22013896300007 -17.56958317799996, 122.21986118500001 -17.56958317799996, 122.21986118500001 -17.569860955999957, 122.21958340700007 -17.569860955999957, 122.21958340700007 -17.570138732999965, 122.21930562900002 -17.570138732999965, 122.21930562900002 -17.570416510999962, 122.21902785200007 -17.570416510999962, 122.21902785200007 -17.57069428899996, 122.21875007400001 -17.57069428899996, 122.21875007400001 -17.570972066999957, 122.21208340700002 -17.570972066999957, 122.21208340700002 -17.571249844999954, 122.21180562900008 -17.571249844999954, 122.21180562900008 -17.571527621999962, 122.21152785100003 -17.571527621999962, 122.21152785100003 -17.57180539999996, 122.21125007400008 -17.57180539999996, 122.21125007400008 -17.572083177999957, 122.21097229600002 -17.572083177999957, 122.21097229600002 -17.572360955999955, 122.21069451800008 -17.572360955999955, 122.21069451800008 -17.572638732999962, 122.21041674000003 -17.572638732999962, 122.21041674000003 -17.57291651099996, 122.21013896300008 -17.57291651099996, 122.21013896300008 -17.573194288999957, 122.20986118500002 -17.573194288999957, 122.20986118500002 
-17.573472066999955, 122.20958340700008 -17.573472066999955, 122.20958340700008 -17.573749844999952, 122.20930562900003 -17.573749844999952, 122.20930562900003 -17.57402762199996, 122.20902785100009 -17.57402762199996, 122.20902785100009 -17.574305399999957, 122.20875007400002 -17.574305399999957, 122.20875007400002 -17.574583177999955, 122.20847229600008 -17.574583177999955, 122.20847229600008 -17.574860955999952, 122.20819451800003 -17.574860955999952, 122.20819451800003 -17.57513873299996, 122.20791674000009 -17.57513873299996, 122.20791674000009 -17.575416510999958, 122.20763896300002 -17.575416510999958, 122.20763896300002 -17.575694288999955, 122.20736118500008 -17.575694288999955, 122.20736118500008 -17.575972066999952, 122.20708340700003 -17.575972066999952, 122.20708340700003 -17.57624984499995, 122.20680562900009 -17.57624984499995, 122.20680562900009 -17.576527621999958, 122.20652785100003 -17.576527621999958, 122.20652785100003 -17.576805399999955, 122.20625007400008 -17.576805399999955, 122.20625007400008 -17.577083177999953, 122.20597229600003 -17.577083177999953, 122.20597229600003 -17.57736095599995, 122.20569451800009 -17.57736095599995, 122.20569451800009 -17.577638733999947, 122.20541674000003 -17.577638733999947, 122.20541674000003 -17.577916510999955, 122.20513896300008 -17.577916510999955, 122.20513896300008 -17.578194288999953, 122.20486118500003 -17.578194288999953, 122.20486118500003 -17.57847206699995, 122.20430562900003 -17.57847206699995, 122.20430562900003 -17.578749844999948, 122.20402785100009 -17.578749844999948, 122.20402785100009 -17.579027621999955, 122.20375007400003 -17.579027621999955, 122.20375007400003 -17.579305399999953, 122.20347229600009 -17.579305399999953, 122.20347229600009 -17.57958317799995, 122.2001389620001 -17.57958317799995, 122.2001389620001 -17.579860955999948, 122.19986118500003 -17.579860955999948, 122.19986118500003 -17.580138733999945, 122.19958340700009 -17.580138733999945, 122.19958340700009 -17.580416510999953, 122.19930562900004 -17.580416510999953, 122.19930562900004 -17.58069428899995, 122.1990278510001 -17.58069428899995, 122.1990278510001 -17.580972066999948, 122.19875007400003 -17.580972066999948, 122.19875007400003 -17.581249844999945, 122.19847229600009 -17.581249844999945, 122.19847229600009 -17.581527621999953, 122.19819451800004 -17.581527621999953, 122.19819451800004 -17.58180539999995, 122.1979167400001 -17.58180539999995, 122.1979167400001 -17.582083177999948, 122.19763896200004 -17.582083177999948, 122.19763896200004 -17.582360955999945, 122.19736118500009 -17.582360955999945, 122.19736118500009 -17.582638733999943, 122.19708340700004 -17.582638733999943, 122.19708340700004 -17.58291651099995, 122.19652785100004 -17.58291651099995, 122.19652785100004 -17.583194288999948, 122.19597229600004 -17.583194288999948, 122.19597229600004 -17.583472066999946, 122.19152785100005 -17.583472066999946, 122.19152785100005 -17.583749844999943, 122.1912500740001 -17.583749844999943, 122.1912500740001 -17.58402762199995, 122.19097229600004 -17.58402762199995, 122.19097229600004 -17.58430539999995, 122.1906945180001 -17.58430539999995, 122.1906945180001 -17.584583177999946, 122.19041674000005 -17.584583177999946, 122.19041674000005 -17.584860955999943, 122.18986118500004 -17.584860955999943, 122.18986118500004 -17.58513873399994, 122.1868056290001 -17.58513873399994, 122.1868056290001 -17.584860955999943, 122.18597229600005 -17.584860955999943, 122.18597229600005 -17.584583177999946, 122.18541674000005 -17.584583177999946, 
122.18541674000005 -17.58430539999995, 122.18513896200011 -17.58430539999995, 122.18513896200011 -17.584583177999946, 122.18541674000005 -17.584583177999946, 122.18541674000005 -17.584860955999943, 122.1856945180001 -17.584860955999943, 122.1856945180001 -17.58986095599994, 122.18541674000005 -17.58986095599994, 122.18541674000005 -17.59097206699994, 122.18513896200011 -17.59097206699994, 122.18513896200011 -17.591249844999936, 122.18513896200011 -17.591527622999934, 122.18541674000005 -17.591527622999934, 122.1856945180001 -17.591527622999934, 122.1856945180001 -17.59180539999994, 122.18597229600005 -17.59180539999994, 122.18597229600005 -17.59208317799994, 122.18625007300011 -17.59208317799994, 122.18625007300011 -17.592360955999936, 122.18652785100005 -17.592360955999936, 122.18652785100005 -17.592638733999934, 122.1868056290001 -17.592638733999934, 122.1868056290001 -17.59291651099994, 122.18708340700005 -17.59291651099994, 122.18708340700005 -17.59430539999994, 122.18708340700005 -17.594583177999937, 122.18708340700005 -17.594860955999934, 122.1868056290001 -17.594860955999934, 122.1868056290001 -17.59513873399993, 122.1868056290001 -17.59541651099994, 122.1868056290001 -17.595694288999937, 122.1868056290001 -17.595972066999934, 122.1868056290001 -17.59624984499993, 122.18652785100005 -17.59624984499993, 122.18652785100005 -17.59652762299993, 122.18652785100005 -17.596805399999937, 122.18652785100005 -17.597083177999934, 122.18625007300011 -17.597083177999934, 122.18625007300011 -17.59736095599993, 122.18597229600005 -17.59736095599993, 122.18597229600005 -17.59763873399993, 122.18597229600005 -17.597916510999937, 122.1856945180001 -17.597916510999937, 122.1856945180001 -17.598194288999935, 122.18541674000005 -17.598194288999935, 122.18541674000005 -17.598472066999932, 122.18541674000005 -17.59874984499993, 122.18541674000005 -17.599027622999927, 122.18513896200011 -17.599027622999927, 122.18513896200011 -17.599305399999935, 122.18513896200011 -17.599583177999932, 122.18513896200011 -17.59986095599993, 122.18486118500005 -17.59986095599993, 122.18486118500005 -17.600138733999927, 122.18486118500005 -17.600416510999935, 122.18486118500005 -17.600694288999932, 122.18486118500005 -17.60097206699993, 122.1845834070001 -17.60097206699993, 122.1845834070001 -17.601249844999927, 122.18430562900005 -17.601249844999927, 122.18430562900005 -17.60152762299998, 122.18402785100011 -17.60152762299998, 122.18402785100011 -17.601805399999932, 122.18375007300006 -17.601805399999932, 122.18375007300006 -17.60208317799993, 122.1834722960001 -17.60208317799993, 122.1834722960001 -17.602360955999927, 122.18319451800005 -17.602360955999927, 122.18319451800005 -17.60263873399998, 122.18291674000011 -17.60263873399998, 122.18291674000011 -17.602916510999933, 122.18263896200006 -17.602916510999933, 122.18263896200006 -17.60319428899993, 122.1823611850001 -17.60319428899993, 122.1823611850001 -17.603472066999927, 122.18208340700005 -17.603472066999927, 122.18208340700005 -17.60374984499998, 122.18180562900011 -17.60374984499998, 122.18180562900011 -17.60402762299998, 122.18152785100006 -17.60402762299998, 122.18152785100006 -17.60430539999993, 122.18125007300011 -17.60430539999993, 122.18125007300011 -17.604583177999928, 122.18097229600005 -17.604583177999928, 122.18097229600005 -17.604860955999982, 122.18069451800011 -17.604860955999982, 122.18069451800011 -17.60513873399998, 122.18041674000006 -17.60513873399998, 122.18041674000006 -17.605416511999977, 122.18013896200011 -17.605416511999977, 
122.18013896200011 -17.605694288999928, 122.17986118500005 -17.605694288999928, 122.17986118500005 -17.605972066999982, 122.17958340700011 -17.605972066999982, 122.17958340700011 -17.60624984499998, 122.17930562900005 -17.60624984499998, 122.17930562900005 -17.606527622999977, 122.17902785100011 -17.606527622999977, 122.17902785100011 -17.606805399999928, 122.17875007300006 -17.606805399999928, 122.17875007300006 -17.607083177999982, 122.17847229600011 -17.607083177999982, 122.17847229600011 -17.60736095599998, 122.17819451800005 -17.60736095599998, 122.17819451800005 -17.607638733999977, 122.17791674000011 -17.607638733999977, 122.17791674000011 -17.607916511999974, 122.17763896200006 -17.607916511999974, 122.17763896200006 -17.608194288999982, 122.17736118500011 -17.608194288999982, 122.17736118500011 -17.60847206699998, 122.17708340700005 -17.60847206699998, 122.17708340700005 -17.608749844999977, 122.17680562900011 -17.608749844999977, 122.17680562900011 -17.609027622999974, 122.17652785100006 -17.609027622999974, 122.17652785100006 -17.608749844999977, 122.17652785100006 -17.60847206699998, 122.176250073 -17.60847206699998, 122.176250073 -17.608194288999982, 122.176250073 -17.607916511999974, 122.176250073 -17.607638733999977, 122.17597229600005 -17.607638733999977, 122.17597229600005 -17.60736095599998, 122.17597229600005 -17.607083177999982, 122.17597229600005 -17.606805399999928, 122.17569451800011 -17.606805399999928, 122.17569451800011 -17.606527622999977, 122.17569451800011 -17.60624984499998, 122.17569451800011 -17.605972066999982, 122.17569451800011 -17.605694288999928, 122.175138962 -17.605694288999928, 122.175138962 -17.605416511999977, 122.17486118500005 -17.605416511999977, 122.17486118500005 -17.604860955999982, 122.17458340700011 -17.604860955999982, 122.17458340700011 -17.604583177999928, 122.17458340700011 -17.60430539999993, 122.17430562900006 -17.60430539999993, 122.17430562900006 -17.60402762299998, 122.17430562900006 -17.60374984499998, 122.17402785100012 -17.60374984499998, 122.17402785100012 -17.603472066999927, 122.17402785100012 -17.60319428899993, 122.17402785100012 -17.60208317799993, 122.17375007300006 -17.60208317799993, 122.17375007300006 -17.601805399999932, 122.17375007300006 -17.60152762299998, 122.17375007300006 -17.601249844999927, 122.17347229600011 -17.601249844999927, 122.17347229600011 -17.60097206699993, 122.17347229600011 -17.600694288999932, 122.17319451800006 -17.600694288999932, 122.17319451800006 -17.600416510999935, 122.17291674000012 -17.600416510999935, 122.17291674000012 -17.600138733999927, 122.17263896200006 -17.600138733999927, 122.17263896200006 -17.59986095599993, 122.17263896200006 -17.599583177999932, 122.17236118400001 -17.599583177999932, 122.17236118400001 -17.599305399999935, 122.17208340700006 -17.599305399999935, 122.17208340700006 -17.599027622999927, 122.17180562900012 -17.599027622999927, 122.17180562900012 -17.59874984499993, 122.17152785100006 -17.59874984499993, 122.17152785100006 -17.598472066999932, 122.17152785100006 -17.598194288999935, 122.17152785100006 -17.597916510999937, 122.17152785100006 -17.59763873399993, 122.17125007300001 -17.59763873399993, 122.17125007300001 -17.59736095599993, 122.17125007300001 -17.597083177999934, 122.17097229600006 -17.597083177999934, 122.17097229600006 -17.59652762299993, 122.17069451800012 -17.59652762299993, 122.17069451800012 -17.59624984499993, 122.17041674000006 -17.59624984499993, 122.17041674000006 -17.595972066999934, 122.17013896200001 -17.595972066999934, 
122.17013896200001 -17.595694288999937, 122.16958340700012 -17.595694288999937, 122.16958340700012 -17.59541651099994, 122.16930562900006 -17.59541651099994, 122.16930562900006 -17.59513873399993, 122.16902785100001 -17.59513873399993, 122.16902785100001 -17.594860955999934, 122.16902785100001 -17.594583177999937, 122.16902785100001 -17.59430539999994, 122.16875007300007 -17.59430539999994, 122.16875007300007 -17.59402762299993, 122.16847229600012 -17.59402762299993, 122.16847229600012 -17.593749844999934, 122.16819451800006 -17.593749844999934, 122.16819451800006 -17.593472066999936, 122.16819451800006 -17.59319428899994, 122.16791674000001 -17.59319428899994, 122.16791674000001 -17.59291651099994, 122.16791674000001 -17.592638733999934, 122.16791674000001 -17.592360955999936, 122.16791674000001 -17.59208317799994, 122.16819451800006 -17.59208317799994, 122.16819451800006 -17.59180539999994, 122.16847229600012 -17.59180539999994, 122.16847229600012 -17.591527622999934, 122.16819451800006 -17.591527622999934, 122.16819451800006 -17.591249844999936, 122.16791674000001 -17.591249844999936, 122.16791674000001 -17.59097206699994, 122.16763896200007 -17.59097206699994, 122.16763896200007 -17.59069428899994, 122.16763896200007 -17.590416510999944, 122.16736118400001 -17.590416510999944, 122.16736118400001 -17.590138733999936, 122.16708340700006 -17.590138733999936, 122.16680562900001 -17.590138733999936, 122.16680562900001 -17.58986095599994, 122.16652785100007 -17.58986095599994, 122.16652785100007 -17.58958317799994, 122.16652785100007 -17.589305399999944, 122.16652785100007 -17.589027621999946, 122.16652785100007 -17.58874984499994, 122.16625007300001 -17.58874984499994, 122.16625007300001 -17.589027621999946, 122.16597229600006 -17.589027621999946, 122.16569451800001 -17.589027621999946, 122.16541674000007 -17.589027621999946, 122.16513896200001 -17.589027621999946, 122.16513896200001 -17.58874984499994, 122.16486118400007 -17.58874984499994, 122.16486118400007 -17.58847206699994, 122.16458340700001 -17.58847206699994, 122.16430562900007 -17.58847206699994, 122.16430562900007 -17.588194288999944, 122.16430562900007 -17.587916510999946, 122.16402785100001 -17.587916510999946, 122.16402785100001 -17.58763873399994, 122.16430562900007 -17.58763873399994, 122.16430562900007 -17.58736095599994, 122.16430562900007 -17.587083177999943, 122.16430562900007 -17.586805399999946, 122.16402785100001 -17.586805399999946, 122.16402785100001 -17.58652762199995, 122.16375007300007 -17.58652762199995, 122.16375007300007 -17.58624984499994, 122.16347229600001 -17.58624984499994, 122.16347229600001 -17.585972066999943, 122.16347229600001 -17.585694288999946, 122.16319451800007 -17.585694288999946, 122.16319451800007 -17.58541651099995, 122.16291674000001 -17.58541651099995, 122.16291674000001 -17.58513873399994, 122.16263896200007 -17.58513873399994, 122.16263896200007 -17.584860955999943, 122.16263896200007 -17.584583177999946, 122.16263896200007 -17.58430539999995, 122.16236118400002 -17.58430539999995, 122.16208340700007 -17.58430539999995, 122.16208340700007 -17.58402762199995, 122.16180562900001 -17.58402762199995, 122.16152785100007 -17.58402762199995, 122.16152785100007 -17.583749844999943, 122.16125007300002 -17.583749844999943, 122.16097229600007 -17.583749844999943, 122.16069451800001 -17.583749844999943, 122.16069451800001 -17.583472066999946, 122.16041674000007 -17.583472066999946, 122.16013896200002 -17.583472066999946, 122.16013896200002 -17.583194288999948, 122.15986118400008 -17.583194288999948, 
122.15986118400008 -17.58291651099995, 122.15902785100002 -17.58291651099995, 122.15902785100002 -17.582638733999943, 122.15875007300008 -17.582638733999943, 122.15875007300008 -17.582360955999945, 122.15847229500002 -17.582360955999945, 122.15847229500002 -17.582638733999943, 122.15791674000002 -17.582638733999943, 122.15763896200008 -17.582638733999943, 122.15763896200008 -17.58291651099995, 122.15736118400002 -17.58291651099995, 122.15708340700007 -17.58291651099995, 122.15708340700007 -17.582638733999943, 122.15680562900002 -17.582638733999943, 122.15680562900002 -17.582360955999945, 122.15652785100008 -17.582360955999945, 122.15652785100008 -17.582083177999948, 122.15652785100008 -17.58180539999995, 122.15625007300002 -17.58180539999995, 122.15625007300002 -17.581249844999945, 122.15597229500008 -17.581249844999945, 122.15597229500008 -17.580972066999948, 122.15541674000008 -17.580972066999948, 122.15541674000008 -17.58069428899995, 122.15513896200002 -17.58069428899995, 122.15458340700002 -17.58069428899995, 122.15458340700002 -17.580416510999953, 122.15430562900008 -17.580416510999953, 122.15347229500003 -17.580416510999953, 122.15347229500003 -17.580138733999945, 122.15291674000002 -17.580138733999945, 122.15291674000002 -17.579860955999948, 122.15263896200008 -17.579860955999948, 122.15263896200008 -17.57958317799995, 122.15236118400003 -17.57958317799995, 122.15236118400003 -17.579305399999953, 122.15208340700008 -17.579305399999953, 122.15208340700008 -17.578749844999948, 122.15180562900002 -17.578749844999948, 122.15180562900002 -17.57847206699995, 122.15152785100008 -17.57847206699995, 122.15152785100008 -17.577916510999955, 122.15125007300003 -17.577916510999955, 122.15125007300003 -17.57513873299996, 122.15125007300003 -17.574860955999952, 122.15125007300003 -17.574583177999955, 122.15125007300003 -17.574305399999957, 122.15125007300003 -17.57180539999996, 122.15097229500009 -17.57180539999996, 122.15097229500009 -17.57069428899996, 122.15069451800002 -17.57069428899996, 122.15069451800002 -17.56958317799996, 122.15041674000008 -17.56958317799996, 122.15041674000008 -17.569305399999962, 122.15041674000008 -17.568749844999957, 122.15013896200003 -17.568749844999957, 122.15013896200003 -17.567916510999964, 122.14986118400009 -17.567916510999964, 122.14986118400009 -17.56736095599996, 122.14958340700002 -17.56736095599996, 122.14958340700002 -17.566805399999964, 122.14930562900008 -17.566805399999964, 122.14930562900008 -17.566527621999967, 122.14902785100003 -17.566527621999967, 122.14902785100003 -17.56597206699996, 122.14875007300009 -17.56597206699996, 122.14875007300009 -17.565694288999964, 122.14847229500003 -17.565694288999964, 122.14847229500003 -17.56513873299997, 122.14819451800008 -17.56513873299997, 122.14819451800008 -17.56486095599996, 122.14763896200009 -17.56486095599996, 122.14763896200009 -17.564583177999964, 122.14736118400003 -17.564583177999964, 122.14736118400003 -17.564305399999967, 122.14708340700008 -17.564305399999967, 122.14708340700008 -17.56402762199997, 122.14680562900003 -17.56402762199997, 122.14680562900003 -17.56374984499996, 122.14652785100009 -17.56374984499996, 122.14652785100009 -17.563472066999964, 122.14625007300003 -17.563472066999964, 122.14625007300003 -17.563194288999966, 122.14625007300003 -17.56291651099997, 122.14625007300003 -17.56263873299997, 122.14625007300003 -17.562360955999964, 122.14597229500009 -17.562360955999964, 122.14597229500009 -17.56180539999997, 122.14569451800003 -17.56180539999997, 122.14569451800003 
-17.559860955999966, 122.14597229500009 -17.559860955999966, 122.14597229500009 -17.55958317799997, 122.14597229500009 -17.559027621999974, 122.14597229500009 -17.558749843999976, 122.14625007300003 -17.558749843999976, 122.14625007300003 -17.55847206699997, 122.14625007300003 -17.55819428899997, 122.14652785100009 -17.55819428899997, 122.14652785100009 -17.557916510999974, 122.14680562900003 -17.557916510999974, 122.14680562900003 -17.556805399999973, 122.14708340700008 -17.556805399999973, 122.14708340700008 -17.550972066999975, 122.15013896200003 -17.550972066999975, 122.15013896200003 -17.551249843999926, 122.15041674000008 -17.551249843999926, 122.15041674000008 -17.550694288999978, 122.15069451800002 -17.550694288999978, 122.15069451800002 -17.549583177999978, 122.15097229500009 -17.549583177999978, 122.15097229500009 -17.54874984399993, 122.15125007300003 -17.54874984399993, 122.15125007300003 -17.547916510999983, 122.15152785100008 -17.547916510999983, 122.15152785100008 -17.545416510999928, 122.15125007300003 -17.545416510999928, 122.15125007300003 -17.54402762199993, 122.15097229500009 -17.54402762199993, 122.15097229500009 -17.539027621999935, 122.15541674000008 -17.539027621999935, 122.15541674000008 -17.538749843999938, 122.15736118400002 -17.538749843999938, 122.15736118400002 -17.53847206699993, 122.16486118400007 -17.53847206699993, 122.16486118400007 -17.538749843999938, 122.16513896200001 -17.538749843999938, 122.16513896200001 -17.539027621999935, 122.16569451800001 -17.539027621999935, 122.16569451800001 -17.539305399999932, 122.16597229600006 -17.539305399999932, 122.16597229600006 -17.53958317799993, 122.16680562900001 -17.53958317799993, 122.16680562900001 -17.539860954999938, 122.175138962 -17.539860954999938, 122.175138962 -17.53958317799993, 122.17597229600005 -17.53958317799993, 122.17597229600005 -17.539305399999932, 122.17652785100006 -17.539305399999932, 122.17652785100006 -17.539027621999935, 122.17708340700005 -17.539027621999935, 122.17708340700005 -17.538749843999938, 122.17986118500005 -17.538749843999938, 122.17986118500005 -17.539027621999935, 122.18013896200011 -17.539027621999935, 122.18013896200011 -17.539305399999932, 122.18041674000006 -17.539305399999932, 122.18041674000006 -17.53958317799993, 122.18097229600005 -17.53958317799993, 122.18097229600005 -17.539860954999938, 122.18125007300011 -17.539860954999938, 122.18125007300011 -17.540138732999935, 122.18180562900011 -17.540138732999935, 122.18180562900011 -17.540416510999933, 122.1823611850001 -17.540416510999933, 122.1823611850001 -17.54069428899993, 122.18291674000011 -17.54069428899993, 122.18291674000011 -17.540972066999927, 122.1834722960001 -17.540972066999927, 122.1834722960001 -17.541249843999935, 122.18402785100011 -17.541249843999935, 122.18402785100011 -17.541527621999933, 122.18430562900005 -17.541527621999933, 122.18430562900005 -17.54180539999993, 122.1845834070001 -17.54180539999993, 122.1845834070001 -17.542360954999936, 122.18486118500005 -17.542360954999936, 122.18486118500005 -17.542638732999933, 122.18513896200011 -17.542638732999933, 122.18513896200011 -17.54291651099993, 122.18541674000005 -17.54291651099993, 122.18541674000005 -17.543472066999982, 122.1856945180001 -17.543472066999982, 122.1856945180001 -17.543749843999933, 122.18597229600005 -17.543749843999933, 122.18597229600005 -17.54402762199993, 122.18625007300011 -17.54402762199993, 122.18625007300011 -17.544305399999928, 122.18652785100005 -17.544305399999928, 122.18652785100005 -17.544583177999982, 
122.18708340700005 -17.544583177999982, 122.18708340700005 -17.544860954999933, 122.1879167400001 -17.544860954999933, 122.1879167400001 -17.54513873299993, 122.1884722960001 -17.54513873299993, 122.1884722960001 -17.545416510999928, 122.1890278510001 -17.545416510999928, 122.1890278510001 -17.545694288999982, 122.18986118500004 -17.545694288999982, 122.18986118500004 -17.54597206699998, 122.19986118500003 -17.54597206699998, 122.19986118500003 -17.54624984399993, 122.20041674000004 -17.54624984399993, 122.20041674000004 -17.546527621999928, 122.20069451800009 -17.546527621999928, 122.20069451800009 -17.546805399999982, 122.20097229600003 -17.546805399999982, 122.20097229600003 -17.54708317799998, 122.20125007400009 -17.54708317799998, 122.20125007400009 -17.54736095499993, 122.20152785100004 -17.54736095499993, 122.20152785100004 -17.54763873299993, 122.20180562900009 -17.54763873299993, 122.20180562900009 -17.547916510999983, 122.20208340700003 -17.547916510999983, 122.20208340700003 -17.54819428899998, 122.20236118500009 -17.54819428899998, 122.20236118500009 -17.548472066999977, 122.20375007400003 -17.548472066999977, 122.20375007400003 -17.54874984399993, 122.20458340700009 -17.54874984399993, 122.20458340700009 -17.549027621999983, 122.20513896300008 -17.549027621999983, 122.20513896300008 -17.54930539999998, 122.20541674000003 -17.54930539999998, 122.20541674000003 -17.549583177999978, 122.20597229600003 -17.549583177999978, 122.20597229600003 -17.549860955999975, 122.20625007400008 -17.549860955999975, 122.20625007400008 -17.550138732999926, 122.20652785100003 -17.550138732999926, 122.20652785100003 -17.55041651099998, 122.20680562900009 -17.55041651099998, 122.20680562900009 -17.550694288999978, 122.20708340700003 -17.550694288999978, 122.20708340700003 -17.550972066999975, 122.20736118500008 -17.550972066999975, 122.20736118500008 -17.551249843999926, 122.20763896300002 -17.551249843999926, 122.20763896300002 -17.55152762199998, 122.20791674000009 -17.55152762199998, 122.20791674000009 -17.551805399999978, 122.20819451800003 -17.551805399999978, 122.20819451800003 -17.552083177999975, 122.20847229600008 -17.552083177999975, 122.20847229600008 -17.552360955999973, 122.20875007400002 -17.552360955999973, 122.20875007400002 -17.55263873299998, 122.20902785100009 -17.55263873299998, 122.20902785100009 -17.552916510999978, 122.20930562900003 -17.552916510999978, 122.20930562900003 -17.553194288999975, 122.20958340700008 -17.553194288999975, 122.20958340700008 -17.553472066999973, 122.21013896300008 -17.553472066999973, 122.21013896300008 -17.55374984399998, 122.21041674000003 -17.55374984399998, 122.21041674000003 -17.55402762199998, 122.21069451800008 -17.55402762199998, 122.21069451800008 -17.554305399999976, 122.21097229600002 -17.554305399999976, 122.21097229600002 -17.554583177999973, 122.21125007400008 -17.554583177999973, 122.21125007400008 -17.55486095599997, 122.21152785100003 -17.55486095599997, 122.21152785100003 -17.55513873299998, 122.21180562900008 -17.55513873299998, 122.21180562900008 -17.555416510999976, 122.21208340700002 -17.555416510999976, 122.21208340700002 -17.555694288999973, 122.21236118500008 -17.555694288999973, 122.21236118500008 -17.55597206699997, 122.21319451800002 -17.55597206699997, 122.21319451800002 -17.55624984399998, 122.21402785100008 -17.55624984399998, 122.21402785100008 -17.556527621999976, 122.21486118500002 -17.556527621999976, 122.21486118500002 -17.556805399999973, 122.21541674000002 -17.556805399999973, 122.21541674000002 
-17.55708317799997, 122.21569451800008 -17.55708317799997, 122.21569451800008 -17.557360955999968, 122.21625007400007 -17.557360955999968, 122.21625007400007 -17.557638732999976, 122.21680562900008 -17.557638732999976, 122.21680562900008 -17.557916510999974, 122.21736118500007 -17.557916510999974, 122.21736118500007 -17.55819428899997, 122.21791674000008 -17.55819428899997, 122.21791674000008 -17.55847206699997, 122.21819451800002 -17.55847206699997, 122.21819451800002 -17.558749843999976, 122.21847229600007 -17.558749843999976, 122.21847229600007 -17.559027621999974, 122.21875007400001 -17.559027621999974, 122.21875007400001 -17.55930539999997, 122.21930562900002 -17.55930539999997, 122.21930562900002 -17.55958317799997, 122.21958340700007 -17.55958317799997, 122.21958340700007 -17.559860955999966, 122.22041674000002 -17.559860955999966, 122.22041674000002 -17.560138732999974, 122.22097229600001 -17.560138732999974, 122.22097229600001 -17.56041651099997, 122.22152785200001 -17.56041651099997, 122.22152785200001 -17.56069428899997, 122.22180562900007 -17.56069428899997, 122.22180562900007 -17.560972066999966, 122.22208340700001 -17.560972066999966, 122.22208340700001 -17.561249843999974, 122.22236118500007 -17.561249843999974, 122.22236118500007 -17.56152762199997, 122.22263896300001 -17.56152762199997, 122.22263896300001 -17.56180539999997, 122.22291674000007 -17.56180539999997, 122.22291674000007 -17.562083177999966, 122.22319451800001 -17.562083177999966, 122.22319451800001 -17.562360955999964, 122.22347229600007 -17.562360955999964, 122.22347229600007 -17.56263873299997, 122.22375007400001 -17.56263873299997, 122.22375007400001 -17.56291651099997, 122.22402785200006 -17.56291651099997, 122.22402785200006 -17.563194288999966, 122.22430562900001 -17.563194288999966, 122.22430562900001 -17.563472066999964, 122.23097229600012 -17.563472066999964, 122.23097229600012 -17.56374984499996, 122.23125007400006 -17.56374984499996, 122.23125007400006 -17.56402762199997)))"^^geo:wktLiteral ] ; - geo:hasMetricArea 3.455107e+07 . - - a geo:Feature, - ahgf:ContractedCatchment ; - dcterms:identifier "cabbage-tree"^^xsd:token ; - dcterms:title "Cabbage Tree Creek" ; - dcterms:type ahgf:NonContractedArea ; - geo:hasGeometry [ - geo:asWKT "POLYGON ((153.083029 -27.3297989, 153.0638029 -27.3246138, 153.0263808 -27.3270538, 152.9929068 -27.3546536, 152.9685309 -27.3744725, 152.9582312 -27.4061752, 152.9966833 -27.414709, 153.0366804 -27.4075467, 153.0677511 -27.4016032, 153.0775358 -27.384533, 153.0838873 -27.3676125, 153.0933287 -27.3546536, 153.0984785 -27.3459627, 153.0921271 -27.339406, 153.0857756 -27.3333063, 153.083029 -27.3297989))"^^geo:wktLiteral ; - ] ; -. 
-
- a geo:Feature,
- ahgf:ContractedCatchment ;
- dcterms:identifier "cabbage-tree-geojson"^^xsd:token ;
- dcterms:title "Cabbage Tree Creek w GeoJSON" ;
- dcterms:type ahgf:NonContractedArea ;
- geo:hasGeometry [
- geo:asWKT "POLYGON ((153.083029 -27.3297989, 153.0638029 -27.3246138, 153.0263808 -27.3270538, 152.9929068 -27.3546536, 152.9685309 -27.3744725, 152.9582312 -27.4061752, 152.9966833 -27.414709, 153.0366804 -27.4075467, 153.0677511 -27.4016032, 153.0775358 -27.384533, 153.0838873 -27.3676125, 153.0933287 -27.3546536, 153.0984785 -27.3459627, 153.0921271 -27.339406, 153.0857756 -27.3333063, 153.083029 -27.3297989))"^^geo:wktLiteral ;
- geo:asGeoJSON """{
- "type": "Polygon",
- "coordinates": [
- [
- [153.083029, -27.3297989],
- [153.0638029, -27.3246138],
- [153.0263808, -27.3270538],
- [152.9929068, -27.3546536],
- [152.9685309, -27.3744725],
- [152.9582312, -27.4061752],
- [152.9966833, -27.414709],
-
- [153.0775358, -27.384533],
- [153.0838873, -27.3676125],
- [153.0933287, -27.3546536],
- [153.0984785, -27.3459627],
- [153.0921271, -27.339406],
- [153.0857756, -27.3333063],
- [153.083029, -27.3297989]
- ]
- ]
- }"""^^geo:asGeoJSON ;
- ] ;
-.
-
- a geo:Feature,
- ahgf:ContractedCatchment ;
- dcterms:identifier "kedron"^^xsd:token ;
- dcterms:title "Kedron Brook" ;
- dcterms:type ahgf:NonContractedArea ;
- geo:hasGeometry [
- geo:asWKT "POLYGON ((153.0984785 -27.3459627, 153.0838873 -27.3676125, 153.0775358 -27.384533, 153.0677511 -27.4016032, 153.0497267 -27.4048036, 153.049555 -27.4163852, 153.0593397 -27.4253753, 153.0735876 -27.4244611, 153.0972769 -27.4169947, 153.1034567 -27.4038892, 153.1118681 -27.3961167, 153.1214812 -27.3857524, 153.1247427 -27.3731005, 153.1211378 -27.3653258, 153.1187346 -27.3575505, 153.114443 -27.3496222, 153.1080916 -27.347335, 153.0984785 -27.3459627))"^^geo:wktLiteral
- ] ;
-.
diff --git a/tests/data/spaceprez/input/gnaf_small.ttl b/tests/data/spaceprez/input/gnaf_small.ttl
deleted file mode 100755
index 61d64bd2..00000000
--- a/tests/data/spaceprez/input/gnaf_small.ttl
+++ /dev/null
@@ -1,318 +0,0 @@
-PREFIX addr:
-PREFIX dcat:
-PREFIX dcterms:
-PREFIX geo:
-PREFIX gnaf:
-PREFIX rdfs:
-PREFIX sdo:
-PREFIX skos:
-PREFIX xsd:
-
-
-
- a dcat:Dataset ;
- dcterms:identifier "gnaf"^^xsd:token ;
- dcterms:source "https://data.gov.au/data/datasets/19432f89-dc3a-4ef3-b943-5326ef1dbecc"^^xsd:anyURI ;
- dcterms:title "Geocoded National Address File"@en ;
- geo:hasBoundingBox [
- a geo:Geometry ;
- geo:asWKT "POLYGON ((96 -45, 96 -9, 168 -9, 168 -45, 96 -45))"^^geo:wktLiteral
- ] ;
- rdfs:member gnaf:address ;
- dcterms:description "The Australian Geocoded National Address File (G-NAF) is Australia’s authoritative, geocoded address file. It is built and maintained by Geoscape Australia using authoritative government data.."@en ;
- skos:prefLabel "Geocoded National Address File"@en ;
- sdo:creator ;
- sdo:dateCreated "2022-05-25"^^xsd:date ;
- sdo:dateModified "2022-05-25"^^xsd:date ;
- sdo:datePublished "0000-00-00"^^xsd:date ;
- sdo:publisher ;
-.
-
-
- a sdo:Organization ;
- sdo:name "Geoscience Australia" ;
- sdo:url "https://www.ga.gov.au"^^xsd:anyURI ;
-.
-
-gnaf:address
- a geo:FeatureCollection ;
- dcterms:description "Contains the G-NAF's instances of the National Address Model's Address class"@en ;
- dcterms:identifier "address"^^xsd:token ;
- dcterms:isPartOf ;
- dcterms:title "Addresses Feature Collection"@en ;
- geo:hasBoundingBox [
- a geo:Geometry ;
- geo:asWKT "POLYGON ((96 -45, 96 -9, 168 -9, 168 -45, 96 -45))"^^geo:wktLiteral
- ] ;
- rdfs:member
- ,
- ,
- ,
- ;
-.
-
-
- a
- addr:Address ,
- geo:Feature ;
- dcterms:identifier "GAQLD155129953"^^xsd:token ;
- addr:dateCreated "2013-01-11"^^xsd:date ;
- addr:dateModified "2021-07-07"^^xsd:date ;
- addr:hasAddressComponent
- [
- addr:hasAddressComponentType ;
- addr:hasTextValue "locc589ce7a8432" ;
- addr:hasValue
- ] ,
- [
- addr:hasAddressComponentType ;
- addr:hasTextValue "QLD172655" ;
- addr:hasValue
- ] ,
- [
- addr:hasAddressComponentType ;
- addr:hasTextValue "43" ;
- addr:hasValue "43"
- ] ,
- [
- addr:hasAddressComponentType ;
- addr:hasTextValue "4116" ;
- addr:hasValue
- ] ,
- [
- addr:hasAddressComponentType ;
- addr:hasTextValue "11" ;
- addr:hasValue "11"
- ] ;
- addr:hasQualifiedGeometry [
- addr:hasRole ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asWKT "POINT (153.0550911 -27.61729753)"^^geo:wktLiteral ;
- ]
- ] ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asWKT "POINT (153.0550911 -27.61729753)"^^geo:wktLiteral ;
- ] ;
- addr:isAddressFor
- ,
- ;
- geo:sfWithin
- ,
- ;
-.
-
-
- a
- addr:Address ,
- geo:Feature ;
- dcterms:identifier "GAQLD159032388"^^xsd:token ;
- addr:dateCreated "2004-05-09"^^xsd:date ;
- addr:dateModified "2021-07-07"^^xsd:date ;
- addr:hasAddressComponent
- [
- addr:hasAddressComponentType ;
- addr:hasTextValue "QLD125772" ;
- addr:hasValue
- ] ,
- [
- addr:hasAddressComponentType ;
- addr:hasTextValue "loc69e70bdd81a8" ;
- addr:hasValue
- ] ,
- [
- addr:hasAddressComponentType ;
- addr:hasTextValue "343" ;
- addr:hasValue "343"
- ] ,
- [
- addr:hasAddressComponentType ;
- addr:hasTextValue "4013" ;
- addr:hasValue
- ] ,
- [
- addr:hasAddressComponentType ;
- addr:hasTextValue "1" ;
- addr:hasValue "1"
- ] ,
- [
- addr:hasAddressComponentType ;
- addr:hasTextValue "UNIT" ;
- addr:hasValue "UNIT"
- ] ;
- addr:hasPrimary ;
- addr:hasQualifiedGeometry [
- addr:hasRole ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asWKT "POINT (153.07162251 -27.39373631)"^^geo:wktLiteral ;
- ]
- ] ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asWKT "POINT (153.07162251 -27.39373631)"^^geo:wktLiteral ;
- ] ;
- addr:isAddressFor
- ,
- ;
- geo:sfWithin
- ,
- ;
-.
-
-
- a
- addr:Address ,
- geo:Feature ;
- dcterms:identifier "GAQLD163179943"^^xsd:token ;
- addr:dateCreated "2011-04-20"^^xsd:date ;
- addr:dateModified "2021-07-07"^^xsd:date ;
- addr:hasAddressComponent
- [
- addr:hasAddressComponentType ;
- addr:hasTextValue "QLD3324769" ;
- addr:hasValue
- ] ,
- [
- addr:hasAddressComponentType ;
- addr:hasTextValue "4818" ;
- addr:hasValue
- ] ,
- [
- addr:hasAddressComponentType ;
- addr:hasTextValue "102" ;
- addr:hasValue "102"
- ] ,
- [
- addr:hasAddressComponentType ;
- addr:hasTextValue "40" ;
- addr:hasValue "40"
- ] ,
- [
- addr:hasAddressComponentType ;
- addr:hasTextValue "loc510fd5eb7c24" ;
- addr:hasValue
- ] ;
- addr:hasQualifiedGeometry [
- addr:hasRole ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asWKT "POINT (146.70866358 -19.25418702)"^^geo:wktLiteral ;
- ]
- ] ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asWKT "POINT (146.70866358 -19.25418702)"^^geo:wktLiteral ;
- ] ;
- addr:isAddressFor
- ,
- ;
- geo:sfWithin
- ,
- ;
-.
-
-
- a
- addr:Address ,
- geo:Feature ;
- dcterms:identifier "GAQLD719299059"^^xsd:token ;
- addr:dateCreated "2017-04-21"^^xsd:date ;
- addr:dateModified "2021-07-07"^^xsd:date ;
- addr:hasAddressComponent
- [
- addr:hasAddressComponentType ;
- addr:hasTextValue "4132" ;
- addr:hasValue
- ] ,
- [
- addr:hasAddressComponentType ;
- addr:hasTextValue "44" ;
- addr:hasValue "44"
- ] ,
- [
- addr:hasAddressComponentType ;
- addr:hasTextValue "loc0e23d7691901" ;
- addr:hasValue
- ] ,
- [
- addr:hasAddressComponentType ;
- addr:hasTextValue "QLD110859" ;
- addr:hasValue
- ] ,
- [
- addr:hasAddressComponentType ;
- addr:hasTextValue "82" ;
- addr:hasValue "82"
- ] ;
- addr:hasQualifiedGeometry [
- addr:hasRole ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asWKT "POINT (153.08998758 -27.66876057)"^^geo:wktLiteral
- ]
- ] ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asWKT "POINT (153.08998758 -27.66876057)"^^geo:wktLiteral
- ] ;
- addr:hasSecondary
- ,
- ;
- addr:isAddressFor
- ,
- ;
- geo:sfWithin
- ,
- ;
-.
-
-
- rdfs:label "Marsden" ;
-.
-
-
- rdfs:label "Burdell" ;
-.
-
-
- rdfs:label "Northgate" ;
-.
-
-
- rdfs:label "Calamvale" ;
-.
-
-
- rdfs:label "Kerry" ;
-.
-
-
- rdfs:label "Melton" ;
-.
-
-
- rdfs:label "Palatine" ;
-.
-
-
- rdfs:label "Lady Musgrave" ;
-.
-
-
- rdfs:label "Mesh Block 30044850000" ;
-.
-
-
- rdfs:label "Mesh Block 30562470700" ;
-.
-
-
- rdfs:label "Mesh Block 30562777400" ;
-.
-
-
- rdfs:label "Mesh Block 30563194000" ;
-.
diff --git a/tests/data/spaceprez/input/labels.ttl b/tests/data/spaceprez/input/labels.ttl
deleted file mode 100755
index 51f3e6c3..00000000
--- a/tests/data/spaceprez/input/labels.ttl
+++ /dev/null
@@ -1,13 +0,0 @@
-PREFIX dcat:
-PREFIX dcterms:
-PREFIX geo:
-PREFIX geofab:
-PREFIX rdfs:
-PREFIX sand:
-PREFIX xsd:
-
-
-dcterms:identifier rdfs:label "Identifier"@en ;
- rdfs:comment "A unique identifier of the item." .
-
-dcat:Dataset rdfs:label "Dataset"@en .
diff --git a/tests/data/spaceprez/input/multiple_object.ttl b/tests/data/spaceprez/input/multiple_object.ttl
deleted file mode 100755
index 303a862e..00000000
--- a/tests/data/spaceprez/input/multiple_object.ttl
+++ /dev/null
@@ -1,30 +0,0 @@
-PREFIX dcat:
-PREFIX skos:
-PREFIX dcterms:
-PREFIX reg:
-PREFIX status:
-PREFIX rdfs:
-PREFIX xsd:
-PREFIX geo:
-
-
- a geo:Feature ;
- dcterms:identifier "alteration_facies_contact"^^xsd:token ;
- dcterms:provenance "this vocabulary"@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A metasomatic facies contact separating rocks that have undergone alteration of a particular facies from those that have undergone metasomatism of another facies. Alteration is a kind of metasomatism that does not introduce economically important minerals."@en ;
- skos:inScheme ;
- skos:prefLabel "alteration facies contact"@en ;
-.
-
-
- a geo:FeatureCollection ;
- rdfs:member ;
-.
-
-
- a dcat:Dataset ;
- rdfs:member ;
-.
diff --git a/tests/data/spaceprez/input/sandgate.ttl b/tests/data/spaceprez/input/sandgate.ttl
deleted file mode 100755
index de42fbfb..00000000
--- a/tests/data/spaceprez/input/sandgate.ttl
+++ /dev/null
@@ -1,295 +0,0 @@
-PREFIX dcat:
-PREFIX dcterms:
-PREFIX geo:
-PREFIX geofab:
-PREFIX rdfs:
-PREFIX sand:
-PREFIX xsd:
-
-
- a dcat:Dataset ;
- dcterms:description "Example floods, roads, catchment and facilities in the Sandgate are"@en ;
- dcterms:identifier "sandgate"^^xsd:token ;
- dcterms:title "Sandgate example dataset"@en ;
- rdfs:member
- sand:catchments ,
- sand:facilities ,
- sand:floods ,
- sand:roads ;
- geo:hasBoundingBox [
- a geo:Geometry ;
- geo:asWKT "POLYGON ((152.9075 -27.42,153.16 -27.42,153.16 -27.2234024,152.9075 -27.2234024,152.9075 -27.42))"^^geo:wktLiteral
- ] ;
-.
-
-sand:catchments
- a geo:FeatureCollection ;
- dcterms:description "Hydrological catchments that are 'contracted', that is, guarenteed, to appear on multiple Geofabric surface hydrology data products"@en ;
- dcterms:identifier "catchments"^^xsd:token ;
- dcterms:title "Geofabric Contracted Catchments"@en ;
- rdfs:label "Geofabric Contracted Catchments"@en ;
- rdfs:member
- sand:cc12109444 ,
- sand:cc12109445 ;
- geo:hasBoundingBox [
- a geo:Geometry ;
- geo:asWKT "POLYGON ((152.9075 -27.42,153.16 -27.42,153.16 -27.2775,152.9075 -27.2775,152.9075 -27.42))"^^geo:wktLiteral
- ] ;
-.
-
-sand:facilities
- a geo:FeatureCollection ;
- dcterms:description "Sandgate area demo Facilities"@en ;
- dcterms:identifier "facilities"^^xsd:token ;
- dcterms:title "Sandgate are demo Facilities"@en ;
- rdfs:label "Sandgate are demo Facilities"@en ;
- rdfs:member
- sand:bhc ,
- sand:bhca ,
- sand:bps ,
- sand:cpc ,
- sand:jcabi ,
- sand:rps ,
- sand:sac ,
- sand:sps ,
- sand:src ,
- sand:srca ;
- geo:hasBoundingBox [
- a geo:Geometry ;
- geo:asWKT "POLYGON ((153.0144819 -27.3506599,153.1143102 -27.3506599,153.1143102 -27.2234024,153.0144819 -27.2234024,153.0144819 -27.3506599))"^^geo:wktLiteral
- ] ;
-.
-
-sand:floods
- a geo:FeatureCollection ;
- dcterms:description "Sandgate flooded areas"@en ;
- dcterms:identifier "floods"^^xsd:token ;
- dcterms:title "Sandgate flooded areas"@en ;
- rdfs:label "Sandgate flooded areas"@en ;
- rdfs:member
- sand:f001 ,
- sand:f023 ,
- sand:f332 ,
- sand:f632 ;
- geo:hasBoundingBox [
- a geo:Geometry ;
- geo:asWKT "POLYGON ((153.06307 -27.3151243,153.069877 -27.3151243,153.069877 -27.2859541,153.06307 -27.2859541,153.06307 -27.3151243))"^^geo:wktLiteral
- ] ;
-.
-
-sand:roads
- a geo:FeatureCollection ;
- dcterms:description "Sandgate main roads"@en ;
- dcterms:identifier "roads"^^xsd:token ;
- dcterms:title "Sandgate main roads"@en ;
- rdfs:label "Sandgate main roads"@en ;
- rdfs:member
- sand:bt ,
- sand:fp ;
- geo:hasBoundingBox [
- a geo:Geometry ;
- geo:asWKT "POLYGON ((153.0617934 -27.3203138,153.0747569 -27.3203138,153.0747569 -27.2920918,153.0617934 -27.2920918,153.0617934 -27.3203138))"^^geo:wktLiteral
- ] ;
-.
-
-sand:bhc
- a geo:Feature ;
- rdfs:label "Brighton Health Campus Location" ;
- dcterms:identifier "bhc"^^xsd:token ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0638169, -27.2897951]}"^^geo:geoJSONLiteral ;
- geo:asWKT "POINT (153.0638169 -27.2897951)"^^geo:wktLiteral
- ] ;
-.
-
-sand:bhca
- a geo:Feature ;
- rdfs:label "Brighton Health Campus Area" ;
- dcterms:identifier "bhca"^^xsd:token ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asGeoJSON "{\"type\": \"Polygon\", \"coordinates\": [[[153.063644,-27.2894036],[153.0635207,-27.2896229],[153.0631612,-27.2896182],[153.0631291,-27.289909],[153.0631559,-27.290338],[153.0644487,-27.2904858],[153.0645614,-27.2899185],[153.0648349,-27.2895324],[153.0648135,-27.2889174],[153.0637674,-27.2887362],[153.063644,-27.2894036]]]}"^^geo:geoJSONLiteral ;
- geo:asWKT "POLYGON ((153.063644 -27.2894036, 153.0635207 -27.2896229, 153.0631612 -27.2896182, 153.0631291 -27.289909, 153.0631559 -27.290338, 153.0644487 -27.2904858, 153.0645614 -27.2899185, 153.0648349 -27.2895324, 153.0648135 -27.2889174, 153.0637674 -27.2887362, 153.063644 -27.2894036))"^^geo:wktLiteral
- ] ;
-.
-
-sand:bps
- a geo:Feature ;
- rdfs:label "Boondal Police Station" ;
- dcterms:identifier "bps"^^xsd:token ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0536022, -27.3497934]}"^^geo:geoJSONLiteral ;
- geo:asWKT "POINT (153.0536022 -27.3497934)"^^geo:wktLiteral
- ] ;
-.
-
-sand:bt
- a geo:Feature ;
- rdfs:label "Brighton Terrace" ;
- dcterms:identifier "bt"^^xsd:token ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asGeoJSON "{ \"type\": \"LineString\", \"coordinates\": [ [ 153.06513, -27.3143431 ], [ 153.065881100000013, -27.3140285 ], [ 153.0653983, -27.3130466 ], [ 153.0652052, -27.3122745 ], [ 153.0651193, -27.3116453 ], [ 153.064550700000012, -27.3103202 ], [ 153.0641108, -27.3092526 ], [ 153.0637889, -27.3074031 ], [ 153.0631774, -27.3057253 ], [ 153.0628448, -27.3044573 ], [ 153.0627053, -27.3036565 ], [ 153.061847, -27.2988706 ], [ 153.0617934, -27.2952 ], [ 153.062168899999989, -27.2933312 ], [ 153.0622333, -27.2920918 ] ] }"^^geo:geoJSONLiteral ;
- geo:asWKT "LINESTRING (153.06513 -27.3143431,153.0658811 -27.3140285,153.0653983 -27.3130466,153.0652052 -27.3122745,153.0651193 -27.3116453,153.0645507 -27.3103202,153.0641108 -27.3092526,153.0637889 -27.3074031,153.0631774 -27.3057253,153.0628448 -27.3044573,153.0627053 -27.3036565,153.061847 -27.2988706,153.0617934 -27.2952,153.0621689 -27.2933312,153.0622333 -27.2920918)"^^geo:wktLiteral
- ] ;
-.
- -sand:cc12109444 - a - geo:Feature , - geofab:ContractedCatchment ; - rdfs:label "Contracted Catchment 12109444" ; - dcterms:identifier "cc12109444"^^xsd:token ; - geo:hasGeometry [ - a geo:Geometry ; - geo:asGeoJSON '{"type": "Polygon", "coordinates": [[[153.06, -27.28], [153.06, -27.2825], [153.0625, -27.2825], [153.065, -27.2825], [153.065, -27.305], [153.0675, -27.305], [153.0675, -27.31], [153.07, -27.31], [153.07, -27.3125], [153.0725, -27.3125], [153.0725, -27.3175], [153.075, -27.3175], [153.075, -27.32], [153.0775, -27.32], [153.0775, -27.3225], [153.08, -27.3225], [153.085, -27.3225], [153.085, -27.325], [153.0875, -27.325], [153.0875, -27.33], [153.085, -27.33], [153.0825, -27.33], [153.0825, -27.3325], [153.085, -27.3325], [153.085, -27.335], [153.0875, -27.335], [153.09, -27.335], [153.09, -27.3375], [153.0925, -27.3375], [153.0925, -27.34], [153.0975, -27.34], [153.0975, -27.3425], [153.1025, -27.3425], [153.1025, -27.345], [153.1075, -27.345], [153.1075, -27.3475], [153.11, -27.3475], [153.1125, -27.3475], [153.115, -27.3475], [153.115, -27.35], [153.12, -27.35], [153.12, -27.3525], [153.125, -27.3525], [153.125, -27.355], [153.13, -27.355], [153.13, -27.3575], [153.135, -27.3575], [153.135, -27.36], [153.1375, -27.36], [153.1425, -27.36], [153.1475, -27.36], [153.1525, -27.36], [153.1525, -27.3625], [153.155, -27.3625], [153.155, -27.365], [153.1575, -27.365], [153.1575, -27.375], [153.16, -27.375], [153.16, -27.3775], [153.16, -27.38], [153.1575, -27.38], [153.155, -27.38], [153.155, -27.3825], [153.1525, -27.3825], [153.1525, -27.385], [153.15, -27.385], [153.15, -27.3875], [153.145, -27.3875], [153.145, -27.39], [153.1425, -27.39], [153.1425, -27.3925], [153.14, -27.3925], [153.14, -27.395], [153.14, -27.3975], [153.14, -27.4], [153.1375, -27.4], [153.1375, -27.4025], [153.135, -27.4025], [153.135, -27.405], [153.135, -27.4075], [153.135, -27.4125], [153.135, -27.415], [153.13, -27.415], [153.13, -27.4175], [153.1275, -27.4175], [153.1225, -27.4175], [153.1225, -27.42], [153.1175, -27.42], [153.1125, -27.42], [153.1125, -27.4175], [153.11, -27.4175], [153.11, -27.415], [153.1075, -27.415], [153.1075, -27.4125], [153.0975, -27.4125], [153.0975, -27.415], [153.0925, -27.415], [153.0875, -27.415], [153.085, -27.415], [153.08, -27.415], [153.08, -27.4125], [153.0775, -27.4125], [153.0775, -27.41], [153.075, -27.41], [153.075, -27.405], [153.07, -27.405], [153.07, -27.4025], [153.0675, -27.4025], [153.0675, -27.4], [153.065, -27.4], [153.065, -27.3975], [153.0625, -27.3975], [153.0625, -27.395], [153.06, -27.395], [153.06, -27.3925], [153.0275, -27.3925], [153.0275, -27.395], [153.025, -27.395], [153.025, -27.3975], [153.0175, -27.3975], [153.0175, -27.4], [153.0125, -27.4], [153.0125, -27.4025], [153.005, -27.4025], [153.005, -27.405], [153.0025, -27.405], [152.9975, -27.405], [152.9975, -27.4025], [152.9925, -27.4025], [152.9925, -27.4], [152.9875, -27.4], [152.9825, -27.4], [152.9825, -27.3975], [152.98, -27.3975], [152.98, -27.3925], [152.975, -27.3925], [152.975, -27.3875], [152.97, -27.3875], [152.96, -27.3875], [152.96, -27.39], [152.955, -27.39], [152.955, -27.3925], [152.945, -27.3925], [152.94, -27.3925], [152.9375, -27.3925], [152.9375, -27.39], [152.925, -27.39], [152.925, -27.385], [152.925, -27.3825], [152.93, -27.3825], [152.9325, -27.3825], [152.9325, -27.38], [152.9375, -27.38], [152.9375, -27.3825], [152.94, -27.3825], [152.94, -27.38], [152.9475, -27.38], [152.9475, -27.3825], [152.9525, -27.3825], [152.9525, -27.38], [152.965, -27.38], [152.9675, 
-27.38], [152.9675, -27.3775], [152.98, -27.3775], [152.98, -27.375], [152.9825, -27.375], [152.9825, -27.3725], [152.985, -27.3725], [152.985, -27.37], [152.9875, -27.37], [152.9875, -27.3675], [152.99, -27.3675], [152.99, -27.3625], [152.9925, -27.3625], [152.9925, -27.355], [152.995, -27.355], [152.995, -27.3525], [153, -27.3525], [153, -27.35], [153.005, -27.35], [153.01, -27.35], [153.01, -27.3475], [153.0175, -27.3475], [153.0175, -27.335], [153.02, -27.335], [153.02, -27.33], [153.0225, -27.33], [153.0225, -27.3275], [153.025, -27.3275], [153.025, -27.325], [153.0275, -27.325], [153.0275, -27.3225], [153.03, -27.3225], [153.03, -27.32], [153.0325, -27.32], [153.0325, -27.3175], [153.035, -27.3175], [153.035, -27.305], [153.0375, -27.305], [153.0375, -27.3], [153.04, -27.3], [153.04, -27.2975], [153.0425, -27.2975], [153.0425, -27.2825], [153.04, -27.2825], [153.04, -27.28], [153.0425, -27.28], [153.05, -27.28], [153.06, -27.28]]]}'^^geo:geoJSONLiteral ; - geo:asWKT "POLYGON ((153.06 -27.28, 153.06 -27.2825, 153.0625 -27.2825, 153.065 -27.2825, 153.065 -27.305, 153.0675 -27.305, 153.0675 -27.31, 153.07 -27.31, 153.07 -27.3125, 153.0725 -27.3125, 153.0725 -27.3175, 153.075 -27.3175, 153.075 -27.32, 153.0775 -27.32, 153.0775 -27.3225, 153.08 -27.3225, 153.085 -27.3225, 153.085 -27.325, 153.0875 -27.325, 153.0875 -27.33, 153.085 -27.33, 153.0825 -27.33, 153.0825 -27.3325, 153.085 -27.3325, 153.085 -27.335, 153.0875 -27.335, 153.09 -27.335, 153.09 -27.3375, 153.0925 -27.3375, 153.0925 -27.34, 153.0975 -27.34, 153.0975 -27.3425, 153.1025 -27.3425, 153.1025 -27.345, 153.1075 -27.345, 153.1075 -27.3475, 153.11 -27.3475, 153.1125 -27.3475, 153.115 -27.3475, 153.115 -27.35, 153.12 -27.35, 153.12 -27.3525, 153.125 -27.3525, 153.125 -27.355, 153.13 -27.355, 153.13 -27.3575, 153.135 -27.3575, 153.135 -27.36, 153.1375 -27.36, 153.1425 -27.36, 153.1475 -27.36, 153.1525 -27.36, 153.1525 -27.3625, 153.155 -27.3625, 153.155 -27.365, 153.1575 -27.365, 153.1575 -27.375, 153.16 -27.375, 153.16 -27.3775, 153.16 -27.38, 153.1575 -27.38, 153.155 -27.38, 153.155 -27.3825, 153.1525 -27.3825, 153.1525 -27.385, 153.15 -27.385, 153.15 -27.3875, 153.145 -27.3875, 153.145 -27.39, 153.1425 -27.39, 153.1425 -27.3925, 153.14 -27.3925, 153.14 -27.395, 153.14 -27.3975, 153.14 -27.4, 153.1375 -27.4, 153.1375 -27.4025, 153.135 -27.4025, 153.135 -27.405, 153.135 -27.4075, 153.135 -27.4125, 153.135 -27.415, 153.13 -27.415, 153.13 -27.4175, 153.1275 -27.4175, 153.1225 -27.4175, 153.1225 -27.42, 153.1175 -27.42, 153.1125 -27.42, 153.1125 -27.4175, 153.11 -27.4175, 153.11 -27.415, 153.1075 -27.415, 153.1075 -27.4125, 153.0975 -27.4125, 153.0975 -27.415, 153.0925 -27.415, 153.0875 -27.415, 153.085 -27.415, 153.08 -27.415, 153.08 -27.4125, 153.0775 -27.4125, 153.0775 -27.41, 153.075 -27.41, 153.075 -27.405, 153.07 -27.405, 153.07 -27.4025, 153.0675 -27.4025, 153.0675 -27.4, 153.065 -27.4, 153.065 -27.3975, 153.0625 -27.3975, 153.0625 -27.395, 153.06 -27.395, 153.06 -27.3925, 153.0275 -27.3925, 153.0275 -27.395, 153.025 -27.395, 153.025 -27.3975, 153.0175 -27.3975, 153.0175 -27.4, 153.0125 -27.4, 153.0125 -27.4025, 153.005 -27.4025, 153.005 -27.405, 153.0025 -27.405, 152.9975 -27.405, 152.9975 -27.4025, 152.9925 -27.4025, 152.9925 -27.4, 152.9875 -27.4, 152.9825 -27.4, 152.9825 -27.3975, 152.98 -27.3975, 152.98 -27.3925, 152.975 -27.3925, 152.975 -27.3875, 152.97 -27.3875, 152.96 -27.3875, 152.96 -27.39, 152.955 -27.39, 152.955 -27.3925, 152.945 -27.3925, 152.94 -27.3925, 152.9375 -27.3925, 152.9375 -27.39, 152.925 -27.39, 
152.925 -27.385, 152.925 -27.3825, 152.93 -27.3825, 152.9325 -27.3825, 152.9325 -27.38, 152.9375 -27.38, 152.9375 -27.3825, 152.94 -27.3825, 152.94 -27.38, 152.9475 -27.38, 152.9475 -27.3825, 152.9525 -27.3825, 152.9525 -27.38, 152.965 -27.38, 152.9675 -27.38, 152.9675 -27.3775, 152.98 -27.3775, 152.98 -27.375, 152.9825 -27.375, 152.9825 -27.3725, 152.985 -27.3725, 152.985 -27.37, 152.9875 -27.37, 152.9875 -27.3675, 152.99 -27.3675, 152.99 -27.3625, 152.9925 -27.3625, 152.9925 -27.355, 152.995 -27.355, 152.995 -27.3525, 153 -27.3525, 153 -27.35, 153.005 -27.35, 153.01 -27.35, 153.01 -27.3475, 153.0175 -27.3475, 153.0175 -27.335, 153.02 -27.335, 153.02 -27.33, 153.0225 -27.33, 153.0225 -27.3275, 153.025 -27.3275, 153.025 -27.325, 153.0275 -27.325, 153.0275 -27.3225, 153.03 -27.3225, 153.03 -27.32, 153.0325 -27.32, 153.0325 -27.3175, 153.035 -27.3175, 153.035 -27.305, 153.0375 -27.305, 153.0375 -27.3, 153.04 -27.3, 153.04 -27.2975, 153.0425 -27.2975, 153.0425 -27.2825, 153.04 -27.2825, 153.04 -27.28, 153.0425 -27.28, 153.05 -27.28, 153.06 -27.28))"^^geo:wktLiteral - ] ; -. - -sand:cc12109445 - a - geo:Feature , - geofab:ContractedCatchment ; - rdfs:label "Contracted Catchment 12109445" ; - dcterms:identifier "cc12109445"^^xsd:token ; - geo:hasGeometry [ - a geo:Geometry ; - geo:asGeoJSON '{"type": "Polygon", "coordinates": [[[153.0025, -27.2775], [153.0025, -27.28], [153.005, -27.28], [153.005, -27.285], [153.0075, -27.285], [153.015, -27.285], [153.015, -27.29], [153.0175, -27.29], [153.0175, -27.2925], [153.0175, -27.3025], [153.02, -27.3025], [153.02, -27.305], [153.0225, -27.305], [153.0225, -27.31], [153.0175, -27.31], [153.0175, -27.3125], [153.015, -27.3125], [153.015, -27.315], [153.015, -27.3175], [153.0175, -27.3175], [153.0175, -27.32], [153.02, -27.32], [153.02, -27.3225], [153.0275, -27.3225], [153.0275, -27.325], [153.025, -27.325], [153.025, -27.3275], [153.0225, -27.3275], [153.0225, -27.33], [153.02, -27.33], [153.02, -27.335], [153.0175, -27.335], [153.0175, -27.3475], [153.01, -27.3475], [153.01, -27.35], [153.005, -27.35], [153, -27.35], [153, -27.3525], [152.995, -27.3525], [152.995, -27.355], [152.9925, -27.355], [152.9925, -27.3625], [152.99, -27.3625], [152.99, -27.3675], [152.9875, -27.3675], [152.9875, -27.37], [152.985, -27.37], [152.985, -27.3725], [152.9825, -27.3725], [152.9825, -27.375], [152.98, -27.375], [152.98, -27.3775], [152.9675, -27.3775], [152.9675, -27.38], [152.965, -27.38], [152.9525, -27.38], [152.9525, -27.3825], [152.9475, -27.3825], [152.9475, -27.38], [152.94, -27.38], [152.94, -27.3825], [152.9375, -27.3825], [152.9375, -27.38], [152.9325, -27.38], [152.9325, -27.3825], [152.93, -27.3825], [152.925, -27.3825], [152.925, -27.385], [152.92, -27.385], [152.92, -27.3825], [152.9075, -27.3825], [152.9075, -27.38], [152.9075, -27.375], [152.9075, -27.3725], [152.915, -27.3725], [152.915, -27.37], [152.92, -27.37], [152.92, -27.3675], [152.9225, -27.3675], [152.9225, -27.365], [152.925, -27.365], [152.925, -27.3625], [152.9275, -27.3625], [152.9275, -27.36], [152.9275, -27.3575], [152.925, -27.3575], [152.925, -27.355], [152.9225, -27.355], [152.9225, -27.3525], [152.92, -27.3525], [152.92, -27.35], [152.9175, -27.35], [152.9175, -27.345], [152.92, -27.345], [152.92, -27.3325], [152.9175, -27.3325], [152.9175, -27.33], [152.915, -27.33], [152.915, -27.3275], [152.9125, -27.3275], [152.9125, -27.325], [152.9125, -27.3225], [152.9225, -27.3225], [152.9225, -27.32], [152.925, -27.32], [152.925, -27.3175], [152.9275, -27.3175], [152.9275, -27.315], 
[152.93, -27.315], [152.93, -27.3125], [152.9325, -27.3125], [152.9325, -27.31], [152.935, -27.31], [152.935, -27.305], [152.94, -27.305], [152.94, -27.3025], [152.9425, -27.3025], [152.9425, -27.3], [152.945, -27.3], [152.945, -27.2975], [152.95, -27.2975], [152.95, -27.295], [152.955, -27.295], [152.9575, -27.295], [152.9575, -27.2925], [152.96, -27.2925], [152.96, -27.29], [152.9625, -27.29], [152.9625, -27.2875], [152.9675, -27.2875], [152.9675, -27.285], [152.9725, -27.285], [152.9725, -27.2825], [152.9775, -27.2825], [152.9775, -27.28], [152.98, -27.28], [152.9925, -27.28], [152.9925, -27.2775], [152.9975, -27.2775], [153.0025, -27.2775]]]}'^^geo:geoJSONLiteral ; - geo:asWKT "POLYGON ((153.0025 -27.2775, 153.0025 -27.28, 153.005 -27.28, 153.005 -27.285, 153.0075 -27.285, 153.015 -27.285, 153.015 -27.29, 153.0175 -27.29, 153.0175 -27.2925, 153.0175 -27.3025, 153.02 -27.3025, 153.02 -27.305, 153.0225 -27.305, 153.0225 -27.31, 153.0175 -27.31, 153.0175 -27.3125, 153.015 -27.3125, 153.015 -27.315, 153.015 -27.3175, 153.0175 -27.3175, 153.0175 -27.32, 153.02 -27.32, 153.02 -27.3225, 153.0275 -27.3225, 153.0275 -27.325, 153.025 -27.325, 153.025 -27.3275, 153.0225 -27.3275, 153.0225 -27.33, 153.02 -27.33, 153.02 -27.335, 153.0175 -27.335, 153.0175 -27.3475, 153.01 -27.3475, 153.01 -27.35, 153.005 -27.35, 153 -27.35, 153 -27.3525, 152.995 -27.3525, 152.995 -27.355, 152.9925 -27.355, 152.9925 -27.3625, 152.99 -27.3625, 152.99 -27.3675, 152.9875 -27.3675, 152.9875 -27.37, 152.985 -27.37, 152.985 -27.3725, 152.9825 -27.3725, 152.9825 -27.375, 152.98 -27.375, 152.98 -27.3775, 152.9675 -27.3775, 152.9675 -27.38, 152.965 -27.38, 152.9525 -27.38, 152.9525 -27.3825, 152.9475 -27.3825, 152.9475 -27.38, 152.94 -27.38, 152.94 -27.3825, 152.9375 -27.3825, 152.9375 -27.38, 152.9325 -27.38, 152.9325 -27.3825, 152.93 -27.3825, 152.925 -27.3825, 152.925 -27.385, 152.92 -27.385, 152.92 -27.3825, 152.9075 -27.3825, 152.9075 -27.38, 152.9075 -27.375, 152.9075 -27.3725, 152.915 -27.3725, 152.915 -27.37, 152.92 -27.37, 152.92 -27.3675, 152.9225 -27.3675, 152.9225 -27.365, 152.925 -27.365, 152.925 -27.3625, 152.9275 -27.3625, 152.9275 -27.36, 152.9275 -27.3575, 152.925 -27.3575, 152.925 -27.355, 152.9225 -27.355, 152.9225 -27.3525, 152.92 -27.3525, 152.92 -27.35, 152.9175 -27.35, 152.9175 -27.345, 152.92 -27.345, 152.92 -27.3325, 152.9175 -27.3325, 152.9175 -27.33, 152.915 -27.33, 152.915 -27.3275, 152.9125 -27.3275, 152.9125 -27.325, 152.9125 -27.3225, 152.9225 -27.3225, 152.9225 -27.32, 152.925 -27.32, 152.925 -27.3175, 152.9275 -27.3175, 152.9275 -27.315, 152.93 -27.315, 152.93 -27.3125, 152.9325 -27.3125, 152.9325 -27.31, 152.935 -27.31, 152.935 -27.305, 152.94 -27.305, 152.94 -27.3025, 152.9425 -27.3025, 152.9425 -27.3, 152.945 -27.3, 152.945 -27.2975, 152.95 -27.2975, 152.95 -27.295, 152.955 -27.295, 152.9575 -27.295, 152.9575 -27.2925, 152.96 -27.2925, 152.96 -27.29, 152.9625 -27.29, 152.9625 -27.2875, 152.9675 -27.2875, 152.9675 -27.285, 152.9725 -27.285, 152.9725 -27.2825, 152.9775 -27.2825, 152.9775 -27.28, 152.98 -27.28, 152.9925 -27.28, 152.9925 -27.2775, 152.9975 -27.2775, 153.0025 -27.2775))"^^geo:wktLiteral - ] ; -. - -sand:cpc - a geo:Feature ; - rdfs:label "Carseldine Police Station" ; - dcterms:identifier "cpc"^^xsd:token ; - geo:hasGeometry [ - a geo:Geometry ; - geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0144819, -27.3506599]}"^^geo:geoJSONLiteral ; - geo:asWKT "POINT (153.0144819 -27.3506599)"^^geo:wktLiteral - ] ; -. 
-
-sand:f001
- a geo:Feature ;
- rdfs:label "Flood 001" ;
- dcterms:identifier "f001"^^xsd:token ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asGeoJSON "{ \"type\": \"Polygon\", \"coordinates\": [ [ [ 153.064893899999987, -27.2909981 ], [ 153.0648081, -27.2911506 ], [ 153.064475499999986, -27.2912364 ], [ 153.064078599999988, -27.2912269 ], [ 153.0635636, -27.291265 ], [ 153.0633383, -27.2913604 ], [ 153.0632417, -27.2914462 ], [ 153.0631559, -27.2915701 ], [ 153.0630808, -27.2917036 ], [ 153.06307, -27.2917704 ], [ 153.0631773, -27.2918943 ], [ 153.0633168, -27.2920564 ], [ 153.0634241, -27.2921613 ], [ 153.063767399999989, -27.2921994 ], [ 153.0642824, -27.2922757 ], [ 153.064400400000011, -27.292371 ], [ 153.0644111, -27.2926761 ], [ 153.0643897, -27.2928764 ], [ 153.0643682, -27.2930766 ], [ 153.06434680000001, -27.2932196 ], [ 153.0642824, -27.2934675 ], [ 153.0642824, -27.2935628 ], [ 153.0643682, -27.2936391 ], [ 153.0647223, -27.2937345 ], [ 153.0648296, -27.293744 ], [ 153.064893899999987, -27.2909981 ] ] ] }"^^geo:geoJSONLiteral ;
- geo:asWKT "POLYGON ((153.0648939 -27.2909981,153.0648081 -27.2911506,153.0644755 -27.2912364,153.0640786 -27.2912269,153.0635636 -27.291265,153.0633383 -27.2913604,153.0632417 -27.2914462,153.0631559 -27.2915701,153.0630808 -27.2917036,153.06307 -27.2917704,153.0631773 -27.2918943,153.0633168 -27.2920564,153.0634241 -27.2921613,153.0637674 -27.2921994,153.0642824 -27.2922757,153.0644004 -27.292371,153.0644111 -27.2926761,153.0643897 -27.2928764,153.0643682 -27.2930766,153.0643468 -27.2932196,153.0642824 -27.2934675,153.0642824 -27.2935628,153.0643682 -27.2936391,153.0647223 -27.2937345,153.0648296 -27.293744,153.0648939 -27.2909981))"^^geo:wktLiteral
- ] ;
-.
-
-sand:f023
- a geo:Feature ;
- rdfs:label "Flood 023" ;
- dcterms:identifier "f023"^^xsd:token ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asGeoJSON "{ \"type\": \"Polygon\", \"coordinates\": [ [ [ 153.06487820000001, -27.30059 ], [ 153.0648031, -27.301019 ], [ 153.0648138, -27.3012955 ], [ 153.0648889, -27.3015815 ], [ 153.0648567, -27.3016768 ], [ 153.064824499999986, -27.3018198 ], [ 153.0648138, -27.3020295 ], [ 153.064824499999986, -27.3022965 ], [ 153.0647387, -27.3024109 ], [ 153.0641808, -27.3024776 ], [ 153.063698, -27.3025634 ], [ 153.0634512, -27.3026302 ], [ 153.063419, -27.3027827 ], [ 153.063440500000013, -27.303002 ], [ 153.0634619, -27.303307 ], [ 153.063622900000013, -27.3034501 ], [ 153.0638696, -27.3034882 ], [ 153.0643095, -27.3035454 ], [ 153.0645456, -27.3036026 ], [ 153.0647923, -27.3037456 ], [ 153.0650176, -27.3039553 ], [ 153.0652, -27.3041174 ], [ 153.065318, -27.3042413 ], [ 153.0653931, -27.3045083 ], [ 153.0655112, -27.3047371 ], [ 153.065790099999987, -27.3050803 ], [ 153.0660476, -27.3052519 ], [ 153.0656935, -27.3037551 ], [ 153.0652215, -27.30243 ], [ 153.06487820000001, -27.30059 ] ] ] }"^^geo:geoJSONLiteral ;
- geo:asWKT "POLYGON ((153.0648782 -27.30059,153.0648031 -27.301019,153.0648138 -27.3012955,153.0648889 -27.3015815,153.0648567 -27.3016768,153.0648245 -27.3018198,153.0648138 -27.3020295,153.0648245 -27.3022965,153.0647387 -27.3024109,153.0641808 -27.3024776,153.063698 -27.3025634,153.0634512 -27.3026302,153.063419 -27.3027827,153.0634405 -27.303002,153.0634619 -27.303307,153.0636229 -27.3034501,153.0638696 -27.3034882,153.0643095 -27.3035454,153.0645456 -27.3036026,153.0647923 -27.3037456,153.0650176 -27.3039553,153.0652 -27.3041174,153.065318 -27.3042413,153.0653931 -27.3045083,153.0655112 -27.3047371,153.0657901 -27.3050803,153.0660476 -27.3052519,153.0656935 -27.3037551,153.0652215 -27.30243,153.0648782 -27.30059))"^^geo:wktLiteral
- ] ;
-.
-
-sand:f332
- a geo:Feature ;
- rdfs:label "Flood 332" ;
- dcterms:identifier "f332"^^xsd:token ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asGeoJSON "{ \"type\": \"Polygon\", \"coordinates\": [ [ [ 153.068289099999987, -27.3113685 ], [ 153.0681389, -27.3108346 ], [ 153.0676454, -27.3103961 ], [ 153.0673021, -27.3096144 ], [ 153.0670231, -27.3088708 ], [ 153.066615399999989, -27.3088327 ], [ 153.0659932, -27.3089662 ], [ 153.0656928, -27.3091568 ], [ 153.065564, -27.3095381 ], [ 153.0658215, -27.310377 ], [ 153.0659073, -27.3107774 ], [ 153.0660361, -27.3111587 ], [ 153.0665725, -27.3113685 ], [ 153.066744199999988, -27.3115973 ], [ 153.0674094, -27.3130272 ], [ 153.0676669, -27.3135419 ], [ 153.0680102, -27.3142473 ], [ 153.0685466, -27.3151243 ], [ 153.0693191, -27.3150862 ], [ 153.0698126, -27.3147049 ], [ 153.069877, -27.3145143 ], [ 153.06970530000001, -27.3140376 ], [ 153.0694479, -27.3134085 ], [ 153.069147500000014, -27.31297 ], [ 153.0688041, -27.3124552 ], [ 153.068375, -27.3120548 ], [ 153.068074599999989, -27.3117498 ], [ 153.068289099999987, -27.3113685 ] ] ] }"^^geo:geoJSONLiteral ;
- geo:asWKT "POLYGON ((153.0682891 -27.3113685,153.0681389 -27.3108346,153.0676454 -27.3103961,153.0673021 -27.3096144,153.0670231 -27.3088708,153.0666154 -27.3088327,153.0659932 -27.3089662,153.0656928 -27.3091568,153.065564 -27.3095381,153.0658215 -27.310377,153.0659073 -27.3107774,153.0660361 -27.3111587,153.0665725 -27.3113685,153.0667442 -27.3115973,153.0674094 -27.3130272,153.0676669 -27.3135419,153.0680102 -27.3142473,153.0685466 -27.3151243,153.0693191 -27.3150862,153.0698126 -27.3147049,153.069877 -27.3145143,153.0697053 -27.3140376,153.0694479 -27.3134085,153.0691475 -27.31297,153.0688041 -27.3124552,153.068375 -27.3120548,153.0680746 -27.3117498,153.0682891 -27.3113685))"^^geo:wktLiteral
- ] ;
-.
-
-sand:f632
- a geo:Feature ;
- rdfs:label "Flood 632" ;
- dcterms:identifier "f632"^^xsd:token ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asGeoJSON "{ \"type\": \"Polygon\", \"coordinates\": [ [ [ 153.0649154, -27.2906357 ], [ 153.0650656, -27.2892818 ], [ 153.0651407, -27.288233 ], [ 153.06513, -27.287413 ], [ 153.0650656, -27.2859541 ], [ 153.0649905, -27.2861353 ], [ 153.065012, -27.2863737 ], [ 153.065001200000012, -27.2868218 ], [ 153.0649583, -27.2871079 ], [ 153.0648296, -27.2873463 ], [ 153.0646472, -27.2873939 ], [ 153.064604300000013, -27.2875274 ], [ 153.0646365, -27.2877849 ], [ 153.0646686, -27.2879183 ], [ 153.0646686, -27.2882711 ], [ 153.0646365, -27.2885762 ], [ 153.0642609, -27.2886716 ], [ 153.0640678, -27.2888623 ], [ 153.064035600000011, -27.2890816 ], [ 153.064293099999986, -27.2894248 ], [ 153.064379, -27.2897204 ], [ 153.0642288, -27.2899206 ], [ 153.064057100000014, -27.2899969 ], [ 153.0639605, -27.2902353 ], [ 153.0639927, -27.2904069 ], [ 153.064110699999986, -27.2905309 ], [ 153.0642824, -27.2906644 ], [ 153.064497, -27.2907216 ], [ 153.064657899999986, -27.2907406 ], [ 153.064818800000012, -27.2907406 ], [ 153.0649154, -27.2906357 ] ] ] }"^^geo:geoJSONLiteral ;
- geo:asWKT "POLYGON ((153.0649154 -27.2906357,153.0650656 -27.2892818,153.0651407 -27.288233,153.06513 -27.287413,153.0650656 -27.2859541,153.0649905 -27.2861353,153.065012 -27.2863737,153.0650012 -27.2868218,153.0649583 -27.2871079,153.0648296 -27.2873463,153.0646472 -27.2873939,153.0646043 -27.2875274,153.0646365 -27.2877849,153.0646686 -27.2879183,153.0646686 -27.2882711,153.0646365 -27.2885762,153.0642609 -27.2886716,153.0640678 -27.2888623,153.0640356 -27.2890816,153.0642931 -27.2894248,153.064379 -27.2897204,153.0642288 -27.2899206,153.0640571 -27.2899969,153.0639605 -27.2902353,153.0639927 -27.2904069,153.0641107 -27.2905309,153.0642824 -27.2906644,153.064497 -27.2907216,153.0646579 -27.2907406,153.0648188 -27.2907406,153.0649154 -27.2906357))"^^geo:wktLiteral
- ] ;
-.
-
-sand:fp
- a geo:Feature ;
- rdfs:label "Flinder Parade" ;
- dcterms:identifier "fp"^^xsd:token ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asGeoJSON "{ \"type\": \"LineString\", \"coordinates\": [ [ 153.074756900000011, -27.3203138 ], [ 153.0727077, -27.3183121 ], [ 153.0715276, -27.3170824 ], [ 153.070519, -27.3157669 ], [ 153.0694891, -27.3143847 ], [ 153.067751, -27.311115 ], [ 153.0664635, -27.3072446 ], [ 153.0656267, -27.3047468 ], [ 153.065111699999989, -27.3031262 ], [ 153.0647898, -27.301677 ], [ 153.064510899999988, -27.3000372 ], [ 153.0644036, -27.2984546 ], [ 153.0643392, -27.2973296 ], [ 153.06459670000001, -27.2953656 ], [ 153.0646396, -27.2936494 ], [ 153.0644465, -27.2922764 ] ] }"^^geo:geoJSONLiteral ;
- geo:asWKT "LINESTRING (153.0747569 -27.3203138,153.0727077 -27.3183121,153.0715276 -27.3170824,153.070519 -27.3157669,153.0694891 -27.3143847,153.067751 -27.311115,153.0664635 -27.3072446,153.0656267 -27.3047468,153.0651117 -27.3031262,153.0647898 -27.301677,153.0645109 -27.3000372,153.0644036 -27.2984546,153.0643392 -27.2973296,153.0645967 -27.2953656,153.0646396 -27.2936494,153.0644465 -27.2922764)"^^geo:wktLiteral
- ] ;
-.
-
-sand:jcabi
- a geo:Feature ;
- rdfs:label "Jacana Centre for Acquired Brain Injury" ;
- dcterms:identifier "jcabi"^^xsd:token ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0632873, -27.2918652]}"^^geo:geoJSONLiteral ;
- geo:asWKT "POINT (153.0632873 -27.2918652)"^^geo:wktLiteral
- ] ;
-.
-
-sand:rps
- a geo:Feature ;
- rdfs:label "Redcliffe Police Station" ;
- dcterms:identifier "rps"^^xsd:token ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.1143102, -27.2234024]}"^^geo:geoJSONLiteral ;
- geo:asWKT "POINT (153.1143102 -27.2234024)"^^geo:wktLiteral
- ] ;
-.
-
-sand:sac
- a geo:Feature ;
- rdfs:label "Sandgate Aquatic Centre" ;
- dcterms:identifier "sac"^^xsd:token ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0688897, -27.3122011]}"^^geo:geoJSONLiteral ;
- geo:asWKT "POINT (153.0688897 -27.3122011)"^^geo:wktLiteral
- ] ;
-.
-
-sand:sps
- a geo:Feature ;
- rdfs:label "Sandgate Police Station" ;
- dcterms:identifier "sps"^^xsd:token ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0677583, -27.318185]}"^^geo:geoJSONLiteral ;
- geo:asWKT "POINT (153.0677583 -27.318185)"^^geo:wktLiteral
- ] ;
-.
-
-sand:src
- a geo:Feature ;
- rdfs:label "Sandgate Respite Centre" ;
- dcterms:identifier "src"^^xsd:token ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asGeoJSON "{\"type\": \"Point\", \"coordinates\": [153.0614757, -27.3111489]}"^^geo:geoJSONLiteral ;
- geo:asWKT "POINT (153.0614757 -27.3111489)"^^geo:wktLiteral
- ] ;
-.
-
-sand:srca
- a geo:Feature ;
- rdfs:label "Sandgate Respite Centre Area" ;
- dcterms:identifier "srca"^^xsd:token ;
- geo:hasGeometry [
- a geo:Geometry ;
- geo:asGeoJSON "{\"type\": \"Polygon\", \"coordinates\": [[[153.0606281,-27.3096141], [153.0604564,-27.3105197], [153.0600487,-27.3109296], [153.0607354,-27.3127218], [153.063203,-27.3121212], [153.0621623,-27.3095187], [153.0617868,-27.3098333], [153.0606281,-27.3096141]]]}"^^geo:geoJSONLiteral ;
- geo:asWKT "POLYGON ((153.0606281 -27.3096141, 153.0604564 -27.3105197, 153.0600487 -27.3109296, 153.0607354 -27.3127218, 153.063203 -27.3121212, 153.0621623 -27.3095187, 153.0617868 -27.3098333, 153.0606281 -27.3096141))"^^geo:wktLiteral
- ] ;
-.
diff --git a/tests/data/spaceprez/input/sandgate/catchments.geojson b/tests/data/spaceprez/input/sandgate/catchments.geojson deleted file mode 100755 index f84929cd..00000000 --- a/tests/data/spaceprez/input/sandgate/catchments.geojson +++ /dev/null @@ -1,8 +0,0 @@ -{ - "type": "FeatureCollection", - "bbox": [152.9075, -27.42, 153.16, -27.2775], - "features": [ - {"type": "Feature", "properties": {"id": "cc12109444"}, "geometry": {"type": "Polygon", "coordinates": [[[153.06, -27.28], [153.05, -27.28], [153.0425, -27.28], [153.04, -27.28], [153.04, -27.2825], [153.0425, -27.2825], [153.0425, -27.2975], [153.04, -27.2975], [153.04, -27.3], [153.0375, -27.3], [153.0375, -27.305], [153.035, -27.305], [153.035, -27.3175], [153.0325, -27.3175], [153.0325, -27.32], [153.03, -27.32], [153.03, -27.3225], [153.0275, -27.3225], [153.0275, -27.325], [153.025, -27.325], [153.025, -27.3275], [153.0225, -27.3275], [153.0225, -27.33], [153.02, -27.33], [153.02, -27.335], [153.0175, -27.335], [153.0175, -27.3475], [153.01, -27.3475], [153.01, -27.35], [153.005, -27.35], [153, -27.35], [153, -27.3525], [152.995, -27.3525], [152.995, -27.355], [152.9925, -27.355], [152.9925, -27.3625], [152.99, -27.3625], [152.99, -27.3675], [152.9875, -27.3675], [152.9875, -27.37], [152.985, -27.37], [152.985, -27.3725], [152.9825, -27.3725], [152.9825, -27.375], [152.98, -27.375], [152.98, -27.3775], [152.9675, -27.3775], [152.9675, -27.38], [152.965, -27.38], [152.9525, -27.38], [152.9525, -27.3825], [152.9475, -27.3825], [152.9475, -27.38], [152.94, -27.38], [152.94, -27.3825], [152.9375, -27.3825], [152.9375, -27.38], [152.9325, -27.38], [152.9325, -27.3825], [152.93, -27.3825], [152.925, -27.3825], [152.925, -27.385], [152.925, -27.39], [152.9375, -27.39], [152.9375, -27.3925], [152.94, -27.3925], [152.945, -27.3925], [152.955, -27.3925], [152.955, -27.39], [152.96, -27.39], [152.96, -27.3875], [152.97, -27.3875], [152.975, -27.3875], [152.975, -27.3925], [152.98, -27.3925], [152.98, -27.3975], [152.9825, -27.3975], [152.9825, -27.4], [152.9875, -27.4], [152.9925, -27.4], [152.9925, -27.4025], [152.9975, -27.4025], [152.9975, -27.405], [153.0025, -27.405], [153.005, -27.405], [153.005, -27.4025], [153.0125, -27.4025], [153.0125, -27.4], [153.0175, -27.4], [153.0175, -27.3975], [153.025, -27.3975], [153.025, -27.395], [153.0275, -27.395], [153.0275, -27.3925], [153.06, -27.3925], [153.06, -27.395], [153.0625, -27.395], [153.0625, -27.3975], [153.065, -27.3975], [153.065, -27.4], [153.0675, -27.4], [153.0675, -27.4025], [153.07, -27.4025], [153.07, -27.405], [153.075, -27.405], [153.075, -27.41], [153.0775, -27.41], [153.0775, -27.4125], [153.08, -27.4125], [153.08, -27.415], [153.085, -27.415], [153.0875, -27.415], [153.0925, -27.415], [153.0975, -27.415], [153.0975, -27.4125], [153.1075, -27.4125], [153.1075, -27.415], [153.11, -27.415], [153.11, -27.4175], [153.1125, -27.4175], [153.1125, -27.42], [153.1175, -27.42], [153.1225, -27.42], [153.1225, -27.4175], [153.1275, -27.4175], [153.13, -27.4175], [153.13, -27.415], [153.135, -27.415], [153.135, -27.4125], [153.135, -27.4075], [153.135, -27.405], [153.135, -27.4025], [153.1375, -27.4025], [153.1375, -27.4], [153.14, -27.4], [153.14, -27.3975], [153.14, -27.395], [153.14, -27.3925], [153.1425, -27.3925], [153.1425, -27.39], [153.145, -27.39], [153.145, -27.3875], [153.15, -27.3875], [153.15, -27.385], [153.1525, -27.385], [153.1525, -27.3825], [153.155, -27.3825], [153.155, -27.38], [153.1575, -27.38], [153.16, -27.38], [153.16, -27.3775], [153.16, -27.375], 
[153.1575, -27.375], [153.1575, -27.365], [153.155, -27.365], [153.155, -27.3625], [153.1525, -27.3625], [153.1525, -27.36], [153.1475, -27.36], [153.1425, -27.36], [153.1375, -27.36], [153.135, -27.36], [153.135, -27.3575], [153.13, -27.3575], [153.13, -27.355], [153.125, -27.355], [153.125, -27.3525], [153.12, -27.3525], [153.12, -27.35], [153.115, -27.35], [153.115, -27.3475], [153.1125, -27.3475], [153.11, -27.3475], [153.1075, -27.3475], [153.1075, -27.345], [153.1025, -27.345], [153.1025, -27.3425], [153.0975, -27.3425], [153.0975, -27.34], [153.0925, -27.34], [153.0925, -27.3375], [153.09, -27.3375], [153.09, -27.335], [153.0875, -27.335], [153.085, -27.335], [153.085, -27.3325], [153.0825, -27.3325], [153.0825, -27.33], [153.085, -27.33], [153.0875, -27.33], [153.0875, -27.325], [153.085, -27.325], [153.085, -27.3225], [153.08, -27.3225], [153.0775, -27.3225], [153.0775, -27.32], [153.075, -27.32], [153.075, -27.3175], [153.0725, -27.3175], [153.0725, -27.3125], [153.07, -27.3125], [153.07, -27.31], [153.0675, -27.31], [153.0675, -27.305], [153.065, -27.305], [153.065, -27.2825], [153.0625, -27.2825], [153.06, -27.2825], [153.06, -27.28]]]}}, - {"type": "Feature", "properties": {"id": "cc12109445"}, "geometry": {"type": "Polygon", "coordinates": [[[153.0025, -27.2775], [153.0025, -27.28], [153.005, -27.28], [153.005, -27.285], [153.0075, -27.285], [153.015, -27.285], [153.015, -27.29], [153.0175, -27.29], [153.0175, -27.2925], [153.0175, -27.3025], [153.02, -27.3025], [153.02, -27.305], [153.0225, -27.305], [153.0225, -27.31], [153.0175, -27.31], [153.0175, -27.3125], [153.015, -27.3125], [153.015, -27.315], [153.015, -27.3175], [153.0175, -27.3175], [153.0175, -27.32], [153.02, -27.32], [153.02, -27.3225], [153.0275, -27.3225], [153.0275, -27.325], [153.025, -27.325], [153.025, -27.3275], [153.0225, -27.3275], [153.0225, -27.33], [153.02, -27.33], [153.02, -27.335], [153.0175, -27.335], [153.0175, -27.3475], [153.01, -27.3475], [153.01, -27.35], [153.005, -27.35], [153, -27.35], [153, -27.3525], [152.995, -27.3525], [152.995, -27.355], [152.9925, -27.355], [152.9925, -27.3625], [152.99, -27.3625], [152.99, -27.3675], [152.9875, -27.3675], [152.9875, -27.37], [152.985, -27.37], [152.985, -27.3725], [152.9825, -27.3725], [152.9825, -27.375], [152.98, -27.375], [152.98, -27.3775], [152.9675, -27.3775], [152.9675, -27.38], [152.965, -27.38], [152.9525, -27.38], [152.9525, -27.3825], [152.9475, -27.3825], [152.9475, -27.38], [152.94, -27.38], [152.94, -27.3825], [152.9375, -27.3825], [152.9375, -27.38], [152.9325, -27.38], [152.9325, -27.3825], [152.93, -27.3825], [152.925, -27.3825], [152.925, -27.385], [152.92, -27.385], [152.92, -27.3825], [152.9075, -27.3825], [152.9075, -27.38], [152.9075, -27.375], [152.9075, -27.3725], [152.915, -27.3725], [152.915, -27.37], [152.92, -27.37], [152.92, -27.3675], [152.9225, -27.3675], [152.9225, -27.365], [152.925, -27.365], [152.925, -27.3625], [152.9275, -27.3625], [152.9275, -27.36], [152.9275, -27.3575], [152.925, -27.3575], [152.925, -27.355], [152.9225, -27.355], [152.9225, -27.3525], [152.92, -27.3525], [152.92, -27.35], [152.9175, -27.35], [152.9175, -27.345], [152.92, -27.345], [152.92, -27.3325], [152.9175, -27.3325], [152.9175, -27.33], [152.915, -27.33], [152.915, -27.3275], [152.9125, -27.3275], [152.9125, -27.325], [152.9125, -27.3225], [152.9225, -27.3225], [152.9225, -27.32], [152.925, -27.32], [152.925, -27.3175], [152.9275, -27.3175], [152.9275, -27.315], [152.93, -27.315], [152.93, -27.3125], [152.9325, -27.3125], [152.9325, 
-27.31], [152.935, -27.31], [152.935, -27.305], [152.94, -27.305], [152.94, -27.3025], [152.9425, -27.3025], [152.9425, -27.3], [152.945, -27.3], [152.945, -27.2975], [152.95, -27.2975], [152.95, -27.295], [152.955, -27.295], [152.9575, -27.295], [152.9575, -27.2925], [152.96, -27.2925], [152.96, -27.29], [152.9625, -27.29], [152.9625, -27.2875], [152.9675, -27.2875], [152.9675, -27.285], [152.9725, -27.285], [152.9725, -27.2825], [152.9775, -27.2825], [152.9775, -27.28], [152.98, -27.28], [152.9925, -27.28], [152.9925, -27.2775], [152.9975, -27.2775], [153.0025, -27.2775]]]}} - ] -} diff --git a/tests/data/spaceprez/input/sandgate/facilities.geojson b/tests/data/spaceprez/input/sandgate/facilities.geojson deleted file mode 100755 index e11a6132..00000000 --- a/tests/data/spaceprez/input/sandgate/facilities.geojson +++ /dev/null @@ -1,16 +0,0 @@ -{ - "type": "FeatureCollection", - "bbox": [153.0144819, -27.3506599, 153.1143102, -27.2234024], - "features": [ - {"type": "Feature", "properties": {"id": "bhc"}, "geometry": {"type": "Point", "coordinates": [153.0638169, -27.2897951]}}, - {"type": "Feature", "properties": {"id": "bhca"}, "geometry": {"type": "Polygon", "coordinates": [[[153.063644,-27.2894036],[153.0635207,-27.2896229],[153.0631612,-27.2896182],[153.0631291,-27.289909],[153.0631559,-27.290338],[153.0644487,-27.2904858],[153.0645614,-27.2899185],[153.0648349,-27.2895324],[153.0648135,-27.2889174],[153.0637674,-27.2887362],[153.063644,-27.2894036]]]}}, - {"type": "Feature", "properties": {"id": "bps"}, "geometry": {"type": "Point", "coordinates": [153.0536022, -27.3497934]}}, - {"type": "Feature", "properties": {"id": "cpc"}, "geometry": {"type": "Point", "coordinates": [153.0144819, -27.3506599]}}, - {"type": "Feature", "properties": {"id": "jcabi"}, "geometry": {"type": "Point", "coordinates": [153.0632873, -27.2918652]}}, - {"type": "Feature", "properties": {"id": "rps"}, "geometry": {"type": "Point", "coordinates": [153.1143102, -27.2234024]}}, - {"type": "Feature", "properties": {"id": "sac"}, "geometry": {"type": "Point", "coordinates": [153.0688897, -27.3122011]}}, - {"type": "Feature", "properties": {"id": "sps"}, "geometry": {"type": "Point", "coordinates": [153.0677583, -27.318185]}}, - {"type": "Feature", "properties": {"id": "src"}, "geometry": {"type": "Point", "coordinates": [153.0614757, -27.3111489]}}, - {"type": "Feature", "properties": {"id": "srca"}, "geometry": {"type": "Polygon", "coordinates": [[[153.0606281,-27.3096141], [153.0604564,-27.3105197], [153.0600487,-27.3109296], [153.0607354,-27.3127218], [153.063203,-27.3121212], [153.0621623,-27.3095187], [153.0617868,-27.3098333], [153.0606281,-27.3096141]]]}} - ] -} diff --git a/tests/data/spaceprez/input/sandgate/floods.geojson b/tests/data/spaceprez/input/sandgate/floods.geojson deleted file mode 100755 index 25055918..00000000 --- a/tests/data/spaceprez/input/sandgate/floods.geojson +++ /dev/null @@ -1,10 +0,0 @@ -{ - "type": "FeatureCollection", - "bbox": [153.06307, -27.3151243, 153.069877, -27.2859541], - "features": [ - {"type": "Feature", "properties": {"id": "f001"}, "geometry": {"type": "Polygon", "coordinates": [ [ [ 153.064893899999987, -27.2909981 ], [ 153.0648081, -27.2911506 ], [ 153.064475499999986, -27.2912364 ], [ 153.064078599999988, -27.2912269 ], [ 153.0635636, -27.291265 ], [ 153.0633383, -27.2913604 ], [ 153.0632417, -27.2914462 ], [ 153.0631559, -27.2915701 ], [ 153.0630808, -27.2917036 ], [ 153.06307, -27.2917704 ], [ 153.0631773, -27.2918943 ], [ 153.0633168, -27.2920564 ], [ 
153.0634241, -27.2921613 ], [ 153.063767399999989, -27.2921994 ], [ 153.0642824, -27.2922757 ], [ 153.064400400000011, -27.292371 ], [ 153.0644111, -27.2926761 ], [ 153.0643897, -27.2928764 ], [ 153.0643682, -27.2930766 ], [ 153.06434680000001, -27.2932196 ], [ 153.0642824, -27.2934675 ], [ 153.0642824, -27.2935628 ], [ 153.0643682, -27.2936391 ], [ 153.0647223, -27.2937345 ], [ 153.0648296, -27.293744 ], [ 153.064893899999987, -27.2909981 ] ] ] }}, - {"type": "Feature", "properties": {"id": "f023"}, "geometry": {"type": "Polygon", "coordinates": [ [ [ 153.06487820000001, -27.30059 ], [ 153.0648031, -27.301019 ], [ 153.0648138, -27.3012955 ], [ 153.0648889, -27.3015815 ], [ 153.0648567, -27.3016768 ], [ 153.064824499999986, -27.3018198 ], [ 153.0648138, -27.3020295 ], [ 153.064824499999986, -27.3022965 ], [ 153.0647387, -27.3024109 ], [ 153.0641808, -27.3024776 ], [ 153.063698, -27.3025634 ], [ 153.0634512, -27.3026302 ], [ 153.063419, -27.3027827 ], [ 153.063440500000013, -27.303002 ], [ 153.0634619, -27.303307 ], [ 153.063622900000013, -27.3034501 ], [ 153.0638696, -27.3034882 ], [ 153.0643095, -27.3035454 ], [ 153.0645456, -27.3036026 ], [ 153.0647923, -27.3037456 ], [ 153.0650176, -27.3039553 ], [ 153.0652, -27.3041174 ], [ 153.065318, -27.3042413 ], [ 153.0653931, -27.3045083 ], [ 153.0655112, -27.3047371 ], [ 153.065790099999987, -27.3050803 ], [ 153.0660476, -27.3052519 ], [ 153.0656935, -27.3037551 ], [ 153.0652215, -27.30243 ], [ 153.06487820000001, -27.30059 ] ] ] }}, - {"type": "Feature", "properties": {"id": "f332"}, "geometry": {"type": "Polygon", "coordinates": [ [ [ 153.068289099999987, -27.3113685 ], [ 153.0681389, -27.3108346 ], [ 153.0676454, -27.3103961 ], [ 153.0673021, -27.3096144 ], [ 153.0670231, -27.3088708 ], [ 153.066615399999989, -27.3088327 ], [ 153.0659932, -27.3089662 ], [ 153.0656928, -27.3091568 ], [ 153.065564, -27.3095381 ], [ 153.0658215, -27.310377 ], [ 153.0659073, -27.3107774 ], [ 153.0660361, -27.3111587 ], [ 153.0665725, -27.3113685 ], [ 153.066744199999988, -27.3115973 ], [ 153.0674094, -27.3130272 ], [ 153.0676669, -27.3135419 ], [ 153.0680102, -27.3142473 ], [ 153.0685466, -27.3151243 ], [ 153.0693191, -27.3150862 ], [ 153.0698126, -27.3147049 ], [ 153.069877, -27.3145143 ], [ 153.06970530000001, -27.3140376 ], [ 153.0694479, -27.3134085 ], [ 153.069147500000014, -27.31297 ], [ 153.0688041, -27.3124552 ], [ 153.068375, -27.3120548 ], [ 153.068074599999989, -27.3117498 ], [ 153.068289099999987, -27.3113685 ] ] ] }}, - {"type": "Feature", "properties": {"id": "f632"}, "geometry": {"type": "Polygon", "coordinates": [ [ [ 153.0649154, -27.2906357 ], [ 153.0650656, -27.2892818 ], [ 153.0651407, -27.288233 ], [ 153.06513, -27.287413 ], [ 153.0650656, -27.2859541 ], [ 153.0649905, -27.2861353 ], [ 153.065012, -27.2863737 ], [ 153.065001200000012, -27.2868218 ], [ 153.0649583, -27.2871079 ], [ 153.0648296, -27.2873463 ], [ 153.0646472, -27.2873939 ], [ 153.064604300000013, -27.2875274 ], [ 153.0646365, -27.2877849 ], [ 153.0646686, -27.2879183 ], [ 153.0646686, -27.2882711 ], [ 153.0646365, -27.2885762 ], [ 153.0642609, -27.2886716 ], [ 153.0640678, -27.2888623 ], [ 153.064035600000011, -27.2890816 ], [ 153.064293099999986, -27.2894248 ], [ 153.064379, -27.2897204 ], [ 153.0642288, -27.2899206 ], [ 153.064057100000014, -27.2899969 ], [ 153.0639605, -27.2902353 ], [ 153.0639927, -27.2904069 ], [ 153.064110699999986, -27.2905309 ], [ 153.0642824, -27.2906644 ], [ 153.064497, -27.2907216 ], [ 153.064657899999986, -27.2907406 ], [ 153.064818800000012, 
-27.2907406 ], [ 153.0649154, -27.2906357 ] ] ] }} - ] -} diff --git a/tests/data/spaceprez/input/sandgate/roads.geojson b/tests/data/spaceprez/input/sandgate/roads.geojson deleted file mode 100755 index 6fc5775d..00000000 --- a/tests/data/spaceprez/input/sandgate/roads.geojson +++ /dev/null @@ -1,8 +0,0 @@ -{ - "type": "FeatureCollection", - "bbox": [153.0617934, -27.3203138, 153.0747569, -27.2920918], - "features": [ - {"type": "Feature", "properties": {"id": "bt"}, "geometry": {"type": "LineString", "coordinates": [ [ 153.06513, -27.3143431 ], [ 153.065881100000013, -27.3140285 ], [ 153.0653983, -27.3130466 ], [ 153.0652052, -27.3122745 ], [ 153.0651193, -27.3116453 ], [ 153.064550700000012, -27.3103202 ], [ 153.0641108, -27.3092526 ], [ 153.0637889, -27.3074031 ], [ 153.0631774, -27.3057253 ], [ 153.0628448, -27.3044573 ], [ 153.0627053, -27.3036565 ], [ 153.061847, -27.2988706 ], [ 153.0617934, -27.2952 ], [ 153.062168899999989, -27.2933312 ], [ 153.0622333, -27.2920918 ] ] }}, - {"type": "Feature", "properties": {"id": "fp"}, "geometry": {"type": "LineString", "coordinates": [ [ 153.074756900000011, -27.3203138 ], [ 153.0727077, -27.3183121 ], [ 153.0715276, -27.3170824 ], [ 153.070519, -27.3157669 ], [ 153.0694891, -27.3143847 ], [ 153.067751, -27.311115 ], [ 153.0664635, -27.3072446 ], [ 153.0656267, -27.3047468 ], [ 153.065111699999989, -27.3031262 ], [ 153.0647898, -27.301677 ], [ 153.064510899999988, -27.3000372 ], [ 153.0644036, -27.2984546 ], [ 153.0643392, -27.2973296 ], [ 153.06459670000001, -27.2953656 ], [ 153.0646396, -27.2936494 ], [ 153.0644465, -27.2922764 ] ] }} - ] -} diff --git a/tests/data/spaceprez/input/sandgate/sandgate.json b/tests/data/spaceprez/input/sandgate/sandgate.json deleted file mode 100755 index 0c1c84ae..00000000 --- a/tests/data/spaceprez/input/sandgate/sandgate.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "type": "Dataset", - "bbox": [-10.0, -10.0, 10.0, 10.0], - "properties": {"id": "sandgate"}, - "featureCollections": [ - { - "type": "FeatureCollection", - "bbox": [152.9075, -27.42, 153.16, -27.2775], - "properties": {"id": "catchments"}, - "features": [ - {"type": "Feature", "properties": {"id": "cc12109444"}, "geometry": {"type": "Polygon", "coordinates": [[[153.06, -27.28], [153.06, -27.2825], [153.0625, -27.2825], [153.065, -27.2825], [153.065, -27.305], [153.0675, -27.305], [153.0675, -27.31], [153.07, -27.31], [153.07, -27.3125], [153.0725, -27.3125], [153.0725, -27.3175], [153.075, -27.3175], [153.075, -27.32], [153.0775, -27.32], [153.0775, -27.3225], [153.08, -27.3225], [153.085, -27.3225], [153.085, -27.325], [153.0875, -27.325], [153.0875, -27.33], [153.085, -27.33], [153.0825, -27.33], [153.0825, -27.3325], [153.085, -27.3325], [153.085, -27.335], [153.0875, -27.335], [153.09, -27.335], [153.09, -27.3375], [153.0925, -27.3375], [153.0925, -27.34], [153.0975, -27.34], [153.0975, -27.3425], [153.1025, -27.3425], [153.1025, -27.345], [153.1075, -27.345], [153.1075, -27.3475], [153.11, -27.3475], [153.1125, -27.3475], [153.115, -27.3475], [153.115, -27.35], [153.12, -27.35], [153.12, -27.3525], [153.125, -27.3525], [153.125, -27.355], [153.13, -27.355], [153.13, -27.3575], [153.135, -27.3575], [153.135, -27.36], [153.1375, -27.36], [153.1425, -27.36], [153.1475, -27.36], [153.1525, -27.36], [153.1525, -27.3625], [153.155, -27.3625], [153.155, -27.365], [153.1575, -27.365], [153.1575, -27.375], [153.16, -27.375], [153.16, -27.3775], [153.16, -27.38], [153.1575, -27.38], [153.155, -27.38], [153.155, -27.3825], [153.1525, -27.3825], [153.1525, 
-27.385], [153.15, -27.385], [153.15, -27.3875], [153.145, -27.3875], [153.145, -27.39], [153.1425, -27.39], [153.1425, -27.3925], [153.14, -27.3925], [153.14, -27.395], [153.14, -27.3975], [153.14, -27.4], [153.1375, -27.4], [153.1375, -27.4025], [153.135, -27.4025], [153.135, -27.405], [153.135, -27.4075], [153.135, -27.4125], [153.135, -27.415], [153.13, -27.415], [153.13, -27.4175], [153.1275, -27.4175], [153.1225, -27.4175], [153.1225, -27.42], [153.1175, -27.42], [153.1125, -27.42], [153.1125, -27.4175], [153.11, -27.4175], [153.11, -27.415], [153.1075, -27.415], [153.1075, -27.4125], [153.0975, -27.4125], [153.0975, -27.415], [153.0925, -27.415], [153.0875, -27.415], [153.085, -27.415], [153.08, -27.415], [153.08, -27.4125], [153.0775, -27.4125], [153.0775, -27.41], [153.075, -27.41], [153.075, -27.405], [153.07, -27.405], [153.07, -27.4025], [153.0675, -27.4025], [153.0675, -27.4], [153.065, -27.4], [153.065, -27.3975], [153.0625, -27.3975], [153.0625, -27.395], [153.06, -27.395], [153.06, -27.3925], [153.0275, -27.3925], [153.0275, -27.395], [153.025, -27.395], [153.025, -27.3975], [153.0175, -27.3975], [153.0175, -27.4], [153.0125, -27.4], [153.0125, -27.4025], [153.005, -27.4025], [153.005, -27.405], [153.0025, -27.405], [152.9975, -27.405], [152.9975, -27.4025], [152.9925, -27.4025], [152.9925, -27.4], [152.9875, -27.4], [152.9825, -27.4], [152.9825, -27.3975], [152.98, -27.3975], [152.98, -27.3925], [152.975, -27.3925], [152.975, -27.3875], [152.97, -27.3875], [152.96, -27.3875], [152.96, -27.39], [152.955, -27.39], [152.955, -27.3925], [152.945, -27.3925], [152.94, -27.3925], [152.9375, -27.3925], [152.9375, -27.39], [152.925, -27.39], [152.925, -27.385], [152.925, -27.3825], [152.93, -27.3825], [152.9325, -27.3825], [152.9325, -27.38], [152.9375, -27.38], [152.9375, -27.3825], [152.94, -27.3825], [152.94, -27.38], [152.9475, -27.38], [152.9475, -27.3825], [152.9525, -27.3825], [152.9525, -27.38], [152.965, -27.38], [152.9675, -27.38], [152.9675, -27.3775], [152.98, -27.3775], [152.98, -27.375], [152.9825, -27.375], [152.9825, -27.3725], [152.985, -27.3725], [152.985, -27.37], [152.9875, -27.37], [152.9875, -27.3675], [152.99, -27.3675], [152.99, -27.3625], [152.9925, -27.3625], [152.9925, -27.355], [152.995, -27.355], [152.995, -27.3525], [153, -27.3525], [153, -27.35], [153.005, -27.35], [153.01, -27.35], [153.01, -27.3475], [153.0175, -27.3475], [153.0175, -27.335], [153.02, -27.335], [153.02, -27.33], [153.0225, -27.33], [153.0225, -27.3275], [153.025, -27.3275], [153.025, -27.325], [153.0275, -27.325], [153.0275, -27.3225], [153.03, -27.3225], [153.03, -27.32], [153.0325, -27.32], [153.0325, -27.3175], [153.035, -27.3175], [153.035, -27.305], [153.0375, -27.305], [153.0375, -27.3], [153.04, -27.3], [153.04, -27.2975], [153.0425, -27.2975], [153.0425, -27.2825], [153.04, -27.2825], [153.04, -27.28], [153.0425, -27.28], [153.05, -27.28], [153.06, -27.28]]]}}, - {"type": "Feature", "properties": {"id": "cc12109445"}, "geometry": {"type": "Polygon", "coordinates": [[[153.0025, -27.2775], [153.0025, -27.28], [153.005, -27.28], [153.005, -27.285], [153.0075, -27.285], [153.015, -27.285], [153.015, -27.29], [153.0175, -27.29], [153.0175, -27.2925], [153.0175, -27.3025], [153.02, -27.3025], [153.02, -27.305], [153.0225, -27.305], [153.0225, -27.31], [153.0175, -27.31], [153.0175, -27.3125], [153.015, -27.3125], [153.015, -27.315], [153.015, -27.3175], [153.0175, -27.3175], [153.0175, -27.32], [153.02, -27.32], [153.02, -27.3225], [153.0275, -27.3225], [153.0275, -27.325], 
[153.025, -27.325], [153.025, -27.3275], [153.0225, -27.3275], [153.0225, -27.33], [153.02, -27.33], [153.02, -27.335], [153.0175, -27.335], [153.0175, -27.3475], [153.01, -27.3475], [153.01, -27.35], [153.005, -27.35], [153, -27.35], [153, -27.3525], [152.995, -27.3525], [152.995, -27.355], [152.9925, -27.355], [152.9925, -27.3625], [152.99, -27.3625], [152.99, -27.3675], [152.9875, -27.3675], [152.9875, -27.37], [152.985, -27.37], [152.985, -27.3725], [152.9825, -27.3725], [152.9825, -27.375], [152.98, -27.375], [152.98, -27.3775], [152.9675, -27.3775], [152.9675, -27.38], [152.965, -27.38], [152.9525, -27.38], [152.9525, -27.3825], [152.9475, -27.3825], [152.9475, -27.38], [152.94, -27.38], [152.94, -27.3825], [152.9375, -27.3825], [152.9375, -27.38], [152.9325, -27.38], [152.9325, -27.3825], [152.93, -27.3825], [152.925, -27.3825], [152.925, -27.385], [152.92, -27.385], [152.92, -27.3825], [152.9075, -27.3825], [152.9075, -27.38], [152.9075, -27.375], [152.9075, -27.3725], [152.915, -27.3725], [152.915, -27.37], [152.92, -27.37], [152.92, -27.3675], [152.9225, -27.3675], [152.9225, -27.365], [152.925, -27.365], [152.925, -27.3625], [152.9275, -27.3625], [152.9275, -27.36], [152.9275, -27.3575], [152.925, -27.3575], [152.925, -27.355], [152.9225, -27.355], [152.9225, -27.3525], [152.92, -27.3525], [152.92, -27.35], [152.9175, -27.35], [152.9175, -27.345], [152.92, -27.345], [152.92, -27.3325], [152.9175, -27.3325], [152.9175, -27.33], [152.915, -27.33], [152.915, -27.3275], [152.9125, -27.3275], [152.9125, -27.325], [152.9125, -27.3225], [152.9225, -27.3225], [152.9225, -27.32], [152.925, -27.32], [152.925, -27.3175], [152.9275, -27.3175], [152.9275, -27.315], [152.93, -27.315], [152.93, -27.3125], [152.9325, -27.3125], [152.9325, -27.31], [152.935, -27.31], [152.935, -27.305], [152.94, -27.305], [152.94, -27.3025], [152.9425, -27.3025], [152.9425, -27.3], [152.945, -27.3], [152.945, -27.2975], [152.95, -27.2975], [152.95, -27.295], [152.955, -27.295], [152.9575, -27.295], [152.9575, -27.2925], [152.96, -27.2925], [152.96, -27.29], [152.9625, -27.29], [152.9625, -27.2875], [152.9675, -27.2875], [152.9675, -27.285], [152.9725, -27.285], [152.9725, -27.2825], [152.9775, -27.2825], [152.9775, -27.28], [152.98, -27.28], [152.9925, -27.28], [152.9925, -27.2775], [152.9975, -27.2775], [153.0025, -27.2775]]]}} - ] - }, - { - "type": "FeatureCollection", - "bbox": [153.0144819, -27.3506599, 153.1143102, -27.2234024], - "properties": {"id": "facilities"}, - "features": [ - {"type": "Feature", "properties": {"id": "bhc"}, "geometry": {"type": "Point", "coordinates": [153.0638169, -27.2897951]}}, - {"type": "Feature", "properties": {"id": "bhca"}, "geometry": {"type": "Polygon", "coordinates": [[[153.063644,-27.2894036],[153.0635207,-27.2896229],[153.0631612,-27.2896182],[153.0631291,-27.289909],[153.0631559,-27.290338],[153.0644487,-27.2904858],[153.0645614,-27.2899185],[153.0648349,-27.2895324],[153.0648135,-27.2889174],[153.0637674,-27.2887362],[153.063644,-27.2894036]]]}}, - {"type": "Feature", "properties": {"id": "bps"}, "geometry": {"type": "Point", "coordinates": [153.0536022, -27.3497934]}}, - {"type": "Feature", "properties": {"id": "cpc"}, "geometry": {"type": "Point", "coordinates": [153.0144819, -27.3506599]}}, - {"type": "Feature", "properties": {"id": "jcabi"}, "geometry": {"type": "Point", "coordinates": [153.0632873, -27.2918652]}}, - {"type": "Feature", "properties": {"id": "rps"}, "geometry": {"type": "Point", "coordinates": [153.1143102, -27.2234024]}}, - {"type": "Feature", 
"properties": {"id": "sac"}, "geometry": {"type": "Point", "coordinates": [153.0688897, -27.3122011]}}, - {"type": "Feature", "properties": {"id": "sps"}, "geometry": {"type": "Point", "coordinates": [153.0677583, -27.318185]}}, - {"type": "Feature", "properties": {"id": "src"}, "geometry": {"type": "Point", "coordinates": [153.0614757, -27.3111489]}}, - {"type": "Feature", "properties": {"id": "srca"}, "geometry": {"type": "Polygon", "coordinates": [[[153.0606281,-27.3096141], [153.0604564,-27.3105197], [153.0600487,-27.3109296], [153.0607354,-27.3127218], [153.063203,-27.3121212], [153.0621623,-27.3095187], [153.0617868,-27.3098333], [153.0606281,-27.3096141]]]}} - ] - }, - { - "type": "FeatureCollection", - "properties": {"id": "floods"}, - "bbox": [153.06307, -27.3151243, 153.069877, -27.2859541], - "features": [ - {"type": "Feature", "properties": {"id": "f001"}, "geometry": {"type": "Polygon", "coordinates": [ [ [ 153.064893899999987, -27.2909981 ], [ 153.0648081, -27.2911506 ], [ 153.064475499999986, -27.2912364 ], [ 153.064078599999988, -27.2912269 ], [ 153.0635636, -27.291265 ], [ 153.0633383, -27.2913604 ], [ 153.0632417, -27.2914462 ], [ 153.0631559, -27.2915701 ], [ 153.0630808, -27.2917036 ], [ 153.06307, -27.2917704 ], [ 153.0631773, -27.2918943 ], [ 153.0633168, -27.2920564 ], [ 153.0634241, -27.2921613 ], [ 153.063767399999989, -27.2921994 ], [ 153.0642824, -27.2922757 ], [ 153.064400400000011, -27.292371 ], [ 153.0644111, -27.2926761 ], [ 153.0643897, -27.2928764 ], [ 153.0643682, -27.2930766 ], [ 153.06434680000001, -27.2932196 ], [ 153.0642824, -27.2934675 ], [ 153.0642824, -27.2935628 ], [ 153.0643682, -27.2936391 ], [ 153.0647223, -27.2937345 ], [ 153.0648296, -27.293744 ], [ 153.064893899999987, -27.2909981 ] ] ] }}, - {"type": "Feature", "properties": {"id": "f023"}, "geometry": {"type": "Polygon", "coordinates": [ [ [ 153.06487820000001, -27.30059 ], [ 153.0648031, -27.301019 ], [ 153.0648138, -27.3012955 ], [ 153.0648889, -27.3015815 ], [ 153.0648567, -27.3016768 ], [ 153.064824499999986, -27.3018198 ], [ 153.0648138, -27.3020295 ], [ 153.064824499999986, -27.3022965 ], [ 153.0647387, -27.3024109 ], [ 153.0641808, -27.3024776 ], [ 153.063698, -27.3025634 ], [ 153.0634512, -27.3026302 ], [ 153.063419, -27.3027827 ], [ 153.063440500000013, -27.303002 ], [ 153.0634619, -27.303307 ], [ 153.063622900000013, -27.3034501 ], [ 153.0638696, -27.3034882 ], [ 153.0643095, -27.3035454 ], [ 153.0645456, -27.3036026 ], [ 153.0647923, -27.3037456 ], [ 153.0650176, -27.3039553 ], [ 153.0652, -27.3041174 ], [ 153.065318, -27.3042413 ], [ 153.0653931, -27.3045083 ], [ 153.0655112, -27.3047371 ], [ 153.065790099999987, -27.3050803 ], [ 153.0660476, -27.3052519 ], [ 153.0656935, -27.3037551 ], [ 153.0652215, -27.30243 ], [ 153.06487820000001, -27.30059 ] ] ] }}, - {"type": "Feature", "properties": {"id": "f332"}, "geometry": {"type": "Polygon", "coordinates": [ [ [ 153.068289099999987, -27.3113685 ], [ 153.0681389, -27.3108346 ], [ 153.0676454, -27.3103961 ], [ 153.0673021, -27.3096144 ], [ 153.0670231, -27.3088708 ], [ 153.066615399999989, -27.3088327 ], [ 153.0659932, -27.3089662 ], [ 153.0656928, -27.3091568 ], [ 153.065564, -27.3095381 ], [ 153.0658215, -27.310377 ], [ 153.0659073, -27.3107774 ], [ 153.0660361, -27.3111587 ], [ 153.0665725, -27.3113685 ], [ 153.066744199999988, -27.3115973 ], [ 153.0674094, -27.3130272 ], [ 153.0676669, -27.3135419 ], [ 153.0680102, -27.3142473 ], [ 153.0685466, -27.3151243 ], [ 153.0693191, -27.3150862 ], [ 153.0698126, -27.3147049 ], [ 
153.069877, -27.3145143 ], [ 153.06970530000001, -27.3140376 ], [ 153.0694479, -27.3134085 ], [ 153.069147500000014, -27.31297 ], [ 153.0688041, -27.3124552 ], [ 153.068375, -27.3120548 ], [ 153.068074599999989, -27.3117498 ], [ 153.068289099999987, -27.3113685 ] ] ] }}, - {"type": "Feature", "properties": {"id": "f632"}, "geometry": {"type": "Polygon", "coordinates": [ [ [ 153.0649154, -27.2906357 ], [ 153.0650656, -27.2892818 ], [ 153.0651407, -27.288233 ], [ 153.06513, -27.287413 ], [ 153.0650656, -27.2859541 ], [ 153.0649905, -27.2861353 ], [ 153.065012, -27.2863737 ], [ 153.065001200000012, -27.2868218 ], [ 153.0649583, -27.2871079 ], [ 153.0648296, -27.2873463 ], [ 153.0646472, -27.2873939 ], [ 153.064604300000013, -27.2875274 ], [ 153.0646365, -27.2877849 ], [ 153.0646686, -27.2879183 ], [ 153.0646686, -27.2882711 ], [ 153.0646365, -27.2885762 ], [ 153.0642609, -27.2886716 ], [ 153.0640678, -27.2888623 ], [ 153.064035600000011, -27.2890816 ], [ 153.064293099999986, -27.2894248 ], [ 153.064379, -27.2897204 ], [ 153.0642288, -27.2899206 ], [ 153.064057100000014, -27.2899969 ], [ 153.0639605, -27.2902353 ], [ 153.0639927, -27.2904069 ], [ 153.064110699999986, -27.2905309 ], [ 153.0642824, -27.2906644 ], [ 153.064497, -27.2907216 ], [ 153.064657899999986, -27.2907406 ], [ 153.064818800000012, -27.2907406 ], [ 153.0649154, -27.2906357 ] ] ] }} - ] - }, - { - "type": "FeatureCollection", - "properties": {"id": "roads"}, - "bbox": [153.0617934, -27.3203138, 153.0747569, -27.2920918], - "features": [ - {"type": "Feature", "properties": {"id": "bt"}, "geometry": {"type": "LineString", "coordinates": [ [ 153.06513, -27.3143431 ], [ 153.065881100000013, -27.3140285 ], [ 153.0653983, -27.3130466 ], [ 153.0652052, -27.3122745 ], [ 153.0651193, -27.3116453 ], [ 153.064550700000012, -27.3103202 ], [ 153.0641108, -27.3092526 ], [ 153.0637889, -27.3074031 ], [ 153.0631774, -27.3057253 ], [ 153.0628448, -27.3044573 ], [ 153.0627053, -27.3036565 ], [ 153.061847, -27.2988706 ], [ 153.0617934, -27.2952 ], [ 153.062168899999989, -27.2933312 ], [ 153.0622333, -27.2920918 ] ] }}, - {"type": "Feature", "properties": {"id": "fp"}, "geometry": {"type": "LineString", "coordinates": [ [ 153.074756900000011, -27.3203138 ], [ 153.0727077, -27.3183121 ], [ 153.0715276, -27.3170824 ], [ 153.070519, -27.3157669 ], [ 153.0694891, -27.3143847 ], [ 153.067751, -27.311115 ], [ 153.0664635, -27.3072446 ], [ 153.0656267, -27.3047468 ], [ 153.065111699999989, -27.3031262 ], [ 153.0647898, -27.301677 ], [ 153.064510899999988, -27.3000372 ], [ 153.0644036, -27.2984546 ], [ 153.0643392, -27.2973296 ], [ 153.06459670000001, -27.2953656 ], [ 153.0646396, -27.2936494 ], [ 153.0644465, -27.2922764 ] ] }} - ] - } - ] -} diff --git a/tests/data/vocprez/expected_responses/beddingsurfacestructure_top_concepts.ttl b/tests/data/vocprez/expected_responses/beddingsurfacestructure_top_concepts.ttl deleted file mode 100755 index 5e536486..00000000 --- a/tests/data/vocprez/expected_responses/beddingsurfacestructure_top_concepts.ttl +++ /dev/null @@ -1,186 +0,0 @@ -@prefix dcterms: . -@prefix ns1: . -@prefix prez: . -@prefix rdfs: . -@prefix schema: . -@prefix skos: . -@prefix xsd: . - - a skos:ConceptScheme ; - dcterms:identifier "rf:BeddingSurfaceStructure"^^prez:identifier ; - ns1:status ; - skos:hasTopConcept , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - ; - skos:prefLabel "BeddingSurfaceStructure"@en ; - prez:childrenCount 21 ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure" . 
- -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . - - a skos:Concept ; - rdfs:label "Shrinkage (Desiccation) Cracks"@en ; - ns1:status ; - skos:prefLabel "Shrinkage (Desiccation) Cracks"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure/bddngsrfcstrctr:CRACKDES" . - - a skos:Concept ; - rdfs:label "Synaeresis Cracks"@en ; - ns1:status ; - skos:prefLabel "Synaeresis Cracks"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure/bddngsrfcstrctr:CRACKSYN" . - - a skos:Concept ; - rdfs:label "Parting Lineation (Primary Current Lineation)"@en ; - ns1:status ; - skos:prefLabel "Parting Lineation (Primary Current Lineation)"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure/bddngsrfcstrctr:PART" . - - a skos:Concept ; - rdfs:label "Rainspots"@en ; - ns1:status ; - skos:prefLabel "Rainspots"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure/bddngsrfcstrctr:RAIN" . - - a skos:Concept ; - rdfs:label "Ripples"@en ; - ns1:status ; - skos:prefLabel "Ripples"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure/bddngsrfcstrctr:RIP" . - - a skos:Concept ; - rdfs:label "Current Ripples"@en ; - ns1:status ; - skos:prefLabel "Current Ripples"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure/bddngsrfcstrctr:RIPCUR" . - - a skos:Concept ; - rdfs:label "Linguoid Current Ripples"@en ; - ns1:status ; - skos:prefLabel "Linguoid Current Ripples"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure/bddngsrfcstrctr:RIPCURLIN" . - - a skos:Concept ; - rdfs:label "Sinuous-Crested Current Rippled"@en ; - ns1:status ; - skos:prefLabel "Sinuous-Crested Current Rippled"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure/bddngsrfcstrctr:RIPCURSIN" . - - a skos:Concept ; - rdfs:label "Straight-Crested Current Ripples"@en ; - ns1:status ; - skos:prefLabel "Straight-Crested Current Ripples"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure/bddngsrfcstrctr:RIPCURSTR" . - - a skos:Concept ; - rdfs:label "Wave-Formed Ripples"@en ; - ns1:status ; - skos:prefLabel "Wave-Formed Ripples"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure/bddngsrfcstrctr:RIPWAV" . - - a skos:Concept ; - rdfs:label "Interference Wave-Formed Ripples"@en ; - ns1:status ; - skos:prefLabel "Interference Wave-Formed Ripples"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure/bddngsrfcstrctr:RIPWAVINT" . - - a skos:Concept ; - rdfs:label "Modified Wave-Formed Ripples"@en ; - ns1:status ; - skos:prefLabel "Modified Wave-Formed Ripples"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure/bddngsrfcstrctr:RIPWAVMOD" . 
- - a skos:Concept ; - rdfs:label "Trace Fossils"@en ; - ns1:status ; - skos:prefLabel "Trace Fossils"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure/bddngsrfcstrctr:TF" . - - a skos:Concept ; - rdfs:label "Crawling / Walking Tracks and Trails"@en ; - ns1:status ; - skos:prefLabel "Crawling / Walking Tracks and Trails"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure/bddngsrfcstrctr:TFCR" . - - a skos:Concept ; - rdfs:label "Foot Prints"@en ; - ns1:status ; - skos:prefLabel "Foot Prints"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure/bddngsrfcstrctr:TFCRFOOT" . - - a skos:Concept ; - rdfs:label "Grazing Traces"@en ; - ns1:status ; - skos:prefLabel "Grazing Traces"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure/bddngsrfcstrctr:TFGRZ" . - - a skos:Concept ; - rdfs:label "Coiled Grazing Traces"@en ; - ns1:status ; - skos:prefLabel "Coiled Grazing Traces"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure/bddngsrfcstrctr:TFGRZCOIL" . - - a skos:Concept ; - rdfs:label "Meandering Grazing Traces"@en ; - ns1:status ; - skos:prefLabel "Meandering Grazing Traces"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure/bddngsrfcstrctr:TFGRZMEAND" . - - a skos:Concept ; - rdfs:label "Radial Grazing Traces"@en ; - ns1:status ; - skos:prefLabel "Radial Grazing Traces"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure/bddngsrfcstrctr:TFGRZRAD" . - - a skos:Concept ; - rdfs:label "Resting Traces"@en ; - ns1:status ; - skos:prefLabel "Resting Traces"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure/bddngsrfcstrctr:TFREST" . - - skos:prefLabel "experimental"@en ; - schema:color "#eae72c" . - - skos:prefLabel "valid"@en ; - schema:color "#36a80d" . diff --git a/tests/data/vocprez/expected_responses/collection_listing_anot.ttl b/tests/data/vocprez/expected_responses/collection_listing_anot.ttl deleted file mode 100755 index a9efcf9d..00000000 --- a/tests/data/vocprez/expected_responses/collection_listing_anot.ttl +++ /dev/null @@ -1,51 +0,0 @@ -@prefix dcterms: . -@prefix prez: . -@prefix rdf: . -@prefix rdfs: . -@prefix skos: . -@prefix xsd: . - - a skos:Collection ; - dcterms:identifier "brhl-prps:pggd"^^prez:identifier ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - skos:definition "Borehole purposes applicable to regulatory notification forms."@en ; - skos:prefLabel "PGGD selection"@en ; - prez:link "/v/collection/brhl-prps:pggd" . - - a skos:Collection ; - dcterms:identifier "dpth-rfrnc:absolute"^^prez:identifier ; - dcterms:provenance "Defined here" ; - skos:definition "A fixed plane or point that describes an absolute reference for depth observations."@en ; - skos:prefLabel "Absolute"@en ; - prez:link "/v/collection/dpth-rfrnc:absolute" . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . 
- -dcterms:provenance rdfs:label "Provenance"@en ; - dcterms:description "The statement may include a description of any changes successive custodians made to the resource."@en . - - a skos:Collection ; - dcterms:identifier "cgi:contacttype"^^prez:identifier ; - dcterms:provenance "this vocabulary" ; - skos:definition "All Concepts in this vocabulary" ; - skos:prefLabel "Contact Type - All Concepts"@en ; - prez:link "/v/collection/cgi:contacttype" . - -rdf:type rdfs:label "type" . - -rdfs:label rdfs:label "label" . - -skos:definition rdfs:label "definition"@en ; - skos:definition "A statement or formal explanation of the meaning of a concept."@en . - -skos:prefLabel rdfs:label "preferred label"@en ; - skos:definition "The preferred lexical label for a resource, in a given language."@en . - -skos:Collection rdfs:label "Collection"@en ; - skos:definition "A meaningful collection of concepts."@en ; - prez:count 3 . - diff --git a/tests/data/vocprez/expected_responses/collection_listing_item.ttl b/tests/data/vocprez/expected_responses/collection_listing_item.ttl deleted file mode 100755 index 92907fac..00000000 --- a/tests/data/vocprez/expected_responses/collection_listing_item.ttl +++ /dev/null @@ -1,375 +0,0 @@ -@prefix dcterms: . -@prefix ns1: . -@prefix prez: . -@prefix rdf: . -@prefix rdfs: . -@prefix skos: . -@prefix xsd: . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . - -dcterms:provenance rdfs:label "Provenance"@en ; - dcterms:description "The statement may include a description of any changes successive custodians made to the resource."@en . - - a skos:Collection ; - dcterms:identifier "contacttype"^^xsd:token, - "cgi:contacttype"^^prez:identifier ; - dcterms:provenance "this vocabulary" ; - skos:definition "All Concepts in this vocabulary" ; - skos:member , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - ; - skos:prefLabel "Contact Type - All Concepts"@en ; - prez:link "/v/collection/cgi:contacttype" . - - dcterms:identifier "2016.01:contacttype"^^prez:identifier ; - dcterms:provenance "Original set of terms from the GeosciML standard" ; - skos:definition "This scheme describes the concept space for Contact Type concepts, as defined by the IUGS Commission for Geoscience Information (CGI) Geoscience Terminology Working Group. By extension, it includes all concepts in this conceptScheme, as well as concepts in any previous versions of the scheme. Designed for use in the contactType property in GeoSciML Contact elements."@en ; - skos:prefLabel "Contact Type"@en . - -rdf:type rdfs:label "type" . - -rdfs:label rdfs:label "label" . - -skos:definition rdfs:label "definition"@en ; - skos:definition "A statement or formal explanation of the meaning of a concept."@en . - -skos:member rdfs:label "has member"@en ; - skos:definition "Relates a collection to one of its members."@en . 
- -skos:prefLabel rdfs:label "preferred label"@en ; - skos:definition "The preferred lexical label for a resource, in a given language."@en . - - dcterms:identifier "preztest:dataset"^^prez:identifier . - - dcterms:identifier "preztest:feature-collection"^^prez:identifier . - - dcterms:identifier "cntcttyp:alteration_facies_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "A metasomatic facies contact separating rocks that have undergone alteration of a particular facies from those that have undergone metasomatism of another facies. Alteration is a kind of metasomatism that does not introduce economically important minerals."@en ; - skos:prefLabel "alteration facies contact"@en ; - prez:link "/s/datasets/preztest:dataset/collections/preztest:feature-collection/items/cntcttyp:alteration_facies_contact", - "/v/collection/cgi:contacttype/cntcttyp:alteration_facies_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:alteration_facies_contact" . - - dcterms:identifier "cntcttyp:angular_unconformable_contact"^^prez:identifier ; - dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition. American Geological Institute, Alexandria, 779 p."@en ; - ns1:status ; - skos:definition "An unconformable contact between two geological units in which the older, underlying rocks dip at an angle different from the younger, overlying strata, usually in which younger sediments rest upon the eroded surface of tilted or folded older rocks."@en ; - skos:prefLabel "angular unconformable contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:angular_unconformable_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:angular_unconformable_contact" . - - dcterms:identifier "cntcttyp:buttress_unconformity"^^prez:identifier ; - dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition. American Geological Institute, Alexandria, 779 p."@en ; - ns1:status ; - skos:definition "An unconformity in which onlapping strata are truncated against a steep topographic scarp."@en ; - skos:prefLabel "buttress unconformity"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:buttress_unconformity", - "/v/vocab/2016.01:contacttype/cntcttyp:buttress_unconformity" . - - dcterms:identifier "cntcttyp:chronostratigraphic_zone_contact"^^prez:identifier ; - dcterms:provenance "FGDC"@en ; - ns1:status ; - skos:definition "A contact between bodies of material having different ages of origin."@en ; - skos:prefLabel "chronostratigraphic-zone contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:chronostratigraphic_zone_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:chronostratigraphic_zone_contact" . - - dcterms:identifier "cntcttyp:conductivity_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "A geophysical contact between bodies of material distinguished based on electrical conductivity characteristics"@en ; - skos:prefLabel "conductivity contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:conductivity_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:conductivity_contact" . - - dcterms:identifier "cntcttyp:conformable_contact"^^prez:identifier ; - dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition. 
American Geological Institute, Alexandria, 779 p."@en ; - ns1:status ; - skos:definition "A contact separating two geological units in which the layers are formed one above the other in order by regular, uninterrupted deposition under the same general conditions."@en ; - skos:prefLabel "conformable contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:conformable_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:conformable_contact" . - - dcterms:identifier "cntcttyp:contact"^^prez:identifier ; - dcterms:provenance "adapted from Jackson, 1997, page 137, NADM C1 2004"@en ; - ns1:status ; - skos:definition "A surface that separates geologic units. Very general concept representing any kind of surface separating two geologic units, including primary boundaries such as depositional contacts, all kinds of unconformities, intrusive contacts, and gradational contacts, as well as faults that separate geologic units."@en ; - skos:prefLabel "contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:contact", - "/v/vocab/2016.01:contacttype/cntcttyp:contact" . - - dcterms:identifier "cntcttyp:deformation_zone_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "A lithogenetic bundary separating rock masses that have different deformation structure, e.g. sheared rock against non sheared rock, brecciated rock against non-brecciated rock."@en ; - skos:prefLabel "deformation zone contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:deformation_zone_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:deformation_zone_contact" . - - dcterms:identifier "cntcttyp:density_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "A geophysical contact separating bodies of material with different density characteristics, generally determined through measurement and modelling of gravity variations."@en ; - skos:prefLabel "density contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:density_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:density_contact" . - - dcterms:identifier "cntcttyp:depositional_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "Lithogenetic contact at which a sedimentary or volcanic rock has been deposited on (or against) another rock body. The relationship between the older underlying rocks and younger overlying rocks is unknown or not specfied."@en ; - skos:prefLabel "depositional contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:depositional_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:depositional_contact" . - - dcterms:identifier "cntcttyp:disconformable_contact"^^prez:identifier ; - dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition. American Geological Institute, Alexandria, 779 p."@en ; - ns1:status ; - skos:definition "An unconformable contact between two geological units in which the bedding of the older, underlying unit is parallel to the bedding of the younger, overlying unit, but in which the contact between the two units is marked by an irregular or uneven surface of appreciable relief."@en ; - skos:prefLabel "disconformable contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:disconformable_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:disconformable_contact" . 
- - dcterms:identifier "cntcttyp:faulted_contact"^^prez:identifier ; - dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition. American Geological Institute, Alexandria, 779 p."@en ; - ns1:status ; - skos:definition "A contact separating two bodies of material across which one body has slid past the other."@en ; - skos:prefLabel "faulted contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:faulted_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:faulted_contact" . - - dcterms:identifier "cntcttyp:geologic_province_contact"^^prez:identifier ; - dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition. American Geological Institute, Alexandria, 779 p."@en ; - ns1:status ; - skos:definition "A contact between regions characterised by their geological history or by similar structural, petrographic or stratigraphic features"@en ; - skos:prefLabel "geologic province contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:geologic_province_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:geologic_province_contact" . - - dcterms:identifier "cntcttyp:geophysical_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "A contact separating bodies of material in the earth that have different geophysical properties. Use for boundaries that are detected by geophysical sensor techniques as opposed to direct lithologic observation."@en ; - skos:prefLabel "geophysical contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:geophysical_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:geophysical_contact" . - - dcterms:identifier "cntcttyp:glacial_stationary_line"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "A boundary between a subglacial geomorphic unit and a periglacial geomorphic unit, marking the maximum extent of glacial cover. This can be thought of as the outcrop of the contact between a glacier and its substrate at some time at each point along the boundary. This contact type is included as an interim concept, assuming that in the future, there will be extensions to account better for geomorphic units and line types."@en ; - skos:prefLabel "glacial stationary line"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:glacial_stationary_line", - "/v/vocab/2016.01:contacttype/cntcttyp:glacial_stationary_line" . - - dcterms:identifier "cntcttyp:igneous_intrusive_contact"^^prez:identifier ; - dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition. American Geological Institute, Alexandria, 779 p."@en ; - ns1:status ; - skos:definition "An intrusive contact between a younger igneous rock and an older, pre-existing geological unit into which it has been intruded."@en ; - skos:prefLabel "igneous intrusive contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:igneous_intrusive_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:igneous_intrusive_contact" . - - dcterms:identifier "cntcttyp:igneous_phase_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "A lithogenetic contact separating lithologically distinct phases of a single intrusive body. 
Does not denote nature of contact (intrusive or gradation)."@en ; - skos:prefLabel "igneous phase contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:igneous_phase_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:igneous_phase_contact" . - - dcterms:identifier "cntcttyp:impact_structure_boundary"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "surface that bounds a body of rock affected by an extraterrestrial impact event"@en ; - skos:prefLabel "impact structure boundary"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:impact_structure_boundary", - "/v/vocab/2016.01:contacttype/cntcttyp:impact_structure_boundary" . - - dcterms:identifier "cntcttyp:lithogenetic_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "A non-faulted contact separating bodies of material in the earth that have different lithologic character or geologic history."@en ; - skos:prefLabel "lithogenetic contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:lithogenetic_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:lithogenetic_contact" . - - dcterms:identifier "cntcttyp:magnetic_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "A geophysical contact separating bodies of material distinguished based on properties related to magnetic fields."@en ; - skos:prefLabel "magnetic contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:magnetic_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:magnetic_contact" . - - dcterms:identifier "cntcttyp:magnetic_polarity_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "A magentic contact between bodies of material with different polarity of remnant magnetization, e.g. between sections of ocean floor with different polarity."@en ; - skos:prefLabel "magnetic polarity contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:magnetic_polarity_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:magnetic_polarity_contact" . - - dcterms:identifier "cntcttyp:magnetic_susceptiblity_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "A magnetic contact between bodies of material distinguished based on magnetic susceptibility characteristics."@en ; - skos:prefLabel "magnetic susceptiblity contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:magnetic_susceptiblity_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:magnetic_susceptiblity_contact" . - - dcterms:identifier "cntcttyp:magnetization_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "A magnetic contact between bodies of material distinguished based on any aspect of magnetization of material in the units."@en ; - skos:prefLabel "magnetization contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:magnetization_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:magnetization_contact" . - - dcterms:identifier "cntcttyp:metamorphic_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "Lithogenetic contact separating rocks that have different lithologic properties related to metamorphism, metasomatism, alteration, or mineralization. 
Generally separates metamorphic rock bodies, but may separate metamorphosed (broadly speaking) and non-metamorphosed rock."@en ; - skos:prefLabel "metamorphic contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:metamorphic_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:metamorphic_contact" . - - dcterms:identifier "cntcttyp:metamorphic_facies_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "A metamorphic contact separating rocks that have undergone metamorphism of a particular facies from those that have undergone metamorphism of another facies."@en ; - skos:prefLabel "metamorphic facies contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:metamorphic_facies_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:metamorphic_facies_contact" . - - dcterms:identifier "cntcttyp:metasomatic_facies_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "A metamorphic contact separating rocks that have undergone metasomatism of a particular facies from those that have undergone metasomatism of another facies. Metasomatism is distinguished from metamorphism by significant changes in bulk chemistry of the affected rock."@en ; - skos:prefLabel "metasomatic facies contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:metasomatic_facies_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:metasomatic_facies_contact" . - - dcterms:identifier "cntcttyp:mineralisation_assemblage_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "A metasomatic facies contact separating rocks which have been mineralised and contain a particular mineral assemblage from those which contain a different assemblage. Mineralization is a kind of metasomatism that introduces economically important minerals."@en ; - skos:prefLabel "mineralisation assemblage contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:mineralisation_assemblage_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:mineralisation_assemblage_contact" . - - dcterms:identifier "cntcttyp:nonconformable_contact"^^prez:identifier ; - dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition. American Geological Institute, Alexandria, 779 p."@en ; - ns1:status ; - skos:definition "An unconformable contact between an underlying, older nonstratified geological unit (usually intrusive igneous rocks or metamorphics) and an overlying, younger stratified geological unit."@en ; - skos:prefLabel "nonconformable contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:nonconformable_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:nonconformable_contact" . - - dcterms:identifier "cntcttyp:paraconformable_contact"^^prez:identifier ; - dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition. American Geological Institute, Alexandria, 779 p."@en ; - ns1:status ; - skos:definition "An unconformable contact between two geological units in which the bedding of the older, underlying unit is parallel to the bedding of the younger, overlying unit, in which the contact between the two units is planar, and may be coincident with a bedding plane."@en ; - skos:prefLabel "paraconformable contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:paraconformable_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:paraconformable_contact" . 
- - dcterms:identifier "cntcttyp:radiometric_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "A geophysical contact separating bodies of material distinguished based on the characteristics of emitted of radiant energy related to radioactivity (e.g. gamma rays)."@en ; - skos:prefLabel "radiometric contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:radiometric_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:radiometric_contact" . - - dcterms:identifier "cntcttyp:sedimentary_facies_contact"^^prez:identifier ; - dcterms:provenance "base on Nichols, Gary, 1999, Sedimentology and stratigraphy, Blackwell, p. 62-63."@en ; - ns1:status ; - skos:definition "A lithogenetic contact separating essentially coeval sedimentary material bodies distinguished by characteristics reflecting different physical or chemical processes active at the time of deposition of the sediment."@en ; - skos:prefLabel "sedimentary facies contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:sedimentary_facies_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:sedimentary_facies_contact" . - - dcterms:identifier "cntcttyp:sedimentary_intrusive_contact"^^prez:identifier ; - dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition. American Geological Institute, Alexandria, 779 p."@en ; - ns1:status ; - skos:definition "An intrusive contact between a sedimentary rock unit and plastic sediment (e.g., clay, chalk, salt, gypsum, etc.), forced upward into it from underlying sediment"@en ; - skos:prefLabel "sedimentary intrusive contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:sedimentary_intrusive_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:sedimentary_intrusive_contact" . - - dcterms:identifier "cntcttyp:seismic_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "A geophysical contact separating bodies of material defined based on their seismic character. Seismic character is based on transmission of vibrations (seismic waves) through a rock body, and relates to the velocity of transmission, and the nature of reflection, refraction, or transformation of seismic waves by inhomogeneities in the rock body."@en ; - skos:prefLabel "seismic contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:seismic_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:seismic_contact" . - - dcterms:identifier "cntcttyp:unconformable_contact"^^prez:identifier ; - dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition. American Geological Institute, Alexandria, 779 p."@en ; - ns1:status ; - skos:definition "A contact separating two geological units in which the younger unit succeeds the older after a substantial hiatus in deposition."@en ; - skos:prefLabel "unconformable contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:unconformable_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:unconformable_contact" . 
- - dcterms:identifier "cntcttyp:volcanic_subsidence_zone_boundary"^^prez:identifier ; - dcterms:provenance "this vocabulary, concept to encompass boundary of caldron, caldera, or crater."@en ; - ns1:status ; - skos:definition "boundary around a body of rock that is within a zone of subsidence or cratering produced by volcanic activity."@en ; - skos:prefLabel "volcanic subsidence zone boundary"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:volcanic_subsidence_zone_boundary", - "/v/vocab/2016.01:contacttype/cntcttyp:volcanic_subsidence_zone_boundary" . - - dcterms:identifier "cntcttyp:weathering_contact"^^prez:identifier ; - dcterms:provenance "this vocabulary"@en ; - ns1:status ; - skos:definition "A lithogenetic contact separating bodies of material differentiated based on lithologic properties related to weathering."@en ; - skos:prefLabel "weathering contact"@en ; - prez:link "/v/collection/cgi:contacttype/cntcttyp:weathering_contact", - "/v/vocab/2016.01:contacttype/cntcttyp:weathering_contact" . - -skos:Collection rdfs:label "Collection"@en ; - skos:definition "A meaningful collection of concepts."@en . - diff --git a/tests/data/vocprez/expected_responses/concept-coal.ttl b/tests/data/vocprez/expected_responses/concept-coal.ttl deleted file mode 100755 index 921784dd..00000000 --- a/tests/data/vocprez/expected_responses/concept-coal.ttl +++ /dev/null @@ -1,35 +0,0 @@ -@prefix bhpur: . -@prefix cs3: . -@prefix dcterms: . -@prefix ns1: . -@prefix prez: . -@prefix rdfs: . -@prefix schema: . -@prefix skos: . - -bhpur:coal a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - rdfs:isDefinedBy cs3: ; - skos:definition "Wells and bores drilled to facilitate the mining of coal under permits governed by the Queensland Mineral Resources Act 1989"@en ; - skos:inScheme cs3: ; - skos:prefLabel "Coal"@en ; - skos:topConceptOf cs3: ; - prez:link "/v/vocab/def2:borehole-purpose/brhl-prps:coal" . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . - -dcterms:provenance rdfs:label "Provenance"@en ; - dcterms:description "The statement may include a description of any changes successive custodians made to the resource."@en . - - skos:prefLabel "stable"@en ; - schema:color "#2e8c09" . - -cs3: dcterms:identifier "def2:borehole-purpose"^^prez:identifier ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - ns1:status ; - skos:prefLabel "Borehole Purpose"@en ; - prez:link "/v/vocab/def2:borehole-purpose" . diff --git a/tests/data/vocprez/expected_responses/concept-open-cut-coal-mining.ttl b/tests/data/vocprez/expected_responses/concept-open-cut-coal-mining.ttl deleted file mode 100755 index 81a349eb..00000000 --- a/tests/data/vocprez/expected_responses/concept-open-cut-coal-mining.ttl +++ /dev/null @@ -1,67 +0,0 @@ -@prefix dcterms: . -@prefix ns1: . -@prefix prez: . -@prefix rdf: . -@prefix rdfs: . -@prefix schema: . -@prefix skos: . 
- - a skos:Concept ; - dcterms:identifier "brhl-prps:open-cut-coal-mining"^^prez:identifier ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - rdfs:isDefinedBy ; - skos:broader ; - skos:definition "Wells drilled for the purpose of assessing coal resources for an open cut coal mine."@en ; - skos:inScheme ; - skos:prefLabel "Open-Cut Coal Mining"@en ; - prez:link "/v/vocab/def2:borehole-purpose/brhl-prps:open-cut-coal-mining" . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . - -dcterms:provenance rdfs:label "Provenance"@en ; - dcterms:description "The statement may include a description of any changes successive custodians made to the resource."@en . - -rdf:type rdfs:label "type" . - -rdfs:isDefinedBy rdfs:label "isDefinedBy" . - -rdfs:label rdfs:label "label" . - -skos:broader rdfs:label "has broader"@en ; - skos:definition "Relates a concept to a concept that is more general in meaning."@en . - -skos:definition rdfs:label "definition"@en ; - skos:definition "A statement or formal explanation of the meaning of a concept."@en . - -skos:inScheme rdfs:label "is in scheme"@en ; - skos:definition "Relates a resource (for example a concept) to a concept scheme in which it is included."@en . - -skos:prefLabel rdfs:label "preferred label"@en ; - skos:definition "The preferred lexical label for a resource, in a given language."@en . - -schema:color rdfs:label "color" . - - dcterms:identifier "brhl-prps:coal"^^prez:identifier ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - skos:definition "Wells and bores drilled to facilitate the mining of coal under permits governed by the Queensland Mineral Resources Act 1989"@en ; - skos:prefLabel "Coal"@en ; - prez:link "/v/vocab/def2:borehole-purpose/brhl-prps:coal" . - -skos:Concept rdfs:label "Concept"@en ; - skos:definition "An idea or notion; a unit of thought."@en . - - skos:definition "An entry that is seen as having a reasonable measure of stability, may be used to mark the full adoption of a previously 'experimental' entry."@en ; - skos:prefLabel "stable"@en ; - schema:color "#2e8c09" . - - dcterms:identifier "def2:borehole-purpose"^^prez:identifier ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - ns1:status ; - skos:definition "The primary purpose of a borehole based on the legislative State Act and/or the resources industry sector."@en ; - skos:prefLabel "Borehole Purpose"@en ; - prez:link "/v/vocab/def2:borehole-purpose" . - diff --git a/tests/data/vocprez/expected_responses/concept-with-2-narrower-concepts.ttl b/tests/data/vocprez/expected_responses/concept-with-2-narrower-concepts.ttl deleted file mode 100755 index 756dc0ce..00000000 --- a/tests/data/vocprez/expected_responses/concept-with-2-narrower-concepts.ttl +++ /dev/null @@ -1,64 +0,0 @@ -@prefix dcterms: . -@prefix ns1: . -@prefix prez: . -@prefix rdf: . -@prefix rdfs: . -@prefix schema: . -@prefix skos: . -@prefix xsd: . 
- - a skos:Concept ; - dcterms:identifier "brhl-prps:coal"^^prez:identifier ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - skos:definition "Wells and bores drilled to facilitate the mining of coal under permits governed by the Queensland Mineral Resources Act 1989"@en ; - skos:narrower , - ; - skos:prefLabel "Coal"@en ; - prez:childrenCount 2 ; - prez:link "/v/vocab/def2:borehole-purpose/brhl-prps:coal" . - - dcterms:identifier "def2:borehole-purpose"^^prez:identifier ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - ns1:status ; - skos:definition "The primary purpose of a borehole based on the legislative State Act and/or the resources industry sector."@en ; - skos:prefLabel "Borehole Purpose"@en . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . - -dcterms:provenance rdfs:label "Provenance"@en ; - dcterms:description "The statement may include a description of any changes successive custodians made to the resource."@en . - -rdf:type rdfs:label "type" . - -rdfs:label rdfs:label "label" . - -skos:definition rdfs:label "definition"@en ; - skos:definition "A statement or formal explanation of the meaning of a concept."@en . - -skos:prefLabel rdfs:label "preferred label"@en ; - skos:definition "The preferred lexical label for a resource, in a given language."@en . - - a skos:Concept ; - dcterms:identifier "brhl-prps:open-cut-coal-mining"^^prez:identifier ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - skos:definition "Wells drilled for the purpose of assessing coal resources for an open cut coal mine."@en ; - skos:prefLabel "Open-Cut Coal Mining"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/def2:borehole-purpose/brhl-prps:open-cut-coal-mining" . - - a skos:Concept ; - dcterms:identifier "brhl-prps:underground-coal-mining"^^prez:identifier ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - skos:definition "Wells drilled for the purpose of assessing coal resources for an underground coal mine."@en ; - skos:prefLabel "Underground Coal Mining"@en ; - prez:childrenCount 1 ; - prez:link "/v/vocab/def2:borehole-purpose/brhl-prps:underground-coal-mining" . - - skos:definition "An entry that is seen as having a reasonable measure of stability, may be used to mark the full adoption of a previously 'experimental' entry."@en ; - skos:prefLabel "stable"@en ; - schema:color "#2e8c09" . - diff --git a/tests/data/vocprez/expected_responses/concept_anot.ttl b/tests/data/vocprez/expected_responses/concept_anot.ttl deleted file mode 100755 index 94aa4de7..00000000 --- a/tests/data/vocprez/expected_responses/concept_anot.ttl +++ /dev/null @@ -1,29 +0,0 @@ -@prefix dcterms: . -@prefix ns1: . -@prefix ns2: . -@prefix ns3: . -@prefix rdfs: . -@prefix xsd: . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. 
Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . - -dcterms:provenance rdfs:label "Provenance"@en ; - dcterms:description "The statement may include a description of any changes successive custodians made to the resource."@en . - - a ns3:Concept ; - dcterms:identifier "alteration_facies_contact"^^xsd:token ; - dcterms:provenance "this vocabulary"@en ; - ns2:status ; - rdfs:isDefinedBy ; - ns3:broader ; - ns3:definition "A metasomatic facies contact separating rocks that have undergone alteration of a particular facies from those that have undergone metasomatism of another facies. Alteration is a kind of metasomatism that does not introduce economically important minerals."@en ; - ns3:inScheme ; - ns3:prefLabel "alteration facies contact"@en . - - dcterms:provenance "this vocabulary"@en ; - ns3:prefLabel "metamorphic contact"@en . - - dcterms:provenance "Original set of terms from the GeosciML standard" ; - ns3:prefLabel "Contact Type"@en ; - ns1:link "/v/vocab/2016.01:contacttype" . diff --git a/tests/data/vocprez/expected_responses/concept_scheme_no_children.ttl b/tests/data/vocprez/expected_responses/concept_scheme_no_children.ttl deleted file mode 100755 index 1275b256..00000000 --- a/tests/data/vocprez/expected_responses/concept_scheme_no_children.ttl +++ /dev/null @@ -1,49 +0,0 @@ -@prefix dcterms: . -@prefix ns1: . -@prefix owl: . -@prefix prez: . -@prefix prov: . -@prefix rdfs: . -@prefix schema: . -@prefix skos: . -@prefix xsd: . - - a owl:Ontology, - skos:ConceptScheme ; - dcterms:created "2020-07-17"^^xsd:date ; - dcterms:creator ; - dcterms:modified "2023-03-16"^^xsd:date ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - dcterms:publisher ; - ns1:status ; - skos:definition "The primary purpose of a borehole based on the legislative State Act and/or the resources industry sector."@en ; - skos:prefLabel "Borehole Purpose no children"@en ; - prov:qualifiedDerivation [ prov:entity ; - prov:hadRole ] ; - prez:childrenCount 0 . - -dcterms:created rdfs:label "Date Created"@en ; - dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en . - -dcterms:creator rdfs:label "Creator"@en ; - dcterms:description "Recommended practice is to identify the creator with a URI. If this is not possible or feasible, a literal value that identifies the creator may be provided."@en . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:modified rdfs:label "Date Modified"@en ; - dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en . - -dcterms:provenance rdfs:label "Provenance"@en ; - dcterms:description "The statement may include a description of any changes successive custodians made to the resource."@en . - -dcterms:publisher rdfs:label "Publisher"@en . - -rdfs:label rdfs:label "label" . - - skos:definition "An entry that is seen as having a reasonable measure of stability, may be used to mark the full adoption of a previously 'experimental' entry."@en ; - skos:prefLabel "stable"@en ; - schema:color "#2e8c09" . 
- - schema:name "Geological Survey of Queensland" . - diff --git a/tests/data/vocprez/expected_responses/concept_scheme_top_concepts_with_children.ttl b/tests/data/vocprez/expected_responses/concept_scheme_top_concepts_with_children.ttl deleted file mode 100755 index db552cff..00000000 --- a/tests/data/vocprez/expected_responses/concept_scheme_top_concepts_with_children.ttl +++ /dev/null @@ -1,106 +0,0 @@ -@prefix dcterms: . -@prefix ns1: . -@prefix owl: . -@prefix prez: . -@prefix rdfs: . -@prefix schema: . -@prefix skos: . -@prefix xsd: . - - dcterms:identifier "brhl-prps:pggd"^^prez:identifier ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - skos:definition "Borehole purposes applicable to regulatory notification forms."@en ; - skos:prefLabel "PGGD selection"@en . - - a owl:Ontology, - skos:ConceptScheme ; - dcterms:identifier "def2:borehole-purpose"^^prez:identifier ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - ns1:status ; - skos:definition "The primary purpose of a borehole based on the legislative State Act and/or the resources industry sector."@en ; - skos:hasTopConcept , - , - , - , - , - , - , - ; - skos:prefLabel "Borehole Purpose"@en ; - prez:childrenCount 8 ; - prez:link "/v/vocab/def2:borehole-purpose" . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . - -dcterms:provenance rdfs:label "Provenance"@en ; - dcterms:description "The statement may include a description of any changes successive custodians made to the resource."@en . - -rdfs:label rdfs:label "label" . - - a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - skos:definition "Wells and bores drilled to facilitate the mining of coal under permits governed by the Queensland Mineral Resources Act 1989"@en ; - skos:prefLabel "Coal"@en ; - prez:childrenCount 2 ; - prez:link "/v/vocab/def2:borehole-purpose/brhl-prps:coal" . - - a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - skos:definition "Wells and bores drilled under permits governed by the Queensland Geothermal Energy Act 2010"@en ; - skos:prefLabel "Geothermal"@en ; - prez:childrenCount 0 ; - prez:link "/v/collection/brhl-prps:pggd/brhl-prps:geothermal", - "/v/vocab/def2:borehole-purpose/brhl-prps:geothermal" . - - a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - skos:definition "Wells and bores drilled under permits governed by the Queensland Greenhouse Gas Storage Act 2009"@en ; - skos:prefLabel "Greenhouse Gas Storage"@en ; - prez:childrenCount 1 ; - prez:link "/v/collection/brhl-prps:pggd/brhl-prps:greenhouse-gas-storage", - "/v/vocab/def2:borehole-purpose/brhl-prps:greenhouse-gas-storage" . 
- - a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - skos:definition "Wells and bores drilled to facilitate the mining of minerals, excluding coal and oil shale, under permits governed by the Queensland Mineral Resources Act (1989)"@en ; - skos:prefLabel "Mineral"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/def2:borehole-purpose/brhl-prps:mineral" . - - a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - skos:definition "Wells and bores drilled by non-industry agents outside of the State Resources Acts"@en ; - skos:prefLabel "Non-Industry"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/def2:borehole-purpose/brhl-prps:non-industry" . - - a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - skos:definition "Wells and bores drilled to facilitate the mining of oil shale under permits governed by the Queensland Mineral Resources Act 1989"@en ; - skos:prefLabel "Oil Shale"@en ; - prez:childrenCount 0 ; - prez:link "/v/vocab/def2:borehole-purpose/brhl-prps:oil-shale" . - - a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - skos:definition "Wells and bores drilled under permits governed by the Queensland Petroleum Act 1923 and Petroleum and Gas (Production and Safety) Act 2004. This includes water observation, water disposal, and water supply wells drilled under the relevant Petroleum Acts rather than the Water Act."@en ; - skos:prefLabel "Petroleum"@en ; - prez:childrenCount 3 ; - prez:link "/v/vocab/def2:borehole-purpose/brhl-prps:petroleum" . - - a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - skos:definition "Wells and bores drilled under permits governed by the Queensland Water Act 2000. A well or bore is only considered a water well or bore where drilled under the Water Act, e.g. a well or bore drilled to serve a water observation function under the Petroleum Act is considered a Petroleum Well with an Observation function or sub-purpose. Additional rights, obligations, and responsibilities may be conferred by intersecting legislation on wells and bores drilled by mineral and coal permit holders and petroleum and gas permit holders under the Mineral Resources Act 1989 and the Petroleum and Gas (Production and Safety) Act 2004 respectively."@en ; - skos:prefLabel "Water"@en ; - prez:childrenCount 0 ; - prez:link "/v/collection/brhl-prps:pggd/brhl-prps:water", - "/v/vocab/def2:borehole-purpose/brhl-prps:water" . - - skos:definition "An entry that is seen as having a reasonable measure of stability, may be used to mark the full adoption of a previously 'experimental' entry."@en ; - skos:prefLabel "stable"@en ; - schema:color "#2e8c09" . - diff --git a/tests/data/vocprez/expected_responses/concept_scheme_with_children.ttl b/tests/data/vocprez/expected_responses/concept_scheme_with_children.ttl deleted file mode 100755 index e3c46478..00000000 --- a/tests/data/vocprez/expected_responses/concept_scheme_with_children.ttl +++ /dev/null @@ -1,49 +0,0 @@ -@prefix dcterms: . -@prefix ns1: . -@prefix owl: . -@prefix prez: . -@prefix prov: . -@prefix rdfs: . -@prefix schema: . -@prefix skos: . -@prefix xsd: . 
- - a owl:Ontology, - skos:ConceptScheme ; - dcterms:created "2020-07-17"^^xsd:date ; - dcterms:creator ; - dcterms:modified "2023-03-16"^^xsd:date ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - dcterms:publisher ; - ns1:status ; - skos:definition "The primary purpose of a borehole based on the legislative State Act and/or the resources industry sector."@en ; - skos:prefLabel "Borehole Purpose"@en ; - prov:qualifiedDerivation [ prov:entity ; - prov:hadRole ] ; - prez:childrenCount 8 . - -dcterms:created rdfs:label "Date Created"@en ; - dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en . - -dcterms:creator rdfs:label "Creator"@en ; - dcterms:description "Recommended practice is to identify the creator with a URI. If this is not possible or feasible, a literal value that identifies the creator may be provided."@en . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:modified rdfs:label "Date Modified"@en ; - dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en . - -dcterms:provenance rdfs:label "Provenance"@en ; - dcterms:description "The statement may include a description of any changes successive custodians made to the resource."@en . - -dcterms:publisher rdfs:label "Publisher"@en . - -rdfs:label rdfs:label "label" . - - skos:definition "An entry that is seen as having a reasonable measure of stability, may be used to mark the full adoption of a previously 'experimental' entry."@en ; - skos:prefLabel "stable"@en ; - schema:color "#2e8c09" . - - schema:name "Geological Survey of Queensland" . - diff --git a/tests/data/vocprez/expected_responses/empty.ttl b/tests/data/vocprez/expected_responses/empty.ttl deleted file mode 100755 index e69de29b..00000000 diff --git a/tests/data/vocprez/expected_responses/vocab_listing_anot.ttl b/tests/data/vocprez/expected_responses/vocab_listing_anot.ttl deleted file mode 100755 index d4c5c974..00000000 --- a/tests/data/vocprez/expected_responses/vocab_listing_anot.ttl +++ /dev/null @@ -1,119 +0,0 @@ -@prefix dcterms: . -@prefix ns1: . -@prefix prez: . -@prefix prov: . -@prefix rdf: . -@prefix rdfs: . -@prefix schema: . -@prefix skos: . -@prefix xsd: . - - a skos:ConceptScheme ; - dcterms:identifier "rf:BeddingSurfaceStructure"^^prez:identifier ; - ns1:status ; - skos:definition "A dictionary of bed surface structures, eg. ripples, desiccation cracks."@en ; - skos:prefLabel "BeddingSurfaceStructure"@en ; - prez:link "/v/vocab/rf:BeddingSurfaceStructure" . - - a skos:ConceptScheme ; - dcterms:identifier "def2:borehole-purpose"^^prez:identifier ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - dcterms:publisher ; - ns1:status ; - skos:definition "The primary purpose of a borehole based on the legislative State Act and/or the resources industry sector."@en ; - skos:prefLabel "Borehole Purpose"@en ; - prov:qualifiedDerivation [ prov:entity ; - prov:hadRole ] ; - prez:link "/v/vocab/def2:borehole-purpose" . 
- - a skos:ConceptScheme ; - dcterms:identifier "def2:borehole-purpose-no-children"^^prez:identifier ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - dcterms:publisher ; - ns1:status ; - skos:definition "The primary purpose of a borehole based on the legislative State Act and/or the resources industry sector."@en ; - skos:prefLabel "Borehole Purpose no children"@en ; - prov:qualifiedDerivation [ prov:entity ; - prov:hadRole ] ; - prez:link "/v/vocab/def2:borehole-purpose-no-children" . - -dcterms:description rdfs:label "Description"@en ; - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en . - -dcterms:identifier rdfs:label "Identifier"@en ; - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en . - -dcterms:provenance rdfs:label "Provenance"@en ; - dcterms:description "The statement may include a description of any changes successive custodians made to the resource."@en . - -dcterms:publisher rdfs:label "Publisher"@en . - - a skos:ConceptScheme ; - dcterms:identifier "2016.01:contacttype"^^prez:identifier ; - dcterms:provenance "Original set of terms from the GeosciML standard" ; - dcterms:publisher ; - skos:definition "This scheme describes the concept space for Contact Type concepts, as defined by the IUGS Commission for Geoscience Information (CGI) Geoscience Terminology Working Group. By extension, it includes all concepts in this conceptScheme, as well as concepts in any previous versions of the scheme. Designed for use in the contactType property in GeoSciML Contact elements."@en ; - skos:prefLabel "Contact Type"@en ; - prez:link "/v/vocab/2016.01:contacttype" . - -rdf:type rdfs:label "type" . - -rdfs:label rdfs:label "label" . - -skos:definition rdfs:label "definition"@en ; - skos:definition "A statement or formal explanation of the meaning of a concept."@en . - -skos:prefLabel rdfs:label "preferred label"@en ; - skos:definition "The preferred lexical label for a resource, in a given language."@en . - - a skos:ConceptScheme ; - dcterms:identifier "defn:reg-statuses"^^prez:identifier ; - dcterms:publisher ; - skos:definition """This vocabulary is a re-published and only marginally changed version of the Registry Ontology's (http://epimorphics.com/public/vocabulary/Registry.html) *Status* vocabulary (online in RDF: http://purl.org/linked-data/registry). The only real change to content has been the addition of the term `unstable`. This re-publication has been performed to allow the IRIs of each vocab term (skos:Concept) to resolve to both human-readable and machine-readable forms of content (HTML and RDF), using HTTP content negotiation. - -Note that just like the original form of this vocabulary, while it is a SKOS vocabulary implemented as a single skos:ConceptScheme, it is also an OWL Ontology and that each *Status* is both a skos:Concept and a reg:Status individual."""@en ; - skos:prefLabel "Registry Status Vocabulary"@en ; - prez:link "/v/vocab/defn:reg-statuses" . 
- - a skos:ConceptScheme ; - dcterms:identifier "defn:vocdermods"^^prez:identifier ; - dcterms:provenance "Created for the MER catalogue upgrade project, 2022"@en ; - dcterms:publisher ; - ns1:status ; - skos:definition "The modes by which one vocabulary may derive from another"@en ; - skos:prefLabel "Vocabulary Derivation Modes"@en ; - prez:link "/v/vocab/defn:vocdermods" . - - a skos:ConceptScheme ; - dcterms:identifier "defn:warox-alteration-types"^^prez:identifier ; - dcterms:provenance "This vocabulary was built on an extract of the WAROX system's lookup table"@en ; - skos:definition "This vocabulary gives Alteration Type concepts, listed in the Geological Survey of Western Australia's WAROX database."@en ; - skos:prefLabel "WAROX Alteration Type"@en ; - prez:link "/v/vocab/defn:warox-alteration-types" . - -schema:color rdfs:label "color" . - -schema:name rdfs:label "name" . - - dcterms:identifier "rg-sttss:experimental"^^prez:identifier ; - skos:definition "An entry that has been accepted into the register temporarily and may be subject to change or withdrawal."@en ; - skos:prefLabel "experimental"@en ; - prez:link "/v/vocab/defn:reg-statuses/rg-sttss:experimental" ; - schema:color "#eae72c" . - - schema:name "Commission for the Management and Application of Geoscience Information" . - - schema:name "SA Minerals and Energy Resources" . - - schema:name "Geological Survey of Queensland" . - - dcterms:identifier "rg-sttss:stable"^^prez:identifier ; - skos:definition "An entry that is seen as having a reasonable measure of stability, may be used to mark the full adoption of a previously 'experimental' entry."@en ; - skos:prefLabel "stable"@en ; - prez:link "/v/vocab/defn:reg-statuses/rg-sttss:stable" ; - schema:color "#2e8c09" . - -skos:ConceptScheme rdfs:label "Concept Scheme"@en ; - skos:definition "A set of concepts, optionally including statements about semantic relationships between those concepts."@en ; - prez:count 7 . - diff --git a/tests/data/vocprez/input/absolute-collection.ttl b/tests/data/vocprez/input/absolute-collection.ttl deleted file mode 100755 index 2761e46c..00000000 --- a/tests/data/vocprez/input/absolute-collection.ttl +++ /dev/null @@ -1,42 +0,0 @@ -@prefix dcterms: . -@prefix ns1: . -@prefix prez: . -@prefix rdf: . -@prefix rdfs: . -@prefix schema: . -@prefix skos: . - - dcterms:identifier "df:depth-reference"^^prez:identifier ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - ns1:status ; - skos:definition "The point or level from which all depths are measured and referenced to for an entity or activity. Typically relative to a common global or regional reference datum such as the Australian Height Datum (AHD)."@en ; - skos:prefLabel "Depth Reference"@en . - - a skos:Collection ; - dcterms:identifier "dpth-rfrnc:absolute"^^prez:identifier, - "depth-reference:absolute"^^prez:slug ; - dcterms:provenance "Defined here" ; - skos:definition "A fixed plane or point that describes an absolute reference for depth observations."@en ; - skos:member , - , - ; - skos:prefLabel "Absolute"@en ; - prez:link "/v/collection/dpth-rfrnc:absolute" . 
- - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - skos:definition "The Australian Height Datum is a vertical datum in Australia.In 1971 the mean sea level for 1966-1968 was assigned the value of 0.000m on the Australian Height Datum at thirty tide gauges around the coast of the Australian continent."@en ; - skos:prefLabel "Australian Height Datum"@en ; - prez:link "/v/collection/dpth-rfrnc:absolute/dpth-rfrnc:australian-height-datum", - "/v/vocab/df:depth-reference/dpth-rfrnc:australian-height-datum" . - - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - skos:definition "The elevation (on the ground) or altitude (in the air) of an object, relative to the average sea level."@en ; - skos:prefLabel "Mean Sea Level"@en ; - prez:link "/v/collection/dpth-rfrnc:absolute/dpth-rfrnc:mean-sea-level", - "/v/vocab/df:depth-reference/dpth-rfrnc:mean-sea-level" . - - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - skos:definition "The distance below mean sea level, the inverse of measurements to Mean Sea Level."@en ; - skos:prefLabel "Metres Sub-Sea"@en ; - prez:link "/v/collection/dpth-rfrnc:absolute/dpth-rfrnc:metres-sub-sea", - "/v/vocab/df:depth-reference/dpth-rfrnc:metres-sub-sea" . diff --git a/tests/data/vocprez/input/alteration-types.ttl b/tests/data/vocprez/input/alteration-types.ttl deleted file mode 100755 index 21f7d9f0..00000000 --- a/tests/data/vocprez/input/alteration-types.ttl +++ /dev/null @@ -1,334 +0,0 @@ -PREFIX : -PREFIX cs: -PREFIX dcterms: -PREFIX reg: -PREFIX sdo: -PREFIX skos: -PREFIX status: -PREFIX xsd: - -:argillic-advanced - a skos:Concept ; - dcterms:identifier "argillic-advanced"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:broader :argillic ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "argillic - advanced"@en ; -. - -:argillic-intermediate - a skos:Concept ; - dcterms:identifier "argillic-intermediate"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:broader :argillic ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "argillic - intermediate"@en ; -. - -:skarn-magnesian - a skos:Concept ; - dcterms:identifier "skarn-magnesian"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:broader :skarn ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "skarn - magnesian"@en ; -. - -:skarn-prograde-stage - a skos:Concept ; - dcterms:identifier "skarn-prograde-stage"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:broader :skarn ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "skarn - prograde stage"@en ; -. - -:skarn-retrograde-stage - a skos:Concept ; - dcterms:identifier "skarn-retrograde-stage"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:broader :skarn ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "skarn - retrograde stage"@en ; -. - -:albitic - a skos:Concept ; - dcterms:identifier "albitic"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "albitic"@en ; -. 
- -:alkali-metasomatism - a skos:Concept ; - dcterms:identifier "alkali-metasomatism"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "alkali metasomatism"@en ; -. - -:carbonate - a skos:Concept ; - dcterms:identifier "carbonate"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "carbonate"@en ; -. - -:chloritic - a skos:Concept ; - dcterms:identifier "chloritic"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "chloritic"@en ; -. - -:deuteric - a skos:Concept ; - dcterms:identifier "deuteric"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "deuteric"@en ; -. - -:fenitization - a skos:Concept ; - dcterms:identifier "fenitization"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "fenitization"@en ; -. - -:fluorite-and-topaz - a skos:Concept ; - dcterms:identifier "fluorite-and-topaz"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "fluorite and topaz"@en ; -. - -:greisen - a skos:Concept ; - dcterms:identifier "greisen"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "greisen"@en ; -. - -:hematitic - a skos:Concept ; - dcterms:identifier "hematitic"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "hematitic"@en ; -. - -:jasperoid - a skos:Concept ; - dcterms:identifier "jasperoid"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "jasperoid"@en ; -. - -:listvenitization - a skos:Concept ; - dcterms:identifier "listvenitization"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "listvenitization"@en ; -. - -:phyllic-qsp - a skos:Concept ; - dcterms:identifier "phyllic-qsp"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "phyllic (QSP)"@en ; -. - -:potassic - a skos:Concept ; - dcterms:identifier "potassic"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "potassic"@en ; -. - -:propylitic - a skos:Concept ; - dcterms:identifier "propylitic"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "propylitic"@en ; -. - -:pyritic - a skos:Concept ; - dcterms:identifier "pyritic"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "pyritic"@en ; -. 
- -:rodingitization - a skos:Concept ; - dcterms:identifier "rodingitization"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "rodingitization"@en ; -. - -:sericitic - a skos:Concept ; - dcterms:identifier "sericitic"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "sericitic"@en ; -. - -:serpentinization - a skos:Concept ; - dcterms:identifier "serpentinization"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "serpentinization"@en ; -. - -:silicification - a skos:Concept ; - dcterms:identifier "silicification"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "silicification"@en ; -. - -:tourmalinization - a skos:Concept ; - dcterms:identifier "tourmalinization"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "tourmalinization"@en ; -. - -:zeolitic - a skos:Concept ; - dcterms:identifier "zeolitic"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "zeolitic"@en ; -. - - - a sdo:Organization ; - sdo:name "Geological Survey of Western Australia" ; - sdo:url "http://dmp.wa.gov.au/Geological-Survey/Geological-Survey-262.aspx"^^xsd:anyURI ; -. - -:argillic - a skos:Concept ; - dcterms:identifier "argillic"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "argillic"@en ; -. - -:skarn - a skos:Concept ; - dcterms:identifier "skarn"^^xsd:token ; - dcterms:provenance "From WAROX"@en ; - reg:status status:submitted ; - skos:definition "Not given"@en ; - skos:inScheme cs: ; - skos:prefLabel "skarn"@en ; -. - -cs: - a skos:ConceptScheme ; - dcterms:identifier "warox-alteration-type"^^xsd:token ; - dcterms:created "2021-09-26"^^xsd:date ; - dcterms:creator ; - dcterms:modified "2021-09-29"^^xsd:date ; - dcterms:provenance "This vocabulary was built on an extract of the WAROX system's lookup table"@en ; - skos:definition "This vocabulary gives Alteration Type concepts, listed in the Geological Survey of Western Australia's WAROX database."@en ; - skos:hasTopConcept - :albitic , - :alkali-metasomatism , - :argillic , - :carbonate , - :chloritic , - :deuteric , - :fenitization , - :fluorite-and-topaz , - :greisen , - :hematitic , - :jasperoid , - :listvenitization , - :phyllic-qsp , - :potassic , - :propylitic , - :pyritic , - :rodingitization , - :sericitic , - :serpentinization , - :silicification , - :skarn , - :tourmalinization , - :zeolitic ; - skos:prefLabel "WAROX Alteration Type"@en ; -. diff --git a/tests/data/vocprez/input/beddingsurfacestructure.ttl b/tests/data/vocprez/input/beddingsurfacestructure.ttl deleted file mode 100755 index eeca1838..00000000 --- a/tests/data/vocprez/input/beddingsurfacestructure.ttl +++ /dev/null @@ -1,177 +0,0 @@ - . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - . - . - "A dictionary of bed surface structures, eg. ripples, desiccation cracks."@en . 
- "Created for internal use in corporate BGS relational database"@en . - "BeddingSurfaceStructure"@en . - "2003-06-04"^^ . - "2003-06-04"^^ . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - "http://data.bgs.ac.uk/ref/BeddingSurfaceStructure"^^ . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - "Shrinkage (Desiccation) Cracks"@en . - "Synaeresis Cracks"@en . - "Parting Lineation (Primary Current Lineation)"@en . - "Rainspots"@en . - "Ripples"@en . - "Current Ripples"@en . - "Linguoid Current Ripples"@en . - "Sinuous-Crested Current Rippled"@en . - "Straight-Crested Current Ripples"@en . - "Wave-Formed Ripples"@en . - "Interference Wave-Formed Ripples"@en . - "Modified Wave-Formed Ripples"@en . - "Wind Ripples"@en . - "Trace Fossils"@en . - "Crawling / Walking Tracks and Trails"@en . - "Foot Prints"@en . - "Grazing Traces"@en . - "Coiled Grazing Traces"@en . - "Meandering Grazing Traces"@en . - "Radial Grazing Traces"@en . - "Resting Traces"@en . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - "SHRINKAGE (DESICCATION) CRACKS"@en . - "SYNAERESIS CRACKS"@en . - "PARTING LINEATION (PRIMARY CURRENT LINEATION)"@en . - "RAINSPOTS"@en . - "RIPPLES, TYPE UNDEFINED"@en . - "CURRENT RIPPLES"@en . - "LINGUOID CURRENT RIPPLES"@en . - "SINUOUS-CRESTED CURRENT RIPPLED"@en . - "STRAIGHT-CRESTED CURRENT RIPPLES"@en . - "WAVE-FORMED RIPPLES"@en . - "INTERFERENCE WAVE-FORMED RIPPLES"@en . - "MODIFIED WAVE-FORMED RIPPLES"@en . - "WIND RIPPLES"@en . - "TRACE FOSSILS, TYPE UNDEFINED"@en . - "CRAWLING / WALKING TRACKS AND TRAILS"@en . - "FOOT PRINTS"@en . - "GRAZING TRACES"@en . - "COILED GRAZING TRACES"@en . - "MEANDERING GRAZING TRACES"@en . - "RADIAL GRAZING TRACES"@en . - "RESTING TRACES"@en . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - . - "Shrinkage (Desiccation) Cracks"@en . - "Synaeresis Cracks"@en . - "Parting Lineation (Primary Current Lineation)"@en . - "Rainspots"@en . - "Ripples"@en . - "Current Ripples"@en . - "Linguoid Current Ripples"@en . - "Sinuous-Crested Current Rippled"@en . - "Straight-Crested Current Ripples"@en . - "Wave-Formed Ripples"@en . - "Interference Wave-Formed Ripples"@en . - "Modified Wave-Formed Ripples"@en . - "Wind Ripples"@en . - "Trace Fossils"@en . 
- "Crawling / Walking Tracks and Trails"@en . - "Foot Prints"@en . - "Grazing Traces"@en . - "Coiled Grazing Traces"@en . - "Meandering Grazing Traces"@en . - "Radial Grazing Traces"@en . - "Resting Traces"@en . diff --git a/tests/data/vocprez/input/borehole-purpose-no-children.ttl b/tests/data/vocprez/input/borehole-purpose-no-children.ttl deleted file mode 100755 index d2e619fd..00000000 --- a/tests/data/vocprez/input/borehole-purpose-no-children.ttl +++ /dev/null @@ -1,26 +0,0 @@ -PREFIX agldwgstatus: -PREFIX cs: -PREFIX dcterms: -PREFIX owl: -PREFIX prov: -PREFIX rdfs: -PREFIX reg: -PREFIX sdo: -PREFIX skos: -PREFIX xsd: - -cs: - a - owl:Ontology , - skos:ConceptScheme ; - dcterms:created "2020-07-17"^^xsd:date ; - dcterms:creator ; - dcterms:modified "2023-03-16"^^xsd:date ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - dcterms:publisher ; - reg:status agldwgstatus:stable ; - skos:definition "The primary purpose of a borehole based on the legislative State Act and/or the resources industry sector."@en ; - prov:qualifiedDerivation [ prov:entity ; - prov:hadRole ] ; - skos:prefLabel "Borehole Purpose no children"@en ; -. diff --git a/tests/data/vocprez/input/borehole-purpose.ttl b/tests/data/vocprez/input/borehole-purpose.ttl deleted file mode 100755 index cdb797bb..00000000 --- a/tests/data/vocprez/input/borehole-purpose.ttl +++ /dev/null @@ -1,238 +0,0 @@ -PREFIX agldwgstatus: -PREFIX bhpur: -PREFIX cs: -PREFIX dcterms: -PREFIX owl: -PREFIX prov: -PREFIX rdfs: -PREFIX reg: -PREFIX sdo: -PREFIX skos: -PREFIX xsd: - -bhpur:carbon-capture-and-storage - a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - rdfs:isDefinedBy cs: ; - skos:broader bhpur:greenhouse-gas-storage ; - skos:definition "Wells that deposit carbon dioxide into an underground geological formation after capture from large point sources, such as a cement factory or biomass power plant."@en ; - skos:inScheme cs: ; - skos:prefLabel "Carbon Capture and Storage"@en ; -. - -bhpur:open-cut-coal-mining - a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - rdfs:isDefinedBy cs: ; - skos:broader bhpur:coal ; - skos:definition "Wells drilled for the purpose of assessing coal resources for an open cut coal mine."@en ; - skos:inScheme cs: ; - skos:prefLabel "Open-Cut Coal Mining"@en ; -. - -bhpur:pggd - a skos:Collection ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - skos:definition "Borehole purposes applicable to regulatory notification forms."@en ; - skos:member - bhpur:coal-seam-gas , - bhpur:conventional-petroleum , - bhpur:geothermal , - bhpur:greenhouse-gas-storage , - bhpur:unconventional-petroleum , - bhpur:water ; - skos:prefLabel "PGGD selection"@en ; -. - -bhpur:shale-gas - a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - rdfs:isDefinedBy cs: ; - skos:broader bhpur:unconventional-petroleum ; - skos:definition "Wells targetting shale that produces natural gas. A shale that is thermally mature enough and has sufficient gas content to produce economic quantities of natural gas."@en ; - skos:inScheme cs: ; - skos:prefLabel "Shale Gas"@en ; -. - -bhpur:shale-oil - a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - rdfs:isDefinedBy cs: ; - skos:broader bhpur:unconventional-petroleum ; - skos:definition "Wells targetting shale that produces oil. Oil obtained by artificial maturation of oil shale. 
The process of artificial maturation uses controlled heating, or pyrolysis, of kerogen to release the shale oil."@en ; - skos:inScheme cs: ; - skos:prefLabel "Shale Oil"@en ; -. - -bhpur:tight-gas - a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - rdfs:isDefinedBy cs: ; - skos:broader bhpur:unconventional-petroleum ; - skos:definition "Wells targeting gas from relatively impermeable reservoir rock."@en ; - skos:inScheme cs: ; - skos:prefLabel "Tight Gas"@en ; -. - -bhpur:tight-oil - a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - rdfs:isDefinedBy cs: ; - skos:broader bhpur:unconventional-petroleum ; - skos:definition "Wells targeting oil from relatively impermeable reservoir rock."@en ; - skos:inScheme cs: ; - skos:prefLabel "Tight Oil"@en ; -. - -bhpur:underground-coal-mining - a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - rdfs:isDefinedBy cs: ; - skos:broader bhpur:coal ; - skos:definition "Wells drilled for the purpose of assessing coal resources for an underground coal mine."@en ; - skos:inScheme cs: ; - skos:prefLabel "Underground Coal Mining"@en ; -. - -bhpur:coal-seam-gas - a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - rdfs:isDefinedBy cs: ; - skos:broader bhpur:petroleum ; - skos:definition "Wells targeting coal seams where hydrocarbons are kept in place via adsorption to the coal surface and hydrostatic pressure"@en ; - skos:inScheme cs: ; - skos:prefLabel "Coal Seam Gas"@en ; -. - -bhpur:conventional-petroleum - a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - rdfs:isDefinedBy cs: ; - skos:broader bhpur:petroleum ; - skos:definition "Wells targeting conventional petroleum reservoirs where buoyant forces keep hydrocarbons in place below a sealing caprock."@en ; - skos:inScheme cs: ; - skos:prefLabel "Conventional Petroleum"@en ; -. - -bhpur:mineral - a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - rdfs:isDefinedBy cs: ; - skos:definition "Wells and bores drilled to facilitate the mining of minerals, excluding coal and oil shale, under permits governed by the Queensland Mineral Resources Act (1989)"@en ; - skos:inScheme cs: ; - skos:prefLabel "Mineral"@en ; - skos:topConceptOf cs: ; -. - -bhpur:non-industry - a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - rdfs:isDefinedBy cs: ; - skos:altLabel "Non-Industry"@en ; - skos:definition "Wells and bores drilled by non-industry agents outside of the State Resources Acts"@en ; - skos:inScheme cs: ; - skos:prefLabel "Non-Industry"@en ; - skos:topConceptOf cs: ; -. - -bhpur:oil-shale - a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - rdfs:isDefinedBy cs: ; - skos:definition "Wells and bores drilled to facilitate the mining of oil shale under permits governed by the Queensland Mineral Resources Act 1989"@en ; - skos:inScheme cs: ; - skos:prefLabel "Oil Shale"@en ; - skos:topConceptOf cs: ; -. - -bhpur:geothermal - a skos:Concept ; - dcterms:provenance "Compiled by the Geological Survey of Queensland" ; - rdfs:isDefinedBy cs: ; - skos:definition "Wells and bores drilled under permits governed by the Queensland Geothermal Energy Act 2010"@en ; - skos:inScheme cs: ; - skos:prefLabel "Geothermal"@en ; - skos:topConceptOf cs: ; -. 
-
-bhpur:water
- a skos:Concept ;
- dcterms:provenance "Compiled by the Geological Survey of Queensland" ;
- rdfs:isDefinedBy cs: ;
- skos:definition "Wells and bores drilled under permits governed by the Queensland Water Act 2000. A well or bore is only considered a water well or bore where drilled under the Water Act, e.g. a well or bore drilled to serve a water observation function under the Petroleum Act is considered a Petroleum Well with an Observation function or sub-purpose. Additional rights, obligations, and responsibilities may be conferred by intersecting legislation on wells and bores drilled by mineral and coal permit holders and petroleum and gas permit holders under the Mineral Resources Act 1989 and the Petroleum and Gas (Production and Safety) Act 2004 respectively."@en ;
- skos:inScheme cs: ;
- skos:prefLabel "Water"@en ;
- skos:topConceptOf cs: ;
-.
-
-
- a sdo:Organization ;
- sdo:name "Geological Survey of Queensland" ;
- sdo:url "http://www.business.qld.gov.au/industries/mining-energy-water/resources/geoscience-information/gsq"^^xsd:anyURI ;
-.
-
-bhpur:coal
- a skos:Concept ;
- dcterms:provenance "Compiled by the Geological Survey of Queensland" ;
- rdfs:isDefinedBy cs: ;
- skos:definition "Wells and bores drilled to facilitate the mining of coal under permits governed by the Queensland Mineral Resources Act 1989"@en ;
- skos:inScheme cs: ;
- skos:prefLabel "Coal"@en ;
- skos:topConceptOf cs: ;
-.
-
-bhpur:greenhouse-gas-storage
- a skos:Concept ;
- dcterms:provenance "Compiled by the Geological Survey of Queensland" ;
- rdfs:isDefinedBy cs: ;
- skos:altLabel "GHG"@en ;
- skos:definition "Wells and bores drilled under permits governed by the Queensland Greenhouse Gas Storage Act 2009"@en ;
- skos:inScheme cs: ;
- skos:prefLabel "Greenhouse Gas Storage"@en ;
- skos:topConceptOf cs: ;
-.
-
-bhpur:petroleum
- a skos:Concept ;
- dcterms:provenance "Compiled by the Geological Survey of Queensland" ;
- rdfs:isDefinedBy cs: ;
- skos:definition "Wells and bores drilled under permits governed by the Queensland Petroleum Act 1923 and Petroleum and Gas (Production and Safety) Act 2004. This includes water observation, water disposal, and water supply wells drilled under the relevant Petroleum Acts rather than the Water Act."@en ;
- skos:inScheme cs: ;
- skos:prefLabel "Petroleum"@en ;
- skos:topConceptOf cs: ;
-.
-
-bhpur:unconventional-petroleum
- a skos:Concept ;
- dcterms:provenance "Compiled by the Geological Survey of Queensland" ;
- rdfs:isDefinedBy cs: ;
- skos:broader bhpur:petroleum ;
- skos:definition "Wells targeting unconventional reservoirs whose properties including porosity, permeability, or trapping mechanism differ from conventional reservoirs"@en ;
- skos:inScheme cs: ;
- skos:prefLabel "Unconventional Petroleum"@en ;
-.
-
-cs:
- a
- owl:Ontology ,
- skos:ConceptScheme ;
- dcterms:created "2020-07-17"^^xsd:date ;
- dcterms:creator ;
- dcterms:modified "2023-03-16"^^xsd:date ;
- dcterms:provenance "Compiled by the Geological Survey of Queensland" ;
- dcterms:publisher ;
- reg:status agldwgstatus:stable ;
- skos:definition "The primary purpose of a borehole based on the legislative State Act and/or the resources industry sector."@en ;
- skos:hasTopConcept
- bhpur:coal ,
- bhpur:geothermal ,
- bhpur:greenhouse-gas-storage ,
- bhpur:mineral ,
- bhpur:non-industry ,
- bhpur:oil-shale ,
- bhpur:petroleum ,
- bhpur:water ;
- prov:qualifiedDerivation [ prov:entity ;
- prov:hadRole ] ;
- skos:prefLabel "Borehole Purpose"@en ;
-.
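The Borehole Purpose scheme deleted above is a plain SKOS tree: leaf concepts point upward with skos:broader and the scheme enumerates its roots with skos:hasTopConcept. The following is a minimal SPARQL sketch of the traversal this kind of test data exercises, assuming the vocabulary is loaded into a queryable graph; the prefLabel used to select the starting concept is just an example taken from the file above.

    PREFIX skos: <http://www.w3.org/2004/02/skos/core#>

    # Walk one concept's broader-chain upward, e.g. from
    # "Carbon Capture and Storage" to its top concept "Greenhouse Gas Storage".
    SELECT ?ancestor ?ancestorLabel
    WHERE {
      ?concept skos:prefLabel "Carbon Capture and Storage"@en ;
               skos:broader+ ?ancestor .
      ?ancestor skos:prefLabel ?ancestorLabel .
    }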
diff --git a/tests/data/vocprez/input/catalog-of-vocabs.ttl b/tests/data/vocprez/input/catalog-of-vocabs.ttl deleted file mode 100755 index 3674424b..00000000 --- a/tests/data/vocprez/input/catalog-of-vocabs.ttl +++ /dev/null @@ -1,12 +0,0 @@ -PREFIX dcat: -PREFIX dcterms: - - a dcat:Catalog ; - dcterms:hasPart , - , - , - , - , - , - ; - . \ No newline at end of file diff --git a/tests/data/vocprez/input/contacttype.ttl b/tests/data/vocprez/input/contacttype.ttl deleted file mode 100755 index d6fed418..00000000 --- a/tests/data/vocprez/input/contacttype.ttl +++ /dev/null @@ -1,565 +0,0 @@ -PREFIX dcterms: -PREFIX rdfs: -PREFIX reg: -PREFIX sdo: -PREFIX skos: -PREFIX status: -PREFIX xsd: - - - a skos:Collection ; - dcterms:identifier "contacttype"^^xsd:token ; - dcterms:provenance "this vocabulary" ; - skos:definition "All Concepts in this vocabulary" ; - skos:member - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - , - ; - skos:prefLabel "Contact Type - All Concepts"@en ; -. - - - a sdo:Organization ; - sdo:affiliation ; - sdo:name "CGI Geoscience Terminology Working Group" ; - sdo:url "http://www.cgi-iugs.org/tech_collaboration/geoscience_terminology_working_group.html"^^xsd:anyURI ; -. - - - a skos:Concept ; - dcterms:identifier "alteration_facies_contact"^^xsd:token ; - dcterms:provenance "this vocabulary"@en ; - reg:status status:submitted ; - rdfs:isDefinedBy ; - skos:broader ; - skos:definition "A metasomatic facies contact separating rocks that have undergone alteration of a particular facies from those that have undergone metasomatism of another facies. Alteration is a kind of metasomatism that does not introduce economically important minerals."@en ; - skos:inScheme ; - skos:prefLabel "alteration facies contact"@en ; -. - - - a skos:Concept ; - dcterms:identifier "angular_unconformable_contact"^^xsd:token ; - dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition. American Geological Institute, Alexandria, 779 p."@en ; - reg:status status:submitted ; - rdfs:isDefinedBy ; - skos:broader ; - skos:definition "An unconformable contact between two geological units in which the older, underlying rocks dip at an angle different from the younger, overlying strata, usually in which younger sediments rest upon the eroded surface of tilted or folded older rocks."@en ; - skos:inScheme ; - skos:prefLabel "angular unconformable contact"@en ; -. - - - a skos:Concept ; - dcterms:identifier "buttress_unconformity"^^xsd:token ; - dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition. American Geological Institute, Alexandria, 779 p."@en ; - reg:status status:submitted ; - rdfs:isDefinedBy ; - skos:broader ; - skos:definition "An unconformity in which onlapping strata are truncated against a steep topographic scarp."@en ; - skos:inScheme ; - skos:prefLabel "buttress unconformity"@en ; -. - - - a skos:Concept ; - dcterms:identifier "chronostratigraphic_zone_contact"^^xsd:token ; - dcterms:provenance "FGDC"@en ; - reg:status status:submitted ; - rdfs:isDefinedBy ; - skos:broader ; - skos:definition "A contact between bodies of material having different ages of origin."@en ; - skos:inScheme ; - skos:prefLabel "chronostratigraphic-zone contact"@en ; -. 
-
-
- a skos:Concept ;
- dcterms:identifier "conductivity_contact"^^xsd:token ;
- dcterms:provenance "this vocabulary"@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A geophysical contact between bodies of material distinguished based on electrical conductivity characteristics"@en ;
- skos:inScheme ;
- skos:prefLabel "conductivity contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "conformable_contact"^^xsd:token ;
- dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition. American Geological Institute, Alexandria, 779 p."@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A contact separating two geological units in which the layers are formed one above the other in order by regular, uninterrupted deposition under the same general conditions."@en ;
- skos:inScheme ;
- skos:prefLabel "conformable contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "deformation_zone_contact"^^xsd:token ;
- dcterms:provenance "this vocabulary"@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A lithogenetic boundary separating rock masses that have different deformation structure, e.g. sheared rock against non-sheared rock, brecciated rock against non-brecciated rock."@en ;
- skos:inScheme ;
- skos:prefLabel "deformation zone contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "density_contact"^^xsd:token ;
- dcterms:provenance "this vocabulary"@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A geophysical contact separating bodies of material with different density characteristics, generally determined through measurement and modelling of gravity variations."@en ;
- skos:inScheme ;
- skos:prefLabel "density contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "disconformable_contact"^^xsd:token ;
- dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition. American Geological Institute, Alexandria, 779 p."@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "An unconformable contact between two geological units in which the bedding of the older, underlying unit is parallel to the bedding of the younger, overlying unit, but in which the contact between the two units is marked by an irregular or uneven surface of appreciable relief."@en ;
- skos:inScheme ;
- skos:prefLabel "disconformable contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "faulted_contact"^^xsd:token ;
- dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition. American Geological Institute, Alexandria, 779 p."@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A contact separating two bodies of material across which one body has slid past the other."@en ;
- skos:inScheme ;
- skos:prefLabel "faulted contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "geologic_province_contact"^^xsd:token ;
- dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition.
American Geological Institute, Alexandria, 779 p."@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A contact between regions characterised by their geological history or by similar structural, petrographic or stratigraphic features"@en ;
- skos:inScheme ;
- skos:prefLabel "geologic province contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "glacial_stationary_line"^^xsd:token ;
- dcterms:provenance "this vocabulary"@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A boundary between a subglacial geomorphic unit and a periglacial geomorphic unit, marking the maximum extent of glacial cover. This can be thought of as the outcrop of the contact between a glacier and its substrate at some time at each point along the boundary. This contact type is included as an interim concept, assuming that in the future, there will be extensions to account better for geomorphic units and line types."@en ;
- skos:inScheme ;
- skos:prefLabel "glacial stationary line"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "igneous_intrusive_contact"^^xsd:token ;
- dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition. American Geological Institute, Alexandria, 779 p."@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "An intrusive contact between a younger igneous rock and an older, pre-existing geological unit into which it has been intruded."@en ;
- skos:inScheme ;
- skos:prefLabel "igneous intrusive contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "igneous_phase_contact"^^xsd:token ;
- dcterms:provenance "this vocabulary"@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A lithogenetic contact separating lithologically distinct phases of a single intrusive body. Does not denote nature of contact (intrusive or gradation)."@en ;
- skos:inScheme ;
- skos:prefLabel "igneous phase contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "impact_structure_boundary"^^xsd:token ;
- dcterms:provenance "this vocabulary"@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "surface that bounds a body of rock affected by an extraterrestrial impact event"@en ;
- skos:inScheme ;
- skos:prefLabel "impact structure boundary"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "magnetic_polarity_contact"^^xsd:token ;
- dcterms:provenance "this vocabulary"@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A magnetic contact between bodies of material with different polarity of remnant magnetization, e.g. between sections of ocean floor with different polarity."@en ;
- skos:inScheme ;
- skos:prefLabel "magnetic polarity contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "magnetic_susceptiblity_contact"^^xsd:token ;
- dcterms:provenance "this vocabulary"@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A magnetic contact between bodies of material distinguished based on magnetic susceptibility characteristics."@en ;
- skos:inScheme ;
- skos:prefLabel "magnetic susceptibility contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "magnetization_contact"^^xsd:token ;
- dcterms:provenance "this vocabulary"@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A magnetic contact between bodies of material distinguished based on any aspect of magnetization of material in the units."@en ;
- skos:inScheme ;
- skos:prefLabel "magnetization contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "metamorphic_facies_contact"^^xsd:token ;
- dcterms:provenance "this vocabulary"@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A metamorphic contact separating rocks that have undergone metamorphism of a particular facies from those that have undergone metamorphism of another facies."@en ;
- skos:inScheme ;
- skos:prefLabel "metamorphic facies contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "metasomatic_facies_contact"^^xsd:token ;
- dcterms:provenance "this vocabulary"@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A metamorphic contact separating rocks that have undergone metasomatism of a particular facies from those that have undergone metasomatism of another facies. Metasomatism is distinguished from metamorphism by significant changes in bulk chemistry of the affected rock."@en ;
- skos:inScheme ;
- skos:prefLabel "metasomatic facies contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "mineralisation_assemblage_contact"^^xsd:token ;
- dcterms:provenance "this vocabulary"@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A metasomatic facies contact separating rocks which have been mineralised and contain a particular mineral assemblage from those which contain a different assemblage. Mineralization is a kind of metasomatism that introduces economically important minerals."@en ;
- skos:inScheme ;
- skos:prefLabel "mineralisation assemblage contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "nonconformable_contact"^^xsd:token ;
- dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition. American Geological Institute, Alexandria, 779 p."@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "An unconformable contact between an underlying, older nonstratified geological unit (usually intrusive igneous rocks or metamorphics) and an overlying, younger stratified geological unit."@en ;
- skos:inScheme ;
- skos:prefLabel "nonconformable contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "paraconformable_contact"^^xsd:token ;
- dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition. American Geological Institute, Alexandria, 779 p."@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader
- ,
- ;
- skos:definition "An unconformable contact between two geological units in which the bedding of the older, underlying unit is parallel to the bedding of the younger, overlying unit, in which the contact between the two units is planar, and may be coincident with a bedding plane."@en ;
- skos:inScheme ;
- skos:prefLabel "paraconformable contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "radiometric_contact"^^xsd:token ;
- dcterms:provenance "this vocabulary"@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A geophysical contact separating bodies of material distinguished based on the characteristics of emitted radiant energy related to radioactivity (e.g. gamma rays)."@en ;
- skos:inScheme ;
- skos:prefLabel "radiometric contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "sedimentary_facies_contact"^^xsd:token ;
- dcterms:provenance "based on Nichols, Gary, 1999, Sedimentology and stratigraphy, Blackwell, p. 62-63."@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A lithogenetic contact separating essentially coeval sedimentary material bodies distinguished by characteristics reflecting different physical or chemical processes active at the time of deposition of the sediment."@en ;
- skos:inScheme ;
- skos:prefLabel "sedimentary facies contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "sedimentary_intrusive_contact"^^xsd:token ;
- dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition. American Geological Institute, Alexandria, 779 p."@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "An intrusive contact between a sedimentary rock unit and plastic sediment (e.g., clay, chalk, salt, gypsum, etc.), forced upward into it from underlying sediment"@en ;
- skos:inScheme ;
- skos:prefLabel "sedimentary intrusive contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "seismic_contact"^^xsd:token ;
- dcterms:provenance "this vocabulary"@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A geophysical contact separating bodies of material defined based on their seismic character. Seismic character is based on transmission of vibrations (seismic waves) through a rock body, and relates to the velocity of transmission, and the nature of reflection, refraction, or transformation of seismic waves by inhomogeneities in the rock body."@en ;
- skos:inScheme ;
- skos:prefLabel "seismic contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "volcanic_subsidence_zone_boundary"^^xsd:token ;
- dcterms:provenance "this vocabulary, concept to encompass boundary of caldron, caldera, or crater."@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "boundary around a body of rock that is within a zone of subsidence or cratering produced by volcanic activity."@en ;
- skos:inScheme ;
- skos:prefLabel "volcanic subsidence zone boundary"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "weathering_contact"^^xsd:token ;
- dcterms:provenance "this vocabulary"@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A lithogenetic contact separating bodies of material differentiated based on lithologic properties related to weathering."@en ;
- skos:inScheme ;
- skos:prefLabel "weathering contact"@en ;
-.
-
-
- a sdo:Organization ;
- sdo:name "Commission for the Management and Application of Geoscience Information" ;
- sdo:url "http://www.cgi-iugs.org"^^xsd:anyURI ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "depositional_contact"^^xsd:token ;
- dcterms:provenance "this vocabulary"@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "Lithogenetic contact at which a sedimentary or volcanic rock has been deposited on (or against) another rock body. The relationship between the older underlying rocks and younger overlying rocks is unknown or not specified."@en ;
- skos:inScheme ;
- skos:narrower
- ,
- ;
- skos:prefLabel "depositional contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "magnetic_contact"^^xsd:token ;
- dcterms:provenance "this vocabulary"@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A geophysical contact separating bodies of material distinguished based on properties related to magnetic fields."@en ;
- skos:inScheme ;
- skos:narrower
- ,
- ,
- ;
- skos:prefLabel "magnetic contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "metamorphic_contact"^^xsd:token ;
- dcterms:provenance "this vocabulary"@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "Lithogenetic contact separating rocks that have different lithologic properties related to metamorphism, metasomatism, alteration, or mineralization. Generally separates metamorphic rock bodies, but may separate metamorphosed (broadly speaking) and non-metamorphosed rock."@en ;
- skos:inScheme ;
- skos:narrower
- ,
- ,
- ,
- ;
- skos:prefLabel "metamorphic contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "geophysical_contact"^^xsd:token ;
- dcterms:provenance "this vocabulary"@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A contact separating bodies of material in the earth that have different geophysical properties. Use for boundaries that are detected by geophysical sensor techniques as opposed to direct lithologic observation."@en ;
- skos:inScheme ;
- skos:narrower
- ,
- ,
- ,
- ,
- ;
- skos:prefLabel "geophysical contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "unconformable_contact"^^xsd:token ;
- dcterms:provenance "Neuendorf, K.K.E, Mehl, J.P. & Jackson, J.A. (eds), 2005. Glossary of geology, 5th Edition. American Geological Institute, Alexandria, 779 p."@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:broader ;
- skos:definition "A contact separating two geological units in which the younger unit succeeds the older after a substantial hiatus in deposition."@en ;
- skos:inScheme ;
- skos:narrower
- ,
- ,
- ,
- ,
- ;
- skos:prefLabel "unconformable contact"@en ;
-.
-
-
- a skos:Concept ;
- dcterms:identifier "contact"^^xsd:token ;
- dcterms:provenance "adapted from Jackson, 1997, page 137, NADM C1 2004"@en ;
- reg:status status:submitted ;
- rdfs:isDefinedBy ;
- skos:definition "A surface that separates geologic units. Very general concept representing any kind of surface separating two geologic units, including primary boundaries such as depositional contacts, all kinds of unconformities, intrusive contacts, and gradational contacts, as well as faults that separate geologic units."@en ;
- skos:inScheme ;
- skos:narrower
- ,
- ,
- ,
- ,
- ,
- ;
- skos:prefLabel "contact"@en ;
- skos:topConceptOf ;
-.
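Everything in the ContactType vocabulary hangs off the single top concept "contact" above, through paired skos:broader/skos:narrower links. A hedged sketch of the corresponding closure query follows, assuming the data is loaded in a queryable graph and that ?scheme is bound (or supplied) by the caller.

    PREFIX skos: <http://www.w3.org/2004/02/skos/core#>

    # List every concept below a scheme's top concept, at any depth.
    SELECT DISTINCT ?concept ?label
    WHERE {
      ?top skos:topConceptOf ?scheme ;
           skos:narrower+ ?concept .
      ?concept skos:prefLabel ?label .
    }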
- - - a skos:Concept ; - dcterms:identifier "lithogenetic_contact"^^xsd:token ; - dcterms:provenance "this vocabulary"@en ; - reg:status status:submitted ; - rdfs:isDefinedBy ; - skos:broader ; - skos:definition "A non-faulted contact separating bodies of material in the earth that have different lithologic character or geologic history."@en ; - skos:inScheme ; - skos:narrower - , - , - , - , - , - , - , - , - , - ; - skos:prefLabel "lithogenetic contact"@en ; -. - - - a skos:ConceptScheme ; - dcterms:identifier "contacttype"^^xsd:token ; - dcterms:created "2009-07-14"^^xsd:date ; - dcterms:creator ; - dcterms:modified "2020-06-23"^^xsd:date ; - dcterms:provenance "Original set of terms from the GeosciML standard" ; - dcterms:publisher ; - dcterms:source "http://www.opengis.net/doc/geosciml/4.1"^^xsd:anyURI ; - skos:changeNote - "2009 Revised from ContactType200811 with addition of impact_structure_boundary and volcanic_subsidence_zone_boundary, and addition of more metadata annotation"@en , - "2009-12-07 SMR Update metadata properties for version, creator, title, and format. Change skos:HistoryNote to dc:source for information on origin of terms and definitions."@en , - "2011-02-16 SMR replace URN with cgi http URI's. Last changes to fix URN for conceptScheme that was not updated in original updates."@en , - "2012-02-07 SMR update URI to replace numeric final token with English-language string as in original URN scheme."@en , - "2012-02-27 SMR add skos:exactMatch triples to map URIs for concepts in this vocabulary to number-token URIs in 201012 version of same concepts."@en , - "2012-11-24 SMR Update to 201211 version; add collection entity, check all pref labels are lower case, remove owl:NamedIndividual and Owl:Thing rdf:types."@en , - "2016-06-15 OLR - redo Excel spreadsheet to work with XSLT, to make consistent SKOS-RDF with all CGI vocabularies. Generate new SKOS-RDF file."@en , - "2020-06-23 NJC Added properties to ensure vocab matched Geoscience Australia's vocab profile (http://linked.data.gov.au/def/ga-skos-profile). Just annotation properties, no new content. Agents (creator/publisher) now not text but RDF resource. Dates (create/modified) derived from editorial notes & existing date properties."@en ; - skos:definition "This scheme describes the concept space for Contact Type concepts, as defined by the IUGS Commission for Geoscience Information (CGI) Geoscience Terminology Working Group. By extension, it includes all concepts in this conceptScheme, as well as concepts in any previous versions of the scheme. Designed for use in the contactType property in GeoSciML Contact elements."@en ; - skos:editorialNote "This file contains the 2016 SKOS-RDF version of the CGI Contact Type vocabulary. Compilation and review in MS Excel spreadsheet, converted to MS Excel for SKOS generation using GSML_SKOS_fromXLS_2016.01.xslt."@en ; - skos:hasTopConcept ; - skos:prefLabel "Contact Type"@en ; -. diff --git a/tests/data/vocprez/input/dublin_core_terms.ttl b/tests/data/vocprez/input/dublin_core_terms.ttl deleted file mode 100755 index 89e4fb64..00000000 --- a/tests/data/vocprez/input/dublin_core_terms.ttl +++ /dev/null @@ -1,867 +0,0 @@ -@prefix rdf: . -@prefix owl: . -@prefix skos: . -@prefix dcam: . -@prefix dcterms: . -@prefix rdfs: . - - - dcterms:modified "2012-06-14"^^ ; - dcterms:publisher ; - dcterms:title "DCMI Metadata Terms - other"@en . 
- -dcterms:Agent - dcterms:issued "2008-01-14"^^ ; - a dcterms:AgentClass, rdfs:Class ; - rdfs:comment "A resource that acts or has the power to act."@en ; - rdfs:isDefinedBy ; - rdfs:label "Agent"@en . - -dcterms:AgentClass - dcterms:issued "2008-01-14"^^ ; - a rdfs:Class ; - rdfs:comment "A group of agents."@en ; - rdfs:isDefinedBy ; - rdfs:label "Agent Class"@en ; - rdfs:subClassOf rdfs:Class . - -dcterms:BibliographicResource - dcterms:issued "2008-01-14"^^ ; - a rdfs:Class ; - rdfs:comment "A book, article, or other documentary resource."@en ; - rdfs:isDefinedBy ; - rdfs:label "Bibliographic Resource"@en . - -dcterms:Box - dcterms:issued "2000-07-11"^^ ; - a rdfs:Datatype ; - rdfs:comment "The set of regions in space defined by their geographic coordinates according to the DCMI Box Encoding Scheme."@en ; - rdfs:isDefinedBy ; - rdfs:label "DCMI Box"@en ; - rdfs:seeAlso . - -dcterms:DCMIType - dcterms:issued "2000-07-11"^^ ; - a dcam:VocabularyEncodingScheme ; - rdfs:comment "The set of classes specified by the DCMI Type Vocabulary, used to categorize the nature or genre of the resource."@en ; - rdfs:isDefinedBy ; - rdfs:label "DCMI Type Vocabulary"@en ; - rdfs:seeAlso . - -dcterms:DDC - dcterms:issued "2000-07-11"^^ ; - a dcam:VocabularyEncodingScheme ; - rdfs:comment "The set of conceptual resources specified by the Dewey Decimal Classification."@en ; - rdfs:isDefinedBy ; - rdfs:label "DDC"@en ; - rdfs:seeAlso . - -dcterms:FileFormat - dcterms:issued "2008-01-14"^^ ; - a rdfs:Class ; - rdfs:comment "A digital resource format."@en ; - rdfs:isDefinedBy ; - rdfs:label "File Format"@en ; - rdfs:subClassOf dcterms:MediaType . - -dcterms:Frequency - dcterms:issued "2008-01-14"^^ ; - a rdfs:Class ; - rdfs:comment "A rate at which something recurs."@en ; - rdfs:isDefinedBy ; - rdfs:label "Frequency"@en . - -dcterms:IMT - dcterms:issued "2000-07-11"^^ ; - a dcam:VocabularyEncodingScheme ; - rdfs:comment "The set of media types specified by the Internet Assigned Numbers Authority."@en ; - rdfs:isDefinedBy ; - rdfs:label "IMT"@en ; - rdfs:seeAlso . - -dcterms:ISO3166 - dcterms:issued "2000-07-11"^^ ; - a rdfs:Datatype ; - rdfs:comment "The set of codes listed in ISO 3166-1 for the representation of names of countries."@en ; - rdfs:isDefinedBy ; - rdfs:label "ISO 3166"@en ; - rdfs:seeAlso . - -dcterms:ISO639-2 - dcterms:issued "2000-07-11"^^ ; - a rdfs:Datatype ; - rdfs:comment "The three-letter alphabetic codes listed in ISO639-2 for the representation of names of languages."@en ; - rdfs:isDefinedBy ; - rdfs:label "ISO 639-2"@en ; - rdfs:seeAlso . - -dcterms:ISO639-3 - dcterms:issued "2008-01-14"^^ ; - a rdfs:Datatype ; - rdfs:comment "The set of three-letter codes listed in ISO 639-3 for the representation of names of languages."@en ; - rdfs:isDefinedBy ; - rdfs:label "ISO 639-3"@en ; - rdfs:seeAlso . - -dcterms:Jurisdiction - dcterms:issued "2008-01-14"^^ ; - a rdfs:Class ; - rdfs:comment "The extent or range of judicial, law enforcement, or other authority."@en ; - rdfs:isDefinedBy ; - rdfs:label "Jurisdiction"@en ; - rdfs:subClassOf dcterms:LocationPeriodOrJurisdiction . - -dcterms:LCC - dcterms:issued "2000-07-11"^^ ; - a dcam:VocabularyEncodingScheme ; - rdfs:comment "The set of conceptual resources specified by the Library of Congress Classification."@en ; - rdfs:isDefinedBy ; - rdfs:label "LCC"@en ; - rdfs:seeAlso . 
- -dcterms:LCSH - dcterms:issued "2000-07-11"^^ ; - a dcam:VocabularyEncodingScheme ; - rdfs:comment "The set of labeled concepts specified by the Library of Congress Subject Headings."@en ; - rdfs:isDefinedBy ; - rdfs:label "LCSH"@en . - -dcterms:LicenseDocument - dcterms:issued "2008-01-14"^^ ; - a rdfs:Class ; - rdfs:comment "A legal document giving official permission to do something with a resource."@en ; - rdfs:isDefinedBy ; - rdfs:label "License Document"@en ; - rdfs:subClassOf dcterms:RightsStatement . - -dcterms:LinguisticSystem - dcterms:description "Written, spoken, sign, and computer languages are linguistic systems."@en ; - dcterms:issued "2008-01-14"^^ ; - a rdfs:Class ; - rdfs:comment "A system of signs, symbols, sounds, gestures, or rules used in communication."@en ; - rdfs:isDefinedBy ; - rdfs:label "Linguistic System"@en . - -dcterms:Location - dcterms:issued "2008-01-14"^^ ; - a rdfs:Class ; - rdfs:comment "A spatial region or named place."@en ; - rdfs:isDefinedBy ; - rdfs:label "Location"@en ; - rdfs:subClassOf dcterms:LocationPeriodOrJurisdiction . - -dcterms:LocationPeriodOrJurisdiction - dcterms:issued "2008-01-14"^^ ; - a rdfs:Class ; - rdfs:comment "A location, period of time, or jurisdiction."@en ; - rdfs:isDefinedBy ; - rdfs:label "Location, Period, or Jurisdiction"@en . - -dcterms:MESH - dcterms:issued "2000-07-11"^^ ; - a dcam:VocabularyEncodingScheme ; - rdfs:comment "The set of labeled concepts specified by the Medical Subject Headings."@en ; - rdfs:isDefinedBy ; - rdfs:label "MeSH"@en ; - rdfs:seeAlso . - -dcterms:MediaType - dcterms:issued "2008-01-14"^^ ; - a rdfs:Class ; - rdfs:comment "A file format or physical medium."@en ; - rdfs:isDefinedBy ; - rdfs:label "Media Type"@en ; - rdfs:subClassOf dcterms:MediaTypeOrExtent . - -dcterms:MediaTypeOrExtent - dcterms:issued "2008-01-14"^^ ; - a rdfs:Class ; - rdfs:comment "A media type or extent."@en ; - rdfs:isDefinedBy ; - rdfs:label "Media Type or Extent"@en . - -dcterms:MethodOfAccrual - dcterms:issued "2008-01-14"^^ ; - a rdfs:Class ; - rdfs:comment "A method by which resources are added to a collection."@en ; - rdfs:isDefinedBy ; - rdfs:label "Method of Accrual"@en . - -dcterms:MethodOfInstruction - dcterms:issued "2008-01-14"^^ ; - a rdfs:Class ; - rdfs:comment "A process that is used to engender knowledge, attitudes, and skills."@en ; - rdfs:isDefinedBy ; - rdfs:label "Method of Instruction"@en . - -dcterms:NLM - dcterms:issued "2005-06-13"^^ ; - a dcam:VocabularyEncodingScheme ; - rdfs:comment "The set of conceptual resources specified by the National Library of Medicine Classification."@en ; - rdfs:isDefinedBy ; - rdfs:label "NLM"@en ; - rdfs:seeAlso . - -dcterms:Period - dcterms:issued "2000-07-11"^^ ; - a rdfs:Datatype ; - rdfs:comment "The set of time intervals defined by their limits according to the DCMI Period Encoding Scheme."@en ; - rdfs:isDefinedBy ; - rdfs:label "DCMI Period"@en ; - rdfs:seeAlso . - -dcterms:PeriodOfTime - dcterms:issued "2008-01-14"^^ ; - a rdfs:Class ; - rdfs:comment "An interval of time that is named or defined by its start and end dates."@en ; - rdfs:isDefinedBy ; - rdfs:label "Period of Time"@en ; - rdfs:subClassOf dcterms:LocationPeriodOrJurisdiction . - -dcterms:PhysicalMedium - dcterms:description "Examples include paper, canvas, or DVD."@en ; - dcterms:issued "2008-01-14"^^ ; - a rdfs:Class ; - rdfs:comment "A physical material or carrier."@en ; - rdfs:isDefinedBy ; - rdfs:label "Physical Medium"@en ; - rdfs:subClassOf dcterms:MediaType . 
- -dcterms:PhysicalResource - dcterms:issued "2008-01-14"^^ ; - a rdfs:Class ; - rdfs:comment "A material thing."@en ; - rdfs:isDefinedBy ; - rdfs:label "Physical Resource"@en . - -dcterms:Point - dcterms:issued "2000-07-11"^^ ; - a rdfs:Datatype ; - rdfs:comment "The set of points in space defined by their geographic coordinates according to the DCMI Point Encoding Scheme."@en ; - rdfs:isDefinedBy ; - rdfs:label "DCMI Point"@en ; - rdfs:seeAlso . - -dcterms:Policy - dcterms:issued "2008-01-14"^^ ; - a rdfs:Class ; - rdfs:comment "A plan or course of action by an authority, intended to influence and determine decisions, actions, and other matters."@en ; - rdfs:isDefinedBy ; - rdfs:label "Policy"@en . - -dcterms:ProvenanceStatement - dcterms:issued "2008-01-14"^^ ; - a rdfs:Class ; - rdfs:comment "Any changes in ownership and custody of a resource since its creation that are significant for its authenticity, integrity, and interpretation."@en ; - rdfs:isDefinedBy ; - rdfs:label "Provenance Statement"@en . - -dcterms:RFC1766 - dcterms:issued "2000-07-11"^^ ; - a rdfs:Datatype ; - rdfs:comment "The set of tags, constructed according to RFC 1766, for the identification of languages."@en ; - rdfs:isDefinedBy ; - rdfs:label "RFC 1766"@en ; - rdfs:seeAlso . - -dcterms:RFC3066 - dcterms:description "RFC 3066 has been obsoleted by RFC 4646."@en ; - dcterms:issued "2002-07-13"^^ ; - a rdfs:Datatype ; - rdfs:comment "The set of tags constructed according to RFC 3066 for the identification of languages."@en ; - rdfs:isDefinedBy ; - rdfs:label "RFC 3066"@en ; - rdfs:seeAlso . - -dcterms:RFC4646 - dcterms:description "RFC 4646 obsoletes RFC 3066."@en ; - dcterms:issued "2008-01-14"^^ ; - a rdfs:Datatype ; - rdfs:comment "The set of tags constructed according to RFC 4646 for the identification of languages."@en ; - rdfs:isDefinedBy ; - rdfs:label "RFC 4646"@en ; - rdfs:seeAlso . - -dcterms:RFC5646 - dcterms:description "RFC 5646 obsoletes RFC 4646."@en ; - dcterms:issued "2010-10-11"^^ ; - a rdfs:Datatype ; - rdfs:comment "The set of tags constructed according to RFC 5646 for the identification of languages."@en ; - rdfs:isDefinedBy ; - rdfs:label "RFC 5646"@en ; - rdfs:seeAlso . - -dcterms:RightsStatement - dcterms:issued "2008-01-14"^^ ; - a rdfs:Class ; - rdfs:comment "A statement about the intellectual property rights (IPR) held in or over a resource, a legal document giving official permission to do something with a resource, or a statement about access rights."@en ; - rdfs:isDefinedBy ; - rdfs:label "Rights Statement"@en . - -dcterms:SizeOrDuration - dcterms:description "Examples include a number of pages, a specification of length, width, and breadth, or a period in hours, minutes, and seconds."@en ; - dcterms:issued "2008-01-14"^^ ; - a rdfs:Class ; - rdfs:comment "A dimension or extent, or a time taken to play or execute."@en ; - rdfs:isDefinedBy ; - rdfs:label "Size or Duration"@en ; - rdfs:subClassOf dcterms:MediaTypeOrExtent . - -dcterms:Standard - dcterms:issued "2008-01-14"^^ ; - a rdfs:Class ; - rdfs:comment "A reference point against which other things can be evaluated or compared."@en ; - rdfs:isDefinedBy ; - rdfs:label "Standard"@en . - -dcterms:TGN - dcterms:issued "2000-07-11"^^ ; - a dcam:VocabularyEncodingScheme ; - rdfs:comment "The set of places specified by the Getty Thesaurus of Geographic Names."@en ; - rdfs:isDefinedBy ; - rdfs:label "TGN"@en ; - rdfs:seeAlso . 
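The Dublin Core terms above fall into two groups: dcam:VocabularyEncodingScheme instances (DCMIType, DDC, IMT, LCC, LCSH, MESH, NLM, TGN) and rdfs:Datatype syntax schemes (Box, ISO3166, the ISO 639 code lists, Period, Point, the RFC language tags, W3CDTF). A small sketch that separates the two kinds, assuming the ontology file itself is loaded as data:

    PREFIX dcam: <http://purl.org/dc/dcam/>
    PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>

    # Split DCMI vocabulary encoding schemes from datatype (syntax) schemes.
    SELECT ?term ?kind
    WHERE {
      VALUES ?kind { dcam:VocabularyEncodingScheme rdfs:Datatype }
      ?term a ?kind .
    }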
-
-dcterms:UDC
- dcterms:issued "2000-07-11"^^ ;
- a dcam:VocabularyEncodingScheme ;
- rdfs:comment "The set of conceptual resources specified by the Universal Decimal Classification."@en ;
- rdfs:isDefinedBy ;
- rdfs:label "UDC"@en ;
- rdfs:seeAlso .
-
-dcterms:URI
- dcterms:issued "2000-07-11"^^ ;
- a rdfs:Datatype ;
- rdfs:comment "The set of identifiers constructed according to the generic syntax for Uniform Resource Identifiers as specified by the Internet Engineering Task Force."@en ;
- rdfs:isDefinedBy ;
- rdfs:label "URI"@en ;
- rdfs:seeAlso .
-
-dcterms:W3CDTF
- dcterms:issued "2000-07-11"^^ ;
- a rdfs:Datatype ;
- rdfs:comment "The set of dates and times constructed according to the W3C Date and Time Formats Specification."@en ;
- rdfs:isDefinedBy ;
- rdfs:label "W3C-DTF"@en ;
- rdfs:seeAlso .
-
-dcterms:abstract
- dcterms:issued "2000-07-11"^^ ;
- a rdf:Property ;
- rdfs:comment "A summary of the resource."@en ;
- rdfs:isDefinedBy ;
- rdfs:label "Abstract"@en ;
- rdfs:subPropertyOf , dcterms:description .
-
-dcterms:accessRights
- dcam:rangeIncludes dcterms:RightsStatement ;
- dcterms:description "Access Rights may include information regarding access or restrictions based on privacy, security, or other policies."@en ;
- dcterms:issued "2003-02-15"^^ ;
- a rdf:Property ;
- rdfs:comment "Information about who can access the resource or an indication of its security status."@en ;
- rdfs:isDefinedBy ;
- rdfs:label "Access Rights"@en ;
- rdfs:subPropertyOf , dcterms:rights .
-
-dcterms:accrualMethod
- dcam:rangeIncludes dcterms:MethodOfAccrual ;
- dcterms:description "Recommended practice is to use a value from the Collection Description Accrual Method Vocabulary [[DCMI-ACCRUALMETHOD](https://dublincore.org/groups/collections/accrual-method/)]."@en ;
- dcterms:issued "2005-06-13"^^ ;
- a rdf:Property ;
- rdfs:comment "The method by which items are added to a collection."@en ;
- rdfs:domain ;
- rdfs:isDefinedBy ;
- rdfs:label "Accrual Method"@en .
-
-dcterms:accrualPeriodicity
- dcam:rangeIncludes dcterms:Frequency ;
- dcterms:description "Recommended practice is to use a value from the Collection Description Frequency Vocabulary [[DCMI-COLLFREQ](https://dublincore.org/groups/collections/frequency/)]."@en ;
- dcterms:issued "2005-06-13"^^ ;
- a rdf:Property ;
- rdfs:comment "The frequency with which items are added to a collection."@en ;
- rdfs:domain ;
- rdfs:isDefinedBy ;
- rdfs:label "Accrual Periodicity"@en .
-
-dcterms:accrualPolicy
- dcam:rangeIncludes dcterms:Policy ;
- dcterms:description "Recommended practice is to use a value from the Collection Description Accrual Policy Vocabulary [[DCMI-ACCRUALPOLICY](https://dublincore.org/groups/collections/accrual-policy/)]."@en ;
- dcterms:issued "2005-06-13"^^ ;
- a rdf:Property ;
- rdfs:comment "The policy governing the addition of items to a collection."@en ;
- rdfs:domain ;
- rdfs:isDefinedBy ;
- rdfs:label "Accrual Policy"@en .
-
-dcterms:alternative
- dcterms:description "The distinction between titles and alternative titles is application-specific."@en ;
- dcterms:issued "2000-07-11"^^ ;
- a rdf:Property ;
- rdfs:comment "An alternative name for the resource."@en ;
- rdfs:isDefinedBy ;
- rdfs:label "Alternative Title"@en ;
- rdfs:range rdfs:Literal ;
- rdfs:subPropertyOf , dcterms:title .
- -dcterms:audience - dcam:rangeIncludes dcterms:AgentClass ; - dcterms:description "Recommended practice is to use this property with non-literal values from a vocabulary of audience types."@en ; - dcterms:issued "2001-05-21"^^ ; - a rdf:Property ; - rdfs:comment "A class of agents for whom the resource is intended or useful."@en ; - rdfs:isDefinedBy ; - rdfs:label "Audience"@en . - -dcterms:available - dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en ; - dcterms:issued "2000-07-11"^^ ; - a rdf:Property ; - rdfs:comment "Date that the resource became or will become available."@en ; - rdfs:isDefinedBy ; - rdfs:label "Date Available"@en ; - rdfs:range rdfs:Literal ; - rdfs:subPropertyOf , dcterms:date . - -dcterms:bibliographicCitation - dcterms:description "Recommended practice is to include sufficient bibliographic detail to identify the resource as unambiguously as possible."@en ; - dcterms:issued "2003-02-15"^^ ; - a rdf:Property ; - rdfs:comment "A bibliographic reference for the resource."@en ; - rdfs:isDefinedBy ; - rdfs:label "Bibliographic Citation"@en ; - rdfs:range rdfs:Literal ; - rdfs:subPropertyOf , dcterms:identifier . - -dcterms:conformsTo - dcam:rangeIncludes dcterms:Standard ; - dcterms:issued "2001-05-21"^^ ; - a rdf:Property ; - rdfs:comment "An established standard to which the described resource conforms."@en ; - rdfs:isDefinedBy ; - rdfs:label "Conforms To"@en ; - rdfs:subPropertyOf , dcterms:relation . - -dcterms:contributor - dcam:rangeIncludes dcterms:Agent ; - dcterms:description "The guidelines for using names of persons or organizations as creators apply to contributors."@en ; - dcterms:issued "2008-01-14"^^ ; - a rdf:Property ; - rdfs:comment "An entity responsible for making contributions to the resource."@en ; - rdfs:isDefinedBy ; - rdfs:label "Contributor"@en ; - rdfs:subPropertyOf . - -dcterms:coverage - dcam:rangeIncludes dcterms:Jurisdiction, dcterms:Location, dcterms:Period ; - dcterms:description "Spatial topic and spatial applicability may be a named place or a location specified by its geographic coordinates. Temporal topic may be a named period, date, or date range. A jurisdiction may be a named administrative entity or a geographic place to which the resource applies. Recommended practice is to use a controlled vocabulary such as the Getty Thesaurus of Geographic Names [[TGN](https://www.getty.edu/research/tools/vocabulary/tgn/index.html)]. Where appropriate, named places or time periods may be used in preference to numeric identifiers such as sets of coordinates or date ranges. Because coverage is so broadly defined, it is preferable to use the more specific subproperties Temporal Coverage and Spatial Coverage."@en ; - dcterms:issued "2008-01-14"^^ ; - a rdf:Property ; - rdfs:comment "The spatial or temporal topic of the resource, spatial applicability of the resource, or jurisdiction under which the resource is relevant."@en ; - rdfs:isDefinedBy ; - rdfs:label "Coverage"@en ; - rdfs:subPropertyOf . - -dcterms:created - dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en ; - dcterms:issued "2000-07-11"^^ ; - a rdf:Property ; - rdfs:comment "Date of creation of the resource."@en ; - rdfs:isDefinedBy ; - rdfs:label "Date Created"@en ; - rdfs:range rdfs:Literal ; - rdfs:subPropertyOf , dcterms:date . 
- -dcterms:creator - dcam:rangeIncludes dcterms:Agent ; - dcterms:description "Recommended practice is to identify the creator with a URI. If this is not possible or feasible, a literal value that identifies the creator may be provided."@en ; - dcterms:issued "2008-01-14"^^ ; - a rdf:Property ; - rdfs:comment "An entity responsible for making the resource."@en ; - rdfs:isDefinedBy ; - rdfs:label "Creator"@en ; - rdfs:subPropertyOf , dcterms:contributor ; - owl:equivalentProperty . - -dcterms:date - dcterms:description "Date may be used to express temporal information at any level of granularity. Recommended practice is to express the date, date/time, or period of time according to ISO 8601-1 [[ISO 8601-1](https://www.iso.org/iso-8601-date-and-time-format.html)] or a published profile of the ISO standard, such as the W3C Note on Date and Time Formats [[W3CDTF](https://www.w3.org/TR/NOTE-datetime)] or the Extended Date/Time Format Specification [[EDTF](http://www.loc.gov/standards/datetime/)]. If the full date is unknown, month and year (YYYY-MM) or just year (YYYY) may be used. Date ranges may be specified using ISO 8601 period of time specification in which start and end dates are separated by a '/' (slash) character. Either the start or end date may be missing."@en ; - dcterms:issued "2008-01-14"^^ ; - a rdf:Property ; - rdfs:comment "A point or period of time associated with an event in the lifecycle of the resource."@en ; - rdfs:isDefinedBy ; - rdfs:label "Date"@en ; - rdfs:range rdfs:Literal ; - rdfs:subPropertyOf . - -dcterms:dateAccepted - dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty. Examples of resources to which a date of acceptance may be relevant are a thesis (accepted by a university department) or an article (accepted by a journal)."@en ; - dcterms:issued "2002-07-13"^^ ; - a rdf:Property ; - rdfs:comment "Date of acceptance of the resource."@en ; - rdfs:isDefinedBy ; - rdfs:label "Date Accepted"@en ; - rdfs:range rdfs:Literal ; - rdfs:subPropertyOf , dcterms:date . - -dcterms:dateCopyrighted - dcterms:description "Typically a year. Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en ; - dcterms:issued "2002-07-13"^^ ; - a rdf:Property ; - rdfs:comment "Date of copyright of the resource."@en ; - rdfs:isDefinedBy ; - rdfs:label "Date Copyrighted"@en ; - rdfs:range rdfs:Literal ; - rdfs:subPropertyOf , dcterms:date . - -dcterms:dateSubmitted - dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty. Examples of resources to which a 'Date Submitted' may be relevant include a thesis (submitted to a university department) or an article (submitted to a journal)."@en ; - dcterms:issued "2002-07-13"^^ ; - a rdf:Property ; - rdfs:comment "Date of submission of the resource."@en ; - rdfs:isDefinedBy ; - rdfs:label "Date Submitted"@en ; - rdfs:range rdfs:Literal ; - rdfs:subPropertyOf , dcterms:date . 
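The date properties above (available, created, dateAccepted, dateCopyrighted, dateSubmitted) are all declared rdfs:subPropertyOf dcterms:date, so a consumer can harvest every date-like statement through the subproperty lattice instead of naming each property. A sketch using an explicit property path rather than relying on RDFS entailment, with the terms vocabulary loaded alongside the instance data:

    PREFIX dcterms: <http://purl.org/dc/terms/>
    PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>

    # Find every date-flavoured value by following any property that is
    # a (transitive, proper) subproperty of dcterms:date.
    SELECT ?resource ?dateProperty ?value
    WHERE {
      ?dateProperty rdfs:subPropertyOf+ dcterms:date .
      ?resource ?dateProperty ?value .
    }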
- -dcterms:description - dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en ; - dcterms:issued "2008-01-14"^^ ; - a rdf:Property ; - rdfs:comment "An account of the resource."@en ; - rdfs:isDefinedBy ; - rdfs:label "Description"@en ; - rdfs:subPropertyOf . - -dcterms:educationLevel - dcam:rangeIncludes dcterms:AgentClass ; - dcterms:issued "2002-07-13"^^ ; - a rdf:Property ; - rdfs:comment "A class of agents, defined in terms of progression through an educational or training context, for which the described resource is intended."@en ; - rdfs:isDefinedBy ; - rdfs:label "Audience Education Level"@en ; - rdfs:subPropertyOf dcterms:audience . - -dcterms:extent - dcam:rangeIncludes dcterms:SizeOrDuration ; - dcterms:description "Recommended practice is to specify the file size in megabytes and duration in ISO 8601 format."@en ; - dcterms:issued "2000-07-11"^^ ; - a rdf:Property ; - rdfs:comment "The size or duration of the resource."@en ; - rdfs:isDefinedBy ; - rdfs:label "Extent"@en ; - rdfs:subPropertyOf , dcterms:format . - -dcterms:format - dcam:rangeIncludes dcterms:Extent, dcterms:MediaType ; - dcterms:description "Recommended practice is to use a controlled vocabulary where available. For example, for file formats one could use the list of Internet Media Types [[MIME](https://www.iana.org/assignments/media-types/media-types.xhtml)]. Examples of dimensions include size and duration."@en ; - dcterms:issued "2008-01-14"^^ ; - a rdf:Property ; - rdfs:comment "The file format, physical medium, or dimensions of the resource."@en ; - rdfs:isDefinedBy ; - rdfs:label "Format"@en ; - rdfs:subPropertyOf . - -dcterms:hasFormat - dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of Is Format Of."@en ; - dcterms:issued "2000-07-11"^^ ; - a rdf:Property ; - rdfs:comment "A related resource that is substantially the same as the pre-existing described resource, but in another format."@en ; - rdfs:isDefinedBy ; - rdfs:label "Has Format"@en ; - rdfs:subPropertyOf , dcterms:relation . - -dcterms:hasPart - dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of Is Part Of."@en ; - dcterms:issued "2000-07-11"^^ ; - a rdf:Property ; - rdfs:comment "A related resource that is included either physically or logically in the described resource."@en ; - rdfs:isDefinedBy ; - rdfs:label "Has Part"@en ; - rdfs:subPropertyOf , dcterms:relation . - -dcterms:hasVersion - dcterms:description "Changes in version imply substantive changes in content rather than differences in format. This property is intended to be used with non-literal values. This property is an inverse property of Is Version Of."@en ; - dcterms:issued "2000-07-11"^^ ; - a rdf:Property ; - rdfs:comment "A related resource that is a version, edition, or adaptation of the described resource."@en ; - rdfs:isDefinedBy ; - rdfs:label "Has Version"@en ; - rdfs:subPropertyOf , dcterms:relation . - -dcterms:identifier - dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). 
Persistent identifiers should be provided as HTTP URIs."@en ;
-    dcterms:issued "2008-01-14"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "An unambiguous reference to the resource within a given context."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Identifier"@en ;
-    rdfs:range rdfs:Literal ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/identifier> .
-
-dcterms:instructionalMethod
-    dcam:rangeIncludes dcterms:MethodOfInstruction ;
-    dcterms:description "Instructional Method typically includes ways of presenting instructional materials or conducting instructional activities, patterns of learner-to-learner and learner-to-instructor interactions, and mechanisms by which group and individual levels of learning are measured. Instructional methods include all aspects of the instruction and learning processes from planning and implementation through evaluation and feedback."@en ;
-    dcterms:issued "2005-06-13"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "A process, used to engender knowledge, attitudes and skills, that the described resource is designed to support."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Instructional Method"@en .
-
-dcterms:isFormatOf
-    dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of Has Format."@en ;
-    dcterms:issued "2000-07-11"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "A pre-existing related resource that is substantially the same as the described resource, but in another format."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Is Format Of"@en ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/relation> , dcterms:relation .
-
-dcterms:isPartOf
-    dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of Has Part."@en ;
-    dcterms:issued "2000-07-11"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "A related resource in which the described resource is physically or logically included."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Is Part Of"@en ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/relation> , dcterms:relation .
-
-dcterms:isReferencedBy
-    dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of References."@en ;
-    dcterms:issued "2000-07-11"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "A related resource that references, cites, or otherwise points to the described resource."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Is Referenced By"@en ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/relation> , dcterms:relation .
-
-dcterms:isReplacedBy
-    dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of Replaces."@en ;
-    dcterms:issued "2000-07-11"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "A related resource that supplants, displaces, or supersedes the described resource."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Is Replaced By"@en ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/relation> , dcterms:relation .
-
-dcterms:isRequiredBy
-    dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of Requires."@en ;
-    dcterms:issued "2000-07-11"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "A related resource that requires the described resource to support its function, delivery, or coherence."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Is Required By"@en ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/relation> , dcterms:relation .
-
-dcterms:isVersionOf
-    dcterms:description "Changes in version imply substantive changes in content rather than differences in format. This property is intended to be used with non-literal values. This property is an inverse property of Has Version."@en ;
-    dcterms:issued "2000-07-11"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "A related resource of which the described resource is a version, edition, or adaptation."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Is Version Of"@en ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/relation> , dcterms:relation .
-
-dcterms:issued
-    dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en ;
-    dcterms:issued "2000-07-11"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "Date of formal issuance of the resource."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Date Issued"@en ;
-    rdfs:range rdfs:Literal ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/date> , dcterms:date .
-
-dcterms:language
-    dcam:rangeIncludes dcterms:LinguisticSystem ;
-    dcterms:description "Recommended practice is to use either a non-literal value representing a language from a controlled vocabulary such as ISO 639-2 or ISO 639-3, or a literal value consisting of an IETF Best Current Practice 47 [[IETF-BCP47](https://tools.ietf.org/html/bcp47)] language tag."@en ;
-    dcterms:issued "2008-01-14"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "A language of the resource."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Language"@en ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/language> .
-
-dcterms:license
-    dcam:rangeIncludes dcterms:LicenseDocument ;
-    dcterms:description "Recommended practice is to identify the license document with a URI. If this is not possible or feasible, a literal value that identifies the license may be provided."@en ;
-    dcterms:issued "2004-06-14"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "A legal document giving official permission to do something with the resource."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "License"@en ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/rights> , dcterms:rights .
-
-dcterms:mediator
-    dcam:rangeIncludes dcterms:AgentClass ;
-    dcterms:description "In an educational context, a mediator might be a parent, teacher, teaching assistant, or care-giver."@en ;
-    dcterms:issued "2001-05-21"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "An entity that mediates access to the resource."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Mediator"@en ;
-    rdfs:subPropertyOf dcterms:audience .
-
-dcterms:medium
-    dcam:domainIncludes dcterms:PhysicalResource ;
-    dcam:rangeIncludes dcterms:PhysicalMedium ;
-    dcterms:issued "2000-07-11"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "The material or physical carrier of the resource."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Medium"@en ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/format> , dcterms:format .
-
-dcterms:modified
-    dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en ;
-    dcterms:issued "2000-07-11"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "Date on which the resource was changed."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Date Modified"@en ;
-    rdfs:range rdfs:Literal ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/date> , dcterms:date .
-
-dcterms:provenance
-    dcam:rangeIncludes dcterms:ProvenanceStatement ;
-    dcterms:description "The statement may include a description of any changes successive custodians made to the resource."@en ;
-    dcterms:issued "2004-09-20"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "A statement of any changes in ownership and custody of the resource since its creation that are significant for its authenticity, integrity, and interpretation."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Provenance"@en .
-
-dcterms:publisher
-    dcam:rangeIncludes dcterms:Agent ;
-    dcterms:issued "2008-01-14"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "An entity responsible for making the resource available."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Publisher"@en ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/publisher> .
-
-dcterms:references
-    dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of Is Referenced By."@en ;
-    dcterms:issued "2000-07-11"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "A related resource that is referenced, cited, or otherwise pointed to by the described resource."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "References"@en ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/relation> , dcterms:relation .
-
-dcterms:relation
-    dcterms:description "Recommended practice is to identify the related resource by means of a URI. If this is not possible or feasible, a string conforming to a formal identification system may be provided."@en ;
-    dcterms:issued "2008-01-14"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "A related resource."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Relation"@en ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/relation> .
-
-dcterms:replaces
-    dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of Is Replaced By."@en ;
-    dcterms:issued "2000-07-11"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "A related resource that is supplanted, displaced, or superseded by the described resource."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Replaces"@en ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/relation> , dcterms:relation .
-
-dcterms:requires
-    dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of Is Required By."@en ;
-    dcterms:issued "2000-07-11"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "A related resource that is required by the described resource to support its function, delivery, or coherence."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Requires"@en ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/relation> , dcterms:relation .
-
-dcterms:rights
-    dcam:rangeIncludes dcterms:RightsStatement ;
-    dcterms:description "Typically, rights information includes a statement about various property rights associated with the resource, including intellectual property rights. Recommended practice is to refer to a rights statement with a URI. If this is not possible or feasible, a literal value (name, label, or short text) may be provided."@en ;
-    dcterms:issued "2008-01-14"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "Information about rights held in and over the resource."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Rights"@en ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/rights> .
-
-dcterms:rightsHolder
-    dcam:rangeIncludes dcterms:Agent ;
-    dcterms:description "Recommended practice is to refer to the rights holder with a URI. If this is not possible or feasible, a literal value that identifies the rights holder may be provided."@en ;
-    dcterms:issued "2004-06-14"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "A person or organization owning or managing rights over the resource."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Rights Holder"@en .
-
-dcterms:source
-    dcterms:description "This property is intended to be used with non-literal values. The described resource may be derived from the related resource in whole or in part. Best practice is to identify the related resource by means of a URI or a string conforming to a formal identification system."@en ;
-    dcterms:issued "2008-01-14"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "A related resource from which the described resource is derived."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Source"@en ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/source> , dcterms:relation .
-
-dcterms:spatial
-    dcam:rangeIncludes dcterms:Location ;
-    dcterms:issued "2000-07-11"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "Spatial characteristics of the resource."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Spatial Coverage"@en ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/coverage> , dcterms:coverage .
-
-dcterms:subject
-    dcterms:description "Recommended practice is to refer to the subject with a URI. If this is not possible or feasible, a literal value that identifies the subject may be provided. Both should preferably refer to a subject in a controlled vocabulary."@en ;
-    dcterms:issued "2008-01-14"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "A topic of the resource."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Subject"@en ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/subject> .
-
-dcterms:tableOfContents
-    dcterms:issued "2000-07-11"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "A list of subunits of the resource."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Table Of Contents"@en ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/description> , dcterms:description .
-
-dcterms:temporal
-    dcam:rangeIncludes dcterms:PeriodOfTime ;
-    dcterms:issued "2000-07-11"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "Temporal characteristics of the resource."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Temporal Coverage"@en ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/coverage> , dcterms:coverage .
-
-dcterms:title
-    dcterms:issued "2008-01-14"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "A name given to the resource."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Title"@en ;
-    rdfs:range rdfs:Literal ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/title> .
-
-dcterms:type
-    dcterms:description "Recommended practice is to use a controlled vocabulary such as the DCMI Type Vocabulary [[DCMI-TYPE](http://dublincore.org/documents/dcmi-type-vocabulary/)]. To describe the file format, physical medium, or dimensions of the resource, use the property Format."@en ;
-    dcterms:issued "2008-01-14"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "The nature or genre of the resource."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Type"@en ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/type> .
-
-dcterms:valid
-    dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en ;
-    dcterms:issued "2000-07-11"^^<http://www.w3.org/2001/XMLSchema#date> ;
-    a rdf:Property ;
-    rdfs:comment "Date (often a range) of validity of a resource."@en ;
-    rdfs:isDefinedBy <http://purl.org/dc/terms/> ;
-    rdfs:label "Date Valid"@en ;
-    rdfs:range rdfs:Literal ;
-    rdfs:subPropertyOf <http://purl.org/dc/elements/1.1/date> , dcterms:date .
diff --git a/tests/data/vocprez/input/reg-status.ttl b/tests/data/vocprez/input/reg-status.ttl
deleted file mode 100755
index 1231130d..00000000
--- a/tests/data/vocprez/input/reg-status.ttl
+++ /dev/null
@@ -1,213 +0,0 @@
-PREFIX cs: <https://linked.data.gov.au/def/reg-statuses>
-PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
-PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
-PREFIX owl: <http://www.w3.org/2002/07/owl#>
-PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
-PREFIX sdo: <https://schema.org/>
-PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
-PREFIX reg: <http://purl.org/linked-data/registry#>
-PREFIX dcterms: <http://purl.org/dc/terms/>
-PREFIX vann: <http://purl.org/vocab/vann/>
-PREFIX : <https://linked.data.gov.au/def/reg-statuses/>
-
-
-<https://linked.data.gov.au/def/reg-statuses>
-    a owl:Ontology , skos:ConceptScheme;
-    skos:prefLabel "Registry Status Vocabulary"@en ;
-    skos:definition """This vocabulary is a re-published and only marginally changed version of the Registry Ontology's (http://epimorphics.com/public/vocabulary/Registry.html) *Status* vocabulary (online in RDF: http://purl.org/linked-data/registry). The only real change to content has been the addition of the term `unstable`.
-This re-publication has been performed to allow the IRIs of each vocab term (skos:Concept) to resolve to both human-readable and machine-readable forms of content (HTML and RDF), using HTTP content negotiation.
-
-Note that just like the original form of this vocabulary, while it is a SKOS vocabulary implemented as a single skos:ConceptScheme, it is also an OWL Ontology and that each *Status* is both a skos:Concept and a reg:Status individual."""@en ;
-    owl:versionIRI <https://linked.data.gov.au/def/reg-statuses/2.1> ;
-    owl:versionInfo
-        "2.1 - 2023-05 - added colours" ,
-        "2.0 - 2023-01 - addition of Concepts addition & original, as per updated Registry Item status codes" ,
-        "1.2 - 2021-11 - changes IRI to reg-statuses; added unstable" ,
-        "1.1 - 2020-06 - altered structure & metadata, not content, to conform to Vocab Publication Profile" ,
-        "1.0 - 2018-06 - as per Registry Ontology original" ;
-    dcterms:contributor ;
-    vann:preferredNamespaceUri "https://linked.data.gov.au/def/reg-status/"^^xsd:string ;
-    dcterms:creator ;
-    vann:preferredNamespacePrefix "reg-status"@en ;
-    dcterms:publisher ;
-    rdfs:seeAlso ;
-    dcterms:modified "2023-05-26"^^xsd:date ;
-    dcterms:rights "(c) Commonwealth of Australia 2021"@en ;
-    dcterms:created "2018-07-23"^^xsd:date ;
-    dcterms:source <http://purl.org/linked-data/registry> ;
-    skos:hasTopConcept :accepted , :notAccepted ;
-.
-
-
-    a sdo:Organization ;
-    sdo:name "Epimorphics" ;
-    sdo:url "https://www.epimorphics.com"^^xsd:anyURI ;
-.
-
-
-    a sdo:Organization ;
-    sdo:name "Australian Government Linked Data Working Group" ;
-    sdo:url "http://www.linked.data.gov.au"^^xsd:anyURI ;
-.
-
-
-    a sdo:Person ;
-    sdo:honorificPrefix "Dr" ;
-    sdo:name "Nicholas J. Car" ;
-    sdo:email "nicholas.car@surroundaustralia.com"^^xsd:anyURI ;
-    sdo:affiliation ;
-.
-
-
-    a sdo:Organization ;
-    sdo:name "SURROUND Australia Pty Ltd" ;
-    sdo:url "https://surroundaustralia.com"^^xsd:anyURI ;
-.
-
-reg:Status a owl:Class;
-    skos:prefLabel "Status"@en;
-    skos:definition "Open set of status codes for entries in a register"@en;
-    rdfs:subClassOf skos:Concept;
-.
-
-:accepted a skos:Concept, reg:Status ;
-    owl:sameAs reg:statusAccepted ;
-    skos:prefLabel "accepted"@en;
-    skos:definition "An entry that has been accepted for use and is visible in the default register listing. Includes entries that have since been retired or superseded."@en;
-    skos:topConceptOf cs: ;
-    skos:inScheme cs: ;
-    rdfs:isDefinedBy cs: ;
-    sdo:color "#1bc13f" ;
-.
-
-:addition a skos:Concept, reg:Status ;
-    skos:prefLabel "addition"@en;
-    skos:definition "The item's status is stable and was supplied to the registry after initial creation"@en;
-    skos:inScheme cs: ;
-    rdfs:isDefinedBy cs: ;
-    skos:broader :stable ;
-    sdo:color "#4ac11b" ;
-.
-
-:deprecated a skos:Concept, reg:Status ;
-    owl:sameAs reg:statusDeprecated ;
-    skos:prefLabel "deprecated"@en;
-    skos:definition "An entry that has been Retired or Superseded or has become Unstable and is no longer to be used."@en;
-    skos:inScheme cs: ;
-    rdfs:isDefinedBy cs: ;
-    skos:broader :accepted ;
-    sdo:color "#a86a0d" ;
-.
-
-:experimental a skos:Concept, reg:Status ;
-    owl:sameAs reg:statusExperimental ;
-    skos:prefLabel "experimental"@en;
-    skos:altLabel "provisional"@en;
-    skos:definition "An entry that has been accepted into the register temporarily and may be subject to change or withdrawal."@en;
-    skos:inScheme cs: ;
-    rdfs:isDefinedBy cs: ;
-    skos:broader :valid ;
-    sdo:color "#eae72c" ;
-.
-
-:invalid a skos:Concept, reg:Status ;
-    owl:sameAs reg:statusInvalid ;
-    skos:prefLabel "invalid"@en;
-    skos:definition "An entry which has been invalidated due to serious flaws, distinct from retirement."@en;
-    skos:inScheme cs: ;
-    rdfs:isDefinedBy cs: ;
-    skos:broader :notAccepted ;
-    sdo:color "#ea3c2c" ;
-.
-
-:notAccepted a skos:Concept, reg:Status ;
-    owl:sameAs reg:statusNotAccepted ;
-    skos:prefLabel "notAccepted"@en;
-    skos:definition "An entry that should not be visible in the default register listing."@en;
-    skos:topConceptOf cs: ;
-    skos:inScheme cs: ;
-    rdfs:isDefinedBy cs: ;
-    sdo:color "#ea9e2c" ;
-.
-
-:original a skos:Concept, reg:Status ;
-    skos:prefLabel "original"@en;
-    skos:definition "The item's status is stable and was supplied to the registry at the time of initial creation."@en;
-    skos:inScheme cs: ;
-    rdfs:isDefinedBy cs: ;
-    skos:broader :stable ;
-    sdo:color "#38a30e" ;
-.
-
-:reserved a skos:Concept, reg:Status ;
-    owl:sameAs reg:statusReserved ;
-    skos:prefLabel "reserved"@en;
-    skos:definition "A reserved entry allocated for some as yet undetermined future use."@en;
-    skos:inScheme cs: ;
-    rdfs:isDefinedBy cs: ;
-    skos:broader :notAccepted ;
-    sdo:color "#9b8d79" ;
-.
-
-:retired a skos:Concept, reg:Status ;
-    owl:sameAs reg:statusRetired ;
-    skos:prefLabel "retired"@en;
-    skos:altLabel "withdrawn"@en;
-    skos:definition "An entry that has been withdrawn from use."@en;
-    skos:inScheme cs: ;
-    rdfs:isDefinedBy cs: ;
-    skos:broader :deprecated ;
-    sdo:color "#ad5b24" ;
-.
-
-:stable a skos:Concept, reg:Status ;
-    owl:sameAs reg:statusStable ;
-    skos:prefLabel "stable"@en;
-    skos:definition "An entry that is seen as having a reasonable measure of stability, may be used to mark the full adoption of a previously 'experimental' entry."@en;
-    skos:inScheme cs: ;
-    rdfs:isDefinedBy cs: ;
-    skos:broader :valid ;
-    sdo:color "#2e8c09" ;
-.
-
-:submitted a skos:Concept, reg:Status ;
-    owl:sameAs reg:statusSubmitted ;
-    skos:prefLabel "submitted"@en;
-    skos:definition "A proposed entry which is not yet approved for use."@en;
-    skos:inScheme cs: ;
-    rdfs:isDefinedBy cs: ;
-    skos:broader :notAccepted ;
-    sdo:color "#248bad" ;
-.
-
-:superseded a skos:Concept, reg:Status ;
-    owl:sameAs reg:statusSuperseded ;
-    skos:prefLabel "superseded"@en;
-    skos:altLabel "replaced"@en;
-    skos:definition "An entry that has been replaced by a new alternative which should be used instead."@en;
-    skos:inScheme cs: ;
-    rdfs:isDefinedBy cs: ;
-    skos:broader :deprecated ;
-    sdo:color "#ad7624" ;
-.
-
-:unstable a skos:Concept, reg:Status ;
-    rdfs:isDefinedBy cs: ;
-    skos:prefLabel "unstable"@en;
-    skos:definition "An entry that is not seen as having a reasonable measure of stability. This status is expected to be allocated to entries that were once Stable but have become Unstable, due to a management or technical mishap, rather than being allocated to resources before they become Stable. Those resources should be allocated Experimental."@en;
-    skos:inScheme cs: ;
-    skos:broader :valid ;
-    sdo:color "#678c09" ;
-.
-
-:valid a skos:Concept, reg:Status ;
-    owl:sameAs reg:statusValid ;
-    skos:prefLabel "valid"@en;
-    skos:definition "An entry that has been accepted into the register and is deemed fit for use."@en;
-    skos:inScheme cs: ;
-    rdfs:isDefinedBy cs: ;
-    skos:broader :accepted ;
-    sdo:color "#36a80d" ;
-.
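[Editor's note: the file deleted above is a plain SKOS concept scheme whose statuses hang off two top concepts, :accepted and :notAccepted, via skos:broader links. A minimal sketch of walking that hierarchy with rdflib follows; it is not part of the patch, the local file name is hypothetical, and the reg-statuses namespace is the reconstruction used above.]

from rdflib import Graph, Namespace
from rdflib.namespace import SKOS

# Assumed concept namespace, matching the reconstructed "PREFIX :" above.
REG_STATUS = Namespace("https://linked.data.gov.au/def/reg-statuses/")

g = Graph().parse("reg-status.ttl", format="turtle")  # hypothetical local copy

# transitive_subjects() follows skos:broader backwards, yielding :accepted
# itself plus everything beneath it (:valid, :stable, :deprecated, :retired, ...).
for concept in g.transitive_subjects(SKOS.broader, REG_STATUS["accepted"]):
    print(concept, g.value(concept, SKOS.prefLabel))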
diff --git a/tests/data/vocprez/input/vocab-derivation-modes.ttl b/tests/data/vocprez/input/vocab-derivation-modes.ttl
deleted file mode 100755
index 1e9025ed..00000000
--- a/tests/data/vocprez/input/vocab-derivation-modes.ttl
+++ /dev/null
@@ -1,127 +0,0 @@
-PREFIX :
-PREFIX agldwgstatus: <https://linked.data.gov.au/def/reg-statuses/>
-PREFIX cs:
-PREFIX dcterms: <http://purl.org/dc/terms/>
-PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
-PREFIX reg: <http://purl.org/linked-data/registry#>
-PREFIX sdo: <https://schema.org/>
-PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
-PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
-
-:direct
-    a skos:Concept ;
-    dcterms:provenance "Created for the MER catalogue upgrade project, 2022"@en ;
-    reg:status agldwgstatus:original ;
-    rdfs:isDefinedBy cs: ;
-    skos:definition "Derivation without alteration"@en ;
-    skos:inScheme cs: ;
-    skos:prefLabel "Direct"@en ;
-    skos:topConceptOf cs: ;
-.
-
-:extension
-    a skos:Concept ;
-    dcterms:provenance "Created for the MER catalogue upgrade project, 2022"@en ;
-    reg:status agldwgstatus:original ;
-    rdfs:isDefinedBy cs: ;
-    skos:definition "Derivation with extension"@en ;
-    skos:inScheme cs: ;
-    skos:prefLabel "Extension"@en ;
-    skos:scopeNote "Use this Concept if the reusing vocabulary extends the original vocabulary but does not subset it" ;
-    skos:topConceptOf cs: ;
-.
-
-:none
-    a skos:Concept ;
-    dcterms:provenance "Added to this vocabulary for multiple projects in 2023"@en ;
-    reg:status agldwgstatus:original ;
-    rdfs:isDefinedBy cs: ;
-    skos:definition "This vocabulary does not derive from another"@en ;
-    skos:inScheme cs: ;
-    skos:prefLabel "None"@en ;
-    skos:scopeNote "Use this Concept if the vocabulary is known to not reuse any other vocabularies" ;
-    skos:topConceptOf cs: ;
-.
-
-:not-applicable
-    a skos:Concept ;
-    dcterms:provenance "Created for the MER catalogue upgrade project, 2022"@en ;
-    reg:status agldwgstatus:original ;
-    rdfs:isDefinedBy cs: ;
-    skos:definition "Derivation mode is not applicable to this vocabulary"@en ;
-    skos:inScheme cs: ;
-    skos:prefLabel "Not Applicable"@en ;
-    skos:scopeNote "Use this Concept if the vocabulary is known not to reuse any other vocabularies" ;
-    skos:topConceptOf cs: ;
-.
-
-:relabelling
-    a skos:Concept ;
-    dcterms:provenance "Created for the MER catalogue upgrade project, 2022"@en ;
-    reg:status agldwgstatus:original ;
-    rdfs:isDefinedBy cs: ;
-    skos:definition "Derivation with relabelling"@en ;
-    skos:inScheme cs: ;
-    skos:prefLabel "Relabelling"@en ;
-    skos:scopeNote "Use this Concept if the reusing vocabulary only relabels Concepts in the original vocabulary but does not alter their place in the Concept hierarchy or their definitions" ;
-    skos:topConceptOf cs: ;
-.
-
-:subsetting
-    a skos:Concept ;
-    dcterms:provenance "Created for the MER catalogue upgrade project, 2022"@en ;
-    reg:status agldwgstatus:original ;
-    rdfs:isDefinedBy cs: ;
-    skos:definition "Derivation with subsetting"@en ;
-    skos:inScheme cs: ;
-    skos:prefLabel "Subsetting"@en ;
-    skos:scopeNote "Use this Concept if the reusing vocabulary only subsets the original but does not extend it" ;
-    skos:topConceptOf cs: ;
-.
-
-:subsetting-and-extension
-    a skos:Concept ;
-    dcterms:provenance "Created for the MER catalogue upgrade project, 2022"@en ;
-    reg:status agldwgstatus:original ;
-    rdfs:isDefinedBy cs: ;
-    skos:definition "Derivation with subsetting and extension"@en ;
-    skos:inScheme cs: ;
-    skos:prefLabel "Subset & Extension"@en ;
-    skos:scopeNote "Use this Concept if the reusing vocabulary both extends and subsets the original vocabulary" ;
-    skos:broader
-        :extension ,
-        :subsetting ;
-.
-
-
-    a sdo:Organization ;
-    sdo:name "KurrawongAI" ;
-    sdo:url "https://kurrawong.ai"^^xsd:anyURI ;
-.
- - - a sdo:Organization ; - sdo:name "SA Minerals and Energy Resources" ; - sdo:url "https://www.energymining.sa.gov.au/industry/geological-survey"^^xsd:anyURI ; -. - -cs: - a skos:ConceptScheme ; - dcterms:contributor ; - dcterms:created "2022-12-05"^^xsd:date ; - dcterms:creator ; - dcterms:issued "2022-12-05"^^xsd:date ; - dcterms:modified "2023-05-22"^^xsd:date ; - dcterms:provenance "Created for the MER catalogue upgrade project, 2022"@en ; - dcterms:publisher ; - reg:status agldwgstatus:stable ; - skos:definition "The modes by which one vocabulary may derive from another"@en ; - skos:hasTopConcept - :direct , - :extension , - :none , - :not-applicable , - :relabelling , - :subsetting ; - skos:prefLabel "Vocabulary Derivation Modes"@en ; -. diff --git a/tests/test_alt_profiles.py b/tests/test_alt_profiles.py index dcce339c..083f59ef 100755 --- a/tests/test_alt_profiles.py +++ b/tests/test_alt_profiles.py @@ -1,19 +1,11 @@ -import asyncio -import time -from pathlib import Path - import pytest -from fastapi.testclient import TestClient -from pyoxigraph.pyoxigraph import Store from rdflib import Graph, URIRef from rdflib.namespace import RDF, DCAT -from prez.app import app -from prez.dependencies import get_repo -from prez.repositories import Repo, PyoxigraphRepo +from prez.reference_data.prez_ns import PREZ -@pytest.fixture(scope="session") +@pytest.fixture() def a_catalog_link(client): # get link for first catalog r = client.get("/catalogs") @@ -23,7 +15,7 @@ def a_catalog_link(client): return link -@pytest.fixture(scope="session") +@pytest.fixture() def a_resource_link(client, a_catalog_link): r = client.get(a_catalog_link) g = Graph().parse(data=r.text) @@ -37,20 +29,26 @@ def test_listing_alt_profile(client): r = client.get(f"/catalogs?_mediatype=text/turtle&_profile=altr-ext:alt-profile") response_graph = Graph().parse(data=r.text) assert ( - URIRef("http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile"), - RDF.type, - URIRef("https://prez.dev/ListingProfile"), - ) in response_graph + URIRef("http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile"), + RDF.type, + URIRef("https://prez.dev/ListingProfile"), + ) in response_graph -def test_object_alt_profile(client, a_catalog_link): +def test_object_alt_profile_token(client, a_catalog_link): r = client.get( - f"{a_catalog_link}?_mediatype=text/turtle&_profile=altr-ext:alt-profile" + f"{a_catalog_link}?_mediatype=text/turtle&_profile=alt" ) response_graph = Graph().parse(data=r.text) - expected_response = ( - URIRef("https://example.com/TopLevelCatalog"), + object_profiles = ( + None, + RDF.type, + PREZ.ObjectProfile, + ) + listing_profiles = ( + None, RDF.type, - DCAT.Catalog, + PREZ.ListingProfile, ) - assert next(response_graph.triples(expected_response)) + assert len(list(response_graph.triples(object_profiles))) > 1 + assert len(list(response_graph.triples(listing_profiles))) == 1 # only the alt profile diff --git a/tests/test_bnode.py b/tests/test_bnode.py index 4d29758e..8d480009 100755 --- a/tests/test_bnode.py +++ b/tests/test_bnode.py @@ -19,7 +19,7 @@ ], ) def test_bnode_depth(input_file: str, iri: str, expected_depth: int) -> None: - file = WORKING_DIR / "tests/data/bnode_depth" / input_file + file = WORKING_DIR / "test_data" / input_file graph = Graph() graph.parse(file) diff --git a/tests/test_connegp.py b/tests/test_connegp.py index ef39f019..63129bbb 100644 --- a/tests/test_connegp.py +++ b/tests/test_connegp.py @@ -7,11 +7,12 @@ from prez.app import app from prez.dependencies import get_repo +from 
prez.reference_data.prez_ns import PREZ from prez.repositories import PyoxigraphRepo, Repo from prez.services.connegp_service import NegotiatedPMTs -@pytest.fixture(scope="session") +@pytest.fixture() def test_store() -> Store: store = Store() file = Path(__file__).parent.parent / "test_data/ogc_records_profile.ttl" @@ -21,7 +22,7 @@ def test_store() -> Store: return store -@pytest.fixture(scope="session") +@pytest.fixture() def test_repo(test_store: Store) -> Repo: return PyoxigraphRepo(test_store) @@ -83,7 +84,7 @@ def test_repo(test_store: Store) -> Repo: ], [ {}, - {"_media": "application/ld+json"}, # Test mediatype resolution as QSA + {"_mediatype": "application/ld+json"}, # Test mediatype resolution as QSA [URIRef("http://www.w3.org/ns/dcat#Catalog")], False, { @@ -95,7 +96,7 @@ def test_repo(test_store: Store) -> Repo: ], [ {"accept": "text/turtle"}, - {"_media": "application/ld+json"}, # Test QSA mediatype is preferred + {"_mediatype": "application/ld+json"}, # Test QSA mediatype is preferred [URIRef("http://www.w3.org/ns/dcat#Catalog")], False, { @@ -137,8 +138,8 @@ def test_repo(test_store: Store) -> Repo: [URIRef("http://www.w3.org/ns/dcat#Catalog")], True, { - "profile": URIRef("https://www.w3.org/TR/vocab-dcat/"), - "title": "DCAT", + "profile": PREZ["OGCListingProfile"], + "title": "OGC Listing Profile", "mediatype": "text/anot+turtle", "class": "http://www.w3.org/ns/dcat#Catalog", }, @@ -158,6 +159,5 @@ def override_get_repo(): listing=listing, system_repo=test_repo, ) - success = await pmts.setup() - assert success + await pmts.setup() assert pmts.selected == expected_selected diff --git a/tests/test_count.py b/tests/test_count.py index cc2eff06..b421b8be 100755 --- a/tests/test_count.py +++ b/tests/test_count.py @@ -1,44 +1,5 @@ -from pathlib import Path - import pytest from fastapi.testclient import TestClient -from pyoxigraph.pyoxigraph import Store - -from prez.app import app -from prez.dependencies import get_repo -from prez.repositories import Repo, PyoxigraphRepo - - -# @pytest.fixture(scope="session") -# def test_store() -> Store: -# # Create a new pyoxigraph Store -# store = Store() -# -# for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"): -# store.load(file.read_bytes(), "text/turtle") -# -# return store -# -# -# @pytest.fixture(scope="session") -# def test_repo(test_store: Store) -> Repo: -# # Create a PyoxigraphQuerySender using the test_store -# return PyoxigraphRepo(test_store) -# -# -# @pytest.fixture(scope="session") -# def client(test_repo: Repo) -> TestClient: -# # Override the dependency to use the test_repo -# def override_get_repo(): -# return test_repo -# -# app.dependency_overrides[get_repo] = override_get_repo -# -# with TestClient(app) as c: -# yield c -# -# # Remove the override to ensure subsequent tests are unaffected -# app.dependency_overrides.clear() def get_curie(client: TestClient, iri: str) -> str: @@ -72,11 +33,11 @@ def get_curie(client: TestClient, iri: str) -> str: ], ) def test_count( - client: TestClient, - iri: str, - inbound: str | None, - outbound: str | None, - count: int, + client: TestClient, + iri: str, + inbound: str | None, + outbound: str | None, + count: int, ): curie = get_curie(client, iri) params = {"curie": curie, "inbound": inbound, "outbound": outbound} diff --git a/tests/test_curie_endpoint.py b/tests/test_curie_endpoint.py index d8b7b354..2af37846 100755 --- a/tests/test_curie_endpoint.py +++ b/tests/test_curie_endpoint.py @@ -1,21 +1,11 @@ import pytest from fastapi.testclient import TestClient 
-from prez.app import app - -# @pytest.fixture -# def client() -> TestClient: -# testclient = TestClient(app) -# -# # Make a request for the following IRI to ensure -# # the curie is available in the 'test_curie' test. -# iri = "http://example.com/namespace/test" -# response = testclient.get(f"/identifier/curie/{iri}") -# assert response.status_code == 200 -# assert response.text == "nmspc:test" -# -# return testclient +@pytest.fixture +def setup(client): + iri = "http://example.com/namespace/test" + client.get(f"/identifier/curie/{iri}") @pytest.mark.parametrize( @@ -39,6 +29,6 @@ def test_iri(iri: str, expected_status_code: int, client: TestClient): ["nmspc:test", 200], ], ) -def test_curie(curie: str, expected_status_code: int, client: TestClient): +def test_curie(curie: str, expected_status_code: int, client: TestClient, setup): response = client.get(f"/identifier/iri/{curie}") assert response.status_code == expected_status_code diff --git a/tests/test_endpoints_cache.py b/tests/test_endpoints_cache.py index 88affe0e..853483ce 100755 --- a/tests/test_endpoints_cache.py +++ b/tests/test_endpoints_cache.py @@ -1,14 +1,35 @@ from rdflib import Graph +from prez.reference_data.prez_ns import PREZ -def test_reset_cache(client): - client.get("/reset-tbox-cache") + +def test_purge_cache(client): + # add some annotations to the cache + client.get("/catalogs") + # purge the cache + response = client.get("/purge-tbox-cache") + assert response.status_code == 200 + # check that the cache is empty r = client.get("/tbox-cache") g = Graph().parse(data=r.text) - assert len(g) > 6000 # cache expands as tests are run + assert len(g) == 0 def test_cache(client): + # add some annotations to the cache + catalogs = client.get("/catalogs") + assert catalogs.status_code == 200 r = client.get("/tbox-cache") g = Graph().parse(data=r.text) - assert len(g) > 6000 # cache expands as tests are run + labels = ( + None, + PREZ.label, + None, + ) + descriptions = ( + None, + PREZ.description, + None, + ) + assert len(list(g.triples(labels))) > 0 + assert len(list(g.triples(descriptions))) > 0 diff --git a/tests/test_endpoints_catprez.py b/tests/test_endpoints_catprez.py index c6831127..995f267c 100755 --- a/tests/test_endpoints_catprez.py +++ b/tests/test_endpoints_catprez.py @@ -1,28 +1,7 @@ -import pytest from rdflib import Graph, URIRef from rdflib.namespace import RDF, DCAT -@pytest.fixture(scope="session") -def a_catalog_link(client): - # get link for first catalog - r = client.get("/catalogs") - g = Graph().parse(data=r.text) - member_uri = g.value(None, RDF.type, DCAT.Catalog) - link = g.value(member_uri, URIRef(f"https://prez.dev/link", None)) - return link - - -@pytest.fixture(scope="session") -def a_resource_link(client, a_catalog_link): - r = client.get(a_catalog_link) - g = Graph().parse(data=r.text) - links = g.objects(subject=None, predicate=URIRef(f"https://prez.dev/link")) - for link in links: - if link != a_catalog_link: - return link - - def test_catalog_listing_anot(client): r = client.get(f"/catalogs?_mediatype=text/turtle&_profile=prez:OGCListingProfile") response_graph = Graph().parse(data=r.text) @@ -40,8 +19,8 @@ def test_catalog_listing_anot(client): assert next(response_graph.triples(expected_response_2)) -def test_catalog_anot(client, a_catalog_link): - r = client.get(f"{a_catalog_link}?_mediatype=text/turtle") +def test_catalog_anot(client, a_top_level_catalog_link): + r = client.get(f"{a_top_level_catalog_link}?_mediatype=text/turtle") response_graph = Graph().parse(data=r.text) 
expected_response = ( URIRef("https://example.com/TopLevelCatalog"), @@ -51,8 +30,8 @@ def test_catalog_anot(client, a_catalog_link): assert next(response_graph.triples(expected_response)) -def test_lower_level_listing_anot(client, a_catalog_link): - r = client.get(f"{a_catalog_link}/collections?_mediatype=text/turtle") +def test_lower_level_listing_anot(client, a_top_level_catalog_link): + r = client.get(f"{a_top_level_catalog_link}/collections?_mediatype=text/turtle") response_graph = Graph().parse(data=r.text) expected_response = ( URIRef("https://example.com/LowerLevelCatalog"), diff --git a/tests/test_endpoints_object.py b/tests/test_endpoints_object.py index 63a2f459..35c77449 100755 --- a/tests/test_endpoints_object.py +++ b/tests/test_endpoints_object.py @@ -3,10 +3,10 @@ def test_feature_collection(client): - r = client.get(f"/object?uri=https://test/feature-collection") + r = client.get(f"/object?uri=https://example.com/FeatureCollection") response_graph = Graph().parse(data=r.text) assert ( - URIRef("https://test/feature-collection"), + URIRef("https://example.com/FeatureCollection"), RDF.type, GEO.FeatureCollection, ) in response_graph @@ -14,11 +14,11 @@ def test_feature_collection(client): def test_feature(client): r = client.get( - f"/object?uri=https://linked.data.gov.au/datasets/geofabric/hydroid/102208962" + f"/object?uri=https://example.com/Feature1" ) response_graph = Graph().parse(data=r.text) assert ( - URIRef("https://linked.data.gov.au/datasets/geofabric/hydroid/102208962"), + URIRef("https://example.com/Feature1"), RDF.type, GEO.Feature, ) in response_graph diff --git a/tests/test_endpoints_ok.py b/tests/test_endpoints_ok.py index 799c913d..247ab1b1 100755 --- a/tests/test_endpoints_ok.py +++ b/tests/test_endpoints_ok.py @@ -1,32 +1,15 @@ import logging import time -from pathlib import Path from typing import Optional, Set -import pytest from fastapi.testclient import TestClient -from pyoxigraph.pyoxigraph import Store from rdflib import Graph -from prez.app import app -from prez.dependencies import get_repo from prez.reference_data.prez_ns import PREZ -from prez.repositories import Repo, PyoxigraphRepo log = logging.getLogger(__name__) -@pytest.fixture(scope="session") -def test_store() -> Store: - # Create a new pyoxigraph Store - store = Store() - - for file in Path(__file__).parent.glob("../test_data/*.ttl"): - store.load(file.read_bytes(), "text/turtle") - - return store - - def wait_for_app_to_be_ready(client, timeout=10): start_time = time.time() while time.time() - start_time < timeout: @@ -40,30 +23,8 @@ def wait_for_app_to_be_ready(client, timeout=10): raise RuntimeError("App did not start within the specified timeout") -@pytest.fixture(scope="session") -def test_repo(test_store: Store) -> Repo: - # Create a PyoxigraphQuerySender using the test_store - return PyoxigraphRepo(test_store) - - -@pytest.fixture(scope="session") -def client(test_repo: Repo) -> TestClient: - # Override the dependency to use the test_repo - def override_get_repo(): - return test_repo - - app.dependency_overrides[get_repo] = override_get_repo - - with TestClient(app) as c: - wait_for_app_to_be_ready(c) - yield c - - # Remove the override to ensure subsequent tests are unaffected - app.dependency_overrides.clear() - - -def test_ogcprez_links( - client: TestClient, visited: Optional[Set] = None, link="/catalogs" +def ogcprez_links( + client, visited: Optional[Set] = None, link="/catalogs", total_links_visited=0 ): if not visited: visited = set() @@ -76,8 +37,16 @@ def 
test_ogcprez_links( member_links = list(g.objects(member_bnode, PREZ.link)) links.extend(member_links) assert response.status_code == 200 - for link in links: - print(link) - if link not in visited: - visited.add(link) - test_ogcprez_links(client, visited, str(link)) + for next_link in links: + print(next_link) + if next_link not in visited: + visited.add(next_link) + # Make the recursive call and update the total_links_visited + # and visited set with the returned values + visited, total_links_visited = ogcprez_links(client, visited, str(next_link), total_links_visited + 1) + # Return the updated count and visited set + return visited, total_links_visited + +def test_visit_all_links(client): + visited_links, total_count = ogcprez_links(client) + print(f"Total links visited: {total_count}") \ No newline at end of file diff --git a/tests/test_endpoints_profiles.py b/tests/test_endpoints_profiles.py index b71d02ed..a92dbc1a 100755 --- a/tests/test_endpoints_profiles.py +++ b/tests/test_endpoints_profiles.py @@ -22,9 +22,3 @@ def test_sp_profile(client): g = Graph().parse(data=r.text) assert (URIRef("https://prez.dev/SpacePrezProfile"), RDF.type, PROF.Profile) in g - -def test_vp_profile(client): - # check the example remote profile is loaded - r = client.get("/profiles/prez:VocPrezProfile") - g = Graph().parse(data=r.text) - assert (URIRef("https://prez.dev/VocPrezProfile"), RDF.type, PROF.Profile) in g diff --git a/tests/test_endpoints_spaceprez.py b/tests/test_endpoints_spaceprez.py index c3cb7f6b..da6bb213 100755 --- a/tests/test_endpoints_spaceprez.py +++ b/tests/test_endpoints_spaceprez.py @@ -1,40 +1,11 @@ -import pytest from rdflib import Graph, URIRef from rdflib.namespace import RDF, DCAT, GEO -@pytest.fixture(scope="session") -def a_catalog_link(client): - r = client.get("/catalogs") - g = Graph().parse(data=r.text) - member_uri = URIRef("https://example.com/SpacePrezCatalog") - link = g.value(member_uri, URIRef(f"https://prez.dev/link", None)) - return link - - -@pytest.fixture(scope="session") -def an_fc_link(client, a_catalog_link): - r = client.get(f"{a_catalog_link}/collections") - g = Graph().parse(data=r.text) - links = g.objects(subject=None, predicate=URIRef(f"https://prez.dev/link")) - for link in links: - if link != a_catalog_link: - return link - - -@pytest.fixture(scope="session") -def a_feature_link(client, an_fc_link): - r = client.get(f"{an_fc_link}/items") - g = Graph().parse(data=r.text) - links = g.objects(subject=None, predicate=URIRef(f"https://prez.dev/link")) - for link in links: - if link != an_fc_link: - return link - - def test_dataset_anot(client, a_catalog_link): r = client.get(f"{a_catalog_link}?_mediatype=text/turtle") - response_graph = Graph().parse(data=r.text) + g_text = r.text + response_graph = Graph().parse(data=g_text) expected_response_1 = ( URIRef("https://example.com/SpacePrezCatalog"), RDF.type, @@ -45,17 +16,19 @@ def test_dataset_anot(client, a_catalog_link): def test_feature_collection(client, an_fc_link): r = client.get(f"{an_fc_link}?_mediatype=text/turtle") - response_graph = Graph().parse(data=r.text) + g_text = r.text + response_graph = Graph().parse(data=g_text) assert ( - URIRef("https://example.com/FeatureCollection"), - RDF.type, - GEO.FeatureCollection, - ) in response_graph + URIRef("https://example.com/FeatureCollection"), + RDF.type, + GEO.FeatureCollection, + ) in response_graph def test_feature(client, a_feature_link): r = client.get(f"{a_feature_link}?_mediatype=text/turtle") - response_graph = Graph().parse(data=r.text) 
+ g_text = r.text + response_graph = Graph().parse(data=g_text) expected_response_1 = ( URIRef("https://example.com/Feature1"), RDF.type, @@ -66,7 +39,8 @@ def test_feature(client, a_feature_link): def test_feature_listing_anot(client, an_fc_link): r = client.get(f"{an_fc_link}/items?_mediatype=text/turtle") - response_graph = Graph().parse(data=r.text) + g_text = r.text + response_graph = Graph().parse(data=g_text) expected_response_1 = ( URIRef("https://example.com/Feature1"), RDF.type, diff --git a/tests/test_node_selection_shacl.py b/tests/test_node_selection_shacl.py index 5ea8fece..8a28b9d6 100755 --- a/tests/test_node_selection_shacl.py +++ b/tests/test_node_selection_shacl.py @@ -25,8 +25,8 @@ def test_nodeshape_parsing(nodeshape_uri): @pytest.mark.parametrize( "nodeshape_uri", - ["http://example.org/ns#TopLevelCatalogs" "http://example.org/ns#FeatureListing"], + ["http://example.org/ns#ConceptSchemeConcept"], ) def test_nodeshape_to_grammar(nodeshape_uri): ns = NodeShape(uri=URIRef(nodeshape_uri), graph=endpoints_graph) - ns.to_string() + ... From b9ace76ff352d2b24bec22a5ccfcb59684612543 Mon Sep 17 00:00:00 2001 From: david Date: Wed, 3 Apr 2024 13:41:13 +1000 Subject: [PATCH 23/25] progress --- poetry.lock | 148 +-- prez/cache.py | 2 +- prez/config.py | 4 +- prez/dependencies.py | 217 ++- .../endpoints/endpoint_metadata.ttl | 11 +- .../endpoints/system_endpoints.ttl | 4 +- prez/reference_data/prez_ns.py | 1 + .../profiles/ogc_records_profile.ttl | 6 +- prez/renderers/renderer.py | 2 + prez/repositories/pyoxigraph.py | 5 +- prez/routers/cql.py | 9 +- prez/routers/identifier.py | 4 +- prez/routers/management.py | 4 +- prez/routers/object.py | 6 +- prez/routers/ogc_router.py | 228 ++-- prez/routers/profiles.py | 90 +- prez/routers/search.py | 51 +- prez/routers/sparql.py | 4 +- prez/services/connegp_service.py | 4 +- prez/services/curie_functions.py | 2 +- prez/services/link_generation.py | 7 +- prez/services/listings.py | 319 +++-- prez/services/objects.py | 62 +- prez/services/query_generation/annotations.py | 45 +- prez/services/query_generation/count.py | 3 +- .../{node_selection => }/cql.py | 0 .../node_selection/endpoint_shacl.py | 281 ---- .../query_generation/node_selection/search.py | 323 ----- prez/services/query_generation/search.py | 654 +++++++++ prez/services/query_generation/shacl.py | 590 +++++++++ prez/services/query_generation/umbrella.py | 1169 ++++++++++------- pyproject.toml | 2 +- temp/grammar/grammar.py | 4 +- tests/TO_FIX_test_dd_profiles.py | 4 +- tests/TO_FIX_test_endpoints_vocprez.py | 4 +- tests/TO_FIX_test_search.py | 4 +- tests/_test_cql.py | 4 +- tests/conftest.py | 8 +- tests/test_alt_profiles.py | 16 +- tests/test_connegp.py | 4 +- tests/test_count.py | 10 +- tests/test_endpoints_object.py | 4 +- tests/test_endpoints_ok.py | 9 +- tests/test_endpoints_profiles.py | 1 - tests/test_endpoints_spaceprez.py | 8 +- tests/test_node_selection_shacl.py | 7 +- tests/test_property_selection_shacl.py | 226 ++++ tests/test_query_construction.py | 85 ++ 48 files changed, 3109 insertions(+), 1546 deletions(-) rename prez/services/query_generation/{node_selection => }/cql.py (100%) delete mode 100644 prez/services/query_generation/node_selection/endpoint_shacl.py delete mode 100755 prez/services/query_generation/node_selection/search.py create mode 100755 prez/services/query_generation/search.py create mode 100644 prez/services/query_generation/shacl.py create mode 100644 tests/test_property_selection_shacl.py create mode 100644 tests/test_query_construction.py diff 
--git a/poetry.lock b/poetry.lock index a96824bf..3e52d939 100755 --- a/poetry.lock +++ b/poetry.lock @@ -261,63 +261,63 @@ files = [ [[package]] name = "coverage" -version = "7.4.3" +version = "7.4.4" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8580b827d4746d47294c0e0b92854c85a92c2227927433998f0d3320ae8a71b6"}, - {file = "coverage-7.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:718187eeb9849fc6cc23e0d9b092bc2348821c5e1a901c9f8975df0bc785bfd4"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:767b35c3a246bcb55b8044fd3a43b8cd553dd1f9f2c1eeb87a302b1f8daa0524"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae7f19afe0cce50039e2c782bff379c7e347cba335429678450b8fe81c4ef96d"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba3a8aaed13770e970b3df46980cb068d1c24af1a1968b7818b69af8c4347efb"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ee866acc0861caebb4f2ab79f0b94dbfbdbfadc19f82e6e9c93930f74e11d7a0"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:506edb1dd49e13a2d4cac6a5173317b82a23c9d6e8df63efb4f0380de0fbccbc"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd6545d97c98a192c5ac995d21c894b581f1fd14cf389be90724d21808b657e2"}, - {file = "coverage-7.4.3-cp310-cp310-win32.whl", hash = "sha256:f6a09b360d67e589236a44f0c39218a8efba2593b6abdccc300a8862cffc2f94"}, - {file = "coverage-7.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:18d90523ce7553dd0b7e23cbb28865db23cddfd683a38fb224115f7826de78d0"}, - {file = "coverage-7.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbbe5e739d45a52f3200a771c6d2c7acf89eb2524890a4a3aa1a7fa0695d2a47"}, - {file = "coverage-7.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:489763b2d037b164846ebac0cbd368b8a4ca56385c4090807ff9fad817de4113"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451f433ad901b3bb00184d83fd83d135fb682d780b38af7944c9faeecb1e0bfe"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcc66e222cf4c719fe7722a403888b1f5e1682d1679bd780e2b26c18bb648cdc"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ec74cfef2d985e145baae90d9b1b32f85e1741b04cd967aaf9cfa84c1334f3"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:abbbd8093c5229c72d4c2926afaee0e6e3140de69d5dcd918b2921f2f0c8baba"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:35eb581efdacf7b7422af677b92170da4ef34500467381e805944a3201df2079"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8249b1c7334be8f8c3abcaaa996e1e4927b0e5a23b65f5bf6cfe3180d8ca7840"}, - {file = "coverage-7.4.3-cp311-cp311-win32.whl", hash = "sha256:cf30900aa1ba595312ae41978b95e256e419d8a823af79ce670835409fc02ad3"}, - {file = "coverage-7.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:18c7320695c949de11a351742ee001849912fd57e62a706d83dfc1581897fa2e"}, - {file = "coverage-7.4.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:b51bfc348925e92a9bd9b2e48dad13431b57011fd1038f08316e6bf1df107d10"}, - {file = "coverage-7.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d6cdecaedea1ea9e033d8adf6a0ab11107b49571bbb9737175444cea6eb72328"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b2eccb883368f9e972e216c7b4c7c06cabda925b5f06dde0650281cb7666a30"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c00cdc8fa4e50e1cc1f941a7f2e3e0f26cb2a1233c9696f26963ff58445bac7"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4a8dd3dcf4cbd3165737358e4d7dfbd9d59902ad11e3b15eebb6393b0446e"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:062b0a75d9261e2f9c6d071753f7eef0fc9caf3a2c82d36d76667ba7b6470003"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ebe7c9e67a2d15fa97b77ea6571ce5e1e1f6b0db71d1d5e96f8d2bf134303c1d"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c0a120238dd71c68484f02562f6d446d736adcc6ca0993712289b102705a9a3a"}, - {file = "coverage-7.4.3-cp312-cp312-win32.whl", hash = "sha256:37389611ba54fd6d278fde86eb2c013c8e50232e38f5c68235d09d0a3f8aa352"}, - {file = "coverage-7.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:d25b937a5d9ffa857d41be042b4238dd61db888533b53bc76dc082cb5a15e914"}, - {file = "coverage-7.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:28ca2098939eabab044ad68850aac8f8db6bf0b29bc7f2887d05889b17346454"}, - {file = "coverage-7.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:280459f0a03cecbe8800786cdc23067a8fc64c0bd51dc614008d9c36e1659d7e"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0cdedd3500e0511eac1517bf560149764b7d8e65cb800d8bf1c63ebf39edd2"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9babb9466fe1da12417a4aed923e90124a534736de6201794a3aea9d98484e"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dec9de46a33cf2dd87a5254af095a409ea3bf952d85ad339751e7de6d962cde6"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:16bae383a9cc5abab9bb05c10a3e5a52e0a788325dc9ba8499e821885928968c"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2c854ce44e1ee31bda4e318af1dbcfc929026d12c5ed030095ad98197eeeaed0"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ce8c50520f57ec57aa21a63ea4f325c7b657386b3f02ccaedeccf9ebe27686e1"}, - {file = "coverage-7.4.3-cp38-cp38-win32.whl", hash = "sha256:708a3369dcf055c00ddeeaa2b20f0dd1ce664eeabde6623e516c5228b753654f"}, - {file = "coverage-7.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:1bf25fbca0c8d121a3e92a2a0555c7e5bc981aee5c3fdaf4bb7809f410f696b9"}, - {file = "coverage-7.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b253094dbe1b431d3a4ac2f053b6d7ede2664ac559705a704f621742e034f1f"}, - {file = "coverage-7.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77fbfc5720cceac9c200054b9fab50cb2a7d79660609200ab83f5db96162d20c"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6679060424faa9c11808598504c3ab472de4531c571ab2befa32f4971835788e"}, - {file = 
"coverage-7.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af154d617c875b52651dd8dd17a31270c495082f3d55f6128e7629658d63765"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8640f1fde5e1b8e3439fe482cdc2b0bb6c329f4bb161927c28d2e8879c6029ee"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:69b9f6f66c0af29642e73a520b6fed25ff9fd69a25975ebe6acb297234eda501"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0842571634f39016a6c03e9d4aba502be652a6e4455fadb73cd3a3a49173e38f"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a78ed23b08e8ab524551f52953a8a05d61c3a760781762aac49f8de6eede8c45"}, - {file = "coverage-7.4.3-cp39-cp39-win32.whl", hash = "sha256:c0524de3ff096e15fcbfe8f056fdb4ea0bf497d584454f344d59fce069d3e6e9"}, - {file = "coverage-7.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:0209a6369ccce576b43bb227dc8322d8ef9e323d089c6f3f26a597b09cb4d2aa"}, - {file = "coverage-7.4.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:7cbde573904625509a3f37b6fecea974e363460b556a627c60dc2f47e2fffa51"}, - {file = "coverage-7.4.3.tar.gz", hash = "sha256:276f6077a5c61447a48d133ed13e759c09e62aff0dc84274a68dc18660104d52"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, + {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, + {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, + {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, + {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, + {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, + {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, + {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, + {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, + {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, + {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, + {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, + {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, ] [package.extras] @@ -832,13 +832,13 @@ rdflib = ">=6.3,<8.0" [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -902,13 +902,13 @@ virtualenv = ">=20.10.0" [[package]] name = "pydantic" -version = "2.6.3" +version = "2.6.4" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.6.3-py3-none-any.whl", hash = "sha256:72c6034df47f46ccdf81869fddb81aade68056003900a8724a4f160700016a2a"}, - 
{file = "pydantic-2.6.3.tar.gz", hash = "sha256:e07805c4c7f5c6826e33a1d4c9d47950d7eaf34868e2690f8594d2e30241f11f"}, + {file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"}, + {file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"}, ] [package.dependencies] @@ -1125,13 +1125,13 @@ files = [ [[package]] name = "pyparsing" -version = "3.1.1" +version = "3.1.2" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, - {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, ] [package.extras] @@ -1357,18 +1357,18 @@ wheel = ">=0.36.1" [[package]] name = "setuptools" -version = "69.1.1" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, - {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -1508,13 +1508,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = 
"0.27.1" +version = "0.28.0" description = "The lightning-fast ASGI server." optional = false python-versions = ">=3.8" files = [ - {file = "uvicorn-0.27.1-py3-none-any.whl", hash = "sha256:5c89da2f3895767472a35556e539fd59f7edbe9b1e9c0e1c99eebeadc61838e4"}, - {file = "uvicorn-0.27.1.tar.gz", hash = "sha256:3d9a267296243532db80c83a959a3400502165ade2c1338dea4e67915fd4745a"}, + {file = "uvicorn-0.28.0-py3-none-any.whl", hash = "sha256:6623abbbe6176204a4226e67607b4d52cc60ff62cda0ff177613645cefa2ece1"}, + {file = "uvicorn-0.28.0.tar.gz", hash = "sha256:cab4473b5d1eaeb5a0f6375ac4bc85007ffc75c3cc1768816d9e5d589857b067"}, ] [package.dependencies] @@ -1546,13 +1546,13 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "wheel" -version = "0.42.0" +version = "0.43.0" description = "A built-package format for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "wheel-0.42.0-py3-none-any.whl", hash = "sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d"}, - {file = "wheel-0.42.0.tar.gz", hash = "sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8"}, + {file = "wheel-0.43.0-py3-none-any.whl", hash = "sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81"}, + {file = "wheel-0.43.0.tar.gz", hash = "sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85"}, ] [package.extras] @@ -1561,4 +1561,4 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "7fc8b112be822b96246a4f1639edd1cd5cb7543abf0eded604c7aaf7ed1ebdc8" +content-hash = "4642a2aaf72073ba6653912c1057b7d0befb89d247aae4468a2bdef9a0b53bc5" diff --git a/prez/cache.py b/prez/cache.py index 848c7397..1afc3d94 100755 --- a/prez/cache.py +++ b/prez/cache.py @@ -4,7 +4,7 @@ from prez.repositories import PyoxigraphRepo -profiles_graph_cache = ConjunctiveGraph() +profiles_graph_cache = Dataset() profiles_graph_cache.bind("prez", "https://prez.dev/") endpoints_graph_cache = ConjunctiveGraph() diff --git a/prez/config.py b/prez/config.py index 0362560a..38b48a6c 100755 --- a/prez/config.py +++ b/prez/config.py @@ -8,7 +8,7 @@ from rdflib import URIRef, DCTERMS, RDFS, SDO from rdflib.namespace import SKOS -from prez.reference_data.prez_ns import REG +from prez.reference_data.prez_ns import REG, EP class Settings(BaseSettings): @@ -61,8 +61,10 @@ class Settings(BaseSettings): prez_version: Optional[str] = None disable_prefix_generation: bool = False default_language: str = "en" + default_search_predicates: Optional[List[URIRef]] = [RDFS.label, SKOS.prefLabel, SDO.name, DCTERMS.title] local_rdf_dir: str = "rdf" endpoint_structure: Optional[Tuple[str, ...]] = ("catalogs", "collections", "items") + system_endpoints: Optional[List[URIRef]] = [EP["system/profile-listing"], EP["system/profile-object"]] # @root_validator() # def check_endpoint_enabled(cls, values): diff --git a/prez/dependencies.py b/prez/dependencies.py index cd88a751..f5ad6355 100755 --- a/prez/dependencies.py +++ b/prez/dependencies.py @@ -4,7 +4,7 @@ import httpx from fastapi import Depends, Request, HTTPException from pyoxigraph import Store -from rdflib import Dataset +from rdflib import Dataset, URIRef, SH from prez.cache import ( store, @@ -16,8 +16,15 @@ annotations_repo, ) from prez.config import settings -from prez.repositories import PyoxigraphRepo, RemoteSparqlRepo, OxrdflibRepo -from prez.services.query_generation.node_selection.cql import CQLParser 
+from prez.reference_data.prez_ns import ALTREXT, ONT +from prez.repositories import PyoxigraphRepo, RemoteSparqlRepo, OxrdflibRepo, Repo +from prez.services.connegp_service import NegotiatedPMTs +from prez.services.curie_functions import get_uri_for_curie_id +from prez.services.query_generation.classes import get_classes +from prez.services.query_generation.cql import CQLParser +from prez.services.query_generation.search import SearchQueryRegex +from prez.services.query_generation.shacl import NodeShape +from temp.grammar import IRI, Var async def get_async_http_client(): @@ -45,12 +52,16 @@ def get_oxrdflib_store(): return oxrdflib_store -async def get_repo( - http_async_client: httpx.AsyncClient = Depends(get_async_http_client), - pyoxi_store: Store = Depends(get_pyoxi_store), -): +async def get_data_repo( + request: Request, + http_async_client: httpx.AsyncClient = Depends(get_async_http_client), + pyoxi_data_store: Store = Depends(get_pyoxi_store), + pyoxi_system_store: Store = Depends(get_system_store), +) -> Repo: + if URIRef(request.scope.get("route").name) in settings.system_endpoints: + return PyoxigraphRepo(pyoxi_system_store) if settings.sparql_repo_type == "pyoxigraph": - return PyoxigraphRepo(pyoxi_store) + return PyoxigraphRepo(pyoxi_data_store) elif settings.sparql_repo_type == "oxrdflib": return OxrdflibRepo(oxrdflib_store) elif settings.sparql_repo_type == "remote": @@ -58,8 +69,8 @@ async def get_repo( async def get_system_repo( - pyoxi_store: Store = Depends(get_system_store), -): + pyoxi_store: Store = Depends(get_system_store), +) -> Repo: """ A pyoxigraph Store with Prez system data including: - Profiles @@ -100,9 +111,9 @@ async def load_annotations_data_to_oxigraph(store: Store): Loads all the data from the local data directory into the local SPARQL endpoint """ relevant_predicates = ( - settings.label_predicates - + settings.description_predicates - + settings.provenance_predicates + settings.label_predicates + + settings.description_predicates + + settings.provenance_predicates ) raw_g = Dataset(default_union=True) for file in (Path(__file__).parent / "reference_data/context_ontologies").glob("*"): @@ -115,7 +126,7 @@ async def load_annotations_data_to_oxigraph(store: Store): store.load(file_bytes, "application/n-triples") -async def cql_post_parser_dependency(request: Request): +async def cql_post_parser_dependency(request: Request) -> CQLParser: try: body = await request.json() context = json.load( @@ -123,6 +134,7 @@ async def cql_post_parser_dependency(request: Request): ) cql_parser = CQLParser(cql=body, context=context) cql_parser.generate_jsonld() + cql_parser.parse() return cql_parser except json.JSONDecodeError: raise HTTPException(status_code=400, detail="Invalid JSON format.") @@ -132,17 +144,18 @@ async def cql_post_parser_dependency(request: Request): ) -async def cql_get_parser_dependency(request: Request): +async def cql_get_parser_dependency(request: Request) -> CQLParser: if request.query_params.get("filter"): try: query = json.loads(request.query_params["filter"]) context = json.load( ( - Path(__file__).parent / "reference_data/cql/default_context.json" + Path(__file__).parent / "reference_data/cql/default_context.json" ).open() ) cql_parser = CQLParser(cql=query, context=context) cql_parser.generate_jsonld() + cql_parser.parse() return cql_parser except json.JSONDecodeError: raise HTTPException(status_code=400, detail="Invalid JSON format.") @@ -150,3 +163,175 @@ async def cql_get_parser_dependency(request: Request): raise HTTPException( 
            status_code=400, detail="Invalid CQL format: Parsing failed."
        )
+
+
+async def generate_search_query(request: Request):
+    term = request.query_params.get("q")
+    if term:
+        predicates = request.query_params.getlist("predicates")
+        page = request.query_params.get("page", 1)
+        per_page = request.query_params.get("per_page", 10)
+        limit = int(per_page)
+        offset = limit * (int(page) - 1)
+
+        return SearchQueryRegex(
+            term=term,
+            predicates=predicates,
+            limit=limit,
+            offset=offset,
+        )
+
+
+async def get_endpoint_nodeshapes(
+    request: Request,
+    repo: Repo = Depends(get_data_repo),
+    system_repo: Repo = Depends(get_system_repo),
+):
+    """
+    Determines the relevant nodeshape based on the endpoint, hierarchy level, and parent URI.
+    """
+    endpoint_uri = URIRef(request.scope.get("route").name)
+    path_node_curies = [
+        i for i in request.url.path.split("/")[:-1] if i in request.path_params.values()
+    ]
+    path_nodes = {
+        f"path_node_{i + 1}": IRI(value=await get_uri_for_curie_id(value))
+        for i, value in enumerate(reversed(path_node_curies))
+    }
+    hierarchy_level = int(len(request.url.path.split("/")) / 2)
+    node_selection_shape = None
+    target_classes = []
+    relevant_ns_query = f"""SELECT ?ns ?tc
+        WHERE {{
+            {endpoint_uri.n3()} <https://prez.dev/ont/relevantShapes> ?ns .
+            ?ns <http://www.w3.org/ns/shacl#targetClass> ?tc ;
+                <https://prez.dev/ont/hierarchyLevel> {hierarchy_level} .
+        }}"""
+    _, r = await system_repo.send_queries([], [(None, relevant_ns_query)])
+    tabular_results = r[0][1]
+    distinct_ns = set([result["ns"]["value"] for result in tabular_results])
+    if len(distinct_ns) == 1:  # only one possible node shape
+        node_selection_shape = URIRef(tabular_results[0]["ns"]["value"])
+        target_classes = [URIRef(result["tc"]["value"]) for result in tabular_results]
+    elif len(distinct_ns) > 1:  # more than one possible node shape
+        # try all of the available nodeshapes
+        path_node_classes = {}
+        for pn, uri in path_nodes.items():
+            path_node_classes[pn] = await get_classes(URIRef(uri.value), repo)
+        nodeshapes = [
+            NodeShape(
+                uri=URIRef(ns),
+                graph=endpoints_graph_cache,
+                kind="endpoint",
+                path_nodes=path_nodes,
+                focus_node=Var(value="focus_node"),
+            )
+            for ns in distinct_ns
+        ]
+        matching_nodeshapes = []
+        for ns in nodeshapes:
+            match_all_keys = True  # assume a match for all path nodes initially
+
+            for pn, klasses in path_node_classes.items():
+                # the shape must accept at least one of this path node's classes at this position
+                if not any(klass in ns.classes_at_len.get(pn, []) for klass in klasses):
+                    match_all_keys = False  # found a path node this shape cannot serve
+                    break  # no need to check further for this ns
+
+            if match_all_keys:
+                matching_nodeshapes.append(ns)
+        # TODO logic if there is more than one nodeshape - current default nodeshapes will only return one.
+        if not matching_nodeshapes:
+            raise ValueError(
+                "No matching nodeshapes found for the given path nodes and hierarchy level"
+            )
+        node_selection_shape = matching_nodeshapes[0].uri
+    if not path_nodes:
+        path_nodes = {}
+    if node_selection_shape:
+        ns = NodeShape(
+            uri=node_selection_shape,
+            graph=endpoints_graph_cache,
+            kind="endpoint",
+            path_nodes=path_nodes,
+            focus_node=Var(value="focus_node"),
+        )
+        return ns
+    else:
+        raise ValueError(
+            f"No relevant nodeshape found for the given endpoint {endpoint_uri}, "
+            f"hierarchy level {hierarchy_level}, and parent URI"
+        )
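
The class-matching step in get_endpoint_nodeshapes above decides, when more than one nodeshape could serve an endpoint, which shapes actually fit the request: a shape survives only if every bound path node has at least one of its classes in the shape's classes_at_len entry for that position. A minimal, self-contained sketch of just that step, with illustrative shape URIs and class data rather than real values from the endpoints graph (SketchNodeShape is not Prez's NodeShape):

# Sketch only: mirrors the matching step above, outside FastAPI.
from dataclasses import dataclass, field
from typing import Dict, List


@dataclass
class SketchNodeShape:
    uri: str
    # classes a shape accepts at each path position, keyed "path_node_1", ...
    classes_at_len: Dict[str, List[str]] = field(default_factory=dict)


def matching_nodeshapes(
    nodeshapes: List[SketchNodeShape],
    path_node_classes: Dict[str, List[str]],
) -> List[SketchNodeShape]:
    """Keep shapes where every path node has at least one accepted class."""
    return [
        ns
        for ns in nodeshapes
        if all(
            any(klass in ns.classes_at_len.get(pn, []) for klass in klasses)
            for pn, klasses in path_node_classes.items()
        )
    ]


catalog_collections = SketchNodeShape(
    uri="ex:Collections", classes_at_len={"path_node_1": ["dcat:Catalog"]}
)
scheme_concepts = SketchNodeShape(
    uri="ex:ConceptSchemeConcepts",
    classes_at_len={"path_node_1": ["skos:ConceptScheme"]},
)
# A request under /catalogs/{catalogId}/collections, where path node 1
# resolved to a dcat:Catalog:
matches = matching_nodeshapes(
    [catalog_collections, scheme_concepts], {"path_node_1": ["dcat:Catalog"]}
)
print([ns.uri for ns in matches])  # ['ex:Collections']

The any-per-node rather than all-per-node check matters: a path node may carry several classes, and a shape only needs to recognise one of them to remain a candidate.
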
+async def get_endpoint_type(
+    request: Request,
+    system_repo: Repo = Depends(get_system_repo),
+):
+    endpoint_uri = URIRef(request.scope.get("route").name)
+    ep_type_fs = await get_classes(endpoint_uri, system_repo)
+    ep_types = list(ep_type_fs)
+
+    # keep only the endpoint class that determines behaviour
+    for ep_type in ep_types:
+        if ep_type in [ONT.ObjectEndpoint, ONT.ListingEndpoint]:
+            return ep_type
+
+    raise ValueError(
+        "Endpoint must be declared as either a 'https://prez.dev/ont/ObjectEndpoint' or a "
+        "'https://prez.dev/ont/ListingEndpoint' in order for the appropriate profile to be determined."
+    )
+
+
+async def get_negotiated_pmts(
+    request: Request,
+    endpoint_nodeshape: NodeShape = Depends(get_endpoint_nodeshapes),
+    system_repo: Repo = Depends(get_system_repo),
+    endpoint_type: URIRef = Depends(get_endpoint_type),
+) -> NegotiatedPMTs:
+    # use the endpoint nodeshape's target classes in constructing NegotiatedPMTs
+    if endpoint_type == ONT.ObjectEndpoint:
+        listing = False
+    else:
+        listing = True
+    pmts = NegotiatedPMTs(
+        headers=request.headers,
+        params=request.query_params,
+        classes=endpoint_nodeshape.targetClasses,
+        listing=listing,
+        system_repo=system_repo,
+    )
+    await pmts.setup()
+    return pmts
+
+
+async def get_endpoint_structure(
+    request: Request,
+    pmts: NegotiatedPMTs = Depends(get_negotiated_pmts),
+):
+    endpoint_uri = URIRef(request.scope.get("route").name)
+
+    if (
+        (endpoint_uri in settings.system_endpoints)
+        or (pmts.selected.get("profile") == ALTREXT["alt-profile"])
+    ):
+        return ("profiles",)
+    else:
+        return settings.endpoint_structure
+
+
+async def get_profile_nodeshape(
+    request: Request,
+    pmts: NegotiatedPMTs = Depends(get_negotiated_pmts),
+    endpoint_type: URIRef = Depends(get_endpoint_type),
+):
+    profile = pmts.selected.get("profile")
+    if endpoint_type == ONT.ObjectEndpoint:
+        object_curie = request.url.path.split("/")[-1]
+        focus_node_uri = await get_uri_for_curie_id(object_curie)
+        focus_node = IRI(value=focus_node_uri)
+    else:
+        focus_node = Var(value="focus_node")
+    return NodeShape(
+        uri=profile,
+        graph=profiles_graph_cache,
+        kind="profile",
+        focus_node=focus_node,
+    )
diff --git a/prez/reference_data/endpoints/endpoint_metadata.ttl b/prez/reference_data/endpoints/endpoint_metadata.ttl
index f3ad2b72..5f792c02 100644
--- a/prez/reference_data/endpoints/endpoint_metadata.ttl
+++ b/prez/reference_data/endpoints/endpoint_metadata.ttl
@@ -4,13 +4,13 @@
 @prefix prez: <https://prez.dev/> .
 @prefix sys: <https://prez.dev/endpoint/system/> .
 
-sys:profiles-listing
-    a ont:ListingEndpoint ;
+sys:profile-listing
+    a ont:ListingEndpoint , ont:SystemEndpoint ;
     ont:relevantShapes ex:Profiles ;
 .
 
-sys:profiles-object
-    a ont:ObjectEndpoint ;
+sys:profile-object
+    a ont:ObjectEndpoint , ont:SystemEndpoint ;
     ont:relevantShapes ex:Profiles ;
 .
 
@@ -52,4 +52,5 @@ ogce:cql-queryables
 ogce:search
     a ont:ListingEndpoint ;
     ont:relevantShapes ex:search ;
-.
\ No newline at end of file
+.
+
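
The endpoint metadata above is what the new dependencies key off: get_data_repo answers endpoints typed ont:SystemEndpoint from the system store, and the ont:ListingEndpoint / ont:ObjectEndpoint class chosen by get_endpoint_type drives content negotiation. A short rdflib sketch of that lookup against a fragment of the file; the ex: namespace IRI below is an assumption, since the file's full prefix block is not part of this diff, and Prez itself performs the lookup through a Repo (get_classes) rather than plain rdflib:

# Sketch only: read a fragment of endpoint_metadata.ttl and classify an endpoint.
from rdflib import Graph, Namespace, RDF, URIRef

ONT = Namespace("https://prez.dev/ont/")
fragment = """
@prefix ont: <https://prez.dev/ont/> .
@prefix ex: <https://prez.dev/examples/> .
@prefix sys: <https://prez.dev/endpoint/system/> .

sys:profile-listing
    a ont:ListingEndpoint , ont:SystemEndpoint ;
    ont:relevantShapes ex:Profiles ;
.
"""
g = Graph().parse(data=fragment, format="turtle")

endpoint = URIRef("https://prez.dev/endpoint/system/profile-listing")
ep_types = set(g.objects(endpoint, RDF.type))

# get_endpoint_type keeps only the class that matters for conneg:
ep_type = next(
    (t for t in ep_types if t in (ONT.ObjectEndpoint, ONT.ListingEndpoint)), None
)
print(ep_type == ONT.ListingEndpoint)  # True
print(ONT.SystemEndpoint in ep_types)  # True: served from the system store

settings.system_endpoints carries the same two endpoint URIs on the Python side, so the store routing in get_data_repo is a simple membership test and never has to consult the graph per request.
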
diff --git a/prez/reference_data/endpoints/system_endpoints.ttl b/prez/reference_data/endpoints/system_endpoints.ttl
index 56d32d82..06f2608d 100755
--- a/prez/reference_data/endpoints/system_endpoints.ttl
+++ b/prez/reference_data/endpoints/system_endpoints.ttl
@@ -10,7 +10,7 @@ PREFIX prof: <http://www.w3.org/ns/dx/prof/>
 PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
 PREFIX shext: <http://example.com/shacl-extension#>
 
-endpoint:profiles-listing a ont:ListingEndpoint ;
+endpoint:profile-listing a ont:ListingEndpoint ;
     ont:deliversClasses prez:ProfilesList ;
     sh:targetClass prof:Profile ;
     ont:endpointTemplate "/profiles" ;
@@ -24,7 +24,7 @@ endpoint:profile-object a ont:ObjectEndpoint ;
     ont:endpointTemplate "/profiles/$object" ;
 .
 
-endpoint:alt-profiles-listing a ont:ListingEndpoint ;
+endpoint:alt-profile-listing a ont:ListingEndpoint ;
     ont:deliversClasses prez:ProfilesList ;
     sh:targetClass prof:Profile ;
     sh:target [ sh:select """SELECT ?focus_node
diff --git a/prez/reference_data/prez_ns.py b/prez/reference_data/prez_ns.py
index 0a5e1d0d..3ee865f7 100755
--- a/prez/reference_data/prez_ns.py
+++ b/prez/reference_data/prez_ns.py
@@ -5,3 +5,4 @@
 ALTREXT = Namespace("http://www.w3.org/ns/dx/conneg/altr-ext#")
 REG = Namespace("http://purl.org/linked-data/registry#")
 EP = Namespace("https://prez.dev/endpoint/")
+SHEXT = Namespace("http://example.com/shacl-extension#")
diff --git a/prez/reference_data/profiles/ogc_records_profile.ttl b/prez/reference_data/profiles/ogc_records_profile.ttl
index 8e973dea..14d80ba4 100755
--- a/prez/reference_data/profiles/ogc_records_profile.ttl
+++ b/prez/reference_data/profiles/ogc_records_profile.ttl
@@ -74,6 +74,8 @@ prez:OGCSchemesListProfile
                 ( prov:qualifiedDerivation prov:entity )
             )
         )
+    ], [
+        sh:path rdf:type
     ]
 .
 
@@ -89,10 +91,6 @@ prez:OGCItemProfile
     altr-ext:hasDefaultResourceFormat "text/anot+turtle" ;
     sh:property [
             sh:path shext:allPredicateValues ;
-        ] ,
-        [
-            sh:minCount 0 ;
-            sh:path [ sh:inversePath dcterms:hasPart ] ;
         ] ;
     shext:bnode-depth 2 ;
     altr-ext:constrainsClass dcat:Catalog ,
diff --git a/prez/renderers/renderer.py b/prez/renderers/renderer.py
index 2422e5d3..c9e243fa 100755
--- a/prez/renderers/renderer.py
+++ b/prez/renderers/renderer.py
@@ -8,6 +8,7 @@
 from fastapi.responses import StreamingResponse
 from rdflib import Graph, URIRef, RDF
 
+from prez.cache import prefix_graph
 from prez.renderers.csv_renderer import render_csv_dropdown
 from prez.renderers.json_renderer import render_json_dropdown, NotFoundError
 from prez.repositories import Repo
@@ -67,6 +68,7 @@ async def return_from_graph(
     if "anot+" in mediatype:
         non_anot_mediatype = mediatype.replace("anot+", "")
         graph = await return_annotated_rdf(graph, repo, system_repo)
+        graph.namespace_manager = prefix_graph.namespace_manager
         content = io.BytesIO(
             graph.serialize(format=non_anot_mediatype, encoding="utf-8")
         )
diff --git a/prez/repositories/pyoxigraph.py b/prez/repositories/pyoxigraph.py
index 976ebfe4..aa3954a9 100644
--- a/prez/repositories/pyoxigraph.py
+++ b/prez/repositories/pyoxigraph.py
@@ -47,10 +47,7 @@ def _handle_query_triples_results(results: pyoxigraph.QueryTriples) -> Graph:
         return g.parse(data=ntriples, format="ntriples")
 
     def _sync_rdf_query_to_graph(self, query: str) -> Graph:
-        try:
-            results = self.pyoxi_store.query(query)
-        except Exception as e:
-            print(e)
+        results = self.pyoxi_store.query(query)
         result_graph = self._handle_query_triples_results(results)
         return result_graph
 
diff --git a/prez/routers/cql.py b/prez/routers/cql.py
index 39b006ad..46a69f3c 100755
--- a/prez/routers/cql.py
+++ b/prez/routers/cql.py
@@ -5,14 +5,13 @@ from rdflib.namespace import 
URIRef from prez.dependencies import ( - get_repo, + get_data_repo, cql_post_parser_dependency, get_system_repo, cql_get_parser_dependency, ) from prez.reference_data.prez_ns import PREZ from prez.repositories import Repo -from prez.services.listings import listing_function router = APIRouter(tags=["ogcrecords"]) @@ -28,7 +27,7 @@ async def cql_post_endpoint( cql_parser: Optional[dict] = Depends(cql_post_parser_dependency), page: int = 1, per_page: int = 20, - repo: Repo = Depends(get_repo), + repo: Repo = Depends(get_data_repo), system_repo: Repo = Depends(get_system_repo), ): endpoint_uri = URIRef("https://prez.dev/endpoint/cql/post") @@ -52,7 +51,7 @@ async def cql_get_endpoint( cql_parser: Optional[dict] = Depends(cql_get_parser_dependency), page: int = 1, per_page: int = 20, - repo: Repo = Depends(get_repo), + repo: Repo = Depends(get_data_repo), system_repo: Repo = Depends(get_system_repo), ): endpoint_uri = URIRef("https://prez.dev/endpoint/cql/get") @@ -77,7 +76,7 @@ async def queryables_endpoint( cql_parser: Optional[dict] = Depends(cql_get_parser_dependency), page: int = 1, per_page: int = 20, - repo: Repo = Depends(get_repo), + repo: Repo = Depends(get_data_repo), system_repo: Repo = Depends(get_system_repo), ): endpoint_uri = URIRef(request.scope.get("route").name) diff --git a/prez/routers/identifier.py b/prez/routers/identifier.py index 3d66a6ae..9bb814b8 100755 --- a/prez/routers/identifier.py +++ b/prez/routers/identifier.py @@ -3,7 +3,7 @@ from rdflib import URIRef from rdflib.term import _is_valid_uri -from prez.dependencies import get_repo +from prez.dependencies import get_data_repo from prez.queries.identifier import get_foaf_homepage_query from prez.services.curie_functions import get_uri_for_curie_id, get_curie_id_for_uri @@ -19,7 +19,7 @@ }, ) async def get_identifier_redirect_route( - iri: str, request: Request, repo=Depends(get_repo) + iri: str, request: Request, repo=Depends(get_data_repo) ): """ The `iri` query parameter is used to return a redirect response with the value from the `foaf:homepage` lookup. diff --git a/prez/routers/management.py b/prez/routers/management.py index a51324ab..a9f4f801 100755 --- a/prez/routers/management.py +++ b/prez/routers/management.py @@ -60,7 +60,9 @@ async def return_tbox_cache(request: Request): # use pickle to deserialize the pred_obj_bytes pred_obj = pickle.loads(pred_obj_bytes) for pred, obj in pred_obj: - if pred_obj: # cache entry for a URI can be empty - i.e. no annotations found for URI + if ( + pred_obj + ): # cache entry for a URI can be empty - i.e. 
no annotations found for URI # Add the expanded triple (subject, predicate, object) to 'annotations_g' cache_g.add((subject, pred, obj)) return await return_rdf(cache_g, mediatype, profile_headers={}) diff --git a/prez/routers/object.py b/prez/routers/object.py index af6c64e8..833a3013 100755 --- a/prez/routers/object.py +++ b/prez/routers/object.py @@ -3,7 +3,7 @@ from rdflib import URIRef from starlette.responses import PlainTextResponse -from prez.dependencies import get_repo, get_system_repo +from prez.dependencies import get_data_repo, get_system_repo from prez.queries.object import object_inbound_query, object_outbound_query from prez.routers.identifier import get_iri_route from prez.services.objects import object_function @@ -40,7 +40,7 @@ async def count_route( }, }, ), - repo=Depends(get_repo), + repo=Depends(get_data_repo), ): """Get an Object's statements count based on the inbound or outbound predicate""" iri = await get_iri_route(curie) @@ -72,7 +72,7 @@ async def count_route( @router.get("/object", summary="Object", name="https://prez.dev/endpoint/system/object") async def object_route( request: Request, - repo=Depends(get_repo), + repo=Depends(get_data_repo), system_repo=Depends(get_system_repo), ): endpoint_uri = URIRef(request.scope.get("route").name) diff --git a/prez/routers/ogc_router.py b/prez/routers/ogc_router.py index 5462e44e..9f6bc514 100755 --- a/prez/routers/ogc_router.py +++ b/prez/routers/ogc_router.py @@ -4,118 +4,170 @@ from rdflib import Namespace from rdflib import URIRef -from prez.dependencies import get_repo, get_system_repo -from prez.reference_data.prez_ns import PREZ +from prez.dependencies import get_data_repo, get_system_repo, generate_search_query, cql_get_parser_dependency, \ + get_endpoint_nodeshapes, get_negotiated_pmts, get_profile_nodeshape, get_endpoint_structure +from prez.reference_data.prez_ns import PREZ, EP from prez.repositories import Repo +from prez.services.connegp_service import NegotiatedPMTs from prez.services.curie_functions import get_uri_for_curie_id -from prez.services.listings import listing_function -from prez.services.objects import object_function -from temp.grammar import IRI +from prez.services.listings import listing_function_new +from prez.services.objects import object_function, object_function_new +from prez.services.query_generation.cql import CQLParser +from prez.services.query_generation.shacl import NodeShape +from temp.grammar import IRI, ConstructQuery router = APIRouter(tags=["ogccatprez"]) OGCE = Namespace(PREZ["endpoint/extended-ogc-records/"]) +@router.get( + "/profiles", + summary="List Profiles", + name=EP["system/profile-listing"], +) @router.get( "/catalogs", summary="Catalog Listing", name=OGCE["catalog-listing"], ) -async def catalog_list( - request: Request, - page: Optional[int] = 1, - per_page: Optional[int] = 20, - search_term: Optional[str] = None, - repo: Repo = Depends(get_repo), - system_repo: Repo = Depends(get_system_repo), -): - search_term = request.query_params.get("q") - endpoint_uri = URIRef(request.scope.get("route").name) - return await listing_function( - request, - repo, - system_repo, - endpoint_uri, - hierarchy_level=1, - page=page, - per_page=per_page, - search_term=search_term, - ) - - @router.get( "/catalogs/{catalogId}/collections", summary="Collection Listing", name=OGCE["collection-listing"], ) -async def collection_listing( - request: Request, - page: Optional[int] = 1, - per_page: Optional[int] = 20, - search_term: Optional[str] = None, - repo: Repo = Depends(get_repo), - 
system_repo: Repo = Depends(get_system_repo), -): - search_term = request.query_params.get("q") - - path_node_1_uri = await get_uri_for_curie_id(request.path_params["catalogId"]) - endpoint_uri = URIRef(request.scope.get("route").name) - return await listing_function( - request, - repo, - system_repo, - endpoint_uri, - hierarchy_level=2, - path_nodes={"path_node_1": IRI(value=path_node_1_uri)}, - page=page, - per_page=per_page, - search_term=search_term, - ) - - @router.get( "/catalogs/{catalogId}/collections/{collectionId}/items", summary="Item Listing", name=OGCE["item-listing"], ) -async def item_listing( - request: Request, - page: Optional[int] = 1, - per_page: Optional[int] = 20, - search_term: Optional[str] = None, - repo: Repo = Depends(get_repo), - system_repo: Repo = Depends(get_system_repo), +async def listings( + page: Optional[int] = 1, + per_page: Optional[int] = 20, + order_by: Optional[str] = None, + order_by_direction: Optional[str] = None, + endpoint_nodeshape: NodeShape = Depends(get_endpoint_nodeshapes), + pmts: NegotiatedPMTs = Depends(get_negotiated_pmts), + endpoint_structure: tuple[str, ...] = Depends(get_endpoint_structure), + profile_nodeshape: NodeShape = Depends(get_profile_nodeshape), + cql_parser: CQLParser = Depends(cql_get_parser_dependency), + search_query: ConstructQuery = Depends(generate_search_query), + data_repo: Repo = Depends(get_data_repo), + system_repo: Repo = Depends(get_system_repo), ): - search_term = request.query_params.get("q") - path_node_1_uri = await get_uri_for_curie_id(request.path_params["collectionId"]) - path_node_2_uri = await get_uri_for_curie_id(request.path_params["catalogId"]) - endpoint_uri = URIRef(request.scope.get("route").name) - return await listing_function( - request, - repo, - system_repo, - endpoint_uri, - hierarchy_level=3, - path_nodes={ - "path_node_1": IRI(value=path_node_1_uri), - "path_node_2": IRI(value=path_node_2_uri), - }, + return await listing_function_new( + data_repo=data_repo, + system_repo=system_repo, + endpoint_nodeshape=endpoint_nodeshape, + endpoint_structure=endpoint_structure, + search_query=search_query, + cql_parser=cql_parser, + pmts=pmts, + profile_nodeshape=profile_nodeshape, page=page, per_page=per_page, - search_term=search_term, + order_by=order_by, + order_by_direction=order_by_direction, ) +######################################################################################################################## +# Object endpoints +######################################################################################################################## + +@router.get( + "/profiles/{profile_curie}", + summary="Profile", + name=EP["system/profile-object"], +) @router.get( "/catalogs/{catalogId}", summary="Catalog Object", name=OGCE["catalog-object"], ) +@router.get( + "/catalogs/{catalogId}/collections/{collectionId}", + summary="Collection Object", + name=OGCE["collection-object"], +) +@router.get( + "/catalogs/{catalogId}/collections/{collectionId}/items/{itemId}", + summary="Item Object", + name=OGCE["item-object"], +) +async def objects( + pmts: NegotiatedPMTs = Depends(get_negotiated_pmts), + endpoint_structure: tuple[str, ...] 
= Depends(get_endpoint_structure), + profile_nodeshape: NodeShape = Depends(get_profile_nodeshape), + data_repo: Repo = Depends(get_data_repo), + system_repo: Repo = Depends(get_system_repo), +): + return await object_function_new( + data_repo=data_repo, + system_repo=system_repo, + endpoint_structure=endpoint_structure, + pmts=pmts, + profile_nodeshape=profile_nodeshape, + ) + + +# async def collection_listing( +# request: Request, +# page: Optional[int] = 1, +# per_page: Optional[int] = 20, +# search_term: Optional[str] = None, +# repo: Repo = Depends(get_repo), +# system_repo: Repo = Depends(get_system_repo), +# ): +# search_term = request.query_params.get("q") +# +# path_node_1_uri = await get_uri_for_curie_id(request.path_params["catalogId"]) +# endpoint_uri = URIRef(request.scope.get("route").name) +# return await listing_function( +# request, +# repo, +# system_repo, +# endpoint_uri, +# hierarchy_level=2, +# path_nodes={"path_node_1": IRI(value=path_node_1_uri)}, +# page=page, +# per_page=per_page, +# search_term=search_term, +# ) +# +# +# +# async def item_listing( +# request: Request, +# page: Optional[int] = 1, +# per_page: Optional[int] = 20, +# search_term: Optional[str] = None, +# repo: Repo = Depends(get_repo), +# system_repo: Repo = Depends(get_system_repo), +# ): +# search_term = request.query_params.get("q") +# path_node_1_uri = await get_uri_for_curie_id(request.path_params["collectionId"]) +# path_node_2_uri = await get_uri_for_curie_id(request.path_params["catalogId"]) +# endpoint_uri = URIRef(request.scope.get("route").name) +# return await listing_function( +# request, +# repo, +# system_repo, +# endpoint_uri, +# hierarchy_level=3, +# path_nodes={ +# "path_node_1": IRI(value=path_node_1_uri), +# "path_node_2": IRI(value=path_node_2_uri), +# }, +# page=page, +# per_page=per_page, +# search_term=search_term, +# ) + async def catalog_object( - request: Request, - repo: Repo = Depends(get_repo), - system_repo: Repo = Depends(get_system_repo), + request: Request, + repo: Repo = Depends(get_data_repo), + system_repo: Repo = Depends(get_system_repo), ): request_url = request.scope["path"] endpoint_uri = URIRef(request.scope.get("route").name) @@ -125,15 +177,10 @@ async def catalog_object( ) -@router.get( - "/catalogs/{catalogId}/collections/{collectionId}", - summary="Collection Object", - name=OGCE["collection-object"], -) async def collection_object( - request: Request, - repo: Repo = Depends(get_repo), - system_repo: Repo = Depends(get_system_repo), + request: Request, + repo: Repo = Depends(get_data_repo), + system_repo: Repo = Depends(get_system_repo), ): request_url = request.scope["path"] endpoint_uri = URIRef(request.scope.get("route").name) @@ -143,15 +190,10 @@ async def collection_object( ) -@router.get( - "/catalogs/{catalogId}/collections/{collectionId}/items/{itemId}", - summary="Item Object", - name=OGCE["item-object"], -) async def item_object( - request: Request, - repo: Repo = Depends(get_repo), - system_repo: Repo = Depends(get_system_repo), + request: Request, + repo: Repo = Depends(get_data_repo), + system_repo: Repo = Depends(get_system_repo), ): request_url = request.scope["path"] endpoint_uri = URIRef(request.scope.get("route").name) diff --git a/prez/routers/profiles.py b/prez/routers/profiles.py index fdfb8b3b..581226b5 100755 --- a/prez/routers/profiles.py +++ b/prez/routers/profiles.py @@ -1,52 +1,60 @@ +from typing import Optional + from fastapi import APIRouter, Request, Depends from rdflib import URIRef -from prez.dependencies import 
get_system_repo +from prez.dependencies import get_system_repo, get_endpoint_nodeshapes, get_negotiated_pmts, get_profile_nodeshape, \ + cql_get_parser_dependency, generate_search_query, get_data_repo +from prez.repositories import Repo +from prez.services.connegp_service import NegotiatedPMTs from prez.services.curie_functions import get_uri_for_curie_id -from prez.services.listings import listing_function +from prez.services.listings import listing_function_new from prez.services.objects import object_function +from prez.services.query_generation.cql import CQLParser +from prez.services.query_generation.shacl import NodeShape +from temp.grammar import ConstructQuery router = APIRouter(tags=["Profiles"]) -@router.get( - "/profiles", - summary="List Profiles", - name="https://prez.dev/endpoint/system/profiles-listing", -) -async def profiles( - request: Request, - page: int = 1, - per_page: int = 20, - repo=Depends(get_system_repo), -): - endpoint_uri = URIRef(request.scope.get("route").name) - return await listing_function( - request=request, - repo=repo, - system_repo=repo, - endpoint_uri=endpoint_uri, - hierarchy_level=1, - page=page, - per_page=per_page, - endpoint_structure=("profiles",), - ) +# @router.get( +# "/profiles", +# summary="List Profiles", +# name="https://prez.dev/endpoint/system/profile-listing", +# ) +# async def profiles( +# request: Request, +# page: int = 1, +# per_page: int = 20, +# repo=Depends(get_system_repo), +# ): +# endpoint_uri = URIRef(request.scope.get("route").name) +# return await listing_function( +# request=request, +# repo=repo, +# system_repo=repo, +# endpoint_uri=endpoint_uri, +# hierarchy_level=1, +# page=page, +# per_page=per_page, +# endpoint_structure=("profiles",), +# ) -@router.get( - "/profiles/{profile_curie}", - summary="Profile", - name="https://prez.dev/endpoint/system/profile-object", -) -async def profile(request: Request, profile_curie: str, repo=Depends(get_system_repo)): - request_url = request.scope["path"] - endpoint_uri = URIRef(request.scope.get("route").name) - profile_uri = await get_uri_for_curie_id(profile_curie) - return await object_function( - request=request, - endpoint_uri=endpoint_uri, - uri=profile_uri, - request_url=request_url, - repo=repo, - system_repo=repo, - ) +# @router.get( +# "/profiles/{profile_curie}", +# summary="Profile", +# name="https://prez.dev/endpoint/system/profile-object", +# ) +# async def profile(request: Request, profile_curie: str, repo=Depends(get_system_repo)): +# request_url = request.scope["path"] +# endpoint_uri = URIRef(request.scope.get("route").name) +# profile_uri = await get_uri_for_curie_id(profile_curie) +# return await object_function( +# request=request, +# endpoint_uri=endpoint_uri, +# uri=profile_uri, +# request_url=request_url, +# repo=repo, +# system_repo=repo, +# ) diff --git a/prez/routers/search.py b/prez/routers/search.py index a2dc451c..2c7e9131 100755 --- a/prez/routers/search.py +++ b/prez/routers/search.py @@ -1,13 +1,17 @@ from typing import Optional from fastapi import APIRouter, Request, Depends -from rdflib import URIRef +from rdflib import RDF, Literal from rdflib.namespace import Namespace -from prez.dependencies import get_repo, get_system_repo +from prez.config import settings +from prez.dependencies import get_data_repo, get_system_repo, generate_search_query from prez.reference_data.prez_ns import PREZ +from prez.renderers.renderer import return_from_graph from prez.repositories import Repo -from prez.services.listings import listing_function +from 
prez.services.connegp_service import NegotiatedPMTs +from prez.services.link_generation import add_prez_links +from temp.grammar import ConstructQuery router = APIRouter(tags=["Search"]) OGCE = Namespace(PREZ["endpoint/extended-ogc-records/"]) @@ -22,19 +26,36 @@ async def search( request: Request, page: Optional[int] = 1, per_page: Optional[int] = 20, - search_term: Optional[str] = None, - repo: Repo = Depends(get_repo), + search_query: ConstructQuery = Depends(generate_search_query), + repo: Repo = Depends(get_data_repo), system_repo: Repo = Depends(get_system_repo), ): - term = request.query_params.get("q") - endpoint_uri = URIRef(request.scope.get("route").name) - return await listing_function( - request=request, - repo=repo, + search_query_str = search_query.to_string() + target_classes = [PREZ.SearchResult] + pmts = NegotiatedPMTs( + headers=request.headers, + params=request.query_params, + classes=target_classes, + listing=True, system_repo=system_repo, - endpoint_uri=endpoint_uri, - hierarchy_level=1, - page=page, - per_page=per_page, - search_term=term, + ) + await pmts.setup() + + item_graph, _ = await repo.send_queries([search_query_str], []) + if "anot+" in pmts.selected["mediatype"]: + await add_prez_links( + item_graph, repo, settings.endpoint_structure + ) + + # count search results - hard to do in SPARQL as the SELECT part of the query is NOT aggregated + count = len(list(item_graph.subjects(RDF.type, PREZ.SearchResult))) + item_graph.add((PREZ.SearchResult, PREZ["count"], Literal(count))) + return await return_from_graph( + item_graph, + pmts.selected["mediatype"], + pmts.selected["profile"], + pmts.generate_response_headers(), + pmts.selected["class"], + repo, + system_repo, ) diff --git a/prez/routers/sparql.py b/prez/routers/sparql.py index 5e5444a3..5fe5ab3a 100755 --- a/prez/routers/sparql.py +++ b/prez/routers/sparql.py @@ -8,7 +8,7 @@ from starlette.requests import Request from starlette.responses import StreamingResponse -from prez.dependencies import get_repo, get_system_repo +from prez.dependencies import get_data_repo, get_system_repo from prez.renderers.renderer import return_annotated_rdf from prez.repositories import Repo from prez.services.connegp_service import NegotiatedPMTs @@ -26,7 +26,7 @@ async def sparql_endpoint( query: str, request: Request, - repo: Repo = Depends(get_repo), + repo: Repo = Depends(get_data_repo), system_repo: Repo = Depends(get_system_repo), ): pmts = NegotiatedPMTs( diff --git a/prez/services/connegp_service.py b/prez/services/connegp_service.py index c41d0336..1fd9b8b6 100755 --- a/prez/services/connegp_service.py +++ b/prez/services/connegp_service.py @@ -191,7 +191,7 @@ def generate_response_headers(self) -> dict: profile_header_links = ", ".join( [f'<{self.selected["profile"]}>; rel="profile"'] + [ - f'{profile_uri}; rel="type"; title="{pmt[1]}"; token="{get_curie_id_for_uri(pmt[0])}"; anchor={pmt[0]}"' + f'{profile_uri}; rel="type"; title="{pmt[1]}"; token="{get_curie_id_for_uri(pmt[0])}"; anchor="{pmt[0]}"' for pmt in distinct_profiles ] ) @@ -203,7 +203,7 @@ def generate_response_headers(self) -> dict: ) headers = { "Content-Type": self.selected["mediatype"], - "link": profile_header_links + mediatype_header_links, + "link": profile_header_links + ", " + mediatype_header_links, } return headers diff --git a/prez/services/curie_functions.py b/prez/services/curie_functions.py index 1b20460a..0bb09455 100755 --- a/prez/services/curie_functions.py +++ b/prez/services/curie_functions.py @@ -99,5 +99,5 @@ async def 
get_uri_for_curie_id(curie_id: str): separator = settings.curie_separator curie = curie_id.replace(separator, ":") uri = prefix_graph.namespace_manager.expand_curie(curie) - curie_cache.set(curie_id, uri) + await curie_cache.set(curie_id, uri) return uri diff --git a/prez/services/link_generation.py b/prez/services/link_generation.py index 603598b8..cc3a97e5 100755 --- a/prez/services/link_generation.py +++ b/prez/services/link_generation.py @@ -11,7 +11,7 @@ from prez.repositories import Repo from prez.services.curie_functions import get_curie_id_for_uri from prez.services.query_generation.classes import get_classes -from prez.services.query_generation.node_selection.endpoint_shacl import NodeShape +from prez.services.query_generation.shacl import NodeShape from temp.grammar import * log = logging.getLogger(__name__) @@ -104,6 +104,7 @@ async def get_nodeshapes_constraining_class(klasses, uri): NodeShape( uri=ns, graph=endpoints_graph_cache, + kind="endpoint", focus_node=IRI(value=uri), ) ) @@ -121,7 +122,9 @@ async def add_links_to_graph_and_cache( quads.append( (uri, DCTERMS.identifier, Literal(curie_for_uri, datatype=PREZ.identifier), uri) ) - if members_link: #TODO need to confirm the link value doesn't match the existing link value, as multiple endpoints can deliver the same class/have different links for the same URI + if ( + members_link + ): # TODO need to confirm the link value doesn't match the existing link value, as multiple endpoints can deliver the same class/have different links for the same URI existing_members_link = list( links_ids_graph_cache.quads((uri, PREZ["members"], None, uri)) ) diff --git a/prez/services/listings.py b/prez/services/listings.py index 5f00c034..fa9438bd 100755 --- a/prez/services/listings.py +++ b/prez/services/listings.py @@ -15,102 +15,50 @@ from prez.services.connegp_service import NegotiatedPMTs from prez.services.link_generation import add_prez_links from prez.services.query_generation.classes import get_classes -from prez.services.query_generation.count import CountQuery -from prez.services.query_generation.node_selection.cql import CQLParser -from prez.services.query_generation.node_selection.endpoint_shacl import NodeShape -from prez.services.query_generation.node_selection.search import SearchQuery -from prez.services.query_generation.umbrella import PrezQueryConstructor +from prez.services.query_generation.count import CountQuery, CountQueryV2 +from prez.services.query_generation.cql import CQLParser +from prez.services.query_generation.search import SearchQuery +from prez.services.query_generation.shacl import NodeShape +from prez.services.query_generation.umbrella import merge_listing_query_grammar_inputs, PrezQueryConstructorV2 from temp.grammar import * log = logging.getLogger(__name__) -async def listing_function( - request: Request, - repo: Repo, - system_repo: Repo, - endpoint_uri: URIRef, - hierarchy_level: int, - path_nodes: Dict[str, Var | IRI] = None, - page: int = 1, - per_page: int = 20, - cql_parser: CQLParser = None, - search_term: Optional[str] = None, - endpoint_structure: Tuple[str] = settings.endpoint_structure, +async def listing_function_new( + data_repo, + system_repo, + endpoint_nodeshape, + endpoint_structure, + search_query, + cql_parser, + pmts, + profile_nodeshape, + page, + per_page, + order_by, + order_by_direction, ): - """ - # determine the relevant node selection part of the query - from SHACL, CQL, Search - # determine the relevant profile for the query - from SHACL only - # gather relevant info for the 
node selection part of the query - # gather relevant info for the profile part of the query - # build the query - """ - if not path_nodes: - path_nodes = {} - queries = [] - # determine possible SHACL node shapes for endpoint - ns_triples, ns_gpnt, target_classes = await get_shacl_node_selection( - endpoint_uri, hierarchy_level, path_nodes, repo, system_repo + query_construct_kwargs = merge_listing_query_grammar_inputs( + cql_parser=cql_parser, + endpoint_nodeshape=endpoint_nodeshape, + search_query=search_query, + page=page, + per_page=per_page, + order_by=order_by, + order_by_direction=order_by_direction, ) + profile_triples = profile_nodeshape.triples_list + profile_gpnt = profile_nodeshape.gpnt_list - if not target_classes: - # then there is no target class - i.e. it's a search *only* or CQL *only* query (not SHACL + CQL or SHACL + Search) - if cql_parser: - target_classes = frozenset([PREZ.CQLObjectList]) - elif search_term: - target_classes = frozenset([PREZ.SearchResult]) - - # determine the relevant profile - pmts = NegotiatedPMTs( - headers=request.headers, - params=request.query_params, - classes=target_classes, - listing=True, - system_repo=system_repo, - ) - await pmts.setup() - runtime_values = {} - if pmts.selected["profile"] == ALTREXT["alt-profile"]: - endpoint_uri, ns_gpnt, ns_triples = await handle_alternate_profile( - current_endpoint_uri=endpoint_uri, pmts=pmts, runtime_values=runtime_values - ) - - runtime_values["limit"] = per_page - runtime_values["offset"] = (page - 1) * per_page - - cql_triples_list = [] - cql_gpnt_list = [] - if cql_parser: - cql_triples_list = await handle_cql(cql_gpnt_list, cql_parser, cql_triples_list) - - query_constructor = PrezQueryConstructor( - runtime_values=runtime_values, - endpoint_graph=endpoints_graph_cache, - profile_graph=profiles_graph_cache, - listing_or_object="listing", - endpoint_uri=endpoint_uri, - profile_uri=pmts.selected["profile"], - endpoint_shacl_triples=ns_triples, - endpoint_shacl_gpnt=ns_gpnt, - cql_triples=cql_triples_list, - cql_gpnt=cql_gpnt_list, + queries = [] + main_query = PrezQueryConstructorV2( + profile_triples=profile_triples, + profile_gpnt=profile_gpnt, + **query_construct_kwargs ) + queries.append(main_query.to_string()) - query_constructor.generate_sparql() - main_query = query_constructor.sparql - - if search_term: - subselect = query_constructor.inner_select - search_query = SearchQuery( - search_term=search_term, - pred_vals=settings.label_predicates, - additional_ss=subselect, - limit=runtime_values["limit"], - offset=runtime_values["offset"], - ).render() - queries.append(search_query) - else: - queries.append(main_query) if ( pmts.requested_mediatypes is not None and pmts.requested_mediatypes[0][0] == "application/sparql-query" @@ -118,26 +66,26 @@ async def listing_function( return PlainTextResponse(queries[0], media_type="application/sparql-query") # add a count query if it's an annotated mediatype - if "anot+" in pmts.selected["mediatype"] and not search_term: - subselect = copy.deepcopy(query_constructor.inner_select) - count_query = CountQuery(subselect=subselect).render().to_string() + if "anot+" in pmts.selected["mediatype"] and not search_query: + subselect = copy.deepcopy(main_query.inner_select) + count_query = CountQueryV2(original_subselect=subselect).to_string() queries.append(count_query) if pmts.selected["profile"] == ALTREXT["alt-profile"]: query_repo = system_repo - endpoint_structure = ("profiles",) + # endpoint_structure = ("profiles",) else: - query_repo = repo - 
endpoint_structure = endpoint_structure + query_repo = data_repo + # endpoint_structure = settings.endpoint_structure item_graph, _ = await query_repo.send_queries(queries, []) if "anot+" in pmts.selected["mediatype"]: - await add_prez_links( # TODO can this go under return_from_graph? + await add_prez_links( item_graph, query_repo, endpoint_structure ) # count search results - hard to do in SPARQL as the SELECT part of the query is NOT aggregated - if search_term: + if search_query: count = len(list(item_graph.subjects(RDF.type, PREZ.SearchResult))) item_graph.add((PREZ.SearchResult, PREZ["count"], Literal(count))) return await return_from_graph( @@ -146,23 +94,167 @@ async def listing_function( pmts.selected["profile"], pmts.generate_response_headers(), pmts.selected["class"], - repo, + data_repo, system_repo, ) -async def handle_cql(cql_gpnt_list, cql_parser, cql_triples_list): - cql_parser.parse() - cql_select_ggps = cql_parser.ggps_inner_select - if cql_select_ggps.triples_block: - cql_triples_list = cql_select_ggps.triples_block.triples - if cql_select_ggps.graph_patterns_or_triples_blocks: - for pattern in cql_select_ggps.graph_patterns_or_triples_blocks: - if isinstance(pattern, TriplesBlock): - cql_triples_list += pattern.triples - elif isinstance(pattern, GraphPatternNotTriples): - cql_gpnt_list.append(pattern) - return cql_triples_list +# async def listing_function( +# request: Request, +# repo: Repo, +# system_repo: Repo, +# endpoint_uri: URIRef, +# hierarchy_level: int, +# path_nodes: Dict[str, Var | IRI] = None, +# page: int = 1, +# per_page: int = 20, +# cql_parser: CQLParser = None, +# search_term: Optional[str] = None, +# endpoint_structure: Tuple[str] = settings.endpoint_structure, +# ): +# """ +# # determine the relevant node selection part of the query - from SHACL, CQL, Search +# # determine the relevant profile for the query - from SHACL only +# # gather relevant info for the node selection part of the query +# # gather relevant info for the profile part of the query +# # build the query +# """ +# if not path_nodes: +# path_nodes = {} +# queries = [] +# # determine possible SHACL node shapes for endpoint +# ns_triples, ns_gpnt, target_classes = await get_shacl_node_selection( +# endpoint_uri, hierarchy_level, path_nodes, repo, system_repo +# ) +# +# if not target_classes: +# # then there is no target class - i.e. 
it's a search *only* or CQL *only* query (not SHACL + CQL or SHACL + Search) +# if cql_parser: +# target_classes = frozenset([PREZ.CQLObjectList]) +# elif search_term: +# target_classes = frozenset([PREZ.SearchResult]) +# +# # determine the relevant profile +# pmts = NegotiatedPMTs( +# headers=request.headers, +# params=request.query_params, +# classes=target_classes, +# listing=True, +# system_repo=system_repo, +# ) +# await pmts.setup() +# runtime_values = {} +# if pmts.selected["profile"] == ALTREXT["alt-profile"]: +# endpoint_uri, ns_gpnt, ns_triples = await handle_alternate_profile( +# current_endpoint_uri=endpoint_uri, pmts=pmts, runtime_values=runtime_values +# ) +# +# runtime_values["limit"] = per_page +# runtime_values["offset"] = (page - 1) * per_page +# +# cql_triples_list = [] +# cql_gpnt_list = [] +# if cql_parser: +# cql_triples_list = await handle_cql(cql_gpnt_list, cql_parser, cql_triples_list) +# +# # query_construct_kwargs = merge_query_grammar_inputs( +# # cql_triples_list, +# # cql_gpnt_list, +# # ns_triples, +# # ns_gpnt, +# # +# # ) +# # +# # cql_select_triples: Optional[List[SimplifiedTriple]] = None, +# # cql_select_gpnt: Optional[List[GraphPatternNotTriples]] = None, +# # endpoint_select_triples: Optional[List[SimplifiedTriple]] = None, +# # endpoint_select_gpnt: Optional[List[GraphPatternNotTriples]] = None, +# # search_query: Optional[SearchQueryRegex] = None, +# # limit: Optional[int] = None, +# # offset: Optional[int] = None, +# # order_by: Optional[str] = None, +# # order_by_direction: Optional[bool] = None, +# +# query_constructor = PrezQueryConstructor( +# runtime_values=runtime_values, +# endpoint_graph=endpoints_graph_cache, +# profile_graph=profiles_graph_cache, +# listing_or_object="listing", +# endpoint_uri=endpoint_uri, +# profile_uri=pmts.selected["profile"], +# endpoint_shacl_triples=ns_triples, +# endpoint_shacl_gpnt=ns_gpnt, +# cql_triples=cql_triples_list, +# cql_gpnt=cql_gpnt_list, +# ) +# +# query_constructor.generate_sparql() +# main_query = query_constructor.sparql +# +# if search_term: +# subselect = query_constructor.inner_select +# search_query = SearchQuery( +# search_term=search_term, +# pred_vals=settings.label_predicates, +# additional_ss=subselect, +# limit=runtime_values["limit"], +# offset=runtime_values["offset"], +# ).render() +# queries.append(search_query) +# else: +# queries.append(main_query) +# if ( +# pmts.requested_mediatypes is not None +# and pmts.requested_mediatypes[0][0] == "application/sparql-query" +# ): +# return PlainTextResponse(queries[0], media_type="application/sparql-query") +# +# # add a count query if it's an annotated mediatype +# if "anot+" in pmts.selected["mediatype"] and not search_term: +# subselect = copy.deepcopy(query_constructor.inner_select) +# count_query = CountQuery(subselect=subselect).render().to_string() +# queries.append(count_query) +# +# if pmts.selected["profile"] == ALTREXT["alt-profile"]: +# query_repo = system_repo +# endpoint_structure = ("profiles",) +# else: +# query_repo = repo +# endpoint_structure = endpoint_structure +# +# item_graph, _ = await query_repo.send_queries(queries, []) +# if "anot+" in pmts.selected["mediatype"]: +# await add_prez_links( # TODO can this go under return_from_graph? 
+# item_graph, query_repo, endpoint_structure +# ) +# +# # count search results - hard to do in SPARQL as the SELECT part of the query is NOT aggregated +# if search_term: +# count = len(list(item_graph.subjects(RDF.type, PREZ.SearchResult))) +# item_graph.add((PREZ.SearchResult, PREZ["count"], Literal(count))) +# return await return_from_graph( +# item_graph, +# pmts.selected["mediatype"], +# pmts.selected["profile"], +# pmts.generate_response_headers(), +# pmts.selected["class"], +# repo, +# system_repo, +# ) +# +# +# async def handle_cql(cql_gpnt_list, cql_parser, cql_triples_list): +# cql_parser.parse() +# cql_select_ggps = cql_parser.ggps_inner_select +# if cql_select_ggps.triples_block: +# cql_triples_list = cql_select_ggps.triples_block.triples +# if cql_select_ggps.graph_patterns_or_triples_blocks: +# for pattern in cql_select_ggps.graph_patterns_or_triples_blocks: +# if isinstance(pattern, TriplesBlock): +# cql_triples_list += pattern.triples +# elif isinstance(pattern, GraphPatternNotTriples): +# cql_gpnt_list.append(pattern) +# return cql_triples_list async def handle_alternate_profile(current_endpoint_uri, pmts, runtime_values): @@ -175,11 +267,12 @@ async def handle_alternate_profile(current_endpoint_uri, pmts, runtime_values): ns = NodeShape( uri=nodeshape_uri, graph=endpoints_graph_cache, + kind="endpoint", path_nodes={"path_node_1": IRI(value=pmts.selected["class"])}, ) ns_triples = ns.triples_list ns_gpnt = ns.gpnt_list - new_endpoint_uri = URIRef("https://prez.dev/endpoint/system/alt-profiles-listing") + new_endpoint_uri = URIRef("https://prez.dev/endpoint/system/alt-profile-listing") runtime_values["selectedClass"] = pmts.selected["class"] return new_endpoint_uri, ns_gpnt, ns_triples @@ -211,7 +304,10 @@ async def get_shacl_node_selection( path_node_classes[pn] = await get_classes(URIRef(uri.value), repo) nodeshapes = [ NodeShape( - uri=URIRef(ns), graph=endpoints_graph_cache, path_nodes=path_nodes + uri=URIRef(ns), + graph=endpoints_graph_cache, + kind="endpoint", + path_nodes=path_nodes, ) for ns in distinct_ns ] @@ -242,7 +338,10 @@ async def get_shacl_node_selection( path_nodes = {} if node_selection_shape: ns = NodeShape( - uri=node_selection_shape, graph=endpoints_graph_cache, path_nodes=path_nodes + uri=node_selection_shape, + graph=endpoints_graph_cache, + kind="endpoint", + path_nodes=path_nodes, ) ns_triples = ns.triples_list ns_gpnt = ns.gpnt_list diff --git a/prez/services/objects.py b/prez/services/objects.py index 3359ba22..287a1f14 100755 --- a/prez/services/objects.py +++ b/prez/services/objects.py @@ -12,14 +12,45 @@ from prez.repositories import Repo from prez.services.connegp_service import NegotiatedPMTs from prez.services.link_generation import add_prez_links -from prez.services.listings import listing_function from prez.services.query_generation.classes import get_classes -from prez.services.query_generation.umbrella import PrezQueryConstructor +from prez.services.query_generation.umbrella import merge_listing_query_grammar_inputs, PrezQueryConstructorV2 from temp.grammar import IRI log = logging.getLogger(__name__) +async def object_function_new( + data_repo, + system_repo, + endpoint_structure, + pmts, + profile_nodeshape, +): + profile_triples = profile_nodeshape.triples_list + profile_gpnt = profile_nodeshape.gpnt_list + query = PrezQueryConstructorV2( + profile_triples=profile_triples, + profile_gpnt=profile_gpnt + ).to_string() + + if pmts.requested_mediatypes[0][0] == "application/sparql-query": + return PlainTextResponse(query, 
media_type="application/sparql-query") + + item_graph, _ = await data_repo.send_queries([query], []) + if "anot+" in pmts.selected["mediatype"]: + await add_prez_links(item_graph, data_repo, endpoint_structure) + return await return_from_graph( + item_graph, + pmts.selected["mediatype"], + pmts.selected["profile"], + pmts.generate_response_headers(), + pmts.selected["class"], + data_repo, + system_repo, + ) + + + async def object_function( request: Request, endpoint_uri: URIRef, @@ -52,19 +83,20 @@ async def object_function( listing_or_object = "object" ns_gpnt = [] ns_triples = [] - query_constructor = PrezQueryConstructor( - runtime_values=runtime_values, - endpoint_graph=endpoints_graph_cache, - profile_graph=profiles_graph_cache, - listing_or_object=listing_or_object, - focus_node=IRI(value=uri), - endpoint_uri=endpoint_uri, - profile_uri=pmts.selected["profile"], - endpoint_shacl_triples=ns_triples, - endpoint_shacl_gpnt=ns_gpnt, - ) - query_constructor.generate_sparql() - query = query_constructor.sparql + # query_constructor = PrezQueryConstructor( + # runtime_values=runtime_values, + # endpoint_graph=endpoints_graph_cache, + # profile_graph=profiles_graph_cache, + # listing_or_object=listing_or_object, + # focus_node=IRI(value=uri), + # endpoint_uri=endpoint_uri, + # profile_uri=pmts.selected["profile"], + # endpoint_shacl_triples=ns_triples, + # endpoint_shacl_gpnt=ns_gpnt, + # ) + # query_constructor.generate_sparql() + # query = query_constructor.sparql + query = "to be removed" try: if pmts.requested_mediatypes[0][0] == "application/sparql-query": diff --git a/prez/services/query_generation/annotations.py b/prez/services/query_generation/annotations.py index cfbd7f90..a55cac19 100644 --- a/prez/services/query_generation/annotations.py +++ b/prez/services/query_generation/annotations.py @@ -1,3 +1,4 @@ +from functools import lru_cache from typing import List from prez.config import settings @@ -26,7 +27,7 @@ def __init__(self, terms: List[IRI]): prez_anot_var = Var(value="prezAnotProp") prop_var = Var(value="prop") - all_annotation_tuples = get_prez_annotation_tuples() + all_annotation_tuples = self.get_prez_annotation_tuples() props_gpnt = GraphPatternNotTriples( content=InlineData( data_block=DataBlock( @@ -103,28 +104,30 @@ def __init__(self, terms: List[IRI]): ) ) ) - solution_modifier = SolutionModifier() super().__init__( construct_template=construct_template, where_clause=where_clause, - solution_modifier=solution_modifier, + solution_modifier=SolutionModifier(), ) - -def get_prez_annotation_tuples(): - label_tuples = [ - (label_prop, PREZ.label) for label_prop in settings.label_predicates - ] - description_tuples = [ - (description_prop, PREZ.description) - for description_prop in settings.description_predicates - ] - provenance_tuples = [ - (provenance_prop, PREZ.provenance) - for provenance_prop in settings.provenance_predicates - ] - other_tuples = [ - (other_prop, PREZ.other) for other_prop in settings.other_predicates - ] - all_tuples = label_tuples + description_tuples + provenance_tuples + other_tuples - return all_tuples + @staticmethod + @lru_cache(maxsize=None) + def get_prez_annotation_tuples(): + label_tuples = [ + (label_prop, PREZ.label) for label_prop in settings.label_predicates + ] + description_tuples = [ + (description_prop, PREZ.description) + for description_prop in settings.description_predicates + ] + provenance_tuples = [ + (provenance_prop, PREZ.provenance) + for provenance_prop in settings.provenance_predicates + ] + other_tuples = [ + 
(other_prop, PREZ.other) for other_prop in settings.other_predicates + ] + all_tuples = ( + label_tuples + description_tuples + provenance_tuples + other_tuples + ) + return all_tuples diff --git a/prez/services/query_generation/count.py b/prez/services/query_generation/count.py index 18f02c9a..2bf66e89 100755 --- a/prez/services/query_generation/count.py +++ b/prez/services/query_generation/count.py @@ -113,7 +113,7 @@ def __init__(self, original_subselect: SubSelect): construct_triples=ConstructTriples( triples=[ SimplifiedTriple( - subject=BNode(), + subject=BlankNode(value=Anon()), predicate=IRI(value="https://prez.dev/count"), object=Var(value="count"), ) @@ -153,6 +153,7 @@ def __init__(self, original_subselect: SubSelect): super().__init__( construct_template=construct_template, where_clause=where_clause, + solution_modifier=SolutionModifier(), ) diff --git a/prez/services/query_generation/node_selection/cql.py b/prez/services/query_generation/cql.py similarity index 100% rename from prez/services/query_generation/node_selection/cql.py rename to prez/services/query_generation/cql.py diff --git a/prez/services/query_generation/node_selection/endpoint_shacl.py b/prez/services/query_generation/node_selection/endpoint_shacl.py deleted file mode 100644 index ea78eb2f..00000000 --- a/prez/services/query_generation/node_selection/endpoint_shacl.py +++ /dev/null @@ -1,281 +0,0 @@ -from __future__ import annotations - -from string import Template -from typing import List, Optional, Union, Any, Dict - -from pydantic import BaseModel -from rdflib import URIRef, BNode, Graph -from rdflib.collection import Collection -from rdflib.namespace import SH, RDF -from rdflib.term import Node - -from prez.reference_data.prez_ns import ONT -from temp.grammar import * - - -class Shape(BaseModel): - class Config: - arbitrary_types_allowed = True - - def __init__(self, **data: Any): - super().__init__(**data) - self.triples_list = [] - self.gpnt_list = [] - self.from_graph() - self.to_grammar() - - def from_graph(self): - raise NotImplementedError("Subclasses must implement this method.") - - def to_grammar(self): - raise NotImplementedError("Subclasses must implement this method.") - - -class NodeShape(Shape): - uri: URIRef - graph: Graph - focus_node: Var | IRI = Var(value="focus_node") - targetNode: Optional[URIRef] = None - targetClasses: Optional[List[Node]] = [] - propertyShapesURIs: Optional[List[Node]] = [] - target: Optional[Node] = None - rules: Optional[List[Node]] = [] - propertyShapes: Optional[List[PropertyShape]] = [] - triples_list: Optional[List[SimplifiedTriple]] = [] - gpnt_list: Optional[List[GraphPatternNotTriples]] = [] - path_nodes: Optional[Dict[str, Var | IRI]] = {} - classes_at_len: Optional[Dict[str, List[URIRef]]] = {} - hierarchy_level: Optional[int] = None - select_template: Optional[str] = None - - def from_graph(self): # TODO this can be a SPARQL select against the system graph. 
- self.targetNode = next(self.graph.objects(self.uri, SH.targetNode), None) - self.targetClasses = list(self.graph.objects(self.uri, SH.targetClass)) - self.propertyShapesURIs = list(self.graph.objects(self.uri, SH.property)) - self.target = next(self.graph.objects(self.uri, SH.target), None) - self.rules = list(self.graph.objects(self.uri, SH.rule)) - self.propertyShapes = [ - PropertyShape( - uri=ps_uri, - graph=self.graph, - focus_node=self.focus_node, - path_nodes=self.path_nodes, - ) - for ps_uri in self.propertyShapesURIs - ] - self.hierarchy_level = next( - self.graph.objects(self.uri, ONT.hierarchyLevel), None - ) - if not self.hierarchy_level: - raise ValueError("No hierarchy level found") - - def to_grammar(self): - if self.targetNode: - pass # do not need to add any specific triples or the like - if self.targetClasses: - self._process_class_targets() - if self.propertyShapes: - self._process_property_shapes() - if self.target: - self._process_target() - # rules used to construct triples only in the context of sh:target/sh:sparql at present. - if self.rules: - self._process_rules() - - def _process_class_targets(self): - if len(self.targetClasses) == 1: - self.triples_list.append( - SimplifiedTriple( - subject=self.focus_node, - predicate=IRI(value=RDF.type), - object=IRI(value=self.targetClasses[0]), - ) - ) - elif len(self.targetClasses) > 1: - self.triples_list.append( - SimplifiedTriple( - subject=self.focus_node, - predicate=IRI(value=RDF.type), - object=Var(value=f"focus_classes"), - ) - ) - dbvs = [ - DataBlockValue(value=IRI(value=klass)) for klass in self.targetClasses - ] - self.gpnt_list.append( - GraphPatternNotTriples( - content=InlineData( - data_block=DataBlock( - block=InlineDataOneVar( - variable=Var(value=f"focus_classes"), - datablockvalues=dbvs, - ) - ) - ) - ) - ) - else: - raise ValueError("No target classes found") - - def _process_property_shapes(self): - for shape in self.propertyShapes: - self.triples_list.extend(shape.triples_list) - self.gpnt_list.extend(shape.gpnt_list) - self.path_nodes = self.path_nodes | shape.path_nodes - self.classes_at_len = self.classes_at_len | shape.classes_at_len - # deduplicate - self.triples_list = list(set(self.triples_list)) - - def _process_target(self): - self.select_statement = Template( - str(self.endpoint_graph.value(self.target, SH.select, default=None)) - ) - - def _process_rules(self): - pass - - -class PropertyShape(Shape): - uri: URIRef | BNode # URI of the shape - graph: Graph - focus_node: IRI | Var = Var(value="focus_node") - # inputs - property_paths: Optional[List[PropertyPath]] = None - or_klasses: Optional[List[URIRef]] = None - # outputs - grammar: Optional[GroupGraphPatternSub] = None - triples_list: Optional[List[SimplifiedTriple]] = None - gpnt_list: Optional[List[GraphPatternNotTriples]] = None - path_nodes: Optional[Dict[str, Var | IRI]] = {} - classes_at_len: Optional[Dict[str, List[URIRef]]] = {} - _select_vars: Optional[List[Var]] = None - - def from_graph(self): - self.property_paths = [] - _single_class = next(self.graph.objects(self.uri, SH["class"]), None) - if _single_class: - self.or_klasses = [URIRef(_single_class)] - - # look for sh:or statements and process classes from these NB only sh:or / sh:class is handled at present. 
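
# For orientation, the class-target handling above (retained in the new shacl.py
# further down) maps sh:targetClass onto SPARQL roughly as follows -- a hand-written
# sketch with illustrative IRIs, not captured Prez output:
#
#   one target class:
#       ?focus_node a <https://example.com/ClassA> .
#
#   several target classes (rendered via the InlineData/VALUES grammar objects):
#       ?focus_node a ?focus_classes .
#       VALUES ?focus_classes { <https://example.com/ClassA> <https://example.com/ClassB> }
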
- or_classes = next(self.graph.objects(self.uri, SH["or"]), None) - if or_classes: - or_bns = list(Collection(self.graph, or_classes)) - or_triples = list(self.graph.triples_choices((or_bns, SH["class"], None))) - self.or_klasses = [URIRef(klass) for _, _, klass in or_triples] - - pp = next(self.graph.objects(self.uri, SH.path)) - if isinstance(pp, URIRef): - self.property_paths.append(Path(value=pp)) - elif isinstance(pp, BNode): - self._process_property_path(pp, self.graph) - - def _process_property_path(self, pp, graph): - if isinstance(pp, URIRef): - self.property_paths.append(Path(value=pp)) - elif isinstance(pp, BNode): - pred_objects_gen = graph.predicate_objects(subject=pp) - bn_pred, bn_obj = next(pred_objects_gen, (None, None)) - if bn_obj == SH.union: - pass - elif bn_pred == SH.inversePath: - self.property_paths.append(InversePath(value=bn_obj)) - # elif bn_pred == SH.alternativePath: - # predicates.extend(list(Collection(self.profile_graph, bn_obj))) - else: # sequence paths - paths = list(Collection(graph, pp)) - for path in paths: - self._process_property_path(path, graph) - - def to_grammar(self): - - # set up the path nodes - either from supplied values or set as variables - for i, property_path in enumerate(self.property_paths): - path_node_str = f"path_node_{i+1}" - if path_node_str not in self.path_nodes: - self.path_nodes[path_node_str] = Var(value=path_node_str) - - self.triples_list = [] - len_pp = len(self.property_paths) - # sh:class applies to the end of sequence paths - path_node_term = self.path_nodes[f"path_node_{len_pp}"] - - # useful for determining which endpoint property shape should be used when a request comes in on endpoint - self.classes_at_len[f"path_node_{len_pp}"] = self.or_klasses - - if self.or_klasses: - if len(self.or_klasses) == 1: - self.triples_list.append( - SimplifiedTriple( - subject=path_node_term, - predicate=IRI(value=RDF.type), - object=IRI(value=self.or_klasses[0]), - ) - ) - else: - self.triples_list.append( - SimplifiedTriple( - subject=path_node_term, - predicate=IRI(value=RDF.type), - object=Var(value=f"path_node_classes_{len_pp}"), - ) - ) - dbvs = [ - DataBlockValue(value=IRI(value=klass)) for klass in self.or_klasses - ] - self.gpnt_list.append( - GraphPatternNotTriples( - content=InlineData( - data_block=DataBlock( - block=InlineDataOneVar( - variable=Var(value=f"path_node_classes_{len_pp}"), - datablockvalues=dbvs, - ) - ) - ) - ) - ) - - if self.property_paths: - for i, property_path in enumerate(self.property_paths): - - path_node_var = self.path_nodes[f"path_node_{i + 1}"] - if i == 0: - focus_or_path_node = self.focus_node - else: - focus_or_path_node = self.path_nodes[f"path_node_{i}"] - if isinstance(property_path, Path): - # vanilla property path - self.triples_list.append( - SimplifiedTriple( - subject=focus_or_path_node, - predicate=IRI(value=property_path.value), - object=path_node_var, - ) - ) - elif isinstance(property_path, InversePath): - self.triples_list.append( - SimplifiedTriple( - subject=path_node_var, - predicate=IRI(value=property_path.value), - object=focus_or_path_node, - ) - ) - - -class PropertyPath(BaseModel): - class Config: - arbitrary_types_allowed = True - - uri: Optional[URIRef] = None - - -class Path(PropertyPath): - value: URIRef - - -class SequencePath(PropertyPath): - value: List[PropertyPath] - - -class InversePath(PropertyPath): - value: URIRef diff --git a/prez/services/query_generation/node_selection/search.py b/prez/services/query_generation/node_selection/search.py deleted file mode 
100755 index acb0b171..00000000 --- a/prez/services/query_generation/node_selection/search.py +++ /dev/null @@ -1,323 +0,0 @@ -from typing import Optional, List - -from pydantic import BaseModel -from rdflib import RDF, URIRef - -from prez.reference_data.prez_ns import PREZ -from temp.grammar import * - - -class SearchQuery(BaseModel): - class Config: - arbitrary_types_allowed = True - - search_term: str - pred_vals: List[URIRef] - additional_ss: Optional[SubSelect] = None - limit: int = 10 - offset: int = 0 - - sr_uri: Var = Var(value="focus_node") - pred: Var = Var(value="pred") - match: Var = Var(value="match") - weight: Var = Var(value="weight") - hashid: Var = Var(value="hashID") - w: Var = Var(value="w") - - @property - def sr_uri_pe(self): - return PrimaryExpression(content=self.sr_uri) - - @property - def pred_pe(self): - return PrimaryExpression(content=self.pred) - - @property - def match_pe(self): - return PrimaryExpression(content=self.match) - - @property - def weight_pe(self): - return PrimaryExpression(content=self.weight) - - @property - def w_pe(self): - return PrimaryExpression(content=self.w) - - @property - def inner_select_vars(self): - return { - "one": { - "weight_val": 100, - "function": "LCASE", - "prefix": "", - "case_insensitive": None, - }, - "two": { - "weight_val": 20, - "function": "REGEX", - "prefix": "^", - "case_insensitive": True, - }, - "three": { - "weight_val": 10, - "function": "REGEX", - "prefix": "", - "case_insensitive": True, - }, - } - - def render(self): - cq = self.create_construct_query() - return "".join(part for part in cq.render()) - - def create_construct_query(self): - cq = ConstructQuery( - construct_template=self.create_construct_template(), - where_clause=WhereClause( - group_graph_pattern=GroupGraphPattern( - content=self.create_outer_subselect() - ) - ), - solution_modifier=SolutionModifier(), - ) - return cq - - def create_construct_template(self): - """ - ?hashID a prez:SearchResult ; - prez:searchResultWeight ?weight ; - prez:searchResultPredicate ?predicate ; - prez:searchResultMatch ?match ; - prez:searchResultURI ?search_result_uri . 
- """ - search_result_triples = [ - SimplifiedTriple( - subject=self.hashid, - predicate=IRI(value=PREZ.searchResultWeight), - object=self.weight, - ), - SimplifiedTriple( - subject=self.hashid, - predicate=IRI(value=PREZ.searchResultPredicate), - object=self.pred, - ), - SimplifiedTriple( - subject=self.hashid, - predicate=IRI(value=PREZ.searchResultMatch), - object=self.match, - ), - SimplifiedTriple( - subject=self.hashid, - predicate=IRI(value=PREZ.searchResultURI), - object=self.sr_uri, - ), - SimplifiedTriple( - subject=self.hashid, - predicate=IRI(value=RDF.type), - object=IRI(value=PREZ.SearchResult), - ), - ] - ct = ConstructTemplate( - construct_triples=ConstructTriples(triples=search_result_triples) - ) - return ct - - def create_outer_subselect(self): - outer_ss = SubSelect( - select_clause=self.create_outer_select_clause(), - where_clause=self.create_outer_where_clause(), - solution_modifier=self.create_solution_modifier(), - ) - return outer_ss - - def create_outer_select_clause(self): - """ - SELECT ?focus_node ?predicate ?match ?weight (URI(CONCAT("urn:hash:", SHA256(CONCAT(STR(?focus_node), STR(?predicate), STR(?match), STR(?weight))))) AS ?hashID) - """ - expressions = [self.sr_uri_pe, self.pred_pe, self.match_pe, self.weight_pe] - str_builtins = [BuiltInCall.create_with_one_expr("STR", e) for e in expressions] - str_expressions = [PrimaryExpression(content=b) for b in str_builtins] - inner_concat = BuiltInCall.create_with_n_expr("CONCAT", str_expressions) - sha256_expr = PrimaryExpression( - content=BuiltInCall.create_with_one_expr( - "SHA256", PrimaryExpression(content=inner_concat) - ) - ) - urn_literal = PrimaryExpression(content=RDFLiteral(value="urn:hash:")) - outer_concat = BuiltInCall.create_with_n_expr( - "CONCAT", [urn_literal, sha256_expr] - ) - uri_expr = BuiltInCall.create_with_one_expr( - "URI", PrimaryExpression(content=outer_concat) - ) - uri_pr_exp = PrimaryExpression(content=uri_expr) - uri_exp = Expression.from_primary_expr(uri_pr_exp) - sc = SelectClause( - variables_or_all=[ - self.sr_uri, - self.pred, - self.match, - self.weight, - (uri_exp, self.hashid), - ] - ) - return sc - - def create_outer_where_clause(self): - """Wrapper WHERE clause""" - inner_ss = self.create_inner_subselect() - inner_ggp = GroupGraphPattern(content=inner_ss) - outer_wc = WhereClause(group_graph_pattern=inner_ggp) - return outer_wc - - def create_solution_modifier(self): - """ORDER BY DESC(?weight)""" - ocond = OrderCondition(var=self.weight, direction="DESC") - oclause = OrderClause(conditions=[ocond]) - limit = LimitClause(limit=self.limit) - offset = OffsetClause(offset=self.offset) - loc = LimitOffsetClauses(limit_clause=limit, offset_clause=offset) - sm = SolutionModifier(order_by=oclause, limit_offset=loc) - return sm - - def create_inner_subselect(self): - inner_ss = SubSelect( - select_clause=self.create_inner_select_clause(), - where_clause=self.create_inner_where_clause(), - solution_modifier=self.create_group_by_solution_modifier(), - ) - return inner_ss - - def create_group_by_solution_modifier(self): - """ - GROUP BY ?focus_node ?predicate ?match - """ - gc_sr_uri = GroupCondition(condition=self.sr_uri) - gc_pred = GroupCondition(condition=self.pred) - gc_match = GroupCondition(condition=self.match) - gc = GroupClause(group_conditions=[gc_sr_uri, gc_pred, gc_match]) - sm = SolutionModifier(group_by=gc) - return sm - - def create_inner_select_clause(self): - """ - SELECT ?focus_node ?predicate ?match (SUM(?w) AS ?weight) - """ - pr_exp = 
PrimaryExpression(content=self.w) - exp = Expression.from_primary_expr(pr_exp) - sum_agg = Aggregate(function_name="SUM", expression=exp) - sum_bic = BuiltInCall(other_expressions=sum_agg) - sum_pr_exp = PrimaryExpression(content=sum_bic) - sum_exp = Expression.from_primary_expr(sum_pr_exp) - sc = SelectClause( - variables_or_all=[ - self.sr_uri, - self.pred, - self.match, - (sum_exp, self.weight), - ] - ) - return sc - - def create_inner_where_clause(self): - # outer group graph pattern sub - iri_pred_vals = [IRI(value=p) for p in self.pred_vals] - iri_db_vals = [DataBlockValue(value=p) for p in iri_pred_vals] - ildov = InlineDataOneVar(variable=self.pred, datablockvalues=iri_db_vals) - ild = InlineData(data_block=DataBlock(block=ildov)) - gpnt_ild = GraphPatternNotTriples(content=ild) - - # union statements - gougp = self.create_union_of_inner_ggps() - gpnt_gougp = GraphPatternNotTriples(content=gougp) - - outer_ggps = GroupGraphPatternSub( - graph_patterns_or_triples_blocks=[gpnt_ild, gpnt_gougp] - ) - outer_ggp = GroupGraphPattern(content=outer_ggps) - wc = WhereClause(group_graph_pattern=outer_ggp) - return wc - - def create_union_of_inner_ggps(self): - # inner group graph patterns (unioned statements) - inner_select_ggp_list = [] - for var_dict in self.inner_select_vars.values(): - inner_select_ggp_list.append(self.create_inner_ggp(**var_dict)) - gougp = GroupOrUnionGraphPattern(group_graph_patterns=inner_select_ggp_list) - return gougp - - def create_inner_ggp( - self, - weight_val: int, - function: str, - prefix: str, - case_insensitive: Optional[bool], - ) -> GroupGraphPattern: - ggp = GroupGraphPattern(content=GroupGraphPatternSub()) - - # triple pattern e.g. (?focus_node ?pred ?match) - ggp.content.add_triple( - SimplifiedTriple( - subject=self.sr_uri, - predicate=self.pred, - object=self.match, - ) - ) - - # add additional focus node selection e.g. from endpoint definitions - if self.additional_ss: - if isinstance(self.additional_ss, SubSelectString): - ss_ggp = GroupGraphPattern(content=self.additional_ss) - gougp = GroupOrUnionGraphPattern(group_graph_patterns=[ss_ggp]) - gpnt = GraphPatternNotTriples(content=gougp) - ggp.content.add_pattern(gpnt) - elif isinstance(self.additional_ss, SubSelect): - ss_ggps = self.additional_ss.where_clause.group_graph_pattern.content - ss_tb = ss_ggps.triples_block - ss_gpotb = ss_ggps.graph_patterns_or_triples_blocks - if ss_tb: - ggp.content.add_pattern(ss_tb) - if ss_gpotb: - for pattern in ss_gpotb: - ggp.content.add_pattern(pattern) - - # bind e.g. BIND(100 AS ?w) - bind_for_w = Bind( - expression=Expression.from_primary_expr( - PrimaryExpression(content=NumericLiteral(value=weight_val)) - ), - var=Var(value="w"), - ) - bind_gpnt = GraphPatternNotTriples(content=bind_for_w) - ggp.content.add_pattern(bind_gpnt) - - # FILTER (REGEX(?match, "^$term", "i")) - pe_st = PrimaryExpression(content=RDFLiteral(value=(prefix + self.search_term))) - if function == "REGEX": - e_ci = None - if case_insensitive: - pe_ci = PrimaryExpression(content=RDFLiteral(value="i")) - e_ci = Expression.from_primary_expr(pe_ci) - regex_expression = RegexExpression( - text_expression=Expression.from_primary_expr( - self.match_pe - ), # Expression for the text - pattern_expression=Expression.from_primary_expr(pe_st), # Search Term - flags_expression=e_ci, # Case insensitivity - ) - bic = BuiltInCall(other_expressions=regex_expression) - cons = Constraint(content=bic) - filter_expr = Filter(constraint=cons) - # filter e.g. 
FILTER(LCASE(?match) = "search term") - elif function == "LCASE": - bifc = BuiltInCall(function_name=function, arguments=[self.match]) - pe_focus = PrimaryExpression(content=bifc) - filter_expr = Filter.filter_relational( - focus=pe_focus, comparators=pe_st, operator="=" - ) - else: - raise ValueError("Only LCASE and REGEX handled at present") - filter_gpnt = GraphPatternNotTriples(content=filter_expr) - ggp.content.add_pattern(filter_gpnt) - return ggp diff --git a/prez/services/query_generation/search.py b/prez/services/query_generation/search.py new file mode 100755 index 00000000..7a2a5882 --- /dev/null +++ b/prez/services/query_generation/search.py @@ -0,0 +1,654 @@ +from typing import Optional, List + +from pydantic import BaseModel +from rdflib import RDF, URIRef + +from prez.config import settings +from prez.reference_data.prez_ns import PREZ +from temp.grammar import * + + +class SearchQuery(BaseModel): + class Config: + arbitrary_types_allowed = True + + search_term: str + pred_vals: List[URIRef] + additional_ss: Optional[SubSelect] = None + limit: int = 10 + offset: int = 0 + cq: Optional[ConstructQuery] = None + + sr_uri: Var = Var(value="focus_node") + pred: Var = Var(value="pred") + match: Var = Var(value="match") + weight: Var = Var(value="weight") + hashid: Var = Var(value="hashID") + w: Var = Var(value="w") + + @property + def sr_uri_pe(self): + return PrimaryExpression(content=self.sr_uri) + + @property + def pred_pe(self): + return PrimaryExpression(content=self.pred) + + @property + def match_pe(self): + return PrimaryExpression(content=self.match) + + @property + def weight_pe(self): + return PrimaryExpression(content=self.weight) + + @property + def w_pe(self): + return PrimaryExpression(content=self.w) + + @property + def inner_select_vars(self): + return { + "one": { + "weight_val": 100, + "function": "LCASE", + "prefix": "", + "case_insensitive": None, + }, + "two": { + "weight_val": 20, + "function": "REGEX", + "prefix": "^", + "case_insensitive": True, + }, + "three": { + "weight_val": 10, + "function": "REGEX", + "prefix": "", + "case_insensitive": True, + }, + } + + def __init__(self, **data): + super().__init__(**data) + self.create_construct_query() + + def render(self): + # self.create_construct_query() + return "".join(part for part in self.cq.render()) + + def create_construct_query(self): + self.cq = ConstructQuery( + construct_template=self.create_construct_template(), + where_clause=WhereClause( + group_graph_pattern=GroupGraphPattern( + content=self.create_outer_subselect() + ) + ), + solution_modifier=SolutionModifier(), + ) + + def create_construct_template(self): + """ + ?hashID a prez:SearchResult ; + prez:searchResultWeight ?weight ; + prez:searchResultPredicate ?predicate ; + prez:searchResultMatch ?match ; + prez:searchResultURI ?search_result_uri . 
+ """ + search_result_triples = [ + SimplifiedTriple( + subject=self.hashid, + predicate=IRI(value=PREZ.searchResultWeight), + object=self.weight, + ), + SimplifiedTriple( + subject=self.hashid, + predicate=IRI(value=PREZ.searchResultPredicate), + object=self.pred, + ), + SimplifiedTriple( + subject=self.hashid, + predicate=IRI(value=PREZ.searchResultMatch), + object=self.match, + ), + SimplifiedTriple( + subject=self.hashid, + predicate=IRI(value=PREZ.searchResultURI), + object=self.sr_uri, + ), + SimplifiedTriple( + subject=self.hashid, + predicate=IRI(value=RDF.type), + object=IRI(value=PREZ.SearchResult), + ), + ] + ct = ConstructTemplate( + construct_triples=ConstructTriples(triples=search_result_triples) + ) + return ct + + def create_outer_subselect(self): + outer_ss = SubSelect( + select_clause=self.create_outer_select_clause(), + where_clause=self.create_outer_where_clause(), + solution_modifier=self.create_solution_modifier(), + ) + return outer_ss + + def create_outer_select_clause(self): + """ + SELECT ?focus_node ?predicate ?match ?weight (URI(CONCAT("urn:hash:", SHA256(CONCAT(STR(?focus_node), STR(?predicate), STR(?match), STR(?weight))))) AS ?hashID) + """ + expressions = [self.sr_uri_pe, self.pred_pe, self.match_pe, self.weight_pe] + str_builtins = [BuiltInCall.create_with_one_expr("STR", e) for e in expressions] + str_expressions = [PrimaryExpression(content=b) for b in str_builtins] + inner_concat = BuiltInCall.create_with_n_expr("CONCAT", str_expressions) + sha256_expr = PrimaryExpression( + content=BuiltInCall.create_with_one_expr( + "SHA256", PrimaryExpression(content=inner_concat) + ) + ) + urn_literal = PrimaryExpression(content=RDFLiteral(value="urn:hash:")) + outer_concat = BuiltInCall.create_with_n_expr( + "CONCAT", [urn_literal, sha256_expr] + ) + uri_expr = BuiltInCall.create_with_one_expr( + "URI", PrimaryExpression(content=outer_concat) + ) + uri_pr_exp = PrimaryExpression(content=uri_expr) + uri_exp = Expression.from_primary_expr(uri_pr_exp) + sc = SelectClause( + variables_or_all=[ + self.sr_uri, + self.pred, + self.match, + self.weight, + (uri_exp, self.hashid), + ] + ) + return sc + + def create_outer_where_clause(self): + """Wrapper WHERE clause""" + inner_ss = self.create_inner_subselect() + inner_ggp = GroupGraphPattern(content=inner_ss) + outer_wc = WhereClause(group_graph_pattern=inner_ggp) + return outer_wc + + def create_solution_modifier(self): + """ORDER BY DESC(?weight)""" + ocond = OrderCondition(var=self.weight, direction="DESC") + oclause = OrderClause(conditions=[ocond]) + limit = LimitClause(limit=self.limit) + offset = OffsetClause(offset=self.offset) + loc = LimitOffsetClauses(limit_clause=limit, offset_clause=offset) + sm = SolutionModifier(order_by=oclause, limit_offset=loc) + return sm + + def create_inner_subselect(self): + inner_ss = SubSelect( + select_clause=self.create_inner_select_clause(), + where_clause=self.create_inner_where_clause(), + solution_modifier=self.create_group_by_solution_modifier(), + ) + return inner_ss + + def create_group_by_solution_modifier(self): + """ + GROUP BY ?focus_node ?predicate ?match + """ + gc_sr_uri = GroupCondition(condition=self.sr_uri) + gc_pred = GroupCondition(condition=self.pred) + gc_match = GroupCondition(condition=self.match) + gc = GroupClause(group_conditions=[gc_sr_uri, gc_pred, gc_match]) + sm = SolutionModifier(group_by=gc) + return sm + + def create_inner_select_clause(self): + """ + SELECT ?focus_node ?predicate ?match (SUM(?w) AS ?weight) + """ + pr_exp = 
PrimaryExpression(content=self.w) + exp = Expression.from_primary_expr(pr_exp) + sum_agg = Aggregate(function_name="SUM", expression=exp) + sum_bic = BuiltInCall(other_expressions=sum_agg) + sum_pr_exp = PrimaryExpression(content=sum_bic) + sum_exp = Expression.from_primary_expr(sum_pr_exp) + sc = SelectClause( + variables_or_all=[ + self.sr_uri, + self.pred, + self.match, + (sum_exp, self.weight), + ] + ) + return sc + + def create_inner_where_clause(self): + # outer group graph pattern sub + iri_pred_vals = [IRI(value=p) for p in self.pred_vals] + iri_db_vals = [DataBlockValue(value=p) for p in iri_pred_vals] + ildov = InlineDataOneVar(variable=self.pred, datablockvalues=iri_db_vals) + ild = InlineData(data_block=DataBlock(block=ildov)) + gpnt_ild = GraphPatternNotTriples(content=ild) + + # union statements + gougp = self.create_union_of_inner_ggps() + gpnt_gougp = GraphPatternNotTriples(content=gougp) + + outer_ggps = GroupGraphPatternSub( + graph_patterns_or_triples_blocks=[gpnt_ild, gpnt_gougp] + ) + outer_ggp = GroupGraphPattern(content=outer_ggps) + wc = WhereClause(group_graph_pattern=outer_ggp) + return wc + + def create_union_of_inner_ggps(self): + # inner group graph patterns (unioned statements) + inner_select_ggp_list = [] + for var_dict in self.inner_select_vars.values(): + inner_select_ggp_list.append(self.create_inner_ggp(**var_dict)) + gougp = GroupOrUnionGraphPattern(group_graph_patterns=inner_select_ggp_list) + return gougp + + def create_inner_ggp( + self, + weight_val: int, + function: str, + prefix: str, + case_insensitive: Optional[bool], + ) -> GroupGraphPattern: + ggp = GroupGraphPattern(content=GroupGraphPatternSub()) + + # triple pattern e.g. (?focus_node ?pred ?match) + ggp.content.add_triple( + SimplifiedTriple( + subject=self.sr_uri, + predicate=self.pred, + object=self.match, + ) + ) + + # add additional focus node selection e.g. from endpoint definitions + if self.additional_ss: + if isinstance(self.additional_ss, SubSelectString): + ss_ggp = GroupGraphPattern(content=self.additional_ss) + gougp = GroupOrUnionGraphPattern(group_graph_patterns=[ss_ggp]) + gpnt = GraphPatternNotTriples(content=gougp) + ggp.content.add_pattern(gpnt) + elif isinstance(self.additional_ss, SubSelect): + ss_ggps = self.additional_ss.where_clause.group_graph_pattern.content + ss_tb = ss_ggps.triples_block + ss_gpotb = ss_ggps.graph_patterns_or_triples_blocks + if ss_tb: + ggp.content.add_pattern(ss_tb) + if ss_gpotb: + for pattern in ss_gpotb: + ggp.content.add_pattern(pattern) + + # bind e.g. BIND(100 AS ?w) + bind_for_w = Bind( + expression=Expression.from_primary_expr( + PrimaryExpression(content=NumericLiteral(value=weight_val)) + ), + var=Var(value="w"), + ) + bind_gpnt = GraphPatternNotTriples(content=bind_for_w) + ggp.content.add_pattern(bind_gpnt) + + # FILTER (REGEX(?match, "^$term", "i")) + pe_st = PrimaryExpression(content=RDFLiteral(value=(prefix + self.search_term))) + if function == "REGEX": + e_ci = None + if case_insensitive: + pe_ci = PrimaryExpression(content=RDFLiteral(value="i")) + e_ci = Expression.from_primary_expr(pe_ci) + regex_expression = RegexExpression( + text_expression=Expression.from_primary_expr( + self.match_pe + ), # Expression for the text + pattern_expression=Expression.from_primary_expr(pe_st), # Search Term + flags_expression=e_ci, # Case insensitivity + ) + bic = BuiltInCall(other_expressions=regex_expression) + cons = Constraint(content=bic) + filter_expr = Filter(constraint=cons) + # filter e.g. 
FILTER(LCASE(?match) = "search term") + elif function == "LCASE": + bifc = BuiltInCall(function_name=function, arguments=[self.match]) + pe_focus = PrimaryExpression(content=bifc) + filter_expr = Filter.filter_relational( + focus=pe_focus, comparators=pe_st, operator="=" + ) + else: + raise ValueError("Only LCASE and REGEX handled at present") + filter_gpnt = GraphPatternNotTriples(content=filter_expr) + ggp.content.add_pattern(filter_gpnt) + return ggp + + +class SearchQueryRegex(ConstructQuery): + limit: int = 10 # specify here to make available as attribute + offset: int = 0 # specify here to make available as attribute + + def __init__( + self, + term: str, + predicates: Optional[List[str]] = None, + limit: int = 10, + offset: int = 0, + ): + sr_uri: Var = Var(value="focus_node") + pred: Var = Var(value="pred") + match: Var = Var(value="match") + weight: Var = Var(value="weight") + hashid: Var = Var(value="hashID") + + if not predicates: + predicates = settings.default_search_predicates + + ct_map = { + PREZ.searchResultWeight: weight, + PREZ.searchResultPredicate: pred, + PREZ.searchResultMatch: match, + PREZ.searchResultURI: sr_uri, + RDF.type: IRI(value=PREZ.SearchResult), + } + + # construct template + ct = ConstructTemplate( + construct_triples=ConstructTriples( + triples=[ + SimplifiedTriple(subject=hashid, predicate=IRI(value=p), object=v) + for p, v in ct_map.items() + ] + ) + ) + wc = WhereClause( + group_graph_pattern=GroupGraphPattern( + content=SubSelect( + # SELECT ?focus_node ?predicate ?match ?weight (URI(CONCAT("urn:hash:", + # SHA256(CONCAT(STR(?focus_node), STR(?predicate), STR(?match), STR(?weight))))) AS ?hashID) + select_clause=SelectClause( + variables_or_all=[ + sr_uri, + pred, + match, + weight, + ( + Expression.from_primary_expr( + PrimaryExpression( + content=BuiltInCall.create_with_one_expr( + "URI", + PrimaryExpression( + content=BuiltInCall.create_with_n_expr( + "CONCAT", + [ + PrimaryExpression( + content=RDFLiteral( + value="urn:hash:" + ) + ), + PrimaryExpression( + content=BuiltInCall.create_with_one_expr( + "SHA256", + PrimaryExpression( + content=BuiltInCall.create_with_n_expr( + "CONCAT", + [ + PrimaryExpression( + content=b + ) + for b in [ + BuiltInCall.create_with_one_expr( + "STR", + PrimaryExpression( + content=e + ), + ) + for e in [ + sr_uri, + pred, + match, + weight, + ] + ] + ], + ) + ), + ) + ), + ], + ) + ), + ) + ) + ), + hashid, + ), + ] + ), + where_clause=WhereClause( + group_graph_pattern=GroupGraphPattern( + content=SubSelect( + # SELECT ?focus_node ?predicate ?match (SUM(?w) AS ?weight) + select_clause=SelectClause( + variables_or_all=[ + sr_uri, + pred, + match, + ( + Expression.from_primary_expr( + PrimaryExpression( + content=BuiltInCall( + other_expressions=Aggregate( + function_name="SUM", + expression=Expression.from_primary_expr( + PrimaryExpression( + content=Var( + value="w" + ) + ) + ), + ) + ) + ) + ), + weight, + ), + ] + ), + where_clause=WhereClause( + group_graph_pattern=GroupGraphPattern( + content=GroupGraphPatternSub( + graph_patterns_or_triples_blocks=[ + GraphPatternNotTriples( + content=InlineData( + data_block=DataBlock( + block=InlineDataOneVar( + variable=pred, + datablockvalues=[ + DataBlockValue( + value=p + ) + for p in [ + IRI(value=p) + for p in predicates + ] + ], + ) + ) + ) + ), + GraphPatternNotTriples( + content=GroupOrUnionGraphPattern( + group_graph_patterns=[ + self.create_inner_ggp( + **var_dict, + sr_uri=sr_uri, + pred=pred, + match=match, + term=term, + ) + for var_dict in 
self.inner_select_args.values() + ] + ) + ), + ] + ) + ) + ), + solution_modifier=SolutionModifier( + group_by=GroupClause( + group_conditions=[ + GroupCondition(condition=sr_uri), + GroupCondition(condition=pred), + GroupCondition(condition=match), + ] + ) + ), + ) + ) + ), + solution_modifier=SolutionModifier( + order_by=OrderClause( + conditions=[OrderCondition(var=weight, direction="DESC")] + ), + limit_offset=LimitOffsetClauses( + limit_clause=LimitClause(limit=limit), + offset_clause=OffsetClause(offset=offset), + ), + ), + ) + ) + ) + super().__init__( + construct_template=ct, + where_clause=wc, + solution_modifier=SolutionModifier(), + ) + + @property + def inner_select_args(self): + return { + "one": { + "weight_val": 100, + "function": "LCASE", + "prefix": "", + "case_insensitive": None, + }, + "two": { + "weight_val": 20, + "function": "REGEX", + "prefix": "^", + "case_insensitive": True, + }, + "three": { + "weight_val": 10, + "function": "REGEX", + "prefix": "", + "case_insensitive": True, + }, + } + + def create_inner_ggp( + self, + weight_val: int, + function: str, + prefix: str, + case_insensitive: Optional[bool], + sr_uri: Var, + pred: Var, + match: Var, + term: str, + ) -> GroupGraphPattern: + ggp = GroupGraphPattern( + content=GroupGraphPatternSub( + triples_block=TriplesBlock( + triples=[ + SimplifiedTriple( + subject=sr_uri, + predicate=pred, + object=match, + ) + ] + ), + graph_patterns_or_triples_blocks=[ + GraphPatternNotTriples( + content=Bind( + expression=Expression.from_primary_expr( + PrimaryExpression( + content=NumericLiteral(value=weight_val) + ) + ), + var=Var(value="w"), + ) + ) + ], + ) + ) + # FILTER (REGEX(?match, "^$term", "i")) + pe_st = PrimaryExpression(content=RDFLiteral(value=(prefix + term))) + + filter_expr = None + if function == "REGEX": + filter_expr = Filter( + constraint=Constraint( + content=BuiltInCall( + other_expressions=RegexExpression( + text_expression=Expression.from_primary_expr( + PrimaryExpression(content=match) + ), # Expression for the text + pattern_expression=Expression.from_primary_expr(pe_st), + flags_expression=Expression.from_primary_expr( + PrimaryExpression(content=RDFLiteral(value="i")) + ) + if case_insensitive + else None, + ) + ) + ) + ) + + # filter e.g. 
FILTER(LCASE(?match) = "search term") + elif function == "LCASE": + filter_expr = Filter.filter_relational( + focus=PrimaryExpression( + content=BuiltInCall(function_name=function, arguments=[match]) + ), + comparators=pe_st, + operator="=", + ) + ggp.content.add_pattern(GraphPatternNotTriples(content=filter_expr)) + return ggp + + # convenience properties for the construct query + @property + def construct_triples(self): + return self.construct_template.construct_triples.triples + + @property + def inner_select_vars(self): + return ( + self.where_clause.group_graph_pattern.content.select_clause.variables_or_all + ) + + @property + def inner_select_gpnt(self): + inner_ggp = ( + self.where_clause.group_graph_pattern.content.where_clause.group_graph_pattern + ) + return GraphPatternNotTriples( + content=GroupOrUnionGraphPattern(group_graph_patterns=[inner_ggp]) + ) + + @property + def order_by(self): + return Var(value="weight") + + @property + def order_by_direction(self): + return "DESC" + + diff --git a/prez/services/query_generation/shacl.py b/prez/services/query_generation/shacl.py new file mode 100644 index 00000000..a12647d6 --- /dev/null +++ b/prez/services/query_generation/shacl.py @@ -0,0 +1,590 @@ +from __future__ import annotations + +from string import Template +from typing import List, Optional, Any, Dict, Literal as TypingLiteral, Union + +from pydantic import BaseModel +from rdflib import URIRef, BNode, Graph +from rdflib.collection import Collection +from rdflib.namespace import SH, RDF +from rdflib.term import Node + +from prez.reference_data.prez_ns import ONT, SHEXT +from temp.grammar import * + + +class Shape(BaseModel): + class Config: + arbitrary_types_allowed = True + + def __init__(self, **data: Any): + super().__init__(**data) + self.triples_list = [] + self.gpnt_list = [] + self.from_graph() + self.to_grammar() + + def from_graph(self): + raise NotImplementedError("Subclasses must implement this method.") + + def to_grammar(self): + raise NotImplementedError("Subclasses must implement this method.") + + +class NodeShape(Shape): + uri: URIRef + graph: Graph + kind: TypingLiteral["endpoint", "profile"] + focus_node: Union[Var, IRI] + targetNode: Optional[URIRef] = None + targetClasses: Optional[List[Node]] = [] + propertyShapesURIs: Optional[List[Node]] = [] + target: Optional[Node] = None + rules: Optional[List[Node]] = [] + propertyShapes: Optional[List[PropertyShape]] = [] + triples_list: Optional[List[SimplifiedTriple]] = [] + gpnt_list: Optional[List[GraphPatternNotTriples]] = [] + rule_triples: Optional[List[SimplifiedTriple]] = [] + path_nodes: Optional[Dict[str, Var | IRI]] = {} + classes_at_len: Optional[Dict[str, List[URIRef]]] = {} + hierarchy_level: Optional[int] = None + select_template: Optional[Template] = None + bnode_depth: Optional[int] = None + + def from_graph(self): # TODO this can be a SPARQL select against the system graph. 
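
# Aside on SearchQueryRegex above: the ?hashID projected in the outer SELECT is a
# deterministic URI per (focus_node, pred, match, weight) row, giving every search
# result a stable subject for the CONSTRUCT template. A plain-Python sketch of the
# same SHA256/CONCAT recipe (illustrative helper, not code Prez runs):

import hashlib

def search_result_hash_id(focus_node: str, pred: str, match: str, weight: int) -> str:
    # mirrors URI(CONCAT("urn:hash:", SHA256(CONCAT(
    #     STR(?focus_node), STR(?pred), STR(?match), STR(?weight)))))
    digest = hashlib.sha256(f"{focus_node}{pred}{match}{weight}".encode()).hexdigest()
    return f"urn:hash:{digest}"

print(search_result_hash_id("https://example.com/item/1", "rdfs:label", "Item 1", 100))
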
+        self.bnode_depth = next(self.graph.objects(self.uri, SHEXT.bnodeDepth), None)
+        self.targetNode = next(self.graph.objects(self.uri, SH.targetNode), None)
+        self.targetClasses = list(self.graph.objects(self.uri, SH.targetClass))
+        self.propertyShapesURIs = list(self.graph.objects(self.uri, SH.property))
+        self.target = next(self.graph.objects(self.uri, SH.target), None)
+        self.rules = list(self.graph.objects(self.uri, SH.rule))
+        self.propertyShapes = [
+            PropertyShape(
+                uri=ps_uri,
+                graph=self.graph,
+                kind=self.kind,
+                focus_node=self.focus_node,
+                path_nodes=self.path_nodes,
+            )
+            for ps_uri in self.propertyShapesURIs
+        ]
+        self.hierarchy_level = next(
+            self.graph.objects(self.uri, ONT.hierarchyLevel), None
+        )
+        if not self.hierarchy_level and self.kind == "endpoint":
+            raise ValueError("No hierarchy level found")
+
+    def to_grammar(self):
+        if self.targetNode:
+            pass  # do not need to add any specific triples or the like
+        if self.targetClasses:
+            self._process_class_targets()
+        if self.propertyShapes:
+            self._process_property_shapes()
+        if self.target:
+            self._process_target()
+        # rules used to construct triples only in the context of sh:target/sh:sparql at present.
+        if self.rules:
+            self._process_rules()
+        if self.bnode_depth:
+            self._build_bnode_blocks()
+
+    def _process_class_targets(self):
+        if len(self.targetClasses) == 1:
+            self.triples_list.append(
+                SimplifiedTriple(
+                    subject=self.focus_node,
+                    predicate=IRI(value=RDF.type),
+                    object=IRI(value=self.targetClasses[0]),
+                )
+            )
+        elif len(self.targetClasses) > 1:
+            self.triples_list.append(
+                SimplifiedTriple(
+                    subject=self.focus_node,
+                    predicate=IRI(value=RDF.type),
+                    object=Var(value="focus_classes"),
+                )
+            )
+            dbvs = [
+                DataBlockValue(value=IRI(value=klass)) for klass in self.targetClasses
+            ]
+            self.gpnt_list.append(
+                GraphPatternNotTriples(
+                    content=InlineData(
+                        data_block=DataBlock(
+                            block=InlineDataOneVar(
+                                variable=Var(value="focus_classes"),
+                                datablockvalues=dbvs,
+                            )
+                        )
+                    )
+                )
+            )
+        else:
+            raise ValueError("No target classes found")
+
+    def _process_target(self):
+        self.select_template = Template(
+            str(self.graph.value(self.target, SH.select, default=None))
+        )
+
+    def _process_rules(self):
+        for rule_node in self.rules:
+            subject = self.graph.value(subject=rule_node, predicate=SH.subject)
+            predicate = self.graph.value(subject=rule_node, predicate=SH.predicate)
+            object = self.graph.value(subject=rule_node, predicate=SH.object)
+            if subject == SH.this:
+                subject = self.focus_node
+            subject, predicate, object = self.sh_rule_type_conversion(
+                [subject, predicate, object]
+            )
+            self.rule_triples.append(
+                SimplifiedTriple(subject=subject, predicate=predicate, object=object)
+            )
+
+    @staticmethod
+    def sh_rule_type_conversion(items: List):
+        """
+        Assumes Literals are actually Variables.
+        Ported from the removed PrezQueryConstructor so _process_rules has its helper.
+        """
+        from rdflib import Literal  # local import; Literal is not in this module's imports
+
+        new_items = []
+        for item in items:
+            if isinstance(item, URIRef):
+                item = IRI(value=item)
+            elif isinstance(item, Literal):
+                item = Var(value=item[1:])
+            new_items.append(item)
+        return new_items
+
+    def _process_property_shapes(self):
+        for shape in self.propertyShapes:
+            self.triples_list.extend(shape.triples_list)
+            self.gpnt_list.extend(shape.gpnt_list)
+            self.path_nodes = self.path_nodes | shape.path_nodes
+            self.classes_at_len = self.classes_at_len | shape.classes_at_len
+        # deduplicate
+        self.triples_list = list(set(self.triples_list))
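
# For reference, a hand-written sketch of the WHERE fragment _build_bnode_blocks
# below aims to produce for shext:bnodeDepth 2 (the isBLANK FILTER restricts the
# expansion to blank-node objects; variable names follow the code):
#
#   OPTIONAL {
#     ?focus_node ?bn_p_1 ?bn_o_1 .
#     ?bn_o_1 ?bn_p_2 ?bn_o_2 .
#     FILTER(isBLANK(?bn_o_1))
#     OPTIONAL {
#       ?bn_o_2 ?bn_p_3 ?bn_o_3 .
#       FILTER(isBLANK(?bn_o_2))
#     }
#   }
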
+    def _build_bnode_blocks(self):
+        bnode_depth = int(self.bnode_depth)
+
+        p1 = Var(value="bn_p_1")
+        o1 = Var(value="bn_o_1")
+        p2 = Var(value="bn_p_2")
+        o2 = Var(value="bn_o_2")
+        triples_block = TriplesBlock(
+            triples=[
+                SimplifiedTriple(subject=self.focus_node, predicate=p1, object=o1),
+                SimplifiedTriple(subject=o1, predicate=p2, object=o2),
+            ]
+        )
+        filter_block = Filter(
+            constraint=Constraint(
+                content=BuiltInCall.create_with_one_expr(
+                    "isBLANK", PrimaryExpression(content=o1)
+                )
+            )
+        )
+        # name the first-level pattern so deeper OPTIONAL blocks can be attached to it
+        ggps = GroupGraphPatternSub(
+            triples_block=triples_block,
+            graph_patterns_or_triples_blocks=[
+                GraphPatternNotTriples(content=filter_block)
+            ],
+        )
+        container_gpnt = GraphPatternNotTriples(
+            content=OptionalGraphPattern(
+                group_graph_pattern=GroupGraphPattern(content=ggps)
+            )
+        )
+        container_ggps = GroupGraphPatternSub(
+            graph_patterns_or_triples_blocks=[container_gpnt]
+        )
+        container_ggp = GroupGraphPattern(content=container_ggps)
+
+        def process_bn_level(depth, max_depth, outer_ggps):
+            old_o_var = Var(value=f"bn_o_{depth}")
+            new_p_var = Var(value=f"bn_p_{depth + 1}")
+            new_o_var = Var(value=f"bn_o_{depth + 1}")
+            triples_block = TriplesBlock(
+                triples=[
+                    SimplifiedTriple(
+                        subject=old_o_var, predicate=new_p_var, object=new_o_var
+                    )
+                ]
+            )
+            gpnt = GraphPatternNotTriples(
+                content=Filter(
+                    constraint=Constraint(
+                        content=BuiltInCall.create_with_one_expr(
+                            "isBLANK", PrimaryExpression(content=old_o_var)
+                        )
+                    )
+                )
+            )
+            opt = OptionalGraphPattern(
+                group_graph_pattern=GroupGraphPattern(
+                    content=GroupGraphPatternSub(
+                        triples_block=triples_block,
+                        graph_patterns_or_triples_blocks=[gpnt],
+                    )
+                )
+            )
+            outer_ggps.graph_patterns_or_triples_blocks.append(opt)
+            if depth < max_depth:
+                # recurse on the accumulator that was passed in; "ggps" was undefined here
+                process_bn_level(depth + 1, max_depth, outer_ggps)
+
+        if bnode_depth > 1:
+            process_bn_level(depth=2, max_depth=bnode_depth, outer_ggps=ggps)
+        # previously assigned to an unused local; collect it with the shape's other patterns
+        self.gpnt_list.append(
+            GraphPatternNotTriples(
+                content=GroupOrUnionGraphPattern(group_graph_patterns=[container_ggp])
+            )
+        )
+
+
+class PropertyShape(Shape):
+    uri: URIRef | BNode  # URI of the shape
+    graph: Graph
+    kind: TypingLiteral["endpoint", "profile"]
+    focus_node: Union[IRI, Var]
+    # inputs
+    property_paths: Optional[List[PropertyPath]] = None
+    or_klasses: Optional[List[URIRef]] = None
+    # outputs
+    grammar: Optional[GroupGraphPatternSub] = None
+    triples_list: Optional[List[SimplifiedTriple]] = None
+    gpnt_list: Optional[List[GraphPatternNotTriples]] = None
+    path_nodes: Optional[Dict[str, Var | IRI]] = {}
+    classes_at_len: Optional[Dict[str, List[URIRef]]] = {}
+    _select_vars: Optional[List[Var]] = None
+
+    @property
+    def minCount(self):
+        minc = next(self.graph.objects(self.uri, SH.minCount), None)
+        if minc is not None:
+            return int(minc)
+
+    @property
+    def maxCount(self):
+        maxc = next(self.graph.objects(self.uri, SH.maxCount), None)
+        if maxc is not None:
+            return int(maxc)
+
+    def from_graph(self):
+        self.property_paths = []
+        _single_class = next(self.graph.objects(self.uri, SH["class"]), None)
+        if _single_class:
+            self.or_klasses = [URIRef(_single_class)]
+
+        # look for sh:or statements and process classes from these NB only sh:or / sh:class is handled at present.
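
# The sh:or handling below expects shapes of roughly this form (illustrative
# Turtle; only sh:or over sh:class constraints is supported, per the comment above):
#
#   ex:PublisherShape
#       sh:path dcterms:publisher ;
#       sh:or (
#           [ sh:class schema:Organization ]
#           [ sh:class schema:Person ]
#       ) .
#
# from_graph flattens the RDF list into or_klasses = [schema:Organization, schema:Person].
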
+ or_classes = next(self.graph.objects(self.uri, SH["or"]), None) + if or_classes: + or_bns = list(Collection(self.graph, or_classes)) + or_triples = list(self.graph.triples_choices((or_bns, SH["class"], None))) + self.or_klasses = [URIRef(klass) for _, _, klass in or_triples] + + pps = list(self.graph.objects(self.uri, SH.path)) + for pp in pps: + self._process_property_path(pp) + # get the longest property path first - for endpoints this will be the path any path_nodes apply to + self.property_paths = sorted( + self.property_paths, key=lambda x: len(x), reverse=True + ) + + def _process_property_path(self, pp): + if isinstance(pp, URIRef): + self.property_paths.append(Path(value=pp)) + elif isinstance(pp, BNode): + pred_objects_gen = self.graph.predicate_objects(subject=pp) + bn_pred, bn_obj = next(pred_objects_gen, (None, None)) + if bn_obj == SH.union: + union_list = list(Collection(self.graph, pp)) + if union_list != [SH.union]: + union_list_bnode = union_list[1] + union_items = list(Collection(self.graph, union_list_bnode)) + for item in union_items: + self._process_property_path(item) + elif bn_pred == SH.inversePath: + self.property_paths.append(InversePath(value=bn_obj)) + # elif bn_pred == SH.alternativePath: + # predicates.extend(list(Collection(self.profile_graph, bn_obj))) + else: # sequence paths + paths = list(Collection(self.graph, pp)) + sp_list = [] + for path in paths: + if isinstance(path, BNode): + pred_objects_gen = self.graph.predicate_objects(subject=path) + bn_pred, bn_obj = next(pred_objects_gen, (None, None)) + if bn_pred == SH.inversePath: + sp_list.append(InversePath(value=bn_obj)) + elif isinstance(path, URIRef): + sp_list.append(Path(value=path)) + self.property_paths.append(SequencePath(value=sp_list)) + + def to_grammar(self): + # label nodes in the inner select and profile part of the query differently for clarity. 
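
# A sketch of how the path translation below is intended to render, with ex: as an
# illustrative namespace (endpoint shapes use "path"-prefixed variables, profile
# shapes "prof"-prefixed ones):
#
#   sh:path ex:p                     ->  ?focus_node ex:p ?path_node_1 .
#   sh:path [ sh:inversePath ex:p ]  ->  ?path_node_1 ex:p ?focus_node .
#   sh:path ( ex:p ex:q )            ->  ?focus_node ex:p ?path_node_1 .
#                                        ?path_node_1 ex:q ?path_node_2 .
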
+ if self.kind == "endpoint": + path_or_prop = "path" + elif self.kind == "profile": + path_or_prop = "prof" + + # set up the path nodes - either from supplied values or set as variables + total_individual_nodes = sum([len(i) for i in self.property_paths]) + for i in range(total_individual_nodes): + path_node_str = f"{path_or_prop}_node_{i + 1}" + if path_node_str not in self.path_nodes: + self.path_nodes[path_node_str] = Var(value=path_node_str) + + self.triples_list = [] + len_pp = max([len(i) for i in self.property_paths]) + # sh:class applies to the end of sequence paths + if f"{path_or_prop}_node_{len_pp}" in self.path_nodes: + path_node_term = self.path_nodes[f"{path_or_prop}_node_{len_pp}"] + else: + path_node_term = Var(value=f"{path_or_prop}_node_{len_pp}") + + # useful for determining which endpoint property shape should be used when a request comes in on endpoint + self.classes_at_len[f"{path_or_prop}_node_{len_pp}"] = self.or_klasses + + if self.or_klasses: + if len(self.or_klasses) == 1: + self.triples_list.append( + SimplifiedTriple( + subject=path_node_term, + predicate=IRI(value=RDF.type), + object=IRI(value=self.or_klasses[0]), + ) + ) + else: + self.triples_list.append( + SimplifiedTriple( + subject=path_node_term, + predicate=IRI(value=RDF.type), + object=Var(value=f"{path_or_prop}_node_classes_{len_pp}"), + ) + ) + dbvs = [ + DataBlockValue(value=IRI(value=klass)) for klass in self.or_klasses + ] + self.gpnt_list.append( + GraphPatternNotTriples( + content=InlineData( + data_block=DataBlock( + block=InlineDataOneVar( + variable=Var( + value=f"{path_or_prop}_node_classes_{len_pp}" + ), + datablockvalues=dbvs, + ) + ) + ) + ) + ) + + if self.property_paths: + i = 0 + for property_path in self.property_paths: + if f"{path_or_prop}_node_{i + 1}" in self.path_nodes: + path_node_1 = self.path_nodes[f"{path_or_prop}_node_{i + 1}"] + else: + path_node_1 = Var(value=f"{path_or_prop}_node_{i + 1}") + # for sequence paths up to length two: + if f"{path_or_prop}_node_{i + 2}" in self.path_nodes: + path_node_2 = self.path_nodes[f"{path_or_prop}_node_{i + 2}"] + else: + path_node_2 = Var(value=f"{path_or_prop}_node_{i + 2}") + + if isinstance(property_path, Path): + if property_path.value == SHEXT.allPredicateValues: + pred = Var(value="preds") + else: + pred = IRI(value=property_path.value) + # vanilla property path + self.triples_list.append( + SimplifiedTriple( + subject=self.focus_node, + predicate=pred, + object=path_node_1, + ) + ) + i += 1 + + elif isinstance(property_path, InversePath): + self.triples_list.append( + SimplifiedTriple( + subject=path_node_1, + predicate=IRI(value=property_path.value), + object=self.focus_node, + ) + ) + i += 1 + + elif isinstance(property_path, SequencePath): + for j, path in enumerate(property_path.value): + if isinstance(path, Path): + if j == 0: + self.triples_list.append( + SimplifiedTriple( + subject=self.focus_node, + predicate=IRI(value=path.value), + object=path_node_1, + ) + ) + else: + self.triples_list.append( + SimplifiedTriple( + subject=path_node_1, + predicate=IRI(value=path.value), + object=path_node_2, + ) + ) + elif isinstance(path, InversePath): + if j == 0: + self.triples_list.append( + SimplifiedTriple( + subject=path_node_1, + predicate=IRI(value=path.value), + object=self.focus_node, + ) + ) + else: + self.triples_list.append( + SimplifiedTriple( + subject=path_node_2, + predicate=IRI(value=path.value), + object=path_node_1, + ) + ) + i += len(property_path) + + if self.minCount == 0: + # triples = 
self.triples_list.copy()
+            self.gpnt_list.append(
+                GraphPatternNotTriples(
+                    content=OptionalGraphPattern(
+                        group_graph_pattern=GroupGraphPattern(
+                            content=GroupGraphPatternSub(
+                                triples_block=TriplesBlock(triples=self.triples_list)
+                            )
+                        )
+                    )
+                )
+            )
+            self.triples_list = []
+
+        if self.maxCount == 0:
+            for p in self.property_paths:
+                assert isinstance(p, Path)  # only support filtering direct predicates
+
+            # reset the triples list
+            self.triples_list = [
+                SimplifiedTriple(
+                    subject=path_node_term,
+                    predicate=Var(value="excluded_props"),
+                    object=Var(value="excluded_prop_vals"),
+                )
+            ]
+
+            values = [
+                PrimaryExpression(content=IRIOrFunction(iri=IRI(value=p.value)))
+                for p in self.property_paths
+            ]
+            gpnt = GraphPatternNotTriples(
+                content=Filter.filter_relational(
+                    focus=PrimaryExpression(content=Var(value="excluded_props")),
+                    comparators=values,
+                    operator="NOT IN",
+                )
+            )
+            self.gpnt_list.append(gpnt)
+
+
+class PropertyPath(BaseModel):
+    class Config:
+        arbitrary_types_allowed = True
+
+    uri: Optional[URIRef] = None
+
+
+class Path(PropertyPath):
+    value: URIRef
+
+    def __len__(self):
+        return 1
+
+
+class SequencePath(PropertyPath):
+    value: List[PropertyPath]
+
+    def __len__(self):
+        return len(self.value)
+
+
+class InversePath(PropertyPath):
+    value: URIRef
+
+    def __len__(self):
+        return 1
+
+
+# NB: legacy module-level copy of NodeShape._build_bnode_blocks, carried over from
+# the removed PrezQueryConstructor (it still expects PrezQueryConstructor attributes
+# such as profile_graph and main_where_ggps on `self`).
+def _build_bnode_blocks(self):
+    bnode_depth = list(
+        self.profile_graph.objects(
+            subject=self.profile_uri, predicate=SHEXT["bnode-depth"]
+        )
+    )
+    if not bnode_depth or bnode_depth == [0]:
+        return
+    else:
+        bnode_depth = int(bnode_depth[0])
+    p1 = Var(value="bn_p_1")
+    o1 = Var(value="bn_o_1")
+    p2 = Var(value="bn_p_2")
+    o2 = Var(value="bn_o_2")
+    triples_block = TriplesBlock(
+        triples=[
+            SimplifiedTriple(subject=self.focus_node, predicate=p1, object=o1),
+            SimplifiedTriple(subject=o1, predicate=p2, object=o2),
+        ]
+    )
+    constraint = Constraint(
+        content=BuiltInCall.create_with_one_expr(
+            "isBLANK", PrimaryExpression(content=o1)
+        )
+    )
+    filter_block = Filter(constraint=constraint)
+    gpnt = GraphPatternNotTriples(content=filter_block)
+    ggps = GroupGraphPatternSub(
+        triples_block=triples_block, graph_patterns_or_triples_blocks=[gpnt]
+    )
+    ggp = GroupGraphPattern(content=ggps)
+    outer_opt = OptionalGraphPattern(group_graph_pattern=ggp)
+    container_gpnt = GraphPatternNotTriples(content=outer_opt)
+    container_ggps = GroupGraphPatternSub(
+        graph_patterns_or_triples_blocks=[container_gpnt]
+    )
+    container_ggp = GroupGraphPattern(content=container_ggps)
+
+    def process_bn_level(depth, max_depth, outer_ggps):
+        old_o_var = Var(value=f"bn_o_{depth}")
+        new_p_var = Var(value=f"bn_p_{depth + 1}")
+        new_o_var = Var(value=f"bn_o_{depth + 1}")
+        triples_block = TriplesBlock(
+            triples=[
+                SimplifiedTriple(
+                    subject=old_o_var, predicate=new_p_var, object=new_o_var
+                )
+            ]
+        )
+        gpnt = GraphPatternNotTriples(
+            content=Filter(
+                constraint=Constraint(
+                    content=BuiltInCall.create_with_one_expr(
+                        "isBLANK", PrimaryExpression(content=old_o_var)
+                    )
+                )
+            )
+        )
+        opt = OptionalGraphPattern(
+            group_graph_pattern=GroupGraphPattern(
+                content=GroupGraphPatternSub(
+                    triples_block=triples_block,
+                    graph_patterns_or_triples_blocks=[gpnt],
+                )
+            )
+        )
+        outer_ggps.graph_patterns_or_triples_blocks.append(opt)
+        if depth < max_depth:
+            process_bn_level(depth + 1, max_depth, outer_ggps)
+
+    if bnode_depth > 1:
+        process_bn_level(depth=2, max_depth=bnode_depth, outer_ggps=ggps)
+    gpnt = GraphPatternNotTriples(
content=GroupOrUnionGraphPattern(group_graph_patterns=[container_ggp]) + ) + self.main_where_ggps.add_pattern(gpnt) diff --git a/prez/services/query_generation/umbrella.py b/prez/services/query_generation/umbrella.py index 3c08aace..95b3c8d6 100755 --- a/prez/services/query_generation/umbrella.py +++ b/prez/services/query_generation/umbrella.py @@ -1,10 +1,15 @@ import re from string import Template from typing import Union, Optional, List, Dict -from pydantic import BaseModel, field_validator -from rdflib import URIRef, Namespace, Graph, SH, RDF, BNode, Literal + +from pydantic import BaseModel +from rdflib import URIRef, Namespace, Graph, SH, BNode, Literal from rdflib.collection import Collection +from prez.cache import profiles_graph_cache, endpoints_graph_cache +from prez.services.query_generation.cql import CQLParser +from prez.services.query_generation.search import SearchQueryRegex +from prez.services.query_generation.shacl import NodeShape from temp.grammar import * ONT = Namespace("https://prez.dev/ont/") @@ -12,292 +17,292 @@ SHEXT = Namespace("http://example.com/shacl-extension#") -class PrezQueryConstructor(BaseModel): - class Config: - arbitrary_types_allowed = True - - runtime_values: dict - endpoint_graph: Graph - profile_graph: Graph - listing_or_object: str - focus_node: Union[IRI, Var] = Var(value="focus_node") - endpoint_uri: Optional[URIRef] = None - profile_uri: Optional[URIRef] = None - - construct_triples: Optional[List[SimplifiedTriple]] = [] - main_where_ggps: Optional[GroupGraphPatternSub] = GroupGraphPatternSub() - inner_select: Optional[Union[SubSelect, SubSelectString]] = None - - endpoint_shacl_triples: Optional[List[SimplifiedTriple]] = [] - endpoint_shacl_gpnt: Optional[List[GraphPatternNotTriples]] = [] - cql_triples: Optional[List[SimplifiedTriple]] = [] - cql_gpnt: Optional[List[GraphPatternNotTriples]] = [] - select_template: Optional[Template] = None - sparql: Optional[str] = None - - # Additional fields - default_limit: Optional[int] = None - default_offset: Optional[int] = None - default_order_by: Optional[str] = None - default_order_by_desc: Optional[bool] = None - runtime_vals_expanded: Optional[Dict] = {} - merged_runtime_and_default_vals: Optional[Dict] = {} - - def _expand_runtime_vars(self): - for k, v in self.runtime_values.items(): - if k in ["limit", "offset", "q"]: - self.runtime_vals_expanded[k] = v - elif v: - val = IRI(value=v).to_string() - self.runtime_vals_expanded[k] = val - - def _merge_runtime_and_default_vars(self): - default_args = { - "limit": self.default_limit, - "offset": self.default_offset, - "order_by": self.default_order_by, - "order_by_desc": self.default_order_by_desc, - } - self.merged_runtime_and_default_vals = default_args | self.runtime_vals_expanded - - def generate_sparql(self): - """ - Generates SPARQL query from Shape profile_graph. 
- """ - self._expand_runtime_vars() - if self.listing_or_object == "listing": - self.build_inner_select() - self.parse_profile() - self._generate_query() - - def _generate_query(self): - where = WhereClause( - group_graph_pattern=GroupGraphPattern(content=self.main_where_ggps) - ) - - if self.construct_triples: - self.construct_triples.extend(where.collect_triples()) - else: - self.construct_triples = where.collect_triples() - self.construct_triples = list(set(self.construct_triples)) - - if self.listing_or_object == "listing": - gpnt = GraphPatternNotTriples( - content=GroupOrUnionGraphPattern( - group_graph_patterns=[GroupGraphPattern(content=self.inner_select)] - ) - ) - self.main_where_ggps.add_pattern(gpnt, prepend=True) - - construct_template = ConstructTemplate( - construct_triples=ConstructTriples(triples=self.construct_triples) - ) - solution_modifier = SolutionModifier() - query_str = ConstructQuery( - construct_template=construct_template, - where_clause=where, - solution_modifier=solution_modifier, - ).to_string() - self.sparql = query_str - - def build_inner_select(self): - """ - Either set the focus_node to a URIRef, if a target node is provided, or generate a triple pattern to get list items - Generates triples for the endpoint definition with runtime values substituted. - """ - inner_select_ggps = GroupGraphPatternSub() - - self._set_limit_and_offset() - self._merge_runtime_and_default_vars() - - rule_nodes = list( - self.endpoint_graph.objects(subject=self.endpoint_uri, predicate=SH.rule) - ) - - sol_mod, order_by_triple = self._create_focus_node_solution_modifier() - - if self.select_template: - # sh:target / sh:select - sss = self.create_select_subquery_from_template(sol_mod, order_by_triple) - self.inner_select = sss - - # rule nodes - for CONSTRUCT TRIPLES patterns. - if rule_nodes: - for rule_node in rule_nodes: - self._create_construct_triples_from_sh_rules(rule_node) - - else: - self.inner_select = SubSelect( - select_clause=SelectClause(variables_or_all=[self.focus_node]), - where_clause=WhereClause( - group_graph_pattern=GroupGraphPattern(content=inner_select_ggps) - ), - solution_modifier=sol_mod, - ) - - if order_by_triple: - inner_select_ggps.add_triple(order_by_triple) - - # otherwise just use what is provided by the endpoint shapes - all_triples = self.endpoint_shacl_triples + self.cql_triples - if all_triples: - tb = TriplesBlock(triples=all_triples) - inner_select_ggps.add_pattern(tb) - - all_gpnt = self.endpoint_shacl_gpnt + self.cql_gpnt - if all_gpnt: - for gpnt in all_gpnt: - inner_select_ggps.add_pattern(gpnt) - - def sh_rule_type_conversion(self, items: List): - """ - Assumes Literals are actually Variables. - """ - new_items = [] - for item in items: - if isinstance(item, URIRef): - item = IRI(value=item) - elif isinstance(item, Literal): - item = Var(value=item[1:]) - new_items.append(item) - return new_items - - def _create_construct_triples_from_sh_rules(self, rule_node): - """CONSTRUCT {?s ?p ?o} based on sh:rule [ sh:subject ... 
]""" - subject = self.endpoint_graph.value(subject=rule_node, predicate=SH.subject) - predicate = self.endpoint_graph.value(subject=rule_node, predicate=SH.predicate) - object = self.endpoint_graph.value(subject=rule_node, predicate=SH.object) - if subject == SH.this: - subject = self.focus_node - subject, predicate, object = self.sh_rule_type_conversion( - [subject, predicate, object] - ) - - triple = SimplifiedTriple(subject=subject, predicate=predicate, object=object) - if self.construct_triples: - self.construct_triples.append(triple) - else: - self.construct_triples = [triple] - - def create_select_subquery_from_template(self, sol_mod, order_by_triple): - # expand any prefixes etc. in case the prefixes are not defined in the query this subquery is being inserted - # into. NB Shape does provide a mechanism to declare prefixes used in SPARQL target - this has not been - # implemented - substituted_query = self.select_template.substitute( - self.merged_runtime_and_default_vals - ).rstrip() - if order_by_triple: # insert it before the end of the string, - order_by_triple_text = order_by_triple.to_string() - substituted_query = ( - substituted_query[:-1] + f"{{{order_by_triple_text}}} }}" - ) - additional_strings = [] - if self.cql_triples: # for example from cql - additional_strings.append( - TriplesBlock(triples=self.cql_triples).to_string() - ) - if self.cql_gpnt: - additional_strings.extend([gpnt.to_string() for gpnt in self.cql_gpnt]) - substituted_query = self.split_query(substituted_query, additional_strings) - sss = SubSelectString( - select_string=substituted_query, solution_modifier=sol_mod - ) - return sss - - def split_query(self, original_query, additional_strings: List[str]): - # Regex to match the entire structure: 'SELECT ?xxx { ... }' - pattern = r"(SELECT\s+[\?\w\s\(\)]+\s*\{)(.*?)(\}\s*)" - # Use re.split to split the query based on the pattern - parts = re.split(pattern, original_query, flags=re.DOTALL) - parts = [part for part in parts if part.strip()] - new_parts = [parts[0]] + additional_strings - if len(parts) > 1: - new_parts.extend(parts[1:]) - new_query = "".join(part for part in new_parts) - return new_query - - def _create_focus_node_solution_modifier(self): - """ - Solution modifiers include LIMIT, OFFSET, ORDER BY clauses. - """ - order_clause = order_by_triple = None # order clause is optional - order_by_path = self.merged_runtime_and_default_vals.get("order_by") - if order_by_path: - direction = self.merged_runtime_and_default_vals.get("order_by_desc") - if direction: - direction = "DESC" - else: - direction = "ASC" - order_cond = OrderCondition( - var=Var(value="order_by_var"), direction=direction - ) - order_clause = OrderClause(conditions=[order_cond]) - order_by_triple = SimplifiedTriple( - subject=self.focus_node, - predicate=IRI(value=order_by_path[0]), - object=Var(value="order_by_var"), - ) - limit = int(self.merged_runtime_and_default_vals["limit"]) - offset = int(self.merged_runtime_and_default_vals["offset"]) - limit_clause = LimitClause(limit=limit) - offset_clause = OffsetClause(offset=offset) - limit_offset_clauses = LimitOffsetClauses( - limit_clause=limit_clause, offset_clause=offset_clause - ) - sol_mod = SolutionModifier( - order_by=order_clause, limit_offset=limit_offset_clauses - ) - return sol_mod, order_by_triple - - def _set_limit_and_offset(self): - """ - Sets the default limit, offset, and ordering for a listing endpoint. 
- """ - default_limit = next( - self.endpoint_graph.objects( - subject=self.endpoint_uri, predicate=SHEXT.limit - ), - 20, - ) - default_offset = next( - self.endpoint_graph.objects( - subject=self.endpoint_uri, predicate=SHEXT.offset - ), - 0, - ) - default_order_by = list( - self.endpoint_graph.objects( - subject=self.endpoint_uri, predicate=SHEXT.orderBy - ) - ) - - self.default_limit = int(default_limit) - self.default_offset = int(default_offset) - - # Process each blank node in the default_order_by list - for blank_node in default_order_by: - # Extract sh:path - path = next(self.endpoint_graph.objects(blank_node, SH.path), None) - if not path: - continue # Skip if no sh:path is found - - # Check for sh:desc - desc_node = next(self.endpoint_graph.objects(blank_node, SHEXT.desc), None) - is_descending = ( - True if desc_node and (desc_node == Literal(True)) else False - ) - - # Add the configuration to the list - self.default_order_by = (path,) - self.default_order_by_desc = is_descending - - def parse_profile(self): - for i, property_node in enumerate( - self.profile_graph.objects(subject=self.profile_uri, predicate=SH.property) - ): - self._parse_property_shapes(property_node, i) - self._build_bnode_blocks() - +# class PrezQueryConstructor(BaseModel): +# class Config: +# arbitrary_types_allowed = True +# +# runtime_values: dict +# endpoint_graph: Graph +# profile_graph: Graph +# listing_or_object: str +# focus_node: Union[IRI, Var] = Var(value="focus_node") +# endpoint_uri: Optional[URIRef] = None +# profile_uri: Optional[URIRef] = None +# +# construct_triples: Optional[List[SimplifiedTriple]] = [] +# main_where_ggps: Optional[GroupGraphPatternSub] = GroupGraphPatternSub() +# inner_select: Optional[Union[SubSelect, SubSelectString]] = None +# +# endpoint_shacl_triples: Optional[List[SimplifiedTriple]] = [] +# endpoint_shacl_gpnt: Optional[List[GraphPatternNotTriples]] = [] +# cql_triples: Optional[List[SimplifiedTriple]] = [] +# cql_gpnt: Optional[List[GraphPatternNotTriples]] = [] +# select_template: Optional[Template] = None +# sparql: Optional[str] = None +# +# # Additional fields +# default_limit: Optional[int] = None +# default_offset: Optional[int] = None +# default_order_by: Optional[str] = None +# default_order_by_desc: Optional[bool] = None +# runtime_vals_expanded: Optional[Dict] = {} +# merged_runtime_and_default_vals: Optional[Dict] = {} +# +# def _expand_runtime_vars(self): +# for k, v in self.runtime_values.items(): +# if k in ["limit", "offset", "q"]: +# self.runtime_vals_expanded[k] = v +# elif v: +# val = IRI(value=v).to_string() +# self.runtime_vals_expanded[k] = val +# +# def _merge_runtime_and_default_vars(self): +# default_args = { +# "limit": self.default_limit, +# "offset": self.default_offset, +# "order_by": self.default_order_by, +# "order_by_desc": self.default_order_by_desc, +# } +# self.merged_runtime_and_default_vals = default_args | self.runtime_vals_expanded +# +# def generate_sparql(self): +# """ +# Generates SPARQL query from Shape profile_graph. 
+# """ +# self._expand_runtime_vars() +# if self.listing_or_object == "listing": +# self.build_inner_select() +# self.parse_profile() +# self._generate_query() +# +# def _generate_query(self): +# where = WhereClause( +# group_graph_pattern=GroupGraphPattern(content=self.main_where_ggps) +# ) +# +# if self.construct_triples: +# self.construct_triples.extend(where.collect_triples()) +# else: +# self.construct_triples = where.collect_triples() +# self.construct_triples = list(set(self.construct_triples)) +# +# if self.listing_or_object == "listing": +# gpnt = GraphPatternNotTriples( +# content=GroupOrUnionGraphPattern( +# group_graph_patterns=[GroupGraphPattern(content=self.inner_select)] +# ) +# ) +# self.main_where_ggps.add_pattern(gpnt, prepend=True) +# +# construct_template = ConstructTemplate( +# construct_triples=ConstructTriples(triples=self.construct_triples) +# ) +# solution_modifier = SolutionModifier() +# query_str = ConstructQuery( +# construct_template=construct_template, +# where_clause=where, +# solution_modifier=solution_modifier, +# ).to_string() +# self.sparql = query_str +# +# def build_inner_select(self): +# """ +# Either set the focus_node to a URIRef, if a target node is provided, or generate a triple pattern to get list items +# Generates triples for the endpoint definition with runtime values substituted. +# """ +# inner_select_ggps = GroupGraphPatternSub() +# +# self._set_limit_and_offset() +# self._merge_runtime_and_default_vars() +# +# # rule_nodes = list( +# # self.endpoint_graph.objects(subject=self.endpoint_uri, predicate=SH.rule) +# # ) +# +# sol_mod, order_by_triple = self._create_focus_node_solution_modifier() +# +# if self.select_template: +# # sh:target / sh:select +# sss = self.create_select_subquery_from_template(sol_mod, order_by_triple) +# self.inner_select = sss +# +# # # rule nodes - for CONSTRUCT TRIPLES patterns. +# # if rule_nodes: +# # for rule_node in rule_nodes: +# # self._create_construct_triples_from_sh_rules(rule_node) +# +# else: +# self.inner_select = SubSelect( +# select_clause=SelectClause(variables_or_all=[self.focus_node]), +# where_clause=WhereClause( +# group_graph_pattern=GroupGraphPattern(content=inner_select_ggps) +# ), +# solution_modifier=sol_mod, +# ) +# +# if order_by_triple: +# inner_select_ggps.add_triple(order_by_triple) +# +# # otherwise just use what is provided by the endpoint shapes +# all_triples = self.endpoint_shacl_triples + self.cql_triples +# if all_triples: +# tb = TriplesBlock(triples=all_triples) +# inner_select_ggps.add_pattern(tb) +# +# all_gpnt = self.endpoint_shacl_gpnt + self.cql_gpnt +# if all_gpnt: +# for gpnt in all_gpnt: +# inner_select_ggps.add_pattern(gpnt) +# +# # def sh_rule_type_conversion(self, items: List): +# # """ +# # Assumes Literals are actually Variables. +# # """ +# # new_items = [] +# # for item in items: +# # if isinstance(item, URIRef): +# # item = IRI(value=item) +# # elif isinstance(item, Literal): +# # item = Var(value=item[1:]) +# # new_items.append(item) +# # return new_items +# # +# # def _create_construct_triples_from_sh_rules(self, rule_node): +# # """CONSTRUCT {?s ?p ?o} based on sh:rule [ sh:subject ... 
]""" +# # subject = self.endpoint_graph.value(subject=rule_node, predicate=SH.subject) +# # predicate = self.endpoint_graph.value(subject=rule_node, predicate=SH.predicate) +# # object = self.endpoint_graph.value(subject=rule_node, predicate=SH.object) +# # if subject == SH.this: +# # subject = self.focus_node +# # subject, predicate, object = self.sh_rule_type_conversion( +# # [subject, predicate, object] +# # ) +# # +# # triple = SimplifiedTriple(subject=subject, predicate=predicate, object=object) +# # if self.construct_triples: +# # self.construct_triples.append(triple) +# # else: +# # self.construct_triples = [triple] +# +# def create_select_subquery_from_template(self, sol_mod, order_by_triple): +# # expand any prefixes etc. in case the prefixes are not defined in the query this subquery is being inserted +# # into. NB Shape does provide a mechanism to declare prefixes used in SPARQL target - this has not been +# # implemented +# substituted_query = self.select_template.substitute( +# self.merged_runtime_and_default_vals +# ).rstrip() +# if order_by_triple: # insert it before the end of the string, +# order_by_triple_text = order_by_triple.to_string() +# substituted_query = ( +# substituted_query[:-1] + f"{{{order_by_triple_text}}} }}" +# ) +# additional_strings = [] +# if self.cql_triples: # for example from cql +# additional_strings.append( +# TriplesBlock(triples=self.cql_triples).to_string() +# ) +# if self.cql_gpnt: +# additional_strings.extend([gpnt.to_string() for gpnt in self.cql_gpnt]) +# substituted_query = self.split_query(substituted_query, additional_strings) +# sss = SubSelectString( +# select_string=substituted_query, solution_modifier=sol_mod +# ) +# return sss +# +# def split_query(self, original_query, additional_strings: List[str]): +# # Regex to match the entire structure: 'SELECT ?xxx { ... }' +# pattern = r"(SELECT\s+[\?\w\s\(\)]+\s*\{)(.*?)(\}\s*)" +# # Use re.split to split the query based on the pattern +# parts = re.split(pattern, original_query, flags=re.DOTALL) +# parts = [part for part in parts if part.strip()] +# new_parts = [parts[0]] + additional_strings +# if len(parts) > 1: +# new_parts.extend(parts[1:]) +# new_query = "".join(part for part in new_parts) +# return new_query +# +# def _create_focus_node_solution_modifier(self): +# """ +# Solution modifiers include LIMIT, OFFSET, ORDER BY clauses. 
+# """ +# order_clause = order_by_triple = None # order clause is optional +# order_by_path = self.merged_runtime_and_default_vals.get("order_by") +# if order_by_path: +# direction = self.merged_runtime_and_default_vals.get("order_by_desc") +# if direction: +# direction = "DESC" +# else: +# direction = "ASC" +# order_cond = OrderCondition( +# var=Var(value="order_by_var"), direction=direction +# ) +# order_clause = OrderClause(conditions=[order_cond]) +# order_by_triple = SimplifiedTriple( +# subject=self.focus_node, +# predicate=IRI(value=order_by_path[0]), +# object=Var(value="order_by_var"), +# ) +# limit = int(self.merged_runtime_and_default_vals["limit"]) +# offset = int(self.merged_runtime_and_default_vals["offset"]) +# limit_clause = LimitClause(limit=limit) +# offset_clause = OffsetClause(offset=offset) +# limit_offset_clauses = LimitOffsetClauses( +# limit_clause=limit_clause, offset_clause=offset_clause +# ) +# sol_mod = SolutionModifier( +# order_by=order_clause, limit_offset=limit_offset_clauses +# ) +# return sol_mod, order_by_triple +# +# def _set_limit_and_offset(self): +# """ +# Sets the default limit, offset, and ordering for a listing endpoint. +# """ +# default_limit = next( +# self.endpoint_graph.objects( +# subject=self.endpoint_uri, predicate=SHEXT.limit +# ), +# 20, +# ) +# default_offset = next( +# self.endpoint_graph.objects( +# subject=self.endpoint_uri, predicate=SHEXT.offset +# ), +# 0, +# ) +# default_order_by = list( +# self.endpoint_graph.objects( +# subject=self.endpoint_uri, predicate=SHEXT.orderBy +# ) +# ) +# +# self.default_limit = int(default_limit) +# self.default_offset = int(default_offset) +# +# # Process each blank node in the default_order_by list +# for blank_node in default_order_by: +# # Extract sh:path +# path = next(self.endpoint_graph.objects(blank_node, SH.path), None) +# if not path: +# continue # Skip if no sh:path is found +# +# # Check for sh:desc +# desc_node = next(self.endpoint_graph.objects(blank_node, SHEXT.desc), None) +# is_descending = ( +# True if desc_node and (desc_node == Literal(True)) else False +# ) +# +# # Add the configuration to the list +# self.default_order_by = (path,) +# self.default_order_by_desc = is_descending +# +# def parse_profile(self): +# for i, property_node in enumerate( +# self.profile_graph.objects(subject=self.profile_uri, predicate=SH.property) +# ): +# self._parse_property_shapes(property_node, i) +# self._build_bnode_blocks() +# def _build_bnode_blocks(self): bnode_depth = list( self.profile_graph.objects( @@ -346,208 +351,424 @@ def process_bn_level(depth, max_depth, outer_ggps): ) ] ) - old_o_var_pe = PrimaryExpression(content=old_o_var) - constraint = Constraint( - content=BuiltInCall.create_with_one_expr("isBLANK", old_o_var_pe) + gpnt = GraphPatternNotTriples( + content=Filter( + constraint=Constraint( + content=BuiltInCall.create_with_one_expr( + "isBLANK", PrimaryExpression(content=old_o_var) + ) + ) + ) ) - filter_block = Filter(constraint=constraint) - gpnt = GraphPatternNotTriples(content=filter_block) - ggps = GroupGraphPatternSub( - triples_block=triples_block, graph_patterns_or_triples_blocks=[gpnt] + opt = OptionalGraphPattern( + group_graph_pattern=GroupGraphPattern( + content=GroupGraphPatternSub( + triples_block=triples_block, + graph_patterns_or_triples_blocks=[gpnt], + ) + ) ) - ggp = GroupGraphPattern(content=ggps) - opt = OptionalGraphPattern(group_graph_pattern=ggp) outer_ggps.graph_patterns_or_triples_blocks.append(opt) if depth < max_depth: process_bn_level(depth + 1, 
max_depth, ggps) if bnode_depth > 1: process_bn_level(depth=2, max_depth=bnode_depth, outer_ggps=ggps) - gorugp = GroupOrUnionGraphPattern(group_graph_patterns=[container_ggp]) - gpnt = GraphPatternNotTriples(content=gorugp) - self.main_where_ggps.add_pattern(gpnt) - - def _parse_property_shapes(self, property_node, i): - def process_path_object(path_obj: Union[URIRef, BNode]): - if isinstance(path_obj, BNode): - pred_objects_gen = self.profile_graph.predicate_objects( - subject=path_obj - ) - bn_pred, bn_obj = next(pred_objects_gen, (None, None)) - if bn_obj == SH.union: - pass - elif bn_pred == SH.inversePath: - inverse_preds.append(IRI(value=bn_obj)) - elif bn_pred == SH.alternativePath: - predicates.extend(list(Collection(self.profile_graph, bn_obj))) - else: # sequence paths - predicates.append(tuple(Collection(self.profile_graph, path_obj))) - else: # a plain path specification to restrict the predicate to a specific value - predicates.append(path_obj) - - inverse_preds = [] # list of IRIs - predicates = [] # list of IRIs - union_items = None - path_object = self.profile_graph.value( - subject=property_node, predicate=SH.path, default=None + gpnt = GraphPatternNotTriples( + content=GroupOrUnionGraphPattern(group_graph_patterns=[container_ggp]) ) - if isinstance(path_object, BNode): - predicate_objects_gen = self.profile_graph.predicate_objects( - subject=path_object - ) - bnode_pred, bnode_obj = next(predicate_objects_gen, (None, None)) - if bnode_obj == SH.union: # TODO or sh:or ?? - union_list_bnode = list(Collection(self.profile_graph, path_object))[1] - union_items = list(Collection(self.profile_graph, union_list_bnode)) - - ggp_list = [] - if union_items: - for item in union_items: - process_path_object(item) - else: - process_path_object(path_object) - - if inverse_preds: - ggps_under_under_union = GroupGraphPatternSub() - ggps = ggps_under_under_union - ggp = GroupGraphPattern(content=ggps_under_under_union) - ggp_list.append(ggp) - self._add_inverse_preds(ggps, inverse_preds, i) - if predicates: - self._add_predicate_constraints(predicates, property_node, ggp_list) - self._add_object_constraints(ggp_list, property_node) - union = GroupOrUnionGraphPattern(group_graph_patterns=ggp_list) - gpnt = GraphPatternNotTriples(content=union) - - min = int( - self.profile_graph.value( - subject=property_node, predicate=SH.minCount, default=1 - ) - ) - if min == 0: # Add Optional GroupGraphPatternSub "wrapper" as the main GGPS - ggps_under_optional = GroupGraphPatternSub( - graph_patterns_or_triples_blocks=[gpnt] - ) - ggp = GroupGraphPattern(content=ggps_under_optional) - optional = OptionalGraphPattern(group_graph_pattern=ggp) - gpnt = GraphPatternNotTriples(content=optional) self.main_where_ggps.add_pattern(gpnt) - - def _add_inverse_preds( - self, ggps: GroupGraphPatternSub, inverse_preds: List[IRI], i +# +# def _parse_property_shapes(self, property_node, i): +# def process_path_object(path_obj: Union[URIRef, BNode]): +# if isinstance(path_obj, BNode): +# pred_objects_gen = self.profile_graph.predicate_objects( +# subject=path_obj +# ) +# bn_pred, bn_obj = next(pred_objects_gen, (None, None)) +# if bn_obj == SH.union: +# pass +# elif bn_pred == SH.inversePath: +# inverse_preds.append(IRI(value=bn_obj)) +# elif bn_pred == SH.alternativePath: +# predicates.extend(list(Collection(self.profile_graph, bn_obj))) +# else: # sequence paths +# predicates.append(tuple(Collection(self.profile_graph, path_obj))) +# else: # a plain path specification to restrict the predicate to a specific value 
+# predicates.append(path_obj) +# +# inverse_preds = [] # list of IRIs +# predicates = [] # list of IRIs +# union_items = None +# path_object = self.profile_graph.value( +# subject=property_node, predicate=SH.path, default=None +# ) +# if isinstance(path_object, BNode): +# predicate_objects_gen = self.profile_graph.predicate_objects( +# subject=path_object +# ) +# bnode_pred, bnode_obj = next(predicate_objects_gen, (None, None)) +# if bnode_obj == SH.union: # TODO or sh:or ?? +# union_list_bnode = list(Collection(self.profile_graph, path_object))[1] +# union_items = list(Collection(self.profile_graph, union_list_bnode)) +# +# ggp_list = [] +# if union_items: +# for item in union_items: +# process_path_object(item) +# else: +# process_path_object(path_object) +# +# if inverse_preds: +# ggps_under_under_union = GroupGraphPatternSub() +# ggps = ggps_under_under_union +# ggp = GroupGraphPattern(content=ggps_under_under_union) +# ggp_list.append(ggp) +# self._add_inverse_preds(ggps, inverse_preds, i) +# if predicates: +# self._add_predicate_constraints(predicates, property_node, ggp_list) +# self._add_object_constraints(ggp_list, property_node) +# union = GroupOrUnionGraphPattern(group_graph_patterns=ggp_list) +# gpnt = GraphPatternNotTriples(content=union) +# +# min = int( +# self.profile_graph.value( +# subject=property_node, predicate=SH.minCount, default=1 +# ) +# ) +# if min == 0: # Add Optional GroupGraphPatternSub "wrapper" as the main GGPS +# ggps_under_optional = GroupGraphPatternSub( +# graph_patterns_or_triples_blocks=[gpnt] +# ) +# ggp = GroupGraphPattern(content=ggps_under_optional) +# optional = OptionalGraphPattern(group_graph_pattern=ggp) +# gpnt = GraphPatternNotTriples(content=optional) +# self.main_where_ggps.add_pattern(gpnt) +# +# def _add_inverse_preds( +# self, ggps: GroupGraphPatternSub, inverse_preds: List[IRI], i +# ): +# if inverse_preds: +# ggps.add_triple( +# SimplifiedTriple( +# subject=Var(value=f"inv_path_{i}"), +# predicate=Var(value=f"inv_pred_{i}"), +# object=self.focus_node, +# ) +# ) +# dbv_list = [DataBlockValue(value=p) for p in inverse_preds] +# ildov = InlineDataOneVar( +# variable=Var(value=f"inv_pred_{i}"), datablockvalues=dbv_list +# ) +# data_block = DataBlock(block=ildov) +# inline_data = InlineData(data_block=data_block) +# gpnt = GraphPatternNotTriples(content=inline_data) +# ggps.add_pattern(gpnt) +# +# def _add_predicate_constraints(self, predicates, property_node, ggp_list): +# # check for any sequence paths - process separately +# sps = [p for p in predicates if isinstance(p, tuple)] # convert to IRIs here +# predicates = [ +# IRI(value=p) for p in predicates if not isinstance(p, tuple) +# ] # convert to IRIs below +# +# try: +# for i, (pred1, pred2) in enumerate(sps): +# t1 = SimplifiedTriple( +# subject=self.focus_node, +# predicate=IRI(value=pred1), +# object=Var(value=f"seq_obj_{i + 1}"), +# ) +# t2 = SimplifiedTriple( +# subject=Var(value=f"seq_obj_{i + 1}"), +# predicate=IRI(value=pred2), +# object=Var(value=f"seq_obj_terminal{i + 1}"), +# ) +# tb = TriplesBlock(triples=[t1, t2]) +# ggps = GroupGraphPatternSub(triples_block=tb) +# ggp = GroupGraphPattern(content=ggps) +# ggp_list.append(ggp) +# except Exception as e: +# print(e) +# +# # process direct path predicates +# max = self.profile_graph.value(subject=property_node, predicate=SH.maxCount) +# simplified_triple = SimplifiedTriple( +# subject=self.focus_node, +# predicate=Var(value="preds"), +# object=Var(value="objs"), +# ) +# tb = TriplesBlock(triples=[simplified_triple]) +# if 
predicates:
+#             if max == Literal(0):  # excluded predicates.
+#                 values = [
+#                     PrimaryExpression(content=IRIOrFunction(iri=p)) for p in predicates
+#                 ]
+#                 focus_pe = PrimaryExpression(content=Var(value="preds"))
+#                 values_constraint = Filter.filter_relational(
+#                     focus=focus_pe, comparators=values, operator="NOT IN"
+#                 )
+#                 gpnt = GraphPatternNotTriples(content=values_constraint)
+#                 if ggp_list:
+#                     for ggp in ggp_list:
+#                         ggp.content.add_pattern(gpnt)
+#                 else:
+#                     ggps = GroupGraphPatternSub(
+#                         graph_patterns_or_triples_blocks=[gpnt, tb]
+#                     )
+#                     ggp = GroupGraphPattern(content=ggps)
+#                     ggp_list.append(ggp)
+#             elif (
+#                 IRI(value=SHEXT.allPredicateValues) not in predicates
+#             ):  # add VALUES clause
+#                 dbv_list = [DataBlockValue(value=p) for p in predicates]
+#                 inline_data_one_var = InlineDataOneVar(
+#                     variable=Var(value="preds"), datablockvalues=dbv_list
+#                 )
+#                 data_block = DataBlock(block=inline_data_one_var)
+#                 inline_data = InlineData(data_block=data_block)
+#                 gpnt = GraphPatternNotTriples(content=inline_data)
+#                 ggps = GroupGraphPatternSub(graph_patterns_or_triples_blocks=[gpnt, tb])
+#                 ggp = GroupGraphPattern(content=ggps)
+#                 ggp_list.append(ggp)
+#             elif predicates == [IRI(value=SHEXT.allPredicateValues)]:
+#                 ggps = GroupGraphPatternSub(triples_block=tb)
+#                 ggp = GroupGraphPattern(content=ggps)
+#                 ggp_list.append(ggp)
+#
+#     def _add_object_constraints(self, ggp_list, property_node):
+#         value = self.profile_graph.value(
+#             subject=property_node, predicate=SH.hasValue, default=None
+#         )
+#         values_bn = self.profile_graph.value(
+#             subject=property_node, predicate=SH["in"], default=None
+#         )
+#         if value:  # a specific value
+#             objects = [value]
+#         elif values_bn:  # a set of values
+#             c = Collection(self.profile_graph, values_bn)
+#             objects = list(c)
+#         if value or values_bn:
+#             ggps = GroupGraphPatternSub()
+#             ggp = GroupGraphPattern(content=ggps)
+#             ggp_list.append(ggp)
+#             objs = []
+#             for obj in objects:
+#                 if isinstance(obj, Literal):
+#                     objs.append(RDFLiteral(value=obj))
+#                 elif isinstance(obj, URIRef):
+#                     objs.append(IRI(value=obj))
+#             dbv_list = [DataBlockValue(value=p) for p in objs]
+#             inline_data_one_var = InlineDataOneVar(
+#                 variable=Var(value="objs"), datablockvalues=dbv_list
+#             )
+#             data_block = DataBlock(block=inline_data_one_var)
+#             inline_data = InlineData(data_block=data_block)
+#             gpnt = GraphPatternNotTriples(content=inline_data)
+#             ggps.add_pattern(gpnt)
+
+
+class PrezQueryConstructorV2(ConstructQuery):
+    """
+    Creates a CONSTRUCT query to describe a listing of objects or an individual object.
+    Query format:
+
+    CONSTRUCT {
+        <construct_triples: profile_triples + additional_construct_triples>
+    }
+    WHERE {
+        <profile_triples>
+        <profile_gpnt>
+        # for listing queries only:
+        { SELECT ?focus_node <inner_select_vars>
+          WHERE {
+              <inner_select_triples>
+              <inner_select_gpnt>
+          }
+          ORDER BY <order_by_direction>(<order_by>)
+          LIMIT <limit>
+          OFFSET <offset>
+        }
+    }
+    gpnt = GraphPatternNotTriples - refer to the SPARQL grammar for details.
+ """ + + def __init__( + self, + additional_construct_triples: Optional[List[SimplifiedTriple]] = [], + profile_triples: Optional[List[SimplifiedTriple]] = [], + profile_gpnt: Optional[List[GraphPatternNotTriples]] = [], + inner_select_vars: Optional[List[Union[Var, Tuple[Expression, Var]]]] = [], + inner_select_triples: Optional[List[SimplifiedTriple]] = [], + inner_select_gpnt: Optional[List[GraphPatternNotTriples]] = [], + limit: Optional[int] = None, + offset: Optional[int] = None, + order_by: Optional[Var] = None, + order_by_direction: Optional[str] = None, ): - if inverse_preds: - ggps.add_triple( - SimplifiedTriple( - subject=Var(value=f"inv_path_{i}"), - predicate=Var(value=f"inv_pred_{i}"), - object=self.focus_node, - ) - ) - dbv_list = [DataBlockValue(value=p) for p in inverse_preds] - ildov = InlineDataOneVar( - variable=Var(value=f"inv_pred_{i}"), datablockvalues=dbv_list + # where clause triples and GraphPatternNotTriples - set up first as in the case of a listing query, the inner + # select is appended to this list as a GraphPatternNotTriples + gpotb = [TriplesBlock(triples=profile_triples), *profile_gpnt] + + # inner_select_vars typically set for search queries or custom select queries; otherwise we only want the focus + # node from the inner select query + if not inner_select_vars: + inner_select_vars = [(Var(value="focus_node"))] + + # order condition + oc = None + if order_by: + oc = OrderClause( + conditions=[ + OrderCondition( + var=order_by, # ORDER BY + direction=order_by_direction, # DESC/ASC + ) + ] ) - data_block = DataBlock(block=ildov) - inline_data = InlineData(data_block=data_block) - gpnt = GraphPatternNotTriples(content=inline_data) - ggps.add_pattern(gpnt) - - def _add_predicate_constraints(self, predicates, property_node, ggp_list): - # check for any sequence paths - process separately - sps = [p for p in predicates if isinstance(p, tuple)] # convert to IRIs here - predicates = [ - IRI(value=p) for p in predicates if not isinstance(p, tuple) - ] # convert to IRIs below - for i, (pred1, pred2) in enumerate(sps): - t1 = SimplifiedTriple( - subject=self.focus_node, - predicate=IRI(value=pred1), - object=Var(value=f"seq_obj_{i + 1}"), + # for listing queries only, add an inner select to the where clause + if inner_select_triples or inner_select_gpnt: + gpnt_inner_subselect = GraphPatternNotTriples( + content=GroupOrUnionGraphPattern( + group_graph_patterns=[ + GroupGraphPattern( + content=SubSelect( + select_clause=SelectClause( + distinct=True, + variables_or_all=inner_select_vars + ), + where_clause=WhereClause( + group_graph_pattern=GroupGraphPattern( + content=GroupGraphPatternSub( + graph_patterns_or_triples_blocks=[ + TriplesBlock( + triples=inner_select_triples + ), + *inner_select_gpnt, + ] + ) + ) + ), + solution_modifier=SolutionModifier( + limit_offset=LimitOffsetClauses( + limit_clause=LimitClause( + limit=limit + ), # LIMIT m + offset_clause=OffsetClause( + offset=offset + ), # OFFSET n + ), + order_by=oc + ), + ) + ) + ] + ) ) - t2 = SimplifiedTriple( - subject=Var(value=f"seq_obj_{i + 1}"), - predicate=IRI(value=pred2), - object=Var(value=f"seq_obj_terminal{i + 1}"), + gpotb.append(gpnt_inner_subselect) + where_clause = WhereClause( + group_graph_pattern=GroupGraphPattern( + content=GroupGraphPatternSub(graph_patterns_or_triples_blocks=gpotb) ) - tb = TriplesBlock(triples=[t1, t2]) - ggps = GroupGraphPatternSub(triples_block=tb) - ggp = GroupGraphPattern(content=ggps) - ggp_list.append(ggp) - - # process direct path predicates - max = 
self.profile_graph.value(subject=property_node, predicate=SH.maxCount) - simplified_triple = SimplifiedTriple( - subject=self.focus_node, - predicate=Var(value="preds"), - object=Var(value="objs"), ) - tb = TriplesBlock(triples=[simplified_triple]) - if predicates: - if max == Literal(0): # excluded predicates. - values = [ - PrimaryExpression(content=IRIOrFunction(iri=p)) for p in predicates - ] - focus_pe = PrimaryExpression(content=Var(value="preds")) - values_constraint = Filter.filter_relational( - focus=focus_pe, comparators=values, operator="NOT IN" - ) - gpnt = GraphPatternNotTriples(content=values_constraint) - if ggp_list: - for ggp in ggp_list: - ggp.content.add_pattern(gpnt) - else: - ggps = GroupGraphPatternSub( - graph_patterns_or_triples_blocks=[gpnt, tb] - ) - ggp = GroupGraphPattern(content=ggps) - ggp_list.append(ggp) - elif ( - IRI(value=SHEXT.allPredicateValues) not in predicates - ): # add VALUES clause - dbv_list = [DataBlockValue(value=p) for p in predicates] - inline_data_one_var = InlineDataOneVar( - variable=Var(value="preds"), datablockvalues=dbv_list - ) - data_block = DataBlock(block=inline_data_one_var) - inline_data = InlineData(data_block=data_block) - gpnt = GraphPatternNotTriples(content=inline_data) - ggps = GroupGraphPatternSub(graph_patterns_or_triples_blocks=[gpnt, tb]) - ggp = GroupGraphPattern(content=ggps) - ggp_list.append(ggp) - elif predicates == [IRI(value=SHEXT.allPredicateValues)]: - ggps = GroupGraphPatternSub(triples_block=tb) - ggp = GroupGraphPattern(content=ggps) - ggp_list.append(ggp) - def _add_object_constraints(self, ggp_list, property_node): - value = self.profile_graph.value( - subject=property_node, predicate=SH.hasValue, default=None + # construct triples is usually only from the profile, but in the case of search queries for example, additional + # triples are added + construct_triples = TriplesBlock(triples=profile_triples).collect_triples() + if additional_construct_triples: + construct_triples.extend(additional_construct_triples) + construct_template = ConstructTemplate( + construct_triples=ConstructTriples(triples=construct_triples) ) - values_bn = self.profile_graph.value( - subject=property_node, predicate=SH["in"], default=None + super().__init__( + construct_template=construct_template, + where_clause=where_clause, + solution_modifier=SolutionModifier(), ) - if value: # a specific value - objects = [value] - elif values_bn: # a set of values - c = Collection(self.profile_graph, values_bn) - objects = list(c) - if value or values_bn: - ggps = GroupGraphPatternSub() - ggp = GroupGraphPattern(content=ggps) - ggp_list.append(ggp) - objs = [] - for obj in objects: - if isinstance(obj, Literal): - objs.append(RDFLiteral(value=obj)) - elif isinstance(obj, URIRef): - objs.append(IRI(value=obj)) - dbv_list = [DataBlockValue(value=p) for p in objs] - inline_data_one_var = InlineDataOneVar( - variable=Var(value="objs"), datablockvalues=dbv_list - ) - data_block = DataBlock(block=inline_data_one_var) - inline_data = InlineData(data_block=data_block) - gpnt = GraphPatternNotTriples(content=inline_data) - ggps.add_pattern(gpnt) + + @property + def inner_select(self): + return self.where_clause.group_graph_pattern.content.graph_patterns_or_triples_blocks[-1].content.group_graph_patterns[0].content + + +# def get_profile_grammar(profile_uri: URIRef): +# """ +# Returns the grammar for a given profile. 
+#     """
+#     profile = NodeShape(uri=profile_uri, graph=profiles_graph_cache, kind="profile")
+#     return profile.triples_list, profile.gpnt_list
+#
+#
+# def get_endpoint_grammar(endpoint_uri: URIRef):
+#     """
+#     Returns the grammar for a given endpoint.
+#     """
+#     endpoint_shape = NodeShape(
+#         uri=endpoint_uri, graph=endpoints_graph_cache, kind="endpoint"
+#     )
+#     return endpoint_shape.triples_list, endpoint_shape.gpnt_list
+#
+#
+# def get_cql_grammar():
+#     pass
+#
+#
+# def get_search_grammar():
+#     pass
+#
+#
+# def get_all_grammar():
+#     pass


+def merge_listing_query_grammar_inputs(
+    cql_parser: Optional[CQLParser] = None,
+    endpoint_nodeshape: Optional[NodeShape] = None,
+    search_query: Optional[SearchQueryRegex] = None,
+    page: Optional[int] = None,
+    per_page: Optional[int] = None,
+    order_by: Optional[str] = None,
+    order_by_direction: Optional[str] = None,
+) -> dict:
+    """
+    Merges the CQL, endpoint nodeshape, search, and pagination inputs into keyword
+    arguments for PrezQueryConstructorV2.
+    """
+    kwargs = {
+        "additional_construct_triples": None,
+        "inner_select_vars": [],
+        "inner_select_triples": [],
+        "inner_select_gpnt": [],
+        "limit": None,
+        "offset": None,
+        "order_by": order_by,
+        "order_by_direction": order_by_direction,
+    }
+    if search_query:
+        kwargs["additional_construct_triples"] = search_query.construct_triples
+        kwargs["inner_select_vars"] = search_query.inner_select_vars
+        kwargs["limit"] = search_query.limit
+        kwargs["offset"] = search_query.offset
+        kwargs["order_by"] = search_query.order_by
+        kwargs["order_by_direction"] = search_query.order_by_direction
+        kwargs["inner_select_gpnt"] = [search_query.inner_select_gpnt]
+    else:
+        limit = int(per_page)
+        offset = limit * (int(page) - 1)
+        kwargs["limit"] = limit
+        kwargs["offset"] = offset
+        if order_by:
+            kwargs["order_by"] = Var(value=order_by)
+            if order_by_direction:
+                kwargs["order_by_direction"] = order_by_direction
+            else:
+                kwargs["order_by_direction"] = "ASC"
+
+    if cql_parser:
+        pass
+
+    if endpoint_nodeshape:
+        kwargs["inner_select_triples"].extend(endpoint_nodeshape.triples_list)
+        kwargs["inner_select_gpnt"].extend(endpoint_nodeshape.gpnt_list)
+
+    return kwargs
diff --git a/pyproject.toml b/pyproject.toml
index e60993bd..33460545 100755
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -6,7 +6,7 @@ authors = ["Jamie Feiss ", "Nicholas Car "BuiltInCall":
+    ) -> BuiltInCall:
     """
     Convenience method for functions that take a list of PrimaryExpressions as arguments.
     Wraps each PrimaryExpression in an Expression.
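A minimal usage sketch (not part of the patch) of how merge_listing_query_grammar_inputs, added above, is intended to feed PrezQueryConstructorV2. The geo:Feature target class and the rdfs:label predicate are assumed example values; only the two function names and their keyword arguments come from this patch.

    from rdflib import RDF, RDFS
    from rdflib.namespace import GEO

    from prez.services.query_generation.umbrella import (
        PrezQueryConstructorV2,
        merge_listing_query_grammar_inputs,
    )
    from temp.grammar import IRI, SimplifiedTriple, Var

    # page=2, per_page=10 -> LIMIT 10 OFFSET 10; order_by="label" -> ORDER BY ASC(?label)
    kwargs = merge_listing_query_grammar_inputs(page=2, per_page=10, order_by="label")

    # inner SELECT: which focus nodes are listed (geo:Feature is an assumed class)
    kwargs["inner_select_triples"].extend(
        [
            SimplifiedTriple(
                subject=Var(value="focus_node"),
                predicate=IRI(value=str(RDF.type)),
                object=IRI(value=str(GEO.Feature)),
            ),
            SimplifiedTriple(
                subject=Var(value="focus_node"),
                predicate=IRI(value=str(RDFS.label)),
                object=Var(value="label"),  # binds ?label so ORDER BY has a value
            ),
        ]
    )

    # profile triples determine what is CONSTRUCTed for each focus node
    query = PrezQueryConstructorV2(
        profile_triples=[
            SimplifiedTriple(
                subject=Var(value="focus_node"),
                predicate=IRI(value=str(RDFS.label)),
                object=Var(value="label"),
            )
        ],
        **kwargs,
    )
    print(query.to_string())  # the serialised CONSTRUCT query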
diff --git a/tests/TO_FIX_test_dd_profiles.py b/tests/TO_FIX_test_dd_profiles.py index 3707d3f4..7628b8b1 100755 --- a/tests/TO_FIX_test_dd_profiles.py +++ b/tests/TO_FIX_test_dd_profiles.py @@ -7,7 +7,7 @@ from pyoxigraph.pyoxigraph import Store from prez.app import app -from prez.dependencies import get_repo +from prez.dependencies import get_data_repo from prez.repositories import Repo, PyoxigraphRepo @@ -34,7 +34,7 @@ def test_client(test_repo: Repo) -> TestClient: def override_get_repo(): return test_repo - app.dependency_overrides[get_repo] = override_get_repo + app.dependency_overrides[get_data_repo] = override_get_repo with TestClient(app) as c: yield c diff --git a/tests/TO_FIX_test_endpoints_vocprez.py b/tests/TO_FIX_test_endpoints_vocprez.py index 69222a35..e80f7ca1 100755 --- a/tests/TO_FIX_test_endpoints_vocprez.py +++ b/tests/TO_FIX_test_endpoints_vocprez.py @@ -8,7 +8,7 @@ from rdflib.compare import isomorphic from prez.app import app -from prez.dependencies import get_repo +from prez.dependencies import get_data_repo from prez.repositories import Repo, PyoxigraphRepo @@ -48,7 +48,7 @@ def test_client(test_repo: Repo) -> TestClient: def override_get_repo(): return test_repo - app.dependency_overrides[get_repo] = override_get_repo + app.dependency_overrides[get_data_repo] = override_get_repo with TestClient(app) as c: wait_for_app_to_be_ready(c) diff --git a/tests/TO_FIX_test_search.py b/tests/TO_FIX_test_search.py index 3d429002..916143c2 100755 --- a/tests/TO_FIX_test_search.py +++ b/tests/TO_FIX_test_search.py @@ -8,7 +8,7 @@ from rdflib.compare import isomorphic from prez.app import app -from prez.dependencies import get_repo +from prez.dependencies import get_data_repo from prez.repositories import Repo, PyoxigraphRepo @@ -35,7 +35,7 @@ def client(test_repo: Repo) -> TestClient: def override_get_repo(): return test_repo - app.dependency_overrides[get_repo] = override_get_repo + app.dependency_overrides[get_data_repo] = override_get_repo with TestClient(app) as c: yield c diff --git a/tests/_test_cql.py b/tests/_test_cql.py index ee97f148..9de4352b 100755 --- a/tests/_test_cql.py +++ b/tests/_test_cql.py @@ -6,7 +6,7 @@ from pyoxigraph.pyoxigraph import Store from prez.app import app -from prez.dependencies import get_repo +from prez.dependencies import get_data_repo from prez.repositories import Repo, PyoxigraphRepo from urllib.parse import quote_plus @@ -34,7 +34,7 @@ def client(test_repo: Repo) -> TestClient: def override_get_repo(): return test_repo - app.dependency_overrides[get_repo] = override_get_repo + app.dependency_overrides[get_data_repo] = override_get_repo with TestClient(app) as c: yield c diff --git a/tests/conftest.py b/tests/conftest.py index 3be7a303..35d26322 100755 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -16,7 +16,7 @@ from pyoxigraph.pyoxigraph import Store from prez.app import app -from prez.dependencies import get_repo +from prez.dependencies import get_data_repo from prez.repositories import Repo, PyoxigraphRepo @@ -43,7 +43,7 @@ def client(test_repo: Repo) -> TestClient: def override_get_repo(): return test_repo - app.dependency_overrides[get_repo] = override_get_repo + app.dependency_overrides[get_data_repo] = override_get_repo with TestClient(app) as c: yield c @@ -99,7 +99,3 @@ def a_resource_link(client, a_top_level_catalog_link): for link in links: if link != a_top_level_catalog_link: return link - - - - diff --git a/tests/test_alt_profiles.py b/tests/test_alt_profiles.py index 083f59ef..0358b8e7 100755 --- 
a/tests/test_alt_profiles.py +++ b/tests/test_alt_profiles.py @@ -29,16 +29,14 @@ def test_listing_alt_profile(client): r = client.get(f"/catalogs?_mediatype=text/turtle&_profile=altr-ext:alt-profile") response_graph = Graph().parse(data=r.text) assert ( - URIRef("http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile"), - RDF.type, - URIRef("https://prez.dev/ListingProfile"), - ) in response_graph + URIRef("http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile"), + RDF.type, + URIRef("https://prez.dev/ListingProfile"), + ) in response_graph def test_object_alt_profile_token(client, a_catalog_link): - r = client.get( - f"{a_catalog_link}?_mediatype=text/turtle&_profile=alt" - ) + r = client.get(f"{a_catalog_link}?_mediatype=text/turtle&_profile=alt") response_graph = Graph().parse(data=r.text) object_profiles = ( None, @@ -51,4 +49,6 @@ def test_object_alt_profile_token(client, a_catalog_link): PREZ.ListingProfile, ) assert len(list(response_graph.triples(object_profiles))) > 1 - assert len(list(response_graph.triples(listing_profiles))) == 1 # only the alt profile + assert ( + len(list(response_graph.triples(listing_profiles))) == 1 + ) # only the alt profile diff --git a/tests/test_connegp.py b/tests/test_connegp.py index 63129bbb..6bd232e2 100644 --- a/tests/test_connegp.py +++ b/tests/test_connegp.py @@ -6,7 +6,7 @@ from rdflib import URIRef from prez.app import app -from prez.dependencies import get_repo +from prez.dependencies import get_data_repo from prez.reference_data.prez_ns import PREZ from prez.repositories import PyoxigraphRepo, Repo from prez.services.connegp_service import NegotiatedPMTs @@ -151,7 +151,7 @@ async def test_connegp(headers, params, classes, listing, expected_selected, tes def override_get_repo(): return test_repo - app.dependency_overrides[get_repo] = override_get_repo + app.dependency_overrides[get_data_repo] = override_get_repo pmts = NegotiatedPMTs( headers=headers, params=params, diff --git a/tests/test_count.py b/tests/test_count.py index b421b8be..9e7a831b 100755 --- a/tests/test_count.py +++ b/tests/test_count.py @@ -33,11 +33,11 @@ def get_curie(client: TestClient, iri: str) -> str: ], ) def test_count( - client: TestClient, - iri: str, - inbound: str | None, - outbound: str | None, - count: int, + client: TestClient, + iri: str, + inbound: str | None, + outbound: str | None, + count: int, ): curie = get_curie(client, iri) params = {"curie": curie, "inbound": inbound, "outbound": outbound} diff --git a/tests/test_endpoints_object.py b/tests/test_endpoints_object.py index 35c77449..29b16be6 100755 --- a/tests/test_endpoints_object.py +++ b/tests/test_endpoints_object.py @@ -13,9 +13,7 @@ def test_feature_collection(client): def test_feature(client): - r = client.get( - f"/object?uri=https://example.com/Feature1" - ) + r = client.get(f"/object?uri=https://example.com/Feature1") response_graph = Graph().parse(data=r.text) assert ( URIRef("https://example.com/Feature1"), diff --git a/tests/test_endpoints_ok.py b/tests/test_endpoints_ok.py index 247ab1b1..228f2d52 100755 --- a/tests/test_endpoints_ok.py +++ b/tests/test_endpoints_ok.py @@ -24,7 +24,7 @@ def wait_for_app_to_be_ready(client, timeout=10): def ogcprez_links( - client, visited: Optional[Set] = None, link="/catalogs", total_links_visited=0 + client, visited: Optional[Set] = None, link="/catalogs", total_links_visited=0 ): if not visited: visited = set() @@ -43,10 +43,13 @@ def ogcprez_links( visited.add(next_link) # Make the recursive call and update the total_links_visited # and visited set with the 
returned values - visited, total_links_visited = ogcprez_links(client, visited, str(next_link), total_links_visited + 1) + visited, total_links_visited = ogcprez_links( + client, visited, str(next_link), total_links_visited + 1 + ) # Return the updated count and visited set return visited, total_links_visited + def test_visit_all_links(client): visited_links, total_count = ogcprez_links(client) - print(f"Total links visited: {total_count}") \ No newline at end of file + print(f"Total links visited: {total_count}") diff --git a/tests/test_endpoints_profiles.py b/tests/test_endpoints_profiles.py index a92dbc1a..30c18ce7 100755 --- a/tests/test_endpoints_profiles.py +++ b/tests/test_endpoints_profiles.py @@ -21,4 +21,3 @@ def test_sp_profile(client): r = client.get("/profiles/prez:SpacePrezProfile") g = Graph().parse(data=r.text) assert (URIRef("https://prez.dev/SpacePrezProfile"), RDF.type, PROF.Profile) in g - diff --git a/tests/test_endpoints_spaceprez.py b/tests/test_endpoints_spaceprez.py index da6bb213..58026be0 100755 --- a/tests/test_endpoints_spaceprez.py +++ b/tests/test_endpoints_spaceprez.py @@ -19,10 +19,10 @@ def test_feature_collection(client, an_fc_link): g_text = r.text response_graph = Graph().parse(data=g_text) assert ( - URIRef("https://example.com/FeatureCollection"), - RDF.type, - GEO.FeatureCollection, - ) in response_graph + URIRef("https://example.com/FeatureCollection"), + RDF.type, + GEO.FeatureCollection, + ) in response_graph def test_feature(client, a_feature_link): diff --git a/tests/test_node_selection_shacl.py b/tests/test_node_selection_shacl.py index 8a28b9d6..848969b3 100755 --- a/tests/test_node_selection_shacl.py +++ b/tests/test_node_selection_shacl.py @@ -1,9 +1,8 @@ import pytest from rdflib import Graph, URIRef -from prez.services.query_generation.node_selection.endpoint_shacl import ( +from prez.services.query_generation.shacl import ( NodeShape, - PropertyShape, ) endpoints_graph = Graph().parse( @@ -13,7 +12,7 @@ @pytest.mark.parametrize("nodeshape_uri", ["http://example.org/ns#Collections"]) def test_nodeshape_parsing(nodeshape_uri): - ns = NodeShape(uri=URIRef(nodeshape_uri), graph=endpoints_graph) + ns = NodeShape(uri=URIRef(nodeshape_uri), graph=endpoints_graph, kind="endpoint") assert ns.targetClasses == [ URIRef("http://www.opengis.net/ont/geosparql#FeatureCollection"), URIRef("http://www.w3.org/2004/02/skos/core#ConceptScheme"), @@ -28,5 +27,5 @@ def test_nodeshape_parsing(nodeshape_uri): ["http://example.org/ns#ConceptSchemeConcept"], ) def test_nodeshape_to_grammar(nodeshape_uri): - ns = NodeShape(uri=URIRef(nodeshape_uri), graph=endpoints_graph) + ns = NodeShape(uri=URIRef(nodeshape_uri), graph=endpoints_graph, kind="endpoint") ... 
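For orientation before the new property-shape tests below, a minimal sketch of the NodeShape API the updated tests above exercise. The shapes file path is an assumption based on this patch series' reference data layout; the shape URI is the one parametrised in the test.

    from rdflib import Graph, URIRef

    from prez.services.query_generation.shacl import NodeShape

    # assumed path: the node-selection shapes shipped with this patch series
    endpoints_graph = Graph().parse(
        "prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl",
        format="turtle",
    )
    ns = NodeShape(
        uri=URIRef("http://example.org/ns#Collections"),
        graph=endpoints_graph,
        kind="endpoint",  # per the updated tests: "endpoint" or "profile"
    )
    print(ns.targetClasses)  # classes the endpoint can list
    print(ns.triples_list)   # SimplifiedTriples for the inner SELECT WHERE clause
    print(ns.gpnt_list)      # other graph patterns (FILTER, VALUES, etc.)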
diff --git a/tests/test_property_selection_shacl.py b/tests/test_property_selection_shacl.py
new file mode 100644
index 00000000..2c0c3222
--- /dev/null
+++ b/tests/test_property_selection_shacl.py
@@ -0,0 +1,226 @@
+from rdflib import Graph, URIRef, SH, RDF, PROV, DCTERMS
+
+from prez.reference_data.prez_ns import REG
+from prez.services.query_generation.shacl import PropertyShape
+from temp.grammar import SimplifiedTriple, Var, IRI, OptionalGraphPattern, Filter
+
+
+# uri: URIRef | BNode  # URI of the shape
+# graph: Graph
+# focus_node: IRI | Var = Var(value="focus_node")
+# # inputs
+# property_paths: Optional[List[PropertyPath]] = None
+# or_klasses: Optional[List[URIRef]] = None
+# # outputs
+# grammar: Optional[GroupGraphPatternSub] = None
+# triples_list: Optional[List[SimplifiedTriple]] = None
+# gpnt_list: Optional[List[GraphPatternNotTriples]] = None
+# prof_nodes: Optional[Dict[str, Var | IRI]] = {}
+# classes_at_len: Optional[Dict[str, List[URIRef]]] = {}
+# _select_vars: Optional[List[Var]] = None
+
+
+def test_simple_path():
+    g = Graph().parse(
+        data="""
+        PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+        PREFIX sh: <http://www.w3.org/ns/shacl#>
+
+        <http://example-profile> sh:property [ sh:path rdf:type ] .
+        """
+    )
+    path_bn = g.value(subject=URIRef("http://example-profile"), predicate=SH.property)
+    ps = PropertyShape(uri=path_bn, graph=g)
+    assert (
+        SimplifiedTriple(
+            subject=Var(value="focus_node"),
+            predicate=IRI(value=RDF.type),
+            object=Var(value="prof_node_1"),
+        )
+        in ps.triples_list
+    )
+
+
+def test_sequence_path():
+    g = Graph().parse(
+        data="""
+        PREFIX sh: <http://www.w3.org/ns/shacl#>
+        PREFIX prov: <http://www.w3.org/ns/prov#>
+
+        <http://example-profile> sh:property [ sh:path ( prov:qualifiedDerivation prov:hadRole ) ] .
+        """
+    )
+    path_bn = g.value(subject=URIRef("http://example-profile"), predicate=SH.property)
+    ps = PropertyShape(uri=path_bn, graph=g)
+    assert (
+        SimplifiedTriple(
+            subject=Var(value="focus_node"),
+            predicate=IRI(value=PROV.qualifiedDerivation),
+            object=Var(value="prof_node_1"),
+        )
+        in ps.triples_list
+    )
+    assert (
+        SimplifiedTriple(
+            subject=Var(value="prof_node_1"),
+            predicate=IRI(value=PROV.hadRole),
+            object=Var(value="prof_node_2"),
+        )
+        in ps.triples_list
+    )
+
+
+def test_union():
+    g = Graph().parse(
+        data="""
+        PREFIX dcterms: <http://purl.org/dc/terms/>
+        PREFIX reg: <http://purl.org/linked-data/registry#>
+        PREFIX sh: <http://www.w3.org/ns/shacl#>
+        PREFIX prov: <http://www.w3.org/ns/prov#>
+
+        <http://example-profile> sh:property [
+            sh:path (
+                sh:union (
+                    dcterms:publisher
+                    reg:status
+                    ( prov:qualifiedDerivation prov:hadRole )
+                    ( prov:qualifiedDerivation prov:entity )
+                )
+            )
+        ]
+        .
+
+        """
+    )
+    path_bn = g.value(subject=URIRef("http://example-profile"), predicate=SH.property)
+    ps = PropertyShape(uri=path_bn, graph=g, kind="profile")
+    assert (
+        SimplifiedTriple(
+            subject=Var(value="focus_node"),
+            predicate=IRI(value=PROV.qualifiedDerivation),
+            object=Var(value="prof_node_1"),
+        )
+        in ps.triples_list
+    )
+    assert (
+        SimplifiedTriple(
+            subject=Var(value="prof_node_1"),
+            predicate=IRI(value=PROV.hadRole),
+            object=Var(value="prof_node_2"),
+        )
+        in ps.triples_list
+    )
+    assert (
+        SimplifiedTriple(
+            subject=Var(value="focus_node"),
+            predicate=IRI(value=PROV.qualifiedDerivation),
+            object=Var(value="prof_node_3"),
+        )
+        in ps.triples_list
+    )
+    assert (
+        SimplifiedTriple(
+            subject=Var(value="prof_node_3"),
+            predicate=IRI(value=PROV.entity),
+            object=Var(value="prof_node_4"),
+        )
+        in ps.triples_list
+    )
+    assert (
+        SimplifiedTriple(
+            subject=Var(value="focus_node"),
+            predicate=IRI(value=DCTERMS.publisher),
+            object=Var(value="prof_node_5"),
+        )
+        in ps.triples_list
+    )
+    assert (
+        SimplifiedTriple(
+            subject=Var(value="focus_node"),
+            predicate=IRI(value=REG.status),
+            object=Var(value="prof_node_6"),
+        )
+        in ps.triples_list
+    )
+
+
+def test_optional_props():
+    g = Graph().parse(
+        data="""
+        PREFIX dcterms: <http://purl.org/dc/terms/>
+        PREFIX reg: <http://purl.org/linked-data/registry#>
+        PREFIX sh: <http://www.w3.org/ns/shacl#>
+        PREFIX prov: <http://www.w3.org/ns/prov#>
+
+        <http://example-profile> sh:property [
+            sh:minCount 0 ;
+            sh:path dcterms:publisher ;
+        ]
+        .
+
+        """
+    )
+    path_bn = g.value(subject=URIRef("http://example-profile"), predicate=SH.property)
+    ps = PropertyShape(uri=path_bn, graph=g, kind="profile")
+    assert ps.triples_list == []
+    assert isinstance(ps.gpnt_list[0].content, OptionalGraphPattern)
+
+
+def test_complex_optional_props():
+    g = Graph().parse(
+        data="""
+        PREFIX dcterms: <http://purl.org/dc/terms/>
+        PREFIX sh: <http://www.w3.org/ns/shacl#>
+        PREFIX prov: <http://www.w3.org/ns/prov#>
+
+        <http://example-profile> sh:property [
+            sh:minCount 0 ;
+            sh:path (
+                sh:union (
+                    dcterms:publisher
+                    ( prov:qualifiedDerivation prov:hadRole )
+                )
+            )
+        ]
+        .
+
+        """
+    )
+    path_bn = g.value(subject=URIRef("http://example-profile"), predicate=SH.property)
+    ps = PropertyShape(uri=path_bn, graph=g, kind="profile")
+    assert ps.triples_list == []
+    assert isinstance(ps.gpnt_list[0].content, OptionalGraphPattern)
+
+
+def test_excluded_props():
+    g = Graph().parse(
+        data="""
+        PREFIX dcterms: <http://purl.org/dc/terms/>
+        PREFIX reg: <http://purl.org/linked-data/registry#>
+        PREFIX sh: <http://www.w3.org/ns/shacl#>
+        PREFIX prov: <http://www.w3.org/ns/prov#>
+
+        <http://example-profile> sh:property [
+            sh:maxCount 0 ;
+            sh:path (
+                sh:union (
+                    dcterms:publisher
+                    reg:status
+                )
+            )
+        ]
+        .
+ + """ + ) + path_bn = g.value(subject=URIRef("http://example-profile"), predicate=SH.property) + ps = PropertyShape(uri=path_bn, graph=g, kind="profile") + assert ( + SimplifiedTriple( + subject=Var(value="prof_node_1"), + predicate=Var(value="excluded_props"), + object=Var(value="excluded_prop_vals"), + ) + in ps.triples_list + ) + assert isinstance(ps.gpnt_list[0].content, Filter) diff --git a/tests/test_query_construction.py b/tests/test_query_construction.py new file mode 100644 index 00000000..13b34e9f --- /dev/null +++ b/tests/test_query_construction.py @@ -0,0 +1,85 @@ +from rdflib import RDF, RDFS +from rdflib.namespace import GEO + +from prez.services.query_generation.search import ( + SearchQueryRegex, +) +from prez.services.query_generation.umbrella import PrezQueryConstructorV2 +from temp.grammar import * + + +def test_basic_object(): + PrezQueryConstructorV2( + profile_triples=[ + SimplifiedTriple( + subject=IRI(value="https://test-object"), + predicate=IRI(value="https://prez.dev/ont/label"), + object=Var(value="label"), + ), + SimplifiedTriple( + subject=IRI(value="https://test-object"), + predicate=IRI(value="https://property"), + object=Var(value="propValue"), + ), + ], + ) + + +def test_basic_listing(): + test = PrezQueryConstructorV2( + profile_triples=[ + SimplifiedTriple( + subject=Var(value="focus_node"), + predicate=IRI(value=str(RDF.type)), + object=IRI(value=str(GEO.Feature)), + ), + SimplifiedTriple( + subject=Var(value="focus_node"), + predicate=IRI(value="https://property"), + object=Var(value="propValue"), + ), + ], + inner_select_triples=[ + SimplifiedTriple( + subject=Var(value="focus_node"), + predicate=IRI(value=str(RDF.type)), + object=IRI(value=str(GEO.Feature)), + ), + SimplifiedTriple( + subject=Var(value="focus_node"), + predicate=IRI(value=str(RDFS.label)), + object=Var(value="label"), + ), + ], + limit=10, + offset=0, + order_by=Var(value="label"), + order_by_direction="ASC", + ) + print("") + + +def test_search_query_regex(): + sq = SearchQueryRegex(term="test", predicates=[RDFS.label]) + test = PrezQueryConstructorV2( + profile_triples=[ + SimplifiedTriple( + subject=Var(value="focus_node"), + predicate=IRI(value=str(RDF.type)), + object=IRI(value=str(GEO.Feature)), + ), + SimplifiedTriple( + subject=Var(value="focus_node"), + predicate=IRI(value="https://property"), + object=Var(value="propValue"), + ), + ], + additional_construct_triples=sq.construct_triples, + inner_select_vars=sq.inner_select_vars, + inner_select_gpnt=[sq.inner_select_gpnt], + limit=sq.limit, + offset=sq.offset, + order_by=sq.order_by, + order_by_direction=sq.order_by_direction, + ) + print("") From f061a3c758fd1f3e33cc70e764bf7ceee20a22a2 Mon Sep 17 00:00:00 2001 From: david Date: Thu, 4 Apr 2024 14:07:58 +1000 Subject: [PATCH 24/25] all current tests passing --- prez/app.py | 5 - prez/config.py | 12 +- prez/dependencies.py | 169 +++-- .../endpoints/endpoint_metadata.ttl | 5 + .../endpoint_node_selection_shapes.ttl | 7 +- .../endpoints/system_endpoints.ttl | 82 +-- .../profiles/ogc_records_profile.ttl | 6 + .../profiles/prez_default_profiles.ttl | 9 +- .../profiles/spaceprez_default_profiles.ttl | 3 +- prez/routers/{object.py => object.py.unused} | 19 - prez/routers/ogc_router.py | 157 ++--- prez/routers/profiles.py | 60 -- prez/routers/search.py | 4 +- prez/services/annotations.py | 10 +- prez/services/connegp_service.py | 9 +- prez/services/listings.py | 68 +- prez/services/objects.py | 45 +- prez/services/query_generation/search.py | 2 - 
prez/services/query_generation/shacl.py | 190 ++---- prez/services/query_generation/umbrella.py | 612 +----------------- temp/grammar/grammar.py | 4 +- test_data/ogc_records_profile.ttl | 106 --- test_data/spaceprez_default_profiles.ttl | 138 ---- tests/{test_count.py => _test_count.py} | 0 tests/conftest.py | 6 + tests/test_alt_profiles.py | 2 +- tests/test_connegp.py | 35 +- tests/test_endpoints_profiles.py | 12 +- tests/test_node_selection_shacl.py | 15 +- tests/test_property_selection_shacl.py | 24 +- tests/test_query_construction.py | 8 +- 31 files changed, 430 insertions(+), 1394 deletions(-) rename prez/routers/{object.py => object.py.unused} (80%) delete mode 100755 prez/routers/profiles.py delete mode 100755 test_data/ogc_records_profile.ttl delete mode 100755 test_data/spaceprez_default_profiles.ttl rename tests/{test_count.py => _test_count.py} (100%) diff --git a/prez/app.py b/prez/app.py index 4a0e24a4..2e7952de 100755 --- a/prez/app.py +++ b/prez/app.py @@ -27,9 +27,7 @@ from prez.routers.cql import router as cql_router from prez.routers.identifier import router as identifier_router from prez.routers.management import router as management_router -from prez.routers.object import router as object_router from prez.routers.ogc_router import router as ogc_records_router -from prez.routers.profiles import router as profiles_router from prez.routers.search import router as search_router from prez.routers.sparql import router as sparql_router from prez.services.app_service import ( @@ -38,7 +36,6 @@ create_endpoints_graph, populate_api_info, add_prefixes_to_prefix_graph, - # add_common_context_ontologies_to_tbox_cache, ) from prez.services.exception_catchers import ( catch_400, @@ -64,10 +61,8 @@ app.include_router(cql_router) app.include_router(management_router) -app.include_router(object_router) app.include_router(sparql_router) app.include_router(search_router) -app.include_router(profiles_router) app.include_router(ogc_records_router) app.include_router(identifier_router) diff --git a/prez/config.py b/prez/config.py index 38b48a6c..cd24aaea 100755 --- a/prez/config.py +++ b/prez/config.py @@ -61,10 +61,18 @@ class Settings(BaseSettings): prez_version: Optional[str] = None disable_prefix_generation: bool = False default_language: str = "en" - default_search_predicates: Optional[List[URIRef]] = [RDFS.label, SKOS.prefLabel, SDO.name, DCTERMS.title] + default_search_predicates: Optional[List[URIRef]] = [ + RDFS.label, + SKOS.prefLabel, + SDO.name, + DCTERMS.title, + ] local_rdf_dir: str = "rdf" endpoint_structure: Optional[Tuple[str, ...]] = ("catalogs", "collections", "items") - system_endpoints: Optional[List[URIRef]] = [EP["system/profile-listing"], EP["system/profile-object"]] + system_endpoints: Optional[List[URIRef]] = [ + EP["system/profile-listing"], + EP["system/profile-object"], + ] # @root_validator() # def check_endpoint_enabled(cls, values): diff --git a/prez/dependencies.py b/prez/dependencies.py index f5ad6355..a39f0e82 100755 --- a/prez/dependencies.py +++ b/prez/dependencies.py @@ -4,7 +4,7 @@ import httpx from fastapi import Depends, Request, HTTPException from pyoxigraph import Store -from rdflib import Dataset, URIRef, SH +from rdflib import Dataset, URIRef, SH, Graph from prez.cache import ( store, @@ -16,7 +16,7 @@ annotations_repo, ) from prez.config import settings -from prez.reference_data.prez_ns import ALTREXT, ONT +from prez.reference_data.prez_ns import ALTREXT, ONT, EP from prez.repositories import PyoxigraphRepo, RemoteSparqlRepo, OxrdflibRepo, Repo 
from prez.services.connegp_service import NegotiatedPMTs from prez.services.curie_functions import get_uri_for_curie_id @@ -53,10 +53,10 @@ def get_oxrdflib_store(): async def get_data_repo( - request: Request, - http_async_client: httpx.AsyncClient = Depends(get_async_http_client), - pyoxi_data_store: Store = Depends(get_pyoxi_store), - pyoxi_system_store: Store = Depends(get_system_store), + request: Request, + http_async_client: httpx.AsyncClient = Depends(get_async_http_client), + pyoxi_data_store: Store = Depends(get_pyoxi_store), + pyoxi_system_store: Store = Depends(get_system_store), ) -> Repo: if URIRef(request.scope.get("route").name) in settings.system_endpoints: return PyoxigraphRepo(pyoxi_system_store) @@ -69,7 +69,7 @@ async def get_data_repo( async def get_system_repo( - pyoxi_store: Store = Depends(get_system_store), + pyoxi_store: Store = Depends(get_system_store), ) -> Repo: """ A pyoxigraph Store with Prez system data including: @@ -99,8 +99,10 @@ async def load_system_data_to_oxigraph(store: Store): Loads all the data from the local data directory into the local SPARQL endpoint """ # TODO refactor to use the local files directly - profiles_bytes = profiles_graph_cache.serialize(format="nt", encoding="utf-8") - store.load(profiles_bytes, "application/n-triples") + for f in (Path(__file__).parent / "reference_data/profiles").glob("*.ttl"): + prof_bytes = Graph().parse(f).serialize(format="nt", encoding="utf-8") + # profiles_bytes = profiles_graph_cache.default_context.serialize(format="nt", encoding="utf-8") + store.load(prof_bytes, "application/n-triples") endpoints_bytes = endpoints_graph_cache.serialize(format="nt", encoding="utf-8") store.load(endpoints_bytes, "application/n-triples") @@ -111,9 +113,9 @@ async def load_annotations_data_to_oxigraph(store: Store): Loads all the data from the local data directory into the local SPARQL endpoint """ relevant_predicates = ( - settings.label_predicates - + settings.description_predicates - + settings.provenance_predicates + settings.label_predicates + + settings.description_predicates + + settings.provenance_predicates ) raw_g = Dataset(default_union=True) for file in (Path(__file__).parent / "reference_data/context_ontologies").glob("*"): @@ -150,7 +152,7 @@ async def cql_get_parser_dependency(request: Request) -> CQLParser: query = json.loads(request.query_params["filter"]) context = json.load( ( - Path(__file__).parent / "reference_data/cql/default_context.json" + Path(__file__).parent / "reference_data/cql/default_context.json" ).open() ) cql_parser = CQLParser(cql=query, context=context) @@ -182,24 +184,73 @@ async def generate_search_query(request: Request): ) +async def get_endpoint_uri_type( + request: Request, + system_repo: Repo = Depends(get_system_repo), +) -> tuple[URIRef, URIRef]: + endpoint_uri = URIRef(request.scope.get("route").name) + ep_type_fs = await get_classes(endpoint_uri, system_repo) + ep_types = list(ep_type_fs) + + # Iterate over each item in ep_types + for ep_type in ep_types: + # Check if the current ep_type is either ObjectEndpoint or ListingEndpoint + if ep_type in [ONT.ObjectEndpoint, ONT.ListingEndpoint]: + return endpoint_uri, ep_type + + raise ValueError( + "Endpoint must be declared as either a 'https://prez.dev/ont/ObjectEndpoint' or a " + "'https://prez.dev/ont/ListingEndpoint' in order for the appropriate profile to be determined." 
+    )
+
+
+async def get_focus_node(
+    request: Request,
+    endpoint_uri_type: tuple[URIRef, URIRef] = Depends(get_endpoint_uri_type),
+):
+    ep_uri = endpoint_uri_type[0]
+    ep_type = endpoint_uri_type[1]
+    if ep_uri == EP["system/object"]:
+        uri = request.query_params.get("uri")
+        return IRI(value=uri)
+    elif ep_type == ONT.ObjectEndpoint:
+        object_curie = request.url.path.split("/")[-1]
+        focus_node_uri = await get_uri_for_curie_id(object_curie)
+        return IRI(value=focus_node_uri)
+    else:
+        return Var(value="focus_node")
+
+
 async def get_endpoint_nodeshapes(
-        request: Request,
-        repo: Repo = Depends(get_data_repo),
-        system_repo: Repo = Depends(get_system_repo),
+    request: Request,
+    repo: Repo = Depends(get_data_repo),
+    system_repo: Repo = Depends(get_system_repo),
+    endpoint_uri_type: tuple[URIRef, URIRef] = Depends(get_endpoint_uri_type),
+    focus_node: IRI | Var = Depends(get_focus_node),
 ):
-    endpoint_uri = URIRef(request.scope.get("route").name)
-    path_node_curies = [i for i in request.url.path.split("/")[:-1] if i in request.path_params.values()]
-    path_nodes = {f"path_node_{i + 1}": IRI(value=await get_uri_for_curie_id(value)) for i, value in
-                  enumerate(reversed(path_node_curies))}
+    ep_uri = endpoint_uri_type[0]
+    if endpoint_uri_type[0] == EP["system/object"]:
+        return NodeShape(
+            uri=URIRef("http://example.org/ns#Object"),
+            graph=endpoints_graph_cache,
+            kind="endpoint",
+            focus_node=focus_node,
+        )
+    path_node_curies = [
+        i for i in request.url.path.split("/")[:-1] if i in request.path_params.values()
+    ]
+    path_nodes = {
+        f"path_node_{i + 1}": IRI(value=await get_uri_for_curie_id(value))
+        for i, value in enumerate(reversed(path_node_curies))
+    }
     hierarchy_level = int(len(request.url.path.split("/")) / 2)
     """
    Determines the relevant nodeshape based on the endpoint, hierarchy level, and parent URI
    """
     node_selection_shape = None
-    target_classes = []
     relevant_ns_query = f"""SELECT ?ns ?tc WHERE {{
-                                {endpoint_uri.n3()} <https://prez.dev/ont/relevantShapes> ?ns .
+                                {ep_uri.n3()} <https://prez.dev/ont/relevantShapes> ?ns .
                                 ?ns <http://www.w3.org/ns/shacl#targetClass> ?tc ;
                                     <https://prez.dev/ont/hierarchyLevel> {hierarchy_level} .
}}""" @@ -208,7 +259,6 @@ async def get_endpoint_nodeshapes( distinct_ns = set([result["ns"]["value"] for result in tabular_results]) if len(distinct_ns) == 1: # only one possible node shape node_selection_shape = URIRef(tabular_results[0]["ns"]["value"]) - target_classes = [URIRef(result["tc"]["value"]) for result in tabular_results] elif len(distinct_ns) > 1: # more than one possible node shape # try all of the available nodeshapes path_node_classes = {} @@ -220,7 +270,7 @@ async def get_endpoint_nodeshapes( graph=endpoints_graph_cache, kind="endpoint", path_nodes=path_nodes, - focus_node=Var(value="focus_node") + focus_node=focus_node, ) for ns in distinct_ns ] @@ -250,51 +300,37 @@ async def get_endpoint_nodeshapes( graph=endpoints_graph_cache, kind="endpoint", path_nodes=path_nodes, - focus_node=Var(value="focus_node") + focus_node=focus_node, ) return ns else: raise ValueError( - f"No relevant nodeshape found for the given endpoint {endpoint_uri}, hierarchy level {hierarchy_level}, " - f"and parent URI" + f"No relevant nodeshape found for the given endpoint {ep_uri}, hierarchy level " + f"{hierarchy_level}, and parent URI" ) -async def get_endpoint_type( +async def get_negotiated_pmts( request: Request, + endpoint_nodeshape: NodeShape = Depends(get_endpoint_nodeshapes), + repo: Repo = Depends(get_data_repo), system_repo: Repo = Depends(get_system_repo), -): - endpoint_uri = URIRef(request.scope.get("route").name) - ep_type_fs = await get_classes(endpoint_uri, system_repo) - ep_types = list(ep_type_fs) - - # Iterate over each item in ep_types - for ep_type in ep_types: - # Check if the current ep_type is either ObjectEndpoint or ListingEndpoint - if ep_type in [ONT.ObjectEndpoint, ONT.ListingEndpoint]: - return ep_type - - raise ValueError("Endpoint must be declared as either a 'https://prez.dev/ont/ObjectEndpoint' or a " - "'https://prez.dev/ont/ListingEndpoint' in order for the appropriate profile to be determined.") - - - - -async def get_negotiated_pmts( - request: Request, - endpoint_nodeshape: NodeShape = Depends(get_endpoint_nodeshapes), - system_repo: Repo = Depends(get_system_repo), - endpoint_type: URIRef = Depends(get_endpoint_type), + endpoint_uri_type: URIRef = Depends(get_endpoint_uri_type), + focus_node: IRI | Var = Depends(get_focus_node), ) -> NegotiatedPMTs: # Use endpoint_nodeshapes in constructing NegotiatedPMTs - if endpoint_type == ONT.ObjectEndpoint: + ep_type = endpoint_uri_type[1] + if ep_type == ONT.ObjectEndpoint: listing = False - else: + klasses_fs = await get_classes(focus_node.value, repo) + klasses = list(klasses_fs) + elif ep_type == ONT.ListingEndpoint: listing = True + klasses = endpoint_nodeshape.targetClasses pmts = NegotiatedPMTs( headers=request.headers, params=request.query_params, - classes=endpoint_nodeshape.targetClasses, + classes=klasses, listing=listing, system_repo=system_repo, ) @@ -303,14 +339,14 @@ async def get_negotiated_pmts( async def get_endpoint_structure( - request: Request, - pmts: NegotiatedPMTs = Depends(get_negotiated_pmts) + request: Request, + pmts: NegotiatedPMTs = Depends(get_negotiated_pmts), + endpoint_uri_type: URIRef = Depends(get_endpoint_uri_type), ): - endpoint_uri = URIRef(request.scope.get("route").name) + endpoint_uri = endpoint_uri_type[0] - if ( - (endpoint_uri in settings.system_endpoints) or - (pmts.selected.get("profile") == ALTREXT["alt-profile"]) + if (endpoint_uri in settings.system_endpoints) or ( + pmts.selected.get("profile") == ALTREXT["alt-profile"] ): return ("profiles",) else: @@ -318,16 +354,21 
@@ async def get_endpoint_structure( async def get_profile_nodeshape( - request: Request, - pmts: NegotiatedPMTs = Depends(get_negotiated_pmts), - endpoint_type: URIRef = Depends(get_endpoint_type), + request: Request, + pmts: NegotiatedPMTs = Depends(get_negotiated_pmts), + endpoint_uri_type: URIRef = Depends(get_endpoint_uri_type), ): profile = pmts.selected.get("profile") - if endpoint_type == ONT.ObjectEndpoint: + if profile == ALTREXT["alt-profile"]: + focus_node = Var(value="focus_node") + elif endpoint_uri_type[0] == EP["system/object"]: + uri = request.query_params.get("uri") + focus_node = IRI(value=uri) + elif endpoint_uri_type[1] == ONT.ObjectEndpoint: object_curie = request.url.path.split("/")[-1] focus_node_uri = await get_uri_for_curie_id(object_curie) focus_node = IRI(value=focus_node_uri) - else: + else: # listing focus_node = Var(value="focus_node") return NodeShape( uri=profile, diff --git a/prez/reference_data/endpoints/endpoint_metadata.ttl b/prez/reference_data/endpoints/endpoint_metadata.ttl index 5f792c02..cc9b6b6c 100644 --- a/prez/reference_data/endpoints/endpoint_metadata.ttl +++ b/prez/reference_data/endpoints/endpoint_metadata.ttl @@ -14,6 +14,11 @@ sys:profile-object ont:relevantShapes ex:Profiles ; . +sys:object + a ont:ObjectEndpoint , ont:SystemEndpoint ; + ont:relevantShapes ex:Profiles ; + . + ogce:catalog-listing a ont:ListingEndpoint ; ont:relevantShapes ex:TopLevelCatalogs ; diff --git a/prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl b/prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl index fda11289..e17cd2ec 100644 --- a/prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl +++ b/prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl @@ -129,4 +129,9 @@ ex:AltProfilesForObject sh:property [ sh:path altr-ext:constrainsClass ; ] -. \ No newline at end of file +. + +ex:Object + a sh:NodeShape ; + ont:hierarchyLevel 1 ; + . \ No newline at end of file diff --git a/prez/reference_data/endpoints/system_endpoints.ttl b/prez/reference_data/endpoints/system_endpoints.ttl index 06f2608d..a1c35817 100755 --- a/prez/reference_data/endpoints/system_endpoints.ttl +++ b/prez/reference_data/endpoints/system_endpoints.ttl @@ -1,41 +1,41 @@ -PREFIX dcat: -PREFIX endpoint: -PREFIX geo: -PREFIX ont: -PREFIX prez: -PREFIX rdfs: -PREFIX sh: -PREFIX xsd: -PREFIX prof: -PREFIX skos: -PREFIX shext: - -endpoint:profile-listing a ont:ListingEndpoint ; - ont:deliversClasses prez:ProfilesList ; - sh:targetClass prof:Profile ; - ont:endpointTemplate "/profiles" ; - shext:limit 20 ; - shext:offset 0 ; -. - -endpoint:profile-object a ont:ObjectEndpoint ; - sh:targetNode "$object" ; - ont:deliversClasses prof:Profile ; - ont:endpointTemplate "/profiles/$object" ; -. - -endpoint:alt-profile-listing a ont:ListingEndpoint ; - ont:deliversClasses prez:ProfilesList ; - sh:targetClass prof:Profile ; - sh:target [ sh:select """SELECT ?focus_node - WHERE { - ?focus_node a prof:Profile ; - $selectedClass . - }""" ] ; - shext:limit 20 ; - shext:offset 0 ; -. - -endpoint:object a ont:ObjectEndpoint ; - ont:deliversClasses prez:Object ; -. +#PREFIX dcat: +#PREFIX endpoint: +#PREFIX geo: +#PREFIX ont: +#PREFIX prez: +#PREFIX rdfs: +#PREFIX sh: +#PREFIX xsd: +#PREFIX prof: +#PREFIX skos: +#PREFIX shext: +# +#endpoint:profile-listing a ont:ListingEndpoint ; +# ont:deliversClasses prez:ProfilesList ; +# sh:targetClass prof:Profile ; +# ont:endpointTemplate "/profiles" ; +# shext:limit 20 ; +# shext:offset 0 ; +#. 
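Reviewer note on the dependencies above: get_endpoint_uri_type, get_focus_node, get_endpoint_nodeshapes, get_negotiated_pmts and get_profile_nodeshape form a FastAPI dependency chain that is resolved once per request. A minimal runnable sketch of the pattern, assuming a simplified app; the route name, endpoint type and IRIs below are illustrative stand-ins, not Prez's real values:

from fastapi import Depends, FastAPI, Request

app = FastAPI()


async def resolve_endpoint_uri_type(request: Request) -> tuple[str, str]:
    # Stand-in for get_endpoint_uri_type: the real dependency looks up the
    # route-name URI's classes in the system repo; the type is hard-coded here.
    return request.scope["route"].name, "ListingEndpoint"


async def resolve_focus_node(
    uri_type: tuple[str, str] = Depends(resolve_endpoint_uri_type),
) -> str:
    # Object endpoints bind a concrete IRI; listing endpoints bind a variable.
    return "?focus_node" if uri_type[1] == "ListingEndpoint" else "<urn:example:obj>"


@app.get("/catalogs", name="https://example.org/endpoint/catalog-listing")
async def catalogs(focus_node: str = Depends(resolve_focus_node)):
    # FastAPI caches each dependency per request, so resolve_endpoint_uri_type
    # runs once even though several dependencies in the chain declare it.
    return {"focus_node": focus_node}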
+# +#endpoint:profile-object a ont:ObjectEndpoint ; +# sh:targetNode "$object" ; +# ont:deliversClasses prof:Profile ; +# ont:endpointTemplate "/profiles/$object" ; +#. +# +#endpoint:alt-profile-listing a ont:ListingEndpoint ; +# ont:deliversClasses prez:ProfilesList ; +# sh:targetClass prof:Profile ; +# sh:target [ sh:select """SELECT ?focus_node +# WHERE { +# ?focus_node a prof:Profile ; +# $selectedClass . +# }""" ] ; +# shext:limit 20 ; +# shext:offset 0 ; +#. +# +#endpoint:object a ont:ObjectEndpoint ; +# ont:deliversClasses prez:Object ; +#. diff --git a/prez/reference_data/profiles/ogc_records_profile.ttl b/prez/reference_data/profiles/ogc_records_profile.ttl index 14d80ba4..720d534a 100755 --- a/prez/reference_data/profiles/ogc_records_profile.ttl +++ b/prez/reference_data/profiles/ogc_records_profile.ttl @@ -40,7 +40,9 @@ prez:OGCRecordsProfile prez:OGCListingProfile a prof:Profile , prez:ListingProfile , sh:NodeShape ; + dcterms:identifier "ogc-listing"^^xsd:token ; dcterms:title "OGC Listing Profile" ; + dcterms:description "A profile for listing different kinds of items relevant to an OGC Records API" ; altr-ext:hasResourceFormat "application/ld+json" , "application/anot+ld+json" , @@ -56,6 +58,8 @@ prez:OGCListingProfile prez:OGCSchemesListProfile a prof:Profile , prez:ListingProfile , sh:NodeShape ; dcterms:title "OGC Concept Scheme Listing Profile" ; + dcterms:description "A profile for listing SKOS Concept Schemes" ; + dcterms:identifier "ogc-schemes-listing"^^xsd:token ; altr-ext:hasResourceFormat "application/ld+json" , "application/anot+ld+json" , @@ -82,6 +86,8 @@ prez:OGCSchemesListProfile prez:OGCItemProfile a prof:Profile , prez:ObjectProfile , sh:NodeShape ; dcterms:title "OGC Object Profile" ; + dcterms:description "A profile for individual OGC Records API items" ; + dcterms:identifier "ogc-item"^^xsd:token ; altr-ext:hasResourceFormat "application/ld+json" , "application/anot+ld+json" , diff --git a/prez/reference_data/profiles/prez_default_profiles.ttl b/prez/reference_data/profiles/prez_default_profiles.ttl index ececa07b..0a80b0b4 100755 --- a/prez/reference_data/profiles/prez_default_profiles.ttl +++ b/prez/reference_data/profiles/prez_default_profiles.ttl @@ -60,7 +60,8 @@ PREFIX xsd: skos:ConceptScheme , skos:Collection , prez:CQLObjectList , - prez:QueryablesList ; + prez:QueryablesList , + prof:Profile ; altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:hasResourceFormat "application/json" , @@ -68,6 +69,9 @@ PREFIX xsd: "application/rdf+xml" , "text/anot+turtle" , "text/turtle" ; + sh:property [ + sh:path rdf:type ; + ] ; . 
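For context on get_endpoint_nodeshapes further above: the node-shape selection query matches the endpoint's declared shapes against the request's hierarchy level. A sketch of that query with the predicates written out in full; the predicate IRIs are inferred from the ont:relevantShapes / ont:hierarchyLevel terms in the endpoint metadata and shape files in this patch, and the endpoint URI is an example only:

ep_uri = "<https://example.org/endpoint/item-listing>"  # example; code uses ep_uri.n3()
hierarchy_level = 3  # int(len(request.url.path.split("/")) / 2)

relevant_ns_query = f"""SELECT ?ns ?tc
WHERE {{
    {ep_uri} <https://prez.dev/ont/relevantShapes> ?ns .
    ?ns <http://www.w3.org/ns/shacl#targetClass> ?tc ;
        <https://prez.dev/ont/hierarchyLevel> {hierarchy_level} .
}}"""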
altr-ext:alt-profile @@ -90,7 +94,8 @@ altr-ext:alt-profile dcat:Resource , skos:ConceptScheme , skos:Concept , - skos:Collection ; + skos:Collection , + prof:Profile ; sh:property [ sh:path ( sh:union ( diff --git a/prez/reference_data/profiles/spaceprez_default_profiles.ttl b/prez/reference_data/profiles/spaceprez_default_profiles.ttl index 9e6a3c8a..8907cb9d 100755 --- a/prez/reference_data/profiles/spaceprez_default_profiles.ttl +++ b/prez/reference_data/profiles/spaceprez_default_profiles.ttl @@ -50,7 +50,8 @@ prez:SpacePrezProfile prez:FeatureCollectionProfile a prof:Profile ; dcterms:description "A profile for GeoSPARQL FeatureCollections" ; dcterms:identifier "geofc"^^xsd:token ; - dcterms:title "Feature Collection Profile" ; + dcterms:title "Feature Collection Profile" , "a second title" ; + rdfs:label "Some Other Label" ; altr-ext:constrainsClass geo:FeatureCollection ; altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:hasResourceFormat diff --git a/prez/routers/object.py b/prez/routers/object.py.unused similarity index 80% rename from prez/routers/object.py rename to prez/routers/object.py.unused index 833a3013..229219f9 100755 --- a/prez/routers/object.py +++ b/prez/routers/object.py.unused @@ -67,22 +67,3 @@ async def count_route( _, rows = await repo.send_queries([], [(None, query)]) for row in rows[0][1]: return row["count"]["value"] - - -@router.get("/object", summary="Object", name="https://prez.dev/endpoint/system/object") -async def object_route( - request: Request, - repo=Depends(get_data_repo), - system_repo=Depends(get_system_repo), -): - endpoint_uri = URIRef(request.scope.get("route").name) - uri = URIRef(request.query_params.get("uri")) - request_url = request.scope["path"] - return await object_function( - request=request, - endpoint_uri=endpoint_uri, - uri=uri, - request_url=request_url, - repo=repo, - system_repo=system_repo, - ) diff --git a/prez/routers/ogc_router.py b/prez/routers/ogc_router.py index 9f6bc514..326a017a 100755 --- a/prez/routers/ogc_router.py +++ b/prez/routers/ogc_router.py @@ -4,9 +4,17 @@ from rdflib import Namespace from rdflib import URIRef -from prez.dependencies import get_data_repo, get_system_repo, generate_search_query, cql_get_parser_dependency, \ - get_endpoint_nodeshapes, get_negotiated_pmts, get_profile_nodeshape, get_endpoint_structure -from prez.reference_data.prez_ns import PREZ, EP +from prez.dependencies import ( + get_data_repo, + get_system_repo, + generate_search_query, + cql_get_parser_dependency, + get_endpoint_nodeshapes, + get_negotiated_pmts, + get_profile_nodeshape, + get_endpoint_structure, +) +from prez.reference_data.prez_ns import PREZ, EP, ONT from prez.repositories import Repo from prez.services.connegp_service import NegotiatedPMTs from prez.services.curie_functions import get_uri_for_curie_id @@ -42,18 +50,18 @@ name=OGCE["item-listing"], ) async def listings( - page: Optional[int] = 1, - per_page: Optional[int] = 20, - order_by: Optional[str] = None, - order_by_direction: Optional[str] = None, - endpoint_nodeshape: NodeShape = Depends(get_endpoint_nodeshapes), - pmts: NegotiatedPMTs = Depends(get_negotiated_pmts), - endpoint_structure: tuple[str, ...] 
= Depends(get_endpoint_structure), - profile_nodeshape: NodeShape = Depends(get_profile_nodeshape), - cql_parser: CQLParser = Depends(cql_get_parser_dependency), - search_query: ConstructQuery = Depends(generate_search_query), - data_repo: Repo = Depends(get_data_repo), - system_repo: Repo = Depends(get_system_repo), + page: Optional[int] = 1, + per_page: Optional[int] = 20, + order_by: Optional[str] = None, + order_by_direction: Optional[str] = None, + endpoint_nodeshape: NodeShape = Depends(get_endpoint_nodeshapes), + pmts: NegotiatedPMTs = Depends(get_negotiated_pmts), + endpoint_structure: tuple[str, ...] = Depends(get_endpoint_structure), + profile_nodeshape: NodeShape = Depends(get_profile_nodeshape), + cql_parser: CQLParser = Depends(cql_get_parser_dependency), + search_query: ConstructQuery = Depends(generate_search_query), + data_repo: Repo = Depends(get_data_repo), + system_repo: Repo = Depends(get_system_repo), ): return await listing_function_new( data_repo=data_repo, @@ -68,39 +76,48 @@ async def listings( per_page=per_page, order_by=order_by, order_by_direction=order_by_direction, + original_endpoint_type=ONT["ListingEndpoint"], ) ######################################################################################################################## # Object endpoints + +# 1: /object?uri=<> +# 2: /profiles/{profile_curie} +# 3: /catalogs/{catalogId} +# 4: /catalogs/{catalogId}/collections/{collectionId} +# 5: /catalogs/{catalogId}/collections/{collectionId}/items/{itemId} ######################################################################################################################## + +@router.get(path="/object", summary="Object", name=EP["system/object"]) @router.get( - "/profiles/{profile_curie}", + path="/profiles/{profile_curie}", summary="Profile", name=EP["system/profile-object"], ) @router.get( - "/catalogs/{catalogId}", + path="/catalogs/{catalogId}", summary="Catalog Object", name=OGCE["catalog-object"], ) @router.get( - "/catalogs/{catalogId}/collections/{collectionId}", + path="/catalogs/{catalogId}/collections/{collectionId}", summary="Collection Object", name=OGCE["collection-object"], ) @router.get( - "/catalogs/{catalogId}/collections/{collectionId}/items/{itemId}", + path="/catalogs/{catalogId}/collections/{collectionId}/items/{itemId}", summary="Item Object", name=OGCE["item-object"], ) async def objects( - pmts: NegotiatedPMTs = Depends(get_negotiated_pmts), - endpoint_structure: tuple[str, ...] = Depends(get_endpoint_structure), - profile_nodeshape: NodeShape = Depends(get_profile_nodeshape), - data_repo: Repo = Depends(get_data_repo), - system_repo: Repo = Depends(get_system_repo), + pmts: NegotiatedPMTs = Depends(get_negotiated_pmts), + endpoint_structure: tuple[str, ...] 
= Depends(get_endpoint_structure), + profile_nodeshape: NodeShape = Depends(get_profile_nodeshape), + data_repo: Repo = Depends(get_data_repo), + system_repo: Repo = Depends(get_system_repo), ): return await object_function_new( data_repo=data_repo, @@ -109,95 +126,3 @@ async def objects( pmts=pmts, profile_nodeshape=profile_nodeshape, ) - - -# async def collection_listing( -# request: Request, -# page: Optional[int] = 1, -# per_page: Optional[int] = 20, -# search_term: Optional[str] = None, -# repo: Repo = Depends(get_repo), -# system_repo: Repo = Depends(get_system_repo), -# ): -# search_term = request.query_params.get("q") -# -# path_node_1_uri = await get_uri_for_curie_id(request.path_params["catalogId"]) -# endpoint_uri = URIRef(request.scope.get("route").name) -# return await listing_function( -# request, -# repo, -# system_repo, -# endpoint_uri, -# hierarchy_level=2, -# path_nodes={"path_node_1": IRI(value=path_node_1_uri)}, -# page=page, -# per_page=per_page, -# search_term=search_term, -# ) -# -# -# -# async def item_listing( -# request: Request, -# page: Optional[int] = 1, -# per_page: Optional[int] = 20, -# search_term: Optional[str] = None, -# repo: Repo = Depends(get_repo), -# system_repo: Repo = Depends(get_system_repo), -# ): -# search_term = request.query_params.get("q") -# path_node_1_uri = await get_uri_for_curie_id(request.path_params["collectionId"]) -# path_node_2_uri = await get_uri_for_curie_id(request.path_params["catalogId"]) -# endpoint_uri = URIRef(request.scope.get("route").name) -# return await listing_function( -# request, -# repo, -# system_repo, -# endpoint_uri, -# hierarchy_level=3, -# path_nodes={ -# "path_node_1": IRI(value=path_node_1_uri), -# "path_node_2": IRI(value=path_node_2_uri), -# }, -# page=page, -# per_page=per_page, -# search_term=search_term, -# ) - -async def catalog_object( - request: Request, - repo: Repo = Depends(get_data_repo), - system_repo: Repo = Depends(get_system_repo), -): - request_url = request.scope["path"] - endpoint_uri = URIRef(request.scope.get("route").name) - object_uri = await get_uri_for_curie_id(request.path_params["catalogId"]) - return await object_function( - request, endpoint_uri, object_uri, request_url, repo, system_repo - ) - - -async def collection_object( - request: Request, - repo: Repo = Depends(get_data_repo), - system_repo: Repo = Depends(get_system_repo), -): - request_url = request.scope["path"] - endpoint_uri = URIRef(request.scope.get("route").name) - object_uri = await get_uri_for_curie_id(request.path_params["collectionId"]) - return await object_function( - request, endpoint_uri, object_uri, request_url, repo, system_repo - ) - - -async def item_object( - request: Request, - repo: Repo = Depends(get_data_repo), - system_repo: Repo = Depends(get_system_repo), -): - request_url = request.scope["path"] - endpoint_uri = URIRef(request.scope.get("route").name) - object_uri = await get_uri_for_curie_id(request.path_params["itemId"]) - return await object_function( - request, endpoint_uri, object_uri, request_url, repo, system_repo - ) diff --git a/prez/routers/profiles.py b/prez/routers/profiles.py deleted file mode 100755 index 581226b5..00000000 --- a/prez/routers/profiles.py +++ /dev/null @@ -1,60 +0,0 @@ -from typing import Optional - -from fastapi import APIRouter, Request, Depends -from rdflib import URIRef - -from prez.dependencies import get_system_repo, get_endpoint_nodeshapes, get_negotiated_pmts, get_profile_nodeshape, \ - cql_get_parser_dependency, generate_search_query, get_data_repo -from 
prez.repositories import Repo -from prez.services.connegp_service import NegotiatedPMTs -from prez.services.curie_functions import get_uri_for_curie_id -from prez.services.listings import listing_function_new -from prez.services.objects import object_function -from prez.services.query_generation.cql import CQLParser -from prez.services.query_generation.shacl import NodeShape -from temp.grammar import ConstructQuery - -router = APIRouter(tags=["Profiles"]) - - -# @router.get( -# "/profiles", -# summary="List Profiles", -# name="https://prez.dev/endpoint/system/profile-listing", -# ) -# async def profiles( -# request: Request, -# page: int = 1, -# per_page: int = 20, -# repo=Depends(get_system_repo), -# ): -# endpoint_uri = URIRef(request.scope.get("route").name) -# return await listing_function( -# request=request, -# repo=repo, -# system_repo=repo, -# endpoint_uri=endpoint_uri, -# hierarchy_level=1, -# page=page, -# per_page=per_page, -# endpoint_structure=("profiles",), -# ) - - -# @router.get( -# "/profiles/{profile_curie}", -# summary="Profile", -# name="https://prez.dev/endpoint/system/profile-object", -# ) -# async def profile(request: Request, profile_curie: str, repo=Depends(get_system_repo)): -# request_url = request.scope["path"] -# endpoint_uri = URIRef(request.scope.get("route").name) -# profile_uri = await get_uri_for_curie_id(profile_curie) -# return await object_function( -# request=request, -# endpoint_uri=endpoint_uri, -# uri=profile_uri, -# request_url=request_url, -# repo=repo, -# system_repo=repo, -# ) diff --git a/prez/routers/search.py b/prez/routers/search.py index 2c7e9131..6cabd6e8 100755 --- a/prez/routers/search.py +++ b/prez/routers/search.py @@ -43,9 +43,7 @@ async def search( item_graph, _ = await repo.send_queries([search_query_str], []) if "anot+" in pmts.selected["mediatype"]: - await add_prez_links( - item_graph, repo, settings.endpoint_structure - ) + await add_prez_links(item_graph, repo, settings.endpoint_structure) # count search results - hard to do in SPARQL as the SELECT part of the query is NOT aggregated count = len(list(item_graph.subjects(RDF.type, PREZ.SearchResult))) diff --git a/prez/services/annotations.py b/prez/services/annotations.py index 96c1f05c..f22bce23 100755 --- a/prez/services/annotations.py +++ b/prez/services/annotations.py @@ -13,7 +13,7 @@ log = logging.getLogger(__name__) -async def process_terms(terms_and_dtypes: Set[URIRef], repo: Repo, system_repo: Repo): +async def get_annotations(terms_and_dtypes: Set[URIRef], repo: Repo, system_repo: Repo): """ This function processes the terms and their data types. It first retrieves the cached results for the given terms and data types. Then, it processes the terms that are not cached. The results are added to a graph which is then @@ -69,7 +69,7 @@ async def add_cached_entries( async def process_uncached_terms( - terms: List[URIRef], repo: Repo, system_repo: Repo, annotations_g: Graph + terms: List[URIRef], data_repo: Repo, system_repo: Repo, annotations_g: Graph ): """ This function processes the terms that are not cached. It sends queries to the annotations repository and the @@ -78,7 +78,7 @@ async def process_uncached_terms( Args: terms (list): A list of terms that are not cached. - repo (Repo): An instance of the Repo class. + data_repo (Repo): An instance of the Repo class. annotations_g (Graph): A graph to which the results are added. 
     Returns:
@@ -92,7 +92,7 @@ async def process_uncached_terms(
     context_results = await annotations_repo.send_queries(
         rdf_queries=[annotations_query], tabular_queries=[]
     )
-    repo_results = await repo.send_queries(
+    repo_results = await data_repo.send_queries(
         rdf_queries=[annotations_query], tabular_queries=[]
     )
     system_results = await system_repo.send_queries(
@@ -144,5 +144,5 @@ async def get_annotation_properties(
     if not terms_and_types:
         return Graph()
 
-    annotations_g = await process_terms(terms_and_types, repo, system_repo)
+    annotations_g = await get_annotations(terms_and_types, repo, system_repo)
     return annotations_g
diff --git a/prez/services/connegp_service.py b/prez/services/connegp_service.py
index 1fd9b8b6..0b249823 100755
--- a/prez/services/connegp_service.py
+++ b/prez/services/connegp_service.py
@@ -210,13 +210,10 @@ def generate_response_headers(self) -> dict:
     def _compose_select_query(self) -> str:
         prez = Namespace("https://prez.dev/")
         profile_class = prez.ListingProfile if self.listing else prez.ObjectProfile
-        try:
-            requested_profile = self.requested_profiles[0][
-                0
-            ]  # TODO: handle multiple requested profiles
-        except TypeError as e:
+        if self.requested_profiles:
+            requested_profile = self.requested_profiles[0][0]
+        else:
             requested_profile = None
-            log.debug(f"{e}. normally this just means no profiles were requested")
 
         query = dedent(
             f"""
diff --git a/prez/services/listings.py b/prez/services/listings.py
index fa9438bd..555ca01a 100755
--- a/prez/services/listings.py
+++ b/prez/services/listings.py
@@ -19,26 +19,51 @@
 from prez.services.query_generation.cql import CQLParser
 from prez.services.query_generation.search import SearchQuery
 from prez.services.query_generation.shacl import NodeShape
-from prez.services.query_generation.umbrella import merge_listing_query_grammar_inputs, PrezQueryConstructorV2
+from prez.services.query_generation.umbrella import (
+    merge_listing_query_grammar_inputs,
+    PrezQueryConstructor,
+)
 from temp.grammar import *
 
 log = logging.getLogger(__name__)
 
 
 async def listing_function_new(
-        data_repo,
-        system_repo,
-        endpoint_nodeshape,
-        endpoint_structure,
-        search_query,
-        cql_parser,
-        pmts,
-        profile_nodeshape,
-        page,
-        per_page,
-        order_by,
-        order_by_direction,
+    data_repo,
+    system_repo,
+    endpoint_nodeshape,
+    endpoint_structure,
+    search_query,
+    cql_parser,
+    pmts,
+    profile_nodeshape,
+    page,
+    per_page,
+    order_by,
+    order_by_direction,
+    original_endpoint_type,
 ):
+    if (
+        pmts.selected["profile"] == ALTREXT["alt-profile"]
+    ):  # recalculate the endpoint node shape
+        endpoint_nodeshape_map = {
+            ONT["ObjectEndpoint"]: URIRef("http://example.org/ns#AltProfilesForObject"),
+            ONT["ListingEndpoint"]: URIRef(
+                "http://example.org/ns#AltProfilesForListing"
+            ),
+        }
+        endpoint_uri = endpoint_nodeshape_map[original_endpoint_type]
+        endpoint_nodeshape = NodeShape(
+            uri=endpoint_uri,
+            graph=endpoints_graph_cache,
+            kind="endpoint",
+            focus_node=Var(value="focus_node"),
+            path_nodes={
+                "path_node_1": IRI(value=pmts.selected["class"])
+            },  # hack - not sure how (or if) the class can be
+            # 'dynamically' expressed in SHACL. The class is only known at runtime.
+        )
+
     query_construct_kwargs = merge_listing_query_grammar_inputs(
         cql_parser=cql_parser,
         endpoint_nodeshape=endpoint_nodeshape,
@@ -52,16 +77,16 @@
     profile_gpnt = profile_nodeshape.gpnt_list
 
     queries = []
-    main_query = PrezQueryConstructorV2(
+    main_query = PrezQueryConstructor(
         profile_triples=profile_triples,
         profile_gpnt=profile_gpnt,
-        **query_construct_kwargs
+        **query_construct_kwargs,
     )
     queries.append(main_query.to_string())
 
     if (
-            pmts.requested_mediatypes is not None
-            and pmts.requested_mediatypes[0][0] == "application/sparql-query"
+        pmts.requested_mediatypes is not None
+        and pmts.requested_mediatypes[0][0] == "application/sparql-query"
     ):
         return PlainTextResponse(queries[0], media_type="application/sparql-query")
 
@@ -71,18 +96,15 @@
     count_query = CountQueryV2(original_subselect=subselect).to_string()
     queries.append(count_query)
 
+    # TODO move this to the top of the function
     if pmts.selected["profile"] == ALTREXT["alt-profile"]:
         query_repo = system_repo
-        # endpoint_structure = ("profiles",)
     else:
         query_repo = data_repo
-        # endpoint_structure = settings.endpoint_structure
 
     item_graph, _ = await query_repo.send_queries(queries, [])
     if "anot+" in pmts.selected["mediatype"]:
-        await add_prez_links(
-            item_graph, query_repo, endpoint_structure
-        )
+        await add_prez_links(item_graph, query_repo, endpoint_structure)
 
     # count search results - hard to do in SPARQL as the SELECT part of the query is NOT aggregated
     if search_query:
@@ -278,7 +300,7 @@ async def handle_alternate_profile(current_endpoint_uri, pmts, runtime_values):
 
 
 async def get_shacl_node_selection(
-        endpoint_uri, hierarchy_level, path_nodes, repo, system_repo
+    endpoint_uri, hierarchy_level, path_nodes, repo, system_repo
 ):
     """
     Determines the relevant nodeshape based on the endpoint, hierarchy level, and parent URI
diff --git a/prez/services/objects.py b/prez/services/objects.py
index 287a1f14..1598c4a8 100755
--- a/prez/services/objects.py
+++ b/prez/services/objects.py
@@ -7,30 +7,54 @@
 from prez.cache import endpoints_graph_cache, profiles_graph_cache
 from prez.config import settings
-from prez.reference_data.prez_ns import EP, ALTREXT
+from prez.reference_data.prez_ns import EP, ALTREXT, ONT
 from prez.renderers.renderer import return_from_graph
 from prez.repositories import Repo
 from prez.services.connegp_service import NegotiatedPMTs
 from prez.services.link_generation import add_prez_links
+from prez.services.listings import listing_function_new
 from prez.services.query_generation.classes import get_classes
-from prez.services.query_generation.umbrella import merge_listing_query_grammar_inputs, PrezQueryConstructorV2
+from prez.services.query_generation.umbrella import (
+    merge_listing_query_grammar_inputs,
+    PrezQueryConstructor,
+)
 from temp.grammar import IRI
 
 log = logging.getLogger(__name__)
 
 
 async def object_function_new(
-        data_repo,
-        system_repo,
-        endpoint_structure,
-        pmts,
-        profile_nodeshape,
+    data_repo,
+    system_repo,
+    endpoint_structure,
+    pmts,
+    profile_nodeshape,
 ):
+    if pmts.selected["profile"] == ALTREXT["alt-profile"]:
+        none_keys = [
+            "endpoint_nodeshape",
+            "search_query",
+            "cql_parser",
+            "order_by",
+            "order_by_direction",
+        ]
+        none_kwargs = {key: None for key in none_keys}
+        return await listing_function_new(
+            data_repo=data_repo,
+            system_repo=system_repo,
+            endpoint_structure=endpoint_structure,
+            pmts=pmts,
+            profile_nodeshape=profile_nodeshape,
+            page=1,
+            per_page=20,
original_endpoint_type=ONT["ObjectEndpoint"], + **none_kwargs + ) + profile_triples = profile_nodeshape.triples_list profile_gpnt = profile_nodeshape.gpnt_list - query = PrezQueryConstructorV2( - profile_triples=profile_triples, - profile_gpnt=profile_gpnt + query = PrezQueryConstructor( + profile_triples=profile_triples, profile_gpnt=profile_gpnt ).to_string() if pmts.requested_mediatypes[0][0] == "application/sparql-query": @@ -50,7 +74,6 @@ async def object_function_new( ) - async def object_function( request: Request, endpoint_uri: URIRef, diff --git a/prez/services/query_generation/search.py b/prez/services/query_generation/search.py index 7a2a5882..cf5a9c67 100755 --- a/prez/services/query_generation/search.py +++ b/prez/services/query_generation/search.py @@ -650,5 +650,3 @@ def order_by(self): @property def order_by_direction(self): return "DESC" - - diff --git a/prez/services/query_generation/shacl.py b/prez/services/query_generation/shacl.py index a12647d6..b07b6996 100644 --- a/prez/services/query_generation/shacl.py +++ b/prez/services/query_generation/shacl.py @@ -52,7 +52,9 @@ class NodeShape(Shape): bnode_depth: Optional[int] = None def from_graph(self): # TODO this can be a SPARQL select against the system graph. - self.bnode_depth = next(self.graph.objects(self.uri, SHEXT.bnodeDepth), None) + self.bnode_depth = next( + self.graph.objects(self.uri, SHEXT["bnode-depth"]), None + ) self.targetNode = next(self.graph.objects(self.uri, SH.targetNode), None) self.targetClasses = list(self.graph.objects(self.uri, SH.targetClass)) self.propertyShapesURIs = list(self.graph.objects(self.uri, SH.property)) @@ -87,7 +89,7 @@ def to_grammar(self): if self.rules: self._process_rules() if self.bnode_depth: - _build_bnode_blocks(self) + self._build_bnode_blocks() def _process_class_targets(self): if len(self.targetClasses) == 1: @@ -153,79 +155,60 @@ def _process_property_shapes(self): self.triples_list = list(set(self.triples_list)) def _build_bnode_blocks(self): - bnode_depth = int(self.bnode_depth) - - p1 = Var(value="bn_p_1") - o1 = Var(value="bn_o_1") - p2 = Var(value="bn_p_2") - o2 = Var(value="bn_o_2") - triples_block = TriplesBlock( - triples=[ - SimplifiedTriple(subject=self.focus_node, predicate=p1, object=o1), - SimplifiedTriple(subject=o1, predicate=p2, object=o2), - ] - ) - filter_block = Filter( - constraint=Constraint( - content=BuiltInCall.create_with_one_expr("isBLANK", PrimaryExpression(content=o1)) - ) - ) - container_gpnt = GraphPatternNotTriples( - content=OptionalGraphPattern( - group_graph_pattern=GroupGraphPattern( - content=GroupGraphPatternSub( - triples_block=triples_block, - graph_patterns_or_triples_blocks=[ - GraphPatternNotTriples( - content=filter_block + max_depth = int(self.bnode_depth) + + def optional_gpnt(depth): + # graph pattern or triples block list, which will contain the filter, and any nested optional blocks + gpotb = [ + GraphPatternNotTriples( + content=Filter( + constraint=Constraint( + content=BuiltInCall.create_with_one_expr( + "isBLANK", + PrimaryExpression(content=Var(value=f"bn_o_{depth}")), ) - ] + ) ) - ) - ) - ) - container_ggps = GroupGraphPatternSub( - graph_patterns_or_triples_blocks=[container_gpnt] - ) - container_ggp = GroupGraphPattern(content=container_ggps) - - def process_bn_level(depth, max_depth, outer_ggps): - old_o_var = Var(value=f"bn_o_{depth}") - new_p_var = Var(value=f"bn_p_{depth + 1}") - new_o_var = Var(value=f"bn_o_{depth + 1}") - triples_block = TriplesBlock( - triples=[ + ), + ] + + # recursive call to build 
nested optional blocks + if depth < max_depth: + gpotb.append(optional_gpnt(depth + 1)) + + # triples to go inside the optional block + triples = [] + if depth == 1: + triples.append( SimplifiedTriple( - subject=old_o_var, predicate=new_p_var, object=new_o_var - ) - ] - ) - gpnt = GraphPatternNotTriples( - content=Filter( - constraint=Constraint( - content=BuiltInCall.create_with_one_expr( - "isBLANK", PrimaryExpression(content=old_o_var) - ) + subject=self.focus_node, + predicate=Var(value=f"bn_p_{depth}"), + object=Var(value=f"bn_o_{depth}"), ) ) + triples.append( + SimplifiedTriple( + subject=Var(value=f"bn_o_{depth}"), + predicate=Var(value=f"bn_p_{depth + 1}"), + object=Var(value=f"bn_o_{depth + 1}"), + ) ) - opt = OptionalGraphPattern( - group_graph_pattern=GroupGraphPattern( - content=GroupGraphPatternSub( - triples_block=triples_block, - graph_patterns_or_triples_blocks=[gpnt], + + # optional block containing triples + opt_gpnt = GraphPatternNotTriples( + content=OptionalGraphPattern( + group_graph_pattern=GroupGraphPattern( + content=GroupGraphPatternSub( + triples_block=TriplesBlock(triples=triples), + graph_patterns_or_triples_blocks=gpotb, + ) ) ) ) - outer_ggps.graph_patterns_or_triples_blocks.append(opt) - if depth < max_depth: - process_bn_level(depth + 1, max_depth, ggps) + return opt_gpnt - if bnode_depth > 1: - process_bn_level(depth=2, max_depth=bnode_depth, outer_ggps=ggps) - gpnt = GraphPatternNotTriples( - content=GroupOrUnionGraphPattern(group_graph_patterns=[container_ggp]) - ) + nested_ogp = optional_gpnt(depth=1) + self.gpnt_list.append(nested_ogp) class PropertyShape(Shape): @@ -511,80 +494,3 @@ class InversePath(PropertyPath): def __len__(self): return 1 - - -def _build_bnode_blocks(self): - bnode_depth = list( - self.profile_graph.objects( - subject=self.profile_uri, predicate=SHEXT["bnode-depth"] - ) - ) - if not bnode_depth or bnode_depth == [0]: - return - else: - bnode_depth = int(bnode_depth[0]) - p1 = Var(value="bn_p_1") - o1 = Var(value="bn_o_1") - p2 = Var(value="bn_p_2") - o2 = Var(value="bn_o_2") - triples_block = TriplesBlock( - triples=[ - SimplifiedTriple(subject=self.focus_node, predicate=p1, object=o1), - SimplifiedTriple(subject=o1, predicate=p2, object=o2), - ] - ) - PrimaryExpression(content=o1) = PrimaryExpression(content=o1) - constraint = Constraint( - content=BuiltInCall.create_with_one_expr("isBLANK", PrimaryExpression(content=o1)) - ) - filter_block = Filter(constraint=constraint) - gpnt = GraphPatternNotTriples(content=filter_block) - ggps = GroupGraphPatternSub( - triples_block=triples_block, graph_patterns_or_triples_blocks=[gpnt] - ) - ggp = GroupGraphPattern(content=ggps) - outer_opt = OptionalGraphPattern(group_graph_pattern=ggp) - container_gpnt = GraphPatternNotTriples(content=outer_opt) - container_ggps = GroupGraphPatternSub( - graph_patterns_or_triples_blocks=[container_gpnt] - ) - container_ggp = GroupGraphPattern(content=container_ggps) - - def process_bn_level(depth, max_depth, outer_ggps): - old_o_var = Var(value=f"bn_o_{depth}") - new_p_var = Var(value=f"bn_p_{depth + 1}") - new_o_var = Var(value=f"bn_o_{depth + 1}") - triples_block = TriplesBlock( - triples=[ - SimplifiedTriple( - subject=old_o_var, predicate=new_p_var, object=new_o_var - ) - ] - ) - gpnt = GraphPatternNotTriples( - content=Filter( - constraint=Constraint( - content=BuiltInCall.create_with_one_expr( - "isBLANK", PrimaryExpression(content=old_o_var) - ) - ) - ) - ) - opt = OptionalGraphPattern( - group_graph_pattern=GroupGraphPattern( - 
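# Note on the recursive optional_gpnt above: with shext:bnode-depth 2 it
# produces nested OPTIONAL blocks along these lines (illustrative SPARQL,
# reconstructed from the grammar objects; variable names match the code):
#
#   OPTIONAL {
#     ?focus_node ?bn_p_1 ?bn_o_1 .
#     ?bn_o_1 ?bn_p_2 ?bn_o_2 .
#     FILTER(isBLANK(?bn_o_1))
#     OPTIONAL {
#       ?bn_o_2 ?bn_p_3 ?bn_o_3 .
#       FILTER(isBLANK(?bn_o_2))
#     }
#   }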
content=GroupGraphPatternSub( - triples_block=triples_block, - graph_patterns_or_triples_blocks=[gpnt], - ) - ) - ) - outer_ggps.graph_patterns_or_triples_blocks.append(opt) - if depth < max_depth: - process_bn_level(depth + 1, max_depth, ggps) - - if bnode_depth > 1: - process_bn_level(depth=2, max_depth=bnode_depth, outer_ggps=ggps) - gpnt = GraphPatternNotTriples( - content=GroupOrUnionGraphPattern(group_graph_patterns=[container_ggp]) - ) - self.main_where_ggps.add_pattern(gpnt) diff --git a/prez/services/query_generation/umbrella.py b/prez/services/query_generation/umbrella.py index 95b3c8d6..dbb2a6c2 100755 --- a/prez/services/query_generation/umbrella.py +++ b/prez/services/query_generation/umbrella.py @@ -1,574 +1,12 @@ -import re -from string import Template -from typing import Union, Optional, List, Dict +from typing import Union, Optional, List -from pydantic import BaseModel -from rdflib import URIRef, Namespace, Graph, SH, BNode, Literal -from rdflib.collection import Collection - -from prez.cache import profiles_graph_cache, endpoints_graph_cache from prez.services.query_generation.cql import CQLParser from prez.services.query_generation.search import SearchQueryRegex from prez.services.query_generation.shacl import NodeShape from temp.grammar import * -ONT = Namespace("https://prez.dev/ont/") -ALTREXT = Namespace("http://www.w3.org/ns/dx/conneg/altr-ext#") -SHEXT = Namespace("http://example.com/shacl-extension#") - - -# class PrezQueryConstructor(BaseModel): -# class Config: -# arbitrary_types_allowed = True -# -# runtime_values: dict -# endpoint_graph: Graph -# profile_graph: Graph -# listing_or_object: str -# focus_node: Union[IRI, Var] = Var(value="focus_node") -# endpoint_uri: Optional[URIRef] = None -# profile_uri: Optional[URIRef] = None -# -# construct_triples: Optional[List[SimplifiedTriple]] = [] -# main_where_ggps: Optional[GroupGraphPatternSub] = GroupGraphPatternSub() -# inner_select: Optional[Union[SubSelect, SubSelectString]] = None -# -# endpoint_shacl_triples: Optional[List[SimplifiedTriple]] = [] -# endpoint_shacl_gpnt: Optional[List[GraphPatternNotTriples]] = [] -# cql_triples: Optional[List[SimplifiedTriple]] = [] -# cql_gpnt: Optional[List[GraphPatternNotTriples]] = [] -# select_template: Optional[Template] = None -# sparql: Optional[str] = None -# -# # Additional fields -# default_limit: Optional[int] = None -# default_offset: Optional[int] = None -# default_order_by: Optional[str] = None -# default_order_by_desc: Optional[bool] = None -# runtime_vals_expanded: Optional[Dict] = {} -# merged_runtime_and_default_vals: Optional[Dict] = {} -# -# def _expand_runtime_vars(self): -# for k, v in self.runtime_values.items(): -# if k in ["limit", "offset", "q"]: -# self.runtime_vals_expanded[k] = v -# elif v: -# val = IRI(value=v).to_string() -# self.runtime_vals_expanded[k] = val -# -# def _merge_runtime_and_default_vars(self): -# default_args = { -# "limit": self.default_limit, -# "offset": self.default_offset, -# "order_by": self.default_order_by, -# "order_by_desc": self.default_order_by_desc, -# } -# self.merged_runtime_and_default_vals = default_args | self.runtime_vals_expanded -# -# def generate_sparql(self): -# """ -# Generates SPARQL query from Shape profile_graph. 
-# """ -# self._expand_runtime_vars() -# if self.listing_or_object == "listing": -# self.build_inner_select() -# self.parse_profile() -# self._generate_query() -# -# def _generate_query(self): -# where = WhereClause( -# group_graph_pattern=GroupGraphPattern(content=self.main_where_ggps) -# ) -# -# if self.construct_triples: -# self.construct_triples.extend(where.collect_triples()) -# else: -# self.construct_triples = where.collect_triples() -# self.construct_triples = list(set(self.construct_triples)) -# -# if self.listing_or_object == "listing": -# gpnt = GraphPatternNotTriples( -# content=GroupOrUnionGraphPattern( -# group_graph_patterns=[GroupGraphPattern(content=self.inner_select)] -# ) -# ) -# self.main_where_ggps.add_pattern(gpnt, prepend=True) -# -# construct_template = ConstructTemplate( -# construct_triples=ConstructTriples(triples=self.construct_triples) -# ) -# solution_modifier = SolutionModifier() -# query_str = ConstructQuery( -# construct_template=construct_template, -# where_clause=where, -# solution_modifier=solution_modifier, -# ).to_string() -# self.sparql = query_str -# -# def build_inner_select(self): -# """ -# Either set the focus_node to a URIRef, if a target node is provided, or generate a triple pattern to get list items -# Generates triples for the endpoint definition with runtime values substituted. -# """ -# inner_select_ggps = GroupGraphPatternSub() -# -# self._set_limit_and_offset() -# self._merge_runtime_and_default_vars() -# -# # rule_nodes = list( -# # self.endpoint_graph.objects(subject=self.endpoint_uri, predicate=SH.rule) -# # ) -# -# sol_mod, order_by_triple = self._create_focus_node_solution_modifier() -# -# if self.select_template: -# # sh:target / sh:select -# sss = self.create_select_subquery_from_template(sol_mod, order_by_triple) -# self.inner_select = sss -# -# # # rule nodes - for CONSTRUCT TRIPLES patterns. -# # if rule_nodes: -# # for rule_node in rule_nodes: -# # self._create_construct_triples_from_sh_rules(rule_node) -# -# else: -# self.inner_select = SubSelect( -# select_clause=SelectClause(variables_or_all=[self.focus_node]), -# where_clause=WhereClause( -# group_graph_pattern=GroupGraphPattern(content=inner_select_ggps) -# ), -# solution_modifier=sol_mod, -# ) -# -# if order_by_triple: -# inner_select_ggps.add_triple(order_by_triple) -# -# # otherwise just use what is provided by the endpoint shapes -# all_triples = self.endpoint_shacl_triples + self.cql_triples -# if all_triples: -# tb = TriplesBlock(triples=all_triples) -# inner_select_ggps.add_pattern(tb) -# -# all_gpnt = self.endpoint_shacl_gpnt + self.cql_gpnt -# if all_gpnt: -# for gpnt in all_gpnt: -# inner_select_ggps.add_pattern(gpnt) -# -# # def sh_rule_type_conversion(self, items: List): -# # """ -# # Assumes Literals are actually Variables. -# # """ -# # new_items = [] -# # for item in items: -# # if isinstance(item, URIRef): -# # item = IRI(value=item) -# # elif isinstance(item, Literal): -# # item = Var(value=item[1:]) -# # new_items.append(item) -# # return new_items -# # -# # def _create_construct_triples_from_sh_rules(self, rule_node): -# # """CONSTRUCT {?s ?p ?o} based on sh:rule [ sh:subject ... 
]""" -# # subject = self.endpoint_graph.value(subject=rule_node, predicate=SH.subject) -# # predicate = self.endpoint_graph.value(subject=rule_node, predicate=SH.predicate) -# # object = self.endpoint_graph.value(subject=rule_node, predicate=SH.object) -# # if subject == SH.this: -# # subject = self.focus_node -# # subject, predicate, object = self.sh_rule_type_conversion( -# # [subject, predicate, object] -# # ) -# # -# # triple = SimplifiedTriple(subject=subject, predicate=predicate, object=object) -# # if self.construct_triples: -# # self.construct_triples.append(triple) -# # else: -# # self.construct_triples = [triple] -# -# def create_select_subquery_from_template(self, sol_mod, order_by_triple): -# # expand any prefixes etc. in case the prefixes are not defined in the query this subquery is being inserted -# # into. NB Shape does provide a mechanism to declare prefixes used in SPARQL target - this has not been -# # implemented -# substituted_query = self.select_template.substitute( -# self.merged_runtime_and_default_vals -# ).rstrip() -# if order_by_triple: # insert it before the end of the string, -# order_by_triple_text = order_by_triple.to_string() -# substituted_query = ( -# substituted_query[:-1] + f"{{{order_by_triple_text}}} }}" -# ) -# additional_strings = [] -# if self.cql_triples: # for example from cql -# additional_strings.append( -# TriplesBlock(triples=self.cql_triples).to_string() -# ) -# if self.cql_gpnt: -# additional_strings.extend([gpnt.to_string() for gpnt in self.cql_gpnt]) -# substituted_query = self.split_query(substituted_query, additional_strings) -# sss = SubSelectString( -# select_string=substituted_query, solution_modifier=sol_mod -# ) -# return sss -# -# def split_query(self, original_query, additional_strings: List[str]): -# # Regex to match the entire structure: 'SELECT ?xxx { ... }' -# pattern = r"(SELECT\s+[\?\w\s\(\)]+\s*\{)(.*?)(\}\s*)" -# # Use re.split to split the query based on the pattern -# parts = re.split(pattern, original_query, flags=re.DOTALL) -# parts = [part for part in parts if part.strip()] -# new_parts = [parts[0]] + additional_strings -# if len(parts) > 1: -# new_parts.extend(parts[1:]) -# new_query = "".join(part for part in new_parts) -# return new_query -# -# def _create_focus_node_solution_modifier(self): -# """ -# Solution modifiers include LIMIT, OFFSET, ORDER BY clauses. 
-# """ -# order_clause = order_by_triple = None # order clause is optional -# order_by_path = self.merged_runtime_and_default_vals.get("order_by") -# if order_by_path: -# direction = self.merged_runtime_and_default_vals.get("order_by_desc") -# if direction: -# direction = "DESC" -# else: -# direction = "ASC" -# order_cond = OrderCondition( -# var=Var(value="order_by_var"), direction=direction -# ) -# order_clause = OrderClause(conditions=[order_cond]) -# order_by_triple = SimplifiedTriple( -# subject=self.focus_node, -# predicate=IRI(value=order_by_path[0]), -# object=Var(value="order_by_var"), -# ) -# limit = int(self.merged_runtime_and_default_vals["limit"]) -# offset = int(self.merged_runtime_and_default_vals["offset"]) -# limit_clause = LimitClause(limit=limit) -# offset_clause = OffsetClause(offset=offset) -# limit_offset_clauses = LimitOffsetClauses( -# limit_clause=limit_clause, offset_clause=offset_clause -# ) -# sol_mod = SolutionModifier( -# order_by=order_clause, limit_offset=limit_offset_clauses -# ) -# return sol_mod, order_by_triple -# -# def _set_limit_and_offset(self): -# """ -# Sets the default limit, offset, and ordering for a listing endpoint. -# """ -# default_limit = next( -# self.endpoint_graph.objects( -# subject=self.endpoint_uri, predicate=SHEXT.limit -# ), -# 20, -# ) -# default_offset = next( -# self.endpoint_graph.objects( -# subject=self.endpoint_uri, predicate=SHEXT.offset -# ), -# 0, -# ) -# default_order_by = list( -# self.endpoint_graph.objects( -# subject=self.endpoint_uri, predicate=SHEXT.orderBy -# ) -# ) -# -# self.default_limit = int(default_limit) -# self.default_offset = int(default_offset) -# -# # Process each blank node in the default_order_by list -# for blank_node in default_order_by: -# # Extract sh:path -# path = next(self.endpoint_graph.objects(blank_node, SH.path), None) -# if not path: -# continue # Skip if no sh:path is found -# -# # Check for sh:desc -# desc_node = next(self.endpoint_graph.objects(blank_node, SHEXT.desc), None) -# is_descending = ( -# True if desc_node and (desc_node == Literal(True)) else False -# ) -# -# # Add the configuration to the list -# self.default_order_by = (path,) -# self.default_order_by_desc = is_descending -# -# def parse_profile(self): -# for i, property_node in enumerate( -# self.profile_graph.objects(subject=self.profile_uri, predicate=SH.property) -# ): -# self._parse_property_shapes(property_node, i) -# self._build_bnode_blocks() -# - def _build_bnode_blocks(self): - bnode_depth = list( - self.profile_graph.objects( - subject=self.profile_uri, predicate=SHEXT["bnode-depth"] - ) - ) - if not bnode_depth or bnode_depth == [0]: - return - else: - bnode_depth = int(bnode_depth[0]) - p1 = Var(value="bn_p_1") - o1 = Var(value="bn_o_1") - p2 = Var(value="bn_p_2") - o2 = Var(value="bn_o_2") - triples_block = TriplesBlock( - triples=[ - SimplifiedTriple(subject=self.focus_node, predicate=p1, object=o1), - SimplifiedTriple(subject=o1, predicate=p2, object=o2), - ] - ) - o1_pe = PrimaryExpression(content=o1) - constraint = Constraint( - content=BuiltInCall.create_with_one_expr("isBLANK", o1_pe) - ) - filter_block = Filter(constraint=constraint) - gpnt = GraphPatternNotTriples(content=filter_block) - ggps = GroupGraphPatternSub( - triples_block=triples_block, graph_patterns_or_triples_blocks=[gpnt] - ) - ggp = GroupGraphPattern(content=ggps) - outer_opt = OptionalGraphPattern(group_graph_pattern=ggp) - container_gpnt = GraphPatternNotTriples(content=outer_opt) - container_ggps = GroupGraphPatternSub( - 
graph_patterns_or_triples_blocks=[container_gpnt] - ) - container_ggp = GroupGraphPattern(content=container_ggps) - - def process_bn_level(depth, max_depth, outer_ggps): - old_o_var = Var(value=f"bn_o_{depth}") - new_p_var = Var(value=f"bn_p_{depth + 1}") - new_o_var = Var(value=f"bn_o_{depth + 1}") - triples_block = TriplesBlock( - triples=[ - SimplifiedTriple( - subject=old_o_var, predicate=new_p_var, object=new_o_var - ) - ] - ) - gpnt = GraphPatternNotTriples( - content=Filter( - constraint=Constraint( - content=BuiltInCall.create_with_one_expr( - "isBLANK", PrimaryExpression(content=old_o_var) - ) - ) - ) - ) - opt = OptionalGraphPattern( - group_graph_pattern=GroupGraphPattern( - content=GroupGraphPatternSub( - triples_block=triples_block, - graph_patterns_or_triples_blocks=[gpnt], - ) - ) - ) - outer_ggps.graph_patterns_or_triples_blocks.append(opt) - if depth < max_depth: - process_bn_level(depth + 1, max_depth, ggps) - - if bnode_depth > 1: - process_bn_level(depth=2, max_depth=bnode_depth, outer_ggps=ggps) - gpnt = GraphPatternNotTriples( - content=GroupOrUnionGraphPattern(group_graph_patterns=[container_ggp]) - ) - self.main_where_ggps.add_pattern(gpnt) -# -# def _parse_property_shapes(self, property_node, i): -# def process_path_object(path_obj: Union[URIRef, BNode]): -# if isinstance(path_obj, BNode): -# pred_objects_gen = self.profile_graph.predicate_objects( -# subject=path_obj -# ) -# bn_pred, bn_obj = next(pred_objects_gen, (None, None)) -# if bn_obj == SH.union: -# pass -# elif bn_pred == SH.inversePath: -# inverse_preds.append(IRI(value=bn_obj)) -# elif bn_pred == SH.alternativePath: -# predicates.extend(list(Collection(self.profile_graph, bn_obj))) -# else: # sequence paths -# predicates.append(tuple(Collection(self.profile_graph, path_obj))) -# else: # a plain path specification to restrict the predicate to a specific value -# predicates.append(path_obj) -# -# inverse_preds = [] # list of IRIs -# predicates = [] # list of IRIs -# union_items = None -# path_object = self.profile_graph.value( -# subject=property_node, predicate=SH.path, default=None -# ) -# if isinstance(path_object, BNode): -# predicate_objects_gen = self.profile_graph.predicate_objects( -# subject=path_object -# ) -# bnode_pred, bnode_obj = next(predicate_objects_gen, (None, None)) -# if bnode_obj == SH.union: # TODO or sh:or ?? 
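# Note on the (now commented-out) path handling above: each SHACL path form
# maps to a distinct pattern around the focus node (illustrative examples):
#
#   sh:path dcterms:title
#       -> ?focus_node dcterms:title ?objs
#   sh:path [ sh:inversePath dcterms:hasPart ]
#       -> ?inv_path_i ?inv_pred_i ?focus_node .
#          VALUES ?inv_pred_i { dcterms:hasPart }
#   sh:path ( prov:qualifiedDerivation prov:hadRole )   # sequence path
#       -> ?focus_node prov:qualifiedDerivation ?seq_obj_1 .
#          ?seq_obj_1 prov:hadRole ?seq_obj_terminal1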
-# union_list_bnode = list(Collection(self.profile_graph, path_object))[1] -# union_items = list(Collection(self.profile_graph, union_list_bnode)) -# -# ggp_list = [] -# if union_items: -# for item in union_items: -# process_path_object(item) -# else: -# process_path_object(path_object) -# -# if inverse_preds: -# ggps_under_under_union = GroupGraphPatternSub() -# ggps = ggps_under_under_union -# ggp = GroupGraphPattern(content=ggps_under_under_union) -# ggp_list.append(ggp) -# self._add_inverse_preds(ggps, inverse_preds, i) -# if predicates: -# self._add_predicate_constraints(predicates, property_node, ggp_list) -# self._add_object_constraints(ggp_list, property_node) -# union = GroupOrUnionGraphPattern(group_graph_patterns=ggp_list) -# gpnt = GraphPatternNotTriples(content=union) -# -# min = int( -# self.profile_graph.value( -# subject=property_node, predicate=SH.minCount, default=1 -# ) -# ) -# if min == 0: # Add Optional GroupGraphPatternSub "wrapper" as the main GGPS -# ggps_under_optional = GroupGraphPatternSub( -# graph_patterns_or_triples_blocks=[gpnt] -# ) -# ggp = GroupGraphPattern(content=ggps_under_optional) -# optional = OptionalGraphPattern(group_graph_pattern=ggp) -# gpnt = GraphPatternNotTriples(content=optional) -# self.main_where_ggps.add_pattern(gpnt) -# -# def _add_inverse_preds( -# self, ggps: GroupGraphPatternSub, inverse_preds: List[IRI], i -# ): -# if inverse_preds: -# ggps.add_triple( -# SimplifiedTriple( -# subject=Var(value=f"inv_path_{i}"), -# predicate=Var(value=f"inv_pred_{i}"), -# object=self.focus_node, -# ) -# ) -# dbv_list = [DataBlockValue(value=p) for p in inverse_preds] -# ildov = InlineDataOneVar( -# variable=Var(value=f"inv_pred_{i}"), datablockvalues=dbv_list -# ) -# data_block = DataBlock(block=ildov) -# inline_data = InlineData(data_block=data_block) -# gpnt = GraphPatternNotTriples(content=inline_data) -# ggps.add_pattern(gpnt) -# -# def _add_predicate_constraints(self, predicates, property_node, ggp_list): -# # check for any sequence paths - process separately -# sps = [p for p in predicates if isinstance(p, tuple)] # convert to IRIs here -# predicates = [ -# IRI(value=p) for p in predicates if not isinstance(p, tuple) -# ] # convert to IRIs below -# -# try: -# for i, (pred1, pred2) in enumerate(sps): -# t1 = SimplifiedTriple( -# subject=self.focus_node, -# predicate=IRI(value=pred1), -# object=Var(value=f"seq_obj_{i + 1}"), -# ) -# t2 = SimplifiedTriple( -# subject=Var(value=f"seq_obj_{i + 1}"), -# predicate=IRI(value=pred2), -# object=Var(value=f"seq_obj_terminal{i + 1}"), -# ) -# tb = TriplesBlock(triples=[t1, t2]) -# ggps = GroupGraphPatternSub(triples_block=tb) -# ggp = GroupGraphPattern(content=ggps) -# ggp_list.append(ggp) -# except Exception as e: -# print(e) -# -# # process direct path predicates -# max = self.profile_graph.value(subject=property_node, predicate=SH.maxCount) -# simplified_triple = SimplifiedTriple( -# subject=self.focus_node, -# predicate=Var(value="preds"), -# object=Var(value="objs"), -# ) -# tb = TriplesBlock(triples=[simplified_triple]) -# if predicates: -# if max == Literal(0): # excluded predicates. 
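# Note on _add_predicate_constraints above: direct-path predicates produce one
# of three inner patterns (illustrative, reconstructed from the branches below):
#
#   sh:maxCount 0 (excluded predicates):
#       ?focus_node ?preds ?objs .
#       FILTER (?preds NOT IN (dcterms:hasPart, rdfs:member))
#
#   enumerated predicates (shext:allPredicateValues absent):
#       ?focus_node ?preds ?objs .
#       VALUES ?preds { dcterms:title dcterms:description }
#
#   shext:allPredicateValues alone:
#       ?focus_node ?preds ?objs .    (no VALUES / FILTER constraint)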
-# values = [ -# PrimaryExpression(content=IRIOrFunction(iri=p)) for p in predicates -# ] -# focus_pe = PrimaryExpression(content=Var(value="preds")) -# values_constraint = Filter.filter_relational( -# focus=focus_pe, comparators=values, operator="NOT IN" -# ) -# gpnt = GraphPatternNotTriples(content=values_constraint) -# if ggp_list: -# for ggp in ggp_list: -# ggp.content.add_pattern(gpnt) -# else: -# ggps = GroupGraphPatternSub( -# graph_patterns_or_triples_blocks=[gpnt, tb] -# ) -# ggp = GroupGraphPattern(content=ggps) -# ggp_list.append(ggp) -# elif ( -# IRI(value=SHEXT.allPredicateValues) not in predicates -# ): # add VALUES clause -# dbv_list = [DataBlockValue(value=p) for p in predicates] -# inline_data_one_var = InlineDataOneVar( -# variable=Var(value="preds"), datablockvalues=dbv_list -# ) -# data_block = DataBlock(block=inline_data_one_var) -# inline_data = InlineData(data_block=data_block) -# gpnt = GraphPatternNotTriples(content=inline_data) -# ggps = GroupGraphPatternSub(graph_patterns_or_triples_blocks=[gpnt, tb]) -# ggp = GroupGraphPattern(content=ggps) -# ggp_list.append(ggp) -# elif predicates == [IRI(value=SHEXT.allPredicateValues)]: -# ggps = GroupGraphPatternSub(triples_block=tb) -# ggp = GroupGraphPattern(content=ggps) -# ggp_list.append(ggp) -# -# def _add_object_constraints(self, ggp_list, property_node): -# value = self.profile_graph.value( -# subject=property_node, predicate=SH.hasValue, default=None -# ) -# values_bn = self.profile_graph.value( -# subject=property_node, predicate=SH["in"], default=None -# ) -# if value: # a specific value -# objects = [value] -# elif values_bn: # a set of values -# c = Collection(self.profile_graph, values_bn) -# objects = list(c) -# if value or values_bn: -# ggps = GroupGraphPatternSub() -# ggp = GroupGraphPattern(content=ggps) -# ggp_list.append(ggp) -# objs = [] -# for obj in objects: -# if isinstance(obj, Literal): -# objs.append(RDFLiteral(value=obj)) -# elif isinstance(obj, URIRef): -# objs.append(IRI(value=obj)) -# dbv_list = [DataBlockValue(value=p) for p in objs] -# inline_data_one_var = InlineDataOneVar( -# variable=Var(value="objs"), datablockvalues=dbv_list -# ) -# data_block = DataBlock(block=inline_data_one_var) -# inline_data = InlineData(data_block=data_block) -# gpnt = GraphPatternNotTriples(content=inline_data) -# ggps.add_pattern(gpnt) - -class PrezQueryConstructorV2(ConstructQuery): +class PrezQueryConstructor(ConstructQuery): """ Creates a CONSTRUCT query to describe a listing of objects or an individual object. 
Query format: @@ -635,8 +73,7 @@ def __init__( GroupGraphPattern( content=SubSelect( select_clause=SelectClause( - distinct=True, - variables_or_all=inner_select_vars + distinct=True, variables_or_all=inner_select_vars ), where_clause=WhereClause( group_graph_pattern=GroupGraphPattern( @@ -659,7 +96,7 @@ def __init__( offset=offset ), # OFFSET n ), - order_by=oc + order_by=oc, ), ) ) @@ -676,6 +113,9 @@ def __init__( # construct triples is usually only from the profile, but in the case of search queries for example, additional # triples are added construct_triples = TriplesBlock(triples=profile_triples).collect_triples() + # triples from any profiles gpnt + for gpnt in profile_gpnt: + construct_triples.extend(gpnt.collect_triples()) if additional_construct_triples: construct_triples.extend(additional_construct_triples) construct_template = ConstructTemplate( @@ -689,37 +129,13 @@ def __init__( @property def inner_select(self): - return self.where_clause.group_graph_pattern.content.graph_patterns_or_triples_blocks[-1].content.group_graph_patterns[0].content - - -# def get_profile_grammar(profile_uri: URIRef): -# """ -# Returns the grammar for a given profile. -# """ -# profile = NodeShape(uri=profile_uri, graph=profiles_graph_cache, kind="profile") -# return profile.triples_list, profile.gpnt_list -# -# -# def get_endpoint_grammar(endpoint_uri: URIRef): -# """ -# Returns the grammar for a given endpoint. -# """ -# endpoint_shape = NodeShape( -# uri=endpoint_uri, graph=endpoints_graph_cache, kind="endpoint" -# ) -# return endpoint_shape.triples_list, endpoint_shape.gpnt_list -# -# -# def get_cql_grammar(): -# pass -# -# -# def get_search_grammar(): -# pass -# -# -# def get_all_grammar(): -# pass + return ( + self.where_clause.group_graph_pattern.content.graph_patterns_or_triples_blocks[ + -1 + ] + .content.group_graph_patterns[0] + .content + ) def merge_listing_query_grammar_inputs( diff --git a/temp/grammar/grammar.py b/temp/grammar/grammar.py index 79baaf89..f020cee0 100755 --- a/temp/grammar/grammar.py +++ b/temp/grammar/grammar.py @@ -216,8 +216,8 @@ class TriplesBlock(SPARQLGrammarBase): def render(self) -> Generator[str, None, None]: for i, triple in enumerate(self.triples): yield from triple.render() - if i < len(self.triples) - 1: # Check if it's not the last triple - yield "\n" + # if i < len(self.triples) - 1: # Check if it's not the last triple + yield "\n" class PrimaryExpression(SPARQLGrammarBase): diff --git a/test_data/ogc_records_profile.ttl b/test_data/ogc_records_profile.ttl deleted file mode 100755 index 333ef557..00000000 --- a/test_data/ogc_records_profile.ttl +++ /dev/null @@ -1,106 +0,0 @@ -PREFIX altr-ext: -PREFIX dcat: -PREFIX dcterms: -PREFIX geo: -PREFIX owl: -PREFIX prez: -PREFIX prof: -PREFIX prov: -PREFIX reg: -PREFIX rdf: -PREFIX rdfs: -PREFIX sh: -PREFIX skos: -PREFIX xsd: -PREFIX endpoint: -PREFIX shext: - - -prez:OGCRecordsProfile - a prof:Profile ; - dcterms:identifier "ogc"^^xsd:token ; - dcterms:description "A system profile for OGC Records conformant API" ; - dcterms:title "OGC Profile" ; - altr-ext:constrainsClass prez:CatPrez ; - altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - altr-ext:hasNodeShape [ - a sh:NodeShape ; - sh:targetClass dcat:Catalog , skos:Concept , geo:Feature , geo:FeatureCollection , skos:Collection , prez:SearchResult , prez:CQLObjectList ; - altr-ext:hasDefaultProfile prez:OGCListingProfile - ] , [ - a sh:NodeShape ; - sh:targetClass skos:ConceptScheme ; - altr-ext:hasDefaultProfile prez:OGCSchemesListProfile - ] , [ - a 
sh:NodeShape ; - sh:targetClass dcat:Catalog , skos:ConceptScheme , skos:Concept , geo:Feature , geo:FeatureCollection , skos:Collection ; - altr-ext:hasDefaultProfile prez:OGCItemProfile - ] - . - -prez:OGCListingProfile - a prof:Profile , prez:ListingProfile , sh:NodeShape ; - dcterms:title "OGC Listing Profile" ; - altr-ext:hasResourceFormat - "application/ld+json" , - "application/anot+ld+json" , - "application/rdf+xml" , - "text/anot+turtle" , - "text/turtle" ; - altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - altr-ext:constrainsClass dcat:Catalog , skos:Collection , geo:Feature , geo:FeatureCollection , skos:Concept , - dcat:Resource , prof:Profile , prez:SearchResult , prez:CQLObjectList ; - sh:property [ sh:path rdf:type ] - . - -prez:OGCSchemesListProfile - a prof:Profile , prez:ListingProfile , sh:NodeShape ; - dcterms:title "OGC Concept Scheme Listing Profile" ; - altr-ext:hasResourceFormat - "application/ld+json" , - "application/anot+ld+json" , - "application/rdf+xml" , - "text/anot+turtle" , - "text/turtle" ; - altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - altr-ext:constrainsClass skos:ConceptScheme ; - sh:property [ - sh:minCount 0 ; - sh:path ( - sh:union ( - dcterms:publisher - reg:status - ( prov:qualifiedDerivation prov:hadRole ) - ( prov:qualifiedDerivation prov:entity ) - ) - ) - ] - . - -prez:OGCItemProfile - a prof:Profile , prez:ObjectProfile , sh:NodeShape ; - dcterms:title "OGC Object Profile" ; - altr-ext:hasResourceFormat - "application/ld+json" , - "application/anot+ld+json" , - "application/rdf+xml" , - "text/anot+turtle" , - "text/turtle" ; - altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - sh:property [ - sh:path shext:allPredicateValues ; - ] , - [ - sh:minCount 0 ; - sh:path [ sh:inversePath dcterms:hasPart ] ; - ] ; - shext:bnode-depth 2 ; - altr-ext:constrainsClass dcat:Catalog , - dcat:Resource , - skos:ConceptScheme, - skos:Collection , - skos:Concept , - geo:FeatureCollection , - geo:Feature , - prof:Profile ; - . diff --git a/test_data/spaceprez_default_profiles.ttl b/test_data/spaceprez_default_profiles.ttl deleted file mode 100755 index 9e6a3c8a..00000000 --- a/test_data/spaceprez_default_profiles.ttl +++ /dev/null @@ -1,138 +0,0 @@ -PREFIX altr-ext: -PREFIX dcat: -PREFIX dcterms: -PREFIX geo: -PREFIX owl: -PREFIX prez: -PREFIX prof: -PREFIX rdf: -PREFIX rdfs: -PREFIX sh: -PREFIX skos: -PREFIX xsd: -PREFIX shext: - - -prez:SpacePrezProfile - a prof:Profile ; - dcterms:identifier "spaceprez"^^xsd:token ; - dcterms:description "A system profile for SpacePrez" ; - skos:prefLabel "SpacePrez profile" ; - altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - altr-ext:constrainsClass prez:SpacePrez ; - altr-ext:hasNodeShape [ - a sh:NodeShape ; - sh:targetClass dcat:Dataset ; - altr-ext:hasDefaultProfile - ] , [ - a sh:NodeShape ; - sh:targetClass geo:FeatureCollection ; - altr-ext:hasDefaultProfile prez:FeatureCollectionProfile - ] , [ - a sh:NodeShape ; - sh:targetClass geo:Feature ; - altr-ext:hasDefaultProfile prez:FeatureProfile - ] , [ - a sh:NodeShape ; - sh:targetClass prez:DatasetList ; - altr-ext:hasDefaultProfile - ] , [ - a sh:NodeShape ; - sh:targetClass prez:FeatureCollectionList ; - altr-ext:hasDefaultProfile prez:GeoListingProfile - ] , [ - a sh:NodeShape ; - sh:targetClass prez:FeatureList ; - altr-ext:hasDefaultProfile prez:GeoListingProfile - ] -. 
- -prez:FeatureCollectionProfile a prof:Profile ; - dcterms:description "A profile for GeoSPARQL FeatureCollections" ; - dcterms:identifier "geofc"^^xsd:token ; - dcterms:title "Feature Collection Profile" ; - altr-ext:constrainsClass geo:FeatureCollection ; - altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - altr-ext:hasResourceFormat - "application/ld+json" , - "application/anot+ld+json" , - "application/rdf+xml" , - "text/anot+turtle" , - "text/turtle" ; - sh:targetClass geo:FeatureCollection ; - sh:property - [ - sh:maxCount 0 ; - sh:path rdfs:member ; - ] , - [ - sh:path [ sh:inversePath rdfs:member ] ; - ] ; - shext:bnode-depth 2 ; -. - -prez:FeatureProfile a prof:Profile ; - dcterms:description "A profile for GeoSPARQL Features" ; - dcterms:identifier "geofeat"^^xsd:token ; - dcterms:title "Feature Profile" ; - altr-ext:constrainsClass geo:Feature ; - altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - altr-ext:hasResourceFormat - "application/ld+json" , - "application/anot+ld+json" , - "application/rdf+xml" , - "text/anot+turtle" , - "text/turtle" ; - sh:targetClass geo:Feature ; - sh:property - [ - sh:path [ sh:inversePath rdfs:member ] ; - ] , - [ - sh:path shext:allPredicateValues ; - ] ; - shext:bnode-depth 2 ; -. - - -prez:GeoListingProfile a prof:Profile ; - dcterms:description "A profile for listing GeoSPARQL Features and FeatureCollections" ; - dcterms:identifier "geolisting"^^xsd:token ; - dcterms:title "Geo Listing Profile" ; - altr-ext:constrainsClass prez:FeatureCollectionList , prez:FeatureList ; - altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - altr-ext:hasResourceFormat - "application/ld+json" , - "application/anot+ld+json" , - "application/rdf+xml" , - "text/anot+turtle" , - "text/turtle" ; - sh:property - [ - sh:path [ sh:inversePath rdfs:member ] ; - ] -. - - - a prof:Profile , prez:SpacePrezProfile ; - dcterms:description "Dataset Catalog Vocabulary (DCAT) is a W3C-authored RDF vocabulary designed to facilitate interoperability between data catalogs" ; - dcterms:identifier "dcat"^^xsd:token ; - dcterms:title "DCAT" ; - altr-ext:constrainsClass - dcat:Catalog , - dcat:Dataset ; - altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - altr-ext:hasResourceFormat - "application/ld+json" , - "application/anot+ld+json" , - "application/rdf+xml" , - "text/anot+turtle" , - "text/turtle" ; - altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - sh:property [ - sh:path shext:allPredicateValues ; - ] ; - shext:bnode-depth 2 ; - altr-ext:constrainsClass dcat:Catalog , dcat:Dataset ; -. 
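Both profile files deleted above follow the same altr-ext wiring: a system profile carries sh:NodeShape blank nodes, each pairing one or more sh:targetClass values with the altr-ext:hasDefaultProfile to use when a request names no profile. A minimal sketch of resolving that default with rdflib, assuming the profiles are loaded from some Turtle file (the path and helper name are illustrative, not part of the codebase):

from rdflib import Graph, Namespace, URIRef
from rdflib.namespace import SH

ALTR_EXT = Namespace("http://www.w3.org/ns/dx/conneg/altr-ext#")

def default_profile_for(profiles: Graph, target_class: URIRef) -> URIRef | None:
    # Each node shape targets one or more classes and names the profile
    # to fall back to when the client requests none explicitly.
    for shape in profiles.subjects(SH.targetClass, target_class):
        default = profiles.value(shape, ALTR_EXT.hasDefaultProfile)
        if default is not None:
            return default
    return None

profiles = Graph().parse("profiles.ttl", format="turtle")  # illustrative path
geo_fc = URIRef("http://www.opengis.net/ont/geosparql#FeatureCollection")
print(default_profile_for(profiles, geo_fc))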
- diff --git a/tests/test_count.py b/tests/_test_count.py similarity index 100% rename from tests/test_count.py rename to tests/_test_count.py diff --git a/tests/conftest.py b/tests/conftest.py index 35d26322..da1d8f99 100755 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -52,6 +52,12 @@ def override_get_repo(): app.dependency_overrides.clear() +@pytest.fixture(scope="module") +def client_no_override() -> TestClient: + with TestClient(app) as c: + yield c + + @pytest.fixture() def a_catalog_link(client): r = client.get("/catalogs") diff --git a/tests/test_alt_profiles.py b/tests/test_alt_profiles.py index 0358b8e7..1cbb5a72 100755 --- a/tests/test_alt_profiles.py +++ b/tests/test_alt_profiles.py @@ -26,7 +26,7 @@ def a_resource_link(client, a_catalog_link): def test_listing_alt_profile(client): - r = client.get(f"/catalogs?_mediatype=text/turtle&_profile=altr-ext:alt-profile") + r = client.get(f"/catalogs?_profile=altr-ext:alt-profile") response_graph = Graph().parse(data=r.text) assert ( URIRef("http://www.w3.org/ns/dx/conneg/altr-ext#alt-profile"), diff --git a/tests/test_connegp.py b/tests/test_connegp.py index 6bd232e2..c599effc 100644 --- a/tests/test_connegp.py +++ b/tests/test_connegp.py @@ -1,32 +1,11 @@ -from pathlib import Path - import pytest -from pyoxigraph import Store -from pyoxigraph.pyoxigraph import Store from rdflib import URIRef -from prez.app import app -from prez.dependencies import get_data_repo from prez.reference_data.prez_ns import PREZ -from prez.repositories import PyoxigraphRepo, Repo +from prez.repositories import PyoxigraphRepo from prez.services.connegp_service import NegotiatedPMTs -@pytest.fixture() -def test_store() -> Store: - store = Store() - file = Path(__file__).parent.parent / "test_data/ogc_records_profile.ttl" - store.load(file.read_bytes(), "text/turtle") - file = Path(__file__).parent.parent / "test_data/spaceprez_default_profiles.ttl" - store.load(file.read_bytes(), "text/turtle") - return store - - -@pytest.fixture() -def test_repo(test_store: Store) -> Repo: - return PyoxigraphRepo(test_store) - - @pytest.mark.parametrize( "headers, params, classes, listing, expected_selected", [ @@ -147,17 +126,17 @@ def test_repo(test_store: Store) -> Repo: ], ) @pytest.mark.asyncio -async def test_connegp(headers, params, classes, listing, expected_selected, test_repo): - def override_get_repo(): - return test_repo - - app.dependency_overrides[get_data_repo] = override_get_repo +async def test_connegp( + headers, params, classes, listing, expected_selected, client_no_override +): + system_store = client_no_override.app.state._state.get("pyoxi_system_store") + system_repo = PyoxigraphRepo(system_store) pmts = NegotiatedPMTs( headers=headers, params=params, classes=classes, listing=listing, - system_repo=test_repo, + system_repo=system_repo, ) await pmts.setup() assert pmts.selected == expected_selected diff --git a/tests/test_endpoints_profiles.py b/tests/test_endpoints_profiles.py index 30c18ce7..8d922977 100755 --- a/tests/test_endpoints_profiles.py +++ b/tests/test_endpoints_profiles.py @@ -2,22 +2,22 @@ from rdflib.namespace import RDF, PROF -def test_profile(client): +def test_profile(client_no_override): # check the example remote profile is loaded - r = client.get("/profiles") + r = client_no_override.get("/profiles") g = Graph().parse(data=r.text) assert (URIRef("https://prez.dev/profile/prez"), RDF.type, PROF.Profile) in g -def test_ogcprez_profile(client): +def test_ogcprez_profile(client_no_override): # check the example remote profile is 
loaded - r = client.get("/profiles/prez:OGCRecordsProfile") + r = client_no_override.get("/profiles/prez:OGCRecordsProfile") g = Graph().parse(data=r.text) assert (URIRef("https://prez.dev/OGCRecordsProfile"), RDF.type, PROF.Profile) in g -def test_sp_profile(client): +def test_sp_profile(client_no_override): # check the example remote profile is loaded - r = client.get("/profiles/prez:SpacePrezProfile") + r = client_no_override.get("/profiles/prez:SpacePrezProfile") g = Graph().parse(data=r.text) assert (URIRef("https://prez.dev/SpacePrezProfile"), RDF.type, PROF.Profile) in g diff --git a/tests/test_node_selection_shacl.py b/tests/test_node_selection_shacl.py index 848969b3..db20de41 100755 --- a/tests/test_node_selection_shacl.py +++ b/tests/test_node_selection_shacl.py @@ -4,6 +4,7 @@ from prez.services.query_generation.shacl import ( NodeShape, ) +from temp.grammar import Var endpoints_graph = Graph().parse( "prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl", format="turtle" @@ -12,7 +13,12 @@ @pytest.mark.parametrize("nodeshape_uri", ["http://example.org/ns#Collections"]) def test_nodeshape_parsing(nodeshape_uri): - ns = NodeShape(uri=URIRef(nodeshape_uri), graph=endpoints_graph, kind="endpoint") + ns = NodeShape( + uri=URIRef(nodeshape_uri), + graph=endpoints_graph, + kind="endpoint", + focus_node=Var(value="focus_node"), + ) assert ns.targetClasses == [ URIRef("http://www.opengis.net/ont/geosparql#FeatureCollection"), URIRef("http://www.w3.org/2004/02/skos/core#ConceptScheme"), @@ -27,5 +33,10 @@ def test_nodeshape_parsing(nodeshape_uri): ["http://example.org/ns#ConceptSchemeConcept"], ) def test_nodeshape_to_grammar(nodeshape_uri): - ns = NodeShape(uri=URIRef(nodeshape_uri), graph=endpoints_graph, kind="endpoint") + ns = NodeShape( + uri=URIRef(nodeshape_uri), + graph=endpoints_graph, + kind="endpoint", + focus_node=Var(value="focus_node"), + ) ... 
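The SHACL node-selection tests above, like the property-selection tests below, now pass the focus node into the shape explicitly as a grammar Var instead of letting the shape assume one. The point of threading a single Var through every shape is that node selection and property selection end up joined on the same SPARQL variable. A minimal sketch of the pattern, using the temp.grammar classes only as they appear in these tests (the predicate and class IRIs are placeholders, and exact rendering behaviour is an assumption):

from temp.grammar import IRI, SimplifiedTriple, Var

focus_node = Var(value="focus_node")  # shared by every shape in one query

# Node selection constrains ?focus_node to the target class ...
class_triple = SimplifiedTriple(
    subject=focus_node,
    predicate=IRI(value="http://www.w3.org/1999/02/22-rdf-syntax-ns#type"),
    object=IRI(value="http://www.w3.org/ns/dcat#Catalog"),
)

# ... while property selection hangs values off the same variable, so the
# generated WHERE clause joins on a single ?focus_node.
label_triple = SimplifiedTriple(
    subject=focus_node,
    predicate=IRI(value="http://www.w3.org/2000/01/rdf-schema#label"),
    object=Var(value="label"),
)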
diff --git a/tests/test_property_selection_shacl.py b/tests/test_property_selection_shacl.py index 2c0c3222..8a2225c3 100644 --- a/tests/test_property_selection_shacl.py +++ b/tests/test_property_selection_shacl.py @@ -30,7 +30,9 @@ def test_simple_path(): """ ) path_bn = g.value(subject=URIRef("http://example-profile"), predicate=SH.property) - ps = PropertyShape(uri=path_bn, graph=g) + ps = PropertyShape( + uri=path_bn, graph=g, kind="profile", focus_node=Var(value="focus_node") + ) assert ( SimplifiedTriple( subject=Var(value="focus_node"), @@ -51,7 +53,9 @@ def test_sequence_path(): """ ) path_bn = g.value(subject=URIRef("http://example-profile"), predicate=SH.property) - ps = PropertyShape(uri=path_bn, graph=g) + ps = PropertyShape( + uri=path_bn, graph=g, kind="profile", focus_node=Var(value="focus_node") + ) assert ( SimplifiedTriple( subject=Var(value="focus_node"), @@ -93,7 +97,9 @@ def test_union(): """ ) path_bn = g.value(subject=URIRef("http://example-profile"), predicate=SH.property) - ps = PropertyShape(uri=path_bn, graph=g, kind="profile") + ps = PropertyShape( + uri=path_bn, graph=g, kind="profile", focus_node=Var(value="focus_node") + ) assert ( SimplifiedTriple( subject=Var(value="focus_node"), @@ -161,7 +167,9 @@ def test_optional_props(): """ ) path_bn = g.value(subject=URIRef("http://example-profile"), predicate=SH.property) - ps = PropertyShape(uri=path_bn, graph=g, kind="profile") + ps = PropertyShape( + uri=path_bn, graph=g, kind="profile", focus_node=Var(value="focus_node") + ) assert ps.triples_list == [] assert isinstance(ps.gpnt_list[0].content, OptionalGraphPattern) @@ -187,7 +195,9 @@ def test_complex_optional_props(): """ ) path_bn = g.value(subject=URIRef("http://example-profile"), predicate=SH.property) - ps = PropertyShape(uri=path_bn, graph=g, kind="profile") + ps = PropertyShape( + uri=path_bn, graph=g, kind="profile", focus_node=Var(value="focus_node") + ) assert ps.triples_list == [] assert isinstance(ps.gpnt_list[0].content, OptionalGraphPattern) @@ -214,7 +224,9 @@ def test_excluded_props(): """ ) path_bn = g.value(subject=URIRef("http://example-profile"), predicate=SH.property) - ps = PropertyShape(uri=path_bn, graph=g, kind="profile") + ps = PropertyShape( + uri=path_bn, graph=g, kind="profile", focus_node=Var(value="focus_node") + ) assert ( SimplifiedTriple( subject=Var(value="prof_node_1"), diff --git a/tests/test_query_construction.py b/tests/test_query_construction.py index 13b34e9f..24fed381 100644 --- a/tests/test_query_construction.py +++ b/tests/test_query_construction.py @@ -4,12 +4,12 @@ from prez.services.query_generation.search import ( SearchQueryRegex, ) -from prez.services.query_generation.umbrella import PrezQueryConstructorV2 +from prez.services.query_generation.umbrella import PrezQueryConstructor from temp.grammar import * def test_basic_object(): - PrezQueryConstructorV2( + PrezQueryConstructor( profile_triples=[ SimplifiedTriple( subject=IRI(value="https://test-object"), @@ -26,7 +26,7 @@ def test_basic_object(): def test_basic_listing(): - test = PrezQueryConstructorV2( + test = PrezQueryConstructor( profile_triples=[ SimplifiedTriple( subject=Var(value="focus_node"), @@ -61,7 +61,7 @@ def test_basic_listing(): def test_search_query_regex(): sq = SearchQueryRegex(term="test", predicates=[RDFS.label]) - test = PrezQueryConstructorV2( + test = PrezQueryConstructor( profile_triples=[ SimplifiedTriple( subject=Var(value="focus_node"), From 20b263020c0496f9fdad7f478089a7a385b094c1 Mon Sep 17 00:00:00 2001 From: jamiefeiss Date: 
Date: Thu, 4 Apr 2024 17:49:18 +1000
Subject: [PATCH 25/25] Added broader concept to vocprez test data

---
 test_data/vocprez.ttl | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/test_data/vocprez.ttl b/test_data/vocprez.ttl
index b379711a..30790353 100755
--- a/test_data/vocprez.ttl
+++ b/test_data/vocprez.ttl
@@ -30,6 +30,13 @@ ex:SecondLevelConcept a skos:Concept ;
     skos:inScheme ex:SchemingConceptScheme ;
 .
 
+ex:SecondLevelConceptBroader a skos:Concept ;
+    skos:prefLabel "A second level concept broader" ;
+    ex:property "a property of the second level concept" ;
+    skos:broader ex:TopLevelConcept ;
+    skos:inScheme ex:SchemingConceptScheme ;
+.
+
 ex:ThirdLevelConcept a skos:Concept ;
     skos:prefLabel "A third level concept" ;
     ex:property "a property of the third level concept" ;
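The new concept hangs off ex:TopLevelConcept via skos:broader, giving the vocprez hierarchy tests an extra branch to traverse. A throwaway sanity check, assuming the file sits at test_data/vocprez.ttl relative to the repo root:

from rdflib import Graph
from rdflib.namespace import SKOS

g = Graph().parse("test_data/vocprez.ttl", format="turtle")
# List every narrower -> broader pair; the new concept should appear
# pointing at ex:TopLevelConcept.
for narrower, broader in g.subject_objects(SKOS.broader):
    print(narrower, "skos:broader", broader)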