diff --git a/Gemfile.lock b/Gemfile.lock index 7590a5e9..dfabd7a1 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -100,8 +100,7 @@ GIT GEM remote: https://rubygems.org/ specs: - activesupport (3.2.22.5) - i18n (~> 0.6, >= 0.6.4) + activesupport (3.1.12) multi_json (~> 1.0) addressable (2.8.5) public_suffix (>= 2.0.2, < 6.0) @@ -164,7 +163,7 @@ GEM ffi (~> 1.0) google-apis-analytics_v3 (0.13.0) google-apis-core (>= 0.11.0, < 2.a) - google-apis-core (0.11.1) + google-apis-core (0.11.2) addressable (~> 2.5, >= 2.5.1) googleauth (>= 0.16.2, < 2.a) httpclient (>= 2.8.1, < 3.a) @@ -187,7 +186,7 @@ GEM http-cookie (1.0.5) domain_name (~> 0.5) httpclient (2.8.3) - i18n (0.9.5) + i18n (1.14.1) concurrent-ruby (~> 1.0) json (2.6.3) json-schema (2.8.1) @@ -405,4 +404,4 @@ DEPENDENCIES webmock BUNDLED WITH - 2.3.23 + 2.4.12 diff --git a/config/solr/property_search/enumsconfig.xml b/config/solr/property_search/enumsconfig.xml new file mode 100644 index 00000000..72e7b7d3 --- /dev/null +++ b/config/solr/property_search/enumsconfig.xml @@ -0,0 +1,12 @@ + + + + ONTOLOGY + VALUE_SET_COLLECTION + + + ANNOTATION + DATATYPE + OBJECT + + \ No newline at end of file diff --git a/config/solr/property_search/mapping-ISOLatin1Accent.txt b/config/solr/property_search/mapping-ISOLatin1Accent.txt new file mode 100644 index 00000000..ede77425 --- /dev/null +++ b/config/solr/property_search/mapping-ISOLatin1Accent.txt @@ -0,0 +1,246 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Syntax: +# "source" => "target" +# "source".length() > 0 (source cannot be empty.) +# "target".length() >= 0 (target can be empty.) 
+ +# example: +# "À" => "A" +# "\u00C0" => "A" +# "\u00C0" => "\u0041" +# "ß" => "ss" +# "\t" => " " +# "\n" => "" + +# À => A +"\u00C0" => "A" + +# Á => A +"\u00C1" => "A" + +#  => A +"\u00C2" => "A" + +# à => A +"\u00C3" => "A" + +# Ä => A +"\u00C4" => "A" + +# Å => A +"\u00C5" => "A" + +# Æ => AE +"\u00C6" => "AE" + +# Ç => C +"\u00C7" => "C" + +# È => E +"\u00C8" => "E" + +# É => E +"\u00C9" => "E" + +# Ê => E +"\u00CA" => "E" + +# Ë => E +"\u00CB" => "E" + +# Ì => I +"\u00CC" => "I" + +# Í => I +"\u00CD" => "I" + +# Î => I +"\u00CE" => "I" + +# Ï => I +"\u00CF" => "I" + +# IJ => IJ +"\u0132" => "IJ" + +# Ð => D +"\u00D0" => "D" + +# Ñ => N +"\u00D1" => "N" + +# Ò => O +"\u00D2" => "O" + +# Ó => O +"\u00D3" => "O" + +# Ô => O +"\u00D4" => "O" + +# Õ => O +"\u00D5" => "O" + +# Ö => O +"\u00D6" => "O" + +# Ø => O +"\u00D8" => "O" + +# Œ => OE +"\u0152" => "OE" + +# Þ +"\u00DE" => "TH" + +# Ù => U +"\u00D9" => "U" + +# Ú => U +"\u00DA" => "U" + +# Û => U +"\u00DB" => "U" + +# Ü => U +"\u00DC" => "U" + +# Ý => Y +"\u00DD" => "Y" + +# Ÿ => Y +"\u0178" => "Y" + +# à => a +"\u00E0" => "a" + +# á => a +"\u00E1" => "a" + +# â => a +"\u00E2" => "a" + +# ã => a +"\u00E3" => "a" + +# ä => a +"\u00E4" => "a" + +# å => a +"\u00E5" => "a" + +# æ => ae +"\u00E6" => "ae" + +# ç => c +"\u00E7" => "c" + +# è => e +"\u00E8" => "e" + +# é => e +"\u00E9" => "e" + +# ê => e +"\u00EA" => "e" + +# ë => e +"\u00EB" => "e" + +# ì => i +"\u00EC" => "i" + +# í => i +"\u00ED" => "i" + +# î => i +"\u00EE" => "i" + +# ï => i +"\u00EF" => "i" + +# ij => ij +"\u0133" => "ij" + +# ð => d +"\u00F0" => "d" + +# ñ => n +"\u00F1" => "n" + +# ò => o +"\u00F2" => "o" + +# ó => o +"\u00F3" => "o" + +# ô => o +"\u00F4" => "o" + +# õ => o +"\u00F5" => "o" + +# ö => o +"\u00F6" => "o" + +# ø => o +"\u00F8" => "o" + +# œ => oe +"\u0153" => "oe" + +# ß => ss +"\u00DF" => "ss" + +# þ => th +"\u00FE" => "th" + +# ù => u +"\u00F9" => "u" + +# ú => u +"\u00FA" => "u" + +# û => u +"\u00FB" => "u" + +# ü => u +"\u00FC" => "u" + +# ý => y +"\u00FD" => "y" + +# ÿ => y +"\u00FF" => "y" + +# ff => ff +"\uFB00" => "ff" + +# fi => fi +"\uFB01" => "fi" + +# fl => fl +"\uFB02" => "fl" + +# ffi => ffi +"\uFB03" => "ffi" + +# ffl => ffl +"\uFB04" => "ffl" + +# ſt => ft +"\uFB05" => "ft" + +# st => st +"\uFB06" => "st" diff --git a/config/solr/property_search/schema.xml b/config/solr/property_search/schema.xml new file mode 100644 index 00000000..20824ea6 --- /dev/null +++ b/config/solr/property_search/schema.xml @@ -0,0 +1,1179 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/config/solr/property_search/solrconfig.xml b/config/solr/property_search/solrconfig.xml new file mode 100644 index 00000000..771a0f32 --- /dev/null +++ b/config/solr/property_search/solrconfig.xml @@ -0,0 +1,1299 @@ + + + + + + + + + 8.8.2 + + + + + + + + + + + ${solr.data.dir:} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ${solr.lock.type:native} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ${solr.ulog.dir:} + ${solr.ulog.numVersionBuckets:65536} + + + + + ${solr.autoCommit.maxTime:15000} + false + + + + + + ${solr.autoSoftCommit.maxTime:-1} + + + + + + + + + + + + + + ${solr.max.booleanClauses:500000} + + + + + + + + + + + + + + + + + + + + + + + + true + + + + + + 20 + + + 200 + + + + + + + + + + + + + + + + + + + false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + explicit + 10 + + + + + + + + + + + + + + + + explicit + json + true + + + + + + _text_ + + + + + + + + + text_general + + + + + + default + _text_ + solr.DirectSolrSpellChecker + + internal + + 0.5 + + 2 + + 1 + + 5 + + 4 + + 0.01 + + + + + + + + + + + + default + on + true + 10 + 5 + 5 + true + true + 10 + 5 + + + spellcheck + + + + + + + + + + true + false + + + terms + + + + + + + + + + + 100 + + + + + + + + 70 + + 0.5 + + [-\w ,/\n\"']{20,200} + + + + + + + ]]> + ]]> + + + + + + + + + + + + + + + + + + + + + + + + ,, + ,, + ,, + ,, + ,]]> + ]]> + + + + + + 10 + .,!? + + + + + + + WORD + + + en + US + + + + + + + + + + + + [^\w-\.] 
+ _ + + + + + + + yyyy-MM-dd['T'[HH:mm[:ss[.SSS]][z + yyyy-MM-dd['T'[HH:mm[:ss[,SSS]][z + yyyy-MM-dd HH:mm[:ss[.SSS]][z + yyyy-MM-dd HH:mm[:ss[,SSS]][z + [EEE, ]dd MMM yyyy HH:mm[:ss] z + EEEE, dd-MMM-yy HH:mm:ss z + EEE MMM ppd HH:mm:ss [z ]yyyy + + + + + java.lang.String + text_general + + *_str + 256 + + + true + + + java.lang.Boolean + booleans + + + java.util.Date + pdates + + + java.lang.Long + java.lang.Integer + plongs + + + java.lang.Number + pdoubles + + + + + + + + + + + + + + + + + + + + text/plain; charset=UTF-8 + + + + + + + + + + + + + + diff --git a/config/solr/solr.xml b/config/solr/solr.xml new file mode 100644 index 00000000..d9d089e4 --- /dev/null +++ b/config/solr/solr.xml @@ -0,0 +1,60 @@ + + + + + + + + ${solr.max.booleanClauses:500000} + ${solr.sharedLib:} + ${solr.allowPaths:} + + + + ${host:} + ${solr.port.advertise:0} + ${hostContext:solr} + + ${genericCoreNodeNames:true} + + ${zkClientTimeout:30000} + ${distribUpdateSoTimeout:600000} + ${distribUpdateConnTimeout:60000} + ${zkCredentialsProvider:org.apache.solr.common.cloud.DefaultZkCredentialsProvider} + ${zkACLProvider:org.apache.solr.common.cloud.DefaultZkACLProvider} + + + + + ${socketTimeout:600000} + ${connTimeout:60000} + ${solr.shardsWhitelist:} + + + + + diff --git a/config/solr/term_search/enumsconfig.xml b/config/solr/term_search/enumsconfig.xml new file mode 100644 index 00000000..72e7b7d3 --- /dev/null +++ b/config/solr/term_search/enumsconfig.xml @@ -0,0 +1,12 @@ + + + + ONTOLOGY + VALUE_SET_COLLECTION + + + ANNOTATION + DATATYPE + OBJECT + + \ No newline at end of file diff --git a/config/solr/term_search/mapping-ISOLatin1Accent.txt b/config/solr/term_search/mapping-ISOLatin1Accent.txt new file mode 100644 index 00000000..ede77425 --- /dev/null +++ b/config/solr/term_search/mapping-ISOLatin1Accent.txt @@ -0,0 +1,246 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Syntax: +# "source" => "target" +# "source".length() > 0 (source cannot be empty.) +# "target".length() >= 0 (target can be empty.) 
+ +# example: +# "À" => "A" +# "\u00C0" => "A" +# "\u00C0" => "\u0041" +# "ß" => "ss" +# "\t" => " " +# "\n" => "" + +# À => A +"\u00C0" => "A" + +# Á => A +"\u00C1" => "A" + +#  => A +"\u00C2" => "A" + +# à => A +"\u00C3" => "A" + +# Ä => A +"\u00C4" => "A" + +# Å => A +"\u00C5" => "A" + +# Æ => AE +"\u00C6" => "AE" + +# Ç => C +"\u00C7" => "C" + +# È => E +"\u00C8" => "E" + +# É => E +"\u00C9" => "E" + +# Ê => E +"\u00CA" => "E" + +# Ë => E +"\u00CB" => "E" + +# Ì => I +"\u00CC" => "I" + +# Í => I +"\u00CD" => "I" + +# Î => I +"\u00CE" => "I" + +# Ï => I +"\u00CF" => "I" + +# IJ => IJ +"\u0132" => "IJ" + +# Ð => D +"\u00D0" => "D" + +# Ñ => N +"\u00D1" => "N" + +# Ò => O +"\u00D2" => "O" + +# Ó => O +"\u00D3" => "O" + +# Ô => O +"\u00D4" => "O" + +# Õ => O +"\u00D5" => "O" + +# Ö => O +"\u00D6" => "O" + +# Ø => O +"\u00D8" => "O" + +# Œ => OE +"\u0152" => "OE" + +# Þ +"\u00DE" => "TH" + +# Ù => U +"\u00D9" => "U" + +# Ú => U +"\u00DA" => "U" + +# Û => U +"\u00DB" => "U" + +# Ü => U +"\u00DC" => "U" + +# Ý => Y +"\u00DD" => "Y" + +# Ÿ => Y +"\u0178" => "Y" + +# à => a +"\u00E0" => "a" + +# á => a +"\u00E1" => "a" + +# â => a +"\u00E2" => "a" + +# ã => a +"\u00E3" => "a" + +# ä => a +"\u00E4" => "a" + +# å => a +"\u00E5" => "a" + +# æ => ae +"\u00E6" => "ae" + +# ç => c +"\u00E7" => "c" + +# è => e +"\u00E8" => "e" + +# é => e +"\u00E9" => "e" + +# ê => e +"\u00EA" => "e" + +# ë => e +"\u00EB" => "e" + +# ì => i +"\u00EC" => "i" + +# í => i +"\u00ED" => "i" + +# î => i +"\u00EE" => "i" + +# ï => i +"\u00EF" => "i" + +# ij => ij +"\u0133" => "ij" + +# ð => d +"\u00F0" => "d" + +# ñ => n +"\u00F1" => "n" + +# ò => o +"\u00F2" => "o" + +# ó => o +"\u00F3" => "o" + +# ô => o +"\u00F4" => "o" + +# õ => o +"\u00F5" => "o" + +# ö => o +"\u00F6" => "o" + +# ø => o +"\u00F8" => "o" + +# œ => oe +"\u0153" => "oe" + +# ß => ss +"\u00DF" => "ss" + +# þ => th +"\u00FE" => "th" + +# ù => u +"\u00F9" => "u" + +# ú => u +"\u00FA" => "u" + +# û => u +"\u00FB" => "u" + +# ü => u +"\u00FC" => "u" + +# ý => y +"\u00FD" => "y" + +# ÿ => y +"\u00FF" => "y" + +# ff => ff +"\uFB00" => "ff" + +# fi => fi +"\uFB01" => "fi" + +# fl => fl +"\uFB02" => "fl" + +# ffi => ffi +"\uFB03" => "ffi" + +# ffl => ffl +"\uFB04" => "ffl" + +# ſt => ft +"\uFB05" => "ft" + +# st => st +"\uFB06" => "st" diff --git a/config/solr/term_search/schema.xml b/config/solr/term_search/schema.xml new file mode 100644 index 00000000..fa95e127 --- /dev/null +++ b/config/solr/term_search/schema.xml @@ -0,0 +1,1222 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/config/solr/term_search/solrconfig.xml b/config/solr/term_search/solrconfig.xml new file mode 100644 index 00000000..771a0f32 --- /dev/null +++ b/config/solr/term_search/solrconfig.xml @@ -0,0 +1,1299 @@ + + + + + + + + + 8.8.2 + + + + + + + + + + + ${solr.data.dir:} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ${solr.lock.type:native} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ${solr.ulog.dir:} + ${solr.ulog.numVersionBuckets:65536} + + + + + ${solr.autoCommit.maxTime:15000} + false + + + + + + ${solr.autoSoftCommit.maxTime:-1} + + + + + + + + + + + + + + ${solr.max.booleanClauses:500000} + + + + + + + + + + + + + + + + + + + + + + + + true + + + + + + 20 + + + 200 + + + + + + + + + + + + + + + + + + + false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + explicit + 10 + + + + + + + + + + + + + + + + explicit + json + true + + + + + + _text_ + + + + + + + + + text_general + + + + + + default + _text_ + solr.DirectSolrSpellChecker + + internal + + 0.5 + + 2 + + 1 + + 5 + + 4 + + 0.01 + + + + + + + + + + + + default + on + true + 10 + 5 + 5 + true + true + 10 + 5 + + + spellcheck + + + + + + + + + + true + false + + + terms + + + + + + + + + + + 100 + + + + + + + + 70 + + 0.5 + + [-\w ,/\n\"']{20,200} + + + + + + + ]]> + ]]> + + + + + + + + + + + + + + + + + + + + + + + + ,, + ,, + ,, + ,, + ,]]> + ]]> + + + + + + 10 + .,!? + + + + + + + WORD + + + en + US + + + + + + + + + + + + [^\w-\.] + _ + + + + + + + yyyy-MM-dd['T'[HH:mm[:ss[.SSS]][z + yyyy-MM-dd['T'[HH:mm[:ss[,SSS]][z + yyyy-MM-dd HH:mm[:ss[.SSS]][z + yyyy-MM-dd HH:mm[:ss[,SSS]][z + [EEE, ]dd MMM yyyy HH:mm[:ss] z + EEEE, dd-MMM-yy HH:mm:ss z + EEE MMM ppd HH:mm:ss [z ]yyyy + + + + + java.lang.String + text_general + + *_str + 256 + + + true + + + java.lang.Boolean + booleans + + + java.util.Date + pdates + + + java.lang.Long + java.lang.Integer + plongs + + + java.lang.Number + pdoubles + + + + + + + + + + + + + + + + + + + + text/plain; charset=UTF-8 + + + + + + + + + + + + + + diff --git a/controllers/admin_controller.rb b/controllers/admin_controller.rb index 7ae6d800..747def93 100644 --- a/controllers/admin_controller.rb +++ b/controllers/admin_controller.rb @@ -68,7 +68,7 @@ class AdminController < ApplicationController latest = ont.latest_submission(status: :any) error 404, "Ontology #{params["acronym"]} contains no submissions" if latest.nil? 
check_last_modified(latest) - latest.bring(*OntologySubmission.goo_attrs_to_load(includes_param)) + latest.bring(*submission_include_params) NcboCron::Models::OntologySubmissionParser.new.queue_submission(latest, actions) halt 204 end @@ -84,7 +84,7 @@ class AdminController < ApplicationController latest = ont.latest_submission(status: :any) end check_last_modified(latest) if latest - latest.bring(*OntologySubmission.goo_attrs_to_load(includes_param)) if latest + latest.bring(*submission_include_params) if latest reply(latest || {}) end diff --git a/controllers/ontologies_controller.rb b/controllers/ontologies_controller.rb index 99c0ce68..58518420 100644 --- a/controllers/ontologies_controller.rb +++ b/controllers/ontologies_controller.rb @@ -38,22 +38,12 @@ class OntologiesController < ApplicationController else latest = ont.latest_submission(status: :any) end - check_last_modified(latest) if latest - # When asking to display all metadata, we are using bring_remaining which is more performant than including all metadata (remove this when the query to get metadata will be fixed) - if latest - if includes_param.first == :all - # Bring what we need to display all attr of the submission - latest.bring_remaining - latest.bring(*submission_attributes_all) - else - includes = OntologySubmission.goo_attrs_to_load(includes_param) - - includes << {:contact=>[:name, :email]} if includes.find{|v| v.is_a?(Hash) && v.keys.first.eql?(:contact)} - latest.bring(*includes) - end + if latest + check_last_modified(latest) + latest.bring(*submission_include_params) end - #remove the whole previous if block and replace by it: latest.bring(*OntologySubmission.goo_attrs_to_load(includes_param)) if latest + reply(latest || {}) end @@ -63,7 +53,7 @@ class OntologiesController < ApplicationController patch '/:acronym/latest_submission' do ont = Ontology.find(params["acronym"]).first error 422, "You must provide an existing `acronym` to patch" if ont.nil? 
- + submission = ont.latest_submission(status: :any) submission.bring(*OntologySubmission.attributes) diff --git a/controllers/ontology_submissions_controller.rb b/controllers/ontology_submissions_controller.rb index 7de7a3dc..0068a5f1 100644 --- a/controllers/ontology_submissions_controller.rb +++ b/controllers/ontology_submissions_controller.rb @@ -29,17 +29,14 @@ class OntologySubmissionsController < ApplicationController error 422, "Ontology #{params["acronym"]} does not exist" unless ont check_last_modified_segment(LinkedData::Models::OntologySubmission, [ont.acronym]) check_access(ont) - if includes_param.first == :all - # When asking to display all metadata, we are using bring_remaining which is more performant than including all metadata (remove this when the query to get metadata will be fixed) - ont.bring(submission_attributes_all) - - ont.submissions.each do |sub| - sub.bring_remaining - end - else - ont.bring(submissions: OntologySubmission.goo_attrs_to_load(includes_param)) - end - reply ont.submissions.sort {|a,b| b.submissionId.to_i <=> a.submissionId.to_i } # descending order of submissionId + options = { + also_include_views: true, + status: (params["include_status"] || "ANY"), + ontology: params["acronym"] + } + subs = retrieve_submissions(options) + + reply subs.sort {|a,b| b.submissionId.to_i <=> a.submissionId.to_i } # descending order of submissionId end ## @@ -58,7 +55,7 @@ class OntologySubmissionsController < ApplicationController ont.bring(:submissions) ont_submission = ont.submission(params["ontology_submission_id"]) error 404, "`submissionId` not found" if ont_submission.nil? - ont_submission.bring(*OntologySubmission.goo_attrs_to_load(includes_param)) + ont_submission.bring(*submission_include_params) reply ont_submission end diff --git a/helpers/application_helper.rb b/helpers/application_helper.rb index 5d6d1b0a..172170fa 100644 --- a/helpers/application_helper.rb +++ b/helpers/application_helper.rb @@ -31,6 +31,7 @@ def populate_from_params(obj, params) # Deal with empty strings for String and URI empty_string = value.is_a?(String) && value.empty? old_string_value_exists = obj.respond_to?(attribute) && (obj.send(attribute).is_a?(String) || obj.send(attribute).is_a?(RDF::URI)) + old_string_value_exists = old_string_value_exists || (obj.respond_to?(attribute) && obj.send(attribute).is_a?(LinkedData::Models::Base)) if old_string_value_exists && empty_string value = nil elsif empty_string @@ -63,7 +64,7 @@ def populate_from_params(obj, params) # Replace the initial value with the object, handling Arrays as appropriate if value.is_a?(Array) value = value.map {|e| attr_cls.find(uri_as_needed(e)).include(attr_cls.attributes).first} - else + elsif !value.nil? value = attr_cls.find(uri_as_needed(value)).include(attr_cls.attributes).first end elsif attr_cls @@ -363,8 +364,6 @@ def retrieve_latest_submissions(options = {}) latest_submissions = page? ? submissions : {} # latest_submission doest not work with pagination submissions.each do |sub| - # To retrieve all metadata, but slow when a lot of ontologies - sub.bring_remaining if includes_param.first == :all unless page? next if include_ready?(options) && !sub.ready? next if sub.ontology.nil? 
diff --git a/helpers/request_params_helper.rb b/helpers/request_params_helper.rb index 45091042..842ee0a7 100644 --- a/helpers/request_params_helper.rb +++ b/helpers/request_params_helper.rb @@ -42,7 +42,6 @@ def apply_submission_filters(query) hasOntologyLanguage_acronym: params[:hasOntologyLanguage]&.split(',') , #%w[OWL SKOS], ontology_hasDomain_acronym: params[:hasDomain]&.split(',') , #%w[Crop Vue_francais], ontology_group_acronym: params[:group]&.split(','), #%w[RICE CROP], - ontology_name: Array(params[:name]) + Array(params[:name]&.capitalize), isOfType: params[:isOfType]&.split(','), #["http://omv.ontoware.org/2005/05/ontology#Vocabulary"], hasFormalityLevel: params[:hasFormalityLevel]&.split(','), #["http://w3id.org/nkos/nkostype#thesaurus"], ontology_viewingRestriction: params[:viewingRestriction]&.split(','), #["private"] diff --git a/helpers/search_helper.rb b/helpers/search_helper.rb index 5d37d884..071000d9 100644 --- a/helpers/search_helper.rb +++ b/helpers/search_helper.rb @@ -82,6 +82,9 @@ def get_term_search_query(text, params={}) end end + lang = params["lang"] || params["language"] + lang_suffix = lang && !lang.eql?("all") ? "_#{lang}" : "" + query = "" params["defType"] = "edismax" params["stopwords"] = "true" @@ -98,15 +101,15 @@ def get_term_search_query(text, params={}) if params[EXACT_MATCH_PARAM] == "true" query = "\"#{solr_escape(text)}\"" - params["qf"] = "resource_id^20 prefLabelExact^10 synonymExact #{QUERYLESS_FIELDS_STR}" - params["hl.fl"] = "resource_id prefLabelExact synonymExact #{QUERYLESS_FIELDS_STR}" + params["qf"] = "resource_id^20 prefLabelExact#{lang_suffix }^10 synonymExact#{lang_suffix } #{QUERYLESS_FIELDS_STR}" + params["hl.fl"] = "resource_id prefLabelExact#{lang_suffix } synonymExact#{lang_suffix } #{QUERYLESS_FIELDS_STR}" elsif params[SUGGEST_PARAM] == "true" || text[-1] == '*' text.gsub!(/\*+$/, '') query = "\"#{solr_escape(text)}\"" params["qt"] = "/suggest_ncbo" - params["qf"] = "prefLabelExact^100 prefLabelSuggestEdge^50 synonymSuggestEdge^10 prefLabelSuggestNgram synonymSuggestNgram resource_id #{QUERYLESS_FIELDS_STR}" + params["qf"] = "prefLabelExact#{lang_suffix }^100 prefLabelSuggestEdge^50 synonymSuggestEdge^10 prefLabelSuggestNgram synonymSuggestNgram resource_id #{QUERYLESS_FIELDS_STR}" params["pf"] = "prefLabelSuggest^50" - params["hl.fl"] = "prefLabelExact prefLabelSuggestEdge synonymSuggestEdge prefLabelSuggestNgram synonymSuggestNgram resource_id #{QUERYLESS_FIELDS_STR}" + params["hl.fl"] = "prefLabelExact#{lang_suffix } prefLabelSuggestEdge synonymSuggestEdge prefLabelSuggestNgram synonymSuggestNgram resource_id #{QUERYLESS_FIELDS_STR}" else if text.strip.empty? 
query = '*' @@ -114,9 +117,9 @@ def get_term_search_query(text, params={}) query = solr_escape(text) end - params["qf"] = "resource_id^100 prefLabelExact^90 prefLabel^70 synonymExact^50 synonym^10 #{QUERYLESS_FIELDS_STR}" + params["qf"] = "resource_id^100 prefLabelExact#{lang_suffix }^90 prefLabel#{lang_suffix }^70 synonymExact#{lang_suffix }^50 synonym#{lang_suffix }^10 #{QUERYLESS_FIELDS_STR}" params["qf"] << " property" if params[INCLUDE_PROPERTIES_PARAM] == "true" - params["hl.fl"] = "resource_id prefLabelExact prefLabel synonymExact synonym #{QUERYLESS_FIELDS_STR}" + params["hl.fl"] = "resource_id prefLabelExact#{lang_suffix } prefLabel#{lang_suffix } synonymExact#{lang_suffix } synonym#{lang_suffix } #{QUERYLESS_FIELDS_STR}" params["hl.fl"] = "#{params["hl.fl"]} property" if params[INCLUDE_PROPERTIES_PARAM] == "true" end diff --git a/helpers/submission_helper.rb b/helpers/submission_helper.rb index c1ee5dd3..b79737d0 100644 --- a/helpers/submission_helper.rb +++ b/helpers/submission_helper.rb @@ -3,6 +3,26 @@ module Sinatra module Helpers module SubmissionHelper + def submission_include_params + # When asking to display all metadata, we are using bring_remaining on each submission. Slower but best way to retrieve all attrs + includes = OntologySubmission.goo_attrs_to_load(includes_param) + if includes.find{|v| v.is_a?(Hash) && v.keys.include?(:ontology)} + includes << {:ontology=>[:administeredBy, :acronym, :name, :viewingRestriction, :group, :hasDomain,:notes, :reviews, :projects,:acl, :viewOf]} + end + + if includes.find{|v| v.is_a?(Hash) && v.keys.include?(:contact)} + includes << {:contact=>[:name, :email]} + end + + if includes.find{|v| v.is_a?(Hash) && v.keys.include?(:metrics)} + includes << { metrics: [:maxChildCount, :properties, :classesWithMoreThan25Children, + :classesWithOneChild, :individuals, :maxDepth, :classes, + :classesWithNoDefinition, :averageChildCount, :numberOfAxioms, + :entities]} + end + + includes + end def submission_attributes_all out = [LinkedData::Models::OntologySubmission.embed_values_hash] @@ -16,14 +36,17 @@ def submission_attributes_all def retrieve_submissions(options) status = (options[:status] || "RDF").to_s.upcase status = "RDF" if status.eql?("READY") + ontology_acronym = options[:ontology] any = status.eql?("ANY") include_views = options[:also_include_views] || false includes, page, size, order_by, _ = settings_params(LinkedData::Models::OntologySubmission) includes << :submissionStatus unless includes.include?(:submissionStatus) submissions_query = LinkedData::Models::OntologySubmission + submissions_query = submissions_query.where(ontology: [acronym: ontology_acronym]) if ontology_acronym + if any - submissions_query = submissions_query.where + submissions_query = submissions_query.where unless ontology_acronym else submissions_query = submissions_query.where({ submissionStatus: [code: status] }) end @@ -32,24 +55,8 @@ def retrieve_submissions(options) submissions_query = submissions_query.filter(Goo::Filter.new(ontology: [:viewOf]).unbound) unless include_views submissions_query = submissions_query.filter(filter) if filter? - # When asking to display all metadata, we are using bring_remaining on each submission. 
Slower but best way to retrieve all attrs - if includes_param.first == :all - includes = [:submissionId, { :contact => [:name, :email], - :ontology => [:administeredBy, :acronym, :name, :summaryOnly, :ontologyType, :viewingRestriction, :acl, - :group, :hasDomain, :views, :viewOf, :flat, :notes, :reviews, :projects], - :submissionStatus => [:code], :hasOntologyLanguage => [:acronym], :metrics => [:classes, :individuals, :properties] }, - :submissionStatus] - else - if includes.find { |v| v.is_a?(Hash) && v.keys.include?(:ontology) } - includes << { :ontology => [:administeredBy, :acronym, :name, :viewingRestriction, :group, :hasDomain, :notes, :reviews, :projects, :acl, :viewOf] } - end - - if includes.find { |v| v.is_a?(Hash) && v.keys.include?(:contact) } - includes << { :contact => [:name, :email] } - end - end - submissions = submissions_query.include(includes) + submissions = submissions_query.include(submission_include_params) if page? submissions.page(page, size).all else diff --git a/test/controllers/test_ontologies_controller.rb b/test/controllers/test_ontologies_controller.rb index dc79359b..34f8c4dc 100644 --- a/test/controllers/test_ontologies_controller.rb +++ b/test/controllers/test_ontologies_controller.rb @@ -254,6 +254,32 @@ def test_download_acl_only end + def test_detach_a_view + view = Ontology.find(@@view_acronym).include(:viewOf).first + ont = view.viewOf + refute_nil view + refute_nil ont + + remove_view_of = {viewOf: ''} + patch "/ontologies/#{@@view_acronym}", MultiJson.dump(remove_view_of), "CONTENT_TYPE" => "application/json" + + assert last_response.status == 204 + + get "/ontologies/#{@@view_acronym}" + onto = MultiJson.load(last_response.body) + assert_nil onto["viewOf"] + + + add_view_of = {viewOf: @@acronym} + patch "/ontologies/#{@@view_acronym}", MultiJson.dump(add_view_of), "CONTENT_TYPE" => "application/json" + + assert last_response.status == 204 + + get "/ontologies/#{@@view_acronym}" + onto = MultiJson.load(last_response.body) + assert_equal onto["viewOf"], ont.id.to_s + end + private def check400(response) diff --git a/test/controllers/test_ontology_submissions_controller.rb b/test/controllers/test_ontology_submissions_controller.rb index 9ee81257..58359451 100644 --- a/test/controllers/test_ontology_submissions_controller.rb +++ b/test/controllers/test_ontology_submissions_controller.rb @@ -39,6 +39,12 @@ def self._create_onts ont.save end + def setup + delete_ontologies_and_submissions + ont = Ontology.new(acronym: @@acronym, name: @@name, administeredBy: [@@user]) + ont.save + end + def test_submissions_for_given_ontology num_onts_created, created_ont_acronyms = create_ontologies_and_submissions(ont_count: 1) ontology = created_ont_acronyms.first @@ -196,7 +202,7 @@ def test_download_acl_only end def test_submissions_pagination - num_onts_created, created_ont_acronyms = create_ontologies_and_submissions(ont_count: 2, submission_count: 2) + num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: 2, submission_count: 2) get "/submissions" assert last_response.ok? 
@@ -210,4 +216,247 @@ def test_submissions_pagination submissions = MultiJson.load(last_response.body) assert_equal 1, submissions["collection"].length end + + def test_submissions_pagination_filter + num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: 10, submission_count: 1) + group1 = LinkedData::Models::Group.new(acronym: 'group-1', name: "Test Group 1").save + group2 = LinkedData::Models::Group.new(acronym: 'group-2', name: "Test Group 2").save + category1 = LinkedData::Models::Category.new(acronym: 'category-1', name: "Test Category 1").save + category2 = LinkedData::Models::Category.new(acronym: 'category-2', name: "Test Category 2").save + + ontologies1 = ontologies[0..5].each do |o| + o.bring_remaining + o.group = [group1] + o.hasDomain = [category1] + o.save + end + + ontologies2 = ontologies[6..8].each do |o| + o.bring_remaining + o.group = [group2] + o.hasDomain = [category2] + o.save + end + + + + # test filter by group and category + get "/submissions?page=1&pagesize=100&group=#{group1.acronym}" + assert last_response.ok? + assert_equal ontologies1.size, MultiJson.load(last_response.body)["collection"].length + get "/submissions?page=1&pagesize=100&group=#{group2.acronym}" + assert last_response.ok? + assert_equal ontologies2.size, MultiJson.load(last_response.body)["collection"].length + get "/submissions?page=1&pagesize=100&hasDomain=#{category1.acronym}" + assert last_response.ok? + assert_equal ontologies1.size, MultiJson.load(last_response.body)["collection"].length + get "/submissions?page=1&pagesize=100&hasDomain=#{category2.acronym}" + assert last_response.ok? + assert_equal ontologies2.size, MultiJson.load(last_response.body)["collection"].length + get "/submissions?page=1&pagesize=100&hasDomain=#{category2.acronym}&group=#{group1.acronym}" + assert last_response.ok? + assert_equal 0, MultiJson.load(last_response.body)["collection"].length + get "/submissions?page=1&pagesize=100&hasDomain=#{category2.acronym}&group=#{group2.acronym}" + assert last_response.ok? + assert_equal ontologies2.size, MultiJson.load(last_response.body)["collection"].length + + ontologies3 = ontologies[9] + ontologies3.bring_remaining + ontologies3.group = [group1, group2] + ontologies3.hasDomain = [category1, category2] + ontologies3.name = "name search test" + ontologies3.save + + # test search with acronym + [ + [ 1, ontologies.first.acronym], + [ 1, ontologies.last.acronym], + [ontologies.size, 'TEST-ONT'] + ].each do |count, acronym_search| + get "/submissions?page=1&pagesize=100&acronym=#{acronym_search}" + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + assert_equal count, submissions["collection"].length + end + + + # test search with name + [ + [ 1, ontologies.first.name], + [ 1, ontologies.last.name], + [ontologies.size - 1, 'TEST-ONT'] + ].each do |count, name_search| + get "/submissions?page=1&pagesize=100&name=#{name_search}" + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + binding.pry unless submissions["collection"].length.eql?(count) + assert_equal count, submissions["collection"].length + end + + # test search with name and acronym + # search by name + get "/submissions?page=1&pagesize=100&name=search&acronym=search" + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + assert_equal 1, submissions["collection"].length + # search by acronym + get "/submissions?page=1&pagesize=100&name=9&acronym=9" + assert last_response.ok? 
+ submissions = MultiJson.load(last_response.body) + assert_equal 1, submissions["collection"].length + # search by acronym or name + get "/submissions?page=1&pagesize=100&name=search&acronym=8" + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + assert_equal 2, submissions["collection"].length + + ontologies.first.name = "sort by test" + ontologies.first.save + sub = ontologies.first.latest_submission(status: :any).bring_remaining + sub.creationDate = DateTime.yesterday.to_datetime + sub.hasOntologyLanguage = LinkedData::Models::OntologyFormat.find('SKOS').first + sub.save + + #test search with sort + get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&order_by=ontology_name" + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + refute_empty submissions["collection"] + assert_equal ontologies.map{|x| x.name}.sort, submissions["collection"].map{|x| x["ontology"]["name"]} + + get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&order_by=creationDate" + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + refute_empty submissions["collection"] + assert_equal ontologies.map{|x| x.latest_submission(status: :any).bring(:creationDate).creationDate}.sort.map(&:to_s), submissions["collection"].map{|x| x["creationDate"]}.reverse + + # test search with format + get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&hasOntologyLanguage=SKOS" + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + refute_empty submissions["collection"] + assert_equal 1, submissions["collection"].size + + get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&hasOntologyLanguage=OWL" + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + refute_empty submissions["collection"] + assert_equal ontologies.size-1 , submissions["collection"].size + + # test ontology filter with submission filter attributes + get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&group=group-2&category=category-2&hasOntologyLanguage=OWL" + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + refute_empty submissions["collection"] + assert_equal ontologies2.size + 1 , submissions["collection"].size + end + + def test_submissions_default_includes + ontology_count = 5 + num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: ontology_count, submission_count: 1, submissions_to_process: []) + + submission_default_attributes = LinkedData::Models::OntologySubmission.hypermedia_settings[:serialize_default].map(&:to_s) + + get("/submissions?display_links=false&display_context=false&include_status=ANY") + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + + assert_equal ontology_count, submissions.size + assert(submissions.all? { |sub| submission_default_attributes.eql?(submission_keys(sub)) }) + + get("/ontologies/#{created_ont_acronyms.first}/submissions?display_links=false&display_context=false") + + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + assert_equal 1, submissions.size + assert(submissions.all? 
{ |sub| submission_default_attributes.eql?(submission_keys(sub)) }) + end + + def test_submissions_all_includes + ontology_count = 5 + num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: ontology_count, submission_count: 1, submissions_to_process: []) + def submission_all_attributes + attrs = OntologySubmission.goo_attrs_to_load([:all]) + embed_attrs = attrs.select { |x| x.is_a?(Hash) }.first + + attrs.delete_if { |x| x.is_a?(Hash) }.map(&:to_s) + embed_attrs.keys.map(&:to_s) + end + get("/submissions?include=all&display_links=false&display_context=false") + + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + assert_equal ontology_count, submissions.size + + assert(submissions.all? { |sub| submission_all_attributes.sort.eql?(submission_keys(sub).sort) }) + assert(submissions.all? { |sub| sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])) }) + + get("/ontologies/#{created_ont_acronyms.first}/submissions?include=all&display_links=false&display_context=false") + + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + assert_equal 1, submissions.size + + assert(submissions.all? { |sub| submission_all_attributes.sort.eql?(submission_keys(sub).sort) }) + assert(submissions.all? { |sub| sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])) }) + + get("/ontologies/#{created_ont_acronyms.first}/latest_submission?include=all&display_links=false&display_context=false") + assert last_response.ok? + sub = MultiJson.load(last_response.body) + + assert(submission_all_attributes.sort.eql?(submission_keys(sub).sort)) + assert(sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id]))) + + get("/ontologies/#{created_ont_acronyms.first}/submissions/1?include=all&display_links=false&display_context=false") + assert last_response.ok? + sub = MultiJson.load(last_response.body) + + assert(submission_all_attributes.sort.eql?(submission_keys(sub).sort)) + assert(sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id]))) + end + + def test_submissions_custom_includes + ontology_count = 5 + num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: ontology_count, submission_count: 1, submissions_to_process: []) + include = 'ontology,contact,submissionId' + + get("/submissions?include=#{include}&display_links=false&display_context=false") + + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + assert_equal ontology_count, submissions.size + assert(submissions.all? { |sub| include.split(',').eql?(submission_keys(sub)) }) + assert(submissions.all? { |sub| sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])) }) + + get("/ontologies/#{created_ont_acronyms.first}/submissions?include=#{include}&display_links=false&display_context=false") + + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + assert_equal 1, submissions.size + assert(submissions.all? { |sub| include.split(',').eql?(submission_keys(sub)) }) + assert(submissions.all? { |sub| sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])) }) + + get("/ontologies/#{created_ont_acronyms.first}/latest_submission?include=#{include}&display_links=false&display_context=false") + assert last_response.ok? 
+ sub = MultiJson.load(last_response.body) + assert(include.split(',').eql?(submission_keys(sub))) + assert(sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id]))) + + get("/ontologies/#{created_ont_acronyms.first}/submissions/1?include=#{include}&display_links=false&display_context=false") + assert last_response.ok? + sub = MultiJson.load(last_response.body) + assert(include.split(',').eql?(submission_keys(sub))) + assert(sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id]))) + end + + def test_submissions_param_include + skip('only for local development regrouping a set of tests') + test_submissions_default_includes + test_submissions_all_includes + test_submissions_custom_includes + end + + private + def submission_keys(sub) + sub.to_hash.keys - %w[@id @type id] + end end diff --git a/test/controllers/test_search_controller.rb b/test/controllers/test_search_controller.rb index 21a3dd18..74be75d2 100644 --- a/test/controllers/test_search_controller.rb +++ b/test/controllers/test_search_controller.rb @@ -213,4 +213,48 @@ def test_search_provisional_class assert_equal @@test_pc_child.label, provisional[0]["prefLabel"].first end + def test_multilingual_search + get "/search?q=Activity&ontologies=BROSEARCHTEST-0" + res = MultiJson.load(last_response.body) + refute_equal 0, res["totalCount"] + + doc = res["collection"].select{|doc| doc["@id"].to_s.eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + refute_nil doc + + #res = LinkedData::Models::Class.search("prefLabel_none:Activity", {:fq => "submissionAcronym:BROSEARCHTEST-0", :start => 0, :rows => 80}, :main) + #refute_equal 0, res["response"]["numFound"] + #refute_nil res["response"]["docs"].select{|doc| doc["resource_id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + + get "/search?q=Activit%C3%A9&ontologies=BROSEARCHTEST-0&lang=fr" + res = MultiJson.load(last_response.body) + refute_equal 0, res["totalCount"] + refute_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + + + + get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=en" + res = MultiJson.load(last_response.body) + refute_equal 0, res["totalCount"] + refute_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + + + get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=fr&require_exact_match=true" + res = MultiJson.load(last_response.body) + assert_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + + get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=en&require_exact_match=true" + res = MultiJson.load(last_response.body) + refute_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + + get "/search?q=Activity&ontologies=BROSEARCHTEST-0&lang=en&require_exact_match=true" + res = MultiJson.load(last_response.body) + assert_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + + get "/search?q=Activit%C3%A9&ontologies=BROSEARCHTEST-0&lang=fr&require_exact_match=true" + res = MultiJson.load(last_response.body) + refute_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + + + end + end diff --git 
a/test/data/ontology_files/BRO_v3.2.owl b/test/data/ontology_files/BRO_v3.2.owl index d64075cc..b2aeccf5 100644 --- a/test/data/ontology_files/BRO_v3.2.owl +++ b/test/data/ontology_files/BRO_v3.2.owl @@ -631,6 +631,9 @@ Activity + Activity + ActivityEnglish + Activité Activity of interest that may be related to a BRO:Resource. activities diff --git a/test/solr/docker-compose.yml b/test/solr/docker-compose.yml new file mode 100644 index 00000000..3ddae69c --- /dev/null +++ b/test/solr/docker-compose.yml @@ -0,0 +1,13 @@ +version: '3.8' + +services: + op_solr: + image: solr:8.8 + volumes: + - ./solr_configsets:/configsets:ro + ports: + - "8983:8983" + command: > + bash -c "precreate-core term_search_core1 /configsets/term_search + && precreate-core prop_search_core1 /configsets/property_search + && solr-foreground" diff --git a/test/solr/generate_ncbo_configsets.sh b/test/solr/generate_ncbo_configsets.sh index 893f7f3a..7b4281f7 100755 --- a/test/solr/generate_ncbo_configsets.sh +++ b/test/solr/generate_ncbo_configsets.sh @@ -2,18 +2,23 @@ # generates solr configsets by merging _default configset with config files in config/solr # _default is copied from sorl distribuion solr-8.10.1/server/solr/configsets/_default/ -pushd solr/configsets -ld_config='../../../../ontologies_linked_data/config/solr/' -#ld_config='../../../../config/solr/' -ls -l $ld_config -pwd -[ -d property_search ] && rm -Rf property_search -[ -d term_search ] && rm -Rf property_search -[ -d $ld_config/property_search ] || echo "cant find ontologies_linked_data project" -mkdir -p property_search/conf -mkdir -p term_search/conf -cp -a _default/conf/* property_search/conf/ -cp -a _default/conf/* term_search/conf/ -cp -a $ld_config/property_search/* property_search/conf -cp -a $ld_config/term_search/* term_search/conf -popd +#cd solr/configsets +ld_config='config/solr' +configsets='test/solr/configsets' +[ -d ${configsets}/property_search ] && rm -Rf ${configsets}/property_search +[ -d ${configsets}/term_search ] && rm -Rf ${configsets}/term_search +if [[ ! -d ${ld_config}/property_search ]]; then + echo 'cant find ld solr config sets' + exit 1 +fi +if [[ ! -d ${configsets}/_default/conf ]]; then + echo 'cant find default solr configset' + exit 1 +fi +mkdir -p ${configsets}/property_search/conf +mkdir -p ${configsets}/term_search/conf +cp -a ${configsets}/_default/conf/* ${configsets}/property_search/conf/ +cp -a ${configsets}/_default/conf/* ${configsets}/term_search/conf/ +cp -a $ld_config/property_search/* ${configsets}/property_search/conf +cp -a $ld_config/term_search/* ${configsets}/term_search/conf +
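
A minimal sketch of how the new Solr test environment and the reworked submission endpoints from this change can be exercised locally. It assumes the repository root as the working directory and uses $API_URL as a placeholder for wherever the ontologies_api instance is listening; the core names and Solr port come from test/solr/docker-compose.yml above, and the query parameters come from the submission filters, include handling, and lang-aware search added in this patch.

# build the term_search/property_search configsets from config/solr,
# then start Solr 8.8 with both cores precreated (term_search_core1, prop_search_core1)
./test/solr/generate_ncbo_configsets.sh
docker compose -f test/solr/docker-compose.yml up -d
curl 'http://localhost:8983/solr/term_search_core1/admin/ping'

# paginated submissions with ontology-level filters and custom includes,
# as exercised in test_submissions_pagination_filter / test_submissions_custom_includes
curl "$API_URL/submissions?page=1&pagesize=100&group=group-1&hasOntologyLanguage=OWL&include=ontology,contact,submissionId"

# language-aware term search: lang (or language) selects the suffixed Solr fields
# (e.g. prefLabel_fr, synonymExact_fr), as exercised in test_multilingual_search
curl "$API_URL/search?q=Activit%C3%A9&ontologies=BROSEARCHTEST-0&lang=fr"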