From 452c5e27478168cbb6b51251729f41027bc860df Mon Sep 17 00:00:00 2001 From: Bilel Kihal <61744974+Bilelkihal@users.noreply.github.com> Date: Tue, 21 May 2024 15:23:10 +0200 Subject: [PATCH 01/13] Feature: mappings statistics slices support (#78) * restrict mapping statistics ontologies to the ontologies of the current slice * add a test for the mappings slices support * add test for mappings statistics slices support --- Gemfile | 4 ++-- Gemfile.lock | 53 +++++++++++++++++++++++++++------------------------- 2 files changed, 30 insertions(+), 27 deletions(-) diff --git a/Gemfile b/Gemfile index bd445a1e..adaeb43a 100644 --- a/Gemfile +++ b/Gemfile @@ -6,7 +6,7 @@ gem 'bigdecimal', '1.4.2' gem 'faraday', '~> 1.9' gem 'json-schema', '~> 2.0' gem 'multi_json', '~> 1.0' -gem 'oj', '~> 2.0' +gem 'oj' gem 'parseconfig' gem 'rack' gem 'rake', '~> 10.0' @@ -77,4 +77,4 @@ group :test do gem 'simplecov', require: false gem 'simplecov-cobertura' # for codecov.io gem 'webmock', '~> 3.19.1' -end \ No newline at end of file +end diff --git a/Gemfile.lock b/Gemfile.lock index dc2272a6..023fd1b4 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -40,7 +40,7 @@ GIT GIT remote: https://github.com/ontoportal-lirmm/ncbo_cron.git - revision: 5a6af32adc867ff0741d81b7a7162c3f34f45ade + revision: 6bb53a13f514a60513afe25e37c5c69475140452 branch: master specs: ncbo_cron (0.0.1) @@ -57,7 +57,7 @@ GIT GIT remote: https://github.com/ontoportal-lirmm/ontologies_linked_data.git - revision: 0855adfce5365c6ef7a494e3d3ad9af2611974d9 + revision: a5b56a68e6dc8ecfc9db708d44350342dac38ce6 branch: development specs: ontologies_linked_data (0.0.1) @@ -106,8 +106,8 @@ GEM activesupport (3.2.22.5) i18n (~> 0.6, >= 0.6.4) multi_json (~> 1.0) - addressable (2.8.6) - public_suffix (>= 2.0.2, < 6.0) + addressable (2.8.7) + public_suffix (>= 2.0.2, < 7.0) airbrussh (1.5.2) sshkit (>= 1.6.1, != 1.7.0) backports (3.25.0) @@ -116,8 +116,8 @@ GEM bcrypt_pbkdf (1.1.1) bcrypt_pbkdf (1.1.1-x86_64-darwin) bigdecimal (1.4.2) - builder (3.2.4) - capistrano (3.18.1) + builder (3.3.0) + capistrano (3.19.0) airbrussh (>= 1.0.0) i18n rake (>= 10.0.0) @@ -130,7 +130,7 @@ GEM capistrano (~> 3.1) sshkit (~> 1.3) coderay (1.1.3) - concurrent-ruby (1.2.3) + concurrent-ruby (1.3.3) connection_pool (2.4.1) crack (1.0.0) bigdecimal @@ -198,14 +198,15 @@ GEM google-cloud-env (2.1.1) faraday (>= 1.0, < 3.a) google-cloud-errors (1.4.0) + google-protobuf (3.25.3) google-protobuf (3.25.3-x86_64-darwin) google-protobuf (3.25.3-x86_64-linux) - googleapis-common-protos (1.5.0) - google-protobuf (~> 3.18) + googleapis-common-protos (1.6.0) + google-protobuf (>= 3.18, < 5.a) googleapis-common-protos-types (~> 1.7) grpc (~> 1.41) - googleapis-common-protos-types (1.14.0) - google-protobuf (~> 3.18) + googleapis-common-protos-types (1.15.0) + google-protobuf (>= 3.18, < 5.a) googleauth (1.11.0) faraday (>= 1.0, < 3.a) google-cloud-env (~> 2.1) @@ -213,6 +214,9 @@ GEM multi_json (~> 1.11) os (>= 0.9, < 2.0) signet (>= 0.16, < 2.a) + grpc (1.64.0) + google-protobuf (~> 3.25) + googleapis-common-protos-types (~> 1.0) grpc (1.64.0-x86_64-darwin) google-protobuf (~> 3.25) googleapis-common-protos-types (~> 1.0) @@ -225,7 +229,7 @@ GEM hashdiff (1.1.0) htmlentities (4.3.4) http-accept (1.7.0) - http-cookie (1.0.5) + http-cookie (1.0.6) domain_name (~> 0.5) httpclient (2.8.3) i18n (0.9.5) @@ -236,7 +240,7 @@ GEM rdf (>= 2.2.8, < 4.0) json-schema (2.8.1) addressable (>= 2.4) - jwt (2.8.1) + jwt (2.8.2) base64 kgio (2.11.4) libxml-ruby (5.0.3) @@ -252,7 +256,7 @@ GEM method_source 
(1.1.0) mime-types (3.5.2) mime-types-data (~> 3.2015) - mime-types-data (3.2024.0507) + mime-types-data (3.2024.0604) mini_mime (1.1.5) minitest (4.7.5) minitest-stub_any_instance (1.0.3) @@ -260,10 +264,9 @@ GEM redis multi_json (1.15.0) multipart-post (2.4.1) - mutex_m (0.2.0) net-http-persistent (4.0.2) connection_pool (~> 2.2) - net-imap (0.4.11) + net-imap (0.4.14) date net-protocol net-pop (0.1.2) @@ -278,19 +281,19 @@ GEM net-protocol net-ssh (7.2.3) netrc (0.11.0) - newrelic_rpm (9.9.0) - oj (2.18.5) + newrelic_rpm (9.11.0) + oj (3.16.1) omni_logger (0.1.4) logger os (1.1.4) - parallel (1.24.0) + parallel (1.25.1) parseconfig (1.1.2) pony (1.13.1) mail (>= 2.0) pry (0.14.2) coderay (~> 1.1) method_source (~> 1.0) - public_suffix (5.0.5) + public_suffix (5.1.1) rack (1.6.13) rack-accept (0.4.5) rack (>= 0.4) @@ -346,8 +349,8 @@ GEM mime-types (>= 1.16, < 4.0) netrc (~> 0.8) retriable (3.1.2) - rexml (3.2.8) - strscan (>= 3.0.9) + rexml (3.3.1) + strscan rsolr (2.6.0) builder (>= 2.1.2) faraday (>= 0.9, < 3, != 2.0.0) @@ -383,9 +386,8 @@ GEM rack-test sinatra (~> 1.4.0) tilt (>= 1.3, < 3) - sshkit (1.22.2) + sshkit (1.23.0) base64 - mutex_m net-scp (>= 1.1.2) net-sftp (>= 2.1.2) net-ssh (>= 2.8.0) @@ -412,6 +414,7 @@ GEM hashdiff (>= 0.4.0, < 2.0.0) PLATFORMS + ruby x86_64-darwin-23 x86_64-linux @@ -438,7 +441,7 @@ DEPENDENCIES ncbo_cron! ncbo_ontology_recommender! newrelic_rpm - oj (~> 2.0) + oj ontologies_linked_data! parallel parseconfig From c2ac9e074db2a0a486bba1d9e5337217531f0e3f Mon Sep 17 00:00:00 2001 From: Syphax Bouazzouni Date: Thu, 27 Jun 2024 01:00:01 +0200 Subject: [PATCH 02/13] update owl wrapper version to v1.4.3 --- test/controllers/test_properties_controller.rb | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/test/controllers/test_properties_controller.rb b/test/controllers/test_properties_controller.rb index 38f8708f..96879083 100644 --- a/test/controllers/test_properties_controller.rb +++ b/test/controllers/test_properties_controller.rb @@ -35,12 +35,12 @@ def test_properties get "/ontologies/#{@@acronyms.first}/properties" assert last_response.ok? results = MultiJson.load(last_response.body) - assert_includes [85, 56], results.length # depending if owlapi imports SKOS + assert_equal 81, results.length get "/ontologies/#{@@acronyms.last}/properties" assert last_response.ok? results = MultiJson.load(last_response.body) - assert_includes [35] , results.length # depending if owlapi imports SKOS + assert_equal 35, results.length end def test_single_property @@ -59,7 +59,7 @@ def test_property_roots get "/ontologies/#{@@acronyms.first}/properties/roots" assert last_response.ok? pr = MultiJson.load(last_response.body) - assert_includes [62, 52], pr.length #depending if owlapi import SKOS + assert_equal 58, pr.length # count object properties opr = pr.select { |p| p["@type"] == "http://www.w3.org/2002/07/owl#ObjectProperty" } @@ -108,7 +108,7 @@ def test_property_tree get "/ontologies/#{@@acronyms.first}/properties/http%3A%2F%2Fwww.w3.org%2F2004%2F02%2Fskos%2Fcore%23topConceptOf/tree" assert last_response.ok? 
pr = MultiJson.load(last_response.body) - assert_equal 62, pr.length + assert_equal 58, pr.length num_found = 0 pr.each do |p| From 0bb7916536813b051ab69db2adf59d71277ecd21 Mon Sep 17 00:00:00 2001 From: Syphax Bouazzouni Date: Thu, 27 Jun 2024 03:03:41 +0200 Subject: [PATCH 03/13] update test search multilingual test to ensure selecting one prefLabel --- test/controllers/test_search_controller.rb | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/test/controllers/test_search_controller.rb b/test/controllers/test_search_controller.rb index 7549ca3e..0020b15e 100644 --- a/test/controllers/test_search_controller.rb +++ b/test/controllers/test_search_controller.rb @@ -222,27 +222,33 @@ def test_search_provisional_class def test_multilingual_search get "/search?q=Activity&ontologies=BROSEARCHTEST-0" - res = MultiJson.load(last_response.body) + res = MultiJson.load(last_response.body) + refute_equal 0, res["totalCount"] doc = res["collection"].select{|doc| doc["@id"].to_s.eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first refute_nil doc + assert_equal "ActivityEnglish", doc["prefLabel"] res = LinkedData::Models::Class.search("prefLabel_none:Activity", {:fq => "submissionAcronym:BROSEARCHTEST-0", :start => 0, :rows => 80}) refute_equal 0, res["response"]["numFound"] refute_nil res["response"]["docs"].select{|doc| doc["resource_id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + get "/search?q=Activit%C3%A9&ontologies=BROSEARCHTEST-0&lang=fr" res = MultiJson.load(last_response.body) refute_equal 0, res["totalCount"] - refute_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first - + doc = res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + refute_nil doc + assert_equal "Activité", doc["prefLabel"] get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=en" res = MultiJson.load(last_response.body) refute_equal 0, res["totalCount"] - refute_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + doc = res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + refute_nil doc + assert_equal "ActivityEnglish", doc["prefLabel"] get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=fr&require_exact_match=true" From 147e44bb2665202d8a7a252561132c6e47ecba79 Mon Sep 17 00:00:00 2001 From: Syphax Bouazzouni Date: Thu, 27 Jun 2024 03:04:22 +0200 Subject: [PATCH 04/13] add filter search results attributes by language --- controllers/search_controller.rb | 3 +++ helpers/search_helper.rb | 31 +++++++++++++++++++++++++++++++ 2 files changed, 34 insertions(+) diff --git a/controllers/search_controller.rb b/controllers/search_controller.rb index 9f701714..cf2d76c6 100644 --- a/controllers/search_controller.rb +++ b/controllers/search_controller.rb @@ -230,6 +230,9 @@ def process_search(params = nil) doc[:submission] = submission doc[:ontology_rank] = (ontology_rank[doc[:submissionAcronym]] && !ontology_rank[doc[:submissionAcronym]].empty?) ? ontology_rank[doc[:submissionAcronym]][:normalizedScore] : 0.0 doc[:properties] = MultiJson.load(doc.delete(:propertyRaw)) if include_param_contains?(:properties) + + doc = filter_attrs_by_language(doc) + instance = doc[:provisional] ? 
LinkedData::Models::ProvisionalClass.read_only(doc) : LinkedData::Models::Class.read_only(doc) docs.push(instance) end diff --git a/helpers/search_helper.rb b/helpers/search_helper.rb index 1c8dc431..06e6a78f 100644 --- a/helpers/search_helper.rb +++ b/helpers/search_helper.rb @@ -221,6 +221,37 @@ def add_matched_fields(solr_response, default_match) solr_response["match_types"] = all_matches end + def portal_language + Goo.main_languages.first + end + + def request_language + params['lang'] || params['languages'] || portal_language + end + + + def filter_attrs_by_language(doc) + lang_values = {} + doc.each do |k, v| + attr, lang = k.to_s.split('_') + + next unless lang + + if lang.eql?('none') || request_language.eql?(lang) + lang_values[attr.to_sym] ||= [] + lang_values[attr.to_sym] = lang.eql?('none') ? lang_values[attr.to_sym] + v : v + lang_values[attr.to_sym] + end + end + + lang_values.each do |k, v| + doc[k] = v unless v.empty? + end + + doc[:prefLabel] = doc["prefLabel_#{request_language}".to_sym]&.first || doc[:prefLabel]&.first + doc + end + + # see https://github.com/rsolr/rsolr/issues/101 # and https://github.com/projecthydra/active_fedora/commit/75b4afb248ee61d9edb56911b2ef51f30f1ce17f # From 955817af43f3f342ff7ef01d36bb18eae4cd521b Mon Sep 17 00:00:00 2001 From: Syphax Bouazzouni Date: Thu, 27 Jun 2024 05:08:17 +0200 Subject: [PATCH 05/13] add search multiple languages or all languages tests --- test/controllers/test_search_controller.rb | 28 +++++++++++++++++++--- 1 file changed, 25 insertions(+), 3 deletions(-) diff --git a/test/controllers/test_search_controller.rb b/test/controllers/test_search_controller.rb index 0020b15e..459df9aa 100644 --- a/test/controllers/test_search_controller.rb +++ b/test/controllers/test_search_controller.rb @@ -153,7 +153,7 @@ def test_search_other_filters .join(' ') .include?("Funding Resource") end - assert_equal "Funding Resource", results["collection"][0]["prefLabel"].first + assert_equal "Funding Resource", results["collection"][0]["prefLabel"] assert_equal "T028", results["collection"][0]["semanticType"][0] assert_equal "X123456", results["collection"][0]["cui"][0] @@ -208,7 +208,7 @@ def test_search_provisional_class assert_includes [10, 6], results["collection"].length # depending if owlapi import SKOS concepts provisional = results["collection"].select {|res| assert_equal ontology_type, res["ontologyType"]; res["provisional"]} assert_equal 1, provisional.length - assert_equal @@test_pc_root.label, provisional[0]["prefLabel"].first + assert_equal @@test_pc_root.label, provisional[0]["prefLabel"] # subtree root with provisional class test get "search?ontology=#{acronym}&subtree_root_id=#{CGI::escape(@@cls_uri.to_s)}&also_search_provisional=true" @@ -217,7 +217,7 @@ def test_search_provisional_class provisional = results["collection"].select {|res| res["provisional"]} assert_equal 1, provisional.length - assert_equal @@test_pc_child.label, provisional[0]["prefLabel"].first + assert_equal @@test_pc_child.label, provisional[0]["prefLabel"] end def test_multilingual_search @@ -251,6 +251,28 @@ def test_multilingual_search assert_equal "ActivityEnglish", doc["prefLabel"] + get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=fr,es" + res = MultiJson.load(last_response.body) + assert_equal 0, res["totalCount"] + + get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=en,es" + res = MultiJson.load(last_response.body) + refute_equal 0, res["totalCount"] + doc = res["collection"].select{|doc| 
doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + refute_nil doc + expected_pref_label = {"none"=>["Activity"], "en"=>["ActivityEnglish"]} + assert_equal expected_pref_label, doc["prefLabel"] + + get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=all" + res = MultiJson.load(last_response.body) + refute_equal 0, res["totalCount"] + doc = res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + refute_nil doc + expected_pref_label = {"none"=>["Activity"], "en"=>["ActivityEnglish"], "fr"=>["Activité"]} + assert_equal expected_pref_label, doc["prefLabel"] + + + get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=fr&require_exact_match=true" res = MultiJson.load(last_response.body) assert_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first From 923629e5ca25594a8d3048c07d6789d44318a180 Mon Sep 17 00:00:00 2001 From: Syphax Bouazzouni Date: Thu, 27 Jun 2024 05:10:01 +0200 Subject: [PATCH 06/13] implement display search results in multiple languages --- helpers/search_helper.rb | 162 ++++++++++++++------- test/controllers/test_search_controller.rb | 2 +- 2 files changed, 108 insertions(+), 56 deletions(-) diff --git a/helpers/search_helper.rb b/helpers/search_helper.rb index 06e6a78f..8b679986 100644 --- a/helpers/search_helper.rb +++ b/helpers/search_helper.rb @@ -30,51 +30,51 @@ module SearchHelper MATCH_TYPE_LABELGENERATED = "labelGenerated" MATCH_TYPE_MAP = { - "resource_id" => "id", - MATCH_TYPE_PREFLABEL => MATCH_TYPE_PREFLABEL, - "prefLabelExact" => MATCH_TYPE_PREFLABEL, - "prefLabelSuggestEdge" => MATCH_TYPE_PREFLABEL, - "prefLabelSuggestNgram" => MATCH_TYPE_PREFLABEL, - MATCH_TYPE_SYNONYM => MATCH_TYPE_SYNONYM, - "synonymExact" => MATCH_TYPE_SYNONYM, - "synonymSuggestEdge" => MATCH_TYPE_SYNONYM, - "synonymSuggestNgram" => MATCH_TYPE_SYNONYM, - MATCH_TYPE_PROPERTY => MATCH_TYPE_PROPERTY, - MATCH_TYPE_LABEL => MATCH_TYPE_LABEL, - "labelExact" => MATCH_TYPE_LABEL, - "labelSuggestEdge" => MATCH_TYPE_LABEL, - "labelSuggestNgram" => MATCH_TYPE_LABEL, - MATCH_TYPE_LABELGENERATED => MATCH_TYPE_LABELGENERATED, - "labelGeneratedExact" => MATCH_TYPE_LABELGENERATED, - "labellabelGeneratedSuggestEdge" => MATCH_TYPE_LABELGENERATED, - "labellabelGeneratedSuggestNgram" => MATCH_TYPE_LABELGENERATED, - "notation" => "notation", - "cui" => "cui", - "semanticType" => "semanticType" + "resource_id" => "id", + MATCH_TYPE_PREFLABEL => MATCH_TYPE_PREFLABEL, + "prefLabelExact" => MATCH_TYPE_PREFLABEL, + "prefLabelSuggestEdge" => MATCH_TYPE_PREFLABEL, + "prefLabelSuggestNgram" => MATCH_TYPE_PREFLABEL, + MATCH_TYPE_SYNONYM => MATCH_TYPE_SYNONYM, + "synonymExact" => MATCH_TYPE_SYNONYM, + "synonymSuggestEdge" => MATCH_TYPE_SYNONYM, + "synonymSuggestNgram" => MATCH_TYPE_SYNONYM, + MATCH_TYPE_PROPERTY => MATCH_TYPE_PROPERTY, + MATCH_TYPE_LABEL => MATCH_TYPE_LABEL, + "labelExact" => MATCH_TYPE_LABEL, + "labelSuggestEdge" => MATCH_TYPE_LABEL, + "labelSuggestNgram" => MATCH_TYPE_LABEL, + MATCH_TYPE_LABELGENERATED => MATCH_TYPE_LABELGENERATED, + "labelGeneratedExact" => MATCH_TYPE_LABELGENERATED, + "labellabelGeneratedSuggestEdge" => MATCH_TYPE_LABELGENERATED, + "labellabelGeneratedSuggestNgram" => MATCH_TYPE_LABELGENERATED, + "notation" => "notation", + "cui" => "cui", + "semanticType" => "semanticType" } # list of fields that allow empty query text QUERYLESS_FIELDS_PARAMS = { - "ontologies" => nil, - "notation" => "notation", - "cui" => "cui", 
- "semantic_types" => "semanticType", - ONTOLOGY_TYPES_PARAM => "ontologyType", - ALSO_SEARCH_PROVISIONAL_PARAM => nil, - SUBTREE_ID_PARAM => nil + "ontologies" => nil, + "notation" => "notation", + "cui" => "cui", + "semantic_types" => "semanticType", + ONTOLOGY_TYPES_PARAM => "ontologyType", + ALSO_SEARCH_PROVISIONAL_PARAM => nil, + SUBTREE_ID_PARAM => nil } QUERYLESS_FIELDS_STR = QUERYLESS_FIELDS_PARAMS.values.compact.join(" ") - def get_term_search_query(text, params={}) + def get_term_search_query(text, params = {}) validate_params_solr_population(ALLOWED_INCLUDES_PARAMS) sort = params.delete('sort') # raise error if text is empty AND (none of the QUERYLESS_FIELDS_PARAMS has been passed # OR either an exact match OR suggest search is being executed) if text.nil? || text.strip.empty? - if !QUERYLESS_FIELDS_PARAMS.keys.any? {|k| params.key?(k)} || - params[EXACT_MATCH_PARAM] == "true" || - params[SUGGEST_PARAM] == "true" + if !QUERYLESS_FIELDS_PARAMS.keys.any? { |k| params.key?(k) } || + params[EXACT_MATCH_PARAM] == "true" || + params[SUGGEST_PARAM] == "true" raise error 400, "The search query must be provided via /search?q=[&page=&pagesize=]" else text = '' @@ -82,10 +82,6 @@ def get_term_search_query(text, params={}) end end - lang = params["lang"] || params["language"] - lang_suffix = lang && !lang.eql?("all") ? "_#{lang}" : "" - - query = "" params["defType"] = "edismax" params["stopwords"] = "true" params["lowercaseOperators"] = "true" @@ -97,19 +93,33 @@ def get_term_search_query(text, params={}) params["hl.simple.pre"] = MATCH_HTML_PRE params["hl.simple.post"] = MATCH_HTML_POST - # text.gsub!(/\*+$/, '') - if params[EXACT_MATCH_PARAM] == "true" query = "\"#{solr_escape(text)}\"" - params["qf"] = "resource_id^20 prefLabel#{lang_suffix}^10 synonymExact#{lang_suffix} #{QUERYLESS_FIELDS_STR}" - params["hl.fl"] = "resource_id prefLabelExact#{lang_suffix} synonymExact#{lang_suffix} #{QUERYLESS_FIELDS_STR}" + params["qf"] = "resource_id^20 #{add_lang_suffix('prefLabel', '^10')} #{add_lang_suffix('synonymExact')} #{QUERYLESS_FIELDS_STR}" + params["hl.fl"] = "resource_id #{add_lang_suffix('prefLabelExact')} #{add_lang_suffix('synonymExact')} #{QUERYLESS_FIELDS_STR}" elsif params[SUGGEST_PARAM] == "true" || text[-1] == '*' text.gsub!(/\*+$/, '') query = "\"#{solr_escape(text)}\"" params["qt"] = "/suggest_ncbo" - params["qf"] = " prefLabelExact#{lang_suffix}^100 prefLabelSuggestEdge#{lang_suffix}^50 synonym#{lang_suffix}SuggestEdge^10 prefLabel#{lang_suffix}SuggestNgram synonym#{lang_suffix}SuggestNgram resource_id #{QUERYLESS_FIELDS_STR}" - params["pf"] = "prefLabelSuggest^50" - params["hl.fl"] = "prefLabelExact#{lang_suffix} prefLabelSuggestEdge#{lang_suffix} synonymSuggestEdge#{lang_suffix} prefLabelSuggestNgram#{lang_suffix} synonymSuggestNgram#{lang_suffix} resource_id #{QUERYLESS_FIELDS_STR}" + params["qf"] = [ + add_lang_suffix('prefLabelExact', '^100'), + add_lang_suffix('prefLabelSuggestEdge', '^50'), + add_lang_suffix('synonymSuggestEdge', '^10'), + add_lang_suffix('prefLabelSuggestNgram'), + add_lang_suffix('synonymSuggestNgram'), + "resource_id #{QUERYLESS_FIELDS_STR}" + ].join(' ') + + params["pf"] = add_lang_suffix('prefLabelSuggest', '^50') + + params["hl.fl"] = [ + add_lang_suffix('prefLabelExact'), + add_lang_suffix('prefLabelSuggestEdge'), + add_lang_suffix('synonymSuggestEdge'), + add_lang_suffix('prefLabelSuggestNgram'), + add_lang_suffix('synonymSuggestNgram'), + "resource_id #{QUERYLESS_FIELDS_STR}" + ].join(' ') else if text.strip.empty? 
query = '*' @@ -117,9 +127,19 @@ def get_term_search_query(text, params={}) query = solr_escape(text) end - params["qf"] = "resource_id^100 prefLabelExact#{lang_suffix}^90 prefLabel#{lang_suffix}^70 synonymExact#{lang_suffix}^50 synonym#{lang_suffix }^10 #{QUERYLESS_FIELDS_STR}" + params["qf"] = [ + "resource_id^100", + add_lang_suffix('prefLabelExact', '^90'), + add_lang_suffix('prefLabel', '^70'), + add_lang_suffix('synonymExact', '^50'), + add_lang_suffix('synonym', '^10'), + QUERYLESS_FIELDS_STR + ].join(' ') + params["qf"] << " property" if params[INCLUDE_PROPERTIES_PARAM] == "true" - params["hl.fl"] = "resource_id prefLabelExact#{lang_suffix} prefLabel#{lang_suffix } synonymExact#{lang_suffix} synonym#{lang_suffix } #{QUERYLESS_FIELDS_STR}" + + params["hl.fl"] = "resource_id #{add_lang_suffix('prefLabelExact')} #{ add_lang_suffix('prefLabel')} #{add_lang_suffix('synonymExact')} #{add_lang_suffix('synonym')} #{QUERYLESS_FIELDS_STR}" + params["hl.fl"] = "#{params["hl.fl"]} property" if params[INCLUDE_PROPERTIES_PARAM] == "true" end @@ -225,29 +245,61 @@ def portal_language Goo.main_languages.first end - def request_language - params['lang'] || params['languages'] || portal_language + def request_languages + lang = params['lang'] || params['languages'] + + return [portal_language] if lang.blank? + + lang.split(',') end + def request_multiple_languages? + request_languages.size > 1 || request_all_languages? + end + + def request_languages? + !(params['lang'] || params['language']).blank? + end + + def request_all_languages? + request_languages.first.eql?('all') + end + + def add_lang_suffix(attr, rank = "") + if request_languages? && !request_all_languages? + languages = request_languages + languages.map { |lang| "#{attr}_#{lang}#{rank} " }.join + else + "#{attr}#{rank}" + end + end def filter_attrs_by_language(doc) lang_values = {} doc.each do |k, v| attr, lang = k.to_s.split('_') - next unless lang + next if [:ontology_rank, :resource_id, :resource_model].include?(k) + next if lang.blank? || attr.blank? + next if !(request_languages + %w[none]).include?(lang) && !request_all_languages? - if lang.eql?('none') || request_language.eql?(lang) - lang_values[attr.to_sym] ||= [] - lang_values[attr.to_sym] = lang.eql?('none') ? lang_values[attr.to_sym] + v : v + lang_values[attr.to_sym] - end + lang_values[attr.to_sym] ||= {} + lang_values[attr.to_sym][lang] ||= [] + lang_values[attr.to_sym][lang] += v end - lang_values.each do |k, v| - doc[k] = v unless v.empty? + if request_multiple_languages? + lang_values.each do |k, lang_vals| + doc[k] = lang_vals + end + else + lang_values.each do |k, lang_vals| + doc[k] = lang_vals.map { |l, v| l.eql?('none') ? nil : v }.compact.flatten + Array(lang_vals['none']) + end + + doc[:prefLabel] = Array(doc["prefLabel_#{request_languages.first}".to_sym]).first || Array(doc[:prefLabel]).first end - doc[:prefLabel] = doc["prefLabel_#{request_language}".to_sym]&.first || doc[:prefLabel]&.first doc end diff --git a/test/controllers/test_search_controller.rb b/test/controllers/test_search_controller.rb index 459df9aa..9667606c 100644 --- a/test/controllers/test_search_controller.rb +++ b/test/controllers/test_search_controller.rb @@ -92,7 +92,7 @@ def test_search_ontology_filter assert last_response.ok? results = MultiJson.load(last_response.body) doc = results["collection"][0] - assert_equal "cell line", doc["prefLabel"].first + assert_equal "cell line", doc["prefLabel"] assert doc["links"]["ontology"].include? 
acronym results["collection"].each do |doc| acr = doc["links"]["ontology"].split('/')[-1] From 6abcaaa521251c96519f99d29f86731b05b1fb7c Mon Sep 17 00:00:00 2001 From: Syphax Bouazzouni Date: Fri, 28 Jun 2024 00:21:44 +0200 Subject: [PATCH 07/13] fix annotator prefLabel language selection --- helpers/search_helper.rb | 8 ++++++-- test/controllers/test_annotator_controller.rb | 8 ++++---- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/helpers/search_helper.rb b/helpers/search_helper.rb index 8b679986..3805e650 100644 --- a/helpers/search_helper.rb +++ b/helpers/search_helper.rb @@ -274,6 +274,10 @@ def add_lang_suffix(attr, rank = "") end end + def pref_label_by_language(doc) + Array(doc["prefLabel_#{request_languages.first}".to_sym]).first || Array(doc["prefLabel_none".to_sym]).first || Array(doc[:prefLabel]).first + end + def filter_attrs_by_language(doc) lang_values = {} doc.each do |k, v| @@ -297,7 +301,7 @@ def filter_attrs_by_language(doc) doc[k] = lang_vals.map { |l, v| l.eql?('none') ? nil : v }.compact.flatten + Array(lang_vals['none']) end - doc[:prefLabel] = Array(doc["prefLabel_#{request_languages.first}".to_sym]).first || Array(doc[:prefLabel]).first + doc[:prefLabel] = pref_label_by_language(doc) end doc @@ -431,7 +435,7 @@ def populate_classes_from_search(classes, ontology_acronyms=nil) doc[:submission] = old_class.submission doc[:properties] = MultiJson.load(doc.delete(:propertyRaw)) if include_param_contains?(:properties) instance = LinkedData::Models::Class.read_only(doc) - instance.prefLabel = instance.prefLabel.first if instance.prefLabel.is_a?(Array) + instance.prefLabel = pref_label_by_language(doc) classes_hash[ont_uri_class_uri] = instance end diff --git a/test/controllers/test_annotator_controller.rb b/test/controllers/test_annotator_controller.rb index 572c8750..947d474e 100644 --- a/test/controllers/test_annotator_controller.rb +++ b/test/controllers/test_annotator_controller.rb @@ -265,16 +265,16 @@ def test_default_properties_output assert last_response.ok? annotations = MultiJson.load(last_response.body) assert_equal 9, annotations.length - annotations.sort! { |a,b| a["annotatedClass"]["prefLabel"].first.downcase <=> b["annotatedClass"]["prefLabel"].first.downcase } + annotations.sort! { |a,b| a["annotatedClass"]["prefLabel"].downcase <=> b["annotatedClass"]["prefLabel"].downcase } assert_equal "http://bioontology.org/ontologies/BiomedicalResourceOntology.owl#Aggregate_Human_Data", annotations.first["annotatedClass"]["@id"] - assert_equal "Aggregate Human Data", Array(annotations.first["annotatedClass"]["prefLabel"]).first + assert_equal "Aggregate Human Data", annotations.first["annotatedClass"]["prefLabel"] params = {text: text, include: "prefLabel,definition"} get "/annotator", params assert last_response.ok? annotations = MultiJson.load(last_response.body) assert_equal 9, annotations.length - annotations.sort! { |a,b| Array(a["annotatedClass"]["prefLabel"]).first.downcase <=> Array(b["annotatedClass"]["prefLabel"]).first.downcase } + annotations.sort! 
{ |a,b| a["annotatedClass"]["prefLabel"].downcase <=> b["annotatedClass"]["prefLabel"].downcase } assert_equal "http://bioontology.org/ontologies/BiomedicalResourceOntology.owl#Aggregate_Human_Data", annotations.first["annotatedClass"]["@id"] assert_equal ["A resource that provides data from clinical care that comprises combined data from multiple individual human subjects."], annotations.first["annotatedClass"]["definition"] end @@ -354,7 +354,7 @@ def self.mapping_test_set class_id = terms_a[i] ont_acr = onts_a[i] sub = LinkedData::Models::Ontology.find(ont_acr).first.latest_submission(status: :any) - binding.pry if sub.nil? + sub.bring(ontology: [:acronym]) c = LinkedData::Models::Class.find(RDF::URI.new(class_id)) .in(sub) From ab38c60a5d8a41c594a682789ef89dc01d5e7ed6 Mon Sep 17 00:00:00 2001 From: Bilel Kihal <61744974+Bilelkihal@users.noreply.github.com> Date: Tue, 23 Jul 2024 15:22:46 +0200 Subject: [PATCH 08/13] fix: remove duplicated agents endpoint ('/Agents') (#85) --- controllers/agents_controller.rb | 222 +++++++++++++++---------------- 1 file changed, 110 insertions(+), 112 deletions(-) diff --git a/controllers/agents_controller.rb b/controllers/agents_controller.rb index 1bf86321..06fcd27c 100644 --- a/controllers/agents_controller.rb +++ b/controllers/agents_controller.rb @@ -1,150 +1,148 @@ class AgentsController < ApplicationController - %w[/agents /Agents].each do |namespace| - namespace namespace do - # Display all agents - get do - check_last_modified_collection(LinkedData::Models::Agent) - query = LinkedData::Models::Agent.where - query = apply_filters(LinkedData::Models::Agent, query) - query = query.include(LinkedData::Models::Agent.goo_attrs_to_load(includes_param)) - if page? - page, size = page_params - agents = query.page(page, size).all - else - agents = query.to_a - end - - if includes_param.include?(:all) || includes_param.include?(:usages) - LinkedData::Models::Agent.load_agents_usages(agents) - end - - reply agents - end - - # Display a single agent - get '/:id' do - check_last_modified_collection(LinkedData::Models::Agent) - id = params["id"] - agent = LinkedData::Models::Agent.find(id).include(LinkedData::Models::Agent.goo_attrs_to_load(includes_param)).first - error 404, "Agent #{id} not found" if agent.nil? - reply 200, agent - end - - # Create a agent with the given acronym - post do - reply 201, create_new_agent + namespace "/agents" do + get do + check_last_modified_collection(LinkedData::Models::Agent) + query = LinkedData::Models::Agent.where + query = apply_filters(LinkedData::Models::Agent, query) + query = query.include(LinkedData::Models::Agent.goo_attrs_to_load(includes_param)) + if page? + page, size = page_params + agents = query.page(page, size).all + else + agents = query.to_a end - # Create a agent with the given acronym - put '/:acronym' do - reply 201, create_new_agent + if includes_param.include?(:all) || includes_param.include?(:usages) + LinkedData::Models::Agent.load_agents_usages(agents) end - # Update an existing submission of a agent - patch '/:id' do - acronym = params["id"] - agent = LinkedData::Models::Agent.find(acronym).include(LinkedData::Models::Agent.attributes).first + reply agents + end - if agent.nil? 
- error 400, "Agent does not exist, please create using HTTP PUT before modifying" - else - agent = update_agent(agent, params) + # Display a single agent + get '/:id' do + check_last_modified_collection(LinkedData::Models::Agent) + id = params["id"] + agent = LinkedData::Models::Agent.find(id).include(LinkedData::Models::Agent.goo_attrs_to_load(includes_param)).first + error 404, "Agent #{id} not found" if agent.nil? + reply 200, agent + end - error 400, agent.errors unless agent.errors.empty? - end - halt 204 - end + # Create a agent with the given acronym + post do + reply 201, create_new_agent + end - # Delete a agent - delete '/:id' do - agent = LinkedData::Models::Agent.find(params["id"]).first - agent.delete - halt 204 - end + # Create a agent with the given acronym + put '/:acronym' do + reply 201, create_new_agent + end - private + # Update an existing submission of a agent + patch '/:id' do + acronym = params["id"] + agent = LinkedData::Models::Agent.find(acronym).include(LinkedData::Models::Agent.attributes).first - def update_identifiers(identifiers) - Array(identifiers).map do |i| - next nil if i.empty? + if agent.nil? + error 400, "Agent does not exist, please create using HTTP PUT before modifying" + else + agent = update_agent(agent, params) - id = i["id"] || LinkedData::Models::AgentIdentifier.generate_identifier(i['notation'], i['schemaAgency']) - identifier = LinkedData::Models::AgentIdentifier.find(RDF::URI.new(id)).first + error 400, agent.errors unless agent.errors.empty? + end + halt 204 + end - if identifier - identifier.bring_remaining - else - identifier = LinkedData::Models::AgentIdentifier.new - end + # Delete a agent + delete '/:id' do + agent = LinkedData::Models::Agent.find(params["id"]).first + agent.delete + halt 204 + end - i.delete "id" + private - next identifier if i.keys.size.zero? + def update_identifiers(identifiers) + Array(identifiers).map do |i| + next nil if i.empty? - populate_from_params(identifier, i) + id = i["id"] || LinkedData::Models::AgentIdentifier.generate_identifier(i['notation'], i['schemaAgency']) + identifier = LinkedData::Models::AgentIdentifier.find(RDF::URI.new(id)).first - if identifier.valid? - identifier.save - else - error 400, identifier.errors - end - identifier - end.compact - end + if identifier + identifier.bring_remaining + else + identifier = LinkedData::Models::AgentIdentifier.new + end - def update_affiliations(affiliations) - Array(affiliations).map do |aff| - affiliation = aff["id"] ? LinkedData::Models::Agent.find(RDF::URI.new(aff["id"])).first : nil + i.delete "id" - if affiliation - affiliation.bring_remaining - affiliation.identifiers.each{|i| i.bring_remaining} - end + next identifier if i.keys.size.zero? - next affiliation if aff.keys.size.eql?(1) && aff["id"] + populate_from_params(identifier, i) - if affiliation - affiliation = update_agent(affiliation, aff) - else - affiliation = create_new_agent(aff["id"], aff) - end + if identifier.valid? + identifier.save + else + error 400, identifier.errors + end + identifier + end.compact + end - error 400, affiliation.errors unless affiliation.errors.empty? + def update_affiliations(affiliations) + Array(affiliations).map do |aff| + affiliation = aff["id"] ? 
LinkedData::Models::Agent.find(RDF::URI.new(aff["id"])).first : nil - affiliation + if affiliation + affiliation.bring_remaining + affiliation.identifiers.each{|i| i.bring_remaining} end - end - def create_new_agent (id = @params['id'], params = @params) - agent = nil - agent = LinkedData::Models::Agent.find(id).include(LinkedData::Models::Agent.goo_attrs_to_load(includes_param)).first if id + next affiliation if aff.keys.size.eql?(1) && aff["id"] - if agent.nil? - agent = update_agent(LinkedData::Models::Agent.new, params) - error 400, agent.errors unless agent.errors.empty? - - return agent + if affiliation + affiliation = update_agent(affiliation, aff) else - error 400, "Agent exists, please use HTTP PATCH to update" + affiliation = create_new_agent(aff["id"], aff) end + + error 400, affiliation.errors unless affiliation.errors.empty? + + affiliation end + end - def update_agent(agent, params) - return agent unless agent + def create_new_agent (id = @params['id'], params = @params) + agent = nil + agent = LinkedData::Models::Agent.find(id).include(LinkedData::Models::Agent.goo_attrs_to_load(includes_param)).first if id - identifiers = params.delete "identifiers" - affiliations = params.delete "affiliations" - params.delete "id" - populate_from_params(agent, params) - agent.identifiers = update_identifiers(identifiers) - agent.affiliations = update_affiliations(affiliations) + if agent.nil? + agent = update_agent(LinkedData::Models::Agent.new, params) + error 400, agent.errors unless agent.errors.empty? - agent.save if agent.valid? return agent + else + error 400, "Agent exists, please use HTTP PATCH to update" end + end + def update_agent(agent, params) + return agent unless agent + + identifiers = params.delete "identifiers" + affiliations = params.delete "affiliations" + params.delete "id" + populate_from_params(agent, params) + agent.identifiers = update_identifiers(identifiers) + agent.affiliations = update_affiliations(affiliations) + + agent.save if agent.valid? 
+ return agent end + end -end \ No newline at end of file + +end From 67bc9fb2413f411cbb9d1842293859044123c971 Mon Sep 17 00:00:00 2001 From: Syphax bouazzouni Date: Tue, 23 Jul 2024 16:14:49 +0200 Subject: [PATCH 09/13] Feature: implement ontology agents endpoint (#84) * implement ontology agents endpoint * Move ontology agents method out of agents namespace in agents_controller * return a list of uniq values, for the endpoint '/ontologies/:acronym/agents' that contains all the agents of the ontology using agents_attrs list * test for ontology agents endpoing * add another ontologyin test ontology agents test, and assert only the number of results and the names --------- Co-authored-by: Bilel KIHAL --- Gemfile.lock | 2 +- controllers/agents_controller.rb | 24 ++++++++- .../controllers/test_ontologies_controller.rb | 50 +++++++++++++++++++ 3 files changed, 73 insertions(+), 3 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 023fd1b4..ccb0b5ee 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -57,7 +57,7 @@ GIT GIT remote: https://github.com/ontoportal-lirmm/ontologies_linked_data.git - revision: a5b56a68e6dc8ecfc9db708d44350342dac38ce6 + revision: fd78d689dac4a7393e20a36ac930c6c9d191a619 branch: development specs: ontologies_linked_data (0.0.1) diff --git a/controllers/agents_controller.rb b/controllers/agents_controller.rb index 06fcd27c..6b69fbc5 100644 --- a/controllers/agents_controller.rb +++ b/controllers/agents_controller.rb @@ -1,5 +1,27 @@ class AgentsController < ApplicationController + get '/ontologies/:acronym/agents' do + ont = Ontology.find(params["acronym"]).first + latest = ont.latest_submission(status: :any) + latest.bring(*OntologySubmission.agents_attrs) + properties_agents= {} + OntologySubmission.agents_attrs.each do |attr| + properties_agents[attr] = Array(latest.send(attr)) + end + + agents = [] + properties_agents.each do |key, value| + agents.concat(value.map{ |agent| agent.bring_remaining}) + end + agents.uniq! 
+ + if includes_param.include?(:all) || includes_param.include?(:usages) + LinkedData::Models::Agent.load_agents_usages(agents) + end + + reply agents + end + namespace "/agents" do get do check_last_modified_collection(LinkedData::Models::Agent) @@ -143,6 +165,4 @@ def update_agent(agent, params) end end - - end diff --git a/test/controllers/test_ontologies_controller.rb b/test/controllers/test_ontologies_controller.rb index ad062742..681ab93b 100644 --- a/test/controllers/test_ontologies_controller.rb +++ b/test/controllers/test_ontologies_controller.rb @@ -282,6 +282,46 @@ def test_detach_a_view assert_equal onto["viewOf"], ont.id.to_s end + def test_ontology_agents + ontologies_and_submissions = create_ontologies_and_submissions(ont_count: 2, submission_count: 1, process_submission: true) + submission1 = ontologies_and_submissions[2].first.submissions.last + submission2 = ontologies_and_submissions[2].last.submissions.last + + ontology_acronym1 = ontologies_and_submissions[1].first + ontology_acronym2 = ontologies_and_submissions[1].last + + submission1.bring(*OntologySubmission.agents_attrs) + submission2.bring(*OntologySubmission.agents_attrs) + + # To insure that we don't have duplicated agents in the response + agent_syphax = _create_agent(name: 'Syphax', type: 'person') + + submission1.publisher = [_create_agent(name: 'Bilel', type: 'person'), agent_syphax] + submission1.hasContributor = [_create_agent(name: 'Clement', type: 'person'), agent_syphax] + + submission2.publisher = [_create_agent(name: 'Imad', type: 'person'), _create_agent(name: 'Serine', type: 'person')] + + submission1.save + submission2.save + + + get "/ontologies/#{ontology_acronym1}/agents" + + response = MultiJson.load(last_response.body) + assert_equal response.length, 3 + response.each do |r| + assert_includes ['Bilel', 'Syphax', 'Clement'], r["name"] + end + + get "/ontologies/#{ontology_acronym2}/agents" + + response = MultiJson.load(last_response.body) + assert_equal response.length, 2 + response.each do |r| + assert_includes ['Imad', 'Serine'], r["name"] + end + end + private def check400(response) @@ -289,4 +329,14 @@ def check400(response) assert MultiJson.load(response.body)["errors"] end + def _create_agent(name: 'name', type: 'person') + agent = LinkedData::Models::Agent.new({ + agentType: type, + name: name, + creator: User.find('tim').first + }) + agent.save + agent + end + end From 963b9065888bb5e1b67d572db1fddf53c8dbdb93 Mon Sep 17 00:00:00 2001 From: Syphax Bouazzouni Date: Thu, 25 Jul 2024 22:24:33 +0200 Subject: [PATCH 10/13] update API deploy CI to SSH jump host and get configs from private repo --- .github/workflows/deploy.yml | 121 ++++++++++++++++++----------------- 1 file changed, 64 insertions(+), 57 deletions(-) diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 212d5dcd..9d290cc8 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -1,85 +1,92 @@ -# Workflow for deploying ontologies_api to stage/prod systems via capistrano. -# This workflow runs after a successeful execution of the unit test workflow and it -# can also be triggered manually. +# Workflow to deploy OntoPortal UI to stage/prod systems # # Required github secrets: # -# CONFIG_REPO - github repo containing config and customizations for the API. Format 'author/private_config_repo' +# CONFIG_REPO - github repo containing config and customizations for UI. 
Format 'author/private_config_repo' # it is used for getting capistrano deployment configuration for stages on the github actions runner and -# PRIVATE_CONFIG_REPO env var is constructed from it which is used by capistrano on the remote servers for pulling configs. +# PRIVATE_CONFIG_REPO env var is constructed from it which is used by capistrano on the UI hosts for pulling configs. # -# GH_PAT - github Personal Access Token for accessing PRIVATE_CONFIG_REPO +# GH_PAT - github Personal Access Token for accessing private config repo # -# SSH_JUMPHOST - ssh jump/proxy host though which deployments have to though if app servers are hosted on private network. +# SSH_JUMPHOST - ssh jump/proxy host though which deployments have to though if UI nodes live on private network. +# SSH_JUMPHOST_USER - username to use to connect to the ssh jump/proxy. # -# DEPLOY_ENC_KEY - key for decrypting deploymnet ssh key residing in config/deploy_id_rsa_enc (see miloserdow/capistrano-deploy) +# DEPLOY_ENC_KEY - key for decrypting deploymnet ssh key residing in config/ # this SSH key is used for accessing jump host, UI nodes, and private github repo. name: Capistrano Deployment # Controls when the action will run. on: - # Trigger deployment to staging after unit test action completes - workflow_run: - workflows: ["Ruby Unit Tests"] - types: - - completed - branches: [master, develop] + push: + branches: + - stage + - test # Allows running this workflow manually from the Actions tab workflow_dispatch: - branches: [master, develop] inputs: BRANCH: - description: 'Branch/tag to deploy' - default: develop + description: "Branch/tag to deploy" + options: + - stage + - test + - master + default: stage required: true environment: - description: 'target environment to deploy to' + description: "target environment to deploy to" type: choice options: - staging - - production - default: staging - + - agroportal + - test + default: stage jobs: deploy: runs-on: ubuntu-latest - # run deployment only if "Ruby Unit Tests" workflow completes sucessefully or when manually triggered - if: ${{ (github.event.workflow_run.conclusion == 'success') || (github.event_name == 'workflow_dispatch') }} env: - BUNDLE_WITHOUT: default #install gems required primarily for the deployment in order to speed this workflow + BUNDLE_WITHOUT: default #install gems required primarely for deployment in order to speed up workflow PRIVATE_CONFIG_REPO: ${{ format('git@github.com:{0}.git', secrets.CONFIG_REPO) }} # Steps represent a sequence of tasks that will be executed as part of the job steps: - - name: set branch/tag and environment to deploy from inputs - run: | - # workflow_dispatch default input doesn't get set on push so we need to set defaults - # via shell parameter expansion - # https://dev.to/mrmike/github-action-handling-input-default-value-5f2g - USER_INPUT_BRANCH=${{ inputs.branch }} - echo "BRANCH=${USER_INPUT_BRANCH:-develop}" >> $GITHUB_ENV - USER_INPUT_ENVIRONMENT=${{ inputs.environment }} - echo "TARGET=${USER_INPUT_ENVIRONMENT:-staging}" >> $GITHUB_ENV - # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v3 - - uses: ruby/setup-ruby@v1 - with: - ruby-version: 2.7.6 # Not needed with a .ruby-version file - bundler-cache: true # runs 'bundle install' and caches installed gems automatically - - name: get-deployment-config - uses: actions/checkout@v3 - with: - repository: ${{ secrets.CONFIG_REPO }} # repository containing deployment settings - token: ${{ secrets.GH_PAT }} # `GH_PAT` is 
a secret that contains your PAT - path: deploy_config - - name: copy-deployment-config - run: cp -r deploy_config/ontologies_api/* . - # add ssh hostkey so that capistrano doesn't complain - - name: Add jumphost's hostkey to Known Hosts - run: | - mkdir -p ~/.ssh - ssh-keyscan -H ${{ secrets.SSH_JUMPHOST }} > ~/.ssh/known_hosts - shell: bash - - uses: miloserdow/capistrano-deploy@master - with: - target: ${{ env.TARGET }} # which environment to deploy - deploy_key: ${{ secrets.DEPLOY_ENC_KEY }} # Name of the variable configured in Settings/Secrets of your github project + - name: set branch/tag and environment to deploy from inputs + run: | + # workflow_dispatch default input doesn't get set on push so we need to set defaults + # via shell parameter expansion + # https://dev.to/mrmike/github-action-handling-input-default-value-5f2g + USER_INPUT_BRANCH=${{ inputs.branch }} + echo "BRANCH=${USER_INPUT_BRANCH:github.head_ref:-master}" >> $GITHUB_ENV + + USER_INPUT_ENVIRONMENT=${{ inputs.environment }} + echo "TARGET=${USER_INPUT_ENVIRONMENT:-staging}" >> $GITHUB_ENV + + CONFIG_REPO=${{ secrets.CONFIG_REPO }} + GH_PAT=${{ secrets.GH_PAT }} + echo "PRIVATE_CONFIG_REPO=https://${GH_PAT}@github.com/${CONFIG_REPO}" >> $GITHUB_ENV + + echo "SSH_JUMPHOST=${{ secrets.SSH_JUMPHOST }}" >> $GITHUB_ENV + echo "SSH_JUMPHOST_USER=${{ secrets.SSH_JUMPHOST_USER }}" >> $GITHUB_ENV + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v3 + - uses: ruby/setup-ruby@v1 + with: + ruby-version: 2.7.6 # Not needed with a .ruby-version file + bundler-cache: true # runs 'bundle install' and caches installed gems automatically + - name: get-deployment-config + uses: actions/checkout@v3 + with: + repository: ${{ secrets.CONFIG_REPO }} # repository containing deployment settings + token: ${{ secrets.GH_PAT }} # `GH_PAT` is a secret that contains your PAT + path: deploy_config + - name: copy-deployment-config + run: cp -r deploy_config/ontologies_api/${{ inputs.environment }}/* . 
+ # add ssh hostkey so that capistrano doesn't complain + - name: Add jumphost's hostkey to Known Hosts + run: | + mkdir -p ~/.ssh + echo "${{ secrets.SSH_JUMPHOST }}" + ssh-keyscan -H ${{ secrets.SSH_JUMPHOST }} > ~/.ssh/known_hosts + shell: bash + - uses: miloserdow/capistrano-deploy@master + with: + target: ${{ env.TARGET }} # which environment to deploy + deploy_key: ${{ secrets.DEPLOY_ENC_KEY }} # Name of the variable configured in Settings/Secrets of your github project From 0ff1abd23fe3d0d16b51e3b71fa5281c90c876bf Mon Sep 17 00:00:00 2001 From: Syphax Bouazzouni Date: Thu, 25 Jul 2024 22:27:29 +0200 Subject: [PATCH 11/13] update deploy files and add agroportal, stage and test environments --- .github/workflows/deploy.yml | 10 ++--- .gitignore | 1 - config/deploy.rb | 72 +++++++++++++++++++++--------------- config/deploy/agroportal.rb | 17 +++++++++ config/deploy/appliance.rb | 49 ------------------------ config/deploy/production.rb | 39 ------------------- config/deploy/staging.rb | 17 +++++++++ config/deploy/test.rb | 17 +++++++++ 8 files changed, 99 insertions(+), 123 deletions(-) create mode 100644 config/deploy/agroportal.rb delete mode 100644 config/deploy/appliance.rb delete mode 100644 config/deploy/production.rb create mode 100644 config/deploy/staging.rb create mode 100644 config/deploy/test.rb diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 9d290cc8..e0b23263 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -1,18 +1,18 @@ -# Workflow to deploy OntoPortal UI to stage/prod systems +# Workflow to deploy OntoPortal API to stage/prod systems # # Required github secrets: # -# CONFIG_REPO - github repo containing config and customizations for UI. Format 'author/private_config_repo' +# CONFIG_REPO - github repo containing config and customizations for API. Format 'author/private_config_repo' # it is used for getting capistrano deployment configuration for stages on the github actions runner and -# PRIVATE_CONFIG_REPO env var is constructed from it which is used by capistrano on the UI hosts for pulling configs. +# PRIVATE_CONFIG_REPO env var is constructed from it which is used by capistrano on the API hosts for pulling configs. # # GH_PAT - github Personal Access Token for accessing private config repo # -# SSH_JUMPHOST - ssh jump/proxy host though which deployments have to though if UI nodes live on private network. +# SSH_JUMPHOST - ssh jump/proxy host though which deployments have to though if API nodes live on private network. # SSH_JUMPHOST_USER - username to use to connect to the ssh jump/proxy. # # DEPLOY_ENC_KEY - key for decrypting deploymnet ssh key residing in config/ -# this SSH key is used for accessing jump host, UI nodes, and private github repo. +# this SSH key is used for accessing jump host, API nodes, and private github repo. name: Capistrano Deployment # Controls when the action will run. 
diff --git a/.gitignore b/.gitignore index 8b568832..ed57b8d9 100644 --- a/.gitignore +++ b/.gitignore @@ -36,7 +36,6 @@ config/environments/* !config/environments/config.rb.sample #ignore capistrano deployment -config/deploy/* config/*.p12 # Ignore generated test data diff --git a/config/deploy.rb b/config/deploy.rb index 23a982cd..6916caf5 100644 --- a/config/deploy.rb +++ b/config/deploy.rb @@ -1,9 +1,6 @@ -# config valid only for Capistrano 3 - -APP_PATH = '/srv/ontoportal' - -set :application, 'ontologies_api' -set :repo_url, "https://github.com/ncbo/#{fetch(:application)}.git" +set :author, "ontoportal-lirmm" +set :application, "ontologies_api" +set :repo_url, "https://github.com/#{fetch(:author)}/#{fetch(:application)}.git" set :deploy_via, :remote_cache @@ -11,7 +8,7 @@ # ask :branch, proc { `git rev-parse --abbrev-ref HEAD`.chomp } # Default deploy_to directory is /var/www/my_app -set :deploy_to, "#{APP_PATH}/#{fetch(:application)}" +set :deploy_to, "/srv/ontoportal/#{fetch(:application)}" # Default value for :scm is :git # set :scm, :git @@ -20,7 +17,7 @@ # set :format, :pretty # Default value for :log_level is :debug -# set :log_level, :debug +set :log_level, :error # Default value for :pty is false # set :pty, true @@ -32,21 +29,40 @@ # set :linked_dirs, %w{log tmp/pids tmp/cache tmp/sockets vendor/bundle public/system} set :linked_dirs, %w{log vendor/bundle tmp/pids tmp/sockets public/system} -# rbenv -# set :rbenv_type, :system #or :user -# set :rbenv_ruby, '2.2.5' -# set :rbenv_roles, :all # default value - -# do not use sudo -set :use_sudo, false -# required for restarting unicorn with sudo -set :pty, true # Default value for default_env is {} -set :default_env, { -} +# set :default_env, { path: "/opt/ruby/bin:$PATH" } # Default value for keep_releases is 5 set :keep_releases, 5 +set :config_folder_path, "#{fetch(:application)}/#{fetch(:stage)}" + +# If you want to restart using `touch tmp/restart.txt`, add this to your config/deploy.rb: + +SSH_JUMPHOST = ENV.include?('SSH_JUMPHOST') ? ENV['SSH_JUMPHOST'] : 'jumpbox.hostname.com' +SSH_JUMPHOST_USER = ENV.include?('SSH_JUMPHOST_USER') ? ENV['SSH_JUMPHOST_USER'] : 'username' + +JUMPBOX_PROXY = "#{SSH_JUMPHOST_USER}@#{SSH_JUMPHOST}" +set :ssh_options, { + user: 'ontoportal', + forward_agent: 'true', + keys: %w(config/deploy_id_rsa), + auth_methods: %w(publickey), + # use ssh proxy if API servers are on a private network + proxy: Net::SSH::Proxy::Command.new("ssh #{JUMPBOX_PROXY} -W %h:%p") +} + +# private git repo for configuraiton +PRIVATE_CONFIG_REPO = ENV.include?('PRIVATE_CONFIG_REPO') ? 
ENV['PRIVATE_CONFIG_REPO'] : 'https://your_github_pat_token@github.com/your_organization/ontoportal-configs.git' +desc "Check if agent forwarding is working" +task :forwarding do + on roles(:all) do |h| + if test("env | grep SSH_AUTH_SOCK") + info "Agent forwarding is up to #{h}" + else + error "Agent forwarding is NOT up to #{h}" + end + end +end # inspired by http://nathaniel.talbott.ws/blog/2013/03/14/post-deploy-smoke-tests/ desc 'Run smoke test' @@ -74,7 +90,6 @@ end end - namespace :deploy do desc 'Incorporate the private repository content' @@ -82,10 +97,10 @@ # or get config from local directory if LOCAL_CONFIG_PATH env var is set task :get_config do if defined?(PRIVATE_CONFIG_REPO) - TMP_CONFIG_PATH = "/tmp/#{SecureRandom.hex(15)}" + TMP_CONFIG_PATH = "/tmp/#{SecureRandom.hex(15)}".freeze on roles(:app) do execute "git clone -q #{PRIVATE_CONFIG_REPO} #{TMP_CONFIG_PATH}" - execute "rsync -av #{TMP_CONFIG_PATH}/#{fetch(:application)}/ #{release_path}/" + execute "rsync -av #{TMP_CONFIG_PATH}/#{fetch(:config_folder_path)}/ #{release_path}/" execute "rm -rf #{TMP_CONFIG_PATH}" end elsif defined?(LOCAL_CONFIG_PATH) @@ -98,16 +113,15 @@ desc 'Restart application' task :restart do on roles(:app), in: :sequence, wait: 5 do - # Your restart mechanism here, for example: - # execute :touch, release_path.join('tmp/restart.txt') - execute 'sudo systemctl restart unicorn' - execute 'sleep 5' + # Your restart mechanism here, for example: + # execute :touch, release_path.join('tmp/restart.txt') + execute 'sudo systemctl restart unicorn' + execute 'sleep 5' end end - after :publishing, :get_config - after :get_config, :restart - # after :deploy, :smoke_test + after :updating, :get_config + after :publishing, :restart after :restart, :clear_cache do on roles(:web), in: :groups, limit: 3, wait: 10 do diff --git a/config/deploy/agroportal.rb b/config/deploy/agroportal.rb new file mode 100644 index 00000000..c01f3fb9 --- /dev/null +++ b/config/deploy/agroportal.rb @@ -0,0 +1,17 @@ +# Simple Role Syntax +# ================== +# Supports bulk-adding hosts to roles, the primary +# server in each group is considered to be the first +# unless any hosts have the primary property set. +# Don't declare `role :all`, it's a meta role +role :app, %w[agroportal.lirmm.fr] +role :db, %w[agroportal.lirmm.fr] # sufficient to run db:migrate only on one system +set :branch, ENV.include?('BRANCH') ? ENV['BRANCH'] : 'master' +# Extended Server Syntax +# ====================== +# This can be used to drop a more detailed server +# definition into the server list. The second argument +# something that quacks like a hash can be used to set +# extended properties on the server. +# server 'example.com', user: 'deploy', roles: %w{web app}, my_property: :my_value +set :log_level, :error diff --git a/config/deploy/appliance.rb b/config/deploy/appliance.rb deleted file mode 100644 index fdfe0d70..00000000 --- a/config/deploy/appliance.rb +++ /dev/null @@ -1,49 +0,0 @@ -# Simple Role Syntax -# ================== -# Supports bulk-adding hosts to roles, the primary -# server in each group is considered to be the first -# unless any hosts have the primary property set. -# Don't declare `role :all`, it's a meta role - -# Extended Server Syntax -# ====================== -# This can be used to drop a more detailed server -# definition into the server list. The second argument -# something that quacks like a hash can be used to set -# extended properties on the server. 
-server 'localhost', roles: %w{app} - -# you can set custom ssh options -# it's possible to pass any option but you need to keep in mind that net/ssh understand limited list of options -# you can see them in [net/ssh documentation](http://net-ssh.github.io/net-ssh/classes/Net/SSH.html#method-c-start) -# set it globally -# set :ssh_options, { -# keys: %w(/home/rlisowski/.ssh/id_rsa), -# forward_agent: false, -# auth_methods: %w(password) -# } -# and/or per server -# server 'example.com', -# user: 'user_name', -# roles: %w{web app}, -# ssh_options: { -# user: 'user_name', # overrides user setting above -# keys: %w(/home/user_name/.ssh/id_rsa), -# forward_agent: false, -# auth_methods: %w(publickey password) -# # password: 'please use keys' -# } -# setting per server overrides global ssh_options - -BRANCH = ENV.include?('BRANCH') ? ENV['BRANCH'] : 'master' -set :branch, "#{BRANCH}" -set :deploy_to, "/srv/ontoportal/#{fetch(:application)}" -# install gems into a common direcotry shared across ui, api and ncbo_cron to reduce disk usage -set :bundle_path, '/srv/ontoportal/.bundle' -remove :linked_dirs, 'vendor/bundle' - -# private git repo for configuraiton -# PRIVATE_CONFIG_REPO = ENV.include?('PRIVATE_CONFIG_REPO') ? ENV['PRIVATE_CONFIG_REPO'] : 'git@github.com:your_org/private-config-repo.git' - -# location of local configuration files -LOCAL_CONFIG_PATH = ENV.include?('LOCAL_CONFIG_PATH') ? ENV['LOCAL_CONFIG_PATH'] : '/srv/ontoportal/virtual_appliance/appliance_config' diff --git a/config/deploy/production.rb b/config/deploy/production.rb deleted file mode 100644 index c84d24ea..00000000 --- a/config/deploy/production.rb +++ /dev/null @@ -1,39 +0,0 @@ -# Simple Role Syntax -# ================== -# Supports bulk-adding hosts to roles, the primary -# server in each group is considered to be the first -# unless any hosts have the primary property set. -# Don't declare `role :all`, it's a meta role -role :app, %w{deploy@example.com} -role :web, %w{deploy@example.com} -role :db, %w{deploy@example.com} - -# Extended Server Syntax -# ====================== -# This can be used to drop a more detailed server -# definition into the server list. The second argument -# something that quacks like a hash can be used to set -# extended properties on the server. -server 'example.com', user: 'deploy', roles: %w{web app}, my_property: :my_value - -# you can set custom ssh options -# it's possible to pass any option but you need to keep in mind that net/ssh understand limited list of options -# you can see them in [net/ssh documentation](http://net-ssh.github.io/net-ssh/classes/Net/SSH.html#method-c-start) -# set it globally -# set :ssh_options, { -# keys: %w(/home/rlisowski/.ssh/id_rsa), -# forward_agent: false, -# auth_methods: %w(password) -# } -# and/or per server -# server 'example.com', -# user: 'user_name', -# roles: %w{web app}, -# ssh_options: { -# user: 'user_name', # overrides user setting above -# keys: %w(/home/user_name/.ssh/id_rsa), -# forward_agent: false, -# auth_methods: %w(publickey password) -# # password: 'please use keys' -# } -# setting per server overrides global ssh_options diff --git a/config/deploy/staging.rb b/config/deploy/staging.rb new file mode 100644 index 00000000..47b158ae --- /dev/null +++ b/config/deploy/staging.rb @@ -0,0 +1,17 @@ +# Simple Role Syntax +# ================== +# Supports bulk-adding hosts to roles, the primary +# server in each group is considered to be the first +# unless any hosts have the primary property set. 
+# Don't declare `role :all`, it's a meta role +role :app, %w{stageportal.lirmm.fr} +role :db, %w{stageportal.lirmm.fr} # sufficient to run db:migrate only on one system +set :branch, ENV.include?('BRANCH') ? ENV['BRANCH'] : 'stage' +# Extended Server Syntax +# ====================== +# This can be used to drop a more detailed server +# definition into the server list. The second argument +# something that quacks like a hash can be used to set +# extended properties on the server. +#server 'example.com', user: 'deploy', roles: %w{web app}, my_property: :my_value +set :log_level, :error diff --git a/config/deploy/test.rb b/config/deploy/test.rb new file mode 100644 index 00000000..fcbe1efc --- /dev/null +++ b/config/deploy/test.rb @@ -0,0 +1,17 @@ +# Simple Role Syntax +# ================== +# Supports bulk-adding hosts to roles, the primary +# server in each group is considered to be the first +# unless any hosts have the primary property set. +# Don't declare `role :all`, it's a meta role +role :app, %w{testportal.lirmm.fr} +role :db, %w{testportal.lirmm.fr} # sufficient to run db:migrate only on one system +# Extended Server Syntax +# ====================== +# This can be used to drop a more detailed server +# definition into the server list. The second argument +# something that quacks like a hash can be used to set +# extended properties on the server. +#server 'example.com', user: 'deploy', roles: %w{web app}, my_property: :my_value +set :log_level, :error +set :branch, ENV.include?('BRANCH') ? ENV['BRANCH'] : 'test' From 014eb3dc1b6270642ff189130c82f608f241f1cf Mon Sep 17 00:00:00 2001 From: Bilel Kihal <61744974+Bilelkihal@users.noreply.github.com> Date: Thu, 1 Aug 2024 20:25:08 +0200 Subject: [PATCH 12/13] Feature: update agents search endpoint to add option to have a custom qf paramter (#90) * fix agents search sensibility * improve agents search endpoint to search only exact string or substring match * make the agent search endpoint query filter configurable --------- Co-authored-by: Syphax --- controllers/search_controller.rb | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/controllers/search_controller.rb b/controllers/search_controller.rb index cf2d76c6..ce34d51d 100644 --- a/controllers/search_controller.rb +++ b/controllers/search_controller.rb @@ -63,7 +63,7 @@ class SearchController < ApplicationController page_size: page_size, sort: sort }) - + total_found = page_data.aggregate ontology_rank = LinkedData::Models::Ontology.rank docs = {} @@ -153,11 +153,17 @@ class SearchController < ApplicationController fq = "agentType_t:#{type}" if type - qf = [ - "acronymSuggestEdge^25 nameSuggestEdge^15 emailSuggestEdge^15 identifiersSuggestEdge^10 ", # start of the word first - "identifiers_texts^20 acronym_text^15 name_text^10 email_text^10 ", # full word match - "acronymSuggestNgram^2 nameSuggestNgram^1.5 email_text^1" # substring match last - ].join(' ') + if params[:qf] + qf = params[:qf] + else + qf = [ + "acronymSuggestEdge^25 nameSuggestEdge^15 emailSuggestEdge^15 identifiersSuggestEdge^10 ", # start of the word first + "identifiers_texts^20 acronym_text^15 name_text^10 email_text^10 ", # full word match + "acronymSuggestNgram^2 nameSuggestNgram^1.5 email_text^1" # substring match last + ].join(' ') + end + + if params[:sort] sort = "#{params[:sort]} asc, score desc" From fc11608eec406707314c620900022489e52ed46b Mon Sep 17 00:00:00 2001 From: Syphax bouazzouni Date: Thu, 1 Aug 2024 23:38:04 +0200 Subject: [PATCH 13/13] Fix: hide duplicated 
agents endpoint ('/Agents') (#91) * remove duplicated agents endpoint ('/Agents') * put again the Agents endpoint * hide Agents endpoint in the home endpoint * fix properties tests --------- Co-authored-by: Bilel KIHAL --- Gemfile.lock | 47 +++--- controllers/agents_controller.rb | 219 ++++++++++++++------------- controllers/home_controller.rb | 4 + controllers/properties_controller.rb | 12 +- docker-compose.yml | 3 +- 5 files changed, 144 insertions(+), 141 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index ccb0b5ee..ce2a0b4f 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT GIT remote: https://github.com/ontoportal-lirmm/goo.git - revision: b2a635fb1e8206e6e3010be4dbe033b47eb58481 + revision: a95245b8c964431505ca6315907440996c59a00d branch: development specs: goo (0.0.2) @@ -40,7 +40,7 @@ GIT GIT remote: https://github.com/ontoportal-lirmm/ncbo_cron.git - revision: 6bb53a13f514a60513afe25e37c5c69475140452 + revision: fabd04ef4fa37989d526fc6a7aa1e98830008dae branch: master specs: ncbo_cron (0.0.1) @@ -57,7 +57,7 @@ GIT GIT remote: https://github.com/ontoportal-lirmm/ontologies_linked_data.git - revision: fd78d689dac4a7393e20a36ac930c6c9d191a619 + revision: ca79d5a84a3b6e961118b7e1062f082e7f7b99fc branch: development specs: ontologies_linked_data (0.0.1) @@ -117,7 +117,7 @@ GEM bcrypt_pbkdf (1.1.1-x86_64-darwin) bigdecimal (1.4.2) builder (3.3.0) - capistrano (3.19.0) + capistrano (3.19.1) airbrussh (>= 1.0.0) i18n rake (>= 10.0.0) @@ -139,7 +139,7 @@ GEM dante (0.2.0) date (3.3.4) declarative (0.0.20) - docile (1.4.0) + docile (1.4.1) domain_name (0.6.20240107) ed25519 (1.3.0) faraday (1.10.3) @@ -160,12 +160,12 @@ GEM faraday-httpclient (1.0.1) faraday-multipart (1.0.4) multipart-post (~> 2) - faraday-net_http (1.0.1) + faraday-net_http (1.0.2) faraday-net_http_persistent (1.2.0) faraday-patron (1.0.0) faraday-rack (1.0.0) faraday-retry (1.0.3) - ffi (1.16.3) + ffi (1.17.0) gapic-common (0.21.1) faraday (>= 1.9, < 3.a) faraday-retry (>= 1.0, < 3.a) @@ -184,23 +184,21 @@ GEM google-cloud-errors (~> 1.0) google-apis-analytics_v3 (0.16.0) google-apis-core (>= 0.15.0, < 2.a) - google-apis-core (0.15.0) + google-apis-core (0.15.1) addressable (~> 2.5, >= 2.5.1) googleauth (~> 1.9) - httpclient (>= 2.8.1, < 3.a) + httpclient (>= 2.8.3, < 3.a) mini_mime (~> 1.0) + mutex_m representable (~> 3.0) retriable (>= 2.0, < 4.a) - rexml google-cloud-core (1.7.0) google-cloud-env (>= 1.0, < 3.a) google-cloud-errors (~> 1.0) google-cloud-env (2.1.1) faraday (>= 1.0, < 3.a) google-cloud-errors (1.4.0) - google-protobuf (3.25.3) - google-protobuf (3.25.3-x86_64-darwin) - google-protobuf (3.25.3-x86_64-linux) + google-protobuf (3.25.4) googleapis-common-protos (1.6.0) google-protobuf (>= 3.18, < 5.a) googleapis-common-protos-types (~> 1.7) @@ -214,14 +212,11 @@ GEM multi_json (~> 1.11) os (>= 0.9, < 2.0) signet (>= 0.16, < 2.a) - grpc (1.64.0) - google-protobuf (~> 3.25) - googleapis-common-protos-types (~> 1.0) - grpc (1.64.0-x86_64-darwin) - google-protobuf (~> 3.25) + grpc (1.65.2-x86_64-darwin) + google-protobuf (>= 3.25, < 5.0) googleapis-common-protos-types (~> 1.0) - grpc (1.64.0-x86_64-linux) - google-protobuf (~> 3.25) + grpc (1.65.2-x86_64-linux) + google-protobuf (>= 3.25, < 5.0) googleapis-common-protos-types (~> 1.0) haml (5.2.2) temple (>= 0.8.0) @@ -256,7 +251,7 @@ GEM method_source (1.1.0) mime-types (3.5.2) mime-types-data (~> 3.2015) - mime-types-data (3.2024.0604) + mime-types-data (3.2024.0702) mini_mime (1.1.5) minitest (4.7.5) minitest-stub_any_instance (1.0.3) 
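Returning to the agents search change in PATCH 12/13 above: the endpoint now honors a caller-supplied qf parameter and only falls back to the built-in boost profile (edge n-gram fields first, whole-field matches next, plain n-gram substring matches last) when qf is absent. A sketch of a client restricting the ranking to exact acronym and name matches; the base URL, the /search/agents path and the q, agentType and apikey parameter names are assumptions for illustration, while qf itself and the field names come from the patch:

    require 'faraday'

    conn = Faraday.new(url: 'https://data.example-ontoportal.org')
    response = conn.get('/search/agents',
                        q: 'LIRMM',
                        qf: 'acronym_text^20 name_text^10',  # whole-field matches only, no n-gram fields
                        agentType: 'organization',
                        apikey: 'YOUR_API_KEY')
    puts response.body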
@@ -264,6 +259,7 @@ GEM redis multi_json (1.15.0) multipart-post (2.4.1) + mutex_m (0.2.0) net-http-persistent (4.0.2) connection_pool (~> 2.2) net-imap (0.4.14) @@ -281,7 +277,7 @@ GEM net-protocol net-ssh (7.2.3) netrc (0.11.0) - newrelic_rpm (9.11.0) + newrelic_rpm (9.12.0) oj (3.16.1) omni_logger (0.1.4) logger @@ -349,7 +345,7 @@ GEM mime-types (>= 1.16, < 4.0) netrc (~> 0.8) retriable (3.1.2) - rexml (3.3.1) + rexml (3.3.4) strscan rsolr (2.6.0) builder (>= 2.1.2) @@ -394,7 +390,7 @@ GEM strscan (3.1.0) systemu (2.6.5) temple (0.10.3) - tilt (2.3.0) + tilt (2.4.0) timeout (0.4.1) trailblazer-option (0.1.2) tzinfo (2.0.6) @@ -414,7 +410,6 @@ GEM hashdiff (>= 0.4.0, < 2.0.0) PLATFORMS - ruby x86_64-darwin-23 x86_64-linux @@ -473,4 +468,4 @@ DEPENDENCIES webmock (~> 3.19.1) BUNDLED WITH - 2.4.22 + 2.3.23 diff --git a/controllers/agents_controller.rb b/controllers/agents_controller.rb index 6b69fbc5..0b47c0c2 100644 --- a/controllers/agents_controller.rb +++ b/controllers/agents_controller.rb @@ -22,147 +22,150 @@ class AgentsController < ApplicationController reply agents end - namespace "/agents" do - get do - check_last_modified_collection(LinkedData::Models::Agent) - query = LinkedData::Models::Agent.where - query = apply_filters(LinkedData::Models::Agent, query) - query = query.include(LinkedData::Models::Agent.goo_attrs_to_load(includes_param)) - if page? - page, size = page_params - agents = query.page(page, size).all - else - agents = query.to_a - end + %w[agents Agents].each do |namespace| + namespace "/#{namespace}" do + get do + check_last_modified_collection(LinkedData::Models::Agent) + query = LinkedData::Models::Agent.where + query = apply_filters(LinkedData::Models::Agent, query) + query = query.include(LinkedData::Models::Agent.goo_attrs_to_load(includes_param)) + if page? + page, size = page_params + agents = query.page(page, size).all + else + agents = query.to_a + end + + if includes_param.include?(:all) || includes_param.include?(:usages) + LinkedData::Models::Agent.load_agents_usages(agents) + end - if includes_param.include?(:all) || includes_param.include?(:usages) - LinkedData::Models::Agent.load_agents_usages(agents) + reply agents end - reply agents - end + # Display a single agent + get '/:id' do + check_last_modified_collection(LinkedData::Models::Agent) + id = params["id"] + agent = LinkedData::Models::Agent.find(id).include(LinkedData::Models::Agent.goo_attrs_to_load(includes_param)).first + error 404, "Agent #{id} not found" if agent.nil? + reply 200, agent + end - # Display a single agent - get '/:id' do - check_last_modified_collection(LinkedData::Models::Agent) - id = params["id"] - agent = LinkedData::Models::Agent.find(id).include(LinkedData::Models::Agent.goo_attrs_to_load(includes_param)).first - error 404, "Agent #{id} not found" if agent.nil? 
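        # Note (not part of this patch): the %w[agents Agents].each wrapper introduced
        # above registers every route in this controller under both /agents and /Agents,
        # so e.g. GET /agents/:id and GET /Agents/:id resolve to the same handler. The
        # capitalized variant is kept, presumably for existing clients, but is hidden
        # from the routes listing by home_controller.rb later in this patch.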
- reply 200, agent - end + # Create a agent with the given acronym + post do + reply 201, create_new_agent + end - # Create a agent with the given acronym - post do - reply 201, create_new_agent - end + # Create a agent with the given acronym + put '/:acronym' do + reply 201, create_new_agent + end - # Create a agent with the given acronym - put '/:acronym' do - reply 201, create_new_agent - end + # Update an existing submission of a agent + patch '/:id' do + acronym = params["id"] + agent = LinkedData::Models::Agent.find(acronym).include(LinkedData::Models::Agent.attributes).first - # Update an existing submission of a agent - patch '/:id' do - acronym = params["id"] - agent = LinkedData::Models::Agent.find(acronym).include(LinkedData::Models::Agent.attributes).first + if agent.nil? + error 400, "Agent does not exist, please create using HTTP PUT before modifying" + else + agent = update_agent(agent, params) - if agent.nil? - error 400, "Agent does not exist, please create using HTTP PUT before modifying" - else - agent = update_agent(agent, params) + error 400, agent.errors unless agent.errors.empty? + end + halt 204 + end - error 400, agent.errors unless agent.errors.empty? + # Delete a agent + delete '/:id' do + agent = LinkedData::Models::Agent.find(params["id"]).first + agent.delete + halt 204 end - halt 204 - end - # Delete a agent - delete '/:id' do - agent = LinkedData::Models::Agent.find(params["id"]).first - agent.delete - halt 204 - end + private - private + def update_identifiers(identifiers) + Array(identifiers).map do |i| + next nil if i.empty? - def update_identifiers(identifiers) - Array(identifiers).map do |i| - next nil if i.empty? + id = i["id"] || LinkedData::Models::AgentIdentifier.generate_identifier(i['notation'], i['schemaAgency']) + identifier = LinkedData::Models::AgentIdentifier.find(RDF::URI.new(id)).first - id = i["id"] || LinkedData::Models::AgentIdentifier.generate_identifier(i['notation'], i['schemaAgency']) - identifier = LinkedData::Models::AgentIdentifier.find(RDF::URI.new(id)).first + if identifier + identifier.bring_remaining + else + identifier = LinkedData::Models::AgentIdentifier.new + end - if identifier - identifier.bring_remaining - else - identifier = LinkedData::Models::AgentIdentifier.new - end + i.delete "id" - i.delete "id" + next identifier if i.keys.size.zero? - next identifier if i.keys.size.zero? + populate_from_params(identifier, i) - populate_from_params(identifier, i) + if identifier.valid? + identifier.save + else + error 400, identifier.errors + end + identifier + end.compact + end - if identifier.valid? - identifier.save - else - error 400, identifier.errors - end - identifier - end.compact - end + def update_affiliations(affiliations) + Array(affiliations).map do |aff| + affiliation = aff["id"] ? LinkedData::Models::Agent.find(RDF::URI.new(aff["id"])).first : nil - def update_affiliations(affiliations) - Array(affiliations).map do |aff| - affiliation = aff["id"] ? 
LinkedData::Models::Agent.find(RDF::URI.new(aff["id"])).first : nil + if affiliation + affiliation.bring_remaining + affiliation.identifiers.each{|i| i.bring_remaining} + end - if affiliation - affiliation.bring_remaining - affiliation.identifiers.each{|i| i.bring_remaining} - end + next affiliation if aff.keys.size.eql?(1) && aff["id"] - next affiliation if aff.keys.size.eql?(1) && aff["id"] + if affiliation + affiliation = update_agent(affiliation, aff) + else + affiliation = create_new_agent(aff["id"], aff) + end - if affiliation - affiliation = update_agent(affiliation, aff) - else - affiliation = create_new_agent(aff["id"], aff) + error 400, affiliation.errors unless affiliation.errors.empty? + + affiliation end + end - error 400, affiliation.errors unless affiliation.errors.empty? + def create_new_agent (id = @params['id'], params = @params) + agent = nil + agent = LinkedData::Models::Agent.find(id).include(LinkedData::Models::Agent.goo_attrs_to_load(includes_param)).first if id - affiliation + if agent.nil? + agent = update_agent(LinkedData::Models::Agent.new, params) + error 400, agent.errors unless agent.errors.empty? + + return agent + else + error 400, "Agent exists, please use HTTP PATCH to update" + end end - end - def create_new_agent (id = @params['id'], params = @params) - agent = nil - agent = LinkedData::Models::Agent.find(id).include(LinkedData::Models::Agent.goo_attrs_to_load(includes_param)).first if id + def update_agent(agent, params) + return agent unless agent - if agent.nil? - agent = update_agent(LinkedData::Models::Agent.new, params) - error 400, agent.errors unless agent.errors.empty? + identifiers = params.delete "identifiers" + affiliations = params.delete "affiliations" + params.delete "id" + populate_from_params(agent, params) + agent.identifiers = update_identifiers(identifiers) + agent.affiliations = update_affiliations(affiliations) + agent.save if agent.valid? return agent - else - error 400, "Agent exists, please use HTTP PATCH to update" end - end - def update_agent(agent, params) - return agent unless agent - - identifiers = params.delete "identifiers" - affiliations = params.delete "affiliations" - params.delete "id" - populate_from_params(agent, params) - agent.identifiers = update_identifiers(identifiers) - agent.affiliations = update_affiliations(affiliations) - - agent.save if agent.valid? - return agent end - end + end diff --git a/controllers/home_controller.rb b/controllers/home_controller.rb index c2a67fb4..a44fd22e 100644 --- a/controllers/home_controller.rb +++ b/controllers/home_controller.rb @@ -13,11 +13,15 @@ class HomeController < ApplicationController expires 3600, :public last_modified @@root_last_modified ||= Time.now.httpdate routes = routes_list + #TODO: delete when ccv will be on production routes.delete("/ccv") if LinkedData.settings.enable_resource_index == false routes.delete("/resource_index") end + + routes.delete('/Agents') + routes_hash = {} context = {} routes.each do |route| diff --git a/controllers/properties_controller.rb b/controllers/properties_controller.rb index f98e9016..d32180d5 100644 --- a/controllers/properties_controller.rb +++ b/controllers/properties_controller.rb @@ -24,7 +24,7 @@ class PropertiesController < ApplicationController get '/:property' do prop = params[:property] ont, submission = get_ontology_and_submission - p = ont.property(prop, submission) + p = ont.property(prop, submission, display_all_attributes: false) error 404, "Property #{prop} not found in ontology #{ont.id.to_s}" if p.nil? 
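      # Note (not part of this patch): display_all_attributes: false is now passed at
      # every ont.property call site in this controller, presumably so only the
      # attributes each route actually serializes are loaded. A small helper could
      # centralize the lookup and the 404 check; find_property is a hypothetical name:
      #
      #   def find_property(prop, ont, submission)
      #     p = ont.property(prop, submission, display_all_attributes: false)
      #     error 404, "Property #{prop} not found in ontology #{ont.id.to_s}" if p.nil?
      #     p
      #   end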
reply 200, p end @@ -51,7 +51,7 @@ class PropertiesController < ApplicationController get '/:property/tree' do prop = params[:property] ont, submission = get_ontology_and_submission - p = ont.property(prop, submission) + p = ont.property(prop, submission, display_all_attributes: false) error 404, "Property #{prop} not found in ontology #{ont.id.to_s}" if p.nil? root_tree = p.tree @@ -79,7 +79,7 @@ class PropertiesController < ApplicationController get '/:property/ancestors' do prop = params[:property] ont, submission = get_ontology_and_submission - p = ont.property(prop, submission) + p = ont.property(prop, submission, display_all_attributes: false) error 404, "Property #{prop} not found in ontology #{ont.id.to_s}" if p.nil? ancestors = p.ancestors p.class.in(submission).models(ancestors).include(:label, :definition).all @@ -91,7 +91,7 @@ class PropertiesController < ApplicationController get '/:property/descendants' do prop = params[:property] ont, submission = get_ontology_and_submission - p = ont.property(prop, submission) + p = ont.property(prop, submission, display_all_attributes: false) error 404, "Property #{prop} not found in ontology #{ont.id.to_s}" if p.nil? descendants = p.descendants p.class.in(submission).models(descendants).include(:label, :definition).all @@ -103,7 +103,7 @@ class PropertiesController < ApplicationController get '/:property/parents' do prop = params[:property] ont, submission = get_ontology_and_submission - p = ont.property(prop, submission) + p = ont.property(prop, submission, display_all_attributes: false) error 404, "Property #{prop} not found in ontology #{ont.id.to_s}" if p.nil? p.bring(:parents) @@ -120,7 +120,7 @@ class PropertiesController < ApplicationController get '/:property/children' do prop = params[:property] ont, submission = get_ontology_and_submission - p = ont.property(prop, submission) + p = ont.property(prop, submission, display_all_attributes: false) error 404, "Property #{prop} not found in ontology #{ont.id.to_s}" if p.nil? p.bring(:children) diff --git a/docker-compose.yml b/docker-compose.yml index a75136d7..07b0cda1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -31,6 +31,7 @@ services: environment: <<: *env BUNDLE_APP_CONFIG: /srv/ontoportal/ontologies_api/.bundle + profiles: - 4store depends_on: @@ -48,7 +49,7 @@ services: - "9393:9393" volumes: # bundle volume for hosting gems installed by bundle; it speeds up gem install in local development - - app_api:/srv/ontoportal/ontologies_api + - .:/srv/ontoportal/ontologies_api - repository:/srv/ontoportal/data/repository ncbo_cron: