From d0667aa67a440fee448944750a4ca8750ff273dc Mon Sep 17 00:00:00 2001 From: OntoPortal Bot Date: Thu, 12 Oct 2023 22:01:41 +0200 Subject: [PATCH 1/8] [ontoportal-bot] Gemfile.lock update --- Gemfile | 10 ++++++---- Gemfile.lock | 30 +++++++++++++++++++----------- 2 files changed, 25 insertions(+), 15 deletions(-) diff --git a/Gemfile b/Gemfile index cc21c703..dfc4fa69 100644 --- a/Gemfile +++ b/Gemfile @@ -1,6 +1,6 @@ source 'https://rubygems.org' -gem 'activesupport', '~> 3.0' +gem 'activesupport', '~> 3.1' # see https://github.com/ncbo/ontologies_api/issues/69 gem 'bigdecimal', '1.4.2' gem 'faraday', '~> 1.9' @@ -18,7 +18,7 @@ gem 'sinatra-contrib', '~> 1.0' gem 'ffi' gem 'rack-accept', '~> 0.4' gem 'rack-attack', '~> 6.6.1', require: 'rack/attack' -gem 'rack-cache', '~> 1.0' +gem 'rack-cache', '~> 1.13.0' gem 'rack-cors', require: 'rack/cors' # GitHub dependency can be removed when https://github.com/niko/rack-post-body-to-params/pull/6 is merged and released gem 'rack-post-body-to-params', github: 'palexander/rack-post-body-to-params', branch: 'multipart_support' @@ -26,8 +26,9 @@ gem 'rack-timeout' gem 'redis-rack-cache', '~> 2.0' # Data access (caching) -gem 'redis' +gem 'redis', '~> 4.8.1' gem 'redis-activesupport' +gem 'redis-store', '1.9.1' # Monitoring gem 'cube-ruby', require: 'cube' @@ -71,4 +72,5 @@ group :test do gem 'rack-test' gem 'simplecov', require: false gem 'simplecov-cobertura' # for codecov.io -end + gem 'webmock' +end \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index e2ea3509..c7cde714 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -126,7 +126,8 @@ GEM sshkit (~> 1.3) coderay (1.1.3) concurrent-ruby (1.2.2) - connection_pool (2.4.1) + crack (0.4.5) + rexml cube-ruby (0.0.3) dante (0.2.0) date (3.3.3) @@ -181,6 +182,7 @@ GEM haml (5.2.2) temple (>= 0.8.0) tilt + hashdiff (1.0.1) http-accept (1.7.0) http-cookie (1.0.5) domain_name (~> 0.5) @@ -244,7 +246,7 @@ GEM rack (>= 0.4) rack-attack (6.6.1) rack (>= 1.0, < 3) - rack-cache (1.14.0) + rack-cache (1.13.0) rack (>= 0.4) rack-cors (1.0.6) rack (>= 1.6.0) @@ -260,22 +262,21 @@ GEM rdf (1.0.8) addressable (>= 2.2) redcarpet (3.6.0) - redis (5.0.7) - redis-client (>= 0.9.0) + redis (4.8.1) redis-activesupport (5.3.0) activesupport (>= 3, < 8) redis-store (>= 1.3, < 2) - redis-client (0.17.0) - connection_pool redis-rack-cache (2.2.1) rack-cache (>= 1.10, < 2) redis-store (>= 1.6, < 2) - redis-store (1.10.0) - redis (>= 4, < 6) + redis-store (1.9.1) + redis (>= 4, < 5) representable (3.2.0) declarative (< 0.1.0) trailblazer-option (>= 0.1.1, < 0.2.0) uber (< 0.2.0) + request_store (1.5.1) + rack (>= 1.4) rest-client (2.1.0) http-accept (>= 1.7.0, < 2.0) http-cookie (>= 1.0.2, < 2.0) @@ -340,6 +341,10 @@ GEM unicorn (>= 4, < 7) uuid (2.3.9) macaddr (~> 1.0) + webmock (3.19.1) + addressable (>= 2.8.0) + crack (>= 0.3.2) + hashdiff (>= 0.4.0, < 2.0.0) webrick (1.8.1) PLATFORMS @@ -347,7 +352,7 @@ PLATFORMS x86_64-linux DEPENDENCIES - activesupport (~> 3.0) + activesupport (~> 3.1) bcrypt_pbkdf (>= 1.0, < 2.0) bigdecimal (= 1.4.2) capistrano (~> 3) @@ -375,7 +380,7 @@ DEPENDENCIES rack rack-accept (~> 0.4) rack-attack (~> 6.6.1) - rack-cache (~> 1.0) + rack-cache (~> 1.13.0) rack-cors rack-mini-profiler rack-post-body-to-params! @@ -383,9 +388,11 @@ DEPENDENCIES rack-timeout rake (~> 10.0) redcarpet - redis + redis (~> 4.8.1) redis-activesupport redis-rack-cache (~> 2.0) + redis-store (= 1.9.1) + request_store shotgun! simplecov simplecov-cobertura @@ -395,6 +402,7 @@ DEPENDENCIES sparql-client! 
unicorn unicorn-worker-killer + webmock BUNDLED WITH 2.3.23 From 1b4c7eb1836836d56d1be96d62075935defd35e1 Mon Sep 17 00:00:00 2001 From: OntoPortal Bot Date: Thu, 12 Oct 2023 22:38:10 +0200 Subject: [PATCH 2/8] [ontoportal-bot] Gemfile.lock update --- Gemfile.lock | 4 ---- 1 file changed, 4 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index c7cde714..e6ca88b0 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -275,8 +275,6 @@ GEM declarative (< 0.1.0) trailblazer-option (>= 0.1.1, < 0.2.0) uber (< 0.2.0) - request_store (1.5.1) - rack (>= 1.4) rest-client (2.1.0) http-accept (>= 1.7.0, < 2.0) http-cookie (>= 1.0.2, < 2.0) @@ -348,7 +346,6 @@ GEM webrick (1.8.1) PLATFORMS - x86_64-darwin-21 x86_64-linux DEPENDENCIES @@ -392,7 +389,6 @@ DEPENDENCIES redis-activesupport redis-rack-cache (~> 2.0) redis-store (= 1.9.1) - request_store shotgun! simplecov simplecov-cobertura From 79dad08d195dcdf70df02c07a94fab2fd0cd5a63 Mon Sep 17 00:00:00 2001 From: Syphax bouazzouni Date: Tue, 17 Oct 2023 09:00:44 +0200 Subject: [PATCH 3/8] Fix: isInActiveScheme attribute in the class/tree endpoint (#50) * add class tree test with scheme and collection filters * make the class tree endpoit load isInActiveScheme and isInActiveCollection for the select class --- controllers/classes_controller.rb | 6 --- helpers/classes_helper.rb | 41 +++++++++++++-------- test/controllers/test_schemes_controller.rb | 17 +++++++++ 3 files changed, 43 insertions(+), 21 deletions(-) diff --git a/controllers/classes_controller.rb b/controllers/classes_controller.rb index c8e55bf8..d792c172 100644 --- a/controllers/classes_controller.rb +++ b/controllers/classes_controller.rb @@ -262,13 +262,7 @@ def includes_param_check end end - def concept_schemes - params["concept_schemes"]&.split(',') || [] - end - def concept_collections - params["concept_collections"]&.split(',') || [] - end def request_display(attrs) diff --git a/helpers/classes_helper.rb b/helpers/classes_helper.rb index fa6c48cf..60becb22 100644 --- a/helpers/classes_helper.rb +++ b/helpers/classes_helper.rb @@ -32,23 +32,19 @@ def get_class(submission, load_attrs=nil) load_children = load_attrs.delete :children load_has_children = load_attrs.delete :hasChildren - if !load_children + unless load_children load_children = load_attrs.select { |x| x.instance_of?(Hash) && x.include?(:children) } - - if load_children.length == 0 - load_children = nil - end - if !load_children.nil? - load_attrs = load_attrs.select { |x| !(x.instance_of?(Hash) && x.include?(:children)) } - end + load_children = nil if load_children.length == 0 + load_attrs = load_attrs.select { |x| !(x.instance_of?(Hash) && x.include?(:children)) } unless load_children.nil? end + cls_uri = notation_to_class_uri(submission) if cls_uri.nil? cls_uri = RDF::URI.new(params[:cls]) - if !cls_uri.valid? + unless cls_uri.valid? error 400, "The input class id '#{params[:cls]}' is not a valid IRI" end end @@ -62,23 +58,38 @@ def get_class(submission, load_attrs=nil) error 404, "Resource '#{params[:cls]}' not found in ontology #{submission.ontology.acronym} submission #{submission.submissionId}" end - unless load_has_children.nil? - cls.load_has_children - end - if !load_children.nil? 
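    # The added block below replaces the removed cls.load_has_children call:
    # hasChildren, plus isInActiveScheme/isInActiveCollection when inScheme/memberOf
    # are requested, are loaded together as computed attributes scoped to the
    # concept_schemes/concept_collections request parameters.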
+ + extra_include = [] + + extra_include << :hasChildren if load_has_children + extra_include << :isInActiveScheme if load_attrs.include?(:inScheme) + extra_include << :isInActiveCollection if load_attrs.include?(:memberOf) + + cls.load_computed_attributes(to_load: extra_include , + options: {schemes: concept_schemes, collections: concept_collections}) + + + unless load_children.nil? LinkedData::Models::Class.partially_load_children( - [cls],500,cls.submission) + [cls], 500, cls.submission) unless load_has_children.nil? cls.children.each do |c| c.load_has_children end end end - return cls + cls end end + def concept_schemes + params["concept_schemes"]&.split(',') || [] + end + + def concept_collections + params["concept_collections"]&.split(',') || [] + end end end diff --git a/test/controllers/test_schemes_controller.rb b/test/controllers/test_schemes_controller.rb index ebabc42f..d4504aa3 100644 --- a/test/controllers/test_schemes_controller.rb +++ b/test/controllers/test_schemes_controller.rb @@ -61,4 +61,21 @@ def test_calls_not_found assert_equal 404, last_response.status end + + def test_class_tree + ont = Ontology.find('INRAETHES-0').include(:acronym).first + sub = ont.latest_submission + sub.bring_remaining + sub.uri = RDF::URI.new('http://opendata.inrae.fr/thesaurusINRAE/domainesINRAE') + sub.hasOntologyLanguage = LinkedData::Models::OntologyFormat.find('SKOS').first + sub.save + + cls = 'http://opendata.inrae.fr/thesaurusINRAE/d_6' + get "ontologies/INRAETHES-0/classes/#{CGI.escape(cls)}/tree" + + classes = MultiJson.load(last_response.body) + + refute_nil classes.select{|x| x['@id'].eql?(cls)}.first['isInActiveScheme'] + refute_nil classes.select{|x| x['@id'].eql?(cls)}.first['isInActiveCollection'] + end end From eb9103bdba650047ebb110e7bc2cd0b85780c329 Mon Sep 17 00:00:00 2001 From: OntoPortal Bot Date: Tue, 17 Oct 2023 12:54:33 +0200 Subject: [PATCH 4/8] [ontoportal-bot] Gemfile.lock update --- Gemfile.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Gemfile.lock b/Gemfile.lock index e6ca88b0..63f2fc7c 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -53,7 +53,7 @@ GIT GIT remote: https://github.com/ontoportal-lirmm/ontologies_linked_data.git - revision: bf682ec9c1baa3e9e7eae2e5d095187b0f900bf7 + revision: 0d6632bb39c1d24a930fe088bc464c3a51d68c9b branch: master specs: ontologies_linked_data (0.0.1) From 12340962f57d9e7107e8a32ad4b903caff529fa5 Mon Sep 17 00:00:00 2001 From: Syphax bouazzouni Date: Thu, 19 Oct 2023 14:06:43 +0200 Subject: [PATCH 5/8] Merge to master: Release 2.3.2 - Submissions endpoint pagination and fixes (#52) * add get submission all including all properties test * extract and use submission_include_params where we use submission.bring * use retrieve_submissions helper in the :acronym/submissions endpoint --- Gemfile.lock | 4 +- controllers/admin_controller.rb | 4 +- controllers/ontologies_controller.rb | 19 +-- .../ontology_submissions_controller.rb | 38 +++--- helpers/access_control_helper.rb | 4 - helpers/application_helper.rb | 42 ++---- helpers/request_params_helper.rb | 97 ++++++++++++++ helpers/submission_helper.rb | 67 ++++++++++ .../controllers/test_ontologies_controller.rb | 4 +- .../test_ontology_submissions_controller.rb | 125 +++++++++++++++++- .../thesaurusINRAE_nouv_structure.rdf | 2 +- 11 files changed, 329 insertions(+), 77 deletions(-) create mode 100644 helpers/submission_helper.rb diff --git a/Gemfile.lock b/Gemfile.lock index 63f2fc7c..8d0a6681 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -53,7 +53,7 @@ GIT GIT 
remote: https://github.com/ontoportal-lirmm/ontologies_linked_data.git - revision: 0d6632bb39c1d24a930fe088bc464c3a51d68c9b + revision: f44f7baa96eb3ee10dfab4a8aca154161ba7dd89 branch: master specs: ontologies_linked_data (0.0.1) @@ -112,7 +112,7 @@ GEM bcrypt_pbkdf (1.1.0) bigdecimal (1.4.2) builder (3.2.4) - capistrano (3.17.3) + capistrano (3.18.0) airbrussh (>= 1.0.0) i18n rake (>= 10.0.0) diff --git a/controllers/admin_controller.rb b/controllers/admin_controller.rb index 7ae6d800..747def93 100644 --- a/controllers/admin_controller.rb +++ b/controllers/admin_controller.rb @@ -68,7 +68,7 @@ class AdminController < ApplicationController latest = ont.latest_submission(status: :any) error 404, "Ontology #{params["acronym"]} contains no submissions" if latest.nil? check_last_modified(latest) - latest.bring(*OntologySubmission.goo_attrs_to_load(includes_param)) + latest.bring(*submission_include_params) NcboCron::Models::OntologySubmissionParser.new.queue_submission(latest, actions) halt 204 end @@ -84,7 +84,7 @@ class AdminController < ApplicationController latest = ont.latest_submission(status: :any) end check_last_modified(latest) if latest - latest.bring(*OntologySubmission.goo_attrs_to_load(includes_param)) if latest + latest.bring(*submission_include_params) if latest reply(latest || {}) end diff --git a/controllers/ontologies_controller.rb b/controllers/ontologies_controller.rb index da1b748c..58518420 100644 --- a/controllers/ontologies_controller.rb +++ b/controllers/ontologies_controller.rb @@ -38,21 +38,12 @@ class OntologiesController < ApplicationController else latest = ont.latest_submission(status: :any) end - check_last_modified(latest) if latest - # When asking to display all metadata, we are using bring_remaining which is more performant than including all metadata (remove this when the query to get metadata will be fixed) + if latest - if includes_param.first == :all - # Bring what we need to display all attr of the submission - latest.bring_remaining - latest.bring({:contact=>[:name, :email], - :ontology=>[:acronym, :name, :administeredBy, :group, :viewingRestriction, :doNotUpdate, :flat, - :hasDomain, :summaryOnly, :acl, :viewOf, :ontologyType], - :submissionStatus=>[:code], :hasOntologyLanguage=>[:acronym]}) - else - latest.bring(*OntologySubmission.goo_attrs_to_load(includes_param)) - end + check_last_modified(latest) + latest.bring(*submission_include_params) end - #remove the whole previous if block and replace by it: latest.bring(*OntologySubmission.goo_attrs_to_load(includes_param)) if latest + reply(latest || {}) end @@ -62,7 +53,7 @@ class OntologiesController < ApplicationController patch '/:acronym/latest_submission' do ont = Ontology.find(params["acronym"]).first error 422, "You must provide an existing `acronym` to patch" if ont.nil? 
- + submission = ont.latest_submission(status: :any) submission.bring(*OntologySubmission.attributes) diff --git a/controllers/ontology_submissions_controller.rb b/controllers/ontology_submissions_controller.rb index cf55659d..0068a5f1 100644 --- a/controllers/ontology_submissions_controller.rb +++ b/controllers/ontology_submissions_controller.rb @@ -1,9 +1,15 @@ class OntologySubmissionsController < ApplicationController get "/submissions" do check_last_modified_collection(LinkedData::Models::OntologySubmission) - #using appplication_helper method - options = {also_include_views: params["also_include_views"], status: (params["include_status"] || "ANY")} - reply retrieve_latest_submissions(options).values + options = { + also_include_views: params["also_include_views"], + status: (params["include_status"] || "ANY") + } + subs = retrieve_latest_submissions(options) + subs = subs.values unless page? + # Force to show ontology reviews, notes and projects by default only for this request + LinkedData::Models::Ontology.serialize_default(*(LinkedData::Models::Ontology.hypermedia_settings[:serialize_default] + [:reviews, :notes, :projects])) + reply subs end ## @@ -19,22 +25,18 @@ class OntologySubmissionsController < ApplicationController ## # Display all submissions of an ontology get do - ont = Ontology.find(params["acronym"]).include(:acronym).first + ont = Ontology.find(params["acronym"]).include(:acronym, :administeredBy, :acl, :viewingRestriction).first error 422, "Ontology #{params["acronym"]} does not exist" unless ont check_last_modified_segment(LinkedData::Models::OntologySubmission, [ont.acronym]) - if includes_param.first == :all - # When asking to display all metadata, we are using bring_remaining which is more performant than including all metadata (remove this when the query to get metadata will be fixed) - ont.bring(submissions: [:released, :creationDate, :status, :submissionId, - {:contact=>[:name, :email], :ontology=>[:administeredBy, :acronym, :name, :summaryOnly, :ontologyType, :viewingRestriction, :acl, :group, :hasDomain, :views, :viewOf, :flat], - :submissionStatus=>[:code], :hasOntologyLanguage=>[:acronym]}, :submissionStatus]) - - ont.submissions.each do |sub| - sub.bring_remaining - end - else - ont.bring(submissions: OntologySubmission.goo_attrs_to_load(includes_param)) - end - reply ont.submissions.sort {|a,b| b.submissionId.to_i <=> a.submissionId.to_i } # descending order of submissionId + check_access(ont) + options = { + also_include_views: true, + status: (params["include_status"] || "ANY"), + ontology: params["acronym"] + } + subs = retrieve_submissions(options) + + reply subs.sort {|a,b| b.submissionId.to_i <=> a.submissionId.to_i } # descending order of submissionId end ## @@ -53,7 +55,7 @@ class OntologySubmissionsController < ApplicationController ont.bring(:submissions) ont_submission = ont.submission(params["ontology_submission_id"]) error 404, "`submissionId` not found" if ont_submission.nil? - ont_submission.bring(*OntologySubmission.goo_attrs_to_load(includes_param)) + ont_submission.bring(*submission_include_params) reply ont_submission end diff --git a/helpers/access_control_helper.rb b/helpers/access_control_helper.rb index 1de3bee5..74416866 100644 --- a/helpers/access_control_helper.rb +++ b/helpers/access_control_helper.rb @@ -10,11 +10,7 @@ module AccessControlHelper def check_access(obj) return obj unless LinkedData.settings.enable_security if obj.is_a?(Enumerable) - if obj.first.is_a?(LinkedData::Models::Base) && obj.first.access_based_on? 
- check_access(obj.first) - else filter_access(obj) - end else if obj.respond_to?(:read_restricted?) && obj.read_restricted? readable = obj.readable?(env["REMOTE_USER"]) diff --git a/helpers/application_helper.rb b/helpers/application_helper.rb index 10871498..172170fa 100644 --- a/helpers/application_helper.rb +++ b/helpers/application_helper.rb @@ -52,6 +52,10 @@ def populate_from_params(obj, params) value = is_arr ? value : [value] new_value = [] value.each do |cls| + if uri_as_needed(cls["ontology"]).nil? + new_value << cls + next + end sub = LinkedData::Models::Ontology.find(uri_as_needed(cls["ontology"])).first.latest_submission new_value << LinkedData::Models::Class.find(cls["class"]).in(sub).first end @@ -356,40 +360,16 @@ def replace_url_prefix(id) end def retrieve_latest_submissions(options = {}) - status = (options[:status] || "RDF").to_s.upcase - include_ready = status.eql?("READY") ? true : false - status = "RDF" if status.eql?("READY") - any = true if status.eql?("ANY") - include_views = options[:also_include_views] || false - includes = OntologySubmission.goo_attrs_to_load(includes_param) - - includes << :submissionStatus unless includes.include?(:submissionStatus) - if any - submissions_query = OntologySubmission.where - else - submissions_query = OntologySubmission.where(submissionStatus: [ code: status]) - end + submissions = retrieve_submissions(options) - submissions_query = submissions_query.filter(Goo::Filter.new(ontology: [:viewOf]).unbound) unless include_views - submissions_query = submissions_query.filter(filter) if filter? - # When asking to display all metadata, we are using bring_remaining on each submission. Slower but best way to retrieve all attrs - if includes_param.first == :all - includes = [:submissionId, {:contact=>[:name, :email], :ontology=>[:administeredBy, :acronym, :name, :summaryOnly, :ontologyType, :viewingRestriction, :acl, - :group, :hasDomain, :views, :viewOf, :flat], :submissionStatus=>[:code], :hasOntologyLanguage=>[:acronym]}, :submissionStatus] - end - submissions = submissions_query.include(includes).to_a - - # Figure out latest parsed submissions using all submissions - latest_submissions = {} + latest_submissions = page? ? submissions : {} # latest_submission doest not work with pagination submissions.each do |sub| - # To retrieve all metadata, but slow when a lot of ontologies - if includes_param.first == :all - sub.bring_remaining + unless page? + next if include_ready?(options) && !sub.ready? + next if sub.ontology.nil? + latest_submissions[sub.ontology.acronym] ||= sub + latest_submissions[sub.ontology.acronym] = sub if sub.submissionId.to_i > latest_submissions[sub.ontology.acronym].submissionId.to_i end - next if include_ready && !sub.ready? - next if sub.ontology.nil? - latest_submissions[sub.ontology.acronym] ||= sub - latest_submissions[sub.ontology.acronym] = sub if sub.submissionId.to_i > latest_submissions[sub.ontology.acronym].submissionId.to_i end latest_submissions end diff --git a/helpers/request_params_helper.rb b/helpers/request_params_helper.rb index e7ec091a..842ee0a7 100644 --- a/helpers/request_params_helper.rb +++ b/helpers/request_params_helper.rb @@ -13,6 +13,10 @@ def settings_params(klass) [attributes, page, size, order_by, bring_unmapped] end + def page? + !params[:page].nil? + end + def is_set?(param) !param.nil? 
&& param != "" end @@ -25,6 +29,38 @@ def filter build_filter end + def apply_filters(object, query) + attributes_to_filter = object.attributes(:all).select{|x| params.keys.include?(x.to_s)} + filters = attributes_to_filter.map {|key| [key, params[key]&.split(',')]}.to_h + add_direct_filters(filters, query) + end + + def apply_submission_filters(query) + + filters = { + naturalLanguage: params[:naturalLanguage]&.split(',') , #%w[http://lexvo.org/id/iso639-3/fra http://lexvo.org/id/iso639-3/eng], + hasOntologyLanguage_acronym: params[:hasOntologyLanguage]&.split(',') , #%w[OWL SKOS], + ontology_hasDomain_acronym: params[:hasDomain]&.split(',') , #%w[Crop Vue_francais], + ontology_group_acronym: params[:group]&.split(','), #%w[RICE CROP], + isOfType: params[:isOfType]&.split(','), #["http://omv.ontoware.org/2005/05/ontology#Vocabulary"], + hasFormalityLevel: params[:hasFormalityLevel]&.split(','), #["http://w3id.org/nkos/nkostype#thesaurus"], + ontology_viewingRestriction: params[:viewingRestriction]&.split(','), #["private"] + } + inverse_filters = { + status: params[:status], #"retired", + submissionStatus: params[:submissionStatus] #"RDF", + } + + query = add_direct_filters(filters, query) + + query = add_inverse_filters(inverse_filters, query) + + query = add_acronym_name_filters(query) + + add_order_by_patterns(query) + end + + def get_order_by_from(params, default_order = :asc) if is_set?(params['sortby']) orders = (params["order"] || default_order.to_s).split(',') @@ -50,6 +86,67 @@ def bring_unmapped_to(page_data, sub, klass) end private + def extract_attr(key) + attr, sub_attr, sub_sub_attr = key.to_s.split('_') + + return attr.to_sym unless sub_attr + + return {attr.to_sym => [sub_attr.to_sym]} unless sub_sub_attr + + {attr.to_sym => [sub_attr.to_sym => sub_sub_attr.to_sym]} + end + + def add_direct_filters(filters, query) + filters.each do |key, values| + attr = extract_attr(key) + next if Array(values).empty? + + filter = Goo::Filter.new(attr).regex(values.first) + values.drop(1).each do |v| + filter = filter.or(Goo::Filter.new(attr).regex(v)) + end + query = query.filter(filter) + end + query + end + + def add_inverse_filters(inverse_filters, query) + inverse_filters.each do |key, value| + attr = extract_attr(key) + next unless value + + filter = Goo::Filter.new(attr).regex("^(?:(?!#{value}).)*$") + query = query.filter(filter) + end + query + end + + def add_acronym_name_filters(query) + if params[:acronym] + filter = Goo::Filter.new(extract_attr(:ontology_acronym)).regex(params[:acronym]) + if params[:name] + filter.or(Goo::Filter.new(extract_attr(:ontology_name)).regex(params[:name])) + end + query = query.filter(filter) + elsif params[:name] + filter = Goo::Filter.new(extract_attr(:ontology_name)).regex(params[:name]) + query = query.filter(filter) + end + query + end + + def add_order_by_patterns(query) + if params[:order_by] + attr, sub_attr = params[:order_by].to_s.split('_') + if sub_attr + order_pattern = { attr.to_sym => { sub_attr.to_sym => (sub_attr.eql?("name") ? 
:asc : :desc) } } + else + order_pattern = { attr.to_sym => :desc } + end + query = query.order_by(order_pattern) + end + query + end def sort_order_item(param, order) [param.to_sym, order.to_sym] diff --git a/helpers/submission_helper.rb b/helpers/submission_helper.rb new file mode 100644 index 00000000..07f82138 --- /dev/null +++ b/helpers/submission_helper.rb @@ -0,0 +1,67 @@ +require 'sinatra/base' + +module Sinatra + module Helpers + module SubmissionHelper + def submission_include_params + # When asking to display all metadata, we are using bring_remaining on each submission. Slower but best way to retrieve all attrs + includes = OntologySubmission.goo_attrs_to_load(includes_param) + if includes.find{|v| v.is_a?(Hash) && v.keys.include?(:ontology)} + includes << {:ontology=>[:administeredBy, :acronym, :name, :viewingRestriction, :group, :hasDomain,:notes, :reviews, :projects,:acl, :viewOf]} + end + + if includes.find{|v| v.is_a?(Hash) && v.keys.include?(:contact)} + includes << {:contact=>[:name, :email]} + end + includes + end + + def submission_attributes_all + out = [LinkedData::Models::OntologySubmission.embed_values_hash] + out << {:contact=>[:name, :email]} + out << {:ontology=>[:acronym, :name, :administeredBy, :group, :viewingRestriction, :doNotUpdate, :flat, + :hasDomain, :summaryOnly, :acl, :viewOf, :ontologyType]} + + out + end + + def retrieve_submissions(options) + status = (options[:status] || "RDF").to_s.upcase + status = "RDF" if status.eql?("READY") + ontology_acronym = options[:ontology] + any = status.eql?("ANY") + include_views = options[:also_include_views] || false + includes, page, size, order_by, _ = settings_params(LinkedData::Models::OntologySubmission) + includes << :submissionStatus unless includes.include?(:submissionStatus) + + submissions_query = LinkedData::Models::OntologySubmission + submissions_query = submissions_query.where(ontology: [acronym: ontology_acronym]) if ontology_acronym + + if any + submissions_query = submissions_query.where unless ontology_acronym + else + submissions_query = submissions_query.where({ submissionStatus: [code: status] }) + end + + submissions_query = apply_submission_filters(submissions_query) + submissions_query = submissions_query.filter(Goo::Filter.new(ontology: [:viewOf]).unbound) unless include_views + submissions_query = submissions_query.filter(filter) if filter? + + + submissions = submissions_query.include(submission_include_params) + if page? 
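          # A page/pagesize parameter switches the response to a paginated page object;
          # without it, every matching submission is materialized as a plain array.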
+ submissions.page(page, size).all + else + submissions.to_a + end + end + + def include_ready?(options) + options[:status] && options[:status].to_s.upcase.eql?("READY") + end + + end + end +end + +helpers Sinatra::Helpers::SubmissionHelper \ No newline at end of file diff --git a/test/controllers/test_ontologies_controller.rb b/test/controllers/test_ontologies_controller.rb index b90f15e1..34f8c4dc 100644 --- a/test/controllers/test_ontologies_controller.rb +++ b/test/controllers/test_ontologies_controller.rb @@ -217,13 +217,13 @@ def test_download_acl_only begin allowed_user = User.new({ username: "allowed", - email: "test@example.org", + email: "test1@example.org", password: "12345" }) allowed_user.save blocked_user = User.new({ username: "blocked", - email: "test@example.org", + email: "test2@example.org", password: "12345" }) blocked_user.save diff --git a/test/controllers/test_ontology_submissions_controller.rb b/test/controllers/test_ontology_submissions_controller.rb index 7500dce4..8c4cb098 100644 --- a/test/controllers/test_ontology_submissions_controller.rb +++ b/test/controllers/test_ontology_submissions_controller.rb @@ -18,7 +18,10 @@ def self._set_vars administeredBy: "tim", "file" => Rack::Test::UploadedFile.new(@@test_file, ""), released: DateTime.now.to_s, - contact: [{name: "test_name", email: "test@example.org"}] + contact: [{name: "test_name", email: "test3@example.org"}], + URI: 'https://test.com/test', + status: 'production', + description: 'ontology description' } @@status_uploaded = "UPLOADED" @@status_rdf = "RDF" @@ -36,6 +39,12 @@ def self._create_onts ont.save end + def setup + delete_ontologies_and_submissions + ont = Ontology.new(acronym: @@acronym, name: @@name, administeredBy: [@@user]) + ont.save + end + def test_submissions_for_given_ontology num_onts_created, created_ont_acronyms = create_ontologies_and_submissions(ont_count: 1) ontology = created_ont_acronyms.first @@ -156,13 +165,13 @@ def test_download_acl_only begin allowed_user = User.new({ username: "allowed", - email: "test@example.org", + email: "test4@example.org", password: "12345" }) allowed_user.save blocked_user = User.new({ username: "blocked", - email: "test@example.org", + email: "test5@example.org", password: "12345" }) blocked_user.save @@ -192,4 +201,114 @@ def test_download_acl_only end end + + + def test_submissions_default_includes + ontology_count = 5 + num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: ontology_count, submission_count: 1, submissions_to_process: []) + + submission_default_attributes = LinkedData::Models::OntologySubmission.hypermedia_settings[:serialize_default].map(&:to_s) + + get("/submissions?display_links=false&display_context=false&include_status=ANY") + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + + assert_equal ontology_count, submissions.size + assert(submissions.all? { |sub| submission_default_attributes.eql?(submission_keys(sub)) }) + + get("/ontologies/#{created_ont_acronyms.first}/submissions?display_links=false&display_context=false") + + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + assert_equal 1, submissions.size + assert(submissions.all? 
{ |sub| submission_default_attributes.eql?(submission_keys(sub)) }) + end + + def test_submissions_all_includes + ontology_count = 5 + num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: ontology_count, submission_count: 1, submissions_to_process: []) + def submission_all_attributes + attrs = OntologySubmission.goo_attrs_to_load([:all]) + embed_attrs = attrs.select { |x| x.is_a?(Hash) }.first + + attrs.delete_if { |x| x.is_a?(Hash) }.map(&:to_s) + embed_attrs.keys.map(&:to_s) + end + get("/submissions?include=all&display_links=false&display_context=false") + + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + assert_equal ontology_count, submissions.size + + assert(submissions.all? { |sub| submission_all_attributes.sort.eql?(submission_keys(sub).sort) }) + assert(submissions.all? { |sub| sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])) }) + + get("/ontologies/#{created_ont_acronyms.first}/submissions?include=all&display_links=false&display_context=false") + + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + assert_equal 1, submissions.size + + assert(submissions.all? { |sub| submission_all_attributes.sort.eql?(submission_keys(sub).sort) }) + assert(submissions.all? { |sub| sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])) }) + + get("/ontologies/#{created_ont_acronyms.first}/latest_submission?include=all&display_links=false&display_context=false") + assert last_response.ok? + sub = MultiJson.load(last_response.body) + + assert(submission_all_attributes.sort.eql?(submission_keys(sub).sort)) + assert(sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id]))) + + get("/ontologies/#{created_ont_acronyms.first}/submissions/1?include=all&display_links=false&display_context=false") + assert last_response.ok? + sub = MultiJson.load(last_response.body) + + assert(submission_all_attributes.sort.eql?(submission_keys(sub).sort)) + assert(sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id]))) + end + + def test_submissions_custom_includes + ontology_count = 5 + num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: ontology_count, submission_count: 1, submissions_to_process: []) + include = 'ontology,contact,submissionId' + + get("/submissions?include=#{include}&display_links=false&display_context=false") + + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + assert_equal ontology_count, submissions.size + assert(submissions.all? { |sub| include.split(',').eql?(submission_keys(sub)) }) + assert(submissions.all? { |sub| sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])) }) + + get("/ontologies/#{created_ont_acronyms.first}/submissions?include=#{include}&display_links=false&display_context=false") + + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + assert_equal 1, submissions.size + assert(submissions.all? { |sub| include.split(',').eql?(submission_keys(sub)) }) + assert(submissions.all? { |sub| sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])) }) + + get("/ontologies/#{created_ont_acronyms.first}/latest_submission?include=#{include}&display_links=false&display_context=false") + assert last_response.ok? 
+ sub = MultiJson.load(last_response.body) + assert(include.split(',').eql?(submission_keys(sub))) + assert(sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id]))) + + get("/ontologies/#{created_ont_acronyms.first}/submissions/1?include=#{include}&display_links=false&display_context=false") + assert last_response.ok? + sub = MultiJson.load(last_response.body) + assert(include.split(',').eql?(submission_keys(sub))) + assert(sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id]))) + end + + def test_submissions_param_include + skip('only for local development regrouping a set of tests') + test_submissions_default_includes + test_submissions_all_includes + test_submissions_custom_includes + end + + private + def submission_keys(sub) + sub.to_hash.keys - %w[@id @type id] + end end diff --git a/test/data/ontology_files/thesaurusINRAE_nouv_structure.rdf b/test/data/ontology_files/thesaurusINRAE_nouv_structure.rdf index 8353d82f..ca303834 100644 --- a/test/data/ontology_files/thesaurusINRAE_nouv_structure.rdf +++ b/test/data/ontology_files/thesaurusINRAE_nouv_structure.rdf @@ -30,7 +30,7 @@ 1331561625299 - aktivite + aktivite 2012-03-12T22:13:45Z 2017-09-22T14:09:06Z From c791b3f38270ab275ab4bb78588546bd1d73daab Mon Sep 17 00:00:00 2001 From: Syphax bouazzouni Date: Tue, 5 Dec 2023 20:47:01 +0100 Subject: [PATCH 6/8] Merge pull request #41 from ontoportal-lirmm/feature/add-multiprovider-auth (#44) Feature: Add multi provider authentication --- .dockerignore | 1 + Gemfile | 1 + Gemfile.lock | 47 +++++++++--------- config/environments/test.rb | 18 +++++++ controllers/users_controller.rb | 48 +++++++----------- docker-compose.yml | 16 +++++- helpers/search_helper.rb | 1 + helpers/users_helper.rb | 49 +++++++++++++++++++ test/controllers/test_search_controller.rb | 27 +++++++--- test/controllers/test_users_controller.rb | 36 ++++++++++++++ test/middleware/test_rack_attack.rb | 6 +-- .../configsets/term_search/conf/schema.xml | 41 +++++++++++++--- test/test_case.rb | 2 + 13 files changed, 222 insertions(+), 71 deletions(-) diff --git a/.dockerignore b/.dockerignore index cf76ed57..3b15d33c 100644 --- a/.dockerignore +++ b/.dockerignore @@ -9,3 +9,4 @@ tmp/* # Editor temp files *.swp *.swo +test/solr diff --git a/Gemfile b/Gemfile index dfc4fa69..49c8357e 100644 --- a/Gemfile +++ b/Gemfile @@ -13,6 +13,7 @@ gem 'rake', '~> 10.0' gem 'sinatra', '~> 1.0' gem 'sinatra-advanced-routes' gem 'sinatra-contrib', '~> 1.0' +gem 'request_store' # Rack middleware gem 'ffi' diff --git a/Gemfile.lock b/Gemfile.lock index 8d0a6681..2612b968 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,6 +1,6 @@ GIT remote: https://github.com/ncbo/ncbo_ontology_recommender.git - revision: f440ae855a217807fead1d20629a0f187997b973 + revision: 013abea4af3b10910ec661dbb358a4b6cae198a4 branch: master specs: ncbo_ontology_recommender (0.0.1) @@ -11,7 +11,7 @@ GIT GIT remote: https://github.com/ontoportal-lirmm/goo.git - revision: bd7154217438c3b9160e0e9b495c7c718b55fbf8 + revision: 74ea47defc7f6260b045a6c6997bbe6a59c7bf62 branch: master specs: goo (0.0.2) @@ -53,7 +53,7 @@ GIT GIT remote: https://github.com/ontoportal-lirmm/ontologies_linked_data.git - revision: f44f7baa96eb3ee10dfab4a8aca154161ba7dd89 + revision: 80a331d053ea04397a903452288c2186822c340c branch: master specs: ontologies_linked_data (0.0.1) @@ -108,7 +108,8 @@ GEM airbrussh (1.5.0) sshkit (>= 1.6.1, != 1.7.0) backports (3.24.1) - bcrypt (3.1.19) + base64 (0.2.0) + bcrypt (3.1.20) 
bcrypt_pbkdf (1.1.0) bigdecimal (1.4.2) builder (3.2.4) @@ -130,11 +131,10 @@ GEM rexml cube-ruby (0.0.3) dante (0.2.0) - date (3.3.3) + date (3.3.4) declarative (0.0.20) docile (1.4.0) - domain_name (0.5.20190701) - unf (>= 0.0.5, < 1.0.0) + domain_name (0.6.20231109) ed25519 (1.3.0) faraday (1.10.3) faraday-em_http (~> 1.0) @@ -164,7 +164,7 @@ GEM ffi (~> 1.0) google-apis-analytics_v3 (0.13.0) google-apis-core (>= 0.11.0, < 2.a) - google-apis-core (0.11.1) + google-apis-core (0.11.2) addressable (~> 2.5, >= 2.5.1) googleauth (>= 0.16.2, < 2.a) httpclient (>= 2.8.1, < 3.a) @@ -189,14 +189,14 @@ GEM httpclient (2.8.3) i18n (0.9.5) concurrent-ruby (~> 1.0) - json (2.6.3) + json (2.7.1) json-schema (2.8.1) addressable (>= 2.4) - json_pure (2.6.3) + json_pure (2.7.1) jwt (2.7.1) kgio (2.11.4) - libxml-ruby (4.1.1) - logger (1.5.3) + libxml-ruby (4.1.2) + logger (1.6.0) macaddr (1.7.2) systemu (~> 2.6.5) mail (2.8.1) @@ -216,12 +216,12 @@ GEM multi_json (1.15.0) multipart-post (2.3.0) net-http-persistent (2.9.4) - net-imap (0.4.1) + net-imap (0.4.7) date net-protocol net-pop (0.1.2) net-protocol - net-protocol (0.2.1) + net-protocol (0.2.2) timeout net-scp (4.0.0) net-ssh (>= 2.6.5, < 8.0.0) @@ -229,7 +229,8 @@ GEM net-protocol net-ssh (7.2.0) netrc (0.11.0) - newrelic_rpm (9.5.0) + newrelic_rpm (9.6.0) + base64 oj (2.18.5) omni_logger (0.1.4) logger @@ -240,7 +241,7 @@ GEM pry (0.14.2) coderay (~> 1.1) method_source (~> 1.0) - public_suffix (5.0.3) + public_suffix (5.0.4) rack (1.6.13) rack-accept (0.4.5) rack (>= 0.4) @@ -275,6 +276,8 @@ GEM declarative (< 0.1.0) trailblazer-option (>= 0.1.1, < 0.2.0) uber (< 0.2.0) + request_store (1.5.1) + rack (>= 1.4) rest-client (2.1.0) http-accept (>= 1.7.0, < 2.0) http-cookie (>= 1.0.2, < 2.0) @@ -317,20 +320,17 @@ GEM rack-test sinatra (~> 1.4.0) tilt (>= 1.3, < 3) - sshkit (1.21.5) + sshkit (1.21.6) net-scp (>= 1.1.2) net-ssh (>= 2.8.0) systemu (2.6.5) temple (0.10.3) tilt (2.3.0) - timeout (0.4.0) + timeout (0.4.1) trailblazer-option (0.1.2) tzinfo (2.0.6) concurrent-ruby (~> 1.0) uber (0.1.0) - unf (0.1.4) - unf_ext - unf_ext (0.0.8.2) unicorn (6.1.0) kgio (~> 2.6) raindrops (~> 0.7) @@ -346,6 +346,8 @@ GEM webrick (1.8.1) PLATFORMS + x86_64-darwin-21 + x86_64-darwin-23 x86_64-linux DEPENDENCIES @@ -389,6 +391,7 @@ DEPENDENCIES redis-activesupport redis-rack-cache (~> 2.0) redis-store (= 1.9.1) + request_store shotgun! 
simplecov simplecov-cobertura @@ -401,4 +404,4 @@ DEPENDENCIES webmock BUNDLED WITH - 2.3.23 + 2.4.21 diff --git a/config/environments/test.rb b/config/environments/test.rb index 0f421dec..16bf407a 100644 --- a/config/environments/test.rb +++ b/config/environments/test.rb @@ -55,6 +55,24 @@ "apikey" => "1cfae05f-9e67-486f-820b-b393dec5764b" } } + config.oauth_providers = { + github: { + check: :access_token, + link: 'https://api.github.com/user' + }, + keycloak: { + check: :jwt_token, + cert: 'KEYCLOAK_SECRET_KEY' + }, + orcid: { + check: :access_token, + link: 'https://pub.orcid.org/v3.0/me' + }, + google: { + check: :access_token, + link: 'https://www.googleapis.com/oauth2/v3/userinfo' + } + } end Annotator.config do |config| diff --git a/controllers/users_controller.rb b/controllers/users_controller.rb index 00b6e732..09a1835b 100644 --- a/controllers/users_controller.rb +++ b/controllers/users_controller.rb @@ -1,14 +1,17 @@ class UsersController < ApplicationController namespace "/users" do post "/authenticate" do - user_id = params["user"] - user_password = params["password"] + # Modify params to show all user attributes params["display"] = User.attributes.join(",") - user = User.find(user_id).include(User.goo_attrs_to_load(includes_param) + [:passwordHash]).first - authenticated = user.authenticate(user_password) unless user.nil? - error 401, "Username/password combination invalid" unless authenticated - user.show_apikey = true + + if params["access_token"] + user = oauth_authenticate(params) + user.bring(*User.goo_attrs_to_load(includes_param)) + else + user = login_password_authenticate(params) + end + user.show_apikey = true unless user.nil? reply user end @@ -20,17 +23,13 @@ class UsersController < ApplicationController post "/create_reset_password_token" do email = params["email"] username = params["username"] - user = LinkedData::Models::User.where(email: email, username: username).include(LinkedData::Models::User.attributes).first - error 404, "User not found" unless user - reset_token = token(36) - user.resetToken = reset_token + user = send_reset_token(email, username) + if user.valid? - user.save(override_security: true) - LinkedData::Utils::Notifications.reset_password(user, reset_token) + halt 204 else error 422, user.errors end - halt 204 end ## @@ -42,11 +41,11 @@ class UsersController < ApplicationController email = params["email"] || "" username = params["username"] || "" token = params["token"] || "" + params["display"] = User.attributes.join(",") # used to serialize everything via the serializer - user = LinkedData::Models::User.where(email: email, username: username).include(User.goo_attrs_to_load(includes_param)).first - error 404, "User not found" unless user - if token.eql?(user.resetToken) - user.show_apikey = true + + user, token_accepted = reset_password(email, username, token) + if token_accepted reply user else error 403, "Password reset not authorized with this token" @@ -98,12 +97,6 @@ class UsersController < ApplicationController private - def token(len) - chars = ("a".."z").to_a + ("A".."Z").to_a + ("1".."9").to_a - token = "" - 1.upto(len) { |i| token << chars[rand(chars.size-1)] } - token - end def create_user params ||= @params @@ -111,14 +104,7 @@ def create_user error 409, "User with username `#{params["username"]}` already exists" unless user.nil? user = instance_from_params(User, params) if user.valid? - user.save - # Send an email to the administrator to warn him about the newly created user - begin - if !LinkedData.settings.admin_emails.nil? 
&& !LinkedData.settings.admin_emails.empty? - LinkedData::Utils::Notifications.new_user(user) - end - rescue Exception => e - end + user.save(send_notifications: false) else error 422, user.errors end diff --git a/docker-compose.yml b/docker-compose.yml index de084081..5cb64963 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -75,10 +75,14 @@ services: redis-ut: image: redis + ports: + - 6379:6379 4store-ut: image: bde2020/4store #volume: fourstore:/var/lib/4store + ports: + - 9000:9000 command: > bash -c "4s-backend-setup --segments 4 ontoportal_kb && 4s-backend ontoportal_kb @@ -88,10 +92,20 @@ services: solr-ut: - image: ontoportal/solr-ut:0.1 + image: solr:8 + volumes: + - ./test/solr/configsets:/configsets:ro + ports: + - "8983:8983" + command: > + bash -c "precreate-core term_search_core1 /configsets/term_search + && precreate-core prop_search_core1 /configsets/property_search + && solr-foreground" mgrep-ut: image: ontoportal/mgrep-ncbo:0.1 + ports: + - "55556:55555" agraph-ut: image: franzinc/agraph:v7.3.0 diff --git a/helpers/search_helper.rb b/helpers/search_helper.rb index 10de14c0..5d37d884 100644 --- a/helpers/search_helper.rb +++ b/helpers/search_helper.rb @@ -345,6 +345,7 @@ def populate_classes_from_search(classes, ontology_acronyms=nil) doc[:submission] = old_class.submission doc[:properties] = MultiJson.load(doc.delete(:propertyRaw)) if include_param_contains?(:properties) instance = LinkedData::Models::Class.read_only(doc) + instance.prefLabel = instance.prefLabel.first if instance.prefLabel.is_a?(Array) classes_hash[ont_uri_class_uri] = instance end diff --git a/helpers/users_helper.rb b/helpers/users_helper.rb index 5d4266c1..e2c69e60 100644 --- a/helpers/users_helper.rb +++ b/helpers/users_helper.rb @@ -17,6 +17,55 @@ def filter_for_user_onts(obj) obj end + + def send_reset_token(email, username) + user = LinkedData::Models::User.where(email: email, username: username).include(LinkedData::Models::User.attributes).first + error 404, "User not found" unless user + reset_token = token(36) + user.resetToken = reset_token + + return user if user.valid? + + user.save(override_security: true) + LinkedData::Utils::Notifications.reset_password(user, reset_token) + user + end + + def token(len) + chars = ("a".."z").to_a + ("A".."Z").to_a + ("1".."9").to_a + token = "" + 1.upto(len) { |i| token << chars[rand(chars.size-1)] } + token + end + + def reset_password(email, username, token) + user = LinkedData::Models::User.where(email: email, username: username).include(User.goo_attrs_to_load(includes_param)).first + + error 404, "User not found" unless user + + user.show_apikey = true + + [user, token.eql?(user.resetToken)] + end + + def oauth_authenticate(params) + access_token = params["access_token"] + provider = params["token_provider"] + user = LinkedData::Models::User.oauth_authenticate(access_token, provider) + error 401, "Access token invalid"if user.nil? + user + end + + def login_password_authenticate(params) + user_id = params["user"] + user_password = params["password"] + user = User.find(user_id).include(User.goo_attrs_to_load(includes_param) + [:passwordHash]).first + authenticated = false + authenticated = user.authenticate(user_password) unless user.nil? 
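        # An unknown username and a wrong password both leave `authenticated` falsy,
        # so both cases fall through to the same 401 response below.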
+ error 401, "Username/password combination invalid" unless authenticated + + user + end end end end diff --git a/test/controllers/test_search_controller.rb b/test/controllers/test_search_controller.rb index 44c67c7e..21a3dd18 100644 --- a/test/controllers/test_search_controller.rb +++ b/test/controllers/test_search_controller.rb @@ -85,7 +85,7 @@ def test_search_ontology_filter assert last_response.ok? results = MultiJson.load(last_response.body) doc = results["collection"][0] - assert_equal "cell line", doc["prefLabel"] + assert_equal "cell line", doc["prefLabel"].first assert doc["links"]["ontology"].include? acronym results["collection"].each do |doc| acr = doc["links"]["ontology"].split('/')[-1] @@ -103,7 +103,8 @@ def test_search_other_filters get "search?q=data&require_definitions=true" assert last_response.ok? results = MultiJson.load(last_response.body) - assert_equal 26, results["collection"].length + assert results["collection"].all? {|doc| !doc["definition"].nil? && doc.values.flatten.join(" ").include?("data") } + #assert_equal 26, results["collection"].length get "search?q=data&require_definitions=false" assert last_response.ok? @@ -115,10 +116,14 @@ def test_search_other_filters get "search?q=Integration%20and%20Interoperability&ontologies=#{acronym}" results = MultiJson.load(last_response.body) - assert_equal 22, results["collection"].length + + assert results["collection"].all? { |x| !x["obsolete"] } + count = results["collection"].length + get "search?q=Integration%20and%20Interoperability&ontologies=#{acronym}&also_search_obsolete=false" results = MultiJson.load(last_response.body) - assert_equal 22, results["collection"].length + assert_equal count, results["collection"].length + get "search?q=Integration%20and%20Interoperability&ontologies=#{acronym}&also_search_obsolete=true" results = MultiJson.load(last_response.body) assert_equal 29, results["collection"].length @@ -134,8 +139,14 @@ def test_search_other_filters # testing cui and semantic_types flags get "search?q=Funding%20Resource&ontologies=#{acronym}&include=prefLabel,synonym,definition,notation,cui,semanticType" results = MultiJson.load(last_response.body) - assert_equal 35, results["collection"].length - assert_equal "Funding Resource", results["collection"][0]["prefLabel"] + #assert_equal 35, results["collection"].length + assert results["collection"].all? 
do |r| + ["prefLabel", "synonym", "definition", "notation", "cui", "semanticType"].map {|x| r[x]} + .flatten + .join(' ') + .include?("Funding Resource") + end + assert_equal "Funding Resource", results["collection"][0]["prefLabel"].first assert_equal "T028", results["collection"][0]["semanticType"][0] assert_equal "X123456", results["collection"][0]["cui"][0] @@ -190,7 +201,7 @@ def test_search_provisional_class assert_equal 10, results["collection"].length provisional = results["collection"].select {|res| assert_equal ontology_type, res["ontologyType"]; res["provisional"]} assert_equal 1, provisional.length - assert_equal @@test_pc_root.label, provisional[0]["prefLabel"] + assert_equal @@test_pc_root.label, provisional[0]["prefLabel"].first # subtree root with provisional class test get "search?ontology=#{acronym}&subtree_root_id=#{CGI::escape(@@cls_uri.to_s)}&also_search_provisional=true" @@ -199,7 +210,7 @@ def test_search_provisional_class provisional = results["collection"].select {|res| res["provisional"]} assert_equal 1, provisional.length - assert_equal @@test_pc_child.label, provisional[0]["prefLabel"] + assert_equal @@test_pc_child.label, provisional[0]["prefLabel"].first end end diff --git a/test/controllers/test_users_controller.rb b/test/controllers/test_users_controller.rb index 337da52e..3710b503 100644 --- a/test/controllers/test_users_controller.rb +++ b/test/controllers/test_users_controller.rb @@ -100,4 +100,40 @@ def test_authentication assert user["username"].eql?(@@usernames.first) end + def test_oauth_authentication + fake_responses = { + github: { + id: 123456789, + login: 'github_user', + email: 'github_user@example.com', + name: 'GitHub User', + avatar_url: 'https://avatars.githubusercontent.com/u/123456789' + }, + google: { + sub: 'google_user_id', + email: 'google_user@example.com', + name: 'Google User', + given_name: 'Google', + family_name: 'User', + picture: 'https://lh3.googleusercontent.com/a-/user-profile-image-url' + }, + orcid: { + orcid: '0000-0002-1825-0097', + email: 'orcid_user@example.com', + name: { + "family-name": 'ORCID', + "given-names": 'User' + } + } + } + + fake_responses.each do |provider, data| + WebMock.stub_request(:get, LinkedData::Models::User.oauth_providers[provider][:link]) + .to_return(status: 200, body: data.to_json, headers: { 'Content-Type' => 'application/json' }) + post "/users/authenticate", {access_token:'jkooko', token_provider: provider.to_s} + assert last_response.ok? 
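      # Each stubbed provider is expected to authenticate and return a user whose
      # email matches the faked profile payload registered above.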
+ user = MultiJson.load(last_response.body) + assert data[:email], user["email"] + end + end end diff --git a/test/middleware/test_rack_attack.rb b/test/middleware/test_rack_attack.rb index 43143080..0b10c9e1 100644 --- a/test/middleware/test_rack_attack.rb +++ b/test/middleware/test_rack_attack.rb @@ -18,14 +18,14 @@ def self.before_suite LinkedData::OntologiesAPI.settings.req_per_second_per_ip = 1 LinkedData::OntologiesAPI.settings.safe_ips = Set.new(["1.2.3.4", "1.2.3.5"]) - @@user = LinkedData::Models::User.new({username: "user", password: "test_password", email: "test_email@example.org"}) + @@user = LinkedData::Models::User.new({username: "user", password: "test_password", email: "test_email1@example.org"}) @@user.save - @@bp_user = LinkedData::Models::User.new({username: "ncbobioportal", password: "test_password", email: "test_email@example.org"}) + @@bp_user = LinkedData::Models::User.new({username: "ncbobioportal", password: "test_password", email: "test_email2@example.org"}) @@bp_user.save admin_role = LinkedData::Models::Users::Role.find("ADMINISTRATOR").first - @@admin = LinkedData::Models::User.new({username: "admin", password: "test_password", email: "test_email@example.org", role: [admin_role]}) + @@admin = LinkedData::Models::User.new({username: "admin", password: "test_password", email: "test_email3@example.org", role: [admin_role]}) @@admin.save # Redirect output or we get a bunch of noise from Rack (gets reset in the after_suite method). diff --git a/test/solr/configsets/term_search/conf/schema.xml b/test/solr/configsets/term_search/conf/schema.xml index 6b18a2a1..fa95e127 100644 --- a/test/solr/configsets/term_search/conf/schema.xml +++ b/test/solr/configsets/term_search/conf/schema.xml @@ -128,11 +128,20 @@ - - - - - + + + + + + + + + + + + + + @@ -140,9 +149,18 @@ + + + + + + + - + + + @@ -251,6 +269,17 @@ + + + + + + + + + + + diff --git a/test/test_case.rb b/test/test_case.rb index 7d3d0716..be162d5e 100644 --- a/test/test_case.rb +++ b/test/test_case.rb @@ -21,7 +21,9 @@ require_relative 'test_log_file' require_relative '../app' require 'minitest/unit' +require 'webmock/minitest' MiniTest::Unit.autorun +WebMock.allow_net_connect! 
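# webmock/minitest disables real HTTP by default; allow_net_connect! re-enables it
# so that only explicitly stubbed requests (e.g. the OAuth provider endpoints in
# test_users_controller) are intercepted while everything else reaches the network.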
require 'rack/test' require 'multi_json' require 'oj' From 7afc66161ae5d451c05937d9bab22eaa23417be4 Mon Sep 17 00:00:00 2001 From: Syphax bouazzouni Date: Tue, 5 Dec 2023 21:12:50 +0100 Subject: [PATCH 7/8] Merge to master: Release 2.3.4 - Multilingual (#42) * Merge pull request #34 from ontoportal-lirmm/feature/paginate-and-filter-ontologies-endpoint Feature: Implement pagination and filters to submissions endpoint * Merge pull request #36 from ontoportal-lirmm/feature/paginate-and-filter-ontologies-endpoint Feature: Add oder by and filters for ontologies endpoint * Merge pull request #32 from ontoportal-lirmm/feature/support-multilingual-read-one-language-from-request-parameter Feature: Support multilingual - Add request_lang middleware * Feature: Add support of multilingual search (#40) * update get_term_search_query to support multilanguages search * rename var * fix search lang suffix to use underscore not @ * add multilangual search test --------- Co-authored-by: Syphax Bouazzouni --------- Co-authored-by: HADDAD Zineddine --- Gemfile.lock | 6 +-- app.rb | 6 +++ helpers/search_helper.rb | 15 ++++--- lib/rack/request_lang.rb | 16 +++++++ .../test_ontology_submissions_controller.rb | 15 +++++++ test/controllers/test_search_controller.rb | 44 +++++++++++++++++++ test/data/ontology_files/BRO_v3.2.owl | 3 ++ test/solr/docker-compose.yml | 13 ++++++ test/solr/generate_ncbo_configsets.sh | 35 ++++++++------- 9 files changed, 129 insertions(+), 24 deletions(-) create mode 100644 lib/rack/request_lang.rb create mode 100644 test/solr/docker-compose.yml diff --git a/Gemfile.lock b/Gemfile.lock index 2612b968..3428c7b2 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -247,7 +247,7 @@ GEM rack (>= 0.4) rack-attack (6.6.1) rack (>= 1.0, < 3) - rack-cache (1.13.0) + rack-cache (1.14.0) rack (>= 0.4) rack-cors (1.0.6) rack (>= 1.6.0) @@ -270,8 +270,8 @@ GEM redis-rack-cache (2.2.1) rack-cache (>= 1.10, < 2) redis-store (>= 1.6, < 2) - redis-store (1.9.1) - redis (>= 4, < 5) + redis-store (1.9.2) + redis (>= 4, < 6) representable (3.2.0) declarative (< 0.1.0) trailblazer-option (>= 0.1.1, < 0.2.0) diff --git a/app.rb b/app.rb index 5360ae4b..46457c86 100644 --- a/app.rb +++ b/app.rb @@ -29,6 +29,7 @@ require_relative 'lib/rack/cube_reporter' require_relative 'lib/rack/param_translator' require_relative 'lib/rack/slice_detection' +require_relative 'lib/rack/request_lang' # Logging setup require_relative "config/logging" @@ -36,6 +37,8 @@ # Inflector setup require_relative "config/inflections" +require 'request_store' + # Protection settings set :protection, :except => :path_traversal @@ -143,6 +146,9 @@ use Rack::PostBodyToParams use Rack::ParamTranslator +use RequestStore::Middleware +use Rack::RequestLang + use LinkedData::Security::Authorization use LinkedData::Security::AccessDenied diff --git a/helpers/search_helper.rb b/helpers/search_helper.rb index 5d37d884..071000d9 100644 --- a/helpers/search_helper.rb +++ b/helpers/search_helper.rb @@ -82,6 +82,9 @@ def get_term_search_query(text, params={}) end end + lang = params["lang"] || params["language"] + lang_suffix = lang && !lang.eql?("all") ? 
"_#{lang}" : "" + query = "" params["defType"] = "edismax" params["stopwords"] = "true" @@ -98,15 +101,15 @@ def get_term_search_query(text, params={}) if params[EXACT_MATCH_PARAM] == "true" query = "\"#{solr_escape(text)}\"" - params["qf"] = "resource_id^20 prefLabelExact^10 synonymExact #{QUERYLESS_FIELDS_STR}" - params["hl.fl"] = "resource_id prefLabelExact synonymExact #{QUERYLESS_FIELDS_STR}" + params["qf"] = "resource_id^20 prefLabelExact#{lang_suffix }^10 synonymExact#{lang_suffix } #{QUERYLESS_FIELDS_STR}" + params["hl.fl"] = "resource_id prefLabelExact#{lang_suffix } synonymExact#{lang_suffix } #{QUERYLESS_FIELDS_STR}" elsif params[SUGGEST_PARAM] == "true" || text[-1] == '*' text.gsub!(/\*+$/, '') query = "\"#{solr_escape(text)}\"" params["qt"] = "/suggest_ncbo" - params["qf"] = "prefLabelExact^100 prefLabelSuggestEdge^50 synonymSuggestEdge^10 prefLabelSuggestNgram synonymSuggestNgram resource_id #{QUERYLESS_FIELDS_STR}" + params["qf"] = "prefLabelExact#{lang_suffix }^100 prefLabelSuggestEdge^50 synonymSuggestEdge^10 prefLabelSuggestNgram synonymSuggestNgram resource_id #{QUERYLESS_FIELDS_STR}" params["pf"] = "prefLabelSuggest^50" - params["hl.fl"] = "prefLabelExact prefLabelSuggestEdge synonymSuggestEdge prefLabelSuggestNgram synonymSuggestNgram resource_id #{QUERYLESS_FIELDS_STR}" + params["hl.fl"] = "prefLabelExact#{lang_suffix } prefLabelSuggestEdge synonymSuggestEdge prefLabelSuggestNgram synonymSuggestNgram resource_id #{QUERYLESS_FIELDS_STR}" else if text.strip.empty? query = '*' @@ -114,9 +117,9 @@ def get_term_search_query(text, params={}) query = solr_escape(text) end - params["qf"] = "resource_id^100 prefLabelExact^90 prefLabel^70 synonymExact^50 synonym^10 #{QUERYLESS_FIELDS_STR}" + params["qf"] = "resource_id^100 prefLabelExact#{lang_suffix }^90 prefLabel#{lang_suffix }^70 synonymExact#{lang_suffix }^50 synonym#{lang_suffix }^10 #{QUERYLESS_FIELDS_STR}" params["qf"] << " property" if params[INCLUDE_PROPERTIES_PARAM] == "true" - params["hl.fl"] = "resource_id prefLabelExact prefLabel synonymExact synonym #{QUERYLESS_FIELDS_STR}" + params["hl.fl"] = "resource_id prefLabelExact#{lang_suffix } prefLabel#{lang_suffix } synonymExact#{lang_suffix } synonym#{lang_suffix } #{QUERYLESS_FIELDS_STR}" params["hl.fl"] = "#{params["hl.fl"]} property" if params[INCLUDE_PROPERTIES_PARAM] == "true" end diff --git a/lib/rack/request_lang.rb b/lib/rack/request_lang.rb new file mode 100644 index 00000000..b2221041 --- /dev/null +++ b/lib/rack/request_lang.rb @@ -0,0 +1,16 @@ +module Rack + class RequestLang + + def initialize(app = nil, options = {}) + @app = app + end + + def call(env) + r = Rack::Request.new(env) + lang = r.params["lang"] || r.params["language"] + lang = lang.upcase.to_sym if lang + RequestStore.store[:requested_lang] = lang + @app.call(env) + end + end +end \ No newline at end of file diff --git a/test/controllers/test_ontology_submissions_controller.rb b/test/controllers/test_ontology_submissions_controller.rb index 8c4cb098..77b6e6bc 100644 --- a/test/controllers/test_ontology_submissions_controller.rb +++ b/test/controllers/test_ontology_submissions_controller.rb @@ -201,6 +201,21 @@ def test_download_acl_only end end + def test_submissions_pagination + num_onts_created, created_ont_acronyms = create_ontologies_and_submissions(ont_count: 2, submission_count: 2) + + get "/submissions" + assert last_response.ok? 
+ submissions = MultiJson.load(last_response.body) + + assert_equal 2, submissions.length + + + get "/submissions?page=1&pagesize=1" + assert last_response.ok? + submissions = MultiJson.load(last_response.body) + assert_equal 1, submissions["collection"].length + end def test_submissions_default_includes diff --git a/test/controllers/test_search_controller.rb b/test/controllers/test_search_controller.rb index 21a3dd18..74be75d2 100644 --- a/test/controllers/test_search_controller.rb +++ b/test/controllers/test_search_controller.rb @@ -213,4 +213,48 @@ def test_search_provisional_class assert_equal @@test_pc_child.label, provisional[0]["prefLabel"].first end + def test_multilingual_search + get "/search?q=Activity&ontologies=BROSEARCHTEST-0" + res = MultiJson.load(last_response.body) + refute_equal 0, res["totalCount"] + + doc = res["collection"].select{|doc| doc["@id"].to_s.eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + refute_nil doc + + #res = LinkedData::Models::Class.search("prefLabel_none:Activity", {:fq => "submissionAcronym:BROSEARCHTEST-0", :start => 0, :rows => 80}, :main) + #refute_equal 0, res["response"]["numFound"] + #refute_nil res["response"]["docs"].select{|doc| doc["resource_id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + + get "/search?q=Activit%C3%A9&ontologies=BROSEARCHTEST-0&lang=fr" + res = MultiJson.load(last_response.body) + refute_equal 0, res["totalCount"] + refute_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + + + + get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=en" + res = MultiJson.load(last_response.body) + refute_equal 0, res["totalCount"] + refute_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + + + get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=fr&require_exact_match=true" + res = MultiJson.load(last_response.body) + assert_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + + get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=en&require_exact_match=true" + res = MultiJson.load(last_response.body) + refute_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + + get "/search?q=Activity&ontologies=BROSEARCHTEST-0&lang=en&require_exact_match=true" + res = MultiJson.load(last_response.body) + assert_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + + get "/search?q=Activit%C3%A9&ontologies=BROSEARCHTEST-0&lang=fr&require_exact_match=true" + res = MultiJson.load(last_response.body) + refute_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first + + + end + end diff --git a/test/data/ontology_files/BRO_v3.2.owl b/test/data/ontology_files/BRO_v3.2.owl index d64075cc..b2aeccf5 100644 --- a/test/data/ontology_files/BRO_v3.2.owl +++ b/test/data/ontology_files/BRO_v3.2.owl @@ -631,6 +631,9 @@ Activity + Activity + ActivityEnglish + Activité Activity of interest that may be related to a BRO:Resource. 
activities diff --git a/test/solr/docker-compose.yml b/test/solr/docker-compose.yml new file mode 100644 index 00000000..3ddae69c --- /dev/null +++ b/test/solr/docker-compose.yml @@ -0,0 +1,13 @@ +version: '3.8' + +services: + op_solr: + image: solr:8.8 + volumes: + - ./solr_configsets:/configsets:ro + ports: + - "8983:8983" + command: > + bash -c "precreate-core term_search_core1 /configsets/term_search + && precreate-core prop_search_core1 /configsets/property_search + && solr-foreground" diff --git a/test/solr/generate_ncbo_configsets.sh b/test/solr/generate_ncbo_configsets.sh index 893f7f3a..7b4281f7 100755 --- a/test/solr/generate_ncbo_configsets.sh +++ b/test/solr/generate_ncbo_configsets.sh @@ -2,18 +2,23 @@ # generates solr configsets by merging _default configset with config files in config/solr # _default is copied from sorl distribuion solr-8.10.1/server/solr/configsets/_default/ -pushd solr/configsets -ld_config='../../../../ontologies_linked_data/config/solr/' -#ld_config='../../../../config/solr/' -ls -l $ld_config -pwd -[ -d property_search ] && rm -Rf property_search -[ -d term_search ] && rm -Rf property_search -[ -d $ld_config/property_search ] || echo "cant find ontologies_linked_data project" -mkdir -p property_search/conf -mkdir -p term_search/conf -cp -a _default/conf/* property_search/conf/ -cp -a _default/conf/* term_search/conf/ -cp -a $ld_config/property_search/* property_search/conf -cp -a $ld_config/term_search/* term_search/conf -popd +#cd solr/configsets +ld_config='config/solr' +configsets='test/solr/configsets' +[ -d ${configsets}/property_search ] && rm -Rf ${configsets}/property_search +[ -d ${configsets}/term_search ] && rm -Rf ${configsets}/term_search +if [[ ! -d ${ld_config}/property_search ]]; then + echo 'cant find ld solr config sets' + exit 1 +fi +if [[ ! 
-d ${configsets}/_default/conf ]]; then + echo 'cant find default solr configset' + exit 1 +fi +mkdir -p ${configsets}/property_search/conf +mkdir -p ${configsets}/term_search/conf +cp -a ${configsets}/_default/conf/* ${configsets}/property_search/conf/ +cp -a ${configsets}/_default/conf/* ${configsets}/term_search/conf/ +cp -a $ld_config/property_search/* ${configsets}/property_search/conf +cp -a $ld_config/term_search/* ${configsets}/term_search/conf + From e04096a006f66bd9b7ec45f7643be8e4e94c6c3f Mon Sep 17 00:00:00 2001 From: Syphax bouazzouni Date: Tue, 5 Dec 2023 22:24:55 +0100 Subject: [PATCH 8/8] Merge to master: Release 2.3.5 - New metadata model (#43) * Merge pull request #33 from feature/update-submission-mdetamodel-2023 * Merge pull request #39 from ontoportal-lirmm/feature/add-agent-crud-endpoint Feature: Add agent crud endpoints --- config/solr/property_search/enumsconfig.xml | 12 + .../mapping-ISOLatin1Accent.txt | 246 ++++ config/solr/property_search/schema.xml | 1179 +++++++++++++++ config/solr/property_search/solrconfig.xml | 1299 +++++++++++++++++ config/solr/solr.xml | 60 + config/solr/term_search/enumsconfig.xml | 12 + .../term_search/mapping-ISOLatin1Accent.txt | 246 ++++ config/solr/term_search/schema.xml | 1222 ++++++++++++++++ config/solr/term_search/solrconfig.xml | 1299 +++++++++++++++++ controllers/agents_controller.rb | 145 ++ helpers/metadata_helper.rb | 14 +- test/controllers/test_agents_controller.rb | 225 +++ test/controllers/test_annotator_controller.rb | 6 +- test/controllers/test_mappings_controller.rb | 2 +- 14 files changed, 5960 insertions(+), 7 deletions(-) create mode 100644 config/solr/property_search/enumsconfig.xml create mode 100644 config/solr/property_search/mapping-ISOLatin1Accent.txt create mode 100644 config/solr/property_search/schema.xml create mode 100644 config/solr/property_search/solrconfig.xml create mode 100644 config/solr/solr.xml create mode 100644 config/solr/term_search/enumsconfig.xml create mode 100644 config/solr/term_search/mapping-ISOLatin1Accent.txt create mode 100644 config/solr/term_search/schema.xml create mode 100644 config/solr/term_search/solrconfig.xml create mode 100644 controllers/agents_controller.rb create mode 100644 test/controllers/test_agents_controller.rb diff --git a/config/solr/property_search/enumsconfig.xml b/config/solr/property_search/enumsconfig.xml new file mode 100644 index 00000000..72e7b7d3 --- /dev/null +++ b/config/solr/property_search/enumsconfig.xml @@ -0,0 +1,12 @@ + + + + ONTOLOGY + VALUE_SET_COLLECTION + + + ANNOTATION + DATATYPE + OBJECT + + \ No newline at end of file diff --git a/config/solr/property_search/mapping-ISOLatin1Accent.txt b/config/solr/property_search/mapping-ISOLatin1Accent.txt new file mode 100644 index 00000000..ede77425 --- /dev/null +++ b/config/solr/property_search/mapping-ISOLatin1Accent.txt @@ -0,0 +1,246 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Syntax: +# "source" => "target" +# "source".length() > 0 (source cannot be empty.) 
+# "target".length() >= 0 (target can be empty.) + +# example: +# "À" => "A" +# "\u00C0" => "A" +# "\u00C0" => "\u0041" +# "ß" => "ss" +# "\t" => " " +# "\n" => "" + +# À => A +"\u00C0" => "A" + +# Á => A +"\u00C1" => "A" + +#  => A +"\u00C2" => "A" + +# à => A +"\u00C3" => "A" + +# Ä => A +"\u00C4" => "A" + +# Å => A +"\u00C5" => "A" + +# Æ => AE +"\u00C6" => "AE" + +# Ç => C +"\u00C7" => "C" + +# È => E +"\u00C8" => "E" + +# É => E +"\u00C9" => "E" + +# Ê => E +"\u00CA" => "E" + +# Ë => E +"\u00CB" => "E" + +# Ì => I +"\u00CC" => "I" + +# Í => I +"\u00CD" => "I" + +# Î => I +"\u00CE" => "I" + +# Ï => I +"\u00CF" => "I" + +# IJ => IJ +"\u0132" => "IJ" + +# Ð => D +"\u00D0" => "D" + +# Ñ => N +"\u00D1" => "N" + +# Ò => O +"\u00D2" => "O" + +# Ó => O +"\u00D3" => "O" + +# Ô => O +"\u00D4" => "O" + +# Õ => O +"\u00D5" => "O" + +# Ö => O +"\u00D6" => "O" + +# Ø => O +"\u00D8" => "O" + +# Œ => OE +"\u0152" => "OE" + +# Þ +"\u00DE" => "TH" + +# Ù => U +"\u00D9" => "U" + +# Ú => U +"\u00DA" => "U" + +# Û => U +"\u00DB" => "U" + +# Ü => U +"\u00DC" => "U" + +# Ý => Y +"\u00DD" => "Y" + +# Ÿ => Y +"\u0178" => "Y" + +# à => a +"\u00E0" => "a" + +# á => a +"\u00E1" => "a" + +# â => a +"\u00E2" => "a" + +# ã => a +"\u00E3" => "a" + +# ä => a +"\u00E4" => "a" + +# å => a +"\u00E5" => "a" + +# æ => ae +"\u00E6" => "ae" + +# ç => c +"\u00E7" => "c" + +# è => e +"\u00E8" => "e" + +# é => e +"\u00E9" => "e" + +# ê => e +"\u00EA" => "e" + +# ë => e +"\u00EB" => "e" + +# ì => i +"\u00EC" => "i" + +# í => i +"\u00ED" => "i" + +# î => i +"\u00EE" => "i" + +# ï => i +"\u00EF" => "i" + +# ij => ij +"\u0133" => "ij" + +# ð => d +"\u00F0" => "d" + +# ñ => n +"\u00F1" => "n" + +# ò => o +"\u00F2" => "o" + +# ó => o +"\u00F3" => "o" + +# ô => o +"\u00F4" => "o" + +# õ => o +"\u00F5" => "o" + +# ö => o +"\u00F6" => "o" + +# ø => o +"\u00F8" => "o" + +# œ => oe +"\u0153" => "oe" + +# ß => ss +"\u00DF" => "ss" + +# þ => th +"\u00FE" => "th" + +# ù => u +"\u00F9" => "u" + +# ú => u +"\u00FA" => "u" + +# û => u +"\u00FB" => "u" + +# ü => u +"\u00FC" => "u" + +# ý => y +"\u00FD" => "y" + +# ÿ => y +"\u00FF" => "y" + +# ff => ff +"\uFB00" => "ff" + +# fi => fi +"\uFB01" => "fi" + +# fl => fl +"\uFB02" => "fl" + +# ffi => ffi +"\uFB03" => "ffi" + +# ffl => ffl +"\uFB04" => "ffl" + +# ſt => ft +"\uFB05" => "ft" + +# st => st +"\uFB06" => "st" diff --git a/config/solr/property_search/schema.xml b/config/solr/property_search/schema.xml new file mode 100644 index 00000000..20824ea6 --- /dev/null +++ b/config/solr/property_search/schema.xml @@ -0,0 +1,1179 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ [... remainder of config/solr/property_search/schema.xml (field and type definitions); the XML markup is not recoverable here ...]
diff --git a/config/solr/property_search/solrconfig.xml b/config/solr/property_search/solrconfig.xml
new file mode 100644
index 00000000..771a0f32
--- /dev/null
+++ b/config/solr/property_search/solrconfig.xml
@@ -0,0 +1,1299 @@
+ [... config/solr/property_search/solrconfig.xml, 1299 added lines; the XML markup is not recoverable here. Only scattered values remain, e.g. 8.8.2, ${solr.data.dir:}, ${solr.lock.type:native}, ${solr.ulog.dir:}, ${solr.autoCommit.maxTime:15000}, ${solr.max.booleanClauses:500000}, and the spellcheck/highlighting defaults ...]
+ _ + + + + + + + yyyy-MM-dd['T'[HH:mm[:ss[.SSS]][z + yyyy-MM-dd['T'[HH:mm[:ss[,SSS]][z + yyyy-MM-dd HH:mm[:ss[.SSS]][z + yyyy-MM-dd HH:mm[:ss[,SSS]][z + [EEE, ]dd MMM yyyy HH:mm[:ss] z + EEEE, dd-MMM-yy HH:mm:ss z + EEE MMM ppd HH:mm:ss [z ]yyyy + + + + + java.lang.String + text_general + + *_str + 256 + + + true + + + java.lang.Boolean + booleans + + + java.util.Date + pdates + + + java.lang.Long + java.lang.Integer + plongs + + + java.lang.Number + pdoubles + + + + + + + + + + + + + + + + + + + + text/plain; charset=UTF-8 + + + + + + + + + + + + + + diff --git a/config/solr/solr.xml b/config/solr/solr.xml new file mode 100644 index 00000000..d9d089e4 --- /dev/null +++ b/config/solr/solr.xml @@ -0,0 +1,60 @@ + + + + + + + + ${solr.max.booleanClauses:500000} + ${solr.sharedLib:} + ${solr.allowPaths:} + + + + ${host:} + ${solr.port.advertise:0} + ${hostContext:solr} + + ${genericCoreNodeNames:true} + + ${zkClientTimeout:30000} + ${distribUpdateSoTimeout:600000} + ${distribUpdateConnTimeout:60000} + ${zkCredentialsProvider:org.apache.solr.common.cloud.DefaultZkCredentialsProvider} + ${zkACLProvider:org.apache.solr.common.cloud.DefaultZkACLProvider} + + + + + ${socketTimeout:600000} + ${connTimeout:60000} + ${solr.shardsWhitelist:} + + + + + diff --git a/config/solr/term_search/enumsconfig.xml b/config/solr/term_search/enumsconfig.xml new file mode 100644 index 00000000..72e7b7d3 --- /dev/null +++ b/config/solr/term_search/enumsconfig.xml @@ -0,0 +1,12 @@ + + + + ONTOLOGY + VALUE_SET_COLLECTION + + + ANNOTATION + DATATYPE + OBJECT + + \ No newline at end of file diff --git a/config/solr/term_search/mapping-ISOLatin1Accent.txt b/config/solr/term_search/mapping-ISOLatin1Accent.txt new file mode 100644 index 00000000..ede77425 --- /dev/null +++ b/config/solr/term_search/mapping-ISOLatin1Accent.txt @@ -0,0 +1,246 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Syntax: +# "source" => "target" +# "source".length() > 0 (source cannot be empty.) +# "target".length() >= 0 (target can be empty.) 
+ +# example: +# "À" => "A" +# "\u00C0" => "A" +# "\u00C0" => "\u0041" +# "ß" => "ss" +# "\t" => " " +# "\n" => "" + +# À => A +"\u00C0" => "A" + +# Á => A +"\u00C1" => "A" + +#  => A +"\u00C2" => "A" + +# à => A +"\u00C3" => "A" + +# Ä => A +"\u00C4" => "A" + +# Å => A +"\u00C5" => "A" + +# Æ => AE +"\u00C6" => "AE" + +# Ç => C +"\u00C7" => "C" + +# È => E +"\u00C8" => "E" + +# É => E +"\u00C9" => "E" + +# Ê => E +"\u00CA" => "E" + +# Ë => E +"\u00CB" => "E" + +# Ì => I +"\u00CC" => "I" + +# Í => I +"\u00CD" => "I" + +# Î => I +"\u00CE" => "I" + +# Ï => I +"\u00CF" => "I" + +# IJ => IJ +"\u0132" => "IJ" + +# Ð => D +"\u00D0" => "D" + +# Ñ => N +"\u00D1" => "N" + +# Ò => O +"\u00D2" => "O" + +# Ó => O +"\u00D3" => "O" + +# Ô => O +"\u00D4" => "O" + +# Õ => O +"\u00D5" => "O" + +# Ö => O +"\u00D6" => "O" + +# Ø => O +"\u00D8" => "O" + +# Œ => OE +"\u0152" => "OE" + +# Þ +"\u00DE" => "TH" + +# Ù => U +"\u00D9" => "U" + +# Ú => U +"\u00DA" => "U" + +# Û => U +"\u00DB" => "U" + +# Ü => U +"\u00DC" => "U" + +# Ý => Y +"\u00DD" => "Y" + +# Ÿ => Y +"\u0178" => "Y" + +# à => a +"\u00E0" => "a" + +# á => a +"\u00E1" => "a" + +# â => a +"\u00E2" => "a" + +# ã => a +"\u00E3" => "a" + +# ä => a +"\u00E4" => "a" + +# å => a +"\u00E5" => "a" + +# æ => ae +"\u00E6" => "ae" + +# ç => c +"\u00E7" => "c" + +# è => e +"\u00E8" => "e" + +# é => e +"\u00E9" => "e" + +# ê => e +"\u00EA" => "e" + +# ë => e +"\u00EB" => "e" + +# ì => i +"\u00EC" => "i" + +# í => i +"\u00ED" => "i" + +# î => i +"\u00EE" => "i" + +# ï => i +"\u00EF" => "i" + +# ij => ij +"\u0133" => "ij" + +# ð => d +"\u00F0" => "d" + +# ñ => n +"\u00F1" => "n" + +# ò => o +"\u00F2" => "o" + +# ó => o +"\u00F3" => "o" + +# ô => o +"\u00F4" => "o" + +# õ => o +"\u00F5" => "o" + +# ö => o +"\u00F6" => "o" + +# ø => o +"\u00F8" => "o" + +# œ => oe +"\u0153" => "oe" + +# ß => ss +"\u00DF" => "ss" + +# þ => th +"\u00FE" => "th" + +# ù => u +"\u00F9" => "u" + +# ú => u +"\u00FA" => "u" + +# û => u +"\u00FB" => "u" + +# ü => u +"\u00FC" => "u" + +# ý => y +"\u00FD" => "y" + +# ÿ => y +"\u00FF" => "y" + +# ff => ff +"\uFB00" => "ff" + +# fi => fi +"\uFB01" => "fi" + +# fl => fl +"\uFB02" => "fl" + +# ffi => ffi +"\uFB03" => "ffi" + +# ffl => ffl +"\uFB04" => "ffl" + +# ſt => ft +"\uFB05" => "ft" + +# st => st +"\uFB06" => "st" diff --git a/config/solr/term_search/schema.xml b/config/solr/term_search/schema.xml new file mode 100644 index 00000000..fa95e127 --- /dev/null +++ b/config/solr/term_search/schema.xml @@ -0,0 +1,1222 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/config/solr/term_search/solrconfig.xml b/config/solr/term_search/solrconfig.xml new file mode 100644 index 00000000..771a0f32 --- /dev/null +++ b/config/solr/term_search/solrconfig.xml @@ -0,0 +1,1299 @@ + + + + + + + + + 8.8.2 + + + + + + + + + + + ${solr.data.dir:} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ${solr.lock.type:native} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ${solr.ulog.dir:} + ${solr.ulog.numVersionBuckets:65536} + + + + + ${solr.autoCommit.maxTime:15000} + false + + + + + + ${solr.autoSoftCommit.maxTime:-1} + + + + + + + + + + + + + + ${solr.max.booleanClauses:500000} + + + + + + + + + + + + + + + + + + + + + + + + true + + + + + + 20 + + + 200 + + + + + + + + + + + + + + + + + + + false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + explicit + 10 + + + + + + + + + + + + + + + + explicit + json + true + + + + + + _text_ + + + + + + + + + text_general + + + + + + default + _text_ + solr.DirectSolrSpellChecker + + internal + + 0.5 + + 2 + + 1 + + 5 + + 4 + + 0.01 + + + + + + + + + + + + default + on + true + 10 + 5 + 5 + true + true + 10 + 5 + + + spellcheck + + + + + + + + + + true + false + + + terms + + + + + + + + + + + 100 + + + + + + + + 70 + + 0.5 + + [-\w ,/\n\"']{20,200} + + + + + + + ]]> + ]]> + + + + + + + + + + + + + + + + + + + + + + + + ,, + ,, + ,, + ,, + ,]]> + ]]> + + + + + + 10 + .,!? + + + + + + + WORD + + + en + US + + + + + + + + + + + + [^\w-\.] + _ + + + + + + + yyyy-MM-dd['T'[HH:mm[:ss[.SSS]][z + yyyy-MM-dd['T'[HH:mm[:ss[,SSS]][z + yyyy-MM-dd HH:mm[:ss[.SSS]][z + yyyy-MM-dd HH:mm[:ss[,SSS]][z + [EEE, ]dd MMM yyyy HH:mm[:ss] z + EEEE, dd-MMM-yy HH:mm:ss z + EEE MMM ppd HH:mm:ss [z ]yyyy + + + + + java.lang.String + text_general + + *_str + 256 + + + true + + + java.lang.Boolean + booleans + + + java.util.Date + pdates + + + java.lang.Long + java.lang.Integer + plongs + + + java.lang.Number + pdoubles + + + + + + + + + + + + + + + + + + + + text/plain; charset=UTF-8 + + + + + + + + + + + + + + diff --git a/controllers/agents_controller.rb b/controllers/agents_controller.rb new file mode 100644 index 00000000..87572e99 --- /dev/null +++ b/controllers/agents_controller.rb @@ -0,0 +1,145 @@ +class AgentsController < ApplicationController + + %w[/agents /Agents].each do |namespace| + namespace namespace do + # Display all agents + get do + check_last_modified_collection(LinkedData::Models::Agent) + query = LinkedData::Models::Agent.where + query = apply_filters(LinkedData::Models::Agent, query) + query = query.include(LinkedData::Models::Agent.goo_attrs_to_load(includes_param)) + if page? 
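+          # page? is true when page/pagesize parameters were sent with the request; in that
+          # case only the requested page of agents is loaded, otherwise the whole list is returned.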
+          page, size = page_params
+          agents = query.page(page, size).all
+        else
+          agents = query.to_a
+        end
+        reply agents
+      end
+
+      # Display a single agent
+      get '/:id' do
+        check_last_modified_collection(LinkedData::Models::Agent)
+        id = params["id"]
+        agent = LinkedData::Models::Agent.find(id).include(LinkedData::Models::Agent.goo_attrs_to_load(includes_param)).first
+        error 404, "Agent #{id} not found" if agent.nil?
+        reply 200, agent
+      end
+
+      # Create a new agent
+      post do
+        reply 201, create_new_agent
+      end
+
+      # Create an agent with the given acronym
+      put '/:acronym' do
+        reply 201, create_new_agent
+      end
+
+      # Update an existing agent
+      patch '/:id' do
+        acronym = params["id"]
+        agent = LinkedData::Models::Agent.find(acronym).include(LinkedData::Models::Agent.attributes).first
+
+        if agent.nil?
+          error 400, "Agent does not exist, please create using HTTP PUT before modifying"
+        else
+          agent = update_agent(agent, params)
+
+          error 400, agent.errors unless agent.errors.empty?
+        end
+        halt 204
+      end
+
+      # Delete an agent
+      delete '/:id' do
+        agent = LinkedData::Models::Agent.find(params["id"]).first
+        agent.delete
+        halt 204
+      end
+
+      private
+
+      def update_identifiers(identifiers)
+        Array(identifiers).map do |i|
+          next nil if i.empty?
+
+          id = i["id"] || LinkedData::Models::AgentIdentifier.generate_identifier(i['notation'], i['schemaAgency'])
+          identifier = LinkedData::Models::AgentIdentifier.find(RDF::URI.new(id)).first
+
+          if identifier
+            identifier.bring_remaining
+          else
+            identifier = LinkedData::Models::AgentIdentifier.new
+          end
+
+          i.delete "id"
+
+          next identifier if i.keys.size.zero?
+
+          populate_from_params(identifier, i)
+
+          if identifier.valid?
+            identifier.save
+          else
+            error 400, identifier.errors
+          end
+          identifier
+        end.compact
+      end
+
+      def update_affiliations(affiliations)
+        Array(affiliations).map do |aff|
+          affiliation = aff["id"] ? LinkedData::Models::Agent.find(RDF::URI.new(aff["id"])).first : nil
+
+          if affiliation
+            affiliation.bring_remaining
+            affiliation.identifiers.each{|i| i.bring_remaining}
+          end
+
+          next affiliation if aff.keys.size.eql?(1) && aff["id"]
+
+          if affiliation
+            affiliation = update_agent(affiliation, aff)
+          else
+            affiliation = create_new_agent(aff["id"], aff)
+          end
+
+          error 400, affiliation.errors unless affiliation.errors.empty?
+
+          affiliation
+        end
+      end
+
+      def create_new_agent(id = @params['id'], params = @params)
+        agent = nil
+        agent = LinkedData::Models::Agent.find(id).include(LinkedData::Models::Agent.goo_attrs_to_load(includes_param)).first if id
+
+        if agent.nil?
+          agent = update_agent(LinkedData::Models::Agent.new, params)
+          error 400, agent.errors unless agent.errors.empty?
+
+          return agent
+        else
+          error 400, "Agent exists, please use HTTP PATCH to update"
+        end
+      end
+
+      def update_agent(agent, params)
+        return agent unless agent
+
+        identifiers = params.delete "identifiers"
+        affiliations = params.delete "affiliations"
+        params.delete "id"
+        populate_from_params(agent, params)
+        agent.identifiers = update_identifiers(identifiers)
+        agent.affiliations = update_affiliations(affiliations)
+
+        agent.save if agent.valid?
+        return agent
+      end
+
+    end
+  end
+
+end
\ No newline at end of file
diff --git a/helpers/metadata_helper.rb b/helpers/metadata_helper.rb
index db61c414..2c5d7182 100644
--- a/helpers/metadata_helper.rb
+++ b/helpers/metadata_helper.rb
@@ -64,15 +64,23 @@ def klass_metadata(klass, type)
       # Get display from the metadata
       if klass.attribute_settings(attr)[:display].nil?
- attr_settings[:display] = "no" + attr_settings[:category] = "no" else - attr_settings[:display] = klass.attribute_settings(attr)[:display] + attr_settings[:category] = klass.attribute_settings(attr)[:display] end - if !klass.attribute_settings(attr)[:helpText].nil? + unless klass.attribute_settings(attr)[:helpText].nil? attr_settings[:helpText] = klass.attribute_settings(attr)[:helpText] end + unless klass.attribute_settings(attr)[:description].nil? + attr_settings[:description] = klass.attribute_settings(attr)[:description] + end + + unless klass.attribute_settings(attr)[:example].nil? + attr_settings[:example] = klass.attribute_settings(attr)[:example] + end + attr_settings[:@context] = { "@vocab" => "#{id_url_prefix}metadata/" } diff --git a/test/controllers/test_agents_controller.rb b/test/controllers/test_agents_controller.rb new file mode 100644 index 00000000..ef0e5c47 --- /dev/null +++ b/test/controllers/test_agents_controller.rb @@ -0,0 +1,225 @@ +require_relative '../test_case' +require "multi_json" + +class TestAgentsController < TestCase + + def setup + + @number_of_organizations = 6 + + + @test_agents = 8.times.map do |i| + type = i < @number_of_organizations ? 'organization' : 'person' + _agent_data(type: type) + end + @agents = [] + 2.times.map do + agents_tmp = [ _agent_data(type: 'organization'), _agent_data(type: 'organization'), _agent_data(type: 'person')] + agent = agents_tmp.last + agent[:affiliations] = [agents_tmp[0].stringify_keys, agents_tmp[1].stringify_keys] + _test_agent_creation(agent) + @agents = @agents + agents_tmp + end + end + + def teardown + # Delete groups + _delete_agents + end + + def test_all_agents + get '/agents' + assert last_response.ok? + + created_agents = MultiJson.load(last_response.body) + + @agents.each do |agent| + created_agent = created_agents.select{|x| x["name"].eql?(agent[:name])}.first + refute_nil created_agent + assert_equal agent[:name], created_agent["name"] + assert_equal agent[:identifiers].size, created_agent["identifiers"].size + assert_equal agent[:identifiers].map{|x| x[:notation]}.sort, created_agent["identifiers"].map{|x| x['notation']}.sort + assert_equal agent[:affiliations].size, created_agent["affiliations"].size + assert_equal agent[:affiliations].map{|x| x["name"]}.sort, created_agent["affiliations"].map{|x| x['name']}.sort + + end + end + + def test_single_agent + @agents.each do |agent| + agent_obj = _find_agent(agent['name']) + get "/agents/#{agent_obj.id.to_s.split('/').last}" + assert last_response.ok? 
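+      # Agents are addressed by the short id, i.e. the last segment of the agent IRI
+      # (hence the split('/').last in the GET request above).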
+      agent_found = MultiJson.load(last_response.body)
+      assert_equal agent_obj.id.to_s, agent_found["id"]
+    end
+  end
+
+  def test_create_new_agent
+
+    ## Create Agent of type affiliation with no parent affiliation
+    agent = @test_agents[0]
+    created_agent = _test_agent_creation(agent)
+
+    ## Create Agent of type affiliation with an existing parent affiliation
+
+    agent = @test_agents[1]
+    agent[:affiliations] = [created_agent]
+
+    created_agent = _test_agent_creation(agent)
+
+    ## Create Agent of type affiliation with both an existing and a new parent affiliation
+    agent = @test_agents[3]
+    agent[:affiliations] = [created_agent, @test_agents[2].stringify_keys]
+    created_agent = _test_agent_creation(agent)
+
+    ## Create Agent of type Person with existing affiliations
+
+    agent = @test_agents[6]
+    agent[:affiliations] = created_agent["affiliations"]
+    _test_agent_creation(agent)
+
+    ## Create Agent of type Person with not yet existing affiliations
+
+    agent = @test_agents[7]
+    agent[:affiliations] = [@test_agents[4].stringify_keys, @test_agents[5].stringify_keys]
+    _test_agent_creation(agent)
+
+    @agents = @agents + @test_agents
+  end
+
+
+  def test_new_agent_no_valid
+    agents_tmp = [ _agent_data(type: 'organization'), _agent_data(type: 'person'), _agent_data(type: 'person')]
+    agent = agents_tmp.last
+    agent[:affiliations] = [agents_tmp[0].stringify_keys, agents_tmp[1].stringify_keys]
+    post "/agents", MultiJson.dump(agent), "CONTENT_TYPE" => "application/json"
+    assert last_response.status == 400
+  end
+
+  def test_update_patch_agent
+
+    agents = [ _agent_data(type: 'organization'), _agent_data(type: 'organization'), _agent_data(type: 'person')]
+    agent = agents.last
+    agent[:affiliations] = [agents[0].stringify_keys, agents[1].stringify_keys]
+    agent = _test_agent_creation(agent)
+    @agents = @agents + agents
+    agent = LinkedData::Models::Agent.find(agent['id'].split('/').last).first
+    agent.bring_remaining
+
+
+    ## update identifiers
+    agent.identifiers.each{|i| i.bring_remaining}
+    new_identifiers = []
+    ## update an existent identifier
+    new_identifiers[0] = {
+      id: agent.identifiers[0].id.to_s,
+      schemaAgency: 'TEST ' + agent.identifiers[0].notation
+    }
+
+    new_identifiers[1] = {
+      id: agent.identifiers[1].id.to_s
+    }
+
+    ## update affiliation
+    agent.affiliations.each{|aff| aff.bring_remaining}
+    new_affiliations = []
+    ## update an existent affiliation
+    new_affiliations[0] = {
+      name: 'TEST new of ' + agent.affiliations[0].name,
+      id: agent.affiliations[0].id.to_s
+    }
+    ## create a new affiliation
+    new_affiliations[1] = _agent_data(type: 'organization')
+    new_affiliations[1][:name] = 'new affiliation'
+
+    new_values = {
+      name: 'new name ',
+      identifiers: new_identifiers,
+      affiliations: new_affiliations
+    }
+
+    patch "/agents/#{agent.id.split('/').last}", MultiJson.dump(new_values), "CONTENT_TYPE" => "application/json"
+    assert last_response.status == 204
+
+    get "/agents/#{agent.id.split('/').last}"
+    new_agent = MultiJson.load(last_response.body)
+    assert_equal 'new name ', new_agent["name"]
+
+    assert_equal new_identifiers.size, new_agent["identifiers"].size
+    assert_equal new_identifiers[0][:schemaAgency], new_agent["identifiers"].select{|x| x["id"].eql?(agent.identifiers[0].id.to_s)}.first["schemaAgency"]
+    assert_equal agent.identifiers[1].schemaAgency, new_agent["identifiers"].select{|x| x["id"].eql?(agent.identifiers[1].id.to_s)}.first["schemaAgency"]
+
+    assert_equal new_affiliations.size, new_agent["affiliations"].size
+    assert_equal new_affiliations[0][:name],
new_agent["affiliations"].select{|x| x["id"].eql?(agent.affiliations[0].id.to_s)}.first["name"] + assert_nil new_agent["affiliations"].select{|x| x["id"].eql?(agent.affiliations[1].id.to_s)}.first + assert_equal new_affiliations[1][:name], new_agent["affiliations"].reject{|x| x["id"].eql?(agent.affiliations[0].id.to_s)}.first["name"] + end + + def test_delete_agent + agent = @agents.delete_at(0) + agent_obj = _find_agent(agent['name']) + id = agent_obj.id.to_s.split('/').last + delete "/agents/#{id}" + assert last_response.status == 204 + + get "/agents/#{id}" + assert last_response.status == 404 + end + + private + def _agent_data(type: 'organization') + schema_agencies = LinkedData::Models::AgentIdentifier::IDENTIFIER_SCHEMES.keys + users = LinkedData::Models::User.all + users = [LinkedData::Models::User.new(username: "tim", email: "tim@example.org", password: "password").save] if users.empty? + test_identifiers = 5.times.map { |i| { notation: rand.to_s[2..11], schemaAgency: schema_agencies.sample.to_s } } + user = users.sample.id.to_s + + i = rand.to_s[2..11] + return { + agentType: type, + name: "name #{i}", + homepage: "home page #{i}", + acronym: "acronym #{i}", + email: "email_#{i}@test.com", + identifiers: test_identifiers.sample(2).map { |x| x.merge({ creator: user }) }, + affiliations: [], + creator: user + } + end + + def _find_agent(name) + LinkedData::Models::Agent.where(name: name).first + end + + def _delete_agents + @agents.each do |agent| + test_cat = _find_agent(agent[:name]) + next if test_cat.nil? + + test_cat.bring :identifiers + test_cat.identifiers.each { |i| i.delete } + test_cat.delete + end + end + + def _test_agent_creation(agent) + post "/agents", MultiJson.dump(agent), "CONTENT_TYPE" => "application/json" + + assert last_response.status == 201 + created_agent = MultiJson.load(last_response.body) + assert created_agent["name"].eql?(agent[:name]) + + get "/agents/#{created_agent['id'].split('/').last}" + assert last_response.ok? + + created_agent = MultiJson.load(last_response.body) + assert_equal agent[:name], created_agent["name"] + assert_equal agent[:identifiers].size, created_agent["identifiers"].size + assert_equal agent[:identifiers].map { |x| x[:notation] }.sort, created_agent["identifiers"].map { |x| x['notation'] }.sort + + assert_equal agent[:affiliations].size, created_agent["affiliations"].size + assert_equal agent[:affiliations].map { |x| x["name"] }.sort, created_agent["affiliations"].map { |x| x['name'] }.sort + created_agent + end +end \ No newline at end of file diff --git a/test/controllers/test_annotator_controller.rb b/test/controllers/test_annotator_controller.rb index ffa65a97..47f45f40 100644 --- a/test/controllers/test_annotator_controller.rb +++ b/test/controllers/test_annotator_controller.rb @@ -260,16 +260,16 @@ def test_default_properties_output assert last_response.ok? annotations = MultiJson.load(last_response.body) assert_equal 9, annotations.length - annotations.sort! { |a,b| a["annotatedClass"]["prefLabel"].downcase <=> b["annotatedClass"]["prefLabel"].downcase } + annotations.sort! 
{ |a,b| a["annotatedClass"]["prefLabel"].first.downcase <=> b["annotatedClass"]["prefLabel"].first.downcase } assert_equal "http://bioontology.org/ontologies/BiomedicalResourceOntology.owl#Aggregate_Human_Data", annotations.first["annotatedClass"]["@id"] - assert_equal "Aggregate Human Data", annotations.first["annotatedClass"]["prefLabel"] + assert_equal "Aggregate Human Data", Array(annotations.first["annotatedClass"]["prefLabel"]).first params = {text: text, include: "prefLabel,definition"} get "/annotator", params assert last_response.ok? annotations = MultiJson.load(last_response.body) assert_equal 9, annotations.length - annotations.sort! { |a,b| a["annotatedClass"]["prefLabel"].downcase <=> b["annotatedClass"]["prefLabel"].downcase } + annotations.sort! { |a,b| Array(a["annotatedClass"]["prefLabel"]).first.downcase <=> Array(b["annotatedClass"]["prefLabel"]).first.downcase } assert_equal "http://bioontology.org/ontologies/BiomedicalResourceOntology.owl#Aggregate_Human_Data", annotations.first["annotatedClass"]["@id"] assert_equal ["A resource that provides data from clinical care that comprises combined data from multiple individual human subjects."], annotations.first["annotatedClass"]["definition"] end diff --git a/test/controllers/test_mappings_controller.rb b/test/controllers/test_mappings_controller.rb index 52c3975d..cff52225 100644 --- a/test/controllers/test_mappings_controller.rb +++ b/test/controllers/test_mappings_controller.rb @@ -245,7 +245,7 @@ def mappings_with_display get "/ontologies/#{ontology}/mappings?pagesize=#{pagesize}&page=#{page}&display=prefLabel" assert last_response.ok? mappings = MultiJson.load(last_response.body) - assert mappings["collection"].all? { |m| m["classes"].all? { |c| c["prefLabel"].is_a?(String) && c["prefLabel"].length > 0 } } + assert mappings["collection"].all? { |m| m["classes"].all? { |c| c["prefLabel"].first.is_a?(String) && c["prefLabel"].first.length > 0 } } def_count = 0 next_page = 1
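The test adjustments above reflect that prefLabel is now returned as a multi-valued, per-language field rather than a single string. Below is a minimal illustrative sketch, not part of the patches themselves, of how a client of this API might combine that with the lang parameter introduced in PATCH 7; it assumes the rack-test helpers used throughout this test suite and an ontology indexed with French labels, such as the BROSEARCHTEST-0 fixture exercised earlier:

  # Restrict the search to French labels, then read prefLabel defensively,
  # since it may now be a list of language-specific values.
  get "/search?q=Activit%C3%A9&ontologies=BROSEARCHTEST-0&lang=fr"
  results = MultiJson.load(last_response.body)
  results["collection"].each do |doc|
    label = Array(doc["prefLabel"]).first
    puts "#{doc['@id']} => #{label}"
  end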