diff --git a/Gemfile b/Gemfile
index 73ce5f88..12b3eeda 100644
--- a/Gemfile
+++ b/Gemfile
@@ -19,6 +19,7 @@ gem 'rest-client'
gem 'rsolr', '~> 1.0'
gem 'rubyzip', '~> 1.0'
gem 'thin'
+gem 'request_store'
# Testing
group :test do
diff --git a/Gemfile.lock b/Gemfile.lock
index 2ca149a4..2228cc51 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -1,6 +1,6 @@
GIT
remote: https://github.com/ontoportal-lirmm/goo.git
- revision: bd7154217438c3b9160e0e9b495c7c718b55fbf8
+ revision: 74ea47defc7f6260b045a6c6997bbe6a59c7bf62
branch: master
specs:
goo (0.0.2)
@@ -36,18 +36,16 @@ GEM
public_suffix (>= 2.0.2, < 6.0)
ansi (1.5.0)
ast (2.4.2)
- base64 (0.1.1)
- bcrypt (3.1.19)
+ bcrypt (3.1.20)
builder (3.2.4)
coderay (1.1.3)
concurrent-ruby (1.2.2)
connection_pool (2.4.1)
cube-ruby (0.0.3)
daemons (1.4.1)
- date (3.3.3)
+ date (3.3.4)
docile (1.4.0)
- domain_name (0.5.20190701)
- unf (>= 0.0.5, < 1.0.0)
+ domain_name (0.6.20231109)
email_spec (2.2.2)
htmlentities (~> 4.3.3)
launchy (~> 2.1)
@@ -84,13 +82,13 @@ GEM
domain_name (~> 0.5)
i18n (0.9.5)
concurrent-ruby (~> 1.0)
- json (2.6.3)
- json_pure (2.6.3)
+ json (2.7.1)
+ json_pure (2.7.1)
language_server-protocol (3.17.0.3)
launchy (2.5.2)
addressable (~> 2.8)
libxml-ruby (2.9.0)
- logger (1.5.3)
+ logger (1.6.0)
macaddr (1.7.2)
systemu (~> 2.6.5)
mail (2.8.1)
@@ -112,12 +110,12 @@ GEM
multi_json (1.15.0)
multipart-post (2.3.0)
net-http-persistent (2.9.4)
- net-imap (0.4.1)
+ net-imap (0.4.7)
date
net-protocol
net-pop (0.1.2)
net-protocol
- net-protocol (0.2.1)
+ net-protocol (0.2.2)
timeout
net-smtp (0.4.0)
net-protocol
@@ -136,8 +134,8 @@ GEM
pry (0.14.2)
coderay (~> 1.1)
method_source (~> 1.0)
- public_suffix (5.0.3)
- racc (1.7.1)
+ public_suffix (5.0.4)
+ racc (1.7.3)
rack (1.6.13)
rack-test (0.8.3)
rack (>= 1.0, < 3)
@@ -145,11 +143,13 @@ GEM
rake (10.5.0)
rdf (1.0.8)
addressable (>= 2.2)
- redis (5.0.7)
- redis-client (>= 0.9.0)
- redis-client (0.17.0)
+ redis (5.0.8)
+ redis-client (>= 0.17.0)
+ redis-client (0.18.0)
connection_pool
- regexp_parser (2.8.2)
+ regexp_parser (2.8.3)
+ request_store (1.5.1)
+ rack (>= 1.4)
rest-client (2.1.0)
http-accept (>= 1.7.0, < 2.0)
http-cookie (>= 1.0.2, < 2.0)
@@ -158,8 +158,7 @@ GEM
rexml (3.2.6)
rsolr (1.1.2)
builder (>= 2.1.2)
- rubocop (1.57.1)
- base64 (~> 0.1.1)
+ rubocop (1.58.0)
json (~> 2.3)
language_server-protocol (>= 3.17.0)
parallel (~> 1.10)
@@ -167,10 +166,10 @@ GEM
rainbow (>= 2.2.2, < 4.0)
regexp_parser (>= 1.8, < 3.0)
rexml (>= 3.2.5, < 4.0)
- rubocop-ast (>= 1.28.1, < 2.0)
+ rubocop-ast (>= 1.30.0, < 2.0)
ruby-progressbar (~> 1.7)
unicode-display_width (>= 2.4.0, < 3.0)
- rubocop-ast (1.29.0)
+ rubocop-ast (1.30.0)
parser (>= 3.2.1.0)
ruby-progressbar (1.13.0)
ruby2_keywords (0.0.5)
@@ -192,16 +191,15 @@ GEM
eventmachine (~> 1.0, >= 1.0.4)
rack (>= 1, < 3)
thread_safe (0.3.6)
- timeout (0.4.0)
+ timeout (0.4.1)
tzinfo (0.3.62)
- unf (0.1.4)
- unf_ext
- unf_ext (0.0.8.2)
unicode-display_width (2.5.0)
uuid (2.3.9)
macaddr (~> 1.0)
PLATFORMS
+ x86_64-darwin-21
+ x86_64-darwin-23
x86_64-linux
DEPENDENCIES
@@ -224,6 +222,7 @@ DEPENDENCIES
rack (~> 1.0)
rack-test (~> 0.6)
rake (~> 10.0)
+ request_store
rest-client
rsolr (~> 1.0)
rubocop
diff --git a/config/solr/term_search/schema.xml b/config/solr/term_search/schema.xml
index 6b18a2a1..fa95e127 100644
--- a/config/solr/term_search/schema.xml
+++ b/config/solr/term_search/schema.xml
@@ -128,11 +128,20 @@
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -140,9 +149,18 @@
+
+
+
+
+
+
+
-
+
+
+
@@ -251,6 +269,17 @@
+
+
+
+
+
+
+
+
+
+
+
diff --git a/lib/ontologies_linked_data/concerns/mappings/mapping_creator.rb b/lib/ontologies_linked_data/concerns/mappings/mapping_creator.rb
index bbd4281d..600ed13a 100644
--- a/lib/ontologies_linked_data/concerns/mappings/mapping_creator.rb
+++ b/lib/ontologies_linked_data/concerns/mappings/mapping_creator.rb
@@ -81,8 +81,8 @@ def create_mapping_process(mapping_process_hash, source_uri, object_uri, user)
process.relation = relations_array
process.creator = user
- process.subject_source_id = RDF::URI.new(source_uri || mapping_process_hash[:subject_source_id])
- process.object_source_id = RDF::URI.new(object_uri || mapping_process_hash[:object_source_id])
+ process.subject_source_id = create_uri(source_uri || mapping_process_hash[:subject_source_id])
+ process.object_source_id = create_uri(object_uri || mapping_process_hash[:object_source_id])
process.date = mapping_process_hash[:date] ? DateTime.parse(mapping_process_hash[:date]) : DateTime.now
process_fields = %i[source source_name comment name source_contact_info]
process_fields.each do |att|
@@ -92,6 +92,9 @@ def create_mapping_process(mapping_process_hash, source_uri, object_uri, user)
end
private
+ def create_uri(value)
+ RDF::URI.new(value) unless value.nil?
+ end
def save_rest_mapping(classes, process)
LinkedData::Mappings.create_rest_mapping(classes, process)
diff --git a/lib/ontologies_linked_data/models/class.rb b/lib/ontologies_linked_data/models/class.rb
index 7a3a5864..5481bd78 100644
--- a/lib/ontologies_linked_data/models/class.rb
+++ b/lib/ontologies_linked_data/models/class.rb
@@ -57,33 +57,33 @@ def self.urn_id(acronym,classId)
attribute :parents, namespace: :rdfs,
property: lambda {|x| self.tree_view_property(x) },
- enforce: [:list, :class]
+ enforce: [:list, :class]
#transitive parent
attribute :ancestors, namespace: :rdfs,
- property: :subClassOf,
- enforce: [:list, :class],
- transitive: true
+ property: :subClassOf,
+ enforce: [:list, :class],
+ transitive: true
attribute :children, namespace: :rdfs,
property: lambda {|x| self.tree_view_property(x) },
inverse: { on: :class , :attribute => :parents }
attribute :subClassOf, namespace: :rdfs,
- enforce: [:list, :uri]
+ enforce: [:list, :uri]
attribute :ancestors, namespace: :rdfs, property: :subClassOf, handler: :retrieve_ancestors
attribute :descendants, namespace: :rdfs, property: :subClassOf,
- handler: :retrieve_descendants
+ handler: :retrieve_descendants
attribute :semanticType, enforce: [:list], :namespace => :umls, :property => :hasSTY
attribute :cui, enforce: [:list], :namespace => :umls, alias: true
attribute :xref, :namespace => :oboinowl_gen, alias: true,
- :property => :hasDbXref
+ :property => :hasDbXref
attribute :notes,
- inverse: { on: :note, attribute: :relatedClass }
+ inverse: { on: :note, attribute: :relatedClass }
attribute :inScheme, enforce: [:list, :uri], namespace: :skos
attribute :memberOf, namespace: :uneskos, inverse: { on: :collection , :attribute => :member }
attribute :created, namespace: :dcterms
@@ -146,6 +146,31 @@ def index_id()
"#{self.id.to_s}_#{self.submission.ontology.acronym}_#{self.submission.submissionId}"
end
+ def to_hash(include_languages: false)
+ attr_hash = {}
+ self.class.attributes.each do |attr|
+ v = self.instance_variable_get("@#{attr}")
+ attr_hash[attr] = v unless v.nil?
+ end
+ properties_values = properties(include_languages: include_languages)
+ if properties_values
+ all_attr_uris = Set.new
+ self.class.attributes.each do |attr|
+ if self.class.collection_opts
+ all_attr_uris << self.class.attribute_uri(attr, self.collection)
+ else
+ all_attr_uris << self.class.attribute_uri(attr)
+ end
+ end
+ properties_values.each do |attr, values|
+ values = values.values.flatten if values.is_a?(Hash)
+ attr_hash[attr] = values.map { |v| v.to_s } unless all_attr_uris.include?(attr)
+ end
+ end
+ attr_hash[:id] = @id
+ attr_hash
+ end
+
# to_set is an optional array that allows passing specific
# field names that require updating
# if to_set is nil, it's assumed to be a new document for insert
@@ -187,20 +212,29 @@ def index_doc(to_set=nil)
all_attrs = self.to_hash
std = [:id, :prefLabel, :notation, :synonym, :definition, :cui]
-
+ multi_language_fields = [:prefLabel, :synonym, :definition]
std.each do |att|
cur_val = all_attrs[att]
# don't store empty values
next if cur_val.nil? || (cur_val.respond_to?('empty?') && cur_val.empty?)
+ if cur_val.is_a?(Hash) # Multi language
+ if multi_language_fields.include?(att)
+ doc[att] = cur_val.values.flatten # index all values of each language
+ cur_val.each { |lang, values| doc["#{att}_#{lang}".to_sym] = values } # index values per language
+ else
+ cur_val = cur_val.values.flatten
+ end
+ end
+
if cur_val.is_a?(Array)
# don't store empty values
cur_val = cur_val.reject { |c| c.respond_to?('empty?') && c.empty? }
doc[att] = []
cur_val = cur_val.uniq
cur_val.map { |val| doc[att] << (val.kind_of?(Goo::Base::Resource) ? val.id.to_s : val.to_s.strip) }
- else
+ elsif doc[att].nil?
doc[att] = cur_val.to_s.strip
end
end
@@ -234,28 +268,28 @@ def properties_for_indexing()
self_props.each do |attr_key, attr_val|
# unless doc.include?(attr_key)
- if attr_val.is_a?(Array)
- props[attr_key] = []
- attr_val = attr_val.uniq
-
- attr_val.map { |val|
- real_val = val.kind_of?(Goo::Base::Resource) ? val.id.to_s : val.to_s.strip
+ if attr_val.is_a?(Array)
+ props[attr_key] = []
+ attr_val = attr_val.uniq
- # don't store empty values
- unless real_val.respond_to?('empty?') && real_val.empty?
- prop_vals << real_val
- props[attr_key] << real_val
- end
- }
- else
- real_val = attr_val.to_s.strip
+ attr_val.map { |val|
+ real_val = val.kind_of?(Goo::Base::Resource) ? val.id.to_s : val.to_s.strip
# don't store empty values
unless real_val.respond_to?('empty?') && real_val.empty?
prop_vals << real_val
- props[attr_key] = real_val
+ props[attr_key] << real_val
end
+ }
+ else
+ real_val = attr_val.to_s.strip
+
+ # don't store empty values
+ unless real_val.respond_to?('empty?') && real_val.empty?
+ prop_vals << real_val
+ props[attr_key] = real_val
end
+ end
# end
end
@@ -283,9 +317,9 @@ def childrenCount()
BAD_PROPERTY_URIS = LinkedData::Mappings.mapping_predicates.values.flatten + ['http://bioportal.bioontology.org/metadata/def/prefLabel']
EXCEPTION_URIS = ["http://bioportal.bioontology.org/ontologies/umls/cui"]
BLACKLIST_URIS = BAD_PROPERTY_URIS - EXCEPTION_URIS
- def properties
- return nil if self.unmapped.nil?
- properties = self.unmapped
+ def properties(*args)
+ return nil if self.unmapped(*args).nil?
+ properties = self.unmapped(*args)
BLACKLIST_URIS.each {|bad_iri| properties.delete(RDF::URI.new(bad_iri))}
properties
end
@@ -372,7 +406,7 @@ def hasChildren()
- def load_has_children()
+ def load_has_children()
if !instance_variable_get("@intlHasChildren").nil?
return
end
@@ -381,7 +415,7 @@ def load_has_children()
has_c = false
Goo.sparql_query_client.query(query,
query_options: {rules: :NONE }, graphs: graphs)
- .each do |sol|
+ .each do |sol|
has_c = true
end
@intlHasChildren = has_c
@@ -404,7 +438,7 @@ def retrieve_hierarchy_ids(direction=:ancestors)
next_level_thread = Set.new
query = hierarchy_query(direction,ids_slice)
Goo.sparql_query_client.query(query,query_options: {rules: :NONE }, graphs: graphs)
- .each do |sol|
+ .each do |sol|
parent = sol[:node].to_s
next if !parent.start_with?("http")
ontology = sol[:graph].to_s
@@ -443,7 +477,7 @@ def has_children_query(class_id, submission_id)
}
LIMIT 1
eos
- return query
+ return query
end
def hierarchy_query(direction, class_ids)
@@ -464,7 +498,7 @@ def hierarchy_query(direction, class_ids)
FILTER (#{filter_ids})
}
eos
- return query
+ return query
end
def append_if_not_there_already(path, r)
@@ -488,7 +522,7 @@ def traverse_path_to_root(parents, paths, path_i, tree = false, roots = nil)
parents.each_index do |i|
rec_i = recursions[i]
recurse_on_path[i] = recurse_on_path[i] ||
- !append_if_not_there_already(paths[rec_i], parents[i]).nil?
+ !append_if_not_there_already(paths[rec_i], parents[i]).nil?
end
else
path = paths[path_i]
diff --git a/lib/ontologies_linked_data/models/ontology_submission.rb b/lib/ontologies_linked_data/models/ontology_submission.rb
index 73a9c013..1a289e8c 100644
--- a/lib/ontologies_linked_data/models/ontology_submission.rb
+++ b/lib/ontologies_linked_data/models/ontology_submission.rb
@@ -18,6 +18,7 @@ class OntologySubmission < LinkedData::Models::Base
include SKOS::RootsFetcher
FILES_TO_DELETE = ['labels.ttl', 'mappings.ttl', 'obsolete.ttl', 'owlapi.xrdf', 'errors.log']
+ FOLDERS_TO_DELETE = ['unzipped']
FLAT_ROOTS_LIMIT = 1000
model :ontology_submission, name_with: lambda { |s| submission_id_generator(s) }
@@ -757,6 +758,21 @@ def delete_old_submission_files
submission_files.push(csv_path)
submission_files.push(parsing_log_path) unless parsing_log_path.nil?
FileUtils.rm(submission_files, force: true)
+
+ submission_folders = FOLDERS_TO_DELETE.map { |f| File.join(path_to_repo, f) }
+ submission_folders.each {|d| FileUtils.remove_dir(d) if File.directory?(d)}
+ end
+
+ def zip_submission_uploaded_file
+ self.bring(:uploadFilePath) if self.bring?(:uploadFilePath)
+
+ return self.uploadFilePath if zipped?
+ return self.uploadFilePath if self.uploadFilePath.nil? || self.uploadFilePath.empty?
+
+ old_path = self.uploadFilePath
+ new_path = Utils::FileHelpers.zip_file(old_path)
+ FileUtils.rm(old_path, force: true)
+ new_path
end
# accepts another submission in 'older' (it should be an 'older' ontology version)
@@ -1295,6 +1311,25 @@ def archived?
return ready?(status: [:archived])
end
+ def archive_submission
+ self.submissionStatus = nil
+ status = LinkedData::Models::SubmissionStatus.find("ARCHIVED").first
+ add_submission_status(status)
+
+ # Delete everything except for original ontology file.
+ ontology.bring(:submissions)
+ submissions = ontology.submissions
+ unless submissions.nil?
+ submissions.each { |s| s.bring(:submissionId) }
+ submission = submissions.sort { |a, b| b.submissionId <=> a.submissionId }[0]
+ # Don't perform deletion if this is the most recent submission.
+ if self.submissionId < submission.submissionId
+ delete_old_submission_files
+ self.uploadFilePath = zip_submission_uploaded_file
+ end
+ end
+ end
+
################################################################
# Possible options with their defaults:
# process_rdf = false
@@ -1363,21 +1398,7 @@ def process_submission(logger, options = {})
status = nil
if archive
- self.submissionStatus = nil
- status = LinkedData::Models::SubmissionStatus.find("ARCHIVED").first
- add_submission_status(status)
-
- # Delete everything except for original ontology file.
- ontology.bring(:submissions)
- submissions = ontology.submissions
- unless submissions.nil?
- submissions.each { |s| s.bring(:submissionId) }
- submission = submissions.sort { |a, b| b.submissionId <=> a.submissionId }[0]
- # Don't perform deletion if this is the most recent submission.
- if (self.submissionId < submission.submissionId)
- delete_old_submission_files
- end
- end
+ archive_submission
else
if process_rdf
# Remove processing status types before starting RDF parsing etc.
@@ -1621,6 +1642,7 @@ def index(logger, commit = true, optimize = true)
Thread.current["done"] = true
else
Thread.current["page"] = page || "nil"
+ RequestStore.store[:requested_lang] = :ALL
page_classes = paging.page(page, size).all
count_classes += page_classes.length
Thread.current["page_classes"] = page_classes
@@ -1671,7 +1693,7 @@ def index(logger, commit = true, optimize = true)
Thread.current["page_classes"].each do |c|
begin
# this cal is needed for indexing of properties
- LinkedData::Models::Class.map_attributes(c, paging.equivalent_predicates)
+ LinkedData::Models::Class.map_attributes(c, paging.equivalent_predicates, include_languages: true )
rescue Exception => e
i = 0
num_calls = LinkedData.settings.num_retries_4store
@@ -1683,7 +1705,7 @@ def index(logger, commit = true, optimize = true)
sleep(2)
begin
- LinkedData::Models::Class.map_attributes(c, paging.equivalent_predicates)
+ LinkedData::Models::Class.map_attributes(c, paging.equivalent_predicates, include_languages: true)
logger.info("Thread #{num + 1}: Success mapping attributes for #{c.id.to_s} after retrying #{i} times...")
success = true
rescue Exception => e1
diff --git a/lib/ontologies_linked_data/models/users/user.rb b/lib/ontologies_linked_data/models/users/user.rb
index 46217469..39bd4f8d 100644
--- a/lib/ontologies_linked_data/models/users/user.rb
+++ b/lib/ontologies_linked_data/models/users/user.rb
@@ -92,7 +92,11 @@ def custom_ontology_id_set
end
def to_s
- self.username.to_s
+ if bring?(:username)
+ self.id.to_s
+ else
+ self.username.to_s
+ end
end
private
diff --git a/lib/ontologies_linked_data/monkeypatches/object.rb b/lib/ontologies_linked_data/monkeypatches/object.rb
index deadf71c..ad805310 100644
--- a/lib/ontologies_linked_data/monkeypatches/object.rb
+++ b/lib/ontologies_linked_data/monkeypatches/object.rb
@@ -262,7 +262,7 @@ def populate_hash_from_list(hash, attributes)
next unless self.respond_to?(attribute)
begin
- hash[attribute] = self.send(attribute)
+ hash[attribute] = self.send(attribute, include_languages: true)
rescue Goo::Base::AttributeNotLoaded
next
rescue ArgumentError
diff --git a/lib/ontologies_linked_data/serializer.rb b/lib/ontologies_linked_data/serializer.rb
index ede39cc5..4e2e67ba 100644
--- a/lib/ontologies_linked_data/serializer.rb
+++ b/lib/ontologies_linked_data/serializer.rb
@@ -84,7 +84,7 @@ def self.response(options = {})
end
def self.serialize(type, obj, params, request)
- lang = params['lang'] || Goo.main_languages.first
+ lang = params['lang'] || params['language'] || Goo.main_languages.first
only = params['display'] || []
only = only.split(',') unless only.is_a?(Array)
all = only[0] == 'all'
@@ -106,5 +106,11 @@ def self.print_stacktrace?
end
end
+ def self.get_language(params)
+ lang = params['lang'] || params['language'] || Goo.main_languages&.first&.to_s || 'en'
+ lang = lang.split(',').map {|l| l.downcase.to_sym}
+ return lang.length == 1 ? lang.first : lang
+ end
+
end
end
\ No newline at end of file
diff --git a/lib/ontologies_linked_data/serializers/json.rb b/lib/ontologies_linked_data/serializers/json.rb
index d13d99de..e29bdeb3 100644
--- a/lib/ontologies_linked_data/serializers/json.rb
+++ b/lib/ontologies_linked_data/serializers/json.rb
@@ -6,9 +6,11 @@ class JSON
CONTEXTS = {}
def self.serialize(obj, options = {})
+
+
hash = obj.to_flex_hash(options) do |hash, hashed_obj|
current_cls = hashed_obj.respond_to?(:klass) ? hashed_obj.klass : hashed_obj.class
- result_lang = self.get_languages(get_object_submission(hashed_obj), options[:lang])
+ result_lang = self.get_languages(get_object_submission(hashed_obj), options[:lang]) if result_lang.nil?
# Add the id to json-ld attribute
if current_cls.ancestors.include?(LinkedData::Hypermedia::Resource) && !current_cls.embedded? && hashed_obj.respond_to?(:id)
diff --git a/lib/ontologies_linked_data/utils/file.rb b/lib/ontologies_linked_data/utils/file.rb
index e0866df3..dd517877 100644
--- a/lib/ontologies_linked_data/utils/file.rb
+++ b/lib/ontologies_linked_data/utils/file.rb
@@ -14,15 +14,7 @@ def initialize(gz)
self.name = gz.orig_name
end
end
-
- def self.gzip?(file_path)
- file_path = file_path.to_s
- unless File.exist? file_path
- raise ArgumentError, "File path #{file_path} not found"
- end
- file_type = `file --mime -b #{Shellwords.escape(file_path)}`
- return file_type.split(";")[0] == "application/x-gzip"
- end
+
def self.zip?(file_path)
file_path = file_path.to_s
@@ -88,6 +80,21 @@ def self.unzip(file_path, dst_folder)
extracted_files
end
+ def self.zip_file(file_path)
+ return file_path if self.zip?(file_path)
+
+ zip_file_path = "#{file_path}.zip"
+ Zip::File.open(zip_file_path, Zip::File::CREATE) do |zipfile|
+ # Add the file to the zip
+ begin
+ zipfile.add(File.basename(file_path), file_path)
+ rescue Zip::EntryExistsError
+ end
+
+ end
+ zip_file_path
+ end
+
def self.automaster?(path, format)
self.automaster(path, format) != nil
end
diff --git a/test/data/ontology_files/BRO_v3.5.owl b/test/data/ontology_files/BRO_v3.5.owl
index aee5caa9..33f16c9d 100644
--- a/test/data/ontology_files/BRO_v3.5.owl
+++ b/test/data/ontology_files/BRO_v3.5.owl
@@ -616,6 +616,8 @@
Activity
+ ActivityEnglish
+ Activité
Activity of interest that may be related to a BRO:Resource.
activities
diff --git a/test/data/ontology_files/thesaurusINRAE_nouv_structure.rdf b/test/data/ontology_files/thesaurusINRAE_nouv_structure.skos
similarity index 99%
rename from test/data/ontology_files/thesaurusINRAE_nouv_structure.rdf
rename to test/data/ontology_files/thesaurusINRAE_nouv_structure.skos
index fef6bbe2..3123a0d9 100644
--- a/test/data/ontology_files/thesaurusINRAE_nouv_structure.rdf
+++ b/test/data/ontology_files/thesaurusINRAE_nouv_structure.skos
@@ -30,7 +30,7 @@
1331561625299
- aktivite
+ aktivite
2012-03-12T22:13:45Z
2017-09-22T14:09:06Z
@@ -39,7 +39,7 @@
00008d7b
- air-water exchanges
+ air-water exchanges
2019-09-04T12:02:37
diff --git a/test/models/notes/test_note.rb b/test/models/notes/test_note.rb
index 6fb20171..ecc3cde6 100644
--- a/test/models/notes/test_note.rb
+++ b/test/models/notes/test_note.rb
@@ -67,11 +67,11 @@ def test_note_lifecycle
relatedOntology: [@@ontology],
})
- assert_equal false, n.exist?(reload=true)
+ assert_equal false, n.exist?
n.save
- assert_equal true, n.exist?(reload=true)
+ assert_equal true, n.exist?
n.delete
- assert_equal false, n.exist?(reload=true)
+ assert_equal false, n.exist?
ensure
n.delete if !n.nil? && n.persistent?
end
diff --git a/test/models/skos/test_collections.rb b/test/models/skos/test_collections.rb
index 6d719063..9a1993a7 100644
--- a/test/models/skos/test_collections.rb
+++ b/test/models/skos/test_collections.rb
@@ -10,7 +10,7 @@ def self.before_suite
def test_collections_all
submission_parse('INRAETHES', 'Testing skos',
- 'test/data/ontology_files/thesaurusINRAE_nouv_structure.rdf',
+ 'test/data/ontology_files/thesaurusINRAE_nouv_structure.skos',
1,
process_rdf: true, index_search: false,
run_metrics: false, reasoning: false)
@@ -31,7 +31,7 @@ def test_collections_all
def test_collection_members
submission_parse('INRAETHES', 'Testing skos',
- 'test/data/ontology_files/thesaurusINRAE_nouv_structure.rdf',
+ 'test/data/ontology_files/thesaurusINRAE_nouv_structure.skos',
1,
process_rdf: true, index_search: false,
run_metrics: false, reasoning: false)
diff --git a/test/models/skos/test_schemes.rb b/test/models/skos/test_schemes.rb
index 4960c33b..9f52e303 100644
--- a/test/models/skos/test_schemes.rb
+++ b/test/models/skos/test_schemes.rb
@@ -10,7 +10,7 @@ def self.before_suite
def test_schemes_all
submission_parse('INRAETHES', 'Testing skos',
- 'test/data/ontology_files/thesaurusINRAE_nouv_structure.rdf',
+ 'test/data/ontology_files/thesaurusINRAE_nouv_structure.skos',
1,
process_rdf: true, index_search: false,
run_metrics: false, reasoning: false)
diff --git a/test/models/skos/test_skos_xl.rb b/test/models/skos/test_skos_xl.rb
index f0643317..aa781a4c 100644
--- a/test/models/skos/test_skos_xl.rb
+++ b/test/models/skos/test_skos_xl.rb
@@ -9,7 +9,7 @@ def self.before_suite
def test_skos_xl_label_all
submission_parse('INRAETHES', 'Testing skos',
- 'test/data/ontology_files/thesaurusINRAE_nouv_structure.rdf',
+ 'test/data/ontology_files/thesaurusINRAE_nouv_structure.skos',
1,
process_rdf: true, index_search: false,
run_metrics: false, reasoning: false)
@@ -27,7 +27,7 @@ def test_skos_xl_label_all
def test_class_skos_xl_label
submission_parse('INRAETHES', 'Testing skos',
- 'test/data/ontology_files/thesaurusINRAE_nouv_structure.rdf',
+ 'test/data/ontology_files/thesaurusINRAE_nouv_structure.skos',
1,
process_rdf: true, index_search: false,
run_metrics: false, reasoning: false)
@@ -35,10 +35,6 @@ def test_class_skos_xl_label
ont = LinkedData::Models::Ontology.find(ont).first
sub = ont.latest_submission
- sub.bring_remaining
- sub.hasOntologyLanguage = LinkedData::Models::OntologyFormat.find('SKOS').first
- sub.save
-
class_test = LinkedData::Models::Class.find('http://opendata.inrae.fr/thesaurusINRAE/c_16193')
.in(sub).include(:prefLabel,
altLabelXl: [:literalForm],
diff --git a/test/models/test_class_main_lang.rb b/test/models/test_class_portal_lang.rb
similarity index 59%
rename from test/models/test_class_main_lang.rb
rename to test/models/test_class_portal_lang.rb
index 352dbe7f..2427dfb5 100644
--- a/test/models/test_class_main_lang.rb
+++ b/test/models/test_class_portal_lang.rb
@@ -1,68 +1,79 @@
require_relative './test_ontology_common'
-class TestClassMainLang < LinkedData::TestOntologyCommon
+class TestClassPortalLang < LinkedData::TestOntologyCommon
def self.before_suite
@@old_main_languages = Goo.main_languages
+ RequestStore.store[:requested_lang] = nil
+ parse
end
def self.after_suite
Goo.main_languages = @@old_main_languages
+ RequestStore.store[:requested_lang] = nil
+ end
+
+ def self.parse
+ new('').submission_parse('AGROOE', 'AGROOE Test extract metadata ontology',
+ './test/data/ontology_files/agrooeMappings-05-05-2016.owl', 1,
+ process_rdf: true, index_search: false,
+ run_metrics: false, reasoning: true)
end
def test_map_attribute_found
- cls = parse_and_get_class lang: ['fr']
+ cls = parse_and_get_class lang: [:FR]
cls.bring :unmapped
LinkedData::Models::Class.map_attributes(cls)
- assert_equal 'entité matérielle detaillée', cls.label.first
- assert_equal 'skos prefLabel fr', cls.prefLabel
+ assert_equal ['entité matérielle detaillée'], cls.label
+ assert_includes ['skos prefLabel fr', 'skos prefLabel rien'], cls.prefLabel
assert_equal ['entité fra', 'entite rien'], cls.synonym
end
def test_map_attribute_not_found
- cls = parse_and_get_class lang: ['es']
+ cls = parse_and_get_class lang: [:ES]
cls.bring :unmapped
LinkedData::Models::Class.map_attributes(cls)
- assert_equal ['material detailed entity', 'entité matérielle detaillée'], cls.label
+ assert_empty cls.label
assert_equal 'skos prefLabel rien', cls.prefLabel
- assert_equal ['entita esp' , 'entite rien' ], cls.synonym
+ assert_equal ['entita esp', 'entite rien'], cls.synonym
end
def test_map_attribute_secondary_lang
- cls = parse_and_get_class lang: %w[es fr]
+ cls = parse_and_get_class lang: %i[ES FR]
cls.bring :unmapped
LinkedData::Models::Class.map_attributes(cls)
- assert_equal ['entité matérielle detaillée'], cls.label
+ assert_empty cls.label
assert_equal 'skos prefLabel rien', cls.prefLabel
assert_equal ['entita esp', 'entite rien'], cls.synonym
end
def test_label_main_lang_fr_found
- cls = parse_and_get_class lang: ['fr']
- assert_equal 'entité matérielle detaillée', cls.label.first
+ cls = parse_and_get_class lang: [:FR]
+ assert_equal ['entité matérielle detaillée'], cls.label
assert_equal 'skos prefLabel fr', cls.prefLabel
assert_equal ['entité fra', 'entite rien'], cls.synonym
end
def test_label_main_lang_not_found
- cls = parse_and_get_class lang: ['es']
+ cls = parse_and_get_class lang: [:ES]
- assert_equal ['material detailed entity', 'entité matérielle detaillée'], cls.label
+ assert_empty cls.label
assert_equal 'skos prefLabel rien', cls.prefLabel
assert_equal ['entita esp' , 'entite rien' ], cls.synonym
end
def test_label_secondary_lang
- # 'es' will not be found so will take 'fr' if fond or anything else
- cls = parse_and_get_class lang: %w[es fr]
+ # This feature is obsolete with the request language feature
+ # 'es' will not be found
+ cls = parse_and_get_class lang: %i[ES FR]
- assert_equal ['entité matérielle detaillée'], cls.label
+ assert_empty cls.label
assert_equal 'skos prefLabel rien', cls.prefLabel
assert_equal ['entita esp', 'entite rien'], cls.synonym
end
def test_label_main_lang_en_found
- cls = parse_and_get_class lang: ['en']
+ cls = parse_and_get_class lang: [:EN]
assert_equal 'material detailed entity', cls.label.first
assert_equal 'skos prefLabel en', cls.prefLabel
assert_equal ['entity eng', 'entite rien'], cls.synonym
@@ -72,12 +83,7 @@ def test_label_main_lang_en_found
private
def parse_and_get_class(lang:, klass: 'http://lirmm.fr/2015/resource/AGROOE_c_03')
- lang_set lang
- submission_parse('AGROOE', 'AGROOE Test extract metadata ontology',
- './test/data/ontology_files/agrooeMappings-05-05-2016.owl', 1,
- process_rdf: true, index_search: false,
- run_metrics: false, reasoning: true)
-
+ portal_lang_set portal_languages: lang
cls = get_class(klass,'AGROOE')
assert !cls.nil?
@@ -85,14 +91,13 @@ def parse_and_get_class(lang:, klass: 'http://lirmm.fr/2015/resource/AGROOE_c_03
cls
end
- def lang_set(lang)
- Goo.main_languages = lang
- end
- def get_ontology_last_submission(ont)
- LinkedData::Models::Ontology.find(ont).first.latest_submission()
+ def portal_lang_set(portal_languages: nil)
+ Goo.main_languages = portal_languages if portal_languages
+ RequestStore.store[:requested_lang] = nil
end
+
def get_class(cls, ont)
sub = LinkedData::Models::Ontology.find(ont).first.latest_submission()
LinkedData::Models::Class.find(cls).in(sub).first
diff --git a/test/models/test_class_request_lang.rb b/test/models/test_class_request_lang.rb
new file mode 100644
index 00000000..a07eeffd
--- /dev/null
+++ b/test/models/test_class_request_lang.rb
@@ -0,0 +1,115 @@
+require_relative './test_ontology_common'
+require 'request_store'
+
+class TestClassRequestedLang < LinkedData::TestOntologyCommon
+
+ def self.before_suite
+ @@old_main_languages = Goo.main_languages
+ RequestStore.store[:requested_lang] = nil
+
+ parse
+ end
+
+ def self.after_suite
+ Goo.main_languages = @@old_main_languages
+ RequestStore.store[:requested_lang] = nil
+ end
+
+ def self.parse
+ new('').submission_parse('INRAETHES', 'Testing skos',
+ 'test/data/ontology_files/thesaurusINRAE_nouv_structure.skos', 1,
+ process_rdf: true, index_search: false,
+ run_metrics: false, reasoning: false
+ )
+ end
+
+ def teardown
+ reset_lang
+ end
+
+ def test_requested_language_found
+
+ cls = get_class_by_lang('http://opendata.inrae.fr/thesaurusINRAE/c_22817',
+ requested_lang: :FR)
+ assert_equal 'industrialisation', cls.prefLabel
+ assert_equal ['développement industriel'], cls.synonym
+
+ properties = cls.properties
+ assert_equal ['développement industriel'], properties.select { |x| x.to_s['altLabel'] }.values.first.map(&:to_s)
+ assert_equal ['industrialisation'], properties.select { |x| x.to_s['prefLabel'] }.values.first.map(&:to_s)
+
+ cls = get_class_by_lang('http://opendata.inrae.fr/thesaurusINRAE/c_22817',
+ requested_lang: :EN)
+ assert_equal 'industrialization', cls.prefLabel
+ assert_equal ['industrial development'], cls.synonym
+
+ properties = cls.properties
+ assert_equal ['industrial development'], properties.select { |x| x.to_s['altLabel'] }.values.first.map(&:to_s)
+ assert_equal ['industrialization'], properties.select { |x| x.to_s['prefLabel'] }.values.first.map(&:to_s)
+
+ end
+
+ def test_requested_language_not_found
+
+ cls = get_class_by_lang('http://opendata.inrae.fr/thesaurusINRAE/c_22817',
+ requested_lang: :ES)
+ assert_nil cls.prefLabel
+ assert_empty cls.synonym
+
+ properties = cls.properties
+ assert_empty properties.select { |x| x.to_s['altLabel'] }.values
+ assert_empty properties.select { |x| x.to_s['prefLabel'] }.values
+ end
+
+ def test_request_all_languages
+
+ cls = get_class_by_lang('http://opendata.inrae.fr/thesaurusINRAE/c_22817',
+ requested_lang: :ALL)
+
+ pref_label_all_languages = { en: 'industrialization', fr: 'industrialisation' }
+ assert_includes pref_label_all_languages.values, cls.prefLabel
+ assert_equal pref_label_all_languages, cls.prefLabel(include_languages: true)
+
+ synonym_all_languages = { en: ['industrial development'], fr: ['développement industriel'] }
+
+ assert_equal synonym_all_languages.values.flatten.sort, cls.synonym.sort
+ assert_equal synonym_all_languages, cls.synonym(include_languages: true)
+
+ properties = cls.properties
+
+ assert_equal synonym_all_languages.values.flatten.sort, properties.select { |x| x.to_s['altLabel'] }.values.first.map(&:to_s).sort
+ assert_equal pref_label_all_languages.values.sort, properties.select { |x| x.to_s['prefLabel'] }.values.first.map(&:to_s).sort
+
+ properties = cls.properties(include_languages: true)
+
+ assert_equal synonym_all_languages.stringify_keys,
+ properties.select { |x| x.to_s['altLabel'] }.values.first.transform_values{|v| v.map(&:object)}
+ assert_equal pref_label_all_languages.stringify_keys,
+ properties.select { |x| x.to_s['prefLabel'] }.values.first.transform_values{|v| v.first.object}
+ end
+
+ private
+
+ def lang_set(requested_lang: nil, portal_languages: nil)
+ Goo.main_languages = portal_languages if portal_languages
+ RequestStore.store[:requested_lang] = requested_lang
+ end
+
+ def reset_lang
+ lang_set requested_lang: nil, portal_languages: @@old_main_languages
+ end
+
+ def get_class(cls, ont)
+ sub = LinkedData::Models::Ontology.find(ont).first.latest_submission
+ LinkedData::Models::Class.find(cls).in(sub).first
+ end
+
+ def get_class_by_lang(cls, requested_lang:, portal_languages: nil)
+ lang_set requested_lang: requested_lang, portal_languages: portal_languages
+ cls = get_class(cls, 'INRAETHES')
+ refute_nil cls
+ cls.bring_remaining
+ cls.bring :unmapped
+ cls
+ end
+end
\ No newline at end of file
diff --git a/test/models/test_ontology.rb b/test/models/test_ontology.rb
index b3f055a7..96ec14f0 100644
--- a/test/models/test_ontology.rb
+++ b/test/models/test_ontology.rb
@@ -308,11 +308,11 @@ def test_ontology_delete
})
assert pc.valid?
pc.save
- assert_equal true, pc.exist?(reload=true)
+ assert_equal true, pc.exist?
assert n.valid?
n.save()
- assert_equal true, n.exist?(reload=true)
+ assert_equal true, n.exist?
review_params = {
:creator => u,
@@ -329,12 +329,12 @@ def test_ontology_delete
r = LinkedData::Models::Review.new(review_params)
r.save()
- assert_equal true, r.exist?(reload=true)
+ assert_equal true, r.exist?
o1.delete()
- assert_equal false, n.exist?(reload=true)
- assert_equal false, r.exist?(reload=true)
- assert_equal false, o1.exist?(reload=true)
+ assert_equal false, n.exist?
+ assert_equal false, r.exist?
+ assert_equal false, o1.exist?
o2.delete()
end
diff --git a/test/models/test_ontology_submission.rb b/test/models/test_ontology_submission.rb
index 195a513b..ecae7880 100644
--- a/test/models/test_ontology_submission.rb
+++ b/test/models/test_ontology_submission.rb
@@ -362,6 +362,7 @@ def test_process_submission_archive
# Process one prior to latest submission. Some files should be deleted.
old_sub = sorted_submissions.last
+ old_file_path = old_sub.uploadFilePath
old_sub.process_submission(Logger.new(old_sub.parsing_log_path), parse_options)
assert old_sub.archived?
@@ -382,6 +383,13 @@ def test_process_submission_archive
assert_equal false, File.file?(old_sub.parsing_log_path),
%-File deletion failed for '#{old_sub.parsing_log_path}'-
+
+ assert_equal false, File.file?(old_file_path),
+ %-File deletion failed for '#{old_file_path}'-
+
+ assert old_sub.zipped?
+ assert File.file?(old_sub.uploadFilePath)
+
end
def test_submission_diff_across_ontologies
@@ -446,6 +454,79 @@ def test_index_properties
assert_equal 0, res["response"]["numFound"]
end
+ def test_index_multilingual
+
+ submission_parse("BRO", "BRO Ontology",
+ "./test/data/ontology_files/BRO_v3.5.owl", 1,
+ process_rdf: true, reasoning: false, index_search: true)
+
+
+ res = LinkedData::Models::Class.search("prefLabel:Activity", {:fq => "submissionAcronym:BRO", :start => 0, :rows => 80}, :main)
+ refute_equal 0, res["response"]["numFound"]
+
+ doc = res["response"]["docs"].select{|doc| doc["resource_id"].to_s.eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+ refute_nil doc
+ assert_equal 30, doc.keys.select{|k| k['prefLabel'] || k['synonym']}.size # test that all the languages are indexed
+
+
+ res = LinkedData::Models::Class.search("prefLabel_none:Activity", {:fq => "submissionAcronym:BRO", :start => 0, :rows => 80}, :main)
+ refute_equal 0, res["response"]["numFound"]
+ refute_nil res["response"]["docs"].select{|doc| doc["resource_id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+
+ res = LinkedData::Models::Class.search("prefLabel_fr:Activité", {:fq => "submissionAcronym:BRO", :start => 0, :rows => 80}, :main)
+ refute_equal 0, res["response"]["numFound"]
+ refute_nil res["response"]["docs"].select{|doc| doc["resource_id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+
+
+
+ res = LinkedData::Models::Class.search("prefLabel_en:ActivityEnglish", {:fq => "submissionAcronym:BRO", :start => 0, :rows => 80}, :main)
+ refute_equal 0, res["response"]["numFound"]
+ refute_nil res["response"]["docs"].select{|doc| doc["resource_id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+
+
+ res = LinkedData::Models::Class.search("prefLabel_fr:Activity", {:fq => "submissionAcronym:BRO", :start => 0, :rows => 80}, :main)
+ assert_equal 0, res["response"]["numFound"]
+ end
+
+ def test_zipped_submission_process
+ acronym = "PIZZA"
+ name = "PIZZA Ontology"
+ ontologyFile = "./test/data/ontology_files/pizza.owl.zip"
+ archived_submission = nil
+ 2.times do |i|
+ id = 20 + i
+ ont_submision = LinkedData::Models::OntologySubmission.new({ :submissionId => id})
+ assert (not ont_submision.valid?)
+ assert_equal 4, ont_submision.errors.length
+ uploadFilePath = LinkedData::Models::OntologySubmission.copy_file_repository(acronym, id,ontologyFile)
+ ont_submision.uploadFilePath = uploadFilePath
+ owl, bro, user, contact = submission_dependent_objects("OWL", acronym, "test_linked_models", name)
+ ont_submision.released = DateTime.now - 4
+ ont_submision.hasOntologyLanguage = owl
+ ont_submision.ontology = bro
+ ont_submision.contact = [contact]
+ assert ont_submision.valid?
+ ont_submision.save
+ parse_options = {process_rdf: true, reasoning: true, index_search: false, run_metrics: false, diff: true}
+ begin
+ tmp_log = Logger.new(TestLogFile.new)
+ ont_submision.process_submission(tmp_log, parse_options)
+ rescue Exception => e
+ puts "Error, logged in #{tmp_log.instance_variable_get("@logdev").dev.path}"
+ raise e
+ end
+ archived_submission = ont_submision if i.zero?
+ end
+ parse_options = { process_rdf: false, index_search: false, index_commit: false,
+ run_metrics: false, reasoning: false, archive: true }
+ archived_submission.process_submission(Logger.new(TestLogFile.new), parse_options)
+
+ assert_equal false, File.file?(archived_submission.zip_folder),
+ %-File deletion failed for '#{archived_submission.zip_folder}'-
+
+
+
+ end
def test_submission_parse_zip
skip if ENV["BP_SKIP_HEAVY_TESTS"] == "1"
diff --git a/test/models/test_provisional_class.rb b/test/models/test_provisional_class.rb
index 5c06539c..b7f91034 100644
--- a/test/models/test_provisional_class.rb
+++ b/test/models/test_provisional_class.rb
@@ -37,19 +37,19 @@ def test_provisional_class_lifecycle
# Before save
assert_equal LinkedData::Models::ProvisionalClass.where(label: label).all.count, 0
- assert_equal false, pc.exist?(reload=true)
+ assert_equal false, pc.exist?
pc.save
# After save
assert_equal LinkedData::Models::ProvisionalClass.where(label: label).all.count, 1
- assert_equal true, pc.exist?(reload=true)
+ assert_equal true, pc.exist?
pc.delete
# After delete
assert_equal LinkedData::Models::ProvisionalClass.where(label: label).all.count, 0
- assert_equal false, pc.exist?(reload=true)
+ assert_equal false, pc.exist?
end
def test_provisional_class_valid
@@ -292,7 +292,7 @@ def test_provisional_class_search_indexing
pc.index
resp = LinkedData::Models::Ontology.search("\"#{pc.label}\"", params)
assert_equal 1, resp["response"]["numFound"]
- assert_equal pc.label, resp["response"]["docs"][0]["prefLabel"]
+ assert_equal pc.label, resp["response"]["docs"][0]["prefLabel"].first
pc.unindex
acr = "CSTPROPS"
@@ -315,7 +315,7 @@ def test_provisional_class_search_indexing
resp = LinkedData::Models::Ontology.search("\"#{pc1.label}\"", params)
assert_equal 1, resp["response"]["numFound"]
- assert_equal pc1.label, resp["response"]["docs"][0]["prefLabel"]
+ assert_equal pc1.label, resp["response"]["docs"][0]["prefLabel"].first
par_len = resp["response"]["docs"][0]["parents"].length
assert_equal 5, par_len
assert_equal 1, (resp["response"]["docs"][0]["parents"].select { |x| x == class_id.to_s }).length
diff --git a/test/solr/configsets/term_search/conf/schema.xml b/test/solr/configsets/term_search/conf/schema.xml
new file mode 100644
index 00000000..3bb5f9e7
--- /dev/null
+++ b/test/solr/configsets/term_search/conf/schema.xml
@@ -0,0 +1,1222 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ id
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/test/test_case.rb b/test/test_case.rb
index 5ae5abd8..df7d5b76 100644
--- a/test/test_case.rb
+++ b/test/test_case.rb
@@ -218,11 +218,11 @@ def model_created_test(m)
def model_lifecycle_test(m)
assert_equal(true, m.is_a?(LinkedData::Models::Base), 'Expected is_a?(LinkedData::Models::Base).')
assert_equal(true, m.valid?, "Expected valid model: #{m.errors}")
- assert_equal(false, m.exist?(reload=true), 'Given model is already saved, expected one that is not.')
+ assert_equal(false, m.exist?, 'Given model is already saved, expected one that is not.')
m.save
- assert_equal(true, m.exist?(reload=true), 'Failed to save model.')
+ assert_equal(true, m.exist?, 'Failed to save model.')
m.delete
- assert_equal(false, m.exist?(reload=true), 'Failed to delete model.')
+ assert_equal(false, m.exist?, 'Failed to delete model.')
end
def self.count_pattern(pattern)