diff --git a/.dockerignore b/.dockerignore
index cf76ed57..3b15d33c 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -9,3 +9,4 @@ tmp/*
# Editor temp files
*.swp
*.swo
+test/solr
diff --git a/.env.sample b/.env.sample
new file mode 100644
index 00000000..2c15a1c0
--- /dev/null
+++ b/.env.sample
@@ -0,0 +1,4 @@
+API_URL=http://localhost:9393
+ONTOLOGIES_LINKED_DATA_PATH=
+GOO_PATH=
+SPARQL_CLIENT_PATH=
\ No newline at end of file
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index 212d5dcd..e0b23263 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -1,85 +1,92 @@
-# Workflow for deploying ontologies_api to stage/prod systems via capistrano.
-# This workflow runs after a successeful execution of the unit test workflow and it
-# can also be triggered manually.
+# Workflow to deploy OntoPortal API to stage/prod systems
#
# Required github secrets:
#
-# CONFIG_REPO - github repo containing config and customizations for the API. Format 'author/private_config_repo'
+# CONFIG_REPO - github repo containing config and customizations for the API. Format 'author/private_config_repo'
# it is used for getting capistrano deployment configuration for stages on the github actions runner and
-# PRIVATE_CONFIG_REPO env var is constructed from it which is used by capistrano on the remote servers for pulling configs.
+# the PRIVATE_CONFIG_REPO env var is constructed from it and is used by capistrano on the API hosts to pull configs.
#
-# GH_PAT - github Personal Access Token for accessing PRIVATE_CONFIG_REPO
+# GH_PAT - github Personal Access Token for accessing private config repo
#
-# SSH_JUMPHOST - ssh jump/proxy host though which deployments have to though if app servers are hosted on private network.
+# SSH_JUMPHOST - ssh jump/proxy host through which deployments are tunneled when API nodes live on a private network.
+# SSH_JUMPHOST_USER - username used to connect to the ssh jump/proxy host.
#
-# DEPLOY_ENC_KEY - key for decrypting deploymnet ssh key residing in config/deploy_id_rsa_enc (see miloserdow/capistrano-deploy)
-# this SSH key is used for accessing jump host, UI nodes, and private github repo.
+# DEPLOY_ENC_KEY - key for decrypting the deployment ssh key residing in config/
+# this SSH key is used for accessing the jump host, API nodes, and the private github repo.
name: Capistrano Deployment
# Controls when the action will run.
on:
- # Trigger deployment to staging after unit test action completes
- workflow_run:
- workflows: ["Ruby Unit Tests"]
- types:
- - completed
- branches: [master, develop]
+ push:
+ branches:
+ - stage
+ - test
# Allows running this workflow manually from the Actions tab
workflow_dispatch:
- branches: [master, develop]
inputs:
BRANCH:
- description: 'Branch/tag to deploy'
- default: develop
+ description: "Branch/tag to deploy"
+ options:
+ - stage
+ - test
+ - master
+ default: stage
required: true
environment:
- description: 'target environment to deploy to'
+ description: "target environment to deploy to"
type: choice
options:
- staging
- - production
- default: staging
-
+ - agroportal
+ - test
+        default: staging
jobs:
deploy:
runs-on: ubuntu-latest
- # run deployment only if "Ruby Unit Tests" workflow completes sucessefully or when manually triggered
- if: ${{ (github.event.workflow_run.conclusion == 'success') || (github.event_name == 'workflow_dispatch') }}
env:
- BUNDLE_WITHOUT: default #install gems required primarily for the deployment in order to speed this workflow
+      BUNDLE_WITHOUT: default # install only gems needed for deployment, to speed up the workflow
PRIVATE_CONFIG_REPO: ${{ format('git@github.com:{0}.git', secrets.CONFIG_REPO) }}
# Steps represent a sequence of tasks that will be executed as part of the job
steps:
- - name: set branch/tag and environment to deploy from inputs
- run: |
- # workflow_dispatch default input doesn't get set on push so we need to set defaults
- # via shell parameter expansion
- # https://dev.to/mrmike/github-action-handling-input-default-value-5f2g
- USER_INPUT_BRANCH=${{ inputs.branch }}
- echo "BRANCH=${USER_INPUT_BRANCH:-develop}" >> $GITHUB_ENV
- USER_INPUT_ENVIRONMENT=${{ inputs.environment }}
- echo "TARGET=${USER_INPUT_ENVIRONMENT:-staging}" >> $GITHUB_ENV
- # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- - uses: actions/checkout@v3
- - uses: ruby/setup-ruby@v1
- with:
- ruby-version: 2.7.6 # Not needed with a .ruby-version file
- bundler-cache: true # runs 'bundle install' and caches installed gems automatically
- - name: get-deployment-config
- uses: actions/checkout@v3
- with:
- repository: ${{ secrets.CONFIG_REPO }} # repository containing deployment settings
- token: ${{ secrets.GH_PAT }} # `GH_PAT` is a secret that contains your PAT
- path: deploy_config
- - name: copy-deployment-config
- run: cp -r deploy_config/ontologies_api/* .
- # add ssh hostkey so that capistrano doesn't complain
- - name: Add jumphost's hostkey to Known Hosts
- run: |
- mkdir -p ~/.ssh
- ssh-keyscan -H ${{ secrets.SSH_JUMPHOST }} > ~/.ssh/known_hosts
- shell: bash
- - uses: miloserdow/capistrano-deploy@master
- with:
- target: ${{ env.TARGET }} # which environment to deploy
- deploy_key: ${{ secrets.DEPLOY_ENC_KEY }} # Name of the variable configured in Settings/Secrets of your github project
+ - name: set branch/tag and environment to deploy from inputs
+ run: |
+ # workflow_dispatch default input doesn't get set on push so we need to set defaults
+ # via shell parameter expansion
+ # https://dev.to/mrmike/github-action-handling-input-default-value-5f2g
+ USER_INPUT_BRANCH=${{ inputs.branch }}
+ echo "BRANCH=${USER_INPUT_BRANCH:github.head_ref:-master}" >> $GITHUB_ENV
+
+ USER_INPUT_ENVIRONMENT=${{ inputs.environment }}
+ echo "TARGET=${USER_INPUT_ENVIRONMENT:-staging}" >> $GITHUB_ENV
+
+ CONFIG_REPO=${{ secrets.CONFIG_REPO }}
+ GH_PAT=${{ secrets.GH_PAT }}
+ echo "PRIVATE_CONFIG_REPO=https://${GH_PAT}@github.com/${CONFIG_REPO}" >> $GITHUB_ENV
+
+ echo "SSH_JUMPHOST=${{ secrets.SSH_JUMPHOST }}" >> $GITHUB_ENV
+ echo "SSH_JUMPHOST_USER=${{ secrets.SSH_JUMPHOST_USER }}" >> $GITHUB_ENV
+ # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
+ - uses: actions/checkout@v3
+ - uses: ruby/setup-ruby@v1
+ with:
+ ruby-version: 2.7.6 # Not needed with a .ruby-version file
+ bundler-cache: true # runs 'bundle install' and caches installed gems automatically
+ - name: get-deployment-config
+ uses: actions/checkout@v3
+ with:
+ repository: ${{ secrets.CONFIG_REPO }} # repository containing deployment settings
+ token: ${{ secrets.GH_PAT }} # `GH_PAT` is a secret that contains your PAT
+ path: deploy_config
+ - name: copy-deployment-config
+        run: cp -r deploy_config/ontologies_api/${{ env.TARGET }}/* .
+ # add ssh hostkey so that capistrano doesn't complain
+ - name: Add jumphost's hostkey to Known Hosts
+ run: |
+ mkdir -p ~/.ssh
+ echo "${{ secrets.SSH_JUMPHOST }}"
+ ssh-keyscan -H ${{ secrets.SSH_JUMPHOST }} > ~/.ssh/known_hosts
+ shell: bash
+ - uses: miloserdow/capistrano-deploy@master
+ with:
+ target: ${{ env.TARGET }} # which environment to deploy
+ deploy_key: ${{ secrets.DEPLOY_ENC_KEY }} # Name of the variable configured in Settings/Secrets of your github project
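The reworked workflow deploys on pushes to the stage and test branches and can still be dispatched by hand. A manual run from the GitHub CLI might look like the sketch below, assuming an authenticated `gh` and the workflow name declared above; the input values are examples:

```bash
# Manually dispatch the deployment workflow (inputs as defined above)
gh workflow run "Capistrano Deployment" -f BRANCH=stage -f environment=agroportal
# Follow the run's progress interactively
gh run watch
```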
diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml
new file mode 100644
index 00000000..9d47b3f9
--- /dev/null
+++ b/.github/workflows/docker-image.yml
@@ -0,0 +1,56 @@
+name: Docker branch Images build
+
+on:
+ push:
+ branches:
+ - master
+ - development
+ - stage
+ - test
+ release:
+ types: [ published ]
+jobs:
+ push_to_registry:
+ name: Push Docker branch image to Docker Hub
+ runs-on: ubuntu-latest
+ steps:
+ - name: Check out the repo
+ uses: actions/checkout@v3
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v2
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2
+
+ - name: Log in to Docker Hub
+ uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ - name: Log in to the Container registry
+ uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
+ with:
+ registry: ghcr.io
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Extract metadata (tags, labels) for Docker
+ id: meta
+ uses: docker/metadata-action@v4
+ with:
+ images: |
+ agroportal/ontologies_api
+ ghcr.io/${{ github.repository }}
+
+ - name: Build and push Docker image
+ uses: docker/build-push-action@v4
+ with:
+ context: .
+ platforms: linux/amd64,linux/arm64
+ build-args: |
+ RUBY_VERSION=2.7.8
+ push: true
+ tags: ${{ steps.meta.outputs.tags }}
+ labels: ${{ steps.meta.outputs.labels }}
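Since docker/metadata-action tags images with the branch or release name by default, each listed branch should yield a pullable tag on both registries. A quick smoke test of a published image, assuming the default branch tags:

```bash
# Pull and run a branch image built by the workflow above (tag assumed from
# docker/metadata-action's default branch tagging)
docker pull agroportal/ontologies_api:master
docker run --rm -p 9393:9393 agroportal/ontologies_api:master
```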
diff --git a/.github/workflows/ruby-unit-tests.yml b/.github/workflows/ruby-unit-tests.yml
index 6b2c973d..16d8357e 100644
--- a/.github/workflows/ruby-unit-tests.yml
+++ b/.github/workflows/ruby-unit-tests.yml
@@ -7,22 +7,35 @@ on:
jobs:
test:
strategy:
+ fail-fast: false
matrix:
- backend: ['api'] # api runs tests with 4store backend and api-agraph runs with AllegroGraph backend
+ goo-slice: [ '20', '100', '500' ]
+ ruby-version: [ '2.7' ]
+ triplestore: [ 'fs', 'ag', 'vo', 'gb' ]
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
- - name: Build docker-compose
- run: docker-compose --profile 4store build #profile flag is set in order to build all containers in this step
- - name: Run unit tests
- # unit tests are run inside a container
- # http://docs.codecov.io/docs/testing-with-docker
- run: |
- ci_env=`bash <(curl -s https://codecov.io/env)`
- docker-compose run $ci_env -e CI --rm ${{ matrix.backend }} wait-for-it solr-ut:8983 -- bundle exec rake test TESTOPTS='-v'
- - name: Upload coverage reports to Codecov
- uses: codecov/codecov-action@v3
- with:
- flags: unittests
- verbose: true
- fail_ci_if_error: false # optional (default = false)
+ - uses: actions/checkout@v3
+ - name: Install Dependencies
+ run: sudo apt-get update && sudo apt-get -y install raptor2-utils
+ - name: Set up JDK 11
+ uses: actions/setup-java@v2
+ with:
+ java-version: '11'
+ distribution: 'adopt'
+ - name: Set up Ruby
+ uses: ruby/setup-ruby@v1
+ with:
+ ruby-version: ${{ matrix.ruby-version }}
+ bundler-cache: true # runs 'bundle install' and caches installed gems automatically
+ - name: Run unit tests
+ # unit tests are run inside a container
+ # http://docs.codecov.io/docs/testing-with-docker
+ run: |
+ ci_env=`bash <(curl -s https://codecov.io/env)`
+ GOO_SLICES=${{ matrix.goo-slice }} bundle exec rake test:docker:${{ matrix.triplestore }} TESTOPTS="-v"
+ - name: Upload coverage reports to Codecov
+ uses: codecov/codecov-action@v3
+ with:
+ flags: unittests
+ verbose: true
+ fail_ci_if_error: false # optional (default = false)
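The matrix now crosses three Goo slice sizes with four triplestore backends. A single matrix cell can be reproduced locally with the same rake task the workflow invokes, for example:

```bash
# Reproduce one CI matrix cell (fs backend, slice size 100)
GOO_SLICES=100 bundle exec rake test:docker:fs TESTOPTS="-v"
```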
diff --git a/.gitignore b/.gitignore
index c9a9f729..786ec9bf 100644
--- a/.gitignore
+++ b/.gitignore
@@ -37,7 +37,6 @@ config/environments/*
!config/environments/biodiv-denbi-config.rb.sample
#ignore capistrano deployment
-config/deploy/*
config/*.p12
# Ignore generated test data
@@ -77,3 +76,5 @@ ontologies_api.iml
.rubocop.yml
.solargraph.yml
.vscode
+
+.env
diff --git a/Dockerfile b/Dockerfile
index 3e65fe4a..4dcc0c72 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -7,6 +7,7 @@ RUN apt-get update -yqq && apt-get install -yqq --no-install-recommends \
openjdk-11-jre-headless \
raptor2-utils \
wait-for-it \
+ libraptor2-dev \
&& rm -rf /var/lib/apt/lists/*
RUN mkdir -p /srv/ontoportal/ontologies_api
@@ -15,12 +16,14 @@ COPY Gemfile* /srv/ontoportal/ontologies_api/
WORKDIR /srv/ontoportal/ontologies_api
-RUN gem update --system
+RUN gem update --system 3.4.22 # the 3.4.22 pin can be dropped once we support Ruby > 3.0
RUN gem install bundler
ENV BUNDLE_PATH=/srv/ontoportal/bundle
RUN bundle install
COPY . /srv/ontoportal/ontologies_api
+RUN cp /srv/ontoportal/ontologies_api/config/environments/config.rb.sample /srv/ontoportal/ontologies_api/config/environments/development.rb
+RUN cp /srv/ontoportal/ontologies_api/config/environments/config.rb.sample /srv/ontoportal/ontologies_api/config/environments/production.rb
EXPOSE 9393
CMD ["bundle", "exec", "rackup", "-p", "9393", "--host", "0.0.0.0"]
diff --git a/Gemfile b/Gemfile
index e14696cf..1770d15a 100644
--- a/Gemfile
+++ b/Gemfile
@@ -1,24 +1,28 @@
source 'https://rubygems.org'
-gem 'activesupport', '~> 3.0'
+gem 'activesupport', '~> 3.2'
# see https://github.com/ncbo/ontologies_api/issues/69
gem 'bigdecimal', '1.4.2'
gem 'faraday', '~> 1.9'
gem 'json-schema', '~> 2.0'
gem 'multi_json', '~> 1.0'
-gem 'oj', '~> 2.0'
+gem 'oj'
gem 'parseconfig'
gem 'rack'
gem 'rake', '~> 10.0'
gem 'sinatra', '~> 1.0'
gem 'sinatra-advanced-routes'
gem 'sinatra-contrib', '~> 1.0'
+gem 'request_store'
+gem 'parallel'
+gem 'json-ld'
+
# Rack middleware
gem 'ffi'
gem 'rack-accept', '~> 0.4'
gem 'rack-attack', '~> 6.6.1', require: 'rack/attack'
-gem 'rack-cache', '~> 1.0'
+gem 'rack-cache', '~> 1.13.0'
gem 'rack-cors', require: 'rack/cors'
# GitHub dependency can be removed when https://github.com/niko/rack-post-body-to-params/pull/6 is merged and released
gem 'rack-post-body-to-params', github: 'palexander/rack-post-body-to-params', branch: 'multipart_support'
@@ -27,7 +31,7 @@ gem 'redis-rack-cache', '~> 2.0'
# Data access (caching)
gem 'redis'
-gem 'redis-activesupport'
+gem 'redis-store', '~>1.10'
# Monitoring
gem 'cube-ruby', require: 'cube'
@@ -43,10 +47,10 @@ gem 'redcarpet'
# NCBO gems (can be from a local dev path or from rubygems/git)
gem 'goo', git: 'https://github.com/ontoportal-lirmm/goo.git', branch: 'master'
-gem 'ncbo_annotator', git: 'https://github.com/ontoportal-lirmm/ncbo_annotator.git', branch: 'master'
+gem 'ncbo_annotator', git: 'https://github.com/ontoportal-lirmm/ncbo_annotator.git', branch: 'development'
gem 'ncbo_cron', git: 'https://github.com/ontoportal-lirmm/ncbo_cron.git', branch: 'master'
gem 'ncbo_ontology_recommender', git: 'https://github.com/ncbo/ncbo_ontology_recommender.git', branch: 'master'
-gem 'sparql-client', github: 'ontoportal-lirmm/sparql-client', branch: 'master'
+gem 'sparql-client', github: 'ontoportal-lirmm/sparql-client', branch: 'development'
gem 'ontologies_linked_data', git: 'https://github.com/BiodivPortal/ontologies_linked_data.git', branch: 'master'
group :development do
@@ -66,6 +70,7 @@ group :development do
gem 'reek', require: false
end
+
group :profiling do
gem 'rack-mini-profiler'
end
@@ -76,4 +81,5 @@ group :test do
gem 'rack-test'
gem 'simplecov', require: false
gem 'simplecov-cobertura' # for codecov.io
+ gem 'webmock', '~> 3.19.1'
end
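Several gems now track git branches (goo, sparql-client on development, and so on). During development they can be swapped for local checkouts via Bundler's local git overrides, which is exactly what bin/ontoportal automates further down; a manual sketch, with example paths:

```bash
# Point a branch-sourced gem at a local clone (paths are examples)
bundle config local.goo ~/dev/goo
bundle config local.sparql-client ~/dev/sparql-client
# Remove an override when done
bundle config unset local.goo
```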
diff --git a/Gemfile.lock b/Gemfile.lock
index 25553563..4a6056c5 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -1,6 +1,6 @@
GIT
remote: https://github.com/BiodivPortal/ontologies_linked_data.git
- revision: 9b90ad0483b62b267e6a57006fd38fd35d4987cb
+ revision: 4bd30687ba5800edd1d349267e1aacbd0cc6d0ca
branch: master
specs:
ontologies_linked_data (0.0.1)
@@ -20,7 +20,7 @@ GIT
GIT
remote: https://github.com/ncbo/ncbo_ontology_recommender.git
- revision: d0ac992c88bd417f2f2137ba62934c3c41b6db7c
+ revision: 013abea4af3b10910ec661dbb358a4b6cae198a4
branch: master
specs:
ncbo_ontology_recommender (0.0.1)
@@ -31,13 +31,16 @@ GIT
GIT
remote: https://github.com/ontoportal-lirmm/goo.git
- revision: e03f6e8f209a3408a348290d76d129743c0f2fed
+ revision: c48a1e4dfe82a2e5c42614a30380f3bdb2044ba9
branch: master
specs:
goo (0.0.2)
addressable (~> 2.8)
pry
- rdf (= 1.0.8)
+ rdf (= 3.2.11)
+ rdf-raptor
+ rdf-rdfxml
+ rdf-vocab
redis
rest-client
rsolr
@@ -46,8 +49,8 @@ GIT
GIT
remote: https://github.com/ontoportal-lirmm/ncbo_annotator.git
- revision: 57204d8e54432ba660af4c49806e2a3019a23fa2
- branch: master
+ revision: 1eb751b65d10ae23d45c74e0516c78754a8419f0
+ branch: development
specs:
ncbo_annotator (0.0.1)
goo
@@ -57,12 +60,13 @@ GIT
GIT
remote: https://github.com/ontoportal-lirmm/ncbo_cron.git
- revision: 8db3481116c57d2a21dc8f32bcd1695d95442280
+ revision: bed0ff08408ad1241db3513992ad025a253eeef0
branch: master
specs:
ncbo_cron (0.0.1)
dante
goo
+ google-analytics-data
google-apis-analytics_v3
mlanett-redis-lock
multi_json
@@ -73,13 +77,12 @@ GIT
GIT
remote: https://github.com/ontoportal-lirmm/sparql-client.git
- revision: aed51baf4106fd0f3d0e3f9238f0aad9406aa3f0
- branch: master
+ revision: 59251e59346c9a69a67c88552ba55a1244eec602
+ branch: development
specs:
- sparql-client (1.0.1)
- json_pure (>= 1.4)
- net-http-persistent (= 2.9.4)
- rdf (>= 1.0)
+ sparql-client (3.2.2)
+ net-http-persistent (~> 4.0, >= 4.0.2)
+ rdf (~> 3.2, >= 3.2.11)
GIT
remote: https://github.com/palexander/rack-post-body-to-params.git
@@ -103,24 +106,25 @@ GEM
activesupport (3.2.22.5)
i18n (~> 0.6, >= 0.6.4)
multi_json (~> 1.0)
- addressable (2.8.4)
- public_suffix (>= 2.0.2, < 6.0)
- airbrussh (1.4.1)
+ addressable (2.8.7)
+ public_suffix (>= 2.0.2, < 7.0)
+ airbrussh (1.5.2)
sshkit (>= 1.6.1, != 1.7.0)
ast (2.4.2)
backport (1.2.0)
- backports (3.24.1)
- bcrypt (3.1.18)
- bcrypt_pbkdf (1.1.0)
- benchmark (0.2.1)
+ backports (3.25.0)
+ base64 (0.2.0)
+ bcrypt (3.1.20)
+ bcrypt_pbkdf (1.1.1)
+ benchmark (0.3.0)
bigdecimal (1.4.2)
- builder (3.2.4)
- capistrano (3.17.2)
+ builder (3.3.0)
+ capistrano (3.19.1)
airbrussh (>= 1.0.0)
i18n
rake (>= 10.0.0)
sshkit (>= 1.9.0)
- capistrano-bundler (2.1.0)
+ capistrano-bundler (2.1.1)
capistrano (~> 3.1)
capistrano-locally (0.3.0)
capistrano (~> 3.0)
@@ -128,16 +132,18 @@ GEM
capistrano (~> 3.1)
sshkit (~> 1.3)
coderay (1.1.3)
- concurrent-ruby (1.2.2)
- connection_pool (2.4.0)
+ concurrent-ruby (1.3.4)
+ connection_pool (2.4.1)
+ crack (1.0.0)
+ bigdecimal
+ rexml
cube-ruby (0.0.3)
dante (0.2.0)
- date (3.3.3)
+ date (3.3.4)
declarative (0.0.20)
- diff-lcs (1.5.0)
- docile (1.4.0)
- domain_name (0.5.20190701)
- unf (>= 0.0.5, < 1.0.0)
+ diff-lcs (1.5.1)
+ docile (1.4.1)
+ domain_name (0.6.20240107)
e2mmap (0.1.0)
ed25519 (1.3.0)
faraday (1.10.3)
@@ -158,55 +164,91 @@ GEM
faraday-httpclient (1.0.1)
faraday-multipart (1.0.4)
multipart-post (~> 2)
- faraday-net_http (1.0.1)
+ faraday-net_http (1.0.2)
faraday-net_http_persistent (1.2.0)
faraday-patron (1.0.0)
faraday-rack (1.0.0)
faraday-retry (1.0.3)
- ffi (1.15.5)
+ ffi (1.17.0)
+ gapic-common (0.21.1)
+ faraday (>= 1.9, < 3.a)
+ faraday-retry (>= 1.0, < 3.a)
+ google-protobuf (~> 3.18)
+ googleapis-common-protos (>= 1.4.0, < 2.a)
+ googleapis-common-protos-types (>= 1.11.0, < 2.a)
+ googleauth (~> 1.9)
+ grpc (~> 1.59)
get_process_mem (0.2.7)
ffi (~> 1.0)
- google-apis-analytics_v3 (0.13.0)
- google-apis-core (>= 0.11.0, < 2.a)
- google-apis-core (0.11.0)
+ google-analytics-data (0.6.1)
+ google-analytics-data-v1beta (>= 0.11, < 2.a)
+ google-cloud-core (~> 1.6)
+ google-analytics-data-v1beta (0.13.0)
+ gapic-common (>= 0.21.1, < 2.a)
+ google-cloud-errors (~> 1.0)
+ google-apis-analytics_v3 (0.16.0)
+ google-apis-core (>= 0.15.0, < 2.a)
+ google-apis-core (0.15.1)
addressable (~> 2.5, >= 2.5.1)
- googleauth (>= 0.16.2, < 2.a)
- httpclient (>= 2.8.1, < 3.a)
+ googleauth (~> 1.9)
+ httpclient (>= 2.8.3, < 3.a)
mini_mime (~> 1.0)
+ mutex_m
representable (~> 3.0)
retriable (>= 2.0, < 4.a)
- rexml
- webrick
- googleauth (1.5.2)
- faraday (>= 0.17.3, < 3.a)
+ google-cloud-core (1.7.1)
+ google-cloud-env (>= 1.0, < 3.a)
+ google-cloud-errors (~> 1.0)
+ google-cloud-env (2.1.1)
+ faraday (>= 1.0, < 3.a)
+ google-cloud-errors (1.4.0)
+ google-protobuf (3.25.4)
+ googleapis-common-protos (1.6.0)
+ google-protobuf (>= 3.18, < 5.a)
+ googleapis-common-protos-types (~> 1.7)
+ grpc (~> 1.41)
+ googleapis-common-protos-types (1.15.0)
+ google-protobuf (>= 3.18, < 5.a)
+ googleauth (1.11.0)
+ faraday (>= 1.0, < 3.a)
+ google-cloud-env (~> 2.1)
jwt (>= 1.4, < 3.0)
- memoist (~> 0.16)
multi_json (~> 1.11)
os (>= 0.9, < 2.0)
signet (>= 0.16, < 2.a)
+ grpc (1.65.2-x86_64-linux)
+ google-protobuf (>= 3.25, < 5.0)
+ googleapis-common-protos-types (~> 1.0)
haml (5.2.2)
temple (>= 0.8.0)
tilt
+ hashdiff (1.1.1)
+ htmlentities (4.3.4)
http-accept (1.7.0)
- http-cookie (1.0.5)
+ http-cookie (1.0.7)
domain_name (~> 0.5)
httpclient (2.8.3)
i18n (0.9.5)
concurrent-ruby (~> 1.0)
- jaro_winkler (1.5.4)
- json (2.6.3)
+ jaro_winkler (1.6.0)
+ json (2.7.2)
+ json-ld (3.0.2)
+ multi_json (~> 1.12)
+ rdf (>= 2.2.8, < 4.0)
json-schema (2.8.1)
addressable (>= 2.4)
- json_pure (2.6.3)
- jwt (2.7.0)
+ jwt (2.8.2)
+ base64
kgio (2.11.4)
kramdown (2.4.0)
rexml
kramdown-parser-gfm (1.1.0)
kramdown (~> 2.0)
kwalify (0.7.2)
- libxml-ruby (4.1.1)
- logger (1.5.3)
+ language_server-protocol (3.17.0.3)
+ libxml-ruby (5.0.3)
+ link_header (0.0.8)
+ logger (1.6.0)
macaddr (1.7.2)
systemu (~> 2.6.5)
mail (2.8.1)
@@ -214,50 +256,54 @@ GEM
net-imap
net-pop
net-smtp
- memoist (0.16.2)
- method_source (1.0.0)
- mime-types (3.4.1)
+ method_source (1.1.0)
+ mime-types (3.5.2)
mime-types-data (~> 3.2015)
- mime-types-data (3.2023.0218.1)
- mini_mime (1.1.2)
+ mime-types-data (3.2024.0806)
+ mini_mime (1.1.5)
minitest (4.7.5)
minitest-stub_any_instance (1.0.3)
mlanett-redis-lock (0.2.7)
redis
multi_json (1.15.0)
- multipart-post (2.3.0)
- net-http-persistent (2.9.4)
- net-imap (0.3.4)
+ multipart-post (2.4.1)
+ mutex_m (0.2.0)
+ net-http-persistent (4.0.2)
+ connection_pool (~> 2.2)
+ net-imap (0.4.14)
date
net-protocol
net-pop (0.1.2)
net-protocol
- net-protocol (0.2.1)
+ net-protocol (0.2.2)
timeout
net-scp (4.0.0)
net-ssh (>= 2.6.5, < 8.0.0)
- net-smtp (0.3.3)
+ net-sftp (4.0.0)
+ net-ssh (>= 5.0.0, < 8.0.0)
+ net-smtp (0.5.0)
net-protocol
- net-ssh (7.1.0)
+ net-ssh (7.2.3)
netrc (0.11.0)
- newrelic_rpm (9.2.2)
- nokogiri (1.15.0-x86_64-linux)
+ newrelic_rpm (9.12.0)
+ nokogiri (1.15.6-x86_64-linux)
racc (~> 1.4)
- oj (2.18.5)
+ oj (3.16.1)
omni_logger (0.1.4)
logger
os (1.1.4)
- parallel (1.23.0)
+ parallel (1.26.2)
parseconfig (1.1.2)
- parser (3.2.2.1)
+ parser (3.2.2.4)
ast (~> 2.4.1)
+ racc
pony (1.13.1)
mail (>= 2.0)
pry (0.14.2)
coderay (~> 1.1)
method_source (~> 1.0)
- public_suffix (5.0.1)
- racc (1.6.2)
+ public_suffix (5.1.1)
+ racc (1.8.1)
rack (1.6.13)
rack-accept (0.4.5)
rack (>= 0.4)
@@ -267,41 +313,53 @@ GEM
rack (>= 0.4)
rack-cors (1.0.6)
rack (>= 1.6.0)
- rack-mini-profiler (3.1.0)
+ rack-mini-profiler (3.3.1)
rack (>= 1.2.0)
rack-protection (1.5.5)
rack
rack-test (2.1.0)
rack (>= 1.3)
- rack-timeout (0.6.3)
+ rack-timeout (0.7.0)
rainbow (3.1.1)
raindrops (0.20.1)
rake (10.5.0)
rbs (2.8.4)
- rdf (1.0.8)
- addressable (>= 2.2)
+ rdf (3.2.11)
+ link_header (~> 0.0, >= 0.0.8)
+ rdf-raptor (3.2.0)
+ ffi (~> 1.15)
+ rdf (~> 3.2)
+ rdf-rdfxml (3.2.2)
+ builder (~> 3.2)
+ htmlentities (~> 4.3)
+ rdf (~> 3.2)
+ rdf-xsd (~> 3.2)
+ rdf-vocab (3.2.7)
+ rdf (~> 3.2, >= 3.2.4)
+ rdf-xsd (3.2.1)
+ rdf (~> 3.2)
+ rexml (~> 3.2)
redcarpet (3.6.0)
- redis (5.0.6)
- redis-client (>= 0.9.0)
- redis-activesupport (5.3.0)
- activesupport (>= 3, < 8)
- redis-store (>= 1.3, < 2)
- redis-client (0.14.1)
+ redis (5.2.0)
+ redis-client (>= 0.22.0)
+ redis-client (0.22.2)
connection_pool
redis-rack-cache (2.2.1)
rack-cache (>= 1.10, < 2)
redis-store (>= 1.6, < 2)
- redis-store (1.9.2)
+ redis-store (1.11.0)
redis (>= 4, < 6)
reek (6.1.4)
kwalify (~> 0.7.0)
parser (~> 3.2.0)
rainbow (>= 2.0, < 4.0)
- regexp_parser (2.8.0)
+ regexp_parser (2.9.2)
representable (3.2.0)
declarative (< 0.1.0)
trailblazer-option (>= 0.1.1, < 0.2.0)
uber (< 0.2.0)
+ request_store (1.7.0)
+ rack (>= 1.4)
rest-client (2.1.0)
http-accept (>= 1.7.0, < 2.0)
http-cookie (>= 1.0.2, < 2.0)
@@ -310,30 +368,32 @@ GEM
retriable (3.1.2)
reverse_markdown (2.1.1)
nokogiri
- rexml (3.2.5)
- rsolr (2.5.0)
+ rexml (3.3.5)
+ strscan
+ rsolr (2.6.0)
builder (>= 2.1.2)
faraday (>= 0.9, < 3, != 2.0.0)
- rubocop (1.51.0)
+ rubocop (1.59.0)
json (~> 2.3)
+ language_server-protocol (>= 3.17.0)
parallel (~> 1.10)
- parser (>= 3.2.0.0)
+ parser (>= 3.2.2.4)
rainbow (>= 2.2.2, < 4.0)
regexp_parser (>= 1.8, < 3.0)
rexml (>= 3.2.5, < 4.0)
- rubocop-ast (>= 1.28.0, < 2.0)
+ rubocop-ast (>= 1.30.0, < 2.0)
ruby-progressbar (~> 1.7)
unicode-display_width (>= 2.4.0, < 3.0)
- rubocop-ast (1.28.1)
+ rubocop-ast (1.30.0)
parser (>= 3.2.1.0)
ruby-progressbar (1.13.0)
ruby-xxHash (0.4.0.2)
ruby2_keywords (0.0.5)
rubyzip (2.3.2)
- rufo (0.16.1)
+ rufo (0.17.1)
rufus-scheduler (2.0.24)
tzinfo (>= 0.3.22)
- signet (0.17.0)
+ signet (0.19.0)
addressable (~> 2.8)
faraday (>= 0.17.5, < 3.a)
jwt (>= 1.5, < 3.0)
@@ -360,7 +420,7 @@ GEM
rack-test
sinatra (~> 1.4.0)
tilt (>= 1.3, < 3)
- solargraph (0.49.0)
+ solargraph (0.50.0)
backport (~> 1.2)
benchmark
bundler (~> 2.0)
@@ -379,22 +439,22 @@ GEM
solargraph-rails (1.1.0)
activesupport
solargraph
- sshkit (1.21.4)
+ sshkit (1.23.0)
+ base64
net-scp (>= 1.1.2)
+ net-sftp (>= 2.1.2)
net-ssh (>= 2.8.0)
+ strscan (3.1.0)
systemu (2.6.5)
- temple (0.10.1)
- thor (1.2.2)
- tilt (2.1.0)
- timeout (0.3.2)
+ temple (0.10.3)
+ thor (1.3.1)
+ tilt (2.4.0)
+ timeout (0.4.1)
trailblazer-option (0.1.2)
tzinfo (2.0.6)
concurrent-ruby (~> 1.0)
uber (0.1.0)
- unf (0.1.4)
- unf_ext
- unf_ext (0.0.8.2)
- unicode-display_width (2.4.2)
+ unicode-display_width (2.5.0)
unicorn (6.1.0)
kgio (~> 2.6)
raindrops (~> 0.7)
@@ -403,14 +463,17 @@ GEM
unicorn (>= 4, < 7)
uuid (2.3.9)
macaddr (~> 1.0)
- webrick (1.8.1)
- yard (0.9.34)
+ webmock (3.19.1)
+ addressable (>= 2.8.0)
+ crack (>= 0.3.2)
+ hashdiff (>= 0.4.0, < 2.0.0)
+ yard (0.9.36)
PLATFORMS
x86_64-linux
DEPENDENCIES
- activesupport (~> 3.0)
+ activesupport (~> 3.2)
bcrypt_pbkdf (>= 1.0, < 2.0)
bigdecimal (= 1.4.2)
capistrano (~> 3)
@@ -423,6 +486,7 @@ DEPENDENCIES
ffi
goo!
haml (~> 5.2.2)
+ json-ld
json-schema (~> 2.0)
minitest (~> 4.0)
minitest-stub_any_instance
@@ -431,14 +495,15 @@ DEPENDENCIES
ncbo_cron!
ncbo_ontology_recommender!
newrelic_rpm
- oj (~> 2.0)
+ oj
ontologies_linked_data!
+ parallel
parseconfig
pry
rack
rack-accept (~> 0.4)
rack-attack (~> 6.6.1)
- rack-cache (~> 1.0)
+ rack-cache (~> 1.13.0)
rack-cors
rack-mini-profiler
rack-post-body-to-params!
@@ -447,9 +512,10 @@ DEPENDENCIES
rake (~> 10.0)
redcarpet
redis
- redis-activesupport
redis-rack-cache (~> 2.0)
+ redis-store (~> 1.10)
reek
+ request_store
rufo
shotgun!
simplecov
@@ -462,6 +528,7 @@ DEPENDENCIES
sparql-client!
unicorn
unicorn-worker-killer
+ webmock (~> 3.19.1)
BUNDLED WITH
- 2.3.26
+ 2.3.23
diff --git a/README.md b/README.md
index dfaa77ea..02b9f076 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,50 @@
ontologies_api provides a RESTful interface for accessing [BioPortal](https://bioportal.bioontology.org/) (an open repository of biomedical ontologies). Supported services include downloads, search, access to terms and concepts, text annotation, and much more.
-## Prerequisites
+# Run ontologies_api
+
+## Using the OntoPortal API utility script
+### See help
+
+```bash
+bin/ontoportal help
+```
+
+```
+Usage: bin/ontoportal {dev|test|run|help} [--reset-cache] [--api-url API_URL] [--api-key API_KEY]
+ dev : Start the Ontoportal API development server.
+ Example: bin/ontoportal dev --api-url http://localhost:9393
+ Use --reset-cache to remove volumes: bin/ontoportal dev --reset-cache
+ test : Run tests.
+ run : Run a command in the Ontoportal API Docker container.
+ help : Show this help message.
+
+Description:
+ This script provides convenient commands for managing an Ontoportal API
+ application using Docker Compose. It includes options for starting the development server,
+ running tests, and executing commands within the Ontoportal API Docker container.
+
+Goals:
+ - Simplify common tasks related to Ontoportal API development using Docker.
+ - Provide a consistent and easy-to-use interface for common actions.
+
+
+```
+
+
+### Run dev
+```bash
+bin/ontoportal dev
+```
+
+### Run tests with a local OntoPortal API
+```bash
+bin/ontoportal test
+```
+
+
+## Manually
+### Prerequisites
- [Ruby 2.x](http://www.ruby-lang.org/en/downloads/) (most recent patch level)
- [rbenv](https://github.com/sstephenson/rbenv) and [ruby-build](https://github.com/sstephenson/ruby-build) (optional)
@@ -19,7 +62,7 @@ ontologies_api provides a RESTful interface for accessing [BioPortal](https://bi
- [Solr](http://lucene.apache.org/solr/)
- BioPortal indexes ontology class and property content using Solr (a Lucene-based server)
-## Configuring Solr
+### Configuring Solr
To configure Solr for ontologies_api usage, modify the example project included with Solr by doing the following:
@@ -46,22 +89,22 @@ To configure Solr for ontologies_api usage, modify the example project included
# Edit the ontologieS_api/config/environments/{env}.rb file to point to your running instance:
# http://localhost:8983/solr/NCBO1
-## Installing
+### Installing
-### Clone the repository
+#### Clone the repository
```
$ git clone git@github.com:ncbo/ontologies_api.git
$ cd ontologies_api
```
-### Install the dependencies
+#### Install the dependencies
```
$ bundle install
```
-### Create an environment configuration file
+#### Create an environment configuration file
```
$ cp config/environments/config.rb.sample config/environments/development.rb
@@ -73,7 +116,7 @@ production.rb
development.rb
test.rb
-### Run the unit tests (optional)
+#### Run the unit tests (optional)
Requires a configuration file for the test environment:
@@ -87,7 +130,7 @@ Execute the suite of tests from the command line:
$ bundle exec rake test
```
-### Run the application
+#### Run the application
```
$ bundle exec rackup --port 9393
diff --git a/app.rb b/app.rb
index 5360ae4b..e09178bd 100644
--- a/app.rb
+++ b/app.rb
@@ -11,8 +11,6 @@
require 'oj'
require 'multi_json'
require 'cgi'
-require 'google/apis/analytics_v3'
-require 'google/api_client/auth/key_utils'
# NCBO dependencies
require 'ontologies_linked_data'
@@ -29,6 +27,7 @@
require_relative 'lib/rack/cube_reporter'
require_relative 'lib/rack/param_translator'
require_relative 'lib/rack/slice_detection'
+require_relative 'lib/rack/request_lang'
# Logging setup
require_relative "config/logging"
@@ -36,6 +35,8 @@
# Inflector setup
require_relative "config/inflections"
+require 'request_store'
+
# Protection settings
set :protection, :except => :path_traversal
@@ -143,6 +144,9 @@
use Rack::PostBodyToParams
use Rack::ParamTranslator
+use RequestStore::Middleware
+use Rack::RequestLang
+
use LinkedData::Security::Authorization
use LinkedData::Security::AccessDenied
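app.rb now mounts RequestStore::Middleware, which gives each request an isolated global store, and Rack::RequestLang, which presumably captures the requested language (its source, lib/rack/request_lang, is not part of this diff). A request exercising it might look like the following, where the `lang` parameter name is an assumption:

```bash
# Request French labels from a local instance ("lang" parameter is assumed;
# the actual key read by Rack::RequestLang lives in lib/rack/request_lang)
curl "http://localhost:9393/ontologies?lang=fr"
```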
diff --git a/bin/ontoportal b/bin/ontoportal
new file mode 100755
index 00000000..66f1a654
--- /dev/null
+++ b/bin/ontoportal
@@ -0,0 +1,240 @@
+#!/usr/bin/env bash
+
+# Function to display script usage information
+show_help() {
+ cat << EOL
+Usage: $0 {dev|test|run|help} [--reset-cache] [--api-url API_URL] [--api-key API_KEY] [--old-path OLD_PATH] [--goo-path GOO_PATH] [--sparql-client-path SPARQL_CLIENT_PATH]
+ dev : Start the Ontoportal API development server.
+ Example: $0 dev --api-url http://localhost:9393
+ Use --reset-cache to remove volumes: $0 dev --reset-cache
+ test : Run tests. Specify either a test file or use 'all'.
+ Example: $0 test test/controllers/test_users_controller.rb -v --name=name_of_the_test
+ Example (run all tests): $0 test all -v
+ run : Run a command in the Ontoportal API Docker container.
+ help : Show this help message.
+
+Description:
+ This script provides convenient commands for managing an Ontoportal API
+ application using Docker Compose. It includes options for starting the development server,
+ running tests, and executing commands within the Ontoportal API Docker container.
+
+Options:
+ --reset-cache : Remove Docker volumes (used with 'dev').
+ --api-url API_URL : Specify the API URL.
+ --api-key API_KEY : Specify the API key.
+ --old-path OLD_PATH : Specify the path for ontologies_linked_data.
+ --goo-path GOO_PATH : Specify the path for goo.
+ --sparql-client-path : Specify the path for sparql-client.
+  test_file | all : Specify a test file to run, or 'all' to run the whole suite.
+ -v : Enable verbosity.
+ --name=name_of_the_test : Specify the name of the test.
+
+Goals:
+ - Simplify common tasks related to Ontoportal API development using Docker.
+ - Provide a consistent and easy-to-use interface for common actions.
+EOL
+}
+
+
+# Function to update or create the .env file with the provided settings
+update_env_file() {
+ # Update the .env file with the provided values
+ local api_url="$1"
+ local old_path="$2"
+ local goo_path="$3"
+ local sparql_client_path="$4"
+
+  # Read the current .env content
+ file_content=$(<.env)
+
+ # Make changes to the variable
+ while IFS= read -r line; do
+ if [[ "$line" == "API_URL="* && -n "$api_url" ]]; then
+ echo "API_URL=$api_url"
+ elif [[ "$line" == "ONTOLOGIES_LINKED_DATA_PATH="* ]]; then
+ echo "ONTOLOGIES_LINKED_DATA_PATH=$old_path"
+ elif [[ "$line" == "GOO_PATH="* ]]; then
+ echo "GOO_PATH=$goo_path"
+ elif [[ "$line" == "SPARQL_CLIENT_PATH="* ]]; then
+ echo "SPARQL_CLIENT_PATH=$sparql_client_path"
+ else
+ echo "$line"
+ fi
+ done <<< "$file_content" > .env
+}
+
+# Function to create configuration files if they don't exist
+create_config_files() {
+ [ -f ".env" ] || cp .env.sample .env
+ [ -f "config/environments/development.rb" ] || cp config/environments/config.rb.sample config/environments/development.rb
+}
+
+# Function to build Docker run command with conditionally added bind mounts
+build_docker_run_cmd() {
+ local custom_command="$1"
+ local old_path="$2"
+ local goo_path="$3"
+ local sparql_client_path="$4"
+
+ local docker_run_cmd="docker compose -p ontoportal_docker run --rm -it --name api-service"
+ local bash_cmd=""
+
+ # Conditionally add bind mounts only if the paths are not empty
+ for path_var in "old_path:ontologies_linked_data" "goo_path:goo" "sparql_client_path:sparql-client"; do
+ IFS=':' read -r path value <<< "$path_var"
+
+ if [ -n "${!path}" ]; then
+ host_path="$(realpath "$(dirname "${!path}")")/$value"
+ echo "Run: bundle config local.$value ${!path}"
+ container_path="/srv/ontoportal/$value"
+ docker_run_cmd+=" -v $host_path:$container_path"
+ bash_cmd+="(git config --global --add safe.directory $container_path && bundle config local.$value $container_path) &&"
+ else
+ bash_cmd+=" (bundle config unset local.$value) &&"
+ fi
+ done
+
+ bash_cmd+=" (bundle check || bundle install || bundle update) && $custom_command"
+ docker_run_cmd+=" --service-ports api bash -c \"$bash_cmd\""
+
+ eval "$docker_run_cmd"
+}
+
+
+# Function to handle the "dev" and "test" options
+run_command() {
+ local custom_command="$1"
+
+ local reset_cache=false
+ local api_url=""
+ local old_path=""
+ local goo_path=""
+ local sparql_client_path=""
+
+ shift
+ # Check for command line arguments
+ while [[ "$#" -gt 0 ]]; do
+ case $1 in
+ --reset-cache)
+ reset_cache=true
+ shift
+ ;;
+ --api-url)
+ api_url="$2"
+ shift 2
+ ;;
+ --old-path)
+ old_path="$2"
+ shift 2
+ ;;
+ --goo-path)
+ goo_path="$2"
+ shift 2
+ ;;
+ --sparql-client-path)
+ sparql_client_path="$2"
+ shift 2
+ ;;
+ *)
+ echo "Unknown option: $1"
+ show_help
+ exit 1
+ ;;
+ esac
+ done
+
+ # Check if --reset-cache is present and execute docker compose down --volumes
+ if [ "$reset_cache" = true ]; then
+ echo "Resetting cache. Running: docker compose down --volumes"
+ docker compose down --volumes
+ fi
+
+  # Persist any provided arguments into the .env file
+ update_env_file "$api_url" "$old_path" "$goo_path" "$sparql_client_path"
+
+
+
+  # Load the effective values from the .env file
+ source .env
+ api_url="$API_URL"
+ old_path="$ONTOLOGIES_LINKED_DATA_PATH"
+ goo_path="$GOO_PATH"
+ sparql_client_path="$SPARQL_CLIENT_PATH"
+
+
+ if [ -z "$api_url" ] ; then
+ echo "Error: Missing required arguments. Please provide both --api-url or update them in your .env"
+ exit 1
+ fi
+
+
+
+ # Build the Docker run command
+ echo "Run: $custom_command"
+ build_docker_run_cmd "$custom_command" "$old_path" "$goo_path" "$sparql_client_path"
+}
+
+# Function to handle the "dev" option
+dev() {
+ echo "Starting OntoPortal API development server..."
+
+ local custom_command="bundle exec shotgun --host 0.0.0.0 --env=development --port 9393"
+ run_command "$custom_command" "$@"
+}
+
+# Function to handle the "test" option
+test() {
+ echo "Running tests..."
+ local test_path=""
+ local test_options=""
+ local all_arguments=()
+ # Check for command line arguments
+ while [ "$#" -gt 0 ]; do
+ case "$1" in
+ --api-url | --reset-cache | --old-path | --goo-path | --sparql-client-path)
+ all_arguments+=("$1" "$2")
+ shift 2
+ ;;
+ *)
+ if [ -z "$test_path" ]; then
+ test_path="$1"
+ else
+ test_options="$test_options $1"
+ fi
+ ;;
+ esac
+ shift
+ done
+
+ local custom_command="bundle exec rake test TEST='$test_path' TESTOPTS='$test_options'"
+ echo "run : $custom_command"
+ run_command "$custom_command" "${all_arguments[@]}"
+}
+
+# Function to handle the "run" option
+run() {
+ echo "Run: $*"
+ docker compose run --rm -it api bash -c "$*"
+}
+
+create_config_files
+
+# Main script logic
+case "$1" in
+ "run")
+ run "${@:2}"
+ ;;
+ "dev")
+ dev "${@:2}"
+ ;;
+ "test")
+ test "${@:2}"
+ ;;
+ "help")
+ show_help
+ ;;
+ *)
+ show_help
+ exit 1
+ ;;
+esac
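Typical invocations of the script, drawn from its own help text (the `run` command argument is an example):

```bash
bin/ontoportal dev --api-url http://localhost:9393
bin/ontoportal dev --reset-cache
bin/ontoportal test test/controllers/test_users_controller.rb -v
bin/ontoportal test all -v
bin/ontoportal run "bundle exec rake -T"
```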
diff --git a/config/deploy.rb b/config/deploy.rb
index 23a982cd..6916caf5 100644
--- a/config/deploy.rb
+++ b/config/deploy.rb
@@ -1,9 +1,6 @@
-# config valid only for Capistrano 3
-
-APP_PATH = '/srv/ontoportal'
-
-set :application, 'ontologies_api'
-set :repo_url, "https://github.com/ncbo/#{fetch(:application)}.git"
+set :author, "ontoportal-lirmm"
+set :application, "ontologies_api"
+set :repo_url, "https://github.com/#{fetch(:author)}/#{fetch(:application)}.git"
set :deploy_via, :remote_cache
@@ -11,7 +8,7 @@
# ask :branch, proc { `git rev-parse --abbrev-ref HEAD`.chomp }
# Default deploy_to directory is /var/www/my_app
-set :deploy_to, "#{APP_PATH}/#{fetch(:application)}"
+set :deploy_to, "/srv/ontoportal/#{fetch(:application)}"
# Default value for :scm is :git
# set :scm, :git
@@ -20,7 +17,7 @@
# set :format, :pretty
# Default value for :log_level is :debug
-# set :log_level, :debug
+set :log_level, :error
# Default value for :pty is false
# set :pty, true
@@ -32,21 +29,40 @@
# set :linked_dirs, %w{log tmp/pids tmp/cache tmp/sockets vendor/bundle public/system}
set :linked_dirs, %w{log vendor/bundle tmp/pids tmp/sockets public/system}
-# rbenv
-# set :rbenv_type, :system #or :user
-# set :rbenv_ruby, '2.2.5'
-# set :rbenv_roles, :all # default value
-
-# do not use sudo
-set :use_sudo, false
-# required for restarting unicorn with sudo
-set :pty, true
# Default value for default_env is {}
-set :default_env, {
-}
+# set :default_env, { path: "/opt/ruby/bin:$PATH" }
# Default value for keep_releases is 5
set :keep_releases, 5
+set :config_folder_path, "#{fetch(:application)}/#{fetch(:stage)}"
+
+SSH_JUMPHOST = ENV.include?('SSH_JUMPHOST') ? ENV['SSH_JUMPHOST'] : 'jumpbox.hostname.com'
+SSH_JUMPHOST_USER = ENV.include?('SSH_JUMPHOST_USER') ? ENV['SSH_JUMPHOST_USER'] : 'username'
+
+JUMPBOX_PROXY = "#{SSH_JUMPHOST_USER}@#{SSH_JUMPHOST}"
+set :ssh_options, {
+ user: 'ontoportal',
+ forward_agent: 'true',
+ keys: %w(config/deploy_id_rsa),
+ auth_methods: %w(publickey),
+ # use ssh proxy if API servers are on a private network
+ proxy: Net::SSH::Proxy::Command.new("ssh #{JUMPBOX_PROXY} -W %h:%p")
+}
+
+# private git repo for configuration
+PRIVATE_CONFIG_REPO = ENV.include?('PRIVATE_CONFIG_REPO') ? ENV['PRIVATE_CONFIG_REPO'] : 'https://your_github_pat_token@github.com/your_organization/ontoportal-configs.git'
+desc "Check if agent forwarding is working"
+task :forwarding do
+ on roles(:all) do |h|
+ if test("env | grep SSH_AUTH_SOCK")
+ info "Agent forwarding is up to #{h}"
+ else
+ error "Agent forwarding is NOT up to #{h}"
+ end
+ end
+end
# inspired by http://nathaniel.talbott.ws/blog/2013/03/14/post-deploy-smoke-tests/
desc 'Run smoke test'
@@ -74,7 +90,6 @@
end
end
-
namespace :deploy do
desc 'Incorporate the private repository content'
@@ -82,10 +97,10 @@
# or get config from local directory if LOCAL_CONFIG_PATH env var is set
task :get_config do
if defined?(PRIVATE_CONFIG_REPO)
- TMP_CONFIG_PATH = "/tmp/#{SecureRandom.hex(15)}"
+ TMP_CONFIG_PATH = "/tmp/#{SecureRandom.hex(15)}".freeze
on roles(:app) do
execute "git clone -q #{PRIVATE_CONFIG_REPO} #{TMP_CONFIG_PATH}"
- execute "rsync -av #{TMP_CONFIG_PATH}/#{fetch(:application)}/ #{release_path}/"
+ execute "rsync -av #{TMP_CONFIG_PATH}/#{fetch(:config_folder_path)}/ #{release_path}/"
execute "rm -rf #{TMP_CONFIG_PATH}"
end
elsif defined?(LOCAL_CONFIG_PATH)
@@ -98,16 +113,15 @@
desc 'Restart application'
task :restart do
on roles(:app), in: :sequence, wait: 5 do
- # Your restart mechanism here, for example:
- # execute :touch, release_path.join('tmp/restart.txt')
- execute 'sudo systemctl restart unicorn'
- execute 'sleep 5'
+ # Your restart mechanism here, for example:
+ # execute :touch, release_path.join('tmp/restart.txt')
+ execute 'sudo systemctl restart unicorn'
+ execute 'sleep 5'
end
end
- after :publishing, :get_config
- after :get_config, :restart
- # after :deploy, :smoke_test
+ after :updating, :get_config
+ after :publishing, :restart
after :restart, :clear_cache do
on roles(:web), in: :groups, limit: 3, wait: 10 do
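With the jump host and private config repo now read from the environment, a manual deployment outside GitHub Actions might look like this sketch (all values are placeholders for the secrets the workflow normally injects):

```bash
# Manual capistrano deployment via the ssh jump host
export SSH_JUMPHOST=jumpbox.example.org
export SSH_JUMPHOST_USER=deployer
export PRIVATE_CONFIG_REPO=https://<token>@github.com/your_org/ontoportal-configs.git
# Verify agent forwarding first, using the task defined above
bundle exec cap staging forwarding
BRANCH=stage bundle exec cap staging deploy
```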
diff --git a/config/deploy/agroportal.rb b/config/deploy/agroportal.rb
new file mode 100644
index 00000000..c01f3fb9
--- /dev/null
+++ b/config/deploy/agroportal.rb
@@ -0,0 +1,17 @@
+# Simple Role Syntax
+# ==================
+# Supports bulk-adding hosts to roles, the primary
+# server in each group is considered to be the first
+# unless any hosts have the primary property set.
+# Don't declare `role :all`, it's a meta role
+role :app, %w[agroportal.lirmm.fr]
+role :db, %w[agroportal.lirmm.fr] # sufficient to run db:migrate only on one system
+set :branch, ENV.include?('BRANCH') ? ENV['BRANCH'] : 'master'
+# Extended Server Syntax
+# ======================
+# This can be used to drop a more detailed server
+# definition into the server list. The second argument
+# something that quacks like a hash can be used to set
+# extended properties on the server.
+# server 'example.com', user: 'deploy', roles: %w{web app}, my_property: :my_value
+set :log_level, :error
diff --git a/config/deploy/appliance.rb b/config/deploy/appliance.rb
deleted file mode 100644
index fdfe0d70..00000000
--- a/config/deploy/appliance.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# Simple Role Syntax
-# ==================
-# Supports bulk-adding hosts to roles, the primary
-# server in each group is considered to be the first
-# unless any hosts have the primary property set.
-# Don't declare `role :all`, it's a meta role
-
-# Extended Server Syntax
-# ======================
-# This can be used to drop a more detailed server
-# definition into the server list. The second argument
-# something that quacks like a hash can be used to set
-# extended properties on the server.
-server 'localhost', roles: %w{app}
-
-# you can set custom ssh options
-# it's possible to pass any option but you need to keep in mind that net/ssh understand limited list of options
-# you can see them in [net/ssh documentation](http://net-ssh.github.io/net-ssh/classes/Net/SSH.html#method-c-start)
-# set it globally
-# set :ssh_options, {
-# keys: %w(/home/rlisowski/.ssh/id_rsa),
-# forward_agent: false,
-# auth_methods: %w(password)
-# }
-# and/or per server
-# server 'example.com',
-# user: 'user_name',
-# roles: %w{web app},
-# ssh_options: {
-# user: 'user_name', # overrides user setting above
-# keys: %w(/home/user_name/.ssh/id_rsa),
-# forward_agent: false,
-# auth_methods: %w(publickey password)
-# # password: 'please use keys'
-# }
-# setting per server overrides global ssh_options
-
-BRANCH = ENV.include?('BRANCH') ? ENV['BRANCH'] : 'master'
-set :branch, "#{BRANCH}"
-set :deploy_to, "/srv/ontoportal/#{fetch(:application)}"
-# install gems into a common direcotry shared across ui, api and ncbo_cron to reduce disk usage
-set :bundle_path, '/srv/ontoportal/.bundle'
-remove :linked_dirs, 'vendor/bundle'
-
-# private git repo for configuraiton
-# PRIVATE_CONFIG_REPO = ENV.include?('PRIVATE_CONFIG_REPO') ? ENV['PRIVATE_CONFIG_REPO'] : 'git@github.com:your_org/private-config-repo.git'
-
-# location of local configuration files
-LOCAL_CONFIG_PATH = ENV.include?('LOCAL_CONFIG_PATH') ? ENV['LOCAL_CONFIG_PATH'] : '/srv/ontoportal/virtual_appliance/appliance_config'
diff --git a/config/deploy/production.rb b/config/deploy/production.rb
deleted file mode 100644
index c84d24ea..00000000
--- a/config/deploy/production.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# Simple Role Syntax
-# ==================
-# Supports bulk-adding hosts to roles, the primary
-# server in each group is considered to be the first
-# unless any hosts have the primary property set.
-# Don't declare `role :all`, it's a meta role
-role :app, %w{deploy@example.com}
-role :web, %w{deploy@example.com}
-role :db, %w{deploy@example.com}
-
-# Extended Server Syntax
-# ======================
-# This can be used to drop a more detailed server
-# definition into the server list. The second argument
-# something that quacks like a hash can be used to set
-# extended properties on the server.
-server 'example.com', user: 'deploy', roles: %w{web app}, my_property: :my_value
-
-# you can set custom ssh options
-# it's possible to pass any option but you need to keep in mind that net/ssh understand limited list of options
-# you can see them in [net/ssh documentation](http://net-ssh.github.io/net-ssh/classes/Net/SSH.html#method-c-start)
-# set it globally
-# set :ssh_options, {
-# keys: %w(/home/rlisowski/.ssh/id_rsa),
-# forward_agent: false,
-# auth_methods: %w(password)
-# }
-# and/or per server
-# server 'example.com',
-# user: 'user_name',
-# roles: %w{web app},
-# ssh_options: {
-# user: 'user_name', # overrides user setting above
-# keys: %w(/home/user_name/.ssh/id_rsa),
-# forward_agent: false,
-# auth_methods: %w(publickey password)
-# # password: 'please use keys'
-# }
-# setting per server overrides global ssh_options
diff --git a/config/deploy/staging.rb b/config/deploy/staging.rb
new file mode 100644
index 00000000..47b158ae
--- /dev/null
+++ b/config/deploy/staging.rb
@@ -0,0 +1,17 @@
+# Simple Role Syntax
+# ==================
+# Supports bulk-adding hosts to roles, the primary
+# server in each group is considered to be the first
+# unless any hosts have the primary property set.
+# Don't declare `role :all`, it's a meta role
+role :app, %w{stageportal.lirmm.fr}
+role :db, %w{stageportal.lirmm.fr} # sufficient to run db:migrate only on one system
+set :branch, ENV.include?('BRANCH') ? ENV['BRANCH'] : 'stage'
+# Extended Server Syntax
+# ======================
+# This can be used to drop a more detailed server
+# definition into the server list. The second argument
+# something that quacks like a hash can be used to set
+# extended properties on the server.
+#server 'example.com', user: 'deploy', roles: %w{web app}, my_property: :my_value
+set :log_level, :error
diff --git a/config/deploy/test.rb b/config/deploy/test.rb
new file mode 100644
index 00000000..fcbe1efc
--- /dev/null
+++ b/config/deploy/test.rb
@@ -0,0 +1,17 @@
+# Simple Role Syntax
+# ==================
+# Supports bulk-adding hosts to roles, the primary
+# server in each group is considered to be the first
+# unless any hosts have the primary property set.
+# Don't declare `role :all`, it's a meta role
+role :app, %w{testportal.lirmm.fr}
+role :db, %w{testportal.lirmm.fr} # sufficient to run db:migrate only on one system
+# Extended Server Syntax
+# ======================
+# This can be used to drop a more detailed server
+# definition into the server list. The second argument
+# something that quacks like a hash can be used to set
+# extended properties on the server.
+#server 'example.com', user: 'deploy', roles: %w{web app}, my_property: :my_value
+set :log_level, :error
+set :branch, ENV.include?('BRANCH') ? ENV['BRANCH'] : 'test'
diff --git a/config/environments/config.rb.sample b/config/environments/config.rb.sample
index e5f9fd9c..f143b8f9 100644
--- a/config/environments/config.rb.sample
+++ b/config/environments/config.rb.sample
@@ -3,120 +3,108 @@
# development.rb
# test.rb
-begin
- LinkedData.config do |config|
- config.repository_folder = "/srv/ncbo/repository"
- config.goo_host = "localhost"
- config.goo_port = 9000
- config.search_server_url = "http://localhost:8082/solr/term_search_core1"
- config.property_search_server_url = "http://localhost:8082/solr/prop_search_core1"
- config.rest_url_prefix = "http://#{$SITE_URL}:8080/"
- config.replace_url_prefix = true
- config.enable_security = true
-
- config.apikey = "24e0e77e-54e0-11e0-9d7b-005056aa3316"
- config.ui_host = "http://#{$SITE_URL}"
- config.enable_monitoring = false
- config.cube_host = "localhost"
- config.enable_resource_index = false
-
- # Used to define other BioPortal to which this appliance can be mapped to
- # Example to map to the NCBO BioPortal : {"ncbo" => {"api" => "http://data.bioontology.org", "ui" => "http://bioportal.bioontology.org", "apikey" => ""}}
- # Then create the mapping using the following class in JSON : "http://purl.bioontology.org/ontology/MESH/C585345": "ncbo:MESH"
- # Where "ncbo" is the key in the interportal_hash. Use only lowercase letters for this key.
- # And do not use "ext" as a key, it is reserved for clases outside of any BioPortal
- config.interportal_hash = {}
-
- # Caches
- config.http_redis_host = "localhost"
- config.http_redis_port = 6380
- config.enable_http_cache = true
- config.goo_redis_host = "localhost"
- config.goo_redis_port = 6382
+GOO_BACKEND_NAME = ENV.include?("GOO_BACKEND_NAME") ? ENV["GOO_BACKEND_NAME"] : "4store"
+GOO_HOST = ENV.include?("GOO_HOST") ? ENV["GOO_HOST"] : "localhost"
+GOO_PATH_DATA = ENV.include?("GOO_PATH_DATA") ? ENV["GOO_PATH_DATA"] : "/data/"
+GOO_PATH_QUERY = ENV.include?("GOO_PATH_QUERY") ? ENV["GOO_PATH_QUERY"] : "/sparql/"
+GOO_PATH_UPDATE = ENV.include?("GOO_PATH_UPDATE") ? ENV["GOO_PATH_UPDATE"] : "/update/"
+GOO_PORT = ENV.include?("GOO_PORT") ? ENV["GOO_PORT"] : 9000
+MGREP_HOST = ENV.include?("MGREP_HOST") ? ENV["MGREP_HOST"] : "localhost"
+MGREP_PORT = ENV.include?("MGREP_PORT") ? ENV["MGREP_PORT"] : 55555
+MGREP_DICTIONARY_FILE = ENV.include?("MGREP_DICTIONARY_FILE") ? ENV["MGREP_DICTIONARY_FILE"] : "./test/data/dictionary.txt"
+REDIS_GOO_CACHE_HOST = ENV.include?("REDIS_GOO_CACHE_HOST") ? ENV["REDIS_GOO_CACHE_HOST"] : "localhost"
+REDIS_HTTP_CACHE_HOST = ENV.include?("REDIS_HTTP_CACHE_HOST") ? ENV["REDIS_HTTP_CACHE_HOST"] : "localhost"
+REDIS_PERSISTENT_HOST = ENV.include?("REDIS_PERSISTENT_HOST") ? ENV["REDIS_PERSISTENT_HOST"] : "localhost"
+REDIS_PORT = ENV.include?("REDIS_PORT") ? ENV["REDIS_PORT"] : 6379
+REPORT_PATH = ENV.include?("REPORT_PATH") ? ENV["REPORT_PATH"] : "./test/ontologies_report.json"
+REPOSITORY_FOLDER = ENV.include?("REPOSITORY_FOLDER") ? ENV["REPOSITORY_FOLDER"] : "./test/data/ontology_files/repo"
+REST_URL_PREFIX = ENV.include?("REST_URL_PREFIX") ? ENV["REST_URL_PREFIX"] : ENV["API_URL"] || "http://localhost:9393"
+SOLR_PROP_SEARCH_URL = ENV.include?("SOLR_PROP_SEARCH_URL") ? ENV["SOLR_PROP_SEARCH_URL"] : "http://localhost:8983/solr"
+SOLR_TERM_SEARCH_URL = ENV.include?("SOLR_TERM_SEARCH_URL") ? ENV["SOLR_TERM_SEARCH_URL"] : "http://localhost:8983/solr"
- Goo.use_cache = true
-
- # Email notifications
- config.enable_notifications = false
- config.email_sender = "admin@example.org" # Default sender for emails
- config.email_override = "override@example.org" # all email gets sent here. Disable with email_override_disable.
- config.email_disable_override = true
- config.smtp_host = "localhost"
- config.smtp_port = 25
- config.smtp_auth_type = :none # :none, :plain, :login, :cram_md5
- config.smtp_domain = "example.org"
- # Emails of the instance administrators to get mail notifications when new user or new ontology
- config.admin_emails = ["admin@example.org"]
+begin
+ # For prefLabel extract main_lang first, or anything if no main found.
+ # For other properties only properties with a lang that is included in main_lang are used
+ Goo.main_languages = ["en", "fr"]
+ Goo.use_cache = false
+rescue NoMethodError
+ puts "(CNFG) >> Goo.main_lang not available"
+end
- # PURL server config parameters
- config.enable_purl = false
- config.purl_host = "purl.example.org"
- config.purl_port = 80
- config.purl_username = "admin"
- config.purl_password = "password"
- config.purl_maintainers = "admin"
- config.purl_target_url_prefix = "http://example.org"
+LinkedData.config do |config|
+ config.goo_backend_name = GOO_BACKEND_NAME.to_s
+ config.goo_host = GOO_HOST.to_s
+ config.goo_port = GOO_PORT.to_i
+ config.goo_path_query = GOO_PATH_QUERY.to_s
+ config.goo_path_data = GOO_PATH_DATA.to_s
+ config.goo_path_update = GOO_PATH_UPDATE.to_s
+ config.goo_redis_host = REDIS_GOO_CACHE_HOST.to_s
+ config.goo_redis_port = REDIS_PORT.to_i
+ config.http_redis_host = REDIS_HTTP_CACHE_HOST.to_s
+ config.http_redis_port = REDIS_PORT.to_i
+ config.ontology_analytics_redis_host = REDIS_PERSISTENT_HOST.to_s
+ config.ontology_analytics_redis_port = REDIS_PORT.to_i
+ config.search_server_url = SOLR_TERM_SEARCH_URL.to_s
+ config.property_search_server_url = SOLR_PROP_SEARCH_URL.to_s
+ config.replace_url_prefix = true
+ config.rest_url_prefix = REST_URL_PREFIX.to_s
+ config.sparql_endpoint_url = "http://sparql.bioontology.org"
+ config.repository_folder = REPOSITORY_FOLDER.to_s
+# config.enable_notifications = false
- # Ontology Google Analytics Redis
- # disabled
- config.ontology_analytics_redis_host = "localhost"
- config.enable_ontology_analytics = false
- config.ontology_analytics_redis_port = 6379
- end
-rescue NameError
- puts "(CNFG) >> LinkedData not available, cannot load config"
+ config.interportal_hash = {
+ "agroportal" => {
+ "api" => "http://data.agroportal.lirmm.fr",
+ "ui" => "http://agroportal.lirmm.fr",
+ "apikey" => "1cfae05f-9e67-486f-820b-b393dec5764b"
+ },
+ "ncbo" => {
+ "api" => "http://data.bioontology.org",
+ "apikey" => "4a5011ea-75fa-4be6-8e89-f45c8c84844e",
+ "ui" => "http://bioportal.bioontology.org",
+ },
+ "sifr" => {
+ "api" => "http://data.bioportal.lirmm.fr",
+ "ui" => "http://bioportal.lirmm.fr",
+ "apikey" => "1cfae05f-9e67-486f-820b-b393dec5764b"
+ }
+ }
+ config.oauth_providers = {
+ github: {
+ check: :access_token,
+ link: 'https://api.github.com/user'
+ },
+ keycloak: {
+ check: :jwt_token,
+ cert: 'KEYCLOAK_SECRET_KEY'
+ },
+ orcid: {
+ check: :access_token,
+ link: 'https://pub.orcid.org/v3.0/me'
+ },
+ google: {
+ check: :access_token,
+ link: 'https://www.googleapis.com/oauth2/v3/userinfo'
+ }
+ }
end
-begin
- Annotator.config do |config|
- config.mgrep_dictionary_file = "/srv/mgrep/dictionary/dictionary.txt"
- config.stop_words_default_file = "./config/default_stop_words.txt"
- config.mgrep_host = "localhost"
- config.mgrep_port = 55555
- config.mgrep_alt_host = "localhost"
- config.mgrep_alt_port = 55555
- config.annotator_redis_host = "localhost"
- config.annotator_redis_port = 6379
- end
-rescue NameError
- puts "(CNFG) >> Annotator not available, cannot load config"
+Annotator.config do |config|
+ config.annotator_redis_host = REDIS_PERSISTENT_HOST.to_s
+ config.annotator_redis_port = REDIS_PORT.to_i
+ config.mgrep_host = MGREP_HOST.to_s
+ config.mgrep_port = MGREP_PORT.to_i
+ config.mgrep_dictionary_file = MGREP_DICTIONARY_FILE.to_s
end
LinkedData::OntologiesAPI.config do |config|
- config.restrict_download = ["ACR0", "ACR1", "ACR2"]
-end
-
-begin
- LinkedData::OntologiesAPI.config do |config|
- config.enable_unicorn_workerkiller = true
- config.enable_throttling = false
- config.enable_monitoring = false
- config.cube_host = "localhost"
- config.http_redis_host = "localhost"
- config.http_redis_port = 6380
- config.ontology_rank = ""
- config.resolver_redis_host = "localhost"
- config.resolver_redis_port = 6379
- config.restrict_download = ["ACR0", "ACR1", "ACR2"]
- end
-rescue NameError
- puts "(CNFG) >> OntologiesAPI not available, cannot load config"
+ config.http_redis_host = REDIS_HTTP_CACHE_HOST.to_s
+ config.http_redis_port = REDIS_PORT.to_i
+# config.restrict_download = ["ACR0", "ACR1", "ACR2"]
end
-begin
- NcboCron.config do |config|
- config.redis_host = Annotator.settings.annotator_redis_host
- config.redis_port = Annotator.settings.annotator_redis_port
- config.enable_ontology_analytics = false
- config.enable_ontologies_report = false
- # Schedulues
- config.cron_schedule = "30 */4 * * *"
- # Pull schedule
- config.pull_schedule = "00 18 * * *"
- # Pull long schedule for ontology that are pulled less frequently: run weekly on monday at 11 a.m. (23:00)
- config.pull_schedule_long = "00 23 * * 1"
- config.pull_long_ontologies = ["BIOREFINERY", "TRANSMAT", "GO"]
- end
-rescue NameError
- puts "(CNFG) >> NcboCron not available, cannot load config"
-end
+NcboCron.config do |config|
+ config.redis_host = REDIS_PERSISTENT_HOST.to_s
+ config.redis_port = REDIS_PORT.to_i
+ config.ontology_report_path = REPORT_PATH
+end
\ No newline at end of file
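Because every setting in the rewritten sample falls back to a default, a deployment only needs to override what differs. For example (values are illustrative):

```bash
# Start the API against an AllegroGraph backend and non-default hosts
GOO_BACKEND_NAME=ag GOO_PORT=10035 \
REDIS_PERSISTENT_HOST=redis.internal \
SOLR_TERM_SEARCH_URL=http://solr.internal:8983/solr \
bundle exec rackup --port 9393
```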
diff --git a/config/environments/test.rb b/config/environments/test.rb
index 0f421dec..0b68cc3b 100644
--- a/config/environments/test.rb
+++ b/config/environments/test.rb
@@ -8,16 +8,18 @@
GOO_HOST = ENV.include?("GOO_HOST") ? ENV["GOO_HOST"] : "localhost"
REDIS_HOST = ENV.include?("REDIS_HOST") ? ENV["REDIS_HOST"] : "localhost"
REDIS_PORT = ENV.include?("REDIS_PORT") ? ENV["REDIS_PORT"] : 6379
-SOLR_TERM_SEARCH_URL = ENV.include?("SOLR_TERM_SEARCH_URL") ? ENV["SOLR_TERM_SEARCH_URL"] : "http://localhost:8983/solr/term_search_core1"
-SOLR_PROP_SEARCH_URL = ENV.include?("SOLR_PROP_SEARCH_URL") ? ENV["SOLR_PROP_SEARCH_URL"] : "http://localhost:8983/solr/prop_search_core1"
+SOLR_TERM_SEARCH_URL = ENV.include?("SOLR_TERM_SEARCH_URL") ? ENV["SOLR_TERM_SEARCH_URL"] : "http://localhost:8983/solr"
+SOLR_PROP_SEARCH_URL = ENV.include?("SOLR_PROP_SEARCH_URL") ? ENV["SOLR_PROP_SEARCH_URL"] : "http://localhost:8983/solr"
MGREP_HOST = ENV.include?("MGREP_HOST") ? ENV["MGREP_HOST"] : "localhost"
-MGREP_PORT = ENV.include?("MGREP_PORT") ? ENV["MGREP_PORT"] : 55555
+MGREP_PORT = ENV.include?("MGREP_PORT") ? ENV["MGREP_PORT"] : 55556
+GOO_SLICES = ENV["GOO_SLICES"] || 500
begin
# For prefLabel extract main_lang first, or anything if no main found.
# For other properties only properties with a lang that is included in main_lang are used
Goo.main_languages = ['en']
Goo.use_cache = false
+ Goo.slice_loading_size = GOO_SLICES.to_i
rescue NoMethodError
puts "(CNFG) >> Goo.main_lang not available"
end
@@ -37,6 +39,7 @@
config.ontology_analytics_redis_port = REDIS_PORT.to_i
config.search_server_url = SOLR_TERM_SEARCH_URL.to_s
config.property_search_server_url = SOLR_PROP_SEARCH_URL.to_s
+ config.sparql_endpoint_url = "http://sparql.bioontology.org"
# config.enable_notifications = false
config.interportal_hash = {
"agroportal" => {
@@ -55,6 +58,24 @@
"apikey" => "1cfae05f-9e67-486f-820b-b393dec5764b"
}
}
+ config.oauth_providers = {
+ github: {
+ check: :access_token,
+ link: 'https://api.github.com/user'
+ },
+ keycloak: {
+ check: :jwt_token,
+ cert: 'KEYCLOAK_SECRET_KEY'
+ },
+ orcid: {
+ check: :access_token,
+ link: 'https://pub.orcid.org/v3.0/me'
+ },
+ google: {
+ check: :access_token,
+ link: 'https://www.googleapis.com/oauth2/v3/userinfo'
+ }
+ }
end
Annotator.config do |config|
diff --git a/config/rack_attack.rb b/config/rack_attack.rb
index 60d2e3de..88a3e8d6 100644
--- a/config/rack_attack.rb
+++ b/config/rack_attack.rb
@@ -3,15 +3,14 @@
puts "(API) >> Throttling enabled at #{LinkedData::OntologiesAPI.settings.req_per_second_per_ip} req/sec"
require 'rack/attack'
-require 'redis-activesupport'
use Rack::Attack
attack_redis_host_port = {
host: LinkedData::OntologiesAPI.settings.http_redis_host,
- port: LinkedData::OntologiesAPI.settings.http_redis_port
+ port: LinkedData::OntologiesAPI.settings.http_redis_port,
+ db: 1
}
-attack_store = ActiveSupport::Cache::RedisStore.new(attack_redis_host_port)
-Rack::Attack.cache.store = attack_store
+Rack::Attack.cache.store = Redis.new(attack_redis_host_port)
safe_ips = LinkedData::OntologiesAPI.settings.safe_ips ||= Set.new
safe_ips.each do |safe_ip|
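Since recent rack-attack versions wrap a raw Redis client through a built-in store proxy, the plain Redis.new store configured above is enough to back throttling rules. A hedged sketch of a per-IP throttle reusing the req_per_second_per_ip setting printed at the top of this file (the rule name and block are assumptions):

# Sketch: per-IP request throttle on top of the Redis store configured above.
Rack::Attack.throttle('req/ip',
                      limit: LinkedData::OntologiesAPI.settings.req_per_second_per_ip,
                      period: 1) do |request|
  request.ip # one counter per client IP, per one-second window
end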
diff --git a/config/unicorn.rb b/config/unicorn.rb
index ce1df7fb..85eabbdd 100644
--- a/config/unicorn.rb
+++ b/config/unicorn.rb
@@ -1,23 +1,45 @@
application = 'ontologies_api'
app_path = "/srv/ontoportal/#{application}"
-working_directory "#{app_path}/current/"
+current_version_path = "#{app_path}/current"
+pid_file_path = 'tmp/pids/unicorn.pid'
+if Dir.exist?(current_version_path)
+ app_socket_path = app_path + '/shared/tmp/sockets/unicorn.sock'
+ app_gemfile_path = "#{current_version_path}/Gemfile"
+ user = 'ontoportal'
+else
+ current_version_path = app_path
+ app_gemfile_path = "#{app_path}/Gemfile"
+ app_socket_path = app_path + '/tmp/sockets/unicorn.sock'
+ user = 'root'
+end
+
+working_directory current_version_path
worker_processes 8
timeout 300
preload_app true
-user 'ontoportal', 'ontoportal'
+user user, user
stderr_path 'log/unicorn.stderr.log'
stdout_path 'log/unicorn.stdout.log'
-pid 'tmp/pids/unicorn.pid'
+
+require 'fileutils'
+[pid_file_path, app_socket_path].each do |file_path|
+ directory_path = File.dirname(file_path)
+  FileUtils.mkdir_p(directory_path) unless Dir.exist?(directory_path)
+end
+
+
+
+pid pid_file_path
# Listen on both fast-failing unix data socket (for nginx) & a backloggable TCP connection
-listen app_path + '/shared/tmp/sockets/unicorn.sock', :backlog => 1024
+listen app_socket_path, :backlog => 1024
#listen 8087, :backlog => 256
# Make sure unicorn is using current gems after rolling restarts
before_exec do |server|
- ENV['BUNDLE_GEMFILE'] = "#{app_path}/current/Gemfile"
+ ENV['BUNDLE_GEMFILE'] = app_gemfile_path
end
before_fork do |server, worker|
diff --git a/controllers/admin_controller.rb b/controllers/admin_controller.rb
index 7ae6d800..70b94411 100644
--- a/controllers/admin_controller.rb
+++ b/controllers/admin_controller.rb
@@ -68,7 +68,7 @@ class AdminController < ApplicationController
latest = ont.latest_submission(status: :any)
error 404, "Ontology #{params["acronym"]} contains no submissions" if latest.nil?
check_last_modified(latest)
- latest.bring(*OntologySubmission.goo_attrs_to_load(includes_param))
+ latest.bring(*submission_include_params)
NcboCron::Models::OntologySubmissionParser.new.queue_submission(latest, actions)
halt 204
end
@@ -84,7 +84,7 @@ class AdminController < ApplicationController
latest = ont.latest_submission(status: :any)
end
check_last_modified(latest) if latest
- latest.bring(*OntologySubmission.goo_attrs_to_load(includes_param)) if latest
+ latest.bring(*submission_include_params) if latest
reply(latest || {})
end
@@ -127,6 +127,79 @@ class AdminController < ApplicationController
halt 204
end
+ namespace "/search" do
+ get '/collections' do
+ conn = SOLR::SolrConnector.new(Goo.search_conf, '')
+      collections = { collections: conn.fetch_all_collections }
+ reply(200, collections)
+ end
+
+ get '/collections/:collection/schema' do
+ collection = params[:collection].to_sym
+ conn = SOLR::SolrConnector.new(Goo.search_conf, collection)
+ collection_schema = conn.fetch_schema
+
+ reply(200, collection_schema)
+ end
+
+ post '/collections/:collection/schema/init' do
+ collection = params[:collection].to_sym
+ conn = SOLR::SolrConnector.new(Goo.search_conf, collection)
+ collection_schema = conn.init_schema
+ reply(200, collection_schema)
+ end
+
+
+ post '/collections/:collection/search' do
+ collection = params[:collection].to_sym
+
+ search_keys = %w[defType fq qf sort start rows fl stopwords lowercaseOperators]
+
+ search_params = params.select { |key, _| search_keys.include?(key) }
+ search_query = params[:query] || params[:q]
+ search_query = search_query.blank? ? '*' : search_query
+ conn = SOLR::SolrConnector.new(Goo.search_conf, collection)
+ reply(200, conn.search(search_query, search_params).to_h)
+ end
+
+ post '/index_batch/:model_name' do
+ error 500, "model_name parameter not set" if params["model_name"].blank?
+
+ model = Goo.model_by_name(params["model_name"].to_sym)
+ error 500, "#{params["model_name"]} is not indexable" if model.nil? || !model.index_enabled?
+
+ all_attrs = get_attributes_to_include([:all], model)
+
+ collections = model.where.include(all_attrs).all
+ indexed = []
+ not_indexed = []
+ collections.each do |m|
+ begin
+ response = m.index.dig("responseHeader", "status")
+ if response.eql?(0)
+ indexed << m.id
+ else
+ not_indexed << m.id
+ end
+ rescue StandardError
+ not_indexed << m.id
+ end
+ end
+
+ if !indexed.empty?
+ msg = "Batch indexing for #{model.model_name} completed for"
+
+ if not_indexed.empty?
+ msg += " all models"
+ else
+ msg += " #{indexed.join(', ')} and not for the following #{not_indexed.join(', ')}, see logs for more details"
+ end
+ reply(200, msg)
+ else
+ reply(500, "Batch indexing for #{model.model_name} failed")
+ end
+ end
+ end
private
def process_long_operation(timeout, args)
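For reference, a hypothetical client call against the new Solr admin endpoints; it assumes the controller is mounted under /admin, a local API on port 9393, and an admin API key (all placeholders):

require 'net/http'
require 'json'

# List the Solr collections exposed by the new admin namespace.
uri = URI('http://localhost:9393/admin/search/collections')
req = Net::HTTP::Get.new(uri)
req['Authorization'] = 'apikey token=ADMIN_APIKEY' # placeholder admin key
res = Net::HTTP.start(uri.host, uri.port) { |http| http.request(req) }
puts JSON.parse(res.body) # => { "collections" => [...] }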
diff --git a/controllers/agents_controller.rb b/controllers/agents_controller.rb
new file mode 100644
index 00000000..0b47c0c2
--- /dev/null
+++ b/controllers/agents_controller.rb
@@ -0,0 +1,171 @@
+class AgentsController < ApplicationController
+
+ get '/ontologies/:acronym/agents' do
+ ont = Ontology.find(params["acronym"]).first
+ latest = ont.latest_submission(status: :any)
+ latest.bring(*OntologySubmission.agents_attrs)
+    properties_agents = {}
+ OntologySubmission.agents_attrs.each do |attr|
+ properties_agents[attr] = Array(latest.send(attr))
+ end
+
+ agents = []
+ properties_agents.each do |key, value|
+ agents.concat(value.map{ |agent| agent.bring_remaining})
+ end
+ agents.uniq!
+
+ if includes_param.include?(:all) || includes_param.include?(:usages)
+ LinkedData::Models::Agent.load_agents_usages(agents)
+ end
+
+ reply agents
+ end
+
+ %w[agents Agents].each do |namespace|
+ namespace "/#{namespace}" do
+ get do
+ check_last_modified_collection(LinkedData::Models::Agent)
+ query = LinkedData::Models::Agent.where
+ query = apply_filters(LinkedData::Models::Agent, query)
+ query = query.include(LinkedData::Models::Agent.goo_attrs_to_load(includes_param))
+ if page?
+ page, size = page_params
+ agents = query.page(page, size).all
+ else
+ agents = query.to_a
+ end
+
+ if includes_param.include?(:all) || includes_param.include?(:usages)
+ LinkedData::Models::Agent.load_agents_usages(agents)
+ end
+
+ reply agents
+ end
+
+ # Display a single agent
+ get '/:id' do
+ check_last_modified_collection(LinkedData::Models::Agent)
+ id = params["id"]
+ agent = LinkedData::Models::Agent.find(id).include(LinkedData::Models::Agent.goo_attrs_to_load(includes_param)).first
+ error 404, "Agent #{id} not found" if agent.nil?
+ reply 200, agent
+ end
+
+      # Create a new agent
+ post do
+ reply 201, create_new_agent
+ end
+
+      # Create an agent with the given acronym
+ put '/:acronym' do
+ reply 201, create_new_agent
+ end
+
+      # Update an existing agent
+ patch '/:id' do
+ acronym = params["id"]
+ agent = LinkedData::Models::Agent.find(acronym).include(LinkedData::Models::Agent.attributes).first
+
+ if agent.nil?
+ error 400, "Agent does not exist, please create using HTTP PUT before modifying"
+ else
+ agent = update_agent(agent, params)
+
+ error 400, agent.errors unless agent.errors.empty?
+ end
+ halt 204
+ end
+
+      # Delete an agent
+ delete '/:id' do
+ agent = LinkedData::Models::Agent.find(params["id"]).first
+ agent.delete
+ halt 204
+ end
+
+ private
+
+ def update_identifiers(identifiers)
+ Array(identifiers).map do |i|
+ next nil if i.empty?
+
+ id = i["id"] || LinkedData::Models::AgentIdentifier.generate_identifier(i['notation'], i['schemaAgency'])
+ identifier = LinkedData::Models::AgentIdentifier.find(RDF::URI.new(id)).first
+
+ if identifier
+ identifier.bring_remaining
+ else
+ identifier = LinkedData::Models::AgentIdentifier.new
+ end
+
+ i.delete "id"
+
+ next identifier if i.keys.size.zero?
+
+ populate_from_params(identifier, i)
+
+ if identifier.valid?
+ identifier.save
+ else
+ error 400, identifier.errors
+ end
+ identifier
+ end.compact
+ end
+
+ def update_affiliations(affiliations)
+ Array(affiliations).map do |aff|
+ affiliation = aff["id"] ? LinkedData::Models::Agent.find(RDF::URI.new(aff["id"])).first : nil
+
+ if affiliation
+ affiliation.bring_remaining
+ affiliation.identifiers.each{|i| i.bring_remaining}
+ end
+
+ next affiliation if aff.keys.size.eql?(1) && aff["id"]
+
+ if affiliation
+ affiliation = update_agent(affiliation, aff)
+ else
+ affiliation = create_new_agent(aff["id"], aff)
+ end
+
+ error 400, affiliation.errors unless affiliation.errors.empty?
+
+ affiliation
+ end
+ end
+
+      def create_new_agent(id = @params['id'], params = @params)
+ agent = nil
+ agent = LinkedData::Models::Agent.find(id).include(LinkedData::Models::Agent.goo_attrs_to_load(includes_param)).first if id
+
+ if agent.nil?
+ agent = update_agent(LinkedData::Models::Agent.new, params)
+ error 400, agent.errors unless agent.errors.empty?
+
+ return agent
+ else
+ error 400, "Agent exists, please use HTTP PATCH to update"
+ end
+ end
+
+ def update_agent(agent, params)
+ return agent unless agent
+
+ identifiers = params.delete "identifiers"
+ affiliations = params.delete "affiliations"
+ params.delete "id"
+ populate_from_params(agent, params)
+ agent.identifiers = update_identifiers(identifiers)
+ agent.affiliations = update_affiliations(affiliations)
+
+ agent.save if agent.valid?
+ return agent
+ end
+
+ end
+ end
+
+end
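A hypothetical request against the new agents endpoint; the field names follow the controller above (identifiers carry notation/schemaAgency), while the host, API key, and agentType value are assumptions:

require 'net/http'
require 'json'

uri = URI('http://localhost:9393/agents')
req = Net::HTTP::Post.new(uri, 'Content-Type' => 'application/json',
                               'Authorization' => 'apikey token=YOUR_APIKEY')
req.body = {
  agentType: 'person', # assumed value
  name: 'Jane Doe',
  email: 'jane.doe@example.org',
  identifiers: [{ notation: '0000-0001-2345-6789', schemaAgency: 'ORCID' }],
  affiliations: []
}.to_json
res = Net::HTTP.start(uri.host, uri.port) { |http| http.request(req) }
puts res.code # "201" when the agent is created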
diff --git a/controllers/analytics_controller.rb b/controllers/analytics_controller.rb
new file mode 100644
index 00000000..e9b655fd
--- /dev/null
+++ b/controllers/analytics_controller.rb
@@ -0,0 +1,45 @@
+require 'csv'
+
+class AnalyticsController < ApplicationController
+
+ ##
+ # get all ontology analytics for a given year/month combination
+  # TODO: use an /analytics namespace after migrating the old OntologyAnalyticsController
+ namespace "/data/analytics" do
+
+ get '/ontologies' do
+ expires 86400, :public
+ year = year_param(params)
+ error 400, "The year you supplied is invalid. Valid years start with 2 and contain 4 digits." if params["year"] && !year
+ month = month_param(params)
+ error 400, "The month you supplied is invalid. Valid months are 1-12." if params["month"] && !month
+ acronyms = restricted_ontologies_to_acronyms(params)
+ analytics = Ontology.analytics(year, month, acronyms)
+
+ reply analytics
+ end
+
+
+ get '/users' do
+ expires 86400, :public
+ year = year_param(params)
+ error 400, "The year you supplied is invalid. Valid years start with 2 and contain 4 digits." if params["year"] && !year
+ month = month_param(params)
+ error 400, "The month you supplied is invalid. Valid months are 1-12." if params["month"] && !month
+ analytics = User.analytics(year, month)
+ reply analytics['all_users']
+ end
+
+ get '/page_visits' do
+ expires 86400, :public
+ year = year_param(params)
+ error 400, "The year you supplied is invalid. Valid years start with 2 and contain 4 digits." if params["year"] && !year
+ month = month_param(params)
+ error 400, "The month you supplied is invalid. Valid months are 1-12." if params["month"] && !month
+ analytics = User.page_visits_analytics
+ reply analytics['all_pages']
+ end
+
+ end
+
+end
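Example call against one of these endpoints (host, API key, and the year/month values are placeholders):

require 'net/http'
require 'json'

# Ontology visit analytics for a given year/month combination.
uri = URI('http://localhost:9393/data/analytics/ontologies?year=2023&month=6')
req = Net::HTTP::Get.new(uri)
req['Authorization'] = 'apikey token=YOUR_APIKEY'
res = Net::HTTP.start(uri.host, uri.port) { |http| http.request(req) }
puts JSON.parse(res.body)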
diff --git a/controllers/classes_controller.rb b/controllers/classes_controller.rb
index c8e55bf8..d792c172 100644
--- a/controllers/classes_controller.rb
+++ b/controllers/classes_controller.rb
@@ -262,13 +262,7 @@ def includes_param_check
end
end
- def concept_schemes
- params["concept_schemes"]&.split(',') || []
- end
- def concept_collections
- params["concept_collections"]&.split(',') || []
- end
def request_display(attrs)
diff --git a/controllers/dereference_resource_controller.rb b/controllers/dereference_resource_controller.rb
new file mode 100644
index 00000000..8b69efdb
--- /dev/null
+++ b/controllers/dereference_resource_controller.rb
@@ -0,0 +1,53 @@
+use Rack::ContentNegotiation
+
+class DereferenceResourceController < ApplicationController
+ namespace "/ontologies" do
+ get "/:acronym/resolve/:uri" do
+ acronym = params[:acronym]
+ uri = params[:uri]
+
+ if acronym.blank? || uri.blank?
+ error 500, "Usage: ontologies/:acronym/resolve/:uri?output_format= OR POST: acronym, uri, output_format parameters"
+ end
+
+ output_format = env["format"].presence || params[:output_format].presence || 'application/n-triples'
+
+ process_request(acronym, uri, output_format)
+ end
+
+ private
+
+ def process_request(acronym_param, uri_param, output_format)
+ acronym = acronym_param
+ uri = URI.decode_www_form_component(uri_param)
+
+ error 500, "INVALID URI" unless valid_url?(uri)
+ sub = LinkedData::Models::Ontology.find(acronym).first&.latest_submission
+
+ error 500, "Ontology not found" unless sub
+
+ r = Resource.new(sub.id, uri)
+ case output_format
+ when 'application/ld+json', 'application/json'
+ r.to_json
+ when 'application/rdf+xml', 'application/xml'
+ r.to_xml
+ when 'text/turtle'
+ r.to_turtle
+ when 'application/n-triples'
+ r.to_ntriples
+ else
+ error 500, "Invalid output format, valid format are: application/json, application/ld+json, application/xml, application/rdf+xml, text/turtle and application/n-triples"
+ end
+
+
+ end
+
+ def valid_url?(url)
+ uri = URI.parse(url)
+ uri.is_a?(URI::HTTP) || uri.is_a?(URI::HTTPS)
+ rescue URI::InvalidURIError
+ false
+ end
+ end
+end
\ No newline at end of file
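A hypothetical call: the resource URI travels as a path segment, so it must be percent-encoded; the acronym, class URI, and host are placeholders:

require 'net/http'
require 'erb'

encoded = ERB::Util.url_encode('http://example.org/ontology#SomeClass')
uri = URI("http://localhost:9393/ontologies/MYONT/resolve/#{encoded}?output_format=text/turtle")
res = Net::HTTP.get_response(uri)
puts res.body # Turtle serialization of the dereferenced resource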
diff --git a/controllers/home_controller.rb b/controllers/home_controller.rb
index 459c6a4f..a44fd22e 100644
--- a/controllers/home_controller.rb
+++ b/controllers/home_controller.rb
@@ -13,11 +13,15 @@ class HomeController < ApplicationController
expires 3600, :public
last_modified @@root_last_modified ||= Time.now.httpdate
routes = routes_list
+
#TODO: delete when ccv will be on production
routes.delete("/ccv")
if LinkedData.settings.enable_resource_index == false
routes.delete("/resource_index")
end
+
+ routes.delete('/Agents')
+
routes_hash = {}
context = {}
routes.each do |route|
@@ -121,7 +125,7 @@ def metadata_all
unique: cls.unique?(attribute) || " ",
required: cls.required?(attribute) || " ",
list: cls.list?(attribute) || " ",
- cardinality: cls.cardinality(attribute) || " "
+ cardinality: (cls.cardinality(attribute) rescue nil) || " "
}
else
attributes_info[attribute] = {
diff --git a/controllers/mappings_controller.rb b/controllers/mappings_controller.rb
index 75f0c5b8..82c280fa 100644
--- a/controllers/mappings_controller.rb
+++ b/controllers/mappings_controller.rb
@@ -191,6 +191,8 @@ class MappingsController < ApplicationController
.each do |m|
persistent_counts[m.ontologies.first] = m.count
end
+ ont_acronyms = restricted_ontologies_to_acronyms(params)
+ persistent_counts = persistent_counts.select { |key, _| ont_acronyms.include?(key) || key.start_with?("http://") }
reply persistent_counts
end
diff --git a/controllers/ontologies_controller.rb b/controllers/ontologies_controller.rb
index da1b748c..58518420 100644
--- a/controllers/ontologies_controller.rb
+++ b/controllers/ontologies_controller.rb
@@ -38,21 +38,12 @@ class OntologiesController < ApplicationController
else
latest = ont.latest_submission(status: :any)
end
- check_last_modified(latest) if latest
- # When asking to display all metadata, we are using bring_remaining which is more performant than including all metadata (remove this when the query to get metadata will be fixed)
+
if latest
- if includes_param.first == :all
- # Bring what we need to display all attr of the submission
- latest.bring_remaining
- latest.bring({:contact=>[:name, :email],
- :ontology=>[:acronym, :name, :administeredBy, :group, :viewingRestriction, :doNotUpdate, :flat,
- :hasDomain, :summaryOnly, :acl, :viewOf, :ontologyType],
- :submissionStatus=>[:code], :hasOntologyLanguage=>[:acronym]})
- else
- latest.bring(*OntologySubmission.goo_attrs_to_load(includes_param))
- end
+ check_last_modified(latest)
+ latest.bring(*submission_include_params)
end
- #remove the whole previous if block and replace by it: latest.bring(*OntologySubmission.goo_attrs_to_load(includes_param)) if latest
+
reply(latest || {})
end
@@ -62,7 +53,7 @@ class OntologiesController < ApplicationController
patch '/:acronym/latest_submission' do
ont = Ontology.find(params["acronym"]).first
error 422, "You must provide an existing `acronym` to patch" if ont.nil?
-
+
submission = ont.latest_submission(status: :any)
submission.bring(*OntologySubmission.attributes)
diff --git a/controllers/ontology_submissions_controller.rb b/controllers/ontology_submissions_controller.rb
index cf55659d..0068a5f1 100644
--- a/controllers/ontology_submissions_controller.rb
+++ b/controllers/ontology_submissions_controller.rb
@@ -1,9 +1,15 @@
class OntologySubmissionsController < ApplicationController
get "/submissions" do
check_last_modified_collection(LinkedData::Models::OntologySubmission)
- #using appplication_helper method
- options = {also_include_views: params["also_include_views"], status: (params["include_status"] || "ANY")}
- reply retrieve_latest_submissions(options).values
+ options = {
+ also_include_views: params["also_include_views"],
+ status: (params["include_status"] || "ANY")
+ }
+ subs = retrieve_latest_submissions(options)
+ subs = subs.values unless page?
+    # For this request only, force reviews, notes and projects into the default serialization
+ LinkedData::Models::Ontology.serialize_default(*(LinkedData::Models::Ontology.hypermedia_settings[:serialize_default] + [:reviews, :notes, :projects]))
+ reply subs
end
##
@@ -19,22 +25,18 @@ class OntologySubmissionsController < ApplicationController
##
# Display all submissions of an ontology
get do
- ont = Ontology.find(params["acronym"]).include(:acronym).first
+ ont = Ontology.find(params["acronym"]).include(:acronym, :administeredBy, :acl, :viewingRestriction).first
error 422, "Ontology #{params["acronym"]} does not exist" unless ont
check_last_modified_segment(LinkedData::Models::OntologySubmission, [ont.acronym])
- if includes_param.first == :all
- # When asking to display all metadata, we are using bring_remaining which is more performant than including all metadata (remove this when the query to get metadata will be fixed)
- ont.bring(submissions: [:released, :creationDate, :status, :submissionId,
- {:contact=>[:name, :email], :ontology=>[:administeredBy, :acronym, :name, :summaryOnly, :ontologyType, :viewingRestriction, :acl, :group, :hasDomain, :views, :viewOf, :flat],
- :submissionStatus=>[:code], :hasOntologyLanguage=>[:acronym]}, :submissionStatus])
-
- ont.submissions.each do |sub|
- sub.bring_remaining
- end
- else
- ont.bring(submissions: OntologySubmission.goo_attrs_to_load(includes_param))
- end
- reply ont.submissions.sort {|a,b| b.submissionId.to_i <=> a.submissionId.to_i } # descending order of submissionId
+ check_access(ont)
+ options = {
+ also_include_views: true,
+ status: (params["include_status"] || "ANY"),
+ ontology: params["acronym"]
+ }
+ subs = retrieve_submissions(options)
+
+ reply subs.sort {|a,b| b.submissionId.to_i <=> a.submissionId.to_i } # descending order of submissionId
end
##
@@ -53,7 +55,7 @@ class OntologySubmissionsController < ApplicationController
ont.bring(:submissions)
ont_submission = ont.submission(params["ontology_submission_id"])
error 404, "`submissionId` not found" if ont_submission.nil?
- ont_submission.bring(*OntologySubmission.goo_attrs_to_load(includes_param))
+ ont_submission.bring(*submission_include_params)
reply ont_submission
end
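Because /submissions now pages whenever a page parameter is present, clients can iterate over the listing instead of fetching it whole. A sketch (host and key are placeholders; pagesize is the API's usual page-size parameter):

require 'net/http'
require 'json'

uri = URI('http://localhost:9393/submissions?page=1&pagesize=25')
req = Net::HTTP::Get.new(uri)
req['Authorization'] = 'apikey token=YOUR_APIKEY'
res = Net::HTTP.start(uri.host, uri.port) { |http| http.request(req) }
puts JSON.parse(res.body)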
diff --git a/controllers/properties_controller.rb b/controllers/properties_controller.rb
index f98e9016..d32180d5 100644
--- a/controllers/properties_controller.rb
+++ b/controllers/properties_controller.rb
@@ -24,7 +24,7 @@ class PropertiesController < ApplicationController
get '/:property' do
prop = params[:property]
ont, submission = get_ontology_and_submission
- p = ont.property(prop, submission)
+ p = ont.property(prop, submission, display_all_attributes: false)
error 404, "Property #{prop} not found in ontology #{ont.id.to_s}" if p.nil?
reply 200, p
end
@@ -51,7 +51,7 @@ class PropertiesController < ApplicationController
get '/:property/tree' do
prop = params[:property]
ont, submission = get_ontology_and_submission
- p = ont.property(prop, submission)
+ p = ont.property(prop, submission, display_all_attributes: false)
error 404, "Property #{prop} not found in ontology #{ont.id.to_s}" if p.nil?
root_tree = p.tree
@@ -79,7 +79,7 @@ class PropertiesController < ApplicationController
get '/:property/ancestors' do
prop = params[:property]
ont, submission = get_ontology_and_submission
- p = ont.property(prop, submission)
+ p = ont.property(prop, submission, display_all_attributes: false)
error 404, "Property #{prop} not found in ontology #{ont.id.to_s}" if p.nil?
ancestors = p.ancestors
p.class.in(submission).models(ancestors).include(:label, :definition).all
@@ -91,7 +91,7 @@ class PropertiesController < ApplicationController
get '/:property/descendants' do
prop = params[:property]
ont, submission = get_ontology_and_submission
- p = ont.property(prop, submission)
+ p = ont.property(prop, submission, display_all_attributes: false)
error 404, "Property #{prop} not found in ontology #{ont.id.to_s}" if p.nil?
descendants = p.descendants
p.class.in(submission).models(descendants).include(:label, :definition).all
@@ -103,7 +103,7 @@ class PropertiesController < ApplicationController
get '/:property/parents' do
prop = params[:property]
ont, submission = get_ontology_and_submission
- p = ont.property(prop, submission)
+ p = ont.property(prop, submission, display_all_attributes: false)
error 404, "Property #{prop} not found in ontology #{ont.id.to_s}" if p.nil?
p.bring(:parents)
@@ -120,7 +120,7 @@ class PropertiesController < ApplicationController
get '/:property/children' do
prop = params[:property]
ont, submission = get_ontology_and_submission
- p = ont.property(prop, submission)
+ p = ont.property(prop, submission, display_all_attributes: false)
error 404, "Property #{prop} not found in ontology #{ont.id.to_s}" if p.nil?
p.bring(:children)
diff --git a/controllers/properties_search_controller.rb b/controllers/properties_search_controller.rb
index 29d6b772..6c5b6cdf 100644
--- a/controllers/properties_search_controller.rb
+++ b/controllers/properties_search_controller.rb
@@ -22,7 +22,7 @@ def process_search(params=nil)
# puts "Properties query: #{query}, params: #{params}"
set_page_params(params)
docs = Array.new
- resp = LinkedData::Models::Class.search(query, params, :property)
+ resp = LinkedData::Models::OntologyProperty.search(query, params)
total_found = resp["response"]["numFound"]
add_matched_fields(resp, Sinatra::Helpers::SearchHelper::MATCH_TYPE_LABEL)
ontology_rank = LinkedData::Models::Ontology.rank
diff --git a/controllers/search_controller.rb b/controllers/search_controller.rb
index 3bc1c13f..ce34d51d 100644
--- a/controllers/search_controller.rb
+++ b/controllers/search_controller.rb
@@ -5,16 +5,208 @@ class SearchController < ApplicationController
namespace "/search" do
# execute a search query
get do
- process_search()
+ process_search
end
post do
- process_search()
+ process_search
+ end
+
+ namespace "/ontologies" do
+ get do
+ query = params[:query] || params[:q]
+ groups = params.fetch("groups", "").split(',')
+ categories = params.fetch("hasDomain", "").split(',')
+ languages = params.fetch("languages", "").split(',')
+ status = params.fetch("status", "").split(',')
+ format = params.fetch("hasOntologyLanguage", "").split(',')
+ is_of_type = params.fetch("isOfType", "").split(',')
+ has_format = params.fetch("hasFormat", "").split(',')
+ visibility = params["visibility"]
+ show_views = params["show_views"] == 'true'
+ sort = params.fetch("sort", "score desc, ontology_name_sort asc, ontology_acronym_sort asc")
+ page, page_size = page_params
+
+ fq = [
+ 'resource_model:"ontology_submission"',
+ 'submissionStatus_txt:ERROR_* OR submissionStatus_txt:"RDF" OR submissionStatus_txt:"UPLOADED"',
+ groups.map { |x| "ontology_group_txt:\"http://data.bioontology.org/groups/#{x.upcase}\"" }.join(' OR '),
+ categories.map { |x| "ontology_hasDomain_txt:\"http://data.bioontology.org/categories/#{x.upcase}\"" }.join(' OR '),
+ languages.map { |x| "naturalLanguage_txt:\"#{x.downcase}\"" }.join(' OR '),
+ ]
+
+ fq << "ontology_viewingRestriction_t:#{visibility}" unless visibility.blank?
+ fq << "!ontology_viewOf_t:*" unless show_views
+
+ fq << format.map { |x| "hasOntologyLanguage_t:\"http://data.bioontology.org/ontology_formats/#{x}\"" }.join(' OR ') unless format.blank?
+
+ fq << status.map { |x| "status_t:#{x}" }.join(' OR ') unless status.blank?
+ fq << is_of_type.map { |x| "isOfType_t:#{x}" }.join(' OR ') unless is_of_type.blank?
+ fq << has_format.map { |x| "hasFormalityLevel_t:#{x}" }.join(' OR ') unless has_format.blank?
+
+ fq.reject!(&:blank?)
+
+ if params[:qf]
+ qf = params[:qf]
+ else
+ qf = [
+ "ontology_acronymSuggestEdge^25 ontology_nameSuggestEdge^15 descriptionSuggestEdge^10 ", # start of the word first
+ "ontology_acronym_text^15 ontology_name_text^10 description_text^5 ", # full word match
+ "ontology_acronymSuggestNgram^2 ontology_nameSuggestNgram^1.5 descriptionSuggestNgram" # substring match last
+ ].join(' ')
+ end
+
+ page_data = search(Ontology, query, {
+ fq: fq,
+ qf: qf,
+ page: page,
+ page_size: page_size,
+ sort: sort
+ })
+
+ total_found = page_data.aggregate
+ ontology_rank = LinkedData::Models::Ontology.rank
+ docs = {}
+ acronyms_ids = {}
+ page_data.each do |doc|
+ resource_id = doc["resource_id"]
+ id = doc["submissionId_i"]
+ acronym = doc["ontology_acronym_text"]
+ old_resource_id = acronyms_ids[acronym]
+ old_id = old_resource_id.split('/').last.to_i rescue 0
+
+ already_found = (old_id && id && (id <= old_id))
+ not_restricted = (doc["ontology_viewingRestriction_t"]&.eql?('public') || current_user&.admin?)
+ user_not_restricted = not_restricted ||
+ Array(doc["ontology_viewingRestriction_txt"]).any? {|u| u.split(' ').last == current_user&.username} ||
+ Array(doc["ontology_acl_txt"]).any? {|u| u.split(' ').last == current_user&.username}
+
+ user_restricted = !user_not_restricted
+
+ if acronym.blank? || already_found || user_restricted
+ total_found -= 1
+ next
+ end
+
+ docs.delete(old_resource_id)
+ acronyms_ids[acronym] = resource_id
+
+ doc["ontology_rank"] = ontology_rank.dig(doc["ontology_acronym_text"], :normalizedScore) || 0.0
+ docs[resource_id] = doc
+ end
+
+ docs = docs.values
+
+ docs.sort! { |a, b| [b["score"], b["ontology_rank"]] <=> [a["score"], a["ontology_rank"]] } unless params[:sort].present?
+
+ page = page_object(docs, total_found)
+
+ reply 200, page
+ end
+
+ get '/content' do
+ query = params[:query] || params[:q]
+ page, page_size = page_params
+
+ ontologies = params.fetch("ontologies", "").split(',')
+
+ unless current_user&.admin?
+ restricted_acronyms = restricted_ontologies_to_acronyms(params)
+ ontologies = ontologies.empty? ? restricted_acronyms : ontologies & restricted_acronyms
+ end
+
+
+ types = params.fetch("types", "").split(',')
+ qf = params.fetch("qf", "")
+
+ qf = [
+ "ontology_t^100 resource_id^10",
+ "http___www.w3.org_2004_02_skos_core_prefLabel_txt^30",
+ "http___www.w3.org_2004_02_skos_core_prefLabel_t^30",
+ "http___www.w3.org_2000_01_rdf-schema_label_txt^30",
+ "http___www.w3.org_2000_01_rdf-schema_label_t^30",
+ ].join(' ') if qf.blank?
+
+ fq = []
+
+ fq << ontologies.map { |x| "ontology_t:\"#{x}\"" }.join(' OR ') unless ontologies.blank?
+ fq << types.map { |x| "type_t:\"#{x}\" OR type_txt:\"#{x}\"" }.join(' OR ') unless types.blank?
+
+
+ conn = SOLR::SolrConnector.new(Goo.search_conf, :ontology_data)
+ resp = conn.search(query, fq: fq, qf: qf, defType: "edismax",
+ start: (page - 1) * page_size, rows: page_size)
+
+ total_found = resp["response"]["numFound"]
+ docs = resp["response"]["docs"]
+
+
+ reply 200, page_object(docs, total_found)
+ end
+ end
+
+ namespace "/agents" do
+ get do
+ query = params[:query] || params[:q]
+ page, page_size = page_params
+ type = params[:agentType].blank? ? nil : params[:agentType]
+
+ fq = "agentType_t:#{type}" if type
+
+ if params[:qf]
+ qf = params[:qf]
+ else
+ qf = [
+ "acronymSuggestEdge^25 nameSuggestEdge^15 emailSuggestEdge^15 identifiersSuggestEdge^10 ", # start of the word first
+ "identifiers_texts^20 acronym_text^15 name_text^10 email_text^10 ", # full word match
+ "acronymSuggestNgram^2 nameSuggestNgram^1.5 email_text^1" # substring match last
+ ].join(' ')
+ end
+
+
+
+ if params[:sort]
+ sort = "#{params[:sort]} asc, score desc"
+ else
+ sort = "score desc, acronym_sort asc, name_sort asc"
+ end
+
+ reply 200, search(LinkedData::Models::Agent,
+ query,
+ fq: fq, qf: qf,
+ page: page, page_size: page_size,
+ sort: sort)
+ end
end
private
- def process_search(params=nil)
+ def search(model, query, params = {})
+ query = query.blank? ? "*" : query
+
+ resp = model.search(query, search_params(params))
+
+ total_found = resp["response"]["numFound"]
+ docs = resp["response"]["docs"]
+
+ page_object(docs, total_found)
+ end
+
+ def search_params(defType: "edismax", fq:, qf:, stopwords: "true", lowercaseOperators: "true", page:, page_size:, fl: '*,score', sort:)
+ {
+ defType: defType,
+ fq: fq,
+ qf: qf,
+ sort: sort,
+ start: (page - 1) * page_size,
+ rows: page_size,
+ fl: fl,
+ stopwords: stopwords,
+ lowercaseOperators: lowercaseOperators,
+ }
+ end
+
+ def process_search(params = nil)
params ||= @params
text = params["q"]
@@ -44,19 +236,22 @@ def process_search(params=nil)
doc[:submission] = submission
doc[:ontology_rank] = (ontology_rank[doc[:submissionAcronym]] && !ontology_rank[doc[:submissionAcronym]].empty?) ? ontology_rank[doc[:submissionAcronym]][:normalizedScore] : 0.0
doc[:properties] = MultiJson.load(doc.delete(:propertyRaw)) if include_param_contains?(:properties)
+
+ doc = filter_attrs_by_language(doc)
+
instance = doc[:provisional] ? LinkedData::Models::ProvisionalClass.read_only(doc) : LinkedData::Models::Class.read_only(doc)
docs.push(instance)
end
unless params['sort']
if !text.nil? && text[-1] == '*'
- docs.sort! {|a, b| [b[:score], a[:prefLabelExact].downcase, b[:ontology_rank]] <=> [a[:score], b[:prefLabelExact].downcase, a[:ontology_rank]]}
+ docs.sort! { |a, b| [b[:score], a[:prefLabelExact].downcase, b[:ontology_rank]] <=> [a[:score], b[:prefLabelExact].downcase, a[:ontology_rank]] }
else
- docs.sort! {|a, b| [b[:score], b[:ontology_rank]] <=> [a[:score], a[:ontology_rank]]}
+ docs.sort! { |a, b| [b[:score], b[:ontology_rank]] <=> [a[:score], a[:ontology_rank]] }
end
end
- #need to return a Page object
+ # need to return a Page object
page = page_object(docs, total_found)
reply 200, page
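A hypothetical query against the new ontology search endpoint, combining free text with the facet filters assembled into fq above; the host, key, filter values, and the 'collection' key of the paged reply are assumptions based on the API's usual conventions:

require 'net/http'
require 'json'

params = 'query=plant&groups=CROP&hasOntologyLanguage=OWL&page=1&pagesize=10'
uri = URI("http://localhost:9393/search/ontologies?#{params}")
req = Net::HTTP::Get.new(uri)
req['Authorization'] = 'apikey token=YOUR_APIKEY'
res = Net::HTTP.start(uri.host, uri.port) { |http| http.request(req) }
docs = JSON.parse(res.body)['collection'] # paged replies list their docs here
docs.each { |doc| puts doc['ontology_acronym_text'] }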
diff --git a/controllers/slices_controller.rb b/controllers/slices_controller.rb
index a31f799e..9033222c 100644
--- a/controllers/slices_controller.rb
+++ b/controllers/slices_controller.rb
@@ -41,17 +41,20 @@ class SlicesController < ApplicationController
##
# Create a new slice
post do
+ error 403, "Access denied" unless current_user && current_user.admin?
create_slice
end
# Delete a slice
delete '/:slice' do
+ error 403, "Access denied" unless current_user && current_user.admin?
LinkedData::Models::Slice.find(params[:slice]).first.delete
halt 204
end
# Update an existing slice
patch '/:slice' do
+ error 403, "Access denied" unless current_user && current_user.admin?
slice = LinkedData::Models::Slice.find(params[:slice]).include(LinkedData::Models::Slice.attributes(:all)).first
populate_from_params(slice, params)
if slice.valid?
@@ -61,7 +64,7 @@ class SlicesController < ApplicationController
end
halt 204
end
-
+
private
def create_slice
diff --git a/controllers/users_controller.rb b/controllers/users_controller.rb
index 00b6e732..58e7667f 100644
--- a/controllers/users_controller.rb
+++ b/controllers/users_controller.rb
@@ -1,14 +1,17 @@
class UsersController < ApplicationController
namespace "/users" do
post "/authenticate" do
- user_id = params["user"]
- user_password = params["password"]
+
# Modify params to show all user attributes
params["display"] = User.attributes.join(",")
- user = User.find(user_id).include(User.goo_attrs_to_load(includes_param) + [:passwordHash]).first
- authenticated = user.authenticate(user_password) unless user.nil?
- error 401, "Username/password combination invalid" unless authenticated
- user.show_apikey = true
+
+ if params["access_token"]
+ user = oauth_authenticate(params)
+ user.bring(*User.goo_attrs_to_load(includes_param))
+ else
+ user = login_password_authenticate(params)
+ end
+ user.show_apikey = true unless user.nil?
reply user
end
@@ -20,17 +23,13 @@ class UsersController < ApplicationController
post "/create_reset_password_token" do
email = params["email"]
username = params["username"]
- user = LinkedData::Models::User.where(email: email, username: username).include(LinkedData::Models::User.attributes).first
- error 404, "User not found" unless user
- reset_token = token(36)
- user.resetToken = reset_token
+ user = send_reset_token(email, username)
+
if user.valid?
- user.save(override_security: true)
- LinkedData::Utils::Notifications.reset_password(user, reset_token)
+ halt 204
else
error 422, user.errors
end
- halt 204
end
##
@@ -42,11 +41,11 @@ class UsersController < ApplicationController
email = params["email"] || ""
username = params["username"] || ""
token = params["token"] || ""
+
params["display"] = User.attributes.join(",") # used to serialize everything via the serializer
- user = LinkedData::Models::User.where(email: email, username: username).include(User.goo_attrs_to_load(includes_param)).first
- error 404, "User not found" unless user
- if token.eql?(user.resetToken)
- user.show_apikey = true
+
+ user, token_accepted = reset_password(email, username, token)
+ if token_accepted
reply user
else
error 403, "Password reset not authorized with this token"
@@ -81,6 +80,7 @@ class UsersController < ApplicationController
# Update an existing submission of an user
patch '/:username' do
user = User.find(params[:username]).include(User.attributes).first
+ params.delete("role") unless current_user.admin?
populate_from_params(user, params)
if user.valid?
user.save
@@ -98,27 +98,15 @@ class UsersController < ApplicationController
private
- def token(len)
- chars = ("a".."z").to_a + ("A".."Z").to_a + ("1".."9").to_a
- token = ""
- 1.upto(len) { |i| token << chars[rand(chars.size-1)] }
- token
- end
- def create_user
+ def create_user(send_notifications: true)
params ||= @params
user = User.find(params["username"]).first
error 409, "User with username `#{params["username"]}` already exists" unless user.nil?
+ params.delete("role") unless current_user.admin?
user = instance_from_params(User, params)
if user.valid?
- user.save
- # Send an email to the administrator to warn him about the newly created user
- begin
- if !LinkedData.settings.admin_emails.nil? && !LinkedData.settings.admin_emails.empty?
- LinkedData::Utils::Notifications.new_user(user)
- end
- rescue Exception => e
- end
+ user.save(send_notifications: send_notifications)
else
error 422, user.errors
end
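The authenticate endpoint now accepts either a username/password pair or an OAuth access token validated against the providers configured earlier. A sketch of both calls (host and credentials are placeholders; any extra parameter identifying the provider is left to oauth_authenticate):

require 'net/http'
require 'json'

uri = URI('http://localhost:9393/users/authenticate')

# Classic login: replies with the user record, apikey included.
res = Net::HTTP.post_form(uri, 'user' => 'jdoe', 'password' => 'secret')
puts JSON.parse(res.body)['apikey'] if res.is_a?(Net::HTTPSuccess)

# OAuth login: a provider-issued token is verified server-side instead.
Net::HTTP.post_form(uri, 'access_token' => 'PROVIDER_ISSUED_TOKEN')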
diff --git a/docker-compose.yml b/docker-compose.yml
index de084081..564fc8d2 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,115 +1,201 @@
x-app: &app
- build:
- context: .
- args:
- RUBY_VERSION: '2.7'
- # Increase the version number in the image tag every time Dockerfile or its arguments is changed
- image: ontologies_api:0.0.1
- environment: &env
- BUNDLE_PATH: /srv/ontoportal/bundle
- # default bundle config resolves to /usr/local/bundle/config inside of the container
- # we are setting it to local app directory if we need to use 'bundle config local'
- BUNDLE_APP_CONFIG: /srv/ontoportal/ontologies_api/.bundle
- COVERAGE: 'true'
- GOO_REDIS_HOST: redis-ut
- REDIS_HOST: redis-ut
- REDIS_PORT: 6379
- SOLR_HOST: solr-ut
- SOLR_TERM_SEARCH_URL: http://solr-ut:8983/solr/term_search_core1
- SOLR_PROP_SEARCH_URL: http://solr-ut:8983/solr/prop_search_core1
- MGREP_HOST: mgrep-ut
- MGREP_PORT: 55555
- stdin_open: true
- tty: true
- command: "bundle exec rackup -o 0.0.0.0 --port 9393"
- ports:
- - 9393:9393
- volumes:
- # bundle volume for hosting gems installed by bundle; it helps in local development with gem udpates
- - bundle:/srv/ontoportal/bundle
- # api code
- - .:/srv/ontoportal/ontologies_api
- # mount directory containing development version of the gems if you need to use 'bundle config local'
- #- /Users/alexskr/ontoportal:/Users/alexskr/ontoportal
- depends_on:
- - solr-ut
- - redis-ut
- - mgrep-ut
+ image: agroportal/ontologies_api:master
+ environment: &env
+ # default bundle config resolves to /usr/local/bundle/config inside of the container
+ # we are setting it to local app directory if we need to use 'bundle config local'
+ BUNDLE_PATH: /srv/ontoportal/bundle
+ COVERAGE: 'true' # enable simplecov code coverage
+ REDIS_HOST: redis-ut
+ REDIS_PORT: 6379
+ SOLR_TERM_SEARCH_URL: http://solr-ut:8983/solr
+ SOLR_PROP_SEARCH_URL: http://solr-ut:8983/solr
+ GOO_BACKEND_NAME: 4store
+ GOO_PORT: 9000
+ GOO_HOST: 4store-ut
+ MGREP_HOST: mgrep-ut
+ MGREP_PORT: 55555
+ REPOSITORY_FOLDER: /srv/ontoportal/data/repository
+ REPORT_PATH: /srv/ontoportal/data/reports/ontologies_report.json
+ MGREP_DICTIONARY_FILE: /srv/ontoportal/data/mgrep
+ stdin_open: true
+ tty: true
+ command: /bin/bash
+
+
services:
api:
<<: *app
+ env_file:
+ .env
environment:
<<: *env
- GOO_BACKEND_NAME: 4store
- GOO_PORT: 9000
- GOO_HOST: 4store-ut
- GOO_PATH_QUERY: /sparql/
- GOO_PATH_DATA: /data/
- GOO_PATH_UPDATE: /update/
+ BUNDLE_APP_CONFIG: /srv/ontoportal/ontologies_api/.bundle
+
profiles:
- 4store
depends_on:
- - solr-ut
- - redis-ut
- - mgrep-ut
- - 4store-ut
+ solr-ut:
+ condition: service_healthy
+ redis-ut:
+ condition: service_healthy
+ mgrep-ut:
+ condition: service_started
+ 4store-ut:
+ condition: service_started
+ ncbo_cron:
+ condition: service_started
+ ports:
+ - "9393:9393"
+ volumes:
+      # application code volume; keeping it in a named volume speeds up local development
+ - app_api:/srv/ontoportal/ontologies_api
+ - repository:/srv/ontoportal/data/repository
- api-agraph:
+ ncbo_cron:
<<: *app
+ image: agroportal/ncbo_cron:master
+ env_file:
+ .env
environment:
<<: *env
- GOO_BACKEND_NAME: ag
- GOO_PORT: 10035
- GOO_HOST: agraph-ut
- GOO_PATH_QUERY: /repositories/bioportal_test
- GOO_PATH_DATA: /repositories/bioportal_test/statements
- GOO_PATH_UPDATE: /repositories/bioportal_test/statements
+ BUNDLE_APP_CONFIG: /srv/ontoportal/ncbo_cron/.bundle
+ command: "bundle exec bin/ncbo_cron"
profiles:
- - agraph
+ - 4store
+ volumes:
+ - app_cron:/srv/ontoportal/ncbo_cron
+ - repository:/srv/ontoportal/data/repository
+ - history:/usr/local/hist
+ - reports:/srv/ontoportal/data/reports
+ - mgrep:/srv/ontoportal/data/mgrep
+ - logs:/srv/ontoportal/ncbo_cron/logs
depends_on:
- - solr-ut
- - redis-ut
- - mgrep-ut
- - agraph-ut
+ solr-ut:
+ condition: service_healthy
+ redis-ut:
+ condition: service_healthy
+ mgrep-ut:
+ condition: service_started
+ 4store-ut:
+ condition: service_started
+
+
+ mgrep-ut:
+ image: ontoportal/mgrep-ncbo:0.1
+ ports:
+ - "55556:55555"
redis-ut:
image: redis
+ ports:
+ - "6379:6379"
+ command: [ "redis-server", "--save", "", "--appendonly", "no" ]
+ healthcheck:
+ test: redis-cli ping
+ interval: 10s
+ timeout: 3s
+ retries: 10
4store-ut:
image: bde2020/4store
- #volume: fourstore:/var/lib/4store
+ volumes:
+ - 4store:/var/lib/4store
command: >
- bash -c "4s-backend-setup --segments 4 ontoportal_kb
- && 4s-backend ontoportal_kb
- && 4s-httpd -D -s-1 -p 9000 ontoportal_kb"
+ bash -c "if [ ! -d '/var/lib/4store/ontoportal_kb' ]; then 4s-backend-setup --segments 4 ontoportal_kb; fi ; 4s-backend ontoportal_kb ; 4s-httpd -D -s-1 -p 9000 ontoportal_kb"
+
+ ports:
+ - "9000:9000"
profiles:
+ - fs
- 4store
-
solr-ut:
- image: ontoportal/solr-ut:0.1
-
- mgrep-ut:
- image: ontoportal/mgrep-ncbo:0.1
+ image: solr:8
+ ports:
+ - 8983:8983
+ command: bin/solr start -cloud -f
+ volumes:
+ - solr_data:/var/solr/data
+ healthcheck:
+ test: [ "CMD", "curl", "-f", "http://localhost:8983/solr/admin/info/system?wt=json" ]
+ interval: 30s
+ timeout: 10s
+ retries: 3
agraph-ut:
- image: franzinc/agraph:v7.3.0
+ image: franzinc/agraph:v8.1.0
+ platform: linux/amd64
environment:
- AGRAPH_SUPER_USER=test
- AGRAPH_SUPER_PASSWORD=xyzzy
shm_size: 1g
- # ports:
- # - 10035:10035
+ ports:
+ # - 10035:10035
+ - 10000-10035:10000-10035
+ volumes:
+ - agdata:/agraph/data
+ # - ./agraph/etc:/agraph/etc
command: >
- bash -c "/agraph/bin/agraph-control --config /agraph/etc/agraph.cfg start
- ; agtool repos create bioportal_test
- ; agtool users add anonymous
- ; agtool users grant anonymous root:bioportal_test:rw
- ; tail -f /agraph/data/agraph.log"
+ bash -c "/agraph/bin/agraph-control --config /agraph/etc/agraph.cfg start
+ ; agtool repos create ontoportal_test --supersede
+ ; agtool users add anonymous
+ ; agtool users grant anonymous root:ontoportal_test:rw
+ ; tail -f /agraph/data/agraph.log"
+ # healthcheck:
+ # test: ["CMD-SHELL", "curl -sf http://127.0.0.1:10035/repositories/ontoportal_test/status | grep -iqE '(^running|^lingering)' || exit 1"]
+ # start_period: 10s
+ # interval: 10s
+ # timeout: 5s
+ # retries: 5
+ profiles:
+ - ag
+
+ virtuoso-ut:
+ image: tenforce/virtuoso:virtuoso7.2.5
+ platform: linux/amd64
+ environment:
+ - SPARQL_UPDATE=true
+ ports:
+ - 1111:1111
+ - 8890:8890
+ profiles:
+ - vo
+ healthcheck:
+ test: [ "CMD-SHELL", "curl -sf http://localhost:8890/sparql || exit 1" ]
+ start_period: 10s
+ interval: 60s
+ timeout: 5s
+ retries: 3
+
+ graphdb-ut:
+ image: ontotext/graphdb:10.3.3
+ platform: linux/amd64
+ privileged: true
+ environment:
+ GDB_HEAP_SIZE: 5G
+ GDB_JAVA_OPTS: >-
+ -Xms5g -Xmx5g
+ ports:
+ - 7200:7200
+ - 7300:7300
+ volumes:
+ - ./test/data/graphdb-repo-config.ttl:/opt/graphdb/dist/configs/templates/data/graphdb-repo-config.ttl
+ - ./test/data/graphdb-test-load.nt:/opt/graphdb/dist/configs/templates/data/graphdb-test-load.nt
+
+ entrypoint: >
+ bash -c " importrdf load -f -c /opt/graphdb/dist/configs/templates/data/graphdb-repo-config.ttl -m parallel /opt/graphdb/dist/configs/templates/data/graphdb-test-load.nt ; graphdb -Ddefault.min.distinct.threshold=3000 "
profiles:
- - agraph
+ - gb
volumes:
- bundle:
- #fourstore:
+ app_api:
+ app_cron:
+ agdata:
+ 4store:
+ repository:
+ solr_data:
+ reports:
+ mgrep:
+ logs:
+ history:
diff --git a/helpers/access_control_helper.rb b/helpers/access_control_helper.rb
index 1de3bee5..74416866 100644
--- a/helpers/access_control_helper.rb
+++ b/helpers/access_control_helper.rb
@@ -10,11 +10,7 @@ module AccessControlHelper
def check_access(obj)
return obj unless LinkedData.settings.enable_security
if obj.is_a?(Enumerable)
- if obj.first.is_a?(LinkedData::Models::Base) && obj.first.access_based_on?
- check_access(obj.first)
- else
filter_access(obj)
- end
else
if obj.respond_to?(:read_restricted?) && obj.read_restricted?
readable = obj.readable?(env["REMOTE_USER"])
diff --git a/helpers/application_helper.rb b/helpers/application_helper.rb
index 6c44f25f..24893eef 100644
--- a/helpers/application_helper.rb
+++ b/helpers/application_helper.rb
@@ -31,6 +31,7 @@ def populate_from_params(obj, params)
# Deal with empty strings for String and URI
empty_string = value.is_a?(String) && value.empty?
old_string_value_exists = obj.respond_to?(attribute) && (obj.send(attribute).is_a?(String) || obj.send(attribute).is_a?(RDF::URI))
+ old_string_value_exists = old_string_value_exists || (obj.respond_to?(attribute) && obj.send(attribute).is_a?(LinkedData::Models::Base))
if old_string_value_exists && empty_string
value = nil
elsif empty_string
@@ -51,6 +52,10 @@ def populate_from_params(obj, params)
value = is_arr ? value : [value]
new_value = []
value.each do |cls|
+ if uri_as_needed(cls["ontology"]).nil?
+ new_value << cls
+ next
+ end
sub = LinkedData::Models::Ontology.find(uri_as_needed(cls["ontology"])).first.latest_submission
new_value << LinkedData::Models::Class.find(cls["class"]).in(sub).first
end
@@ -59,7 +64,7 @@ def populate_from_params(obj, params)
# Replace the initial value with the object, handling Arrays as appropriate
if value.is_a?(Array)
value = value.map {|e| attr_cls.find(uri_as_needed(e)).include(attr_cls.attributes).first}
- else
+ elsif !value.nil?
value = attr_cls.find(uri_as_needed(value)).include(attr_cls.attributes).first
end
elsif attr_cls
@@ -83,7 +88,10 @@ def populate_from_params(obj, params)
value = retrieved_values
elsif attribute_settings && attribute_settings[:enforce] && attribute_settings[:enforce].include?(:date_time)
# TODO: Remove this awful hack when obj.class.model_settings[:range][attribute] contains DateTime class
- value = DateTime.parse(value)
+ is_array = value.is_a?(Array)
+ value = Array(value).map{ |v| DateTime.parse(v) }
+ value = value.first unless is_array
+ value
elsif attribute_settings && attribute_settings[:enforce] && attribute_settings[:enforce].include?(:uri) && attribute_settings[:enforce].include?(:list)
# in case its a list of URI, convert all value to IRI
value = value.map { |v| RDF::IRI.new(v) }
@@ -268,7 +276,7 @@ def month_param(params=nil)
if params["month"]
month = params["month"].strip
      if %r{(?<month>^(0[1-9]|[1-9]|1[0-2])$)}x === month
- month.to_i
+ month.to_i.to_s
end
end
end
@@ -279,7 +287,7 @@ def year_param(params=nil)
if params["year"]
year = params["year"].strip
      if %r{(?<year>^([1-2]\d{3})$)}x === year
- year.to_i
+ year.to_i.to_s
end
end
end
@@ -355,40 +363,16 @@ def replace_url_prefix(id)
end
def retrieve_latest_submissions(options = {})
- status = (options[:status] || "RDF").to_s.upcase
- include_ready = status.eql?("READY") ? true : false
- status = "RDF" if status.eql?("READY")
- any = true if status.eql?("ANY")
- include_views = options[:also_include_views] || false
- includes = OntologySubmission.goo_attrs_to_load(includes_param)
-
- includes << :submissionStatus unless includes.include?(:submissionStatus)
- if any
- submissions_query = OntologySubmission.where
- else
- submissions_query = OntologySubmission.where(submissionStatus: [ code: status])
- end
+ submissions = retrieve_submissions(options)
- submissions_query = submissions_query.filter(Goo::Filter.new(ontology: [:viewOf]).unbound) unless include_views
- submissions_query = submissions_query.filter(filter) if filter?
- # When asking to display all metadata, we are using bring_remaining on each submission. Slower but best way to retrieve all attrs
- if includes_param.first == :all
- includes = [:submissionId, {:contact=>[:name, :email], :ontology=>[:administeredBy, :acronym, :name, :summaryOnly, :ontologyType, :viewingRestriction, :acl,
- :group, :hasDomain, :views, :viewOf, :flat], :submissionStatus=>[:code], :hasOntologyLanguage=>[:acronym]}, :submissionStatus]
- end
- submissions = submissions_query.include(includes).to_a
-
- # Figure out latest parsed submissions using all submissions
- latest_submissions = {}
+  latest_submissions = page? ? submissions : {} # latest_submission does not work with pagination
submissions.each do |sub|
- # To retrieve all metadata, but slow when a lot of ontologies
- if includes_param.first == :all
- sub.bring_remaining
+ unless page?
+ next if include_ready?(options) && !sub.ready?
+ next if sub.ontology.nil?
+ latest_submissions[sub.ontology.acronym] ||= sub
+ latest_submissions[sub.ontology.acronym] = sub if sub.submissionId.to_i > latest_submissions[sub.ontology.acronym].submissionId.to_i
end
- next if include_ready && !sub.ready?
- next if sub.ontology.nil?
- latest_submissions[sub.ontology.acronym] ||= sub
- latest_submissions[sub.ontology.acronym] = sub if sub.submissionId.to_i > latest_submissions[sub.ontology.acronym].submissionId.to_i
end
latest_submissions
end
diff --git a/helpers/classes_helper.rb b/helpers/classes_helper.rb
index fa6c48cf..60becb22 100644
--- a/helpers/classes_helper.rb
+++ b/helpers/classes_helper.rb
@@ -32,23 +32,19 @@ def get_class(submission, load_attrs=nil)
load_children = load_attrs.delete :children
load_has_children = load_attrs.delete :hasChildren
- if !load_children
+ unless load_children
load_children = load_attrs.select { |x| x.instance_of?(Hash) && x.include?(:children) }
-
- if load_children.length == 0
- load_children = nil
- end
- if !load_children.nil?
- load_attrs = load_attrs.select { |x| !(x.instance_of?(Hash) && x.include?(:children)) }
- end
+ load_children = nil if load_children.length == 0
+ load_attrs = load_attrs.select { |x| !(x.instance_of?(Hash) && x.include?(:children)) } unless load_children.nil?
end
+
cls_uri = notation_to_class_uri(submission)
if cls_uri.nil?
cls_uri = RDF::URI.new(params[:cls])
- if !cls_uri.valid?
+ unless cls_uri.valid?
error 400, "The input class id '#{params[:cls]}' is not a valid IRI"
end
end
@@ -62,23 +58,38 @@ def get_class(submission, load_attrs=nil)
error 404,
"Resource '#{params[:cls]}' not found in ontology #{submission.ontology.acronym} submission #{submission.submissionId}"
end
- unless load_has_children.nil?
- cls.load_has_children
- end
- if !load_children.nil?
+
+ extra_include = []
+
+ extra_include << :hasChildren if load_has_children
+ extra_include << :isInActiveScheme if load_attrs.include?(:inScheme)
+ extra_include << :isInActiveCollection if load_attrs.include?(:memberOf)
+
+      cls.load_computed_attributes(to_load: extra_include,
+ options: {schemes: concept_schemes, collections: concept_collections})
+
+
+ unless load_children.nil?
LinkedData::Models::Class.partially_load_children(
- [cls],500,cls.submission)
+ [cls], 500, cls.submission)
unless load_has_children.nil?
cls.children.each do |c|
c.load_has_children
end
end
end
- return cls
+ cls
end
end
+ def concept_schemes
+ params["concept_schemes"]&.split(',') || []
+ end
+
+ def concept_collections
+ params["concept_collections"]&.split(',') || []
+ end
end
end
diff --git a/helpers/metadata_helper.rb b/helpers/metadata_helper.rb
index db61c414..2c5d7182 100644
--- a/helpers/metadata_helper.rb
+++ b/helpers/metadata_helper.rb
@@ -64,15 +64,23 @@ def klass_metadata(klass, type)
# Get display from the metadata
if klass.attribute_settings(attr)[:display].nil?
- attr_settings[:display] = "no"
+ attr_settings[:category] = "no"
else
- attr_settings[:display] = klass.attribute_settings(attr)[:display]
+ attr_settings[:category] = klass.attribute_settings(attr)[:display]
end
- if !klass.attribute_settings(attr)[:helpText].nil?
+ unless klass.attribute_settings(attr)[:helpText].nil?
attr_settings[:helpText] = klass.attribute_settings(attr)[:helpText]
end
+ unless klass.attribute_settings(attr)[:description].nil?
+ attr_settings[:description] = klass.attribute_settings(attr)[:description]
+ end
+
+ unless klass.attribute_settings(attr)[:example].nil?
+ attr_settings[:example] = klass.attribute_settings(attr)[:example]
+ end
+
attr_settings[:@context] = {
"@vocab" => "#{id_url_prefix}metadata/"
}
diff --git a/helpers/properties_search_helper.rb b/helpers/properties_search_helper.rb
index c3567edd..c4295749 100644
--- a/helpers/properties_search_helper.rb
+++ b/helpers/properties_search_helper.rb
@@ -33,7 +33,7 @@ def get_properties_search_query(text, params)
params["qf"] = "resource_id^20 labelExact^10 labelGeneratedExact^8"
params["hl.fl"] = "resource_id labelExact labelGeneratedExact"
else
- params["qf"] = "labelExact^100 labelGeneratedExact^80 labelSuggestEdge^50 labelSuggestNgram label labelGenerated resource_id"
+ params["qf"] = "labelExact^100 labelGeneratedExact^80 labelSuggestEdge^50 labelGeneratedSuggestEdge^40 labelGenerated resource_id"
query = solr_escape(text)
# double quote the query if it is a URL (ID searches)
query = "\"#{query}\"" if text =~ /\A#{URI::regexp(['http', 'https'])}\z/
diff --git a/helpers/request_params_helper.rb b/helpers/request_params_helper.rb
index e7ec091a..59adeba7 100644
--- a/helpers/request_params_helper.rb
+++ b/helpers/request_params_helper.rb
@@ -13,6 +13,10 @@ def settings_params(klass)
[attributes, page, size, order_by, bring_unmapped]
end
+ def page?
+ !params[:page].nil?
+ end
+
def is_set?(param)
!param.nil? && param != ""
end
@@ -25,6 +29,38 @@ def filter
build_filter
end
+ def apply_filters(object, query)
+ attributes_to_filter = object.attributes(:all).select{|x| params.keys.include?(x.to_s)}
+ filters = attributes_to_filter.map {|key| [key, params[key]&.split(',')]}.to_h
+ add_direct_filters(filters, query)
+ end
+
+ def apply_submission_filters(query)
+
+ filters = {
+      naturalLanguage: params[:naturalLanguage]&.split(','), # %w[http://lexvo.org/id/iso639-3/fra http://lexvo.org/id/iso639-3/eng]
+      hasOntologyLanguage_acronym: params[:hasOntologyLanguage]&.split(','), # %w[OWL SKOS]
+      ontology_hasDomain_acronym: params[:hasDomain]&.split(','), # %w[Crop Vue_francais]
+ ontology_group_acronym: params[:group]&.split(','), #%w[RICE CROP],
+ isOfType: params[:isOfType]&.split(','), #["http://omv.ontoware.org/2005/05/ontology#Vocabulary"],
+ hasFormalityLevel: params[:hasFormalityLevel]&.split(','), #["http://w3id.org/nkos/nkostype#thesaurus"],
+ ontology_viewingRestriction: params[:viewingRestriction]&.split(','), #["private"]
+ status: params[:status]&.split(','), #"retired",
+ }
+ inverse_filters = {
+ submissionStatus: params[:submissionStatus] #"RDF",
+ }
+
+ query = add_direct_filters(filters, query)
+
+ query = add_inverse_filters(inverse_filters, query)
+
+ query = add_acronym_name_filters(query)
+
+ add_order_by_patterns(query)
+ end
+
+
def get_order_by_from(params, default_order = :asc)
if is_set?(params['sortby'])
orders = (params["order"] || default_order.to_s).split(',')
@@ -50,6 +86,74 @@ def bring_unmapped_to(page_data, sub, klass)
end
private
+ def extract_attr(key)
+ attr, sub_attr, sub_sub_attr = key.to_s.split('_')
+
+ return attr.to_sym unless sub_attr
+
+ return {attr.to_sym => [sub_attr.to_sym]} unless sub_sub_attr
+
+ {attr.to_sym => [sub_attr.to_sym => sub_sub_attr.to_sym]}
+ end
+
+ def add_direct_filters(filters, query)
+ filters.each do |key, values|
+ attr = extract_attr(key)
+ next if Array(values).empty?
+
+ filter = Goo::Filter.new(attr).regex(values.first)
+ values.drop(1).each do |v|
+ filter = filter.or(Goo::Filter.new(attr).regex(v))
+ end
+ query = query.filter(filter)
+ end
+ query
+ end
+
+ def add_inverse_filters(inverse_filters, query)
+ inverse_filters.each do |key, value|
+ attr = extract_attr(key)
+ next unless value
+
+ filter = Goo::Filter.new(attr).regex("^(?:(?!#{value}).)*$")
+ query = query.filter(filter)
+ end
+ query
+ end
+
+ def add_acronym_name_filters(query)
+ filters = {
+ acronym: :ontology_acronym,
+ name: :ontology_name,
+ description: :description
+ }.map do |key, attr|
+ (params[key].nil? || params[key].empty?) ? nil : [extract_attr(attr), params[key]]
+ end.compact
+
+ return query if filters.empty?
+
+ key, val = filters.first
+ filter = Goo::Filter.new(key).regex(val)
+
+ filters.drop(1).each do |k, v|
+ filter = filter.or(Goo::Filter.new(k).regex(v))
+ end
+
+ query.filter(filter)
+ end
+
+ def add_order_by_patterns(query)
+ if params[:order_by]
+ attr, sub_attr = params[:order_by].to_s.split('_')
+ if sub_attr
+ order_pattern = { attr.to_sym => { sub_attr.to_sym => (sub_attr.eql?("name") ? :asc : :desc) } }
+ else
+ order_pattern = { attr.to_sym => :desc }
+ end
+ query = query.order_by(order_pattern)
+ end
+ query
+ end
def sort_order_item(param, order)
[param.to_sym, order.to_sym]
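
The new filter helpers rely on an underscore convention to address nested Goo attributes: `a_b` means attribute :b embedded under :a, and `a_b_c` nests one level deeper. A runnable sketch of extract_attr in isolation (same logic as above):

    def extract_attr(key)
      attr, sub_attr, sub_sub_attr = key.to_s.split('_')
      return attr.to_sym unless sub_attr
      return { attr.to_sym => [sub_attr.to_sym] } unless sub_sub_attr
      { attr.to_sym => [sub_attr.to_sym => sub_sub_attr.to_sym] }
    end

    extract_attr(:naturalLanguage)             # => :naturalLanguage
    extract_attr(:hasOntologyLanguage_acronym) # => {:hasOntologyLanguage=>[:acronym]}
    extract_attr(:ontology_hasDomain_acronym)  # => {:ontology=>[{:hasDomain=>:acronym}]}
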
diff --git a/helpers/search_helper.rb b/helpers/search_helper.rb
index 10de14c0..3805e650 100644
--- a/helpers/search_helper.rb
+++ b/helpers/search_helper.rb
@@ -30,51 +30,51 @@ module SearchHelper
MATCH_TYPE_LABELGENERATED = "labelGenerated"
MATCH_TYPE_MAP = {
- "resource_id" => "id",
- MATCH_TYPE_PREFLABEL => MATCH_TYPE_PREFLABEL,
- "prefLabelExact" => MATCH_TYPE_PREFLABEL,
- "prefLabelSuggestEdge" => MATCH_TYPE_PREFLABEL,
- "prefLabelSuggestNgram" => MATCH_TYPE_PREFLABEL,
- MATCH_TYPE_SYNONYM => MATCH_TYPE_SYNONYM,
- "synonymExact" => MATCH_TYPE_SYNONYM,
- "synonymSuggestEdge" => MATCH_TYPE_SYNONYM,
- "synonymSuggestNgram" => MATCH_TYPE_SYNONYM,
- MATCH_TYPE_PROPERTY => MATCH_TYPE_PROPERTY,
- MATCH_TYPE_LABEL => MATCH_TYPE_LABEL,
- "labelExact" => MATCH_TYPE_LABEL,
- "labelSuggestEdge" => MATCH_TYPE_LABEL,
- "labelSuggestNgram" => MATCH_TYPE_LABEL,
- MATCH_TYPE_LABELGENERATED => MATCH_TYPE_LABELGENERATED,
- "labelGeneratedExact" => MATCH_TYPE_LABELGENERATED,
- "labellabelGeneratedSuggestEdge" => MATCH_TYPE_LABELGENERATED,
- "labellabelGeneratedSuggestNgram" => MATCH_TYPE_LABELGENERATED,
- "notation" => "notation",
- "cui" => "cui",
- "semanticType" => "semanticType"
+ "resource_id" => "id",
+ MATCH_TYPE_PREFLABEL => MATCH_TYPE_PREFLABEL,
+ "prefLabelExact" => MATCH_TYPE_PREFLABEL,
+ "prefLabelSuggestEdge" => MATCH_TYPE_PREFLABEL,
+ "prefLabelSuggestNgram" => MATCH_TYPE_PREFLABEL,
+ MATCH_TYPE_SYNONYM => MATCH_TYPE_SYNONYM,
+ "synonymExact" => MATCH_TYPE_SYNONYM,
+ "synonymSuggestEdge" => MATCH_TYPE_SYNONYM,
+ "synonymSuggestNgram" => MATCH_TYPE_SYNONYM,
+ MATCH_TYPE_PROPERTY => MATCH_TYPE_PROPERTY,
+ MATCH_TYPE_LABEL => MATCH_TYPE_LABEL,
+ "labelExact" => MATCH_TYPE_LABEL,
+ "labelSuggestEdge" => MATCH_TYPE_LABEL,
+ "labelSuggestNgram" => MATCH_TYPE_LABEL,
+ MATCH_TYPE_LABELGENERATED => MATCH_TYPE_LABELGENERATED,
+ "labelGeneratedExact" => MATCH_TYPE_LABELGENERATED,
+ "labellabelGeneratedSuggestEdge" => MATCH_TYPE_LABELGENERATED,
+ "labellabelGeneratedSuggestNgram" => MATCH_TYPE_LABELGENERATED,
+ "notation" => "notation",
+ "cui" => "cui",
+ "semanticType" => "semanticType"
}
# list of fields that allow empty query text
QUERYLESS_FIELDS_PARAMS = {
- "ontologies" => nil,
- "notation" => "notation",
- "cui" => "cui",
- "semantic_types" => "semanticType",
- ONTOLOGY_TYPES_PARAM => "ontologyType",
- ALSO_SEARCH_PROVISIONAL_PARAM => nil,
- SUBTREE_ID_PARAM => nil
+ "ontologies" => nil,
+ "notation" => "notation",
+ "cui" => "cui",
+ "semantic_types" => "semanticType",
+ ONTOLOGY_TYPES_PARAM => "ontologyType",
+ ALSO_SEARCH_PROVISIONAL_PARAM => nil,
+ SUBTREE_ID_PARAM => nil
}
QUERYLESS_FIELDS_STR = QUERYLESS_FIELDS_PARAMS.values.compact.join(" ")
- def get_term_search_query(text, params={})
+ def get_term_search_query(text, params = {})
validate_params_solr_population(ALLOWED_INCLUDES_PARAMS)
sort = params.delete('sort')
# raise error if text is empty AND (none of the QUERYLESS_FIELDS_PARAMS has been passed
# OR either an exact match OR suggest search is being executed)
if text.nil? || text.strip.empty?
- if !QUERYLESS_FIELDS_PARAMS.keys.any? {|k| params.key?(k)} ||
- params[EXACT_MATCH_PARAM] == "true" ||
- params[SUGGEST_PARAM] == "true"
+ if !QUERYLESS_FIELDS_PARAMS.keys.any? { |k| params.key?(k) } ||
+ params[EXACT_MATCH_PARAM] == "true" ||
+ params[SUGGEST_PARAM] == "true"
raise error 400, "The search query must be provided via /search?q=[&page=&pagesize=]"
else
text = ''
@@ -82,7 +82,6 @@ def get_term_search_query(text, params={})
end
end
- query = ""
params["defType"] = "edismax"
params["stopwords"] = "true"
params["lowercaseOperators"] = "true"
@@ -94,19 +93,33 @@ def get_term_search_query(text, params={})
params["hl.simple.pre"] = MATCH_HTML_PRE
params["hl.simple.post"] = MATCH_HTML_POST
- # text.gsub!(/\*+$/, '')
-
if params[EXACT_MATCH_PARAM] == "true"
query = "\"#{solr_escape(text)}\""
- params["qf"] = "resource_id^20 prefLabelExact^10 synonymExact #{QUERYLESS_FIELDS_STR}"
- params["hl.fl"] = "resource_id prefLabelExact synonymExact #{QUERYLESS_FIELDS_STR}"
+ params["qf"] = "resource_id^20 #{add_lang_suffix('prefLabel', '^10')} #{add_lang_suffix('synonymExact')} #{QUERYLESS_FIELDS_STR}"
+ params["hl.fl"] = "resource_id #{add_lang_suffix('prefLabelExact')} #{add_lang_suffix('synonymExact')} #{QUERYLESS_FIELDS_STR}"
elsif params[SUGGEST_PARAM] == "true" || text[-1] == '*'
text.gsub!(/\*+$/, '')
query = "\"#{solr_escape(text)}\""
params["qt"] = "/suggest_ncbo"
- params["qf"] = "prefLabelExact^100 prefLabelSuggestEdge^50 synonymSuggestEdge^10 prefLabelSuggestNgram synonymSuggestNgram resource_id #{QUERYLESS_FIELDS_STR}"
- params["pf"] = "prefLabelSuggest^50"
- params["hl.fl"] = "prefLabelExact prefLabelSuggestEdge synonymSuggestEdge prefLabelSuggestNgram synonymSuggestNgram resource_id #{QUERYLESS_FIELDS_STR}"
+ params["qf"] = [
+ add_lang_suffix('prefLabelExact', '^100'),
+ add_lang_suffix('prefLabelSuggestEdge', '^50'),
+ add_lang_suffix('synonymSuggestEdge', '^10'),
+ add_lang_suffix('prefLabelSuggestNgram'),
+ add_lang_suffix('synonymSuggestNgram'),
+ "resource_id #{QUERYLESS_FIELDS_STR}"
+ ].join(' ')
+
+ params["pf"] = add_lang_suffix('prefLabelSuggest', '^50')
+
+ params["hl.fl"] = [
+ add_lang_suffix('prefLabelExact'),
+ add_lang_suffix('prefLabelSuggestEdge'),
+ add_lang_suffix('synonymSuggestEdge'),
+ add_lang_suffix('prefLabelSuggestNgram'),
+ add_lang_suffix('synonymSuggestNgram'),
+ "resource_id #{QUERYLESS_FIELDS_STR}"
+ ].join(' ')
else
if text.strip.empty?
query = '*'
@@ -114,9 +127,19 @@ def get_term_search_query(text, params={})
query = solr_escape(text)
end
- params["qf"] = "resource_id^100 prefLabelExact^90 prefLabel^70 synonymExact^50 synonym^10 #{QUERYLESS_FIELDS_STR}"
+ params["qf"] = [
+ "resource_id^100",
+ add_lang_suffix('prefLabelExact', '^90'),
+ add_lang_suffix('prefLabel', '^70'),
+ add_lang_suffix('synonymExact', '^50'),
+ add_lang_suffix('synonym', '^10'),
+ QUERYLESS_FIELDS_STR
+ ].join(' ')
+
params["qf"] << " property" if params[INCLUDE_PROPERTIES_PARAM] == "true"
- params["hl.fl"] = "resource_id prefLabelExact prefLabel synonymExact synonym #{QUERYLESS_FIELDS_STR}"
+
+ params["hl.fl"] = "resource_id #{add_lang_suffix('prefLabelExact')} #{ add_lang_suffix('prefLabel')} #{add_lang_suffix('synonymExact')} #{add_lang_suffix('synonym')} #{QUERYLESS_FIELDS_STR}"
+
params["hl.fl"] = "#{params["hl.fl"]} property" if params[INCLUDE_PROPERTIES_PARAM] == "true"
end
@@ -218,6 +241,73 @@ def add_matched_fields(solr_response, default_match)
solr_response["match_types"] = all_matches
end
+ def portal_language
+ Goo.main_languages.first
+ end
+
+ def request_languages
+ lang = params['lang'] || params['language']
+
+ return [portal_language] if lang.blank?
+
+ lang.split(',')
+ end
+
+ def request_multiple_languages?
+ request_languages.size > 1 || request_all_languages?
+ end
+
+ def request_languages?
+ !(params['lang'] || params['language']).blank?
+ end
+
+ def request_all_languages?
+ request_languages.first.eql?('all')
+ end
+
+ def add_lang_suffix(attr, rank = "")
+ if request_languages? && !request_all_languages?
+ languages = request_languages
+ languages.map { |lang| "#{attr}_#{lang}#{rank}" }.join(' ')
+ else
+ "#{attr}#{rank}"
+ end
+ end
+
+ def pref_label_by_language(doc)
+ Array(doc["prefLabel_#{request_languages.first}".to_sym]).first || Array(doc["prefLabel_none".to_sym]).first || Array(doc[:prefLabel]).first
+ end
+
+ def filter_attrs_by_language(doc)
+ lang_values = {}
+ doc.each do |k, v|
+ attr, lang = k.to_s.split('_')
+
+ next if [:ontology_rank, :resource_id, :resource_model].include?(k)
+ next if lang.blank? || attr.blank?
+ next if !(request_languages + %w[none]).include?(lang) && !request_all_languages?
+
+ lang_values[attr.to_sym] ||= {}
+ lang_values[attr.to_sym][lang] ||= []
+ lang_values[attr.to_sym][lang] += v
+ end
+
+ if request_multiple_languages?
+ lang_values.each do |k, lang_vals|
+ doc[k] = lang_vals
+ end
+ else
+ lang_values.each do |k, lang_vals|
+ doc[k] = lang_vals.map { |l, v| l.eql?('none') ? nil : v }.compact.flatten + Array(lang_vals['none'])
+ end
+
+ doc[:prefLabel] = pref_label_by_language(doc)
+ end
+
+ doc
+ end
+
+
# see https://github.com/rsolr/rsolr/issues/101
# and https://github.com/projecthydra/active_fedora/commit/75b4afb248ee61d9edb56911b2ef51f30f1ce17f
#
@@ -330,7 +420,7 @@ def populate_classes_from_search(classes, ontology_acronyms=nil)
params["fq"] << " AND #{get_quoted_field_query_param(class_ids, "OR", "resource_id")}"
params["rows"] = 99999
# Replace fake query with wildcard
- resp = LinkedData::Models::Class.search("*:*", params)
+ resp = LinkedData::Models::Class.submit_search_query("*:*", params)
classes_hash = {}
resp["response"]["docs"].each do |doc|
@@ -345,6 +435,7 @@ def populate_classes_from_search(classes, ontology_acronyms=nil)
doc[:submission] = old_class.submission
doc[:properties] = MultiJson.load(doc.delete(:propertyRaw)) if include_param_contains?(:properties)
instance = LinkedData::Models::Class.read_only(doc)
+ instance.prefLabel = pref_label_by_language(doc)
classes_hash[ont_uri_class_uri] = instance
end
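
add_lang_suffix expands each Solr field into one field per requested language when a lang parameter is present. A minimal sketch, assuming the request carried ?lang=fr,en (the languages keyword argument stands in for request_languages):

    def add_lang_suffix(attr, rank = '', languages: %w[fr en])
      languages.map { |lang| "#{attr}_#{lang}#{rank}" }.join(' ')
    end

    add_lang_suffix('prefLabelExact', '^100')
    # => "prefLabelExact_fr^100 prefLabelExact_en^100"
    # Without a lang parameter the helper falls back to the plain field, e.g. "prefLabelExact^100".
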
diff --git a/helpers/submission_helper.rb b/helpers/submission_helper.rb
new file mode 100644
index 00000000..b79737d0
--- /dev/null
+++ b/helpers/submission_helper.rb
@@ -0,0 +1,75 @@
+require 'sinatra/base'
+
+module Sinatra
+ module Helpers
+ module SubmissionHelper
+ def submission_include_params
+ # When all metadata is requested we use bring_remaining on each submission: slower, but the most reliable way to retrieve all attributes
+ includes = OntologySubmission.goo_attrs_to_load(includes_param)
+ if includes.find{|v| v.is_a?(Hash) && v.keys.include?(:ontology)}
+ includes << {:ontology=>[:administeredBy, :acronym, :name, :viewingRestriction, :group, :hasDomain,:notes, :reviews, :projects,:acl, :viewOf]}
+ end
+
+ if includes.find{|v| v.is_a?(Hash) && v.keys.include?(:contact)}
+ includes << {:contact=>[:name, :email]}
+ end
+
+ if includes.find{|v| v.is_a?(Hash) && v.keys.include?(:metrics)}
+ includes << { metrics: [:maxChildCount, :properties, :classesWithMoreThan25Children,
+ :classesWithOneChild, :individuals, :maxDepth, :classes,
+ :classesWithNoDefinition, :averageChildCount, :numberOfAxioms,
+ :entities]}
+ end
+
+ includes
+ end
+
+ def submission_attributes_all
+ out = [LinkedData::Models::OntologySubmission.embed_values_hash]
+ out << {:contact=>[:name, :email]}
+ out << {:ontology=>[:acronym, :name, :administeredBy, :group, :viewingRestriction, :doNotUpdate, :flat,
+ :hasDomain, :summaryOnly, :acl, :viewOf, :ontologyType]}
+
+ out
+ end
+
+ def retrieve_submissions(options)
+ status = (options[:status] || "RDF").to_s.upcase
+ status = "RDF" if status.eql?("READY")
+ ontology_acronym = options[:ontology]
+ any = status.eql?("ANY")
+ include_views = options[:also_include_views] || false
+ includes, page, size, order_by, _ = settings_params(LinkedData::Models::OntologySubmission)
+ includes << :submissionStatus unless includes.include?(:submissionStatus)
+
+ submissions_query = LinkedData::Models::OntologySubmission
+ submissions_query = submissions_query.where(ontology: [acronym: ontology_acronym]) if ontology_acronym
+
+ if any
+ submissions_query = submissions_query.where unless ontology_acronym
+ else
+ submissions_query = submissions_query.where({ submissionStatus: [code: status] })
+ end
+
+ submissions_query = apply_submission_filters(submissions_query)
+ submissions_query = submissions_query.filter(Goo::Filter.new(ontology: [:viewOf]).unbound) unless include_views
+ submissions_query = submissions_query.filter(filter) if filter?
+
+
+ submissions = submissions_query.include(submission_include_params)
+ if page?
+ submissions.page(page, size).all
+ else
+ submissions.to_a
+ end
+ end
+
+ def include_ready?(options)
+ options[:status] && options[:status].to_s.upcase.eql?("READY")
+ end
+
+ end
+ end
+end
+
+helpers Sinatra::Helpers::SubmissionHelper
\ No newline at end of file
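
retrieve_submissions combines the status/view handling above with apply_submission_filters, so a route only has to forward its options. A hedged sketch of how a controller might call it (the route shape and the reply helper are assumptions, not shown in this diff):

    # GET /submissions?status=RDF&group=RICE&order_by=creationDate&page=1&pagesize=25
    get '/submissions' do
      # page? and settings_params read pagination straight from params
      reply retrieve_submissions(status: params[:status],
                                 also_include_views: params[:also_include_views])
    end
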
diff --git a/helpers/users_helper.rb b/helpers/users_helper.rb
index 5d4266c1..a9a14d30 100644
--- a/helpers/users_helper.rb
+++ b/helpers/users_helper.rb
@@ -17,6 +17,53 @@ def filter_for_user_onts(obj)
obj
end
+
+ def send_reset_token(email, username)
+ user = LinkedData::Models::User.where(email: email, username: username).include(LinkedData::Models::User.attributes).first
+ error 404, "User not found" unless user
+ reset_token = token(36)
+ user.resetToken = reset_token
+
+ user.save(override_security: true)
+ LinkedData::Utils::Notifications.reset_password(user, reset_token)
+ user
+ end
+
+ def token(len)
+ chars = ("a".."z").to_a + ("A".."Z").to_a + ("1".."9").to_a
+ token = ""
+ len.times { token << chars[rand(chars.size)] } # rand(chars.size - 1) could never pick the last character
+ token
+ end
+
+ def reset_password(email, username, token)
+ user = LinkedData::Models::User.where(email: email, username: username).include(User.goo_attrs_to_load(includes_param)).first
+
+ error 404, "User not found" unless user
+
+ user.show_apikey = true
+
+ [user, token.eql?(user.resetToken)]
+ end
+
+ def oauth_authenticate(params)
+ access_token = params["access_token"]
+ provider = params["token_provider"]
+ user = LinkedData::Models::User.oauth_authenticate(access_token, provider)
+ error 401, "Access token invalid"if user.nil?
+ user
+ end
+
+ def login_password_authenticate(params)
+ user_id = params["user"]
+ user_password = params["password"]
+ user = User.find(user_id).include(User.goo_attrs_to_load(includes_param) + [:passwordHash]).first
+ authenticated = false
+ authenticated = user.authenticate(user_password) unless user.nil?
+ error 401, "Username/password combination invalid" unless authenticated
+
+ user
+ end
end
end
end
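
The token helper hand-rolls its alphabet; Ruby's stdlib offers an equivalent. A sketch of a drop-in alternative (note it can also emit '0', which the hand-rolled alphabet omits):

    require 'securerandom'

    # Cryptographically secure [0-9a-zA-Z] token of the requested length.
    def token(len)
      SecureRandom.alphanumeric(len)
    end
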
diff --git a/lib/rack/content_negotiation.rb b/lib/rack/content_negotiation.rb
new file mode 100644
index 00000000..4c91da6a
--- /dev/null
+++ b/lib/rack/content_negotiation.rb
@@ -0,0 +1,131 @@
+module Rack
+ class ContentNegotiation
+ DEFAULT_CONTENT_TYPE = "application/n-triples" # N-Triples
+ VARY = { 'Vary' => 'Accept' }.freeze
+ ENDPOINTS_FILTER = %r{^/ontologies/[^/]+/resolve/[^/]+$} # API endpoints to which content negotiation applies
+
+ # @return [#call]
+ attr_reader :app
+
+ # @return [Hash{Symbol => String}]
+ attr_reader :options
+
+ ##
+ # @param [#call] app
+ # @param [Hash{Symbol => Object}] options
+ # Other options passed to writer.
+ # @option options [String] :default (DEFAULT_CONTENT_TYPE) Specific content type
+ # @option options [RDF::Format, #to_sym] :format Specific RDF writer format to use
+ def initialize(app, options = {})
+ @app, @options = app, options
+ @options[:default] = (@options[:default] || DEFAULT_CONTENT_TYPE).to_s
+ end
+
+ ##
+ # Handles a Rack protocol request.
+ # Parses Accept header to find appropriate mime-type and sets content_type accordingly.
+ #
+ # Inserts ordered content types into the environment as `ORDERED_CONTENT_TYPES` if an Accept header is present
+ #
+ # @param [Hash{String => String}] env
+ # @return [Array(Integer, Hash, #each)] Status, Headers and Body
+ # @see https://rubydoc.info/github/rack/rack/file/SPEC
+ def call(env)
+ if env['PATH_INFO'].match?(ENDPOINTS_FILTER)
+ if env.has_key?('HTTP_ACCEPT')
+ accepted_types = parse_accept_header(env['HTTP_ACCEPT'])
+ if !accepted_types.empty?
+ env["format"] = accepted_types.first
+ add_content_type_header(app.call(env), env["format"])
+ else
+ not_acceptable
+ end
+ else
+ env["format"] = options[:default]
+ add_content_type_header(app.call(env), env["format"])
+ end
+ else
+ app.call(env)
+ end
+ end
+
+ protected
+
+ # Parses an HTTP `Accept` header, returning an array of MIME content types ordered by precedence rules.
+ #
+ # @param [String, #to_s] header
+ # @return [Array] Array of content types sorted by precedence
+ # @see https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1
+ def parse_accept_header(header)
+ entries = header.to_s.split(',')
+ parsed_entries = entries.map { |entry| parse_accept_entry(entry) }
+ sorted_entries = parsed_entries.sort_by { |entry| entry.quality }.reverse
+ content_types = sorted_entries.map { |entry| entry.content_type }
+ content_types.flatten.compact
+ end
+
+
+
+ # Parses an individual entry from the Accept header.
+ #
+ # @param [String] entry An entry from the Accept header
+ # @return [Entry] An object representing the parsed entry
+ def parse_accept_entry(entry)
+ # Represents an entry parsed from the Accept header
+ entry_struct = Struct.new(:content_type, :quality, :wildcard_count, :param_count)
+ content_type, *params = entry.split(';').map(&:strip)
+ quality = 1.0 # Default quality
+ params.reject! do |param|
+ if param.start_with?('q=')
+ quality = param[2..-1].to_f
+ true
+ end
+ end
+ wildcard_count = content_type.count('*')
+ entry_struct.new(content_type, quality, wildcard_count, params.size)
+ end
+
+
+ ##
+ # Returns a content type appropriate for the given `media_range`,
+ # returns `nil` if `media_range` contains a wildcard subtype
+ # that is not mapped.
+ #
+ # @param [String, #to_s] media_range
+ # @return [String, nil]
+ def find_content_type_for_media_range(media_range)
+ case media_range.to_s
+ when '*/*', 'text/*'
+ options[:default]
+ when 'application/n-triples'
+ 'application/n-triples'
+ when 'text/turtle'
+ 'text/turtle'
+ when 'application/json', 'application/ld+json', 'application/*'
+ 'application/ld+json'
+ when 'text/xml', 'text/rdf+xml', 'application/rdf+xml', 'application/xml'
+ 'application/rdf+xml'
+ else
+ nil
+ end
+ end
+
+ ##
+ # Outputs an HTTP `406 Not Acceptable` response.
+ #
+ # @param [String, #to_s] message
+ # @return [Array(Integer, Hash, #each)]
+ def not_acceptable(message = nil)
+ code = 406
+ http_status = [code, Rack::Utils::HTTP_STATUS_CODES[code]].join(' ')
+ message = http_status + (message.nil? ? "\n" : " (#{message})\n")
+ [code, { 'Content-Type' => "text/plain" }.merge(VARY), [message]]
+ end
+
+ def add_content_type_header(response, type)
+ response[1] = response[1].merge(VARY).merge('Content-Type' => type)
+ response
+ end
+
+ end
+end
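
parse_accept_header orders media types by their q parameter before the first match wins. A trimmed, standalone sketch of that ordering:

    Entry = Struct.new(:content_type, :quality)

    def parse_accept_header(header)
      header.to_s.split(',').map do |raw|
        type, *params = raw.split(';').map(&:strip)
        q = params.find { |p| p.start_with?('q=') }
        Entry.new(type, q ? q[2..].to_f : 1.0) # a missing q defaults to 1.0
      end.sort_by(&:quality).reverse.map(&:content_type)
    end

    parse_accept_header('text/turtle;q=0.5, application/n-triples')
    # => ["application/n-triples", "text/turtle"]
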
diff --git a/lib/rack/request_lang.rb b/lib/rack/request_lang.rb
new file mode 100644
index 00000000..b2221041
--- /dev/null
+++ b/lib/rack/request_lang.rb
@@ -0,0 +1,16 @@
+module Rack
+ class RequestLang
+
+ def initialize(app = nil, options = {})
+ @app = app
+ end
+
+ def call(env)
+ r = Rack::Request.new(env)
+ lang = r.params["lang"] || r.params["language"]
+ lang = lang.upcase.to_sym if lang
+ RequestStore.store[:requested_lang] = lang
+ @app.call(env)
+ end
+ end
+end
\ No newline at end of file
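
Both middlewares are meant to sit in front of the Sinatra app. A sketch of the config.ru wiring (the actual rackup file is not part of this diff, so treat the ordering as an assumption):

    require_relative 'lib/rack/content_negotiation'
    require_relative 'lib/rack/request_lang'

    use Rack::ContentNegotiation, default: 'application/n-triples'
    use Rack::RequestLang # populates RequestStore.store[:requested_lang] per request
    run Sinatra::Application
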
diff --git a/models/simple_wrappers.rb b/models/simple_wrappers.rb
index e4097aff..f6aeb027 100644
--- a/models/simple_wrappers.rb
+++ b/models/simple_wrappers.rb
@@ -29,3 +29,5 @@
ProvisionalRelation = LinkedData::Models::ProvisionalRelation
SearchHelper = Sinatra::Helpers::SearchHelper
+
+Resource = LinkedData::Models::Resource
\ No newline at end of file
diff --git a/rakelib/docker_based_test.rake b/rakelib/docker_based_test.rake
new file mode 100644
index 00000000..52af504c
--- /dev/null
+++ b/rakelib/docker_based_test.rake
@@ -0,0 +1,120 @@
+# Rake tasks for running unit tests with backend services running as docker containers
+
+desc 'Run unit tests with docker based backend'
+namespace :test do
+ namespace :docker do
+ task :up do
+ system("docker compose up -d") || abort("Unable to start docker containers")
+ unless system("curl -sf http://localhost:8983/solr || exit 1")
+ printf("waiting for Solr container to initialize")
+ sec = 0
+ until system("curl -sf http://localhost:8983/solr || exit 1") do
+ sleep(1)
+ printf(".")
+ sec += 1
+ if sec > 30
+ abort(" Solr container hasn't initialized properly")
+ end
+ end
+ printf("\n")
+ end
+ end
+ task :down do
+ #system("docker compose --profile fs --profile ag stop")
+ #system("docker compose --profile fs --profile ag kill")
+ end
+ desc "run tests with docker AG backend"
+ task :ag do
+ ENV["GOO_BACKEND_NAME"]="allegrograph"
+ ENV["GOO_PORT"]="10035"
+ ENV["GOO_PATH_QUERY"]="/repositories/ontoportal_test"
+ ENV["GOO_PATH_DATA"]="/repositories/ontoportal_test/statements"
+ ENV["GOO_PATH_UPDATE"]="/repositories/ontoportal_test/statements"
+ ENV["COMPOSE_PROFILES"]="ag"
+ Rake::Task["test:docker:up"].invoke
+ # AG takes some time to start and create databases/accounts
+ # TODO: replace system curl command with native ruby code
+ unless system("curl -sf http://127.0.0.1:10035/repositories/ontoportal_test/status | grep -iqE '(^running|^lingering)' || exit 1")
+ printf("waiting for AllegroGraph container to initialize")
+ sec = 0
+ until system("curl -sf http://127.0.0.1:10035/repositories/ontoportal_test/status | grep -iqE '(^running|^lingering)' || exit 1") do
+ sleep(1)
+ printf(".")
+ sec += 1
+ end
+ end
+ puts
+ system("docker compose ps") # TODO: remove after GH actions troubleshooting is complete
+ Rake::Task["test"].invoke
+ Rake::Task["test:docker:down"].invoke
+ end
+
+ desc "run tests with docker 4store backend"
+ task :fs do
+ ENV["GOO_PORT"]="9000"
+ ENV["COMPOSE_PROFILES"]='fs'
+ Rake::Task["test:docker:up"].invoke
+ Rake::Task["test"].invoke
+ Rake::Task["test:docker:down"].invoke
+ end
+
+ desc "run tests with docker Virtuoso backend"
+ task :vo do
+ ENV["GOO_BACKEND_NAME"]="virtuoso"
+ ENV["GOO_PORT"]="8890"
+ ENV["GOO_PATH_QUERY"]="/sparql"
+ ENV["GOO_PATH_DATA"]="/sparql"
+ ENV["GOO_PATH_UPDATE"]="/sparql"
+ ENV["COMPOSE_PROFILES"]="vo"
+ Rake::Task["test:docker:up"].invoke
+ #
+ unless system("curl -sf http://localhost:8890/sparql || exit 1")
+ printf("waiting for Virtuoso container to initialize")
+ sec = 0
+ until system("curl -sf http://localhost:8890/sparql || exit 1") do
+ sleep(1)
+ printf(".")
+ sec += 1
+ if sec > 30
+ system("docker compose logs virtuoso-ut")
+ abort(" Virtuoso container hasn't initialized properly")
+ end
+ end
+ end
+ Rake::Task["test"].invoke
+ Rake::Task["test:docker:down"].invoke
+ end
+
+
+ desc "run tests with docker GraphDb backend"
+ task :gb do
+ ENV["GOO_BACKEND_NAME"]="graphdb"
+ ENV["GOO_PORT"]="7200"
+ ENV["GOO_PATH_QUERY"]="/repositories/ontoportal"
+ ENV["GOO_PATH_DATA"]="/repositories/ontoportal/statements"
+ ENV["GOO_PATH_UPDATE"]="/repositories/ontoportal/statements"
+ ENV["COMPOSE_PROFILES"]="gb"
+ Rake::Task["test:docker:up"].invoke
+
+ #system("docker compose cp ./test/data/graphdb-repo-config.ttl graphdb:/opt/graphdb/dist/configs/templates/graphdb-repo-config.ttl")
+ #system("docker compose cp ./test/data/graphdb-test-load.nt graphdb:/opt/graphdb/dist/configs/templates/graphdb-test-load.nt")
+ #system('docker compose exec graphdb sh -c "importrdf load -f -c /opt/graphdb/dist/configs/templates/graphdb-repo-config.ttl -m parallel /opt/graphdb/dist/configs/templates/graphdb-test-load.nt ;"')
+ unless system("curl -sf http://localhost:7200/repositories || exit 1")
+ printf("waiting for Graphdb container to initialize")
+ sec = 0
+ until system("curl -sf http://localhost:7200/repositories || exit 1") do
+ sleep(1)
+ printf(".")
+ sec += 1
+ if sec > 30
+ system("docker compose logs graphdb")
+ abort(" Graphdb container hasn't initialized properly")
+ end
+ end
+ end
+ Rake::Task["test"].invoke
+ Rake::Task["test:docker:down"].invoke
+ end
+
+ end
+end
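
Typical invocations, using the task names defined above:

    bundle exec rake test:docker:fs   # unit tests against 4store
    bundle exec rake test:docker:ag   # unit tests against AllegroGraph
    bundle exec rake test:docker:vo   # unit tests against Virtuoso
    bundle exec rake test:docker:gb   # unit tests against GraphDB
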
diff --git a/test/controllers/test_agents_controller.rb b/test/controllers/test_agents_controller.rb
new file mode 100644
index 00000000..658ef38b
--- /dev/null
+++ b/test/controllers/test_agents_controller.rb
@@ -0,0 +1,210 @@
+require_relative '../test_case'
+require "multi_json"
+
+class TestAgentsController < TestCase
+
+ def setup
+
+ @number_of_organizations = 6
+
+
+ @test_agents = 8.times.map do |i|
+ type = i < @number_of_organizations ? 'organization' : 'person'
+ _agent_data(type: type)
+ end
+ @agents = []
+ 2.times.map do
+ agents_tmp = [ _agent_data(type: 'organization'), _agent_data(type: 'organization'), _agent_data(type: 'person')]
+ agent = agents_tmp.last
+ agent[:affiliations] = [agents_tmp[0].stringify_keys, agents_tmp[1].stringify_keys]
+ _test_agent_creation(agent)
+ @agents = @agents + agents_tmp
+ end
+ end
+
+ def teardown
+ # Delete groups
+ _delete_agents
+ end
+
+ def test_all_agents
+ get '/agents?display=all&page=1'
+ assert last_response.ok?
+
+ created_agents = MultiJson.load(last_response.body)
+ @agents.each do |agent|
+ created_agent = created_agents["collection"].select{|x| x["name"].eql?(agent[:name])}.first
+ refute_nil created_agent
+ refute_nil created_agent["usages"]
+ assert_equal agent[:name], created_agent["name"]
+ assert_equal agent[:identifiers].size, created_agent["identifiers"].size
+ assert_equal agent[:identifiers].map{|x| x[:notation]}.sort, created_agent["identifiers"].map{|x| x['notation']}.sort
+ assert_equal agent[:affiliations].size, created_agent["affiliations"].size
+ assert_equal agent[:affiliations].map{|x| x["name"]}.sort, created_agent["affiliations"].map{|x| x['name']}.sort
+
+ end
+ end
+
+ def test_single_agent
+ @agents.each do |agent|
+ agent_obj = _find_agent(agent['name'])
+ get "/agents/#{agent_obj.id.to_s.split('/').last}"
+ assert last_response.ok?
+ agent_found = MultiJson.load(last_response.body)
+ assert_equal agent_obj.id.to_s, agent_found["id"]
+ end
+ end
+
+ def test_create_new_agent
+
+ ## Create Agent of type affiliation with no parent affiliation
+ agent = @test_agents[0]
+ created_agent = _test_agent_creation(agent)
+
+ ## Create Agent of type affiliation with an existing parent affiliation
+
+ agent = @test_agents[1]
+ agent[:affiliations] = [created_agent]
+
+ created_agent = _test_agent_creation(agent)
+
+ ## Create Agent of type affiliation with both an existing and a new parent affiliation
+ agent = @test_agents[3]
+ agent[:affiliations] = [created_agent, @test_agents[2].stringify_keys]
+ created_agent = _test_agent_creation(agent)
+
+ ## Create Agent of type Person with existing affiliations
+
+ agent = @test_agents[6]
+ agent[:affiliations] = created_agent["affiliations"]
+ _test_agent_creation(agent)
+
+ ## Create Agent of type Person with new (not yet existing) affiliations
+
+ agent = @test_agents[7]
+ agent[:affiliations] = [@test_agents[4].stringify_keys, @test_agents[5].stringify_keys]
+ _test_agent_creation(agent)
+
+ @agents = @agents + @test_agents
+ end
+
+
+ def test_new_agent_no_valid
+ agents_tmp = [ _agent_data(type: 'organization'), _agent_data(type: 'person'), _agent_data(type: 'person')]
+ agent = agents_tmp.last
+ agent[:affiliations] = [agents_tmp[0].stringify_keys, agents_tmp[1].stringify_keys]
+ post "/agents", MultiJson.dump(agent), "CONTENT_TYPE" => "application/json"
+ assert last_response.status == 400
+ end
+
+ def test_update_patch_agent
+
+ agents = [ _agent_data(type: 'organization'), _agent_data(type: 'organization'), _agent_data(type: 'person')]
+ agent = agents.last
+ agent[:affiliations] = [agents[0].stringify_keys, agents[1].stringify_keys]
+ agent = _test_agent_creation(agent)
+ @agents = @agents + agents
+ agent = LinkedData::Models::Agent.find(agent['id'].split('/').last).first
+ agent.bring_remaining
+
+
+ ## update identifiers
+ agent.identifiers.each{|i| i.bring_remaining}
+ new_identifiers = []
+ ## update an existent identifier
+ new_identifiers[0] = {
+ id: agent.identifiers[0].id.to_s,
+ schemaAgency: 'TEST ' + agent.identifiers[0].notation
+ }
+
+ new_identifiers[1] = {
+ id: agent.identifiers[1].id.to_s
+ }
+
+ ## update affiliation
+ agent.affiliations.each{|aff| aff.bring_remaining}
+ new_affiliations = []
+ ## update an existent affiliation
+ new_affiliations[0] = {
+ name: 'TEST new of ' + agent.affiliations[0].name,
+ id: agent.affiliations[0].id.to_s
+ }
+ ## create a new affiliation
+ new_affiliations[1] = _agent_data(type: 'organization')
+ new_affiliations[1][:name] = 'new affiliation'
+
+ new_values = {
+ name: 'new name ',
+ identifiers: new_identifiers,
+ affiliations: new_affiliations
+ }
+
+ patch "/agents/#{agent.id.split('/').last}", MultiJson.dump(new_values), "CONTENT_TYPE" => "application/json"
+ assert last_response.status == 204
+
+ get "/agents/#{agent.id.split('/').last}"
+ new_agent = MultiJson.load(last_response.body)
+ assert_equal 'new name ', new_agent["name"]
+
+ assert_equal new_identifiers.size, new_agent["identifiers"].size
+ assert_equal new_identifiers[0][:schemaAgency], new_agent["identifiers"].select{|x| x["id"].eql?(agent.identifiers[0].id.to_s)}.first["schemaAgency"]
+ assert_equal agent.identifiers[1].schemaAgency, new_agent["identifiers"].select{|x| x["id"].eql?(agent.identifiers[1].id.to_s)}.first["schemaAgency"]
+
+ assert_equal new_affiliations.size, new_agent["affiliations"].size
+ assert_equal new_affiliations[0][:name], new_agent["affiliations"].select{|x| x["id"].eql?(agent.affiliations[0].id.to_s)}.first["name"]
+ assert_nil new_agent["affiliations"].select{|x| x["id"].eql?(agent.affiliations[1].id.to_s)}.first
+ assert_equal new_affiliations[1][:name], new_agent["affiliations"].reject{|x| x["id"].eql?(agent.affiliations[0].id.to_s)}.first["name"]
+ end
+
+ def test_delete_agent
+ agent = @agents.delete_at(0)
+ agent_obj = _find_agent(agent['name'])
+ id = agent_obj.id.to_s.split('/').last
+ delete "/agents/#{id}"
+ assert last_response.status == 204
+
+ get "/agents/#{id}"
+ assert last_response.status == 404
+ end
+
+ private
+
+ def _agent_data(type: 'organization')
+ agent_data(type: type)
+ end
+
+ def _find_agent(name)
+ LinkedData::Models::Agent.where(name: name).first
+ end
+
+ def _delete_agents
+ @agents.each do |agent|
+ test_cat = _find_agent(agent[:name])
+ next if test_cat.nil?
+
+ test_cat.bring :identifiers
+ test_cat.identifiers.each { |i| i.delete }
+ test_cat.delete
+ end
+ end
+
+ def _test_agent_creation(agent)
+ post "/agents", MultiJson.dump(agent), "CONTENT_TYPE" => "application/json"
+
+ assert last_response.status == 201
+ created_agent = MultiJson.load(last_response.body)
+ assert created_agent["name"].eql?(agent[:name])
+
+ get "/agents/#{created_agent['id'].split('/').last}"
+ assert last_response.ok?
+
+ created_agent = MultiJson.load(last_response.body)
+ assert_equal agent[:name], created_agent["name"]
+ assert_equal agent[:identifiers].size, created_agent["identifiers"].size
+ assert_equal agent[:identifiers].map { |x| x[:notation] }.sort, created_agent["identifiers"].map { |x| x['notation'] }.sort
+
+ assert_equal agent[:affiliations].size, created_agent["affiliations"].size
+ assert_equal agent[:affiliations].map { |x| x["name"] }.sort, created_agent["affiliations"].map { |x| x['name'] }.sort
+ created_agent
+ end
+end
\ No newline at end of file
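
The assertions above imply the payload shape produced by _agent_data / agent_data. A hypothetical example document (field names beyond name, identifiers and affiliations are assumptions, not asserted by these tests):

    {
      agentType: 'person', # or 'organization'
      name: 'Jane Doe',
      identifiers: [{ notation: '0000-0001-2345-6789', schemaAgency: 'ORCID' }],
      affiliations: [] # organization hashes, as built in setup
    }
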
diff --git a/test/controllers/test_annotator_controller.rb b/test/controllers/test_annotator_controller.rb
index ffa65a97..947d474e 100644
--- a/test/controllers/test_annotator_controller.rb
+++ b/test/controllers/test_annotator_controller.rb
@@ -16,7 +16,12 @@ def self.before_suite
end
LinkedData::SampleData::Ontology.delete_ontologies_and_submissions
- @@ontologies = LinkedData::SampleData::Ontology.sample_owl_ontologies
+ @@ontologies = LinkedData::SampleData::Ontology.sample_owl_ontologies(process_submission: true,
+ process_options: {
+ process_rdf: true,
+ extract_metadata: false,
+ index_search: true
+ })
annotator = Annotator::Models::NcboAnnotator.new
annotator.init_redis_for_tests()
annotator.create_term_cache_from_ontologies(@@ontologies, false)
@@ -31,7 +36,7 @@ def test_annotate
get "/annotator", params
assert last_response.ok?
annotations = MultiJson.load(last_response.body)
- assert_equal(7, annotations.length)
+ assert_includes([7,6], annotations.length)
text = <
+
+
+
+
+
+
+
+ altération de l'ADN
+
+
+
+
+
+ XML
+
+ expected_result_2 = <<-XML
+
+
+
+
+
+
+ altération de l'ADN
+
+
+
+
+
+
+
+ XML
+
+
+ clean_xml = -> (x) { x.strip.gsub('/>', '').gsub('', '').gsub('<', '').gsub('>', '').split(' ').reject(&:empty?)}
+
+
+ a = result.gsub('\\"', '"')[1..-2].split("\\n").map{|x| clean_xml.call(x)}.flatten
+ b_1 = expected_result_1.split("\n").map{|x| clean_xml.call(x)}.flatten
+ b_2 = expected_result_2.split("\n").map{|x| clean_xml.call(x)}.flatten
+
+ assert_includes [b_1.sort, b_2.sort], a.sort
+ end
+
+ def test_dereference_resource_controller_ntriples
+ header 'Accept', 'application/n-triples'
+ get "/ontologies/#{@@graph}/resolve/#{@@uri}"
+ assert last_response.ok?
+
+ result = last_response.body
+ expected_result = <<-NTRIPLES
+ .
+ .
+ .
+ .
+ .
+ .
+ "alt\\u00E9rationdel'ADN"@fr .
+ .
+ NTRIPLES
+ a = result.gsub(' ', '').split("\n").reject(&:empty?)
+ b = expected_result.gsub(' ', '').split("\n").reject(&:empty?)
+ assert_equal b.sort, a.sort
+ end
+
+ def test_dereference_resource_controller_turtle
+ header 'Accept', 'text/turtle'
+ get "/ontologies/#{@@graph}/resolve/#{@@uri}"
+ assert last_response.ok?
+
+ result = last_response.body
+ expected_result = <<-TURTLE
+ @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+ @prefix ns0: .
+ @prefix owl: <http://www.w3.org/2002/07/owl#> .
+ @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
+
+ ns0:c_6496
+ a owl:NamedIndividual, skos:Concept ;
+ skos:broader ns0:c_a9d99f3a ;
+ skos:inScheme ns0:mt_65, ns0:thesaurusINRAE ;
+ skos:prefLabel "altération de l'ADN"@fr ;
+ skos:topConceptOf ns0:mt_65 .
+
+ ns0:mt_65
+ skos:hasTopConcept ns0:c_6496 .
+ TURTLE
+ a = result.gsub(' ', '').split("\n").reject(&:empty?)
+ b = expected_result.gsub(' ', '').split("\n").reject(&:empty?)
+
+ assert_equal b.sort, a.sort
+ end
+
+ private
+
+ def sort_nested_hash(hash)
+ sorted_hash = {}
+
+ hash.each do |key, value|
+ if value.is_a?(Hash)
+ sorted_hash[key] = sort_nested_hash(value)
+ elsif value.is_a?(Array)
+ sorted_hash[key] = value.map { |item| item.is_a?(Hash) ? sort_nested_hash(item) : item }.sort_by { |item| item.to_s }
+ else
+ sorted_hash[key] = value
+ end
+ end
+
+ sorted_hash.sort.to_h
+ end
+
+end
\ No newline at end of file
diff --git a/test/controllers/test_external_mappings_controller.rb b/test/controllers/test_external_mappings_controller.rb
index 6cfabf32..cb1f255f 100644
--- a/test/controllers/test_external_mappings_controller.rb
+++ b/test/controllers/test_external_mappings_controller.rb
@@ -12,8 +12,10 @@ def self.before_suite
ont.delete
end
end
+ # term indexing is needed
LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
process_submission: true,
+ process_options: {process_rdf: true, extract_metadata: false, index_search: true},
acronym: "BRO-TEST-MAP",
name: "BRO-TEST-MAP",
file_path: "./test/data/ontology_files/BRO_v3.2.owl",
@@ -22,6 +24,7 @@ def self.before_suite
})
LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
process_submission: true,
+ process_options: {process_rdf: true, extract_metadata: false},
acronym: "CNO-TEST-MAP",
name: "CNO-TEST-MAP",
file_path: "./test/data/ontology_files/CNO_05.owl",
@@ -30,6 +33,7 @@ def self.before_suite
})
LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
process_submission: true,
+ process_options: {process_rdf: true, extract_metadata: false},
acronym: "FAKE-TEST-MAP",
name: "FAKE-TEST-MAP",
file_path: "./test/data/ontology_files/fake_for_mappings.owl",
diff --git a/test/controllers/test_instances_controller.rb b/test/controllers/test_instances_controller.rb
index 9560c0b0..e4b0460b 100644
--- a/test/controllers/test_instances_controller.rb
+++ b/test/controllers/test_instances_controller.rb
@@ -5,6 +5,7 @@ class TestInstancesController < TestCase
def self.before_suite
LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
process_submission: true,
+ process_options: { process_rdf: true, extract_metadata: false, generate_missing_labels: false},
acronym: 'XCT-TEST-INST',
name: 'XCT-TEST-INST',
file_path: './test/data/ontology_files/XCTontologyvtemp2.owl',
@@ -13,9 +14,6 @@ def self.before_suite
})
end
- def self.after_suite
- LinkedData::SampleData::Ontology.delete_ontologies_and_submissions
- end
def test_first_default_page
ont = Ontology.find('XCT-TEST-INST-0').include(:acronym).first
@@ -52,6 +50,7 @@ def test_all_instance_pages
assert last_response.ok?
instance_count = instance_count + response['collection'].size
end while response['nextPage']
+
assert_equal 714, instance_count
# Next page should have no results.
diff --git a/test/controllers/test_mappings_controller.rb b/test/controllers/test_mappings_controller.rb
index 52c3975d..736606de 100644
--- a/test/controllers/test_mappings_controller.rb
+++ b/test/controllers/test_mappings_controller.rb
@@ -13,8 +13,10 @@ def self.before_suite
ont.delete
end
end
+ # indexing is needed
LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
process_submission: true,
+ process_options: { process_rdf: true, extract_metadata: false, index_search: true},
acronym: "BRO-TEST-MAP",
name: "BRO-TEST-MAP",
file_path: "./test/data/ontology_files/BRO_v3.2.owl",
@@ -23,6 +25,7 @@ def self.before_suite
})
LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
process_submission: true,
+ process_options: { process_rdf: true, extract_metadata: false, index_search: true},
acronym: "CNO-TEST-MAP",
name: "CNO-TEST-MAP",
file_path: "./test/data/ontology_files/CNO_05.owl",
@@ -31,6 +34,7 @@ def self.before_suite
})
LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
process_submission: true,
+ process_options: { process_rdf: true, extract_metadata: false, index_search: true},
acronym: "FAKE-TEST-MAP",
name: "FAKE-TEST-MAP",
file_path: "./test/data/ontology_files/fake_for_mappings.owl",
@@ -84,6 +88,7 @@ def test_mappings_file_load
commun_created_mappings_test(created, mapping_term_a, mapping_term_b, relations)
end
+
private
def commun_created_mappings_test(created, mapping_term_a, mapping_term_b, relations)
@@ -109,7 +114,7 @@ def commun_created_mappings_test(created, mapping_term_a, mapping_term_b, relati
assert last_response.ok?
mappings = MultiJson.load(last_response.body)
mappings = mappings["collection"]
- assert_equal 21, mappings.length
+ assert_includes [21,11], mappings.length
rest_count = 0
mappings.each do |x|
if x["process"] != nil
@@ -152,7 +157,7 @@ def mappings_for_ontology
assert mappings["prevPage"] == nil
assert mappings["nextPage"] == nil
- assert_equal 18, mappings["collection"].length
+ assert_includes [18,8], mappings["collection"].length
mappings = mappings["collection"]
mappings.each do |mapping|
@@ -195,7 +200,7 @@ def mappings_between_ontologies
assert mappings["prevPage"] == nil
assert mappings["nextPage"] == nil
- assert_equal 8, mappings["collection"].length
+ assert_includes [8,3], mappings["collection"].length
mappings = mappings["collection"]
mappings.each do |mapping|
assert mapping["classes"].length, 2
@@ -245,7 +250,7 @@ def mappings_with_display
get "/ontologies/#{ontology}/mappings?pagesize=#{pagesize}&page=#{page}&display=prefLabel"
assert last_response.ok?
mappings = MultiJson.load(last_response.body)
- assert mappings["collection"].all? { |m| m["classes"].all? { |c| c["prefLabel"].is_a?(String) && c["prefLabel"].length > 0 } }
+ assert mappings["collection"].all? { |m| m["classes"].all? { |c| c["prefLabel"].first.is_a?(String) && c["prefLabel"].first.length > 0 } }
def_count = 0
next_page = 1
@@ -419,4 +424,6 @@ def build_mappings_hash
end
[mappings, mapping_ont_a, mapping_ont_b, mapping_term_a, mapping_term_b, relations]
end
+
+
end
diff --git a/test/controllers/test_metrics_controller.rb b/test/controllers/test_metrics_controller.rb
index 1b8890a6..f5e3d5f3 100644
--- a/test/controllers/test_metrics_controller.rb
+++ b/test/controllers/test_metrics_controller.rb
@@ -7,22 +7,23 @@ def self.before_suite
puts "this test is going to wipe out all submission and ontologies. probably this is not a test env."
return
end
- OntologySubmission.all.each {|s| s.delete }
- Ontology.all.each {|o| o.delete }
- @@data = {"classes"=>486,
- "averageChildCount"=>5,
- "maxChildCount"=>65,
- "classesWithOneChild"=>14,
- "classesWithMoreThan25Children"=>2,
- "classesWithNoDefinition"=>11,
- "individuals"=>124,
- "properties"=>63,
- "maxDepth"=> 7 }
- @@options = {ont_count: 2,
- submission_count: 3,
- submissions_to_process: [1, 2],
- process_submission: true,
- random_submission_count: false}
+ OntologySubmission.all.each { |s| s.delete }
+ Ontology.all.each { |o| o.delete }
+ @@data = { "classes" => [486, 481], # depending if owlapi imports SKOS
+ "averageChildCount" => 5,
+ "maxChildCount" => 65,
+ "classesWithOneChild" => [13, 14],
+ "classesWithMoreThan25Children" => 2,
+ "classesWithNoDefinition" => [11, 10],
+ "individuals" => 124,
+ "properties" => [63, 45],
+ "maxDepth" => 7 }
+ @@options = { ont_count: 2,
+ submission_count: 3,
+ submissions_to_process: [1, 2],
+ process_submission: true,
+ process_options: { process_rdf: true, extract_metadata: false, run_metrics: true, index_properties: true },
+ random_submission_count: false }
LinkedData::SampleData::Ontology.create_ontologies_and_submissions(@@options)
end
@@ -31,11 +32,15 @@ def test_all_metrics
assert last_response.ok?
metrics = MultiJson.load(last_response.body)
assert metrics.length == 2
- #TODO: improve this test and test for two different ontologies
- #though this is tested in LD
+ # TODO: improve this test and test for two different ontologies
+ # though this is tested in LD
metrics.each do |m|
- @@data.each do |k,v|
- assert_equal(m[k], v)
+ @@data.each do |k, v|
+ if v.is_a?(Array)
+ assert_includes(v, m[k])
+ else
+ assert_equal(v, m[k])
+ end
end
assert m["@id"] == m["submission"].first + "/metrics"
end
@@ -46,10 +51,14 @@ def test_single_metrics
get "/ontologies/#{ontology}/metrics"
assert last_response.ok?
metrics = MultiJson.load(last_response.body)
-
- @@data.each do |k,v|
- assert_equal(metrics[k], v)
+ @@data.each do |k, v|
+ if v.is_a?(Array)
+ assert_includes(v, metrics[k])
+ else
+ assert_equal(v, metrics[k])
+ end
end
+
end
def test_metrics_with_submission_id
@@ -57,9 +66,15 @@ def test_metrics_with_submission_id
get "/ontologies/#{ontology}/submissions/1/metrics"
assert last_response.ok?
metrics = MultiJson.load(last_response.body)
- @@data.each do |k,v|
- assert_equal(metrics[k], v)
+
+ @@data.each do |k, v|
+ if v.is_a?(Array)
+ assert_includes(v, metrics[k])
+ else
+ assert_equal(v, metrics[k])
+ end
end
+
end
def test_metrics_with_submission_id_as_param
@@ -67,8 +82,12 @@ def test_metrics_with_submission_id_as_param
get "/ontologies/#{ontology}/metrics?submissionId=1"
assert last_response.ok?
metrics = MultiJson.load(last_response.body)
- @@data.each do |k,v|
- assert_equal(metrics[k], v)
+ @@data.each do |k, v|
+ if v.is_a?(Array)
+ assert_includes(v, metrics[k])
+ else
+ assert_equal(v, metrics[k])
+ end
end
end
@@ -78,18 +97,18 @@ def test_metrics_missing
get '/metrics/missing'
assert last_response.ok?
ontologies = MultiJson.load(last_response.body)
- assert_equal(0, ontologies.length, msg='Failure to detect 0 ontologies with missing metrics.')
+ assert_equal(0, ontologies.length, 'Failure to detect 0 ontologies with missing metrics.')
# create ontologies with latest submissions that have no metrics
delete_ontologies_and_submissions
- options = {ont_count: 2,
- submission_count: 1,
- process_submission: false,
- random_submission_count: false}
+ options = { ont_count: 2,
+ submission_count: 1,
+ process_submission: false,
+ random_submission_count: false }
create_ontologies_and_submissions(options)
get '/metrics/missing'
assert last_response.ok?
ontologies = MultiJson.load(last_response.body)
- assert_equal(2, ontologies.length, msg='Failure to detect 2 ontologies with missing metrics.')
+ assert_equal(2, ontologies.length, 'Failure to detect 2 ontologies with missing metrics.')
# recreate the before_suite data (this test might not be the last one to run in the suite)
delete_ontologies_and_submissions
create_ontologies_and_submissions(@@options)
diff --git a/test/controllers/test_ontologies_controller.rb b/test/controllers/test_ontologies_controller.rb
index 4713b699..681ab93b 100644
--- a/test/controllers/test_ontologies_controller.rb
+++ b/test/controllers/test_ontologies_controller.rb
@@ -185,7 +185,9 @@ def test_download_ontology
end
def test_download_ontology_csv
- num_onts_created, created_ont_acronyms, onts = create_ontologies_and_submissions(ont_count: 1, submission_count: 1, process_submission: true)
+ num_onts_created, created_ont_acronyms, onts = create_ontologies_and_submissions(ont_count: 1, submission_count: 1,
+ process_submission: true,
+ process_options: {process_rdf: true, extract_metadata: true, index_search: true})
ont = onts.first
acronym = created_ont_acronyms.first
@@ -217,13 +219,13 @@ def test_download_acl_only
begin
allowed_user = User.new({
username: "allowed",
- email: "test@example.org",
+ email: "test1@example.org",
password: "12345"
})
allowed_user.save
blocked_user = User.new({
username: "blocked",
- email: "test@example.org",
+ email: "test2@example.org",
password: "12345"
})
blocked_user.save
@@ -254,6 +256,72 @@ def test_download_acl_only
end
+ def test_detach_a_view
+ view = Ontology.find(@@view_acronym).include(:viewOf).first
+ ont = view.viewOf
+ refute_nil view
+ refute_nil ont
+
+ remove_view_of = {viewOf: ''}
+ patch "/ontologies/#{@@view_acronym}", MultiJson.dump(remove_view_of), "CONTENT_TYPE" => "application/json"
+
+ assert last_response.status == 204
+
+ get "/ontologies/#{@@view_acronym}"
+ onto = MultiJson.load(last_response.body)
+ assert_nil onto["viewOf"]
+
+
+ add_view_of = {viewOf: @@acronym}
+ patch "/ontologies/#{@@view_acronym}", MultiJson.dump(add_view_of), "CONTENT_TYPE" => "application/json"
+
+ assert last_response.status == 204
+
+ get "/ontologies/#{@@view_acronym}"
+ onto = MultiJson.load(last_response.body)
+ assert_equal onto["viewOf"], ont.id.to_s
+ end
+
+ def test_ontology_agents
+ ontologies_and_submissions = create_ontologies_and_submissions(ont_count: 2, submission_count: 1, process_submission: true)
+ submission1 = ontologies_and_submissions[2].first.submissions.last
+ submission2 = ontologies_and_submissions[2].last.submissions.last
+
+ ontology_acronym1 = ontologies_and_submissions[1].first
+ ontology_acronym2 = ontologies_and_submissions[1].last
+
+ submission1.bring(*OntologySubmission.agents_attrs)
+ submission2.bring(*OntologySubmission.agents_attrs)
+
+ # To ensure that we don't have duplicated agents in the response
+ agent_syphax = _create_agent(name: 'Syphax', type: 'person')
+
+ submission1.publisher = [_create_agent(name: 'Bilel', type: 'person'), agent_syphax]
+ submission1.hasContributor = [_create_agent(name: 'Clement', type: 'person'), agent_syphax]
+
+ submission2.publisher = [_create_agent(name: 'Imad', type: 'person'), _create_agent(name: 'Serine', type: 'person')]
+
+ submission1.save
+ submission2.save
+
+
+ get "/ontologies/#{ontology_acronym1}/agents"
+
+ response = MultiJson.load(last_response.body)
+ assert_equal response.length, 3
+ response.each do |r|
+ assert_includes ['Bilel', 'Syphax', 'Clement'], r["name"]
+ end
+
+ get "/ontologies/#{ontology_acronym2}/agents"
+
+ response = MultiJson.load(last_response.body)
+ assert_equal response.length, 2
+ response.each do |r|
+ assert_includes ['Imad', 'Serine'], r["name"]
+ end
+ end
+
private
def check400(response)
@@ -261,4 +329,14 @@ def check400(response)
assert MultiJson.load(response.body)["errors"]
end
+ def _create_agent(name: 'name', type: 'person')
+ agent = LinkedData::Models::Agent.new({
+ agentType: type,
+ name: name,
+ creator: User.find('tim').first
+ })
+ agent.save
+ agent
+ end
+
end
diff --git a/test/controllers/test_ontology_analytics_controller.rb b/test/controllers/test_ontology_analytics_controller.rb
index 67ab5529..7f2df926 100644
--- a/test/controllers/test_ontology_analytics_controller.rb
+++ b/test/controllers/test_ontology_analytics_controller.rb
@@ -203,7 +203,9 @@ def self.before_suite
puts " This test cannot be run because there #{db_size} redis entries (max #{MAX_TEST_REDIS_SIZE}). You are probably pointing to the wrong redis backend. "
return
end
- @@redis.set(LinkedData::Models::Ontology::ONTOLOGY_ANALYTICS_REDIS_FIELD, Marshal.dump(ANALYTICS_DATA))
+
+ stringy_keys = ANALYTICS_DATA.transform_values{|year| year.map{|k,v| [k.to_s , v.stringify_keys]}.to_h}
+ @@redis.set(LinkedData::Models::Ontology::ONTOLOGY_ANALYTICS_REDIS_FIELD, Marshal.dump(stringy_keys))
@@onts = {
"NCIT" => "NCIT Ontology",
"ONTOMA" => "ONTOMA Ontology",
diff --git a/test/controllers/test_ontology_submissions_controller.rb b/test/controllers/test_ontology_submissions_controller.rb
index 7500dce4..095d0339 100644
--- a/test/controllers/test_ontology_submissions_controller.rb
+++ b/test/controllers/test_ontology_submissions_controller.rb
@@ -18,7 +18,10 @@ def self._set_vars
administeredBy: "tim",
"file" => Rack::Test::UploadedFile.new(@@test_file, ""),
released: DateTime.now.to_s,
- contact: [{name: "test_name", email: "test@example.org"}]
+ contact: [{name: "test_name", email: "test3@example.org"}],
+ URI: 'https://test.com/test',
+ status: 'production',
+ description: 'ontology description'
}
@@status_uploaded = "UPLOADED"
@@status_rdf = "RDF"
@@ -36,6 +39,12 @@ def self._create_onts
ont.save
end
+ def setup
+ delete_ontologies_and_submissions
+ ont = Ontology.new(acronym: @@acronym, name: @@name, administeredBy: [@@user])
+ ont.save
+ end
+
def test_submissions_for_given_ontology
num_onts_created, created_ont_acronyms = create_ontologies_and_submissions(ont_count: 1)
ontology = created_ont_acronyms.first
@@ -156,13 +165,13 @@ def test_download_acl_only
begin
allowed_user = User.new({
username: "allowed",
- email: "test@example.org",
+ email: "test4@example.org",
password: "12345"
})
allowed_user.save
blocked_user = User.new({
username: "blocked",
- email: "test@example.org",
+ email: "test5@example.org",
password: "12345"
})
blocked_user.save
@@ -192,4 +201,281 @@ def test_download_acl_only
end
end
+ def test_submissions_pagination
+ num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: 2, submission_count: 2)
+
+ get "/submissions"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+
+ assert_equal 2, submissions.length
+
+
+ get "/submissions?page=1&pagesize=1"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ assert_equal 1, submissions["collection"].length
+ end
+
+ def test_submissions_pagination_filter
+ num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: 10, submission_count: 1)
+ group1 = LinkedData::Models::Group.new(acronym: 'group-1', name: "Test Group 1").save
+ group2 = LinkedData::Models::Group.new(acronym: 'group-2', name: "Test Group 2").save
+ category1 = LinkedData::Models::Category.new(acronym: 'category-1', name: "Test Category 1").save
+ category2 = LinkedData::Models::Category.new(acronym: 'category-2', name: "Test Category 2").save
+
+ ontologies1 = ontologies[0..5].each do |o|
+ o.bring_remaining
+ o.group = [group1]
+ o.hasDomain = [category1]
+ o.save
+ end
+
+ ontologies2 = ontologies[6..8].each do |o|
+ o.bring_remaining
+ o.group = [group2]
+ o.hasDomain = [category2]
+ o.save
+ end
+
+
+
+ # test filter by group and category
+ get "/submissions?page=1&pagesize=100&group=#{group1.acronym}"
+ assert last_response.ok?
+ assert_equal ontologies1.size, MultiJson.load(last_response.body)["collection"].length
+ get "/submissions?page=1&pagesize=100&group=#{group2.acronym}"
+ assert last_response.ok?
+ assert_equal ontologies2.size, MultiJson.load(last_response.body)["collection"].length
+ get "/submissions?page=1&pagesize=100&hasDomain=#{category1.acronym}"
+ assert last_response.ok?
+ assert_equal ontologies1.size, MultiJson.load(last_response.body)["collection"].length
+ get "/submissions?page=1&pagesize=100&hasDomain=#{category2.acronym}"
+ assert last_response.ok?
+ assert_equal ontologies2.size, MultiJson.load(last_response.body)["collection"].length
+ get "/submissions?page=1&pagesize=100&hasDomain=#{category2.acronym}&group=#{group1.acronym}"
+ assert last_response.ok?
+ assert_equal 0, MultiJson.load(last_response.body)["collection"].length
+ get "/submissions?page=1&pagesize=100&hasDomain=#{category2.acronym}&group=#{group2.acronym}"
+ assert last_response.ok?
+ assert_equal ontologies2.size, MultiJson.load(last_response.body)["collection"].length
+
+ ontologies3 = ontologies[9]
+ ontologies3.bring_remaining
+ ontologies3.group = [group1, group2]
+ ontologies3.hasDomain = [category1, category2]
+ ontologies3.name = "name search test"
+ ontologies3.save
+
+ # test search with acronym
+ [
+ [ 1, ontologies.first.acronym],
+ [ 1, ontologies.last.acronym],
+ [ontologies.size, 'TEST-ONT']
+ ].each do |count, acronym_search|
+ get "/submissions?page=1&pagesize=100&acronym=#{acronym_search}"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ assert_equal count, submissions["collection"].length
+ end
+
+
+ # test search with name
+ [
+ [ 1, ontologies.first.name],
+ [ 1, ontologies.last.name],
+ [ontologies.size - 1, 'TEST-ONT']
+ ].each do |count, name_search|
+ get "/submissions?page=1&pagesize=100&name=#{name_search}"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ assert_equal count, submissions["collection"].length
+ end
+
+ # test search with name and acronym
+ # search by name
+ get "/submissions?page=1&pagesize=100&name=search&acronym=search"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ assert_equal 1, submissions["collection"].length
+ # search by acronym
+ get "/submissions?page=1&pagesize=100&name=9&acronym=9"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ assert_equal 1, submissions["collection"].length
+ # search by acronym or name
+ get "/submissions?page=1&pagesize=100&name=search&acronym=8"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ assert_equal 2, submissions["collection"].length
+
+ ontologies.first.name = "sort by test"
+ ontologies.first.save
+ sub = ontologies.first.latest_submission(status: :any).bring_remaining
+ sub.status = 'retired'
+ sub.description = "234"
+ sub.creationDate = DateTime.yesterday.to_datetime
+ sub.hasOntologyLanguage = LinkedData::Models::OntologyFormat.find('SKOS').first
+ sub.save
+
+ #test search with sort
+ get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&order_by=ontology_name"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ refute_empty submissions["collection"]
+ assert_equal ontologies.map{|x| x.name}.sort, submissions["collection"].map{|x| x["ontology"]["name"]}
+
+ get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&order_by=creationDate"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ refute_empty submissions["collection"]
+ assert_equal ontologies.map{|x| x.latest_submission(status: :any).bring(:creationDate).creationDate}.sort.map(&:to_s), submissions["collection"].map{|x| x["creationDate"]}.reverse
+
+ # test search with format
+ get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&hasOntologyLanguage=SKOS"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ refute_empty submissions["collection"]
+ assert_equal 1, submissions["collection"].size
+
+ get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&hasOntologyLanguage=OWL"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ refute_empty submissions["collection"]
+ assert_equal ontologies.size-1 , submissions["collection"].size
+
+ # test ontology filter with submission filter attributes
+ get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&group=group-2&category=category-2&hasOntologyLanguage=OWL"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ refute_empty submissions["collection"]
+ assert_equal ontologies2.size + 1, submissions["collection"].size
+
+ # test ontology filter with status
+ get "/submissions?page=1&pagesize=100&status=retired"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ refute_empty submissions["collection"]
+ assert_equal 1, submissions["collection"].size
+
+ get "/submissions?page=1&pagesize=100&status=alpha,beta,production"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ refute_empty submissions["collection"]
+ assert_equal ontologies.size - 1, submissions["collection"].size
+
+ get "/submissions?page=1&pagesize=100&description=234&acronym=234&name=234"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ assert_equal 1, submissions["collection"].size
+ end
+
+ def test_submissions_default_includes
+ ontology_count = 5
+ num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: ontology_count, submission_count: 1, submissions_to_process: [])
+
+ submission_default_attributes = LinkedData::Models::OntologySubmission.hypermedia_settings[:serialize_default].map(&:to_s)
+
+ get("/submissions?display_links=false&display_context=false&include_status=ANY")
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+
+ assert_equal ontology_count, submissions.size
+ assert(submissions.all? { |sub| submission_default_attributes.eql?(submission_keys(sub)) })
+
+ get("/ontologies/#{created_ont_acronyms.first}/submissions?display_links=false&display_context=false")
+
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ assert_equal 1, submissions.size
+ assert(submissions.all? { |sub| submission_default_attributes.eql?(submission_keys(sub)) })
+ end
+
+ def test_submissions_all_includes
+ ontology_count = 5
+ num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: ontology_count, submission_count: 1, submissions_to_process: [])
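+ # every serializable attribute, with embedded (Hash) attributes flattened to their keys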
+ def submission_all_attributes
+ attrs = OntologySubmission.goo_attrs_to_load([:all])
+ embed_attrs = attrs.select { |x| x.is_a?(Hash) }.first
+
+ attrs.delete_if { |x| x.is_a?(Hash) }.map(&:to_s) + embed_attrs.keys.map(&:to_s)
+ end
+ get("/submissions?include=all&display_links=false&display_context=false")
+
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ assert_equal ontology_count, submissions.size
+
+ assert(submissions.all? { |sub| submission_all_attributes.sort.eql?(submission_keys(sub).sort) })
+ assert(submissions.all? { |sub| sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])) })
+
+ get("/ontologies/#{created_ont_acronyms.first}/submissions?include=all&display_links=false&display_context=false")
+
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ assert_equal 1, submissions.size
+
+ assert(submissions.all? { |sub| submission_all_attributes.sort.eql?(submission_keys(sub).sort) })
+ assert(submissions.all? { |sub| sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])) })
+
+ get("/ontologies/#{created_ont_acronyms.first}/latest_submission?include=all&display_links=false&display_context=false")
+ assert last_response.ok?
+ sub = MultiJson.load(last_response.body)
+
+ assert(submission_all_attributes.sort.eql?(submission_keys(sub).sort))
+ assert(sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])))
+
+ get("/ontologies/#{created_ont_acronyms.first}/submissions/1?include=all&display_links=false&display_context=false")
+ assert last_response.ok?
+ sub = MultiJson.load(last_response.body)
+
+ assert(submission_all_attributes.sort.eql?(submission_keys(sub).sort))
+ assert(sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])))
+ end
+
+ def test_submissions_custom_includes
+ ontology_count = 5
+ num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: ontology_count, submission_count: 1, submissions_to_process: [])
+ include = 'ontology,contact,submissionId'
+
+ get("/submissions?include=#{include}&display_links=false&display_context=false")
+
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ assert_equal ontology_count, submissions.size
+ assert(submissions.all? { |sub| include.split(',').eql?(submission_keys(sub)) })
+ assert(submissions.all? { |sub| sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])) })
+
+ get("/ontologies/#{created_ont_acronyms.first}/submissions?include=#{include}&display_links=false&display_context=false")
+
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ assert_equal 1, submissions.size
+ assert(submissions.all? { |sub| include.split(',').eql?(submission_keys(sub)) })
+ assert(submissions.all? { |sub| sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])) })
+
+ get("/ontologies/#{created_ont_acronyms.first}/latest_submission?include=#{include}&display_links=false&display_context=false")
+ assert last_response.ok?
+ sub = MultiJson.load(last_response.body)
+ assert(include.split(',').eql?(submission_keys(sub)))
+ assert(sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])))
+
+ get("/ontologies/#{created_ont_acronyms.first}/submissions/1?include=#{include}&display_links=false&display_context=false")
+ assert last_response.ok?
+ sub = MultiJson.load(last_response.body)
+ assert(include.split(',').eql?(submission_keys(sub)))
+ assert(sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])))
+ end
+
+ def test_submissions_param_include
+ skip('only for local development; regroups a set of related tests')
+ test_submissions_default_includes
+ test_submissions_all_includes
+ test_submissions_custom_includes
+ end
+
+ private
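+ # strip JSON-LD bookkeeping keys before comparing attribute lists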
+ def submission_keys(sub)
+ sub.to_hash.keys - %w[@id @type id]
+ end
end
diff --git a/test/controllers/test_properties_controller.rb b/test/controllers/test_properties_controller.rb
index 605ea385..96879083 100644
--- a/test/controllers/test_properties_controller.rb
+++ b/test/controllers/test_properties_controller.rb
@@ -5,6 +5,7 @@ class TestPropertiesController < TestCase
def self.before_suite
count, acronyms, bro = LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
process_submission: true,
+ process_options:{process_rdf: true, extract_metadata: false},
acronym: "BROSEARCHTEST",
name: "BRO Search Test",
file_path: "./test/data/ontology_files/BRO_v3.2.owl",
@@ -15,6 +16,7 @@ def self.before_suite
count, acronyms, mccl = LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
process_submission: true,
+ process_options:{process_rdf: true, extract_metadata: true},
acronym: "MCCLSEARCHTEST",
name: "MCCL Search Test",
file_path: "./test/data/ontology_files/CellLine_OWL_BioPortal_v1.0.owl",
@@ -33,7 +35,7 @@ def test_properties
get "/ontologies/#{@@acronyms.first}/properties"
assert last_response.ok?
results = MultiJson.load(last_response.body)
- assert_equal 85, results.length
+ assert_equal 81, results.length
get "/ontologies/#{@@acronyms.last}/properties"
assert last_response.ok?
@@ -57,18 +59,19 @@ def test_property_roots
get "/ontologies/#{@@acronyms.first}/properties/roots"
assert last_response.ok?
pr = MultiJson.load(last_response.body)
- assert_equal 62, pr.length
+ assert_equal 58, pr.length
# count object properties
opr = pr.select { |p| p["@type"] == "http://www.w3.org/2002/07/owl#ObjectProperty" }
- assert_equal 18, opr.length
+ assert_includes [18, 13], opr.length
# count datatype properties
dpr = pr.select { |p| p["@type"] == "http://www.w3.org/2002/07/owl#DatatypeProperty" }
- assert_equal 32, dpr.length
+ assert_includes [32, 31], dpr.length
# count annotation properties
apr = pr.select { |p| p["@type"] == "http://www.w3.org/2002/07/owl#AnnotationProperty" }
- assert_equal 12, apr.length
+ assert_includes [12, 8], apr.length
# check for non-root properties
+
assert_empty pr.select { |p| ["http://www.w3.org/2004/02/skos/core#broaderTransitive",
"http://www.w3.org/2004/02/skos/core#topConceptOf",
"http://www.w3.org/2004/02/skos/core#relatedMatch",
@@ -98,10 +101,14 @@ def test_property_roots
end
def test_property_tree
+
+ get "/ontologies/#{@@acronyms.first}/properties/http%3A%2F%2Fwww.w3.org%2F2004%2F02%2Fskos%2Fcore%23topConceptOf"
+ return unless last_response.ok? # depends on whether OWLAPI imports SKOS
+
get "/ontologies/#{@@acronyms.first}/properties/http%3A%2F%2Fwww.w3.org%2F2004%2F02%2Fskos%2Fcore%23topConceptOf/tree"
assert last_response.ok?
pr = MultiJson.load(last_response.body)
- assert_equal 62, pr.length
+ assert_equal 58, pr.length
num_found = 0
pr.each do |p|
@@ -129,6 +136,10 @@ def test_property_tree
end
def test_property_ancestors
+
+ get "/ontologies/#{@@acronyms.first}/properties/http%3A%2F%2Fwww.w3.org%2F2004%2F02%2Fskos%2Fcore%23exactMatch"
+ return unless last_response.ok?
+
get "/ontologies/#{@@acronyms.first}/properties/http%3A%2F%2Fwww.w3.org%2F2004%2F02%2Fskos%2Fcore%23exactMatch/ancestors"
assert last_response.ok?
an = MultiJson.load(last_response.body)
@@ -143,6 +154,9 @@ def test_property_ancestors
end
def test_property_descendants
+ get "/ontologies/#{@@acronyms.first}/properties/http%3A%2F%2Fwww.w3.org%2F2004%2F02%2Fskos%2Fcore%23note"
+ return unless last_response.ok? # depends on whether OWLAPI imports SKOS
+
get "/ontologies/#{@@acronyms.first}/properties/http%3A%2F%2Fwww.w3.org%2F2004%2F02%2Fskos%2Fcore%23note/descendants"
assert last_response.ok?
dn = MultiJson.load(last_response.body)
@@ -164,6 +178,9 @@ def test_property_descendants
end
def test_property_parents
+ get "/ontologies/#{@@acronyms.first}/properties/http%3A%2F%2Fwww.w3.org%2F2004%2F02%2Fskos%2Fcore%23changeNote"
+ return unless last_response.ok? # depends on whether OWLAPI imports SKOS
+
get "/ontologies/#{@@acronyms.first}/properties/http%3A%2F%2Fwww.w3.org%2F2004%2F02%2Fskos%2Fcore%23changeNote/parents"
assert last_response.ok?
pr = MultiJson.load(last_response.body)
@@ -189,6 +206,9 @@ def test_property_children
ch = MultiJson.load(last_response.body)
assert_empty ch
+ get "/ontologies/#{@@acronyms.first}/properties/http%3A%2F%2Fwww.w3.org%2F2004%2F02%2Fskos%2Fcore%23semanticRelation"
+ return unless last_response.ok? # depends on whether OWLAPI imports SKOS
+
get "/ontologies/#{@@acronyms.first}/properties/http%3A%2F%2Fwww.w3.org%2F2004%2F02%2Fskos%2Fcore%23semanticRelation/children"
assert last_response.ok?
ch = MultiJson.load(last_response.body)
diff --git a/test/controllers/test_properties_search_controller.rb b/test/controllers/test_properties_search_controller.rb
index f93a90a1..6c99fc40 100644
--- a/test/controllers/test_properties_search_controller.rb
+++ b/test/controllers/test_properties_search_controller.rb
@@ -5,6 +5,7 @@ class TestPropertiesSearchController < TestCase
def self.before_suite
count, acronyms, bro = LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
process_submission: true,
+ process_options:{process_rdf: true, extract_metadata: false, index_properties: true},
acronym: "BROSEARCHTEST",
name: "BRO Search Test",
file_path: "./test/data/ontology_files/BRO_v3.2.owl",
@@ -15,6 +16,7 @@ def self.before_suite
count, acronyms, mccl = LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
process_submission: true,
+ process_options:{process_rdf: true, extract_metadata: false, index_properties: true},
acronym: "MCCLSEARCHTEST",
name: "MCCL Search Test",
file_path: "./test/data/ontology_files/CellLine_OWL_BioPortal_v1.0.owl",
@@ -26,8 +28,8 @@ def self.before_suite
def self.after_suite
LinkedData::SampleData::Ontology.delete_ontologies_and_submissions
- LinkedData::Models::Ontology.indexClear(:property)
- LinkedData::Models::Ontology.indexCommit(nil, :property)
+ LinkedData::Models::OntologyProperty.indexClear
+ LinkedData::Models::OntologyProperty.indexCommit
end
def test_property_search
@@ -55,7 +57,7 @@ def test_search_filters
get '/property_search?q=has'
assert last_response.ok?
results = MultiJson.load(last_response.body)
- assert_equal 17, results["collection"].length
+ assert_includes [17, 4], results["collection"].length # depends on whether OWLAPI imports SKOS
get '/property_search?q=has&ontologies=MCCLSEARCHTEST-0'
assert last_response.ok?
diff --git a/test/controllers/test_recommender_controller.rb b/test/controllers/test_recommender_controller.rb
index 29caf28c..58d6d942 100644
--- a/test/controllers/test_recommender_controller.rb
+++ b/test/controllers/test_recommender_controller.rb
@@ -14,7 +14,7 @@ def self.before_suite
@@redis.del(mappings)
end
LinkedData::SampleData::Ontology.delete_ontologies_and_submissions
- @@ontologies = LinkedData::SampleData::Ontology.sample_owl_ontologies
+ @@ontologies = LinkedData::SampleData::Ontology.sample_owl_ontologies(process_submission: true)
annotator = Annotator::Models::NcboAnnotator.new
annotator.init_redis_for_tests()
annotator.create_term_cache_from_ontologies(@@ontologies, false)
diff --git a/test/controllers/test_recommender_v1_controller.rb b/test/controllers/test_recommender_v1_controller.rb
index 7b14a63d..3ac4862d 100644
--- a/test/controllers/test_recommender_v1_controller.rb
+++ b/test/controllers/test_recommender_v1_controller.rb
@@ -1,10 +1,10 @@
require_relative '../test_case'
-class TestRecommenderController < TestCase
+class TestRecommenderV1Controller < TestCase
def self.before_suite
LinkedData::SampleData::Ontology.delete_ontologies_and_submissions
- @@ontologies = LinkedData::SampleData::Ontology.sample_owl_ontologies
+ @@ontologies = LinkedData::SampleData::Ontology.sample_owl_ontologies(process_submission: true)
@@text = < "submissionAcronym:BROSEARCHTEST-0", :start => 0, :rows => 80})
+ refute_equal 0, res["response"]["numFound"]
+ refute_nil res["response"]["docs"].select{|doc| doc["resource_id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+
+
+ get "/search?q=Activit%C3%A9&ontologies=BROSEARCHTEST-0&lang=fr"
+ res = MultiJson.load(last_response.body)
+ refute_equal 0, res["totalCount"]
+ doc = res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+ refute_nil doc
+ assert_equal "Activité", doc["prefLabel"]
+
+
+ get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=en"
+ res = MultiJson.load(last_response.body)
+ refute_equal 0, res["totalCount"]
+ doc = res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+ refute_nil doc
+ assert_equal "ActivityEnglish", doc["prefLabel"]
+
+
+ get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=fr,es"
+ res = MultiJson.load(last_response.body)
+ assert_equal 0, res["totalCount"]
+
+ get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=en,es"
+ res = MultiJson.load(last_response.body)
+ refute_equal 0, res["totalCount"]
+ doc = res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+ refute_nil doc
+ expected_pref_label = {"none"=>["Activity"], "en"=>["ActivityEnglish"]}
+ assert_equal expected_pref_label, doc["prefLabel"]
+
+ get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=all"
+ res = MultiJson.load(last_response.body)
+ refute_equal 0, res["totalCount"]
+ doc = res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+ refute_nil doc
+ expected_pref_label = {"none"=>["Activity"], "en"=>["ActivityEnglish"], "fr"=>["Activité"]}
+ assert_equal expected_pref_label, doc["prefLabel"]
+
+ get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=fr&require_exact_match=true"
+ res = MultiJson.load(last_response.body)
+ assert_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+
+ get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=en&require_exact_match=true"
+ res = MultiJson.load(last_response.body)
+ refute_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+
+ get "/search?q=Activity&ontologies=BROSEARCHTEST-0&lang=en&require_exact_match=true"
+ res = MultiJson.load(last_response.body)
+ assert_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+
+ get "/search?q=Activit%C3%A9&ontologies=BROSEARCHTEST-0&lang=fr&require_exact_match=true"
+ res = MultiJson.load(last_response.body)
+ refute_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+
+ end
+
end
diff --git a/test/controllers/test_search_models_controller.rb b/test/controllers/test_search_models_controller.rb
new file mode 100644
index 00000000..233c7bc4
--- /dev/null
+++ b/test/controllers/test_search_models_controller.rb
@@ -0,0 +1,471 @@
+require_relative '../test_case'
+
+class TestSearchModelsController < TestCase
+
+ def self.after_suite
+ LinkedData::SampleData::Ontology.delete_ontologies_and_submissions
+ LinkedData::Models::Ontology.indexClear
+ LinkedData::Models::Agent.indexClear
+ LinkedData::Models::Class.indexClear
+ LinkedData::Models::OntologyProperty.indexClear
+ Goo.init_search_connection(:ontology_data)
+ end
+
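+ # reuse after_suite to clear every search collection so each test starts from a clean index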
+ def setup
+ self.class.after_suite
+ end
+
+ def test_show_all_collection
+ get '/admin/search/collections'
+ assert last_response.ok?
+ res = MultiJson.load(last_response.body)
+ array = %w[agents_metadata ontology_data ontology_metadata prop_search_core1 term_search_core1]
+ assert_equal res["collections"].sort , array.sort
+ end
+
+ def test_collection_schema
+ get '/admin/search/collections'
+ assert last_response.ok?
+ res = MultiJson.load(last_response.body)
+ collection = res["collections"].first
+ refute_nil collection
+ get "/admin/search/collections/#{collection}/schema"
+ assert last_response.ok?
+ res = MultiJson.load(last_response.body)
+ fields = res["fields"].map { |x| x["name"] }
+ assert_includes fields, 'id'
+ assert_includes fields, 'resource_id'
+ assert_includes fields, 'resource_model'
+ end
+
+ def test_collection_search
+
+ count, acronyms, bro = LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
+ process_submission: false,
+ acronym: "BROSEARCHTEST",
+ name: "BRO Search Test",
+ file_path: "./test/data/ontology_files/BRO_v3.2.owl",
+ ont_count: 1,
+ submission_count: 1,
+ ontology_type: "VALUE_SET_COLLECTION"
+ })
+ collection = 'ontology_metadata'
+ post "/admin/search/collections/#{collection}/search", {q: ""}
+
+ assert last_response.ok?
+ res = MultiJson.load(last_response.body)
+ assert_equal 2, res['response']['numFound']
+ end
+
+ def test_search_security
+ count, acronyms, bro = LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
+ process_submission: true,
+ process_options: { process_rdf: true, extract_metadata: false, generate_missing_labels: false},
+ acronym: "BROSEARCHTEST",
+ name: "BRO Search Test",
+ file_path: "./test/data/ontology_files/BRO_v3.2.owl",
+ ont_count: 1,
+ submission_count: 1,
+ ontology_type: "VALUE_SET_COLLECTION"
+ })
+
+ count, acronyms, mccl = LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
+ process_submission: true,
+ process_options: { process_rdf: true, extract_metadata: false, generate_missing_labels: false},
+ acronym: "MCCLSEARCHTEST",
+ name: "MCCL Search Test",
+ file_path: "./test/data/ontology_files/CellLine_OWL_BioPortal_v1.0.owl",
+ ont_count: 1,
+ submission_count: 1
+ })
+
+
+ subs = LinkedData::Models::OntologySubmission.all
+ subs.each do |s|
+ s.bring_remaining
+ s.index_all(Logger.new($stdout))
+ end
+
+
+ allowed_user = User.new({
+ username: "allowed",
+ email: "test1@example.org",
+ password: "12345"
+ })
+ allowed_user.save
+
+ blocked_user = User.new({
+ username: "blocked",
+ email: "test2@example.org",
+ password: "12345"
+ })
+ blocked_user.save
+
+ bro = bro.first
+ bro.bring_remaining
+ bro.acl = [allowed_user]
+ bro.viewingRestriction = "private"
+ bro.save
+
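+ # with security enabled, the private ontology must stay hidden from users outside its ACL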
+ self.class.enable_security
+ get "/search/ontologies?query=#{bro.acronym}&apikey=#{blocked_user.apikey}"
+ response = MultiJson.load(last_response.body)["collection"]
+ assert_empty response.select{|x| x["ontology_acronym_text"].eql?(bro.acronym)}
+
+ get "/search/ontologies/content?q=*Research_Lab_Management*&apikey=#{blocked_user.apikey}"
+ assert last_response.ok?
+ res = MultiJson.load(last_response.body)
+ assert_equal 0, res['totalCount']
+
+ get "/search/ontologies?query=#{bro.acronym}&apikey=#{allowed_user.apikey}"
+ response = MultiJson.load(last_response.body)["collection"]
+ refute_empty response.select{|x| x["ontology_acronym_text"].eql?(bro.acronym)}
+
+ get "/search/ontologies/content?q=*Research_Lab_Management*&apikey=#{allowed_user.apikey}"
+ assert last_response.ok?
+ res = MultiJson.load(last_response.body)
+ assert_equal 1, res['totalCount']
+
+ self.class.reset_security(false)
+ end
+
+ def test_ontology_metadata_search
+ count, acronyms, bro = LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
+ process_submission: false,
+ acronym: "BROSEARCHTEST",
+ name: "BRO Search Test",
+ file_path: "./test/data/ontology_files/BRO_v3.2.owl",
+ ont_count: 1,
+ submission_count: 1,
+ ontology_type: "VALUE_SET_COLLECTION"
+ })
+
+ count, acronyms, mccl = LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
+ process_submission: false,
+ acronym: "MCCLSEARCHTEST",
+ name: "MCCL Search Test",
+ file_path: "./test/data/ontology_files/CellLine_OWL_BioPortal_v1.0.owl",
+ ont_count: 1,
+ submission_count: 1
+ })
+
+ # Search ACRONYM
+ ## full word
+ get '/search/ontologies?query=BROSEARCHTEST-0'
+ response = MultiJson.load(last_response.body)["collection"]
+ assert_equal 'BROSEARCHTEST-0', response.first['ontology_acronym_text']
+
+ ### start
+ get '/search/ontologies?query=BROSEARCHTEST'
+ response = MultiJson.load(last_response.body)["collection"]
+ assert_equal 'BROSEARCHTEST-0', response.first['ontology_acronym_text']
+
+ ## part of the word
+ get '/search/ontologies?query=BRO'
+ response = MultiJson.load(last_response.body)["collection"]
+ assert_equal 'BROSEARCHTEST-0', response.first['ontology_acronym_text']
+
+
+ # Search name
+ ## full word
+ ### start
+ get '/search/ontologies?query=MCCL Search'
+ response = MultiJson.load(last_response.body)["collection"]
+ assert_equal 'MCCLSEARCHTEST-0', response.first['ontology_acronym_text']
+ ###in the middle
+ get '/search/ontologies?query=Search Test'
+ response = MultiJson.load(last_response.body)["collection"]
+ assert_equal 2, response.size
+ assert_equal 'BROSEARCHTEST-0', response.first['ontology_acronym_text']
+ assert_equal 'MCCLSEARCHTEST-0', response.last['ontology_acronym_text']
+ ## part of the word
+ ### start
+ get '/search/ontologies?query=MCCL Sea'
+ response = MultiJson.load(last_response.body)["collection"]
+ assert_equal 'MCCLSEARCHTEST-0', response.first['ontology_acronym_text']
+ ### in the middle
+ get '/search/ontologies?query=Sea'
+ response = MultiJson.load(last_response.body)["collection"]
+ assert_equal 2, response.size
+ assert_equal 'BROSEARCHTEST-0', response.first['ontology_acronym_text']
+ assert_equal 'MCCLSEARCHTEST-0', response.last['ontology_acronym_text']
+
+
+ ## full text
+ get '/search/ontologies?query=MCCL Search Test'
+ response = MultiJson.load(last_response.body)["collection"]
+ assert_equal 'MCCLSEARCHTEST-0', response.first['ontology_acronym_text']
+
+
+ # Search description
+ ## full word
+ ### start
+ get '/search/ontologies?query=Description'
+ response = MultiJson.load(last_response.body)["collection"]
+ assert_equal 2, response.size
+ assert_equal 'BROSEARCHTEST-0', response.first['ontology_acronym_text']
+ assert_equal 'MCCLSEARCHTEST-0', response.last['ontology_acronym_text']
+
+ ### in the middle
+ get '/search/ontologies?query=1'
+ response = MultiJson.load(last_response.body)["collection"]
+ assert_equal 2, response.size
+ assert_equal 'BROSEARCHTEST-0', response.first['ontology_acronym_text']
+ assert_equal 'MCCLSEARCHTEST-0', response.last['ontology_acronym_text']
+
+ ## part of the word
+ ### start
+ get '/search/ontologies?query=Desc'
+ response = MultiJson.load(last_response.body)["collection"]
+ assert_equal 2, response.size
+ assert_equal 'BROSEARCHTEST-0', response.first['ontology_acronym_text']
+ assert_equal 'MCCLSEARCHTEST-0', response.last['ontology_acronym_text']
+
+ ### full text
+ get '/search/ontologies?query=Description 1'
+ response = MultiJson.load(last_response.body)["collection"]
+ assert_equal 2, response.size
+ assert_equal 'BROSEARCHTEST-0', response.first['ontology_acronym_text']
+ assert_equal 'MCCLSEARCHTEST-0', response.last['ontology_acronym_text']
+ end
+
+ def test_ontology_metadata_filters
+ num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: 10, submission_count: 1)
+
+
+ group1 = LinkedData::Models::Group.find('group-1').first || LinkedData::Models::Group.new(acronym: 'group-1', name: "Test Group 1").save
+ group2 = LinkedData::Models::Group.find('group-2').first || LinkedData::Models::Group.new(acronym: 'group-2', name: "Test Group 2").save
+ category1 = LinkedData::Models::Category.find('category-1').first || LinkedData::Models::Category.new(acronym: 'category-1', name: "Test Category 1").save
+ category2 = LinkedData::Models::Category.find('category-2').first || LinkedData::Models::Category.new(acronym: 'category-2', name: "Test Category 2").save
+
+ ontologies1 = ontologies[0..5].each do |o|
+ o.bring_remaining
+ o.group = [group1]
+ o.hasDomain = [category1]
+ o.save
+ end
+
+ ontologies2 = ontologies[6..8].each do |o|
+ o.bring_remaining
+ o.group = [group2]
+ o.hasDomain = [category2]
+ o.save
+ end
+
+
+ # test filter by group and category
+ get "/search/ontologies?page=1&pagesize=100&groups=#{group1.acronym}"
+ assert last_response.ok?
+ assert_equal ontologies1.size, MultiJson.load(last_response.body)["collection"].length
+ get "/search/ontologies?page=1&pagesize=100&groups=#{group2.acronym}"
+ assert last_response.ok?
+ assert_equal ontologies2.size, MultiJson.load(last_response.body)["collection"].length
+
+
+ get "/search/ontologies?page=1&pagesize=100&groups=#{group1.acronym},#{group2.acronym}"
+ assert last_response.ok?
+ assert_equal ontologies1.size + ontologies2.size, MultiJson.load(last_response.body)["collection"].length
+
+ get "/search/ontologies?page=1&pagesize=100&hasDomain=#{category1.acronym}"
+ assert last_response.ok?
+ assert_equal ontologies1.size, MultiJson.load(last_response.body)["collection"].length
+
+ get "/search/ontologies?page=1&pagesize=100&hasDomain=#{category2.acronym}"
+ assert last_response.ok?
+ assert_equal ontologies2.size, MultiJson.load(last_response.body)["collection"].length
+
+ get "/search/ontologies?page=1&pagesize=100&hasDomain=#{category2.acronym},#{category1.acronym}"
+ assert last_response.ok?
+ assert_equal ontologies1.size + ontologies2.size, MultiJson.load(last_response.body)["collection"].length
+
+ get "/search/ontologies?page=1&pagesize=100&hasDomain=#{category2.acronym}&groups=#{group1.acronym}"
+ assert last_response.ok?
+ assert_equal 0, MultiJson.load(last_response.body)["collection"].length
+ get "/search/ontologies?page=1&pagesize=100&hasDomain=#{category2.acronym}&groups=#{group2.acronym}"
+ assert last_response.ok?
+ assert_equal ontologies2.size, MultiJson.load(last_response.body)["collection"].length
+
+
+
+ ontologies3 = ontologies[9]
+ ontologies3.bring_remaining
+ ontologies3.group = [group1, group2]
+ ontologies3.hasDomain = [category1, category2]
+ ontologies3.name = "name search test"
+ ontologies3.save
+
+ ontologies.first.name = "sort by test"
+ ontologies.first.save
+ sub = ontologies.first.latest_submission(status: :any).bring_remaining
+ sub.status = 'retired'
+ sub.description = "234"
+ sub.creationDate = DateTime.yesterday.to_datetime
+ sub.hasOntologyLanguage = LinkedData::Models::OntologyFormat.find('SKOS').first
+ sub.save
+
+ # test search with sort
+ get "/search/ontologies?page=1&pagesize=100&q=tes&sort=ontology_name_sort asc"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+
+ refute_empty submissions["collection"]
+ assert_equal ontologies.map{|x| x.bring(:name).name}.sort, submissions["collection"].map{|x| x["ontology_name_text"]}
+
+ get "/search/ontologies?page=1&pagesize=100&q=tes&sort=creationDate_dt desc"
+
+
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ refute_empty submissions["collection"]
+ assert_equal ontologies.map{|x| x.latest_submission(status: :any).bring(:creationDate).creationDate.to_s.split('T').first}.sort.reverse,
+ submissions["collection"].map{|x| x["creationDate_dt"].split('T').first}
+
+ # test search with format
+ get "/search/ontologies?page=1&pagesize=100&q=tes&hasOntologyLanguage=SKOS"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+
+ refute_empty submissions["collection"]
+ assert_equal 1, submissions["collection"].size
+
+ get "/search/ontologies?page=1&pagesize=100&q=tes&hasOntologyLanguage=OWL"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ refute_empty submissions["collection"]
+ assert_equal ontologies.size - 1, submissions["collection"].size
+
+ # test ontology filter with submission filter attributes
+ get "/search/ontologies?page=1&pagesize=100&q=tes&groups=group-2&hasDomain=category-2&hasOntologyLanguage=OWL"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ refute_empty submissions["collection"]
+ assert_equal ontologies2.size + 1, submissions["collection"].size
+
+ # test ontology filter with status
+ get "/search/ontologies?page=1&pagesize=100&status=retired"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ refute_empty submissions["collection"]
+ assert_equal 1, submissions["collection"].size
+
+ get "/search/ontologies?page=1&pagesize=100&status=alpha,beta,production"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ refute_empty submissions["collection"]
+ assert_equal ontologies.size - 1, submissions["collection"].size
+
+ get "/search/ontologies?page=1&pagesize=100&q=234"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ assert_equal "http://data.bioontology.org/ontologies/TEST-ONT-0/submissions/1" , submissions["collection"].first["id"]
+ end
+
+ def test_agents_search
+ agents_tmp = [ agent_data(type: 'organization'), agent_data(type: 'organization'), agent_data(type: 'person')]
+ agents_tmp.each do |a|
+ post "/agents", MultiJson.dump(a), "CONTENT_TYPE" => "application/json"
+ assert last_response.status == 201
+ end
+
+ agent_person = LinkedData::Models::Agent.where(agentType: 'person').all.first.bring_remaining
+ agent_org = LinkedData::Models::Agent.where(agentType: 'organization').all.first.bring_remaining
+
+
+ get "/search/agents?&q=name"
+ assert last_response.ok?
+ agents = MultiJson.load(last_response.body)
+
+
+ assert_equal 3, agents["totalCount"]
+
+
+ get "/search/agents?&q=name&agentType=organization"
+ assert last_response.ok?
+ agents = MultiJson.load(last_response.body)
+ assert_equal 2, agents["totalCount"]
+
+ get "/search/agents?&q=name&agentType=person"
+ assert last_response.ok?
+ agents = MultiJson.load(last_response.body)
+ assert_equal 1, agents["totalCount"]
+
+
+ get "/search/agents?&q=#{agent_person.name}"
+ assert last_response.ok?
+ agents = MultiJson.load(last_response.body)
+ assert_equal agent_person.id.to_s, agents["collection"].first["id"]
+
+ get "/search/agents?&q=#{agent_org.acronym}"
+ assert last_response.ok?
+ agents = MultiJson.load(last_response.body)
+ assert_equal agent_org.id.to_s, agents["collection"].first["id"]
+
+
+ get "/search/agents?&q=#{agent_org.identifiers.first.id.split('/').last}"
+ assert last_response.ok?
+ agents = MultiJson.load(last_response.body)
+ assert_equal agent_org.id.to_s, agents["collection"].first["id"]
+ end
+
+ def test_search_data
+ count, acronyms, bro = LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
+ process_submission: true,
+ process_options: { process_rdf: true, extract_metadata: false, index_all_data: true, generate_missing_labels: false},
+ acronym: "BROSEARCHTEST",
+ name: "BRO Search Test",
+ file_path: "./test/data/ontology_files/BRO_v3.2.owl",
+ ont_count: 1,
+ submission_count: 1,
+ ontology_type: "VALUE_SET_COLLECTION"
+ })
+
+ count, acronyms, mccl = LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
+ process_submission: true,
+ process_options: { process_rdf: true, extract_metadata: false, index_all_data: true, generate_missing_labels: false},
+ acronym: "MCCLSEARCHTEST",
+ name: "MCCL Search Test",
+ file_path: "./test/data/ontology_files/CellLine_OWL_BioPortal_v1.0.owl",
+ ont_count: 1,
+ submission_count: 1
+ })
+
+
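+ # count distinct subjects in each submission graph; the indexed totals should match these counts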
+ subs = LinkedData::Models::OntologySubmission.all
+ count = []
+ subs.each do |s|
+ count << Goo.sparql_query_client.query("SELECT (COUNT( DISTINCT ?id) as ?c) FROM <#{s.id}> WHERE {?id ?p ?v}")
+ .first[:c]
+ .to_i
+ end
+
+ get "/search/ontologies/content?q=*"
+ assert last_response.ok?
+ res = MultiJson.load(last_response.body)
+ assert_equal count.sum, res['totalCount']
+
+
+ get "/search/ontologies/content?q=*&ontologies=MCCLSEARCHTEST-0,BROSEARCHTEST-0"
+ assert last_response.ok?
+ res = MultiJson.load(last_response.body)
+ assert_equal count.sum, res['totalCount']
+
+ get "/search/ontologies/content?q=*&ontologies=BROSEARCHTEST-0"
+ assert last_response.ok?
+ res = MultiJson.load(last_response.body)
+ assert_includes count, res['totalCount']
+
+ get "/search/ontologies/content?q=*&ontologies=MCCLSEARCHTEST-0"
+ assert last_response.ok?
+ res = MultiJson.load(last_response.body)
+ assert_includes count, res['totalCount']
+
+ end
+end
diff --git a/test/controllers/test_slices_controller.rb b/test/controllers/test_slices_controller.rb
index 92ce6b1d..601b15a7 100644
--- a/test/controllers/test_slices_controller.rb
+++ b/test/controllers/test_slices_controller.rb
@@ -3,28 +3,77 @@
class TestSlicesController < TestCase
def self.before_suite
- onts = LinkedData::SampleData::Ontology.create_ontologies_and_submissions(ont_count: 1, submission_count: 0)[2]
+ ont_count, ont_acronyms, @@onts = LinkedData::SampleData::Ontology.create_ontologies_and_submissions(ont_count: 1, submission_count: 0)
@@slice_acronyms = ["tst-a", "tst-b"].sort
- _create_slice(@@slice_acronyms[0], "Test Slice A", onts)
- _create_slice(@@slice_acronyms[1], "Test Slice B", onts)
+ _create_slice(@@slice_acronyms[0], "Test Slice A", @@onts)
+ _create_slice(@@slice_acronyms[1], "Test Slice B", @@onts)
+
+ @@user = User.new({
+ username: "test-slice",
+ email: "test-slice@example.org",
+ password: "12345"
+ }).save
+ @@new_slice_data = { acronym: 'tst-c', name: "Test Slice C", ontologies: ont_acronyms}
+ @@old_security_setting = LinkedData.settings.enable_security
+ end
+
+ def self.after_suite
+ LinkedData::Models::Slice.all.each(&:delete)
+ @@user.delete
+ reset_security(@@old_security_setting)
+ end
+
+ def setup
+ self.class.reset_security(@@old_security_setting)
+ self.class.reset_to_not_admin(@@user)
+ LinkedData::Models::Slice.find(@@new_slice_data[:acronym]).first&.delete
end
def test_all_slices
get "/slices"
assert last_response.ok?
slices = MultiJson.load(last_response.body)
- assert_equal @@slice_acronyms, slices.map {|s| s["acronym"]}.sort
+ assert_equal @@slice_acronyms, slices.map { |s| s["acronym"] }.sort
+ end
+
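+ # slice creation is admin-only once security is enabled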
+ def test_create_slices
+ self.class.enable_security
+
+ post "/slices?apikey=#{@@user.apikey}", MultiJson.dump(@@new_slice_data), "CONTENT_TYPE" => "application/json"
+ assert_equal 403, last_response.status
+
+ self.class.make_admin(@@user)
+
+ post "/slices?apikey=#{@@user.apikey}", MultiJson.dump(@@new_slice_data), "CONTENT_TYPE" => "application/json"
+
+ assert_equal 201, last_response.status
+ end
+
+ def test_delete_slices
+ self.class.enable_security
+ self.class._create_slice(@@new_slice_data[:acronym], @@new_slice_data[:name], @@onts)
+
+
+ delete "/slices/#{@@new_slice_data[:acronym]}?apikey=#{@@user.apikey}"
+ assert_equal 403, last_response.status
+
+ self.class.make_admin(@@user)
+
+ delete "/slices/#{@@new_slice_data[:acronym]}?apikey=#{@@user.apikey}"
+ assert_equal 204, last_response.status
end
private
def self._create_slice(acronym, name, ontologies)
slice = LinkedData::Models::Slice.new({
- acronym: acronym,
- name: "Test #{name}",
- ontologies: ontologies
- })
+ acronym: acronym,
+ name: "Test #{name}",
+ ontologies: ontologies
+ })
slice.save
end
-end
+
+end
\ No newline at end of file
diff --git a/test/controllers/test_users_controller.rb b/test/controllers/test_users_controller.rb
index 337da52e..a165a5d7 100644
--- a/test/controllers/test_users_controller.rb
+++ b/test/controllers/test_users_controller.rb
@@ -6,7 +6,7 @@ def self.before_suite
@@usernames = %w(fred goerge henry ben mark matt charlie)
# Create them again
- @@usernames.each do |username|
+ @@users = @@usernames.map do |username|
User.new(username: username, email: "#{username}@example.org", password: "pass_word").save
end
@@ -21,6 +21,17 @@ def self._delete_users
end
end
+ def test_admin_creation
+ existent_user = @@users.first #no admin
+
+ refute _create_admin_user(apikey: existent_user.apikey), "A non-admin user must not be able to create an admin user or promote one to admin"
+
+ existent_user = self.class.make_admin(existent_user)
+ assert _create_admin_user(apikey: existent_user.apikey), "An admin should be able to create an admin user or promote one to admin"
+ self.class.reset_to_not_admin(existent_user)
+ delete "/users/#{@@username}"
+ end
+
def test_all_users
get '/users'
assert last_response.ok?
@@ -100,4 +111,68 @@ def test_authentication
assert user["username"].eql?(@@usernames.first)
end
+ def test_oauth_authentication
+ fake_responses = {
+ github: {
+ id: 123456789,
+ login: 'github_user',
+ email: 'github_user@example.com',
+ name: 'GitHub User',
+ avatar_url: 'https://avatars.githubusercontent.com/u/123456789'
+ },
+ google: {
+ sub: 'google_user_id',
+ email: 'google_user@example.com',
+ name: 'Google User',
+ given_name: 'Google',
+ family_name: 'User',
+ picture: 'https://lh3.googleusercontent.com/a-/user-profile-image-url'
+ },
+ orcid: {
+ orcid: '0000-0002-1825-0097',
+ email: 'orcid_user@example.com',
+ name: {
+ "family-name": 'ORCID',
+ "given-names": 'User'
+ }
+ }
+ }
+
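+ # stub each provider's user-info endpoint, then authenticate with a dummy access token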
+ fake_responses.each do |provider, data|
+ WebMock.stub_request(:get, LinkedData::Models::User.oauth_providers[provider][:link])
+ .to_return(status: 200, body: data.to_json, headers: { 'Content-Type' => 'application/json' })
+ post "/users/authenticate", {access_token:'jkooko', token_provider: provider.to_s}
+ assert last_response.ok?
+ user = MultiJson.load(last_response.body)
+ assert data[:email], user["email"]
+ end
+ end
+
+ private
+ def _create_admin_user(apikey: nil)
+ user = {email: "#{@@username}@example.org", password: "pass_the_word", role: ['ADMINISTRATOR']}
+ LinkedData::Models::User.find(@@username).first&.delete
+
+ put "/users/#{@@username}", MultiJson.dump(user), "CONTENT_TYPE" => "application/json", "Authorization" => "apikey token=#{apikey}"
+ assert last_response.status == 201
+ created_user = MultiJson.load(last_response.body)
+ assert created_user["username"].eql?(@@username)
+
+ get "/users/#{@@username}?apikey=#{apikey}"
+ assert last_response.ok?
+ user = MultiJson.load(last_response.body)
+ assert user["username"].eql?(@@username)
+
+ return true if user["role"].eql?(['ADMINISTRATOR'])
+
+ patch "/users/#{@@username}", MultiJson.dump(role: ['ADMINISTRATOR']), "CONTENT_TYPE" => "application/json", "Authorization" => "apikey token=#{apikey}"
+ assert last_response.status == 204
+
+ get "/users/#{@@username}?apikey=#{apikey}"
+ assert last_response.ok?
+ user = MultiJson.load(last_response.body)
+ assert user["username"].eql?(@@username)
+
+ true if user["role"].eql?(['ADMINISTRATOR'])
+ end
end
diff --git a/test/data/graphdb-repo-config.ttl b/test/data/graphdb-repo-config.ttl
new file mode 100644
index 00000000..9200da9a
--- /dev/null
+++ b/test/data/graphdb-repo-config.ttl
@@ -0,0 +1,33 @@
+@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
+@prefix rep: <http://www.openrdf.org/config/repository#> .
+@prefix sail: <http://www.openrdf.org/config/sail#> .
+@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
+
+<#ontoportal> a rep:Repository;
+ rep:repositoryID "ontoportal";
+ rep:repositoryImpl [
+ rep:repositoryType "graphdb:SailRepository";
+ <http://www.openrdf.org/config/repository/sail#sailImpl> [
+ <http://www.ontotext.com/trree/owlim#base-URL> "http://example.org/owlim#";
+ <http://www.ontotext.com/trree/owlim#check-for-inconsistencies> "false";
+ <http://www.ontotext.com/trree/owlim#defaultNS> "";
+ <http://www.ontotext.com/trree/owlim#disable-sameAs> "true";
+ <http://www.ontotext.com/trree/owlim#enable-context-index> "false";
+ <http://www.ontotext.com/trree/owlim#enable-literal-index> "true";
+ <http://www.ontotext.com/trree/owlim#enablePredicateList> "true";
+ <http://www.ontotext.com/trree/owlim#entity-id-size> "32";
+ <http://www.ontotext.com/trree/owlim#entity-index-size> "10000000";
+ <http://www.ontotext.com/trree/owlim#imports> "";
+ <http://www.ontotext.com/trree/owlim#in-memory-literal-properties> "true";
+ <http://www.ontotext.com/trree/owlim#owlim-license> "";
+ <http://www.ontotext.com/trree/owlim#query-limit-results> "0";
+ <http://www.ontotext.com/trree/owlim#query-timeout> "0";
+ <http://www.ontotext.com/trree/owlim#read-only> "false";
+ <http://www.ontotext.com/trree/owlim#repository-type> "file-repository";
+ <http://www.ontotext.com/trree/owlim#ruleset> "rdfsplus-optimized";
+ <http://www.ontotext.com/trree/owlim#storage-folder> "storage";
+ <http://www.ontotext.com/trree/owlim#throw-QueryEvaluationException-on-timeout> "false";
+ sail:sailType "owlim:Sail"
+ ]
+ ];
+rdfs:label "" .
\ No newline at end of file
diff --git a/test/data/graphdb-test-load.nt b/test/data/graphdb-test-load.nt
new file mode 100644
index 00000000..e69de29b
diff --git a/test/data/ontology_files/BRO_v3.2.owl b/test/data/ontology_files/BRO_v3.2.owl
index d64075cc..b2aeccf5 100644
--- a/test/data/ontology_files/BRO_v3.2.owl
+++ b/test/data/ontology_files/BRO_v3.2.owl
@@ -631,6 +631,9 @@
Activity
+ <skos:prefLabel>Activity</skos:prefLabel>
+ <skos:prefLabel xml:lang="en">ActivityEnglish</skos:prefLabel>
+ <skos:prefLabel xml:lang="fr">Activité</skos:prefLabel>
Activity of interest that may be related to a BRO:Resource.
activities
diff --git a/test/data/ontology_files/thesaurusINRAE_nouv_structure.rdf b/test/data/ontology_files/thesaurusINRAE_nouv_structure.rdf
index 8353d82f..ca303834 100644
--- a/test/data/ontology_files/thesaurusINRAE_nouv_structure.rdf
+++ b/test/data/ontology_files/thesaurusINRAE_nouv_structure.rdf
@@ -30,7 +30,7 @@
1331561625299
- aktivite
+ aktivite
2012-03-12T22:13:45Z
2017-09-22T14:09:06Z
diff --git a/test/helpers/test_http_cache_helper.rb b/test/helpers/test_http_cache_helper.rb
index 944198a6..5268066a 100644
--- a/test/helpers/test_http_cache_helper.rb
+++ b/test/helpers/test_http_cache_helper.rb
@@ -4,7 +4,6 @@ class TestHTTPCacheHelper < TestCaseHelpers
def self.before_suite
raise Exception, "Redis is unavailable, caching will not function" if LinkedData::HTTPCache.redis.ping.nil?
- self.new("before_suite").delete_ontologies_and_submissions
ontologies = self.new("before_suite")._ontologies
@@ontology = ontologies.shift
@@ontology_alt = ontologies.shift
diff --git a/test/helpers/test_slices_helper.rb b/test/helpers/test_slices_helper.rb
index 165a2a7e..bddd5c2d 100644
--- a/test/helpers/test_slices_helper.rb
+++ b/test/helpers/test_slices_helper.rb
@@ -70,6 +70,31 @@ def test_search_slices
assert results.all? {|r| group_ids.include?(r["links"]["ontology"])}
end
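+ # mapping statistics requested through a slice host should only cover the slice's ontologies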
+ def test_mappings_slices
+ LinkedData::Mappings.create_mapping_counts(Logger.new(TestLogFile.new))
+
+ get "/mappings/statistics/ontologies/"
+
+ expected_result_without_slice = ["PARSED-0",
+ "PARSED-1",
+ "http://data.bioontology.org/metadata/ExternalMappings",
+ "http://data.bioontology.org/metadata/InterportalMappings/agroportal",
+ "http://data.bioontology.org/metadata/InterportalMappings/ncbo",
+ "http://data.bioontology.org/metadata/InterportalMappings/sifr"]
+
+ assert_equal expected_result_without_slice, MultiJson.load(last_response.body).keys.sort
+
+ get "http://#{@@group_acronym}/mappings/statistics/ontologies/"
+
+ expected_result_with_slice = ["PARSED-0",
+ "http://data.bioontology.org/metadata/ExternalMappings",
+ "http://data.bioontology.org/metadata/InterportalMappings/agroportal",
+ "http://data.bioontology.org/metadata/InterportalMappings/ncbo",
+ "http://data.bioontology.org/metadata/InterportalMappings/sifr"]
+
+ assert_equal expected_result_with_slice, MultiJson.load(last_response.body).keys.sort
+ end
+
private
def self._create_group
diff --git a/test/middleware/test_rack_attack.rb b/test/middleware/test_rack_attack.rb
index 43143080..53f5fe3b 100644
--- a/test/middleware/test_rack_attack.rb
+++ b/test/middleware/test_rack_attack.rb
@@ -18,22 +18,22 @@ def self.before_suite
LinkedData::OntologiesAPI.settings.req_per_second_per_ip = 1
LinkedData::OntologiesAPI.settings.safe_ips = Set.new(["1.2.3.4", "1.2.3.5"])
- @@user = LinkedData::Models::User.new({username: "user", password: "test_password", email: "test_email@example.org"})
+ @@user = LinkedData::Models::User.new({username: "user", password: "test_password", email: "test_email1@example.org"})
@@user.save
- @@bp_user = LinkedData::Models::User.new({username: "ncbobioportal", password: "test_password", email: "test_email@example.org"})
+ @@bp_user = LinkedData::Models::User.new({username: "ncbobioportal", password: "test_password", email: "test_email2@example.org"})
@@bp_user.save
admin_role = LinkedData::Models::Users::Role.find("ADMINISTRATOR").first
- @@admin = LinkedData::Models::User.new({username: "admin", password: "test_password", email: "test_email@example.org", role: [admin_role]})
+ @@admin = LinkedData::Models::User.new({username: "admin", password: "test_password", email: "test_email3@example.org", role: [admin_role]})
@@admin.save
# Redirect output or we get a bunch of noise from Rack (gets reset in the after_suite method).
$stdout = File.open("/dev/null", "w")
$stderr = File.open("/dev/null", "w")
- # http://en.wikipedia.org/wiki/List_of_TCP_and_UDP_port_numbers#Dynamic.2C_private_or_ephemeral_ports
- @@port1 = Random.rand(55000..65535)
+
+ @@port1 = self.new('').unused_port
# Fork the process to create two servers. This isolates the Rack::Attack configuration, which makes other tests fail if included.
@@pid1 = fork do
@@ -45,7 +45,7 @@ def self.before_suite
Signal.trap("HUP") { Process.exit! }
end
- @@port2 = Random.rand(55000..65535) # http://en.wikipedia.org/wiki/List_of_TCP_and_UDP_port_numbers#Dynamic.2C_private_or_ephemeral_ports
+ @@port2 = self.new('').unused_port
@@pid2 = fork do
require_relative '../../config/rack_attack'
Rack::Server.start(
@@ -150,7 +150,7 @@ def request(user: nil, port: nil)
# Sometimes a single request can get through without failing depending
# on the order of the request as it coincides with the threaded requests.
(LinkedData::OntologiesAPI.settings.req_per_second_per_ip * 2).times do
- open("http://127.0.0.1:#{port}/ontologies", headers)
+ open("http://localhost:#{port}/ontologies", headers)
end
end
diff --git a/test/solr/configsets/term_search/conf/schema.xml b/test/solr/configsets/term_search/conf/schema.xml
index 6b18a2a1..fa95e127 100644
--- a/test/solr/configsets/term_search/conf/schema.xml
+++ b/test/solr/configsets/term_search/conf/schema.xml
[three hunks of Solr field/dynamicField definitions were changed here; the XML element markup was stripped during extraction and cannot be reconstructed]
diff --git a/test/solr/docker-compose.yml b/test/solr/docker-compose.yml
new file mode 100644
index 00000000..3ddae69c
--- /dev/null
+++ b/test/solr/docker-compose.yml
@@ -0,0 +1,13 @@
+version: '3.8'
+
+services:
+ op_solr:
+ image: solr:8.8
+ volumes:
+ - ./solr_configsets:/configsets:ro
+ ports:
+ - "8983:8983"
+ command: >
+ bash -c "precreate-core term_search_core1 /configsets/term_search
+ && precreate-core prop_search_core1 /configsets/property_search
+ && solr-foreground"
diff --git a/test/solr/generate_ncbo_configsets.sh b/test/solr/generate_ncbo_configsets.sh
index 893f7f3a..7b4281f7 100755
--- a/test/solr/generate_ncbo_configsets.sh
+++ b/test/solr/generate_ncbo_configsets.sh
@@ -2,18 +2,23 @@
# generates solr configsets by merging _default configset with config files in config/solr
# _default is copied from solr distribution solr-8.10.1/server/solr/configsets/_default/
-pushd solr/configsets
-ld_config='../../../../ontologies_linked_data/config/solr/'
-#ld_config='../../../../config/solr/'
-ls -l $ld_config
-pwd
-[ -d property_search ] && rm -Rf property_search
-[ -d term_search ] && rm -Rf property_search
-[ -d $ld_config/property_search ] || echo "cant find ontologies_linked_data project"
-mkdir -p property_search/conf
-mkdir -p term_search/conf
-cp -a _default/conf/* property_search/conf/
-cp -a _default/conf/* term_search/conf/
-cp -a $ld_config/property_search/* property_search/conf
-cp -a $ld_config/term_search/* term_search/conf
-popd
+#cd solr/configsets
+ld_config='config/solr'
+configsets='test/solr/configsets'
+[ -d ${configsets}/property_search ] && rm -Rf ${configsets}/property_search
+[ -d ${configsets}/term_search ] && rm -Rf ${configsets}/term_search
+if [[ ! -d ${ld_config}/property_search ]]; then
+ echo 'cant find ld solr config sets'
+ exit 1
+fi
+if [[ ! -d ${configsets}/_default/conf ]]; then
+ echo 'cant find default solr configset'
+ exit 1
+fi
+mkdir -p ${configsets}/property_search/conf
+mkdir -p ${configsets}/term_search/conf
+cp -a ${configsets}/_default/conf/* ${configsets}/property_search/conf/
+cp -a ${configsets}/_default/conf/* ${configsets}/term_search/conf/
+cp -a $ld_config/property_search/* ${configsets}/property_search/conf
+cp -a $ld_config/term_search/* ${configsets}/term_search/conf
+
diff --git a/test/test_case.rb b/test/test_case.rb
index 7d3d0716..06bbc99f 100644
--- a/test/test_case.rb
+++ b/test/test_case.rb
@@ -21,7 +21,9 @@
require_relative 'test_log_file'
require_relative '../app'
require 'minitest/unit'
+require 'webmock/minitest'
MiniTest::Unit.autorun
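+# allow real connections to the test backends; individual tests stub only the endpoints they fake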
+WebMock.allow_net_connect!
require 'rack/test'
require 'multi_json'
require 'oj'
@@ -72,12 +74,15 @@ def count_pattern(pattern)
def backend_4s_delete
if count_pattern("?s ?p ?o") < 400000
- LinkedData::Models::Ontology.where.include(:acronym).each do |o|
- query = "submissionAcronym:#{o.acronym}"
- LinkedData::Models::Ontology.unindexByQuery(query)
+ puts 'clear backend & index'
+ raise StandardError, 'Too many triples in KB, does not seem right to run tests' unless
+ count_pattern('?s ?p ?o') < 400000
+
+ graphs = Goo.sparql_query_client.query("SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o . } }")
+ graphs.each_solution do |sol|
+ Goo.sparql_data_client.delete_graph(sol[:g])
end
- LinkedData::Models::Ontology.indexCommit()
- Goo.sparql_update_client.update("DELETE {?s ?p ?o } WHERE { ?s ?p ?o }")
+
LinkedData::Models::SubmissionStatus.init_enum
LinkedData::Models::OntologyType.init_enum
LinkedData::Models::OntologyFormat.init_enum
@@ -144,9 +149,33 @@ def app
# @option options [TrueClass, FalseClass] :random_submission_count Use a random number of submissions between 1 and :submission_count
# @option options [TrueClass, FalseClass] :process_submission Parse the test ontology file
def create_ontologies_and_submissions(options = {})
+ if options[:process_submission] && options[:process_options].nil?
+ options[:process_options] = { process_rdf: true, extract_metadata: false, generate_missing_labels: false }
+ end
LinkedData::SampleData::Ontology.create_ontologies_and_submissions(options)
end
+
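+ # build a random agent payload (person or organization) for the /agents tests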
+ def agent_data(type: 'organization')
+ schema_agencies = LinkedData::Models::AgentIdentifier::IDENTIFIER_SCHEMES.keys
+ users = LinkedData::Models::User.all
+ users = [LinkedData::Models::User.new(username: "tim", email: "tim@example.org", password: "password").save] if users.empty?
+ test_identifiers = 5.times.map { |i| { notation: rand.to_s[2..11], schemaAgency: schema_agencies.sample.to_s } }
+ user = users.sample.id.to_s
+
+ i = rand.to_s[2..11]
+ return {
+ agentType: type,
+ name: "name #{i}",
+ homepage: "home page #{i}",
+ acronym: "acronym #{i}",
+ email: "email_#{i}@test.com",
+ identifiers: test_identifiers.sample(2).map { |x| x.merge({ creator: user }) },
+ affiliations: [],
+ creator: user
+ }
+ end
+
##
# Delete all ontologies and their submissions
def delete_ontologies_and_submissions
@@ -192,4 +221,45 @@ def get_errors(response)
return errors.strip
end
+ def self.enable_security
+ LinkedData.settings.enable_security = true
+ end
+
+ def self.reset_security(old_security = @@old_security_setting)
+ LinkedData.settings.enable_security = old_security
+ end
+
+
+ def self.make_admin(user)
+ user.bring_remaining
+ user.role = [LinkedData::Models::Users::Role.find(LinkedData::Models::Users::Role::ADMIN).first]
+ user.save
+ end
+
+ def self.reset_to_not_admin(user)
+ user.bring_remaining
+ user.role = [LinkedData::Models::Users::Role.find(LinkedData::Models::Users::Role::DEFAULT).first]
+ user.save
+ end
+
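+ # pick a random ephemeral port, retrying a few times if it is already bound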
+ def unused_port
+ max_retries = 5
+ retries = 0
+ server_port = Random.rand(55000..65535)
+ while port_in_use?(server_port)
+ retries += 1
+ break if retries >= max_retries
+ server_port = Random.rand(55000..65535)
+ end
+ server_port
+ end
+ private
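+ # a successful TCPServer bind means the port is free; EADDRINUSE means it is taken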
+ def port_in_use?(port)
+ server = TCPServer.new(port)
+ server.close
+ false
+ rescue Errno::EADDRINUSE
+ true
+ end
+
end
diff --git a/views/documentation/documentation.haml b/views/documentation/documentation.haml
index 527d781f..916eb1e7 100644
--- a/views/documentation/documentation.haml
+++ b/views/documentation/documentation.haml
@@ -151,6 +151,7 @@
%li include={prefLabel, synonym, definition, notation, cui, semanticType} // default = (see Common Parameters section)
%li page={integer representing the page number} // default = 1
%li pagesize={integer representing the size of the returned page} // default = 50
+ %li language={an ISO 639-1 language value, e.g. 'fr' or 'en'} // default = search in all languages
%h4#nav_search_subtree Subtree Search
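
For reference, a minimal sketch of how the language filtering exercised above behaves from a client's point of view. This is a hedged illustration, not part of the diff: it assumes an API running locally on port 9393, and the endpoint shape plus the lang parameter are taken from the search tests in this changeset.

require 'net/http'
require 'json'
require 'uri'

# search BROSEARCHTEST-0 for the French prefLabel added in BRO_v3.2.owl
uri = URI('http://localhost:9393/search')
uri.query = URI.encode_www_form(
  q: 'Activité',                  # query string
  ontologies: 'BROSEARCHTEST-0',  # restrict to one ontology
  lang: 'fr'                      # match French labels only; 'all' returns every language
)
res = JSON.parse(Net::HTTP.get(uri))
doc = res['collection'].find { |d| d['@id'] == 'http://bioontology.org/ontologies/Activity.owl#Activity' }
puts doc['prefLabel'] if doc      # => "Activité" with lang=fr; a per-language hash with lang=all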