diff --git a/.env.sample b/.env.sample
new file mode 100644
index 00000000..2c15a1c0
--- /dev/null
+++ b/.env.sample
@@ -0,0 +1,4 @@
+API_URL=http://localhost:9393
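+# Optional: paths to local clones of the gems below; leave them empty to use the Git sources declared in the Gemfile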
+ONTOLOGIES_LINKED_DATA_PATH=
+GOO_PATH=
+SPARQL_CLIENT_PATH=
\ No newline at end of file
diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml
new file mode 100644
index 00000000..737482f9
--- /dev/null
+++ b/.github/workflows/docker-image.yml
@@ -0,0 +1,55 @@
+name: Docker branch images build
+
+on:
+ push:
+ branches:
+ - development
+ - stage
+ - test
+ release:
+ types: [ published ]
+jobs:
+ push_to_registry:
+    name: Push Docker branch image to Docker Hub and GHCR
+ runs-on: ubuntu-latest
+ steps:
+ - name: Check out the repo
+ uses: actions/checkout@v3
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v2
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2
+
+ - name: Log in to Docker Hub
+ uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ - name: Log in to the Container registry
+ uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
+ with:
+ registry: ghcr.io
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Extract metadata (tags, labels) for Docker
+ id: meta
+ uses: docker/metadata-action@v4
+ with:
+ images: |
+ agroportal/ontologies_api
+ ghcr.io/${{ github.repository }}
+
+ - name: Build and push Docker image
+ uses: docker/build-push-action@v4
+ with:
+ context: .
+ platforms: linux/amd64,linux/arm64
+ build-args: |
+ RUBY_VERSION=2.7.8
+ push: true
+ tags: ${{ steps.meta.outputs.tags }}
+ labels: ${{ steps.meta.outputs.labels }}
diff --git a/.github/workflows/ruby-unit-tests.yml b/.github/workflows/ruby-unit-tests.yml
index 6b2c973d..4dc9e323 100644
--- a/.github/workflows/ruby-unit-tests.yml
+++ b/.github/workflows/ruby-unit-tests.yml
@@ -12,6 +12,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
+ - name: copy-env-config
+ run: cp .env.sample .env
- name: Build docker-compose
run: docker-compose --profile 4store build #profile flag is set in order to build all containers in this step
- name: Run unit tests
@@ -19,6 +21,7 @@ jobs:
# http://docs.codecov.io/docs/testing-with-docker
run: |
ci_env=`bash <(curl -s https://codecov.io/env)`
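+          # refresh the bundle inside the container before running the tests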
+ docker-compose run $ci_env -e CI --rm ${{ matrix.backend }} wait-for-it solr-ut:8983 -- bundle install
docker-compose run $ci_env -e CI --rm ${{ matrix.backend }} wait-for-it solr-ut:8983 -- bundle exec rake test TESTOPTS='-v'
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
diff --git a/.gitignore b/.gitignore
index 886a220f..8b568832 100644
--- a/.gitignore
+++ b/.gitignore
@@ -71,3 +71,5 @@ test/data/ontology_files/catalog-v001.xml
create_permissions.log
ontologies_api.iml
+
+.env
diff --git a/Dockerfile b/Dockerfile
index 3e65fe4a..6294e102 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -21,6 +21,7 @@ ENV BUNDLE_PATH=/srv/ontoportal/bundle
RUN bundle install
COPY . /srv/ontoportal/ontologies_api
+RUN cp /srv/ontoportal/ontologies_api/config/environments/config.rb.sample /srv/ontoportal/ontologies_api/config/environments/development.rb
EXPOSE 9393
-CMD ["bundle", "exec", "rackup", "-p", "9393", "--host", "0.0.0.0"]
+CMD ["bundle", "exec", "rackup", "-p", "9393", "--host", "0.0.0.0"]
\ No newline at end of file
diff --git a/Gemfile b/Gemfile
index 49c8357e..caa9818a 100644
--- a/Gemfile
+++ b/Gemfile
@@ -1,6 +1,6 @@
source 'https://rubygems.org'
-gem 'activesupport', '~> 3.1'
+gem 'activesupport', '~> 3.2'
# see https://github.com/ncbo/ontologies_api/issues/69
gem 'bigdecimal', '1.4.2'
gem 'faraday', '~> 1.9'
@@ -44,12 +44,12 @@ gem 'haml', '~> 5.2.2' # pin see https://github.com/ncbo/ontologies_api/pull/107
gem 'redcarpet'
# NCBO gems (can be from a local dev path or from rubygems/git)
-gem 'goo', git: 'https://github.com/ontoportal-lirmm/goo.git', branch: 'master'
+gem 'goo', git: 'https://github.com/ontoportal-lirmm/goo.git', branch: 'development'
gem 'ncbo_annotator', git: 'https://github.com/ontoportal-lirmm/ncbo_annotator.git', branch: 'master'
gem 'ncbo_cron', git: 'https://github.com/ontoportal-lirmm/ncbo_cron.git', branch: 'master'
gem 'ncbo_ontology_recommender', git: 'https://github.com/ncbo/ncbo_ontology_recommender.git', branch: 'master'
gem 'sparql-client', github: 'ontoportal-lirmm/sparql-client', branch: 'master'
-gem 'ontologies_linked_data', git: 'https://github.com/ontoportal-lirmm/ontologies_linked_data.git', branch: 'master'
+gem 'ontologies_linked_data', git: 'https://github.com/ontoportal-lirmm/ontologies_linked_data.git', branch: 'development'
group :development do
# bcrypt_pbkdf and ed35519 is required for capistrano deployments when using ed25519 keys; see https://github.com/miloserdow/capistrano-deploy/issues/42
@@ -63,6 +63,7 @@ group :development do
gem 'shotgun', github: 'palexander/shotgun', branch: 'ncbo'
end
+
group :profiling do
gem 'rack-mini-profiler'
end
diff --git a/Gemfile.lock b/Gemfile.lock
index 3428c7b2..52a5c072 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -11,8 +11,8 @@ GIT
GIT
remote: https://github.com/ontoportal-lirmm/goo.git
- revision: 74ea47defc7f6260b045a6c6997bbe6a59c7bf62
- branch: master
+ revision: 5979402d5138850fb9bdb34edfa350e9af1b5d22
+ branch: development
specs:
goo (0.0.2)
addressable (~> 2.8)
@@ -53,8 +53,8 @@ GIT
GIT
remote: https://github.com/ontoportal-lirmm/ontologies_linked_data.git
- revision: 80a331d053ea04397a903452288c2186822c340c
- branch: master
+ revision: a199eff007f5d7f18205d61194f3823445aa6460
+ branch: development
specs:
ontologies_linked_data (0.0.1)
activesupport
@@ -216,7 +216,7 @@ GEM
multi_json (1.15.0)
multipart-post (2.3.0)
net-http-persistent (2.9.4)
- net-imap (0.4.7)
+ net-imap (0.4.6)
date
net-protocol
net-pop (0.1.2)
@@ -247,7 +247,7 @@ GEM
rack (>= 0.4)
rack-attack (6.6.1)
rack (>= 1.0, < 3)
- rack-cache (1.14.0)
+ rack-cache (1.13.0)
rack (>= 0.4)
rack-cors (1.0.6)
rack (>= 1.6.0)
@@ -270,8 +270,8 @@ GEM
redis-rack-cache (2.2.1)
rack-cache (>= 1.10, < 2)
redis-store (>= 1.6, < 2)
- redis-store (1.9.2)
- redis (>= 4, < 6)
+ redis-store (1.9.1)
+ redis (>= 4, < 5)
representable (3.2.0)
declarative (< 0.1.0)
trailblazer-option (>= 0.1.1, < 0.2.0)
@@ -346,12 +346,11 @@ GEM
webrick (1.8.1)
PLATFORMS
- x86_64-darwin-21
x86_64-darwin-23
x86_64-linux
DEPENDENCIES
- activesupport (~> 3.1)
+ activesupport (~> 3.2)
bcrypt_pbkdf (>= 1.0, < 2.0)
bigdecimal (= 1.4.2)
capistrano (~> 3)
diff --git a/README.md b/README.md
index dfaa77ea..02b9f076 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,50 @@
ontologies_api provides a RESTful interface for accessing [BioPortal](https://bioportal.bioontology.org/) (an open repository of biomedical ontologies). Supported services include downloads, search, access to terms and concepts, text annotation, and much more.
-## Prerequisites
+# Run ontologies_api
+
+## Using the OntoPortal API utilities script
+### See help
+
+```bash
+bin/ontoportal help
+```
+
+```
+Usage: bin/ontoportal {dev|test|run|help} [--reset-cache] [--api-url API_URL] [--api-key API_KEY]
+  dev : Start the OntoPortal API development server.
+        Example: bin/ontoportal dev --api-url http://localhost:9393
+        Use --reset-cache to remove volumes: bin/ontoportal dev --reset-cache
+  test : Run tests.
+  run : Run a command in the OntoPortal API Docker container.
+  help : Show this help message.
+
+Description:
+  This script provides convenient commands for managing an OntoPortal API
+  application using Docker Compose. It includes options for starting the development server,
+  running tests, and executing commands within the OntoPortal API Docker container.
+
+Goals:
+  - Simplify common tasks related to OntoPortal API development using Docker.
+  - Provide a consistent and easy-to-use interface for common actions.
+```
+
+
+### Run dev
+```bash
+bin/ontoportal dev
+```
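+
+The dev server can also point to a different API URL or to local clones of the OntoPortal gems; the ../goo path below is only an illustration:
+
+```bash
+bin/ontoportal dev --api-url http://localhost:9393 --goo-path ../goo
+```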
+
+### Run test with a local OntoPortal API
+```bash
+bin/ontoportal test
+```
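+
+A single test file, plus options for the test runner, can be passed through as well. For example:
+
+```bash
+bin/ontoportal test test/controllers/test_users_controller.rb -v --name=name_of_the_test
+```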
+
+
+## Manually
+### Prerequisites
- [Ruby 2.x](http://www.ruby-lang.org/en/downloads/) (most recent patch level)
- [rbenv](https://github.com/sstephenson/rbenv) and [ruby-build](https://github.com/sstephenson/ruby-build) (optional)
@@ -19,7 +62,7 @@ ontologies_api provides a RESTful interface for accessing [BioPortal](https://bi
- [Solr](http://lucene.apache.org/solr/)
- BioPortal indexes ontology class and property content using Solr (a Lucene-based server)
-## Configuring Solr
+### Configuring Solr
To configure Solr for ontologies_api usage, modify the example project included with Solr by doing the following:
@@ -46,22 +89,22 @@ To configure Solr for ontologies_api usage, modify the example project included
# Edit the ontologies_api/config/environments/{env}.rb file to point to your running instance:
# http://localhost:8983/solr/NCBO1
-## Installing
+### Installing
-### Clone the repository
+#### Clone the repository
```
$ git clone git@github.com:ncbo/ontologies_api.git
$ cd ontologies_api
```
-### Install the dependencies
+#### Install the dependencies
```
$ bundle install
```
-### Create an environment configuration file
+#### Create an environment configuration file
```
$ cp config/environments/config.rb.sample config/environments/development.rb
@@ -73,7 +116,7 @@ production.rb
development.rb
test.rb
-### Run the unit tests (optional)
+#### Run the unit tests (optional)
Requires a configuration file for the test environment:
@@ -87,7 +130,7 @@ Execute the suite of tests from the command line:
$ bundle exec rake test
```
-### Run the application
+#### Run the application
```
$ bundle exec rackup --port 9393
diff --git a/bin/ontoportal b/bin/ontoportal
new file mode 100755
index 00000000..4840dad3
--- /dev/null
+++ b/bin/ontoportal
@@ -0,0 +1,239 @@
+#!/usr/bin/env bash
+
+# Function to display script usage information
+show_help() {
+ cat << EOL
+Usage: $0 {dev|test|run|help} [--reset-cache] [--api-url API_URL] [--api-key API_KEY] [--old-path OLD_PATH] [--goo-path GOO_PATH] [--sparql-client-path SPARQL_CLIENT_PATH]
+  dev : Start the OntoPortal API development server.
+ Example: $0 dev --api-url http://localhost:9393
+ Use --reset-cache to remove volumes: $0 dev --reset-cache
+ test : Run tests. Specify either a test file or use 'all'.
+ Example: $0 test test/controllers/test_users_controller.rb -v --name=name_of_the_test
+ Example (run all tests): $0 test all -v
+  run : Run a command in the OntoPortal API Docker container.
+ help : Show this help message.
+
+Description:
+  This script provides convenient commands for managing an OntoPortal API
+  application using Docker Compose. It includes options for starting the development server,
+  running tests, and executing commands within the OntoPortal API Docker container.
+
+Options:
+ --reset-cache : Remove Docker volumes (used with 'dev').
+ --api-url API_URL : Specify the API URL.
+ --api-key API_KEY : Specify the API key.
+ --old-path OLD_PATH : Specify the path for ontologies_linked_data.
+ --goo-path GOO_PATH : Specify the path for goo.
+ --sparql-client-path : Specify the path for sparql-client.
+  test_file | all : Specify a test file to run, or 'all' to run the entire suite.
+ -v : Enable verbosity.
+ --name=name_of_the_test : Specify the name of the test.
+
+Goals:
+  - Simplify common tasks related to OntoPortal API development using Docker.
+ - Provide a consistent and easy-to-use interface for common actions.
+EOL
+}
+
+
+# Function to update the .env file with API_URL and the local gem paths
+update_env_file() {
+ # Update the .env file with the provided values
+ local api_url="$1"
+ local old_path="$2"
+ local goo_path="$3"
+ local sparql_client_path="$4"
+
+  # Read the current .env contents
+  file_content=$(<.env)
+
+  # Rewrite each line, substituting any values that were provided
+ while IFS= read -r line; do
+ if [[ "$line" == "API_URL="* && -n "$api_url" ]]; then
+ echo "API_URL=$api_url"
+ elif [[ "$line" == "ONTOLOGIES_LINKED_DATA_PATH="* ]]; then
+ echo "ONTOLOGIES_LINKED_DATA_PATH=$old_path"
+ elif [[ "$line" == "GOO_PATH="* ]]; then
+ echo "GOO_PATH=$goo_path"
+ elif [[ "$line" == "SPARQL_CLIENT_PATH="* ]]; then
+ echo "SPARQL_CLIENT_PATH=$sparql_client_path"
+ else
+ echo "$line"
+ fi
+ done <<< "$file_content" > .env
+}
+
+# Function to create configuration files if they don't exist
+create_config_files() {
+ [ -f ".env" ] || cp .env.sample .env
+ [ -f "config/environments/development.rb" ] || cp config/environments/config.rb.sample config/environments/development.rb
+}
+
+# Function to build Docker run command with conditionally added bind mounts
+build_docker_run_cmd() {
+ local custom_command="$1"
+ local old_path="$2"
+ local goo_path="$3"
+ local sparql_client_path="$4"
+
+ local docker_run_cmd="docker compose run --rm -it"
+ local bash_cmd=""
+
+ # Conditionally add bind mounts only if the paths are not empty
+ for path_var in "old_path:ontologies_linked_data" "goo_path:goo" "sparql_client_path:sparql-client"; do
+ IFS=':' read -r path value <<< "$path_var"
+
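+    # "${!path}" uses bash indirect expansion: it reads the variable whose name is stored in $path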
+ if [ -n "${!path}" ]; then
+ host_path="$(realpath "$(dirname "${!path}")")/$value"
+ echo "Run: bundle config local.$value ${!path}"
+ container_path="/srv/ontoportal/$value"
+ docker_run_cmd+=" -v $host_path:$container_path"
+ bash_cmd+="(git config --global --add safe.directory $container_path && bundle config local.$value $container_path) &&"
+ else
+ bash_cmd+=" (bundle config unset local.$value) &&"
+ fi
+ done
+
+ bash_cmd+=" (bundle check || bundle install || bundle update) && $custom_command"
+ docker_run_cmd+=" --service-ports api bash -c \"$bash_cmd\""
+
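+  # eval re-parses the assembled string so the quoted 'bash -c' payload is passed as a single argument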
+ eval "$docker_run_cmd"
+}
+
+# Function to handle the "dev" and "test" options
+run_command() {
+ local custom_command="$1"
+
+ local reset_cache=false
+ local api_url=""
+ local old_path=""
+ local goo_path=""
+ local sparql_client_path=""
+
+ shift
+ # Check for command line arguments
+ while [[ "$#" -gt 0 ]]; do
+ case $1 in
+ --reset-cache)
+ reset_cache=true
+ shift
+ ;;
+ --api-url)
+ api_url="$2"
+ shift 2
+ ;;
+ --old-path)
+ old_path="$2"
+ shift 2
+ ;;
+ --goo-path)
+ goo_path="$2"
+ shift 2
+ ;;
+ --sparql-client-path)
+ sparql_client_path="$2"
+ shift 2
+ ;;
+ *)
+ echo "Unknown option: $1"
+ show_help
+ exit 1
+ ;;
+ esac
+ done
+
+ # Check if --reset-cache is present and execute docker compose down --volumes
+ if [ "$reset_cache" = true ]; then
+ echo "Resetting cache. Running: docker compose down --volumes"
+ docker compose down --volumes
+ fi
+
+  # Persist any provided arguments to the .env file
+  update_env_file "$api_url" "$old_path" "$goo_path" "$sparql_client_path"
+
+  # Load the effective values back from the .env file
+  source .env
+ api_url="$API_URL"
+ old_path="$ONTOLOGIES_LINKED_DATA_PATH"
+ goo_path="$GOO_PATH"
+ sparql_client_path="$SPARQL_CLIENT_PATH"
+
+
+ if [ -z "$api_url" ] ; then
+    echo "Error: Missing required arguments. Please provide --api-url or set API_URL in your .env file."
+ exit 1
+ fi
+
+ # Build the Docker run command
+ echo "Run: $custom_command"
+ build_docker_run_cmd "$custom_command" "$old_path" "$goo_path" "$sparql_client_path"
+}
+
+# Function to handle the "dev" option
+dev() {
+ echo "Starting OntoPortal API development server..."
+
+ local custom_command="bundle exec shotgun --host 0.0.0.0 --env=development"
+ run_command "$custom_command" "$@"
+}
+
+# Function to handle the "test" option
+test() {
+ echo "Running tests..."
+ local test_path=""
+ local test_options=""
+ local all_arguments=()
+ # Check for command line arguments
+ while [ "$#" -gt 0 ]; do
+ case "$1" in
+ --api-url | --reset-cache | --old-path | --goo-path | --sparql-client-path)
+ all_arguments+=("$1" "$2")
+ shift 2
+ ;;
+ *)
+ if [ -z "$test_path" ]; then
+ test_path="$1"
+ else
+ test_options="$test_options $1"
+ fi
+ ;;
+ esac
+ shift
+ done
+
+ local custom_command="bundle exec rake test TEST='$test_path' TESTOPTS='$test_options'"
+  echo "Run: $custom_command"
+ run_command "$custom_command" "${all_arguments[@]}"
+}
+
+# Function to handle the "run" option
+run() {
+ echo "Run: $*"
+ docker compose run --rm -it api bash -c "$*"
+}
+
+create_config_files
+
+# Main script logic
+case "$1" in
+ "run")
+ run "${@:2}"
+ ;;
+ "dev")
+ dev "${@:2}"
+ ;;
+ "test")
+ test "${@:2}"
+ ;;
+ "help")
+ show_help
+ ;;
+ *)
+ show_help
+ exit 1
+ ;;
+esac
diff --git a/config/environments/config.rb.sample b/config/environments/config.rb.sample
index e5f9fd9c..8713b9f2 100644
--- a/config/environments/config.rb.sample
+++ b/config/environments/config.rb.sample
@@ -3,120 +3,106 @@
# development.rb
# test.rb
-begin
- LinkedData.config do |config|
- config.repository_folder = "/srv/ncbo/repository"
- config.goo_host = "localhost"
- config.goo_port = 9000
- config.search_server_url = "http://localhost:8082/solr/term_search_core1"
- config.property_search_server_url = "http://localhost:8082/solr/prop_search_core1"
- config.rest_url_prefix = "http://#{$SITE_URL}:8080/"
- config.replace_url_prefix = true
- config.enable_security = true
-
- config.apikey = "24e0e77e-54e0-11e0-9d7b-005056aa3316"
- config.ui_host = "http://#{$SITE_URL}"
- config.enable_monitoring = false
- config.cube_host = "localhost"
- config.enable_resource_index = false
-
- # Used to define other BioPortal to which this appliance can be mapped to
- # Example to map to the NCBO BioPortal : {"ncbo" => {"api" => "http://data.bioontology.org", "ui" => "http://bioportal.bioontology.org", "apikey" => ""}}
- # Then create the mapping using the following class in JSON : "http://purl.bioontology.org/ontology/MESH/C585345": "ncbo:MESH"
- # Where "ncbo" is the key in the interportal_hash. Use only lowercase letters for this key.
- # And do not use "ext" as a key, it is reserved for clases outside of any BioPortal
- config.interportal_hash = {}
-
- # Caches
- config.http_redis_host = "localhost"
- config.http_redis_port = 6380
- config.enable_http_cache = true
- config.goo_redis_host = "localhost"
- config.goo_redis_port = 6382
+GOO_BACKEND_NAME = ENV.include?("GOO_BACKEND_NAME") ? ENV["GOO_BACKEND_NAME"] : "4store"
+GOO_HOST = ENV.include?("GOO_HOST") ? ENV["GOO_HOST"] : "localhost"
+GOO_PATH_DATA = ENV.include?("GOO_PATH_DATA") ? ENV["GOO_PATH_DATA"] : "/data/"
+GOO_PATH_QUERY = ENV.include?("GOO_PATH_QUERY") ? ENV["GOO_PATH_QUERY"] : "/sparql/"
+GOO_PATH_UPDATE = ENV.include?("GOO_PATH_UPDATE") ? ENV["GOO_PATH_UPDATE"] : "/update/"
+GOO_PORT = ENV.include?("GOO_PORT") ? ENV["GOO_PORT"] : 9000
+MGREP_HOST = ENV.include?("MGREP_HOST") ? ENV["MGREP_HOST"] : "localhost"
+MGREP_PORT = ENV.include?("MGREP_PORT") ? ENV["MGREP_PORT"] : 55555
+MGREP_DICTIONARY_FILE = ENV.include?("MGREP_DICTIONARY_FILE") ? ENV["MGREP_DICTIONARY_FILE"] : "./test/data/dictionary.txt"
+REDIS_GOO_CACHE_HOST = ENV.include?("REDIS_GOO_CACHE_HOST") ? ENV["REDIS_GOO_CACHE_HOST"] : "localhost"
+REDIS_HTTP_CACHE_HOST = ENV.include?("REDIS_HTTP_CACHE_HOST") ? ENV["REDIS_HTTP_CACHE_HOST"] : "localhost"
+REDIS_PERSISTENT_HOST = ENV.include?("REDIS_PERSISTENT_HOST") ? ENV["REDIS_PERSISTENT_HOST"] : "localhost"
+REDIS_PORT = ENV.include?("REDIS_PORT") ? ENV["REDIS_PORT"] : 6379
+REPORT_PATH = ENV.include?("REPORT_PATH") ? ENV["REPORT_PATH"] : "./test/ontologies_report.json"
+REPOSITORY_FOLDER = ENV.include?("REPOSITORY_FOLDER") ? ENV["REPOSITORY_FOLDER"] : "./test/data/ontology_files/repo"
+REST_URL_PREFIX = ENV.include?("REST_URL_PREFIX") ? ENV["REST_URL_PREFIX"] : ENV["API_URL"] || "http://localhost:9393"
+SOLR_PROP_SEARCH_URL = ENV.include?("SOLR_PROP_SEARCH_URL") ? ENV["SOLR_PROP_SEARCH_URL"] : "http://localhost:8983/solr/prop_search_core1"
+SOLR_TERM_SEARCH_URL = ENV.include?("SOLR_TERM_SEARCH_URL") ? ENV["SOLR_TERM_SEARCH_URL"] : "http://localhost:8983/solr/term_search_core1"
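+# Each setting above falls back to its default but can be overridden via environment variables (e.g. the .env file used by docker-compose).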
- Goo.use_cache = true
-
- # Email notifications
- config.enable_notifications = false
- config.email_sender = "admin@example.org" # Default sender for emails
- config.email_override = "override@example.org" # all email gets sent here. Disable with email_override_disable.
- config.email_disable_override = true
- config.smtp_host = "localhost"
- config.smtp_port = 25
- config.smtp_auth_type = :none # :none, :plain, :login, :cram_md5
- config.smtp_domain = "example.org"
- # Emails of the instance administrators to get mail notifications when new user or new ontology
- config.admin_emails = ["admin@example.org"]
+begin
+  # For prefLabel, extract the main_lang values first, or any language if no main_lang value is found.
+  # For other properties, only values with a lang included in main_lang are used.
+ Goo.main_languages = ["en", "fr"]
+ Goo.use_cache = false
+rescue NoMethodError
+  puts "(CNFG) >> Goo.main_languages not available"
+end
- # PURL server config parameters
- config.enable_purl = false
- config.purl_host = "purl.example.org"
- config.purl_port = 80
- config.purl_username = "admin"
- config.purl_password = "password"
- config.purl_maintainers = "admin"
- config.purl_target_url_prefix = "http://example.org"
+LinkedData.config do |config|
+ config.goo_backend_name = GOO_BACKEND_NAME.to_s
+ config.goo_host = GOO_HOST.to_s
+ config.goo_port = GOO_PORT.to_i
+ config.goo_path_query = GOO_PATH_QUERY.to_s
+ config.goo_path_data = GOO_PATH_DATA.to_s
+ config.goo_path_update = GOO_PATH_UPDATE.to_s
+ config.goo_redis_host = REDIS_GOO_CACHE_HOST.to_s
+ config.goo_redis_port = REDIS_PORT.to_i
+ config.http_redis_host = REDIS_HTTP_CACHE_HOST.to_s
+ config.http_redis_port = REDIS_PORT.to_i
+ config.ontology_analytics_redis_host = REDIS_PERSISTENT_HOST.to_s
+ config.ontology_analytics_redis_port = REDIS_PORT.to_i
+ config.search_server_url = SOLR_TERM_SEARCH_URL.to_s
+ config.property_search_server_url = SOLR_PROP_SEARCH_URL.to_s
+ config.replace_url_prefix = true
+ config.rest_url_prefix = REST_URL_PREFIX.to_s
+# config.enable_notifications = false
- # Ontology Google Analytics Redis
- # disabled
- config.ontology_analytics_redis_host = "localhost"
- config.enable_ontology_analytics = false
- config.ontology_analytics_redis_port = 6379
- end
-rescue NameError
- puts "(CNFG) >> LinkedData not available, cannot load config"
+ config.interportal_hash = {
+ "agroportal" => {
+ "api" => "http://data.agroportal.lirmm.fr",
+ "ui" => "http://agroportal.lirmm.fr",
+ "apikey" => "1cfae05f-9e67-486f-820b-b393dec5764b"
+ },
+ "ncbo" => {
+ "api" => "http://data.bioontology.org",
+ "apikey" => "4a5011ea-75fa-4be6-8e89-f45c8c84844e",
+ "ui" => "http://bioportal.bioontology.org",
+ },
+ "sifr" => {
+ "api" => "http://data.bioportal.lirmm.fr",
+ "ui" => "http://bioportal.lirmm.fr",
+ "apikey" => "1cfae05f-9e67-486f-820b-b393dec5764b"
+ }
+ }
+ config.oauth_providers = {
+ github: {
+ check: :access_token,
+ link: 'https://api.github.com/user'
+ },
+ keycloak: {
+ check: :jwt_token,
+ cert: 'KEYCLOAK_SECRET_KEY'
+ },
+ orcid: {
+ check: :access_token,
+ link: 'https://pub.orcid.org/v3.0/me'
+ },
+ google: {
+ check: :access_token,
+ link: 'https://www.googleapis.com/oauth2/v3/userinfo'
+ }
+ }
end
-begin
- Annotator.config do |config|
- config.mgrep_dictionary_file = "/srv/mgrep/dictionary/dictionary.txt"
- config.stop_words_default_file = "./config/default_stop_words.txt"
- config.mgrep_host = "localhost"
- config.mgrep_port = 55555
- config.mgrep_alt_host = "localhost"
- config.mgrep_alt_port = 55555
- config.annotator_redis_host = "localhost"
- config.annotator_redis_port = 6379
- end
-rescue NameError
- puts "(CNFG) >> Annotator not available, cannot load config"
+Annotator.config do |config|
+ config.annotator_redis_host = REDIS_PERSISTENT_HOST.to_s
+ config.annotator_redis_port = REDIS_PORT.to_i
+ config.mgrep_host = MGREP_HOST.to_s
+ config.mgrep_port = MGREP_PORT.to_i
+ config.mgrep_dictionary_file = MGREP_DICTIONARY_FILE.to_s
end
LinkedData::OntologiesAPI.config do |config|
- config.restrict_download = ["ACR0", "ACR1", "ACR2"]
-end
-
-begin
- LinkedData::OntologiesAPI.config do |config|
- config.enable_unicorn_workerkiller = true
- config.enable_throttling = false
- config.enable_monitoring = false
- config.cube_host = "localhost"
- config.http_redis_host = "localhost"
- config.http_redis_port = 6380
- config.ontology_rank = ""
- config.resolver_redis_host = "localhost"
- config.resolver_redis_port = 6379
- config.restrict_download = ["ACR0", "ACR1", "ACR2"]
- end
-rescue NameError
- puts "(CNFG) >> OntologiesAPI not available, cannot load config"
+ config.http_redis_host = REDIS_HTTP_CACHE_HOST.to_s
+ config.http_redis_port = REDIS_PORT.to_i
+# config.restrict_download = ["ACR0", "ACR1", "ACR2"]
end
-begin
- NcboCron.config do |config|
- config.redis_host = Annotator.settings.annotator_redis_host
- config.redis_port = Annotator.settings.annotator_redis_port
- config.enable_ontology_analytics = false
- config.enable_ontologies_report = false
- # Schedulues
- config.cron_schedule = "30 */4 * * *"
- # Pull schedule
- config.pull_schedule = "00 18 * * *"
- # Pull long schedule for ontology that are pulled less frequently: run weekly on monday at 11 a.m. (23:00)
- config.pull_schedule_long = "00 23 * * 1"
- config.pull_long_ontologies = ["BIOREFINERY", "TRANSMAT", "GO"]
- end
-rescue NameError
- puts "(CNFG) >> NcboCron not available, cannot load config"
-end
+NcboCron.config do |config|
+ config.redis_host = REDIS_PERSISTENT_HOST.to_s
+ config.redis_port = REDIS_PORT.to_i
+ config.ontology_report_path = REPORT_PATH
+end
\ No newline at end of file
diff --git a/controllers/agents_controller.rb b/controllers/agents_controller.rb
index 87572e99..1bf86321 100644
--- a/controllers/agents_controller.rb
+++ b/controllers/agents_controller.rb
@@ -14,6 +14,11 @@ class AgentsController < ApplicationController
else
agents = query.to_a
end
+
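+      # Load agents usages when requested via include=usages (or include=all)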
+ if includes_param.include?(:all) || includes_param.include?(:usages)
+ LinkedData::Models::Agent.load_agents_usages(agents)
+ end
+
reply agents
end
diff --git a/docker-compose.yml b/docker-compose.yml
index 5cb64963..f7325381 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,10 +1,5 @@
x-app: &app
- build:
- context: .
- args:
- RUBY_VERSION: '2.7'
- # Increase the version number in the image tag every time Dockerfile or its arguments is changed
- image: ontologies_api:0.0.1
+ image: agroportal/ontologies_api:development
environment: &env
BUNDLE_PATH: /srv/ontoportal/bundle
# default bundle config resolves to /usr/local/bundle/config inside of the container
@@ -39,6 +34,8 @@ x-app: &app
services:
api:
<<: *app
+ env_file:
+ .env
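+    # .env supplies API_URL and the optional local gem paths (see .env.sample)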
environment:
<<: *env
GOO_BACKEND_NAME: 4store
diff --git a/helpers/application_helper.rb b/helpers/application_helper.rb
index 172170fa..d90630a3 100644
--- a/helpers/application_helper.rb
+++ b/helpers/application_helper.rb
@@ -88,7 +88,10 @@ def populate_from_params(obj, params)
value = retrieved_values
elsif attribute_settings && attribute_settings[:enforce] && attribute_settings[:enforce].include?(:date_time)
# TODO: Remove this awful hack when obj.class.model_settings[:range][attribute] contains DateTime class
- value = DateTime.parse(value)
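+          # Parse one date or a list of dates, preserving the original cardinality of the value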
+ is_array = value.is_a?(Array)
+ value = Array(value).map{ |v| DateTime.parse(v) }
+ value = value.first unless is_array
+ value
elsif attribute_settings && attribute_settings[:enforce] && attribute_settings[:enforce].include?(:uri) && attribute_settings[:enforce].include?(:list)
# in case its a list of URI, convert all value to IRI
value = value.map { |v| RDF::IRI.new(v) }
diff --git a/helpers/request_params_helper.rb b/helpers/request_params_helper.rb
index 842ee0a7..59adeba7 100644
--- a/helpers/request_params_helper.rb
+++ b/helpers/request_params_helper.rb
@@ -45,9 +45,9 @@ def apply_submission_filters(query)
isOfType: params[:isOfType]&.split(','), #["http://omv.ontoware.org/2005/05/ontology#Vocabulary"],
hasFormalityLevel: params[:hasFormalityLevel]&.split(','), #["http://w3id.org/nkos/nkostype#thesaurus"],
ontology_viewingRestriction: params[:viewingRestriction]&.split(','), #["private"]
+ status: params[:status]&.split(','), #"retired",
}
inverse_filters = {
- status: params[:status], #"retired",
submissionStatus: params[:submissionStatus] #"RDF",
}
@@ -122,17 +122,24 @@ def add_inverse_filters(inverse_filters, query)
end
def add_acronym_name_filters(query)
- if params[:acronym]
- filter = Goo::Filter.new(extract_attr(:ontology_acronym)).regex(params[:acronym])
- if params[:name]
- filter.or(Goo::Filter.new(extract_attr(:ontology_name)).regex(params[:name]))
- end
- query = query.filter(filter)
- elsif params[:name]
- filter = Goo::Filter.new(extract_attr(:ontology_name)).regex(params[:name])
- query = query.filter(filter)
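+  # Build a single OR-ed regex filter across the acronym, name and description parameters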
+ filters = {
+ acronym: :ontology_acronym,
+ name: :ontology_name,
+ description: :description
+ }.map do |key, attr|
+ (params[key].nil? || params[key].empty?) ? nil : [extract_attr(attr), params[key]]
+ end.compact
+
+ return query if filters.empty?
+
+ key, val = filters.first
+ filter = Goo::Filter.new(key).regex(val)
+
+ filters.drop(1).each do |k, v|
+ filter = filter.or(Goo::Filter.new(k).regex(v))
end
- query
+
+ query.filter(filter)
end
def add_order_by_patterns(query)
diff --git a/helpers/submission_helper.rb b/helpers/submission_helper.rb
index 07f82138..b79737d0 100644
--- a/helpers/submission_helper.rb
+++ b/helpers/submission_helper.rb
@@ -13,6 +13,14 @@ def submission_include_params
if includes.find{|v| v.is_a?(Hash) && v.keys.include?(:contact)}
includes << {:contact=>[:name, :email]}
end
+
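+    # When metrics are requested, also include the individual metrics attributes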
+ if includes.find{|v| v.is_a?(Hash) && v.keys.include?(:metrics)}
+ includes << { metrics: [:maxChildCount, :properties, :classesWithMoreThan25Children,
+ :classesWithOneChild, :individuals, :maxDepth, :classes,
+ :classesWithNoDefinition, :averageChildCount, :numberOfAxioms,
+ :entities]}
+ end
+
includes
end
diff --git a/test/controllers/test_agents_controller.rb b/test/controllers/test_agents_controller.rb
index ef0e5c47..de36bc36 100644
--- a/test/controllers/test_agents_controller.rb
+++ b/test/controllers/test_agents_controller.rb
@@ -28,14 +28,14 @@ def teardown
end
def test_all_agents
- get '/agents'
+ get '/agents?display=all&page=1'
assert last_response.ok?
created_agents = MultiJson.load(last_response.body)
-
@agents.each do |agent|
- created_agent = created_agents.select{|x| x["name"].eql?(agent[:name])}.first
+ created_agent = created_agents["collection"].select{|x| x["name"].eql?(agent[:name])}.first
refute_nil created_agent
+ refute_nil created_agent["usages"]
assert_equal agent[:name], created_agent["name"]
assert_equal agent[:identifiers].size, created_agent["identifiers"].size
assert_equal agent[:identifiers].map{|x| x[:notation]}.sort, created_agent["identifiers"].map{|x| x['notation']}.sort
diff --git a/test/controllers/test_ontology_submissions_controller.rb b/test/controllers/test_ontology_submissions_controller.rb
index 77b6e6bc..095d0339 100644
--- a/test/controllers/test_ontology_submissions_controller.rb
+++ b/test/controllers/test_ontology_submissions_controller.rb
@@ -202,7 +202,7 @@ def test_download_acl_only
end
def test_submissions_pagination
- num_onts_created, created_ont_acronyms = create_ontologies_and_submissions(ont_count: 2, submission_count: 2)
+ num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: 2, submission_count: 2)
get "/submissions"
assert last_response.ok?
@@ -217,6 +217,158 @@ def test_submissions_pagination
assert_equal 1, submissions["collection"].length
end
+ def test_submissions_pagination_filter
+ num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: 10, submission_count: 1)
+ group1 = LinkedData::Models::Group.new(acronym: 'group-1', name: "Test Group 1").save
+ group2 = LinkedData::Models::Group.new(acronym: 'group-2', name: "Test Group 2").save
+ category1 = LinkedData::Models::Category.new(acronym: 'category-1', name: "Test Category 1").save
+ category2 = LinkedData::Models::Category.new(acronym: 'category-2', name: "Test Category 2").save
+
+ ontologies1 = ontologies[0..5].each do |o|
+ o.bring_remaining
+ o.group = [group1]
+ o.hasDomain = [category1]
+ o.save
+ end
+
+ ontologies2 = ontologies[6..8].each do |o|
+ o.bring_remaining
+ o.group = [group2]
+ o.hasDomain = [category2]
+ o.save
+ end
+
+ # test filter by group and category
+ get "/submissions?page=1&pagesize=100&group=#{group1.acronym}"
+ assert last_response.ok?
+ assert_equal ontologies1.size, MultiJson.load(last_response.body)["collection"].length
+ get "/submissions?page=1&pagesize=100&group=#{group2.acronym}"
+ assert last_response.ok?
+ assert_equal ontologies2.size, MultiJson.load(last_response.body)["collection"].length
+ get "/submissions?page=1&pagesize=100&hasDomain=#{category1.acronym}"
+ assert last_response.ok?
+ assert_equal ontologies1.size, MultiJson.load(last_response.body)["collection"].length
+ get "/submissions?page=1&pagesize=100&hasDomain=#{category2.acronym}"
+ assert last_response.ok?
+ assert_equal ontologies2.size, MultiJson.load(last_response.body)["collection"].length
+ get "/submissions?page=1&pagesize=100&hasDomain=#{category2.acronym}&group=#{group1.acronym}"
+ assert last_response.ok?
+ assert_equal 0, MultiJson.load(last_response.body)["collection"].length
+ get "/submissions?page=1&pagesize=100&hasDomain=#{category2.acronym}&group=#{group2.acronym}"
+ assert last_response.ok?
+ assert_equal ontologies2.size, MultiJson.load(last_response.body)["collection"].length
+
+ ontologies3 = ontologies[9]
+ ontologies3.bring_remaining
+ ontologies3.group = [group1, group2]
+ ontologies3.hasDomain = [category1, category2]
+ ontologies3.name = "name search test"
+ ontologies3.save
+
+ # test search with acronym
+ [
+ [ 1, ontologies.first.acronym],
+ [ 1, ontologies.last.acronym],
+ [ontologies.size, 'TEST-ONT']
+ ].each do |count, acronym_search|
+ get "/submissions?page=1&pagesize=100&acronym=#{acronym_search}"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ assert_equal count, submissions["collection"].length
+ end
+
+
+ # test search with name
+ [
+ [ 1, ontologies.first.name],
+ [ 1, ontologies.last.name],
+ [ontologies.size - 1, 'TEST-ONT']
+ ].each do |count, name_search|
+ get "/submissions?page=1&pagesize=100&name=#{name_search}"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ assert_equal count, submissions["collection"].length
+ end
+
+ # test search with name and acronym
+ # search by name
+ get "/submissions?page=1&pagesize=100&name=search&acronym=search"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ assert_equal 1, submissions["collection"].length
+ # search by acronym
+ get "/submissions?page=1&pagesize=100&name=9&acronym=9"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ assert_equal 1, submissions["collection"].length
+ # search by acronym or name
+ get "/submissions?page=1&pagesize=100&name=search&acronym=8"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ assert_equal 2, submissions["collection"].length
+
+ ontologies.first.name = "sort by test"
+ ontologies.first.save
+ sub = ontologies.first.latest_submission(status: :any).bring_remaining
+ sub.status = 'retired'
+ sub.description = "234"
+ sub.creationDate = DateTime.yesterday.to_datetime
+ sub.hasOntologyLanguage = LinkedData::Models::OntologyFormat.find('SKOS').first
+ sub.save
+
+    # test search with sort
+ get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&order_by=ontology_name"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ refute_empty submissions["collection"]
+ assert_equal ontologies.map{|x| x.name}.sort, submissions["collection"].map{|x| x["ontology"]["name"]}
+
+ get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&order_by=creationDate"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ refute_empty submissions["collection"]
+ assert_equal ontologies.map{|x| x.latest_submission(status: :any).bring(:creationDate).creationDate}.sort.map(&:to_s), submissions["collection"].map{|x| x["creationDate"]}.reverse
+
+ # test search with format
+ get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&hasOntologyLanguage=SKOS"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ refute_empty submissions["collection"]
+ assert_equal 1, submissions["collection"].size
+
+ get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&hasOntologyLanguage=OWL"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ refute_empty submissions["collection"]
+    assert_equal ontologies.size - 1, submissions["collection"].size
+
+ # test ontology filter with submission filter attributes
+ get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&group=group-2&category=category-2&hasOntologyLanguage=OWL"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ refute_empty submissions["collection"]
+    assert_equal ontologies2.size + 1, submissions["collection"].size
+
+ # test ontology filter with status
+ get "/submissions?page=1&pagesize=100&status=retired"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ refute_empty submissions["collection"]
+    assert_equal 1, submissions["collection"].size
+
+ get "/submissions?page=1&pagesize=100&status=alpha,beta,production"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ refute_empty submissions["collection"]
+    assert_equal ontologies.size - 1, submissions["collection"].size
+ get "/submissions?page=1&pagesize=100&description=234&acronym=234&name=234"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+    assert_equal 1, submissions["collection"].size
+ end
def test_submissions_default_includes
ontology_count = 5