diff --git a/.env.sample b/.env.sample
new file mode 100644
index 00000000..719949b1
--- /dev/null
+++ b/.env.sample
@@ -0,0 +1 @@
+API_URL=http://localhost:9393
\ No newline at end of file
diff --git a/.github/workflows/docker-dev-release.yml b/.github/workflows/docker-dev-release.yml
new file mode 100644
index 00000000..dda0554d
--- /dev/null
+++ b/.github/workflows/docker-dev-release.yml
@@ -0,0 +1,45 @@
+name: Docker branch Images build
+
+on:
+ push:
+ branches:
+ - development
+ - stage
+ - test
+
+jobs:
+ push_to_registry:
+ name: Push Docker branch image to Docker Hub
+ runs-on: ubuntu-latest
+ steps:
+ - name: Check out the repo
+ uses: actions/checkout@v3
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v2
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2
+
+ - name: Log in to Docker Hub
+ uses: docker/login-action@v2
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ - name: Extract metadata (tags, labels) for Docker
+ id: meta
+ uses: docker/metadata-action@v4
+ with:
+ images: agroportal/ontologies_api
+
+ - name: Build and push Docker image
+ uses: docker/build-push-action@v4
+ with:
+ context: .
+ platforms: linux/amd64,linux/arm64
+ build-args: |
+ RUBY_VERSION=2.7
+ push: true
+ tags: ${{ steps.meta.outputs.tags }}
+ labels: ${{ steps.meta.outputs.labels }}
diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml
new file mode 100644
index 00000000..0368c3ac
--- /dev/null
+++ b/.github/workflows/docker-image.yml
@@ -0,0 +1,55 @@
+name: Docker Images build
+
+on:
+ push:
+ branches:
+ - development
+ - stage
+ - test
+ release:
+ types: [ published ]
+jobs:
+ push_to_registry:
+ name: Push Docker branch image to Docker Hub
+ runs-on: ubuntu-latest
+ steps:
+ - name: Check out the repo
+ uses: actions/checkout@v3
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v2
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2
+
+ - name: Log in to Docker Hub
+ uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ - name: Log in to the Container registry
+ uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
+ with:
+ registry: ghcr.io
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Extract metadata (tags, labels) for Docker
+ id: meta
+ uses: docker/metadata-action@v4
+ with:
+ images: |
+ agroportal/ontologies_api
+ ghcr.io/${{ github.repository }}
+
+ - name: Build and push Docker image
+ uses: docker/build-push-action@v4
+ with:
+ context: .
+ platforms: linux/amd64, linux/arm64
+ build-args: |
+ RUBY_VERSION=2.7.8
+ push: true
+ tags: ${{ steps.meta.outputs.tags }}
+ labels: ${{ steps.meta.outputs.labels }}
diff --git a/.github/workflows/ruby-unit-tests.yml b/.github/workflows/ruby-unit-tests.yml
index 6b2c973d..e7b3524f 100644
--- a/.github/workflows/ruby-unit-tests.yml
+++ b/.github/workflows/ruby-unit-tests.yml
@@ -12,6 +12,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
+ - name: copy-env-config
+ run: cp .env.sample .env
- name: Build docker-compose
run: docker-compose --profile 4store build #profile flag is set in order to build all containers in this step
- name: Run unit tests
diff --git a/.gitignore b/.gitignore
index 886a220f..8b568832 100644
--- a/.gitignore
+++ b/.gitignore
@@ -71,3 +71,5 @@ test/data/ontology_files/catalog-v001.xml
create_permissions.log
ontologies_api.iml
+
+.env
diff --git a/Dockerfile b/Dockerfile
index 3e65fe4a..6294e102 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -21,6 +21,7 @@ ENV BUNDLE_PATH=/srv/ontoportal/bundle
RUN bundle install
COPY . /srv/ontoportal/ontologies_api
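+# Create a default development config from the bundled sample so the container can start without an external config file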
+RUN cp /srv/ontoportal/ontologies_api/config/environments/config.rb.sample /srv/ontoportal/ontologies_api/config/environments/development.rb
EXPOSE 9393
-CMD ["bundle", "exec", "rackup", "-p", "9393", "--host", "0.0.0.0"]
+CMD ["bundle", "exec", "rackup", "-p", "9393", "--host", "0.0.0.0"]
\ No newline at end of file
diff --git a/Gemfile b/Gemfile
index b4c0c5f7..5716331d 100644
--- a/Gemfile
+++ b/Gemfile
@@ -1,6 +1,6 @@
source 'https://rubygems.org'
-gem 'activesupport', '~> 3.1'
+gem 'activesupport', '~> 3.2'
# see https://github.com/ncbo/ontologies_api/issues/69
gem 'bigdecimal', '1.4.2'
gem 'faraday', '~> 1.9'
@@ -63,6 +63,7 @@ group :development do
gem 'shotgun', github: 'palexander/shotgun', branch: 'ncbo'
end
+
group :profiling do
gem 'rack-mini-profiler'
end
diff --git a/Gemfile.lock b/Gemfile.lock
index 963b378a..9875b7fc 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -53,7 +53,7 @@ GIT
GIT
remote: https://github.com/ontoportal-lirmm/ontologies_linked_data.git
- revision: b49ad10781e83360a5dd1968972c4048592eefda
+ revision: 4c7dfa80a8bb4a7d8cfb7ad1fc8a1a88e420e59e
branch: development
specs:
ontologies_linked_data (0.0.1)
@@ -108,8 +108,8 @@ GEM
airbrussh (1.5.0)
sshkit (>= 1.6.1, != 1.7.0)
backports (3.24.1)
- base64 (0.1.1)
- bcrypt (3.1.19)
+ base64 (0.2.0)
+ bcrypt (3.1.20)
bcrypt_pbkdf (1.1.0)
bigdecimal (1.4.2)
builder (3.2.4)
@@ -131,11 +131,10 @@ GEM
rexml
cube-ruby (0.0.3)
dante (0.2.0)
- date (3.3.3)
+ date (3.3.4)
declarative (0.0.20)
docile (1.4.0)
- domain_name (0.5.20190701)
- unf (>= 0.0.5, < 1.0.0)
+ domain_name (0.6.20231109)
ed25519 (1.3.0)
faraday (1.10.3)
faraday-em_http (~> 1.0)
@@ -196,8 +195,8 @@ GEM
json_pure (2.6.3)
jwt (2.7.1)
kgio (2.11.4)
- libxml-ruby (4.1.1)
- logger (1.5.3)
+ libxml-ruby (4.1.2)
+ logger (1.6.0)
macaddr (1.7.2)
systemu (~> 2.6.5)
mail (2.8.1)
@@ -217,12 +216,12 @@ GEM
multi_json (1.15.0)
multipart-post (2.3.0)
net-http-persistent (2.9.4)
- net-imap (0.4.3)
+ net-imap (0.4.6)
date
net-protocol
net-pop (0.1.2)
net-protocol
- net-protocol (0.2.1)
+ net-protocol (0.2.2)
timeout
net-scp (4.0.0)
net-ssh (>= 2.6.5, < 8.0.0)
@@ -242,7 +241,7 @@ GEM
pry (0.14.2)
coderay (~> 1.1)
method_source (~> 1.0)
- public_suffix (5.0.3)
+ public_suffix (5.0.4)
rack (1.6.13)
rack-accept (0.4.5)
rack (>= 0.4)
@@ -321,20 +320,17 @@ GEM
rack-test
sinatra (~> 1.4.0)
tilt (>= 1.3, < 3)
- sshkit (1.21.5)
+ sshkit (1.21.6)
net-scp (>= 1.1.2)
net-ssh (>= 2.8.0)
systemu (2.6.5)
temple (0.10.3)
tilt (2.3.0)
- timeout (0.4.0)
+ timeout (0.4.1)
trailblazer-option (0.1.2)
tzinfo (2.0.6)
concurrent-ruby (~> 1.0)
uber (0.1.0)
- unf (0.1.4)
- unf_ext
- unf_ext (0.0.8.2)
unicorn (6.1.0)
kgio (~> 2.6)
raindrops (~> 0.7)
@@ -350,10 +346,11 @@ GEM
webrick (1.8.1)
PLATFORMS
+ x86_64-darwin-23
x86_64-linux
DEPENDENCIES
- activesupport (~> 3.1)
+ activesupport (~> 3.2)
bcrypt_pbkdf (>= 1.0, < 2.0)
bigdecimal (= 1.4.2)
capistrano (~> 3)
@@ -406,4 +403,4 @@ DEPENDENCIES
webmock
BUNDLED WITH
- 2.3.14
+ 2.4.21
diff --git a/README.md b/README.md
index dfaa77ea..b4caa10a 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,53 @@
ontologies_api provides a RESTful interface for accessing [BioPortal](https://bioportal.bioontology.org/) (an open repository of biomedical ontologies). Supported services include downloads, search, access to terms and concepts, text annotation, and much more.
-## Prerequisites
+# Run ontologies_api
+
+## Using the OntoPortal API utilities script
+### See help
+
+```bash
+bin/ontoportal help
+```
+
+```
+Usage: bin/ontoportal {dev|test|run|help} [--reset-cache] [--api-url API_URL] [--api-key API_KEY]
+ dev : Start the Ontoportal API development server.
+ Example: bin/ontoportal dev --api-url http://localhost:9393
+ Use --reset-cache to remove volumes: bin/ontoportal dev --reset-cache
+ test : Run tests.
+ run : Run a command in the Ontoportal API Docker container.
+ help : Show this help message.
+
+Description:
+ This script provides convenient commands for managing an Ontoportal API
+ application using Docker Compose. It includes options for starting the development server,
+ running tests, and executing commands within the Ontoportal API Docker container.
+
+Goals:
+ - Simplify common tasks related to Ontoportal API development using Docker.
+ - Provide a consistent and easy-to-use interface for common actions.
+
+
+```
+### Configuration
+```bash
+cp .env.sample .env
+```
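+
+The `.env` file holds local settings read by `bin/ontoportal` and passed to the `api` container; the sample defines only the API base URL:
+
+```bash
+# .env (copied from .env.sample)
+API_URL=http://localhost:9393
+```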
+
+### Run dev
+```bash
+bin/ontoportal dev
+```
+
+### Run tests with a local OntoPortal API
+```bash
+bin/ontoportal test
+```
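+
+The `test` command forwards an optional test file path and any extra arguments to `rake test` (as `TEST` and `TESTOPTS`), so a single test file can be run with, for example:
+
+```bash
+bin/ontoportal test test/controllers/test_agents_controller.rb
+```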
+
+
+## Manually
+### Prerequisites
- [Ruby 2.x](http://www.ruby-lang.org/en/downloads/) (most recent patch level)
- [rbenv](https://github.com/sstephenson/rbenv) and [ruby-build](https://github.com/sstephenson/ruby-build) (optional)
@@ -19,7 +65,7 @@ ontologies_api provides a RESTful interface for accessing [BioPortal](https://bi
- [Solr](http://lucene.apache.org/solr/)
- BioPortal indexes ontology class and property content using Solr (a Lucene-based server)
-## Configuring Solr
+### Configuring Solr
To configure Solr for ontologies_api usage, modify the example project included with Solr by doing the following:
@@ -46,22 +92,22 @@ To configure Solr for ontologies_api usage, modify the example project included
# Edit the ontologies_api/config/environments/{env}.rb file to point to your running instance:
# http://localhost:8983/solr/NCBO1
-## Installing
+### Installing
-### Clone the repository
+#### Clone the repository
```
$ git clone git@github.com:ncbo/ontologies_api.git
$ cd ontologies_api
```
-### Install the dependencies
+#### Install the dependencies
```
$ bundle install
```
-### Create an environment configuration file
+#### Create an environment configuration file
```
$ cp config/environments/config.rb.sample config/environments/development.rb
@@ -73,7 +119,7 @@ production.rb
development.rb
test.rb
-### Run the unit tests (optional)
+#### Run the unit tests (optional)
Requires a configuration file for the test environment:
@@ -87,7 +133,7 @@ Execute the suite of tests from the command line:
$ bundle exec rake test
```
-### Run the application
+#### Run the application
```
$ bundle exec rackup --port 9393
diff --git a/bin/ontoportal b/bin/ontoportal
new file mode 100755
index 00000000..573b49c7
--- /dev/null
+++ b/bin/ontoportal
@@ -0,0 +1,166 @@
+#!/usr/bin/env bash
+
+# Function to display script usage information
+show_help() {
+ echo "Usage: $0 {dev|test|run|help} [--reset-cache] [--api-url API_URL] [--api-key API_KEY]"
+ echo " dev : Start the Ontoportal API development server."
+ echo " Example: $0 dev --api-url http://localhost:9393"
+ echo " Use --reset-cache to remove volumes: $0 dev --reset-cache"
+ echo " test : Run tests."
+ echo " run : Run a command in the Ontoportal API Docker container."
+ echo " help : Show this help message."
+ echo
+ echo "Description:"
+ echo " This script provides convenient commands for managing an Ontoportal API"
+ echo " application using Docker Compose. It includes options for starting the development server,"
+ echo " running tests, and executing commands within the Ontoportal API Docker container."
+ echo
+ echo "Goals:"
+ echo " - Simplify common tasks related to Ontoportal API development using Docker."
+ echo " - Provide a consistent and easy-to-use interface for common actions."
+}
+# Function to update or create the .env file with API_URL and API_KEY
+update_env_file() {
+ local api_url="$1"
+
+ # Update the .env file with the provided values
+ file_content=$(<.env)
+
+ # Make changes to the variable
+ while IFS= read -r line; do
+ if [[ "$line" == "API_URL="* ]]; then
+ echo "API_URL=$api_url"
+ else
+ echo "$line"
+ fi
+ done <<< "$file_content" > .env
+}
+
+# Function to create configuration files if they don't exist
+create_config_files() {
+ if [ ! -f ".env" ]; then
+ echo "Creating .env file from env.sample"
+ cp .env.sample .env
+ fi
+
+ if [ ! -f "config/environments/development.rb" ]; then
+ echo "Creating config/environments/development.rb file from config/environments/config.rb.sample"
+ cp config/environments/config.rb.sample config/environments/development.rb
+ fi
+}
+
+# Function to handle the "dev" option
+dev() {
+ echo "Starting Ontoportal API development server..."
+
+ create_config_files
+ local reset_cache=false
+ local api_url=""
+
+
+ # Check for command line arguments
+ while [[ "$#" -gt 0 ]]; do
+ case $1 in
+ --reset-cache)
+ reset_cache=true
+ shift
+ ;;
+ --api-url)
+ api_url="$2"
+ shift 2
+ ;;
+ *)
+ echo "Unknown option: $1"
+ show_help
+ exit 1
+ ;;
+ esac
+ done
+
+
+
+ # Check if arguments are provided
+ if [ -n "$api_url" ] ; then
+ # If arguments are provided, update the .env file
+ update_env_file "$api_url"
+ else
+ # If no arguments, fetch values from the .env file
+ source .env
+ api_url="$API_URL"
+ fi
+
+ if [ -z "$api_url" ] ; then
+ echo "Error: Missing required arguments. Please provide both --api-url or update them in your .env"
+ exit 1
+ fi
+
+ # Check if --reset-cache is present and execute docker compose down --volumes
+ if [ "$reset_cache" = true ]; then
+ echo "Resetting cache. Running: docker compose down --volumes"
+ docker compose down --volumes
+ fi
+
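+ # --service-ports publishes the api service ports so the server is reachable on 9393 from the host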
+ echo "Run: bundle exec api s -b 0.0.0.0 -p 3000"
+ docker compose run --rm -it --service-ports api bash -c "(bundle check || bundle install) && bundle exec rackup -o 0.0.0.0 --port 9393"
+}
+
+# Function to handle the "test" option
+test() {
+
+
+ local api_url=""
+ local test_path=""
+ local test_options=""
+
+ # Check for command line arguments
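+ # --api-url sets the target API URL; the first remaining argument is treated as the test path and the rest become TESTOPTS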
+ while [ "$#" -gt 0 ]; do
+ case "$1" in
+ --api-url)
+ shift
+ api_url="$1"
+ ;;
+ *)
+ if [ -z "$test_path" ]; then
+ test_path="$1"
+ else
+ test_options="$test_options $1"
+ fi
+ ;;
+ esac
+ shift
+ done
+
+
+
+ script="API_URL=$api_url bundle exec rake test TEST=\"$test_path\" TESTOPTS=\"$test_options\""
+ echo "Running tests..."
+ echo "Run: $script"
+
+ docker compose run --rm -it api bash -c "(bundle check || bundle install) && $script"
+}
+
+# Function to handle the "run" option
+run() {
+ echo "Run: $*"
+ docker compose run --rm -it api bash -c "$*"
+}
+
+# Main script logic
+case "$1" in
+ "run")
+ run "${@:2}"
+ ;;
+ "dev")
+ dev "${@:2}"
+ ;;
+ "test")
+ test "${@:2}"
+ ;;
+ "help")
+ show_help
+ ;;
+ *)
+ show_help
+ exit 1
+ ;;
+esac
diff --git a/config/environments/config.rb.sample b/config/environments/config.rb.sample
index e5f9fd9c..8713b9f2 100644
--- a/config/environments/config.rb.sample
+++ b/config/environments/config.rb.sample
@@ -3,120 +3,106 @@
# development.rb
# test.rb
-begin
- LinkedData.config do |config|
- config.repository_folder = "/srv/ncbo/repository"
- config.goo_host = "localhost"
- config.goo_port = 9000
- config.search_server_url = "http://localhost:8082/solr/term_search_core1"
- config.property_search_server_url = "http://localhost:8082/solr/prop_search_core1"
- config.rest_url_prefix = "http://#{$SITE_URL}:8080/"
- config.replace_url_prefix = true
- config.enable_security = true
-
- config.apikey = "24e0e77e-54e0-11e0-9d7b-005056aa3316"
- config.ui_host = "http://#{$SITE_URL}"
- config.enable_monitoring = false
- config.cube_host = "localhost"
- config.enable_resource_index = false
-
- # Used to define other BioPortal to which this appliance can be mapped to
- # Example to map to the NCBO BioPortal : {"ncbo" => {"api" => "http://data.bioontology.org", "ui" => "http://bioportal.bioontology.org", "apikey" => ""}}
- # Then create the mapping using the following class in JSON : "http://purl.bioontology.org/ontology/MESH/C585345": "ncbo:MESH"
- # Where "ncbo" is the key in the interportal_hash. Use only lowercase letters for this key.
- # And do not use "ext" as a key, it is reserved for clases outside of any BioPortal
- config.interportal_hash = {}
-
- # Caches
- config.http_redis_host = "localhost"
- config.http_redis_port = 6380
- config.enable_http_cache = true
- config.goo_redis_host = "localhost"
- config.goo_redis_port = 6382
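+# Each setting below is read from the environment, with a fallback default suited to local development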
+GOO_BACKEND_NAME = ENV.include?("GOO_BACKEND_NAME") ? ENV["GOO_BACKEND_NAME"] : "4store"
+GOO_HOST = ENV.include?("GOO_HOST") ? ENV["GOO_HOST"] : "localhost"
+GOO_PATH_DATA = ENV.include?("GOO_PATH_DATA") ? ENV["GOO_PATH_DATA"] : "/data/"
+GOO_PATH_QUERY = ENV.include?("GOO_PATH_QUERY") ? ENV["GOO_PATH_QUERY"] : "/sparql/"
+GOO_PATH_UPDATE = ENV.include?("GOO_PATH_UPDATE") ? ENV["GOO_PATH_UPDATE"] : "/update/"
+GOO_PORT = ENV.include?("GOO_PORT") ? ENV["GOO_PORT"] : 9000
+MGREP_HOST = ENV.include?("MGREP_HOST") ? ENV["MGREP_HOST"] : "localhost"
+MGREP_PORT = ENV.include?("MGREP_PORT") ? ENV["MGREP_PORT"] : 55555
+MGREP_DICTIONARY_FILE = ENV.include?("MGREP_DICTIONARY_FILE") ? ENV["MGREP_DICTIONARY_FILE"] : "./test/data/dictionary.txt"
+REDIS_GOO_CACHE_HOST = ENV.include?("REDIS_GOO_CACHE_HOST") ? ENV["REDIS_GOO_CACHE_HOST"] : "localhost"
+REDIS_HTTP_CACHE_HOST = ENV.include?("REDIS_HTTP_CACHE_HOST") ? ENV["REDIS_HTTP_CACHE_HOST"] : "localhost"
+REDIS_PERSISTENT_HOST = ENV.include?("REDIS_PERSISTENT_HOST") ? ENV["REDIS_PERSISTENT_HOST"] : "localhost"
+REDIS_PORT = ENV.include?("REDIS_PORT") ? ENV["REDIS_PORT"] : 6379
+REPORT_PATH = ENV.include?("REPORT_PATH") ? ENV["REPORT_PATH"] : "./test/ontologies_report.json"
+REPOSITORY_FOLDER = ENV.include?("REPOSITORY_FOLDER") ? ENV["REPOSITORY_FOLDER"] : "./test/data/ontology_files/repo"
+REST_URL_PREFIX = ENV.include?("REST_URL_PREFIX") ? ENV["REST_URL_PREFIX"] : ENV["API_URL"] || "http://localhost:9393"
+SOLR_PROP_SEARCH_URL = ENV.include?("SOLR_PROP_SEARCH_URL") ? ENV["SOLR_PROP_SEARCH_URL"] : "http://localhost:8983/solr/prop_search_core1"
+SOLR_TERM_SEARCH_URL = ENV.include?("SOLR_TERM_SEARCH_URL") ? ENV["SOLR_TERM_SEARCH_URL"] : "http://localhost:8983/solr/term_search_core1"
- Goo.use_cache = true
-
- # Email notifications
- config.enable_notifications = false
- config.email_sender = "admin@example.org" # Default sender for emails
- config.email_override = "override@example.org" # all email gets sent here. Disable with email_override_disable.
- config.email_disable_override = true
- config.smtp_host = "localhost"
- config.smtp_port = 25
- config.smtp_auth_type = :none # :none, :plain, :login, :cram_md5
- config.smtp_domain = "example.org"
- # Emails of the instance administrators to get mail notifications when new user or new ontology
- config.admin_emails = ["admin@example.org"]
+begin
+ # For prefLabel, extract values in the main languages first, or any language if none of the main ones is found.
+ # For other properties, only values whose language is included in the main languages are used.
+ Goo.main_languages = ["en", "fr"]
+ Goo.use_cache = false
+rescue NoMethodError
+ puts "(CNFG) >> Goo.main_lang not available"
+end
- # PURL server config parameters
- config.enable_purl = false
- config.purl_host = "purl.example.org"
- config.purl_port = 80
- config.purl_username = "admin"
- config.purl_password = "password"
- config.purl_maintainers = "admin"
- config.purl_target_url_prefix = "http://example.org"
+LinkedData.config do |config|
+ config.goo_backend_name = GOO_BACKEND_NAME.to_s
+ config.goo_host = GOO_HOST.to_s
+ config.goo_port = GOO_PORT.to_i
+ config.goo_path_query = GOO_PATH_QUERY.to_s
+ config.goo_path_data = GOO_PATH_DATA.to_s
+ config.goo_path_update = GOO_PATH_UPDATE.to_s
+ config.goo_redis_host = REDIS_GOO_CACHE_HOST.to_s
+ config.goo_redis_port = REDIS_PORT.to_i
+ config.http_redis_host = REDIS_HTTP_CACHE_HOST.to_s
+ config.http_redis_port = REDIS_PORT.to_i
+ config.ontology_analytics_redis_host = REDIS_PERSISTENT_HOST.to_s
+ config.ontology_analytics_redis_port = REDIS_PORT.to_i
+ config.search_server_url = SOLR_TERM_SEARCH_URL.to_s
+ config.property_search_server_url = SOLR_PROP_SEARCH_URL.to_s
+ config.replace_url_prefix = true
+ config.rest_url_prefix = REST_URL_PREFIX.to_s
+# config.enable_notifications = false
- # Ontology Google Analytics Redis
- # disabled
- config.ontology_analytics_redis_host = "localhost"
- config.enable_ontology_analytics = false
- config.ontology_analytics_redis_port = 6379
- end
-rescue NameError
- puts "(CNFG) >> LinkedData not available, cannot load config"
+ config.interportal_hash = {
+ "agroportal" => {
+ "api" => "http://data.agroportal.lirmm.fr",
+ "ui" => "http://agroportal.lirmm.fr",
+ "apikey" => "1cfae05f-9e67-486f-820b-b393dec5764b"
+ },
+ "ncbo" => {
+ "api" => "http://data.bioontology.org",
+ "apikey" => "4a5011ea-75fa-4be6-8e89-f45c8c84844e",
+ "ui" => "http://bioportal.bioontology.org",
+ },
+ "sifr" => {
+ "api" => "http://data.bioportal.lirmm.fr",
+ "ui" => "http://bioportal.lirmm.fr",
+ "apikey" => "1cfae05f-9e67-486f-820b-b393dec5764b"
+ }
+ }
+ config.oauth_providers = {
+ github: {
+ check: :access_token,
+ link: 'https://api.github.com/user'
+ },
+ keycloak: {
+ check: :jwt_token,
+ cert: 'KEYCLOAK_SECRET_KEY'
+ },
+ orcid: {
+ check: :access_token,
+ link: 'https://pub.orcid.org/v3.0/me'
+ },
+ google: {
+ check: :access_token,
+ link: 'https://www.googleapis.com/oauth2/v3/userinfo'
+ }
+ }
end
-begin
- Annotator.config do |config|
- config.mgrep_dictionary_file = "/srv/mgrep/dictionary/dictionary.txt"
- config.stop_words_default_file = "./config/default_stop_words.txt"
- config.mgrep_host = "localhost"
- config.mgrep_port = 55555
- config.mgrep_alt_host = "localhost"
- config.mgrep_alt_port = 55555
- config.annotator_redis_host = "localhost"
- config.annotator_redis_port = 6379
- end
-rescue NameError
- puts "(CNFG) >> Annotator not available, cannot load config"
+Annotator.config do |config|
+ config.annotator_redis_host = REDIS_PERSISTENT_HOST.to_s
+ config.annotator_redis_port = REDIS_PORT.to_i
+ config.mgrep_host = MGREP_HOST.to_s
+ config.mgrep_port = MGREP_PORT.to_i
+ config.mgrep_dictionary_file = MGREP_DICTIONARY_FILE.to_s
end
LinkedData::OntologiesAPI.config do |config|
- config.restrict_download = ["ACR0", "ACR1", "ACR2"]
-end
-
-begin
- LinkedData::OntologiesAPI.config do |config|
- config.enable_unicorn_workerkiller = true
- config.enable_throttling = false
- config.enable_monitoring = false
- config.cube_host = "localhost"
- config.http_redis_host = "localhost"
- config.http_redis_port = 6380
- config.ontology_rank = ""
- config.resolver_redis_host = "localhost"
- config.resolver_redis_port = 6379
- config.restrict_download = ["ACR0", "ACR1", "ACR2"]
- end
-rescue NameError
- puts "(CNFG) >> OntologiesAPI not available, cannot load config"
+ config.http_redis_host = REDIS_HTTP_CACHE_HOST.to_s
+ config.http_redis_port = REDIS_PORT.to_i
+# config.restrict_download = ["ACR0", "ACR1", "ACR2"]
end
-begin
- NcboCron.config do |config|
- config.redis_host = Annotator.settings.annotator_redis_host
- config.redis_port = Annotator.settings.annotator_redis_port
- config.enable_ontology_analytics = false
- config.enable_ontologies_report = false
- # Schedulues
- config.cron_schedule = "30 */4 * * *"
- # Pull schedule
- config.pull_schedule = "00 18 * * *"
- # Pull long schedule for ontology that are pulled less frequently: run weekly on monday at 11 a.m. (23:00)
- config.pull_schedule_long = "00 23 * * 1"
- config.pull_long_ontologies = ["BIOREFINERY", "TRANSMAT", "GO"]
- end
-rescue NameError
- puts "(CNFG) >> NcboCron not available, cannot load config"
-end
+NcboCron.config do |config|
+ config.redis_host = REDIS_PERSISTENT_HOST.to_s
+ config.redis_port = REDIS_PORT.to_i
+ config.ontology_report_path = REPORT_PATH
+end
\ No newline at end of file
diff --git a/controllers/agents_controller.rb b/controllers/agents_controller.rb
index 87572e99..1bf86321 100644
--- a/controllers/agents_controller.rb
+++ b/controllers/agents_controller.rb
@@ -14,6 +14,11 @@ class AgentsController < ApplicationController
else
agents = query.to_a
end
+
+ if includes_param.include?(:all) || includes_param.include?(:usages)
+ LinkedData::Models::Agent.load_agents_usages(agents)
+ end
+
reply agents
end
diff --git a/docker-compose.yml b/docker-compose.yml
index 5cb64963..f7325381 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,10 +1,5 @@
x-app: &app
- build:
- context: .
- args:
- RUBY_VERSION: '2.7'
- # Increase the version number in the image tag every time Dockerfile or its arguments is changed
- image: ontologies_api:0.0.1
+ image: agroportal/ontologies_api:development
environment: &env
BUNDLE_PATH: /srv/ontoportal/bundle
# default bundle config resolves to /usr/local/bundle/config inside of the container
@@ -39,6 +34,8 @@ x-app: &app
services:
api:
<<: *app
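+ # local settings (e.g. API_URL) are loaded from .env (see .env.sample)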
+ env_file:
+ .env
environment:
<<: *env
GOO_BACKEND_NAME: 4store
diff --git a/test/controllers/test_agents_controller.rb b/test/controllers/test_agents_controller.rb
index e7521b37..de36bc36 100644
--- a/test/controllers/test_agents_controller.rb
+++ b/test/controllers/test_agents_controller.rb
@@ -28,12 +28,12 @@ def teardown
end
def test_all_agents
- get '/agents?display=all'
+ get '/agents?display=all&page=1'
assert last_response.ok?
created_agents = MultiJson.load(last_response.body)
@agents.each do |agent|
- created_agent = created_agents.select{|x| x["name"].eql?(agent[:name])}.first
+ created_agent = created_agents["collection"].select{|x| x["name"].eql?(agent[:name])}.first
refute_nil created_agent
refute_nil created_agent["usages"]
assert_equal agent[:name], created_agent["name"]