diff --git a/.dockerignore b/.dockerignore
index cf76ed57..3b15d33c 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -9,3 +9,4 @@ tmp/*
# Editor temp files
*.swp
*.swo
+test/solr
diff --git a/Gemfile b/Gemfile
index 5ff855ea..127ad02f 100644
--- a/Gemfile
+++ b/Gemfile
@@ -1,6 +1,6 @@
source 'https://rubygems.org'
-gem 'activesupport', '~> 3.0'
+gem 'activesupport', '~> 3.1'
# see https://github.com/ncbo/ontologies_api/issues/69
gem 'bigdecimal', '1.4.2'
gem 'faraday', '~> 1.9'
@@ -13,12 +13,13 @@ gem 'rake', '~> 10.0'
gem 'sinatra', '~> 1.0'
gem 'sinatra-advanced-routes'
gem 'sinatra-contrib', '~> 1.0'
+gem 'request_store'
# Rack middleware
gem 'ffi'
gem 'rack-accept', '~> 0.4'
gem 'rack-attack', '~> 6.6.1', require: 'rack/attack'
-gem 'rack-cache', '~> 1.0'
+gem 'rack-cache', '~> 1.13.0'
gem 'rack-cors', require: 'rack/cors'
# GitHub dependency can be removed when https://github.com/niko/rack-post-body-to-params/pull/6 is merged and released
gem 'rack-post-body-to-params', github: 'palexander/rack-post-body-to-params', branch: 'multipart_support'
@@ -26,8 +27,9 @@ gem 'rack-timeout'
gem 'redis-rack-cache', '~> 2.0'
# Data access (caching)
-gem 'redis'
+gem 'redis', '~> 4.8.1'
gem 'redis-activesupport'
+gem 'redis-store', '1.9.1'
# Monitoring
gem 'cube-ruby', require: 'cube'
diff --git a/Gemfile.lock b/Gemfile.lock
index 46b2a605..fabf3bd7 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -1,6 +1,6 @@
GIT
remote: https://github.com/ncbo/ncbo_ontology_recommender.git
- revision: d0ac992c88bd417f2f2137ba62934c3c41b6db7c
+ revision: 83e835de368bc9f19da800a477982e0ad770900d
branch: master
specs:
ncbo_ontology_recommender (0.0.1)
@@ -11,7 +11,7 @@ GIT
GIT
remote: https://github.com/ontoportal-lirmm/goo.git
- revision: cda6aff2338e2a2831e4e7bf716abdf8fa8483d2
+ revision: ddb95e427950fde3ac715aec340394208c8166fe
branch: development
specs:
goo (0.0.2)
@@ -53,7 +53,7 @@ GIT
GIT
remote: https://github.com/ontoportal-lirmm/ontologies_linked_data.git
- revision: e9b708c40b2b22b935fb48d18ed19de8148fca35
+ revision: 4c89c8346766d23e09b24c8e29750bf3a91e6b53
branch: development
specs:
ontologies_linked_data (0.0.1)
@@ -103,16 +103,16 @@ GEM
activesupport (3.2.22.5)
i18n (~> 0.6, >= 0.6.4)
multi_json (~> 1.0)
- addressable (2.8.1)
+ addressable (2.8.5)
public_suffix (>= 2.0.2, < 6.0)
- airbrussh (1.4.1)
+ airbrussh (1.4.2)
sshkit (>= 1.6.1, != 1.7.0)
- backports (3.23.0)
- bcrypt (3.1.18)
+ backports (3.24.1)
+ bcrypt (3.1.19)
bcrypt_pbkdf (1.1.0)
bigdecimal (1.4.2)
builder (3.2.4)
- capistrano (3.17.1)
+ capistrano (3.17.3)
airbrussh (>= 1.0.0)
i18n
rake (>= 10.0.0)
@@ -125,7 +125,7 @@ GEM
capistrano (~> 3.1)
sshkit (~> 1.3)
coderay (1.1.3)
- concurrent-ruby (1.2.0)
+ concurrent-ruby (1.2.2)
cube-ruby (0.0.3)
dante (0.2.0)
date (3.3.3)
@@ -160,9 +160,9 @@ GEM
ffi (1.15.5)
get_process_mem (0.2.7)
ffi (~> 1.0)
- google-apis-analytics_v3 (0.12.0)
- google-apis-core (>= 0.9.1, < 2.a)
- google-apis-core (0.11.0)
+ google-apis-analytics_v3 (0.13.0)
+ google-apis-core (>= 0.11.0, < 2.a)
+ google-apis-core (0.11.1)
addressable (~> 2.5, >= 2.5.1)
googleauth (>= 0.16.2, < 2.a)
httpclient (>= 2.8.1, < 3.a)
@@ -171,7 +171,7 @@ GEM
retriable (>= 2.0, < 4.a)
rexml
webrick
- googleauth (1.3.0)
+ googleauth (1.7.0)
faraday (>= 0.17.3, < 3.a)
jwt (>= 1.4, < 3.0)
memoist (~> 0.16)
@@ -191,9 +191,9 @@ GEM
json-schema (2.8.1)
addressable (>= 2.4)
json_pure (2.6.3)
- jwt (2.7.0)
+ jwt (2.7.1)
kgio (2.11.4)
- libxml-ruby (4.0.0)
+ libxml-ruby (4.1.1)
logger (1.5.3)
macaddr (1.7.2)
systemu (~> 2.6.5)
@@ -204,10 +204,10 @@ GEM
net-smtp
memoist (0.16.2)
method_source (1.0.0)
- mime-types (3.4.1)
+ mime-types (3.5.1)
mime-types-data (~> 3.2015)
- mime-types-data (3.2022.0105)
- mini_mime (1.1.2)
+ mime-types-data (3.2023.0808)
+ mini_mime (1.1.5)
minitest (4.7.5)
minitest-stub_any_instance (1.0.3)
mlanett-redis-lock (0.2.7)
@@ -215,7 +215,7 @@ GEM
multi_json (1.15.0)
multipart-post (2.3.0)
net-http-persistent (2.9.4)
- net-imap (0.3.4)
+ net-imap (0.3.7)
date
net-protocol
net-pop (0.1.2)
@@ -226,9 +226,9 @@ GEM
net-ssh (>= 2.6.5, < 8.0.0)
net-smtp (0.3.3)
net-protocol
- net-ssh (7.0.1)
+ net-ssh (7.2.0)
netrc (0.11.0)
- newrelic_rpm (8.16.0)
+ newrelic_rpm (9.4.2)
oj (2.18.5)
omni_logger (0.1.4)
logger
@@ -239,7 +239,7 @@ GEM
pry (0.14.2)
coderay (~> 1.1)
method_source (~> 1.0)
- public_suffix (5.0.1)
+ public_suffix (5.0.3)
rack (1.6.13)
rack-accept (0.4.5)
rack (>= 0.4)
@@ -249,14 +249,14 @@ GEM
rack (>= 0.4)
rack-cors (1.0.6)
rack (>= 1.6.0)
- rack-mini-profiler (3.0.0)
+ rack-mini-profiler (3.1.1)
rack (>= 1.2.0)
rack-protection (1.5.5)
rack
- rack-test (2.0.2)
+ rack-test (2.1.0)
rack (>= 1.3)
rack-timeout (0.6.3)
- raindrops (0.20.0)
+ raindrops (0.20.1)
rake (10.5.0)
rdf (1.0.8)
addressable (>= 2.2)
@@ -274,13 +274,15 @@ GEM
declarative (< 0.1.0)
trailblazer-option (>= 0.1.1, < 0.2.0)
uber (< 0.2.0)
+ request_store (1.5.1)
+ rack (>= 1.4)
rest-client (2.1.0)
http-accept (>= 1.7.0, < 2.0)
http-cookie (>= 1.0.2, < 2.0)
mime-types (>= 1.16, < 4.0)
netrc (~> 0.8)
retriable (3.1.2)
- rexml (3.2.5)
+ rexml (3.2.6)
rsolr (2.5.0)
builder (>= 2.1.2)
faraday (>= 0.9, < 3, != 2.0.0)
@@ -316,13 +318,13 @@ GEM
rack-test
sinatra (~> 1.4.0)
tilt (>= 1.3, < 3)
- sshkit (1.21.3)
+ sshkit (1.21.5)
net-scp (>= 1.1.2)
net-ssh (>= 2.8.0)
systemu (2.6.5)
- temple (0.10.0)
- tilt (2.0.11)
- timeout (0.3.2)
+ temple (0.10.2)
+ tilt (2.2.0)
+ timeout (0.4.0)
trailblazer-option (0.1.2)
tzinfo (2.0.6)
concurrent-ruby (~> 1.0)
@@ -341,10 +343,11 @@ GEM
webrick (1.8.1)
PLATFORMS
+ x86_64-darwin-21
x86_64-linux
DEPENDENCIES
- activesupport (~> 3.0)
+ activesupport (~> 3.1)
bcrypt_pbkdf (>= 1.0, < 2.0)
bigdecimal (= 1.4.2)
capistrano (~> 3)
@@ -372,7 +375,7 @@ DEPENDENCIES
rack
rack-accept (~> 0.4)
rack-attack (~> 6.6.1)
- rack-cache (~> 1.0)
+ rack-cache (~> 1.13.0)
rack-cors
rack-mini-profiler
rack-post-body-to-params!
@@ -380,9 +383,11 @@ DEPENDENCIES
rack-timeout
rake (~> 10.0)
redcarpet
- redis
+ redis (~> 4.8.1)
redis-activesupport
redis-rack-cache (~> 2.0)
+ redis-store (= 1.9.1)
+ request_store
shotgun!
simplecov
simplecov-cobertura
diff --git a/app.rb b/app.rb
index 5360ae4b..b61fdcbe 100644
--- a/app.rb
+++ b/app.rb
@@ -36,6 +36,8 @@
# Inflector setup
require_relative "config/inflections"
+require 'request_store'
+
# Protection settings
set :protection, :except => :path_traversal
@@ -143,6 +145,8 @@
use Rack::PostBodyToParams
use Rack::ParamTranslator
+use RequestStore::Middleware
+
use LinkedData::Security::Authorization
use LinkedData::Security::AccessDenied
diff --git a/controllers/agents_controller.rb b/controllers/agents_controller.rb
new file mode 100644
index 00000000..87572e99
--- /dev/null
+++ b/controllers/agents_controller.rb
@@ -0,0 +1,145 @@
+class AgentsController < ApplicationController
+
+ %w[/agents /Agents].each do |namespace|
+ namespace namespace do
+ # Display all agents
+ get do
+ check_last_modified_collection(LinkedData::Models::Agent)
+ query = LinkedData::Models::Agent.where
+ query = apply_filters(LinkedData::Models::Agent, query)
+ query = query.include(LinkedData::Models::Agent.goo_attrs_to_load(includes_param))
+ if page?
+ page, size = page_params
+ agents = query.page(page, size).all
+ else
+ agents = query.to_a
+ end
+ reply agents
+ end
+
+ # Display a single agent
+ get '/:id' do
+ check_last_modified_collection(LinkedData::Models::Agent)
+ id = params["id"]
+ agent = LinkedData::Models::Agent.find(id).include(LinkedData::Models::Agent.goo_attrs_to_load(includes_param)).first
+ error 404, "Agent #{id} not found" if agent.nil?
+ reply 200, agent
+ end
+
+ # Create a new agent
+ post do
+ reply 201, create_new_agent
+ end
+
+ # Create an agent with the given acronym
+ put '/:acronym' do
+ reply 201, create_new_agent
+ end
+
+ # Update an existing agent
+ patch '/:id' do
+ acronym = params["id"]
+ agent = LinkedData::Models::Agent.find(acronym).include(LinkedData::Models::Agent.attributes).first
+
+ if agent.nil?
+ error 400, "Agent does not exist, please create using HTTP PUT before modifying"
+ else
+ agent = update_agent(agent, params)
+
+ error 400, agent.errors unless agent.errors.empty?
+ end
+ halt 204
+ end
+
+ # Delete an agent
+ delete '/:id' do
+ agent = LinkedData::Models::Agent.find(params["id"]).first
+ agent.delete
+ halt 204
+ end
+
+ private
+
+ def update_identifiers(identifiers)
+ Array(identifiers).map do |i|
+ next nil if i.empty?
+
+ id = i["id"] || LinkedData::Models::AgentIdentifier.generate_identifier(i['notation'], i['schemaAgency'])
+ identifier = LinkedData::Models::AgentIdentifier.find(RDF::URI.new(id)).first
+
+ if identifier
+ identifier.bring_remaining
+ else
+ identifier = LinkedData::Models::AgentIdentifier.new
+ end
+
+ i.delete "id"
+
+ next identifier if i.keys.size.zero?
+
+ populate_from_params(identifier, i)
+
+ if identifier.valid?
+ identifier.save
+ else
+ error 400, identifier.errors
+ end
+ identifier
+ end.compact
+ end
+
+ def update_affiliations(affiliations)
+ Array(affiliations).map do |aff|
+ affiliation = aff["id"] ? LinkedData::Models::Agent.find(RDF::URI.new(aff["id"])).first : nil
+
+ if affiliation
+ affiliation.bring_remaining
+ affiliation.identifiers.each{|i| i.bring_remaining}
+ end
+
+ next affiliation if aff.keys.size.eql?(1) && aff["id"]
+
+ if affiliation
+ affiliation = update_agent(affiliation, aff)
+ else
+ affiliation = create_new_agent(aff["id"], aff)
+ end
+
+ error 400, affiliation.errors unless affiliation.errors.empty?
+
+ affiliation
+ end
+ end
+
+ def create_new_agent (id = @params['id'], params = @params)
+ agent = nil
+ agent = LinkedData::Models::Agent.find(id).include(LinkedData::Models::Agent.goo_attrs_to_load(includes_param)).first if id
+
+ if agent.nil?
+ agent = update_agent(LinkedData::Models::Agent.new, params)
+ error 400, agent.errors unless agent.errors.empty?
+
+ return agent
+ else
+ error 400, "Agent exists, please use HTTP PATCH to update"
+ end
+ end
+
+ def update_agent(agent, params)
+ return agent unless agent
+
+ identifiers = params.delete "identifiers"
+ affiliations = params.delete "affiliations"
+ params.delete "id"
+ populate_from_params(agent, params)
+ agent.identifiers = update_identifiers(identifiers)
+ agent.affiliations = update_affiliations(affiliations)
+
+ agent.save if agent.valid?
+ return agent
+ end
+
+ end
+ end
+
+end
\ No newline at end of file
diff --git a/controllers/ontologies_controller.rb b/controllers/ontologies_controller.rb
index da1b748c..99c0ce68 100644
--- a/controllers/ontologies_controller.rb
+++ b/controllers/ontologies_controller.rb
@@ -44,12 +44,13 @@ class OntologiesController < ApplicationController
if includes_param.first == :all
# Bring what we need to display all attr of the submission
latest.bring_remaining
- latest.bring({:contact=>[:name, :email],
- :ontology=>[:acronym, :name, :administeredBy, :group, :viewingRestriction, :doNotUpdate, :flat,
- :hasDomain, :summaryOnly, :acl, :viewOf, :ontologyType],
- :submissionStatus=>[:code], :hasOntologyLanguage=>[:acronym]})
+ latest.bring(*submission_attributes_all)
else
- latest.bring(*OntologySubmission.goo_attrs_to_load(includes_param))
+ includes = OntologySubmission.goo_attrs_to_load(includes_param)
+
+ includes << {:contact=>[:name, :email]} if includes.find{|v| v.is_a?(Hash) && v.keys.first.eql?(:contact)}
+
+ latest.bring(*includes)
end
end
#remove the whole previous if block and replace by it: latest.bring(*OntologySubmission.goo_attrs_to_load(includes_param)) if latest
diff --git a/controllers/ontology_submissions_controller.rb b/controllers/ontology_submissions_controller.rb
index cf55659d..e2817de9 100644
--- a/controllers/ontology_submissions_controller.rb
+++ b/controllers/ontology_submissions_controller.rb
@@ -1,9 +1,15 @@
class OntologySubmissionsController < ApplicationController
get "/submissions" do
check_last_modified_collection(LinkedData::Models::OntologySubmission)
- #using appplication_helper method
- options = {also_include_views: params["also_include_views"], status: (params["include_status"] || "ANY")}
- reply retrieve_latest_submissions(options).values
+ options = {
+ also_include_views: params["also_include_views"],
+ status: (params["include_status"] || "ANY")
+ }
+ subs = retrieve_latest_submissions(options)
+ subs = subs.values unless page?
+ # Force to show ontology reviews, notes and projects by default only for this request
+ LinkedData::Models::Ontology.serialize_default(*(LinkedData::Models::Ontology.hypermedia_settings[:serialize_default] + [:reviews, :notes, :projects]))
+ reply subs
end
##
@@ -24,9 +30,7 @@ class OntologySubmissionsController < ApplicationController
check_last_modified_segment(LinkedData::Models::OntologySubmission, [ont.acronym])
if includes_param.first == :all
# When asking to display all metadata, we are using bring_remaining which is more performant than including all metadata (remove this when the query to get metadata will be fixed)
- ont.bring(submissions: [:released, :creationDate, :status, :submissionId,
- {:contact=>[:name, :email], :ontology=>[:administeredBy, :acronym, :name, :summaryOnly, :ontologyType, :viewingRestriction, :acl, :group, :hasDomain, :views, :viewOf, :flat],
- :submissionStatus=>[:code], :hasOntologyLanguage=>[:acronym]}, :submissionStatus])
+ ont.bring(submission_attributes_all)
ont.submissions.each do |sub|
sub.bring_remaining
diff --git a/docker-compose.yml b/docker-compose.yml
index de084081..5cb64963 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -75,10 +75,14 @@ services:
redis-ut:
image: redis
+ ports:
+ - 6379:6379
4store-ut:
image: bde2020/4store
#volume: fourstore:/var/lib/4store
+ ports:
+ - 9000:9000
command: >
bash -c "4s-backend-setup --segments 4 ontoportal_kb
&& 4s-backend ontoportal_kb
@@ -88,10 +92,20 @@ services:
solr-ut:
- image: ontoportal/solr-ut:0.1
+ image: solr:8
+ volumes:
+ - ./test/solr/configsets:/configsets:ro
+ ports:
+ - "8983:8983"
+ command: >
+ bash -c "precreate-core term_search_core1 /configsets/term_search
+ && precreate-core prop_search_core1 /configsets/property_search
+ && solr-foreground"
mgrep-ut:
image: ontoportal/mgrep-ncbo:0.1
+ ports:
+ - "55556:55555"
agraph-ut:
image: franzinc/agraph:v7.3.0
diff --git a/helpers/application_helper.rb b/helpers/application_helper.rb
index 6c44f25f..5d6d1b0a 100644
--- a/helpers/application_helper.rb
+++ b/helpers/application_helper.rb
@@ -51,6 +51,10 @@ def populate_from_params(obj, params)
value = is_arr ? value : [value]
new_value = []
value.each do |cls|
+ if uri_as_needed(cls["ontology"]).nil?
+ new_value << cls
+ next
+ end
sub = LinkedData::Models::Ontology.find(uri_as_needed(cls["ontology"])).first.latest_submission
new_value << LinkedData::Models::Class.find(cls["class"]).in(sub).first
end
@@ -355,40 +359,18 @@ def replace_url_prefix(id)
end
def retrieve_latest_submissions(options = {})
- status = (options[:status] || "RDF").to_s.upcase
- include_ready = status.eql?("READY") ? true : false
- status = "RDF" if status.eql?("READY")
- any = true if status.eql?("ANY")
- include_views = options[:also_include_views] || false
- includes = OntologySubmission.goo_attrs_to_load(includes_param)
-
- includes << :submissionStatus unless includes.include?(:submissionStatus)
- if any
- submissions_query = OntologySubmission.where
- else
- submissions_query = OntologySubmission.where(submissionStatus: [ code: status])
- end
+ submissions = retrieve_submissions(options)
- submissions_query = submissions_query.filter(Goo::Filter.new(ontology: [:viewOf]).unbound) unless include_views
- submissions_query = submissions_query.filter(filter) if filter?
- # When asking to display all metadata, we are using bring_remaining on each submission. Slower but best way to retrieve all attrs
- if includes_param.first == :all
- includes = [:submissionId, {:contact=>[:name, :email], :ontology=>[:administeredBy, :acronym, :name, :summaryOnly, :ontologyType, :viewingRestriction, :acl,
- :group, :hasDomain, :views, :viewOf, :flat], :submissionStatus=>[:code], :hasOntologyLanguage=>[:acronym]}, :submissionStatus]
- end
- submissions = submissions_query.include(includes).to_a
-
- # Figure out latest parsed submissions using all submissions
- latest_submissions = {}
+ latest_submissions = page? ? submissions : {} # latest_submission does not work with pagination
submissions.each do |sub|
# To retrieve all metadata, but slow when a lot of ontologies
- if includes_param.first == :all
- sub.bring_remaining
+ sub.bring_remaining if includes_param.first == :all
+ unless page?
+ next if include_ready?(options) && !sub.ready?
+ next if sub.ontology.nil?
+ latest_submissions[sub.ontology.acronym] ||= sub
+ latest_submissions[sub.ontology.acronym] = sub if sub.submissionId.to_i > latest_submissions[sub.ontology.acronym].submissionId.to_i
end
- next if include_ready && !sub.ready?
- next if sub.ontology.nil?
- latest_submissions[sub.ontology.acronym] ||= sub
- latest_submissions[sub.ontology.acronym] = sub if sub.submissionId.to_i > latest_submissions[sub.ontology.acronym].submissionId.to_i
end
latest_submissions
end
diff --git a/helpers/request_params_helper.rb b/helpers/request_params_helper.rb
index e7ec091a..45091042 100644
--- a/helpers/request_params_helper.rb
+++ b/helpers/request_params_helper.rb
@@ -13,6 +13,10 @@ def settings_params(klass)
[attributes, page, size, order_by, bring_unmapped]
end
+ def page?
+ !params[:page].nil?
+ end
+
def is_set?(param)
!param.nil? && param != ""
end
@@ -25,6 +29,39 @@ def filter
build_filter
end
+ def apply_filters(object, query)
+ attributes_to_filter = object.attributes(:all).select{|x| params.keys.include?(x.to_s)}
+ filters = attributes_to_filter.map {|key| [key, params[key]&.split(',')]}.to_h
+ add_direct_filters(filters, query)
+ end
+
+ def apply_submission_filters(query)
+
+ filters = {
+ naturalLanguage: params[:naturalLanguage]&.split(',') , #%w[http://lexvo.org/id/iso639-3/fra http://lexvo.org/id/iso639-3/eng],
+ hasOntologyLanguage_acronym: params[:hasOntologyLanguage]&.split(',') , #%w[OWL SKOS],
+ ontology_hasDomain_acronym: params[:hasDomain]&.split(',') , #%w[Crop Vue_francais],
+ ontology_group_acronym: params[:group]&.split(','), #%w[RICE CROP],
+ ontology_name: Array(params[:name]) + Array(params[:name]&.capitalize),
+ isOfType: params[:isOfType]&.split(','), #["http://omv.ontoware.org/2005/05/ontology#Vocabulary"],
+ hasFormalityLevel: params[:hasFormalityLevel]&.split(','), #["http://w3id.org/nkos/nkostype#thesaurus"],
+ ontology_viewingRestriction: params[:viewingRestriction]&.split(','), #["private"]
+ }
+ inverse_filters = {
+ status: params[:status], #"retired",
+ submissionStatus: params[:submissionStatus] #"RDF",
+ }
+
+ query = add_direct_filters(filters, query)
+
+ query = add_inverse_filters(inverse_filters, query)
+
+ query = add_acronym_name_filters(query)
+
+ add_order_by_patterns(query)
+ end
+
+
def get_order_by_from(params, default_order = :asc)
if is_set?(params['sortby'])
orders = (params["order"] || default_order.to_s).split(',')
@@ -50,6 +87,67 @@ def bring_unmapped_to(page_data, sub, klass)
end
private
+ def extract_attr(key)
+ attr, sub_attr, sub_sub_attr = key.to_s.split('_')
+
+ return attr.to_sym unless sub_attr
+
+ return {attr.to_sym => [sub_attr.to_sym]} unless sub_sub_attr
+
+ {attr.to_sym => [sub_attr.to_sym => sub_sub_attr.to_sym]}
+ end
+
+ def add_direct_filters(filters, query)
+ filters.each do |key, values|
+ attr = extract_attr(key)
+ next if Array(values).empty?
+
+ filter = Goo::Filter.new(attr).regex(values.first)
+ values.drop(1).each do |v|
+ filter = filter.or(Goo::Filter.new(attr).regex(v))
+ end
+ query = query.filter(filter)
+ end
+ query
+ end
+
+ def add_inverse_filters(inverse_filters, query)
+ inverse_filters.each do |key, value|
+ attr = extract_attr(key)
+ next unless value
+
+ filter = Goo::Filter.new(attr).regex("^(?:(?!#{value}).)*$")
+ query = query.filter(filter)
+ end
+ query
+ end
+
+ def add_acronym_name_filters(query)
+ if params[:acronym]
+ filter = Goo::Filter.new(extract_attr(:ontology_acronym)).regex(params[:acronym])
+ if params[:name]
+ filter.or(Goo::Filter.new(extract_attr(:ontology_name)).regex(params[:name]))
+ end
+ query = query.filter(filter)
+ elsif params[:name]
+ filter = Goo::Filter.new(extract_attr(:ontology_name)).regex(params[:name])
+ query = query.filter(filter)
+ end
+ query
+ end
+
+ def add_order_by_patterns(query)
+ if params[:order_by]
+ attr, sub_attr = params[:order_by].to_s.split('_')
+ if sub_attr
+ order_pattern = { attr.to_sym => { sub_attr.to_sym => (sub_attr.eql?("name") ? :asc : :desc) } }
+ else
+ order_pattern = { attr.to_sym => :desc }
+ end
+ query = query.order_by(order_pattern)
+ end
+ query
+ end
def sort_order_item(param, order)
[param.to_sym, order.to_sym]
diff --git a/helpers/submission_helper.rb b/helpers/submission_helper.rb
new file mode 100644
index 00000000..c1ee5dd3
--- /dev/null
+++ b/helpers/submission_helper.rb
@@ -0,0 +1,68 @@
+require 'sinatra/base'
+
+module Sinatra
+ module Helpers
+ module SubmissionHelper
+
+ def submission_attributes_all
+ out = [LinkedData::Models::OntologySubmission.embed_values_hash]
+ out << {:contact=>[:name, :email]}
+ out << {:ontology=>[:acronym, :name, :administeredBy, :group, :viewingRestriction, :doNotUpdate, :flat,
+ :hasDomain, :summaryOnly, :acl, :viewOf, :ontologyType]}
+
+ out
+ end
+
+ def retrieve_submissions(options)
+ status = (options[:status] || "RDF").to_s.upcase
+ status = "RDF" if status.eql?("READY")
+ any = status.eql?("ANY")
+ include_views = options[:also_include_views] || false
+ includes, page, size, order_by, _ = settings_params(LinkedData::Models::OntologySubmission)
+ includes << :submissionStatus unless includes.include?(:submissionStatus)
+
+ submissions_query = LinkedData::Models::OntologySubmission
+ if any
+ submissions_query = submissions_query.where
+ else
+ submissions_query = submissions_query.where({ submissionStatus: [code: status] })
+ end
+
+ submissions_query = apply_submission_filters(submissions_query)
+ submissions_query = submissions_query.filter(Goo::Filter.new(ontology: [:viewOf]).unbound) unless include_views
+ submissions_query = submissions_query.filter(filter) if filter?
+
+ # When asking to display all metadata, we are using bring_remaining on each submission. Slower but best way to retrieve all attrs
+ if includes_param.first == :all
+ includes = [:submissionId, { :contact => [:name, :email],
+ :ontology => [:administeredBy, :acronym, :name, :summaryOnly, :ontologyType, :viewingRestriction, :acl,
+ :group, :hasDomain, :views, :viewOf, :flat, :notes, :reviews, :projects],
+ :submissionStatus => [:code], :hasOntologyLanguage => [:acronym], :metrics => [:classes, :individuals, :properties] },
+ :submissionStatus]
+ else
+ if includes.find { |v| v.is_a?(Hash) && v.keys.include?(:ontology) }
+ includes << { :ontology => [:administeredBy, :acronym, :name, :viewingRestriction, :group, :hasDomain, :notes, :reviews, :projects, :acl, :viewOf] }
+ end
+
+ if includes.find { |v| v.is_a?(Hash) && v.keys.include?(:contact) }
+ includes << { :contact => [:name, :email] }
+ end
+ end
+
+ submissions = submissions_query.include(includes)
+ if page?
+ submissions.page(page, size).all
+ else
+ submissions.to_a
+ end
+ end
+
+ def include_ready?(options)
+ options[:status] && options[:status].to_s.upcase.eql?("READY")
+ end
+
+ end
+ end
+end
+
+helpers Sinatra::Helpers::SubmissionHelper
\ No newline at end of file
diff --git a/test/controllers/test_agents_controller.rb b/test/controllers/test_agents_controller.rb
new file mode 100644
index 00000000..ef0e5c47
--- /dev/null
+++ b/test/controllers/test_agents_controller.rb
@@ -0,0 +1,225 @@
+require_relative '../test_case'
+require "multi_json"
+
+class TestAgentsController < TestCase
+
+ def setup
+
+ @number_of_organizations = 6
+
+
+ @test_agents = 8.times.map do |i|
+ type = i < @number_of_organizations ? 'organization' : 'person'
+ _agent_data(type: type)
+ end
+ @agents = []
+ 2.times.map do
+ agents_tmp = [ _agent_data(type: 'organization'), _agent_data(type: 'organization'), _agent_data(type: 'person')]
+ agent = agents_tmp.last
+ agent[:affiliations] = [agents_tmp[0].stringify_keys, agents_tmp[1].stringify_keys]
+ _test_agent_creation(agent)
+ @agents = @agents + agents_tmp
+ end
+ end
+
+ def teardown
+ # Delete agents
+ _delete_agents
+ end
+
+ def test_all_agents
+ get '/agents'
+ assert last_response.ok?
+
+ created_agents = MultiJson.load(last_response.body)
+
+ @agents.each do |agent|
+ created_agent = created_agents.select{|x| x["name"].eql?(agent[:name])}.first
+ refute_nil created_agent
+ assert_equal agent[:name], created_agent["name"]
+ assert_equal agent[:identifiers].size, created_agent["identifiers"].size
+ assert_equal agent[:identifiers].map{|x| x[:notation]}.sort, created_agent["identifiers"].map{|x| x['notation']}.sort
+ assert_equal agent[:affiliations].size, created_agent["affiliations"].size
+ assert_equal agent[:affiliations].map{|x| x["name"]}.sort, created_agent["affiliations"].map{|x| x['name']}.sort
+
+ end
+ end
+
+ def test_single_agent
+ @agents.each do |agent|
+ agent_obj = _find_agent(agent['name'])
+ get "/agents/#{agent_obj.id.to_s.split('/').last}"
+ assert last_response.ok?
+ agent_found = MultiJson.load(last_response.body)
+ assert_equal agent_obj.id.to_s, agent_found["id"]
+ end
+ end
+
+ def test_create_new_agent
+
+ ## Create Agent of type affiliation with no parent affiliation
+ agent = @test_agents[0]
+ created_agent = _test_agent_creation(agent)
+
+ ## Create Agent of type affiliation with an existing parent affiliation
+
+ agent = @test_agents[1]
+ agent[:affiliations] = [created_agent]
+
+ created_agent = _test_agent_creation(agent)
+
+ ## Create Agent of type affiliation with a non-existing parent affiliation
+ agent = @test_agents[3]
+ agent[:affiliations] = [created_agent, @test_agents[2].stringify_keys]
+ created_agent = _test_agent_creation(agent)
+
+ ## Create Agent of type Person with existing affiliations
+
+ agent = @test_agents[6]
+ agent[:affiliations] = created_agent["affiliations"]
+ _test_agent_creation(agent)
+
+ ## Create Agent of type Person with no existing affiliations
+
+ agent = @test_agents[7]
+ agent[:affiliations] = [@test_agents[4].stringify_keys, @test_agents[5].stringify_keys]
+ _test_agent_creation(agent)
+
+ @agents = @agents + @test_agents
+ end
+
+
+ def test_new_agent_no_valid
+ agents_tmp = [ _agent_data(type: 'organization'), _agent_data(type: 'person'), _agent_data(type: 'person')]
+ agent = agents_tmp.last
+ agent[:affiliations] = [agents_tmp[0].stringify_keys, agents_tmp[1].stringify_keys]
+ post "/agents", MultiJson.dump(agent), "CONTENT_TYPE" => "application/json"
+ assert last_response.status == 400
+ end
+
+ def test_update_patch_agent
+
+ agents = [ _agent_data(type: 'organization'), _agent_data(type: 'organization'), _agent_data(type: 'person')]
+ agent = agents.last
+ agent[:affiliations] = [agents[0].stringify_keys, agents[1].stringify_keys]
+ agent = _test_agent_creation(agent)
+ @agents = @agents + agents
+ agent = LinkedData::Models::Agent.find(agent['id'].split('/').last).first
+ agent.bring_remaining
+
+
+ ## update identifiers
+ agent.identifiers.each{|i| i.bring_remaining}
+ new_identifiers = []
+ ## update an existent identifier
+ new_identifiers[0] = {
+ id: agent.identifiers[0].id.to_s,
+ schemaAgency: 'TEST ' + agent.identifiers[0].notation
+ }
+
+ new_identifiers[1] = {
+ id: agent.identifiers[1].id.to_s
+ }
+
+ ## update affiliation
+ agent.affiliations.each{|aff| aff.bring_remaining}
+ new_affiliations = []
+ ## update an existent affiliation
+ new_affiliations[0] = {
+ name: 'TEST new of ' + agent.affiliations[0].name,
+ id: agent.affiliations[0].id.to_s
+ }
+ ## create a new affiliation
+ new_affiliations[1] = _agent_data(type: 'organization')
+ new_affiliations[1][:name] = 'new affiliation'
+
+ new_values = {
+ name: 'new name ',
+ identifiers: new_identifiers,
+ affiliations: new_affiliations
+ }
+
+ patch "/agents/#{agent.id.split('/').last}", MultiJson.dump(new_values), "CONTENT_TYPE" => "application/json"
+ assert last_response.status == 204
+
+ get "/agents/#{agent.id.split('/').last}"
+ new_agent = MultiJson.load(last_response.body)
+ assert_equal 'new name ', new_agent["name"]
+
+ assert_equal new_identifiers.size, new_agent["identifiers"].size
+ assert_equal new_identifiers[0][:schemaAgency], new_agent["identifiers"].select{|x| x["id"].eql?(agent.identifiers[0].id.to_s)}.first["schemaAgency"]
+ assert_equal agent.identifiers[1].schemaAgency, new_agent["identifiers"].select{|x| x["id"].eql?(agent.identifiers[1].id.to_s)}.first["schemaAgency"]
+
+ assert_equal new_affiliations.size, new_agent["affiliations"].size
+ assert_equal new_affiliations[0][:name], new_agent["affiliations"].select{|x| x["id"].eql?(agent.affiliations[0].id.to_s)}.first["name"]
+ assert_nil new_agent["affiliations"].select{|x| x["id"].eql?(agent.affiliations[1].id.to_s)}.first
+ assert_equal new_affiliations[1][:name], new_agent["affiliations"].reject{|x| x["id"].eql?(agent.affiliations[0].id.to_s)}.first["name"]
+ end
+
+ def test_delete_agent
+ agent = @agents.delete_at(0)
+ agent_obj = _find_agent(agent['name'])
+ id = agent_obj.id.to_s.split('/').last
+ delete "/agents/#{id}"
+ assert last_response.status == 204
+
+ get "/agents/#{id}"
+ assert last_response.status == 404
+ end
+
+ private
+ def _agent_data(type: 'organization')
+ schema_agencies = LinkedData::Models::AgentIdentifier::IDENTIFIER_SCHEMES.keys
+ users = LinkedData::Models::User.all
+ users = [LinkedData::Models::User.new(username: "tim", email: "tim@example.org", password: "password").save] if users.empty?
+ test_identifiers = 5.times.map { |i| { notation: rand.to_s[2..11], schemaAgency: schema_agencies.sample.to_s } }
+ user = users.sample.id.to_s
+
+ i = rand.to_s[2..11]
+ return {
+ agentType: type,
+ name: "name #{i}",
+ homepage: "home page #{i}",
+ acronym: "acronym #{i}",
+ email: "email_#{i}@test.com",
+ identifiers: test_identifiers.sample(2).map { |x| x.merge({ creator: user }) },
+ affiliations: [],
+ creator: user
+ }
+ end
+
+ def _find_agent(name)
+ LinkedData::Models::Agent.where(name: name).first
+ end
+
+ def _delete_agents
+ @agents.each do |agent|
+ test_cat = _find_agent(agent[:name])
+ next if test_cat.nil?
+
+ test_cat.bring :identifiers
+ test_cat.identifiers.each { |i| i.delete }
+ test_cat.delete
+ end
+ end
+
+ def _test_agent_creation(agent)
+ post "/agents", MultiJson.dump(agent), "CONTENT_TYPE" => "application/json"
+
+ assert last_response.status == 201
+ created_agent = MultiJson.load(last_response.body)
+ assert created_agent["name"].eql?(agent[:name])
+
+ get "/agents/#{created_agent['id'].split('/').last}"
+ assert last_response.ok?
+
+ created_agent = MultiJson.load(last_response.body)
+ assert_equal agent[:name], created_agent["name"]
+ assert_equal agent[:identifiers].size, created_agent["identifiers"].size
+ assert_equal agent[:identifiers].map { |x| x[:notation] }.sort, created_agent["identifiers"].map { |x| x['notation'] }.sort
+
+ assert_equal agent[:affiliations].size, created_agent["affiliations"].size
+ assert_equal agent[:affiliations].map { |x| x["name"] }.sort, created_agent["affiliations"].map { |x| x['name'] }.sort
+ created_agent
+ end
+end
\ No newline at end of file
diff --git a/test/controllers/test_ontologies_controller.rb b/test/controllers/test_ontologies_controller.rb
index 4713b699..dc79359b 100644
--- a/test/controllers/test_ontologies_controller.rb
+++ b/test/controllers/test_ontologies_controller.rb
@@ -217,13 +217,13 @@ def test_download_acl_only
begin
allowed_user = User.new({
username: "allowed",
- email: "test@example.org",
+ email: "test1@example.org",
password: "12345"
})
allowed_user.save
blocked_user = User.new({
username: "blocked",
- email: "test@example.org",
+ email: "test2@example.org",
password: "12345"
})
blocked_user.save
diff --git a/test/controllers/test_ontology_submissions_controller.rb b/test/controllers/test_ontology_submissions_controller.rb
index 7500dce4..9ee81257 100644
--- a/test/controllers/test_ontology_submissions_controller.rb
+++ b/test/controllers/test_ontology_submissions_controller.rb
@@ -18,7 +18,10 @@ def self._set_vars
administeredBy: "tim",
"file" => Rack::Test::UploadedFile.new(@@test_file, ""),
released: DateTime.now.to_s,
- contact: [{name: "test_name", email: "test@example.org"}]
+ contact: [{name: "test_name", email: "test3@example.org"}],
+ URI: 'https://test.com/test',
+ status: 'production',
+ description: 'ontology description'
}
@@status_uploaded = "UPLOADED"
@@status_rdf = "RDF"
@@ -156,13 +159,13 @@ def test_download_acl_only
begin
allowed_user = User.new({
username: "allowed",
- email: "test@example.org",
+ email: "test4@example.org",
password: "12345"
})
allowed_user.save
blocked_user = User.new({
username: "blocked",
- email: "test@example.org",
+ email: "test5@example.org",
password: "12345"
})
blocked_user.save
@@ -192,4 +195,19 @@ def test_download_acl_only
end
end
+ def test_submissions_pagination
+ num_onts_created, created_ont_acronyms = create_ontologies_and_submissions(ont_count: 2, submission_count: 2)
+
+ get "/submissions"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+
+ assert_equal 2, submissions.length
+
+
+ get "/submissions?page=1&pagesize=1"
+ assert last_response.ok?
+ submissions = MultiJson.load(last_response.body)
+ assert_equal 1, submissions["collection"].length
+ end
end
diff --git a/test/controllers/test_search_controller.rb b/test/controllers/test_search_controller.rb
index 44c67c7e..74be75d2 100644
--- a/test/controllers/test_search_controller.rb
+++ b/test/controllers/test_search_controller.rb
@@ -85,7 +85,7 @@ def test_search_ontology_filter
assert last_response.ok?
results = MultiJson.load(last_response.body)
doc = results["collection"][0]
- assert_equal "cell line", doc["prefLabel"]
+ assert_equal "cell line", doc["prefLabel"].first
assert doc["links"]["ontology"].include? acronym
results["collection"].each do |doc|
acr = doc["links"]["ontology"].split('/')[-1]
@@ -103,7 +103,8 @@ def test_search_other_filters
get "search?q=data&require_definitions=true"
assert last_response.ok?
results = MultiJson.load(last_response.body)
- assert_equal 26, results["collection"].length
+ assert results["collection"].all? {|doc| !doc["definition"].nil? && doc.values.flatten.join(" ").include?("data") }
+ #assert_equal 26, results["collection"].length
get "search?q=data&require_definitions=false"
assert last_response.ok?
@@ -115,10 +116,14 @@ def test_search_other_filters
get "search?q=Integration%20and%20Interoperability&ontologies=#{acronym}"
results = MultiJson.load(last_response.body)
- assert_equal 22, results["collection"].length
+
+ assert results["collection"].all? { |x| !x["obsolete"] }
+ count = results["collection"].length
+
get "search?q=Integration%20and%20Interoperability&ontologies=#{acronym}&also_search_obsolete=false"
results = MultiJson.load(last_response.body)
- assert_equal 22, results["collection"].length
+ assert_equal count, results["collection"].length
+
get "search?q=Integration%20and%20Interoperability&ontologies=#{acronym}&also_search_obsolete=true"
results = MultiJson.load(last_response.body)
assert_equal 29, results["collection"].length
@@ -134,8 +139,14 @@ def test_search_other_filters
# testing cui and semantic_types flags
get "search?q=Funding%20Resource&ontologies=#{acronym}&include=prefLabel,synonym,definition,notation,cui,semanticType"
results = MultiJson.load(last_response.body)
- assert_equal 35, results["collection"].length
- assert_equal "Funding Resource", results["collection"][0]["prefLabel"]
+ #assert_equal 35, results["collection"].length
+ assert results["collection"].all? do |r|
+ ["prefLabel", "synonym", "definition", "notation", "cui", "semanticType"].map {|x| r[x]}
+ .flatten
+ .join(' ')
+ .include?("Funding Resource")
+ end
+ assert_equal "Funding Resource", results["collection"][0]["prefLabel"].first
assert_equal "T028", results["collection"][0]["semanticType"][0]
assert_equal "X123456", results["collection"][0]["cui"][0]
@@ -190,7 +201,7 @@ def test_search_provisional_class
assert_equal 10, results["collection"].length
provisional = results["collection"].select {|res| assert_equal ontology_type, res["ontologyType"]; res["provisional"]}
assert_equal 1, provisional.length
- assert_equal @@test_pc_root.label, provisional[0]["prefLabel"]
+ assert_equal @@test_pc_root.label, provisional[0]["prefLabel"].first
# subtree root with provisional class test
get "search?ontology=#{acronym}&subtree_root_id=#{CGI::escape(@@cls_uri.to_s)}&also_search_provisional=true"
@@ -199,7 +210,51 @@ def test_search_provisional_class
provisional = results["collection"].select {|res| res["provisional"]}
assert_equal 1, provisional.length
- assert_equal @@test_pc_child.label, provisional[0]["prefLabel"]
+ assert_equal @@test_pc_child.label, provisional[0]["prefLabel"].first
+ end
+
+ def test_multilingual_search
+ get "/search?q=Activity&ontologies=BROSEARCHTEST-0"
+ res = MultiJson.load(last_response.body)
+ refute_equal 0, res["totalCount"]
+
+ doc = res["collection"].select{|doc| doc["@id"].to_s.eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+ refute_nil doc
+
+ #res = LinkedData::Models::Class.search("prefLabel_none:Activity", {:fq => "submissionAcronym:BROSEARCHTEST-0", :start => 0, :rows => 80}, :main)
+ #refute_equal 0, res["response"]["numFound"]
+ #refute_nil res["response"]["docs"].select{|doc| doc["resource_id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+
+ get "/search?q=Activit%C3%A9&ontologies=BROSEARCHTEST-0&lang=fr"
+ res = MultiJson.load(last_response.body)
+ refute_equal 0, res["totalCount"]
+ refute_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+
+
+
+ get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=en"
+ res = MultiJson.load(last_response.body)
+ refute_equal 0, res["totalCount"]
+ refute_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+
+
+ get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=fr&require_exact_match=true"
+ res = MultiJson.load(last_response.body)
+ assert_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+
+ get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=en&require_exact_match=true"
+ res = MultiJson.load(last_response.body)
+ refute_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+
+ get "/search?q=Activity&ontologies=BROSEARCHTEST-0&lang=en&require_exact_match=true"
+ res = MultiJson.load(last_response.body)
+ assert_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+
+ get "/search?q=Activit%C3%A9&ontologies=BROSEARCHTEST-0&lang=fr&require_exact_match=true"
+ res = MultiJson.load(last_response.body)
+ refute_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+
+
end
end
diff --git a/test/data/ontology_files/BRO_v3.2.owl b/test/data/ontology_files/BRO_v3.2.owl
index d64075cc..b2aeccf5 100644
--- a/test/data/ontology_files/BRO_v3.2.owl
+++ b/test/data/ontology_files/BRO_v3.2.owl
@@ -631,6 +631,9 @@
Activity
+ Activity
+ ActivityEnglish
+ Activité
Activity of interest that may be related to a BRO:Resource.
activities
diff --git a/test/data/ontology_files/thesaurusINRAE_nouv_structure.rdf b/test/data/ontology_files/thesaurusINRAE_nouv_structure.rdf
index 8353d82f..ca303834 100644
--- a/test/data/ontology_files/thesaurusINRAE_nouv_structure.rdf
+++ b/test/data/ontology_files/thesaurusINRAE_nouv_structure.rdf
@@ -30,7 +30,7 @@
1331561625299
- aktivite
+ aktivite
2012-03-12T22:13:45Z
2017-09-22T14:09:06Z
diff --git a/test/middleware/test_rack_attack.rb b/test/middleware/test_rack_attack.rb
index 43143080..0b10c9e1 100644
--- a/test/middleware/test_rack_attack.rb
+++ b/test/middleware/test_rack_attack.rb
@@ -18,14 +18,14 @@ def self.before_suite
LinkedData::OntologiesAPI.settings.req_per_second_per_ip = 1
LinkedData::OntologiesAPI.settings.safe_ips = Set.new(["1.2.3.4", "1.2.3.5"])
- @@user = LinkedData::Models::User.new({username: "user", password: "test_password", email: "test_email@example.org"})
+ @@user = LinkedData::Models::User.new({username: "user", password: "test_password", email: "test_email1@example.org"})
@@user.save
- @@bp_user = LinkedData::Models::User.new({username: "ncbobioportal", password: "test_password", email: "test_email@example.org"})
+ @@bp_user = LinkedData::Models::User.new({username: "ncbobioportal", password: "test_password", email: "test_email2@example.org"})
@@bp_user.save
admin_role = LinkedData::Models::Users::Role.find("ADMINISTRATOR").first
- @@admin = LinkedData::Models::User.new({username: "admin", password: "test_password", email: "test_email@example.org", role: [admin_role]})
+ @@admin = LinkedData::Models::User.new({username: "admin", password: "test_password", email: "test_email3@example.org", role: [admin_role]})
@@admin.save
# Redirect output or we get a bunch of noise from Rack (gets reset in the after_suite method).
diff --git a/test/solr/configsets/term_search/conf/schema.xml b/test/solr/configsets/term_search/conf/schema.xml
index 6b18a2a1..fa95e127 100644
--- a/test/solr/configsets/term_search/conf/schema.xml
+++ b/test/solr/configsets/term_search/conf/schema.xml
@@ -128,11 +128,20 @@
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -140,9 +149,18 @@
+
+
+
+
+
+
+
-
+
+
+
@@ -251,6 +269,17 @@
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/solr/docker-compose.yml b/test/solr/docker-compose.yml
new file mode 100644
index 00000000..3ddae69c
--- /dev/null
+++ b/test/solr/docker-compose.yml
@@ -0,0 +1,13 @@
+version: '3.8'
+
+services:
+ op_solr:
+ image: solr:8.8
+ volumes:
+      - ./configsets:/configsets:ro
+ ports:
+ - "8983:8983"
+ command: >
+ bash -c "precreate-core term_search_core1 /configsets/term_search
+ && precreate-core prop_search_core1 /configsets/property_search
+ && solr-foreground"
diff --git a/test/solr/generate_ncbo_configsets.sh b/test/solr/generate_ncbo_configsets.sh
index 893f7f3a..7b4281f7 100755
--- a/test/solr/generate_ncbo_configsets.sh
+++ b/test/solr/generate_ncbo_configsets.sh
@@ -2,18 +2,23 @@
# generates solr configsets by merging _default configset with config files in config/solr
# _default is copied from sorl distribuion solr-8.10.1/server/solr/configsets/_default/
-pushd solr/configsets
-ld_config='../../../../ontologies_linked_data/config/solr/'
-#ld_config='../../../../config/solr/'
-ls -l $ld_config
-pwd
-[ -d property_search ] && rm -Rf property_search
-[ -d term_search ] && rm -Rf property_search
-[ -d $ld_config/property_search ] || echo "cant find ontologies_linked_data project"
-mkdir -p property_search/conf
-mkdir -p term_search/conf
-cp -a _default/conf/* property_search/conf/
-cp -a _default/conf/* term_search/conf/
-cp -a $ld_config/property_search/* property_search/conf
-cp -a $ld_config/term_search/* term_search/conf
-popd
+# NOTE: run from the repository root; all paths below are repo-relative
+ld_config='config/solr'
+configsets='test/solr/configsets'
+[ -d ${configsets}/property_search ] && rm -Rf ${configsets}/property_search
+[ -d ${configsets}/term_search ] && rm -Rf ${configsets}/term_search
+if [[ ! -d ${ld_config}/property_search ]]; then
+  echo "can't find ld solr config sets"
+ exit 1
+fi
+if [[ ! -d ${configsets}/_default/conf ]]; then
+  echo "can't find default solr configset"
+ exit 1
+fi
+mkdir -p ${configsets}/property_search/conf
+mkdir -p ${configsets}/term_search/conf
+cp -a ${configsets}/_default/conf/* ${configsets}/property_search/conf/
+cp -a ${configsets}/_default/conf/* ${configsets}/term_search/conf/
+cp -a $ld_config/property_search/* ${configsets}/property_search/conf
+cp -a $ld_config/term_search/* ${configsets}/term_search/conf
+