diff --git a/.env b/.env
new file mode 100644
index 00000000..2b53fceb
--- /dev/null
+++ b/.env
@@ -0,0 +1,6 @@
+API_URL=http://localhost:9393
+ONTOLOGIES_LINKED_DATA_PATH=
+GOO_PATH=
+SPARQL_CLIENT_PATH=
+ONTOPORTAL_KB=ontoportal_kb
+REDIS_HOST=redis-ut
diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml
index 358ff4f1..8c912cc7 100644
--- a/.github/workflows/docker-image.yml
+++ b/.github/workflows/docker-image.yml
@@ -35,8 +35,8 @@ jobs:
with:
context: .
platforms: linux/amd64,linux/arm64
- build-args: |
- RUBY_VERSION=2.7
+ # build-args: |
+ # RUBY_VERSION=2.7
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
diff --git a/.github/workflows/ruby-unit-tests.yml b/.github/workflows/ruby-unit-tests.yml
index d764159b..2aac1468 100644
--- a/.github/workflows/ruby-unit-tests.yml
+++ b/.github/workflows/ruby-unit-tests.yml
@@ -14,13 +14,13 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Build docker-compose
- run: docker-compose --profile 4store build #profile flag is set in order to build all containers in this step
+ run: docker compose --profile 4store build #profile flag is set in order to build all containers in this step
- name: Run unit tests
# unit tests are run inside a container
# http://docs.codecov.io/docs/testing-with-docker
run: |
ci_env=`bash <(curl -s https://codecov.io/env)`
- docker-compose run $ci_env -e CI --rm ${{ matrix.backend }} bundle exec rake test TESTOPTS='-v'
+ docker compose run $ci_env -e CI --rm ${{ matrix.backend }} bundle exec rake test TESTOPTS='-v'
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v4
with:
diff --git a/.ruby-version b/.ruby-version
index 6a81b4c8..818bd47a 100644
--- a/.ruby-version
+++ b/.ruby-version
@@ -1 +1 @@
-2.7.8
+3.0.6
diff --git a/Dockerfile b/Dockerfile
index a7adf16c..b6130644 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-ARG RUBY_VERSION
+ARG RUBY_VERSION=3.0
ARG DISTRO_NAME=bullseye
FROM ruby:$RUBY_VERSION-$DISTRO_NAME
@@ -14,10 +14,7 @@ COPY Gemfile* /srv/ontoportal/ontologies_api/
WORKDIR /srv/ontoportal/ontologies_api
-# set rubygem and bundler to the last version supported by ruby 2.7
-# remove version after ruby v3 upgrade
-RUN gem update --system '3.4.22'
-RUN gem install bundler -v 2.4.22
+RUN gem update --system
RUN gem install bundler
ENV BUNDLE_PATH=/srv/ontoportal/bundle
RUN bundle install
diff --git a/Gemfile b/Gemfile
index ec1f76f3..806fbf75 100644
--- a/Gemfile
+++ b/Gemfile
@@ -1,14 +1,15 @@
source 'https://rubygems.org'
-
-gem 'activesupport', '~> 3.2'
+gem 'activesupport', '~> 5'
# see https://github.com/ncbo/ontologies_api/issues/69
-gem 'bigdecimal', '1.4.2'
+gem 'bigdecimal'
+# gem 'faraday', '~> 1.9'
gem 'json-schema', '~> 2.0'
-gem 'multi_json', '~> 1.0'
-gem 'oj', '~> 3.0'
+gem 'multi_json'
+gem 'oj'
gem 'parseconfig'
gem 'rack'
gem 'rake', '~> 10.0'
+gem 'rexml' # Investigate why unicorn fails to start under ruby 3 without adding rexml gem to the Gemfile
gem 'sinatra', '~> 1.0'
gem 'sinatra-advanced-routes'
gem 'sinatra-contrib', '~> 1.0'
@@ -40,13 +41,15 @@ gem 'unicorn-worker-killer'
gem 'haml', '~> 5.2.2' # pin see https://github.com/ncbo/ontologies_api/pull/107
gem 'redcarpet'
-# NCBO
-gem 'goo', github: 'ncbo/goo', branch: 'master'
+
+# NCBO gems (can be from a local dev path or from rubygems/git)
gem 'ncbo_annotator', github: 'ncbo/ncbo_annotator', branch: 'master'
gem 'ncbo_cron', github: 'ncbo/ncbo_cron', branch: 'master'
gem 'ncbo_ontology_recommender', github: 'ncbo/ncbo_ontology_recommender', branch: 'master'
-gem 'ontologies_linked_data', github: 'ncbo/ontologies_linked_data', branch: 'master'
gem 'sparql-client', github: 'ncbo/sparql-client', branch: 'master'
+gem 'goo', github: 'ncbo/goo', branch: 'master'
+gem 'ontologies_linked_data', github: 'ncbo/ontologies_linked_data', branch: 'master'
+
group :development do
# bcrypt_pbkdf and ed35519 is required for capistrano deployments when using ed25519 keys; see https://github.com/miloserdow/capistrano-deploy/issues/42
@@ -69,9 +72,13 @@ group :profiling do
end
group :test do
- gem 'minitest', '~> 4.0'
+ gem 'crack', '0.4.5'
+ gem 'minitest', '~> 5.0'
+ gem 'minitest-hooks', "~> 1.5"
gem 'minitest-stub_any_instance'
gem 'rack-test'
gem 'simplecov', require: false
gem 'simplecov-cobertura' # for codecov.io
+ gem 'webmock', '~> 3.19.1'
+ gem 'webrick'
end
diff --git a/Gemfile.lock b/Gemfile.lock
index fc1b44ac..c21d2c66 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -1,6 +1,6 @@
GIT
remote: https://github.com/ncbo/goo.git
- revision: f01386fe55ffba39cbf5e0bf95c8e2dc18767711
+ revision: f943ac33e87c55c88131c32d826ed5d39c89302c
branch: master
specs:
goo (0.0.2)
@@ -8,6 +8,7 @@ GIT
pry
rdf (= 1.0.8)
redis
+ request_store
rest-client
rsolr
sparql-client
@@ -15,7 +16,7 @@ GIT
GIT
remote: https://github.com/ncbo/ncbo_annotator.git
- revision: 63c986880aa88c9384043e6611a682434a14aba7
+ revision: d46d667ec1b2343fbcdab587f1a70e763b894133
branch: master
specs:
ncbo_annotator (0.0.1)
@@ -26,11 +27,13 @@ GIT
GIT
remote: https://github.com/ncbo/ncbo_cron.git
- revision: b5764cc314758dbfdbb855b2b63c1c53b1325f6f
+ revision: 32a721543f518167385256fa4f0a08c8c6d987b7
branch: master
specs:
ncbo_cron (0.0.1)
dante
+ faraday (~> 2)
+ faraday-follow_redirects (~> 0)
goo
google-analytics-data
mlanett-redis-lock
@@ -42,7 +45,7 @@ GIT
GIT
remote: https://github.com/ncbo/ncbo_ontology_recommender.git
- revision: 013abea4af3b10910ec661dbb358a4b6cae198a4
+ revision: 9dbd4f179e42c52095129d353a5ac584e9bd47f3
branch: master
specs:
ncbo_ontology_recommender (0.0.1)
@@ -53,7 +56,7 @@ GIT
GIT
remote: https://github.com/ncbo/ontologies_linked_data.git
- revision: 4dc3b1123e871b53ba9eb51983ff69c5d2c75c18
+ revision: 9ab4be437ed84f5a480e7f0d8799824fcea310ae
branch: master
specs:
ontologies_linked_data (0.0.1)
@@ -73,7 +76,7 @@ GIT
GIT
remote: https://github.com/ncbo/sparql-client.git
- revision: d418d56a6c9ff5692f925b45739a2a1c66bca851
+ revision: e89c26aa96f184dbe9b52d51e04fb3d9ba998dbc
branch: master
specs:
sparql-client (1.0.1)
@@ -100,26 +103,29 @@ GIT
GEM
remote: https://rubygems.org/
specs:
- activesupport (3.2.22.5)
- i18n (~> 0.6, >= 0.6.4)
- multi_json (~> 1.0)
- addressable (2.8.6)
- public_suffix (>= 2.0.2, < 6.0)
- airbrussh (1.5.1)
+ activesupport (5.2.8.1)
+ concurrent-ruby (~> 1.0, >= 1.0.2)
+ i18n (>= 0.7, < 2)
+ minitest (~> 5.1)
+ tzinfo (~> 1.1)
+ addressable (2.8.7)
+ public_suffix (>= 2.0.2, < 7.0)
+ airbrussh (1.5.3)
sshkit (>= 1.6.1, != 1.7.0)
ast (2.4.2)
backports (3.25.0)
base64 (0.2.0)
bcrypt (3.1.20)
- bcrypt_pbkdf (1.1.0)
- bigdecimal (1.4.2)
- builder (3.2.4)
- capistrano (3.18.1)
+ bcrypt_pbkdf (1.1.1)
+ bcrypt_pbkdf (1.1.1-arm64-darwin)
+ bigdecimal (3.1.8)
+ builder (3.3.0)
+ capistrano (3.19.1)
airbrussh (>= 1.0.0)
i18n
rake (>= 10.0.0)
sshkit (>= 1.9.0)
- capistrano-bundler (2.1.0)
+ capistrano-bundler (2.1.1)
capistrano (~> 3.1)
capistrano-locally (0.3.0)
capistrano (~> 3.0)
@@ -127,91 +133,95 @@ GEM
capistrano (~> 3.1)
sshkit (~> 1.3)
coderay (1.1.3)
- concurrent-ruby (1.2.3)
+ concurrent-ruby (1.3.4)
connection_pool (2.4.1)
+ crack (0.4.5)
+ rexml
cube-ruby (0.0.3)
dante (0.2.0)
date (3.3.4)
- docile (1.4.0)
+ docile (1.4.1)
domain_name (0.6.20240107)
ed25519 (1.3.0)
- faraday (2.8.1)
- base64
- faraday-net_http (>= 2.0, < 3.1)
- ruby2_keywords (>= 0.0.4)
- faraday-net_http (3.0.2)
- faraday-retry (2.2.0)
+ faraday (2.12.0)
+ faraday-net_http (>= 2.0, < 3.4)
+ json
+ logger
+ faraday-follow_redirects (0.3.0)
+ faraday (>= 1, < 3)
+ faraday-net_http (3.3.0)
+ net-http
+ faraday-retry (2.2.1)
faraday (~> 2.0)
- ffi (1.16.3)
- gapic-common (0.21.1)
+ ffi (1.17.0-aarch64-linux-gnu)
+ ffi (1.17.0-arm64-darwin)
+ ffi (1.17.0-x86_64-linux-gnu)
+ gapic-common (0.22.0)
faraday (>= 1.9, < 3.a)
faraday-retry (>= 1.0, < 3.a)
- google-protobuf (~> 3.18)
- googleapis-common-protos (>= 1.4.0, < 2.a)
- googleapis-common-protos-types (>= 1.11.0, < 2.a)
- googleauth (~> 1.9)
- grpc (~> 1.59)
+ google-protobuf (>= 3.25, < 5.a)
+ googleapis-common-protos (~> 1.6)
+ googleapis-common-protos-types (~> 1.15)
+ googleauth (~> 1.11)
+ grpc (~> 1.65)
get_process_mem (0.2.7)
ffi (~> 1.0)
- google-analytics-data (0.6.0)
+ google-analytics-data (0.6.1)
google-analytics-data-v1beta (>= 0.11, < 2.a)
google-cloud-core (~> 1.6)
- google-analytics-data-v1beta (0.12.0)
+ google-analytics-data-v1beta (0.13.1)
gapic-common (>= 0.21.1, < 2.a)
google-cloud-errors (~> 1.0)
- google-cloud-core (1.7.0)
+ google-cloud-core (1.7.1)
google-cloud-env (>= 1.0, < 3.a)
google-cloud-errors (~> 1.0)
- google-cloud-env (2.1.1)
+ google-cloud-env (2.2.1)
faraday (>= 1.0, < 3.a)
google-cloud-errors (1.4.0)
- google-protobuf (3.25.3-aarch64-linux)
- google-protobuf (3.25.3-arm64-darwin)
- google-protobuf (3.25.3-x86_64-darwin)
- google-protobuf (3.25.3-x86_64-linux)
- googleapis-common-protos (1.5.0)
- google-protobuf (~> 3.18)
+ google-protobuf (3.25.5-aarch64-linux)
+ google-protobuf (3.25.5-arm64-darwin)
+ google-protobuf (3.25.5-x86_64-linux)
+ googleapis-common-protos (1.6.0)
+ google-protobuf (>= 3.18, < 5.a)
googleapis-common-protos-types (~> 1.7)
grpc (~> 1.41)
- googleapis-common-protos-types (1.14.0)
- google-protobuf (~> 3.18)
- googleauth (1.11.0)
+ googleapis-common-protos-types (1.16.0)
+ google-protobuf (>= 3.18, < 5.a)
+ googleauth (1.11.1)
faraday (>= 1.0, < 3.a)
google-cloud-env (~> 2.1)
jwt (>= 1.4, < 3.0)
multi_json (~> 1.11)
os (>= 0.9, < 2.0)
signet (>= 0.16, < 2.a)
- grpc (1.62.0-aarch64-linux)
- google-protobuf (~> 3.25)
+ grpc (1.66.0-aarch64-linux)
+ google-protobuf (>= 3.25, < 5.0)
googleapis-common-protos-types (~> 1.0)
- grpc (1.62.0-arm64-darwin)
- google-protobuf (~> 3.25)
+ grpc (1.66.0-arm64-darwin)
+ google-protobuf (>= 3.25, < 5.0)
googleapis-common-protos-types (~> 1.0)
- grpc (1.62.0-x86_64-darwin)
- google-protobuf (~> 3.25)
- googleapis-common-protos-types (~> 1.0)
- grpc (1.62.0-x86_64-linux)
- google-protobuf (~> 3.25)
+ grpc (1.66.0-x86_64-linux)
+ google-protobuf (>= 3.25, < 5.0)
googleapis-common-protos-types (~> 1.0)
haml (5.2.2)
temple (>= 0.8.0)
tilt
+ hashdiff (1.1.1)
http-accept (1.7.0)
- http-cookie (1.0.5)
+ http-cookie (1.0.7)
domain_name (~> 0.5)
- i18n (0.9.5)
+ i18n (1.14.6)
concurrent-ruby (~> 1.0)
- json (2.7.1)
+ json (2.7.2)
json-schema (2.8.1)
addressable (>= 2.4)
- json_pure (2.7.1)
- jwt (2.8.1)
+ json_pure (2.7.2)
+ jwt (2.9.3)
base64
kgio (2.11.4)
language_server-protocol (3.17.0.3)
libxml-ruby (5.0.3)
- logger (1.6.0)
+ logger (1.6.1)
macaddr (1.7.2)
systemu (~> 2.6.5)
mail (2.8.1)
@@ -219,19 +229,23 @@ GEM
net-imap
net-pop
net-smtp
- method_source (1.0.0)
- mime-types (3.5.2)
+ method_source (1.1.0)
+ mime-types (3.6.0)
+ logger
mime-types-data (~> 3.2015)
- mime-types-data (3.2024.0305)
+ mime-types-data (3.2024.1001)
mini_mime (1.1.5)
- minitest (4.7.5)
+ minitest (5.25.1)
+ minitest-hooks (1.5.2)
+ minitest (> 5.3)
minitest-stub_any_instance (1.0.3)
mlanett-redis-lock (0.2.7)
redis
multi_json (1.15.0)
- mutex_m (0.2.0)
+ net-http (0.4.1)
+ uri
net-http-persistent (2.9.4)
- net-imap (0.4.10)
+ net-imap (0.4.16)
date
net-protocol
net-pop (0.1.2)
@@ -244,16 +258,19 @@ GEM
net-ssh (>= 5.0.0, < 8.0.0)
net-smtp (0.5.0)
net-protocol
- net-ssh (7.2.1)
+ net-ssh (7.3.0)
netrc (0.11.0)
- newrelic_rpm (9.8.0)
- oj (3.16.1)
+ newrelic_rpm (9.14.0)
+ oj (3.16.6)
+ bigdecimal (>= 3.0)
+ ostruct (>= 0.2)
omni_logger (0.1.4)
logger
os (1.1.4)
- parallel (1.24.0)
+ ostruct (0.6.0)
+ parallel (1.26.3)
parseconfig (1.1.2)
- parser (3.3.0.5)
+ parser (3.3.5.0)
ast (~> 2.4.1)
racc
pony (1.13.1)
@@ -261,8 +278,8 @@ GEM
pry (0.14.2)
coderay (~> 1.1)
method_source (~> 1.0)
- public_suffix (5.0.4)
- racc (1.7.3)
+ public_suffix (6.0.1)
+ racc (1.8.1)
rack (1.6.13)
rack-accept (0.4.5)
rack (>= 0.4)
@@ -278,48 +295,48 @@ GEM
rack
rack-test (2.1.0)
rack (>= 1.3)
- rack-timeout (0.6.3)
+ rack-timeout (0.7.0)
rainbow (3.1.1)
raindrops (0.20.1)
rake (10.5.0)
rdf (1.0.8)
addressable (>= 2.2)
redcarpet (3.6.0)
- redis (5.1.0)
- redis-client (>= 0.17.0)
- redis-client (0.21.1)
+ redis (5.3.0)
+ redis-client (>= 0.22.0)
+ redis-client (0.22.2)
connection_pool
redis-rack-cache (2.2.1)
rack-cache (>= 1.10, < 2)
redis-store (>= 1.6, < 2)
- redis-store (1.10.0)
+ redis-store (1.11.0)
redis (>= 4, < 6)
- regexp_parser (2.9.0)
+ regexp_parser (2.9.2)
+ request_store (1.7.0)
+ rack (>= 1.4)
rest-client (2.1.0)
http-accept (>= 1.7.0, < 2.0)
http-cookie (>= 1.0.2, < 2.0)
mime-types (>= 1.16, < 4.0)
netrc (~> 0.8)
- rexml (3.2.6)
+ rexml (3.3.8)
rsolr (2.6.0)
builder (>= 2.1.2)
faraday (>= 0.9, < 3, != 2.0.0)
- rubocop (1.62.1)
+ rubocop (1.66.1)
json (~> 2.3)
language_server-protocol (>= 3.17.0)
parallel (~> 1.10)
parser (>= 3.3.0.2)
rainbow (>= 2.2.2, < 4.0)
- regexp_parser (>= 1.8, < 3.0)
- rexml (>= 3.2.5, < 4.0)
- rubocop-ast (>= 1.31.1, < 2.0)
+ regexp_parser (>= 2.4, < 3.0)
+ rubocop-ast (>= 1.32.2, < 2.0)
ruby-progressbar (~> 1.7)
unicode-display_width (>= 2.4.0, < 3.0)
- rubocop-ast (1.31.2)
- parser (>= 3.3.0.4)
+ rubocop-ast (1.32.3)
+ parser (>= 3.3.1.0)
ruby-progressbar (1.13.0)
ruby-xxHash (0.4.0.2)
- ruby2_keywords (0.0.5)
rubyzip (2.3.2)
rufus-scheduler (2.0.24)
tzinfo (>= 0.3.22)
@@ -335,7 +352,7 @@ GEM
simplecov-cobertura (2.1.0)
rexml
simplecov (~> 0.19)
- simplecov-html (0.12.3)
+ simplecov-html (0.13.1)
simplecov_json_formatter (0.1.4)
sinatra (1.4.8)
rack (~> 1.5)
@@ -350,58 +367,64 @@ GEM
rack-test
sinatra (~> 1.4.0)
tilt (>= 1.3, < 3)
- sshkit (1.22.0)
- mutex_m
+ sshkit (1.23.1)
+ base64
net-scp (>= 1.1.2)
net-sftp (>= 2.1.2)
net-ssh (>= 2.8.0)
+ ostruct
systemu (2.6.5)
temple (0.10.3)
- tilt (2.3.0)
+ thread_safe (0.3.6)
+ tilt (2.4.0)
timeout (0.4.1)
- tzinfo (2.0.6)
- concurrent-ruby (~> 1.0)
- unicode-display_width (2.5.0)
+ tzinfo (1.2.11)
+ thread_safe (~> 0.1)
+ unicode-display_width (2.6.0)
unicorn (6.1.0)
kgio (~> 2.6)
raindrops (~> 0.7)
unicorn-worker-killer (0.4.5)
get_process_mem (~> 0)
unicorn (>= 4, < 7)
+ uri (0.13.1)
uuid (2.3.9)
macaddr (~> 1.0)
+ webmock (3.19.1)
+ addressable (>= 2.8.0)
+ crack (>= 0.3.2)
+ hashdiff (>= 0.4.0, < 2.0.0)
+ webrick (1.8.2)
PLATFORMS
aarch64-linux
arm64-darwin-22
- arm64-darwin-23
- x86_64-darwin-18
- x86_64-darwin-21
- x86_64-darwin-23
x86_64-linux
DEPENDENCIES
- activesupport (~> 3.2)
+ activesupport (~> 5)
bcrypt_pbkdf (>= 1.0, < 2.0)
- bigdecimal (= 1.4.2)
+ bigdecimal
capistrano (~> 3)
capistrano-bundler
capistrano-locally
capistrano-rbenv
+ crack (= 0.4.5)
cube-ruby
ed25519 (>= 1.2, < 2.0)
ffi
goo!
haml (~> 5.2.2)
json-schema (~> 2.0)
- minitest (~> 4.0)
+ minitest (~> 5.0)
+ minitest-hooks (~> 1.5)
minitest-stub_any_instance
- multi_json (~> 1.0)
+ multi_json
ncbo_annotator!
ncbo_cron!
ncbo_ontology_recommender!
newrelic_rpm
- oj (~> 3.0)
+ oj
ontologies_linked_data!
parseconfig
rack
@@ -418,6 +441,7 @@ DEPENDENCIES
redis
redis-rack-cache (~> 2.0)
redis-store (~> 1.10)
+ rexml
rubocop
shotgun!
simplecov
@@ -428,6 +452,8 @@ DEPENDENCIES
sparql-client!
unicorn
unicorn-worker-killer
+ webmock (~> 3.19.1)
+ webrick
BUNDLED WITH
- 2.4.22
+ 2.5.7
diff --git a/app.rb b/app.rb
index 41ad56ac..e09178bd 100644
--- a/app.rb
+++ b/app.rb
@@ -27,6 +27,7 @@
require_relative 'lib/rack/cube_reporter'
require_relative 'lib/rack/param_translator'
require_relative 'lib/rack/slice_detection'
+require_relative 'lib/rack/request_lang'
# Logging setup
require_relative "config/logging"
@@ -34,6 +35,8 @@
# Inflector setup
require_relative "config/inflections"
+require 'request_store'
+
# Protection settings
set :protection, :except => :path_traversal
@@ -141,6 +144,9 @@
use Rack::PostBodyToParams
use Rack::ParamTranslator
+use RequestStore::Middleware
+use Rack::RequestLang
+
use LinkedData::Security::Authorization
use LinkedData::Security::AccessDenied
diff --git a/config/solr/property_search/enumsconfig.xml b/config/solr/property_search/enumsconfig.xml
new file mode 100644
index 00000000..72e7b7d3
--- /dev/null
+++ b/config/solr/property_search/enumsconfig.xml
@@ -0,0 +1,12 @@
+
+
+
+ ONTOLOGY
+ VALUE_SET_COLLECTION
+
+
+ ANNOTATION
+ DATATYPE
+ OBJECT
+
+
\ No newline at end of file
diff --git a/config/solr/property_search/mapping-ISOLatin1Accent.txt b/config/solr/property_search/mapping-ISOLatin1Accent.txt
new file mode 100644
index 00000000..ede77425
--- /dev/null
+++ b/config/solr/property_search/mapping-ISOLatin1Accent.txt
@@ -0,0 +1,246 @@
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Syntax:
+# "source" => "target"
+# "source".length() > 0 (source cannot be empty.)
+# "target".length() >= 0 (target can be empty.)
+
+# example:
+# "À" => "A"
+# "\u00C0" => "A"
+# "\u00C0" => "\u0041"
+# "ß" => "ss"
+# "\t" => " "
+# "\n" => ""
+
+# À => A
+"\u00C0" => "A"
+
+# Á => A
+"\u00C1" => "A"
+
+# Â => A
+"\u00C2" => "A"
+
+# Ã => A
+"\u00C3" => "A"
+
+# Ä => A
+"\u00C4" => "A"
+
+# Å => A
+"\u00C5" => "A"
+
+# Æ => AE
+"\u00C6" => "AE"
+
+# Ç => C
+"\u00C7" => "C"
+
+# È => E
+"\u00C8" => "E"
+
+# É => E
+"\u00C9" => "E"
+
+# Ê => E
+"\u00CA" => "E"
+
+# Ë => E
+"\u00CB" => "E"
+
+# Ì => I
+"\u00CC" => "I"
+
+# Í => I
+"\u00CD" => "I"
+
+# Î => I
+"\u00CE" => "I"
+
+# Ï => I
+"\u00CF" => "I"
+
+# IJ => IJ
+"\u0132" => "IJ"
+
+# Ð => D
+"\u00D0" => "D"
+
+# Ñ => N
+"\u00D1" => "N"
+
+# Ò => O
+"\u00D2" => "O"
+
+# Ó => O
+"\u00D3" => "O"
+
+# Ô => O
+"\u00D4" => "O"
+
+# Õ => O
+"\u00D5" => "O"
+
+# Ö => O
+"\u00D6" => "O"
+
+# Ø => O
+"\u00D8" => "O"
+
+# Œ => OE
+"\u0152" => "OE"
+
+# Þ
+"\u00DE" => "TH"
+
+# Ù => U
+"\u00D9" => "U"
+
+# Ú => U
+"\u00DA" => "U"
+
+# Û => U
+"\u00DB" => "U"
+
+# Ü => U
+"\u00DC" => "U"
+
+# Ý => Y
+"\u00DD" => "Y"
+
+# Ÿ => Y
+"\u0178" => "Y"
+
+# à => a
+"\u00E0" => "a"
+
+# á => a
+"\u00E1" => "a"
+
+# â => a
+"\u00E2" => "a"
+
+# ã => a
+"\u00E3" => "a"
+
+# ä => a
+"\u00E4" => "a"
+
+# å => a
+"\u00E5" => "a"
+
+# æ => ae
+"\u00E6" => "ae"
+
+# ç => c
+"\u00E7" => "c"
+
+# è => e
+"\u00E8" => "e"
+
+# é => e
+"\u00E9" => "e"
+
+# ê => e
+"\u00EA" => "e"
+
+# ë => e
+"\u00EB" => "e"
+
+# ì => i
+"\u00EC" => "i"
+
+# í => i
+"\u00ED" => "i"
+
+# î => i
+"\u00EE" => "i"
+
+# ï => i
+"\u00EF" => "i"
+
+# ij => ij
+"\u0133" => "ij"
+
+# ð => d
+"\u00F0" => "d"
+
+# ñ => n
+"\u00F1" => "n"
+
+# ò => o
+"\u00F2" => "o"
+
+# ó => o
+"\u00F3" => "o"
+
+# ô => o
+"\u00F4" => "o"
+
+# õ => o
+"\u00F5" => "o"
+
+# ö => o
+"\u00F6" => "o"
+
+# ø => o
+"\u00F8" => "o"
+
+# œ => oe
+"\u0153" => "oe"
+
+# ß => ss
+"\u00DF" => "ss"
+
+# þ => th
+"\u00FE" => "th"
+
+# ù => u
+"\u00F9" => "u"
+
+# ú => u
+"\u00FA" => "u"
+
+# û => u
+"\u00FB" => "u"
+
+# ü => u
+"\u00FC" => "u"
+
+# ý => y
+"\u00FD" => "y"
+
+# ÿ => y
+"\u00FF" => "y"
+
+# ff => ff
+"\uFB00" => "ff"
+
+# fi => fi
+"\uFB01" => "fi"
+
+# fl => fl
+"\uFB02" => "fl"
+
+# ffi => ffi
+"\uFB03" => "ffi"
+
+# ffl => ffl
+"\uFB04" => "ffl"
+
+# ſt => ft
+"\uFB05" => "ft"
+
+# st => st
+"\uFB06" => "st"
diff --git a/config/solr/property_search/schema.xml b/config/solr/property_search/schema.xml
new file mode 100644
index 00000000..20824ea6
--- /dev/null
+++ b/config/solr/property_search/schema.xml
@@ -0,0 +1,1179 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ id
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/config/solr/property_search/solrconfig.xml b/config/solr/property_search/solrconfig.xml
new file mode 100644
index 00000000..771a0f32
--- /dev/null
+++ b/config/solr/property_search/solrconfig.xml
@@ -0,0 +1,1299 @@
+
+
+
+
+
+
+
+
+ 8.8.2
+
+
+
+
+
+
+
+
+
+
+ ${solr.data.dir:}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ${solr.lock.type:native}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ${solr.ulog.dir:}
+ ${solr.ulog.numVersionBuckets:65536}
+
+
+
+
+ ${solr.autoCommit.maxTime:15000}
+ false
+
+
+
+
+
+ ${solr.autoSoftCommit.maxTime:-1}
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ${solr.max.booleanClauses:500000}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+
+ 20
+
+
+ 200
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ explicit
+ 10
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ explicit
+ json
+ true
+
+
+
+
+
+ _text_
+
+
+
+
+
+
+
+
+ text_general
+
+
+
+
+
+ default
+ _text_
+ solr.DirectSolrSpellChecker
+
+ internal
+
+ 0.5
+
+ 2
+
+ 1
+
+ 5
+
+ 4
+
+ 0.01
+
+
+
+
+
+
+
+
+
+
+
+ default
+ on
+ true
+ 10
+ 5
+ 5
+ true
+ true
+ 10
+ 5
+
+
+ spellcheck
+
+
+
+
+
+
+
+
+
+ true
+ false
+
+
+ terms
+
+
+
+
+
+
+
+
+
+
+ 100
+
+
+
+
+
+
+
+ 70
+
+ 0.5
+
+ [-\w ,/\n\"']{20,200}
+
+
+
+
+
+
+ ]]>
+ ]]>
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ,,
+ ,,
+ ,,
+ ,,
+ ,]]>
+ ]]>
+
+
+
+
+
+ 10
+ .,!?
+
+
+
+
+
+
+ WORD
+
+
+ en
+ US
+
+
+
+
+
+
+
+
+
+
+
+ [^\w-\.]
+ _
+
+
+
+
+
+
+ yyyy-MM-dd['T'[HH:mm[:ss[.SSS]][z
+ yyyy-MM-dd['T'[HH:mm[:ss[,SSS]][z
+ yyyy-MM-dd HH:mm[:ss[.SSS]][z
+ yyyy-MM-dd HH:mm[:ss[,SSS]][z
+ [EEE, ]dd MMM yyyy HH:mm[:ss] z
+ EEEE, dd-MMM-yy HH:mm:ss z
+ EEE MMM ppd HH:mm:ss [z ]yyyy
+
+
+
+
+ java.lang.String
+ text_general
+
+ *_str
+ 256
+
+
+ true
+
+
+ java.lang.Boolean
+ booleans
+
+
+ java.util.Date
+ pdates
+
+
+ java.lang.Long
+ java.lang.Integer
+ plongs
+
+
+ java.lang.Number
+ pdoubles
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ text/plain; charset=UTF-8
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/config/solr/solr.xml b/config/solr/solr.xml
new file mode 100644
index 00000000..d9d089e4
--- /dev/null
+++ b/config/solr/solr.xml
@@ -0,0 +1,60 @@
+
+
+
+
+
+
+
+ ${solr.max.booleanClauses:500000}
+ ${solr.sharedLib:}
+ ${solr.allowPaths:}
+
+
+
+ ${host:}
+ ${solr.port.advertise:0}
+ ${hostContext:solr}
+
+ ${genericCoreNodeNames:true}
+
+ ${zkClientTimeout:30000}
+ ${distribUpdateSoTimeout:600000}
+ ${distribUpdateConnTimeout:60000}
+ ${zkCredentialsProvider:org.apache.solr.common.cloud.DefaultZkCredentialsProvider}
+ ${zkACLProvider:org.apache.solr.common.cloud.DefaultZkACLProvider}
+
+
+
+
+ ${socketTimeout:600000}
+ ${connTimeout:60000}
+ ${solr.shardsWhitelist:}
+
+
+
+
+
diff --git a/config/solr/term_search/enumsconfig.xml b/config/solr/term_search/enumsconfig.xml
new file mode 100644
index 00000000..72e7b7d3
--- /dev/null
+++ b/config/solr/term_search/enumsconfig.xml
@@ -0,0 +1,12 @@
+
+
+
+ ONTOLOGY
+ VALUE_SET_COLLECTION
+
+
+ ANNOTATION
+ DATATYPE
+ OBJECT
+
+
\ No newline at end of file
diff --git a/config/solr/term_search/mapping-ISOLatin1Accent.txt b/config/solr/term_search/mapping-ISOLatin1Accent.txt
new file mode 100644
index 00000000..ede77425
--- /dev/null
+++ b/config/solr/term_search/mapping-ISOLatin1Accent.txt
@@ -0,0 +1,246 @@
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Syntax:
+# "source" => "target"
+# "source".length() > 0 (source cannot be empty.)
+# "target".length() >= 0 (target can be empty.)
+
+# example:
+# "À" => "A"
+# "\u00C0" => "A"
+# "\u00C0" => "\u0041"
+# "ß" => "ss"
+# "\t" => " "
+# "\n" => ""
+
+# À => A
+"\u00C0" => "A"
+
+# Á => A
+"\u00C1" => "A"
+
+# Â => A
+"\u00C2" => "A"
+
+# Ã => A
+"\u00C3" => "A"
+
+# Ä => A
+"\u00C4" => "A"
+
+# Å => A
+"\u00C5" => "A"
+
+# Æ => AE
+"\u00C6" => "AE"
+
+# Ç => C
+"\u00C7" => "C"
+
+# È => E
+"\u00C8" => "E"
+
+# É => E
+"\u00C9" => "E"
+
+# Ê => E
+"\u00CA" => "E"
+
+# Ë => E
+"\u00CB" => "E"
+
+# Ì => I
+"\u00CC" => "I"
+
+# Í => I
+"\u00CD" => "I"
+
+# Î => I
+"\u00CE" => "I"
+
+# Ï => I
+"\u00CF" => "I"
+
+# IJ => IJ
+"\u0132" => "IJ"
+
+# Ð => D
+"\u00D0" => "D"
+
+# Ñ => N
+"\u00D1" => "N"
+
+# Ò => O
+"\u00D2" => "O"
+
+# Ó => O
+"\u00D3" => "O"
+
+# Ô => O
+"\u00D4" => "O"
+
+# Õ => O
+"\u00D5" => "O"
+
+# Ö => O
+"\u00D6" => "O"
+
+# Ø => O
+"\u00D8" => "O"
+
+# Œ => OE
+"\u0152" => "OE"
+
+# Þ
+"\u00DE" => "TH"
+
+# Ù => U
+"\u00D9" => "U"
+
+# Ú => U
+"\u00DA" => "U"
+
+# Û => U
+"\u00DB" => "U"
+
+# Ü => U
+"\u00DC" => "U"
+
+# Ý => Y
+"\u00DD" => "Y"
+
+# Ÿ => Y
+"\u0178" => "Y"
+
+# à => a
+"\u00E0" => "a"
+
+# á => a
+"\u00E1" => "a"
+
+# â => a
+"\u00E2" => "a"
+
+# ã => a
+"\u00E3" => "a"
+
+# ä => a
+"\u00E4" => "a"
+
+# å => a
+"\u00E5" => "a"
+
+# æ => ae
+"\u00E6" => "ae"
+
+# ç => c
+"\u00E7" => "c"
+
+# è => e
+"\u00E8" => "e"
+
+# é => e
+"\u00E9" => "e"
+
+# ê => e
+"\u00EA" => "e"
+
+# ë => e
+"\u00EB" => "e"
+
+# ì => i
+"\u00EC" => "i"
+
+# í => i
+"\u00ED" => "i"
+
+# î => i
+"\u00EE" => "i"
+
+# ï => i
+"\u00EF" => "i"
+
+# ij => ij
+"\u0133" => "ij"
+
+# ð => d
+"\u00F0" => "d"
+
+# ñ => n
+"\u00F1" => "n"
+
+# ò => o
+"\u00F2" => "o"
+
+# ó => o
+"\u00F3" => "o"
+
+# ô => o
+"\u00F4" => "o"
+
+# õ => o
+"\u00F5" => "o"
+
+# ö => o
+"\u00F6" => "o"
+
+# ø => o
+"\u00F8" => "o"
+
+# œ => oe
+"\u0153" => "oe"
+
+# ß => ss
+"\u00DF" => "ss"
+
+# þ => th
+"\u00FE" => "th"
+
+# ù => u
+"\u00F9" => "u"
+
+# ú => u
+"\u00FA" => "u"
+
+# û => u
+"\u00FB" => "u"
+
+# ü => u
+"\u00FC" => "u"
+
+# ý => y
+"\u00FD" => "y"
+
+# ÿ => y
+"\u00FF" => "y"
+
+# ff => ff
+"\uFB00" => "ff"
+
+# fi => fi
+"\uFB01" => "fi"
+
+# fl => fl
+"\uFB02" => "fl"
+
+# ffi => ffi
+"\uFB03" => "ffi"
+
+# ffl => ffl
+"\uFB04" => "ffl"
+
+# ſt => ft
+"\uFB05" => "ft"
+
+# st => st
+"\uFB06" => "st"
diff --git a/config/solr/term_search/schema.xml b/config/solr/term_search/schema.xml
new file mode 100644
index 00000000..73c75b31
--- /dev/null
+++ b/config/solr/term_search/schema.xml
@@ -0,0 +1,1224 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ id
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/config/solr/term_search/solrconfig.xml b/config/solr/term_search/solrconfig.xml
new file mode 100644
index 00000000..771a0f32
--- /dev/null
+++ b/config/solr/term_search/solrconfig.xml
@@ -0,0 +1,1299 @@
+
+
+
+
+
+
+
+
+ 8.8.2
+
+
+
+
+
+
+
+
+
+
+ ${solr.data.dir:}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ${solr.lock.type:native}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ${solr.ulog.dir:}
+ ${solr.ulog.numVersionBuckets:65536}
+
+
+
+
+ ${solr.autoCommit.maxTime:15000}
+ false
+
+
+
+
+
+ ${solr.autoSoftCommit.maxTime:-1}
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ${solr.max.booleanClauses:500000}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+
+ 20
+
+
+ 200
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ explicit
+ 10
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ explicit
+ json
+ true
+
+
+
+
+
+ _text_
+
+
+
+
+
+
+
+
+ text_general
+
+
+
+
+
+ default
+ _text_
+ solr.DirectSolrSpellChecker
+
+ internal
+
+ 0.5
+
+ 2
+
+ 1
+
+ 5
+
+ 4
+
+ 0.01
+
+
+
+
+
+
+
+
+
+
+
+ default
+ on
+ true
+ 10
+ 5
+ 5
+ true
+ true
+ 10
+ 5
+
+
+ spellcheck
+
+
+
+
+
+
+
+
+
+ true
+ false
+
+
+ terms
+
+
+
+
+
+
+
+
+
+
+ 100
+
+
+
+
+
+
+
+ 70
+
+ 0.5
+
+ [-\w ,/\n\"']{20,200}
+
+
+
+
+
+
+ ]]>
+ ]]>
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ,,
+ ,,
+ ,,
+ ,,
+ ,]]>
+ ]]>
+
+
+
+
+
+ 10
+ .,!?
+
+
+
+
+
+
+ WORD
+
+
+ en
+ US
+
+
+
+
+
+
+
+
+
+
+
+ [^\w-\.]
+ _
+
+
+
+
+
+
+ yyyy-MM-dd['T'[HH:mm[:ss[.SSS]][z
+ yyyy-MM-dd['T'[HH:mm[:ss[,SSS]][z
+ yyyy-MM-dd HH:mm[:ss[.SSS]][z
+ yyyy-MM-dd HH:mm[:ss[,SSS]][z
+ [EEE, ]dd MMM yyyy HH:mm[:ss] z
+ EEEE, dd-MMM-yy HH:mm:ss z
+ EEE MMM ppd HH:mm:ss [z ]yyyy
+
+
+
+
+ java.lang.String
+ text_general
+
+ *_str
+ 256
+
+
+ true
+
+
+ java.lang.Boolean
+ booleans
+
+
+ java.util.Date
+ pdates
+
+
+ java.lang.Long
+ java.lang.Integer
+ plongs
+
+
+ java.lang.Number
+ pdoubles
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ text/plain; charset=UTF-8
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/controllers/recommender_v1_controller.rb b/controllers/recommender_v1_controller.rb
index 5b0123a9..29991299 100644
--- a/controllers/recommender_v1_controller.rb
+++ b/controllers/recommender_v1_controller.rb
@@ -1,17 +1,10 @@
class RecommenderController < ApplicationController
namespace "/recommender_v1" do
- # execute an annotator query
+ # Mark this route as deprecated
get do
- text = params["text"]
- raise error 400, "A text to be analyzed by the recommender must be supplied using the argument text=" if text.nil? || text.strip.empty?
- acronyms = restricted_ontologies_to_acronyms(params)
- display_classes = params['display_classes'].eql?('true') # default will be false
- recommender = Recommender::Models::NcboRecommender.new
- recommendations = recommender.recommend(text, acronyms, display_classes)
- reply 200, recommendations
+ reply 410, { message: "This API endpoint has been deprecated and is no longer available. Please use /recommender or refer to the API documentation for updated routes." }
end
end
end
-
diff --git a/controllers/search_controller.rb b/controllers/search_controller.rb
index 3bc1c13f..280944a6 100644
--- a/controllers/search_controller.rb
+++ b/controllers/search_controller.rb
@@ -45,6 +45,7 @@ def process_search(params=nil)
doc[:ontology_rank] = (ontology_rank[doc[:submissionAcronym]] && !ontology_rank[doc[:submissionAcronym]].empty?) ? ontology_rank[doc[:submissionAcronym]][:normalizedScore] : 0.0
doc[:properties] = MultiJson.load(doc.delete(:propertyRaw)) if include_param_contains?(:properties)
instance = doc[:provisional] ? LinkedData::Models::ProvisionalClass.read_only(doc) : LinkedData::Models::Class.read_only(doc)
+ filter_language_attributes(params, instance)
docs.push(instance)
end
diff --git a/controllers/users_controller.rb b/controllers/users_controller.rb
index fdad6a74..a25c1b5d 100644
--- a/controllers/users_controller.rb
+++ b/controllers/users_controller.rb
@@ -46,14 +46,24 @@ class UsersController < ApplicationController
params["display"] = User.attributes.join(",") # used to serialize everything via the serializer
user = LinkedData::Models::User.where(email: email, username: username).include(User.goo_attrs_to_load(includes_param)).first
error 404, "User not found" unless user
+
+ user.bring(:resetToken)
+ user.bring(:resetTokenExpireTime)
+ user.bring(:passwordHash)
+
if token.eql?(user.resetToken)
error 401, "Invalid password reset token" if user.resetTokenExpireTime.nil?
error 401, "The password reset token expired" if user.resetTokenExpireTime < Time.now.to_i
user.resetToken = nil
user.resetTokenExpireTime = nil
- user.save(override_security: true) if user.valid?
- user.show_apikey = true
- reply user
+
+ if user.valid?
+ user.save(override_security: true)
+ user.show_apikey = true
+ reply user
+ else
+ error 422, "Error resetting password"
+ end
else
error 401, "Password reset not authorized with this token"
end
diff --git a/docker-compose.yml b/docker-compose.yml
index 182a18c9..11811fe7 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -2,9 +2,9 @@ x-app: &app
build:
context: .
args:
- RUBY_VERSION: '2.7'
+ RUBY_VERSION: '3.0'
# Increase the version number in the image tag every time Dockerfile or its arguments is changed
- image: ontologies_api:0.0.4
+ image: ontologies_api:0.0.5
environment: &env
BUNDLE_PATH: /srv/ontoportal/bundle
# default bundle config resolves to /usr/local/bundle/config inside of the container
@@ -95,10 +95,18 @@ services:
- 4store
solr-ut:
- image: ontoportal/solr-ut:0.1.0
+ image: solr:8
+ volumes:
+ - ./test/solr/configsets:/configsets:ro
+ ports:
+ - "8983:8983"
+ command: >
+ bash -c "precreate-core term_search_core1 /configsets/term_search
+ && precreate-core prop_search_core1 /configsets/property_search
+ && solr-foreground"
healthcheck:
- test: ["CMD-SHELL", "curl -sf http://localhost:8983/solr/term_search_core1/admin/ping?wt=json | grep -iq '\"status\":\"OK\"}' || exit 1"]
- start_period: 3s
+ test: [ "CMD-SHELL", "curl -sf http://localhost:8983/solr/term_search_core1/admin/ping?wt=json | grep -iq '\"status\":\"OK\"}' || exit 1" ]
+ start_period: 5s
interval: 10s
timeout: 5s
retries: 5
@@ -107,7 +115,7 @@ services:
image: ontoportal/mgrep:0.0.2
platform: linux/amd64
healthcheck:
- test: ["CMD", "nc", "-z", "-v", "localhost", "55556"]
+ test: ["CMD", "nc", "-z", "-v", "127.0.0.1", "55556"]
start_period: 3s
interval: 10s
timeout: 5s
diff --git a/helpers/application_helper.rb b/helpers/application_helper.rb
index 251f90f0..ad813e31 100644
--- a/helpers/application_helper.rb
+++ b/helpers/application_helper.rb
@@ -125,10 +125,10 @@ def reply(*response)
check_access(obj) if LinkedData.settings.enable_security
# Slice or set check
- filter_for_slice(obj) if LinkedData.settings.enable_slices
+ obj = filter_for_slice(obj) if LinkedData.settings.enable_slices
# Check for custom ontologies set by user
- filter_for_user_onts(obj)
+ obj = filter_for_user_onts(obj)
LinkedData::Serializer.build_response(@env, status: status, ld_object: obj)
end
@@ -211,8 +211,8 @@ def restricted_ontologies(params=nil)
found_onts = onts.length > 0
- filter_for_slice(onts)
- filter_for_user_onts(onts)
+ onts = filter_for_slice(onts)
+ onts = filter_for_user_onts(onts)
end
onts = filter_access(onts)
@@ -396,19 +396,13 @@ def get_ontology_and_submission
else
submission = ont.latest_submission(status: [:RDF])
end
- if submission.nil?
- error 404, "Ontology #{@params["ontology"]} submission not found."
- end
+ error 404, "Ontology #{@params["ontology"]} submission not found." if submission.nil?
if !submission.ready?(status: [:RDF])
- error(404,
- "Ontology #{@params["ontology"]} submission i"+
- "#{submission.submissionId} has not been parsed.")
- end
- if submission.nil?
- if submission.nil?
- error 404, "Ontology #{@params["acronym"]} does not have any submissions"
- end
+ error 404, "Ontology #{@params["ontology"]} submission #{submission.submissionId} has not been parsed."
end
+
+ save_submission_language(submission)
+
return ont, submission
end
@@ -438,6 +432,22 @@ def naive_expiring_cache_read(key)
return object[:object]
end
+
+ def save_submission_language(submission, language_property = :naturalLanguage)
+ request_lang = RequestStore.store[:requested_lang]
+
+ return if submission.nil? || !request_lang.blank?
+
+ submission.bring(language_property) if submission.bring?(language_property)
+ collection_natural_language = submission.send(language_property) rescue nil
+ return [] if collection_natural_language.blank?
+
+ collection_natural_language = collection_natural_language.values.flatten if collection_natural_language.is_a?(Hash)
+ submissions_language = collection_natural_language.map { |natural_language| natural_language.to_s.split('/').last[0..1] }.compact.first
+
+ RequestStore.store[:requested_lang] = submissions_language if submissions_language
+ end
+
end
end
end
diff --git a/helpers/search_helper.rb b/helpers/search_helper.rb
index 499e61ac..80eec4cc 100644
--- a/helpers/search_helper.rb
+++ b/helpers/search_helper.rb
@@ -19,6 +19,7 @@ module SearchHelper
VALUESET_ROOTS_ONLY_PARAM = "valueset_roots_only"
VALUESET_EXCLUDE_ROOTS_PARAM = "valueset_exclude_roots"
ONTOLOGY_TYPES_PARAM = "ontology_types"
+ LANGUAGES_PARAM = "lang"
ALSO_SEARCH_VIEWS = "also_search_views" # NCBO-961
MATCH_HTML_PRE = ""
@@ -26,8 +27,10 @@ module SearchHelper
MATCH_TYPE_PREFLABEL = "prefLabel"
MATCH_TYPE_SYNONYM = "synonym"
MATCH_TYPE_PROPERTY = "property"
+ MATCH_TYPE_DEFINITION = "definition"
MATCH_TYPE_LABEL = "label"
MATCH_TYPE_LABELGENERATED = "labelGenerated"
+ NO_LANGUAGE_SUFFIX = "none"
MATCH_TYPE_MAP = {
"resource_id" => "id",
@@ -84,6 +87,9 @@ def get_term_search_query(text, params={})
end
end
+ lang = params["lang"] || params["language"]
+ lang_suffix = lang && !lang.eql?("all") ? "_#{lang}" : ""
+
query = ""
params["defType"] = "edismax"
params["stopwords"] = "true"
@@ -100,25 +106,25 @@ def get_term_search_query(text, params={})
if params[EXACT_MATCH_PARAM] == "true"
query = "\"#{solr_escape(text)}\""
- params["qf"] = "resource_id^20 notation^20 oboId^20 prefLabelExact^10 synonymExact #{QUERYLESS_FIELDS_STR_NO_IDS}"
- params["hl.fl"] = "resource_id prefLabelExact synonymExact #{QUERYLESS_FIELDS_STR}"
+ params["qf"] = "resource_id^20 notation^20 oboId^20 prefLabelExact#{lang_suffix}^10 synonymExact#{lang_suffix} #{QUERYLESS_FIELDS_STR_NO_IDS}"
+ params["hl.fl"] = "resource_id prefLabelExact#{lang_suffix} synonymExact#{lang_suffix} #{QUERYLESS_FIELDS_STR}"
elsif params[SUGGEST_PARAM] == "true" || text[-1] == '*'
text.gsub!(/\*+$/, '')
query = "\"#{solr_escape(text)}\""
params["qt"] = "/suggest_ncbo"
- params["qf"] = "prefLabelExact^100 prefLabelSuggestEdge^50 synonymSuggestEdge^10 prefLabelSuggestNgram synonymSuggestNgram resource_id #{QUERYLESS_FIELDS_STR}"
- params["pf"] = "prefLabelSuggest^50"
- params["hl.fl"] = "prefLabelExact prefLabelSuggestEdge synonymSuggestEdge prefLabelSuggestNgram synonymSuggestNgram resource_id #{QUERYLESS_FIELDS_STR}"
+ params["qf"] = " prefLabelExact#{lang_suffix}^100 prefLabelSuggestEdge#{lang_suffix}^50 synonymSuggestEdge#{lang_suffix}^10 prefLabelSuggestNgram#{lang_suffix} synonymSuggestNgram#{lang_suffix} resource_id #{QUERYLESS_FIELDS_STR}"
+ params["pf"] = "prefLabelSuggest#{lang_suffix}^50"
+ params["hl.fl"] = "prefLabelExact#{lang_suffix} prefLabelSuggestEdge#{lang_suffix} synonymSuggestEdge#{lang_suffix} prefLabelSuggestNgram#{lang_suffix} synonymSuggestNgram#{lang_suffix} resource_id #{QUERYLESS_FIELDS_STR}"
else
if text.strip.empty?
query = '*'
else
query = solr_escape(text)
end
- params["qf"] = "resource_id^100 notation^100 oboId^100 prefLabelExact^90 prefLabel^70 synonymExact^50 synonym^10 #{QUERYLESS_FIELDS_STR_NO_IDS}"
+ params["qf"] = "resource_id^100 notation^100 oboId^100 prefLabelExact#{lang_suffix}^90 prefLabel#{lang_suffix}^70 synonymExact#{lang_suffix}^50 synonym#{lang_suffix}^10 #{QUERYLESS_FIELDS_STR_NO_IDS}"
params["qf"] << " property" if params[INCLUDE_PROPERTIES_PARAM] == "true"
params["bq"] = "idAcronymMatch:true^80"
- params["hl.fl"] = "resource_id prefLabelExact prefLabel synonymExact synonym #{QUERYLESS_FIELDS_STR}"
+ params["hl.fl"] = "resource_id prefLabelExact#{lang_suffix} prefLabel#{lang_suffix} synonymExact#{lang_suffix} synonym#{lang_suffix} #{QUERYLESS_FIELDS_STR}"
params["hl.fl"] = "#{params["hl.fl"]} property" if params[INCLUDE_PROPERTIES_PARAM] == "true"
end
@@ -179,6 +185,10 @@ def get_term_search_query(text, params={})
params["fq"] = filter_query
params["q"] = query
+
+
+
+
query
end
@@ -347,12 +357,40 @@ def populate_classes_from_search(classes, ontology_acronyms=nil)
doc[:submission] = old_class.submission
doc[:properties] = MultiJson.load(doc.delete(:propertyRaw)) if include_param_contains?(:properties)
instance = LinkedData::Models::Class.read_only(doc)
+ filter_language_attributes(@params, instance)
classes_hash[ont_uri_class_uri] = instance
end
classes_hash
end
+ def filter_language_attribute(params, class_instance, attr, is_single)
+ if class_instance.respond_to?(attr)
+ lang_param = (params["lang"] || params["language"])&.downcase
+ lang_suffix = lang_param && !lang_param.eql?("all") ? "_#{lang_param}" : ""
+
+ if !lang_suffix.empty? && class_instance.respond_to?("#{attr}#{lang_suffix}")
+ class_instance[attr] = is_single ? class_instance["#{attr}#{lang_suffix}"][0] : class_instance["#{attr}#{lang_suffix}"]
+ elsif !lang_param.eql?("all")
+ site_label = Goo.main_languages[0]
+
+ if class_instance.respond_to?("#{attr}_#{site_label}") && class_instance["#{attr}_#{site_label}"]
+ class_instance[attr] = is_single ? class_instance["#{attr}_#{site_label}"][0] : class_instance["#{attr}_#{site_label}"]
+ elsif class_instance.respond_to?("#{attr}_#{NO_LANGUAGE_SUFFIX}") && class_instance["#{attr}_#{NO_LANGUAGE_SUFFIX}"]
+ class_instance[attr] = is_single ? class_instance["#{attr}_#{NO_LANGUAGE_SUFFIX}"][0] : class_instance["#{attr}_#{NO_LANGUAGE_SUFFIX}"]
+ elsif is_single
+ class_instance[attr] = class_instance[attr][0]
+ end
+ end
+ end
+ end
+
+ def filter_language_attributes(params, class_instance)
+ filter_language_attribute(params, class_instance, MATCH_TYPE_PREFLABEL, true)
+ filter_language_attribute(params, class_instance, MATCH_TYPE_SYNONYM, false)
+ filter_language_attribute(params, class_instance, MATCH_TYPE_DEFINITION, false)
+ end
+
def validate_params_solr_population(allowed_includes_params)
leftover = includes_param - allowed_includes_params
invalid = leftover.length > 0
diff --git a/helpers/slices_helper.rb b/helpers/slices_helper.rb
index d17db53e..6f527f29 100644
--- a/helpers/slices_helper.rb
+++ b/helpers/slices_helper.rb
@@ -10,10 +10,8 @@ def filter_for_slice(obj)
slice = current_slice()
- if obj.is_a?(Enumerable)
- if obj.first.is_a?(LinkedData::Models::Ontology)
- obj.delete_if {|o| !slice.ontology_id_set.include?(o.id.to_s)}
- end
+ if obj.is_a?(Enumerable) && obj.first.is_a?(LinkedData::Models::Ontology)
+ obj = obj.select { |o| slice.ontology_id_set.include?(o.id.to_s) }
end
obj
end
@@ -35,4 +33,4 @@ def current_slice_acronyms
end
end
-helpers Sinatra::Helpers::SlicesHelper
\ No newline at end of file
+helpers Sinatra::Helpers::SlicesHelper
diff --git a/helpers/users_helper.rb b/helpers/users_helper.rb
index 5d4266c1..eeb6f800 100644
--- a/helpers/users_helper.rb
+++ b/helpers/users_helper.rb
@@ -12,7 +12,7 @@ def filter_for_user_onts(obj)
user = env["REMOTE_USER"]
if obj.first.is_a?(LinkedData::Models::Ontology)
- obj.delete_if {|o| !user.custom_ontology_id_set.include?(o.id.to_s)}
+ obj = obj.select {|o| user.custom_ontology_id_set.include?(o.id.to_s)}
end
obj
@@ -21,4 +21,4 @@ def filter_for_user_onts(obj)
end
end
-helpers Sinatra::Helpers::UsersHelper
\ No newline at end of file
+helpers Sinatra::Helpers::UsersHelper
diff --git a/lib/rack/request_lang.rb b/lib/rack/request_lang.rb
new file mode 100644
index 00000000..b2221041
--- /dev/null
+++ b/lib/rack/request_lang.rb
@@ -0,0 +1,16 @@
+module Rack
+ class RequestLang
+
+ def initialize(app = nil, options = {})
+ @app = app
+ end
+
+ def call(env)
+ r = Rack::Request.new(env)
+ lang = r.params["lang"] || r.params["language"]
+ lang = lang.upcase.to_sym if lang
+ RequestStore.store[:requested_lang] = lang
+ @app.call(env)
+ end
+ end
+end
\ No newline at end of file
diff --git a/test/controllers/test_annotator_controller.rb b/test/controllers/test_annotator_controller.rb
index 3b21b9e7..2e6dada1 100644
--- a/test/controllers/test_annotator_controller.rb
+++ b/test/controllers/test_annotator_controller.rb
@@ -2,7 +2,7 @@
class TestAnnotatorController < TestCase
- def self.before_suite
+ def before_suite
@@redis = Redis.new(:host => Annotator.settings.annotator_redis_host, :port => Annotator.settings.annotator_redis_port)
db_size = @@redis.dbsize
if db_size > MAX_TEST_REDIS_SIZE
@@ -16,11 +16,16 @@ def self.before_suite
end
LinkedData::SampleData::Ontology.delete_ontologies_and_submissions
- @@ontologies = LinkedData::SampleData::Ontology.sample_owl_ontologies
+ @@ontologies = LinkedData::SampleData::Ontology.sample_owl_ontologies(process_submission: true,
+ process_options: {
+ process_rdf: true,
+ extract_metadata: false,
+ index_search: true
+ })
annotator = Annotator::Models::NcboAnnotator.new
annotator.init_redis_for_tests()
annotator.create_term_cache_from_ontologies(@@ontologies, false)
- mapping_test_set
+ self.class.mapping_test_set
end
def test_annotate
@@ -260,7 +265,7 @@ def test_default_properties_output
assert last_response.ok?
annotations = MultiJson.load(last_response.body)
assert_equal 9, annotations.length
- annotations.sort! { |a,b| a["annotatedClass"]["prefLabel"].downcase <=> b["annotatedClass"]["prefLabel"].downcase }
+ annotations.sort! { |a,b| a["annotatedClass"]["prefLabel"].first.downcase <=> b["annotatedClass"]["prefLabel"].first.downcase }
assert_equal "http://bioontology.org/ontologies/BiomedicalResourceOntology.owl#Aggregate_Human_Data", annotations.first["annotatedClass"]["@id"]
assert_equal "Aggregate Human Data", annotations.first["annotatedClass"]["prefLabel"]
@@ -348,7 +353,7 @@ def self.mapping_test_set
classes = []
class_id = terms_a[i]
ont_acr = onts_a[i]
- sub = LinkedData::Models::Ontology.find(ont_acr).first.latest_submission
+ sub = LinkedData::Models::Ontology.find(ont_acr).first.latest_submission(status: :any)
sub.bring(ontology: [:acronym])
c = LinkedData::Models::Class.find(RDF::URI.new(class_id))
.in(sub)
@@ -356,7 +361,7 @@ def self.mapping_test_set
classes << c
class_id = terms_b[i]
ont_acr = onts_b[i]
- sub = LinkedData::Models::Ontology.find(ont_acr).first.latest_submission
+ sub = LinkedData::Models::Ontology.find(ont_acr).first.latest_submission(status: :any)
sub.bring(ontology: [:acronym])
c = LinkedData::Models::Class.find(RDF::URI.new(class_id))
.in(sub)
diff --git a/test/controllers/test_batch_controller.rb b/test/controllers/test_batch_controller.rb
index 55d9cee9..ca37b156 100644
--- a/test/controllers/test_batch_controller.rb
+++ b/test/controllers/test_batch_controller.rb
@@ -1,9 +1,9 @@
require_relative '../test_case'
class TestBatchController < TestCase
- def self.before_suite
+ def before_suite
LinkedData::SampleData::Ontology.delete_ontologies_and_submissions
- @@ontologies = LinkedData::SampleData::Ontology.sample_owl_ontologies
+ @@ontologies = LinkedData::SampleData::Ontology.sample_owl_ontologies(process_submission: true)
end
def test_class_batch_one_ontology
@@ -26,7 +26,7 @@ def test_class_batch_one_ontology
assert last_response.ok?
data = MultiJson.load(last_response.body)
classes = data["http://www.w3.org/2002/07/owl#Class"]
- assert classes.length == 4
+ assert_equal 4, classes.length
classes.each do |klass|
assert_instance_of String, klass["prefLabel"]
assert_instance_of Array, klass["synonym"]
@@ -76,11 +76,11 @@ def test_class_batch_multiple
assert last_response.ok?
data = MultiJson.load(last_response.body)
classes = data["http://www.w3.org/2002/07/owl#Class"]
- assert classes.length == 6
+ assert_equal 6, classes.length
classes.each do |klass|
assert_instance_of String, klass["prefLabel"]
- assert !klass["synonym"]
- assert klass["prefLabel"] == class_ids[klass["@id"]]
+ refute klass["synonym"]
+ assert_equal klass["prefLabel"], class_ids[klass["@id"]]
end
end
@@ -88,6 +88,7 @@ def test_class_batch_multiple
def test_class_all_bro
mccl = @@ontologies.select { |y| y.id.to_s.include? "MCCL"}.first
assert mccl, "mccl is not found to execute batch test."
+
classes = LinkedData::Models::Class.in(mccl.latest_submission).include(:prefLabel).page(1,500).read_only.all
class_ids = {}
classes.each do |klass|
@@ -102,9 +103,10 @@ def test_class_all_bro
}
post "/batch/", call_params
assert last_response.ok?
+
data = MultiJson.load(last_response.body)
classes_response = data["http://www.w3.org/2002/07/owl#Class"]
- assert classes_response.length == classes.length
+ assert_equal classes.length, classes_response.length
classes_response.each do |klass|
assert_instance_of String, klass["prefLabel"]
assert klass["prefLabel"] == class_ids[klass["@id"]]
diff --git a/test/controllers/test_classes_controller.rb b/test/controllers/test_classes_controller.rb
index a918ece0..ab8ff63e 100644
--- a/test/controllers/test_classes_controller.rb
+++ b/test/controllers/test_classes_controller.rb
@@ -2,12 +2,14 @@
class TestClassesController < TestCase
- def self.before_suite
+ def before_suite
options = {ont_count: 1,
submission_count: 3,
submissions_to_process: [1, 2],
process_submission: true,
- random_submission_count: false}
+ random_submission_count: false,
+ process_options: {process_rdf: true, extract_metadata: false}
+ }
return LinkedData::SampleData::Ontology.create_ontologies_and_submissions(options)
end
@@ -529,4 +531,27 @@ def test_class_page_with_metric_count
assert page_response["collection"].length == 0
end
+ def test_default_multilingual
+ ont = Ontology.find("TEST-ONT-0").include(:acronym).first
+ sub = ont.latest_submission
+ sub.bring_remaining
+
+ get "/ontologies/#{ont.acronym}/classes/#{CGI.escape('http://bioontology.org/ontologies/Activity.owl#Biospecimen_Management')}"
+ assert last_response.ok?
+ page_response = MultiJson.load(last_response.body)
+
+    # no English value exists for this class, so the generated label is shown
+ assert_equal 'Biospecimen_Management', page_response["prefLabel"]
+
+
+ sub.naturalLanguage = ['fr']
+ sub.save
+
+ get "/ontologies/#{ont.acronym}/classes/#{CGI.escape('http://bioontology.org/ontologies/Activity.owl#Biospecimen_Management')}"
+ assert last_response.ok?
+ page_response = MultiJson.load(last_response.body)
+
+ # show french value as specified in submission naturalLanguage
+ assert_equal 'Biospecimen Management', page_response["prefLabel"]
+ end
end
diff --git a/test/controllers/test_instances_controller.rb b/test/controllers/test_instances_controller.rb
index 4e9b8a5c..1735455f 100644
--- a/test/controllers/test_instances_controller.rb
+++ b/test/controllers/test_instances_controller.rb
@@ -2,7 +2,7 @@
class TestInstancesController < TestCase
- def self.before_suite
+ def before_suite
LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
process_submission: true,
acronym: 'XCT-TEST-INST',
diff --git a/test/controllers/test_mappings_controller.rb b/test/controllers/test_mappings_controller.rb
index 44bfa032..d8c6f619 100644
--- a/test/controllers/test_mappings_controller.rb
+++ b/test/controllers/test_mappings_controller.rb
@@ -2,8 +2,8 @@
class TestMappingsController < TestCase
- def self.before_suite
-
+ def before_suite
+ self.backend_4s_delete
["BRO-TEST-MAP-0","CNO-TEST-MAP-0","FAKE-TEST-MAP-0"].each do |acr|
LinkedData::Models::OntologySubmission.where(ontology: [acronym: acr]).to_a.each do |s|
s.delete
diff --git a/test/controllers/test_metrics_controller.rb b/test/controllers/test_metrics_controller.rb
index 1b8890a6..0d8b547d 100644
--- a/test/controllers/test_metrics_controller.rb
+++ b/test/controllers/test_metrics_controller.rb
@@ -2,7 +2,7 @@
class TestMetricsController < TestCase
- def self.before_suite
+ def before_suite
if OntologySubmission.all.count > 100
puts "this test is going to wipe out all submission and ontologies. probably this is not a test env."
return
@@ -18,11 +18,12 @@ def self.before_suite
"individuals"=>124,
"properties"=>63,
"maxDepth"=> 7 }
- @@options = {ont_count: 2,
- submission_count: 3,
- submissions_to_process: [1, 2],
- process_submission: true,
- random_submission_count: false}
+ @@options = { ont_count: 2,
+ submission_count: 3,
+ submissions_to_process: [1, 2],
+ process_submission: true,
+ process_options: { process_rdf: true, extract_metadata: false, run_metrics: true, index_properties: true },
+ random_submission_count: false }
LinkedData::SampleData::Ontology.create_ontologies_and_submissions(@@options)
end
@@ -78,18 +79,18 @@ def test_metrics_missing
get '/metrics/missing'
assert last_response.ok?
ontologies = MultiJson.load(last_response.body)
- assert_equal(0, ontologies.length, msg='Failure to detect 0 ontologies with missing metrics.')
+ assert_equal(0, ontologies.length, msg = 'Failure to detect 0 ontologies with missing metrics.')
# create ontologies with latest submissions that have no metrics
delete_ontologies_and_submissions
- options = {ont_count: 2,
- submission_count: 1,
- process_submission: false,
- random_submission_count: false}
+ options = { ont_count: 2,
+ submission_count: 1,
+ process_submission: false,
+ random_submission_count: false }
create_ontologies_and_submissions(options)
get '/metrics/missing'
assert last_response.ok?
ontologies = MultiJson.load(last_response.body)
- assert_equal(2, ontologies.length, msg='Failure to detect 2 ontologies with missing metrics.')
+ assert_equal(2, ontologies.length, msg = 'Failure to detect 2 ontologies with missing metrics.')
# recreate the before_suite data (this test might not be the last one to run in the suite)
delete_ontologies_and_submissions
create_ontologies_and_submissions(@@options)
diff --git a/test/controllers/test_notes_controller.rb b/test/controllers/test_notes_controller.rb
index d9223669..83cb1045 100644
--- a/test/controllers/test_notes_controller.rb
+++ b/test/controllers/test_notes_controller.rb
@@ -3,9 +3,9 @@
class TestNotesController < TestCase
- def self.before_suite
- self.new("before_suite").delete_ontologies_and_submissions
- @@ontology, @@cls = self.new("before_suite")._ontology_and_class
+ def before_suite
+ self.delete_ontologies_and_submissions
+ @@ontology, @@cls = self._ontology_and_class
@@note_user = "test_note_user"
@@user = LinkedData::Models::User.new(
@@ -41,7 +41,7 @@ def test_all_notes
get '/notes'
assert last_response.ok?
notes = MultiJson.load(last_response.body)
- assert notes.length >= 5
+ assert_operator 5, :<=, notes.length
end
def test_single_note
@@ -62,21 +62,22 @@ def test_note_lifecycle
relatedOntology: [@@ontology.id.to_s]
}
post "/notes", MultiJson.dump(note), "CONTENT_TYPE" => "application/json"
- assert last_response.status == 201
+ assert_equal 201, last_response.status
new_note = MultiJson.load(last_response.body)
get new_note["@id"]
assert last_response.ok?
+ assert_equal 200, last_response.status
note_changes = {body: "New testing body"}
patch new_note["@id"], MultiJson.dump(note_changes), "CONTENT_TYPE" => "application/json"
- assert last_response.status == 204
+ assert_equal 204, last_response.status
get new_note["@id"]
patched_note = MultiJson.load(last_response.body)
assert_equal patched_note["body"], note_changes[:body]
delete new_note["@id"]
- assert last_response.status == 204
+ assert_equal 204, last_response.status
end
def test_proposal_lifecycle
@@ -97,21 +98,23 @@ def test_proposal_lifecycle
}
post "/notes", MultiJson.dump(note), "CONTENT_TYPE" => "application/json"
- assert last_response.status == 201
+ assert_equal 201, last_response.status
new_note = MultiJson.load(last_response.body)
+
get new_note["@id"]
assert last_response.ok?
note_changes = {proposal: {label: "New sleed study facility"}}
patch new_note["@id"], MultiJson.dump(note_changes), "CONTENT_TYPE" => "application/json"
- assert last_response.status == 204
+ assert_equal 204, last_response.status
get new_note["@id"]
patched_note = MultiJson.load(last_response.body)
- assert_equal patched_note["label"], note_changes[:label]
+ refute_nil patched_note['proposal']['label']
+ assert_equal patched_note['proposal']['label'], note_changes[:proposal][:label]
delete new_note["@id"]
- assert last_response.status == 204
+ assert_equal 204, last_response.status
end
def test_notes_for_ontology
@@ -120,8 +123,8 @@ def test_notes_for_ontology
get ont["links"]["notes"]
notes = MultiJson.load(last_response.body)
test_note = notes.select {|n| n["subject"].eql?("Test subject 1")}
- assert test_note.length == 1
- assert notes.length >= 5
+ assert_equal 1, test_note.length
+ assert_operator 5, :<=, notes.length
end
def test_notes_for_class
@@ -130,7 +133,7 @@ def test_notes_for_class
get cls["links"]["notes"]
notes = MultiJson.load(last_response.body)
test_note = notes.select {|n| n["subject"].eql?("Test subject 1")}
- assert test_note.length == 1
- assert notes.length >= 5
+ assert_equal 1, test_note.length
+ assert_operator 5, :<=, notes.length
end
end
diff --git a/test/controllers/test_ontologies_controller.rb b/test/controllers/test_ontologies_controller.rb
index 4f61256b..874efe39 100644
--- a/test/controllers/test_ontologies_controller.rb
+++ b/test/controllers/test_ontologies_controller.rb
@@ -2,11 +2,11 @@
require_relative '../test_case'
class TestOntologiesController < TestCase
- def self.before_suite
- _set_vars
- _delete
- _create_user
- _create_onts
+ def before_suite
+ self.class._set_vars
+ self.class._delete
+ self.class._create_user
+ self.class._create_onts
end
def teardown
@@ -188,7 +188,9 @@ def test_download_ontology
end
def test_download_ontology_csv
- num_onts_created, created_ont_acronyms, onts = create_ontologies_and_submissions(ont_count: 1, submission_count: 1, process_submission: true)
+ num_onts_created, created_ont_acronyms, onts = create_ontologies_and_submissions(ont_count: 1, submission_count: 1,
+ process_submission: true,
+ process_options:{process_rdf: true, extract_metadata: true, index_search: true})
ont = onts.first
acronym = created_ont_acronyms.first
@@ -220,13 +222,13 @@ def test_download_acl_only
begin
allowed_user = User.new({
username: "allowed",
- email: "test@example.org",
+ email: "test1@example.org",
password: "12345"
})
allowed_user.save
blocked_user = User.new({
username: "blocked",
- email: "test@example.org",
+ email: "test2@example.org",
password: "12345"
})
blocked_user.save
@@ -262,7 +264,7 @@ def test_on_demand_ontology_pull
acronym = ont.acronym
sub = ont.submissions.first
sub.bring(:pullLocation) if sub.bring?(:pullLocation)
- assert_equal(nil, sub.pullLocation, msg="Pull location should be nil at this point in the test")
+ assert_nil sub.pullLocation, msg = "Pull location should be nil at this point in the test"
allowed_user = ont.administeredBy.first
allowed_user.bring(:apikey) if allowed_user.bring?(:apikey)
@@ -296,6 +298,32 @@ def test_on_demand_ontology_pull
end
end
+ def test_detach_a_view
+ view = Ontology.find(@@view_acronym).include(:viewOf).first
+ ont = view.viewOf
+ refute_nil view
+ refute_nil ont
+
+ remove_view_of = {viewOf: ''}
+ patch "/ontologies/#{@@view_acronym}", MultiJson.dump(remove_view_of), "CONTENT_TYPE" => "application/json"
+
+ assert last_response.status == 204
+
+ get "/ontologies/#{@@view_acronym}"
+ onto = MultiJson.load(last_response.body)
+ assert_nil onto["viewOf"]
+
+
+ add_view_of = {viewOf: @@acronym}
+ patch "/ontologies/#{@@view_acronym}", MultiJson.dump(add_view_of), "CONTENT_TYPE" => "application/json"
+
+ assert last_response.status == 204
+
+ get "/ontologies/#{@@view_acronym}?include=all"
+ onto = MultiJson.load(last_response.body)
+ assert_equal onto["viewOf"], ont.id.to_s
+ end
+
private
def start_server
diff --git a/test/controllers/test_ontology_analytics_controller.rb b/test/controllers/test_ontology_analytics_controller.rb
index b8e36dce..721efea6 100644
--- a/test/controllers/test_ontology_analytics_controller.rb
+++ b/test/controllers/test_ontology_analytics_controller.rb
@@ -196,7 +196,7 @@ class TestOntologyAnalyticsController < TestCase
}
}
- def self.before_suite
+ def before_suite
@@redis = Redis.new(:host => Annotator.settings.annotator_redis_host, :port => Annotator.settings.annotator_redis_port)
db_size = @@redis.dbsize
if db_size > MAX_TEST_REDIS_SIZE
@@ -212,9 +212,9 @@ def self.before_suite
"SNOMEDCT" => "SNOMEDCT Ontology",
"TST" => "TST Ontology"
}
- _delete
- _create_user
- _create_onts
+ self.class._delete
+ self.class._create_user
+ self.class._create_onts
end
def teardown
diff --git a/test/controllers/test_ontology_submissions_controller.rb b/test/controllers/test_ontology_submissions_controller.rb
index 40532cd0..d0f58582 100644
--- a/test/controllers/test_ontology_submissions_controller.rb
+++ b/test/controllers/test_ontology_submissions_controller.rb
@@ -2,10 +2,11 @@
class TestOntologySubmissionsController < TestCase
- def self.before_suite
- _set_vars
- _create_user
- _create_onts
+ def before_suite
+ self.backend_4s_delete
+ self.class._set_vars
+ self.class._create_user
+ self.class._create_onts
end
def self._set_vars
@@ -18,7 +19,10 @@ def self._set_vars
administeredBy: "tim",
"file" => Rack::Test::UploadedFile.new(@@test_file, ""),
released: DateTime.now.to_s,
- contact: [{name: "test_name", email: "test@example.org"}]
+ contact: [{name: "test_name", email: "test3@example.org"}],
+ URI: 'https://test.com/test',
+ status: 'production',
+ description: 'ontology description'
}
@@status_uploaded = "UPLOADED"
@@status_rdf = "RDF"
@@ -36,6 +40,12 @@ def self._create_onts
ont.save
end
+ def setup
+ delete_ontologies_and_submissions
+ ont = Ontology.new(acronym: @@acronym, name: @@name, administeredBy: [@@user])
+ ont.save
+ end
+
def test_submissions_for_given_ontology
num_onts_created, created_ont_acronyms = create_ontologies_and_submissions(ont_count: 1)
ontology = created_ont_acronyms.first
@@ -156,13 +166,13 @@ def test_download_acl_only
begin
allowed_user = User.new({
username: "allowed",
- email: "test@example.org",
+ email: "test4@example.org",
password: "12345"
})
allowed_user.save
blocked_user = User.new({
username: "blocked",
- email: "test@example.org",
+ email: "test5@example.org",
password: "12345"
})
blocked_user.save
@@ -235,5 +245,4 @@ def test_ontology_submissions_access_controller
del.delete if del
end
end
-
end
diff --git a/test/controllers/test_properties_controller.rb b/test/controllers/test_properties_controller.rb
index 8248403c..d2957956 100644
--- a/test/controllers/test_properties_controller.rb
+++ b/test/controllers/test_properties_controller.rb
@@ -2,9 +2,10 @@
class TestPropertiesController < TestCase
- def self.before_suite
+ def before_suite
count, acronyms, bro = LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
process_submission: true,
+ process_options:{process_rdf: true, extract_metadata: false},
acronym: "BROSEARCHTEST",
name: "BRO Search Test",
file_path: "./test/data/ontology_files/BRO_v3.2.owl",
@@ -15,6 +16,7 @@ def self.before_suite
count, acronyms, mccl = LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
process_submission: true,
+ process_options:{process_rdf: true, extract_metadata: true},
acronym: "MCCLSEARCHTEST",
name: "MCCL Search Test",
file_path: "./test/data/ontology_files/CellLine_OWL_BioPortal_v1.0.owl",
@@ -25,7 +27,7 @@ def self.before_suite
@@acronyms = @@ontologies.map { |ont| ont.bring_remaining; ont.acronym }
end
- def self.after_suite
+ def after_suite
LinkedData::SampleData::Ontology.delete_ontologies_and_submissions
end
diff --git a/test/controllers/test_properties_search_controller.rb b/test/controllers/test_properties_search_controller.rb
index f93a90a1..2a8cadd0 100644
--- a/test/controllers/test_properties_search_controller.rb
+++ b/test/controllers/test_properties_search_controller.rb
@@ -2,9 +2,12 @@
class TestPropertiesSearchController < TestCase
- def self.before_suite
+ def before_suite
+ self.backend_4s_delete
+
count, acronyms, bro = LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
process_submission: true,
+ process_options:{process_rdf: true, extract_metadata: false, index_properties: true},
acronym: "BROSEARCHTEST",
name: "BRO Search Test",
file_path: "./test/data/ontology_files/BRO_v3.2.owl",
@@ -15,6 +18,7 @@ def self.before_suite
count, acronyms, mccl = LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
process_submission: true,
+ process_options:{process_rdf: true, extract_metadata: false, index_properties: true},
acronym: "MCCLSEARCHTEST",
name: "MCCL Search Test",
file_path: "./test/data/ontology_files/CellLine_OWL_BioPortal_v1.0.owl",
@@ -24,7 +28,7 @@ def self.before_suite
@@ontologies = bro.concat(mccl)
end
- def self.after_suite
+ def after_suite
LinkedData::SampleData::Ontology.delete_ontologies_and_submissions
LinkedData::Models::Ontology.indexClear(:property)
LinkedData::Models::Ontology.indexCommit(nil, :property)
diff --git a/test/controllers/test_provisional_classes_controller.rb b/test/controllers/test_provisional_classes_controller.rb
index 7225d772..e54c7805 100644
--- a/test/controllers/test_provisional_classes_controller.rb
+++ b/test/controllers/test_provisional_classes_controller.rb
@@ -1,9 +1,9 @@
require_relative '../test_case'
class TestProvisionalClassesController < TestCase
- def self.before_suite
- self.new("before_suite").delete_ontologies_and_submissions
- @@ontology, classes = self.new("before_suite")._ontology_and_classes
+ def before_suite
+ self.delete_ontologies_and_submissions
+ @@ontology, classes = self._ontology_and_classes
@@cls = classes[0]
@@cls1 = classes[1]
@@ -32,7 +32,7 @@ def self.before_suite
end
end
- def self.after_suite
+ def after_suite
3.times do |i|
@@pcs[i].delete
end
diff --git a/test/controllers/test_provisional_relations_controller.rb b/test/controllers/test_provisional_relations_controller.rb
index 83f2761d..f097dde9 100644
--- a/test/controllers/test_provisional_relations_controller.rb
+++ b/test/controllers/test_provisional_relations_controller.rb
@@ -1,9 +1,9 @@
require_relative '../test_case'
class TestProvisionalRelationsController < TestCase
- def self.before_suite
- self.new("before_suite").delete_ontologies_and_submissions
- @@ontology, classes = self.new("before_suite")._ontology_and_classes
+ def before_suite
+ self.delete_ontologies_and_submissions
+ @@ontology, classes = self._ontology_and_classes
@@cls1 = classes[0]
@@cls2 = classes[1]
@@ -39,7 +39,7 @@ def self.before_suite
@@test_rel.save
end
- def self.after_suite
+ def after_suite
@@test_pc.delete
@@test_user.delete
end
diff --git a/test/controllers/test_recommender_controller.rb b/test/controllers/test_recommender_controller.rb
index 29caf28c..c46255fc 100644
--- a/test/controllers/test_recommender_controller.rb
+++ b/test/controllers/test_recommender_controller.rb
@@ -2,7 +2,7 @@
class TestRecommenderController < TestCase
- def self.before_suite
+ def before_suite
@@redis = Redis.new(:host => Annotator.settings.annotator_redis_host, :port => Annotator.settings.annotator_redis_port)
db_size = @@redis.dbsize
if db_size > MAX_TEST_REDIS_SIZE
@@ -14,7 +14,7 @@ def self.before_suite
@@redis.del(mappings)
end
LinkedData::SampleData::Ontology.delete_ontologies_and_submissions
- @@ontologies = LinkedData::SampleData::Ontology.sample_owl_ontologies
+ @@ontologies = LinkedData::SampleData::Ontology.sample_owl_ontologies(process_submission: true)
annotator = Annotator::Models::NcboAnnotator.new
annotator.init_redis_for_tests()
annotator.create_term_cache_from_ontologies(@@ontologies, false)
diff --git a/test/controllers/test_recommender_v1_controller.rb b/test/controllers/test_recommender_v1_controller.rb
index 7b14a63d..7ef09cee 100644
--- a/test/controllers/test_recommender_v1_controller.rb
+++ b/test/controllers/test_recommender_v1_controller.rb
@@ -1,40 +1,10 @@
require_relative '../test_case'
-
-class TestRecommenderController < TestCase
-
- def self.before_suite
- LinkedData::SampleData::Ontology.delete_ontologies_and_submissions
- @@ontologies = LinkedData::SampleData::Ontology.sample_owl_ontologies
- @@text = < @@text
- }
+# recommender_v1 is deprecated as of 2024-10-27
+# TODO: remove completely after 2025-10-27
+class TestRecommenderV1Controller < TestCase
+ def test_recommender_v1_deprecation
+ params = { text: 'recommender v1 is deprecated' }
get "/recommender_v1", params
- assert last_response.ok?
- recommendations = MultiJson.load(last_response.body)
- assert_instance_of(Array, recommendations)
- assert_equal(3, recommendations.length, msg='Failed to return 3 recommendations')
- rec = recommendations.first
- assert_instance_of(Hash, rec)
- ont_acronyms = @@ontologies.map {|o| o.bring(:acronym); o.acronym }
- assert ont_acronyms.include? rec['ontology']['acronym']
- assert rec['annotatedClasses'].length == 0 # no classes requested
- assert rec['numTermsMatched'] > 0
- assert rec['numTermsTotal'] > 0
- assert rec['numTermsTotal'] >= rec['numTermsMatched']
- assert recommendations[0]['score'].to_i >= recommendations[1]['score'].to_i
- assert recommendations[1]['score'].to_i >= recommendations[2]['score'].to_i
+ assert_equal 410, last_response.status
end
-
end
diff --git a/test/controllers/test_replies_controller.rb b/test/controllers/test_replies_controller.rb
index 41b15e2e..19a6ddd0 100644
--- a/test/controllers/test_replies_controller.rb
+++ b/test/controllers/test_replies_controller.rb
@@ -2,8 +2,8 @@
class TestRepliesController < TestCase
- def self.before_suite
- ontologies = self.new("before_suite").create_ontologies_and_submissions(ont_count: 1, submission_count: 1, process_submission: false)[2]
+ def before_suite
+ ontologies = self.create_ontologies_and_submissions(ont_count: 1, submission_count: 1, process_submission: false)[2]
@@ontology = ontologies.first
@@reply_user = "test_reply_user"
diff --git a/test/controllers/test_search_controller.rb b/test/controllers/test_search_controller.rb
index 70bf246e..1e98a84a 100644
--- a/test/controllers/test_search_controller.rb
+++ b/test/controllers/test_search_controller.rb
@@ -2,7 +2,8 @@
class TestSearchController < TestCase
- def self.before_suite
+ def before_suite
+ self.backend_4s_delete
count, acronyms, bro = LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
process_submission: true,
acronym: "BROSEARCHTEST",
@@ -54,7 +55,7 @@ def self.before_suite
@@test_pc_child.save
end
- def self.after_suite
+ def after_suite
@@test_pc_root.delete
@@test_pc_child.delete
LinkedData::SampleData::Ontology.delete_ontologies_and_submissions
@@ -85,7 +86,10 @@ def test_search_ontology_filter
assert last_response.ok?
results = MultiJson.load(last_response.body)
doc = results["collection"][0]
- assert_equal "cell line", doc["prefLabel"]
+
+ pref_label = doc["prefLabel"].kind_of?(Array) ? doc["prefLabel"].first : doc["prefLabel"]
+ assert_equal "cell line", pref_label
+
assert doc["links"]["ontology"].include? acronym
results["collection"].each do |doc|
acr = doc["links"]["ontology"].split('/')[-1]
@@ -103,7 +107,8 @@ def test_search_other_filters
get "search?q=data&require_definitions=true"
assert last_response.ok?
results = MultiJson.load(last_response.body)
- assert_equal 26, results["collection"].length
+ assert results["collection"].all? {|doc| !doc["definition"].nil? && doc.values.flatten.join(" ").include?("data") }
+ #assert_equal 26, results["collection"].length
get "search?q=data&require_definitions=false"
assert last_response.ok?
@@ -115,10 +120,14 @@ def test_search_other_filters
get "search?q=Integration%20and%20Interoperability&ontologies=#{acronym}"
results = MultiJson.load(last_response.body)
- assert_equal 22, results["collection"].length
+
+ assert results["collection"].all? { |x| !x["obsolete"] }
+ count = results["collection"].length
+
get "search?q=Integration%20and%20Interoperability&ontologies=#{acronym}&also_search_obsolete=false"
results = MultiJson.load(last_response.body)
- assert_equal 22, results["collection"].length
+ assert_equal count, results["collection"].length
+
get "search?q=Integration%20and%20Interoperability&ontologies=#{acronym}&also_search_obsolete=true"
results = MultiJson.load(last_response.body)
assert_equal 29, results["collection"].length
@@ -134,8 +143,16 @@ def test_search_other_filters
# testing cui and semantic_types flags
get "search?q=Funding%20Resource&ontologies=#{acronym}&include=prefLabel,synonym,definition,notation,cui,semanticType"
results = MultiJson.load(last_response.body)
- assert_equal 35, results["collection"].length
- assert_equal "Funding Resource", results["collection"][0]["prefLabel"]
+ #assert_equal 35, results["collection"].length
+ assert results["collection"].all? do |r|
+ ["prefLabel", "synonym", "definition", "notation", "cui", "semanticType"].map {|x| r[x]}
+ .flatten
+ .join(' ')
+ .include?("Funding Resource")
+ end
+
+ label0 = results["collection"][0]["prefLabel"].kind_of?(Array) ? results["collection"][0]["prefLabel"].first : results["collection"][0]["prefLabel"]
+ assert_equal "Funding Resource", label0
assert_equal "T028", results["collection"][0]["semanticType"][0]
assert_equal "X123456", results["collection"][0]["cui"][0]
@@ -190,7 +207,8 @@ def test_search_provisional_class
assert_equal 10, results["collection"].length
provisional = results["collection"].select {|res| assert_equal ontology_type, res["ontologyType"]; res["provisional"]}
assert_equal 1, provisional.length
- assert_equal @@test_pc_root.label, provisional[0]["prefLabel"]
+ prov_label = provisional[0]["prefLabel"].kind_of?(Array) ? provisional[0]["prefLabel"].first : provisional[0]["prefLabel"]
+ assert_equal @@test_pc_root.label, prov_label
# subtree root with provisional class test
get "search?ontology=#{acronym}&subtree_root_id=#{CGI::escape(@@cls_uri.to_s)}&also_search_provisional=true"
@@ -199,7 +217,9 @@ def test_search_provisional_class
provisional = results["collection"].select {|res| res["provisional"]}
assert_equal 1, provisional.length
- assert_equal @@test_pc_child.label, provisional[0]["prefLabel"]
+
+ prov_label = provisional[0]["prefLabel"].kind_of?(Array) ? provisional[0]["prefLabel"].first : provisional[0]["prefLabel"]
+ assert_equal @@test_pc_child.label, prov_label
end
def test_search_obo_id
@@ -254,11 +274,18 @@ def test_search_obo_id
assert_equal ogms_acronym, LinkedData::Utils::Triples.last_iri_fragment(docs[0]["links"]["ontology"])
assert_equal cno_acronym, LinkedData::Utils::Triples.last_iri_fragment(docs[1]["links"]["ontology"])
assert_equal ncit_acronym, LinkedData::Utils::Triples.last_iri_fragment(docs[2]["links"]["ontology"])
- assert_equal 'realization', docs[1]["prefLabel"]
- assert_equal 'realization', docs[2]["prefLabel"]
- assert docs[3]["prefLabel"].upcase.include?('OGMS ')
- assert docs[4]["prefLabel"].upcase.include?('OGMS ')
- assert docs[5]["prefLabel"].upcase.include?('OGMS ')
+
+ label1 = docs[1]["prefLabel"].kind_of?(Array) ? docs[1]["prefLabel"].first : docs[1]["prefLabel"]
+ label2 = docs[2]["prefLabel"].kind_of?(Array) ? docs[2]["prefLabel"].first : docs[2]["prefLabel"]
+ label3 = docs[3]["prefLabel"].kind_of?(Array) ? docs[3]["prefLabel"].first : docs[3]["prefLabel"]
+ label4 = docs[4]["prefLabel"].kind_of?(Array) ? docs[4]["prefLabel"].first : docs[4]["prefLabel"]
+ label5 = docs[5]["prefLabel"].kind_of?(Array) ? docs[5]["prefLabel"].first : docs[5]["prefLabel"]
+
+ assert_equal 'realization', label1
+ assert_equal 'realization', label2
+ assert label3.upcase.include?('OGMS ')
+ assert label4.upcase.include?('OGMS ')
+ assert label5.upcase.include?('OGMS ')
get "/search?q=CNO:0000002"
assert last_response.ok?
@@ -267,13 +294,20 @@ def test_search_obo_id
assert_equal 7, docs.size
assert_equal cno_acronym, LinkedData::Utils::Triples.last_iri_fragment(docs[0]["links"]["ontology"])
acr_1 = LinkedData::Utils::Triples.last_iri_fragment(docs[1]["links"]["ontology"])
- assert acr_1 === ncit_acronym || acr_1 === ogms_acronym
+ assert_includes [ncit_acronym, ogms_acronym], acr_1
acr_2= LinkedData::Utils::Triples.last_iri_fragment(docs[2]["links"]["ontology"])
+
assert acr_2 === ncit_acronym || acr_2 === ogms_acronym
- assert docs[3]["prefLabel"].upcase.include?('CNO ')
- assert docs[4]["prefLabel"].upcase.include?('CNO ')
- assert docs[5]["prefLabel"].upcase.include?('CNO ')
- assert docs[6]["prefLabel"].upcase.include?('CNO ')
+
+ label3 = docs[3]["prefLabel"].kind_of?(Array) ? docs[3]["prefLabel"].first : docs[3]["prefLabel"]
+ label4 = docs[4]["prefLabel"].kind_of?(Array) ? docs[4]["prefLabel"].first : docs[4]["prefLabel"]
+ label5 = docs[5]["prefLabel"].kind_of?(Array) ? docs[5]["prefLabel"].first : docs[5]["prefLabel"]
+ label6 = docs[6]["prefLabel"].kind_of?(Array) ? docs[6]["prefLabel"].first : docs[6]["prefLabel"]
+
+ assert label3.upcase.include?('CNO ')
+ assert label4.upcase.include?('CNO ')
+ assert label5.upcase.include?('CNO ')
+ assert label6.upcase.include?('CNO ')
# mdorf, 3/2/2024, when the : is followed by a LETTER, as in NCIT:C20480,
# then Solr does not split the query on the tokens,
@@ -284,37 +318,46 @@ def test_search_obo_id
results = MultiJson.load(last_response.body)
docs = results["collection"]
assert_equal 1, docs.size
- assert_equal 'Cellular Process', docs[0]["prefLabel"]
+
+ label0 = docs[0]["prefLabel"].kind_of?(Array) ? docs[0]["prefLabel"].first : docs[0]["prefLabel"]
+ assert_equal 'Cellular Process', label0
get "/search?q=NCIT:C20480"
assert last_response.ok?
results = MultiJson.load(last_response.body)
docs = results["collection"]
assert_equal 1, docs.size
- assert_equal 'Cellular Process', docs[0]["prefLabel"]
+
+ label0 = docs[0]["prefLabel"].kind_of?(Array) ? docs[0]["prefLabel"].first : docs[0]["prefLabel"]
+ assert_equal 'Cellular Process', label0
get "/search?q=Leukocyte Apoptotic Process&ontologies=#{ncit_acronym}"
assert last_response.ok?
results = MultiJson.load(last_response.body)
docs = results["collection"]
- assert_equal 'Leukocyte Apoptotic Process', docs[0]["prefLabel"]
- assert_equal 'Leukocyte Apoptotic Test Class', docs[1]["prefLabel"]
- assert_equal 'Lymphocyte Apoptotic Process', docs[2]["prefLabel"]
+
+ label0 = docs[0]["prefLabel"].kind_of?(Array) ? docs[0]["prefLabel"].first : docs[0]["prefLabel"]
+ label1 = docs[1]["prefLabel"].kind_of?(Array) ? docs[1]["prefLabel"].first : docs[1]["prefLabel"]
+ label2 = docs[2]["prefLabel"].kind_of?(Array) ? docs[2]["prefLabel"].first : docs[2]["prefLabel"]
+
+ assert_equal 'Leukocyte Apoptotic Process', label0
+ assert_equal 'Leukocyte Apoptotic Test Class', label1
+ assert_equal 'Lymphocyte Apoptotic Process', label2
ensure
ont = LinkedData::Models::Ontology.find(ncit_acronym).first
ont.delete if ont
ont = LinkedData::Models::Ontology.find(ncit_acronym).first
- assert ont.nil?
+ assert_nil ont
ont = LinkedData::Models::Ontology.find(ogms_acronym).first
ont.delete if ont
ont = LinkedData::Models::Ontology.find(ogms_acronym).first
- assert ont.nil?
+ assert_nil ont
ont = LinkedData::Models::Ontology.find(cno_acronym).first
ont.delete if ont
ont = LinkedData::Models::Ontology.find(cno_acronym).first
- assert ont.nil?
+ assert_nil ont
end
end
@@ -357,4 +400,88 @@ def test_search_short_id
end
end
+ def test_language_attribute_filter
+ get "/search?q=Activit%C3%A9&ontologies=BROSEARCHTEST-0&lang=fr"
+ results = MultiJson.load(last_response.body)
+ assert last_response.ok?
+ assert_equal 1, results["collection"].size
+ doc = results["collection"][0]
+ pref_label = doc["prefLabel"].kind_of?(Array) ? doc["prefLabel"].first : doc["prefLabel"]
+ assert_equal "Activité", pref_label
+ assert_equal 1, doc["definition"].size
+ assert doc["definition"][0].include?("d’intérêt pouvant")
+
+ get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=en"
+ results = MultiJson.load(last_response.body)
+ assert last_response.ok?
+ assert_equal 1, results["collection"].size
+ doc = results["collection"][0]
+ pref_label = doc["prefLabel"].kind_of?(Array) ? doc["prefLabel"].first : doc["prefLabel"]
+ assert_equal "ActivityEnglish", pref_label
+ assert_equal 1, doc["definition"].size
+ assert doc["definition"][0].include?("Activity of interest that may be related to")
+
+ get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0"
+ results = MultiJson.load(last_response.body)
+ assert last_response.ok?
+ assert_equal 1, results["collection"].size
+ doc = results["collection"][0]
+ pref_label = doc["prefLabel"].kind_of?(Array) ? doc["prefLabel"].first : doc["prefLabel"]
+ assert_equal "ActivityEnglish", pref_label
+ assert_equal 1, doc["definition"].size
+ assert doc["definition"][0].include?("Activity of interest that may be related to")
+
+ get "/search?q=Activit%C3%A9&ontologies=BROSEARCHTEST-0&lang=all"
+ results = MultiJson.load(last_response.body)
+ assert last_response.ok?
+ assert_equal 1, results["collection"].size
+ doc = results["collection"][0]
+ assert doc["prefLabel"].kind_of?(Array)
+ assert_equal 3, doc["prefLabel"].size
+ assert doc["synonym"].kind_of?(Array)
+ assert_equal 1, doc["synonym"].size
+ assert doc["definition"].kind_of?(Array)
+ assert_equal 2, doc["definition"].size
+ end
+
+ def test_multilingual_search
+ get "/search?q=Activity&ontologies=BROSEARCHTEST-0"
+ res = MultiJson.load(last_response.body)
+ refute_equal 0, res["totalCount"]
+
+ doc = res["collection"].select{|doc| doc["@id"].to_s.eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+ refute_nil doc
+
+ res = LinkedData::Models::Class.search("prefLabel_none:Activity", {:fq => "submissionAcronym:BROSEARCHTEST-0", :start => 0, :rows => 80})
+ refute_equal 0, res["response"]["numFound"]
+ refute_nil res["response"]["docs"].select{|doc| doc["resource_id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+
+ get "/search?q=Activit%C3%A9&ontologies=BROSEARCHTEST-0&lang=fr"
+ res = MultiJson.load(last_response.body)
+ refute_equal 0, res["totalCount"]
+ refute_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+
+ get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=en"
+ res = MultiJson.load(last_response.body)
+ refute_equal 0, res["totalCount"]
+ refute_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+
+ get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=fr&require_exact_match=true"
+ res = MultiJson.load(last_response.body)
+ assert_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+
+ get "/search?q=ActivityEnglish&ontologies=BROSEARCHTEST-0&lang=en&require_exact_match=true"
+ res = MultiJson.load(last_response.body)
+ refute_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+
+ get "/search?q=Activity&ontologies=BROSEARCHTEST-0&lang=en&require_exact_match=true"
+ res = MultiJson.load(last_response.body)
+ assert_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+
+ get "/search?q=Activit%C3%A9&ontologies=BROSEARCHTEST-0&lang=fr&require_exact_match=true"
+ res = MultiJson.load(last_response.body)
+ refute_nil res["collection"].select{|doc| doc["@id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
+ end
+
+
end
diff --git a/test/controllers/test_slices_controller.rb b/test/controllers/test_slices_controller.rb
index 308b9fb7..e1250ba2 100644
--- a/test/controllers/test_slices_controller.rb
+++ b/test/controllers/test_slices_controller.rb
@@ -2,12 +2,11 @@
class TestSlicesController < TestCase
- def self.before_suite
+ def before_suite
ont_count, ont_acronyms, @@onts = LinkedData::SampleData::Ontology.create_ontologies_and_submissions(ont_count: 1, submission_count: 0)
-
@@slice_acronyms = ["tst-a", "tst-b"].sort
- _create_slice(@@slice_acronyms[0], "Test Slice A", @@onts)
- _create_slice(@@slice_acronyms[1], "Test Slice B", @@onts)
+ self.class._create_slice(@@slice_acronyms[0], "Test Slice A", @@onts)
+ self.class._create_slice(@@slice_acronyms[1], "Test Slice B", @@onts)
@@user = User.new({
username: "test-slice",
@@ -15,13 +14,13 @@ def self.before_suite
password: "12345"
}).save
@@new_slice_data = { acronym: 'tst-c', name: "Test Slice C", ontologies: ont_acronyms}
- enable_security
+ self.class.enable_security
end
- def self.after_suite
+ def after_suite
LinkedData::Models::Slice.all.each(&:delete)
@@user.delete
- reset_security
+ self.class.reset_security
end
def setup
diff --git a/test/controllers/test_users_controller.rb b/test/controllers/test_users_controller.rb
index 53bf520f..1aea137a 100644
--- a/test/controllers/test_users_controller.rb
+++ b/test/controllers/test_users_controller.rb
@@ -1,7 +1,7 @@
require_relative '../test_case'
class TestUsersController < TestCase
- def self.before_suite
+ def before_suite
# Create a bunch of test users
@@usernames = %w(fred goerge henry ben mark matt charlie)
@@ -48,6 +48,21 @@ def test_single_user
assert_equal "fred", MultiJson.load(last_response.body)["username"]
end
+ def test_hide_sensitive_data
+ user = @@users[0]
+ reset_token = token(36)
+ user.resetToken = reset_token
+ user.resetTokenExpireTime = Time.now.to_i - 2.hours.to_i
+ user.save
+
+ username = user.username
+ get "/users/#{username}?display=resetToken,resetTokenExpireTime"
+ assert last_response.ok?
+
+ refute_includes MultiJson.load(last_response.body), 'resetToken', "resetToken should NOT be included in the response"
+ refute_includes MultiJson.load(last_response.body), 'resetTokenExpireTime', "resetTokenExpireTime should NOT be included in the response"
+ end
+
def test_create_new_user
user = {email: "#{@@username}@example.org", password: "pass_the_word"}
put "/users/#{@@username}", MultiJson.dump(user), "CONTENT_TYPE" => "application/json"
@@ -171,6 +186,13 @@ def test_authentication
private
+ def token(len)
+ chars = ("a".."z").to_a + ("A".."Z").to_a + ("1".."9").to_a
+ token = ""
+ 1.upto(len) { |i| token << chars[rand(chars.size)] }
+ token
+ end
+
def _delete_user(username)
LinkedData::Models::User.find(@@username).first&.delete
end
diff --git a/test/data/ontology_files/BRO_v3.2.owl b/test/data/ontology_files/BRO_v3.2.owl
index d64075cc..33e91d79 100644
--- a/test/data/ontology_files/BRO_v3.2.owl
+++ b/test/data/ontology_files/BRO_v3.2.owl
@@ -631,7 +631,11 @@
Activity
- Activity of interest that may be related to a BRO:Resource.
+ Activity
+ ActivityEnglish
+ Activité
+ Activity of interest that may be related to a BRO:Resource.
+ Activité d’intérêt pouvant être liée à un BRO:Resource.
activities
@@ -641,8 +645,10 @@
- Activity related to the creation, use, or maintenance of a biorepository (http://en.wikipedia.org/wiki/Biorepository)
- Biospecimen Management
+ Activity related to the creation, use, or maintenance of a biorepository (http://en.wikipedia.org/wiki/Biorepository)
+ Activité liée à la création, à l'utilisation ou à la maintenance d'un bioréférentiel (http://en.wikipedia.org/wiki/Biorepository)
+ Biospecimen Management
+ Gestion des échantillons biologiques
diff --git a/test/helpers/test_access_control_helper.rb b/test/helpers/test_access_control_helper.rb
index 1e3807d5..19b0ae0f 100644
--- a/test/helpers/test_access_control_helper.rb
+++ b/test/helpers/test_access_control_helper.rb
@@ -2,8 +2,8 @@
class TestAccessControlHelper < TestCaseHelpers
- def self.before_suite
- self.new("before_suite").delete_ontologies_and_submissions
+ def before_suite
+ self.backend_4s_delete
@@usernames = ["user1", "user2", "user3", "admin"]
@@usernames.each do |username|
@@ -14,7 +14,7 @@ def self.before_suite
)
user.save
user.bring_remaining
- self.class_variable_set(:"@@#{username}", user)
+ self.class.class_variable_set(:"@@#{username}", user)
end
@@admin.role = [LinkedData::Models::Users::Role.find(LinkedData::Models::Users::Role::ADMIN).first]
@@ -42,8 +42,9 @@ def self.before_suite
LinkedData.settings.enable_security = true
end
- def self.after_suite
- LinkedData.settings.enable_security = @@old_security_setting if class_variable_defined?("@@old_security_setting")
+ def after_suite
+ self.backend_4s_delete
+ LinkedData.settings.enable_security = @@old_security_setting unless @@old_security_setting.nil?
end
def test_filtered_list
diff --git a/test/helpers/test_application_helper.rb b/test/helpers/test_application_helper.rb
index b90aa30f..2315a677 100644
--- a/test/helpers/test_application_helper.rb
+++ b/test/helpers/test_application_helper.rb
@@ -3,7 +3,7 @@
class TestApplicationHelper < TestCaseHelpers
- def self.before_suite
+ def before_suite
@@ontologies = LinkedData::SampleData::Ontology.create_ontologies_and_submissions[2]
end
diff --git a/test/helpers/test_http_cache_helper.rb b/test/helpers/test_http_cache_helper.rb
index 944198a6..8a88cc55 100644
--- a/test/helpers/test_http_cache_helper.rb
+++ b/test/helpers/test_http_cache_helper.rb
@@ -2,10 +2,10 @@
class TestHTTPCacheHelper < TestCaseHelpers
- def self.before_suite
+ def before_suite
raise Exception, "Redis is unavailable, caching will not function" if LinkedData::HTTPCache.redis.ping.nil?
- self.new("before_suite").delete_ontologies_and_submissions
- ontologies = self.new("before_suite")._ontologies
+ self.delete_ontologies_and_submissions
+ ontologies = self._ontologies
@@ontology = ontologies.shift
@@ontology_alt = ontologies.shift
@@ontology.bring_remaining
@@ -39,7 +39,7 @@ def self.before_suite
LinkedData.settings.enable_http_cache = true
end
- def self.after_suite
+ def after_suite
LinkedData.settings.enable_http_cache = @orig_enable_cache
LinkedData::HTTPCache.invalidate_all
end
diff --git a/test/helpers/test_slices_helper.rb b/test/helpers/test_slices_helper.rb
index 165a2a7e..e776c155 100644
--- a/test/helpers/test_slices_helper.rb
+++ b/test/helpers/test_slices_helper.rb
@@ -2,13 +2,14 @@
class TestSlicesHelper < TestCaseHelpers
- def self.before_suite
- self.new("before_suite").delete_ontologies_and_submissions
+ def before_suite
+ self.backend_4s_delete
+
@@orig_slices_setting = LinkedData.settings.enable_slices
LinkedData.settings.enable_slices = true
@@onts = LinkedData::SampleData::Ontology.create_ontologies_and_submissions(ont_count: 5, submission_count: 0)[2]
@@group_acronym = "test-group"
- @@group = _create_group
+ @@group = self.class._create_group
@@onts[0..2].each do |o|
o.bring_remaining
o.group = [@@group]
@@ -30,6 +31,10 @@ def self.before_suite
LinkedData::Models::Slice.synchronize_groups_to_slices
end
+ def after_suite
+ self.backend_4s_delete
+ end
+
def test_filtered_list
get "http://#{@@group_acronym}.dev/ontologies"
assert last_response.ok?
diff --git a/test/helpers/test_users_helper.rb b/test/helpers/test_users_helper.rb
index 6cd5bd50..9f1614f1 100644
--- a/test/helpers/test_users_helper.rb
+++ b/test/helpers/test_users_helper.rb
@@ -2,9 +2,10 @@
class TestUsersHelper < TestCaseHelpers
- def self.before_suite
- @@user = _create_user
- @@non_custom_user = _create_user("notcustom")
+ def before_suite
+ self.backend_4s_delete
+ @@user = self.class._create_user
+ @@non_custom_user = self.class._create_user("notcustom")
@@onts = LinkedData::SampleData::Ontology.create_ontologies_and_submissions({
ont_count: 5,
@@ -29,7 +30,7 @@ def self.before_suite
LinkedData.settings.enable_security = true
end
- def self.after_suite
+ def after_suite
LinkedData.settings.enable_security = @@old_security_setting
end
diff --git a/test/middleware/test_rack_attack.rb b/test/middleware/test_rack_attack.rb
index 42b1ddf2..92b4d636 100644
--- a/test/middleware/test_rack_attack.rb
+++ b/test/middleware/test_rack_attack.rb
@@ -5,8 +5,8 @@
RACK_CONFIG = File.join([settings.root, "config.ru"])
class TestRackAttack < TestCase
-
- def self.before_suite
+
+ def before_suite
# Store app settings
@@auth_setting = LinkedData.settings.enable_security
@@throttling_setting = LinkedData.settings.enable_throttling
@@ -18,14 +18,14 @@ def self.before_suite
LinkedData::OntologiesAPI.settings.req_per_second_per_ip = 1
LinkedData::OntologiesAPI.settings.safe_ips = Set.new(["1.2.3.4", "1.2.3.5"])
- @@user = LinkedData::Models::User.new({username: "user", password: "test_password", email: "test_email@example.org"})
+ @@user = LinkedData::Models::User.new({username: "user", password: "test_password", email: "test_email1@example.org"})
@@user.save
- @@bp_user = LinkedData::Models::User.new({username: "ncbobioportal", password: "test_password", email: "test_email@example.org"})
+ @@bp_user = LinkedData::Models::User.new({username: "ncbobioportal", password: "test_password", email: "test_email2@example.org"})
@@bp_user.save
admin_role = LinkedData::Models::Users::Role.find("ADMINISTRATOR").first
- @@admin = LinkedData::Models::User.new({username: "admin", password: "test_password", email: "test_email@example.org", role: [admin_role]})
+ @@admin = LinkedData::Models::User.new({username: "admin", password: "test_password", email: "test_email3@example.org", role: [admin_role]})
@@admin.save
# Redirect output or we get a bunch of noise from Rack (gets reset in the after_suite method).
@@ -34,8 +34,8 @@ def self.before_suite
$stdout = File.open("/dev/null", "w")
$stderr = File.open("/dev/null", "w")
- # http://en.wikipedia.org/wiki/List_of_TCP_and_UDP_port_numbers#Dynamic.2C_private_or_ephemeral_ports
- @@port1 = Random.rand(55000..65535)
+
+ @@port1 = unused_port
# Fork the process to create two servers. This isolates the Rack::Attack configuration, which makes other tests fail if included.
@@pid1 = fork do
@@ -47,7 +47,7 @@ def self.before_suite
Signal.trap("HUP") { Process.exit! }
end
- @@port2 = Random.rand(55000..65535) # http://en.wikipedia.org/wiki/List_of_TCP_and_UDP_port_numbers#Dynamic.2C_private_or_ephemeral_ports
+ @@port2 = unused_port
@@pid2 = fork do
require_relative '../../config/rack_attack'
Rack::Server.start(
@@ -61,7 +61,7 @@ def self.before_suite
sleep(5)
end
- def self.after_suite
+ def after_suite
# Restore app settings
LinkedData.settings.enable_security = @@auth_setting
LinkedData::OntologiesAPI.settings.enable_throttling = @@throttling_setting
diff --git a/test/solr/configsets/term_search/conf/schema.xml b/test/solr/configsets/term_search/conf/schema.xml
index 6b18a2a1..73c75b31 100644
--- a/test/solr/configsets/term_search/conf/schema.xml
+++ b/test/solr/configsets/term_search/conf/schema.xml
@@ -128,11 +128,20 @@
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -140,9 +149,20 @@
-
+
+
+
+
+
+
+
+
+
+
+
+
+
-
@@ -151,8 +171,8 @@
-
-
+
+
@@ -251,8 +271,19 @@
-
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
+
+
+
-
-
-
-
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
+
+
+
+
-
-
-
-
+
+
+
+
-
-
-
-
-
-
+
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
+
+
-
-
-
-
+
+
+
+
-
-
-
-
+
+
+
+
-
-
-
-
+
+
+
+
@@ -769,255 +800,255 @@
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
+
+
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
+
+
+
+
+
+
+
-
-
-
-
-
-
-
+
+
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
+
+
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
+
+
-
-
-
-
-
-
-
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
+
+
-
-
-
-
-
-
-
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
+
+
-
-
-
-
-
-
-
+
+
+
+
+
+
+
-
-
-
-
-
-
-
+
+
+
+
+
+
+
-
-
-
-
-
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
+
+
-
+
\ No newline at end of file
diff --git a/test/solr/generate_ncbo_configsets.sh b/test/solr/generate_ncbo_configsets.sh
index 893f7f3a..29134dad 100755
--- a/test/solr/generate_ncbo_configsets.sh
+++ b/test/solr/generate_ncbo_configsets.sh
@@ -2,18 +2,23 @@
# generates solr configsets by merging _default configset with config files in config/solr
# _default is copied from the solr distribution solr-8.10.1/server/solr/configsets/_default/
-pushd solr/configsets
-ld_config='../../../../ontologies_linked_data/config/solr/'
-#ld_config='../../../../config/solr/'
-ls -l $ld_config
-pwd
-[ -d property_search ] && rm -Rf property_search
-[ -d term_search ] && rm -Rf property_search
-[ -d $ld_config/property_search ] || echo "cant find ontologies_linked_data project"
-mkdir -p property_search/conf
-mkdir -p term_search/conf
-cp -a _default/conf/* property_search/conf/
-cp -a _default/conf/* term_search/conf/
-cp -a $ld_config/property_search/* property_search/conf
-cp -a $ld_config/term_search/* term_search/conf
-popd
+#cd solr/configsets
+ld_config='config/solr'
+configsets='test/solr/configsets'
+[ -d ${configsets}/property_search ] && rm -Rf ${configsets}/property_search
+[ -d ${configsets}/term_search ] && rm -Rf ${configsets}/term_search
+if [[ ! -d ${ld_config}/property_search ]]; then
+ echo 'cant find ld solr config sets'
+ exit 1
+fi
+if [[ ! -d ${configsets}/_default/conf ]]; then
+ echo 'cant find default solr configset'
+ exit 1
+fi
+mkdir -p ${configsets}/property_search/conf
+mkdir -p ${configsets}/term_search/conf
+cp -a ${configsets}/_default/conf/* ${configsets}/property_search/conf/
+cp -a ${configsets}/_default/conf/* ${configsets}/term_search/conf/
+cp -a $ld_config/property_search/* ${configsets}/property_search/conf
+cp -a $ld_config/term_search/* ${configsets}/term_search/conf
+
diff --git a/test/test_case.rb b/test/test_case.rb
index 98b02442..dba7d327 100644
--- a/test/test_case.rb
+++ b/test/test_case.rb
@@ -20,8 +20,10 @@
require_relative 'test_log_file'
require_relative '../app'
-require 'minitest/unit'
-MiniTest::Unit.autorun
+require 'minitest/autorun'
+require 'minitest/hooks/test'
+require 'webmock/minitest'
+WebMock.allow_net_connect!
require 'rack/test'
require 'multi_json'
require 'oj'
@@ -60,7 +62,9 @@ def safe_redis_hosts?(sh)
$stdout.flush
end
-class AppUnit < MiniTest::Unit
+class AppUnit < Minitest::Test
+ include Minitest::Hooks
+
def count_pattern(pattern)
q = "SELECT (count(DISTINCT ?s) as ?c) WHERE { #{pattern} }"
rs = Goo.sparql_query_client.query(q)
@@ -88,23 +92,27 @@ def backend_4s_delete
end
end
- def before_suites
+ def before_suite
# code to run before the first test (gets inherited in sub-tests)
end
- def after_suites
+ def after_suite
# code to run after the last test (gets inherited in sub-tests)
end
- def _run_suites(suites, type)
- begin
- before_suites
- super(suites, type)
- ensure
- after_suites
- end
+ def before_all
+ super
+ backend_4s_delete
+ before_suite
end
+ def after_all
+ after_suite
+ super
+ end
+
+
+
def _run_suite(suite, type)
begin
backend_4s_delete
@@ -122,12 +130,12 @@ def _run_suite(suite, type)
end
end
-AppUnit.runner = AppUnit.new
+
# All tests should inherit from this class.
# Use 'rake test' from the command line to run tests.
# See http://www.sinatrarb.com/testing.html for testing information
-class TestCase < MiniTest::Unit::TestCase
+class TestCase < AppUnit
include Rack::Test::Methods
@@ -144,6 +152,9 @@ def app
# @option options [TrueClass, FalseClass] :random_submission_count Use a random number of submissions between 1 and :submission_count
# @option options [TrueClass, FalseClass] :process_submission Parse the test ontology file
def create_ontologies_and_submissions(options = {})
+ if options[:process_submission] && options[:process_options].nil?
+ options[:process_options] = { process_rdf: true, extract_metadata: false, generate_missing_labels: false }
+ end
LinkedData::SampleData::Ontology.create_ontologies_and_submissions(options)
end
@@ -214,4 +225,20 @@ def self.reset_to_not_admin(user)
user.save
end
+ def unused_port
+ server = TCPServer.new('127.0.0.1', 0)
+ port = server.addr[1]
+ server.close
+ port
+ end
+
+ private
+ def port_in_use?(port)
+ server = TCPServer.new(port)
+ server.close
+ false
+ rescue Errno::EADDRINUSE
+ true
+ end
+
end
diff --git a/views/documentation/documentation.haml b/views/documentation/documentation.haml
index 527d781f..570cf23b 100644
--- a/views/documentation/documentation.haml
+++ b/views/documentation/documentation.haml
@@ -151,6 +151,7 @@
%li include={prefLabel, synonym, definition, notation, cui, semanticType} // default = (see Common Parameters section)
%li page={integer representing the page number} // default = 1
%li pagesize={integer representing the size of the returned page} // default = 50
+ %li language={an ISO 639-1 language value, e.g. 'fr' or 'en'} // by default, searches in English
%h4#nav_search_subtree Subtree Search