diff --git a/.github/workflows/ruby-unit-tests.yml b/.github/workflows/ruby-unit-tests.yml index 16d8357e..39d28a8c 100644 --- a/.github/workflows/ruby-unit-tests.yml +++ b/.github/workflows/ruby-unit-tests.yml @@ -2,15 +2,14 @@ name: Ruby Unit Tests on: push: - pull_request: jobs: test: strategy: fail-fast: false matrix: - goo-slice: [ '20', '100', '500' ] - ruby-version: [ '2.7' ] + goo-slice: [ '100' ] + ruby-version: [ '3.2.0' ] triplestore: [ 'fs', 'ag', 'vo', 'gb' ] runs-on: ubuntu-latest steps: @@ -28,11 +27,10 @@ jobs: ruby-version: ${{ matrix.ruby-version }} bundler-cache: true # runs 'bundle install' and caches installed gems automatically - name: Run unit tests - # unit tests are run inside a container - # http://docs.codecov.io/docs/testing-with-docker run: | ci_env=`bash <(curl -s https://codecov.io/env)` - GOO_SLICES=${{ matrix.goo-slice }} bundle exec rake test:docker:${{ matrix.triplestore }} TESTOPTS="-v" + GOO_SLICES=${{ matrix.goo-slice }} bundle exec rake test:docker:${{ matrix.triplestore }} + - name: Upload coverage reports to Codecov uses: codecov/codecov-action@v3 with: diff --git a/Gemfile b/Gemfile index bf0d2af8..bf659419 100644 --- a/Gemfile +++ b/Gemfile @@ -1,40 +1,43 @@ source 'https://rubygems.org' -gem 'activesupport', '~> 5' -# see https://github.com/ncbo/ontologies_api/issues/69 +gem 'activesupport' gem 'bigdecimal' -# gem 'faraday', '~> 1.9' -gem 'json-schema', '~> 2.0' +gem 'json-schema' gem 'multi_json' gem 'oj' gem 'parseconfig' gem 'rack' -gem 'rake', '~> 10.0' +gem 'rake' gem 'rexml' # Investigate why unicorn fails to start under ruby 3 without adding rexml gem to the Gemfile -gem 'sinatra', '~> 1.0' -gem 'sinatra-advanced-routes' -gem 'sinatra-contrib', '~> 1.0' +gem 'sinatra' +gem 'rackup' + +github 'sinatra/sinatra' do + gem 'sinatra-contrib' +end + gem 'request_store' gem 'parallel' -gem 'json-ld' -gem 'google-protobuf', '3.25.3' +gem 'google-protobuf' +gem 'net-ftp' +gem 'json-ld', '~> 3.2.0' +gem 'rdf-raptor', 
github:'ruby-rdf/rdf-raptor', ref: '6392ceabf71c3233b0f7f0172f662bd4a22cd534' # use version 3.3.0 when available # Rack middleware -gem 'ffi', '~> 1.16.3' -gem 'rack-accept', '~> 0.4' -gem 'rack-attack', '~> 6.6.1', require: 'rack/attack' -gem 'rack-cache', '~> 1.13.0' +gem 'ffi' +gem 'rack-accept' +gem 'rack-attack', require: 'rack/attack' +gem 'rack-cache' gem 'rack-cors', require: 'rack/cors' # GitHub dependency can be removed when https://github.com/niko/rack-post-body-to-params/pull/6 is merged and released gem 'rack-post-body-to-params', github: 'palexander/rack-post-body-to-params', branch: 'multipart_support' gem 'rack-timeout' -gem 'redis-rack-cache', '~> 2.0' +gem 'redis-rack-cache' # Data access (caching) gem 'redis' -gem 'redis-store', '~>1.10' +gem 'redis-store' # Monitoring -gem 'cube-ruby', require: 'cube' gem 'newrelic_rpm', group: [:default, :deployment] # HTTP server @@ -42,16 +45,16 @@ gem 'unicorn' gem 'unicorn-worker-killer' # Templating -gem 'haml', '~> 5.2.2' # pin see https://github.com/ncbo/ontologies_api/pull/107 +gem 'haml' gem 'redcarpet' # NCBO gems (can be from a local dev path or from rubygems/git) gem 'ncbo_annotator', git: 'https://github.com/ontoportal-lirmm/ncbo_annotator.git', branch: 'development' -gem 'ncbo_cron', git: 'https://github.com/ontoportal-lirmm/ncbo_cron.git', branch: 'development' +gem 'ncbo_cron', git: 'https://github.com/ontoportal-lirmm/ncbo_cron.git', branch: 'feature/migrate-to-ruby-3.2' gem 'ncbo_ontology_recommender', git: 'https://github.com/ontoportal-lirmm/ncbo_ontology_recommender.git', branch: 'development' -gem 'goo', github: 'ontoportal-lirmm/goo', branch: 'development' +gem 'ontologies_linked_data', github: 'ontoportal-lirmm/ontologies_linked_data', branch: 'feature/migrate-ruby-3.2' +gem 'goo', github: 'ontoportal-lirmm/goo', branch: 'feature/migrate-ruby-3.2' gem 'sparql-client', github: 'ontoportal-lirmm/sparql-client', branch: 'development' -gem 'ontologies_linked_data', git: 
'https://github.com/ontoportal-lirmm/ontologies_linked_data.git', branch: 'development' group :development do @@ -77,12 +80,14 @@ end group :test do gem 'crack', '0.4.5' - gem 'minitest', '~> 5.0' - gem 'minitest-hooks', "~> 1.5" + gem 'minitest' + gem 'minitest-hooks' gem 'minitest-stub_any_instance' + gem 'minitest-reporters' + gem 'minitest-fail-fast' gem 'rack-test' gem 'simplecov', require: false gem 'simplecov-cobertura' # for codecov.io - gem 'webmock', '~> 3.19.1' + gem 'webmock' gem 'webrick' end diff --git a/Gemfile.lock b/Gemfile.lock index f0708f5a..eda9337a 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,12 +1,12 @@ GIT remote: https://github.com/ontoportal-lirmm/goo.git - revision: 5825dc1f9d0ff439b1ba9d8f78fa7bb20b1c65d0 - branch: development + revision: dd3ea6c0f583c2044622a9f872a0bd18e898bb79 + branch: feature/migrate-ruby-3.2 specs: goo (0.0.2) addressable (~> 2.8) pry - rdf (= 3.2.11) + rdf rdf-raptor rdf-rdfxml rdf-vocab @@ -29,8 +29,8 @@ GIT GIT remote: https://github.com/ontoportal-lirmm/ncbo_cron.git - revision: dd736917974f13ac7558e0d2a61a84030d82acaa - branch: development + revision: 9cbc8b9ea384350597412c24e3dffb96b7d650ff + branch: feature/migrate-to-ruby-3.2 specs: ncbo_cron (0.0.1) dante @@ -42,7 +42,7 @@ GIT ncbo_annotator ontologies_linked_data redis - rufus-scheduler (~> 2.0.24) + rufus-scheduler GIT remote: https://github.com/ontoportal-lirmm/ncbo_ontology_recommender.git @@ -57,8 +57,8 @@ GIT GIT remote: https://github.com/ontoportal-lirmm/ontologies_linked_data.git - revision: 6cb18910e322645e3cc3490951d10f19468da52f - branch: development + revision: d5f5a439fdec58a9f0ac074f263dc18ff78f3df0 + branch: feature/migrate-ruby-3.2 specs: ontologies_linked_data (0.0.1) activesupport @@ -77,7 +77,7 @@ GIT GIT remote: https://github.com/ontoportal-lirmm/sparql-client.git - revision: d4a226e75eb4aeaaf42720eac4f23f55380a0bd3 + revision: 736b7650e28db3ce5e3e49511ac30f958a29e8f1 branch: development specs: sparql-client (3.2.2) @@ -100,23 
+100,58 @@ GIT shotgun (0.9) rack (>= 1.0) +GIT + remote: https://github.com/ruby-rdf/rdf-raptor.git + revision: 6392ceabf71c3233b0f7f0172f662bd4a22cd534 + ref: 6392ceabf71c3233b0f7f0172f662bd4a22cd534 + specs: + rdf-raptor (3.3.0) + ffi (~> 1.15) + rdf (~> 3.3) + +GIT + remote: https://github.com/sinatra/sinatra.git + revision: c4b7c04e6d23ef8e17404d64cc731bece268acea + specs: + rack-protection (4.1.1) + base64 (>= 0.1.0) + logger (>= 1.6.0) + rack (>= 3.0.0, < 4) + sinatra-contrib (4.1.1) + multi_json (>= 0.0.2) + mustermann (~> 3.0) + rack-protection (= 4.1.1) + sinatra (= 4.1.1) + tilt (~> 2.0) + GEM remote: https://rubygems.org/ specs: - activesupport (5.2.8.1) - concurrent-ruby (~> 1.0, >= 1.0.2) - i18n (>= 0.7, < 2) - minitest (~> 5.1) - tzinfo (~> 1.1) + activesupport (8.0.1) + base64 + benchmark (>= 0.3) + bigdecimal + concurrent-ruby (~> 1.0, >= 1.3.1) + connection_pool (>= 2.2.5) + drb + i18n (>= 1.6, < 2) + logger (>= 1.4.2) + minitest (>= 5.1) + securerandom (>= 0.3) + tzinfo (~> 2.0, >= 2.0.5) + uri (>= 0.13.1) addressable (2.8.7) public_suffix (>= 2.0.2, < 7.0) airbrussh (1.5.3) sshkit (>= 1.6.1, != 1.7.0) + ansi (1.5.0) ast (2.4.2) - backports (3.25.0) base64 (0.2.0) + bcp47_spec (0.2.1) bcrypt (3.1.20) bcrypt_pbkdf (1.1.1) + bcrypt_pbkdf (1.1.1-arm64-darwin) + benchmark (0.4.0) bigdecimal (3.1.9) builder (3.3.0) capistrano (3.19.2) @@ -136,36 +171,45 @@ GEM connection_pool (2.5.0) crack (0.4.5) rexml - cube-ruby (0.0.3) dante (0.2.0) date (3.4.1) declarative (0.0.20) docile (1.4.1) domain_name (0.6.20240107) + drb (2.2.1) ed25519 (1.3.0) - faraday (2.8.1) - base64 - faraday-net_http (>= 2.0, < 3.1) - ruby2_keywords (>= 0.0.4) - faraday-net_http (3.0.2) + et-orbi (1.2.11) + tzinfo + faraday (2.12.2) + faraday-net_http (>= 2.0, < 3.5) + json + logger + faraday-net_http (3.4.0) + net-http (>= 0.5.0) faraday-retry (2.2.1) faraday (~> 2.0) - ffi (1.16.3) - gapic-common (0.21.1) + ffi (1.17.1-arm64-darwin) + ffi (1.17.1-x86_64-linux-gnu) + fugit (1.11.1) 
+ et-orbi (~> 1, >= 1.2.11) + raabro (~> 1.4) + gapic-common (0.25.0) faraday (>= 1.9, < 3.a) faraday-retry (>= 1.0, < 3.a) - google-protobuf (~> 3.18) - googleapis-common-protos (>= 1.4.0, < 2.a) - googleapis-common-protos-types (>= 1.11.0, < 2.a) - googleauth (~> 1.9) - grpc (~> 1.59) + google-cloud-env (~> 2.2) + google-logging-utils (~> 0.1) + google-protobuf (>= 3.25, < 5.a) + googleapis-common-protos (~> 1.6) + googleapis-common-protos-types (~> 1.15) + googleauth (~> 1.12) + grpc (~> 1.66) get_process_mem (0.2.7) ffi (~> 1.0) - google-analytics-data (0.6.1) + google-analytics-data (0.7.0) google-analytics-data-v1beta (>= 0.11, < 2.a) google-cloud-core (~> 1.6) - google-analytics-data-v1beta (0.14.0) - gapic-common (>= 0.21.1, < 2.a) + google-analytics-data-v1beta (0.16.0) + gapic-common (>= 0.25.0, < 2.a) google-cloud-errors (~> 1.0) google-apis-analytics_v3 (0.16.0) google-apis-core (>= 0.15.0, < 2.a) @@ -180,28 +224,39 @@ GEM google-cloud-core (1.7.1) google-cloud-env (>= 1.0, < 3.a) google-cloud-errors (~> 1.0) - google-cloud-env (2.1.1) + google-cloud-env (2.2.1) faraday (>= 1.0, < 3.a) google-cloud-errors (1.4.0) - google-protobuf (3.25.3-x86_64-linux) + google-logging-utils (0.1.0) + google-protobuf (4.29.3-arm64-darwin) + bigdecimal + rake (>= 13) + google-protobuf (4.29.3-x86_64-linux) + bigdecimal + rake (>= 13) googleapis-common-protos (1.6.0) google-protobuf (>= 3.18, < 5.a) googleapis-common-protos-types (~> 1.7) grpc (~> 1.41) googleapis-common-protos-types (1.18.0) google-protobuf (>= 3.18, < 5.a) - googleauth (1.11.2) + googleauth (1.13.1) faraday (>= 1.0, < 3.a) - google-cloud-env (~> 2.1) + google-cloud-env (~> 2.2) + google-logging-utils (~> 0.1) jwt (>= 1.4, < 3.0) multi_json (~> 1.11) os (>= 0.9, < 2.0) signet (>= 0.16, < 2.a) - grpc (1.65.2-x86_64-linux) + grpc (1.70.1-arm64-darwin) + google-protobuf (>= 3.25, < 5.0) + googleapis-common-protos-types (~> 1.0) + grpc (1.70.1-x86_64-linux) google-protobuf (>= 3.25, < 5.0) 
googleapis-common-protos-types (~> 1.0) - haml (5.2.2) - temple (>= 0.8.0) + haml (6.3.0) + temple (>= 0.8.2) + thor tilt hashdiff (1.1.2) htmlentities (4.3.4) @@ -212,15 +267,21 @@ GEM i18n (1.14.7) concurrent-ruby (~> 1.0) json (2.9.1) - json-ld (3.0.2) - multi_json (~> 1.12) - rdf (>= 2.2.8, < 4.0) - json-schema (2.8.1) - addressable (>= 2.4) + json-canonicalization (0.4.0) + json-ld (3.2.5) + htmlentities (~> 4.3) + json-canonicalization (~> 0.3, >= 0.3.2) + link_header (~> 0.0, >= 0.0.8) + multi_json (~> 1.15) + rack (>= 2.2, < 4) + rdf (~> 3.2, >= 3.2.10) + json-schema (5.1.1) + addressable (~> 2.8) + bigdecimal (~> 3.1) jwt (2.10.1) base64 kgio (2.11.4) - language_server-protocol (3.17.0.3) + language_server-protocol (3.17.0.4) libxml-ruby (5.0.3) link_header (0.0.8) logger (1.6.5) @@ -235,19 +296,33 @@ GEM mime-types (3.6.0) logger mime-types-data (~> 3.2015) - mime-types-data (3.2025.0107) + mime-types-data (3.2025.0204) mini_mime (1.1.5) minitest (5.25.4) + minitest-fail-fast (0.1.0) + minitest (~> 5) minitest-hooks (1.5.2) minitest (> 5.3) + minitest-reporters (1.7.1) + ansi + builder + minitest (>= 5.0) + ruby-progressbar minitest-stub_any_instance (1.0.3) mlanett-redis-lock (0.2.7) redis multi_json (1.15.0) + mustermann (3.0.3) + ruby2_keywords (~> 0.0.1) mutex_m (0.3.0) + net-ftp (0.3.8) + net-protocol + time + net-http (0.6.0) + uri net-http-persistent (4.0.5) connection_pool (~> 2.2) - net-imap (0.4.18) + net-imap (0.5.5) date net-protocol net-pop (0.1.2) @@ -258,11 +333,11 @@ GEM net-ssh (>= 2.6.5, < 8.0.0) net-sftp (4.0.0) net-ssh (>= 5.0.0, < 8.0.0) - net-smtp (0.5.0) + net-smtp (0.5.1) net-protocol net-ssh (7.3.0) netrc (0.11.0) - newrelic_rpm (9.16.1) + newrelic_rpm (9.17.0) oj (3.16.9) bigdecimal (>= 3.0) ostruct (>= 0.2) @@ -272,7 +347,7 @@ GEM ostruct (0.6.1) parallel (1.26.3) parseconfig (1.1.2) - parser (3.3.7.0) + parser (3.3.7.1) ast (~> 2.4.1) racc pony (1.13.1) @@ -280,41 +355,44 @@ GEM pry (0.15.2) coderay (~> 1.1) method_source (~> 
1.0) - public_suffix (5.1.1) + public_suffix (6.0.1) + raabro (1.4.0) racc (1.8.1) - rack (1.6.13) + rack (3.1.9) rack-accept (0.4.5) rack (>= 0.4) - rack-attack (6.6.1) - rack (>= 1.0, < 3) - rack-cache (1.13.0) + rack-attack (6.7.0) + rack (>= 1.0, < 4) + rack-cache (1.17.0) rack (>= 0.4) - rack-cors (1.0.6) - rack (>= 1.6.0) + rack-cors (2.0.2) + rack (>= 2.0.0) rack-mini-profiler (3.3.1) rack (>= 1.2.0) - rack-protection (1.5.5) - rack + rack-session (2.1.0) + base64 (>= 0.1.0) + rack (>= 3.0.0) rack-test (2.2.0) rack (>= 1.3) rack-timeout (0.7.0) + rackup (2.2.1) + rack (>= 3) rainbow (3.1.1) raindrops (0.20.1) - rake (10.5.0) - rdf (3.2.11) + rake (13.2.1) + rdf (3.3.2) + bcp47_spec (~> 0.2) + bigdecimal (~> 3.1, >= 3.1.5) link_header (~> 0.0, >= 0.0.8) - rdf-raptor (3.2.0) - ffi (~> 1.15) - rdf (~> 3.2) - rdf-rdfxml (3.2.2) - builder (~> 3.2) + rdf-rdfxml (3.3.0) + builder (~> 3.2, >= 3.2.4) htmlentities (~> 4.3) - rdf (~> 3.2) - rdf-xsd (~> 3.2) - rdf-vocab (3.2.7) - rdf (~> 3.2, >= 3.2.4) - rdf-xsd (3.2.1) - rdf (~> 3.2) + rdf (~> 3.3) + rdf-xsd (~> 3.3) + rdf-vocab (3.3.2) + rdf (~> 3.3) + rdf-xsd (3.3.0) + rdf (~> 3.3) rexml (~> 3.2) redcarpet (3.6.0) redis (5.3.0) @@ -343,24 +421,25 @@ GEM rsolr (2.6.0) builder (>= 2.1.2) faraday (>= 0.9, < 3, != 2.0.0) - rubocop (1.71.0) + rubocop (1.71.2) json (~> 2.3) language_server-protocol (>= 3.17.0) parallel (~> 1.10) parser (>= 3.3.0.2) rainbow (>= 2.2.2, < 4.0) regexp_parser (>= 2.9.3, < 3.0) - rubocop-ast (>= 1.36.2, < 2.0) + rubocop-ast (>= 1.38.0, < 2.0) ruby-progressbar (~> 1.7) unicode-display_width (>= 2.4.0, < 4.0) - rubocop-ast (1.37.0) + rubocop-ast (1.38.0) parser (>= 3.3.1.0) ruby-progressbar (1.13.0) ruby-xxHash (0.4.0.2) ruby2_keywords (0.0.5) rubyzip (2.4.1) - rufus-scheduler (2.0.24) - tzinfo (>= 0.3.22) + rufus-scheduler (3.9.2) + fugit (~> 1.1, >= 1.11.1) + securerandom (0.4.1) signet (0.19.0) addressable (~> 2.8) faraday (>= 0.17.5, < 3.a) @@ -375,19 +454,13 @@ GEM simplecov (~> 0.19) 
simplecov-html (0.13.1) simplecov_json_formatter (0.1.4) - sinatra (1.4.8) - rack (~> 1.5) - rack-protection (~> 1.4) - tilt (>= 1.3, < 3) - sinatra-advanced-routes (0.5.3) - sinatra (~> 1.0) - sinatra-contrib (1.4.7) - backports (>= 2.0) - multi_json - rack-protection - rack-test - sinatra (~> 1.4.0) - tilt (>= 1.3, < 3) + sinatra (4.1.1) + logger (>= 1.6.0) + mustermann (~> 3.0) + rack (>= 3.0.0, < 4) + rack-protection (= 4.1.1) + rack-session (>= 2.0.0, < 3) + tilt (~> 2.0) sshkit (1.23.2) base64 net-scp (>= 1.1.2) @@ -396,12 +469,14 @@ GEM ostruct systemu (2.6.5) temple (0.10.3) - thread_safe (0.3.6) + thor (1.3.2) tilt (2.6.0) + time (0.4.1) + date timeout (0.4.3) trailblazer-option (0.1.2) - tzinfo (1.2.11) - thread_safe (~> 0.1) + tzinfo (2.0.6) + concurrent-ruby (~> 1.0) uber (0.1.0) unicode-display_width (3.1.4) unicode-emoji (~> 4.0, >= 4.0.4) @@ -412,19 +487,21 @@ GEM unicorn-worker-killer (0.4.5) get_process_mem (~> 0) unicorn (>= 4, < 7) + uri (1.0.2) uuid (2.3.9) macaddr (~> 1.0) - webmock (3.19.1) + webmock (3.25.0) addressable (>= 2.8.0) crack (>= 0.3.2) hashdiff (>= 0.4.0, < 2.0.0) webrick (1.9.1) PLATFORMS + arm64-darwin-24 x86_64-linux DEPENDENCIES - activesupport (~> 5) + activesupport bcrypt_pbkdf (>= 1.0, < 2.0) bigdecimal capistrano (~> 3) @@ -432,54 +509,57 @@ DEPENDENCIES capistrano-locally capistrano-rbenv crack (= 0.4.5) - cube-ruby ed25519 (>= 1.2, < 2.0) - ffi (~> 1.16.3) + ffi goo! - google-protobuf (= 3.25.3) - haml (~> 5.2.2) - json-ld - json-schema (~> 2.0) - minitest (~> 5.0) - minitest-hooks (~> 1.5) + google-protobuf + haml + json-ld (~> 3.2.0) + json-schema + minitest + minitest-fail-fast + minitest-hooks + minitest-reporters minitest-stub_any_instance multi_json ncbo_annotator! ncbo_cron! ncbo_ontology_recommender! + net-ftp newrelic_rpm oj ontologies_linked_data! 
parallel parseconfig rack - rack-accept (~> 0.4) - rack-attack (~> 6.6.1) - rack-cache (~> 1.13.0) + rack-accept + rack-attack + rack-cache rack-cors rack-mini-profiler rack-post-body-to-params! rack-test rack-timeout - rake (~> 10.0) + rackup + rake + rdf-raptor! redcarpet redis - redis-rack-cache (~> 2.0) - redis-store (~> 1.10) + redis-rack-cache + redis-store request_store rexml rubocop shotgun! simplecov simplecov-cobertura - sinatra (~> 1.0) - sinatra-advanced-routes - sinatra-contrib (~> 1.0) + sinatra + sinatra-contrib! sparql-client! unicorn unicorn-worker-killer - webmock (~> 3.19.1) + webmock webrick BUNDLED WITH - 2.4.22 + 2.6.3 diff --git a/app.rb b/app.rb index e09178bd..338b60b7 100644 --- a/app.rb +++ b/app.rb @@ -1,10 +1,11 @@ +$VERBOSE = false + # sinatra-base require 'sinatra' # sinatra-contrib require 'sinatra/respond_with' require 'sinatra/namespace' -require 'sinatra/advanced_routes' require 'sinatra/multi_route' # Other gem dependencies @@ -24,7 +25,6 @@ require 'rack-timeout' require 'rack/cors' require_relative 'lib/rack/slow_requests' -require_relative 'lib/rack/cube_reporter' require_relative 'lib/rack/param_translator' require_relative 'lib/rack/slice_detection' require_relative 'lib/rack/request_lang' @@ -86,25 +86,6 @@ set :show_exceptions, false end -# mini-profiler sets the etag header to nil, so don't use when caching is enabled -if [:development].include?(settings.environment) && !LinkedData.settings.enable_http_cache && LinkedData::OntologiesAPI.settings.enable_miniprofiler - begin - require 'rack-mini-profiler' - Rack::MiniProfiler.config.storage = Rack::MiniProfiler::FileStore - Rack::MiniProfiler.config.position = 'right' - c = ::Rack::MiniProfiler.config - c.pre_authorize_cb = lambda { |env| - true - } - tmp = File.expand_path("../tmp/miniprofiler", __FILE__) - FileUtils.mkdir_p(tmp) unless File.exists?(tmp) - c.storage_options = {path: tmp} - use Rack::MiniProfiler - puts ">> rack-mini-profiler is enabled" - rescue LoadError 
- # profiler isn't there - end -end use Rack::Cors do allow do @@ -113,32 +94,14 @@ end end -# Use middleware (ORDER IS IMPORTANT) -use Rack::Cors do - allow do - origins '*' - resource '*', :headers => :any, :methods => [:get, :post, :put, :patch, :delete, :options] - end -end -if Goo.queries_debug? - use Goo::Debug -end -# Monitoring middleware -if LinkedData::OntologiesAPI.settings.enable_monitoring - cube_settings = { - cube_host: LinkedData::OntologiesAPI.settings.cube_host, - cube_port: LinkedData::OntologiesAPI.settings.cube_port - } - use Rack::CubeReporter, cube_settings - use Rack::SlowRequests, log_path: LinkedData::OntologiesAPI.settings.slow_request_log -end # Show exceptions after timeout if LinkedData::OntologiesAPI.settings.enable_req_timeout use Rack::Timeout; Rack::Timeout.timeout = LinkedData::OntologiesAPI.settings.req_timeout # seconds, shorter than unicorn timeout end + use Rack::SliceDetection use Rack::Accept use Rack::PostBodyToParams diff --git a/config/logging.rb b/config/logging.rb index e37ba4aa..66c323a5 100644 --- a/config/logging.rb +++ b/config/logging.rb @@ -1,23 +1,9 @@ require 'logger' -class CustomLogger < Logger - alias write << - def flush - ((self.instance_variable_get :@logdev).instance_variable_get :@dev).flush - end -end - -# Setup global logging -require 'rack/logger' -# if [:development, :console, :test].include?(settings.environment) -if [:development, :console].include?(settings.environment) - LOGGER = CustomLogger.new(STDOUT) - LOGGER.level = Logger::DEBUG -else - Dir.mkdir('log') unless File.exist?('log') - log = File.new("log/#{settings.environment}.log", "a+") - log.sync = true - LOGGER = CustomLogger.new(log) - LOGGER.level = Logger::INFO - use Rack::CommonLogger, log +configure do + log_file = File.new("log/#{settings.environment}.log", 'a+') + log_file.sync = true + LOGGER = Logger.new(log_file) + LOGGER.level = settings.development? ? 
Logger::DEBUG : Logger::INFO + set :logger, LOGGER end diff --git a/controllers/batch_controller.rb b/controllers/batch_controller.rb index 2ee9b88c..33276cc8 100644 --- a/controllers/batch_controller.rb +++ b/controllers/batch_controller.rb @@ -15,7 +15,7 @@ class BatchController < ApplicationController goo_include = LinkedData::Models::Class.goo_attrs_to_load(incl) class_id_by_ontology = {} collection.each do |class_input| - unless class_input.instance_of?(Hash) + unless class_input.is_a?(Hash) error 422, "The collection param needs to be { 'class' : CLS_ID, 'ontology' : ont_id }" end unless class_input.include?("ontology") and class_input.include?("class") diff --git a/controllers/notes_controller.rb b/controllers/notes_controller.rb index d0ca83f8..cecca1f6 100644 --- a/controllers/notes_controller.rb +++ b/controllers/notes_controller.rb @@ -1,7 +1,7 @@ class NotesController < ApplicationController ## # Ontology notes - get "/ontologies/:ontology/notes?:include_threads?" do + get '/ontologies/:ontology/notes' do ont = Ontology.find(params["ontology"]).include(:acronym).first error 404, "You must provide a valid id to retrieve notes for an ontology" if ont.nil? check_last_modified_segment(LinkedData::Models::Note, [ont.acronym]) @@ -13,7 +13,7 @@ class NotesController < ApplicationController ## # Class notes - get "/ontologies/:ontology/classes/:cls/notes?:include_threads?" do + get "/ontologies/:ontology/classes/:cls/notes" do ont = Ontology.find(params["ontology"]).include(:submissions, :acronym).first error 404, "You must provide a valid id to retrieve notes for an ontology" if ont.nil? check_last_modified_segment(LinkedData::Models::Note, [ont.acronym]) @@ -27,7 +27,7 @@ class NotesController < ApplicationController namespace "/notes" do # Display all notes - get "?:include_threads?" 
do + get '' do check_last_modified_collection(LinkedData::Models::Note) notes = LinkedData::Models::Note.where.include(LinkedData::Models::Note.goo_attrs_to_load(includes_param)).to_a recurse_replies(notes) if params["include_threads"] @@ -35,7 +35,7 @@ class NotesController < ApplicationController end # Display a single note - get '/:noteid?:include_threads?' do + get '/:noteid' do noteid = params["noteid"] note = LinkedData::Models::Note.find(noteid).include(relatedOntology: [:acronym]).first error 404, "Note #{noteid} not found" if note.nil? @@ -121,4 +121,4 @@ def clean_notes_hash(hash) hash end end -end \ No newline at end of file +end diff --git a/controllers/ontology_analytics_controller.rb b/controllers/ontology_analytics_controller.rb index 8ecd77d5..81218b84 100644 --- a/controllers/ontology_analytics_controller.rb +++ b/controllers/ontology_analytics_controller.rb @@ -4,14 +4,14 @@ class OntologyAnalyticsController < ApplicationController ## # get all ontology analytics for a given year/month combination - namespace "/analytics" do + namespace '/analytics' do get do expires 86400, :public year = year_param(params) - error 400, "The year you supplied is invalid. Valid years start with 2 and contain 4 digits." if params["year"] && !year + error 400, 'The year you supplied is invalid. Valid years start with 2 and contain 4 digits.' if params['year'] && !year month = month_param(params) - error 400, "The month you supplied is invalid. Valid months are 1-12." if params["month"] && !month + error 400, 'The month you supplied is invalid. Valid months are 1-12.' 
if params['month'] && !month acronyms = restricted_ontologies_to_acronyms(params) analytics = Ontology.analytics(year, month, acronyms) @@ -22,32 +22,31 @@ class OntologyAnalyticsController < ApplicationController ## # get all analytics for a given ontology - namespace "/ontologies/:acronym/analytics" do + namespace '/ontologies/:acronym/analytics' do get do expires 86400, :public - ont = Ontology.find(params["acronym"]).first + ont = Ontology.find(params['acronym']).first error 404, "No ontology exists with the acronym: #{params["acronym"]}" if ont.nil? analytics = ont.analytics - if params["format"].to_s.downcase.eql?("csv") + if params['format'].to_s.downcase.eql?('csv') tf = Tempfile.new("analytics-#{params['acronym']}") csv = CSV.new(tf, headers: true, return_headers: true, write_headers: true) csv << [:month, :visits] - years = analytics[params["acronym"]].keys.sort + years = analytics[params['acronym']].keys.sort now = Time.now years.each do |year| - months = analytics[params["acronym"]][year].keys.sort + months = analytics[params['acronym']][year].keys.sort months.each do |month| next if now.year == year && now.month <= month || (year == 2013 && month < 10) # we don't have good data going back past Oct 2013 - visits = analytics[params["acronym"]][year][month] - month = DateTime.parse("#{year}/#{month}").strftime("%b %Y") + visits = analytics[params['acronym']][year][month] + month = DateTime.parse("#{year}/#{month}").strftime('%b %Y') csv << [month, visits] end end csv.close - content_type "text/csv" - send_file tf.path, filename: "analytics-#{params['acronym']}.csv" + send_file tf.path, filename: "analytics-#{params['acronym']}.csv", type: 'text/csv', status: 200 else reply analytics end diff --git a/controllers/replies_controller.rb b/controllers/replies_controller.rb index 081238a1..9ee0fbd6 100644 --- a/controllers/replies_controller.rb +++ b/controllers/replies_controller.rb @@ -12,7 +12,7 @@ class RepliesController < ApplicationController namespace 
"/replies" do # Display all replies - get "?:include_threads?" do + get "" do check_last_modified_collection(LinkedData::Models::Notes::Reply) replies = LinkedData::Models::Notes::Reply.where.include(LinkedData::Models::Notes::Reply.goo_attrs_to_load(includes_param)).to_a reply replies @@ -82,4 +82,4 @@ class RepliesController < ApplicationController halt 204 end end -end \ No newline at end of file +end diff --git a/controllers/search_controller.rb b/controllers/search_controller.rb index ce34d51d..682bd7bf 100644 --- a/controllers/search_controller.rb +++ b/controllers/search_controller.rb @@ -184,7 +184,7 @@ class SearchController < ApplicationController def search(model, query, params = {}) query = query.blank? ? "*" : query - resp = model.search(query, search_params(params)) + resp = model.search(query, search_params(**params)) total_found = resp["response"]["numFound"] docs = resp["response"]["docs"] diff --git a/helpers/application_helper.rb b/helpers/application_helper.rb index 51bd4f08..c6554141 100644 --- a/helpers/application_helper.rb +++ b/helpers/application_helper.rb @@ -8,13 +8,13 @@ module ApplicationHelper ## # Escape text for use in html def h(text) - Rack::Utils.escape_html(text) + Rack::Utils.escape_html(text).gsub('/', '/') end ## # Populate +obj+ using values from +params+ # Will also try to find related objects using a Goo lookup. - # TODO: Currerntly, this allows for mass-assignment of everything, which will permit + # TODO: Currently, this allows for mass-assignment of everything, which will permit # users to overwrite any attribute, including things like passwords. def populate_from_params(obj, params) return if obj.nil? @@ -23,7 +23,7 @@ def populate_from_params(obj, params) if obj.is_a?(LinkedData::Models::Base) obj.bring_remaining if obj.exist? no_writable_attributes = obj.class.attributes(:all) - obj.class.attributes - params = params.reject {|k,v| no_writable_attributes.include? 
k.to_sym} + params = params.reject { |k, v| no_writable_attributes.include? k.to_sym } end params.each do |attribute, value| next if value.nil? @@ -63,7 +63,7 @@ def populate_from_params(obj, params) elsif attr_cls && not_hash_or_array || (attr_cls && not_array_of_hashes) # Replace the initial value with the object, handling Arrays as appropriate if value.is_a?(Array) - value = value.map {|e| attr_cls.find(uri_as_needed(e)).include(attr_cls.attributes).first} + value = value.map { |e| attr_cls.find(uri_as_needed(e)).include(attr_cls.attributes).first } elsif !value.nil? value = attr_cls.find(uri_as_needed(value)).include(attr_cls.attributes).first end @@ -72,6 +72,7 @@ def populate_from_params(obj, params) if value.is_a?(Array) retrieved_values = [] value.each do |e| + e = e.to_h retrieved_value = attr_cls.where(e.symbolize_keys).first if retrieved_value retrieved_values << retrieved_value @@ -80,7 +81,7 @@ def populate_from_params(obj, params) end end else - retrieved_values = attr_cls.where(value.symbolize_keys).to_a + retrieved_values = attr_cls.where(value.to_h.symbolize_keys).to_a unless retrieved_values retrieved_values = populate_from_params(attr_cls.new, e.symbolize_keys).save end @@ -89,7 +90,7 @@ def populate_from_params(obj, params) elsif attribute_settings && attribute_settings[:enforce] && attribute_settings[:enforce].include?(:date_time) # TODO: Remove this awful hack when obj.class.model_settings[:range][attribute] contains DateTime class is_array = value.is_a?(Array) - value = Array(value).map{ |v| DateTime.parse(v) } + value = Array(value).map { |v| DateTime.parse(v) } value = value.first unless is_array value elsif attribute_settings && attribute_settings[:enforce] && attribute_settings[:enforce].include?(:uri) && attribute_settings[:enforce].include?(:list) @@ -157,9 +158,19 @@ def halt(*response) status = obj obj = nil end - status, obj = response.first, response.last if response.length == 2 - status, headers, obj = response.first, response[1], 
response.last if response.length == 3 - if obj.is_a?(Rack::File) # Avoid the serializer when returning files + + if response.length == 2 + status = response.first + obj = response.last + end + + if response.length == 3 + status = response.first + headers = response[1] + obj = response.last + end + + if obj.is_a?(Rack::Files) || obj.is_a?(Rack::Files::Iterator) # Avoid the serializer when returning files super(response) else super(LinkedData::Serializer.build_response(@env, status: status, headers: headers, ld_object: obj)) @@ -184,7 +195,7 @@ def error(*message) # Look for the includes parameter and provide a formatted list of attributes def includes_param if @params["display"] - return @params["display"].split(",").map {|e| e.to_sym} + return @params["display"].split(",").map { |e| e.to_sym } end Array.new end @@ -192,14 +203,14 @@ def includes_param ## # Look for the ontologies acronym and give back a formatted list of ontolody id uris # This can be called without passing an argument and it will use the values from the current request - def ontologies_param(params=nil) + def ontologies_param(params = nil) params ||= @params if params["ontologies"] # Get list - ontologies = params["ontologies"].split(",").map {|o| o.strip} + ontologies = params["ontologies"].split(",").map { |o| o.strip } # When they aren't URIs, make them URIs - ontologies.map! {|o| o.start_with?("http://") ? replace_url_prefix(o) : ontology_uri_from_acronym(o)} + ontologies.map! { |o| o.start_with?("http://") ? replace_url_prefix(o) : ontology_uri_from_acronym(o) } if ontologies.include? nil error 404, "The ontologies parameter `[#{params["ontologies"]}]` includes non-existent acronyms. Notice that acronyms are case sensitive." 
end @@ -208,7 +219,7 @@ def ontologies_param(params=nil) Array.new end - def restricted_ontologies(params=nil) + def restricted_ontologies(params = nil) params ||= @params found_onts = false @@ -237,23 +248,23 @@ def restricted_ontologies(params=nil) return onts end - def restricted_ontologies_to_acronyms(params=nil, onts=nil) + def restricted_ontologies_to_acronyms(params = nil, onts = nil) onts ||= restricted_ontologies(params) - return onts.map {|o| o.acronym } + return onts.map { |o| o.acronym } end - def ontologies_param_to_acronyms(params=nil) + def ontologies_param_to_acronyms(params = nil) ontResourceIds = ontologies_param(params) - return ontResourceIds.map { |ontResourceId| ontResourceId.to_s.split('/')[-1]} + return ontResourceIds.map { |ontResourceId| ontResourceId.to_s.split('/')[-1] } end ## # Get semantic types parameter in the form [semantic_types=T099,T085,T345] - def semantic_types_param(params=nil) + def semantic_types_param(params = nil) params ||= @params if params["semantic_types"] - semanticTypes = params["semantic_types"].split(",").map {|o| o.strip} + semanticTypes = params["semantic_types"].split(",").map { |o| o.strip } return semanticTypes end Array.new @@ -261,21 +272,21 @@ def semantic_types_param(params=nil) ## # Get cui parameter in the form [cui=C0302369,C0522224,C0176617] - def cui_param(params=nil) + def cui_param(params = nil) params ||= @params if params["cui"] - cui = params["cui"].split(",").map {|o| o.strip} + cui = params["cui"].split(",").map { |o| o.strip } return cui end Array.new end # validates month for 1-12 or 01-09 - def month_param(params=nil) + def month_param(params = nil) params ||= @params if params["month"] month = params["month"].strip - if %r{(?<month>^(0[1-9]|[1-9]|1[0-2])$)}x === month + if /(?<month>^(0[1-9]|[1-9]|1[0-2])$)/x === month return month.to_i.to_s end end @@ -283,11 +294,11 @@ def month_param(params=nil) end # validates year for starting with 1 or 2 and containing 4 digits - def year_param(params=nil) + def 
year_param(params = nil) params ||= @params if params["year"] year = params["year"].strip - if %r{(?<year>^([1-2]\d{3})$)}x === year + if /(?<year>^([1-2]\d{3})$)/x === year return year.to_i.to_s end end @@ -327,14 +338,14 @@ def ontology_from_acronym(acronym) def ontology_objects_from_params(params = nil) ontologies = Set.new(ontologies_param(params)) all_onts = LinkedData::Models::Ontology.where.include(LinkedData::Models::Ontology.goo_attrs_to_load).to_a - all_onts.select {|o| ontologies.include?(o.id.to_s)} + all_onts.select { |o| ontologies.include?(o.id.to_s) } end def ontology_uri_acronym_map cached_map = naive_expiring_cache_read(__method__) return cached_map if cached_map map = {} - LinkedData::Models::Ontology.where.include(:acronym).all.each {|o| map[o.acronym] = o.id.to_s} + LinkedData::Models::Ontology.where.include(:acronym).all.each { |o| map[o.acronym] = o.id.to_s } naive_expiring_cache_write(__method__, map) map end @@ -343,7 +354,7 @@ def acronym_ontology_uri_map cached_map = naive_expiring_cache_read(__method__) return cached_map if cached_map map = {} - LinkedData::Models::Ontology.where.include(:acronym).all.each {|o| map[o.id.to_s] = o.acronym} + LinkedData::Models::Ontology.where.include(:acronym).all.each { |o| map[o.id.to_s] = o.acronym } naive_expiring_cache_write(__method__, map) map end @@ -381,10 +392,10 @@ def retrieve_latest_submissions(options = {}) def get_ontology_and_submission ont = Ontology.find(@params["ontology"]) - .include(:acronym, :administeredBy, :acl, :viewingRestriction) - .include(submissions: - [:submissionId, submissionStatus: [:code], ontology: [:acronym], metrics: :classes]) - .first + .include(:acronym, :administeredBy, :acl, :viewingRestriction) + .include(submissions: + [:submissionId, submissionStatus: [:code], ontology: [:acronym], metrics: :classes]) + .first error(404, "Ontology '#{@params["ontology"]}' not found.") if ont.nil? 
check_access(ont) if LinkedData.settings.enable_security # Security check submission = nil @@ -392,7 +403,7 @@ def get_ontology_and_submission submission = ont.submission(@params[:ontology_submission_id]) if submission.nil? error 404, - "You must provide an existing submission ID for the #{@params["acronym"]} ontology" + "You must provide an existing submission ID for the #{@params["acronym"]} ontology" end else submission = ont.latest_submission(status: [:RDF]) @@ -418,28 +429,29 @@ def include_param_contains?(str) return class_params_include || params_include end - ## # Checks to see if the request has a file attached def request_has_file? - @params.any? {|p,v| v.instance_of?(Hash) && v.key?(:tempfile) && v[:tempfile].instance_of?(Tempfile)} + @params.any? { |p, v| v.instance_of?(Hash) && v.key?(:tempfile) && v[:tempfile].instance_of?(Tempfile) } end ## # Looks for a file that was included as a multipart in a request def file_from_request - @params.each do |param, value| - if value.instance_of?(Hash) && value.has_key?(:tempfile) && value[:tempfile].instance_of?(Tempfile) + @params.each_value do |value| + if value.is_a?(Hash) && value.key?(:tempfile) && value[:tempfile].instance_of?(Tempfile) return value[:filename], value[:tempfile] end end - return nil, nil + + [nil, nil] end + private def naive_expiring_cache_write(key, object, timeout = 60) @naive_expiring_cache ||= {} - @naive_expiring_cache[key] = {timeout: Time.now + timeout, object: object} + @naive_expiring_cache[key] = { timeout: Time.now + timeout, object: object } end def naive_expiring_cache_read(key) @@ -450,7 +462,6 @@ def naive_expiring_cache_read(key) return object[:object] end - def save_submission_language(submission, language_property = :naturalLanguage) request_lang = RequestStore.store[:requested_lang] @@ -463,7 +474,7 @@ def save_submission_language(submission, language_property = :naturalLanguage) collection_natural_language = collection_natural_language.values.flatten if 
collection_natural_language.is_a?(Hash) submissions_language = collection_natural_language.map { |natural_language| natural_language.to_s.split('/').last[0..1] }.compact.first - RequestStore.store[:requested_lang] = submissions_language if submissions_language + RequestStore.store[:requested_lang] = submissions_language if submissions_language end end diff --git a/init.rb b/init.rb index 44a1eef5..8ab393b3 100644 --- a/init.rb +++ b/init.rb @@ -1,34 +1,29 @@ -# Recursively require files from directories and their sub-directories +# Recursively require files from directories def require_dir(dir) - Dir.glob("#{dir}/*.rb").each {|f| require_relative f } - Dir.glob("#{dir}/*/").each {|d| require_dir(d.gsub(/\/+$/, '')) } + Dir.glob("#{dir}/**/*.rb").sort.each { |f| require_relative f } end -# Require controller base files -require_relative "controllers/application_controller" +# Require core files +require_relative 'controllers/application_controller' +require_dir('lib') +require_dir('helpers') +require_dir('models') +require_dir('controllers') -# Require known directories -require_dir("lib") -require_dir("helpers") -require_dir("models") -require_dir("controllers") +# Add optional trailing slash to routes +Sinatra.register do + def self.registered(app) + app.routes.each do |verb, routes| + routes.each do |route| + pattern = route[0] + next if pattern.to_s.end_with?('/') -## -# Look for routes without an optional trailing slash or existing trailing slash -# and add the optional trailing slash so both /ontologies/ and /ontologies works -def rewrite_routes_trailing_slash - trailing_slash = Regexp.new(/.*\/\?\\z/) - no_trailing_slash = Regexp.new(/(.*)\\z\//) - Sinatra::Application.routes.each do |method, routes| - routes.each do |r| - route_regexp_str = r[0].inspect - if trailing_slash.match(route_regexp_str) - next - else - new_route = route_regexp_str.gsub(no_trailing_slash, "\\1\\/?\\z/") - r[0] = eval(new_route) + http_verb = verb.to_s.downcase + 
app.public_send(http_verb, "#{pattern}/") do + pass unless request.path_info.end_with?('/') + redirect "#{request.path_info}/", 301 + end end end end end -rewrite_routes_trailing_slash() \ No newline at end of file diff --git a/lib/rack/cube_reporter.rb b/lib/rack/cube_reporter.rb deleted file mode 100644 index d6694b87..00000000 --- a/lib/rack/cube_reporter.rb +++ /dev/null @@ -1,41 +0,0 @@ -require 'cube' - -## -# This enables collection of request statistics for anaylsis via cube. -# A cube server is required. See http://square.github.io/cube/ for more info. -module Rack - class CubeReporter - - def initialize(app = nil, options = {}) - host = options[:cube_host] || "localhost" - port = options[:cube_port] || 1180 - @app = app - @cube = ::Cube::Client.new(host, port) - end - - def call(env) - start = Time.now - data = @app.call(env) - finish = Time.now - cache_hit = !data[1]["X-Rack-Cache"].nil? && data[1]["X-Rack-Cache"].eql?("fresh") - user = env["REMOTE_USER"] - apikey = user.apikey if user - username = user.username if user - req_data = { - duration_ms: ((finish - start)*1000).ceil, - path: env["REQUEST_PATH"], - cache_hit: cache_hit, - status: data[0], - user: { - apikey: apikey, - username: username, - ip: env["REMOTE_ADDR"], - user_agent: env["HTTP_USER_AGENT"] - } - } - @cube.send "ontologies_api_request", DateTime.now, req_data - data - end - - end -end \ No newline at end of file diff --git a/mise.toml b/mise.toml index 83aa57a8..a050f48b 100644 --- a/mise.toml +++ b/mise.toml @@ -1,2 +1,2 @@ [tools] -ruby = "2.7.8" +ruby = "3.1.0" diff --git a/test/controllers/test_batch_controller.rb b/test/controllers/test_batch_controller.rb index ca37b156..72d0a98c 100644 --- a/test/controllers/test_batch_controller.rb +++ b/test/controllers/test_batch_controller.rb @@ -22,7 +22,7 @@ def test_class_batch_one_ontology "display" => "prefLabel,synonym" } } - post "/batch/", call_params + post "/batch", call_params assert last_response.ok? 
data = MultiJson.load(last_response.body) classes = data["http://www.w3.org/2002/07/owl#Class"] @@ -48,7 +48,7 @@ def test_class_wrong_params "display" => "prefLabel,synonym" } } - post "/batch/", call_params + post "/batch", call_params assert last_response.status = 422 end @@ -72,7 +72,7 @@ def test_class_batch_multiple "display" => "prefLabel" } } - post "/batch/", call_params + post "/batch", call_params assert last_response.ok? data = MultiJson.load(last_response.body) classes = data["http://www.w3.org/2002/07/owl#Class"] @@ -101,7 +101,7 @@ def test_class_all_bro "display" => "prefLabel" } } - post "/batch/", call_params + post "/batch", call_params assert last_response.ok? # refute last_response.ok? data = MultiJson.load(last_response.body) diff --git a/test/controllers/test_classes_controller.rb b/test/controllers/test_classes_controller.rb index 323d241d..2def5e89 100644 --- a/test/controllers/test_classes_controller.rb +++ b/test/controllers/test_classes_controller.rb @@ -420,7 +420,7 @@ def test_calls_not_found escaped_cls= CGI.escape("http://my.bogus.inexistent.class/that/this/is") #404 on ontology - get "/ontologies/NO-ONT-ZZZZZZ/classes/" + get "/ontologies/NO-ONT-ZZZZZZ/classes" assert last_response.status == 404 get "/ontologies/NO-ONT-ZZZZZZ/classes/#{escaped_cls}/children" assert last_response.status == 404 diff --git a/test/controllers/test_external_mappings_controller.rb b/test/controllers/test_external_mappings_controller.rb index 0a18bf63..1bd0eaf5 100644 --- a/test/controllers/test_external_mappings_controller.rb +++ b/test/controllers/test_external_mappings_controller.rb @@ -65,7 +65,7 @@ def delete_external_mappings creator: "tim" } - post "/mappings/", MultiJson.dump(mapping), "CONTENT_TYPE" => "application/json" + post "/mappings", MultiJson.dump(mapping), "CONTENT_TYPE" => "application/json" assert last_response.status == 201, "Error creating the external mapping: #{last_response.body}" response = MultiJson.load(last_response.body) diff 
--git a/test/controllers/test_mappings_controller.rb b/test/controllers/test_mappings_controller.rb index 2ab61261..9aa76b23 100644 --- a/test/controllers/test_mappings_controller.rb +++ b/test/controllers/test_mappings_controller.rb @@ -124,7 +124,7 @@ def commun_created_mappings_test(created, mapping_term_a, mapping_term_b, relati end assert rest_count == 3 - get "/mappings/recent/" + get "/mappings/recent" assert last_response.status == 200 response = MultiJson.load(last_response.body) assert (response.length == 5) @@ -191,7 +191,7 @@ def mappings_between_ontologies ] ontologies_params.each do |ontologies| ont1, ont2 = ontologies.split(",") - get "/mappings/?ontologies=#{ontologies}" + get "/mappings?ontologies=#{ontologies}" assert last_response.ok? mappings = MultiJson.load(last_response.body) #pages @@ -284,7 +284,7 @@ def create_mapping created = [] mappings.each_with_index do |mapping, i| - post '/mappings/', + post '/mappings', MultiJson.dump(mapping), "CONTENT_TYPE" => "application/json" @@ -315,7 +315,7 @@ def delete_mapping created = [] mappings.each do |mapping| - post "/mappings/", + post "/mappings", MultiJson.dump(mapping), "CONTENT_TYPE" => "application/json" @@ -351,7 +351,7 @@ def mappings_statistics end NcboCron::Models::QueryWarmer.new(Logger.new(TestLogFile.new)).run assert LinkedData::Models::MappingCount.where.all.length > 2 - get "/mappings/statistics/ontologies/" + get "/mappings/statistics/ontologies" assert last_response.ok? 
stats = MultiJson.load(last_response.body) data = {"CNO-TEST-MAP-0"=>19, diff --git a/test/controllers/test_ontologies_controller.rb b/test/controllers/test_ontologies_controller.rb index d05959e8..97005339 100644 --- a/test/controllers/test_ontologies_controller.rb +++ b/test/controllers/test_ontologies_controller.rb @@ -97,7 +97,7 @@ def test_create_ontology assert last_response.status == 201 delete "/ontologies/#{@@acronym}" - post "/ontologies/", @@file_params.merge(acronym: @@acronym) + post "/ontologies", @@file_params.merge(acronym: @@acronym) assert last_response.status == 201 end diff --git a/test/controllers/test_ontology_submissions_controller.rb b/test/controllers/test_ontology_submissions_controller.rb index 670658a7..f9130c85 100644 --- a/test/controllers/test_ontology_submissions_controller.rb +++ b/test/controllers/test_ontology_submissions_controller.rb @@ -10,27 +10,27 @@ def before_suite end def self._set_vars - @@acronym = "TST" - @@name = "Test Ontology" - @@test_file = File.expand_path("../../data/ontology_files/BRO_v3.1.owl", __FILE__) + @@acronym = 'TST' + @@name = 'Test Ontology' + @@test_file = File.expand_path('../../data/ontology_files/BRO_v3.1.owl', __FILE__) @@file_params = { name: @@name, - hasOntologyLanguage: "OWL", - administeredBy: "tim", - "file" => Rack::Test::UploadedFile.new(@@test_file, ""), + hasOntologyLanguage: 'OWL', + administeredBy: 'tim', + 'file' => Rack::Test::UploadedFile.new(@@test_file, ''), released: DateTime.now.to_s, - contact: [{name: "test_name", email: "test3@example.org"}], + contact: [{name: 'test_name', email: 'test3@example.org'}], URI: 'https://test.com/test', status: 'production', description: 'ontology description' } - @@status_uploaded = "UPLOADED" - @@status_rdf = "RDF" + @@status_uploaded = 'UPLOADED' + @@status_rdf = 'RDF' end def self._create_user - username = "tim" - test_user = User.new(username: username, email: "#{username}@example.org", password: "password") + username = 'tim' + test_user = 
User.new(username: username, email: "#{username}@example.org", password: 'password') test_user.save if test_user.valid? @@user = test_user.valid? ? test_user : User.find(username).first end @@ -47,7 +47,7 @@ def setup end def test_submissions_for_given_ontology - num_onts_created, created_ont_acronyms = create_ontologies_and_submissions(ont_count: 1) + _, created_ont_acronyms = create_ontologies_and_submissions(ont_count: 1) ontology = created_ont_acronyms.first get "/ontologies/#{ontology}/submissions" assert last_response.ok? @@ -59,104 +59,104 @@ def test_submissions_for_given_ontology end def test_create_new_submission_missing_file_and_pull_location - post "/ontologies/#{@@acronym}/submissions", name: @@name, hasOntologyLanguage: "OWL" - assert_equal(400, last_response.status, msg=get_errors(last_response)) - assert MultiJson.load(last_response.body)["errors"] + post "/ontologies/#{@@acronym}/submissions", name: @@name, hasOntologyLanguage: 'OWL' + assert_equal(400, last_response.status, get_errors(last_response)) + assert MultiJson.load(last_response.body)['errors'] end def test_create_new_submission_file post "/ontologies/#{@@acronym}/submissions", @@file_params - assert_equal(201, last_response.status, msg=get_errors(last_response)) + assert_equal(201, last_response.status, get_errors(last_response)) sub = MultiJson.load(last_response.body) get "/ontologies/#{@@acronym}" ont = MultiJson.load(last_response.body) - assert ont["acronym"].eql?(@@acronym) + assert ont['acronym'].eql?(@@acronym) # Cleanup delete "/ontologies/#{@@acronym}/submissions/#{sub['submissionId']}" - assert_equal(204, last_response.status, msg=get_errors(last_response)) + assert_equal(204, last_response.status, get_errors(last_response)) end def test_create_new_ontology_submission post "/ontologies/#{@@acronym}/submissions", @@file_params - assert_equal(201, last_response.status, msg=get_errors(last_response)) + assert_equal(201, last_response.status, get_errors(last_response)) # Cleanup 
sub = MultiJson.load(last_response.body) delete "/ontologies/#{@@acronym}/submissions/#{sub['submissionId']}" - assert_equal(204, last_response.status, msg=get_errors(last_response)) + assert_equal(204, last_response.status, get_errors(last_response)) end def test_patch_ontology_submission - num_onts_created, created_ont_acronyms = create_ontologies_and_submissions(ont_count: 1) + _, created_ont_acronyms = create_ontologies_and_submissions(ont_count: 1) ont = Ontology.find(created_ont_acronyms.first).include(submissions: [:submissionId, ontology: :acronym]).first assert(ont.submissions.length > 0) submission = ont.submissions[0] - new_values = {description: "Testing new description changes"} - patch "/ontologies/#{submission.ontology.acronym}/submissions/#{submission.submissionId}", MultiJson.dump(new_values), "CONTENT_TYPE" => "application/json" - assert_equal(204, last_response.status, msg=get_errors(last_response)) + new_values = {description: 'Testing new description changes'} + patch "/ontologies/#{submission.ontology.acronym}/submissions/#{submission.submissionId}", MultiJson.dump(new_values), 'CONTENT_TYPE' => 'application/json' + assert_equal(204, last_response.status, get_errors(last_response)) get "/ontologies/#{submission.ontology.acronym}/submissions/#{submission.submissionId}" submission = MultiJson.load(last_response.body) - assert submission["description"].eql?("Testing new description changes") + assert submission['description'].eql?('Testing new description changes') end def test_delete_ontology_submission - num_onts_created, created_ont_acronyms = create_ontologies_and_submissions(ont_count: 1, random_submission_count: false, submission_count: 5) + _, created_ont_acronyms = create_ontologies_and_submissions(ont_count: 1, random_submission_count: false, submission_count: 5) acronym = created_ont_acronyms.first submission_to_delete = (1..5).to_a.shuffle.first delete "/ontologies/#{acronym}/submissions/#{submission_to_delete}" - assert_equal(204, 
last_response.status, msg=get_errors(last_response)) + assert_equal(204, last_response.status, get_errors(last_response)) get "/ontologies/#{acronym}/submissions/#{submission_to_delete}" - assert_equal(404, last_response.status, msg=get_errors(last_response)) + assert_equal(404, last_response.status, get_errors(last_response)) end def test_download_submission num_onts_created, created_ont_acronyms, onts = create_ontologies_and_submissions(ont_count: 1, submission_count: 1, process_submission: false) - assert_equal(1, num_onts_created, msg="Failed to create 1 ontology?") - assert_equal(1, onts.length, msg="Failed to create 1 ontology?") + assert_equal(1, num_onts_created, 'Failed to create 1 ontology?') + assert_equal(1, onts.length, 'Failed to create 1 ontology?') ont = onts.first ont.bring(:submissions, :acronym) - assert_instance_of(Ontology, ont, msg="ont is not a #{Ontology.class}") - assert_equal(1, ont.submissions.length, msg="Failed to create 1 ontology submission?") + assert_instance_of(Ontology, ont, "ont is not a #{Ontology.class}") + assert_equal(1, ont.submissions.length, 'Failed to create 1 ontology submission?') sub = ont.submissions.first sub.bring(:submissionId) - assert_instance_of(OntologySubmission, sub, msg="sub is not a #{OntologySubmission.class}") + assert_instance_of(OntologySubmission, sub, "sub is not a #{OntologySubmission.class}") # Clear restrictions on downloads LinkedData::OntologiesAPI.settings.restrict_download = [] # Download the specific submission get "/ontologies/#{ont.acronym}/submissions/#{sub.submissionId}/download" - assert_equal(200, last_response.status, msg='failed download for specific submission : ' + get_errors(last_response)) + assert_equal(200, last_response.status, 'failed download for specific submission : ' + get_errors(last_response)) # Add restriction on download acronym = created_ont_acronyms.first LinkedData::OntologiesAPI.settings.restrict_download = [acronym] # Try download get 
"/ontologies/#{ont.acronym}/submissions/#{sub.submissionId}/download" # download should fail with a 403 status - assert_equal(403, last_response.status, msg='failed to restrict download for ontology : ' + get_errors(last_response)) + assert_equal(403, last_response.status, 'failed to restrict download for ontology : ' + get_errors(last_response)) # Clear restrictions on downloads LinkedData::OntologiesAPI.settings.restrict_download = [] # see also test_ontologies_controller::test_download_ontology # Test downloads of nonexistent ontology - get "/ontologies/BOGUS66/submissions/55/download" - assert_equal(422, last_response.status, "failed to handle downloads of nonexistent ontology" + get_errors(last_response)) + get '/ontologies/BOGUS66/submissions/55/download' + assert_equal(422, last_response.status, 'failed to handle downloads of nonexistent ontology' + get_errors(last_response)) end def test_download_ontology_submission_rdf - count, created_ont_acronyms, onts = create_ontologies_and_submissions(ont_count: 1, submission_count: 1, process_submission: true) + _, created_ont_acronyms, onts = create_ontologies_and_submissions(ont_count: 1, submission_count: 1, process_submission: true) acronym = created_ont_acronyms.first ont = onts.first sub = ont.submissions.first get "/ontologies/#{acronym}/submissions/#{sub.submissionId}/download?download_format=rdf" - assert_equal(200, last_response.status, msg="Download failure for '#{acronym}' ontology: " + get_errors(last_response)) + assert_equal(200, last_response.status, "Download failure for '#{acronym}' ontology: " + get_errors(last_response)) # Download should fail with a 400 status. 
get "/ontologies/#{acronym}/submissions/#{sub.submissionId}/download?download_format=csr" - assert_equal(400, last_response.status, msg="Download failure for '#{acronym}' ontology: " + get_errors(last_response)) + assert_equal(400, last_response.status, "Download failure for '#{acronym}' ontology: " + get_errors(last_response)) end def test_download_acl_only - count, created_ont_acronyms, onts = create_ontologies_and_submissions(ont_count: 1, submission_count: 1, process_submission: false) + _, created_ont_acronyms, onts = create_ontologies_and_submissions(ont_count: 1, submission_count: 1, process_submission: false) acronym = created_ont_acronyms.first ont = onts.first.bring_remaining ont.bring(:submissions) @@ -165,65 +165,65 @@ def test_download_acl_only begin allowed_user = User.new({ - username: "allowed", - email: "test4@example.org", - password: "12345" + username: 'allowed', + email: 'test4@example.org', + password: '12345' }) allowed_user.save blocked_user = User.new({ - username: "blocked", - email: "test5@example.org", - password: "12345" + username: 'blocked', + email: 'test5@example.org', + password: '12345' }) blocked_user.save ont.acl = [allowed_user] - ont.viewingRestriction = "private" + ont.viewingRestriction = 'private' ont.save LinkedData.settings.enable_security = true get "/ontologies/#{acronym}/submissions/#{sub.submissionId}/download?apikey=#{allowed_user.apikey}" - assert_equal(200, last_response.status, msg="User who is in ACL couldn't download ontology") + assert_equal(200, last_response.status, "User who is in ACL couldn't download ontology") get "/ontologies/#{acronym}/submissions/#{sub.submissionId}/download?apikey=#{blocked_user.apikey}" - assert_equal(403, last_response.status, msg="User who isn't in ACL could download ontology") + assert_equal(403, last_response.status, "User who isn't in ACL could download ontology") admin = ont.administeredBy.first admin.bring(:apikey) get 
"/ontologies/#{acronym}/submissions/#{sub.submissionId}/download?apikey=#{admin.apikey}" - assert_equal(200, last_response.status, msg="Admin couldn't download ontology") + assert_equal(200, last_response.status, "Admin couldn't download ontology") ensure LinkedData.settings.enable_security = false - del = User.find("allowed").first + del = User.find('allowed').first del.delete if del - del = User.find("blocked").first + del = User.find('blocked').first del.delete if del end end def test_ontology_submissions_access_controller - count, created_ont_acronyms, onts = create_ontologies_and_submissions(ont_count: 2, submission_count: 1, process_submission: false) + _, created_ont_acronyms, onts = create_ontologies_and_submissions(ont_count: 2, submission_count: 1, process_submission: false) # case first submission is private - acronym = created_ont_acronyms.first + created_ont_acronyms.first ont = onts.first.bring_remaining begin allowed_user = User.new({ - username: "allowed", - email: "test@example.org", - password: "12345" + username: 'allowed', + email: 'test@example.org', + password: '12345' }) allowed_user.save blocked_user = User.new({ - username: "blocked", - email: "test1254@example.org", - password: "12345" + username: 'blocked', + email: 'test1254@example.org', + password: '12345' }) blocked_user.save ont.acl = [allowed_user] - ont.viewingRestriction = "private" + ont.viewingRestriction = 'private' ont.save LinkedData.settings.enable_security = true @@ -239,34 +239,34 @@ def test_ontology_submissions_access_controller assert_equal 1, submissions.size ensure LinkedData.settings.enable_security = false - del = User.find("allowed").first + del = User.find('allowed').first del.delete if del - del = User.find("blocked").first + del = User.find('blocked').first del.delete if del end end def test_submissions_pagination - num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: 2, submission_count: 2) + 
create_ontologies_and_submissions(ont_count: 2, submission_count: 2) - get "/submissions" + get '/submissions' assert last_response.ok? submissions = MultiJson.load(last_response.body) assert_equal 2, submissions.length - get "/submissions?page=1&pagesize=1" + get '/submissions?page=1&pagesize=1' assert last_response.ok? submissions = MultiJson.load(last_response.body) - assert_equal 1, submissions["collection"].length + assert_equal 1, submissions['collection'].length end def test_submissions_pagination_filter num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: 10, submission_count: 1) - group1 = LinkedData::Models::Group.new(acronym: 'group-1', name: "Test Group 1").save - group2 = LinkedData::Models::Group.new(acronym: 'group-2', name: "Test Group 2").save - category1 = LinkedData::Models::Category.new(acronym: 'category-1', name: "Test Category 1").save - category2 = LinkedData::Models::Category.new(acronym: 'category-2', name: "Test Category 2").save + group1 = LinkedData::Models::Group.new(acronym: 'group-1', name: 'Test Group 1').save + group2 = LinkedData::Models::Group.new(acronym: 'group-2', name: 'Test Group 2').save + category1 = LinkedData::Models::Category.new(acronym: 'category-1', name: 'Test Category 1').save + category2 = LinkedData::Models::Category.new(acronym: 'category-2', name: 'Test Category 2').save ontologies1 = ontologies[0..5].each do |o| o.bring_remaining @@ -287,28 +287,28 @@ def test_submissions_pagination_filter # test filter by group and category get "/submissions?page=1&pagesize=100&group=#{group1.acronym}" assert last_response.ok? - assert_equal ontologies1.size, MultiJson.load(last_response.body)["collection"].length + assert_equal ontologies1.size, MultiJson.load(last_response.body)['collection'].length get "/submissions?page=1&pagesize=100&group=#{group2.acronym}" assert last_response.ok? 
- assert_equal ontologies2.size, MultiJson.load(last_response.body)["collection"].length + assert_equal ontologies2.size, MultiJson.load(last_response.body)['collection'].length get "/submissions?page=1&pagesize=100&hasDomain=#{category1.acronym}" assert last_response.ok? - assert_equal ontologies1.size, MultiJson.load(last_response.body)["collection"].length + assert_equal ontologies1.size, MultiJson.load(last_response.body)['collection'].length get "/submissions?page=1&pagesize=100&hasDomain=#{category2.acronym}" assert last_response.ok? - assert_equal ontologies2.size, MultiJson.load(last_response.body)["collection"].length + assert_equal ontologies2.size, MultiJson.load(last_response.body)['collection'].length get "/submissions?page=1&pagesize=100&hasDomain=#{category2.acronym}&group=#{group1.acronym}" assert last_response.ok? - assert_equal 0, MultiJson.load(last_response.body)["collection"].length + assert_equal 0, MultiJson.load(last_response.body)['collection'].length get "/submissions?page=1&pagesize=100&hasDomain=#{category2.acronym}&group=#{group2.acronym}" assert last_response.ok? - assert_equal ontologies2.size, MultiJson.load(last_response.body)["collection"].length + assert_equal ontologies2.size, MultiJson.load(last_response.body)['collection'].length ontologies3 = ontologies[9] ontologies3.bring_remaining ontologies3.group = [group1, group2] ontologies3.hasDomain = [category1, category2] - ontologies3.name = "name search test" + ontologies3.name = 'name search test' ontologies3.save # test search with acronym @@ -320,7 +320,7 @@ def test_submissions_pagination_filter get "/submissions?page=1&pagesize=100&acronym=#{acronym_search}" assert last_response.ok? 
submissions = MultiJson.load(last_response.body) - assert_equal count, submissions["collection"].length + assert_equal count, submissions['collection'].length end @@ -333,94 +333,94 @@ def test_submissions_pagination_filter get "/submissions?page=1&pagesize=100&name=#{name_search}" assert last_response.ok? submissions = MultiJson.load(last_response.body) - binding.pry unless submissions["collection"].length.eql?(count) - assert_equal count, submissions["collection"].length + binding.pry unless submissions['collection'].length.eql?(count) + assert_equal count, submissions['collection'].length end # test search with name and acronym # search by name - get "/submissions?page=1&pagesize=100&name=search&acronym=search" + get '/submissions?page=1&pagesize=100&name=search&acronym=search' assert last_response.ok? submissions = MultiJson.load(last_response.body) - assert_equal 1, submissions["collection"].length + assert_equal 1, submissions['collection'].length # search by acronym - get "/submissions?page=1&pagesize=100&name=9&acronym=9" + get '/submissions?page=1&pagesize=100&name=9&acronym=9' assert last_response.ok? submissions = MultiJson.load(last_response.body) - assert_equal 1, submissions["collection"].length + assert_equal 1, submissions['collection'].length # search by acronym or name - get "/submissions?page=1&pagesize=100&name=search&acronym=8" + get '/submissions?page=1&pagesize=100&name=search&acronym=8' assert last_response.ok? 
submissions = MultiJson.load(last_response.body) - assert_equal 2, submissions["collection"].length + assert_equal 2, submissions['collection'].length - ontologies.first.name = "sort by test" + ontologies.first.name = 'sort by test' ontologies.first.save sub = ontologies.first.latest_submission(status: :any).bring_remaining sub.status = 'retired' - sub.description = "234" + sub.description = '234' sub.creationDate = DateTime.yesterday.to_datetime sub.hasOntologyLanguage = LinkedData::Models::OntologyFormat.find('SKOS').first sub.save #test search with sort - get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&order_by=ontology_name" + get '/submissions?page=1&pagesize=100&acronym=tes&name=tes&order_by=ontology_name' assert last_response.ok? submissions = MultiJson.load(last_response.body) - refute_empty submissions["collection"] - assert_equal ontologies.map{|x| x.name}.sort, submissions["collection"].map{|x| x["ontology"]["name"]} + refute_empty submissions['collection'] + assert_equal ontologies.map{|x| x.name}.sort, submissions['collection'].map{|x| x['ontology']['name']} - get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&order_by=creationDate" + get '/submissions?page=1&pagesize=100&acronym=tes&name=tes&order_by=creationDate' assert last_response.ok? 
submissions = MultiJson.load(last_response.body) - refute_empty submissions["collection"] - assert_equal ontologies.map{|x| x.latest_submission(status: :any).bring(:creationDate).creationDate}.sort, submissions["collection"].map{|x| DateTime.parse(x["creationDate"])}.reverse + refute_empty submissions['collection'] + assert_equal ontologies.map{|x| x.latest_submission(status: :any).bring(:creationDate).creationDate}.sort, submissions['collection'].map{|x| DateTime.parse(x['creationDate'])}.reverse # test search with format - get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&hasOntologyLanguage=SKOS" + get '/submissions?page=1&pagesize=100&acronym=tes&name=tes&hasOntologyLanguage=SKOS' assert last_response.ok? submissions = MultiJson.load(last_response.body) - refute_empty submissions["collection"] - assert_equal 1, submissions["collection"].size + refute_empty submissions['collection'] + assert_equal 1, submissions['collection'].size - get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&hasOntologyLanguage=OWL" + get '/submissions?page=1&pagesize=100&acronym=tes&name=tes&hasOntologyLanguage=OWL' assert last_response.ok? submissions = MultiJson.load(last_response.body) - refute_empty submissions["collection"] - assert_equal ontologies.size-1 , submissions["collection"].size + refute_empty submissions['collection'] + assert_equal ontologies.size-1 , submissions['collection'].size # test ontology filter with submission filter attributes - get "/submissions?page=1&pagesize=100&acronym=tes&name=tes&group=group-2&category=category-2&hasOntologyLanguage=OWL" + get '/submissions?page=1&pagesize=100&acronym=tes&name=tes&group=group-2&category=category-2&hasOntologyLanguage=OWL' assert last_response.ok? 
submissions = MultiJson.load(last_response.body) - refute_empty submissions["collection"] - assert_equal ontologies2.size + 1 , submissions["collection"].size + refute_empty submissions['collection'] + assert_equal ontologies2.size + 1 , submissions['collection'].size # test ontology filter with status - get "/submissions?page=1&pagesize=100&status=retired" + get '/submissions?page=1&pagesize=100&status=retired' assert last_response.ok? submissions = MultiJson.load(last_response.body) - refute_empty submissions["collection"] - assert_equal 1 , submissions["collection"].size + refute_empty submissions['collection'] + assert_equal 1 , submissions['collection'].size - get "/submissions?page=1&pagesize=100&status=alpha,beta,production" + get '/submissions?page=1&pagesize=100&status=alpha,beta,production' assert last_response.ok? submissions = MultiJson.load(last_response.body) - refute_empty submissions["collection"] - assert_equal ontologies.size - 1 , submissions["collection"].size - get "/submissions?page=1&pagesize=100&description=234&acronym=234&name=234" + refute_empty submissions['collection'] + assert_equal ontologies.size - 1 , submissions['collection'].size + get '/submissions?page=1&pagesize=100&description=234&acronym=234&name=234' assert last_response.ok? 
submissions = MultiJson.load(last_response.body) - assert_equal 1 , submissions["collection"].size + assert_equal 1 , submissions['collection'].size end def test_submissions_default_includes ontology_count = 5 - num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: ontology_count, submission_count: 1, submissions_to_process: []) + _, created_ont_acronyms, = create_ontologies_and_submissions(ont_count: ontology_count, submission_count: 1, submissions_to_process: []) submission_default_attributes = LinkedData::Models::OntologySubmission.hypermedia_settings[:serialize_default].map(&:to_s) - get("/submissions?display_links=false&display_context=false&include_status=ANY") + get('/submissions?display_links=false&display_context=false&include_status=ANY') assert last_response.ok? submissions = MultiJson.load(last_response.body) @@ -435,80 +435,59 @@ def test_submissions_default_includes assert(submissions.all? { |sub| submission_default_attributes.eql?(submission_keys(sub)) }) end + def test_submissions_all_includes ontology_count = 5 - num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: ontology_count, submission_count: 1, submissions_to_process: []) - def submission_all_attributes - attrs = OntologySubmission.goo_attrs_to_load([:all]) - embed_attrs = attrs.select { |x| x.is_a?(Hash) }.first - - attrs.delete_if { |x| x.is_a?(Hash) }.map(&:to_s) + embed_attrs.keys.map(&:to_s) - end - get("/submissions?include=all&display_links=false&display_context=false") - - assert last_response.ok? - submissions = MultiJson.load(last_response.body) - assert_equal ontology_count, submissions.size + _, created_ont_acronyms, = create_ontologies_and_submissions(ont_count: ontology_count, submission_count: 1, submissions_to_process: []) - assert(submissions.all? { |sub| submission_all_attributes.sort.eql?(submission_keys(sub).sort) }) - assert(submissions.all? 
{ |sub| sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])) }) + submission_all_attributes = begin + attrs = OntologySubmission.goo_attrs_to_load([:all]) + embed_attrs = attrs.select { |x| x.is_a?(Hash) }.first || {} + attrs.reject { |x| x.is_a?(Hash) }.map(&:to_s) + embed_attrs.keys.map(&:to_s) + end.sort - get("/ontologies/#{created_ont_acronyms.first}/submissions?include=all&display_links=false&display_context=false") + params = '?include=all&display_links=false&display_context=false' - assert last_response.ok? - submissions = MultiJson.load(last_response.body) - assert_equal 1, submissions.size - - assert(submissions.all? { |sub| submission_all_attributes.sort.eql?(submission_keys(sub).sort) }) - assert(submissions.all? { |sub| sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])) }) - - get("/ontologies/#{created_ont_acronyms.first}/latest_submission?include=all&display_links=false&display_context=false") - assert last_response.ok? - sub = MultiJson.load(last_response.body) + [ + "/submissions#{params}", + "/ontologies/#{created_ont_acronyms.first}/submissions#{params}", + "/ontologies/#{created_ont_acronyms.first}/latest_submission#{params}", + "/ontologies/#{created_ont_acronyms.first}/submissions/1#{params}" + ].each do |url| + get(url) + assert last_response.ok? - assert(submission_all_attributes.sort.eql?(submission_keys(sub).sort)) - assert(sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id]))) + response_body = MultiJson.load(last_response.body) + submissions = response_body.is_a?(Array) ? response_body : [response_body] - get("/ontologies/#{created_ont_acronyms.first}/submissions/1?include=all&display_links=false&display_context=false") - assert last_response.ok? 
- sub = MultiJson.load(last_response.body) - - assert(submission_all_attributes.sort.eql?(submission_keys(sub).sort)) - assert(sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id]))) + assert_equal(ontology_count, submissions.size) if url == "/submissions#{params}" + assert(submissions.all? { |sub| submission_all_attributes.eql?(submission_keys(sub).sort) }) + assert(submissions.all? { |sub| sub['contact']&.first&.keys.to_a.sort.eql?(%w[name email id].sort) }) + end end def test_submissions_custom_includes ontology_count = 5 - num_onts_created, created_ont_acronyms, ontologies = create_ontologies_and_submissions(ont_count: ontology_count, submission_count: 1, submissions_to_process: []) - include = 'ontology,contact,submissionId' - - get("/submissions?include=#{include}&display_links=false&display_context=false") + _, created_ont_acronyms, _ = create_ontologies_and_submissions(ont_count: ontology_count, submission_count: 1, submissions_to_process: []) + include_keys = %w[ontology contact submissionId] + params = "?include=#{include_keys.join(',')}&display_links=false&display_context=false" - assert last_response.ok? - submissions = MultiJson.load(last_response.body) - assert_equal ontology_count, submissions.size - assert(submissions.all? { |sub| include.split(',').eql?(submission_keys(sub)) }) - assert(submissions.all? { |sub| sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id])) }) - - get("/ontologies/#{created_ont_acronyms.first}/submissions?include=#{include}&display_links=false&display_context=false") - - assert last_response.ok? - submissions = MultiJson.load(last_response.body) - assert_equal 1, submissions.size - assert(submissions.all? { |sub| include.split(',').eql?(submission_keys(sub)) }) - assert(submissions.all? { |sub| sub["contact"] && (sub["contact"].first.nil? 
|| sub["contact"].first.keys.eql?(%w[name email id])) }) + [ + "/submissions#{params}", + "/ontologies/#{created_ont_acronyms.first}/submissions#{params}", + "/ontologies/#{created_ont_acronyms.first}/latest_submission#{params}", + "/ontologies/#{created_ont_acronyms.first}/submissions/1#{params}" + ].each do |url| + get(url) + assert last_response.ok? - get("/ontologies/#{created_ont_acronyms.first}/latest_submission?include=#{include}&display_links=false&display_context=false") - assert last_response.ok? - sub = MultiJson.load(last_response.body) - assert(include.split(',').eql?(submission_keys(sub))) - assert(sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id]))) + response_body = MultiJson.load(last_response.body) + submissions = response_body.is_a?(Array) ? response_body : [response_body] - get("/ontologies/#{created_ont_acronyms.first}/submissions/1?include=#{include}&display_links=false&display_context=false") - assert last_response.ok? - sub = MultiJson.load(last_response.body) - assert(include.split(',').eql?(submission_keys(sub))) - assert(sub["contact"] && (sub["contact"].first.nil? || sub["contact"].first.keys.eql?(%w[name email id]))) + assert_equal(ontology_count, submissions.size) if url == "/submissions#{params}" + assert(submissions.all? { |sub| include_keys.eql?(submission_keys(sub)) }) + assert(submissions.all? 
{ |sub| sub['contact']&.first&.keys&.sort.eql?(%w[name email id].sort) }) + end end def test_submissions_param_include @@ -519,7 +498,7 @@ def test_submissions_param_include end def test_submission_diff - num_onts_created, created_ont_acronyms, onts = create_ontologies_and_submissions(ont_count: 1, submission_count: 2, + _, _, onts = create_ontologies_and_submissions(ont_count: 1, submission_count: 2, process_submission: true, process_options: { process_rdf: true, extract_metadata: false, diff: true} ) diff --git a/test/helpers/test_application_helper.rb b/test/helpers/test_application_helper.rb index 2315a677..f1557243 100644 --- a/test/helpers/test_application_helper.rb +++ b/test/helpers/test_application_helper.rb @@ -9,22 +9,22 @@ def before_suite def test_it_escapes_html escaped_html = helper.h("http://testlink.com") - assert escaped_html.eql?("<a>http://testlink.com</a>") + assert_equal "<a>http://testlink.com</a>", escaped_html end def test_ontologies_param - ids = @@ontologies.map {|o| o.id.to_s} - acronyms = @@ontologies.map {|o| o.id.to_s.split("/").last} - params = {"ontologies" => acronyms.join(",")} + ids = @@ontologies.map { |o| o.id.to_s } + acronyms = @@ontologies.map { |o| o.id.to_s.split("/").last } + params = { "ontologies" => acronyms.join(",") } ontologies = ontologies_param(params) assert ontologies == ids - params = {"ontologies" => ids.join(",")} + params = { "ontologies" => ids.join(",") } ontologies = ontologies_param(params) assert ontologies == ids id_acronym = ids + acronyms - params = {"ontologies" => id_acronym.join(",")} + params = { "ontologies" => id_acronym.join(",") } ontologies = ontologies_param(params) assert ontologies == (ids + ids) end @@ -48,16 +48,16 @@ def test_acronym_from_ontology_uri def test_bad_accept_header_handling # This accept header contains '*; q=.2', which isn't valid according to the spec, should be '*/*; q=.2' bad_accept_header = "text/html, image/gif, image/jpeg, *; q=.2, */*; q=.2" - get "/ontologies", 
{}, {"HTTP_ACCEPT" => bad_accept_header} + get "/ontologies", {}, { "HTTP_ACCEPT" => bad_accept_header } assert last_response.status == 400 assert last_response.body.include?("Accept header `#{bad_accept_header}` is invalid") end def test_http_method_override - post "/ontologies", {}, {"HTTP_X_HTTP_METHOD_OVERRIDE" => "GET"} + post "/ontologies", {}, { "HTTP_X_HTTP_METHOD_OVERRIDE" => "GET" } assert last_response.ok? - acronyms = @@ontologies.map {|o| o.bring(:acronym).acronym}.sort - resp_acronyms = MultiJson.load(last_response.body).map {|o| o["acronym"]}.sort + acronyms = @@ontologies.map { |o| o.bring(:acronym).acronym }.sort + resp_acronyms = MultiJson.load(last_response.body).map { |o| o["acronym"] }.sort assert_equal acronyms, resp_acronyms end end diff --git a/test/helpers/test_slices_helper.rb b/test/helpers/test_slices_helper.rb index ae01aae7..7e8cfdac 100644 --- a/test/helpers/test_slices_helper.rb +++ b/test/helpers/test_slices_helper.rb @@ -79,7 +79,7 @@ def test_search_slices def test_mappings_slices LinkedData::Mappings.create_mapping_counts(Logger.new(TestLogFile.new)) - get "/mappings/statistics/ontologies/" + get "/mappings/statistics/ontologies" expected_result_without_slice = ["PARSED-0", "PARSED-1", @@ -90,7 +90,7 @@ def test_mappings_slices assert_equal expected_result_without_slice, MultiJson.load(last_response.body).keys.sort - get "http://#{@@group_acronym}/mappings/statistics/ontologies/" + get "http://#{@@group_acronym}/mappings/statistics/ontologies" expected_result_with_slice = ["PARSED-0", "http://data.bioontology.org/metadata/ExternalMappings", diff --git a/test/middleware/test_rack_attack.rb b/test/middleware/test_rack_attack.rb index 92b4d636..937c1bf0 100644 --- a/test/middleware/test_rack_attack.rb +++ b/test/middleware/test_rack_attack.rb @@ -40,7 +40,7 @@ def before_suite # Fork the process to create two servers. This isolates the Rack::Attack configuration, which makes other tests fail if included. 
@@pid1 = fork do require_relative '../../config/rack_attack' - Rack::Server.start( + Rackup::Server.start( config: RACK_CONFIG, Port: @@port1 ) @@ -50,7 +50,7 @@ def before_suite @@port2 = unused_port @@pid2 = fork do require_relative '../../config/rack_attack' - Rack::Server.start( + Rackup::Server.start( config: RACK_CONFIG, Port: @@port2 ) diff --git a/test/test_case.rb b/test/test_case.rb index e9b8956d..b1de654c 100644 --- a/test/test_case.rb +++ b/test/test_case.rb @@ -28,21 +28,24 @@ require 'multi_json' require 'oj' require 'json-schema' - +require 'minitest/reporters' +Minitest::Reporters.use! [Minitest::Reporters::SpecReporter.new(:color => true), Minitest::Reporters::MeanTimeReporter.new] MAX_TEST_REDIS_SIZE = 10_000 # Check to make sure you want to run if not pointed at localhost safe_hosts = Regexp.new(/localhost|-ut|ncbo-dev*|ncbo-unittest*/) + def safe_redis_hosts?(sh) return [LinkedData.settings.http_redis_host, - Annotator.settings.annotator_redis_host, - LinkedData.settings.goo_redis_host].select { |x| + Annotator.settings.annotator_redis_host, + LinkedData.settings.goo_redis_host].select { |x| x.match(sh) }.length == 3 end + unless LinkedData.settings.goo_host.match(safe_hosts) && - safe_redis_hosts?(safe_hosts) && - LinkedData.settings.search_server_url.match(safe_hosts) + safe_redis_hosts?(safe_hosts) && + LinkedData.settings.search_server_url.match(safe_hosts) print "\n\n================================== WARNING ==================================\n" print "** TESTS CAN BE DESTRUCTIVE -- YOU ARE POINTING TO A POTENTIAL PRODUCTION/STAGE SERVER **\n" print "Servers:\n" @@ -77,8 +80,7 @@ def count_pattern(pattern) def backend_4s_delete if count_pattern("?s ?p ?o") < 400000 puts 'clear backend & index' - raise StandardError, 'Too many triples in KB, does not seem right to run tests' unless - count_pattern('?s ?p ?o') < 400000 + raise StandardError, 'Too many triples in KB, does not seem right to run tests' unless count_pattern('?s ?p ?o') < 400000 
graphs = Goo.sparql_query_client.query("SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o . } }") graphs.each_solution do |sol| @@ -114,8 +116,6 @@ def after_all super end - - def _run_suite(suite, type) begin backend_4s_delete @@ -160,12 +160,11 @@ def app # @option options [TrueClass, FalseClass] :process_submission Parse the test ontology file def create_ontologies_and_submissions(options = {}) if options[:process_submission] && options[:process_options].nil? - options[:process_options] = { process_rdf: true, extract_metadata: false, generate_missing_labels: false } + options[:process_options] = { process_rdf: true, extract_metadata: false, generate_missing_labels: false } end LinkedData::SampleData::Ontology.create_ontologies_and_submissions(options) end - def agent_data(type: 'organization') schema_agencies = LinkedData::Models::AgentIdentifier::IDENTIFIER_SCHEMES.keys users = LinkedData::Models::User.all @@ -206,13 +205,13 @@ def delete_goo_models(gooModelArray) # @param [String] jsonData a json string that will be parsed by MultiJson.load # @param [String] jsonSchemaString a json schema string that will be parsed by MultiJson.load # @param [boolean] list set it true for jsonObj array of items to validate against jsonSchemaString - def validate_json(jsonData, jsonSchemaString, list=false) + def validate_json(jsonData, jsonSchemaString, list = false) schemaVer = :draft3 jsonObj = MultiJson.load(jsonData) jsonSchema = MultiJson.load(jsonSchemaString) assert( - JSON::Validator.validate(jsonSchema, jsonObj, :list => list, :version => schemaVer), - JSON::Validator.fully_validate(jsonSchema, jsonObj, :list => list, :version => schemaVer, :validate_schema => true).to_s + JSON::Validator.validate(jsonSchema, jsonObj, list: list, version: schemaVer), + JSON::Validator.fully_validate(jsonSchema, jsonObj, list: list, version: schemaVer, validate_schema: true).to_s ) end @@ -236,11 +235,10 @@ def self.enable_security LinkedData.settings.enable_security = true end - def 
self.reset_security(old_security = @@old_security_setting) + def self.reset_security(old_security = @@old_security_setting) LinkedData.settings.enable_security = old_security end - def self.make_admin(user) user.bring_remaining user.role = [LinkedData::Models::Users::Role.find(LinkedData::Models::Users::Role::ADMIN).first] @@ -261,6 +259,7 @@ def unused_port end private + def port_in_use?(port) server = TCPServer.new(port) server.close