diff --git a/.github/workflows/2.3.yml b/.github/workflows/2.3.yml new file mode 100644 index 000000000..2b60c8323 --- /dev/null +++ b/.github/workflows/2.3.yml @@ -0,0 +1,40 @@ +name: Ruby 2.3 +on: + push: + branches: + - 6.x + pull_request: + branches: + - 6.x +jobs: + tests: + env: + TEST_ES_SERVER: http://localhost:9200 + RAILS_VERSIONS: '4.0,5.0' + strategy: + fail-fast: false + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Increase system limits + run: | + sudo swapoff -a + sudo sysctl -w vm.swappiness=1 + sudo sysctl -w fs.file-max=262144 + sudo sysctl -w vm.max_map_count=262144 + - uses: elastic/elastic-github-actions/elasticsearch@master + with: + stack-version: 6.8.12 + - uses: ruby/setup-ruby@v1 + with: + ruby-version: 2.3 + - name: Bundle + run: | + sudo apt-get install libsqlite3-dev + gem install bundler + bundle install + bundle exec rake bundle:clean + bundle exec rake bundle:install + - name: Test + run: bundle exec rake test:all + diff --git a/.github/workflows/2.4.yml b/.github/workflows/2.4.yml new file mode 100644 index 000000000..17594a7eb --- /dev/null +++ b/.github/workflows/2.4.yml @@ -0,0 +1,40 @@ +name: Ruby 2.4 +on: + push: + branches: + - 6.x + pull_request: + branches: + - 6.x +jobs: + tests: + env: + TEST_ES_SERVER: http://localhost:9200 + RAILS_VERSIONS: '4.0,5.0' + strategy: + fail-fast: false + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Increase system limits + run: | + sudo swapoff -a + sudo sysctl -w vm.swappiness=1 + sudo sysctl -w fs.file-max=262144 + sudo sysctl -w vm.max_map_count=262144 + - uses: elastic/elastic-github-actions/elasticsearch@master + with: + stack-version: 6.8.12 + - uses: ruby/setup-ruby@v1 + with: + ruby-version: 2.4 + - name: Bundle + run: | + sudo apt-get install libsqlite3-dev + gem install bundler + bundle install + bundle exec rake bundle:clean + bundle exec rake bundle:install + - name: Test + run: bundle exec rake test:all + diff --git a/.github/workflows/2.5.yml b/.github/workflows/2.5.yml new file mode 100644 index 000000000..55b36affb --- /dev/null +++ b/.github/workflows/2.5.yml @@ -0,0 +1,40 @@ +name: Ruby 2.5 +on: + push: + branches: + - 6.x + pull_request: + branches: + - 6.x +jobs: + tests: + env: + TEST_ES_SERVER: http://localhost:9200 + RAILS_VERSIONS: '5.0,6.0' + strategy: + fail-fast: false + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Increase system limits + run: | + sudo swapoff -a + sudo sysctl -w vm.swappiness=1 + sudo sysctl -w fs.file-max=262144 + sudo sysctl -w vm.max_map_count=262144 + - uses: elastic/elastic-github-actions/elasticsearch@master + with: + stack-version: 6.8.12 + - uses: ruby/setup-ruby@v1 + with: + ruby-version: 2.5 + - name: Bundle + run: | + sudo apt-get install libsqlite3-dev + gem install bundler + bundle install + bundle exec rake bundle:clean + bundle exec rake bundle:install + - name: Test + run: bundle exec rake test:all + diff --git a/.github/workflows/2.6.yml b/.github/workflows/2.6.yml new file mode 100644 index 000000000..91cec1238 --- /dev/null +++ b/.github/workflows/2.6.yml @@ -0,0 +1,40 @@ +name: Ruby 2.6 +on: + push: + branches: + - 6.x + pull_request: + branches: + - 6.x +jobs: + tests: + env: + TEST_ES_SERVER: http://localhost:9200 + RAILS_VERSIONS: '5.0,6.0' + strategy: + fail-fast: false + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Increase system limits + run: | + sudo swapoff -a + sudo sysctl -w vm.swappiness=1 + sudo sysctl -w fs.file-max=262144 + sudo 
sysctl -w vm.max_map_count=262144 + - uses: elastic/elastic-github-actions/elasticsearch@master + with: + stack-version: 6.8.12 + - uses: ruby/setup-ruby@v1 + with: + ruby-version: 2.6 + - name: Bundle + run: | + sudo apt-get install libsqlite3-dev + gem install bundler + bundle install + bundle exec rake bundle:clean + bundle exec rake bundle:install + - name: Test + run: bundle exec rake test:all + diff --git a/.github/workflows/2.7.yml b/.github/workflows/2.7.yml new file mode 100644 index 000000000..c5c4df953 --- /dev/null +++ b/.github/workflows/2.7.yml @@ -0,0 +1,40 @@ +name: Ruby 2.7 +on: + push: + branches: + - 6.x + pull_request: + branches: + - 6.x +jobs: + tests: + env: + TEST_ES_SERVER: http://localhost:9200 + RAILS_VERSIONS: '5.0,6.0' + strategy: + fail-fast: false + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Increase system limits + run: | + sudo swapoff -a + sudo sysctl -w vm.swappiness=1 + sudo sysctl -w fs.file-max=262144 + sudo sysctl -w vm.max_map_count=262144 + - uses: elastic/elastic-github-actions/elasticsearch@master + with: + stack-version: 6.8.12 + - uses: ruby/setup-ruby@v1 + with: + ruby-version: 2.7 + - name: Bundle + run: | + sudo apt-get install libsqlite3-dev + gem install bundler + bundle install + bundle exec rake bundle:clean + bundle exec rake bundle:install + - name: Test + run: bundle exec rake test:all + diff --git a/.github/workflows/jruby.yml b/.github/workflows/jruby.yml new file mode 100644 index 000000000..7c814d10b --- /dev/null +++ b/.github/workflows/jruby.yml @@ -0,0 +1,40 @@ +name: JRuby +on: + push: + branches: + - 6.x + pull_request: + branches: + - 6.x +jobs: + tests: + env: + TEST_ES_SERVER: http://localhost:9200 + RAILS_VERSIONS: '5.0,6.0' + strategy: + fail-fast: false + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Increase system limits + run: | + sudo swapoff -a + sudo sysctl -w vm.swappiness=1 + sudo sysctl -w fs.file-max=262144 + sudo sysctl -w vm.max_map_count=262144 + - uses: elastic/elastic-github-actions/elasticsearch@master + with: + stack-version: 6.8.12 + - uses: ruby/setup-ruby@v1 + with: + ruby-version: jruby-9.2 + - name: Bundle + run: | + sudo apt-get install libsqlite3-dev + gem install bundler + bundle install + bundle exec rake bundle:clean + bundle exec rake bundle:install + - name: Test + run: bundle exec rake test:all + diff --git a/.gitignore b/.gitignore index 6076dc96c..43ddc2e30 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ .DS_Store *.log tmp/ +.idea/* .yardoc/ _yardoc/ diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index fed580991..000000000 --- a/.travis.yml +++ /dev/null @@ -1,55 +0,0 @@ -# ----------------------------------------------------------------------------- -# Configuration file for http://travis-ci.org/elasticsearch/elasticsearch-rails -# ----------------------------------------------------------------------------- - -dist: trusty - -sudo: required - -language: ruby - -services: - - mongodb - -branches: - only: - - master - - travis - - 5.x - - 2.x - -matrix: - include: - - rvm: 2.2.10 - jdk: oraclejdk8 - env: TEST_SUITE=unit - - - rvm: 2.3.7 - jdk: oraclejdk8 - env: TEST_SUITE=unit - - - rvm: 2.5.1 - jdk: oraclejdk8 - env: TEST_SUITE=unit - - - rvm: 2.3.3 - jdk: oraclejdk8 - env: TEST_SUITE=integration QUIET=y SERVER=start TEST_CLUSTER_LOGS=/tmp/log TEST_CLUSTER_COMMAND=/tmp/elasticsearch-6.3.0/bin/elasticsearch - -before_install: - - gem update --system --no-rdoc --no-ri - - gem --version - - gem install bundler 
-v 1.14.3 --no-rdoc --no-ri - bundle version - curl -sS https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-6.3.0.tar.gz | tar xz -C /tmp -install: - bundle install - rake bundle:clean - rake bundle:install -script: - rake test:$TEST_SUITE -notifications: - disable: true diff --git a/CHANGELOG.md b/CHANGELOG.md index d81c0e69e..1bba7b26e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,125 @@ +## 6.1.2 + +* Updates Bundler.with_clean_env (deprecated) to with_unbundled_env + +## 6.1.1 + +* fix: Ruby 2.7 deprecation warning on `find_in_batches` + +## 6.1.0 + +* Update test tasks and Travis (#840) +* `respond_to_missing?` to silence Ruby 2.4 warnings (#838) +* Only execute update if document attributes are not empty (#862) +* Update bundler dependencies in gemspec files (#899) +* Ensure that 6.x is in the Compatibility table + +### ActiveModel + +* Fix import when preprocess returns empty collection (#720) +* Add test for not importing when ActiveRecord query is empty +* Validate that #first called on response records equals #[] with 0 +* Port basic response tests to rspec (#833) +* Add newlines at the end of files that are missing them +* Port adapter tests to rspec (#834) +* Ensure that specified ActiveRecord order is not overwritten by Elasticsearch search results order (#835) +* Port remainder of Elasticsearch::Model unit tests to rspec (#836) +* Port all integration tests to rspec (#837) +* Avoid executing search twice; Reuse response in Response#raw_response (#850) +* Update example to account for deprecation of _suggest endpoint in favor of _search +* Use logger to log index not found message (#868) +* Add warning and documentation about STI support being deprecated (#898) +* Tweak STI deprecation description + +### Persistence + +* Ensure that arguments are passed to super (#853) +* Index name option is handled by super, no need to pass options explicitly + +### Ruby on Rails + +* Convert tests to rspec (#842) +* Fix seeds file to stop using outdated YAML method (#843) +* Fixed 03-expert.rb to set tracer only in dev env (#621) + +## 6.0.0 + +* Update to test against Elasticsearch 6.4 +* Fix sort order on ActiveRecord >= 5.
re issue #546 (#831) + +### ActiveModel + +* Inherit from HashWrapper for disabling warnings +* Fix import method to pass index name on refresh (#692) +* Use default scope on ActiveRecord model when importing (#827) +* Support scope, query and preprocess importing options in Mongoid Adapter in 6.x (#829) +* Address performance of HashWrapper in Response objects (#825) + +### Persistence + +* Address performance of HashWrapper in Response objects (#825) +* Minor refactor in Repository::Search +* Remove example music app that demonstrates the AR pattern +* Update Sinatra app +* Update README +* Change document type references to _doc + +## 6.0.0.pre + +* Added the "Compatibility" chapter to the READMEs +* Updated the Bundler instructions and Github URLs in the READMEs +* Updated the version on the `master` branch to `6.0.0.alpha1` +* Update versions to 6.0.0.beta +* minor: Fix spacing +* Update various gemspecs to conditionally depend on gems incompatible with JRuby (#810) +* Update versions +* Use local as source for gem dependencies when possible +* Only require 'oj' gem if not using JRuby +* Update versions to .pre + +### ActiveModel + +* Added an example with a custom "pattern" analyzer +* Added a "trigram" custom analyzer to the example +* Fix README typo (s/situation/situations) +* Fix reference to @ids in example and README +* Add Callbacks to the example datamapper adapter +* Fix `Asynchronous Callbacks` example +* Fixed a typo in the README +* Improved the custom analyzer example +* Removed left-overs from previous implementation in the "completion suggester" example +* Updated the `changes` method name in `Indexing` to `changes_to_save` for compatibility with Rails 5.1 +* Fixed the handling of changed attributes in `Indexing` to work with older Rails versions +* Update child-parent integration test to use single index type for ES 6.3 (#805) +* Use default doc type: _doc (#814) +* Avoid making an update when no attributes are changed (#762) + +### Persistence + +* Updated the failing integration tests for Elasticsearch 5.x +* Updated the dependency for "elasticsearch" and "elasticsearch-model" to `5.x` +* Documentation for Model should include Model and not Repository +* Depend on version >= 6 of elasticsearch gems +* Undo last commit; depend on version 5 of elasticsearch gems +* Reduce repeated string instantiation (#813) +* Make default doc type '_doc' in preparation for deprecation of mapping types (#816) +* Remove Elasticsearch::Persistence::Model (ActiveRecord persistence pattern) (#812) +* Deprecate _all field in ES 6.x (#820) +* Remove development dependency on virtus, include explicitly in Gemfile for integration test +* Refactor Repository as mixin (#824) +* Add missing Repository::Response::Results spec +* Update README for Repository mixin refactor +* Minor typo in README +* Add #inspect method for Repository +* Update references to Elasticsearch::Client + +### Ruby on Rails + +* Fixed typo in README +* Fix typo in rake import task +* Updated the templates for example Rails applications +* Add 'oj' back as a development dependency in gemspec + ## 6.0.0.alpha1 * Updated the Rake dependency to 11.1 diff --git a/Gemfile b/Gemfile index 4a7fe0473..31ae798e9 100644 --- a/Gemfile +++ b/Gemfile @@ -1,6 +1,5 @@ source 'https://rubygems.org/' -gem "bundler", "~> 1" gem "rake", "~> 11.1" gem 'elasticsearch-extensions' @@ -8,3 +7,8 @@ gem 'elasticsearch-extensions' gem "pry" gem "ansi" gem "cane" + +group :development do + gem 'yard' + gem 'rspec' +end diff --git 
a/README.md b/README.md index f7c8e1151..f8cd7a1a5 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,16 @@ # Elasticsearch +[![Ruby 2.7](https://github.com/elastic/elasticsearch-rails/workflows/Ruby%202.7/badge.svg?branch=6.x)](https://github.com/elastic/elasticsearch-rails/actions) +[![Ruby 2.6](https://github.com/elastic/elasticsearch-rails/workflows/Ruby%202.6/badge.svg?branch=6.x)](https://github.com/elastic/elasticsearch-rails/actions) +[![Ruby 2.5](https://github.com/elastic/elasticsearch-rails/workflows/Ruby%202.5/badge.svg?branch=6.x)](https://github.com/elastic/elasticsearch-rails/actions) +[![Ruby 2.4](https://github.com/elastic/elasticsearch-rails/workflows/Ruby%202.4/badge.svg?branch=6.x)](https://github.com/elastic/elasticsearch-rails/actions) +[![JRuby](https://github.com/elastic/elasticsearch-rails/workflows/JRuby/badge.svg?branch=6.x)](https://github.com/elastic/elasticsearch-rails/actions) +[![Code Climate](https://codeclimate.com/github/elastic/elasticsearch-rails/badges/gpa.svg)](https://codeclimate.com/github/elastic/elasticsearch-rails) + This repository contains various Ruby and Rails integrations for [Elasticsearch](http://elasticsearch.org): * ActiveModel integration with adapters for ActiveRecord and Mongoid * _Repository pattern_ based persistence layer for Ruby objects -* _Active Record pattern_ based persistence layer for Ruby models * Enumerable-based wrapper for search results * ActiveRecord::Relation-based wrapper for returning search results as records * Convenience model methods such as `search`, `mapping`, `import`, etc @@ -28,6 +34,7 @@ is compatible with the Elasticsearch `master` branch, therefore, with the next m | 0.1 | → | 1.x | | 2.x | → | 2.x | | 5.x | → | 5.x | +| 6.x | → | 6.x | | master | → | master | ## Installation @@ -98,20 +105,6 @@ repository.save Article.new(title: 'Test') # => {"_index"=>"repository", "_type"=>"article", "_id"=>"Ak75E0U9Q96T5Y999_39NA", ...} ``` -Example of using Elasticsearch as a persistence layer for a Ruby model: - -```ruby -require 'elasticsearch/persistence/model' -class Article - include Elasticsearch::Persistence::Model - attribute :title, String, mapping: { analyzer: 'snowball' } -end - -Article.create title: 'Test' -# POST http://localhost:9200/articles/article -# => #
-``` - **Please refer to each library documentation for detailed information and examples.** ### Model @@ -134,8 +127,6 @@ Article.create title: 'Test' ## Development -[![Build Status](https://travis-ci.org/elastic/elasticsearch-rails.svg?branch=master)](https://travis-ci.org/elastic/elasticsearch-rails) [![Code Climate](https://codeclimate.com/github/elastic/elasticsearch-rails/badges/gpa.svg)](https://codeclimate.com/github/elastic/elasticsearch-rails) - To work on the code, clone the repository and install all dependencies first: ``` diff --git a/Rakefile b/Rakefile index b4c90d710..6020f304d 100644 --- a/Rakefile +++ b/Rakefile @@ -1,6 +1,7 @@ require 'pathname' -subprojects = %w| elasticsearch-rails elasticsearch-persistence elasticsearch-model | +subprojects = [ 'elasticsearch-rails', 'elasticsearch-persistence' ] +subprojects << 'elasticsearch-model' unless defined?(JRUBY_VERSION) __current__ = Pathname( File.expand_path('..', __FILE__) ) @@ -25,15 +26,9 @@ namespace :bundle do task :install do subprojects.each do |project| puts '-'*80 - sh "bundle install --gemfile #{__current__.join(project)}/Gemfile" + sh "cd #{__current__.join(project)} && bundle exec rake bundle:install" puts end - puts '-'*80 - sh "bundle install --gemfile #{__current__.join('elasticsearch-model/gemfiles')}/3.0.gemfile" - puts '-'*80 - sh "bundle install --gemfile #{__current__.join('elasticsearch-model/gemfiles')}/4.0.gemfile" - puts '-'*80 - sh "bundle install --gemfile #{__current__.join('elasticsearch-model/gemfiles')}/5.0.gemfile" end desc "Remove Gemfile.lock in all subprojects" @@ -59,8 +54,43 @@ namespace :test do end end + desc "Run Elasticsearch (Docker)" + task :setup_elasticsearch_docker do + begin + sh <<-COMMAND.gsub(/^\s*/, '').gsub(/\s{1,}/, ' ') + docker run -d=true \ + --env "discovery.type=single-node" \ + --env "cluster.name=elasticsearch-rails" \ + --env "http.port=9200" \ + --env "cluster.routing.allocation.disk.threshold_enabled=false" \ + --publish 9250:9200 \ + --rm \ + docker.elastic.co/elasticsearch/elasticsearch:${ELASTICSEARCH_VERSION} + COMMAND + require 'elasticsearch/extensions/test/cluster' + Elasticsearch::Extensions::Test::Cluster::Cluster.new(version: ENV['ELASTICSEARCH_VERSION'], + number_of_nodes: 1).wait_for_green + rescue + end + end + + desc "Setup MongoDB (Docker)" + task :setup_mongodb_docker do + begin + if ENV['MONGODB_VERSION'] + sh <<-COMMAND.gsub(/^\s*/, '').gsub(/\s{1,}/, ' ') + wget http://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB_VERSION}.tgz -O /tmp/mongodb.tgz && + tar -xvf /tmp/mongodb.tgz && + mkdir /tmp/data && + ${PWD}/mongodb-linux-x86_64-${MONGODB_VERSION}/bin/mongod --setParameter enableTestCommands=1 --dbpath /tmp/data --bind_ip 127.0.0.1 --auth &> /dev/null & + COMMAND + end + rescue + end + end + desc "Run integration tests in all subprojects" - task :integration do + task :integration => :setup_elasticsearch do # 1/ elasticsearch-model # puts '-'*80 @@ -86,8 +116,13 @@ namespace :test do desc "Run all tests in all subprojects" task :all do - Rake::Task['test:unit'].invoke - Rake::Task['test:integration'].invoke + subprojects.each do |project| + puts '-'*80 + sh "cd #{project} && " + + "unset BUNDLE_GEMFILE && " + + "bundle exec rake test:all" + puts "\n" + end end namespace :cluster do diff --git a/elasticsearch-model/.gitignore b/elasticsearch-model/.gitignore index 8a8ab4613..d55d8b5e2 100644 --- a/elasticsearch-model/.gitignore +++ b/elasticsearch-model/.gitignore @@ -16,6 +16,4 @@ test/tmp test/version_tmp tmp 
-gemfiles/3.0.gemfile.lock -gemfiles/4.0.gemfile.lock -gemfiles/5.0.gemfile.lock +gemfiles/*.gemfile.lock \ No newline at end of file diff --git a/elasticsearch-model/Gemfile b/elasticsearch-model/Gemfile index a54f5084e..c016d096d 100644 --- a/elasticsearch-model/Gemfile +++ b/elasticsearch-model/Gemfile @@ -2,3 +2,8 @@ source 'https://rubygems.org/' # Specify your gem's dependencies in elasticsearch-model.gemspec gemspec + +group :development, :testing do + gem 'rspec' + gem 'pry-nav' +end diff --git a/elasticsearch-model/README.md b/elasticsearch-model/README.md index 22b9b378c..9c143ba9a 100644 --- a/elasticsearch-model/README.md +++ b/elasticsearch-model/README.md @@ -19,6 +19,7 @@ is compatible with the Elasticsearch `master` branch, therefore, with the next m | 0.1 | → | 1.x | | 2.x | → | 2.x | | 5.x | → | 5.x | +| 6.x | → | 6.x | | master | → | master | ## Installation @@ -724,13 +725,8 @@ module and its submodules for technical information. The module provides a common `settings` method to customize various features. -At the moment, the only supported setting is `:inheritance_enabled`, which makes the class receiving the module -respect index names and document types of a super-class, eg. in case you're using "single table inheritance" (STI) -in Rails: - -```ruby -Elasticsearch::Model.settings[:inheritance_enabled] = true -``` +Before version 7.0.0 of the gem, the only supported setting was `:inheritance_enabled`. This setting has been deprecated +and removed. ## Development and Community @@ -748,6 +744,17 @@ curl -# https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticse SERVER=start TEST_CLUSTER_COMMAND=$PWD/tmp/elasticsearch-1.0.0.RC1/bin/elasticsearch bundle exec rake test:all ``` +### Single Table Inheritance deprecation + +`Single Table Inheritance` has been supported through the 6.x series of this gem. With this feature, +elasticsearch settings (index mappings, etc) on a parent model could be inherited by a child model leading to different +model documents being indexed into the same Elasticsearch index. This feature depended on the ability to set a `type` +for a document in Elasticsearch. The Elasticsearch team has deprecated support for `types`, as is described +[here.](https://www.elastic.co/guide/en/elasticsearch/reference/current/removal-of-types.html) +This gem will also remove support for types and `Single Table Inheritance` in version 7.0 as it enables an anti-pattern. +Please save different model documents in separate indices. If you want to use STI, you can include an artificial +`type` field manually in each document and use it in other operations. + ## License This software is licensed under the Apache 2 license, quoted below. diff --git a/elasticsearch-model/Rakefile b/elasticsearch-model/Rakefile index 1efad46da..0a43fd193 100644 --- a/elasticsearch-model/Rakefile +++ b/elasticsearch-model/Rakefile @@ -4,12 +4,26 @@ desc "Run unit tests" task :default => 'test:unit' task :test => 'test:unit' -namespace :bundler do - desc "Install dependencies for all the Gemfiles" +if RUBY_VERSION < '2.3' + GEMFILES = ['3.0.gemfile', '4.0.gemfile', '5.0.gemfile'] +else + GEMFILES = ['4.0.gemfile', '5.0.gemfile'] +end + +namespace :bundle do + desc 'Install dependencies for all the Gemfiles in /gemfiles. Optionally define env variable RAILS_VERSIONS. E.g. 
RAILS_VERSIONS=3.0,5.0' task :install do - sh "BUNDLE_GEMFILE='#{File.expand_path('../gemfiles/3.0.gemfile', __FILE__)}' bundle install" - sh "BUNDLE_GEMFILE='#{File.expand_path('../gemfiles/4.0.gemfile', __FILE__)}' bundle install" - sh "BUNDLE_GEMFILE='#{File.expand_path('../gemfiles/5.0.gemfile', __FILE__)}' bundle install" + unless defined?(JRUBY_VERSION) + puts '-' * 80 + gemfiles = ENV['RAILS_VERSIONS'] ? ENV['RAILS_VERSIONS'].split(',').map { |v| "#{v}.gemfile"} : GEMFILES + gemfiles.each do |gemfile| + puts "GEMFILE: #{gemfile}" + Bundler.with_unbundled_env do + sh "bundle install --gemfile #{File.expand_path('../gemfiles/'+gemfile, __FILE__)}" + end + puts '-' * 80 + end + end end end @@ -17,38 +31,17 @@ end require 'rake/testtask' namespace :test do - Rake::TestTask.new(:run_unit) do |test| - test.libs << 'lib' << 'test' - test.test_files = FileList["test/unit/**/*_test.rb"] - test.verbose = false - test.warning = false - end - - Rake::TestTask.new(:run_integration) do |test| - test.libs << 'lib' << 'test' - test.test_files = FileList["test/integration/**/*_test.rb"] - test.verbose = false - test.warning = false - end - - desc "Run unit tests against ActiveModel 3, 4 and 5" - task :unit do - sh "BUNDLE_GEMFILE='#{File.expand_path('../gemfiles/3.0.gemfile', __FILE__)}' bundle exec rake test:run_unit" - sh "BUNDLE_GEMFILE='#{File.expand_path('../gemfiles/4.0.gemfile', __FILE__)}' bundle exec rake test:run_unit" - sh "BUNDLE_GEMFILE='#{File.expand_path('../gemfiles/5.0.gemfile', __FILE__)}' bundle exec rake test:run_unit" - end - - desc "Run integration tests against ActiveModel 3, 4 and 5" - task :integration do - sh "BUNDLE_GEMFILE='#{File.expand_path('../gemfiles/3.0.gemfile', __FILE__)}' bundle exec rake test:run_integration" - sh "BUNDLE_GEMFILE='#{File.expand_path('../gemfiles/4.0.gemfile', __FILE__)}' bundle exec rake test:run_integration" - sh "BUNDLE_GEMFILE='#{File.expand_path('../gemfiles/5.0.gemfile', __FILE__)}' bundle exec rake test:run_integration" - end - desc "Run unit and integration tests" - task :all do - Rake::Task['test:unit'].invoke - Rake::Task['test:integration'].invoke + desc 'Run all tests. Optionally define env variable RAILS_VERSIONS. E.g. RAILS_VERSIONS=3.0,5.0' + task :all, [:rails_versions] do |task, args| + gemfiles = ENV['RAILS_VERSIONS'] ? 
ENV['RAILS_VERSIONS'].split(',').map {|v| "#{v}.gemfile"} : GEMFILES + puts '-' * 80 + gemfiles.each do |gemfile| + puts "GEMFILE: #{gemfile}" + sh "BUNDLE_GEMFILE='#{File.expand_path("../gemfiles/#{gemfile}", __FILE__)}' " + + " bundle exec rspec" + puts '-' * 80 + end end end diff --git a/elasticsearch-model/elasticsearch-model.gemspec b/elasticsearch-model/elasticsearch-model.gemspec index e058ac43d..f1c04c27f 100644 --- a/elasticsearch-model/elasticsearch-model.gemspec +++ b/elasticsearch-model/elasticsearch-model.gemspec @@ -21,21 +21,21 @@ Gem::Specification.new do |s| s.extra_rdoc_files = [ "README.md", "LICENSE.txt" ] s.rdoc_options = [ "--charset=UTF-8" ] - s.required_ruby_version = ">= 1.9.3" + s.required_ruby_version = ">= 2.2" - s.add_dependency "elasticsearch", '> 1' + s.add_dependency "elasticsearch", '~> 6' s.add_dependency "activesupport", '> 3' s.add_dependency "hashie" - s.add_development_dependency "bundler", "~> 1.3" + s.add_development_dependency "bundler" s.add_development_dependency "rake", "~> 11.1" s.add_development_dependency "elasticsearch-extensions" - s.add_development_dependency "sqlite3" + s.add_development_dependency "sqlite3" unless defined?(JRUBY_VERSION) s.add_development_dependency "activemodel", "> 3" - s.add_development_dependency "oj" + s.add_development_dependency "oj" unless defined?(JRUBY_VERSION) s.add_development_dependency "kaminari" s.add_development_dependency "will_paginate" @@ -45,7 +45,7 @@ Gem::Specification.new do |s| s.add_development_dependency "mocha" s.add_development_dependency "turn" s.add_development_dependency "yard" - s.add_development_dependency "ruby-prof" + s.add_development_dependency "ruby-prof" unless defined?(JRUBY_VERSION) s.add_development_dependency "pry" s.add_development_dependency "simplecov" diff --git a/elasticsearch-model/examples/activerecord_mapping_completion.rb b/elasticsearch-model/examples/activerecord_mapping_completion.rb index b8270313c..d15390525 100644 --- a/elasticsearch-model/examples/activerecord_mapping_completion.rb +++ b/elasticsearch-model/examples/activerecord_mapping_completion.rb @@ -50,20 +50,7 @@ def as_indexed_json(options={}) puts "Article search:".ansi(:bold), response_1.to_a.map { |d| "Title: #{d.title}" }.inspect.ansi(:bold, :yellow) -response_2 = Article.__elasticsearch__.client.suggest \ - index: Article.index_name, - body: { - articles: { - text: 'foo', - completion: { field: 'title.suggest' } - } - }; - -puts "Article suggest:".ansi(:bold), - response_2['articles'].first['options'].map { |d| "#{d['text']} -> #{d['_source']['url']}" }. - inspect.ansi(:bold, :green) - -response_3 = Article.search \ +response_2 = Article.search \ query: { match: { title: 'foo' } }, @@ -76,7 +63,7 @@ def as_indexed_json(options={}) _source: ['title', 'url'] puts "Article search with suggest:".ansi(:bold), - response_3.response['suggest']['articles'].first['options'].map { |d| "#{d['text']} -> #{d['_source']['url']}" }. + response_2.response['suggest']['articles'].first['options'].map { |d| "#{d['text']} -> #{d['_source']['url']}" }. 
inspect.ansi(:bold, :blue) require 'pry'; binding.pry; diff --git a/elasticsearch-model/gemfiles/3.0.gemfile b/elasticsearch-model/gemfiles/3.0.gemfile index 23cbdf53d..ebb597f63 100644 --- a/elasticsearch-model/gemfiles/3.0.gemfile +++ b/elasticsearch-model/gemfiles/3.0.gemfile @@ -10,4 +10,9 @@ gemspec path: '../' gem 'activemodel', '>= 3.0' gem 'activerecord', '~> 3.2' gem 'mongoid', '>= 3.0' -gem 'sqlite3' +gem 'sqlite3', '~> 1.3.6' unless defined?(JRUBY_VERSION) + +group :development, :testing do + gem 'rspec' + gem 'pry-nav' +end \ No newline at end of file diff --git a/elasticsearch-model/gemfiles/4.0.gemfile b/elasticsearch-model/gemfiles/4.0.gemfile index 89044bb19..c5a748a42 100644 --- a/elasticsearch-model/gemfiles/4.0.gemfile +++ b/elasticsearch-model/gemfiles/4.0.gemfile @@ -9,4 +9,11 @@ gemspec path: '../' gem 'activemodel', '~> 4' gem 'activerecord', '~> 4' -gem 'sqlite3' +gem 'mongoid', '~> 5' +gem 'sqlite3', '~> 1.3.6' unless defined?(JRUBY_VERSION) + +group :development, :testing do + gem 'bigdecimal', '~> 1' + gem 'pry-nav' + gem 'rspec' +end diff --git a/elasticsearch-model/gemfiles/5.0.gemfile b/elasticsearch-model/gemfiles/5.0.gemfile index 75b8a7ca9..da9546846 100644 --- a/elasticsearch-model/gemfiles/5.0.gemfile +++ b/elasticsearch-model/gemfiles/5.0.gemfile @@ -9,4 +9,10 @@ gemspec path: '../' gem 'activemodel', '~> 5' gem 'activerecord', '~> 5' -gem 'sqlite3' +gem 'sqlite3' unless defined?(JRUBY_VERSION) +gem 'mongoid', '~> 6.4' + +group :development, :testing do + gem 'rspec' + gem 'pry-nav' +end diff --git a/elasticsearch-model/gemfiles/6.0.gemfile b/elasticsearch-model/gemfiles/6.0.gemfile new file mode 100644 index 000000000..6997ec965 --- /dev/null +++ b/elasticsearch-model/gemfiles/6.0.gemfile @@ -0,0 +1,18 @@ +# Usage: +# +# $ BUNDLE_GEMFILE=./gemfiles/6.0.gemfile bundle install +# $ BUNDLE_GEMFILE=./gemfiles/6.0.gemfile bundle exec rake test:integration + +source 'https://rubygems.org/' + +gemspec path: '../' + +gem 'activemodel', '~> 6' +gem 'activerecord', '~> 6' +gem 'sqlite3' unless defined?(JRUBY_VERSION) +gem 'mongoid', '~> 7' + +group :development, :testing do + gem 'rspec' + gem 'pry-nav' +end diff --git a/elasticsearch-model/lib/elasticsearch/model.rb b/elasticsearch-model/lib/elasticsearch/model.rb index 2c395bd84..102466916 100644 --- a/elasticsearch-model/lib/elasticsearch/model.rb +++ b/elasticsearch-model/lib/elasticsearch/model.rb @@ -131,12 +131,6 @@ class << self end end - # Access the module settings - # - def self.settings - @settings ||= {} - end - module ClassMethods # Get the client common for all models # @@ -193,7 +187,7 @@ def search(query_or_payload, models=[], options={}) # @note Inheritance is disabled by default. 
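The hunk below deprecates `Elasticsearch::Model.inheritance_enabled=` (setting it to `true` now prints the STI deprecation warning), and the README section added earlier in this patch suggests carrying an artificial `type` field instead of relying on single table inheritance. A minimal sketch of that suggestion, assuming a hypothetical `Article` ActiveRecord model (the class name and the use of ActiveSupport's `underscore` are illustrative, not part of this patch):

```ruby
# Minimal sketch, not part of this patch: instead of enabling the deprecated
# STI support, index each model into its own index and add an artificial
# `type` field to the indexed JSON, as the README suggests.
class Article < ActiveRecord::Base
  include Elasticsearch::Model
  include Elasticsearch::Model::Callbacks

  index_name 'articles'

  def as_indexed_json(options = {})
    as_json(options).merge('type' => self.class.name.underscore)
  end
end
```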
# def inheritance_enabled - @inheritance_enabled ||= false + @settings[:inheritance_enabled] ||= false end # Enable inheritance of index_name and document_type @@ -203,8 +197,21 @@ def inheritance_enabled # Elasticsearch::Model.inheritance_enabled = true # def inheritance_enabled=(inheritance_enabled) - @inheritance_enabled = inheritance_enabled + warn STI_DEPRECATION_WARNING if inheritance_enabled + @settings[:inheritance_enabled] = inheritance_enabled + end + + # Access the module settings + # + def settings + @settings ||= {} end + + private + + STI_DEPRECATION_WARNING = "DEPRECATION WARNING: Support for Single Table Inheritance (STI) is deprecated " + + "and will be removed in version 7.0.0.\nPlease save different model documents in separate indices and refer " + + "to the Elasticsearch documentation for more information.".freeze end extend ClassMethods diff --git a/elasticsearch-model/lib/elasticsearch/model/adapters/active_record.rb b/elasticsearch-model/lib/elasticsearch/model/adapters/active_record.rb index 23c4267fa..afe55c864 100644 --- a/elasticsearch-model/lib/elasticsearch/model/adapters/active_record.rb +++ b/elasticsearch-model/lib/elasticsearch/model/adapters/active_record.rb @@ -35,7 +35,11 @@ def records else self.__send__(:exec_queries) end - @records.sort_by { |record| hits.index { |hit| hit['_id'].to_s == record.id.to_s } } + if !self.order_values.present? + @records.sort_by { |record| hits.index { |hit| hit['_id'].to_s == record.id.to_s } } + else + @records + end end if self end @@ -47,27 +51,6 @@ def records def load records.__send__(:load) end - - # Intercept call to the `order` method, so we can ignore the order from Elasticsearch - # - def order(*args) - sql_records = records.__send__ :order, *args - - # Redefine the `to_a` method to the original one - # - sql_records.instance_exec do - define_singleton_method(:to_a) do - if defined?(::ActiveRecord) && ::ActiveRecord::VERSION::MAJOR >= 4 - self.load - else - self.__send__(:exec_queries) - end - @records - end - end - - sql_records - end end module Callbacks @@ -102,8 +85,9 @@ def __find_in_batches(options={}, &block) scope = scope.__send__(named_scope) if named_scope scope = scope.instance_exec(&query) if query - scope.find_in_batches(options) do |batch| - yield (preprocess ? self.__send__(preprocess, batch) : batch) + scope.find_in_batches(**options) do |batch| + batch = self.__send__(preprocess, batch) if preprocess + yield(batch) if batch.present? end end diff --git a/elasticsearch-model/lib/elasticsearch/model/adapters/mongoid.rb b/elasticsearch-model/lib/elasticsearch/model/adapters/mongoid.rb index 5117dbf58..d4aff56f9 100644 --- a/elasticsearch-model/lib/elasticsearch/model/adapters/mongoid.rb +++ b/elasticsearch-model/lib/elasticsearch/model/adapters/mongoid.rb @@ -63,10 +63,17 @@ module Importing # @see https://github.com/karmi/retire/pull/724 # def __find_in_batches(options={}, &block) - options[:batch_size] ||= 1_000 + batch_size = options[:batch_size] || 1_000 + query = options[:query] + named_scope = options[:scope] + preprocess = options[:preprocess] + + scope = all + scope = scope.send(named_scope) if named_scope + scope = query.is_a?(Proc) ? scope.class_exec(&query) : scope.where(query) if query - all.no_timeout.each_slice(options[:batch_size]) do |items| - yield items + scope.no_timeout.each_slice(batch_size) do |items| + yield (preprocess ? 
self.__send__(preprocess, items) : items) end end diff --git a/elasticsearch-model/lib/elasticsearch/model/importing.rb b/elasticsearch-model/lib/elasticsearch/model/importing.rb index 7c42545d2..764413acf 100644 --- a/elasticsearch-model/lib/elasticsearch/model/importing.rb +++ b/elasticsearch-model/lib/elasticsearch/model/importing.rb @@ -130,7 +130,7 @@ def import(options={}, &block) errors += response['items'].select { |k, v| k.values.first['error'] } end - self.refresh_index! if refresh + self.refresh_index! index: target_index if refresh case return_value when 'errors' diff --git a/elasticsearch-model/lib/elasticsearch/model/indexing.rb b/elasticsearch-model/lib/elasticsearch/model/indexing.rb index 39ad06bc3..a88dd12fb 100644 --- a/elasticsearch-model/lib/elasticsearch/model/indexing.rb +++ b/elasticsearch-model/lib/elasticsearch/model/indexing.rb @@ -269,7 +269,8 @@ def delete_index!(options={}) self.client.indices.delete index: target_index rescue Exception => e if e.class.to_s =~ /NotFound/ && options[:force] - STDERR.puts "[!!!] Index does not exist (#{e.class})" + client.transport.logger.debug("[!!!] Index does not exist (#{e.class})") if client.transport.logger + nil else raise e end @@ -295,7 +296,8 @@ def refresh_index!(options={}) self.client.indices.refresh index: target_index rescue Exception => e if e.class.to_s =~ /NotFound/ && options[:force] - STDERR.puts "[!!!] Index does not exist (#{e.class})" + client.transport.logger.debug("[!!!] Index does not exist (#{e.class})") if client.transport.logger + nil else raise e end @@ -397,7 +399,7 @@ def delete_document(options={}) # @see http://rubydoc.info/gems/elasticsearch-api/Elasticsearch/API/Actions:update # def update_document(options={}) - if attributes_in_database = self.instance_variable_get(:@__changed_model_attributes) + if attributes_in_database = self.instance_variable_get(:@__changed_model_attributes).presence attributes = if respond_to?(:as_indexed_json) self.as_indexed_json.select { |k,v| attributes_in_database.keys.map(&:to_s).include? k.to_s } else @@ -409,7 +411,7 @@ def update_document(options={}) type: document_type, id: self.id, body: { doc: attributes } }.merge(options) - ) + ) unless attributes.empty? else index_document(options) end diff --git a/elasticsearch-model/lib/elasticsearch/model/naming.rb b/elasticsearch-model/lib/elasticsearch/model/naming.rb index 7bf24f089..50b396c37 100644 --- a/elasticsearch-model/lib/elasticsearch/model/naming.rb +++ b/elasticsearch-model/lib/elasticsearch/model/naming.rb @@ -5,6 +5,8 @@ module Model # module Naming + DEFAULT_DOC_TYPE = '_doc'.freeze + module ClassMethods # Get or set the name of the index @@ -77,7 +79,12 @@ def implicit(prop) if Elasticsearch::Model.settings[:inheritance_enabled] self.ancestors.each do |klass| - next if klass == self + # When Naming is included in Proxy::ClassMethods the actual model + # is among its ancestors. We don't want to call the actual model + # since it will result in the same call to the same instance of + # Proxy::ClassMethods. To prevent this we also skip the ancestor + # that is the target. 
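The `update_document` change above only sends attributes that are both changed and present in `as_indexed_json`, and skips the request entirely when that filtered payload is empty. A hedged sketch of the resulting behaviour, using a hypothetical `Article` model with an `internal_notes` column that is deliberately left out of the indexed JSON:

```ruby
# Illustrative sketch only; Article and internal_notes are hypothetical.
class Article < ActiveRecord::Base
  include Elasticsearch::Model
  include Elasticsearch::Model::Callbacks

  # Only `title` is part of the indexed document.
  def as_indexed_json(options = {})
    { title: title }
  end
end

article = Article.create!(title: 'Test', internal_notes: 'draft')

article.update(title: 'Updated')
# update_document sends a partial update: { doc: { title: 'Updated' } }

article.update(internal_notes: 'reviewed')
# the changed attribute is not in as_indexed_json, so the computed payload
# is empty and no update request is issued
```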
+ next if klass == self || self.respond_to?(:target) && klass == self.target break if value = klass.respond_to?(prop) && klass.send(prop) end end @@ -90,7 +97,7 @@ def default_index_name end def default_document_type - self.model_name.element + DEFAULT_DOC_TYPE end end diff --git a/elasticsearch-model/lib/elasticsearch/model/response.rb b/elasticsearch-model/lib/elasticsearch/model/response.rb index d15c24a04..acb25669e 100644 --- a/elasticsearch-model/lib/elasticsearch/model/response.rb +++ b/elasticsearch-model/lib/elasticsearch/model/response.rb @@ -10,8 +10,7 @@ module Response # Implements Enumerable and forwards its methods to the {#results} object. # class Response - attr_reader :klass, :search, :response, - :took, :timed_out, :shards + attr_reader :klass, :search include Enumerable @@ -27,9 +26,7 @@ def initialize(klass, search, options={}) # @return [Hash] # def response - @response ||= begin - HashWrapper.new(search.execute!) - end + @response ||= HashWrapper.new(search.execute!) end # Returns the collection of "hits" from Elasticsearch @@ -51,31 +48,35 @@ def records(options = {}) # Returns the "took" time # def took - response['took'] + raw_response['took'] end # Returns whether the response timed out # def timed_out - response['timed_out'] + raw_response['timed_out'] end # Returns the statistics on shards # def shards - HashWrapper.new(response['_shards']) + @shards ||= response['_shards'] end # Returns a Hashie::Mash of the aggregations # def aggregations - Aggregations.new(response['aggregations']) + @aggregations ||= Aggregations.new(raw_response['aggregations']) end # Returns a Hashie::Mash of the suggestions # def suggestions - Suggestions.new(response['suggest']) + @suggestions ||= Suggestions.new(raw_response['suggest']) + end + + def raw_response + @raw_response ||= @response ? @response.to_hash : search.execute! 
end end end diff --git a/elasticsearch-model/lib/elasticsearch/model/response/aggregations.rb b/elasticsearch-model/lib/elasticsearch/model/response/aggregations.rb index 43dfd23e8..c2dd23c67 100644 --- a/elasticsearch-model/lib/elasticsearch/model/response/aggregations.rb +++ b/elasticsearch-model/lib/elasticsearch/model/response/aggregations.rb @@ -2,7 +2,7 @@ module Elasticsearch module Model module Response - class Aggregations < Hashie::Mash + class Aggregations < HashWrapper disable_warnings if respond_to?(:disable_warnings) def initialize(attributes={}) diff --git a/elasticsearch-model/lib/elasticsearch/model/response/base.rb b/elasticsearch-model/lib/elasticsearch/model/response/base.rb index 3bb8005b6..827c52e35 100644 --- a/elasticsearch-model/lib/elasticsearch/model/response/base.rb +++ b/elasticsearch-model/lib/elasticsearch/model/response/base.rb @@ -4,7 +4,7 @@ module Response # Common funtionality for classes in the {Elasticsearch::Model::Response} module # module Base - attr_reader :klass, :response + attr_reader :klass, :response, :raw_response # @param klass [Class] The name of the model class # @param response [Hash] The full response returned from Elasticsearch client @@ -12,7 +12,8 @@ module Base # def initialize(klass, response, options={}) @klass = klass - @response = response + @raw_response = response + @response = response end # @abstract Implement this method in specific class diff --git a/elasticsearch-model/lib/elasticsearch/model/response/pagination.rb b/elasticsearch-model/lib/elasticsearch/model/response/pagination.rb index c8e74b793..82f1301a5 100644 --- a/elasticsearch-model/lib/elasticsearch/model/response/pagination.rb +++ b/elasticsearch-model/lib/elasticsearch/model/response/pagination.rb @@ -1,192 +1,2 @@ -module Elasticsearch - module Model - module Response - - # Pagination for search results/records - # - module Pagination - # Allow models to be paginated with the "kaminari" gem [https://github.com/amatsuda/kaminari] - # - module Kaminari - def self.included(base) - # Include the Kaminari configuration and paging method in response - # - base.__send__ :include, ::Kaminari::ConfigurationMethods::ClassMethods - base.__send__ :include, ::Kaminari::PageScopeMethods - - # Include the Kaminari paging methods in results and records - # - Elasticsearch::Model::Response::Results.__send__ :include, ::Kaminari::ConfigurationMethods::ClassMethods - Elasticsearch::Model::Response::Results.__send__ :include, ::Kaminari::PageScopeMethods - Elasticsearch::Model::Response::Records.__send__ :include, ::Kaminari::PageScopeMethods - - Elasticsearch::Model::Response::Results.__send__ :delegate, :limit_value, :offset_value, :total_count, :max_pages, to: :response - Elasticsearch::Model::Response::Records.__send__ :delegate, :limit_value, :offset_value, :total_count, :max_pages, to: :response - - base.class_eval <<-RUBY, __FILE__, __LINE__ + 1 - # Define the `page` Kaminari method - # - def #{::Kaminari.config.page_method_name}(num=nil) - @results = nil - @records = nil - @response = nil - @page = [num.to_i, 1].max - @per_page ||= __default_per_page - - self.search.definition.update size: @per_page, - from: @per_page * (@page - 1) - - self - end - RUBY - end - - # Returns the current "limit" (`size`) value - # - def limit_value - case - when search.definition[:size] - search.definition[:size] - else - __default_per_page - end - end - - # Returns the current "offset" (`from`) value - # - def offset_value - case - when search.definition[:from] - search.definition[:from] - 
else - 0 - end - end - - # Set the "limit" (`size`) value - # - def limit(value) - return self if value.to_i <= 0 - @results = nil - @records = nil - @response = nil - @per_page = value.to_i - - search.definition.update :size => @per_page - search.definition.update :from => @per_page * (@page - 1) if @page - self - end - - # Set the "offset" (`from`) value - # - def offset(value) - return self if value.to_i < 0 - @results = nil - @records = nil - @response = nil - @page = nil - search.definition.update :from => value.to_i - self - end - - # Returns the total number of results - # - def total_count - results.total - end - - # Returns the models's `per_page` value or the default - # - # @api private - # - def __default_per_page - klass.respond_to?(:default_per_page) && klass.default_per_page || ::Kaminari.config.default_per_page - end - end - - # Allow models to be paginated with the "will_paginate" gem [https://github.com/mislav/will_paginate] - # - module WillPaginate - def self.included(base) - base.__send__ :include, ::WillPaginate::CollectionMethods - - # Include the paging methods in results and records - # - methods = [:current_page, :offset, :length, :per_page, :total_entries, :total_pages, :previous_page, :next_page, :out_of_bounds?] - Elasticsearch::Model::Response::Results.__send__ :delegate, *methods, to: :response - Elasticsearch::Model::Response::Records.__send__ :delegate, *methods, to: :response - end - - def offset - (current_page - 1) * per_page - end - - def length - search.definition[:size] - end - - # Main pagination method - # - # @example - # - # Article.search('foo').paginate(page: 1, per_page: 30) - # - def paginate(options) - param_name = options[:param_name] || :page - page = [options[param_name].to_i, 1].max - per_page = (options[:per_page] || __default_per_page).to_i - - search.definition.update size: per_page, - from: (page - 1) * per_page - self - end - - # Return the current page - # - def current_page - search.definition[:from] / per_page + 1 if search.definition[:from] && per_page - end - - # Pagination method - # - # @example - # - # Article.search('foo').page(2) - # - def page(num) - paginate(page: num, per_page: per_page) # shorthand - end - - # Return or set the "size" value - # - # @example - # - # Article.search('foo').per_page(15).page(2) - # - def per_page(num = nil) - if num.nil? 
- search.definition[:size] - else - paginate(page: current_page, per_page: num) # shorthand - end - end - - # Returns the total number of results - # - def total_entries - results.total - end - - # Returns the models's `per_page` value or the default - # - # @api private - # - def __default_per_page - klass.respond_to?(:per_page) && klass.per_page || ::WillPaginate.per_page - end - end - end - - end - end -end +require 'elasticsearch/model/response/pagination/kaminari' +require 'elasticsearch/model/response/pagination/will_paginate' diff --git a/elasticsearch-model/lib/elasticsearch/model/response/pagination/kaminari.rb b/elasticsearch-model/lib/elasticsearch/model/response/pagination/kaminari.rb new file mode 100644 index 000000000..5b1acfd9b --- /dev/null +++ b/elasticsearch-model/lib/elasticsearch/model/response/pagination/kaminari.rb @@ -0,0 +1,109 @@ +module Elasticsearch + module Model + module Response + + # Pagination for search results/records + # + module Pagination + # Allow models to be paginated with the "kaminari" gem [https://github.com/amatsuda/kaminari] + # + module Kaminari + def self.included(base) + # Include the Kaminari configuration and paging method in response + # + base.__send__ :include, ::Kaminari::ConfigurationMethods::ClassMethods + base.__send__ :include, ::Kaminari::PageScopeMethods + + # Include the Kaminari paging methods in results and records + # + Elasticsearch::Model::Response::Results.__send__ :include, ::Kaminari::ConfigurationMethods::ClassMethods + Elasticsearch::Model::Response::Results.__send__ :include, ::Kaminari::PageScopeMethods + Elasticsearch::Model::Response::Records.__send__ :include, ::Kaminari::PageScopeMethods + + Elasticsearch::Model::Response::Results.__send__ :delegate, :limit_value, :offset_value, :total_count, :max_pages, to: :response + Elasticsearch::Model::Response::Records.__send__ :delegate, :limit_value, :offset_value, :total_count, :max_pages, to: :response + + base.class_eval <<-RUBY, __FILE__, __LINE__ + 1 + # Define the `page` Kaminari method + # + def #{::Kaminari.config.page_method_name}(num=nil) + @results = nil + @records = nil + @response = nil + @page = [num.to_i, 1].max + @per_page ||= __default_per_page + + self.search.definition.update size: @per_page, + from: @per_page * (@page - 1) + + self + end + RUBY + end + + # Returns the current "limit" (`size`) value + # + def limit_value + case + when search.definition[:size] + search.definition[:size] + else + __default_per_page + end + end + + # Returns the current "offset" (`from`) value + # + def offset_value + case + when search.definition[:from] + search.definition[:from] + else + 0 + end + end + + # Set the "limit" (`size`) value + # + def limit(value) + return self if value.to_i <= 0 + @results = nil + @records = nil + @response = nil + @per_page = value.to_i + + search.definition.update :size => @per_page + search.definition.update :from => @per_page * (@page - 1) if @page + self + end + + # Set the "offset" (`from`) value + # + def offset(value) + return self if value.to_i < 0 + @results = nil + @records = nil + @response = nil + @page = nil + search.definition.update :from => value.to_i + self + end + + # Returns the total number of results + # + def total_count + results.total + end + + # Returns the models's `per_page` value or the default + # + # @api private + # + def __default_per_page + klass.respond_to?(:default_per_page) && klass.default_per_page || ::Kaminari.config.default_per_page + end + end + end + end + end +end diff --git 
a/elasticsearch-model/lib/elasticsearch/model/response/pagination/will_paginate.rb b/elasticsearch-model/lib/elasticsearch/model/response/pagination/will_paginate.rb new file mode 100644 index 000000000..7cfc36d0c --- /dev/null +++ b/elasticsearch-model/lib/elasticsearch/model/response/pagination/will_paginate.rb @@ -0,0 +1,95 @@ +module Elasticsearch + module Model + module Response + + # Pagination for search results/records + # + module Pagination + + + # Allow models to be paginated with the "will_paginate" gem [https://github.com/mislav/will_paginate] + # + module WillPaginate + def self.included(base) + base.__send__ :include, ::WillPaginate::CollectionMethods + + # Include the paging methods in results and records + # + methods = [:current_page, :offset, :length, :per_page, :total_entries, :total_pages, :previous_page, :next_page, :out_of_bounds?] + Elasticsearch::Model::Response::Results.__send__ :delegate, *methods, to: :response + Elasticsearch::Model::Response::Records.__send__ :delegate, *methods, to: :response + end + + def offset + (current_page - 1) * per_page + end + + def length + search.definition[:size] + end + + # Main pagination method + # + # @example + # + # Article.search('foo').paginate(page: 1, per_page: 30) + # + def paginate(options) + param_name = options[:param_name] || :page + page = [options[param_name].to_i, 1].max + per_page = (options[:per_page] || __default_per_page).to_i + + search.definition.update size: per_page, + from: (page - 1) * per_page + self + end + + # Return the current page + # + def current_page + search.definition[:from] / per_page + 1 if search.definition[:from] && per_page + end + + # Pagination method + # + # @example + # + # Article.search('foo').page(2) + # + def page(num) + paginate(page: num, per_page: per_page) # shorthand + end + + # Return or set the "size" value + # + # @example + # + # Article.search('foo').per_page(15).page(2) + # + def per_page(num = nil) + if num.nil? 
+ search.definition[:size] + else + paginate(page: current_page, per_page: num) # shorthand + end + end + + # Returns the total number of results + # + def total_entries + results.total + end + + # Returns the models's `per_page` value or the default + # + # @api private + # + def __default_per_page + klass.respond_to?(:per_page) && klass.per_page || ::WillPaginate.per_page + end + end + end + + end + end +end diff --git a/elasticsearch-model/lib/elasticsearch/model/response/result.rb b/elasticsearch-model/lib/elasticsearch/model/response/result.rb index 01481d0e1..267ca6300 100644 --- a/elasticsearch-model/lib/elasticsearch/model/response/result.rb +++ b/elasticsearch-model/lib/elasticsearch/model/response/result.rb @@ -46,7 +46,7 @@ def method_missing(name, *arguments) # Respond to methods from `@result` or `@result._source` # - def respond_to?(method_name, include_private = false) + def respond_to_missing?(method_name, include_private = false) @result.respond_to?(method_name.to_sym) || \ @result._source && @result._source.respond_to?(method_name.to_sym) || \ super diff --git a/elasticsearch-model/lib/elasticsearch/model/response/suggestions.rb b/elasticsearch-model/lib/elasticsearch/model/response/suggestions.rb index 1b1cc6598..b2809bb12 100644 --- a/elasticsearch-model/lib/elasticsearch/model/response/suggestions.rb +++ b/elasticsearch-model/lib/elasticsearch/model/response/suggestions.rb @@ -2,7 +2,7 @@ module Elasticsearch module Model module Response - class Suggestions < Hashie::Mash + class Suggestions < HashWrapper disable_warnings if respond_to?(:disable_warnings) def terms diff --git a/elasticsearch-model/lib/elasticsearch/model/version.rb b/elasticsearch-model/lib/elasticsearch/model/version.rb index cfa7e4ab6..6eedce773 100644 --- a/elasticsearch-model/lib/elasticsearch/model/version.rb +++ b/elasticsearch-model/lib/elasticsearch/model/version.rb @@ -1,5 +1,5 @@ module Elasticsearch module Model - VERSION = "6.0.0.alpha1" + VERSION = '6.1.2' end end diff --git a/elasticsearch-model/spec/elasticsearch/model/adapter_spec.rb b/elasticsearch-model/spec/elasticsearch/model/adapter_spec.rb new file mode 100644 index 000000000..71fd2e6b5 --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/adapter_spec.rb @@ -0,0 +1,119 @@ +require 'spec_helper' + +describe Elasticsearch::Model::Adapter do + + before(:all) do + class ::DummyAdapterClass; end + class ::DummyAdapterClassWithAdapter; end + class ::DummyAdapter + Records = Module.new + Callbacks = Module.new + Importing = Module.new + end + end + + after(:all) do + [DummyAdapterClassWithAdapter, DummyAdapterClass, DummyAdapter].each do |adapter| + Elasticsearch::Model::Adapter::Adapter.adapters.delete(adapter) + end + remove_classes(DummyAdapterClass, DummyAdapterClassWithAdapter, DummyAdapter) + end + + describe '#from_class' do + + it 'should return an Adapter instance' do + expect(Elasticsearch::Model::Adapter.from_class(DummyAdapterClass)).to be_a(Elasticsearch::Model::Adapter::Adapter) + end + end + + describe 'register' do + + before do + expect(Elasticsearch::Model::Adapter::Adapter).to receive(:register).and_call_original + Elasticsearch::Model::Adapter.register(:foo, lambda { |c| false }) + end + + it 'should register an adapter' do + expect(Elasticsearch::Model::Adapter::Adapter.adapters[:foo]).to be_a(Proc) + end + + context 'when a specific adapter class is set' do + + before do + expect(Elasticsearch::Model::Adapter::Adapter).to receive(:register).and_call_original + 
Elasticsearch::Model::Adapter::Adapter.register(DummyAdapter, + lambda { |c| c == DummyAdapterClassWithAdapter }) + end + + let(:adapter) do + Elasticsearch::Model::Adapter::Adapter.new(DummyAdapterClassWithAdapter) + end + + it 'should register the adapter' do + expect(adapter.adapter).to eq(DummyAdapter) + end + end + end + + describe 'default adapter' do + + let(:adapter) do + Elasticsearch::Model::Adapter::Adapter.new(DummyAdapterClass) + end + + it 'sets a default adapter' do + expect(adapter.adapter).to eq(Elasticsearch::Model::Adapter::Default) + end + end + + describe '#records_mixin' do + + before do + Elasticsearch::Model::Adapter::Adapter.register(DummyAdapter, + lambda { |c| c == DummyAdapterClassWithAdapter }) + + end + + let(:adapter) do + Elasticsearch::Model::Adapter::Adapter.new(DummyAdapterClassWithAdapter) + end + + it 'returns a Module' do + expect(adapter.records_mixin).to be_a(Module) + end + end + + describe '#callbacks_mixin' do + + before do + Elasticsearch::Model::Adapter::Adapter.register(DummyAdapter, + lambda { |c| c == DummyAdapterClassWithAdapter }) + + end + + let(:adapter) do + Elasticsearch::Model::Adapter::Adapter.new(DummyAdapterClassWithAdapter) + end + + it 'returns a Module' do + expect(adapter.callbacks_mixin).to be_a(Module) + end + end + + describe '#importing_mixin' do + + before do + Elasticsearch::Model::Adapter::Adapter.register(DummyAdapter, + lambda { |c| c == DummyAdapterClassWithAdapter }) + + end + + let(:adapter) do + Elasticsearch::Model::Adapter::Adapter.new(DummyAdapterClassWithAdapter) + end + + it 'returns a Module' do + expect(adapter.importing_mixin).to be_a(Module) + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/associations_spec.rb b/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/associations_spec.rb new file mode 100644 index 000000000..0cfed6432 --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/associations_spec.rb @@ -0,0 +1,334 @@ +require 'spec_helper' + +describe 'Elasticsearch::Model::Adapter::ActiveRecord Associations' do + + before(:all) do + ActiveRecord::Schema.define(version: 1) do + create_table :categories do |t| + t.string :title + t.timestamps null: false + end + + create_table :categories_posts do |t| + t.references :post, :category + end + + create_table :authors do |t| + t.string :first_name, :last_name + t.timestamps null: false + end + + create_table :authorships do |t| + t.string :first_name, :last_name + t.references :post + t.references :author + t.timestamps null: false + end + + create_table :comments do |t| + t.string :text + t.string :author + t.references :post + t.timestamps null: false + end + + add_index(:comments, :post_id) unless index_exists?(:comments, :post_id) + + create_table :posts do |t| + t.string :title + t.text :text + t.boolean :published + t.timestamps null: false + end + end + + Comment.__send__ :include, Elasticsearch::Model + Comment.__send__ :include, Elasticsearch::Model::Callbacks + end + + before do + clear_tables(:categories, :categories_posts, :authors, :authorships, :comments, :posts) + clear_indices(Post) + Post.__elasticsearch__.create_index!(force: true) + Comment.__elasticsearch__.create_index!(force: true) + end + + after do + clear_tables(Post, Category) + clear_indices(Post) + end + + context 'when a document is created' do + + before do + Post.create!(title: 'Test') + Post.create!(title: 'Testing Coding') + Post.create!(title: 'Coding') + 
Post.__elasticsearch__.refresh_index! + end + + let(:search_result) do + Post.search('title:test') + end + + it 'indexes the document' do + expect(search_result.results.size).to eq(2) + expect(search_result.results.first.title).to eq('Test') + expect(search_result.records.size).to eq(2) + expect(search_result.records.first.title).to eq('Test') + end + end + + describe 'has_many_and_belongs_to association' do + + context 'when an association is updated' do + + before do + post.categories = [category_a, category_b] + Post.__elasticsearch__.refresh_index! + end + + let(:category_a) do + Category.where(title: "One").first_or_create! + end + + let(:category_b) do + Category.where(title: "Two").first_or_create! + end + + let(:post) do + Post.create! title: "First Post", text: "This is the first post..." + end + + let(:search_result) do + Post.search(query: { + bool: { + must: { + multi_match: { + fields: ['title'], + query: 'first' + } + }, + filter: { + terms: { + categories: ['One'] + } + } + } + } ) + end + + it 'applies the update with' do + expect(search_result.results.size).to eq(1) + expect(search_result.results.first.title).to eq('First Post') + expect(search_result.records.size).to eq(1) + expect(search_result.records.first.title).to eq('First Post') + end + end + + context 'when an association is deleted' do + + before do + post.categories = [category_a, category_b] + post.categories = [category_b] + Post.__elasticsearch__.refresh_index! + end + + let(:category_a) do + Category.where(title: "One").first_or_create! + end + + let(:category_b) do + Category.where(title: "Two").first_or_create! + end + + let(:post) do + Post.create! title: "First Post", text: "This is the first post..." + end + + let(:search_result) do + Post.search(query: { + bool: { + must: { + multi_match: { + fields: ['title'], + query: 'first' + } + }, + filter: { + terms: { + categories: ['One'] + } + } + } + } ) + end + + it 'applies the update with a reindex' do + expect(search_result.results.size).to eq(0) + expect(search_result.records.size).to eq(0) + end + end + end + + describe 'has_many through association' do + + context 'when the association is updated' do + + before do + author_a = Author.where(first_name: "John", last_name: "Smith").first_or_create! + author_b = Author.where(first_name: "Mary", last_name: "Smith").first_or_create! + author_c = Author.where(first_name: "Kobe", last_name: "Griss").first_or_create! + + # Create posts + post_1 = Post.create!(title: "First Post", text: "This is the first post...") + post_2 = Post.create!(title: "Second Post", text: "This is the second post...") + post_3 = Post.create!(title: "Third Post", text: "This is the third post...") + + # Assign authors + post_1.authors = [author_a, author_b] + post_2.authors = [author_a] + post_3.authors = [author_c] + + Post.__elasticsearch__.refresh_index! + end + + context 'if active record is at least 4' do + + let(:search_result) do + Post.search('authors.full_name:john') + end + + it 'applies the update', if: active_record_at_least_4? do + expect(search_result.results.size).to eq(2) + expect(search_result.records.size).to eq(2) + end + end + + context 'if active record is less than 4' do + + let(:search_result) do + Post.search('authors.author.full_name:john') + end + + it 'applies the update', if: !active_record_at_least_4? do + expect(search_result.results.size).to eq(2) + expect(search_result.records.size).to eq(2) + end + end + end + + context 'when an association is added', if: active_record_at_least_4? 
do + + before do + author_a = Author.where(first_name: "John", last_name: "Smith").first_or_create! + author_b = Author.where(first_name: "Mary", last_name: "Smith").first_or_create! + + # Create posts + post_1 = Post.create!(title: "First Post", text: "This is the first post...") + + # Assign authors + post_1.authors = [author_a] + post_1.authors << author_b + Post.__elasticsearch__.refresh_index! + end + + let(:search_result) do + Post.search('authors.full_name:john') + end + + it 'adds the association' do + expect(search_result.results.size).to eq(1) + expect(search_result.records.size).to eq(1) + end + end + end + + describe 'has_many association' do + + context 'when an association is added', if: active_record_at_least_4? do + + before do + # Create posts + post_1 = Post.create!(title: "First Post", text: "This is the first post...") + post_2 = Post.create!(title: "Second Post", text: "This is the second post...") + + # Add comments + post_1.comments.create!(author: 'John', text: 'Excellent') + post_1.comments.create!(author: 'Abby', text: 'Good') + + post_2.comments.create!(author: 'John', text: 'Terrible') + + post_1.comments.create!(author: 'John', text: 'Or rather just good...') + Post.__elasticsearch__.refresh_index! + end + + let(:search_result) do + Post.search(query: { + nested: { + path: 'comments', + query: { + bool: { + must: [ + { match: { 'comments.author' => 'john' } }, + { match: { 'comments.text' => 'good' } } + ] + } + } + } + }) + end + + it 'adds the association' do + expect(search_result.results.size).to eq(1) + end + end + end + + describe '#touch' do + + context 'when a touch callback is defined on the model' do + + before do + # Create categories + category_a = Category.where(title: "One").first_or_create! + + # Create post + post = Post.create!(title: "First Post", text: "This is the first post...") + + # Assign category + post.categories << category_a + category_a.update_attribute(:title, "Updated") + category_a.posts.each { |p| p.touch } + + Post.__elasticsearch__.refresh_index! + end + + it 'executes the callback after #touch' do + expect(Post.search('categories:One').size).to eq(0) + expect(Post.search('categories:Updated').size).to eq(1) + end + end + end + + describe '#includes' do + + before do + post_1 = Post.create(title: 'One') + post_2 = Post.create(title: 'Two') + post_1.comments.create(text: 'First comment') + post_2.comments.create(text: 'Second comment') + + Comment.__elasticsearch__.refresh_index! 
+ end
+
+ let(:search_result) do
+ Comment.search('first').records(includes: :post)
+ end
+
+ it 'eager loads associations' do
+ expect(search_result.first.association(:post)).to be_loaded
+ expect(search_result.first.post.title).to eq('One')
+ end
+ end
+end
diff --git a/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/basic_spec.rb b/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/basic_spec.rb
new file mode 100644
index 000000000..a4d9c05c5
--- /dev/null
+++ b/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/basic_spec.rb
@@ -0,0 +1,340 @@
+require 'spec_helper'
+
+describe Elasticsearch::Model::Adapter::ActiveRecord do
+
+ before(:all) do
+ ActiveRecord::Schema.define(:version => 1) do
+ create_table :articles do |t|
+ t.string :title
+ t.string :body
+ t.integer :clicks, :default => 0
+ t.datetime :created_at, :default => 'NOW()'
+ end
+ end
+
+ Article.delete_all
+ Article.__elasticsearch__.create_index!(force: true)
+
+ Article.create!(title: 'Test', body: '', clicks: 1)
+ Article.create!(title: 'Testing Coding', body: '', clicks: 2)
+ Article.create!(title: 'Coding', body: '', clicks: 3)
+
+ Article.__elasticsearch__.refresh_index!
+ end
+
+ describe 'indexing a document' do
+
+ let(:search_result) do
+ Article.search('title:test')
+ end
+
+ it 'allows searching for documents' do
+ expect(search_result.results.size).to be(2)
+ expect(search_result.records.size).to be(2)
+ end
+ end
+
+ describe '#results' do
+
+ let(:search_result) do
+ Article.search('title:test')
+ end
+
+ it 'returns an instance of Response::Result' do
+ expect(search_result.results.first).to be_a(Elasticsearch::Model::Response::Result)
+ end
+
+ it 'properly loads the document' do
+ expect(search_result.results.first.title).to eq('Test')
+ end
+
+ context 'when the result contains other data' do
+
+ let(:search_result) do
+ Article.search(query: { match: { title: 'test' } }, highlight: { fields: { title: {} } })
+ end
+
+ it 'allows access to the Elasticsearch result' do
+ expect(search_result.results.first.title).to eq('Test')
+ expect(search_result.results.first.title?).to be(true)
+ expect(search_result.results.first.boo?).to be(false)
+ expect(search_result.results.first.highlight?).to be(true)
+ expect(search_result.results.first.highlight.title?).to be(true)
+ expect(search_result.results.first.highlight.boo?).to be(false)
+ end
+ end
+ end
+
+ describe '#records' do
+
+ let(:search_result) do
+ Article.search('title:test')
+ end
+
+ it 'returns an instance of the model' do
+ expect(search_result.records.first).to be_a(Article)
+ end
+
+ it 'properly loads the document' do
+ expect(search_result.records.first.title).to eq('Test')
+ end
+ end
+
+ describe 'Enumerable' do
+
+ let(:search_result) do
+ Article.search('title:test')
+ end
+
+ it 'allows iteration over results' do
+ expect(search_result.results.map(&:_id)).to eq(['1', '2'])
+ end
+
+ it 'allows iteration over records' do
+ expect(search_result.records.map(&:id)).to eq([1, 2])
+ end
+ end
+
+ describe '#id' do
+
+ let(:search_result) do
+ Article.search('title:test')
+ end
+
+ it 'returns the id' do
+ expect(search_result.results.first.id).to eq('1')
+ end
+ end
+
+ describe '#type' do
+
+ let(:search_result) do
+ Article.search('title:test')
+ end
+
+ it 'returns the type' do
+ expect(search_result.results.first.type).to eq('article')
+ end
+ end
+
+ describe '#each_with_hit' do
+
+ let(:search_result) do
+ Article.search('title:test')
+ end
+
+ it 'returns the record with the
Elasticsearch hit' do + search_result.records.each_with_hit do |r, h| + expect(h._score).not_to be_nil + expect(h._source.title).not_to be_nil + end + end + end + + describe 'search results order' do + + let(:search_result) do + Article.search(query: { match: { title: 'code' }}, sort: { clicks: :desc }) + end + + it 'preserves the search results order when accessing a single record' do + expect(search_result.records[0].clicks).to be(3) + expect(search_result.records[1].clicks).to be(2) + expect(search_result.records.first).to eq(search_result.records[0]) + end + + it 'preserves the search results order for the list of records' do + search_result.records.each_with_hit do |r, h| + expect(r.id.to_s).to eq(h._id) + end + + search_result.records.map_with_hit do |r, h| + expect(r.id.to_s).to eq(h._id) + end + end + end + + describe 'a paged collection' do + + let(:search_result) do + Article.search(query: { match: { title: { query: 'test' } } }, + size: 2, + from: 1) + end + + it 'applies the paged options to the search' do + expect(search_result.results.size).to eq(1) + expect(search_result.results.first.title).to eq('Testing Coding') + expect(search_result.records.size).to eq(1) + expect(search_result.records.first.title).to eq('Testing Coding') + end + end + + describe '#destroy' do + + before do + Article.create!(title: 'destroy', body: '', clicks: 1) + Article.__elasticsearch__.refresh_index! + Article.where(title: 'destroy').first.destroy + + Article.__elasticsearch__.refresh_index! + end + + let(:search_result) do + Article.search('title:test') + end + + it 'removes the document from the index' do + expect(Article.count).to eq(3) + expect(search_result.results.size).to eq(2) + expect(search_result.records.size).to eq(2) + end + end + + describe 'full document updates' do + + before do + article = Article.create!(title: 'update', body: '', clicks: 1) + Article.__elasticsearch__.refresh_index! + article.title = 'Writing' + article.save + + Article.__elasticsearch__.refresh_index! + end + + let(:search_result) do + Article.search('title:write') + end + + it 'applies the update' do + expect(search_result.results.size).to eq(1) + expect(search_result.records.size).to eq(1) + end + end + + describe 'attribute updates' do + + before do + article = Article.create!(title: 'update', body: '', clicks: 1) + Article.__elasticsearch__.refresh_index! + article.title = 'special' + article.save + + Article.__elasticsearch__.refresh_index! + end + + let(:search_result) do + Article.search('title:special') + end + + it 'applies the update' do + expect(search_result.results.size).to eq(1) + expect(search_result.records.size).to eq(1) + end + end + + describe '#save' do + + before do + article = Article.create!(title: 'save', body: '', clicks: 1) + + ActiveRecord::Base.transaction do + article.body = 'dummy' + article.save + + article.title = 'special' + article.save + end + + article.__elasticsearch__.update_document + Article.__elasticsearch__.refresh_index! 
+ end + + let(:search_result) do + Article.search('body:dummy') + end + + it 'applies the save' do + expect(search_result.results.size).to eq(1) + expect(search_result.records.size).to eq(1) + end + end + + describe 'a DSL search' do + + let(:search_result) do + Article.search(query: { match: { title: { query: 'test' } } }) + end + + it 'returns the results' do + expect(search_result.results.size).to eq(2) + expect(search_result.records.size).to eq(2) + end + end + + describe 'chaining SQL queries on response.records' do + + let(:search_result) do + Article.search(query: { match: { title: { query: 'test' } } }) + end + + it 'executes the SQL request with the chained query criteria' do + expect(search_result.records.size).to eq(2) + expect(search_result.records.where(title: 'Test').size).to eq(1) + expect(search_result.records.where(title: 'Test').first.title).to eq('Test') + end + end + + describe 'ordering of SQL queries' do + + context 'when order is called on the ActiveRecord query' do + + let(:search_result) do + Article.search query: { match: { title: { query: 'test' } } } + end + + it 'allows the SQL query to be ordered independent of the Elasticsearch results order', unless: active_record_at_least_4? do + expect(search_result.records.order('title DESC').first.title).to eq('Testing Coding') + expect(search_result.records.order('title DESC')[0].title).to eq('Testing Coding') + end + + it 'allows the SQL query to be ordered independent of the Elasticsearch results order', if: active_record_at_least_4? do + expect(search_result.records.order(title: :desc).first.title).to eq('Testing Coding') + expect(search_result.records.order(title: :desc)[0].title).to eq('Testing Coding') + end + end + + context 'when more methods are chained on the ActiveRecord query' do + + let(:search_result) do + Article.search query: {match: {title: {query: 'test'}}} + end + + it 'allows the SQL query to be ordered independent of the Elasticsearch results order', if: active_record_at_least_4? 
do
+ expect(search_result.records.distinct.order(title: :desc).first.title).to eq('Testing Coding')
+ expect(search_result.records.distinct.order(title: :desc)[0].title).to eq('Testing Coding')
+ end
+ end
+ end
+
+ describe 'access to the response via methods' do
+
+ let(:search_result) do
+ Article.search(query: { match: { title: { query: 'test' } } },
+ aggregations: {
+ dates: { date_histogram: { field: 'created_at', interval: 'hour' } },
+ clicks: { global: {}, aggregations: { min: { min: { field: 'clicks' } } } }
+ },
+ suggest: { text: 'tezt', title: { term: { field: 'title', suggest_mode: 'always' } } })
+ end
+
+ it 'allows document keys to be accessed via methods' do
+ expect(search_result.aggregations.dates.buckets.first.doc_count).to eq(2)
+ expect(search_result.aggregations.clicks.doc_count).to eq(6)
+ expect(search_result.aggregations.clicks.min.value).to eq(1.0)
+ expect(search_result.aggregations.clicks.max).to be_nil
+ expect(search_result.suggestions.title.first.options.size).to eq(1)
+ expect(search_result.suggestions.terms).to eq(['test'])
+ end
+ end
+end
diff --git a/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/dynamic_index_name_spec.rb b/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/dynamic_index_name_spec.rb
new file mode 100644
index 000000000..1a116ec7d
--- /dev/null
+++ b/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/dynamic_index_name_spec.rb
@@ -0,0 +1,18 @@
+require 'spec_helper'
+
+describe 'Elasticsearch::Model::Adapter::ActiveRecord Dynamic Index naming' do
+
+ before do
+ ArticleWithDynamicIndexName.counter = 0
+ end
+
+ it 'evaluates the index_name value' do
+ expect(ArticleWithDynamicIndexName.index_name).to eq('articles-1')
+ end
+
+ it 're-evaluates the index name with each call' do
+ expect(ArticleWithDynamicIndexName.index_name).to eq('articles-1')
+ expect(ArticleWithDynamicIndexName.index_name).to eq('articles-2')
+ expect(ArticleWithDynamicIndexName.index_name).to eq('articles-3')
+ end
+end
diff --git a/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/import_spec.rb b/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/import_spec.rb
new file mode 100644
index 000000000..52301b01a
--- /dev/null
+++ b/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/import_spec.rb
@@ -0,0 +1,187 @@
+require 'spec_helper'
+
+describe 'Elasticsearch::Model::Adapter::ActiveRecord Importing' do
+
+ before(:all) do
+ ActiveRecord::Schema.define(:version => 1) do
+ create_table :import_articles do |t|
+ t.string :title
+ t.integer :views
+ t.string :numeric # For the sake of invalid data sent to Elasticsearch
+ t.datetime :created_at, :default => 'NOW()'
+ end
+ end
+
+ ImportArticle.delete_all
+ ImportArticle.__elasticsearch__.client.cluster.health(wait_for_status: 'yellow')
+ end
+
+ before do
+ ImportArticle.__elasticsearch__.create_index!
+ end
+
+ after do
+ clear_indices(ImportArticle)
+ clear_tables(ImportArticle)
+ end
+
+ describe '#import' do
+
+ context 'when no search criteria is specified' do
+
+ before do
+ 10.times { |i| ImportArticle.create! title: 'Test', views: "#{i}" }
+ ImportArticle.import
+ ImportArticle.__elasticsearch__.refresh_index!
+ end
+
+ it 'imports all documents' do
+ expect(ImportArticle.search('*').results.total).to eq(10)
+ end
+ end
+
+ context 'when batch size is specified' do
+
+ before do
+ 10.times { |i| ImportArticle.create!
title: 'Test', views: "#{i}" } + end + + let!(:batch_count) do + batches = 0 + errors = ImportArticle.import(batch_size: 5) do |response| + batches += 1 + end + ImportArticle.__elasticsearch__.refresh_index! + batches + end + + it 'imports using the batch size' do + expect(batch_count).to eq(2) + end + + it 'imports all the documents' do + expect(ImportArticle.search('*').results.total).to eq(10) + end + end + + context 'when a scope is specified' do + + before do + 10.times { |i| ImportArticle.create! title: 'Test', views: "#{i}" } + ImportArticle.import(scope: 'popular', force: true) + ImportArticle.__elasticsearch__.refresh_index! + end + + it 'applies the scope' do + expect(ImportArticle.search('*').results.total).to eq(5) + end + end + + context 'when a query is specified' do + + before do + 10.times { |i| ImportArticle.create! title: 'Test', views: "#{i}" } + ImportArticle.import(query: -> { where('views >= 3') }) + ImportArticle.__elasticsearch__.refresh_index! + end + + it 'applies the query' do + expect(ImportArticle.search('*').results.total).to eq(7) + end + end + + context 'when there are invalid documents' do + + let!(:result) do + 10.times { |i| ImportArticle.create! title: 'Test', views: "#{i}" } + new_article + batches = 0 + errors = ImportArticle.__elasticsearch__.import(batch_size: 5) do |response| + batches += 1 + end + ImportArticle.__elasticsearch__.refresh_index! + { batch_size: batches, errors: errors} + end + + let(:new_article) do + ImportArticle.create!(title: "Test INVALID", numeric: "INVALID") + end + + it 'does not import them' do + expect(ImportArticle.search('*').results.total).to eq(10) + expect(result[:batch_size]).to eq(3) + expect(result[:errors]).to eq(1) + end + end + + context 'when a transform proc is specified' do + + before do + 10.times { |i| ImportArticle.create! title: 'Test', views: "#{i}" } + ImportArticle.import( transform: ->(a) {{ index: { data: { name: a.title, foo: 'BAR' } }}} ) + ImportArticle.__elasticsearch__.refresh_index! + end + + it 'transforms the documents' do + expect(ImportArticle.search('*').results.first._source.keys).to include('name') + expect(ImportArticle.search('*').results.first._source.keys).to include('foo') + end + + it 'imports all documents' do + expect(ImportArticle.search('test').results.total).to eq(10) + expect(ImportArticle.search('bar').results.total).to eq(10) + end + end + + context 'when the model has a default scope' do + + around(:all) do |example| + 10.times { |i| ImportArticle.create! title: 'Test', views: "#{i}" } + ImportArticle.instance_eval { default_scope { where('views > 3') } } + example.run + ImportArticle.default_scopes.pop + end + + before do + ImportArticle.__elasticsearch__.import + ImportArticle.__elasticsearch__.refresh_index! + end + + it 'uses the default scope' do + expect(ImportArticle.search('*').results.total).to eq(6) + end + end + + context 'when there is a default scope and a query specified' do + + around(:all) do |example| + 10.times { |i| ImportArticle.create! title: 'Test', views: "#{i}" } + ImportArticle.instance_eval { default_scope { where('views > 3') } } + example.run + ImportArticle.default_scopes.pop + end + + before do + ImportArticle.import(query: -> { where('views <= 4') }) + ImportArticle.__elasticsearch__.refresh_index! 
+ end + + it 'combines the query and the default scope' do + expect(ImportArticle.search('*').results.total).to eq(1) + end + end + + context 'when the batch is empty' do + + before do + ImportArticle.delete_all + ImportArticle.import + ImportArticle.__elasticsearch__.refresh_index! + end + + it 'does not make any requests to create documents' do + expect(ImportArticle.search('*').results.total).to eq(0) + end + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/multi_model_spec.rb b/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/multi_model_spec.rb new file mode 100644 index 000000000..96c65fc5c --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/multi_model_spec.rb @@ -0,0 +1,110 @@ +require 'spec_helper' + +describe 'Elasticsearch::Model::Adapter::ActiveRecord MultiModel' do + + before(:all) do + ActiveRecord::Schema.define do + create_table Episode.table_name do |t| + t.string :name + t.datetime :created_at, :default => 'NOW()' + end + + create_table Series.table_name do |t| + t.string :name + t.datetime :created_at, :default => 'NOW()' + end + end + end + + before do + models = [ Episode, Series ] + clear_tables(models) + models.each do |model| + model.__elasticsearch__.create_index! force: true + model.create name: "The #{model.name}" + model.create name: "A great #{model.name}" + model.create name: "The greatest #{model.name}" + model.__elasticsearch__.refresh_index! + end + end + + after do + clear_indices(Episode, Series) + clear_tables(Episode, Series) + end + + context 'when the search is across multimodels' do + + let(:search_result) do + Elasticsearch::Model.search(%q<"The greatest Episode"^2 OR "The greatest Series">, [Series, Episode]) + end + + it 'executes the search across models' do + expect(search_result.results.size).to eq(2) + expect(search_result.records.size).to eq(2) + end + + describe '#results' do + + it 'returns an instance of Elasticsearch::Model::Response::Result' do + expect(search_result.results[0]).to be_a(Elasticsearch::Model::Response::Result) + expect(search_result.results[1]).to be_a(Elasticsearch::Model::Response::Result) + end + + it 'returns the correct model instance' do + expect(search_result.results[0].name).to eq('The greatest Episode') + expect(search_result.results[1].name).to eq('The greatest Series') + end + + it 'provides access to the results' do + expect(search_result.results[0].name).to eq('The greatest Episode') + expect(search_result.results[0].name?).to be(true) + expect(search_result.results[0].boo?).to be(false) + + expect(search_result.results[1].name).to eq('The greatest Series') + expect(search_result.results[1].name?).to be(true) + expect(search_result.results[1].boo?).to be(false) + end + end + + describe '#records' do + + it 'returns an instance of Elasticsearch::Model::Response::Result' do + expect(search_result.records[0]).to be_a(Episode) + expect(search_result.records[1]).to be_a(Series) + end + + it 'returns the correct model instance' do + expect(search_result.records[0].name).to eq('The greatest Episode') + expect(search_result.records[1].name).to eq('The greatest Series') + end + + context 'when the data store is changed' do + + before do + Series.find_by_name("The greatest Series").delete + Series.__elasticsearch__.refresh_index! 
+ end + + it 'only returns matching records' do + expect(search_result.results.size).to eq(2) + expect(search_result.records.size).to eq(1 ) + expect(search_result.records[0].name).to eq('The greatest Episode') + end + end + end + + describe 'pagination' do + + let(:search_result) do + Elasticsearch::Model.search('series OR episode', [Series, Episode]) + end + + it 'properly paginates the results' do + expect(search_result.page(1).per(3).results.size).to eq(3) + expect(search_result.page(2).per(3).results.size).to eq(3) + expect(search_result.page(3).per(3).results.size).to eq(0) + end + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/namespaced_model_spec.rb b/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/namespaced_model_spec.rb new file mode 100644 index 000000000..ea426d3f2 --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/namespaced_model_spec.rb @@ -0,0 +1,38 @@ +require 'spec_helper' + +describe 'Elasticsearch::Model::Adapter::ActiveRecord Namespaced Model' do + + before(:all) do + ActiveRecord::Schema.define(:version => 1) do + create_table :books do |t| + t.string :title + end + end + + MyNamespace::Book.delete_all + MyNamespace::Book.__elasticsearch__.create_index!(force: true) + MyNamespace::Book.create!(title: 'Test') + MyNamespace::Book.__elasticsearch__.refresh_index! + end + + after do + clear_indices(MyNamespace::Book) + clear_tables(MyNamespace::Book) + end + + context 'when the model is namespaced' do + + it 'has the proper index name' do + expect(MyNamespace::Book.index_name).to eq('my_namespace-books') + end + + it 'has the proper document type' do + expect(MyNamespace::Book.document_type).to eq('book') + end + + it 'saves the document into the index' do + expect(MyNamespace::Book.search('title:test').results.size).to eq(1) + expect(MyNamespace::Book.search('title:test').results.first.title).to eq('Test') + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/pagination_spec.rb b/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/pagination_spec.rb new file mode 100644 index 000000000..9427fae48 --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/pagination_spec.rb @@ -0,0 +1,315 @@ +require 'spec_helper' + +describe 'Elasticsearch::Model::Adapter::ActiveRecord Pagination' do + + before(:all) do + ActiveRecord::Schema.define(:version => 1) do + create_table ArticleForPagination.table_name do |t| + t.string :title + t.datetime :created_at, :default => 'NOW()' + t.boolean :published + end + end + + Kaminari::Hooks.init if defined?(Kaminari::Hooks) + + ArticleForPagination.__elasticsearch__.create_index! force: true + + 68.times do |i| + ArticleForPagination.create! title: "Test #{i}", published: (i % 2 == 0) + end + + ArticleForPagination.import + ArticleForPagination.__elasticsearch__.refresh_index! 
+ end + + context 'when no other page is specified' do + + let(:records) do + ArticleForPagination.search('title:test').page(1).records + end + + describe '#size' do + + it 'returns the correct size' do + expect(records.size).to eq(25) + end + end + + describe '#current_page' do + + it 'returns the correct current page' do + expect(records.current_page).to eq(1) + end + end + + describe '#prev_page' do + + it 'returns the correct previous page' do + expect(records.prev_page).to be_nil + end + end + + describe '#next_page' do + + it 'returns the correct next page' do + expect(records.next_page).to eq(2) + end + end + + describe '#total_pages' do + + it 'returns the correct total pages' do + expect(records.total_pages).to eq(3) + end + end + + describe '#first_page?' do + + it 'returns the correct first page' do + expect(records.first_page?).to be(true) + end + end + + describe '#last_page?' do + + it 'returns the correct last page' do + expect(records.last_page?).to be(false) + end + end + + describe '#out_of_range?' do + + it 'returns whether the pagination is out of range' do + expect(records.out_of_range?).to be(false) + end + end + end + + context 'when a specific page is specified' do + + let(:records) do + ArticleForPagination.search('title:test').page(2).records + end + + describe '#size' do + + it 'returns the correct size' do + expect(records.size).to eq(25) + end + end + + describe '#current_page' do + + it 'returns the correct current page' do + expect(records.current_page).to eq(2) + end + end + + describe '#prev_page' do + + it 'returns the correct previous page' do + expect(records.prev_page).to eq(1) + end + end + + describe '#next_page' do + + it 'returns the correct next page' do + expect(records.next_page).to eq(3) + end + end + + describe '#total_pages' do + + it 'returns the correct total pages' do + expect(records.total_pages).to eq(3) + end + end + + describe '#first_page?' do + + it 'returns the correct first page' do + expect(records.first_page?).to be(false) + end + end + + describe '#last_page?' do + + it 'returns the correct last page' do + expect(records.last_page?).to be(false) + end + end + + describe '#out_of_range?' do + + it 'returns whether the pagination is out of range' do + expect(records.out_of_range?).to be(false) + end + end + end + + context 'when a the last page is specified' do + + let(:records) do + ArticleForPagination.search('title:test').page(3).records + end + + describe '#size' do + + it 'returns the correct size' do + expect(records.size).to eq(18) + end + end + + describe '#current_page' do + + it 'returns the correct current page' do + expect(records.current_page).to eq(3) + end + end + + describe '#prev_page' do + + it 'returns the correct previous page' do + expect(records.prev_page).to eq(2) + end + end + + describe '#next_page' do + + it 'returns the correct next page' do + expect(records.next_page).to be_nil + end + end + + describe '#total_pages' do + + it 'returns the correct total pages' do + expect(records.total_pages).to eq(3) + end + end + + describe '#first_page?' do + + it 'returns the correct first page' do + expect(records.first_page?).to be(false) + end + end + + describe '#last_page?' do + + it 'returns the correct last page' do + expect(records.last_page?).to be(true) + end + end + + describe '#out_of_range?' 
do + + it 'returns whether the pagination is out of range' do + expect(records.out_of_range?).to be(false) + end + end + end + + context 'when an invalid page is specified' do + + let(:records) do + ArticleForPagination.search('title:test').page(6).records + end + + describe '#size' do + + it 'returns the correct size' do + expect(records.size).to eq(0) + end + end + + describe '#current_page' do + + it 'returns the correct current page' do + expect(records.current_page).to eq(6) + end + end + + describe '#next_page' do + + it 'returns the correct next page' do + expect(records.next_page).to be_nil + end + end + + describe '#total_pages' do + + it 'returns the correct total pages' do + expect(records.total_pages).to eq(3) + end + end + + describe '#first_page?' do + + it 'returns the correct first page' do + expect(records.first_page?).to be(false) + end + end + + describe '#last_page?' do + + it 'returns whether it is the last page', if: !(Kaminari::VERSION < '1') do + expect(records.last_page?).to be(false) + end + + it 'returns whether it is the last page', if: Kaminari::VERSION < '1' do + expect(records.last_page?).to be(true) # Kaminari returns current_page >= total_pages in version < 1.0 + end + end + + describe '#out_of_range?' do + + it 'returns whether the pagination is out of range' do + expect(records.out_of_range?).to be(true) + end + end + end + + context 'when a scope is also specified' do + + let(:records) do + ArticleForPagination.search('title:test').page(2).records.published + end + + describe '#size' do + + it 'returns the correct size' do + expect(records.size).to eq(12) + end + end + end + + context 'when a sorting is specified' do + + let(:search) do + ArticleForPagination.search({ query: { match: { title: 'test' } }, sort: [ { id: 'desc' } ] }) + end + + it 'applies the sort' do + expect(search.page(2).records.first.id).to eq(43) + expect(search.page(3).records.first.id).to eq(18) + expect(search.page(2).per(5).records.first.id).to eq(63) + end + end + + context 'when the model has a specific default per page set' do + + around do |example| + original_default = ArticleForPagination.instance_variable_get(:@_default_per_page) + ArticleForPagination.paginates_per 50 + example.run + ArticleForPagination.paginates_per original_default + end + + it 'uses the default per page setting' do + expect(ArticleForPagination.search('*').page(1).records.size).to eq(50) + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/parent_child_spec.rb b/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/parent_child_spec.rb new file mode 100644 index 000000000..647cb6dde --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/parent_child_spec.rb @@ -0,0 +1,75 @@ +require 'spec_helper' + +describe 'Elasticsearch::Model::Adapter::ActiveRecord Parent-Child' do + + before(:all) do + ActiveRecord::Schema.define(version: 1) do + create_table :questions do |t| + t.string :title + t.text :text + t.string :author + t.timestamps null: false + end + + create_table :answers do |t| + t.text :text + t.string :author + t.references :question + t.timestamps null: false + end + + add_index(:answers, :question_id) unless index_exists?(:answers, :question_id) + + clear_tables(Question) + ParentChildSearchable.create_index!(force: true) + + q_1 = Question.create!(title: 'First Question', author: 'John') + q_2 = Question.create!(title: 'Second Question', author: 'Jody') + + q_1.answers.create!(text: 'Lorem Ipsum', 
author: 'Adam')
+ q_1.answers.create!(text: 'Dolor Sit', author: 'Ryan')
+
+ q_2.answers.create!(text: 'Amet Et', author: 'John')
+
+ Question.__elasticsearch__.refresh_index!
+ end
+ end
+
+ describe 'has_child search' do
+
+ let(:search_result) do
+ Question.search(query: { has_child: { type: 'answer', query: { match: { author: 'john' } } } })
+ end
+
+ it 'finds parents by matching on child search criteria' do
+ expect(search_result.records.first.title).to eq('Second Question')
+ end
+ end
+
+ describe 'has_parent search' do
+
+ let(:search_result) do
+ Answer.search(query: { has_parent: { parent_type: 'question', query: { match: { author: 'john' } } } })
+ end
+
+ it 'finds children by matching on parent criteria' do
+ expect(search_result.records.map(&:author)).to match(['Adam', 'Ryan'])
+ end
+ end
+
+ context 'when a parent is deleted' do
+
+ before do
+ Question.where(title: 'First Question').each(&:destroy)
+ Question.__elasticsearch__.refresh_index!
+ end
+
+ let(:search_result) do
+ Answer.search(query: { has_parent: { parent_type: 'question', query: { match_all: {} } } })
+ end
+
+ it 'deletes the children' do
+ expect(search_result.results.total).to eq(1)
+ end
+ end
+end
diff --git a/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/serialization_spec.rb b/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/serialization_spec.rb
new file mode 100644
index 000000000..c1a8510bb
--- /dev/null
+++ b/elasticsearch-model/spec/elasticsearch/model/adapters/active_record/serialization_spec.rb
@@ -0,0 +1,62 @@
+require 'spec_helper'
+
+describe 'Elasticsearch::Model::Adapter::ActiveRecord Serialization' do
+
+ before(:all) do
+ ActiveRecord::Schema.define(:version => 1) do
+ create_table ArticleWithCustomSerialization.table_name do |t|
+ t.string :title
+ t.string :status
+ end
+ end
+
+ ArticleWithCustomSerialization.delete_all
+ ArticleWithCustomSerialization.__elasticsearch__.create_index!(force: true)
+ end
+
+ context 'when the model has a custom serialization defined' do
+
+ before do
+ ArticleWithCustomSerialization.create!(title: 'Test', status: 'green')
+ ArticleWithCustomSerialization.__elasticsearch__.refresh_index!
+ end
+
+ context 'when a document is indexed' do
+
+ let(:search_result) do
+ ArticleWithCustomSerialization.__elasticsearch__.client.get(index: 'article_with_custom_serializations',
+ type: '_doc',
+ id: '1')
+ end
+
+ it 'applies the serialization when indexing' do
+ expect(search_result['_source']).to eq('title' => 'Test')
+ end
+ end
+
+ context 'when a document is updated' do
+
+ before do
+ article.update_attribute(:title, 'UPDATED')
+ article.update_attribute(:status, 'yellow')
+ ArticleWithCustomSerialization.__elasticsearch__.refresh_index!
+ end
+
+ let!(:article) do
+ art = ArticleWithCustomSerialization.create!(title: 'Test', status: 'red')
+ ArticleWithCustomSerialization.__elasticsearch__.refresh_index!
+ art
+ end
+
+ let(:search_result) do
+ ArticleWithCustomSerialization.__elasticsearch__.client.get(index: 'article_with_custom_serializations',
+ type: '_doc',
+ id: article.id)
+ end
+
+ it 'applies the serialization when updating' do
+ expect(search_result['_source']).to eq('title' => 'UPDATED')
+ end
+ end
+ end
+end
diff --git a/elasticsearch-model/spec/elasticsearch/model/adapters/active_record_spec.rb b/elasticsearch-model/spec/elasticsearch/model/adapters/active_record_spec.rb
new file mode 100644
index 000000000..6e0cb7d64
--- /dev/null
+++ b/elasticsearch-model/spec/elasticsearch/model/adapters/active_record_spec.rb
@@ -0,0 +1,207 @@
+require 'spec_helper'
+
+describe Elasticsearch::Model::Adapter::ActiveRecord do
+
+ before(:all) do
+ class DummyClassForActiveRecord; end
+ end
+
+ after(:all) do
+ Elasticsearch::Model::Adapter::Adapter.adapters.delete(DummyClassForActiveRecord)
+ remove_classes(DummyClassForActiveRecord)
+ end
+
+ let(:model) do
+ DummyClassForActiveRecord.new.tap do |m|
+ allow(m).to receive(:response).and_return(double('response', response: response))
+ allow(m).to receive(:ids).and_return(ids)
+ end
+ end
+
+ let(:response) do
+ { 'hits' => {'hits' => [ {'_id' => 2 }, {'_id' => 1 } ]} }
+ end
+
+ let(:ids) do
+ [2, 1]
+ end
+
+ let(:record_1) do
+ double('record').tap do |rec|
+ allow(rec).to receive(:id).and_return(1)
+ end
+ end
+
+ let(:record_2) do
+ double('record').tap do |rec|
+ allow(rec).to receive(:id).and_return(2)
+ end
+ end
+
+ let(:records) do
+ [record_1, record_2].tap do |r|
+ allow(r).to receive(:load).and_return(true)
+ allow(r).to receive(:exec_queries).and_return(true)
+ end
+ end
+
+ describe 'adapter registration' do
+
+ before(:all) do
+ DummyClassForActiveRecord.__send__ :include, Elasticsearch::Model::Adapter::ActiveRecord::Records
+ end
+
+ it 'can register an adapter' do
+ expect(Elasticsearch::Model::Adapter.adapters[Elasticsearch::Model::Adapter::ActiveRecord]).not_to be_nil
+ expect(Elasticsearch::Model::Adapter.adapters[Elasticsearch::Model::Adapter::ActiveRecord].call(DummyClassForActiveRecord)).to be(false)
+ end
+ end
+
+ describe '#records' do
+
+ before(:all) do
+ DummyClassForActiveRecord.__send__ :include, Elasticsearch::Model::Adapter::ActiveRecord::Records
+ end
+
+ let(:instance) do
+ model.tap do |inst|
+ allow(inst).to receive(:klass).and_return(double('class', primary_key: :some_key, where: records)).at_least(:once)
+ allow(inst).to receive(:order).and_return(double('class', primary_key: :some_key, where: records)).at_least(:once)
+ end
+ end
+
+ it 'returns the list of records' do
+ expect(instance.records).to eq(records)
+ end
+
+ it 'loads the records' do
+ expect(instance.load).to eq(true)
+ end
+
+ context 'when :includes is specified' do
+
+ before do
+ expect(records).to receive(:includes).with([:submodel]).once.and_return(records)
+ instance.options[:includes] = [:submodel]
+ end
+
+ it 'incorporates the includes option in the query' do
+ expect(instance.records).to eq(records)
+ end
+ end
+ end
+
+ describe 'callbacks registration' do
+
+ before do
+ expect(DummyClassForActiveRecord).to receive(:after_commit).exactly(3).times
+ end
+
+ it 'should register the model class for callbacks' do
+ Elasticsearch::Model::Adapter::ActiveRecord::Callbacks.included(DummyClassForActiveRecord)
+ end
+ end
+
+ describe 'importing' do
+
+ before do
+ DummyClassForActiveRecord.__send__ :extend, Elasticsearch::Model::Adapter::ActiveRecord::Importing
+ end
+
+ context 'when an invalid scope is specified' do
+
+ it 'raises
a NoMethodError' do + expect { + DummyClassForActiveRecord.__find_in_batches(scope: :not_found_method) + }.to raise_exception(NoMethodError) + end + end + + context 'when a valid scope is specified' do + + before do + expect(DummyClassForActiveRecord).to receive(:find_in_batches).once.and_return([]) + expect(DummyClassForActiveRecord).to receive(:published).once.and_return(DummyClassForActiveRecord) + end + + it 'uses the scope' do + expect(DummyClassForActiveRecord.__find_in_batches(scope: :published)).to eq([]) + end + end + + context 'allow query criteria to be specified' do + + before do + expect(DummyClassForActiveRecord).to receive(:find_in_batches).once.and_return([]) + expect(DummyClassForActiveRecord).to receive(:where).with(color: 'red').once.and_return(DummyClassForActiveRecord) + end + + it 'uses the scope' do + expect(DummyClassForActiveRecord.__find_in_batches(query: -> { where(color: 'red') })).to eq([]) + end + end + + context 'when preprocessing batches' do + + context 'if the query returns results' do + + before do + class << DummyClassForActiveRecord + def find_in_batches(options = {}, &block) + yield [:a, :b] + end + + def update_batch(batch) + batch.collect { |b| b.to_s + '!' } + end + end + end + + it 'applies the preprocessing method' do + DummyClassForActiveRecord.__find_in_batches(preprocess: :update_batch) do |batch| + expect(batch).to match(['a!', 'b!']) + end + end + end + + context 'if the query does not return results' do + + before do + class << DummyClassForActiveRecord + def find_in_batches(options = {}, &block) + yield [:a, :b] + end + + def update_batch(batch) + [] + end + end + end + + it 'applies the preprocessing method' do + DummyClassForActiveRecord.__find_in_batches(preprocess: :update_batch) do |batch| + expect(batch).to match([]) + end + end + end + end + + context 'when transforming models' do + + let(:instance) do + model.tap do |inst| + allow(inst).to receive(:id).and_return(1) + allow(inst).to receive(:__elasticsearch__).and_return(double('object', id: 1, as_indexed_json: {})) + end + end + + it 'returns an proc' do + expect(DummyClassForActiveRecord.__transform.respond_to?(:call)).to be(true) + end + + it 'provides a default transformation' do + expect(DummyClassForActiveRecord.__transform.call(instance)).to eq(index: { _id: 1, data: {} }) + end + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/adapters/default_spec.rb b/elasticsearch-model/spec/elasticsearch/model/adapters/default_spec.rb new file mode 100644 index 000000000..08064df0f --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/adapters/default_spec.rb @@ -0,0 +1,41 @@ +require 'spec_helper' + +describe Elasticsearch::Model::Adapter::Default do + + before(:all) do + class DummyClassForDefaultAdapter; end + DummyClassForDefaultAdapter.__send__ :include, Elasticsearch::Model::Adapter::Default::Records + DummyClassForDefaultAdapter.__send__ :include, Elasticsearch::Model::Adapter::Default::Importing + end + + after(:all) do + Elasticsearch::Model::Adapter::Adapter.adapters.delete(DummyClassForDefaultAdapter) + remove_classes(DummyClassForDefaultAdapter) + end + + let(:instance) do + DummyClassForDefaultAdapter.new.tap do |m| + allow(m).to receive(:klass).and_return(double('class', primary_key: :some_key, find: [1])).at_least(:once) + end + end + + it 'should have the default records implementation' do + expect(instance.records).to eq([1]) + end + + it 'should have the default Callback implementation' do + 
expect(Elasticsearch::Model::Adapter::Default::Callbacks).to be_a(Module) + end + + it 'should have the default Importing implementation' do + expect { + DummyClassForDefaultAdapter.new.__find_in_batches + }.to raise_exception(Elasticsearch::Model::NotImplemented) + end + + it 'should have the default transform implementation' do + expect { + DummyClassForDefaultAdapter.new.__transform + }.to raise_exception(Elasticsearch::Model::NotImplemented) + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/adapters/mongoid/basic_spec.rb b/elasticsearch-model/spec/elasticsearch/model/adapters/mongoid/basic_spec.rb new file mode 100644 index 000000000..f4aefc740 --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/adapters/mongoid/basic_spec.rb @@ -0,0 +1,267 @@ +require 'spec_helper' + +describe Elasticsearch::Model::Adapter::Mongoid, if: test_mongoid? do + + before(:all) do + connect_mongoid('mongoid_test') + Elasticsearch::Model::Adapter.register \ + Elasticsearch::Model::Adapter::Mongoid, + lambda { |klass| !!defined?(::Mongoid::Document) && klass.respond_to?(:ancestors) && klass.ancestors.include?(::Mongoid::Document) } + + MongoidArticle.__elasticsearch__.create_index! force: true + + MongoidArticle.delete_all + + MongoidArticle.__elasticsearch__.refresh_index! + MongoidArticle.__elasticsearch__.client.cluster.health wait_for_status: 'yellow' + end + + after do + clear_indices(MongoidArticle) + clear_tables(MongoidArticle) + end + + describe 'searching' do + + before do + MongoidArticle.create! title: 'Test' + MongoidArticle.create! title: 'Testing Coding' + MongoidArticle.create! title: 'Coding' + MongoidArticle.__elasticsearch__.refresh_index! + end + + let(:search_result) do + MongoidArticle.search('title:test') + end + + it 'find the documents successfully' do + expect(search_result.results.size).to eq(2) + expect(search_result.records.size).to eq(2) + end + + describe '#results' do + + it 'returns a Elasticsearch::Model::Response::Result' do + expect(search_result.results.first).to be_a(Elasticsearch::Model::Response::Result) + end + + it 'retrieves the document from Elasticsearch' do + expect(search_result.results.first.title).to eq('Test') + end + + it 'retrieves all results' do + expect(search_result.results.collect(&:title)).to match(['Test', 'Testing Coding']) + end + end + + describe '#records' do + + it 'returns an instance of the model' do + expect(search_result.records.first).to be_a(MongoidArticle) + end + + it 'retrieves the document from Elasticsearch' do + expect(search_result.records.first.title).to eq('Test') + end + + it 'iterates over the records' do + expect(search_result.records.first.title).to eq('Test') + end + + it 'retrieves all records' do + expect(search_result.records.collect(&:title)).to match(['Test', 'Testing Coding']) + end + + describe '#each_with_hit' do + + it 'yields each hit with the model object' do + search_result.records.each_with_hit do |r, h| + expect(h._source).not_to be_nil + expect(h._source.title).not_to be_nil + end + end + + it 'preserves the search order' do + search_result.records.each_with_hit do |r, h| + expect(r.id.to_s).to eq(h._id) + end + end + end + + describe '#map_with_hit' do + + it 'yields each hit with the model object' do + search_result.records.map_with_hit do |r, h| + expect(h._source).not_to be_nil + expect(h._source.title).not_to be_nil + end + end + + it 'preserves the search order' do + search_result.records.map_with_hit do |r, h| + expect(r.id.to_s).to eq(h._id) + end + end + end + end + end + + 
describe '#destroy' do + + let(:article) do + MongoidArticle.create!(title: 'Test') + end + + before do + article + MongoidArticle.create!(title: 'Coding') + article.destroy + MongoidArticle.__elasticsearch__.refresh_index! + end + + it 'removes documents from the index' do + expect(MongoidArticle.search('title:test').results.total).to eq(0) + expect(MongoidArticle.search('title:code').results.total).to eq(1) + end + end + + describe 'updates to the document' do + + let(:article) do + MongoidArticle.create!(title: 'Test') + end + + before do + article.title = 'Writing' + article.save + MongoidArticle.__elasticsearch__.refresh_index! + end + + it 'indexes updates' do + expect(MongoidArticle.search('title:write').results.total).to eq(1) + expect(MongoidArticle.search('title:test').results.total).to eq(0) + end + end + + describe 'DSL search' do + + before do + MongoidArticle.create! title: 'Test' + MongoidArticle.create! title: 'Testing Coding' + MongoidArticle.create! title: 'Coding' + MongoidArticle.__elasticsearch__.refresh_index! + end + + let(:search_result) do + MongoidArticle.search(query: { match: { title: { query: 'test' } } }) + end + + it 'finds the matching documents' do + expect(search_result.results.size).to eq(2) + expect(search_result.records.size).to eq(2) + end + end + + describe 'paging a collection' do + + before do + MongoidArticle.create! title: 'Test' + MongoidArticle.create! title: 'Testing Coding' + MongoidArticle.create! title: 'Coding' + MongoidArticle.__elasticsearch__.refresh_index! + end + + let(:search_result) do + MongoidArticle.search(query: { match: { title: { query: 'test' } } }, + size: 2, + from: 1) + end + + it 'applies the size and from parameters' do + expect(search_result.results.size).to eq(1) + expect(search_result.results.first.title).to eq('Testing Coding') + expect(search_result.records.size).to eq(1) + expect(search_result.records.first.title).to eq('Testing Coding') + end + end + + describe 'importing' do + + before do + 97.times { |i| MongoidArticle.create! title: "Test #{i}" } + MongoidArticle.__elasticsearch__.create_index! force: true + MongoidArticle.__elasticsearch__.client.cluster.health wait_for_status: 'yellow' + end + + context 'when there is no default scope' do + + let!(:batch_count) do + batches = 0 + errors = MongoidArticle.import(batch_size: 10) do |response| + batches += 1 + end + MongoidArticle.__elasticsearch__.refresh_index! + batches + end + + it 'imports all the documents' do + expect(MongoidArticle.search('*').results.total).to eq(97) + end + + it 'uses the specified batch size' do + expect(batch_count).to eq(10) + end + end + + context 'when there is a default scope' do + + around(:all) do |example| + 10.times { |i| MongoidArticle.create! title: 'Test', views: "#{i}" } + MongoidArticle.default_scope -> { MongoidArticle.gt(views: 3) } + example.run + MongoidArticle.default_scoping = nil + end + + before do + MongoidArticle.__elasticsearch__.import + MongoidArticle.__elasticsearch__.refresh_index! + end + + it 'uses the default scope' do + expect(MongoidArticle.search('*').results.total).to eq(6) + end + end + + context 'when there is a default scope and a query specified' do + + around(:all) do |example| + 10.times { |i| MongoidArticle.create! title: 'Test', views: "#{i}" } + MongoidArticle.default_scope -> { MongoidArticle.gt(views: 3) } + example.run + MongoidArticle.default_scoping = nil + end + + before do + MongoidArticle.import(query: -> { lte(views: 4) }) + MongoidArticle.__elasticsearch__.refresh_index! 
+ end + + it 'combines the query and the default scope' do + expect(MongoidArticle.search('*').results.total).to eq(1) + end + end + + context 'when the batch is empty' do + + before do + MongoidArticle.delete_all + MongoidArticle.import + MongoidArticle.__elasticsearch__.refresh_index! + end + + it 'does not make any requests to create documents' do + expect(MongoidArticle.search('*').results.total).to eq(0) + end + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/adapters/mongoid/multi_model_spec.rb b/elasticsearch-model/spec/elasticsearch/model/adapters/mongoid/multi_model_spec.rb new file mode 100644 index 000000000..e4308ab98 --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/adapters/mongoid/multi_model_spec.rb @@ -0,0 +1,66 @@ +require 'spec_helper' + +describe 'Elasticsearch::Model::Adapter::ActiveRecord Multimodel', if: test_mongoid? do + + before(:all) do + connect_mongoid('mongoid_test') + + begin + ActiveRecord::Schema.define(:version => 1) do + create_table Episode.table_name do |t| + t.string :name + t.datetime :created_at, :default => 'NOW()' + end + end + rescue + end + end + + before do + clear_tables(Episode, Image) + Episode.__elasticsearch__.create_index! force: true + Episode.create name: "TheEpisode" + Episode.create name: "A great Episode" + Episode.create name: "The greatest Episode" + Episode.__elasticsearch__.refresh_index! + + Image.__elasticsearch__.create_index! force: true + Image.create! name: "The Image" + Image.create! name: "A great Image" + Image.create! name: "The greatest Image" + Image.__elasticsearch__.refresh_index! + Image.__elasticsearch__.client.cluster.health wait_for_status: 'yellow' + end + + after do + [Episode, Image].each do |model| + model.__elasticsearch__.client.delete_by_query(index: model.index_name, q: '*') + model.delete_all + model.__elasticsearch__.refresh_index! 
+ end + end + + context 'when the search is across multimodels with different adapters' do + + let(:search_result) do + Elasticsearch::Model.search(%q<"greatest Episode" OR "greatest Image"^2>, [Episode, Image]) + end + + it 'executes the search across models' do + expect(search_result.results.size).to eq(2) + expect(search_result.records.size).to eq(2) + end + + it 'returns the correct type of model instance' do + expect(search_result.records[0]).to be_a(Image) + expect(search_result.records[1]).to be_a(Episode) + end + + it 'creates the model instances with the correct attributes' do + expect(search_result.results[0].name).to eq('The greatest Image') + expect(search_result.records[0].name).to eq('The greatest Image') + expect(search_result.results[1].name).to eq('The greatest Episode') + expect(search_result.records[1].name).to eq('The greatest Episode') + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/adapters/mongoid_spec.rb b/elasticsearch-model/spec/elasticsearch/model/adapters/mongoid_spec.rb new file mode 100644 index 000000000..3a01a6635 --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/adapters/mongoid_spec.rb @@ -0,0 +1,235 @@ +require 'spec_helper' + +describe Elasticsearch::Model::Adapter::Mongoid do + + before(:all) do + class DummyClassForMongoid; end + ::Symbol.class_eval { def in; self; end } + end + + after(:all) do + Elasticsearch::Model::Adapter::Adapter.adapters.delete(DummyClassForMongoid) + remove_classes(DummyClassForMongoid) + end + + let(:response) do + { 'hits' => {'hits' => [ {'_id' => 2}, {'_id' => 1} ]} } + end + + let(:ids) do + [2, 1] + end + + let(:record_1) do + double('record').tap do |rec| + allow(rec).to receive(:id).and_return(1) + end + end + + let(:record_2) do + double('record').tap do |rec| + allow(rec).to receive(:load).and_return(true) + allow(rec).to receive(:id).and_return(2) + end + end + + let(:records) do + [record_1, record_2] + end + + let(:model) do + DummyClassForMongoid.new.tap do |m| + allow(m).to receive(:response).and_return(double('response', response: response)) + allow(m).to receive(:ids).and_return(ids) + end + end + + describe 'adapter registration' do + + it 'registers an adapater' do + expect(Elasticsearch::Model::Adapter.adapters[Elasticsearch::Model::Adapter::Mongoid]).not_to be_nil + expect(Elasticsearch::Model::Adapter.adapters[Elasticsearch::Model::Adapter::Mongoid].call(DummyClassForMongoid)).to be(false) + end + + it 'registers the records module' do + expect(Elasticsearch::Model::Adapter::Mongoid::Records).to be_a(Module) + end + end + + describe '#records' do + + before(:all) do + DummyClassForMongoid.__send__ :include, Elasticsearch::Model::Adapter::Mongoid::Records + end + + let(:instance) do + model.tap do |inst| + allow(inst).to receive(:klass).and_return(double('class', where: records)).at_least(:once) + end + end + + it 'returns the records' do + expect(instance.records).to eq(records) + end + + context 'when an order is not defined for the Mongoid query' do + + context 'when the records have a different order than the hits' do + + before do + records.instance_variable_set(:@records, records) + end + + it 'reorders the records based on hits order' do + expect(records.collect(&:id)).to eq([1, 2]) + expect(instance.records.to_a.collect(&:id)).to eq([2, 1]) + end + end + + context 'when an order is defined for the Mongoid query' do + + context 'when the records have a different order than the hits' do + + before do + records.instance_variable_set(:@records, records) + 
expect(instance.records).to receive(:asc).and_return(records) + end + + it 'reorders the records based on hits order' do + expect(records.collect(&:id)).to eq([1, 2]) + expect(instance.records.to_a.collect(&:id)).to eq([2, 1]) + expect(instance.asc.to_a.collect(&:id)).to eq([1, 2]) + end + end + end + end + + describe 'callbacks registration' do + + before do + expect(DummyClassForMongoid).to receive(:after_create).once + expect(DummyClassForMongoid).to receive(:after_update).once + expect(DummyClassForMongoid).to receive(:after_destroy).once + end + + it 'should register the model class for callbacks' do + Elasticsearch::Model::Adapter::Mongoid::Callbacks.included(DummyClassForMongoid) + end + end + end + + describe 'importing' do + + before(:all) do + DummyClassForMongoid.__send__ :extend, Elasticsearch::Model::Adapter::Mongoid::Importing + end + + let(:relation) do + double('relation', each_slice: []).tap do |rel| + allow(rel).to receive(:published).and_return(rel) + allow(rel).to receive(:no_timeout).and_return(rel) + allow(rel).to receive(:class_exec).and_return(rel) + end + end + + before do + allow(DummyClassForMongoid).to receive(:all).and_return(relation) + end + + context 'when a scope is specified' do + + it 'applies the scope' do + expect(DummyClassForMongoid.__find_in_batches(scope: :published) do; end).to eq([]) + end + end + + context 'query criteria specified as a proc' do + + let(:query) do + Proc.new { where(color: "red") } + end + + it 'execites the query' do + expect(DummyClassForMongoid.__find_in_batches(query: query) do; end).to eq([]) + end + end + + context 'query criteria specified as a hash' do + + before do + expect(relation).to receive(:where).with(color: 'red').and_return(relation) + end + + let(:query) do + { color: "red" } + end + + it 'execites the query' do + expect(DummyClassForMongoid.__find_in_batches(query: query) do; end).to eq([]) + end + end + + context 'when preprocessing batches' do + + context 'if the query returns results' do + + before do + class << DummyClassForMongoid + def find_in_batches(options = {}, &block) + yield [:a, :b] + end + + def update_batch(batch) + batch.collect { |b| b.to_s + '!' 
} + end + end + end + + it 'applies the preprocessing method' do + DummyClassForMongoid.__find_in_batches(preprocess: :update_batch) do |batch| + expect(batch).to match(['a!', 'b!']) + end + end + end + + context 'if the query does not return results' do + + before do + class << DummyClassForMongoid + def find_in_batches(options = {}, &block) + yield [:a, :b] + end + + def update_batch(batch) + [] + end + end + end + + it 'applies the preprocessing method' do + DummyClassForMongoid.__find_in_batches(preprocess: :update_batch) do |batch| + expect(batch).to match([]) + end + end + end + end + + context 'when transforming models' do + + let(:instance) do + model.tap do |inst| + allow(inst).to receive(:as_indexed_json).and_return({}) + allow(inst).to receive(:id).and_return(1) + end + end + + it 'returns an proc' do + expect(DummyClassForMongoid.__transform.respond_to?(:call)).to be(true) + end + + it 'provides a default transformation' do + expect(DummyClassForMongoid.__transform.call(instance)).to eq(index: { _id: '1', data: {} }) + end + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/adapters/multiple_spec.rb b/elasticsearch-model/spec/elasticsearch/model/adapters/multiple_spec.rb new file mode 100644 index 000000000..947df1717 --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/adapters/multiple_spec.rb @@ -0,0 +1,125 @@ +require 'spec_helper' + +describe Elasticsearch::Model::Adapter::Multiple do + + before(:all) do + class DummyOne + include Elasticsearch::Model + + index_name 'dummy' + document_type 'dummy_one' + + def self.find(ids) + ids.map { |id| new(id) } + end + + attr_reader :id + + def initialize(id) + @id = id.to_i + end + end + + module Namespace + class DummyTwo + include Elasticsearch::Model + + index_name 'dummy' + document_type 'dummy_two' + + def self.find(ids) + ids.map { |id| new(id) } + end + + attr_reader :id + + def initialize(id) + @id = id.to_i + end + end + end + + class DummyTwo + include Elasticsearch::Model + + index_name 'other_index' + document_type 'dummy_two' + + def self.find(ids) + ids.map { |id| new(id) } + end + + attr_reader :id + + def initialize(id) + @id = id.to_i + end + end + end + + after(:all) do + [DummyOne, Namespace::DummyTwo, DummyTwo].each do |adapter| + Elasticsearch::Model::Adapter::Adapter.adapters.delete(adapter) + end + Namespace.send(:remove_const, :DummyTwo) if defined?(Namespace::DummyTwo) + remove_classes(DummyOne, DummyTwo, Namespace) + end + + let(:hits) do + [ + { + _index: 'dummy', + _type: 'dummy_two', + _id: '2' + }, + { + _index: 'dummy', + _type: 'dummy_one', + _id: '2' + }, + { + _index: 'other_index', + _type: 'dummy_two', + _id: '1' + }, + { + _index: 'dummy', + _type: 'dummy_two', + _id: '1' + }, + { + _index: 'dummy', + _type: 'dummy_one', + _id: '3' + } + ] + end + + let(:response) do + double('response', response: { 'hits' => { 'hits' => hits } }) + end + + let(:multimodel) do + Elasticsearch::Model::Multimodel.new(DummyOne, DummyTwo, Namespace::DummyTwo) + end + + describe '#records' do + + before do + multimodel.class.send :include, Elasticsearch::Model::Adapter::Multiple::Records + expect(multimodel).to receive(:response).at_least(:once).and_return(response) + end + + it 'instantiates the correct types of instances' do + expect(multimodel.records[0]).to be_a(Namespace::DummyTwo) + expect(multimodel.records[1]).to be_a(DummyOne) + expect(multimodel.records[2]).to be_a(DummyTwo) + expect(multimodel.records[3]).to be_a(Namespace::DummyTwo) + expect(multimodel.records[4]).to 
be_a(DummyOne) + end + + it 'returns the results in the correct order' do + expect(multimodel.records.map(&:id)).to eq([2, 2, 1, 1, 3]) + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/callbacks_spec.rb b/elasticsearch-model/spec/elasticsearch/model/callbacks_spec.rb new file mode 100644 index 000000000..d10ce656c --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/callbacks_spec.rb @@ -0,0 +1,33 @@ +require 'spec_helper' + +describe Elasticsearch::Model::Callbacks do + + before(:all) do + class ::DummyCallbacksModel + end + + module DummyCallbacksAdapter + module CallbacksMixin + end + + def callbacks_mixin + CallbacksMixin + end; module_function :callbacks_mixin + end + end + + after(:all) do + remove_classes(DummyCallbacksModel, DummyCallbacksAdapter) + end + + context 'when a model includes the Callbacks module' do + + before do + Elasticsearch::Model::Callbacks.included(DummyCallbacksModel) + end + + it 'includes the callbacks mixin from the model adapter' do + expect(DummyCallbacksModel.ancestors).to include(Elasticsearch::Model::Adapter::Default::Callbacks) + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/client_spec.rb b/elasticsearch-model/spec/elasticsearch/model/client_spec.rb new file mode 100644 index 000000000..ea273af73 --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/client_spec.rb @@ -0,0 +1,66 @@ +require 'spec_helper' + +describe Elasticsearch::Model::Client do + + before(:all) do + class ::DummyClientModel + extend Elasticsearch::Model::Client::ClassMethods + include Elasticsearch::Model::Client::InstanceMethods + end + end + + after(:all) do + remove_classes(DummyClientModel) + end + + context 'when a class includes the client module class methods' do + + it 'defines the client module class methods on the model' do + expect(DummyClientModel.client).to be_a(Elasticsearch::Transport::Client) + end + end + + context 'when a class includes the client module instance methods' do + + it 'defines the client module class methods on the model' do + expect(DummyClientModel.new.client).to be_a(Elasticsearch::Transport::Client) + end + end + + context 'when the client is set on the class' do + + around do |example| + original_client = DummyClientModel.client + DummyClientModel.client = 'foobar' + example.run + DummyClientModel.client = original_client + end + + it 'sets the client on the class' do + expect(DummyClientModel.client).to eq('foobar') + end + + it 'sets the client on an instance' do + expect(DummyClientModel.new.client).to eq('foobar') + end + end + + context 'when the client is set on an instance' do + + before do + model_instance.client = 'foo' + end + + let(:model_instance) do + DummyClientModel.new + end + + it 'sets the client on an instance' do + expect(model_instance.client).to eq('foo') + end + + it 'does not set the client on the class' do + expect(DummyClientModel.client).to be_a(Elasticsearch::Transport::Client) + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/hash_wrapper_spec.rb b/elasticsearch-model/spec/elasticsearch/model/hash_wrapper_spec.rb new file mode 100644 index 000000000..53f018726 --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/hash_wrapper_spec.rb @@ -0,0 +1,12 @@ +require 'spec_helper' + +describe Elasticsearch::Model::HashWrapper, if: Hashie::VERSION >= '3.5.3' do + + before do + expect(Hashie.logger).to receive(:warn).never + end + + it 'does not print a warning for re-defined methods' do + 
Elasticsearch::Model::HashWrapper.new(:foo => 'bar', :sort => true) + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/importing_spec.rb b/elasticsearch-model/spec/elasticsearch/model/importing_spec.rb new file mode 100644 index 000000000..c46f00ba6 --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/importing_spec.rb @@ -0,0 +1,214 @@ +require 'spec_helper' + +describe Elasticsearch::Model::Importing do + + before(:all) do + class DummyImportingModel + end + + module DummyImportingAdapter + module ImportingMixin + def __find_in_batches(options={}, &block) + yield if block_given? + end + def __transform + lambda {|a|} + end + end + + def importing_mixin + ImportingMixin + end; module_function :importing_mixin + end + end + + after(:all) do + remove_classes(DummyImportingModel, DummyImportingAdapter) + end + + before do + allow(Elasticsearch::Model::Adapter).to receive(:from_class).with(DummyImportingModel).and_return(DummyImportingAdapter) + DummyImportingModel.__send__ :include, Elasticsearch::Model::Importing + end + + context 'when a model includes the Importing module' do + + it 'provides importing methods' do + expect(DummyImportingModel.respond_to?(:import)).to be(true) + expect(DummyImportingModel.respond_to?(:__find_in_batches)).to be(true) + end + end + + describe '#import' do + + before do + allow(DummyImportingModel).to receive(:index_name).and_return('foo') + allow(DummyImportingModel).to receive(:document_type).and_return('foo') + allow(DummyImportingModel).to receive(:index_exists?).and_return(true) + allow(DummyImportingModel).to receive(:__batch_to_bulk) + allow(client).to receive(:bulk).and_return(response) + end + + let(:client) do + double('client') + end + + let(:response) do + { 'items' => [] } + end + + context 'when no options are provided' do + + before do + expect(DummyImportingModel).to receive(:client).and_return(client) + allow(DummyImportingModel).to receive(:index_exists?).and_return(true) + end + + it 'uses the client to import documents' do + expect(DummyImportingModel.import).to eq(0) + end + end + + context 'when there is an error' do + + before do + expect(DummyImportingModel).to receive(:client).and_return(client) + allow(DummyImportingModel).to receive(:index_exists?).and_return(true) + end + + let(:response) do + { 'items' => [{ 'index' => { } }, { 'index' => { 'error' => 'FAILED' } }] } + end + + it 'returns the number of errors' do + expect(DummyImportingModel.import).to eq(1) + end + + context 'when the method is called with the option to return the errors' do + + it 'returns the errors' do + expect(DummyImportingModel.import(return: 'errors')).to eq([{ 'index' => { 'error' => 'FAILED' } }]) + end + end + + context 'when the method is called with a block' do + + it 'yields the response to the block' do + DummyImportingModel.import do |response| + expect(response['items'].size).to eq(2) + end + end + end + end + + context 'when the index does not exist' do + + before do + allow(DummyImportingModel).to receive(:index_exists?).and_return(false) + end + + it 'raises an exception' do + expect { + DummyImportingModel.import + }.to raise_exception(ArgumentError) + end + end + + context 'when the method is called with the force option' do + + before do + expect(DummyImportingModel).to receive(:create_index!).with(force: true, index: 'foo').and_return(true) + expect(DummyImportingModel).to receive(:__find_in_batches).with(foo: 'bar').and_return(true) + end + + it 'deletes and creates the index' do + 
expect(DummyImportingModel.import(force: true, foo: 'bar')).to eq(0) + end + end + + context 'when the method is called with the refresh option' do + + before do + expect(DummyImportingModel).to receive(:refresh_index!).with(index: 'foo').and_return(true) + expect(DummyImportingModel).to receive(:__find_in_batches).with(foo: 'bar').and_return(true) + end + + it 'refreshes the index' do + expect(DummyImportingModel.import(refresh: true, foo: 'bar')).to eq(0) + end + end + + context 'when a different index name is provided' do + + before do + expect(DummyImportingModel).to receive(:client).and_return(client) + expect(client).to receive(:bulk).with(body: nil, index: 'my-new-index', type: 'foo').and_return(response) + end + + it 'uses the alternate index name' do + expect(DummyImportingModel.import(index: 'my-new-index')).to eq(0) + end + end + + context 'when a different document type is provided' do + + before do + expect(DummyImportingModel).to receive(:client).and_return(client) + expect(client).to receive(:bulk).with(body: nil, index: 'foo', type: 'my-new-type').and_return(response) + end + + it 'uses the alternate index name' do + expect(DummyImportingModel.import(type: 'my-new-type')).to eq(0) + end + end + + context 'the transform method' do + + before do + expect(DummyImportingModel).to receive(:client).and_return(client) + expect(DummyImportingModel).to receive(:__transform).and_return(transform) + expect(DummyImportingModel).to receive(:__batch_to_bulk).with(anything, transform) + end + + let(:transform) do + lambda {|a|} + end + + it 'applies the transform method to the results' do + expect(DummyImportingModel.import).to eq(0) + end + end + + context 'when a transform is provided as an option' do + + context 'when the transform option is not a lambda' do + + let(:transform) do + 'not_callable' + end + + it 'raises an error' do + expect { + DummyImportingModel.import(transform: transform) + }.to raise_exception(ArgumentError) + end + end + + context 'when the transform option is a lambda' do + + before do + expect(DummyImportingModel).to receive(:client).and_return(client) + expect(DummyImportingModel).to receive(:__batch_to_bulk).with(anything, transform) + end + + let(:transform) do + lambda {|a|} + end + + it 'applies the transform lambda to the results' do + expect(DummyImportingModel.import(transform: transform)).to eq(0) + end + end + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/indexing_spec.rb b/elasticsearch-model/spec/elasticsearch/model/indexing_spec.rb new file mode 100644 index 000000000..54656d4df --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/indexing_spec.rb @@ -0,0 +1,918 @@ +require 'spec_helper' + +describe Elasticsearch::Model::Indexing do + + before(:all) do + class ::DummyIndexingModel + extend ActiveModel::Naming + extend Elasticsearch::Model::Naming::ClassMethods + extend Elasticsearch::Model::Indexing::ClassMethods + + def self.foo + 'bar' + end + end + + class NotFound < Exception; end + end + + after(:all) do + remove_classes(DummyIndexingModel, NotFound) + end + + describe 'the Settings class' do + + it 'should be convertible to a hash' do + expect(Elasticsearch::Model::Indexing::Settings.new(foo: 'bar').to_hash).to eq(foo: 'bar') + end + + it 'should be convertible to json' do + expect(Elasticsearch::Model::Indexing::Settings.new(foo: 'bar').as_json).to eq(foo: 'bar') + end + end + + describe '#settings' do + + it 'returns an instance of the Settings class' do + expect(DummyIndexingModel.settings).to 
be_a(Elasticsearch::Model::Indexing::Settings) + end + + context 'when the settings are updated' do + + before do + DummyIndexingModel.settings(foo: 'boo') + DummyIndexingModel.settings(bar: 'bam') + end + + it 'updates the settings on the class' do + expect(DummyIndexingModel.settings.to_hash).to eq(foo: 'boo', bar: 'bam') + end + end + + context 'when the settings are updated with a yml file' do + + before do + DummyIndexingModel.settings File.open('spec/support/model.yml') + DummyIndexingModel.settings bar: 'bam' + end + + it 'updates the settings on the class' do + expect(DummyIndexingModel.settings.to_hash).to eq(foo: 'boo', bar: 'bam', 'baz' => 'qux') + end + end + + context 'when the settings are updated with a json file' do + + before do + DummyIndexingModel.settings File.open('spec/support/model.json') + DummyIndexingModel.settings bar: 'bam' + end + + it 'updates the settings on the class' do + expect(DummyIndexingModel.settings.to_hash).to eq(foo: 'boo', bar: 'bam', 'baz' => 'qux', 'laz' => 'qux') + end + end + end + + describe '#mappings' do + + let(:expected_mapping_hash) do + { :mytype => { foo: 'bar', :properties => {} } } + end + + it 'returns an instance of the Mappings class' do + expect(DummyIndexingModel.mappings).to be_a(Elasticsearch::Model::Indexing::Mappings) + end + + it 'raises an exception when there is no type passed to the #initialize method' do + expect { + Elasticsearch::Model::Indexing::Mappings.new + }.to raise_exception(ArgumentError) + end + + it 'should be convertible to a hash' do + expect(Elasticsearch::Model::Indexing::Mappings.new(:mytype, { foo: 'bar' }).to_hash).to eq(expected_mapping_hash) + end + + it 'should be convertible to json' do + expect(Elasticsearch::Model::Indexing::Mappings.new(:mytype, { foo: 'bar' }).as_json).to eq(expected_mapping_hash) + end + + context 'when specific mappings are defined' do + + let(:mappings) do + Elasticsearch::Model::Indexing::Mappings.new(:mytype) + end + + before do + mappings.indexes :foo, { type: 'boolean', include_in_all: false } + mappings.indexes :bar + end + + it 'creates the correct mapping definition' do + expect(mappings.to_hash[:mytype][:properties][:foo][:type]).to eq('boolean') + end + + it 'uses text as the default type' do + expect(mappings.to_hash[:mytype][:properties][:bar][:type]).to eq('text') + end + + context 'when mappings are defined for multiple fields' do + + before do + mappings.indexes :my_field, type: 'text' do + indexes :raw, type: 'keyword' + end + end + + it 'defines the mapping for all the fields' do + expect(mappings.to_hash[:mytype][:properties][:my_field][:type]).to eq('text') + expect(mappings.to_hash[:mytype][:properties][:my_field][:fields][:raw][:type]).to eq('keyword') + expect(mappings.to_hash[:mytype][:properties][:my_field][:fields][:raw][:properties]).to be_nil + end + end + + context 'when embedded properties are defined' do + + before do + mappings.indexes :foo do + indexes :bar + end + + mappings.indexes :foo_object, type: 'object' do + indexes :bar + end + + mappings.indexes :foo_nested, type: 'nested' do + indexes :bar + end + + mappings.indexes :foo_nested_as_symbol, type: :nested do + indexes :bar + end + end + + it 'defines mappings for the embedded properties' do + expect(mappings.to_hash[:mytype][:properties][:foo][:type]).to eq('object') + expect(mappings.to_hash[:mytype][:properties][:foo][:properties][:bar][:type]).to eq('text') + expect(mappings.to_hash[:mytype][:properties][:foo][:fields]).to be_nil + + 
expect(mappings.to_hash[:mytype][:properties][:foo_object][:type]).to eq('object') + expect(mappings.to_hash[:mytype][:properties][:foo_object][:properties][:bar][:type]).to eq('text') + expect(mappings.to_hash[:mytype][:properties][:foo_object][:fields]).to be_nil + + expect(mappings.to_hash[:mytype][:properties][:foo_nested][:type]).to eq('nested') + expect(mappings.to_hash[:mytype][:properties][:foo_nested][:properties][:bar][:type]).to eq('text') + expect(mappings.to_hash[:mytype][:properties][:foo_nested][:fields]).to be_nil + + expect(mappings.to_hash[:mytype][:properties][:foo_nested_as_symbol][:type]).to eq(:nested) + expect(mappings.to_hash[:mytype][:properties][:foo_nested_as_symbol][:properties]).not_to be_nil + expect(mappings.to_hash[:mytype][:properties][:foo_nested_as_symbol][:fields]).to be_nil + end + end + end + + context 'when the method is called on a class' do + + before do + DummyIndexingModel.mappings(foo: 'boo') + DummyIndexingModel.mappings(bar: 'bam') + end + + let(:expected_mappings_hash) do + { _doc: { foo: "boo", bar: "bam", properties: {} } } + end + + it 'sets the mappings' do + expect(DummyIndexingModel.mappings.to_hash).to eq(expected_mappings_hash) + end + + context 'when the method is called with a block' do + + before do + DummyIndexingModel.mapping do + indexes :foo, type: 'boolean' + end + end + + it 'sets the mappings' do + expect(DummyIndexingModel.mapping.to_hash[:_doc][:properties][:foo][:type]).to eq('boolean') + end + end + end + end + + describe 'instance methods' do + + before(:all) do + class ::DummyIndexingModelWithCallbacks + extend Elasticsearch::Model::Indexing::ClassMethods + include Elasticsearch::Model::Indexing::InstanceMethods + + def self.before_save(&block) + (@callbacks ||= {})[block.hash] = block + end + + def changes_to_save + {:foo => ['One', 'Two']} + end + end + + class ::DummyIndexingModelWithNoChanges + extend Elasticsearch::Model::Indexing::ClassMethods + include Elasticsearch::Model::Indexing::InstanceMethods + + def self.before_save(&block) + (@callbacks ||= {})[block.hash] = block + end + + def changes_to_save + {} + end + end + + class ::DummyIndexingModelWithCallbacksAndCustomAsIndexedJson + extend Elasticsearch::Model::Indexing::ClassMethods + include Elasticsearch::Model::Indexing::InstanceMethods + + def self.before_save(&block) + (@callbacks ||= {})[block.hash] = block + end + + def changes_to_save + {:foo => ['A', 'B'], :bar => ['C', 'D']} + end + + def as_indexed_json(options={}) + { :foo => 'B' } + end + end + + class ::DummyIndexingModelWithOldDirty + extend Elasticsearch::Model::Indexing::ClassMethods + include Elasticsearch::Model::Indexing::InstanceMethods + + def self.before_save(&block) + (@callbacks ||= {})[block.hash] = block + end + + def changes + {:foo => ['One', 'Two']} + end + end + end + + after(:all) do + Object.send(:remove_const, :DummyIndexingModelWithCallbacks) if defined?(DummyIndexingModelWithCallbacks) + Object.send(:remove_const, :DummyIndexingModelWithNoChanges) if defined?(DummyIndexingModelWithNoChanges) + Object.send(:remove_const, :DummyIndexingModelWithCallbacksAndCustomAsIndexedJson) if defined?(DummyIndexingModelWithCallbacksAndCustomAsIndexedJson) + Object.send(:remove_const, :DummyIndexingModelWithOldDirty) if defined?(DummyIndexingModelWithOldDirty) + end + + context 'when the module is included' do + + context 'when the model uses the old ActiveModel::Dirty' do + + before do + DummyIndexingModelWithOldDirty.__send__ :include, Elasticsearch::Model::Indexing::InstanceMethods + 
end + + it 'registers callbacks' do + expect(DummyIndexingModelWithOldDirty.instance_variable_get(:@callbacks)).not_to be_empty + end + + let(:instance) do + DummyIndexingModelWithOldDirty.new + end + + it 'sets the @__changed_model_attributes variable before the callback' do + DummyIndexingModelWithOldDirty.instance_variable_get(:@callbacks).each do |n, callback| + instance.instance_eval(&callback) + expect(instance.instance_variable_get(:@__changed_model_attributes)).to eq(foo: 'Two') + end + end + end + + context 'when the model users the current ActiveModel::Dirty' do + + before do + DummyIndexingModelWithCallbacks.__send__ :include, Elasticsearch::Model::Indexing::InstanceMethods + end + + it 'registers callbacks' do + expect(DummyIndexingModelWithCallbacks.instance_variable_get(:@callbacks)).not_to be_empty + end + + let(:instance) do + DummyIndexingModelWithCallbacks.new + end + + it 'sets the @__changed_model_attributes variable before the callback' do + DummyIndexingModelWithCallbacks.instance_variable_get(:@callbacks).each do |n, callback| + instance.instance_eval(&callback) + expect(instance.instance_variable_get(:@__changed_model_attributes)).to eq(foo: 'Two') + end + end + end + end + + describe '#index_document' do + + before do + expect(instance).to receive(:client).and_return(client) + expect(instance).to receive(:as_indexed_json).and_return('JSON') + expect(instance).to receive(:index_name).and_return('foo') + expect(instance).to receive(:document_type).and_return('bar') + expect(instance).to receive(:id).and_return('1') + end + + let(:client) do + double('client') + end + + let(:instance) do + DummyIndexingModelWithCallbacks.new + end + + context 'when no options are passed to the method' do + + before do + expect(client).to receive(:index).with(index: 'foo', type: 'bar', id: '1', body: 'JSON').and_return(true) + end + + it 'provides the method on an instance' do + expect(instance.index_document).to be(true) + end + end + + context 'when extra options are passed to the method' do + + before do + expect(client).to receive(:index).with(index: 'foo', type: 'bar', id: '1', body: 'JSON', parent: 'A').and_return(true) + end + + it 'passes the extra options to the method call on the client' do + expect(instance.index_document(parent: 'A')).to be(true) + end + end + end + + describe '#delete_document' do + + before do + expect(instance).to receive(:client).and_return(client) + expect(instance).to receive(:index_name).and_return('foo') + expect(instance).to receive(:document_type).and_return('bar') + expect(instance).to receive(:id).and_return('1') + end + + let(:client) do + double('client') + end + + let(:instance) do + DummyIndexingModelWithCallbacks.new + end + + context 'when no options are passed to the method' do + + before do + expect(client).to receive(:delete).with(index: 'foo', type: 'bar', id: '1').and_return(true) + end + + it 'provides the method on an instance' do + expect(instance.delete_document).to be(true) + end + end + + context 'when extra options are passed to the method' do + + before do + expect(client).to receive(:delete).with(index: 'foo', type: 'bar', id: '1', parent: 'A').and_return(true) + end + + it 'passes the extra options to the method call on the client' do + expect(instance.delete_document(parent: 'A')).to be(true) + end + end + end + + describe '#update_document' do + + let(:client) do + double('client') + end + + let(:instance) do + DummyIndexingModelWithCallbacks.new + end + + context 'when no changes are present' do + + before do + 
expect(instance).to receive(:index_document).and_return(true) + expect(client).to receive(:update).never + instance.instance_variable_set(:@__changed_model_attributes, nil) + end + + it 'updates the document' do + expect(instance.update_document).to be(true) + end + end + + context 'when changes are present' do + + before do + allow(instance).to receive(:client).and_return(client) + allow(instance).to receive(:index_name).and_return('foo') + allow(instance).to receive(:document_type).and_return('bar') + allow(instance).to receive(:id).and_return('1') + end + + context 'when the changes are included in the as_indexed_json representation' do + + before do + instance.instance_variable_set(:@__changed_model_attributes, { foo: 'bar' }) + expect(client).to receive(:update).with(index: 'foo', type: 'bar', id: '1', body: { doc: { foo: 'bar' } }).and_return(true) + end + + it 'updates the document' do + expect(instance.update_document).to be(true) + end + end + + context 'when the changes are not all included in the as_indexed_json representation' do + + let(:instance) do + DummyIndexingModelWithCallbacksAndCustomAsIndexedJson.new + end + + before do + instance.instance_variable_set(:@__changed_model_attributes, {'foo' => 'B', 'bar' => 'D' }) + expect(client).to receive(:update).with(index: 'foo', type: 'bar', id: '1', body: { doc: { foo: 'B' } }).and_return(true) + end + + it 'updates the document' do + expect(instance.update_document).to be(true) + end + end + + context 'when none of the changes are included in the as_indexed_json representation' do + + let(:instance) do + DummyIndexingModelWithCallbacksAndCustomAsIndexedJson.new + end + + before do + instance.instance_variable_set(:@__changed_model_attributes, {'bar' => 'D' }) + end + + it 'does not update the document' do + expect(instance.update_document).to_not be(true) + end + end + + context 'when there are partial updates' do + + let(:instance) do + DummyIndexingModelWithCallbacksAndCustomAsIndexedJson.new + end + + before do + instance.instance_variable_set(:@__changed_model_attributes, { 'foo' => { 'bar' => 'BAR'} }) + expect(instance).to receive(:as_indexed_json).and_return('foo' => 'BAR') + expect(client).to receive(:update).with(index: 'foo', type: 'bar', id: '1', body: { doc: { 'foo' => 'BAR' } }).and_return(true) + end + + it 'updates the document' do + expect(instance.update_document).to be(true) + end + end + end + end + + describe '#update_document_attributes' do + + let(:client) do + double('client') + end + + let(:instance) do + DummyIndexingModelWithCallbacks.new + end + + context 'when changes are present' do + + before do + expect(instance).to receive(:client).and_return(client) + expect(instance).to receive(:index_name).and_return('foo') + expect(instance).to receive(:document_type).and_return('bar') + expect(instance).to receive(:id).and_return('1') + instance.instance_variable_set(:@__changed_model_attributes, { author: 'john' }) + end + + context 'when no options are specified' do + + before do + expect(client).to receive(:update).with(index: 'foo', type: 'bar', id: '1', body: { doc: { title: 'green' } }).and_return(true) + end + + it 'updates the document' do + expect(instance.update_document_attributes(title: 'green')).to be(true) + end + end + + context 'when extra options are provided' do + + before do + expect(client).to receive(:update).with(index: 'foo', type: 'bar', id: '1', body: { doc: { title: 'green' } }, refresh: true).and_return(true) + end + + it 'updates the document' do + 
expect(instance.update_document_attributes({ title: 'green' }, refresh: true)).to be(true) + end + end + end + end + end + + describe '#index_exists?' do + + before do + expect(DummyIndexingModel).to receive(:client).and_return(client) + end + + context 'when the index exists' do + + let(:client) do + double('client', indices: double('indices', exists: true)) + end + + it 'returns true' do + expect(DummyIndexingModel.index_exists?).to be(true) + end + end + + context 'when the index does not exists' do + + let(:client) do + double('client', indices: double('indices', exists: false)) + end + + it 'returns false' do + expect(DummyIndexingModel.index_exists?).to be(false) + end + end + + context 'when the index API raises an error' do + + let(:client) do + double('client').tap do |cl| + expect(cl).to receive(:indices).and_raise(StandardError) + end + end + + it 'returns false' do + expect(DummyIndexingModel.index_exists?).to be(false) + end + end + + context 'when the indices.exists API raises an error' do + + let(:client) do + double('client', indices: indices) + end + + let(:indices) do + double('indices').tap do |ind| + expect(ind).to receive(:exists).and_raise(StandardError) + end + end + + it 'returns false' do + expect(DummyIndexingModel.index_exists?).to be(false) + end + end + end + + describe '#delete_index!' do + + before(:all) do + class ::DummyIndexingModelForRecreate + extend ActiveModel::Naming + extend Elasticsearch::Model::Naming::ClassMethods + extend Elasticsearch::Model::Indexing::ClassMethods + end + end + + after(:all) do + Object.send(:remove_const, :DummyIndexingModelForRecreate) if defined?(DummyIndexingModelForRecreate) + end + + context 'when the index is not found' do + + let(:client) do + double('client', indices: indices, transport: double('transport', { logger: nil })) + end + + let(:indices) do + double('indices').tap do |ind| + expect(ind).to receive(:delete).and_raise(NotFound) + end + end + + before do + expect(DummyIndexingModelForRecreate).to receive(:client).at_most(3).times.and_return(client) + end + + context 'when the force option is true' do + + it 'deletes the index without raising an exception' do + expect(DummyIndexingModelForRecreate.delete_index!(force: true)).to be_nil + end + + context 'when the client has a logger' do + + let(:logger) do + Logger.new(STDOUT).tap { |l| l.level = Logger::DEBUG } + end + + let(:client) do + double('client', indices: indices, transport: double('transport', { logger: logger })) + end + + it 'deletes the index without raising an exception' do + expect(DummyIndexingModelForRecreate.delete_index!(force: true)).to be_nil + end + + it 'logs the message that the index is not found' do + expect(logger).to receive(:debug) + expect(DummyIndexingModelForRecreate.delete_index!(force: true)).to be_nil + end + end + end + + context 'when the force option is not provided' do + + it 'raises an exception' do + expect { + DummyIndexingModelForRecreate.delete_index! + }.to raise_exception(NotFound) + end + end + + context 'when the exception is not NotFound' do + + let(:indices) do + double('indices').tap do |ind| + expect(ind).to receive(:delete).and_raise(Exception) + end + end + + it 'raises an exception' do + expect { + DummyIndexingModelForRecreate.delete_index! 
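+          # For reference, the force variant exercised in the sibling contexts above,
+          #   DummyIndexingModelForRecreate.delete_index!(force: true)
+          # swallows the stubbed NotFound; the non-NotFound exception stubbed for this
+          # context is expected to propagate, as asserted just below.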
+ }.to raise_exception(Exception) + end + end + end + + context 'when an index name is provided in the options' do + + before do + expect(DummyIndexingModelForRecreate).to receive(:client).and_return(client) + expect(indices).to receive(:delete).with(index: 'custom-foo') + end + + let(:client) do + double('client', indices: indices) + end + + let(:indices) do + double('indices', delete: true) + end + + it 'uses the index name' do + expect(DummyIndexingModelForRecreate.delete_index!(index: 'custom-foo')) + end + end + end + + describe '#create_index' do + + before(:all) do + class ::DummyIndexingModelForCreate + extend ActiveModel::Naming + extend Elasticsearch::Model::Naming::ClassMethods + extend Elasticsearch::Model::Indexing::ClassMethods + + index_name 'foo' + + settings index: { number_of_shards: 1 } do + mappings do + indexes :foo, analyzer: 'keyword' + end + end + end + end + + after(:all) do + Object.send(:remove_const, :DummyIndexingModelForCreate) if defined?(DummyIndexingModelForCreate) + end + + let(:client) do + double('client', indices: indices) + end + + let(:indices) do + double('indices') + end + + context 'when the index does not exist' do + + before do + expect(DummyIndexingModelForCreate).to receive(:client).and_return(client) + expect(DummyIndexingModelForCreate).to receive(:index_exists?).and_return(false) + end + + context 'when options are not provided' do + + let(:expected_body) do + { mappings: { _doc: { properties: { foo: { analyzer: 'keyword', + type: 'text' } } } }, + settings: { index: { number_of_shards: 1 } } } + end + + before do + expect(indices).to receive(:create).with(index: 'foo', body: expected_body).and_return(true) + end + + it 'creates the index' do + expect(DummyIndexingModelForCreate.create_index!).to be(true) + end + end + + context 'when options are provided' do + + let(:expected_body) do + { mappings: { foobar: { properties: { foo: { analyzer: 'bar' } } } }, + settings: { index: { number_of_shards: 3 } } } + end + + before do + expect(indices).to receive(:create).with(index: 'foobar', body: expected_body).and_return(true) + end + + it 'creates the index' do + expect(DummyIndexingModelForCreate.create_index! 
\ + index: 'foobar', + settings: { index: { number_of_shards: 3 } }, + mappings: { foobar: { properties: { foo: { analyzer: 'bar' } } } } + ).to be(true) + end + end + end + + context 'when the index exists' do + + before do + expect(DummyIndexingModelForCreate).to receive(:index_exists?).and_return(true) + expect(indices).to receive(:create).never + end + + it 'does not create the index' do + expect(DummyIndexingModelForCreate.create_index!).to be_nil + end + end + + context 'when creating the index raises an exception' do + + before do + expect(DummyIndexingModelForCreate).to receive(:client).and_return(client) + expect(DummyIndexingModelForCreate).to receive(:index_exists?).and_return(false) + expect(DummyIndexingModelForCreate).to receive(:delete_index!).and_return(true) + expect(indices).to receive(:create).and_raise(Exception) + end + + it 'raises the exception' do + expect { + DummyIndexingModelForCreate.create_index!(force: true) + }.to raise_exception(Exception) + end + end + + context 'when an index name is provided in the options' do + + before do + expect(DummyIndexingModelForCreate).to receive(:client).and_return(client).twice + expect(indices).to receive(:exists).and_return(false) + expect(indices).to receive(:create).with(index: 'custom-foo', body: expected_body) + end + + let(:expected_body) do + { mappings: { _doc: { properties: { foo: { analyzer: 'keyword', + type: 'text' } } } }, + settings: { index: { number_of_shards: 1 } } } + end + + it 'uses the index name' do + expect(DummyIndexingModelForCreate.create_index!(index: 'custom-foo')) + end + end + + context 'when the logging level is debug' + end + + describe '#refresh_index!' do + + before(:all) do + class ::DummyIndexingModelForRefresh + extend ActiveModel::Naming + extend Elasticsearch::Model::Naming::ClassMethods + extend Elasticsearch::Model::Indexing::ClassMethods + + index_name 'foo' + + settings index: { number_of_shards: 1 } do + mappings do + indexes :foo, analyzer: 'keyword' + end + end + end + end + + after(:all) do + Object.send(:remove_const, :DummyIndexingModelForRefresh) if defined?(DummyIndexingModelForRefresh) + end + + let(:client) do + double('client', indices: indices, transport: double('transport', { logger: nil })) + end + + let(:indices) do + double('indices') + end + + before do + expect(DummyIndexingModelForRefresh).to receive(:client).at_most(3).times.and_return(client) + end + + context 'when the force option is true' do + + context 'when the operation raises a NotFound exception' do + + before do + expect(indices).to receive(:refresh).and_raise(NotFound) + end + + it 'does not raise an exception' do + expect(DummyIndexingModelForRefresh.refresh_index!(force: true)).to be_nil + end + + context 'when the client has a logger' do + + let(:logger) do + Logger.new(STDOUT).tap { |l| l.level = Logger::DEBUG } + end + + let(:client) do + double('client', indices: indices, transport: double('transport', { logger: logger })) + end + + it 'does not raise an exception' do + expect(DummyIndexingModelForRefresh.refresh_index!(force: true)).to be_nil + end + + it 'logs the message that the index is not found' do + expect(logger).to receive(:debug) + expect(DummyIndexingModelForRefresh.refresh_index!(force: true)).to be_nil + end + end + end + + context 'when the operation raises another type of exception' do + + before do + expect(indices).to receive(:refresh).and_raise(Exception) + end + + it 'does not raise an exception' do + expect { + DummyIndexingModelForRefresh.refresh_index!(force: true) + }.to 
raise_exception(Exception) + end + end + end + + context 'when an index name is provided in the options' do + + before do + expect(indices).to receive(:refresh).with(index: 'custom-foo') + end + + it 'uses the index name' do + expect(DummyIndexingModelForRefresh.refresh_index!(index: 'custom-foo')) + end + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/module_spec.rb b/elasticsearch-model/spec/elasticsearch/model/module_spec.rb new file mode 100644 index 000000000..0b8986d69 --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/module_spec.rb @@ -0,0 +1,101 @@ +require 'spec_helper' + +describe Elasticsearch::Model do + + describe '#client' do + + it 'should have a default' do + expect(Elasticsearch::Model.client).to be_a(Elasticsearch::Transport::Client) + end + end + + describe '#client=' do + + before do + Elasticsearch::Model.client = 'Foobar' + end + + it 'should allow the client to be set' do + expect(Elasticsearch::Model.client).to eq('Foobar') + end + end + + describe 'mixin' do + + before(:all) do + class ::DummyIncludingModel; end + class ::DummyIncludingModelWithSearchMethodDefined + def self.search(query, options={}) + "SEARCH" + end + end + + DummyIncludingModel.__send__ :include, Elasticsearch::Model + end + + after(:all) do + remove_classes(DummyIncludingModel, DummyIncludingModelWithSearchMethodDefined) + end + + it 'should include and set up the proxy' do + expect(DummyIncludingModel).to respond_to(:__elasticsearch__) + expect(DummyIncludingModel.new).to respond_to(:__elasticsearch__) + end + + it 'should delegate methods to the proxy' do + expect(DummyIncludingModel).to respond_to(:search) + expect(DummyIncludingModel).to respond_to(:mapping) + expect(DummyIncludingModel).to respond_to(:settings) + expect(DummyIncludingModel).to respond_to(:index_name) + expect(DummyIncludingModel).to respond_to(:document_type) + expect(DummyIncludingModel).to respond_to(:import) + end + + it 'should not interfere with existing methods' do + expect(DummyIncludingModelWithSearchMethodDefined.search('foo')).to eq('SEARCH') + end + end + + describe '#settings' do + + it 'allows access to the settings' do + expect(Elasticsearch::Model.settings).to eq({}) + end + + context 'when settings are changed' do + + before do + Elasticsearch::Model.settings[:foo] = 'bar' + end + + it 'persists the changes' do + expect(Elasticsearch::Model.settings[:foo]).to eq('bar') + end + end + + context 'when \'inheritance_enabled\' is set' do + + around do |example| + original_value = Elasticsearch::Model.settings[:inheritance_enabled] + example.run + Elasticsearch::Model.settings[:inheritance_enabled] = original_value + end + + context 'when \'inheritance_enabled\' is true' do + + it 'warns with a deprecation message' do + expect(Elasticsearch::Model).to receive(:warn) + Elasticsearch::Model.inheritance_enabled = true + end + end + + context 'when \'inheritance_enabled\' is false' do + + it 'does not warn' do + expect(Elasticsearch::Model).not_to receive(:warn) + Elasticsearch::Model.inheritance_enabled = false + end + end + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/multimodel_spec.rb b/elasticsearch-model/spec/elasticsearch/model/multimodel_spec.rb new file mode 100644 index 000000000..44ee0bd84 --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/multimodel_spec.rb @@ -0,0 +1,55 @@ +require 'spec_helper' + +describe Elasticsearch::Model::Multimodel do + + let(:multimodel) do + Elasticsearch::Model::Multimodel.new(model_1, model_2) + 
end + + let(:model_1) do + double('Foo', index_name: 'foo_index', document_type: 'foo', to_ary: nil) + end + + let(:model_2) do + double('Bar', index_name: 'bar_index', document_type: 'bar', to_ary: nil) + end + + it 'has an index name' do + expect(multimodel.index_name).to eq(['foo_index', 'bar_index']) + end + + it 'has an document type' do + expect(multimodel.document_type).to eq(['foo', 'bar']) + end + + it 'has a client' do + expect(multimodel.client).to eq(Elasticsearch::Model.client) + end + + describe 'the model registry' do + + before(:all) do + + class JustAModel + include Elasticsearch::Model + end + + class JustAnotherModel + include Elasticsearch::Model + end + end + + after(:all) do + remove_classes(JustAModel, JustAnotherModel) + end + + let(:multimodel) do + Elasticsearch::Model::Multimodel.new + end + + it 'includes model in the registry' do + expect(multimodel.models).to include(JustAModel) + expect(multimodel.models).to include(JustAnotherModel) + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/naming_inheritance_spec.rb b/elasticsearch-model/spec/elasticsearch/model/naming_inheritance_spec.rb new file mode 100644 index 000000000..764bc5295 --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/naming_inheritance_spec.rb @@ -0,0 +1,184 @@ +require 'spec_helper' + +describe 'naming inheritance' do + + context 'without using proxy' do + before(:all) do + TestBase = Class.new do + extend ActiveModel::Naming + + extend Elasticsearch::Model::Naming::ClassMethods + include Elasticsearch::Model::Naming::InstanceMethods + end + + Animal = Class.new TestBase do + extend ActiveModel::Naming + + extend Elasticsearch::Model::Naming::ClassMethods + include Elasticsearch::Model::Naming::InstanceMethods + + index_name "mammals" + document_type "mammal" + end + + Dog = Class.new Animal + + module ::MyNamespace + Dog = Class.new Animal + end + + Cat = Class.new Animal do + extend ActiveModel::Naming + + extend Elasticsearch::Model::Naming::ClassMethods + include Elasticsearch::Model::Naming::InstanceMethods + + index_name "cats" + document_type "cat" + end + + end + + after(:all) do + remove_classes(TestBase, Animal, MyNamespace, Cat) + end + + around(:all) do |example| + original_value = Elasticsearch::Model.inheritance_enabled + Elasticsearch::Model.inheritance_enabled = true + example.run + Elasticsearch::Model.inheritance_enabled = original_value + end + + describe '#index_name' do + + it 'returns the default index name' do + expect(TestBase.index_name).to eq('test_bases') + expect(TestBase.new.index_name).to eq('test_bases') + end + + it 'returns the explicit index name' do + expect(Animal.index_name).to eq('mammals') + expect(Animal.new.index_name).to eq('mammals') + + expect(Cat.index_name).to eq('cats') + expect(Cat.new.index_name).to eq('cats') + end + + it 'returns the ancestor index name' do + expect(Dog.index_name).to eq('mammals') + expect(Dog.new.index_name).to eq('mammals') + end + + it 'returns the ancestor index name for namespaced models' do + expect(::MyNamespace::Dog.index_name).to eq('mammals') + expect(::MyNamespace::Dog.new.index_name).to eq('mammals') + end + end + + describe '#document_type' do + + it 'returns nil' do + expect(TestBase.document_type).to eq('_doc') + expect(TestBase.new.document_type).to eq('_doc') + end + + it 'returns the explicit document type' do + expect(Animal.document_type).to eq('mammal') + expect(Animal.new.document_type).to eq('mammal') + + expect(Cat.document_type).to eq('cat') + 
expect(Cat.new.document_type).to eq('cat') + end + + it 'returns the ancestor document type' do + expect(Dog.document_type).to eq('mammal') + expect(Dog.new.document_type).to eq('mammal') + end + + it 'returns the ancestor document type for namespaced models' do + expect(::MyNamespace::Dog.document_type).to eq('mammal') + expect(::MyNamespace::Dog.new.document_type).to eq('mammal') + end + end + end + + context 'when using proxy' do + before(:all) do + TestBase = Class.new do + extend ActiveModel::Naming + + include Elasticsearch::Model + end + + Animal = Class.new TestBase do + index_name "mammals" + document_type "mammal" + end + + Dog = Class.new Animal + + module MyNamespace + Dog = Class.new Animal + end + + Cat = Class.new Animal do + index_name "cats" + document_type "cat" + end + end + + after(:all) do + remove_classes(TestBase, Animal, MyNamespace, Cat) + end + + around(:all) do |example| + original_value = Elasticsearch::Model.settings[:inheritance_enabled] + Elasticsearch::Model.settings[:inheritance_enabled] = true + example.run + Elasticsearch::Model.settings[:inheritance_enabled] = original_value + end + + describe '#index_name' do + + it 'returns the default index name' do + expect(TestBase.index_name).to eq('test_bases') + end + + it 'returns the explicit index name' do + expect(Animal.index_name).to eq('mammals') + + expect(Cat.index_name).to eq('cats') + end + + it 'returns the ancestor index name' do + expect(Dog.index_name).to eq('mammals') + end + + it 'returns the ancestor index name for namespaced models' do + expect(::MyNamespace::Dog.index_name).to eq('mammals') + end + end + + describe '#document_type' do + + it 'returns nil' do + expect(TestBase.document_type).to eq('_doc') + end + + it 'returns the explicit document type' do + expect(Animal.document_type).to eq('mammal') + + expect(Cat.document_type).to eq('cat') + end + + it 'returns the ancestor document type' do + expect(Dog.document_type).to eq('mammal') + end + + it 'returns the ancestor document type for namespaced models' do + expect(::MyNamespace::Dog.document_type).to eq('mammal') + end + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/naming_spec.rb b/elasticsearch-model/spec/elasticsearch/model/naming_spec.rb new file mode 100644 index 000000000..8e0d8d54d --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/naming_spec.rb @@ -0,0 +1,186 @@ +require 'spec_helper' + +describe 'naming' do + + before(:all) do + class ::DummyNamingModel + extend ActiveModel::Naming + + extend Elasticsearch::Model::Naming::ClassMethods + include Elasticsearch::Model::Naming::InstanceMethods + end + + module ::MyNamespace + class DummyNamingModelInNamespace + extend ActiveModel::Naming + + extend Elasticsearch::Model::Naming::ClassMethods + include Elasticsearch::Model::Naming::InstanceMethods + end + end + end + + after(:all) do + remove_classes(DummyNamingModel, MyNamespace) + end + + it 'returns the default index name' do + expect(DummyNamingModel.index_name).to eq('dummy_naming_models') + expect(DummyNamingModel.new.index_name).to eq('dummy_naming_models') + end + + it 'returns the sanitized defualt index name for namespaced models' do + expect(::MyNamespace::DummyNamingModelInNamespace.index_name).to eq('my_namespace-dummy_naming_model_in_namespaces') + expect(::MyNamespace::DummyNamingModelInNamespace.new.index_name).to eq('my_namespace-dummy_naming_model_in_namespaces') + end + + it 'returns the document type' do + expect(DummyNamingModel.document_type).to eq('_doc') + 
expect(DummyNamingModel.new.document_type).to eq('_doc') + end + + describe '#index_name' do + + context 'when the index name is set on the class' do + + before do + DummyNamingModel.index_name 'foobar' + end + + it 'sets the index_name' do + expect(DummyNamingModel.index_name).to eq('foobar') + end + end + + context 'when the index name is set on an instance' do + + before do + instance.index_name 'foobar_d' + end + + let(:instance) do + DummyNamingModel.new + end + + it 'sets the index name on the instance' do + expect(instance.index_name).to eq('foobar_d') + end + + context 'when the index name is set with a proc' do + + before do + modifier = 'r' + instance.index_name Proc.new{ "foobar_#{modifier}" } + end + + it 'sets the index name on the instance' do + expect(instance.index_name).to eq('foobar_r') + end + end + end + end + + describe '#index_name=' do + + before do + DummyNamingModel.index_name = 'foobar_index_S' + end + + it 'changes the index name' do + expect(DummyNamingModel.index_name).to eq('foobar_index_S') + end + + context 'when the method is called on an instance' do + + let(:instance) do + DummyNamingModel.new + end + + before do + instance.index_name = 'foobar_index_s' + end + + it 'changes the index name' do + expect(instance.index_name).to eq('foobar_index_s') + end + + it 'does not change the index name on the class' do + expect(DummyNamingModel.index_name).to eq('foobar_index_S') + end + end + + context 'when the index name is changed with a proc' do + + before do + modifier2 = 'y' + DummyNamingModel.index_name = Proc.new{ "foobar_index_#{modifier2}" } + end + + it 'changes the index name' do + expect(DummyNamingModel.index_name).to eq('foobar_index_y') + end + end + end + + describe '#document_type' do + + it 'returns the document type' do + expect(DummyNamingModel.document_type).to eq('_doc') + end + + context 'when the method is called with an argument' do + + before do + DummyNamingModel.document_type 'foo' + end + + it 'changes the document type' do + expect(DummyNamingModel.document_type).to eq('foo') + end + end + + context 'when the method is called on an instance' do + + let(:instance) do + DummyNamingModel.new + end + + before do + instance.document_type 'foobar_d' + end + + it 'changes the document type' do + expect(instance.document_type).to eq('foobar_d') + end + end + end + + describe '#document_type=' do + + context 'when the method is called on the class' do + + before do + DummyNamingModel.document_type = 'foo_z' + end + + it 'changes the document type' do + expect(DummyNamingModel.document_type).to eq('foo_z') + end + end + + context 'when the method is called on an instance' do + + let(:instance) do + DummyNamingModel.new + end + + before do + instance.document_type = 'foobar_b' + end + + it 'changes the document type' do + expect(instance.document_type).to eq('foobar_b') + end + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/proxy_spec.rb b/elasticsearch-model/spec/elasticsearch/model/proxy_spec.rb new file mode 100644 index 000000000..6e484f896 --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/proxy_spec.rb @@ -0,0 +1,107 @@ +require 'spec_helper' + +describe Elasticsearch::Model::Proxy do + + before(:all) do + class ::DummyProxyModel + include Elasticsearch::Model::Proxy + + def self.foo + 'classy foo' + end + + def bar + 'insta barr' + end + + def as_json(options) + {foo: 'bar'} + end + end + + class ::DummyProxyModelWithCallbacks + def self.before_save(&block) + (@callbacks ||= {})[block.hash] = block + 
end + + def changes_to_save + {:foo => ['One', 'Two']} + end + end + + DummyProxyModelWithCallbacks.__send__ :include, Elasticsearch::Model::Proxy + end + + after(:all) do + remove_classes(DummyProxyModel, DummyProxyModelWithCallbacks) + end + + it 'sets up a proxy method on the class' do + expect(DummyProxyModel).to respond_to(:__elasticsearch__) + end + + it 'sets up a proxy method on instances' do + expect(DummyProxyModel.new).to respond_to(:__elasticsearch__) + end + + it 'sets up hooks for before_save callbacks' do + expect(DummyProxyModelWithCallbacks).to respond_to(:before_save) + end + + it 'delegates methods to the target' do + expect(DummyProxyModel.__elasticsearch__).to respond_to(:foo) + expect(DummyProxyModel.__elasticsearch__.foo).to eq('classy foo') + expect(DummyProxyModel.new.__elasticsearch__).to respond_to(:bar) + expect(DummyProxyModel.new.__elasticsearch__.bar).to eq('insta barr') + + expect { + DummyProxyModel.__elasticsearch__.xoxo + }.to raise_exception(NoMethodError) + + expect { + DummyProxyModel.new.__elasticsearch__.xoxo + }.to raise_exception(NoMethodError) + end + + it 'returns the proxy class from an instance proxy' do + expect(DummyProxyModel.new.__elasticsearch__.class.class).to eq(Elasticsearch::Model::Proxy::ClassMethodsProxy) + end + + it 'returns the origin class from an instance proxy' do + expect(DummyProxyModel.new.__elasticsearch__.klass).to eq(DummyProxyModel) + end + + it 'delegates #as_json from the proxy to the target' do + expect(DummyProxyModel.new.__elasticsearch__.as_json).to eq(foo: 'bar') + end + + it 'includes the proxy in the inspect string' do + expect(DummyProxyModel.__elasticsearch__.inspect).to match(/PROXY/) + expect(DummyProxyModel.new.__elasticsearch__.inspect).to match(/PROXY/) + end + + context 'when instances are cloned' do + + let!(:model) do + DummyProxyModel.new + end + + let!(:model_target) do + model.__elasticsearch__.target + end + + let!(:duplicate) do + model.dup + end + + let!(:duplicate_target) do + duplicate.__elasticsearch__.target + end + + it 'resets the proxy target' do + expect(model).not_to eq(duplicate) + expect(model).to eq(model_target) + expect(duplicate).to eq(duplicate_target) + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/response/aggregations_spec.rb b/elasticsearch-model/spec/elasticsearch/model/response/aggregations_spec.rb new file mode 100644 index 000000000..2d1f8f509 --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/response/aggregations_spec.rb @@ -0,0 +1,66 @@ +require 'spec_helper' + +describe Elasticsearch::Model::Response::Aggregations do + + before(:all) do + class OriginClass + def self.index_name; 'foo'; end + def self.document_type; 'bar'; end + end + end + + after(:all) do + remove_classes(OriginClass) + end + + let(:response_document) do + { + 'aggregations' => { + 'foo' => {'bar' => 10 }, + 'price' => { 'doc_count' => 123, + 'min' => { 'value' => 1.0}, + 'max' => { 'value' => 99 } + } + } + } + end + + let(:search) do + Elasticsearch::Model::Searching::SearchRequest.new(OriginClass, '*').tap do |request| + allow(request).to receive(:execute!).and_return(response_document) + end + end + + let(:aggregations) do + Elasticsearch::Model::Response::Response.new(OriginClass, search).aggregations + end + + describe 'method delegation' do + + it 'delegates methods to the response document' do + expect(aggregations.foo).to be_a(Hashie::Mash) + expect(aggregations.foo.bar).to be(10) + end + end + + describe '#doc_count' do + + it 'returns the doc count 
value from the response document' do + expect(aggregations.price.doc_count).to eq(123) + end + end + + describe '#min' do + + it 'returns the min value from the response document' do + expect(aggregations.price.min.value).to eq(1) + end + end + + describe '#max' do + + it 'returns the max value from the response document' do + expect(aggregations.price.max.value).to eq(99) + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/response/base_spec.rb b/elasticsearch-model/spec/elasticsearch/model/response/base_spec.rb new file mode 100644 index 000000000..cfd77d7c7 --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/response/base_spec.rb @@ -0,0 +1,90 @@ +require 'spec_helper' + +describe Elasticsearch::Model::Response::Base do + + before(:all) do + class DummyBaseClass + include Elasticsearch::Model::Response::Base + end + + class OriginClass + def self.index_name; 'foo'; end + def self.document_type; 'bar'; end + end + end + + after(:all) do + remove_classes(DummyBaseClass, OriginClass) + end + + let(:response_document) do + { 'hits' => { 'total' => 123, 'max_score' => 456, 'hits' => [] } } + end + + let(:search) do + Elasticsearch::Model::Searching::SearchRequest.new(OriginClass, '*').tap do |request| + allow(request).to receive(:execute!).and_return(response_document) + end + end + + let(:response) do + Elasticsearch::Model::Response::Response.new(OriginClass, search) + end + + let(:response_base) do + DummyBaseClass.new(OriginClass, response) + end + + describe '#klass' do + + it 'returns the class' do + expect(response.klass).to be(OriginClass) + end + end + + describe '#response' do + + it 'returns the response object' do + expect(response_base.response).to eq(response) + end + end + + describe 'response document' do + + it 'returns the response document' do + expect(response_base.response.response).to eq(response_document) + end + end + + describe '#total' do + + it 'returns the total' do + expect(response_base.total).to eq(123) + end + end + + describe '#max_score' do + + it 'returns the max score' do + expect(response_base.max_score).to eq(456) + end + end + + describe '#results' do + + it 'raises a NotImplemented error' do + expect { + response_base.results + }.to raise_exception(Elasticsearch::Model::NotImplemented) + end + end + + describe '#records' do + + it 'raises a NotImplemented error' do + expect { + response_base.records + }.to raise_exception(Elasticsearch::Model::NotImplemented) + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/response/pagination/kaminari_spec.rb b/elasticsearch-model/spec/elasticsearch/model/response/pagination/kaminari_spec.rb new file mode 100644 index 000000000..e8fe1f86b --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/response/pagination/kaminari_spec.rb @@ -0,0 +1,410 @@ +require 'spec_helper' + +describe 'Elasticsearch::Model::Response::Response Kaminari' do + + before(:all) do + class ModelClass + include ::Kaminari::ConfigurationMethods + def self.index_name; 'foo'; end + def self.document_type; 'bar'; end + end + end + + after(:all) do + remove_classes(ModelClass) + end + + let(:response_document) do + { 'took' => '5', 'timed_out' => false, '_shards' => {'one' => 'OK'}, + 'hits' => { 'total' => 100, 'hits' => (1..100).to_a.map { |i| { _id: i } } } } + end + + let(:search) do + Elasticsearch::Model::Searching::SearchRequest.new(model, '*') + end + + let(:response) do + allow(model).to receive(:client).and_return(client) + Elasticsearch::Model::Response::Response.new(model, 
search, response_document).tap do |resp| + allow(resp).to receive(:client).and_return(client) + end + end + + let(:client) do + double('client') + end + + shared_examples_for 'a search request that can be paginated' do + + describe '#page' do + + it 'does not set an initial from and size on the search definition' do + expect(response.search.definition[:from]).to be(nil) + expect(response.search.definition[:size]).to be(nil) + end + + context 'when page is called once' do + + let(:search_request) do + { index: index_field, from: 25, size: 25, q: '*', type: type_field} + end + + before do + expect(client).to receive(:search).with(search_request).and_return(response_document) + response.page(2).to_a + end + + it 'advances the from/size in the search request' do + expect(response.search.definition[:from]).to be(25) + expect(response.search.definition[:size]).to be(25) + end + end + + context 'when page is called more than once' do + + let(:search_request_one) do + { index: index_field, from: 25, size: 25, q: '*', type: type_field} + end + + let(:search_request_two) do + { index: index_field, from: 75, size: 25, q: '*', type: type_field} + end + + before do + expect(client).to receive(:search).with(search_request_one).and_return(response_document) + response.page(2).to_a + expect(client).to receive(:search).with(search_request_two).and_return(response_document) + response.page(4).to_a + end + + it 'advances the from/size in the search request' do + expect(response.search.definition[:from]).to be(75) + expect(response.search.definition[:size]).to be(25) + end + end + + context 'when limit is also set' do + + before do + response.records + response.results + end + + context 'when page is called before limit' do + + before do + response.page(3).limit(35) + end + + it 'sets the correct values' do + expect(response.search.definition[:size]).to eq(35) + expect(response.search.definition[:from]).to eq(70) + end + + it 'resets the instance variables' do + expect(response.instance_variable_get(:@response)).to be(nil) + expect(response.instance_variable_get(:@records)).to be(nil) + expect(response.instance_variable_get(:@results)).to be(nil) + end + end + + context 'when limit is called before page' do + + before do + response.limit(35).page(3) + end + + it 'sets the correct values' do + expect(response.search.definition[:size]).to eq(35) + expect(response.search.definition[:from]).to eq(70) + end + + it 'resets the instance variables' do + expect(response.instance_variable_get(:@response)).to be(nil) + expect(response.instance_variable_get(:@records)).to be(nil) + expect(response.instance_variable_get(:@results)).to be(nil) + end + end + end + end + + describe '#limit_value' do + + context 'when there is no default set' do + + it 'uses the limit value from the Kaminari configuration' do + expect(response.limit_value).to eq(Kaminari.config.default_per_page) + end + end + + context 'when there is a limit in the search definition' do + + let(:search) do + Elasticsearch::Model::Searching::SearchRequest.new(model, '*', size: 10) + end + + it 'gets the limit from the search definition' do + expect(response.limit_value).to eq(10) + end + end + + context 'when there is a limit in the search body' do + + let(:search) do + Elasticsearch::Model::Searching::SearchRequest.new(model, { query: { match_all: {} }, size: 999 }) + end + + it 'does not use the limit' do + expect(response.limit_value).to be(Kaminari.config.default_per_page) + end + end + end + + describe '#offset_value' do + + context 'when there is no 
default set' do + + it 'uses an offset of 0' do + expect(response.offset_value).to eq(0) + end + end + + context 'when there is an offset in the search definition' do + + let(:search) do + Elasticsearch::Model::Searching::SearchRequest.new(model, '*', from: 50) + end + + it 'gets the offset from the search definition' do + expect(response.offset_value).to eq(50) + end + end + + context 'when there is an offset in the search body' do + + let(:search) do + Elasticsearch::Model::Searching::SearchRequest.new(model, { query: { match_all: {} }, from: 333 }) + end + + it 'does not use the offset' do + expect(response.offset_value).to be(0) + end + end + end + + describe '#limit' do + + context 'when a limit is set' do + + before do + response.records + response.results + response.limit(35) + end + + it 'sets the limit on the search definition' do + expect(response.search.definition[:size]).to eq(35) + end + + it 'resets the instance variables' do + expect(response.instance_variable_get(:@response)).to be(nil) + expect(response.instance_variable_get(:@records)).to be(nil) + expect(response.instance_variable_get(:@results)).to be(nil) + end + + context 'when the limit is provided as a string' do + + before do + response.limit('35') + end + + it 'coerces the string to an integer' do + expect(response.search.definition[:size]).to eq(35) + end + end + + context 'when the limit is an invalid type' do + + before do + response.limit('asdf') + end + + it 'does not apply the setting' do + expect(response.search.definition[:size]).to eq(35) + end + end + end + end + + describe '#offset' do + + context 'when an offset is set' do + + before do + response.records + response.results + response.offset(15) + end + + it 'sets the offset on the search definition' do + expect(response.search.definition[:from]).to eq(15) + end + + it 'resets the instance variables' do + expect(response.instance_variable_get(:@response)).to be(nil) + expect(response.instance_variable_get(:@records)).to be(nil) + expect(response.instance_variable_get(:@results)).to be(nil) + end + + context 'when the offset is provided as a string' do + + before do + response.offset('15') + end + + it 'coerces the string to an integer' do + expect(response.search.definition[:from]).to eq(15) + end + end + + context 'when the offset is an invalid type' do + + before do + response.offset('asdf') + end + + it 'does not apply the setting' do + expect(response.search.definition[:from]).to eq(0) + end + end + end + end + + describe '#total' do + + before do + allow(response.results).to receive(:total).and_return(100) + end + + it 'returns the total number of hits' do + expect(response.total_count).to eq(100) + end + end + + context 'results' do + + before do + allow(search).to receive(:execute!).and_return(response_document) + end + + describe '#current_page' do + + it 'returns the current page' do + expect(response.results.current_page).to eq(1) + end + + context 'when a particular page is accessed' do + + it 'returns the correct current page' do + expect(response.page(5).results.current_page).to eq(5) + end + end + end + + describe '#prev_page' do + + it 'returns the previous page' do + expect(response.page(1).results.prev_page).to be(nil) + expect(response.page(2).results.prev_page).to be(1) + expect(response.page(3).results.prev_page).to be(2) + expect(response.page(4).results.prev_page).to be(3) + end + end + + describe '#next_page' do + + it 'returns the next page' do + expect(response.page(1).results.next_page).to be(2) + 
expect(response.page(2).results.next_page).to be(3) + expect(response.page(3).results.next_page).to be(4) + expect(response.page(4).results.next_page).to be(nil) + end + end + end + + context 'records' do + + before do + allow(search).to receive(:execute!).and_return(response_document) + end + + describe '#current_page' do + + it 'returns the current page' do + expect(response.records.current_page).to eq(1) + end + + context 'when a particular page is accessed' do + + it 'returns the correct current page' do + expect(response.page(5).records.current_page).to eq(5) + end + end + end + + describe '#prev_page' do + + it 'returns the previous page' do + expect(response.page(1).records.prev_page).to be(nil) + expect(response.page(2).records.prev_page).to be(1) + expect(response.page(3).records.prev_page).to be(2) + expect(response.page(4).records.prev_page).to be(3) + end + end + + describe '#next_page' do + + it 'returns the next page' do + expect(response.page(1).records.next_page).to be(2) + expect(response.page(2).records.next_page).to be(3) + expect(response.page(3).records.next_page).to be(4) + expect(response.page(4).records.next_page).to be(nil) + end + end + end + end + + context 'when the model is a single one' do + + let(:model) do + ModelClass + end + + let(:type_field) do + 'bar' + end + + let(:index_field) do + 'foo' + end + + it_behaves_like 'a search request that can be paginated' + end + + context 'when the model is a multimodel' do + + let(:model) do + Elasticsearch::Model::Multimodel.new(ModelClass) + end + + let(:type_field) do + ['bar'] + end + + let(:index_field) do + ['foo'] + end + + it_behaves_like 'a search request that can be paginated' + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/response/pagination/will_paginate_spec.rb b/elasticsearch-model/spec/elasticsearch/model/response/pagination/will_paginate_spec.rb new file mode 100644 index 000000000..0666b0f28 --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/response/pagination/will_paginate_spec.rb @@ -0,0 +1,262 @@ +require 'spec_helper' + +describe 'Elasticsearch::Model::Response::Response WillPaginate' do + + before(:all) do + class ModelClass + def self.index_name; 'foo'; end + def self.document_type; 'bar'; end + + def self.per_page + 33 + end + end + + # Subclass Response so we can include WillPaginate module without conflicts with Kaminari. 
+ class WillPaginateResponse < Elasticsearch::Model::Response::Response + include Elasticsearch::Model::Response::Pagination::WillPaginate + end + end + + after(:all) do + remove_classes(ModelClass, WillPaginateResponse) + end + + let(:response_document) do + { 'took' => '5', 'timed_out' => false, '_shards' => {'one' => 'OK'}, + 'hits' => { 'total' => 100, 'hits' => (1..100).to_a.map { |i| { _id: i } } } } + end + + let(:search) do + Elasticsearch::Model::Searching::SearchRequest.new(model, '*') + end + + let(:response) do + allow(model).to receive(:client).and_return(client) + WillPaginateResponse.new(model, search, response_document).tap do |resp| + allow(resp).to receive(:client).and_return(client) + end + end + + let(:client) do + double('client') + end + + shared_examples_for 'a search request that can be paginated' do + + describe '#offset' do + + context 'when per_page and page are set' do + + before do + response.per_page(3).page(3) + end + + it 'sets the correct offset' do + expect(response.offset).to eq(6) + end + end + end + + describe '#length' do + + context 'when per_page and page are set' do + + before do + response.per_page(3).page(3) + end + + it 'returns the correct length' do + expect(response.length).to eq(3) + end + end + end + + describe '#paginate' do + + context 'when there are no settings' do + + context 'when page is set to nil' do + + before do + response.paginate(page: nil) + end + + it 'uses the defaults' do + expect(response.search.definition[:size]).to eq(default_per_page) + expect(response.search.definition[:from]).to eq(0) + end + end + + context 'when page is set to a value' do + + before do + response.paginate(page: 2) + end + + it 'uses the defaults' do + expect(response.search.definition[:size]).to eq(default_per_page) + expect(response.search.definition[:from]).to eq(default_per_page) + end + end + + context 'when a custom page and per_page is set' do + + before do + response.paginate(page: 3, per_page: 9) + end + + it 'uses the custom values' do + expect(response.search.definition[:size]).to eq(9) + expect(response.search.definition[:from]).to eq(18) + end + end + + context 'when an invalid value is provided for page' do + + before do + response.paginate(page: -1) + end + + it 'falls back to the default values' do + expect(response.search.definition[:size]).to eq(default_per_page) + expect(response.search.definition[:from]).to eq(0) + end + end + end + end + + describe '#page' do + + context 'when a value is provided for page' do + + before do + response.page(5) + end + + it 'calculates the correct :size and :from' do + expect(response.search.definition[:size]).to eq(default_per_page) + expect(response.search.definition[:from]).to eq(default_per_page * 4) + end + end + + context 'when a value is provided for page and per_page' do + + before do + response.page(5).per_page(3) + end + + it 'calculates the correct :size and :from' do + expect(response.search.definition[:size]).to eq(3) + expect(response.search.definition[:from]).to eq(12) + end + end + + context 'when a value is provided for per_page and page' do + + before do + response.per_page(3).page(5) + end + + it 'calculates the correct :size and :from' do + expect(response.search.definition[:size]).to eq(3) + expect(response.search.definition[:from]).to eq(12) + end + end + end + + describe '#current_page' do + + context 'when no values are set' do + + before do + response.paginate({}) + end + + it 'returns the first page' do + expect(response.current_page).to eq(1) + end + end + + context 'when 
values are provided for per_page and page' do + + before do + response.paginate(page: 3, per_page: 9) + end + + it 'calculates the correct current page' do + expect(response.current_page).to eq(3) + end + end + + context 'when #paginate has not been called on the response' do + + it 'returns nil' do + expect(response.current_page).to be_nil + end + end + end + + describe '#per_page' do + + context 'when a value is set via the #paginate method' do + + before do + response.paginate(per_page: 8) + end + + it 'returns the per_page value' do + expect(response.per_page).to eq(8) + end + end + + context 'when a value is set via the #per_page method' do + + before do + response.per_page(8) + end + + it 'returns the per_page value' do + expect(response.per_page).to eq(8) + end + end + end + + describe '#total_entries' do + + before do + allow(response).to receive(:results).and_return(double('results', total: 100)) + end + + it 'returns the total results' do + expect(response.total_entries).to eq(100) + end + end + end + + context 'when the model is a single one' do + + let(:model) do + ModelClass + end + + let(:default_per_page) do + 33 + end + + it_behaves_like 'a search request that can be paginated' + end + + context 'when the model is a multimodel' do + + let(:model) do + Elasticsearch::Model::Multimodel.new(ModelClass) + end + + let(:default_per_page) do + ::WillPaginate.per_page + end + + it_behaves_like 'a search request that can be paginated' + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/response/records_spec.rb b/elasticsearch-model/spec/elasticsearch/model/response/records_spec.rb new file mode 100644 index 000000000..882c763f3 --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/response/records_spec.rb @@ -0,0 +1,118 @@ +require 'spec_helper' + +describe Elasticsearch::Model::Response::Records do + + before(:all) do + class DummyCollection + include Enumerable + + def each(&block); ['FOO'].each(&block); end + def size; ['FOO'].size; end + def empty?; ['FOO'].empty?; end + def foo; 'BAR'; end + end + + class DummyModel + def self.index_name; 'foo'; end + def self.document_type; 'bar'; end + + def self.find(*args) + DummyCollection.new + end + end + end + + after(:all) do + remove_classes(DummyCollection, DummyModel) + end + + let(:response_document) do + { 'hits' => { 'total' => 123, 'max_score' => 456, 'hits' => [{'_id' => '1', 'foo' => 'bar'}] } } + end + + let(:results) do + Elasticsearch::Model::Response::Results.new(DummyModel, response_document) + end + + let(:search) do + Elasticsearch::Model::Searching::SearchRequest.new(DummyModel, '*').tap do |request| + allow(request).to receive(:execute!).and_return(response_document) + end + end + + let(:response) do + Elasticsearch::Model::Response::Response.new(DummyModel, search) + end + + let(:records) do + described_class.new(DummyModel, response) + end + + context 'when the records are accessed' do + + it 'returns the records' do + expect(records.records.size).to eq(1) + expect(records.records.first).to eq('FOO') + end + + it 'delegates methods to records' do + expect(records.foo).to eq('BAR') + end + end + + describe '#each_with_hit' do + + it 'returns each record with its Elasticsearch hit' do + records.each_with_hit do |record, hit| + expect(record).to eq('FOO') + expect(hit.foo).to eq('bar') + end + end + end + + describe '#map_with_hit' do + + let(:value) do + records.map_with_hit { |record, hit| "#{record}---#{hit.foo}" } + end + + it 'returns each record with its Elasticsearch hit' do + 
expect(value).to eq(['FOO---bar']) + end + end + + describe '#ids' do + + it 'returns the ids' do + expect(records.ids).to eq(['1']) + end + end + + context 'when an adapter is used' do + + before do + module DummyAdapter + module RecordsMixin + def records + ['FOOBAR'] + end + end + + def records_mixin + RecordsMixin + end; module_function :records_mixin + end + + allow(Elasticsearch::Model::Adapter).to receive(:from_class).and_return(DummyAdapter) + end + + after do + Elasticsearch::Model::Adapter::Adapter.adapters.delete(DummyAdapter) + Object.send(:remove_const, :DummyAdapter) if defined?(DummyAdapter) + end + + it 'delegates the records method to the adapter' do + expect(records.records).to eq(['FOOBAR']) + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/response/response_spec.rb b/elasticsearch-model/spec/elasticsearch/model/response/response_spec.rb new file mode 100644 index 000000000..32f96a4d2 --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/response/response_spec.rb @@ -0,0 +1,131 @@ +require 'spec_helper' + +describe Elasticsearch::Model::Response::Response do + + before(:all) do + class OriginClass + def self.index_name; 'foo'; end + def self.document_type; 'bar'; end + end + end + + after(:all) do + remove_classes(OriginClass) + end + + let(:response_document) do + { 'took' => '5', 'timed_out' => false, '_shards' => {'one' => 'OK'}, 'hits' => { 'hits' => [] }, + 'aggregations' => {'foo' => {'bar' => 10}}, + 'suggest' => {'my_suggest' => [ { 'text' => 'foo', 'options' => [ { 'text' => 'Foo', 'score' => 2.0 }, + { 'text' => 'Bar', 'score' => 1.0 } ] } ]}} + + end + + let(:search) do + Elasticsearch::Model::Searching::SearchRequest.new(OriginClass, '*').tap do |request| + allow(request).to receive(:execute!).and_return(response_document) + end + end + + let(:response) do + Elasticsearch::Model::Response::Response.new(OriginClass, search) + end + + it 'performs the Elasticsearch request lazily' do + expect(search).not_to receive(:execute!) 
+ response + end + + describe '#klass' do + + it 'returns the class' do + expect(response.klass).to be(OriginClass) + end + end + + describe '#search' do + + it 'returns the search object' do + expect(response.search).to eq(search) + end + end + + describe '#took' do + + it 'returns the took field' do + expect(response.took).to eq('5') + end + end + + describe '#timed_out' do + + it 'returns the timed_out field' do + expect(response.timed_out).to eq(false) + end + end + + describe '#shards' do + + it 'returns a Hashie::Mash' do + expect(response.shards.one).to eq('OK') + end + end + + describe '#response' do + + it 'returns the response document' do + expect(response.response).to eq(response_document) + end + end + + describe '#results' do + + it 'provides access to the results' do + expect(response.results).to be_a(Elasticsearch::Model::Response::Results) + expect(response.size).to be(0) + end + end + + describe '#records' do + + it 'provides access to the records' do + expect(response.records).to be_a(Elasticsearch::Model::Response::Records) + expect(response.size).to be(0) + end + end + + describe 'enumerable methods' do + + it 'delegates the methods to the results' do + expect(response.empty?).to be(true) + end + end + + describe 'aggregations' do + + it 'provides access to the aggregations' do + expect(response.aggregations).to be_a(Hashie::Mash) + expect(response.aggregations.foo.bar).to eq(10) + end + end + + describe 'suggestions' do + + it 'provides access to the suggestions' do + expect(response.suggestions).to be_a(Hashie::Mash) + expect(response.suggestions.my_suggest.first.options.first.text).to eq('Foo') + expect(response.suggestions.terms).to eq([ 'Foo', 'Bar' ]) + end + + context 'when there are no suggestions' do + + let(:response_document) do + { } + end + + it 'returns an empty list' do + expect(response.suggestions.terms).to eq([ ]) + end + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/response/result_spec.rb b/elasticsearch-model/spec/elasticsearch/model/response/result_spec.rb new file mode 100644 index 000000000..6f1298a2b --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/response/result_spec.rb @@ -0,0 +1,122 @@ +require 'spec_helper' +require 'active_support/json/encoding' + +describe Elasticsearch::Model::Response::Result do + + let(:result) do + described_class.new(foo: 'bar', bar: { bam: 'baz' }) + end + + it 'provides access to the properties' do + expect(result.foo).to eq('bar') + expect(result.bar.bam).to eq('baz') + expect { result.xoxo }.to raise_exception(NoMethodError) + end + + describe '#id' do + + let(:result) do + described_class.new(foo: 'bar', _id: 42, _source: { id: 12 }) + end + + it 'returns the _id field' do + expect(result.id).to eq(42) + end + + it 'provides access to the source id field' do + expect(result._source.id).to eq(12) + end + end + + describe '#type' do + + let(:result) do + described_class.new(foo: 'bar', _type: 'baz', _source: { type: 'BAM' }) + end + + it 'returns the _type field' do + expect(result.type).to eq('baz') + end + + it 'provides access to the source type field' do + expect(result._source.type).to eq('BAM') + end + end + + describe 'method delegation' do + + let(:result) do + described_class.new(foo: 'bar', _source: { bar: { bam: 'baz' } }) + end + + it 'provides access to the _source field via a method' do + expect(result._source).to eq('bar' => { 'bam' => 'baz' }) + end + + it 'is recognized by #method' do + expect(result.method :bar).to be_a Method + end + + it 'respond_to? 
still works' do + expect(result.respond_to? :bar).to be true + end + + context 'when methods map to keys in subdocuments of the response from Elasticsearch' do + + it 'provides access to top level fields via a method' do + expect(result.foo).to eq('bar') + expect(result.fetch(:foo)).to eq('bar') + expect(result.fetch(:does_not_exist, 'moo')).to eq('moo') + end + + it 'responds to hash methods' do + expect(result.keys).to eq(['foo', '_source']) + expect(result.to_hash).to eq('foo' => 'bar', '_source' => { 'bar' => { 'bam' => 'baz' } }) + end + + it 'provides access to fields in the _source subdocument via a method' do + expect(result.bar).to eq('bam' => 'baz') + expect(result.bar.bam).to eq('baz') + expect(result._source.bar).to eq('bam' => 'baz') + expect(result._source.bar.bam).to eq('baz') + end + + context 'when boolean methods are called' do + + it 'provides access to top level fields via a method' do + expect(result.foo?).to eq(true) + expect(result.boo?).to eq(false) + end + + it 'delegates to fields in the _source subdocument via a method' do + expect(result.bar?).to eq(true) + expect(result.bar.bam?).to eq(true) + expect(result.boo?).to eq(false) + expect(result.bar.boo?).to eq(false) + expect(result._source.bar?).to eq(true) + expect(result._source.bar.bam?).to eq(true) + expect(result._source.boo?).to eq(false) + expect(result._source.bar.boo?).to eq(false) + end + end + end + + context 'when methods do not map to keys in subdocuments of the response from Elasticsearch' do + + it 'raises a NoMethodError' do + expect { result.does_not_exist }.to raise_exception(NoMethodError) + end + end + end + + describe '#as_json' do + + let(:result) do + described_class.new(foo: 'bar', _source: { bar: { bam: 'baz' } }) + end + + it 'returns a json-compatible hash' do + expect(result.as_json(except: 'foo')).to eq({'_source'=>{'bar'=>{'bam'=>'baz'}}}) + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/response/results_spec.rb b/elasticsearch-model/spec/elasticsearch/model/response/results_spec.rb new file mode 100644 index 000000000..f7149003d --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/response/results_spec.rb @@ -0,0 +1,56 @@ +require 'spec_helper' + +describe Elasticsearch::Model::Response::Results do + + before(:all) do + class OriginClass + def self.index_name; 'foo'; end + def self.document_type; 'bar'; end + end + end + + after(:all) do + remove_classes(OriginClass) + end + + let(:response_document) do + { 'hits' => { 'total' => 123, 'max_score' => 456, 'hits' => [{'foo' => 'bar'}] } } + end + + let(:search) do + Elasticsearch::Model::Searching::SearchRequest.new(OriginClass, '*').tap do |request| + allow(request).to receive(:execute!).and_return(response_document) + end + end + + let(:response) do + Elasticsearch::Model::Response::Response.new(OriginClass, search) + end + + let(:results) do + response.results + end + + describe '#results' do + + it 'provides access to the results' do + expect(results.results.size).to be(1) + expect(results.results.first.foo).to eq('bar') + end + end + + describe 'Enumerable' do + + it 'delegates enumerable methods to the results' do + expect(results.empty?).to be(false) + expect(results.first.foo).to eq('bar') + end + end + + describe '#raw_response' do + + it 'returns the raw response document' do + expect(response.raw_response).to eq(response_document) + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/searching_search_request_spec.rb 
b/elasticsearch-model/spec/elasticsearch/model/searching_search_request_spec.rb new file mode 100644 index 000000000..db3ac6dab --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/searching_search_request_spec.rb @@ -0,0 +1,112 @@ +require 'spec_helper' + +describe Elasticsearch::Model::Serializing do + + before(:all) do + class ::DummySearchingModel + extend Elasticsearch::Model::Searching::ClassMethods + def self.index_name; 'foo'; end + def self.document_type; 'bar'; end + end + end + + after(:all) do + remove_classes(DummySearchingModel) + end + + before do + allow(DummySearchingModel).to receive(:client).and_return(client) + end + + let(:client) do + double('client') + end + + describe '#initialize' do + + context 'when the search definition is a simple query' do + + before do + expect(client).to receive(:search).with(index: 'foo', type: 'bar', q: 'foo').and_return({}) + end + + let(:search) do + Elasticsearch::Model::Searching::SearchRequest.new(DummySearchingModel, 'foo') + end + + it 'passes the query to the client' do + expect(search.execute!).to eq({}) + end + end + + context 'when the search definition is a hash' do + + before do + expect(client).to receive(:search).with(index: 'foo', type: 'bar', body: { foo: 'bar' }).and_return({}) + end + + let(:search) do + Elasticsearch::Model::Searching::SearchRequest.new(DummySearchingModel, foo: 'bar') + end + + it 'passes the hash to the client' do + expect(search.execute!).to eq({}) + end + end + + context 'when the search definition is a json string' do + + before do + expect(client).to receive(:search).with(index: 'foo', type: 'bar', body: '{"foo":"bar"}').and_return({}) + end + + let(:search) do + Elasticsearch::Model::Searching::SearchRequest.new(DummySearchingModel, '{"foo":"bar"}') + end + + it 'passes the json string to the client' do + expect(search.execute!).to eq({}) + end + end + + context 'when the search definition is a custom object' do + + before(:all) do + class MySpecialQueryBuilder + def to_hash; {foo: 'bar'}; end + end + end + + after(:all) do + Object.send(:remove_const, :MySpecialQueryBuilder) if defined?(MySpecialQueryBuilder) + end + + before do + expect(client).to receive(:search).with(index: 'foo', type: 'bar', body: {foo: 'bar'}).and_return({}) + end + + let(:search) do + Elasticsearch::Model::Searching::SearchRequest.new(DummySearchingModel, MySpecialQueryBuilder.new) + end + + it 'passes the query builder to the client and calls #to_hash on it' do + expect(search.execute!).to eq({}) + end + end + + context 'when extra options are specified' do + + before do + expect(client).to receive(:search).with(index: 'foo', type: 'bar', q: 'foo', size: 15).and_return({}) + end + + let(:search) do + Elasticsearch::Model::Searching::SearchRequest.new(DummySearchingModel, 'foo', size: 15) + end + + it 'passes the extra options to the client as part of the request' do + expect(search.execute!).to eq({}) + end + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/searching_spec.rb b/elasticsearch-model/spec/elasticsearch/model/searching_spec.rb new file mode 100644 index 000000000..ca95f282b --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/searching_spec.rb @@ -0,0 +1,49 @@ +require 'spec_helper' + +describe Elasticsearch::Model::Searching::ClassMethods do + + before(:all) do + class ::DummySearchingModel + extend Elasticsearch::Model::Searching::ClassMethods + + def self.index_name; 'foo'; end + def self.document_type; 'bar'; end + end + end + + after(:all) do + 
remove_classes(DummySearchingModel) + end + + it 'has the search method' do + expect(DummySearchingModel).to respond_to(:search) + end + + describe '#search' do + + let(:response) do + double('search', execute!: { 'hits' => {'hits' => [ {'_id' => 2 }, {'_id' => 1 } ]} }) + end + + before do + expect(Elasticsearch::Model::Searching::SearchRequest).to receive(:new).with(DummySearchingModel, 'foo', { default_operator: 'AND' }).and_return(response) + end + + it 'creates a search object' do + expect(DummySearchingModel.search('foo', default_operator: 'AND')).to be_a(Elasticsearch::Model::Response::Response) + end + end + + describe 'lazy execution' do + + let(:response) do + double('search').tap do |r| + expect(r).to receive(:execute!).never + end + end + + it 'does not execute the search until the results are accessed' do + DummySearchingModel.search('foo') + end + end +end diff --git a/elasticsearch-model/spec/elasticsearch/model/serializing_spec.rb b/elasticsearch-model/spec/elasticsearch/model/serializing_spec.rb new file mode 100644 index 000000000..ac4a850f3 --- /dev/null +++ b/elasticsearch-model/spec/elasticsearch/model/serializing_spec.rb @@ -0,0 +1,22 @@ +require 'spec_helper' + +describe Elasticsearch::Model::Serializing do + + before(:all) do + class DummyClass + include Elasticsearch::Model::Serializing::InstanceMethods + + def as_json(options={}) + 'HASH' + end + end + end + + after(:all) do + remove_classes(DummyClass) + end + + it 'delegates to #as_json by default' do + expect(DummyClass.new.as_indexed_json).to eq('HASH') + end +end diff --git a/elasticsearch-model/spec/spec_helper.rb b/elasticsearch-model/spec/spec_helper.rb new file mode 100644 index 000000000..9e46a27de --- /dev/null +++ b/elasticsearch-model/spec/spec_helper.rb @@ -0,0 +1,161 @@ +require 'pry-nav' +require 'kaminari' +require 'kaminari/version' +require 'will_paginate' +require 'will_paginate/collection' +require 'elasticsearch/model' +require 'hashie/version' +require 'active_model' +require 'mongoid' +require 'yaml' +require 'active_record' + +unless defined?(ELASTICSEARCH_URL) + ELASTICSEARCH_URL = ENV['ELASTICSEARCH_URL'] || "localhost:#{(ENV['TEST_CLUSTER_PORT'] || 9200)}" +end + +RSpec.configure do |config| + config.formatter = 'documentation' + config.color = true + + config.before(:suite) do + require 'ansi' + tracer = ::Logger.new(STDERR) + tracer.formatter = lambda { |s, d, p, m| "#{m.gsub(/^.*$/) { |n| ' ' + n }.ansi(:faint)}\n" } + Elasticsearch::Model.client = Elasticsearch::Client.new host: ELASTICSEARCH_URL, + tracer: (ENV['QUIET'] ? nil : tracer) + + unless ActiveRecord::Base.connected? + ActiveRecord::Base.establish_connection( :adapter => 'sqlite3', :database => ":memory:" ) + end + require 'support/app' + + if ::ActiveRecord::Base.respond_to?(:raise_in_transactional_callbacks) && ::ActiveRecord::VERSION::MAJOR.to_s < '5' + ::ActiveRecord::Base.raise_in_transactional_callbacks = true + end + end + + config.after(:all) do + drop_all_tables! + delete_all_indices! + end +end + +# Is the ActiveRecord version at least 4.0? +# +# @return [ true, false ] Whether the ActiveRecord version is at least 4.0. +# +# @since 6.0.1 +def active_record_at_least_4? + defined?(::ActiveRecord) && ::ActiveRecord::VERSION::MAJOR >= 4 +end + +# Delete all documents from the indices of the provided list of models. +# +# @param [ Array ] models The list of models. 
+# +# @return [ true ] +# +# @since 6.0.1 +def clear_indices(*models) + models.each do |model| + begin; Elasticsearch::Model.client.delete_by_query(index: model.index_name, q: '*'); rescue; end + end and true +end + +# Delete all documents from the tables of the provided list of models. +# +# @param [ Array ] models The list of models. +# +# @return [ true ] +# +# @since 6.0.1 +def clear_tables(*models) + begin; models.map(&:delete_all); rescue; end and true +end + +# Drop all tables of models registered as subclasses of ActiveRecord::Base. +# +# @return [ true ] +# +# @since 6.0.1 +def drop_all_tables! + ActiveRecord::Base.descendants.each do |model| + begin + ActiveRecord::Schema.define do + drop_table model + end if model.table_exists? + rescue + end + end and true +end + +# Drop all indices of models registered as subclasses of ActiveRecord::Base. +# +# @return [ true ] +# +# @since 6.0.1 +def delete_all_indices! + client = Elasticsearch::Model.client + ActiveRecord::Base.descendants.each do |model| + begin + client.indices.delete(index: model.index_name) if model.__elasticsearch__.index_exists? + rescue + end + end and true +end + +# Remove all classes. +# +# @param [ Array ] classes The list of classes to remove. +# +# @return [ true ] +# +# @since 6.0.1 +def remove_classes(*classes) + classes.each do |_class| + Object.send(:remove_const, _class.name.to_sym) if defined?(_class) + end and true +end + +# Determine whether the tests with Mongoid should be run. +# Depends on whether MongoDB is running on the default host and port, `localhost:27017`. +# +# @return [ true, false ] +# +# @since 6.0.1 +def test_mongoid? + $mongoid_available ||= begin + require 'mongoid' + if defined?(Mongo) # older versions of Mongoid use the driver, Moped + client = Mongo::Client.new(['localhost:27017']) + Timeout.timeout(1) do + client.database.command(ping: 1) && true + end + end and true + rescue Timeout::Error, LoadError, Mongo::Error => e + client.close + $stderr.puts("MongoDB not installed or running: #{e}") + end +end + +# Connect Mongoid and set up its Logger if Mongoid tests should be run. +# +# @since 6.0.1 +def connect_mongoid(source) + if test_mongoid? 
+ $stderr.puts "Mongoid #{Mongoid::VERSION}", '-'*80 + + if !ENV['QUIET'] == 'true' + logger = ::Logger.new($stderr) + logger.formatter = lambda { |s, d, p, m| " #{m.ansi(:faint, :cyan)}\n" } + logger.level = ::Logger::DEBUG + Mongoid.logger = logger + Mongo::Logger.logger = logger + else + Mongo::Logger.logger.level = ::Logger::WARN + end + + Mongoid.connect_to(source) + end +end diff --git a/elasticsearch-model/spec/support/app.rb b/elasticsearch-model/spec/support/app.rb new file mode 100644 index 000000000..c06ca9ca5 --- /dev/null +++ b/elasticsearch-model/spec/support/app.rb @@ -0,0 +1,21 @@ +require 'active_record' + +require 'support/app/question' +require 'support/app/answer' +require 'support/app/parent_and_child_searchable' +require 'support/app/article_with_custom_serialization' +require 'support/app/import_article' +require 'support/app/namespaced_book' +require 'support/app/article_for_pagination' +require 'support/app/article_with_dynamic_index_name' +require 'support/app/episode' +require 'support/app/image' +require 'support/app/series' +require 'support/app/mongoid_article' +require 'support/app/article' +require 'support/app/searchable' +require 'support/app/category' +require 'support/app/author' +require 'support/app/authorship' +require 'support/app/comment' +require 'support/app/post' diff --git a/elasticsearch-model/spec/support/app/answer.rb b/elasticsearch-model/spec/support/app/answer.rb new file mode 100644 index 000000000..7de32dc7b --- /dev/null +++ b/elasticsearch-model/spec/support/app/answer.rb @@ -0,0 +1,33 @@ +class Answer < ActiveRecord::Base + include Elasticsearch::Model + + belongs_to :question + + JOIN_TYPE = 'answer'.freeze + + index_name 'questions_and_answers'.freeze + document_type 'doc'.freeze + + before_create :randomize_id + + def randomize_id + begin + self.id = SecureRandom.random_number(1_000_000) + end while Answer.where(id: self.id).exists? 
+ end + + mapping do + indexes :text + indexes :author + end + + def as_indexed_json(options={}) + # This line is necessary for differences between ActiveModel::Serializers::JSON#as_json versions + json = as_json(options)[JOIN_TYPE] || as_json(options) + json.merge(join_field: { name: JOIN_TYPE, parent: question_id }) + end + + after_commit lambda { __elasticsearch__.index_document(routing: (question_id || 1)) }, on: :create + after_commit lambda { __elasticsearch__.update_document(routing: (question_id || 1)) }, on: :update + after_commit lambda {__elasticsearch__.delete_document(routing: (question_id || 1)) }, on: :destroy +end \ No newline at end of file diff --git a/elasticsearch-model/spec/support/app/article.rb b/elasticsearch-model/spec/support/app/article.rb new file mode 100644 index 000000000..ddff706ef --- /dev/null +++ b/elasticsearch-model/spec/support/app/article.rb @@ -0,0 +1,22 @@ +class ::Article < ActiveRecord::Base + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + document_type 'article' + + settings index: {number_of_shards: 1, number_of_replicas: 0} do + mapping do + indexes :title, type: 'text', analyzer: 'snowball' + indexes :body, type: 'text' + indexes :clicks, type: 'integer' + indexes :created_at, type: 'date' + end + end + + def as_indexed_json(options = {}) + attributes + .symbolize_keys + .slice(:title, :body, :clicks, :created_at) + .merge(suggest_title: title) + end +end diff --git a/elasticsearch-model/spec/support/app/article_for_pagination.rb b/elasticsearch-model/spec/support/app/article_for_pagination.rb new file mode 100644 index 000000000..8bea633c1 --- /dev/null +++ b/elasticsearch-model/spec/support/app/article_for_pagination.rb @@ -0,0 +1,12 @@ +class ::ArticleForPagination < ActiveRecord::Base + include Elasticsearch::Model + + scope :published, -> { where(published: true) } + + settings index: { number_of_shards: 1, number_of_replicas: 0 } do + mapping do + indexes :title, type: 'text', analyzer: 'snowball' + indexes :created_at, type: 'date' + end + end +end diff --git a/elasticsearch-model/spec/support/app/article_with_custom_serialization.rb b/elasticsearch-model/spec/support/app/article_with_custom_serialization.rb new file mode 100644 index 000000000..c03b19ea5 --- /dev/null +++ b/elasticsearch-model/spec/support/app/article_with_custom_serialization.rb @@ -0,0 +1,13 @@ +class ::ArticleWithCustomSerialization < ActiveRecord::Base + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + mapping do + indexes :title + end + + def as_indexed_json(options={}) + # as_json(options.merge root: false).slice('title') + { title: self.title } + end +end diff --git a/elasticsearch-model/spec/support/app/article_with_dynamic_index_name.rb b/elasticsearch-model/spec/support/app/article_with_dynamic_index_name.rb new file mode 100644 index 000000000..7c53d04bf --- /dev/null +++ b/elasticsearch-model/spec/support/app/article_with_dynamic_index_name.rb @@ -0,0 +1,15 @@ +class ::ArticleWithDynamicIndexName < ActiveRecord::Base + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + def self.counter=(value) + @counter = 0 + end + + def self.counter + (@counter ||= 0) && @counter += 1 + end + + mapping { indexes :title } + index_name { "articles-#{counter}" } +end diff --git a/elasticsearch-model/spec/support/app/author.rb b/elasticsearch-model/spec/support/app/author.rb new file mode 100644 index 000000000..ff1664af7 --- /dev/null +++ b/elasticsearch-model/spec/support/app/author.rb @@ -0,0 
+1,9 @@ +class Author < ActiveRecord::Base + has_many :authorships + + after_update { self.authorships.each(&:touch) } + + def full_name + [first_name, last_name].compact.join(' ') + end +end diff --git a/elasticsearch-model/spec/support/app/authorship.rb b/elasticsearch-model/spec/support/app/authorship.rb new file mode 100644 index 000000000..70bc2458f --- /dev/null +++ b/elasticsearch-model/spec/support/app/authorship.rb @@ -0,0 +1,4 @@ +class Authorship < ActiveRecord::Base + belongs_to :author + belongs_to :post, touch: true +end diff --git a/elasticsearch-model/spec/support/app/category.rb b/elasticsearch-model/spec/support/app/category.rb new file mode 100644 index 000000000..751413c0d --- /dev/null +++ b/elasticsearch-model/spec/support/app/category.rb @@ -0,0 +1,3 @@ +class Category < ActiveRecord::Base + has_and_belongs_to_many :posts +end diff --git a/elasticsearch-model/spec/support/app/comment.rb b/elasticsearch-model/spec/support/app/comment.rb new file mode 100644 index 000000000..49a25832c --- /dev/null +++ b/elasticsearch-model/spec/support/app/comment.rb @@ -0,0 +1,3 @@ +class Comment < ActiveRecord::Base + belongs_to :post, touch: true +end diff --git a/elasticsearch-model/spec/support/app/episode.rb b/elasticsearch-model/spec/support/app/episode.rb new file mode 100644 index 000000000..6cd159c26 --- /dev/null +++ b/elasticsearch-model/spec/support/app/episode.rb @@ -0,0 +1,11 @@ +class Episode < ActiveRecord::Base + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + settings index: {number_of_shards: 1, number_of_replicas: 0} do + mapping do + indexes :name, type: 'text', analyzer: 'snowball' + indexes :created_at, type: 'date' + end + end +end diff --git a/elasticsearch-model/spec/support/app/image.rb b/elasticsearch-model/spec/support/app/image.rb new file mode 100644 index 000000000..8bddcd08b --- /dev/null +++ b/elasticsearch-model/spec/support/app/image.rb @@ -0,0 +1,19 @@ +class Image + include Mongoid::Document + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + field :name, type: String + attr_accessible :name if respond_to? :attr_accessible + + settings index: {number_of_shards: 1, number_of_replicas: 0} do + mapping do + indexes :name, type: 'text', analyzer: 'snowball' + indexes :created_at, type: 'date' + end + end + + def as_indexed_json(options={}) + as_json(except: [:_id]) + end +end diff --git a/elasticsearch-model/spec/support/app/import_article.rb b/elasticsearch-model/spec/support/app/import_article.rb new file mode 100644 index 000000000..d25580786 --- /dev/null +++ b/elasticsearch-model/spec/support/app/import_article.rb @@ -0,0 +1,12 @@ +class ImportArticle < ActiveRecord::Base + include Elasticsearch::Model + + scope :popular, -> { where('views >= 5') } + + mapping do + indexes :title, type: 'text' + indexes :views, type: 'integer' + indexes :numeric, type: 'integer' + indexes :created_at, type: 'date' + end +end diff --git a/elasticsearch-model/spec/support/app/mongoid_article.rb b/elasticsearch-model/spec/support/app/mongoid_article.rb new file mode 100644 index 000000000..cf3a67a84 --- /dev/null +++ b/elasticsearch-model/spec/support/app/mongoid_article.rb @@ -0,0 +1,21 @@ +class ::MongoidArticle + include Mongoid::Document + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + field :id, type: String + field :title, type: String + field :views, type: Integer + attr_accessible :title if respond_to? 
:attr_accessible + + settings index: { number_of_shards: 1, number_of_replicas: 0 } do + mapping do + indexes :title, type: 'text', analyzer: 'snowball' + indexes :created_at, type: 'date' + end + end + + def as_indexed_json(options={}) + as_json(except: [:id, :_id]) + end +end diff --git a/elasticsearch-model/spec/support/app/namespaced_book.rb b/elasticsearch-model/spec/support/app/namespaced_book.rb new file mode 100644 index 000000000..07a500928 --- /dev/null +++ b/elasticsearch-model/spec/support/app/namespaced_book.rb @@ -0,0 +1,10 @@ +module MyNamespace + class Book < ActiveRecord::Base + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + document_type 'book' + + mapping { indexes :title } + end +end diff --git a/elasticsearch-model/spec/support/app/parent_and_child_searchable.rb b/elasticsearch-model/spec/support/app/parent_and_child_searchable.rb new file mode 100644 index 000000000..fd2f4417a --- /dev/null +++ b/elasticsearch-model/spec/support/app/parent_and_child_searchable.rb @@ -0,0 +1,24 @@ +module ParentChildSearchable + INDEX_NAME = 'questions_and_answers'.freeze + JOIN = 'join'.freeze + + def create_index!(options={}) + client = Question.__elasticsearch__.client + client.indices.delete index: INDEX_NAME rescue nil if options[:force] + + settings = Question.settings.to_hash.merge Answer.settings.to_hash + mapping_properties = { join_field: { type: JOIN, + relations: { Question::JOIN_TYPE => Answer::JOIN_TYPE } } } + + merged_properties = mapping_properties.merge(Question.mappings.to_hash[:doc][:properties]).merge( + Answer.mappings.to_hash[:doc][:properties]) + mappings = { doc: { properties: merged_properties }} + + client.indices.create index: INDEX_NAME, + body: { + settings: settings.to_hash, + mappings: mappings } + end + + extend self +end diff --git a/elasticsearch-model/spec/support/app/post.rb b/elasticsearch-model/spec/support/app/post.rb new file mode 100644 index 000000000..0cdbba7bb --- /dev/null +++ b/elasticsearch-model/spec/support/app/post.rb @@ -0,0 +1,14 @@ +class Post < ActiveRecord::Base + include Searchable + + has_and_belongs_to_many :categories, after_add: [ lambda { |a,c| a.__elasticsearch__.index_document } ], + after_remove: [ lambda { |a,c| a.__elasticsearch__.index_document } ] + has_many :authorships + has_many :authors, through: :authorships, + after_add: [ lambda { |a,c| a.__elasticsearch__.index_document } ], + after_remove: [ lambda { |a,c| a.__elasticsearch__.index_document } ] + has_many :comments, after_add: [ lambda { |a,c| a.__elasticsearch__.index_document } ], + after_remove: [ lambda { |a,c| a.__elasticsearch__.index_document } ] + + after_touch() { __elasticsearch__.index_document } +end diff --git a/elasticsearch-model/spec/support/app/question.rb b/elasticsearch-model/spec/support/app/question.rb new file mode 100644 index 000000000..f64a97a92 --- /dev/null +++ b/elasticsearch-model/spec/support/app/question.rb @@ -0,0 +1,27 @@ +class Question < ActiveRecord::Base + include Elasticsearch::Model + + has_many :answers, dependent: :destroy + + JOIN_TYPE = 'question'.freeze + JOIN_METADATA = { join_field: JOIN_TYPE}.freeze + + index_name 'questions_and_answers'.freeze + document_type 'doc'.freeze + + mapping do + indexes :title + indexes :text + indexes :author + end + + def as_indexed_json(options={}) + # This line is necessary for differences between ActiveModel::Serializers::JSON#as_json versions + json = as_json(options)[JOIN_TYPE] || as_json(options) + json.merge(JOIN_METADATA) + end + + after_commit 
lambda { __elasticsearch__.index_document }, on: :create + after_commit lambda { __elasticsearch__.update_document }, on: :update + after_commit lambda { __elasticsearch__.delete_document }, on: :destroy +end diff --git a/elasticsearch-model/spec/support/app/searchable.rb b/elasticsearch-model/spec/support/app/searchable.rb new file mode 100644 index 000000000..826a64875 --- /dev/null +++ b/elasticsearch-model/spec/support/app/searchable.rb @@ -0,0 +1,48 @@ +module Searchable + extend ActiveSupport::Concern + + included do + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + # Set up the mapping + # + settings index: { number_of_shards: 1, number_of_replicas: 0 } do + mapping do + indexes :title, analyzer: 'snowball' + indexes :created_at, type: 'date' + + indexes :authors do + indexes :first_name + indexes :last_name + indexes :full_name, type: 'text' do + indexes :raw, type: 'keyword' + end + end + + indexes :categories, type: 'keyword' + + indexes :comments, type: 'nested' do + indexes :text + indexes :author + end + end + end + + # Customize the JSON serialization for Elasticsearch + # + def as_indexed_json(options={}) + { + title: title, + text: text, + categories: categories.map(&:title), + authors: authors.as_json(methods: [:full_name], only: [:full_name, :first_name, :last_name]), + comments: comments.as_json(only: [:text, :author]) + } + end + + # Update document in the index after touch + # + after_touch() { __elasticsearch__.index_document } + end +end diff --git a/elasticsearch-model/spec/support/app/series.rb b/elasticsearch-model/spec/support/app/series.rb new file mode 100644 index 000000000..d10a04748 --- /dev/null +++ b/elasticsearch-model/spec/support/app/series.rb @@ -0,0 +1,11 @@ +class Series < ActiveRecord::Base + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + settings index: {number_of_shards: 1, number_of_replicas: 0} do + mapping do + indexes :name, type: 'text', analyzer: 'snowball' + indexes :created_at, type: 'date' + end + end +end diff --git a/elasticsearch-model/spec/support/model.json b/elasticsearch-model/spec/support/model.json new file mode 100644 index 000000000..0c3e4a388 --- /dev/null +++ b/elasticsearch-model/spec/support/model.json @@ -0,0 +1 @@ +{ "laz": "qux" } diff --git a/elasticsearch-model/test/support/model.yml b/elasticsearch-model/spec/support/model.yml similarity index 100% rename from elasticsearch-model/test/support/model.yml rename to elasticsearch-model/spec/support/model.yml diff --git a/elasticsearch-model/test/integration/active_record_associations_parent_child_test.rb b/elasticsearch-model/test/integration/active_record_associations_parent_child_test.rb deleted file mode 100644 index de5db34b0..000000000 --- a/elasticsearch-model/test/integration/active_record_associations_parent_child_test.rb +++ /dev/null @@ -1,147 +0,0 @@ -require 'test_helper' -require 'active_record' - -# Needed for ActiveRecord 3.x ? -ActiveRecord::Base.establish_connection( :adapter => 'sqlite3', :database => ":memory:" ) unless ActiveRecord::Base.connected? 
- -::ActiveRecord::Base.raise_in_transactional_callbacks = true if ::ActiveRecord::Base.respond_to?(:raise_in_transactional_callbacks) && ::ActiveRecord::VERSION::MAJOR.to_s < '5' - -class Question < ActiveRecord::Base - include Elasticsearch::Model - - has_many :answers, dependent: :destroy - - index_name 'questions_and_answers' - - mapping do - indexes :title - indexes :text - indexes :author - end - - after_commit lambda { __elasticsearch__.index_document }, on: :create - after_commit lambda { __elasticsearch__.update_document }, on: :update - after_commit lambda { __elasticsearch__.delete_document }, on: :destroy -end - -class Answer < ActiveRecord::Base - include Elasticsearch::Model - - belongs_to :question - - index_name 'questions_and_answers' - - mapping _parent: { type: 'question' }, _routing: { required: true } do - indexes :text - indexes :author - end - - after_commit lambda { __elasticsearch__.index_document(parent: question_id) }, on: :create - after_commit lambda { __elasticsearch__.update_document(parent: question_id) }, on: :update - after_commit lambda { __elasticsearch__.delete_document(parent: question_id) }, on: :destroy -end - -module ParentChildSearchable - INDEX_NAME = 'questions_and_answers' - - def create_index!(options={}) - client = Question.__elasticsearch__.client - client.indices.delete index: INDEX_NAME rescue nil if options[:force] - - settings = Question.settings.to_hash.merge Answer.settings.to_hash - mappings = Question.mappings.to_hash.merge Answer.mappings.to_hash - - client.indices.create index: INDEX_NAME, - body: { - settings: settings.to_hash, - mappings: mappings.to_hash } - end - - extend self -end - -module Elasticsearch - module Model - class ActiveRecordAssociationsParentChildIntegrationTest < Elasticsearch::Test::IntegrationTestCase - - context "ActiveRecord associations with parent/child modelling" do - setup do - ActiveRecord::Schema.define(version: 1) do - create_table :questions do |t| - t.string :title - t.text :text - t.string :author - t.timestamps null: false - end - - create_table :answers do |t| - t.text :text - t.string :author - t.references :question - t.timestamps null: false - end - - add_index(:answers, :question_id) unless index_exists?(:answers, :question_id) - end - - Question.delete_all - ParentChildSearchable.create_index! force: true - - q_1 = Question.create! title: 'First Question', author: 'John' - q_2 = Question.create! title: 'Second Question', author: 'Jody' - - q_1.answers.create! text: 'Lorem Ipsum', author: 'Adam' - q_1.answers.create! text: 'Dolor Sit', author: 'Ryan' - - q_2.answers.create! text: 'Amet Et', author: 'John' - - Question.__elasticsearch__.refresh_index! - end - - should "find questions by matching answers" do - response = Question.search( - { query: { - has_child: { - type: 'answer', - query: { - match: { - author: 'john' - } - } - } - } - }) - - assert_equal 'Second Question', response.records.first.title - end - - should "find answers for matching questions" do - response = Answer.search( - { query: { - has_parent: { - parent_type: 'question', - query: { - match: { - author: 'john' - } - } - } - } - }) - - assert_same_elements ['Adam', 'Ryan'], response.records.map(&:author) - end - - should "delete answers when the question is deleted" do - Question.where(title: 'First Question').each(&:destroy) - Question.__elasticsearch__.refresh_index! 
- - response = Answer.search query: { match_all: {} } - - assert_equal 1, response.results.total - end - end - - end - end -end diff --git a/elasticsearch-model/test/integration/active_record_associations_test.rb b/elasticsearch-model/test/integration/active_record_associations_test.rb deleted file mode 100644 index 87a1301d8..000000000 --- a/elasticsearch-model/test/integration/active_record_associations_test.rb +++ /dev/null @@ -1,339 +0,0 @@ -require 'test_helper' -require 'active_record' - -# Needed for ActiveRecord 3.x ? -ActiveRecord::Base.establish_connection( :adapter => 'sqlite3', :database => ":memory:" ) unless ActiveRecord::Base.connected? - -::ActiveRecord::Base.raise_in_transactional_callbacks = true if ::ActiveRecord::Base.respond_to?(:raise_in_transactional_callbacks) && ::ActiveRecord::VERSION::MAJOR.to_s < '5' - -module Elasticsearch - module Model - class ActiveRecordAssociationsIntegrationTest < Elasticsearch::Test::IntegrationTestCase - - # ----- Search integration via Concern module ----------------------------------------------------- - - module Searchable - extend ActiveSupport::Concern - - included do - include Elasticsearch::Model - include Elasticsearch::Model::Callbacks - - # Set up the mapping - # - settings index: { number_of_shards: 1, number_of_replicas: 0 } do - mapping do - indexes :title, analyzer: 'snowball' - indexes :created_at, type: 'date' - - indexes :authors do - indexes :first_name - indexes :last_name - indexes :full_name, type: 'text' do - indexes :raw, type: 'keyword' - end - end - - indexes :categories, type: 'keyword' - - indexes :comments, type: 'nested' do - indexes :text - indexes :author - end - end - end - - # Customize the JSON serialization for Elasticsearch - # - def as_indexed_json(options={}) - { - title: title, - text: text, - categories: categories.map(&:title), - authors: authors.as_json(methods: [:full_name], only: [:full_name, :first_name, :last_name]), - comments: comments.as_json(only: [:text, :author]) - } - end - - # Update document in the index after touch - # - after_touch() { __elasticsearch__.index_document } - end - end - - context "ActiveRecord associations" do - setup do - - # ----- Schema definition --------------------------------------------------------------- - - ActiveRecord::Schema.define(version: 1) do - create_table :categories do |t| - t.string :title - t.timestamps null: false - end - - create_table :categories_posts, id: false do |t| - t.references :post, :category - end - - create_table :authors do |t| - t.string :first_name, :last_name - t.timestamps null: false - end - - create_table :authorships do |t| - t.string :first_name, :last_name - t.references :post - t.references :author - t.timestamps null: false - end - - create_table :comments do |t| - t.string :text - t.string :author - t.references :post - t.timestamps null: false - end - - add_index(:comments, :post_id) unless index_exists?(:comments, :post_id) - - create_table :posts do |t| - t.string :title - t.text :text - t.boolean :published - t.timestamps null: false - end - end - - # ----- Models definition ------------------------------------------------------------------------- - - class Category < ActiveRecord::Base - has_and_belongs_to_many :posts - end - - class Author < ActiveRecord::Base - has_many :authorships - - after_update { self.authorships.each(&:touch) } - - def full_name - [first_name, last_name].compact.join(' ') - end - end - - class Authorship < ActiveRecord::Base - belongs_to :author - belongs_to :post, touch: true - end - - 
class Comment < ActiveRecord::Base - belongs_to :post, touch: true - end - - class Post < ActiveRecord::Base - has_and_belongs_to_many :categories, after_add: [ lambda { |a,c| a.__elasticsearch__.index_document } ], - after_remove: [ lambda { |a,c| a.__elasticsearch__.index_document } ] - has_many :authorships - has_many :authors, through: :authorships, - after_add: [ lambda { |a,c| a.__elasticsearch__.index_document } ], - after_remove: [ lambda { |a,c| a.__elasticsearch__.index_document } ] - has_many :comments, after_add: [ lambda { |a,c| a.__elasticsearch__.index_document } ], - after_remove: [ lambda { |a,c| a.__elasticsearch__.index_document } ] - - after_touch() { __elasticsearch__.index_document } - end - - # Include the search integration - # - Post.__send__ :include, Searchable - Comment.__send__ :include, Elasticsearch::Model - Comment.__send__ :include, Elasticsearch::Model::Callbacks - - # ----- Reset the indices ----------------------------------------------------------------- - - Post.delete_all - Post.__elasticsearch__.create_index! force: true - - Comment.delete_all - Comment.__elasticsearch__.create_index! force: true - end - - should "index and find a document" do - Post.create! title: 'Test' - Post.create! title: 'Testing Coding' - Post.create! title: 'Coding' - Post.__elasticsearch__.refresh_index! - - response = Post.search('title:test') - - assert_equal 2, response.results.size - assert_equal 2, response.records.size - - assert_equal 'Test', response.results.first.title - assert_equal 'Test', response.records.first.title - end - - should "reindex a document after categories are changed" do - # Create categories - category_a = Category.where(title: "One").first_or_create! - category_b = Category.where(title: "Two").first_or_create! - - # Create post - post = Post.create! title: "First Post", text: "This is the first post..." - - # Assign categories - post.categories = [category_a, category_b] - - Post.__elasticsearch__.refresh_index! - - query = { query: { - bool: { - must: { - multi_match: { - fields: ['title'], - query: 'first' - } - }, - filter: { - terms: { - categories: ['One'] - } - } - } - } - } - - response = Post.search query - - assert_equal 1, response.results.size - assert_equal 1, response.records.size - - # Remove category "One" - post.categories = [category_b] - - Post.__elasticsearch__.refresh_index! - response = Post.search query - - assert_equal 0, response.results.size - assert_equal 0, response.records.size - end - - should "reindex a document after authors are changed" do - # Create authors - author_a = Author.where(first_name: "John", last_name: "Smith").first_or_create! - author_b = Author.where(first_name: "Mary", last_name: "Smith").first_or_create! - author_c = Author.where(first_name: "Kobe", last_name: "Griss").first_or_create! - - # Create posts - post_1 = Post.create! title: "First Post", text: "This is the first post..." - post_2 = Post.create! title: "Second Post", text: "This is the second post..." - post_3 = Post.create! title: "Third Post", text: "This is the third post..." - - # Assign authors - post_1.authors = [author_a, author_b] - post_2.authors = [author_a] - post_3.authors = [author_c] - - Post.__elasticsearch__.refresh_index! - - response = Post.search 'authors.full_name:john' - - assert_equal 2, response.results.size - assert_equal 2, response.records.size - - post_3.authors << author_a - - Post.__elasticsearch__.refresh_index! 
- - response = Post.search 'authors.full_name:john' - - assert_equal 3, response.results.size - assert_equal 3, response.records.size - end if defined?(::ActiveRecord) && ::ActiveRecord::VERSION::MAJOR >= 4 - - should "reindex a document after comments are added" do - # Create posts - post_1 = Post.create! title: "First Post", text: "This is the first post..." - post_2 = Post.create! title: "Second Post", text: "This is the second post..." - - # Add comments - post_1.comments.create! author: 'John', text: 'Excellent' - post_1.comments.create! author: 'Abby', text: 'Good' - - post_2.comments.create! author: 'John', text: 'Terrible' - - Post.__elasticsearch__.refresh_index! - - response = Post.search 'comments.author:john AND comments.text:good' - assert_equal 0, response.results.size - - # Add comment - post_1.comments.create! author: 'John', text: 'Or rather just good...' - - Post.__elasticsearch__.refresh_index! - - response = Post.search 'comments.author:john AND comments.text:good' - assert_equal 0, response.results.size - - response = Post.search \ - query: { - nested: { - path: 'comments', - query: { - bool: { - must: [ - { match: { 'comments.author' => 'john' } }, - { match: { 'comments.text' => 'good' } } - ] - } - } - } - } - - assert_equal 1, response.results.size - end if defined?(::ActiveRecord) && ::ActiveRecord::VERSION::MAJOR >= 4 - - should "reindex a document after Post#touch" do - # Create categories - category_a = Category.where(title: "One").first_or_create! - - # Create post - post = Post.create! title: "First Post", text: "This is the first post..." - - # Assign category - post.categories << category_a - - Post.__elasticsearch__.refresh_index! - - assert_equal 1, Post.search('categories:One').size - - # Update category - category_a.update_attribute :title, "Updated" - - # Trigger touch on posts in category - category_a.posts.each { |p| p.touch } - - Post.__elasticsearch__.refresh_index! - - assert_equal 0, Post.search('categories:One').size - assert_equal 1, Post.search('categories:Updated').size - end - - should "eagerly load associated records" do - post_1 = Post.create(title: 'One') - post_2 = Post.create(title: 'Two') - post_1.comments.create text: 'First comment' - post_1.comments.create text: 'Second comment' - - Comment.__elasticsearch__.refresh_index! - - records = Comment.search('first').records(includes: :post) - - assert records.first.association(:post).loaded?, "The associated Post should be eagerly loaded" - assert_equal 'One', records.first.post.title - end - end - - end - end -end diff --git a/elasticsearch-model/test/integration/active_record_basic_test.rb b/elasticsearch-model/test/integration/active_record_basic_test.rb deleted file mode 100644 index 26c5785f2..000000000 --- a/elasticsearch-model/test/integration/active_record_basic_test.rb +++ /dev/null @@ -1,251 +0,0 @@ -require 'test_helper' -require 'active_record' - -puts "ActiveRecord #{ActiveRecord::VERSION::STRING}", '-'*80 - -# Needed for ActiveRecord 3.x ? -ActiveRecord::Base.establish_connection( :adapter => 'sqlite3', :database => ":memory:" ) unless ActiveRecord::Base.connected? 
- -::ActiveRecord::Base.raise_in_transactional_callbacks = true if ::ActiveRecord::Base.respond_to?(:raise_in_transactional_callbacks) && ::ActiveRecord::VERSION::MAJOR.to_s < '5' - -module Elasticsearch - module Model - class ActiveRecordBasicIntegrationTest < Elasticsearch::Test::IntegrationTestCase - - class ::Article < ActiveRecord::Base - include Elasticsearch::Model - include Elasticsearch::Model::Callbacks - - settings index: { number_of_shards: 1, number_of_replicas: 0 } do - mapping do - indexes :title, type: 'text', analyzer: 'snowball' - indexes :body, type: 'text' - indexes :clicks, type: 'integer' - indexes :created_at, type: 'date' - end - end - - def as_indexed_json(options = {}) - attributes - .symbolize_keys - .slice(:title, :body, :clicks, :created_at) - .merge(suggest_title: title) - end - end - - context "ActiveRecord basic integration" do - setup do - ActiveRecord::Schema.define(:version => 1) do - create_table :articles do |t| - t.string :title - t.string :body - t.integer :clicks, :default => 0 - t.datetime :created_at, :default => 'NOW()' - end - end - - Article.delete_all - Article.__elasticsearch__.create_index! force: true - - ::Article.create! title: 'Test', body: '', clicks: 1 - ::Article.create! title: 'Testing Coding', body: '', clicks: 2 - ::Article.create! title: 'Coding', body: '', clicks: 3 - - Article.__elasticsearch__.refresh_index! - end - - should "index and find a document" do - response = Article.search('title:test') - - assert response.any?, "Response should not be empty: #{response.to_a.inspect}" - - assert_equal 2, response.results.size - assert_equal 2, response.records.size - - assert_instance_of Elasticsearch::Model::Response::Result, response.results.first - assert_instance_of Article, response.records.first - - assert_equal 'Test', response.results.first.title - assert_equal 'Test', response.records.first.title - end - - should "provide access to result" do - response = Article.search query: { match: { title: 'test' } }, highlight: { fields: { title: {} } } - - assert_equal 'Test', response.results.first.title - - assert_equal true, response.results.first.title? - assert_equal false, response.results.first.boo? - - assert_equal true, response.results.first.highlight? - assert_equal true, response.results.first.highlight.title? - assert_equal false, response.results.first.highlight.boo? 
- end - - should "iterate over results" do - response = Article.search('title:test') - - assert_equal ['1', '2'], response.results.map(&:_id) - assert_equal [1, 2], response.records.map(&:id) - end - - should "return _id and _type as #id and #type" do - response = Article.search('title:test') - - assert_equal '1', response.results.first.id - assert_equal 'article', response.results.first.type - end - - should "access results from records" do - response = Article.search('title:test') - - response.records.each_with_hit do |r, h| - assert_not_nil h._score - assert_not_nil h._source.title - end - end - - should "preserve the search results order for records" do - response = Article.search query: { match: { title: 'code' }}, sort: { clicks: :desc } - - assert_equal response.records[0].clicks, 3 - assert_equal response.records[1].clicks, 2 - - response.records.each_with_hit do |r, h| - assert_equal h._id, r.id.to_s - end - - response.records.map_with_hit do |r, h| - assert_equal h._id, r.id.to_s - end - end - - should "remove document from index on destroy" do - article = Article.first - - article.destroy - assert_equal 2, Article.count - - Article.__elasticsearch__.refresh_index! - - response = Article.search 'title:test' - - assert_equal 1, response.results.size - assert_equal 1, response.records.size - end - - should "index updates to the document" do - article = Article.first - - article.title = 'Writing' - article.save - - Article.__elasticsearch__.refresh_index! - - response = Article.search 'title:write' - - assert_equal 1, response.results.size - assert_equal 1, response.records.size - end - - should "update specific attributes" do - article = Article.first - - response = Article.search 'title:special' - - assert_equal 0, response.results.size - assert_equal 0, response.records.size - - article.__elasticsearch__.update_document_attributes title: 'special' - - Article.__elasticsearch__.refresh_index! - - response = Article.search 'title:special' - - assert_equal 1, response.results.size - assert_equal 1, response.records.size - end - - should "update document when save is called multiple times in a transaction" do - article = Article.first - response = Article.search 'body:dummy' - - assert_equal 0, response.results.size - assert_equal 0, response.records.size - - ActiveRecord::Base.transaction do - article.body = 'dummy' - article.save - - article.title = 'special' - article.save - end - - article.__elasticsearch__.update_document - Article.__elasticsearch__.refresh_index! 
- - response = Article.search 'body:dummy' - assert_equal 1, response.results.size - assert_equal 1, response.records.size - end - - should "return results for a DSL search" do - response = Article.search query: { match: { title: { query: 'test' } } } - - assert_equal 2, response.results.size - assert_equal 2, response.records.size - end - - should "return a paged collection" do - response = Article.search query: { match: { title: { query: 'test' } } }, - size: 2, - from: 1 - - assert_equal 1, response.results.size - assert_equal 1, response.records.size - - assert_equal 'Testing Coding', response.results.first.title - assert_equal 'Testing Coding', response.records.first.title - end - - should "allow chaining SQL commands on response.records" do - response = Article.search query: { match: { title: { query: 'test' } } } - - assert_equal 2, response.records.size - assert_equal 1, response.records.where(title: 'Test').size - assert_equal 'Test', response.records.where(title: 'Test').first.title - end - - should "allow ordering response.records in SQL" do - response = Article.search query: { match: { title: { query: 'test' } } } - - if defined?(::ActiveRecord) && ::ActiveRecord::VERSION::MAJOR >= 4 - assert_equal 'Testing Coding', response.records.order(title: :desc).first.title - else - assert_equal 'Testing Coding', response.records.order('title DESC').first.title - end - end - - should "allow dot access to response" do - response = Article.search query: { match: { title: { query: 'test' } } }, - aggregations: { - dates: { date_histogram: { field: 'created_at', interval: 'hour' } }, - clicks: { global: {}, aggregations: { min: { min: { field: 'clicks' } } } } - }, - suggest: { text: 'tezt', title: { term: { field: 'title', suggest_mode: 'always' } } } - - response.response.respond_to?(:aggregations) - assert_equal 2, response.aggregations.dates.buckets.first.doc_count - assert_equal 3, response.aggregations.clicks.doc_count - assert_equal 1.0, response.aggregations.clicks.min.value - assert_nil response.aggregations.clicks.max - - response.response.respond_to?(:suggest) - assert_equal 1, response.suggestions.title.first.options.size - assert_equal ['test'], response.suggestions.terms - end - end - - end - end -end diff --git a/elasticsearch-model/test/integration/active_record_custom_serialization_test.rb b/elasticsearch-model/test/integration/active_record_custom_serialization_test.rb deleted file mode 100644 index cb706cf99..000000000 --- a/elasticsearch-model/test/integration/active_record_custom_serialization_test.rb +++ /dev/null @@ -1,67 +0,0 @@ -require 'test_helper' -require 'active_record' - -# Needed for ActiveRecord 3.x ? -ActiveRecord::Base.establish_connection( :adapter => 'sqlite3', :database => ":memory:" ) unless ActiveRecord::Base.connected? 
- -::ActiveRecord::Base.raise_in_transactional_callbacks = true if ::ActiveRecord::Base.respond_to?(:raise_in_transactional_callbacks) && ::ActiveRecord::VERSION::MAJOR.to_s < '5' - -module Elasticsearch - module Model - class ActiveRecordCustomSerializationTest < Elasticsearch::Test::IntegrationTestCase - context "ActiveRecord model with custom JSON serialization" do - setup do - class ::ArticleWithCustomSerialization < ActiveRecord::Base - include Elasticsearch::Model - include Elasticsearch::Model::Callbacks - - mapping do - indexes :title - end - - def as_indexed_json(options={}) - # as_json(options.merge root: false).slice('title') - { title: self.title } - end - end - - ActiveRecord::Schema.define(:version => 1) do - create_table ArticleWithCustomSerialization.table_name do |t| - t.string :title - t.string :status - end - end - - ArticleWithCustomSerialization.delete_all - ArticleWithCustomSerialization.__elasticsearch__.create_index! force: true - end - - should "index only the title attribute when creating" do - ArticleWithCustomSerialization.create! title: 'Test', status: 'green' - - a = ArticleWithCustomSerialization.__elasticsearch__.client.get \ - index: 'article_with_custom_serializations', - type: 'article_with_custom_serialization', - id: '1' - - assert_equal( { 'title' => 'Test' }, a['_source'] ) - end - - should "index only the title attribute when updating" do - ArticleWithCustomSerialization.create! title: 'Test', status: 'green' - - article = ArticleWithCustomSerialization.first - article.update_attributes title: 'UPDATED', status: 'red' - - a = ArticleWithCustomSerialization.__elasticsearch__.client.get \ - index: 'article_with_custom_serializations', - type: 'article_with_custom_serialization', - id: '1' - - assert_equal( { 'title' => 'UPDATED' }, a['_source'] ) - end - end - - end - end -end diff --git a/elasticsearch-model/test/integration/active_record_import_test.rb b/elasticsearch-model/test/integration/active_record_import_test.rb deleted file mode 100644 index ed8b85c1d..000000000 --- a/elasticsearch-model/test/integration/active_record_import_test.rb +++ /dev/null @@ -1,115 +0,0 @@ -require 'test_helper' -require 'active_record' - -# Needed for ActiveRecord 3.x ? -ActiveRecord::Base.establish_connection( :adapter => 'sqlite3', :database => ":memory:" ) unless ActiveRecord::Base.connected? - -::ActiveRecord::Base.raise_in_transactional_callbacks = true if ::ActiveRecord::Base.respond_to?(:raise_in_transactional_callbacks) && ::ActiveRecord::VERSION::MAJOR.to_s < '5' - -module Elasticsearch - module Model - class ActiveRecordImportIntegrationTest < Elasticsearch::Test::IntegrationTestCase - - class ::ImportArticle < ActiveRecord::Base - include Elasticsearch::Model - - scope :popular, -> { where('views >= 50') } - - mapping do - indexes :title, type: 'text' - indexes :views, type: 'integer' - indexes :numeric, type: 'integer' - indexes :created_at, type: 'date' - end - end - - context "ActiveRecord importing" do - setup do - ActiveRecord::Schema.define(:version => 1) do - create_table :import_articles do |t| - t.string :title - t.integer :views - t.string :numeric # For the sake of invalid data sent to Elasticsearch - t.datetime :created_at, :default => 'NOW()' - end - end - - ImportArticle.delete_all - ImportArticle.__elasticsearch__.create_index! force: true - ImportArticle.__elasticsearch__.client.cluster.health wait_for_status: 'yellow' - - 100.times { |i| ImportArticle.create! 
title: "Test #{i}", views: i } - end - - should "import all the documents" do - assert_equal 100, ImportArticle.count - - ImportArticle.__elasticsearch__.refresh_index! - assert_equal 0, ImportArticle.search('*').results.total - - batches = 0 - errors = ImportArticle.import(batch_size: 10) do |response| - batches += 1 - end - - assert_equal 0, errors - assert_equal 10, batches - - ImportArticle.__elasticsearch__.refresh_index! - assert_equal 100, ImportArticle.search('*').results.total - end - - should "import only documents from a specific scope" do - assert_equal 100, ImportArticle.count - - assert_equal 0, ImportArticle.import(scope: 'popular') - - ImportArticle.__elasticsearch__.refresh_index! - assert_equal 50, ImportArticle.search('*').results.total - end - - should "import only documents from a specific query" do - assert_equal 100, ImportArticle.count - - assert_equal 0, ImportArticle.import(query: -> { where('views >= 30') }) - - ImportArticle.__elasticsearch__.refresh_index! - assert_equal 70, ImportArticle.search('*').results.total - end - - should "report and not store/index invalid documents" do - ImportArticle.create! title: "Test INVALID", numeric: "INVALID" - - assert_equal 101, ImportArticle.count - - ImportArticle.__elasticsearch__.refresh_index! - assert_equal 0, ImportArticle.search('*').results.total - - batches = 0 - errors = ImportArticle.__elasticsearch__.import(batch_size: 10) do |response| - batches += 1 - end - - assert_equal 1, errors - assert_equal 11, batches - - ImportArticle.__elasticsearch__.refresh_index! - assert_equal 100, ImportArticle.search('*').results.total - end - - should "transform documents with the option" do - assert_equal 100, ImportArticle.count - - assert_equal 0, ImportArticle.import( transform: ->(a) {{ index: { data: { name: a.title, foo: 'BAR' } }}} ) - - ImportArticle.__elasticsearch__.refresh_index! - assert_contains ImportArticle.search('*').results.first._source.keys, 'name' - assert_contains ImportArticle.search('*').results.first._source.keys, 'foo' - assert_equal 100, ImportArticle.search('test').results.total - assert_equal 100, ImportArticle.search('bar').results.total - end - end - - end - end -end diff --git a/elasticsearch-model/test/integration/active_record_namespaced_model_test.rb b/elasticsearch-model/test/integration/active_record_namespaced_model_test.rb deleted file mode 100644 index 9885b3a1a..000000000 --- a/elasticsearch-model/test/integration/active_record_namespaced_model_test.rb +++ /dev/null @@ -1,54 +0,0 @@ -require 'test_helper' -require 'active_record' - -# Needed for ActiveRecord 3.x ? -ActiveRecord::Base.establish_connection( :adapter => 'sqlite3', :database => ":memory:" ) unless ActiveRecord::Base.connected? - -::ActiveRecord::Base.raise_in_transactional_callbacks = true if ::ActiveRecord::Base.respond_to?(:raise_in_transactional_callbacks) && ::ActiveRecord::VERSION::MAJOR.to_s < '5' - -module Elasticsearch - module Model - class ActiveRecordNamespacedModelIntegrationTest < Elasticsearch::Test::IntegrationTestCase - context "Namespaced ActiveRecord model integration" do - setup do - ActiveRecord::Schema.define(:version => 1) do - create_table :articles do |t| - t.string :title - end - end - - module ::MyNamespace - class Article < ActiveRecord::Base - include Elasticsearch::Model - include Elasticsearch::Model::Callbacks - - mapping { indexes :title } - end - end - - MyNamespace::Article.delete_all - MyNamespace::Article.__elasticsearch__.create_index! force: true - - MyNamespace::Article.create! 
title: 'Test' - - MyNamespace::Article.__elasticsearch__.refresh_index! - end - - should "have proper index name and document type" do - assert_equal "my_namespace-articles", MyNamespace::Article.index_name - assert_equal "article", MyNamespace::Article.document_type - end - - should "save document into index on save and find it" do - response = MyNamespace::Article.search 'title:test' - - assert response.any?, "No results returned: #{response.inspect}" - assert_equal 1, response.size - - assert_equal 'Test', response.results.first.title - end - end - - end - end -end diff --git a/elasticsearch-model/test/integration/active_record_pagination_test.rb b/elasticsearch-model/test/integration/active_record_pagination_test.rb deleted file mode 100644 index 0b5e259ee..000000000 --- a/elasticsearch-model/test/integration/active_record_pagination_test.rb +++ /dev/null @@ -1,149 +0,0 @@ -require 'test_helper' -require 'active_record' - -# Needed for ActiveRecord 3.x ? -ActiveRecord::Base.establish_connection( :adapter => 'sqlite3', :database => ":memory:" ) unless ActiveRecord::Base.connected? - -::ActiveRecord::Base.raise_in_transactional_callbacks = true if ::ActiveRecord::Base.respond_to?(:raise_in_transactional_callbacks) && ::ActiveRecord::VERSION::MAJOR.to_s < '5' - -module Elasticsearch - module Model - class ActiveRecordPaginationTest < Elasticsearch::Test::IntegrationTestCase - class ::ArticleForPagination < ActiveRecord::Base - include Elasticsearch::Model - - scope :published, -> { where(published: true) } - - settings index: { number_of_shards: 1, number_of_replicas: 0 } do - mapping do - indexes :title, type: 'text', analyzer: 'snowball' - indexes :created_at, type: 'date' - end - end - end - - context "ActiveRecord pagination" do - setup do - ActiveRecord::Schema.define(:version => 1) do - create_table ::ArticleForPagination.table_name do |t| - t.string :title - t.datetime :created_at, :default => 'NOW()' - t.boolean :published - end - end - - Kaminari::Hooks.init if defined?(Kaminari::Hooks) - - ArticleForPagination.delete_all - ArticleForPagination.__elasticsearch__.create_index! force: true - - 68.times do |i| - ::ArticleForPagination.create! title: "Test #{i}", published: (i % 2 == 0) - end - - ArticleForPagination.import - ArticleForPagination.__elasticsearch__.refresh_index! - end - - should "be on the first page by default" do - records = ArticleForPagination.search('title:test').page(1).records - - assert_equal 25, records.size - assert_equal 1, records.current_page - assert_equal nil, records.prev_page - assert_equal 2, records.next_page - assert_equal 3, records.total_pages - - assert records.first_page?, "Should be the first page" - assert ! records.last_page?, "Should NOT be the last page" - assert ! records.out_of_range?, "Should NOT be out of range" - end - - should "load next page" do - records = ArticleForPagination.search('title:test').page(2).records - - assert_equal 25, records.size - assert_equal 2, records.current_page - assert_equal 1, records.prev_page - assert_equal 3, records.next_page - assert_equal 3, records.total_pages - - assert ! records.first_page?, "Should NOT be the first page" - assert ! records.last_page?, "Should NOT be the last page" - assert ! 
records.out_of_range?, "Should NOT be out of range" - end - - should "load last page" do - records = ArticleForPagination.search('title:test').page(3).records - - assert_equal 18, records.size - assert_equal 3, records.current_page - assert_equal 2, records.prev_page - assert_equal nil, records.next_page - assert_equal 3, records.total_pages - - assert ! records.first_page?, "Should NOT be the first page" - assert records.last_page?, "Should be the last page" - assert ! records.out_of_range?, "Should NOT be out of range" - end - - should "not load invalid page" do - records = ArticleForPagination.search('title:test').page(6).records - - assert_equal 0, records.size - assert_equal 6, records.current_page - - assert_equal nil, records.next_page - assert_equal 3, records.total_pages - - assert ! records.first_page?, "Should NOT be the first page" - assert records.out_of_range?, "Should be out of range" - end - - should "be combined with scopes" do - records = ArticleForPagination.search('title:test').page(2).records.published - assert records.all? { |r| r.published? } - assert_equal 12, records.size - end - - should "respect sort" do - search = ArticleForPagination.search({ query: { match: { title: 'test' } }, sort: [ { id: 'desc' } ] }) - - records = search.page(2).records - assert_equal 43, records.first.id # 68 - 25 = 42 - - records = search.page(3).records - assert_equal 18, records.first.id # 68 - (2 * 25) = 18 - - records = search.page(2).per(5).records - assert_equal 63, records.first.id # 68 - 5 = 63 - end - - should "set the limit per request" do - records = ArticleForPagination.search('title:test').limit(50).page(2).records - - assert_equal 18, records.size - assert_equal 2, records.current_page - assert_equal 1, records.prev_page - assert_equal nil, records.next_page - assert_equal 2, records.total_pages - - assert records.last_page?, "Should be the last page" - end - - context "with specific model settings" do - teardown do - ArticleForPagination.instance_variable_set(:@_default_per_page, nil) - end - - should "respect paginates_per" do - ArticleForPagination.paginates_per 50 - - assert_equal 50, ArticleForPagination.search('*').page(1).records.size - end - end - end - - end - end -end diff --git a/elasticsearch-model/test/integration/dynamic_index_name_test.rb b/elasticsearch-model/test/integration/dynamic_index_name_test.rb deleted file mode 100755 index f87db7978..000000000 --- a/elasticsearch-model/test/integration/dynamic_index_name_test.rb +++ /dev/null @@ -1,52 +0,0 @@ -require 'test_helper' -require 'active_record' - -# Needed for ActiveRecord 3.x ? -ActiveRecord::Base.establish_connection( :adapter => 'sqlite3', :database => ":memory:" ) unless ActiveRecord::Base.connected? 
- -::ActiveRecord::Base.raise_in_transactional_callbacks = true if ::ActiveRecord::Base.respond_to?(:raise_in_transactional_callbacks) && ::ActiveRecord::VERSION::MAJOR.to_s < '5' - -module Elasticsearch - module Model - class DynamicIndexNameTest < Elasticsearch::Test::IntegrationTestCase - context "Dynamic index name" do - setup do - class ::ArticleWithDynamicIndexName < ActiveRecord::Base - include Elasticsearch::Model - include Elasticsearch::Model::Callbacks - - def self.counter=(value) - @counter = 0 - end - - def self.counter - (@counter ||= 0) && @counter += 1 - end - - mapping { indexes :title } - index_name { "articles-#{counter}" } - end - - ::ActiveRecord::Schema.define(:version => 1) do - create_table ::ArticleWithDynamicIndexName.table_name do |t| - t.string :title - end - end - - ::ArticleWithDynamicIndexName.counter = 0 - end - - should 'evaluate the index_name value' do - assert_equal ArticleWithDynamicIndexName.index_name, "articles-1" - end - - should 're-evaluate the index_name value each time' do - assert_equal ArticleWithDynamicIndexName.index_name, "articles-1" - assert_equal ArticleWithDynamicIndexName.index_name, "articles-2" - assert_equal ArticleWithDynamicIndexName.index_name, "articles-3" - end - end - - end - end -end diff --git a/elasticsearch-model/test/integration/mongoid_basic_test.rb b/elasticsearch-model/test/integration/mongoid_basic_test.rb deleted file mode 100644 index d46a75d05..000000000 --- a/elasticsearch-model/test/integration/mongoid_basic_test.rb +++ /dev/null @@ -1,177 +0,0 @@ -require 'test_helper' - -Mongo.setup! - -if Mongo.available? - Mongo.connect_to 'mongoid_articles' - - module Elasticsearch - module Model - class MongoidBasicIntegrationTest < Elasticsearch::Test::IntegrationTestCase - - class ::MongoidArticle - include Mongoid::Document - include Elasticsearch::Model - include Elasticsearch::Model::Callbacks - - field :id, type: String - field :title, type: String - attr_accessible :title if respond_to? :attr_accessible - - settings index: { number_of_shards: 1, number_of_replicas: 0 } do - mapping do - indexes :title, type: 'text', analyzer: 'snowball' - indexes :created_at, type: 'date' - end - end - - def as_indexed_json(options={}) - as_json(except: [:id, :_id]) - end - end - - context "Mongoid integration" do - setup do - Elasticsearch::Model::Adapter.register \ - Elasticsearch::Model::Adapter::Mongoid, - lambda { |klass| !!defined?(::Mongoid::Document) && klass.respond_to?(:ancestors) && klass.ancestors.include?(::Mongoid::Document) } - - MongoidArticle.__elasticsearch__.create_index! force: true - - MongoidArticle.delete_all - - MongoidArticle.create! title: 'Test' - MongoidArticle.create! title: 'Testing Coding' - MongoidArticle.create! title: 'Coding' - - MongoidArticle.__elasticsearch__.refresh_index! - MongoidArticle.__elasticsearch__.client.cluster.health wait_for_status: 'yellow' - end - - should "index and find a document" do - response = MongoidArticle.search('title:test') - - assert response.any? 
- - assert_equal 2, response.results.size - assert_equal 2, response.records.size - - assert_instance_of Elasticsearch::Model::Response::Result, response.results.first - assert_instance_of MongoidArticle, response.records.first - - assert_equal 'Test', response.results.first.title - assert_equal 'Test', response.records.first.title - end - - should "iterate over results" do - response = MongoidArticle.search('title:test') - - assert_equal ['Test', 'Testing Coding'], response.results.map(&:title) - assert_equal ['Test', 'Testing Coding'], response.records.map(&:title) - end - - should "access results from records" do - response = MongoidArticle.search('title:test') - - response.records.each_with_hit do |r, h| - assert_not_nil h._score - assert_not_nil h._source.title - end - end - - should "preserve the search results order for records" do - response = MongoidArticle.search('title:code') - - response.records.each_with_hit do |r, h| - assert_equal h._id, r.id.to_s - end - - response.records.map_with_hit do |r, h| - assert_equal h._id, r.id.to_s - end - end - - should "remove document from index on destroy" do - article = MongoidArticle.first - - article.destroy - assert_equal 2, MongoidArticle.count - - MongoidArticle.__elasticsearch__.refresh_index! - - response = MongoidArticle.search 'title:test' - - assert_equal 1, response.results.size - assert_equal 1, response.records.size - end - - should "index updates to the document" do - article = MongoidArticle.first - - article.title = 'Writing' - article.save - - MongoidArticle.__elasticsearch__.refresh_index! - - response = MongoidArticle.search 'title:write' - - assert_equal 1, response.results.size - assert_equal 1, response.records.size - end - - should "return results for a DSL search" do - response = MongoidArticle.search query: { match: { title: { query: 'test' } } } - - assert_equal 2, response.results.size - assert_equal 2, response.records.size - end - - should "return a paged collection" do - response = MongoidArticle.search query: { match: { title: { query: 'test' } } }, - size: 2, - from: 1 - - assert_equal 1, response.results.size - assert_equal 1, response.records.size - - assert_equal 'Testing Coding', response.results.first.title - assert_equal 'Testing Coding', response.records.first.title - end - - - context "importing" do - setup do - MongoidArticle.delete_all - 97.times { |i| MongoidArticle.create! title: "Test #{i}" } - MongoidArticle.__elasticsearch__.create_index! force: true - MongoidArticle.__elasticsearch__.client.cluster.health wait_for_status: 'yellow' - end - - should "import all the documents" do - assert_equal 97, MongoidArticle.count - - MongoidArticle.__elasticsearch__.refresh_index! - assert_equal 0, MongoidArticle.search('*').results.total - - batches = 0 - errors = MongoidArticle.import(batch_size: 10) do |response| - batches += 1 - end - - assert_equal 0, errors - assert_equal 10, batches - - MongoidArticle.__elasticsearch__.refresh_index! 
- assert_equal 97, MongoidArticle.search('*').results.total - - response = MongoidArticle.search('test') - assert response.results.any?, "Search has not returned results: #{response.to_a}" - end - end - end - - end - end - end - -end diff --git a/elasticsearch-model/test/integration/multiple_models_test.rb b/elasticsearch-model/test/integration/multiple_models_test.rb deleted file mode 100644 index 7d3a62705..000000000 --- a/elasticsearch-model/test/integration/multiple_models_test.rb +++ /dev/null @@ -1,176 +0,0 @@ -require 'test_helper' -require 'active_record' - -# Needed for ActiveRecord 3.x ? -ActiveRecord::Base.establish_connection( :adapter => 'sqlite3', :database => ":memory:" ) unless ActiveRecord::Base.connected? - -::ActiveRecord::Base.raise_in_transactional_callbacks = true if ::ActiveRecord::Base.respond_to?(:raise_in_transactional_callbacks) && ::ActiveRecord::VERSION::MAJOR.to_s < '5' - -Mongo.setup! - -module Elasticsearch - module Model - class MultipleModelsIntegration < Elasticsearch::Test::IntegrationTestCase - module ::NameSearch - extend ActiveSupport::Concern - - included do - include Elasticsearch::Model - include Elasticsearch::Model::Callbacks - - settings index: {number_of_shards: 1, number_of_replicas: 0} do - mapping do - indexes :name, type: 'text', analyzer: 'snowball' - indexes :created_at, type: 'date' - end - end - end - end - - class ::Episode < ActiveRecord::Base - include NameSearch - end - - class ::Series < ActiveRecord::Base - include NameSearch - end - - context "Multiple models" do - setup do - ActiveRecord::Schema.define(:version => 1) do - create_table :episodes do |t| - t.string :name - t.datetime :created_at, :default => 'NOW()' - end - - create_table :series do |t| - t.string :name - t.datetime :created_at, :default => 'NOW()' - end - end - - [::Episode, ::Series].each do |model| - model.delete_all - model.__elasticsearch__.create_index! force: true - model.create name: "The #{model.name}" - model.create name: "A great #{model.name}" - model.create name: "The greatest #{model.name}" - model.__elasticsearch__.refresh_index! - end - end - - should "find matching documents across multiple models" do - response = Elasticsearch::Model.search(%q<"The greatest Episode"^2 OR "The greatest Series">, [Series, Episode]) - - assert response.any?, "Response should not be empty: #{response.to_a.inspect}" - - assert_equal 2, response.results.size - assert_equal 2, response.records.size - - assert_instance_of Elasticsearch::Model::Response::Result, response.results.first - assert_instance_of Episode, response.records.first - assert_instance_of Series, response.records.last - - assert_equal 'The greatest Episode', response.results[0].name - assert_equal 'The greatest Episode', response.records[0].name - - assert_equal 'The greatest Series', response.results[1].name - assert_equal 'The greatest Series', response.records[1].name - end - - should "provide access to results" do - response = Elasticsearch::Model.search(%q<"A great Episode"^2 OR "A great Series">, [Series, Episode]) - - assert_equal 'A great Episode', response.results[0].name - assert_equal true, response.results[0].name? - assert_equal false, response.results[0].boo? - - assert_equal 'A great Series', response.results[1].name - assert_equal true, response.results[1].name? - assert_equal false, response.results[1].boo? - end - - should "only retrieve records for existing results" do - ::Series.find_by_name("The greatest Series").delete - ::Series.__elasticsearch__.refresh_index! 
- response = Elasticsearch::Model.search(%q<"The greatest Episode"^2 OR "The greatest Series">, [Series, Episode]) - - assert response.any?, "Response should not be empty: #{response.to_a.inspect}" - - assert_equal 2, response.results.size - assert_equal 1, response.records.size - - assert_instance_of Elasticsearch::Model::Response::Result, response.results.first - assert_instance_of Episode, response.records.first - - assert_equal 'The greatest Episode', response.results[0].name - assert_equal 'The greatest Episode', response.records[0].name - end - - should "paginate the results" do - response = Elasticsearch::Model.search('series OR episode', [Series, Episode]) - - assert_equal 3, response.page(1).per(3).results.size - assert_equal 3, response.page(2).per(3).results.size - assert_equal 0, response.page(3).per(3).results.size - end - - if Mongo.available? - Mongo.connect_to 'mongoid_collections' - - context "Across mongoid models" do - setup do - class ::Image - include Mongoid::Document - include Elasticsearch::Model - include Elasticsearch::Model::Callbacks - - field :name, type: String - attr_accessible :name if respond_to? :attr_accessible - - settings index: {number_of_shards: 1, number_of_replicas: 0} do - mapping do - indexes :name, type: 'text', analyzer: 'snowball' - indexes :created_at, type: 'date' - end - end - - def as_indexed_json(options={}) - as_json(except: [:_id]) - end - end - - Image.delete_all - Image.__elasticsearch__.create_index! force: true - Image.create! name: "The Image" - Image.create! name: "A great Image" - Image.create! name: "The greatest Image" - Image.__elasticsearch__.refresh_index! - Image.__elasticsearch__.client.cluster.health wait_for_status: 'yellow' - end - - should "find matching documents across multiple models" do - response = Elasticsearch::Model.search(%q<"greatest Episode" OR "greatest Image"^2>, [Episode, Image]) - - assert response.any?, "Response should not be empty: #{response.to_a.inspect}" - - assert_equal 2, response.results.size - assert_equal 2, response.records.size - - assert_instance_of Elasticsearch::Model::Response::Result, response.results.first - assert_instance_of Image, response.records.first - assert_instance_of Episode, response.records.last - - assert_equal 'The greatest Image', response.results[0].name - assert_equal 'The greatest Image', response.records[0].name - - assert_equal 'The greatest Episode', response.results[1].name - assert_equal 'The greatest Episode', response.records[1].name - end - end - end - - end - end - end -end diff --git a/elasticsearch-model/test/support/model.json b/elasticsearch-model/test/support/model.json deleted file mode 100644 index fcf3a6473..000000000 --- a/elasticsearch-model/test/support/model.json +++ /dev/null @@ -1 +0,0 @@ -{ "baz": "qux" } diff --git a/elasticsearch-model/test/test_helper.rb b/elasticsearch-model/test/test_helper.rb deleted file mode 100644 index ff3a6d935..000000000 --- a/elasticsearch-model/test/test_helper.rb +++ /dev/null @@ -1,93 +0,0 @@ -RUBY_1_8 = defined?(RUBY_VERSION) && RUBY_VERSION < '1.9' - -exit(0) if RUBY_1_8 - -require 'simplecov' and SimpleCov.start { add_filter "/test|test_/" } if ENV["COVERAGE"] - -# Register `at_exit` handler for integration tests shutdown. -# MUST be called before requiring `test/unit`. 
-at_exit { Elasticsearch::Test::IntegrationTestCase.__run_at_exit_hooks } - -puts '-'*80 - -if defined?(RUBY_VERSION) && RUBY_VERSION > '2.2' - require 'test-unit' - require 'mocha/test_unit' -else - require 'minitest/autorun' - require 'mocha/mini_test' -end - -require 'shoulda-context' - -require 'turn' unless ENV["TM_FILEPATH"] || ENV["NOTURN"] || defined?(RUBY_VERSION) && RUBY_VERSION > '2.2' - -require 'ansi' -require 'oj' - -require 'active_model' - -require 'kaminari' - -require 'elasticsearch/model' - -require 'elasticsearch/extensions/test/cluster' -require 'elasticsearch/extensions/test/startup_shutdown' - -module Elasticsearch - module Test - class IntegrationTestCase < ::Test::Unit::TestCase - extend Elasticsearch::Extensions::Test::StartupShutdown - - startup { Elasticsearch::Extensions::Test::Cluster.start(nodes: 1) if ENV['SERVER'] and not Elasticsearch::Extensions::Test::Cluster.running? } - shutdown { Elasticsearch::Extensions::Test::Cluster.stop if ENV['SERVER'] && started? } - context "IntegrationTest" do; should "noop on Ruby 1.8" do; end; end if RUBY_1_8 - - def setup - ActiveRecord::Base.establish_connection( :adapter => 'sqlite3', :database => ":memory:" ) - logger = ::Logger.new(STDERR) - logger.formatter = lambda { |s, d, p, m| "\e[2;36m#{m}\e[0m\n" } - ActiveRecord::Base.logger = logger unless ENV['QUIET'] - - ActiveRecord::LogSubscriber.colorize_logging = false - ActiveRecord::Migration.verbose = false - - tracer = ::Logger.new(STDERR) - tracer.formatter = lambda { |s, d, p, m| "#{m.gsub(/^.*$/) { |n| ' ' + n }.ansi(:faint)}\n" } - - Elasticsearch::Model.client = Elasticsearch::Client.new host: "localhost:#{(ENV['TEST_CLUSTER_PORT'] || 9250)}", - tracer: (ENV['QUIET'] ? nil : tracer) - end - end - end -end - -class Mongo - def self.setup! - begin - require 'mongoid' - session = Moped::Connection.new("localhost", 27017, 0.5) - session.connect - ENV['MONGODB_AVAILABLE'] = 'yes' - rescue LoadError, Moped::Errors::ConnectionFailure => e - $stderr.puts "MongoDB not installed or running: #{e}" - end - end - - def self.available? 
- !!ENV['MONGODB_AVAILABLE'] - end - - def self.connect_to(source) - $stderr.puts "Mongoid #{Mongoid::VERSION}", '-'*80 - - logger = ::Logger.new($stderr) - logger.formatter = lambda { |s, d, p, m| " #{m.ansi(:faint, :cyan)}\n" } - logger.level = ::Logger::DEBUG - - Mongoid.logger = logger unless ENV['QUIET'] - Moped.logger = logger unless ENV['QUIET'] - - Mongoid.connect_to source - end -end diff --git a/elasticsearch-model/test/unit/adapter_active_record_test.rb b/elasticsearch-model/test/unit/adapter_active_record_test.rb deleted file mode 100644 index 335e3bd10..000000000 --- a/elasticsearch-model/test/unit/adapter_active_record_test.rb +++ /dev/null @@ -1,157 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::AdapterActiveRecordTest < Test::Unit::TestCase - context "Adapter ActiveRecord module: " do - class ::DummyClassForActiveRecord - RESPONSE = Struct.new('DummyActiveRecordResponse') do - def response - { 'hits' => {'hits' => [ {'_id' => 2}, {'_id' => 1} ]} } - end - end.new - - def response - RESPONSE - end - - def ids - [2, 1] - end - end - - RESPONSE = { 'hits' => { 'total' => 123, 'max_score' => 456, 'hits' => [] } } - - setup do - @records = [ stub(id: 1, inspect: ''), stub(id: 2, inspect: '') ] - @records.stubs(:load).returns(true) - @records.stubs(:exec_queries).returns(true) - end - - should "have the register condition" do - assert_not_nil Elasticsearch::Model::Adapter.adapters[Elasticsearch::Model::Adapter::ActiveRecord] - assert_equal false, Elasticsearch::Model::Adapter.adapters[Elasticsearch::Model::Adapter::ActiveRecord].call(DummyClassForActiveRecord) - end - - context "Records" do - setup do - DummyClassForActiveRecord.__send__ :include, Elasticsearch::Model::Adapter::ActiveRecord::Records - end - - should "have the implementation" do - assert_instance_of Module, Elasticsearch::Model::Adapter::ActiveRecord::Records - - instance = DummyClassForActiveRecord.new - instance.expects(:klass).returns(mock('class', primary_key: :some_key, where: @records)).at_least_once - - assert_equal @records, instance.records - end - - should "load the records" do - instance = DummyClassForActiveRecord.new - instance.expects(:records).returns(@records) - instance.load - end - - should "load the records with its submodels when using :includes" do - klass = mock('class', primary_key: :some_key, where: @records) - @records.expects(:includes).with([:submodel]).at_least_once - - instance = DummyClassForActiveRecord.new - instance.expects(:klass).returns(klass).at_least_once - instance.options[:includes] = [:submodel] - instance.records - end - - should "reorder the records based on hits order" do - @records.instance_variable_set(:@records, @records) - - instance = DummyClassForActiveRecord.new - instance.expects(:klass).returns(mock('class', primary_key: :some_key, where: @records)).at_least_once - - assert_equal [1, 2], @records. to_a.map(&:id) - assert_equal [2, 1], instance.records.to_a.map(&:id) - end - - should "not reorder records when SQL order is present" do - @records.instance_variable_set(:@records, @records) - - instance = DummyClassForActiveRecord.new - instance.expects(:klass).returns(stub('class', primary_key: :some_key, where: @records)).at_least_once - instance.records.expects(:order).returns(@records) - - assert_equal [2, 1], instance.records. 
to_a.map(&:id) - assert_equal [1, 2], instance.order(:foo).to_a.map(&:id) - end - end - - context "Callbacks" do - should "register hooks for automatically updating the index" do - DummyClassForActiveRecord.expects(:after_commit).times(3) - - Elasticsearch::Model::Adapter::ActiveRecord::Callbacks.included(DummyClassForActiveRecord) - end - end - - context "Importing" do - setup do - DummyClassForActiveRecord.__send__ :extend, Elasticsearch::Model::Adapter::ActiveRecord::Importing - end - - should "raise an exception when passing an invalid scope" do - assert_raise NoMethodError do - DummyClassForActiveRecord.__find_in_batches(scope: :not_found_method) do; end - end - end - - should "implement the __find_in_batches method" do - DummyClassForActiveRecord.expects(:find_in_batches).returns([]) - DummyClassForActiveRecord.__find_in_batches do; end - end - - should "limit the relation to a specific scope" do - DummyClassForActiveRecord.expects(:find_in_batches).returns([]) - DummyClassForActiveRecord.expects(:published).returns(DummyClassForActiveRecord) - - DummyClassForActiveRecord.__find_in_batches(scope: :published) do; end - end - - should "limit the relation to a specific query" do - DummyClassForActiveRecord.expects(:find_in_batches).returns([]) - DummyClassForActiveRecord.expects(:where).returns(DummyClassForActiveRecord) - - DummyClassForActiveRecord.__find_in_batches(query: -> { where(color: "red") }) do; end - end - - should "preprocess the batch if option provided" do - class << DummyClassForActiveRecord - # Updates/transforms the batch while fetching it from the database - # (eg. with information from an external system) - # - def update_batch(batch) - batch.collect { |b| b.to_s + '!' } - end - end - - DummyClassForActiveRecord.expects(:__find_in_batches).returns( [:a, :b] ) - - DummyClassForActiveRecord.__find_in_batches(preprocess: :update_batch) do |batch| - assert_same_elements ["a!", "b!"], batch - end - end - - context "when transforming models" do - setup do - @transform = DummyClassForActiveRecord.__transform - end - - should "provide an object that responds to #call" do - assert_respond_to @transform, :call - end - - should "provide default transformation" do - model = mock("model", id: 1, __elasticsearch__: stub(as_indexed_json: {})) - assert_equal @transform.call(model), { index: { _id: 1, data: {} } } - end - end - end - end -end diff --git a/elasticsearch-model/test/unit/adapter_default_test.rb b/elasticsearch-model/test/unit/adapter_default_test.rb deleted file mode 100644 index 48edd205d..000000000 --- a/elasticsearch-model/test/unit/adapter_default_test.rb +++ /dev/null @@ -1,41 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::AdapterDefaultTest < Test::Unit::TestCase - context "Adapter default module" do - class ::DummyClassForDefaultAdapter; end - - should "have the default Records implementation" do - assert_instance_of Module, Elasticsearch::Model::Adapter::Default::Records - - DummyClassForDefaultAdapter.__send__ :include, Elasticsearch::Model::Adapter::Default::Records - - instance = DummyClassForDefaultAdapter.new - klass = mock('class', find: [1]) - instance.expects(:klass).returns(klass) - instance.records - end - - should "have the default Callbacks implementation" do - assert_instance_of Module, Elasticsearch::Model::Adapter::Default::Callbacks - end - - context "concerning abstract methods" do - setup do - DummyClassForDefaultAdapter.__send__ :include, Elasticsearch::Model::Adapter::Default::Importing - end - - should "have the default 
Importing implementation" do - assert_raise Elasticsearch::Model::NotImplemented do - DummyClassForDefaultAdapter.new.__find_in_batches - end - end - - should "have the default transform implementation" do - assert_raise Elasticsearch::Model::NotImplemented do - DummyClassForDefaultAdapter.new.__transform - end - end - end - - end -end diff --git a/elasticsearch-model/test/unit/adapter_mongoid_test.rb b/elasticsearch-model/test/unit/adapter_mongoid_test.rb deleted file mode 100644 index ca9b0d20b..000000000 --- a/elasticsearch-model/test/unit/adapter_mongoid_test.rb +++ /dev/null @@ -1,104 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::AdapterMongoidTest < Test::Unit::TestCase - context "Adapter Mongoid module: " do - class ::DummyClassForMongoid - RESPONSE = Struct.new('DummyMongoidResponse') do - def response - { 'hits' => {'hits' => [ {'_id' => 2}, {'_id' => 1} ]} } - end - end.new - - def response - RESPONSE - end - - def ids - [2, 1] - end - end - - setup do - @records = [ stub(id: 1, inspect: ''), stub(id: 2, inspect: '') ] - ::Symbol.any_instance.stubs(:in).returns(@records) - end - - should "have the register condition" do - assert_not_nil Elasticsearch::Model::Adapter.adapters[Elasticsearch::Model::Adapter::Mongoid] - assert_equal false, Elasticsearch::Model::Adapter.adapters[Elasticsearch::Model::Adapter::Mongoid].call(DummyClassForMongoid) - end - - context "Records" do - setup do - DummyClassForMongoid.__send__ :include, Elasticsearch::Model::Adapter::Mongoid::Records - end - - should "have the implementation" do - assert_instance_of Module, Elasticsearch::Model::Adapter::Mongoid::Records - - instance = DummyClassForMongoid.new - instance.expects(:klass).returns(mock('class', where: @records)) - - assert_equal @records, instance.records - end - - should "reorder the records based on hits order" do - @records.instance_variable_set(:@records, @records) - - instance = DummyClassForMongoid.new - instance.expects(:klass).returns(mock('class', where: @records)) - - assert_equal [1, 2], @records. 
to_a.map(&:id) - assert_equal [2, 1], instance.records.to_a.map(&:id) - end - - should "not reorder records when SQL order is present" do - @records.instance_variable_set(:@records, @records) - - instance = DummyClassForMongoid.new - instance.expects(:klass).returns(stub('class', where: @records)).at_least_once - instance.records.expects(:asc).returns(@records) - - assert_equal [2, 1], instance.records.to_a.map(&:id) - assert_equal [1, 2], instance.asc.to_a.map(&:id) - end - end - - context "Callbacks" do - should "register hooks for automatically updating the index" do - DummyClassForMongoid.expects(:after_create) - DummyClassForMongoid.expects(:after_update) - DummyClassForMongoid.expects(:after_destroy) - - Elasticsearch::Model::Adapter::Mongoid::Callbacks.included(DummyClassForMongoid) - end - end - - context "Importing" do - should "implement the __find_in_batches method" do - relation = mock() - relation.stubs(:no_timeout).returns([]) - DummyClassForMongoid.expects(:all).returns(relation) - - DummyClassForMongoid.__send__ :extend, Elasticsearch::Model::Adapter::Mongoid::Importing - DummyClassForMongoid.__find_in_batches do; end - end - - context "when transforming models" do - setup do - @transform = DummyClassForMongoid.__transform - end - - should "provide an object that responds to #call" do - assert_respond_to @transform, :call - end - - should "provide basic transformation" do - model = mock("model", id: 1, as_indexed_json: {}) - assert_equal @transform.call(model), { index: { _id: "1", data: {} } } - end - end - end - - end -end diff --git a/elasticsearch-model/test/unit/adapter_multiple_test.rb b/elasticsearch-model/test/unit/adapter_multiple_test.rb deleted file mode 100644 index b848286fb..000000000 --- a/elasticsearch-model/test/unit/adapter_multiple_test.rb +++ /dev/null @@ -1,106 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::MultipleTest < Test::Unit::TestCase - - context "Adapter for multiple models" do - - class ::DummyOne - include Elasticsearch::Model - - index_name 'dummy' - document_type 'dummy_one' - - def self.find(ids) - ids.map { |id| new(id) } - end - - attr_reader :id - - def initialize(id) - @id = id.to_i - end - end - - module ::Namespace - class DummyTwo - include Elasticsearch::Model - - index_name 'dummy' - document_type 'dummy_two' - - def self.find(ids) - ids.map { |id| new(id) } - end - - attr_reader :id - - def initialize(id) - @id = id.to_i - end - end - end - - class ::DummyTwo - include Elasticsearch::Model - - index_name 'other_index' - document_type 'dummy_two' - - def self.find(ids) - ids.map { |id| new(id) } - end - - attr_reader :id - - def initialize(id) - @id = id.to_i - end - end - - HITS = [{_index: 'dummy', - _type: 'dummy_two', - _id: '2', - }, { - _index: 'dummy', - _type: 'dummy_one', - _id: '2', - }, { - _index: 'other_index', - _type: 'dummy_two', - _id: '1', - }, { - _index: 'dummy', - _type: 'dummy_two', - _id: '1', - }, { - _index: 'dummy', - _type: 'dummy_one', - _id: '3'}] - - setup do - @multimodel = Elasticsearch::Model::Multimodel.new(DummyOne, DummyTwo, Namespace::DummyTwo) - end - - context "when returning records" do - setup do - @multimodel.class.send :include, Elasticsearch::Model::Adapter::Multiple::Records - @multimodel.expects(:response).at_least_once.returns(stub(response: { 'hits' => { 'hits' => HITS } })) - end - - should "keep the order from response" do - assert_instance_of Module, Elasticsearch::Model::Adapter::Multiple::Records - records = @multimodel.records - - assert_equal 5, records.count - 
- assert_kind_of ::Namespace::DummyTwo, records[0] - assert_kind_of ::DummyOne, records[1] - assert_kind_of ::DummyTwo, records[2] - assert_kind_of ::Namespace::DummyTwo, records[3] - assert_kind_of ::DummyOne, records[4] - - assert_equal [2, 2, 1, 1, 3], records.map(&:id) - end - end - end -end diff --git a/elasticsearch-model/test/unit/adapter_test.rb b/elasticsearch-model/test/unit/adapter_test.rb deleted file mode 100644 index 71b4e7cea..000000000 --- a/elasticsearch-model/test/unit/adapter_test.rb +++ /dev/null @@ -1,69 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::AdapterTest < Test::Unit::TestCase - context "Adapter module" do - class ::DummyAdapterClass; end - class ::DummyAdapterClassWithAdapter; end - class ::DummyAdapter - Records = Module.new - Callbacks = Module.new - Importing = Module.new - end - - should "return an Adapter instance" do - assert_instance_of Elasticsearch::Model::Adapter::Adapter, - Elasticsearch::Model::Adapter.from_class(DummyAdapterClass) - end - - should "return a list of adapters" do - Elasticsearch::Model::Adapter::Adapter.expects(:adapters) - Elasticsearch::Model::Adapter.adapters - end - - should "register an adapter" do - begin - Elasticsearch::Model::Adapter::Adapter.expects(:register) - Elasticsearch::Model::Adapter.register(:foo, lambda { |c| false }) - ensure - Elasticsearch::Model::Adapter::Adapter.instance_variable_set(:@adapters, {}) - end - end - end - - context "Adapter class" do - should "register an adapter" do - begin - Elasticsearch::Model::Adapter::Adapter.register(:foo, lambda { |c| false }) - assert Elasticsearch::Model::Adapter::Adapter.adapters[:foo] - ensure - Elasticsearch::Model::Adapter::Adapter.instance_variable_set(:@adapters, {}) - end - end - - should "return the default adapter" do - adapter = Elasticsearch::Model::Adapter::Adapter.new(DummyAdapterClass) - assert_equal Elasticsearch::Model::Adapter::Default, adapter.adapter - end - - should "return a specific adapter" do - Elasticsearch::Model::Adapter::Adapter.register(DummyAdapter, - lambda { |c| c == DummyAdapterClassWithAdapter }) - - adapter = Elasticsearch::Model::Adapter::Adapter.new(DummyAdapterClassWithAdapter) - assert_equal DummyAdapter, adapter.adapter - end - - should "return the modules" do - assert_nothing_raised do - Elasticsearch::Model::Adapter::Adapter.register(DummyAdapter, - lambda { |c| c == DummyAdapterClassWithAdapter }) - - adapter = Elasticsearch::Model::Adapter::Adapter.new(DummyAdapterClassWithAdapter) - - assert_instance_of Module, adapter.records_mixin - assert_instance_of Module, adapter.callbacks_mixin - assert_instance_of Module, adapter.importing_mixin - end - end - end -end diff --git a/elasticsearch-model/test/unit/callbacks_test.rb b/elasticsearch-model/test/unit/callbacks_test.rb deleted file mode 100644 index 95617a414..000000000 --- a/elasticsearch-model/test/unit/callbacks_test.rb +++ /dev/null @@ -1,31 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::CallbacksTest < Test::Unit::TestCase - context "Callbacks module" do - class ::DummyCallbacksModel - end - - module DummyCallbacksAdapter - module CallbacksMixin - end - - def callbacks_mixin - CallbacksMixin - end; module_function :callbacks_mixin - end - - should "include the callbacks mixin from adapter" do - Elasticsearch::Model::Adapter.expects(:from_class) - .with(DummyCallbacksModel) - .returns(DummyCallbacksAdapter) - - ::DummyCallbacksModel.expects(:__send__).with do |method, parameter| - assert_equal :include, method - assert_equal 
DummyCallbacksAdapter::CallbacksMixin, parameter - true - end - - Elasticsearch::Model::Callbacks.included(DummyCallbacksModel) - end - end -end diff --git a/elasticsearch-model/test/unit/client_test.rb b/elasticsearch-model/test/unit/client_test.rb deleted file mode 100644 index 315a3ab44..000000000 --- a/elasticsearch-model/test/unit/client_test.rb +++ /dev/null @@ -1,27 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::ClientTest < Test::Unit::TestCase - context "Client module" do - class ::DummyClientModel - extend Elasticsearch::Model::Client::ClassMethods - include Elasticsearch::Model::Client::InstanceMethods - end - - should "have the default client method" do - assert_instance_of Elasticsearch::Transport::Client, DummyClientModel.client - assert_instance_of Elasticsearch::Transport::Client, DummyClientModel.new.client - end - - should "set the client for the model" do - DummyClientModel.client = 'foobar' - assert_equal 'foobar', DummyClientModel.client - assert_equal 'foobar', DummyClientModel.new.client - end - - should "set the client for a model instance" do - instance = DummyClientModel.new - instance.client = 'moobam' - assert_equal 'moobam', instance.client - end - end -end diff --git a/elasticsearch-model/test/unit/hash_wrapper_test.rb b/elasticsearch-model/test/unit/hash_wrapper_test.rb deleted file mode 100644 index b7b1989d5..000000000 --- a/elasticsearch-model/test/unit/hash_wrapper_test.rb +++ /dev/null @@ -1,13 +0,0 @@ -require 'test_helper' - -require 'hashie/version' - -class Elasticsearch::Model::HashWrapperTest < Test::Unit::TestCase - context "The HashWrapper class" do - should "not print the warning for re-defined methods" do - Hashie.logger.expects(:warn).never - - subject = Elasticsearch::Model::HashWrapper.new(:foo => 'bar', :sort => true) - end if Hashie::VERSION >= '3.5.3' - end -end diff --git a/elasticsearch-model/test/unit/importing_test.rb b/elasticsearch-model/test/unit/importing_test.rb deleted file mode 100644 index 6f739acec..000000000 --- a/elasticsearch-model/test/unit/importing_test.rb +++ /dev/null @@ -1,203 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::ImportingTest < Test::Unit::TestCase - context "Importing module" do - class ::DummyImportingModel - end - - module ::DummyImportingAdapter - module ImportingMixin - def __find_in_batches(options={}, &block) - yield if block_given? 
- end - def __transform - lambda {|a|} - end - end - - def importing_mixin - ImportingMixin - end; module_function :importing_mixin - end - - should "include methods from the module and adapter" do - Elasticsearch::Model::Adapter.expects(:from_class) - .with(DummyImportingModel) - .returns(DummyImportingAdapter) - - DummyImportingModel.__send__ :include, Elasticsearch::Model::Importing - - assert_respond_to DummyImportingModel, :import - assert_respond_to DummyImportingModel, :__find_in_batches - end - - should "call the client when importing" do - Elasticsearch::Model::Adapter.expects(:from_class) - .with(DummyImportingModel) - .returns(DummyImportingAdapter) - - DummyImportingModel.__send__ :include, Elasticsearch::Model::Importing - - client = mock('client') - client.expects(:bulk).returns({'items' => []}) - - DummyImportingModel.expects(:client).returns(client) - DummyImportingModel.expects(:index_name).returns('foo') - DummyImportingModel.expects(:document_type).returns('foo') - DummyImportingModel.stubs(:index_exists?).returns(true) - DummyImportingModel.stubs(:__batch_to_bulk) - assert_equal 0, DummyImportingModel.import - end - - should "return the number of errors" do - Elasticsearch::Model::Adapter.expects(:from_class) - .with(DummyImportingModel) - .returns(DummyImportingAdapter) - - DummyImportingModel.__send__ :include, Elasticsearch::Model::Importing - - client = mock('client') - client.expects(:bulk).returns({'items' => [ {'index' => {}}, {'index' => {'error' => 'FAILED'}} ]}) - - DummyImportingModel.stubs(:client).returns(client) - DummyImportingModel.stubs(:index_name).returns('foo') - DummyImportingModel.stubs(:document_type).returns('foo') - DummyImportingModel.stubs(:index_exists?).returns(true) - DummyImportingModel.stubs(:__batch_to_bulk) - - assert_equal 1, DummyImportingModel.import - end - - should "return an array of error elements" do - Elasticsearch::Model::Adapter.expects(:from_class) - .with(DummyImportingModel) - .returns(DummyImportingAdapter) - - DummyImportingModel.__send__ :include, Elasticsearch::Model::Importing - - client = mock('client') - client.expects(:bulk).returns({'items' => [ {'index' => {}}, {'index' => {'error' => 'FAILED'}} ]}) - - DummyImportingModel.stubs(:client).returns(client) - DummyImportingModel.stubs(:index_name).returns('foo') - DummyImportingModel.stubs(:document_type).returns('foo') - DummyImportingModel.stubs(:index_exists?).returns(true) - DummyImportingModel.stubs(:__batch_to_bulk) - - assert_equal [{'index' => {'error' => 'FAILED'}}], DummyImportingModel.import(return: 'errors') - end - - should "yield the response" do - Elasticsearch::Model::Adapter.expects(:from_class) - .with(DummyImportingModel) - .returns(DummyImportingAdapter) - - DummyImportingModel.__send__ :include, Elasticsearch::Model::Importing - - client = mock('client') - client.expects(:bulk).returns({'items' => [ {'index' => {}}, {'index' => {'error' => 'FAILED'}} ]}) - - DummyImportingModel.stubs(:client).returns(client) - DummyImportingModel.stubs(:index_name).returns('foo') - DummyImportingModel.stubs(:document_type).returns('foo') - DummyImportingModel.stubs(:index_exists?).returns(true) - DummyImportingModel.stubs(:__batch_to_bulk) - - DummyImportingModel.import do |response| - assert_equal 2, response['items'].size - end - end - - context "when the index does not exist" do - should "raise an exception" do - Elasticsearch::Model::Adapter.expects(:from_class) - .with(DummyImportingModel) - .returns(DummyImportingAdapter) - - DummyImportingModel.__send__ 
:include, Elasticsearch::Model::Importing - - DummyImportingModel.expects(:index_name).returns('foo') - DummyImportingModel.expects(:document_type).returns('foo') - DummyImportingModel.expects(:index_exists?).returns(false) - - assert_raise ArgumentError do - DummyImportingModel.import - end - end - end - - context "with the force option" do - should "delete and create the index" do - DummyImportingModel.expects(:__find_in_batches).with do |options| - assert_equal 'bar', options[:foo] - assert_nil options[:force] - true - end - - DummyImportingModel.expects(:create_index!).with do |options| - assert_equal true, options[:force] - true - end - - DummyImportingModel.expects(:index_name).returns('foo') - DummyImportingModel.expects(:document_type).returns('foo') - - DummyImportingModel.import force: true, foo: 'bar' - end - end - - should "allow passing a different index / type" do - Elasticsearch::Model::Adapter.expects(:from_class) - .with(DummyImportingModel) - .returns(DummyImportingAdapter) - - DummyImportingModel.__send__ :include, Elasticsearch::Model::Importing - - client = mock('client') - - client - .expects(:bulk) - .with do |options| - assert_equal 'my-new-index', options[:index] - assert_equal 'my-other-type', options[:type] - true - end - .returns({'items' => [ {'index' => {} }]}) - - DummyImportingModel.stubs(:client).returns(client) - DummyImportingModel.stubs(:index_exists?).returns(true) - DummyImportingModel.stubs(:__batch_to_bulk) - - DummyImportingModel.import index: 'my-new-index', type: 'my-other-type' - end - - should "use the default transform from adapter" do - client = mock('client', bulk: {'items' => []}) - transform = lambda {|a|} - - DummyImportingModel.stubs(:client).returns(client) - DummyImportingModel.stubs(:index_exists?).returns(true) - DummyImportingModel.expects(:__transform).returns(transform) - DummyImportingModel.expects(:__batch_to_bulk).with(anything, transform) - - DummyImportingModel.import index: 'foo', type: 'bar' - end - - should "use the transformer from options" do - client = mock('client', bulk: {'items' => []}) - transform = lambda {|a|} - - DummyImportingModel.stubs(:client).returns(client) - DummyImportingModel.stubs(:index_exists?).returns(true) - DummyImportingModel.expects(:__batch_to_bulk).with(anything, transform) - - DummyImportingModel.import index: 'foo', type: 'bar', transform: transform - end - - should "raise an ArgumentError if transform doesn't respond to the call method" do - assert_raise ArgumentError do - DummyImportingModel.import index: 'foo', type: 'bar', transform: "not_callable" - end - end - end -end diff --git a/elasticsearch-model/test/unit/indexing_test.rb b/elasticsearch-model/test/unit/indexing_test.rb deleted file mode 100644 index e3bfad868..000000000 --- a/elasticsearch-model/test/unit/indexing_test.rb +++ /dev/null @@ -1,687 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::IndexingTest < Test::Unit::TestCase - context "Indexing module: " do - class ::DummyIndexingModel - extend ActiveModel::Naming - extend Elasticsearch::Model::Naming::ClassMethods - extend Elasticsearch::Model::Indexing::ClassMethods - - def self.foo - 'bar' - end - end - - class NotFound < Exception; end - - context "Settings class" do - should "be convertible to hash" do - hash = { foo: 'bar' } - settings = Elasticsearch::Model::Indexing::Settings.new hash - assert_equal hash, settings.to_hash - assert_equal settings.to_hash, settings.as_json - end - end - - context "Settings method" do - should "initialize the index settings" 
do - assert_instance_of Elasticsearch::Model::Indexing::Settings, DummyIndexingModel.settings - end - - should "update and return the index settings from a hash" do - DummyIndexingModel.settings foo: 'boo' - DummyIndexingModel.settings bar: 'bam' - - assert_equal( {foo: 'boo', bar: 'bam'}, DummyIndexingModel.settings.to_hash) - end - - should "update and return the index settings from a yml file" do - DummyIndexingModel.settings File.open("test/support/model.yml") - DummyIndexingModel.settings bar: 'bam' - - assert_equal( {foo: 'boo', bar: 'bam', 'baz' => 'qux'}, DummyIndexingModel.settings.to_hash) - end - - should "update and return the index settings from a json file" do - DummyIndexingModel.settings File.open("test/support/model.json") - DummyIndexingModel.settings bar: 'bam' - - assert_equal( {foo: 'boo', bar: 'bam', 'baz' => 'qux'}, DummyIndexingModel.settings.to_hash) - end - - should "evaluate the block" do - DummyIndexingModel.expects(:foo) - - DummyIndexingModel.settings do - foo - end - end - end - - context "Mappings class" do - should "initialize the index mappings" do - assert_instance_of Elasticsearch::Model::Indexing::Mappings, DummyIndexingModel.mappings - end - - should "raise an exception when not passed type" do - assert_raise ArgumentError do - Elasticsearch::Model::Indexing::Mappings.new - end - end - - should "be convertible to hash" do - mappings = Elasticsearch::Model::Indexing::Mappings.new :mytype, { foo: 'bar' } - assert_equal( { :mytype => { foo: 'bar', :properties => {} } }, mappings.to_hash ) - assert_equal mappings.to_hash, mappings.as_json - end - - should "define properties" do - mappings = Elasticsearch::Model::Indexing::Mappings.new :mytype - assert_respond_to mappings, :indexes - - mappings.indexes :foo, { type: 'boolean', include_in_all: false } - assert_equal 'boolean', mappings.to_hash[:mytype][:properties][:foo][:type] - end - - should "define type as 'text' by default" do - mappings = Elasticsearch::Model::Indexing::Mappings.new :mytype - - mappings.indexes :bar - assert_equal 'text', mappings.to_hash[:mytype][:properties][:bar][:type] - end - - should "define multiple fields" do - mappings = Elasticsearch::Model::Indexing::Mappings.new :mytype - - mappings.indexes :my_field, type: 'text' do - indexes :raw, type: 'keyword' - end - - assert_equal 'text', mappings.to_hash[:mytype][:properties][:my_field][:type] - assert_equal 'keyword', mappings.to_hash[:mytype][:properties][:my_field][:fields][:raw][:type] - assert_nil mappings.to_hash[:mytype][:properties][:my_field][:properties] - end - - should "define embedded properties" do - mappings = Elasticsearch::Model::Indexing::Mappings.new :mytype - - mappings.indexes :foo do - indexes :bar - end - - mappings.indexes :foo_object, type: 'object' do - indexes :bar - end - - mappings.indexes :foo_nested, type: 'nested' do - indexes :bar - end - - mappings.indexes :foo_nested_as_symbol, type: :nested do - indexes :bar - end - - # Object is the default when `type` is missing and there's a block passed - # - assert_equal 'object', mappings.to_hash[:mytype][:properties][:foo][:type] - assert_equal 'text', mappings.to_hash[:mytype][:properties][:foo][:properties][:bar][:type] - assert_nil mappings.to_hash[:mytype][:properties][:foo][:fields] - - assert_equal 'object', mappings.to_hash[:mytype][:properties][:foo_object][:type] - assert_equal 'text', mappings.to_hash[:mytype][:properties][:foo_object][:properties][:bar][:type] - assert_nil mappings.to_hash[:mytype][:properties][:foo_object][:fields] - - 
assert_equal 'nested', mappings.to_hash[:mytype][:properties][:foo_nested][:type] - assert_equal 'text', mappings.to_hash[:mytype][:properties][:foo_nested][:properties][:bar][:type] - assert_nil mappings.to_hash[:mytype][:properties][:foo_nested][:fields] - - assert_equal :nested, mappings.to_hash[:mytype][:properties][:foo_nested_as_symbol][:type] - assert_not_nil mappings.to_hash[:mytype][:properties][:foo_nested_as_symbol][:properties] - assert_nil mappings.to_hash[:mytype][:properties][:foo_nested_as_symbol][:fields] - end - end - - context "Mappings method" do - should "initialize the index mappings" do - assert_instance_of Elasticsearch::Model::Indexing::Mappings, DummyIndexingModel.mappings - end - - should "update and return the index mappings" do - DummyIndexingModel.mappings foo: 'boo' - DummyIndexingModel.mappings bar: 'bam' - assert_equal( { dummy_indexing_model: { foo: "boo", bar: "bam", properties: {} } }, - DummyIndexingModel.mappings.to_hash ) - end - - should "evaluate the block" do - DummyIndexingModel.mappings.expects(:indexes).with(:foo).returns(true) - - DummyIndexingModel.mappings do - indexes :foo - end - end - end - - context "Instance methods" do - class ::DummyIndexingModelWithCallbacks - extend Elasticsearch::Model::Indexing::ClassMethods - include Elasticsearch::Model::Indexing::InstanceMethods - - def self.before_save(&block) - (@callbacks ||= {})[block.hash] = block - end - - def changes_to_save - {:foo => ['One', 'Two']} - end - end - - class ::DummyIndexingModelWithCallbacksAndCustomAsIndexedJson - extend Elasticsearch::Model::Indexing::ClassMethods - include Elasticsearch::Model::Indexing::InstanceMethods - - def self.before_save(&block) - (@callbacks ||= {})[block.hash] = block - end - - def changes_to_save - {:foo => ['A', 'B'], :bar => ['C', 'D']} - end - - def as_indexed_json(options={}) - { :foo => 'B' } - end - end - - class ::DummyIndexingModelWithOldDirty - extend Elasticsearch::Model::Indexing::ClassMethods - include Elasticsearch::Model::Indexing::InstanceMethods - - def self.before_save(&block) - (@callbacks ||= {})[block.hash] = block - end - - def changes - {:foo => ['One', 'Two']} - end - end - - should "register before_save callback when included" do - ::DummyIndexingModelWithCallbacks.expects(:before_save).returns(true) - ::DummyIndexingModelWithCallbacks.__send__ :include, Elasticsearch::Model::Indexing::InstanceMethods - end - - should "set the @__changed_model_attributes variable before save" do - instance = ::DummyIndexingModelWithCallbacks.new - instance.expects(:instance_variable_set).with do |name, value| - assert_equal :@__changed_model_attributes, name - assert_equal({foo: 'Two'}, value) - true - end - - ::DummyIndexingModelWithCallbacks.__send__ :include, Elasticsearch::Model::Indexing::InstanceMethods - - ::DummyIndexingModelWithCallbacks.instance_variable_get(:@callbacks).each do |n,b| - instance.instance_eval(&b) - end - end - - # https://github.com/elastic/elasticsearch-rails/issues/714 - # https://github.com/rails/rails/pull/25337#issuecomment-225166796 - should "set the @__changed_model_attributes variable before save for old ActiveModel::Dirty" do - instance = ::DummyIndexingModelWithOldDirty.new - instance.expects(:instance_variable_set).with do |name, value| - assert_equal :@__changed_model_attributes, name - assert_equal({foo: 'Two'}, value) - true - end - - ::DummyIndexingModelWithOldDirty.__send__ :include, Elasticsearch::Model::Indexing::InstanceMethods - - 
::DummyIndexingModelWithOldDirty.instance_variable_get(:@callbacks).each do |n,b| - instance.instance_eval(&b) - end - end - - should "have the index_document method" do - client = mock('client') - instance = ::DummyIndexingModelWithCallbacks.new - - client.expects(:index).with do |payload| - assert_equal 'foo', payload[:index] - assert_equal 'bar', payload[:type] - assert_equal '1', payload[:id] - assert_equal 'JSON', payload[:body] - true - end - - instance.expects(:client).returns(client) - instance.expects(:as_indexed_json).returns('JSON') - instance.expects(:index_name).returns('foo') - instance.expects(:document_type).returns('bar') - instance.expects(:id).returns('1') - - instance.index_document - end - - should "pass extra options to the index_document method to client.index" do - client = mock('client') - instance = ::DummyIndexingModelWithCallbacks.new - - client.expects(:index).with do |payload| - assert_equal 'A', payload[:parent] - true - end - - instance.expects(:client).returns(client) - instance.expects(:as_indexed_json).returns('JSON') - instance.expects(:index_name).returns('foo') - instance.expects(:document_type).returns('bar') - instance.expects(:id).returns('1') - - instance.index_document(parent: 'A') - end - - should "have the delete_document method" do - client = mock('client') - instance = ::DummyIndexingModelWithCallbacks.new - - client.expects(:delete).with do |payload| - assert_equal 'foo', payload[:index] - assert_equal 'bar', payload[:type] - assert_equal '1', payload[:id] - true - end - - instance.expects(:client).returns(client) - instance.expects(:index_name).returns('foo') - instance.expects(:document_type).returns('bar') - instance.expects(:id).returns('1') - - instance.delete_document() - end - - should "pass extra options to the delete_document method to client.delete" do - client = mock('client') - instance = ::DummyIndexingModelWithCallbacks.new - - client.expects(:delete).with do |payload| - assert_equal 'A', payload[:parent] - true - end - - instance.expects(:client).returns(client) - instance.expects(:id).returns('1') - instance.expects(:index_name).returns('foo') - instance.expects(:document_type).returns('bar') - - instance.delete_document(parent: 'A') - end - - should "update the document by re-indexing when no changes are present" do - client = mock('client') - instance = ::DummyIndexingModelWithCallbacks.new - - # Reset the fake `changes` - instance.instance_variable_set(:@__changed_model_attributes, nil) - - instance.expects(:index_document) - instance.update_document - end - - should "update the document by partial update when changes are present" do - client = mock('client') - instance = ::DummyIndexingModelWithCallbacks.new - - # Set the fake `changes` hash - instance.instance_variable_set(:@__changed_model_attributes, {foo: 'bar'}) - - client.expects(:update).with do |payload| - assert_equal 'foo', payload[:index] - assert_equal 'bar', payload[:type] - assert_equal '1', payload[:id] - assert_equal({foo: 'bar'}, payload[:body][:doc]) - true - end - - instance.expects(:client).returns(client) - instance.expects(:index_name).returns('foo') - instance.expects(:document_type).returns('bar') - instance.expects(:id).returns('1') - - instance.update_document - end - - should "exclude attributes not contained in custom as_indexed_json during partial update" do - client = mock('client') - instance = ::DummyIndexingModelWithCallbacksAndCustomAsIndexedJson.new - - # Set the fake `changes` hash - 
instance.instance_variable_set(:@__changed_model_attributes, {'foo' => 'B', 'bar' => 'D' }) - - client.expects(:update).with do |payload| - assert_equal({:foo => 'B'}, payload[:body][:doc]) - true - end - - instance.expects(:client).returns(client) - instance.expects(:index_name).returns('foo') - instance.expects(:document_type).returns('bar') - instance.expects(:id).returns('1') - - instance.update_document - end - - should "get attributes from as_indexed_json during partial update" do - client = mock('client') - instance = ::DummyIndexingModelWithCallbacksAndCustomAsIndexedJson.new - - instance.instance_variable_set(:@__changed_model_attributes, { 'foo' => { 'bar' => 'BAR'} }) - # Overload as_indexed_json - instance.expects(:as_indexed_json).returns({ 'foo' => 'BAR' }) - - client.expects(:update).with do |payload| - assert_equal({'foo' => 'BAR'}, payload[:body][:doc]) - true - end - - instance.expects(:client).returns(client) - instance.expects(:index_name).returns('foo') - instance.expects(:document_type).returns('bar') - instance.expects(:id).returns('1') - - instance.update_document - end - - should "update only the specific attributes" do - client = mock('client') - instance = ::DummyIndexingModelWithCallbacks.new - - # Set the fake `changes` hash - instance.instance_variable_set(:@__changed_model_attributes, {author: 'john'}) - - client.expects(:update).with do |payload| - assert_equal 'foo', payload[:index] - assert_equal 'bar', payload[:type] - assert_equal '1', payload[:id] - assert_equal({title: 'green'}, payload[:body][:doc]) - true - end - - instance.expects(:client).returns(client) - instance.expects(:index_name).returns('foo') - instance.expects(:document_type).returns('bar') - instance.expects(:id).returns('1') - - instance.update_document_attributes title: "green" - end - - should "pass options to the update_document_attributes method" do - client = mock('client') - instance = ::DummyIndexingModelWithCallbacks.new - - client.expects(:update).with do |payload| - assert_equal 'foo', payload[:index] - assert_equal 'bar', payload[:type] - assert_equal '1', payload[:id] - assert_equal({title: 'green'}, payload[:body][:doc]) - assert_equal true, payload[:refresh] - true - end - - instance.expects(:client).returns(client) - instance.expects(:index_name).returns('foo') - instance.expects(:document_type).returns('bar') - instance.expects(:id).returns('1') - - instance.update_document_attributes( { title: "green" }, { refresh: true } ) - end - end - - context "Checking for index existence" do - context "when the index exists" do - should "return true" do - indices = mock('indices', exists: true) - client = stub('client', indices: indices) - - DummyIndexingModelForRecreate.expects(:client).returns(client).at_least_once - - assert_equal true, DummyIndexingModelForRecreate.index_exists? - end - end - - context "when the index does not exists" do - should "return false" do - indices = mock('indices', exists: false) - client = stub('client', indices: indices) - - DummyIndexingModelForRecreate.expects(:client).returns(client).at_least_once - - assert_equal false, DummyIndexingModelForRecreate.index_exists? - end - end - - context "when the indices API raises an error" do - should "return false" do - client = stub('client') - client.expects(:indices).raises(StandardError) - - DummyIndexingModelForRecreate.expects(:client).returns(client).at_least_once - - assert_equal false, DummyIndexingModelForRecreate.index_exists? 
- end - end - - context "the indices.exists API raises an error" do - should "return false" do - indices = stub('indices') - client = stub('client') - client.expects(:indices).returns(indices) - - indices.expects(:exists).raises(StandardError) - - DummyIndexingModelForRecreate.expects(:client).returns(client).at_least_once - - assert_equal false, DummyIndexingModelForRecreate.index_exists? - end - end - end - - context "Re-creating the index" do - class ::DummyIndexingModelForRecreate - extend ActiveModel::Naming - extend Elasticsearch::Model::Naming::ClassMethods - extend Elasticsearch::Model::Indexing::ClassMethods - - settings index: { number_of_shards: 1 } do - mappings do - indexes :foo, analyzer: 'keyword' - end - end - end - - should "delete the index without raising exception when the index is not found" do - client = stub('client') - indices = stub('indices') - client.stubs(:indices).returns(indices) - - indices.expects(:delete).returns({}).then.raises(NotFound).at_least_once - - DummyIndexingModelForRecreate.expects(:client).returns(client).at_least_once - - assert_nothing_raised { DummyIndexingModelForRecreate.delete_index! force: true } - end - - should "raise an exception without the force option" do - client = stub('client') - indices = stub('indices') - client.stubs(:indices).returns(indices) - - indices.expects(:delete).raises(NotFound) - - DummyIndexingModelForRecreate.expects(:client).returns(client) - - assert_raise(NotFound) { DummyIndexingModelForRecreate.delete_index! } - end - - should "raise a regular exception when deleting the index" do - client = stub('client') - - indices = stub('indices') - indices.expects(:delete).raises(Exception) - client.stubs(:indices).returns(indices) - - DummyIndexingModelForRecreate.expects(:client).returns(client) - - assert_raise(Exception) { DummyIndexingModelForRecreate.delete_index! force: true } - end - - should "create the index with correct settings and mappings when it doesn't exist" do - client = stub('client') - indices = stub('indices') - client.stubs(:indices).returns(indices) - - indices.expects(:create).with do |payload| - assert_equal 'dummy_indexing_model_for_recreates', payload[:index] - assert_equal 1, payload[:body][:settings][:index][:number_of_shards] - assert_equal 'keyword', payload[:body][:mappings][:dummy_indexing_model_for_recreate][:properties][:foo][:analyzer] - true - end.returns({}) - - DummyIndexingModelForRecreate.expects(:index_exists?).returns(false) - DummyIndexingModelForRecreate.expects(:client).returns(client).at_least_once - - assert_nothing_raised { DummyIndexingModelForRecreate.create_index! } - end - - should "get the index settings and mappings from options" do - client = stub('client') - indices = stub('indices') - client.stubs(:indices).returns(indices) - - indices.expects(:create).with do |payload| - assert_equal 'foobar', payload[:index] - assert_equal 3, payload[:body][:settings][:index][:number_of_shards] - assert_equal 'bar', payload[:body][:mappings][:foobar][:properties][:foo][:analyzer] - true - end.returns({}) - - DummyIndexingModelForRecreate.expects(:index_exists?).returns(false) - DummyIndexingModelForRecreate.expects(:client).returns(client).at_least_once - - DummyIndexingModelForRecreate.create_index! 
\ - index: 'foobar', - settings: { index: { number_of_shards: 3 } }, - mappings: { foobar: { properties: { foo: { analyzer: 'bar' } } } } - end - - should "not create the index when it exists" do - client = stub('client') - indices = stub('indices') - client.stubs(:indices).returns(indices) - - indices.expects(:create).never - - DummyIndexingModelForRecreate.expects(:index_exists?).returns(true) - DummyIndexingModelForRecreate.expects(:client).returns(client).never - - assert_nothing_raised { DummyIndexingModelForRecreate.create_index! } - end - - should "raise exception during index creation" do - client = stub('client') - indices = stub('indices') - client.stubs(:indices).returns(indices) - - indices.expects(:delete).returns({}) - indices.expects(:create).raises(Exception).at_least_once - - DummyIndexingModelForRecreate.expects(:index_exists?).returns(false) - DummyIndexingModelForRecreate.expects(:client).returns(client).at_least_once - - assert_raise(Exception) { DummyIndexingModelForRecreate.create_index! force: true } - end - - should "delete the index first with the force option" do - client = stub('client') - indices = stub('indices') - client.stubs(:indices).returns(indices) - - indices.expects(:delete).returns({}) - indices.expects(:create).returns({}).at_least_once - - DummyIndexingModelForRecreate.expects(:index_exists?).returns(false) - DummyIndexingModelForRecreate.expects(:client).returns(client).at_least_once - - assert_nothing_raised do - DummyIndexingModelForRecreate.create_index! force: true - end - end - - should "refresh the index without raising exception with the force option" do - client = stub('client') - indices = stub('indices') - client.stubs(:indices).returns(indices) - - indices.expects(:refresh).returns({}).then.raises(NotFound).at_least_once - - DummyIndexingModelForRecreate.expects(:client).returns(client).at_least_once - - assert_nothing_raised { DummyIndexingModelForRecreate.refresh_index! force: true } - end - - should "raise a regular exception when refreshing the index" do - client = stub('client') - indices = stub('indices') - client.stubs(:indices).returns(indices) - - indices.expects(:refresh).returns({}).then.raises(Exception).at_least_once - - DummyIndexingModelForRecreate.expects(:client).returns(client).at_least_once - - assert_nothing_raised { DummyIndexingModelForRecreate.refresh_index! force: true } - end - - context "with a custom index name" do - setup do - @client = stub('client') - @indices = stub('indices') - @client.stubs(:indices).returns(@indices) - DummyIndexingModelForRecreate.expects(:client).returns(@client).at_least_once - end - - should "create the custom index" do - @indices.expects(:create).with do |arguments| - assert_equal 'custom-foo', arguments[:index] - true - end - DummyIndexingModelForRecreate.expects(:index_exists?).with do |arguments| - assert_equal 'custom-foo', arguments[:index] - true - end - - DummyIndexingModelForRecreate.create_index! index: 'custom-foo' - end - - should "delete the custom index" do - @indices.expects(:delete).with do |arguments| - assert_equal 'custom-foo', arguments[:index] - true - end - - DummyIndexingModelForRecreate.delete_index! index: 'custom-foo' - end - - should "refresh the custom index" do - @indices.expects(:refresh).with do |arguments| - assert_equal 'custom-foo', arguments[:index] - true - end - - DummyIndexingModelForRecreate.refresh_index! 
index: 'custom-foo' - end - end - end - - end -end diff --git a/elasticsearch-model/test/unit/module_test.rb b/elasticsearch-model/test/unit/module_test.rb deleted file mode 100644 index fb8e0ba61..000000000 --- a/elasticsearch-model/test/unit/module_test.rb +++ /dev/null @@ -1,68 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::ModuleTest < Test::Unit::TestCase - context "The main module" do - - context "client" do - should "have a default" do - client = Elasticsearch::Model.client - assert_not_nil client - assert_instance_of Elasticsearch::Transport::Client, client - end - - should "be settable" do - begin - Elasticsearch::Model.client = "Foobar" - assert_equal "Foobar", Elasticsearch::Model.client - ensure - Elasticsearch::Model.client = nil - end - end - end - - context "when included in module/class, " do - class ::DummyIncludingModel; end - class ::DummyIncludingModelWithSearchMethodDefined - def self.search(query, options={}) - "SEARCH" - end - end - - should "include and set up the proxy" do - DummyIncludingModel.__send__ :include, Elasticsearch::Model - - assert_respond_to DummyIncludingModel, :__elasticsearch__ - assert_respond_to DummyIncludingModel.new, :__elasticsearch__ - end - - should "delegate important methods to the proxy" do - DummyIncludingModel.__send__ :include, Elasticsearch::Model - - assert_respond_to DummyIncludingModel, :search - assert_respond_to DummyIncludingModel, :mappings - assert_respond_to DummyIncludingModel, :settings - assert_respond_to DummyIncludingModel, :index_name - assert_respond_to DummyIncludingModel, :document_type - assert_respond_to DummyIncludingModel, :import - end - - should "not override existing method" do - DummyIncludingModelWithSearchMethodDefined.__send__ :include, Elasticsearch::Model - - assert_equal 'SEARCH', DummyIncludingModelWithSearchMethodDefined.search('foo') - end - end - - context "settings" do - should "access the settings" do - assert_not_nil Elasticsearch::Model.settings - end - - should "allow to set settings" do - assert_nothing_raised { Elasticsearch::Model.settings[:foo] = 'bar' } - assert_equal 'bar', Elasticsearch::Model.settings[:foo] - end - end - - end -end diff --git a/elasticsearch-model/test/unit/multimodel_test.rb b/elasticsearch-model/test/unit/multimodel_test.rb deleted file mode 100644 index daf9f4043..000000000 --- a/elasticsearch-model/test/unit/multimodel_test.rb +++ /dev/null @@ -1,38 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::MultimodelTest < Test::Unit::TestCase - - context "Multimodel class" do - setup do - title = stub('Foo', index_name: 'foo_index', document_type: 'foo', to_ary: nil) - series = stub('Bar', index_name: 'bar_index', document_type: 'bar', to_ary: nil) - @multimodel = Elasticsearch::Model::Multimodel.new(title, series) - end - - should "have an index_name" do - assert_equal ['foo_index', 'bar_index'], @multimodel.index_name - end - - should "have a document_type" do - assert_equal ['foo', 'bar'], @multimodel.document_type - end - - should "have a client" do - assert_equal Elasticsearch::Model.client, @multimodel.client - end - - should "include models in the registry" do - class ::JustAModel - include Elasticsearch::Model - end - - class ::JustAnotherModel - include Elasticsearch::Model - end - - multimodel = Elasticsearch::Model::Multimodel.new - assert multimodel.models.include?(::JustAModel) - assert multimodel.models.include?(::JustAnotherModel) - end - end -end diff --git a/elasticsearch-model/test/unit/naming_inheritance_test.rb 
b/elasticsearch-model/test/unit/naming_inheritance_test.rb deleted file mode 100644 index b66d415a0..000000000 --- a/elasticsearch-model/test/unit/naming_inheritance_test.rb +++ /dev/null @@ -1,94 +0,0 @@ -require "test_helper" - -class Elasticsearch::Model::NamingInheritanceTest < Test::Unit::TestCase - def setup - Elasticsearch::Model.settings[:inheritance_enabled] = true - end - - def teardown - Elasticsearch::Model.settings[:inheritance_enabled] = false - end - - context "Naming module with inheritance" do - class ::TestBase - extend ActiveModel::Naming - - extend Elasticsearch::Model::Naming::ClassMethods - include Elasticsearch::Model::Naming::InstanceMethods - end - - class ::Animal < ::TestBase - extend ActiveModel::Naming - - extend Elasticsearch::Model::Naming::ClassMethods - include Elasticsearch::Model::Naming::InstanceMethods - - index_name "mammals" - document_type "mammal" - end - - class ::Dog < ::Animal - end - - module ::MyNamespace - class Dog < ::Animal - end - end - - class ::Cat < ::Animal - extend ActiveModel::Naming - - extend Elasticsearch::Model::Naming::ClassMethods - include Elasticsearch::Model::Naming::InstanceMethods - - index_name "cats" - document_type "cat" - end - - should "return the default index_name" do - assert_equal "test_bases", TestBase.index_name - assert_equal "test_bases", TestBase.new.index_name - end - - should "return the explicit index_name" do - assert_equal "mammals", Animal.index_name - assert_equal "mammals", Animal.new.index_name - - assert_equal "cats", Cat.index_name - assert_equal "cats", Cat.new.index_name - end - - should "return the ancestor index_name" do - assert_equal "mammals", Dog.index_name - assert_equal "mammals", Dog.new.index_name - end - - should "return the ancestor index_name for namespaced model" do - assert_equal "mammals", ::MyNamespace::Dog.index_name - assert_equal "mammals", ::MyNamespace::Dog.new.index_name - end - - should "return the default document_type" do - assert_equal "test_base", TestBase.document_type - assert_equal "test_base", TestBase.new.document_type - end - - should "return the explicit document_type" do - assert_equal "mammal", Animal.document_type - assert_equal "mammal", Animal.new.document_type - - assert_equal "cat", Cat.document_type - assert_equal "cat", Cat.new.document_type - end - - should "return the ancestor document_type" do - assert_equal "mammal", Dog.document_type - assert_equal "mammal", Dog.new.document_type - end - - should "return the ancestor document_type for namespaced model" do - assert_equal "mammal", ::MyNamespace::Dog.document_type - assert_equal "mammal", ::MyNamespace::Dog.new.document_type - end - end -end diff --git a/elasticsearch-model/test/unit/naming_test.rb b/elasticsearch-model/test/unit/naming_test.rb deleted file mode 100644 index 424adf7cc..000000000 --- a/elasticsearch-model/test/unit/naming_test.rb +++ /dev/null @@ -1,103 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::NamingTest < Test::Unit::TestCase - context "Naming module" do - class ::DummyNamingModel - extend ActiveModel::Naming - - extend Elasticsearch::Model::Naming::ClassMethods - include Elasticsearch::Model::Naming::InstanceMethods - end - - module ::MyNamespace - class DummyNamingModelInNamespace - extend ActiveModel::Naming - - extend Elasticsearch::Model::Naming::ClassMethods - include Elasticsearch::Model::Naming::InstanceMethods - end - end - - should "return the default index_name" do - assert_equal 'dummy_naming_models', DummyNamingModel.index_name - assert_equal 
'dummy_naming_models', DummyNamingModel.new.index_name - end - - should "return the sanitized default index_name for namespaced model" do - assert_equal 'my_namespace-dummy_naming_model_in_namespaces', ::MyNamespace::DummyNamingModelInNamespace.index_name - assert_equal 'my_namespace-dummy_naming_model_in_namespaces', ::MyNamespace::DummyNamingModelInNamespace.new.index_name - end - - should "return the default document_type" do - assert_equal 'dummy_naming_model', DummyNamingModel.document_type - assert_equal 'dummy_naming_model', DummyNamingModel.new.document_type - end - - should "set and return the index_name" do - DummyNamingModel.index_name 'foobar' - assert_equal 'foobar', DummyNamingModel.index_name - - d = DummyNamingModel.new - d.index_name 'foobar_d' - assert_equal 'foobar_d', d.index_name - - modifier = 'r' - d.index_name Proc.new{ "foobar_#{modifier}" } - assert_equal 'foobar_r', d.index_name - - modifier = 'z' - assert_equal 'foobar_z', d.index_name - - modifier = 'f' - d.index_name { "foobar_#{modifier}" } - assert_equal 'foobar_f', d.index_name - - modifier = 't' - assert_equal 'foobar_t', d.index_name - end - - should "set the index_name with setter" do - DummyNamingModel.index_name = 'foobar_index_S' - assert_equal 'foobar_index_S', DummyNamingModel.index_name - - d = DummyNamingModel.new - d.index_name = 'foobar_index_s' - assert_equal 'foobar_index_s', d.index_name - - assert_equal 'foobar_index_S', DummyNamingModel.index_name - - modifier2 = 'y' - DummyNamingModel.index_name = Proc.new{ "foobar_index_#{modifier2}" } - assert_equal 'foobar_index_y', DummyNamingModel.index_name - - modifier = 'r' - d.index_name = Proc.new{ "foobar_index_#{modifier}" } - assert_equal 'foobar_index_r', d.index_name - - modifier = 'z' - assert_equal 'foobar_index_z', d.index_name - - assert_equal 'foobar_index_y', DummyNamingModel.index_name - end - - should "set and return the document_type" do - DummyNamingModel.document_type 'foobar' - assert_equal 'foobar', DummyNamingModel.document_type - - d = DummyNamingModel.new - d.document_type 'foobar_d' - assert_equal 'foobar_d', d.document_type - end - - should "set the document_type with setter" do - DummyNamingModel.document_type = 'foobar_type_S' - assert_equal 'foobar_type_S', DummyNamingModel.document_type - - d = DummyNamingModel.new - d.document_type = 'foobar_type_s' - assert_equal 'foobar_type_s', d.document_type - - assert_equal 'foobar_type_S', DummyNamingModel.document_type - end - end -end diff --git a/elasticsearch-model/test/unit/proxy_test.rb b/elasticsearch-model/test/unit/proxy_test.rb deleted file mode 100644 index a64b5b175..000000000 --- a/elasticsearch-model/test/unit/proxy_test.rb +++ /dev/null @@ -1,98 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::SearchTest < Test::Unit::TestCase - context "Searching module" do - class ::DummyProxyModel - include Elasticsearch::Model::Proxy - - def self.foo - 'classy foo' - end - - def bar - 'insta barr' - end - - def as_json(options) - {foo: 'bar'} - end - end - - class ::DummyProxyModelWithCallbacks - def self.before_save(&block) - (@callbacks ||= {})[block.hash] = block - end - - def changes_to_save - {:foo => ['One', 'Two']} - end - end - - should "setup the class proxy method" do - assert_respond_to DummyProxyModel, :__elasticsearch__ - end - - should "setup the instance proxy method" do - assert_respond_to DummyProxyModel.new, :__elasticsearch__ - end - - should "register the hook for before_save callback" do - 
::DummyProxyModelWithCallbacks.expects(:before_save).returns(true) - DummyProxyModelWithCallbacks.__send__ :include, Elasticsearch::Model::Proxy - end - - should "set the @__changed_model_attributes variable before save" do - instance = ::DummyProxyModelWithCallbacks.new - instance.__elasticsearch__.expects(:instance_variable_set).with do |name, value| - assert_equal :@__changed_model_attributes, name - assert_equal({foo: 'Two'}, value) - true - end - - ::DummyProxyModelWithCallbacks.__send__ :include, Elasticsearch::Model::Proxy - - ::DummyProxyModelWithCallbacks.instance_variable_get(:@callbacks).each do |n,b| - instance.instance_eval(&b) - end - end - - should "delegate methods to the target" do - assert_respond_to DummyProxyModel.__elasticsearch__, :foo - assert_respond_to DummyProxyModel.new.__elasticsearch__, :bar - - assert_raise(NoMethodError) { DummyProxyModel.__elasticsearch__.xoxo } - assert_raise(NoMethodError) { DummyProxyModel.new.__elasticsearch__.xoxo } - - assert_equal 'classy foo', DummyProxyModel.__elasticsearch__.foo - assert_equal 'insta barr', DummyProxyModel.new.__elasticsearch__.bar - end - - should "reset the proxy target for duplicates" do - model = DummyProxyModel.new - model_target = model.__elasticsearch__.target - duplicate = model.dup - duplicate_target = duplicate.__elasticsearch__.target - - assert_not_equal model, duplicate - assert_equal model, model_target - assert_equal duplicate, duplicate_target - end - - should "return the proxy class from instance proxy" do - assert_equal Elasticsearch::Model::Proxy::ClassMethodsProxy, DummyProxyModel.new.__elasticsearch__.class.class - end - - should "return the origin class from instance proxy" do - assert_equal DummyProxyModel, DummyProxyModel.new.__elasticsearch__.klass - end - - should "delegate as_json from the proxy to target" do - assert_equal({foo: 'bar'}, DummyProxyModel.new.__elasticsearch__.as_json) - end - - should "have inspect method indicating the proxy" do - assert_match /PROXY/, DummyProxyModel.__elasticsearch__.inspect - assert_match /PROXY/, DummyProxyModel.new.__elasticsearch__.inspect - end - end -end diff --git a/elasticsearch-model/test/unit/response_aggregations_test.rb b/elasticsearch-model/test/unit/response_aggregations_test.rb deleted file mode 100644 index cac17759d..000000000 --- a/elasticsearch-model/test/unit/response_aggregations_test.rb +++ /dev/null @@ -1,46 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::ResponseAggregationsTest < Test::Unit::TestCase - context "Response aggregations" do - class OriginClass - def self.index_name; 'foo'; end - def self.document_type; 'bar'; end - end - - RESPONSE = { - 'aggregations' => { - 'foo' => {'bar' => 10 }, - 'price' => { 'doc_count' => 123, - 'min' => { 'value' => 1.0}, - 'max' => { 'value' => 99 } - } - } - } - - setup do - @search = Elasticsearch::Model::Searching::SearchRequest.new OriginClass, '*' - @search.stubs(:execute!).returns(RESPONSE) - end - - should "access the aggregations" do - @search.expects(:execute!).returns(RESPONSE) - - response = Elasticsearch::Model::Response::Response.new OriginClass, @search - assert_respond_to response, :aggregations - assert_kind_of Elasticsearch::Model::Response::Aggregations, response.aggregations - assert_kind_of Hashie::Mash, response.aggregations.foo - assert_equal 10, response.aggregations.foo.bar - end - - should "properly return min and max values" do - @search.expects(:execute!).returns(RESPONSE) - - response = Elasticsearch::Model::Response::Response.new OriginClass, 
@search - - assert_equal 123, response.aggregations.price.doc_count - assert_equal 1, response.aggregations.price.min.value - assert_equal 99, response.aggregations.price.max.value - end - - end -end diff --git a/elasticsearch-model/test/unit/response_base_test.rb b/elasticsearch-model/test/unit/response_base_test.rb deleted file mode 100644 index aa9b4244d..000000000 --- a/elasticsearch-model/test/unit/response_base_test.rb +++ /dev/null @@ -1,40 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::BaseTest < Test::Unit::TestCase - context "Response base module" do - class OriginClass - def self.index_name; 'foo'; end - def self.document_type; 'bar'; end - end - - class DummyBaseClass - include Elasticsearch::Model::Response::Base - end - - RESPONSE = { 'hits' => { 'total' => 123, 'max_score' => 456, 'hits' => [] } } - - setup do - @search = Elasticsearch::Model::Searching::SearchRequest.new OriginClass, '*' - @response = Elasticsearch::Model::Response::Response.new OriginClass, @search - @search.stubs(:execute!).returns(RESPONSE) - end - - should "access klass, response, total and max_score" do - r = DummyBaseClass.new OriginClass, @response - - assert_equal OriginClass, r.klass - assert_equal @response, r.response - assert_equal RESPONSE, r.response.response - assert_equal 123, r.total - assert_equal 456, r.max_score - end - - should "have abstract methods results and records" do - r = DummyBaseClass.new OriginClass, @response - - assert_raise(Elasticsearch::Model::NotImplemented) { |e| r.results } - assert_raise(Elasticsearch::Model::NotImplemented) { |e| r.records } - end - - end -end diff --git a/elasticsearch-model/test/unit/response_pagination_kaminari_test.rb b/elasticsearch-model/test/unit/response_pagination_kaminari_test.rb deleted file mode 100644 index 1fc9b2f3c..000000000 --- a/elasticsearch-model/test/unit/response_pagination_kaminari_test.rb +++ /dev/null @@ -1,433 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::ResponsePaginationKaminariTest < Test::Unit::TestCase - class ModelClass - include ::Kaminari::ConfigurationMethods - - def self.index_name; 'foo'; end - def self.document_type; 'bar'; end - end - - RESPONSE = { 'took' => '5', 'timed_out' => false, '_shards' => {'one' => 'OK'}, - 'hits' => { 'total' => 100, 'hits' => (1..100).to_a.map { |i| { _id: i } } } } - - context "Response pagination" do - - setup do - @search = Elasticsearch::Model::Searching::SearchRequest.new ModelClass, '*' - @response = Elasticsearch::Model::Response::Response.new ModelClass, @search, RESPONSE - @response.klass.stubs(:client).returns mock('client') - end - - should "have pagination methods" do - assert_respond_to @response, :page - assert_respond_to @response, :limit_value - assert_respond_to @response, :offset_value - assert_respond_to @response, :limit - assert_respond_to @response, :offset - assert_respond_to @response, :total_count - end - - context "#page method" do - should "advance the from/size" do - @response.klass.client - .expects(:search) - .with do |definition| - assert_equal 25, definition[:from] - assert_equal 25, definition[:size] - true - end - .returns(RESPONSE) - - assert_nil @response.search.definition[:from] - assert_nil @response.search.definition[:size] - - @response.page(2).to_a - assert_equal 25, @response.search.definition[:from] - assert_equal 25, @response.search.definition[:size] - end - - should "advance the from/size further" do - @response.klass.client - .expects(:search) - .with do |definition| - assert_equal 75, definition[:from] - 
assert_equal 25, definition[:size] - true - end - .returns(RESPONSE) - - @response.page(4).to_a - assert_equal 75, @response.search.definition[:from] - assert_equal 25, @response.search.definition[:size] - end - end - - context "limit/offset readers" do - should "return the default" do - assert_equal Kaminari.config.default_per_page, @response.limit_value - assert_equal 0, @response.offset_value - end - - should "return the value from URL parameters" do - search = Elasticsearch::Model::Searching::SearchRequest.new ModelClass, '*', size: 10, from: 50 - @response = Elasticsearch::Model::Response::Response.new ModelClass, search, RESPONSE - - assert_equal 10, @response.limit_value - assert_equal 50, @response.offset_value - end - - should "ignore the value from request body" do - search = Elasticsearch::Model::Searching::SearchRequest.new ModelClass, - { query: { match_all: {} }, from: 333, size: 999 } - @response = Elasticsearch::Model::Response::Response.new ModelClass, search, RESPONSE - - assert_equal Kaminari.config.default_per_page, @response.limit_value - assert_equal 0, @response.offset_value - end - end - - context "limit setter" do - setup do - @response.records - @response.results - end - - should "set the values" do - @response.limit(35) - assert_equal 35, @response.search.definition[:size] - end - - should "reset the variables" do - @response.limit(35) - - assert_nil @response.instance_variable_get(:@response) - assert_nil @response.instance_variable_get(:@records) - assert_nil @response.instance_variable_get(:@results) - end - - should 'coerce string parameters' do - @response.limit("35") - assert_equal 35, @response.search.definition[:size] - end - - should 'ignore invalid string parameters' do - @response.limit(35) - @response.limit("asdf") - assert_equal 35, @response.search.definition[:size] - end - end - - context "with the page() and limit() methods" do - setup do - @response.records - @response.results - end - - should "set the values" do - @response.page(3).limit(35) - assert_equal 35, @response.search.definition[:size] - assert_equal 70, @response.search.definition[:from] - end - - should "set the values when limit is called first" do - @response.limit(35).page(3) - assert_equal 35, @response.search.definition[:size] - assert_equal 70, @response.search.definition[:from] - end - - should "reset the instance variables" do - @response.page(3).limit(35) - - assert_nil @response.instance_variable_get(:@response) - assert_nil @response.instance_variable_get(:@records) - assert_nil @response.instance_variable_get(:@results) - end - end - - context "offset setter" do - setup do - @response.records - @response.results - end - - should "set the values" do - @response.offset(15) - assert_equal 15, @response.search.definition[:from] - end - - should "reset the variables" do - @response.offset(35) - - assert_nil @response.instance_variable_get(:@response) - assert_nil @response.instance_variable_get(:@records) - assert_nil @response.instance_variable_get(:@results) - end - - should 'coerce string parameters' do - @response.offset("35") - assert_equal 35, @response.search.definition[:from] - end - - should 'coerce invalid string parameters' do - @response.offset(35) - @response.offset("asdf") - assert_equal 0, @response.search.definition[:from] - end - end - - context "total" do - should "return the number of hits" do - @response.expects(:results).returns(mock('results', total: 100)) - assert_equal 100, @response.total_count - end - end - - context "results" do - setup do - 
@search.stubs(:execute!).returns RESPONSE - end - - should "return current page and total count" do - assert_equal 1, @response.page(1).results.current_page - assert_equal 100, @response.results.total_count - - assert_equal 5, @response.page(5).results.current_page - end - - should "return previous page and next page" do - assert_equal nil, @response.page(1).results.prev_page - assert_equal 2, @response.page(1).results.next_page - - assert_equal 3, @response.page(4).results.prev_page - assert_equal nil, @response.page(4).results.next_page - - assert_equal 2, @response.page(3).results.prev_page - assert_equal 4, @response.page(3).results.next_page - end - end - - context "records" do - setup do - @search.stubs(:execute!).returns RESPONSE - end - - should "return current page and total count" do - assert_equal 1, @response.page(1).records.current_page - assert_equal 100, @response.records.total_count - - assert_equal 5, @response.page(5).records.current_page - end - - should "return previous page and next page" do - assert_equal nil, @response.page(1).records.prev_page - assert_equal 2, @response.page(1).records.next_page - - assert_equal 3, @response.page(4).records.prev_page - assert_equal nil, @response.page(4).records.next_page - - assert_equal 2, @response.page(3).records.prev_page - assert_equal 4, @response.page(3).records.next_page - end - end - end - - context "Multimodel response pagination" do - setup do - @multimodel = Elasticsearch::Model::Multimodel.new(ModelClass) - @search = Elasticsearch::Model::Searching::SearchRequest.new @multimodel, '*' - @response = Elasticsearch::Model::Response::Response.new @multimodel, @search, RESPONSE - @response.klass.stubs(:client).returns mock('client') - end - - should "have pagination methods" do - assert_respond_to @response, :page - assert_respond_to @response, :limit_value - assert_respond_to @response, :offset_value - assert_respond_to @response, :limit - assert_respond_to @response, :offset - assert_respond_to @response, :total_count - end - - context "#page method" do - should "advance the from/size" do - @response.klass.client - .expects(:search) - .with do |definition| - assert_equal 25, definition[:from] - assert_equal 25, definition[:size] - true - end - .returns(RESPONSE) - - assert_nil @response.search.definition[:from] - assert_nil @response.search.definition[:size] - - @response.page(2).to_a - assert_equal 25, @response.search.definition[:from] - assert_equal 25, @response.search.definition[:size] - end - - should "advance the from/size further" do - @response.klass.client - .expects(:search) - .with do |definition| - assert_equal 75, definition[:from] - assert_equal 25, definition[:size] - true - end - .returns(RESPONSE) - - @response.page(4).to_a - assert_equal 75, @response.search.definition[:from] - assert_equal 25, @response.search.definition[:size] - end - end - - context "limit/offset readers" do - should "return the default" do - assert_equal Kaminari.config.default_per_page, @response.limit_value - assert_equal 0, @response.offset_value - end - - should "return the value from URL parameters" do - search = Elasticsearch::Model::Searching::SearchRequest.new ModelClass, '*', size: 10, from: 50 - @response = Elasticsearch::Model::Response::Response.new ModelClass, search, RESPONSE - - assert_equal 10, @response.limit_value - assert_equal 50, @response.offset_value - end - - should "ignore the value from request body" do - search = Elasticsearch::Model::Searching::SearchRequest.new ModelClass, - { query: { match_all: {} }, 
from: 333, size: 999 } - @response = Elasticsearch::Model::Response::Response.new ModelClass, search, RESPONSE - - assert_equal Kaminari.config.default_per_page, @response.limit_value - assert_equal 0, @response.offset_value - end - end - - context "limit setter" do - setup do - @response.records - @response.results - end - - should "set the values" do - @response.limit(35) - assert_equal 35, @response.search.definition[:size] - end - - should "reset the variables" do - @response.limit(35) - - assert_nil @response.instance_variable_get(:@response) - assert_nil @response.instance_variable_get(:@records) - assert_nil @response.instance_variable_get(:@results) - end - end - - context "with the page() and limit() methods" do - setup do - @response.records - @response.results - end - - should "set the values" do - @response.page(3).limit(35) - assert_equal 35, @response.search.definition[:size] - assert_equal 70, @response.search.definition[:from] - end - - should "set the values when limit is called first" do - @response.limit(35).page(3) - assert_equal 35, @response.search.definition[:size] - assert_equal 70, @response.search.definition[:from] - end - - should "reset the instance variables" do - @response.page(3).limit(35) - - assert_nil @response.instance_variable_get(:@response) - assert_nil @response.instance_variable_get(:@records) - assert_nil @response.instance_variable_get(:@results) - end - end - - context "offset setter" do - setup do - @response.records - @response.results - end - - should "set the values" do - @response.offset(15) - assert_equal 15, @response.search.definition[:from] - end - - should "reset the variables" do - @response.offset(35) - - assert_nil @response.instance_variable_get(:@response) - assert_nil @response.instance_variable_get(:@records) - assert_nil @response.instance_variable_get(:@results) - end - end - - context "total" do - should "return the number of hits" do - @response.expects(:results).returns(mock('results', total: 100)) - assert_equal 100, @response.total_count - end - end - - context "results" do - setup do - @search.stubs(:execute!).returns RESPONSE - end - - should "return current page and total count" do - assert_equal 1, @response.page(1).results.current_page - assert_equal 100, @response.results.total_count - - assert_equal 5, @response.page(5).results.current_page - end - - should "return previous page and next page" do - assert_equal nil, @response.page(1).results.prev_page - assert_equal 2, @response.page(1).results.next_page - - assert_equal 3, @response.page(4).results.prev_page - assert_equal nil, @response.page(4).results.next_page - - assert_equal 2, @response.page(3).results.prev_page - assert_equal 4, @response.page(3).results.next_page - end - end - - context "records" do - setup do - @search.stubs(:execute!).returns RESPONSE - end - - should "return current page and total count" do - assert_equal 1, @response.page(1).records.current_page - assert_equal 100, @response.records.total_count - - assert_equal 5, @response.page(5).records.current_page - end - - should "return previous page and next page" do - assert_equal nil, @response.page(1).records.prev_page - assert_equal 2, @response.page(1).records.next_page - - assert_equal 3, @response.page(4).records.prev_page - assert_equal nil, @response.page(4).records.next_page - - assert_equal 2, @response.page(3).records.prev_page - assert_equal 4, @response.page(3).records.next_page - end - end - end -end diff --git 
a/elasticsearch-model/test/unit/response_pagination_will_paginate_test.rb b/elasticsearch-model/test/unit/response_pagination_will_paginate_test.rb deleted file mode 100644 index 6c9383525..000000000 --- a/elasticsearch-model/test/unit/response_pagination_will_paginate_test.rb +++ /dev/null @@ -1,398 +0,0 @@ -require 'test_helper' -require 'will_paginate' -require 'will_paginate/collection' - -class Elasticsearch::Model::ResponsePaginationWillPaginateTest < Test::Unit::TestCase - class ModelClass - def self.index_name; 'foo'; end - def self.document_type; 'bar'; end - - # WillPaginate adds this method to models (see WillPaginate::PerPage module) - def self.per_page - 33 - end - end - - # Subsclass Response so we can include WillPaginate module without conflicts with Kaminari. - class WillPaginateResponse < Elasticsearch::Model::Response::Response - include Elasticsearch::Model::Response::Pagination::WillPaginate - end - - RESPONSE = { 'took' => '5', 'timed_out' => false, '_shards' => {'one' => 'OK'}, - 'hits' => { 'total' => 100, 'hits' => (1..100).to_a.map { |i| { _id: i } } } } - - context "Response pagination" do - - setup do - @search = Elasticsearch::Model::Searching::SearchRequest.new ModelClass, '*' - @response = WillPaginateResponse.new ModelClass, @search, RESPONSE - @response.klass.stubs(:client).returns mock('client') - - @expected_methods = [ - # methods needed by WillPaginate::CollectionMethods - :current_page, - :offset, - :per_page, - :total_entries, - :length, - - # methods defined by WillPaginate::CollectionMethods - :total_pages, - :previous_page, - :next_page, - :out_of_bounds?, - ] - end - - should "have pagination methods" do - assert_respond_to @response, :paginate - - @expected_methods.each do |method| - assert_respond_to @response, method - end - end - - context "response.results" do - should "have pagination methods" do - @expected_methods.each do |method| - assert_respond_to @response.results, method - end - end - end - - context "response.records" do - should "have pagination methods" do - @expected_methods.each do |method| - @response.klass.stubs(:find).returns([]) - assert_respond_to @response.records, method - end - end - end - - context "#offset method" do - should "calculate offset using current_page and per_page" do - @response.per_page(3).page(3) - assert_equal 6, @response.offset - end - end - context "#length method" do - should "return count of paginated results" do - @response.per_page(3).page(3) - assert_equal 3, @response.length - end - end - - context "#paginate method" do - should "set from/size using defaults" do - @response.klass.client - .expects(:search) - .with do |definition| - assert_equal 0, definition[:from] - assert_equal 33, definition[:size] - true - end - .returns(RESPONSE) - - assert_nil @response.search.definition[:from] - assert_nil @response.search.definition[:size] - - @response.paginate(page: nil).to_a - assert_equal 0, @response.search.definition[:from] - assert_equal 33, @response.search.definition[:size] - end - - should "set from/size using default per_page" do - @response.klass.client - .expects(:search) - .with do |definition| - assert_equal 33, definition[:from] - assert_equal 33, definition[:size] - true - end - .returns(RESPONSE) - - assert_nil @response.search.definition[:from] - assert_nil @response.search.definition[:size] - - @response.paginate(page: 2).to_a - assert_equal 33, @response.search.definition[:from] - assert_equal 33, @response.search.definition[:size] - end - - should "set from/size using custom page and 
per_page" do - @response.klass.client - .expects(:search) - .with do |definition| - assert_equal 18, definition[:from] - assert_equal 9, definition[:size] - true - end - .returns(RESPONSE) - - assert_nil @response.search.definition[:from] - assert_nil @response.search.definition[:size] - - @response.paginate(page: 3, per_page: 9).to_a - assert_equal 18, @response.search.definition[:from] - assert_equal 9, @response.search.definition[:size] - end - - should "search for first page if specified page is < 1" do - @response.klass.client - .expects(:search) - .with do |definition| - assert_equal 0, definition[:from] - assert_equal 33, definition[:size] - true - end - .returns(RESPONSE) - - assert_nil @response.search.definition[:from] - assert_nil @response.search.definition[:size] - - @response.paginate(page: "-1").to_a - assert_equal 0, @response.search.definition[:from] - assert_equal 33, @response.search.definition[:size] - end - - should "use the param_name" do - @response.klass.client - .expects(:search) - .with do |definition| - assert_equal 10, definition[:from] - true - end - .returns(RESPONSE) - - @response.paginate(my_page: 2, per_page: 10, param_name: :my_page).to_a - end - end - - context "#page and #per_page shorthand methods" do - should "set from/size using default per_page" do - @response.page(5) - assert_equal 132, @response.search.definition[:from] - assert_equal 33, @response.search.definition[:size] - end - - should "set from/size when calling #page then #per_page" do - @response.page(5).per_page(3) - assert_equal 12, @response.search.definition[:from] - assert_equal 3, @response.search.definition[:size] - end - - should "set from/size when calling #per_page then #page" do - @response.per_page(3).page(5) - assert_equal 12, @response.search.definition[:from] - assert_equal 3, @response.search.definition[:size] - end - end - - context "#current_page method" do - should "return 1 by default" do - @response.paginate({}) - assert_equal 1, @response.current_page - end - - should "return current page number" do - @response.paginate(page: 3, per_page: 9) - assert_equal 3, @response.current_page - end - - should "return nil if not pagination set" do - assert_equal nil, @response.current_page - end - end - - context "#per_page method" do - should "return value set in paginate call" do - @response.paginate(per_page: 8) - assert_equal 8, @response.per_page - end - end - - context "#total_entries method" do - should "return total from response" do - @response.expects(:results).returns(mock('results', total: 100)) - assert_equal 100, @response.total_entries - end - end - end - - context "Multimodel response pagination" do - setup do - @multimodel = Elasticsearch::Model::Multimodel.new ModelClass - @search = Elasticsearch::Model::Searching::SearchRequest.new @multimodel, '*' - @response = WillPaginateResponse.new @multimodel, @search, RESPONSE - @response.klass.stubs(:client).returns mock('client') - - @expected_methods = [ - # methods needed by WillPaginate::CollectionMethods - :current_page, - :offset, - :per_page, - :total_entries, - :length, - - # methods defined by WillPaginate::CollectionMethods - :total_pages, - :previous_page, - :next_page, - :out_of_bounds?, - ] - end - - should "have pagination methods" do - assert_respond_to @response, :paginate - - @expected_methods.each do |method| - assert_respond_to @response, method - end - end - - context "response.results" do - should "have pagination methods" do - @expected_methods.each do |method| - assert_respond_to @response.results, 
method - end - end - end - - context "#offset method" do - should "calculate offset using current_page and per_page" do - @response.per_page(3).page(3) - assert_equal 6, @response.offset - end - end - context "#length method" do - should "return count of paginated results" do - @response.per_page(3).page(3) - assert_equal 3, @response.length - end - end - - context "#paginate method" do - should "set from/size using WillPaginate defaults, ignoring aggregated models configuration" do - @response.klass.client - .expects(:search) - .with do |definition| - assert_equal 0, definition[:from] - assert_equal ::WillPaginate.per_page, definition[:size] - true - end - .returns(RESPONSE) - - assert_nil @response.search.definition[:from] - assert_nil @response.search.definition[:size] - - @response.paginate(page: nil).to_a - assert_equal 0, @response.search.definition[:from] - assert_equal ::WillPaginate.per_page, @response.search.definition[:size] - end - - should "set from/size using default per_page, ignoring aggregated models' configuration" do - @response.klass.client - .expects(:search) - .with do |definition| - assert_equal ::WillPaginate.per_page, definition[:from] - assert_equal ::WillPaginate.per_page, definition[:size] - true - end - .returns(RESPONSE) - - assert_nil @response.search.definition[:from] - assert_nil @response.search.definition[:size] - - @response.paginate(page: 2).to_a - assert_equal ::WillPaginate.per_page, @response.search.definition[:from] - assert_equal ::WillPaginate.per_page, @response.search.definition[:size] - end - - should "set from/size using custom page and per_page" do - @response.klass.client - .expects(:search) - .with do |definition| - assert_equal 18, definition[:from] - assert_equal 9, definition[:size] - true - end - .returns(RESPONSE) - - assert_nil @response.search.definition[:from] - assert_nil @response.search.definition[:size] - - @response.paginate(page: 3, per_page: 9).to_a - assert_equal 18, @response.search.definition[:from] - assert_equal 9, @response.search.definition[:size] - end - - should "search for first page if specified page is < 1" do - @response.klass.client - .expects(:search) - .with do |definition| - assert_equal 0, definition[:from] - assert_equal ::WillPaginate.per_page, definition[:size] - true - end - .returns(RESPONSE) - - assert_nil @response.search.definition[:from] - assert_nil @response.search.definition[:size] - - @response.paginate(page: "-1").to_a - assert_equal 0, @response.search.definition[:from] - assert_equal ::WillPaginate.per_page, @response.search.definition[:size] - end - end - - context "#page and #per_page shorthand methods" do - should "set from/size using default per_page" do - @response.page(5) - assert_equal 120, @response.search.definition[:from] - assert_equal ::WillPaginate.per_page, @response.search.definition[:size] - end - - should "set from/size when calling #page then #per_page" do - @response.page(5).per_page(3) - assert_equal 12, @response.search.definition[:from] - assert_equal 3, @response.search.definition[:size] - end - - should "set from/size when calling #per_page then #page" do - @response.per_page(3).page(5) - assert_equal 12, @response.search.definition[:from] - assert_equal 3, @response.search.definition[:size] - end - end - - context "#current_page method" do - should "return 1 by default" do - @response.paginate({}) - assert_equal 1, @response.current_page - end - - should "return current page number" do - @response.paginate(page: 3, per_page: 9) - assert_equal 3, @response.current_page 
- end - - should "return nil if not pagination set" do - assert_equal nil, @response.current_page - end - end - - context "#per_page method" do - should "return value set in paginate call" do - @response.paginate(per_page: 8) - assert_equal 8, @response.per_page - end - end - - context "#total_entries method" do - should "return total from response" do - @response.expects(:results).returns(mock('results', total: 100)) - assert_equal 100, @response.total_entries - end - end - end -end diff --git a/elasticsearch-model/test/unit/response_records_test.rb b/elasticsearch-model/test/unit/response_records_test.rb deleted file mode 100644 index 8a78255d7..000000000 --- a/elasticsearch-model/test/unit/response_records_test.rb +++ /dev/null @@ -1,91 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::RecordsTest < Test::Unit::TestCase - context "Response records" do - class DummyCollection - include Enumerable - - def each(&block); ['FOO'].each(&block); end - def size; ['FOO'].size; end - def empty?; ['FOO'].empty?; end - def foo; 'BAR'; end - end - - class DummyModel - def self.index_name; 'foo'; end - def self.document_type; 'bar'; end - - def self.find(*args) - DummyCollection.new - end - end - - RESPONSE = { 'hits' => { 'total' => 123, 'max_score' => 456, 'hits' => [{'_id' => '1', 'foo' => 'bar'}] } } - RESULTS = Elasticsearch::Model::Response::Results.new DummyModel, RESPONSE - - setup do - search = Elasticsearch::Model::Searching::SearchRequest.new DummyModel, '*' - search.stubs(:execute!).returns RESPONSE - - response = Elasticsearch::Model::Response::Response.new DummyModel, search - @records = Elasticsearch::Model::Response::Records.new DummyModel, response - end - - should "access the records" do - assert_respond_to @records, :records - assert_equal 1, @records.records.size - assert_equal 'FOO', @records.records.first - end - - should "delegate Enumerable methods to records" do - assert ! @records.empty? 
- assert_equal 'FOO', @records.first - end - - should "delegate methods to records" do - assert_respond_to @records, :foo - assert_equal 'BAR', @records.foo - end - - should "have each_with_hit method" do - @records.each_with_hit do |record, hit| - assert_equal 'FOO', record - assert_equal 'bar', hit.foo - end - end - - should "have map_with_hit method" do - assert_equal ['FOO---bar'], @records.map_with_hit { |record, hit| "#{record}---#{hit.foo}" } - end - - should "return the IDs" do - assert_equal ['1'], @records.ids - end - - context "with adapter" do - module DummyAdapter - module RecordsMixin - def records - ['FOOBAR'] - end - end - - def records_mixin - RecordsMixin - end; module_function :records_mixin - end - - should "delegate the records method to the adapter" do - Elasticsearch::Model::Adapter.expects(:from_class) - .with(DummyModel) - .returns(DummyAdapter) - - @records = Elasticsearch::Model::Response::Records.new DummyModel, - RESPONSE - - assert_equal ['FOOBAR'], @records.records - end - end - - end -end diff --git a/elasticsearch-model/test/unit/response_result_test.rb b/elasticsearch-model/test/unit/response_result_test.rb deleted file mode 100644 index c357d46ba..000000000 --- a/elasticsearch-model/test/unit/response_result_test.rb +++ /dev/null @@ -1,90 +0,0 @@ -require 'test_helper' -require 'active_support/json/encoding' - -class Elasticsearch::Model::ResultTest < Test::Unit::TestCase - context "Response result" do - - should "have method access to properties" do - result = Elasticsearch::Model::Response::Result.new foo: 'bar', bar: { bam: 'baz' } - - assert_respond_to result, :foo - assert_respond_to result, :bar - - assert_equal 'bar', result.foo - assert_equal 'baz', result.bar.bam - - assert_raise(NoMethodError) { result.xoxo } - end - - should "return _id as #id" do - result = Elasticsearch::Model::Response::Result.new foo: 'bar', _id: 42, _source: { id: 12 } - - assert_equal 42, result.id - assert_equal 12, result._source.id - end - - should "return _type as #type" do - result = Elasticsearch::Model::Response::Result.new foo: 'bar', _type: 'baz', _source: { type: 'BAM' } - - assert_equal 'baz', result.type - assert_equal 'BAM', result._source.type - end - - should "delegate method calls to `_source` when available" do - result = Elasticsearch::Model::Response::Result.new foo: 'bar', _source: { bar: 'baz' } - - assert_respond_to result, :foo - assert_respond_to result, :_source - assert_respond_to result, :bar - - assert_equal 'bar', result.foo - assert_equal 'baz', result._source.bar - assert_equal 'baz', result.bar - end - - should "delegate existence method calls to `_source`" do - result = Elasticsearch::Model::Response::Result.new foo: 'bar', _source: { bar: { bam: 'baz' } } - - assert_respond_to result._source, :bar? - assert_respond_to result, :bar? - - assert_equal true, result._source.bar? - assert_equal true, result.bar? - assert_equal false, result.boo? - - assert_equal true, result.bar.bam? - assert_equal false, result.bar.boo? 
- end - - should "delegate methods to @result" do - result = Elasticsearch::Model::Response::Result.new foo: 'bar' - - assert_equal 'bar', result.foo - assert_equal 'bar', result.fetch('http://github.com/foo') - assert_equal 'moo', result.fetch('http://github.com/NOT_EXIST', 'moo') - assert_equal ['foo'], result.keys - - assert_respond_to result, :to_hash - assert_equal({'foo' => 'bar'}, result.to_hash) - - assert_raise(NoMethodError) { result.does_not_exist } - end - - should "delegate existence method calls to @result" do - result = Elasticsearch::Model::Response::Result.new foo: 'bar', _source: { bar: 'bam' } - assert_respond_to result, :foo? - - assert_equal true, result.foo? - assert_equal false, result.boo? - assert_equal false, result._source.foo? - assert_equal false, result._source.boo? - end - - should "delegate as_json to @result even when ActiveSupport changed half of Ruby" do - result = Elasticsearch::Model::Response::Result.new foo: 'bar' - - result.instance_variable_get(:@result).expects(:as_json) - result.as_json(except: 'foo') - end - end -end diff --git a/elasticsearch-model/test/unit/response_results_test.rb b/elasticsearch-model/test/unit/response_results_test.rb deleted file mode 100644 index e97539ecd..000000000 --- a/elasticsearch-model/test/unit/response_results_test.rb +++ /dev/null @@ -1,31 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::ResultsTest < Test::Unit::TestCase - context "Response results" do - class OriginClass - def self.index_name; 'foo'; end - def self.document_type; 'bar'; end - end - - RESPONSE = { 'hits' => { 'total' => 123, 'max_score' => 456, 'hits' => [{'foo' => 'bar'}] } } - - setup do - @search = Elasticsearch::Model::Searching::SearchRequest.new OriginClass, '*' - @response = Elasticsearch::Model::Response::Response.new OriginClass, @search - @results = Elasticsearch::Model::Response::Results.new OriginClass, @response - @search.stubs(:execute!).returns(RESPONSE) - end - - should "access the results" do - assert_respond_to @results, :results - assert_equal 1, @results.results.size - assert_equal 'bar', @results.results.first.foo - end - - should "delegate Enumerable methods to results" do - assert ! @results.empty? 
- assert_equal 'bar', @results.first.foo - end - - end -end diff --git a/elasticsearch-model/test/unit/response_test.rb b/elasticsearch-model/test/unit/response_test.rb deleted file mode 100644 index e18f6c7ef..000000000 --- a/elasticsearch-model/test/unit/response_test.rb +++ /dev/null @@ -1,104 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::ResponseTest < Test::Unit::TestCase - context "Response" do - class OriginClass - def self.index_name; 'foo'; end - def self.document_type; 'bar'; end - end - - RESPONSE = { 'took' => '5', 'timed_out' => false, '_shards' => {'one' => 'OK'}, 'hits' => { 'hits' => [] }, - 'aggregations' => {'foo' => {'bar' => 10}}, - 'suggest' => {'my_suggest' => [ { 'text' => 'foo', 'options' => [ { 'text' => 'Foo', 'score' => 2.0 }, { 'text' => 'Bar', 'score' => 1.0 } ] } ]}} - - setup do - @search = Elasticsearch::Model::Searching::SearchRequest.new OriginClass, '*' - @search.stubs(:execute!).returns(RESPONSE) - end - - should "access klass, response, took, timed_out, shards" do - response = Elasticsearch::Model::Response::Response.new OriginClass, @search - - assert_equal OriginClass, response.klass - assert_equal @search, response.search - assert_equal RESPONSE, response.response - assert_equal '5', response.took - assert_equal false, response.timed_out - assert_equal 'OK', response.shards.one - end - - should "wrap the raw Hash response in a HashWrapper" do - @search = Elasticsearch::Model::Searching::SearchRequest.new OriginClass, '*' - @search.stubs(:execute!).returns({'hits' => { 'hits' => [] }, 'aggregations' => { 'dates' => 'FOO' }}) - - response = Elasticsearch::Model::Response::Response.new OriginClass, @search - - assert_respond_to response.response, :aggregations - assert_equal 'FOO', response.response.aggregations.dates - end - - should "load and access the results" do - @search.expects(:execute!).returns(RESPONSE) - - response = Elasticsearch::Model::Response::Response.new OriginClass, @search - assert_instance_of Elasticsearch::Model::Response::Results, response.results - assert_equal 0, response.size - end - - should "load and access the records" do - @search.expects(:execute!).returns(RESPONSE) - - response = Elasticsearch::Model::Response::Response.new OriginClass, @search - assert_instance_of Elasticsearch::Model::Response::Records, response.records - assert_equal 0, response.size - end - - should "delegate Enumerable methods to results" do - @search.expects(:execute!).returns(RESPONSE) - - response = Elasticsearch::Model::Response::Response.new OriginClass, @search - assert response.empty? 
- end - - should "be initialized lazily" do - @search.expects(:execute!).never - - Elasticsearch::Model::Response::Response.new OriginClass, @search - end - - should "access the aggregations" do - @search.expects(:execute!).returns(RESPONSE) - - response = Elasticsearch::Model::Response::Response.new OriginClass, @search - assert_respond_to response, :aggregations - assert_kind_of Hashie::Mash, response.aggregations.foo - assert_equal 10, response.aggregations.foo.bar - end - - should "access the suggest" do - @search.expects(:execute!).returns(RESPONSE) - - response = Elasticsearch::Model::Response::Response.new OriginClass, @search - - assert_respond_to response, :suggestions - assert_kind_of Hashie::Mash, response.suggestions - assert_equal 'Foo', response.suggestions.my_suggest.first.options.first.text - end - - should "return array of terms from the suggestions" do - @search.expects(:execute!).returns(RESPONSE) - response = Elasticsearch::Model::Response::Response.new OriginClass, @search - - assert_not_empty response.suggestions - assert_equal [ 'Foo', 'Bar' ], response.suggestions.terms - end - - should "return empty array as suggest terms when there are no suggestions" do - @search.expects(:execute!).returns({}) - response = Elasticsearch::Model::Response::Response.new OriginClass, @search - - assert_empty response.suggestions - assert_equal [], response.suggestions.terms - end - end -end diff --git a/elasticsearch-model/test/unit/searching_search_request_test.rb b/elasticsearch-model/test/unit/searching_search_request_test.rb deleted file mode 100644 index b2e84aecc..000000000 --- a/elasticsearch-model/test/unit/searching_search_request_test.rb +++ /dev/null @@ -1,78 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::SearchRequestTest < Test::Unit::TestCase - context "SearchRequest class" do - class ::DummySearchingModel - extend Elasticsearch::Model::Searching::ClassMethods - - def self.index_name; 'foo'; end - def self.document_type; 'bar'; end - - end - - setup do - @client = mock('client') - DummySearchingModel.stubs(:client).returns(@client) - end - - should "pass the search definition as a simple query" do - @client.expects(:search).with do |params| - assert_equal 'foo', params[:q] - true - end - .returns({}) - - s = Elasticsearch::Model::Searching::SearchRequest.new ::DummySearchingModel, 'foo' - s.execute! - end - - should "pass the search definition as a Hash" do - @client.expects(:search).with do |params| - assert_equal( {foo: 'bar'}, params[:body] ) - true - end - .returns({}) - - s = Elasticsearch::Model::Searching::SearchRequest.new ::DummySearchingModel, foo: 'bar' - s.execute! - end - - should "pass the search definition as a JSON string" do - @client.expects(:search).with do |params| - assert_equal( '{"foo":"bar"}', params[:body] ) - true - end - .returns({}) - - s = Elasticsearch::Model::Searching::SearchRequest.new ::DummySearchingModel, '{"foo":"bar"}' - s.execute! - end - - should "pass the search definition as an object which responds to to_hash" do - class MySpecialQueryBuilder - def to_hash; {foo: 'bar'}; end - end - - @client.expects(:search).with do |params| - assert_equal( {foo: 'bar'}, params[:body] ) - true - end - .returns({}) - - s = Elasticsearch::Model::Searching::SearchRequest.new ::DummySearchingModel, MySpecialQueryBuilder.new - s.execute! 
- end - - should "pass the options to the client" do - @client.expects(:search).with do |params| - assert_equal 'foo', params[:q] - assert_equal 15, params[:size] - true - end - .returns({}) - - s = Elasticsearch::Model::Searching::SearchRequest.new ::DummySearchingModel, 'foo', size: 15 - s.execute! - end - end -end diff --git a/elasticsearch-model/test/unit/searching_test.rb b/elasticsearch-model/test/unit/searching_test.rb deleted file mode 100644 index f6cb78136..000000000 --- a/elasticsearch-model/test/unit/searching_test.rb +++ /dev/null @@ -1,41 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::SearchingTest < Test::Unit::TestCase - context "Searching module" do - class ::DummySearchingModel - extend Elasticsearch::Model::Searching::ClassMethods - - def self.index_name; 'foo'; end - def self.document_type; 'bar'; end - end - - setup do - @client = mock('client') - DummySearchingModel.stubs(:client).returns(@client) - end - - should "have the search method" do - assert_respond_to DummySearchingModel, :search - end - - should "initialize the search object" do - Elasticsearch::Model::Searching::SearchRequest - .expects(:new).with do |klass, query, options| - assert_equal DummySearchingModel, klass - assert_equal 'foo', query - assert_equal({default_operator: 'AND'}, options) - true - end - .returns( stub('search') ) - - DummySearchingModel.search 'foo', default_operator: 'AND' - end - - should "not execute the search" do - Elasticsearch::Model::Searching::SearchRequest - .expects(:new).returns( mock('search').expects(:execute!).never ) - - DummySearchingModel.search 'foo' - end - end -end diff --git a/elasticsearch-model/test/unit/serializing_test.rb b/elasticsearch-model/test/unit/serializing_test.rb deleted file mode 100644 index 201329257..000000000 --- a/elasticsearch-model/test/unit/serializing_test.rb +++ /dev/null @@ -1,17 +0,0 @@ -require 'test_helper' - -class Elasticsearch::Model::SerializingTest < Test::Unit::TestCase - context "Serializing module" do - class DummyClass - include Elasticsearch::Model::Serializing::InstanceMethods - - def as_json(options={}) - 'HASH' - end - end - - should "delegate to as_json by default" do - assert_equal 'HASH', DummyClass.new.as_indexed_json - end - end -end diff --git a/elasticsearch-persistence/.rspec b/elasticsearch-persistence/.rspec new file mode 100644 index 000000000..77d185827 --- /dev/null +++ b/elasticsearch-persistence/.rspec @@ -0,0 +1,2 @@ +--tty +--colour diff --git a/elasticsearch-persistence/Gemfile b/elasticsearch-persistence/Gemfile index a60150cf4..de011df05 100644 --- a/elasticsearch-persistence/Gemfile +++ b/elasticsearch-persistence/Gemfile @@ -2,3 +2,12 @@ source 'https://rubygems.org/' # Specify your gem's dependencies in elasticsearch-persistence.gemspec gemspec + +gem 'elasticsearch-model', :path => File.expand_path("../../elasticsearch-model", __FILE__), :require => false + +gem 'virtus' + +group :development, :testing do + gem 'rspec' + gem 'pry-nav' +end diff --git a/elasticsearch-persistence/README.md b/elasticsearch-persistence/README.md index 4c6dead10..cf4856536 100644 --- a/elasticsearch-persistence/README.md +++ b/elasticsearch-persistence/README.md @@ -1,6 +1,6 @@ # Elasticsearch::Persistence -Persistence layer for Ruby domain objects in Elasticsearch, using the Repository and ActiveRecord patterns. +Persistence layer for Ruby domain objects in Elasticsearch, using the Repository pattern. 
## Compatibility @@ -14,6 +14,7 @@ is compatible with the Elasticsearch `master` branch, therefore, with the next m | 0.1 | → | 1.x | | 2.x | → | 2.x | | 5.x | → | 5.x | +| 6.x | → | 6.x | | master | → | master | ## Installation @@ -24,7 +25,7 @@ Install the package from [Rubygems](https://rubygems.org): To use an unreleased version, either add it to your `Gemfile` for [Bundler](http://bundler.io): - gem 'elasticsearch-persistence', git: 'git://github.com/elastic/elasticsearch-rails.git', branch: '5.x' + gem 'elasticsearch-persistence', git: 'git://github.com/elastic/elasticsearch-rails.git', branch: '6.x' or install it from a source code checkout: @@ -35,16 +36,13 @@ or install it from a source code checkout: ## Usage -The library provides two different patterns for adding persistence to your Ruby objects: - -* [Repository Pattern](#the-repository-pattern) -* [ActiveRecord Pattern](#the-activerecord-pattern) +The library provides the Repository pattern for adding persistence to your Ruby objects. ### The Repository Pattern The `Elasticsearch::Persistence::Repository` module provides an implementation of the [repository pattern](http://martinfowler.com/eaaCatalog/repository.html) and allows -to save, delete, find and search objects stored in Elasticsearch, as well as configure +you to save, delete, find and search objects stored in Elasticsearch, as well as configure mappings and settings for the index. It's an unobtrusive and decoupled way of adding persistence to your Ruby objects. @@ -68,7 +66,8 @@ Let's create a default, "dumb" repository, as a first step: ```ruby require 'elasticsearch/persistence' -repository = Elasticsearch::Persistence::Repository.new +class MyRepository; include Elasticsearch::Persistence::Repository; end +repository = MyRepository.new ``` We can save a `Note` instance into the repository... @@ -77,7 +76,7 @@ We can save a `Note` instance into the repository... note = Note.new id: 1, text: 'Test' repository.save(note) -# PUT http://localhost:9200/repository/note/1 [status:201, request:0.210s, query:n/a] +# PUT http://localhost:9200/repository/_doc/1 [status:201, request:0.210s, query:n/a] # > {"id":1,"text":"Test"} # < {"_index":"repository","_type":"note","_id":"1","_version":1,"created":true} ``` @@ -86,7 +85,7 @@ repository.save(note) ```ruby n = repository.find(1) -# GET http://localhost:9200/repository/_all/1 [status:200, request:0.003s, query:n/a] +# GET http://localhost:9200/repository/_doc/1 [status:200, request:0.003s, query:n/a] # < {"_index":"repository","_type":"note","_id":"1","_version":2,"found":true, "_source" : {"id":1,"text":"Test"}} => 1, "text"=>"Test"}> ``` @@ -105,7 +104,7 @@ repository.search(query: { match: { text: 'test' } }).first ```ruby repository.delete(note) -# DELETE http://localhost:9200/repository/note/1 [status:200, request:0.014s, query:n/a] +# DELETE http://localhost:9200/repository/_doc/1 [status:200, request:0.014s, query:n/a] # < {"found":true,"_index":"repository","_type":"note","_id":"1","_version":3} => {"found"=>true, "_index"=>"repository", "_type"=>"note", "_id"=>"1", "_version"=>2} ``` @@ -121,32 +120,17 @@ The repository module provides a number of features and facilities to configure * Providing access to the Elasticsearch response for search results (aggregations, total, ...) * Defining the methods for serialization and deserialization -You can use the default repository class, or include the module in your own. Let's review it in detail. +There are two mixins you can include in your Repository class. 
The first `Elasticsearch::Persistence::Repository`, +provides the basic methods and settings you'll need. The second, `Elasticsearch::Persistence::Repository::DSL` adds +some additional class methods that allow you to set options that instances of the class will share. -#### The Default Class +#### Basic Repository mixin -For simple cases, you can use the default, bundled repository class, and configure/customize it: +For simple cases, you can just include the Elasticsearch::Persistence::Repository mixin to your class: ```ruby -repository = Elasticsearch::Persistence::Repository.new do - # Configure the Elasticsearch client - client Elasticsearch::Client.new url: ENV['ELASTICSEARCH_URL'], log: true - - # Set a custom index name - index :my_notes - - # Set a custom document type - type :my_note - - # Specify the class to initialize when deserializing documents - klass Note - - # Configure the settings and mappings for the Elasticsearch index - settings number_of_shards: 1 do - mapping do - indexes :text, analyzer: 'snowball' - end - end +class MyRepository + include Elasticsearch::Persistence::Repository # Customize the serialization logic def serialize(document) @@ -155,10 +139,18 @@ repository = Elasticsearch::Persistence::Repository.new do # Customize the de-serialization logic def deserialize(document) - puts "# ***** CUSTOM DESERIALIZE LOGIC KICKING IN... *****" + puts "# ***** CUSTOM DESERIALIZE LOGIC... *****" super end end + +client = Elasticsearch::Client.new(url: ENV['ELASTICSEARCH_URL'], log: true) +repository = MyRepository.new(client: client, index_name: :my_notes, type: :note, klass: Note) +repository.settings number_of_shards: 1 do + mapping do + indexes :text, analyzer: 'snowball' + end +end ``` The custom Elasticsearch client will be used now, with a custom index and type names, @@ -176,7 +168,7 @@ Save the document with extra properties added by the `serialize` method: ```ruby repository.save(note) -# PUT http://localhost:9200/my_notes/my_note/1 +# PUT http://localhost:9200/my_notes/note/1 # > {"id":1,"text":"Test","my_special_key":"my_special_stuff"} {"_index"=>"my_notes", "_type"=>"my_note", "_id"=>"1", "_version"=>4, ... } ``` @@ -185,32 +177,31 @@ And `deserialize` it: ```ruby repository.find(1) -# ***** CUSTOM DESERIALIZE LOGIC KICKING IN... ***** +# ***** CUSTOM DESERIALIZE LOGIC... ***** "my_special_stuff"}> ``` -#### A Custom Class +#### The DSL mixin -In most cases, though, you'll want to use a custom class for the repository, so let's do that: +In some cases, you'll want to set some of the repository configurations at the class level. This makes +most sense when the instances of the repository will use that same configuration: ```ruby require 'base64' class NoteRepository include Elasticsearch::Persistence::Repository + include Elasticsearch::Persistence::Repository::DSL - def initialize(options={}) - index options[:index] || 'notes' - client Elasticsearch::Client.new url: options[:url], log: options[:log] - end - + index_name 'notes' + document_type 'note' klass Note settings number_of_shards: 1 do mapping do indexes :text, analyzer: 'snowball' # Do not index images - indexes :image, index: 'no' + indexes :image, index: false end end @@ -232,74 +223,160 @@ class NoteRepository end ``` -Include the `Elasticsearch::Persistence::Repository` module to add the repository methods into the class. +You can create an instance of this custom class and get each of the configurations. -You can customize the repository in the familiar way, by calling the DSL-like methods. 
+```ruby +client = Elasticsearch::Client.new(url: 'http://localhost:9200/', log: true) +repository = NoteRepository.new(client: client) +repository.index_name +# => 'notes' -You can implement a custom initializer for your repository, add complex logic in its -class and instance methods -- in general, have all the freedom of a standard Ruby class. +``` -```ruby -repository = NoteRepository.new url: 'http://localhost:9200/', log: true +You can also override the default configuration with options passed to the initialize method: -# Configure the repository instance -repository.index = 'notes_development' -repository.client.transport.logger.formatter = proc { |s, d, p, m| "\e[2m# #{m}\n\e[0m" } +```ruby +client = Elasticsearch::Client.new(url: 'http://localhost:9250/', log: true) +client.transport.logger.formatter = proc { |s, d, p, m| "\e[2m# #{m}\n\e[0m" } +repository = NoteRepository.new(client: client, index_name: 'notes_development') -repository.create_index! force: true +repository.create_index!(force: true) -note = Note.new 'id' => 1, 'text' => 'Document with image', 'image' => '... BINARY DATA ...' +note = Note.new('id' => 1, 'text' => 'Document with image', 'image' => '... BINARY DATA ...') repository.save(note) -# PUT http://localhost:9200/notes_development/note/1 +# PUT http://localhost:9200/notes_development/_doc/1 # > {"id":1,"text":"Document with image","image":"Li4uIEJJTkFSWSBEQVRBIC4uLg==\n"} puts repository.find(1).attributes['image'] -# GET http://localhost:9200/notes_development/note/1 +# GET http://localhost:9200/notes_development/_doc/1 # < {... "_source" : { ... "image":"Li4uIEJJTkFSWSBEQVRBIC4uLg==\n"}} # => ... BINARY DATA ... ``` -#### Methods Provided by the Repository +#### Functionality Provided by the Repository mixin + +Each of the following configurations can be set for a repository instance. +If you have included the `Elasticsearch::Persistence::Repository::DSL` mixin, then you can use the class-level DSL +methods to set each value. You can still override the configuration for any instance by passing options to the +`#initialize` method. +Even if you don't use the DSL mixin, you can set the instance configuration with options passed the `#initialize` method. ##### Client -The repository uses the standard Elasticsearch [client](https://github.com/elastic/elasticsearch-ruby#usage), -which is accessible with the `client` getter and setter methods: +The repository uses the standard Elasticsearch [client](https://github.com/elastic/elasticsearch-ruby#usage). ```ruby -repository.client = Elasticsearch::Client.new url: 'http://search.server.org/' +client = Elasticsearch::Client.new(url: 'http://search.server.org/') +repository = NoteRepository.new(client: client) repository.client.transport.logger = Logger.new(STDERR) +repository.client +# => Elasticsearch::Client + +``` + +or with the DSL mixin: + +```ruby +class NoteRepository + include Elasticsearch::Persistence::Repository + include Elasticsearch::Persistence::Repository::DSL + + client Elasticsearch::Client.new url: 'http://search.server.org/' +end + +repository = NoteRepository.new +repository.client +# => Elasticsearch::Client + ``` ##### Naming -The `index` method specifies the Elasticsearch index to use for storage, lookup and search -(when not set, the value is inferred from the repository class name): +The `index_name` method specifies the Elasticsearch index to use for storage, lookup and search. The default index name +is 'repository'. 
+ +```ruby +repository = NoteRepository.new(index_name: 'notes_development') +repository.index_name +# => 'notes_development' + +``` + +or with the DSL mixin: + +```ruby +class NoteRepository + include Elasticsearch::Persistence::Repository + include Elasticsearch::Persistence::Repository::DSL + + index_name 'notes_development' +end + +repository = NoteRepository.new +repository.index_name +# => 'notes_development' + +``` + +The `document_type` method specifies the Elasticsearch document type to use for storage, lookup and search. The default value is +'_doc'. Keep in mind that future versions of Elasticsearch will not allow you to set this yourself and will use the type, +'_doc'. ```ruby -repository.index = 'notes_development' +repository = NoteRepository.new(document_type: 'note') +repository.document_type +# => 'note' + ``` -The `type` method specifies the Elasticsearch document type to use for storage, lookup and search -(when not set, the value is inferred from the document class name, or `_all` is used): +or with the DSL mixin: ```ruby -repository.type = 'my_note' +class NoteRepository + include Elasticsearch::Persistence::Repository + include Elasticsearch::Persistence::Repository::DSL + + document_type 'note' +end + +repository = NoteRepository.new +repository.document_type +# => 'note' + ``` The `klass` method specifies the Ruby class name to use when initializing objects from -documents retrieved from the repository (when not set, the value is inferred from the -document `_type` as fetched from Elasticsearch): +documents retrieved from the repository. If this value is not set, a Hash representation of the document will be +returned instead. + +```ruby +repository = NoteRepository.new(klass: Note) +repository.klass +# => Note + +``` + +or with the DSL mixin: ```ruby -repository.klass = MyNote +class NoteRepository + include Elasticsearch::Persistence::Repository + include Elasticsearch::Persistence::Repository::DSL + + klass Note +end + +repository = NoteRepository.new +repository.klass +# => Note + ``` ##### Index Configuration The `settings` and `mappings` methods, provided by the [`elasticsearch-model`](http://rubydoc.info/gems/elasticsearch-model/Elasticsearch/Model/Indexing/ClassMethods) -gem, allow to configure the index properties: +gem, allow you to configure the index properties: ```ruby repository.settings number_of_shards: 1 @@ -311,18 +388,57 @@ repository.mappings.to_hash # => { :note => {:properties=> ... }} ``` +or with the DSL mixin: + +```ruby +class NoteRepository + include Elasticsearch::Persistence::Repository + include Elasticsearch::Persistence::Repository::DSL + + mappings { indexes :title, analyzer: 'snowball' } + settings number_of_shards: 1 +end + +repository = NoteRepository.new + +``` + +##### Create a Repository and set its configuration with a block + +You can also use the `#create` method to instantiate and set the mappings and settings on an instance +with a block in one call: + +```ruby +repository = NoteRepository.create(index_name: 'notes_development') do + settings number_of_shards: 1, number_of_replicas: 0 do + mapping dynamic: 'strict' do + indexes :foo do + indexes :bar + end + indexes :baz + end + end +end +``` + +##### Index Management + The convenience methods `create_index!`, `delete_index!` and `refresh_index!` allow you to manage the index lifecycle. +These methods can only be called on repository instances and are not implemented at the class level. 
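For example, here is a minimal sketch of the index lifecycle, reusing the `NoteRepository` class and the `notes_development` index name from the examples above (the composition of calls is illustrative, not prescribed):

```ruby
repository = NoteRepository.new(index_name: 'notes_development')

repository.create_index!(force: true) # create the index with the configured settings/mappings, replacing any existing one
repository.refresh_index!             # make recently written documents visible to search
repository.delete_index!              # remove the index and all of its documents
```

Passing `force: true` removes an existing index with the same name before creating it, which is convenient in tests but destructive anywhere else.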
##### Serialization -The `serialize` and `deserialize` methods allow you to customize the serialization of the document when passing it -to the storage, and the initialization procedure when loading it from the storage: +The `serialize` and `deserialize` methods allow you to customize the serialization of the document when it +is persisted to Elasticsearch, and define the initialization procedure when loading it from the storage: ```ruby class NoteRepository + include Elasticsearch::Persistence::Repository + def serialize(document) Hash[document.to_hash.map() { |k,v| v.upcase! if k == :title; [k,v] }] end + def deserialize(document) MyNote.new ActiveSupport::HashWithIndifferentAccess.new(document['_source']).deep_symbolize_keys end @@ -336,7 +452,7 @@ The `save` method allows you to store a domain object in the repository: ```ruby note = Note.new id: 1, title: 'Quick Brown Fox' repository.save(note) -# => {"_index"=>"notes_development", "_type"=>"my_note", "_id"=>"1", "_version"=>1, "created"=>true} +# => {"_index"=>"notes_development", "_type"=>"_doc", "_id"=>"1", "_version"=>1, "created"=>true} ``` The `update` method allows you to perform a partial update of a document in the repository. @@ -344,18 +460,18 @@ Use either a partial document: ```ruby repository.update id: 1, title: 'UPDATED', tags: [] -# => {"_index"=>"notes_development", "_type"=>"note", "_id"=>"1", "_version"=>2} +# => {"_index"=>"notes_development", "_type"=>"_doc", "_id"=>"1", "_version"=>2} ``` Or a script (optionally with parameters): ```ruby repository.update 1, script: 'if (!ctx._source.tags.contains(t)) { ctx._source.tags += t }', params: { t: 'foo' } -# => {"_index"=>"notes_development", "_type"=>"note", "_id"=>"1", "_version"=>3} +# => {"_index"=>"notes_development", "_type"=>"_doc", "_id"=>"1", "_version"=>3} ``` -The `delete` method allows to remove objects from the repository (pass either the object itself or its ID): +The `delete` method allows you to remove objects from the repository (pass either the object itself or its ID): ```ruby repository.delete(note) @@ -364,7 +480,7 @@ repository.delete(1) ##### Finding -The `find` method allows to find one or many documents in the storage and returns them as deserialized Ruby objects: +The `find` method allows you to find one or many documents in the storage and returns them as deserialized Ruby objects: ```ruby repository.save Note.new(id: 2, title: 'Fast White Dog') @@ -387,15 +503,15 @@ Handle the missing objects in the application code, or call `compact` on the res ##### Search -The `search` method to retrieve objects from the repository by a query string or definition in the Elasticsearch DSL: +The `search` method is used to retrieve objects from the repository by a query string or definition in the Elasticsearch DSL: ```ruby repository.search('fox or dog').to_a -# GET http://localhost:9200/notes_development/my_note/_search?q=fox +# GET http://localhost:9200/notes_development/_doc/_search?q=fox # => [, ] repository.search(query: { match: { title: 'fox dog' } }).to_a -# GET http://localhost:9200/notes_development/my_note/_search +# GET http://localhost:9200/notes_development/_doc/_search # > {"query":{"match":{"title":"fox dog"}}} # => [, ] ``` @@ -427,9 +543,15 @@ end results.total # => 2 -# Access the raw response as a Hashie::Mash instance +# Access the raw response as a Hashie::Mash instance. +# Note that a Hashie::Mash will only be created if the 'response' method is called on the results. 
results.response._shards.failed # => 0 + +# Access the raw response +results.raw_response +# => {...} + ``` #### Example Application @@ -445,262 +567,8 @@ and demonstrates a rich set of features: ### The ActiveRecord Pattern -The `Elasticsearch::Persistence::Model` module provides an implementation of the -active record [pattern](http://www.martinfowler.com/eaaCatalog/activeRecord.html), -with a familiar interface for using Elasticsearch as a persistence layer in -Ruby on Rails applications. - -All the methods are documented with comprehensive examples in the source code, -available also online at . - -#### Installation/Usage - -To use the library in a Rails application, add it to your `Gemfile` with a `require` statement: - -```ruby -gem "elasticsearch-persistence", require: 'elasticsearch/persistence/model' -``` - -To use the library without Bundler, install it, and require the file: - -```bash -gem install elasticsearch-persistence -``` - -```ruby -# In your code -require 'elasticsearch/persistence/model' -``` - -#### Model Definition - -The integration is implemented by including the module in a Ruby class. -The model attribute definition support is implemented with the -[_Virtus_](https://github.com/solnic/virtus) Rubygem, and the -naming, validation, etc. features with the -[_ActiveModel_](https://github.com/rails/rails/tree/master/activemodel) Rubygem. - -```ruby -class Article - include Elasticsearch::Persistence::Model - - # Define a plain `title` attribute - # - attribute :title, String - - # Define an `author` attribute, with multiple analyzers for this field - # - attribute :author, String, mapping: { fields: { - author: { type: 'text'}, - raw: { type: 'keyword' } - } } - - - # Define a `views` attribute, with default value - # - attribute :views, Integer, default: 0, mapping: { type: 'integer' } - - # Validate the presence of the `title` attribute - # - validates :title, presence: true - - # Execute code after saving the model. - # - after_save { puts "Successfully saved: #{self}" } -end -``` - -Attribute validations work like for any other _ActiveModel_-compatible implementation: - -```ruby -article = Article.new # => #
- -article.valid? -# => false - -article.errors.to_a -# => ["Title can't be blank"] -``` - -#### Persistence - -We can create a new article in the database... - -```ruby -Article.create id: 1, title: 'Test', author: 'John' -# PUT http://localhost:9200/articles/article/1 [status:201, request:0.015s, query:n/a] -``` - -... and find it: - -```ruby -article = Article.find(1) -# => #
- -article._index -# => "articles" - -article.id -# => "1" - -article.title -# => "Test" -``` - -To update the model, either update the attribute and save the model: - -```ruby -article.title = 'Updated' - -article.save -# => {"_index"=>"articles", "_type"=>"article", "_id"=>"1", "_version"=>2, "created"=>false} -``` - -... or use the `update_attributes` method: - -```ruby -article.update_attributes title: 'Test', author: 'Mary' -# => {"_index"=>"articles", "_type"=>"article", "_id"=>"1", "_version"=>3} -``` - -The implementation supports the familiar interface for updating model timestamps: - -```ruby -article.touch -# => => { ... "_version"=>4} -``` - -... and numeric attributes: - -```ruby -article.views -# => 0 - -article.increment :views -article.views -# => 1 -``` - -Any callbacks defined in the model will be triggered during the persistence operations: - -```ruby -article.save -# Successfully saved: #
-``` - -The model also supports familiar `find_in_batches` and `find_each` methods to efficiently -retrieve big collections of model instances, using the Elasticsearch's _Scan API_: - -```ruby -Article.find_each(_source_include: 'title') { |a| puts "===> #{a.title.upcase}" } -# GET http://localhost:9200/articles/article/_search?scroll=5m&size=20 -# GET http://localhost:9200/_search/scroll?scroll=5m&scroll_id=c2Nhb... -# ===> TEST -# GET http://localhost:9200/_search/scroll?scroll=5m&scroll_id=c2Nhb... -# => "c2Nhb..." -``` - -#### Search - -The model class provides a `search` method to retrieve model instances with a regular -search definition, including highlighting, aggregations, etc: - -```ruby -results = Article.search query: { match: { title: 'test' } }, - aggregations: { authors: { terms: { field: 'author.raw' } } }, - highlight: { fields: { title: {} } } - -puts results.first.title -# Test - -puts results.first.hit.highlight['title'] -# Test - -puts results.response.aggregations.authors.buckets.each { |b| puts "#{b['key']} : #{b['doc_count']}" } -# John : 1 -``` - -#### The Elasticsearch Client - -The module will set up a [client](https://github.com/elastic/elasticsearch-ruby/tree/master/elasticsearch), -connected to `localhost:9200`, by default. - -To use a client with different configuration: - -```ruby -Elasticsearch::Persistence.client = Elasticsearch::Client.new log: true -``` - -To set up a specific client for a specific model: - -```ruby -Article.gateway.client = Elasticsearch::Client.new host: 'api.server.org' -``` - -You might want to do this during you application bootstrap process, e.g. in a Rails initializer. - -Please refer to the -[`elasticsearch-transport`](https://github.com/elasticsearch/elasticsearch-ruby/tree/master/elasticsearch-transport) -library documentation for all the configuration options, and to the -[`elasticsearch-api`](http://rubydoc.info/gems/elasticsearch-api) library documentation -for information about the Ruby client API. - -#### Accessing the Repository Gateway and the Client - -The integration with Elasticsearch is implemented by embedding the repository object in the model. -You can access it through the `gateway` method: - -```ruby -Artist.gateway.client.info -# GET http://localhost:9200/ [status:200, request:0.011s, query:n/a] -# => {"status"=>200, "name"=>"Lightspeed", ...} -``` - -#### Rails Compatibility - -The model instances are fully compatible with Rails' conventions and helpers: - -```ruby -url_for article -# => "http://localhost:3000/articles/1" - -div_for article -# => '
' -``` - -... as well as form values for dates and times: - -```ruby -article = Article.new "title" => "Date", "published(1i)"=>"2014", "published(2i)"=>"1", "published(3i)"=>"1" - -article.published.iso8601 -# => "2014-01-01" -``` - -The library provides a Rails ORM generator to facilitate building the application scaffolding: - -```bash -rails generate scaffold Person name:String email:String birthday:Date --orm=elasticsearch -``` - -#### Example application - -A fully working Ruby on Rails application can be generated with the following command: - -```bash -rails new music --force --skip --skip-bundle --skip-active-record --template https://raw.githubusercontent.com/elastic/elasticsearch-rails/master/elasticsearch-persistence/examples/music/template.rb -``` - -The application demonstrates: - -* How to set up model attributes with custom mappings -* How to define model relationships with Elasticsearch's parent/child -* How to configure models to use a common index, and create the index with proper mappings -* How to use Elasticsearch's completion suggester to drive auto-complete functionality -* How to use Elasticsearch-persisted models in Rails' views and forms -* How to write controller tests - -The source files for the application are available in the [`examples/music`](examples/music) folder. +The ActiveRecord pattern has been deprecated as of version 6.0.0 of this gem. Please use the +[Repository Pattern](#the-repository-pattern) instead. ## License diff --git a/elasticsearch-persistence/Rakefile b/elasticsearch-persistence/Rakefile index 61038d08c..a2f219a9f 100644 --- a/elasticsearch-persistence/Rakefile +++ b/elasticsearch-persistence/Rakefile @@ -7,24 +7,27 @@ task :test => 'test:unit' # ----- Test tasks ------------------------------------------------------------ require 'rake/testtask' +require 'rspec/core/rake_task' + namespace :test do - Rake::TestTask.new(:unit) do |test| - test.libs << 'lib' << 'test' - test.test_files = FileList["test/unit/**/*_test.rb"] - test.verbose = false - test.warning = false - end - Rake::TestTask.new(:integration) do |test| - test.libs << 'lib' << 'test' - test.test_files = FileList["test/integration/**/*_test.rb"] + RSpec::Core::RakeTask.new(:spec) + + Rake::TestTask.new(:all) do |test| test.verbose = false test.warning = false + test.deps = [ :spec ] end +end - Rake::TestTask.new(:all) do |test| - test.libs << 'lib' << 'test' - test.test_files = FileList["test/unit/**/*_test.rb", "test/integration/**/*_test.rb"] +namespace :bundle do + desc 'Install gem dependencies' + task :install do + puts '-' * 80 + Bundler.with_unbundled_env do + sh 'bundle install' + end + puts '-' * 80 end end diff --git a/elasticsearch-persistence/elasticsearch-persistence.gemspec b/elasticsearch-persistence/elasticsearch-persistence.gemspec index 311325fdc..7a93a80c9 100644 --- a/elasticsearch-persistence/elasticsearch-persistence.gemspec +++ b/elasticsearch-persistence/elasticsearch-persistence.gemspec @@ -21,19 +21,18 @@ Gem::Specification.new do |s| s.extra_rdoc_files = [ "README.md", "LICENSE.txt" ] s.rdoc_options = [ "--charset=UTF-8" ] - s.required_ruby_version = ">= 1.9.3" + s.required_ruby_version = ">= 2.2" - s.add_dependency "elasticsearch", '~> 5' - s.add_dependency "elasticsearch-model", '~> 5' + s.add_dependency "elasticsearch", '~> 6' + s.add_dependency "elasticsearch-model", '>= 5' s.add_dependency "activesupport", '> 4' s.add_dependency "activemodel", '> 4' s.add_dependency "hashie" - s.add_dependency "virtus" - s.add_development_dependency "bundler", 
"~> 1.5" + s.add_development_dependency "bundler" s.add_development_dependency "rake", "~> 11.1" - s.add_development_dependency "oj" + s.add_development_dependency "oj" unless defined?(JRUBY_VERSION) s.add_development_dependency "rails", '> 4' @@ -45,7 +44,7 @@ Gem::Specification.new do |s| s.add_development_dependency "mocha" s.add_development_dependency "turn" s.add_development_dependency "yard" - s.add_development_dependency "ruby-prof" + s.add_development_dependency "ruby-prof" unless defined?(JRUBY_VERSION) s.add_development_dependency "pry" s.add_development_dependency "simplecov" diff --git a/elasticsearch-persistence/examples/music/album.rb b/elasticsearch-persistence/examples/music/album.rb deleted file mode 100644 index a805a2111..000000000 --- a/elasticsearch-persistence/examples/music/album.rb +++ /dev/null @@ -1,54 +0,0 @@ -class Meta - include Virtus.model - - attribute :rating - attribute :have - attribute :want - attribute :formats -end - -class Album - include Elasticsearch::Persistence::Model - - index_name [Rails.application.engine_name, Rails.env].join('-') - - - mapping _parent: { type: 'artist' } do - end - - attribute :artist - attribute :artist_id, String, mapping: { index: 'not_analyzed' } - attribute :label, Hash, mapping: { type: 'object' } - - attribute :title - attribute :released, Date - attribute :notes - attribute :uri - - attribute :tracklist, Array, mapping: { type: 'object' } - - attribute :styles - attribute :meta, Meta, mapping: { type: 'object' } - - attribute :suggest, Hashie::Mash, mapping: { - type: 'object', - properties: { - title: { - type: 'object', - properties: { - input: { type: 'completion' }, - output: { type: 'keyword', index: false }, - payload: { type: 'object', enabled: false } - } - }, - track: { - type: 'object', - properties: { - input: { type: 'completion' }, - output: { type: 'keyword', index: false }, - payload: { type: 'object', enabled: false } - } - } - } - } -end diff --git a/elasticsearch-persistence/examples/music/artist.rb b/elasticsearch-persistence/examples/music/artist.rb deleted file mode 100644 index 17c525619..000000000 --- a/elasticsearch-persistence/examples/music/artist.rb +++ /dev/null @@ -1,70 +0,0 @@ -class Artist - include Elasticsearch::Persistence::Model - - index_name [Rails.application.engine_name, Rails.env].join('-') - - analyzed_and_raw = { fields: { - name: { type: 'text', analyzer: 'snowball' }, - raw: { type: 'keyword' } - } } - - attribute :name, String, mapping: analyzed_and_raw - - attribute :profile - attribute :date, Date - - attribute :members, String, default: [], mapping: analyzed_and_raw - attribute :members_combined, String, default: [], mapping: { analyzer: 'snowball' } - - attribute :urls, String, default: [] - attribute :album_count, Integer, default: 0 - - attribute :suggest, Hashie::Mash, mapping: { - type: 'object', - properties: { - name: { - type: 'object', - properties: { - input: { type: 'completion' }, - output: { type: 'keyword', index: false }, - payload: { type: 'object', enabled: false } - } - }, - member: { - type: 'object', - properties: { - input: { type: 'completion' }, - output: { type: 'keyword', index: false }, - payload: { type: 'object', enabled: false } - } - } - } - } - - validates :name, presence: true - - def albums - Album.search( - { query: { - has_parent: { - type: 'artist', - query: { - bool: { - filter: { - ids: { values: [ self.id ] } - } - } - } - } - }, - sort: 'released', - size: 100 - }, - { type: 'album' } - ) - end - - def to_param - [id, 
name.parameterize].join('-') - end -end diff --git a/elasticsearch-persistence/examples/music/artists/_form.html.erb b/elasticsearch-persistence/examples/music/artists/_form.html.erb deleted file mode 100644 index 55273679c..000000000 --- a/elasticsearch-persistence/examples/music/artists/_form.html.erb +++ /dev/null @@ -1,8 +0,0 @@ -<%= simple_form_for @artist do |f| %> - <%= f.input :name %> - <%= f.input :profile, as: :text %> - <%= f.input :date, as: :date %> - <%= f.input :members, hint: 'Separate names by comma', input_html: { value: f.object.members.join(', ') } %> - - <%= f.button :submit %> -<% end %> diff --git a/elasticsearch-persistence/examples/music/artists/artists_controller.rb b/elasticsearch-persistence/examples/music/artists/artists_controller.rb deleted file mode 100644 index 458c243f7..000000000 --- a/elasticsearch-persistence/examples/music/artists/artists_controller.rb +++ /dev/null @@ -1,67 +0,0 @@ -class ArtistsController < ApplicationController - before_action :set_artist, only: [:show, :edit, :update, :destroy] - - rescue_from Elasticsearch::Persistence::Repository::DocumentNotFound do - render file: "public/404.html", status: 404, layout: false - end - - def index - @artists = Artist.all sort: 'name.raw', _source: ['name', 'album_count'] - end - - def show - @albums = @artist.albums - end - - def new - @artist = Artist.new - end - - def edit - end - - def create - @artist = Artist.new(artist_params) - - respond_to do |format| - if @artist.save refresh: true - format.html { redirect_to @artist, notice: 'Artist was successfully created.' } - format.json { render :show, status: :created, location: @artist } - else - format.html { render :new } - format.json { render json: @artist.errors, status: :unprocessable_entity } - end - end - end - - def update - respond_to do |format| - if @artist.update(artist_params, refresh: true) - format.html { redirect_to @artist, notice: 'Artist was successfully updated.' } - format.json { render :show, status: :ok, location: @artist } - else - format.html { render :edit } - format.json { render json: @artist.errors, status: :unprocessable_entity } - end - end - end - - def destroy - @artist.destroy refresh: true - respond_to do |format| - format.html { redirect_to artists_url, notice: 'Artist was successfully destroyed.' } - format.json { head :no_content } - end - end - - private - def set_artist - @artist = Artist.find(params[:id].split('-').first) - end - - def artist_params - a = params.require(:artist) - a[:members] = a[:members].split(/,\s?/) unless a[:members].is_a?(Array) || a[:members].blank? - return a - end -end diff --git a/elasticsearch-persistence/examples/music/artists/artists_controller_test.rb b/elasticsearch-persistence/examples/music/artists/artists_controller_test.rb deleted file mode 100644 index 3307f5e47..000000000 --- a/elasticsearch-persistence/examples/music/artists/artists_controller_test.rb +++ /dev/null @@ -1,53 +0,0 @@ -require 'test_helper' - -class ArtistsControllerTest < ActionController::TestCase - setup do - IndexManager.create_index force: true - @artist = Artist.create(id: 1, name: 'TEST') - Artist.gateway.refresh_index! - end - - test "should get index" do - get :index - assert_response :success - assert_not_nil assigns(:artists) - end - - test "should get new" do - get :new - assert_response :success - end - - test "should create artist" do - assert_difference('Artist.count') do - post :create, artist: { name: @artist.name } - Artist.gateway.refresh_index! 
- end - - assert_redirected_to artist_path(assigns(:artist)) - end - - test "should show artist" do - get :show, id: @artist - assert_response :success - end - - test "should get edit" do - get :edit, id: @artist - assert_response :success - end - - test "should update artist" do - patch :update, id: @artist, artist: { name: @artist.name } - assert_redirected_to artist_path(assigns(:artist)) - end - - test "should destroy artist" do - assert_difference('Artist.count', -1) do - delete :destroy, id: @artist - Artist.gateway.refresh_index! - end - - assert_redirected_to artists_path - end -end diff --git a/elasticsearch-persistence/examples/music/artists/index.html.erb b/elasticsearch-persistence/examples/music/artists/index.html.erb deleted file mode 100644 index 6747b3056..000000000 --- a/elasticsearch-persistence/examples/music/artists/index.html.erb +++ /dev/null @@ -1,60 +0,0 @@ -
-

- Artists - <%= button_to 'New Artist', new_artist_path, method: 'get', tabindex: 5 %> -

-
- - - -
- <% @artists.each do |artist| %> - <%= div_for artist, class: 'result clearfix' do %> -

- <%= image_tag "http://ruby.elastic.co.s3-website-us-east-1.amazonaws.com/demo/music/bands/#{artist.id}.jpeg", height: '50px', class: 'band' %> - <%= link_to artist do %> - <%= artist.name %> - <%= pluralize artist.album_count, 'album' %> - <% end %> -

-
- <%= button_to 'Edit', edit_artist_path(artist), method: 'get' %> - <%= button_to 'Destroy', artist, method: :delete, data: { confirm: 'Are you sure?' } %> -
- <% end %> - <% end %> -
- -<% if @artists.empty? %> -
-

The search hasn't returned any results...

-
-<% end %> - - - - diff --git a/elasticsearch-persistence/examples/music/artists/show.html.erb b/elasticsearch-persistence/examples/music/artists/show.html.erb deleted file mode 100644 index e1a9cdf01..000000000 --- a/elasticsearch-persistence/examples/music/artists/show.html.erb +++ /dev/null @@ -1,54 +0,0 @@ -
-
-

- <%= link_to "〈".html_safe, artists_path, title: "Back" %> - <%= image_tag "http://ruby.elastic.co.s3-website-us-east-1.amazonaws.com/demo/music/bands/#{@artist.id}.jpeg", height: '50px', class: 'band' %> - <%= @artist.name %> - <%= button_to 'Edit', edit_artist_path(@artist), method: 'get' %> -

-
- -

<%= notice %>

- -
- <%= @artist.members.to_sentence last_word_connector: ' and ' %> | - <%= pluralize @albums.size, 'album' %> -

<%= @artist.profile %>

-
- -
- <% @albums.each do |album| %> - <%= div_for album, class: 'clearfix' do %> -

- <%= album.title %> -
- <%= album.meta.formats.join(', ') %> - <%= album.released %> -
-

- -
- <%= image_tag "http://ruby.elastic.co.s3-website-us-east-1.amazonaws.com/demo/music/covers/#{album.id}.jpeg", width: '100px', class: 'cover' %> -
- -
- <% album.tracklist.in_groups_of(album.tracklist.size/2+1).each_with_index do |half, g| %> -
    start="<%= g < 1 ? 1 : album.tracklist.size/2+2 %>"> - <% half.compact.each_with_index do |track, i| %> -
  • - <%= g < 1 ? i+1 : i+(g*album.tracklist.size/2+2) %> - <%= track['title'] %> - <%= track['duration'] %> -
  • - <% end %> -
- <% end %> -
- <% end %> - - <% end %> - - - -
-
diff --git a/elasticsearch-persistence/examples/music/assets/application.css b/elasticsearch-persistence/examples/music/assets/application.css deleted file mode 100644 index 816ebff8c..000000000 --- a/elasticsearch-persistence/examples/music/assets/application.css +++ /dev/null @@ -1,257 +0,0 @@ -/* - *= require_tree . - *= require_self - *= require ui-lightness/jquery-ui-1.10.4.custom.min.css - */ - -.clearfix { - *zoom: 1; -} - -.clearfix:before, -.clearfix:after { - display: table; - line-height: 0; - content: ""; -} - -.clearfix:after { - clear: both; -} - -body { - font-family: 'Helvetica Neue', Helvetica, sans-serif !important; - margin: 2em 4em; -} - -header { - margin: 0; - padding: 0 0 1em 0; - border-bottom: 1px solid #666; -} - -header h1 { - color: #999; - font-weight: 100; - text-transform: uppercase; - margin: 0; padding: 0; -} - -header a { - color: #0b6aff; - text-decoration: none; -} - -header .back { - font-size: 100%; - margin: 0 0.5em 0 -0.5em; -} - -h1 form { - float: right; -} - -#searchbox { - border-bottom: 1px solid #666; -} - -#searchbox input { - color: #444; - font-size: 100%; - font-weight: 100; - border: none; - padding: 1em 0 1em 0; - width: 100%; -} - -#searchbox input:focus { - outline-width: 0; -} - -.actions form { - float: right; - position: relative; - top: 0.2em; -} - -.no-results { - font-weight: 200; - font-size: 200%; -} - -.result, -.artist { - padding: 1em 0 1em 0; - margin: 0; - border-bottom: 1px solid #999; -} - -.result:hover, -.artist:hover { - background: #f9f9f9; -} - -.result h2, -.artist h2 { - color: #444; - margin: 0; - padding: 0; -} - -.artist h2 { - float: left; - margin-left: 50px; -} - -.artist.search.result h2 { - float: none; -} - -.artist h1 .back { - margin-right: 65px; -} - -.artist h1 img.band { - left: 120px; - top: 50px; -} - -.result h2 a, -.artist h2 a { - color: #444; -} - -.result h2 small, -.artist h2 small { - font-size: 70%; - font-weight: 100; - margin-left: 0.5em; -} - -.result h2 a, -.artist h2 a { - text-decoration: none; -} - -.result h2 a:hover name, -.artist h2 a:hover .name { - text-decoration: underline; -} - -.result .highlight.small { - font-size: 90%; - font-weight: 200; - padding: 0; - margin: 0.25em 0 0.25em 50px; -} - -.result .small .label { - color: #999; - font-size: 80%; - /*min-width: 5em;*/ - display: inline-block; -} - -.artist-info { - color: #5f5f5f; - text-transform: uppercase; - font-weight: 200; - border-bottom: 1px solid #666; - padding: 0 0 1em 0; - margin: 0 0 1em 0; -} - -.artist-profile { - color: #999; - font-size: 95%; - font-weight: 100; - text-transform: none; - padding: 0; - margin: 0.25em 0 0 0; -} - -.artist img.band { - position: absolute; - left: 85px; - margin-top: 14px; - transform: translate(-50%,-50%); - clip-path: circle(20px at center); -} - -.album { - margin: 0 0 4em 0; -} - -.album.search.result { - margin: 0; -} - -.album .cover { - float: left; - width: 150px; -} - -.album.search.result .cover { - width: 40px; - margin-right: 10px; -} - -.album .cover img { - border: 1px solid rgba(0,0,0,0.15); - box-shadow: 0px 0px 1px 0px rgba(0,0,0,0.05); -} - -.album .content { - float: left; - margin-left: 25px; -} - -.album .content ul { - float: left; - margin: 0 2em 0 0; - padding: 0; - min-width: 18em; -} - -.album .content ul li { - line-height: 1.5em; - padding: 0.5em 0 0.5em 0; - border-bottom:1px solid #f8f8f8; - list-style: none; -} - -.album .content ul li .counter { - color: #999; - font-style: normal; - font-size: 80%; - font-weight: 100; - margin-right: 0.5em; -} - 
-.album h3 { - margin: 0; padding: 0; - border-bottom: 2px solid #e0e0e0; - padding: 0 0 0.5em 0; - margin: 0 0 1em 0; -} - -.album h3 .title { - text-transform: uppercase; - font-weight: 200; -} - -.album small { - color: #a3a3a3; - font-weight: 200; -} - -.album .info { - float: right; -} - -em[class^=hl] { - font-style: normal; - background: #e6efff; - padding: 0.15em 0.35em; - border-radius: 5px; -} \ No newline at end of file diff --git a/elasticsearch-persistence/examples/music/assets/autocomplete.css b/elasticsearch-persistence/examples/music/assets/autocomplete.css deleted file mode 100644 index 7f2340969..000000000 --- a/elasticsearch-persistence/examples/music/assets/autocomplete.css +++ /dev/null @@ -1,48 +0,0 @@ -.ui-autocomplete { - font-family: 'Helvetica Neue', Helvetica, sans-serif !important; - border: none !important; - border-radius: 0 !important; - background-color: #fff !important; - margin: 0 !important; - padding: 0 !important; - box-shadow: 0px 3px 3px 0px rgba(0,0,0,0.75); -} - -.ui-autocomplete-category { - color: #fff; - background: #222; - font-size: 90%; - font-weight: 300; - text-transform: uppercase; - margin: 0 !important; - padding: 0.25em 0.5em 0.25em 0.5em; -} - -.ui-autocomplete-item { - border-bottom: 1px solid #000; - margin: 0 !important; - padding: 0 !important; -} - -.ui-autocomplete-item:hover, -.ui-autocomplete-item:focus { - color: #fff !important; - background: #0b6aff !important; -} - -.ui-state-focus, -.ui-state-focus a, -.ui-state-active, -.ui-state-active a, -.ui-autocomplete-item:hover a { - color: #fff !important; - background: #0b6aff !important; - outline: none !important; - border: none !important; - border-radius: 0 !important; -} - -a.ui-state-focus, -a.ui-state-active { - margin: 0px !important; -} diff --git a/elasticsearch-persistence/examples/music/assets/blank_artist.png b/elasticsearch-persistence/examples/music/assets/blank_artist.png deleted file mode 100644 index 0dfda13bd..000000000 Binary files a/elasticsearch-persistence/examples/music/assets/blank_artist.png and /dev/null differ diff --git a/elasticsearch-persistence/examples/music/assets/blank_cover.png b/elasticsearch-persistence/examples/music/assets/blank_cover.png deleted file mode 100644 index 8c513407a..000000000 Binary files a/elasticsearch-persistence/examples/music/assets/blank_cover.png and /dev/null differ diff --git a/elasticsearch-persistence/examples/music/assets/form.css b/elasticsearch-persistence/examples/music/assets/form.css deleted file mode 100644 index 3a937e310..000000000 --- a/elasticsearch-persistence/examples/music/assets/form.css +++ /dev/null @@ -1,113 +0,0 @@ -/* Based on https://github.com/plataformatec/simple_form/wiki/CSS-for-simple_form */ - -body.edit h1, -body.new h1 { - color: #999; - font-size: 100%; - text-transform: uppercase; - margin: 0 0 1em 5.5em; -} - -body.edit a[href^="/artists"], -body.new a[href^="/artists"], -body.edit a[href^="/music/artists"], -body.new a[href^="/music/artists"] { - color: #222; - background: #ccc; - text-decoration: none; - border-radius: 0.3em; - padding: 0.25em 0.5em; - margin: 2em 0 0 5.5em; - display: inline-block; -} - -body.edit a[href^="/artists"]:hover, -body.new a[href^="/artists"]:hover, -body.edit a[href^="/music/artists"]:hover, -body.new a[href^="/music/artists"]:hover { - color: #fff; - background: #333; -} - -body.edit a[href^="/artists"]:last-child, -body.new a[href^="/artists"]:last-child, -body.edit a[href^="/music/artists"]:last-child, -body.new a[href^="/music/artists"]:last-child { 
- margin-left: 0; -} - -.simple_form div.input { - margin-bottom: 1em; - clear: both; -} - -.simple_form label { - color: #878787; - font-size: 80%; - text-transform: uppercase; - font-weight: 200; - float: left; - width: 5em; - text-align: right; - margin: 0.25em 1em; -} - -div.boolean, .simple_form input[type='submit'] { - margin-left: 8.5em; -} - -.field_with_errors input { - border: 2px solid #c70008 !important; -} - -.simple_form .error { - color: #fff !important; - background: #c70008; - font-weight: bold; - clear: left; - display: block; - padding: 0.25em 0.5em; - margin-left: 5.6em; - width: 27.45em; -} - -.simple_form .hint { - color: #878787; - font-size: 80%; - font-style: italic; - display: block; - margin: 0.25em 0 0 7em; - clear: left; -} - -input { - margin: 0; -} - -input.radio { - margin-right: 5px; - vertical-align: -3px; -} - -input.check_boxes { - margin-left: 3px; - vertical-align: -3px; -} - -label.collection_check_boxes { - float: none; - margin: 0; - vertical-align: -2px; - margin-left: 2px; -} - -input.string, -textarea.text { - padding: 0.5em; - min-width: 40em; - border: 1px solid #ccc; -} - -textarea.text { - min-height: 5em; -} diff --git a/elasticsearch-persistence/examples/music/index_manager.rb b/elasticsearch-persistence/examples/music/index_manager.rb deleted file mode 100644 index cbf9e09ee..000000000 --- a/elasticsearch-persistence/examples/music/index_manager.rb +++ /dev/null @@ -1,73 +0,0 @@ -require 'open-uri' - -class IndexManager - def self.create_index(options={}) - client = Artist.gateway.client - index_name = Artist.index_name - - client.indices.delete index: index_name rescue nil if options[:force] - - settings = Artist.settings.to_hash.merge(Album.settings.to_hash) - mappings = Artist.mappings.to_hash.merge(Album.mappings.to_hash) - - client.indices.create index: index_name, - body: { - settings: settings.to_hash, - mappings: mappings.to_hash } - end - - def self.import_from_yaml(source, options={}) - create_index force: true if options[:force] - - input = open(source) - artists = YAML.load_documents input - - artists.each do |artist| - Artist.create artist.update( - 'album_count' => artist['releases'].size, - 'members_combined' => artist['members'].join(', '), - 'suggest' => { - 'name' => { - 'input' => { 'input' => artist['namevariations'].unshift(artist['name']).reject { |d| d.to_s.empty? } }, - 'output' => artist['name'], - 'payload' => { - 'url' => "/artists/#{artist['id']}" - } - }, - 'member' => { - 'input' => { 'input' => artist['members'] }, - 'output' => artist['name'], - 'payload' => { - 'url' => "/artists/#{artist['id']}" - } - } - } - ) - - artist['releases'].each do |album| - album.update( - 'suggest' => { - 'title' => { - 'input' => { 'input' => album['title'] }, - 'output' => album['title'], - 'payload' => { - 'url' => "/artists/#{artist['id']}#album_#{album['id']}" - } - }, - 'track' => { - 'input' => { 'input' => album['tracklist'].map { |d| d['title'] }.reject { |d| d.to_s.empty? 
} }, - 'output' => album['title'], - 'payload' => { - 'url' => "/artists/#{artist['id']}#album_#{album['id']}" - } - } - } - ) - album['notes'] = album['notes'].to_s.gsub(/<.+?>/, '').gsub(/ {2,}/, '') - album['released'] = nil if album['released'] < 1 - - Album.create album, id: album['id'], parent: artist['id'] - end - end - end -end diff --git a/elasticsearch-persistence/examples/music/search/index.html.erb b/elasticsearch-persistence/examples/music/search/index.html.erb deleted file mode 100644 index 098f626e5..000000000 --- a/elasticsearch-persistence/examples/music/search/index.html.erb +++ /dev/null @@ -1,95 +0,0 @@ -
-

- <%= link_to "〈".html_safe, :back, title: "Back" %> - Artists & Albums -

-
- - - -
- <% @artists.each do |artist| %> - <%= content_tag :div, class: 'artist search result clearfix' do %> -

- <%= image_tag "http://ruby.elastic.co.s3-website-us-east-1.amazonaws.com/demo/music/bands/#{artist.id}.jpeg", height: '45px', class: 'band' %> - <%= link_to artist do %> - <%= highlighted(artist, :name) %> - <%= pluralize artist.album_count, 'album' %> - <% end %> -

- <% if highlight = highlight(artist, :members_combined) %> -

- Members - <%= highlight.first.html_safe %> -

- <% end %> - <% if highlight = highlight(artist, :profile) %> -

- Profile - <%= highlight.join('…').html_safe %> -

- <% end %> - <% end %> - <% end %> -
- -
- <% @albums.each do |album| %> - <%= content_tag :div, class: 'album search result clearfix' do %> -

- <%= image_tag "http://ruby.elastic.co.s3-website-us-east-1.amazonaws.com/demo/music/covers/#{album.id}.jpeg", width: '45px', class: 'cover' %> - <%= link_to artist_path(album.artist_id, anchor: "album_#{album.id}") do %> - <%= highlighted(album, :title) %> - <%= album.artist %> - (<%= [album.meta.formats.first, album.released].compact.join(' ') %>) - <% end %> -

- - <% if highlight = highlight(album, 'tracklist.title') %> -

- Tracks - <%= highlight.join('…').html_safe %> -

- <% end %> - - <% if highlight = highlight(album, :notes) %> -

- Notes - <%= highlight.map { |d| d.gsub(/^\.\s?/, '') }.join('…').html_safe %> -

- <% end %> - <% end %> - <% end %> -
- -<% if @artists.empty? && @albums.empty? %> -
-

The search hasn't returned any results...

-
-<% end %> - - diff --git a/elasticsearch-persistence/examples/music/search/search_controller.rb b/elasticsearch-persistence/examples/music/search/search_controller.rb deleted file mode 100644 index bb845c5b6..000000000 --- a/elasticsearch-persistence/examples/music/search/search_controller.rb +++ /dev/null @@ -1,41 +0,0 @@ -class SearchController < ApplicationController - - def index - tags = { pre_tags: '', post_tags: '' } - @artists = Artist.search \ - query: { - multi_match: { - query: params[:q], - fields: ['name^10','members^2','profile'] - } - }, - highlight: { - tags_schema: 'styled', - fields: { - name: { number_of_fragments: 0 }, - members_combined: { number_of_fragments: 0 }, - profile: { fragment_size: 50 } - } - } - - @albums = Album.search \ - query: { - multi_match: { - query: params[:q], - fields: ['title^100','tracklist.title^10','notes^1'] - } - }, - highlight: { - tags_schema: 'styled', - fields: { - title: { number_of_fragments: 0 }, - 'tracklist.title' => { number_of_fragments: 0 }, - notes: { fragment_size: 50 } - } - } - end - - def suggest - render json: Suggester.new(params) - end -end diff --git a/elasticsearch-persistence/examples/music/search/search_controller_test.rb b/elasticsearch-persistence/examples/music/search/search_controller_test.rb deleted file mode 100644 index a1c95cd0c..000000000 --- a/elasticsearch-persistence/examples/music/search/search_controller_test.rb +++ /dev/null @@ -1,12 +0,0 @@ -require 'test_helper' - -class SearchControllerTest < ActionController::TestCase - setup do - IndexManager.create_index force: true - end - - test "should get suggest" do - get :suggest, term: 'foo' - assert_response :success - end -end diff --git a/elasticsearch-persistence/examples/music/search/search_helper.rb b/elasticsearch-persistence/examples/music/search/search_helper.rb deleted file mode 100644 index 65a57c322..000000000 --- a/elasticsearch-persistence/examples/music/search/search_helper.rb +++ /dev/null @@ -1,15 +0,0 @@ -module SearchHelper - - def highlight(object, field) - object.try(:hit).try(:highlight).try(field) - end - - def highlighted(object, field) - if h = object.try(:hit).try(:highlight).try(field).try(:first) - h.html_safe - else - field.to_s.split('.').reduce(object) { |result,item| result.try(item) } - end - end - -end diff --git a/elasticsearch-persistence/examples/music/suggester.rb b/elasticsearch-persistence/examples/music/suggester.rb deleted file mode 100644 index 5438cf11a..000000000 --- a/elasticsearch-persistence/examples/music/suggester.rb +++ /dev/null @@ -1,69 +0,0 @@ -class Suggester - attr_reader :response - - def initialize(params={}) - @term = params[:term] - end - - def response - @response ||= begin - Elasticsearch::Persistence.client.search \ - index: Artist.index_name, - body: { - suggest: { - artists: { - text: @term, - completion: { field: 'suggest.name.input', size: 25 } - }, - members: { - text: @term, - completion: { field: 'suggest.member.input', size: 25 } - }, - albums: { - text: @term, - completion: { field: 'suggest.title.input', size: 25 } - }, - tracks: { - text: @term, - completion: { field: 'suggest.track.input', size: 25 } - } - }, - _source: ['suggest.*'] - } - end - end - - def as_json(options={}) - return [] unless response['suggest'] - - output = [ - { label: 'Bands', - value: response['suggest']['artists'][0]['options'].map do |d| - { text: d['_source']['suggest']['name']['output'], - url: d['_source']['suggest']['name']['payload']['url'] } - end - }, - - { label: 'Albums', - value: 
response['suggest']['albums'][0]['options'].map do |d| - { text: d['_source']['suggest']['title']['output'], - url: d['_source']['suggest']['title']['payload']['url'] } - end - }, - - { label: 'Band Members', - value: response['suggest']['members'][0]['options'].map do |d| - { text: "#{d['text']} (#{d['_source']['suggest']['member']['output']})", - url: d['_source']['suggest']['member']['payload']['url'] } - end - }, - - { label: 'Album Tracks', - value: response['suggest']['tracks'][0]['options'].map do |d| - { text: "#{d['text']} (#{d['_source']['suggest']['track']['output']})", - url: d['_source']['suggest']['track']['payload']['url'] } - end - } - ] - end -end diff --git a/elasticsearch-persistence/examples/music/template.rb b/elasticsearch-persistence/examples/music/template.rb deleted file mode 100644 index 4759b642d..000000000 --- a/elasticsearch-persistence/examples/music/template.rb +++ /dev/null @@ -1,430 +0,0 @@ -# ====================================================================================== -# Template for generating a Rails application with support for Elasticsearch persistence -# ====================================================================================== -# -# This file creates a fully working Rails application with support for storing and retrieving models -# in Elasticsearch, using the `elasticsearch-persistence` gem -# (https://github.com/elasticsearch/elasticsearch-rails/tree/master/elasticsearch-persistence). -# -# Requirements: -# ------------- -# -# * Git -# * Ruby >= 1.9.3 -# * Rails >= 5 -# * Java >= 8 (for Elasticsearch) -# -# Usage: -# ------ -# -# $ time rails new music --force --skip --skip-bundle --skip-active-record --template https://raw.githubusercontent.com/elastic/elasticsearch-rails/master/elasticsearch-persistence/examples/music/template.rb -# -# ===================================================================================================== - -STDOUT.sync = true -STDERR.sync = true - -require 'uri' -require 'json' -require 'net/http' - -at_exit do - pid = File.read("#{destination_root}/tmp/pids/elasticsearch.pid") rescue nil - if pid - say_status "Stop", "Elasticsearch", :yellow - run "kill #{pid}" - end -end - -$elasticsearch_url = ENV.fetch('http://github.com/ELASTICSEARCH_URL', 'http://localhost:9200/') - -# ----- Check & download Elasticsearch ------------------------------------------------------------ - -cluster_info = Net::HTTP.get(URI.parse($elasticsearch_url)) rescue nil -cluster_info = JSON.parse(cluster_info) if cluster_info - -if cluster_info.nil? || cluster_info['version']['number'] < '5' - # Change the port when incompatible Elasticsearch version is running on localhost:9200 - if $elasticsearch_url == 'http://localhost:9200/' && cluster_info && cluster_info['version']['number'] < '5' - $change_port = '9280' - $elasticsearch_url = "http://localhost/#{$change_port}" - end - - COMMAND = <<-COMMAND.gsub(/^ /, '') - curl -# -O "https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-5.2.1.tar.gz" - tar -zxf elasticsearch-5.2.1.tar.gz - rm -f elasticsearch-5.2.1.tar.gz - ./elasticsearch-5.2.1/bin/elasticsearch -d -p #{destination_root}/tmp/pids/elasticsearch.pid #{$change_port.nil? ? '' : "-E http.port=#{$change_port}" } - COMMAND - - puts "\n" - say_status "ERROR", "Elasticsearch not running!\n", :red - puts '-'*80 - say_status '', "It appears that Elasticsearch 5 is not running on this machine." - say_status '', "Is it installed? 
Do you want me to install and run it for you with this command?\n\n" - COMMAND.each_line { |l| say_status '', "$ #{l}" } - puts - say_status '', "(To uninstall, just remove the generated application directory.)" - puts '-'*80, '' - - if yes?("Install Elasticsearch?", :bold) - puts - say_status "Install", "Elasticsearch", :yellow - - java_info = `java -version 2>&1` - - unless java_info.match /1\.[8-9]/ - puts - say_status "ERROR", "Required Java version (1.8) not found, exiting...", :red - exit(1) - end - - commands = COMMAND.split("\n") - exec = commands.pop - inside("vendor") do - commands.each { |command| run command } - run "(#{exec})" # Launch Elasticsearch in subshell - end - - # Wait for Elasticsearch to be up... - # - system <<-COMMAND - until $(curl --silent --head --fail #{$elasticsearch_url} > /dev/null 2>&1); do - printf '.'; sleep 1 - done - COMMAND - end -end unless ENV['RAILS_NO_ES_INSTALL'] - -# ----- Application skeleton ---------------------------------------------------------------------- - -run "touch tmp/.gitignore" - -append_to_file ".gitignore", "vendor/elasticsearch-5.2.1/\n" - -git :init -git add: "." -git commit: "-m 'Initial commit: Clean application'" - -# ----- Add README -------------------------------------------------------------------------------- - -puts -say_status "README", "Adding Readme...\n", :yellow -puts '-'*80, ''; sleep 0.25 - -remove_file 'README.md' - -create_file 'README.md', <<-README -= Ruby on Rails and Elasticsearch persistence: Example application - -README - - -git add: "." -git commit: "-m 'Added README for the application'" - -# ----- Use Pry as the Rails console -------------------------------------------------------------- - -puts -say_status "Rubygems", "Adding Pry into Gemfile...\n", :yellow -puts '-'*80, ''; - -gem_group :development do - gem 'pry' - gem 'pry-rails' -end - -git add: "Gemfile*" -git commit: "-m 'Added Pry into the Gemfile'" - -# ----- Auxiliary gems ---------------------------------------------------------------------------- - -puts -say_status "Rubygems", "Adding libraries into the Gemfile...\n", :yellow -puts '-'*80, ''; sleep 0.75 - -gem "simple_form" - -git add: "Gemfile*" -git commit: "-m 'Added auxiliary libraries into the Gemfile'" - -# ----- Remove CoffeeScript, Sass and "all that jazz" --------------------------------------------- - -comment_lines 'Gemfile', /gem 'coffee/ -comment_lines 'Gemfile', /gem 'sass/ -comment_lines 'Gemfile', /gem 'uglifier/ -uncomment_lines 'Gemfile', /gem 'therubyracer/ - -# ----- Add gems into Gemfile --------------------------------------------------------------------- - -puts -say_status "Rubygems", "Adding Elasticsearch libraries into Gemfile...\n", :yellow -puts '-'*80, ''; sleep 0.75 - -gem 'elasticsearch', git: 'https://github.com/elastic/elasticsearch-ruby.git' -gem 'elasticsearch-model', git: 'https://github.com/elastic/elasticsearch-rails.git', require: 'elasticsearch/model' -gem 'elasticsearch-persistence', git: 'https://github.com/elastic/elasticsearch-rails.git', require: 'elasticsearch/persistence/model' -gem 'elasticsearch-rails', git: 'https://github.com/elastic/elasticsearch-rails.git' - -git add: "Gemfile*" -git commit: "-m 'Added the Elasticsearch libraries into the Gemfile'" - -# ----- Install gems ------------------------------------------------------------------------------ - -puts -say_status "Rubygems", "Installing Rubygems...", :yellow -puts '-'*80, '' - -run "bundle install" - -# ----- Autoload ./lib 
---------------------------------------------------------------------------- - -puts -say_status "Application", "Adding autoloading of ./lib...", :yellow -puts '-'*80, '' - -insert_into_file 'config/application.rb', - ' - config.autoload_paths += %W(#{config.root}/lib) - -', - after: 'class Application < Rails::Application' - -git commit: "-a -m 'Added autoloading of the ./lib folder'" - -# ----- Add jQuery UI ---------------------------------------------------------------------------- - -puts -say_status "Assets", "Adding jQuery UI...", :yellow -puts '-'*80, ''; sleep 0.25 - -if ENV['LOCAL'] - copy_file File.expand_path('../vendor/assets/jquery-ui-1.10.4.custom.min.js', __FILE__), - 'vendor/assets/javascripts/jquery-ui-1.10.4.custom.min.js' - copy_file File.expand_path('../vendor/assets/jquery-ui-1.10.4.custom.min.css', __FILE__), - 'vendor/assets/stylesheets/ui-lightness/jquery-ui-1.10.4.custom.min.css' - copy_file File.expand_path('../vendor/assets/stylesheets/ui-lightness/images/ui-bg_highlight-soft_100_eeeeee_1x100.png', __FILE__), - 'vendor/assets/stylesheets/ui-lightness/images/ui-bg_highlight-soft_100_eeeeee_1x100.png' -else - get 'https://raw.githubusercontent.com/elasticsearch/elasticsearch-rails/master/elasticsearch-persistence/examples/music/vendor/assets/jquery-ui-1.10.4.custom.min.js', - 'vendor/assets/javascripts/jquery-ui-1.10.4.custom.min.js' - get 'https://raw.githubusercontent.com/elasticsearch/elasticsearch-rails/master/elasticsearch-persistence/examples/music/vendor/assets/jquery-ui-1.10.4.custom.min.css', - 'vendor/assets/stylesheets/ui-lightness/jquery-ui-1.10.4.custom.min.css' - get 'https://raw.githubusercontent.com/elasticsearch/elasticsearch-rails/master/elasticsearch-persistence/examples/music/vendor/assets/stylesheets/ui-lightness/images/ui-bg_highlight-soft_100_eeeeee_1x100.png', - 'vendor/assets/stylesheets/ui-lightness/images/ui-bg_highlight-soft_100_eeeeee_1x100.png' -end - -append_to_file 'app/assets/javascripts/application.js', "//= require jquery-ui-1.10.4.custom.min.js" - -git commit: "-a -m 'Added jQuery UI'" - -# ----- Generate Artist scaffold ------------------------------------------------------------------ - -puts -say_status "Model", "Generating the Artist scaffold...", :yellow -puts '-'*80, ''; sleep 0.25 - -generate :scaffold, "Artist name:String --orm=elasticsearch" -route "root to: 'artists#index'" - -git add: "." -git commit: "-m 'Added the generated Artist scaffold'" - -# ----- Generate Album model ---------------------------------------------------------------------- - -puts -say_status "Model", "Generating the Album model...", :yellow -puts '-'*80, ''; sleep 0.25 - -generate :model, "Album --orm=elasticsearch" - -git add: "." 
-git commit: "-m 'Added the generated Album model'" - -# ----- Add proper model classes ------------------------------------------------------------------ - -puts -say_status "Model", "Adding Album, Artist and Suggester models implementation...", :yellow -puts '-'*80, ''; sleep 0.25 - -if ENV['LOCAL'] - copy_file File.expand_path('../artist.rb', __FILE__), 'app/models/artist.rb' - copy_file File.expand_path('../album.rb', __FILE__), 'app/models/album.rb' - copy_file File.expand_path('../suggester.rb', __FILE__), 'app/models/suggester.rb' -else - get 'https://raw.githubusercontent.com/elasticsearch/elasticsearch-rails/master/elasticsearch-persistence/examples/music/artist.rb', - 'app/models/artist.rb' - get 'https://raw.githubusercontent.com/elasticsearch/elasticsearch-rails/master/elasticsearch-persistence/examples/music/album.rb', - 'app/models/album.rb' - get 'https://raw.githubusercontent.com/elasticsearch/elasticsearch-rails/master/elasticsearch-persistence/examples/music/suggester.rb', - 'app/models/suggester.rb' -end - -git add: "./app/models" -git commit: "-m 'Added Album, Artist and Suggester models implementation'" - -# ----- Add controllers and views ----------------------------------------------------------------- - -puts -say_status "Views", "Adding ArtistsController and views...", :yellow -puts '-'*80, ''; sleep 0.25 - -if ENV['LOCAL'] - copy_file File.expand_path('../artists/artists_controller.rb', __FILE__), 'app/controllers/artists_controller.rb' - copy_file File.expand_path('../artists/index.html.erb', __FILE__), 'app/views/artists/index.html.erb' - copy_file File.expand_path('../artists/show.html.erb', __FILE__), 'app/views/artists/show.html.erb' - copy_file File.expand_path('../artists/_form.html.erb', __FILE__), 'app/views/artists/_form.html.erb' - copy_file File.expand_path('../artists/artists_controller_test.rb', __FILE__), - 'test/controllers/artists_controller_test.rb' -else - get 'https://raw.githubusercontent.com/elasticsearch/elasticsearch-rails/master/elasticsearch-persistence/examples/music/artists/artists_controller.rb', - 'app/controllers/artists_controller.rb' - get 'https://raw.githubusercontent.com/elasticsearch/elasticsearch-rails/master/elasticsearch-persistence/examples/music/artists/index.html.erb', - 'app/views/artists/index.html.erb' - get 'https://raw.githubusercontent.com/elasticsearch/elasticsearch-rails/master/elasticsearch-persistence/examples/music/artists/show.html.erb', - 'app/views/artists/show.html.erb' - get 'https://raw.githubusercontent.com/elasticsearch/elasticsearch-rails/master/elasticsearch-persistence/examples/music/artists/_form.html.erb', - 'app/views/artists/_form.html.erb' - get 'https://raw.githubusercontent.com/elasticsearch/elasticsearch-rails/master/elasticsearch-persistence/examples/music/artists/artists_controller_test.rb', - 'test/controllers/artists_controller_test.rb' -end - -git commit: "-a -m 'Added ArtistsController and related views'" - -puts -say_status "Views", "Adding SearchController and views...", :yellow -puts '-'*80, ''; sleep 0.25 - -if ENV['LOCAL'] - copy_file File.expand_path('../search/search_controller.rb', __FILE__), 'app/controllers/search_controller.rb' - copy_file File.expand_path('../search/search_helper.rb', __FILE__), 'app/helpers/search_helper.rb' - copy_file File.expand_path('../search/index.html.erb', __FILE__), 'app/views/search/index.html.erb' - copy_file File.expand_path('../search/search_controller_test.rb', __FILE__), 
- 'test/controllers/search_controller_test.rb' -else - get 'https://raw.githubusercontent.com/elasticsearch/elasticsearch-rails/master/elasticsearch-persistence/examples/music/search/search_controller.rb', - 'app/controllers/search_controller.rb' - get 'https://raw.githubusercontent.com/elasticsearch/elasticsearch-rails/master/elasticsearch-persistence/examples/music/search/search_helper.rb', - 'app/helpers/search_helper.rb' - get 'https://raw.githubusercontent.com/elasticsearch/elasticsearch-rails/master/elasticsearch-persistence/examples/music/search/index.html.erb', - 'app/views/search/index.html.erb' - get 'https://raw.githubusercontent.com/elasticsearch/elasticsearch-rails/master/elasticsearch-persistence/examples/music/search/search_controller_test.rb', - 'test/controllers/search_controller_test.rb' -end - -route "get 'search', to: 'search#index'" -route "get 'suggest', to: 'search#suggest'" - -comment_lines 'test/test_helper.rb', /fixtures \:all/ - -git add: "." -git commit: "-m 'Added SearchController and related views'" - -# ----- Add assets ----------------------------------------------------------------- - -puts -say_status "Views", "Adding application assets...", :yellow -puts '-'*80, ''; sleep 0.25 - -git rm: 'app/assets/stylesheets/scaffold.css' - -gsub_file 'app/views/layouts/application.html.erb', //, '' - -if ENV['LOCAL'] - copy_file File.expand_path('../assets/application.css', __FILE__), 'app/assets/stylesheets/application.css' - copy_file File.expand_path('../assets/autocomplete.css', __FILE__), 'app/assets/stylesheets/autocomplete.css' - copy_file File.expand_path('../assets/form.css', __FILE__), 'app/assets/stylesheets/form.css' - copy_file File.expand_path('../assets/blank_cover.png', __FILE__), 'public/images/blank_cover.png' - copy_file File.expand_path('../assets/blank_artist.png', __FILE__), 'public/images/blank_artist.png' -else - get 'https://raw.githubusercontent.com/elasticsearch/elasticsearch-rails/master/elasticsearch-persistence/examples/music/assets/application.css', - 'app/assets/stylesheets/application.css' - get 'https://raw.githubusercontent.com/elasticsearch/elasticsearch-rails/master/elasticsearch-persistence/examples/music/assets/autocomplete.css', - 'app/assets/stylesheets/autocomplete.css' - get 'https://raw.githubusercontent.com/elasticsearch/elasticsearch-rails/master/elasticsearch-persistence/examples/music/assets/form.css', - 'app/assets/stylesheets/form.css' - get 'https://raw.githubusercontent.com/elasticsearch/elasticsearch-rails/master/elasticsearch-persistence/examples/music/assets/blank_cover.png', - 'public/images/blank_cover.png' - get 'https://raw.githubusercontent.com/elasticsearch/elasticsearch-rails/master/elasticsearch-persistence/examples/music/assets/blank_artist.png', - 'public/images/blank_artist.png' -end - -git add: "." -git commit: "-m 'Added application assets'" - -# ----- Add an Elasticsearch initializer ---------------------------------------------------------- - -puts -say_status "Initializer", "Adding an Elasticsearch initializer...", :yellow -puts '-'*80, ''; sleep 0.25 - -initializer 'elasticsearch.rb', %q{ - Elasticsearch::Persistence.client = Elasticsearch::Client.new host: ENV['ELASTICSEARCH_URL'] || 'localhost:9200' - - if Rails.env.development? 
- logger = ActiveSupport::Logger.new(STDERR) - logger.level = Logger::INFO - logger.formatter = proc { |s, d, p, m| "\e[2m#{m}\n\e[0m" } - Elasticsearch::Persistence.client.transport.logger = logger - end -}.gsub(/^ /, '') - -git add: "./config" -git commit: "-m 'Added an Elasticsearch initializer'" - -# ----- Add IndexManager ----------------------------------------------------------------- - -puts -say_status "Application", "Adding the IndexManager class...", :yellow -puts '-'*80, ''; sleep 0.25 - -if ENV['LOCAL'] - copy_file File.expand_path('../index_manager.rb', __FILE__), 'lib/index_manager.rb' -else - get 'https://raw.githubusercontent.com/elasticsearch/elasticsearch-rails/master/elasticsearch-persistence/examples/music/index_manager.rb', - 'lib/index_manager.rb' -end - -# TODO: get 'https://raw.github.com/...', '...' - -git add: "." -git commit: "-m 'Added the IndexManager class'" - -# ----- Import the data --------------------------------------------------------------------------- - -puts -say_status "Data", "Import the data...", :yellow -puts '-'*80, ''; sleep 0.25 - -source = ENV.fetch('http://github.com/DATA_SOURCE', 'https://github.com/elastic/elasticsearch-rails/releases/download/dischord.yml/dischord.yml') - -run "ELASTICSEARCH_URL=#{$elasticsearch_url} rails runner 'IndexManager.import_from_yaml(\"#{source}\", force: true)'" - -# ----- Print Git log ----------------------------------------------------------------------------- - -puts -say_status "Git", "Details about the application:", :yellow -puts '-'*80, '' - -run "git --no-pager log --reverse --oneline" - -# ----- Start the application --------------------------------------------------------------------- - -unless ENV['RAILS_NO_SERVER_START'] - require 'net/http' - if (begin; Net::HTTP.get(URI('http://localhost:3000/')); rescue Errno::ECONNREFUSED; false; rescue Exception; true; end) - puts "\n" - say_status "ERROR", "Some other application is running on port 3000!\n", :red - puts '-'*80 - - port = ask("Please provide free port:", :bold) - else - port = '3000' - end - - puts "", "="*80 - say_status "DONE", "\e[1mStarting the application.\e[0m", :yellow - puts "="*80, "" - - run "ELASTICSEARCH_URL=#{$elasticsearch_url} rails server --port=#{port}" -end diff --git a/elasticsearch-persistence/examples/music/vendor/assets/jquery-ui-1.10.4.custom.min.css b/elasticsearch-persistence/examples/music/vendor/assets/jquery-ui-1.10.4.custom.min.css deleted file mode 100755 index 672cea658..000000000 --- a/elasticsearch-persistence/examples/music/vendor/assets/jquery-ui-1.10.4.custom.min.css +++ /dev/null @@ -1,7 +0,0 @@ -/*! 
jQuery UI - v1.10.4 - 2014-06-04 -* http://jqueryui.com -* Includes: jquery.ui.core.css, jquery.ui.autocomplete.css, jquery.ui.menu.css, jquery.ui.theme.css -* To view and modify this theme, visit http://jqueryui.com/themeroller/?ffDefault=Trebuchet%20MS%2CTahoma%2CVerdana%2CArial%2Csans-serif&fwDefault=bold&fsDefault=1.1em&cornerRadius=4px&bgColorHeader=f6a828&bgTextureHeader=gloss_wave&bgImgOpacityHeader=35&borderColorHeader=e78f08&fcHeader=ffffff&iconColorHeader=ffffff&bgColorContent=eeeeee&bgTextureContent=highlight_soft&bgImgOpacityContent=100&borderColorContent=dddddd&fcContent=333333&iconColorContent=222222&bgColorDefault=f6f6f6&bgTextureDefault=glass&bgImgOpacityDefault=100&borderColorDefault=cccccc&fcDefault=1c94c4&iconColorDefault=ef8c08&bgColorHover=fdf5ce&bgTextureHover=glass&bgImgOpacityHover=100&borderColorHover=fbcb09&fcHover=c77405&iconColorHover=ef8c08&bgColorActive=ffffff&bgTextureActive=glass&bgImgOpacityActive=65&borderColorActive=fbd850&fcActive=eb8f00&iconColorActive=ef8c08&bgColorHighlight=ffe45c&bgTextureHighlight=highlight_soft&bgImgOpacityHighlight=75&borderColorHighlight=fed22f&fcHighlight=363636&iconColorHighlight=228ef1&bgColorError=b81900&bgTextureError=diagonals_thick&bgImgOpacityError=18&borderColorError=cd0a0a&fcError=ffffff&iconColorError=ffd27a&bgColorOverlay=666666&bgTextureOverlay=diagonals_thick&bgImgOpacityOverlay=20&opacityOverlay=50&bgColorShadow=000000&bgTextureShadow=flat&bgImgOpacityShadow=10&opacityShadow=20&thicknessShadow=5px&offsetTopShadow=-5px&offsetLeftShadow=-5px&cornerRadiusShadow=5px -* Copyright 2014 jQuery Foundation and other contributors; Licensed MIT */ - -.ui-helper-hidden{display:none}.ui-helper-hidden-accessible{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.ui-helper-reset{margin:0;padding:0;border:0;outline:0;line-height:1.3;text-decoration:none;font-size:100%;list-style:none}.ui-helper-clearfix:before,.ui-helper-clearfix:after{content:"";display:table;border-collapse:collapse}.ui-helper-clearfix:after{clear:both}.ui-helper-clearfix{min-height:0}.ui-helper-zfix{width:100%;height:100%;top:0;left:0;position:absolute;opacity:0;filter:Alpha(Opacity=0)}.ui-front{z-index:100}.ui-state-disabled{cursor:default!important}.ui-icon{display:block;text-indent:-99999px;overflow:hidden;background-repeat:no-repeat}.ui-widget-overlay{position:fixed;top:0;left:0;width:100%;height:100%}.ui-autocomplete{position:absolute;top:0;left:0;cursor:default}.ui-menu{list-style:none;padding:2px;margin:0;display:block;outline:none}.ui-menu .ui-menu{margin-top:-3px;position:absolute}.ui-menu .ui-menu-item{margin:0;padding:0;width:100%;list-style-image:url(data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7)}.ui-menu .ui-menu-divider{margin:5px -2px 5px -2px;height:0;font-size:0;line-height:0;border-width:1px 0 0 0}.ui-menu .ui-menu-item a{text-decoration:none;display:block;padding:2px .4em;line-height:1.5;min-height:0;font-weight:normal}.ui-menu .ui-menu-item a.ui-state-focus,.ui-menu .ui-menu-item a.ui-state-active{font-weight:normal;margin:-1px}.ui-menu .ui-state-disabled{font-weight:normal;margin:.4em 0 .2em;line-height:1.5}.ui-menu .ui-state-disabled a{cursor:default}.ui-menu-icons{position:relative}.ui-menu-icons .ui-menu-item a{position:relative;padding-left:2em}.ui-menu .ui-icon{position:absolute;top:.2em;left:.2em}.ui-menu .ui-menu-icon{position:static;float:right}.ui-widget{font-family:Trebuchet MS,Tahoma,Verdana,Arial,sans-serif;font-size:1.1em}.ui-widget 
.ui-widget{font-size:1em}.ui-widget input,.ui-widget select,.ui-widget textarea,.ui-widget button{font-family:Trebuchet MS,Tahoma,Verdana,Arial,sans-serif;font-size:1em}.ui-widget-content{border:1px solid #ddd;background:#eee url("http://github.com/images/ui-bg_highlight-soft_100_eeeeee_1x100.png") 50% top repeat-x;color:#333}.ui-widget-content a{color:#333}.ui-widget-header{border:1px solid #e78f08;background:#f6a828 url("http://github.com/images/ui-bg_gloss-wave_35_f6a828_500x100.png") 50% 50% repeat-x;color:#fff;font-weight:bold}.ui-widget-header a{color:#fff}.ui-state-default,.ui-widget-content .ui-state-default,.ui-widget-header .ui-state-default{border:1px solid #ccc;background:#f6f6f6 url("http://github.com/images/ui-bg_glass_100_f6f6f6_1x400.png") 50% 50% repeat-x;font-weight:bold;color:#1c94c4}.ui-state-default a,.ui-state-default a:link,.ui-state-default a:visited{color:#1c94c4;text-decoration:none}.ui-state-hover,.ui-widget-content .ui-state-hover,.ui-widget-header .ui-state-hover,.ui-state-focus,.ui-widget-content .ui-state-focus,.ui-widget-header .ui-state-focus{border:1px solid #fbcb09;background:#fdf5ce url("http://github.com/images/ui-bg_glass_100_fdf5ce_1x400.png") 50% 50% repeat-x;font-weight:bold;color:#c77405}.ui-state-hover a,.ui-state-hover a:hover,.ui-state-hover a:link,.ui-state-hover a:visited,.ui-state-focus a,.ui-state-focus a:hover,.ui-state-focus a:link,.ui-state-focus a:visited{color:#c77405;text-decoration:none}.ui-state-active,.ui-widget-content .ui-state-active,.ui-widget-header .ui-state-active{border:1px solid #fbd850;background:#fff url("http://github.com/images/ui-bg_glass_65_ffffff_1x400.png") 50% 50% repeat-x;font-weight:bold;color:#eb8f00}.ui-state-active a,.ui-state-active a:link,.ui-state-active a:visited{color:#eb8f00;text-decoration:none}.ui-state-highlight,.ui-widget-content .ui-state-highlight,.ui-widget-header .ui-state-highlight{border:1px solid #fed22f;background:#ffe45c url("http://github.com/images/ui-bg_highlight-soft_75_ffe45c_1x100.png") 50% top repeat-x;color:#363636}.ui-state-highlight a,.ui-widget-content .ui-state-highlight a,.ui-widget-header .ui-state-highlight a{color:#363636}.ui-state-error,.ui-widget-content .ui-state-error,.ui-widget-header .ui-state-error{border:1px solid #cd0a0a;background:#b81900 url("http://github.com/images/ui-bg_diagonals-thick_18_b81900_40x40.png") 50% 50% repeat;color:#fff}.ui-state-error a,.ui-widget-content .ui-state-error a,.ui-widget-header .ui-state-error a{color:#fff}.ui-state-error-text,.ui-widget-content .ui-state-error-text,.ui-widget-header .ui-state-error-text{color:#fff}.ui-priority-primary,.ui-widget-content .ui-priority-primary,.ui-widget-header .ui-priority-primary{font-weight:bold}.ui-priority-secondary,.ui-widget-content .ui-priority-secondary,.ui-widget-header .ui-priority-secondary{opacity:.7;filter:Alpha(Opacity=70);font-weight:normal}.ui-state-disabled,.ui-widget-content .ui-state-disabled,.ui-widget-header .ui-state-disabled{opacity:.35;filter:Alpha(Opacity=35);background-image:none}.ui-state-disabled .ui-icon{filter:Alpha(Opacity=35)}.ui-icon{width:16px;height:16px}.ui-icon,.ui-widget-content .ui-icon{background-image:url("http://github.com/images/ui-icons_222222_256x240.png")}.ui-widget-header .ui-icon{background-image:url("http://github.com/images/ui-icons_ffffff_256x240.png")}.ui-state-default 
.ui-icon{background-image:url("http://github.com/images/ui-icons_ef8c08_256x240.png")}.ui-state-hover .ui-icon,.ui-state-focus .ui-icon{background-image:url("http://github.com/images/ui-icons_ef8c08_256x240.png")}.ui-state-active .ui-icon{background-image:url("http://github.com/images/ui-icons_ef8c08_256x240.png")}.ui-state-highlight .ui-icon{background-image:url("http://github.com/images/ui-icons_228ef1_256x240.png")}.ui-state-error .ui-icon,.ui-state-error-text .ui-icon{background-image:url("http://github.com/images/ui-icons_ffd27a_256x240.png")}.ui-icon-blank{background-position:16px 16px}.ui-icon-carat-1-n{background-position:0 0}.ui-icon-carat-1-ne{background-position:-16px 0}.ui-icon-carat-1-e{background-position:-32px 0}.ui-icon-carat-1-se{background-position:-48px 0}.ui-icon-carat-1-s{background-position:-64px 0}.ui-icon-carat-1-sw{background-position:-80px 0}.ui-icon-carat-1-w{background-position:-96px 0}.ui-icon-carat-1-nw{background-position:-112px 0}.ui-icon-carat-2-n-s{background-position:-128px 0}.ui-icon-carat-2-e-w{background-position:-144px 0}.ui-icon-triangle-1-n{background-position:0 -16px}.ui-icon-triangle-1-ne{background-position:-16px -16px}.ui-icon-triangle-1-e{background-position:-32px -16px}.ui-icon-triangle-1-se{background-position:-48px -16px}.ui-icon-triangle-1-s{background-position:-64px -16px}.ui-icon-triangle-1-sw{background-position:-80px -16px}.ui-icon-triangle-1-w{background-position:-96px -16px}.ui-icon-triangle-1-nw{background-position:-112px -16px}.ui-icon-triangle-2-n-s{background-position:-128px -16px}.ui-icon-triangle-2-e-w{background-position:-144px -16px}.ui-icon-arrow-1-n{background-position:0 -32px}.ui-icon-arrow-1-ne{background-position:-16px -32px}.ui-icon-arrow-1-e{background-position:-32px -32px}.ui-icon-arrow-1-se{background-position:-48px -32px}.ui-icon-arrow-1-s{background-position:-64px -32px}.ui-icon-arrow-1-sw{background-position:-80px -32px}.ui-icon-arrow-1-w{background-position:-96px -32px}.ui-icon-arrow-1-nw{background-position:-112px -32px}.ui-icon-arrow-2-n-s{background-position:-128px -32px}.ui-icon-arrow-2-ne-sw{background-position:-144px -32px}.ui-icon-arrow-2-e-w{background-position:-160px -32px}.ui-icon-arrow-2-se-nw{background-position:-176px -32px}.ui-icon-arrowstop-1-n{background-position:-192px -32px}.ui-icon-arrowstop-1-e{background-position:-208px -32px}.ui-icon-arrowstop-1-s{background-position:-224px -32px}.ui-icon-arrowstop-1-w{background-position:-240px -32px}.ui-icon-arrowthick-1-n{background-position:0 -48px}.ui-icon-arrowthick-1-ne{background-position:-16px -48px}.ui-icon-arrowthick-1-e{background-position:-32px -48px}.ui-icon-arrowthick-1-se{background-position:-48px -48px}.ui-icon-arrowthick-1-s{background-position:-64px -48px}.ui-icon-arrowthick-1-sw{background-position:-80px -48px}.ui-icon-arrowthick-1-w{background-position:-96px -48px}.ui-icon-arrowthick-1-nw{background-position:-112px -48px}.ui-icon-arrowthick-2-n-s{background-position:-128px -48px}.ui-icon-arrowthick-2-ne-sw{background-position:-144px -48px}.ui-icon-arrowthick-2-e-w{background-position:-160px -48px}.ui-icon-arrowthick-2-se-nw{background-position:-176px -48px}.ui-icon-arrowthickstop-1-n{background-position:-192px -48px}.ui-icon-arrowthickstop-1-e{background-position:-208px -48px}.ui-icon-arrowthickstop-1-s{background-position:-224px -48px}.ui-icon-arrowthickstop-1-w{background-position:-240px -48px}.ui-icon-arrowreturnthick-1-w{background-position:0 
-64px}.ui-icon-arrowreturnthick-1-n{background-position:-16px -64px}.ui-icon-arrowreturnthick-1-e{background-position:-32px -64px}.ui-icon-arrowreturnthick-1-s{background-position:-48px -64px}.ui-icon-arrowreturn-1-w{background-position:-64px -64px}.ui-icon-arrowreturn-1-n{background-position:-80px -64px}.ui-icon-arrowreturn-1-e{background-position:-96px -64px}.ui-icon-arrowreturn-1-s{background-position:-112px -64px}.ui-icon-arrowrefresh-1-w{background-position:-128px -64px}.ui-icon-arrowrefresh-1-n{background-position:-144px -64px}.ui-icon-arrowrefresh-1-e{background-position:-160px -64px}.ui-icon-arrowrefresh-1-s{background-position:-176px -64px}.ui-icon-arrow-4{background-position:0 -80px}.ui-icon-arrow-4-diag{background-position:-16px -80px}.ui-icon-extlink{background-position:-32px -80px}.ui-icon-newwin{background-position:-48px -80px}.ui-icon-refresh{background-position:-64px -80px}.ui-icon-shuffle{background-position:-80px -80px}.ui-icon-transfer-e-w{background-position:-96px -80px}.ui-icon-transferthick-e-w{background-position:-112px -80px}.ui-icon-folder-collapsed{background-position:0 -96px}.ui-icon-folder-open{background-position:-16px -96px}.ui-icon-document{background-position:-32px -96px}.ui-icon-document-b{background-position:-48px -96px}.ui-icon-note{background-position:-64px -96px}.ui-icon-mail-closed{background-position:-80px -96px}.ui-icon-mail-open{background-position:-96px -96px}.ui-icon-suitcase{background-position:-112px -96px}.ui-icon-comment{background-position:-128px -96px}.ui-icon-person{background-position:-144px -96px}.ui-icon-print{background-position:-160px -96px}.ui-icon-trash{background-position:-176px -96px}.ui-icon-locked{background-position:-192px -96px}.ui-icon-unlocked{background-position:-208px -96px}.ui-icon-bookmark{background-position:-224px -96px}.ui-icon-tag{background-position:-240px -96px}.ui-icon-home{background-position:0 -112px}.ui-icon-flag{background-position:-16px -112px}.ui-icon-calendar{background-position:-32px -112px}.ui-icon-cart{background-position:-48px -112px}.ui-icon-pencil{background-position:-64px -112px}.ui-icon-clock{background-position:-80px -112px}.ui-icon-disk{background-position:-96px -112px}.ui-icon-calculator{background-position:-112px -112px}.ui-icon-zoomin{background-position:-128px -112px}.ui-icon-zoomout{background-position:-144px -112px}.ui-icon-search{background-position:-160px -112px}.ui-icon-wrench{background-position:-176px -112px}.ui-icon-gear{background-position:-192px -112px}.ui-icon-heart{background-position:-208px -112px}.ui-icon-star{background-position:-224px -112px}.ui-icon-link{background-position:-240px -112px}.ui-icon-cancel{background-position:0 -128px}.ui-icon-plus{background-position:-16px -128px}.ui-icon-plusthick{background-position:-32px -128px}.ui-icon-minus{background-position:-48px -128px}.ui-icon-minusthick{background-position:-64px -128px}.ui-icon-close{background-position:-80px -128px}.ui-icon-closethick{background-position:-96px -128px}.ui-icon-key{background-position:-112px -128px}.ui-icon-lightbulb{background-position:-128px -128px}.ui-icon-scissors{background-position:-144px -128px}.ui-icon-clipboard{background-position:-160px -128px}.ui-icon-copy{background-position:-176px -128px}.ui-icon-contact{background-position:-192px -128px}.ui-icon-image{background-position:-208px -128px}.ui-icon-video{background-position:-224px -128px}.ui-icon-script{background-position:-240px -128px}.ui-icon-alert{background-position:0 -144px}.ui-icon-info{background-position:-16px 
-144px}.ui-icon-notice{background-position:-32px -144px}.ui-icon-help{background-position:-48px -144px}.ui-icon-check{background-position:-64px -144px}.ui-icon-bullet{background-position:-80px -144px}.ui-icon-radio-on{background-position:-96px -144px}.ui-icon-radio-off{background-position:-112px -144px}.ui-icon-pin-w{background-position:-128px -144px}.ui-icon-pin-s{background-position:-144px -144px}.ui-icon-play{background-position:0 -160px}.ui-icon-pause{background-position:-16px -160px}.ui-icon-seek-next{background-position:-32px -160px}.ui-icon-seek-prev{background-position:-48px -160px}.ui-icon-seek-end{background-position:-64px -160px}.ui-icon-seek-start{background-position:-80px -160px}.ui-icon-seek-first{background-position:-80px -160px}.ui-icon-stop{background-position:-96px -160px}.ui-icon-eject{background-position:-112px -160px}.ui-icon-volume-off{background-position:-128px -160px}.ui-icon-volume-on{background-position:-144px -160px}.ui-icon-power{background-position:0 -176px}.ui-icon-signal-diag{background-position:-16px -176px}.ui-icon-signal{background-position:-32px -176px}.ui-icon-battery-0{background-position:-48px -176px}.ui-icon-battery-1{background-position:-64px -176px}.ui-icon-battery-2{background-position:-80px -176px}.ui-icon-battery-3{background-position:-96px -176px}.ui-icon-circle-plus{background-position:0 -192px}.ui-icon-circle-minus{background-position:-16px -192px}.ui-icon-circle-close{background-position:-32px -192px}.ui-icon-circle-triangle-e{background-position:-48px -192px}.ui-icon-circle-triangle-s{background-position:-64px -192px}.ui-icon-circle-triangle-w{background-position:-80px -192px}.ui-icon-circle-triangle-n{background-position:-96px -192px}.ui-icon-circle-arrow-e{background-position:-112px -192px}.ui-icon-circle-arrow-s{background-position:-128px -192px}.ui-icon-circle-arrow-w{background-position:-144px -192px}.ui-icon-circle-arrow-n{background-position:-160px -192px}.ui-icon-circle-zoomin{background-position:-176px -192px}.ui-icon-circle-zoomout{background-position:-192px -192px}.ui-icon-circle-check{background-position:-208px -192px}.ui-icon-circlesmall-plus{background-position:0 -208px}.ui-icon-circlesmall-minus{background-position:-16px -208px}.ui-icon-circlesmall-close{background-position:-32px -208px}.ui-icon-squaresmall-plus{background-position:-48px -208px}.ui-icon-squaresmall-minus{background-position:-64px -208px}.ui-icon-squaresmall-close{background-position:-80px -208px}.ui-icon-grip-dotted-vertical{background-position:0 -224px}.ui-icon-grip-dotted-horizontal{background-position:-16px -224px}.ui-icon-grip-solid-vertical{background-position:-32px -224px}.ui-icon-grip-solid-horizontal{background-position:-48px -224px}.ui-icon-gripsmall-diagonal-se{background-position:-64px -224px}.ui-icon-grip-diagonal-se{background-position:-80px -224px}.ui-corner-all,.ui-corner-top,.ui-corner-left,.ui-corner-tl{border-top-left-radius:4px}.ui-corner-all,.ui-corner-top,.ui-corner-right,.ui-corner-tr{border-top-right-radius:4px}.ui-corner-all,.ui-corner-bottom,.ui-corner-left,.ui-corner-bl{border-bottom-left-radius:4px}.ui-corner-all,.ui-corner-bottom,.ui-corner-right,.ui-corner-br{border-bottom-right-radius:4px}.ui-widget-overlay{background:#666 url("http://github.com/images/ui-bg_diagonals-thick_20_666666_40x40.png") 50% 50% repeat;opacity:.5;filter:Alpha(Opacity=50)}.ui-widget-shadow{margin:-5px 0 0 -5px;padding:5px;background:#000 url("http://github.com/images/ui-bg_flat_10_000000_40x100.png") 50% 50% 
repeat-x;opacity:.2;filter:Alpha(Opacity=20);border-radius:5px} \ No newline at end of file diff --git a/elasticsearch-persistence/examples/music/vendor/assets/jquery-ui-1.10.4.custom.min.js b/elasticsearch-persistence/examples/music/vendor/assets/jquery-ui-1.10.4.custom.min.js deleted file mode 100755 index 8af84cb1e..000000000 --- a/elasticsearch-persistence/examples/music/vendor/assets/jquery-ui-1.10.4.custom.min.js +++ /dev/null @@ -1,6 +0,0 @@ -/*! jQuery UI - v1.10.4 - 2014-06-05 -* http://jqueryui.com -* Includes: jquery.ui.core.js, jquery.ui.widget.js, jquery.ui.position.js, jquery.ui.autocomplete.js, jquery.ui.menu.js, jquery.ui.effect.js, jquery.ui.effect-highlight.js -* Copyright 2014 jQuery Foundation and other contributors; Licensed MIT */ - -(function(e,t){function i(t,i){var s,a,o,r=t.nodeName.toLowerCase();return"area"===r?(s=t.parentNode,a=s.name,t.href&&a&&"map"===s.nodeName.toLowerCase()?(o=e("img[usemap=#"+a+"]")[0],!!o&&n(o)):!1):(/input|select|textarea|button|object/.test(r)?!t.disabled:"a"===r?t.href||i:i)&&n(t)}function n(t){return e.expr.filters.visible(t)&&!e(t).parents().addBack().filter(function(){return"hidden"===e.css(this,"visibility")}).length}var s=0,a=/^ui-id-\d+$/;e.ui=e.ui||{},e.extend(e.ui,{version:"1.10.4",keyCode:{BACKSPACE:8,COMMA:188,DELETE:46,DOWN:40,END:35,ENTER:13,ESCAPE:27,HOME:36,LEFT:37,NUMPAD_ADD:107,NUMPAD_DECIMAL:110,NUMPAD_DIVIDE:111,NUMPAD_ENTER:108,NUMPAD_MULTIPLY:106,NUMPAD_SUBTRACT:109,PAGE_DOWN:34,PAGE_UP:33,PERIOD:190,RIGHT:39,SPACE:32,TAB:9,UP:38}}),e.fn.extend({focus:function(t){return function(i,n){return"number"==typeof i?this.each(function(){var t=this;setTimeout(function(){e(t).focus(),n&&n.call(t)},i)}):t.apply(this,arguments)}}(e.fn.focus),scrollParent:function(){var t;return t=e.ui.ie&&/(static|relative)/.test(this.css("position"))||/absolute/.test(this.css("position"))?this.parents().filter(function(){return/(relative|absolute|fixed)/.test(e.css(this,"position"))&&/(auto|scroll)/.test(e.css(this,"overflow")+e.css(this,"overflow-y")+e.css(this,"overflow-x"))}).eq(0):this.parents().filter(function(){return/(auto|scroll)/.test(e.css(this,"overflow")+e.css(this,"overflow-y")+e.css(this,"overflow-x"))}).eq(0),/fixed/.test(this.css("position"))||!t.length?e(document):t},zIndex:function(i){if(i!==t)return this.css("zIndex",i);if(this.length)for(var n,s,a=e(this[0]);a.length&&a[0]!==document;){if(n=a.css("position"),("absolute"===n||"relative"===n||"fixed"===n)&&(s=parseInt(a.css("zIndex"),10),!isNaN(s)&&0!==s))return s;a=a.parent()}return 0},uniqueId:function(){return this.each(function(){this.id||(this.id="ui-id-"+ ++s)})},removeUniqueId:function(){return this.each(function(){a.test(this.id)&&e(this).removeAttr("id")})}}),e.extend(e.expr[":"],{data:e.expr.createPseudo?e.expr.createPseudo(function(t){return function(i){return!!e.data(i,t)}}):function(t,i,n){return!!e.data(t,n[3])},focusable:function(t){return i(t,!isNaN(e.attr(t,"tabindex")))},tabbable:function(t){var n=e.attr(t,"tabindex"),s=isNaN(n);return(s||n>=0)&&i(t,!s)}}),e("").outerWidth(1).jquery||e.each(["Width","Height"],function(i,n){function s(t,i,n,s){return e.each(a,function(){i-=parseFloat(e.css(t,"padding"+this))||0,n&&(i-=parseFloat(e.css(t,"border"+this+"Width"))||0),s&&(i-=parseFloat(e.css(t,"margin"+this))||0)}),i}var a="Width"===n?["Left","Right"]:["Top","Bottom"],o=n.toLowerCase(),r={innerWidth:e.fn.innerWidth,innerHeight:e.fn.innerHeight,outerWidth:e.fn.outerWidth,outerHeight:e.fn.outerHeight};e.fn["inner"+n]=function(i){return 
i===t?r["inner"+n].call(this):this.each(function(){e(this).css(o,s(this,i)+"px")})},e.fn["outer"+n]=function(t,i){return"number"!=typeof t?r["outer"+n].call(this,t):this.each(function(){e(this).css(o,s(this,t,!0,i)+"px")})}}),e.fn.addBack||(e.fn.addBack=function(e){return this.add(null==e?this.prevObject:this.prevObject.filter(e))}),e("").data("a-b","a").removeData("a-b").data("a-b")&&(e.fn.removeData=function(t){return function(i){return arguments.length?t.call(this,e.camelCase(i)):t.call(this)}}(e.fn.removeData)),e.ui.ie=!!/msie [\w.]+/.exec(navigator.userAgent.toLowerCase()),e.support.selectstart="onselectstart"in document.createElement("div"),e.fn.extend({disableSelection:function(){return this.bind((e.support.selectstart?"selectstart":"mousedown")+".ui-disableSelection",function(e){e.preventDefault()})},enableSelection:function(){return this.unbind(".ui-disableSelection")}}),e.extend(e.ui,{plugin:{add:function(t,i,n){var s,a=e.ui[t].prototype;for(s in n)a.plugins[s]=a.plugins[s]||[],a.plugins[s].push([i,n[s]])},call:function(e,t,i){var n,s=e.plugins[t];if(s&&e.element[0].parentNode&&11!==e.element[0].parentNode.nodeType)for(n=0;s.length>n;n++)e.options[s[n][0]]&&s[n][1].apply(e.element,i)}},hasScroll:function(t,i){if("hidden"===e(t).css("overflow"))return!1;var n=i&&"left"===i?"scrollLeft":"scrollTop",s=!1;return t[n]>0?!0:(t[n]=1,s=t[n]>0,t[n]=0,s)}})})(jQuery);(function(t,e){var i=0,s=Array.prototype.slice,n=t.cleanData;t.cleanData=function(e){for(var i,s=0;null!=(i=e[s]);s++)try{t(i).triggerHandler("remove")}catch(o){}n(e)},t.widget=function(i,s,n){var o,a,r,h,l={},c=i.split(".")[0];i=i.split(".")[1],o=c+"-"+i,n||(n=s,s=t.Widget),t.expr[":"][o.toLowerCase()]=function(e){return!!t.data(e,o)},t[c]=t[c]||{},a=t[c][i],r=t[c][i]=function(t,i){return this._createWidget?(arguments.length&&this._createWidget(t,i),e):new r(t,i)},t.extend(r,a,{version:n.version,_proto:t.extend({},n),_childConstructors:[]}),h=new s,h.options=t.widget.extend({},h.options),t.each(n,function(i,n){return t.isFunction(n)?(l[i]=function(){var t=function(){return s.prototype[i].apply(this,arguments)},e=function(t){return s.prototype[i].apply(this,t)};return function(){var i,s=this._super,o=this._superApply;return this._super=t,this._superApply=e,i=n.apply(this,arguments),this._super=s,this._superApply=o,i}}(),e):(l[i]=n,e)}),r.prototype=t.widget.extend(h,{widgetEventPrefix:a?h.widgetEventPrefix||i:i},l,{constructor:r,namespace:c,widgetName:i,widgetFullName:o}),a?(t.each(a._childConstructors,function(e,i){var s=i.prototype;t.widget(s.namespace+"."+s.widgetName,r,i._proto)}),delete a._childConstructors):s._childConstructors.push(r),t.widget.bridge(i,r)},t.widget.extend=function(i){for(var n,o,a=s.call(arguments,1),r=0,h=a.length;h>r;r++)for(n in a[r])o=a[r][n],a[r].hasOwnProperty(n)&&o!==e&&(i[n]=t.isPlainObject(o)?t.isPlainObject(i[n])?t.widget.extend({},i[n],o):t.widget.extend({},o):o);return i},t.widget.bridge=function(i,n){var o=n.prototype.widgetFullName||i;t.fn[i]=function(a){var r="string"==typeof a,h=s.call(arguments,1),l=this;return a=!r&&h.length?t.widget.extend.apply(null,[a].concat(h)):a,r?this.each(function(){var s,n=t.data(this,o);return n?t.isFunction(n[a])&&"_"!==a.charAt(0)?(s=n[a].apply(n,h),s!==n&&s!==e?(l=s&&s.jquery?l.pushStack(s.get()):s,!1):e):t.error("no such method '"+a+"' for "+i+" widget instance"):t.error("cannot call methods on "+i+" prior to initialization; "+"attempted to call method '"+a+"'")}):this.each(function(){var e=t.data(this,o);e?e.option(a||{})._init():t.data(this,o,new 
n(a,this))}),l}},t.Widget=function(){},t.Widget._childConstructors=[],t.Widget.prototype={widgetName:"widget",widgetEventPrefix:"",defaultElement:"<div>
",options:{disabled:!1,create:null},_createWidget:function(e,s){s=t(s||this.defaultElement||this)[0],this.element=t(s),this.uuid=i++,this.eventNamespace="."+this.widgetName+this.uuid,this.options=t.widget.extend({},this.options,this._getCreateOptions(),e),this.bindings=t(),this.hoverable=t(),this.focusable=t(),s!==this&&(t.data(s,this.widgetFullName,this),this._on(!0,this.element,{remove:function(t){t.target===s&&this.destroy()}}),this.document=t(s.style?s.ownerDocument:s.document||s),this.window=t(this.document[0].defaultView||this.document[0].parentWindow)),this._create(),this._trigger("create",null,this._getCreateEventData()),this._init()},_getCreateOptions:t.noop,_getCreateEventData:t.noop,_create:t.noop,_init:t.noop,destroy:function(){this._destroy(),this.element.unbind(this.eventNamespace).removeData(this.widgetName).removeData(this.widgetFullName).removeData(t.camelCase(this.widgetFullName)),this.widget().unbind(this.eventNamespace).removeAttr("aria-disabled").removeClass(this.widgetFullName+"-disabled "+"ui-state-disabled"),this.bindings.unbind(this.eventNamespace),this.hoverable.removeClass("ui-state-hover"),this.focusable.removeClass("ui-state-focus")},_destroy:t.noop,widget:function(){return this.element},option:function(i,s){var n,o,a,r=i;if(0===arguments.length)return t.widget.extend({},this.options);if("string"==typeof i)if(r={},n=i.split("."),i=n.shift(),n.length){for(o=r[i]=t.widget.extend({},this.options[i]),a=0;n.length-1>a;a++)o[n[a]]=o[n[a]]||{},o=o[n[a]];if(i=n.pop(),1===arguments.length)return o[i]===e?null:o[i];o[i]=s}else{if(1===arguments.length)return this.options[i]===e?null:this.options[i];r[i]=s}return this._setOptions(r),this},_setOptions:function(t){var e;for(e in t)this._setOption(e,t[e]);return this},_setOption:function(t,e){return this.options[t]=e,"disabled"===t&&(this.widget().toggleClass(this.widgetFullName+"-disabled ui-state-disabled",!!e).attr("aria-disabled",e),this.hoverable.removeClass("ui-state-hover"),this.focusable.removeClass("ui-state-focus")),this},enable:function(){return this._setOption("disabled",!1)},disable:function(){return this._setOption("disabled",!0)},_on:function(i,s,n){var o,a=this;"boolean"!=typeof i&&(n=s,s=i,i=!1),n?(s=o=t(s),this.bindings=this.bindings.add(s)):(n=s,s=this.element,o=this.widget()),t.each(n,function(n,r){function h(){return i||a.options.disabled!==!0&&!t(this).hasClass("ui-state-disabled")?("string"==typeof r?a[r]:r).apply(a,arguments):e}"string"!=typeof r&&(h.guid=r.guid=r.guid||h.guid||t.guid++);var l=n.match(/^(\w+)\s*(.*)$/),c=l[1]+a.eventNamespace,u=l[2];u?o.delegate(u,c,h):s.bind(c,h)})},_off:function(t,e){e=(e||"").split(" ").join(this.eventNamespace+" ")+this.eventNamespace,t.unbind(e).undelegate(e)},_delay:function(t,e){function i(){return("string"==typeof t?s[t]:t).apply(s,arguments)}var s=this;return setTimeout(i,e||0)},_hoverable:function(e){this.hoverable=this.hoverable.add(e),this._on(e,{mouseenter:function(e){t(e.currentTarget).addClass("ui-state-hover")},mouseleave:function(e){t(e.currentTarget).removeClass("ui-state-hover")}})},_focusable:function(e){this.focusable=this.focusable.add(e),this._on(e,{focusin:function(e){t(e.currentTarget).addClass("ui-state-focus")},focusout:function(e){t(e.currentTarget).removeClass("ui-state-focus")}})},_trigger:function(e,i,s){var n,o,a=this.options[e];if(s=s||{},i=t.Event(i),i.type=(e===this.widgetEventPrefix?e:this.widgetEventPrefix+e).toLowerCase(),i.target=this.element[0],o=i.originalEvent)for(n in o)n in i||(i[n]=o[n]);return 
this.element.trigger(i,s),!(t.isFunction(a)&&a.apply(this.element[0],[i].concat(s))===!1||i.isDefaultPrevented())}},t.each({show:"fadeIn",hide:"fadeOut"},function(e,i){t.Widget.prototype["_"+e]=function(s,n,o){"string"==typeof n&&(n={effect:n});var a,r=n?n===!0||"number"==typeof n?i:n.effect||i:e;n=n||{},"number"==typeof n&&(n={duration:n}),a=!t.isEmptyObject(n),n.complete=o,n.delay&&s.delay(n.delay),a&&t.effects&&t.effects.effect[r]?s[e](n):r!==e&&s[r]?s[r](n.duration,n.easing,o):s.queue(function(i){t(this)[e](),o&&o.call(s[0]),i()})}})})(jQuery);(function(t,e){function i(t,e,i){return[parseFloat(t[0])*(p.test(t[0])?e/100:1),parseFloat(t[1])*(p.test(t[1])?i/100:1)]}function s(e,i){return parseInt(t.css(e,i),10)||0}function n(e){var i=e[0];return 9===i.nodeType?{width:e.width(),height:e.height(),offset:{top:0,left:0}}:t.isWindow(i)?{width:e.width(),height:e.height(),offset:{top:e.scrollTop(),left:e.scrollLeft()}}:i.preventDefault?{width:0,height:0,offset:{top:i.pageY,left:i.pageX}}:{width:e.outerWidth(),height:e.outerHeight(),offset:e.offset()}}t.ui=t.ui||{};var a,o=Math.max,r=Math.abs,l=Math.round,h=/left|center|right/,c=/top|center|bottom/,u=/[\+\-]\d+(\.[\d]+)?%?/,d=/^\w+/,p=/%$/,f=t.fn.position;t.position={scrollbarWidth:function(){if(a!==e)return a;var i,s,n=t("
"),o=n.children()[0];return t("body").append(n),i=o.offsetWidth,n.css("overflow","scroll"),s=o.offsetWidth,i===s&&(s=n[0].clientWidth),n.remove(),a=i-s},getScrollInfo:function(e){var i=e.isWindow||e.isDocument?"":e.element.css("overflow-x"),s=e.isWindow||e.isDocument?"":e.element.css("overflow-y"),n="scroll"===i||"auto"===i&&e.widths?"left":i>0?"right":"center",vertical:0>a?"top":n>0?"bottom":"middle"};u>p&&p>r(i+s)&&(l.horizontal="center"),d>g&&g>r(n+a)&&(l.vertical="middle"),l.important=o(r(i),r(s))>o(r(n),r(a))?"horizontal":"vertical",e.using.call(this,t,l)}),c.offset(t.extend(M,{using:h}))})},t.ui.position={fit:{left:function(t,e){var i,s=e.within,n=s.isWindow?s.scrollLeft:s.offset.left,a=s.width,r=t.left-e.collisionPosition.marginLeft,l=n-r,h=r+e.collisionWidth-a-n;e.collisionWidth>a?l>0&&0>=h?(i=t.left+l+e.collisionWidth-a-n,t.left+=l-i):t.left=h>0&&0>=l?n:l>h?n+a-e.collisionWidth:n:l>0?t.left+=l:h>0?t.left-=h:t.left=o(t.left-r,t.left)},top:function(t,e){var i,s=e.within,n=s.isWindow?s.scrollTop:s.offset.top,a=e.within.height,r=t.top-e.collisionPosition.marginTop,l=n-r,h=r+e.collisionHeight-a-n;e.collisionHeight>a?l>0&&0>=h?(i=t.top+l+e.collisionHeight-a-n,t.top+=l-i):t.top=h>0&&0>=l?n:l>h?n+a-e.collisionHeight:n:l>0?t.top+=l:h>0?t.top-=h:t.top=o(t.top-r,t.top)}},flip:{left:function(t,e){var i,s,n=e.within,a=n.offset.left+n.scrollLeft,o=n.width,l=n.isWindow?n.scrollLeft:n.offset.left,h=t.left-e.collisionPosition.marginLeft,c=h-l,u=h+e.collisionWidth-o-l,d="left"===e.my[0]?-e.elemWidth:"right"===e.my[0]?e.elemWidth:0,p="left"===e.at[0]?e.targetWidth:"right"===e.at[0]?-e.targetWidth:0,f=-2*e.offset[0];0>c?(i=t.left+d+p+f+e.collisionWidth-o-a,(0>i||r(c)>i)&&(t.left+=d+p+f)):u>0&&(s=t.left-e.collisionPosition.marginLeft+d+p+f-l,(s>0||u>r(s))&&(t.left+=d+p+f))},top:function(t,e){var i,s,n=e.within,a=n.offset.top+n.scrollTop,o=n.height,l=n.isWindow?n.scrollTop:n.offset.top,h=t.top-e.collisionPosition.marginTop,c=h-l,u=h+e.collisionHeight-o-l,d="top"===e.my[1],p=d?-e.elemHeight:"bottom"===e.my[1]?e.elemHeight:0,f="top"===e.at[1]?e.targetHeight:"bottom"===e.at[1]?-e.targetHeight:0,g=-2*e.offset[1];0>c?(s=t.top+p+f+g+e.collisionHeight-o-a,t.top+p+f+g>c&&(0>s||r(c)>s)&&(t.top+=p+f+g)):u>0&&(i=t.top-e.collisionPosition.marginTop+p+f+g-l,t.top+p+f+g>u&&(i>0||u>r(i))&&(t.top+=p+f+g))}},flipfit:{left:function(){t.ui.position.flip.left.apply(this,arguments),t.ui.position.fit.left.apply(this,arguments)},top:function(){t.ui.position.flip.top.apply(this,arguments),t.ui.position.fit.top.apply(this,arguments)}}},function(){var e,i,s,n,a,o=document.getElementsByTagName("body")[0],r=document.createElement("div");e=document.createElement(o?"div":"body"),s={visibility:"hidden",width:0,height:0,border:0,margin:0,background:"none"},o&&t.extend(s,{position:"absolute",left:"-1000px",top:"-1000px"});for(a in s)e.style[a]=s[a];e.appendChild(r),i=o||document.documentElement,i.insertBefore(e,i.firstChild),r.style.cssText="position: absolute; left: 10.7432222px;",n=t(r).offset().left,t.support.offsetFractions=n>10&&11>n,e.innerHTML="",i.removeChild(e)}()})(jQuery);(function(e){e.widget("ui.autocomplete",{version:"1.10.4",defaultElement:"",options:{appendTo:null,autoFocus:!1,delay:300,minLength:1,position:{my:"left top",at:"left bottom",collision:"none"},source:null,change:null,close:null,focus:null,open:null,response:null,search:null,select:null},requestIndex:0,pending:0,_create:function(){var 
t,i,s,n=this.element[0].nodeName.toLowerCase(),a="textarea"===n,o="input"===n;this.isMultiLine=a?!0:o?!1:this.element.prop("isContentEditable"),this.valueMethod=this.element[a||o?"val":"text"],this.isNewMenu=!0,this.element.addClass("ui-autocomplete-input").attr("autocomplete","off"),this._on(this.element,{keydown:function(n){if(this.element.prop("readOnly"))return t=!0,s=!0,i=!0,undefined;t=!1,s=!1,i=!1;var a=e.ui.keyCode;switch(n.keyCode){case a.PAGE_UP:t=!0,this._move("previousPage",n);break;case a.PAGE_DOWN:t=!0,this._move("nextPage",n);break;case a.UP:t=!0,this._keyEvent("previous",n);break;case a.DOWN:t=!0,this._keyEvent("next",n);break;case a.ENTER:case a.NUMPAD_ENTER:this.menu.active&&(t=!0,n.preventDefault(),this.menu.select(n));break;case a.TAB:this.menu.active&&this.menu.select(n);break;case a.ESCAPE:this.menu.element.is(":visible")&&(this._value(this.term),this.close(n),n.preventDefault());break;default:i=!0,this._searchTimeout(n)}},keypress:function(s){if(t)return t=!1,(!this.isMultiLine||this.menu.element.is(":visible"))&&s.preventDefault(),undefined;if(!i){var n=e.ui.keyCode;switch(s.keyCode){case n.PAGE_UP:this._move("previousPage",s);break;case n.PAGE_DOWN:this._move("nextPage",s);break;case n.UP:this._keyEvent("previous",s);break;case n.DOWN:this._keyEvent("next",s)}}},input:function(e){return s?(s=!1,e.preventDefault(),undefined):(this._searchTimeout(e),undefined)},focus:function(){this.selectedItem=null,this.previous=this._value()},blur:function(e){return this.cancelBlur?(delete this.cancelBlur,undefined):(clearTimeout(this.searching),this.close(e),this._change(e),undefined)}}),this._initSource(),this.menu=e("