diff --git a/derailed-benchmarks/CHANGELOG.md b/derailed-benchmarks/CHANGELOG.md index 740231210b..83a5a654f3 100644 --- a/derailed-benchmarks/CHANGELOG.md +++ b/derailed-benchmarks/CHANGELOG.md @@ -1,5 +1,9 @@ ## master (unreleased) +## 1.4.3 + +- perf:library now uses median instead of average (https://github.com/schneems/derailed_benchmarks/pull/160) + ## 1.4.2 - Fixed syntax error that resulted in ensure end error inside tasks.rb for older rubies (https://github.com/schneems/derailed_benchmarks/pull/155) diff --git a/derailed-benchmarks/README.md b/derailed-benchmarks/README.md index 8f309691b7..91c2df71a3 100644 --- a/derailed-benchmarks/README.md +++ b/derailed-benchmarks/README.md @@ -431,7 +431,7 @@ Use a comma to separate your branch names with the `SHAS_TO_TEST` env var, or om If you only include one SHA, then derailed will grab the latest commit and compare it to that SHA. -These tests might take a long time to run, so the output is stored on disk in case you want to see them in the future; they're at `tmp/library_branches/` and labeled with the same names as your commits. +These tests might take a long time to run, so the output is stored on disk in case you want to see them in the future; they're at `tmp/compare_branches/` and labeled with the same names as your commits. When the test is done, it will output which commit "won" and by how much: diff --git a/derailed-benchmarks/lib/derailed_benchmarks/stats_from_dir.rb b/derailed-benchmarks/lib/derailed_benchmarks/stats_from_dir.rb index c47976fa1b..378da4d066 100644 --- a/derailed-benchmarks/lib/derailed_benchmarks/stats_from_dir.rb +++ b/derailed-benchmarks/lib/derailed_benchmarks/stats_from_dir.rb @@ -66,15 +66,27 @@ module DerailedBenchmarks end def x_faster - FORMAT % (oldest.average/newest.average).to_f + (oldest.median/newest.median).to_f + end + + def faster? + newest.median < oldest.median end def percent_faster - FORMAT % (((oldest.average - newest.average) / oldest.average).to_f * 100) + (((oldest.median - newest.median) / oldest.median).to_f * 100) end def change_direction - newest.average < oldest.average ? "FASTER" : "SLOWER" + if faster? + "FASTER 🚀🚀🚀" + else + "SLOWER 🐢🐢🐢" + end + end + + def align + " " * (("%i" % percent_faster).length - ("%i" % x_faster).length) end def banner(io = Kernel) @@ -85,11 +97,11 @@ module DerailedBenchmarks io.puts "👎👎👎(NOT Statistically Significant) 👎👎👎" end io.puts - io.puts "[#{newest.name}] #{newest.desc.inspect} - (#{newest.average} seconds)" + io.puts "[#{newest.name}] #{newest.desc.inspect} - (#{newest.median} seconds)" io.puts " #{change_direction} by:" - io.puts " #{x_faster}x [older/newer]" - io.puts " #{percent_faster}\% [(older - newer) / older * 100]" - io.puts "[#{oldest.name}] #{oldest.desc.inspect} - (#{oldest.average} seconds)" + io.puts " #{align}#{FORMAT % x_faster}x [older/newer]" + io.puts " #{FORMAT % percent_faster}\% [(older - newer) / older * 100]" + io.puts "[#{oldest.name}] #{oldest.desc.inspect} - (#{oldest.median} seconds)" io.puts io.puts "Iterations per sample: #{ENV["TEST_COUNT"]}" io.puts "Samples: #{newest.values.length}" diff --git a/derailed-benchmarks/lib/derailed_benchmarks/stats_in_file.rb b/derailed-benchmarks/lib/derailed_benchmarks/stats_in_file.rb index a524f0e0e1..d7f6ce88bd 100644 --- a/derailed-benchmarks/lib/derailed_benchmarks/stats_in_file.rb +++ b/derailed-benchmarks/lib/derailed_benchmarks/stats_in_file.rb @@ -30,9 +30,14 @@ module DerailedBenchmarks def call load_file!
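+ # values is sorted on load (see load_file! below), so indexes (n - 1) / 2 and
+ # n / 2 both point at the middle element when n is odd, and straddle it when
+ # n is even, e.g. values = [1, 2, 3, 4] gives (values[1] + values[2]) / 2.0 #=> 2.5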
+ @median = (values[(values.length - 1) / 2] + values[values.length / 2]) / 2.0 @average = values.inject(:+) / values.length end + def median + @median.to_f + end + def average + @average.to_f end @@ -47,6 +52,8 @@ module DerailedBenchmarks raise e, "Problem with file #{@file.inspect}:\n#{@file.read}\n#{e.message}" end end + + values.sort! values.freeze end end diff --git a/derailed-benchmarks/lib/derailed_benchmarks/tasks.rb b/derailed-benchmarks/lib/derailed_benchmarks/tasks.rb index 7f7f5fd5c0..675d55ac86 100644 --- a/derailed-benchmarks/lib/derailed_benchmarks/tasks.rb +++ b/derailed-benchmarks/lib/derailed_benchmarks/tasks.rb @@ -39,7 +39,7 @@ namespace :perf do current_library_branch = "" Dir.chdir(library_dir) { current_library_branch = run!('git describe --contains --all HEAD').chomp } - out_dir = Pathname.new("tmp/library_branches/#{Time.now.strftime('%Y-%m-%d-%H-%M-%s-%N')}") + out_dir = Pathname.new("tmp/compare_branches/#{Time.now.strftime('%Y-%m-%d-%H-%M-%s-%N')}") out_dir.mkpath branches_to_test = branch_names.each_with_object({}) {|elem, hash| hash[elem] = out_dir + "#{elem.gsub('/', ':')}.bench.txt" } @@ -93,10 +93,18 @@ namespace :perf do end end - stats.call.banner if stats + if stats + stats.call.banner + + result_file = out_dir + "results.txt" + File.open(result_file, "w") do |f| + stats.banner(f) + end + + puts "Output: #{result_file.to_s}" + end end - end - + end desc "hits the url TEST_COUNT times" task :test => [:setup] do diff --git a/derailed-benchmarks/lib/derailed_benchmarks/version.rb b/derailed-benchmarks/lib/derailed_benchmarks/version.rb index e77d2c2c89..ec8dd18b05 100644 --- a/derailed-benchmarks/lib/derailed_benchmarks/version.rb +++ b/derailed-benchmarks/lib/derailed_benchmarks/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module DerailedBenchmarks - VERSION = "1.4.2" + VERSION = "1.4.3" end diff --git a/derailed-benchmarks/test/derailed_benchmarks/stats_from_dir_test.rb b/derailed-benchmarks/test/derailed_benchmarks/stats_from_dir_test.rb index 8b4a45a4c4..7aad7b037c 100644 --- a/derailed-benchmarks/test/derailed_benchmarks/stats_from_dir_test.rb +++ b/derailed-benchmarks/test/derailed_benchmarks/stats_from_dir_test.rb @@ -22,8 +22,11 @@ class StatsFromDirTest < ActiveSupport::TestCase assert_in_delta 0.1730818382602285, stats.d_critical, 0.00001 assert_equal true, stats.significant?
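+ # Medians rather than averages now feed the formatted comparison, so the
+ # expected percent shifts slightly for the same fixture data (0.6131 -> 0.6147)
+ # while x_faster still rounds to 1.0062.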
- assert_equal "1.0062", stats.x_faster - assert_equal "0.6131", stats.percent_faster + format = DerailedBenchmarks::StatsFromDir::FORMAT + assert_equal "1.0062", format % stats.x_faster + assert_equal "0.6147", format % stats.percent_faster + + assert_equal "11.3844", format % newest.median end test "banner faster" do @@ -44,17 +47,17 @@ class StatsFromDirTest < ActiveSupport::TestCase "0.001" end - def newest.average + def newest.median 10.5 end - def oldest.average + def oldest.median 11.0 end - expected = <<-EOM + expected = <<~EOM [winner] "I am the new commit" - (10.5 seconds) - FASTER by: + FASTER 🚀🚀🚀 by: 1.0476x [older/newer] 4.5455% [(older - newer) / older * 100] [loser] "Old commit" - (11.0 seconds) @@ -75,18 +78,18 @@ EOM newest = stats.newest oldest = stats.oldest - def oldest.average + def oldest.median 10.5 end - def newest.average + def newest.median 11.0 end - expected = <<-EOM + expected = <<~EOM [loser] "I am the new commit" - (11.0 seconds) - SLOWER by: - 0.9545x [older/newer] + SLOWER 🐢🐢🐢 by: + 0.9545x [older/newer] -4.7619% [(older - newer) / older * 100] [winner] "Old commit" - (10.5 seconds) EOM diff --git a/derailed-benchmarks/test/rails_app/app/assets/config/manifest.js b/derailed-benchmarks/test/rails_app/app/assets/config/manifest.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/elasticsearch-model/.gitignore b/elasticsearch-model/.gitignore new file mode 100644 index 0000000000..3934d7e554 --- /dev/null +++ b/elasticsearch-model/.gitignore @@ -0,0 +1,20 @@ +*.gem +*.rbc +.bundle +.config +.yardoc +Gemfile.lock +InstalledFiles +_yardoc +coverage +doc/ +lib/bundler/man +pkg +rdoc +spec/reports +test/tmp +test/version_tmp +tmp + +gemfiles/3.0.gemfile.lock +gemfiles/4.0.gemfile.lock diff --git a/elasticsearch-model/CHANGELOG.md b/elasticsearch-model/CHANGELOG.md new file mode 100644 index 0000000000..1b28033835 --- /dev/null +++ b/elasticsearch-model/CHANGELOG.md @@ -0,0 +1,74 @@ +## 0.1.9 + +* Added a `suggest` method to wrap the suggestions in response +* Added the `:includes` option to Adapter::ActiveRecord::Records for eagerly loading associated models +* Delegated `max_pages` method properly for Kaminari's `next_page` +* Fixed `#dup` behaviour for Elasticsearch::Model +* Fixed typos in the README and examples + +## 0.1.8 + +* Added "default per page" methods for pagination with multi model searches +* Added a convenience accessor for the `aggregations` part of response +* Added a full example with mapping for the completion suggester +* Added an integration test for paginating multiple models +* Added proper support for the new "multi_fields" in the mapping DSL +* Added the `no_timeout` option for `__find_in_batches` in the Mongoid adapter +* Added, that index settings can be loaded from any object that responds to `:read` +* Added, that index settings/mappings can be loaded from a YAML or JSON file +* Added, that String pagination parameters are converted to numbers +* Added, that empty block is not required for setting mapping options +* Added, that on MyModel#import, an exception is raised if the index does not exists +* Changed the Elasticsearch port in the Mongoid example to 9200 +* Cleaned up the tests for multiple fields/properties in mapping DSL +* Fixed a bug where continuous `#save` calls emptied the `@__changed_attributes` variable +* Fixed a buggy test introduced in #335 +* Fixed incorrect deserialization of records in the Multiple adapter +* Fixed incorrect examples and documentation +* Fixed unreliable order of returned 
results/records in the integration test for the multiple adapter +* Fixed, that `param_name` is used when paginating with WillPaginate +* Fixed the problem where `document_type` configuration was not propagated to mapping +* Refactored the code in `__find_in_batches` to use Enumerable#each_slice +* Refactored the string queries in multiple_models_test.rb to avoid quote escaping + +## 0.1.7 + +* Improved examples and instructions in README and code annotations +* Prevented index methods from swallowing all exceptions +* Added the `:validate` option to the `save` method for models +* Added support for searching across multiple models (elastic/elasticsearch-rails#345), + including documentation, examples and tests + +## 0.1.6 + +* Improved documentation +* Added dynamic getter/setter (block/proc) for `MyModel.index_name` +* Added the `update_document_attributes` method +* Added, that records to import can be limited by the `query` option + +## 0.1.5 + +* Improved documentation +* Fixes and improvements to the "will_paginate" integration +* Added a `:preprocess` option to the `import` method +* Changed, that attributes are fetched from `as_indexed_json` in the `update_document` method +* Added an option to the import method to return an array of error messages instead of just the count +* Fixed many problems with dependency hell +* Fixed tests so they run on Ruby 2.2 + +## 0.1.2 + +* Properly delegate existence methods like `result.foo?` to `result._source.foo` +* Exception is raised when `type` is not passed to Mappings#new +* Allow passing an ActiveRecord scope to the `import` method +* Added, that `each_with_hit` and `map_with_hit` in `Elasticsearch::Model::Response::Records` call `to_a` +* Added support for [`will_paginate`](https://github.com/mislav/will_paginate) pagination library +* Added the ability to transform models during indexing +* Added explicit `type` and `id` methods to Response::Result, aliasing `_type` and `_id` + +## 0.1.1 + +* Improved documentation and tests +* Fixed Kaminari implementation bugs and inconsistencies + +## 0.1.0 (Initial Version) diff --git a/elasticsearch-model/Gemfile b/elasticsearch-model/Gemfile new file mode 100644 index 0000000000..a54f5084ea --- /dev/null +++ b/elasticsearch-model/Gemfile @@ -0,0 +1,4 @@ +source 'https://rubygems.org' + +# Specify your gem's dependencies in elasticsearch-model.gemspec +gemspec diff --git a/elasticsearch-model/LICENSE.txt b/elasticsearch-model/LICENSE.txt new file mode 100644 index 0000000000..7dc94b3e5a --- /dev/null +++ b/elasticsearch-model/LICENSE.txt @@ -0,0 +1,13 @@ +Copyright (c) 2014 Elasticsearch + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/elasticsearch-model/README.md b/elasticsearch-model/README.md new file mode 100644 index 0000000000..c219a46001 --- /dev/null +++ b/elasticsearch-model/README.md @@ -0,0 +1,720 @@ +# Elasticsearch::Model + +The `elasticsearch-model` library builds on top of +the [`elasticsearch`](https://github.com/elasticsearch/elasticsearch-ruby) library.
+ +It aims to simplify integration of Ruby classes ("models"), commonly found +e.g. in [Ruby on Rails](http://rubyonrails.org) applications, with the +[Elasticsearch](http://www.elasticsearch.org) search and analytics engine. + +The library is compatible with Ruby 1.9.3 and higher. + +## Installation + +Install the package from [Rubygems](https://rubygems.org): + + gem install elasticsearch-model + +To use an unreleased version, either add it to your `Gemfile` for [Bundler](http://bundler.io): + + gem 'elasticsearch-model', git: 'git://github.com/elasticsearch/elasticsearch-rails.git' + +or install it from a source code checkout: + + git clone https://github.com/elasticsearch/elasticsearch-rails.git + cd elasticsearch-rails/elasticsearch-model + bundle install + rake install + + +## Usage + +Let's suppose you have an `Article` model: + +```ruby +require 'active_record' +ActiveRecord::Base.establish_connection( adapter: 'sqlite3', database: ":memory:" ) +ActiveRecord::Schema.define(version: 1) { create_table(:articles) { |t| t.string :title } } + +class Article < ActiveRecord::Base; end + +Article.create title: 'Quick brown fox' +Article.create title: 'Fast black dogs' +Article.create title: 'Swift green frogs' +``` + +### Setup + +To add the Elasticsearch integration for this model, require `elasticsearch/model` +and include the main module in your class: + +```ruby +require 'elasticsearch/model' + +class Article < ActiveRecord::Base + include Elasticsearch::Model +end +``` + +This will extend the model with functionality related to Elasticsearch. + +#### Feature Extraction Pattern + +Instead of including the `Elasticsearch::Model` module directly in your model, +you can include it in a "concern" or "trait" module, which is quite a common pattern in Rails applications, +using e.g. `ActiveSupport::Concern` as the instrumentation: + +```ruby +# In: app/models/concerns/searchable.rb +# +module Searchable + extend ActiveSupport::Concern + + included do + include Elasticsearch::Model + + mapping do + # ... + end + + def self.search(query) + # ... + end + end +end + +# In: app/models/article.rb +# +class Article + include Searchable +end +``` + +#### The `__elasticsearch__` Proxy + +The `Elasticsearch::Model` module contains a large number of class and instance methods to provide +all its functionality. To prevent polluting your model namespace, this functionality is primarily +available via the `__elasticsearch__` class and instance level proxy methods; +see the `Elasticsearch::Model::Proxy` class documentation for technical information. + +The module will include important methods, such as `search`, into the class or module only +when they haven't been defined already. The following two calls are thus functionally equivalent: + +```ruby +Article.__elasticsearch__.search 'fox' +Article.search 'fox' +``` + +See the `Elasticsearch::Model` module documentation for technical information. + +### The Elasticsearch client + +The module will set up a [client](https://github.com/elasticsearch/elasticsearch-ruby/tree/master/elasticsearch), +connected to `localhost:9200`, by default. You can access and use it as any other `Elasticsearch::Client`: + +```ruby +Article.__elasticsearch__.client.cluster.health +# => { "cluster_name"=>"elasticsearch", "status"=>"yellow", ...
} +``` + +To use a client with different configuration, just set up a client for the model: + +```ruby +Article.__elasticsearch__.client = Elasticsearch::Client.new host: 'api.server.org' +``` + +Or configure the client for all models: + +```ruby +Elasticsearch::Model.client = Elasticsearch::Client.new log: true +``` + +You might want to do this during your application bootstrap process, e.g. in a Rails initializer. + +Please refer to the +[`elasticsearch-transport`](https://github.com/elasticsearch/elasticsearch-ruby/tree/master/elasticsearch-transport) +library documentation for all the configuration options, and to the +[`elasticsearch-api`](http://rubydoc.info/gems/elasticsearch-api) library documentation +for information about the Ruby client API. + +### Importing the data + +The first thing you'll want to do is to import your data into the index: + +```ruby +Article.import +# => 0 +``` + +It's possible to import only records from a specific `scope` or `query`, transform the batch with the `transform` +and `preprocess` options, or re-create the index by deleting it and creating it with the correct mapping with the `force` option -- look for examples in the method documentation. + +No errors were reported during importing, so... let's search the index! + + +### Searching + +For starters, we can try the "simple" type of search: + +```ruby +response = Article.search 'fox dogs' + +response.took +# => 3 + +response.results.total +# => 2 + +response.results.first._score +# => 0.02250402 + +response.results.first._source.title +# => "Quick brown fox" +``` + +#### Search results + +The returned `response` object is a rich wrapper around the JSON returned from Elasticsearch, +providing access to response metadata and the actual results ("hits"). + +Each "hit" is wrapped in the `Result` class, and provides method access +to its properties via [`Hashie::Mash`](http://github.com/intridea/hashie). + +The `results` object supports the `Enumerable` interface: + +```ruby +response.results.map { |r| r._source.title } +# => ["Quick brown fox", "Fast black dogs"] + +response.results.select { |r| r.title =~ /^Q/ } +# => [#{"title"=>"Quick brown fox"}}>] + +In fact, the `response` object will delegate `Enumerable` methods to `results`: + +```ruby +response.any? { |r| r.title =~ /fox|dog/ } +# => true +``` + +To use `Array`'s methods (including any _ActiveSupport_ extensions), just call `to_a` on the object: + +```ruby +response.to_a.last.title +# "Fast black dogs" +``` + +#### Search results as database records + +Instead of returning documents from Elasticsearch, the `records` method will return a collection +of model instances, fetched from the primary database, ordered by score: + +```ruby +response.records.to_a +# Article Load (0.3ms) SELECT "articles".* FROM "articles" WHERE "articles"."id" IN (1, 2) +# => [#<Article id: 1, title: "Quick brown fox">
, #<Article id: 2, title: "Fast black dogs">
] +``` + +The returned object is the genuine collection of model instances returned by your database, +i.e. `ActiveRecord::Relation` for ActiveRecord, or `Mongoid::Criteria` in case of MongoDB. + +This allows you to chain other methods on top of search results, as you would normally do: + +```ruby +response.records.where(title: 'Quick brown fox').to_a +# Article Load (0.2ms) SELECT "articles".* FROM "articles" WHERE "articles"."id" IN (1, 2) AND "articles"."title" = 'Quick brown fox' +# => [#<Article id: 1, title: "Quick brown fox">
] + +response.records.records.class +# => ActiveRecord::Relation::ActiveRecord_Relation_Article +``` + +The ordering of the records by score will be preserved, unless you explicitly specify a different +order in your model query language: + +```ruby +response.records.order(:title).to_a +# Article Load (0.2ms) SELECT "articles".* FROM "articles" WHERE "articles"."id" IN (1, 2) ORDER BY "articles".title ASC +# => [#<Article id: 2, title: "Fast black dogs">
, #<Article id: 1, title: "Quick brown fox">
] +``` + +The `records` method returns the real instances of your model, which is useful when you want to access your +model methods -- at the expense of slowing down your application, of course. +In most cases, working with `results` coming from Elasticsearch is sufficient, and much faster. See the +[`elasticsearch-rails`](https://github.com/elasticsearch/elasticsearch-rails/tree/master/elasticsearch-rails) +library for more information about compatibility with the Ruby on Rails framework. + +When you want to access both the database `records` and search `results`, use the `each_with_hit` +(or `map_with_hit`) iterator: + +```ruby +response.records.each_with_hit { |record, hit| puts "* #{record.title}: #{hit._score}" } +# * Quick brown fox: 0.02250402 +# * Fast black dogs: 0.02250402 +``` + +#### Searching multiple models + +It is possible to search across multiple models with the module method: + +```ruby +Elasticsearch::Model.search('fox', [Article, Comment]).results.to_a.map(&:to_hash) +# => [ +# {"_index"=>"articles", "_type"=>"article", "_id"=>"1", "_score"=>0.35136628, "_source"=>...}, +# {"_index"=>"comments", "_type"=>"comment", "_id"=>"1", "_score"=>0.35136628, "_source"=>...} +# ] + +Elasticsearch::Model.search('fox', [Article, Comment]).records.to_a +# Article Load (0.3ms) SELECT "articles".* FROM "articles" WHERE "articles"."id" IN (1) +# Comment Load (0.2ms) SELECT "comments".* FROM "comments" WHERE "comments"."id" IN (1,5) +# => [#<Article id: 1, title: "Quick brown fox">
, #<Comment id: 1, ...>, ...] + +By default, all models which include the `Elasticsearch::Model` module are searched. + +NOTE: It is _not_ possible to chain other methods on top of the `records` object, since it + is a heterogeneous collection, with models potentially backed by different databases. + +#### Pagination + +You can implement pagination with the `from` and `size` search parameters. However, search results +can be automatically paginated with the [`kaminari`](http://rubygems.org/gems/kaminari) or +[`will_paginate`](https://github.com/mislav/will_paginate) gems. +(The pagination gems must be added before the Elasticsearch gems in your Gemfile, +or loaded first in your application.) + +If Kaminari or WillPaginate is loaded, use the familiar paging methods: + +```ruby +response.page(2).results +response.page(2).records +``` + +In a Rails controller, use the `params[:page]` parameter to paginate through results: + +```ruby +@articles = Article.search(params[:q]).page(params[:page]).records + +@articles.current_page +# => 2 +@articles.next_page +# => 3 +``` +To initialize and include the Kaminari pagination support manually: + +```ruby +Kaminari::Hooks.init +Elasticsearch::Model::Response::Response.__send__ :include, Elasticsearch::Model::Response::Pagination::Kaminari +``` + +#### The Elasticsearch DSL + +In most situations, you'll want to pass the search definition +in the Elasticsearch [domain-specific language](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/query-dsl.html) to the client: + +```ruby +response = Article.search query: { match: { title: "Fox Dogs" } }, + highlight: { fields: { title: {} } } + +response.results.first.highlight.title +# ["Quick brown fox"] +``` + +You can pass any object which implements a `to_hash` method, or you can use your favourite JSON builder +to build the search definition as a JSON string: + +```ruby +require 'jbuilder' + +query = Jbuilder.encode do |json| + json.query do + json.match do + json.title do + json.query "fox dogs" + end + end + end +end + +response = Article.search query +response.results.first.title +# => "Quick brown fox" +``` + +### Index Configuration + +For proper search engine function, it's often necessary to configure the index explicitly. +The `Elasticsearch::Model` integration provides class methods to set up index settings and mappings. + +```ruby +class Article + settings index: { number_of_shards: 1 } do + mappings dynamic: 'false' do + indexes :title, analyzer: 'english', index_options: 'offsets' + end + end +end + +Article.mappings.to_hash +# => { +# :article => { +# :dynamic => "false", +# :properties => { +# :title => { +# :type => "string", +# :analyzer => "english", +# :index_options => "offsets" +# } +# } +# } +# } + +Article.settings.to_hash +# { :index => { :number_of_shards => 1 } } +``` + +You can use the defined settings and mappings to create an index with the desired configuration: + +```ruby +Article.__elasticsearch__.client.indices.delete index: Article.index_name rescue nil +Article.__elasticsearch__.client.indices.create \ + index: Article.index_name, + body: { settings: Article.settings.to_hash, mappings: Article.mappings.to_hash } +``` + +There's a shortcut available for this common operation (convenient e.g. in tests): + +```ruby +Article.__elasticsearch__.create_index! force: true +Article.__elasticsearch__.refresh_index!
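+# `force: true` drops any existing index of that name first; `refresh_index!`
+# makes the freshly indexed documents immediately visible to search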
+``` + +By default, the index name and document type will be inferred from your class name; +you can, however, set them explicitly: + +```ruby +class Article + index_name "articles-#{Rails.env}" + document_type "post" +end +``` + +### Updating the Documents in the Index + +Usually, we need to update the Elasticsearch index when records in the database are created, updated or deleted; +use the `index_document`, `update_document` and `delete_document` methods, respectively: + +```ruby +Article.first.__elasticsearch__.index_document +# => {"ok"=>true, ... "_version"=>2} +``` + +#### Automatic Callbacks + +You can automatically update the index whenever the record changes, by including +the `Elasticsearch::Model::Callbacks` module in your model: + +```ruby +class Article + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks +end + +Article.first.update_attribute :title, 'Updated!' + +Article.search('*').map { |r| r.title } +# => ["Updated!", "Lime green frogs", "Fast black dogs"] +``` + +The automatic callback on record update keeps track of changes in your model +(via [`ActiveModel::Dirty`](http://api.rubyonrails.org/classes/ActiveModel/Dirty.html)-compliant implementation), +and performs a _partial update_ when this support is available. + +The automatic callbacks are implemented in database adapters coming with `Elasticsearch::Model`. You can easily +implement your own adapter: please see the relevant chapter below. + +#### Custom Callbacks + +In case you need more control over the indexing process, you can implement these callbacks yourself, +by hooking into `after_create`, `after_save`, `after_update` or `after_destroy` operations: + +```ruby +class Article + include Elasticsearch::Model + + after_save { logger.debug ["Updating document... ", index_document ].join } + after_destroy { logger.debug ["Deleting document... ", delete_document].join } +end +``` + +For ActiveRecord-based models, use the `after_commit` callback to protect +your data against inconsistencies caused by transaction rollbacks: + +```ruby +class Article < ActiveRecord::Base + include Elasticsearch::Model + + after_commit on: [:create] do + __elasticsearch__.index_document if self.published? + end + + after_commit on: [:update] do + __elasticsearch__.update_document if self.published? + end + + after_commit on: [:destroy] do + __elasticsearch__.delete_document if self.published? + end +end +``` + +#### Asynchronous Callbacks + +Of course, you're still performing an HTTP request during your database transaction, which is not optimal +for large-scale applications. A better option would be to process the index operations in the background, +with a tool like [_Resque_](https://github.com/resque/resque) or [_Sidekiq_](https://github.com/mperham/sidekiq): + +```ruby +class Article + include Elasticsearch::Model + + after_save { Indexer.perform_async(:index, self.id) } + after_destroy { Indexer.perform_async(:delete, self.id) } +end +``` + +An example implementation of the `Indexer` worker class could look like this: + +```ruby +class Indexer + include Sidekiq::Worker + sidekiq_options queue: 'elasticsearch', retry: false + + Logger = Sidekiq.logger.level == Logger::DEBUG ?
Sidekiq.logger : nil + Client = Elasticsearch::Client.new host: 'localhost:9200', logger: Logger + + def perform(operation, record_id) + logger.debug [operation, "ID: #{record_id}"] + + case operation.to_s + when /index/ + record = Article.find(record_id) + Client.index index: 'articles', type: 'article', id: record.id, body: record.as_indexed_json + when /delete/ + Client.delete index: 'articles', type: 'article', id: record_id + else raise ArgumentError, "Unknown operation '#{operation}'" + end + end +end +``` + +Start the _Sidekiq_ workers with `bundle exec sidekiq --queue elasticsearch --verbose` and +update a model: + +```ruby +Article.first.update_attribute :title, 'Updated' +``` + +You'll see the job being processed in the console where you started the _Sidekiq_ worker: + +``` +Indexer JID-eb7e2daf389a1e5e83697128 DEBUG: ["index", "ID: 7"] +Indexer JID-eb7e2daf389a1e5e83697128 INFO: PUT http://localhost:9200/articles/article/1 [status:200, request:0.004s, query:n/a] +Indexer JID-eb7e2daf389a1e5e83697128 DEBUG: > {"id":1,"title":"Updated", ...} +Indexer JID-eb7e2daf389a1e5e83697128 DEBUG: < {"ok":true,"_index":"articles","_type":"article","_id":"1","_version":6} +Indexer JID-eb7e2daf389a1e5e83697128 INFO: done: 0.006 sec +``` + +### Model Serialization + +By default, the model instance will be serialized to JSON using the `as_indexed_json` method, +which is defined automatically by the `Elasticsearch::Model::Serializing` module: + +```ruby +Article.first.__elasticsearch__.as_indexed_json +# => {"id"=>1, "title"=>"Quick brown fox"} +``` + +If you want to customize the serialization, just implement the `as_indexed_json` method yourself, +for instance with the [`as_json`](http://api.rubyonrails.org/classes/ActiveModel/Serializers/JSON.html#method-i-as_json) method: + +```ruby +class Article + include Elasticsearch::Model + + def as_indexed_json(options={}) + as_json(only: 'title') + end +end + +Article.first.as_indexed_json +# => {"title"=>"Quick brown fox"} +``` + +The re-defined method will be used in the indexing methods, such as `index_document`. + +Please note that in Rails 3, you need to either set `include_root_in_json: false`, or prevent adding +the "root" in the JSON representation by other means. + +#### Relationships and Associations + +When you have a more complicated structure/schema, you need to customize the `as_indexed_json` method -- +or perform the indexing separately, on your own. +For example, let's have an `Article` model, which _has_many_ `Comment`s, +`Author`s and `Categories`. We might want to define the serialization like this: + +```ruby +def as_indexed_json(options={}) + self.as_json( + include: { categories: { only: :title}, + authors: { methods: [:full_name], only: [:full_name] }, + comments: { only: :text } + }) +end + +Article.first.as_indexed_json +# => { "id" => 1, +# "title" => "First Article", +# "created_at" => 2013-12-03 13:39:02 UTC, +# "updated_at" => 2013-12-03 13:39:02 UTC, +# "categories" => [ { "title" => "One" } ], +# "authors" => [ { "full_name" => "John Smith" } ], +# "comments" => [ { "text" => "First comment" } ] } +``` + +Of course, when you want to use the automatic indexing callbacks, you need to hook into the appropriate +_ActiveRecord_ callbacks -- please see the full example in `examples/activerecord_associations.rb`.
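+A minimal sketch of one such wiring follows, using ActiveRecord's `touch` mechanism to propagate +child changes to the parent (it mirrors the pattern in `examples/activerecord_associations.rb`; +the `as_indexed_json` shape here is only illustrative): + +```ruby +class Article < ActiveRecord::Base + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + has_many :comments + + # Re-index this document whenever an association touches the record + after_touch { __elasticsearch__.index_document } + + def as_indexed_json(options={}) + as_json(include: { comments: { only: :text } }) + end +end + +class Comment < ActiveRecord::Base + # `touch: true` bumps the article's updated_at and fires its after_touch + # callback, so saving or destroying a comment refreshes the parent document + belongs_to :article, touch: true +end +```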
+ +### Other ActiveModel Frameworks + +The `Elasticsearch::Model` module is fully compatible with any ActiveModel-compatible model, such as _Mongoid_: + +```ruby +require 'mongoid' + +Mongoid.connect_to 'articles' + +class Article + include Mongoid::Document + + field :id, type: String + field :title, type: String + + attr_accessible :id, :title, :published_at + + include Elasticsearch::Model + + def as_indexed_json(options={}) + as_json(except: [:id, :_id]) + end +end + +Article.create id: '1', title: 'Quick brown fox' +Article.import + +response = Article.search 'fox'; +response.records.to_a +# MOPED: 127.0.0.1:27017 QUERY database=articles collection=articles selector={"_id"=>{"$in"=>["1"]}} ... +# => [#
] + +Full examples for Couchbase, DataMapper, Mongoid, Ohm and Riak models can be found in the `examples` folder. + +### Adapters + +To support various "OxM" (object-relational- or object-document-mapper) implementations and frameworks, +the `Elasticsearch::Model` integration supports an "adapter" concept. + +An adapter provides implementations for common behaviour, such as fetching records from the database, +hooking into model callbacks for automatic index updates, or efficient bulk loading from the database. +The integration comes with adapters for _ActiveRecord_ and _Mongoid_ out of the box. + +Writing an adapter for your favourite framework is straightforward -- let's see +a simplified adapter for [_DataMapper_](http://datamapper.org): + +```ruby +module DataMapperAdapter + + # Implement the interface for fetching records + # + module Records + def records + klass.all(id: @ids) + end + + # ... + end +end + +# Register the adapter +# +Elasticsearch::Model::Adapter.register( + DataMapperAdapter, + lambda { |klass| defined?(::DataMapper::Resource) and klass.ancestors.include?(::DataMapper::Resource) } +) +``` + +Require the adapter and include `Elasticsearch::Model` in the class: + +```ruby +require 'datamapper_adapter' + +class Article + include DataMapper::Resource + include Elasticsearch::Model + + property :id, Serial + property :title, String +end +``` + +When accessing the `records` method of the response, for example, +the implementation from our adapter will be used now: + +```ruby +response = Article.search 'foo' + +response.records.to_a +# ~ (0.000057) SELECT "id", "title", "published_at" FROM "articles" WHERE "id" IN (3, 1) ORDER BY "id" +# => [#<Article @id=1 @title="Foo">
, #<Article @id=3 @title="Foo Foo">
] + +response.records.records.class +# => DataMapper::Collection +``` + +More examples can be found in the `examples` folder. Please see the `Elasticsearch::Model::Adapter` +module and its submodules for technical information. + +## Development and Community + +For local development, clone the repository and run `bundle install`. See `rake -T` for a list of +available Rake tasks for running tests, generating documentation, starting a testing cluster, etc. + +Bug fixes and features must be covered by unit tests. + +GitHub pull requests and issues are used for communication, bug reports and code contributions. + +To run all tests against a test Elasticsearch cluster, use a command like this: + +```bash +curl -# https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.0.0.RC1.tar.gz | tar xz -C tmp/ +SERVER=start TEST_CLUSTER_COMMAND=$PWD/tmp/elasticsearch-1.0.0.RC1/bin/elasticsearch bundle exec rake test:all +``` + +## License + +This software is licensed under the Apache 2 license, quoted below. + + Copyright (c) 2014 Elasticsearch + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/elasticsearch-model/Rakefile b/elasticsearch-model/Rakefile new file mode 100644 index 0000000000..2825f58b01 --- /dev/null +++ b/elasticsearch-model/Rakefile @@ -0,0 +1,61 @@ +require "bundler/gem_tasks" + +desc "Run unit tests" +task :default => 'test:unit' +task :test => 'test:unit' + +# ----- Test tasks ------------------------------------------------------------ + +require 'rake/testtask' +namespace :test do + task :ci_reporter do + ENV['CI_REPORTS'] ||= 'tmp/reports' + require 'ci/reporter/rake/minitest' + Rake::Task['ci:setup:minitest'].invoke + end + + Rake::TestTask.new(:unit) do |test| + Rake::Task['test:ci_reporter'].invoke if ENV['CI'] + test.libs << 'lib' << 'test' + test.test_files = FileList["test/unit/**/*_test.rb"] + # test.verbose = true + # test.warning = true + end + + Rake::TestTask.new(:run_integration) do |test| + Rake::Task['test:ci_reporter'].invoke if ENV['CI'] + test.libs << 'lib' << 'test' + test.test_files = FileList["test/integration/**/*_test.rb"] + end + + desc "Run integration tests against ActiveModel 3 and 4" + task :integration do + sh "BUNDLE_GEMFILE='#{File.expand_path('../gemfiles/3.0.gemfile', __FILE__)}' bundle exec rake test:run_integration" unless defined?(RUBY_VERSION) && RUBY_VERSION > '2.2' + sh "BUNDLE_GEMFILE='#{File.expand_path('../gemfiles/4.0.gemfile', __FILE__)}' bundle exec rake test:run_integration" + end + + desc "Run unit and integration tests" + task :all do + Rake::Task['test:ci_reporter'].invoke if ENV['CI'] + + Rake::Task['test:unit'].invoke + Rake::Task['test:integration'].invoke + end +end + +# ----- Documentation tasks --------------------------------------------------- + +require 'yard' +YARD::Rake::YardocTask.new(:doc) do |t| + t.options = %w| --embed-mixins --markup=markdown | +end + +# ----- Code analysis tasks --------------------------------------------------- + +if defined?(RUBY_VERSION) && RUBY_VERSION > '1.9' + require
'cane/rake_task' + Cane::RakeTask.new(:quality) do |cane| + cane.abc_max = 15 + cane.no_style = true + end +end diff --git a/elasticsearch-model/elasticsearch-model.gemspec b/elasticsearch-model/elasticsearch-model.gemspec new file mode 100644 index 0000000000..df9509f064 --- /dev/null +++ b/elasticsearch-model/elasticsearch-model.gemspec @@ -0,0 +1,57 @@ +# coding: utf-8 +lib = File.expand_path('../lib', __FILE__) +$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) +require 'elasticsearch/model/version' + +Gem::Specification.new do |s| + s.name = "elasticsearch-model" + s.version = Elasticsearch::Model::VERSION + s.authors = ["Karel Minarik"] + s.email = ["karel.minarik@elasticsearch.org"] + s.description = "ActiveModel/Record integrations for Elasticsearch." + s.summary = "ActiveModel/Record integrations for Elasticsearch." + s.homepage = "https://github.com/elasticsearch/elasticsearch-rails/" + s.license = "Apache 2" + + s.files = `git ls-files`.split($/) + s.executables = s.files.grep(%r{^bin/}) { |f| File.basename(f) } + s.test_files = s.files.grep(%r{^(test|spec|features)/}) + s.require_paths = ["lib"] + + s.extra_rdoc_files = [ "README.md", "LICENSE.txt" ] + s.rdoc_options = [ "--charset=UTF-8" ] + + s.required_ruby_version = ">= 1.9.3" + + s.add_dependency "elasticsearch", '> 0.4' + s.add_dependency "activesupport", '> 3' + s.add_dependency "hashie" + + s.add_development_dependency "bundler", "~> 1.3" + s.add_development_dependency "rake", "< 11.0" + + s.add_development_dependency "elasticsearch-extensions" + + s.add_development_dependency "sqlite3" + s.add_development_dependency "activemodel", "> 3.0" + + s.add_development_dependency "oj" + s.add_development_dependency "kaminari" + s.add_development_dependency "will_paginate" + + s.add_development_dependency "minitest", "~> 4.2" + s.add_development_dependency "test-unit" if defined?(RUBY_VERSION) && RUBY_VERSION > '2.2' + s.add_development_dependency "shoulda-context" + s.add_development_dependency "mocha" + s.add_development_dependency "turn" + s.add_development_dependency "yard" + s.add_development_dependency "ruby-prof" + s.add_development_dependency "pry" + s.add_development_dependency "ci_reporter", "~> 1.9" + + if defined?(RUBY_VERSION) && RUBY_VERSION > '1.9' + s.add_development_dependency "simplecov" + s.add_development_dependency "cane" + s.add_development_dependency "require-prof" + end +end diff --git a/elasticsearch-model/examples/activerecord_article.rb b/elasticsearch-model/examples/activerecord_article.rb new file mode 100644 index 0000000000..b18ee9c7bd --- /dev/null +++ b/elasticsearch-model/examples/activerecord_article.rb @@ -0,0 +1,77 @@ +# ActiveRecord and Elasticsearch +# ============================== +# +# https://github.com/rails/rails/tree/master/activerecord + +$LOAD_PATH.unshift File.expand_path('../../lib', __FILE__) + +require 'pry' +Pry.config.history.file = File.expand_path('../../tmp/elasticsearch_development.pry', __FILE__) + +require 'logger' +require 'ansi/core' +require 'active_record' +require 'kaminari' + +require 'elasticsearch/model' + +ActiveRecord::Base.logger = ActiveSupport::Logger.new(STDOUT) +ActiveRecord::Base.establish_connection( adapter: 'sqlite3', database: ":memory:" ) + +ActiveRecord::Schema.define(version: 1) do + create_table :articles do |t| + t.string :title + t.date :published_at + t.timestamps + end +end + +Kaminari::Hooks.init + +class Article < ActiveRecord::Base +end + +# Store data +# +Article.delete_all +Article.create title: 'Foo' +Article.create title: 'Bar' 
+Article.create title: 'Foo Foo' + +# Index data +# +client = Elasticsearch::Client.new log:true + +# client.indices.delete index: 'articles' rescue nil +# client.indices.create index: 'articles', body: { mappings: { article: { dynamic: 'strict' }, properties: {} } } + +client.indices.delete index: 'articles' rescue nil +client.bulk index: 'articles', + type: 'article', + body: Article.all.as_json.map { |a| { index: { _id: a.delete('id'), data: a } } }, + refresh: true + +# Extend the model with Elasticsearch support +# +Article.__send__ :include, Elasticsearch::Model +# Article.__send__ :include, Elasticsearch::Model::Callbacks + +# ActiveRecord::Base.logger.silence do +# 10_000.times do |i| +# Article.create title: "Foo #{i}" +# end +# end + +puts '', '-'*Pry::Terminal.width! + +Elasticsearch::Model.client = Elasticsearch::Client.new log: true + +response = Article.search 'foo'; + +p response.size +p response.results.size +p response.records.size + +Pry.start(binding, prompt: lambda { |obj, nest_level, _| '> ' }, + input: StringIO.new('response.records.to_a'), + quiet: true) diff --git a/elasticsearch-model/examples/activerecord_associations.rb b/elasticsearch-model/examples/activerecord_associations.rb new file mode 100644 index 0000000000..6143a03560 --- /dev/null +++ b/elasticsearch-model/examples/activerecord_associations.rb @@ -0,0 +1,177 @@ +# ActiveRecord associations and Elasticsearch +# =========================================== +# +# https://github.com/rails/rails/tree/master/activerecord +# http://guides.rubyonrails.org/association_basics.html +# +# Run me with: +# +# ruby -I lib examples/activerecord_associations.rb +# + +$LOAD_PATH.unshift File.expand_path('../../lib', __FILE__) + +require 'pry' +Pry.config.history.file = File.expand_path('../../tmp/elasticsearch_development.pry', __FILE__) + +require 'logger' +require 'ansi/core' +require 'active_record' + +require 'elasticsearch/model' + +ActiveRecord::Base.logger = ActiveSupport::Logger.new(STDOUT) +ActiveRecord::Base.establish_connection( adapter: 'sqlite3', database: ":memory:" ) + +# ----- Schema definition ------------------------------------------------------------------------- + +ActiveRecord::Schema.define(version: 1) do + create_table :categories do |t| + t.string :title + t.timestamps + end + + create_table :authors do |t| + t.string :first_name, :last_name + t.timestamps + end + + create_table :authorships do |t| + t.references :article + t.references :author + t.timestamps + end + + create_table :articles do |t| + t.string :title + t.timestamps + end + + create_table :articles_categories, id: false do |t| + t.references :article, :category + end + + create_table :comments do |t| + t.string :text + t.references :article + t.timestamps + end + add_index(:comments, :article_id) +end + +# ----- Elasticsearch client setup ---------------------------------------------------------------- + +Elasticsearch::Model.client = Elasticsearch::Client.new log: true +Elasticsearch::Model.client.transport.logger.formatter = proc { |s, d, p, m| "\e[32m#{m}\n\e[0m" } + +# ----- Search integration ------------------------------------------------------------------------ + +module Searchable + extend ActiveSupport::Concern + + included do + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + include Indexing + after_touch() { __elasticsearch__.index_document } + end + + module Indexing + + # Customize the JSON serialization for Elasticsearch + def as_indexed_json(options={}) + self.as_json( + include: { 
categories: { only: :title}, + authors: { methods: [:full_name], only: [:full_name] }, + comments: { only: :text } + }) + end + end +end + +# ----- Model definitions ------------------------------------------------------------------------- + +class Category < ActiveRecord::Base + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + has_and_belongs_to_many :articles +end + +class Author < ActiveRecord::Base + has_many :authorships + + after_update { self.authorships.each(&:touch) } + + def full_name + [first_name, last_name].compact.join(' ') + end +end + +class Authorship < ActiveRecord::Base + belongs_to :author + belongs_to :article, touch: true +end + +class Article < ActiveRecord::Base + include Searchable + + has_and_belongs_to_many :categories, after_add: [ lambda { |a,c| a.__elasticsearch__.index_document } ], + after_remove: [ lambda { |a,c| a.__elasticsearch__.index_document } ] + has_many :authorships + has_many :authors, through: :authorships + has_many :comments +end + +class Comment < ActiveRecord::Base + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + belongs_to :article, touch: true +end + +# ----- Insert data ------------------------------------------------------------------------------- + +# Create category +# +category = Category.create title: 'One' + +# Create author +# +author = Author.create first_name: 'John', last_name: 'Smith' + +# Create article + +article = Article.create title: 'First Article' + +# Assign category +# +article.categories << category + +# Assign author +# +article.authors << author + +# Add comment +# +article.comments.create text: 'First comment for article One' +article.comments.create text: 'Second comment for article One' + +Elasticsearch::Model.client.indices.refresh index: Elasticsearch::Model::Registry.all.map(&:index_name) + +puts "\n\e[1mArticles containing 'one':\e[0m", Article.search('one').records.to_a.map(&:inspect), "" + +puts "\n\e[1mModels containing 'one':\e[0m", Elasticsearch::Model.search('one').records.to_a.map(&:inspect), "" + +# Load model +# +article = Article.all.includes(:categories, :authors, :comments).first + +# ----- Pry --------------------------------------------------------------------------------------- + +puts '', '-'*Pry::Terminal.width! 
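+# For orientation, the article loaded above serializes roughly as follows
+# (actual values depend on the records created earlier in this script):
+#
+#   article.as_indexed_json
+#   # => { "id" => 1, "title" => "First Article", ...,
+#   #      "categories" => [{ "title" => "One" }],
+#   #      "authors"    => [{ "full_name" => "John Smith" }],
+#   #      "comments"   => [{ "text" => "First comment for article One" }, ...] }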
+ +Pry.start(binding, prompt: lambda { |obj, nest_level, _| '> ' }, + input: StringIO.new("article.as_indexed_json\n"), + quiet: true) diff --git a/elasticsearch-model/examples/activerecord_mapping_completion.rb b/elasticsearch-model/examples/activerecord_mapping_completion.rb new file mode 100644 index 0000000000..46d986011c --- /dev/null +++ b/elasticsearch-model/examples/activerecord_mapping_completion.rb @@ -0,0 +1,69 @@ +require 'ansi' +require 'active_record' +require 'elasticsearch/model' + +ActiveRecord::Base.logger = ActiveSupport::Logger.new(STDOUT) +ActiveRecord::Base.establish_connection( adapter: 'sqlite3', database: ":memory:" ) + +ActiveRecord::Schema.define(version: 1) do + create_table :articles do |t| + t.string :title + t.date :published_at + t.timestamps + end +end + +class Article < ActiveRecord::Base + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + mapping do + indexes :title + indexes :title_suggest, type: 'completion', payloads: true + end + + def as_indexed_json(options={}) + as_json.merge \ + title_suggest: { + input: title, + output: title, + payload: { url: "/articles/#{id}" } + } + end +end + +Article.__elasticsearch__.client = Elasticsearch::Client.new log: true + +# Create index + +Article.__elasticsearch__.create_index! force: true + +# Store data + +Article.delete_all +Article.create title: 'Foo' +Article.create title: 'Bar' +Article.create title: 'Foo Foo' +Article.__elasticsearch__.refresh_index! + +# Search and suggest + +response_1 = Article.search 'foo'; + +puts "Article search:".ansi(:bold), + response_1.to_a.map { |d| "Title: #{d.title}" }.inspect.ansi(:bold, :yellow) + +response_2 = Article.__elasticsearch__.client.suggest \ + index: Article.index_name, + body: { + articles: { + text: 'foo', + completion: { field: 'title_suggest', size: 25 } + } + }; + +puts "Article suggest:".ansi(:bold), + response_2['articles'].first['options'].map { |d| "#{d['text']} -> #{d['payload']['url']}" }. + inspect.ansi(:bold, :green) + +require 'pry'; binding.pry; diff --git a/elasticsearch-model/examples/couchbase_article.rb b/elasticsearch-model/examples/couchbase_article.rb new file mode 100644 index 0000000000..57cc421b01 --- /dev/null +++ b/elasticsearch-model/examples/couchbase_article.rb @@ -0,0 +1,66 @@ +# Couchbase and Elasticsearch +# =========================== +# +# https://github.com/couchbase/couchbase-ruby-model + +$LOAD_PATH.unshift File.expand_path('../../lib', __FILE__) + +require 'pry' +Pry.config.history.file = File.expand_path('../../tmp/elasticsearch_development.pry', __FILE__) + +require 'logger' +require 'couchbase/model' + +require 'elasticsearch/model' + +# Documents are stored as JSON objects in Couchbase but have rich +# semantics, including validations and associations.
+class Article < Couchbase::Model + attribute :title + attribute :published_at + + # view :all, :limit => 10, :descending => true + # TODO: Implement view a la + # bucket.save_design_doc <<-JSON + # { + # "_id": "_design/article", + # "language": "javascript", + # "views": { + # "all": { + # "map": "function(doc, meta) { emit(doc.id, doc.title); }" + # } + # } + # } + # JSON + +end + +# Extend the model with Elasticsearch support +# +Article.__send__ :extend, Elasticsearch::Model::Client::ClassMethods +Article.__send__ :extend, Elasticsearch::Model::Searching::ClassMethods +Article.__send__ :extend, Elasticsearch::Model::Naming::ClassMethods + +# Create documents in Couchbase +# +Article.create id: '1', title: 'Foo' rescue nil +Article.create id: '2', title: 'Bar' rescue nil +Article.create id: '3', title: 'Foo Foo' rescue nil + +# Index data into Elasticsearch +# +client = Elasticsearch::Client.new log:true + +client.indices.delete index: 'articles' rescue nil +client.bulk index: 'articles', + type: 'article', + body: Article.find(['1', '2', '3']).map { |a| + { index: { _id: a.id, data: a.attributes } } + }, + refresh: true + +response = Article.search 'foo', index: 'articles', type: 'article'; + +Pry.start(binding, prompt: lambda { |obj, nest_level, _| '> ' }, + input: StringIO.new('response.records.to_a'), + quiet: true) diff --git a/elasticsearch-model/examples/datamapper_article.rb b/elasticsearch-model/examples/datamapper_article.rb new file mode 100644 index 0000000000..383b3738f0 --- /dev/null +++ b/elasticsearch-model/examples/datamapper_article.rb @@ -0,0 +1,71 @@ +# DataMapper and Elasticsearch +# ============================ +# +# https://github.com/datamapper/dm-core +# https://github.com/datamapper/dm-active_model + + +$LOAD_PATH.unshift File.expand_path('../../lib', __FILE__) + +require 'pry' +Pry.config.history.file = File.expand_path('../../tmp/elasticsearch_development.pry', __FILE__) + +require 'logger' +require 'ansi/core' + +require 'data_mapper' +require 'dm-active_model' + +require 'active_support/all' + +require 'elasticsearch/model' + +DataMapper::Logger.new(STDOUT, :debug) +DataMapper.setup(:default, 'sqlite::memory:') + +class Article + include DataMapper::Resource + + property :id, Serial + property :title, String + property :published_at, DateTime +end + +DataMapper.auto_migrate! +DataMapper.finalize + +Article.create title: 'Foo' +Article.create title: 'Bar' +Article.create title: 'Foo Foo' + +# Extend the model with Elasticsearch support +# +Article.__send__ :include, Elasticsearch::Model + +# The DataMapper adapter +# +module DataMapperAdapter + + # Implement the interface for fetching records + # + module Records + def records + klass.all(id: @ids) + end + + # ...
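+ # A fuller adapter could also provide Callbacks and Importing modules
+ # alongside Records (see the Elasticsearch::Model::Adapter documentation);
+ # they are omitted from this sketch.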
+ end +end + +# Register the adapter +# +Elasticsearch::Model::Adapter.register( + DataMapperAdapter, + lambda { |klass| defined?(::DataMapper::Resource) and klass.ancestors.include?(::DataMapper::Resource) } +) + +response = Article.search 'foo'; + +Pry.start(binding, prompt: lambda { |obj, nest_level, _| '> ' }, + input: StringIO.new('response.records.to_a'), + quiet: true) diff --git a/elasticsearch-model/examples/mongoid_article.rb b/elasticsearch-model/examples/mongoid_article.rb new file mode 100644 index 0000000000..5cd12ca4fa --- /dev/null +++ b/elasticsearch-model/examples/mongoid_article.rb @@ -0,0 +1,68 @@ +# Mongoid and Elasticsearch +# ========================= +# +# http://mongoid.org/en/mongoid/index.html + +$LOAD_PATH.unshift File.expand_path('../../lib', __FILE__) + +require 'pry' +Pry.config.history.file = File.expand_path('../../tmp/elasticsearch_development.pry', __FILE__) + +require 'benchmark' +require 'logger' +require 'ansi/core' +require 'mongoid' + +require 'elasticsearch/model' +require 'elasticsearch/model/callbacks' + +Mongoid.logger.level = Logger::DEBUG +Moped.logger.level = Logger::DEBUG + +Mongoid.connect_to 'articles' + +Elasticsearch::Model.client = Elasticsearch::Client.new host: 'localhost:9200', log: true + +class Article + include Mongoid::Document + field :id, type: String + field :title, type: String + field :published_at, type: DateTime + attr_accessible :id, :title, :published_at if respond_to? :attr_accessible + + def as_indexed_json(options={}) + as_json(except: [:id, :_id]) + end +end + +# Extend the model with Elasticsearch support +# +Article.__send__ :include, Elasticsearch::Model +# Article.__send__ :include, Elasticsearch::Model::Callbacks + +# Store data +# +Article.delete_all +Article.create id: '1', title: 'Foo' +Article.create id: '2', title: 'Bar' +Article.create id: '3', title: 'Foo Foo' + +# Index data +# +client = Elasticsearch::Client.new host:'localhost:9200', log:true + +client.indices.delete index: 'articles' rescue nil +client.bulk index: 'articles', + type: 'article', + body: Article.all.map { |a| { index: { _id: a.id, data: a.attributes } } }, + refresh: true + +# puts Benchmark.realtime { 9_875.times { |i| Article.create title: "Foo #{i}" } } + +puts '', '-'*Pry::Terminal.width! 
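+# `response.records` below fetches the matching documents back from MongoDB
+# via an $in query on _id, as shown in the README.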
+ +response = Article.search 'foo'; + +Pry.start(binding, prompt: lambda { |obj, nest_level, _| '> ' }, + input: StringIO.new('response.records.to_a'), + quiet: true) diff --git a/elasticsearch-model/examples/ohm_article.rb b/elasticsearch-model/examples/ohm_article.rb new file mode 100644 index 0000000000..3145085e79 --- /dev/null +++ b/elasticsearch-model/examples/ohm_article.rb @@ -0,0 +1,70 @@ +# Ohm for Redis and Elasticsearch +# =============================== +# +# https://github.com/soveran/ohm#example + +$LOAD_PATH.unshift File.expand_path('../../lib', __FILE__) + +require 'pry' +Pry.config.history.file = File.expand_path('../../tmp/elasticsearch_development.pry', __FILE__) + +require 'logger' +require 'ansi/core' +require 'active_model' +require 'ohm' + +require 'elasticsearch/model' + +class Article < Ohm::Model + # Include JSON serialization from ActiveModel + include ActiveModel::Serializers::JSON + + attribute :title + attribute :published_at +end + +# Extend the model with Elasticsearch support +# +Article.__send__ :include, Elasticsearch::Model + +# Register a custom adapter +# +module Elasticsearch + module Model + module Adapter + module Ohm + Adapter.register self, + lambda { |klass| defined?(::Ohm::Model) and klass.ancestors.include?(::Ohm::Model) } + module Records + def records + klass.fetch(@ids) + end + end + end + end + end +end + +# Configure the Elasticsearch client to log operations +# +Elasticsearch::Model.client = Elasticsearch::Client.new log: true + +puts '', '-'*Pry::Terminal.width! + +Article.all.map { |a| a.delete } +Article.create id: '1', title: 'Foo' +Article.create id: '2', title: 'Bar' +Article.create id: '3', title: 'Foo Foo' + +Article.__elasticsearch__.client.indices.delete index: 'articles' rescue nil +Article.__elasticsearch__.client.bulk index: 'articles', + type: 'article', + body: Article.all.map { |a| { index: { _id: a.id, data: a.attributes } } }, + refresh: true + + +response = Article.search 'foo', index: 'articles', type: 'article'; + +Pry.start(binding, prompt: lambda { |obj, nest_level, _| '> ' }, + input: StringIO.new('response.records.to_a'), + quiet: true) diff --git a/elasticsearch-model/examples/riak_article.rb b/elasticsearch-model/examples/riak_article.rb new file mode 100644 index 0000000000..8013cda7ea --- /dev/null +++ b/elasticsearch-model/examples/riak_article.rb @@ -0,0 +1,52 @@ +# Riak and Elasticsearch +# ====================== +# +# https://github.com/basho-labs/ripple + +$LOAD_PATH.unshift File.expand_path('../../lib', __FILE__) + +require 'pry' +Pry.config.history.file = File.expand_path('../../tmp/elasticsearch_development.pry', __FILE__) + +require 'logger' +require 'ripple' + +require 'elasticsearch/model' + +# Documents are stored as JSON objects in Riak but have rich +# semantics, including validations and associations. 
+class Article
+  include Ripple::Document
+
+  property :title, String
+  property :published_at, Time, :default => proc { Time.now }
+end
+
+# Extend the model with Elasticsearch support
+#
+Article.__send__ :include, Elasticsearch::Model
+
+# Create documents in Riak
+#
+Article.destroy_all
+Article.create id: '1', title: 'Foo'
+Article.create id: '2', title: 'Bar'
+Article.create id: '3', title: 'Foo Foo'
+
+# Index data into Elasticsearch
+#
+client = Elasticsearch::Client.new log:true
+
+client.indices.delete index: 'articles' rescue nil
+client.bulk index: 'articles',
+            type: 'article',
+            body: Article.all.map { |a|
+              { index: { _id: a.key, data: JSON.parse(a.robject.raw_data) } }
+            }.as_json,
+            refresh: true
+
+response = Article.search 'foo';
+
+Pry.start(binding, prompt: lambda { |obj, nest_level, _| '> ' },
+                   input: StringIO.new('response.records.to_a'),
+                   quiet: true)
diff --git a/elasticsearch-model/gemfiles/3.0.gemfile b/elasticsearch-model/gemfiles/3.0.gemfile
new file mode 100644
index 0000000000..23cbdf53d5
--- /dev/null
+++ b/elasticsearch-model/gemfiles/3.0.gemfile
@@ -0,0 +1,13 @@
+# Usage:
+#
+#     $ BUNDLE_GEMFILE=./gemfiles/3.0.gemfile bundle install
+#     $ BUNDLE_GEMFILE=./gemfiles/3.0.gemfile bundle exec rake test:integration
+
+source 'https://rubygems.org'
+
+gemspec path: '../'
+
+gem 'activemodel',  '>= 3.0'
+gem 'activerecord', '~> 3.2'
+gem 'mongoid',      '>= 3.0'
+gem 'sqlite3'
diff --git a/elasticsearch-model/gemfiles/4.0.gemfile b/elasticsearch-model/gemfiles/4.0.gemfile
new file mode 100644
index 0000000000..89044bb19e
--- /dev/null
+++ b/elasticsearch-model/gemfiles/4.0.gemfile
@@ -0,0 +1,12 @@
+# Usage:
+#
+#     $ BUNDLE_GEMFILE=./gemfiles/4.0.gemfile bundle install
+#     $ BUNDLE_GEMFILE=./gemfiles/4.0.gemfile bundle exec rake test:integration
+
+source 'https://rubygems.org'
+
+gemspec path: '../'
+
+gem 'activemodel',  '~> 4'
+gem 'activerecord', '~> 4'
+gem 'sqlite3'
diff --git a/elasticsearch-model/lib/elasticsearch/model.rb b/elasticsearch-model/lib/elasticsearch/model.rb
new file mode 100644
index 0000000000..9d2b93da51
--- /dev/null
+++ b/elasticsearch-model/lib/elasticsearch/model.rb
@@ -0,0 +1,188 @@
+require 'elasticsearch'
+
+require 'hashie'
+
+require 'active_support/core_ext/module/delegation'
+
+require 'elasticsearch/model/version'
+
+require 'elasticsearch/model/client'
+
+require 'elasticsearch/model/multimodel'
+
+require 'elasticsearch/model/adapter'
+require 'elasticsearch/model/adapters/default'
+require 'elasticsearch/model/adapters/active_record'
+require 'elasticsearch/model/adapters/mongoid'
+require 'elasticsearch/model/adapters/multiple'
+
+require 'elasticsearch/model/importing'
+require 'elasticsearch/model/indexing'
+require 'elasticsearch/model/naming'
+require 'elasticsearch/model/serializing'
+require 'elasticsearch/model/searching'
+require 'elasticsearch/model/callbacks'
+
+require 'elasticsearch/model/proxy'
+
+require 'elasticsearch/model/response'
+require 'elasticsearch/model/response/base'
+require 'elasticsearch/model/response/result'
+require 'elasticsearch/model/response/results'
+require 'elasticsearch/model/response/records'
+require 'elasticsearch/model/response/pagination'
+require 'elasticsearch/model/response/suggestions'
+
+require 'elasticsearch/model/ext/active_record'
+
+case
+when defined?(::Kaminari)
+  Elasticsearch::Model::Response::Response.__send__ :include, Elasticsearch::Model::Response::Pagination::Kaminari
+when defined?(::WillPaginate)
+  Elasticsearch::Model::Response::Response.__send__ :include, Elasticsearch::Model::Response::Pagination::WillPaginate
+end
+
+module Elasticsearch
+
+  # Elasticsearch integration for Ruby models
+  # =========================================
+  #
+  # `Elasticsearch::Model` contains modules for integrating the Elasticsearch search and analytical engine
+  # with ActiveModel-based classes, or models, for the Ruby programming language.
+  #
+  # It facilitates importing your data into an index, automatically updating it when a record changes,
+  # searching the specific index, setting up the index mapping or the model JSON serialization.
+  #
+  # When the `Elasticsearch::Model` module is included in your class, it automatically extends it
+  # with the functionality; see {Elasticsearch::Model.included}. Most methods are available via
+  # the `__elasticsearch__` class and instance method proxies.
+  #
+  # It is possible to include/extend the model with the corresponding
+  # modules directly, if that is desired:
+  #
+  #     MyModel.__send__ :extend,  Elasticsearch::Model::Client::ClassMethods
+  #     MyModel.__send__ :include, Elasticsearch::Model::Client::InstanceMethods
+  #     MyModel.__send__ :extend,  Elasticsearch::Model::Searching::ClassMethods
+  #     # ...
+  #
+  module Model
+    METHODS = [:search, :mapping, :mappings, :settings, :index_name, :document_type, :import]
+
+    # Adds the `Elasticsearch::Model` functionality to the including class.
+    #
+    # * Creates the `__elasticsearch__` class and instance methods, pointing to the proxy object
+    # * Includes the necessary modules in the proxy classes
+    # * Sets up delegation for crucial methods such as `search`, etc.
+    #
+    # @example Include the module in the `Article` model definition
+    #
+    #     class Article < ActiveRecord::Base
+    #       include Elasticsearch::Model
+    #     end
+    #
+    # @example Inject the module into the `Article` model during run time
+    #
+    #     Article.__send__ :include, Elasticsearch::Model
+    #
+    #
+    def self.included(base)
+      base.class_eval do
+        include Elasticsearch::Model::Proxy
+
+        Elasticsearch::Model::Proxy::ClassMethodsProxy.class_eval do
+          include Elasticsearch::Model::Client::ClassMethods
+          include Elasticsearch::Model::Naming::ClassMethods
+          include Elasticsearch::Model::Indexing::ClassMethods
+          include Elasticsearch::Model::Searching::ClassMethods
+        end
+
+        Elasticsearch::Model::Proxy::InstanceMethodsProxy.class_eval do
+          include Elasticsearch::Model::Client::InstanceMethods
+          include Elasticsearch::Model::Naming::InstanceMethods
+          include Elasticsearch::Model::Indexing::InstanceMethods
+          include Elasticsearch::Model::Serializing::InstanceMethods
+        end
+
+        Elasticsearch::Model::Proxy::InstanceMethodsProxy.class_eval <<-CODE, __FILE__, __LINE__ + 1
+          def as_indexed_json(options={})
+            target.respond_to?(:as_indexed_json) ? target.__send__(:as_indexed_json, options) : super
+          end
+        CODE
+
+        # Delegate important methods to the `__elasticsearch__` proxy, unless they are defined already
+        #
+        class << self
+          METHODS.each do |method|
+            delegate method, to: :__elasticsearch__ unless self.public_instance_methods.include?(method)
+          end
+        end
+
+        # Mix the importing module into the proxy
+        #
+        self.__elasticsearch__.class_eval do
+          include Elasticsearch::Model::Importing::ClassMethods
+          include Adapter.from_class(base).importing_mixin
+        end
+
+        # Add to the registry if it's a class (and not an intermediate module)
+        Registry.add(base) if base.is_a?(Class)
+      end
+    end
+
+    module ClassMethods
+
+      # Get the client common for all models
+      #
+      # @example Get the client
+      #
+      #     Elasticsearch::Model.client
+      #     # => #<Elasticsearch::Transport::Client:0x... @transport=...>
+      #
+      def client
+        @client ||= Elasticsearch::Client.new
+      end
+
+      # Set the client for all models
+      #
+      # @example Configure (set) the client for all models
+      #
+      #     Elasticsearch::Model.client = Elasticsearch::Client.new host: 'http://localhost:9200', tracer: true
+      #     # => #<Elasticsearch::Transport::Client:0x... @transport=...>
+      #
+      # @note You have to set the client before you call Elasticsearch methods on the model,
+      #       or set it directly on the model; see {Elasticsearch::Model::Client::ClassMethods#client}
+      #
+      def client=(client)
+        @client = client
+      end
+
+      # Search across multiple models
+      #
+      # By default, all models which include the `Elasticsearch::Model` module are searched
+      #
+      # @param query_or_payload [String,Hash,Object] The search request definition
+      #                                              (string, JSON, Hash, or object responding to `to_hash`)
+      # @param models [Array] The Array of Model objects to search
+      # @param options [Hash] Optional parameters to be passed to the Elasticsearch client
+      #
+      # @return [Elasticsearch::Model::Response::Response]
+      #
+      # @example Search across specific models
+      #
+      #     Elasticsearch::Model.search('foo', [Author, Article])
+      #
+      # @example Search across all models which include the `Elasticsearch::Model` module
+      #
+      #     Elasticsearch::Model.search('foo')
+      #
+      def search(query_or_payload, models=[], options={})
+        models = Multimodel.new(models)
+        request = Searching::SearchRequest.new(models, query_or_payload, options)
+        Response::Response.new(models, request)
+      end
+    end
+    extend ClassMethods
+
+    class NotImplemented < NoMethodError; end
+  end
+end
diff --git a/elasticsearch-model/lib/elasticsearch/model/adapter.rb b/elasticsearch-model/lib/elasticsearch/model/adapter.rb
new file mode 100644
index 0000000000..3a25e5d97b
--- /dev/null
+++ b/elasticsearch-model/lib/elasticsearch/model/adapter.rb
@@ -0,0 +1,145 @@
+module Elasticsearch
+  module Model
+
+    # Contains an adapter which provides OxM-specific implementations for common behaviour:
+    #
+    # * {Adapter::Adapter#records_mixin   Fetching records from the database}
+    # * {Adapter::Adapter#callbacks_mixin Model callbacks for automatic index updates}
+    # * {Adapter::Adapter#importing_mixin Efficient bulk loading from the database}
+    #
+    # @see Elasticsearch::Model::Adapter::Default
+    # @see Elasticsearch::Model::Adapter::ActiveRecord
+    # @see Elasticsearch::Model::Adapter::Mongoid
+    #
+    module Adapter
+
+      # Returns an adapter based on the Ruby class passed
+      #
+      # @example Create an adapter for an ActiveRecord-based model
+      #
+      #     class Article < ActiveRecord::Base; end
+      #
+      #     myadapter = Elasticsearch::Model::Adapter.from_class(Article)
+      #     myadapter.adapter
+      #     # => Elasticsearch::Model::Adapter::ActiveRecord
+      #
+      # @see Adapter.adapters  The list of included adapters
+      # @see Adapter.register  Register a custom adapter
+      #
+      def from_class(klass)
+        Adapter.new(klass)
+      end; module_function :from_class
+
+      # Returns registered adapters
+      #
+      # @see ::Elasticsearch::Model::Adapter::Adapter.adapters
+      #
+      def adapters
+        Adapter.adapters
+      end; module_function :adapters
+
+      # Registers an adapter
+      #
+      # @see ::Elasticsearch::Model::Adapter::Adapter.register
+      #
+      def register(name, condition)
+        Adapter.register(name, condition)
+      end; module_function :register
+
+      # Contains an adapter for specific OxM or architecture.
+      #
+      class Adapter
+        attr_reader :klass
+
+        def initialize(klass)
+          @klass = klass
+        end
+
+        # Registers an adapter for specific condition
+        #
+        # @param name      [Module] The module containing the implemented interface
+        # @param condition [Proc]   An object with a `call` method which is evaluated in {.adapter}
+        #
+        # @example Register an adapter for DataMapper
+        #
+        #     module DataMapperAdapter
+        #
+        #       # Implement the interface for fetching records
+        #       #
+        #       module Records
+        #         def records
+        #           klass.all(id: @ids)
+        #         end
+        #
+        #         # ...
+        #       end
+        #     end
+        #
+        #     # Register the adapter
+        #     #
+        #     Elasticsearch::Model::Adapter.register(
+        #       DataMapperAdapter,
+        #       lambda { |klass|
+        #         defined?(::DataMapper::Resource) and klass.ancestors.include?(::DataMapper::Resource)
+        #       }
+        #     )
+        #
+        def self.register(name, condition)
+          self.adapters[name] = condition
+        end
+
+        # Return the collection of registered adapters
+        #
+        # @example Return the currently registered adapters
+        #
+        #     Elasticsearch::Model::Adapter.adapters
+        #     # => {
+        #     #      Elasticsearch::Model::Adapter::ActiveRecord => #<Proc:0x...>,
+        #     #      Elasticsearch::Model::Adapter::Mongoid      => #<Proc:0x...>,
+        #     #    }
+        #
+        # @return [Hash] The collection of adapters
+        #
+        def self.adapters
+          @adapters ||= {}
+        end
+
+        # Return the module with {Default::Records} interface implementation
+        #
+        # @api private
+        #
+        def records_mixin
+          adapter.const_get(:Records)
+        end
+
+        # Return the module with {Default::Callbacks} interface implementation
+        #
+        # @api private
+        #
+        def callbacks_mixin
+          adapter.const_get(:Callbacks)
+        end
+
+        # Return the module with {Default::Importing} interface implementation
+        #
+        # @api private
+        #
+        def importing_mixin
+          adapter.const_get(:Importing)
+        end
+
+        # Returns the adapter module
+        #
+        # @api private
+        #
+        def adapter
+          @adapter ||= begin
+            self.class.adapters.find( lambda {[]} ) { |name, condition| condition.call(klass) }.first \
+            || Elasticsearch::Model::Adapter::Default
+          end
+        end
+
+      end
+    end
+  end
+end
diff --git a/elasticsearch-model/lib/elasticsearch/model/adapters/active_record.rb b/elasticsearch-model/lib/elasticsearch/model/adapters/active_record.rb
new file mode 100644
index 0000000000..2d9bb53786
--- /dev/null
+++ b/elasticsearch-model/lib/elasticsearch/model/adapters/active_record.rb
@@ -0,0 +1,114 @@
+module Elasticsearch
+  module Model
+    module Adapter
+
+      # An adapter for ActiveRecord-based models
+      #
+      module ActiveRecord
+
+        Adapter.register self,
+                         lambda { |klass| !!defined?(::ActiveRecord::Base) && klass.respond_to?(:ancestors) && klass.ancestors.include?(::ActiveRecord::Base) }
+
+        module Records
+          attr_writer :options
+
+          def options
+            @options ||= {}
+          end
+
+          # Returns an `ActiveRecord::Relation` instance
+          #
+          def records
+            sql_records = klass.where(klass.primary_key => ids)
+            sql_records = sql_records.includes(self.options[:includes]) if self.options[:includes]
+
+            # Re-order records based on the order from Elasticsearch hits
+            # by redefining `to_a`, unless the user has called `order()`
+            #
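+            # `instance_exec` runs the block in the context of the relation;
+            # `hits` closes over the Elasticsearch response, so the singleton
+            # `to_a` defined below can sort records into hit order.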
+            sql_records.instance_exec(response.response['hits']['hits']) do |hits|
+              define_singleton_method :to_a do
+                if defined?(::ActiveRecord) && ::ActiveRecord::VERSION::MAJOR >= 4
+                  self.load
+                else
+                  self.__send__(:exec_queries)
+                end
+                @records.sort_by { |record| hits.index { |hit| hit['_id'].to_s == record.id.to_s } }
+              end
+            end
+
+            sql_records
+          end
+
+          # Prevent clash with `ActiveSupport::Dependencies::Loadable`
+          #
+          def load
+            records.load
+          end
+
+          # Intercept call to the `order` method, so we can ignore the order from Elasticsearch
+          #
+          def order(*args)
+            sql_records = records.__send__ :order, *args
+
+            # Redefine the `to_a` method to the original one
+            #
+            sql_records.instance_exec do
+              define_singleton_method(:to_a) do
+                if defined?(::ActiveRecord) && ::ActiveRecord::VERSION::MAJOR >= 4
+                  self.load
+                else
+                  self.__send__(:exec_queries)
+                end
+                @records
+              end
+            end
+
+            sql_records
+          end
+        end
+
+        module Callbacks
+
+          # Handle index updates (creating, updating or deleting documents)
+          # when the model changes, by hooking into the lifecycle
+          #
+          # @see http://guides.rubyonrails.org/active_record_callbacks.html
+          #
+          def self.included(base)
+            base.class_eval do
+              after_commit lambda { __elasticsearch__.index_document  }, on: :create
+              after_commit lambda { __elasticsearch__.update_document }, on: :update
+              after_commit lambda { __elasticsearch__.delete_document }, on: :destroy
+            end
+          end
+        end
+
+        module Importing
+
+          # Fetch batches of records from the database (used by the import method)
+          #
+          #
+          # @see http://api.rubyonrails.org/classes/ActiveRecord/Batches.html ActiveRecord::Batches.find_in_batches
+          #
+          def __find_in_batches(options={}, &block)
+            query       = options.delete(:query)
+            named_scope = options.delete(:scope)
+            preprocess  = options.delete(:preprocess)
+
+            scope = self
+            scope = scope.__send__(named_scope) if named_scope
+            scope = scope.instance_exec(&query) if query
+
+            scope.find_in_batches(options) do |batch|
+              yield (preprocess ? self.__send__(preprocess, batch) : batch)
+            end
+          end
+
+          def __transform
+            lambda { |model| { index: { _id: model.id, data: model.__elasticsearch__.as_indexed_json } } }
+          end
+        end
+      end
+    end
+  end
+end
diff --git a/elasticsearch-model/lib/elasticsearch/model/adapters/default.rb b/elasticsearch-model/lib/elasticsearch/model/adapters/default.rb
new file mode 100644
index 0000000000..e58cf4ceb3
--- /dev/null
+++ b/elasticsearch-model/lib/elasticsearch/model/adapters/default.rb
@@ -0,0 +1,50 @@
+module Elasticsearch
+  module Model
+    module Adapter
+
+      # The default adapter for models which don't have an adapter registered
+      #
+      module Default
+
+        # Module for implementing methods and logic related to fetching records from the database
+        #
+        module Records
+
+          # Return the collection of records fetched from the database
+          #
+          # By default uses `MyModel#find[1, 2, 3]`
+          #
+          def records
+            klass.find(@ids)
+          end
+        end
+
+        # Module for implementing methods and logic related to hooking into model lifecycle
+        # (e.g. to perform automatic index updates)
+        #
+        # @see http://api.rubyonrails.org/classes/ActiveModel/Callbacks.html
+        module Callbacks
+          # noop
+        end
+
+        # Module for efficiently fetching records from the database to import them into the index
+        #
+        module Importing
+
+          # @abstract Implement this method in your adapter
+          #
+          def __find_in_batches(options={}, &block)
+            raise NotImplemented, "Method not implemented for default adapter"
+          end
+
+          # @abstract Implement this method in your adapter
+          #
+          def __transform
+            raise NotImplemented, "Method not implemented for default adapter"
+          end
+        end
+
+      end
+    end
+  end
+end
diff --git a/elasticsearch-model/lib/elasticsearch/model/adapters/mongoid.rb b/elasticsearch-model/lib/elasticsearch/model/adapters/mongoid.rb
new file mode 100644
index 0000000000..5117dbf58d
--- /dev/null
+++ b/elasticsearch-model/lib/elasticsearch/model/adapters/mongoid.rb
@@ -0,0 +1,82 @@
+module Elasticsearch
+  module Model
+    module Adapter
+
+      # An adapter for Mongoid-based models
+      #
+      # @see http://mongoid.org
+      #
+      module Mongoid
+
+        Adapter.register self,
+                         lambda { |klass| !!defined?(::Mongoid::Document) && klass.respond_to?(:ancestors) && klass.ancestors.include?(::Mongoid::Document) }
+
+        module Records
+
+          # Return a `Mongoid::Criteria` instance
+          #
+          def records
+            criteria = klass.where(:id.in => ids)
+
+            criteria.instance_exec(response.response['hits']['hits']) do |hits|
+              define_singleton_method :to_a do
+                self.entries.sort_by { |e| hits.index { |hit| hit['_id'].to_s == e.id.to_s } }
+              end
+            end
+
+            criteria
+          end
+
+          # Intercept call to sorting methods, so we can ignore the order from Elasticsearch
+          #
+          %w| asc desc order_by |.each do |name|
+            define_method name do |*args|
+              criteria = records.__send__ name, *args
+              criteria.instance_exec do
+                define_singleton_method(:to_a) { self.entries }
+              end
+
+              criteria
+            end
+          end
+        end
+
+        module Callbacks
+
+          # Handle index updates (creating, updating or deleting documents)
+          # when the model changes, by hooking into the lifecycle
+          #
+          # @see http://mongoid.org/en/mongoid/docs/callbacks.html
+          #
+          def self.included(base)
+            base.after_create  { |document| document.__elasticsearch__.index_document  }
+            base.after_update  { |document| document.__elasticsearch__.update_document }
+            base.after_destroy { |document| document.__elasticsearch__.delete_document }
+          end
+        end
+
+        module Importing
+
+          # Fetch batches of records from the database
+          #
+          # @see https://github.com/mongoid/mongoid/issues/1334
+          # @see https://github.com/karmi/retire/pull/724
+          #
+          def __find_in_batches(options={}, &block)
+            options[:batch_size] ||= 1_000
+
+            all.no_timeout.each_slice(options[:batch_size]) do |items|
+              yield items
+            end
+          end
+
+          def __transform
+            lambda { |a| { index: { _id: a.id.to_s, data: a.as_indexed_json } } }
+          end
+        end
+
+      end
+
+    end
+  end
+end
diff --git a/elasticsearch-model/lib/elasticsearch/model/adapters/multiple.rb b/elasticsearch-model/lib/elasticsearch/model/adapters/multiple.rb
new file mode 100644
index 0000000000..9a0bc4e8eb
--- /dev/null
+++ b/elasticsearch-model/lib/elasticsearch/model/adapters/multiple.rb
@@ -0,0 +1,112 @@
+module Elasticsearch
+  module Model
+    module Adapter
+
+      # An adapter to be used for deserializing results from multiple models,
+      # retrieved through `Elasticsearch::Model.search`
+      #
+      # @see Elasticsearch::Model.search
+      #
+      module Multiple
+        Adapter.register self, lambda { |klass| klass.is_a? Multimodel }
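+
+        # Illustrative usage, not part of the original file (`Article` and
+        # `Comment` are assumed models):
+        #
+        #     response = Elasticsearch::Model.search('foo', [Article, Comment])
+        #     response.records.to_a
+        #     # => mixed Article and Comment instances, in hit order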
+
+        module Records
+          # Returns a collection of model instances, possibly of different classes (ActiveRecord, Mongoid, ...)
+          #
+          # @note The order of results in the Elasticsearch response is preserved
+          #
+          def records
+            records_by_type = __records_by_type
+
+            records = response.response["hits"]["hits"].map do |hit|
+              records_by_type[ __type_for_hit(hit) ][ hit[:_id] ]
+            end
+
+            records.compact
+          end
+
+          # Returns the collection of records grouped by class based on `_type`
+          #
+          # Example:
+          #
+          #   {
+          #     Foo => { "1" => #<Foo id: 1, ...>, ... },
+          #     Bar => { "1" => #<Bar id: 1, ...>, ... }
+          #   }
+          #
+          # @api private
+          #
+          def __records_by_type
+            result = __ids_by_type.map do |klass, ids|
+              records = __records_for_klass(klass, ids)
+              ids     = records.map(&:id).map(&:to_s)
+              [ klass, Hash[ids.zip(records)] ]
+            end
+            Hash[result]
+          end
+
+          # Returns the collection of records for a specific `klass`, using the registered adapter
+          #
+          # @api private
+          #
+          def __records_for_klass(klass, ids)
+            adapter = __adapter_for_klass(klass)
+
+            case
+            when Elasticsearch::Model::Adapter::ActiveRecord.equal?(adapter)
+              klass.where(klass.primary_key => ids)
+            when Elasticsearch::Model::Adapter::Mongoid.equal?(adapter)
+              klass.where(:id.in => ids)
+            else
+              klass.find(ids)
+            end
+          end
+
+          # Returns the record IDs grouped by class based on type `_type`
+          #
+          # Example:
+          #
+          #   { Foo => ["1"], Bar => ["1", "5"] }
+          #
+          # @api private
+          #
+          def __ids_by_type
+            ids_by_type = {}
+
+            response.response["hits"]["hits"].each do |hit|
+              type = __type_for_hit(hit)
+              ids_by_type[type] ||= []
+              ids_by_type[type] << hit[:_id]
+            end
+            ids_by_type
+          end
+
+          # Returns the class of the model corresponding to a specific `hit` in Elasticsearch results
+          #
+          # @see Elasticsearch::Model::Registry
+          #
+          # @api private
+          #
+          def __type_for_hit(hit)
+            @@__types ||= {}
+
+            @@__types[ "#{hit[:_index]}::#{hit[:_type]}" ] ||= begin
+              Registry.all.detect do |model|
+                model.index_name == hit[:_index] && model.document_type == hit[:_type]
+              end
+            end
+          end
+
+          # Returns the adapter registered for a particular `klass` or `nil` if not available
+          #
+          # @api private
+          #
+          def __adapter_for_klass(klass)
+            Adapter.adapters.select { |name, checker| checker.call(klass) }.keys.first
+          end
+        end
+      end
+    end
+  end
+end
diff --git a/elasticsearch-model/lib/elasticsearch/model/callbacks.rb b/elasticsearch-model/lib/elasticsearch/model/callbacks.rb
new file mode 100644
index 0000000000..1b72cb2a03
--- /dev/null
+++ b/elasticsearch-model/lib/elasticsearch/model/callbacks.rb
@@ -0,0 +1,35 @@
+module Elasticsearch
+  module Model
+
+    # Allows automatically updating the index based on model changes,
+    # by hooking into the model lifecycle.
+    #
+    # @note A blocking HTTP request is done during the update process.
+    #       If you need a more performant/resilient way of updating the index,
+    #       consider adapting the callbacks behaviour, and use a background
+    #       processing solution such as [Sidekiq](http://sidekiq.org)
+    #       or [Resque](https://github.com/resque/resque).
+    #
+    module Callbacks
+
+      # When included in a model, automatically injects the callback subscribers (`after_save`, etc)
+      #
+      # @example Automatically update Elasticsearch index when the model changes
+      #
+      #     class Article
+      #       include Elasticsearch::Model
+      #       include Elasticsearch::Model::Callbacks
+      #     end
+      #
+      #     Article.first.update_attribute :title, 'Updated'
+      #     # SQL (0.3ms)  UPDATE "articles" SET "title" = ...
+      #     # 2013-11-20 15:08:52 +0100: POST http://localhost:9200/articles/article/1/_update ...
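+      #
+      #     # Illustrative variant (`IndexerJob` is hypothetical): enqueue a
+      #     # background job to avoid the blocking request described above
+      #     Article.after_commit { |article| IndexerJob.perform_later(article.id) }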
+      #
+      def self.included(base)
+        adapter = Adapter.from_class(base)
+        base.__send__ :include, adapter.callbacks_mixin
+      end
+
+    end
+  end
+end
diff --git a/elasticsearch-model/lib/elasticsearch/model/client.rb b/elasticsearch-model/lib/elasticsearch/model/client.rb
new file mode 100644
index 0000000000..c1a9b4ed91
--- /dev/null
+++ b/elasticsearch-model/lib/elasticsearch/model/client.rb
@@ -0,0 +1,61 @@
+module Elasticsearch
+  module Model
+
+    # Contains an `Elasticsearch::Client` instance
+    #
+    module Client
+
+      module ClassMethods
+
+        # Get the client for a specific model class
+        #
+        # @example Get the client for `Article` and perform API request
+        #
+        #     Article.client.cluster.health
+        #     # => { "cluster_name" => "elasticsearch" ... }
+        #
+        def client client=nil
+          @client ||= Elasticsearch::Model.client
+        end
+
+        # Set the client for a specific model class
+        #
+        # @example Configure the client for the `Article` model
+        #
+        #     Article.client = Elasticsearch::Client.new host: 'http://api.server:8080'
+        #     Article.search ...
+        #
+        def client=(client)
+          @client = client
+        end
+      end
+
+      module InstanceMethods
+
+        # Get or set the client for a specific model instance
+        #
+        # @example Get the client for a specific record and perform API request
+        #
+        #     @article = Article.first
+        #     @article.client.info
+        #     # => { "name" => "Node-1", ... }
+        #
+        def client
+          @client ||= self.class.client
+        end
+
+        # Set the client for a specific model instance
+        #
+        # @example Set the client for a specific record
+        #
+        #     @article = Article.first
+        #     @article.client = Elasticsearch::Client.new host: 'http://api.server:8080'
+        #
+        def client=(client)
+          @client = client
+        end
+      end
+
+    end
+  end
+end
diff --git a/elasticsearch-model/lib/elasticsearch/model/ext/active_record.rb b/elasticsearch-model/lib/elasticsearch/model/ext/active_record.rb
new file mode 100644
index 0000000000..ffa6cc385a
--- /dev/null
+++ b/elasticsearch-model/lib/elasticsearch/model/ext/active_record.rb
@@ -0,0 +1,14 @@
+# Prevent `MyModel.inspect` failing with `ActiveRecord::ConnectionNotEstablished`
+# (triggered by elasticsearch-model/lib/elasticsearch/model.rb:79:in `included')
+#
+ActiveRecord::Base.instance_eval do
+  class << self
+    def inspect_with_rescue
+      inspect_without_rescue
+    rescue ActiveRecord::ConnectionNotEstablished
+      "#{self}(no database connection)"
+    end
+
+    alias_method_chain :inspect, :rescue
+  end
+end if defined?(ActiveRecord) && ActiveRecord::VERSION::STRING < '4'
diff --git a/elasticsearch-model/lib/elasticsearch/model/importing.rb b/elasticsearch-model/lib/elasticsearch/model/importing.rb
new file mode 100644
index 0000000000..7c42545d2a
--- /dev/null
+++ b/elasticsearch-model/lib/elasticsearch/model/importing.rb
@@ -0,0 +1,151 @@
+module Elasticsearch
+  module Model
+
+    # Provides support for easily and efficiently importing large amounts of
+    # records from the including class into the index.
+    #
+    # @see ClassMethods#import
+    #
+    module Importing
+
+      # When included in a model, adds the importing methods.
+      #
+      # @example Import all records from the `Article` model
+      #
+      #     Article.import
+      #
+      # @see #import
+      #
+      def self.included(base)
+        base.__send__ :extend, ClassMethods
+
+        adapter = Adapter.from_class(base)
+        base.__send__ :include, adapter.importing_mixin
+        base.__send__ :extend,  adapter.importing_mixin
+      end
+
+      module ClassMethods
+
+        # Import all model records into the index
+        #
+        # The method will pick up the correct strategy based on the `Importing` module
+        # defined in the corresponding adapter.
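+        # For instance, the ActiveRecord adapter batches records with
+        # `find_in_batches`, while the Mongoid adapter slices a `no_timeout`
+        # criteria; see the adapter files above.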
+        #
+        # @param options [Hash] Options passed to the underlying `__find_in_batches` method
+        # @param block  [Proc] Optional block to evaluate for each batch
+        #
+        # @yield [Hash] Gives the Hash with the Elasticsearch response to the block
+        #
+        # @return [Fixnum] Number of errors encountered during importing
+        #
+        # @example Import all records into the index
+        #
+        #     Article.import
+        #
+        # @example Set the batch size to 100
+        #
+        #     Article.import batch_size: 100
+        #
+        # @example Process the response from Elasticsearch
+        #
+        #     Article.import do |response|
+        #       puts "Got " + response['items'].select { |i| i['index']['error'] }.size.to_s + " errors"
+        #     end
+        #
+        # @example Delete and create the index with appropriate settings and mappings
+        #
+        #     Article.import force: true
+        #
+        # @example Refresh the index after importing all batches
+        #
+        #     Article.import refresh: true
+        #
+        # @example Import the records into a different index/type than the default one
+        #
+        #     Article.import index: 'my-new-index', type: 'my-other-type'
+        #
+        # @example Pass an ActiveRecord scope to limit the imported records
+        #
+        #     Article.import scope: 'published'
+        #
+        # @example Pass an ActiveRecord query to limit the imported records
+        #
+        #     Article.import query: -> { where(author_id: author_id) }
+        #
+        # @example Transform records during the import with a lambda
+        #
+        #     transform = lambda do |a|
+        #       { index: { _id: a.id, _parent: a.author_id, data: a.__elasticsearch__.as_indexed_json } }
+        #     end
+        #
+        #     Article.import transform: transform
+        #
+        # @example Update the batch before yielding it
+        #
+        #     class Article
+        #       # ...
+        #       def self.enrich(batch)
+        #         batch.each do |item|
+        #           item.metadata = MyAPI.get_metadata(item.id)
+        #         end
+        #         batch
+        #       end
+        #     end
+        #
+        #     Article.import preprocess: :enrich
+        #
+        # @example Return an array of error elements instead of the number of errors, e.g.
+        #          to try importing these records again
+        #
+        #     Article.import return: 'errors'
+        #
+        def import(options={}, &block)
+          errors       = []
+          refresh      = options.delete(:refresh)   || false
+          target_index = options.delete(:index)     || index_name
+          target_type  = options.delete(:type)      || document_type
+          transform    = options.delete(:transform) || __transform
+          return_value = options.delete(:return)    || 'count'
+
+          unless transform.respond_to?(:call)
+            raise ArgumentError,
+                  "Pass an object responding to `call` as the :transform option, #{transform.class} given"
+          end
+
+          if options.delete(:force)
+            self.create_index! force: true, index: target_index
+          elsif !self.index_exists? index: target_index
+            raise ArgumentError,
+                  "#{target_index} does not exist to be imported into. Use create_index! or the :force option to create it."
+          end
+
+          __find_in_batches(options) do |batch|
+            response = client.bulk \
+                         index: target_index,
+                         type:  target_type,
+                         body:  __batch_to_bulk(batch, transform)
+
+            yield response if block_given?
+
+            errors += response['items'].select { |k, v| k.values.first['error'] }
+          end
+
+          self.refresh_index! if refresh
+
+          case return_value
+          when 'errors'
+            errors
+          else
+            errors.size
+          end
+        end
+
+        def __batch_to_bulk(batch, transform)
+          batch.map { |model| transform.call(model) }
+        end
+      end
+
+    end
+
+  end
+end
diff --git a/elasticsearch-model/lib/elasticsearch/model/indexing.rb b/elasticsearch-model/lib/elasticsearch/model/indexing.rb
new file mode 100644
index 0000000000..9c90e9d823
--- /dev/null
+++ b/elasticsearch-model/lib/elasticsearch/model/indexing.rb
@@ -0,0 +1,434 @@
+module Elasticsearch
+  module Model
+
+    # Provides the necessary support to set up index options (mappings, settings)
+    # as well as instance methods to create, update or delete documents in the index.
+    #
+    # @see ClassMethods#settings
+    # @see ClassMethods#mapping
+    #
+    # @see InstanceMethods#index_document
+    # @see InstanceMethods#update_document
+    # @see InstanceMethods#delete_document
+    #
+    module Indexing
+
+      # Wraps the [index settings](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/setup-configuration.html#configuration-index-settings)
+      #
+      class Settings
+        attr_accessor :settings
+
+        def initialize(settings={})
+          @settings = settings
+        end
+
+        def to_hash
+          @settings
+        end
+
+        def as_json(options={})
+          to_hash
+        end
+      end
+
+      # Wraps the [index mappings](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/mapping.html)
+      #
+      class Mappings
+        attr_accessor :options, :type
+
+        # @private
+        TYPES_WITH_EMBEDDED_PROPERTIES = %w(object nested)
+
+        def initialize(type, options={})
+          raise ArgumentError, "`type` is missing" if type.nil?
+
+          @type    = type
+          @options = options
+          @mapping = {}
+        end
+
+        def indexes(name, options={}, &block)
+          @mapping[name] = options
+
+          if block_given?
+            @mapping[name][:type] ||= 'object'
+            properties = TYPES_WITH_EMBEDDED_PROPERTIES.include?(@mapping[name][:type].to_s) ? :properties : :fields
+
+            @mapping[name][properties] ||= {}
+
+            previous = @mapping
+            begin
+              @mapping = @mapping[name][properties]
+              self.instance_eval(&block)
+            ensure
+              @mapping = previous
+            end
+          end
+
+          # Set the type to `string` by default
+          @mapping[name][:type] ||= 'string'
+
+          self
+        end
+
+        def to_hash
+          { @type.to_sym => @options.merge( properties: @mapping ) }
+        end
+
+        def as_json(options={})
+          to_hash
+        end
+      end
+
+      module ClassMethods
+
+        # Defines mappings for the index
+        #
+        # @example Define mapping for model
+        #
+        #     class Article
+        #       mapping dynamic: 'strict' do
+        #         indexes :foo do
+        #           indexes :bar
+        #         end
+        #         indexes :baz
+        #       end
+        #     end
+        #
+        #     Article.mapping.to_hash
+        #
+        #     # => { :article =>
+        #     #      { :dynamic => "strict",
+        #     #        :properties =>
+        #     #        { :foo => {
+        #     #            :type => "object",
+        #     #            :properties => {
+        #     #              :bar => { :type => "string" }
+        #     #            }
+        #     #          },
+        #     #          :baz => { :type => "string" }
+        #     #        }
+        #     #      }
+        #     #    }
+        #
+        # @example Define index settings and mappings
+        #
+        #     class Article
+        #       settings number_of_shards: 1 do
+        #         mappings do
+        #           indexes :foo
+        #         end
+        #       end
+        #     end
+        #
+        # @example Call the mapping method directly
+        #
+        #     Article.mapping(dynamic: 'strict') { indexes :foo, type: 'long' }
+        #
+        #     Article.mapping.to_hash
+        #
+        #     # => {:article=>{:dynamic=>"strict", :properties=>{:foo=>{:type=>"long"}}}}
+        #
+        # The `mappings` and `settings` methods are accessible directly on the model class,
+        # when it doesn't already define them. Use the `__elasticsearch__` proxy otherwise.
+        #
+        def mapping(options={}, &block)
+          @mapping ||= Mappings.new(document_type, options)
+
+          @mapping.options.update(options) unless options.empty?
+
+          if block_given?
+            @mapping.instance_eval(&block)
+            return self
+          else
+            @mapping
+          end
+        end; alias_method :mappings, :mapping
+
+        # Define settings for the index
+        #
+        # @example Define index settings
+        #
+        #     Article.settings(index: { number_of_shards: 1 })
+        #
+        #     Article.settings.to_hash
+        #
+        #     # => {:index=>{:number_of_shards=>1}}
+        #
+        # You can read settings from any object that responds to :read
+        # as long as its return value can be parsed as either YAML or JSON.
+        #
+        # @example Define index settings from YAML file
+        #
+        #     # config/elasticsearch/articles.yml:
+        #     #
+        #     # index:
+        #     #   number_of_shards: 1
+        #     #
+        #
+        #     Article.settings File.open("config/elasticsearch/articles.yml")
+        #
+        #     Article.settings.to_hash
+        #
+        #     # => { "index" => { "number_of_shards" => 1 } }
+        #
+        #
+        # @example Define index settings from JSON file
+        #
+        #     # config/elasticsearch/articles.json:
+        #     #
+        #     # { "index": { "number_of_shards": 1 } }
+        #     #
+        #
+        #     Article.settings File.open("config/elasticsearch/articles.json")
+        #
+        #     Article.settings.to_hash
+        #
+        #     # => { "index" => { "number_of_shards" => 1 } }
+        #
+        def settings(settings={}, &block)
+          settings = YAML.load(settings.read) if settings.respond_to?(:read)
+          @settings ||= Settings.new(settings)
+
+          @settings.settings.update(settings) unless settings.empty?
+
+          if block_given?
+            self.instance_eval(&block)
+            return self
+          else
+            @settings
+          end
+        end
+
+        def load_settings_from_io(settings)
+          YAML.load(settings.read)
+        end
+
+        # Creates an index with correct name, automatically passing
+        # `settings` and `mappings` defined in the model
+        #
+        # @example Create an index for the `Article` model
+        #
+        #     Article.__elasticsearch__.create_index!
+        #
+        # @example Forcefully create (delete first) an index for the `Article` model
+        #
+        #     Article.__elasticsearch__.create_index! force: true
+        #
+        # @example Pass a specific index name
+        #
+        #     Article.__elasticsearch__.create_index! index: 'my-index'
+        #
+        def create_index!(options={})
+          target_index = options.delete(:index) || self.index_name
+
+          delete_index!(options.merge index: target_index) if options[:force]
+
+          unless index_exists?(index: target_index)
+            self.client.indices.create index: target_index,
+                                       body: {
+                                         settings: self.settings.to_hash,
+                                         mappings: self.mappings.to_hash }
+          end
+        end
+
+        # Returns true if the index exists
+        #
+        # @example Check whether the model's index exists
+        #
+        #     Article.__elasticsearch__.index_exists?
+        #
+        # @example Check whether a specific index exists
+        #
+        #     Article.__elasticsearch__.index_exists? index: 'my-index'
+        #
+        def index_exists?(options={})
+          target_index = options[:index] || self.index_name
+
+          self.client.indices.exists(index: target_index) rescue false
+        end
+
+        # Deletes the index with corresponding name
+        #
+        # @example Delete the index for the `Article` model
+        #
+        #     Article.__elasticsearch__.delete_index!
+        #
+        # @example Pass a specific index name
+        #
+        #     Article.__elasticsearch__.delete_index! index: 'my-index'
+        #
+        def delete_index!(options={})
+          target_index = options.delete(:index) || self.index_name
+
+          begin
+            self.client.indices.delete index: target_index
+          rescue Exception => e
+            if e.class.to_s =~ /NotFound/ && options[:force]
+              STDERR.puts "[!!!] Index does not exist (#{e.class})"
+            else
+              raise e
+            end
+          end
+        end
+
+        # Performs the "refresh" operation for the index (useful e.g. in tests)
+        #
+        # @example Refresh the index for the `Article` model
+        #
+        #     Article.__elasticsearch__.refresh_index!
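+        #
+        # Note that an explicit refresh makes documents searchable immediately,
+        # which is useful in tests; production code usually relies on the
+        # index's periodic refresh interval instead.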
+        #
+        # @example Pass a specific index name
+        #
+        #     Article.__elasticsearch__.refresh_index! index: 'my-index'
+        #
+        # @see http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-refresh.html
+        #
+        def refresh_index!(options={})
+          target_index = options.delete(:index) || self.index_name
+
+          begin
+            self.client.indices.refresh index: target_index
+          rescue Exception => e
+            if e.class.to_s =~ /NotFound/ && options[:force]
+              STDERR.puts "[!!!] Index does not exist (#{e.class})"
+            else
+              raise e
+            end
+          end
+        end
+      end
+
+      module InstanceMethods
+
+        def self.included(base)
+          # Register callback for storing changed attributes for models
+          # which implement `before_save` and `changed_attributes` methods
+          #
+          # @note This is typically triggered only when the module would be
+          #       included in the model directly, not within the proxy.
+          #
+          # @see #update_document
+          #
+          base.before_save do |instance|
+            instance.instance_variable_set(:@__changed_attributes,
+                                           Hash[ instance.changes.map { |key, value| [key, value.last] } ])
+          end if base.respond_to?(:before_save) && base.instance_methods.include?(:changed_attributes)
+        end
+
+        # Serializes the model instance into JSON (by calling `as_indexed_json`),
+        # and saves the document into the Elasticsearch index.
+        #
+        # @param options [Hash] Optional arguments for passing to the client
+        #
+        # @example Index a record
+        #
+        #     @article.__elasticsearch__.index_document
+        #     2013-11-20 16:25:57 +0100: PUT http://localhost:9200/articles/article/1 ...
+        #
+        # @return [Hash] The response from Elasticsearch
+        #
+        # @see http://rubydoc.info/gems/elasticsearch-api/Elasticsearch/API/Actions:index
+        #
+        def index_document(options={})
+          document = self.as_indexed_json
+
+          client.index(
+            { index: index_name,
+              type:  document_type,
+              id:    self.id,
+              body:  document }.merge(options)
+          )
+        end
+
+        # Deletes the model instance from the index
+        #
+        # @param options [Hash] Optional arguments for passing to the client
+        #
+        # @example Delete a record
+        #
+        #     @article.__elasticsearch__.delete_document
+        #     2013-11-20 16:27:00 +0100: DELETE http://localhost:9200/articles/article/1
+        #
+        # @return [Hash] The response from Elasticsearch
+        #
+        # @see http://rubydoc.info/gems/elasticsearch-api/Elasticsearch/API/Actions:delete
+        #
+        def delete_document(options={})
+          client.delete(
+            { index: index_name,
+              type:  document_type,
+              id:    self.id }.merge(options)
+          )
+        end
+
+        # Tries to gather the changed attributes of a model instance
+        # (via [ActiveModel::Dirty](http://api.rubyonrails.org/classes/ActiveModel/Dirty.html)),
+        # performing a _partial_ update of the document.
+        #
+        # When the changed attributes are not available, performs full re-index of the record.
+        #
+        # See the {#update_document_attributes} method for updating specific attributes directly.
+        #
+        # @param options [Hash] Optional arguments for passing to the client
+        #
+        # @example Update a document corresponding to the record
+        #
+        #     @article = Article.first
+        #     @article.update_attribute :title, 'Updated'
+        #     # SQL (0.3ms)  UPDATE "articles" SET "title" = ?...
+        #
+        #     @article.__elasticsearch__.update_document
+        #     # 2013-11-20 17:00:05 +0100: POST http://localhost:9200/articles/article/1/_update ...
+        #     # 2013-11-20 17:00:05 +0100: > {"doc":{"title":"Updated"}}
+        #
+        # @return [Hash] The response from Elasticsearch
+        #
+        # @see http://rubydoc.info/gems/elasticsearch-api/Elasticsearch/API/Actions:update
+        #
+        def update_document(options={})
+          if changed_attributes = self.instance_variable_get(:@__changed_attributes)
+            attributes = if respond_to?(:as_indexed_json)
+              self.as_indexed_json.select { |k,v| changed_attributes.keys.map(&:to_s).include? k.to_s }
+            else
+              changed_attributes
+            end
+
+            client.update(
+              { index: index_name,
+                type:  document_type,
+                id:    self.id,
+                body:  { doc: attributes } }.merge(options)
+            )
+          else
+            index_document(options)
+          end
+        end
+
+        # Perform a _partial_ update of specific document attributes
+        # (without consideration for changed attributes as in {#update_document})
+        #
+        # @param attributes [Hash] Attributes to be updated
+        # @param options    [Hash] Optional arguments for passing to the client
+        #
+        # @example Update the `title` attribute
+        #
+        #     @article = Article.first
+        #     @article.title = "New title"
+        #     @article.__elasticsearch__.update_document_attributes title: "New title"
+        #
+        # @return [Hash] The response from Elasticsearch
+        #
+        def update_document_attributes(attributes, options={})
+          client.update(
+            { index: index_name,
+              type:  document_type,
+              id:    self.id,
+              body:  { doc: attributes } }.merge(options)
+          )
+        end
+      end
+
+    end
+  end
+end
diff --git a/elasticsearch-model/lib/elasticsearch/model/multimodel.rb b/elasticsearch-model/lib/elasticsearch/model/multimodel.rb
new file mode 100644
index 0000000000..8831d4fd09
--- /dev/null
+++ b/elasticsearch-model/lib/elasticsearch/model/multimodel.rb
@@ -0,0 +1,83 @@
+module Elasticsearch
+  module Model
+
+    # Keeps a global registry of classes that include `Elasticsearch::Model`
+    #
+    class Registry
+      def initialize
+        @models = []
+      end
+
+      # Returns the unique instance of the registry (Singleton)
+      #
+      # @api private
+      #
+      def self.__instance
+        @instance ||= new
+      end
+
+      # Adds a model to the registry
+      #
+      def self.add(klass)
+        __instance.add(klass)
+      end
+
+      # Returns an Array of registered models
+      #
+      def self.all
+        __instance.models
+      end
+
+      # Adds a model to the registry
+      #
+      def add(klass)
+        @models << klass
+      end
+
+      # Returns a copy of the registered models
+      #
+      def models
+        @models.dup
+      end
+    end
+
+    # Wraps a collection of models when querying multiple indices
+    #
+    # @see Elasticsearch::Model.search
+    #
+    class Multimodel
+      attr_reader :models
+
+      # @param models [Class] The list of models across which the search will be performed
+      #
+      def initialize(*models)
+        @models = models.flatten
+        @models = Model::Registry.all if @models.empty?
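+        # With no arguments, the search will target every model registered
+        # in {Elasticsearch::Model::Registry}.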
+      end
+
+      # Get an Array of index names used for retrieving documents when doing a search across multiple models
+      #
+      # @return [Array] the list of index names used for retrieving documents
+      #
+      def index_name
+        models.map { |m| m.index_name }
+      end
+
+      # Get an Array of document types used for retrieving documents when doing a search across multiple models
+      #
+      # @return [Array] the list of document types used for retrieving documents
+      #
+      def document_type
+        models.map { |m| m.document_type }
+      end
+
+      # Get the client common for all models
+      #
+      # @return Elasticsearch::Transport::Client
+      #
+      def client
+        Elasticsearch::Model.client
+      end
+    end
+  end
+end
diff --git a/elasticsearch-model/lib/elasticsearch/model/naming.rb b/elasticsearch-model/lib/elasticsearch/model/naming.rb
new file mode 100644
index 0000000000..ce510d2d47
--- /dev/null
+++ b/elasticsearch-model/lib/elasticsearch/model/naming.rb
@@ -0,0 +1,122 @@
+module Elasticsearch
+  module Model
+
+    # Provides methods for getting and setting index name and document type for the model
+    #
+    module Naming
+
+      module ClassMethods
+
+        # Get or set the name of the index
+        #
+        # @example Set the index name for the `Article` model
+        #
+        #     class Article
+        #       index_name "articles-#{Rails.env}"
+        #     end
+        #
+        # @example Set the index name for the `Article` model and re-evaluate it on each call
+        #
+        #     class Article
+        #       index_name { "articles-#{Time.now.year}" }
+        #     end
+        #
+        # @example Directly set the index name for the `Article` model
+        #
+        #     Article.index_name "articles-#{Rails.env}"
+        #
+        #
+        def index_name name=nil, &block
+          if name || block_given?
+            return (@index_name = name || block)
+          end
+
+          if @index_name.respond_to?(:call)
+            @index_name.call
+          else
+            @index_name || self.model_name.collection.gsub(/\//, '-')
+          end
+        end
+
+        # Set the index name
+        #
+        # @see index_name
+        def index_name=(name)
+          @index_name = name
+        end
+
+        # Get or set the document type
+        #
+        # @example Set the document type for the `Article` model
+        #
+        #     class Article
+        #       document_type "my-article"
+        #     end
+        #
+        # @example Directly set the document type for the `Article` model
+        #
+        #     Article.document_type "my-article"
+        #
+        def document_type name=nil
+          @document_type = name || @document_type || self.model_name.element
+        end
+
+
+        # Set the document type
+        #
+        # @see document_type
+        #
+        def document_type=(name)
+          @document_type = name
+        end
+      end
+
+      module InstanceMethods
+
+        # Get or set the index name for the model instance
+        #
+        # @example Set the index name for an instance of the `Article` model
+        #
+        #     @article.index_name "articles-#{@article.user_id}"
+        #     @article.__elasticsearch__.update_document
+        #
+        def index_name name=nil, &block
+          if name || block_given?
+            return (@index_name = name || block)
+          end
+
+          if @index_name.respond_to?(:call)
+            @index_name.call
+          else
+            @index_name || self.class.index_name
+          end
+        end
+
+        # Set the index name
+        #
+        # @see index_name
+        def index_name=(name)
+          @index_name = name
+        end
+
+        # @example Set the document type for an instance of the `Article` model
+        #
+        #     @article.document_type "my-article"
+        #     @article.__elasticsearch__.update_document
+        #
+        def document_type name=nil
+          @document_type = name || @document_type || self.class.document_type
+        end
+
+        # Set the document type
+        #
+        # @see document_type
+        #
+        def document_type=(name)
+          @document_type = name
+        end
+      end
+
+    end
+  end
+end
diff --git a/elasticsearch-model/lib/elasticsearch/model/proxy.rb b/elasticsearch-model/lib/elasticsearch/model/proxy.rb
new file mode 100644
index 0000000000..3e37f28ec3
--- /dev/null
+++ b/elasticsearch-model/lib/elasticsearch/model/proxy.rb
@@ -0,0 +1,137 @@
+module Elasticsearch
+  module Model
+
+    # This module provides a proxy interfacing between the including class and
+    # {Elasticsearch::Model}, preventing the pollution of the including class namespace.
+    #
+    # The only "gateway" between the model and Elasticsearch::Model is the
+    # `__elasticsearch__` class and instance method.
+    #
+    # The including class must be compatible with
+    # [ActiveModel](https://github.com/rails/rails/tree/master/activemodel).
+    #
+    # @example Include the {Elasticsearch::Model} module into an `Article` model
+    #
+    #     class Article < ActiveRecord::Base
+    #       include Elasticsearch::Model
+    #     end
+    #
+    #     Article.__elasticsearch__.respond_to?(:search)
+    #     # => true
+    #
+    #     article = Article.first
+    #
+    #     article.respond_to? :index_document
+    #     # => false
+    #
+    #     article.__elasticsearch__.respond_to?(:index_document)
+    #     # => true
+    #
+    module Proxy
+
+      # Define the `__elasticsearch__` class and instance methods in the including class
+      # and register a callback for intercepting changes in the model.
+      #
+      # @note The callback is triggered only when `Elasticsearch::Model` is included in the
+      #       module and the functionality is accessible via the proxy.
+      #
+      def self.included(base)
+        base.class_eval do
+          # {ClassMethodsProxy} instance, accessed as `MyModel.__elasticsearch__`
+          #
+          def self.__elasticsearch__ &block
+            @__elasticsearch__ ||= ClassMethodsProxy.new(self)
+            @__elasticsearch__.instance_eval(&block) if block_given?
+            @__elasticsearch__
+          end
+
+          # {InstanceMethodsProxy}, accessed as `@mymodel.__elasticsearch__`
+          #
+          def __elasticsearch__ &block
+            @__elasticsearch__ ||= InstanceMethodsProxy.new(self)
+            @__elasticsearch__.instance_eval(&block) if block_given?
+            @__elasticsearch__
+          end
+
+          # Register a callback for storing changed attributes for models which implement
+          # `before_save` and `changed_attributes` methods (when `Elasticsearch::Model` is included)
+          #
+          # @see http://api.rubyonrails.org/classes/ActiveModel/Dirty.html
+          #
+          before_save do |i|
+            changed_attr = i.__elasticsearch__.instance_variable_get(:@__changed_attributes) || {}
+            i.__elasticsearch__.instance_variable_set(:@__changed_attributes,
+                                                      changed_attr.merge(Hash[ i.changes.map { |key, value| [key, value.last] } ]))
+          end if respond_to?(:before_save) && instance_methods.include?(:changed_attributes)
+        end
+      end
+
+      # @overload dup
+      #
+      #   Returns a copy of this object. Resets the __elasticsearch__ proxy so
+      #   the duplicate will build its own proxy.
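+      #
+      #   @example The duplicate builds a proxy around itself (illustrative)
+      #
+      #       article2 = article.dup
+      #       article2.__elasticsearch__.target.equal?(article2)
+      #       # => true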
+      def initialize_dup(_)
+        @__elasticsearch__ = nil
+        super
+      end
+
+      # Common module for the proxy classes
+      #
+      module Base
+        attr_reader :target
+
+        def initialize(target)
+          @target = target
+        end
+
+        # Delegate methods to `@target`
+        #
+        def method_missing(method_name, *arguments, &block)
+          target.respond_to?(method_name) ? target.__send__(method_name, *arguments, &block) : super
+        end
+
+        # Respond to methods from `@target`
+        #
+        def respond_to?(method_name, include_private = false)
+          target.respond_to?(method_name) || super
+        end
+
+        def inspect
+          "[PROXY] #{target.inspect}"
+        end
+      end
+
+      # A proxy interfacing between Elasticsearch::Model class methods and model class methods
+      #
+      # TODO: Inherit from BasicObject and make Pry's `ls` command behave?
+      #
+      class ClassMethodsProxy
+        include Base
+      end
+
+      # A proxy interfacing between Elasticsearch::Model instance methods and model instance methods
+      #
+      # TODO: Inherit from BasicObject and make Pry's `ls` command behave?
+      #
+      class InstanceMethodsProxy
+        include Base
+
+        def klass
+          target.class
+        end
+
+        def class
+          klass.__elasticsearch__
+        end
+
+        # Need to redefine `as_json` because we're not inheriting from `BasicObject`;
+        # see TODO note above.
+        #
+        def as_json(options={})
+          target.as_json(options)
+        end
+      end
+
+    end
+  end
+end
diff --git a/elasticsearch-model/lib/elasticsearch/model/response.rb b/elasticsearch-model/lib/elasticsearch/model/response.rb
new file mode 100644
index 0000000000..fad3828b39
--- /dev/null
+++ b/elasticsearch-model/lib/elasticsearch/model/response.rb
@@ -0,0 +1,83 @@
+module Elasticsearch
+  module Model
+
+    # Contains modules and classes for wrapping the response from Elasticsearch
+    #
+    module Response
+
+      # Encapsulate the response returned from the Elasticsearch client
+      #
+      # Implements Enumerable and forwards its methods to the {#results} object.
+      #
+      class Response
+        attr_reader :klass, :search, :response,
+                    :took, :timed_out, :shards
+
+        include Enumerable
+
+        delegate :each, :empty?, :size, :slice, :[], :to_ary, to: :results
+
+        def initialize(klass, search, options={})
+          @klass  = klass
+          @search = search
+        end
+
+        # Returns the Elasticsearch response
+        #
+        # @return [Hash]
+        #
+        def response
+          @response ||= begin
+            Hashie::Mash.new(search.execute!)
+          end
+        end
+
+        # Returns the collection of "hits" from Elasticsearch
+        #
+        # @return [Results]
+        #
+        def results
+          @results ||= Results.new(klass, self)
+        end
+
+        # Returns the collection of records from the database
+        #
+        # @return [Records]
+        #
+        def records(options = {})
+          @records ||= Records.new(klass, self, options)
+        end
+
+        # Returns the "took" time
+        #
+        def took
+          response['took']
+        end
+
+        # Returns whether the response timed out
+        #
+        def timed_out
+          response['timed_out']
+        end
+
+        # Returns the statistics on shards
+        #
+        def shards
+          Hashie::Mash.new(response['_shards'])
+        end
+
+        # Returns a Hashie::Mash of the aggregations
+        #
+        def aggregations
+          response['aggregations'] ? Hashie::Mash.new(response['aggregations']) : nil
+        end
+
+        # Returns a Hashie::Mash of the suggestions
+        #
+        def suggestions
+          Suggestions.new(response['suggest'])
+        end
+      end
+    end
+  end
+end
diff --git a/elasticsearch-model/lib/elasticsearch/model/response/base.rb b/elasticsearch-model/lib/elasticsearch/model/response/base.rb
new file mode 100644
index 0000000000..3bb8005b63
--- /dev/null
+++ b/elasticsearch-model/lib/elasticsearch/model/response/base.rb
@@ -0,0 +1,44 @@
+module Elasticsearch
+  module Model
+    module Response
+      # Common functionality for classes in the {Elasticsearch::Model::Response} module
+      #
+      module Base
+        attr_reader :klass, :response
+
+        # @param klass    [Class] The name of the model class
+        # @param response [Hash]  The full response returned from Elasticsearch client
+        # @param options  [Hash]  Optional parameters
+        #
+        def initialize(klass, response, options={})
+          @klass    = klass
+          @response = response
+        end
+
+        # @abstract Implement this method in specific class
+        #
+        def results
+          raise NotImplemented, "Implement this method in #{klass}"
+        end
+
+        # @abstract Implement this method in specific class
+        #
+        def records
+          raise NotImplemented, "Implement this method in #{klass}"
+        end
+
+        # Returns the total number of hits
+        #
+        def total
+          response.response['hits']['total']
+        end
+
+        # Returns the max_score
+        #
+        def max_score
+          response.response['hits']['max_score']
+        end
+      end
+    end
+  end
+end
diff --git a/elasticsearch-model/lib/elasticsearch/model/response/pagination.rb b/elasticsearch-model/lib/elasticsearch/model/response/pagination.rb
new file mode 100644
index 0000000000..c8e74b7934
--- /dev/null
+++ b/elasticsearch-model/lib/elasticsearch/model/response/pagination.rb
@@ -0,0 +1,192 @@
+module Elasticsearch
+  module Model
+    module Response
+
+      # Pagination for search results/records
+      #
+      module Pagination
+        # Allow models to be paginated with the "kaminari" gem [https://github.com/amatsuda/kaminari]
+        #
+        module Kaminari
+          def self.included(base)
+            # Include the Kaminari configuration and paging method in response
+            #
+            base.__send__ :include, ::Kaminari::ConfigurationMethods::ClassMethods
+            base.__send__ :include, ::Kaminari::PageScopeMethods
+
+            # Include the Kaminari paging methods in results and records
+            #
+            Elasticsearch::Model::Response::Results.__send__ :include, ::Kaminari::ConfigurationMethods::ClassMethods
+            Elasticsearch::Model::Response::Results.__send__ :include, ::Kaminari::PageScopeMethods
+            Elasticsearch::Model::Response::Records.__send__ :include, ::Kaminari::PageScopeMethods
+
+            Elasticsearch::Model::Response::Results.__send__ :delegate, :limit_value, :offset_value, :total_count, :max_pages, to: :response
+            Elasticsearch::Model::Response::Records.__send__ :delegate, :limit_value, :offset_value, :total_count, :max_pages, to: :response
+
+            base.class_eval <<-RUBY, __FILE__, __LINE__ + 1
+              # Define the `page` Kaminari method
+              #
+              def #{::Kaminari.config.page_method_name}(num=nil)
+                @results  = nil
+                @records  = nil
+                @response = nil
+                @page     = [num.to_i, 1].max
+                @per_page ||= __default_per_page
+
+                self.search.definition.update size: @per_page,
+                                              from: @per_page * (@page - 1)
+
+                self
+              end
+            RUBY
+          end
+
+          # Returns the current "limit" (`size`) value
+          #
+          def limit_value
+            case
+            when search.definition[:size]
+              search.definition[:size]
+            else
+              __default_per_page
+            end
+          end
+
+          # Returns the current "offset" (`from`) value
+          #
+          def offset_value
+            case
+            when search.definition[:from]
+              search.definition[:from]
+            else
+              0
+            end
+          end
+
+          # Set the "limit" (`size`) value
the "limit" (`size`) value + # + def limit(value) + return self if value.to_i <= 0 + @results = nil + @records = nil + @response = nil + @per_page = value.to_i + + search.definition.update :size => @per_page + search.definition.update :from => @per_page * (@page - 1) if @page + self + end + + # Set the "offset" (`from`) value + # + def offset(value) + return self if value.to_i < 0 + @results = nil + @records = nil + @response = nil + @page = nil + search.definition.update :from => value.to_i + self + end + + # Returns the total number of results + # + def total_count + results.total + end + + # Returns the models's `per_page` value or the default + # + # @api private + # + def __default_per_page + klass.respond_to?(:default_per_page) && klass.default_per_page || ::Kaminari.config.default_per_page + end + end + + # Allow models to be paginated with the "will_paginate" gem [https://github.com/mislav/will_paginate] + # + module WillPaginate + def self.included(base) + base.__send__ :include, ::WillPaginate::CollectionMethods + + # Include the paging methods in results and records + # + methods = [:current_page, :offset, :length, :per_page, :total_entries, :total_pages, :previous_page, :next_page, :out_of_bounds?] + Elasticsearch::Model::Response::Results.__send__ :delegate, *methods, to: :response + Elasticsearch::Model::Response::Records.__send__ :delegate, *methods, to: :response + end + + def offset + (current_page - 1) * per_page + end + + def length + search.definition[:size] + end + + # Main pagination method + # + # @example + # + # Article.search('foo').paginate(page: 1, per_page: 30) + # + def paginate(options) + param_name = options[:param_name] || :page + page = [options[param_name].to_i, 1].max + per_page = (options[:per_page] || __default_per_page).to_i + + search.definition.update size: per_page, + from: (page - 1) * per_page + self + end + + # Return the current page + # + def current_page + search.definition[:from] / per_page + 1 if search.definition[:from] && per_page + end + + # Pagination method + # + # @example + # + # Article.search('foo').page(2) + # + def page(num) + paginate(page: num, per_page: per_page) # shorthand + end + + # Return or set the "size" value + # + # @example + # + # Article.search('foo').per_page(15).page(2) + # + def per_page(num = nil) + if num.nil? + search.definition[:size] + else + paginate(page: current_page, per_page: num) # shorthand + end + end + + # Returns the total number of results + # + def total_entries + results.total + end + + # Returns the models's `per_page` value or the default + # + # @api private + # + def __default_per_page + klass.respond_to?(:per_page) && klass.per_page || ::WillPaginate.per_page + end + end + end + + end + end +end diff --git a/elasticsearch-model/lib/elasticsearch/model/response/records.rb b/elasticsearch-model/lib/elasticsearch/model/response/records.rb new file mode 100644 index 0000000000..4638ca6892 --- /dev/null +++ b/elasticsearch-model/lib/elasticsearch/model/response/records.rb @@ -0,0 +1,73 @@ +module Elasticsearch + module Model + module Response + + # Encapsulates the collection of records returned from the database + # + # Implements Enumerable and forwards its methods to the {#records} object, + # which is provided by an {Elasticsearch::Model::Adapter::Adapter} implementation. 
+ # + class Records + include Enumerable + + delegate :each, :empty?, :size, :slice, :[], :to_a, :to_ary, to: :records + + attr_accessor :options + + include Base + + # @see Base#initialize + # + def initialize(klass, response, options={}) + super + + # Include the module provided by the adapter in the singleton class ("metaclass") + # + adapter = Adapter.from_class(klass) + metaclass = class << self; self; end + metaclass.__send__ :include, adapter.records_mixin + + self.options = options + self + end + + # Returns the hit IDs + # + def ids + response.response['hits']['hits'].map { |hit| hit['_id'] } + end + + # Returns the {Results} collection + # + def results + response.results + end + + # Yields [record, hit] pairs to the block + # + def each_with_hit(&block) + records.to_a.zip(results).each(&block) + end + + # Yields [record, hit] pairs and returns the result + # + def map_with_hit(&block) + records.to_a.zip(results).map(&block) + end + + # Delegate methods to `@records` + # + def method_missing(method_name, *arguments) + records.respond_to?(method_name) ? records.__send__(method_name, *arguments) : super + end + + # Respond to methods from `@records` + # + def respond_to?(method_name, include_private = false) + records.respond_to?(method_name) || super + end + + end + end + end +end diff --git a/elasticsearch-model/lib/elasticsearch/model/response/result.rb b/elasticsearch-model/lib/elasticsearch/model/response/result.rb new file mode 100644 index 0000000000..217723e8b9 --- /dev/null +++ b/elasticsearch-model/lib/elasticsearch/model/response/result.rb @@ -0,0 +1,63 @@ +module Elasticsearch + module Model + module Response + + # Encapsulates the "hit" returned from the Elasticsearch client + # + # Wraps the raw Hash in a `Hashie::Mash` instance, providing + # access to the Hash properties by calling Ruby methods.
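+ # + # @example An illustrative sketch of the dynamic access (the document properties are hypothetical) + # + # result = Result.new('_id' => '1', '_type' => 'article', '_source' => { 'title' => 'Foo' }) + # result.id # => "1" + # result.type # => "article" + # result.title # => "Foo" (found in `_source` via `method_missing`) + # result.title? # => true + # result.boo? # => false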
+ # + # @see https://github.com/intridea/hashie + # + class Result + + # @param attributes [Hash] A Hash with document properties + # + def initialize(attributes={}) + @result = Hashie::Mash.new(attributes) + end + + # Return document `_id` as `id` + # + def id + @result['_id'] + end + + # Return document `_type` as `type` + # + def type + @result['_type'] + end + + # Delegate methods to `@result` or `@result._source` + # + def method_missing(name, *arguments) + case + when name.to_s.end_with?('?') + @result.__send__(name, *arguments) || ( @result._source && @result._source.__send__(name, *arguments) ) + when @result.respond_to?(name) + @result.__send__ name, *arguments + when @result._source && @result._source.respond_to?(name) + @result._source.__send__ name, *arguments + else + super + end + end + + # Respond to methods from `@result` or `@result._source` + # + def respond_to?(method_name, include_private = false) + @result.respond_to?(method_name.to_sym) || \ + @result._source && @result._source.respond_to?(method_name.to_sym) || \ + super + end + + def as_json(options={}) + @result.as_json(options) + end + + # TODO: #to_s, #inspect, with support for Pry + end + end + end +end diff --git a/elasticsearch-model/lib/elasticsearch/model/response/results.rb b/elasticsearch-model/lib/elasticsearch/model/response/results.rb new file mode 100644 index 0000000000..006e66a46b --- /dev/null +++ b/elasticsearch-model/lib/elasticsearch/model/response/results.rb @@ -0,0 +1,31 @@ +module Elasticsearch + module Model + module Response + + # Encapsulates the collection of documents returned from Elasticsearch + # + # Implements Enumerable and forwards its methods to the {#results} object. + # + class Results + include Base + include Enumerable + + delegate :each, :empty?, :size, :slice, :[], :to_a, :to_ary, to: :results + + # @see Base#initialize + # + def initialize(klass, response, options={}) + super + end + + # Returns the collection of {Result} objects + # + def results + # TODO: Configurable custom wrapper + response.response['hits']['hits'].map { |hit| Result.new(hit) } + end + + end + end + end +end diff --git a/elasticsearch-model/lib/elasticsearch/model/response/suggestions.rb b/elasticsearch-model/lib/elasticsearch/model/response/suggestions.rb new file mode 100644 index 0000000000..5088767cef --- /dev/null +++ b/elasticsearch-model/lib/elasticsearch/model/response/suggestions.rb @@ -0,0 +1,13 @@ +module Elasticsearch + module Model + module Response + + class Suggestions < Hashie::Mash + def terms + self.to_a.map { |k,v| v.first['options'] }.flatten.map {|v| v['text']}.uniq + end + end + + end + end +end diff --git a/elasticsearch-model/lib/elasticsearch/model/searching.rb b/elasticsearch-model/lib/elasticsearch/model/searching.rb new file mode 100644 index 0000000000..604657d5e0 --- /dev/null +++ b/elasticsearch-model/lib/elasticsearch/model/searching.rb @@ -0,0 +1,109 @@ +module Elasticsearch + module Model + + # Contains functionality related to searching.
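+ # + # @example A sketch of the three request forms `SearchRequest` dispatches on (`Article` is a hypothetical model) + # + # Article.search 'fox' # plain string -> sent as the `q` URI parameter + # Article.search '{"query":{"match_all":{}}}' # JSON string -> sent as the request body + # Article.search query: { match: { title: 'fox' } } # Hash, or object responding to `to_hash` -> sent as the request body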
+ # + module Searching + + # Wraps a search request definition + # + class SearchRequest + attr_reader :klass, :definition, :options + + # @param klass [Class] The class of the model + # @param query_or_payload [String,Hash,Object] The search request definition + # (string, JSON, Hash, or object responding to `to_hash`) + # @param options [Hash] Optional parameters to be passed to the Elasticsearch client + # + def initialize(klass, query_or_payload, options={}) + @klass = klass + @options = options + + __index_name = options[:index] || klass.index_name + __document_type = options[:type] || klass.document_type + + case + # search query: ... + when query_or_payload.respond_to?(:to_hash) + body = query_or_payload.to_hash + + # search '{ "query" : ... }' + when query_or_payload.is_a?(String) && query_or_payload =~ /^\s*{/ + body = query_or_payload + + # search '...' + else + q = query_or_payload + end + + if body + @definition = { index: __index_name, type: __document_type, body: body }.update options + else + @definition = { index: __index_name, type: __document_type, q: q }.update options + end + end + + # Performs the request and returns the response from client + # + # @return [Hash] The response from Elasticsearch + # + def execute! + klass.client.search(@definition) + end + end + + module ClassMethods + + # Provides a `search` method for the model to easily search within an index/type + # corresponding to the model settings. + # + # @param query_or_payload [String,Hash,Object] The search request definition + # (string, JSON, Hash, or object responding to `to_hash`) + # @param options [Hash] Optional parameters to be passed to the Elasticsearch client + # + # @return [Elasticsearch::Model::Response::Response] + # + # @example Simple search in `Article` + # + # Article.search 'foo' + # + # @example Search using a search definition as a Hash + # + # response = Article.search \ + # query: { + # match: { + # title: 'foo' + # } + # }, + # highlight: { + # fields: { + # title: {} + # } + # }, + # size: 50 + # + # response.results.first.title + # # => "Foo" + # + # response.results.first.highlight.title + # # => ["Foo"] + # + # response.records.first.title + # # Article Load (0.2ms) SELECT "articles".* FROM "articles" WHERE "articles"."id" IN (1, 3) + # # => "Foo" + # + # @example Search using a search definition as a JSON string + # + # Article.search '{"query" : { "match_all" : {} }}' + # + def search(query_or_payload, options={}) + search = SearchRequest.new(self, query_or_payload, options) + + Response::Response.new(self, search) + end + + end + + end + end +end diff --git a/elasticsearch-model/lib/elasticsearch/model/serializing.rb b/elasticsearch-model/lib/elasticsearch/model/serializing.rb new file mode 100644 index 0000000000..659a58bb2a --- /dev/null +++ b/elasticsearch-model/lib/elasticsearch/model/serializing.rb @@ -0,0 +1,35 @@ +module Elasticsearch + module Model + + # Contains functionality for serializing model instances for the client + # + module Serializing + + module ClassMethods + end + + module InstanceMethods + + # Serialize the record as a Hash, to be passed to the client. + # + # Re-define this method to customize the serialization. 
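+ # + # @example A minimal sketch of a customized serialization (the `title` and `tags` attributes are hypothetical) + # + # def as_indexed_json(options={}) + # { title: title, tags: tags.map(&:name) } + # end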
+ # + # @return [Hash] + # + # @example Return the model instance as a Hash + # + # Article.first.__elasticsearch__.as_indexed_json + # => {"title"=>"Foo"} + # + # @see Elasticsearch::Model::Indexing + # + def as_indexed_json(options={}) + # TODO: Play with the `MyModel.indexes` method -- reject non-mapped attributes, `:as` options, etc + self.as_json(options.merge root: false) + end + + end + + end + end +end diff --git a/elasticsearch-model/lib/elasticsearch/model/version.rb b/elasticsearch-model/lib/elasticsearch/model/version.rb new file mode 100644 index 0000000000..44cfdabeab --- /dev/null +++ b/elasticsearch-model/lib/elasticsearch/model/version.rb @@ -0,0 +1,5 @@ +module Elasticsearch + module Model + VERSION = "0.1.9" + end +end diff --git a/elasticsearch-model/test/integration/active_record_associations_parent_child.rb b/elasticsearch-model/test/integration/active_record_associations_parent_child.rb new file mode 100644 index 0000000000..39be1144b3 --- /dev/null +++ b/elasticsearch-model/test/integration/active_record_associations_parent_child.rb @@ -0,0 +1,139 @@ +require 'test_helper' +require 'active_record' + +class Question < ActiveRecord::Base + include Elasticsearch::Model + + has_many :answers, dependent: :destroy + + index_name 'questions_and_answers' + + mapping do + indexes :title + indexes :text + indexes :author + end + + after_commit lambda { __elasticsearch__.index_document }, on: :create + after_commit lambda { __elasticsearch__.update_document }, on: :update + after_commit lambda { __elasticsearch__.delete_document }, on: :destroy +end + +class Answer < ActiveRecord::Base + include Elasticsearch::Model + + belongs_to :question + + index_name 'questions_and_answers' + + mapping _parent: { type: 'question', required: true } do + indexes :text + indexes :author + end + + after_commit lambda { __elasticsearch__.index_document(parent: question_id) }, on: :create + after_commit lambda { __elasticsearch__.update_document(parent: question_id) }, on: :update + after_commit lambda { __elasticsearch__.delete_document(parent: question_id) }, on: :destroy +end + +module ParentChildSearchable + INDEX_NAME = 'questions_and_answers' + + def create_index!(options={}) + client = Question.__elasticsearch__.client + client.indices.delete index: INDEX_NAME rescue nil if options[:force] + + settings = Question.settings.to_hash.merge Answer.settings.to_hash + mappings = Question.mappings.to_hash.merge Answer.mappings.to_hash + + client.indices.create index: INDEX_NAME, + body: { + settings: settings.to_hash, + mappings: mappings.to_hash } + end + + extend self +end + +module Elasticsearch + module Model + class ActiveRecordAssociationsParentChildIntegrationTest < Elasticsearch::Test::IntegrationTestCase + + context "ActiveRecord associations with parent/child modelling" do + setup do + ActiveRecord::Schema.define(version: 1) do + create_table :questions do |t| + t.string :title + t.text :text + t.string :author + t.timestamps + end + create_table :answers do |t| + t.text :text + t.string :author + t.references :question + t.timestamps + end and add_index(:answers, :question_id) + end + + Question.delete_all + ParentChildSearchable.create_index! force: true + + q_1 = Question.create! title: 'First Question', author: 'John' + q_2 = Question.create! title: 'Second Question', author: 'Jody' + + q_1.answers.create! text: 'Lorem Ipsum', author: 'Adam' + q_1.answers.create! text: 'Dolor Sit', author: 'Ryan' + + q_2.answers.create! 
text: 'Amet Et', author: 'John' + + Question.__elasticsearch__.refresh_index! + end + + should "find questions by matching answers" do + response = Question.search( + { query: { + has_child: { + type: 'answer', + query: { + match: { + author: 'john' + } + } + } + } + }) + + assert_equal 'Second Question', response.records.first.title + end + + should "find answers for matching questions" do + response = Answer.search( + { query: { + has_parent: { + parent_type: 'question', + query: { + match: { + author: 'john' + } + } + } + } + }) + + assert_same_elements ['Adam', 'Ryan'], response.records.map(&:author) + end + + should "delete answers when the question is deleted" do + Question.where(title: 'First Question').each(&:destroy) + Question.__elasticsearch__.refresh_index! + + response = Answer.search query: { match_all: {} } + + assert_equal 1, response.results.total + end + end + + end + end +end diff --git a/elasticsearch-model/test/integration/active_record_associations_test.rb b/elasticsearch-model/test/integration/active_record_associations_test.rb new file mode 100644 index 0000000000..af67ad889c --- /dev/null +++ b/elasticsearch-model/test/integration/active_record_associations_test.rb @@ -0,0 +1,326 @@ +require 'test_helper' +require 'active_record' + +module Elasticsearch + module Model + class ActiveRecordAssociationsIntegrationTest < Elasticsearch::Test::IntegrationTestCase + + context "ActiveRecord associations" do + setup do + + # ----- Schema definition --------------------------------------------------------------- + + ActiveRecord::Schema.define(version: 1) do + create_table :categories do |t| + t.string :title + t.timestamps + end + + create_table :categories_posts, id: false do |t| + t.references :post, :category + end + + create_table :authors do |t| + t.string :first_name, :last_name + t.timestamps + end + + create_table :authorships do |t| + t.string :first_name, :last_name + t.references :post + t.references :author + t.timestamps + end + + create_table :comments do |t| + t.string :text + t.string :author + t.references :post + t.timestamps + end and add_index(:comments, :post_id) + + create_table :posts do |t| + t.string :title + t.text :text + t.boolean :published + t.timestamps + end + end + + # ----- Models definition ------------------------------------------------------------------------- + + class Category < ActiveRecord::Base + has_and_belongs_to_many :posts + end + + class Author < ActiveRecord::Base + has_many :authorships + + def full_name + [first_name, last_name].compact.join(' ') + end + end + + class Authorship < ActiveRecord::Base + belongs_to :author + belongs_to :post, touch: true + end + + class Comment < ActiveRecord::Base + belongs_to :post, touch: true + end + + class Post < ActiveRecord::Base + has_and_belongs_to_many :categories, after_add: [ lambda { |a,c| a.__elasticsearch__.index_document } ], + after_remove: [ lambda { |a,c| a.__elasticsearch__.index_document } ] + has_many :authorships + has_many :authors, through: :authorships + has_many :comments + end + + # ----- Search integration via Concern module ----------------------------------------------------- + + module Searchable + extend ActiveSupport::Concern + + included do + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + # Set up the mapping + # + settings index: { number_of_shards: 1, number_of_replicas: 0 } do + mapping do + indexes :title, analyzer: 'snowball' + indexes :created_at, type: 'date' + + indexes :authors do + indexes :first_name + indexes 
:last_name + indexes :full_name, type: 'multi_field' do + indexes :full_name + indexes :raw, analyzer: 'keyword' + end + end + + indexes :categories, analyzer: 'keyword' + + indexes :comments, type: 'nested' do + indexes :text + indexes :author + end + end + end + + # Customize the JSON serialization for Elasticsearch + # + def as_indexed_json(options={}) + { + title: title, + text: text, + categories: categories.map(&:title), + authors: authors.as_json(methods: [:full_name], only: [:full_name, :first_name, :last_name]), + comments: comments.as_json(only: [:text, :author]) + } + end + + # Update document in the index after touch + # + after_touch() { __elasticsearch__.index_document } + end + end + + # Include the search integration + # + Post.__send__ :include, Searchable + Comment.__send__ :include, Elasticsearch::Model + Comment.__send__ :include, Elasticsearch::Model::Callbacks + + # ----- Reset the indices ----------------------------------------------------------------- + + Post.delete_all + Post.__elasticsearch__.create_index! force: true + + Comment.delete_all + Comment.__elasticsearch__.create_index! force: true + end + + should "index and find a document" do + Post.create! title: 'Test' + Post.create! title: 'Testing Coding' + Post.create! title: 'Coding' + Post.__elasticsearch__.refresh_index! + + response = Post.search('title:test') + + assert_equal 2, response.results.size + assert_equal 2, response.records.size + + assert_equal 'Test', response.results.first.title + assert_equal 'Test', response.records.first.title + end + + should "reindex a document after categories are changed" do + # Create categories + category_a = Category.where(title: "One").first_or_create! + category_b = Category.where(title: "Two").first_or_create! + + # Create post + post = Post.create! title: "First Post", text: "This is the first post..." + + # Assign categories + post.categories = [category_a, category_b] + + Post.__elasticsearch__.refresh_index! + + query = { query: { + filtered: { + query: { + multi_match: { + fields: ['title'], + query: 'first' + } + }, + filter: { + terms: { + categories: ['One'] + } + } + } + } + } + + response = Post.search query + + assert_equal 1, response.results.size + assert_equal 1, response.records.size + + # Remove category "One" + post.categories = [category_b] + + Post.__elasticsearch__.refresh_index! + response = Post.search query + + assert_equal 0, response.results.size + assert_equal 0, response.records.size + end + + should "reindex a document after authors are changed" do + # Create authors + author_a = Author.where(first_name: "John", last_name: "Smith").first_or_create! + author_b = Author.where(first_name: "Mary", last_name: "Smith").first_or_create! + author_c = Author.where(first_name: "Kobe", last_name: "Griss").first_or_create! + + # Create posts + post_1 = Post.create! title: "First Post", text: "This is the first post..." + post_2 = Post.create! title: "Second Post", text: "This is the second post..." + post_3 = Post.create! title: "Third Post", text: "This is the third post..." + + # Assign authors + post_1.authors = [author_a, author_b] + post_2.authors = [author_a] + post_3.authors = [author_c] + + Post.__elasticsearch__.refresh_index! + + response = Post.search 'authors.full_name:john' + + assert_equal 2, response.results.size + assert_equal 2, response.records.size + + post_3.authors << author_a + + Post.__elasticsearch__.refresh_index! 
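+ # Creating the authorship record touches the post (`belongs_to :post, touch: true` above), and the `after_touch` callback reindexes it, so a third post should now match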
+ + response = Post.search 'authors.full_name:john' + + assert_equal 3, response.results.size + assert_equal 3, response.records.size + end if defined?(::ActiveRecord) && ::ActiveRecord::VERSION::MAJOR >= 4 + + should "reindex a document after comments are added" do + # Create posts + post_1 = Post.create! title: "First Post", text: "This is the first post..." + post_2 = Post.create! title: "Second Post", text: "This is the second post..." + + # Add comments + post_1.comments.create! author: 'John', text: 'Excellent' + post_1.comments.create! author: 'Abby', text: 'Good' + + post_2.comments.create! author: 'John', text: 'Terrible' + + Post.__elasticsearch__.refresh_index! + + response = Post.search 'comments.author:john AND comments.text:good' + assert_equal 0, response.results.size + + # Add comment + post_1.comments.create! author: 'John', text: 'Or rather just good...' + + Post.__elasticsearch__.refresh_index! + + response = Post.search 'comments.author:john AND comments.text:good' + assert_equal 0, response.results.size + + response = Post.search \ + query: { + nested: { + path: 'comments', + query: { + bool: { + must: [ + { match: { 'comments.author' => 'john' } }, + { match: { 'comments.text' => 'good' } } + ] + } + } + } + } + + assert_equal 1, response.results.size + end if defined?(::ActiveRecord) && ::ActiveRecord::VERSION::MAJOR >= 4 + + should "reindex a document after Post#touch" do + # Create categories + category_a = Category.where(title: "One").first_or_create! + + # Create post + post = Post.create! title: "First Post", text: "This is the first post..." + + # Assign category + post.categories << category_a + + Post.__elasticsearch__.refresh_index! + + assert_equal 1, Post.search('categories:One').size + + # Update category + category_a.update_attribute :title, "Updated" + + # Trigger touch on posts in category + category_a.posts.each { |p| p.touch } + + Post.__elasticsearch__.refresh_index! + + assert_equal 0, Post.search('categories:One').size + assert_equal 1, Post.search('categories:Updated').size + end + + should "eagerly load associated records" do + post_1 = Post.create(title: 'One') + post_2 = Post.create(title: 'Two') + post_1.comments.create text: 'First comment' + post_1.comments.create text: 'Second comment' + + Comment.__elasticsearch__.refresh_index! 
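+ # The `includes: :post` option is forwarded to the ActiveRecord adapter, which chains `.includes(:post)` onto the relation so the association is loaded up front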
+ + records = Comment.search('first').records(includes: :post) + + assert records.first.association(:post).loaded?, "The associated Post should be eagerly loaded" + assert_equal 'One', records.first.post.title + end + end + + end + end +end diff --git a/elasticsearch-model/test/integration/active_record_basic_test.rb b/elasticsearch-model/test/integration/active_record_basic_test.rb new file mode 100644 index 0000000000..e6ca97d6dc --- /dev/null +++ b/elasticsearch-model/test/integration/active_record_basic_test.rb @@ -0,0 +1,234 @@ +require 'test_helper' +require 'active_record' + +puts "ActiveRecord #{ActiveRecord::VERSION::STRING}", '-'*80 + +module Elasticsearch + module Model + class ActiveRecordBasicIntegrationTest < Elasticsearch::Test::IntegrationTestCase + context "ActiveRecord basic integration" do + setup do + ActiveRecord::Schema.define(:version => 1) do + create_table :articles do |t| + t.string :title + t.string :body + t.datetime :created_at, :default => 'NOW()' + end + end + + class ::Article < ActiveRecord::Base + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + settings index: { number_of_shards: 1, number_of_replicas: 0 } do + mapping do + indexes :title, type: 'string', analyzer: 'snowball' + indexes :body, type: 'string' + indexes :created_at, type: 'date' + end + end + + def as_indexed_json(options = {}) + attributes + .symbolize_keys + .slice(:title, :body, :created_at) + .merge(suggest_title: title) + end + end + + Article.delete_all + Article.__elasticsearch__.create_index! force: true + + ::Article.create! title: 'Test', body: '' + ::Article.create! title: 'Testing Coding', body: '' + ::Article.create! title: 'Coding', body: '' + + Article.__elasticsearch__.refresh_index! + end + + should "index and find a document" do + response = Article.search('title:test') + + assert response.any?, "Response should not be empty: #{response.to_a.inspect}" + + assert_equal 2, response.results.size + assert_equal 2, response.records.size + + assert_instance_of Elasticsearch::Model::Response::Result, response.results.first + assert_instance_of Article, response.records.first + + assert_equal 'Test', response.results.first.title + assert_equal 'Test', response.records.first.title + end + + should "provide access to result" do + response = Article.search query: { match: { title: 'test' } }, highlight: { fields: { title: {} } } + + assert_equal 'Test', response.results.first.title + + assert_equal true, response.results.first.title? + assert_equal false, response.results.first.boo? + + assert_equal true, response.results.first.highlight? + assert_equal true, response.results.first.highlight.title? + assert_equal false, response.results.first.highlight.boo? 
+ end + + should "iterate over results" do + response = Article.search('title:test') + + assert_equal ['1', '2'], response.results.map(&:_id) + assert_equal [1, 2], response.records.map(&:id) + end + + should "return _id and _type as #id and #type" do + response = Article.search('title:test') + + assert_equal '1', response.results.first.id + assert_equal 'article', response.results.first.type + end + + should "access results from records" do + response = Article.search('title:test') + + response.records.each_with_hit do |r, h| + assert_not_nil h._score + assert_not_nil h._source.title + end + end + + should "preserve the search results order for records" do + response = Article.search('title:code') + + response.records.each_with_hit do |r, h| + assert_equal h._id, r.id.to_s + end + + response.records.map_with_hit do |r, h| + assert_equal h._id, r.id.to_s + end + end + + should "remove document from index on destroy" do + article = Article.first + + article.destroy + assert_equal 2, Article.count + + Article.__elasticsearch__.refresh_index! + + response = Article.search 'title:test' + + assert_equal 1, response.results.size + assert_equal 1, response.records.size + end + + should "index updates to the document" do + article = Article.first + + article.title = 'Writing' + article.save + + Article.__elasticsearch__.refresh_index! + + response = Article.search 'title:write' + + assert_equal 1, response.results.size + assert_equal 1, response.records.size + end + + should "update specific attributes" do + article = Article.first + + response = Article.search 'title:special' + + assert_equal 0, response.results.size + assert_equal 0, response.records.size + + article.__elasticsearch__.update_document_attributes title: 'special' + + Article.__elasticsearch__.refresh_index! + + response = Article.search 'title:special' + + assert_equal 1, response.results.size + assert_equal 1, response.records.size + end + + should "update document when save is called multiple times in a transaction" do + article = Article.first + response = Article.search 'body:dummy' + + assert_equal 0, response.results.size + assert_equal 0, response.records.size + + ActiveRecord::Base.transaction do + article.body = 'dummy' + article.save + + article.title = 'special' + article.save + end + + article.__elasticsearch__.update_document + Article.__elasticsearch__.refresh_index! 
+ + response = Article.search 'body:dummy' + assert_equal 1, response.results.size + assert_equal 1, response.records.size + end + + should "return results for a DSL search" do + response = Article.search query: { match: { title: { query: 'test' } } } + + assert_equal 2, response.results.size + assert_equal 2, response.records.size + end + + should "return a paged collection" do + response = Article.search query: { match: { title: { query: 'test' } } }, + size: 2, + from: 1 + + assert_equal 1, response.results.size + assert_equal 1, response.records.size + + assert_equal 'Testing Coding', response.results.first.title + assert_equal 'Testing Coding', response.records.first.title + end + + should "allow chaining SQL commands on response.records" do + response = Article.search query: { match: { title: { query: 'test' } } } + + assert_equal 2, response.records.size + assert_equal 1, response.records.where(title: 'Test').size + assert_equal 'Test', response.records.where(title: 'Test').first.title + end + + should "allow ordering response.records in SQL" do + response = Article.search query: { match: { title: { query: 'test' } } } + + if defined?(::ActiveRecord) && ::ActiveRecord::VERSION::MAJOR >= 4 + assert_equal 'Testing Coding', response.records.order(title: :desc).first.title + else + assert_equal 'Testing Coding', response.records.order('title DESC').first.title + end + end + + should "allow dot access to response" do + response = Article.search query: { match: { title: { query: 'test' } } }, + aggregations: { dates: { date_histogram: { field: 'created_at', interval: 'hour' } } }, + suggest: { text: 'tezt', title: { term: { field: 'title', suggest_mode: 'always' } } } + + response.response.respond_to?(:aggregations) + assert_equal 2, response.aggregations.dates.buckets.first.doc_count + + response.response.respond_to?(:suggest) + assert_equal 1, response.suggestions.title.first.options.size + assert_equal ['test'], response.suggestions.terms + end + end + + end + end +end diff --git a/elasticsearch-model/test/integration/active_record_custom_serialization_test.rb b/elasticsearch-model/test/integration/active_record_custom_serialization_test.rb new file mode 100644 index 0000000000..03eb9a4410 --- /dev/null +++ b/elasticsearch-model/test/integration/active_record_custom_serialization_test.rb @@ -0,0 +1,62 @@ +require 'test_helper' +require 'active_record' + +module Elasticsearch + module Model + class ActiveRecordCustomSerializationTest < Elasticsearch::Test::IntegrationTestCase + context "ActiveRecord model with custom JSON serialization" do + setup do + class ::ArticleWithCustomSerialization < ActiveRecord::Base + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + mapping do + indexes :title + end + + def as_indexed_json(options={}) + # as_json(options.merge root: false).slice('title') + { title: self.title } + end + end + + ActiveRecord::Schema.define(:version => 1) do + create_table ArticleWithCustomSerialization.table_name do |t| + t.string :title + t.string :status + end + end + + ArticleWithCustomSerialization.delete_all + ArticleWithCustomSerialization.__elasticsearch__.create_index! force: true + end + + should "index only the title attribute when creating" do + ArticleWithCustomSerialization.create! 
title: 'Test', status: 'green' + + a = ArticleWithCustomSerialization.__elasticsearch__.client.get \ + index: 'article_with_custom_serializations', + type: 'article_with_custom_serialization', + id: '1' + + assert_equal( { 'title' => 'Test' }, a['_source'] ) + end + + should "index only the title attribute when updating" do + ArticleWithCustomSerialization.create! title: 'Test', status: 'green' + + article = ArticleWithCustomSerialization.first + article.update_attributes title: 'UPDATED', status: 'red' + + a = ArticleWithCustomSerialization.__elasticsearch__.client.get \ + index: 'article_with_custom_serializations', + type: 'article_with_custom_serialization', + id: '1' + + assert_equal( { 'title' => 'UPDATED' }, a['_source'] ) + end + end + + end + end +end diff --git a/elasticsearch-model/test/integration/active_record_import_test.rb b/elasticsearch-model/test/integration/active_record_import_test.rb new file mode 100644 index 0000000000..8cc7448cc3 --- /dev/null +++ b/elasticsearch-model/test/integration/active_record_import_test.rb @@ -0,0 +1,109 @@ +require 'test_helper' +require 'active_record' + +module Elasticsearch + module Model + class ActiveRecordImportIntegrationTest < Elasticsearch::Test::IntegrationTestCase + context "ActiveRecord importing" do + setup do + ActiveRecord::Schema.define(:version => 1) do + create_table :import_articles do |t| + t.string :title + t.integer :views + t.string :numeric # For the sake of invalid data sent to Elasticsearch + t.datetime :created_at, :default => 'NOW()' + end + end + + class ::ImportArticle < ActiveRecord::Base + include Elasticsearch::Model + + scope :popular, -> { where('views >= 50') } + + mapping do + indexes :title, type: 'string' + indexes :views, type: 'integer' + indexes :numeric, type: 'integer' + indexes :created_at, type: 'date' + end + end + + ImportArticle.delete_all + ImportArticle.__elasticsearch__.create_index! force: true + ImportArticle.__elasticsearch__.client.cluster.health wait_for_status: 'yellow' + + 100.times { |i| ImportArticle.create! title: "Test #{i}", views: i } + end + + should "import all the documents" do + assert_equal 100, ImportArticle.count + + ImportArticle.__elasticsearch__.refresh_index! + assert_equal 0, ImportArticle.search('*').results.total + + batches = 0 + errors = ImportArticle.import(batch_size: 10) do |response| + batches += 1 + end + + assert_equal 0, errors + assert_equal 10, batches + + ImportArticle.__elasticsearch__.refresh_index! + assert_equal 100, ImportArticle.search('*').results.total + end + + should "import only documents from a specific scope" do + assert_equal 100, ImportArticle.count + + assert_equal 0, ImportArticle.import(scope: 'popular') + + ImportArticle.__elasticsearch__.refresh_index! + assert_equal 50, ImportArticle.search('*').results.total + end + + should "import only documents from a specific query" do + assert_equal 100, ImportArticle.count + + assert_equal 0, ImportArticle.import(query: -> { where('views >= 30') }) + + ImportArticle.__elasticsearch__.refresh_index! + assert_equal 70, ImportArticle.search('*').results.total + end + + should "report and not store/index invalid documents" do + ImportArticle.create! title: "Test INVALID", numeric: "INVALID" + + assert_equal 101, ImportArticle.count + + ImportArticle.__elasticsearch__.refresh_index! 
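+ # Nothing has been imported yet; the string "INVALID" cannot be indexed into the `integer` mapping for `numeric`, so the import below should report one failing document and index only the 100 valid records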
+ assert_equal 0, ImportArticle.search('*').results.total + + batches = 0 + errors = ImportArticle.__elasticsearch__.import(batch_size: 10) do |response| + batches += 1 + end + + assert_equal 1, errors + assert_equal 11, batches + + ImportArticle.__elasticsearch__.refresh_index! + assert_equal 100, ImportArticle.search('*').results.total + end + + should "transform documents with the option" do + assert_equal 100, ImportArticle.count + + assert_equal 0, ImportArticle.import( transform: ->(a) {{ index: { data: { name: a.title, foo: 'BAR' } }}} ) + + ImportArticle.__elasticsearch__.refresh_index! + assert_contains ImportArticle.search('*').results.first._source.keys, 'name' + assert_contains ImportArticle.search('*').results.first._source.keys, 'foo' + assert_equal 100, ImportArticle.search('test').results.total + assert_equal 100, ImportArticle.search('bar').results.total + end + end + + end + end +end diff --git a/elasticsearch-model/test/integration/active_record_namespaced_model_test.rb b/elasticsearch-model/test/integration/active_record_namespaced_model_test.rb new file mode 100644 index 0000000000..be047f4e7a --- /dev/null +++ b/elasticsearch-model/test/integration/active_record_namespaced_model_test.rb @@ -0,0 +1,49 @@ +require 'test_helper' +require 'active_record' + +module Elasticsearch + module Model + class ActiveRecordNamespacedModelIntegrationTest < Elasticsearch::Test::IntegrationTestCase + context "Namespaced ActiveRecord model integration" do + setup do + ActiveRecord::Schema.define(:version => 1) do + create_table :articles do |t| + t.string :title + end + end + + module ::MyNamespace + class Article < ActiveRecord::Base + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + mapping { indexes :title } + end + end + + MyNamespace::Article.delete_all + MyNamespace::Article.__elasticsearch__.create_index! force: true + + MyNamespace::Article.create! title: 'Test' + + MyNamespace::Article.__elasticsearch__.refresh_index! 
+ end + + should "have proper index name and document type" do + assert_equal "my_namespace-articles", MyNamespace::Article.index_name + assert_equal "article", MyNamespace::Article.document_type + end + + should "save document into index on save and find it" do + response = MyNamespace::Article.search 'title:test' + + assert response.any?, "No results returned: #{response.inspect}" + assert_equal 1, response.size + + assert_equal 'Test', response.results.first.title + end + end + + end + end +end diff --git a/elasticsearch-model/test/integration/active_record_pagination_test.rb b/elasticsearch-model/test/integration/active_record_pagination_test.rb new file mode 100644 index 0000000000..e1d6fefb11 --- /dev/null +++ b/elasticsearch-model/test/integration/active_record_pagination_test.rb @@ -0,0 +1,145 @@ +require 'test_helper' +require 'active_record' + +module Elasticsearch + module Model + class ActiveRecordPaginationTest < Elasticsearch::Test::IntegrationTestCase + context "ActiveRecord pagination" do + setup do + class ::ArticleForPagination < ActiveRecord::Base + include Elasticsearch::Model + + scope :published, -> { where(published: true) } + + settings index: { number_of_shards: 1, number_of_replicas: 0 } do + mapping do + indexes :title, type: 'string', analyzer: 'snowball' + indexes :created_at, type: 'date' + end + end + end + + ActiveRecord::Schema.define(:version => 1) do + create_table ::ArticleForPagination.table_name do |t| + t.string :title + t.datetime :created_at, :default => 'NOW()' + t.boolean :published + end + end + + Kaminari::Hooks.init + + ArticleForPagination.delete_all + ArticleForPagination.__elasticsearch__.create_index! force: true + + 68.times do |i| + ::ArticleForPagination.create! title: "Test #{i}", published: (i % 2 == 0) + end + + ArticleForPagination.import + ArticleForPagination.__elasticsearch__.refresh_index! + end + + should "be on the first page by default" do + records = ArticleForPagination.search('title:test').page(1).records + + assert_equal 25, records.size + assert_equal 1, records.current_page + assert_equal nil, records.prev_page + assert_equal 2, records.next_page + assert_equal 3, records.total_pages + + assert records.first_page?, "Should be the first page" + assert ! records.last_page?, "Should NOT be the last page" + assert ! records.out_of_range?, "Should NOT be out of range" + end + + should "load next page" do + records = ArticleForPagination.search('title:test').page(2).records + + assert_equal 25, records.size + assert_equal 2, records.current_page + assert_equal 1, records.prev_page + assert_equal 3, records.next_page + assert_equal 3, records.total_pages + + assert ! records.first_page?, "Should NOT be the first page" + assert ! records.last_page?, "Should NOT be the last page" + assert ! records.out_of_range?, "Should NOT be out of range" + end + + should "load last page" do + records = ArticleForPagination.search('title:test').page(3).records + + assert_equal 18, records.size + assert_equal 3, records.current_page + assert_equal 2, records.prev_page + assert_equal nil, records.next_page + assert_equal 3, records.total_pages + + assert ! records.first_page?, "Should NOT be the first page" + assert records.last_page?, "Should be the last page" + assert ! 
records.out_of_range?, "Should NOT be out of range" + end + + should "not load invalid page" do + records = ArticleForPagination.search('title:test').page(6).records + + assert_equal 0, records.size + assert_equal 6, records.current_page + assert_equal 5, records.prev_page + assert_equal nil, records.next_page + assert_equal 3, records.total_pages + + assert ! records.first_page?, "Should NOT be the first page" + assert records.last_page?, "Should be the last page" + assert records.out_of_range?, "Should be out of range" + end + + should "be combined with scopes" do + records = ArticleForPagination.search('title:test').page(2).records.published + assert records.all? { |r| r.published? } + assert_equal 12, records.size + end + + should "respect sort" do + search = ArticleForPagination.search({ query: { match: { title: 'test' } }, sort: [ { id: 'desc' } ] }) + + records = search.page(2).records + assert_equal 43, records.first.id # 68 - 25 = 42 + + records = search.page(3).records + assert_equal 18, records.first.id # 68 - (2 * 25) = 18 + + records = search.page(2).per(5).records + assert_equal 63, records.first.id # 68 - 5 = 63 + end + + should "set the limit per request" do + records = ArticleForPagination.search('title:test').limit(50).page(2).records + + assert_equal 18, records.size + assert_equal 2, records.current_page + assert_equal 1, records.prev_page + assert_equal nil, records.next_page + assert_equal 2, records.total_pages + + assert records.last_page?, "Should be the last page" + end + + context "with specific model settings" do + teardown do + ArticleForPagination.instance_variable_set(:@_default_per_page, nil) + end + + should "respect paginates_per" do + ArticleForPagination.paginates_per 50 + + assert_equal 50, ArticleForPagination.search('*').page(1).records.size + end + end + end + + end + end +end diff --git a/elasticsearch-model/test/integration/dynamic_index_name_test.rb b/elasticsearch-model/test/integration/dynamic_index_name_test.rb new file mode 100755 index 0000000000..a71633c6ae --- /dev/null +++ b/elasticsearch-model/test/integration/dynamic_index_name_test.rb @@ -0,0 +1,47 @@ +require 'test_helper' +require 'active_record' + +module Elasticsearch + module Model + class DynamicIndexNameTest < Elasticsearch::Test::IntegrationTestCase + context "Dynamic index name" do + setup do + class ::ArticleWithDynamicIndexName < ActiveRecord::Base + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + def self.counter=(value) + @counter = 0 + end + + def self.counter + (@counter ||= 0) && @counter += 1 + end + + mapping { indexes :title } + index_name { "articles-#{counter}" } + end + + ::ActiveRecord::Schema.define(:version => 1) do + create_table ::ArticleWithDynamicIndexName.table_name do |t| + t.string :title + end + end + + ::ArticleWithDynamicIndexName.counter = 0 + end + + should 'evaluate the index_name value' do + assert_equal ArticleWithDynamicIndexName.index_name, "articles-1" + end + + should 're-evaluate the index_name value each time' do + assert_equal ArticleWithDynamicIndexName.index_name, "articles-1" + assert_equal ArticleWithDynamicIndexName.index_name, "articles-2" + assert_equal ArticleWithDynamicIndexName.index_name, "articles-3" + end + end + + end + end +end diff --git a/elasticsearch-model/test/integration/mongoid_basic_test.rb b/elasticsearch-model/test/integration/mongoid_basic_test.rb new file mode 100644 index 0000000000..e370bd82aa --- /dev/null +++ b/elasticsearch-model/test/integration/mongoid_basic_test.rb @@ -0,0 +1,177 
@@ +require 'test_helper' + +Mongo.setup! + +if Mongo.available? + Mongo.connect_to 'mongoid_articles' + + module Elasticsearch + module Model + class MongoidBasicIntegrationTest < Elasticsearch::Test::IntegrationTestCase + + class ::MongoidArticle + include Mongoid::Document + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + field :id, type: String + field :title, type: String + attr_accessible :title if respond_to? :attr_accessible + + settings index: { number_of_shards: 1, number_of_replicas: 0 } do + mapping do + indexes :title, type: 'string', analyzer: 'snowball' + indexes :created_at, type: 'date' + end + end + + def as_indexed_json(options={}) + as_json(except: [:id, :_id]) + end + end + + context "Mongoid integration" do + setup do + Elasticsearch::Model::Adapter.register \ + Elasticsearch::Model::Adapter::Mongoid, + lambda { |klass| !!defined?(::Mongoid::Document) && klass.respond_to?(:ancestors) && klass.ancestors.include?(::Mongoid::Document) } + + MongoidArticle.__elasticsearch__.create_index! force: true + + MongoidArticle.delete_all + + MongoidArticle.create! title: 'Test' + MongoidArticle.create! title: 'Testing Coding' + MongoidArticle.create! title: 'Coding' + + MongoidArticle.__elasticsearch__.refresh_index! + MongoidArticle.__elasticsearch__.client.cluster.health wait_for_status: 'yellow' + end + + should "index and find a document" do + response = MongoidArticle.search('title:test') + + assert response.any? + + assert_equal 2, response.results.size + assert_equal 2, response.records.size + + assert_instance_of Elasticsearch::Model::Response::Result, response.results.first + assert_instance_of MongoidArticle, response.records.first + + assert_equal 'Test', response.results.first.title + assert_equal 'Test', response.records.first.title + end + + should "iterate over results" do + response = MongoidArticle.search('title:test') + + assert_equal ['Test', 'Testing Coding'], response.results.map(&:title) + assert_equal ['Test', 'Testing Coding'], response.records.map(&:title) + end + + should "access results from records" do + response = MongoidArticle.search('title:test') + + response.records.each_with_hit do |r, h| + assert_not_nil h._score + assert_not_nil h._source.title + end + end + + should "preserve the search results order for records" do + response = MongoidArticle.search('title:code') + + response.records.each_with_hit do |r, h| + assert_equal h._id, r.id.to_s + end + + response.records.map_with_hit do |r, h| + assert_equal h._id, r.id.to_s + end + end + + should "remove document from index on destroy" do + article = MongoidArticle.first + + article.destroy + assert_equal 2, MongoidArticle.count + + MongoidArticle.__elasticsearch__.refresh_index! + + response = MongoidArticle.search 'title:test' + + assert_equal 1, response.results.size + assert_equal 1, response.records.size + end + + should "index updates to the document" do + article = MongoidArticle.first + + article.title = 'Writing' + article.save + + MongoidArticle.__elasticsearch__.refresh_index! 
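+ # The `snowball` analyzer stems "Writing" to "write", so the query below should match the updated document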
+ + response = MongoidArticle.search 'title:write' + + assert_equal 1, response.results.size + assert_equal 1, response.records.size + end + + should "return results for a DSL search" do + response = MongoidArticle.search query: { match: { title: { query: 'test' } } } + + assert_equal 2, response.results.size + assert_equal 2, response.records.size + end + + should "return a paged collection" do + response = MongoidArticle.search query: { match: { title: { query: 'test' } } }, + size: 2, + from: 1 + + assert_equal 1, response.results.size + assert_equal 1, response.records.size + + assert_equal 'Testing Coding', response.results.first.title + assert_equal 'Testing Coding', response.records.first.title + end + + + context "importing" do + setup do + MongoidArticle.delete_all + 97.times { |i| MongoidArticle.create! title: "Test #{i}" } + MongoidArticle.__elasticsearch__.create_index! force: true + MongoidArticle.__elasticsearch__.client.cluster.health wait_for_status: 'yellow' + end + + should "import all the documents" do + assert_equal 97, MongoidArticle.count + + MongoidArticle.__elasticsearch__.refresh_index! + assert_equal 0, MongoidArticle.search('*').results.total + + batches = 0 + errors = MongoidArticle.import(batch_size: 10) do |response| + batches += 1 + end + + assert_equal 0, errors + assert_equal 10, batches + + MongoidArticle.__elasticsearch__.refresh_index! + assert_equal 97, MongoidArticle.search('*').results.total + + response = MongoidArticle.search('test') + assert response.results.any?, "Search has not returned results: #{response.to_a}" + end + end + end + + end + end + end + +end diff --git a/elasticsearch-model/test/integration/multiple_models_test.rb b/elasticsearch-model/test/integration/multiple_models_test.rb new file mode 100644 index 0000000000..7d0bf7b6b6 --- /dev/null +++ b/elasticsearch-model/test/integration/multiple_models_test.rb @@ -0,0 +1,172 @@ +require 'test_helper' +require 'active_record' + +Mongo.setup! + +module Elasticsearch + module Model + class MultipleModelsIntegration < Elasticsearch::Test::IntegrationTestCase + context "Multiple models" do + setup do + ActiveRecord::Schema.define(:version => 1) do + create_table :episodes do |t| + t.string :name + t.datetime :created_at, :default => 'NOW()' + end + + create_table :series do |t| + t.string :name + t.datetime :created_at, :default => 'NOW()' + end + end + + module ::NameSearch + extend ActiveSupport::Concern + + included do + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + settings index: {number_of_shards: 1, number_of_replicas: 0} do + mapping do + indexes :name, type: 'string', analyzer: 'snowball' + indexes :created_at, type: 'date' + end + end + end + end + + class ::Episode < ActiveRecord::Base + include NameSearch + end + + class ::Series < ActiveRecord::Base + include NameSearch + end + + [::Episode, ::Series].each do |model| + model.delete_all + model.__elasticsearch__.create_index! force: true + model.create name: "The #{model.name}" + model.create name: "A great #{model.name}" + model.create name: "The greatest #{model.name}" + model.__elasticsearch__.refresh_index! 
+ end + + end + + should "find matching documents across multiple models" do + response = Elasticsearch::Model.search(%q<"The greatest Episode"^2 OR "The greatest Series">, [Series, Episode]) + + assert response.any?, "Response should not be empty: #{response.to_a.inspect}" + + assert_equal 2, response.results.size + assert_equal 2, response.records.size + + assert_instance_of Elasticsearch::Model::Response::Result, response.results.first + assert_instance_of Episode, response.records.first + assert_instance_of Series, response.records.last + + assert_equal 'The greatest Episode', response.results[0].name + assert_equal 'The greatest Episode', response.records[0].name + + assert_equal 'The greatest Series', response.results[1].name + assert_equal 'The greatest Series', response.records[1].name + end + + should "provide access to results" do + response = Elasticsearch::Model.search(%q<"A great Episode"^2 OR "A great Series">, [Series, Episode]) + + assert_equal 'A great Episode', response.results[0].name + assert_equal true, response.results[0].name? + assert_equal false, response.results[0].boo? + + assert_equal 'A great Series', response.results[1].name + assert_equal true, response.results[1].name? + assert_equal false, response.results[1].boo? + end + + should "only retrieve records for existing results" do + ::Series.find_by_name("The greatest Series").delete + ::Series.__elasticsearch__.refresh_index! + response = Elasticsearch::Model.search(%q<"The greatest Episode"^2 OR "The greatest Series">, [Series, Episode]) + + assert response.any?, "Response should not be empty: #{response.to_a.inspect}" + + assert_equal 2, response.results.size + assert_equal 1, response.records.size + + assert_instance_of Elasticsearch::Model::Response::Result, response.results.first + assert_instance_of Episode, response.records.first + + assert_equal 'The greatest Episode', response.results[0].name + assert_equal 'The greatest Episode', response.records[0].name + end + + should "paginate the results" do + response = Elasticsearch::Model.search('series OR episode', [Series, Episode]) + + assert_equal 3, response.page(1).per(3).results.size + assert_equal 3, response.page(2).per(3).results.size + assert_equal 0, response.page(3).per(3).results.size + end + + if Mongo.available? + Mongo.connect_to 'mongoid_collections' + + context "Across mongoid models" do + setup do + class ::Image + include Mongoid::Document + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + + field :name, type: String + attr_accessible :name if respond_to? :attr_accessible + + settings index: {number_of_shards: 1, number_of_replicas: 0} do + mapping do + indexes :name, type: 'string', analyzer: 'snowball' + indexes :created_at, type: 'date' + end + end + + def as_indexed_json(options={}) + as_json(except: [:_id]) + end + end + + Image.delete_all + Image.__elasticsearch__.create_index! force: true + Image.create! name: "The Image" + Image.create! name: "A great Image" + Image.create! name: "The greatest Image" + Image.__elasticsearch__.refresh_index! 
+ Image.__elasticsearch__.client.cluster.health wait_for_status: 'yellow' + end + + should "find matching documents across multiple models" do + response = Elasticsearch::Model.search(%q<"greatest Episode" OR "greatest Image"^2>, [Episode, Image]) + + assert response.any?, "Response should not be empty: #{response.to_a.inspect}" + + assert_equal 2, response.results.size + assert_equal 2, response.records.size + + assert_instance_of Elasticsearch::Model::Response::Result, response.results.first + assert_instance_of Image, response.records.first + assert_instance_of Episode, response.records.last + + assert_equal 'The greatest Image', response.results[0].name + assert_equal 'The greatest Image', response.records[0].name + + assert_equal 'The greatest Episode', response.results[1].name + assert_equal 'The greatest Episode', response.records[1].name + end + end + end + + end + end + end +end diff --git a/elasticsearch-model/test/support/model.json b/elasticsearch-model/test/support/model.json new file mode 100644 index 0000000000..fcf3a64730 --- /dev/null +++ b/elasticsearch-model/test/support/model.json @@ -0,0 +1 @@ +{ "baz": "qux" } diff --git a/elasticsearch-model/test/support/model.yml b/elasticsearch-model/test/support/model.yml new file mode 100644 index 0000000000..ba8ca60f34 --- /dev/null +++ b/elasticsearch-model/test/support/model.yml @@ -0,0 +1,2 @@ +baz: + 'qux' diff --git a/elasticsearch-model/test/test_helper.rb b/elasticsearch-model/test/test_helper.rb new file mode 100644 index 0000000000..ff3a6d9354 --- /dev/null +++ b/elasticsearch-model/test/test_helper.rb @@ -0,0 +1,93 @@ +RUBY_1_8 = defined?(RUBY_VERSION) && RUBY_VERSION < '1.9' + +exit(0) if RUBY_1_8 + +require 'simplecov' and SimpleCov.start { add_filter "/test|test_/" } if ENV["COVERAGE"] + +# Register `at_exit` handler for integration tests shutdown. +# MUST be called before requiring `test/unit`. +at_exit { Elasticsearch::Test::IntegrationTestCase.__run_at_exit_hooks } + +puts '-'*80 + +if defined?(RUBY_VERSION) && RUBY_VERSION > '2.2' + require 'test-unit' + require 'mocha/test_unit' +else + require 'minitest/autorun' + require 'mocha/mini_test' +end + +require 'shoulda-context' + +require 'turn' unless ENV["TM_FILEPATH"] || ENV["NOTURN"] || defined?(RUBY_VERSION) && RUBY_VERSION > '2.2' + +require 'ansi' +require 'oj' + +require 'active_model' + +require 'kaminari' + +require 'elasticsearch/model' + +require 'elasticsearch/extensions/test/cluster' +require 'elasticsearch/extensions/test/startup_shutdown' + +module Elasticsearch + module Test + class IntegrationTestCase < ::Test::Unit::TestCase + extend Elasticsearch::Extensions::Test::StartupShutdown + + startup { Elasticsearch::Extensions::Test::Cluster.start(nodes: 1) if ENV['SERVER'] and not Elasticsearch::Extensions::Test::Cluster.running? } + shutdown { Elasticsearch::Extensions::Test::Cluster.stop if ENV['SERVER'] && started? 
} + context "IntegrationTest" do; should "noop on Ruby 1.8" do; end; end if RUBY_1_8 + + def setup + ActiveRecord::Base.establish_connection( :adapter => 'sqlite3', :database => ":memory:" ) + logger = ::Logger.new(STDERR) + logger.formatter = lambda { |s, d, p, m| "\e[2;36m#{m}\e[0m\n" } + ActiveRecord::Base.logger = logger unless ENV['QUIET'] + + ActiveRecord::LogSubscriber.colorize_logging = false + ActiveRecord::Migration.verbose = false + + tracer = ::Logger.new(STDERR) + tracer.formatter = lambda { |s, d, p, m| "#{m.gsub(/^.*$/) { |n| ' ' + n }.ansi(:faint)}\n" } + + Elasticsearch::Model.client = Elasticsearch::Client.new host: "localhost:#{(ENV['TEST_CLUSTER_PORT'] || 9250)}", + tracer: (ENV['QUIET'] ? nil : tracer) + end + end + end +end + +class Mongo + def self.setup! + begin + require 'mongoid' + session = Moped::Connection.new("localhost", 27017, 0.5) + session.connect + ENV['MONGODB_AVAILABLE'] = 'yes' + rescue LoadError, Moped::Errors::ConnectionFailure => e + $stderr.puts "MongoDB not installed or running: #{e}" + end + end + + def self.available? + !!ENV['MONGODB_AVAILABLE'] + end + + def self.connect_to(source) + $stderr.puts "Mongoid #{Mongoid::VERSION}", '-'*80 + + logger = ::Logger.new($stderr) + logger.formatter = lambda { |s, d, p, m| " #{m.ansi(:faint, :cyan)}\n" } + logger.level = ::Logger::DEBUG + + Mongoid.logger = logger unless ENV['QUIET'] + Moped.logger = logger unless ENV['QUIET'] + + Mongoid.connect_to source + end +end diff --git a/elasticsearch-model/test/unit/adapter_active_record_test.rb b/elasticsearch-model/test/unit/adapter_active_record_test.rb new file mode 100644 index 0000000000..335e3bd10e --- /dev/null +++ b/elasticsearch-model/test/unit/adapter_active_record_test.rb @@ -0,0 +1,157 @@ +require 'test_helper' + +class Elasticsearch::Model::AdapterActiveRecordTest < Test::Unit::TestCase + context "Adapter ActiveRecord module: " do + class ::DummyClassForActiveRecord + RESPONSE = Struct.new('DummyActiveRecordResponse') do + def response + { 'hits' => {'hits' => [ {'_id' => 2}, {'_id' => 1} ]} } + end + end.new + + def response + RESPONSE + end + + def ids + [2, 1] + end + end + + RESPONSE = { 'hits' => { 'total' => 123, 'max_score' => 456, 'hits' => [] } } + + setup do + @records = [ stub(id: 1, inspect: ''), stub(id: 2, inspect: '') ] + @records.stubs(:load).returns(true) + @records.stubs(:exec_queries).returns(true) + end + + should "have the register condition" do + assert_not_nil Elasticsearch::Model::Adapter.adapters[Elasticsearch::Model::Adapter::ActiveRecord] + assert_equal false, Elasticsearch::Model::Adapter.adapters[Elasticsearch::Model::Adapter::ActiveRecord].call(DummyClassForActiveRecord) + end + + context "Records" do + setup do + DummyClassForActiveRecord.__send__ :include, Elasticsearch::Model::Adapter::ActiveRecord::Records + end + + should "have the implementation" do + assert_instance_of Module, Elasticsearch::Model::Adapter::ActiveRecord::Records + + instance = DummyClassForActiveRecord.new + instance.expects(:klass).returns(mock('class', primary_key: :some_key, where: @records)).at_least_once + + assert_equal @records, instance.records + end + + should "load the records" do + instance = DummyClassForActiveRecord.new + instance.expects(:records).returns(@records) + instance.load + end + + should "load the records with its submodels when using :includes" do + klass = mock('class', primary_key: :some_key, where: @records) + @records.expects(:includes).with([:submodel]).at_least_once + + instance = DummyClassForActiveRecord.new + 
instance.expects(:klass).returns(klass).at_least_once + instance.options[:includes] = [:submodel] + instance.records + end + + should "reorder the records based on hits order" do + @records.instance_variable_set(:@records, @records) + + instance = DummyClassForActiveRecord.new + instance.expects(:klass).returns(mock('class', primary_key: :some_key, where: @records)).at_least_once + + assert_equal [1, 2], @records.to_a.map(&:id) + assert_equal [2, 1], instance.records.to_a.map(&:id) + end + + should "not reorder records when SQL order is present" do + @records.instance_variable_set(:@records, @records) + + instance = DummyClassForActiveRecord.new + instance.expects(:klass).returns(stub('class', primary_key: :some_key, where: @records)).at_least_once + instance.records.expects(:order).returns(@records) + + assert_equal [2, 1], instance.records.to_a.map(&:id) + assert_equal [1, 2], instance.order(:foo).to_a.map(&:id) + end + end + + context "Callbacks" do + should "register hooks for automatically updating the index" do + DummyClassForActiveRecord.expects(:after_commit).times(3) + + Elasticsearch::Model::Adapter::ActiveRecord::Callbacks.included(DummyClassForActiveRecord) + end + end + + context "Importing" do + setup do + DummyClassForActiveRecord.__send__ :extend, Elasticsearch::Model::Adapter::ActiveRecord::Importing + end + + should "raise an exception when passing an invalid scope" do + assert_raise NoMethodError do + DummyClassForActiveRecord.__find_in_batches(scope: :not_found_method) do; end + end + end + + should "implement the __find_in_batches method" do + DummyClassForActiveRecord.expects(:find_in_batches).returns([]) + DummyClassForActiveRecord.__find_in_batches do; end + end + + should "limit the relation to a specific scope" do + DummyClassForActiveRecord.expects(:find_in_batches).returns([]) + DummyClassForActiveRecord.expects(:published).returns(DummyClassForActiveRecord) + + DummyClassForActiveRecord.__find_in_batches(scope: :published) do; end + end + + should "limit the relation to a specific query" do + DummyClassForActiveRecord.expects(:find_in_batches).returns([]) + DummyClassForActiveRecord.expects(:where).returns(DummyClassForActiveRecord) + + DummyClassForActiveRecord.__find_in_batches(query: -> { where(color: "red") }) do; end + end + + should "preprocess the batch if option provided" do + class << DummyClassForActiveRecord + # Updates/transforms the batch while fetching it from the database + # (eg. with information from an external system) + # + def update_batch(batch) + batch.collect { |b| b.to_s + '!' 
} + end + end + + DummyClassForActiveRecord.expects(:__find_in_batches).returns( [:a, :b] ) + + DummyClassForActiveRecord.__find_in_batches(preprocess: :update_batch) do |batch| + assert_same_elements ["a!", "b!"], batch + end + end + + context "when transforming models" do + setup do + @transform = DummyClassForActiveRecord.__transform + end + + should "provide an object that responds to #call" do + assert_respond_to @transform, :call + end + + should "provide default transformation" do + model = mock("model", id: 1, __elasticsearch__: stub(as_indexed_json: {})) + assert_equal @transform.call(model), { index: { _id: 1, data: {} } } + end + end + end + end +end diff --git a/elasticsearch-model/test/unit/adapter_default_test.rb b/elasticsearch-model/test/unit/adapter_default_test.rb new file mode 100644 index 0000000000..48edd205d5 --- /dev/null +++ b/elasticsearch-model/test/unit/adapter_default_test.rb @@ -0,0 +1,41 @@ +require 'test_helper' + +class Elasticsearch::Model::AdapterDefaultTest < Test::Unit::TestCase + context "Adapter default module" do + class ::DummyClassForDefaultAdapter; end + + should "have the default Records implementation" do + assert_instance_of Module, Elasticsearch::Model::Adapter::Default::Records + + DummyClassForDefaultAdapter.__send__ :include, Elasticsearch::Model::Adapter::Default::Records + + instance = DummyClassForDefaultAdapter.new + klass = mock('class', find: [1]) + instance.expects(:klass).returns(klass) + instance.records + end + + should "have the default Callbacks implementation" do + assert_instance_of Module, Elasticsearch::Model::Adapter::Default::Callbacks + end + + context "concerning abstract methods" do + setup do + DummyClassForDefaultAdapter.__send__ :include, Elasticsearch::Model::Adapter::Default::Importing + end + + should "have the default Importing implementation" do + assert_raise Elasticsearch::Model::NotImplemented do + DummyClassForDefaultAdapter.new.__find_in_batches + end + end + + should "have the default transform implementation" do + assert_raise Elasticsearch::Model::NotImplemented do + DummyClassForDefaultAdapter.new.__transform + end + end + end + + end +end diff --git a/elasticsearch-model/test/unit/adapter_mongoid_test.rb b/elasticsearch-model/test/unit/adapter_mongoid_test.rb new file mode 100644 index 0000000000..ca9b0d20bc --- /dev/null +++ b/elasticsearch-model/test/unit/adapter_mongoid_test.rb @@ -0,0 +1,104 @@ +require 'test_helper' + +class Elasticsearch::Model::AdapterMongoidTest < Test::Unit::TestCase + context "Adapter Mongoid module: " do + class ::DummyClassForMongoid + RESPONSE = Struct.new('DummyMongoidResponse') do + def response + { 'hits' => {'hits' => [ {'_id' => 2}, {'_id' => 1} ]} } + end + end.new + + def response + RESPONSE + end + + def ids + [2, 1] + end + end + + setup do + @records = [ stub(id: 1, inspect: ''), stub(id: 2, inspect: '') ] + ::Symbol.any_instance.stubs(:in).returns(@records) + end + + should "have the register condition" do + assert_not_nil Elasticsearch::Model::Adapter.adapters[Elasticsearch::Model::Adapter::Mongoid] + assert_equal false, Elasticsearch::Model::Adapter.adapters[Elasticsearch::Model::Adapter::Mongoid].call(DummyClassForMongoid) + end + + context "Records" do + setup do + DummyClassForMongoid.__send__ :include, Elasticsearch::Model::Adapter::Mongoid::Records + end + + should "have the implementation" do + assert_instance_of Module, Elasticsearch::Model::Adapter::Mongoid::Records + + instance = DummyClassForMongoid.new + instance.expects(:klass).returns(mock('class', 
where: @records)) + + assert_equal @records, instance.records + end + + should "reorder the records based on hits order" do + @records.instance_variable_set(:@records, @records) + + instance = DummyClassForMongoid.new + instance.expects(:klass).returns(mock('class', where: @records)) + + assert_equal [1, 2], @records.to_a.map(&:id) + assert_equal [2, 1], instance.records.to_a.map(&:id) + end + + should "not reorder records when SQL order is present" do + @records.instance_variable_set(:@records, @records) + + instance = DummyClassForMongoid.new + instance.expects(:klass).returns(stub('class', where: @records)).at_least_once + instance.records.expects(:asc).returns(@records) + + assert_equal [2, 1], instance.records.to_a.map(&:id) + assert_equal [1, 2], instance.asc.to_a.map(&:id) + end + end + + context "Callbacks" do + should "register hooks for automatically updating the index" do + DummyClassForMongoid.expects(:after_create) + DummyClassForMongoid.expects(:after_update) + DummyClassForMongoid.expects(:after_destroy) + + Elasticsearch::Model::Adapter::Mongoid::Callbacks.included(DummyClassForMongoid) + end + end + + context "Importing" do + should "implement the __find_in_batches method" do + relation = mock() + relation.stubs(:no_timeout).returns([]) + DummyClassForMongoid.expects(:all).returns(relation) + + DummyClassForMongoid.__send__ :extend, Elasticsearch::Model::Adapter::Mongoid::Importing + DummyClassForMongoid.__find_in_batches do; end + end + + context "when transforming models" do + setup do + @transform = DummyClassForMongoid.__transform + end + + should "provide an object that responds to #call" do + assert_respond_to @transform, :call + end + + should "provide basic transformation" do + model = mock("model", id: 1, as_indexed_json: {}) + assert_equal @transform.call(model), { index: { _id: "1", data: {} } } + end + end + end + + end +end diff --git a/elasticsearch-model/test/unit/adapter_multiple_test.rb b/elasticsearch-model/test/unit/adapter_multiple_test.rb new file mode 100644 index 0000000000..b848286fbd --- /dev/null +++ b/elasticsearch-model/test/unit/adapter_multiple_test.rb @@ -0,0 +1,106 @@ +require 'test_helper' + +class Elasticsearch::Model::MultipleTest < Test::Unit::TestCase + + context "Adapter for multiple models" do + + class ::DummyOne + include Elasticsearch::Model + + index_name 'dummy' + document_type 'dummy_one' + + def self.find(ids) + ids.map { |id| new(id) } + end + + attr_reader :id + + def initialize(id) + @id = id.to_i + end + end + + module ::Namespace + class DummyTwo + include Elasticsearch::Model + + index_name 'dummy' + document_type 'dummy_two' + + def self.find(ids) + ids.map { |id| new(id) } + end + + attr_reader :id + + def initialize(id) + @id = id.to_i + end + end + end + + class ::DummyTwo + include Elasticsearch::Model + + index_name 'other_index' + document_type 'dummy_two' + + def self.find(ids) + ids.map { |id| new(id) } + end + + attr_reader :id + + def initialize(id) + @id = id.to_i + end + end + + HITS = [{_index: 'dummy', + _type: 'dummy_two', + _id: '2', + }, { + _index: 'dummy', + _type: 'dummy_one', + _id: '2', + }, { + _index: 'other_index', + _type: 'dummy_two', + _id: '1', + }, { + _index: 'dummy', + _type: 'dummy_two', + _id: '1', + }, { + _index: 'dummy', + _type: 'dummy_one', + _id: '3'}] + + setup do + @multimodel = Elasticsearch::Model::Multimodel.new(DummyOne, DummyTwo, Namespace::DummyTwo) + end + + context "when returning records" do + setup do + @multimodel.class.send :include, 
Elasticsearch::Model::Adapter::Multiple::Records + @multimodel.expects(:response).at_least_once.returns(stub(response: { 'hits' => { 'hits' => HITS } })) + end + + should "keep the order from response" do + assert_instance_of Module, Elasticsearch::Model::Adapter::Multiple::Records + records = @multimodel.records + + assert_equal 5, records.count + + assert_kind_of ::Namespace::DummyTwo, records[0] + assert_kind_of ::DummyOne, records[1] + assert_kind_of ::DummyTwo, records[2] + assert_kind_of ::Namespace::DummyTwo, records[3] + assert_kind_of ::DummyOne, records[4] + + assert_equal [2, 2, 1, 1, 3], records.map(&:id) + end + end + end +end diff --git a/elasticsearch-model/test/unit/adapter_test.rb b/elasticsearch-model/test/unit/adapter_test.rb new file mode 100644 index 0000000000..71b4e7cea3 --- /dev/null +++ b/elasticsearch-model/test/unit/adapter_test.rb @@ -0,0 +1,69 @@ +require 'test_helper' + +class Elasticsearch::Model::AdapterTest < Test::Unit::TestCase + context "Adapter module" do + class ::DummyAdapterClass; end + class ::DummyAdapterClassWithAdapter; end + class ::DummyAdapter + Records = Module.new + Callbacks = Module.new + Importing = Module.new + end + + should "return an Adapter instance" do + assert_instance_of Elasticsearch::Model::Adapter::Adapter, + Elasticsearch::Model::Adapter.from_class(DummyAdapterClass) + end + + should "return a list of adapters" do + Elasticsearch::Model::Adapter::Adapter.expects(:adapters) + Elasticsearch::Model::Adapter.adapters + end + + should "register an adapter" do + begin + Elasticsearch::Model::Adapter::Adapter.expects(:register) + Elasticsearch::Model::Adapter.register(:foo, lambda { |c| false }) + ensure + Elasticsearch::Model::Adapter::Adapter.instance_variable_set(:@adapters, {}) + end + end + end + + context "Adapter class" do + should "register an adapter" do + begin + Elasticsearch::Model::Adapter::Adapter.register(:foo, lambda { |c| false }) + assert Elasticsearch::Model::Adapter::Adapter.adapters[:foo] + ensure + Elasticsearch::Model::Adapter::Adapter.instance_variable_set(:@adapters, {}) + end + end + + should "return the default adapter" do + adapter = Elasticsearch::Model::Adapter::Adapter.new(DummyAdapterClass) + assert_equal Elasticsearch::Model::Adapter::Default, adapter.adapter + end + + should "return a specific adapter" do + Elasticsearch::Model::Adapter::Adapter.register(DummyAdapter, + lambda { |c| c == DummyAdapterClassWithAdapter }) + + adapter = Elasticsearch::Model::Adapter::Adapter.new(DummyAdapterClassWithAdapter) + assert_equal DummyAdapter, adapter.adapter + end + + should "return the modules" do + assert_nothing_raised do + Elasticsearch::Model::Adapter::Adapter.register(DummyAdapter, + lambda { |c| c == DummyAdapterClassWithAdapter }) + + adapter = Elasticsearch::Model::Adapter::Adapter.new(DummyAdapterClassWithAdapter) + + assert_instance_of Module, adapter.records_mixin + assert_instance_of Module, adapter.callbacks_mixin + assert_instance_of Module, adapter.importing_mixin + end + end + end +end diff --git a/elasticsearch-model/test/unit/callbacks_test.rb b/elasticsearch-model/test/unit/callbacks_test.rb new file mode 100644 index 0000000000..95617a414e --- /dev/null +++ b/elasticsearch-model/test/unit/callbacks_test.rb @@ -0,0 +1,31 @@ +require 'test_helper' + +class Elasticsearch::Model::CallbacksTest < Test::Unit::TestCase + context "Callbacks module" do + class ::DummyCallbacksModel + end + + module DummyCallbacksAdapter + module CallbacksMixin + end + + def callbacks_mixin + CallbacksMixin + end; 
module_function :callbacks_mixin + end + + should "include the callbacks mixin from adapter" do + Elasticsearch::Model::Adapter.expects(:from_class) + .with(DummyCallbacksModel) + .returns(DummyCallbacksAdapter) + + ::DummyCallbacksModel.expects(:__send__).with do |method, parameter| + assert_equal :include, method + assert_equal DummyCallbacksAdapter::CallbacksMixin, parameter + true + end + + Elasticsearch::Model::Callbacks.included(DummyCallbacksModel) + end + end +end diff --git a/elasticsearch-model/test/unit/client_test.rb b/elasticsearch-model/test/unit/client_test.rb new file mode 100644 index 0000000000..315a3ab44a --- /dev/null +++ b/elasticsearch-model/test/unit/client_test.rb @@ -0,0 +1,27 @@ +require 'test_helper' + +class Elasticsearch::Model::ClientTest < Test::Unit::TestCase + context "Client module" do + class ::DummyClientModel + extend Elasticsearch::Model::Client::ClassMethods + include Elasticsearch::Model::Client::InstanceMethods + end + + should "have the default client method" do + assert_instance_of Elasticsearch::Transport::Client, DummyClientModel.client + assert_instance_of Elasticsearch::Transport::Client, DummyClientModel.new.client + end + + should "set the client for the model" do + DummyClientModel.client = 'foobar' + assert_equal 'foobar', DummyClientModel.client + assert_equal 'foobar', DummyClientModel.new.client + end + + should "set the client for a model instance" do + instance = DummyClientModel.new + instance.client = 'moobam' + assert_equal 'moobam', instance.client + end + end +end diff --git a/elasticsearch-model/test/unit/importing_test.rb b/elasticsearch-model/test/unit/importing_test.rb new file mode 100644 index 0000000000..6f739acecc --- /dev/null +++ b/elasticsearch-model/test/unit/importing_test.rb @@ -0,0 +1,203 @@ +require 'test_helper' + +class Elasticsearch::Model::ImportingTest < Test::Unit::TestCase + context "Importing module" do + class ::DummyImportingModel + end + + module ::DummyImportingAdapter + module ImportingMixin + def __find_in_batches(options={}, &block) + yield if block_given? 
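+ # no-op stub: yields once so the Importing module's batching logic can run without a real datastore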
+ end + def __transform + lambda {|a|} + end + end + + def importing_mixin + ImportingMixin + end; module_function :importing_mixin + end + + should "include methods from the module and adapter" do + Elasticsearch::Model::Adapter.expects(:from_class) + .with(DummyImportingModel) + .returns(DummyImportingAdapter) + + DummyImportingModel.__send__ :include, Elasticsearch::Model::Importing + + assert_respond_to DummyImportingModel, :import + assert_respond_to DummyImportingModel, :__find_in_batches + end + + should "call the client when importing" do + Elasticsearch::Model::Adapter.expects(:from_class) + .with(DummyImportingModel) + .returns(DummyImportingAdapter) + + DummyImportingModel.__send__ :include, Elasticsearch::Model::Importing + + client = mock('client') + client.expects(:bulk).returns({'items' => []}) + + DummyImportingModel.expects(:client).returns(client) + DummyImportingModel.expects(:index_name).returns('foo') + DummyImportingModel.expects(:document_type).returns('foo') + DummyImportingModel.stubs(:index_exists?).returns(true) + DummyImportingModel.stubs(:__batch_to_bulk) + assert_equal 0, DummyImportingModel.import + end + + should "return the number of errors" do + Elasticsearch::Model::Adapter.expects(:from_class) + .with(DummyImportingModel) + .returns(DummyImportingAdapter) + + DummyImportingModel.__send__ :include, Elasticsearch::Model::Importing + + client = mock('client') + client.expects(:bulk).returns({'items' => [ {'index' => {}}, {'index' => {'error' => 'FAILED'}} ]}) + + DummyImportingModel.stubs(:client).returns(client) + DummyImportingModel.stubs(:index_name).returns('foo') + DummyImportingModel.stubs(:document_type).returns('foo') + DummyImportingModel.stubs(:index_exists?).returns(true) + DummyImportingModel.stubs(:__batch_to_bulk) + + assert_equal 1, DummyImportingModel.import + end + + should "return an array of error elements" do + Elasticsearch::Model::Adapter.expects(:from_class) + .with(DummyImportingModel) + .returns(DummyImportingAdapter) + + DummyImportingModel.__send__ :include, Elasticsearch::Model::Importing + + client = mock('client') + client.expects(:bulk).returns({'items' => [ {'index' => {}}, {'index' => {'error' => 'FAILED'}} ]}) + + DummyImportingModel.stubs(:client).returns(client) + DummyImportingModel.stubs(:index_name).returns('foo') + DummyImportingModel.stubs(:document_type).returns('foo') + DummyImportingModel.stubs(:index_exists?).returns(true) + DummyImportingModel.stubs(:__batch_to_bulk) + + assert_equal [{'index' => {'error' => 'FAILED'}}], DummyImportingModel.import(return: 'errors') + end + + should "yield the response" do + Elasticsearch::Model::Adapter.expects(:from_class) + .with(DummyImportingModel) + .returns(DummyImportingAdapter) + + DummyImportingModel.__send__ :include, Elasticsearch::Model::Importing + + client = mock('client') + client.expects(:bulk).returns({'items' => [ {'index' => {}}, {'index' => {'error' => 'FAILED'}} ]}) + + DummyImportingModel.stubs(:client).returns(client) + DummyImportingModel.stubs(:index_name).returns('foo') + DummyImportingModel.stubs(:document_type).returns('foo') + DummyImportingModel.stubs(:index_exists?).returns(true) + DummyImportingModel.stubs(:__batch_to_bulk) + + DummyImportingModel.import do |response| + assert_equal 2, response['items'].size + end + end + + context "when the index does not exist" do + should "raise an exception" do + Elasticsearch::Model::Adapter.expects(:from_class) + .with(DummyImportingModel) + .returns(DummyImportingAdapter) + + DummyImportingModel.__send__ 
:include, Elasticsearch::Model::Importing + + DummyImportingModel.expects(:index_name).returns('foo') + DummyImportingModel.expects(:document_type).returns('foo') + DummyImportingModel.expects(:index_exists?).returns(false) + + assert_raise ArgumentError do + DummyImportingModel.import + end + end + end + + context "with the force option" do + should "delete and create the index" do + DummyImportingModel.expects(:__find_in_batches).with do |options| + assert_equal 'bar', options[:foo] + assert_nil options[:force] + true + end + + DummyImportingModel.expects(:create_index!).with do |options| + assert_equal true, options[:force] + true + end + + DummyImportingModel.expects(:index_name).returns('foo') + DummyImportingModel.expects(:document_type).returns('foo') + + DummyImportingModel.import force: true, foo: 'bar' + end + end + + should "allow passing a different index / type" do + Elasticsearch::Model::Adapter.expects(:from_class) + .with(DummyImportingModel) + .returns(DummyImportingAdapter) + + DummyImportingModel.__send__ :include, Elasticsearch::Model::Importing + + client = mock('client') + + client + .expects(:bulk) + .with do |options| + assert_equal 'my-new-index', options[:index] + assert_equal 'my-other-type', options[:type] + true + end + .returns({'items' => [ {'index' => {} }]}) + + DummyImportingModel.stubs(:client).returns(client) + DummyImportingModel.stubs(:index_exists?).returns(true) + DummyImportingModel.stubs(:__batch_to_bulk) + + DummyImportingModel.import index: 'my-new-index', type: 'my-other-type' + end + + should "use the default transform from adapter" do + client = mock('client', bulk: {'items' => []}) + transform = lambda {|a|} + + DummyImportingModel.stubs(:client).returns(client) + DummyImportingModel.stubs(:index_exists?).returns(true) + DummyImportingModel.expects(:__transform).returns(transform) + DummyImportingModel.expects(:__batch_to_bulk).with(anything, transform) + + DummyImportingModel.import index: 'foo', type: 'bar' + end + + should "use the transformer from options" do + client = mock('client', bulk: {'items' => []}) + transform = lambda {|a|} + + DummyImportingModel.stubs(:client).returns(client) + DummyImportingModel.stubs(:index_exists?).returns(true) + DummyImportingModel.expects(:__batch_to_bulk).with(anything, transform) + + DummyImportingModel.import index: 'foo', type: 'bar', transform: transform + end + + should "raise an ArgumentError if transform doesn't respond to the call method" do + assert_raise ArgumentError do + DummyImportingModel.import index: 'foo', type: 'bar', transform: "not_callable" + end + end + end +end diff --git a/elasticsearch-model/test/unit/indexing_test.rb b/elasticsearch-model/test/unit/indexing_test.rb new file mode 100644 index 0000000000..a52603a1ec --- /dev/null +++ b/elasticsearch-model/test/unit/indexing_test.rb @@ -0,0 +1,650 @@ +require 'test_helper' + +class Elasticsearch::Model::IndexingTest < Test::Unit::TestCase + context "Indexing module: " do + class ::DummyIndexingModel + extend ActiveModel::Naming + extend Elasticsearch::Model::Naming::ClassMethods + extend Elasticsearch::Model::Indexing::ClassMethods + + def self.foo + 'bar' + end + end + + class NotFound < Exception; end + + context "Settings class" do + should "be convertible to hash" do + hash = { foo: 'bar' } + settings = Elasticsearch::Model::Indexing::Settings.new hash + assert_equal hash, settings.to_hash + assert_equal settings.to_hash, settings.as_json + end + end + + context "Settings method" do + should "initialize the index settings" do 
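+ # Called with no arguments, `settings` simply returns the model's Settings object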
+ assert_instance_of Elasticsearch::Model::Indexing::Settings, DummyIndexingModel.settings + end + + should "update and return the index settings from a hash" do + DummyIndexingModel.settings foo: 'boo' + DummyIndexingModel.settings bar: 'bam' + + assert_equal( {foo: 'boo', bar: 'bam'}, DummyIndexingModel.settings.to_hash) + end + + should "update and return the index settings from a yml file" do + DummyIndexingModel.settings File.open("test/support/model.yml") + DummyIndexingModel.settings bar: 'bam' + + assert_equal( {foo: 'boo', bar: 'bam', 'baz' => 'qux'}, DummyIndexingModel.settings.to_hash) + end + + should "update and return the index settings from a json file" do + DummyIndexingModel.settings File.open("test/support/model.json") + DummyIndexingModel.settings bar: 'bam' + + assert_equal( {foo: 'boo', bar: 'bam', 'baz' => 'qux'}, DummyIndexingModel.settings.to_hash) + end + + should "evaluate the block" do + DummyIndexingModel.expects(:foo) + + DummyIndexingModel.settings do + foo + end + end + end + + context "Mappings class" do + should "initialize the index mappings" do + assert_instance_of Elasticsearch::Model::Indexing::Mappings, DummyIndexingModel.mappings + end + + should "raise an exception when not passed type" do + assert_raise ArgumentError do + Elasticsearch::Model::Indexing::Mappings.new + end + end + + should "be convertible to hash" do + mappings = Elasticsearch::Model::Indexing::Mappings.new :mytype, { foo: 'bar' } + assert_equal( { :mytype => { foo: 'bar', :properties => {} } }, mappings.to_hash ) + assert_equal mappings.to_hash, mappings.as_json + end + + should "define properties" do + mappings = Elasticsearch::Model::Indexing::Mappings.new :mytype + assert_respond_to mappings, :indexes + + mappings.indexes :foo, { type: 'boolean', include_in_all: false } + assert_equal 'boolean', mappings.to_hash[:mytype][:properties][:foo][:type] + end + + should "define type as string by default" do + mappings = Elasticsearch::Model::Indexing::Mappings.new :mytype + + mappings.indexes :bar, {} + assert_equal 'string', mappings.to_hash[:mytype][:properties][:bar][:type] + end + + should "define multiple fields" do + mappings = Elasticsearch::Model::Indexing::Mappings.new :mytype + + mappings.indexes :foo_1, type: 'string' do + indexes :raw, analyzer: 'keyword' + end + + mappings.indexes :foo_2, type: 'multi_field' do + indexes :raw, analyzer: 'keyword' + end + + assert_equal 'string', mappings.to_hash[:mytype][:properties][:foo_1][:type] + assert_equal 'string', mappings.to_hash[:mytype][:properties][:foo_1][:fields][:raw][:type] + assert_equal 'keyword', mappings.to_hash[:mytype][:properties][:foo_1][:fields][:raw][:analyzer] + assert_nil mappings.to_hash[:mytype][:properties][:foo_1][:properties] + + assert_equal 'multi_field', mappings.to_hash[:mytype][:properties][:foo_2][:type] + assert_equal 'string', mappings.to_hash[:mytype][:properties][:foo_2][:fields][:raw][:type] + assert_equal 'keyword', mappings.to_hash[:mytype][:properties][:foo_2][:fields][:raw][:analyzer] + assert_nil mappings.to_hash[:mytype][:properties][:foo_2][:properties] + end + + should "define embedded properties" do + mappings = Elasticsearch::Model::Indexing::Mappings.new :mytype + + mappings.indexes :foo do + indexes :bar + end + + mappings.indexes :foo_object, type: 'object' do + indexes :bar + end + + mappings.indexes :foo_nested, type: 'nested' do + indexes :bar + end + + mappings.indexes :foo_nested_as_symbol, type: :nested do + indexes :bar + end + + # Object is the default when `type` is missing 
and there's a block passed + # + assert_equal 'object', mappings.to_hash[:mytype][:properties][:foo][:type] + assert_equal 'string', mappings.to_hash[:mytype][:properties][:foo][:properties][:bar][:type] + assert_nil mappings.to_hash[:mytype][:properties][:foo][:fields] + + assert_equal 'object', mappings.to_hash[:mytype][:properties][:foo_object][:type] + assert_equal 'string', mappings.to_hash[:mytype][:properties][:foo_object][:properties][:bar][:type] + assert_nil mappings.to_hash[:mytype][:properties][:foo_object][:fields] + + assert_equal 'nested', mappings.to_hash[:mytype][:properties][:foo_nested][:type] + assert_equal 'string', mappings.to_hash[:mytype][:properties][:foo_nested][:properties][:bar][:type] + assert_nil mappings.to_hash[:mytype][:properties][:foo_nested][:fields] + + assert_equal :nested, mappings.to_hash[:mytype][:properties][:foo_nested_as_symbol][:type] + assert_not_nil mappings.to_hash[:mytype][:properties][:foo_nested_as_symbol][:properties] + assert_nil mappings.to_hash[:mytype][:properties][:foo_nested_as_symbol][:fields] + end + end + + context "Mappings method" do + should "initialize the index mappings" do + assert_instance_of Elasticsearch::Model::Indexing::Mappings, DummyIndexingModel.mappings + end + + should "update and return the index mappings" do + DummyIndexingModel.mappings foo: 'boo' + DummyIndexingModel.mappings bar: 'bam' + assert_equal( { dummy_indexing_model: { foo: "boo", bar: "bam", properties: {} } }, + DummyIndexingModel.mappings.to_hash ) + end + + should "evaluate the block" do + DummyIndexingModel.mappings.expects(:indexes).with(:foo).returns(true) + + DummyIndexingModel.mappings do + indexes :foo + end + end + end + + context "Instance methods" do + class ::DummyIndexingModelWithCallbacks + extend Elasticsearch::Model::Indexing::ClassMethods + include Elasticsearch::Model::Indexing::InstanceMethods + + def self.before_save(&block) + (@callbacks ||= {})[block.hash] = block + end + + def changed_attributes; [:foo]; end + + def changes + {:foo => ['One', 'Two']} + end + end + + class ::DummyIndexingModelWithCallbacksAndCustomAsIndexedJson + extend Elasticsearch::Model::Indexing::ClassMethods + include Elasticsearch::Model::Indexing::InstanceMethods + + def self.before_save(&block) + (@callbacks ||= {})[block.hash] = block + end + + def changed_attributes; [:foo, :bar]; end + + def changes + {:foo => ['A', 'B'], :bar => ['C', 'D']} + end + + def as_indexed_json(options={}) + { :foo => 'B' } + end + end + + should "register before_save callback when included" do + ::DummyIndexingModelWithCallbacks.expects(:before_save).returns(true) + ::DummyIndexingModelWithCallbacks.__send__ :include, Elasticsearch::Model::Indexing::InstanceMethods + end + + should "set the @__changed_attributes variable before save" do + instance = ::DummyIndexingModelWithCallbacks.new + instance.expects(:instance_variable_set).with do |name, value| + assert_equal :@__changed_attributes, name + assert_equal({foo: 'Two'}, value) + true + end + + ::DummyIndexingModelWithCallbacks.__send__ :include, Elasticsearch::Model::Indexing::InstanceMethods + + ::DummyIndexingModelWithCallbacks.instance_variable_get(:@callbacks).each do |n,b| + instance.instance_eval(&b) + end + end + + should "have the index_document method" do + client = mock('client') + instance = ::DummyIndexingModelWithCallbacks.new + + client.expects(:index).with do |payload| + assert_equal 'foo', payload[:index] + assert_equal 'bar', payload[:type] + assert_equal '1', payload[:id] + assert_equal 'JSON', 
payload[:body] + true + end + + instance.expects(:client).returns(client) + instance.expects(:as_indexed_json).returns('JSON') + instance.expects(:index_name).returns('foo') + instance.expects(:document_type).returns('bar') + instance.expects(:id).returns('1') + + instance.index_document + end + + should "pass extra options to the index_document method to client.index" do + client = mock('client') + instance = ::DummyIndexingModelWithCallbacks.new + + client.expects(:index).with do |payload| + assert_equal 'A', payload[:parent] + true + end + + instance.expects(:client).returns(client) + instance.expects(:as_indexed_json).returns('JSON') + instance.expects(:index_name).returns('foo') + instance.expects(:document_type).returns('bar') + instance.expects(:id).returns('1') + + instance.index_document(parent: 'A') + end + + should "have the delete_document method" do + client = mock('client') + instance = ::DummyIndexingModelWithCallbacks.new + + client.expects(:delete).with do |payload| + assert_equal 'foo', payload[:index] + assert_equal 'bar', payload[:type] + assert_equal '1', payload[:id] + true + end + + instance.expects(:client).returns(client) + instance.expects(:index_name).returns('foo') + instance.expects(:document_type).returns('bar') + instance.expects(:id).returns('1') + + instance.delete_document() + end + + should "pass extra options to the delete_document method to client.delete" do + client = mock('client') + instance = ::DummyIndexingModelWithCallbacks.new + + client.expects(:delete).with do |payload| + assert_equal 'A', payload[:parent] + true + end + + instance.expects(:client).returns(client) + instance.expects(:id).returns('1') + instance.expects(:index_name).returns('foo') + instance.expects(:document_type).returns('bar') + + instance.delete_document(parent: 'A') + end + + should "update the document by re-indexing when no changes are present" do + client = mock('client') + instance = ::DummyIndexingModelWithCallbacks.new + + # Reset the fake `changes` + instance.instance_variable_set(:@__changed_attributes, nil) + + instance.expects(:index_document) + instance.update_document + end + + should "update the document by partial update when changes are present" do + client = mock('client') + instance = ::DummyIndexingModelWithCallbacks.new + + # Set the fake `changes` hash + instance.instance_variable_set(:@__changed_attributes, {foo: 'bar'}) + + client.expects(:update).with do |payload| + assert_equal 'foo', payload[:index] + assert_equal 'bar', payload[:type] + assert_equal '1', payload[:id] + assert_equal({foo: 'bar'}, payload[:body][:doc]) + true + end + + instance.expects(:client).returns(client) + instance.expects(:index_name).returns('foo') + instance.expects(:document_type).returns('bar') + instance.expects(:id).returns('1') + + instance.update_document + end + + should "exclude attributes not contained in custom as_indexed_json during partial update" do + client = mock('client') + instance = ::DummyIndexingModelWithCallbacksAndCustomAsIndexedJson.new + + # Set the fake `changes` hash + instance.instance_variable_set(:@__changed_attributes, {'foo' => 'B', 'bar' => 'D' }) + + client.expects(:update).with do |payload| + assert_equal({:foo => 'B'}, payload[:body][:doc]) + true + end + + instance.expects(:client).returns(client) + instance.expects(:index_name).returns('foo') + instance.expects(:document_type).returns('bar') + instance.expects(:id).returns('1') + + instance.update_document + end + + should "get attributes from as_indexed_json during partial update" do + 
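+ # The changed attribute is nested, but the update payload should be built from the (stubbed) as_indexed_json output instead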
client = mock('client') + instance = ::DummyIndexingModelWithCallbacksAndCustomAsIndexedJson.new + + instance.instance_variable_set(:@__changed_attributes, { 'foo' => { 'bar' => 'BAR'} }) + # Overload as_indexed_json + instance.expects(:as_indexed_json).returns({ 'foo' => 'BAR' }) + + client.expects(:update).with do |payload| + assert_equal({'foo' => 'BAR'}, payload[:body][:doc]) + true + end + + instance.expects(:client).returns(client) + instance.expects(:index_name).returns('foo') + instance.expects(:document_type).returns('bar') + instance.expects(:id).returns('1') + + instance.update_document + end + + should "update only the specific attributes" do + client = mock('client') + instance = ::DummyIndexingModelWithCallbacks.new + + # Set the fake `changes` hash + instance.instance_variable_set(:@__changed_attributes, {author: 'john'}) + + client.expects(:update).with do |payload| + assert_equal 'foo', payload[:index] + assert_equal 'bar', payload[:type] + assert_equal '1', payload[:id] + assert_equal({title: 'green'}, payload[:body][:doc]) + true + end + + instance.expects(:client).returns(client) + instance.expects(:index_name).returns('foo') + instance.expects(:document_type).returns('bar') + instance.expects(:id).returns('1') + + instance.update_document_attributes title: "green" + end + + should "pass options to the update_document_attributes method" do + client = mock('client') + instance = ::DummyIndexingModelWithCallbacks.new + + client.expects(:update).with do |payload| + assert_equal 'foo', payload[:index] + assert_equal 'bar', payload[:type] + assert_equal '1', payload[:id] + assert_equal({title: 'green'}, payload[:body][:doc]) + assert_equal true, payload[:refresh] + true + end + + instance.expects(:client).returns(client) + instance.expects(:index_name).returns('foo') + instance.expects(:document_type).returns('bar') + instance.expects(:id).returns('1') + + instance.update_document_attributes( { title: "green" }, { refresh: true } ) + end + end + + context "Checking for index existence" do + context "the index exists" do + should "return true" do + indices = mock('indices', exists: true) + client = stub('client', indices: indices) + + DummyIndexingModelForRecreate.expects(:client).returns(client).at_least_once + + assert_equal true, DummyIndexingModelForRecreate.index_exists? + end + end + + context "the index does not exist" do + should "return false" do + indices = mock('indices', exists: false) + client = stub('client', indices: indices) + + DummyIndexingModelForRecreate.expects(:client).returns(client).at_least_once + + assert_equal false, DummyIndexingModelForRecreate.index_exists? + end + end + + context "the indices raises" do + should "return false" do + client = stub('client') + client.expects(:indices).raises(StandardError) + + DummyIndexingModelForRecreate.expects(:client).returns(client).at_least_once + + assert_equal false, DummyIndexingModelForRecreate.index_exists? + end + end + + context "the indices exists call raises" do + should "return false" do + indices = stub('indices') + client = stub('client') + client.expects(:indices).returns(indices) + + indices.expects(:exists).raises(StandardError) + + DummyIndexingModelForRecreate.expects(:client).returns(client).at_least_once + + assert_equal false, DummyIndexingModelForRecreate.index_exists? 
+ end + end + end + + context "Re-creating the index" do + class ::DummyIndexingModelForRecreate + extend ActiveModel::Naming + extend Elasticsearch::Model::Naming::ClassMethods + extend Elasticsearch::Model::Indexing::ClassMethods + + settings index: { number_of_shards: 1 } do + mappings do + indexes :foo, analyzer: 'keyword' + end + end + end + + should "delete the index without raising exception when the index is not found" do + client = stub('client') + indices = stub('indices') + client.stubs(:indices).returns(indices) + + indices.expects(:delete).returns({}).then.raises(NotFound).at_least_once + + DummyIndexingModelForRecreate.expects(:client).returns(client).at_least_once + + assert_nothing_raised { DummyIndexingModelForRecreate.delete_index! force: true } + end + + should "raise an exception without the force option" do + client = stub('client') + indices = stub('indices') + client.stubs(:indices).returns(indices) + + indices.expects(:delete).raises(NotFound) + + DummyIndexingModelForRecreate.expects(:client).returns(client) + + assert_raise(NotFound) { DummyIndexingModelForRecreate.delete_index! } + end + + should "raise a regular exception when deleting the index" do + client = stub('client') + + indices = stub('indices') + indices.expects(:delete).raises(Exception) + client.stubs(:indices).returns(indices) + + DummyIndexingModelForRecreate.expects(:client).returns(client) + + assert_raise(Exception) { DummyIndexingModelForRecreate.delete_index! force: true } + end + + should "create the index with correct settings and mappings when it doesn't exist" do + client = stub('client') + indices = stub('indices') + client.stubs(:indices).returns(indices) + + indices.expects(:create).with do |payload| + assert_equal 'dummy_indexing_model_for_recreates', payload[:index] + assert_equal 1, payload[:body][:settings][:index][:number_of_shards] + assert_equal 'keyword', payload[:body][:mappings][:dummy_indexing_model_for_recreate][:properties][:foo][:analyzer] + true + end.returns({}) + + DummyIndexingModelForRecreate.expects(:index_exists?).returns(false) + DummyIndexingModelForRecreate.expects(:client).returns(client).at_least_once + + assert_nothing_raised { DummyIndexingModelForRecreate.create_index! } + end + + should "not create the index when it exists" do + client = stub('client') + indices = stub('indices') + client.stubs(:indices).returns(indices) + + indices.expects(:create).never + + DummyIndexingModelForRecreate.expects(:index_exists?).returns(true) + DummyIndexingModelForRecreate.expects(:client).returns(client).never + + assert_nothing_raised { DummyIndexingModelForRecreate.create_index! } + end + + should "raise exception during index creation" do + client = stub('client') + indices = stub('indices') + client.stubs(:indices).returns(indices) + + indices.expects(:delete).returns({}) + indices.expects(:create).raises(Exception).at_least_once + + DummyIndexingModelForRecreate.expects(:index_exists?).returns(false) + DummyIndexingModelForRecreate.expects(:client).returns(client).at_least_once + + assert_raise(Exception) { DummyIndexingModelForRecreate.create_index! 
force: true } + end + + should "delete the index first with the force option" do + client = stub('client') + indices = stub('indices') + client.stubs(:indices).returns(indices) + + indices.expects(:delete).returns({}) + indices.expects(:create).returns({}).at_least_once + + DummyIndexingModelForRecreate.expects(:index_exists?).returns(false) + DummyIndexingModelForRecreate.expects(:client).returns(client).at_least_once + + assert_nothing_raised do + DummyIndexingModelForRecreate.create_index! force: true + end + end + + should "refresh the index without raising exception with the force option" do + client = stub('client') + indices = stub('indices') + client.stubs(:indices).returns(indices) + + indices.expects(:refresh).returns({}).then.raises(NotFound).at_least_once + + DummyIndexingModelForRecreate.expects(:client).returns(client).at_least_once + + assert_nothing_raised { DummyIndexingModelForRecreate.refresh_index! force: true } + end + + should "raise a regular exception when refreshing the index" do + client = stub('client') + indices = stub('indices') + client.stubs(:indices).returns(indices) + + indices.expects(:refresh).returns({}).then.raises(Exception).at_least_once + + DummyIndexingModelForRecreate.expects(:client).returns(client).at_least_once + + assert_nothing_raised { DummyIndexingModelForRecreate.refresh_index! force: true } + end + + context "with a custom index name" do + setup do + @client = stub('client') + @indices = stub('indices') + @client.stubs(:indices).returns(@indices) + DummyIndexingModelForRecreate.expects(:client).returns(@client).at_least_once + end + + should "create the custom index" do + @indices.expects(:create).with do |arguments| + assert_equal 'custom-foo', arguments[:index] + true + end + DummyIndexingModelForRecreate.expects(:index_exists?).with do |arguments| + assert_equal 'custom-foo', arguments[:index] + true + end + + DummyIndexingModelForRecreate.create_index! index: 'custom-foo' + end + + should "delete the custom index" do + @indices.expects(:delete).with do |arguments| + assert_equal 'custom-foo', arguments[:index] + true + end + + DummyIndexingModelForRecreate.delete_index! index: 'custom-foo' + end + + should "refresh the custom index" do + @indices.expects(:refresh).with do |arguments| + assert_equal 'custom-foo', arguments[:index] + true + end + + DummyIndexingModelForRecreate.refresh_index! 
index: 'custom-foo' + end + end + end + + end +end diff --git a/elasticsearch-model/test/unit/module_test.rb b/elasticsearch-model/test/unit/module_test.rb new file mode 100644 index 0000000000..a429b3d11f --- /dev/null +++ b/elasticsearch-model/test/unit/module_test.rb @@ -0,0 +1,57 @@ +require 'test_helper' + +class Elasticsearch::Model::ModuleTest < Test::Unit::TestCase + context "The main module" do + + context "client" do + should "have a default" do + client = Elasticsearch::Model.client + assert_not_nil client + assert_instance_of Elasticsearch::Transport::Client, client + end + + should "be settable" do + begin + Elasticsearch::Model.client = "Foobar" + assert_equal "Foobar", Elasticsearch::Model.client + ensure + Elasticsearch::Model.client = nil + end + end + end + + context "when included in module/class, " do + class ::DummyIncludingModel; end + class ::DummyIncludingModelWithSearchMethodDefined + def self.search(query, options={}) + "SEARCH" + end + end + + should "include and set up the proxy" do + DummyIncludingModel.__send__ :include, Elasticsearch::Model + + assert_respond_to DummyIncludingModel, :__elasticsearch__ + assert_respond_to DummyIncludingModel.new, :__elasticsearch__ + end + + should "delegate important methods to the proxy" do + DummyIncludingModel.__send__ :include, Elasticsearch::Model + + assert_respond_to DummyIncludingModel, :search + assert_respond_to DummyIncludingModel, :mappings + assert_respond_to DummyIncludingModel, :settings + assert_respond_to DummyIncludingModel, :index_name + assert_respond_to DummyIncludingModel, :document_type + assert_respond_to DummyIncludingModel, :import + end + + should "not override existing method" do + DummyIncludingModelWithSearchMethodDefined.__send__ :include, Elasticsearch::Model + + assert_equal 'SEARCH', DummyIncludingModelWithSearchMethodDefined.search('foo') + end + end + + end +end diff --git a/elasticsearch-model/test/unit/multimodel_test.rb b/elasticsearch-model/test/unit/multimodel_test.rb new file mode 100644 index 0000000000..89e88f7a16 --- /dev/null +++ b/elasticsearch-model/test/unit/multimodel_test.rb @@ -0,0 +1,38 @@ +require 'test_helper' + +class Elasticsearch::Model::MultimodelTest < Test::Unit::TestCase + + context "Multimodel class" do + setup do + title = stub('Foo', index_name: 'foo_index', document_type: 'foo') + series = stub('Bar', index_name: 'bar_index', document_type: 'bar') + @multimodel = Elasticsearch::Model::Multimodel.new(title, series) + end + + should "have an index_name" do + assert_equal ['foo_index', 'bar_index'], @multimodel.index_name + end + + should "have a document_type" do + assert_equal ['foo', 'bar'], @multimodel.document_type + end + + should "have a client" do + assert_equal Elasticsearch::Model.client, @multimodel.client + end + + should "include models in the registry" do + class ::JustAModel + include Elasticsearch::Model + end + + class ::JustAnotherModel + include Elasticsearch::Model + end + + multimodel = Elasticsearch::Model::Multimodel.new + assert multimodel.models.include?(::JustAModel) + assert multimodel.models.include?(::JustAnotherModel) + end + end +end diff --git a/elasticsearch-model/test/unit/naming_test.rb b/elasticsearch-model/test/unit/naming_test.rb new file mode 100644 index 0000000000..424adf7cc5 --- /dev/null +++ b/elasticsearch-model/test/unit/naming_test.rb @@ -0,0 +1,103 @@ +require 'test_helper' + +class Elasticsearch::Model::NamingTest < Test::Unit::TestCase + context "Naming module" do + class ::DummyNamingModel + extend 
ActiveModel::Naming + + extend Elasticsearch::Model::Naming::ClassMethods + include Elasticsearch::Model::Naming::InstanceMethods + end + + module ::MyNamespace + class DummyNamingModelInNamespace + extend ActiveModel::Naming + + extend Elasticsearch::Model::Naming::ClassMethods + include Elasticsearch::Model::Naming::InstanceMethods + end + end + + should "return the default index_name" do + assert_equal 'dummy_naming_models', DummyNamingModel.index_name + assert_equal 'dummy_naming_models', DummyNamingModel.new.index_name + end + + should "return the sanitized default index_name for namespaced model" do + assert_equal 'my_namespace-dummy_naming_model_in_namespaces', ::MyNamespace::DummyNamingModelInNamespace.index_name + assert_equal 'my_namespace-dummy_naming_model_in_namespaces', ::MyNamespace::DummyNamingModelInNamespace.new.index_name + end + + should "return the default document_type" do + assert_equal 'dummy_naming_model', DummyNamingModel.document_type + assert_equal 'dummy_naming_model', DummyNamingModel.new.document_type + end + + should "set and return the index_name" do + DummyNamingModel.index_name 'foobar' + assert_equal 'foobar', DummyNamingModel.index_name + + d = DummyNamingModel.new + d.index_name 'foobar_d' + assert_equal 'foobar_d', d.index_name + + modifier = 'r' + d.index_name Proc.new{ "foobar_#{modifier}" } + assert_equal 'foobar_r', d.index_name + + modifier = 'z' + assert_equal 'foobar_z', d.index_name + + modifier = 'f' + d.index_name { "foobar_#{modifier}" } + assert_equal 'foobar_f', d.index_name + + modifier = 't' + assert_equal 'foobar_t', d.index_name + end + + should "set the index_name with setter" do + DummyNamingModel.index_name = 'foobar_index_S' + assert_equal 'foobar_index_S', DummyNamingModel.index_name + + d = DummyNamingModel.new + d.index_name = 'foobar_index_s' + assert_equal 'foobar_index_s', d.index_name + + assert_equal 'foobar_index_S', DummyNamingModel.index_name + + modifier2 = 'y' + DummyNamingModel.index_name = Proc.new{ "foobar_index_#{modifier2}" } + assert_equal 'foobar_index_y', DummyNamingModel.index_name + + modifier = 'r' + d.index_name = Proc.new{ "foobar_index_#{modifier}" } + assert_equal 'foobar_index_r', d.index_name + + modifier = 'z' + assert_equal 'foobar_index_z', d.index_name + + assert_equal 'foobar_index_y', DummyNamingModel.index_name + end + + should "set and return the document_type" do + DummyNamingModel.document_type 'foobar' + assert_equal 'foobar', DummyNamingModel.document_type + + d = DummyNamingModel.new + d.document_type 'foobar_d' + assert_equal 'foobar_d', d.document_type + end + + should "set the document_type with setter" do + DummyNamingModel.document_type = 'foobar_type_S' + assert_equal 'foobar_type_S', DummyNamingModel.document_type + + d = DummyNamingModel.new + d.document_type = 'foobar_type_s' + assert_equal 'foobar_type_s', d.document_type + + assert_equal 'foobar_type_S', DummyNamingModel.document_type + end + end +end diff --git a/elasticsearch-model/test/unit/proxy_test.rb b/elasticsearch-model/test/unit/proxy_test.rb new file mode 100644 index 0000000000..d7299f884b --- /dev/null +++ b/elasticsearch-model/test/unit/proxy_test.rb @@ -0,0 +1,100 @@ +require 'test_helper' + +class Elasticsearch::Model::SearchTest < Test::Unit::TestCase + context "Searching module" do + class ::DummyProxyModel + include Elasticsearch::Model::Proxy + + def self.foo + 'classy foo' + end + + def bar + 'insta barr' + end + + def as_json(options) + {foo: 'bar'} + end + end + + class ::DummyProxyModelWithCallbacks + 
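+ # Stores any registered callbacks so the test can invoke them by hand below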
def self.before_save(&block) + (@callbacks ||= {})[block.hash] = block + end + + def changed_attributes; [:foo]; end + + def changes + {:foo => ['One', 'Two']} + end + end + + should "setup the class proxy method" do + assert_respond_to DummyProxyModel, :__elasticsearch__ + end + + should "setup the instance proxy method" do + assert_respond_to DummyProxyModel.new, :__elasticsearch__ + end + + should "register the hook for before_save callback" do + ::DummyProxyModelWithCallbacks.expects(:before_save).returns(true) + DummyProxyModelWithCallbacks.__send__ :include, Elasticsearch::Model::Proxy + end + + should "set the @__changed_attributes variable before save" do + instance = ::DummyProxyModelWithCallbacks.new + instance.__elasticsearch__.expects(:instance_variable_set).with do |name, value| + assert_equal :@__changed_attributes, name + assert_equal({foo: 'Two'}, value) + true + end + + ::DummyProxyModelWithCallbacks.__send__ :include, Elasticsearch::Model::Proxy + + ::DummyProxyModelWithCallbacks.instance_variable_get(:@callbacks).each do |n,b| + instance.instance_eval(&b) + end + end + + should "delegate methods to the target" do + assert_respond_to DummyProxyModel.__elasticsearch__, :foo + assert_respond_to DummyProxyModel.new.__elasticsearch__, :bar + + assert_raise(NoMethodError) { DummyProxyModel.__elasticsearch__.xoxo } + assert_raise(NoMethodError) { DummyProxyModel.new.__elasticsearch__.xoxo } + + assert_equal 'classy foo', DummyProxyModel.__elasticsearch__.foo + assert_equal 'insta barr', DummyProxyModel.new.__elasticsearch__.bar + end + + should "reset the proxy target for duplicates" do + model = DummyProxyModel.new + model_target = model.__elasticsearch__.target + duplicate = model.dup + duplicate_target = duplicate.__elasticsearch__.target + + assert_not_equal model, duplicate + assert_equal model, model_target + assert_equal duplicate, duplicate_target + end + + should "return the proxy class from instance proxy" do + assert_equal Elasticsearch::Model::Proxy::ClassMethodsProxy, DummyProxyModel.new.__elasticsearch__.class.class + end + + should "return the origin class from instance proxy" do + assert_equal DummyProxyModel, DummyProxyModel.new.__elasticsearch__.klass + end + + should "delegate as_json from the proxy to target" do + assert_equal({foo: 'bar'}, DummyProxyModel.new.__elasticsearch__.as_json) + end + + should "have inspect method indicating the proxy" do + assert_match /PROXY/, DummyProxyModel.__elasticsearch__.inspect + assert_match /PROXY/, DummyProxyModel.new.__elasticsearch__.inspect + end + end +end diff --git a/elasticsearch-model/test/unit/response_base_test.rb b/elasticsearch-model/test/unit/response_base_test.rb new file mode 100644 index 0000000000..aa9b4244d6 --- /dev/null +++ b/elasticsearch-model/test/unit/response_base_test.rb @@ -0,0 +1,40 @@ +require 'test_helper' + +class Elasticsearch::Model::BaseTest < Test::Unit::TestCase + context "Response base module" do + class OriginClass + def self.index_name; 'foo'; end + def self.document_type; 'bar'; end + end + + class DummyBaseClass + include Elasticsearch::Model::Response::Base + end + + RESPONSE = { 'hits' => { 'total' => 123, 'max_score' => 456, 'hits' => [] } } + + setup do + @search = Elasticsearch::Model::Searching::SearchRequest.new OriginClass, '*' + @response = Elasticsearch::Model::Response::Response.new OriginClass, @search + @search.stubs(:execute!).returns(RESPONSE) + end + + should "access klass, response, total and max_score" do + r = DummyBaseClass.new OriginClass, @response + + 
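+ # The Base mixin should expose the origin class, the raw response, and readers for the 'hits' metadata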
assert_equal OriginClass, r.klass + assert_equal @response, r.response + assert_equal RESPONSE, r.response.response + assert_equal 123, r.total + assert_equal 456, r.max_score + end + + should "have abstract methods results and records" do + r = DummyBaseClass.new OriginClass, @response + + assert_raise(Elasticsearch::Model::NotImplemented) { |e| r.results } + assert_raise(Elasticsearch::Model::NotImplemented) { |e| r.records } + end + + end +end diff --git a/elasticsearch-model/test/unit/response_pagination_kaminari_test.rb b/elasticsearch-model/test/unit/response_pagination_kaminari_test.rb new file mode 100644 index 0000000000..1fc9b2f3c0 --- /dev/null +++ b/elasticsearch-model/test/unit/response_pagination_kaminari_test.rb @@ -0,0 +1,433 @@ +require 'test_helper' + +class Elasticsearch::Model::ResponsePaginationKaminariTest < Test::Unit::TestCase + class ModelClass + include ::Kaminari::ConfigurationMethods + + def self.index_name; 'foo'; end + def self.document_type; 'bar'; end + end + + RESPONSE = { 'took' => '5', 'timed_out' => false, '_shards' => {'one' => 'OK'}, + 'hits' => { 'total' => 100, 'hits' => (1..100).to_a.map { |i| { _id: i } } } } + + context "Response pagination" do + + setup do + @search = Elasticsearch::Model::Searching::SearchRequest.new ModelClass, '*' + @response = Elasticsearch::Model::Response::Response.new ModelClass, @search, RESPONSE + @response.klass.stubs(:client).returns mock('client') + end + + should "have pagination methods" do + assert_respond_to @response, :page + assert_respond_to @response, :limit_value + assert_respond_to @response, :offset_value + assert_respond_to @response, :limit + assert_respond_to @response, :offset + assert_respond_to @response, :total_count + end + + context "#page method" do + should "advance the from/size" do + @response.klass.client + .expects(:search) + .with do |definition| + assert_equal 25, definition[:from] + assert_equal 25, definition[:size] + true + end + .returns(RESPONSE) + + assert_nil @response.search.definition[:from] + assert_nil @response.search.definition[:size] + + @response.page(2).to_a + assert_equal 25, @response.search.definition[:from] + assert_equal 25, @response.search.definition[:size] + end + + should "advance the from/size further" do + @response.klass.client + .expects(:search) + .with do |definition| + assert_equal 75, definition[:from] + assert_equal 25, definition[:size] + true + end + .returns(RESPONSE) + + @response.page(4).to_a + assert_equal 75, @response.search.definition[:from] + assert_equal 25, @response.search.definition[:size] + end + end + + context "limit/offset readers" do + should "return the default" do + assert_equal Kaminari.config.default_per_page, @response.limit_value + assert_equal 0, @response.offset_value + end + + should "return the value from URL parameters" do + search = Elasticsearch::Model::Searching::SearchRequest.new ModelClass, '*', size: 10, from: 50 + @response = Elasticsearch::Model::Response::Response.new ModelClass, search, RESPONSE + + assert_equal 10, @response.limit_value + assert_equal 50, @response.offset_value + end + + should "ignore the value from request body" do + search = Elasticsearch::Model::Searching::SearchRequest.new ModelClass, + { query: { match_all: {} }, from: 333, size: 999 } + @response = Elasticsearch::Model::Response::Response.new ModelClass, search, RESPONSE + + assert_equal Kaminari.config.default_per_page, @response.limit_value + assert_equal 0, @response.offset_value + end + end + + context "limit setter" do + setup do + 
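+ # Touch records and results first so the tests below can verify that the memoized values get reset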
@response.records + @response.results + end + + should "set the values" do + @response.limit(35) + assert_equal 35, @response.search.definition[:size] + end + + should "reset the variables" do + @response.limit(35) + + assert_nil @response.instance_variable_get(:@response) + assert_nil @response.instance_variable_get(:@records) + assert_nil @response.instance_variable_get(:@results) + end + + should 'coerce string parameters' do + @response.limit("35") + assert_equal 35, @response.search.definition[:size] + end + + should 'ignore invalid string parameters' do + @response.limit(35) + @response.limit("asdf") + assert_equal 35, @response.search.definition[:size] + end + end + + context "with the page() and limit() methods" do + setup do + @response.records + @response.results + end + + should "set the values" do + @response.page(3).limit(35) + assert_equal 35, @response.search.definition[:size] + assert_equal 70, @response.search.definition[:from] + end + + should "set the values when limit is called first" do + @response.limit(35).page(3) + assert_equal 35, @response.search.definition[:size] + assert_equal 70, @response.search.definition[:from] + end + + should "reset the instance variables" do + @response.page(3).limit(35) + + assert_nil @response.instance_variable_get(:@response) + assert_nil @response.instance_variable_get(:@records) + assert_nil @response.instance_variable_get(:@results) + end + end + + context "offset setter" do + setup do + @response.records + @response.results + end + + should "set the values" do + @response.offset(15) + assert_equal 15, @response.search.definition[:from] + end + + should "reset the variables" do + @response.offset(35) + + assert_nil @response.instance_variable_get(:@response) + assert_nil @response.instance_variable_get(:@records) + assert_nil @response.instance_variable_get(:@results) + end + + should 'coerce string parameters' do + @response.offset("35") + assert_equal 35, @response.search.definition[:from] + end + + should 'coerce invalid string parameters' do + @response.offset(35) + @response.offset("asdf") + assert_equal 0, @response.search.definition[:from] + end + end + + context "total" do + should "return the number of hits" do + @response.expects(:results).returns(mock('results', total: 100)) + assert_equal 100, @response.total_count + end + end + + context "results" do + setup do + @search.stubs(:execute!).returns RESPONSE + end + + should "return current page and total count" do + assert_equal 1, @response.page(1).results.current_page + assert_equal 100, @response.results.total_count + + assert_equal 5, @response.page(5).results.current_page + end + + should "return previous page and next page" do + assert_equal nil, @response.page(1).results.prev_page + assert_equal 2, @response.page(1).results.next_page + + assert_equal 3, @response.page(4).results.prev_page + assert_equal nil, @response.page(4).results.next_page + + assert_equal 2, @response.page(3).results.prev_page + assert_equal 4, @response.page(3).results.next_page + end + end + + context "records" do + setup do + @search.stubs(:execute!).returns RESPONSE + end + + should "return current page and total count" do + assert_equal 1, @response.page(1).records.current_page + assert_equal 100, @response.records.total_count + + assert_equal 5, @response.page(5).records.current_page + end + + should "return previous page and next page" do + assert_equal nil, @response.page(1).records.prev_page + assert_equal 2, @response.page(1).records.next_page + + assert_equal 3, 
@response.page(4).records.prev_page + assert_equal nil, @response.page(4).records.next_page + + assert_equal 2, @response.page(3).records.prev_page + assert_equal 4, @response.page(3).records.next_page + end + end + end + + context "Multimodel response pagination" do + setup do + @multimodel = Elasticsearch::Model::Multimodel.new(ModelClass) + @search = Elasticsearch::Model::Searching::SearchRequest.new @multimodel, '*' + @response = Elasticsearch::Model::Response::Response.new @multimodel, @search, RESPONSE + @response.klass.stubs(:client).returns mock('client') + end + + should "have pagination methods" do + assert_respond_to @response, :page + assert_respond_to @response, :limit_value + assert_respond_to @response, :offset_value + assert_respond_to @response, :limit + assert_respond_to @response, :offset + assert_respond_to @response, :total_count + end + + context "#page method" do + should "advance the from/size" do + @response.klass.client + .expects(:search) + .with do |definition| + assert_equal 25, definition[:from] + assert_equal 25, definition[:size] + true + end + .returns(RESPONSE) + + assert_nil @response.search.definition[:from] + assert_nil @response.search.definition[:size] + + @response.page(2).to_a + assert_equal 25, @response.search.definition[:from] + assert_equal 25, @response.search.definition[:size] + end + + should "advance the from/size further" do + @response.klass.client + .expects(:search) + .with do |definition| + assert_equal 75, definition[:from] + assert_equal 25, definition[:size] + true + end + .returns(RESPONSE) + + @response.page(4).to_a + assert_equal 75, @response.search.definition[:from] + assert_equal 25, @response.search.definition[:size] + end + end + + context "limit/offset readers" do + should "return the default" do + assert_equal Kaminari.config.default_per_page, @response.limit_value + assert_equal 0, @response.offset_value + end + + should "return the value from URL parameters" do + search = Elasticsearch::Model::Searching::SearchRequest.new ModelClass, '*', size: 10, from: 50 + @response = Elasticsearch::Model::Response::Response.new ModelClass, search, RESPONSE + + assert_equal 10, @response.limit_value + assert_equal 50, @response.offset_value + end + + should "ignore the value from request body" do + search = Elasticsearch::Model::Searching::SearchRequest.new ModelClass, + { query: { match_all: {} }, from: 333, size: 999 } + @response = Elasticsearch::Model::Response::Response.new ModelClass, search, RESPONSE + + assert_equal Kaminari.config.default_per_page, @response.limit_value + assert_equal 0, @response.offset_value + end + end + + context "limit setter" do + setup do + @response.records + @response.results + end + + should "set the values" do + @response.limit(35) + assert_equal 35, @response.search.definition[:size] + end + + should "reset the variables" do + @response.limit(35) + + assert_nil @response.instance_variable_get(:@response) + assert_nil @response.instance_variable_get(:@records) + assert_nil @response.instance_variable_get(:@results) + end + end + + context "with the page() and limit() methods" do + setup do + @response.records + @response.results + end + + should "set the values" do + @response.page(3).limit(35) + assert_equal 35, @response.search.definition[:size] + assert_equal 70, @response.search.definition[:from] + end + + should "set the values when limit is called first" do + @response.limit(35).page(3) + assert_equal 35, @response.search.definition[:size] + assert_equal 70, @response.search.definition[:from] 
+      end
+
+      should "reset the instance variables" do
+        @response.page(3).limit(35)
+
+        assert_nil @response.instance_variable_get(:@response)
+        assert_nil @response.instance_variable_get(:@records)
+        assert_nil @response.instance_variable_get(:@results)
+      end
+    end
+
+    context "offset setter" do
+      setup do
+        @response.records
+        @response.results
+      end
+
+      should "set the values" do
+        @response.offset(15)
+        assert_equal 15, @response.search.definition[:from]
+      end
+
+      should "reset the variables" do
+        @response.offset(35)
+
+        assert_nil @response.instance_variable_get(:@response)
+        assert_nil @response.instance_variable_get(:@records)
+        assert_nil @response.instance_variable_get(:@results)
+      end
+    end
+
+    context "total" do
+      should "return the number of hits" do
+        @response.expects(:results).returns(mock('results', total: 100))
+        assert_equal 100, @response.total_count
+      end
+    end
+
+    context "results" do
+      setup do
+        @search.stubs(:execute!).returns RESPONSE
+      end
+
+      should "return current page and total count" do
+        assert_equal 1, @response.page(1).results.current_page
+        assert_equal 100, @response.results.total_count
+
+        assert_equal 5, @response.page(5).results.current_page
+      end
+
+      should "return previous page and next page" do
+        assert_equal nil, @response.page(1).results.prev_page
+        assert_equal 2, @response.page(1).results.next_page
+
+        assert_equal 3, @response.page(4).results.prev_page
+        assert_equal nil, @response.page(4).results.next_page
+
+        assert_equal 2, @response.page(3).results.prev_page
+        assert_equal 4, @response.page(3).results.next_page
+      end
+    end
+
+    context "records" do
+      setup do
+        @search.stubs(:execute!).returns RESPONSE
+      end
+
+      should "return current page and total count" do
+        assert_equal 1, @response.page(1).records.current_page
+        assert_equal 100, @response.records.total_count
+
+        assert_equal 5, @response.page(5).records.current_page
+      end
+
+      should "return previous page and next page" do
+        assert_equal nil, @response.page(1).records.prev_page
+        assert_equal 2, @response.page(1).records.next_page
+
+        assert_equal 3, @response.page(4).records.prev_page
+        assert_equal nil, @response.page(4).records.next_page
+
+        assert_equal 2, @response.page(3).records.prev_page
+        assert_equal 4, @response.page(3).records.next_page
+      end
+    end
+  end
+end
diff --git a/elasticsearch-model/test/unit/response_pagination_will_paginate_test.rb b/elasticsearch-model/test/unit/response_pagination_will_paginate_test.rb
new file mode 100644
index 0000000000..6c93835256
--- /dev/null
+++ b/elasticsearch-model/test/unit/response_pagination_will_paginate_test.rb
@@ -0,0 +1,398 @@
+require 'test_helper'
+require 'will_paginate'
+require 'will_paginate/collection'
+
+class Elasticsearch::Model::ResponsePaginationWillPaginateTest < Test::Unit::TestCase
+  class ModelClass
+    def self.index_name; 'foo'; end
+    def self.document_type; 'bar'; end
+
+    # WillPaginate adds this method to models (see the WillPaginate::PerPage module)
+    def self.per_page
+      33
+    end
+  end
+
+  # Subclass Response so we can include the WillPaginate module without conflicts with Kaminari.
+ class WillPaginateResponse < Elasticsearch::Model::Response::Response + include Elasticsearch::Model::Response::Pagination::WillPaginate + end + + RESPONSE = { 'took' => '5', 'timed_out' => false, '_shards' => {'one' => 'OK'}, + 'hits' => { 'total' => 100, 'hits' => (1..100).to_a.map { |i| { _id: i } } } } + + context "Response pagination" do + + setup do + @search = Elasticsearch::Model::Searching::SearchRequest.new ModelClass, '*' + @response = WillPaginateResponse.new ModelClass, @search, RESPONSE + @response.klass.stubs(:client).returns mock('client') + + @expected_methods = [ + # methods needed by WillPaginate::CollectionMethods + :current_page, + :offset, + :per_page, + :total_entries, + :length, + + # methods defined by WillPaginate::CollectionMethods + :total_pages, + :previous_page, + :next_page, + :out_of_bounds?, + ] + end + + should "have pagination methods" do + assert_respond_to @response, :paginate + + @expected_methods.each do |method| + assert_respond_to @response, method + end + end + + context "response.results" do + should "have pagination methods" do + @expected_methods.each do |method| + assert_respond_to @response.results, method + end + end + end + + context "response.records" do + should "have pagination methods" do + @expected_methods.each do |method| + @response.klass.stubs(:find).returns([]) + assert_respond_to @response.records, method + end + end + end + + context "#offset method" do + should "calculate offset using current_page and per_page" do + @response.per_page(3).page(3) + assert_equal 6, @response.offset + end + end + context "#length method" do + should "return count of paginated results" do + @response.per_page(3).page(3) + assert_equal 3, @response.length + end + end + + context "#paginate method" do + should "set from/size using defaults" do + @response.klass.client + .expects(:search) + .with do |definition| + assert_equal 0, definition[:from] + assert_equal 33, definition[:size] + true + end + .returns(RESPONSE) + + assert_nil @response.search.definition[:from] + assert_nil @response.search.definition[:size] + + @response.paginate(page: nil).to_a + assert_equal 0, @response.search.definition[:from] + assert_equal 33, @response.search.definition[:size] + end + + should "set from/size using default per_page" do + @response.klass.client + .expects(:search) + .with do |definition| + assert_equal 33, definition[:from] + assert_equal 33, definition[:size] + true + end + .returns(RESPONSE) + + assert_nil @response.search.definition[:from] + assert_nil @response.search.definition[:size] + + @response.paginate(page: 2).to_a + assert_equal 33, @response.search.definition[:from] + assert_equal 33, @response.search.definition[:size] + end + + should "set from/size using custom page and per_page" do + @response.klass.client + .expects(:search) + .with do |definition| + assert_equal 18, definition[:from] + assert_equal 9, definition[:size] + true + end + .returns(RESPONSE) + + assert_nil @response.search.definition[:from] + assert_nil @response.search.definition[:size] + + @response.paginate(page: 3, per_page: 9).to_a + assert_equal 18, @response.search.definition[:from] + assert_equal 9, @response.search.definition[:size] + end + + should "search for first page if specified page is < 1" do + @response.klass.client + .expects(:search) + .with do |definition| + assert_equal 0, definition[:from] + assert_equal 33, definition[:size] + true + end + .returns(RESPONSE) + + assert_nil @response.search.definition[:from] + assert_nil @response.search.definition[:size] 
+ + @response.paginate(page: "-1").to_a + assert_equal 0, @response.search.definition[:from] + assert_equal 33, @response.search.definition[:size] + end + + should "use the param_name" do + @response.klass.client + .expects(:search) + .with do |definition| + assert_equal 10, definition[:from] + true + end + .returns(RESPONSE) + + @response.paginate(my_page: 2, per_page: 10, param_name: :my_page).to_a + end + end + + context "#page and #per_page shorthand methods" do + should "set from/size using default per_page" do + @response.page(5) + assert_equal 132, @response.search.definition[:from] + assert_equal 33, @response.search.definition[:size] + end + + should "set from/size when calling #page then #per_page" do + @response.page(5).per_page(3) + assert_equal 12, @response.search.definition[:from] + assert_equal 3, @response.search.definition[:size] + end + + should "set from/size when calling #per_page then #page" do + @response.per_page(3).page(5) + assert_equal 12, @response.search.definition[:from] + assert_equal 3, @response.search.definition[:size] + end + end + + context "#current_page method" do + should "return 1 by default" do + @response.paginate({}) + assert_equal 1, @response.current_page + end + + should "return current page number" do + @response.paginate(page: 3, per_page: 9) + assert_equal 3, @response.current_page + end + + should "return nil if not pagination set" do + assert_equal nil, @response.current_page + end + end + + context "#per_page method" do + should "return value set in paginate call" do + @response.paginate(per_page: 8) + assert_equal 8, @response.per_page + end + end + + context "#total_entries method" do + should "return total from response" do + @response.expects(:results).returns(mock('results', total: 100)) + assert_equal 100, @response.total_entries + end + end + end + + context "Multimodel response pagination" do + setup do + @multimodel = Elasticsearch::Model::Multimodel.new ModelClass + @search = Elasticsearch::Model::Searching::SearchRequest.new @multimodel, '*' + @response = WillPaginateResponse.new @multimodel, @search, RESPONSE + @response.klass.stubs(:client).returns mock('client') + + @expected_methods = [ + # methods needed by WillPaginate::CollectionMethods + :current_page, + :offset, + :per_page, + :total_entries, + :length, + + # methods defined by WillPaginate::CollectionMethods + :total_pages, + :previous_page, + :next_page, + :out_of_bounds?, + ] + end + + should "have pagination methods" do + assert_respond_to @response, :paginate + + @expected_methods.each do |method| + assert_respond_to @response, method + end + end + + context "response.results" do + should "have pagination methods" do + @expected_methods.each do |method| + assert_respond_to @response.results, method + end + end + end + + context "#offset method" do + should "calculate offset using current_page and per_page" do + @response.per_page(3).page(3) + assert_equal 6, @response.offset + end + end + context "#length method" do + should "return count of paginated results" do + @response.per_page(3).page(3) + assert_equal 3, @response.length + end + end + + context "#paginate method" do + should "set from/size using WillPaginate defaults, ignoring aggregated models configuration" do + @response.klass.client + .expects(:search) + .with do |definition| + assert_equal 0, definition[:from] + assert_equal ::WillPaginate.per_page, definition[:size] + true + end + .returns(RESPONSE) + + assert_nil @response.search.definition[:from] + assert_nil @response.search.definition[:size] + + 
@response.paginate(page: nil).to_a + assert_equal 0, @response.search.definition[:from] + assert_equal ::WillPaginate.per_page, @response.search.definition[:size] + end + + should "set from/size using default per_page, ignoring aggregated models' configuration" do + @response.klass.client + .expects(:search) + .with do |definition| + assert_equal ::WillPaginate.per_page, definition[:from] + assert_equal ::WillPaginate.per_page, definition[:size] + true + end + .returns(RESPONSE) + + assert_nil @response.search.definition[:from] + assert_nil @response.search.definition[:size] + + @response.paginate(page: 2).to_a + assert_equal ::WillPaginate.per_page, @response.search.definition[:from] + assert_equal ::WillPaginate.per_page, @response.search.definition[:size] + end + + should "set from/size using custom page and per_page" do + @response.klass.client + .expects(:search) + .with do |definition| + assert_equal 18, definition[:from] + assert_equal 9, definition[:size] + true + end + .returns(RESPONSE) + + assert_nil @response.search.definition[:from] + assert_nil @response.search.definition[:size] + + @response.paginate(page: 3, per_page: 9).to_a + assert_equal 18, @response.search.definition[:from] + assert_equal 9, @response.search.definition[:size] + end + + should "search for first page if specified page is < 1" do + @response.klass.client + .expects(:search) + .with do |definition| + assert_equal 0, definition[:from] + assert_equal ::WillPaginate.per_page, definition[:size] + true + end + .returns(RESPONSE) + + assert_nil @response.search.definition[:from] + assert_nil @response.search.definition[:size] + + @response.paginate(page: "-1").to_a + assert_equal 0, @response.search.definition[:from] + assert_equal ::WillPaginate.per_page, @response.search.definition[:size] + end + end + + context "#page and #per_page shorthand methods" do + should "set from/size using default per_page" do + @response.page(5) + assert_equal 120, @response.search.definition[:from] + assert_equal ::WillPaginate.per_page, @response.search.definition[:size] + end + + should "set from/size when calling #page then #per_page" do + @response.page(5).per_page(3) + assert_equal 12, @response.search.definition[:from] + assert_equal 3, @response.search.definition[:size] + end + + should "set from/size when calling #per_page then #page" do + @response.per_page(3).page(5) + assert_equal 12, @response.search.definition[:from] + assert_equal 3, @response.search.definition[:size] + end + end + + context "#current_page method" do + should "return 1 by default" do + @response.paginate({}) + assert_equal 1, @response.current_page + end + + should "return current page number" do + @response.paginate(page: 3, per_page: 9) + assert_equal 3, @response.current_page + end + + should "return nil if not pagination set" do + assert_equal nil, @response.current_page + end + end + + context "#per_page method" do + should "return value set in paginate call" do + @response.paginate(per_page: 8) + assert_equal 8, @response.per_page + end + end + + context "#total_entries method" do + should "return total from response" do + @response.expects(:results).returns(mock('results', total: 100)) + assert_equal 100, @response.total_entries + end + end + end +end diff --git a/elasticsearch-model/test/unit/response_records_test.rb b/elasticsearch-model/test/unit/response_records_test.rb new file mode 100644 index 0000000000..8a78255d7c --- /dev/null +++ b/elasticsearch-model/test/unit/response_records_test.rb @@ -0,0 +1,91 @@ +require 'test_helper' + 
+class Elasticsearch::Model::RecordsTest < Test::Unit::TestCase + context "Response records" do + class DummyCollection + include Enumerable + + def each(&block); ['FOO'].each(&block); end + def size; ['FOO'].size; end + def empty?; ['FOO'].empty?; end + def foo; 'BAR'; end + end + + class DummyModel + def self.index_name; 'foo'; end + def self.document_type; 'bar'; end + + def self.find(*args) + DummyCollection.new + end + end + + RESPONSE = { 'hits' => { 'total' => 123, 'max_score' => 456, 'hits' => [{'_id' => '1', 'foo' => 'bar'}] } } + RESULTS = Elasticsearch::Model::Response::Results.new DummyModel, RESPONSE + + setup do + search = Elasticsearch::Model::Searching::SearchRequest.new DummyModel, '*' + search.stubs(:execute!).returns RESPONSE + + response = Elasticsearch::Model::Response::Response.new DummyModel, search + @records = Elasticsearch::Model::Response::Records.new DummyModel, response + end + + should "access the records" do + assert_respond_to @records, :records + assert_equal 1, @records.records.size + assert_equal 'FOO', @records.records.first + end + + should "delegate Enumerable methods to records" do + assert ! @records.empty? + assert_equal 'FOO', @records.first + end + + should "delegate methods to records" do + assert_respond_to @records, :foo + assert_equal 'BAR', @records.foo + end + + should "have each_with_hit method" do + @records.each_with_hit do |record, hit| + assert_equal 'FOO', record + assert_equal 'bar', hit.foo + end + end + + should "have map_with_hit method" do + assert_equal ['FOO---bar'], @records.map_with_hit { |record, hit| "#{record}---#{hit.foo}" } + end + + should "return the IDs" do + assert_equal ['1'], @records.ids + end + + context "with adapter" do + module DummyAdapter + module RecordsMixin + def records + ['FOOBAR'] + end + end + + def records_mixin + RecordsMixin + end; module_function :records_mixin + end + + should "delegate the records method to the adapter" do + Elasticsearch::Model::Adapter.expects(:from_class) + .with(DummyModel) + .returns(DummyAdapter) + + @records = Elasticsearch::Model::Response::Records.new DummyModel, + RESPONSE + + assert_equal ['FOOBAR'], @records.records + end + end + + end +end diff --git a/elasticsearch-model/test/unit/response_result_test.rb b/elasticsearch-model/test/unit/response_result_test.rb new file mode 100644 index 0000000000..ff78d25790 --- /dev/null +++ b/elasticsearch-model/test/unit/response_result_test.rb @@ -0,0 +1,90 @@ +require 'test_helper' + +class Elasticsearch::Model::ResultTest < Test::Unit::TestCase + context "Response result" do + + should "have method access to properties" do + result = Elasticsearch::Model::Response::Result.new foo: 'bar', bar: { bam: 'baz' } + + assert_respond_to result, :foo + assert_respond_to result, :bar + + assert_equal 'bar', result.foo + assert_equal 'baz', result.bar.bam + + assert_raise(NoMethodError) { result.xoxo } + end + + should "return _id as #id" do + result = Elasticsearch::Model::Response::Result.new foo: 'bar', _id: 42, _source: { id: 12 } + + assert_equal 42, result.id + assert_equal 12, result._source.id + end + + should "return _type as #type" do + result = Elasticsearch::Model::Response::Result.new foo: 'bar', _type: 'baz', _source: { type: 'BAM' } + + assert_equal 'baz', result.type + assert_equal 'BAM', result._source.type + end + + should "delegate method calls to `_source` when available" do + result = Elasticsearch::Model::Response::Result.new foo: 'bar', _source: { bar: 'baz' } + + assert_respond_to result, :foo + assert_respond_to 
result, :_source + assert_respond_to result, :bar + + assert_equal 'bar', result.foo + assert_equal 'baz', result._source.bar + assert_equal 'baz', result.bar + end + + should "delegate existence method calls to `_source`" do + result = Elasticsearch::Model::Response::Result.new foo: 'bar', _source: { bar: { bam: 'baz' } } + + assert_respond_to result._source, :bar? + assert_respond_to result, :bar? + + assert_equal true, result._source.bar? + assert_equal true, result.bar? + assert_equal false, result.boo? + + assert_equal true, result.bar.bam? + assert_equal false, result.bar.boo? + end + + should "delegate methods to @result" do + result = Elasticsearch::Model::Response::Result.new foo: 'bar' + + assert_equal 'bar', result.foo + assert_equal 'bar', result.fetch('foo') + assert_equal 'moo', result.fetch('NOT_EXIST', 'moo') + assert_equal ['foo'], result.keys + + assert_respond_to result, :to_hash + assert_equal({'foo' => 'bar'}, result.to_hash) + + assert_raise(NoMethodError) { result.does_not_exist } + end + + should "delegate existence method calls to @result" do + result = Elasticsearch::Model::Response::Result.new foo: 'bar', _source: { bar: 'bam' } + assert_respond_to result, :foo? + + assert_equal true, result.foo? + assert_equal false, result.boo? + assert_equal false, result._source.foo? + assert_equal false, result._source.boo? + end + + should "delegate as_json to @result even when ActiveSupport changed half of Ruby" do + require 'active_support/json/encoding' + result = Elasticsearch::Model::Response::Result.new foo: 'bar' + + result.instance_variable_get(:@result).expects(:as_json) + result.as_json(except: 'foo') + end + end +end diff --git a/elasticsearch-model/test/unit/response_results_test.rb b/elasticsearch-model/test/unit/response_results_test.rb new file mode 100644 index 0000000000..e97539ecdc --- /dev/null +++ b/elasticsearch-model/test/unit/response_results_test.rb @@ -0,0 +1,31 @@ +require 'test_helper' + +class Elasticsearch::Model::ResultsTest < Test::Unit::TestCase + context "Response results" do + class OriginClass + def self.index_name; 'foo'; end + def self.document_type; 'bar'; end + end + + RESPONSE = { 'hits' => { 'total' => 123, 'max_score' => 456, 'hits' => [{'foo' => 'bar'}] } } + + setup do + @search = Elasticsearch::Model::Searching::SearchRequest.new OriginClass, '*' + @response = Elasticsearch::Model::Response::Response.new OriginClass, @search + @results = Elasticsearch::Model::Response::Results.new OriginClass, @response + @search.stubs(:execute!).returns(RESPONSE) + end + + should "access the results" do + assert_respond_to @results, :results + assert_equal 1, @results.results.size + assert_equal 'bar', @results.results.first.foo + end + + should "delegate Enumerable methods to results" do + assert ! @results.empty? 
+ assert_equal 'bar', @results.first.foo + end + + end +end diff --git a/elasticsearch-model/test/unit/response_test.rb b/elasticsearch-model/test/unit/response_test.rb new file mode 100644 index 0000000000..71cfb2d6d9 --- /dev/null +++ b/elasticsearch-model/test/unit/response_test.rb @@ -0,0 +1,104 @@ +require 'test_helper' + +class Elasticsearch::Model::ResponseTest < Test::Unit::TestCase + context "Response" do + class OriginClass + def self.index_name; 'foo'; end + def self.document_type; 'bar'; end + end + + RESPONSE = { 'took' => '5', 'timed_out' => false, '_shards' => {'one' => 'OK'}, 'hits' => { 'hits' => [] }, + 'aggregations' => {'foo' => {'bar' => 10}}, + 'suggest' => {'my_suggest' => [ { 'text' => 'foo', 'options' => [ { 'text' => 'Foo', 'score' => 2.0 }, { 'text' => 'Bar', 'score' => 1.0 } ] } ]}} + + setup do + @search = Elasticsearch::Model::Searching::SearchRequest.new OriginClass, '*' + @search.stubs(:execute!).returns(RESPONSE) + end + + should "access klass, response, took, timed_out, shards" do + response = Elasticsearch::Model::Response::Response.new OriginClass, @search + + assert_equal OriginClass, response.klass + assert_equal @search, response.search + assert_equal RESPONSE, response.response + assert_equal '5', response.took + assert_equal false, response.timed_out + assert_equal 'OK', response.shards.one + end + + should "wrap the raw Hash response in Hashie::Mash" do + @search = Elasticsearch::Model::Searching::SearchRequest.new OriginClass, '*' + @search.stubs(:execute!).returns({'hits' => { 'hits' => [] }, 'aggregations' => { 'dates' => 'FOO' }}) + + response = Elasticsearch::Model::Response::Response.new OriginClass, @search + + assert_respond_to response.response, :aggregations + assert_equal 'FOO', response.response.aggregations.dates + end + + should "load and access the results" do + @search.expects(:execute!).returns(RESPONSE) + + response = Elasticsearch::Model::Response::Response.new OriginClass, @search + assert_instance_of Elasticsearch::Model::Response::Results, response.results + assert_equal 0, response.size + end + + should "load and access the records" do + @search.expects(:execute!).returns(RESPONSE) + + response = Elasticsearch::Model::Response::Response.new OriginClass, @search + assert_instance_of Elasticsearch::Model::Response::Records, response.records + assert_equal 0, response.size + end + + should "delegate Enumerable methods to results" do + @search.expects(:execute!).returns(RESPONSE) + + response = Elasticsearch::Model::Response::Response.new OriginClass, @search + assert response.empty? 
+ end + + should "be initialized lazily" do + @search.expects(:execute!).never + + Elasticsearch::Model::Response::Response.new OriginClass, @search + end + + should "access the aggregations" do + @search.expects(:execute!).returns(RESPONSE) + + response = Elasticsearch::Model::Response::Response.new OriginClass, @search + assert_respond_to response, :aggregations + assert_kind_of Hashie::Mash, response.aggregations.foo + assert_equal 10, response.aggregations.foo.bar + end + + should "access the suggest" do + @search.expects(:execute!).returns(RESPONSE) + + response = Elasticsearch::Model::Response::Response.new OriginClass, @search + + assert_respond_to response, :suggestions + assert_kind_of Hashie::Mash, response.suggestions + assert_equal 'Foo', response.suggestions.my_suggest.first.options.first.text + end + + should "return array of terms from the suggestions" do + @search.expects(:execute!).returns(RESPONSE) + response = Elasticsearch::Model::Response::Response.new OriginClass, @search + + assert_not_empty response.suggestions + assert_equal [ 'Foo', 'Bar' ], response.suggestions.terms + end + + should "return empty array as suggest terms when there are no suggestions" do + @search.expects(:execute!).returns({}) + response = Elasticsearch::Model::Response::Response.new OriginClass, @search + + assert_empty response.suggestions + assert_equal [], response.suggestions.terms + end + end +end diff --git a/elasticsearch-model/test/unit/searching_search_request_test.rb b/elasticsearch-model/test/unit/searching_search_request_test.rb new file mode 100644 index 0000000000..b2e84aecce --- /dev/null +++ b/elasticsearch-model/test/unit/searching_search_request_test.rb @@ -0,0 +1,78 @@ +require 'test_helper' + +class Elasticsearch::Model::SearchRequestTest < Test::Unit::TestCase + context "SearchRequest class" do + class ::DummySearchingModel + extend Elasticsearch::Model::Searching::ClassMethods + + def self.index_name; 'foo'; end + def self.document_type; 'bar'; end + + end + + setup do + @client = mock('client') + DummySearchingModel.stubs(:client).returns(@client) + end + + should "pass the search definition as a simple query" do + @client.expects(:search).with do |params| + assert_equal 'foo', params[:q] + true + end + .returns({}) + + s = Elasticsearch::Model::Searching::SearchRequest.new ::DummySearchingModel, 'foo' + s.execute! + end + + should "pass the search definition as a Hash" do + @client.expects(:search).with do |params| + assert_equal( {foo: 'bar'}, params[:body] ) + true + end + .returns({}) + + s = Elasticsearch::Model::Searching::SearchRequest.new ::DummySearchingModel, foo: 'bar' + s.execute! + end + + should "pass the search definition as a JSON string" do + @client.expects(:search).with do |params| + assert_equal( '{"foo":"bar"}', params[:body] ) + true + end + .returns({}) + + s = Elasticsearch::Model::Searching::SearchRequest.new ::DummySearchingModel, '{"foo":"bar"}' + s.execute! + end + + should "pass the search definition as an object which responds to to_hash" do + class MySpecialQueryBuilder + def to_hash; {foo: 'bar'}; end + end + + @client.expects(:search).with do |params| + assert_equal( {foo: 'bar'}, params[:body] ) + true + end + .returns({}) + + s = Elasticsearch::Model::Searching::SearchRequest.new ::DummySearchingModel, MySpecialQueryBuilder.new + s.execute! 
+ end + + should "pass the options to the client" do + @client.expects(:search).with do |params| + assert_equal 'foo', params[:q] + assert_equal 15, params[:size] + true + end + .returns({}) + + s = Elasticsearch::Model::Searching::SearchRequest.new ::DummySearchingModel, 'foo', size: 15 + s.execute! + end + end +end diff --git a/elasticsearch-model/test/unit/searching_test.rb b/elasticsearch-model/test/unit/searching_test.rb new file mode 100644 index 0000000000..f6cb78136f --- /dev/null +++ b/elasticsearch-model/test/unit/searching_test.rb @@ -0,0 +1,41 @@ +require 'test_helper' + +class Elasticsearch::Model::SearchingTest < Test::Unit::TestCase + context "Searching module" do + class ::DummySearchingModel + extend Elasticsearch::Model::Searching::ClassMethods + + def self.index_name; 'foo'; end + def self.document_type; 'bar'; end + end + + setup do + @client = mock('client') + DummySearchingModel.stubs(:client).returns(@client) + end + + should "have the search method" do + assert_respond_to DummySearchingModel, :search + end + + should "initialize the search object" do + Elasticsearch::Model::Searching::SearchRequest + .expects(:new).with do |klass, query, options| + assert_equal DummySearchingModel, klass + assert_equal 'foo', query + assert_equal({default_operator: 'AND'}, options) + true + end + .returns( stub('search') ) + + DummySearchingModel.search 'foo', default_operator: 'AND' + end + + should "not execute the search" do + Elasticsearch::Model::Searching::SearchRequest + .expects(:new).returns( mock('search').expects(:execute!).never ) + + DummySearchingModel.search 'foo' + end + end +end diff --git a/elasticsearch-model/test/unit/serializing_test.rb b/elasticsearch-model/test/unit/serializing_test.rb new file mode 100644 index 0000000000..201329257c --- /dev/null +++ b/elasticsearch-model/test/unit/serializing_test.rb @@ -0,0 +1,17 @@ +require 'test_helper' + +class Elasticsearch::Model::SerializingTest < Test::Unit::TestCase + context "Serializing module" do + class DummyClass + include Elasticsearch::Model::Serializing::InstanceMethods + + def as_json(options={}) + 'HASH' + end + end + + should "delegate to as_json by default" do + assert_equal 'HASH', DummyClass.new.as_indexed_json + end + end +end diff --git a/elasticsearch-rails/.gitignore b/elasticsearch-rails/.gitignore new file mode 100644 index 0000000000..d87d4be66f --- /dev/null +++ b/elasticsearch-rails/.gitignore @@ -0,0 +1,17 @@ +*.gem +*.rbc +.bundle +.config +.yardoc +Gemfile.lock +InstalledFiles +_yardoc +coverage +doc/ +lib/bundler/man +pkg +rdoc +spec/reports +test/tmp +test/version_tmp +tmp diff --git a/elasticsearch-rails/CHANGELOG.md b/elasticsearch-rails/CHANGELOG.md new file mode 100644 index 0000000000..5bb4e13bd3 --- /dev/null +++ b/elasticsearch-rails/CHANGELOG.md @@ -0,0 +1,44 @@ +## 0.1.9 + +* Added checks for proper launch order and other updates to the example application templates +* Updated the example application to work with Elasticsearch 2.x +* Used the `suggest` method instead of `response['suggest']` in the application template + +## 0.1.8 + +* Added an example application template that loads settings from a file +* Added missing require in the seeds.rb file for the expert template +* Fixed double include of the aliased method (execute_without_instrumentation) +* Fixed the error when getting the search_controller_test.rb asset in `03-expert.rb` template +* Updated URLs for getting raw assets from Github in the `03-expert.rb` template + +## 0.1.7 + +* Updated dependencies for the gem and 
example applications
+* Fixed various small errors in the `01-basic.rb` template
+* Fixed an error when inserting the Kaminari gem into the Gemfile in the `02-pretty.rb` template
+* Fixed an incorrect regex for adding Rails instrumentation into `application.rb` in the `02-pretty.rb` template
+* Fixed other small errors in the `02-pretty.rb` template
+* Improved and added tests for the application generated from the `02-pretty.rb` template
+* Added the `04-dsl.rb` template which uses the `elasticsearch-dsl` gem to build the search definition
+
+## 0.1.6
+
+* Fixed errors in templates for the Rails example applications
+* Fixed errors in the importing Rake task
+* Refactored and updated the instrumentation support to allow integration with `Persistence::Model`
+
+## 0.1.5
+
+* Fixed an exception when no suggestions were returned in the `03-expert` example application template
+
+## 0.1.2
+
+* Allow passing an ActiveRecord scope to the importing Rake task
+
+## 0.1.1
+
+* Improved the Rake tasks
+* Improved the example application templates
+
+## 0.1.0 (Initial Version)
diff --git a/elasticsearch-rails/Gemfile b/elasticsearch-rails/Gemfile
new file mode 100644
index 0000000000..1aeec6c9a0
--- /dev/null
+++ b/elasticsearch-rails/Gemfile
@@ -0,0 +1,9 @@
+source 'https://rubygems.org'
+
+# Specify your gem's dependencies in elasticsearch-rails.gemspec
+gemspec
+
+# TODO: Figure out how to specify dependency on local elasticsearch-model without endless "Resolving dependencies"
+# if File.exists? File.expand_path("../../elasticsearch-model", __FILE__)
+#   gem 'elasticsearch-model', :path => File.expand_path("../../elasticsearch-model", __FILE__), :require => true
+# end
diff --git a/elasticsearch-rails/LICENSE.txt b/elasticsearch-rails/LICENSE.txt
new file mode 100644
index 0000000000..7dc94b3e5a
--- /dev/null
+++ b/elasticsearch-rails/LICENSE.txt
@@ -0,0 +1,13 @@
+Copyright (c) 2014 Elasticsearch
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/elasticsearch-rails/README.md b/elasticsearch-rails/README.md
new file mode 100644
index 0000000000..4760549385
--- /dev/null
+++ b/elasticsearch-rails/README.md
@@ -0,0 +1,132 @@
+# Elasticsearch::Rails
+
+The `elasticsearch-rails` library is a companion for the
+[`elasticsearch-model`](https://github.com/elasticsearch/elasticsearch-rails/tree/master/elasticsearch-model)
+library, providing features suitable for Ruby on Rails applications.
+
+The library is compatible with Ruby 1.9.3 and higher.
+
+## Installation
+
+Install the package from [Rubygems](https://rubygems.org):
+
+    gem install elasticsearch-rails
+
+To use an unreleased version, either add it to your `Gemfile` for [Bundler](http://bundler.io):
+
+    gem 'elasticsearch-rails', git: 'git://github.com/elasticsearch/elasticsearch-rails.git'
+
+or install it from a source code checkout:
+
+    git clone https://github.com/elasticsearch/elasticsearch-rails.git
+    cd elasticsearch-rails/elasticsearch-rails
+    bundle install
+    rake install
+
+## Features
+
+### Rake Tasks
+
+To facilitate importing data from your models into Elasticsearch, require the task definition in your application,
+e.g. in the `lib/tasks/elasticsearch.rake` file:
+
+```ruby
+require 'elasticsearch/rails/tasks/import'
+```
+
+To import the records from your `Article` model, run:
+
+```bash
+$ bundle exec rake environment elasticsearch:import:model CLASS='Article'
+```
+
+To limit the imported records to a certain
+ActiveRecord [scope](http://guides.rubyonrails.org/active_record_querying.html#scopes),
+pass it to the task:
+
+```bash
+$ bundle exec rake environment elasticsearch:import:model CLASS='Article' SCOPE='published'
+```
+
+Run this command to display usage instructions:
+
+```bash
+$ bundle exec rake -D elasticsearch
+```
+
+### ActiveSupport Instrumentation
+
+To display information about the search request (duration, search definition) during development,
+and to include the information in the Rails log file, require the component in your `application.rb` file:
+
+```ruby
+require 'elasticsearch/rails/instrumentation'
+```
+
+You should see an output like this in your application log in development environment:
+
+    Article Search (321.3ms) { index: "articles", type: "article", body: { query: ... } }
+
+Also, the total duration of the request to Elasticsearch is displayed in the Rails request breakdown:
+
+    Completed 200 OK in 615ms (Views: 230.9ms | ActiveRecord: 0.0ms | Elasticsearch: 321.3ms)
+
+There's a special component for the [Lograge](https://github.com/roidrage/lograge) logger.
+Require the component in your `application.rb` file (and set `config.lograge.enabled`, as sketched below):
+
+```ruby
+require 'elasticsearch/rails/lograge'
+```
+
+You should see the duration of the request to Elasticsearch as part of each log event:
+
+    method=GET path=/search ... status=200 duration=380.89 view=99.64 db=0.00 es=279.37
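+
+Note that Lograge itself must be enabled for this output to appear; a minimal sketch, assuming
+you keep the setting in an initializer (`config/initializers/lograge.rb` is an illustrative
+location -- adapt it to wherever your Lograge configuration lives):
+
+```ruby
+# config/initializers/lograge.rb -- illustrative sketch; see the Lograge README for full options
+Rails.application.configure do
+  config.lograge.enabled = true
+end
+```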
+
+### Rails Application Templates
+
+You can generate a fully working example Ruby on Rails application, with an `Article` model and a search form,
+to play with (it even downloads _Elasticsearch_ itself, generates the application skeleton and leaves you with
+a _Git_ repository to explore the steps and the code) with the
+[`01-basic.rb`](https://github.com/elasticsearch/elasticsearch-rails/blob/master/elasticsearch-rails/lib/rails/templates/01-basic.rb) template:
+
+```bash
+rails new searchapp --skip --skip-bundle --template https://raw.github.com/elasticsearch/elasticsearch-rails/master/elasticsearch-rails/lib/rails/templates/01-basic.rb
+```
+
+Run the same command again, in the same folder, with the
+[`02-pretty`](https://github.com/elasticsearch/elasticsearch-rails/blob/master/elasticsearch-rails/lib/rails/templates/02-pretty.rb)
+template to add features such as a custom `Article.search` method, result highlighting and
+[_Bootstrap_](http://getbootstrap.com) integration:
+
+```bash
+rails new searchapp --skip --skip-bundle --template https://raw.github.com/elasticsearch/elasticsearch-rails/master/elasticsearch-rails/lib/rails/templates/02-pretty.rb
+```
+
+Run the same command with the [`03-expert.rb`](https://github.com/elasticsearch/elasticsearch-rails/blob/master/elasticsearch-rails/lib/rails/templates/03-expert.rb)
+template to refactor the application into a more complex use case,
+with a couple of hundred articles from The New York Times as the example content.
+The template will extract the Elasticsearch integration into a `Searchable` "concern" module,
+define a complex mapping and custom serialization, implement faceted navigation and suggestions as part of
+a complex query, and add a _Sidekiq_-based worker for updating the index in the background.
+
+```bash
+rails new searchapp --skip --skip-bundle --template https://raw.github.com/elasticsearch/elasticsearch-rails/master/elasticsearch-rails/lib/rails/templates/03-expert.rb
+```
+
+## License
+
+This software is licensed under the Apache 2 license, quoted below.
+
+    Copyright (c) 2014 Elasticsearch
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
diff --git a/elasticsearch-rails/Rakefile b/elasticsearch-rails/Rakefile new file mode 100644 index 0000000000..3cf581a919 --- /dev/null +++ b/elasticsearch-rails/Rakefile @@ -0,0 +1,53 @@ +require "bundler/gem_tasks" + +desc "Run unit tests" +task :default => 'test:unit' +task :test => 'test:unit' + +# ----- Test tasks ------------------------------------------------------------ + +require 'rake/testtask' +namespace :test do + task :ci_reporter do + ENV['CI_REPORTS'] ||= 'tmp/reports' + require 'ci/reporter/rake/minitest' + Rake::Task['ci:setup:minitest'].invoke + end + + Rake::TestTask.new(:unit) do |test| + Rake::Task['test:ci_reporter'].invoke if ENV['CI'] + test.libs << 'lib' << 'test' + test.test_files = FileList["test/unit/**/*_test.rb"] + # test.verbose = true + # test.warning = true + end + + Rake::TestTask.new(:integration) do |test| + Rake::Task['test:ci_reporter'].invoke if ENV['CI'] + test.libs << 'lib' << 'test' + test.test_files = FileList["test/integration/**/*_test.rb"] + end + + Rake::TestTask.new(:all) do |test| + Rake::Task['test:ci_reporter'].invoke if ENV['CI'] + test.libs << 'lib' << 'test' + test.test_files = FileList["test/unit/**/*_test.rb", "test/integration/**/*_test.rb"] + end +end + +# ----- Documentation tasks --------------------------------------------------- + +require 'yard' +YARD::Rake::YardocTask.new(:doc) do |t| + t.options = %w| --embed-mixins --markup=markdown | +end + +# ----- Code analysis tasks --------------------------------------------------- + +if defined?(RUBY_VERSION) && RUBY_VERSION > '1.9' + require 'cane/rake_task' + Cane::RakeTask.new(:quality) do |cane| + cane.abc_max = 15 + cane.no_style = true + end +end diff --git a/elasticsearch-rails/elasticsearch-rails.gemspec b/elasticsearch-rails/elasticsearch-rails.gemspec new file mode 100644 index 0000000000..14dee1ddf7 --- /dev/null +++ b/elasticsearch-rails/elasticsearch-rails.gemspec @@ -0,0 +1,52 @@ +# coding: utf-8 +lib = File.expand_path('../lib', __FILE__) +$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) +require 'elasticsearch/rails/version' + +Gem::Specification.new do |s| + s.name = "elasticsearch-rails" + s.version = Elasticsearch::Rails::VERSION + s.authors = ["Karel Minarik"] + s.email = ["karel.minarik@elasticsearch.org"] + s.description = "Ruby on Rails integrations for Elasticsearch." + s.summary = "Ruby on Rails integrations for Elasticsearch." 
+ s.homepage = "https://github.com/elasticsearch/elasticsearch-rails/" + s.license = "Apache 2" + + s.files = `git ls-files`.split($/) + s.executables = s.files.grep(%r{^bin/}) { |f| File.basename(f) } + s.test_files = s.files.grep(%r{^(test|spec|features)/}) + s.require_paths = ["lib"] + + s.extra_rdoc_files = [ "README.md", "LICENSE.txt" ] + s.rdoc_options = [ "--charset=UTF-8" ] + + s.required_ruby_version = ">= 1.9.3" + + s.add_development_dependency "bundler", "~> 1.3" + s.add_development_dependency "rake", "< 11.0" + + s.add_development_dependency "elasticsearch-extensions" + s.add_development_dependency "elasticsearch-model" + + s.add_development_dependency "oj" + s.add_development_dependency "rails", ">= 3.1" + + s.add_development_dependency "lograge" + + s.add_development_dependency "minitest", "~> 4.2" + s.add_development_dependency "test-unit" if defined?(RUBY_VERSION) && RUBY_VERSION > '2.2' + s.add_development_dependency "shoulda-context" + s.add_development_dependency "mocha" + s.add_development_dependency "turn" + s.add_development_dependency "yard" + s.add_development_dependency "ruby-prof" + s.add_development_dependency "pry" + s.add_development_dependency "ci_reporter", "~> 1.9" + + if defined?(RUBY_VERSION) && RUBY_VERSION > '1.9' + s.add_development_dependency "simplecov" + s.add_development_dependency "cane" + s.add_development_dependency "require-prof" + end +end diff --git a/elasticsearch-rails/lib/elasticsearch/rails.rb b/elasticsearch-rails/lib/elasticsearch/rails.rb new file mode 100644 index 0000000000..f425f72763 --- /dev/null +++ b/elasticsearch-rails/lib/elasticsearch/rails.rb @@ -0,0 +1,7 @@ +require "elasticsearch/rails/version" + +module Elasticsearch + module Rails + # Your code goes here... + end +end diff --git a/elasticsearch-rails/lib/elasticsearch/rails/instrumentation.rb b/elasticsearch-rails/lib/elasticsearch/rails/instrumentation.rb new file mode 100644 index 0000000000..081791ab55 --- /dev/null +++ b/elasticsearch-rails/lib/elasticsearch/rails/instrumentation.rb @@ -0,0 +1,36 @@ +require 'elasticsearch/rails/instrumentation/railtie' +require 'elasticsearch/rails/instrumentation/publishers' + +module Elasticsearch + module Rails + + # This module adds support for displaying statistics about search duration in the Rails application log + # by integrating with the `ActiveSupport::Notifications` framework and `ActionController` logger. + # + # == Usage + # + # Require the component in your `application.rb` file: + # + # require 'elasticsearch/rails/instrumentation' + # + # You should see an output like this in your application log in development environment: + # + # Article Search (321.3ms) { index: "articles", type: "article", body: { query: ... } } + # + # Also, the total duration of the request to Elasticsearch is displayed in the Rails request breakdown: + # + # Completed 200 OK in 615ms (Views: 230.9ms | ActiveRecord: 0.0ms | Elasticsearch: 321.3ms) + # + # @note The displayed duration includes the HTTP transfer -- the time it took Elasticsearch + # to process your request is available in the `response.took` property. 
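+    #
+    # For example, with a hypothetical `Article` model integrated via `Elasticsearch::Model`:
+    #
+    #     response = Article.search('foo')
+    #     response.took
+    #     # => 5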
+ # + # @see Elasticsearch::Rails::Instrumentation::Publishers + # @see Elasticsearch::Rails::Instrumentation::Railtie + # + # @see http://api.rubyonrails.org/classes/ActiveSupport/Notifications.html + # + # + module Instrumentation + end + end +end diff --git a/elasticsearch-rails/lib/elasticsearch/rails/instrumentation/controller_runtime.rb b/elasticsearch-rails/lib/elasticsearch/rails/instrumentation/controller_runtime.rb new file mode 100644 index 0000000000..461387c808 --- /dev/null +++ b/elasticsearch-rails/lib/elasticsearch/rails/instrumentation/controller_runtime.rb @@ -0,0 +1,41 @@ +require 'active_support/core_ext/module/attr_internal' + +module Elasticsearch + module Rails + module Instrumentation + + # Hooks into ActionController to display Elasticsearch runtime + # + # @see https://github.com/rails/rails/blob/master/activerecord/lib/active_record/railties/controller_runtime.rb + # + module ControllerRuntime + extend ActiveSupport::Concern + + protected + + attr_internal :elasticsearch_runtime + + def cleanup_view_runtime + elasticsearch_rt_before_render = Elasticsearch::Rails::Instrumentation::LogSubscriber.reset_runtime + runtime = super + elasticsearch_rt_after_render = Elasticsearch::Rails::Instrumentation::LogSubscriber.reset_runtime + self.elasticsearch_runtime = elasticsearch_rt_before_render + elasticsearch_rt_after_render + runtime - elasticsearch_rt_after_render + end + + def append_info_to_payload(payload) + super + payload[:elasticsearch_runtime] = (elasticsearch_runtime || 0) + Elasticsearch::Rails::Instrumentation::LogSubscriber.reset_runtime + end + + module ClassMethods + def log_process_action(payload) + messages, elasticsearch_runtime = super, payload[:elasticsearch_runtime] + messages << ("Elasticsearch: %.1fms" % elasticsearch_runtime.to_f) if elasticsearch_runtime + messages + end + end + end + end + end +end diff --git a/elasticsearch-rails/lib/elasticsearch/rails/instrumentation/log_subscriber.rb b/elasticsearch-rails/lib/elasticsearch/rails/instrumentation/log_subscriber.rb new file mode 100644 index 0000000000..c02bc07049 --- /dev/null +++ b/elasticsearch-rails/lib/elasticsearch/rails/instrumentation/log_subscriber.rb @@ -0,0 +1,41 @@ +module Elasticsearch + module Rails + module Instrumentation + + # A log subscriber to attach to Elasticsearch related events + # + # @see https://github.com/rails/rails/blob/master/activerecord/lib/active_record/log_subscriber.rb + # + class LogSubscriber < ActiveSupport::LogSubscriber + def self.runtime=(value) + Thread.current["elasticsearch_runtime"] = value + end + + def self.runtime + Thread.current["elasticsearch_runtime"] ||= 0 + end + + def self.reset_runtime + rt, self.runtime = runtime, 0 + rt + end + + # Intercept `search.elasticsearch` events, and display them in the Rails log + # + def search(event) + self.class.runtime += event.duration + return unless logger.debug? + + payload = event.payload + name = "#{payload[:klass]} #{payload[:name]} (#{event.duration.round(1)}ms)" + search = payload[:search].inspect.gsub(/:(\w+)=>/, '\1: ') + + debug %Q| #{color(name, GREEN, true)} #{colorize_logging ? 
"\e[2m#{search}\e[0m" : search}| + end + end + + end + end +end + +Elasticsearch::Rails::Instrumentation::LogSubscriber.attach_to :elasticsearch diff --git a/elasticsearch-rails/lib/elasticsearch/rails/instrumentation/publishers.rb b/elasticsearch-rails/lib/elasticsearch/rails/instrumentation/publishers.rb new file mode 100644 index 0000000000..e054d53712 --- /dev/null +++ b/elasticsearch-rails/lib/elasticsearch/rails/instrumentation/publishers.rb @@ -0,0 +1,36 @@ +module Elasticsearch + module Rails + module Instrumentation + module Publishers + + # Wraps the `SearchRequest` methods to perform the instrumentation + # + # @see SearchRequest#execute_with_instrumentation! + # @see http://api.rubyonrails.org/classes/ActiveSupport/Notifications.html + # + module SearchRequest + + def self.included(base) + base.class_eval do + unless method_defined?(:execute_without_instrumentation!) + alias_method :execute_without_instrumentation!, :execute! + alias_method :execute!, :execute_with_instrumentation! + end + end + end + + # Wrap `Search#execute!` and perform instrumentation + # + def execute_with_instrumentation! + ActiveSupport::Notifications.instrument "search.elasticsearch", + name: 'Search', + klass: (self.klass.is_a?(Elasticsearch::Model::Proxy::ClassMethodsProxy) ? self.klass.target.to_s : self.klass.to_s), + search: self.definition do + execute_without_instrumentation! + end + end + end + end + end + end +end diff --git a/elasticsearch-rails/lib/elasticsearch/rails/instrumentation/railtie.rb b/elasticsearch-rails/lib/elasticsearch/rails/instrumentation/railtie.rb new file mode 100644 index 0000000000..dbcd0fc389 --- /dev/null +++ b/elasticsearch-rails/lib/elasticsearch/rails/instrumentation/railtie.rb @@ -0,0 +1,31 @@ +module Elasticsearch + module Rails + module Instrumentation + + # Rails initializer class to require Elasticsearch::Rails::Instrumentation files, + # set up Elasticsearch::Model and hook into ActionController to display Elasticsearch-related duration + # + # @see http://edgeguides.rubyonrails.org/active_support_instrumentation.html + # + class Railtie < ::Rails::Railtie + initializer "elasticsearch.instrumentation" do |app| + require 'elasticsearch/rails/instrumentation/log_subscriber' + require 'elasticsearch/rails/instrumentation/controller_runtime' + + Elasticsearch::Model::Searching::SearchRequest.class_eval do + include Elasticsearch::Rails::Instrumentation::Publishers::SearchRequest + end if defined?(Elasticsearch::Model::Searching::SearchRequest) + + Elasticsearch::Persistence::Model::Find::SearchRequest.class_eval do + include Elasticsearch::Rails::Instrumentation::Publishers::SearchRequest + end if defined?(Elasticsearch::Persistence::Model::Find::SearchRequest) + + ActiveSupport.on_load(:action_controller) do + include Elasticsearch::Rails::Instrumentation::ControllerRuntime + end + end + end + + end + end +end diff --git a/elasticsearch-rails/lib/elasticsearch/rails/lograge.rb b/elasticsearch-rails/lib/elasticsearch/rails/lograge.rb new file mode 100644 index 0000000000..a8edd80848 --- /dev/null +++ b/elasticsearch-rails/lib/elasticsearch/rails/lograge.rb @@ -0,0 +1,44 @@ +module Elasticsearch + module Rails + module Lograge + + # Rails initializer class to require Elasticsearch::Rails::Instrumentation files, + # set up Elasticsearch::Model and add Lograge configuration to display Elasticsearch-related duration + # + # Require the component in your `application.rb` file and enable Lograge: + # + # require 'elasticsearch/rails/lograge' + # + # You should see the 
full duration of the request to Elasticsearch as part of each log event:
+      #
+      #     method=GET path=/search ... status=200 duration=380.89 view=99.64 db=0.00 es=279.37
+      #
+      # @see https://github.com/roidrage/lograge
+      #
+      class Railtie < ::Rails::Railtie
+        initializer "elasticsearch.lograge" do |app|
+          require 'elasticsearch/rails/instrumentation/publishers'
+          require 'elasticsearch/rails/instrumentation/log_subscriber'
+          require 'elasticsearch/rails/instrumentation/controller_runtime'
+
+          Elasticsearch::Model::Searching::SearchRequest.class_eval do
+            include Elasticsearch::Rails::Instrumentation::Publishers::SearchRequest
+          end if defined?(Elasticsearch::Model::Searching::SearchRequest)
+
+          Elasticsearch::Persistence::Model::Find::SearchRequest.class_eval do
+            include Elasticsearch::Rails::Instrumentation::Publishers::SearchRequest
+          end if defined?(Elasticsearch::Persistence::Model::Find::SearchRequest)
+
+          ActiveSupport.on_load(:action_controller) do
+            include Elasticsearch::Rails::Instrumentation::ControllerRuntime
+          end
+
+          config.lograge.custom_options = lambda do |event|
+            { es: event.payload[:elasticsearch_runtime].to_f.round(2) }
+          end
+        end
+      end
+
+    end
+  end
+end
diff --git a/elasticsearch-rails/lib/elasticsearch/rails/tasks/import.rb b/elasticsearch-rails/lib/elasticsearch/rails/tasks/import.rb
new file mode 100644
index 0000000000..bb2f9ff3d0
--- /dev/null
+++ b/elasticsearch-rails/lib/elasticsearch/rails/tasks/import.rb
@@ -0,0 +1,112 @@
+# A collection of Rake tasks to facilitate importing data from your models into Elasticsearch.
+#
+# Add this e.g. into the `lib/tasks/elasticsearch.rake` file in your Rails application:
+#
+#     require 'elasticsearch/rails/tasks/import'
+#
+# To import the records from your `Article` model, run:
+#
+#     $ bundle exec rake environment elasticsearch:import:model CLASS='Article'
+#
+# Run this command to display usage instructions:
+#
+#     $ bundle exec rake -D elasticsearch
+#
+STDOUT.sync = true
+STDERR.sync = true
+
+begin; require 'ansi/progressbar'; rescue LoadError; end
+
+namespace :elasticsearch do
+
+  task :import => 'import:model'
+
+  namespace :import do
+    import_model_desc = <<-DESC.gsub(/    /, '')
+      Import data from your model (pass name as CLASS environment variable).
+ + $ rake environment elasticsearch:import:model CLASS='MyModel' + + Force rebuilding the index (delete and create): + $ rake environment elasticsearch:import:model CLASS='Article' FORCE=y + + Customize the batch size: + $ rake environment elasticsearch:import:model CLASS='Article' BATCH=100 + + Set target index name: + $ rake environment elasticsearch:import:model CLASS='Article' INDEX='articles-new' + + Pass an ActiveRecord scope to limit the imported records: + $ rake environment elasticsearch:import:model CLASS='Article' SCOPE='published' + DESC + desc import_model_desc + task :model do + if ENV['CLASS'].to_s == '' + puts '='*90, 'USAGE', '='*90, import_model_desc, "" + exit(1) + end + + klass = eval(ENV['CLASS'].to_s) + total = klass.count rescue nil + pbar = ANSI::Progressbar.new(klass.to_s, total) rescue nil + pbar.__send__ :show if pbar + + unless ENV['DEBUG'] + begin + klass.__elasticsearch__.client.transport.logger.level = Logger::WARN + rescue NoMethodError; end + begin + klass.__elasticsearch__.client.transport.tracer.level = Logger::WARN + rescue NoMethodError; end + end + + total_errors = klass.__elasticsearch__.import force: ENV.fetch('FORCE', false), + batch_size: ENV.fetch('BATCH', 1000).to_i, + index: ENV.fetch('INDEX', nil), + type: ENV.fetch('TYPE', nil), + scope: ENV.fetch('SCOPE', nil) do |response| + pbar.inc response['items'].size if pbar + STDERR.flush + STDOUT.flush + end + pbar.finish if pbar + + puts "[IMPORT] #{total_errors} errors occurred" unless total_errors.zero? + puts '[IMPORT] Done' + end + + desc <<-DESC.gsub(/ /, '') + Import all indices from `app/models` (or use DIR environment variable). + + $ rake environment elasticsearch:import:all DIR=app/models + DESC + task :all do + dir = ENV['DIR'].to_s != '' ? ENV['DIR'] : Rails.root.join("app/models") + + puts "[IMPORT] Loading models from: #{dir}" + Dir.glob(File.join("#{dir}/**/*.rb")).each do |path| + model_filename = path[/#{Regexp.escape(dir.to_s)}\/([^\.]+).rb/, 1] + + next if model_filename.match(/^concerns\//i) # Skip concerns/ folder + + begin + klass = model_filename.camelize.constantize + rescue NameError + require(path) ? retry : raise(RuntimeError, "Cannot load class '#{klass}'") + end + + # Skip if the class doesn't have Elasticsearch integration + next unless klass.respond_to?(:__elasticsearch__) + + puts "[IMPORT] Processing model: #{klass}..." 
+ + ENV['CLASS'] = klass.to_s + Rake::Task["elasticsearch:import:model"].invoke + Rake::Task["elasticsearch:import:model"].reenable + puts + end + end + + end + +end diff --git a/elasticsearch-rails/lib/elasticsearch/rails/version.rb b/elasticsearch-rails/lib/elasticsearch/rails/version.rb new file mode 100644 index 0000000000..88b2dd7589 --- /dev/null +++ b/elasticsearch-rails/lib/elasticsearch/rails/version.rb @@ -0,0 +1,5 @@ +module Elasticsearch + module Rails + VERSION = "0.1.9" + end +end diff --git a/elasticsearch-rails/lib/rails/templates/01-basic.rb b/elasticsearch-rails/lib/rails/templates/01-basic.rb new file mode 100644 index 0000000000..129d5ffd75 --- /dev/null +++ b/elasticsearch-rails/lib/rails/templates/01-basic.rb @@ -0,0 +1,335 @@ +# ===================================================================================================== +# Template for generating a no-frills Rails application with support for Elasticsearch full-text search +# ===================================================================================================== +# +# This file creates a basic, fully working Rails application with support for Elasticsearch full-text +# search via the `elasticsearch-rails` gem; https://github.com/elasticsearch/elasticsearch-rails. +# +# Requirements: +# ------------- +# +# * Git +# * Ruby >= 1.9.3 +# * Rails >= 4 +# * Java >= 7 (for Elasticsearch) +# +# Usage: +# ------ +# +# $ rails new searchapp --skip --skip-bundle --template https://raw.github.com/elasticsearch/elasticsearch-rails/master/elasticsearch-rails/lib/rails/templates/01-basic.rb +# +# ===================================================================================================== + +require 'uri' +require 'net/http' + +at_exit do + pid = File.read("#{destination_root}/tmp/pids/elasticsearch.pid") rescue nil + if pid + say_status "Stop", "Elasticsearch", :yellow + run "kill #{pid}" + end +end + +run "touch tmp/.gitignore" + +append_to_file ".gitignore", "vendor/elasticsearch-1.0.1/\n" + +git :init +git add: "." +git commit: "-m 'Initial commit: Clean application'" + +# ----- Download Elasticsearch -------------------------------------------------------------------- + +unless (Net::HTTP.get(URI.parse('http://localhost:9200')) rescue false) + COMMAND = <<-COMMAND.gsub(/^ /, '') + curl -# -O "http://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.0.1.tar.gz" + tar -zxf elasticsearch-1.0.1.tar.gz + rm -f elasticsearch-1.0.1.tar.gz + ./elasticsearch-1.0.1/bin/elasticsearch -d -p #{destination_root}/tmp/pids/elasticsearch.pid + COMMAND + + puts "\n" + say_status "ERROR", "Elasticsearch not running!\n", :red + puts '-'*80 + say_status '', "It appears that Elasticsearch is not running on this machine." + say_status '', "Is it installed? 
Do you want me to install it for you with this command?\n\n"
+  COMMAND.each_line { |l| say_status '', "$ #{l}" }
+  puts
+  say_status '', "(To uninstall, just remove the generated application directory.)"
+  puts '-'*80, ''
+
+  if yes?("Install Elasticsearch?", :bold)
+    puts
+    say_status "Install", "Elasticsearch", :yellow
+
+    commands = COMMAND.split("\n")
+    exec     = commands.pop
+    inside("vendor") do
+      commands.each { |command| run command }
+      run "(#{exec})" # Launch Elasticsearch in subshell
+    end
+  end
+end unless ENV['RAILS_NO_ES_INSTALL']
+
+# ----- Add README --------------------------------------------------------------------------------
+
+puts
+say_status "README", "Adding Readme...\n", :yellow
+puts '-'*80, ''; sleep 0.25
+
+remove_file 'README.rdoc'
+
+create_file 'README.rdoc', <<-README
+= Ruby on Rails and Elasticsearch: Example application
+
+This application is an example of integrating the {Elasticsearch}[http://www.elasticsearch.org]
+search engine with the {Ruby On Rails}[http://rubyonrails.org] web framework.
+
+It has been generated by application templates available at
+https://github.com/elasticsearch/elasticsearch-rails/tree/master/elasticsearch-rails/lib/rails/templates.
+
+== [1] Basic
+
+The `basic` version provides a simple integration for a simple Rails model, `Article`, showing how
+to include the search engine support in your model, automatically index changes to records,
+and use a form to perform simple search requests.
+
+README
+
+
+git add: "."
+git commit: "-m '[01] Added README for the application'"
+
+# ----- Use Thin ----------------------------------------------------------------------------------
+
+begin
+  require 'thin'
+  puts
+  say_status "Rubygems", "Adding Thin into Gemfile...\n", :yellow
+  puts '-'*80, '';
+
+  gem 'thin'
+rescue LoadError
+end
+
+# ----- Auxiliary gems ----------------------------------------------------------------------------
+
+gem 'mocha', group: 'test', require: 'mocha/api'
+
+# ----- Remove CoffeeScript, Sass and "all that jazz" ---------------------------------------------
+
+comment_lines   'Gemfile', /gem 'coffee/
+comment_lines   'Gemfile', /gem 'sass/
+comment_lines   'Gemfile', /gem 'uglifier/
+uncomment_lines 'Gemfile', /gem 'therubyracer/
+
+# ----- Add gems into Gemfile ---------------------------------------------------------------------
+
+puts
+say_status "Rubygems", "Adding Elasticsearch libraries into Gemfile...\n", :yellow
+puts '-'*80, ''; sleep 0.75
+
+gem 'elasticsearch', git: 'git://github.com/elasticsearch/elasticsearch-ruby.git'
+gem 'elasticsearch-model', git: 'git://github.com/elasticsearch/elasticsearch-rails.git'
+gem 'elasticsearch-rails', git: 'git://github.com/elasticsearch/elasticsearch-rails.git'
+
+
+git add: "Gemfile*"
+git commit: "-m 'Added libraries into Gemfile'"
+
+# ----- Disable asset logging in development ------------------------------------------------------
+
+puts
+say_status "Application", "Disabling asset logging in development...\n", :yellow
+puts '-'*80, ''; sleep 0.25
+
+environment 'config.assets.logger = false', env: 'development'
+gem 'quiet_assets', group: "development"
+
+git add: "Gemfile*"
+git add: "config/"
+git commit: "-m 'Disabled asset logging in development'"
+
+# ----- Install gems ------------------------------------------------------------------------------
+
+puts
+say_status "Rubygems", "Installing Rubygems...", :yellow
+puts '-'*80, ''
+
+run "bundle install"
+
+# ----- Generate Article resource
----------------------------------------------------------------- + +puts +say_status "Model", "Generating the Article resource...", :yellow +puts '-'*80, ''; sleep 0.75 + +generate :scaffold, "Article title:string content:text published_on:date" +route "root to: 'articles#index'" +rake "db:migrate" + +git add: "." +git commit: "-m 'Added the generated Article resource'" + +# ----- Add Elasticsearch integration into the model ---------------------------------------------- + +puts +say_status "Model", "Adding search support into the Article model...", :yellow +puts '-'*80, ''; sleep 0.25 + +run "rm -f app/models/article.rb" +file 'app/models/article.rb', <<-CODE +class Article < ActiveRecord::Base + include Elasticsearch::Model + include Elasticsearch::Model::Callbacks + #{'attr_accessible :title, :content, :published_on' if Rails::VERSION::STRING < '4'} +end +CODE + +git commit: "-a -m 'Added Elasticsearch support into the Article model'" + +# ----- Add Elasticsearch integration into the interface ------------------------------------------ + +puts +say_status "Controller", "Adding controller action, route, and HTML for searching...", :yellow +puts '-'*80, ''; sleep 0.25 + +inject_into_file 'app/controllers/articles_controller.rb', before: %r|^\s*# GET /articles/1$| do + <<-CODE + + # GET /articles/search + def search + @articles = Article.search(params[:q]).records + + render action: "index" + end + + CODE +end + +inject_into_file 'app/views/articles/index.html.erb', after: %r{

<h1>Listing articles</h1>}i do
+  <<-CODE
+
+  <hr>
+
+  <%= form_tag search_articles_path, method: 'get' do %>
+    <%= label_tag :query %>
+    <%= text_field_tag :q, params[:q] %>
+    <%= submit_tag :search %>
+  <% end %>
+
+  <hr>
+
+  CODE
+end
+
+inject_into_file 'app/views/articles/index.html.erb', after: %r{<%= link_to 'New Article', new_article_path %>} do
+  <<-CODE
+  <%= link_to 'All Articles', articles_path if params[:q] %>
+  CODE
+end
+
+gsub_file 'config/routes.rb', %r{resources :articles$}, <<-CODE
+resources :articles do
+  collection { get :search }
+end
+CODE
+
+gsub_file "#{Rails::VERSION::STRING > '4' ? 'test/controllers' : 'test/functional'}/articles_controller_test.rb", %r{setup do.*?end}m, <<-CODE
+setup do
+  @article = articles(:one)
+
+  Article.__elasticsearch__.import
+  Article.__elasticsearch__.refresh_index!
+end
+CODE
+
+inject_into_file "#{Rails::VERSION::STRING > '4' ? 'test/controllers' : 'test/functional'}/articles_controller_test.rb", after: %r{test "should get index" do.*?end}m do
+  <<-CODE
+
+
+  test "should get search results" do
+    get :search, q: 'mystring'
+    assert_response :success
+    assert_not_nil assigns(:articles)
+    assert_equal 2, assigns(:articles).size
+  end
+  CODE
+end
+
+git commit: "-a -m 'Added search form and controller action'"
+
+# ----- Seed the database -------------------------------------------------------------------------
+
+puts
+say_status "Database", "Seeding the database with data...", :yellow
+puts '-'*80, ''; sleep 0.25
+
+remove_file "db/seeds.rb"
+create_file 'db/seeds.rb', %q{
+contents = [
+'Lorem ipsum dolor sit amet.',
+'Consectetur adipisicing elit, sed do eiusmod tempor incididunt.',
+'Labore et dolore magna aliqua.',
+'Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris.',
+'Excepteur sint occaecat cupidatat non proident.'
+]
+
+puts "Deleting all articles..."
+Article.delete_all
+
+unless ENV['COUNT']
+
+  puts "Creating articles..."
+  %w[ One Two Three Four Five ].each_with_index do |title, i|
+    Article.create title: title, content: contents[i], published_on: i.days.ago.utc
+  end
+
+else
+
+  print "Generating articles..."
+  (1..ENV['COUNT'].to_i).each_with_index do |title, i|
+    Article.create title: "Title #{title}", content: 'Lorem ipsum dolor', published_on: i.days.ago.utc
+    print '.' if i % (ENV['COUNT'].to_i / 10) == 0
+  end
+  puts "\n"
+
+end
+}
+
+run "rails runner 'Article.__elasticsearch__.create_index! force: true'"
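+
+# NOTE: The index is created up front (with `force: true` to drop any existing
+# one) so that the `Elasticsearch::Model::Callbacks` included in `Article`
+# index each record as the seed script creates it.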
force: true'" +rake "db:seed" + +git add: "db/seeds.rb" +git commit: "-m 'Added the database seeding script'" + +# ----- Print Git log ----------------------------------------------------------------------------- + +puts +say_status "Git", "Details about the application:", :yellow +puts '-'*80, '' + +git tag: "basic" +git log: "--reverse --oneline" + +# ----- Start the application --------------------------------------------------------------------- + +unless ENV['RAILS_NO_SERVER_START'] + require 'net/http' + if (begin; Net::HTTP.get(URI('http://localhost:3000')); rescue Errno::ECONNREFUSED; false; rescue Exception; true; end) + puts "\n" + say_status "ERROR", "Some other application is running on port 3000!\n", :red + puts '-'*80 + + port = ask("Please provide free port:", :bold) + else + port = '3000' + end + + puts "", "="*80 + say_status "DONE", "\e[1mStarting the application.\e[0m", :yellow + puts "="*80, "" + + run "rails server --port=#{port}" +end diff --git a/elasticsearch-rails/lib/rails/templates/02-pretty.rb b/elasticsearch-rails/lib/rails/templates/02-pretty.rb new file mode 100644 index 0000000000..7fd6e5048f --- /dev/null +++ b/elasticsearch-rails/lib/rails/templates/02-pretty.rb @@ -0,0 +1,311 @@ +# $ rails new searchapp --skip --skip-bundle --template https://raw.github.com/elasticsearch/elasticsearch-rails/master/elasticsearch-rails/lib/rails/templates/02-pretty.rb + +unless File.read('README.rdoc').include? '== [1] Basic' + say_status "ERROR", "You have to run the 01-basic.rb template first.", :red + exit(1) +end + +puts +say_status "README", "Updating Readme...\n", :yellow +puts '-'*80, ''; sleep 0.25 + +append_to_file 'README.rdoc', <<-README + +== [2] Pretty + +The `pretty` template builds on the `basic` version and brings couple of improvements: + +* Using the [Bootstrap](http://getbootstrap.com) framework to enhance the visual style of the application +* Using an `Article.search` class method to customize the default search definition +* Highlighting matching phrases in search results +* Paginating results with Kaminari + +README + +git add: "README.rdoc" +git commit: "-m '[02] Updated the application README'" + +# ----- Update application.rb --------------------------------------------------------------------- + +puts +say_status "Rubygems", "Adding Rails logger integration...\n", :yellow +puts '-'*80, ''; sleep 0.25 + +insert_into_file 'config/application.rb', + "\n\nrequire 'elasticsearch/rails/instrumentation'", + after: /Bundler\.require.+$/ + +git add: "config/application.rb" +git commit: "-m 'Added the Rails logger integration to application.rb'" + +# ----- Add gems into Gemfile --------------------------------------------------------------------- + +puts +say_status "Rubygems", "Adding Rubygems into Gemfile...\n", :yellow +puts '-'*80, ''; sleep 0.25 + +# NOTE: Kaminari has to be loaded before Elasticsearch::Model so the callbacks are executed +# +insert_into_file 'Gemfile', <<-CODE, before: /gem ["']elasticsearch["'].+$/ + +# NOTE: Kaminari has to be loaded before Elasticsearch::Model so the callbacks are executed +gem 'kaminari' + +CODE + +run "bundle install" + +git add: "Gemfile*" +git commit: "-m 'Added the Kaminari gem'" + +# ----- Add `Article.search` class method --------------------------------------------------------- + +puts +say_status "Model", "Adding a `Article.search` class method...\n", :yellow +puts '-'*80, ''; sleep 0.5 + +insert_into_file 'app/models/article.rb', <<-CODE, after: 'include Elasticsearch::Model::Callbacks' + + + def 
+    __elasticsearch__.search(
+      {
+        query: {
+          multi_match: {
+            query: query,
+            fields: ['title^10', 'content']
+          }
+        },
+        highlight: {
+          pre_tags: ['<em class="label label-highlight">'],
+          post_tags: ['</em>'],
+          fields: {
+            title:   { number_of_fragments: 0 },
+            content: { fragment_size: 25 }
+          }
+        }
+      }
+    )
+  end
+CODE
+
+insert_into_file "#{Rails::VERSION::STRING > '4' ? 'test/models' : 'test/unit' }/article_test.rb", <<-CODE, after: /class ArticleTest < ActiveSupport::TestCase$/
+
+  teardown do
+    Article.__elasticsearch__.unstub(:search)
+  end
+
+CODE
+
+gsub_file "#{Rails::VERSION::STRING > '4' ? 'test/models' : 'test/unit' }/article_test.rb", %r{# test "the truth" do.*?# end}m, <<-CODE
+
+  test "has a search method delegating to __elasticsearch__" do
+    Article.__elasticsearch__.expects(:search).with do |definition|
+      assert_equal 'foo', definition[:query][:multi_match][:query]
+      true
+    end
+
+    Article.search 'foo'
+  end
+CODE
+
+git add: "app/models/article.rb"
+git add: "test/**/article_test.rb"
+git commit: "-m 'Added an `Article.search` method'"
+
+# ----- Add loading Bootstrap assets --------------------------------------------------------------
+
+puts
+say_status "Bootstrap", "Adding Bootstrap asset links into the 'application' layout...\n", :yellow
+puts '-'*80, ''; sleep 0.5
+
+gsub_file 'app/views/layouts/application.html.erb', %r{<%= yield %>}, <<-CODE unless File.read('app/views/layouts/application.html.erb').include?('class="container"')
+<div class="container">
+<%= yield %>
+</div>
+CODE
+
+insert_into_file 'app/views/layouts/application.html.erb', <<-CODE, before: '</head>'
+
+
+CODE
+
+git commit: "-a -m 'Added loading Bootstrap assets in the application layout'"
+
+# ----- Customize the search form -----------------------------------------------------------------
+
+puts
+say_status "Bootstrap", "Customizing the index page...\n", :yellow
+puts '-'*80, ''; sleep 0.5
+
+gsub_file 'app/views/articles/index.html.erb', %r{<%= label_tag .* :search %>}m do |match|
+<<-CODE
+<div class="input-group">
+  <%= text_field_tag :q, params[:q], class: 'form-control', placeholder: 'Search...' %>
+
+  <span class="input-group-btn">
+    <button type="submit" class="btn btn-default">
+      <span class="glyphicon glyphicon-search"></span>
+    </button>
+  </span>
+</div>
+CODE +end + +# ----- Customize the header ----------------------------------------------------------------- + +gsub_file 'app/views/articles/index.html.erb', %r{

<h1>Listing articles</h1>} do |match|
+  "<h1><%= controller.action_name == 'search' ? 'Searching articles' : 'Listing articles' %></h1>"
+end
+
+# ----- Customize the results listing -------------------------------------------------------------
+
+gsub_file 'app/views/articles/index.html.erb', %r{} do |match|
+  '
' +end + +gsub_file 'app/views/articles/index.html.erb', %r{$} do |match| + "" +end + +gsub_file 'app/views/articles/index.html.erb', %r{$} do |match| + "" +end + +git commit: "-a -m 'Added highlighting for matches'" + +# ----- Paginate the results ---------------------------------------------------------------------- + +gsub_file 'app/controllers/articles_controller.rb', %r{@articles = Article.all} do |match| + "@articles = Article.page(params[:page])" +end + +gsub_file 'app/controllers/articles_controller.rb', %r{@articles = Article.search\(params\[\:q\]\).records} do |match| + "@articles = Article.search(params[:q]).page(params[:page]).records" +end + +insert_into_file 'app/views/articles/index.html.erb', after: '
<%= link_to [^%]+} do |match|
+  match.gsub!('<td>', '')
+  match.include?("btn") ? match : (match + ", class: 'btn btn-default btn-xs'")
+end
+
+gsub_file 'app/views/articles/index.html.erb', %r{
\s*(<\%= link_to 'New Article'.*)}m do |content| + replace = content.match(%r{
\s*(<\%= link_to 'New Article'.*)}m)[1] + <<-END.gsub(/^ /, '') +
+ +

+ #{replace} +

+ END +end + +gsub_file 'app/views/articles/index.html.erb', %r{<%= link_to 'New Article',\s*new_article_path} do |match| + return match if match.include?('btn') + match + ", class: 'btn btn-primary btn-xs', style: 'color: #fff'" +end + +gsub_file 'app/views/articles/index.html.erb', %r{<%= link_to 'All Articles',\s*articles_path} do |match| + return match if match.include?('btn') + "\n " + match + ", class: 'btn btn-primary btn-xs', style: 'color: #fff'" +end + +git add: "app/views" +git commit: "-m 'Refactored the articles listing to use Bootstrap components'" + +# ----- Use highlighted excerpts in the listing --------------------------------------------------- + +gsub_file 'app/views/articles/index.html.erb', %r{<% @articles.each do \|article\| %>$} do |match| + "<% @articles.__send__ controller.action_name == 'search' ? :each_with_hit : :each do |article, hit| %>" +end + +gsub_file 'app/views/articles/index.html.erb', %r{
<td><%= article.title %></td>} do |match|
+  "<td><%= hit.try(:highlight).try(:title) ? hit.highlight.title.join.html_safe : article.title %></td>"
+end
+
+gsub_file 'app/views/articles/index.html.erb', %r{<td><%= article.content %></td>} do |match|
+  "<td><%= hit.try(:highlight).try(:content) ? hit.highlight.content.join('…').html_safe : article.content %></td>"
+end
+
+insert_into_file 'app/views/articles/index.html.erb', after: '</table>' do
+  <<-CODE.gsub(/^  /, '')
+
+  <%= paginate @articles %>
+
+  CODE
+end
+
+generate "kaminari:views", "bootstrap2", "--force"
+
+gsub_file 'app/views/kaminari/_paginator.html.erb', %r{
    }, '
      ' + +git add: "." +git commit: "-m 'Added pagination to articles listing'" + +# ----- Custom CSS -------------------------------------------------------------------------------- + +puts +say_status "CSS", "Adding custom styles...\n", :yellow +puts '-'*80, ''; sleep 0.5 + +append_to_file 'app/assets/stylesheets/application.css' do + unless File.read('app/assets/stylesheets/application.css').include?('.label-highlight') +<<-CODE + +.label-highlight { + font-size: 100% !important; + font-weight: inherit !important; + font-style: inherit !important; + color: #333 !important; + background: #fff401 !important; +} + +div.pagination { + text-align: center; + display: block; +} + +div.pagination ul { + display: inline-block; +} + +CODE + else + '' + end +end + +git commit: "-a -m 'Added custom style definitions into application.css'" + +# ----- Generate 1,000 articles ------------------------------------------------------------------- + +puts +say_status "Database", "Creating 1,000 articles...", :yellow +puts '-'*80, ''; + +run "rails runner 'Article.__elasticsearch__.create_index! force: true'" +rake "db:seed COUNT=1_000" + +# ----- Print Git log ----------------------------------------------------------------------------- + +puts +say_status "Git", "Details about the application:", :yellow +puts '-'*80, '' + +git tag: "pretty" +git log: "--reverse --oneline pretty...basic" + +# ----- Start the application --------------------------------------------------------------------- + +unless ENV['RAILS_NO_SERVER_START'] + require 'net/http' + if (begin; Net::HTTP.get(URI('http://localhost:3000')); rescue Errno::ECONNREFUSED; false; rescue Exception; true; end) + puts "\n" + say_status "ERROR", "Some other application is running on port 3000!\n", :red + puts '-'*80 + + port = ask("Please provide free port:", :bold) + else + port = '3000' + end + + puts "", "="*80 + say_status "DONE", "\e[1mStarting the application. Open http://localhost:#{port}\e[0m", :yellow + puts "="*80, "" + + run "rails server --port=#{port}" +end diff --git a/elasticsearch-rails/lib/rails/templates/03-expert.rb b/elasticsearch-rails/lib/rails/templates/03-expert.rb new file mode 100644 index 0000000000..ec098a4209 --- /dev/null +++ b/elasticsearch-rails/lib/rails/templates/03-expert.rb @@ -0,0 +1,349 @@ +# $ rails new searchapp --skip --skip-bundle --template https://raw.github.com/elasticsearch/elasticsearch-rails/master/elasticsearch-rails/lib/rails/templates/03-expert.rb + +unless File.read('README.rdoc').include? '== [2] Pretty' + say_status "ERROR", "You have to run the 01-basic.rb and 02-pretty.rb templates first.", :red + exit(1) +end + +begin + require 'redis' +rescue LoadError + say_status "ERROR", "Please install the 'redis' gem before running this template", :red + exit(1) +end + +begin + Redis.new.info +rescue Redis::CannotConnectError + puts + say_status "ERROR", "Redis not available", :red + say_status "", "This template uses an asynchronous indexer via Sidekiq, and requires a running Redis server." + exit(1) +end + +append_to_file 'README.rdoc', <<-README + +== [3] Expert + +The `expert` template changes to a complex database schema with model relationships: article belongs +to a category, has many authors and comments. 
+ +* The Elasticsearch integration is refactored into the `Searchable` concern +* A complex mapping for the index is defined +* A custom serialization is defined in `Article#as_indexed_json` +* The `search` method is amended with facets and suggestions +* A [Sidekiq](http://sidekiq.org) worker for handling index updates in background is added +* A custom `SearchController` with associated view is added +* A Rails initializer is added to customize the Elasticsearch client configuration +* Seed script and example data from New York Times is added + +README + +git add: "README.rdoc" +git commit: "-m '[03] Updated the application README'" + +# ----- Add gems into Gemfile --------------------------------------------------------------------- + +puts +say_status "Rubygems", "Adding Rubygems into Gemfile...\n", :yellow +puts '-'*80, ''; sleep 0.25 + +gem "oj" + +git add: "Gemfile*" +git commit: "-m 'Added Ruby gems'" + +# ----- Customize the Rails console --------------------------------------------------------------- + +puts +say_status "Rails", "Customizing `rails console`...\n", :yellow +puts '-'*80, ''; sleep 0.25 + + +gem "pry", group: 'development' + +environment nil, env: 'development' do + %q{ + console do + config.console = Pry + Pry.config.history.file = Rails.root.join('tmp/console_history.rb').to_s + Pry.config.prompt = [ proc { |obj, nest_level, _| "(#{obj})> " }, + proc { |obj, nest_level, _| ' '*obj.to_s.size + ' '*(nest_level+1) + '| ' } ] + end + } +end + +git add: "Gemfile*" +git add: "config/" +git commit: "-m 'Added Pry as the console for development'" + +# ----- Disable asset logging in development ------------------------------------------------------ + +puts +say_status "Application", "Disabling asset logging in development...\n", :yellow +puts '-'*80, ''; sleep 0.25 + +environment 'config.assets.logger = false', env: 'development' +gem 'quiet_assets', group: "development" + +git add: "Gemfile*" +git add: "config/" +git commit: "-m 'Disabled asset logging in development'" + +# ----- Run bundle install ------------------------------------------------------------------------ + +run "bundle install" + +# ----- Define and generate schema ---------------------------------------------------------------- + +puts +say_status "Models", "Adding complex schema...\n", :yellow +puts '-'*80, '' + +generate :scaffold, "Category title" +generate :scaffold, "Author first_name last_name" +generate :scaffold, "Authorship article:references author:references" + +generate :model, "Comment body:text user:string user_location:string stars:integer pick:boolean article:references" +generate :migration, "CreateArticlesCategories article:references category:references" + +rake "db:drop" +rake "db:migrate" + +insert_into_file "app/models/category.rb", :before => "end" do + <<-CODE + has_and_belongs_to_many :articles + CODE +end + +insert_into_file "app/models/author.rb", :before => "end" do + <<-CODE + has_many :authorships + + def full_name + [first_name, last_name].join(' ') + end + CODE +end + +gsub_file "app/models/authorship.rb", %r{belongs_to :article$}, <<-CODE +belongs_to :article, touch: true +CODE + +insert_into_file "app/models/article.rb", after: "ActiveRecord::Base" do + <<-CODE + + has_and_belongs_to_many :categories, after_add: [ lambda { |a,c| Indexer.perform_async(:update, a.class.to_s, a.id) } ], + after_remove: [ lambda { |a,c| Indexer.perform_async(:update, a.class.to_s, a.id) } ] + has_many :authorships + has_many :authors, through: :authorships + has_many :comments + CODE +end + 
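+# NOTE: The `after_add`/`after_remove` callbacks above, and `touch: true` on the
+# `belongs_to` associations below, keep the search index in sync with associated
+# records. A rough sketch of the flow (assuming the Sidekiq `Indexer` worker
+# defined later in this template is running):
+#
+#   article.categories << Category.first
+#   # => enqueues Indexer.perform_async(:update, 'Article', article.id),
+#   #    which calls article.__elasticsearch__.update_document
+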
+gsub_file "app/models/comment.rb", %r{belongs_to :article$}, <<-CODE +belongs_to :article, touch: true +CODE + +git add: "." +git commit: "-m 'Generated Category, Author and Comment resources'" + +# ----- Add the `abstract` column ----------------------------------------------------------------- + +puts +say_status "Model", "Adding the `abstract` column to Article...\n", :yellow +puts '-'*80, '' + +generate :migration, "AddColumnsToArticle abstract:text url:string shares:integer" +rake "db:migrate" + +git add: "db/" +git commit: "-m 'Added additional columns to Article'" + +# ----- Move the model integration into a concern ------------------------------------------------- + +puts +say_status "Model", "Refactoring the model integration...\n", :yellow +puts '-'*80, ''; sleep 0.25 + +remove_file 'app/models/article.rb' +create_file 'app/models/article.rb', <<-CODE +class Article < ActiveRecord::Base + include Searchable +end +CODE + +gsub_file "#{Rails::VERSION::STRING > '4' ? 'test/models' : 'test/unit' }/article_test.rb", %r{assert_equal 'foo', definition\[:query\]\[:multi_match\]\[:query\]}, "assert_equal 'foo', definition.to_hash[:query][:bool][:should][0][:multi_match][:query]" + +# copy_file File.expand_path('../searchable.rb', __FILE__), 'app/models/concerns/searchable.rb' +get 'https://raw.githubusercontent.com/elastic/elasticsearch-rails/master/elasticsearch-rails/lib/rails/templates/searchable.rb', + 'app/models/concerns/searchable.rb' + +insert_into_file "app/models/article.rb", after: "ActiveRecord::Base" do + <<-CODE + + has_and_belongs_to_many :categories, after_add: [ lambda { |a,c| Indexer.perform_async(:update, a.class.to_s, a.id) } ], + after_remove: [ lambda { |a,c| Indexer.perform_async(:update, a.class.to_s, a.id) } ] + has_many :authorships + has_many :authors, through: :authorships + has_many :comments + + CODE +end + +git add: "app/models/ test/models" +git commit: "-m 'Refactored the Elasticsearch integration into a concern\n\nSee:\n\n* http://37signals.com/svn/posts/3372-put-chubby-models-on-a-diet-with-concerns\n* http://joshsymonds.com/blog/2012/10/25/rails-concerns-v-searchable-with-elasticsearch/'" + +# ----- Add Sidekiq indexer ----------------------------------------------------------------------- + +puts +say_status "Application", "Adding Sidekiq worker for updating the index...\n", :yellow +puts '-'*80, ''; sleep 0.25 + +gem "sidekiq" + +run "bundle install" + +# copy_file File.expand_path('../indexer.rb', __FILE__), 'app/workers/indexer.rb' +get 'https://raw.githubusercontent.com/elastic/elasticsearch-rails/master/elasticsearch-rails/lib/rails/templates/indexer.rb', + 'app/workers/indexer.rb' + +insert_into_file "test/test_helper.rb", + "require 'sidekiq/testing'\n\n", + before: "class ActiveSupport::TestCase\n" + +git add: "Gemfile* app/workers/ test/test_helper.rb" +git commit: "-m 'Added a Sidekiq indexer\n\nRun:\n\n $ bundle exec sidekiq --queue elasticsearch --verbose\n\nSee http://sidekiq.org'" + +# ----- Add SearchController ----------------------------------------------------------------------- + +puts +say_status "Controllers", "Adding SearchController...\n", :yellow +puts '-'*80, ''; sleep 0.25 + +create_file 'app/controllers/search_controller.rb' do + <<-CODE.gsub(/^ /, '') + class SearchController < ApplicationController + def index + options = { + category: params[:c], + author: params[:a], + published_week: params[:w], + published_day: params[:d], + sort: params[:s], + comments: params[:comments] + } + @articles = Article.search(params[:q], 
options).page(params[:page]).results + end + end + + CODE +end + +# copy_file File.expand_path('../search_controller_test.rb', __FILE__), 'test/controllers/search_controller_test.rb' +get 'https://raw.githubusercontent.com/elastic/elasticsearch-rails/master/elasticsearch-rails/lib/rails/templates/search_controller_test.rb', + 'test/controllers/search_controller_test.rb' + +route "get '/search', to: 'search#index', as: 'search'" +gsub_file 'config/routes.rb', %r{root to: 'articles#index'$}, "root to: 'search#index'" + +# copy_file File.expand_path('../index.html.erb', __FILE__), 'app/views/search/index.html.erb' +get 'https://raw.githubusercontent.com/elastic/elasticsearch-rails/master/elasticsearch-rails/lib/rails/templates/index.html.erb', + 'app/views/search/index.html.erb' + +# copy_file File.expand_path('../search.css', __FILE__), 'app/assets/stylesheets/search.css' +get 'https://raw.githubusercontent.com/elastic/elasticsearch-rails/master/elasticsearch-rails/lib/rails/templates/search.css', + 'app/assets/stylesheets/search.css' + +git add: "app/controllers/ test/controllers/ config/routes.rb" +git add: "app/views/search/ app/assets/stylesheets/search.css" +git commit: "-m 'Added SearchController#index'" + +# ----- Add initializer --------------------------------------------------------------------------- + +puts +say_status "Application", "Adding Elasticsearch configuration in an initializer...\n", :yellow +puts '-'*80, ''; sleep 0.5 + +create_file 'config/initializers/elasticsearch.rb', <<-CODE +# Connect to specific Elasticsearch cluster +ELASTICSEARCH_URL = ENV['ELASTICSEARCH_URL'] || 'http://localhost:9200' + +Elasticsearch::Model.client = Elasticsearch::Client.new host: ELASTICSEARCH_URL + +# Print Curl-formatted traces in development into a file +# +if Rails.env.development? 
+  tracer = ActiveSupport::Logger.new('log/elasticsearch.log')
+  tracer.level = Logger::DEBUG
+end
+
+Elasticsearch::Model.client.transport.tracer = tracer
+CODE
+
+git add: "config/initializers"
+git commit: "-m 'Added Rails initializer with Elasticsearch configuration'"
+
+# ----- Add Rake tasks ----------------------------------------------------------------------------
+
+puts
+say_status "Application", "Adding Elasticsearch Rake tasks...\n", :yellow
+puts '-'*80, ''; sleep 0.5
+
+create_file 'lib/tasks/elasticsearch.rake', <<-CODE
+require 'elasticsearch/rails/tasks/import'
+CODE
+
+git add: "lib/tasks"
+git commit: "-m 'Added Rake tasks for Elasticsearch'"
+
+# ----- Insert and index data ---------------------------------------------------------------------
+
+puts
+say_status "Database", "Re-creating the database with data and importing into Elasticsearch...", :yellow
+puts '-'*80, ''; sleep 0.25
+
+# copy_file File.expand_path('../articles.yml.gz', __FILE__), 'db/articles.yml.gz'
+get 'https://raw.githubusercontent.com/elastic/elasticsearch-rails/master/elasticsearch-rails/lib/rails/templates/articles.yml.gz',
+    'db/articles.yml.gz'
+
+remove_file 'db/seeds.rb'
+# copy_file File.expand_path('../seeds.rb', __FILE__), 'db/seeds.rb'
+get 'https://raw.githubusercontent.com/elastic/elasticsearch-rails/master/elasticsearch-rails/lib/rails/templates/seeds.rb',
+    'db/seeds.rb'
+
+rake "db:reset"
+rake "environment elasticsearch:import:model CLASS='Article' BATCH=100 FORCE=y"
+
+git add: "db/seeds.rb db/articles.yml.gz"
+git commit: "-m 'Added a seed script and source data'"
+
+# ----- Print Git log -----------------------------------------------------------------------------
+
+puts
+say_status "Git", "Details about the application:", :yellow
+puts '-'*80, ''
+
+git tag: "expert"
+git log: "--reverse --oneline HEAD...pretty"
+
+# ----- Start the application ---------------------------------------------------------------------
+
+unless ENV['RAILS_NO_SERVER_START']
+  require 'net/http'
+  if (begin; Net::HTTP.get(URI('http://localhost:3000')); rescue Errno::ECONNREFUSED; false; rescue Exception; true; end)
+    puts "\n"
+    say_status "ERROR", "Some other application is running on port 3000!\n", :red
+    puts '-'*80
+
+    port = ask("Please provide free port:", :bold)
+  else
+    port = '3000'
+  end
+
+  puts "", "="*80
+  say_status "DONE", "\e[1mStarting the application. Open http://localhost:#{port}\e[0m", :yellow
+  puts "="*80, ""
+
+  run "rails server --port=#{port}"
+end

diff --git a/elasticsearch-rails/lib/rails/templates/04-dsl.rb b/elasticsearch-rails/lib/rails/templates/04-dsl.rb
new file mode 100644
index 0000000000..232903e33a
--- /dev/null
+++ b/elasticsearch-rails/lib/rails/templates/04-dsl.rb
@@ -0,0 +1,131 @@
+# $ rails new searchapp --skip --skip-bundle --template https://raw.githubusercontent.com/elastic/elasticsearch-rails/master/elasticsearch-rails/lib/rails/templates/04-dsl.rb
+
+unless File.read('README.rdoc').include? '== [3] Expert'
+  say_status "ERROR", "You have to run the 01-basic.rb, 02-pretty.rb and 03-expert.rb templates first.", :red
+  exit(1)
+end
+
+append_to_file 'README.rdoc', <<-README
+
+== [4] DSL
+
+The `dsl` template refactors the search definition in SearchController#index
+to use the [`elasticsearch-dsl`](https://github.com/elastic/elasticsearch-ruby/tree/dsl/elasticsearch-dsl)
+Rubygem for better expressivity and readability of the code.
+ +README + +git add: "README.rdoc" +git commit: "-m '[03] Updated the application README'" + +run 'rm -f app/assets/stylesheets/*.scss' +run 'rm -f app/assets/javascripts/*.coffee' + +# ----- Add gems into Gemfile --------------------------------------------------------------------- + +puts +say_status "Rubygems", "Adding Rubygems into Gemfile...\n", :yellow +puts '-'*80, ''; sleep 0.25 + +gem "elasticsearch-dsl", git: "git://github.com/elastic/elasticsearch-ruby.git" + +git add: "Gemfile*" +git commit: "-m 'Added the `elasticsearch-dsl` gem'" + +# ----- Run bundle install ------------------------------------------------------------------------ + +run "bundle install" + +# ----- Change the search definition implementation and associated views and tests ---------------- + +# copy_file File.expand_path('../searchable.dsl.rb', __FILE__), 'app/models/concerns/searchable.rb', force: true +get 'https://raw.githubusercontent.com/elastic/elasticsearch-rails/master/elasticsearch-rails/lib/rails/templates/searchable.dsl.rb', + 'app/models/concerns/searchable.rb', force: true + +# copy_file File.expand_path('../index.html.dsl.erb', __FILE__), 'app/views/search/index.html.erb', force: true +get 'https://raw.githubusercontent.com/elastic/elasticsearch-rails/master/elasticsearch-rails/lib/rails/templates/index.html.dsl.erb', + 'app/views/search/index.html.erb', force: true + +gsub_file "test/controllers/search_controller_test.rb", %r{test "should return facets" do.*?end}m, <<-CODE +test "should return aggregations" do + get :index, q: 'one' + assert_response :success + + aggregations = assigns(:articles).response.response['aggregations'] + + assert_equal 2, aggregations['categories']['categories']['buckets'].size + assert_equal 2, aggregations['authors']['authors']['buckets'].size + assert_equal 2, aggregations['published']['published']['buckets'].size + + assert_equal 'John Smith', aggregations['authors']['authors']['buckets'][0]['key'] + assert_equal 'One', aggregations['categories']['categories']['buckets'][0]['key'] + assert_equal '2015-03-02T00:00:00.000Z', aggregations['published']['published']['buckets'][0]['key_as_string'] + end +CODE + +gsub_file "test/controllers/search_controller_test.rb", %r{test "should filter search results and the author and published date facets when user selects a category" do.*?end}m, <<-CODE +test "should filter search results and the author and published date facets when user selects a category" do + get :index, q: 'one', c: 'One' + assert_response :success + + assert_equal 2, assigns(:articles).size + + aggregations = assigns(:articles).response.response['aggregations'] + + assert_equal 1, aggregations['authors']['authors']['buckets'].size + assert_equal 1, aggregations['published']['published']['buckets'].size + + # Do NOT filter the category facet + assert_equal 2, aggregations['categories']['categories']['buckets'].size + end +CODE + +gsub_file "test/controllers/search_controller_test.rb", %r{test "should filter search results and the category and published date facets when user selects a category" do.*?end}m, <<-CODE +test "should filter search results and the category and published date facets when user selects a category" do + get :index, q: 'one', a: 'Mary Smith' + assert_response :success + + assert_equal 1, assigns(:articles).size + + aggregations = assigns(:articles).response.response['aggregations'] + + assert_equal 1, aggregations['categories']['categories']['buckets'].size + assert_equal 1, aggregations['published']['published']['buckets'].size + + # 
Do NOT filter the authors facet + assert_equal 2, aggregations['authors']['authors']['buckets'].size + end +CODE + +git add: "app/models/concerns/ app/views/search/ test/controllers/search_controller_test.rb" +git commit: "-m 'Updated the Article.search method to use the Ruby DSL and updated the associated views and tests'" + +# ----- Print Git log ----------------------------------------------------------------------------- + +puts +say_status "Git", "Details about the application:", :yellow +puts '-'*80, '' + +git tag: "dsl" +git log: "--reverse --oneline HEAD...expert" + +# ----- Start the application --------------------------------------------------------------------- + +unless ENV['RAILS_NO_SERVER_START'] + require 'net/http' + if (begin; Net::HTTP.get(URI('http://localhost:3000')); rescue Errno::ECONNREFUSED; false; rescue Exception; true; end) + puts "\n" + say_status "ERROR", "Some other application is running on port 3000!\n", :red + puts '-'*80 + + port = ask("Please provide free port:", :bold) + else + port = '3000' + end + + puts "", "="*80 + say_status "DONE", "\e[1mStarting the application. Open http://localhost:#{port}\e[0m", :yellow + puts "="*80, "" + + run "rails server --port=#{port}" +end diff --git a/elasticsearch-rails/lib/rails/templates/05-settings-files.rb b/elasticsearch-rails/lib/rails/templates/05-settings-files.rb new file mode 100644 index 0000000000..9717699fa2 --- /dev/null +++ b/elasticsearch-rails/lib/rails/templates/05-settings-files.rb @@ -0,0 +1,77 @@ +# $ rails new searchapp --skip --skip-bundle --template https://raw.githubusercontent.com/elastic/elasticsearch-rails/master/elasticsearch-rails/lib/rails/templates/05-settings-files.rb + +# (See: 01-basic.rb, 02-pretty.rb, 03-expert.rb, 04-dsl.rb) + +append_to_file 'README.rdoc', <<-README + +== [5] Settings Files + +The `settings-files` template refactors the `Searchable` module to load the index settings +from an external file. 
+ +README + +git add: "README.rdoc" +git commit: "-m '[05] Updated the application README'" + +# ----- Setup the Searchable module to load settings from config/elasticsearch/articles_settings.json + +gsub_file "app/models/concerns/searchable.rb", + /index: { number_of_shards: 1, number_of_replicas: 0 }/, + "File.open('config/elasticsearch/articles_settings.json')" + +git add: "app/models/concerns/searchable.rb" +git commit: "-m 'Setup the Searchable module to load settings from file'" + +# ----- Copy the articles_settings.json file ------------------------------------------------------- + +# copy_file File.expand_path('../articles_settings.json', __FILE__), 'config/elasticsearch/articles_settings.json' +get 'https://raw.githubusercontent.com/elastic/elasticsearch-rails/master/elasticsearch-rails/lib/rails/templates/articles_settings.json', + 'config/elasticsearch/articles_settings.json', force: true + +git add: "config/elasticsearch/articles_settings.json" +git commit: "-m 'Create the articles settings file'" + +# ----- Temporarily set local repo for testing ---------------------------------------------------- + +gsub_file "Gemfile", + %r{gem 'elasticsearch-model', git: 'git://github.com/elasticsearch/elasticsearch-rails.git'}, + "gem 'elasticsearch-model', path: File.expand_path('../../../../../../elasticsearch-model', __FILE__)" + +# ----- Run bundle install ------------------------------------------------------------------------ + +run "bundle install" + +# ----- Recreate the index ------------------------------------------------------------------------ + +rake "environment elasticsearch:import:model CLASS='Article' BATCH=100 FORCE=y" + +# ----- Print Git log ----------------------------------------------------------------------------- + +puts +say_status "Git", "Details about the application:", :yellow +puts '-'*80, '' + +git tag: "settings-files" +git log: "--reverse --oneline HEAD...dsl" + +# ----- Start the application --------------------------------------------------------------------- + +unless ENV['RAILS_NO_SERVER_START'] + require 'net/http' + if (begin; Net::HTTP.get(URI('http://localhost:3000')); rescue Errno::ECONNREFUSED; false; rescue Exception; true; end) + puts "\n" + say_status "ERROR", "Some other application is running on port 3000!\n", :red + puts '-'*80 + + port = ask("Please provide free port:", :bold) + else + port = '3000' + end + + puts "", "="*80 + say_status "DONE", "\e[1mStarting the application. Open http://localhost:#{port}\e[0m", :yellow + puts "="*80, "" + + run "rails server --port=#{port}" +end diff --git a/elasticsearch-rails/lib/rails/templates/articles.yml.gz b/elasticsearch-rails/lib/rails/templates/articles.yml.gz new file mode 100644 index 0000000000..665335ded0 Binary files /dev/null and b/elasticsearch-rails/lib/rails/templates/articles.yml.gz differ diff --git a/elasticsearch-rails/lib/rails/templates/articles_settings.json b/elasticsearch-rails/lib/rails/templates/articles_settings.json new file mode 100644 index 0000000000..c8fd35ea66 --- /dev/null +++ b/elasticsearch-rails/lib/rails/templates/articles_settings.json @@ -0,0 +1 @@ +{ "number_of_shards": 1, "number_of_replicas": 0 } diff --git a/elasticsearch-rails/lib/rails/templates/index.html.dsl.erb b/elasticsearch-rails/lib/rails/templates/index.html.dsl.erb new file mode 100644 index 0000000000..4f8d512b0a --- /dev/null +++ b/elasticsearch-rails/lib/rails/templates/index.html.dsl.erb @@ -0,0 +1,160 @@ +
      +

      <%= link_to 'Search New York Times articles', root_path %>

      + + <%= form_tag search_path, method: 'get', role: 'search' do %> +
      + <%= text_field_tag :q, params[:q], class: 'form-control', placeholder: 'Search...' %> + + + + +
      + +
      +
      + + <% params.slice(:a, :c, :s).each do |name, value| %> + <%= hidden_field_tag name, value %> + <% end %> +
      + +
      +

      Displaying <%= (params[:page] || 1).to_i.ordinalize %> page with <%= @articles.size %> articles + of total <%= @articles.total %>

      + + + +
      +
      + <% end %> + +
      +
      + +<% if @articles.size < 1 && @articles.response.suggestions.present? %> +
      +

      + No documents have been found. + <% if @articles.response.suggestions.terms.present? %> + Maybe you mean + <%= @articles.response.suggestions.terms.map do |term| + link_to term, search_path(params.except(:controller, :action).merge q: term) + end.to_sentence(last_word_connector: ' or ').html_safe %>? + <% end %> +

      +
      +<% end %> + +
      + <% unless @articles.size < 1 %> + +
      +

      <%= link_to 'All Sections →'.html_safe, search_path(params.except(:controller, :action).merge(c: nil))%>

      + +
      + <% @articles.response.response['aggregations']['categories']['categories']['buckets'].each do |c| %> + <%= + link_to search_path(params.except(:controller, :action).merge(c: c['key'])), + class: "list-group-item#{' active' if params[:c] == c['key']}" do + c['key'].titleize.html_safe + content_tag(:small, c['doc_count'], class: 'badge').html_safe + end + %> + <% end %> +
      +
      + +
      +

      <%= link_to 'All Authors →'.html_safe, search_path(params.except(:controller, :action).merge(a: nil))%>

      + +
      + <% @articles.response.response['aggregations']['authors']['authors']['buckets'].each do |a| %> + <%= + link_to search_path(params.except(:controller, :action).merge(a: a['key'])), + class: "list-group-item#{' active' if params[:a] == a['key']}" do + a['key'].titleize.html_safe + content_tag(:small, a['doc_count'], class: 'badge').html_safe + end + %> + <% end %> +
      +
      + +
      +

      <%= link_to 'Any Date →'.html_safe, search_path(params.except(:controller, :action).merge(w: nil))%>

      + +
      + <% @articles.response.response['aggregations']['published']['published']['buckets'].each do |w| %> + <%= + __start = Time.at(w['key']/1000) + __end = __start.end_of_week + __date = __start.to_date.to_s(:iso) + + link_to search_path(params.except(:controller, :action).merge(w: __date)), + class: "list-group-item#{' active' if params[:w] == __date}" do + "#{__start.to_date.to_s(:short)} — #{__end.to_date.to_s(:short)}".html_safe + \ + content_tag(:small, w['doc_count'], class: 'badge').html_safe + end + %> + <% end %> +
      +
      + <% end %> +
      + +
      +
      + <% @articles.each do |article| %> +
      +

      + <%= (article.try(:highlight).try(:title) ? article.highlight.title.join.html_safe : article.title) %> + <%= article.categories.to_sentence %> +

      + +

      + <% if article.try(:highlight).try(:abstract) %> + <%= article.highlight.abstract.join.html_safe %> + <% else %> + <%= article.try(:highlight).try(:content) ? article.highlight.content.join('…').html_safe : article.abstract %> + <% end %> +

      + + <% if comments = article.try(:highlight) && article.highlight['comments.body'] %> +

      + Comments: <%= comments.join('…').html_safe %> +

      + <% end %> + +

      + Authors: <%= article.authors.map(&:full_name).to_sentence %> | + Published: <%= article.published_on %> | + Score: <%= article._score %> +

      +
      + <% end %> +
      + +
        + + +
      + +
      + + diff --git a/elasticsearch-rails/lib/rails/templates/index.html.erb b/elasticsearch-rails/lib/rails/templates/index.html.erb new file mode 100644 index 0000000000..9d849ef046 --- /dev/null +++ b/elasticsearch-rails/lib/rails/templates/index.html.erb @@ -0,0 +1,160 @@ +
      +

      <%= link_to 'Search New York Times articles', root_path %>

      + + <%= form_tag search_path, method: 'get', role: 'search' do %> +
      + <%= text_field_tag :q, params[:q], class: 'form-control', placeholder: 'Search...' %> + + + + +
      + +
      +
      + + <% params.slice(:a, :c, :s).each do |name, value| %> + <%= hidden_field_tag name, value %> + <% end %> +
      + +
      +

      Displaying <%= (params[:page] || 1).to_i.ordinalize %> page with <%= @articles.size %> articles + of total <%= @articles.total %>

      + + + +
      +
      + <% end %> + +
      +
      + +<% if @articles.size < 1 && @articles.response.suggest.present? %> +
      +

      + No documents have been found. + <% if @articles.response.suggest['suggest_title'].present? || @articles.response.suggest['suggest_body'].present? %> + Maybe you mean + <%= @articles.response.suggest.map { |k,v| v.first['options'] }.flatten.map {|v| v['text']}.uniq.map do |term| + link_to term, search_path(params.except(:controller, :action).merge q: term) + end.to_sentence(last_word_connector: ' or ').html_safe %>? + <% end %> +

      +
      +<% end %> + +
      + <% unless @articles.size < 1 %> + +
      +

      <%= link_to 'All Sections →'.html_safe, search_path(params.except(:controller, :action).merge(c: nil))%>

      + +
      + <% @articles.response.response['aggregations']['categories']['categories']['buckets'].each do |c| %> + <%= + link_to search_path(params.except(:controller, :action).merge(c: c['key'])), + class: "list-group-item#{' active' if params[:c] == c['key']}" do + c['key'].titleize.html_safe + content_tag(:small, c['doc_count'], class: 'badge').html_safe + end + %> + <% end %> +
      +
      + +
      +

      <%= link_to 'All Authors →'.html_safe, search_path(params.except(:controller, :action).merge(a: nil))%>

      + +
      + <% @articles.response.response['aggregations']['authors']['authors']['buckets'].each do |a| %> + <%= + link_to search_path(params.except(:controller, :action).merge(a: a['key'])), + class: "list-group-item#{' active' if params[:a] == a['key']}" do + a['key'].titleize.html_safe + content_tag(:small, a['doc_count'], class: 'badge').html_safe + end + %> + <% end %> +
      +
      + +
      +

      <%= link_to 'Any Date →'.html_safe, search_path(params.except(:controller, :action).merge(w: nil))%>

      + +
      + <% @articles.response.response['aggregations']['published']['published']['buckets'].each do |w| %> + <%= + __start = Time.at(w['key']/1000) + __end = __start.end_of_week + __date = __start.to_date.to_s(:iso) + + link_to search_path(params.except(:controller, :action).merge(w: __date)), + class: "list-group-item#{' active' if params[:w] == __date}" do + "#{__start.to_date.to_s(:short)} — #{__end.to_date.to_s(:short)}".html_safe + \ + content_tag(:small, w['doc_count'], class: 'badge').html_safe + end + %> + <% end %> +
      +
      + <% end %> +
      + +
      +
      + <% @articles.each do |article| %> +
      +

      + <%= (article.try(:highlight).try(:title) ? article.highlight.title.join.html_safe : article.title) %> + <%= article.categories.to_sentence %> +

      + +

      + <% if article.try(:highlight).try(:abstract) %> + <%= article.highlight.abstract.join.html_safe %> + <% else %> + <%= article.try(:highlight).try(:content) ? article.highlight.content.join('…').html_safe : article.abstract %> + <% end %> +

      + + <% if comments = article.try(:highlight) && article.highlight['comments.body'] %> +

      + Comments: <%= comments.join('…').html_safe %> +

      + <% end %> + +

      + Authors: <%= article.authors.map(&:full_name).to_sentence %> | + Published: <%= article.published_on %> | + Score: <%= article._score %> +

      +
      + <% end %> +
      + +
        + + +
      + +
      + + diff --git a/elasticsearch-rails/lib/rails/templates/indexer.rb b/elasticsearch-rails/lib/rails/templates/indexer.rb new file mode 100644 index 0000000000..407c061099 --- /dev/null +++ b/elasticsearch-rails/lib/rails/templates/indexer.rb @@ -0,0 +1,27 @@ +# Indexer class for +# +# Run me with: +# +# $ bundle exec sidekiq --queue elasticsearch --verbose +# +class Indexer + include Sidekiq::Worker + sidekiq_options queue: 'elasticsearch', retry: false, backtrace: true + + Logger = Sidekiq.logger.level == Logger::DEBUG ? Sidekiq.logger : nil + Client = Elasticsearch::Client.new host: (ENV['ELASTICSEARCH_URL'] || 'http://localhost:9200'), logger: Logger + + def perform(operation, klass, record_id, options={}) + logger.debug [operation, "#{klass}##{record_id} #{options.inspect}"] + + case operation.to_s + when /index|update/ + record = klass.constantize.find(record_id) + record.__elasticsearch__.client = Client + record.__elasticsearch__.__send__ "#{operation}_document" + when /delete/ + Client.delete index: klass.constantize.index_name, type: klass.constantize.document_type, id: record_id + else raise ArgumentError, "Unknown operation '#{operation}'" + end + end +end diff --git a/elasticsearch-rails/lib/rails/templates/search.css b/elasticsearch-rails/lib/rails/templates/search.css new file mode 100644 index 0000000000..a04b9a6a2f --- /dev/null +++ b/elasticsearch-rails/lib/rails/templates/search.css @@ -0,0 +1,72 @@ +h1 { + font-size: 28px !important; + color: #a3a3a3 !important; + text-transform: uppercase; + letter-spacing: -2px; +} + +.label-highlight { + background: #f6fbfc !important; + box-shadow: 0px 1px 0px rgba(0,0,0,0.15); + padding: 0.2em 0.4em 0.2em 0.4em !important; +} + +h3 .label-highlight { + background: transparent !important; + padding: 0.1em 0.4em 0px 0.4em !important; + border-bottom: 1px solid #999; + box-shadow: 0px 2px 1px rgba(0,0,0,0.15); + border-radius: 0; +} + +.comments .label-highlight { + background: #fcfdf0 !important; +} + +small.badge { + font-size: 80% !important; + font-weight: normal !important; + display: inline-block; + float: right; +} + +form #form-options { + color: #666; + font-size: 95%; + margin-top: 1.5em; + padding: 0 0.25em; +} +form #form-options input { + margin-top: 0.25em; +} + +#facets .panel-heading { + margin-bottom: 0; +} + +.result { + border-bottom: 1px solid #ccc; + margin: 2em 0 0 0; + padding: 0 0 1em 0; +} +.result:first-child { + margin-top: 0.25em; +} + +.result h3.title { + font-family: 'Rokkitt', sans-serif; + margin-top: 0; +} + +.result .body { + font-family: Georgia, serif; +} + +.result .category { + font-family: 'Rokkitt', sans-serif; +} + +.result .comments { + color: #666666; + font-size: 80%; +} diff --git a/elasticsearch-rails/lib/rails/templates/search_controller_test.dsl.rb b/elasticsearch-rails/lib/rails/templates/search_controller_test.dsl.rb new file mode 100644 index 0000000000..7fe7dd9557 --- /dev/null +++ b/elasticsearch-rails/lib/rails/templates/search_controller_test.dsl.rb @@ -0,0 +1,130 @@ +require 'test_helper' + +class SearchControllerTest < ActionController::TestCase + setup do + Time.stubs(:now).returns(Time.parse('2015-03-16 10:00:00 UTC')) + + Article.delete_all + + articles = [ + { title: 'Article One', abstract: 'One', content: 'One', published_on: 1.day.ago, category_title: 'One', author_first_name: 'John', author_last_name: 'Smith' }, + { title: 'Article One Another', abstract: '', content: '', published_on: 2.days.ago, category_title: 'One', author_first_name: 'John', author_last_name: 
'Smith' }, + { title: 'Article One Two', abstract: '', content: '', published_on: 10.days.ago, category_title: 'Two', author_first_name: 'Mary', author_last_name: 'Smith' }, + { title: 'Article Two', abstract: '', content: '', published_on: 12.days.ago, category_title: 'Two', author_first_name: 'Mary', author_last_name: 'Smith' }, + { title: 'Article Three', abstract: '', content: '', published_on: 12.days.ago, category_title: 'Three', author_first_name: 'Alice', author_last_name: 'Smith' } + ] + + articles.each do |a| + article = Article.create! \ + title: a[:title], + abstract: a[:abstract], + content: a[:content], + published_on: a[:published_on] + + article.categories << Category.find_or_create_by!(title: a[:category_title]) + + article.authors << Author.find_or_create_by!(first_name: a[:author_first_name], last_name: a[:author_last_name]) + + article.save! + end + + Article.find_by_title('Article Three').comments.create body: 'One' + + Sidekiq::Queue.new("elasticsearch").clear + + Article.__elasticsearch__.import force: true + Article.__elasticsearch__.refresh_index! + end + + test "should return search results" do + get :index, q: 'one' + assert_response :success + assert_equal 3, assigns(:articles).size + end + + test "should return search results in comments" do + get :index, q: 'one', comments: 'y' + assert_response :success + assert_equal 4, assigns(:articles).size + end + + test "should return highlighted snippets" do + get :index, q: 'one' + assert_response :success + assert_match %r{One}, assigns(:articles).first.highlight.title.first + end + + test "should return suggestions" do + get :index, q: 'one' + assert_response :success + + suggestions = assigns(:articles).response.suggest + + assert_equal 'one', suggestions['suggest_title'][0]['text'] + end + + test "should return aggregations" do + get :index, q: 'one' + assert_response :success + + aggregations = assigns(:articles).response.response['aggregations'] + + assert_equal 2, aggregations['categories']['categories']['buckets'].size + assert_equal 2, aggregations['authors']['authors']['buckets'].size + assert_equal 2, aggregations['published']['published']['buckets'].size + + assert_equal 'John Smith', aggregations['authors']['authors']['buckets'][0]['key'] + assert_equal 'One', aggregations['categories']['categories']['buckets'][0]['key'] + assert_equal '2015-03-02T00:00:00.000Z', aggregations['published']['published']['buckets'][0]['key_as_string'] + end + + test "should sort on the published date" do + get :index, q: 'one', s: 'published_on' + assert_response :success + + assert_equal 3, assigns(:articles).size + assert_equal '2015-03-15', assigns(:articles)[0].published_on + assert_equal '2015-03-14', assigns(:articles)[1].published_on + assert_equal '2015-03-06', assigns(:articles)[2].published_on + end + + test "should sort on the published date when no query is provided" do + get :index, q: '' + assert_response :success + + assert_equal 5, assigns(:articles).size + assert_equal '2015-03-15', assigns(:articles)[0].published_on + assert_equal '2015-03-14', assigns(:articles)[1].published_on + assert_equal '2015-03-06', assigns(:articles)[2].published_on + end + + test "should filter search results and the author and published date facets when user selects a category" do + get :index, q: 'one', c: 'One' + assert_response :success + + assert_equal 2, assigns(:articles).size + + aggregations = assigns(:articles).response.response['aggregations'] + + assert_equal 1, aggregations['authors']['authors']['buckets'].size + 
assert_equal 1, aggregations['published']['published']['buckets'].size + + # Do NOT filter the category facet + assert_equal 2, aggregations['categories']['categories']['buckets'].size + end + + test "should filter search results and the category and published date facets when user selects a category" do + get :index, q: 'one', a: 'Mary Smith' + assert_response :success + + assert_equal 1, assigns(:articles).size + + aggregations = assigns(:articles).response.response['aggregations'] + + assert_equal 1, aggregations['categories']['categories']['buckets'].size + assert_equal 1, aggregations['published']['published']['buckets'].size + + # Do NOT filter the authors facet + assert_equal 2, aggregations['authors']['authors']['buckets'].size + end +end diff --git a/elasticsearch-rails/lib/rails/templates/search_controller_test.rb b/elasticsearch-rails/lib/rails/templates/search_controller_test.rb new file mode 100644 index 0000000000..472d35d75e --- /dev/null +++ b/elasticsearch-rails/lib/rails/templates/search_controller_test.rb @@ -0,0 +1,131 @@ +require 'test_helper' + +class SearchControllerTest < ActionController::TestCase + setup do + Time.stubs(:now).returns(Time.parse('2015-03-16 10:00:00 UTC')) + + Article.delete_all + + articles = [ + { title: 'Article One', abstract: 'One', content: 'One', published_on: 1.day.ago, category_title: 'One', author_first_name: 'John', author_last_name: 'Smith' }, + { title: 'Article One Another', abstract: '', content: '', published_on: 2.days.ago, category_title: 'One', author_first_name: 'John', author_last_name: 'Smith' }, + { title: 'Article One Two', abstract: '', content: '', published_on: 10.days.ago, category_title: 'Two', author_first_name: 'Mary', author_last_name: 'Smith' }, + { title: 'Article Two', abstract: '', content: '', published_on: 12.days.ago, category_title: 'Two', author_first_name: 'Mary', author_last_name: 'Smith' }, + { title: 'Article Three', abstract: '', content: '', published_on: 12.days.ago, category_title: 'Three', author_first_name: 'Alice', author_last_name: 'Smith' } + ] + + articles.each do |a| + article = Article.create! \ + title: a[:title], + abstract: a[:abstract], + content: a[:content], + published_on: a[:published_on] + + article.categories << Category.find_or_create_by!(title: a[:category_title]) + + article.authors << Author.find_or_create_by!(first_name: a[:author_first_name], last_name: a[:author_last_name]) + + article.save! + end + + Article.find_by_title('Article Three').comments.create body: 'One' + + Sidekiq::Worker.clear_all + + Article.__elasticsearch__.import force: true + Article.__elasticsearch__.refresh_index! 
+ end + + test "should return search results" do + get :index, q: 'one' + assert_response :success + assert_equal 3, assigns(:articles).size + end + + test "should return search results in comments" do + get :index, q: 'one', comments: 'y' + assert_response :success + + assert_equal 4, assigns(:articles).size + end + + test "should return highlighted snippets" do + get :index, q: 'one' + assert_response :success + assert_match %r{One}, assigns(:articles).first.highlight.title.first + end + + test "should return suggestions" do + get :index, q: 'one' + assert_response :success + + suggestions = assigns(:articles).response.suggest + + assert_equal 'one', suggestions['suggest_title'][0]['text'] + end + + test "should return facets" do + get :index, q: 'one' + assert_response :success + + aggregations = assigns(:articles).response.response['aggregations'] + + assert_equal 2, aggregations['categories']['categories']['buckets'].size + assert_equal 2, aggregations['authors']['authors']['buckets'].size + assert_equal 2, aggregations['published']['published']['buckets'].size + + assert_equal 'One', aggregations['categories']['categories']['buckets'][0]['key'] + assert_equal 'John Smith', aggregations['authors']['authors']['buckets'][0]['key'] + assert_equal 1425254400000, aggregations['published']['published']['buckets'][0]['key'] + end + + test "should sort on the published date" do + get :index, q: 'one', s: 'published_on' + assert_response :success + + assert_equal 3, assigns(:articles).size + assert_equal '2015-03-15', assigns(:articles)[0].published_on + assert_equal '2015-03-14', assigns(:articles)[1].published_on + assert_equal '2015-03-06', assigns(:articles)[2].published_on + end + + test "should sort on the published date when no query is provided" do + get :index, q: '' + assert_response :success + + assert_equal 5, assigns(:articles).size + assert_equal '2015-03-15', assigns(:articles)[0].published_on + assert_equal '2015-03-14', assigns(:articles)[1].published_on + assert_equal '2015-03-06', assigns(:articles)[2].published_on + end + + test "should filter search results and the author and published date facets when user selects a category" do + get :index, q: 'one', c: 'One' + assert_response :success + + assert_equal 2, assigns(:articles).size + + aggregations = assigns(:articles).response.response['aggregations'] + + assert_equal 1, aggregations['authors']['authors']['buckets'].size + assert_equal 1, aggregations['published']['published']['buckets'].size + + # Do NOT filter the category facet + assert_equal 2, aggregations['categories']['categories']['buckets'].size + end + + test "should filter search results and the category and published date facets when user selects a category" do + get :index, q: 'one', a: 'Mary Smith' + assert_response :success + + assert_equal 1, assigns(:articles).size + + aggregations = assigns(:articles).response.response['aggregations'] + + assert_equal 1, aggregations['categories']['categories']['buckets'].size + assert_equal 1, aggregations['published']['published']['buckets'].size + + # Do NOT filter the authors facet + assert_equal 2, aggregations['authors']['authors']['buckets'].size + end +end diff --git a/elasticsearch-rails/lib/rails/templates/searchable.dsl.rb b/elasticsearch-rails/lib/rails/templates/searchable.dsl.rb new file mode 100644 index 0000000000..e585104699 --- /dev/null +++ b/elasticsearch-rails/lib/rails/templates/searchable.dsl.rb @@ -0,0 +1,217 @@ +module Searchable + extend ActiveSupport::Concern + + included do + include 
Elasticsearch::Model + + # Customize the index name + # + index_name [Rails.application.engine_name, Rails.env].join('_') + + # Set up index configuration and mapping + # + settings index: { number_of_shards: 1, number_of_replicas: 0 } do + mapping do + indexes :title, type: 'multi_field' do + indexes :title, analyzer: 'snowball' + indexes :tokenized, analyzer: 'simple' + end + + indexes :content, type: 'multi_field' do + indexes :content, analyzer: 'snowball' + indexes :tokenized, analyzer: 'simple' + end + + indexes :published_on, type: 'date' + + indexes :authors do + indexes :full_name, type: 'multi_field' do + indexes :full_name + indexes :raw, analyzer: 'keyword' + end + end + + indexes :categories, analyzer: 'keyword' + + indexes :comments, type: 'nested' do + indexes :body, analyzer: 'snowball' + indexes :stars + indexes :pick + indexes :user, analyzer: 'keyword' + indexes :user_location, type: 'multi_field' do + indexes :user_location + indexes :raw, analyzer: 'keyword' + end + end + end + end + + # Set up callbacks for updating the index on model changes + # + after_commit lambda { Indexer.perform_async(:index, self.class.to_s, self.id) }, on: :create + after_commit lambda { Indexer.perform_async(:update, self.class.to_s, self.id) }, on: :update + after_commit lambda { Indexer.perform_async(:delete, self.class.to_s, self.id) }, on: :destroy + after_touch lambda { Indexer.perform_async(:update, self.class.to_s, self.id) } + + # Customize the JSON serialization for Elasticsearch + # + def as_indexed_json(options={}) + hash = self.as_json( + include: { authors: { methods: [:full_name], only: [:full_name] }, + comments: { only: [:body, :stars, :pick, :user, :user_location] } + }) + hash['categories'] = self.categories.map(&:title) + hash + end + + # Return documents matching the user's query, include highlights and aggregations in response, + # and implement a "cross" faceted navigation + # + # @param q [String] The user query + # @return [Elasticsearch::Model::Response::Response] + # + def self.search(q, options={}) + @search_definition = Elasticsearch::DSL::Search.search do + query do + + # If a user query is present... + # + unless q.blank? + bool do + + # ... search in `title`, `abstract` and `content`, boosting `title` + # + should do + multi_match do + query q + fields ['title^10', 'abstract^2', 'content'] + operator 'and' + end + end + + # ... search in comment body if user checked the comments checkbox + # + if q.present? && options[:comments] + should do + nested do + path :comments + query do + multi_match do + query q + fields 'body' + operator 'and' + end + end + end + end + end + end + + # ... otherwise, just return all articles + else + match_all + end + end + + # Filter the search results based on user selection + # + post_filter do + bool do + must { term categories: options[:category] } if options[:category] + must { match_all } if options.keys.none? { |k| [:c, :a, :w].include? 
k } + must { term 'authors.full_name.raw' => options[:author] } if options[:author] + must { range published_on: { gte: options[:published_week], lte: "#{options[:published_week]}||+1w" } } if options[:published_week] + end + end + + # Return top categories for faceted navigation + # + aggregation :categories do + # Filter the aggregation with any selected `author` and `published_week` + # + f = Elasticsearch::DSL::Search::Filters::Bool.new + f.must { match_all } + f.must { term 'authors.full_name.raw' => options[:author] } if options[:author] + f.must { range published_on: { gte: options[:published_week], lte: "#{options[:published_week]}||+1w" } } if options[:published_week] + + filter f.to_hash do + aggregation :categories do + terms field: 'categories' + end + end + end + + # Return top authors for faceted navigation + # + aggregation :authors do + # Filter the aggregation with any selected `category` and `published_week` + # + f = Elasticsearch::DSL::Search::Filters::Bool.new + f.must { match_all } + f.must { term categories: options[:category] } if options[:category] + f.must { range published_on: { gte: options[:published_week], lte: "#{options[:published_week]}||+1w" } } if options[:published_week] + + filter f do + aggregation :authors do + terms field: 'authors.full_name.raw' + end + end + end + + # Return the published date ranges for faceted navigation + # + aggregation :published do + # Filter the aggregation with any selected `author` and `category` + # + f = Elasticsearch::DSL::Search::Filters::Bool.new + f.must { match_all } + f.must { term 'authors.full_name.raw' => options[:author] } if options[:author] + f.must { term categories: options[:category] } if options[:category] + + filter f do + aggregation :published do + date_histogram do + field 'published_on' + interval 'week' + end + end + end + end + + # Highlight the snippets in results + # + highlight do + fields title: { number_of_fragments: 0 }, + abstract: { number_of_fragments: 0 }, + content: { fragment_size: 50 } + + field 'comments.body', fragment_size: 50 if q.present? && options[:comments] + + pre_tags '' + post_tags '' + end + + case + # By default, sort by relevance, but when a specific sort option is present, use it ... + # + when options[:sort] + sort options[:sort].to_sym => 'desc' + track_scores true + # + # ... when there's no user query, sort on published date + # + when q.blank? + sort published_on: 'desc' + end + + # Return suggestions unless there's no query from the user + unless q.blank? 
+ suggest :suggest_title, text: q, term: { field: 'title.tokenized', suggest_mode: 'always' } + suggest :suggest_body, text: q, term: { field: 'content.tokenized', suggest_mode: 'always' } + end + end + + __elasticsearch__.search(@search_definition) + end + end +end diff --git a/elasticsearch-rails/lib/rails/templates/searchable.rb b/elasticsearch-rails/lib/rails/templates/searchable.rb new file mode 100644 index 0000000000..55b0315322 --- /dev/null +++ b/elasticsearch-rails/lib/rails/templates/searchable.rb @@ -0,0 +1,206 @@ +module Searchable + extend ActiveSupport::Concern + + included do + include Elasticsearch::Model + + # Customize the index name + # + index_name [Rails.application.engine_name, Rails.env].join('_') + + # Set up index configuration and mapping + # + settings index: { number_of_shards: 1, number_of_replicas: 0 } do + mapping do + indexes :title, type: 'multi_field' do + indexes :title, analyzer: 'snowball' + indexes :tokenized, analyzer: 'simple' + end + + indexes :content, type: 'multi_field' do + indexes :content, analyzer: 'snowball' + indexes :tokenized, analyzer: 'simple' + end + + indexes :published_on, type: 'date' + + indexes :authors do + indexes :full_name, type: 'multi_field' do + indexes :full_name + indexes :raw, analyzer: 'keyword' + end + end + + indexes :categories, analyzer: 'keyword' + + indexes :comments, type: 'nested' do + indexes :body, analyzer: 'snowball' + indexes :stars + indexes :pick + indexes :user, analyzer: 'keyword' + indexes :user_location, type: 'multi_field' do + indexes :user_location + indexes :raw, analyzer: 'keyword' + end + end + end + end + + # Set up callbacks for updating the index on model changes + # + after_commit lambda { Indexer.perform_async(:index, self.class.to_s, self.id) }, on: :create + after_commit lambda { Indexer.perform_async(:update, self.class.to_s, self.id) }, on: :update + after_commit lambda { Indexer.perform_async(:delete, self.class.to_s, self.id) }, on: :destroy + after_touch lambda { Indexer.perform_async(:update, self.class.to_s, self.id) } + + # Customize the JSON serialization for Elasticsearch + # + def as_indexed_json(options={}) + hash = self.as_json( + include: { authors: { methods: [:full_name], only: [:full_name] }, + comments: { only: [:body, :stars, :pick, :user, :user_location] } + }) + hash['categories'] = self.categories.map(&:title) + hash + end + + # Search in title and content fields for `query`, include highlights in response + # + # @param query [String] The user query + # @return [Elasticsearch::Model::Response::Response] + # + def self.search(query, options={}) + + # Prefill and set the filters (top-level `post_filter` and aggregation `filter` elements) + # + __set_filters = lambda do |key, f| + @search_definition[:post_filter][:and] ||= [] + @search_definition[:post_filter][:and] |= [f] + + @search_definition[:aggregations][key.to_sym][:filter][:bool][:must] ||= [] + @search_definition[:aggregations][key.to_sym][:filter][:bool][:must] |= [f] + end + + @search_definition = { + query: {}, + + highlight: { + pre_tags: [''], + post_tags: [''], + fields: { + title: { number_of_fragments: 0 }, + abstract: { number_of_fragments: 0 }, + content: { fragment_size: 50 } + } + }, + + post_filter: {}, + + aggregations: { + categories: { + filter: { bool: { must: [ match_all: {} ] } }, + aggregations: { categories: { terms: { field: 'categories' } } } + }, + authors: { + filter: { bool: { must: [ match_all: {} ] } }, + aggregations: { authors: { terms: { field: 'authors.full_name.raw' } } } + }, 
+ published: { + filter: { bool: { must: [ match_all: {} ] } }, + aggregations: { + published: { date_histogram: { field: 'published_on', interval: 'week' } } + } + } + } + } + + unless query.blank? + @search_definition[:query] = { + bool: { + should: [ + { multi_match: { + query: query, + fields: ['title^10', 'abstract^2', 'content'], + operator: 'and' + } + } + ] + } + } + else + @search_definition[:query] = { match_all: {} } + @search_definition[:sort] = { published_on: 'desc' } + end + + if options[:category] + f = { term: { categories: options[:category] } } + + __set_filters.(:authors, f) + __set_filters.(:published, f) + end + + if options[:author] + f = { term: { 'authors.full_name.raw' => options[:author] } } + + __set_filters.(:categories, f) + __set_filters.(:published, f) + end + + if options[:published_week] + f = { + range: { + published_on: { + gte: options[:published_week], + lte: "#{options[:published_week]}||+1w" + } + } + } + + __set_filters.(:categories, f) + __set_filters.(:authors, f) + end + + if query.present? && options[:comments] + @search_definition[:query][:bool][:should] ||= [] + @search_definition[:query][:bool][:should] << { + nested: { + path: 'comments', + query: { + multi_match: { + query: query, + fields: ['comments.body'], + operator: 'and' + } + } + } + } + @search_definition[:highlight][:fields].update 'comments.body' => { fragment_size: 50 } + end + + if options[:sort] + @search_definition[:sort] = { options[:sort] => 'desc' } + @search_definition[:track_scores] = true + end + + unless query.blank? + @search_definition[:suggest] = { + text: query, + suggest_title: { + term: { + field: 'title.tokenized', + suggest_mode: 'always' + } + }, + suggest_body: { + term: { + field: 'content.tokenized', + suggest_mode: 'always' + } + } + } + end + + __elasticsearch__.search(@search_definition) + end + end +end diff --git a/elasticsearch-rails/lib/rails/templates/seeds.rb b/elasticsearch-rails/lib/rails/templates/seeds.rb new file mode 100644 index 0000000000..85bcd509be --- /dev/null +++ b/elasticsearch-rails/lib/rails/templates/seeds.rb @@ -0,0 +1,57 @@ +require 'zlib' +require 'yaml' + +Zlib::GzipReader.open(File.expand_path('../articles.yml.gz', __FILE__)) do |gzip| + puts "Reading articles from gzipped YAML..." + @documents = YAML.load_documents(gzip.read) +end + +# Truncate the default ActiveRecord logger output +ActiveRecord::Base.logger = ActiveSupport::Logger.new(STDERR) +ActiveRecord::Base.logger.instance_eval do + @formatter = lambda do |s, d, p, message| + message + .gsub(/\[("content", ".*?")\]/m) { |match| match[0..100] + '..."]' } + .gsub(/\[("body", ".*?")\]/m ) { |match| match[0..100] + '..."]' } + .strip + .concat("\n") + end +end + +# Reduce verbosity and truncate the request body of Elasticsearch logger +Article.__elasticsearch__.client.transport.tracer.level = Logger::INFO +Article.__elasticsearch__.client.transport.tracer.formatter = lambda do |s, d, p, message| + "\n\n" + (message.size > 105 ? message[0..105].concat("...}'") : message) + "\n\n" +end + +# Skip model callbacks +%w| _touch_callbacks + _commit_callbacks + after_add_for_categories + after_add_for_authorships + after_add_for_authors + after_add_for_comments |.each do |c| + Article.class.__send__ :define_method, c do; []; end + end + +@documents.each do |document| + article = Article.create! document.slice(:title, :abstract, :content, :url, :shares, :published_on) + + article.categories = document[:categories].map do |d| + Category.find_or_create_by! 
title: d + end + + article.authors = document[:authors].map do |d| + first_name, last_name = d.split(' ').compact.map(&:strip) + Author.find_or_create_by! first_name: first_name, last_name: last_name + end + + document[:comments].each { |d| article.comments.create! d } + + article.save! +end + +# Remove any jobs from the "elasticsearch" Sidekiq queue +# +require 'sidekiq/api' +Sidekiq::Queue.new("elasticsearch").clear diff --git a/elasticsearch-rails/test/test_helper.rb b/elasticsearch-rails/test/test_helper.rb new file mode 100644 index 0000000000..bf9c55bd13 --- /dev/null +++ b/elasticsearch-rails/test/test_helper.rb @@ -0,0 +1,64 @@ +RUBY_1_8 = defined?(RUBY_VERSION) && RUBY_VERSION < '1.9' + +exit(0) if RUBY_1_8 + +require 'simplecov' and SimpleCov.start { add_filter "/test|test_/" } if ENV["COVERAGE"] + +# Register `at_exit` handler for integration tests shutdown. +# MUST be called before requiring `test/unit`. +at_exit { Elasticsearch::Test::IntegrationTestCase.__run_at_exit_hooks } + +puts '-'*80 + +if defined?(RUBY_VERSION) && RUBY_VERSION > '2.2' + require 'test-unit' + require 'mocha/test_unit' +else + require 'minitest/autorun' + require 'mocha/mini_test' +end + +require 'shoulda-context' + +require 'turn' unless ENV["TM_FILEPATH"] || ENV["NOTURN"] || defined?(RUBY_VERSION) && RUBY_VERSION > '2.2' + +require 'ansi' +require 'oj' + +require 'rails/version' +require 'active_record' +require 'active_model' + +require 'elasticsearch/model' +require 'elasticsearch/rails' + +require 'elasticsearch/extensions/test/cluster' +require 'elasticsearch/extensions/test/startup_shutdown' + +module Elasticsearch + module Test + class IntegrationTestCase < ::Test::Unit::TestCase + extend Elasticsearch::Extensions::Test::StartupShutdown + + startup { Elasticsearch::Extensions::Test::Cluster.start(nodes: 1) if ENV['SERVER'] and not Elasticsearch::Extensions::Test::Cluster.running? } + shutdown { Elasticsearch::Extensions::Test::Cluster.stop if ENV['SERVER'] && started? } + context "IntegrationTest" do; should "noop on Ruby 1.8" do; end; end if RUBY_1_8 + + def setup + ActiveRecord::Base.establish_connection( :adapter => 'sqlite3', :database => ":memory:" ) + logger = ::Logger.new(STDERR) + logger.formatter = lambda { |s, d, p, m| "#{m.ansi(:faint, :cyan)}\n" } + ActiveRecord::Base.logger = logger unless ENV['QUIET'] + + ActiveRecord::LogSubscriber.colorize_logging = false + ActiveRecord::Migration.verbose = false + + tracer = ::Logger.new(STDERR) + tracer.formatter = lambda { |s, d, p, m| "#{m.gsub(/^.*$/) { |n| ' ' + n }.ansi(:faint)}\n" } + + Elasticsearch::Model.client = Elasticsearch::Client.new host: "localhost:#{(ENV['TEST_CLUSTER_PORT'] || 9250)}", + tracer: (ENV['QUIET'] ? 
nil : tracer) + end + end + end +end diff --git a/elasticsearch-rails/test/unit/instrumentation/instrumentation_test.rb b/elasticsearch-rails/test/unit/instrumentation/instrumentation_test.rb new file mode 100644 index 0000000000..51cd3c6699 --- /dev/null +++ b/elasticsearch-rails/test/unit/instrumentation/instrumentation_test.rb @@ -0,0 +1,61 @@ +require 'test_helper' + +require 'rails/railtie' +require 'active_support/log_subscriber/test_helper' + +require 'elasticsearch/rails/instrumentation' + +class Elasticsearch::Rails::InstrumentationTest < Test::Unit::TestCase + include ActiveSupport::LogSubscriber::TestHelper + + context "ActiveSupport::Instrumentation integration" do + class ::DummyInstrumentationModel + extend Elasticsearch::Model::Searching::ClassMethods + + def self.index_name; 'foo'; end + def self.document_type; 'bar'; end + end + + RESPONSE = { 'took' => '5ms', 'hits' => { 'total' => 123, 'max_score' => 456, 'hits' => [] } } + + setup do + @search = Elasticsearch::Model::Searching::SearchRequest.new ::DummyInstrumentationModel, '*' + + @client = stub('client', search: RESPONSE) + DummyInstrumentationModel.stubs(:client).returns(@client) + + Elasticsearch::Rails::Instrumentation::Railtie.run_initializers + end + + should "wrap SearchRequest#execute! with instrumentation" do + s = Elasticsearch::Model::Searching::SearchRequest.new ::DummyInstrumentationModel, 'foo' + assert_respond_to s, :execute_without_instrumentation! + assert_respond_to s, :execute_with_instrumentation! + end + + should "publish the notification" do + @query = { query: { match: { foo: 'bar' } } } + + ActiveSupport::Notifications.expects(:instrument).with do |name, payload| + assert_equal "search.elasticsearch", name + assert_equal 'DummyInstrumentationModel', payload[:klass] + assert_equal @query, payload[:search][:body] + true + end + + s = ::DummyInstrumentationModel.search @query + s.response + end + + should "print the debug information to the Rails log" do + s = ::DummyInstrumentationModel.search query: { match: { moo: 'bam' } } + s.response + + logged = @logger.logged(:debug).first + + assert_not_nil logged + assert_match /DummyInstrumentationModel Search \(\d+\.\d+ms\)/, logged + assert_match /body\: \{query\: \{match\: \{moo\: "bam"\}\}\}\}/, logged + end unless defined?(RUBY_VERSION) && RUBY_VERSION > '2.2' + end +end diff --git a/elasticsearch-rails/test/unit/instrumentation/lograge_test.rb b/elasticsearch-rails/test/unit/instrumentation/lograge_test.rb new file mode 100644 index 0000000000..f7984daef9 --- /dev/null +++ b/elasticsearch-rails/test/unit/instrumentation/lograge_test.rb @@ -0,0 +1,21 @@ +require 'test_helper' + +require 'rails/railtie' +require 'action_pack' +require 'lograge' + +require 'elasticsearch/rails/lograge' + +class Elasticsearch::Rails::LogrageTest < Test::Unit::TestCase + context "Lograge integration" do + setup do + Elasticsearch::Rails::Lograge::Railtie.run_initializers + end + + should "customize the Lograge configuration" do + assert_not_nil Elasticsearch::Rails::Lograge::Railtie.initializers + .select { |i| i.name == 'elasticsearch.lograge' } + .first + end + end +end diff --git a/faraday-middleware-aws-signers-v4/.gitignore b/faraday-middleware-aws-signers-v4/.gitignore new file mode 100644 index 0000000000..1362445432 --- /dev/null +++ b/faraday-middleware-aws-signers-v4/.gitignore @@ -0,0 +1,10 @@ +/.bundle/ +/.yardoc +/Gemfile.lock +/_yardoc/ +/coverage/ +/doc/ +/pkg/ +/spec/reports/ +/tmp/ +test.rb diff --git a/faraday-middleware-aws-signers-v4/.rspec 
b/faraday-middleware-aws-signers-v4/.rspec new file mode 100644 index 0000000000..83e16f8044 --- /dev/null +++ b/faraday-middleware-aws-signers-v4/.rspec @@ -0,0 +1,2 @@ +--color +--require spec_helper diff --git a/faraday-middleware-aws-signers-v4/.travis.yml b/faraday-middleware-aws-signers-v4/.travis.yml new file mode 100644 index 0000000000..1bc2e3ebf1 --- /dev/null +++ b/faraday-middleware-aws-signers-v4/.travis.yml @@ -0,0 +1,12 @@ +sudo: false +cache: bundler +language: ruby +rvm: + - 2.0.0 + - 2.1.10 + - 2.2.6 + - 2.3.3 + - 2.4.0 +script: + - bundle install + - bundle exec rake diff --git a/faraday-middleware-aws-signers-v4/Gemfile b/faraday-middleware-aws-signers-v4/Gemfile new file mode 100644 index 0000000000..7fb943e485 --- /dev/null +++ b/faraday-middleware-aws-signers-v4/Gemfile @@ -0,0 +1,4 @@ +source 'https://rubygems.org' + +# Specify your gem's dependencies in faraday_middleware-aws-signers-v4.gemspec +gemspec diff --git a/faraday-middleware-aws-signers-v4/LICENSE.txt b/faraday-middleware-aws-signers-v4/LICENSE.txt new file mode 100644 index 0000000000..c599da48de --- /dev/null +++ b/faraday-middleware-aws-signers-v4/LICENSE.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Genki Sugawara + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/faraday-middleware-aws-signers-v4/README.md b/faraday-middleware-aws-signers-v4/README.md new file mode 100644 index 0000000000..cf5edaf45a --- /dev/null +++ b/faraday-middleware-aws-signers-v4/README.md @@ -0,0 +1,67 @@ +# FaradayMiddleware::AwsSignersV4 + +[Faraday](https://github.com/lostisland/faraday) middleware for AWS Signature Version 4. 
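+
+Under the hood, the middleware wraps each outgoing Faraday request in a small
+adapter object and hands it to the AWS SDK's V4 signer before the request is
+dispatched (see `lib/faraday_middleware/request/aws_signers_v4.rb` later in
+this diff). A condensed, illustrative sketch of that signing step; the
+`SignableRequest` struct here is a hypothetical stand-in for the middleware's
+internal `Request` wrapper:
+
+```ruby
+require 'aws-sdk-resources'
+
+# Illustrative stand-in for the middleware's internal Request wrapper:
+# the signer only needs an object exposing #headers, #body, #endpoint
+# and #http_method.
+SignableRequest = Struct.new(:headers, :body, :endpoint, :http_method)
+
+credentials = Aws::Credentials.new(ENV['AWS_ACCESS_KEY_ID'], ENV['AWS_SECRET_ACCESS_KEY'])
+req = SignableRequest.new({}, '', URI('https://apigateway.us-east-1.amazonaws.com/account'), 'GET')
+
+# Adds the Authorization, X-Amz-Date, etc. headers to req.headers in place,
+# exactly the call the middleware makes per request.
+Aws::Signers::V4.new(credentials, 'apigateway', 'us-east-1').sign(req)
+```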
+ +[![Gem Version](https://badge.fury.io/rb/faraday_middleware-aws-signers-v4.svg)](http://badge.fury.io/rb/faraday_middleware-aws-signers-v4) +[![Build Status](https://travis-ci.org/winebarrel/faraday_middleware-aws-signers-v4.svg)](https://travis-ci.org/winebarrel/faraday_middleware-aws-signers-v4) +[![Coverage Status](https://coveralls.io/repos/winebarrel/faraday_middleware-aws-signers-v4/badge.svg?branch=master&service=github)](https://coveralls.io/github/winebarrel/faraday_middleware-aws-signers-v4?branch=master) + +**Currently developing new gem to support aws-sdk-v3.** + +**see https://github.com/winebarrel/faraday_middleware-aws-sigv4** + +## Installation + +Add this line to your application's Gemfile: + +```ruby +gem 'faraday_middleware-aws-signers-v4' +``` + +And then execute: + + $ bundle + +Or install it yourself as: + + $ gem install faraday_middleware-aws-signers-v4 + +## Usage + +```ruby +require 'faraday_middleware' +require 'faraday_middleware/aws_signers_v4' +require 'pp' + +conn = Faraday.new(url: 'https://apigateway.us-east-1.amazonaws.com') do |faraday| + faraday.request :aws_signers_v4, + credentials: Aws::Credentials.new(ENV['AWS_ACCESS_KEY_ID'], ENV['AWS_SECRET_ACCESS_KEY']), + # If you use the credentials file: + #credentials: Aws::SharedCredentials.new.credentials, + service_name: 'apigateway', + region: 'us-east-1' + + faraday.response :json, :content_type => /\bjson\b/ + faraday.response :raise_error + + faraday.adapter Faraday.default_adapter +end + +res = conn.get '/account' +pp res.body +# => {"accountUpdate"=> +# {"name"=>nil, +# "template"=>false, +# "templateSkipList"=>nil, +# "title"=>nil, +# "updateAccountInput"=>nil}, +# "cloudwatchRoleArn"=>nil, +# "self"=> +# {"__type"=> +# "GetAccountRequest:http://internal.amazon.com/coral/com.amazonaws.backplane.controlplane/", +# "name"=>nil, +# "template"=>false, +# "templateSkipList"=>nil, +# "title"=>nil}, +# "throttleSettings"=>{"burstLimit"=>1000, "rateLimit"=>500.0}} +``` diff --git a/faraday-middleware-aws-signers-v4/Rakefile b/faraday-middleware-aws-signers-v4/Rakefile new file mode 100644 index 0000000000..93cb94306b --- /dev/null +++ b/faraday-middleware-aws-signers-v4/Rakefile @@ -0,0 +1,6 @@ +require 'bundler/gem_tasks' +require 'rspec/core/rake_task' + +RSpec::Core::RakeTask.new(:spec) + +task :default => :spec diff --git a/faraday-middleware-aws-signers-v4/bin/console b/faraday-middleware-aws-signers-v4/bin/console new file mode 100755 index 0000000000..27001101f7 --- /dev/null +++ b/faraday-middleware-aws-signers-v4/bin/console @@ -0,0 +1,14 @@ +#!/usr/bin/env ruby + +require "bundler/setup" +require "faraday_middleware/aws_signers_v4" + +# You can add fixtures and/or initialization code here to make experimenting +# with your gem easier. You can also use a different console, if you like. + +# (If you use this, don't forget to add pry to your Gemfile!) 
+# require "pry" +# Pry.start + +require "irb" +IRB.start diff --git a/faraday-middleware-aws-signers-v4/bin/setup b/faraday-middleware-aws-signers-v4/bin/setup new file mode 100755 index 0000000000..b65ed50ff1 --- /dev/null +++ b/faraday-middleware-aws-signers-v4/bin/setup @@ -0,0 +1,7 @@ +#!/bin/bash +set -euo pipefail +IFS=$'\n\t' + +bundle install + +# Do any other automated setup that you need to do here diff --git a/faraday-middleware-aws-signers-v4/faraday_middleware-aws-signers-v4.gemspec b/faraday-middleware-aws-signers-v4/faraday_middleware-aws-signers-v4.gemspec new file mode 100644 index 0000000000..353468a4d2 --- /dev/null +++ b/faraday-middleware-aws-signers-v4/faraday_middleware-aws-signers-v4.gemspec @@ -0,0 +1,28 @@ +# coding: utf-8 +Gem::Specification.new do |spec| + spec.name = 'faraday_middleware-aws-signers-v4' + spec.version = '0.1.9' + spec.authors = ['Genki Sugawara'] + spec.email = ['sgwr_dts@yahoo.co.jp'] + + spec.summary = %q{Faraday middleware for AWS Signature Version 4.} + spec.description = %q{Faraday middleware for AWS Signature Version 4.} + spec.homepage = 'https://github.com/winebarrel/faraday_middleware-aws-signers-v4' + spec.license = 'MIT' + + spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) } + spec.bindir = 'exe' + spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) } + spec.require_paths = ['lib'] + + spec.add_dependency 'faraday', '~> 0.9' + spec.add_dependency 'aws-sdk-resources', '>= 2', '< 3' + + spec.add_development_dependency 'bundler' + spec.add_development_dependency 'rake' + spec.add_development_dependency 'rspec', '>= 3.0.0' + spec.add_development_dependency 'timecop' + spec.add_development_dependency 'faraday_middleware' + spec.add_development_dependency 'coveralls' + spec.add_development_dependency 'webmock' +end diff --git a/faraday-middleware-aws-signers-v4/lib/faraday_middleware-aws-signers-v4.rb b/faraday-middleware-aws-signers-v4/lib/faraday_middleware-aws-signers-v4.rb new file mode 100644 index 0000000000..d10a391ad2 --- /dev/null +++ b/faraday-middleware-aws-signers-v4/lib/faraday_middleware-aws-signers-v4.rb @@ -0,0 +1 @@ +require 'faraday_middleware/aws_signers_v4' diff --git a/faraday-middleware-aws-signers-v4/lib/faraday_middleware/aws_signers_v4.rb b/faraday-middleware-aws-signers-v4/lib/faraday_middleware/aws_signers_v4.rb new file mode 100644 index 0000000000..a1c0dd4499 --- /dev/null +++ b/faraday-middleware-aws-signers-v4/lib/faraday_middleware/aws_signers_v4.rb @@ -0,0 +1,8 @@ +require 'aws-sdk-resources' +require 'faraday' + +module FaradayMiddleware + autoload :AwsSignersV4, 'faraday_middleware/request/aws_signers_v4' + + Faraday::Request.register_middleware :aws_signers_v4 => lambda { AwsSignersV4 } +end diff --git a/faraday-middleware-aws-signers-v4/lib/faraday_middleware/aws_signers_v4_ext.rb b/faraday-middleware-aws-signers-v4/lib/faraday_middleware/aws_signers_v4_ext.rb new file mode 100644 index 0000000000..a16f470502 --- /dev/null +++ b/faraday-middleware-aws-signers-v4/lib/faraday_middleware/aws_signers_v4_ext.rb @@ -0,0 +1,13 @@ +require 'aws-sdk-core/signers/v4' + +module AwsSignersV4Ext + def signed_headers(request) + super.downcase + end +end + +class Aws::Signers::V4 + unless Aws::Signers::V4 < AwsSignersV4Ext + prepend AwsSignersV4Ext + end +end diff --git a/faraday-middleware-aws-signers-v4/lib/faraday_middleware/ext/uri_ext.rb b/faraday-middleware-aws-signers-v4/lib/faraday_middleware/ext/uri_ext.rb new file mode 100644 index 
0000000000..147864931b --- /dev/null +++ b/faraday-middleware-aws-signers-v4/lib/faraday_middleware/ext/uri_ext.rb @@ -0,0 +1,25 @@ +require 'uri' +require 'aws-sdk-resources' + +module URI + def self.seahorse_encode_www_form(params) + params.map {|key, value| + encoded_key = encode_www_form_component(key) + + if value.nil? + encoded_key + elsif value.respond_to?(:to_ary) + value.to_ary.map {|v| + if v.nil? + # bug? + #encoded_key + else + encoded_key + '=' + Seahorse::Util.uri_escape(v) + end + }.join('&') + else + encoded_key + '=' + Seahorse::Util.uri_escape(value) + end + }.join('&') + end +end diff --git a/faraday-middleware-aws-signers-v4/lib/faraday_middleware/request/aws_signers_v4.rb b/faraday-middleware-aws-signers-v4/lib/faraday_middleware/request/aws_signers_v4.rb new file mode 100644 index 0000000000..d80d03c126 --- /dev/null +++ b/faraday-middleware-aws-signers-v4/lib/faraday_middleware/request/aws_signers_v4.rb @@ -0,0 +1,57 @@ +require 'faraday_middleware/ext/uri_ext' +require 'faraday_middleware/aws_signers_v4_ext' + +class FaradayMiddleware::AwsSignersV4 < Faraday::Middleware + class Request + def initialize(env) + @env = env + end + + def headers + @env.request_headers + end + + def body + @env.body || '' + end + + def endpoint + url = @env.url.dup + + # Escape the query string or the request won't sign correctly + if url and url.query + re_escape_query!(url) + end + + url + end + + def http_method + @env.method.to_s.upcase + end + + private + + def re_escape_query!(url) + params = URI.decode_www_form(url.query) + + if params.any? {|k, v| v =~ / / } + url.query = URI.seahorse_encode_www_form(params) + end + end + end # of class Request + + def initialize(app, options = nil) + super(app) + + @credentials = options.fetch(:credentials) + @service_name = options.fetch(:service_name) + @region = options.fetch(:region) + end + + def call(env) + req = Request.new(env) + Aws::Signers::V4.new(@credentials, @service_name, @region).sign(req) + @app.call(env) + end +end diff --git a/gitlab-peek/.gitignore b/gitlab-peek/.gitignore new file mode 100644 index 0000000000..b40785bb9f --- /dev/null +++ b/gitlab-peek/.gitignore @@ -0,0 +1,19 @@ +*.gem +*.rbc +.bundle +.config +.yardoc +Gemfile.lock +InstalledFiles +_yardoc +coverage +doc/ +lib/bundler/man +pkg +rdoc +spec/reports +test/dummy/log +test/tmp +test/version_tmp +tmp +/bin diff --git a/gitlab-peek/.travis.yml b/gitlab-peek/.travis.yml new file mode 100644 index 0000000000..e199796b03 --- /dev/null +++ b/gitlab-peek/.travis.yml @@ -0,0 +1,12 @@ +language: ruby + +rvm: + - 2.3.1 + - 2.2.5 + +gemfile: + - Gemfile # Rails 5 + - Gemfile-rails42 + +notifications: + email: false diff --git a/gitlab-peek/CHANGELOG.md b/gitlab-peek/CHANGELOG.md new file mode 100644 index 0000000000..f7c9f935b4 --- /dev/null +++ b/gitlab-peek/CHANGELOG.md @@ -0,0 +1,96 @@ +# 0.0.1 + +- Initial release. + +# 0.0.2 + +- Add own tipsy plugin to allow for tooltips. + +# 0.0.3 + +- Change the scope of the .tipsy selector as it's inserted outside of the Glimpse div. + +# 0.0.4 + +- Don't capture ` being pressed when in combination with `cmd` +- Support for [Turbolinks](https://github.com/rails/turbolinks) (#14) + +# 0.0.5 + +- Namespace the tooltips to the `.glimpse-tooltip` class name to not conflict with any application styles for `.tooltip`. (#18) + +# 0.0.6 + +- Added Peek::Views::View#parse_options that gets called within initialize for subclasses to use to parse their options. 
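+
+A hypothetical view illustrating that hook (`Peek::Views::View` stores the
+options passed to `Peek.into` and calls `parse_options` from `initialize`;
+the view below and its option are invented for illustration):
+
+```ruby
+# Hypothetical example view, not part of the gem.
+class Peek::Views::Greeter < Peek::Views::View
+  def parse_options
+    # @options holds whatever was passed to Peek.into
+    @name = @options.fetch(:name, 'world')
+  end
+
+  def results
+    { name: @name }
+  end
+end
+
+# config/initializers/peek.rb
+Peek.into Peek::Views::Greeter, name: 'GitLab'
+```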
+ +# 0.1.0 + +- Introduced a new JS event `peek:render` that includes the request id and request payload data that is used to update the information in the bar. +- Request information has moved from the `peek/results` partial to an AJAX request that happens on page load, and when PJAX/Turbolinks change pages. +- Removed the need for `peek/results` partial. +- Introduced a Redis and Memcache adapter for multi-server environments to store request payloads. +- Tooltips automatically repositions depending on where the Peek bar is. + +# 0.1.1 + +- Fix bug with how `peek:render` was passing arguments around. + +# 0.1.2 + +- Fix path to memcache adapter - [#34](https://github.com/peek/peek/pull/34) [@grk](https://github.com/grk) +- Prevent namespace collision when using [peek-dalli](https://github.com/peek/peek-dalli) - [#34](https://github.com/peek/peek/pull/34) [@grk](https://github.com/grk) + +# 0.1.3 + +- Remove Redis dependency from Gemfile + +# 0.1.4 + +- Don't access xhr object when not present in pjax:end + +# 0.1.5 + +- Don't trigger `peek:update` event when the peek bar isn't present - [#37](https://github.com/peek/peek/issues/37) [@dewski](https://github.com/dewski) +- Add `after_request` helper method for Peek::Views::View to help reset state + +# 0.1.6 + +- Use `event.which` for normalization between `event.keyCode` and `event.charCode` - [#38](https://github.com/peek/peek/pull/38) [@leongersing](https://github.com/leongersing) + +# 0.1.7 + +- Support all Rails 3.x.x versions by not using `request.uuid` instead `env` - [#39](https://github.com/peek/peek/pull/39) [@bryanmikaelian](https://github.com/bryanmikaelian) + +# 0.1.8 + +- Include the ControllerHelpers directly into `ActionController::Base` - [#41](https://github.com/peek/peek/pull/41) [@lucasmazza](https://github.com/lucasmazza) + +# 0.1.9 + +- Rescue & log Dalli exceptions instead of crashing - [#50](https://github.com/peek/peek/pull/50) [@barunio](https://github.com/barunio) + +# 0.1.10 + +- Take off Responders - [#65](https://github.com/peek/peek/pull/65) [@nwjsmith](https://github.com/nwjsmith) + +# 0.2.0 + +- Add image dependencies for asset compilation - [#67](https://github.com/peek/peek/pull/67) [@fedesoria](https://github.com/fedesoria) +- Update JSON to 1.8.2 for compatibility with Ruby 2.2.2 - [#73](https://github.com/peek/peek/pull/73) [@eitoball](https://github.com/eitoball) +- Use concurrent-ruby gem in favor of deprecated atomic gem. - [#73](https://github.com/peek/peek/pull/73) [@eitoball](https://github.com/eitoball) +- Prevent errors if compatibility mode is enabled for jQuery - [#74](https://github.com/peek/peek/pull/74) [@warseph](https://github.com/warseph) +- Add dependency for coffee-rails for Rails. - [#80](https://github.com/peek/peek/pull/80) [@cllns](https://github.com/cllns) +- Fix deprecation warnings for `helper_method` for future versions of Rails past Rails v5. - [#85](https://github.com/peek/peek/pull/85) [@mgrachev](https://github.com/mgrachev) +- Listen to Turbolinks v5 `turbolinks:load` JS event to trigger peek updates. 
- [#88](https://github.com/peek/peek/pull/88) [@lucasmazza](https://github.com/lucasmazza) + +# 1.0 + +- Rails 4+ compatibility + - Update `before_filter` to `before_action` @mkcode + - Replace GIFs with colors @tarebyte +- Remove CoffeeScript @dewski +- Use Ruby JSON syntax for hashes + +# Next + +- Ensure that Peek can handle concurrent requests with a multithreaded application server diff --git a/gitlab-peek/Gemfile b/gitlab-peek/Gemfile new file mode 100644 index 0000000000..bf6a492734 --- /dev/null +++ b/gitlab-peek/Gemfile @@ -0,0 +1,30 @@ +source 'https://rubygems.org' + +# Specify your gem's dependencies in peek.gemspec +gemspec + +gem 'rake' +gem 'json', '~> 2.0', '>= 2.0.2' + +# For the test application +gem 'rails', '~> 5.0', '>= 5.0.0.1' +gem 'concurrent-ruby', '>= 0.9.0' +gem 'concurrent-ruby-ext', '>= 0.9.0' + +# Use SCSS for stylesheets +gem 'sass-rails', '~> 5.0' + +# Use Uglifier as compressor for JavaScript assets +gem 'uglifier', '>= 1.3.0' + +# Use CoffeeScript for .js.coffee assets and views +gem 'coffee-rails', '~> 4.2' + +# See https://github.com/sstephenson/execjs#readme for more supported runtimes +# gem 'therubyracer', platforms: :ruby + +# Use jquery as the JavaScript library +gem 'jquery-rails' + +# Turbolinks makes following links in your web application faster. Read more: https://github.com/rails/turbolinks +gem 'turbolinks' diff --git a/gitlab-peek/Gemfile-rails42 b/gitlab-peek/Gemfile-rails42 new file mode 100644 index 0000000000..67eec6c9f9 --- /dev/null +++ b/gitlab-peek/Gemfile-rails42 @@ -0,0 +1,27 @@ +source 'https://rubygems.org' + +# Specify your gem's dependencies in peek.gemspec +gemspec + +gem 'rake' +gem 'json', '~> 1.8.3' + +gem 'rails', '~> 4.2.7.1' + +# Use SCSS for stylesheets +gem 'sass-rails', '~> 5.0.0' + +# Use Uglifier as compressor for JavaScript assets +gem 'uglifier', '>= 1.3.0' + +# Use CoffeeScript for .js.coffee assets and views +gem 'coffee-rails', '~> 4.1.0' + +# See https://github.com/sstephenson/execjs#readme for more supported runtimes +# gem 'therubyracer', platforms: :ruby + +# Use jquery as the JavaScript library +gem 'jquery-rails' + +# Turbolinks makes following links in your web application faster. Read more: https://github.com/rails/turbolinks +gem 'turbolinks' diff --git a/gitlab-peek/LICENSE.txt b/gitlab-peek/LICENSE.txt new file mode 100644 index 0000000000..c3a550c459 --- /dev/null +++ b/gitlab-peek/LICENSE.txt @@ -0,0 +1,22 @@ +Copyright (c) 2013 Garrett Bjerkhoel + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file
diff --git a/gitlab-peek/README.md b/gitlab-peek/README.md
new file mode 100644
index 0000000000..974107887d
--- /dev/null
+++ b/gitlab-peek/README.md
@@ -0,0 +1,253 @@
+# Peek
+
+## Fork notice
+
+This is a fork of https://github.com/peek/peek to support multi-threaded
+application servers, like Puma. The upstream pull request to add this is
+https://github.com/peek/peek/pull/113.
+
+This repo is a pull mirror of https://github.com/smcgivern/peek, the
+source repository for that PR.
+
+## Original README
+
+[![Build Status](https://travis-ci.org/peek/peek.svg?branch=master)](https://travis-ci.org/peek/peek) [![Gem Version](https://badge.fury.io/rb/peek.svg)](http://badge.fury.io/rb/peek) [![Inline docs](http://inch-ci.org/github/peek/peek.svg)](http://inch-ci.org/github/peek/peek)
+
+Take a peek into your Rails application.
+
+![Preview](https://f.cloud.github.com/assets/79995/244991/03cee1fa-8a74-11e2-8e33-283cf1298a60.png)
+
+This is a profiling tool originally built at GitHub to help us get an insight into our application. Now, we have extracted this into Peek, so that other Rails applications can experience the same benefit.
+
+Peek puts a little bar on top of your application to show you all sorts of helpful information about your application. From the screenshot above, you can see that Peek provides information about database queries, cache, Resque workers and more. However, this is only part of Peek's beauty.
+
+The true beauty of Peek lies in the fact that it is an extensible platform. If there are performance metrics that you need but that Peek does not provide, you may find them in the list of available [Peek Views](#available-peek-views) and integrate them into Peek. Even if you do not find what you want among the Peek Views, you can always [create your own](#creating-your-own-peek-item).
+
+## Installation
+
+Add this line to your application's Gemfile:
+
+    gem 'peek'
+
+And then execute:
+
+    $ bundle
+
+Or install it yourself as:
+
+    $ gem install peek
+
+## Usage
+
+Now that Peek is installed, you'll need to mount the engine within your `config/routes.rb`
+file:
+
+```ruby
+Some::Application.routes.draw do
+  mount Peek::Railtie => '/peek'
+  root to: 'home#show'
+end
+```
+
+To pick which views you want to see in your Peek bar, just create a file at
+`config/initializers/peek.rb` that has a list of the views you'd like to include:
+
+```ruby
+Peek.into Peek::Views::Git, nwo: 'github/janky'
+Peek.into Peek::Views::Mysql2
+Peek.into Peek::Views::Redis
+Peek.into Peek::Views::Dalli
+```
+
+Feel free to pick and install from the [list](https://github.com/peek/peek#available-peek-views) or create your own. The order in which views
+are added to Peek is the order in which they will appear in your bar.
+
+Next, to render the Peek bar in your application, add the following snippet
+just after the opening `<body>` tag in your application layout:
+
+```erb
+<%= render 'peek/bar' %>
+```
+
+It will look like:
+
+```erb
+<html>
+  <head>
+    <title>Application</title>
+  </head>
+  <body>
+    <%= render 'peek/bar' %>
+    <%= yield %>
+  </body>
+</html>
+```
+
+Peek fetches the data collected throughout your requests by using the unique request id
+that was assigned to the request by Rails. It will call out to its own controller at
+[Peek::ResultsController](https://github.com/peek/peek/blob/master/app/controllers/peek/results_controller.rb), which will render the data to be inserted into the bar.
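+
+Concretely, the results endpoint responds with a payload keyed by view and
+label, and the bundled `peek.js` (included later in this diff) copies each
+value into the matching `[data-defer-to=<view>-<label>]` element in the bar.
+A hypothetical payload, with invented view names and values:
+
+```ruby
+# Hypothetical shape of the JSON rendered for
+# GET /peek/results?request_id=..., shown as a Ruby hash:
+{
+  "data" => {
+    "git"    => { "sha" => "deadbeef" },  # fills [data-defer-to=git-sha]
+    "mysql2" => { "duration" => "12ms" }  # fills [data-defer-to=mysql2-duration]
+  }
+}
+```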
+
+Now that you have the partials in your application, you will need to include the
+CSS and JS that help make Peek :sparkles:
+
+In `app/assets/stylesheets/application.scss`:
+
+```scss
+//= require peek
+```
+
+In `app/assets/javascripts/application.coffee`:
+
+```coffeescript
+#= require jquery
+#= require jquery_ujs
+#= require peek
+```
+
+Note: each additional view may have its own CSS and JS that you need to require,
+as stated in its usage documentation.
+
+### Configuring the default adapter
+
+For Peek to work, it keeps track of all requests made in your application
+so it can report back and display that information in the Peek bar. By default
+it stores this information in memory, which is not recommended for production environments.
+
+In production environments you may have application servers on multiple hosts,
+in which case Peek will not be able to access the request data if it was saved in memory on
+another host. Peek provides additional adapters for multi-server environments.
+
+You can configure which adapter Peek uses by updating your application
+config or an individual environment config file. We'll use production as an example.
+
+Note: Peek does not provide the dependencies for each of these adapters. If you use these
+adapters, be sure to include their dependencies in your application:
+
+- Redis - The [redis](https://github.com/redis/redis-rb) gem
+- Dalli - The [dalli](https://github.com/mperham/dalli) gem
+- Elasticsearch - The [elasticsearch](https://github.com/elasticsearch/elasticsearch-ruby) gem
+
+```ruby
+Peeked::Application.configure do
+  # ...
+
+  # Redis with no options
+  config.peek.adapter = :redis
+
+  # Redis with options
+  config.peek.adapter = :redis, {
+    client: Redis.new,
+    expires_in: 60 * 30 # => 30 minutes in seconds
+  }
+
+  # Memcache with no options
+  config.peek.adapter = :memcache
+
+  # Memcache with options
+  config.peek.adapter = :memcache, {
+    client: Dalli::Client.new,
+    expires_in: 60 * 30 # => 30 minutes in seconds
+  }
+
+  # Elasticsearch with no options
+  config.peek.adapter = :elasticsearch
+
+  # Elasticsearch with options
+  config.peek.adapter = :elasticsearch, {
+    client: Elasticsearch::Client.new,
+    expires_in: 60 * 30, # => 30 minutes in seconds
+    index: 'peek_requests_index',
+    type: 'peek_request'
+  }
+
+  # ...
+end
+```
+
+Peek doesn't persist the request data forever. It uses a safe 30-minute cache
+length so that data will be available if you'd like to aggregate it or
+use it for other Peek views. You can shorten this to 30 seconds if you don't
+want the data to stick around.
+
+### Customizing the bar
+
+You can customize the appearance of the bar through your own application's CSS.
+
+One common example is fixing the Peek bar to the bottom, rather than the top, of a page, for use with [Bootstrap](http://getbootstrap.com/):
+
+```css
+#peek {
+  position: fixed;
+  bottom: 0;
+  left: 0;
+  right: 0;
+  z-index: 999;
+}
+```
+
+## Using Peek with PJAX
+
+It just works.
+
+## Using Peek with Turbolinks
+
+It just works.
+
+## Access Control
+
+Peek will only render in development and staging environments. If you'd
+like to whitelist a select number of users to view Peek in production, you
+can override the `peek_enabled?` guard in `ApplicationController`:
+
+```ruby
+class ApplicationController < ActionController::Base
+  def peek_enabled?
+    current_user.staff?
+  end
+end
+```
+
+## Available Peek views
+
+- [peek-active_resource](https://github.com/gotmayonase/peek-active_resource)
+- [peek-alt-routes](https://github.com/mkcode/peek-alt-routes)
+- [peek-dalli](https://github.com/peek/peek-dalli)
+- [peek-delayed_job](https://github.com/18F/peek-delayed_job)
+- [peek-faraday](https://github.com/grk/peek-faraday)
+- [peek-flexirest](https://github.com/andyjeffries/peek-flexirest)
+- [peek-gc](https://github.com/peek/peek-gc)
+- [peek-git](https://github.com/peek/peek-git)
+- [peek-host](https://github.com/jacobbednarz/peek-host)
+- [peek-mongo](https://github.com/peek/peek-mongo)
+- [peek-moped](https://github.com/nodkz/peek-moped)
+- [peek-mysql2](https://github.com/peek/peek-mysql2)
+- [peek-performance_bar](https://github.com/peek/peek-performance_bar)
+- [peek-pg](https://github.com/peek/peek-pg)
+- [peek-rblineprof](https://github.com/peek/peek-rblineprof)
+- [peek-redis](https://github.com/peek/peek-redis)
+- [peek-resque](https://github.com/peek/peek-resque)
+- [peek-sidekiq](https://github.com/suranyami/peek-sidekiq)
+- [peek-svn](https://github.com/neilco/peek-svn)
+- Unicorn :soon:
+
+Feel free to submit a Pull Request adding your own Peek item to this list.
+
+## Creating your own Peek item
+
+Each Peek item is a self-contained Rails engine, which gives you the power to
+use all the features of Ruby on Rails to dig deep within your application and
+report back to the Peek bar. A Peek item is just a custom class that
+is responsible for fetching and building the data that should be reported back
+to the user.
+
+There are still some docs to be written, but if you'd like to see a simple
+example of how to create your own, just check out [peek-git](https://github.com/peek/peek-git).
+For an example view, there is [Peek::Views::Git](https://github.com/peek/peek-git/blob/master/lib/peek/views/git.rb).
+
+## Contributing
+
+1. Fork it
+2. Create your feature branch (`git checkout -b my-new-feature`)
+3. Commit your changes (`git commit -am 'Add some feature'`)
+4. Push to the branch (`git push origin my-new-feature`)
+5. Create new Pull Request
diff --git a/gitlab-peek/Rakefile b/gitlab-peek/Rakefile
new file mode 100644
index 0000000000..d3a4ffe76c
--- /dev/null
+++ b/gitlab-peek/Rakefile
@@ -0,0 +1,13 @@
+require 'bundler/gem_tasks'
+require 'rake/testtask'
+
+desc 'Default: run tests'
+task default: :test
+
+desc 'Run Peek tests.'
+Rake::TestTask.new do |t| + t.libs << 'lib' + t.libs << 'test' + t.test_files = FileList['test/**/*_test.rb'] + t.verbose = true +end diff --git a/gitlab-peek/app/assets/javascripts/peek.js b/gitlab-peek/app/assets/javascripts/peek.js new file mode 100644 index 0000000000..3f3ae4c2c8 --- /dev/null +++ b/gitlab-peek/app/assets/javascripts/peek.js @@ -0,0 +1,86 @@ +//= require peek/vendor/jquery.tipsy + +var requestId; + +requestId = null; + +(function($) { + var fetchRequestResults, getRequestId, initializeTipsy, peekEnabled, toggleBar, updatePerformanceBar; + getRequestId = function() { + if (requestId != null) { + return requestId; + } else { + return $('#peek').data('request-id'); + } + }; + peekEnabled = function() { + return $('#peek').length; + }; + updatePerformanceBar = function(results) { + var key, label; + for (key in results.data) { + for (label in results.data[key]) { + $("[data-defer-to=" + key + "-" + label + "]").text(results.data[key][label]); + } + } + return $(document).trigger('peek:render', [getRequestId(), results]); + }; + initializeTipsy = function() { + return $('#peek .peek-tooltip, #peek .tooltip').each(function() { + var el, gravity; + el = $(this); + gravity = el.hasClass('rightwards') || el.hasClass('leftwards') ? $.fn.tipsy.autoWE : $.fn.tipsy.autoNS; + return el.tipsy({ + gravity: gravity + }); + }); + }; + toggleBar = function(event) { + var wrapper; + if ($(event.target).is(':input')) { + return; + } + if (event.which === 96 && !event.metaKey) { + wrapper = $('#peek'); + if (wrapper.hasClass('disabled')) { + wrapper.removeClass('disabled'); + return document.cookie = "peek=true; path=/"; + } else { + wrapper.addClass('disabled'); + return document.cookie = "peek=false; path=/"; + } + } + }; + fetchRequestResults = function() { + return $.ajax('/peek/results', { + data: { + request_id: getRequestId() + }, + success: function(data, textStatus, xhr) { + return updatePerformanceBar(data); + }, + error: function(xhr, textStatus, error) {} + }); + }; + $(document).on('keypress', toggleBar); + $(document).on('peek:update', initializeTipsy); + $(document).on('peek:update', fetchRequestResults); + $(document).on('pjax:end', function(event, xhr, options) { + if (xhr != null) { + requestId = xhr.getResponseHeader('X-Request-Id'); + } + if (peekEnabled()) { + return $(this).trigger('peek:update'); + } + }); + $(document).on('page:change turbolinks:load', function() { + if (peekEnabled()) { + return $(this).trigger('peek:update'); + } + }); + return $(function() { + if (peekEnabled()) { + return $(this).trigger('peek:update'); + } + }); +})(jQuery); diff --git a/gitlab-peek/app/assets/javascripts/peek/vendor/jquery.tipsy.js b/gitlab-peek/app/assets/javascripts/peek/vendor/jquery.tipsy.js new file mode 100644 index 0000000000..db5db4755c --- /dev/null +++ b/gitlab-peek/app/assets/javascripts/peek/vendor/jquery.tipsy.js @@ -0,0 +1,258 @@ +// tipsy, facebook style tooltips for jquery +// version 1.0.0a +// (c) 2008-2010 jason frame [jason@onehackoranother.com] +// released under the MIT license + +(function($) { + + function maybeCall(thing, ctx) { + return (typeof thing == 'function') ? 
(thing.call(ctx)) : thing; + }; + + function isElementInDOM(ele) { + while (ele = ele.parentNode) { + if (ele == document) return true; + } + return false; + }; + + function Tipsy(element, options) { + this.$element = $(element); + this.options = options; + this.enabled = true; + this.fixTitle(); + }; + + Tipsy.prototype = { + show: function() { + var title = this.getTitle(); + if (title && this.enabled) { + var $tip = this.tip(); + + $tip.find('.tipsy-inner')[this.options.html ? 'html' : 'text'](title); + $tip[0].className = 'tipsy'; // reset classname in case of dynamic gravity + $tip.remove().css({top: 0, left: 0, visibility: 'hidden', display: 'block'}).prependTo(document.body); + + var pos = $.extend({}, this.$element.offset(), { + width: this.$element[0].offsetWidth, + height: this.$element[0].offsetHeight + }); + + var actualWidth = $tip[0].offsetWidth, + actualHeight = $tip[0].offsetHeight, + gravity = maybeCall(this.options.gravity, this.$element[0]); + + var tp; + switch (gravity.charAt(0)) { + case 'n': + tp = {top: pos.top + pos.height + this.options.offset, left: pos.left + pos.width / 2 - actualWidth / 2}; + break; + case 's': + tp = {top: pos.top - actualHeight - this.options.offset, left: pos.left + pos.width / 2 - actualWidth / 2}; + break; + case 'e': + tp = {top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left - actualWidth - this.options.offset}; + break; + case 'w': + tp = {top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left + pos.width + this.options.offset}; + break; + } + + if (gravity.length == 2) { + if (gravity.charAt(1) == 'w') { + tp.left = pos.left + pos.width / 2 - 15; + } else { + tp.left = pos.left + pos.width / 2 - actualWidth + 15; + } + } + + $tip.css(tp).addClass('tipsy-' + gravity); + $tip.find('.tipsy-arrow')[0].className = 'tipsy-arrow tipsy-arrow-' + gravity.charAt(0); + if (this.options.className) { + $tip.addClass(maybeCall(this.options.className, this.$element[0])); + } + + if (this.options.fade) { + $tip.stop().css({opacity: 0, display: 'block', visibility: 'visible'}).animate({opacity: this.options.opacity}); + } else { + $tip.css({visibility: 'visible', opacity: this.options.opacity}); + } + } + }, + + hide: function() { + if (this.options.fade) { + this.tip().stop().fadeOut(function() { $(this).remove(); }); + } else { + this.tip().remove(); + } + }, + + fixTitle: function() { + var $e = this.$element; + if ($e.attr('title') || typeof($e.attr('original-title')) != 'string') { + $e.attr('original-title', $e.attr('title') || '').removeAttr('title'); + } + }, + + getTitle: function() { + var title, $e = this.$element, o = this.options; + this.fixTitle(); + var title, o = this.options; + if (typeof o.title == 'string') { + title = $e.attr(o.title == 'title' ? 'original-title' : o.title); + } else if (typeof o.title == 'function') { + title = o.title.call($e[0]); + } + title = ('' + title).replace(/(^\s*|\s*$)/, ""); + return title || o.fallback; + }, + + tip: function() { + if (!this.$tip) { + this.$tip = $('
<div class="tipsy"></div>').html('<div class="tipsy-arrow"></div><div class="tipsy-inner"></div>
      '); + this.$tip.data('tipsy-pointee', this.$element[0]); + } + return this.$tip; + }, + + validate: function() { + if (!this.$element[0].parentNode) { + this.hide(); + this.$element = null; + this.options = null; + } + }, + + enable: function() { this.enabled = true; }, + disable: function() { this.enabled = false; }, + toggleEnabled: function() { this.enabled = !this.enabled; } + }; + + $.fn.tipsy = function(options) { + + if (options === true) { + return this.data('tipsy'); + } else if (typeof options == 'string') { + var tipsy = this.data('tipsy'); + if (tipsy) tipsy[options](); + return this; + } + + options = $.extend({}, $.fn.tipsy.defaults, options); + + function get(ele) { + var tipsy = $.data(ele, 'tipsy'); + if (!tipsy) { + tipsy = new Tipsy(ele, $.fn.tipsy.elementOptions(ele, options)); + $.data(ele, 'tipsy', tipsy); + } + return tipsy; + } + + function enter() { + var tipsy = get(this); + tipsy.hoverState = 'in'; + if (options.delayIn == 0) { + tipsy.show(); + } else { + tipsy.fixTitle(); + setTimeout(function() { if (tipsy.hoverState == 'in') tipsy.show(); }, options.delayIn); + } + }; + + function leave() { + var tipsy = get(this); + tipsy.hoverState = 'out'; + if (options.delayOut == 0) { + tipsy.hide(); + } else { + setTimeout(function() { if (tipsy.hoverState == 'out') tipsy.hide(); }, options.delayOut); + } + }; + + if (!options.live) this.each(function() { get(this); }); + + if (options.trigger != 'manual') { + var binder = options.live ? 'live' : 'bind', + eventIn = options.trigger == 'hover' ? 'mouseenter' : 'focus', + eventOut = options.trigger == 'hover' ? 'mouseleave' : 'blur'; + this[binder](eventIn, enter)[binder](eventOut, leave); + } + + return this; + + }; + + $.fn.tipsy.defaults = { + className: null, + delayIn: 0, + delayOut: 0, + fade: false, + fallback: '', + gravity: 'n', + html: false, + live: false, + offset: 0, + opacity: 0.8, + title: 'title', + trigger: 'hover' + }; + + $.fn.tipsy.revalidate = function() { + $('.tipsy').each(function() { + var pointee = $.data(this, 'tipsy-pointee'); + if (!pointee || !isElementInDOM(pointee)) { + $(this).remove(); + } + }); + }; + + // Overwrite this method to provide options on a per-element basis. + // For example, you could store the gravity in a 'tipsy-gravity' attribute: + // return $.extend({}, options, {gravity: $(ele).attr('tipsy-gravity') || 'n' }); + // (remember - do not modify 'options' in place!) + $.fn.tipsy.elementOptions = function(ele, options) { + return $.metadata ? $.extend({}, options, $(ele).metadata()) : options; + }; + + $.fn.tipsy.autoNS = function() { + return $(this).offset().top > ($(document).scrollTop() + $(window).height() / 2) ? 's' : 'n'; + }; + + $.fn.tipsy.autoWE = function() { + return $(this).offset().left > ($(document).scrollLeft() + $(window).width() / 2) ? 'e' : 'w'; + }; + + /** + * yields a closure of the supplied parameters, producing a function that takes + * no arguments and is suitable for use as an autogravity function like so: + * + * @param margin (int) - distance from the viewable region edge that an + * element should be before setting its tooltip's gravity to be away + * from that edge. + * @param prefer (string, e.g. 'n', 'sw', 'w') - the direction to prefer + * if there are no viewable region edges effecting the tooltip's + * gravity. 
It will try to vary from this minimally, for example, + * if 'sw' is preferred and an element is near the right viewable + * region edge, but not the top edge, it will set the gravity for + * that element's tooltip to be 'se', preserving the southern + * component. + */ + $.fn.tipsy.autoBounds = function(margin, prefer) { + return function() { + var dir = {ns: prefer[0], ew: (prefer.length > 1 ? prefer[1] : false)}, + boundTop = $(document).scrollTop() + margin, + boundLeft = $(document).scrollLeft() + margin, + $this = $(this); + + if ($this.offset().top < boundTop) dir.ns = 'n'; + if ($this.offset().left < boundLeft) dir.ew = 'w'; + if ($(window).width() + $(document).scrollLeft() - $this.offset().left < margin) dir.ew = 'e'; + if ($(window).height() + $(document).scrollTop() - $this.offset().top < margin) dir.ns = 's'; + + return dir.ns + (dir.ew ? dir.ew : ''); + } + }; + +})(jQuery); diff --git a/gitlab-peek/app/assets/stylesheets/peek.scss b/gitlab-peek/app/assets/stylesheets/peek.scss new file mode 100644 index 0000000000..5e0195b6b3 --- /dev/null +++ b/gitlab-peek/app/assets/stylesheets/peek.scss @@ -0,0 +1,85 @@ +//= require peek/vendor/tipsy + +#peek { + background: #000; + height: 35px; + line-height: 35px; + color: #999; + text-shadow: 0 1px 1px rgba(0, 0, 0, 0.75); + + .hidden { + display: none; + visibility: visible; + } + + &.disabled { + display: none; + } + + &.production { + background-color: #222; + } + + &.staging { + background-color: #291430; + } + + &.development { + background-color: #4c1210; + } + + .wrapper { + width: 800px; + margin: 0 auto; + } + + // UI Elements + .bucket { + background: #111; + display: inline-block; + padding: 4px 6px; + font-family: Consolas, "Liberation Mono", Courier, monospace; + line-height: 1; + color: #ccc; + border-radius: 3px; + box-shadow: 0 1px 0 rgba(255,255,255,.2), inset 0 1px 2px rgba(0,0,0,.25); + + .hidden { + display: none; + } + + &:hover .hidden { + display: inline; + } + } + + strong { + color: #fff; + } + + .view { + margin-right: 15px; + float: left; + + &:last-child { + margin-right: 0; + } + } + + .css-truncate { + &.css-truncate-target, + .css-truncate-target { + display: inline-block; + max-width: 125px; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + vertical-align: top; + } + + &.expandable:hover .css-truncate-target, + &.expandable:hover.css-truncate-target { + max-width: 10000px !important; + } + } +} diff --git a/gitlab-peek/app/assets/stylesheets/peek/vendor/tipsy.scss b/gitlab-peek/app/assets/stylesheets/peek/vendor/tipsy.scss new file mode 100644 index 0000000000..7582034001 --- /dev/null +++ b/gitlab-peek/app/assets/stylesheets/peek/vendor/tipsy.scss @@ -0,0 +1,22 @@ +.tipsy { font-size: 10px; position: absolute; padding: 5px; z-index: 100000; } + .tipsy-inner { background-color: #000; color: #FFF; max-width: 200px; padding: 5px 8px 4px 8px; text-align: center; } + + /* Rounded corners */ + .tipsy-inner { border-radius: 3px; -moz-border-radius: 3px; -webkit-border-radius: 3px; } + + .tipsy-arrow { position: absolute; width: 0; height: 0; line-height: 0; border: 5px dashed #000; } + + /* Rules to colour arrows */ + .tipsy-arrow-n { border-bottom-color: #000; } + .tipsy-arrow-s { border-top-color: #000; } + .tipsy-arrow-e { border-left-color: #000; } + .tipsy-arrow-w { border-right-color: #000; } + + .tipsy-n .tipsy-arrow { top: 0px; left: 50%; margin-left: -5px; border-bottom-style: solid; border-top: none; border-left-color: transparent; border-right-color: transparent; } + .tipsy-nw 
.tipsy-arrow { top: 0; left: 10px; border-bottom-style: solid; border-top: none; border-left-color: transparent; border-right-color: transparent;} + .tipsy-ne .tipsy-arrow { top: 0; right: 10px; border-bottom-style: solid; border-top: none; border-left-color: transparent; border-right-color: transparent;} + .tipsy-s .tipsy-arrow { bottom: 0; left: 50%; margin-left: -5px; border-top-style: solid; border-bottom: none; border-left-color: transparent; border-right-color: transparent; } + .tipsy-sw .tipsy-arrow { bottom: 0; left: 10px; border-top-style: solid; border-bottom: none; border-left-color: transparent; border-right-color: transparent; } + .tipsy-se .tipsy-arrow { bottom: 0; right: 10px; border-top-style: solid; border-bottom: none; border-left-color: transparent; border-right-color: transparent; } + .tipsy-e .tipsy-arrow { right: 0; top: 50%; margin-top: -5px; border-left-style: solid; border-right: none; border-top-color: transparent; border-bottom-color: transparent; } + .tipsy-w .tipsy-arrow { left: 0; top: 50%; margin-top: -5px; border-right-style: solid; border-left: none; border-top-color: transparent; border-bottom-color: transparent; } diff --git a/gitlab-peek/app/controllers/peek/results_controller.rb b/gitlab-peek/app/controllers/peek/results_controller.rb new file mode 100644 index 0000000000..7554d6796f --- /dev/null +++ b/gitlab-peek/app/controllers/peek/results_controller.rb @@ -0,0 +1,25 @@ +module Peek + class ResultsController < ApplicationController + before_action :restrict_non_access + + def show + respond_to do |format| + format.json do + if request.xhr? + render json: Peek.adapter.get(params[:request_id]) + else + render nothing: true, status: :not_found + end + end + end + end + + private + + def restrict_non_access + unless peek_enabled? + raise ActionController::RoutingError.new('Not Found') + end + end + end +end diff --git a/gitlab-peek/app/views/peek/_bar.html.erb b/gitlab-peek/app/views/peek/_bar.html.erb new file mode 100644 index 0000000000..47f8d13a58 --- /dev/null +++ b/gitlab-peek/app/views/peek/_bar.html.erb @@ -0,0 +1,11 @@ +<% if peek_enabled? %> +
<div id="peek" class="<%= Peek.env %>" data-request-id="<%= peek_request_id %>"> + <div class="wrapper"> + <% Peek.views.each do |view| %> + <div id="<%= view.dom_id %>" class="view"> + <%= render view.partial_path, view: view %> + </div> + <% end %> + </div> + </div>
      +<% end %> diff --git a/gitlab-peek/config/routes.rb b/gitlab-peek/config/routes.rb new file mode 100644 index 0000000000..424fc3b9f2 --- /dev/null +++ b/gitlab-peek/config/routes.rb @@ -0,0 +1,3 @@ +Peek::Railtie.routes.draw do + get '/results' => 'results#show', as: :results +end diff --git a/gitlab-peek/gitlab-peek.gemspec b/gitlab-peek/gitlab-peek.gemspec new file mode 100644 index 0000000000..55db44eb12 --- /dev/null +++ b/gitlab-peek/gitlab-peek.gemspec @@ -0,0 +1,22 @@ +# -*- encoding: utf-8 -*- +lib = File.expand_path('../lib', __FILE__) +$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) +require 'peek/version' + +Gem::Specification.new do |gem| + gem.name = 'gitlab-peek' + gem.version = Peek::VERSION + gem.authors = ['Garrett Bjerkhoel'] + gem.email = ['me@garrettbjerkhoel.com'] + gem.description = %q{Take a peek into your Rails application.} + gem.summary = %q{Take a peek into your Rails application.} + gem.homepage = 'https://github.com/peek/peek' + gem.license = 'MIT' + + gem.files = `git ls-files`.split($/) + gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) } + gem.test_files = gem.files.grep(%r{^(test|spec|features)/}) + gem.require_paths = ['lib'] + + gem.add_dependency 'railties', '>= 4.0.0' +end diff --git a/gitlab-peek/lib/peek.rb b/gitlab-peek/lib/peek.rb new file mode 100644 index 0000000000..87006d55b2 --- /dev/null +++ b/gitlab-peek/lib/peek.rb @@ -0,0 +1,93 @@ +require 'peek/version' +require 'rails' + +require 'peek/adapters/memory' +require 'peek/views/view' + +module Peek + ALLOWED_ENVS = ['development', 'staging'].freeze + + def self.adapter + @adapter + end + + def self.adapter=(*adapter_options) + adapter, *parameters = *Array.wrap(adapter_options).flatten + + @adapter = case adapter + when Symbol + adapter_class_name = adapter.to_s.camelize + adapter_class = + begin + require "peek/adapters/#{adapter}" + rescue LoadError => e + raise "Could not find adapter for #{adapter} (#{e})" + else + Peek::Adapters.const_get(adapter_class_name) + end + adapter_class.new(*parameters) + when nil + Peek::Adapters::Memory.new + else + adapter + end + + @adapter + end + + def self.enabled? + ALLOWED_ENVS.include?(env) + end + + def self.env + Rails.env + end + + def self.views + @cached_views ||= if @views && @views.any? + @views.collect { |klass, options| klass.new(options.dup) }.select(&:enabled?) + else + [] + end + end + + def self.results + results = { + context: {}, + data: Hash.new { |h, k| h[k] = {} } + } + + views.each do |view| + if view.context? + results[:context][view.key] = view.context + end + + view.results.each do |key, value| + results[:data][view.key][key] = value + end + end + + results + end + + def self.into(klass, options = {}) + @views ||= [] + @views << [klass, options] + end + + # Clears out any and all views. + # + # Returns nothing. 
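+ # + # Illustrative sketch only, borrowing the Staff view class from this + # patch's test suite; the return values shown are assumptions: + # + # Peek.into Staff, username: 'dewski' + # Peek.views.first # => #<Staff ...> + # Peek.reset + # Peek.views # => []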
+ def self.reset + @views = nil + @cached_views = nil + end + + def self.setup + ActiveSupport::Deprecation.warn "'Peek.setup' is deprecated and does nothing.", caller + end +end + +require 'peek/railtie' + +ActiveSupport.run_load_hooks(:peek, Peek) diff --git a/gitlab-peek/lib/peek/adapters/base.rb b/gitlab-peek/lib/peek/adapters/base.rb new file mode 100644 index 0000000000..069b8e7dfc --- /dev/null +++ b/gitlab-peek/lib/peek/adapters/base.rb @@ -0,0 +1,17 @@ +module Peek + module Adapters + class Base + def initialize(options = {}) + + end + + def get(request_id) + raise "#{self.class}#get(request_id) is not yet implemented" + end + + def save + raise "#{self.class}#save is not yet implemented" + end + end + end +end diff --git a/gitlab-peek/lib/peek/adapters/elasticsearch.rb b/gitlab-peek/lib/peek/adapters/elasticsearch.rb new file mode 100644 index 0000000000..72a28de96d --- /dev/null +++ b/gitlab-peek/lib/peek/adapters/elasticsearch.rb @@ -0,0 +1,33 @@ +require 'peek/adapters/base' +require 'elasticsearch' + +module Peek + module Adapters + class Elasticsearch < Base + def initialize(options = {}) + @client = options.fetch(:client, ::Elasticsearch::Client.new) + @expires_in = Integer(options.fetch(:expires_in, 60 * 30) * 1000) + @index = options.fetch(:index, 'peek_requests_index') + @type = options.fetch(:type, 'peek_request') + end + + def get(request_id) + result = @client.get_source index: @index, type: @type, id: "#{request_id}" + result.to_json + rescue ::Elasticsearch::Transport::Transport::Errors::NotFound + # pass + end + + def save(request_id) + @client.index index: @index, + type: @type, + id: "#{request_id}", + body: Peek.results.to_json, + ttl: @expires_in + rescue ::Elasticsearch::Transport::Transport::Errors::BadRequest + false + end + + end + end +end diff --git a/gitlab-peek/lib/peek/adapters/memcache.rb b/gitlab-peek/lib/peek/adapters/memcache.rb new file mode 100644 index 0000000000..ea7a1a9bf3 --- /dev/null +++ b/gitlab-peek/lib/peek/adapters/memcache.rb @@ -0,0 +1,25 @@ +require 'peek/adapters/base' +require 'dalli' + +module Peek + module Adapters + class Memcache < Base + def initialize(options = {}) + @client = options.fetch(:client, ::Dalli::Client.new) + @expires_in = options.fetch(:expires_in, 60 * 30) + end + + def get(request_id) + @client.get("peek:requests:#{request_id}") + rescue ::Dalli::DalliError => e + Rails.logger.error "#{e.class.name}: #{e.message}" + end + + def save(request_id) + @client.add("peek:requests:#{request_id}", Peek.results.to_json, @expires_in) + rescue ::Dalli::DalliError => e + Rails.logger.error "#{e.class.name}: #{e.message}" + end + end + end +end diff --git a/gitlab-peek/lib/peek/adapters/memory.rb b/gitlab-peek/lib/peek/adapters/memory.rb new file mode 100644 index 0000000000..1114323e2e --- /dev/null +++ b/gitlab-peek/lib/peek/adapters/memory.rb @@ -0,0 +1,25 @@ +require 'peek/adapters/base' + +module Peek + module Adapters + class Memory < Base + attr_accessor :requests + + def initialize(options = {}) + @requests = {} + end + + def get(request_id) + @requests[request_id] + end + + def save(request_id) + @requests[request_id] = Peek.results + end + + def reset + @requests.clear + end + end + end +end diff --git a/gitlab-peek/lib/peek/adapters/redis.rb b/gitlab-peek/lib/peek/adapters/redis.rb new file mode 100644 index 0000000000..45e82cac79 --- /dev/null +++ b/gitlab-peek/lib/peek/adapters/redis.rb @@ -0,0 +1,21 @@ +require 'peek/adapters/base' +require 'redis' + +module Peek + module Adapters + class Redis < Base + def 
initialize(options = {}) + @client = options.fetch(:client, ::Redis.new) + @expires_in = Integer(options.fetch(:expires_in, 60 * 30)) + end + + def get(request_id) + @client.get("peek:requests:#{request_id}") + end + + def save(request_id) + @client.setex("peek:requests:#{request_id}", @expires_in, Peek.results.to_json) + end + end + end +end diff --git a/gitlab-peek/lib/peek/controller_helpers.rb b/gitlab-peek/lib/peek/controller_helpers.rb new file mode 100644 index 0000000000..4124ae4a48 --- /dev/null +++ b/gitlab-peek/lib/peek/controller_helpers.rb @@ -0,0 +1,22 @@ +module Peek + module ControllerHelpers + extend ActiveSupport::Concern + + included do + if respond_to? :helper_method + helper_method :peek_enabled? + helper_method :peek_request_id + end + end + + protected + + def peek_enabled? + Peek.enabled? + end + + def peek_request_id + request.env['action_dispatch.request_id'] + end + end +end diff --git a/gitlab-peek/lib/peek/railtie.rb b/gitlab-peek/lib/peek/railtie.rb new file mode 100644 index 0000000000..7740eb44d9 --- /dev/null +++ b/gitlab-peek/lib/peek/railtie.rb @@ -0,0 +1,37 @@ +require 'peek/controller_helpers' + +module Peek + class Railtie < ::Rails::Engine + isolate_namespace Peek + engine_name :peek + + config.peek = ActiveSupport::OrderedOptions.new + + # Default adapter + config.peek.adapter = :memory + + initializer 'peek.set_configs' do |app| + ActiveSupport.on_load(:peek) do + app.config.peek.each do |k,v| + send "#{k}=", v + end + end + end + + initializer 'peek.persist_request_data' do + ActiveSupport::Notifications.subscribe('process_action.action_controller') do |_name, _start, _finish, _id, payload| + Peek.adapter.save(payload[:headers].env['action_dispatch.request_id']) + end + end + + initializer 'peek.include_controller_helpers' do + ActiveSupport.on_load(:action_controller) do + include Peek::ControllerHelpers + end + + config.to_prepare do + Peek.views + end + end + end +end diff --git a/gitlab-peek/lib/peek/version.rb b/gitlab-peek/lib/peek/version.rb new file mode 100644 index 0000000000..3d643ebd5a --- /dev/null +++ b/gitlab-peek/lib/peek/version.rb @@ -0,0 +1,3 @@ +module Peek + VERSION = '0.0.1' +end diff --git a/gitlab-peek/lib/peek/views/view.rb b/gitlab-peek/lib/peek/views/view.rb new file mode 100644 index 0000000000..be620e6f94 --- /dev/null +++ b/gitlab-peek/lib/peek/views/view.rb @@ -0,0 +1,124 @@ +module Peek + module Views + class View + def initialize(options = {}) + @options = options + + parse_options + setup_subscribers + end + + # Where any subclasses should pick and pull from @options to set any and + # all instance variables they like. + # + # Returns nothing. + def parse_options + # pass + end + + # Conditionally enable views based on any gathered data. Helpful + # if you don't want views to show up when they return 0 or are + # touched during the request. + # + # Returns true. + def enabled? + true + end + + # The path to the partial that will be rendered to the Peek bar. + # + # Examples: + # + # Peek::Views::PerformanceBar.partial_path => "peek/views/performance_bar" + # CustomResque.partial_path => "performance_bar" + # + # Returns String. + def partial_path + self.class.to_s.underscore + end + + # The defer key that is derived from the classname. + # + # Examples: + # + # Peek::Views::PerformanceBar => "performance-bar" + # Peek::Views::Resque => "resque" + # + # Returns String. 
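+ # + # For reference, the bundled peek.js consumes this key when it fills + # deferred placeholders: a results hash such as { number: 1 } on a view + # keyed "test-view" updates any element matching + # [data-defer-to=test-view-number].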
+ def key + self.class.to_s.split('::').last.underscore.gsub(/\_/, '-') + end + alias defer_key key + + # The context id that is derived from the classname. + # + # Examples: + # + # Peek::Views::PerformanceBar => "peek-context-performance-bar" + # Peek::Views::Resque => "peek-context-resque" + # + # Returns String. + def context_id + "peek-context-#{key}" + end + + # The wrapper ID for the individual view in the Peek bar. + # + # Returns String. + def dom_id + "peek-view-#{key}" + end + + # Additional context for any view to render tooltips for. + # + # Returns Hash. + def context + {} + end + + def context? + context.any? + end + + # The data results that are inserted at the end of the request for use in + # deferred placeholders in the Peek the bar. + # + # Returns Hash. + def results + {} + end + + def results? + results.any? + end + + def subscribe(*args) + ActiveSupport::Notifications.subscribe(*args) do |name, start, finish, id, payload| + yield name, start, finish, id, payload + end + end + + private + + def setup_subscribers + # pass + end + + # Helper method for subscribing to the event that is fired when new + # requests are made. + def before_request + subscribe 'start_processing.action_controller' do |name, start, finish, id, payload| + yield name, start, finish, id, payload + end + end + + # Helper method for subscribing to the event that is fired when requests + # are finished. + def after_request + subscribe 'process_action.action_controller' do |name, start, finish, id, payload| + yield name, start, finish, id, payload + end + end + end + end +end diff --git a/gitlab-peek/test/controllers/requests_test.rb b/gitlab-peek/test/controllers/requests_test.rb new file mode 100644 index 0000000000..4353da46f7 --- /dev/null +++ b/gitlab-peek/test/controllers/requests_test.rb @@ -0,0 +1,33 @@ +require 'test_helper' +require_relative '../dummy/lib/test_view' + +class RequestsTest < ActionDispatch::IntegrationTest + setup do + Peek.adapter.reset + Peek.reset + end + + test "the request id is set" do + assert_empty Peek.adapter.requests + + get '/' + + assert_not_empty Peek.adapter.requests + end + + test "the request ID and data are set correctly for concurrent requests" do + Peek.into TestView + concurrent_requests = 10 + + assert_empty Peek.adapter.requests + + concurrent_requests.times.map do + Thread.new { get '/' } + end.map(&:join) + + result_sequence = Peek.adapter.requests.values.map { |value| value[:data]['test-view'][:number] } + + assert_equal Peek.adapter.requests.length, concurrent_requests + assert_equal result_sequence, 1.upto(concurrent_requests).to_a + end +end diff --git a/gitlab-peek/test/dummy/README.rdoc b/gitlab-peek/test/dummy/README.rdoc new file mode 100644 index 0000000000..dd4e97e22e --- /dev/null +++ b/gitlab-peek/test/dummy/README.rdoc @@ -0,0 +1,28 @@ +== README + +This README would normally document whatever steps are necessary to get the +application up and running. + +Things you may want to cover: + +* Ruby version + +* System dependencies + +* Configuration + +* Database creation + +* Database initialization + +* How to run the test suite + +* Services (job queues, cache servers, search engines, etc.) + +* Deployment instructions + +* ... + + +Please feel free to use a different markup language if you do not plan to run +rake doc:app. 
diff --git a/gitlab-peek/test/dummy/Rakefile b/gitlab-peek/test/dummy/Rakefile new file mode 100644 index 0000000000..4135d7a46a --- /dev/null +++ b/gitlab-peek/test/dummy/Rakefile @@ -0,0 +1,6 @@ +# Add your own tasks in files placed in lib/tasks ending in .rake, +# for example lib/tasks/capistrano.rake, and they will automatically be available to Rake. + +require File.expand_path('../config/application', __FILE__) + +Dummy::Application.load_tasks diff --git a/gitlab-peek/test/dummy/app/assets/javascripts/application.js b/gitlab-peek/test/dummy/app/assets/javascripts/application.js new file mode 100644 index 0000000000..f3108ca642 --- /dev/null +++ b/gitlab-peek/test/dummy/app/assets/javascripts/application.js @@ -0,0 +1,16 @@ +// This is a manifest file that'll be compiled into application.js, which will include all the files +// listed below. +// +// Any JavaScript/Coffee file within this directory, lib/assets/javascripts, vendor/assets/javascripts, +// or vendor/assets/javascripts of plugins, if any, can be referenced here using a relative path. +// +// It's not advisable to add code directly here, but if you do, it'll appear at the bottom of the +// compiled file. +// +// WARNING: THE FIRST BLANK LINE MARKS THE END OF WHAT'S TO BE PROCESSED, ANY BLANK LINE SHOULD +// GO AFTER THE REQUIRES BELOW. +// +//= require jquery +//= require jquery_ujs +//= require peek +//= require_tree . diff --git a/gitlab-peek/test/dummy/app/assets/stylesheets/application.css b/gitlab-peek/test/dummy/app/assets/stylesheets/application.css new file mode 100644 index 0000000000..365b7fac16 --- /dev/null +++ b/gitlab-peek/test/dummy/app/assets/stylesheets/application.css @@ -0,0 +1,14 @@ +/* + * This is a manifest file that'll be compiled into application.css, which will include all the files + * listed below. + * + * Any CSS and SCSS file within this directory, lib/assets/stylesheets, vendor/assets/stylesheets, + * or vendor/assets/stylesheets of plugins, if any, can be referenced here using a relative path. + * + * You're free to add application-wide styles to this file and they'll appear at the top of the + * compiled file, but it's generally better to create a new file per style scope. + * + *= require peek + *= require_self + *= require_tree . + */ diff --git a/gitlab-peek/test/dummy/app/controllers/application_controller.rb b/gitlab-peek/test/dummy/app/controllers/application_controller.rb new file mode 100644 index 0000000000..2afb5357af --- /dev/null +++ b/gitlab-peek/test/dummy/app/controllers/application_controller.rb @@ -0,0 +1,13 @@ +class ApplicationController < ActionController::Base + # Prevent CSRF attacks by raising an exception. + # For APIs, you may want to use :null_session instead. + protect_from_forgery with: :exception + + def peek_enabled? 
+ if defined?(@peek_enabled) + !!@peek_enabled + else + true + end + end +end diff --git a/gitlab-peek/test/dummy/app/controllers/home_controller.rb b/gitlab-peek/test/dummy/app/controllers/home_controller.rb new file mode 100644 index 0000000000..4f12041fe2 --- /dev/null +++ b/gitlab-peek/test/dummy/app/controllers/home_controller.rb @@ -0,0 +1,11 @@ +class HomeController < ApplicationController + def enabled + @peek_enabled = true + render :show + end + + def disabled + @peek_enabled = false + render :show + end +end diff --git a/gitlab-peek/test/dummy/app/helpers/application_helper.rb b/gitlab-peek/test/dummy/app/helpers/application_helper.rb new file mode 100644 index 0000000000..de6be7945c --- /dev/null +++ b/gitlab-peek/test/dummy/app/helpers/application_helper.rb @@ -0,0 +1,2 @@ +module ApplicationHelper +end diff --git a/gitlab-peek/test/dummy/app/views/home/show.html.erb b/gitlab-peek/test/dummy/app/views/home/show.html.erb new file mode 100644 index 0000000000..f1c81d3fe2 --- /dev/null +++ b/gitlab-peek/test/dummy/app/views/home/show.html.erb @@ -0,0 +1,5 @@ +

<h1>Home#show</h1> + <p>Find me in app/views/home/show.html.erb</p>
      + +<%= debug({ peek_enabled: @peek_enabled }) %> +<%= debug cookies %> diff --git a/gitlab-peek/test/dummy/app/views/layouts/application.html.erb b/gitlab-peek/test/dummy/app/views/layouts/application.html.erb new file mode 100644 index 0000000000..00cb6e33e8 --- /dev/null +++ b/gitlab-peek/test/dummy/app/views/layouts/application.html.erb @@ -0,0 +1,14 @@ + + + + Dummy + <%= stylesheet_link_tag "application", media: "all", "data-turbolinks-track" => true %> + <%= javascript_include_tag "application", "data-turbolinks-track" => true %> + <%= csrf_meta_tags %> + + +<%= render 'peek/bar' %> +<%= yield %> + + + diff --git a/gitlab-peek/test/dummy/app/views/peek/_test_view.html.erb b/gitlab-peek/test/dummy/app/views/peek/_test_view.html.erb new file mode 100644 index 0000000000..15a855c784 --- /dev/null +++ b/gitlab-peek/test/dummy/app/views/peek/_test_view.html.erb @@ -0,0 +1 @@ +... number diff --git a/gitlab-peek/test/dummy/bin/bundle b/gitlab-peek/test/dummy/bin/bundle new file mode 100755 index 0000000000..66e9889e8b --- /dev/null +++ b/gitlab-peek/test/dummy/bin/bundle @@ -0,0 +1,3 @@ +#!/usr/bin/env ruby +ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../../Gemfile', __FILE__) +load Gem.bin_path('bundler', 'bundle') diff --git a/gitlab-peek/test/dummy/bin/rails b/gitlab-peek/test/dummy/bin/rails new file mode 100755 index 0000000000..728cd85aa5 --- /dev/null +++ b/gitlab-peek/test/dummy/bin/rails @@ -0,0 +1,4 @@ +#!/usr/bin/env ruby +APP_PATH = File.expand_path('../../config/application', __FILE__) +require_relative '../config/boot' +require 'rails/commands' diff --git a/gitlab-peek/test/dummy/bin/rake b/gitlab-peek/test/dummy/bin/rake new file mode 100755 index 0000000000..17240489f6 --- /dev/null +++ b/gitlab-peek/test/dummy/bin/rake @@ -0,0 +1,4 @@ +#!/usr/bin/env ruby +require_relative '../config/boot' +require 'rake' +Rake.application.run diff --git a/gitlab-peek/test/dummy/config.ru b/gitlab-peek/test/dummy/config.ru new file mode 100644 index 0000000000..5bc2a619e8 --- /dev/null +++ b/gitlab-peek/test/dummy/config.ru @@ -0,0 +1,4 @@ +# This file is used by Rack-based servers to start the application. + +require ::File.expand_path('../config/environment', __FILE__) +run Rails.application diff --git a/gitlab-peek/test/dummy/config/application.rb b/gitlab-peek/test/dummy/config/application.rb new file mode 100644 index 0000000000..1db93ce965 --- /dev/null +++ b/gitlab-peek/test/dummy/config/application.rb @@ -0,0 +1,25 @@ +require File.expand_path('../boot', __FILE__) + +require 'action_controller/railtie' +require 'action_mailer/railtie' +require 'sprockets/railtie' +require 'rails/test_unit/railtie' + +Bundler.require(*Rails.groups) +require 'peek' + +module Dummy + class Application < Rails::Application + # Settings in config/environments/* take precedence over those specified here. + # Application configuration should go into files in config/initializers + # -- all .rb files in that directory are automatically loaded. + + # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone. + # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC. + # config.time_zone = 'Central Time (US & Canada)' + + # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded. 
+ # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s] + # config.i18n.default_locale = :de + end +end diff --git a/gitlab-peek/test/dummy/config/boot.rb b/gitlab-peek/test/dummy/config/boot.rb new file mode 100644 index 0000000000..ef360470a3 --- /dev/null +++ b/gitlab-peek/test/dummy/config/boot.rb @@ -0,0 +1,5 @@ +# Set up gems listed in the Gemfile. +ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../../../../Gemfile', __FILE__) + +require 'bundler/setup' if File.exists?(ENV['BUNDLE_GEMFILE']) +$LOAD_PATH.unshift File.expand_path('../../../../lib', __FILE__) diff --git a/gitlab-peek/test/dummy/config/environment.rb b/gitlab-peek/test/dummy/config/environment.rb new file mode 100644 index 0000000000..82be82d53a --- /dev/null +++ b/gitlab-peek/test/dummy/config/environment.rb @@ -0,0 +1,5 @@ +# Load the rails application. +require File.expand_path('../application', __FILE__) + +# Initialize the rails application. +Dummy::Application.initialize! diff --git a/gitlab-peek/test/dummy/config/environments/development.rb b/gitlab-peek/test/dummy/config/environments/development.rb new file mode 100644 index 0000000000..3fa450d2c9 --- /dev/null +++ b/gitlab-peek/test/dummy/config/environments/development.rb @@ -0,0 +1,23 @@ +Dummy::Application.configure do + # Settings specified here will take precedence over those in config/application.rb. + + # In the development environment your application's code is reloaded on + # every request. This slows down response time but is perfect for development + # since you don't have to restart the web server when you make code changes. + config.cache_classes = false + + # Do not eager load code on boot. + config.eager_load = false + + # Show full error reports and disable caching. + config.consider_all_requests_local = true + config.action_controller.perform_caching = false + + # Don't care if the mailer can't send. + config.action_mailer.raise_delivery_errors = false + + # Debug mode disables concatenation and preprocessing of assets. + # This option may cause significant delays in view rendering with a large + # number of complex assets. + config.assets.debug = true +end diff --git a/gitlab-peek/test/dummy/config/environments/test.rb b/gitlab-peek/test/dummy/config/environments/test.rb new file mode 100644 index 0000000000..afbc0ae77a --- /dev/null +++ b/gitlab-peek/test/dummy/config/environments/test.rb @@ -0,0 +1,36 @@ +Dummy::Application.configure do + # Settings specified here will take precedence over those in config/application.rb. + + # The test environment is used exclusively to run your application's + # test suite. You never need to work with it otherwise. Remember that + # your test database is "scratch space" for the test suite and is wiped + # and recreated between test runs. Don't rely on the data there! + config.cache_classes = true + + # Do not eager load code on boot. This avoids loading your whole application + # just for the purpose of running a single test. If you are using a tool that + # preloads Rails for running tests, you may have to set it to true. + config.eager_load = false + + # Configure static asset server for tests with Cache-Control for performance. + config.serve_static_assets = true + config.static_cache_control = "public, max-age=3600" + + # Show full error reports and disable caching. + config.consider_all_requests_local = true + config.action_controller.perform_caching = false + + # Raise exceptions instead of rendering exception templates. 
+ config.action_dispatch.show_exceptions = false + + # Disable request forgery protection in test environment. + config.action_controller.allow_forgery_protection = false + + # Tell Action Mailer not to deliver emails to the real world. + # The :test delivery method accumulates sent emails in the + # ActionMailer::Base.deliveries array. + config.action_mailer.delivery_method = :test + + # Print deprecation notices to the stderr. + config.active_support.deprecation = :stderr +end diff --git a/gitlab-peek/test/dummy/config/initializers/backtrace_silencers.rb b/gitlab-peek/test/dummy/config/initializers/backtrace_silencers.rb new file mode 100644 index 0000000000..59385cdf37 --- /dev/null +++ b/gitlab-peek/test/dummy/config/initializers/backtrace_silencers.rb @@ -0,0 +1,7 @@ +# Be sure to restart your server when you modify this file. + +# You can add backtrace silencers for libraries that you're using but don't wish to see in your backtraces. +# Rails.backtrace_cleaner.add_silencer { |line| line =~ /my_noisy_library/ } + +# You can also remove all the silencers if you're trying to debug a problem that might stem from framework code. +# Rails.backtrace_cleaner.remove_silencers! diff --git a/gitlab-peek/test/dummy/config/initializers/filter_parameter_logging.rb b/gitlab-peek/test/dummy/config/initializers/filter_parameter_logging.rb new file mode 100644 index 0000000000..4a994e1e7b --- /dev/null +++ b/gitlab-peek/test/dummy/config/initializers/filter_parameter_logging.rb @@ -0,0 +1,4 @@ +# Be sure to restart your server when you modify this file. + +# Configure sensitive parameters which will be filtered from the log file. +Rails.application.config.filter_parameters += [:password] diff --git a/gitlab-peek/test/dummy/config/initializers/inflections.rb b/gitlab-peek/test/dummy/config/initializers/inflections.rb new file mode 100644 index 0000000000..ac033bf9dc --- /dev/null +++ b/gitlab-peek/test/dummy/config/initializers/inflections.rb @@ -0,0 +1,16 @@ +# Be sure to restart your server when you modify this file. + +# Add new inflection rules using the following format. Inflections +# are locale specific, and you may define rules for as many different +# locales as you wish. All of these examples are active by default: +# ActiveSupport::Inflector.inflections(:en) do |inflect| +# inflect.plural /^(ox)$/i, '\1en' +# inflect.singular /^(ox)en/i, '\1' +# inflect.irregular 'person', 'people' +# inflect.uncountable %w( fish sheep ) +# end + +# These inflection rules are supported but not enabled by default: +# ActiveSupport::Inflector.inflections(:en) do |inflect| +# inflect.acronym 'RESTful' +# end diff --git a/gitlab-peek/test/dummy/config/initializers/mime_types.rb b/gitlab-peek/test/dummy/config/initializers/mime_types.rb new file mode 100644 index 0000000000..72aca7e441 --- /dev/null +++ b/gitlab-peek/test/dummy/config/initializers/mime_types.rb @@ -0,0 +1,5 @@ +# Be sure to restart your server when you modify this file. 
+ +# Add new mime types for use in respond_to blocks: +# Mime::Type.register "text/richtext", :rtf +# Mime::Type.register_alias "text/html", :iphone diff --git a/gitlab-peek/test/dummy/config/initializers/peek.rb b/gitlab-peek/test/dummy/config/initializers/peek.rb new file mode 100644 index 0000000000..0d10f02dc9 --- /dev/null +++ b/gitlab-peek/test/dummy/config/initializers/peek.rb @@ -0,0 +1,3 @@ +require_relative '../../../dummy/lib/test_view' + +Peek.into TestView diff --git a/gitlab-peek/test/dummy/config/initializers/secret_token.rb b/gitlab-peek/test/dummy/config/initializers/secret_token.rb new file mode 100644 index 0000000000..5aaa3c0b96 --- /dev/null +++ b/gitlab-peek/test/dummy/config/initializers/secret_token.rb @@ -0,0 +1,12 @@ +# Be sure to restart your server when you modify this file. + +# Your secret key is used for verifying the integrity of signed cookies. +# If you change this key, all old signed cookies will become invalid! + +# Make sure the secret is at least 30 characters and all random, +# no regular words or you'll be exposed to dictionary attacks. +# You can use `rake secret` to generate a secure secret key. + +# Make sure your secret_key_base is kept private +# if you're sharing your code publicly. +Rails.application.config.secret_key_base = '94c84623660ec36e05fa8584f7dad694c280aae1894eedb73fc017564933a4eea53962e7d8aba5dc33be928373045982ec9af92cef8150bb9576eaa55ad36d5b' diff --git a/gitlab-peek/test/dummy/config/initializers/session_store.rb b/gitlab-peek/test/dummy/config/initializers/session_store.rb new file mode 100644 index 0000000000..155f7b0285 --- /dev/null +++ b/gitlab-peek/test/dummy/config/initializers/session_store.rb @@ -0,0 +1,3 @@ +# Be sure to restart your server when you modify this file. + +Dummy::Application.config.session_store :cookie_store, key: '_dummy_session' diff --git a/gitlab-peek/test/dummy/config/initializers/wrap_parameters.rb b/gitlab-peek/test/dummy/config/initializers/wrap_parameters.rb new file mode 100644 index 0000000000..33725e95fd --- /dev/null +++ b/gitlab-peek/test/dummy/config/initializers/wrap_parameters.rb @@ -0,0 +1,14 @@ +# Be sure to restart your server when you modify this file. + +# This file contains settings for ActionController::ParamsWrapper which +# is enabled by default. + +# Enable parameter wrapping for JSON. You can disable this by setting :format to an empty array. +ActiveSupport.on_load(:action_controller) do + wrap_parameters format: [:json] if respond_to?(:wrap_parameters) +end + +# To enable root element in JSON for ActiveRecord objects. +# ActiveSupport.on_load(:active_record) do +# self.include_root_in_json = true +# end diff --git a/gitlab-peek/test/dummy/config/locales/en.yml b/gitlab-peek/test/dummy/config/locales/en.yml new file mode 100644 index 0000000000..0653957166 --- /dev/null +++ b/gitlab-peek/test/dummy/config/locales/en.yml @@ -0,0 +1,23 @@ +# Files in the config/locales directory are used for internationalization +# and are automatically loaded by Rails. If you want to use locales other +# than English, add the necessary files in this directory. +# +# To use the locales, use `I18n.t`: +# +# I18n.t 'hello' +# +# In views, this is aliased to just `t`: +# +# <%= t('hello') %> +# +# To use a different locale, set it with `I18n.locale`: +# +# I18n.locale = :es +# +# This would use the information in config/locales/es.yml. +# +# To learn more, please read the Rails Internationalization guide +# available at http://guides.rubyonrails.org/i18n.html. 
+ +en: + hello: "Hello world" diff --git a/gitlab-peek/test/dummy/config/routes.rb b/gitlab-peek/test/dummy/config/routes.rb new file mode 100644 index 0000000000..826af3219f --- /dev/null +++ b/gitlab-peek/test/dummy/config/routes.rb @@ -0,0 +1,8 @@ +Dummy::Application.routes.draw do + mount Peek::Railtie => 'peek' + + get '/enabled' => 'home#enabled' + get '/disabled' => 'home#disabled' + + root to: 'home#enabled' +end diff --git a/gitlab-peek/test/dummy/lib/test_view.rb b/gitlab-peek/test/dummy/lib/test_view.rb new file mode 100644 index 0000000000..57b71296e0 --- /dev/null +++ b/gitlab-peek/test/dummy/lib/test_view.rb @@ -0,0 +1,18 @@ +require 'concurrent/atomics' + +class TestView < Peek::Views::View + class << self + attr_accessor :counter + end + self.counter = Concurrent::AtomicFixnum.new + + def results + { + number: self.class.counter.increment + } + end + + def partial_path + "peek/test_view" + end +end diff --git a/gitlab-peek/test/dummy/log/.keep b/gitlab-peek/test/dummy/log/.keep new file mode 100644 index 0000000000..e69de29bb2 diff --git a/gitlab-peek/test/dummy/public/404.html b/gitlab-peek/test/dummy/public/404.html new file mode 100644 index 0000000000..a0daa0c156 --- /dev/null +++ b/gitlab-peek/test/dummy/public/404.html @@ -0,0 +1,58 @@ + + + + The page you were looking for doesn't exist (404) + + + + + +
+ <div class="dialog"> + <h1>The page you were looking for doesn't exist.</h1> + <p>You may have mistyped the address or the page may have moved.</p> + </div> + <p>If you are the application owner check the logs for more information.</p>
      + + diff --git a/gitlab-peek/test/dummy/public/422.html b/gitlab-peek/test/dummy/public/422.html new file mode 100644 index 0000000000..fbb4b84d72 --- /dev/null +++ b/gitlab-peek/test/dummy/public/422.html @@ -0,0 +1,58 @@ + + + + The change you wanted was rejected (422) + + + + + +
+ <div class="dialog"> + <h1>The change you wanted was rejected.</h1> + <p>Maybe you tried to change something you didn't have access to.</p> + </div> + <p>If you are the application owner check the logs for more information.</p>
      + + diff --git a/gitlab-peek/test/dummy/public/500.html b/gitlab-peek/test/dummy/public/500.html new file mode 100644 index 0000000000..e9052d35bf --- /dev/null +++ b/gitlab-peek/test/dummy/public/500.html @@ -0,0 +1,57 @@ + + + + We're sorry, but something went wrong (500) + + + + + +
+ <div class="dialog"> + <h1>We're sorry, but something went wrong.</h1> + </div> + <p>If you are the application owner check the logs for more information.</p>
      + + diff --git a/gitlab-peek/test/dummy/public/favicon.ico b/gitlab-peek/test/dummy/public/favicon.ico new file mode 100644 index 0000000000..e69de29bb2 diff --git a/gitlab-peek/test/peek/views/view_test.rb b/gitlab-peek/test/peek/views/view_test.rb new file mode 100644 index 0000000000..21294c8d70 --- /dev/null +++ b/gitlab-peek/test/peek/views/view_test.rb @@ -0,0 +1,37 @@ +require 'test_helper' + +describe Peek::Views::View do + before do + @view = Peek::Views::View.new + end + + describe "partial path" do + it "should return correct partial class" do + assert_equal 'peek/views/view', @view.partial_path + end + end + + describe "dom_id" do + it "should return correct dom_id" do + assert_equal 'peek-view-view', @view.dom_id + end + end + + describe "key" do + it "should return correct key" do + assert_equal 'view', @view.key + end + end + + describe "context" do + it "should return correct context_id" do + assert_equal 'peek-context-view', @view.context_id + end + end + + describe "toggling off and on" do + it "should be enabled by default" do + assert @view.enabled? + end + end +end diff --git a/gitlab-peek/test/peek_test.rb b/gitlab-peek/test/peek_test.rb new file mode 100644 index 0000000000..7ff497add3 --- /dev/null +++ b/gitlab-peek/test/peek_test.rb @@ -0,0 +1,69 @@ +require 'test_helper' + +class Staff < Peek::Views::View + def initialize(options = {}) + @username = options.delete(:username) + end + + def username + @username + end + + def enabled? + !!@username + end +end + +describe Peek do + describe "enabled?" do + it "should not be enabled in test" do + refute Peek.enabled? + end + end + + describe "env" do + it "should return the current environment" do + assert_equal 'test', Peek.env + end + end + + describe "views" do + before do + Peek.reset + end + + it "should have none by default" do + assert_equal [], Peek.views + end + + it "should be able to append views" do + Peek.into Staff, username: 'dewski' + assert_kind_of Staff, Peek.views.first + end + + it "should be able to append views with options" do + Peek.into Staff, username: 'dewski' + @staff = Peek.views.first + assert_kind_of Staff, @staff + assert_equal 'dewski', @staff.username + end + + it "should only return enabled views" do + Peek.into Staff, username: false + assert_equal [], Peek.views + end + end + + describe "reset" do + before do + Peek.reset + end + + it "should clear any current views" do + Peek.into Staff, username: 'dewski' + assert_kind_of Staff, Peek.views.first + Peek.reset + assert_equal [], Peek.views + end + end +end diff --git a/gitlab-peek/test/test_helper.rb b/gitlab-peek/test/test_helper.rb new file mode 100644 index 0000000000..b4c147d768 --- /dev/null +++ b/gitlab-peek/test/test_helper.rb @@ -0,0 +1,22 @@ +ENV['RAILS_ENV'] = 'test' + +require File.expand_path('../dummy/config/environment.rb', __FILE__) +require 'rails/test_help' + +Rails.backtrace_cleaner.remove_silencers! + +# Load support files +Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each { |f| require f } + +# Load fixtures from the engine +if ActiveSupport::TestCase.method_defined?(:fixture_path=) + ActiveSupport::TestCase.fixture_path = File.expand_path("../fixtures", __FILE__) +end + +require 'minitest/autorun' + +begin + require 'turn' +rescue LoadError + # Not installed. 
+end diff --git a/snowplow-tracker/LICENSE-2.0.txt b/snowplow-tracker/LICENSE-2.0.txt deleted file mode 100644 index 7a4a3ea242..0000000000 --- a/snowplow-tracker/LICENSE-2.0.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. 
Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file diff --git a/snowplow-tracker/README.md b/snowplow-tracker/README.md deleted file mode 100644 index dac689f899..0000000000 --- a/snowplow-tracker/README.md +++ /dev/null @@ -1,74 +0,0 @@ -# Ruby Analytics for Snowplow -[![Gem Version](https://badge.fury.io/rb/snowplow-tracker.svg)](http://badge.fury.io/rb/snowplow-tracker) -[![Build Status](https://travis-ci.org/snowplow/snowplow-ruby-tracker.png?branch=master)](https://travis-ci.org/snowplow/snowplow-ruby-tracker) -[![Code Climate](https://codeclimate.com/github/snowplow/snowplow-ruby-tracker.png)](https://codeclimate.com/github/snowplow/snowplow-ruby-tracker) -[![Coverage Status](https://coveralls.io/repos/snowplow/snowplow-ruby-tracker/badge.png)](https://coveralls.io/r/snowplow/snowplow-ruby-tracker) -[![License][license-image]][license] - -## Overview - -Add analytics to your Ruby and Rails apps and gems with the **[Snowplow] [snowplow]** event tracker for **[Ruby] [ruby]**. - -With this tracker you can collect event data from your **[Ruby] [ruby]** applications, **[Ruby on Rails] [rails]** web applications and **[Ruby gems] [rubygems]**. - -## Quickstart - -Assuming git, **[Vagrant] [vagrant-install]** and **[VirtualBox] [virtualbox-install]** installed: - -```bash - host$ git clone https://github.com/snowplow/snowplow-ruby-tracker.git - host$ cd snowplow-ruby-tracker - host$ vagrant up && vagrant ssh -guest$ cd /vagrant -guest$ gem install bundler -guest$ bundle install -guest$ rspec -``` - -## Publishing - -```bash - host$ vagrant push -``` - -## Find out more - -| Technical Docs | Setup Guide | Roadmap | Contributing | -|---------------------------------|---------------------------|-------------------------|-----------------------------------| -| ![i1] [techdocs-image] | ![i2] [setup-image] | ![i3] [roadmap-image] | ![i4] [contributing-image] | -| **[Technical Docs] [techdocs]** | **[Setup Guide] [setup]** | **[Roadmap] [roadmap]** | **[Contributing] [contributing]** | - -## Copyright and license - -The Snowplow Ruby Tracker is copyright 2013-2016 Snowplow Analytics Ltd. - -Licensed under the **[Apache License, Version 2.0] [license]** (the "License"); -you may not use this software except in compliance with the License. - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-
-[license-image]: http://img.shields.io/badge/license-Apache--2-blue.svg?style=flat
-[license]: http://www.apache.org/licenses/LICENSE-2.0
-
-[ruby]: https://www.ruby-lang.org/en/
-[rails]: http://rubyonrails.org/
-[rubygems]: https://rubygems.org/
-
-[snowplow]: http://snowplowanalytics.com
-
-[vagrant-install]: http://docs.vagrantup.com/v2/installation/index.html
-[virtualbox-install]: https://www.virtualbox.org/wiki/Downloads
-
-[techdocs-image]: https://d3i6fms1cm1j0i.cloudfront.net/github/images/techdocs.png
-[setup-image]: https://d3i6fms1cm1j0i.cloudfront.net/github/images/setup.png
-[roadmap-image]: https://d3i6fms1cm1j0i.cloudfront.net/github/images/roadmap.png
-[contributing-image]: https://d3i6fms1cm1j0i.cloudfront.net/github/images/contributing.png
-
-[techdocs]: https://github.com/snowplow/snowplow/wiki/Ruby-Tracker
-[setup]: https://github.com/snowplow/snowplow/wiki/Ruby-Tracker-Setup
-[roadmap]: https://github.com/snowplow/snowplow/wiki/Ruby-Tracker-Roadmap
-[contributing]: https://github.com/snowplow/snowplow/wiki/Ruby-Tracker-Contributing
diff --git a/snowplow-tracker/lib/snowplow-tracker.rb b/snowplow-tracker/lib/snowplow-tracker.rb
deleted file mode 100644
index a08defef22..0000000000
--- a/snowplow-tracker/lib/snowplow-tracker.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2013-2014 Snowplow Analytics Ltd. All rights reserved.
-#
-# This program is licensed to you under the Apache License Version 2.0,
-# and you may not use this file except in compliance with the Apache License Version 2.0.
-# You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the Apache License Version 2.0 is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
-
-# Author:: Alex Dean, Fred Blundun (mailto:snowplow-user@googlegroups.com)
-# Copyright:: Copyright (c) 2013-2014 Snowplow Analytics Ltd
-# License:: Apache License Version 2.0
-
-require 'snowplow-tracker/contracts.rb'
-require 'snowplow-tracker/version.rb'
-require 'snowplow-tracker/self_describing_json.rb'
-require 'snowplow-tracker/payload.rb'
-require 'snowplow-tracker/subject.rb'
-require 'snowplow-tracker/emitters.rb'
-require 'snowplow-tracker/timestamp.rb'
-require 'snowplow-tracker/tracker.rb'
-
diff --git a/snowplow-tracker/lib/snowplow-tracker/contracts.rb b/snowplow-tracker/lib/snowplow-tracker/contracts.rb
deleted file mode 100644
index 0ce2907b24..0000000000
--- a/snowplow-tracker/lib/snowplow-tracker/contracts.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) 2013-2014 Snowplow Analytics Ltd. All rights reserved.
-#
-# This program is licensed to you under the Apache License Version 2.0,
-# and you may not use this file except in compliance with the Apache License Version 2.0.
-# You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the Apache License Version 2.0 is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
- -# Author:: Alex Dean, Fred Blundun (mailto:support@snowplowanalytics.com) -# Copyright:: Copyright (c) 2013-2014 Snowplow Analytics Ltd -# License:: Apache License Version 2.0 - -require 'contracts' - -module SnowplowTracker - - ORIGINAL_FAILURE_CALLBACK = Contract.method(:failure_callback) - - def self.disable_contracts - Contract.define_singleton_method(:failure_callback) {|data| true} - end - - def self.enable_contracts - Contract.define_singleton_method(:failure_callback, ORIGINAL_FAILURE_CALLBACK) - end -end diff --git a/snowplow-tracker/lib/snowplow-tracker/emitters.rb b/snowplow-tracker/lib/snowplow-tracker/emitters.rb deleted file mode 100644 index 09c75d199e..0000000000 --- a/snowplow-tracker/lib/snowplow-tracker/emitters.rb +++ /dev/null @@ -1,280 +0,0 @@ -# Copyright (c) 2013-2014 Snowplow Analytics Ltd. All rights reserved. -# -# This program is licensed to you under the Apache License Version 2.0, -# and you may not use this file except in compliance with the Apache License Version 2.0. -# You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0. -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the Apache License Version 2.0 is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. - -# Author:: Alex Dean, Fred Blundun (mailto:support@snowplowanalytics.com) -# Copyright:: Copyright (c) 2013-2014 Snowplow Analytics Ltd -# License:: Apache License Version 2.0 - -require 'net/https' -require 'set' -require 'logger' -require 'contracts' - -module SnowplowTracker - - LOGGER = Logger.new(STDERR) - LOGGER.level = Logger::INFO - - class Emitter - - include Contracts - - @@ConfigHash = ({ - :protocol => Maybe[Or['http', 'https']], - :port => Maybe[Num], - :method => Maybe[Or['get', 'post']], - :buffer_size => Maybe[Num], - :on_success => Maybe[Func[Num => Any]], - :on_failure => Maybe[Func[Num, Hash => Any]], - :thread_count => Maybe[Num] - }) - - @@StrictConfigHash = And[@@ConfigHash, lambda { |x| - x.class == Hash and Set.new(x.keys).subset? Set.new(@@ConfigHash.keys) - }] - - @@DefaultConfig = { - :protocol => 'http', - :method => 'get' - } - - Contract String, @@StrictConfigHash => lambda { |x| x.is_a? Emitter } - def initialize(endpoint, config={}) - config = @@DefaultConfig.merge(config) - @lock = Monitor.new - @collector_uri = as_collector_uri(endpoint, config[:protocol], config[:port], config[:method]) - @buffer = [] - if not config[:buffer_size].nil? - @buffer_size = config[:buffer_size] - elsif config[:method] == 'get' - @buffer_size = 1 - else - @buffer_size = 10 - end - @method = config[:method] - @on_success = config[:on_success] - @on_failure = config[:on_failure] - LOGGER.info("#{self.class} initialized with endpoint #{@collector_uri}") - - self - end - - # Build the collector URI from the configuration hash - # - Contract String, String, Maybe[Num], String => String - def as_collector_uri(endpoint, protocol, port, method) - port_string = port == nil ? '' : ":#{port.to_s}" - path = method == 'get' ? 
'/i' : '/com.snowplowanalytics.snowplow/tp2' - - "#{protocol}://#{endpoint}#{port_string}#{path}" - end - - # Add an event to the buffer and flush it if maximum size has been reached - # - Contract Hash => nil - def input(payload) - payload.each { |k,v| payload[k] = v.to_s} - @lock.synchronize do - @buffer.push(payload) - if @buffer.size >= @buffer_size - flush - end - end - - nil - end - - # Flush the buffer - # - Contract Bool => nil - def flush(async=true) - @lock.synchronize do - send_requests(@buffer) - @buffer = [] - end - nil - end - - # Send all events in the buffer to the collector - # - Contract ArrayOf[Hash] => nil - def send_requests(evts) - if evts.size < 1 - LOGGER.info("Skipping sending events since buffer is empty") - return - end - LOGGER.info("Attempting to send #{evts.size} request#{evts.size == 1 ? '' : 's'}") - - evts.each do |event| - event['stm'] = (Time.now.to_f * 1000).to_i.to_s # add the sent timestamp, overwrite if already exists - end - - if @method == 'post' - post_succeeded = false - begin - request = http_post(SelfDescribingJson.new( - 'iglu:com.snowplowanalytics.snowplow/payload_data/jsonschema/1-0-4', - evts - ).to_json) - post_succeeded = is_good_status_code(request.code) - rescue StandardError => se - LOGGER.warn(se) - end - if post_succeeded - unless @on_success.nil? - @on_success.call(evts.size) - end - else - unless @on_failure.nil? - @on_failure.call(0, evts) - end - end - - elsif @method == 'get' - success_count = 0 - unsent_requests = [] - evts.each do |evt| - get_succeeded = false - begin - request = http_get(evt) - get_succeeded = is_good_status_code(request.code) - rescue StandardError => se - LOGGER.warn(se) - end - if get_succeeded - success_count += 1 - else - unsent_requests << evt - end - end - if unsent_requests.size == 0 - unless @on_success.nil? - @on_success.call(success_count) - end - else - unless @on_failure.nil? - @on_failure.call(success_count, unsent_requests) - end - end - end - - nil - end - - # Send a GET request - # - Contract Hash => lambda { |x| x.is_a? Net::HTTPResponse } - def http_get(payload) - destination = URI(@collector_uri + '?' + URI.encode_www_form(payload)) - LOGGER.info("Sending GET request to #{@collector_uri}...") - LOGGER.debug("Payload: #{payload}") - http = Net::HTTP.new(destination.host, destination.port) - request = Net::HTTP::Get.new(destination.request_uri) - if destination.scheme == 'https' - http.use_ssl = true - end - response = http.request(request) - LOGGER.add(is_good_status_code(response.code) ? Logger::INFO : Logger::WARN) { - "GET request to #{@collector_uri} finished with status code #{response.code}" - } - - response - end - - # Send a POST request - # - Contract Hash => lambda { |x| x.is_a? Net::HTTPResponse } - def http_post(payload) - LOGGER.info("Sending POST request to #{@collector_uri}...") - LOGGER.debug("Payload: #{payload}") - destination = URI(@collector_uri) - http = Net::HTTP.new(destination.host, destination.port) - request = Net::HTTP::Post.new(destination.request_uri) - if destination.scheme == 'https' - http.use_ssl = true - end - request.body = payload.to_json - request.set_content_type('application/json; charset=utf-8') - response = http.request(request) - LOGGER.add(is_good_status_code(response.code) ? 
Logger::INFO : Logger::WARN) { - "POST request to #{@collector_uri} finished with status code #{response.code}" - } - - response - end - - # Only 2xx and 3xx status codes are considered successes - # - Contract String => Bool - def is_good_status_code(status_code) - status_code.to_i >= 200 && status_code.to_i < 400 - end - - private :as_collector_uri, - :http_get, - :http_post - - end - - - class AsyncEmitter < Emitter - - Contract String, @@StrictConfigHash => lambda { |x| x.is_a? Emitter } - def initialize(endpoint, config={}) - @queue = Queue.new() - # @all_processed_condition and @results_unprocessed are used to emulate Python's Queue.task_done() - @queue.extend(MonitorMixin) - @all_processed_condition = @queue.new_cond - @results_unprocessed = 0 - (config[:thread_count] || 1).times do - t = Thread.new do - consume - end - end - super(endpoint, config) - end - - def consume - loop do - work_unit = @queue.pop - send_requests(work_unit) - @queue.synchronize do - @results_unprocessed -= 1 - @all_processed_condition.broadcast - end - end - end - - # Flush the buffer - # If async is false, block until the queue is empty - # - def flush(async=true) - loop do - @lock.synchronize do - @queue.synchronize do - @results_unprocessed += 1 - end - @queue << @buffer - @buffer = [] - end - if not async - LOGGER.info('Starting synchronous flush') - @queue.synchronize do - @all_processed_condition.wait_while { @results_unprocessed > 0 } - LOGGER.info('Finished synchronous flush') - end - end - break if @buffer.size < 1 - end - end - end - -end diff --git a/snowplow-tracker/lib/snowplow-tracker/payload.rb b/snowplow-tracker/lib/snowplow-tracker/payload.rb deleted file mode 100644 index 383f525269..0000000000 --- a/snowplow-tracker/lib/snowplow-tracker/payload.rb +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright (c) 2013-2014 Snowplow Analytics Ltd. All rights reserved. -# -# This program is licensed to you under the Apache License Version 2.0, -# and you may not use this file except in compliance with the Apache License Version 2.0. -# You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0. -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the Apache License Version 2.0 is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. - -# Author:: Alex Dean, Fred Blundun (mailto:support@snowplowanalytics.com) -# Copyright:: Copyright (c) 2013-2014 Snowplow Analytics Ltd -# License:: Apache License Version 2.0 - -require 'base64' -require 'json' -require 'net/http' -require 'contracts' - -module SnowplowTracker - - class Payload - - include Contracts - - attr_reader :context - - Contract nil => Payload - def initialize - @context = {} - self - end - - # Add a single name-value pair to @context - # - Contract String, Or[String, Bool, Num, nil] => Or[String, Bool, Num, nil] - def add(name, value) - if value != "" and not value.nil? - @context[name] = value - end - end - - # Add each name-value pair in dict to @context - # - Contract Hash => Hash - def add_dict(dict) - for f in dict - self.add(f[0], f[1]) - end - end - - # Stringify a JSON and add it to @context - # - Contract Maybe[Hash], Bool, String, String => Maybe[String] - def add_json(dict, encode_base64, type_when_encoded, type_when_not_encoded) - - if dict.nil? 
- return - end - - dict_string = JSON.generate(dict) - - if encode_base64 - self.add(type_when_encoded, Base64.strict_encode64(dict_string)) - else - self.add(type_when_not_encoded, dict_string) - end - - end - - end -end diff --git a/snowplow-tracker/lib/snowplow-tracker/self_describing_json.rb b/snowplow-tracker/lib/snowplow-tracker/self_describing_json.rb deleted file mode 100644 index 7b917c1b00..0000000000 --- a/snowplow-tracker/lib/snowplow-tracker/self_describing_json.rb +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright (c) 2013-2014 Snowplow Analytics Ltd. All rights reserved. -# -# This program is licensed to you under the Apache License Version 2.0, -# and you may not use this file except in compliance with the Apache License Version 2.0. -# You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0. -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the Apache License Version 2.0 is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. - -# Author:: Alex Dean, Fred Blundun (mailto:support@snowplowanalytics.com) -# Copyright:: Copyright (c) 2013-2014 Snowplow Analytics Ltd -# License:: Apache License Version 2.0 - -module SnowplowTracker - - class SelfDescribingJson - - def initialize(schema, data) - @schema = schema - @data = data - end - - def to_json - { - :schema => @schema, - :data => @data - } - end - - end - -end diff --git a/snowplow-tracker/lib/snowplow-tracker/subject.rb b/snowplow-tracker/lib/snowplow-tracker/subject.rb deleted file mode 100644 index 09d2bdfb60..0000000000 --- a/snowplow-tracker/lib/snowplow-tracker/subject.rb +++ /dev/null @@ -1,139 +0,0 @@ -# Copyright (c) 2013-2014 Snowplow Analytics Ltd. All rights reserved. -# -# This program is licensed to you under the Apache License Version 2.0, -# and you may not use this file except in compliance with the Apache License Version 2.0. -# You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0. -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the Apache License Version 2.0 is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
- -# Author:: Alex Dean, Fred Blundun (mailto:support@snowplowanalytics.com) -# Copyright:: Copyright (c) 2013-2014 Snowplow Analytics Ltd -# License:: Apache License Version 2.0 - -require 'contracts' - -module SnowplowTracker - - class Subject - - include Contracts - - @@default_platform = 'srv' - @@supported_platforms = ['pc', 'tv', 'mob', 'cnsl', 'iot'] - - attr_reader :standard_nv_pairs - - Contract None => Subject - def initialize - @standard_nv_pairs = {"p" => @@default_platform} - self - end - - # Specify the platform - # - Contract String => Subject - def set_platform(value) - if @@supported_platforms.include?(value) - @standard_nv_pairs['p'] = value - else - raise "#{value} is not a supported platform" - end - - self - end - - # Set the business-defined user ID for a user - # - Contract String => Subject - def set_user_id(user_id) - @standard_nv_pairs['uid'] = user_id - self - end - - # Set fingerprint for the user - # - Contract Num => Subject - def set_fingerprint(fingerprint) - @standard_nv_pairs['fp'] = fingerprint - self - end - - # Set the screen resolution for a device - # - Contract Num, Num => Subject - def set_screen_resolution(width, height) - @standard_nv_pairs['res'] = "#{width}x#{height}" - self - end - - # Set the dimensions of the current viewport - # - Contract Num, Num => Subject - def set_viewport(width, height) - @standard_nv_pairs['vp'] = "#{width}x#{height}" - self - end - - # Set the color depth of the device in bits per pixel - # - Contract Num => Subject - def set_color_depth(depth) - @standard_nv_pairs['cd'] = depth - self - end - - # Set the timezone field - # - Contract String => Subject - def set_timezone(timezone) - @standard_nv_pairs['tz'] = timezone - self - end - - # Set the language field - # - Contract String => Subject - def set_lang(lang) - @standard_nv_pairs['lang'] = lang - self - end - - # Set the domain user ID - # - Contract String => Subject - def set_domain_user_id(duid) - @standard_nv_pairs['duid'] = duid - self - end - - # Set the IP address field - # - Contract String => Subject - def set_ip_address(ip) - @standard_nv_pairs['ip'] = ip - self - end - - # Set the user agent - # - Contract String => Subject - def set_useragent(ua) - @standard_nv_pairs['ua'] = ua - self - end - - # Set the network user ID field - # This overwrites the nuid field set by the collector - # - Contract String => Subject - def set_network_user_id(nuid) - @standard_nv_pairs['tnuid'] = nuid - self - end - - end - -end diff --git a/snowplow-tracker/lib/snowplow-tracker/timestamp.rb b/snowplow-tracker/lib/snowplow-tracker/timestamp.rb deleted file mode 100644 index d81a12850c..0000000000 --- a/snowplow-tracker/lib/snowplow-tracker/timestamp.rb +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright (c) 2016 Snowplow Analytics Ltd. All rights reserved. -# -# This program is licensed to you under the Apache License Version 2.0, -# and you may not use this file except in compliance with the Apache License Version 2.0. -# You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0. -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the Apache License Version 2.0 is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
-
-# Author:: Alex Dean, Fred Blundun, Ed Lewis (mailto:support@snowplowanalytics.com)
-# Copyright:: Copyright (c) 2016 Snowplow Analytics Ltd
-# License:: Apache License Version 2.0
-
-module SnowplowTracker
-
-  class Timestamp
-
-    attr_reader :type
-    attr_reader :value
-
-    def initialize(type, value)
-      @type = type
-      @value = value
-    end
-
-  end
-
-  class TrueTimestamp < Timestamp
-
-    def initialize(value)
-      super 'ttm', value
-    end
-
-  end
-
-  class DeviceTimestamp < Timestamp
-
-    def initialize(value)
-      super 'dtm', value
-    end
-
-  end
-
-end
\ No newline at end of file
diff --git a/snowplow-tracker/lib/snowplow-tracker/tracker.rb b/snowplow-tracker/lib/snowplow-tracker/tracker.rb
deleted file mode 100644
index f73dcef505..0000000000
--- a/snowplow-tracker/lib/snowplow-tracker/tracker.rb
+++ /dev/null
@@ -1,371 +0,0 @@
-# Copyright (c) 2013-2014 Snowplow Analytics Ltd. All rights reserved.
-#
-# This program is licensed to you under the Apache License Version 2.0,
-# and you may not use this file except in compliance with the Apache License Version 2.0.
-# You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the Apache License Version 2.0 is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
-
-# Author:: Alex Dean, Fred Blundun (mailto:support@snowplowanalytics.com)
-# Copyright:: Copyright (c) 2013-2014 Snowplow Analytics Ltd
-# License:: Apache License Version 2.0
-
-require 'contracts'
-require 'securerandom'
-require 'set'
-
-module SnowplowTracker
-
-  class Tracker
-
-    include Contracts
-
-    @@EmitterInput = Or[lambda {|x| x.is_a? Emitter}, ArrayOf[lambda {|x| x.is_a? Emitter}]]
-
-    @@required_transaction_keys = Set.new(%w(order_id total_value))
-    @@recognised_transaction_keys = Set.new(%w(order_id total_value affiliation tax_value shipping city state country currency))
-
-    @@Transaction = lambda { |x|
-      return false unless x.class == Hash
-      transaction_keys = Set.new(x.keys)
-      @@required_transaction_keys.subset? transaction_keys and
-        transaction_keys.subset? @@recognised_transaction_keys
-    }
-
-    @@required_item_keys = Set.new(%w(sku price quantity))
-    @@recognised_item_keys = Set.new(%w(sku price quantity name category context))
-
-    @@Item = lambda { |x|
-      return false unless x.class == Hash
-      item_keys = Set.new(x.keys)
-      @@required_item_keys.subset? item_keys and
-        item_keys.subset? @@recognised_item_keys
-    }
-
-    @@required_augmented_item_keys = Set.new(%w(sku price quantity tstamp order_id))
-    @@recognised_augmented_item_keys = Set.new(%w(sku price quantity name category context tstamp order_id currency))
-
-    @@AugmentedItem = lambda { |x|
-      return false unless x.class == Hash
-      augmented_item_keys = Set.new(x.keys)
-      @@required_augmented_item_keys.subset? augmented_item_keys and
-        augmented_item_keys.subset?
@@recognised_augmented_item_keys - } - - @@ContextsInput = ArrayOf[SelfDescribingJson] - - @@version = TRACKER_VERSION - @@default_encode_base64 = true - - @@base_schema_path = "iglu:com.snowplowanalytics.snowplow" - @@schema_tag = "jsonschema" - @@context_schema = "#{@@base_schema_path}/contexts/#{@@schema_tag}/1-0-1" - @@unstruct_event_schema = "#{@@base_schema_path}/unstruct_event/#{@@schema_tag}/1-0-0" - - Contract @@EmitterInput, Maybe[Subject], Maybe[String], Maybe[String], Bool => Tracker - def initialize(emitters, subject=nil, namespace=nil, app_id=nil, encode_base64=@@default_encode_base64) - @emitters = Array(emitters) - if subject.nil? - @subject = Subject.new - else - @subject = subject - end - @standard_nv_pairs = { - 'tna' => namespace, - 'tv' => @@version, - 'aid' => app_id - } - @config = { - 'encode_base64' => encode_base64 - } - - self - end - - # Call subject methods from tracker instance - # - Subject.instance_methods(false).each do |name| - define_method name, ->(*splat) do - @subject.method(name.to_sym).call(*splat) - - self - end - end - - # Generates a type-4 UUID to identify this event - Contract nil => String - def get_event_id() - SecureRandom.uuid - end - - # Generates the timestamp (in milliseconds) to be attached to each event - # - Contract nil => Num - def get_timestamp - (Time.now.to_f * 1000).to_i - end - - # Builds a self-describing JSON from an array of custom contexts - # - Contract @@ContextsInput => Hash - def build_context(context) - SelfDescribingJson.new( - @@context_schema, - context.map {|c| c.to_json} - ).to_json - end - - # Tracking methods - - # Attaches all the fields in @standard_nv_pairs to the request - # Only attaches the context vendor if the event has a custom context - # - Contract Payload => nil - def track(pb) - pb.add_dict(@subject.standard_nv_pairs) - pb.add_dict(@standard_nv_pairs) - pb.add('eid', get_event_id()) - @emitters.each{ |emitter| emitter.input(pb.context)} - - nil - end - - # Log a visit to this page with an inserted device timestamp - # - Contract String, Maybe[String], Maybe[String], Maybe[@@ContextsInput], Maybe[Num] => Tracker - def track_page_view(page_url, page_title=nil, referrer=nil, context=nil, tstamp=nil) - if tstamp.nil? - tstamp = get_timestamp - end - - track_page_view(page_url, page_title, referrer, context, DeviceTimestamp.new(tstamp)) - end - - # Log a visit to this page - # - Contract String, Maybe[String], Maybe[String], Maybe[@@ContextsInput], SnowplowTracker::Timestamp => Tracker - def track_page_view(page_url, page_title=nil, referrer=nil, context=nil, tstamp=nil) - pb = Payload.new - pb.add('e', 'pv') - pb.add('url', page_url) - pb.add('page', page_title) - pb.add('refr', referrer) - - unless context.nil? - pb.add_json(build_context(context), @config['encode_base64'], 'cx', 'co') - end - - pb.add(tstamp.type, tstamp.value) - - track(pb) - - self - end - - # Track a single item within an ecommerce transaction - # Not part of the public API - # - Contract @@AugmentedItem => self - def track_ecommerce_transaction_item(argmap) - pb = Payload.new - pb.add('e', 'ti') - pb.add('ti_id', argmap['order_id']) - pb.add('ti_sk', argmap['sku']) - pb.add('ti_pr', argmap['price']) - pb.add('ti_qu', argmap['quantity']) - pb.add('ti_nm', argmap['name']) - pb.add('ti_ca', argmap['category']) - pb.add('ti_cu', argmap['currency']) - unless argmap['context'].nil? 
- pb.add_json(build_context(argmap['context']), @config['encode_base64'], 'cx', 'co') - end - pb.add(argmap['tstamp'].type, argmap['tstamp'].value) - track(pb) - - self - end - - # Track an ecommerce transaction and all the items in it - # Set the timestamp as the device timestamp - Contract @@Transaction, ArrayOf[@@Item], Maybe[@@ContextsInput], Maybe[Num] => Tracker - def track_ecommerce_transaction(transaction, - items, - context=nil, - tstamp=nil) - if tstamp.nil? - tstamp = get_timestamp - end - - track_ecommerce_transaction(transaction, items, context, DeviceTimestamp.new(tstamp)) - end - - # Track an ecommerce transaction and all the items in it - # - Contract @@Transaction, ArrayOf[@@Item], Maybe[@@ContextsInput], Timestamp => Tracker - def track_ecommerce_transaction(transaction, items, - context=nil, tstamp=nil) - pb = Payload.new - pb.add('e', 'tr') - pb.add('tr_id', transaction['order_id']) - pb.add('tr_tt', transaction['total_value']) - pb.add('tr_af', transaction['affiliation']) - pb.add('tr_tx', transaction['tax_value']) - pb.add('tr_sh', transaction['shipping']) - pb.add('tr_ci', transaction['city']) - pb.add('tr_st', transaction['state']) - pb.add('tr_co', transaction['country']) - pb.add('tr_cu', transaction['currency']) - unless context.nil? - pb.add_json(build_context(context), @config['encode_base64'], 'cx', 'co') - end - - pb.add(tstamp.type, tstamp.value) - - track(pb) - - for item in items - item['tstamp'] = tstamp - item['order_id'] = transaction['order_id'] - item['currency'] = transaction['currency'] - track_ecommerce_transaction_item(item) - end - - self - end - - # Track a structured event - # set the timestamp to the device timestamp - Contract String, String, Maybe[String], Maybe[String], Maybe[Num], Maybe[@@ContextsInput], Maybe[Num] => Tracker - def track_struct_event(category, action, label=nil, property=nil, value=nil, context=nil, tstamp=nil) - if tstamp.nil? - tstamp = get_timestamp - end - - track_struct_event(category, action, label, property, value, context, DeviceTimestamp.new(tstamp)) - end - # Track a structured event - # - Contract String, String, Maybe[String], Maybe[String], Maybe[Num], Maybe[@@ContextsInput], Timestamp => Tracker - def track_struct_event(category, action, label=nil, property=nil, value=nil, context=nil, tstamp=nil) - pb = Payload.new - pb.add('e', 'se') - pb.add('se_ca', category) - pb.add('se_ac', action) - pb.add('se_la', label) - pb.add('se_pr', property) - pb.add('se_va', value) - unless context.nil? - pb.add_json(build_context(context), @config['encode_base64'], 'cx', 'co') - end - - pb.add(tstamp.type, tstamp.value) - track(pb) - - self - end - - # Track a screen view event - # - Contract Maybe[String], Maybe[String], Maybe[@@ContextsInput], Or[Timestamp, Num, nil] => Tracker - def track_screen_view(name=nil, id=nil, context=nil, tstamp=nil) - screen_view_properties = {} - unless name.nil? - screen_view_properties['name'] = name - end - unless id.nil? 
- screen_view_properties['id'] = id - end - screen_view_schema = "#{@@base_schema_path}/screen_view/#{@@schema_tag}/1-0-0" - - event_json = SelfDescribingJson.new(screen_view_schema, screen_view_properties) - - self.track_unstruct_event(event_json, context, tstamp) - - self - end - - # Better name for track unstruct event - # - Contract SelfDescribingJson, Maybe[@@ContextsInput], Timestamp => Tracker - def track_self_describing_event(event_json, context=nil, tstamp=nil) - track_unstruct_event(event_json, context, tstamp) - end - - # Better name for track unstruct event - # set the timestamp to the device timestamp - Contract SelfDescribingJson, Maybe[@@ContextsInput], Maybe[Num] => Tracker - def track_self_describing_event(event_json, context=nil, tstamp=nil) - track_unstruct_event(event_json, context, tstamp) - end - - # Track an unstructured event - # set the timestamp to the device timstamp - Contract SelfDescribingJson, Maybe[@@ContextsInput], Maybe[Num] => Tracker - def track_unstruct_event(event_json, context=nil, tstamp=nil) - if tstamp.nil? - tstamp = get_timestamp - end - - track_unstruct_event(event_json, context, DeviceTimestamp.new(tstamp)) - end - - # Track an unstructured event - # - Contract SelfDescribingJson, Maybe[@@ContextsInput], Timestamp => Tracker - def track_unstruct_event(event_json, context=nil, tstamp=nil) - pb = Payload.new - pb.add('e', 'ue') - - envelope = SelfDescribingJson.new(@@unstruct_event_schema, event_json.to_json) - - pb.add_json(envelope.to_json, @config['encode_base64'], 'ue_px', 'ue_pr') - - unless context.nil? - pb.add_json(build_context(context), @config['encode_base64'], 'cx', 'co') - end - - pb.add(tstamp.type, tstamp.value) - - track(pb) - - self - end - - # Flush all events stored in all emitters - # - Contract Bool => Tracker - def flush(async=false) - @emitters.each do |emitter| - emitter.flush(async) - end - - self - end - - # Set the subject of the events fired by the tracker - # - Contract Subject => Tracker - def set_subject(subject) - @subject = subject - self - end - - # Add a new emitter - # - Contract Emitter => Tracker - def add_emitter(emitter) - @emitters.push(emitter) - self - end - - private :get_timestamp, - :build_context, - :track, - :track_ecommerce_transaction_item - - end - -end diff --git a/snowplow-tracker/lib/snowplow-tracker/version.rb b/snowplow-tracker/lib/snowplow-tracker/version.rb deleted file mode 100644 index 18bde7bf60..0000000000 --- a/snowplow-tracker/lib/snowplow-tracker/version.rb +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2013-2014 Snowplow Analytics Ltd. All rights reserved. -# -# This program is licensed to you under the Apache License Version 2.0, -# and you may not use this file except in compliance with the Apache License Version 2.0. -# You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0. -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the Apache License Version 2.0 is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
-
-# Author:: Alex Dean, Fred Blundun (mailto:support@snowplowanalytics.com)
-# Copyright:: Copyright (c) 2013-2014 Snowplow Analytics Ltd
-# License:: Apache License Version 2.0
-
-module SnowplowTracker
-  VERSION = '0.6.1'
-  TRACKER_VERSION = "rb-#{VERSION}"
-end
diff --git a/snowplow-tracker/snowplow-tracker.gemspec b/snowplow-tracker/snowplow-tracker.gemspec
deleted file mode 100644
index c30cb26829..0000000000
--- a/snowplow-tracker/snowplow-tracker.gemspec
+++ /dev/null
@@ -1,41 +0,0 @@
-#########################################################
-# This file has been automatically generated by gem2tgz #
-#########################################################
-# -*- encoding: utf-8 -*-
-# stub: snowplow-tracker 0.6.1 ruby lib
-
-Gem::Specification.new do |s|
-  s.name = "snowplow-tracker".freeze
-  s.version = "0.6.1"
-
-  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
-  s.require_paths = ["lib".freeze]
-  s.authors = ["Alexander Dean".freeze, "Fred Blundun".freeze]
-  s.date = "2016-12-26"
-  s.description = "With this tracker you can collect event data from your Ruby applications, Ruby on Rails web applications and Ruby gems.".freeze
-  s.email = "support@snowplowanalytics.com".freeze
-  s.files = ["LICENSE-2.0.txt".freeze, "README.md".freeze, "lib/snowplow-tracker.rb".freeze, "lib/snowplow-tracker/contracts.rb".freeze, "lib/snowplow-tracker/emitters.rb".freeze, "lib/snowplow-tracker/payload.rb".freeze, "lib/snowplow-tracker/self_describing_json.rb".freeze, "lib/snowplow-tracker/subject.rb".freeze, "lib/snowplow-tracker/timestamp.rb".freeze, "lib/snowplow-tracker/tracker.rb".freeze, "lib/snowplow-tracker/version.rb".freeze]
-  s.homepage = "http://github.com/snowplow/snowplow-ruby-tracker".freeze
-  s.licenses = ["Apache License 2.0".freeze]
-  s.required_ruby_version = Gem::Requirement.new(">= 2.0.0".freeze)
-  s.rubygems_version = "2.5.2.1".freeze
-  s.summary = "Ruby Analytics for Snowplow".freeze
-
-  if s.respond_to? :specification_version then
-    s.specification_version = 4
-
-    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
-      s.add_runtime_dependency(%q<contracts>.freeze, ["<= 0.11", "~> 0.7"])
-      s.add_development_dependency(%q<rspec>.freeze, ["~> 2.14.1"])
-      s.add_development_dependency(%q<webmock>.freeze, ["~> 1.17.4"])
-    else
-      s.add_dependency(%q<contracts>.freeze, ["<= 0.11", "~> 0.7"])
-      s.add_dependency(%q<rspec>.freeze, ["~> 2.14.1"])
-      s.add_dependency(%q<webmock>.freeze, ["~> 1.17.4"])
-    end
-  else
-    s.add_dependency(%q<contracts>.freeze, ["<= 0.11", "~> 0.7"])
-    s.add_dependency(%q<rspec>.freeze, ["~> 2.14.1"])
-    s.add_dependency(%q<webmock>.freeze, ["~> 1.17.4"])
-  end
-end