# frozen_string_literal: true

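# Abstract base class for importers that copy records from the database into a
# Chewy index using concurrent work units. Subclasses implement #import! and
# #index; callers typically run #import! and #clean_up! between
# #optimize_for_import! and #optimize_for_search!.
#
# @example A minimal sketch of a hypothetical subclass (class and method bodies
#   are illustrative only, not part of this file)
#
#   class Importer::ExampleIndexImporter < Importer::BaseImporter
#     def import!
#       # Index records in batches, one concurrent work unit per batch
#       index.adapter.target.find_in_batches(batch_size: @batch_size) do |records|
#         in_work_unit(records.map(&:id)) do |ids|
#           bulk = Chewy::Index::Import::BulkBuilder.new(index, to_index: index.adapter.target.where(id: ids)).bulk_body
#
#           Chewy::Index::Import::BulkRequest.new(index).perform(bulk)
#
#           [bulk.size, 0]
#         end
#       end
#
#       wait!
#     end
#
#     def index
#       ExampleIndex
#     end
#   end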
class Importer::BaseImporter
  # @param [Integer] batch_size
  # @param [Concurrent::ThreadPoolExecutor] executor
  def initialize(batch_size:, executor:)
    @batch_size = batch_size
    @executor   = executor
    @wait_for   = Concurrent::Set.new
  end

  # Callback to run when a concurrent work unit completes
  # @param [Proc] block
  def on_progress(&block)
    @on_progress = block
  end

  # Callback to run when a concurrent work unit fails
  # @param [Proc] block
  def on_failure(&block)
    @on_failure = block
  end

  # Reduce resource usage during indexing and improve indexing speed
  def optimize_for_import!
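    # A refresh_interval of -1 disables periodic index refreshes while bulk indexing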
    Chewy.client.indices.put_settings index: index.index_name, body: { index: { refresh_interval: -1 } }
  end

  # Restore the index's original refresh interval once importing is done
  def optimize_for_search!
    Chewy.client.indices.put_settings index: index.index_name, body: { index: { refresh_interval: index.settings_hash[:settings][:index][:refresh_interval] } }
  end

  # Estimate the number of documents that would be indexed. Not exact!
  # @return [Integer]
  def estimate!
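    # Reads the PostgreSQL planner's row estimate from pg_class, so the result can lag behind the real row count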
    ActiveRecord::Base.connection_pool.with_connection { |connection| connection.select_one("SELECT reltuples AS estimate FROM pg_class WHERE relname = '#{index.adapter.target.table_name}'")['estimate'].to_i }
  end

  # Import data from the database into the index
  def import!
    raise NotImplementedError
  end

  # Remove documents from the index that no longer exist in the database
  def clean_up!
    index.scroll_batches do |documents|
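      # Keep only the ids from this batch of indexed documents that no longer have a database row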
      ids           = documents.pluck('_id')
      existence_map = index.adapter.target.where(id: ids).pluck(:id).each_with_object({}) { |id, map| map[id.to_s] = true }
      tmp           = ids.reject { |id| existence_map[id] }

      next if tmp.empty?

      in_work_unit(tmp) do |deleted_ids|
        bulk = Chewy::Index::Import::BulkBuilder.new(index, delete: deleted_ids).bulk_body

        Chewy::Index::Import::BulkRequest.new(index).perform(bulk)

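        # Nothing was indexed here; bulk contains one delete action per removed document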
        [0, bulk.size]
      end
    end

    wait!
  end

  protected

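  # Schedule a block on the shared executor and track it so that #wait! can
  # block until all outstanding work units have resolved. When the executor
  # rejects new work because its queue is full, sleep briefly and retry,
  # applying backpressure to the caller.
  #
  # @param [Array] args arguments passed through to the block
  # @param [Proc] block work to perform on the executor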
  def in_work_unit(*args, &block)
    work_unit = Concurrent::Promises.future_on(@executor, *args, &block)

    work_unit.on_fulfillment!(&@on_progress)
    work_unit.on_rejection!(&@on_failure)
    work_unit.on_resolution! { @wait_for.delete(work_unit) }

    @wait_for << work_unit
  rescue Concurrent::RejectedExecutionError
    sleep(0.1) && retry # Backpressure
  end

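  # Block until every scheduled work unit has completed, successfully or not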
  def wait!
    Concurrent::Promises.zip(*@wait_for).wait
  end

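  # The Chewy index this importer manages. Subclasses must implement this.
  # @return [Class]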
  def index
    raise NotImplementedError
  end
end