Commit c0600269 authored by Dylan Griffith's avatar Dylan Griffith

Merge branch '273264-copy-issues-to-new-index' into 'master'

Advanced Search: Move issues to new Elasticsearch index (not revertable)

See merge request gitlab-org/gitlab!48334
parents f7447302 88d73ee9
# frozen_string_literal: true
class CreateElasticReindexingSubtasks < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
class ReindexingTask < ActiveRecord::Base
self.table_name = 'elastic_reindexing_tasks'
end
class ReindexingSubtask < ActiveRecord::Base
self.table_name = 'elastic_reindexing_subtasks'
end
def up
unless table_exists?(:elastic_reindexing_subtasks)
create_table :elastic_reindexing_subtasks do |t|
t.references :elastic_reindexing_task, foreign_key: { on_delete: :cascade }, null: false
t.text :alias_name, null: false
t.text :index_name_from, null: false
t.text :index_name_to, null: false
t.text :elastic_task, null: false
t.integer :documents_count_target
t.integer :documents_count
t.timestamps_with_timezone null: false
end
end
add_text_limit :elastic_reindexing_subtasks, :index_name_from, 255
add_text_limit :elastic_reindexing_subtasks, :index_name_to, 255
add_text_limit :elastic_reindexing_subtasks, :elastic_task, 255
add_text_limit :elastic_reindexing_subtasks, :alias_name, 255
ReindexingTask.find_each do |task|
next if task.index_name_from.blank? || task.index_name_to.blank? || task.elastic_task.blank?
next if ReindexingSubtask.where(elastic_reindexing_task_id: task.id).exists?
ReindexingSubtask.create(
elastic_reindexing_task_id: task.id,
documents_count_target: task.documents_count_target,
documents_count: task.documents_count,
alias_name: 'gitlab-production',
index_name_from: task.index_name_from,
index_name_to: task.index_name_to,
elastic_task: task.elastic_task
)
end
end
def down
drop_table :elastic_reindexing_subtasks
end
end
164bcc838beb7d51775f8b813b92d3ec7080d4c7937d6ad16cf973131b45359e
\ No newline at end of file
@@ -11947,6 +11947,32 @@ CREATE SEQUENCE draft_notes_id_seq
ALTER SEQUENCE draft_notes_id_seq OWNED BY draft_notes.id;
CREATE TABLE elastic_reindexing_subtasks (
id bigint NOT NULL,
elastic_reindexing_task_id bigint NOT NULL,
alias_name text NOT NULL,
index_name_from text NOT NULL,
index_name_to text NOT NULL,
elastic_task text NOT NULL,
documents_count_target integer,
documents_count integer,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
CONSTRAINT check_4910adc798 CHECK ((char_length(elastic_task) <= 255)),
CONSTRAINT check_88f56216a4 CHECK ((char_length(alias_name) <= 255)),
CONSTRAINT check_a1fbd9faa9 CHECK ((char_length(index_name_from) <= 255)),
CONSTRAINT check_f456494bd8 CHECK ((char_length(index_name_to) <= 255))
);
CREATE SEQUENCE elastic_reindexing_subtasks_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE elastic_reindexing_subtasks_id_seq OWNED BY elastic_reindexing_subtasks.id;
CREATE TABLE elastic_reindexing_tasks (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
@@ -18387,6 +18413,8 @@ ALTER TABLE ONLY diff_note_positions ALTER COLUMN id SET DEFAULT nextval('diff_n
ALTER TABLE ONLY draft_notes ALTER COLUMN id SET DEFAULT nextval('draft_notes_id_seq'::regclass);
ALTER TABLE ONLY elastic_reindexing_subtasks ALTER COLUMN id SET DEFAULT nextval('elastic_reindexing_subtasks_id_seq'::regclass);
ALTER TABLE ONLY elastic_reindexing_tasks ALTER COLUMN id SET DEFAULT nextval('elastic_reindexing_tasks_id_seq'::regclass);
ALTER TABLE ONLY emails ALTER COLUMN id SET DEFAULT nextval('emails_id_seq'::regclass);
@@ -19548,6 +19576,9 @@ ALTER TABLE ONLY diff_note_positions
ALTER TABLE ONLY draft_notes
ADD CONSTRAINT draft_notes_pkey PRIMARY KEY (id);
ALTER TABLE ONLY elastic_reindexing_subtasks
ADD CONSTRAINT elastic_reindexing_subtasks_pkey PRIMARY KEY (id);
ALTER TABLE ONLY elastic_reindexing_tasks
ADD CONSTRAINT elastic_reindexing_tasks_pkey PRIMARY KEY (id);
@@ -21393,6 +21424,8 @@ CREATE INDEX index_draft_notes_on_discussion_id ON draft_notes USING btree (disc
CREATE INDEX index_draft_notes_on_merge_request_id ON draft_notes USING btree (merge_request_id);
CREATE INDEX index_elastic_reindexing_subtasks_on_elastic_reindexing_task_id ON elastic_reindexing_subtasks USING btree (elastic_reindexing_task_id);
CREATE UNIQUE INDEX index_elastic_reindexing_tasks_on_in_progress ON elastic_reindexing_tasks USING btree (in_progress) WHERE in_progress;
CREATE INDEX index_elastic_reindexing_tasks_on_state ON elastic_reindexing_tasks USING btree (state);
@@ -25361,6 +25394,9 @@ ALTER TABLE ONLY requirements
ALTER TABLE ONLY snippet_repositories
ADD CONSTRAINT fk_rails_f21f899728 FOREIGN KEY (shard_id) REFERENCES shards(id) ON DELETE RESTRICT;
ALTER TABLE ONLY elastic_reindexing_subtasks
ADD CONSTRAINT fk_rails_f2cc190164 FOREIGN KEY (elastic_reindexing_task_id) REFERENCES elastic_reindexing_tasks(id) ON DELETE CASCADE;
ALTER TABLE ONLY ci_pipeline_chat_data
ADD CONSTRAINT fk_rails_f300456b63 FOREIGN KEY (chat_name_id) REFERENCES chat_names(id) ON DELETE CASCADE;
......
@@ -283,7 +283,7 @@ To disable the Elasticsearch integration:
1. Expand the **Advanced Search** section and uncheck **Elasticsearch indexing**
   and **Search with Elasticsearch enabled**.
1. Click **Save changes** for the changes to take effect.
1. (Optional) Delete the existing index:
1. (Optional) Delete the existing indexes:
```shell
# Omnibus installations
@@ -347,7 +347,8 @@ To reclaim the `gitlab-production` index name, you need to first create a `secon
To create a secondary index, run the following Rake task. The `SKIP_ALIAS`
environment variable will disable the automatic creation of the Elasticsearch
alias, which would conflict with the existing index under `$PRIMARY_INDEX`:
alias, which would conflict with the existing index under `$PRIMARY_INDEX`, and will
not create a separate Issue index:
```shell
# Omnibus installation
@@ -523,8 +524,8 @@ The following are some available Rake tasks:
| [`sudo gitlab-rake gitlab:elastic:index_projects`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Iterates over all projects and queues Sidekiq jobs to index them in the background. |
| [`sudo gitlab-rake gitlab:elastic:index_projects_status`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Determines the overall status of the indexing. It is done by counting the total number of indexed projects, dividing by a count of the total number of projects, then multiplying by 100. |
| [`sudo gitlab-rake gitlab:elastic:clear_index_status`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Deletes all instances of IndexStatus for all projects. Note that this command will result in a complete wipe of the index, and it should be used with caution. |
| [`sudo gitlab-rake gitlab:elastic:create_empty_index[<TARGET_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Generates an empty index and assigns an alias for it on the Elasticsearch side only if it doesn't already exist. |
| [`sudo gitlab-rake gitlab:elastic:create_empty_index[<TARGET_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Generates empty indexes (the default index and a separate issues index) and assigns an alias for each on the Elasticsearch side only if it doesn't already exist. |
| [`sudo gitlab-rake gitlab:elastic:delete_index[<TARGET_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Removes the GitLab index and alias (if exists) on the Elasticsearch instance. |
| [`sudo gitlab-rake gitlab:elastic:delete_index[<TARGET_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Removes the GitLab indexes and aliases (if they exist) on the Elasticsearch instance. |
| [`sudo gitlab-rake gitlab:elastic:recreate_index[<TARGET_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Wrapper task for `gitlab:elastic:delete_index[<TARGET_NAME>]` and `gitlab:elastic:create_empty_index[<TARGET_NAME>]`. |
| [`sudo gitlab-rake gitlab:elastic:index_snippets`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Performs an Elasticsearch import that indexes the snippets data. |
| [`sudo gitlab-rake gitlab:elastic:projects_not_indexed`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Displays which projects are not indexed. |
......
@@ -6,9 +6,15 @@ module Elastic
FORWARDABLE_INSTANCE_METHODS = [:es_id, :es_parent].freeze
FORWARDABLE_CLASS_METHODS = [:elastic_search, :es_import, :es_type, :index_name, :document_type, :mapping, :mappings, :settings, :import].freeze
# rubocop:disable Gitlab/ModuleWithInstanceVariables
def __elasticsearch__(&block)
@__elasticsearch__ ||= ::Elastic::MultiVersionInstanceProxy.new(self)
if self.class.use_separate_indices?
@__elasticsearch_separate__ ||= ::Elastic::MultiVersionInstanceProxy.new(self, use_separate_indices: true)
else
@__elasticsearch__ ||= ::Elastic::MultiVersionInstanceProxy.new(self)
end
end
# rubocop:enable Gitlab/ModuleWithInstanceVariables
# Should be overridden in the models where some records should be skipped
def searchable?
@@ -77,7 +83,15 @@ module Elastic
class_methods do
def __elasticsearch__
@__elasticsearch__ ||= ::Elastic::MultiVersionClassProxy.new(self)
if use_separate_indices?
@__elasticsearch_separate__ ||= ::Elastic::MultiVersionClassProxy.new(self, use_separate_indices: true)
else
@__elasticsearch__ ||= ::Elastic::MultiVersionClassProxy.new(self)
end
end
def use_separate_indices?
Gitlab::Elastic::Helper::ES_SEPARATE_CLASSES.include?(self) && Elastic::DataMigrationService.migration_has_finished?(:migrate_issues_to_separate_index)
end
# Mark a dependant association as needing to be updated when a specific
......
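The proxy switch above decides, per model, which index a record is read from and written to. A minimal console sketch of the intended routing, assuming the `:migrate_issues_to_separate_index` migration has completed; the index names are illustrative defaults, not output from this MR:

```ruby
# Illustrative console sketch; index names depend on Rails.env and are examples only.
Issue.use_separate_indices?           # => true, Issue is listed in ES_SEPARATE_CLASSES
Issue.__elasticsearch__.index_name    # => "gitlab-production-issues" (standalone index)

Project.use_separate_indices?         # => false, so the default proxy is used
Project.__elasticsearch__.index_name  # => "gitlab-production" (main index alias)
```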
@@ -4,7 +4,7 @@ module Elastic
class MigrationRecord
attr_reader :version, :name, :filename
delegate :migrate, :skip_migration?, :completed?, :batched?, :throttle_delay, to: :migration
delegate :migrate, :skip_migration?, :completed?, :batched?, :throttle_delay, :pause_indexing?, to: :migration
def initialize(version:, name:, filename:)
@version = version
@@ -16,11 +16,17 @@ module Elastic
def save!(completed:)
raise 'Migrations index is not found' unless helper.index_exists?(index_name: index_name)
data = { completed: completed }.merge(timestamps(completed: completed))
data = { completed: completed, state: load_state }.merge(timestamps(completed: completed))
client.index index: index_name, type: '_doc', id: version, body: data
end
def save_state!(state)
completed = load_from_index&.dig('_source', 'completed')
client.index index: index_name, type: '_doc', id: version, body: { completed: completed, state: load_state.merge(state) }
end
def persisted?
load_from_index.present?
end
@@ -31,6 +37,18 @@ module Elastic
nil
end
def load_state
load_from_index&.dig('_source', 'state')&.with_indifferent_access || {}
end
def halted?
!!load_state&.dig('halted')
end
def name_for_key
name.underscore
end
def self.persisted_versions(completed:)
helper = Gitlab::Elastic::Helper.default
helper.client
......
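A rough sketch of the state round-trip that `save_state!`, `load_state`, and `halted?` add above, assuming the migration version shown later in this MR; the state values are illustrative:

```ruby
# Sketch only: state is merged into the migration document in the migrations index.
record = Elastic::DataMigrationService[20201123123400]

record.load_state                            # => {} while no state has been written
record.save_state!(slice: 3, max_slices: 8)  # merged with the existing state
record.load_state                            # => { "slice" => 3, "max_slices" => 8 }

record.save_state!(halted: true)
record.halted?                               # => true, read from state['halted']
```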
# frozen_string_literal: true
class Elastic::ReindexingSubtask < ApplicationRecord
self.table_name = 'elastic_reindexing_subtasks'
belongs_to :elastic_reindexing_task, class_name: 'Elastic::ReindexingTask'
validates :index_name_from, :index_name_to, :elastic_task, presence: true
end
@@ -3,6 +3,8 @@
class Elastic::ReindexingTask < ApplicationRecord
self.table_name = 'elastic_reindexing_tasks'
has_many :subtasks, class_name: 'Elastic::ReindexingSubtask', foreign_key: :elastic_reindexing_task_id
enum state: {
initial: 0,
indexing_paused: 1,
@@ -27,8 +29,9 @@ class Elastic::ReindexingTask < ApplicationRecord
def self.drop_old_indices!
old_indices_to_be_deleted.find_each do |task|
next unless Gitlab::Elastic::Helper.default.delete_index(index_name: task.index_name_from)
task.subtasks.each do |subtask|
Gitlab::Elastic::Helper.default.delete_index(index_name: subtask.index_name_from)
end
task.update!(state: :original_index_deleted)
end
end
......
@@ -31,6 +31,10 @@ module Elastic
private
def alias_names
[elastic_helper.target_name] + elastic_helper.standalone_indices_proxies.map(&:index_name)
end
def default_index_options
{
refresh_interval: nil, # Change it back to the default
@@ -40,6 +44,12 @@ module Elastic
end
def initial!
if Elastic::DataMigrationService.pending_migrations?
# migrations may have paused indexing so we do not want to unpause when aborting the reindexing process
abort_reindexing!('You have unapplied advanced search migrations. Please wait until it is finished', unpause_indexing: false)
return false
end
# Pause indexing
Gitlab::CurrentSettings.update!(elasticsearch_pause_indexing: true)
@@ -48,7 +58,7 @@ module Elastic
return false
end
expected_free_size = elastic_helper.index_size_bytes * 2
expected_free_size = alias_names.sum {|name| elastic_helper.index_size_bytes(index_name: name) } * 2
if elastic_helper.cluster_free_size_bytes < expected_free_size
abort_reindexing!("You should have at least #{expected_free_size} bytes of storage available to perform reindexing. Please increase the storage in your Elasticsearch cluster before reindexing.")
return false
@@ -60,43 +70,52 @@ module Elastic
end
def indexing_paused!
# Create an index with custom settings
# Create indices with custom settings
index_name = elastic_helper.create_empty_index(with_alias: false, options: { settings: INITIAL_INDEX_OPTIONS })
main_index = elastic_helper.create_empty_index(with_alias: false, options: { settings: INITIAL_INDEX_OPTIONS })
standalone_indices = elastic_helper.create_standalone_indices(with_alias: false, options: { settings: INITIAL_INDEX_OPTIONS })
# Record documents count
documents_count = elastic_helper.documents_count
main_index.merge(standalone_indices).each do |new_index_name, alias_name|
old_index_name = elastic_helper.target_index_name(target: alias_name)
# Trigger reindex
# Record documents count
task_id = elastic_helper.reindex(to: index_name)
documents_count = elastic_helper.documents_count(index_name: old_index_name)
# Trigger reindex
task_id = elastic_helper.reindex(from: old_index_name, to: new_index_name)
current_task.subtasks.create!(
alias_name: alias_name,
index_name_from: old_index_name,
index_name_to: new_index_name,
documents_count: documents_count,
elastic_task: task_id
)
end
current_task.update!(
current_task.update!(state: :reindexing)
index_name_from: elastic_helper.target_index_name,
index_name_to: index_name,
documents_count: documents_count,
elastic_task: task_id,
state: :reindexing
)
true
end
def save_documents_count!(refresh:)
elastic_helper.refresh_index(index_name: current_task.index_name_to) if refresh
current_task.subtasks.each do |subtask|
elastic_helper.refresh_index(index_name: subtask.index_name_to) if refresh
new_documents_count = elastic_helper.documents_count(index_name: current_task.index_name_to)
new_documents_count = elastic_helper.documents_count(index_name: subtask.index_name_to)
current_task.update!(documents_count_target: new_documents_count)
subtask.update!(documents_count_target: new_documents_count)
end
end
def check_task_status
save_documents_count!(refresh: false)
task_status = elastic_helper.task_status(task_id: current_task.elastic_task)
current_task.subtasks.each do |subtask|
return false unless task_status['completed']
task_status = elastic_helper.task_status(task_id: subtask.elastic_task)
return false unless task_status['completed']
reindexing_error = task_status.dig('error', 'type')
if reindexing_error
abort_reindexing!("Task #{current_task.elastic_task} has failed with Elasticsearch error.", additional_logs: { elasticsearch_error_type: reindexing_error })
abort_reindexing!("Task #{subtask.elastic_task} has failed with Elasticsearch error.", additional_logs: { elasticsearch_error_type: reindexing_error })
return false
end
end
true
@@ -109,22 +128,28 @@ module Elastic
def compare_documents_count
save_documents_count!(refresh: true)
old_documents_count = current_task.documents_count
current_task.subtasks.each do |subtask|
new_documents_count = current_task.documents_count_target
old_documents_count = subtask.documents_count
if old_documents_count != new_documents_count
new_documents_count = subtask.documents_count_target
abort_reindexing!("Documents count is different, Count from new index: #{new_documents_count} Count from original index: #{old_documents_count}. This likely means something went wrong during reindexing.")
if old_documents_count != new_documents_count
return false
abort_reindexing!("Documents count is different, Count from new index: #{new_documents_count} Count from original index: #{old_documents_count}. This likely means something went wrong during reindexing.")
return false
end
end
true
end
def apply_default_index_options
elastic_helper.update_settings(index_name: current_task.index_name_to, settings: default_index_options)
current_task.subtasks.each do |subtask|
elastic_helper.update_settings(index_name: subtask.index_name_to, settings: default_index_options)
end
end
def switch_alias_to_new_index
elastic_helper.switch_alias(to: current_task.index_name_to)
current_task.subtasks.each do |subtask|
elastic_helper.switch_alias(from: subtask.index_name_from, to: subtask.index_name_to, alias_name: subtask.alias_name)
end
end
def finalize_reindexing
@@ -144,8 +169,8 @@ module Elastic
true
end
def abort_reindexing!(reason, additional_logs: {})
def abort_reindexing!(reason, additional_logs: {}, unpause_indexing: true)
error = { message: 'elasticsearch_reindex_error', error: reason, elasticsearch_task_id: current_task.elastic_task, gitlab_task_id: current_task.id, gitlab_task_state: current_task.state }
error = { message: 'elasticsearch_reindex_error', error: reason, gitlab_task_id: current_task.id, gitlab_task_state: current_task.state }
logger.error(error.merge(additional_logs))
current_task.update!(
@@ -154,7 +179,7 @@ module Elastic
)
# Unpause indexing
Gitlab::CurrentSettings.update!(elasticsearch_pause_indexing: false)
Gitlab::CurrentSettings.update!(elasticsearch_pause_indexing: false) if unpause_indexing
end
def logger
......
@@ -20,23 +20,32 @@ module Elastic
migrations.sort_by(&:version)
end
def [](version)
migrations.find { |m| m.version == version }
end
def drop_migration_has_finished_cache!(migration)
name = migration.name.underscore
Rails.cache.delete cache_key(:migration_has_finished, migration.name_for_key)
Rails.cache.delete cache_key(:migration_has_finished, name)
end
def migration_has_finished?(name)
Rails.cache.fetch cache_key(:migration_has_finished, name), expires_in: 30.minutes do
Rails.cache.fetch cache_key(:migration_has_finished, name.to_s.underscore), expires_in: 30.minutes do
migration_has_finished_uncached?(name)
end
end
def migration_has_finished_uncached?(name)
migration = migrations.find { |migration| migration.name == name.to_s.camelize }
migration = migrations.find { |migration| migration.name_for_key == name.to_s.underscore }
!!migration&.load_from_index&.dig('_source', 'completed')
end
def pending_migrations?
migrations.reverse.any? do |migration|
!migration_has_finished?(migration.name_for_key)
end
end
def mark_all_as_completed!
migrations.each do |migration|
migration.save!(completed: true)
......
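A hedged console sketch of the lookup and caching helpers above; return values are assumptions based on the code in this hunk:

```ruby
# Console sketch only.
Elastic::DataMigrationService.pending_migrations?
# => true while any known migration is not marked completed in the migrations index

Elastic::DataMigrationService.migration_has_finished?(:migrate_issues_to_separate_index)
# => cached for 30 minutes, keyed on the underscored migration name

Elastic::DataMigrationService[20201123123400]
# => the Elastic::MigrationRecord for that version, or nil if the version is unknown
```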
@@ -23,7 +23,7 @@
.form-check
= f.check_box :elasticsearch_indexing, class: 'form-check-input', data: { qa_selector: 'indexing_checkbox' }
= f.label :elasticsearch_indexing, class: 'form-check-label' do
Elasticsearch indexing
= _('Elasticsearch indexing')
- unless Gitlab::CurrentSettings.elasticsearch_indexing?
.form-text.text-muted
= _('An empty index will be created if one does not already exist')
@@ -35,11 +35,16 @@
.card-body
.form-group
.form-check
= f.check_box :elasticsearch_pause_indexing, class: 'form-check-input', data: { qa_selector: 'pause_checkbox' }, disabled: !Gitlab::CurrentSettings.elasticsearch_indexing?
- pending_migrations = Elastic::DataMigrationService.pending_migrations? && Gitlab::CurrentSettings.elasticsearch_pause_indexing? rescue false
- disable_checkbox = !Gitlab::CurrentSettings.elasticsearch_indexing? || pending_migrations
= f.check_box :elasticsearch_pause_indexing, class: 'form-check-input', data: { qa_selector: 'pause_checkbox' }, disabled: disable_checkbox
= f.label :elasticsearch_pause_indexing, class: 'form-check-label' do
Pause Elasticsearch indexing
= _('Pause Elasticsearch indexing')
.form-text.text-muted
= _('Changes are still tracked. Useful for cluster/index migrations.')
- if pending_migrations
.form-text.text-warning
= _('There are pending advanced search migrations. Indexing must remain paused until the migrations are completed.')
.form-group
.form-check
@@ -102,29 +107,31 @@
.sub-section
%h4= _('Elasticsearch zero-downtime reindexing')
= link_to _('Trigger cluster reindexing'), admin_elasticsearch_trigger_reindexing_path, class: 'gl-button btn btn-primary', data: { confirm: _('Are you sure you want to reindex?') }, method: :post, disabled: @elasticsearch_reindexing_task&.in_progress?
= link_to _('Trigger cluster reindexing'), admin_elasticsearch_trigger_reindexing_path, class: "gl-button btn btn-info", disabled: @elasticsearch_reindexing_task&.in_progress?, data: { confirm: _('Are you sure you want to reindex?') }, method: :post
.form-text.text-muted
.form-text.gl-text-gray-600
= _('This feature should be used with an index that was created after 13.0')
- Elastic::ReindexingTask.old_indices_scheduled_for_deletion.each do |task|
.form-text.text-danger
= _("Unused, previous index '%{index_name}' will be deleted after %{time} automatically.") % { index_name: task.index_name_from, time: task.delete_original_index_at }
= _("Unused, previous indices: %{index_names} will be deleted after %{time} automatically.") % { index_names: task.subtasks.map(&:index_name_from).join(', '), time: task.delete_original_index_at }
= link_to _('Cancel index deletion'), admin_elasticsearch_cancel_index_deletion_path(task_id: task.id), method: :post
= link_to _('Cancel index deletion'), admin_elasticsearch_cancel_index_deletion_path(task_id: task.id), class: 'gl-mb-2', method: :post
- if @elasticsearch_reindexing_task
- expected_documents = @elasticsearch_reindexing_task.documents_count
%h5= _('Reindexing Status: %{status}') % { status: @elasticsearch_reindexing_task.state }
- processed_documents = @elasticsearch_reindexing_task.documents_count_target
%h5= _('Reindexing status')
%p= _('State: %{last_reindexing_task_state}') % { last_reindexing_task_state: @elasticsearch_reindexing_task.state }
- if @elasticsearch_reindexing_task.elastic_task
%p= _('Task ID: %{elastic_task}') % { elastic_task: @elasticsearch_reindexing_task.elastic_task }
- if @elasticsearch_reindexing_task.error_message
%p= _('Error: %{error_message}') % { error_message: @elasticsearch_reindexing_task.error_message }
- if expected_documents
- @elasticsearch_reindexing_task.subtasks.each do |subtask|
%p= _('Expected documents: %{expected_documents}') % { expected_documents: expected_documents }
.card-body.form-group
- if processed_documents && expected_documents
%h5= subtask.alias_name
- percentage = ((processed_documents / expected_documents.to_f) * 100).round(2)
- expected_documents = subtask.documents_count
%p= _('Documents reindexed: %{processed_documents} (%{percentage}%%)') % { processed_documents: processed_documents, percentage: percentage }
- if subtask.elastic_task
.progress
%p= _('Task ID: %{elastic_task}') % { elastic_task: subtask.elastic_task }
.progress-bar.progress-bar-striped.bg-primary{ "aria-valuemax" => "100", "aria-valuemin" => "0", "aria-valuenow" => percentage, :role => "progressbar", :style => "width: #{percentage}%" }
- if expected_documents
- processed_documents = subtask.documents_count_target
%p= _('Expected documents: %{expected_documents}') % { expected_documents: expected_documents }
- if processed_documents && expected_documents
- percentage = ((processed_documents / expected_documents.to_f) * 100).round(2)
%p= _('Documents reindexed: %{processed_documents} (%{percentage}%%)') % { processed_documents: processed_documents, percentage: percentage }
.progress
.progress-bar{ "aria-valuemax" => "100", "aria-valuemin" => "0", "aria-valuenow" => percentage, :role => "progressbar", :style => "width: #{percentage}%" }
.sub-section
%h4= _('Elasticsearch indexing restrictions')
......
@@ -15,6 +15,10 @@ module Elastic
self.class.get_throttle_delay
end
def pause_indexing?
self.class.get_pause_indexing
end
class_methods do
def batched!
class_attributes[:batched] = true
@@ -24,6 +28,14 @@ module Elastic
class_attributes[:batched]
end
def pause_indexing!
class_attributes[:pause_indexing] = true
end
def get_pause_indexing
class_attributes[:pause_indexing]
end
def throttle_delay(value)
class_attributes[:throttle_delay] = value
end
......
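The class-level options above are consumed by `MigrationWorker`. A sketch of a migration declaring them, mirroring `MigrateIssuesToSeparateIndex` later in this MR; the class name and body are hypothetical:

```ruby
# Sketch of the options DSL defined above; illustrative only.
class ExampleBackfillMigration < Elastic::Migration
  batched!                 # migrate is re-run until completed? returns true
  throttle_delay 1.minute  # delay between batched runs
  pause_indexing!          # worker pauses indexing for this migration and unpauses when done

  def migrate
    # one batch of work per invocation
  end

  def completed?
    true
  end
end
```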
# frozen_string_literal: true
module Elastic
module MigrationState
def migration_state
migration_record.load_state
end
def set_migration_state(state)
log "Setting migration_state to #{state.to_json}"
migration_record.save_state!(state)
end
end
end
@@ -29,12 +29,21 @@ module Elastic
helper.create_migrations_index
end
if migration.halted?
logger.info "MigrationWorker: migration[#{migration.name}] has been halted. All future migrations will be halted because of that. Exiting"
unpause_indexing!(migration)
break false
end
execute_migration(migration)
completed = migration.completed?
logger.info "MigrationWorker: migration[#{migration.name}] updating with completed: #{completed}"
migration.save!(completed: completed)
unpause_indexing!(migration) if completed
Elastic::DataMigrationService.drop_migration_has_finished_cache!(migration)
end
end
@@ -45,6 +54,8 @@ module Elastic
if migration.persisted? && !migration.batched?
logger.info "MigrationWorker: migration[#{migration.name}] did not execute migrate method since it was already executed. Waiting for migration to complete"
else
pause_indexing!(migration)
logger.info "MigrationWorker: migration[#{migration.name}] executing migrate method"
migration.migrate
@@ -61,6 +72,27 @@ module Elastic
Elastic::DataMigrationService.migrations.find { |migration| !completed_migrations.include?(migration.version) }
end
def pause_indexing!(migration)
return unless migration.pause_indexing?
return if migration.load_state[:pause_indexing].present?
pause_indexing = !Gitlab::CurrentSettings.elasticsearch_pause_indexing?
migration.save_state!(pause_indexing: pause_indexing)
if pause_indexing
logger.info 'MigrationWorker: Pausing indexing'
Gitlab::CurrentSettings.update!(elasticsearch_pause_indexing: true)
end
end
def unpause_indexing!(migration)
return unless migration.pause_indexing?
return unless migration.load_state[:pause_indexing]
logger.info 'MigrationWorker: unpausing indexing'
Gitlab::CurrentSettings.update!(elasticsearch_pause_indexing: false)
end
def helper
Gitlab::Elastic::Helper.default
end
......
@@ -16,9 +16,19 @@ class ElasticDeleteProjectWorker
private
def indices
helper = Gitlab::Elastic::Helper.default
if Elastic::DataMigrationService.migration_has_finished?(:migrate_issues_to_separate_index)
[helper.target_name] + helper.standalone_indices_proxies.map(&:index_name)
else
[helper.target_name]
end
end
def remove_project_and_children_documents(project_id, es_id)
client.delete_by_query({
index: Project.__elasticsearch__.index_name,
index: indices,
routing: es_id,
body: {
query: {
......
---
title: 'Advanced Search: Copy issues to new index'
merge_request: 48334
author:
type: changed
# frozen_string_literal: true
class MigrateIssuesToSeparateIndex < Elastic::Migration
pause_indexing!
batched!
throttle_delay 1.minute
MAX_ATTEMPTS = 30
FIELDS = %w(
type
id
iid
title
description
created_at
updated_at
state
project_id
author_id
confidential
assignee_id
visibility_level
issues_access_level
).freeze
def migrate
# On initial batch we only create index
if migration_state[:slice].blank?
log "Create standalone issues index under #{issues_index_name}"
helper.create_standalone_indices unless helper.index_exists?(index_name: issues_index_name)
options = {
slice: 0,
retry_attempt: 0,
max_slices: get_number_of_shards
}
set_migration_state(options)
return
end
retry_attempt = migration_state[:retry_attempt].to_i
slice = migration_state[:slice]
max_slices = migration_state[:max_slices]
if retry_attempt >= MAX_ATTEMPTS
fail_migration_halt_error!(retry_attempt: retry_attempt)
return
end
if slice < max_slices
log "Launching reindexing for slice:#{slice} | max_slices:#{max_slices}"
response = reindex(slice: slice, max_slices: max_slices)
process_response(response)
log "Reindexing for slice:#{slice} | max_slices:#{max_slices} is completed with #{response.to_json}"
set_migration_state(
slice: slice + 1,
retry_attempt: retry_attempt,
max_slices: max_slices
)
end
rescue StandardError => e
log "migrate failed, increasing migration_state retry_attempt: #{retry_attempt} error:#{e.message}"
set_migration_state(
slice: slice,
retry_attempt: retry_attempt + 1,
max_slices: max_slices
)
raise e
end
def completed?
log "completed check: Refreshing #{issues_index_name}"
helper.refresh_index(index_name: issues_index_name)
original_count = original_issues_documents_count
new_count = new_issues_documents_count
log "Checking to see if migration is completed based on index counts: original_count:#{original_count}, new_count:#{new_count}"
original_count == new_count
end
private
def reindex(slice:, max_slices:)
body = query(slice: slice, max_slices: max_slices)
client.reindex(body: body, wait_for_completion: true)
end
def process_response(response)
if response['failures'].present?
log_raise "Reindexing failed with #{response['failures']}"
end
if response['total'] != (response['updated'] + response['created'] + response['deleted'])
log_raise "Slice reindexing seems to have failed, total is not equal to updated + created + deleted"
end
end
def query(slice:, max_slices:)
{
source: {
index: default_index_name,
_source: FIELDS,
query: {
match: {
type: 'issue'
}
},
slice: {
id: slice,
max: max_slices
}
},
dest: {
index: issues_index_name
}
}
end
def original_issues_documents_count
query = {
size: 0,
aggs: {
issues: {
filter: {
term: {
type: {
value: 'issue'
}
}
}
}
}
}
results = client.search(index: default_index_name, body: query)
results.dig('aggregations', 'issues', 'doc_count')
end
def new_issues_documents_count
helper.documents_count(index_name: issues_index_name)
end
def get_number_of_shards
helper.get_settings.dig('number_of_shards').to_i
end
def default_index_name
helper.target_name
end
def issues_index_name
"#{default_index_name}-issues"
end
end
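A sketch of how `migration_state` evolves as the sliced reindex above runs; `max_slices` comes from the source index's shard count, and all values below are illustrative:

```ruby
# Rough progression of migration_state for this migration (values made up).
[
  { slice: 0, retry_attempt: 0,  max_slices: 5 },               # initial run: standalone issues index created
  { slice: 1, retry_attempt: 0,  max_slices: 5 },               # first slice copied
  { slice: 5, retry_attempt: 0,  max_slices: 5 },               # all slices copied; completed? now compares doc counts
  { slice: 2, retry_attempt: 30, max_slices: 5, halted: true }  # fail_migration_halt_error! after MAX_ATTEMPTS retries
]
```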
@@ -6,15 +6,24 @@ module Elastic
module ClassProxyUtil
extend ActiveSupport::Concern
def initialize(target)
attr_reader :use_separate_indices
def initialize(target, use_separate_indices: false)
super(target)
config = version_namespace.const_get('Config', false)
const_name = if use_separate_indices
"#{target.name}Config"
else
'Config'
end
config = version_namespace.const_get(const_name, false)
@index_name = config.index_name
@document_type = config.document_type
@settings = config.settings
@mapping = config.mapping
@use_separate_indices = use_separate_indices
end
### Multi-version utils
......
@@ -6,10 +6,16 @@ module Elastic
module InstanceProxyUtil
extend ActiveSupport::Concern
def initialize(target)
def initialize(target, use_separate_indices: false)
super(target)
config = version_namespace.const_get('Config', false)
const_name = if use_separate_indices
"#{target.class.name}Config"
else
'Config'
end
config = version_namespace.const_get(const_name, false)
@index_name = config.index_name
@document_type = config.document_type
......
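Both proxy initializers above resolve a per-class config constant when separate indices are in use. A sketch of the lookup, using the constants introduced elsewhere in this MR; the namespace is an assumption for illustration:

```ruby
# Sketch of the constant resolution performed above.
version_namespace = Elastic::Latest

version_namespace.const_get('Config', false)       # default index config, used when use_separate_indices is false
version_namespace.const_get('IssueConfig', false)  # "#{Issue.name}Config", used for the standalone issues index
```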
# frozen_string_literal: true
module Elastic
module Latest
module IssueConfig
# To obtain settings and mappings methods
extend Elasticsearch::Model::Indexing::ClassMethods
extend Elasticsearch::Model::Naming::ClassMethods
self.document_type = 'doc'
self.index_name = [Rails.application.class.module_parent_name.downcase, Rails.env, 'issues'].join('-')
settings Elastic::Latest::Config.settings.to_hash
mappings dynamic: 'strict' do
indexes :type, type: :keyword
indexes :id, type: :integer
indexes :iid, type: :integer
indexes :title, type: :text, index_options: 'positions'
indexes :description, type: :text, index_options: 'positions'
indexes :created_at, type: :date
indexes :updated_at, type: :date
indexes :state, type: :keyword
indexes :project_id, type: :integer
indexes :author_id, type: :integer
indexes :confidential, type: :boolean
indexes :assignee_id, type: :integer
indexes :visibility_level, type: :integer
indexes :issues_access_level, type: :integer
end
end
end
end
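Given the strict mapping above and the `FIELDS` whitelist in the migration, a document in the standalone issues index has roughly this shape; all values below are made up for illustration:

```ruby
# Illustrative issue document for the standalone index (values are examples only).
{
  type: 'issue',
  id: 1,
  iid: 42,
  title: 'Example title',
  description: 'Example description',
  created_at: '2020-11-23T12:34:00Z',
  updated_at: '2020-11-23T12:34:00Z',
  state: 'opened',
  project_id: 7,
  author_id: 3,
  confidential: false,
  assignee_id: 3,
  visibility_level: 20,
  issues_access_level: 20
}
```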
@@ -26,6 +26,16 @@ module Elastic
data.merge(generic_attributes)
end
private
def generic_attributes
if Elastic::DataMigrationService.migration_has_finished?(:migrate_issues_to_separate_index)
super.except('join_field')
else
super
end
end
end
end
end
@@ -3,6 +3,7 @@
module Elastic
class Migration
include Elastic::MigrationOptions
include Elastic::MigrationState
attr_reader :version
@@ -28,10 +29,26 @@ module Elastic
helper.client
end
def migration_record
Elastic::DataMigrationService[version]
end
def fail_migration_halt_error!(retry_attempt: 0)
set_migration_state(
retry_attempt: retry_attempt,
halted: true
)
end
def log(message)
logger.info "[Elastic::Migration: #{self.version}] #{message}"
end
def log_raise(message)
logger.error "[Elastic::Migration: #{self.version}] #{message}"
raise message
end
def logger
@logger ||= ::Gitlab::Elasticsearch::Logger.build
end
......
@@ -5,9 +5,10 @@ module Elastic
class MultiVersionClassProxy
include MultiVersionUtil
def initialize(data_target)
def initialize(data_target, use_separate_indices: false)
@data_target = data_target
@data_class = get_data_class(data_target)
@use_separate_indices = use_separate_indices
generate_forwarding
end
......
@@ -5,9 +5,10 @@ module Elastic
class MultiVersionInstanceProxy
include MultiVersionUtil
def initialize(data_target)
def initialize(data_target, use_separate_indices: false)
@data_target = data_target
@data_class = get_data_class(data_target.class)
@use_separate_indices = use_separate_indices
generate_forwarding
end
......
@@ -5,7 +5,7 @@ module Elastic
extend ActiveSupport::Concern
include Gitlab::Utils::StrongMemoize
attr_reader :data_class, :data_target
attr_reader :data_class, :data_target, :use_separate_indices
# TODO: remove once multi-version is functional https://gitlab.com/gitlab-org/gitlab/issues/10156
TARGET_VERSION = 'V12p1'
@@ -13,7 +13,8 @@ module Elastic
# @params version [String, Module] can be a string "V12p1" or module (Elastic::V12p1)
def version(version)
version = Elastic.const_get(version, false) if version.is_a?(String)
version.const_get(proxy_class_name, false).new(data_target)
version.const_get(proxy_class_name, false).new(data_target, use_separate_indices: use_separate_indices)
end
# TODO: load from db table https://gitlab.com/gitlab-org/gitlab/issues/12555
......
@@ -3,5 +3,6 @@
module Elastic
module V12p1
Config = Elastic::Latest::Config
IssueConfig = Elastic::Latest::IssueConfig
end
end
@@ -3,9 +3,8 @@
module Gitlab
module Elastic
class Helper
ES_ENABLED_CLASSES = [
ES_MAPPINGS_CLASSES = [
Project,
Issue,
MergeRequest,
Snippet,
Note,
@@ -14,6 +13,10 @@ module Gitlab
Repository
].freeze
ES_SEPARATE_CLASSES = [
Issue
].freeze
attr_reader :version, :client
attr_accessor :target_name
@@ -40,13 +43,13 @@ module Gitlab
end
def default_settings
ES_ENABLED_CLASSES.inject({}) do |settings, klass|
ES_MAPPINGS_CLASSES.inject({}) do |settings, klass|
settings.deep_merge(klass.__elasticsearch__.settings.to_hash)
end
end
def default_mappings
mappings = ES_ENABLED_CLASSES.inject({}) do |m, klass|
mappings = ES_MAPPINGS_CLASSES.inject({}) do |m, klass|
m.deep_merge(klass.__elasticsearch__.mappings.to_hash)
end
mappings.deep_merge(::Elastic::Latest::CustomLanguageAnalyzers.custom_analyzers_mappings)
@@ -64,6 +67,9 @@ module Gitlab
completed: {
type: 'boolean'
},
state: {
type: 'object'
},
started_at: {
type: 'date'
},
@@ -87,6 +93,54 @@ module Gitlab
migrations_index_name
end
def standalone_indices_proxies
ES_SEPARATE_CLASSES.map do |class_name|
::Elastic::Latest::ApplicationClassProxy.new(class_name, use_separate_indices: true)
end
end
def create_standalone_indices(with_alias: true, options: {})
standalone_indices_proxies.each_with_object({}) do |proxy, indices|
alias_name = proxy.index_name
new_index_name = "#{alias_name}-#{Time.now.strftime("%Y%m%d-%H%M")}"
raise "Index under '#{new_index_name}' already exists" if index_exists?(index_name: new_index_name)
if with_alias
raise "Alias under '#{alias_name}' already exists" if alias_exists?(name: alias_name)
end
settings = proxy.settings.to_hash
settings = settings.merge(options[:settings]) if options[:settings]
mappings = proxy.mappings.to_hash
mappings = mappings.merge(options[:mappings]) if options[:mappings]
create_index_options = {
index: new_index_name,
body: {
settings: settings,
mappings: mappings
}
}.merge(additional_index_options)
client.indices.create create_index_options
client.indices.put_alias(name: alias_name, index: new_index_name) if with_alias
indices[new_index_name] = alias_name
end
end
def delete_standalone_indices
standalone_indices_proxies.map do |proxy|
index_name = target_index_name(target: proxy.index_name)
result = delete_index(index_name: index_name)
[index_name, proxy.index_name, result]
end
end
def create_empty_index(with_alias: true, options: {})
new_index_name = options[:index_name] || "#{target_name}-#{Time.now.strftime("%Y%m%d-%H%M")}"
@@ -111,11 +165,13 @@ module Gitlab
client.indices.create create_index_options
client.indices.put_alias(name: target_name, index: new_index_name) if with_alias
new_index_name
{
new_index_name => target_name
}
end
def delete_index(index_name: nil)
result = client.indices.delete(index: index_name || target_index_name)
result = client.indices.delete(index: target_index_name(target: index_name))
result['acknowledged']
rescue ::Elasticsearch::Transport::Transport::Errors::NotFound => e
Gitlab::ErrorTracking.log_exception(e)
@@ -126,29 +182,40 @@ module Gitlab
client.indices.exists?(index: index_name || target_name) # rubocop:disable CodeReuse/ActiveRecord
end
def alias_exists?
def alias_exists?(name: nil)
client.indices.exists_alias(name: target_name)
client.indices.exists_alias(name: name || target_name)
end
# Calls Elasticsearch refresh API to ensure data is searchable
# immediately.
# https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-refresh.html
# By default refreshes main and standalone_indices
def refresh_index(index_name: nil)
client.indices.refresh(index: index_name || target_name)
indices = if index_name.nil?
[target_name] + standalone_indices_proxies.map(&:index_name)
else
[index_name]
end
indices.each do |index|
client.indices.refresh(index: index)
end
end
def index_size(index_name: nil)
client.indices.stats['indices'][index_name || target_index_name]['total']
index = target_index_name(target: index_name || target_index_name)
client.indices.stats.dig('indices', index, 'total')
end
def documents_count(index_name: nil)
index = index_name || target_index_name
index = target_index_name(target: index_name || target_index_name)
client.indices.stats.dig('indices', index, 'primaries', 'docs', 'count')
end
def index_size_bytes
def index_size_bytes(index_name: nil)
index_size['store']['size_in_bytes']
index_size(index_name: index_name)['store']['size_in_bytes']
end
def cluster_free_size_bytes
@@ -184,13 +251,13 @@ module Gitlab
client.indices.put_settings(index: index_name || target_index_name, body: settings)
end
def switch_alias(from: target_index_name, to:)
def switch_alias(from: target_index_name, alias_name: target_name, to:)
actions = [
{
remove: { index: from, alias: target_name }
remove: { index: from, alias: alias_name }
},
{
add: { index: to, alias: target_name }
add: { index: to, alias: alias_name }
}
]
@@ -199,11 +266,13 @@ module Gitlab
end
# This method is used when we need to get an actual index name (if it's used through an alias)
def target_index_name
def target_index_name(target: nil)
if alias_exists?
target ||= target_name
client.indices.get_alias(name: target_name).each_key.first
if alias_exists?(name: target)
client.indices.get_alias(name: target).each_key.first
else
target_name
target
end
end
......
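A hedged console sketch of the return shapes of the helper methods added above; the index names and timestamps are illustrative only:

```ruby
# Console sketch; index names are examples, not output from this MR.
helper = Gitlab::Elastic::Helper.default

helper.create_empty_index
# => { "gitlab-production-20201201-1010" => "gitlab-production" }

helper.create_standalone_indices
# => { "gitlab-production-issues-20201201-1010" => "gitlab-production-issues" }

helper.delete_standalone_indices
# => [["gitlab-production-issues-20201201-1010", "gitlab-production-issues", true]]

helper.target_index_name(target: 'gitlab-production-issues')
# => the concrete index behind the alias, or the name itself if no alias exists
```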
namespace :gitlab do
namespace :elastic do
desc "GitLab | Elasticsearch | Index eveything at once"
desc "GitLab | Elasticsearch | Index everything at once"
task :index do
# UPDATE_INDEX=true can cause some projects not to be indexed properly if someone were to push a commit to the
# project before the rake task could get to it, so we set it to `nil` here to avoid that. It doesn't make sense
@@ -57,7 +57,7 @@ namespace :gitlab do
logger.info("Indexing snippets... " + "done".color(:green))
end
desc "GitLab | Elasticsearch | Create empty index and assign alias"
desc "GitLab | Elasticsearch | Create empty indexes and assigns an alias for each"
task :create_empty_index, [:target_name] => [:environment] do |t, args|
with_alias = ENV["SKIP_ALIAS"].nil?
options = {}
@@ -68,14 +68,25 @@ namespace :gitlab do
helper = Gitlab::Elastic::Helper.new(target_name: args[:target_name])
index_name = helper.create_empty_index(with_alias: with_alias, options: options)
helper.create_migrations_index unless helper.index_exists?(index_name: helper.migrations_index_name)
# with_alias is used to support interacting with a specific index (such as when reclaiming the production index
::Elastic::DataMigrationService.mark_all_as_completed!
# name when the index was created prior to 13.0). If the `SKIP_ALIAS` environment variable is set,
# do not create standalone indexes and do not create the migrations index
if with_alias
standalone_index_names = helper.create_standalone_indices(options: options)
standalone_index_names.each do |index_name, alias_name|
puts "Index '#{index_name}' has been created.".color(:green)
puts "Alias '#{alias_name}' -> '#{index_name}' has been created.".color(:green)
end
helper.create_migrations_index unless helper.index_exists?(index_name: helper.migrations_index_name)
::Elastic::DataMigrationService.mark_all_as_completed!
end
puts "Index '#{index_name}' has been created.".color(:green)
puts "Alias '#{helper.target_name}' → '#{index_name}' has been created".color(:green) if with_alias
end
desc "GitLab | Elasticsearch | Delete index"
desc "GitLab | Elasticsearch | Delete all indexes"
task :delete_index, [:target_name] => [:environment] do |t, args|
helper = Gitlab::Elastic::Helper.new(target_name: args[:target_name])
@@ -84,9 +95,18 @@ namespace :gitlab do
else
puts "Index/alias '#{helper.target_name}' was not found".color(:green)
end
results = helper.delete_standalone_indices
results.each do |index_name, alias_name, result|
if result
puts "Index '#{index_name}' with alias '#{alias_name}' has been deleted".color(:green)
else
puts "Index '#{index_name}' with alias '#{alias_name}' was not found".color(:green)
end
end
end end
desc "GitLab | Elasticsearch | Recreate index" desc "GitLab | Elasticsearch | Recreate indexes"
task :recreate_index, [:target_name] => [:environment] do |t, args| task :recreate_index, [:target_name] => [:environment] do |t, args|
Rake::Task["gitlab:elastic:delete_index"].invoke(*args) Rake::Task["gitlab:elastic:delete_index"].invoke(*args)
Rake::Task["gitlab:elastic:create_empty_index"].invoke(*args) Rake::Task["gitlab:elastic:create_empty_index"].invoke(*args)
......
...@@ -3,8 +3,14 @@ namespace :gitlab do ...@@ -3,8 +3,14 @@ namespace :gitlab do
namespace :test do namespace :test do
desc 'GitLab | Elasticsearch | Test | Measure space taken by ES indices' desc 'GitLab | Elasticsearch | Test | Measure space taken by ES indices'
task index_size: :environment do task index_size: :environment do
puts "===== Size stats for index: #{Project.__elasticsearch__.index_name} =====" helper = Gitlab::Elastic::Helper.default
pp Gitlab::Elastic::Helper.default.index_size.slice(*%w(docs store))
indices = [helper.target_name]
indices += helper.standalone_indices_proxies.map(&:index_name) if Elastic::DataMigrationService.migration_has_finished?(:migrate_issues_to_separate_index)
indices.each do |index_name|
puts "===== Size stats for index: #{index_name} ====="
pp helper.index_size(index_name: index_name).slice(*%w(docs store))
end
end end
desc 'GitLab | Elasticsearch | Test | Measure space taken by ES indices, reindex, and measure space taken again' desc 'GitLab | Elasticsearch | Test | Measure space taken by ES indices, reindex, and measure space taken again'
......
...@@ -25,8 +25,7 @@ RSpec.describe Admin::ApplicationSettingsController do ...@@ -25,8 +25,7 @@ RSpec.describe Admin::ApplicationSettingsController do
get :general get :general
expect(assigns(:elasticsearch_reindexing_task)).to eq(task) expect(assigns(:elasticsearch_reindexing_task)).to eq(task)
expect(response.body).to include('Reindexing status') expect(response.body).to include("Reindexing Status: #{task.state}")
expect(response.body).to include("State: #{task.state}")
end end
end end
end end
......
...@@ -11,6 +11,10 @@ RSpec.describe AddNewDataToIssuesDocuments, :elastic, :sidekiq_inline do ...@@ -11,6 +11,10 @@ RSpec.describe AddNewDataToIssuesDocuments, :elastic, :sidekiq_inline do
before do before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true) stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
allow(Elastic::DataMigrationService).to receive(:migration_has_finished?)
.with(:migrate_issues_to_separate_index)
.and_return(false)
# ensure issues are indexed # ensure issues are indexed
issues issues
......
# frozen_string_literal: true
require 'spec_helper'
require File.expand_path('ee/elastic/migrate/20201123123400_migrate_issues_to_separate_index.rb')
RSpec.describe MigrateIssuesToSeparateIndex, :elastic, :sidekiq_inline do
let(:version) { 20201123123400 }
let(:migration) { described_class.new(version) }
let(:issues) { create_list(:issue, 3) }
let(:index_name) { "#{es_helper.target_name}-issues" }
before do
allow(Elastic::DataMigrationService).to receive(:migration_has_finished?)
.with(:migrate_issues_to_separate_index)
.and_return(false)
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
issues
ensure_elasticsearch_index!
end
describe 'migration_options' do
it 'has migration options set', :aggregate_failures do
expect(migration.batched?).to be_truthy
expect(migration.throttle_delay).to eq(1.minute)
expect(migration.pause_indexing?).to be_truthy
end
end
describe '.migrate', :clean_gitlab_redis_shared_state do
context 'initial launch' do
before do
es_helper.delete_index(index_name: es_helper.target_index_name(target: index_name))
end
it 'creates an index and sets migration_state' do
expect { migration.migrate }.to change { es_helper.alias_exists?(name: index_name) }.from(false).to(true)
expect(migration.migration_state).to include(slice: 0, max_slices: 5)
end
end
context 'batch run' do
it 'migrates all issues' do
total_shards = es_helper.get_settings.dig('number_of_shards').to_i
migration.set_migration_state(slice: 0, max_slices: total_shards)
total_shards.times do |i|
migration.migrate
end
expect(migration.completed?).to be_truthy
expect(es_helper.documents_count(index_name: "#{es_helper.target_name}-issues")).to eq(issues.count)
end
end
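The batch run above performs one sliced reindex per shard. A hedged sketch of the Elasticsearch call the migration is assumed to issue for each slice (index names and the query are illustrative, not the migration's actual code):
client.reindex(
  wait_for_completion: false,
  body: {
    source: {
      index: 'gitlab-production',
      slice: { id: slice, max: max_slices }, # one slice per shard, as driven by migration_state
      query: { term: { type: 'issue' } }     # copy only issue documents
    },
    dest: { index: 'gitlab-production-issues' }
  }
)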
context 'failed run' do
let(:client) { double('Elasticsearch::Transport::Client') }
before do
allow(migration).to receive(:client).and_return(client)
end
context 'exception is raised' do
before do
allow(client).to receive(:reindex).and_raise(StandardError)
end
it 'increases retry_attempt' do
migration.set_migration_state(slice: 0, max_slices: 2, retry_attempt: 1)
expect { migration.migrate }.to raise_error(StandardError)
expect(migration.migration_state).to match(slice: 0, max_slices: 2, retry_attempt: 2)
end
it 'fails the migration after too many attempts' do
migration.set_migration_state(slice: 0, max_slices: 2, retry_attempt: 30)
migration.migrate
expect(migration.migration_state).to match(slice: 0, max_slices: 2, retry_attempt: 30, halted: true)
expect(migration).not_to receive(:process_response)
end
end
context 'elasticsearch failures' do
context 'total is not equal' do
before do
allow(client).to receive(:reindex).and_return({ "total" => 60, "updated" => 0, "created" => 45, "deleted" => 0, "failures" => [] })
end
it 'raises an error' do
migration.set_migration_state(slice: 0, max_slices: 2)
expect { migration.migrate }.to raise_error(/total is not equal/)
end
end
context 'reindexing failures' do
before do
allow(client).to receive(:reindex).and_return({ "total" => 60, "updated" => 0, "created" => 0, "deleted" => 0, "failures" => [{ "type": "es_rejected_execution_exception" }] })
end
it 'raises an error' do
migration.set_migration_state(slice: 0, max_slices: 2)
expect { migration.migrate }.to raise_error(/failed with/)
end
end
end
end
end
describe '.completed?' do
subject { migration.completed? }
before do
2.times do |slice|
migration.set_migration_state(slice: slice, max_slices: 2)
migration.migrate
end
end
context 'counts are equal' do
let(:issues_count) { issues.count }
it 'returns true' do
is_expected.to be_truthy
end
end
end
end
# frozen_string_literal: true
FactoryBot.define do
factory :elastic_reindexing_subtask, class: 'Elastic::ReindexingSubtask' do
association :elastic_reindexing_task, in_progress: false, state: :success
sequence(:index_name_from) { |n| "old_index_name_#{n}" }
sequence(:index_name_to) { |n| "new_index_name_#{n}" }
sequence(:elastic_task) { |n| "elastic_task_#{n}" }
sequence(:alias_name) { |n| "alias_name_#{n}" }
end
end
...@@ -6,5 +6,11 @@ FactoryBot.define do ...@@ -6,5 +6,11 @@ FactoryBot.define do
in_progress { true } in_progress { true }
index_name_from { 'old_index_name' } index_name_from { 'old_index_name' }
index_name_to { 'new_index_name' } index_name_to { 'new_index_name' }
trait :with_subtask do
after(:create) do |task|
create :elastic_reindexing_subtask, elastic_reindexing_task: task
end
end
end end
end end
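A hedged usage sketch of the trait added above: specs that previously set index names on the task itself now read them from the associated subtask (the sequence value shown is an assumption).
task = create(:elastic_reindexing_task, :with_subtask, state: :success)
task.subtasks.first.index_name_from # => e.g. "old_index_name_1"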
...@@ -7,13 +7,13 @@ RSpec.describe Gitlab::Elastic::Helper do ...@@ -7,13 +7,13 @@ RSpec.describe Gitlab::Elastic::Helper do
shared_context 'with a legacy index' do shared_context 'with a legacy index' do
before do before do
@index_name = helper.create_empty_index(with_alias: false, options: { index_name: helper.target_name }) @index_name = helper.create_empty_index(with_alias: false, options: { index_name: helper.target_name }).each_key.first
end end
end end
shared_context 'with an existing index and alias' do shared_context 'with an existing index and alias' do
before do before do
@index_name = helper.create_empty_index(with_alias: true) @index_name = helper.create_empty_index(with_alias: true).each_key.first
end end
end end
...@@ -40,6 +40,10 @@ RSpec.describe Gitlab::Elastic::Helper do ...@@ -40,6 +40,10 @@ RSpec.describe Gitlab::Elastic::Helper do
end end
describe '#default_mappings' do describe '#default_mappings' do
it 'has only one type' do
expect(helper.default_mappings.keys).to match_array %i(doc)
end
context 'custom analyzers' do context 'custom analyzers' do
let(:custom_analyzers_mappings) { { doc: { properties: { title: { fields: { custom: true } } } } } } let(:custom_analyzers_mappings) { { doc: { properties: { title: { fields: { custom: true } } } } } }
...@@ -65,6 +69,44 @@ RSpec.describe Gitlab::Elastic::Helper do ...@@ -65,6 +69,44 @@ RSpec.describe Gitlab::Elastic::Helper do
end end
end end
describe '#create_standalone_indices' do
after do
@indices.each do |index_name, _|
helper.delete_index(index_name: index_name)
end
end
it 'creates standalone indices' do
@indices = helper.create_standalone_indices
@indices.each do |index_name, _|
expect(helper.index_exists?(index_name: index_name)).to be_truthy
end
end
it 'raises an exception when there is an existing alias' do
@indices = helper.create_standalone_indices
expect { helper.create_standalone_indices }.to raise_error(/already exists/)
end
it 'raises an exception when there is an existing index' do
@indices = helper.create_standalone_indices(with_alias: false)
expect { helper.create_standalone_indices(with_alias: false) }.to raise_error(/already exists/)
end
end
describe '#delete_standalone_indices' do
before do
helper.create_standalone_indices
end
subject { helper.delete_standalone_indices }
it_behaves_like 'deletes all standalone indices'
end
describe '#create_empty_index' do describe '#create_empty_index' do
context 'with an empty cluster' do context 'with an empty cluster' do
context 'with alias and index' do context 'with alias and index' do
...@@ -174,7 +216,7 @@ RSpec.describe Gitlab::Elastic::Helper do ...@@ -174,7 +216,7 @@ RSpec.describe Gitlab::Elastic::Helper do
end end
end end
describe '#cluster_free_size' do describe '#cluster_free_size_bytes' do
it 'returns valid cluster size' do it 'returns valid cluster size' do
expect(helper.cluster_free_size_bytes).to be_positive expect(helper.cluster_free_size_bytes).to be_positive
end end
...@@ -194,4 +236,51 @@ RSpec.describe Gitlab::Elastic::Helper do ...@@ -194,4 +236,51 @@ RSpec.describe Gitlab::Elastic::Helper do
helper.delete_index(index_name: new_index_name) helper.delete_index(index_name: new_index_name)
end end
end end
describe '#index_size' do
subject { helper.index_size }
context 'when there is a legacy index' do
include_context 'with a legacy index'
it { is_expected.to have_key("docs") }
it { is_expected.to have_key("store") }
end
context 'when there is an alias', :aggregate_failures do
include_context 'with an existing index and alias'
it { is_expected.to have_key("docs") }
it { is_expected.to have_key("store") }
it 'supports providing the alias name' do
alias_name = helper.target_name
expect(helper.index_size(index_name: alias_name)).to have_key("docs")
expect(helper.index_size(index_name: alias_name)).to have_key("store")
end
end
end
describe '#documents_count' do
subject { helper.documents_count }
context 'when there is a legacy index' do
include_context 'with a legacy index'
it { is_expected.to eq(0) }
end
context 'when there is an alias' do
include_context 'with an existing index and alias'
it { is_expected.to eq(0) }
it 'supports providing the alias name' do
alias_name = helper.target_name
expect(helper.documents_count(index_name: alias_name)).to eq(0)
end
end
end
end end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Elastic::Latest::IssueConfig do
describe '.document_type' do
it 'returns config' do
expect(described_class.document_type).to eq('doc')
end
end
describe '.settings' do
it 'returns config' do
expect(described_class.settings).to be_a(Elasticsearch::Model::Indexing::Settings)
end
end
describe '.mappings' do
it 'returns config' do
expect(described_class.mapping).to be_a(Elasticsearch::Model::Indexing::Mappings)
end
end
end
...@@ -35,7 +35,7 @@ RSpec.describe ::Gitlab::Instrumentation::ElasticsearchTransport, :elastic, :req ...@@ -35,7 +35,7 @@ RSpec.describe ::Gitlab::Instrumentation::ElasticsearchTransport, :elastic, :req
::Gitlab::SafeRequestStore.clear! ::Gitlab::SafeRequestStore.clear!
create(:issue, title: "new issue") create(:merge_request, title: "new MR")
ensure_elasticsearch_index! ensure_elasticsearch_index!
request = ::Gitlab::Instrumentation::ElasticsearchTransport.detail_store.first request = ::Gitlab::Instrumentation::ElasticsearchTransport.detail_store.first
......
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe CreateElasticReindexingSubtasks do
let(:migration) { described_class.new }
let(:reindexing_tasks) { table(:elastic_reindexing_tasks) }
let(:reindexing_subtasks) { table(:elastic_reindexing_subtasks) }
let(:fields_to_migrate) { %w(documents_count documents_count_target index_name_from index_name_to elastic_task) }
describe "#up" do
it 'migrates old reindexing tasks' do
# these tasks should not be migrated
reindexing_tasks.create!(in_progress: false, state: 10)
reindexing_tasks.create!(in_progress: false, state: 10, index_name_from: 'index_name')
reindexing_tasks.create!(in_progress: false, state: 10, index_name_to: 'index_name')
reindexing_tasks.create!(in_progress: false, state: 10, elastic_task: 'TASK')
# these tasks should be migrated
task1 = reindexing_tasks.create!(in_progress: false, documents_count: 100, state: 10, index_name_from: 'index1', index_name_to: 'index2', elastic_task: 'TASK_ID', documents_count_target: 100)
task2 = reindexing_tasks.create!(in_progress: false, documents_count: 50, state: 11, index_name_from: 'index3', index_name_to: 'index4', elastic_task: 'TASK_ID2', documents_count_target: 99)
migrate!
expect(reindexing_subtasks.count).to eq(2)
[task1, task2].each do |task|
subtask = reindexing_subtasks.find_by(elastic_reindexing_task_id: task.id)
expect(task.attributes.slice(*fields_to_migrate)).to match(subtask.attributes.slice(*fields_to_migrate))
end
end
end
end
...@@ -122,11 +122,7 @@ RSpec.describe Issue, :elastic do ...@@ -122,11 +122,7 @@ RSpec.describe Issue, :elastic do
'confidential' 'confidential'
).merge({ ).merge({
'type' => issue.es_type, 'type' => issue.es_type,
'state' => issue.state, 'state' => issue.state
'join_field' => {
'name' => issue.es_type,
'parent' => issue.es_parent
}
}) })
expected_hash['assignee_id'] = [assignee.id] expected_hash['assignee_id'] = [assignee.id]
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Elastic::ReindexingSubtask, type: :model do
describe 'relations' do
it { is_expected.to belong_to(:elastic_reindexing_task) }
end
describe 'validations' do
it { is_expected.to validate_presence_of(:index_name_from) }
it { is_expected.to validate_presence_of(:index_name_to) }
it { is_expected.to validate_presence_of(:elastic_task) }
end
end
...@@ -3,6 +3,10 @@ ...@@ -3,6 +3,10 @@
require 'spec_helper' require 'spec_helper'
RSpec.describe Elastic::ReindexingTask, type: :model do RSpec.describe Elastic::ReindexingTask, type: :model do
describe 'relations' do
it { is_expected.to have_many(:subtasks) }
end
it 'only allows one running task at a time' do it 'only allows one running task at a time' do
expect { create(:elastic_reindexing_task, state: :success) }.not_to raise_error expect { create(:elastic_reindexing_task, state: :success) }.not_to raise_error
expect { create(:elastic_reindexing_task) }.not_to raise_error expect { create(:elastic_reindexing_task) }.not_to raise_error
...@@ -18,21 +22,21 @@ RSpec.describe Elastic::ReindexingTask, type: :model do ...@@ -18,21 +22,21 @@ RSpec.describe Elastic::ReindexingTask, type: :model do
end end
describe '.drop_old_indices!' do describe '.drop_old_indices!' do
let(:task_1) { create(:elastic_reindexing_task, index_name_from: 'original_index_1', state: :reindexing, delete_original_index_at: 1.day.ago) } let(:task_1) { create(:elastic_reindexing_task, :with_subtask, state: :reindexing, delete_original_index_at: 1.day.ago) }
let(:task_2) { create(:elastic_reindexing_task, index_name_from: 'original_index_2', state: :success, delete_original_index_at: nil) } let(:task_2) { create(:elastic_reindexing_task, :with_subtask, state: :success, delete_original_index_at: nil) }
let(:task_3) { create(:elastic_reindexing_task, index_name_from: 'original_index_3', state: :success, delete_original_index_at: 1.day.ago) } let(:task_3) { create(:elastic_reindexing_task, :with_subtask, state: :success, delete_original_index_at: 1.day.ago) }
let(:task_4) { create(:elastic_reindexing_task, index_name_from: 'original_index_4', state: :success, delete_original_index_at: 5.days.ago) } let(:task_4) { create(:elastic_reindexing_task, :with_subtask, state: :success, delete_original_index_at: 5.days.ago) }
let(:task_5) { create(:elastic_reindexing_task, index_name_from: 'original_index_5', state: :success, delete_original_index_at: 14.days.from_now) } let(:task_5) { create(:elastic_reindexing_task, :with_subtask, state: :success, delete_original_index_at: 14.days.from_now) }
let(:tasks_for_deletion) { [task_3, task_4] } let(:tasks_for_deletion) { [task_3, task_4] }
let(:other_tasks) { [task_1, task_2, task_5] } let(:other_tasks) { [task_1, task_2, task_5] }
it 'deletes the correct indices' do it 'deletes the correct indices' do
other_tasks.each do |task| other_tasks.each do |task|
expect(Gitlab::Elastic::Helper.default).not_to receive(:delete_index).with(index_name: task.index_name_from) expect(Gitlab::Elastic::Helper.default).not_to receive(:delete_index).with(index_name: task.subtasks.first.index_name_from)
end end
tasks_for_deletion.each do |task| tasks_for_deletion.each do |task|
expect(Gitlab::Elastic::Helper.default).to receive(:delete_index).with(index_name: task.index_name_from).and_return(true) expect(Gitlab::Elastic::Helper.default).to receive(:delete_index).with(index_name: task.subtasks.first.index_name_from).and_return(true)
end end
described_class.drop_old_indices! described_class.drop_old_indices!
......
...@@ -8,6 +8,20 @@ RSpec.describe Elastic::ClusterReindexingService, :elastic do ...@@ -8,6 +8,20 @@ RSpec.describe Elastic::ClusterReindexingService, :elastic do
context 'state: initial' do context 'state: initial' do
let(:task) { create(:elastic_reindexing_task, state: :initial) } let(:task) { create(:elastic_reindexing_task, state: :initial) }
it 'aborts if the main index does not use aliases' do
allow(Gitlab::Elastic::Helper.default).to receive(:alias_exists?).and_return(false)
expect { subject.execute }.to change { task.reload.state }.from('initial').to('failure')
expect(task.reload.error_message).to match(/use aliases/)
end
it 'aborts if there are pending ES migrations' do
allow(Elastic::DataMigrationService).to receive(:pending_migrations?).and_return(true)
expect { subject.execute }.to change { task.reload.state }.from('initial').to('failure')
expect(task.reload.error_message).to match(/unapplied advanced search migrations/)
end
it 'errors when there is not enough space' do it 'errors when there is not enough space' do
allow(Gitlab::Elastic::Helper.default).to receive(:index_size_bytes).and_return(100.megabytes) allow(Gitlab::Elastic::Helper.default).to receive(:index_size_bytes).and_return(100.megabytes)
allow(Gitlab::Elastic::Helper.default).to receive(:cluster_free_size_bytes).and_return(30.megabytes) allow(Gitlab::Elastic::Helper.default).to receive(:cluster_free_size_bytes).and_return(30.megabytes)
...@@ -29,19 +43,26 @@ RSpec.describe Elastic::ClusterReindexingService, :elastic do ...@@ -29,19 +43,26 @@ RSpec.describe Elastic::ClusterReindexingService, :elastic do
it 'triggers reindexing' do it 'triggers reindexing' do
task = create(:elastic_reindexing_task, state: :indexing_paused) task = create(:elastic_reindexing_task, state: :indexing_paused)
allow(Gitlab::Elastic::Helper.default).to receive(:create_empty_index).and_return('new_index_name') allow(Gitlab::Elastic::Helper.default).to receive(:create_empty_index).and_return('new_index_name' => 'new_index')
allow(Gitlab::Elastic::Helper.default).to receive(:reindex).and_return('task_id') allow(Gitlab::Elastic::Helper.default).to receive(:create_standalone_indices).and_return('new_issues_name' => 'new_issues')
allow(Gitlab::Elastic::Helper.default).to receive(:reindex).with(from: anything, to: 'new_index_name').and_return('task_id_1')
allow(Gitlab::Elastic::Helper.default).to receive(:reindex).with(from: anything, to: 'new_issues_name').and_return('task_id_2')
expect { subject.execute }.to change { task.reload.state }.from('indexing_paused').to('reindexing') expect { subject.execute }.to change { task.reload.state }.from('indexing_paused').to('reindexing')
task = task.reload subtasks = task.subtasks
expect(task.index_name_to).to eq('new_index_name') expect(subtasks.count).to eq(2)
expect(task.elastic_task).to eq('task_id')
expect(subtasks.first.index_name_to).to eq('new_index_name')
expect(subtasks.first.elastic_task).to eq('task_id_1')
expect(subtasks.last.index_name_to).to eq('new_issues_name')
expect(subtasks.last.elastic_task).to eq('task_id_2')
end end
end end
context 'state: reindexing' do context 'state: reindexing' do
let(:task) { create(:elastic_reindexing_task, state: :reindexing, documents_count: 10) } let(:task) { create(:elastic_reindexing_task, state: :reindexing) }
let(:subtask) { create(:elastic_reindexing_subtask, elastic_reindexing_task: task, documents_count: 10) }
let(:expected_default_settings) do let(:expected_default_settings) do
{ {
refresh_interval: nil, refresh_interval: nil,
...@@ -57,7 +78,7 @@ RSpec.describe Elastic::ClusterReindexingService, :elastic do ...@@ -57,7 +78,7 @@ RSpec.describe Elastic::ClusterReindexingService, :elastic do
context 'errors are raised' do context 'errors are raised' do
before do before do
allow(Gitlab::Elastic::Helper.default).to receive(:documents_count).with(index_name: task.index_name_to).and_return(task.reload.documents_count * 2) allow(Gitlab::Elastic::Helper.default).to receive(:documents_count).with(index_name: subtask.index_name_to).and_return(subtask.reload.documents_count * 2)
end end
it 'errors if documents count is different' do it 'errors if documents count is different' do
...@@ -82,12 +103,12 @@ RSpec.describe Elastic::ClusterReindexingService, :elastic do ...@@ -82,12 +103,12 @@ RSpec.describe Elastic::ClusterReindexingService, :elastic do
context 'task finishes correctly' do context 'task finishes correctly' do
before do before do
allow(Gitlab::Elastic::Helper.default).to receive(:documents_count).with(index_name: task.index_name_to).and_return(task.reload.documents_count) allow(Gitlab::Elastic::Helper.default).to receive(:documents_count).with(index_name: subtask.index_name_to).and_return(subtask.reload.documents_count)
end end
it 'launches all state steps' do it 'launches all state steps' do
expect(Gitlab::Elastic::Helper.default).to receive(:update_settings).with(index_name: task.index_name_to, settings: expected_default_settings) expect(Gitlab::Elastic::Helper.default).to receive(:update_settings).with(index_name: subtask.index_name_to, settings: expected_default_settings)
expect(Gitlab::Elastic::Helper.default).to receive(:switch_alias).with(to: task.index_name_to) expect(Gitlab::Elastic::Helper.default).to receive(:switch_alias).with(to: subtask.index_name_to, from: subtask.index_name_from, alias_name: subtask.alias_name)
expect(Gitlab::CurrentSettings).to receive(:update!).with(elasticsearch_pause_indexing: false) expect(Gitlab::CurrentSettings).to receive(:update!).with(elasticsearch_pause_indexing: false)
expect { subject.execute }.to change { task.reload.state }.from('reindexing').to('success') expect { subject.execute }.to change { task.reload.state }.from('reindexing').to('success')
......
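A hedged sketch of the per-subtask success path the expectations above describe (method names are taken from those expectations; the loop itself is an assumption about the service internals):
task.subtasks.each do |subtask|
  helper.update_settings(index_name: subtask.index_name_to, settings: expected_default_settings)
  helper.switch_alias(to: subtask.index_name_to, from: subtask.index_name_from, alias_name: subtask.alias_name)
end
Gitlab::CurrentSettings.update!(elasticsearch_pause_indexing: false)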
...@@ -112,14 +112,21 @@ RSpec.describe Search::GlobalService do ...@@ -112,14 +112,21 @@ RSpec.describe Search::GlobalService do
# finished we need a test to verify the old style searches work for # finished we need a test to verify the old style searches work for
# instances which haven't finished the migration yet # instances which haven't finished the migration yet
context 'when add_new_data_to_issues_documents migration is not finished' do context 'when add_new_data_to_issues_documents migration is not finished' do
let!(:issue) { create :issue, project: project }
before do before do
allow(Elastic::DataMigrationService).to receive(:migration_has_finished?).and_call_original
allow(Elastic::DataMigrationService).to receive(:migration_has_finished?) allow(Elastic::DataMigrationService).to receive(:migration_has_finished?)
.with(:add_new_data_to_issues_documents) .with(:add_new_data_to_issues_documents)
.and_return(false) .and_return(false)
allow(Elastic::DataMigrationService).to receive(:migration_has_finished?)
.with(:migrate_issues_to_separate_index)
.and_return(false)
end end
# the issue cannot be created before the migration mocks are in place, otherwise
# the wrong value would be passed to `use_separate_indices` when creating
# the proxy
let!(:issue) { create(:issue, project: project) }
where(:project_level, :feature_access_level, :membership, :admin_mode, :expected_count) do where(:project_level, :feature_access_level, :membership, :admin_mode, :expected_count) do
permission_table_for_guest_feature_access permission_table_for_guest_feature_access
end end
...@@ -158,6 +165,11 @@ RSpec.describe Search::GlobalService do ...@@ -158,6 +165,11 @@ RSpec.describe Search::GlobalService do
let(:search_url) { Addressable::Template.new("#{es_host}/{index}/doc/_search{?params*}") } let(:search_url) { Addressable::Template.new("#{es_host}/{index}/doc/_search{?params*}") }
before do before do
allow(Elastic::DataMigrationService).to receive(:migration_has_finished?).and_call_original
allow(Elastic::DataMigrationService).to receive(:migration_has_finished?)
.with(:migrate_issues_to_separate_index)
.and_return(false)
ensure_elasticsearch_index! ensure_elasticsearch_index!
end end
......
...@@ -6,20 +6,23 @@ RSpec.configure do |config| ...@@ -6,20 +6,23 @@ RSpec.configure do |config|
Elastic::ProcessBookkeepingService.clear_tracking! Elastic::ProcessBookkeepingService.clear_tracking!
# Delete the migration index and the main ES index # Delete all test indices
helper.delete_index(index_name: helper.migrations_index_name) indices = [helper.target_name, helper.migrations_index_name] + helper.standalone_indices_proxies.map(&:index_name)
helper.delete_index indices.each do |index_name|
helper.delete_index(index_name: index_name)
end
helper.create_empty_index(options: { settings: { number_of_replicas: 0 } }) helper.create_empty_index(options: { settings: { number_of_replicas: 0 } })
helper.create_migrations_index helper.create_migrations_index
::Elastic::DataMigrationService.mark_all_as_completed! ::Elastic::DataMigrationService.mark_all_as_completed!
helper.create_standalone_indices
refresh_index! refresh_index!
example.run example.run
helper.delete_index(index_name: helper.migrations_index_name) indices.each do |index_name|
helper.delete_index helper.delete_index(index_name: index_name)
end
Elastic::ProcessBookkeepingService.clear_tracking! Elastic::ProcessBookkeepingService.clear_tracking!
end end
......
# frozen_string_literal: true
RSpec.shared_examples 'deletes all standalone indices' do
Gitlab::Elastic::Helper::ES_SEPARATE_CLASSES.each do |class_name|
describe "#{class_name}" do
it 'removes a standalone index' do
proxy = ::Elastic::Latest::ApplicationClassProxy.new(class_name, use_separate_indices: true)
expect { subject }.to change { helper.index_exists?(index_name: proxy.index_name) }.from(true).to(false)
end
end
end
end
...@@ -13,12 +13,68 @@ RSpec.describe 'gitlab:elastic namespace rake tasks', :elastic do ...@@ -13,12 +13,68 @@ RSpec.describe 'gitlab:elastic namespace rake tasks', :elastic do
before do before do
es_helper.delete_index es_helper.delete_index
es_helper.delete_index(index_name: es_helper.migrations_index_name) es_helper.delete_index(index_name: es_helper.migrations_index_name)
es_helper.delete_standalone_indices
end end
it 'creates an index' do it 'creates the default index' do
expect { subject }.to change { es_helper.index_exists? }.from(false).to(true) expect { subject }.to change { es_helper.index_exists? }.from(false).to(true)
end end
context 'when SKIP_ALIAS environment variable is set' do
let(:secondary_index_name) { "gitlab-test-#{Time.now.strftime("%Y%m%d-%H%M")}" }
before do
stub_env('SKIP_ALIAS', '1')
end
after do
es_helper.delete_index(index_name: secondary_index_name)
end
subject { run_rake_task('gitlab:elastic:create_empty_index', secondary_index_name) }
it 'does not alias the new index' do
expect { subject }.not_to change { es_helper.alias_exists?(name: es_helper.target_name) }
end
it 'does not create the migrations index if it does not exist' do
migration_index_name = es_helper.migrations_index_name
es_helper.delete_index(index_name: migration_index_name)
expect { subject }.not_to change { es_helper.index_exists?(index_name: migration_index_name) }
end
it 'creates an index at the specified name' do
expect { subject }.to change { es_helper.index_exists?(index_name: secondary_index_name) }.from(false).to(true)
end
Gitlab::Elastic::Helper::ES_SEPARATE_CLASSES.each do |class_name|
describe "#{class_name}" do
it "does not create a standalone index" do
proxy = ::Elastic::Latest::ApplicationClassProxy.new(class_name, use_separate_indices: true)
expect { subject }.not_to change { es_helper.alias_exists?(name: proxy.index_name) }
end
end
end
end
it 'creates the migrations index if it does not exist' do
migration_index_name = es_helper.migrations_index_name
es_helper.delete_index(index_name: migration_index_name)
expect { subject }.to change { es_helper.index_exists?(index_name: migration_index_name) }.from(false).to(true)
end
Gitlab::Elastic::Helper::ES_SEPARATE_CLASSES.each do |class_name|
describe "#{class_name}" do
it "creates a standalone index" do
proxy = ::Elastic::Latest::ApplicationClassProxy.new(class_name, use_separate_indices: true)
expect { subject }.to change { es_helper.index_exists?(index_name: proxy.index_name) }.from(false).to(true)
end
end
end
it 'marks all migrations as completed' do it 'marks all migrations as completed' do
expect(Elastic::DataMigrationService).to receive(:mark_all_as_completed!).and_call_original expect(Elastic::DataMigrationService).to receive(:mark_all_as_completed!).and_call_original
expect(Elastic::MigrationRecord.persisted_versions(completed: true)).to eq([]) expect(Elastic::MigrationRecord.persisted_versions(completed: true)).to eq([])
...@@ -37,6 +93,10 @@ RSpec.describe 'gitlab:elastic namespace rake tasks', :elastic do ...@@ -37,6 +93,10 @@ RSpec.describe 'gitlab:elastic namespace rake tasks', :elastic do
it 'removes the index' do it 'removes the index' do
expect { subject }.to change { es_helper.index_exists? }.from(true).to(false) expect { subject }.to change { es_helper.index_exists? }.from(true).to(false)
end end
it_behaves_like 'deletes all standalone indices' do
let(:helper) { es_helper }
end
end end
context "with elasticsearch_indexing enabled" do context "with elasticsearch_indexing enabled" do
......
...@@ -5,6 +5,8 @@ require 'spec_helper' ...@@ -5,6 +5,8 @@ require 'spec_helper'
RSpec.describe 'admin/application_settings/_elasticsearch_form' do RSpec.describe 'admin/application_settings/_elasticsearch_form' do
let_it_be(:admin) { create(:admin) } let_it_be(:admin) { create(:admin) }
let(:page) { Capybara::Node::Simple.new(rendered) } let(:page) { Capybara::Node::Simple.new(rendered) }
let(:pause_indexing) { false }
let(:pending_migrations) { false }
before do before do
assign(:application_setting, application_setting) assign(:application_setting, application_setting)
...@@ -18,7 +20,8 @@ RSpec.describe 'admin/application_settings/_elasticsearch_form' do ...@@ -18,7 +20,8 @@ RSpec.describe 'admin/application_settings/_elasticsearch_form' do
before do before do
allow(Gitlab::CurrentSettings).to(receive(:elasticsearch_indexing?)).and_return(es_indexing) allow(Gitlab::CurrentSettings).to(receive(:elasticsearch_indexing?)).and_return(es_indexing)
allow(Gitlab::CurrentSettings).to(receive(:elasticsearch_pause_indexing?)).and_return(true) allow(Gitlab::CurrentSettings).to(receive(:elasticsearch_pause_indexing?)).and_return(pause_indexing)
allow(Elastic::DataMigrationService).to(receive(:pending_migrations?)).and_return(pending_migrations)
end end
context 'indexing is enabled' do context 'indexing is enabled' do
...@@ -36,6 +39,17 @@ RSpec.describe 'admin/application_settings/_elasticsearch_form' do ...@@ -36,6 +39,17 @@ RSpec.describe 'admin/application_settings/_elasticsearch_form' do
expect(rendered).to have_css('input[id=application_setting_elasticsearch_pause_indexing]') expect(rendered).to have_css('input[id=application_setting_elasticsearch_pause_indexing]')
expect(rendered).not_to have_css('input[id=application_setting_elasticsearch_pause_indexing][disabled="disabled"]') expect(rendered).not_to have_css('input[id=application_setting_elasticsearch_pause_indexing][disabled="disabled"]')
end end
context 'pending migrations' do
let(:pending_migrations) { true }
let(:pause_indexing) { true }
it 'renders a disabled pause checkbox' do
render
expect(rendered).to have_css('input[id=application_setting_elasticsearch_pause_indexing][disabled="disabled"]')
end
end
end end
context 'indexing is disabled' do context 'indexing is disabled' do
...@@ -88,7 +102,7 @@ RSpec.describe 'admin/application_settings/_elasticsearch_form' do ...@@ -88,7 +102,7 @@ RSpec.describe 'admin/application_settings/_elasticsearch_form' do
it 'renders the task' do it 'renders the task' do
render render
expect(rendered).to include("State: #{task.state}") expect(rendered).to include("Reindexing Status: #{task.state}")
expect(rendered).not_to include("Task ID:") expect(rendered).not_to include("Task ID:")
expect(rendered).not_to include("Error:") expect(rendered).not_to include("Error:")
expect(rendered).not_to include("Expected documents:") expect(rendered).not_to include("Expected documents:")
...@@ -97,28 +111,30 @@ RSpec.describe 'admin/application_settings/_elasticsearch_form' do ...@@ -97,28 +111,30 @@ RSpec.describe 'admin/application_settings/_elasticsearch_form' do
end end
context 'with extended details' do context 'with extended details' do
let(:task) { build(:elastic_reindexing_task, state: :reindexing, elastic_task: 'elastic-task-id', error_message: 'error-message', documents_count_target: 5, documents_count: 10) } let!(:task) { create(:elastic_reindexing_task, state: :reindexing, error_message: 'error-message') }
let!(:subtask) { create(:elastic_reindexing_subtask, elastic_reindexing_task: task, documents_count_target: 5, documents_count: 10) }
it 'renders the task' do it 'renders the task' do
render render
expect(rendered).to include("State: #{task.state}") expect(rendered).to include("Reindexing Status: #{task.state}")
expect(rendered).to include("Task ID: #{task.elastic_task}") expect(rendered).to include("Task ID: #{subtask.elastic_task}")
expect(rendered).to include("Error: #{task.error_message}") expect(rendered).to include("Error: #{task.error_message}")
expect(rendered).to include("Expected documents: #{task.documents_count}") expect(rendered).to include("Expected documents: #{subtask.documents_count}")
expect(rendered).to include("Documents reindexed: #{task.documents_count_target} (50.0%)") expect(rendered).to include("Documents reindexed: #{subtask.documents_count_target} (50.0%)")
end end
end end
context 'with extended details, but without documents_count_target' do context 'with extended details, but without documents_count_target' do
let(:task) { build(:elastic_reindexing_task, state: :reindexing, elastic_task: 'elastic-task-id', documents_count: 10) } let!(:task) { create(:elastic_reindexing_task, state: :reindexing) }
let!(:subtask) { create(:elastic_reindexing_subtask, elastic_reindexing_task: task, documents_count: 10) }
it 'renders the task' do it 'renders the task' do
render render
expect(rendered).to include("State: #{task.state}") expect(rendered).to include("Reindexing Status: #{task.state}")
expect(rendered).to include("Task ID: #{task.elastic_task}") expect(rendered).to include("Task ID: #{subtask.elastic_task}")
expect(rendered).to include("Expected documents: #{task.documents_count}") expect(rendered).to include("Expected documents: #{subtask.documents_count}")
expect(rendered).not_to include("Error:") expect(rendered).not_to include("Error:")
expect(rendered).not_to include("Documents reindexed:") expect(rendered).not_to include("Documents reindexed:")
end end
......
...@@ -44,6 +44,45 @@ RSpec.describe Elastic::MigrationWorker, :elastic do ...@@ -44,6 +44,45 @@ RSpec.describe Elastic::MigrationWorker, :elastic do
end end
end end
context 'migration is halted' do
before do
allow(Gitlab::CurrentSettings).to receive(:elasticsearch_pause_indexing?).and_return(true)
allow(subject).to receive(:current_migration).and_return(migration)
allow(migration).to receive(:pause_indexing?).and_return(true)
allow(migration).to receive(:halted?).and_return(true)
end
it 'skips execution' do
expect(migration).not_to receive(:migrate)
subject.perform
end
context 'pause indexing is not allowed' do
before do
migration.save_state!(pause_indexing: false)
end
it 'does not unpause indexing' do
expect(Gitlab::CurrentSettings).not_to receive(:update!)
subject.perform
end
end
context 'pause indexing is allowed' do
before do
migration.save_state!(pause_indexing: true)
end
it 'unpauses indexing' do
expect(Gitlab::CurrentSettings).to receive(:update!).with(elasticsearch_pause_indexing: false)
subject.perform
end
end
end
context 'migration process' do context 'migration process' do
before do before do
allow(migration).to receive(:persisted?).and_return(persisted) allow(migration).to receive(:persisted?).and_return(persisted)
...@@ -91,6 +130,29 @@ RSpec.describe Elastic::MigrationWorker, :elastic do ...@@ -91,6 +130,29 @@ RSpec.describe Elastic::MigrationWorker, :elastic do
subject.perform subject.perform
end end
end end
context 'indexing pause' do
before do
allow(migration).to receive(:pause_indexing?).and_return(true)
end
let(:batched) { true }
where(:persisted, :completed, :expected) do
false | false | false
true | false | false
true | true | true
end
with_them do
it 'pauses and unpauses indexing' do
expect(Gitlab::CurrentSettings).to receive(:update!).with(elasticsearch_pause_indexing: true)
expect(Gitlab::CurrentSettings).to receive(:update!).with(elasticsearch_pause_indexing: false) if expected
subject.perform
end
end
end
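A hedged sketch of the worker behaviour the truth table above exercises (names come from the expectations; the control flow is an assumption): indexing is paused whenever the migration requests it, and unpaused only once the migration reports completion.
Gitlab::CurrentSettings.update!(elasticsearch_pause_indexing: true) if migration.pause_indexing?
migration.migrate
Gitlab::CurrentSettings.update!(elasticsearch_pause_indexing: false) if migration.pause_indexing? && migration.completed?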
end end
end end
end end
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
RSpec.describe ElasticDeleteProjectWorker, :elastic do RSpec.describe ElasticDeleteProjectWorker, :elastic, :sidekiq_inline do
subject { described_class.new } subject { described_class.new }
# Create admin user and search globally to avoid dealing with permissions in # Create admin user and search globally to avoid dealing with permissions in
...@@ -11,6 +11,7 @@ RSpec.describe ElasticDeleteProjectWorker, :elastic do ...@@ -11,6 +11,7 @@ RSpec.describe ElasticDeleteProjectWorker, :elastic do
before do before do
stub_ee_application_setting(elasticsearch_indexing: true) stub_ee_application_setting(elasticsearch_indexing: true)
allow(Elastic::DataMigrationService).to receive(:migration_has_finished?).and_return(migration_has_finished)
end end
# Extracted to a method as the `#elastic_search` methods using it below will # Extracted to a method as the `#elastic_search` methods using it below will
...@@ -19,36 +20,51 @@ RSpec.describe ElasticDeleteProjectWorker, :elastic do ...@@ -19,36 +20,51 @@ RSpec.describe ElasticDeleteProjectWorker, :elastic do
{ options: { current_user: user, project_ids: :any } } { options: { current_user: user, project_ids: :any } }
end end
it 'deletes a project with all nested objects' do shared_examples 'delete project and objects' do
project = create :project, :repository it 'deletes a project with all nested objects' do
issue = create :issue, project: project project = create :project, :repository
milestone = create :milestone, project: project issue = create :issue, project: project
note = create :note, project: project milestone = create :milestone, project: project
merge_request = create :merge_request, target_project: project, source_project: project note = create :note, project: project
merge_request = create :merge_request, target_project: project, source_project: project
ensure_elasticsearch_index!
ensure_elasticsearch_index!
## All database objects + data from repository. The absolute value does not matter
expect(Project.elastic_search('*', **search_options).records).to include(project) ## All database objects + data from repository. The absolute value does not matter
expect(Issue.elastic_search('*', **search_options).records).to include(issue) expect(Project.elastic_search('*', **search_options).records).to include(project)
expect(Milestone.elastic_search('*', **search_options).records).to include(milestone) expect(Issue.elastic_search('*', **search_options).records).to include(issue)
expect(Note.elastic_search('*', **search_options).records).to include(note) expect(Milestone.elastic_search('*', **search_options).records).to include(milestone)
expect(MergeRequest.elastic_search('*', **search_options).records).to include(merge_request) expect(Note.elastic_search('*', **search_options).records).to include(note)
expect(MergeRequest.elastic_search('*', **search_options).records).to include(merge_request)
subject.perform(project.id, project.es_id)
ensure_elasticsearch_index! subject.perform(project.id, project.es_id)
expect(Project.elastic_search('*', **search_options).total_count).to be(0) ensure_elasticsearch_index!
expect(Issue.elastic_search('*', **search_options).total_count).to be(0)
expect(Milestone.elastic_search('*', **search_options).total_count).to be(0) expect(Project.elastic_search('*', **search_options).total_count).to be(0)
expect(Note.elastic_search('*', **search_options).total_count).to be(0) expect(Issue.elastic_search('*', **search_options).total_count).to be(0)
expect(MergeRequest.elastic_search('*', **search_options).total_count).to be(0) expect(Milestone.elastic_search('*', **search_options).total_count).to be(0)
expect(Note.elastic_search('*', **search_options).total_count).to be(0)
# verify that entire index is empty expect(MergeRequest.elastic_search('*', **search_options).total_count).to be(0)
# searches use joins on the parent record (project)
# and the previous queries will not find data left in the index # verify that entire index is empty
helper = Gitlab::Elastic::Helper.default # searches use joins on the parent record (project)
# and the previous queries will not find data left in the index
expect(helper.documents_count).to be(0) helper = Gitlab::Elastic::Helper.default
expect(helper.documents_count).to be(0)
end
end
context 'migration has finished' do
let(:migration_has_finished) { true }
include_examples 'delete project and objects'
end
context 'migration has not finished' do
let(:migration_has_finished) { false }
include_examples 'delete project and objects'
end end
end end
...@@ -10398,6 +10398,9 @@ msgstr "" ...@@ -10398,6 +10398,9 @@ msgstr ""
msgid "Elasticsearch HTTP client timeout value in seconds." msgid "Elasticsearch HTTP client timeout value in seconds."
msgstr "" msgstr ""
msgid "Elasticsearch indexing"
msgstr ""
msgid "Elasticsearch indexing restrictions" msgid "Elasticsearch indexing restrictions"
msgstr "" msgstr ""
...@@ -20371,6 +20374,9 @@ msgstr "" ...@@ -20371,6 +20374,9 @@ msgstr ""
msgid "Pause" msgid "Pause"
msgstr "" msgstr ""
msgid "Pause Elasticsearch indexing"
msgstr ""
msgid "Pause replication" msgid "Pause replication"
msgstr "" msgstr ""
...@@ -23196,7 +23202,7 @@ msgstr "" ...@@ -23196,7 +23202,7 @@ msgstr ""
msgid "Regulate approvals by authors/committers. Affects all projects." msgid "Regulate approvals by authors/committers. Affects all projects."
msgstr "" msgstr ""
msgid "Reindexing status" msgid "Reindexing Status: %{status}"
msgstr "" msgstr ""
msgid "Rejected (closed)" msgid "Rejected (closed)"
...@@ -26687,9 +26693,6 @@ msgstr "" ...@@ -26687,9 +26693,6 @@ msgstr ""
msgid "State your message to activate" msgid "State your message to activate"
msgstr "" msgstr ""
msgid "State: %{last_reindexing_task_state}"
msgstr ""
msgid "Static Application Security Testing (SAST)" msgid "Static Application Security Testing (SAST)"
msgstr "" msgstr ""
...@@ -28212,6 +28215,9 @@ msgstr "" ...@@ -28212,6 +28215,9 @@ msgstr ""
msgid "There are no variables yet." msgid "There are no variables yet."
msgstr "" msgstr ""
msgid "There are pending advanced search migrations. Indexing must remain paused until the migrations are completed."
msgstr ""
msgid "There are running deployments on the environment. Please retry later." msgid "There are running deployments on the environment. Please retry later."
msgstr "" msgstr ""
...@@ -30138,7 +30144,7 @@ msgstr "" ...@@ -30138,7 +30144,7 @@ msgstr ""
msgid "Until" msgid "Until"
msgstr "" msgstr ""
msgid "Unused, previous index '%{index_name}' will be deleted after %{time} automatically." msgid "Unused, previous indices: %{index_names} will be deleted after %{time} automatically."
msgstr "" msgstr ""
msgid "Unverified" msgid "Unverified"
......