Commit 15a74241 authored by Andreas Brandl, committed by Adam Hegyi

Async index creation

parent b128cd0b
# frozen_string_literal: true
class CreatePostgresAsyncIndexesTable < ActiveRecord::Migration[6.1]
include Gitlab::Database::MigrationHelpers
def change
create_table_with_constraints :postgres_async_indexes do |t|
t.timestamps_with_timezone null: false
t.text :name, null: false
t.text :definition, null: false
t.text :table_name, null: false
t.text_limit :name, 63
t.text_limit :definition, 2048
t.text_limit :table_name, 63
t.index :name, unique: true
end
end
end
1ef66bdf4a1c61d9a1e0e632d8728f86769ac727d43971e897284272e9f53581
\ No newline at end of file
...@@ -16549,6 +16549,27 @@ CREATE SEQUENCE pool_repositories_id_seq
ALTER SEQUENCE pool_repositories_id_seq OWNED BY pool_repositories.id;
CREATE TABLE postgres_async_indexes (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
name text NOT NULL,
definition text NOT NULL,
table_name text NOT NULL,
CONSTRAINT check_083b21157b CHECK ((char_length(definition) <= 2048)),
CONSTRAINT check_b732c6cd1d CHECK ((char_length(name) <= 63)),
CONSTRAINT check_e64ff4359e CHECK ((char_length(table_name) <= 63))
);
CREATE SEQUENCE postgres_async_indexes_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE postgres_async_indexes_id_seq OWNED BY postgres_async_indexes.id;
CREATE VIEW postgres_foreign_keys AS
SELECT pg_constraint.oid,
pg_constraint.conname AS name,
...@@ -20513,6 +20534,8 @@ ALTER TABLE ONLY plans ALTER COLUMN id SET DEFAULT nextval('plans_id_seq'::regcl
ALTER TABLE ONLY pool_repositories ALTER COLUMN id SET DEFAULT nextval('pool_repositories_id_seq'::regclass);
ALTER TABLE ONLY postgres_async_indexes ALTER COLUMN id SET DEFAULT nextval('postgres_async_indexes_id_seq'::regclass);
ALTER TABLE ONLY postgres_reindex_actions ALTER COLUMN id SET DEFAULT nextval('postgres_reindex_actions_id_seq'::regclass);
ALTER TABLE ONLY product_analytics_events_experimental ALTER COLUMN id SET DEFAULT nextval('product_analytics_events_experimental_id_seq'::regclass);
...@@ -22077,6 +22100,9 @@ ALTER TABLE ONLY plans
ALTER TABLE ONLY pool_repositories
ADD CONSTRAINT pool_repositories_pkey PRIMARY KEY (id);
ALTER TABLE ONLY postgres_async_indexes
ADD CONSTRAINT postgres_async_indexes_pkey PRIMARY KEY (id);
ALTER TABLE ONLY postgres_reindex_actions
ADD CONSTRAINT postgres_reindex_actions_pkey PRIMARY KEY (id);
...@@ -24635,6 +24661,8 @@ CREATE INDEX index_pool_repositories_on_shard_id ON pool_repositories USING btre
CREATE UNIQUE INDEX index_pool_repositories_on_source_project_id_and_shard_id ON pool_repositories USING btree (source_project_id, shard_id);
CREATE UNIQUE INDEX index_postgres_async_indexes_on_name ON postgres_async_indexes USING btree (name);
CREATE INDEX index_postgres_reindex_actions_on_index_identifier ON postgres_reindex_actions USING btree (index_identifier);
CREATE UNIQUE INDEX index_programming_languages_on_name ON programming_languages USING btree (name);
...@@ -226,3 +226,88 @@ def down
remove_concurrent_index_by_name :projects, INDEX_NAME
end
```
## Create indexes asynchronously
For very large tables, index creation can be a challenge to manage.
While `add_concurrent_index` creates indexes in a way that does not block
normal traffic, it can still be problematic when index creation runs for
many hours. Necessary database operations like `autovacuum` cannot run, and
on GitLab.com, the deployment process is blocked waiting for index
creation to finish.
To limit impact on GitLab.com, a process exists to create indexes
asynchronously during weekend hours. Due to generally lower levels of
traffic and lack of regular deployments, this process allows the
creation of indexes to proceed with a lower level of risk. The sections
below describe the steps required to use these features:
1. [Schedule the index to be created](#schedule-the-index-to-be-created).
1. [Verify the MR was deployed and the index exists in production](#verify-the-mr-was-deployed-and-the-index-exists-in-production).
1. [Add a migration to create the index synchronously](#add-a-migration-to-create-the-index-synchronously).
### Schedule the index to be created
Create an MR with a post-deployment migration that prepares the index
for asynchronous creation. An example of creating an index using
the asynchronous index helpers can be seen in the block below. This migration
enters the index name and definition into the `postgres_async_indexes`
table. The process that runs on weekends pulls indexes from this
table and attempts to create them.
```ruby
# in db/post_migrate/
INDEX_NAME = 'index_ci_builds_on_some_column'
def up
prepare_async_index :ci_builds, :some_column, name: INDEX_NAME
end
def down
unprepare_async_index :ci_builds, :some_column, name: INDEX_NAME
end
```
### Verify the MR was deployed and the index exists in production
You can verify whether the MR was deployed to GitLab.com by executing
`/chatops run auto_deploy status <merge_sha>`. To verify that the index
exists, you can:
- Use a meta-command in #database-lab, such as: `\di <index_name>`
- Ask someone in #database to check if the index exists
- With proper access, verify directly on production or in a
production clone (an example check is shown below)
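If you have Rails console access to a production clone, a minimal sketch of such a check could look like the following. It mirrors the `index_exists?` logic used by the async creation process and assumes the example index name and table from the migrations above:

```ruby
# Hypothetical check from a Rails console on a production clone.
# Returns true if the index from the earlier example already exists on ci_builds.
index_name = 'index_ci_builds_on_some_column'

ApplicationRecord.connection.indexes(:ci_builds).any? { |index| index.name == index_name }
```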
### Add a migration to create the index synchronously
After the index is verified to exist on the production database, create a second
merge request that adds the index synchronously. The synchronous
migration results in a no-op on GitLab.com, but you should still add the
migration as expected for other installations. The block below
demonstrates how to create the second migration for the previous
asynchronous example.
WARNING:
The responsibility lies with the individual writing the migrations to verify
the index exists in production before merging a second migration that
adds the index using `add_concurrent_index`. If the second migration is
deployed and the index has not yet been created, the index is created
synchronously when the second migration executes.
```ruby
# in db/post_migrate/
INDEX_NAME = 'index_ci_builds_on_some_column'
disable_ddl_transaction!
def up
add_concurrent_index :ci_builds, :some_column, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :ci_builds, INDEX_NAME
end
```
# frozen_string_literal: true
module Gitlab
module Database
module AsyncIndexes
DEFAULT_INDEXES_PER_INVOCATION = 2
def self.create_pending_indexes!(how_many: DEFAULT_INDEXES_PER_INVOCATION)
PostgresAsyncIndex.order(:id).limit(how_many).each do |async_index|
IndexCreator.new(async_index).perform
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module Database
module AsyncIndexes
class IndexCreator
include ExclusiveLeaseGuard
TIMEOUT_PER_ACTION = 1.day
STATEMENT_TIMEOUT = 9.hours
def initialize(async_index)
@async_index = async_index
end
def perform
try_obtain_lease do
if index_exists?
log_index_info('Skipping index creation as the index exists')
else
log_index_info('Creating async index')
set_statement_timeout do
connection.execute(async_index.definition)
end
end
async_index.destroy
end
end
private
attr_reader :async_index
def index_exists?
connection.indexes(async_index.table_name).any? { |index| index.name == async_index.name }
end
def connection
@connection ||= ApplicationRecord.connection
end
def lease_timeout
TIMEOUT_PER_ACTION
end
def set_statement_timeout
connection.execute("SET statement_timeout TO '%ds'" % STATEMENT_TIMEOUT)
yield
ensure
connection.execute('RESET statement_timeout')
end
def log_index_info(message)
Gitlab::AppLogger.info(message: message, table_name: async_index.table_name, index_name: async_index.name)
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module Database
module AsyncIndexes
module MigrationHelpers
def unprepare_async_index(table_name, column_name, **options)
return unless async_index_creation_available?
index_name = options[:name] || index_name(table_name, column_name)
raise 'Specifying index name is mandatory - specify name: argument' unless index_name
unprepare_async_index_by_name(table_name, index_name)
end
def unprepare_async_index_by_name(table_name, index_name, **options)
return unless async_index_creation_available?
PostgresAsyncIndex.find_by(name: index_name).try do |async_index|
async_index.destroy
end
end
# Prepares an index for asynchronous creation.
#
# Stores the index information in the postgres_async_indexes table to be created later. The
# index will be always be created CONCURRENTLY, so that option does not need to be given.
# If an existing asynchronous definition exists with the same name, the existing entry will be
# updated with the new definition.
#
# If the requested index has already been created, it is not stored in the table for
# asynchronous creation.
def prepare_async_index(table_name, column_name, **options)
return unless async_index_creation_available?
index_name = options[:name] || index_name(table_name, column_name)
raise 'Specifying index name is mandatory - specify name: argument' unless index_name
options = options.merge({ algorithm: :concurrently })
if index_exists?(table_name, column_name, **options)
Gitlab::AppLogger.warn(
message: 'Index not prepared because it already exists',
table_name: table_name,
index_name: index_name)
return
end
index, algorithm, if_not_exists = add_index_options(table_name, column_name, **options)
create_index = ActiveRecord::ConnectionAdapters::CreateIndexDefinition.new(index, algorithm, if_not_exists)
schema_creation = ActiveRecord::ConnectionAdapters::PostgreSQL::SchemaCreation.new(ApplicationRecord.connection)
definition = schema_creation.accept(create_index)
async_index = PostgresAsyncIndex.safe_find_or_create_by!(name: index_name) do |rec|
rec.table_name = table_name
rec.definition = definition
end
Gitlab::AppLogger.info(
message: 'Prepared index for async creation',
table_name: async_index.table_name,
index_name: async_index.name)
async_index
end
private
def async_index_creation_available?
ApplicationRecord.connection.table_exists?(:postgres_async_indexes) &&
Feature.enabled?(:database_async_index_creation, type: :ops)
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module Database
module AsyncIndexes
class PostgresAsyncIndex < ApplicationRecord
self.table_name = 'postgres_async_indexes'
MAX_IDENTIFIER_LENGTH = Gitlab::Database::MigrationHelpers::MAX_IDENTIFIER_NAME_LENGTH
MAX_DEFINITION_LENGTH = 2048
validates :name, presence: true, length: { maximum: MAX_IDENTIFIER_LENGTH }
validates :table_name, presence: true, length: { maximum: MAX_IDENTIFIER_LENGTH }
validates :definition, presence: true, length: { maximum: MAX_DEFINITION_LENGTH }
def to_s
definition
end
end
end
end
end
...@@ -6,6 +6,7 @@ module Gitlab
include Migrations::BackgroundMigrationHelpers
include DynamicModelHelpers
include RenameTableHelpers
include AsyncIndexes::MigrationHelpers
# https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
MAX_IDENTIFIER_NAME_LENGTH = 63
...@@ -152,6 +153,9 @@ module Gitlab
disable_statement_timeout do
add_index(table_name, column_name, **options)
end
# We created this index. Now let's remove the queuing entry for async creation in case it's still there.
unprepare_async_index(table_name, column_name, **options)
end
# Removes an existing index, concurrently
...@@ -178,6 +182,9 @@ module Gitlab
disable_statement_timeout do
remove_index(table_name, **options.merge({ column: column_name }))
end
# We removed this index. Now let's make sure it's not queued for async creation.
unprepare_async_index(table_name, column_name, **options)
end
# Removes an existing index, concurrently
...@@ -208,6 +215,9 @@ module Gitlab
disable_statement_timeout do
remove_index(table_name, **options.merge({ name: index_name }))
end
# We removed this index. Now let's make sure it's not queued for async creation.
unprepare_async_index_by_name(table_name, index_name, **options)
end
# Adds a foreign key with only minimal locking on the tables involved.
...
...@@ -176,6 +176,9 @@ namespace :gitlab do
# Cleanup leftover temporary indexes from previous, possibly aborted runs (if any)
Gitlab::Database::Reindexing.cleanup_leftovers!
# Hack: Before we do actual reindexing work, create async indexes
Gitlab::Database::AsyncIndexes.create_pending_indexes! if Feature.enabled?(:database_async_index_creation, type: :ops)
Gitlab::Database::Reindexing.perform(indexes)
rescue StandardError => e
Gitlab::AppLogger.error(e)
...
# frozen_string_literal: true
FactoryBot.define do
factory :postgres_async_index, class: 'Gitlab::Database::AsyncIndexes::PostgresAsyncIndex' do
sequence(:name) { |n| "users_id_#{n}" }
definition { "CREATE INDEX #{name} ON #{table_name} (id)" }
table_name { "users" }
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator do
describe '#perform' do
subject { described_class.new(async_index) }
let(:async_index) { create(:postgres_async_index) }
let(:index_model) { Gitlab::Database::AsyncIndexes::PostgresAsyncIndex }
let(:connection) { ApplicationRecord.connection }
context 'when the index already exists' do
before do
connection.execute(async_index.definition)
end
it 'skips index creation' do
expect(connection).not_to receive(:execute).with(/CREATE INDEX/)
subject.perform
end
end
it 'creates the index while controlling statement timeout' do
allow(connection).to receive(:execute).and_call_original
expect(connection).to receive(:execute).with("SET statement_timeout TO '32400s'").ordered.and_call_original
expect(connection).to receive(:execute).with(async_index.definition).ordered.and_call_original
expect(connection).to receive(:execute).with("RESET statement_timeout").ordered.and_call_original
subject.perform
end
it 'removes the index preparation record from postgres_async_indexes' do
expect(async_index).to receive(:destroy).and_call_original
expect { subject.perform }.to change { index_model.count }.by(-1)
end
it 'skips logic if not able to acquire exclusive lease' do
expect(subject).to receive(:try_obtain_lease).and_return(false)
expect(connection).not_to receive(:execute).with(/CREATE INDEX/)
expect(async_index).not_to receive(:destroy)
expect { subject.perform }.not_to change { index_model.count }
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Database::AsyncIndexes::MigrationHelpers do
let(:migration) { ActiveRecord::Migration.new.extend(described_class) }
let(:index_model) { Gitlab::Database::AsyncIndexes::PostgresAsyncIndex }
let(:connection) { ApplicationRecord.connection }
let(:table_name) { '_test_async_indexes' }
let(:index_name) { "index_#{table_name}_on_id" }
before do
allow(migration).to receive(:puts)
end
describe '#unprepare_async_index' do
let!(:async_index) { create(:postgres_async_index, name: index_name) }
context 'when the flag is enabled' do
before do
stub_feature_flags(database_async_index_creation: true)
end
it 'destroys the record' do
expect do
migration.unprepare_async_index(table_name, 'id')
end.to change { index_model.where(name: index_name).count }.by(-1)
end
context 'when an explicit name is given' do
let(:index_name) { 'my_test_async_index' }
it 'destroys the record' do
expect do
migration.unprepare_async_index(table_name, 'id', name: index_name)
end.to change { index_model.where(name: index_name).count }.by(-1)
end
end
context 'when the async index table does not exist' do
it 'does not raise an error' do
connection.drop_table(:postgres_async_indexes)
expect(index_model).not_to receive(:find_by)
expect { migration.unprepare_async_index(table_name, 'id') }.not_to raise_error
end
end
end
context 'when the feature flag is disabled' do
it 'does not destroy the record' do
stub_feature_flags(database_async_index_creation: false)
expect do
migration.unprepare_async_index(table_name, 'id')
end.not_to change { index_model.where(name: index_name).count }
end
end
end
describe '#unprepare_async_index_by_name' do
let(:index_name) { "index_#{table_name}_on_id" }
let!(:async_index) { create(:postgres_async_index, name: index_name) }
context 'when the flag is enabled' do
before do
stub_feature_flags(database_async_index_creation: true)
end
it 'destroys the record' do
expect do
migration.unprepare_async_index_by_name(table_name, index_name)
end.to change { index_model.where(name: index_name).count }.by(-1)
end
context 'when the async index table does not exist' do
it 'does not raise an error' do
connection.drop_table(:postgres_async_indexes)
expect(index_model).not_to receive(:find_by)
expect { migration.unprepare_async_index_by_name(table_name, index_name) }.not_to raise_error
end
end
end
context 'when the feature flag is disabled' do
it 'does not destroy the record' do
stub_feature_flags(database_async_index_creation: false)
expect do
migration.unprepare_async_index_by_name(table_name, index_name)
end.not_to change { index_model.where(name: index_name).count }
end
end
end
describe '#prepare_async_index' do
before do
connection.create_table(table_name)
end
context 'when the feature flag is enabled' do
before do
stub_feature_flags(database_async_index_creation: true)
end
it 'creates the record for the async index' do
expect do
migration.prepare_async_index(table_name, 'id')
end.to change { index_model.where(name: index_name).count }.by(1)
record = index_model.find_by(name: index_name)
expect(record.table_name).to eq(table_name)
expect(record.definition).to match(/CREATE INDEX CONCURRENTLY "#{index_name}"/)
end
context 'when an explicit name is given' do
let(:index_name) { 'my_async_index_name' }
it 'creates the record with the given name' do
expect do
migration.prepare_async_index(table_name, 'id', name: index_name)
end.to change { index_model.where(name: index_name).count }.by(1)
record = index_model.find_by(name: index_name)
expect(record.table_name).to eq(table_name)
expect(record.definition).to match(/CREATE INDEX CONCURRENTLY "#{index_name}"/)
end
end
context 'when the index already exists' do
it 'does not create the record' do
connection.add_index(table_name, 'id', name: index_name)
expect do
migration.prepare_async_index(table_name, 'id')
end.not_to change { index_model.where(name: index_name).count }
end
end
context 'when the record already exists' do
it 'does attempt to create the record' do
create(:postgres_async_index, table_name: table_name, name: index_name)
expect do
migration.prepare_async_index(table_name, 'id')
end.not_to change { index_model.where(name: index_name).count }
end
end
context 'when the async index table does not exist' do
it 'does not raise an error' do
connection.drop_table(:postgres_async_indexes)
expect(index_model).not_to receive(:safe_find_or_create_by!)
expect { migration.prepare_async_index(table_name, 'id') }.not_to raise_error
end
end
end
context 'when the feature flag is disabled' do
it 'does not create the record' do
stub_feature_flags(database_async_index_creation: false)
expect do
migration.prepare_async_index(table_name, 'id')
end.not_to change { index_model.where(name: index_name).count }
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Database::AsyncIndexes::PostgresAsyncIndex, type: :model do
describe 'validations' do
let(:identifier_limit) { described_class::MAX_IDENTIFIER_LENGTH }
let(:definition_limit) { described_class::MAX_DEFINITION_LENGTH }
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_length_of(:name).is_at_most(identifier_limit) }
it { is_expected.to validate_presence_of(:table_name) }
it { is_expected.to validate_length_of(:table_name).is_at_most(identifier_limit) }
it { is_expected.to validate_presence_of(:definition) }
it { is_expected.to validate_length_of(:definition).is_at_most(definition_limit) }
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Database::AsyncIndexes do
describe '.create_pending_indexes!' do
subject { described_class.create_pending_indexes! }
before do
create_list(:postgres_async_index, 4)
end
it 'takes 2 pending indexes and creates those' do
Gitlab::Database::AsyncIndexes::PostgresAsyncIndex.order(:id).limit(2).each do |index|
creator = double('index creator')
expect(Gitlab::Database::AsyncIndexes::IndexCreator).to receive(:new).with(index).and_return(creator)
expect(creator).to receive(:perform)
end
subject
end
end
end
...@@ -278,6 +278,16 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.add_concurrent_index(:users, :foo, unique: true)
end
it 'unprepares the async index creation' do
expect(model).to receive(:add_index)
.with(:users, :foo, algorithm: :concurrently)
expect(model).to receive(:unprepare_async_index)
.with(:users, :foo, algorithm: :concurrently)
model.add_concurrent_index(:users, :foo)
end
end
context 'inside a transaction' do
...@@ -314,6 +324,16 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.remove_concurrent_index(:users, :foo, unique: true)
end
it 'unprepares the async index creation' do
expect(model).to receive(:remove_index)
.with(:users, { algorithm: :concurrently, column: :foo })
expect(model).to receive(:unprepare_async_index)
.with(:users, :foo, { algorithm: :concurrently })
model.remove_concurrent_index(:users, :foo)
end
describe 'by index name' do
before do
allow(model).to receive(:index_exists_by_name?).with(:users, "index_x_by_y").and_return(true)
...@@ -345,6 +365,16 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.remove_concurrent_index_by_name(:users, wrong_key: "index_x_by_y")
end.to raise_error 'remove_concurrent_index_by_name must get an index name as the second argument'
end
it 'unprepares the async index creation' do
expect(model).to receive(:remove_index)
.with(:users, { algorithm: :concurrently, name: "index_x_by_y" })
expect(model).to receive(:unprepare_async_index_by_name)
.with(:users, "index_x_by_y", { algorithm: :concurrently })
model.remove_concurrent_index_by_name(:users, "index_x_by_y")
end
end
end
end
...
...@@ -207,6 +207,27 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
run_rake_task('gitlab:db:reindex')
end
context 'when async index creation is enabled' do
it 'executes async index creation prior to any reindexing actions' do
stub_feature_flags(database_async_index_creation: true)
expect(Gitlab::Database::AsyncIndexes).to receive(:create_pending_indexes!).ordered
expect(Gitlab::Database::Reindexing).to receive(:perform).ordered
run_rake_task('gitlab:db:reindex')
end
end
context 'when async index creation is disabled' do
it 'does not execute async index creation' do
stub_feature_flags(database_async_index_creation: false)
expect(Gitlab::Database::AsyncIndexes).not_to receive(:create_pending_indexes!)
run_rake_task('gitlab:db:reindex')
end
end
context 'when no index_name is given' do
it 'uses all candidate indexes' do
expect(Gitlab::Database::PostgresIndex).to receive(:reindexing_support).and_return(indexes)
...