Commit 9c25dca7 authored by Patrick Bajao

Backfill cleanup schedules for old closed/merged MRs

Adds a background migration that creates corresponding records in
`merge_request_cleanup_schedules` for merge requests that are
closed or merged.

These records will later be queried by a cron worker so that the
merge request refs of those MRs can be cleaned up accordingly.
parent 54631a2e
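The cron worker that consumes these schedules is not part of this commit. As a rough sketch of the intended consumer, assuming an ActiveRecord model over `merge_request_cleanup_schedules` and a ref-cleanup call on the merge request (class, scope, and method names below are hypothetical):

# Not part of this commit: a rough sketch of a worker that consumes the
# backfilled records. Class and method names are hypothetical.
class MergeRequestCleanupRefsCronWorker
  include ApplicationWorker # assumed GitLab worker mixin

  def perform
    # Pick schedules that are due, i.e. records (including the ones backfilled
    # by this migration) whose scheduled_at has passed.
    MergeRequestCleanupSchedule.where('scheduled_at <= ?', Time.current).find_each do |schedule|
      schedule.merge_request.cleanup_refs # hypothetical ref-cleanup call
    end
  end
end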
---
title: Backfill cleanup schedules for old closed/merged MRs
merge_request: 46782
author:
type: other
# frozen_string_literal: true

class ScheduleMergeRequestCleanupSchedulesBackfill < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false
  MIGRATION = 'BackfillMergeRequestCleanupSchedules'
  DELAY_INTERVAL = 2.minutes
  BATCH_SIZE = 10_000
  TEMP_INDEX_NAME = 'merge_requests_state_id_temp_index'

  disable_ddl_transaction!

  def up
    # Temporary partial index over closed (state_id 2) and merged (state_id 3)
    # merge requests so the batched scheduling below stays efficient.
    add_concurrent_index :merge_requests, :id, name: TEMP_INDEX_NAME, where: "state_id IN (2, 3)"

    eligible_mrs = Gitlab::BackgroundMigration::BackfillMergeRequestCleanupSchedules::MergeRequest.eligible

    queue_background_migration_jobs_by_range_at_intervals(
      eligible_mrs,
      MIGRATION,
      DELAY_INTERVAL,
      batch_size: BATCH_SIZE
    )
  end

  def down
    remove_concurrent_index_by_name :merge_requests, TEMP_INDEX_NAME
  end
end
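The helper walks the eligible scope in id batches of BATCH_SIZE and enqueues one delayed BackgroundMigrationWorker job per batch, staggered by DELAY_INTERVAL (the migration spec further below asserts exactly this). Roughly, with placeholder id ranges:

# Illustrative shape of the enqueued jobs (ids are placeholders):
BackgroundMigrationWorker.perform_in(2.minutes, 'BackfillMergeRequestCleanupSchedules', [batch_1_start_id, batch_1_end_id])
BackgroundMigrationWorker.perform_in(4.minutes, 'BackfillMergeRequestCleanupSchedules', [batch_2_start_id, batch_2_end_id])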
@@ -22234,6 +22234,8 @@ CREATE UNIQUE INDEX merge_request_user_mentions_on_mr_id_and_note_id_index ON me
CREATE UNIQUE INDEX merge_request_user_mentions_on_mr_id_index ON merge_request_user_mentions USING btree (merge_request_id) WHERE (note_id IS NULL);
CREATE INDEX merge_requests_state_id_temp_index ON merge_requests USING btree (id) WHERE (state_id = ANY (ARRAY[2, 3]));
CREATE INDEX note_mentions_temp_index ON notes USING btree (id, noteable_type) WHERE (note ~~ '%@%'::text);
CREATE UNIQUE INDEX one_canonical_wiki_page_slug_per_metadata ON wiki_page_slugs USING btree (wiki_page_meta_id) WHERE (canonical = true);
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # Backfill merge request cleanup schedules of closed/merged merge requests
    # without any corresponding records.
    class BackfillMergeRequestCleanupSchedules
      # Model used for migration added in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/46782.
      class MergeRequest < ActiveRecord::Base
        include EachBatch

        self.table_name = 'merge_requests'

        def self.eligible
          # state_id 2 = closed, 3 = merged
          where('merge_requests.state_id IN (2, 3)')
        end
      end

      def perform(start_id, end_id)
        eligible_mrs = MergeRequest.eligible.where(id: start_id..end_id)

        # Schedule cleanup 14 days after the MR was merged, falling back to when
        # it was last closed, then to its last update.
        scheduled_at_column = "COALESCE(metrics.merged_at, COALESCE(metrics.latest_closed_at, merge_requests.updated_at)) + interval '14 days'"

        query =
          eligible_mrs
            .select("merge_requests.id, #{scheduled_at_column}, NOW(), NOW()")
            .joins('LEFT JOIN merge_request_metrics metrics ON metrics.merge_request_id = merge_requests.id')

        result = ActiveRecord::Base.connection.execute <<~SQL
          INSERT INTO merge_request_cleanup_schedules (merge_request_id, scheduled_at, created_at, updated_at)
          #{query.to_sql}
          ON CONFLICT (merge_request_id) DO NOTHING;
        SQL

        ::Gitlab::BackgroundMigration::Logger.info(
          message: 'Backfilled merge_request_cleanup_schedules records',
          count: result.cmd_tuples
        )
      end
    end
  end
end
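For reference, each scheduled job ends up calling #perform with one batch's id range, which executes a single INSERT ... SELECT. A sketch with placeholder ids:

# Runs one batch of the backfill directly (ids are placeholders):
Gitlab::BackgroundMigration::BackfillMergeRequestCleanupSchedules.new.perform(1, 10_000)

# Roughly the SQL executed for that batch:
#   INSERT INTO merge_request_cleanup_schedules (merge_request_id, scheduled_at, created_at, updated_at)
#   SELECT merge_requests.id,
#          COALESCE(metrics.merged_at, COALESCE(metrics.latest_closed_at, merge_requests.updated_at)) + interval '14 days',
#          NOW(), NOW()
#   FROM merge_requests
#   LEFT JOIN merge_request_metrics metrics ON metrics.merge_request_id = merge_requests.id
#   WHERE merge_requests.state_id IN (2, 3) AND merge_requests.id BETWEEN 1 AND 10000
#   ON CONFLICT (merge_request_id) DO NOTHING;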
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BackgroundMigration::BackfillMergeRequestCleanupSchedules, schema: 20201103110018 do
  let(:merge_requests) { table(:merge_requests) }
  let(:cleanup_schedules) { table(:merge_request_cleanup_schedules) }
  let(:metrics) { table(:merge_request_metrics) }
  let(:namespace) { table(:namespaces).create!(name: 'name', path: 'path') }
  let(:project) { table(:projects).create!(namespace_id: namespace.id) }

  subject { described_class.new }

  describe '#perform' do
    let!(:open_mr) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master') }

    let!(:closed_mr_1) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 2) }
    let!(:closed_mr_2) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 2) }
    let!(:closed_mr_1_metrics) { metrics.create!(merge_request_id: closed_mr_1.id, target_project_id: project.id, latest_closed_at: Time.current, created_at: Time.current, updated_at: Time.current) }
    let!(:closed_mr_2_metrics) { metrics.create!(merge_request_id: closed_mr_2.id, target_project_id: project.id, latest_closed_at: Time.current, created_at: Time.current, updated_at: Time.current) }
    let!(:closed_mr_2_cleanup_schedule) { cleanup_schedules.create!(merge_request_id: closed_mr_2.id, scheduled_at: Time.current) }

    let!(:merged_mr_1) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 3) }
    let!(:merged_mr_2) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 3, updated_at: Time.current) }
    let!(:merged_mr_1_metrics) { metrics.create!(merge_request_id: merged_mr_1.id, target_project_id: project.id, merged_at: Time.current, created_at: Time.current, updated_at: Time.current) }

    # Created after merged_mr_2, so it falls outside the migrated id range and must not be backfilled.
    let!(:closed_mr_3) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 2) }
    let!(:closed_mr_3_metrics) { metrics.create!(merge_request_id: closed_mr_3.id, target_project_id: project.id, latest_closed_at: Time.current, created_at: Time.current, updated_at: Time.current) }

    it 'creates records for all closed and merged merge requests in range' do
      expect(Gitlab::BackgroundMigration::Logger).to receive(:info).with(
        message: 'Backfilled merge_request_cleanup_schedules records',
        count: 3
      )

      subject.perform(open_mr.id, merged_mr_2.id)

      aggregate_failures do
        expect(cleanup_schedules.all.pluck(:merge_request_id))
          .to contain_exactly(closed_mr_1.id, closed_mr_2.id, merged_mr_1.id, merged_mr_2.id)
        expect(cleanup_schedules.find_by(merge_request_id: closed_mr_1.id).scheduled_at.to_s)
          .to eq((closed_mr_1_metrics.latest_closed_at + 14.days).to_s)
        expect(cleanup_schedules.find_by(merge_request_id: closed_mr_2.id).scheduled_at.to_s)
          .to eq(closed_mr_2_cleanup_schedule.scheduled_at.to_s)
        expect(cleanup_schedules.find_by(merge_request_id: merged_mr_1.id).scheduled_at.to_s)
          .to eq((merged_mr_1_metrics.merged_at + 14.days).to_s)
        expect(cleanup_schedules.find_by(merge_request_id: merged_mr_2.id).scheduled_at.to_s)
          .to eq((merged_mr_2.updated_at + 14.days).to_s)
      end
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'
require_migration!

RSpec.describe ScheduleMergeRequestCleanupSchedulesBackfill, :sidekiq, schema: 20201023114628 do
  let(:merge_requests) { table(:merge_requests) }
  let(:cleanup_schedules) { table(:merge_request_cleanup_schedules) }
  let(:namespace) { table(:namespaces).create!(name: 'name', path: 'path') }
  let(:project) { table(:projects).create!(namespace_id: namespace.id) }

  describe '#up' do
    let!(:open_mr) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master') }

    let!(:closed_mr_1) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 2) }
    let!(:closed_mr_2) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 2) }

    let!(:merged_mr_1) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 3) }
    let!(:merged_mr_2) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 3) }

    before do
      stub_const("#{described_class}::BATCH_SIZE", 2)
    end

    it 'schedules BackfillMergeRequestCleanupSchedules background jobs' do
      Sidekiq::Testing.fake! do
        migrate!

        aggregate_failures do
          expect(described_class::MIGRATION)
            .to be_scheduled_delayed_migration(2.minutes, closed_mr_1.id, closed_mr_2.id)
          expect(described_class::MIGRATION)
            .to be_scheduled_delayed_migration(4.minutes, merged_mr_1.id, merged_mr_2.id)
          expect(BackgroundMigrationWorker.jobs.size).to eq(2)
        end
      end
    end
  end
end