Commit 1b4b09e5 authored by GitLab Bot

Automatic merge of gitlab-org/gitlab master

parents d9a33d80 7ffc8a86
......@@ -2275,7 +2275,6 @@ Gitlab/NamespacedClass:
- 'app/models/application_setting/term.rb'
- 'app/models/approval.rb'
- 'app/models/audit_event.rb'
- 'app/models/audit_event_archived.rb'
- 'app/models/authentication_event.rb'
- 'app/models/award_emoji.rb'
- 'app/models/badge.rb'
......
# frozen_string_literal: true
# This model is not intended to be used.
# It is a temporary reference to the pre-partitioned
# audit_events table.
# Please refer to https://gitlab.com/groups/gitlab-org/-/epics/3206
# for details.
class AuditEventArchived < ApplicationRecord
self.table_name = 'audit_events_archived'
end
......@@ -187,6 +187,14 @@
:weight: 1
:idempotent:
:tags: []
- :name: cronjob:database_batched_background_migration
:feature_category: :database
:has_external_dependencies:
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: cronjob:environments_auto_stop_cron
:feature_category: :continuous_delivery
:has_external_dependencies:
......
# frozen_string_literal: true
module Database
class BatchedBackgroundMigrationWorker
include ApplicationWorker
include CronjobQueue # rubocop:disable Scalability/CronWorkerContext
feature_category :database
idempotent!
def perform
return unless Feature.enabled?(:execute_batched_migrations_on_schedule, type: :ops) && active_migration
with_exclusive_lease(active_migration.interval) do
# Now that we have the exclusive lease, reload migration in case another process has changed it.
# This is a temporary solution until we have better concurrency handling around job execution
#
# We also have to disable this cop, because ApplicationRecord aliases reset to reload, but our database
# models don't inherit from ApplicationRecord
active_migration.reload # rubocop:disable Cop/ActiveRecordAssociationReload
run_active_migration if active_migration.active? && active_migration.interval_elapsed?
end
end
private
def active_migration
@active_migration ||= Gitlab::Database::BackgroundMigration::BatchedMigration.active_migration
end
def run_active_migration
Gitlab::Database::BackgroundMigration::BatchedMigrationRunner.new.run_migration_job(active_migration)
end
def with_exclusive_lease(timeout)
lease = Gitlab::ExclusiveLease.new(lease_key, timeout: timeout * 2)
yield if lease.try_obtain
ensure
lease&.cancel
end
def lease_key
self.class.name.demodulize.underscore
end
end
end
---
title: Add Vulnerabilities::FindingEvidence model
merge_request: 56790
author:
type: changed
---
title: Initialize conversion of events.id to bigint, and add execute_batched_migrations_on_schedule feature flag to control scheduled background migrations
merge_request: 51332
author:
type: other
---
name: execute_batched_migrations_on_schedule
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/51332
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/326241
milestone: '13.11'
type: ops
group: group::database
default_enabled: false
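For reference, a minimal sketch of toggling this ops flag from a Rails console, assuming GitLab's standard Feature API (not part of this commit); the worker only schedules work while the flag is enabled:
# Sketch (not part of this commit): toggle the ops flag from a Rails console.
# Database::BatchedBackgroundMigrationWorker#perform checks
# Feature.enabled?(:execute_batched_migrations_on_schedule, type: :ops) before running anything.
Feature.enable(:execute_batched_migrations_on_schedule)   # allow scheduled execution
Feature.disable(:execute_batched_migrations_on_schedule)  # pause scheduled execution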
......@@ -565,6 +565,9 @@ Gitlab.com do
Settings.cron_jobs['namespaces_in_product_marketing_emails_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['namespaces_in_product_marketing_emails_worker']['cron'] ||= '0 9 * * *'
Settings.cron_jobs['namespaces_in_product_marketing_emails_worker']['job_class'] = 'Namespaces::InProductMarketingEmailsWorker'
Settings.cron_jobs['batched_background_migrations_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['batched_background_migrations_worker']['cron'] ||= '* * * * *'
Settings.cron_jobs['batched_background_migrations_worker']['job_class'] = 'Database::BatchedBackgroundMigrationWorker'
end
Gitlab.ee do
......
# frozen_string_literal: true
class AddMetricsToBatchedBackgroundMigrationJobs < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :batched_background_migration_jobs, :metrics, :jsonb, null: false, default: {}
end
end
# frozen_string_literal: true
class InitializeConversionOfEventsIdToBigint < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
# Initialize the conversion of events.id to bigint
# Primary Key of the Events table
initialize_conversion_of_integer_to_bigint :events, :id
end
def down
trigger_name = rename_trigger_name(:events, :id, :id_convert_to_bigint)
remove_rename_triggers_for_postgresql :events, trigger_name
remove_column :events, :id_convert_to_bigint
end
end
# frozen_string_literal: true
class InitializeConversionOfPushEventPayloadsEventIdToBigint < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
# Foreign key that references events.id
# Also Primary key of the push_event_payloads table
initialize_conversion_of_integer_to_bigint :push_event_payloads, :event_id, primary_key: :event_id
end
def down
trigger_name = rename_trigger_name(:push_event_payloads, :event_id, :event_id_convert_to_bigint)
remove_rename_triggers_for_postgresql :push_event_payloads, trigger_name
remove_column :push_event_payloads, :event_id_convert_to_bigint
end
end
# frozen_string_literal: true
class CreateVulnerabilityFindingEvidences < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
create_table_with_constraints :vulnerability_finding_evidences do |t|
t.timestamps_with_timezone null: false
t.references :vulnerability_occurrence, index: { name: 'finding_evidences_on_vulnerability_occurrence_id' }, null: false, foreign_key: { on_delete: :cascade }
t.text :summary
t.text_limit :summary, 8_000_000
end
end
def down
with_lock_retries do
drop_table :vulnerability_finding_evidences
end
end
end
# frozen_string_literal: true
class BackfillEventsIdForBigintConversion < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
return unless Gitlab.dev_env_or_com?
backfill_conversion_of_integer_to_bigint :events, :id, batch_size: 15000, sub_batch_size: 100
end
def down
return unless Gitlab.dev_env_or_com?
Gitlab::Database::BackgroundMigration::BatchedMigration
.where(job_class_name: 'CopyColumnUsingBackgroundMigrationJob')
.where(table_name: 'events', column_name: 'id')
.where('job_arguments = ?', %w[id id_convert_to_bigint].to_json)
.delete_all
end
end
# frozen_string_literal: true
class BackfillPushEventPayloadEventIdForBigintConversion < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
return unless Gitlab.dev_env_or_com?
backfill_conversion_of_integer_to_bigint :push_event_payloads, :event_id, primary_key: :event_id,
batch_size: 15000, sub_batch_size: 100
end
def down
return unless Gitlab.dev_env_or_com?
Gitlab::Database::BackgroundMigration::BatchedMigration
.where(job_class_name: 'CopyColumnUsingBackgroundMigrationJob')
.where(table_name: 'push_event_payloads', column_name: 'event_id')
.where('job_arguments = ?', %w[event_id event_id_convert_to_bigint].to_json)
.delete_all
end
end
2ad45eaf6589600d9aadd225b55451d9213a4d858ef2717b7151062f1db225c8
\ No newline at end of file
3486452547ffa5da3e12837d2f184e356c90fdd1f016f85144a1ba4865825e87
\ No newline at end of file
e169ea265b942f636b2386a432e04d9dfccdc95f04113400d44ce59e81537843
\ No newline at end of file
b7af086a68c530dd528c4ceaf4bca8d04951c0f234f75a09922aa392bb17a796
\ No newline at end of file
4715c46f5d76c8eb3a206ad3bbcc94a8c13d1d6a66a7824dba400b0aa49c8aa6
\ No newline at end of file
d6181f8806592106305366f5e8ef508286ed447c1fce0de26f242de736b21809
\ No newline at end of file
......@@ -150,6 +150,24 @@ $$;
COMMENT ON FUNCTION table_sync_function_2be879775d() IS 'Partitioning migration: table sync for audit_events table';
CREATE FUNCTION trigger_07c94931164e() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
NEW."event_id_convert_to_bigint" := NEW."event_id";
RETURN NEW;
END;
$$;
CREATE FUNCTION trigger_69523443cc10() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
NEW."id_convert_to_bigint" := NEW."id";
RETURN NEW;
END;
$$;
CREATE TABLE audit_events (
id bigint NOT NULL,
author_id integer NOT NULL,
......@@ -9854,7 +9872,8 @@ CREATE TABLE batched_background_migration_jobs (
batch_size integer NOT NULL,
sub_batch_size integer NOT NULL,
status smallint DEFAULT 0 NOT NULL,
attempts smallint DEFAULT 0 NOT NULL
attempts smallint DEFAULT 0 NOT NULL,
metrics jsonb DEFAULT '{}'::jsonb NOT NULL
);
CREATE SEQUENCE batched_background_migration_jobs_id_seq
......@@ -12507,6 +12526,7 @@ CREATE TABLE events (
target_type character varying,
group_id bigint,
fingerprint bytea,
id_convert_to_bigint bigint DEFAULT 0 NOT NULL,
CONSTRAINT check_97e06e05ad CHECK ((octet_length(fingerprint) <= 128))
);
......@@ -16914,7 +16934,8 @@ CREATE TABLE push_event_payloads (
commit_to bytea,
ref text,
commit_title character varying(70),
ref_count integer
ref_count integer,
event_id_convert_to_bigint bigint DEFAULT 0 NOT NULL
);
CREATE TABLE push_rules (
......@@ -18553,6 +18574,24 @@ CREATE SEQUENCE vulnerability_feedback_id_seq
ALTER SEQUENCE vulnerability_feedback_id_seq OWNED BY vulnerability_feedback.id;
CREATE TABLE vulnerability_finding_evidences (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
vulnerability_occurrence_id bigint NOT NULL,
summary text,
CONSTRAINT check_5773b236fb CHECK ((char_length(summary) <= 8000000))
);
CREATE SEQUENCE vulnerability_finding_evidences_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE vulnerability_finding_evidences_id_seq OWNED BY vulnerability_finding_evidences.id;
CREATE TABLE vulnerability_finding_fingerprints (
id bigint NOT NULL,
finding_id bigint NOT NULL,
......@@ -19840,6 +19879,8 @@ ALTER TABLE ONLY vulnerability_external_issue_links ALTER COLUMN id SET DEFAULT
ALTER TABLE ONLY vulnerability_feedback ALTER COLUMN id SET DEFAULT nextval('vulnerability_feedback_id_seq'::regclass);
ALTER TABLE ONLY vulnerability_finding_evidences ALTER COLUMN id SET DEFAULT nextval('vulnerability_finding_evidences_id_seq'::regclass);
ALTER TABLE ONLY vulnerability_finding_fingerprints ALTER COLUMN id SET DEFAULT nextval('vulnerability_finding_fingerprints_id_seq'::regclass);
ALTER TABLE ONLY vulnerability_finding_links ALTER COLUMN id SET DEFAULT nextval('vulnerability_finding_links_id_seq'::regclass);
......@@ -21465,6 +21506,9 @@ ALTER TABLE ONLY vulnerability_external_issue_links
ALTER TABLE ONLY vulnerability_feedback
ADD CONSTRAINT vulnerability_feedback_pkey PRIMARY KEY (id);
ALTER TABLE ONLY vulnerability_finding_evidences
ADD CONSTRAINT vulnerability_finding_evidences_pkey PRIMARY KEY (id);
ALTER TABLE ONLY vulnerability_finding_fingerprints
ADD CONSTRAINT vulnerability_finding_fingerprints_pkey PRIMARY KEY (id);
......@@ -21700,6 +21744,8 @@ CREATE UNIQUE INDEX epic_user_mentions_on_epic_id_index ON epic_user_mentions US
CREATE INDEX expired_artifacts_temp_index ON ci_job_artifacts USING btree (id, created_at) WHERE ((expire_at IS NULL) AND (date(timezone('UTC'::text, created_at)) < '2020-06-22'::date));
CREATE INDEX finding_evidences_on_vulnerability_occurrence_id ON vulnerability_finding_evidences USING btree (vulnerability_occurrence_id);
CREATE INDEX finding_links_on_vulnerability_occurrence_id ON vulnerability_finding_links USING btree (vulnerability_occurrence_id);
CREATE INDEX idx_audit_events_on_entity_id_desc_author_id_created_at ON audit_events_archived USING btree (entity_id, entity_type, id DESC, author_id, created_at);
......@@ -24620,6 +24666,10 @@ CREATE TRIGGER table_sync_trigger_b99eb6998c AFTER INSERT OR DELETE OR UPDATE ON
CREATE TRIGGER table_sync_trigger_ee39a25f9d AFTER INSERT OR DELETE OR UPDATE ON audit_events FOR EACH ROW EXECUTE PROCEDURE table_sync_function_2be879775d();
CREATE TRIGGER trigger_07c94931164e BEFORE INSERT OR UPDATE ON push_event_payloads FOR EACH ROW EXECUTE PROCEDURE trigger_07c94931164e();
CREATE TRIGGER trigger_69523443cc10 BEFORE INSERT OR UPDATE ON events FOR EACH ROW EXECUTE PROCEDURE trigger_69523443cc10();
CREATE TRIGGER trigger_has_external_issue_tracker_on_delete AFTER DELETE ON services FOR EACH ROW WHEN ((((old.category)::text = 'issue_tracker'::text) AND (old.active = true) AND (old.project_id IS NOT NULL))) EXECUTE PROCEDURE set_has_external_issue_tracker();
CREATE TRIGGER trigger_has_external_issue_tracker_on_insert AFTER INSERT ON services FOR EACH ROW WHEN ((((new.category)::text = 'issue_tracker'::text) AND (new.active = true) AND (new.project_id IS NOT NULL))) EXECUTE PROCEDURE set_has_external_issue_tracker();
......@@ -26636,6 +26686,9 @@ ALTER TABLE ONLY cluster_platforms_kubernetes
ALTER TABLE ONLY ci_builds_metadata
ADD CONSTRAINT fk_rails_e20479742e FOREIGN KEY (build_id) REFERENCES ci_builds(id) ON DELETE CASCADE;
ALTER TABLE ONLY vulnerability_finding_evidences
ADD CONSTRAINT fk_rails_e3205a0c65 FOREIGN KEY (vulnerability_occurrence_id) REFERENCES vulnerability_occurrences(id) ON DELETE CASCADE;
ALTER TABLE ONLY vulnerability_occurrence_identifiers
ADD CONSTRAINT fk_rails_e4ef6d027c FOREIGN KEY (occurrence_id) REFERENCES vulnerability_occurrences(id) ON DELETE CASCADE;
......@@ -49,6 +49,73 @@ Support for syncing past branch and commit data [is planned](https://gitlab.com/
For more information, see [Usage](index.md#usage).
## Install the GitLab Jira Cloud application for self-managed instances **(FREE SELF)**
If your GitLab instance is self-managed, you must follow some
extra steps to install the GitLab Jira Cloud application.
Each Jira Cloud application must be installed from a single location. Jira fetches
a [manifest file](https://developer.atlassian.com/cloud/jira/platform/connect-app-descriptor/)
from the location you provide. The manifest file describes the application to the system. To support
self-managed GitLab instances with Jira Cloud, you can either:
- [Install the application manually](#install-the-application-manually).
- [Create a Marketplace listing](#create-a-marketplace-listing).
### Install the application manually **(FREE SELF)**
You can configure your Atlassian Cloud instance to allow installation of applications
from outside the Marketplace, and then install the application manually:
1. Sign in to your Jira instance as a user with administrator permissions.
1. Place your Jira instance into
[development mode](https://developer.atlassian.com/cloud/jira/platform/getting-started-with-connect/#step-2--enable-development-mode).
1. Sign in to your GitLab application as a user with [Administrator](../../user/permissions.md) permissions.
1. Install the GitLab application from your self-managed GitLab instance, as
described in the [Atlassian developer guides](https://developer.atlassian.com/cloud/jira/platform/getting-started-with-connect/#step-3--install-and-test-your-app).
1. In your Jira instance, go to **Apps > Manage Apps** and click **Upload app**:
![Image showing button labeled "upload app"](img/jira-upload-app_v13_11.png)
1. For **App descriptor URL**, provide the full URL to your manifest file, modifying this
URL based on your instance configuration: `https://your.domain/your-path/-/jira_connect/app_descriptor.json`.
A quick way to confirm this URL is served correctly is sketched after this section.
1. Click **Upload**, and Jira fetches the content of your `app_descriptor` file and installs
it for you.
1. If the upload is successful, Jira displays a modal panel: **Installed and ready to go!**
Click **Get started** to configure the integration.
![Image showing success modal](img/jira-upload-app-success_v13_11.png)
The **GitLab for Jira** app now displays under **Manage apps**. You can also
click **Get started** to open the configuration page rendered from your GitLab instance.
NOTE:
If you make changes to the application descriptor, you must uninstall, then reinstall, the
application.
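As referenced in the upload step above, you can confirm that your instance serves the descriptor before uploading it to Jira. This is a hypothetical check, not part of this commit; it assumes the URL pattern shown above and uses only the Ruby standard library:
# Hypothetical sketch: verify the app descriptor URL is reachable and returns JSON.
require 'net/http'
require 'json'

uri = URI('https://your.domain/your-path/-/jira_connect/app_descriptor.json') # adjust to your instance
response = Net::HTTP.get_response(uri)
if response.is_a?(Net::HTTPSuccess)
  descriptor = JSON.parse(response.body)
  puts "Descriptor served, top-level keys: #{descriptor.keys.join(', ')}"
else
  puts "Unexpected response: #{response.code}"
end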
### Create a Marketplace listing **(FREE SELF)**
If you prefer not to use development mode on your Jira instance, you can create
your own Marketplace listing for your instance, so your application
can be installed from the Atlassian Marketplace.
For full instructions, review the Atlassian [guide to creating a marketplace listing](https://developer.atlassian.com/platform/marketplace/installing-cloud-apps/#creating-the-marketplace-listing). To create a
Marketplace listing, you must:
1. Register as a Marketplace vendor.
1. List your application, using the application descriptor URL.
- Your manifest file is located at: `https://your.domain/your-path/-/jira_connect/app_descriptor.json`
- GitLab recommends you list your application as `private`, because public
applications can be viewed and installed by any user.
1. Generate test license tokens for your application.
Review the
[official Atlassian documentation](https://developer.atlassian.com/platform/marketplace/installing-cloud-apps/#creating-the-marketplace-listing)
for details.
NOTE:
DVCS means distributed version control system.
## Troubleshooting GitLab for Jira
The GitLab for Jira App uses an iframe to add namespaces on the settings page. Some browsers block cross-site cookies. This can lead to a message saying that the user needs to log in on GitLab.com even though the user is already logged in.
......
......@@ -36,6 +36,8 @@ module Vulnerabilities
has_many :fingerprints, class_name: 'Vulnerabilities::FindingFingerprint', inverse_of: :finding
has_many :finding_evidences, class_name: 'Vulnerabilities::FindingEvidence', inverse_of: :finding, foreign_key: 'vulnerability_occurrence_id'
serialize :config_options, Serializers::JSON # rubocop:disable Cop/ActiveRecordSerialize
attr_writer :sha
......
# frozen_string_literal: true
module Vulnerabilities
class FindingEvidence < ApplicationRecord
self.table_name = 'vulnerability_finding_evidences'
belongs_to :finding, class_name: 'Vulnerabilities::Finding', inverse_of: :finding_evidences, foreign_key: 'vulnerability_occurrence_id', optional: false
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Vulnerabilities::FindingEvidence do
it { is_expected.to belong_to(:finding).class_name('Vulnerabilities::Finding').required }
end
......@@ -19,6 +19,7 @@ RSpec.describe Vulnerabilities::Finding do
it { is_expected.to have_many(:finding_links).class_name('Vulnerabilities::FindingLink').with_foreign_key('vulnerability_occurrence_id') }
it { is_expected.to have_many(:finding_remediations).class_name('Vulnerabilities::FindingRemediation').with_foreign_key('vulnerability_occurrence_id') }
it { is_expected.to have_many(:remediations).through(:finding_remediations) }
it { is_expected.to have_many(:finding_evidences).class_name('Vulnerabilities::FindingEvidence').with_foreign_key('vulnerability_occurrence_id') }
end
describe 'validations' do
......
......@@ -34,12 +34,18 @@ module Gitlab
parent_batch_relation = relation_scoped_to_range(batch_table, batch_column, start_id, end_id)
parent_batch_relation.each_batch(column: batch_column, of: sub_batch_size) do |sub_batch|
sub_batch.update_all("#{quoted_copy_to}=#{quoted_copy_from}")
batch_metrics.time_operation(:update_all) do
sub_batch.update_all("#{quoted_copy_to}=#{quoted_copy_from}")
end
sleep(PAUSE_SECONDS)
end
end
def batch_metrics
@batch_metrics ||= Gitlab::Database::BackgroundMigration::BatchMetrics.new
end
private
def connection
......
# frozen_string_literal: true
module Gitlab
module Database
module BackgroundMigration
class BatchMetrics
attr_reader :timings
def initialize
@timings = {}
end
def time_operation(label)
start_time = monotonic_time
yield
timings_for_label(label) << monotonic_time - start_time
end
private
def timings_for_label(label)
timings[label] ||= []
end
def monotonic_time
Gitlab::Metrics::System.monotonic_time
end
end
end
end
end
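A brief usage sketch of the BatchMetrics class above (illustrative labels and blocks only; the real caller is the CopyColumnUsingBackgroundMigrationJob change earlier in this commit):
# Sketch: timing labeled operations; durations are monotonic-clock seconds.
metrics = Gitlab::Database::BackgroundMigration::BatchMetrics.new
metrics.time_operation(:update_all) { 1000.times { |i| i * i } } # any block can be timed
metrics.time_operation(:update_all) { :noop }                    # repeated labels append to the same list
metrics.timings # => { update_all: [<duration>, <duration>] }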
......@@ -23,6 +23,10 @@ module Gitlab
finished: 3
}
def self.active_migration
active.queue_order.first
end
def interval_elapsed?
last_job.nil? || last_job.created_at <= Time.current - interval
end
......
......@@ -8,6 +8,16 @@ module Gitlab
@migration_wrapper = migration_wrapper
end
# Runs the next batched_job for a batched_background_migration.
#
# The batch bounds of the next job are calculated at runtime, based on the migration
# configuration and the bounds of the most recently created batched_job. Updating the
# migration configuration will cause future jobs to use the updated batch sizes.
#
# The job instance will automatically receive a set of arguments based on the migration
# configuration. For more details, see the BatchedMigrationWrapper class.
#
# Note that this method is primarily intended to be called by a scheduled worker.
def run_migration_job(active_migration)
if next_batched_job = create_next_batched_job!(active_migration)
migration_wrapper.perform(next_batched_job)
......@@ -16,7 +26,15 @@ module Gitlab
end
end
# Runs all remaining batched_jobs for a batched_background_migration.
#
# This method is intended to be used in a test/dev environment to execute the background
# migration inline. It should NOT be used in a real environment for any non-trivial migrations.
def run_entire_migration(migration)
unless Rails.env.development? || Rails.env.test?
raise 'this method is not intended for use in real environments'
end
while migration.active?
run_migration_job(migration)
......
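For illustration, the guarded run_entire_migration above could be driven from a development or test console roughly as follows. This is a sketch, not part of this commit; the find_by filter assumes an existing batched migration record such as the ones created by the backfill migrations earlier in this merge:
# Sketch (development/test only): run a batched background migration to completion inline.
migration = Gitlab::Database::BackgroundMigration::BatchedMigration
  .find_by(job_class_name: 'CopyColumnUsingBackgroundMigrationJob', table_name: 'events')
Gitlab::Database::BackgroundMigration::BatchedMigrationRunner.new.run_entire_migration(migration) if migration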
......@@ -4,6 +4,13 @@ module Gitlab
module Database
module BackgroundMigration
class BatchedMigrationWrapper
# Wraps the execution of a batched_background_migration.
#
# Updates the job's tracking record with the status of the migration
# when starting and finishing execution, and saves any batch_metrics
# the migration provides.
#
# The job's batch_metrics are serialized to JSON for storage.
def perform(batch_tracking_record)
start_tracking_execution(batch_tracking_record)
......@@ -34,6 +41,10 @@ module Gitlab
tracking_record.migration_column_name,
tracking_record.sub_batch_size,
*tracking_record.migration_job_arguments)
if job_instance.respond_to?(:batch_metrics)
tracking_record.metrics = job_instance.batch_metrics
end
end
def finish_tracking_execution(tracking_record)
......
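For context, the wrapper only persists metrics when the job instance responds to batch_metrics, mirroring the CopyColumnUsingBackgroundMigrationJob change above. A hypothetical job sketch (class name and perform body are illustrative, not part of this commit):
# Hypothetical sketch: a batched job exposing batch_metrics so the wrapper stores its timings.
class ExampleBatchedJob
  def perform(start_id, end_id, *job_arguments)
    batch_metrics.time_operation(:update_all) do
      # sub-batch work goes here
    end
  end

  def batch_metrics
    @batch_metrics ||= Gitlab::Database::BackgroundMigration::BatchMetrics.new
  end
end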
......@@ -265,6 +265,7 @@ excluded_attributes:
- :issue_id
push_event_payload:
- :event_id
- :event_id_convert_to_bigint
project_badges:
- :group_id
resource_label_events:
......@@ -287,6 +288,7 @@ excluded_attributes:
- :label_id
events:
- :target_id
- :id_convert_to_bigint
timelogs:
- :issue_id
- :merge_request_id
......
......@@ -64,5 +64,13 @@ RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJo
expect(test_table.where('name is NULL and name_convert_to_text is NULL').pluck(:id)).to contain_exactly(15)
expect(test_table.where("name_convert_to_text = 'no name'").count).to eq(0)
end
it 'tracks timings of queries' do
expect(subject.batch_metrics.timings).to be_empty
subject.perform(10, 20, table_name, 'id', sub_batch_size, 'name', 'name_convert_to_text')
expect(subject.batch_metrics.timings[:update_all]).not_to be_empty
end
end
end
# frozen_string_literal: true
require 'fast_spec_helper'
RSpec.describe Gitlab::Database::BackgroundMigration::BatchMetrics do
let(:batch_metrics) { described_class.new }
describe '#time_operation' do
it 'tracks the duration of the operation using monotonic time' do
expect(batch_metrics.timings).to be_empty
expect(Gitlab::Metrics::System).to receive(:monotonic_time)
.exactly(6).times
.and_return(0.0, 111.0, 200.0, 290.0, 300.0, 410.0)
batch_metrics.time_operation(:my_label) do
# some operation
end
batch_metrics.time_operation(:my_other_label) do
# some operation
end
batch_metrics.time_operation(:my_label) do
# some operation
end
expect(batch_metrics.timings).to eq(my_label: [111.0, 110.0], my_other_label: [90.0])
end
end
end
......@@ -139,6 +139,19 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
end
describe '#run_entire_migration' do
context 'when not in a development or test environment' do
it 'raises an error' do
environment = double('environment', development?: false, test?: false)
migration = build(:batched_background_migration, :finished)
allow(Rails).to receive(:env).and_return(environment)
expect do
runner.run_entire_migration(migration)
end.to raise_error('this method is not intended for use in real environments')
end
end
context 'when the given migration is not active' do
it 'does not create and run migration jobs' do
migration = build(:batched_background_migration, :finished)
......
......@@ -29,6 +29,16 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
end
describe '.active_migration' do
let!(:migration1) { create(:batched_background_migration, :finished) }
let!(:migration2) { create(:batched_background_migration, :active) }
let!(:migration3) { create(:batched_background_migration, :active) }
it 'returns the first active migration according to queue order' do
expect(described_class.active_migration).to eq(migration2)
end
end
describe '#interval_elapsed?' do
context 'when the migration has no last_job' do
let(:batched_migration) { build(:batched_background_migration) }
......
......@@ -9,16 +9,24 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
let_it_be(:active_migration) { create(:batched_background_migration, :active, job_arguments: [:id, :other_id]) }
let!(:job_record) { create(:batched_background_migration_job, batched_migration: active_migration) }
let(:job_instance) { double('job instance', batch_metrics: {}) }
before do
allow(job_class).to receive(:new).and_return(job_instance)
end
it 'runs the migration job' do
expect_next_instance_of(job_class) do |job_instance|
expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
end
expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
migration_wrapper.perform(job_record)
end
it 'updates the the tracking record in the database' do
it 'updates the tracking record in the database' do
test_metrics = { 'my_metrics' => 'some value' }
expect(job_instance).to receive(:perform)
expect(job_instance).to receive(:batch_metrics).and_return(test_metrics)
expect(job_record).to receive(:update!).with(hash_including(attempts: 1, status: :running)).and_call_original
freeze_time do
......@@ -29,14 +37,13 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
expect(reloaded_job_record).not_to be_pending
expect(reloaded_job_record.attempts).to eq(1)
expect(reloaded_job_record.started_at).to eq(Time.current)
expect(reloaded_job_record.metrics).to eq(test_metrics)
end
end
context 'when the migration job does not raise an error' do
it 'marks the tracking record as succeeded' do
expect_next_instance_of(job_class) do |job_instance|
expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
end
expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
freeze_time do
migration_wrapper.perform(job_record)
......@@ -51,11 +58,9 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
context 'when the migration job raises an error' do
it 'marks the tracking record as failed before raising the error' do
expect_next_instance_of(job_class) do |job_instance|
expect(job_instance).to receive(:perform)
.with(1, 10, 'events', 'id', 1, 'id', 'other_id')
.and_raise(RuntimeError, 'Something broke!')
end
expect(job_instance).to receive(:perform)
.with(1, 10, 'events', 'id', 1, 'id', 'other_id')
.and_raise(RuntimeError, 'Something broke!')
freeze_time do
expect { migration_wrapper.perform(job_record) }.to raise_error(RuntimeError, 'Something broke!')
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe AuditEventArchived do
let(:source_table) { AuditEvent }
let(:destination_table) { described_class }
it 'has the same columns as the source table' do
column_names_from_source_table = column_names(source_table)
column_names_from_destination_table = column_names(destination_table)
expect(column_names_from_destination_table).to match_array(column_names_from_source_table)
end
it 'has the same null constraints as the source table' do
constraints_from_source_table = null_constraints(source_table)
constraints_from_destination_table = null_constraints(destination_table)
expect(constraints_from_destination_table.to_a).to match_array(constraints_from_source_table.to_a)
end
it 'inserts the same record as the one in the source table', :aggregate_failures do
expect { create(:audit_event) }.to change { destination_table.count }.by(1)
event_from_source_table = source_table.connection.select_one(
"SELECT * FROM #{source_table.table_name} ORDER BY created_at desc LIMIT 1"
)
event_from_destination_table = destination_table.connection.select_one(
"SELECT * FROM #{destination_table.table_name} ORDER BY created_at desc LIMIT 1"
)
expect(event_from_destination_table).to eq(event_from_source_table)
end
def column_names(table)
table.connection.select_all(<<~SQL)
SELECT c.column_name
FROM information_schema.columns c
WHERE c.table_name = '#{table.table_name}'
SQL
end
def null_constraints(table)
table.connection.select_all(<<~SQL)
SELECT c.column_name, c.is_nullable
FROM information_schema.columns c
WHERE c.table_name = '#{table.table_name}'
AND c.column_name != 'created_at'
SQL
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Database::BatchedBackgroundMigrationWorker, '#perform', :clean_gitlab_redis_shared_state do
include ExclusiveLeaseHelpers
let(:worker) { described_class.new }
context 'when the feature flag is disabled' do
before do
stub_feature_flags(execute_batched_migrations_on_schedule: false)
end
it 'does nothing' do
expect(worker).not_to receive(:active_migration)
expect(worker).not_to receive(:run_active_migration)
worker.perform
end
end
context 'when the feature flag is enabled' do
before do
stub_feature_flags(execute_batched_migrations_on_schedule: true)
allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration).and_return(nil)
end
context 'when no active migrations exist' do
it 'does nothing' do
expect(worker).not_to receive(:run_active_migration)
worker.perform
end
end
context 'when active migrations exist' do
let(:lease_key) { 'batched_background_migration_worker' }
let(:migration) { build(:batched_background_migration, :active, interval: 2.minutes) }
before do
allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration)
.and_return(migration)
allow(migration).to receive(:interval_elapsed?).and_return(true)
allow(migration).to receive(:reload)
end
context 'when the reloaded migration is no longer active' do
it 'does not run the migration' do
expect_to_obtain_exclusive_lease(lease_key, timeout: 4.minutes)
expect(migration).to receive(:reload)
expect(migration).to receive(:active?).and_return(false)
expect(worker).not_to receive(:run_active_migration)
worker.perform
end
end
context 'when the interval has not elapsed' do
it 'does not run the migration' do
expect_to_obtain_exclusive_lease(lease_key, timeout: 4.minutes)
expect(migration).to receive(:interval_elapsed?).and_return(false)
expect(worker).not_to receive(:run_active_migration)
worker.perform
end
end
context 'when the reloaded migration is still active and the interval has elapsed' do
it 'runs the migration' do
expect_to_obtain_exclusive_lease(lease_key, timeout: 4.minutes)
expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |instance|
expect(instance).to receive(:run_migration_job).with(migration)
end
expect(worker).to receive(:run_active_migration).and_call_original
worker.perform
end
end
it 'always cleans up the exclusive lease' do
lease = stub_exclusive_lease_taken(lease_key, timeout: 4.minutes)
expect(lease).to receive(:try_obtain).and_return(true)
expect(worker).to receive(:run_active_migration).and_raise(RuntimeError, 'I broke')
expect(lease).to receive(:cancel)
expect { worker.perform }.to raise_error(RuntimeError, 'I broke')
end
end
end
end