Commit c9f77f6e authored by Patrick Bair

Merge branch '331248-remove-old-migrations-in-release-14-3' into 'master'

Resolve "Remove old migrations in release 14 - Part III"

See merge request gitlab-org/gitlab!74001
Parents: fddd1c97, 6be83f35
# frozen_string_literal: true
# rubocop: disable Gitlab/ModuleWithInstanceVariables
module EE
module Gitlab
module BackgroundMigration
module MigrateSecurityScans
extend ::Gitlab::Utils::Override
override :perform
def perform(start_id, stop_id)
# Introduced in GitLab 12.9, will be removed as part of https://gitlab.com/gitlab-org/gitlab/-/issues/331248
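# A sketch of the mapping this INSERT relies on (file_type values are
# taken from the spec at the end of this change):
#   file_type 5 (sast)                => scan_type 1
#   file_type 6 (dependency_scanning) => scan_type 2
#   file_type 7 (container_scanning)  => scan_type 3
#   file_type 8 (dast)                => scan_type 4
# i.e. scan_type = file_type - 4 for the security report artifact types.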
::Gitlab::Database.allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/331248') do
execute <<~SQL
INSERT INTO security_scans (created_at, updated_at, build_id, scan_type)
SELECT ci_job_artifacts.created_at, ci_job_artifacts.updated_at, ci_job_artifacts.job_id, ci_job_artifacts.file_type - 4
FROM ci_job_artifacts
WHERE ci_job_artifacts.id BETWEEN #{start_id} AND #{stop_id}
AND ci_job_artifacts.file_type BETWEEN 5 AND 8
ON CONFLICT (build_id, scan_type) DO NOTHING;
SQL
end
end
def execute(sql)
@connection ||= ::ActiveRecord::Base.connection
@connection.execute(sql)
end
end
end
end
end
# frozen_string_literal: true
module EE
module Gitlab
module BackgroundMigration
module MoveEpicIssuesAfterEpics
extend ::Gitlab::Utils::Override
class EpicIssue < ActiveRecord::Base
self.table_name = 'epic_issues'
end
class Epic < ActiveRecord::Base
self.table_name = 'epics'
end
override :perform
def perform(start_id, stop_id)
maximum_epic_position = Epic.maximum(:relative_position)
return unless maximum_epic_position
max_position = ::Gitlab::Database::MAX_INT_VALUE
delta = ((maximum_epic_position - max_position) / 2.0).abs.ceil
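# Worked example: with maximum_epic_position = 5_000 and
# max_position = 2_147_483_647 (Gitlab::Database::MAX_INT_VALUE),
# delta = ((5_000 - 2_147_483_647) / 2.0).abs.ceil = 1_073_739_324.
# Epic issues below max_position - delta are shifted past every epic;
# rows already within delta of MAX_INT_VALUE stay put to avoid
# overflowing the integer column (see the spec's epic_issue_3 case).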
EpicIssue.where(epic_id: start_id..stop_id).where('relative_position < ?', max_position - delta)
.update_all("relative_position = relative_position + #{delta}")
end
end
end
end
end
# frozen_string_literal: true
# rubocop: disable Gitlab/ModuleWithInstanceVariables
module EE
module Gitlab
module BackgroundMigration
# This background migration creates any-approver rule records for the
# given merge request IDs range. A _single_ INSERT is issued for the given range.
module PopulateAnyApprovalRuleForMergeRequests
extend ::Gitlab::Utils::Override
MAX_VALUE = 2**15 - 1
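# MAX_VALUE (32_767) clamps approvals_required to the smallint column
# limit via LEAST(32767, approvals_before_merge): e.g. an
# approvals_before_merge of 2**30 is stored as 32_767 (exercised by the
# spec below).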
override :perform
def perform(from_id, to_id)
select_sql =
::MergeRequest
.where(merge_request_approval_rules_not_exists_clause)
.where(id: from_id..to_id)
.where('approvals_before_merge <> 0')
.select("id, LEAST(#{MAX_VALUE}, approvals_before_merge), created_at, updated_at, 4, '#{::ApprovalRuleLike::ALL_MEMBERS}'")
.to_sql
execute("INSERT INTO approval_merge_request_rules (merge_request_id, approvals_required, created_at, updated_at, rule_type, name) #{select_sql}")
end
private
def merge_request_approval_rules_not_exists_clause
<<~SQL
NOT EXISTS (SELECT 1 FROM approval_merge_request_rules
WHERE approval_merge_request_rules.merge_request_id = merge_requests.id)
SQL
end
def execute(sql)
@connection ||= ActiveRecord::Base.connection
@connection.execute(sql)
end
end
end
end
end
# frozen_string_literal: true
# rubocop: disable Gitlab/ModuleWithInstanceVariables
module EE
module Gitlab
module BackgroundMigration
# This background migration creates any-approver rule records for the
# given project IDs range. A _single_ INSERT is issued for the given range.
module PopulateAnyApprovalRuleForProjects
extend ::Gitlab::Utils::Override
MAX_VALUE = 2**15 - 1
override :perform
def perform(from_id, to_id)
select_sql =
::Project
.where(project_approval_rules_not_exists_clause)
.where(id: from_id..to_id)
.where('approvals_before_merge <> 0')
.select(select_clause)
.to_sql
execute("INSERT INTO approval_project_rules (project_id, approvals_required, created_at, updated_at, rule_type, name) #{select_sql}")
end
private
def select_clause
<<~SQL
id, LEAST(#{MAX_VALUE}, approvals_before_merge),
created_at, updated_at, #{::ApprovalProjectRule.rule_types[:any_approver]}, '#{::ApprovalRuleLike::ALL_MEMBERS}'
SQL
end
def project_approval_rules_not_exists_clause
<<~SQL
NOT EXISTS (SELECT 1 FROM approval_project_rules
WHERE approval_project_rules.project_id = projects.id)
SQL
end
def execute(sql)
@connection ||= ::ActiveRecord::Base.connection
@connection.execute(sql)
end
end
end
end
end
# frozen_string_literal: true
module EE
module Gitlab
module BackgroundMigration
# This class updates vulnerability feedback entities with no pipeline id assigned.
module PopulateVulnerabilityFeedbackPipelineId
extend ::Gitlab::Utils::Override
SECURITY_REPORT_FILE_TYPES = {
sast: 5,
dependency_scanning: 6,
container_scanning: 7,
dast: 8,
license_management: 10,
license_scanning: 101,
secret_detection: 21,
coverage_fuzzing: 23,
api_fuzzing: 26
}.freeze
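# Flow sketch: each subquery picks the single most recent successful
# pipeline (across the given batch of projects) that produced security
# report artifacts, one for current file types and one for legacy
# job-name based reports; feedback rows with a NULL pipeline_id in that
# pipeline's project are then backfilled with its id.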
override :perform
def perform(project_ids)
filtered_project_ids = ::Project.non_archived.without_deleted.where(id: project_ids).pluck(:id)
update_vulnerability_feedback_with_pipeline_id(pipelines_with_security_reports_subquery(filtered_project_ids))
update_vulnerability_feedback_with_pipeline_id(legacy_pipelines_with_security_reports_subquery(filtered_project_ids))
end
private
def update_vulnerability_feedback_with_pipeline_id(subquery)
update_feedback_pipeline_id_sql = <<~SQL
UPDATE "vulnerability_feedback"
SET pipeline_id = "pipelines_with_reports"."id"
FROM (#{subquery}) AS pipelines_with_reports
WHERE "vulnerability_feedback"."pipeline_id" IS NULL
AND "vulnerability_feedback"."project_id" = "pipelines_with_reports"."project_id";
SQL
# Introduced in GitLab 13.6, will be removed as part of https://gitlab.com/gitlab-org/gitlab/-/issues/331248
::Gitlab::Database.allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/331248') do
connection.execute(update_feedback_pipeline_id_sql)
end
end
def pipelines_with_security_reports_subquery(project_ids)
<<~SQL
SELECT "ci_pipelines"."id", "ci_pipelines"."project_id"
FROM "ci_pipelines"
WHERE ("ci_pipelines"."project_id" in (#{project_ids.join(', ')}))
AND ("ci_pipelines"."status" IN ('success'))
AND (
EXISTS (
SELECT 1
FROM "ci_builds"
WHERE "ci_builds"."type" = 'Ci::Build'
AND (
"ci_builds"."retried" = FALSE
OR "ci_builds"."retried" IS NULL
)
AND (
EXISTS (
SELECT 1
FROM "ci_job_artifacts"
WHERE ("ci_builds"."id" = "ci_job_artifacts"."job_id")
AND "ci_job_artifacts"."file_type" IN (#{SECURITY_REPORT_FILE_TYPES.except(:license_management, :license_scanning).values.join(", ")})
)
)
AND ("ci_pipelines"."id" = "ci_builds"."commit_id")
)
)
ORDER BY "ci_pipelines"."id" DESC
LIMIT 1
SQL
end
def legacy_pipelines_with_security_reports_subquery(project_ids)
<<~SQL
SELECT "ci_pipelines"."id", "ci_pipelines"."project_id"
FROM "ci_pipelines"
INNER JOIN "ci_builds" ON "ci_builds"."commit_id" = "ci_pipelines"."id"
AND "ci_builds"."type" = 'Ci::Build'
AND ("ci_builds"."retried" = FALSE OR "ci_builds"."retried" IS NULL)
INNER JOIN "ci_job_artifacts" ON "ci_job_artifacts"."file_type" IN (#{SECURITY_REPORT_FILE_TYPES.values.join(", ")})
AND "ci_job_artifacts"."job_id" = "ci_builds"."id"
WHERE ("ci_pipelines"."project_id" in (#{project_ids.join(', ')}))
AND ("ci_pipelines"."status" IN ('success'))
AND "ci_builds"."name" IN ('sast', 'secret_detection', 'dependency_scanning', 'container_scanning', 'dast')
ORDER BY "ci_pipelines"."id" DESC
LIMIT 1
SQL
end
def connection
@connection ||= ActiveRecord::Base.connection
end
end
end
end
end
# frozen_string_literal: true
module EE
module Gitlab
module BackgroundMigration
# This class creates/updates those project historical vulnerability statistics
# that haven't yet been created or initialized.
module PopulateVulnerabilityHistoricalStatistics
extend ::Gitlab::Utils::Override
override :perform
def perform(project_ids, retention_period = 90)
project_ids.each do |project_id|
upsert_vulnerability_historical_statistics(project_id, retention_period)
rescue StandardError => e
error_message("Error updating statistics for project #{project_id}: #{e.message}")
end
end
private
MAX_DAYS_IN_SINGLE_QUERY = 10
EMPTY_STATISTIC = {
total: 0,
critical: 0,
high: 0,
medium: 0,
low: 0,
unknown: 0,
info: 0
}.freeze
class Vulnerability < ActiveRecord::Base
self.table_name = 'vulnerabilities'
enum severity: { info: 1, unknown: 2, low: 4, medium: 5, high: 6, critical: 7 }
end
class VulnerabilityHistoricalStatistic < ActiveRecord::Base
self.table_name = 'vulnerability_historical_statistics'
enum letter_grade: { a: 0, b: 1, c: 2, d: 3, f: 4 }
end
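# Letter grades follow letter_grade_for below: f if any critical
# findings remain open on a day, d if any high or unknown, c if any
# medium, b if any low, a otherwise.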
def upsert_vulnerability_historical_statistics(project_id, retention_period)
end_date = Date.today
start_date = end_date - retention_period.days
time_now = Time.current
counts_by_day_and_severity_in_batches(project_id, start_date, end_date, of: MAX_DAYS_IN_SINGLE_QUERY)
.map { |day, statistics| prepare_historical_statistic_attributes(project_id, day, statistics, time_now) }
.then { |statistics| deduplicate_statistics(statistics) }
.then { |statistics| VulnerabilityHistoricalStatistic.insert_all(statistics) }
end
def counts_by_day_and_severity_in_batches(project_id, start_date, end_date, of:)
(start_date...end_date)
.each_slice(of)
.flat_map { |date_range| counts_by_day_and_severity(project_id, date_range.first, date_range.last) }
.group_by(&:day)
end
def counts_by_day_and_severity(project_id, start_date, end_date)
quoted_start_date = ActiveRecord::Base.connection.quote(start_date)
quoted_end_date = ActiveRecord::Base.connection.quote(end_date)
Vulnerability
.where(project_id: project_id)
.select('DATE(calendar.entry) AS day, severity, COUNT(*)')
.from("generate_series(DATE #{quoted_start_date}, DATE #{quoted_end_date}, INTERVAL '1 day') as calendar(entry)")
.joins('INNER JOIN vulnerabilities ON vulnerabilities.created_at <= calendar.entry')
.where('(vulnerabilities.dismissed_at IS NULL OR vulnerabilities.dismissed_at > calendar.entry) AND (vulnerabilities.resolved_at IS NULL OR vulnerabilities.resolved_at > calendar.entry)')
.group(:day, :severity)
end
def prepare_historical_statistic_attributes(project_id, day, statistics, time_now)
severity_counts = statistics.map { |statistic| { statistic.severity.to_sym => statistic.count } }.inject(:merge)
EMPTY_STATISTIC.merge(
date: day,
total: statistics.sum(&:count),
letter_grade: letter_grade_for(severity_counts),
created_at: time_now,
updated_at: time_now,
project_id: project_id,
**severity_counts
)
end
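# deduplicate_statistics keeps only the first day of each run of
# identical counts; e.g. three consecutive days with the same totals
# collapse into the earliest day's row, and a day with different counts
# starts a new run.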
def deduplicate_statistics(statistics)
statistics
.sort_by { |statistic| statistic[:date] }
.slice_when { |statistic_before, statistic| statistic_before.except(:date) != statistic.except(:date) }
.map(&:first)
end
def letter_grade_for(statistic)
if statistic[:critical].to_i > 0
VulnerabilityHistoricalStatistic.letter_grades[:f]
elsif statistic[:high].to_i > 0 || statistic[:unknown].to_i > 0
VulnerabilityHistoricalStatistic.letter_grades[:d]
elsif statistic[:medium].to_i > 0
VulnerabilityHistoricalStatistic.letter_grades[:c]
elsif statistic[:low].to_i > 0
VulnerabilityHistoricalStatistic.letter_grades[:b]
else
VulnerabilityHistoricalStatistic.letter_grades[:a]
end
end
def logger
@logger ||= ::Gitlab::BackgroundMigration::Logger.build
end
def error_message(message)
logger.error(message: "Vulnerability Historical Statistics Migration: #{message}")
end
end
end
end
end
# frozen_string_literal: true
module EE
module Gitlab
module BackgroundMigration
module PruneOrphanedGeoEvents
extend ::Gitlab::Utils::Override
BATCH_SIZE = 50_000
RESCHEDULE_DELAY = 5.minutes
EVENT_TABLES = %w[geo_repository_created_events
geo_repository_updated_events
geo_repository_deleted_events
geo_repository_renamed_events
geo_repositories_changed_events
geo_hashed_storage_migrated_events
geo_hashed_storage_attachments_events
geo_lfs_object_deleted_events
geo_job_artifact_deleted_events].freeze
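# PrunableEvent is mixed into an anonymous model per event table (see
# event_model below); geo_event_foreign_key derives geo_event_log's
# column by dropping the `geo_` prefix and singularizing, e.g.
# 'geo_repository_updated_events' => 'repository_updated_event_id'.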
module PrunableEvent
extend ActiveSupport::Concern
include EachBatch
included do
scope :orphans, -> do
where(
<<-SQL.squish)
NOT EXISTS (
SELECT 1
FROM geo_event_log
WHERE geo_event_log.#{geo_event_foreign_key} = #{table_name}.id
)
SQL
end
end
class_methods do
def geo_event_foreign_key
table_name.singularize.sub(/^geo_/, '') + '_id'
end
def delete_batch_of_orphans!
deleted = where(id: orphans.limit(BATCH_SIZE)).delete_all
vacuum! if deleted > 0
deleted
end
def vacuum!
connection.execute("VACUUM #{table_name}")
rescue ActiveRecord::StatementInvalid => e
# ignore timeout, auto-vacuum will take care of it
raise unless e.message =~ /statement timeout/i
end
end
end
override :perform
def perform(table_name = EVENT_TABLES.first)
return if ::Gitlab::Database.read_only?
deleted_rows = prune_orphaned_rows(table_name)
table_name = next_table(table_name) if deleted_rows == 0
::BackgroundMigrationWorker.perform_in(RESCHEDULE_DELAY, self.class.name.demodulize, table_name) if table_name
end
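# Scheduling sketch: each run prunes one batch from the current table
# and reschedules itself after RESCHEDULE_DELAY; it stays on the same
# table while rows are still being deleted and advances to the next
# EVENT_TABLES entry once a pass deletes zero rows, stopping after the
# last table.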
def prune_orphaned_rows(table)
event_model(table).delete_batch_of_orphans!
end
def event_model(table)
Class.new(ActiveRecord::Base) do
include PrunableEvent
self.table_name = table
end
end
def next_table(table_name)
return if EVENT_TABLES.last == table_name
index = EVENT_TABLES.index(table_name)
return unless index
EVENT_TABLES[index + 1]
end
end
end
end
end
# frozen_string_literal: true
module EE
module Gitlab
module BackgroundMigration
module RemoveDuplicateCsFindings
extend ::Gitlab::Utils::Override
class Finding < ActiveRecord::Base
include ::ShaAttribute
include ::EachBatch
BROKEN_FINGERPRINT_LENGTH = 40
belongs_to :vulnerability, class_name: 'Vulnerability'
self.table_name = 'vulnerability_occurrences'
REPORT_TYPES = {
container_scanning: 2
}.with_indifferent_access.freeze
enum report_type: REPORT_TYPES
sha_attribute :location_fingerprint
end
class Note < ActiveRecord::Base; end
class Vulnerability < ActiveRecord::Base
has_many :findings, class_name: 'Finding', inverse_of: :vulnerability
has_many :notes, class_name: 'Note', foreign_key: 'noteable_id'
def delete_notes
Note.where(project_id: project_id, noteable_type: 'Vulnerability', noteable_id: id).delete_all
end
end
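# Context (inferred from BROKEN_FINGERPRINT_LENGTH and the spec below):
# broken rows stored the 40-character hex digest as-is in the bytea
# column, while their duplicates store the 20-byte binary value that
# ShaAttribute#serialize produces; re-serializing the hex fingerprint
# therefore locates the colliding duplicate.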
override :perform
def perform(start_id, stop_id)
Finding.select(:id, :project_id, :primary_identifier_id, :location_fingerprint, :scanner_id)
.container_scanning
.where(id: start_id..stop_id)
.where("length(location_fingerprint) = ?", Finding::BROKEN_FINGERPRINT_LENGTH)
.each do |finding|
colliding_fingerprint = ::Gitlab::Database::ShaAttribute.new.serialize(finding.location_fingerprint).to_s
duplicated_finding = Finding.container_scanning.where(project_id: finding.project_id,
primary_identifier_id: finding.primary_identifier_id,
scanner_id: finding.scanner_id,
location_fingerprint: colliding_fingerprint).first
next if duplicated_finding.blank?
# we have some findings without vulnerabilities
next if duplicated_finding.vulnerability.nil?
ActiveRecord::Base.transaction do
duplicated_finding.vulnerability.delete_notes
duplicated_finding.vulnerability.delete
duplicated_finding.delete
# the update can be done without violating the unique constraint
# index_vulnerability_occurrences_on_unique_keys: because the model
# declares sha_attribute :location_fingerprint, the value is written in
# the correct binary format
finding.update(location_fingerprint: colliding_fingerprint)
end
end
end
end
end
end
end
# frozen_string_literal: true
module EE
module Gitlab
module BackgroundMigration
module RemoveDuplicatedCsFindingsWithoutVulnerabilityId
extend ::Gitlab::Utils::Override
class Finding < ActiveRecord::Base
include ::ShaAttribute
include ::EachBatch
BROKEN_FINGERPRINT_LENGTH = 40
scope :with_broken_fingerprint, -> { where("length(location_fingerprint) = ?", BROKEN_FINGERPRINT_LENGTH) }
self.table_name = 'vulnerability_occurrences'
REPORT_TYPES = {
container_scanning: 2
}.with_indifferent_access.freeze
enum report_type: REPORT_TYPES
sha_attribute :location_fingerprint
end
override :perform
def perform(start_id, stop_id)
Finding.select(:id, :project_id, :primary_identifier_id, :location_fingerprint, :scanner_id)
.container_scanning
.where(id: start_id..stop_id, vulnerability_id: nil)
.with_broken_fingerprint
.each do |finding|
fixed_fingerprint = ::Gitlab::Database::ShaAttribute.new.serialize(finding.location_fingerprint).to_s
duplicate = Finding.container_scanning
.where(project_id: finding.project_id,
primary_identifier_id: finding.primary_identifier_id,
scanner_id: finding.scanner_id,
location_fingerprint: fixed_fingerprint,
vulnerability_id: nil)
.where.not(id: finding.id).first
next if duplicate.blank?
Finding.transaction do
duplicate.delete
finding.update(location_fingerprint: fixed_fingerprint)
end
end
end
end
end
end
end
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module EE
module Gitlab
module BackgroundMigration
module RemoveInaccessibleEpicTodos
extend ::Gitlab::Utils::Override
class User < ActiveRecord::Base
end
class Todo < ActiveRecord::Base
belongs_to :epic, foreign_key: :target_id
belongs_to :user
end
class Member < ActiveRecord::Base
include FromUnion
self.inheritance_column = :_type_disabled
# backported from ApplicationRecord
def self.cached_column_list
self.column_names.map { |column_name| self.arel_table[column_name] }
end
end
class GroupGroupLink < ActiveRecord::Base
end
class Epic < ActiveRecord::Base
belongs_to :group
def can_read_confidential?(user)
group.max_member_access_for_user(user) >= ::Gitlab::Access::REPORTER
end
end
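# Access sketch: a todo on a confidential epic is kept only when its
# user has at least REPORTER access to the epic's group, computed from
# direct and ancestor memberships unioned with group-share memberships
# whose access_level is capped at the share's group_access (LEAST).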
class Group < ActiveRecord::Base
self.table_name = 'namespaces'
self.inheritance_column = :_type_disabled
def max_member_access_for_user(user)
max_member_access = members_with_parents.where(user_id: user)
.reorder(access_level: :desc)
.first
&.access_level
max_member_access || ::Gitlab::Access::NO_ACCESS
end
def members_with_parents
group_hierarchy_members = Member
.where(source_type: 'Namespace', source_id: source_ids)
.select(*Member.cached_column_list)
Member.from_union([group_hierarchy_members,
members_from_self_and_ancestor_group_shares])
end
# rubocop:disable Metrics/AbcSize
# this is taken from the Group model, so instead of doing additional
# refactoring let's keep it close to the original
def members_from_self_and_ancestor_group_shares
group_group_link_table = GroupGroupLink.arel_table
group_member_table = Member.arel_table
group_group_links_query = GroupGroupLink.where(shared_group_id: source_ids)
cte = ::Gitlab::SQL::CTE.new(:group_group_links_cte, group_group_links_query)
cte_alias = cte.table.alias(GroupGroupLink.table_name)
# Instead of members.access_level, we need to cap that access_level at
# the respective group_group_links.group_access.
member_columns = Member.attribute_names.map do |column_name|
if column_name == 'access_level'
smallest_value_arel([cte_alias[:group_access], group_member_table[:access_level]],
'access_level')
else
group_member_table[column_name]
end
end
Member
.with(cte.to_arel)
.select(*member_columns)
.from([group_member_table, cte.alias_to(group_group_link_table)])
.where(group_member_table[:requested_at].eq(nil))
.where(group_member_table[:source_id].eq(group_group_link_table[:shared_with_group_id]))
.where(group_member_table[:source_type].eq('Namespace'))
end
# rubocop:enable Metrics/AbcSize
def source_ids
return id unless parent_id
::Gitlab::ObjectHierarchy
.new(self.class.where(id: id))
.base_and_ancestors
.reorder(nil).select(:id)
end
def smallest_value_arel(args, column_alias)
Arel::Nodes::As.new(
Arel::Nodes::NamedFunction.new('LEAST', args),
Arel::Nodes::SqlLiteral.new(column_alias))
end
end
override :perform
def perform(start_id, stop_id)
confidential_epic_ids = Epic.where(confidential: true).where(id: start_id..stop_id).ids
epic_todos = Todo
.where(target_type: 'Epic', target_id: confidential_epic_ids)
.includes(:epic, :user)
ids_to_delete = not_readable_epic_todo_ids(epic_todos)
logger.info(message: 'Deleting confidential epic todos', todo_ids: ids_to_delete)
Todo.where(id: ids_to_delete).delete_all
end
private
def not_readable_epic_todo_ids(todos)
todos.map do |todo|
next todo.id unless todo.epic
next if todo.epic.can_read_confidential?(todo.user)
todo.id
end.compact
end
def logger
@logger ||= ::Gitlab::BackgroundMigration::Logger.build
end
end
end
end
end
# frozen_string_literal: true
module EE
module Gitlab
module BackgroundMigration
module RemoveUndefinedVulnerabilityConfidenceLevel
extend ::Gitlab::Utils::Override
class Vulnerability < ActiveRecord::Base
include ::EachBatch
self.table_name = 'vulnerabilities'
CONFIDENCE_LEVELS = {
undefined: 0,
unknown: 2
}.with_indifferent_access.freeze
enum confidence: CONFIDENCE_LEVELS
def self.undefined_confidence
where(confidence: Vulnerability.confidences[:undefined])
end
end
override :perform
def perform(start_id, stop_id)
Vulnerability.undefined_confidence
.where(id: start_id..stop_id)
.update_all(confidence: Vulnerability.confidences[:unknown])
end
end
end
end
end
# frozen_string_literal: true
module EE
module Gitlab
module BackgroundMigration
module UpdateLocationFingerprintForContainerScanningFindings
extend ::Gitlab::Utils::Override
class Finding < ActiveRecord::Base
include ::ShaAttribute
include ::EachBatch
self.table_name = 'vulnerability_occurrences'
REPORT_TYPES = {
container_scanning: 2
}.with_indifferent_access.freeze
enum report_type: REPORT_TYPES
sha_attribute :location_fingerprint
# Copied from Reports::Security::Locations
def calculate_new_fingerprint(image, package_name)
return if image.nil? || package_name.nil?
Digest::SHA1.hexdigest("#{docker_image_name_without_tag(image)}:#{package_name}")
end
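# Example (package name 'musl' is illustrative): for 'alpine:3.7' the
# semver-like tag is kept, giving SHA1('alpine:3.7:musl'); a
# digest-like tag (e.g. 64 hex chars) is not semver-like, so only the
# base name is hashed: SHA1('alpine:musl').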
private
def docker_image_name_without_tag(image)
base_name, version = image.split(':')
return image if version_semver_like?(version)
base_name
end
def version_semver_like?(version)
hash_like = /\A[0-9a-f]{32,128}\z/i
if Gem::Version.correct?(version)
!hash_like.match?(version)
else
false
end
end
end
override :perform
def perform(start_id, stop_id)
Finding.container_scanning
.select(:id, "raw_metadata::json->'location' AS loc")
.where(id: start_id..stop_id)
.each do |finding|
next if finding.loc.nil?
package = finding.loc.dig('dependency', 'package', 'name')
image = finding.loc.dig('image')
new_fingerprint = finding.calculate_new_fingerprint(image, package)
next if new_fingerprint.blank?
begin
finding.update_column(:location_fingerprint, new_fingerprint)
rescue ActiveRecord::RecordNotUnique
::Gitlab::BackgroundMigration::Logger.warn("Duplicate finding found with finding id #{finding.id}")
end
end
end
end
end
end
end
# frozen_string_literal: true
module EE
module Gitlab
module BackgroundMigration
# This migration updates the dismissed_by_id and dismissed_at properties
# of dismissed vulnerability records
module UpdateVulnerabilitiesFromDismissalFeedback
extend ::Gitlab::Utils::Override
VULNERABILITY_DISMISSED_STATE = 2
VULNERABILITY_FEEDBACK_DISMISSAL_TYPE = 0
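# Join sketch: a dismissed vulnerability is linked to its dismissal
# feedback through its occurrence row, matching on project, on report
# type vs feedback category, and on ENCODE(vo.project_fingerprint,
# 'HEX') against vf.project_fingerprint (which is stored as hex text).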
class Project < ActiveRecord::Base
self.table_name = 'projects'
self.inheritance_column = :_type_disabled
end
override :perform
def perform(project_id)
project = Project.find_by(id: project_id)
return unless project
return if project.pending_delete?
update_vulnerability_from_dismissal_feedback(project.id)
end
private
def update_vulnerability_from_dismissal_feedback(project_id)
update_vulnerability_from_dismissal_feedback_sql = <<-SQL
UPDATE vulnerabilities AS v
SET dismissed_by_id = vf.author_id, dismissed_at = vf.created_at
FROM vulnerability_occurrences AS vo, vulnerability_feedback AS vf
WHERE vo.vulnerability_id = v.id
AND v.state = #{VULNERABILITY_DISMISSED_STATE}
AND vo.project_id = vf.project_id
AND ENCODE(vo.project_fingerprint, 'HEX') = vf.project_fingerprint
AND vo.project_id = #{project_id}
AND vo.report_type = vf.category
AND vf.feedback_type = #{VULNERABILITY_FEEDBACK_DISMISSAL_TYPE};
SQL
connection.execute(update_vulnerability_from_dismissal_feedback_sql)
rescue StandardError => e
logger.warn(
message: 'update_vulnerability_from_dismissal_feedback errored out',
project_id: project_id,
error: e.message
)
end
def connection
@connection ||= ActiveRecord::Base.connection
end
def logger
@logger ||= ::Gitlab::BackgroundMigration::Logger.build
end
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
# rubocop: disable RSpec/FactoriesInMigrationSpecs
RSpec.describe Gitlab::BackgroundMigration::MigrateSecurityScans, schema: 20181228175414 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:builds) { table(:ci_builds) }
let(:job_artifacts) { table(:ci_job_artifacts) }
let(:security_scans) { table(:security_scans) }
let(:namespace) { namespaces.create!(name: "foo", path: "bar") }
let(:project) { projects.create!(namespace_id: namespace.id) }
let(:build) { builds.create! }
subject { described_class.new }
describe '#perform' do
context 'when job artifacts and builds are present' do
using RSpec::Parameterized::TableSyntax
where(:scan_type_name, :report_type, :scan_type_number) do
:sast | 5 | 1
:dependency_scanning | 6 | 2
:container_scanning | 7 | 3
:dast | 8 | 4
end
with_them do
let!(:job_artifact) do
job_artifacts.create!(
created_at: 10.minutes.ago,
updated_at: 9.minutes.ago,
project_id: project.id,
job_id: build.id,
file_type: report_type
)
end
it 'creates a new security scan' do
subject.perform(job_artifact.id, job_artifact.id)
scan = Security::Scan.first
expect(scan.build_id).to eq(build.id)
expect(scan.scan_type).to eq(scan_type_name.to_s)
expect(scan.created_at.to_s).to eq(job_artifact.created_at.to_s)
expect(scan.updated_at.to_s).to eq(job_artifact.updated_at.to_s)
end
end
end
context 'job artifacts are not found' do
it 'does not create security scans' do
subject.perform(1, 2)
expect(Security::Scan.count).to eq(0)
end
end
end
context 'security scan has already been saved' do
let!(:job_artifact) { job_artifacts.create!(project_id: project.id, job_id: build.id, file_type: 5) }
before do
security_scans.create!(build_id: build.id, scan_type: 1)
end
it 'does not save a new security scan' do
subject.perform(job_artifact.id, job_artifact.id)
expect(Security::Scan.count).to eq(1)
end
end
context 'job artifacts are not security job artifacts' do
let!(:job_artifact) { job_artifacts.create!(project_id: project.id, job_id: build.id, file_type: 1) }
it 'does not save a new security scan' do
subject.perform(job_artifact.id, job_artifact.id)
expect(Security::Scan.count).to eq(0)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::MoveEpicIssuesAfterEpics, schema: 20181228175414 do
let(:users) { table(:users) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:issues) { table(:issues) }
let(:epics) { table(:epics) }
let(:epic_issues) { table(:epic_issues) }
subject { described_class.new }
describe '#perform' do
let(:epic_params) do
{
title: 'Epic',
title_html: 'Epic',
group_id: group.id,
author_id: user.id
}
end
let(:issue_params) do
{
title: 'Issue',
title_html: 'Issue',
project_id: project.id,
author_id: user.id
}
end
let(:user) { users.create!(name: 'test', email: 'test@example.com', projects_limit: 5) }
let(:group) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
context 'when there are epic_issues present' do
let(:project) { projects.create!(namespace_id: group.id, name: 'foo') }
let(:base_epic) { epics.create!(epic_params.merge(iid: 3, relative_position: 500)) }
let(:issue_1) { issues.create!(issue_params.merge(iid: 1)) }
let(:issue_2) { issues.create!(issue_params.merge(iid: 2)) }
let(:issue_3) { issues.create!(issue_params.merge(iid: 3)) }
let!(:epic_1) { epics.create!(epic_params.merge(iid: 1, relative_position: 100)) }
let!(:epic_2) { epics.create!(epic_params.merge(iid: 2, relative_position: 5000)) }
let!(:epic_issue_1) { epic_issues.create!(issue_id: issue_1.id, epic_id: base_epic.id, relative_position: 400) }
let!(:epic_issue_2) { epic_issues.create!(issue_id: issue_2.id, epic_id: base_epic.id, relative_position: 5010) }
let!(:epic_issue_3) { epic_issues.create!(issue_id: issue_3.id, epic_id: base_epic.id, relative_position: Gitlab::Database::MAX_INT_VALUE - 10) }
before do
subject.perform(epics.first.id, epics.last.id)
end
it 'does not change relative_position of epics' do
expect(base_epic.relative_position).to eq(500)
expect(epic_1.relative_position).to eq(100)
expect(epic_2.relative_position).to eq(5000)
end
it 'moves epic_issues after epics' do
expect(epic_issue_1.reload.relative_position).to be > 5000
expect(epic_issue_2.reload.relative_position).to be > 5000
end
it 'keeps epic_issues order' do
expect(epic_issue_1.reload.relative_position).to be < epic_issue_2.reload.relative_position
end
it 'does not change the relative_position of an epic_issue close to the max value' do
expect(epic_issue_3.reload.relative_position).to eq(Gitlab::Database::MAX_INT_VALUE - 10)
end
end
context 'when there are no epics' do
it 'runs correctly' do
expect(subject.perform(1, 10)).to be_nil
end
end
context 'when there are no epic_issues' do
it 'runs correctly' do
epics.create!(epic_params.merge(iid: 3, relative_position: 500))
expect(subject.perform(1, 10)).to be_zero
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::PopulateAnyApprovalRuleForMergeRequests, schema: 20181228175414 do
let(:namespaces) { table(:namespaces) }
let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
let(:projects) { table(:projects) }
let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
let(:merge_requests) { table(:merge_requests) }
let(:approval_merge_request_rules) { table(:approval_merge_request_rules) }
def create_merge_request(id, params = {})
params.merge!(id: id,
target_project_id: project.id,
target_branch: 'master',
source_project_id: project.id,
source_branch: 'mr name',
title: "mr name#{id}")
merge_requests.create!(params)
end
before do
create_merge_request(2, approvals_before_merge: 2)
# Test filtering rows with empty approvals_before_merge column
create_merge_request(3, approvals_before_merge: nil)
# Test filtering already migrated rows
create_merge_request(4, approvals_before_merge: 3)
approval_merge_request_rules.create!(id: 4,
merge_request_id: 4, approvals_required: 3, rule_type: 4, name: ApprovalRuleLike::ALL_MEMBERS)
# Test filtering MRs with existing rules
create_merge_request(5, approvals_before_merge: 3)
approval_merge_request_rules.create!(id: 5,
merge_request_id: 5, approvals_required: 3, rule_type: 1, name: 'Regular rules')
create_merge_request(6, approvals_before_merge: 5)
# Test filtering rows with zero approvals_before_merge column
create_merge_request(7, approvals_before_merge: 0)
# Test rows with too big approvals_before_merge value
create_merge_request(8, approvals_before_merge: 2**30)
end
describe '#perform' do
it 'creates approval_merge_request_rules rows according to merge_requests' do
expect { subject.perform(1, 8) }.to change(ApprovalMergeRequestRule, :count).by(3)
created_rows = [
{ 'merge_request_id' => 2, 'approvals_required' => 2 },
{ 'merge_request_id' => 6, 'approvals_required' => 5 }
]
existing_rows = [
{ 'merge_request_id' => 4, 'approvals_required' => 3 },
{ 'merge_request_id' => 8, 'approvals_required' => 2**15 - 1 }
]
rows = approval_merge_request_rules.where(rule_type: 4).order(:id).map do |row|
row.attributes.slice('merge_request_id', 'approvals_required')
end
expect(rows).to match_array(created_rows + existing_rows)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::PopulateAnyApprovalRuleForProjects, schema: 20181228175414 do
let(:namespaces) { table(:namespaces) }
let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
let(:projects) { table(:projects) }
let(:approval_project_rules) { table(:approval_project_rules) }
def create_project(id, params = {})
params.merge!(id: id, namespace_id: namespace.id)
projects.create!(params)
end
before do
create_project(2, approvals_before_merge: 2)
# Test filtering rows with zero approvals_before_merge column
create_project(3, approvals_before_merge: 0)
# Test filtering already migrated rows
project_with_any_approver_rule = create_project(4, approvals_before_merge: 3)
approval_project_rules.create!(id: 4,
project_id: project_with_any_approver_rule.id,
approvals_required: 3,
rule_type: ApprovalProjectRule.rule_types[:any_approver],
name: ApprovalRuleLike::ALL_MEMBERS)
# Test filtering projects with existing rules
project_with_regular_rule = create_project(5, approvals_before_merge: 3)
approval_project_rules.create!(id: 5,
project_id: project_with_regular_rule.id,
approvals_required: 3,
rule_type: ApprovalProjectRule.rule_types[:regular],
name: 'Regular rules')
create_project(6, approvals_before_merge: 5)
create_project(7, approvals_before_merge: 2**30)
end
describe '#perform' do
it 'creates approval_project_rules rows according to projects' do
expect { subject.perform(1, 7) }.to change(ApprovalProjectRule, :count).by(3)
created_rows = [
{ 'project_id' => 2, 'approvals_required' => 2 },
{ 'project_id' => 6, 'approvals_required' => 5 }
]
existing_rows = [
{ 'project_id' => 4, 'approvals_required' => 3 },
{ 'project_id' => 7, 'approvals_required' => 2**15 - 1 }
]
rule_type = ApprovalProjectRule.rule_types[:any_approver]
rows = approval_project_rules.where(rule_type: rule_type).order(:id).map do |row|
row.attributes.slice('project_id', 'approvals_required')
end
expect(rows).to match_array(created_rows + existing_rows)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::PopulateVulnerabilityFeedbackPipelineId, schema: 20181228175414 do
let(:users) { table(:users) }
let(:namespaces) { table(:namespaces) }
let(:vulnerability_feedback) { table(:vulnerability_feedback) }
let(:pipelines) { table(:ci_pipelines) }
let(:builds) { table(:ci_builds) }
let(:job_artifacts) { table(:ci_job_artifacts) }
let(:projects) { table(:projects) }
let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
let(:user) { users.create!(name: 'test', email: 'test@example.com', projects_limit: 5) }
let(:timestamp) { Date.current }
def create_pipeline(project_id, finished_at:, status: 'success', with_security_reports: false, with_legacy_security_report: false)
pipeline_params = { project_id: project_id, ref: '1', sha: '1', source: 6, status: status, finished_at: finished_at }
pipeline = pipelines.create!(pipeline_params)
if with_security_reports
build = builds.create!(project_id: project_id, name: 'brakeman', retried: false, commit_id: pipeline.id, type: 'Ci::Build')
job_artifacts.create!(project_id: project_id, file_format: 'raw', file_type: 5, job_id: build.id)
elsif with_legacy_security_report
build = builds.create!(project_id: project_id, name: 'sast', retried: false, commit_id: pipeline.id, type: 'Ci::Build')
job_artifacts.create!(project_id: project_id, file_format: 'raw', file_type: 101, job_id: build.id)
end
pipeline
end
def create_project_with_pipeline
project_params = { namespace_id: namespace.id, name: 'foo' }
project = projects.create!(project_params)
pipeline = create_pipeline(project.id, finished_at: timestamp - 31.days)
feedback_params = { project_id: project.id, author_id: user.id, feedback_type: 'dismissal', category: 'sast' }
vulnerability_feedback.create!(feedback_params.merge(project_fingerprint: SecureRandom.hex, created_at: timestamp - 30.days, pipeline_id: pipeline.id))
vulnerability_feedback.create!(feedback_params.merge(project_fingerprint: SecureRandom.hex, created_at: timestamp - 15.days))
vulnerability_feedback.create!(feedback_params.merge(project_fingerprint: SecureRandom.hex, created_at: timestamp - 5.days))
{ project: project, pipeline: pipeline }
end
let(:project_with_pipeline_1) { create_project_with_pipeline }
let(:project_with_pipeline_2) { create_project_with_pipeline }
let(:project_1) { project_with_pipeline_1[:project] }
let(:project_2) { project_with_pipeline_2[:project] }
let(:pipeline_1) { project_with_pipeline_1[:pipeline] }
let(:pipeline_2) { project_with_pipeline_2[:pipeline] }
describe '#perform' do
context 'when there is a successful pipeline' do
context 'but the pipeline has no security reports' do
it 'does not update pipeline_id' do
subject.perform([project_1.id, project_2.id].sort)
updated_rows = [
{ 'project_id' => project_1.id, 'pipeline_id' => pipeline_1.id, 'created_at' => timestamp - 30.days },
{ 'project_id' => project_1.id, 'pipeline_id' => nil, 'created_at' => timestamp - 15.days },
{ 'project_id' => project_1.id, 'pipeline_id' => nil, 'created_at' => timestamp - 5.days },
{ 'project_id' => project_2.id, 'pipeline_id' => pipeline_2.id, 'created_at' => timestamp - 30.days },
{ 'project_id' => project_2.id, 'pipeline_id' => nil, 'created_at' => timestamp - 15.days },
{ 'project_id' => project_2.id, 'pipeline_id' => nil, 'created_at' => timestamp - 5.days }
]
rows = vulnerability_feedback.order(:project_id, :created_at).map do |row|
row.attributes.slice(*%w(project_id pipeline_id created_at))
end
expect(rows).to match_array(updated_rows)
end
end
context 'and the pipeline has security reports' do
context 'and is not successful' do
let!(:pipeline_3) do
create_pipeline(project_1.id, status: 'failed', finished_at: timestamp - 10.days, with_security_reports: true, with_legacy_security_report: false)
end
it 'does not update pipeline_id' do
subject.perform([project_1.id, project_2.id].sort)
updated_rows = [
{ 'project_id' => project_1.id, 'pipeline_id' => pipeline_1.id, 'created_at' => timestamp - 30.days },
{ 'project_id' => project_1.id, 'pipeline_id' => nil, 'created_at' => timestamp - 15.days },
{ 'project_id' => project_1.id, 'pipeline_id' => nil, 'created_at' => timestamp - 5.days },
{ 'project_id' => project_2.id, 'pipeline_id' => pipeline_2.id, 'created_at' => timestamp - 30.days },
{ 'project_id' => project_2.id, 'pipeline_id' => nil, 'created_at' => timestamp - 15.days },
{ 'project_id' => project_2.id, 'pipeline_id' => nil, 'created_at' => timestamp - 5.days }
]
rows = vulnerability_feedback.order(:project_id, :created_at).map do |row|
row.attributes.slice(*%w(project_id pipeline_id created_at))
end
expect(rows).to match_array(updated_rows)
end
end
context 'and is successful' do
let!(:pipeline_3) do
create_pipeline(project_1.id, finished_at: timestamp - 10.days, with_security_reports: true, with_legacy_security_report: false)
end
it 'does update pipeline_id for feedback' do
subject.perform([project_1.id, project_2.id].sort)
updated_rows = [
{ 'project_id' => project_1.id, 'pipeline_id' => pipeline_1.id, 'created_at' => timestamp - 30.days },
{ 'project_id' => project_1.id, 'pipeline_id' => pipeline_3.id, 'created_at' => timestamp - 15.days },
{ 'project_id' => project_1.id, 'pipeline_id' => pipeline_3.id, 'created_at' => timestamp - 5.days },
{ 'project_id' => project_2.id, 'pipeline_id' => pipeline_2.id, 'created_at' => timestamp - 30.days },
{ 'project_id' => project_2.id, 'pipeline_id' => nil, 'created_at' => timestamp - 15.days },
{ 'project_id' => project_2.id, 'pipeline_id' => nil, 'created_at' => timestamp - 5.days }
]
rows = vulnerability_feedback.order(:project_id, :created_at).map do |row|
row.attributes.slice(*%w(project_id pipeline_id created_at))
end
expect(rows).to match_array(updated_rows)
end
context 'and the pipeline has also legacy security reports' do
let(:pipeline_4) do
create_pipeline(project_1.id, finished_at: timestamp - 5.days, with_security_reports: false, with_legacy_security_report: true)
end
it 'does update pipeline_id for feedback using the non-legacy pipeline_id' do
subject.perform([project_1.id, project_2.id].sort)
updated_rows = [
{ 'project_id' => project_1.id, 'pipeline_id' => pipeline_1.id, 'created_at' => timestamp - 30.days },
{ 'project_id' => project_1.id, 'pipeline_id' => pipeline_3.id, 'created_at' => timestamp - 15.days },
{ 'project_id' => project_1.id, 'pipeline_id' => pipeline_3.id, 'created_at' => timestamp - 5.days },
{ 'project_id' => project_2.id, 'pipeline_id' => pipeline_2.id, 'created_at' => timestamp - 30.days },
{ 'project_id' => project_2.id, 'pipeline_id' => nil, 'created_at' => timestamp - 15.days },
{ 'project_id' => project_2.id, 'pipeline_id' => nil, 'created_at' => timestamp - 5.days }
]
rows = vulnerability_feedback.order(:project_id, :created_at).map do |row|
row.attributes.slice(*%w(project_id pipeline_id created_at))
end
expect(rows).to match_array(updated_rows)
end
end
end
end
context 'and the pipeline has legacy security reports' do
context 'and is not successful' do
let!(:pipeline_4) do
create_pipeline(project_1.id, status: 'failed', finished_at: timestamp - 10.days, with_security_reports: false, with_legacy_security_report: true)
end
it 'does not update pipeline_id' do
subject.perform([project_1.id, project_2.id].sort)
updated_rows = [
{ 'project_id' => project_1.id, 'pipeline_id' => pipeline_1.id, 'created_at' => timestamp - 30.days },
{ 'project_id' => project_1.id, 'pipeline_id' => nil, 'created_at' => timestamp - 15.days },
{ 'project_id' => project_1.id, 'pipeline_id' => nil, 'created_at' => timestamp - 5.days },
{ 'project_id' => project_2.id, 'pipeline_id' => pipeline_2.id, 'created_at' => timestamp - 30.days },
{ 'project_id' => project_2.id, 'pipeline_id' => nil, 'created_at' => timestamp - 15.days },
{ 'project_id' => project_2.id, 'pipeline_id' => nil, 'created_at' => timestamp - 5.days }
]
rows = vulnerability_feedback.order(:project_id, :created_at).map do |row|
row.attributes.slice(*%w(project_id pipeline_id created_at))
end
expect(rows).to match_array(updated_rows)
end
end
context 'and is successful' do
let!(:pipeline_4) do
create_pipeline(project_2.id, finished_at: timestamp - 10.days, with_security_reports: false, with_legacy_security_report: true)
end
it 'does update pipeline_id for feedback' do
subject.perform([project_1.id, project_2.id].sort)
updated_rows = [
{ 'project_id' => project_1.id, 'pipeline_id' => pipeline_1.id, 'created_at' => timestamp - 30.days },
{ 'project_id' => project_1.id, 'pipeline_id' => nil, 'created_at' => timestamp - 15.days },
{ 'project_id' => project_1.id, 'pipeline_id' => nil, 'created_at' => timestamp - 5.days },
{ 'project_id' => project_2.id, 'pipeline_id' => pipeline_2.id, 'created_at' => timestamp - 30.days },
{ 'project_id' => project_2.id, 'pipeline_id' => pipeline_4.id, 'created_at' => timestamp - 15.days },
{ 'project_id' => project_2.id, 'pipeline_id' => pipeline_4.id, 'created_at' => timestamp - 5.days }
]
rows = vulnerability_feedback.order(:project_id, :created_at).map do |row|
row.attributes.slice(*%w(project_id pipeline_id created_at))
end
expect(rows).to match_array(updated_rows)
end
end
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::PopulateVulnerabilityHistoricalStatistics, schema: 20181228175414 do
let(:users) { table(:users) }
let(:namespaces) { table(:namespaces) }
let(:vulnerabilities) { table(:vulnerabilities) }
let(:historical_statistics) { table(:vulnerability_historical_statistics) }
let(:projects) { table(:projects) }
let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
let(:user) { users.create!(name: 'test', email: 'test@example.com', projects_limit: 5) }
def create_project(id)
project_params = {
id: id,
namespace_id: namespace.id,
name: 'foo'
}
project = projects.create!(project_params)
vulnerability_params = { title: 'title', state: 1, confidence: 5, report_type: 2, project_id: project.id, author_id: user.id }
vulnerabilities.create!(vulnerability_params.merge(created_at: 400.days.ago, resolved_at: 380.days.ago, severity: 7))
vulnerabilities.create!(vulnerability_params.merge(created_at: 350.days.ago, resolved_at: 300.days.ago, severity: 6))
vulnerabilities.create!(vulnerability_params.merge(created_at: 80.days.ago, resolved_at: 50.days.ago, severity: 5))
vulnerabilities.create!(vulnerability_params.merge(created_at: 5.days.ago, dismissed_at: Date.current, severity: 7))
vulnerabilities.create!(vulnerability_params.merge(created_at: 5.days.ago, dismissed_at: 1.day.ago, severity: 6))
vulnerabilities.create!(vulnerability_params.merge(created_at: 4.days.ago, resolved_at: 2.days.ago, severity: 7))
end
around do |example|
travel_to(Date.parse('2020-07-28')) { example.run }
end
before do
create_project(1)
create_project(2)
end
describe '#perform' do
context 'when using default retention period' do
it 'creates historical statistic rows according to projects for 90 days', :aggregate_failures do
expect { subject.perform([1, 2]) }.to change(Vulnerabilities::HistoricalStatistic, :count).by(10)
created_rows = [
{ 'letter_grade' => 2, 'project_id' => 1, 'total' => 1, 'critical' => 0, 'high' => 0, 'medium' => 1, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 80.days },
{ 'letter_grade' => 4, 'project_id' => 1, 'total' => 2, 'critical' => 1, 'high' => 1, 'medium' => 0, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 5.days },
{ 'letter_grade' => 4, 'project_id' => 1, 'total' => 3, 'critical' => 2, 'high' => 1, 'medium' => 0, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 4.days },
{ 'letter_grade' => 4, 'project_id' => 1, 'total' => 2, 'critical' => 1, 'high' => 1, 'medium' => 0, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 2.days },
{ 'letter_grade' => 4, 'project_id' => 1, 'total' => 1, 'critical' => 1, 'high' => 0, 'medium' => 0, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 1.day },
{ 'letter_grade' => 2, 'project_id' => 2, 'total' => 1, 'critical' => 0, 'high' => 0, 'medium' => 1, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 80.days },
{ 'letter_grade' => 4, 'project_id' => 2, 'total' => 2, 'critical' => 1, 'high' => 1, 'medium' => 0, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 5.days },
{ 'letter_grade' => 4, 'project_id' => 2, 'total' => 3, 'critical' => 2, 'high' => 1, 'medium' => 0, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 4.days },
{ 'letter_grade' => 4, 'project_id' => 2, 'total' => 2, 'critical' => 1, 'high' => 1, 'medium' => 0, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 2.days },
{ 'letter_grade' => 4, 'project_id' => 2, 'total' => 1, 'critical' => 1, 'high' => 0, 'medium' => 0, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 1.day }
]
rows = historical_statistics.order(:project_id, :date).map do |row|
row.attributes.slice(*%w(letter_grade project_id total critical high medium low unknown info date))
end
expect(rows).to match_array(created_rows)
end
end
context 'when using a provided retention period' do
it 'creates historical statistic rows according to projects for requested period', :aggregate_failures do
expect { subject.perform([1, 2], 365) }.to change(Vulnerabilities::HistoricalStatistic, :count).by(12)
created_rows = [
{ 'letter_grade' => 3, 'project_id' => 1, 'total' => 1, 'critical' => 0, 'high' => 1, 'medium' => 0, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 350.days },
{ 'letter_grade' => 2, 'project_id' => 1, 'total' => 1, 'critical' => 0, 'high' => 0, 'medium' => 1, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 80.days },
{ 'letter_grade' => 4, 'project_id' => 1, 'total' => 2, 'critical' => 1, 'high' => 1, 'medium' => 0, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 5.days },
{ 'letter_grade' => 4, 'project_id' => 1, 'total' => 3, 'critical' => 2, 'high' => 1, 'medium' => 0, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 4.days },
{ 'letter_grade' => 4, 'project_id' => 1, 'total' => 2, 'critical' => 1, 'high' => 1, 'medium' => 0, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 2.days },
{ 'letter_grade' => 4, 'project_id' => 1, 'total' => 1, 'critical' => 1, 'high' => 0, 'medium' => 0, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 1.day },
{ 'letter_grade' => 3, 'project_id' => 2, 'total' => 1, 'critical' => 0, 'high' => 1, 'medium' => 0, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 350.days },
{ 'letter_grade' => 2, 'project_id' => 2, 'total' => 1, 'critical' => 0, 'high' => 0, 'medium' => 1, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 80.days },
{ 'letter_grade' => 4, 'project_id' => 2, 'total' => 2, 'critical' => 1, 'high' => 1, 'medium' => 0, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 5.days },
{ 'letter_grade' => 4, 'project_id' => 2, 'total' => 3, 'critical' => 2, 'high' => 1, 'medium' => 0, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 4.days },
{ 'letter_grade' => 4, 'project_id' => 2, 'total' => 2, 'critical' => 1, 'high' => 1, 'medium' => 0, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 2.days },
{ 'letter_grade' => 4, 'project_id' => 2, 'total' => 1, 'critical' => 1, 'high' => 0, 'medium' => 0, 'low' => 0, 'unknown' => 0, 'info' => 0, 'date' => Date.current - 1.day }
]
rows = historical_statistics.order(:project_id, :date).map do |row|
row.attributes.slice(*%w(letter_grade project_id total critical high medium low unknown info date))
end
expect(rows).to match_array(created_rows)
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::PruneOrphanedGeoEvents, geo: false, schema: 20181228175414 do
let(:event_table_name) { 'geo_repository_updated_events' }
let(:geo_event_log) { table(:geo_event_log) }
let(:geo_updated_events) { table(event_table_name) }
let(:namespace) { table(:namespaces).create!(name: 'foo', path: 'foo') }
let(:project) { table(:projects).create!(name: 'bar', path: 'path/to/bar', namespace_id: namespace.id) }
subject(:background_migration) { described_class.new }
describe 'PrunableEvent' do
subject(:prunable_event) do
Class.new(ActiveRecord::Base) do
include Gitlab::BackgroundMigration::PruneOrphanedGeoEvents::PrunableEvent
self.table_name = 'geo_repository_updated_events'
end
end
describe '.geo_event_foreign_key' do
it 'determines foreign key correctly' do
expect(subject.geo_event_foreign_key).to eq('repository_updated_event_id')
end
end
describe '.delete_batch_of_orphans!' do
it 'vacuums table after deleting rows' do
geo_updated_events.create!(project_id: project.id,
source: 0,
branches_affected: 0,
tags_affected: 0)
expect(subject).to receive(:vacuum!)
subject.delete_batch_of_orphans!
end
end
end
describe '#perform' do
before do
geo_updated_events.create!(project_id: project.id,
source: 0,
branches_affected: 0,
tags_affected: 0)
end
it 'does nothing if the database is read-only' do
allow(Gitlab::Database).to receive(:read_only?).and_return(true)
expect { background_migration.perform(event_table_name) }.not_to change { Geo::RepositoryUpdatedEvent.count }
end
it 'takes the first table if no table is specified' do
expect(subject).to receive(:prune_orphaned_rows).with(described_class::EVENT_TABLES.first).and_call_original
subject.perform
end
it 'deletes orphans' do
expect { background_migration.perform(event_table_name) }.to change { Geo::RepositoryUpdatedEvent.count }.by(-1)
end
it 'reschedules itself with the same table if positive number of rows were pruned' do
allow(subject).to receive(:prune_orphaned_rows).and_return(5)
expect(BackgroundMigrationWorker).to receive(:perform_in).with(5.minutes, described_class.name.demodulize, event_table_name)
subject.perform(event_table_name)
end
it 'reschedules itself with the next table if zero rows were pruned' do
allow(subject).to receive(:prune_orphaned_rows).and_return(0)
expect(BackgroundMigrationWorker).to receive(:perform_in).with(5.minutes, described_class.name.demodulize, 'geo_repository_deleted_events')
subject.perform(event_table_name)
end
end
describe '#prune_orphaned_rows' do
it 'returns the number of pruned rows' do
event_model = spy(:event_model)
allow(event_model).to receive(:delete_batch_of_orphans!).and_return(555)
allow(subject).to receive(:event_model).and_return(event_model)
expect(subject.prune_orphaned_rows(event_table_name)).to eq(555)
end
end
describe '#next_table' do
it 'takes the next table in the array' do
expect(subject.next_table(described_class::EVENT_TABLES.first)).to eq(described_class::EVENT_TABLES.second)
end
it 'stops with the last table' do
expect(subject.next_table(described_class::EVENT_TABLES.last)).to be_nil
end
it 'cycles for EVENT_TABLES.count' do
table_name = 'geo_repository_created_events'
count = 0
loop do
count += 1
table_name = subject.next_table(table_name)
break unless table_name
end
expect(count).to eq(described_class::EVENT_TABLES.count)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::RemoveDuplicateCsFindings, :migration, schema: 20181228175414 do
let(:namespaces) { table(:namespaces) }
let(:notes) { table(:notes) }
let(:group) { namespaces.create!(name: 'foo', path: 'foo') }
let(:projects) { table(:projects) }
let(:findings) { table(:vulnerability_occurrences) }
let(:scanners) { table(:vulnerability_scanners) }
let(:issues) { table(:issues) }
let(:epics) { table(:epics) }
let(:identifiers) { table(:vulnerability_identifiers) }
let(:vulnerabilities) { table(:vulnerabilities) }
let(:issue_links) { table(:vulnerability_issue_links) }
let(:finding_identifiers) { table(:vulnerability_occurrence_identifiers) }
let(:users) { table(:users) }
let!(:epic_1) { epics.create!(iid: 14532, title: 'from issue 1', group_id: group.id, author_id: user.id, title_html: 'any') }
let!(:project) { projects.create!(id: 12058473, namespace_id: group.id, name: 'gitlab', path: 'gitlab') }
let!(:user) { users.create!(id: 13, email: 'author@example.com', username: 'author', projects_limit: 10) }
let!(:scanner) do
scanners.create!(id: 6, project_id: project.id, external_id: 'trivy', name: 'Security Scanner')
end
it 'removes duplicate findings and vulnerabilities' do
ids = [231411, 231412, 231413, 231500, 231600, 231700, 231800]
fingerprints = %w(
6c871440eb9f7618b9aef25e5246acddff6ed7a1
9d1a47927875f1aee1e2b9f16c25a8ff7586f1a6
d7da2cc109c18d890ab239e833524d451cc45246
6c871440eb9f7618b9aef25e5246acddff6ed7a1
9d1a47927875f1aee1e2b9f16c25a8ff7586f1a6
d7da2cc109c18d890ab239e833524d451cc45246
d7da2cc109c18d890ab239e833524d453cd45246
475f029c81fa0a944bc825a44e02617867a4256d
)
expected_fingerprints = %w(
6c871440eb9f7618b9aef25e5246acddff6ed7a1
9d1a47927875f1aee1e2b9f16c25a8ff7586f1a6
d7da2cc109c18d890ab239e833524d451cc45246
d7da2cc109c18d890ab239e833524d453cd45246
475f029c81fa0a944bc825a44e02617867a4256d
)
7.times { |x| identifiers.create!(vulnerability_identifier_params(x, project.id)) }
7.times { vulnerabilities.create!(vulnerability_params(project.id, user.id)) }
vulnerability_ids = vulnerabilities.all.ids
3.times.each { |x| findings.create!(finding_params(x, project.id).merge({ id: ids[x], location_fingerprint: fingerprints[x], vulnerability_id: vulnerability_ids[x] })) }
findings.create!(finding_params(0, project.id).merge({ id: ids[3], location_fingerprint: Gitlab::Database::ShaAttribute.new.serialize(fingerprints[3]).to_s, vulnerability_id: vulnerability_ids[3] }))
findings.create!(finding_params(1, project.id).merge({ id: ids[4], location_fingerprint: Gitlab::Database::ShaAttribute.new.serialize(fingerprints[4]).to_s, vulnerability_id: vulnerability_ids[4] }))
findings.create!(finding_params(2, project.id).merge({ id: ids[5], location_fingerprint: Gitlab::Database::ShaAttribute.new.serialize(fingerprints[5]).to_s, vulnerability_id: vulnerability_ids[5] }))
findings.create!(finding_params(3, project.id).merge({ id: ids[6], location_fingerprint: Gitlab::Database::ShaAttribute.new.serialize(fingerprints[6]).to_s, vulnerability_id: vulnerability_ids[6] }))
findings.create!(finding_params(3, project.id).merge({ id: 100000, location_fingerprint: Gitlab::Database::ShaAttribute.new.serialize(fingerprints[7]).to_s, vulnerability_id: nil }))
7.times { |x| finding_identifiers.create!(occurrence_id: ids[x], identifier_id: x) }
1.upto(5).each { |x| issues.create!(description: '1234', state_id: 1, project_id: project.id, id: x) }
notes.create!(project_id: project.id, noteable_id: vulnerability_ids[4], noteable_type: "Vulnerability", note: "test note", system: true)
1.upto(5).each { |x| issue_links.create!(vulnerability_id: vulnerability_ids[x], issue_id: x) }
expect(finding_identifiers.all.count).to eq(7)
expect(issue_links.all.count).to eq(5)
described_class.new.perform(231411, 231413)
expect(findings.ids).to match_array([100000, 231800, 231412, 231413, 231411])
expect(findings.where(report_type: 2).count).to eq(5)
expect(vulnerabilities.all.count).to eq(4)
expect(notes.all.count).to eq(0)
expect(finding_identifiers.all.count).to eq(4)
expect(issue_links.all.count).to eq(2)
location_fingerprints = findings.pluck(:location_fingerprint).flat_map { |x| Gitlab::Database::ShaAttribute.new.deserialize(x) }
expect(location_fingerprints).to match_array(expected_fingerprints)
end
def vulnerability_identifier_params(id, project_id)
{
id: id,
project_id: project_id,
fingerprint: 'd432c2ad2953e8bd587a3a43b3ce309b5b0154c' + id.to_s,
external_type: 'SECURITY_ID',
external_id: 'SECURITY_0',
name: 'SECURITY_IDENTIFIER 0'
}
end
def vulnerability_params(project_id, user_id)
{
title: 'title',
state: 1,
confidence: 5,
severity: 6,
report_type: 2,
project_id: project_id,
author_id: user_id
}
end
def finding_params(primary_identifier_id, project_id)
uuid = SecureRandom.uuid
{
severity: 0,
confidence: 5,
report_type: 2,
project_id: project_id,
scanner_id: 6,
primary_identifier_id: primary_identifier_id,
project_fingerprint: SecureRandom.hex(20),
location_fingerprint: Digest::SHA1.hexdigest(SecureRandom.hex(10)),
uuid: uuid,
name: "Vulnerability Finding #{uuid}",
metadata_version: '1.3',
raw_metadata: raw_metadata
}
end
def raw_metadata
{ "description" => "The cipher does not provide data integrity update 1",
"message" => "The cipher does not provide data integrity",
"cve" => "818bf5dacb291e15d9e6dc3c5ac32178:CIPHER",
"solution" => "GCM mode introduces an HMAC into the resulting encrypted data, providing integrity of the result.",
"location" => { "file" => "maven/src/main/java/com/gitlab/security_products/tests/App.java", "start_line" => 29, "end_line" => 29, "class" => "com.gitlab.security_products.tests.App", "method" => "insecureCypher" },
"links" => [{ "name" => "Cipher does not check for integrity first?", "url" => "https://crypto.stackexchange.com/questions/31428/pbewithmd5anddes-cipher-does-not-check-for-integrity-first" }],
"assets" => [{ "type" => "postman", "name" => "Test Postman Collection", "url" => "http://localhost/test.collection" }],
"evidence" =>
{ "summary" => "Credit card detected",
"request" => { "headers" => [{ "name" => "Accept", "value" => "*/*" }], "method" => "GET", "url" => "http://goat:8080/WebGoat/logout", "body" => nil },
"response" => { "headers" => [{ "name" => "Content-Length", "value" => "0" }], "reason_phrase" => "OK", "status_code" => 200, "body" => nil },
"source" => { "id" => "assert:Response Body Analysis", "name" => "Response Body Analysis", "url" => "htpp://hostname/documentation" },
"supporting_messages" =>
[{ "name" => "Origional", "request" => { "headers" => [{ "name" => "Accept", "value" => "*/*" }], "method" => "GET", "url" => "http://goat:8080/WebGoat/logout", "body" => "" } },
{ "name" => "Recorded",
"request" => { "headers" => [{ "name" => "Accept", "value" => "*/*" }], "method" => "GET", "url" => "http://goat:8080/WebGoat/logout", "body" => "" },
"response" => { "headers" => [{ "name" => "Content-Length", "value" => "0" }], "reason_phrase" => "OK", "status_code" => 200, "body" => "" } }] } }
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::RemoveDuplicatedCsFindingsWithoutVulnerabilityId, :migration, schema: 20181228175414 do
let(:migration) { 'RemoveDuplicatedCsFindingsWithoutVulnerabilityId' }
let(:namespaces) { table(:namespaces) }
let(:notes) { table(:notes) }
let(:group) { namespaces.create!(name: 'foo', path: 'foo') }
let(:projects) { table(:projects) }
let(:findings) { table(:vulnerability_occurrences) }
let(:scanners) { table(:vulnerability_scanners) }
let(:identifiers) { table(:vulnerability_identifiers) }
let(:finding_identifiers) { table(:vulnerability_occurrence_identifiers) }
let!(:project) { projects.create!(id: 12058473, namespace_id: group.id, name: 'gitlab', path: 'gitlab') }
let!(:scanner) do
scanners.create!(id: 6, project_id: project.id, external_id: 'trivy', name: 'Security Scanner')
end
it 'removes duplicate findings and vulnerabilities' do
allow(::Gitlab).to receive(:com?).and_return(true)
ids = [231411, 231412, 231413, 231500, 231600, 231700, 231800]
fingerprints = %w(
6c871440eb9f7618b9aef25e5246acddff6ed7a1
9d1a47927875f1aee1e2b9f16c25a8ff7586f1a6
d7da2cc109c18d890ab239e833524d451cc45246
6c871440eb9f7618b9aef25e5246acddff6ed7a1
9d1a47927875f1aee1e2b9f16c25a8ff7586f1a6
d7da2cc109c18d890ab239e833524d451cc45246
d7da2cc109c18d890ab239e833524d453cd45246
)
expected_fingerprints = %w(
6c871440eb9f7618b9aef25e5246acddff6ed7a1
9d1a47927875f1aee1e2b9f16c25a8ff7586f1a6
d7da2cc109c18d890ab239e833524d451cc45246
d7da2cc109c18d890ab239e833524d453cd45246
)
7.times.each { |x| identifiers.create!(vulnerability_identifier_params(x, project.id)) }
3.times.each { |x| findings.create!(finding_params(x, project.id).merge({ id: ids[x], location_fingerprint: fingerprints[x], vulnerability_id: nil })) }
findings.create!(finding_params(0, project.id).merge({ id: ids[3], location_fingerprint: Gitlab::Database::ShaAttribute.new.serialize(fingerprints[3]).to_s, vulnerability_id: nil }))
findings.create!(finding_params(1, project.id).merge({ id: ids[4], location_fingerprint: Gitlab::Database::ShaAttribute.new.serialize(fingerprints[4]).to_s, vulnerability_id: nil }))
findings.create!(finding_params(2, project.id).merge({ id: ids[5], location_fingerprint: Gitlab::Database::ShaAttribute.new.serialize(fingerprints[5]).to_s, vulnerability_id: nil }))
findings.create!(finding_params(3, project.id).merge({ id: ids[6], location_fingerprint: Gitlab::Database::ShaAttribute.new.serialize(fingerprints[6]).to_s, vulnerability_id: nil }))
7.times.each { |x| finding_identifiers.create!(occurrence_id: ids[x], identifier_id: x) }
expect(finding_identifiers.all.count).to eq(7)
described_class.new.perform(231411, 231413)
expect(findings.ids).to match_array([231411, 231412, 231413, 231800])
expect(findings.where(report_type: 2).count).to eq(4)
expect(finding_identifiers.all.count).to eq(4)
location_fingerprints = findings.pluck(:location_fingerprint).flat_map { |x| Gitlab::Database::ShaAttribute.new.deserialize(x) }
expect(location_fingerprints).to match_array(expected_fingerprints)
end
def vulnerability_identifier_params(id, project_id)
{
id: id,
project_id: project_id,
fingerprint: 'd432c2ad2953e8bd587a3a43b3ce309b5b0154c' + id.to_s,
external_type: 'SECURITY_ID',
external_id: 'SECURITY_0',
name: 'SECURITY_IDENTIFIER 0'
}
end
def finding_params(primary_identifier_id, project_id)
uuid = SecureRandom.uuid
{
severity: 0,
confidence: 5,
report_type: 2,
project_id: project_id,
scanner_id: 6,
primary_identifier_id: primary_identifier_id,
project_fingerprint: SecureRandom.hex(20),
location_fingerprint: Digest::SHA1.hexdigest(SecureRandom.hex(10)),
uuid: uuid,
name: "Vulnerability Finding #{uuid}",
metadata_version: '1.3',
raw_metadata: raw_metadata
}
end
def raw_metadata
{
"description" => "The cipher does not provide data integrity update 1",
"message" => "The cipher does not provide data integrity",
"cve" => "818bf5dacb291e15d9e6dc3c5ac32178:CIPHER",
"solution" => "GCM mode introduces an HMAC into the resulting encrypted data, providing integrity of the result.",
"location" => {
"file" => "maven/src/main/java/com/gitlab/security_products/tests/App.java",
"start_line" => 29,
"end_line" => 29,
"class" => "com.gitlab.security_products.tests.App",
"method" => "insecureCypher"
},
"links" => [
{
"name" => "Cipher does not check for integrity first?",
"url" => "https://crypto.stackexchange.com/questions/31428/pbewithmd5anddes-cipher-does-not-check-for-integrity-first"
}
],
"assets" => [
{
"type" => "postman",
"name" => "Test Postman Collection",
"url" => "http://localhost/test.collection"
}
],
"evidence" => {
"summary" => "Credit card detected",
"request" => {
"method" => "GET",
"url" => "http://goat:8080/WebGoat/logout",
"body" => nil,
"headers" => [
{
"name" => "Accept",
"value" => "*/*"
}
]
},
"response" => {
"reason_phrase" => "OK",
"status_code" => 200,
"body" => nil,
"headers" => [
{
"name" => "Content-Length",
"value" => "0"
}
]
},
"source" => {
"id" => "assert:Response Body Analysis",
"name" => "Response Body Analysis",
"url" => "htpp://hostname/documentation"
},
"supporting_messages" => [
{
"name" => "Origional",
"request" => {
"method" => "GET",
"url" => "http://goat:8080/WebGoat/logout",
"body" => "",
"headers" => [
{
"name" => "Accept",
"value" => "*/*"
}
]
}
},
{
"name" => "Recorded",
"request" => {
"method" => "GET",
"url" => "http://goat:8080/WebGoat/logout",
"body" => "",
"headers" => [
{
"name" => "Accept",
"value" => "*/*"
}
]
},
"response" => {
"reason_phrase" => "OK",
"status_code" => 200,
"body" => "",
"headers" => [
{
"name" => "Content-Length",
"value" => "0"
}
]
}
}
]
}
}
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::RemoveInaccessibleEpicTodos, schema: 20181228175414 do
include MigrationHelpers::NamespacesHelpers
let(:users) { table(:users) }
let(:todos) { table(:todos) }
let(:epics) { table(:epics) }
let(:members_table) { table(:members) }
let(:group_group_links) { table(:group_group_links) }
let(:author) { users.create!(email: 'author@example.com', projects_limit: 10) }
let(:user) { users.create!(email: 'user@example.com', projects_limit: 10) }
let(:group_root) { create_namespace('root', Gitlab::VisibilityLevel::PUBLIC) }
let(:group_level1) { create_namespace('level1', Gitlab::VisibilityLevel::PUBLIC, parent_id: group_root.id) }
let(:epic_conf1) { epics.create!(iid: 1, title: 'confidential1', title_html: 'confidential1', confidential: true, group_id: group_root.id, author_id: author.id) }
let(:epic_conf2) { epics.create!(iid: 1, title: 'confidential2', title_html: 'confidential2', confidential: true, group_id: group_level1.id, author_id: author.id) }
let(:epic_public1) { epics.create!(iid: 2, title: 'public1', title_html: 'epic_public1', group_id: group_root.id, author_id: author.id) }
let(:epic_public2) { epics.create!(iid: 2, title: 'public1', title_html: 'epic_public2', group_id: group_level1.id, author_id: author.id) }
let!(:todo1) { todos.create!(target_type: 'Epic', target_id: epic_conf1.id, user_id: user.id, author_id: user.id, action: 2, state: 0) }
let!(:todo2) { todos.create!(target_type: 'Epic', target_id: epic_conf2.id, user_id: user.id, author_id: user.id, action: 2, state: 0) }
let!(:todo3) { todos.create!(target_type: 'Epic', target_id: epic_public1.id, user_id: user.id, author_id: user.id, action: 2, state: 0) }
let!(:todo4) { todos.create!(target_type: 'Epic', target_id: epic_public2.id, user_id: user.id, author_id: user.id, action: 2, state: 0) }
describe '#perform' do
subject(:perform) { described_class.new.perform(epics.first.id, epics.last.id) }
def expect_todos(preserved:)
expect { subject }.to change { todos.count }.by(preserved.count - 4)
existing_ids = todos.pluck(:id)
expect(existing_ids).to match_array(preserved)
end
context 'when user is not member of related groups' do
it 'deletes only todos referencing confidential epics' do
expect_todos(preserved: [todo3.id, todo4.id])
end
end
context 'when user is only guest member of related groups' do
let!(:member) do
members_table.create!(user_id: user.id, source_id: group_root.id, source_type: 'Namespace',
type: 'GroupMember', access_level: 10, notification_level: 3)
end
it 'deletes todos referencing confidential epics' do
expect_todos(preserved: [todo3.id, todo4.id])
end
end
context 'when user is member of subgroup' do
let!(:member) do
members_table.create!(user_id: user.id, source_id: group_level1.id, source_type: 'Namespace',
type: 'GroupMember', access_level: 20, notification_level: 3)
end
it 'deletes only epic todos in the root group' do
expect_todos(preserved: [todo2.id, todo3.id, todo4.id])
end
end
context 'when user is member of root group' do
let!(:member) do
members_table.create!(user_id: user.id, source_id: group_root.id, source_type: 'Namespace',
type: 'GroupMember', access_level: 20, notification_level: 3)
end
it 'does not delete any todos' do
expect_todos(preserved: [todo1.id, todo2.id, todo3.id, todo4.id])
end
end
context 'when user is only guest on root group' do
let!(:root_member) do
members_table.create!(user_id: user.id, source_id: group_root.id, source_type: 'Namespace',
type: 'GroupMember', access_level: 10, notification_level: 3)
end
let!(:subgroup_member) do
members_table.create!(user_id: user.id, source_id: group_level1.id, source_type: 'Namespace',
type: 'GroupMember', access_level: 20, notification_level: 3)
end
it 'deletes only root confidential epic todo' do
expect_todos(preserved: [todo2.id, todo3.id, todo4.id])
end
end
context 'when root group is shared with other group' do
let!(:other_group) { create_namespace('other_group', Gitlab::VisibilityLevel::PRIVATE) }
let!(:member) do
members_table.create!(user_id: user.id, source_id: other_group.id, source_type: 'Namespace',
type: 'GroupMember', access_level: 20, notification_level: 3)
end
let!(:group_link) do
group_group_links.create!(shared_group_id: group_root.id,
shared_with_group_id: other_group.id, group_access: 20)
end
it 'does not delete any todos' do
expect_todos(preserved: [todo1.id, todo2.id, todo3.id, todo4.id])
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::RemoveUndefinedVulnerabilityConfidenceLevel, :migration, schema: 20181228175414 do
let(:vulnerabilities) { table(:vulnerabilities) }
let(:identifiers) { table(:vulnerability_identifiers) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:users) { table(:users) }
it 'updates undefined confidence level to unknown' do
namespace = namespaces.create!(name: 'gitlab-org', path: 'gitlab-org')
projects.create!(id: 123, namespace_id: namespace.id, name: 'gitlab', path: 'gitlab')
users.create!(id: 13, email: 'author@example.com', notification_email: 'author@example.com', name: 'author', username: 'author', projects_limit: 10, state: 'active')
vul1 = vulnerabilities.create!(vuln_params)
vulnerabilities.create!(vuln_params)
vul3 = vulnerabilities.create!(vuln_params.merge(confidence: 2))
expect(vulnerabilities.where(confidence: 2).count).to eq(1)
expect(vulnerabilities.where(severity: 5).count).to eq(3)
described_class.new.perform(vul1.id, vul3.id)
expect(vulnerabilities.where(confidence: 2).count).to eq(3)
end
def vuln_params
{
title: 'title',
state: 1,
severity: 5,
confidence: 0,
report_type: 2,
project_id: 123,
author_id: 13
}
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::UpdateLocationFingerprintForContainerScanningFindings, :migration, schema: 20181228175414 do
let(:namespaces) { table(:namespaces) }
let(:group) { namespaces.create!(name: 'foo', path: 'foo') }
let(:projects) { table(:projects) }
let(:findings) { table(:vulnerability_occurrences) }
let(:scanners) { table(:vulnerability_scanners) }
let(:identifiers) { table(:vulnerability_identifiers) }
let!(:project) { projects.create!(id: 123, namespace_id: group.id, name: 'gitlab', path: 'gitlab') }
let!(:scanner) do
scanners.create!(id: 6, project_id: project.id, external_id: 'trivy', name: 'Security Scanner')
end
it 'updates location fingerprint' do
raw_metadata = [
"{ \"location\":{\"dependency\":{\"package\":{\"name\":\"apparmor\"},\"version\":\"2.10.95-0ubuntu2.11\"},\"operating_system\":\"ubuntu:16.04\",\"image\":\"registry.staging.gitlab.com/gitlab/alpine-ruby2/master:49dda736b6386592f7dd0367bcdd260cb84edfa8\"} }",
"{ \"location\":{\"dependency\":{\"package\":{\"name\":\"glibc\"},\"version\":\"2.10.95-0ubuntu2.11\"},\"operating_system\":\"ubuntu:16.04\",\"image\":\"registry.staging.gitlab.com/gitlab/docker/master:2.1\"} }",
"{ \"location\":{\"dependency\":{\"package\":{\"name\":\"apt\"},\"version\":\"2.10.95-0ubuntu2.11\"},\"operating_system\":\"ubuntu:16.04\",\"image\":\"registry.staging.gitlab.com/gitlab/gitlab/master:49dda73\"} }"
]
new_fingerprints = %w(6c871440eb9f7618b9aef25e5246acddff6ed7a1 9d1a47927875f1aee1e2b9f16c25a8ff7586f1a6 d7da2cc109c18d890ab239e833524d451cc45246)
create_identifier(3)
vul1 = findings.create!(finding_params(1).merge({ raw_metadata: raw_metadata[0] }))
findings.create!(finding_params(2).merge({ raw_metadata: raw_metadata[1] }))
vul3 = findings.create!(finding_params(3).merge({ raw_metadata: raw_metadata[2] }))
expect(findings.where(report_type: 2).count).to eq(3)
described_class.new.perform(vul1.id, vul3.id)
location_fingerprints = findings.pluck(:location_fingerprint).flat_map { |x| Gitlab::Database::ShaAttribute.new.deserialize(x) }
expect(location_fingerprints).to match_array(new_fingerprints)
end
it 'updates the rest when there is a collision' do
allow(::Gitlab::BackgroundMigration::Logger).to receive(:warn).with(any_args).and_call_original
raw_metadata = [
"{ \"location\":{\"dependency\":{\"package\":{\"name\":\"apparmor\"},\"version\":\"2.10.95-0ubuntu2.11\"},\"operating_system\":\"ubuntu:16.04\",\"image\":\"registry.staging.gitlab.com/gitlab/alpine-ruby2/master:49dda736b6386592f7dd0367bcdd260cb84edfa8\"} }",
"{ \"location\":{\"dependency\":{\"package\":{\"name\":\"glibc\"},\"version\":\"2.10.95-0ubuntu2.11\"},\"operating_system\":\"ubuntu:16.04\",\"image\":\"registry.staging.gitlab.com/gitlab/docker/master:2.1\"} }",
"{ \"location\":{\"dependency\":{\"package\":{\"name\":\"apt\"},\"version\":\"2.10.95-0ubuntu2.11\"},\"operating_system\":\"ubuntu:16.04\",\"image\":\"registry.staging.gitlab.com/gitlab/gitlab/master:49dda73\"} }"
]
new_fingerprints = %w(74657374 6c871440eb9f7618b9aef25e5246acddff6ed7a1 9d1a47927875f1aee1e2b9f16c25a8ff7586f1a6 d7da2cc109c18d890ab239e833524d451cc45246)
create_identifier(3)
# existing data in the db
vul1 = findings.create!(finding_params(1).merge({ raw_metadata: raw_metadata[0], location_fingerprint: '36633837313434306562396637363138623961656632356535323436616364646666366564376131' }))
findings.create!(finding_params(1).merge({ raw_metadata: raw_metadata[0], location_fingerprint: 'test' }))
findings.create!(finding_params(2).merge({ raw_metadata: raw_metadata[1] }))
vul3 = findings.create!(finding_params(3).merge({ raw_metadata: raw_metadata[2] }))
expect(findings.where(report_type: 2).count).to eq(4)
described_class.new.perform(vul1.id, vul3.id)
expect(::Gitlab::BackgroundMigration::Logger).to have_received(:warn).with(any_args)
location_fingerprints = findings.pluck(:location_fingerprint).flat_map { |x| Gitlab::Database::ShaAttribute.new.deserialize(x) }
expect(location_fingerprints).to match_array(new_fingerprints)
end
def create_identifier(number_of)
(1..number_of).each do |identifier_id|
identifiers.create!(id: identifier_id,
project_id: 123,
fingerprint: 'd432c2ad2953e8bd587a3a43b3ce309b5b0154c' + identifier_id.to_s,
external_type: 'SECURITY_ID',
external_id: 'SECURITY_0',
name: 'SECURITY_IDENTIFIER 0')
end
end
def finding_params(primary_identifier_id)
uuid = SecureRandom.uuid
{
severity: 0,
confidence: 5,
report_type: 2,
project_id: 123,
scanner_id: 6,
primary_identifier_id: primary_identifier_id,
project_fingerprint: SecureRandom.hex(20),
location_fingerprint: Digest::SHA1.hexdigest(SecureRandom.hex(10)),
uuid: uuid,
name: "Vulnerability Finding #{uuid}",
metadata_version: '1.3',
raw_metadata: raw_metadata
}
end
def raw_metadata
{ "description" => "The cipher does not provide data integrity update 1",
"message" => "The cipher does not provide data integrity",
"cve" => "818bf5dacb291e15d9e6dc3c5ac32178:CIPHER",
"solution" => "GCM mode introduces an HMAC into the resulting encrypted data, providing integrity of the result.",
"location" => { "file" => "maven/src/main/java/com/gitlab/security_products/tests/App.java", "start_line" => 29, "end_line" => 29, "class" => "com.gitlab.security_products.tests.App", "method" => "insecureCypher" },
"links" => [{ "name" => "Cipher does not check for integrity first?", "url" => "https://crypto.stackexchange.com/questions/31428/pbewithmd5anddes-cipher-does-not-check-for-integrity-first" }],
"assets" => [{ "type" => "postman", "name" => "Test Postman Collection", "url" => "http://localhost/test.collection" }],
"evidence" =>
{ "summary" => "Credit card detected",
"request" => { "headers" => [{ "name" => "Accept", "value" => "*/*" }], "method" => "GET", "url" => "http://goat:8080/WebGoat/logout", "body" => nil },
"response" => { "headers" => [{ "name" => "Content-Length", "value" => "0" }], "reason_phrase" => "OK", "status_code" => 200, "body" => nil },
"source" => { "id" => "assert:Response Body Analysis", "name" => "Response Body Analysis", "url" => "htpp://hostname/documentation" },
"supporting_messages" =>
[{ "name" => "Origional", "request" => { "headers" => [{ "name" => "Accept", "value" => "*/*" }], "method" => "GET", "url" => "http://goat:8080/WebGoat/logout", "body" => "" } },
{ "name" => "Recorded",
"request" => { "headers" => [{ "name" => "Accept", "value" => "*/*" }], "method" => "GET", "url" => "http://goat:8080/WebGoat/logout", "body" => "" },
"response" => { "headers" => [{ "name" => "Content-Length", "value" => "0" }], "reason_phrase" => "OK", "status_code" => 200, "body" => "" } }] } }
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::UpdateVulnerabilitiesFromDismissalFeedback, :migration, schema: 20181228175414 do
let(:users) { table(:users) }
let(:projects) { table(:projects) }
let(:vulnerabilities) { table(:vulnerabilities) }
let(:pipelines) { table(:ci_pipelines) }
let(:vulnerability_occurrences) { table(:vulnerability_occurrences) }
let(:scanners) { table(:vulnerability_scanners) }
let(:identifiers) { table(:vulnerability_identifiers) }
let(:feedback) { table(:vulnerability_feedback) }
let(:namespaces) { table(:namespaces) }
let(:severity) { ::Enums::Vulnerability.severity_levels[:unknown] }
let(:confidence) { ::Enums::Vulnerability.confidence_levels[:medium] }
let(:report_type) { ::Enums::Vulnerability.report_types[:sast] }
let!(:user) { users.create!(email: 'author@example.com', username: 'author', projects_limit: 10) }
let!(:project) { projects.create!(namespace_id: namespace.id, name: 'gitlab', path: 'gitlab') }
let(:namespace) do
namespaces.create!(name: 'namespace', path: '/path', description: 'description')
end
let(:scanner) do
scanners.create!(project_id: project.id, external_id: 'trivy', name: 'Security Scanner')
end
let(:identifier) do
identifiers.create!(project_id: project.id,
fingerprint: 'd432c2ad2953e8bd587a3a43b3ce309b5b0154c7',
external_type: 'SECURITY_ID',
external_id: 'SECURITY_0',
name: 'SECURITY_IDENTIFIER 0')
end
context 'vulnerability has been dismissed' do
let!(:vulnerability) { vulnerabilities.create!(vuln_params) }
let!(:pipeline) { pipelines.create!(project_id: project.id, ref: 'master', sha: 'adf43c3a', status: :success, user_id: user.id) }
let!(:vulnerability_occurrence) do
vulnerability_occurrences.create!(
report_type: vulnerability.report_type, name: 'finding_1',
primary_identifier_id: identifier.id, uuid: 'abc', project_fingerprint: 'abc123',
location_fingerprint: 'abc456', project_id: project.id, scanner_id: scanner.id, severity: severity,
confidence: confidence, metadata_version: 'sast:1.0', raw_metadata: '{}', vulnerability_id: vulnerability.id)
end
let!(:dismiss_feedback) do
feedback.create!(category: vulnerability_occurrence.report_type, feedback_type: 0,
project_id: project.id, project_fingerprint: vulnerability_occurrence.project_fingerprint.unpack1('H*'),
author_id: user.id)
end
it 'vulnerability should now have a dismissed_by_id' do
expect(vulnerability.dismissed_by_id).to eq(nil)
expect { described_class.new.perform(project.id) }
.to change { vulnerability.reload.dismissed_by_id }
.from(nil)
.to(dismiss_feedback.author_id)
end
it 'vulnerability should now have a dismissed_at' do
expect(vulnerability.dismissed_at).to eq(nil)
expect { described_class.new.perform(project.id) }
.to change { vulnerability.reload.dismissed_at }
.from(nil)
.to(dismiss_feedback.reload.created_at)
end
context 'project is set to be deleted' do
let!(:project) { projects.create!(namespace_id: namespace.id, name: 'gitlab', path: 'gitlab', pending_delete: true) }
it 'vulnerability dismissed_by_id should remain nil' do
expect(vulnerability.dismissed_by_id).to eq(nil)
expect { described_class.new.perform(project.id) }.not_to change { vulnerability.reload.dismissed_by_id }.from(nil)
end
it 'vulnerability dismissed_at should remain nil' do
expect(vulnerability.dismissed_at).to eq(nil)
expect { described_class.new.perform(project.id) }.not_to change { vulnerability.reload.dismissed_at }.from(nil)
end
end
end
context 'has not been dismissed' do
let!(:vulnerability) { vulnerabilities.create!(vuln_params.merge({ state: 1 })) }
it 'vulnerability should not have a dismissed_by_id' do
expect(vulnerability.dismissed_by_id).to be_nil
expect { described_class.new.perform(project.id) }.not_to change { vulnerability.reload.dismissed_by_id }.from(nil)
end
it 'vulnerability should not have a dismissed_at' do
expect(vulnerability.dismissed_at).to be_nil
expect { described_class.new.perform(project.id) }.not_to change { vulnerability.reload.dismissed_at }.from(nil)
end
end
def vuln_params
{
title: 'title',
state: described_class::VULNERABILITY_DISMISSED_STATE,
severity: severity,
confidence: confidence,
report_type: report_type,
project_id: project.id,
author_id: user.id
}
end
end
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
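# Backfills merge_request_diffs.commits_count with a correlated subquery
# that counts the matching merge_request_diff_commits rows, touching only
# diffs whose commits_count is still NULL.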
class AddMergeRequestDiffCommitsCount
class MergeRequestDiff < ActiveRecord::Base
self.table_name = 'merge_request_diffs'
end
def perform(start_id, stop_id)
Gitlab::AppLogger.info("Setting commits_count for merge request diffs: #{start_id} - #{stop_id}")
update = '
commits_count = (
SELECT count(*)
FROM merge_request_diff_commits
WHERE merge_request_diffs.id = merge_request_diff_commits.merge_request_diff_id
)'.squish
MergeRequestDiff.where(id: start_id..stop_id).where(commits_count: nil).update_all(update)
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Compare all current rules to project rules
class AddModifiedToApprovalMergeRequestRule
# Stubbed class to access the Group table
class Group < ActiveRecord::Base
self.table_name = 'namespaces'
self.inheritance_column = :_type_disabled
end
# Stubbed class to access the ApprovalMergeRequestRule table
class ApprovalMergeRequestRule < ActiveRecord::Base
self.table_name = 'approval_merge_request_rules'
has_one :approval_merge_request_rule_source, class_name: 'AddModifiedToApprovalMergeRequestRule::ApprovalMergeRequestRuleSource'
has_one :approval_project_rule, through: :approval_merge_request_rule_source
has_and_belongs_to_many :groups,
class_name: 'AddModifiedToApprovalMergeRequestRule::Group', join_table: "#{self.table_name}_groups"
end
# Stubbed class to access the ApprovalProjectRule table
class ApprovalProjectRule < ActiveRecord::Base
self.table_name = 'approval_project_rules'
has_many :approval_merge_request_rule_sources, class_name: 'AddModifiedToApprovalMergeRequestRule::ApprovalMergeRequestRuleSource'
has_and_belongs_to_many :groups,
class_name: 'AddModifiedToApprovalMergeRequestRule::Group', join_table: "#{self.table_name}_groups"
end
# Stubbed class to access the ApprovalMergeRequestRuleSource table
class ApprovalMergeRequestRuleSource < ActiveRecord::Base
self.table_name = 'approval_merge_request_rule_sources'
belongs_to :approval_merge_request_rule, class_name: 'AddModifiedToApprovalMergeRequestRule::ApprovalMergeRequestRule'
belongs_to :approval_project_rule, class_name: 'AddModifiedToApprovalMergeRequestRule::ApprovalProjectRule'
end
def perform(start_id, stop_id)
approval_merge_requests_rules = ApprovalMergeRequestRule
.joins(:groups, :approval_merge_request_rule_source)
.where(id: start_id..stop_id)
.pluck(
'approval_merge_request_rule_sources.id as ars_id',
'approval_merge_request_rules_groups.id as amrg_id'
)
approval_project_rules = ApprovalProjectRule
.joins(:groups, approval_merge_request_rule_sources: :approval_merge_request_rule)
.where(approval_merge_request_rules: { id: start_id..stop_id })
.pluck(
'approval_merge_request_rule_sources.id as ars_id',
'approval_project_rules_groups.id as apg_id'
)
different_names_or_approval_sources = ApprovalMergeRequestRule.joins(:approval_project_rule, :approval_merge_request_rule_source)
.where(id: start_id..stop_id)
.where('approval_merge_request_rules.name != approval_project_rules.name OR ' \
'approval_merge_request_rules.approvals_required != approval_project_rules.approvals_required')
.pluck('approval_merge_request_rule_sources.id as ars_id')
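# Set#^ is the symmetric difference: it keeps tuples present in only one
# of the two result sets above, and the first element of each tuple is the
# approval_merge_request_rule_sources id used to look the source up below.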
intersected_set = approval_merge_requests_rules.to_set ^ approval_project_rules.to_set
source_ids = intersected_set.collect { |rule| rule[0] }.uniq
rule_sources = ApprovalMergeRequestRuleSource.where(id: source_ids + different_names_or_approval_sources)
changed_merge_request_rules = ApprovalMergeRequestRule.where(id: rule_sources.select(:approval_merge_request_rule_id))
changed_merge_request_rules.update_all(modified_from_project_rule: true)
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Backfill deployment_clusters for a range of deployments
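# Re-running a batch is safe: ON CONFLICT DO NOTHING skips deployments
# that already have a deployment_clusters row.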
class BackfillDeploymentClustersFromDeployments
def perform(start_id, end_id)
ActiveRecord::Base.connection.execute <<~SQL
INSERT INTO deployment_clusters (deployment_id, cluster_id)
SELECT deployments.id, deployments.cluster_id
FROM deployments
WHERE deployments.cluster_id IS NOT NULL
AND deployments.id BETWEEN #{start_id} AND #{end_id}
ON CONFLICT DO NOTHING
SQL
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# BackfillEnvironmentIdDeploymentMergeRequests deletes duplicates
# from deployment_merge_requests table and backfills environment_id
class BackfillEnvironmentIdDeploymentMergeRequests
def perform(_start_mr_id, _stop_mr_id)
# no-op
# Background migration removed due to
# https://gitlab.com/gitlab-org/gitlab/-/issues/217191
end
def backfill_range(start_mr_id, stop_mr_id)
start_mr_id = Integer(start_mr_id)
stop_mr_id = Integer(stop_mr_id)
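# ROW_NUMBER() numbers rows sharing (merge_request_id, environment_id)
# by deployment id; every row with rnum > 1 is a duplicate and is deleted,
# keeping only the earliest deployment per pair.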
ActiveRecord::Base.connection.execute(<<~SQL)
DELETE FROM deployment_merge_requests
WHERE (deployment_id, merge_request_id) in (
SELECT t.deployment_id, t.merge_request_id FROM (
SELECT mrd.merge_request_id, mrd.deployment_id, ROW_NUMBER() OVER w AS rnum
FROM deployment_merge_requests as mrd
INNER JOIN "deployments" ON "deployments"."id" = "mrd"."deployment_id"
WHERE mrd.merge_request_id BETWEEN #{start_mr_id} AND #{stop_mr_id}
WINDOW w AS (
PARTITION BY merge_request_id, deployments.environment_id
ORDER BY deployments.id
)
) t
WHERE t.rnum > 1
);
SQL
ActiveRecord::Base.connection.execute(<<~SQL)
UPDATE deployment_merge_requests
SET environment_id = deployments.environment_id
FROM deployments
WHERE deployments.id = "deployment_merge_requests".deployment_id
AND "deployment_merge_requests".environment_id IS NULL
AND "deployment_merge_requests".merge_request_id BETWEEN #{start_mr_id} AND #{stop_mr_id}
SQL
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Backfill merge request cleanup schedules of closed/merged merge requests
# without any corresponding records.
class BackfillMergeRequestCleanupSchedules
# Model used for migration added in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/46782.
class MergeRequest < ActiveRecord::Base
include EachBatch
self.table_name = 'merge_requests'
def self.eligible
where('merge_requests.state_id IN (2, 3)')
end
end
def perform(start_id, end_id)
eligible_mrs = MergeRequest.eligible.where(id: start_id..end_id)
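# Cleanup is scheduled 14 days after the MR was merged, falling back to
# its latest close time and then to updated_at when metrics are missing.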
scheduled_at_column = "COALESCE(metrics.merged_at, COALESCE(metrics.latest_closed_at, merge_requests.updated_at)) + interval '14 days'"
query =
eligible_mrs
.select("merge_requests.id, #{scheduled_at_column}, NOW(), NOW()")
.joins('LEFT JOIN merge_request_metrics metrics ON metrics.merge_request_id = merge_requests.id')
result = ActiveRecord::Base.connection.execute <<~SQL
INSERT INTO merge_request_cleanup_schedules (merge_request_id, scheduled_at, created_at, updated_at)
#{query.to_sql}
ON CONFLICT (merge_request_id) DO NOTHING;
SQL
::Gitlab::BackgroundMigration::Logger.info(
message: 'Backfilled merge_request_cleanup_schedules records',
count: result.cmd_tuples
)
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Backfill namespace_settings for a range of namespaces
class BackfillNamespaceSettings
def perform(start_id, end_id)
ActiveRecord::Base.connection.execute <<~SQL
INSERT INTO namespace_settings (namespace_id, created_at, updated_at)
SELECT namespaces.id, now(), now()
FROM namespaces
WHERE namespaces.id BETWEEN #{start_id} AND #{end_id}
ON CONFLICT (namespace_id) DO NOTHING;
SQL
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Backfill project_settings for a range of projects
class BackfillProjectSettings
def perform(start_id, end_id)
ActiveRecord::Base.connection.execute <<~SQL
INSERT INTO project_settings (project_id, created_at, updated_at)
SELECT projects.id, now(), now()
FROM projects
WHERE projects.id BETWEEN #{start_id} AND #{end_id}
ON CONFLICT (project_id) DO NOTHING;
SQL
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Class that will backfill project_settings.push_rule_id
# for each existing push rule
class BackfillPushRulesIdInProjects
# Temporary AR model for project settings
class ProjectSetting < ActiveRecord::Base
self.table_name = 'project_settings'
end
def perform(start_id, stop_id)
ProjectSetting.connection.execute(<<~SQL)
UPDATE project_settings ps1
SET push_rule_id = pr.id
FROM project_settings ps2
INNER JOIN push_rules pr
ON ps2.project_id = pr.project_id
WHERE pr.is_sample = false
AND pr.id BETWEEN #{start_id} AND #{stop_id}
AND ps1.project_id = ps2.project_id
SQL
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This migration updates discussion ids for epics that were promoted from issues, so that the
# discussion id on the epic differs from the discussion id on the original issue. Identical ids
# caused problems when replying to epic discussions: the discussion was identified as belonging
# to an issue and the reply failed due to a missing project_id.
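# For example (hypothetical id), a note with discussion_id 'a1b2c3d4e5'
# gets MD5('a1b2c3d4e5') concatenated with 'a1b2c3d4' as its new id:
# deterministic per discussion, so notes in the same thread stay grouped,
# while no longer colliding with the issue's discussion id.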
class FixPromotedEpicsDiscussionIds
# notes model to iterate through the notes to be updated
class Note < ActiveRecord::Base
self.table_name = 'notes'
self.inheritance_column = :_type_disabled
end
def perform(discussion_ids)
Note.where(noteable_type: 'Epic')
.where(discussion_id: discussion_ids)
.update_all("discussion_id=MD5(discussion_id)||substring(discussion_id from 1 for 8)")
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This migration fixes namespaces.name for all user namespaces that have names
# that aren't equal to the user's name.
# Then it uses the updated names of the namespaces to update the associated routes
# For more info see https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/23272
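# For example (hypothetical data), a user renamed to 'Alice Smith' whose
# namespace name is still 'alice' has the namespace name, and the matching
# route name, updated to 'Alice Smith'.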
class FixUserNamespaceNames
def perform(from_id, to_id)
fix_namespace_names(from_id, to_id)
fix_namespace_route_names(from_id, to_id)
end
def fix_namespace_names(from_id, to_id)
ActiveRecord::Base.connection.execute <<~UPDATE_NAMESPACES
WITH namespaces_to_update AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} (
SELECT
namespaces.id,
users.name AS correct_name
FROM
namespaces
INNER JOIN users ON namespaces.owner_id = users.id
WHERE
namespaces.type IS NULL
AND namespaces.id BETWEEN #{from_id} AND #{to_id}
AND namespaces.name != users.name
)
UPDATE
namespaces
SET
name = correct_name
FROM
namespaces_to_update
WHERE
namespaces.id = namespaces_to_update.id
UPDATE_NAMESPACES
end
def fix_namespace_route_names(from_id, to_id)
ActiveRecord::Base.connection.execute <<~ROUTES_UPDATE
WITH routes_to_update AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} (
SELECT
routes.id,
users.name AS correct_name
FROM
routes
INNER JOIN namespaces ON routes.source_id = namespaces.id
INNER JOIN users ON namespaces.owner_id = users.id
WHERE
namespaces.type IS NULL
AND routes.source_type = 'Namespace'
AND namespaces.id BETWEEN #{from_id} AND #{to_id}
AND (routes.name != users.name OR routes.name IS NULL)
)
UPDATE
routes
SET
name = correct_name
FROM
routes_to_update
WHERE
routes_to_update.id = routes.id
ROUTES_UPDATE
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This migration fixes routes.name for all user projects that have names
# that don't start with the user's name.
# For more info see https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/23272
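# For example (hypothetical data), the route for user 'Alice Smith' and
# project 'tanuki' gets the name 'Alice Smith / tanuki'.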
class FixUserProjectRouteNames
def perform(from_id, to_id)
ActiveRecord::Base.connection.execute <<~ROUTES_UPDATE
WITH routes_to_update AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} (
SELECT
routes.id,
users.name || ' / ' || projects.name AS correct_name
FROM
routes
INNER JOIN projects ON routes.source_id = projects.id
INNER JOIN namespaces ON projects.namespace_id = namespaces.id
INNER JOIN users ON namespaces.owner_id = users.id
WHERE
routes.source_type = 'Project'
AND routes.id BETWEEN #{from_id} AND #{to_id}
AND namespaces.type IS NULL
AND (routes.name NOT LIKE users.name || '%' OR routes.name IS NULL)
)
UPDATE
routes
SET
name = routes_to_update.correct_name
FROM
routes_to_update
WHERE
routes_to_update.id = routes.id
ROUTES_UPDATE
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Create missing LfsObjectsProject records for forks
class LinkLfsObjectsProjects
# Model specifically used for migration.
class LfsObjectsProject < ActiveRecord::Base
include EachBatch
self.table_name = 'lfs_objects_projects'
def self.linkable
where(
<<~SQL
lfs_objects_projects.project_id IN (
SELECT fork_network_members.forked_from_project_id
FROM fork_network_members
WHERE fork_network_members.forked_from_project_id IS NOT NULL
)
SQL
)
end
end
# Model specifically used for migration.
class ForkNetworkMember < ActiveRecord::Base
include EachBatch
self.table_name = 'fork_network_members'
def self.without_lfs_object(lfs_object_id)
where(
<<~SQL
fork_network_members.project_id NOT IN (
SELECT lop.project_id
FROM lfs_objects_projects lop
WHERE lop.lfs_object_id = #{lfs_object_id}
)
SQL
)
end
end
BATCH_SIZE = 1000
def perform(start_id, end_id)
lfs_objects_projects =
Gitlab::BackgroundMigration::LinkLfsObjectsProjects::LfsObjectsProject
.linkable
.where(id: start_id..end_id)
return if lfs_objects_projects.empty?
lfs_objects_projects.find_each do |lop|
ForkNetworkMember
.select("#{lop.lfs_object_id}, fork_network_members.project_id, NOW(), NOW()")
.without_lfs_object(lop.lfs_object_id)
.where(forked_from_project_id: lop.project_id)
.each_batch(of: BATCH_SIZE) do |batch, index|
execute <<~SQL
INSERT INTO lfs_objects_projects (lfs_object_id, project_id, created_at, updated_at)
#{batch.to_sql}
SQL
logger.info(message: "LinkLfsObjectsProjects: created missing LfsObjectsProject records for LfsObject #{lop.lfs_object_id}")
end
end
end
private
def execute(sql)
::ActiveRecord::Base.connection.execute(sql)
end
def logger
@logger ||= Gitlab::BackgroundMigration::Logger.build
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This class is responsible for backfilling the missing SHA256 fingerprints in the keys table
class MigrateFingerprintSha256WithinKeys
# Temporary AR table for keys
class Key < ActiveRecord::Base
include EachBatch
self.table_name = 'keys'
self.inheritance_column = :_type_disabled
end
TEMP_TABLE = 'tmp_fingerprint_sha256_migration'
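# Strategy: compute the SHA256 fingerprints in Ruby, bulk-insert them
# into a session-local temporary table, then update the keys table with
# a single UPDATE ... FROM join. ON COMMIT DROP removes the temporary
# table when the surrounding transaction finishes.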
def perform(start_id, stop_id)
ActiveRecord::Base.transaction do
execute(<<~SQL)
CREATE TEMPORARY TABLE #{TEMP_TABLE}
(id bigint primary key, fingerprint_sha256 bytea not null)
ON COMMIT DROP
SQL
fingerprints = []
Key.where(id: start_id..stop_id, fingerprint_sha256: nil).find_each do |regular_key|
if fingerprint = generate_ssh_public_key(regular_key.key)
bytea = ActiveRecord::Base.connection.escape_bytea(Base64.decode64(fingerprint))
fingerprints << {
id: regular_key.id,
fingerprint_sha256: bytea
}
end
end
ApplicationRecord.legacy_bulk_insert(TEMP_TABLE, fingerprints) # rubocop:disable Gitlab/BulkInsert
execute("ANALYZE #{TEMP_TABLE}")
execute(<<~SQL)
UPDATE keys
SET fingerprint_sha256 = t.fingerprint_sha256
FROM #{TEMP_TABLE} t
WHERE keys.id = t.id
SQL
end
end
private
def generate_ssh_public_key(regular_key)
Gitlab::SSHPublicKey.new(regular_key).fingerprint("SHA256")&.gsub("SHA256:", "")
end
def execute(query)
ActiveRecord::Base.connection.execute(query)
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Class that will insert a record into project_pages_metadata
# for each existing project
class MigratePagesMetadata
def perform(start_id, stop_id)
perform_on_relation(Project.where(id: start_id..stop_id))
end
def perform_on_relation(relation)
successful_pages_deploy = <<~SQL
SELECT TRUE
FROM ci_builds
WHERE ci_builds.type = 'GenericCommitStatus'
AND ci_builds.status = 'success'
AND ci_builds.stage = 'deploy'
AND ci_builds.name = 'pages:deploy'
AND ci_builds.project_id = projects.id
LIMIT 1
SQL
select_from = relation
.select("projects.id", "COALESCE((#{successful_pages_deploy}), FALSE)")
.to_sql
ActiveRecord::Base.connection_pool.with_connection do |connection|
connection.execute <<~SQL
INSERT INTO project_pages_metadata (project_id, deployed)
#{select_from}
ON CONFLICT (project_id) DO NOTHING
SQL
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# rubocop: disable Style/Documentation
class MigrateSecurityScans
def perform(start_id, stop_id)
end
end
end
end
Gitlab::BackgroundMigration::MigrateSecurityScans.prepend_mod_with('Gitlab::BackgroundMigration::MigrateSecurityScans')
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Background migration to move any legacy project to Hashed Storage
class MigrateToHashedStorage
def perform
batch_size = helper.batch_size
legacy_projects_count = Project.with_unmigrated_storage.count
if storage_migrator.rollback_pending?
logger.warn(
migrator: 'MigrateToHashedStorage',
message: 'Aborting a storage rollback operation currently in progress'
)
storage_migrator.abort_rollback!
end
if legacy_projects_count == 0
logger.info(
migrator: 'MigrateToHashedStorage',
message: 'There are no projects requiring migration to Hashed Storage'
)
return
end
logger.info(
migrator: 'MigrateToHashedStorage',
message: "Enqueuing migration of #{legacy_projects_count} projects in batches of #{batch_size}"
)
helper.project_id_batches_migration do |start, finish|
storage_migrator.bulk_schedule_migration(start: start, finish: finish)
logger.info(
migrator: 'MigrateToHashedStorage',
message: "Enqueuing migration of projects in batches of #{batch_size} from ID=#{start} to ID=#{finish}",
batch_from: start,
batch_to: finish
)
end
end
private
def helper
Gitlab::HashedStorage::RakeHelper
end
def storage_migrator
@storage_migrator ||= Gitlab::HashedStorage::Migrator.new
end
def logger
@logger ||= ::Gitlab::BackgroundMigration::Logger.build
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# rubocop: disable Style/Documentation
class MoveEpicIssuesAfterEpics
def perform(start_id, stop_id)
end
end
end
end
Gitlab::BackgroundMigration::MoveEpicIssuesAfterEpics.prepend_mod_with('Gitlab::BackgroundMigration::MoveEpicIssuesAfterEpics')
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This background migration creates any approver rule records according
# to the given merge request IDs range. A _single_ INSERT is issued for the given range.
class PopulateAnyApprovalRuleForMergeRequests
def perform(from_id, to_id)
end
end
end
end
Gitlab::BackgroundMigration::PopulateAnyApprovalRuleForMergeRequests.prepend_mod_with('Gitlab::BackgroundMigration::PopulateAnyApprovalRuleForMergeRequests')
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This background migration creates any approver rule records according
# to the given project IDs range. A _single_ INSERT is issued for the given range.
class PopulateAnyApprovalRuleForProjects
def perform(from_id, to_id)
end
end
end
end
Gitlab::BackgroundMigration::PopulateAnyApprovalRuleForProjects.prepend_mod_with('Gitlab::BackgroundMigration::PopulateAnyApprovalRuleForProjects')
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Class to populate new rows of UserCanonicalEmail based on existing email addresses
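# For example, 'john.doe+gitlab@gmail.com' canonicalizes to
# 'johndoe@gmail.com': the '+' suffix is dropped and dots are removed
# from the local part of the address.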
class PopulateCanonicalEmails
def perform(start_id, stop_id)
ActiveRecord::Base.connection.execute <<~SQL
INSERT INTO
user_canonical_emails (
user_id,
canonical_email,
created_at,
updated_at
)
SELECT users.id AS user_id,
concat(translate(split_part(split_part(users.email, '@', 1), '+', 1), '.', ''), '@gmail.com') AS canonical_email,
NOW() AS created_at,
NOW() AS updated_at
FROM users
WHERE users.email ILIKE '%@gmail.com'
AND users.id BETWEEN #{start_id} AND #{stop_id}
ON CONFLICT DO NOTHING;
SQL
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This class updates vulnerability entities to the dismissed state and backfills their dismissal information
class PopulateDismissedStateForVulnerabilities
class Vulnerability < ActiveRecord::Base # rubocop:disable Style/Documentation
self.table_name = 'vulnerabilities'
end
def perform(*vulnerability_ids)
Vulnerability.where(id: vulnerability_ids).update_all(state: 2)
PopulateMissingVulnerabilityDismissalInformation.new.perform(*vulnerability_ids)
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This class flags projects as having vulnerabilities by upserting
# project_settings.has_vulnerabilities for the given project IDs.
class PopulateHasVulnerabilities
class ProjectSetting < ActiveRecord::Base # rubocop:disable Style/Documentation
self.table_name = 'project_settings'
def self.upsert_for(project_ids)
connection.execute(upsert_sql % { project_ids: project_ids.join(', ') })
end
def self.upsert_sql
<<~SQL
WITH upsert_data (project_id, has_vulnerabilities, created_at, updated_at) AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} (
SELECT projects.id, true, current_timestamp, current_timestamp FROM projects WHERE projects.id IN (%{project_ids})
)
INSERT INTO project_settings
(project_id, has_vulnerabilities, created_at, updated_at)
(SELECT * FROM upsert_data)
ON CONFLICT (project_id)
DO UPDATE SET
has_vulnerabilities = true,
updated_at = EXCLUDED.updated_at
SQL
end
end
class Vulnerability < ActiveRecord::Base # rubocop:disable Style/Documentation
include EachBatch
self.table_name = 'vulnerabilities'
end
def perform(*project_ids)
ProjectSetting.upsert_for(project_ids)
rescue StandardError => e
log_error(e, project_ids)
ensure
log_info(project_ids)
end
private
def log_error(error, project_ids)
::Gitlab::BackgroundMigration::Logger.error(
migrator: self.class.name,
message: error.message,
project_ids: project_ids
)
end
def log_info(project_ids)
::Gitlab::BackgroundMigration::Logger.info(
migrator: self.class.name,
message: 'Projects have been processed to populate `has_vulnerabilities` information',
count: project_ids.length
)
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This background migration creates records on merge_request_assignees according
# to the given merge request IDs range. A _single_ INSERT is issued for the given range.
# This is required for supporting multiple assignees on merge requests.
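# perform_all_sync runs the whole backfill synchronously in batches,
# e.g. (illustrative batch size) new.perform_all_sync(batch_size: 10_000).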
class PopulateMergeRequestAssigneesTable
def perform(from_id, to_id)
select_sql =
MergeRequest
.where(merge_request_assignees_not_exists_clause)
.where(id: from_id..to_id)
.where.not(assignee_id: nil)
.select(:id, :assignee_id)
.to_sql
execute("INSERT INTO merge_request_assignees (merge_request_id, user_id) #{select_sql}")
end
def perform_all_sync(batch_size:)
MergeRequest.each_batch(of: batch_size) do |batch|
range = batch.pluck('MIN(id)', 'MAX(id)').first
perform(*range)
end
end
private
def merge_request_assignees_not_exists_clause
<<~SQL
NOT EXISTS (SELECT 1 FROM merge_request_assignees
WHERE merge_request_assignees.merge_request_id = merge_requests.id)
SQL
end
def execute(sql)
@connection ||= ActiveRecord::Base.connection
@connection.execute(sql)
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This class populates missing dismissal information for
# vulnerability entries.
class PopulateMissingVulnerabilityDismissalInformation
class Vulnerability < ActiveRecord::Base # rubocop:disable Style/Documentation
include EachBatch
self.table_name = 'vulnerabilities'
has_one :finding, class_name: '::Gitlab::BackgroundMigration::PopulateMissingVulnerabilityDismissalInformation::Finding'
scope :broken, -> { where('state = 2 AND (dismissed_at IS NULL OR dismissed_by_id IS NULL)') }
def copy_dismissal_information
return unless finding&.dismissal_feedback
update_columns(
dismissed_at: finding.dismissal_feedback.created_at,
dismissed_by_id: finding.dismissal_feedback.author_id
)
end
end
class Finding < ActiveRecord::Base # rubocop:disable Style/Documentation
include ShaAttribute
include ::Gitlab::Utils::StrongMemoize
self.table_name = 'vulnerability_occurrences'
sha_attribute :project_fingerprint
def dismissal_feedback
strong_memoize(:dismissal_feedback) do
Feedback.dismissal.where(category: report_type, project_fingerprint: project_fingerprint, project_id: project_id).first
end
end
end
class Feedback < ActiveRecord::Base # rubocop:disable Style/Documentation
DISMISSAL_TYPE = 0
self.table_name = 'vulnerability_feedback'
scope :dismissal, -> { where(feedback_type: DISMISSAL_TYPE) }
end
def perform(*vulnerability_ids)
Vulnerability.includes(:finding).where(id: vulnerability_ids).each { |vulnerability| populate_for(vulnerability) }
log_info(vulnerability_ids)
end
private
def populate_for(vulnerability)
log_warning(vulnerability) unless vulnerability.copy_dismissal_information
rescue StandardError => error
log_error(error, vulnerability)
end
def log_info(vulnerability_ids)
::Gitlab::BackgroundMigration::Logger.info(
migrator: self.class.name,
message: 'Dismissal information has been copied',
count: vulnerability_ids.length
)
end
def log_warning(vulnerability)
::Gitlab::BackgroundMigration::Logger.warn(
migrator: self.class.name,
message: 'Could not update vulnerability!',
vulnerability_id: vulnerability.id
)
end
def log_error(error, vulnerability)
::Gitlab::BackgroundMigration::Logger.error(
migrator: self.class.name,
message: error.message,
vulnerability_id: vulnerability.id
)
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This class creates/updates those personal snippet statistics
# that haven't been created or initialized.
# It also updates the related root storage namespace statistics
class PopulatePersonalSnippetStatistics
def perform(snippet_ids)
personal_snippets(snippet_ids).group_by(&:author).each do |author, author_snippets|
upsert_snippet_statistics(author_snippets)
update_namespace_statistics(author.namespace)
end
end
private
def personal_snippets(snippet_ids)
PersonalSnippet
.where(id: snippet_ids)
.includes(author: :namespace)
.includes(:statistics)
.includes(snippet_repository: :shard)
end
def upsert_snippet_statistics(snippets)
snippets.each do |snippet|
response = Snippets::UpdateStatisticsService.new(snippet).execute
error_message("#{response.message} snippet: #{snippet.id}") if response.error?
end
end
def update_namespace_statistics(namespace)
Namespaces::StatisticsRefresherService.new.execute(namespace)
rescue StandardError => e
error_message("Error updating statistics for namespace #{namespace.id}: #{e.message}")
end
def logger
@logger ||= Gitlab::BackgroundMigration::Logger.build
end
def error_message(message)
logger.error(message: "Snippet Statistics Migration: #{message}")
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This class creates/updates those project snippet statistics
# that haven't been created or initialized.
# It also updates the related project statistics and the root storage namespace statistics
class PopulateProjectSnippetStatistics
def perform(snippet_ids)
project_snippets(snippet_ids).group_by(&:namespace_id).each do |namespace_id, namespace_snippets|
namespace_snippets.group_by(&:project).each do |project, snippets|
upsert_snippet_statistics(snippets)
update_project_statistics(project)
rescue StandardError
error_message("Error updating statistics for project #{project.id}")
end
update_namespace_statistics(namespace_snippets.first.project.root_namespace)
rescue StandardError => e
error_message("Error updating statistics for namespace #{namespace_id}: #{e.message}")
end
end
private
def project_snippets(snippet_ids)
ProjectSnippet
.select('snippets.*, projects.namespace_id')
.where(id: snippet_ids)
.joins(:project)
.includes(:statistics)
.includes(snippet_repository: :shard)
.includes(project: [:route, :statistics, :namespace])
end
def upsert_snippet_statistics(snippets)
snippets.each do |snippet|
response = Snippets::UpdateStatisticsService.new(snippet).execute
error_message("#{response.message} snippet: #{snippet.id}") if response.error?
end
end
def logger
@logger ||= Gitlab::BackgroundMigration::Logger.build
end
def error_message(message)
logger.error(message: "Snippet Statistics Migration: #{message}")
end
def update_project_statistics(project)
project.statistics&.refresh!(only: [:snippets_size])
end
def update_namespace_statistics(namespace)
Namespaces::StatisticsRefresherService.new.execute(namespace)
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This class updates vulnerability feedback entities with no pipeline id assigned.
class PopulateVulnerabilityFeedbackPipelineId
def perform(project_ids)
end
end
end
end
Gitlab::BackgroundMigration::PopulateVulnerabilityFeedbackPipelineId.prepend_mod_with('Gitlab::BackgroundMigration::PopulateVulnerabilityFeedbackPipelineId')
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This class creates/updates those project historical vulnerability statistics
# that haven't been created or initialized. It should only be executed in EE.
class PopulateVulnerabilityHistoricalStatistics
def perform(project_ids, retention_period = 90)
end
end
end
end
Gitlab::BackgroundMigration::PopulateVulnerabilityHistoricalStatistics.prepend_mod_with('Gitlab::BackgroundMigration::PopulateVulnerabilityHistoricalStatistics')
# frozen_string_literal: true
#
# rubocop:disable Style/Documentation
# This job is added to fix https://gitlab.com/gitlab-org/gitlab/issues/30229
# It's not used anywhere else.
# Can be removed in GitLab 13.*
module Gitlab
module BackgroundMigration
class PruneOrphanedGeoEvents
def perform(table_name)
end
end
end
end
Gitlab::BackgroundMigration::PruneOrphanedGeoEvents.prepend_mod_with('Gitlab::BackgroundMigration::PruneOrphanedGeoEvents')
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# rubocop:disable Style/Documentation
class RecalculateProjectAuthorizationsWithMinMaxUserId
def perform(min_user_id, max_user_id)
User.where(id: min_user_id..max_user_id).find_each do |user|
service = Users::RefreshAuthorizedProjectsService.new(
user,
incorrect_auth_found_callback:
->(project_id, access_level) do
logger.info(message: 'Removing ProjectAuthorizations',
user_id: user.id,
project_id: project_id,
access_level: access_level)
end,
missing_auth_found_callback:
->(project_id, access_level) do
logger.info(message: 'Creating ProjectAuthorizations',
user_id: user.id,
project_id: project_id,
access_level: access_level)
end
)
service.execute
end
end
private
def logger
@logger ||= Gitlab::BackgroundMigration::Logger.build
end
end
end
end
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class RemoveDuplicateCsFindings
def perform(start_id, stop_id)
end
end
end
end
Gitlab::BackgroundMigration::RemoveDuplicateCsFindings.prepend_mod_with('Gitlab::BackgroundMigration::RemoveDuplicateCsFindings')
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class RemoveDuplicatedCsFindingsWithoutVulnerabilityId
def perform(start_id, stop_id)
end
end
end
end
Gitlab::BackgroundMigration::RemoveDuplicatedCsFindingsWithoutVulnerabilityId.prepend_mod_with('Gitlab::BackgroundMigration::RemoveDuplicatedCsFindingsWithoutVulnerabilityId')
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# rubocop:disable Style/Documentation
class RemoveInaccessibleEpicTodos
def perform(start_id, stop_id)
end
end
end
end
Gitlab::BackgroundMigration::RemoveInaccessibleEpicTodos.prepend_mod_with('Gitlab::BackgroundMigration::RemoveInaccessibleEpicTodos')
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class RemoveUndefinedVulnerabilityConfidenceLevel
def perform(start_id, stop_id)
end
end
end
end
Gitlab::BackgroundMigration::RemoveUndefinedVulnerabilityConfidenceLevel.prepend_mod_with('Gitlab::BackgroundMigration::RemoveUndefinedVulnerabilityConfidenceLevel')
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class ReplaceBlockedByLinks
class IssueLink < ActiveRecord::Base
self.table_name = 'issue_links'
end
def perform(start_id, stop_id)
blocked_by_links = IssueLink.where(id: start_id..stop_id).where(link_type: 2)
ActiveRecord::Base.transaction do
# There are two edge cases:
# 1) issue1 is blocked by issue2 AND issue2 blocks issue1 (type 1)
# 2) issue1 is blocked by issue2 AND issue2 is related to issue1 (type 0)
# In both cases we can't convert the `blocked by` relation to
# `issue2 blocks issue1` because a link with the same source/target id
# already exists. To avoid these conflicts, we first delete any
# "opposite" links before updating the `blocked by` relation. This
# should be rare, as a pre-create check verifies whether issues
# are already linked.
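# For example (illustrative data only), a surviving row
#   (source_id: 1, target_id: 2, link_type: 2) # issue1 blocked by issue2
# is rewritten by the update below to
#   (source_id: 2, target_id: 1, link_type: 1) # issue2 blocks issue1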
opposite_ids = blocked_by_links
.select('opposite_links.id')
.joins('INNER JOIN issue_links as opposite_links ON issue_links.source_id = opposite_links.target_id AND issue_links.target_id = opposite_links.source_id')
IssueLink.where(id: opposite_ids).delete_all
blocked_by_links.update_all('source_id=target_id,target_id=source_id,link_type=1')
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Updates the range of given MRs to merge_status "unchecked", if they're opened
# and mergeable.
class ResetMergeStatus
def perform(from_id, to_id)
relation = MergeRequest.where(id: from_id..to_id,
state_id: 1, # opened
merge_status: 'can_be_merged')
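# 'unchecked' forces GitLab to re-evaluate mergeability for these MRs.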
relation.update_all(merge_status: 'unchecked')
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Resets inconsistent state of shared_runners_enabled for projects that have been transferred
class ResetSharedRunnersForTransferredProjects
# Model specifically used for migration.
class Namespace < ActiveRecord::Base
include EachBatch
self.table_name = 'namespaces'
end
# Model specifically used for migration.
class Project < ActiveRecord::Base
self.table_name = 'projects'
end
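# Force-disables shared runners on projects whose namespace (within the
# given range) has shared runners disabled and does not allow descendants
# to override that setting.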
def perform(start_id, stop_id)
Project.reset_column_information
Namespace.where(id: start_id..stop_id).each_batch(of: 1_000) do |relation|
ids = relation.where(shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: false).select(:id)
Project.where(namespace_id: ids).update_all(shared_runners_enabled: false)
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Sets the MergeRequestDiff#files_count value for old rows
class SetMergeRequestDiffFilesCount
# Some historic data has a *lot* of files. Apply a sentinel value to these cases.
FILES_COUNT_SENTINEL = 2**15 - 1
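# 2**15 - 1 = 32767, which keeps the value within signed smallint range.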
def self.count_subquery
<<~SQL
files_count = (
SELECT LEAST(#{FILES_COUNT_SENTINEL}, count(*))
FROM merge_request_diff_files
WHERE merge_request_diff_files.merge_request_diff_id = merge_request_diffs.id
)
SQL
end
class MergeRequestDiff < ActiveRecord::Base # rubocop:disable Style/Documentation
include EachBatch
self.table_name = 'merge_request_diffs'
end
def perform(start_id, end_id)
MergeRequestDiff.where(id: start_id..end_id).each_batch do |relation|
relation.update_all(self.class.count_subquery)
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This background migration updates the children of a group to match the visibility level of the parent
class UpdateExistingSubgroupToMatchVisibilityLevelOfParent
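# `level` is a Gitlab::VisibilityLevel integer (0 = private, 10 = internal, 20 = public).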
def perform(parents_groups_ids, level)
groups_ids = Gitlab::ObjectHierarchy.new(Group.where(id: parents_groups_ids))
.base_and_descendants
.where("visibility_level > ?", level)
.select(:id)
return if groups_ids.empty?
Group
.where(id: groups_ids)
.update_all(visibility_level: level)
end
end
end
end
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class UpdateExistingUsersThatRequireTwoFactorAuth # rubocop:disable Metrics/ClassLength
def perform(start_id, stop_id)
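# Clears require_two_factor_authentication_from_group for users in the ID
# range who do not belong (directly, or through ancestor/descendant groups)
# to any group that requires two-factor authentication.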
ActiveRecord::Base.connection.execute <<~SQL
UPDATE
users
SET
require_two_factor_authentication_from_group = FALSE
WHERE
users.id BETWEEN #{start_id}
AND #{stop_id}
AND users.require_two_factor_authentication_from_group = TRUE
AND users.id NOT IN ( SELECT DISTINCT
users_groups_query.user_id
FROM (
SELECT
users.id AS user_id,
members.source_id AS group_ids
FROM
users
LEFT JOIN members ON members.source_type = 'Namespace'
AND members.requested_at IS NULL
AND members.user_id = users.id
AND members.type = 'GroupMember'
WHERE
users.require_two_factor_authentication_from_group = TRUE
AND users.id BETWEEN #{start_id}
AND #{stop_id}) AS users_groups_query
INNER JOIN LATERAL ( WITH RECURSIVE "base_and_ancestors" AS (
(
SELECT
"namespaces"."type",
"namespaces"."id",
"namespaces"."parent_id",
"namespaces"."require_two_factor_authentication"
FROM
"namespaces"
WHERE
"namespaces"."type" = 'Group'
AND "namespaces"."id" = users_groups_query.group_ids)
UNION (
SELECT
"namespaces"."type",
"namespaces"."id",
"namespaces"."parent_id",
"namespaces"."require_two_factor_authentication"
FROM
"namespaces",
"base_and_ancestors"
WHERE
"namespaces"."type" = 'Group'
AND "namespaces"."id" = "base_and_ancestors"."parent_id")),
"base_and_descendants" AS (
(
SELECT
"namespaces"."type",
"namespaces"."id",
"namespaces"."parent_id",
"namespaces"."require_two_factor_authentication"
FROM
"namespaces"
WHERE
"namespaces"."type" = 'Group'
AND "namespaces"."id" = users_groups_query.group_ids)
UNION (
SELECT
"namespaces"."type",
"namespaces"."id",
"namespaces"."parent_id",
"namespaces"."require_two_factor_authentication"
FROM
"namespaces",
"base_and_descendants"
WHERE
"namespaces"."type" = 'Group'
AND "namespaces"."parent_id" = "base_and_descendants"."id"))
SELECT
"namespaces".*
FROM ((
SELECT
"namespaces"."type",
"namespaces"."id",
"namespaces"."parent_id",
"namespaces"."require_two_factor_authentication"
FROM
"base_and_ancestors" AS "namespaces"
WHERE
"namespaces"."type" = 'Group')
UNION (
SELECT
"namespaces"."type",
"namespaces"."id",
"namespaces"."parent_id",
"namespaces"."require_two_factor_authentication"
FROM
"base_and_descendants" AS "namespaces"
WHERE
"namespaces"."type" = 'Group')) namespaces
WHERE
"namespaces"."type" = 'Group'
AND "namespaces"."require_two_factor_authentication" = TRUE) AS hierarchy_tree ON TRUE);
SQL
end
end
end
end
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class UpdateLocationFingerprintForContainerScanningFindings
def perform(start_id, stop_id)
end
end
end
end
Gitlab::BackgroundMigration::UpdateLocationFingerprintForContainerScanningFindings.prepend_mod_with('Gitlab::BackgroundMigration::UpdateLocationFingerprintForContainerScanningFindings')
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# rubocop: disable Style/Documentation
class UpdateVulnerabilitiesFromDismissalFeedback
def perform(project_id)
end
end
end
end
Gitlab::BackgroundMigration::UpdateVulnerabilitiesFromDismissalFeedback.prepend_mod_with('Gitlab::BackgroundMigration::UpdateVulnerabilitiesFromDismissalFeedback')
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::AddMergeRequestDiffCommitsCount do
let(:namespaces_table) { table(:namespaces) }
let(:projects_table) { table(:projects) }
let(:merge_requests_table) { table(:merge_requests) }
let(:merge_request_diffs_table) { table(:merge_request_diffs) }
let(:merge_request_diff_commits_table) { table(:merge_request_diff_commits) }
let(:namespace) { namespaces_table.create!(name: 'gitlab-org', path: 'gitlab-org') }
let(:project) { projects_table.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce', namespace_id: namespace.id) }
let(:merge_request) do
merge_requests_table.create!(target_project_id: project.id,
target_branch: 'master',
source_project_id: project.id,
source_branch: 'mr name',
title: 'mr name')
end
def create_diff!(name, commits: 0)
mr_diff = merge_request_diffs_table.create!(
merge_request_id: merge_request.id)
commits.times do |i|
merge_request_diff_commits_table.create!(
merge_request_diff_id: mr_diff.id,
relative_order: i, sha: i)
end
mr_diff
end
describe '#perform' do
it 'migrates diffs that have no commits' do
diff = create_diff!('with_multiple_commits', commits: 0)
subject.perform(diff.id, diff.id)
expect(diff.reload.commits_count).to eq(0)
end
it 'skips diffs that have commits_count already set' do
timestamp = 2.days.ago
diff = merge_request_diffs_table.create!(
merge_request_id: merge_request.id,
commits_count: 0,
updated_at: timestamp)
subject.perform(diff.id, diff.id)
expect(diff.reload.updated_at).to be_within(1.second).of(timestamp)
end
it 'migrates multiple diffs to the correct values' do
diffs = Array.new(3).map.with_index { |_, i| create_diff!(i, commits: 3) }
subject.perform(diffs.first.id, diffs.last.id)
diffs.each do |diff|
expect(diff.reload.commits_count).to eq(3)
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::AddModifiedToApprovalMergeRequestRule, schema: 20181228175414 do
let(:determine_if_rules_are_modified) { described_class.new }
let(:namespace) { table(:namespaces).create!(name: 'gitlab', path: 'gitlab') }
let(:projects) { table(:projects) }
let(:normal_project) { projects.create!(namespace_id: namespace.id) }
let(:overridden_project) { projects.create!(namespace_id: namespace.id) }
let(:rules) { table(:approval_merge_request_rules) }
let(:project_rules) { table(:approval_project_rules) }
let(:sources) { table(:approval_merge_request_rule_sources) }
let(:merge_requests) { table(:merge_requests) }
let(:groups) { table(:namespaces) }
let(:mr_groups) { table(:approval_merge_request_rules_groups) }
let(:project_groups) { table(:approval_project_rules_groups) }
before do
project_rule = project_rules.create!(project_id: normal_project.id, approvals_required: 3, name: 'test rule')
overridden_project_rule = project_rules.create!(project_id: overridden_project.id, approvals_required: 5, name: 'other test rule')
overridden_project_rule_two = project_rules.create!(project_id: overridden_project.id, approvals_required: 7, name: 'super cool rule')
merge_request = merge_requests.create!(target_branch: 'feature', source_branch: 'default', source_project_id: normal_project.id, target_project_id: normal_project.id)
overridden_merge_request = merge_requests.create!(target_branch: 'feature-2', source_branch: 'default', source_project_id: overridden_project.id, target_project_id: overridden_project.id)
merge_rule = rules.create!(merge_request_id: merge_request.id, approvals_required: 3, name: 'test rule')
overridden_merge_rule = rules.create!(merge_request_id: overridden_merge_request.id, approvals_required: 6, name: 'other test rule')
overridden_merge_rule_two = rules.create!(merge_request_id: overridden_merge_request.id, approvals_required: 7, name: 'super cool rule')
sources.create!(approval_project_rule_id: project_rule.id, approval_merge_request_rule_id: merge_rule.id)
sources.create!(approval_project_rule_id: overridden_project_rule.id, approval_merge_request_rule_id: overridden_merge_rule.id)
sources.create!(approval_project_rule_id: overridden_project_rule_two.id, approval_merge_request_rule_id: overridden_merge_rule_two.id)
group1 = groups.create!(name: "group1", path: "test_group1", type: 'Group')
group2 = groups.create!(name: "group2", path: "test_group2", type: 'Group')
group3 = groups.create!(name: "group3", path: "test_group3", type: 'Group')
project_groups.create!(approval_project_rule_id: overridden_project_rule_two.id, group_id: group1.id)
project_groups.create!(approval_project_rule_id: overridden_project_rule_two.id, group_id: group2.id)
project_groups.create!(approval_project_rule_id: overridden_project_rule_two.id, group_id: group3.id)
mr_groups.create!(approval_merge_request_rule_id: overridden_merge_rule.id, group_id: group1.id)
mr_groups.create!(approval_merge_request_rule_id: overridden_merge_rule_two.id, group_id: group2.id)
end
describe '#perform' do
it 'changes the correct rules' do
original_count = rules.all.count
determine_if_rules_are_modified.perform(rules.minimum(:id), rules.maximum(:id))
results = rules.where(modified_from_project_rule: true)
expect(results.count).to eq 2
expect(results.collect(&:name)).to eq(['other test rule', 'super cool rule'])
expect(rules.count).to eq original_count
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillDeploymentClustersFromDeployments, :migration, schema: 20181228175414 do
subject { described_class.new }
describe '#perform' do
it 'backfills deployment_cluster for all deployments in the given range with a non-null cluster_id' do
deployment_clusters = table(:deployment_clusters)
namespace = table(:namespaces).create!(name: 'the-namespace', path: 'the-path')
project = table(:projects).create!(name: 'the-project', namespace_id: namespace.id)
environment = table(:environments).create!(name: 'the-environment', project_id: project.id, slug: 'slug')
cluster = table(:clusters).create!(name: 'the-cluster')
deployment_data = { cluster_id: cluster.id, project_id: project.id, environment_id: environment.id, ref: 'abc', tag: false, sha: 'sha', status: 1 }
expected_deployment_1 = create_deployment(**deployment_data)
create_deployment(**deployment_data, cluster_id: nil) # no cluster_id
expected_deployment_2 = create_deployment(**deployment_data)
out_of_range_deployment = create_deployment(**deployment_data, cluster_id: cluster.id) # expected to be out of range
# to test "ON CONFLICT DO NOTHING"
existing_record_for_deployment_2 = deployment_clusters.create!(
deployment_id: expected_deployment_2.id,
cluster_id: expected_deployment_2.cluster_id,
kubernetes_namespace: 'production'
)
subject.perform(expected_deployment_1.id, out_of_range_deployment.id - 1)
expect(deployment_clusters.all.pluck(:deployment_id, :cluster_id, :kubernetes_namespace)).to contain_exactly(
[expected_deployment_1.id, cluster.id, nil],
[expected_deployment_2.id, cluster.id, existing_record_for_deployment_2.kubernetes_namespace]
)
end
def create_deployment(**data)
@iid ||= 0
@iid += 1
table(:deployments).create!(iid: @iid, **data)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillEnvironmentIdDeploymentMergeRequests, schema: 20181228175414 do
let(:environments) { table(:environments) }
let(:merge_requests) { table(:merge_requests) }
let(:deployments) { table(:deployments) }
let(:deployment_merge_requests) { table(:deployment_merge_requests) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
subject(:migration) { described_class.new }
it 'correctly backfills environment_id column' do
namespace = namespaces.create!(name: 'foo', path: 'foo')
project = projects.create!(namespace_id: namespace.id)
production = environments.create!(project_id: project.id, name: 'production', slug: 'production')
staging = environments.create!(project_id: project.id, name: 'staging', slug: 'staging')
mr = merge_requests.create!(source_branch: 'x', target_branch: 'master', target_project_id: project.id)
deployment1 = deployments.create!(environment_id: staging.id, iid: 1, project_id: project.id, ref: 'master', tag: false, sha: '123abcdef', status: 1)
deployment2 = deployments.create!(environment_id: production.id, iid: 2, project_id: project.id, ref: 'master', tag: false, sha: '123abcdef', status: 1)
deployment3 = deployments.create!(environment_id: production.id, iid: 3, project_id: project.id, ref: 'master', tag: false, sha: '123abcdef', status: 1)
# mr is tracked twice in production through deployment2 and deployment3
deployment_merge_requests.create!(deployment_id: deployment1.id, merge_request_id: mr.id)
deployment_merge_requests.create!(deployment_id: deployment2.id, merge_request_id: mr.id)
deployment_merge_requests.create!(deployment_id: deployment3.id, merge_request_id: mr.id)
expect(deployment_merge_requests.where(environment_id: nil).count).to eq(3)
migration.backfill_range(1, mr.id)
expect(deployment_merge_requests.where(environment_id: nil).count).to be_zero
expect(deployment_merge_requests.count).to eq(2)
production_deployments = deployment_merge_requests.where(environment_id: production.id)
expect(production_deployments.count).to eq(1)
expect(production_deployments.first.deployment_id).to eq(deployment2.id)
expect(deployment_merge_requests.where(environment_id: staging.id).count).to eq(1)
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillMergeRequestCleanupSchedules, schema: 20181228175414 do
let(:merge_requests) { table(:merge_requests) }
let(:cleanup_schedules) { table(:merge_request_cleanup_schedules) }
let(:metrics) { table(:merge_request_metrics) }
let(:namespace) { table(:namespaces).create!(name: 'name', path: 'path') }
let(:project) { table(:projects).create!(namespace_id: namespace.id) }
subject { described_class.new }
describe '#perform' do
let!(:open_mr) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master') }
let!(:closed_mr_1) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 2) }
let!(:closed_mr_2) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 2) }
let!(:closed_mr_1_metrics) { metrics.create!(merge_request_id: closed_mr_1.id, target_project_id: project.id, latest_closed_at: Time.current, created_at: Time.current, updated_at: Time.current) }
let!(:closed_mr_2_metrics) { metrics.create!(merge_request_id: closed_mr_2.id, target_project_id: project.id, latest_closed_at: Time.current, created_at: Time.current, updated_at: Time.current) }
let!(:closed_mr_2_cleanup_schedule) { cleanup_schedules.create!(merge_request_id: closed_mr_2.id, scheduled_at: Time.current) }
let!(:merged_mr_1) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 3) }
let!(:merged_mr_2) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 3, updated_at: Time.current) }
let!(:merged_mr_1_metrics) { metrics.create!(merge_request_id: merged_mr_1.id, target_project_id: project.id, merged_at: Time.current, created_at: Time.current, updated_at: Time.current) }
let!(:closed_mr_3) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 2) }
let!(:closed_mr_3_metrics) { metrics.create!(merge_request_id: closed_mr_3.id, target_project_id: project.id, latest_closed_at: Time.current, created_at: Time.current, updated_at: Time.current) }
it 'creates records for all closed and merged merge requests in range' do
expect(Gitlab::BackgroundMigration::Logger).to receive(:info).with(
message: 'Backfilled merge_request_cleanup_schedules records',
count: 3
)
subject.perform(open_mr.id, merged_mr_2.id)
aggregate_failures do
expect(cleanup_schedules.all.pluck(:merge_request_id))
.to contain_exactly(closed_mr_1.id, closed_mr_2.id, merged_mr_1.id, merged_mr_2.id)
expect(cleanup_schedules.find_by(merge_request_id: closed_mr_1.id).scheduled_at.to_s)
.to eq((closed_mr_1_metrics.latest_closed_at + 14.days).to_s)
expect(cleanup_schedules.find_by(merge_request_id: closed_mr_2.id).scheduled_at.to_s)
.to eq(closed_mr_2_cleanup_schedule.scheduled_at.to_s)
expect(cleanup_schedules.find_by(merge_request_id: merged_mr_1.id).scheduled_at.to_s)
.to eq((merged_mr_1_metrics.merged_at + 14.days).to_s)
expect(cleanup_schedules.find_by(merge_request_id: merged_mr_2.id).scheduled_at.to_s)
.to eq((merged_mr_2.updated_at + 14.days).to_s)
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceSettings, schema: 20181228175414 do
let(:namespaces) { table(:namespaces) }
let(:namespace_settings) { table(:namespace_settings) }
let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
subject { described_class.new }
describe '#perform' do
it 'creates settings for all projects in range' do
namespaces.create!(id: 5, name: 'test1', path: 'test1')
namespaces.create!(id: 7, name: 'test2', path: 'test2')
namespaces.create!(id: 8, name: 'test3', path: 'test3')
subject.perform(5, 7)
expect(namespace_settings.all.pluck(:namespace_id)).to contain_exactly(5, 7)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillProjectSettings, schema: 20181228175414 do
let(:projects) { table(:projects) }
let(:project_settings) { table(:project_settings) }
let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
let(:project) { projects.create!(namespace_id: namespace.id) }
subject { described_class.new }
describe '#perform' do
it 'creates settings for all projects in range' do
projects.create!(id: 5, namespace_id: namespace.id)
projects.create!(id: 7, namespace_id: namespace.id)
projects.create!(id: 8, namespace_id: namespace.id)
subject.perform(5, 7)
expect(project_settings.all.pluck(:project_id)).to contain_exactly(5, 7)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillPushRulesIdInProjects, :migration, schema: 20181228175414 do
let(:push_rules) { table(:push_rules) }
let(:projects) { table(:projects) }
let(:project_settings) { table(:project_settings) }
let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
subject { described_class.new }
describe '#perform' do
it 'creates new project push_rules for all push rules in the range' do
project_1 = projects.create!(id: 1, namespace_id: namespace.id)
project_2 = projects.create!(id: 2, namespace_id: namespace.id)
project_3 = projects.create!(id: 3, namespace_id: namespace.id)
project_settings_1 = project_settings.create!(project_id: project_1.id)
project_settings_2 = project_settings.create!(project_id: project_2.id)
project_settings_3 = project_settings.create!(project_id: project_3.id)
push_rule_1 = push_rules.create!(id: 5, is_sample: false, project_id: project_1.id)
push_rule_2 = push_rules.create!(id: 6, is_sample: false, project_id: project_2.id)
push_rules.create!(id: 8, is_sample: false, project_id: 3)
subject.perform(5, 7)
expect(project_settings_1.reload.push_rule_id).to eq(push_rule_1.id)
expect(project_settings_2.reload.push_rule_id).to eq(push_rule_2.id)
expect(project_settings_3.reload.push_rule_id).to be_nil
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::FixPromotedEpicsDiscussionIds, schema: 20181228175414 do
let(:namespaces) { table(:namespaces) }
let(:users) { table(:users) }
let(:epics) { table(:epics) }
let(:notes) { table(:notes) }
let(:user) { users.create!(email: 'test@example.com', projects_limit: 100, username: 'test') }
let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
let(:epic1) { epics.create!(id: 1, author_id: user.id, iid: 1, group_id: namespace.id, title: 'Epic with discussion', title_html: 'Epic with discussion') }
def create_note(discussion_id)
notes.create!(note: 'note comment',
noteable_id: epic1.id,
noteable_type: 'Epic',
discussion_id: discussion_id)
end
def expect_valid_discussion_id(id)
expect(id).to match(/\A\h{40}\z/)
end
describe '#perform with batch of discussion ids' do
it 'updates discussion ids' do
note1 = create_note('00000000')
note2 = create_note('00000000')
note3 = create_note('10000000')
subject.perform(%w(00000000 10000000))
expect_valid_discussion_id(note1.reload.discussion_id)
expect_valid_discussion_id(note2.reload.discussion_id)
expect_valid_discussion_id(note3.reload.discussion_id)
expect(note1.discussion_id).to eq(note2.discussion_id)
expect(note1.discussion_id).not_to eq(note3.discussion_id)
end
it 'skips notes with discussion id not in range' do
note4 = create_note('20000000')
subject.perform(%w(00000000 10000000))
expect(note4.reload.discussion_id).to eq('20000000')
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::FixUserNamespaceNames, schema: 20181228175414 do
let(:namespaces) { table(:namespaces) }
let(:users) { table(:users) }
let(:user) { users.create!(name: "The user's full name", projects_limit: 10, username: 'not-null', email: '1') }
context 'updating the namespace names' do
it 'updates a user namespace within range' do
user2 = users.create!(name: "Other user's full name", projects_limit: 10, username: 'also-not-null', email: '2')
user_namespace1 = namespaces.create!(
id: 2,
owner_id: user.id,
name: "Should be the user's name",
path: user.username
)
user_namespace2 = namespaces.create!(
id: 3,
owner_id: user2.id,
name: "Should also be the user's name",
path: user.username
)
described_class.new.perform(1, 5)
expect(user_namespace1.reload.name).to eq("The user's full name")
expect(user_namespace2.reload.name).to eq("Other user's full name")
end
it 'does not update namespaces out of range' do
user_namespace = namespaces.create!(
id: 6,
owner_id: user.id,
name: "Should be the user's name",
path: user.username
)
expect { described_class.new.perform(1, 5) }
.not_to change { user_namespace.reload.name }
end
it 'does not update groups owned by the users' do
user_group = namespaces.create!(
id: 2,
owner_id: user.id,
name: 'A group name',
path: 'the-path',
type: 'Group'
)
expect { described_class.new.perform(1, 5) }
.not_to change { user_group.reload.name }
end
end
context 'namespace route names' do
let(:routes) { table(:routes) }
let(:namespace) do
namespaces.create!(
id: 2,
owner_id: user.id,
name: "Will be updated to the user's name",
path: user.username
)
end
it "updates the route name if it didn't match the namespace" do
route = routes.create!(path: namespace.path, name: 'Incorrect name', source_type: 'Namespace', source_id: namespace.id)
described_class.new.perform(1, 5)
expect(route.reload.name).to eq("The user's full name")
end
it 'updates the route name if it was nil' do
route = routes.create!(path: namespace.path, name: nil, source_type: 'Namespace', source_id: namespace.id)
described_class.new.perform(1, 5)
expect(route.reload.name).to eq("The user's full name")
end
it "doesn't update group routes" do
route = routes.create!(path: 'group-path', name: 'Group name', source_type: 'Group', source_id: namespace.id)
expect { described_class.new.perform(1, 5) }
.not_to change { route.reload.name }
end
it "doesn't touch routes for namespaces out of range" do
user_namespace = namespaces.create!(
id: 6,
owner_id: user.id,
name: "Should be the user's name",
path: user.username
)
expect { described_class.new.perform(1, 5) }
.not_to change { user_namespace.reload.name }
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::FixUserProjectRouteNames, schema: 20181228175414 do
let(:namespaces) { table(:namespaces) }
let(:users) { table(:users) }
let(:routes) { table(:routes) }
let(:projects) { table(:projects) }
let(:user) { users.create!(name: "The user's full name", projects_limit: 10, username: 'not-null', email: '1') }
let(:namespace) do
namespaces.create!(
owner_id: user.id,
name: "Should eventually be the user's name",
path: user.username
)
end
let(:project) do
projects.create!(namespace_id: namespace.id, name: 'Project Name')
end
it "updates the route for a project if it did not match the user's name" do
route = routes.create!(
id: 1,
path: "#{user.username}/#{project.path}",
source_id: project.id,
source_type: 'Project',
name: 'Completely wrong'
)
described_class.new.perform(1, 5)
expect(route.reload.name).to eq("The user's full name / Project Name")
end
it 'updates the route for a project if the name was nil' do
route = routes.create!(
id: 1,
path: "#{user.username}/#{project.path}",
source_id: project.id,
source_type: 'Project',
name: nil
)
described_class.new.perform(1, 5)
expect(route.reload.name).to eq("The user's full name / Project Name")
end
it 'does not update routes that are out of the range' do
route = routes.create!(
id: 6,
path: "#{user.username}/#{project.path}",
source_id: project.id,
source_type: 'Project',
name: 'Completely wrong'
)
expect { described_class.new.perform(1, 5) }
.not_to change { route.reload.name }
end
it 'does not update routes for projects in groups owned by the user' do
group = namespaces.create!(
owner_id: user.id,
name: 'A group',
path: 'a-path',
type: ''
)
project = projects.create!(namespace_id: group.id, name: 'Project Name')
route = routes.create!(
id: 1,
path: "#{group.path}/#{project.path}",
source_id: project.id,
source_type: 'Project',
name: 'Completely wrong'
)
expect { described_class.new.perform(1, 5) }
.not_to change { route.reload.name }
end
it 'does not update routes for namespaces' do
route = routes.create!(
id: 1,
path: namespace.path,
source_id: namespace.id,
source_type: 'Namespace',
name: 'Completely wrong'
)
expect { described_class.new.perform(1, 5) }
.not_to change { route.reload.name }
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::LinkLfsObjectsProjects, :migration, schema: 20181228175414 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:fork_networks) { table(:fork_networks) }
let(:fork_network_members) { table(:fork_network_members) }
let(:lfs_objects) { table(:lfs_objects) }
let(:lfs_objects_projects) { table(:lfs_objects_projects) }
let(:namespace) { namespaces.create!(name: 'GitLab', path: 'gitlab') }
let(:fork_network) { fork_networks.create!(root_project_id: source_project.id) }
let(:another_fork_network) { fork_networks.create!(root_project_id: another_source_project.id) }
let(:source_project) { projects.create!(namespace_id: namespace.id) }
let(:another_source_project) { projects.create!(namespace_id: namespace.id) }
let(:project) { projects.create!(namespace_id: namespace.id) }
let(:another_project) { projects.create!(namespace_id: namespace.id) }
let(:partially_linked_project) { projects.create!(namespace_id: namespace.id) }
let(:fully_linked_project) { projects.create!(namespace_id: namespace.id) }
let(:lfs_object) { lfs_objects.create!(oid: 'abc123', size: 100) }
let(:another_lfs_object) { lfs_objects.create!(oid: 'def456', size: 200) }
let!(:source_project_lop_1) do
lfs_objects_projects.create!(
lfs_object_id: lfs_object.id,
project_id: source_project.id
)
end
let!(:source_project_lop_2) do
lfs_objects_projects.create!(
lfs_object_id: another_lfs_object.id,
project_id: source_project.id
)
end
let!(:another_source_project_lop_1) do
lfs_objects_projects.create!(
lfs_object_id: lfs_object.id,
project_id: another_source_project.id
)
end
let!(:another_source_project_lop_2) do
lfs_objects_projects.create!(
lfs_object_id: another_lfs_object.id,
project_id: another_source_project.id
)
end
before do
stub_const("#{described_class}::BATCH_SIZE", 2)
# Create links between projects
fork_network_members.create!(fork_network_id: fork_network.id, project_id: source_project.id, forked_from_project_id: nil)
[project, partially_linked_project, fully_linked_project].each do |p|
fork_network_members.create!(
fork_network_id: fork_network.id,
project_id: p.id,
forked_from_project_id: fork_network.root_project_id
)
end
fork_network_members.create!(fork_network_id: another_fork_network.id, project_id: another_source_project.id, forked_from_project_id: nil)
fork_network_members.create!(fork_network_id: another_fork_network.id, project_id: another_project.id, forked_from_project_id: another_fork_network.root_project_id)
# Links LFS objects to some projects
lfs_objects_projects.create!(lfs_object_id: lfs_object.id, project_id: fully_linked_project.id)
lfs_objects_projects.create!(lfs_object_id: another_lfs_object.id, project_id: fully_linked_project.id)
lfs_objects_projects.create!(lfs_object_id: lfs_object.id, project_id: partially_linked_project.id)
end
context 'when there are LFS objects to be linked' do
it 'creates LfsObjectsProject records for forks based on the specified range of LfsObjectProject id' do
expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |logger|
expect(logger).to receive(:info).exactly(4).times
end
expect { subject.perform(source_project_lop_1.id, another_source_project_lop_2.id) }.to change { lfs_objects_projects.count }.by(5)
expect(lfs_object_ids_for(project)).to match_array(lfs_object_ids_for(source_project))
expect(lfs_object_ids_for(another_project)).to match_array(lfs_object_ids_for(another_source_project))
expect(lfs_object_ids_for(partially_linked_project)).to match_array(lfs_object_ids_for(source_project))
expect { subject.perform(source_project_lop_1.id, another_source_project_lop_2.id) }.not_to change { lfs_objects_projects.count }
end
end
context 'when there are no LFS objects to be linked' do
before do
# Links LFS objects to all projects
projects.all.each do |p|
lfs_objects_projects.create!(lfs_object_id: lfs_object.id, project_id: p.id)
lfs_objects_projects.create!(lfs_object_id: another_lfs_object.id, project_id: p.id)
end
end
it 'does not create LfsObjectProject records' do
expect { subject.perform(source_project_lop_1.id, another_source_project_lop_2.id) }
.not_to change { lfs_objects_projects.count }
end
end
def lfs_object_ids_for(project)
lfs_objects_projects.where(project_id: project.id).pluck(:lfs_object_id)
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::MigrateFingerprintSha256WithinKeys, schema: 20181228175414 do
subject(:fingerprint_migrator) { described_class.new }
let(:key_table) { table(:keys) }
before do
generate_fingerprints!
end
it 'correctly creates a sha256 fingerprint for a key' do
key_1 = Key.find(1017)
key_2 = Key.find(1027)
expect(key_1.fingerprint_md5).to eq('ba:81:59:68:d7:6c:cd:02:02:bf:6a:9b:55:4e:af:d1')
expect(key_1.fingerprint_sha256).to be_nil
expect(key_2.fingerprint_md5).to eq('39:e3:64:a6:24:ea:45:a2:8c:55:2a:e9:4d:4f:1f:b4')
expect(key_2.fingerprint_sha256).to be_nil
query_count = ActiveRecord::QueryRecorder.new do
fingerprint_migrator.perform(1, 10000)
end.count
expect(query_count).to eq(8)
key_1.reload
key_2.reload
expect(key_1.fingerprint_md5).to eq('ba:81:59:68:d7:6c:cd:02:02:bf:6a:9b:55:4e:af:d1')
expect(key_1.fingerprint_sha256).to eq('nUhzNyftwADy8AH3wFY31tAKs7HufskYTte2aXo/lCg')
expect(key_2.fingerprint_md5).to eq('39:e3:64:a6:24:ea:45:a2:8c:55:2a:e9:4d:4f:1f:b4')
expect(key_2.fingerprint_sha256).to eq('zMNbLekgdjtcgDv8VSC0z5lpdACMG3Q4PUoIz5+H2jM')
end
context 'with invalid keys' do
before do
key = Key.find(1017)
# double space after "ssh-rsa" leads to a
# OpenSSL::PKey::PKeyError in Net::SSH::KeyFactory.load_data_public_key
key.update_column(:key, key.key.gsub('ssh-rsa ', 'ssh-rsa  '))
end
it 'ignores errors and does not set the fingerprint' do
fingerprint_migrator.perform(1, 10000)
key_1 = Key.find(1017)
key_2 = Key.find(1027)
expect(key_1.fingerprint_sha256).to be_nil
expect(key_2.fingerprint_sha256).not_to be_nil
end
end
it 'migrates all keys' do
expect(Key.where(fingerprint_sha256: nil).count).to eq(Key.all.count)
fingerprint_migrator.perform(1, 10000)
expect(Key.where(fingerprint_sha256: nil).count).to eq(0)
end
def generate_fingerprints!
values = ""
(1000..2000).to_a.each do |record|
key = base_key_for(record)
fingerprint = fingerprint_for(key)
values += "(#{record}, #{record}, 'test-#{record}', '#{key}', '#{fingerprint}'),"
end
update_query = <<~SQL
INSERT INTO keys ( id, user_id, title, key, fingerprint )
VALUES
#{values.chomp(",")};
SQL
ActiveRecord::Base.connection.execute(update_query)
end
def base_key_for(record)
'ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIEAiPWx6WM4lhHNedGfBpPJNPpZ7yKu+dnn1SJejgt0000k6YjzGGphH2TUxwKzxcKDKKezwkpfnxPkSMkuEspGRt/aZZ9wa++Oi7Qkr8prgHc4soW6NUlfDzpvZK2H5E7eQaSeP3SAwGmQKUFHCddNaP0L+hM7zhFNzjFvpaMgJw0='
.gsub("0000", "%04d" % (record - 1)) # generate arbitrary keys with placeholder 0000 within the key above
end
def fingerprint_for(key)
Gitlab::SSHPublicKey.new(key).fingerprint("md5")
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::MigratePagesMetadata, schema: 20181228175414 do
let(:projects) { table(:projects) }
subject(:migrate_pages_metadata) { described_class.new }
describe '#perform' do
let(:namespaces) { table(:namespaces) }
let(:builds) { table(:ci_builds) }
let(:pages_metadata) { table(:project_pages_metadata) }
it 'marks specified projects with successful pages deployment' do
namespace = namespaces.create!(name: 'gitlab', path: 'gitlab-org')
not_migrated_with_pages = projects.create!(namespace_id: namespace.id, name: 'Not Migrated With Pages')
builds.create!(project_id: not_migrated_with_pages.id, type: 'GenericCommitStatus', status: 'success', stage: 'deploy', name: 'pages:deploy')
migrated = projects.create!(namespace_id: namespace.id, name: 'Migrated')
pages_metadata.create!(project_id: migrated.id, deployed: true)
not_migrated_no_pages = projects.create!(namespace_id: namespace.id, name: 'Not Migrated No Pages')
project_not_in_relation_scope = projects.create!(namespace_id: namespace.id, name: 'Other')
ids = [not_migrated_no_pages.id, not_migrated_with_pages.id, migrated.id]
migrate_pages_metadata.perform(ids.min, ids.max)
expect(pages_metadata.find_by_project_id(not_migrated_with_pages.id).deployed).to eq(true)
expect(pages_metadata.find_by_project_id(not_migrated_no_pages.id).deployed).to eq(false)
expect(pages_metadata.find_by_project_id(migrated.id).deployed).to eq(true)
expect(pages_metadata.find_by_project_id(project_not_in_relation_scope.id)).to be_nil
end
end
end
# frozen_string_literal: true
require 'spec_helper'
# rubocop:disable RSpec/FactoriesInMigrationSpecs
RSpec.describe Gitlab::BackgroundMigration::MigrateToHashedStorage, :sidekiq, :redis do
let(:migrator) { Gitlab::HashedStorage::Migrator.new }
subject(:background_migration) { described_class.new }
describe '#perform' do
let!(:project) { create(:project, :empty_repo, :legacy_storage) }
context 'with pending rollback' do
it 'aborts rollback operation' do
Sidekiq::Testing.disable! do
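# With testing disabled, the push below goes to the real (test) Redis
# queue, so rollback_pending? can detect the pending job.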
Sidekiq::Client.push(
'queue' => ::HashedStorage::ProjectRollbackWorker.queue,
'class' => ::HashedStorage::ProjectRollbackWorker,
'args' => [project.id]
)
expect { background_migration.perform }.to change { migrator.rollback_pending? }.from(true).to(false)
end
end
end
it 'enqueues legacy projects to be migrated' do
Sidekiq::Testing.fake! do
expect { background_migration.perform }.to change { Sidekiq::Queues[::HashedStorage::MigratorWorker.queue].size }.by(1)
end
end
context 'when executing all jobs' do
it 'migrates legacy projects' do
Sidekiq::Testing.inline! do
expect { background_migration.perform }.to change { project.reload.legacy_storage? }.from(true).to(false)
end
end
end
end
end
# rubocop:enable RSpec/FactoriesInMigrationSpecs
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::PopulateCanonicalEmails, :migration, schema: 20181228175414 do
let(:migration) { described_class.new }
let_it_be(:users_table) { table(:users) }
let_it_be(:user_canonical_emails_table) { table(:user_canonical_emails) }
let_it_be(:users) { users_table.all }
let_it_be(:user_canonical_emails) { user_canonical_emails_table.all }
subject { migration.perform(1, 1) }
describe 'gmail users' do
using RSpec::Parameterized::TableSyntax
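# Gmail ignores "." and anything from "+" onwards in the local part, so
# these variants all canonicalize to the same address.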
where(:original_email, :expected_result) do
'legitimateuser@gmail.com' | 'legitimateuser@gmail.com'
'userwithplus+somestuff@gmail.com' | 'userwithplus@gmail.com'
'user.with.periods@gmail.com' | 'userwithperiods@gmail.com'
'user.with.periods.and.plus+someotherstuff@gmail.com' | 'userwithperiodsandplus@gmail.com'
end
with_them do
it 'generates the correct canonical email' do
create_user(email: original_email, id: 1)
subject
result = canonical_emails
expect(result.count).to eq 1
expect(result.first).to match({
'user_id' => 1,
'canonical_email' => expected_result
})
end
end
end
describe 'non gmail.com domain users' do
%w[
legitimateuser@somedomain.com
userwithplus+somestuff@other.com
user.with.periods@gmail.org
user.with.periods.and.plus+someotherstuff@orangmail.com
].each do |non_gmail_address|
it 'does not generate a canonical email' do
create_user(email: non_gmail_address, id: 1)
subject
expect(canonical_emails(user_id: 1).count).to eq 0
end
end
end
describe 'gracefully handles missing records' do
specify { expect { subject }.not_to raise_error }
end
describe 'gracefully handles existing records, some of which may have an already-existing identical canonical_email field' do
let_it_be(:user_one) { create_user(email: "example.user@gmail.com", id: 1) }
let_it_be(:user_two) { create_user(email: "exampleuser@gmail.com", id: 2) }
let_it_be(:user_email_one) { user_canonical_emails.create!(canonical_email: "exampleuser@gmail.com", user_id: user_one.id) }
subject { migration.perform(1, 2) }
it 'only creates one record' do
expect { subject }.to change { user_canonical_emails.count }.by(1)
end
end
def create_user(attributes)
default_attributes = {
projects_limit: 0
}
users.create!(default_attributes.merge!(attributes))
end
def canonical_emails(user_id: nil)
filter_by_id = user_id ? "WHERE user_id = #{user_id}" : ""
ApplicationRecord.connection.execute <<~SQL
SELECT canonical_email, user_id
FROM user_canonical_emails
#{filter_by_id};
SQL
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ::Gitlab::BackgroundMigration::PopulateDismissedStateForVulnerabilities, schema: 20181228175414 do
let(:users) { table(:users) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:vulnerabilities) { table(:vulnerabilities) }
let!(:namespace) { namespaces.create!(name: "foo", path: "bar") }
let!(:user) { users.create!(name: 'John Doe', email: 'test@example.com', projects_limit: 5) }
let!(:project) { projects.create!(namespace_id: namespace.id) }
let!(:vulnerability_params) do
{
project_id: project.id,
author_id: user.id,
title: 'Vulnerability',
severity: 5,
confidence: 5,
report_type: 5
}
end
let!(:vulnerability_1) { vulnerabilities.create!(vulnerability_params.merge(state: 1)) }
let!(:vulnerability_2) { vulnerabilities.create!(vulnerability_params.merge(state: 3)) }
describe '#perform' do
it 'changes state of vulnerability to dismissed' do
subject.perform(vulnerability_1.id, vulnerability_2.id)
expect(vulnerability_1.reload.state).to eq(2)
expect(vulnerability_2.reload.state).to eq(2)
end
it 'populates missing dismissal information' do
expect_next_instance_of(::Gitlab::BackgroundMigration::PopulateMissingVulnerabilityDismissalInformation) do |migration|
expect(migration).to receive(:perform).with(vulnerability_1.id, vulnerability_2.id)
end
subject.perform(vulnerability_1.id, vulnerability_2.id)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::PopulateHasVulnerabilities, schema: 20181228175414 do
let(:users) { table(:users) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:project_settings) { table(:project_settings) }
let(:vulnerabilities) { table(:vulnerabilities) }
let(:user) { users.create!(name: 'test', email: 'test@example.com', projects_limit: 5) }
let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
let(:vulnerability_base_params) { { title: 'title', state: 2, severity: 0, confidence: 5, report_type: 2, author_id: user.id } }
let!(:project_1) { projects.create!(namespace_id: namespace.id, name: 'foo_1') }
let!(:project_2) { projects.create!(namespace_id: namespace.id, name: 'foo_2') }
let!(:project_3) { projects.create!(namespace_id: namespace.id, name: 'foo_3') }
before do
project_settings.create!(project_id: project_1.id)
vulnerabilities.create!(vulnerability_base_params.merge(project_id: project_1.id))
vulnerabilities.create!(vulnerability_base_params.merge(project_id: project_3.id))
allow(::Gitlab::BackgroundMigration::Logger).to receive_messages(info: true, error: true)
end
describe '#perform' do
it 'sets `has_vulnerabilities` attribute of project_settings' do
expect { subject.perform(project_1.id, project_3.id) }.to change { project_settings.count }.from(1).to(2)
.and change { project_settings.where(has_vulnerabilities: true).count }.from(0).to(2)
end
it 'writes info log message' do
subject.perform(project_1.id, project_3.id)
expect(::Gitlab::BackgroundMigration::Logger).to have_received(:info).with(migrator: described_class.name,
message: 'Projects has been processed to populate `has_vulnerabilities` information',
count: 2)
end
context 'when non-existing project_id is given' do
it 'populates only for the existing projects' do
expect { subject.perform(project_1.id, 0, project_3.id) }.to change { project_settings.count }.from(1).to(2)
.and change { project_settings.where(has_vulnerabilities: true).count }.from(0).to(2)
end
end
context 'when an error happens' do
before do
allow(described_class::ProjectSetting).to receive(:upsert_for).and_raise('foo')
end
it 'writes error log message' do
subject.perform(project_1.id, project_3.id)
expect(::Gitlab::BackgroundMigration::Logger).to have_received(:error).with(migrator: described_class.name,
message: 'foo',
project_ids: [project_1.id, project_3.id])
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::PopulateMergeRequestAssigneesTable, schema: 20181228175414 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:users) { table(:users) }
let(:user) { users.create!(email: 'test@example.com', projects_limit: 100, username: 'test') }
let(:user_2) { users.create!(email: 'test2@example.com', projects_limit: 100, username: 'test') }
let(:user_3) { users.create!(email: 'test3@example.com', projects_limit: 100, username: 'test') }
let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
let(:merge_requests) { table(:merge_requests) }
let(:merge_request_assignees) { table(:merge_request_assignees) }
def create_merge_request(id, params = {})
params.merge!(id: id,
target_project_id: project.id,
target_branch: 'master',
source_project_id: project.id,
source_branch: 'mr name',
title: "mr name#{id}")
merge_requests.create!(params)
end
before do
create_merge_request(2, assignee_id: user.id)
create_merge_request(3, assignee_id: user_2.id)
create_merge_request(4, assignee_id: user_3.id)
# Test filtering MRs without assignees
create_merge_request(5, assignee_id: nil)
# Test filtering already migrated row
merge_request_assignees.create!(merge_request_id: 2, user_id: user_3.id)
end
describe '#perform' do
it 'creates merge_request_assignees rows according to merge_requests' do
subject.perform(1, 4)
rows = merge_request_assignees.order(:id).map { |row| row.attributes.slice('merge_request_id', 'user_id') }
existing_rows = [
{ 'merge_request_id' => 2, 'user_id' => user_3.id }
]
created_rows = [
{ 'merge_request_id' => 3, 'user_id' => user_2.id },
{ 'merge_request_id' => 4, 'user_id' => user_3.id }
]
expected_rows = existing_rows + created_rows
expect(rows.size).to eq(expected_rows.size)
expected_rows.each do |expected_row|
expect(rows).to include(expected_row)
end
end
end
describe '#perform_all_sync' do
it 'executes perform for all merge requests in batches' do
expect(subject).to receive(:perform).with(2, 4).ordered
expect(subject).to receive(:perform).with(5, 5).ordered
subject.perform_all_sync(batch_size: 3)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::PopulateMissingVulnerabilityDismissalInformation, schema: 20181228175414 do
let(:users) { table(:users) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:vulnerabilities) { table(:vulnerabilities) }
let(:findings) { table(:vulnerability_occurrences) }
let(:scanners) { table(:vulnerability_scanners) }
let(:identifiers) { table(:vulnerability_identifiers) }
let(:feedback) { table(:vulnerability_feedback) }
let(:user) { users.create!(name: 'test', email: 'test@example.com', projects_limit: 5) }
let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
let(:vulnerability_1) { vulnerabilities.create!(title: 'title', state: 2, severity: 0, confidence: 5, report_type: 2, project_id: project.id, author_id: user.id) }
let(:vulnerability_2) { vulnerabilities.create!(title: 'title', state: 2, severity: 0, confidence: 5, report_type: 2, project_id: project.id, author_id: user.id) }
let(:scanner) { scanners.create!(project_id: project.id, external_id: 'foo', name: 'bar') }
let(:identifier) { identifiers.create!(project_id: project.id, fingerprint: 'foo', external_type: 'bar', external_id: 'zoo', name: 'identifier') }
before do
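# feedback_type: 0 is a dismissal; it matches the finding created below
# via the shared project_fingerprint.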
feedback.create!(feedback_type: 0,
category: 'sast',
project_fingerprint: '418291a26024a1445b23fe64de9380cdcdfd1fa8',
project_id: project.id,
author_id: user.id,
created_at: Time.current)
findings.create!(name: 'Finding',
report_type: 'sast',
project_fingerprint: Gitlab::Database::ShaAttribute.new.serialize('418291a26024a1445b23fe64de9380cdcdfd1fa8'),
location_fingerprint: 'bar',
severity: 1,
confidence: 1,
metadata_version: 1,
raw_metadata: '',
uuid: SecureRandom.uuid,
project_id: project.id,
vulnerability_id: vulnerability_1.id,
scanner_id: scanner.id,
primary_identifier_id: identifier.id)
allow(::Gitlab::BackgroundMigration::Logger).to receive_messages(info: true, warn: true, error: true)
end
describe '#perform' do
it 'updates the missing dismissal information of the vulnerability' do
expect { subject.perform(vulnerability_1.id, vulnerability_2.id) }.to change { vulnerability_1.reload.dismissed_at }.from(nil)
.and change { vulnerability_1.reload.dismissed_by_id }.from(nil).to(user.id)
end
it 'writes log messages' do
subject.perform(vulnerability_1.id, vulnerability_2.id)
expect(::Gitlab::BackgroundMigration::Logger).to have_received(:info).with(migrator: described_class.name,
message: 'Dismissal information has been copied',
count: 2)
expect(::Gitlab::BackgroundMigration::Logger).to have_received(:warn).with(migrator: described_class.name,
message: 'Could not update vulnerability!',
vulnerability_id: vulnerability_2.id)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::PopulatePersonalSnippetStatistics do
let(:file_name) { 'file_name.rb' }
let(:content) { 'content' }
let(:snippets) { table(:snippets) }
let(:snippet_repositories) { table(:snippet_repositories) }
let(:users) { table(:users) }
let(:namespaces) { table(:namespaces) }
let(:snippet_statistics) { table(:snippet_statistics) }
let(:namespace_statistics) { table(:namespace_root_storage_statistics) }
let(:routes) { table(:routes) }
let(:repo_size) { 123456 }
let(:expected_repo_size) { repo_size.megabytes }
let(:user1) { users.create!(id: 1, email: 'test@example.com', projects_limit: 100, username: 'test1') }
let(:user2) { users.create!(id: 2, email: 'test2@example.com', projects_limit: 100, username: 'test2') }
let!(:user1_namespace) { namespaces.create!(id: 1, name: 'user1', path: 'user1', owner_id: user1.id) }
let!(:user2_namespace) { namespaces.create!(id: 2, name: 'user2', path: 'user2', owner_id: user2.id) }
let(:user1_namespace_statistics) { namespace_statistics.find_by(namespace_id: user1_namespace.id) }
let(:user2_namespace_statistics) { namespace_statistics.find_by(namespace_id: user2_namespace.id) }
let(:ids) { snippets.pluck(:id) }
let(:migration) { described_class.new }
subject do
migration.perform(ids)
end
before do
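# Repository#size is reported in megabytes; statistics are stored in
# bytes, hence expected_repo_size = repo_size.megabytes above.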
allow_any_instance_of(Repository).to receive(:size).and_return(repo_size)
end
after do
snippets.all.each { |s| raw_repository(s).remove }
end
context 'with existing personal snippets' do
let!(:snippet1) { create_snippet(1, user1) }
let!(:snippet2) { create_snippet(2, user1) }
let!(:snippet3) { create_snippet(3, user2) }
let!(:snippet4) { create_snippet(4, user2) }
before do
create_snippet_statistics(2, 0)
create_snippet_statistics(4, 123)
end
it 'creates/updates all snippet_statistics' do
expect { subject }.to change { snippet_statistics.count }.from(2).to(4)
expect(snippet_statistics.pluck(:repository_size)).to be_all(expected_repo_size)
end
it 'creates/updates the associated namespace statistics' do
expect(migration).to receive(:update_namespace_statistics).twice.and_call_original
subject
stats = snippet_statistics.where(snippet_id: [snippet1, snippet2]).sum(:repository_size)
expect(user1_namespace_statistics.snippets_size).to eq stats
stats = snippet_statistics.where(snippet_id: [snippet3, snippet4]).sum(:repository_size)
expect(user2_namespace_statistics.snippets_size).to eq stats
end
context 'when an error is raised when updating a namespace statistics' do
it 'logs the error and continue execution' do
expect_next_instance_of(Namespaces::StatisticsRefresherService) do |instance|
expect(instance).to receive(:execute).with(Namespace.find(user1_namespace.id)).and_raise('Error')
end
expect_next_instance_of(Namespaces::StatisticsRefresherService) do |instance|
expect(instance).to receive(:execute).and_call_original
end
expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
expect(instance).to receive(:error).with(message: /Error updating statistics for namespace/).once
end
subject
expect(user1_namespace_statistics).to be_nil
stats = snippet_statistics.where(snippet_id: [snippet3, snippet4]).sum(:repository_size)
expect(user2_namespace_statistics.snippets_size).to eq stats
end
end
end
context 'when a snippet repository is empty' do
let!(:snippet1) { create_snippet(1, user1, with_repo: false) }
let!(:snippet2) { create_snippet(2, user1) }
it 'logs error and continues execution' do
expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
expect(instance).to receive(:error).with(message: /Invalid snippet repository/).once
end
subject
expect(snippet_statistics.find_by(snippet_id: snippet1.id)).to be_nil
expect(user1_namespace_statistics.snippets_size).to eq expected_repo_size
end
end
def create_snippet(id, author, with_repo: true)
snippets.create!(id: id, type: 'PersonalSnippet', author_id: author.id, file_name: file_name, content: content).tap do |snippet|
if with_repo
allow(snippet).to receive(:disk_path).and_return(disk_path(snippet))
raw_repository(snippet).create_repository
TestEnv.copy_repo(snippet,
bare_repo: TestEnv.factory_repo_path_bare,
refs: TestEnv::BRANCH_SHA)
end
end
end
def create_snippet_statistics(snippet_id, repository_size = 0)
snippet_statistics.create!(snippet_id: snippet_id, repository_size: repository_size)
end
def raw_repository(snippet)
Gitlab::Git::Repository.new('default',
"#{disk_path(snippet)}.git",
Gitlab::GlRepository::SNIPPET.identifier_for_container(snippet),
"@snippets/#{snippet.id}")
end
def hashed_repository(snippet)
Storage::Hashed.new(snippet, prefix: '@snippets')
end
def disk_path(snippet)
hashed_repository(snippet).disk_path
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::PopulateProjectSnippetStatistics do
let(:file_name) { 'file_name.rb' }
let(:content) { 'content' }
let(:snippets) { table(:snippets) }
let(:snippet_repositories) { table(:snippet_repositories) }
let(:users) { table(:users) }
let(:namespaces) { table(:namespaces) }
let(:snippet_statistics) { table(:snippet_statistics) }
let(:project_statistics) { table(:project_statistics) }
let(:projects) { table(:projects) }
let(:namespace_statistics) { table(:namespace_root_storage_statistics) }
let(:routes) { table(:routes) }
let(:repo_size) { 123456 }
let(:expected_repo_size) { repo_size.megabytes }
let(:user) { users.create!(id: 1, email: 'test@example.com', projects_limit: 100, username: 'test') }
let(:group) { namespaces.create!(id: 10, type: 'Group', name: 'group1', path: 'group1') }
let(:user_namespace) { namespaces.create!(id: 20, name: 'user', path: 'user', owner_id: user.id) }
let(:project1) { create_project(1, 'test', group) }
let(:project2) { create_project(2, 'test1', user_namespace) }
let(:project3) { create_project(3, 'test2', group) }
let!(:project_stats1) { create_project_statistics(project1) }
let!(:project_stats2) { create_project_statistics(project2) }
let!(:project_stats3) { create_project_statistics(project3) }
let(:ids) { snippets.pluck(:id) }
let(:migration) { described_class.new }
subject do
migration.perform(ids)
project_stats1.reload if project_stats1.persisted?
project_stats2.reload if project_stats2.persisted?
project_stats3.reload if project_stats3.persisted?
end
before do
allow_any_instance_of(Repository).to receive(:size).and_return(repo_size)
end
after do
snippets.all.each { |s| raw_repository(s).remove }
end
context 'with existing user and group snippets' do
let!(:snippet1) { create_snippet(1, project1) }
let!(:snippet2) { create_snippet(2, project1) }
let!(:snippet3) { create_snippet(3, project2) }
let!(:snippet4) { create_snippet(4, project2) }
let!(:snippet5) { create_snippet(5, project3) }
before do
create_snippet_statistics(2, 0)
create_snippet_statistics(4, 123)
end
it 'creates/updates all snippet_statistics' do
expect(snippet_statistics.count).to eq 2
subject
expect(snippet_statistics.count).to eq 5
snippet_statistics.all.each do |stat|
expect(stat.repository_size).to eq expected_repo_size
end
end
it 'updates associated snippet project statistics' do
expect(project_stats1.snippets_size).to be_nil
expect(project_stats2.snippets_size).to be_nil
subject
snippets_size = snippet_statistics.where(snippet_id: [snippet1.id, snippet2.id]).sum(:repository_size)
expect(project_stats1.snippets_size).to eq snippets_size
snippets_size = snippet_statistics.where(snippet_id: [snippet3.id, snippet4.id]).sum(:repository_size)
expect(project_stats2.snippets_size).to eq snippets_size
snippets_size = snippet_statistics.where(snippet_id: snippet5.id).sum(:repository_size)
expect(project_stats3.snippets_size).to eq snippets_size
end
it 'forces the project statistics refresh' do
expect(migration).to receive(:update_project_statistics).exactly(3).times
subject
end
it 'creates/updates the associated namespace statistics' do
expect(migration).to receive(:update_namespace_statistics).twice.and_call_original
subject
expect(namespace_statistics.find_by(namespace_id: group.id).snippets_size).to eq project_stats1.snippets_size + project_stats3.snippets_size
expect(namespace_statistics.find_by(namespace_id: user_namespace.id).snippets_size).to eq project_stats2.snippets_size
end
context 'when the project statistics record does not exist' do
it 'does not raise any error' do
project_stats3.delete
subject
expect(namespace_statistics.find_by(namespace_id: group.id).snippets_size).to eq project_stats1.snippets_size
expect(namespace_statistics.find_by(namespace_id: user_namespace.id).snippets_size).to eq project_stats2.snippets_size
end
end
context 'when an error is raised while updating project statistics' do
it 'logs the error and continues execution' do
expect(migration).to receive(:update_project_statistics).with(Project.find(project1.id)).and_raise('Error')
expect(migration).to receive(:update_project_statistics).with(Project.find(project2.id)).and_call_original
expect(migration).to receive(:update_project_statistics).with(Project.find(project3.id)).and_call_original
expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
expect(instance).to receive(:error).with(message: /Error updating statistics for project #{project1.id}/).once
end
subject
expect(project_stats2.snippets_size).not_to be_nil
expect(project_stats3.snippets_size).not_to be_nil
end
end
context 'when an error is raised while updating namespace statistics' do
it 'logs the error and continues execution' do
expect(migration).to receive(:update_namespace_statistics).with(Group.find(group.id)).and_raise('Error')
expect(migration).to receive(:update_namespace_statistics).with(Namespace.find(user_namespace.id)).and_call_original
expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
expect(instance).to receive(:error).with(message: /Error updating statistics for namespace/).once
end
subject
expect(namespace_statistics.find_by(namespace_id: user_namespace.id).snippets_size).to eq project_stats2.snippets_size
end
end
end
context 'when project snippet is in a subgroup' do
let(:subgroup) { namespaces.create!(id: 30, type: 'Group', name: 'subgroup', path: 'subgroup', parent_id: group.id) }
let(:project1) { create_project(1, 'test', subgroup, "#{group.path}/#{subgroup.path}/test") }
let!(:snippet1) { create_snippet(1, project1) }
it 'updates the root namespace statistics' do
subject
expect(snippet_statistics.count).to eq 1
expect(project_stats1.snippets_size).to eq snippet_statistics.first.repository_size
expect(namespace_statistics.find_by(namespace_id: subgroup.id)).to be_nil
expect(namespace_statistics.find_by(namespace_id: group.id).snippets_size).to eq project_stats1.snippets_size
end
end
context 'when a snippet repository is empty' do
let!(:snippet1) { create_snippet(1, project1, with_repo: false) }
let!(:snippet2) { create_snippet(2, project1) }
it 'logs error and continues execution' do
expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
expect(instance).to receive(:error).with(message: /Invalid snippet repository/).once
end
subject
expect(snippet_statistics.find_by(snippet_id: snippet1.id)).to be_nil
expect(project_stats1.snippets_size).to eq snippet_statistics.find(snippet2.id).repository_size
end
end
def create_snippet(id, project, with_repo: true)
snippets.create!(id: id, type: 'ProjectSnippet', project_id: project.id, author_id: user.id, file_name: file_name, content: content).tap do |snippet|
if with_repo
allow(snippet).to receive(:disk_path).and_return(disk_path(snippet))
raw_repository(snippet).create_repository
TestEnv.copy_repo(snippet,
bare_repo: TestEnv.factory_repo_path_bare,
refs: TestEnv::BRANCH_SHA)
end
end
end
def create_project(id, name, namespace, path = nil)
projects.create!(id: id, name: name, path: name.downcase.gsub(/\s/, '_'), namespace_id: namespace.id).tap do |project|
path ||= "#{namespace.path}/#{project.path}"
routes.create!(id: id, source_type: 'Project', source_id: project.id, path: path)
end
end
def create_snippet_statistics(snippet_id, repository_size = 0)
snippet_statistics.create!(snippet_id: snippet_id, repository_size: repository_size)
end
def create_project_statistics(project, snippets_size = nil)
project_statistics.create!(id: project.id, project_id: project.id, namespace_id: project.namespace_id, snippets_size: snippets_size)
end
def raw_repository(snippet)
Gitlab::Git::Repository.new('default',
"#{disk_path(snippet)}.git",
Gitlab::GlRepository::SNIPPET.identifier_for_container(snippet),
"@snippets/#{snippet.id}")
end
def hashed_repository(snippet)
Storage::Hashed.new(snippet, prefix: '@snippets')
end
def disk_path(snippet)
hashed_repository(snippet).disk_path
end
end
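# As with the personal-snippet spec above, the migration is not included in
# this diff. A hedged sketch of the flow these examples pin down: a per-snippet
# refresh, one statistics update per project, one per distinct root namespace,
# with errors logged and skipped. refresh_snippet_statistics and logger are
# hypothetical helpers, and Project#root_namespace is assumed here:
module Gitlab
  module BackgroundMigration
    class PopulateProjectSnippetStatistics
      def perform(snippet_ids)
        snippets = ProjectSnippet.where(id: snippet_ids).to_a

        snippets.group_by(&:project).each do |project, project_snippets|
          project_snippets.each { |snippet| refresh_snippet_statistics(snippet) }
          update_project_statistics(project)
        rescue StandardError => e
          logger.error(message: "Error updating statistics for project #{project.id}: #{e.message}")
        end

        # One refresh per distinct root namespace, so sibling projects in the
        # same group hierarchy are rolled up together.
        snippets.map { |s| s.project.root_namespace }.uniq.each do |namespace|
          update_namespace_statistics(namespace)
        rescue StandardError => e
          logger.error(message: "Error updating statistics for namespace #{namespace.id}: #{e.message}")
        end
      end
    end
  end
end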
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::RecalculateProjectAuthorizationsWithMinMaxUserId, schema: 20181228175414 do
let(:users_table) { table(:users) }
let(:min) { 1 }
let(:max) { 5 }
before do
min.upto(max) do |i|
users_table.create!(id: i, email: "user#{i}@example.com", projects_limit: 10)
end
end
describe '#perform' do
it 'initializes Users::RefreshAuthorizedProjectsService with correct users' do
min.upto(max) do |i|
user = User.find(i)
expect(Users::RefreshAuthorizedProjectsService).to(
receive(:new).with(user, any_args).and_call_original)
end
described_class.new.perform(min, max)
end
it 'executes Users::RefreshAuthorizedProjectsService' do
expected_call_counts = max - min + 1
service = instance_double(Users::RefreshAuthorizedProjectsService)
expect(Users::RefreshAuthorizedProjectsService).to(
receive(:new).exactly(expected_call_counts).times.and_return(service))
expect(service).to receive(:execute).exactly(expected_call_counts).times
described_class.new.perform(min, max)
end
end
end
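# The spec above stubs Users::RefreshAuthorizedProjectsService, so a minimal
# sketch of the migration it drives could be as simple as the following; the
# closed user-id range batching is an assumption, and everything else is the
# service's job:
module Gitlab
  module BackgroundMigration
    class RecalculateProjectAuthorizationsWithMinMaxUserId
      def perform(min_user_id, max_user_id)
        User.where(id: min_user_id..max_user_id).find_each do |user|
          # Recomputes and persists the user's project_authorizations rows.
          Users::RefreshAuthorizedProjectsService.new(user).execute
        end
      end
    end
  end
end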
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::ReplaceBlockedByLinks, schema: 20181228175414 do
let(:namespace) { table(:namespaces).create!(name: 'gitlab', path: 'gitlab-org') }
let(:project) { table(:projects).create!(namespace_id: namespace.id, name: 'gitlab') }
let(:issue1) { table(:issues).create!(project_id: project.id, title: 'a') }
let(:issue2) { table(:issues).create!(project_id: project.id, title: 'b') }
let(:issue3) { table(:issues).create!(project_id: project.id, title: 'c') }
let(:issue_links) { table(:issue_links) }
let!(:blocked_link1) { issue_links.create!(source_id: issue2.id, target_id: issue1.id, link_type: 2) }
let!(:opposite_link1) { issue_links.create!(source_id: issue1.id, target_id: issue2.id, link_type: 1) }
let!(:blocked_link2) { issue_links.create!(source_id: issue1.id, target_id: issue3.id, link_type: 2) }
let!(:opposite_link2) { issue_links.create!(source_id: issue3.id, target_id: issue1.id, link_type: 0) }
let!(:nochange_link) { issue_links.create!(source_id: issue2.id, target_id: issue3.id, link_type: 1) }
subject { described_class.new.perform(issue_links.minimum(:id), issue_links.maximum(:id)) }
it 'deletes any opposite relations' do
subject
expect(issue_links.ids).to match_array([nochange_link.id, blocked_link1.id, blocked_link2.id])
end
it 'ignores issue links other than blocked_by' do
subject
expect(nochange_link.reload.link_type).to eq(1)
end
it 'updates blocked_by issue links' do
subject
expect(blocked_link1.reload.link_type).to eq(1)
expect(blocked_link1.source_id).to eq(issue1.id)
expect(blocked_link1.target_id).to eq(issue2.id)
expect(blocked_link2.reload.link_type).to eq(1)
expect(blocked_link2.source_id).to eq(issue3.id)
expect(blocked_link2.target_id).to eq(issue1.id)
end
end
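# The migration class is not included in this diff. Given the three examples
# above, a minimal sketch consistent with them: 'blocked by' links (link_type
# 2) are inverted into 'blocks' links (link_type 1), deleting any pre-existing
# opposite rows first. The model wrapper and row-by-row batching are
# assumptions:
module Gitlab
  module BackgroundMigration
    class ReplaceBlockedByLinks
      class IssueLink < ActiveRecord::Base
        self.table_name = 'issue_links'
      end

      def perform(start_id, stop_id)
        IssueLink.where(id: start_id..stop_id, link_type: 2).find_each do |link|
          # Drop any pre-existing opposite-direction row, so the swap below
          # cannot collide on the (source_id, target_id) pair.
          IssueLink.where(source_id: link.target_id, target_id: link.source_id).delete_all

          # Invert the direction and relabel 'blocked by' (2) as 'blocks' (1).
          link.update!(source_id: link.target_id, target_id: link.source_id, link_type: 1)
        end
      end
    end
  end
end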
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::ResetMergeStatus do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
let(:merge_requests) { table(:merge_requests) }
def create_merge_request(id, extra_params = {})
params = {
id: id,
target_project_id: project.id,
target_branch: 'master',
source_project_id: project.id,
source_branch: 'mr name',
title: "mr name#{id}"
}.merge(extra_params)
merge_requests.create!(params)
end
it 'correctly updates opened mergeable MRs to unchecked' do
create_merge_request(1, state_id: MergeRequest.available_states[:opened], merge_status: 'can_be_merged')
create_merge_request(2, state_id: MergeRequest.available_states[:opened], merge_status: 'can_be_merged')
create_merge_request(3, state_id: MergeRequest.available_states[:opened], merge_status: 'can_be_merged')
create_merge_request(4, state_id: MergeRequest.available_states[:merged], merge_status: 'can_be_merged')
create_merge_request(5, state_id: MergeRequest.available_states[:opened], merge_status: 'cannot_be_merged')
subject.perform(1, 5)
expected_rows = [
{ id: 1, state_id: MergeRequest.available_states[:opened], merge_status: 'unchecked' },
{ id: 2, state_id: MergeRequest.available_states[:opened], merge_status: 'unchecked' },
{ id: 3, state_id: MergeRequest.available_states[:opened], merge_status: 'unchecked' },
{ id: 4, state_id: MergeRequest.available_states[:merged], merge_status: 'can_be_merged' },
{ id: 5, state_id: MergeRequest.available_states[:opened], merge_status: 'cannot_be_merged' }
]
rows = merge_requests.order(:id).map do |row|
row.attributes.slice('id', 'state_id', 'merge_status').symbolize_keys
end
expect(rows).to eq(expected_rows)
end
end
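# A hedged sketch of the behaviour the table above verifies: only opened MRs
# currently marked 'can_be_merged' are flipped back to 'unchecked'; merged or
# already-unmergeable rows in the batch are untouched. The single update_all
# is an assumption:
module Gitlab
  module BackgroundMigration
    class ResetMergeStatus
      def perform(from_id, to_id)
        MergeRequest
          .where(id: from_id..to_id)
          .where(state_id: MergeRequest.available_states[:opened])
          .where(merge_status: 'can_be_merged')
          .update_all(merge_status: 'unchecked')
      end
    end
  end
end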
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::ResetSharedRunnersForTransferredProjects, schema: 20181228175414 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:namespace_1) { namespaces.create!(name: 'foo', path: 'foo', shared_runners_enabled: true, allow_descendants_override_disabled_shared_runners: false) }
let(:namespace_2) { namespaces.create!(name: 'baz', path: 'baz', shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: false) }
let(:namespace_3) { namespaces.create!(name: 'bar', path: 'bar', shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: true) }
let(:project_1_1) { projects.create!(namespace_id: namespace_1.id, shared_runners_enabled: true) }
let(:project_1_2) { projects.create!(namespace_id: namespace_1.id, shared_runners_enabled: false) }
let(:project_2_1) { projects.create!(namespace_id: namespace_2.id, shared_runners_enabled: true) }
let(:project_2_2) { projects.create!(namespace_id: namespace_2.id, shared_runners_enabled: false) }
let(:project_3_1) { projects.create!(namespace_id: namespace_3.id, shared_runners_enabled: true) }
let(:project_3_2) { projects.create!(namespace_id: namespace_3.id, shared_runners_enabled: false) }
it "corrects each project's shared_runners_enabled column" do
expect do
described_class.new.perform(namespace_1.id, namespace_3.id)
project_1_1.reload
project_1_2.reload
project_2_1.reload
project_2_2.reload
project_3_1.reload
project_3_2.reload
end.to not_change(project_1_1, :shared_runners_enabled).from(true)
.and not_change(project_1_2, :shared_runners_enabled).from(false)
.and change(project_2_1, :shared_runners_enabled).from(true).to(false)
.and not_change(project_2_2, :shared_runners_enabled).from(false)
.and not_change(project_3_1, :shared_runners_enabled).from(true)
.and not_change(project_3_2, :shared_runners_enabled).from(false)
end
end
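# Not part of this diff; a hedged sketch matching the change matrix above.
# Projects are forced to shared_runners_enabled: false only when their
# namespace both disables shared runners and disallows descendant overrides:
module Gitlab
  module BackgroundMigration
    class ResetSharedRunnersForTransferredProjects
      def perform(start_id, stop_id)
        locked_namespace_ids = Namespace
          .where(id: start_id..stop_id)
          .where(shared_runners_enabled: false,
                 allow_descendants_override_disabled_shared_runners: false)
          .select(:id)

        # Namespaces that allow overrides (namespace_3 above) keep their
        # projects' existing settings; only locked namespaces are reset.
        Project.where(namespace_id: locked_namespace_ids)
               .update_all(shared_runners_enabled: false)
      end
    end
  end
end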
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::SetMergeRequestDiffFilesCount, schema: 20181228175414 do
let(:merge_request_diff_files) { table(:merge_request_diff_files) }
let(:merge_request_diffs) { table(:merge_request_diffs) }
let(:merge_requests) { table(:merge_requests) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
let(:project) { projects.create!(namespace_id: namespace.id) }
let(:merge_request) { merge_requests.create!(source_branch: 'x', target_branch: 'master', target_project_id: project.id) }
let!(:empty_diff) { merge_request_diffs.create!(merge_request_id: merge_request.id) }
let!(:filled_diff) { merge_request_diffs.create!(merge_request_id: merge_request.id) }
let!(:filled_diff_files) do
1.upto(3).map do |n|
merge_request_diff_files.create!(
merge_request_diff_id: filled_diff.id,
relative_order: n,
new_file: false,
renamed_file: false,
deleted_file: false,
too_large: false,
a_mode: '',
b_mode: '',
old_path: '',
new_path: ''
)
end
end
it 'fills the files_count column' do
described_class.new.perform(empty_diff.id, filled_diff.id)
expect(empty_diff.reload.files_count).to eq(0)
expect(filled_diff.reload.files_count).to eq(3)
end
it 'uses the sentinel value if the actual count is too high' do
stub_const("#{described_class}::FILES_COUNT_SENTINEL", filled_diff_files.size - 1)
described_class.new.perform(empty_diff.id, filled_diff.id)
expect(empty_diff.reload.files_count).to eq(0)
expect(filled_diff.reload.files_count).to eq(described_class::FILES_COUNT_SENTINEL)
end
end
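# A sketch of the counting step the two examples above exercise. The stored
# value is capped at FILES_COUNT_SENTINEL when the real file count would not
# fit; the constant's value here (a smallint ceiling) is an assumption:
module Gitlab
  module BackgroundMigration
    class SetMergeRequestDiffFilesCount
      FILES_COUNT_SENTINEL = 2**15 - 1

      def perform(start_id, stop_id)
        MergeRequestDiff.where(id: start_id..stop_id).find_each do |diff|
          # LIMIT keeps the COUNT cheap on pathological diffs: once we have
          # seen sentinel + 1 rows, the exact total no longer matters.
          count = diff.merge_request_diff_files.limit(FILES_COUNT_SENTINEL + 1).count
          diff.update_column(:files_count, [count, FILES_COUNT_SENTINEL].min)
        end
      end
    end
  end
end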
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::UpdateExistingSubgroupToMatchVisibilityLevelOfParent, schema: 20181228175414 do
include MigrationHelpers::NamespacesHelpers
context 'private visibility level' do
it 'updates the subgroup visibility' do
parent = create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE)
child = create_namespace('child', Gitlab::VisibilityLevel::PUBLIC, parent_id: parent.id)
expect { subject.perform([parent.id], Gitlab::VisibilityLevel::PRIVATE) }.to change { child.reload.visibility_level }.to(Gitlab::VisibilityLevel::PRIVATE)
end
it 'updates sub-sub groups' do
parent = create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE)
middle_group = create_namespace('middle', Gitlab::VisibilityLevel::PRIVATE, parent_id: parent.id)
child = create_namespace('child', Gitlab::VisibilityLevel::PUBLIC, parent_id: middle_group.id)
subject.perform([parent.id, middle_group.id], Gitlab::VisibilityLevel::PRIVATE)
expect(child.reload.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
end
it 'updates all sub groups' do
parent = create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE)
middle_group = create_namespace('middle', Gitlab::VisibilityLevel::PUBLIC, parent_id: parent.id)
child = create_namespace('child', Gitlab::VisibilityLevel::PUBLIC, parent_id: middle_group.id)
subject.perform([parent.id], Gitlab::VisibilityLevel::PRIVATE)
expect(child.reload.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
expect(middle_group.reload.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
end
end
context 'internal visibility level' do
it 'updates the subgroup visibility' do
parent = create_namespace('parent', Gitlab::VisibilityLevel::INTERNAL)
child = create_namespace('child', Gitlab::VisibilityLevel::PUBLIC, parent_id: parent.id)
expect { subject.perform([parent.id], Gitlab::VisibilityLevel::INTERNAL) }.to change { child.reload.visibility_level }.to(Gitlab::VisibilityLevel::INTERNAL)
end
end
end
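# Not part of this diff; the 'updates all sub groups' example implies the
# update recurses to every descendant level. A hedged sketch that walks the
# tree one level at a time (the shipped migration may use a recursive CTE
# instead):
module Gitlab
  module BackgroundMigration
    class UpdateExistingSubgroupToMatchVisibilityLevelOfParent
      def perform(parent_ids, visibility_level)
        # Only tighten visibility: children already at or below the target
        # level are left alone.
        child_ids = Namespace
          .where(parent_id: parent_ids)
          .where('visibility_level > ?', visibility_level)
          .pluck(:id)
        return if child_ids.empty?

        Namespace.where(id: child_ids).update_all(visibility_level: visibility_level)

        # Recurse into the groups we just updated so sub-sub-groups are
        # lowered too, as the 'updates all sub groups' example expects.
        perform(child_ids, visibility_level)
      end
    end
  end
end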
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::UpdateExistingUsersThatRequireTwoFactorAuth, schema: 20181228175414 do
include MigrationHelpers::NamespacesHelpers
let(:group_with_2fa_parent) { create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE) }
let(:group_with_2fa_child) { create_namespace('child', Gitlab::VisibilityLevel::PRIVATE, parent_id: group_with_2fa_parent.id) }
let(:members_table) { table(:members) }
let(:users_table) { table(:users) }
subject { described_class.new }
describe '#perform' do
context 'with group members' do
let(:user_1) { create_user('user@example.com') }
let!(:member) { create_group_member(user_1, group_with_2fa_parent) }
let!(:user_without_group) { create_user('user_without@example.com') }
let(:user_other) { create_user('user_other@example.com') }
let!(:member_other) { create_group_member(user_other, group_with_2fa_parent) }
it 'updates the user when two factor authentication should not be required' do
subject.perform(user_1.id, user_without_group.id)
expect(user_1.reload.require_two_factor_authentication_from_group).to eq(false)
end
it 'does not update user when user is member of group that requires two factor authentication' do
group = create_namespace('other', Gitlab::VisibilityLevel::PRIVATE, require_two_factor_authentication: true)
create_group_member(user_1, group)
subject.perform(user_1.id, user_without_group.id)
expect(user_1.reload.require_two_factor_authentication_from_group).to eq(true)
end
it 'does not update user who is not in current batch' do
subject.perform(user_1.id, user_without_group.id)
expect(user_other.reload.require_two_factor_authentication_from_group).to eq(true)
end
it 'updates all users in current batch' do
subject.perform(user_1.id, user_other.id)
expect(user_other.reload.require_two_factor_authentication_from_group).to eq(false)
end
it 'does not update user when user is member of a group whose parent group requires two factor authentication' do
group_with_2fa_parent.update!(require_two_factor_authentication: true)
subject.perform(user_1.id, user_other.id)
expect(user_1.reload.require_two_factor_authentication_from_group).to eq(true)
end
it 'does not update user when user is member of a group that has a subgroup requiring two factor authentication' do
create_namespace('subgroup', Gitlab::VisibilityLevel::PRIVATE, require_two_factor_authentication: true, parent_id: group_with_2fa_child.id)
subject.perform(user_1.id, user_other.id)
expect(user_1.reload.require_two_factor_authentication_from_group).to eq(true)
end
end
end
def create_user(email, require_2fa: true)
users_table.create!(email: email, projects_limit: 10, require_two_factor_authentication_from_group: require_2fa)
end
def create_group_member(user, group)
members_table.create!(user_id: user.id, source_id: group.id, access_level: GroupMember::MAINTAINER, source_type: "Namespace", type: "GroupMember", notification_level: 3)
end
end
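# The production migration walks the user's full group hierarchy (ancestors
# and descendants, as the last two examples above require) before clearing the
# flag. This sketch compresses that walk into a direct-membership check and is
# an approximation, not the shipped query; the Group#members association is
# assumed:
module Gitlab
  module BackgroundMigration
    class UpdateExistingUsersThatRequireTwoFactorAuth
      def perform(start_id, stop_id)
        User
          .where(id: start_id..stop_id)
          .where(require_two_factor_authentication_from_group: true)
          .find_each do |user|
            # Keep the flag while any group the user belongs to still
            # enforces 2FA; otherwise the requirement no longer applies.
            next if two_factor_required?(user)

            user.update_column(:require_two_factor_authentication_from_group, false)
          end
      end

      private

      # Simplified: only direct memberships are checked here. The real
      # migration also considers ancestor and descendant groups.
      def two_factor_required?(user)
        Group.joins(:members)
             .where(members: { user_id: user.id })
             .where(require_two_factor_authentication: true)
             .exists?
      end
    end
  end
end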