Commit 6ffa7490 authored by James Lopez's avatar James Lopez

Merge branch 'georgekoltsov/relation_factory_refactor' into 'master'

Extract shared logic from RelationFactory & ObjectBuilder

See merge request gitlab-org/gitlab!21426
parents 79ba1861 650da489
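
For orientation before the diff: after this refactor the factory takes a generic `importable` and an explicit `object_builder` instead of a hard-coded `project` and builder. A minimal sketch of the new call, mirroring the call sites in the updated specs below (the relation name, the hash contents, and the locals `members_mapper`, `current_user` and `project` are placeholders, not part of this MR):

    # Sketch only, based on the spec call sites in this MR; values are placeholders.
    Gitlab::ImportExport::ProjectRelationFactory.create(
      relation_sym: :issues,
      relation_hash: { 'title' => 'Imported issue', 'author_id' => 1 },
      object_builder: Gitlab::ImportExport::GroupProjectObjectBuilder,
      members_mapper: members_mapper,
      merge_requests_mapping: {},
      user: current_user,
      importable: project,
      excluded_keys: []
    )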
......@@ -3,7 +3,7 @@
module EE
module Gitlab
module ImportExport
module RelationFactory
module ProjectRelationFactory
extend ActiveSupport::Concern
EE_OVERRIDES = {
......
......@@ -3,7 +3,14 @@
module Gitlab
module ImportExport
class AttributeCleaner
ALLOWED_REFERENCES = RelationFactory::PROJECT_REFERENCES + RelationFactory::USER_REFERENCES + %w[group_id commit_id discussion_id custom_attributes]
ALLOWED_REFERENCES = [
*ProjectRelationFactory::PROJECT_REFERENCES,
*ProjectRelationFactory::USER_REFERENCES,
'group_id',
'commit_id',
'discussion_id',
'custom_attributes'
].freeze
PROHIBITED_REFERENCES = Regexp.union(/\Acached_markdown_version\Z/, /_id\Z/, /_ids\Z/, /_html\Z/, /attributes/).freeze
def self.clean(*args)
......
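
Illustrative only, to make the two constants above concrete (the hash below is hypothetical, not from this MR): ALLOWED_REFERENCES whitelists reference-like keys that would otherwise be caught by PROHIBITED_REFERENCES.

    # Hypothetical attribute hash handed to the cleaner:
    attrs = { 'project_id' => 1, 'group_id' => 2, 'note_html' => '<p>hi</p>', 'title' => 'Bug' }
    # 'project_id' and 'group_id' end in _id (matched by PROHIBITED_REFERENCES)
    # but are listed in ALLOWED_REFERENCES, so the prohibition does not apply to them.
    # 'note_html' matches /_html\Z/ and is not whitelisted.
    # 'title' matches none of the prohibited patterns.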
# frozen_string_literal: true
module Gitlab
module ImportExport
# Base class for Group & Project Object Builders.
# This class is not intended to be used on its own but
# rather inherited from.
#
# Cache keeps 1000 entries at most, 1000 is chosen based on:
# - one cache entry uses around 0.5K of memory, so 1000 entries use around 500K
# (leaving some buffer, this stays under 1M). That is an affordable cost for a project import.
# - for projects on GitLab.com, 1000 entries for labels/milestones seems to be enough.
# For example, the gitlab project has ~970 labels and 26 milestones.
LRU_CACHE_SIZE = 1000
class BaseObjectBuilder
def self.build(*args)
new(*args).find
end
def initialize(klass, attributes)
@klass = klass.ancestors.include?(Label) ? Label : klass
@attributes = attributes
if Gitlab::SafeRequestStore.active?
@lru_cache = cache_from_request_store
@cache_key = [klass, attributes]
end
end
def find
find_with_cache do
find_object || klass.create(prepare_attributes)
end
end
protected
def where_clauses
raise NotImplementedError
end
# Attributes are wrapped in a method so they can be
# adjusted in a sub-class if needed.
def prepare_attributes
attributes
end
private
attr_reader :klass, :attributes, :lru_cache, :cache_key
def find_with_cache
return yield unless lru_cache && cache_key
lru_cache[cache_key] ||= yield
end
def cache_from_request_store
Gitlab::SafeRequestStore[:lru_cache] ||= LruRedux::Cache.new(LRU_CACHE_SIZE)
end
def find_object
klass.where(where_clause).first
end
def where_clause
where_clauses.reduce(:and)
end
def table
@table ||= klass.arel_table
end
# Returns Arel clause:
# `"{table_name}"."{attrs.keys[0]}" = '{attrs.values[0]} AND {table_name}"."{attrs.keys[1]}" = '{attrs.values[1]}"`
# from the given Hash of attributes.
def attrs_to_arel(attrs)
attrs.map do |key, value|
table[key].eq(value)
end.reduce(:and)
end
# Returns Arel clause `"{table_name}"."title" = '{attributes['title']}'`
# if attributes has the 'title' key, otherwise `nil`.
def where_clause_for_title
attrs_to_arel(attributes.slice('title'))
end
# Returns Arel clause `"{table_name}"."description" = '{attributes['description']}'`
# if attributes has the 'description' key, otherwise `nil`.
def where_clause_for_description
attrs_to_arel(attributes.slice('description'))
end
# Returns Arel clause `"{table_name}"."created_at" = '{attributes['created_at']}'`
# if attributes has the 'created_at' key, otherwise `nil`.
def where_clause_for_created_at
attrs_to_arel(attributes.slice('created_at'))
end
end
end
end
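
To make the subclass contract above concrete, a hypothetical minimal builder (not part of this MR) only has to supply `where_clauses`; the caching and find-or-create behaviour come from `BaseObjectBuilder`:

    # Hypothetical example: a builder that matches milestones by title.
    module Gitlab
      module ImportExport
        class MilestoneObjectBuilder < BaseObjectBuilder
          protected

          # The single hook the base class requires: an array of Arel
          # predicates that #where_clause reduces with AND.
          def where_clauses
            [where_clause_for_title]
          end
        end
      end
    end

    # Usage sketch (attributes would come from the import JSON):
    # Gitlab::ImportExport::MilestoneObjectBuilder.build(Milestone, 'title' => 'v1.0', 'project_id' => project.id)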
......@@ -2,63 +2,32 @@
module Gitlab
module ImportExport
class RelationFactory
class BaseRelationFactory
include Gitlab::Utils::StrongMemoize
prepend_if_ee('::EE::Gitlab::ImportExport::RelationFactory') # rubocop: disable Cop/InjectEnterpriseEditionModule
OVERRIDES = { snippets: :project_snippets,
ci_pipelines: 'Ci::Pipeline',
pipelines: 'Ci::Pipeline',
stages: 'Ci::Stage',
statuses: 'commit_status',
triggers: 'Ci::Trigger',
pipeline_schedules: 'Ci::PipelineSchedule',
builds: 'Ci::Build',
runners: 'Ci::Runner',
hooks: 'ProjectHook',
merge_access_levels: 'ProtectedBranch::MergeAccessLevel',
push_access_levels: 'ProtectedBranch::PushAccessLevel',
create_access_levels: 'ProtectedTag::CreateAccessLevel',
labels: :project_labels,
priorities: :label_priorities,
auto_devops: :project_auto_devops,
label: :project_label,
custom_attributes: 'ProjectCustomAttribute',
project_badges: 'Badge',
metrics: 'MergeRequest::Metrics',
ci_cd_settings: 'ProjectCiCdSetting',
error_tracking_setting: 'ErrorTracking::ProjectErrorTrackingSetting',
links: 'Releases::Link',
metrics_setting: 'ProjectMetricsSetting' }.freeze
USER_REFERENCES = %w[author_id assignee_id updated_by_id merged_by_id latest_closed_by_id user_id created_by_id last_edited_by_id merge_user_id resolved_by_id closed_by_id owner_id].freeze
PROJECT_REFERENCES = %w[project_id source_project_id target_project_id].freeze
GROUP_REFERENCES = %w[group_id].freeze
BUILD_MODELS = %i[Ci::Build commit_status].freeze
IMPORTED_OBJECT_MAX_RETRIES = 5.freeze
EXISTING_OBJECT_RELATIONS = %i[
milestone
milestones
label
labels
project_label
project_labels
group_label
group_labels
project_feature
merge_request
epic
ProjectCiCdSetting
container_expiration_policy
].freeze
TOKEN_RESET_MODELS = %i[Project Namespace Ci::Trigger Ci::Build Ci::Runner ProjectHook].freeze
OVERRIDES = {}.freeze
EXISTING_OBJECT_RELATIONS = %i[].freeze
# This represents all relations that have a unique key on `project_id` or `group_id`
UNIQUE_RELATIONS = %i[].freeze
USER_REFERENCES = %w[
author_id
assignee_id
updated_by_id
merged_by_id
latest_closed_by_id
user_id
created_by_id
last_edited_by_id
merge_user_id
resolved_by_id
closed_by_id
owner_id
].freeze
TOKEN_RESET_MODELS = %i[Project Namespace Group Ci::Trigger Ci::Build Ci::Runner ProjectHook].freeze
def self.create(*args)
new(*args).create
......@@ -73,16 +42,16 @@ module Gitlab
relation_name.to_s.constantize
end
def initialize(relation_sym:, relation_hash:, members_mapper:, merge_requests_mapping:, user:, project:, excluded_keys: [])
def initialize(relation_sym:, relation_hash:, members_mapper:, object_builder:, merge_requests_mapping: nil, user:, importable:, excluded_keys: [])
@relation_name = self.class.overrides[relation_sym]&.to_sym || relation_sym
@relation_hash = relation_hash.except('noteable_id')
@members_mapper = members_mapper
@object_builder = object_builder
@merge_requests_mapping = merge_requests_mapping
@user = user
@project = project
@importable = importable
@imported_object_retries = 0
@relation_hash['project_id'] = @project.id
@relation_hash[importable_column_name] = @importable.id
# Remove excluded keys from relation_hash
# We don't do this in the parsed_relation_hash because of the 'transformed attributes'
......@@ -97,72 +66,46 @@ module Gitlab
# the relation_hash, updating references with new object IDs, mapping users using
# the "members_mapper" object, also updating notes if required.
def create
return if unknown_service?
return if invalid_relation?
setup_base_models
setup_models
object = generate_imported_object
# We preload the project, user, and group to re-use objects
object = preload_keys(object, PROJECT_REFERENCES, @project)
object = preload_keys(object, GROUP_REFERENCES, @project.group)
object = preload_keys(object, USER_REFERENCES, @user)
object
generate_imported_object
end
def self.overrides
OVERRIDES
self::OVERRIDES
end
def self.existing_object_relations
EXISTING_OBJECT_RELATIONS
self::EXISTING_OBJECT_RELATIONS
end
private
def existing_object?
strong_memoize(:_existing_object) do
self.class.existing_object_relations.include?(@relation_name) || unique_relation?
end
def invalid_relation?
false
end
def setup_models
case @relation_name
when :merge_request_diff_files then setup_diff
when :notes then setup_note
end
raise NotImplementedError
end
def unique_relations
# define in sub-class if any
self.class::UNIQUE_RELATIONS
end
def setup_base_models
update_user_references
update_project_references
update_group_references
remove_duplicate_assignees
if @relation_name == :'Ci::Pipeline'
update_merge_request_references
setup_pipeline
end
reset_tokens!
remove_encrypted_attributes!
end
def preload_keys(object, references, value)
return object unless value
references.each do |key|
attribute = "#{key.delete_suffix('_id')}=".to_sym
next unless object.respond_to?(key) && object.respond_to?(attribute)
if object.read_attribute(key) == value&.id
object.public_send(attribute, value) # rubocop:disable GitlabSecurity/PublicSend
end
end
object
end
def update_user_references
USER_REFERENCES.each do |reference|
self.class::USER_REFERENCES.each do |reference|
if @relation_hash[reference]
@relation_hash[reference] = @members_mapper.map[@relation_hash[reference]]
end
......@@ -177,95 +120,14 @@ module Gitlab
@relation_hash['issue_assignees'].uniq!(&:user_id)
end
def setup_note
set_note_author
# attachment is deprecated and note uploads are handled by Markdown uploader
@relation_hash['attachment'] = nil
end
# Sets the author for a note. If the user importing the project
# has admin access, an actual mapping with new project members
# will be used. Otherwise, a note stating the original author name
# is left.
def set_note_author
old_author_id = @relation_hash['author_id']
author = @relation_hash.delete('author')
update_note_for_missing_author(author['name']) unless has_author?(old_author_id)
end
def has_author?(old_author_id)
admin_user? && @members_mapper.include?(old_author_id)
end
def missing_author_note(updated_at, author_name)
timestamp = updated_at.split('.').first
"\n\n *By #{author_name} on #{timestamp} (imported from GitLab project)*"
end
def generate_imported_object
if BUILD_MODELS.include?(@relation_name)
@relation_hash.delete('trace') # old export files have trace
@relation_hash.delete('token')
@relation_hash.delete('commands')
@relation_hash.delete('artifacts_file_store')
@relation_hash.delete('artifacts_metadata_store')
@relation_hash.delete('artifacts_size')
imported_object
elsif @relation_name == :merge_requests
MergeRequestParser.new(@project, @relation_hash.delete('diff_head_sha'), imported_object, @relation_hash).parse!
else
imported_object
end
end
def update_project_references
# If source and target are the same, populate them with the new project ID.
if @relation_hash['source_project_id']
@relation_hash['source_project_id'] = same_source_and_target? ? @relation_hash['project_id'] : MergeRequestParser::FORKED_PROJECT_ID
end
@relation_hash['target_project_id'] = @relation_hash['project_id'] if @relation_hash['target_project_id']
end
def same_source_and_target?
@relation_hash['target_project_id'] && @relation_hash['target_project_id'] == @relation_hash['source_project_id']
end
def update_group_references
return unless existing_object?
return unless @relation_hash['group_id']
@relation_hash['group_id'] = @project.namespace_id
end
# This code is a workaround for broken project exports that don't
# export merge requests with CI pipelines (i.e. exports that were
# generated from
# https://gitlab.com/gitlab-org/gitlab/merge_requests/17844).
# This method can be removed in GitLab 12.6.
def update_merge_request_references
# If a merge request was properly created, we don't need to fix
# up this export.
return if @relation_hash['merge_request']
merge_request_id = @relation_hash['merge_request_id']
return unless merge_request_id
new_merge_request_id = @merge_requests_mapping[merge_request_id]
return unless new_merge_request_id
@relation_hash['merge_request_id'] = new_merge_request_id
parsed_relation_hash['merge_request_id'] = new_merge_request_id
imported_object
end
def reset_tokens!
return unless Gitlab::ImportExport.reset_tokens? && TOKEN_RESET_MODELS.include?(@relation_name)
return unless Gitlab::ImportExport.reset_tokens? && self.class::TOKEN_RESET_MODELS.include?(@relation_name)
# If we import/export a project to the same instance, tokens will have to be reset.
# If we import/export to the same instance, tokens will have to be reset.
# We also have to reset them to avoid issues when the gitlab secrets file cannot be copied across.
relation_class.attribute_names.select { |name| name.include?('token') }.each do |token|
@relation_hash[token] = nil
......@@ -284,6 +146,14 @@ module Gitlab
@relation_class ||= self.class.relation_class(@relation_name)
end
def importable_column_name
importable_class_name.concat('_id')
end
def importable_class_name
@importable.class.to_s.downcase
end
def imported_object
if existing_or_new_object.respond_to?(:importing)
existing_or_new_object.importing = true
......@@ -297,32 +167,11 @@ module Gitlab
retry if @imported_object_retries < IMPORTED_OBJECT_MAX_RETRIES
end
def update_note_for_missing_author(author_name)
@relation_hash['note'] = '*Blank note*' if @relation_hash['note'].blank?
@relation_hash['note'] = "#{@relation_hash['note']}#{missing_author_note(@relation_hash['updated_at'], author_name)}"
end
def admin_user?
@user.admin?
end
def parsed_relation_hash
@parsed_relation_hash ||= Gitlab::ImportExport::AttributeCleaner.clean(relation_hash: @relation_hash,
relation_class: relation_class)
end
def setup_diff
@relation_hash['diff'] = @relation_hash.delete('utf8_diff')
end
def setup_pipeline
@relation_hash.fetch('stages', []).each do |stage|
stage.statuses.each do |status|
status.pipeline = imported_object
end
end
end
def existing_or_new_object
# Only find existing records to avoid mapping tables such as milestones
# Otherwise always create the record, skipping the extra SELECT clause.
......@@ -346,7 +195,7 @@ module Gitlab
end
def attribute_hash_for(attributes)
attributes.inject({}) do |hash, value|
attributes.each_with_object({}) do |value, hash|
hash[value] = parsed_relation_hash.delete(value) if parsed_relation_hash[value]
hash
end
......@@ -356,62 +205,101 @@ module Gitlab
@existing_object ||= find_or_create_object!
end
def unknown_service?
@relation_name == :services && parsed_relation_hash['type'] &&
!Object.const_defined?(parsed_relation_hash['type'])
def unique_relation_object
unique_relation_object = relation_class.find_or_create_by(importable_column_name => @importable.id)
unique_relation_object.assign_attributes(parsed_relation_hash)
unique_relation_object
end
def find_or_create_object!
return unique_relation_object if unique_relation?
# Can't use IDs here because validations call the `group` or `project` associations rather than the foreign keys
finder_hash = parsed_relation_hash.tap do |hash|
if relation_class.attribute_method?('group_id') && @importable.is_a?(Project)
hash['group'] = @importable.group
end
hash[importable_class_name] = @importable if relation_class.reflect_on_association(importable_class_name.to_sym)
hash.delete(importable_column_name)
end
@object_builder.build(relation_class, finder_hash)
end
def setup_note
set_note_author
# attachment is deprecated and note uploads are handled by Markdown uploader
@relation_hash['attachment'] = nil
end
# Sets the author for a note. If the user importing the project
# has admin access, an actual mapping with new project members
# will be used. Otherwise, a note stating the original author name
# is left.
def set_note_author
old_author_id = @relation_hash['author_id']
author = @relation_hash.delete('author')
update_note_for_missing_author(author['name']) unless has_author?(old_author_id)
end
def has_author?(old_author_id)
admin_user? && @members_mapper.include?(old_author_id)
end
def missing_author_note(updated_at, author_name)
timestamp = updated_at.split('.').first
"\n\n *By #{author_name} on #{timestamp} (imported from GitLab project)*"
end
def update_note_for_missing_author(author_name)
@relation_hash['note'] = '*Blank note*' if @relation_hash['note'].blank?
@relation_hash['note'] = "#{@relation_hash['note']}#{missing_author_note(@relation_hash['updated_at'], author_name)}"
end
def admin_user?
@user.admin?
end
def existing_object?
strong_memoize(:_existing_object) do
self.class.existing_object_relations.include?(@relation_name) || unique_relation?
end
end
def unique_relation?
strong_memoize(:unique_relation) do
project_foreign_key.present? &&
(has_unique_index_on_project_fk? || uses_project_fk_as_primary_key?)
importable_foreign_key.present? &&
(has_unique_index_on_importable_fk? || uses_importable_fk_as_primary_key?)
end
end
def has_unique_index_on_project_fk?
cache = cached_has_unique_index_on_project_fk
def has_unique_index_on_importable_fk?
cache = cached_has_unique_index_on_importable_fk
table_name = relation_class.table_name
return cache[table_name] if cache.has_key?(table_name)
index_exists =
ActiveRecord::Base.connection.index_exists?(
relation_class.table_name,
project_foreign_key,
importable_foreign_key,
unique: true)
cache[table_name] = index_exists
end
# Avoid unnecessary DB requests
def cached_has_unique_index_on_project_fk
Thread.current[:cached_has_unique_index_on_project_fk] ||= {}
end
def uses_project_fk_as_primary_key?
relation_class.primary_key == project_foreign_key
def cached_has_unique_index_on_importable_fk
Thread.current[:cached_has_unique_index_on_importable_fk] ||= {}
end
# Should be `:project_id` for most of the cases, but this is more general
def project_foreign_key
relation_class.reflect_on_association(:project)&.foreign_key
def uses_importable_fk_as_primary_key?
relation_class.primary_key == importable_foreign_key
end
def find_or_create_object!
if unique_relation?
unique_relation_object = relation_class.find_or_create_by(project_id: @project.id)
unique_relation_object.assign_attributes(parsed_relation_hash)
return unique_relation_object
end
# Can't use IDs here because validations call the `group` or `project` associations rather than the foreign keys
finder_hash = parsed_relation_hash.tap do |hash|
hash['group'] = @project.group if relation_class.attribute_method?('group_id')
hash['project'] = @project if relation_class.reflect_on_association(:project)
hash.delete('project_id')
end
GroupProjectObjectBuilder.build(relation_class, finder_hash)
def importable_foreign_key
relation_class.reflect_on_association(importable_class_name.to_sym)&.foreign_key
end
end
end
......
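
For reference, the contract `BaseRelationFactory` now defines can be summarised with a hypothetical subclass (not part of this MR): constants are resolved through `self::`, so a subclass overrides only what it needs, and `#setup_models` is the one method it must implement (`#invalid_relation?` defaults to `false`):

    # Hypothetical example of the BaseRelationFactory subclass contract.
    module Gitlab
      module ImportExport
        class SnippetRelationFactory < BaseRelationFactory
          # Overridden constants; anything not redefined here is resolved
          # against the base class through the `self::` lookups.
          OVERRIDES = { snippets: :project_snippets }.freeze
          EXISTING_OBJECT_RELATIONS = %i[].freeze

          private

          # Called from #create before the imported object is generated;
          # the base class raises NotImplementedError without it.
          def setup_models
            @relation_hash.delete('unsupported_column') # placeholder adjustment
          end
        end
      end
    end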
......@@ -11,61 +11,29 @@ module Gitlab
# finds or initializes a label with the given attributes.
#
# It also adds some logic around Group Labels/Milestones for edge cases.
class GroupProjectObjectBuilder
# Cache keeps 1000 entries at most, 1000 is chosen based on:
# - one cache entry uses around 0.5K of memory, so 1000 entries use around 500K
# (leaving some buffer, this stays under 1M). That is an affordable cost for a project import.
# - for projects on GitLab.com, 1000 entries for labels/milestones seems to be enough.
# For example, the gitlab project has ~970 labels and 26 milestones.
LRU_CACHE_SIZE = 1000
class GroupProjectObjectBuilder < BaseObjectBuilder
def self.build(*args)
Project.transaction do
new(*args).find
super
end
end
def initialize(klass, attributes)
@klass = klass < Label ? Label : klass
@attributes = attributes
super
@group = @attributes['group']
@project = @attributes['project']
if Gitlab::SafeRequestStore.active?
@lru_cache = cache_from_request_store
@cache_key = [klass, attributes]
end
end
def find
return if epic? && group.nil?
find_with_cache do
find_object || klass.create(project_attributes)
end
super
end
private
attr_reader :klass, :attributes, :group, :project, :lru_cache, :cache_key
def find_with_cache
return yield unless lru_cache && cache_key
lru_cache[cache_key] ||= yield
end
def cache_from_request_store
Gitlab::SafeRequestStore[:lru_cache] ||= LruRedux::Cache.new(LRU_CACHE_SIZE)
end
def find_object
klass.where(where_clause).first
end
def where_clause
where_clauses.reduce(:and)
end
attr_reader :group, :project
def where_clauses
[
......@@ -86,26 +54,12 @@ module Gitlab
end.reduce(:or)
end
# Returns Arel clause `"{table_name}"."title" = '{attributes['title']}'`
# if attributes has the 'title' key, otherwise `nil`.
def where_clause_for_title
attrs_to_arel(attributes.slice('title'))
end
# Returns Arel clause:
# `"{table_name}"."{attrs.keys[0]}" = '{attrs.values[0]} AND {table_name}"."{attrs.keys[1]}" = '{attrs.values[1]}"`
# from the given Hash of attributes.
def attrs_to_arel(attrs)
attrs.map do |key, value|
table[key].eq(value)
end.reduce(:and)
end
def table
@table ||= klass.arel_table
# Returns Arel clause for a particular model or `nil`.
def where_clause_for_klass
attrs_to_arel(attributes.slice('iid')) if merge_request?
end
def project_attributes
def prepare_attributes
attributes.except('group').tap do |atts|
if label?
atts['type'] = 'ProjectLabel' # Always create project labels
......@@ -154,13 +108,6 @@ module Gitlab
milestone.ensure_project_iid!
milestone.save!
end
protected
# Returns Arel clause for a particular model or `nil`.
def where_clause_for_klass
return attrs_to_arel(attributes.slice('iid')) if merge_request?
end
end
end
end
......
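
The Arel helpers referenced in the builder comments above compose roughly like this (illustrative only; assumes a `Label` model backed by a `labels` table):

    # Illustrative only: how attrs_to_arel-style predicates are built and reduced.
    table  = Label.arel_table
    clause = { 'title' => 'Bug', 'description' => 'Red' }
               .map { |key, value| table[key].eq(value) }
               .reduce(:and)
    Label.where(clause).to_sql
    # => roughly: SELECT "labels".* FROM "labels"
    #    WHERE "labels"."title" = 'Bug' AND "labels"."description" = 'Red'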
# frozen_string_literal: true
module Gitlab
module ImportExport
class ProjectRelationFactory < BaseRelationFactory
prepend_if_ee('::EE::Gitlab::ImportExport::ProjectRelationFactory') # rubocop: disable Cop/InjectEnterpriseEditionModule
OVERRIDES = { snippets: :project_snippets,
ci_pipelines: 'Ci::Pipeline',
pipelines: 'Ci::Pipeline',
stages: 'Ci::Stage',
statuses: 'commit_status',
triggers: 'Ci::Trigger',
pipeline_schedules: 'Ci::PipelineSchedule',
builds: 'Ci::Build',
runners: 'Ci::Runner',
hooks: 'ProjectHook',
merge_access_levels: 'ProtectedBranch::MergeAccessLevel',
push_access_levels: 'ProtectedBranch::PushAccessLevel',
create_access_levels: 'ProtectedTag::CreateAccessLevel',
labels: :project_labels,
priorities: :label_priorities,
auto_devops: :project_auto_devops,
label: :project_label,
custom_attributes: 'ProjectCustomAttribute',
project_badges: 'Badge',
metrics: 'MergeRequest::Metrics',
ci_cd_settings: 'ProjectCiCdSetting',
error_tracking_setting: 'ErrorTracking::ProjectErrorTrackingSetting',
links: 'Releases::Link',
metrics_setting: 'ProjectMetricsSetting' }.freeze
BUILD_MODELS = %i[Ci::Build commit_status].freeze
GROUP_REFERENCES = %w[group_id].freeze
PROJECT_REFERENCES = %w[project_id source_project_id target_project_id].freeze
EXISTING_OBJECT_RELATIONS = %i[
milestone
milestones
label
labels
project_label
project_labels
group_label
group_labels
project_feature
merge_request
epic
ProjectCiCdSetting
container_expiration_policy
].freeze
def create
@object = super
# We preload the project, user, and group to re-use objects
@object = preload_keys(@object, PROJECT_REFERENCES, @importable)
@object = preload_keys(@object, GROUP_REFERENCES, @importable.group)
@object = preload_keys(@object, USER_REFERENCES, @user)
end
private
def invalid_relation?
# Do not create relation if it is:
# - An unknown service
# - A legacy trigger
unknown_service? ||
(!Feature.enabled?(:use_legacy_pipeline_triggers, @importable) && legacy_trigger?)
end
def setup_models
case @relation_name
when :merge_request_diff_files then setup_diff
when :notes then setup_note
when :'Ci::Pipeline' then setup_pipeline
when *BUILD_MODELS then setup_build
end
update_project_references
update_group_references
end
def generate_imported_object
if @relation_name == :merge_requests
MergeRequestParser.new(@importable, @relation_hash.delete('diff_head_sha'), super, @relation_hash).parse!
else
super
end
end
def update_project_references
# If source and target are the same, populate them with the new project ID.
if @relation_hash['source_project_id']
@relation_hash['source_project_id'] = same_source_and_target? ? @relation_hash['project_id'] : MergeRequestParser::FORKED_PROJECT_ID
end
@relation_hash['target_project_id'] = @relation_hash['project_id'] if @relation_hash['target_project_id']
end
def same_source_and_target?
@relation_hash['target_project_id'] && @relation_hash['target_project_id'] == @relation_hash['source_project_id']
end
def update_group_references
return unless existing_object?
return unless @relation_hash['group_id']
@relation_hash['group_id'] = @importable.namespace_id
end
# This code is a workaround for broken project exports that don't
# export merge requests with CI pipelines (i.e. exports that were
# generated from
# https://gitlab.com/gitlab-org/gitlab/merge_requests/17844).
# This method can be removed in GitLab 12.6.
def update_merge_request_references
# If a merge request was properly created, we don't need to fix
# up this export.
return if @relation_hash['merge_request']
merge_request_id = @relation_hash['merge_request_id']
return unless merge_request_id
new_merge_request_id = @merge_requests_mapping[merge_request_id]
return unless new_merge_request_id
@relation_hash['merge_request_id'] = new_merge_request_id
parsed_relation_hash['merge_request_id'] = new_merge_request_id
end
def setup_build
@relation_hash.delete('trace') # old export files have trace
@relation_hash.delete('token')
@relation_hash.delete('commands')
@relation_hash.delete('artifacts_file_store')
@relation_hash.delete('artifacts_metadata_store')
@relation_hash.delete('artifacts_size')
end
def setup_diff
@relation_hash['diff'] = @relation_hash.delete('utf8_diff')
end
def setup_pipeline
update_merge_request_references
@relation_hash.fetch('stages', []).each do |stage|
stage.statuses.each do |status|
status.pipeline = imported_object
end
end
end
def unknown_service?
@relation_name == :services && parsed_relation_hash['type'] &&
!Object.const_defined?(parsed_relation_hash['type'])
end
def legacy_trigger?
@relation_name == :'Ci::Trigger' && @relation_hash['owner_id'].nil?
end
def preload_keys(object, references, value)
return object unless value
references.each do |key|
attribute = "#{key.delete_suffix('_id')}=".to_sym
next unless object.respond_to?(key) && object.respond_to?(attribute)
if object.read_attribute(key) == value&.id
object.public_send(attribute, value) # rubocop:disable GitlabSecurity/PublicSend
end
end
object
end
end
end
end
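
Illustrative only: what the `preload_keys` pass above achieves when an imported record's reference column already points at a loaded object (the `Issue` example and `project` local are hypothetical):

    # Hypothetical illustration of preload_keys for PROJECT_REFERENCES.
    issue = Issue.new(title: 'Imported', project_id: project.id)
    # read_attribute('project_id') equals project.id, so the already-loaded
    # project is assigned through the association writer...
    issue.public_send(:project=, project)
    # ...and later reads of issue.project do not trigger an extra SELECT.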
......@@ -48,6 +48,7 @@ module Gitlab
shared: @shared,
importable: @project,
tree_hash: @tree_hash,
object_builder: object_builder,
members_mapper: members_mapper,
relation_factory: relation_factory,
reader: reader
......@@ -60,8 +61,12 @@ module Gitlab
importable: @project)
end
def object_builder
Gitlab::ImportExport::GroupProjectObjectBuilder
end
def relation_factory
Gitlab::ImportExport::RelationFactory
Gitlab::ImportExport::ProjectRelationFactory
end
def reader
......
......@@ -11,12 +11,13 @@ module Gitlab
attr_reader :importable
attr_reader :tree_hash
def initialize(user:, shared:, importable:, tree_hash:, members_mapper:, relation_factory:, reader:)
def initialize(user:, shared:, importable:, tree_hash:, members_mapper:, object_builder:, relation_factory:, reader:)
@user = user
@shared = shared
@importable = importable
@tree_hash = tree_hash
@members_mapper = members_mapper
@object_builder = object_builder
@relation_factory = relation_factory
@reader = reader
end
......@@ -221,15 +222,16 @@ module Gitlab
def relation_factory_params(relation_key, data_hash)
base_params = {
relation_sym: relation_key.to_sym,
relation_hash: data_hash,
relation_sym: relation_key.to_sym,
relation_hash: data_hash,
importable: @importable,
members_mapper: @members_mapper,
user: @user,
excluded_keys: excluded_keys_for_relation(relation_key)
object_builder: @object_builder,
user: @user,
excluded_keys: excluded_keys_for_relation(relation_key)
}
base_params[:merge_requests_mapping] = merge_requests_mapping if importable_class == Project
base_params[importable_class_sym] = @importable
base_params
end
end
......
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::ImportExport::BaseObjectBuilder do
let(:project) do
create(:project, :repository,
:builds_disabled,
:issues_disabled,
name: 'project',
path: 'project')
end
let(:klass) { Milestone }
let(:attributes) { { 'title' => 'Test BaseObjectBuilder Milestone', 'project' => project } }
subject { described_class.build(klass, attributes) }
describe '#build' do
context 'when object exists' do
context 'when where_clauses are implemented' do
before do
allow_next_instance_of(described_class) do |object_builder|
allow(object_builder).to receive(:where_clauses).and_return([klass.arel_table['title'].eq(attributes['title'])])
end
end
let!(:milestone) { create(:milestone, title: attributes['title'], project: project) }
it 'finds existing object instead of creating one' do
expect(subject).to eq(milestone)
end
end
context 'when where_clauses are not implemented' do
it 'raises NotImplementedError' do
expect { subject }.to raise_error(NotImplementedError)
end
end
end
context 'when object does not exist' do
before do
allow_next_instance_of(described_class) do |object_builder|
allow(object_builder).to receive(:find_object).and_return(nil)
end
end
it 'creates new object' do
expect { subject }.to change { Milestone.count }.from(0).to(1)
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::ImportExport::BaseRelationFactory do
let(:user) { create(:admin) }
let(:project) { create(:project) }
let(:members_mapper) { double('members_mapper').as_null_object }
let(:relation_sym) { :project_snippets }
let(:merge_requests_mapping) { {} }
let(:relation_hash) { {} }
let(:excluded_keys) { [] }
subject do
described_class.create(relation_sym: relation_sym,
relation_hash: relation_hash,
object_builder: Gitlab::ImportExport::GroupProjectObjectBuilder,
members_mapper: members_mapper,
merge_requests_mapping: merge_requests_mapping,
user: user,
importable: project,
excluded_keys: excluded_keys)
end
describe '#create' do
context 'when relation is invalid' do
before do
expect_next_instance_of(described_class) do |relation_factory|
expect(relation_factory).to receive(:invalid_relation?).and_return(true)
end
end
it 'returns without creating new relations' do
expect(subject).to be_nil
end
end
context 'when #setup_models is not implemented' do
it 'raises NotImplementedError' do
expect { subject }.to raise_error(NotImplementedError)
end
end
context 'when #setup_models is implemented' do
let(:relation_sym) { :notes }
let(:relation_hash) do
{
"id" => 4947,
"note" => "merged",
"noteable_type" => "MergeRequest",
"author_id" => 999,
"created_at" => "2016-11-18T09:29:42.634Z",
"updated_at" => "2016-11-18T09:29:42.634Z",
"project_id" => 1,
"attachment" => {
"url" => nil
},
"noteable_id" => 377,
"system" => true,
"events" => []
}
end
before do
expect_next_instance_of(described_class) do |relation_factory|
expect(relation_factory).to receive(:setup_models).and_return(true)
end
end
it 'creates imported object' do
expect(subject).to be_instance_of(Note)
end
context 'when relation contains user references' do
let(:new_user) { create(:user) }
let(:exported_member) do
{
"id" => 111,
"access_level" => 30,
"source_id" => 1,
"source_type" => "Project",
"user_id" => 3,
"notification_level" => 3,
"created_at" => "2016-11-18T09:29:42.634Z",
"updated_at" => "2016-11-18T09:29:42.634Z",
"user" => {
"id" => 999,
"email" => new_user.email,
"username" => new_user.username
}
}
end
let(:members_mapper) do
Gitlab::ImportExport::MembersMapper.new(
exported_members: [exported_member],
user: user,
importable: project)
end
it 'maps the right author to the imported note' do
expect(subject.author).to eq(new_user)
end
end
context 'when relation contains token attributes' do
let(:relation_sym) { 'ProjectHook' }
let(:relation_hash) { { token: 'secret' } }
it 'removes token attributes' do
expect(subject.token).to be_nil
end
end
context 'when relation contains encrypted attributes' do
let(:relation_sym) { 'Ci::Variable' }
let(:relation_hash) do
create(:ci_variable).as_json
end
it 'removes encrypted attributes' do
expect(subject.value).to be_nil
end
end
end
end
describe '.relation_class' do
context 'when relation name is pluralized' do
let(:relation_name) { 'MergeRequest::Metrics' }
it 'returns constantized class' do
expect(described_class.relation_class(relation_name)).to eq(MergeRequest::Metrics)
end
end
context 'when relation name is singularized' do
let(:relation_name) { 'Badge' }
it 'returns constantized class' do
expect(described_class.relation_class(relation_name)).to eq(Badge)
end
end
end
end
......@@ -2,7 +2,7 @@
require 'spec_helper'
describe Gitlab::ImportExport::RelationFactory do
describe Gitlab::ImportExport::ProjectRelationFactory do
let(:group) { create(:group) }
let(:project) { create(:project, :repository, group: group) }
let(:members_mapper) { double('members_mapper').as_null_object }
......@@ -12,10 +12,11 @@ describe Gitlab::ImportExport::RelationFactory do
let(:created_object) do
described_class.create(relation_sym: relation_sym,
relation_hash: relation_hash,
object_builder: Gitlab::ImportExport::GroupProjectObjectBuilder,
members_mapper: members_mapper,
merge_requests_mapping: merge_requests_mapping,
user: user,
project: project,
importable: project,
excluded_keys: excluded_keys)
end
......@@ -97,7 +98,7 @@ describe Gitlab::ImportExport::RelationFactory do
end
end
context 'merge_requset object' do
context 'merge_request object' do
let(:relation_sym) { :merge_requests }
let(:exported_member) do
......@@ -244,11 +245,11 @@ describe Gitlab::ImportExport::RelationFactory do
context 'Project references' do
let(:relation_sym) { :project_foo_model }
let(:relation_hash) do
Gitlab::ImportExport::RelationFactory::PROJECT_REFERENCES.map { |ref| { ref => 99 } }.inject(:merge)
Gitlab::ImportExport::ProjectRelationFactory::PROJECT_REFERENCES.map { |ref| { ref => 99 } }.inject(:merge)
end
class ProjectFooModel < FooModel
attr_accessor(*Gitlab::ImportExport::RelationFactory::PROJECT_REFERENCES)
attr_accessor(*Gitlab::ImportExport::ProjectRelationFactory::PROJECT_REFERENCES)
end
before do
......
......@@ -27,6 +27,7 @@ describe Gitlab::ImportExport::RelationTreeRestorer do
shared: shared,
tree_hash: tree_hash,
importable: importable,
object_builder: object_builder,
members_mapper: members_mapper,
relation_factory: relation_factory,
reader: reader
......@@ -38,7 +39,8 @@ describe Gitlab::ImportExport::RelationTreeRestorer do
context 'when restoring a project' do
let(:path) { 'spec/fixtures/lib/gitlab/import_export/complex/project.json' }
let(:importable) { create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project') }
let(:relation_factory) { Gitlab::ImportExport::RelationFactory }
let(:object_builder) { Gitlab::ImportExport::GroupProjectObjectBuilder }
let(:relation_factory) { Gitlab::ImportExport::ProjectRelationFactory }
let(:reader) { Gitlab::ImportExport::Reader.new(shared: shared) }
let(:tree_hash) { importable_hash }
......
......@@ -36,8 +36,8 @@ module ConfigurationHelper
end
def relation_class_for_name(relation_name)
relation_name = Gitlab::ImportExport::RelationFactory.overrides[relation_name.to_sym] || relation_name
Gitlab::ImportExport::RelationFactory.relation_class(relation_name)
relation_name = Gitlab::ImportExport::ProjectRelationFactory.overrides[relation_name.to_sym] || relation_name
Gitlab::ImportExport::ProjectRelationFactory.relation_class(relation_name)
end
def parsed_attributes(relation_name, attributes, config: Gitlab::ImportExport.config_file)
......