Commit 1d5dab27 authored by George Koltsov

Import group boards & board lists via ndjson when using Bulk Import

  - Add boards, board lists & milestones to the list of relations
    imported via ndjson when using Bulk Import, in order to
    preserve board associations

Changelog: added
EE: true
parent 9dd5444f
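
Editor's note on the refactor below: instead of each pipeline defining its own RELATION constant plus hand-rolled #transform/#load methods, the shared BulkImports::NdjsonPipeline concern now supplies those, and each pipeline only declares its relation via a class-level relation_name DSL. The following is a minimal standalone sketch of that pattern, simplified for illustration: the real concern stores the name in a class_attributes hash and also defines the shared #transform and #load helpers shown in the diff.

# Simplified sketch of the class-level relation_name DSL (plain Ruby).
# The real NdjsonPipeline concern keeps the value in class_attributes.
module NdjsonPipeline
  def self.included(base)
    base.extend(ClassMethods)
  end

  module ClassMethods
    # Declare which exported relation this pipeline imports.
    def relation_name(name)
      @relation_name = name
    end

    # Read the declared relation back (used by the extractor DSL).
    def relation
      @relation_name
    end
  end

  # Instance-level accessor, mirroring `def relation; self.class.relation; end`.
  def relation
    self.class.relation
  end
end

class BoardsPipeline
  include NdjsonPipeline

  relation_name 'boards'
end

BoardsPipeline.relation     # => "boards"
BoardsPipeline.new.relation # => "boards"
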
@@ -43,7 +43,7 @@ module BulkImports
def run(pipeline_tracker)
if ndjson_pipeline?(pipeline_tracker)
status = ExportStatus.new(pipeline_tracker, pipeline_tracker.pipeline_class::RELATION)
status = ExportStatus.new(pipeline_tracker, pipeline_tracker.pipeline_class.relation)
raise(Pipeline::ExpiredError, 'Pipeline timeout') if job_timeout?(pipeline_tracker)
raise(Pipeline::FailedError, status.error) if status.failed?
@@ -69,6 +69,8 @@ The following resources are migrated to the target instance:
  - name
  - link URL
  - image URL
- Boards
- Board Lists
Any other items are **not** migrated.
@@ -6,44 +6,9 @@ module BulkImports
class EpicsPipeline
include BulkImports::NdjsonPipeline
RELATION = 'epics'
relation_name 'epics'
extractor ::BulkImports::Common::Extractors::NdjsonExtractor, relation: RELATION
def transform(context, data)
relation_hash = data.first
relation_index = data.last
relation_definition = import_export_config.top_relation_tree(RELATION)
deep_transform_relation!(relation_hash, RELATION, relation_definition) do |key, hash|
Gitlab::ImportExport::Group::RelationFactory.create(
relation_index: relation_index,
relation_sym: key.to_sym,
relation_hash: hash,
importable: context.portable,
members_mapper: members_mapper,
object_builder: object_builder,
user: context.current_user,
excluded_keys: import_export_config.relation_excluded_keys(key)
)
end
end
def load(_, epic)
return unless epic
epic.save! unless epic.persisted?
end
private
def members_mapper
@members_mapper ||= Gitlab::ImportExport::MembersMapper.new(
exported_members: [], # importer user is authoring everything for now
user: context.current_user,
importable: context.portable
)
end
extractor ::BulkImports::Common::Extractors::NdjsonExtractor, relation: relation
end
end
end
{"id":57,"project_id":null,"created_at":"2019-11-20T17:27:41.118Z","updated_at":"2019-11-20T17:27:41.118Z","name":"first board","milestone_id":-2,"milestone":{"id":-2,"name":"#upcoming","title":"Upcoming"},"group_id":4351,"weight":null,"labels":[],"lists":[{"id":231,"board_id":173,"label_id":null,"list_type":"assignee","position":3,"created_at":"2020-02-11T17:02:14.073Z","updated_at":"2020-02-11T17:02:14.073Z","user_id":70,"milestone_id":null,"max_issue_count":0,"max_issue_weight":0,"board":{"id":173,"project_id":null,"created_at":"2020-02-11T14:35:51.561Z","updated_at":"2020-02-11T14:35:51.561Z","name":"hi","milestone_id":null,"group_id":4351,"weight":null}},{"id":33,"board_id":173,"label_id":null,"list_type":"milestone","position":1,"created_at":"2020-02-10T16:16:01.896Z","updated_at":"2020-02-10T16:16:01.896Z","user_id":null,"milestone_id":264,"max_issue_count":0,"max_issue_weight":0,"milestone":{"id":264,"title":"v2.2","project_id":null,"description":"Voluptatum itaque natus laboriosam dolor omnis eaque quos cupiditate.","due_date":null,"created_at":"2020-02-06T15:44:52.126Z","updated_at":"2020-02-06T15:44:52.126Z","state":"active","iid":1,"start_date":null,"group_id":4351,"events":[]},"board":{"id":173,"project_id":null,"created_at":"2020-02-11T14:35:51.561Z","updated_at":"2020-02-11T14:35:51.561Z","name":"hi","milestone_id":null,"group_id":4351,"weight":null}}]}
{"id":57,"project_id":null,"created_at":"2019-11-20T17:27:41.118Z","updated_at":"2019-11-20T17:27:41.118Z","name":"second board","milestone_id":7642,"milestone":{"id":7642,"title":"v4.0","project_id":null,"description":"Et laudantium enim omnis ea reprehenderit iure.","due_date":null,"created_at":"2019-11-20T17:02:14.336Z","updated_at":"2019-11-20T17:02:14.336Z","state":"closed","iid":5,"start_date":null,"group_id":4351},"group_id":4351,"weight":null,"labels":[],"lists":[]}
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::BoardsPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
let_it_be(:filepath) { 'ee/spec/fixtures/bulk_imports/gz/boards.ndjson.gz' }
let_it_be(:entity) do
create(
:bulk_import_entity,
group: group,
bulk_import: bulk_import,
source_full_path: 'source/full/path',
destination_name: 'My Destination Group',
destination_namespace: group.full_path
)
end
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:tmpdir) { Dir.mktmpdir }
before do
stub_licensed_features(board_assignee_lists: true, board_milestone_lists: true)
FileUtils.copy_file(filepath, File.join(tmpdir, 'boards.ndjson.gz'))
group.add_owner(user)
end
subject { described_class.new(context) }
describe '#run' do
it 'imports group boards into destination group and removes tmpdir' do
allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
allow_next_instance_of(BulkImports::FileDownloadService) do |service|
allow(service).to receive(:execute)
end
expect { subject.run }.to change(Board, :count).by(2)
lists = group.boards.find_by(name: 'first board').lists
board_one = group.boards.find_by(name: 'first board')
board_two = group.boards.find_by(name: 'second board')
expect(lists.map(&:list_type)).to contain_exactly('assignee', 'milestone')
expect(board_one.milestone).to be_nil
expect(board_two.milestone.title).to eq 'v4.0'
end
end
end
@@ -90,7 +90,7 @@ RSpec.describe BulkImports::Groups::Pipelines::EpicsPipeline do
expect(described_class.get_extractor)
.to eq(
klass: BulkImports::Common::Extractors::NdjsonExtractor,
options: { relation: described_class::RELATION }
options: { relation: described_class.relation }
)
end
end
@@ -12,6 +12,7 @@ RSpec.describe BulkImports::Stage do
[1, BulkImports::Groups::Pipelines::MilestonesPipeline],
[1, BulkImports::Groups::Pipelines::BadgesPipeline],
[1, BulkImports::Groups::Pipelines::IterationsPipeline],
[2, BulkImports::Groups::Pipelines::BoardsPipeline],
[2, BulkImports::Groups::Pipelines::EpicsPipeline],
[4, BulkImports::Groups::Pipelines::EntityFinisher]
]
@@ -238,7 +238,7 @@ RSpec.describe Geo::FileUploadService do
context 'bulk imports export file' do
let_it_be(:type) { :'bulk_imports/export' }
let_it_be(:export) { create(:bulk_import_export) }
let_it_be(:file) { fixture_file_upload('spec/fixtures/bulk_imports/labels.ndjson.gz') }
let_it_be(:file) { fixture_file_upload('spec/fixtures/bulk_imports/gz/labels.ndjson.gz') }
let(:upload) { Upload.find_by(model: export, uploader: 'BulkImports::ExportUploader') }
let(:request_data) { Gitlab::Geo::Replication::FileTransfer.new(type, upload).request_data }
# frozen_string_literal: true
module BulkImports
module Groups
module Pipelines
class BoardsPipeline
include NdjsonPipeline
relation_name 'boards'
extractor ::BulkImports::Common::Extractors::NdjsonExtractor, relation: relation
end
end
end
end
@@ -6,34 +6,9 @@ module BulkImports
class LabelsPipeline
include NdjsonPipeline
RELATION = 'labels'
relation_name 'labels'
extractor ::BulkImports::Common::Extractors::NdjsonExtractor, relation: RELATION
def transform(context, data)
relation_hash = data.first
relation_index = data.last
relation_definition = import_export_config.top_relation_tree(RELATION)
deep_transform_relation!(relation_hash, RELATION, relation_definition) do |key, hash|
Gitlab::ImportExport::Group::RelationFactory.create(
relation_index: relation_index,
relation_sym: key.to_sym,
relation_hash: hash,
importable: context.portable,
members_mapper: nil,
object_builder: object_builder,
user: context.current_user,
excluded_keys: import_export_config.relation_excluded_keys(key)
)
end
end
def load(_, label)
return unless label
label.save! unless label.persisted?
end
extractor ::BulkImports::Common::Extractors::NdjsonExtractor, relation: relation
end
end
end
@@ -4,26 +4,11 @@ module BulkImports
module Groups
module Pipelines
class MilestonesPipeline
include Pipeline
include NdjsonPipeline
extractor BulkImports::Common::Extractors::GraphqlExtractor,
query: BulkImports::Groups::Graphql::GetMilestonesQuery
relation_name 'milestones'
transformer Common::Transformers::ProhibitedAttributesTransformer
def load(context, data)
return unless data
raise ::BulkImports::Pipeline::NotAllowedError unless authorized?
context.group.milestones.create!(data)
end
private
def authorized?
context.current_user.can?(:admin_milestone, context.group)
end
extractor ::BulkImports::Common::Extractors::NdjsonExtractor, relation: relation
end
end
end
@@ -9,6 +9,30 @@ module BulkImports
included do
ndjson_pipeline!
def transform(context, data)
relation_hash, relation_index = data
relation_definition = import_export_config.top_relation_tree(relation)
deep_transform_relation!(relation_hash, relation, relation_definition) do |key, hash|
Gitlab::ImportExport::Group::RelationFactory.create(
relation_index: relation_index,
relation_sym: key.to_sym,
relation_hash: hash,
importable: context.portable,
members_mapper: members_mapper,
object_builder: object_builder,
user: context.current_user,
excluded_keys: import_export_config.relation_excluded_keys(key)
)
end
end
def load(_, object)
return unless object
object.save! unless object.persisted?
end
def deep_transform_relation!(relation_hash, relation_key, relation_definition, &block)
relation_key = relation_key_override(relation_key)
@@ -58,6 +82,18 @@ module BulkImports
def object_builder
"Gitlab::ImportExport::#{portable.class}::ObjectBuilder".constantize
end
def relation
self.class.relation
end
def members_mapper
@members_mapper ||= Gitlab::ImportExport::MembersMapper.new(
exported_members: [],
user: current_user,
importable: portable
)
end
end
end
end
@@ -30,6 +30,10 @@ module BulkImports
@import_export_config ||= context.import_export_config
end
def current_user
@current_user ||= context.current_user
end
included do
private
@@ -174,6 +178,14 @@ module BulkImports
class_attributes[:ndjson_pipeline]
end
def relation_name(name)
class_attributes[:relation_name] = name
end
def relation
class_attributes[:relation_name]
end
private
def add_attribute(sym, klass, options)
@@ -29,9 +29,13 @@ module BulkImports
pipeline: BulkImports::Groups::Pipelines::BadgesPipeline,
stage: 1
},
boards: {
pipeline: BulkImports::Groups::Pipelines::BoardsPipeline,
stage: 2
},
finisher: {
pipeline: BulkImports::Groups::Pipelines::EntityFinisher,
stage: 2
stage: 3
}
}.freeze
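
Editor's note: as a hypothetical illustration of how this config hash relates to the [stage, pipeline] pairs asserted in the Stage specs elsewhere in this commit — a sketch only, not the actual BulkImports::Stage implementation:

# Hypothetical sketch: flattening the stage config into the ordered
# [stage, pipeline] pairs the Stage specs assert.
config = {
  badges:   { pipeline: 'BadgesPipeline',  stage: 1 },
  boards:   { pipeline: 'BoardsPipeline',  stage: 2 },
  finisher: { pipeline: 'EntityFinisher',  stage: 3 }
}

pairs = config.values
              .sort_by { |entry| entry[:stage] }
              .map { |entry| [entry[:stage], entry[:pipeline]] }
# => [[1, "BadgesPipeline"], [2, "BoardsPipeline"], [3, "EntityFinisher"]]
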
{"id":173,"project_id":null,"created_at":"2020-02-11T14:35:51.561Z","updated_at":"2020-02-11T14:35:51.561Z","name":"first board","milestone_id":null,"group_id":4351,"weight":null,"lists":[{"id":189,"board_id":173,"label_id":271,"list_type":"label","position":0,"created_at":"2020-02-11T14:35:57.131Z","updated_at":"2020-02-11T14:35:57.131Z","user_id":null,"milestone_id":null,"max_issue_count":0,"max_issue_weight":0,"label":{"id":271,"title":"TSL","color":"#58796f","project_id":null,"created_at":"2019-11-20T17:02:20.541Z","updated_at":"2020-02-06T15:44:52.048Z","template":false,"description":null,"group_id":4351,"type":"GroupLabel","priorities":[]},"board":{"id":173,"project_id":null,"created_at":"2020-02-11T14:35:51.561Z","updated_at":"2020-02-11T14:35:51.561Z","name":"hi","milestone_id":null,"group_id":4351,"weight":null}},{"id":190,"board_id":173,"label_id":272,"list_type":"label","position":1,"created_at":"2020-02-11T14:35:57.868Z","updated_at":"2020-02-11T14:35:57.868Z","user_id":null,"milestone_id":null,"max_issue_count":0,"max_issue_weight":0,"label":{"id":272,"title":"Sosync","color":"#110320","project_id":null,"created_at":"2019-11-20T17:02:20.532Z","updated_at":"2020-02-06T15:44:52.057Z","template":false,"description":null,"group_id":4351,"type":"GroupLabel","priorities":[]},"board":{"id":173,"project_id":null,"created_at":"2020-02-11T14:35:51.561Z","updated_at":"2020-02-11T14:35:51.561Z","name":"hi","milestone_id":null,"group_id":4351,"weight":null}},{"id":188,"board_id":173,"label_id":null,"list_type":"closed","position":null,"created_at":"2020-02-11T14:35:51.593Z","updated_at":"2020-02-11T14:35:51.593Z","user_id":null,"milestone_id":null,"max_issue_count":0,"max_issue_weight":0}],"labels":[]}
{"id":111,"title":"Label 1","color":"#6699cc","project_id":null,"created_at":"2021-04-15T07:15:08.063Z","updated_at":"2021-04-15T07:15:08.063Z","template":false,"description":"Label 1","group_id":107,"type":"GroupLabel","priorities":[],"textColor":"#FFFFFF"}
{"id":7642,"title":"v4.0","project_id":null,"description":"Et laudantium enim omnis ea reprehenderit iure.","due_date":null,"created_at":"2019-11-20T17:02:14.336Z","updated_at":"2019-11-20T17:02:14.336Z","state":"closed","iid":5,"start_date":null,"group_id":4351}
{"id":7641,"title":"v3.0","project_id":null,"description":"Et repellat culpa nemo consequatur ut reprehenderit.","due_date":null,"created_at":"2019-11-20T17:02:14.323Z","updated_at":"2019-11-20T17:02:14.323Z","state":"active","iid":4,"start_date":null,"group_id":4351}
{"id":7640,"title":"v2.0","project_id":null,"description":"Velit cupiditate est neque voluptates iste rem sunt.","due_date":null,"created_at":"2019-11-20T17:02:14.309Z","updated_at":"2019-11-20T17:02:14.309Z","state":"active","iid":3,"start_date":null,"group_id":4351}
{"id":7639,"title":"v1.0","project_id":null,"description":"Amet velit repellat ut rerum aut cum.","due_date":null,"created_at":"2019-11-20T17:02:14.296Z","updated_at":"2019-11-20T17:02:14.296Z","state":"active","iid":2,"start_date":null,"group_id":4351}
{"id":7638,"title":"v0.0","project_id":null,"description":"Ea quia asperiores ut modi dolorem sunt non numquam.","due_date":null,"created_at":"2019-11-20T17:02:14.282Z","updated_at":"2019-11-20T17:02:14.282Z","state":"active","iid":1,"start_date":null,"group_id":4351}
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe BulkImports::Common::Extractors::NdjsonExtractor do
let_it_be(:tmpdir) { Dir.mktmpdir }
let_it_be(:filepath) { 'spec/fixtures/bulk_imports/labels.ndjson.gz' }
let_it_be(:filepath) { 'spec/fixtures/bulk_imports/gz/labels.ndjson.gz' }
let_it_be(:import) { create(:bulk_import) }
let_it_be(:config) { create(:bulk_import_configuration, bulk_import: import) }
let_it_be(:entity) { create(:bulk_import_entity, bulk_import: import) }
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::BoardsPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
let_it_be(:filepath) { 'spec/fixtures/bulk_imports/gz/boards.ndjson.gz' }
let_it_be(:entity) do
create(
:bulk_import_entity,
group: group,
bulk_import: bulk_import,
source_full_path: 'source/full/path',
destination_name: 'My Destination Group',
destination_namespace: group.full_path
)
end
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:tmpdir) { Dir.mktmpdir }
before do
FileUtils.copy_file(filepath, File.join(tmpdir, 'boards.ndjson.gz'))
group.add_owner(user)
end
subject { described_class.new(context) }
describe '#run' do
it 'imports group boards into destination group and removes tmpdir' do
allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
allow_next_instance_of(BulkImports::FileDownloadService) do |service|
allow(service).to receive(:execute)
end
expect { subject.run }.to change(Board, :count).by(1)
lists = group.boards.find_by(name: 'first board').lists
expect(lists.count).to eq(3)
expect(lists.first.label.title).to eq('TSL')
expect(lists.second.label.title).to eq('Sosync')
end
end
end
@@ -6,7 +6,7 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
let_it_be(:filepath) { 'spec/fixtures/bulk_imports/labels.ndjson.gz' }
let_it_be(:filepath) { 'spec/fixtures/bulk_imports/gz/labels.ndjson.gz' }
let_it_be(:entity) do
create(
:bulk_import_entity,
@@ -75,17 +75,4 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
end
end
end
describe 'pipeline parts' do
it { expect(described_class).to include_module(BulkImports::NdjsonPipeline) }
it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
it 'has extractor' do
expect(described_class.get_extractor)
.to eq(
klass: BulkImports::Common::Extractors::NdjsonExtractor,
options: { relation: described_class::RELATION }
)
end
end
end
@@ -5,119 +5,69 @@ require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::MilestonesPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:timestamp) { Time.new(2020, 01, 01).utc }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
let_it_be(:filepath) { 'spec/fixtures/bulk_imports/gz/milestones.ndjson.gz' }
let_it_be(:entity) do
create(
:bulk_import_entity,
group: group,
bulk_import: bulk_import,
source_full_path: 'source/full/path',
destination_name: 'My Destination Group',
destination_namespace: group.full_path,
group: group
destination_namespace: group.full_path
)
end
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
subject { described_class.new(context) }
let(:tmpdir) { Dir.mktmpdir }
before do
FileUtils.copy_file(filepath, File.join(tmpdir, 'milestones.ndjson.gz'))
group.add_owner(user)
end
describe '#run' do
it 'imports group milestones' do
first_page = extracted_data(title: 'milestone1', iid: 1, has_next_page: true)
last_page = extracted_data(title: 'milestone2', iid: 2)
subject { described_class.new(context) }
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
allow(extractor)
.to receive(:extract)
.and_return(first_page, last_page)
describe '#run' do
it 'imports group milestones into destination group and removes tmpdir' do
allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
allow_next_instance_of(BulkImports::FileDownloadService) do |service|
allow(service).to receive(:execute)
end
expect { subject.run }.to change(Milestone, :count).by(2)
expect(group.milestones.pluck(:title)).to contain_exactly('milestone1', 'milestone2')
milestone = group.milestones.last
expect(milestone.description).to eq('desc')
expect(milestone.state).to eq('closed')
expect(milestone.start_date.to_s).to eq('2020-10-21')
expect(milestone.due_date.to_s).to eq('2020-10-22')
expect(milestone.created_at).to eq(timestamp)
expect(milestone.updated_at).to eq(timestamp)
expect { subject.run }.to change(Milestone, :count).by(5)
expect(group.milestones.pluck(:title)).to contain_exactly('v4.0', 'v3.0', 'v2.0', 'v1.0', 'v0.0')
expect(File.directory?(tmpdir)).to eq(false)
end
end
describe '#load' do
it 'creates the milestone' do
data = milestone_data('milestone')
context 'when milestone is not persisted' do
it 'saves the milestone' do
milestone = build(:milestone, group: group)
expect { subject.load(context, data) }.to change(Milestone, :count).by(1)
end
expect(milestone).to receive(:save!)
context 'when user is not authorized to create the milestone' do
before do
allow(user).to receive(:can?).with(:admin_milestone, group).and_return(false)
end
it 'raises NotAllowedError' do
data = extracted_data(title: 'milestone')
expect { subject.load(context, data) }.to raise_error(::BulkImports::Pipeline::NotAllowedError)
end
subject.load(context, milestone)
end
end
describe 'pipeline parts' do
it { expect(described_class).to include_module(BulkImports::Pipeline) }
it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
context 'when milestone is persisted' do
it 'does not save milestone' do
milestone = create(:milestone, group: group)
it 'has extractors' do
expect(described_class.get_extractor)
.to eq(
klass: BulkImports::Common::Extractors::GraphqlExtractor,
options: {
query: BulkImports::Groups::Graphql::GetMilestonesQuery
}
)
end
expect(milestone).not_to receive(:save!)
it 'has transformers' do
expect(described_class.transformers)
.to contain_exactly(
{ klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil }
)
subject.load(context, milestone)
end
end
def milestone_data(title, iid: 1)
{
'title' => title,
'description' => 'desc',
'iid' => iid,
'state' => 'closed',
'start_date' => '2020-10-21',
'due_date' => '2020-10-22',
'created_at' => timestamp.to_s,
'updated_at' => timestamp.to_s
}
context 'when milestone is missing' do
it 'returns' do
expect(subject.load(context, nil)).to be_nil
end
end
def extracted_data(title:, iid: 1, has_next_page: false)
page_info = {
'has_next_page' => has_next_page,
'next_page' => has_next_page ? 'cursor' : nil
}
BulkImports::Pipeline::ExtractedData.new(
data: milestone_data(title, iid: iid),
page_info: page_info
)
end
end
@@ -5,22 +5,31 @@ require 'spec_helper'
RSpec.describe BulkImports::NdjsonPipeline do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project) }
let_it_be(:klass) do
let_it_be(:user) { create(:user) }
let(:klass) do
Class.new do
include BulkImports::NdjsonPipeline
attr_reader :portable
relation_name 'test'
attr_reader :portable, :current_user
def initialize(portable)
def initialize(portable, user)
@portable = portable
@current_user = user
end
end
end
before do
stub_const('NdjsonPipelineClass', klass)
end
subject { klass.new(group) }
subject { NdjsonPipelineClass.new(group, user) }
it 'marks pipeline as ndjson' do
expect(klass.ndjson_pipeline?).to eq(true)
expect(NdjsonPipelineClass.ndjson_pipeline?).to eq(true)
end
describe '#deep_transform_relation!' do
@@ -91,6 +100,60 @@ RSpec.describe BulkImports::NdjsonPipeline do
end
end
describe '#transform' do
it 'calls relation factory' do
hash = { key: :value }
data = [hash, 1]
user = double
config = double(relation_excluded_keys: nil, top_relation_tree: [])
context = double(portable: group, current_user: user, import_export_config: config)
allow(subject).to receive(:import_export_config).and_return(config)
expect(Gitlab::ImportExport::Group::RelationFactory)
.to receive(:create)
.with(
relation_index: 1,
relation_sym: :test,
relation_hash: hash,
importable: group,
members_mapper: instance_of(Gitlab::ImportExport::MembersMapper),
object_builder: Gitlab::ImportExport::Group::ObjectBuilder,
user: user,
excluded_keys: nil
)
subject.transform(context, data)
end
end
describe '#load' do
context 'when object is not persisted' do
it 'saves the object' do
object = double(persisted?: false)
expect(object).to receive(:save!)
subject.load(nil, object)
end
end
context 'when object is persisted' do
it 'does not save the object' do
object = double(persisted?: true)
expect(object).not_to receive(:save!)
subject.load(nil, object)
end
end
context 'when object is missing' do
it 'returns' do
expect(subject.load(nil, nil)).to be_nil
end
end
end
describe '#relation_class' do
context 'when relation name is pluralized' do
it 'returns constantized class' do
@@ -113,7 +176,7 @@ RSpec.describe BulkImports::NdjsonPipeline do
end
context 'when portable is project' do
subject { klass.new(project) }
subject { NdjsonPipelineClass.new(project, user) }
it 'returns group relation name override' do
expect(subject.relation_key_override('labels')).to eq('project_labels')
@@ -10,7 +10,8 @@ RSpec.describe BulkImports::Stage do
[1, BulkImports::Groups::Pipelines::MembersPipeline],
[1, BulkImports::Groups::Pipelines::LabelsPipeline],
[1, BulkImports::Groups::Pipelines::MilestonesPipeline],
[1, BulkImports::Groups::Pipelines::BadgesPipeline]
[1, BulkImports::Groups::Pipelines::BadgesPipeline],
[2, BulkImports::Groups::Pipelines::BoardsPipeline]
]
end
@@ -57,7 +57,7 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil do
it 'decompresses specified file' do
tmpdir = Dir.mktmpdir
filename = 'labels.ndjson.gz'
gz_filepath = "spec/fixtures/bulk_imports/#{filename}"
gz_filepath = "spec/fixtures/bulk_imports/gz/#{filename}"
FileUtils.copy_file(gz_filepath, File.join(tmpdir, filename))
subject.gunzip(dir: tmpdir, filename: filename)
@@ -13,7 +13,7 @@ RSpec.describe BulkImports::ExportUpload do
method = 'export_file'
filename = 'labels.ndjson.gz'
subject.public_send("#{method}=", fixture_file_upload("spec/fixtures/bulk_imports/#{filename}"))
subject.public_send("#{method}=", fixture_file_upload("spec/fixtures/bulk_imports/gz/#{filename}"))
subject.save!
url = "/uploads/-/system/bulk_imports/export_upload/export_file/#{subject.id}/#{filename}"
@@ -215,7 +215,7 @@ RSpec.describe API::GroupExport do
context 'when export file exists' do
it 'downloads exported group archive' do
upload.update!(export_file: fixture_file_upload('spec/fixtures/bulk_imports/labels.ndjson.gz'))
upload.update!(export_file: fixture_file_upload('spec/fixtures/bulk_imports/gz/labels.ndjson.gz'))
get api(download_path, user)
@@ -7,7 +7,7 @@ RSpec.describe BulkImports::FileDecompressionService do
let_it_be(:ndjson_filename) { 'labels.ndjson' }
let_it_be(:ndjson_filepath) { File.join(tmpdir, ndjson_filename) }
let_it_be(:gz_filename) { "#{ndjson_filename}.gz" }
let_it_be(:gz_filepath) { "spec/fixtures/bulk_imports/#{gz_filename}" }
let_it_be(:gz_filepath) { "spec/fixtures/bulk_imports/gz/#{gz_filename}" }
before do
FileUtils.copy_file(gz_filepath, File.join(tmpdir, gz_filename))
@@ -62,7 +62,7 @@ RSpec.describe BulkImports::RelationExportService do
let(:upload) { create(:bulk_import_export_upload, export: export) }
it 'removes existing export before exporting' do
upload.update!(export_file: fixture_file_upload('spec/fixtures/bulk_imports/labels.ndjson.gz'))
upload.update!(export_file: fixture_file_upload('spec/fixtures/bulk_imports/gz/labels.ndjson.gz'))
expect_any_instance_of(BulkImports::ExportUpload) do |upload|
expect(upload).to receive(:remove_export_file!)
@@ -140,6 +140,10 @@ RSpec.describe BulkImports::PipelineWorker do
def self.ndjson_pipeline?
true
end
def self.relation
'test'
end
end
end
@@ -153,7 +157,6 @@ RSpec.describe BulkImports::PipelineWorker do
before do
stub_const('NdjsonPipeline', ndjson_pipeline)
stub_const('NdjsonPipeline::RELATION', 'test')
allow(BulkImports::Stage)
.to receive(:pipeline_exists?)
.with('NdjsonPipeline')