Commit 307b0e6c authored by Rémy Coutable

Merge branch 'georgekoltsov/project-migration-of-issues' into 'master'

Project Migration of issues

See merge request gitlab-org/gitlab!71211
parents 649767c6 8f79adc6
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe BulkImports::Projects::Pipelines::IssuesPipeline do
  let_it_be(:user) { create(:user) }
  let_it_be(:group) { create(:group) }
  let_it_be(:project) { create(:project, group: group) }
  let_it_be(:bulk_import) { create(:bulk_import, user: user) }
  let_it_be(:filepath) { 'spec/fixtures/bulk_imports/gz/issues.ndjson.gz' }
  let_it_be(:entity) do
    create(
      :bulk_import_entity,
      :project_entity,
      project: project,
      bulk_import: bulk_import,
      source_full_path: 'source/full/path',
      destination_name: 'My Destination Project',
      destination_namespace: group.full_path
    )
  end

  let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
  let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }

  let(:issue) do
    {
      'title' => 'Imported Issue',
      'description' => 'Description',
      'state' => 'opened',
      'updated_at' => '2016-06-14T15:02:47.967Z',
      'author_id' => 22,
      'epic_issue' => {
        'id' => 78,
        'relative_position' => 1073740323,
        'epic' => {
          'title' => 'An epic',
          'state_id' => 'opened',
          'author_id' => 22
        }
      }
    }
  end

  subject(:pipeline) { described_class.new(context) }

  describe '#run' do
    before do
      group.add_owner(user)

      issue_with_index = [issue, 0]

      allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
        allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [issue_with_index]))
      end
    end

    context 'with pre-existing epic' do
      it 'associates existing epic with imported issue' do
        epic = create(:epic, title: 'An epic', group: group)

        expect { pipeline.run }.not_to change { Epic.count }

        expect(group.epics.count).to eq(1)
        expect(project.issues.first.epic).to eq(epic)
        expect(project.issues.first.epic_issue.relative_position).not_to be_nil
      end
    end

    context 'without pre-existing epic' do
      it 'creates a new epic for imported issue' do
        group.epics.delete_all

        expect { pipeline.run }.to change { Epic.count }.from(0).to(1)

        expect(group.epics.count).to eq(1)
        expect(project.issues.first.epic).not_to be_nil
        expect(project.issues.first.epic_issue.relative_position).not_to be_nil
      end
    end
  end
end
@@ -14,7 +14,7 @@ module BulkImports
         relation_definition = import_export_config.top_relation_tree(relation)
 
         deep_transform_relation!(relation_hash, relation, relation_definition) do |key, hash|
-          Gitlab::ImportExport::Group::RelationFactory.create(
+          relation_factory.create(
             relation_index: relation_index,
             relation_sym: key.to_sym,
             relation_hash: hash,
@@ -83,6 +83,10 @@ module BulkImports
        "Gitlab::ImportExport::#{portable.class}::ObjectBuilder".constantize
      end
 
+     def relation_factory
+       "Gitlab::ImportExport::#{portable.class}::RelationFactory".constantize
+     end
+
      def relation
        self.class.relation
      end
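The new #relation_factory helper mirrors the existing #object_builder lookup above: it interpolates the portable's class name (Group or Project) into a constant path and resolves it with ActiveSupport's String#constantize. A minimal standalone sketch of that resolution, with stub classes standing in for the real GitLab constants:

# Standalone illustration of the dynamic lookup used by #relation_factory.
# The nested modules below are stubs, not GitLab's real classes.
require 'active_support/core_ext/string/inflections'

module Gitlab
  module ImportExport
    module Group; class RelationFactory; end; end
    module Project; class RelationFactory; end; end
  end
end

class Project; end # stub for the portable being imported

portable = Project.new
"Gitlab::ImportExport::#{portable.class}::RelationFactory".constantize
# => Gitlab::ImportExport::Project::RelationFactory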
# frozen_string_literal: true

module BulkImports
  module Projects
    module Pipelines
      class IssuesPipeline
        include NdjsonPipeline

        relation_name 'issues'

        extractor ::BulkImports::Common::Extractors::NdjsonExtractor, relation: relation
      end
    end
  end
end
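The pipeline itself stays declarative: relation_name 'issues' ties it to the issues NDJSON relation of the export (the specs use the fixture spec/fixtures/bulk_imports/gz/issues.ndjson.gz), and the shared NdjsonPipeline transform does the rest. A rough usage sketch mirroring the spec setup below; the tracker is whatever persisted pipeline tracker the migration is running for, not an API introduced by this MR:

# Mirrors how the specs drive the pipeline; `tracker` stands in for a
# persisted bulk import pipeline tracker record.
context = BulkImports::Pipeline::Context.new(tracker)

pipeline = BulkImports::Projects::Pipelines::IssuesPipeline.new(context)
pipeline.run # extracts issue rows from the NDJSON file and creates them on the destination project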
@@ -15,9 +15,13 @@ module BulkImports
             pipeline: BulkImports::Common::Pipelines::LabelsPipeline,
             stage: 1
           },
+          issues: {
+            pipeline: BulkImports::Projects::Pipelines::IssuesPipeline,
+            stage: 2
+          },
           finisher: {
             pipeline: BulkImports::Common::Pipelines::EntityFinisher,
-            stage: 2
+            stage: 3
           }
         }
       end
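With this change, issues get their own stage (2) between labels and the finisher, which moves to stage 3. A small standalone sketch of how the stage numbers translate into execution order; symbols stand in for the pipeline classes, and the sort is illustrative, not the actual BulkImports::Stage implementation:

# Stage numbers from the updated config; pipeline classes replaced with
# symbols so the ordering can be demonstrated standalone.
config = {
  project:  { pipeline: :project_pipeline, stage: 0 },
  labels:   { pipeline: :labels_pipeline, stage: 1 },
  issues:   { pipeline: :issues_pipeline, stage: 2 },  # added by this MR
  finisher: { pipeline: :entity_finisher, stage: 3 }   # bumped from stage 2
}

config.values.sort_by { |entry| entry[:stage] }.map { |entry| [entry[:stage], entry[:pipeline]] }
# => [[0, :project_pipeline], [1, :labels_pipeline], [2, :issues_pipeline], [3, :entity_finisher]]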
@@ -186,4 +186,20 @@ RSpec.describe BulkImports::NdjsonPipeline do
       end
     end
   end
+
+  describe '#relation_factory' do
+    context 'when portable is group' do
+      it 'returns group relation factory' do
+        expect(subject.relation_factory).to eq(Gitlab::ImportExport::Group::RelationFactory)
+      end
+    end
+
+    context 'when portable is project' do
+      subject { NdjsonPipelineClass.new(project, user) }
+
+      it 'returns project relation factory' do
+        expect(subject.relation_factory).to eq(Gitlab::ImportExport::Project::RelationFactory)
+      end
+    end
+  end
 end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe BulkImports::Projects::Pipelines::IssuesPipeline do
  let_it_be(:user) { create(:user) }
  let_it_be(:group) { create(:group) }
  let_it_be(:project) { create(:project, group: group) }
  let_it_be(:bulk_import) { create(:bulk_import, user: user) }
  let_it_be(:entity) do
    create(
      :bulk_import_entity,
      :project_entity,
      project: project,
      bulk_import: bulk_import,
      source_full_path: 'source/full/path',
      destination_name: 'My Destination Project',
      destination_namespace: group.full_path
    )
  end

  let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
  let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }

  let(:issue_attributes) { {} }
  let(:issue) do
    {
      'iid' => 7,
      'title' => 'Imported Issue',
      'description' => 'Description',
      'state' => 'opened',
      'updated_at' => '2016-06-14T15:02:47.967Z',
      'author_id' => 22
    }.merge(issue_attributes)
  end

  subject(:pipeline) { described_class.new(context) }

  describe '#run' do
    before do
      group.add_owner(user)

      issue_with_index = [issue, 0]

      allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
        allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [issue_with_index]))
      end

      pipeline.run
    end

    it 'imports issue into destination project' do
      expect(project.issues.count).to eq(1)

      imported_issue = project.issues.last

      aggregate_failures do
        expect(imported_issue.iid).to eq(7)
        expect(imported_issue.title).to eq(issue['title'])
        expect(imported_issue.description).to eq(issue['description'])
        expect(imported_issue.author).to eq(user)
        expect(imported_issue.state).to eq('opened')
        expect(imported_issue.updated_at.to_s).to eq('2016-06-14 15:02:47 UTC')
      end
    end

    context 'zoom meetings' do
      let(:issue_attributes) { { 'zoom_meetings' => [{ 'url' => 'https://zoom.us/j/123456789' }] } }

      it 'restores zoom meetings' do
        expect(project.issues.last.zoom_meetings.first.url).to eq('https://zoom.us/j/123456789')
      end
    end

    context 'sentry issue' do
      let(:issue_attributes) { { 'sentry_issue' => { 'sentry_issue_identifier' => '1234567891' } } }

      it 'restores sentry issue information' do
        expect(project.issues.last.sentry_issue.sentry_issue_identifier).to eq(1234567891)
      end
    end

    context 'award emoji' do
      let(:issue_attributes) { { 'award_emoji' => [{ 'name' => 'musical_keyboard', 'user_id' => 22 }] } }

      it 'has award emoji on an issue' do
        award_emoji = project.issues.last.award_emoji.first

        expect(award_emoji.name).to eq('musical_keyboard')
        expect(award_emoji.user).to eq(user)
      end
    end

    context 'issue state' do
      let(:issue_attributes) { { 'state' => 'closed' } }

      it 'restores issue state' do
        expect(project.issues.last.state).to eq('closed')
      end
    end

    context 'labels' do
      let(:issue_attributes) do
        {
          'label_links' => [
            { 'label' => { 'title' => 'imported label 1', 'type' => 'ProjectLabel' } },
            { 'label' => { 'title' => 'imported label 2', 'type' => 'ProjectLabel' } }
          ]
        }
      end

      it 'restores issue labels' do
        expect(project.issues.last.labels.pluck(:title)).to contain_exactly('imported label 1', 'imported label 2')
      end
    end

    context 'milestone' do
      let(:issue_attributes) { { 'milestone' => { 'title' => 'imported milestone' } } }

      it 'restores issue milestone' do
        expect(project.issues.last.milestone.title).to eq('imported milestone')
      end
    end

    context 'timelogs' do
      let(:issue_attributes) { { 'timelogs' => [{ 'time_spent' => 72000, 'spent_at' => '2019-12-27T00:00:00.000Z', 'user_id' => 22 }] } }

      it 'restores issue timelogs' do
        timelog = project.issues.last.timelogs.first

        aggregate_failures do
          expect(timelog.time_spent).to eq(72000)
          expect(timelog.spent_at).to eq("2019-12-27T00:00:00.000Z")
        end
      end
    end

    context 'notes' do
      let(:issue_attributes) do
        {
          'notes' => [
            {
              'note' => 'Issue note',
              'author_id' => 22,
              'author' => {
                'name' => 'User 22'
              },
              'updated_at' => '2016-06-14T15:02:47.770Z',
              'award_emoji' => [
                {
                  'name' => 'clapper',
                  'user_id' => 22
                }
              ]
            }
          ]
        }
      end

      it 'restores issue notes and their award emoji' do
        note = project.issues.last.notes.first

        aggregate_failures do
          expect(note.note).to eq("Issue note\n\n *By User 22 on 2016-06-14T15:02:47 (imported from GitLab)*")
          expect(note.award_emoji.first.name).to eq('clapper')
        end
      end
    end
  end
end
@@ -7,7 +7,8 @@ RSpec.describe BulkImports::Projects::Stage do
       [
         [0, BulkImports::Projects::Pipelines::ProjectPipeline],
         [1, BulkImports::Common::Pipelines::LabelsPipeline],
-        [2, BulkImports::Common::Pipelines::EntityFinisher]
+        [2, BulkImports::Projects::Pipelines::IssuesPipeline],
+        [3, BulkImports::Common::Pipelines::EntityFinisher]
       ]
     end