Commit dcfda9ad authored by Arturo Herrero

Merge branch 'georgekoltsov/bulk-import-migrate-iterations' into 'master'

Migrate group iterations when using Bulk Import

See merge request gitlab-org/gitlab!56018
parents 0f092626 622d4bdc
@@ -57,6 +57,15 @@ The following resources are migrated to the target instance:
   - due date
   - created at
   - updated at
+- Iterations ([Introduced in 13.10](https://gitlab.com/gitlab-org/gitlab/-/issues/292428))
+  - iid
+  - title
+  - description
+  - state (upcoming / started / closed)
+  - start date
+  - due date
+  - created at
+  - updated at
 Any other items are **not** migrated.
---
title: Migrate group iterations when using Bulk Import
merge_request: 56018
author:
type: added
# frozen_string_literal: true

module EE
  module BulkImports
    module Groups
      module Graphql
        module GetIterationsQuery
          extend self

          def to_s
            <<-'GRAPHQL'
            query($full_path: ID!, $cursor: String) {
              group(fullPath: $full_path) {
                iterations(first: 100, after: $cursor, includeAncestors: false) {
                  page_info: pageInfo {
                    end_cursor: endCursor
                    has_next_page: hasNextPage
                  }
                  nodes {
                    iid
                    title
                    description
                    state
                    start_date: startDate
                    due_date: dueDate
                    created_at: createdAt
                    updated_at: updatedAt
                  }
                }
              }
            }
            GRAPHQL
          end

          def variables(context)
            {
              full_path: context.entity.source_full_path,
              cursor: context.entity.next_page_for(:iterations)
            }
          end

          def base_path
            %w[data group iterations]
          end

          def data_path
            base_path << 'nodes'
          end

          def page_info_path
            base_path << 'page_info'
          end
        end
      end
    end
  end
end
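For orientation, a hypothetical snippet (not part of this MR) showing how the `data_path` and `page_info_path` helpers are presumably used to dig into a parsed GraphQL response; the response hash below is illustrative, shaped by the aliases (`page_info`, `end_cursor`, `has_next_page`, `start_date`, ...) that the query above defines:

```ruby
# Illustrative only: a response shaped like the aliased query above.
response = {
  'data' => {
    'group' => {
      'iterations' => {
        'page_info' => { 'end_cursor' => 'abc123', 'has_next_page' => false },
        'nodes' => [{ 'iid' => 1, 'title' => 'Sprint 1' }]
      }
    }
  }
}

query = EE::BulkImports::Groups::Graphql::GetIterationsQuery

response.dig(*query.data_path)      # => [{ 'iid' => 1, 'title' => 'Sprint 1' }]
response.dig(*query.page_info_path) # => { 'end_cursor' => 'abc123', 'has_next_page' => false }
```

Note how the snake_case aliases in the query mean each node can be handed to Rails attribute assignment without any key transformation.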
# frozen_string_literal: true

module EE
  module BulkImports
    module Groups
      module Pipelines
        class IterationsPipeline
          include ::BulkImports::Pipeline

          extractor ::BulkImports::Common::Extractors::GraphqlExtractor,
            query: EE::BulkImports::Groups::Graphql::GetIterationsQuery

          transformer ::BulkImports::Common::Transformers::ProhibitedAttributesTransformer

          def load(context, data)
            return unless data

            raise ::BulkImports::Pipeline::NotAllowedError unless authorized?

            context.group.iterations.create!(data)
          end

          def after_run(extracted_data)
            context.entity.update_tracker_for(
              relation: :iterations,
              has_next_page: extracted_data.has_next_page?,
              next_page: extracted_data.next_page
            )

            if extracted_data.has_next_page?
              run
            end
          end

          private

          def authorized?
            context.current_user.can?(:admin_iteration, context.group)
          end
        end
      end
    end
  end
end
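A rough sketch of the run loop this class plugs into, assuming the semantics of `BulkImports::Pipeline::Runner` (simplified and hypothetical; only `load` and `after_run` above are real entry points):

```ruby
# Simplified, hypothetical runner loop -- the real one lives in
# BulkImports::Pipeline::Runner.
def run
  extracted = extractor.extract(context) # one page of iterations (100 max)

  extracted.data.each do |entry|
    # Each transformer (here, ProhibitedAttributesTransformer) cleans the row.
    entry = transformers.reduce(entry) { |data, transformer| transformer.transform(context, data) }
    load(context, entry)
  end

  # after_run persists the cursor, then calls run again while pages remain.
  after_run(extracted)
end
```

This is why `after_run` both updates the tracker and conditionally re-invokes `run`: pagination is driven by recursion, with the saved cursor feeding the next `variables(context)` call.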
@@ -13,7 +13,8 @@ module EE
       super + [
         EE::BulkImports::Groups::Pipelines::EpicsPipeline,
         EE::BulkImports::Groups::Pipelines::EpicAwardEmojiPipeline,
-        EE::BulkImports::Groups::Pipelines::EpicEventsPipeline
+        EE::BulkImports::Groups::Pipelines::EpicEventsPipeline,
+        EE::BulkImports::Groups::Pipelines::IterationsPipeline
       ]
     end
   end
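For context, the stage list above is presumably consumed along these lines (hypothetical, condensed from what `BulkImports::Importers::GroupImporter` is assumed to do):

```ruby
# Hypothetical sketch: each registered pipeline runs in order for the entity.
context = BulkImports::Pipeline::Context.new(entity)

pipelines.each do |pipeline_class|
  pipeline_class.new(context).run
end
```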
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe EE::BulkImports::Groups::Graphql::GetIterationsQuery do
  it 'has a valid query' do
    entity = create(:bulk_import_entity)
    context = BulkImports::Pipeline::Context.new(entity)

    query = GraphQL::Query.new(
      GitlabSchema,
      described_class.to_s,
      variables: described_class.variables(context)
    )
    result = GitlabSchema.static_validator.validate(query)

    expect(result[:errors]).to be_empty
  end

  describe '#data_path' do
    it 'returns data path' do
      expected = %w[data group iterations nodes]

      expect(described_class.data_path).to eq(expected)
    end
  end

  describe '#page_info_path' do
    it 'returns pagination information path' do
      expected = %w[data group iterations page_info]

      expect(described_class.page_info_path).to eq(expected)
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe EE::BulkImports::Groups::Pipelines::IterationsPipeline do
  let_it_be(:user) { create(:user) }
  let_it_be(:group) { create(:group) }
  let_it_be(:cursor) { 'cursor' }
  let_it_be(:timestamp) { Time.new(2020, 01, 01).utc }
  let_it_be(:bulk_import) { create(:bulk_import, user: user) }

  let(:entity) do
    create(
      :bulk_import_entity,
      bulk_import: bulk_import,
      source_full_path: 'source/full/path',
      destination_name: 'My Destination Group',
      destination_namespace: group.full_path,
      group: group
    )
  end

  let(:context) { BulkImports::Pipeline::Context.new(entity) }

  subject { described_class.new(context) }

  before do
    stub_licensed_features(iterations: true)
    group.add_owner(user)
  end

  def iteration_data(title, start_date: Date.today)
    {
      'title' => title,
      'description' => 'desc',
      'state' => 'upcoming',
      'start_date' => start_date,
      'due_date' => start_date + 1.day,
      'created_at' => timestamp.to_s,
      'updated_at' => timestamp.to_s
    }
  end

  def extracted_data(title:, has_next_page:, cursor: nil, start_date: Date.today)
    page_info = {
      'end_cursor' => cursor,
      'has_next_page' => has_next_page
    }

    BulkImports::Pipeline::ExtractedData.new(
      data: [iteration_data(title, start_date: start_date)],
      page_info: page_info
    )
  end
  describe '#run' do
    it 'imports group iterations' do
      first_page = extracted_data(title: 'iteration1', has_next_page: true, cursor: cursor)
      last_page = extracted_data(title: 'iteration2', has_next_page: false, start_date: Date.today + 2.days)

      allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
        allow(extractor)
          .to receive(:extract)
          .and_return(first_page, last_page)
      end

      expect { subject.run }.to change(Iteration, :count).by(2)
      expect(group.iterations.pluck(:title)).to contain_exactly('iteration1', 'iteration2')

      iteration = group.iterations.last

      expect(iteration.description).to eq('desc')
      expect(iteration.state).to eq('upcoming')
      expect(iteration.start_date).to eq(Date.today + 2.days)
      expect(iteration.due_date).to eq(Date.today + 3.days)
      expect(iteration.created_at).to eq(timestamp)
      expect(iteration.updated_at).to eq(timestamp)
    end
  end

  describe '#after_run' do
    context 'when extracted data has next page' do
      it 'updates tracker information and runs pipeline again' do
        data = extracted_data(title: 'iteration', has_next_page: true, cursor: cursor)

        expect(subject).to receive(:run)

        subject.after_run(data)

        tracker = entity.trackers.find_by(relation: :iterations)

        expect(tracker.has_next_page).to eq(true)
        expect(tracker.next_page).to eq(cursor)
      end
    end

    context 'when extracted data has no next page' do
      it 'updates tracker information and does not run pipeline' do
        data = extracted_data(title: 'iteration', has_next_page: false)

        expect(subject).not_to receive(:run)

        subject.after_run(data)

        tracker = entity.trackers.find_by(relation: :iterations)

        expect(tracker.has_next_page).to eq(false)
        expect(tracker.next_page).to be_nil
      end
    end
  end
  describe '#load' do
    it 'creates the iteration' do
      data = iteration_data('iteration')

      expect { subject.load(context, data) }.to change(Iteration, :count).by(1)
    end

    context 'when user is not authorized to create the iteration' do
      before do
        allow(user).to receive(:can?).with(:admin_iteration, group).and_return(false)
      end

      it 'raises NotAllowedError' do
        data = iteration_data('iteration')

        expect { subject.load(context, data) }.to raise_error(::BulkImports::Pipeline::NotAllowedError)
      end
    end
  end
  describe 'pipeline parts' do
    it { expect(described_class).to include_module(BulkImports::Pipeline) }
    it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }

    it 'has extractors' do
      expect(described_class.get_extractor)
        .to eq(
          klass: BulkImports::Common::Extractors::GraphqlExtractor,
          options: {
            query: EE::BulkImports::Groups::Graphql::GetIterationsQuery
          }
        )
    end

    it 'has transformers' do
      expect(described_class.transformers)
        .to contain_exactly(
          { klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil }
        )
    end
  end
end
@@ -27,6 +27,8 @@ RSpec.describe BulkImports::Importers::GroupImporter do
     if Gitlab.ee?
       expect_to_run_pipeline('EE::BulkImports::Groups::Pipelines::EpicsPipeline'.constantize, context: context)
       expect_to_run_pipeline('EE::BulkImports::Groups::Pipelines::EpicAwardEmojiPipeline'.constantize, context: context)
       expect_to_run_pipeline('EE::BulkImports::Groups::Pipelines::EpicEventsPipeline'.constantize, context: context)
+      expect_to_run_pipeline('EE::BulkImports::Groups::Pipelines::IterationsPipeline'.constantize, context: context)
     end

     subject.execute