Commit af7d5f51 authored by GitLab Bot's avatar GitLab Bot

Automatic merge of gitlab-org/gitlab master

parents 0674ef65 92875cf2
......@@ -127,7 +127,7 @@ module Projects
access_level: group_access_level)
end
if Feature.enabled?(:specialized_project_authorization_workers)
if Feature.enabled?(:specialized_project_authorization_workers, default_enabled: :yaml)
AuthorizedProjectUpdate::ProjectCreateWorker.perform_async(@project.id)
# AuthorizedProjectsWorker uses an exclusive lease per user but
# specialized workers might have synchronization issues. Until we
......
- type = local_assigns.fetch(:type)
- bulk_issue_health_status_flag = type == :issues && @project&.group&.feature_available?(:issuable_health_status)
- epic_bulk_edit_flag = @project&.group&.feature_available?(:epics) && type == :issues
- bulk_iterations_flag = @project.feature_available?(:iterations) && @project&.group.present? && type == :issues
- bulk_iterations_flag = @project&.group&.feature_available?(:iterations) && type == :issues
%aside.issues-bulk-update.js-right-sidebar.right-sidebar{ "aria-live" => "polite", data: { 'signed-in': current_user.present? } }
.issuable-sidebar.hidden
......
---
title: Improve highlighting for merge diffs
merge_request: 52499
author:
type: added
---
title: Add job to persist On-call shifts
merge_request: 50239
author:
type: added
---
title: Project creation to use specialized worker to calculate project authorizations
merge_request: 52719
author:
type: performance
---
name: improved_merge_diff_highlighting
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/52499
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/299884
milestone: '13.9'
type: development
group: group::source code
default_enabled: false
---
name: specialized_project_authorization_workers
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/31377
rollout_issue_url:
milestone: '13.0'
type: development
group: group::access
default_enabled: true
......@@ -600,6 +600,9 @@ Gitlab.ee do
Settings.cron_jobs['incident_sla_exceeded_check_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['incident_sla_exceeded_check_worker']['cron'] ||= '*/2 * * * *'
Settings.cron_jobs['incident_sla_exceeded_check_worker']['job_class'] = 'IncidentManagement::IncidentSlaExceededCheckWorker'
Settings.cron_jobs['incident_management_persist_oncall_rotation_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['incident_management_persist_oncall_rotation_worker']['cron'] ||= '*/5 * * * *'
Settings.cron_jobs['incident_management_persist_oncall_rotation_worker']['job_class'] = 'IncidentManagement::OncallRotations::PersistAllRotationsShiftsJob'
Settings.cron_jobs['import_software_licenses_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['import_software_licenses_worker']['cron'] ||= '0 3 * * 0'
Settings.cron_jobs['import_software_licenses_worker']['job_class'] = 'ImportSoftwareLicensesWorker'
......
......@@ -168,6 +168,8 @@
- 2
- - incident_management_apply_incident_sla_exceeded_label
- 1
- - incident_management_oncall_rotations_persist_shifts_job
- 1
- - invalid_gpg_signature_update
- 2
- - irker
......
......@@ -27,6 +27,7 @@ exceptions:
- CNA
- CNAME
- CORE
- CVS
- FREE
- CPU
- CRIME
......
---
redirect_to: '../../topics/autodevops/stages.md#auto-deploy'
---
This document was moved to [another location](../../topics/autodevops/stages.md#auto-deploy).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../../topics/autodevops/stages.md#auto-deploy'
---
This document was moved to [another location](../../topics/autodevops/stages.md#auto-deploy).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: 'environments/index.md'
---
This document was moved to [another location](environments/index.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../../user/project/merge_requests/browser_performance_testing.md#configuring-browser-performance-testing'
---
This document was moved to [another location](../../user/project/merge_requests/browser_performance_testing.md#configuring-browser-performance-testing).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: 'code_quality.md'
---
This document was moved to [another location](code_quality.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../../user/project/merge_requests/code_quality.md#example-configuration'
---
This document was moved to [another location](../../user/project/merge_requests/code_quality.md#example-configuration).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../../user/application_security/container_scanning/index.md'
---
This document was moved to [another location](../../user/application_security/container_scanning/index.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../../user/application_security/dast/index.md'
---
This document was moved to [another location](../../user/application_security/dast/index.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../../user/application_security/dependency_scanning/index.md'
---
This document was moved to [another location](../../user/application_security/dependency_scanning/index.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../../user/compliance/license_compliance/index.md'
---
This document was moved to [another location](../../user/compliance/license_compliance/index.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../../user/application_security/sast/index.md'
---
This document was moved to [another location](../../user/application_security/sast/index.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../../user/application_security/container_scanning/index.md'
---
This document was moved to [another location](../../user/application_security/container_scanning/index.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: 'README.md'
---
This example is no longer available. [View other examples](README.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../migration/jenkins.md'
---
This document was moved to [another location](../migration/jenkins.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: 'unit_test_reports.md'
---
This document was moved to [unit_test_reports](unit_test_reports.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: 'multi_project_pipelines.md'
---
This document was moved to [another location](multi_project_pipelines.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: 'pipelines/index.md'
---
This document was moved to [another location](pipelines/index.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: 'README.md'
---
This document was moved to [another location](README.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../user/admin_area/appearance.md#sign-in--sign-up-pages'
---
This document was moved to [another location](../user/admin_area/appearance.md#sign-in--sign-up-pages).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../user/admin_area/appearance.md#navigation-bar'
---
This document was moved to [another location](../user/admin_area/appearance.md#navigation-bar).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../user/admin_area/appearance.md#favicon'
---
This document was moved to [another location](../user/admin_area/appearance.md#favicon).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../user/admin_area/settings/help_page.md'
---
This document was moved to [another location](../user/admin_area/settings/help_page.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../user/admin_area/appearance.md'
---
This document was moved to [another location](../user/admin_area/appearance.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../user/project/description_templates.md'
---
This document was moved to [description_templates](../user/project/description_templates.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../user/project/issues/managing_issues.md#closing-issues-automatically'
---
This document was moved to [another location](../user/project/issues/managing_issues.md#closing-issues-automatically).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../administration/libravatar.md'
---
This document was moved to [another location](../administration/libravatar.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../user/admin_area/appearance.md#new-project-pages'
---
This document was moved to [another location](../user/admin_area/appearance.md#new-project-pages).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../user/admin_area/appearance.md#system-header-and-footer-messages'
---
This document was moved to [another location](../user/admin_area/appearance.md#system-header-and-footer-messages).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../user/admin_area/appearance.md#sign-in--sign-up-pages'
---
This document was moved to [another location](../user/admin_area/appearance.md#sign-in--sign-up-pages).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: 'installation.md#google-protobuf-loaderror-libx86_64-linux-gnulibcso6-version-glibc_214-not-found'
---
This document was moved to [another location](installation.md#google-protobuf-loaderror-libx86_64-linux-gnulibcso6-version-glibc_214-not-found).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../administration/auth/ldap/index.md'
---
This document was moved to [another location](../administration/auth/ldap/index.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: installation.md#7-redis
---
This document was moved to [another location](installation.md#7-redis).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../administration/operations/cleaning_up_redis_sessions.md'
---
This document was moved to [another location](../administration/operations/cleaning_up_redis_sessions.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: alerts.md
---
This document was moved to [another location](alerts.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: alert_integrations.md
---
This document was moved to [another location](alert_integrations.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../administration/operations/moving_repositories.md'
---
This document was moved to [another location](../administration/operations/moving_repositories.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../administration/operations/sidekiq_memory_killer.md'
---
This document was moved to [another location](../administration/operations/sidekiq_memory_killer.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../administration/operations/unicorn.md'
---
This document was moved to [another location](../administration/operations/unicorn.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: 'https://about.gitlab.com/handbook/product/product-intelligence-guide/'
---
This document was moved to [another location](https://about.gitlab.com/handbook/product/product-intelligence-guide/).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
---
redirect_to: '../development/snowplow.md'
---
This document was moved to [another location](../development/snowplow.md).
<!-- This redirect file can be deleted after February 1, 2021. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
......@@ -22,6 +22,8 @@ module IncidentManagement
validates :length, presence: true, numericality: true
validates :length_unit, presence: true
scope :started, -> { where('starts_at < ?', Time.current) }
delegate :project, to: :schedule
def shift_duration
......
......@@ -2,6 +2,8 @@
module IncidentManagement
class OncallShift < ApplicationRecord
include BulkInsertSafe
self.table_name = 'incident_management_oncall_shifts'
belongs_to :rotation, class_name: 'OncallRotation', inverse_of: :shifts, foreign_key: :rotation_id
......@@ -13,7 +15,10 @@ module IncidentManagement
validates :ends_at, presence: true
validate :timeframes_do_not_overlap, if: :rotation
scope :order_starts_at_desc, -> { order(starts_at: :desc) }
scope :for_timeframe, -> (starts_at, ends_at) do
return none unless starts_at.to_i < ends_at.to_i
where("tstzrange(starts_at, ends_at, '[)') && tstzrange(?, ?, '[)')", starts_at, ends_at)
end
......
......@@ -15,6 +15,7 @@ module IncidentManagement
@current_user = current_user
@start_time = start_time
@end_time = end_time
@current_time = Time.current
end
def execute
......@@ -23,16 +24,42 @@ module IncidentManagement
return error_invalid_range unless start_before_end?
return error_excessive_range unless under_max_timeframe?
success(
::IncidentManagement::OncallShiftGenerator
.new(rotation)
.for_timeframe(starts_at: start_time, ends_at: end_time)
)
persisted_shifts = find_shifts
generated_shifts = generate_shifts
shifts = combine_shifts(persisted_shifts, generated_shifts)
success(shifts)
end
private
attr_reader :rotation, :current_user, :start_time, :end_time
attr_reader :rotation, :current_user, :start_time, :end_time, :current_time
def find_shifts
rotation
.shifts
.for_timeframe(start_time, [end_time, current_time].min)
.order_starts_at_desc
end
def generate_shifts
::IncidentManagement::OncallShiftGenerator
.new(rotation)
.for_timeframe(
starts_at: [start_time, current_time].max,
ends_at: end_time
)
end
def combine_shifts(persisted_shifts, generated_shifts)
return generated_shifts unless persisted_shifts.present?
# Remove duplicate or overlapping shifts
min_start_time = persisted_shifts.last.ends_at
generated_shifts.reject! { |shift| shift.starts_at < min_start_time }
persisted_shifts + generated_shifts
end
def available?
::Gitlab::IncidentManagement.oncall_schedules_available?(rotation.project)
......
......@@ -235,6 +235,14 @@
:weight: 1
:idempotent: true
:tags: []
- :name: cronjob:incident_management_oncall_rotations_persist_all_rotations_shifts_job
:feature_category: :incident_management
:has_external_dependencies:
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: cronjob:ingress_modsecurity_counter_metrics
:feature_category: :web_firewall
:has_external_dependencies: true
......@@ -773,6 +781,14 @@
:weight: 1
:idempotent: true
:tags: []
- :name: incident_management_oncall_rotations_persist_shifts_job
:feature_category: :incident_management
:has_external_dependencies:
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: ldap_group_sync
:feature_category: :authentication_and_authorization
:has_external_dependencies: true
......
# frozen_string_literal: true

module IncidentManagement
  module OncallRotations
    # Cron worker that fans out one PersistShiftsJob per started on-call
    # rotation, so elapsed shifts get persisted on a regular schedule.
    class PersistAllRotationsShiftsJob
      include ApplicationWorker

      idempotent!
      feature_category :incident_management
      queue_namespace :cronjob

      # Enqueues a shift-persistence job for every rotation that has
      # already started; rotations starting in the future are excluded
      # by the `started` scope.
      def perform
        rotation_ids = IncidentManagement::OncallRotation.started.pluck(:id) # rubocop: disable CodeReuse/ActiveRecord

        rotation_ids.each do |id|
          IncidentManagement::OncallRotations::PersistShiftsJob.perform_async(id)
        end
      end
    end
  end
end
# frozen_string_literal: true

module IncidentManagement
  module OncallRotations
    # Persists the on-call shifts that have elapsed for a single rotation
    # by generating them from the rotation definition and bulk-inserting.
    class PersistShiftsJob
      include ApplicationWorker

      idempotent!
      feature_category :incident_management

      # @param rotation_id [Integer] id of the rotation to process.
      #
      # No-ops when the rotation no longer exists or on-call schedules
      # are not available for its project.
      def perform(rotation_id)
        @rotation = ::IncidentManagement::OncallRotation.find_by_id(rotation_id)

        return unless rotation && Gitlab::IncidentManagement.oncall_schedules_available?(rotation.project)

        shifts = generate_shifts
        return if shifts.blank?

        IncidentManagement::OncallShift.bulk_insert!(shifts)
      end

      private

      attr_reader :rotation

      # Generates shift records covering the window from the last known
      # point in time up to now.
      def generate_shifts
        generator = ::IncidentManagement::OncallShiftGenerator.new(rotation)

        generator.for_timeframe(
          starts_at: shift_generation_start_time,
          ends_at: Time.current
        )
      end

      # To avoid generating shifts in the past, which could lead to
      # unnecessary processing, start from the latest of: rotation created
      # time, rotation start time, or the end of the most recent shift.
      def shift_generation_start_time
        last_shift_end = rotation.shifts.order_starts_at_desc.first&.ends_at

        [rotation.created_at, rotation.starts_at, last_shift_end].compact.max
      end
    end
  end
end
---
title: Process one record at a time in Bulk Import pipelines
merge_request: 52330
author:
type: changed
......@@ -16,22 +16,22 @@ module EE
first: 100,
after: $cursor
) {
pageInfo {
endCursor
hasNextPage
page_info: pageInfo {
end_cursor: endCursor
has_next_page: hasNextPage
}
nodes {
title
description
state
createdAt
closedAt
startDate
startDateFixed
startDateIsFixed
dueDateFixed
dueDateIsFixed
relativePosition
created_at: createdAt
closed_at: closedAt
start_date: startDate
start_date_fixed: startDateFixed
start_date_is_fixed: startDateIsFixed
due_date_fixed: dueDateFixed
due_date_is_fixed: dueDateIsFixed
relative_position: relativePosition
confidential
}
}
......@@ -46,6 +46,18 @@ module EE
cursor: entity.next_page_for(:epics)
}
end
def base_path
%w[data group epics]
end
def data_path
base_path << 'nodes'
end
def page_info_path
base_path << 'page_info'
end
end
end
end
......
......@@ -10,19 +10,11 @@ module EE
end
def load(context, data)
Array.wrap(data['nodes']).each do |args|
::Epics::CreateService.new(
context.entity.group,
context.current_user,
args
).execute
end
context.entity.update_tracker_for(
relation: :epics,
has_next_page: data.dig('page_info', 'has_next_page'),
next_page: data.dig('page_info', 'end_cursor')
)
::Epics::CreateService.new(
context.entity.group,
context.current_user,
data
).execute
end
end
end
......
......@@ -10,14 +10,18 @@ module EE
extractor ::BulkImports::Common::Extractors::GraphqlExtractor,
query: EE::BulkImports::Groups::Graphql::GetEpicsQuery
transformer ::BulkImports::Common::Transformers::HashKeyDigger, key_path: %w[data group epics]
transformer ::BulkImports::Common::Transformers::UnderscorifyKeysTransformer
transformer ::BulkImports::Common::Transformers::ProhibitedAttributesTransformer
loader EE::BulkImports::Groups::Loaders::EpicsLoader
def after_run(context)
if context.entity.has_next_page?(:epics)
def after_run(context, extracted_data)
context.entity.update_tracker_for(
relation: :epics,
has_next_page: extracted_data.has_next_page?,
next_page: extracted_data.next_page
)
if extracted_data.has_next_page?
run(context)
end
end
......
......@@ -4,7 +4,7 @@ FactoryBot.define do
factory :incident_management_oncall_rotation, class: 'IncidentManagement::OncallRotation' do
association :schedule, factory: :incident_management_oncall_schedule
sequence(:name) { |n| "On-call Rotation ##{n}" }
starts_at { Time.current }
starts_at { Time.current.floor }
length { 5 }
length_unit { :days }
......
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe EE::BulkImports::Groups::Graphql::GetEpicsQuery do
  describe '#variables' do
    let(:entity) { double(source_full_path: 'test', next_page_for: 'next_page') }

    it 'returns query variables based on entity information' do
      expect(described_class.variables(entity))
        .to eq(full_path: entity.source_full_path, cursor: entity.next_page_for)
    end
  end

  describe '#data_path' do
    it 'returns data path' do
      expect(described_class.data_path).to eq(%w[data group epics nodes])
    end
  end

  describe '#page_info_path' do
    it 'returns pagination information path' do
      expect(described_class.page_info_path).to eq(%w[data group epics page_info])
    end
  end
end
......@@ -16,35 +16,18 @@ RSpec.describe EE::BulkImports::Groups::Loaders::EpicsLoader do
let(:data) do
{
'page_info' => {
'end_cursor' => 'endCursorValue',
'has_next_page' => true
},
'nodes' => [
{
'title' => 'epic1',
'state' => 'opened',
'confidential' => false
},
{
'title' => 'epic2',
'state' => 'closed',
'confidential' => true
}
]
'title' => 'epic1',
'state' => 'opened',
'confidential' => false
}
end
subject { described_class.new }
it 'creates the epics and update the entity tracker' do
expect { subject.load(context, data) }.to change(::Epic, :count).by(2)
it 'creates the epic' do
expect { subject.load(context, data) }.to change(::Epic, :count).by(1)
tracker = entity.trackers.last
expect(group.epics.count).to eq(2)
expect(tracker.has_next_page).to eq(true)
expect(tracker.next_page).to eq('endCursorValue')
expect(group.epics.count).to eq(1)
end
end
end
......@@ -3,30 +3,29 @@
require 'spec_helper'
RSpec.describe EE::BulkImports::Groups::Pipelines::EpicsPipeline do
describe '#run' do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:entity) do
create(
:bulk_import_entity,
source_full_path: 'source/full/path',
destination_name: 'My Destination Group',
destination_namespace: group.full_path,
group: group
)
end
let(:context) do
BulkImports::Pipeline::Context.new(
current_user: user,
entity: entity
)
end
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:cursor) { 'cursor' }
let(:entity) do
create(
:bulk_import_entity,
source_full_path: 'source/full/path',
destination_name: 'My Destination Group',
destination_namespace: group.full_path,
group: group
)
end
subject { described_class.new }
let(:context) do
BulkImports::Pipeline::Context.new(
current_user: user,
entity: entity
)
end
describe '#run' do
it 'imports group epics into destination group' do
first_page = extractor_data(has_next_page: true, cursor: 'nextPageCursor')
first_page = extractor_data(has_next_page: true, cursor: cursor)
last_page = extractor_data(has_next_page: false)
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
......@@ -39,6 +38,38 @@ RSpec.describe EE::BulkImports::Groups::Pipelines::EpicsPipeline do
end
end
describe '#after_run' do
context 'when extracted data has next page' do
it 'updates tracker information and runs pipeline again' do
data = extractor_data(has_next_page: true, cursor: cursor)
expect(subject).to receive(:run).with(context)
subject.after_run(context, data)
tracker = entity.trackers.find_by(relation: :epics)
expect(tracker.has_next_page).to eq(true)
expect(tracker.next_page).to eq(cursor)
end
end
context 'when extracted data has no next page' do
it 'updates tracker information and does not run pipeline' do
data = extractor_data(has_next_page: false)
expect(subject).not_to receive(:run).with(context)
subject.after_run(context, data)
tracker = entity.trackers.find_by(relation: :epics)
expect(tracker.has_next_page).to eq(false)
expect(tracker.next_page).to be_nil
end
end
end
describe 'pipeline parts' do
it { expect(described_class).to include_module(BulkImports::Pipeline) }
it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
......@@ -56,8 +87,6 @@ RSpec.describe EE::BulkImports::Groups::Pipelines::EpicsPipeline do
it 'has transformers' do
expect(described_class.transformers)
.to contain_exactly(
{ klass: BulkImports::Common::Transformers::HashKeyDigger, options: { key_path: %w[data group epics] } },
{ klass: BulkImports::Common::Transformers::UnderscorifyKeysTransformer, options: nil },
{ klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil }
)
end
......@@ -68,26 +97,19 @@ RSpec.describe EE::BulkImports::Groups::Pipelines::EpicsPipeline do
end
def extractor_data(has_next_page:, cursor: nil)
[
data = [
{
'data' => {
'group' => {
'epics' => {
'page_info' => {
'end_cursor' => cursor,
'has_next_page' => has_next_page
},
'nodes' => [
{
'title' => 'epic1',
'state' => 'closed',
'confidential' => true
}
]
}
}
}
'title' => 'epic1',
'state' => 'closed',
'confidential' => true
}
]
page_info = {
'end_cursor' => cursor,
'has_next_page' => has_next_page
}
BulkImports::Pipeline::ExtractedData.new(data: data, page_info: page_info)
end
end
......@@ -35,6 +35,17 @@ RSpec.describe IncidentManagement::OncallRotation do
end
end
describe 'scopes' do
  describe '.started' do
    subject { described_class.started }

    # rotation_1 uses the factory default starts_at (in the past);
    # rotation_2 starts in the future and must be excluded.
    let_it_be(:rotation_1) { create(:incident_management_oncall_rotation, schedule: schedule) }
    let_it_be(:rotation_2) { create(:incident_management_oncall_rotation, schedule: schedule, starts_at: 1.week.from_now) }

    it { is_expected.to contain_exactly(rotation_1) }
  end
end
describe '#shift_duration' do
let_it_be(:rotation) { create(:incident_management_oncall_rotation, schedule: schedule, length: 5, length_unit: :days) }
......
......@@ -91,6 +91,21 @@ RSpec.describe IncidentManagement::OncallShift do
# tue_to_wed - Ends as timeframe starts
# sat_to_sun - Starts as timeframe ends
end
context 'for invalid timeframe' do
  # End of timeframe precedes its start, so no shift can match.
  subject { described_class.for_timeframe(saturday, friday) }

  it { is_expected.to eq described_class.none }
end
end
describe '.order_starts_at_desc' do
  subject { described_class.order_starts_at_desc }

  let_it_be(:shift1) { create_shift(Time.current, Time.current + 1.hour, participant) }
  let_it_be(:shift2) { create_shift(Time.current + 2.hours, Time.current + 3.hours, participant) }

  # Most recently starting shift is returned first.
  it { is_expected.to eq [shift2, shift1]}
end
end
......
......@@ -3,14 +3,23 @@
require 'spec_helper'
RSpec.describe ::IncidentManagement::OncallShifts::ReadService do
let_it_be_with_refind(:rotation) { create(:incident_management_oncall_rotation) }
let_it_be(:participant) { create(:incident_management_oncall_participant, :with_developer_access, rotation: rotation) }
let_it_be(:project) { rotation.project }
let_it_be(:user_with_permissions) { create(:user) }
let_it_be(:user_without_permissions) { create(:user) }
let_it_be(:current_user) { user_with_permissions }
let(:params) { { start_time: 15.minutes.since(rotation.starts_at), end_time: 3.weeks.since(rotation.starts_at) } }
let_it_be_with_refind(:rotation) { create(:incident_management_oncall_rotation, length: 1, length_unit: :days) }
let_it_be(:participant) { create(:incident_management_oncall_participant, :with_developer_access, rotation: rotation) }
let_it_be(:project) { rotation.project }
let_it_be(:persisted_first_shift) { create(:incident_management_oncall_shift, participant: participant) }
let_it_be(:first_shift) { build(:incident_management_oncall_shift, participant: participant) }
let_it_be(:second_shift) { build(:incident_management_oncall_shift, participant: participant, starts_at: first_shift.ends_at) }
let_it_be(:third_shift) { build(:incident_management_oncall_shift, participant: participant, starts_at: second_shift.ends_at) }
let(:start_time) { rotation.starts_at }
let(:end_time) { 3.days.after(start_time) }
let(:params) { { start_time: start_time, end_time: end_time } }
let(:service) { described_class.new(rotation, current_user, **params) }
before_all do
......@@ -29,6 +38,18 @@ RSpec.describe ::IncidentManagement::OncallShifts::ReadService do
end
end
shared_examples 'returns expected shifts' do
it 'successfully returns a sorted collection of IncidentManagement::OncallShifts' do
expect(execute).to be_success
shifts = execute.payload[:shifts]
expect(shifts).to all(be_a(::IncidentManagement::OncallShift))
expect(shifts.sort_by(&:starts_at)).to eq(shifts)
expect(shifts.map(&:attributes)).to eq(expected_shifts.map(&:attributes))
end
end
subject(:execute) { service.execute }
context 'when the current_user is anonymous' do
......@@ -43,18 +64,6 @@ RSpec.describe ::IncidentManagement::OncallShifts::ReadService do
it_behaves_like 'error response', 'You have insufficient permissions to view shifts for this rotation'
end
context 'when the start time is after the end time' do
let(:params) { { start_time: rotation.starts_at, end_time: rotation.starts_at - 1.day } }
it_behaves_like 'error response', '`start_time` should precede `end_time`'
end
context 'when timeframe exceeds one month' do
let(:params) { { start_time: rotation.starts_at, end_time: rotation.starts_at + 1.month + 1.day } }
it_behaves_like 'error response', '`end_time` should not exceed one month after `start_time`'
end
context 'when feature is not available' do
before do
stub_licensed_features(oncall_schedules: false)
......@@ -71,23 +80,49 @@ RSpec.describe ::IncidentManagement::OncallShifts::ReadService do
it_behaves_like 'error response', 'Your license does not support on-call rotations'
end
context 'with valid params' do
it 'successfully returns a sorted collection of IncidentManagement::OncallShifts' do
expect(execute).to be_success
context 'when the start time is after the end time' do
let(:end_time) { 1.day.before(start_time) }
shifts = execute.payload[:shifts]
it_behaves_like 'error response', '`start_time` should precede `end_time`'
end
expect(shifts).to all(be_a(::IncidentManagement::OncallShift))
expect(shifts).to all(be_valid)
expect(shifts.sort_by(&:starts_at)).to eq(shifts)
expect(shifts.first.starts_at).to be <= params[:start_time]
expect(shifts.last.ends_at).to be >= params[:end_time]
context 'when timeframe exceeds one month' do
let(:end_time) { 2.months.after(start_time) }
it_behaves_like 'error response', '`end_time` should not exceed one month after `start_time`'
end
context 'when timeframe is exactly 1 month' do
let(:start_time) { rotation.starts_at.beginning_of_day }
let(:end_time) { 1.month.after(start_time).end_of_day }
it { is_expected.to be_success }
end
context 'with time frozen' do
around do |example|
travel_to(current_time) { example.run }
end
context 'when timeframe spans the current time' do
let(:current_time) { 5.minutes.after(start_time) }
let(:expected_shifts) { [persisted_first_shift, second_shift, third_shift] }
include_examples 'returns expected shifts'
end
context 'when timeframe is entirely in the past' do
let(:current_time) { 5.minutes.after(end_time) }
let(:expected_shifts) { [persisted_first_shift] }
include_examples 'returns expected shifts'
end
context 'when timeframe is exactly 1 month' do
let(:params) { { start_time: rotation.starts_at.beginning_of_day, end_time: (rotation.starts_at + 1.month).end_of_day } }
context 'when timeframe is entirely in the future' do
let(:current_time) { 5.minutes.before(start_time) }
let(:expected_shifts) { [first_shift, second_shift, third_shift] }
it { is_expected.to be_success }
include_examples 'returns expected shifts'
end
end
end
......
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe IncidentManagement::OncallRotations::PersistAllRotationsShiftsJob do
  let(:worker) { described_class.new }

  let_it_be(:schedule) { create(:incident_management_oncall_schedule) }
  # Two rotations that have already started (factory default starts_at is
  # in the past) and one that only starts tomorrow, which must be skipped.
  let_it_be(:rotation) { create(:incident_management_oncall_rotation, :with_participant, schedule: schedule) }
  let_it_be(:rotation_2) { create(:incident_management_oncall_rotation, :with_participant, schedule: schedule) }
  let_it_be(:not_started_rotation) { create(:incident_management_oncall_rotation, :with_participant, schedule: schedule, starts_at: 1.day.from_now) }

  describe '.perform' do
    subject(:perform) { worker.perform }

    # The cron worker fans out one PersistShiftsJob per started rotation.
    it 'creates a PersistOncallShiftsJob for each started rotation' do
      expect(::IncidentManagement::OncallRotations::PersistShiftsJob).to receive(:perform_async).with(rotation.id)
      expect(::IncidentManagement::OncallRotations::PersistShiftsJob).to receive(:perform_async).with(rotation_2.id)
      expect(::IncidentManagement::OncallRotations::PersistShiftsJob).not_to receive(:perform_async).with(not_started_rotation.id)

      perform
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe IncidentManagement::OncallRotations::PersistShiftsJob do
  let(:worker) { described_class.new }
  let(:rotation_id) { rotation.id }

  before do
    stub_licensed_features(oncall_schedules: true)
  end

  describe '#perform' do
    subject(:perform) { worker.perform(rotation_id) }

    context 'unknown rotation' do
      let(:rotation_id) { non_existing_record_id }

      # The job exits quietly when the rotation no longer exists.
      it { is_expected.to be_nil }

      it 'does not create shifts' do
        expect { perform }.not_to change { IncidentManagement::OncallShift.count }
      end
    end

    context 'when rotation has no saved shifts' do
      context 'and rotation was created before it "started"' do
        let_it_be(:rotation) { create(:incident_management_oncall_rotation, :with_participant, created_at: 1.day.ago) }

        # The first shift begins exactly at the rotation's start time.
        it 'creates shift' do
          expect { perform }.to change { rotation.shifts.count }.by(1)
          expect(rotation.shifts.first.starts_at).to eq(rotation.starts_at)
        end
      end

      context 'and rotation "started" before it was created' do
        let_it_be(:rotation) { create(:incident_management_oncall_rotation, :with_participant, starts_at: 1.month.ago) }

        # Shifts from before the rotation record existed are not backfilled;
        # persistence begins with the shift that covers the creation time.
        it 'creates shift without backfilling' do
          expect { perform }.to change { rotation.shifts.count }.by(1)

          first_shift = rotation.shifts.first

          expect(first_shift.starts_at).to be > rotation.starts_at
          expect(rotation.created_at).to be_between(first_shift.starts_at, first_shift.ends_at)
        end
      end
    end

    context 'when rotation has saved shifts' do
      let_it_be(:existing_shift) { create(:incident_management_oncall_shift) }
      let_it_be(:rotation) { existing_shift.rotation }

      context 'when current time is during a saved shift' do
        # The current shift is already persisted, so nothing to do.
        it 'does not create shifts' do
          expect { perform }.not_to change { IncidentManagement::OncallShift.count }
        end
      end

      context 'when current time is not during a saved shift' do
        around do |example|
          travel_to(5.minutes.after(existing_shift.ends_at)) { example.run }
        end

        # Exactly one new shift is appended, contiguous with the last one.
        it 'creates shift' do
          expect { perform }.to change { rotation.shifts.count }.by(1)
          expect(rotation.shifts.first).to eq(existing_shift)
          expect(rotation.shifts.second.starts_at).to eq(existing_shift.ends_at)
        end
      end

      # Unexpected case. If the job is delayed, we'll still
      # fill in the correct shift history.
      context 'when current time is several shifts after the last saved shift' do
        around do |example|
          travel_to(existing_shift.ends_at + (3 * rotation.shift_duration)) { example.run }
        end

        context 'when feature flag is not enabled' do
          before do
            stub_feature_flags(oncall_schedules_mvc: false)
          end

          it 'does not create shifts' do
            expect { perform }.not_to change { IncidentManagement::OncallShift.count }
          end
        end

        # All missed shifts are persisted, back-to-back, in one run.
        it 'creates multiple shifts' do
          expect { perform }.to change { rotation.shifts.count }.by(3)

          first_shift,
          second_shift,
          third_shift,
          fourth_shift = rotation.shifts.order(:starts_at)

          expect(rotation.shifts.length).to eq(4)
          expect(first_shift).to eq(existing_shift)
          expect(second_shift.starts_at).to eq(existing_shift.ends_at)
          expect(third_shift.starts_at).to eq(existing_shift.ends_at + rotation.shift_duration)
          expect(fourth_shift.starts_at).to eq(existing_shift.ends_at + (2 * rotation.shift_duration))
        end
      end
    end
  end
end
......@@ -4,17 +4,22 @@ module BulkImports
module Common
module Extractors
class GraphqlExtractor
def initialize(query)
@query = query[:query]
def initialize(options = {})
@query = options[:query]
end
def extract(context)
client = graphql_client(context)
client.execute(
response = client.execute(
client.parse(query.to_s),
query.variables(context.entity)
).original_hash.deep_dup
BulkImports::Pipeline::ExtractedData.new(
data: response.dig(*query.data_path),
page_info: response.dig(*query.page_info_path)
)
end
private
......@@ -27,10 +32,6 @@ module BulkImports
token: context.configuration.access_token
)
end
def parsed_query
@parsed_query ||= graphql_client.parse(query.to_s)
end
end
end
end
......
# frozen_string_literal: true

module BulkImports
  module Common
    module Transformers
      # Pipeline transformer that digs a nested value out of a Hash.
      # The :key_path option is a single key or an ordered list of keys.
      class HashKeyDigger
        def initialize(options = {})
          @key_path = options[:key_path]
        end

        # Returns the value located at key_path within +data+.
        # Raises ArgumentError when +data+ is not a Hash.
        def transform(_context, data)
          raise ArgumentError, "Given data must be a Hash" unless data.is_a?(Hash)

          key_chain = Array.wrap(@key_path)

          data.dig(*key_chain)
        end
      end
    end
  end
end
# frozen_string_literal: true

module BulkImports
  module Common
    module Transformers
      # Pipeline transformer that recursively converts camelCase hash keys
      # (as produced by GraphQL responses) into snake_case string keys.
      class UnderscorifyKeysTransformer
        def initialize(options = {})
          @options = options
        end

        # Returns a copy of +data+ with every key underscored.
        # The context argument is required by the transformer interface
        # but unused here.
        def transform(_context, data)
          data.deep_transform_keys { |key| key.to_s.underscore }
        end
      end
    end
  end
end
......@@ -9,9 +9,11 @@ module BulkImports
def extract(context)
encoded_parent_path = ERB::Util.url_encode(context.entity.source_full_path)
http_client(context.entity.bulk_import.configuration)
response = http_client(context.entity.bulk_import.configuration)
.each_page(:get, "groups/#{encoded_parent_path}/subgroups")
.flat_map(&:itself)
BulkImports::Pipeline::ExtractedData.new(data: response)
end
private
......
......@@ -12,18 +12,18 @@ module BulkImports
group(fullPath: $full_path) {
name
path
fullPath
full_path: fullPath
description
visibility
emailsDisabled
lfsEnabled
mentionsDisabled
projectCreationLevel
requestAccessEnabled
requireTwoFactorAuthentication
shareWithGroupLock
subgroupCreationLevel
twoFactorGracePeriod
emails_disabled: emailsDisabled
lfs_enabled: lfsEnabled
mentions_disabled: mentionsDisabled
project_creation_level: projectCreationLevel
request_access_enabled: requestAccessEnabled
require_two_factor_authentication: requireTwoFactorAuthentication
share_with_group_lock: shareWithGroupLock
subgroup_creation_level: subgroupCreationLevel
two_factor_grace_period: twoFactorGracePeriod
}
}
GRAPHQL
......@@ -32,6 +32,18 @@ module BulkImports
# Builds the variable set for the group GraphQL query.
#
# entity - an import entity responding to #source_full_path.
#
# Returns a Hash of GraphQL query variables.
def variables(entity)
  source_path = entity.source_full_path

  { full_path: source_path }
end
# Dig-style key list locating the group payload in the GraphQL response.
def base_path
  ['data', 'group']
end
# The group attributes live directly at the base path of the response.
def data_path
  base_path
end
# Dig-style key list locating the pagination metadata in the response.
def page_info_path
  base_path + ['page_info']
end
end
end
end
......
......@@ -32,6 +32,18 @@ module BulkImports
cursor: entity.next_page_for(:labels)
}
end
# Dig-style key list locating the labels payload in the GraphQL response.
def base_path
  ['data', 'group', 'labels']
end
# The label records themselves live under the 'nodes' key.
def data_path
  base_path + ['nodes']
end
# Dig-style key list locating the labels pagination metadata.
def page_info_path
  base_path + ['page_info']
end
end
end
end
......
......@@ -7,16 +7,7 @@ module BulkImports
def initialize(*); end
def load(context, data)
Array.wrap(data['nodes']).each do |entry|
Labels::CreateService.new(entry)
.execute(group: context.entity.group)
end
context.entity.update_tracker_for(
relation: :labels,
has_next_page: data.dig('page_info', 'has_next_page'),
next_page: data.dig('page_info', 'end_cursor')
)
Labels::CreateService.new(data).execute(group: context.entity.group)
end
end
end
......
......@@ -10,8 +10,6 @@ module BulkImports
extractor Common::Extractors::GraphqlExtractor, query: Graphql::GetGroupQuery
transformer Common::Transformers::HashKeyDigger, key_path: %w[data group]
transformer Common::Transformers::UnderscorifyKeysTransformer
transformer Common::Transformers::ProhibitedAttributesTransformer
transformer Groups::Transformers::GroupAttributesTransformer
......
......@@ -9,13 +9,18 @@ module BulkImports
extractor BulkImports::Common::Extractors::GraphqlExtractor,
query: BulkImports::Groups::Graphql::GetLabelsQuery
transformer BulkImports::Common::Transformers::HashKeyDigger, key_path: %w[data group labels]
transformer Common::Transformers::ProhibitedAttributesTransformer
loader BulkImports::Groups::Loaders::LabelsLoader
def after_run(context)
if context.entity.has_next_page?(:labels)
def after_run(context, extracted_data)
context.entity.update_tracker_for(
relation: :labels,
has_next_page: extracted_data.has_next_page?,
next_page: extracted_data.next_page
)
if extracted_data.has_next_page?
run(context)
end
end
......
# frozen_string_literal: true

module BulkImports
  module Pipeline
    # Value object wrapping one page of records produced by an extractor
    # together with its GraphQL-style pagination metadata.
    class ExtractedData
      attr_reader :data

      # data      - a single record or an array of records (nil allowed;
      #             always normalised to an Array).
      # page_info - hash with 'has_next_page' / 'end_cursor' entries.
      def initialize(data: nil, page_info: {})
        @data = Array.wrap(data)
        @page_info = page_info
      end

      # Truthy when the source endpoint reports a further page.
      def has_next_page?
        @page_info['has_next_page']
      end

      # Cursor for fetching the following page, if any.
      def next_page
        @page_info['end_cursor']
      end

      # Iterates over the extracted records.
      def each(&blk)
        data.each(&blk)
      end
    end
  end
end
......@@ -12,7 +12,9 @@ module BulkImports
info(context, message: 'Pipeline started', pipeline_class: pipeline)
Array.wrap(extracted_data_from(context)).each do |entry|
extracted_data = extracted_data_from(context)
extracted_data&.each do |entry|
transformers.each do |transformer|
entry = run_pipeline_step(:transformer, transformer.class.name, context) do
transformer.transform(context, entry)
......@@ -24,7 +26,7 @@ module BulkImports
end
end
after_run(context) if respond_to?(:after_run)
after_run(context, extracted_data) if respond_to?(:after_run)
rescue MarkedAsFailedError
log_skip(context)
end
......@@ -43,6 +45,8 @@ module BulkImports
log_import_failure(e, step, context)
mark_as_failed(context) if abort_on_failure?
nil
end
def extracted_data_from(context)
......
......@@ -18,6 +18,33 @@ module Gitlab
@changes
end
# Computes the character ranges that differ between the old and the new
# string, from the operation list returned by #generate_diff
# (pairs of [action, content] where action is :equal/:delete/:insert).
#
# offset - shifts every returned range right by this many positions.
#
# Returns [ranges removed from the old string, ranges added in the new string].
def changed_ranges(offset: 0)
  removed_ranges = []
  inserted_ranges = []
  old_cursor = new_cursor = offset

  generate_diff.each do |(action, content)|
    size = content.size

    case action
    when :equal
      # Unchanged text advances both cursors without recording a range.
      old_cursor += size
      new_cursor += size
    when :delete
      removed_ranges << (old_cursor..(old_cursor + size - 1))
      old_cursor += size
    when :insert
      inserted_ranges << (new_cursor..(new_cursor + size - 1))
      new_cursor += size
    end
  end

  [removed_ranges, inserted_ranges]
end
def to_html
@changes.map do |op, text|
%{<span class="#{html_class_names(op)}">#{ERB::Util.html_escape(text)}</span>}
......
......@@ -3,12 +3,13 @@
module Gitlab
module Diff
class Highlight
attr_reader :diff_file, :diff_lines, :raw_lines, :repository
attr_reader :diff_file, :diff_lines, :raw_lines, :repository, :project
delegate :old_path, :new_path, :old_sha, :new_sha, to: :diff_file, prefix: :diff
def initialize(diff_lines, repository: nil)
@repository = repository
@project = repository&.project
if diff_lines.is_a?(Gitlab::Diff::File)
@diff_file = diff_lines
......@@ -66,7 +67,7 @@ module Gitlab
end
def inline_diffs
@inline_diffs ||= InlineDiff.for_lines(@raw_lines)
@inline_diffs ||= InlineDiff.for_lines(@raw_lines, project: project)
end
def old_lines
......
......@@ -8,6 +8,7 @@ module Gitlab
EXPIRATION = 1.week
VERSION = 1
NEXT_VERSION = 2
delegate :diffable, to: :@diff_collection
delegate :diff_options, to: :@diff_collection
......@@ -69,12 +70,20 @@ module Gitlab
def key
strong_memoize(:redis_key) do
['highlighted-diff-files', diffable.cache_key, VERSION, diff_options].join(":")
['highlighted-diff-files', diffable.cache_key, version, diff_options].join(":")
end
end
private
# Cache key version. The improved_merge_diff_highlighting feature flag
# changes the highlighting output format, so flagged projects must not
# share cache entries with unflagged ones.
def version
  if Feature.enabled?(:improved_merge_diff_highlighting, diffable.project)
    NEXT_VERSION
  else
    VERSION
  end
end
def set_highlighted_diff_lines(diff_file, content)
diff_file.highlighted_diff_lines = content.map do |line|
Gitlab::Diff::Line.safe_init_from_hash(line)
......
......@@ -27,28 +27,19 @@ module Gitlab
@offset = offset
end
def inline_diffs
def inline_diffs(project: nil)
# Skip inline diff if empty line was replaced with content
return if old_line == ""
lcp = longest_common_prefix(old_line, new_line)
lcs = longest_common_suffix(old_line[lcp..-1], new_line[lcp..-1])
lcp += offset
old_length = old_line.length + offset
new_length = new_line.length + offset
old_diff_range = lcp..(old_length - lcs - 1)
new_diff_range = lcp..(new_length - lcs - 1)
old_diffs = [old_diff_range] if old_diff_range.begin <= old_diff_range.end
new_diffs = [new_diff_range] if new_diff_range.begin <= new_diff_range.end
[old_diffs, new_diffs]
if Feature.enabled?(:improved_merge_diff_highlighting, project)
CharDiff.new(old_line, new_line).changed_ranges(offset: offset)
else
deprecated_diff
end
end
class << self
def for_lines(lines)
def for_lines(lines, project: nil)
changed_line_pairs = find_changed_line_pairs(lines)
inline_diffs = []
......@@ -57,7 +48,7 @@ module Gitlab
old_line = lines[old_index]
new_line = lines[new_index]
old_diffs, new_diffs = new(old_line, new_line, offset: 1).inline_diffs
old_diffs, new_diffs = new(old_line, new_line, offset: 1).inline_diffs(project: project)
inline_diffs[old_index] = old_diffs
inline_diffs[new_index] = new_diffs
......@@ -97,6 +88,24 @@ module Gitlab
private
# See: https://gitlab.com/gitlab-org/gitlab/-/issues/299884
def deprecated_diff
  # Longest common prefix/suffix of the two lines; the changed region
  # is whatever lies between them.
  lcp = longest_common_prefix(old_line, new_line)
  lcs = longest_common_suffix(old_line[lcp..-1], new_line[lcp..-1])

  # Shift into the caller's coordinate space (offset accounts for the
  # diff marker character at the start of each rendered line).
  lcp += offset
  old_length = old_line.length + offset
  new_length = new_line.length + offset

  # Single range spanning everything between common prefix and suffix.
  old_diff_range = lcp..(old_length - lcs - 1)
  new_diff_range = lcp..(new_length - lcs - 1)

  # An empty/inverted range means that side has no visible change;
  # the corresponding entry stays nil.
  old_diffs = [old_diff_range] if old_diff_range.begin <= old_diff_range.end
  new_diffs = [new_diff_range] if new_diff_range.begin <= new_diff_range.end

  [old_diffs, new_diffs]
end
def longest_common_prefix(a, b) # rubocop:disable Naming/UncommunicativeMethodParamName
max_length = [a.length, b.length].max
......
......@@ -169,9 +169,9 @@ RSpec.describe DiffHelper do
it "returns strings with marked inline diffs" do
marked_old_line, marked_new_line = mark_inline_diffs(old_line, new_line)
expect(marked_old_line).to eq(%q{abc <span class="idiff left right deletion">&#39;def&#39;</span>})
expect(marked_old_line).to eq(%q{abc <span class="idiff left deletion">&#39;</span>def<span class="idiff right deletion">&#39;</span>})
expect(marked_old_line).to be_html_safe
expect(marked_new_line).to eq(%q{abc <span class="idiff left right addition">&quot;def&quot;</span>})
expect(marked_new_line).to eq(%q{abc <span class="idiff left addition">&quot;</span>def<span class="idiff right addition">&quot;</span>})
expect(marked_new_line).to be_html_safe
end
......
......@@ -5,8 +5,18 @@ require 'spec_helper'
RSpec.describe BulkImports::Common::Extractors::GraphqlExtractor do
let(:graphql_client) { instance_double(BulkImports::Clients::Graphql) }
let(:import_entity) { create(:bulk_import_entity) }
let(:response) { double(original_hash: { foo: :bar }) }
let(:query) { { query: double(to_s: 'test', variables: {}) } }
let(:response) { double(original_hash: { 'data' => { 'foo' => 'bar' }, 'page_info' => {} }) }
let(:options) do
{
query: double(
to_s: 'test',
variables: {},
data_path: %w[data foo],
page_info_path: %w[data page_info]
)
}
end
let(:context) do
instance_double(
BulkImports::Pipeline::Context,
......@@ -14,58 +24,20 @@ RSpec.describe BulkImports::Common::Extractors::GraphqlExtractor do
)
end
subject { described_class.new(query) }
before do
allow(subject).to receive(:graphql_client).and_return(graphql_client)
allow(graphql_client).to receive(:parse)
end
subject { described_class.new(options) }
describe '#extract' do
before do
allow(subject).to receive(:query_variables).and_return({})
allow(graphql_client).to receive(:execute).and_return(response)
end
it 'returns original hash' do
expect(subject.extract(context)).to eq({ foo: :bar })
end
end
describe 'query variables' do
before do
allow(subject).to receive(:graphql_client).and_return(graphql_client)
allow(graphql_client).to receive(:parse)
allow(graphql_client).to receive(:execute).and_return(response)
end
context 'when variables are present' do
let(:variables) { { foo: :bar } }
let(:query) { { query: double(to_s: 'test', variables: variables) } }
it 'builds graphql query variables for import entity' do
expect(graphql_client).to receive(:execute).with(anything, variables)
subject.extract(context).first
end
end
context 'when no variables are present' do
let(:query) { { query: double(to_s: 'test', variables: nil) } }
it 'returns empty hash' do
expect(graphql_client).to receive(:execute).with(anything, nil)
subject.extract(context).first
end
end
context 'when variables are empty hash' do
let(:query) { { query: double(to_s: 'test', variables: {}) } }
it 'makes graphql request with empty hash' do
expect(graphql_client).to receive(:execute).with(anything, {})
it 'returns ExtractedData' do
extracted_data = subject.extract(context)
subject.extract(context).first
end
expect(extracted_data).to be_instance_of(BulkImports::Pipeline::ExtractedData)
expect(extracted_data.data).to contain_exactly('bar')
end
end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe BulkImports::Common::Transformers::HashKeyDigger do
  describe '#transform' do
    it 'when the key_path is an array' do
      data = { foo: { bar: :value } }
      key_path = %i[foo bar]
      transformed = described_class.new(key_path: key_path).transform(nil, data)

      # Each key in the path is dug through in order.
      expect(transformed).to eq(:value)
    end

    it 'when the key_path is not an array' do
      data = { foo: { bar: :value } }
      key_path = :foo
      transformed = described_class.new(key_path: key_path).transform(nil, data)

      # A scalar key path is wrapped and used as a one-level dig.
      expect(transformed).to eq({ bar: :value })
    end

    it "when the data is not a hash" do
      expect { described_class.new(key_path: nil).transform(nil, nil) }
        .to raise_error(ArgumentError, "Given data must be a Hash")
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe BulkImports::Common::Transformers::UnderscorifyKeysTransformer do
  describe '#transform' do
    it 'deep underscorifies hash keys' do
      # camelCase keys at every nesting level, as a GraphQL response
      # would produce.
      data = {
        'fullPath' => 'Foo',
        'snakeKeys' => {
          'snakeCaseKey' => 'Bar',
          'moreKeys' => {
            'anotherSnakeCaseKey' => 'Test'
          }
        }
      }

      transformed_data = described_class.new.transform(nil, data)

      # Keys are underscored recursively; values are untouched.
      expect(transformed_data).to have_key('full_path')
      expect(transformed_data).to have_key('snake_keys')
      expect(transformed_data['snake_keys']).to have_key('snake_case_key')
      expect(transformed_data['snake_keys']).to have_key('more_keys')
      expect(transformed_data.dig('snake_keys', 'more_keys')).to have_key('another_snake_case_key')
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe BulkImports::Groups::Extractors::SubgroupsExtractor do
  describe '#extract' do
    it 'returns ExtractedData response' do
      user = create(:user)
      bulk_import = create(:bulk_import)
      entity = create(:bulk_import_entity, bulk_import: bulk_import)
      configuration = create(:bulk_import_configuration, bulk_import: bulk_import)
      # Canned REST payload the stubbed HTTP client will page through.
      response = [{ 'test' => 'group' }]

      context = BulkImports::Pipeline::Context.new(
        current_user: user,
        entity: entity,
        configuration: configuration
      )

      allow_next_instance_of(BulkImports::Clients::Http) do |client|
        allow(client).to receive(:each_page).and_return(response)
      end

      extracted_data = subject.extract(context)

      # The raw HTTP pages are wrapped in the pipeline's ExtractedData
      # value object.
      expect(extracted_data).to be_instance_of(BulkImports::Pipeline::ExtractedData)
      expect(extracted_data.data).to eq(response)
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe BulkImports::Groups::Graphql::GetGroupQuery do
  describe '#variables' do
    let(:entity) { double(source_full_path: 'test') }

    it 'returns query variables based on entity information' do
      expected = { full_path: entity.source_full_path }

      expect(described_class.variables(entity)).to eq(expected)
    end
  end

  describe '#data_path' do
    # Dig-style key list to the group payload inside the response hash.
    it 'returns data path' do
      expected = %w[data group]

      expect(described_class.data_path).to eq(expected)
    end
  end

  describe '#page_info_path' do
    # Dig-style key list to the pagination metadata.
    it 'returns pagination information path' do
      expected = %w[data group page_info]

      expect(described_class.page_info_path).to eq(expected)
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe BulkImports::Groups::Graphql::GetLabelsQuery do
  describe '#variables' do
    let(:entity) { double(source_full_path: 'test', next_page_for: 'next_page') }

    # Variables include the pagination cursor persisted on the entity.
    it 'returns query variables based on entity information' do
      expected = { full_path: entity.source_full_path, cursor: entity.next_page_for }

      expect(described_class.variables(entity)).to eq(expected)
    end
  end

  describe '#data_path' do
    # Label records live under the 'nodes' key of the labels connection.
    it 'returns data path' do
      expected = %w[data group labels nodes]

      expect(described_class.data_path).to eq(expected)
    end
  end

  describe '#page_info_path' do
    it 'returns pagination information path' do
      expected = %w[data group labels page_info]

      expect(described_class.page_info_path).to eq(expected)
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe BulkImports::Groups::Loaders::LabelsLoader do
  describe '#load' do
    let(:user) { create(:user) }
    let(:group) { create(:group) }
    let(:entity) { create(:bulk_import_entity, group: group) }
    let(:context) do
      BulkImports::Pipeline::Context.new(
        entity: entity,
        current_user: user
      )
    end

    # A single transformed label record, as the pipeline hands it to
    # the loader.
    let(:data) do
      {
        'title' => 'label',
        'description' => 'description',
        'color' => '#FFFFFF'
      }
    end

    it 'creates the label' do
      expect { subject.load(context, data) }.to change(Label, :count).by(1)

      label = group.labels.first

      # All attributes are copied from the source record verbatim.
      expect(label.title).to eq(data['title'])
      expect(label.description).to eq(data['description'])
      expect(label.color).to eq(data['color'])
    end
  end
end
......@@ -24,19 +24,15 @@ RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
let(:group_data) do
{
'data' => {
'group' => {
'name' => 'source_name',
'fullPath' => 'source/full/path',
'visibility' => 'private',
'projectCreationLevel' => 'developer',
'subgroupCreationLevel' => 'maintainer',
'description' => 'Group Description',
'emailsDisabled' => true,
'lfsEnabled' => false,
'mentionsDisabled' => true
}
}
'name' => 'source_name',
'full_path' => 'source/full/path',
'visibility' => 'private',
'project_creation_level' => 'developer',
'subgroup_creation_level' => 'maintainer',
'description' => 'Group Description',
'emails_disabled' => true,
'lfs_enabled' => false,
'mentions_disabled' => true
}
end
......@@ -60,13 +56,13 @@ RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
expect(imported_group).not_to be_nil
expect(imported_group.parent).to eq(parent)
expect(imported_group.path).to eq(group_path)
expect(imported_group.description).to eq(group_data.dig('data', 'group', 'description'))
expect(imported_group.visibility).to eq(group_data.dig('data', 'group', 'visibility'))
expect(imported_group.project_creation_level).to eq(Gitlab::Access.project_creation_string_options[group_data.dig('data', 'group', 'projectCreationLevel')])
expect(imported_group.subgroup_creation_level).to eq(Gitlab::Access.subgroup_creation_string_options[group_data.dig('data', 'group', 'subgroupCreationLevel')])
expect(imported_group.lfs_enabled?).to eq(group_data.dig('data', 'group', 'lfsEnabled'))
expect(imported_group.emails_disabled?).to eq(group_data.dig('data', 'group', 'emailsDisabled'))
expect(imported_group.mentions_disabled?).to eq(group_data.dig('data', 'group', 'mentionsDisabled'))
expect(imported_group.description).to eq(group_data['description'])
expect(imported_group.visibility).to eq(group_data['visibility'])
expect(imported_group.project_creation_level).to eq(Gitlab::Access.project_creation_string_options[group_data['project_creation_level']])
expect(imported_group.subgroup_creation_level).to eq(Gitlab::Access.subgroup_creation_string_options[group_data['subgroup_creation_level']])
expect(imported_group.lfs_enabled?).to eq(group_data['lfs_enabled'])
expect(imported_group.emails_disabled?).to eq(group_data['emails_disabled'])
expect(imported_group.mentions_disabled?).to eq(group_data['mentions_disabled'])
end
end
......@@ -87,8 +83,6 @@ RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
it 'has transformers' do
expect(described_class.transformers)
.to contain_exactly(
{ klass: BulkImports::Common::Transformers::HashKeyDigger, options: { key_path: %w[data group] } },
{ klass: BulkImports::Common::Transformers::UnderscorifyKeysTransformer, options: nil },
{ klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil },
{ klass: BulkImports::Groups::Transformers::GroupAttributesTransformer, options: nil }
)
......
......@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:cursor) { 'cursor' }
let(:entity) do
create(
:bulk_import_entity,
......@@ -22,31 +23,26 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
)
end
def extractor_data(title:, has_next_page:, cursor: "")
{
"data" => {
"group" => {
"labels" => {
"page_info" => {
"end_cursor" => cursor,
"has_next_page" => has_next_page
},
"nodes" => [
{
"title" => title,
"description" => "desc",
"color" => "#428BCA"
}
]
}
}
# Builds an ExtractedData payload that mimics one page of the GraphQL
# labels query: a single label node plus pagination info.
#
# @param title [String] title of the single stubbed label node
# @param has_next_page [Boolean] whether pagination should continue
# @param cursor [String, nil] end cursor reported for this page
# @return [BulkImports::Pipeline::ExtractedData]
def extractor_data(title:, has_next_page:, cursor: nil)
  label_node = { 'title' => title, 'description' => 'desc', 'color' => '#428BCA' }
  pagination = { 'end_cursor' => cursor, 'has_next_page' => has_next_page }

  BulkImports::Pipeline::ExtractedData.new(data: [label_node], page_info: pagination)
end
describe '#run' do
it 'imports a group labels' do
first_page = extractor_data(title: 'label1', has_next_page: true, cursor: 'nextPageCursor')
first_page = extractor_data(title: 'label1', has_next_page: true, cursor: cursor)
last_page = extractor_data(title: 'label2', has_next_page: false)
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
......@@ -65,6 +61,38 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
end
end
# Verifies the tracker bookkeeping done by #after_run: the :labels tracker
# records the pagination state of the extracted data, and the pipeline is
# re-run only while more pages remain.
describe '#after_run' do
  context 'when extracted data has next page' do
    it 'updates tracker information and runs pipeline again' do
      data = extractor_data(title: 'label', has_next_page: true, cursor: cursor)

      # More pages remain, so #after_run is expected to invoke #run again.
      # NOTE: message expectation must be set before the call below.
      expect(subject).to receive(:run).with(context)

      subject.after_run(context, data)

      tracker = entity.trackers.find_by(relation: :labels)

      # Tracker persists the cursor so a resumed import continues here.
      expect(tracker.has_next_page).to eq(true)
      expect(tracker.next_page).to eq(cursor)
    end
  end

  context 'when extracted data has no next page' do
    it 'updates tracker information and does not run pipeline' do
      data = extractor_data(title: 'label', has_next_page: false)

      # Last page reached: no further #run invocation is expected.
      expect(subject).not_to receive(:run).with(context)

      subject.after_run(context, data)

      tracker = entity.trackers.find_by(relation: :labels)

      expect(tracker.has_next_page).to eq(false)
      expect(tracker.next_page).to be_nil
    end
  end
end
describe 'pipeline parts' do
it { expect(described_class).to include_module(BulkImports::Pipeline) }
it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
......@@ -82,7 +110,6 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
it 'has transformers' do
expect(described_class.transformers)
.to contain_exactly(
{ klass: BulkImports::Common::Transformers::HashKeyDigger, options: { key_path: %w[data group labels] } },
{ klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil }
)
end
......
# frozen_string_literal: true

require 'spec_helper'

# Unit specs for the value object wrapping one page of extracted data:
# pagination accessors (#has_next_page?, #next_page) and item iteration (#each).
RSpec.describe BulkImports::Pipeline::ExtractedData do
  subject(:extracted_data) { described_class.new(data: data, page_info: page_info) }

  let(:data) { 'data' }
  let(:has_next_page) { true }
  let(:cursor) { 'cursor' }
  let(:page_info) do
    { 'has_next_page' => has_next_page, 'end_cursor' => cursor }
  end

  describe '#has_next_page?' do
    context 'when next page is present' do
      it 'returns true' do
        expect(extracted_data.has_next_page?).to eq(true)
      end
    end

    context 'when next page is not present' do
      let(:has_next_page) { false }

      it 'returns false' do
        expect(extracted_data.has_next_page?).to eq(false)
      end
    end
  end

  describe '#next_page' do
    it 'returns next page cursor information' do
      expect(extracted_data.next_page).to eq(cursor)
    end
  end

  describe '#each' do
    context 'when block is present' do
      it 'yields each data item' do
        expect { |probe| extracted_data.each(&probe) }.to yield_control
      end
    end

    context 'when block is not present' do
      it 'returns enumerator' do
        expect(extracted_data.each).to be_instance_of(Enumerator)
      end
    end
  end
end
......@@ -53,18 +53,26 @@ RSpec.describe BulkImports::Pipeline::Runner do
end
it 'runs pipeline extractor, transformer, loader' do
entries = [{ foo: :bar }]
extracted_data = BulkImports::Pipeline::ExtractedData.new(data: { foo: :bar })
expect_next_instance_of(BulkImports::Extractor) do |extractor|
expect(extractor).to receive(:extract).with(context).and_return(entries)
expect(extractor)
.to receive(:extract)
.with(context)
.and_return(extracted_data)
end
expect_next_instance_of(BulkImports::Transformer) do |transformer|
expect(transformer).to receive(:transform).with(context, entries.first).and_return(entries.first)
expect(transformer)
.to receive(:transform)
.with(context, extracted_data.data.first)
.and_return(extracted_data.data.first)
end
expect_next_instance_of(BulkImports::Loader) do |loader|
expect(loader).to receive(:load).with(context, entries.first)
expect(loader)
.to receive(:load)
.with(context, extracted_data.data.first)
end
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
......
......@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Diff::CharDiff do
let(:old_string) { "Helo \n Worlld" }
let(:new_string) { "Hello \n World" }
subject { described_class.new(old_string, new_string) }
subject(:diff) { described_class.new(old_string, new_string) }
describe '#generate_diff' do
context 'when old string is nil' do
......@@ -39,6 +39,28 @@ RSpec.describe Gitlab::Diff::CharDiff do
end
end
# #changed_ranges returns a pair of range lists: character ranges removed
# from the old string and character ranges added in the new string.
describe '#changed_ranges' do
  subject(:changed_ranges) { diff.changed_ranges }

  context 'when old string is nil' do
    let(:old_string) { nil }

    it 'returns lists of changes' do
      removed, added = changed_ranges

      # Nothing to remove; the entire new string counts as added.
      expect(removed).to eq([])
      expect(added).to eq([0..12])
    end
  end

  it 'returns ranges of changes' do
    removed, added = changed_ranges

    expect(removed).to eq([11..11])
    expect(added).to eq([3..3])
  end
end
describe '#to_html' do
it 'returns an HTML representation of the diff' do
subject.generate_diff
......
......@@ -233,4 +233,22 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
cache.write_if_empty
end
end
# Cache keys are versioned so entries highlighted before the
# :improved_merge_diff_highlighting rollout are not mixed with entries
# produced after it.
describe '#key' do
  subject { cache.key }

  it 'returns the next version of the cache' do
    # Version suffix :2 is used while the feature flag is enabled.
    is_expected.to start_with("highlighted-diff-files:#{cache.diffable.cache_key}:2")
  end

  context 'when feature flag is disabled' do
    before do
      stub_feature_flags(improved_merge_diff_highlighting: false)
    end

    it 'returns the original version of the cache' do
      # Falls back to the :1 suffix, keeping pre-rollout cache entries usable.
      is_expected.to start_with("highlighted-diff-files:#{cache.diffable.cache_key}:1")
    end
  end
end
end
......@@ -37,6 +37,33 @@ RSpec.describe Gitlab::Diff::InlineDiff do
it 'can handle unchanged empty lines' do
expect { described_class.for_lines(['- bar', '+ baz', '']) }.not_to raise_error
end
context 'when lines have multiple changes' do
  # Raw unified-diff fragment: one removed line and one added line, each
  # containing more than one distinct word-level edit.
  let(:diff) do
    <<~EOF
      - Hello, how are you?
      + Hi, how are you doing?
    EOF
  end

  # NOTE(review): `let(:subject)` works but `subject { ... }` is the
  # conventional RSpec spelling — consider changing separately.
  let(:subject) { described_class.for_lines(diff.lines) }

  it 'finds all inline diffs' do
    # With :improved_merge_diff_highlighting enabled, each changed word is
    # reported as its own narrow range per line.
    expect(subject[0]).to eq([3..6])
    expect(subject[1]).to eq([3..3, 17..22])
  end

  context 'when feature flag is disabled' do
    before do
      stub_feature_flags(improved_merge_diff_highlighting: false)
    end

    it 'finds all inline diffs' do
      # Legacy behaviour collapses all edits into one wide range per line.
      expect(subject[0]).to eq([3..19])
      expect(subject[1]).to eq([3..22])
    end
  end
end
end
describe "#inline_diffs" do
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment