Commit 5da91562 authored by Adam Hegyi

Remove old Value Stream Analytics backend

This change removes the old version of the value stream analytics
(cycle analytics) backend.
parent 71c1db7a
@@ -7,7 +7,6 @@ class Projects::CycleAnalyticsController < Projects::ApplicationController
include Analytics::UniqueVisitsHelper
include GracefulTimeoutHandling
before_action :whitelist_query_limiting, only: [:show]
before_action :authorize_read_cycle_analytics!
track_unique_visits :show, target_id: 'p_analytics_valuestream'
@@ -38,8 +37,4 @@ class Projects::CycleAnalyticsController < Projects::ApplicationController
permissions: @cycle_analytics.permissions(user: current_user)
}
end
def whitelist_query_limiting
Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-foss/issues/42671')
end
end
# frozen_string_literal: true
module CycleAnalytics
module LevelBase
STAGES = %i[issue plan code test review staging].freeze
# This is a temporary adapter class which makes the new value stream (cycle analytics)
# backend compatible with the old implementation.
class StageAdapter
def initialize(stage, options)
@stage = stage
@options = options
end
# rubocop: disable CodeReuse/Presenter
def as_json(serializer: AnalyticsStageSerializer)
presenter = Analytics::CycleAnalytics::StagePresenter.new(stage)
serializer.new.represent(OpenStruct.new(
title: presenter.title,
description: presenter.description,
legend: presenter.legend,
name: stage.name,
project_median: median,
group_median: median
))
end
# rubocop: enable CodeReuse/Presenter
def events
data_collector.records_fetcher.serialized_records
end
def median
data_collector.median.seconds
end
alias_method :project_median, :median
alias_method :group_median, :median
private
attr_reader :stage, :options
def data_collector
@data_collector ||= Gitlab::Analytics::CycleAnalytics::DataCollector.new(stage: stage, params: options)
end
end
def all_medians_by_stage
STAGES.each_with_object({}) do |stage_name, medians_per_stage|
medians_per_stage[stage_name] = self[stage_name].median
end
end
def stats
@stats ||= STAGES.map do |stage_name|
self[stage_name].as_json
end
end
def [](stage_name)
if Feature.enabled?(:new_project_level_vsa_backend, resource_parent, default_enabled: true)
StageAdapter.new(build_stage(stage_name), options)
else
Gitlab::CycleAnalytics::Stage[stage_name].new(options: options)
end
end
def stage_params_by_name(name)
Gitlab::Analytics::CycleAnalytics::DefaultStages.find_by_name!(name)
end
end
end
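For reference, the removed LevelBase#[] above is the switch point between the two backends. A rough, caller-side sketch of that dispatch for the issue stage, assuming it runs inside ProjectLevel (so project, options and build_stage come from the including class and are not defined in the snippet):
# Sketch only; mirrors LevelBase#[] above, values are placeholders.
stage =
  if Feature.enabled?(:new_project_level_vsa_backend, project, default_enabled: true)
    # new backend: DataCollector wrapped in the temporary StageAdapter
    CycleAnalytics::LevelBase::StageAdapter.new(build_stage(:issue), options)
  else
    # old backend being removed in this commit
    Gitlab::CycleAnalytics::Stage[:issue].new(options: options)
  end
stage.median # => median duration in seconds for the issue stage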
@@ -2,7 +2,6 @@
module CycleAnalytics
class ProjectLevel
include LevelBase
attr_reader :project, :options
def initialize(project, options:)
@@ -21,13 +20,29 @@ module CycleAnalytics
Gitlab::CycleAnalytics::Permissions.get(user: user, project: project)
end
def stats
@stats ||= default_stage_names.map do |stage_name|
self[stage_name].as_json
end
end
def [](stage_name)
CycleAnalytics::ProjectLevelStageAdapter.new(build_stage(stage_name), options)
end
private
def build_stage(stage_name)
stage_params = stage_params_by_name(stage_name).merge(project: project)
Analytics::CycleAnalytics::ProjectStage.new(stage_params)
end
def resource_parent
project
end
def stage_params_by_name(name)
Gitlab::Analytics::CycleAnalytics::DefaultStages.find_by_name!(name)
end
def default_stage_names
Gitlab::Analytics::CycleAnalytics::DefaultStages.symbolized_stage_names
end
end
end
# frozen_string_literal: true
# This adapter class makes the new value stream (cycle analytics) backend
# compatible with the old value stream controller actions.
module CycleAnalytics
class ProjectLevelStageAdapter
def initialize(stage, options)
@stage = stage
@options = options
end
# rubocop: disable CodeReuse/Presenter
def as_json(serializer: AnalyticsStageSerializer)
presenter = Analytics::CycleAnalytics::StagePresenter.new(stage)
serializer.new.represent(OpenStruct.new(
title: presenter.title,
description: presenter.description,
legend: presenter.legend,
name: stage.name,
project_median: median
))
end
# rubocop: enable CodeReuse/Presenter
def events
data_collector.records_fetcher.serialized_records
end
def median
data_collector.median.seconds
end
alias_method :project_median, :median
private
attr_reader :stage, :options
def data_collector
@data_collector ||= Gitlab::Analytics::CycleAnalytics::DataCollector.new(stage: stage, params: options)
end
end
end
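A brief usage sketch of the adapter above, modeled on the project-level specs later in this commit (project and user are assumed test fixtures; the exact as_json payload comes from AnalyticsStageSerializer):
level = CycleAnalytics::ProjectLevel.new(project, options: { from: 10.days.ago, current_user: user })
adapter = level[:issue]  # => CycleAnalytics::ProjectLevelStageAdapter
adapter.median           # median duration in seconds (Float)
adapter.events           # serialized records from the DataCollector
adapter.as_json          # serialized stage (roughly: title, description, legend, name, median)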
# frozen_string_literal: true
class GroupAnalyticsStageEntity < Grape::Entity
include EntityDateHelper
expose :title
expose :name
expose :legend
expose :description
expose :group_median, as: :value do |stage|
# group_median returns a BatchLoader instance which we first have to unwrap by using to_f
# we use to_f to make sure results below 1 are presented to the end-user
stage.group_median.to_f.nonzero? ? distance_of_time_in_words(stage.group_median) : nil
end
end
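The to_f.nonzero? guard above is plain Ruby; a quick illustration of why the unwrap matters:
0.0.nonzero?    # => nil    (falsy: the entity exposes nil rather than "0 seconds")
0.4.nonzero?    # => 0.4    (truthy: medians below 1 second are still presented)
3600.0.nonzero? # => 3600.0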
# frozen_string_literal: true
class GroupAnalyticsStageSerializer < BaseSerializer
entity GroupAnalyticsStageEntity
end
---
name: new_project_level_vsa_backend
introduced_by_url:
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/282435
milestone: '13.7'
type: development
group: group::optimize
default_enabled: true
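While this development flag existed, it backed the check visible in LevelBase#[] above; toggling it (for example from a Rails console) looked roughly like:
Feature.enabled?(:new_project_level_vsa_backend, project, default_enabled: true) # the check in LevelBase#[]
Feature.disable(:new_project_level_vsa_backend)                                  # fall back to the old backend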
@@ -3,8 +3,6 @@
module Analytics
module CycleAnalytics
class GroupLevel
include ::CycleAnalytics::LevelBase
attr_reader :options, :group
def initialize(group:, options:)
@@ -25,27 +23,6 @@ module Analytics
.new(group, options: options)
.data
end
def permissions(*)
STAGES.each_with_object({}) do |stage, obj|
obj[stage] = true
end
end
def stats
@stats ||= STAGES.map do |stage_name|
self[stage_name].as_json(serializer: GroupAnalyticsStageSerializer)
end
end
def build_stage(stage_name)
stage_params = stage_params_by_name(stage_name).merge(group: group)
Analytics::CycleAnalytics::GroupStage.new(stage_params)
end
def resource_parent
group
end
end
end
end
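For context, the removed GroupLevel#permissions granted every default stage unconditionally; the hash it built from STAGES was simply:
{ issue: true, plan: true, code: true, test: true, review: true, staging: true }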
@@ -24,12 +24,6 @@ RSpec.describe Analytics::CycleAnalytics::GroupLevel do
group.add_owner(user)
end
describe '#permissions' do
it 'returns true for all stages' do
expect(subject.permissions.values.uniq).to eq([true])
end
end
describe '#summary' do
before do
create_cycle(user, project, issue, mr, milestone, pipeline)
......
@@ -7,13 +7,11 @@ RSpec.describe Analytics::CycleAnalytics::StagePresenter do
let(:default_stage) { Analytics::CycleAnalytics::ProjectStage.new(default_stage_params) }
let(:custom_stage) { Analytics::CycleAnalytics::ProjectStage.new(name: 'Hello') }
let(:old_issue_stage_implementation) { Gitlab::CycleAnalytics::IssueStage.new(options: {}) }
describe '#title' do
it 'returns the pre-defined title for the default stage' do
decorator = described_class.new(default_stage)
expect(decorator.title).to eq(old_issue_stage_implementation.title)
expect(decorator.title).to eq(s_('CycleAnalyticsStage|Issue'))
end
it 'returns the name attribute for a custom stage' do
@@ -27,7 +25,7 @@ RSpec.describe Analytics::CycleAnalytics::StagePresenter do
it 'returns the pre-defined description for the default stage' do
decorator = described_class.new(default_stage)
expect(decorator.description).to eq(old_issue_stage_implementation.description)
expect(decorator.description).to eq(_('Time before an issue gets scheduled'))
end
it 'returns empty string when custom stage is given' do
......
@@ -30,6 +30,10 @@ module Gitlab
all.map { |stage| stage[:name] }
end
def self.symbolized_stage_names
names.map(&:to_sym)
end
def self.params_for_issue_stage
{
name: 'issue',
......
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
class BaseEventFetcher
include BaseQuery
include GroupProjectsProvider
attr_reader :projections, :query, :stage, :options
MAX_EVENTS = 50
def initialize(stage:, options:)
@stage = stage
@options = options
end
def fetch
update_author!
event_result.map do |event|
serialize(event) if has_permission?(event['id'])
end.compact
end
def order
@order || default_order
end
private
def update_author!
return unless event_result.any? && event_result.first['author_id']
Updater.update!(event_result, from: 'author_id', to: 'author', klass: User)
end
def event_result
@event_result ||= ActiveRecord::Base.connection.exec_query(events_query.to_sql).to_a
end
def events_query
diff_fn = subtract_datetimes_diff(base_query, options[:start_time_attrs], options[:end_time_attrs])
base_query.project(extract_diff_epoch(diff_fn).as('total_time'), *projections).order(order.desc).take(MAX_EVENTS)
end
def default_order
[options[:start_time_attrs]].flatten.first
end
def serialize(_event)
raise NotImplementedError.new("Expected #{self.name} to implement serialize(event)")
end
def has_permission?(id)
allowed_ids.nil? || allowed_ids.include?(id.to_i)
end
def allowed_ids
@allowed_ids ||= allowed_ids_finder_class
.new(options[:current_user], allowed_ids_source)
.execute.where(id: event_result_ids).pluck(:id)
end
def event_result_ids
event_result.map { |event| event['id'] }
end
def allowed_ids_source
group ? { group_id: group.id, include_subgroups: true } : { project_id: project.id }
end
def serialization_context
{}
end
end
end
end
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
module BaseQuery
include MetricsTables
include Gitlab::Database::Median
include Gitlab::Database::DateTime
private
def base_query
@base_query ||= stage_query(projects.map(&:id))
end
def stage_query(project_ids)
query = mr_closing_issues_table.join(issue_table).on(issue_table[:id].eq(mr_closing_issues_table[:issue_id]))
.join(issue_metrics_table).on(issue_table[:id].eq(issue_metrics_table[:issue_id]))
.join(projects_table).on(issue_table[:project_id].eq(projects_table[:id]))
.join(routes_table).on(projects_table[:namespace_id].eq(routes_table[:source_id]))
.project(issue_table[:project_id].as("project_id"))
.project(projects_table[:path].as("project_path"))
.project(routes_table[:path].as("namespace_path"))
query = limit_query(query, project_ids)
query = limit_query_by_date_range(query)
# Load merge_requests
query = load_merge_requests(query)
query
end
def limit_query(query, project_ids)
query.where(issue_table[:project_id].in(project_ids))
.where(routes_table[:source_type].eq('Namespace'))
end
def limit_query_by_date_range(query)
query = query.where(issue_table[:created_at].gteq(options[:from]))
query = query.where(issue_table[:created_at].lteq(options[:to])) if options[:to]
query
end
def load_merge_requests(query)
query.join(mr_table, Arel::Nodes::OuterJoin)
.on(mr_table[:id].eq(mr_closing_issues_table[:merge_request_id]))
.join(mr_metrics_table)
.on(mr_table[:id].eq(mr_metrics_table[:merge_request_id]))
end
end
end
end
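For orientation, stage_query above assembles roughly the following SQL before each stage helper narrows it further. This is a sketch of the join structure, not the literal Arel output; table names assume GitLab's standard schema and the column list is abridged:
# Approximate SQL shape of the removed BaseQuery#stage_query (sketch only):
BASE_QUERY_SKETCH = <<~SQL
  SELECT issues.project_id AS project_id,
         projects.path     AS project_path,
         routes.path       AS namespace_path
  FROM merge_requests_closing_issues
  INNER JOIN issues                ON issues.id = merge_requests_closing_issues.issue_id
  INNER JOIN issue_metrics         ON issues.id = issue_metrics.issue_id
  INNER JOIN projects              ON issues.project_id = projects.id
  INNER JOIN routes                ON projects.namespace_id = routes.source_id
  LEFT OUTER JOIN merge_requests   ON merge_requests.id = merge_requests_closing_issues.merge_request_id
  INNER JOIN merge_request_metrics ON merge_requests.id = merge_request_metrics.merge_request_id
  WHERE issues.project_id IN (/* project_ids */)
    AND routes.source_type = 'Namespace'
    AND issues.created_at >= /* options[:from] */
SQL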
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
class BaseStage
include BaseQuery
include GroupProjectsProvider
attr_reader :options
def initialize(options:)
@options = options
end
def events
event_fetcher.fetch
end
def as_json(serializer: AnalyticsStageSerializer)
serializer.new.represent(self)
end
def title
raise NotImplementedError.new("Expected #{self.name} to implement title")
end
def project_median
return if project.nil?
BatchLoader.for(project.id).batch(key: name) do |project_ids, loader|
if project_ids.one?
loader.call(project.id, median_query(project_ids))
else
begin
median_datetimes(cte_table, interval_query(project_ids), name, :project_id)&.each do |project_id, median|
loader.call(project_id, median)
end
rescue NotSupportedError
{}
end
end
end
end
def group_median
median_query(projects.map(&:id))
end
def median_query(project_ids)
# Build a `SELECT` query. We find the first of the `end_time_attrs` that isn't `NULL` (call this end_time).
# Next, we find the first of the start_time_attrs that isn't `NULL` (call this start_time).
# We compute the (end_time - start_time) interval, and give it an alias based on the current
# value stream analytics stage.
median_datetime(cte_table, interval_query(project_ids), name)
end
def name
raise NotImplementedError.new("Expected #{self.name} to implement name")
end
def cte_table
Arel::Table.new("cte_table_for_#{name}")
end
def interval_query(project_ids)
Arel::Nodes::As.new(cte_table,
subtract_datetimes(stage_query(project_ids), start_time_attrs, end_time_attrs, name.to_s))
end
private
def event_fetcher
@event_fetcher ||= Gitlab::CycleAnalytics::EventFetcher[name].new(stage: name,
options: event_options)
end
def event_options
options.merge(start_time_attrs: start_time_attrs, end_time_attrs: end_time_attrs)
end
end
end
end
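Tying the comment on median_query above to something concrete: interval_query wraps stage_query in a CTE whose extra column is the per-record duration. Assuming the usual output of Gitlab::Database::DateTime#subtract_datetimes (an assumption about that helper, not shown in this commit), the issue stage's column looks roughly like:
# Sketch of the duration column for the :issue stage; COALESCE picks the first
# non-NULL end attribute, exactly as the median_query comment describes.
ISSUE_DURATION_COLUMN_SKETCH = <<~SQL
  COALESCE(issue_metrics.first_associated_with_milestone_at,
           issue_metrics.first_added_to_board_at)
  - issues.created_at AS issue
SQL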
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
module BuildsEventHelper
def initialize(...)
@projections = [build_table[:id]]
@order = build_table[:created_at]
super(...)
end
def fetch
Updater.update!(event_result, from: 'id', to: 'build', klass: ::Ci::Build)
super
end
def events_query
base_query.join(build_table).on(mr_metrics_table[:pipeline_id].eq(build_table[:commit_id]))
super
end
private
def allowed_ids
nil
end
def serialize(event)
AnalyticsBuildSerializer.new.represent(event['build'])
end
end
end
end
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
class CodeEventFetcher < BaseEventFetcher
include CodeHelper
def initialize(...)
@projections = [mr_table[:title],
mr_table[:iid],
mr_table[:id],
mr_table[:created_at],
mr_table[:state_id],
mr_table[:author_id]]
@order = mr_table[:created_at]
super(...)
end
private
def serialize(event)
AnalyticsMergeRequestSerializer.new(serialization_context).represent(event)
end
def allowed_ids_finder_class
MergeRequestsFinder
end
end
end
end
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
module CodeHelper
def stage_query(project_ids)
super(project_ids).where(mr_table[:created_at].gteq(issue_metrics_table[:first_mentioned_in_commit_at]))
end
end
end
end
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
class CodeStage < BaseStage
include CodeHelper
def start_time_attrs
@start_time_attrs ||= issue_metrics_table[:first_mentioned_in_commit_at]
end
def end_time_attrs
@end_time_attrs ||= mr_table[:created_at]
end
def name
:code
end
def title
s_('CycleAnalyticsStage|Code')
end
def legend
_("Related Merge Requests")
end
def description
_("Time until first merge request")
end
end
end
end
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
module EventFetcher
def self.[](stage_name)
CycleAnalytics.const_get("#{stage_name.to_s.camelize}EventFetcher", false)
end
end
end
end
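The registry above is just a scoped const_get; for example:
Gitlab::CycleAnalytics::EventFetcher[:code]    # => Gitlab::CycleAnalytics::CodeEventFetcher
Gitlab::CycleAnalytics::EventFetcher[:staging] # => Gitlab::CycleAnalytics::StagingEventFetcher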
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
class IssueEventFetcher < BaseEventFetcher
include IssueHelper
def initialize(...)
@projections = [issue_table[:title],
issue_table[:iid],
issue_table[:id],
issue_table[:created_at],
issue_table[:author_id]]
super(...)
end
private
def serialize(event)
AnalyticsIssueSerializer.new(serialization_context).represent(event)
end
def allowed_ids_finder_class
IssuesFinder
end
end
end
end
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
module IssueHelper
def stage_query(project_ids)
query = issue_table.join(issue_metrics_table).on(issue_table[:id].eq(issue_metrics_table[:issue_id]))
.join(projects_table).on(issue_table[:project_id].eq(projects_table[:id]))
.join(routes_table).on(projects_table[:namespace_id].eq(routes_table[:source_id]))
.project(issue_table[:project_id].as("project_id"))
.project(projects_table[:path].as("project_path"))
.project(routes_table[:path].as("namespace_path"))
query = limit_query(query, project_ids)
limit_query_by_date_range(query)
end
def limit_query(query, project_ids)
query.where(issue_table[:project_id].in(project_ids))
.where(routes_table[:source_type].eq('Namespace'))
.where(issue_metrics_table[:first_added_to_board_at].not_eq(nil).or(issue_metrics_table[:first_associated_with_milestone_at].not_eq(nil)))
end
end
end
end
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
class IssueStage < BaseStage
include IssueHelper
def start_time_attrs
@start_time_attrs ||= issue_table[:created_at]
end
def end_time_attrs
@end_time_attrs ||= [issue_metrics_table[:first_associated_with_milestone_at],
issue_metrics_table[:first_added_to_board_at]]
end
def name
:issue
end
def title
s_('CycleAnalyticsStage|Issue')
end
def legend
_("Related Issues")
end
def description
_("Time before an issue gets scheduled")
end
end
end
end
@@ -23,7 +23,7 @@ module Gitlab
end
def get
::CycleAnalytics::LevelBase::STAGES.each do |stage|
Gitlab::Analytics::CycleAnalytics::DefaultStages.symbolized_stage_names.each do |stage|
@stage_permission_hash[stage] = authorized_stage?(stage)
end
......
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
class PlanEventFetcher < BaseEventFetcher
include PlanHelper
def initialize(...)
@projections = [issue_table[:title],
issue_table[:iid],
issue_table[:id],
issue_table[:created_at],
issue_table[:author_id]]
super(...)
end
private
def serialize(event)
AnalyticsIssueSerializer.new(serialization_context).represent(event)
end
def allowed_ids_finder_class
IssuesFinder
end
end
end
end
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
module PlanHelper
def stage_query(project_ids)
query = issue_table.join(issue_metrics_table).on(issue_table[:id].eq(issue_metrics_table[:issue_id]))
.join(projects_table).on(issue_table[:project_id].eq(projects_table[:id]))
.join(routes_table).on(projects_table[:namespace_id].eq(routes_table[:source_id]))
.project(issue_table[:project_id].as("project_id"))
.project(projects_table[:path].as("project_path"))
.project(routes_table[:path].as("namespace_path"))
.where(issue_table[:project_id].in(project_ids))
.where(routes_table[:source_type].eq('Namespace'))
query = limit_query(query)
limit_query_by_date_range(query)
end
def limit_query(query)
query.where(issue_metrics_table[:first_added_to_board_at].not_eq(nil).or(issue_metrics_table[:first_associated_with_milestone_at].not_eq(nil)))
.where(issue_metrics_table[:first_mentioned_in_commit_at].not_eq(nil))
end
end
end
end
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
class PlanStage < BaseStage
include PlanHelper
def start_time_attrs
@start_time_attrs ||= [issue_metrics_table[:first_associated_with_milestone_at],
issue_metrics_table[:first_added_to_board_at]]
end
def end_time_attrs
@end_time_attrs ||= issue_metrics_table[:first_mentioned_in_commit_at]
end
def name
:plan
end
def title
s_('CycleAnalyticsStage|Plan')
end
def legend
_("Related Issues")
end
def description
_("Time before an issue starts implementation")
end
end
end
end
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
class ProductionEventFetcher < BaseEventFetcher
include ProductionHelper
def initialize(...)
@projections = [issue_table[:title],
issue_table[:iid],
issue_table[:id],
issue_table[:created_at],
issue_table[:author_id],
routes_table[:path]]
super(...)
end
private
def serialize(event)
AnalyticsIssueSerializer.new(serialization_context).represent(event)
end
def allowed_ids_finder_class
IssuesFinder
end
end
end
end
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
module ProductionHelper
def stage_query(project_ids)
super(project_ids)
.where(mr_metrics_table[:first_deployed_to_production_at]
.gteq(options[:from]))
end
end
end
end
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
class ReviewEventFetcher < BaseEventFetcher
include ReviewHelper
def initialize(...)
@projections = [mr_table[:title],
mr_table[:iid],
mr_table[:id],
mr_table[:created_at],
mr_table[:state_id],
mr_table[:author_id]]
super(...)
end
private
def serialize(event)
AnalyticsMergeRequestSerializer.new(serialization_context).represent(event)
end
def allowed_ids_finder_class
MergeRequestsFinder
end
end
end
end
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
module ReviewHelper
def stage_query(project_ids)
super(project_ids).where(mr_metrics_table[:merged_at].not_eq(nil))
end
end
end
end
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
class ReviewStage < BaseStage
include ReviewHelper
def start_time_attrs
@start_time_attrs ||= mr_table[:created_at]
end
def end_time_attrs
@end_time_attrs ||= mr_metrics_table[:merged_at]
end
def name
:review
end
def title
s_('CycleAnalyticsStage|Review')
end
def legend
_("Related Merged Requests")
end
def description
_("Time between merge request creation and merge/close")
end
end
end
end
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
module Stage
def self.[](stage_name)
CycleAnalytics.const_get("#{stage_name.to_s.camelize}Stage", false)
end
end
end
end
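Stage[] mirrors the EventFetcher registry shown earlier; the removed LevelBase#[] used it as the old-backend branch:
Gitlab::CycleAnalytics::Stage[:review] # => Gitlab::CycleAnalytics::ReviewStage
Gitlab::CycleAnalytics::Stage[:test]   # => Gitlab::CycleAnalytics::TestStage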
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
class StagingEventFetcher < BaseEventFetcher
include ProductionHelper
include BuildsEventHelper
end
end
end
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
class StagingStage < BaseStage
include ProductionHelper
def start_time_attrs
@start_time_attrs ||= mr_metrics_table[:merged_at]
end
def end_time_attrs
@end_time_attrs ||= mr_metrics_table[:first_deployed_to_production_at]
end
def name
:staging
end
def title
s_('CycleAnalyticsStage|Staging')
end
def legend
_("Related Deployed Jobs")
end
def description
_("From merge request merge until deploy to production")
end
end
end
end
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
class TestEventFetcher < BaseEventFetcher
include TestHelper
include BuildsEventHelper
end
end
end
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
module TestHelper
def stage_query(project_ids)
if branch
super(project_ids).where(build_table[:ref].eq(branch))
else
super(project_ids)
end
end
private
def branch
@branch ||= options[:branch]
end
end
end
end
# frozen_string_literal: true
module Gitlab
module CycleAnalytics
class TestStage < BaseStage
include TestHelper
def start_time_attrs
@start_time_attrs ||= mr_metrics_table[:latest_build_started_at]
end
def end_time_attrs
@end_time_attrs ||= mr_metrics_table[:latest_build_finished_at]
end
def name
:test
end
def title
s_('CycleAnalyticsStage|Test')
end
def legend
_("Related Jobs")
end
def description
_("Total test time for all commits/merges")
end
end
end
end
# frozen_string_literal: true
# https://www.periscopedata.com/blog/medians-in-sql.html
module Gitlab
module Database
module Median
NotSupportedError = Class.new(StandardError)
def median_datetime(arel_table, query_so_far, column_sym)
extract_median(execute_queries(arel_table, query_so_far, column_sym)).presence
end
def median_datetimes(arel_table, query_so_far, column_sym, partition_column)
extract_medians(execute_queries(arel_table, query_so_far, column_sym, partition_column)).presence
end
def extract_median(results)
result = results.compact.first
result = result.first.presence
result['median']&.to_f if result
end
def extract_medians(results)
median_values = results.compact.first.values
median_values.each_with_object({}) do |(id, median), hash|
hash[id.to_i] = median&.to_f
end
end
def pg_median_datetime_sql(arel_table, query_so_far, column_sym, partition_column = nil)
# Create a CTE with the column we're operating on, row number (after sorting by the column
# we're operating on), and count of the table we're operating on (duplicated across) all rows
# of the CTE. For example, if we're looking to find the median of the `projects.star_count`
# column, the CTE might look like this:
#
# star_count | row_id | ct
# ------------+--------+----
# 5 | 1 | 3
# 9 | 2 | 3
# 15 | 3 | 3
#
# If a partition column is used we will do the same operation but for separate partitions,
# when that happens the CTE might look like this:
#
# project_id | star_count | row_id | ct
# ------------+------------+--------+----
# 1 | 5 | 1 | 2
# 1 | 9 | 2 | 2
# 2 | 10 | 1 | 3
# 2 | 15 | 2 | 3
# 2 | 20 | 3 | 3
cte_table = Arel::Table.new("ordered_records")
cte = Arel::Nodes::As.new(
cte_table,
arel_table.project(*rank_rows(arel_table, column_sym, partition_column)).
# Disallow negative values
where(arel_table[column_sym].gteq(zero_interval)))
# From the CTE, select either the middle row or the middle two rows (this is accomplished
# by 'where cte.row_id between cte.ct / 2.0 AND cte.ct / 2.0 + 1'). Find the average of the
# selected rows, and this is the median value.
result =
cte_table
.project(*median_projections(cte_table, column_sym, partition_column))
.where(
Arel::Nodes::Between.new(
cte_table[:row_id],
Arel::Nodes::And.new(
[(cte_table[:ct] / Arel.sql('2.0')),
(cte_table[:ct] / Arel.sql('2.0') + 1)]
)
)
)
.with(query_so_far, cte)
result.group(cte_table[partition_column]).order(cte_table[partition_column]) if partition_column
result.to_sql
end
private
def execute_queries(arel_table, query_so_far, column_sym, partition_column = nil)
queries = pg_median_datetime_sql(arel_table, query_so_far, column_sym, partition_column)
Array.wrap(queries).map { |query| ActiveRecord::Base.connection.execute(query) }
end
def average(args, as)
Arel::Nodes::NamedFunction.new("AVG", args, as)
end
def rank_rows(arel_table, column_sym, partition_column)
column_row = arel_table[column_sym].as(column_sym.to_s)
if partition_column
partition_row = arel_table[partition_column]
row_id =
Arel::Nodes::Over.new(
Arel::Nodes::NamedFunction.new('rank', []),
Arel::Nodes::Window.new.partition(arel_table[partition_column])
.order(arel_table[column_sym])
).as('row_id')
count = arel_table.from.from(arel_table.alias)
.project('COUNT(*)')
.where(arel_table[partition_column].eq(arel_table.alias[partition_column]))
.as('ct')
[partition_row, column_row, row_id, count]
else
row_id =
Arel::Nodes::Over.new(
Arel::Nodes::NamedFunction.new('row_number', []),
Arel::Nodes::Window.new.order(arel_table[column_sym])
).as('row_id')
count = arel_table.where(arel_table[column_sym].gteq(zero_interval)).project("COUNT(1)").as('ct')
[column_row, row_id, count]
end
end
def median_projections(table, column_sym, partition_column)
projections = []
projections << table[partition_column] if partition_column
projections << average([extract_epoch(table[column_sym])], "median")
projections
end
def extract_epoch(arel_attribute)
Arel.sql(%Q{EXTRACT(EPOCH FROM "#{arel_attribute.relation.name}"."#{arel_attribute.name}")})
end
def extract_diff_epoch(diff)
Arel.sql(%Q{EXTRACT(EPOCH FROM (#{diff.to_sql}))})
end
# Need to cast '0' to an INTERVAL before we can check if the interval is positive
def zero_interval
Arel::Nodes::NamedFunction.new("CAST", [Arel.sql("'0' AS INTERVAL")])
end
end
end
end
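Condensing the long comment in pg_median_datetime_sql: without a partition column, the generated statement has roughly this shape. It is a sketch of the idea rather than the literal Arel output; cte_table_for_issue stands for the stage's interval CTE that arrives via query_so_far:
MEDIAN_SQL_SKETCH = <<~SQL
  WITH ordered_records AS (
    SELECT cte_table_for_issue.issue AS issue,
           row_number() OVER (ORDER BY cte_table_for_issue.issue) AS row_id,
           (SELECT COUNT(1) FROM cte_table_for_issue
             WHERE issue >= CAST('0' AS INTERVAL))                AS ct
    FROM cte_table_for_issue
    WHERE cte_table_for_issue.issue >= CAST('0' AS INTERVAL)
  )
  SELECT AVG(EXTRACT(EPOCH FROM ordered_records.issue)) AS median
  FROM ordered_records
  WHERE ordered_records.row_id BETWEEN ordered_records.ct / 2.0
                                   AND ordered_records.ct / 2.0 + 1
SQL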
@@ -23190,21 +23190,9 @@ msgstr ""
msgid "Rejected (closed)"
msgstr ""
msgid "Related Deployed Jobs"
msgstr ""
msgid "Related Issues"
msgstr ""
msgid "Related Jobs"
msgstr ""
msgid "Related Merge Requests"
msgstr ""
msgid "Related Merged Requests"
msgstr ""
msgid "Related issues"
msgstr ""
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::CycleAnalytics::BaseEventFetcher do
let(:max_events) { 2 }
let(:project) { create(:project, :repository) }
let(:user) { project.owner }
let(:start_time_attrs) { Issue.arel_table[:created_at] }
let(:end_time_attrs) { [Issue::Metrics.arel_table[:first_associated_with_milestone_at]] }
let(:options) do
{ start_time_attrs: start_time_attrs,
end_time_attrs: end_time_attrs,
from: 30.days.ago,
project: project }
end
subject do
described_class.new(stage: :issue,
options: options).fetch
end
before do
allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return(Issue.all)
allow_any_instance_of(described_class).to receive(:serialize) do |event|
event
end
allow_any_instance_of(described_class)
.to receive(:allowed_ids).and_return(nil)
stub_const('Gitlab::CycleAnalytics::BaseEventFetcher::MAX_EVENTS', max_events)
setup_events(count: 3)
end
it 'limits the rows to the max number' do
expect(subject.count).to eq(max_events)
end
def setup_events(count:)
count.times do
issue = create(:issue, project: project, created_at: 2.days.ago)
milestone = create(:milestone, project: project)
issue.update(milestone: milestone)
create_merge_request_closing_issue(user, project, issue)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::CycleAnalytics::CodeEventFetcher do
let(:stage_name) { :code }
it_behaves_like 'default query config' do
it 'has a default order' do
expect(event.order).not_to be_nil
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::CycleAnalytics::CodeStage do
let(:stage_name) { :code }
let(:project) { create(:project) }
let(:issue_1) { create(:issue, project: project, created_at: 90.minutes.ago) }
let(:issue_2) { create(:issue, project: project, created_at: 60.minutes.ago) }
let(:issue_3) { create(:issue, project: project, created_at: 60.minutes.ago) }
let(:mr_1) { create(:merge_request, source_project: project, created_at: 15.minutes.ago) }
let(:mr_2) { create(:merge_request, source_project: project, created_at: 10.minutes.ago, source_branch: 'A') }
let(:stage_options) { { from: 2.days.ago, current_user: project.creator, project: project } }
let(:stage) { described_class.new(options: stage_options) }
before do
issue_1.metrics.update!(first_associated_with_milestone_at: 60.minutes.ago, first_mentioned_in_commit_at: 45.minutes.ago)
issue_2.metrics.update!(first_added_to_board_at: 60.minutes.ago, first_mentioned_in_commit_at: 40.minutes.ago)
issue_3.metrics.update!(first_added_to_board_at: 60.minutes.ago, first_mentioned_in_commit_at: 40.minutes.ago)
create(:merge_request, source_project: project, created_at: 10.minutes.ago, source_branch: 'B')
create(:merge_requests_closing_issues, merge_request: mr_1, issue: issue_1)
create(:merge_requests_closing_issues, merge_request: mr_2, issue: issue_2)
end
it_behaves_like 'base stage'
context 'when using the new query backend' do
include_examples 'Gitlab::Analytics::CycleAnalytics::DataCollector backend examples' do
let(:expected_record_count) { 2 }
let(:expected_ordered_attribute_values) { [mr_2.title, mr_1.title] }
end
end
describe '#project_median' do
around do |example|
freeze_time { example.run }
end
it 'counts median from issues with metrics' do
expect(stage.project_median).to eq(ISSUES_MEDIAN)
end
include_examples 'calculate #median with date range'
end
describe '#events' do
subject { stage.events }
it 'exposes merge requests that closes issues' do
expect(subject.count).to eq(2)
expect(subject.map { |event| event[:title] }).to contain_exactly(mr_1.title, mr_2.title)
end
end
context 'when group is given' do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:project_2) { create(:project, group: group) }
let(:project_3) { create(:project, group: group) }
let(:issue_2_1) { create(:issue, project: project_2, created_at: 90.minutes.ago) }
let(:issue_2_2) { create(:issue, project: project_3, created_at: 60.minutes.ago) }
let(:issue_2_3) { create(:issue, project: project_2, created_at: 60.minutes.ago) }
let(:mr_2_1) { create(:merge_request, source_project: project_2, created_at: 15.minutes.ago) }
let(:mr_2_2) { create(:merge_request, source_project: project_3, created_at: 10.minutes.ago, source_branch: 'A') }
let(:stage) { described_class.new(options: { from: 2.days.ago, current_user: user, group: group }) }
before do
group.add_owner(user)
issue_2_1.metrics.update!(first_associated_with_milestone_at: 60.minutes.ago, first_mentioned_in_commit_at: 45.minutes.ago)
issue_2_2.metrics.update!(first_added_to_board_at: 60.minutes.ago, first_mentioned_in_commit_at: 40.minutes.ago)
issue_2_3.metrics.update!(first_added_to_board_at: 60.minutes.ago, first_mentioned_in_commit_at: 40.minutes.ago)
create(:merge_requests_closing_issues, merge_request: mr_2_1, issue: issue_2_1)
create(:merge_requests_closing_issues, merge_request: mr_2_2, issue: issue_2_2)
end
describe '#group_median' do
around do |example|
freeze_time { example.run }
end
it 'counts median from issues with metrics' do
expect(stage.group_median).to eq(ISSUES_MEDIAN)
end
end
describe '#events' do
subject { stage.events }
it 'exposes merge requests that close issues' do
expect(subject.count).to eq(2)
expect(subject.map { |event| event[:title] }).to contain_exactly(mr_2_1.title, mr_2_2.title)
end
end
context 'when subgroup is given' do
let(:subgroup) { create(:group, parent: group) }
let(:project_4) { create(:project, group: subgroup) }
let(:project_5) { create(:project, group: subgroup) }
let(:issue_3_1) { create(:issue, project: project_4, created_at: 90.minutes.ago) }
let(:issue_3_2) { create(:issue, project: project_5, created_at: 60.minutes.ago) }
let(:issue_3_3) { create(:issue, project: project_5, created_at: 60.minutes.ago) }
let(:mr_3_1) { create(:merge_request, source_project: project_4, created_at: 15.minutes.ago) }
let(:mr_3_2) { create(:merge_request, source_project: project_5, created_at: 10.minutes.ago, source_branch: 'A') }
before do
issue_3_1.metrics.update!(first_associated_with_milestone_at: 60.minutes.ago, first_mentioned_in_commit_at: 45.minutes.ago)
issue_3_2.metrics.update!(first_added_to_board_at: 60.minutes.ago, first_mentioned_in_commit_at: 40.minutes.ago)
issue_3_3.metrics.update!(first_added_to_board_at: 60.minutes.ago, first_mentioned_in_commit_at: 40.minutes.ago)
create(:merge_requests_closing_issues, merge_request: mr_3_1, issue: issue_3_1)
create(:merge_requests_closing_issues, merge_request: mr_3_2, issue: issue_3_2)
end
describe '#events' do
subject { stage.events }
it 'exposes merge requests that close issues' do
expect(subject.count).to eq(4)
expect(subject.map { |event| event[:title] }).to contain_exactly(mr_2_1.title, mr_2_2.title, mr_3_1.title, mr_3_2.title)
end
it 'exposes merge requests that close issues with full path for subgroup' do
expect(subject.count).to eq(4)
expect(subject.find { |event| event[:title] == mr_3_1.title }[:url]).to include("#{subgroup.full_path}")
end
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'value stream analytics events', :aggregate_failures do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.owner }
let(:from_date) { 10.days.ago }
let!(:context) { create(:issue, project: project, created_at: 2.days.ago) }
let(:events) do
CycleAnalytics::ProjectLevel
.new(project, options: { from: from_date, current_user: user })[stage]
.events
end
let(:event) { events.first }
before do
setup(context)
end
describe '#issue_events' do
let(:stage) { :issue }
it 'has correct attributes' do
expect(event[:total_time]).not_to be_empty
expect(event[:title]).to eq(context.title)
expect(event[:url]).not_to be_nil
expect(event[:iid]).to eq(context.iid.to_s)
expect(event[:created_at]).to end_with('ago')
expect(event[:author][:web_url]).not_to be_nil
expect(event[:author][:avatar_url]).not_to be_nil
expect(event[:author][:name]).to eq(context.author.name)
end
end
describe '#plan_events' do
let(:stage) { :plan }
before do
create_commit_referencing_issue(context)
# Adding extra duration because the new VSA backend filters out 0 durations between these columns
context.metrics.update!(first_mentioned_in_commit_at: context.metrics.first_associated_with_milestone_at + 1.day)
end
it 'has correct attributes' do
expect(event[:total_time]).not_to be_empty
expect(event[:title]).to eq(context.title)
expect(event[:url]).not_to be_nil
expect(event[:iid]).to eq(context.iid.to_s)
expect(event[:created_at]).to end_with('ago')
expect(event[:author][:web_url]).not_to be_nil
expect(event[:author][:avatar_url]).not_to be_nil
expect(event[:author][:name]).to eq(context.author.name)
end
end
describe '#code_events' do
let(:stage) { :code }
let!(:merge_request) { MergeRequest.first }
before do
create_commit_referencing_issue(context)
end
it 'has correct attributes' do
expect(event[:total_time]).not_to be_empty
expect(event[:title]).to eq('Awesome merge_request')
expect(event[:iid]).to eq(context.iid.to_s)
expect(event[:created_at]).to end_with('ago')
expect(event[:author][:web_url]).not_to be_nil
expect(event[:author][:avatar_url]).not_to be_nil
expect(event[:author][:name]).to eq(MergeRequest.first.author.name)
end
end
describe '#test_events', :sidekiq_might_not_need_inline do
let(:stage) { :test }
let(:merge_request) { MergeRequest.first }
let!(:context) { create(:issue, project: project, created_at: 2.days.ago) }
let!(:pipeline) do
create(:ci_pipeline,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha,
project: project,
head_pipeline_of: merge_request)
end
before do
create(:ci_build, :success, pipeline: pipeline, author: user)
create(:ci_build, :success, pipeline: pipeline, author: user)
pipeline.run!
pipeline.succeed!
merge_merge_requests_closing_issue(user, project, context)
end
it 'has correct attributes' do
expect(event[:name]).not_to be_nil
expect(event[:id]).not_to be_nil
expect(event[:url]).not_to be_nil
expect(event[:branch]).not_to be_nil
expect(event[:branch][:url]).not_to be_nil
expect(event[:short_sha]).not_to be_nil
expect(event[:commit_url]).not_to be_nil
expect(event[:date]).not_to be_nil
expect(event[:total_time]).not_to be_empty
end
end
describe '#review_events' do
let(:stage) { :review }
let!(:context) { create(:issue, project: project, created_at: 2.days.ago) }
before do
merge_merge_requests_closing_issue(user, project, context)
end
it 'has correct attributes' do
expect(event[:total_time]).not_to be_empty
expect(event[:title]).to eq('Awesome merge_request')
expect(event[:iid]).to eq(context.iid.to_s)
expect(event[:url]).not_to be_nil
expect(event[:state]).not_to be_nil
expect(event[:created_at]).not_to be_nil
expect(event[:author][:web_url]).not_to be_nil
expect(event[:author][:avatar_url]).not_to be_nil
expect(event[:author][:name]).to eq(MergeRequest.first.author.name)
end
end
describe '#staging_events', :sidekiq_might_not_need_inline do
let(:stage) { :staging }
let(:merge_request) { MergeRequest.first }
let!(:pipeline) do
create(:ci_pipeline,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha,
project: project,
head_pipeline_of: merge_request)
end
before do
create(:ci_build, :success, pipeline: pipeline, author: user)
create(:ci_build, :success, pipeline: pipeline, author: user)
pipeline.run!
pipeline.succeed!
merge_merge_requests_closing_issue(user, project, context)
deploy_master(user, project)
end
it 'has correct attributes' do
expect(event[:name]).not_to be_nil
expect(event[:id]).not_to be_nil
expect(event[:url]).not_to be_nil
expect(event[:branch]).not_to be_nil
expect(event[:branch][:url]).not_to be_nil
expect(event[:short_sha]).not_to be_nil
expect(event[:commit_url]).not_to be_nil
expect(event[:date]).not_to be_nil
expect(event[:total_time]).not_to be_empty
expect(event[:author][:web_url]).not_to be_nil
expect(event[:author][:avatar_url]).not_to be_nil
expect(event[:author][:name]).to eq(MergeRequest.first.author.name)
end
end
def setup(context)
milestone = create(:milestone, project: project)
context.update!(milestone: milestone)
mr = create_merge_request_closing_issue(user, project, context, commit_message: "References #{context.to_reference}")
ProcessCommitWorker.new.perform(project.id, user.id, mr.commits.last.to_hash)
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::CycleAnalytics::IssueEventFetcher do
let(:stage_name) { :issue }
it_behaves_like 'default query config'
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::CycleAnalytics::IssueStage do
let(:stage_name) { :issue }
let(:project) { create(:project) }
let(:issue_1) { create(:issue, project: project, created_at: 90.minutes.ago) }
let(:issue_2) { create(:issue, project: project, created_at: 60.minutes.ago) }
let(:issue_3) { create(:issue, project: project, created_at: 30.minutes.ago) }
let!(:issue_without_milestone) { create(:issue, project: project, created_at: 1.minute.ago) }
let(:stage_options) { { from: 2.days.ago, current_user: project.creator, project: project } }
let(:stage) { described_class.new(options: stage_options) }
before do
issue_1.metrics.update!(first_associated_with_milestone_at: 60.minutes.ago )
issue_2.metrics.update!(first_added_to_board_at: 30.minutes.ago)
issue_3.metrics.update!(first_added_to_board_at: 15.minutes.ago)
end
it_behaves_like 'base stage'
context 'when using the new query backend' do
include_examples 'Gitlab::Analytics::CycleAnalytics::DataCollector backend examples' do
let(:expected_record_count) { 3 }
let(:expected_ordered_attribute_values) { [issue_3.title, issue_2.title, issue_1.title] }
end
end
describe '#median' do
around do |example|
freeze_time { example.run }
end
it 'counts median from issues with metrics' do
expect(stage.project_median).to eq(ISSUES_MEDIAN)
end
include_examples 'calculate #median with date range'
end
describe '#events' do
it 'exposes issues with metrics' do
result = stage.events
expect(result.count).to eq(3)
expect(result.map { |event| event[:title] }).to contain_exactly(issue_1.title, issue_2.title, issue_3.title)
end
end
context 'when group is given' do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:project_2) { create(:project, group: group) }
let(:project_3) { create(:project, group: group) }
let(:issue_2_1) { create(:issue, project: project_2, created_at: 90.minutes.ago) }
let(:issue_2_2) { create(:issue, project: project_3, created_at: 60.minutes.ago) }
let(:issue_2_3) { create(:issue, project: project_2, created_at: 60.minutes.ago) }
let(:stage) { described_class.new(options: { from: 2.days.ago, current_user: user, group: group }) }
before do
group.add_owner(user)
issue_2_1.metrics.update!(first_associated_with_milestone_at: 60.minutes.ago)
issue_2_2.metrics.update!(first_added_to_board_at: 30.minutes.ago)
end
describe '#group_median' do
around do |example|
freeze_time { example.run }
end
it 'counts median from issues with metrics' do
expect(stage.group_median).to eq(ISSUES_MEDIAN)
end
end
describe '#events' do
subject { stage.events }
it 'exposes merge requests that close issues' do
expect(subject.count).to eq(2)
expect(subject.map { |event| event[:title] }).to contain_exactly(issue_2_1.title, issue_2_2.title)
end
end
context 'when only part of projects is chosen' do
let(:stage) { described_class.new(options: { from: 2.days.ago, current_user: user, group: group, projects: [project_2.id] }) }
describe '#group_median' do
around do |example|
freeze_time { example.run }
end
it 'counts median from issues with metrics' do
expect(stage.group_median).to eq(ISSUES_MEDIAN)
end
end
describe '#events' do
subject { stage.events }
it 'exposes merge requests that close issues' do
expect(subject.count).to eq(1)
expect(subject.map { |event| event[:title] }).to contain_exactly(issue_2_1.title)
end
end
end
context 'when subgroup is given' do
let(:subgroup) { create(:group, parent: group) }
let(:project_4) { create(:project, group: subgroup) }
let(:project_5) { create(:project, group: subgroup) }
let(:issue_3_1) { create(:issue, project: project_4, created_at: 90.minutes.ago) }
let(:issue_3_2) { create(:issue, project: project_5, created_at: 60.minutes.ago) }
let(:issue_3_3) { create(:issue, project: project_5, created_at: 60.minutes.ago) }
before do
issue_3_1.metrics.update!(first_associated_with_milestone_at: 60.minutes.ago)
issue_3_2.metrics.update!(first_added_to_board_at: 30.minutes.ago)
end
describe '#events' do
subject { stage.events }
it 'exposes merge requests that close issues' do
expect(subject.count).to eq(4)
expect(subject.map { |event| event[:title] }).to contain_exactly(issue_2_1.title, issue_2_2.title, issue_3_1.title, issue_3_2.title)
end
it 'exposes merge requests that close issues with full path for subgroup' do
expect(subject.count).to eq(4)
expect(subject.find { |event| event[:title] == issue_3_1.title }[:url]).to include("#{subgroup.full_path}")
end
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::CycleAnalytics::PlanEventFetcher do
let(:stage_name) { :plan }
it_behaves_like 'default query config' do
context 'no commits' do
it 'does not blow up if there are no commits' do
allow(event).to receive(:event_result).and_return([{}])
expect { event.fetch }.not_to raise_error
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::CycleAnalytics::PlanStage do
let(:stage_name) { :plan }
let(:project) { create(:project) }
let!(:issue_1) { create(:issue, project: project, created_at: 90.minutes.ago) }
let!(:issue_2) { create(:issue, project: project, created_at: 60.minutes.ago) }
let!(:issue_3) { create(:issue, project: project, created_at: 30.minutes.ago) }
let!(:issue_without_milestone) { create(:issue, project: project, created_at: 1.minute.ago) }
let(:stage_options) { { from: 2.days.ago, current_user: project.creator, project: project } }
let(:stage) { described_class.new(options: stage_options) }
before do
issue_1.metrics.update!(first_associated_with_milestone_at: 60.minutes.ago, first_mentioned_in_commit_at: 10.minutes.ago)
issue_2.metrics.update!(first_added_to_board_at: 30.minutes.ago, first_mentioned_in_commit_at: 20.minutes.ago)
issue_3.metrics.update!(first_added_to_board_at: 15.minutes.ago)
end
it_behaves_like 'base stage'
context 'when using the new query backend' do
include_examples 'Gitlab::Analytics::CycleAnalytics::DataCollector backend examples' do
let(:expected_record_count) { 2 }
let(:expected_ordered_attribute_values) { [issue_1.title, issue_2.title] }
end
end
describe '#project_median' do
around do |example|
freeze_time { example.run }
end
it 'counts median from issues with metrics' do
expect(stage.project_median).to eq(ISSUES_MEDIAN)
end
include_examples 'calculate #median with date range'
end
describe '#events' do
subject { stage.events }
it 'exposes issues with metrics' do
expect(subject.count).to eq(2)
expect(subject.map { |event| event[:title] }).to contain_exactly(issue_1.title, issue_2.title)
end
end
context 'when group is given' do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:project_2) { create(:project, group: group) }
let(:project_3) { create(:project, group: group) }
let(:issue_2_1) { create(:issue, project: project_2, created_at: 90.minutes.ago) }
let(:issue_2_2) { create(:issue, project: project_3, created_at: 60.minutes.ago) }
let(:issue_2_3) { create(:issue, project: project_2, created_at: 60.minutes.ago) }
let(:stage) { described_class.new(options: { from: 2.days.ago, current_user: user, group: group }) }
before do
group.add_owner(user)
issue_2_1.metrics.update!(first_associated_with_milestone_at: 60.minutes.ago, first_mentioned_in_commit_at: 10.minutes.ago)
issue_2_2.metrics.update!(first_added_to_board_at: 30.minutes.ago, first_mentioned_in_commit_at: 20.minutes.ago)
issue_2_3.metrics.update!(first_added_to_board_at: 15.minutes.ago)
end
describe '#group_median' do
around do |example|
freeze_time { example.run }
end
it 'counts median from issues with metrics' do
expect(stage.group_median).to eq(ISSUES_MEDIAN)
end
end
describe '#events' do
subject { stage.events }
it 'exposes merge requests that close issues' do
expect(subject.count).to eq(2)
expect(subject.map { |event| event[:title] }).to contain_exactly(issue_2_1.title, issue_2_2.title)
end
end
context 'when subgroup is given' do
let(:subgroup) { create(:group, parent: group) }
let(:project_4) { create(:project, group: subgroup) }
let(:project_5) { create(:project, group: subgroup) }
let(:issue_3_1) { create(:issue, project: project_4, created_at: 90.minutes.ago) }
let(:issue_3_2) { create(:issue, project: project_5, created_at: 60.minutes.ago) }
let(:issue_3_3) { create(:issue, project: project_5, created_at: 60.minutes.ago) }
before do
issue_3_1.metrics.update!(first_associated_with_milestone_at: 60.minutes.ago, first_mentioned_in_commit_at: 10.minutes.ago)
issue_3_2.metrics.update!(first_added_to_board_at: 30.minutes.ago, first_mentioned_in_commit_at: 20.minutes.ago)
issue_3_3.metrics.update!(first_added_to_board_at: 15.minutes.ago)
end
describe '#events' do
subject { stage.events }
it 'exposes merge requests that close issues' do
expect(subject.count).to eq(4)
expect(subject.map { |event| event[:title] }).to contain_exactly(issue_2_1.title, issue_2_2.title, issue_3_1.title, issue_3_2.title)
end
it 'exposes merge requests that close issues with full path for subgroup' do
expect(subject.count).to eq(4)
expect(subject.find { |event| event[:title] == issue_3_1.title }[:url]).to include("#{subgroup.full_path}")
end
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::CycleAnalytics::ProductionEventFetcher do
let(:stage_name) { :production }
it_behaves_like 'default query config'
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::CycleAnalytics::ReviewEventFetcher do
let(:stage_name) { :review }
it_behaves_like 'default query config'
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::CycleAnalytics::ReviewStage do
let(:stage_name) { :review }
let(:project) { create(:project) }
let(:issue_1) { create(:issue, project: project, created_at: 90.minutes.ago) }
let(:issue_2) { create(:issue, project: project, created_at: 60.minutes.ago) }
let(:issue_3) { create(:issue, project: project, created_at: 60.minutes.ago) }
let(:mr_1) { create(:merge_request, :closed, source_project: project, created_at: 60.minutes.ago) }
let(:mr_2) { create(:merge_request, :closed, source_project: project, created_at: 40.minutes.ago, source_branch: 'A') }
let(:mr_3) { create(:merge_request, source_project: project, created_at: 10.minutes.ago, source_branch: 'B') }
let!(:mr_4) { create(:merge_request, source_project: project, created_at: 10.minutes.ago, source_branch: 'C') }
let(:stage) { described_class.new(options: { from: 2.days.ago, current_user: project.creator, project: project }) }
before do
mr_1.metrics.update!(merged_at: 30.minutes.ago)
mr_2.metrics.update!(merged_at: 10.minutes.ago)
create(:merge_requests_closing_issues, merge_request: mr_1, issue: issue_1)
create(:merge_requests_closing_issues, merge_request: mr_2, issue: issue_2)
create(:merge_requests_closing_issues, merge_request: mr_3, issue: issue_3)
end
it_behaves_like 'base stage'
describe '#project_median' do
around do |example|
freeze_time { example.run }
end
it 'counts median from issues with metrics' do
expect(stage.project_median).to eq(ISSUES_MEDIAN)
end
end
describe '#events' do
subject { stage.events }
it 'exposes merge requests that close issues' do
expect(subject.count).to eq(2)
expect(subject.map { |event| event[:title] }).to contain_exactly(mr_1.title, mr_2.title)
end
end
context 'when group is given' do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:project_2) { create(:project, group: group) }
let(:project_3) { create(:project, group: group) }
let(:issue_2_1) { create(:issue, project: project_2, created_at: 90.minutes.ago) }
let(:issue_2_2) { create(:issue, project: project_3, created_at: 60.minutes.ago) }
let(:issue_2_3) { create(:issue, project: project_2, created_at: 60.minutes.ago) }
let(:mr_2_1) { create(:merge_request, :closed, source_project: project_2, created_at: 60.minutes.ago) }
let(:mr_2_2) { create(:merge_request, :closed, source_project: project_3, created_at: 40.minutes.ago, source_branch: 'A') }
let(:mr_2_3) { create(:merge_request, source_project: project_2, created_at: 10.minutes.ago, source_branch: 'B') }
let!(:mr_2_4) { create(:merge_request, source_project: project_3, created_at: 10.minutes.ago, source_branch: 'C') }
let(:stage) { described_class.new(options: { from: 2.days.ago, current_user: user, group: group }) }
before do
group.add_owner(user)
mr_2_1.metrics.update!(merged_at: 30.minutes.ago)
mr_2_2.metrics.update!(merged_at: 10.minutes.ago)
create(:merge_requests_closing_issues, merge_request: mr_2_1, issue: issue_2_1)
create(:merge_requests_closing_issues, merge_request: mr_2_2, issue: issue_2_2)
create(:merge_requests_closing_issues, merge_request: mr_2_3, issue: issue_2_3)
end
describe '#group_median' do
around do |example|
freeze_time { example.run }
end
it 'counts median from issues with metrics' do
expect(stage.group_median).to eq(ISSUES_MEDIAN)
end
end
describe '#events' do
subject { stage.events }
it 'exposes merge requests that close issues' do
expect(subject.count).to eq(2)
expect(subject.map { |event| event[:title] }).to contain_exactly(mr_2_1.title, mr_2_2.title)
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::CycleAnalytics::StagingEventFetcher do
let(:stage_name) { :staging }
it_behaves_like 'default query config' do
it 'has a default order' do
expect(event.order).not_to be_nil
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::CycleAnalytics::StagingStage do
let(:stage_name) { :staging }
let(:project) { create(:project) }
let(:issue_1) { create(:issue, project: project, created_at: 90.minutes.ago) }
let(:issue_2) { create(:issue, project: project, created_at: 60.minutes.ago) }
let(:issue_3) { create(:issue, project: project, created_at: 60.minutes.ago) }
let(:mr_1) { create(:merge_request, :closed, source_project: project, created_at: 60.minutes.ago) }
let(:mr_2) { create(:merge_request, :closed, source_project: project, created_at: 40.minutes.ago, source_branch: 'A') }
let(:mr_3) { create(:merge_request, source_project: project, created_at: 10.minutes.ago, source_branch: 'B') }
let(:build_1) { create(:ci_build, project: project) }
let(:build_2) { create(:ci_build, project: project) }
let(:stage_options) { { from: 2.days.ago, current_user: project.creator, project: project } }
let(:stage) { described_class.new(options: stage_options) }
before do
mr_1.metrics.update!(merged_at: 80.minutes.ago, first_deployed_to_production_at: 50.minutes.ago, pipeline_id: build_1.commit_id)
mr_2.metrics.update!(merged_at: 60.minutes.ago, first_deployed_to_production_at: 30.minutes.ago, pipeline_id: build_2.commit_id)
mr_3.metrics.update!(merged_at: 10.minutes.ago, first_deployed_to_production_at: 3.days.ago, pipeline_id: create(:ci_build, project: project).commit_id)
create(:merge_requests_closing_issues, merge_request: mr_1, issue: issue_1)
create(:merge_requests_closing_issues, merge_request: mr_2, issue: issue_2)
create(:merge_requests_closing_issues, merge_request: mr_3, issue: issue_3)
end
it_behaves_like 'base stage'
describe '#project_median' do
around do |example|
freeze_time { example.run }
end
it 'counts median from issues with metrics' do
expect(stage.project_median).to eq(ISSUES_MEDIAN)
end
it_behaves_like 'calculate #median with date range'
end
describe '#events' do
subject { stage.events }
it 'exposes builds connected to merge request' do
expect(subject.count).to eq(2)
expect(subject.map { |event| event[:name] }).to contain_exactly(build_1.name, build_2.name)
end
end
context 'when group is given' do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:project_2) { create(:project, group: group) }
let(:project_3) { create(:project, group: group) }
let(:issue_2_1) { create(:issue, project: project_2, created_at: 90.minutes.ago) }
let(:issue_2_2) { create(:issue, project: project_3, created_at: 60.minutes.ago) }
let(:issue_2_3) { create(:issue, project: project_2, created_at: 60.minutes.ago) }
let(:mr_1) { create(:merge_request, :closed, source_project: project_2, created_at: 60.minutes.ago) }
let(:mr_2) { create(:merge_request, :closed, source_project: project_3, created_at: 40.minutes.ago, source_branch: 'A') }
let(:mr_3) { create(:merge_request, source_project: project_2, created_at: 10.minutes.ago, source_branch: 'B') }
let(:build_1) { create(:ci_build, project: project_2) }
let(:build_2) { create(:ci_build, project: project_3) }
let(:stage) { described_class.new(options: { from: 2.days.ago, current_user: user, group: group }) }
before do
group.add_owner(user)
mr_1.metrics.update!(merged_at: 80.minutes.ago, first_deployed_to_production_at: 50.minutes.ago, pipeline_id: build_1.commit_id)
mr_2.metrics.update!(merged_at: 60.minutes.ago, first_deployed_to_production_at: 30.minutes.ago, pipeline_id: build_2.commit_id)
mr_3.metrics.update!(merged_at: 10.minutes.ago, first_deployed_to_production_at: 3.days.ago, pipeline_id: create(:ci_build, project: project_2).commit_id)
create(:merge_requests_closing_issues, merge_request: mr_1, issue: issue_2_1)
create(:merge_requests_closing_issues, merge_request: mr_2, issue: issue_2_2)
create(:merge_requests_closing_issues, merge_request: mr_3, issue: issue_2_3)
end
describe '#group_median' do
around do |example|
freeze_time { example.run }
end
it 'counts median from issues with metrics' do
expect(stage.group_median).to eq(ISSUES_MEDIAN)
end
end
describe '#events' do
subject { stage.events }
it 'exposes merge requests that close issues' do
expect(subject.count).to eq(2)
expect(subject.map { |event| event[:name] }).to contain_exactly(build_1.name, build_2.name)
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::CycleAnalytics::TestEventFetcher do
let(:stage_name) { :test }
it_behaves_like 'default query config' do
it 'has a default order' do
expect(event.order).not_to be_nil
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::CycleAnalytics::TestStage do
let(:stage_name) { :test }
let(:project) { create(:project) }
let(:stage_options) { { from: 2.days.ago, current_user: project.creator, project: project } }
let(:stage) { described_class.new(options: stage_options) }
it_behaves_like 'base stage'
describe '#median' do
let(:mr_1) { create(:merge_request, :closed, source_project: project, created_at: 60.minutes.ago) }
let(:mr_2) { create(:merge_request, :closed, source_project: project, created_at: 40.minutes.ago, source_branch: 'A') }
let(:mr_3) { create(:merge_request, source_project: project, created_at: 10.minutes.ago, source_branch: 'B') }
let(:mr_4) { create(:merge_request, source_project: project, created_at: 10.minutes.ago, source_branch: 'C') }
let(:mr_5) { create(:merge_request, source_project: project, created_at: 10.minutes.ago, source_branch: 'D') }
let(:ci_build1) { create(:ci_build, project: project) }
let(:ci_build2) { create(:ci_build, project: project) }
before do
issue_1 = create(:issue, project: project, created_at: 90.minutes.ago)
issue_2 = create(:issue, project: project, created_at: 60.minutes.ago)
issue_3 = create(:issue, project: project, created_at: 60.minutes.ago)
mr_1.metrics.update!(latest_build_started_at: 32.minutes.ago, latest_build_finished_at: 2.minutes.ago, pipeline_id: ci_build1.commit_id)
mr_2.metrics.update!(latest_build_started_at: 62.minutes.ago, latest_build_finished_at: 32.minutes.ago, pipeline_id: ci_build2.commit_id)
mr_3.metrics.update!(latest_build_started_at: nil, latest_build_finished_at: nil)
mr_4.metrics.update!(latest_build_started_at: nil, latest_build_finished_at: nil)
mr_5.metrics.update!(latest_build_started_at: nil, latest_build_finished_at: nil)
create(:merge_requests_closing_issues, merge_request: mr_1, issue: issue_1)
create(:merge_requests_closing_issues, merge_request: mr_2, issue: issue_2)
create(:merge_requests_closing_issues, merge_request: mr_3, issue: issue_3)
create(:merge_requests_closing_issues, merge_request: mr_4, issue: issue_3)
create(:merge_requests_closing_issues, merge_request: mr_5, issue: issue_3)
end
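    # mr_1 and mr_2 both have 30-minute build durations (started 32 and 62
    # minutes ago, finished 2 and 32 minutes ago); the remaining merge requests
    # have no build metrics, so the expected median is ISSUES_MEDIAN (30 minutes).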
around do |example|
freeze_time { example.run }
end
it 'counts median from issues with metrics' do
expect(stage.project_median).to eq(ISSUES_MEDIAN)
end
include_examples 'calculate #median with date range'
context 'when using the new query backend' do
include_examples 'Gitlab::Analytics::CycleAnalytics::DataCollector backend examples' do
let(:expected_record_count) { 2 }
let(:attribute_to_verify) { :id }
let(:expected_ordered_attribute_values) { [mr_1.metrics.pipeline.builds.first.id, mr_2.metrics.pipeline.builds.first.id] }
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'CycleAnalytics#code' do
extend CycleAnalyticsHelpers::TestGeneration
let_it_be(:project) { create(:project, :repository) }
let_it_be(:from_date) { 10.days.ago }
let_it_be(:user) { project.owner }
let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user }) }
subject { project_level }
context 'with deployment' do
generate_cycle_analytics_spec(
phase: :code,
data_fn: -> (context) { { issue: context.create(:issue, project: context.project) } },
start_time_conditions: [["issue mentioned in a commit",
-> (context, data) do
context.create_commit_referencing_issue(data[:issue])
end]],
end_time_conditions: [["merge request that closes issue is created",
-> (context, data) do
context.create_merge_request_closing_issue(context.user, context.project, data[:issue])
end]],
post_fn: -> (context, data) do
end)
context "when a regular merge request (that doesn't close the issue) is created" do
it "returns nil" do
issue = create(:issue, project: project)
create_commit_referencing_issue(issue)
create_merge_request_closing_issue(user, project, issue, message: "Closes nothing")
merge_merge_requests_closing_issue(user, project, issue)
deploy_master(user, project)
expect(subject[:code].project_median).to be_nil
end
end
end
context 'without deployment' do
generate_cycle_analytics_spec(
phase: :code,
data_fn: -> (context) { { issue: context.create(:issue, project: context.project) } },
start_time_conditions: [["issue mentioned in a commit",
-> (context, data) do
context.create_commit_referencing_issue(data[:issue])
end]],
end_time_conditions: [["merge request that closes issue is created",
-> (context, data) do
context.create_merge_request_closing_issue(context.user, context.project, data[:issue])
end]],
post_fn: -> (context, data) do
end)
context "when a regular merge request (that doesn't close the issue) is created" do
it "returns nil" do
issue = create(:issue, project: project)
create_commit_referencing_issue(issue)
create_merge_request_closing_issue(user, project, issue, message: "Closes nothing")
merge_merge_requests_closing_issue(user, project, issue)
expect(subject[:code].project_median).to be_nil
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'CycleAnalytics#issue' do
extend CycleAnalyticsHelpers::TestGeneration
let_it_be(:project) { create(:project, :repository) }
let_it_be(:from_date) { 10.days.ago }
let_it_be(:user) { project.owner }
let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user }) }
subject { project_level }
generate_cycle_analytics_spec(
phase: :issue,
data_fn: -> (context) { { issue: context.build(:issue, project: context.project) } },
start_time_conditions: [["issue created", -> (context, data) { data[:issue].save! }]],
end_time_conditions: [["issue associated with a milestone",
-> (context, data) do
if data[:issue].persisted?
data[:issue].update!(milestone: context.create(:milestone, project: context.project))
end
end],
["list label added to issue",
-> (context, data) do
if data[:issue].persisted?
data[:issue].update!(label_ids: [context.create(:list).label_id])
end
end]],
post_fn: -> (context, data) do
end)
context "when a regular label (instead of a list label) is added to the issue" do
it "returns nil" do
regular_label = create(:label)
issue = create(:issue, project: project)
issue.update!(label_ids: [regular_label.id])
create_merge_request_closing_issue(user, project, issue)
merge_merge_requests_closing_issue(user, project, issue)
expect(subject[:issue].project_median).to be_nil
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'CycleAnalytics#plan' do
extend CycleAnalyticsHelpers::TestGeneration
let_it_be(:project) { create(:project, :repository) }
let_it_be(:from_date) { 10.days.ago }
let_it_be(:user) { project.owner }
let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user }) }
subject { project_level }
generate_cycle_analytics_spec(
phase: :plan,
data_fn: -> (context) do
{
issue: context.build(:issue, project: context.project),
branch_name: context.generate(:branch)
}
end,
start_time_conditions: [["issue associated with a milestone",
-> (context, data) do
data[:issue].update!(milestone: context.create(:milestone, project: context.project))
end],
["list label added to issue",
-> (context, data) do
data[:issue].update!(label_ids: [context.create(:list).label_id])
end]],
end_time_conditions: [["issue mentioned in a commit",
-> (context, data) do
context.create_commit_referencing_issue(data[:issue], branch_name: data[:branch_name])
end]],
post_fn: -> (context, data) do
end)
context "when a regular label (instead of a list label) is added to the issue" do
it "returns nil" do
branch_name = generate(:branch)
label = create(:label)
issue = create(:issue, project: project)
issue.update!(label_ids: [label.id])
create_commit_referencing_issue(issue, branch_name: branch_name)
create_merge_request_closing_issue(user, project, issue, source_branch: branch_name)
merge_merge_requests_closing_issue(user, project, issue)
expect(subject[:issue].project_median).to be_nil
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe CycleAnalytics::ProjectLevel do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:from_date) { 10.days.ago }
let_it_be(:user) { project.owner }
let_it_be(:issue) { create(:issue, project: project, created_at: 2.days.ago) }
let_it_be(:milestone) { create(:milestone, project: project) }
let(:mr) { create_merge_request_closing_issue(user, project, issue, commit_message: "References #{issue.to_reference}") }
let(:pipeline) { create(:ci_empty_pipeline, status: 'created', project: project, ref: mr.source_branch, sha: mr.source_branch_sha, head_pipeline_of: mr) }
subject { described_class.new(project, options: { from: from_date }) }
describe '#all_medians_by_stage' do
before do
allow_next_instance_of(Gitlab::ReferenceExtractor) do |instance|
allow(instance).to receive(:issues).and_return([issue])
end
create_cycle(user, project, issue, mr, milestone, pipeline)
deploy_master(user, project)
end
it 'returns every median for each stage for a specific project' do
values = described_class::STAGES.each_with_object({}) do |stage_name, hsh|
hsh[stage_name] = subject[stage_name].project_median.presence
end
expect(subject.all_medians_by_stage).to eq(values)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe CycleAnalytics::ProjectLevelStageAdapter, type: :model do
let_it_be(:stage_name) { :review } # pre-defined, default stage
let_it_be(:merge_request) do
create(:merge_request, created_at: 5.hours.ago).tap do |mr|
mr.metrics.update!(merged_at: mr.created_at + 1.hour)
end
end
let_it_be(:project) { merge_request.target_project }
let(:stage) do
params = Gitlab::Analytics::CycleAnalytics::DefaultStages.find_by_name!(stage_name).merge(project: project)
Analytics::CycleAnalytics::ProjectStage.new(params)
end
around do |example|
freeze_time { example.run }
end
subject { described_class.new(stage, from: 1.month.ago, to: Time.zone.now, current_user: merge_request.author) }
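  # The merge request above is merged one hour after it is created, so the
  # review stage median is expected to be roughly 1.hour, expressed in seconds.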
it 'calculates median' do
expect(subject.median).to be_within(0.5).of(1.hour)
end
it 'lists events' do
expect(subject.events.size).to eq(1)
expect(subject.events.first[:title]).to eq(merge_request.title)
end
it 'presents the data as json' do
expect(subject.as_json).to include({ title: 'Review', value: 'about 1 hour' })
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'CycleAnalytics#review' do
extend CycleAnalyticsHelpers::TestGeneration
let_it_be(:project) { create(:project, :repository) }
let_it_be(:from_date) { 10.days.ago }
let_it_be(:user) { project.owner }
subject { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user }) }
generate_cycle_analytics_spec(
phase: :review,
data_fn: -> (context) { { issue: context.create(:issue, project: context.project) } },
start_time_conditions: [["merge request that closes issue is created",
-> (context, data) do
context.create_merge_request_closing_issue(context.user, context.project, data[:issue])
end]],
end_time_conditions: [["merge request that closes issue is merged",
-> (context, data) do
context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue])
end]],
post_fn: nil)
context "when a regular merge request (that doesn't close the issue) is created and merged" do
it "returns nil" do
MergeRequests::MergeService.new(project, user).execute(create(:merge_request))
expect(subject[:review].project_median).to be_nil
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'CycleAnalytics#staging' do
extend CycleAnalyticsHelpers::TestGeneration
let_it_be(:project) { create(:project, :repository) }
let_it_be(:from_date) { 10.days.ago }
let_it_be(:user) { project.owner }
let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user }) }
subject { project_level }
generate_cycle_analytics_spec(
phase: :staging,
data_fn: lambda do |context|
issue = context.create(:issue, project: context.project)
{ issue: issue, merge_request: context.create_merge_request_closing_issue(context.user, context.project, issue) }
end,
start_time_conditions: [["merge request that closes issue is merged",
-> (context, data) do
context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue])
end]],
end_time_conditions: [["merge request that closes issue is deployed to production",
-> (context, data) do
context.deploy_master(context.user, context.project)
end],
["production deploy happens after merge request is merged (along with other changes)",
lambda do |context, data|
# Make other changes on master
context.project.repository.commit("this_sha_apparently_does_not_matter")
context.deploy_master(context.user, context.project)
end]])
context "when a regular merge request (that doesn't close the issue) is merged and deployed" do
it "returns nil" do
merge_request = create(:merge_request)
MergeRequests::MergeService.new(project, user).execute(merge_request)
deploy_master(user, project)
expect(subject[:staging].project_median).to be_nil
end
end
context "when the deployment happens to a non-production environment" do
it "returns nil" do
issue = create(:issue, project: project)
merge_request = create_merge_request_closing_issue(user, project, issue)
MergeRequests::MergeService.new(project, user).execute(merge_request)
deploy_master(user, project, environment: 'staging')
expect(subject[:staging].project_median).to be_nil
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'CycleAnalytics#test' do
extend CycleAnalyticsHelpers::TestGeneration
let_it_be(:project) { create(:project, :repository) }
let_it_be(:from_date) { 10.days.ago }
let_it_be(:user) { project.owner }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user }) }
let!(:merge_request) { create_merge_request_closing_issue(user, project, issue) }
subject { project_level }
generate_cycle_analytics_spec(
phase: :test,
data_fn: lambda do |context|
issue = context.issue
merge_request = context.create_merge_request_closing_issue(context.user, context.project, issue)
pipeline = context.create(:ci_pipeline, ref: merge_request.source_branch, sha: merge_request.diff_head_sha, project: context.project, head_pipeline_of: merge_request)
{ pipeline: pipeline, issue: issue }
end,
start_time_conditions: [["pipeline is started", -> (context, data) { data[:pipeline].run! }]],
end_time_conditions: [["pipeline is finished", -> (context, data) { data[:pipeline].succeed! }]],
post_fn: -> (context, data) do
end)
context "when the pipeline is for a regular merge request (that doesn't close an issue)" do
it "returns nil" do
pipeline = create(:ci_pipeline, ref: "refs/heads/#{merge_request.source_branch}", sha: merge_request.diff_head_sha)
pipeline.run!
pipeline.succeed!
expect(subject[:test].project_median).to be_nil
end
end
context "when the pipeline is not for a merge request" do
it "returns nil" do
pipeline = create(:ci_pipeline, ref: "refs/heads/master", sha: project.repository.commit('master').sha)
pipeline.run!
pipeline.succeed!
expect(subject[:test].project_median).to be_nil
end
end
context "when the pipeline is dropped (failed)" do
it "returns nil" do
pipeline = create(:ci_pipeline, ref: "refs/heads/#{merge_request.source_branch}", sha: merge_request.diff_head_sha)
pipeline.run!
pipeline.drop!
expect(subject[:test].project_median).to be_nil
end
end
context "when the pipeline is cancelled" do
it "returns nil" do
pipeline = create(:ci_pipeline, ref: "refs/heads/#{merge_request.source_branch}", sha: merge_request.diff_head_sha)
pipeline.run!
pipeline.cancel!
expect(subject[:test].project_median).to be_nil
end
end
end
......@@ -7,113 +7,95 @@ RSpec.describe 'value stream analytics events' do
let(:project) { create(:project, :repository, public_builds: false) }
let(:issue) { create(:issue, project: project, created_at: 2.days.ago) }
  shared_examples 'value stream analytics events examples' do
    describe 'GET /:namespace/:project/value_stream_analytics/events/issues' do
      before do
        project.add_developer(user)

        3.times do |count|
          travel_to(Time.now + count.days) do
            create_cycle
          end
        end

        deploy_master(user, project)

        login_as(user)
      end

      it 'lists the issue events' do
        get project_cycle_analytics_issue_path(project, format: :json)

        first_issue_iid = project.issues.sort_by_attribute(:created_desc).pluck(:iid).first.to_s

        expect(json_response['events']).not_to be_empty
        expect(json_response['events'].first['iid']).to eq(first_issue_iid)
      end

      it 'lists the plan events' do
        get project_cycle_analytics_plan_path(project, format: :json)

        first_issue_iid = project.issues.sort_by_attribute(:created_desc).pluck(:iid).first.to_s

        expect(json_response['events']).not_to be_empty
        expect(json_response['events'].first['iid']).to eq(first_issue_iid)
      end

      it 'lists the code events' do
        get project_cycle_analytics_code_path(project, format: :json)

        first_mr_iid = project.merge_requests.sort_by_attribute(:created_desc).pluck(:iid).first.to_s

        expect(json_response['events']).not_to be_empty
        expect(json_response['events'].first['iid']).to eq(first_mr_iid)
      end

      it 'lists the test events', :sidekiq_inline do
        get project_cycle_analytics_test_path(project, format: :json)

        expect(json_response['events']).not_to be_empty
        expect(json_response['events'].first['date']).not_to be_empty
      end

      it 'lists the review events' do
        get project_cycle_analytics_review_path(project, format: :json)

        first_mr_iid = project.merge_requests.sort_by_attribute(:created_desc).pluck(:iid).first.to_s

        expect(json_response['events']).not_to be_empty
        expect(json_response['events'].first['iid']).to eq(first_mr_iid)
      end

      it 'lists the staging events', :sidekiq_inline do
        get project_cycle_analytics_staging_path(project, format: :json)

        expect(json_response['events']).not_to be_empty
        expect(json_response['events'].first['date']).not_to be_empty
      end

      context 'with private project and builds' do
        before do
          project.members.last.update(access_level: Gitlab::Access::GUEST)
        end

        it 'does not list the test events' do
          get project_cycle_analytics_test_path(project, format: :json)

          expect(response).to have_gitlab_http_status(:not_found)
        end

        it 'does not list the staging events' do
          get project_cycle_analytics_staging_path(project, format: :json)

          expect(response).to have_gitlab_http_status(:not_found)
        end

        it 'lists the issue events' do
          get project_cycle_analytics_issue_path(project, format: :json)

          expect(response).to have_gitlab_http_status(:ok)
        end
      end
    end
  end

  describe 'when new_project_level_vsa_backend feature flag is off' do
    before do
      stub_feature_flags(new_project_level_vsa_backend: false, thing: project)
    end

    it_behaves_like 'value stream analytics events examples'
  end

  describe 'when new_project_level_vsa_backend feature flag is on' do
    before do
      stub_feature_flags(new_project_level_vsa_backend: true, thing: project)
    end

    it_behaves_like 'value stream analytics events examples'
  end

  def create_cycle
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe AnalyticsStageSerializer do
subject do
described_class.new.represent(resource)
end
let(:resource) do
Gitlab::CycleAnalytics::CodeStage.new(options: { project: double })
end
before do
allow_any_instance_of(Gitlab::CycleAnalytics::BaseStage).to receive(:project_median).and_return(1.12)
allow_any_instance_of(Gitlab::CycleAnalytics::BaseEventFetcher).to receive(:event_result).and_return({})
end
it 'generates payload for single object' do
expect(subject).to be_kind_of Hash
end
it 'contains important elements of AnalyticsStage' do
expect(subject).to include(:title, :description, :value)
end
context 'when median is equal 0' do
before do
allow_any_instance_of(Gitlab::CycleAnalytics::BaseStage).to receive(:project_median).and_return(0)
end
it 'sets the value to nil' do
expect(subject.fetch(:value)).to be_nil
end
end
context 'when median is below 1' do
before do
allow_any_instance_of(Gitlab::CycleAnalytics::BaseStage).to receive(:project_median).and_return(0.12)
end
it 'sets the value equal to the median' do
expect(subject.fetch(:value)).to eq('less than a minute')
end
end
context 'when median is above 1' do
before do
allow_any_instance_of(Gitlab::CycleAnalytics::BaseStage).to receive(:project_median).and_return(60.12)
end
it 'sets the value equal to the median' do
expect(subject.fetch(:value)).to eq('1 minute')
end
end
end
# frozen_string_literal: true
require 'spec_helper'
ISSUES_MEDIAN = 30.minutes.to_i
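# The stage specs build their fixtures so that the median stage duration works
# out to 30 minutes, matching this constant.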
RSpec.shared_examples 'base stage' do
let(:stage) { described_class.new(options: { project: double }) }
before do
allow(stage).to receive(:project_median).and_return(1.12)
allow_next_instance_of(Gitlab::CycleAnalytics::BaseEventFetcher) do |instance|
allow(instance).to receive(:event_result).and_return({})
end
end
it 'has the median data value' do
expect(stage.as_json[:value]).not_to be_nil
end
it 'has the median data stage' do
expect(stage.as_json[:title]).not_to be_nil
end
it 'has the median data description' do
expect(stage.as_json[:description]).not_to be_nil
end
it 'has the title' do
expect(stage.title).to eq(stage_name.to_s.capitalize)
end
it 'has the events' do
expect(stage.events).not_to be_nil
end
end
RSpec.shared_examples 'calculate #median with date range' do
context 'when valid date range is given' do
before do
stage_options[:from] = 5.days.ago
stage_options[:to] = 5.days.from_now
end
it { expect(stage.project_median).to eq(ISSUES_MEDIAN) }
end
context 'when records are out of the date range' do
before do
stage_options[:from] = 2.years.ago
stage_options[:to] = 1.year.ago
end
it { expect(stage.project_median).to eq(nil) }
end
end
RSpec.shared_examples 'Gitlab::Analytics::CycleAnalytics::DataCollector backend examples' do
let(:stage_params) { Gitlab::Analytics::CycleAnalytics::DefaultStages.send("params_for_#{stage_name}_stage").merge(project: project) }
let(:stage) { Analytics::CycleAnalytics::ProjectStage.new(stage_params) }
let(:data_collector) { Gitlab::Analytics::CycleAnalytics::DataCollector.new(stage: stage, params: { from: stage_options[:from], current_user: project.creator }) }
let(:attribute_to_verify) { :title }
context 'provides the same results as the old implementation' do
it 'for the median' do
expect(data_collector.median.seconds).to be_within(0.5).of(ISSUES_MEDIAN)
end
it 'for the list of event records' do
records = data_collector.records_fetcher.serialized_records
expect(records.map { |event| event[attribute_to_verify] }).to eq(expected_ordered_attribute_values)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.shared_examples 'default query config' do
let(:project) { create(:project) }
let(:event) { described_class.new(stage: stage_name, options: { from: 1.day.ago, project: project }) }
it 'has the stage attribute' do
expect(event.stage).not_to be_nil
end
it 'has the projection attributes' do
expect(event.projections).not_to be_nil
end
end