Commit 4d7f479a authored by Fabio Pitino

Merge branch 'feature/gb/add-pipeline-creation-chain-logger' into 'master'

Instrument pipeline creation chain with a new logger

See merge request gitlab-org/gitlab!72996
parents c867a760 451b4cc9
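
This change threads a new `Gitlab::Ci::Pipeline::Logger` through pipeline creation. As a reading aid, here is a minimal sketch of the lifecycle the diff below implements, using only names that appear in the diff; `create_pipeline` is a hypothetical stand-in for the instrumented work:

logger = Gitlab::Ci::Pipeline::Logger.new(project: project) do |l|
  # Register a condition; an entry is only written when some condition passes.
  l.log_when do |observations|
    values = observations['pipeline_save_duration_s']
    values.any? && values.max >= 3.seconds
  end
end

# instrument records the block's duration as "<operation>_duration_s".
pipeline = logger.instrument(:pipeline_save) { create_pipeline }

# observe records an arbitrary value under the given key.
logger.observe(:pipeline_size_count, pipeline.total_size)

# commit flushes the aggregated observations to Gitlab::AppJsonLogger.
logger.commit(pipeline: pipeline, caller: self.class.name)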
......@@ -2,10 +2,14 @@
module Ci
class CreatePipelineService < BaseService
attr_reader :pipeline
attr_reader :pipeline, :logger
CreateError = Class.new(StandardError)
LOG_MAX_DURATION_THRESHOLD = 3.seconds
LOG_MAX_PIPELINE_SIZE = 2_000
LOG_MAX_CREATION_THRESHOLD = 20.seconds
SEQUENCE = [Gitlab::Ci::Pipeline::Chain::Build,
Gitlab::Ci::Pipeline::Chain::Build::Associations,
Gitlab::Ci::Pipeline::Chain::Validate::Abilities,
......@@ -53,6 +57,7 @@ module Ci
# @return [Ci::Pipeline] The created Ci::Pipeline object.
# rubocop: disable Metrics/ParameterLists
def execute(source, ignore_skip_ci: false, save_on_errors: true, trigger_request: nil, schedule: nil, merge_request: nil, external_pull_request: nil, bridge: nil, **options, &block)
@logger = build_logger
@pipeline = Ci::Pipeline.new
command = Gitlab::Ci::Pipeline::Chain::Command.new(
......@@ -76,6 +81,7 @@ module Ci
push_options: params[:push_options] || {},
chat_data: params[:chat_data],
bridge: bridge,
logger: @logger,
**extra_options(**options))
# Ensure we never persist the pipeline when dry_run: true
......@@ -98,6 +104,9 @@ module Ci
else
ServiceResponse.success(payload: pipeline)
end
ensure
@logger.commit(pipeline: pipeline, caller: self.class.name)
end
# rubocop: enable Metrics/ParameterLists
......@@ -135,6 +144,32 @@ module Ci
def extra_options(content: nil, dry_run: false)
{ content: content, dry_run: dry_run }
end
def build_logger
Gitlab::Ci::Pipeline::Logger.new(project: project) do |l|
l.log_when do |observations|
observations.any? do |name, values|
values.any? &&
name.to_s.end_with?('duration_s') &&
values.max >= LOG_MAX_DURATION_THRESHOLD
end
end
l.log_when do |observations|
values = observations['pipeline_size_count']
next false if values.empty?
values.max >= LOG_MAX_PIPELINE_SIZE
end
l.log_when do |observations|
values = observations['pipeline_creation_duration_s']
next false if values.empty?
values.max >= LOG_MAX_CREATION_THRESHOLD
end
end
end
end
end
......
---
name: ci_pipeline_creation_logger
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/72996
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/345779
milestone: '14.5'
type: ops
group: group::pipeline execution
default_enabled: false
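
Because the flag ships with `default_enabled: false`, the logger stays dormant until the ops flag is enabled, e.g. per project from a Rails console (a sketch; the project path is illustrative):

# Enable the ops flag for one project; omit the actor to enable it globally.
Feature.enable(:ci_pipeline_creation_logger, Project.find_by_full_path('group/project'))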
......@@ -17,21 +17,27 @@ module Gitlab
Config::Yaml::Tags::TagError
].freeze
attr_reader :root, :context, :source_ref_path, :source
attr_reader :root, :context, :source_ref_path, :source, :logger
def initialize(config, project: nil, pipeline: nil, sha: nil, user: nil, parent_pipeline: nil, source: nil)
def initialize(config, project: nil, pipeline: nil, sha: nil, user: nil, parent_pipeline: nil, source: nil, logger: nil)
@logger = logger || ::Gitlab::Ci::Pipeline::Logger.new(project: project)
@source_ref_path = pipeline&.source_ref_path
@context = build_context(project: project, pipeline: pipeline, sha: sha, user: user, parent_pipeline: parent_pipeline)
@context = self.logger.instrument(:config_build_context) do
build_context(project: project, pipeline: pipeline, sha: sha, user: user, parent_pipeline: parent_pipeline)
end
@context.set_deadline(TIMEOUT_SECONDS)
@source = source
@config = expand_config(config)
@root = Entry::Root.new(@config)
@root.compose!
@config = self.logger.instrument(:config_expand) do
expand_config(config)
end
@root = self.logger.instrument(:config_compose) do
Entry::Root.new(@config).tap(&:compose!)
end
rescue *rescue_errors => e
raise Config::ConfigError, e.message
end
......@@ -94,11 +100,25 @@ module Gitlab
end
def build_config(config)
initial_config = Config::Yaml.load!(config)
initial_config = Config::External::Processor.new(initial_config, @context).perform
initial_config = Config::Extendable.new(initial_config).to_hash
initial_config = Config::Yaml::Tags::Resolver.new(initial_config).to_hash
Config::EdgeStagesInjector.new(initial_config).to_hash
initial_config = logger.instrument(:config_yaml_load) do
Config::Yaml.load!(config)
end
initial_config = logger.instrument(:config_external_process) do
Config::External::Processor.new(initial_config, @context).perform
end
initial_config = logger.instrument(:config_yaml_extend) do
Config::Extendable.new(initial_config).to_hash
end
initial_config = logger.instrument(:config_tags_resolve) do
Config::Yaml::Tags::Resolver.new(initial_config).to_hash
end
logger.instrument(:config_stages_inject) do
Config::EdgeStagesInjector.new(initial_config).to_hash
end
end
def find_sha(project)
......@@ -115,10 +135,20 @@ module Gitlab
sha: sha || find_sha(project),
user: user,
parent_pipeline: parent_pipeline,
variables: build_variables(project: project, pipeline: pipeline))
variables: build_variables(project: project, pipeline: pipeline),
logger: logger)
end
def build_variables(project:, pipeline:)
logger.instrument(:config_build_variables) do
build_variables_without_instrumentation(
project: project,
pipeline: pipeline
)
end
end
def build_variables_without_instrumentation(project:, pipeline:)
Gitlab::Ci::Variables::Collection.new.tap do |variables|
break variables unless project
......
......@@ -10,9 +10,11 @@ module Gitlab
TimeoutError = Class.new(StandardError)
attr_reader :project, :sha, :user, :parent_pipeline, :variables
attr_reader :expandset, :execution_deadline
attr_reader :expandset, :execution_deadline, :logger
def initialize(project: nil, sha: nil, user: nil, parent_pipeline: nil, variables: [])
delegate :instrument, to: :logger
def initialize(project: nil, sha: nil, user: nil, parent_pipeline: nil, variables: [], logger: nil)
@project = project
@sha = sha
@user = user
......@@ -20,6 +22,7 @@ module Gitlab
@variables = variables
@expandset = Set.new
@execution_deadline = 0
@logger = logger || Gitlab::Ci::Pipeline::Logger.new(project: project)
yield self if block_given?
end
......@@ -40,6 +43,7 @@ module Gitlab
self.class.new(**attrs) do |ctx|
ctx.expandset = expandset
ctx.execution_deadline = execution_deadline
ctx.logger = logger
end
end
......@@ -60,7 +64,7 @@ module Gitlab
protected
attr_writer :expandset, :execution_deadline
attr_writer :expandset, :execution_deadline, :logger
private
......
......@@ -30,6 +30,18 @@ module Gitlab
def process
return [] if locations.empty?
logger.instrument(:config_mapper_process) do
process_without_instrumentation
end
end
private
attr_reader :locations, :context
delegate :expandset, :logger, to: :context
def process_without_instrumentation
locations
.compact
.map(&method(:normalize_location))
......@@ -41,14 +53,14 @@ module Gitlab
.map(&method(:select_first_matching))
end
private
attr_reader :locations, :context
delegate :expandset, to: :context
def normalize_location(location)
logger.instrument(:config_mapper_normalize) do
normalize_location_without_instrumentation(location)
end
end
# Convert a String location into its canonical Hash form
def normalize_location(location)
def normalize_location_without_instrumentation(location)
if location.is_a?(String)
expanded_location = expand_variables(location)
normalize_location_string(expanded_location)
......@@ -58,6 +70,12 @@ module Gitlab
end
def verify_rules(location)
logger.instrument(:config_mapper_rules) do
verify_rules_without_instrumentation(location)
end
end
def verify_rules_without_instrumentation(location)
return unless Rules.new(location[:rules]).evaluate(context).pass?
location
......@@ -72,6 +90,12 @@ module Gitlab
end
def expand_wildcard_paths(location)
logger.instrument(:config_mapper_wildcards) do
expand_wildcard_paths_without_instrumentation(location)
end
end
def expand_wildcard_paths_without_instrumentation(location)
# We only support local files for wildcard paths
return location unless location[:local] && location[:local].include?('*')
......@@ -89,6 +113,12 @@ module Gitlab
end
def verify_duplicates!(location)
logger.instrument(:config_mapper_verify) do
verify_duplicates_without_instrumentation!(location)
end
end
def verify_duplicates_without_instrumentation!(location)
if expandset.count >= MAX_INCLUDES
raise TooManyIncludesError, "Maximum of #{MAX_INCLUDES} nested includes are allowed!"
end
......@@ -106,6 +136,12 @@ module Gitlab
end
def select_first_matching(location)
logger.instrument(:config_mapper_select) do
select_first_matching_without_instrumentation(location)
end
end
def select_first_matching_without_instrumentation(location)
matching = FILE_CLASSES.map do |file_class|
file_class.new(location, context)
end.select(&:matching?)
......@@ -116,6 +152,12 @@ module Gitlab
end
def expand_variables(data)
logger.instrument(:config_mapper_variables) do
expand_variables_without_instrumentation(data)
end
end
def expand_variables_without_instrumentation(data)
if data.is_a?(String)
expand(data)
else
......
......@@ -7,10 +7,13 @@ module Gitlab
class Processor
IncludeError = Class.new(StandardError)
attr_reader :context, :logger
def initialize(values, context)
@values = values
@external_files = External::Mapper.new(values, context).process
@content = {}
@logger = context.logger
rescue External::Mapper::Error,
OpenSSL::SSL::SSLError => e
raise IncludeError, e.message
......@@ -29,13 +32,17 @@ module Gitlab
def validate_external_files!
@external_files.each do |file|
raise IncludeError, file.error_message unless file.valid?
logger.instrument(:config_external_verify) do
raise IncludeError, file.error_message unless file.valid?
end
end
end
def merge_external_files!
@external_files.each do |file|
@content.deep_merge!(file.to_hash)
logger.instrument(:config_external_merge) do
@content.deep_merge!(file.to_hash)
end
end
end
......
......@@ -7,7 +7,7 @@ module Gitlab
class Base
attr_reader :pipeline, :command, :config
delegate :project, :current_user, :parent_pipeline, to: :command
delegate :project, :current_user, :parent_pipeline, :logger, to: :command
def initialize(pipeline, command)
@pipeline = pipeline
......
......@@ -11,7 +11,7 @@ module Gitlab
:trigger_request, :schedule, :merge_request, :external_pull_request,
:ignore_skip_ci, :save_incompleted,
:seeds_block, :variables_attributes, :push_options,
:chat_data, :allow_mirror_update, :bridge, :content, :dry_run,
:chat_data, :allow_mirror_update, :bridge, :content, :dry_run, :logger,
# These attributes are set by Chains during processing:
:config_content, :yaml_processor_result, :workflow_rules_result, :pipeline_seed
) do
......@@ -88,7 +88,14 @@ module Gitlab
@metrics ||= ::Gitlab::Ci::Pipeline::Metrics
end
def logger
self[:logger] ||= ::Gitlab::Ci::Pipeline::Logger.new(project: project)
end
def observe_step_duration(step_class, duration)
step = step_class.name.underscore.parameterize(separator: '_')
logger.observe("pipeline_step_#{step}_duration_s", duration)
if Feature.enabled?(:ci_pipeline_creation_step_duration_tracking, type: :ops, default_enabled: :yaml)
metrics.pipeline_creation_step_duration_histogram
.observe({ step: step_class.name }, duration.seconds)
......@@ -96,11 +103,15 @@ module Gitlab
end
def observe_creation_duration(duration)
logger.observe(:pipeline_creation_duration_s, duration)
metrics.pipeline_creation_duration_histogram
.observe({}, duration.seconds)
end
def observe_pipeline_size(pipeline)
logger.observe(:pipeline_size_count, pipeline.total_size)
metrics.pipeline_size_histogram
.observe({ source: pipeline.source.to_s }, pipeline.total_size)
end
......
......@@ -11,16 +11,21 @@ module Gitlab
def perform!
raise ArgumentError, 'missing config content' unless @command.config_content
result = ::Gitlab::Ci::YamlProcessor.new(
@command.config_content, {
project: project,
pipeline: @pipeline,
sha: @pipeline.sha,
source: @pipeline.source,
user: current_user,
parent_pipeline: parent_pipeline
}
).execute
result = logger.instrument(:pipeline_config_process) do
processor = ::Gitlab::Ci::YamlProcessor.new(
@command.config_content, {
project: project,
pipeline: @pipeline,
sha: @pipeline.sha,
source: @pipeline.source,
user: current_user,
parent_pipeline: parent_pipeline,
logger: logger
}
)
processor.execute
end
add_warnings_to_pipeline(result.warnings)
......
......@@ -8,8 +8,10 @@ module Gitlab
include Chain::Helpers
def perform!
BulkInsertableAssociations.with_bulk_insert do
pipeline.save!
logger.instrument(:pipeline_save) do
BulkInsertableAssociations.with_bulk_insert do
pipeline.save!
end
end
rescue ActiveRecord::RecordInvalid => e
error("Failed to persist the pipeline: #{e}")
......
......@@ -13,8 +13,10 @@ module Gitlab
raise ArgumentError, 'missing workflow rules result' unless @command.workflow_rules_result
# Allocate the next IID. This operation must happen outside of pipeline creation transactions.
pipeline.ensure_project_iid!
pipeline.ensure_ci_ref!
logger.instrument(:pipeline_allocate_seed_attributes) do
pipeline.ensure_project_iid!
pipeline.ensure_ci_ref!
end
# Protect the pipeline. This is assigned in Populate instead of
# Build to prevent erroring out on ambiguous refs.
......@@ -23,8 +25,12 @@ module Gitlab
##
# Gather all runtime build/stage errors
#
if pipeline_seed.errors
return error(pipeline_seed.errors.join("\n"), config_error: true)
seed_errors = logger.instrument(:pipeline_seed_evaluation) do
pipeline_seed.errors
end
if seed_errors
return error(seed_errors.join("\n"), config_error: true)
end
@command.pipeline_seed = pipeline_seed
......@@ -38,8 +44,11 @@ module Gitlab
def pipeline_seed
strong_memoize(:pipeline_seed) do
stages_attributes = @command.yaml_processor_result.stages_attributes
Gitlab::Ci::Pipeline::Seed::Pipeline.new(context, stages_attributes)
logger.instrument(:pipeline_seed_initialization) do
stages_attributes = @command.yaml_processor_result.stages_attributes
Gitlab::Ci::Pipeline::Seed::Pipeline.new(context, stages_attributes)
end
end
end
......@@ -48,9 +57,11 @@ module Gitlab
end
def root_variables
::Gitlab::Ci::Variables::Helpers.merge_variables(
@command.yaml_processor_result.root_variables, @command.workflow_rules_result.variables
)
logger.instrument(:pipeline_seed_merge_variables) do
::Gitlab::Ci::Variables::Helpers.merge_variables(
@command.yaml_processor_result.root_variables, @command.workflow_rules_result.variables
)
end
end
end
end
......
......@@ -9,30 +9,36 @@ module Gitlab
@pipeline = pipeline
@command = command
@sequence = sequence
@start = Time.now
@start = current_monotonic_time
end
def build!
@sequence.each do |step_class|
step_start = ::Gitlab::Metrics::System.monotonic_time
step_start = current_monotonic_time
step = step_class.new(@pipeline, @command)
step.perform!
@command.observe_step_duration(
step_class,
::Gitlab::Metrics::System.monotonic_time - step_start
current_monotonic_time - step_start
)
break if step.break?
end
@command.observe_creation_duration(Time.now - @start)
@command.observe_creation_duration(current_monotonic_time - @start)
@command.observe_pipeline_size(@pipeline)
@command.observe_jobs_count_in_alive_pipelines
@pipeline
end
private
def current_monotonic_time
::Gitlab::Metrics::System.monotonic_time
end
end
end
end
......
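
The replacement of `Time.now` with `::Gitlab::Metrics::System.monotonic_time` above matters because wall-clock time can jump backwards or forwards (e.g. NTP adjustments), while the monotonic clock only moves forward. The measurement pattern, with `do_work` as a hypothetical placeholder:

t0 = ::Gitlab::Metrics::System.monotonic_time
do_work
elapsed_s = ::Gitlab::Metrics::System.monotonic_time - t0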
# frozen_string_literal: true
module Gitlab
module Ci
module Pipeline
class Logger
include ::Gitlab::Utils::StrongMemoize
def self.current_monotonic_time
::Gitlab::Metrics::System.monotonic_time
end
def initialize(project:, destination: Gitlab::AppJsonLogger)
@started_at = current_monotonic_time
@project = project
@destination = destination
@log_conditions = []
yield(self) if block_given?
end
def log_when(&block)
log_conditions.push(block)
end
def instrument(operation)
raise ArgumentError, 'block not given' unless block_given?
return yield unless enabled?
op_started_at = current_monotonic_time
result = yield
observe("#{operation}_duration_s", current_monotonic_time - op_started_at)
result
end
def observe(operation, value)
return unless enabled?
observations[operation.to_s].push(value)
end
def commit(pipeline:, caller:)
return unless log?
attributes = {
caller: caller,
project_id: project.id,
pipeline_id: pipeline.id,
persisted: pipeline.persisted?,
source: pipeline.source,
duration_s: age
}.stringify_keys.merge(observations_hash)
destination.info(attributes)
end
def observations_hash
observations.transform_values do |values|
next if values.empty?
{
'count' => values.size,
'min' => values.min,
'max' => values.max,
'avg' => values.sum / values.size
}
end.compact
end
private
attr_reader :project, :destination, :started_at, :log_conditions
delegate :current_monotonic_time, to: :class
def age
current_monotonic_time - started_at
end
def log?
return false unless enabled?
return true if log_conditions.empty?
log_conditions.any? { |cond| cond.call(observations) }
end
def enabled?
strong_memoize(:enabled) do
::Feature.enabled?(:ci_pipeline_creation_logger, project, type: :ops, default_enabled: :yaml)
end
end
def observations
@observations ||= Hash.new { |hash, key| hash[key] = [] }
end
end
end
end
end
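
For reference, `#commit` merges the fixed attributes with `observations_hash`, so a flushed entry is shaped roughly like this Ruby hash (values illustrative):

{
  'caller' => 'Ci::CreatePipelineService',
  'project_id' => 42,
  'pipeline_id' => 1234,
  'persisted' => true,
  'source' => 'push',
  'duration_s' => 21.3,
  'pipeline_save_duration_s' => { 'count' => 1, 'min' => 4.1, 'max' => 4.1, 'avg' => 4.1 }
}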
......@@ -29,10 +29,8 @@ module Gitlab
run_logical_validations!
Result.new(ci_config: @ci_config, warnings: @ci_config&.warnings)
rescue Gitlab::Ci::Config::ConfigError => e
Result.new(ci_config: @ci_config, errors: [e.message], warnings: @ci_config&.warnings)
rescue ValidationError => e
Result.new(ci_config: @ci_config, errors: [e.message], warnings: @ci_config&.warnings)
end
......
......@@ -94,6 +94,15 @@ RSpec.describe Gitlab::Ci::Config::External::Context do
end
describe '#mutate' do
let(:attributes) do
{
project: project,
user: user,
sha: sha,
logger: double('logger')
}
end
shared_examples 'a mutated context' do
let(:mutated) { subject.mutate(new_attributes) }
......@@ -107,6 +116,7 @@ RSpec.describe Gitlab::Ci::Config::External::Context do
it { expect(mutated).to have_attributes(new_attributes) }
it { expect(mutated.expandset).to eq(subject.expandset) }
it { expect(mutated.execution_deadline).to eq(subject.execution_deadline) }
it { expect(mutated.logger).to eq(subject.logger) }
end
context 'with attributes' do
......
......@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
include StubRequests
let_it_be(:project) { create(:project, :repository) }
let_it_be(:another_project) { create(:project, :repository) }
let_it_be_with_reload(:another_project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:sha) { '12345' }
......@@ -251,6 +251,17 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
it 'properly expands all includes' do
is_expected.to include(:my_build, :remote_build, :rspec)
end
it 'propagates the pipeline logger' do
processor.perform
process_obs_count = processor
.logger
.observations_hash
.dig('config_mapper_process_duration_s', 'count')
expect(process_obs_count).to eq(3)
end
end
context 'when user is reporter of another project' do
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ::Gitlab::Ci::Pipeline::Logger do
let_it_be(:project) { build_stubbed(:project) }
let_it_be(:pipeline) { build_stubbed(:ci_pipeline, project: project) }
subject(:logger) { described_class.new(project: project) }
describe '#log_when' do
it 'stores blocks for later evaluation' do
logger.log_when { |obs| true }
expect(logger.send(:log_conditions).first).to be_a(Proc)
end
end
describe '#instrument' do
it "returns the block's value" do
expect(logger.instrument(:expensive_operation) { 123 }).to eq(123)
end
it 'records durations of instrumented operations' do
loggable_data = {
'expensive_operation_duration_s' => {
'count' => 1,
'avg' => a_kind_of(Numeric),
'max' => a_kind_of(Numeric),
'min' => a_kind_of(Numeric)
}
}
logger.instrument(:expensive_operation) { 123 }
expect(logger.observations_hash).to match(a_hash_including(loggable_data))
end
it 'raises an error when block is not provided' do
expect { logger.instrument(:expensive_operation) }
.to raise_error(ArgumentError, 'block not given')
end
end
describe '#observe' do
it 'records values for observed operations' do
loggable_data = {
'pipeline_creation_duration_s' => {
'avg' => 30, 'count' => 1, 'max' => 30, 'min' => 30
}
}
expect(logger.observe(:pipeline_creation_duration_s, 30)).to be_truthy
expect(logger.observations_hash).to match(a_hash_including(loggable_data))
end
end
describe '#commit' do
subject(:commit) { logger.commit(pipeline: pipeline, caller: 'source') }
before do
stub_feature_flags(ci_pipeline_creation_logger: flag)
allow(logger).to receive(:current_monotonic_time) { Time.current.to_i }
logger.instrument(:pipeline_save) { travel(60.seconds) }
logger.observe(:pipeline_creation_duration_s, 30)
logger.observe(:pipeline_creation_duration_s, 10)
end
context 'when the feature flag is enabled' do
let(:flag) { true }
let(:loggable_data) do
{
'pipeline_id' => pipeline.id,
'persisted' => true,
'project_id' => project.id,
'duration_s' => a_kind_of(Numeric),
'caller' => 'source',
'source' => pipeline.source,
'pipeline_save_duration_s' => {
'avg' => 60, 'count' => 1, 'max' => 60, 'min' => 60
},
'pipeline_creation_duration_s' => {
'avg' => 20, 'count' => 2, 'max' => 30, 'min' => 10
}
}
end
it 'logs to application.json' do
expect(Gitlab::AppJsonLogger)
.to receive(:info)
.with(a_hash_including(loggable_data))
.and_call_original
expect(commit).to be_truthy
end
context 'with log conditions' do
it 'does not log when the conditions are false' do
logger.log_when { |_obs| false }
expect(Gitlab::AppJsonLogger).not_to receive(:info)
expect(commit).to be_falsey
end
it 'logs when a condition is true' do
logger.log_when { |_obs| true }
logger.log_when { |_obs| false }
expect(Gitlab::AppJsonLogger)
.to receive(:info)
.with(a_hash_including(loggable_data))
.and_call_original
expect(commit).to be_truthy
end
end
end
context 'when the feature flag is disabled' do
let(:flag) { false }
it 'does not log' do
expect(Gitlab::AppJsonLogger).not_to receive(:info)
expect(commit).to be_falsey
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Ci::CreatePipelineService do
context 'pipeline logger' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.owner }
let(:ref) { 'refs/heads/master' }
let(:service) { described_class.new(project, user, { ref: ref }) }
let(:pipeline) { service.execute(:push).payload }
let(:file_location) { 'spec/fixtures/gitlab/ci/external_files/.gitlab-ci-template-1.yml' }
before do
stub_ci_pipeline_yaml_file(gitlab_ci_yaml)
end
let(:counters) do
{
'count' => a_kind_of(Numeric),
'avg' => a_kind_of(Numeric),
'max' => a_kind_of(Numeric),
'min' => a_kind_of(Numeric)
}
end
let(:loggable_data) do
{
'caller' => 'Ci::CreatePipelineService',
'source' => 'push',
'pipeline_id' => a_kind_of(Numeric),
'persisted' => true,
'project_id' => project.id,
'duration_s' => a_kind_of(Numeric),
'pipeline_creation_duration_s' => counters,
'pipeline_size_count' => counters,
'pipeline_step_gitlab_ci_pipeline_chain_seed_duration_s' => counters
}
end
context 'when the duration is under the threshold' do
it 'does not create a log entry but collects the data' do
expect(Gitlab::AppJsonLogger).not_to receive(:info)
expect(pipeline).to be_created_successfully
expect(service.logger.observations_hash)
.to match(
a_hash_including(
'pipeline_creation_duration_s' => counters,
'pipeline_size_count' => counters,
'pipeline_step_gitlab_ci_pipeline_chain_seed_duration_s' => counters
)
)
end
end
context 'when the duration exceeds the threshold' do
let(:timer) do
proc do
@timer = @timer.to_i + 30
end
end
before do
allow(Gitlab::Ci::Pipeline::Logger)
.to receive(:current_monotonic_time) { timer.call }
end
it 'creates a log entry' do
expect(Gitlab::AppJsonLogger)
.to receive(:info)
.with(a_hash_including(loggable_data))
.and_call_original
expect(pipeline).to be_created_successfully
end
context 'when the pipeline is not persisted' do
let(:loggable_data) do
{
'caller' => 'Ci::CreatePipelineService',
'source' => 'push',
'pipeline_id' => nil,
'persisted' => false,
'project_id' => project.id,
'duration_s' => a_kind_of(Numeric),
'pipeline_step_gitlab_ci_pipeline_chain_seed_duration_s' => counters
}
end
before do
allow_next_instance_of(Ci::Pipeline) do |pipeline|
expect(pipeline).to receive(:save!).and_raise(RuntimeError)
end
end
it 'creates a log entry' do
expect(Gitlab::AppJsonLogger)
.to receive(:info)
.with(a_hash_including(loggable_data))
.and_call_original
expect { pipeline }.to raise_error(RuntimeError)
end
end
context 'when the feature flag is disabled' do
before do
stub_feature_flags(ci_pipeline_creation_logger: false)
end
it 'does not create a log entry' do
expect(Gitlab::AppJsonLogger).not_to receive(:info)
expect(pipeline).to be_created_successfully
expect(service.logger.observations_hash).to eq({})
end
end
end
context 'when the size exceeds the threshold' do
before do
allow_next_instance_of(Ci::Pipeline) do |pipeline|
allow(pipeline).to receive(:total_size) { 5000 }
end
end
it 'creates a log entry' do
expect(Gitlab::AppJsonLogger)
.to receive(:info)
.with(a_hash_including(loggable_data))
.and_call_original
expect(pipeline).to be_created_successfully
end
end
end
end