Commit 1ee162b6 authored by Kamil Trzciński, committed by Bob Van Landuyt

Refactor Sentry handling

Rename methods of the Sentry class:
- `track_acceptable_exception` => `track_exception`: we only want
  to capture the exception
- `track_exception` => `track_and_raise_for_dev_exception`: captures the
  exception and re-raises it only in development and test
- `track_and_raise_exception` (new): we want to capture
  and re-raise the exception

Update exception tracking

- Remove `extra:` and instead accept a plain hash of extra context
  (see the sketch below),
- Update documentation to include best practices,
- Remove manual logging of exceptions
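
For illustration, a minimal before/after sketch of the call-style change at a hypothetical call site (`import!` and `import_repository` are illustrative and not part of this commit):

```ruby
def import!(project)
  project.import_repository
rescue Gitlab::Shell::Error => e
  # Before this commit:
  #   Gitlab::Sentry.track_acceptable_exception(e, extra: { project_id: project.id })
  # After this commit: same behaviour, renamed method, context passed as a plain hash:
  Gitlab::Sentry.track_exception(e, project_id: project.id)
end
```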
parent bc085f11
......@@ -23,7 +23,7 @@ class ApplicationController < ActionController::Base
before_action :validate_user_service_ticket!
before_action :check_password_expiration, if: :html_request?
before_action :ldap_security_check
before_action :sentry_context
around_action :sentry_context
before_action :default_headers
before_action :add_gon_variables, if: :html_request?
before_action :configure_permitted_parameters, if: :devise_controller?
......@@ -165,7 +165,7 @@ class ApplicationController < ActionController::Base
end
def log_exception(exception)
Gitlab::Sentry.track_acceptable_exception(exception)
Gitlab::Sentry.track_exception(exception)
backtrace_cleaner = request.env["action_dispatch.backtrace_cleaner"]
application_trace = ActionDispatch::ExceptionWrapper.new(backtrace_cleaner, exception).application_trace
......@@ -532,8 +532,8 @@ class ApplicationController < ActionController::Base
@impersonator ||= User.find(session[:impersonator_id]) if session[:impersonator_id]
end
def sentry_context
Gitlab::Sentry.context(current_user)
def sentry_context(&block)
Gitlab::Sentry.with_context(current_user, &block)
end
def allow_gitaly_ref_name_caching
......
......@@ -98,13 +98,11 @@ module IssuableActions
error_message = "Destroy confirmation not provided for #{issuable.human_class_name}"
exception = RuntimeError.new(error_message)
Gitlab::Sentry.track_acceptable_exception(
Gitlab::Sentry.track_exception(
exception,
extra: {
project_path: issuable.project.full_path,
issuable_type: issuable.class.name,
issuable_id: issuable.id
}
project_path: issuable.project.full_path,
issuable_type: issuable.class.name,
issuable_id: issuable.id
)
index_path = polymorphic_path([parent, issuable.class])
......
......@@ -42,11 +42,9 @@ module IconsHelper
end
def sprite_icon(icon_name, size: nil, css_class: nil)
if Gitlab::Sentry.should_raise_for_dev?
unless known_sprites.include?(icon_name)
exception = ArgumentError.new("#{icon_name} is not a known icon in @gitlab-org/gitlab-svg")
raise exception
end
unless known_sprites.include?(icon_name)
exception = ArgumentError.new("#{icon_name} is not a known icon in @gitlab-org/gitlab-svg")
Gitlab::Sentry.track_and_raise_for_dev_exception(exception)
end
css_classes = []
......
......@@ -57,7 +57,7 @@ module UsersHelper
unless user.association(:status).loaded?
exception = RuntimeError.new("Status was not preloaded")
Gitlab::Sentry.track_exception(exception, extra: { user: user.inspect })
Gitlab::Sentry.track_and_raise_for_dev_exception(exception, user: user.inspect)
end
return unless user.status
......
......@@ -289,7 +289,7 @@ module Ci
begin
build.deployment.drop!
rescue => e
Gitlab::Sentry.track_exception(e, extra: { build_id: build.id })
Gitlab::Sentry.track_and_raise_for_dev_exception(e, build_id: build.id)
end
true
......
......@@ -27,7 +27,7 @@ module Ci
create_ref(sha, path)
rescue => e
Gitlab::Sentry
.track_acceptable_exception(e, extra: { pipeline_id: pipeline.id })
.track_exception(e, pipeline_id: pipeline.id)
end
def delete
......@@ -38,7 +38,7 @@ module Ci
# no-op
rescue => e
Gitlab::Sentry
.track_acceptable_exception(e, extra: { pipeline_id: pipeline.id })
.track_exception(e, pipeline_id: pipeline.id)
end
def path
......
......@@ -71,6 +71,8 @@ module Clusters
# `proxy_url` could raise an exception because gitlab can not communicate with the cluster.
# We check for a nil client in downstream use and behaviour is equivalent to an empty state
log_exception(error, :failed_to_create_elasticsearch_client)
nil
end
end
......
......@@ -335,7 +335,7 @@ module Clusters
rescue Kubeclient::HttpError => e
kubeclient_error_status(e.message)
rescue => e
Gitlab::Sentry.track_acceptable_exception(e, extra: { cluster_id: id })
Gitlab::Sentry.track_exception(e, cluster_id: id)
:unknown_failure
else
......
......@@ -76,7 +76,7 @@ module Clusters
message: error.message
})
Gitlab::Sentry.track_acceptable_exception(error, extra: { cluster_id: cluster&.id, application_id: id })
Gitlab::Sentry.track_exception(error, cluster_id: cluster&.id, application_id: id)
end
end
end
......
......@@ -48,11 +48,11 @@ module GroupDescendant
extras = {
parent: parent.inspect,
child: child.inspect,
preloaded: preloaded.map(&:full_path)
preloaded: preloaded.map(&:full_path),
issue_url: 'https://gitlab.com/gitlab-org/gitlab-foss/issues/49404'
}
issue_url = 'https://gitlab.com/gitlab-org/gitlab-foss/issues/49404'
Gitlab::Sentry.track_exception(exception, issue_url: issue_url, extra: extras)
Gitlab::Sentry.track_and_raise_for_dev_exception(exception, extras)
end
if parent.nil? && hierarchy_top.present?
......
......@@ -37,8 +37,10 @@ module Storage
send_update_instructions
write_projects_repository_config
rescue => e
# Raise if development/test environment, else just notify Sentry
Gitlab::Sentry.track_exception(e, extra: { full_path_before_last_save: full_path_before_last_save, full_path: full_path, action: 'move_dir' })
Gitlab::Sentry.track_and_raise_for_dev_exception(e,
full_path_before_last_save: full_path_before_last_save,
full_path: full_path,
action: 'move_dir')
end
true # false would cancel later callbacks but not rollback
......
......@@ -1514,7 +1514,7 @@ class MergeRequest < ApplicationRecord
end
end
rescue ActiveRecord::LockWaitTimeout => e
Gitlab::Sentry.track_acceptable_exception(e)
Gitlab::Sentry.track_exception(e)
raise RebaseLockTimeout, REBASE_LOCK_MESSAGE
end
......
......@@ -103,10 +103,8 @@ class Upload < ApplicationRecord
# Help sysadmins find missing upload files
if persisted? && !exist
if Gitlab::Sentry.enabled?
Raven.capture_message(_("Upload file does not exist"), extra: self.attributes)
end
exception = RuntimeError.new("Uploaded file does not exist")
Gitlab::Sentry.track_exception(exception, self.attributes)
Gitlab::Metrics.counter(:upload_file_does_not_exist_total, _('The number of times an upload record could not find its file')).increment
end
......
......@@ -23,7 +23,8 @@ module Uploads
unless in_uploads?(path)
message = "Path '#{path}' is not in uploads dir, skipping"
logger.warn(message)
Gitlab::Sentry.track_exception(RuntimeError.new(message), extra: { uploads_dir: storage_dir })
Gitlab::Sentry.track_and_raise_for_dev_exception(
RuntimeError.new(message), uploads_dir: storage_dir)
return
end
......
......@@ -47,9 +47,9 @@ module Ci
job_id: job.id)
Gitlab::Sentry
.track_exception(error,
.track_and_raise_for_dev_exception(error,
issue_url: 'https://gitlab.com/gitlab-org/gitlab-foss/issues/51502',
extra: { job_id: job.id })
job_id: job.id )
end
end
end
......@@ -15,7 +15,7 @@ module Ci
data: data
}
rescue => e
Gitlab::Sentry.track_acceptable_exception(e, extra: { project_id: project.id })
Gitlab::Sentry.track_exception(e, project_id: project.id)
{
status: :error,
key: key(base_pipeline, head_pipeline),
......
......@@ -13,7 +13,7 @@ module Ci
build.enqueue!
rescue => e
Gitlab::Sentry.track_acceptable_exception(e, extra: { build_id: build.id })
Gitlab::Sentry.track_exception(e, build_id: build.id)
build.drop(:unmet_prerequisites)
end
......
......@@ -128,13 +128,13 @@ module Ci
end
def track_exception_for_build(ex, build)
Gitlab::Sentry.track_acceptable_exception(ex, extra: {
Gitlab::Sentry.track_exception(ex,
build_id: build.id,
build_name: build.name,
build_stage: build.stage,
pipeline_id: build.pipeline_id,
project_id: build.project_id
})
)
end
# rubocop: disable CodeReuse/ActiveRecord
......
......@@ -21,14 +21,7 @@ module Clusters
group_ids: app.cluster.group_ids
}
logger_meta = meta.merge(
exception: error.class.name,
message: error.message,
backtrace: Gitlab::Profiler.clean_backtrace(error.backtrace)
)
logger.error(logger_meta)
Gitlab::Sentry.track_acceptable_exception(error, extra: meta)
Gitlab::Sentry.track_exception(error, meta)
end
def log_event(event)
......
......@@ -51,7 +51,7 @@ module Projects
digests = deleted_tags.values.uniq
# rubocop: disable CodeReuse/ActiveRecord
Gitlab::Sentry.track_exception(ArgumentError.new('multiple tag digests')) if digests.many?
Gitlab::Sentry.track_and_raise_for_dev_exception(ArgumentError.new('multiple tag digests')) if digests.many?
deleted_tags
end
......
......@@ -25,13 +25,13 @@ module Projects
success
rescue Gitlab::UrlBlocker::BlockedUrlError => e
Gitlab::Sentry.track_acceptable_exception(e, extra: { project_path: project.full_path, importer: project.import_type })
Gitlab::Sentry.track_exception(e, project_path: project.full_path, importer: project.import_type)
error(s_("ImportProjects|Error importing repository %{project_safe_import_url} into %{project_full_path} - %{message}") % { project_safe_import_url: project.safe_import_url, project_full_path: project.full_path, message: e.message })
rescue => e
message = Projects::ImportErrorFilter.filter_message(e.message)
Gitlab::Sentry.track_acceptable_exception(e, extra: { project_path: project.full_path, importer: project.import_type })
Gitlab::Sentry.track_exception(e, project_path: project.full_path, importer: project.import_type)
error(s_("ImportProjects|Error importing repository %{project_safe_import_url} into %{project_full_path} - %{message}") % { project_safe_import_url: project.safe_import_url, project_full_path: project.full_path, message: message })
end
......
......@@ -32,7 +32,7 @@ module Prometheus
success(result)
rescue TypeError, ArgumentError => exception
log_error(exception.message)
Gitlab::Sentry.track_acceptable_exception(exception, extra: {
Gitlab::Sentry.track_exception(exception, extra: {
template_string: query,
variables: predefined_context
})
......
......@@ -15,7 +15,7 @@ class DeleteStoredFilesWorker
unless klass
message = "Unknown class '#{class_name}'"
logger.error(message)
Gitlab::Sentry.track_exception(RuntimeError.new(message))
Gitlab::Sentry.track_and_raise_for_dev_exception(RuntimeError.new(message))
return
end
......
......@@ -11,7 +11,7 @@ class PagesDomainRemovalCronWorker
PagesDomain.for_removal.find_each do |domain|
domain.destroy!
rescue => e
Raven.capture_exception(e)
Gitlab::Sentry.track_exception(e)
end
end
end
......@@ -39,9 +39,9 @@ class RunPipelineScheduleWorker
"schedule_id: #{schedule.id} message: #{error.message}"
Gitlab::Sentry
.track_exception(error,
.track_and_raise_for_dev_exception(error,
issue_url: 'https://gitlab.com/gitlab-org/gitlab-foss/issues/41231',
extra: { schedule_id: schedule.id })
schedule_id: schedule.id)
end
# rubocop:enable Gitlab/RailsLogger
......
......@@ -80,12 +80,12 @@ class StuckCiJobsWorker
end
def track_exception_for_build(ex, build)
Gitlab::Sentry.track_acceptable_exception(ex, extra: {
Gitlab::Sentry.track_exception(ex,
build_id: build.id,
build_name: build.name,
build_stage: build.stage,
pipeline_id: build.pipeline_id,
project_id: build.project_id
})
)
end
end
......@@ -29,7 +29,7 @@ module Sidekiq
MSG
rescue Sidekiq::Worker::EnqueueFromTransactionError => e
::Rails.logger.error(e.message) if ::Rails.env.production?
Gitlab::Sentry.track_exception(e)
Gitlab::Sentry.track_and_raise_for_dev_exception(e)
end
end
......
......@@ -2,35 +2,4 @@
require 'gitlab/current_settings'
def configure_sentry
if Gitlab::Sentry.enabled?
Raven.configure do |config|
config.dsn = Gitlab.config.sentry.dsn
config.release = Gitlab.revision
config.current_environment = Gitlab.config.sentry.environment
# Sanitize fields based on those sanitized from Rails.
config.sanitize_fields = Rails.application.config.filter_parameters.map(&:to_s)
# Sanitize authentication headers
config.sanitize_http_headers = %w[Authorization Private-Token]
config.tags = { program: Gitlab.process_name }
# Debugging for https://gitlab.com/gitlab-org/gitlab-foss/issues/57727
config.before_send = lambda do |event, hint|
if ActiveModel::MissingAttributeError === hint[:exception]
columns_hash = ActiveRecord::Base
.connection
.schema_cache
.instance_variable_get(:@columns_hash)
.map { |k, v| [k, v.map(&:first)] }
.to_h
event.extra.merge!(columns_hash)
end
event
end
end
end
end
configure_sentry if Rails.env.production? || Rails.env.development?
Gitlab::Sentry.configure
......@@ -127,6 +127,68 @@ importer progresses. Here's what to do:
logger.info(message: "Import error", error_code: 1, error: "I/O failure")
```
## Exception Handling
It often happens that you catch an exception and want to track it.
Note that manual logging of exceptions is not allowed, because:
1. Manually logged exceptions can leak confidential data,
1. Manually logged exceptions very often require cleaning up the backtrace,
   which adds boilerplate,
1. Very often a manually logged exception also needs to be tracked to Sentry,
1. Manually logged exceptions do not use `correlation_id`, which makes it hard
   to tie them to the request, user and context in which the exception was raised,
1. It is very likely that manually logged exceptions will end up scattered across
   multiple log files, which increases the burden of scraping them all.
To avoid duplication and to ensure consistent behavior, `Gitlab::Sentry`
provides helper methods to track exceptions:
1. `Gitlab::Sentry.track_and_raise_exception`: this method logs,
   sends the exception to Sentry (if configured) and re-raises the exception,
1. `Gitlab::Sentry.track_exception`: this method only logs
   and sends the exception to Sentry (if configured),
1. `Gitlab::Sentry.log_exception`: this method only logs the exception,
   and DOES NOT send the exception to Sentry,
1. `Gitlab::Sentry.track_and_raise_for_dev_exception`: this method logs,
   sends the exception to Sentry (if configured) and re-raises the exception
   in development and test environments.
It is advised to only use `Gitlab::Sentry.track_and_raise_exception`
and `Gitlab::Sentry.track_exception`, as shown in the examples below.
Consider passing extra parameters to provide more context
for each tracked exception.
### Example
```ruby
class MyService < ::BaseService
def execute
project.perform_expensive_operation
success
rescue => e
Gitlab::Sentry.track_exception(e, project_id: project.id)
error('Exception occurred')
end
end
```
```ruby
class MyService < ::BaseService
def execute
project.perform_expensive_operation
success
rescue => e
Gitlab::Sentry.track_and_raise_exception(e, project_id: project.id)
end
end
```
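
For completeness, a minimal sketch of the development-only variant, mirroring how this commit uses it at call sites that guard internal invariants (the class and `settings_preloaded?` attribute here are illustrative):

```ruby
class MyService < ::BaseService
  def execute
    unless project.settings_preloaded?
      exception = RuntimeError.new('Settings were not preloaded')
      # Logs and reports to Sentry in every environment,
      # but re-raises only in development and test.
      Gitlab::Sentry.track_and_raise_for_dev_exception(exception, project_id: project.id)
    end

    success
  end
end
```

In production this records the problem without interrupting the request; in development and test it fails loudly so the broken invariant is noticed early.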
## Additional steps with new log files
1. Consider log retention settings. By default, Omnibus will rotate any
......
......@@ -126,7 +126,7 @@ module EE
status: :error
}.merge(opts)
rescue Kubeclient::HttpError => e
::Gitlab::Sentry.track_acceptable_exception(e)
::Gitlab::Sentry.track_exception(e)
{
error: _('Kubernetes API returned status code: %{error_code}') % {
......
......@@ -106,7 +106,7 @@ class JenkinsDeprecatedService < CiService
begin
src = Nokogiri.parse(response).css('img.build-caption-status-icon,.build-caption>img').first.attributes['src'].value
rescue NoMethodError => ex
Raven.capture_exception(ex, extra: { 'response' => response })
Gitlab::Sentry.track_exception(ex, response: response)
return :error
end
......
......@@ -392,7 +392,7 @@ module API
# conan sends two upload requests, the first has no file, so we skip record creation if file.size == 0
::Packages::Conan::CreatePackageFileService.new(current_package, uploaded_file, params.merge(conan_file_type: file_type)).execute unless params['file.size'] == 0
rescue ObjectStorage::RemoteStoreError => e
Gitlab::Sentry.track_acceptable_exception(e, extra: { file_name: params[:file_name], project_id: project.id })
Gitlab::Sentry.track_exception(e, file_name: params[:file_name], project_id: project.id)
forbidden!
end
......
......@@ -28,7 +28,7 @@ module EE
error = LimitExceededError.new(message)
# TODO: change this to Gitlab::Sentry.log_exception(error, extra_context)
# https://gitlab.com/gitlab-org/gitlab/issues/32906
::Gitlab::Sentry.track_acceptable_exception(error, extra: extra_context)
::Gitlab::Sentry.track_exception(error, extra_context)
end
end
end
......
......@@ -18,7 +18,7 @@ module Gitlab
rescue JSON::ParserError
raise LicenseScanningParserError, 'JSON parsing failed'
rescue => e
Gitlab::Sentry.track_exception(e)
Gitlab::Sentry.track_and_raise_for_dev_exception(e)
raise LicenseScanningParserError, 'License scanning report parsing failed'
end
end
......
......@@ -19,7 +19,7 @@ module Gitlab
metrics_report.add_metric(name, metric_values.first)
rescue => e
Gitlab::Sentry.track_exception(e)
Gitlab::Sentry.track_and_raise_for_dev_exception(e)
raise MetricsParserError, "Metrics parsing failed"
end
end
......
......@@ -17,7 +17,7 @@ module Gitlab
rescue JSON::ParserError
raise SecurityReportParserError, 'JSON parsing failed'
rescue => e
Gitlab::Sentry.track_exception(e)
Gitlab::Sentry.track_and_raise_for_dev_exception(e)
raise SecurityReportParserError, "#{report.type} security report parsing failed"
end
......
......@@ -54,9 +54,9 @@ describe ::Gitlab::Ci::Pipeline::Chain::Limit::Activity do
end
it 'logs the error' do
expect(Gitlab::Sentry).to receive(:track_acceptable_exception).with(
expect(Gitlab::Sentry).to receive(:track_exception).with(
instance_of(EE::Gitlab::Ci::Limit::LimitExceededError),
extra: { project_id: project.id, plan: namespace.actual_plan_name }
project_id: project.id, plan: namespace.actual_plan_name
)
subject
......@@ -83,7 +83,7 @@ describe ::Gitlab::Ci::Pipeline::Chain::Limit::Activity do
end
it 'does not log any error' do
expect(Gitlab::Sentry).not_to receive(:track_acceptable_exception)
expect(Gitlab::Sentry).not_to receive(:track_exception)
subject
end
......
......@@ -56,9 +56,9 @@ describe ::Gitlab::Ci::Pipeline::Chain::Limit::JobActivity do
end
it 'logs the error' do
expect(Gitlab::Sentry).to receive(:track_acceptable_exception).with(
expect(Gitlab::Sentry).to receive(:track_exception).with(
instance_of(EE::Gitlab::Ci::Limit::LimitExceededError),
extra: { project_id: project.id, plan: namespace.actual_plan_name }
project_id: project.id, plan: namespace.actual_plan_name
)
subject
......@@ -85,7 +85,7 @@ describe ::Gitlab::Ci::Pipeline::Chain::Limit::JobActivity do
end
it 'does not log any error' do
expect(Gitlab::Sentry).not_to receive(:track_acceptable_exception)
expect(Gitlab::Sentry).not_to receive(:track_exception)
subject
end
......
......@@ -72,9 +72,9 @@ describe ::Gitlab::Ci::Pipeline::Chain::Limit::Size do
end
it 'logs the error' do
expect(Gitlab::Sentry).to receive(:track_acceptable_exception).with(
expect(Gitlab::Sentry).to receive(:track_exception).with(
instance_of(EE::Gitlab::Ci::Limit::LimitExceededError),
extra: { project_id: project.id, plan: namespace.actual_plan_name }
project_id: project.id, plan: namespace.actual_plan_name
)
subject
......@@ -124,7 +124,7 @@ describe ::Gitlab::Ci::Pipeline::Chain::Limit::Size do
end
it 'does not log any error' do
expect(Gitlab::Sentry).not_to receive(:track_acceptable_exception)
expect(Gitlab::Sentry).not_to receive(:track_exception)
subject
end
......
......@@ -384,8 +384,9 @@ module API
def handle_api_exception(exception)
if report_exception?(exception)
define_params_for_grape_middleware
Gitlab::Sentry.context(current_user)
Gitlab::Sentry.track_acceptable_exception(exception, extra: params)
Gitlab::Sentry.with_context(current_user) do
Gitlab::Sentry.track_exception(exception, params)
end
end
# This is used with GrapeLogging::Loggers::ExceptionLogger
......
......@@ -11,7 +11,6 @@ module Gitlab
{ title: 'task', color: '#7F8C8D' }].freeze
attr_reader :project, :client, :errors, :users
attr_accessor :logger
def initialize(project)
@project = project
......@@ -20,7 +19,6 @@ module Gitlab
@labels = {}
@errors = []
@users = {}
@logger = Gitlab::Import::Logger.build
end
def execute
......@@ -47,7 +45,8 @@ module Gitlab
backtrace = Gitlab::Profiler.clean_backtrace(ex.backtrace)
error = { type: :pull_request, iid: pull_request.iid, errors: ex.message, trace: backtrace, raw_response: pull_request.raw }
log_error(error)
Gitlab::Sentry.log_exception(ex, error)
# Omit the details from the database to avoid blowing up usage in the error column
error.delete(:trace)
error.delete(:raw_response)
......@@ -275,10 +274,6 @@ module Gitlab
author.to_s + comment.note.to_s
end
def log_error(details)
logger.error(log_base_data.merge(details))
end
def log_base_data
{
class: self.class.name,
......
......@@ -133,7 +133,10 @@ module Gitlab
log_info(stage: 'import_repository', message: 'finished import')
rescue Gitlab::Shell::Error => e
log_error(stage: 'import_repository', message: 'failed import', error: e.message)
Gitlab::Sentry.log_exception(
e,
stage: 'import_repository', message: 'failed import', error: e.message
)
# Expire cache to prevent scenarios such as:
# 1. First import failed, but the repo was imported successfully, so +exists?+ returns true
......@@ -164,8 +167,10 @@ module Gitlab
batch.each do |pull_request|
import_bitbucket_pull_request(pull_request)
rescue StandardError => e
backtrace = Gitlab::Profiler.clean_backtrace(e.backtrace)
log_error(stage: 'import_pull_requests', iid: pull_request.iid, error: e.message, backtrace: backtrace)
Gitlab::Sentry.log_exception(
e,
stage: 'import_pull_requests', iid: pull_request.iid, error: e.message
)
errors << { type: :pull_request, iid: pull_request.iid, errors: e.message, backtrace: backtrace.join("\n"), raw_response: pull_request.raw }
end
......@@ -177,7 +182,11 @@ module Gitlab
client.delete_branch(project_key, repository_slug, branch.name, branch.sha)
project.repository.delete_branch(branch.name)
rescue BitbucketServer::Connection::ConnectionError => e
log_error(stage: 'delete_temp_branches', branch: branch.name, error: e.message)
Gitlab::Sentry.log_exception(
e,
stage: 'delete_temp_branches', branch: branch.name, error: e.message
)
@errors << { type: :delete_temp_branches, branch_name: branch.name, errors: e.message }
end
end
......@@ -288,7 +297,11 @@ module Gitlab
# a regular note.
create_fallback_diff_note(merge_request, comment, position)
rescue StandardError => e
log_error(stage: 'create_diff_note', comment_id: comment.id, error: e.message)
Gitlab::Sentry.log_exception(
e,
stage: 'create_diff_note', comment_id: comment.id, error: e.message
)
errors << { type: :pull_request, id: comment.id, errors: e.message }
nil
end
......@@ -325,7 +338,11 @@ module Gitlab
merge_request.notes.create!(pull_request_comment_attributes(replies))
end
rescue StandardError => e
log_error(stage: 'import_standalone_pr_comments', merge_request_id: merge_request.id, comment_id: comment.id, error: e.message)
Gitlab::Sentry.log_exception(
e,
stage: 'import_standalone_pr_comments', merge_request_id: merge_request.id, comment_id: comment.id, error: e.message
)
errors << { type: :pull_request, comment_id: comment.id, errors: e.message }
end
end
......@@ -360,10 +377,6 @@ module Gitlab
logger.info(log_base_data.merge(details))
end
def log_error(details)
logger.error(log_base_data.merge(details))
end
def log_warn(details)
logger.warn(log_base_data.merge(details))
end
......
......@@ -67,11 +67,11 @@ module Gitlab
build_config(config)
rescue Gitlab::Config::Loader::Yaml::DataTooLargeError => e
track_exception(e)
track_and_raise_for_dev_exception(e)
raise Config::ConfigError, e.message
rescue Gitlab::Ci::Config::External::Context::TimeoutError => e
track_exception(e)
track_and_raise_for_dev_exception(e)
raise Config::ConfigError, TIMEOUT_MESSAGE
end
......@@ -94,8 +94,8 @@ module Gitlab
user: user)
end
def track_exception(error)
Gitlab::Sentry.track_exception(error, extra: @context.sentry_payload)
def track_and_raise_for_dev_exception(error)
Gitlab::Sentry.track_and_raise_for_dev_exception(error, @context.sentry_payload)
end
# Overridden in EE
......
......@@ -21,10 +21,10 @@ module Gitlab
rescue Gitlab::Ci::YamlProcessor::ValidationError => ex
error(ex.message, config_error: true)
rescue => ex
Gitlab::Sentry.track_acceptable_exception(ex, extra: {
Gitlab::Sentry.track_exception(ex,
project_id: project.id,
sha: @pipeline.sha
})
)
error("Undefined error (#{Labkit::Correlation::CorrelationId.current_id})",
config_error: true)
......
......@@ -35,7 +35,7 @@ module Gitlab
# match the blob, which is a bug. But we shouldn't fail to render
# completely in that case, even though we want to report the error.
rescue RangeError => e
Gitlab::Sentry.track_exception(e, issue_url: 'https://gitlab.com/gitlab-org/gitlab-foss/issues/45441')
Gitlab::Sentry.track_and_raise_for_dev_exception(e, issue_url: 'https://gitlab.com/gitlab-org/gitlab-foss/issues/45441')
end
end
......
......@@ -67,8 +67,7 @@ module Gitlab
File.read(cert_file).scan(PEM_REGEX).map do |cert|
OpenSSL::X509::Certificate.new(cert).to_pem
rescue OpenSSL::OpenSSLError => e
Rails.logger.error "Could not load certificate #{cert_file} #{e}" # rubocop:disable Gitlab/RailsLogger
Gitlab::Sentry.track_exception(e, extra: { cert_file: cert_file })
Gitlab::Sentry.track_and_raise_for_dev_exception(e, cert_file: cert_file)
nil
end.compact
end.uniq.join("\n")
......
......@@ -91,12 +91,10 @@ module Gitlab
project.repository.add_branch(project.creator, source_branch, pull_request.source_branch_sha)
rescue Gitlab::Git::CommandError => e
Gitlab::Sentry.track_acceptable_exception(e,
extra: {
source_branch: source_branch,
project_id: merge_request.project.id,
merge_request_id: merge_request.id
})
Gitlab::Sentry.track_exception(e,
source_branch: source_branch,
project_id: merge_request.project.id,
merge_request_id: merge_request.id)
end
end
end
......
......@@ -110,9 +110,9 @@ module Gitlab
folder_contents = Dir.children(tmp_dir)
# This means we left a GPG-agent process hanging. Logging the problem in
# sentry will make this more visible.
Gitlab::Sentry.track_exception(e,
Gitlab::Sentry.track_and_raise_for_dev_exception(e,
issue_url: 'https://gitlab.com/gitlab-org/gitlab/issues/20918',
extra: { tmp_dir: tmp_dir, contents: folder_contents })
tmp_dir: tmp_dir, contents: folder_contents)
end
tmp_keychains_removed.increment unless File.exist?(tmp_dir)
......
......@@ -32,7 +32,7 @@ module Gitlab
# Will inform you if there needs to be `calls_gitaly: true` as a kwarg in the field declaration
# if there is at least 1 Gitaly call involved with the field resolution.
error = RuntimeError.new("Gitaly is called for field '#{type_object.name}' on #{type_object.owner.try(:name)} - please either specify a constant complexity or add `calls_gitaly: true` to the field declaration")
Gitlab::Sentry.track_exception(error)
Gitlab::Sentry.track_and_raise_for_dev_exception(error)
end
end
end
......
......@@ -18,7 +18,7 @@ module Gitlab
variables: variables
})
rescue => e
Gitlab::Sentry.track_exception(e)
Gitlab::Sentry.track_and_raise_for_dev_exception(e)
default_initial_values(query)
end
......@@ -38,7 +38,7 @@ module Gitlab
GraphqlLogger.info(memo.except!(:time_started, :query))
rescue => e
Gitlab::Sentry.track_exception(e)
Gitlab::Sentry.track_and_raise_for_dev_exception(e)
end
private
......
......@@ -61,7 +61,7 @@ module Gitlab
tokens = lexer.lex(text, continue: continue)
Timeout.timeout(timeout_time) { @formatter.format(tokens, tag: tag).html_safe }
rescue Timeout::Error => e
Gitlab::Sentry.track_exception(e)
Gitlab::Sentry.track_and_raise_for_dev_exception(e)
highlight_plain(text)
rescue
highlight_plain(text)
......
......@@ -82,11 +82,8 @@ module Gitlab
end
def log_import_failure(relation_key, relation_index, exception)
Gitlab::Sentry.track_acceptable_exception(
exception,
extra: { project_id: @importable.id,
relation_key: relation_key,
relation_index: relation_index })
Gitlab::Sentry.track_exception(exception,
project_id: @importable.id, relation_key: relation_key, relation_index: relation_index)
ImportFailure.create(
project: @importable,
......
......@@ -56,11 +56,7 @@ module Gitlab
end
def error(error)
error_payload = { message: error.message }
error_payload[:error_backtrace] = Gitlab::Profiler.clean_backtrace(error.backtrace) if error.backtrace
log_error(error_payload)
Gitlab::Sentry.track_acceptable_exception(error, extra: log_base_data)
Gitlab::Sentry.track_exception(error, log_base_data)
add_error_message(error.message)
end
......
......@@ -118,6 +118,8 @@ module Gitlab
end
def self.clean_backtrace(backtrace)
return unless backtrace
Array(Rails.backtrace_cleaner.clean(backtrace)).reject do |line|
line.match(Regexp.union(IGNORE_BACKTRACES))
end
......
......@@ -2,76 +2,153 @@
module Gitlab
module Sentry
def self.enabled?
(Rails.env.production? || Rails.env.development?) &&
Gitlab.config.sentry.enabled
end
class << self
def configure
Raven.configure do |config|
config.dsn = sentry_dsn
config.release = Gitlab.revision
config.current_environment = Gitlab.config.sentry.environment
# Sanitize fields based on those sanitized from Rails.
config.sanitize_fields = Rails.application.config.filter_parameters.map(&:to_s)
# Sanitize authentication headers
config.sanitize_http_headers = %w[Authorization Private-Token]
config.tags = { program: Gitlab.process_name }
# Debugging for https://gitlab.com/gitlab-org/gitlab-foss/issues/57727
config.before_send = method(:add_context_from_exception_type)
end
end
def with_context(current_user = nil)
last_user_context = Raven.context.user
def self.context(current_user = nil)
return unless enabled?
user_context = {
id: current_user&.id,
email: current_user&.email,
username: current_user&.username
}.compact
Raven.tags_context(default_tags)
Raven.tags_context(default_tags)
Raven.user_context(user_context)
if current_user
Raven.user_context(
id: current_user.id,
email: current_user.email,
username: current_user.username
)
yield
ensure
Raven.user_context(last_user_context)
end
end
# This can be used for investigating exceptions that can be recovered from in
# code. The exception will still be raised in development and test
# environments.
#
# That way we can track down these exceptions with as much information as we
# need to resolve them.
#
# Provide an issue URL for follow up.
def self.track_exception(exception, issue_url: nil, extra: {})
track_acceptable_exception(exception, issue_url: issue_url, extra: extra)
raise exception if should_raise_for_dev?
end
# This should be used when you want to pass the exception through:
# rescue and re-raise so it is caught in upper layers of the application.
#
# If the exception implements the method `sentry_extra_data` and that method
# returns a Hash, then the return value of that method will be merged into
# `extra`. Exceptions can use this mechanism to provide structured data
# to sentry in addition to their message and back-trace.
def track_and_raise_exception(exception, extra = {})
process_exception(exception, sentry: true, extra: extra)
# This should be used when you do not want to raise an exception in
# development and test. If you need development and test to behave
# just the same as production you can use this instead of
# track_exception.
#
# If the exception implements the method `sentry_extra_data` and that method
# returns a Hash, then the return value of that method will be merged into
# `extra`. Exceptions can use this mechanism to provide structured data
# to sentry in addition to their message and back-trace.
def self.track_acceptable_exception(exception, issue_url: nil, extra: {})
if enabled?
extra = build_extra_data(exception, issue_url, extra)
context # Make sure we've set everything we know in the context
Raven.capture_exception(exception, tags: default_tags, extra: extra)
raise exception
end
end
def self.should_raise_for_dev?
Rails.env.development? || Rails.env.test?
end
# This can be used for investigating exceptions that can be recovered from in
# code. The exception will still be raised in development and test
# environments.
#
# That way we can track down these exceptions with as much information as we
# need to resolve them.
#
# If the exception implements the method `sentry_extra_data` and that method
# returns a Hash, then the return value of that method will be merged into
# `extra`. Exceptions can use this mechanism to provide structured data
# to sentry in addition to their message and back-trace.
#
# Provide an issue URL for follow up.
# as `issue_url: 'http://gitlab.com/gitlab-org/gitlab/issues/111'`
def track_and_raise_for_dev_exception(exception, extra = {})
process_exception(exception, sentry: true, extra: extra)
def self.default_tags
{
Labkit::Correlation::CorrelationId::LOG_KEY.to_sym => Labkit::Correlation::CorrelationId.current_id,
locale: I18n.locale
}
end
raise exception if should_raise_for_dev?
end
def self.build_extra_data(exception, issue_url, extra)
exception.try(:sentry_extra_data)&.tap do |data|
extra.merge!(data) if data.is_a?(Hash)
# This should be used when you only want to track the exception.
#
# If the exception implements the method `sentry_extra_data` and that method
# returns a Hash, then the return value of that method will be merged into
# `extra`. Exceptions can use this mechanism to provide structured data
# to sentry in addition to their message and back-trace.
def track_exception(exception, extra = {})
process_exception(exception, sentry: true, extra: extra)
end
extra.merge({ issue_url: issue_url }.compact)
end
# This should be used when you only want to log the exception,
# but not send it to Sentry.
#
# If the exception implements the method `sentry_extra_data` and that method
# returns a Hash, then the return value of that method will be merged into
# `extra`. Exceptions can use this mechanism to provide structured data
# to sentry in addition to their message and back-trace.
def log_exception(exception, extra = {})
process_exception(exception, extra: extra)
end
private
def process_exception(exception, sentry: false, logging: true, extra:)
exception.try(:sentry_extra_data)&.tap do |data|
extra = extra.merge(data) if data.is_a?(Hash)
end
if sentry && Raven.configuration.server
Raven.capture_exception(exception, tags: default_tags, extra: extra)
end
private_class_method :build_extra_data
if logging
# TODO: this logic could migrate into `Gitlab::ExceptionLogFormatter`
# and we could also flatten deep nested hashes if required for search
# (e.g. if `extra` includes hash of hashes).
# In the current implementation, we don't flatten multi-level folded hashes.
log_hash = {}
Raven.context.tags.each { |name, value| log_hash["tags.#{name}"] = value }
Raven.context.user.each { |name, value| log_hash["user.#{name}"] = value }
Raven.context.extra.merge(extra).each { |name, value| log_hash["extra.#{name}"] = value }
Gitlab::ExceptionLogFormatter.format!(exception, log_hash)
Gitlab::Sentry::Logger.error(log_hash)
end
end
def sentry_dsn
return unless Rails.env.production? || Rails.env.development?
return unless Gitlab.config.sentry.enabled
Gitlab.config.sentry.dsn
end
def should_raise_for_dev?
Rails.env.development? || Rails.env.test?
end
def default_tags
{
Labkit::Correlation::CorrelationId::LOG_KEY.to_sym => Labkit::Correlation::CorrelationId.current_id,
locale: I18n.locale
}
end
def add_context_from_exception_type(event, hint)
if ActiveModel::MissingAttributeError === hint[:exception]
columns_hash = ActiveRecord::Base
.connection
.schema_cache
.instance_variable_get(:@columns_hash)
.map { |k, v| [k, v.map(&:first)] }
.to_h
event.extra.merge!(columns_hash)
end
event
end
end
end
end
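
As a usage note for the `sentry_extra_data` hook mentioned in the comments above, a minimal sketch (the error class and its attributes are hypothetical): any `Hash` returned by `sentry_extra_data` is merged into the `extra` payload by `process_exception`.

```ruby
# Hypothetical domain error that carries its own structured context.
class QuotaExceededError < StandardError
  def initialize(project)
    @project = project
    super("Quota exceeded for project #{project.id}")
  end

  # Merged into the `extra` payload whenever this exception is tracked
  # through one of the Gitlab::Sentry helpers.
  def sentry_extra_data
    { project_id: @project.id }
  end
end

# Hypothetical call site:
#   Gitlab::Sentry.track_exception(QuotaExceededError.new(project), importer: 'github')
```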
# frozen_string_literal: true
module Gitlab
module Sentry
class Logger < ::Gitlab::JsonLogger
def self.file_name_noext
'exceptions_json'
end
end
end
end
......@@ -126,7 +126,7 @@ module Gitlab
true
rescue => e
Gitlab::Sentry.track_acceptable_exception(e, extra: { path: path, new_path: new_path, storage: storage })
Gitlab::Sentry.track_exception(e, path: path, new_path: new_path, storage: storage)
false
end
......@@ -158,7 +158,7 @@ module Gitlab
true
rescue => e
Rails.logger.warn("Repository does not exist: #{e} at: #{name}.git") # rubocop:disable Gitlab/RailsLogger
Gitlab::Sentry.track_acceptable_exception(e, extra: { path: name, storage: storage })
Gitlab::Sentry.track_exception(e, path: name, storage: storage)
false
end
......@@ -267,7 +267,7 @@ module Gitlab
def mv_namespace(storage, old_name, new_name)
Gitlab::GitalyClient::NamespaceService.new(storage).rename(old_name, new_name)
rescue GRPC::InvalidArgument => e
Gitlab::Sentry.track_acceptable_exception(e, extra: { old_name: old_name, new_name: new_name, storage: storage })
Gitlab::Sentry.track_exception(e, old_name: old_name, new_name: new_name, storage: storage)
false
end
......
......@@ -8,7 +8,7 @@ module Gitlab::UsageDataCounters
class << self
def redis_key(event)
Gitlab::Sentry.track_exception(UnknownEvent, extra: { event: event }) unless known_events.include?(event.to_s)
Gitlab::Sentry.track_and_raise_for_dev_exception(UnknownEvent.new, event: event) unless known_events.include?(event.to_s)
"USAGE_#{prefix}_#{event}".upcase
end
......
......@@ -62,7 +62,7 @@ module Sentry
def handle_mapping_exceptions(&block)
yield
rescue KeyError => e
Gitlab::Sentry.track_acceptable_exception(e)
Gitlab::Sentry.track_exception(e)
raise MissingKeysError, "Sentry API response is missing keys. #{e.message}"
end
......@@ -118,7 +118,7 @@ module Sentry
def handle_request_exceptions
yield
rescue Gitlab::HTTP::Error => e
Gitlab::Sentry.track_acceptable_exception(e)
Gitlab::Sentry.track_exception(e)
raise_error 'Error when connecting to Sentry'
rescue Net::OpenTimeout
raise_error 'Connection to Sentry timed out'
......@@ -129,7 +129,7 @@ module Sentry
rescue Errno::ECONNREFUSED
raise_error 'Connection refused'
rescue => e
Gitlab::Sentry.track_acceptable_exception(e)
Gitlab::Sentry.track_exception(e)
raise_error "Sentry request failed due to #{e.class}"
end
......
......@@ -19211,9 +19211,6 @@ msgstr ""
msgid "Upload file"
msgstr ""
msgid "Upload file does not exist"
msgstr ""
msgid "Upload object map"
msgstr ""
......
......@@ -1159,7 +1159,7 @@ describe Projects::IssuesController do
end
it "prevents deletion if destroy_confirm is not set" do
expect(Gitlab::Sentry).to receive(:track_acceptable_exception).and_call_original
expect(Gitlab::Sentry).to receive(:track_exception).and_call_original
delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: issue.iid }
......@@ -1168,7 +1168,7 @@ describe Projects::IssuesController do
end
it "prevents deletion in JSON format if destroy_confirm is not set" do
expect(Gitlab::Sentry).to receive(:track_acceptable_exception).and_call_original
expect(Gitlab::Sentry).to receive(:track_exception).and_call_original
delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: issue.iid, format: 'json' }
......
......@@ -620,7 +620,7 @@ describe Projects::MergeRequestsController do
end
it "prevents deletion if destroy_confirm is not set" do
expect(Gitlab::Sentry).to receive(:track_acceptable_exception).and_call_original
expect(Gitlab::Sentry).to receive(:track_exception).and_call_original
delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid }
......@@ -629,7 +629,7 @@ describe Projects::MergeRequestsController do
end
it "prevents deletion in JSON format if destroy_confirm is not set" do
expect(Gitlab::Sentry).to receive(:track_acceptable_exception).and_call_original
expect(Gitlab::Sentry).to receive(:track_exception).and_call_original
delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid, format: 'json' }
......
......@@ -157,7 +157,7 @@ describe Gitlab::Ci::Config do
describe '.new' do
it 'raises error' do
expect(Gitlab::Sentry).to receive(:track_exception)
expect(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception)
expect { config }.to raise_error(
described_class::ConfigError,
......@@ -367,7 +367,7 @@ describe Gitlab::Ci::Config do
end
it 'raises error TimeoutError' do
expect(Gitlab::Sentry).to receive(:track_exception)
expect(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception)
expect { config }.to raise_error(
described_class::ConfigError,
......
......@@ -105,7 +105,7 @@ describe Gitlab::Diff::Highlight do
end
it 'keeps the original rich line' do
allow(Gitlab::Sentry).to receive(:track_exception)
allow(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception)
code = %q{+ raise RuntimeError, "System commands must be given as an array of strings"}
......@@ -114,7 +114,7 @@ describe Gitlab::Diff::Highlight do
end
it 'reports to Sentry if configured' do
expect(Gitlab::Sentry).to receive(:track_exception).and_call_original
expect(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception).and_call_original
expect { subject }. to raise_exception(RangeError)
end
......
......@@ -86,12 +86,11 @@ describe Gitlab::GitalyClient do
describe '.stub_certs' do
it 'skips certificates if OpenSSLError is raised and report it' do
expect(Rails.logger).to receive(:error).at_least(:once)
expect(Gitlab::Sentry)
.to receive(:track_exception)
.to receive(:track_and_raise_for_dev_exception)
.with(
a_kind_of(OpenSSL::X509::CertificateError),
extra: { cert_file: a_kind_of(String) }).at_least(:once)
cert_file: a_kind_of(String)).at_least(:once)
expect(OpenSSL::X509::Certificate)
.to receive(:new)
......
......@@ -301,7 +301,7 @@ describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redi
it 'ignores Git command errors when creating a branch' do
expect(project.repository).to receive(:add_branch).and_raise(Gitlab::Git::CommandError)
expect(Gitlab::Sentry).to receive(:track_acceptable_exception).and_call_original
expect(Gitlab::Sentry).to receive(:track_exception).and_call_original
mr = insert_git_data
......
......@@ -182,17 +182,17 @@ describe Gitlab::Gpg do
expected_tmp_dir = nil
expect(described_class).to receive(:cleanup_tmp_dir).and_raise(expected_exception)
allow(Gitlab::Sentry).to receive(:track_exception)
allow(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception)
described_class.using_tmp_keychain do
expected_tmp_dir = described_class.current_home_dir
FileUtils.touch(File.join(expected_tmp_dir, 'dummy.file'))
end
expect(Gitlab::Sentry).to have_received(:track_exception).with(
expect(Gitlab::Sentry).to have_received(:track_and_raise_for_dev_exception).with(
expected_exception,
issue_url: 'https://gitlab.com/gitlab-org/gitlab/issues/20918',
extra: { tmp_dir: expected_tmp_dir, contents: ['dummy.file'] }
tmp_dir: expected_tmp_dir, contents: ['dummy.file']
)
end
......
......@@ -49,24 +49,9 @@ describe Gitlab::ImportExport::Shared do
it 'updates the import JID' do
import_state = create(:import_state, project: project, jid: 'jid-test')
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger).to receive(:error).with(hash_including(import_jid: import_state.jid))
end
subject.error(error)
end
it 'calls the error logger without a backtrace' do
expect(subject).to receive(:log_error).with(message: error.message)
subject.error(error)
end
it 'calls the error logger with the full message' do
backtrace = caller
allow(error).to receive(:backtrace).and_return(caller)
expect(subject).to receive(:log_error).with(message: error.message, error_backtrace: Gitlab::Profiler.clean_backtrace(backtrace))
expect(Gitlab::Sentry)
.to receive(:track_exception)
.with(error, hash_including(import_jid: import_state.jid))
subject.error(error)
end
......
......@@ -8,10 +8,20 @@ describe Gitlab::ImportExport::VersionChecker do
describe 'bundle a project Git repo' do
let(:version) { Gitlab::ImportExport.version }
let(:version_file) { Tempfile.new('VERSION') }
before do
allow_any_instance_of(Gitlab::ImportExport::Shared).to receive(:relative_archive_path).and_return('')
allow(File).to receive(:open).and_return(version)
version_file.write(version)
version_file.rewind
allow_any_instance_of(described_class).to receive(:version_file).and_return(version_file.path)
end
after do
version_file.close
version_file.unlink
end
it 'returns true if Import/Export have the same version' do
......
......@@ -3,12 +3,31 @@
require 'spec_helper'
describe Gitlab::Sentry do
describe '.context' do
it 'adds the expected tags' do
expect(described_class).to receive(:enabled?).and_return(true)
allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('cid')
let(:exception) { RuntimeError.new('boom') }
let(:issue_url) { 'http://gitlab.com/gitlab-org/gitlab-foss/issues/1' }
let(:expected_payload_includes) do
[
{ 'exception.class' => 'RuntimeError' },
{ 'exception.message' => 'boom' },
{ 'tags.correlation_id' => 'cid' },
{ 'extra.some_other_info' => 'info' },
{ 'extra.issue_url' => 'http://gitlab.com/gitlab-org/gitlab-foss/issues/1' }
]
end
before do
stub_sentry_settings
described_class.context(nil)
allow(described_class).to receive(:sentry_dsn).and_return(Gitlab.config.sentry.dsn)
allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('cid')
described_class.configure
end
describe '.with_context' do
it 'adds the expected tags' do
described_class.with_context {}
expect(Raven.tags_context[:locale].to_s).to eq(I18n.locale.to_s)
expect(Raven.tags_context[Labkit::Correlation::CorrelationId::LOG_KEY.to_sym].to_s)
......@@ -16,23 +35,23 @@ describe Gitlab::Sentry do
end
end
describe '.track_exception' do
let(:exception) { RuntimeError.new('boom') }
describe '.track_and_raise_for_dev_exception' do
context 'when exceptions for dev should be raised' do
before do
expect(described_class).to receive(:should_raise_for_dev?).and_return(true)
end
before do
allow(described_class).to receive(:enabled?).and_return(true)
end
it 'raises the exception' do
expect(Raven).to receive(:capture_exception)
it 'raises the exception if it should' do
expect(described_class).to receive(:should_raise_for_dev?).and_return(true)
expect { described_class.track_exception(exception) }
.to raise_error(RuntimeError)
expect { described_class.track_and_raise_for_dev_exception(exception) }
.to raise_error(RuntimeError)
end
end
context 'when exceptions should not be raised' do
context 'when exceptions for dev should not be raised' do
before do
allow(described_class).to receive(:should_raise_for_dev?).and_return(false)
allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('cid')
expect(described_class).to receive(:should_raise_for_dev?).and_return(false)
end
it 'logs the exception with all attributes passed' do
......@@ -50,30 +69,49 @@ describe Gitlab::Sentry do
tags: a_hash_including(expected_tags),
extra: a_hash_including(expected_extras))
described_class.track_exception(
described_class.track_and_raise_for_dev_exception(
exception,
issue_url: 'http://gitlab.com/gitlab-org/gitlab-foss/issues/1',
extra: { some_other_info: 'info' }
issue_url: issue_url,
some_other_info: 'info'
)
end
it 'sets the context' do
expect(described_class).to receive(:context)
it 'calls Gitlab::Sentry::Logger.error with formatted payload' do
expect(Gitlab::Sentry::Logger).to receive(:error)
.with(a_hash_including(*expected_payload_includes))
described_class.track_exception(exception)
described_class.track_and_raise_for_dev_exception(
exception,
issue_url: issue_url,
some_other_info: 'info'
)
end
end
end
context '.track_acceptable_exception' do
let(:exception) { RuntimeError.new('boom') }
let(:issue_url) { 'http://gitlab.com/gitlab-org/gitlab-foss/issues/1' }
describe '.track_and_raise_exception' do
it 'always raises the exception' do
expect(Raven).to receive(:capture_exception)
expect { described_class.track_and_raise_exception(exception) }
.to raise_error(RuntimeError)
end
before do
allow(described_class).to receive(:enabled?).and_return(true)
allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('cid')
it 'calls Gitlab::Sentry::Logger.error with formatted payload' do
expect(Gitlab::Sentry::Logger).to receive(:error)
.with(a_hash_including(*expected_payload_includes))
expect do
described_class.track_and_raise_exception(
exception,
issue_url: issue_url,
some_other_info: 'info'
)
end.to raise_error(RuntimeError)
end
end
describe '.track_exception' do
it 'calls Raven.capture_exception' do
expected_extras = {
some_other_info: 'info',
......@@ -89,34 +127,45 @@ describe Gitlab::Sentry do
tags: a_hash_including(expected_tags),
extra: a_hash_including(expected_extras))
described_class.track_acceptable_exception(
described_class.track_exception(
exception,
issue_url: issue_url,
some_other_info: 'info'
)
end
it 'calls Gitlab::Sentry::Logger.error with formatted payload' do
expect(Gitlab::Sentry::Logger).to receive(:error)
.with(a_hash_including(*expected_payload_includes))
described_class.track_exception(
exception,
issue_url: issue_url,
extra: { some_other_info: 'info' }
some_other_info: 'info'
)
end
context 'the exception implements :sentry_extra_data' do
let(:extra_info) { { event: 'explosion', size: :massive } }
let(:exception) { double(message: 'bang!', sentry_extra_data: extra_info) }
let(:exception) { double(message: 'bang!', sentry_extra_data: extra_info, backtrace: caller) }
it 'includes the extra data from the exception in the tracking information' do
expect(Raven).to receive(:capture_exception)
.with(exception, a_hash_including(extra: a_hash_including(extra_info)))
described_class.track_acceptable_exception(exception)
described_class.track_exception(exception)
end
end
context 'the exception implements :sentry_extra_data, which returns nil' do
let(:exception) { double(message: 'bang!', sentry_extra_data: nil) }
let(:exception) { double(message: 'bang!', sentry_extra_data: nil, backtrace: caller) }
it 'just includes the other extra info' do
extra_info = { issue_url: issue_url }
expect(Raven).to receive(:capture_exception)
.with(exception, a_hash_including(extra: a_hash_including(extra_info)))
described_class.track_acceptable_exception(exception, extra_info)
described_class.track_exception(exception, extra_info)
end
end
end
......
......@@ -3522,7 +3522,7 @@ describe Ci::Build do
end
it 'can drop the build' do
expect(Gitlab::Sentry).to receive(:track_exception)
expect(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception)
expect { build.drop! }.not_to raise_error
......
......@@ -1004,8 +1004,8 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
it { is_expected.to eq(connection_status: :unknown_failure) }
it 'notifies Sentry' do
expect(Gitlab::Sentry).to receive(:track_acceptable_exception)
.with(instance_of(StandardError), hash_including(extra: { cluster_id: cluster.id }))
expect(Gitlab::Sentry).to receive(:track_exception)
.with(instance_of(StandardError), hash_including(cluster_id: cluster.id))
subject
end
......
......@@ -82,7 +82,7 @@ describe GroupDescendant do
end
it 'tracks the exception when a parent was not preloaded' do
expect(Gitlab::Sentry).to receive(:track_exception).and_call_original
expect(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception).and_call_original
expect { described_class.build_hierarchy([subsub_group]) }.to raise_error(ArgumentError)
end
......@@ -91,7 +91,7 @@ describe GroupDescendant do
expected_hierarchy = { parent => { subgroup => subsub_group } }
# this does not raise in production, so stubbing it here.
allow(Gitlab::Sentry).to receive(:track_exception)
allow(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception)
expect(described_class.build_hierarchy([subsub_group])).to eq(expected_hierarchy)
end
......
......@@ -171,8 +171,7 @@ describe Upload do
it 'sends a message to Sentry' do
upload = create(:upload, :issuable_upload)
expect(Gitlab::Sentry).to receive(:enabled?).and_return(true)
expect(Raven).to receive(:capture_message).with("Upload file does not exist", extra: upload.attributes)
expect(Gitlab::Sentry).to receive(:track_exception).with(instance_of(RuntimeError), upload.attributes)
upload.exist?
end
......
......@@ -46,7 +46,7 @@ describe 'GraphQL' do
end
it 'logs the exception in Sentry and continues with the request' do
expect(Gitlab::Sentry).to receive(:track_exception).at_least(1).times
expect(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception).at_least(1).times
expect(Gitlab::GraphqlLogger).to receive(:info)
post_graphql(query, variables: {})
......@@ -146,7 +146,7 @@ describe 'GraphQL' do
end
it "logs a warning that the 'calls_gitaly' field declaration is missing" do
expect(Gitlab::Sentry).to receive(:track_exception).once
expect(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception).once
post_graphql(query, current_user: user)
end
......
......@@ -226,11 +226,11 @@ describe API::Helpers do
describe '.handle_api_exception' do
before do
allow_any_instance_of(self.class).to receive(:rack_response)
allow(Gitlab::Sentry).to receive(:enabled?).and_return(true)
stub_sentry_settings
configure_sentry
expect(Gitlab::Sentry).to receive(:sentry_dsn).and_return(Gitlab.config.sentry.dsn)
Gitlab::Sentry.configure
Raven.client.configuration.encoding = 'json'
end
......
......@@ -60,10 +60,10 @@ describe Ci::ArchiveTraceService, '#execute' do
it 'increments Prometheus counter, sends crash report to Sentry and ignore an error for continuing to archive' do
expect(Gitlab::Sentry)
.to receive(:track_exception)
.to receive(:track_and_raise_for_dev_exception)
.with(::Gitlab::Ci::Trace::ArchiveError,
issue_url: 'https://gitlab.com/gitlab-org/gitlab-foss/issues/51502',
extra: { job_id: job.id } ).once
job_id: job.id).once
expect(Sidekiq.logger).to receive(:warn).with(
class: ArchiveTraceWorker.name,
......
......@@ -528,7 +528,7 @@ describe Ci::CreatePipelineService do
end
it 'logs error' do
expect(Gitlab::Sentry).to receive(:track_acceptable_exception).and_call_original
expect(Gitlab::Sentry).to receive(:track_exception).and_call_original
execute_service
end
......@@ -613,7 +613,7 @@ describe Ci::CreatePipelineService do
end
it 'logs error' do
expect(Gitlab::Sentry).to receive(:track_acceptable_exception).and_call_original
expect(Gitlab::Sentry).to receive(:track_exception).and_call_original
execute_service
end
......
......@@ -51,8 +51,8 @@ describe Ci::PrepareBuildService do
it 'drops the build and notifies Sentry' do
expect(build).to receive(:drop).with(:unmet_prerequisites).once
expect(Gitlab::Sentry).to receive(:track_acceptable_exception)
.with(instance_of(Kubeclient::HttpError), hash_including(extra: { build_id: build.id }))
expect(Gitlab::Sentry).to receive(:track_exception)
.with(instance_of(Kubeclient::HttpError), hash_including(build_id: build.id))
subject
end
......
......@@ -514,8 +514,8 @@ module Ci
subject { execute(specific_runner, {}) }
it 'does drop the build and logs both failures' do
expect(Gitlab::Sentry).to receive(:track_acceptable_exception)
.with(anything, a_hash_including(extra: a_hash_including(build_id: pending_job.id)))
expect(Gitlab::Sentry).to receive(:track_exception)
.with(anything, a_hash_including(build_id: pending_job.id))
.twice
.and_call_original
......@@ -540,8 +540,8 @@ module Ci
subject { execute(specific_runner, {}) }
it 'does drop the build and logs failure' do
expect(Gitlab::Sentry).to receive(:track_acceptable_exception)
.with(anything, a_hash_including(extra: a_hash_including(build_id: pending_job.id)))
expect(Gitlab::Sentry).to receive(:track_exception)
.with(anything, a_hash_including(build_id: pending_job.id))
.once
.and_call_original
......
......@@ -11,20 +11,10 @@ shared_examples 'logs kubernetes errors' do
}
end
let(:logger_hash) do
error_hash.merge(
exception: error_name,
message: error_message,
backtrace: instance_of(Array)
)
end
it 'logs into kubernetes.log and Sentry' do
expect(service.send(:logger)).to receive(:error).with(hash_including(logger_hash))
expect(Gitlab::Sentry).to receive(:track_acceptable_exception).with(
expect(Gitlab::Sentry).to receive(:track_exception).with(
error,
extra: hash_including(error_hash)
hash_including(error_hash)
)
service.execute
......
......@@ -63,7 +63,7 @@ describe Ci::ArchiveTracesCronWorker do
let!(:build) { create(:ci_build, :success, :trace_live, finished_at: finished_at) }
before do
allow(Gitlab::Sentry).to receive(:track_exception)
allow(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception)
allow_any_instance_of(Gitlab::Ci::Trace).to receive(:archive!).and_raise('Unexpected error')
end
......
......@@ -43,10 +43,10 @@ describe RunPipelineScheduleWorker do
allow(Ci::CreatePipelineService).to receive(:new) { raise ActiveRecord::StatementInvalid }
expect(Gitlab::Sentry)
.to receive(:track_exception)
.to receive(:track_and_raise_for_dev_exception)
.with(ActiveRecord::StatementInvalid,
issue_url: 'https://gitlab.com/gitlab-org/gitlab-foss/issues/41231',
extra: { schedule_id: pipeline_schedule.id } ).once
schedule_id: pipeline_schedule.id).once
end
it 'increments Prometheus counter' do
......
......@@ -30,8 +30,8 @@ describe StuckCiJobsWorker do
it "does drop the job and logs the reason" do
job.update_columns(yaml_variables: '[{"key" => "value"}]')
expect(Gitlab::Sentry).to receive(:track_acceptable_exception)
.with(anything, a_hash_including(extra: a_hash_including(build_id: job.id)))
expect(Gitlab::Sentry).to receive(:track_exception)
.with(anything, a_hash_including(build_id: job.id))
.once
.and_call_original
......