Commit 6e655f9b authored by GitLab Bot

Automatic merge of gitlab-org/gitlab master

parents df8aa2a5 52ab1ffd
......@@ -3,7 +3,7 @@ import IntegrationForm from '../components/integration_form.vue';
import { createStore } from '../stores';
export default () => {
const entryPoint = document.querySelector('#js-cluster-integration-form');
const entryPoint = document.querySelector('#js-cluster-details-form');
if (!entryPoint) {
return;
......
<script>
import { GlTooltipDirective, GlSafeHtmlDirective as SafeHtml } from '@gitlab/ui';
/* eslint-disable vue/no-v-html */
import { GlTooltipDirective } from '@gitlab/ui';
import { mapActions, mapGetters, mapState } from 'vuex';
import { BV_HIDE_TOOLTIP } from '~/lib/utils/constants';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
......@@ -21,7 +22,6 @@ export default {
},
directives: {
GlTooltip: GlTooltipDirective,
SafeHtml,
},
mixins: [glFeatureFlagsMixin()],
props: {
......@@ -256,7 +256,7 @@ export default {
@mousedown="handleParallelLineMouseDown"
>
<strong v-if="isLeftConflictMarker">{{ conflictText(line.left) }}</strong>
<span v-else v-safe-html="line.left.rich_text"></span>
<span v-else v-html="line.left.rich_text"></span>
</div>
</template>
<template v-else-if="!inline || (line.left && line.left.type === $options.CONFLICT_MARKER)">
......@@ -345,7 +345,6 @@ export default {
<div
:id="line.right.line_code"
:key="line.right.rich_text"
v-safe-html="line.right.rich_text"
:class="[
line.right.type,
{
......@@ -360,7 +359,7 @@ export default {
<strong v-if="line.right.type === $options.CONFLICT_MARKER_THEIR">{{
conflictText(line.right)
}}</strong>
<span v-else v-safe-html="line.right.rich_text"></span>
<span v-else v-html="line.right.rich_text"></span>
</div>
</template>
<template v-else>
......
......@@ -77,15 +77,10 @@ export default {
},
[types.SET_DIFF_DATA_BATCH](state, data) {
const files = prepareDiffData({
state.diffFiles = prepareDiffData({
diff: data,
priorFiles: state.diffFiles,
});
Object.assign(state, {
...convertObjectPropsToCamelCase(data),
});
updateDiffFilesInState(state, files);
},
[types.SET_COVERAGE_DATA](state, coverageFiles) {
......
......@@ -5,8 +5,8 @@ module RunnerSetupScripts
private
def private_runner_setup_scripts(**kwargs)
instructions = Gitlab::Ci::RunnerInstructions.new(current_user: current_user, os: script_params[:os], arch: script_params[:arch], **kwargs)
def private_runner_setup_scripts
instructions = Gitlab::Ci::RunnerInstructions.new(os: script_params[:os], arch: script_params[:arch])
output = {
install: instructions.install_script,
register: instructions.register_command
......
......@@ -52,7 +52,7 @@ module Groups
end
def runner_setup_scripts
private_runner_setup_scripts(group: group)
private_runner_setup_scripts
end
private
......
......@@ -62,7 +62,7 @@ module Projects
end
def runner_setup_scripts
private_runner_setup_scripts(project: @project)
private_runner_setup_scripts
end
private
......
......@@ -21,19 +21,19 @@ module Resolvers
argument :project_id,
type: ::Types::GlobalIDType[::Project],
required: false,
deprecated: { reason: 'No longer used', milestone: '13.11' },
description: 'Project to register the runner for.'
argument :group_id,
type: ::Types::GlobalIDType[::Group],
required: false,
deprecated: { reason: 'No longer used', milestone: '13.11' },
description: 'Group to register the runner for.'
def resolve(platform:, architecture:, **args)
instructions = Gitlab::Ci::RunnerInstructions.new(
current_user: current_user,
os: platform,
arch: architecture,
**target_param(args)
arch: architecture
)
{
......
= form_for @cluster, url: clusterable.cluster_path(@cluster), as: :cluster, html: { class: 'js-cluster-integration-form' } do |field|
= form_for @cluster, url: clusterable.cluster_path(@cluster), as: :cluster, html: { class: 'js-cluster-details-form' } do |field|
= form_errors(@cluster)
#js-cluster-integration-form{ data: js_cluster_form_data(@cluster, can?(current_user, :update_cluster, @cluster)) }
#js-cluster-details-form{ data: js_cluster_form_data(@cluster, can?(current_user, :update_cluster, @cluster)) }
---
title: Fix issue where merge description not showing when merged with merge train
merge_request: 57787
author:
type: fixed
---
title: Remove groupId and projectId arguments to Runner install instructions
merge_request: 57720
author:
type: changed
---
name: sentry_processors_before_send
introduced_by_url:
rollout_issue_url: https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/849#processors
milestone: '13.11'
type: development
group: team::Scalability
default_enabled: false
......@@ -281,9 +281,9 @@ Returns [`RunnerSetup`](#runnersetup).
| Name | Type | Description |
| ---- | ---- | ----------- |
| `architecture` | [`String!`](#string) | Architecture to generate the instructions for. |
| `groupId` | [`GroupID`](#groupid) | Group to register the runner for. |
| `groupId` | [`GroupID`](#groupid) | Group to register the runner for. Deprecated in 13.11: No longer used. |
| `platform` | [`String!`](#string) | Platform to generate the instructions for. |
| `projectId` | [`ProjectID`](#projectid) | Project to register the runner for. |
| `projectId` | [`ProjectID`](#projectid) | Project to register the runner for. Deprecated in 13.11: No longer used. |
### `snippets`
......
......@@ -6,14 +6,15 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Monthly release process
When a new GitLab version is released on the 22nd, we need to release the published documentation
for the new version.
When a new GitLab version is released on the 22nd, we release version-specific published
documentation for the new version.
This should be done as soon as possible after the GitLab version is announced, so that:
We complete the process as soon as possible after the GitLab version is announced. The result is:
- The published documentation includes the three most recent minor releases of the current major
version, and the most recent minor releases of the last two major versions. For example 13.9,
13.8, 13.7, 12.10, and 11.11.
- The [online published documentation](https://docs.gitlab.com) includes:
- The three most recent minor releases of the current major version. For example 13.9, 13.8, and
13.7.
- The most recent minor releases of the last two major versions. For example 12.10 and 11.11.
- Documentation updates after the 22nd are for the next release. The versions dropdown
should have the current milestone with `-pre` appended to it, for example `13.10-pre`.
......@@ -31,23 +32,30 @@ For example:
[stable branch](https://gitlab.com/gitlab-org/gitlab-docs/-/tree/13.8) and Docker image:
[`registry.gitlab.com/gitlab-org/gitlab-docs:13.8`](https://gitlab.com/gitlab-org/gitlab-docs/container_registry/631635).
To set up a documentation release, follow these steps:
1. [Add the charts version](#add-chart-version), so that the documentation is built using the
[version of the charts project that maps to](https://docs.gitlab.com/charts/installation/version_mappings.html)
the GitLab release. This step may have been completed already.
1. [Create a stable branch and Docker image](#create-stable-branch-and-docker-image-for-release) for
the new version.
1. [Create a release merge request](#create-release-merge-request) for the new version, which
updates the version dropdown menu for the current documentation and adds the release to the
Docker configuration. For example, the
[release merge request for 13.9](https://gitlab.com/gitlab-org/gitlab-docs/-/merge_requests/1555).
1. [Update the three online versions](#update-dropdown-for-online-versions), so that they display the new release on their
version dropdown menus. For example:
- The merge request to [update the 13.9 version dropdown menu for the 13.9 release](https://gitlab.com/gitlab-org/gitlab-docs/-/merge_requests/1556).
- The merge request to [update the 13.8 version dropdown menu for the 13.9 release](https://gitlab.com/gitlab-org/gitlab-docs/-/merge_requests/1557).
- The merge request to [update the 13.7 version dropdown menu for the 13.9 release](https://gitlab.com/gitlab-org/gitlab-docs/-/merge_requests/1558).
1. [Merge the release merge request and run the necessary Docker image builds](#merge-release-merge-request-and-run-docker-image-builds).
## Recommended timeline
To minimize problems during the documentation release process, use the following timeline:
- Before the 20th of the month:
[Add the charts version](#add-chart-version), so that the documentation is built using the
[version of the charts project that maps to](https://docs.gitlab.com/charts/installation/version_mappings.html)
the GitLab release. This step may have been completed already.
- On or near the 20th of the month:
1. [Create a stable branch and Docker image](#create-stable-branch-and-docker-image-for-release) for
the new version.
1. [Create a release merge request](#create-release-merge-request) for the new version, which
updates the version dropdown menu for the current documentation and adds the release to the
Docker configuration. For example, the
[release merge request for 13.9](https://gitlab.com/gitlab-org/gitlab-docs/-/merge_requests/1555).
1. [Update the three online versions](#update-dropdown-for-online-versions), so that they display the new release on their
version dropdown menus.
- On the 22nd of the month:
[Merge the release merge requests and run the necessary Docker image builds](#merge-merge-requests-and-run-docker-image-builds).
## Add chart version
......@@ -135,8 +143,12 @@ Do not merge the release merge request yet.
## Update dropdown for online versions
To update`content/_data/versions.yaml` for all online versions (stable branches `X.Y` of the
`gitlab-docs` project):
Update `content/_data/versions.yaml` for all online versions (stable branches `X.Y` of the
`gitlab-docs` project). For example:
- The merge request to [update the 13.9 version dropdown menu for the 13.9 release](https://gitlab.com/gitlab-org/gitlab-docs/-/merge_requests/1556).
- The merge request to [update the 13.8 version dropdown menu for the 13.9 release](https://gitlab.com/gitlab-org/gitlab-docs/-/merge_requests/1557).
- The merge request to [update the 13.7 version dropdown menu for the 13.9 release](https://gitlab.com/gitlab-org/gitlab-docs/-/merge_requests/1558).
1. Run the Rake task that creates all of the necessary merge requests to update the dropdowns. For
example, for the 13.9 release:
......@@ -146,13 +158,12 @@ To update`content/_data/versions.yaml` for all online versions (stable branches
./bin/rake release:dropdowns
```
These merge requests are set to automatically merge.
1. [Visit the merge requests page](https://gitlab.com/gitlab-org/gitlab-docs/-/merge_requests?label_name%5B%5D=release)
to check that their pipelines pass. After all MRs are merged, proceed to the following and final
step.
to check that their pipelines pass.
Do not merge these merge requests yet.
## Merge release merge request and run Docker image builds
## Merge merge requests and run Docker image builds
The merge requests for the dropdowns should now all be merged into their respective stable branches.
Each merge triggers a new pipeline for each stable branch. Wait for the stable branch pipelines to
......@@ -160,7 +171,9 @@ complete, then:
1. Check the [pipelines page](https://gitlab.com/gitlab-org/gitlab-docs/pipelines)
and make sure all stable branches have green pipelines.
1. After all the pipelines succeed, merge the [release merge request](#create-release-merge-request).
1. After all the pipelines succeed:
1. Merge all of the [dropdown merge requests](#update-dropdown-for-online-versions).
1. Merge the [release merge request](#create-release-merge-request).
1. Finally, run the
[`Build docker images weekly` pipeline](https://gitlab.com/gitlab-org/gitlab-docs/pipeline_schedules)
that builds the `:latest` and `:archives` Docker images.
......
......@@ -62,7 +62,7 @@ module MergeTrains
def merge!
merge_train.start_merge!
MergeRequests::MergeService.new(project, merge_user, merge_request.merge_params)
MergeRequests::MergeService.new(project, merge_user, merge_request.merge_params.with_indifferent_access)
.execute(merge_request, skip_discussions_check: true)
raise ProcessError, "failed to merge. #{merge_request.merge_error}" unless merge_request.merged?
......
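The `with_indifferent_access` call matters because `merge_params` can carry string keys while downstream readers use symbol keys; a minimal sketch of the failure mode (the key name below is hypothetical, for illustration only):

```ruby
require 'active_support/core_ext/hash/indifferent_access'

# Hypothetical merge param with a string key, as serialized params often are.
merge_params = { 'commit_message' => 'Merge branch with full description' }

merge_params[:commit_message]                          # => nil (plain Hash, symbol lookup misses)
merge_params.with_indifferent_access[:commit_message]  # => "Merge branch with full description"
```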
---
title: Fix N+1 in /projects endpoint
merge_request: 57746
author:
type: performance
......@@ -11,7 +11,7 @@ module EE
override :preload_relation
def preload_relation(projects_relation, options = {})
super(projects_relation).with_compliance_framework_settings
super(projects_relation).with_compliance_framework_settings.with_group_saml_provider
end
end
......
......@@ -38,16 +38,16 @@ RSpec.describe API::Internal::Kubernetes do
end
shared_examples 'agent authentication' do
it 'returns 403 if Authorization header not sent' do
it 'returns 401 if Authorization header not sent' do
send_request
expect(response).to have_gitlab_http_status(:forbidden)
expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'returns 403 if Authorization is for non-existent agent' do
it 'returns 401 if Authorization is for non-existent agent' do
send_request(headers: { 'Authorization' => 'Bearer NONEXISTENT' })
expect(response).to have_gitlab_http_status(:forbidden)
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
......
......@@ -90,6 +90,27 @@ RSpec.describe API::Projects do
expect(json_response.first['id']).to eq project.id
end
end
context 'when there are several projects owned by groups' do
let_it_be(:admin) { create(:admin) }
it 'avoids N+1 queries' do
create(:project, :public, namespace: create(:group))
# Warming up context
get api('/projects', admin)
control = ActiveRecord::QueryRecorder.new do
get api('/projects', admin)
end
create_list(:project, 2, :public, namespace: create(:group))
expect do
get api('/projects', admin)
end.not_to exceed_query_limit(control.count)
end
end
end
describe 'GET /projects/:id' do
......
......@@ -208,7 +208,7 @@ RSpec.describe MergeTrains::RefreshMergeRequestService do
expect(merge_request).to receive(:cleanup_refs).with(only: :train)
expect(merge_request.merge_train).to receive(:start_merge!).and_call_original
expect(merge_request.merge_train).to receive(:finish_merge!).and_call_original
expect_next_instance_of(MergeRequests::MergeService, project, maintainer, anything) do |service|
expect_next_instance_of(MergeRequests::MergeService, project, maintainer, instance_of(HashWithIndifferentAccess)) do |service|
expect(service).to receive(:execute).with(merge_request, skip_discussions_check: true).and_call_original
end
......
......@@ -127,7 +127,7 @@ module API
# as `:tags` are defined as: `has_many :tags, through: :taggings`
# N+1 is solved then by using `subject.tags.map(&:name)`
# MR describing the solution: https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/20555
super(projects_relation).preload(:group)
super(projects_relation).preload(group: :namespace_settings)
.preload(:ci_cd_settings)
.preload(:project_setting)
.preload(:container_expiration_policy)
......
......@@ -13,7 +13,7 @@ module API
helpers do
def authenticate_gitlab_kas_request!
unauthorized! unless Gitlab::Kas.verify_api_request(headers)
render_api_error!('KAS JWT authentication invalid', 401) unless Gitlab::Kas.verify_api_request(headers)
end
def agent_token
......@@ -51,7 +51,7 @@ module API
end
def check_agent_token
forbidden! unless agent_token
unauthorized! unless agent_token
forbidden! unless Gitlab::Kas.included_in_gitlab_com_rollout?(agent.project)
......
......@@ -51,10 +51,7 @@ module Gitlab
attr_reader :errors
def initialize(current_user:, group: nil, project: nil, os:, arch:)
@current_user = current_user
@group = group
@project = project
def initialize(os:, arch:)
@os = os
@arch = arch
@errors = []
......@@ -77,7 +74,7 @@ module Gitlab
server_url = Gitlab::Routing.url_helpers.root_url(only_path: false)
runner_executable = environment[:runner_executable]
"#{runner_executable} register --url #{server_url} --registration-token #{registration_token}"
"#{runner_executable} register --url #{server_url} --registration-token $REGISTRATION_TOKEN"
end
end
......@@ -108,30 +105,6 @@ module Gitlab
def get_file(path)
File.read(Rails.root.join(path).to_s)
end
def registration_token
project_token || group_token || instance_token
end
def project_token
return unless @project
raise Gitlab::Access::AccessDeniedError unless can?(@current_user, :admin_pipeline, @project)
'$REGISTRATION_TOKEN'
end
def group_token
return unless @group
raise Gitlab::Access::AccessDeniedError unless can?(@current_user, :admin_group, @group)
'$REGISTRATION_TOKEN'
end
def instance_token
raise Gitlab::Access::AccessDeniedError unless @current_user&.admin?
'$REGISTRATION_TOKEN'
end
end
end
end
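For orientation, a small usage sketch of the simplified `RunnerInstructions` API from this diff (the server URL in the comment is illustrative; the registration token is now always emitted as the `$REGISTRATION_TOKEN` placeholder):

```ruby
# Sketch only: assumes the initializer shown above, with no current_user, group, or project.
instructions = Gitlab::Ci::RunnerInstructions.new(os: 'linux', arch: 'amd64')

instructions.install_script   # OS-specific install commands for the requested platform
instructions.register_command
# => "... register --url https://gitlab.example.com/ --registration-token $REGISTRATION_TOKEN"
instructions.errors           # => [] unless the os/arch combination is unknown
```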
......@@ -16,6 +16,12 @@ module Gitlab
Rack::Timeout::RequestTimeoutException
].freeze
PROCESSORS = [
::Gitlab::ErrorTracking::Processor::SidekiqProcessor,
::Gitlab::ErrorTracking::Processor::GrpcErrorProcessor,
::Gitlab::ErrorTracking::Processor::ContextPayloadProcessor
].freeze
class << self
def configure
Raven.configure do |config|
......@@ -97,7 +103,9 @@ module Gitlab
inject_context_for_exception(event, hint[:exception])
custom_fingerprinting(event, hint[:exception])
event
PROCESSORS.reduce(event) do |processed_event, processor|
processor.call(processed_event)
end
end
def process_exception(exception, sentry: false, logging: true, extra:)
......
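In effect, `before_send` now threads the event through each processor in turn; a sketch of the equivalent explicit chain, using the names from the `PROCESSORS` constant above:

```ruby
# Each processor's .call receives the Raven event and returns it, possibly modified.
event = ::Gitlab::ErrorTracking::Processor::SidekiqProcessor.call(event)
event = ::Gitlab::ErrorTracking::Processor::GrpcErrorProcessor.call(event)
event = ::Gitlab::ErrorTracking::Processor::ContextPayloadProcessor.call(event)
```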
......@@ -9,9 +9,21 @@ module Gitlab
# integrations are re-implemented and use Gitlab::ErrorTracking, this
# processor should be removed.
def process(payload)
return payload if ::Feature.enabled?(:sentry_processors_before_send, default_enabled: :yaml)
context_payload = Gitlab::ErrorTracking::ContextPayloadGenerator.generate(nil, {})
payload.deep_merge!(context_payload)
end
def self.call(event)
return event unless ::Feature.enabled?(:sentry_processors_before_send, default_enabled: :yaml)
Gitlab::ErrorTracking::ContextPayloadGenerator.generate(nil, {}).each do |key, value|
event.public_send(key).deep_merge!(value) # rubocop:disable GitlabSecurity/PublicSend
end
event
end
end
end
end
......
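A sketch of what the new `.call` path does with the generator output (an `OpenStruct` stands in for the `Raven::Event` used in the specs further down; the context values mirror the spec fixtures):

```ruby
require 'ostruct'
require 'active_support/core_ext/hash/deep_merge'

# Stand-in event: user/tags/extra are hash sections, as on a Raven::Event.
event = OpenStruct.new(user: { ip_address: '127.0.0.1' }, tags: {}, extra: {})

context = { user: { username: 'root' }, tags: { locale: 'en' }, extra: { some_info: 'info' } }
context.each do |key, value|
  event.public_send(key).deep_merge!(value)
end

event.user # => { ip_address: "127.0.0.1", username: "root" }
```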
......@@ -6,60 +6,126 @@ module Gitlab
class GrpcErrorProcessor < ::Raven::Processor
DEBUG_ERROR_STRING_REGEX = RE2('(.*) debug_error_string:(.*)')
def process(value)
process_first_exception_value(value)
process_custom_fingerprint(value)
def process(payload)
return payload if ::Feature.enabled?(:sentry_processors_before_send, default_enabled: :yaml)
value
end
# Sentry can report multiple exceptions in an event. Sanitize
# only the first one since that's what is used for grouping.
def process_first_exception_value(value)
exceptions = value.dig(:exception, :values)
return unless exceptions.is_a?(Array)
entry = exceptions.first
return unless entry.is_a?(Hash)
exception_type = entry[:type]
raw_message = entry[:value]
return unless exception_type&.start_with?('GRPC::')
return unless raw_message.present?
message, debug_str = split_debug_error_string(raw_message)
entry[:value] = message if message
extra = value[:extra] || {}
extra[:grpc_debug_error_string] = debug_str if debug_str
end
def process_custom_fingerprint(value)
fingerprint = value[:fingerprint]
return value unless custom_grpc_fingerprint?(fingerprint)
self.class.process_first_exception_value(payload)
self.class.process_custom_fingerprint(payload)
message, _ = split_debug_error_string(fingerprint[1])
fingerprint[1] = message if message
payload
end
private
def custom_grpc_fingerprint?(fingerprint)
fingerprint.is_a?(Array) && fingerprint.length == 2 && fingerprint[0].start_with?('GRPC::')
end
def split_debug_error_string(message)
return unless message
match = DEBUG_ERROR_STRING_REGEX.match(message)
return unless match
[match[1], match[2]]
class << self
def call(event)
return event unless ::Feature.enabled?(:sentry_processors_before_send, default_enabled: :yaml)
process_first_exception_value(event)
process_custom_fingerprint(event)
event
end
# Sentry can report multiple exceptions in an event. Sanitize
# only the first one since that's what is used for grouping.
def process_first_exception_value(event_or_payload)
exceptions = exceptions(event_or_payload)
return unless exceptions.is_a?(Array)
exception = exceptions.first
return unless valid_exception?(exception)
exception_type, raw_message = type_and_value(exception)
return unless exception_type&.start_with?('GRPC::')
return unless raw_message.present?
message, debug_str = split_debug_error_string(raw_message)
set_new_values!(event_or_payload, exception, message, debug_str)
end
def process_custom_fingerprint(event)
fingerprint = fingerprint(event)
return event unless custom_grpc_fingerprint?(fingerprint)
message, _ = split_debug_error_string(fingerprint[1])
fingerprint[1] = message if message
end
private
def custom_grpc_fingerprint?(fingerprint)
fingerprint.is_a?(Array) && fingerprint.length == 2 && fingerprint[0].start_with?('GRPC::')
end
def split_debug_error_string(message)
return unless message
match = DEBUG_ERROR_STRING_REGEX.match(message)
return unless match
[match[1], match[2]]
end
# The below methods can be removed once we remove the
# sentry_processors_before_send feature flag, and we can
# assume we always have an Event object
def exceptions(event_or_payload)
case event_or_payload
when Raven::Event
# Better in new version, will be event_or_payload.exception.values
event_or_payload.instance_variable_get(:@interfaces)[:exception]&.values
when Hash
event_or_payload.dig(:exception, :values)
end
end
def valid_exception?(exception)
case exception
when Raven::SingleExceptionInterface
exception&.value
when Hash
true
else
false
end
end
def type_and_value(exception)
case exception
when Raven::SingleExceptionInterface
[exception.type, exception.value]
when Hash
exception.values_at(:type, :value)
end
end
def set_new_values!(event_or_payload, exception, message, debug_str)
case event_or_payload
when Raven::Event
# Worse in new version, no setter! Have to poke at the
# instance variable
exception.value = message if message
event_or_payload.extra[:grpc_debug_error_string] = debug_str if debug_str
when Hash
exception[:value] = message if message
extra = event_or_payload[:extra] || {}
extra[:grpc_debug_error_string] = debug_str if debug_str
end
end
def fingerprint(event_or_payload)
case event_or_payload
when Raven::Event
event_or_payload.fingerprint
when Hash
event_or_payload[:fingerprint]
end
end
end
end
end
......
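Taken on its own, the debug-string split that both code paths above share (the `re2` gem provides the top-level `RE2` helper; the message mirrors the spec fixtures below):

```ruby
require 're2'

DEBUG_ERROR_STRING_REGEX = RE2('(.*) debug_error_string:(.*)')

match = DEBUG_ERROR_STRING_REGEX.match('4:Deadline Exceeded. debug_error_string:{"hello":1}')
match[1] # => "4:Deadline Exceeded."   kept as the exception value and fingerprint
match[2] # => "{\"hello\":1}"          moved to extra[:grpc_debug_error_string]
```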
......@@ -8,39 +8,66 @@ module Gitlab
class SidekiqProcessor < ::Raven::Processor
FILTERED_STRING = '[FILTERED]'
def self.filter_arguments(args, klass)
args.lazy.with_index.map do |arg, i|
case arg
when Numeric
arg
else
if permitted_arguments_for_worker(klass).include?(i)
class << self
def filter_arguments(args, klass)
args.lazy.with_index.map do |arg, i|
case arg
when Numeric
arg
else
FILTERED_STRING
if permitted_arguments_for_worker(klass).include?(i)
arg
else
FILTERED_STRING
end
end
end
end
end
def self.permitted_arguments_for_worker(klass)
@permitted_arguments_for_worker ||= {}
@permitted_arguments_for_worker[klass] ||=
begin
klass.constantize&.loggable_arguments&.to_set
rescue
Set.new
def permitted_arguments_for_worker(klass)
@permitted_arguments_for_worker ||= {}
@permitted_arguments_for_worker[klass] ||=
begin
klass.constantize&.loggable_arguments&.to_set
rescue
Set.new
end
end
def loggable_arguments(args, klass)
Gitlab::Utils::LogLimitedArray
.log_limited_array(filter_arguments(args, klass))
.map(&:to_s)
.to_a
end
def call(event)
return event unless ::Feature.enabled?(:sentry_processors_before_send, default_enabled: :yaml)
sidekiq = event&.extra&.dig(:sidekiq)
return event unless sidekiq
sidekiq = sidekiq.deep_dup
sidekiq.delete(:jobstr)
# 'args' in this hash => from Gitlab::ErrorTracking.track_*
# 'args' in :job => from default error handler
job_holder = sidekiq.key?('args') ? sidekiq : sidekiq[:job]
if job_holder['args']
job_holder['args'] = filter_arguments(job_holder['args'], job_holder['class']).to_a
end
end
def self.loggable_arguments(args, klass)
Gitlab::Utils::LogLimitedArray
.log_limited_array(filter_arguments(args, klass))
.map(&:to_s)
.to_a
event.extra[:sidekiq] = sidekiq
event
end
end
def process(value, key = nil)
return value if ::Feature.enabled?(:sentry_processors_before_send, default_enabled: :yaml)
sidekiq = value.dig(:extra, :sidekiq)
return value unless sidekiq
......
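A quick illustration of the filtering behavior, mirroring the spec below: numeric arguments always pass through, and other positions are masked unless the worker's `loggable_arguments` permits that index.

```ruby
# 'UnknownWorker' has no loggable_arguments, so only numeric positions survive.
args = [1, 'secret-token', { 'id' => 2 }]

Gitlab::ErrorTracking::Processor::SidekiqProcessor
  .filter_arguments(args, 'UnknownWorker')
  .to_a # filter_arguments is lazy, so materialize it
# => [1, "[FILTERED]", "[FILTERED]"]
```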
......@@ -23,7 +23,7 @@ RSpec.describe 'Clusterable > Show page' do
it 'allow the user to set domain', :js do
visit cluster_path
within '.js-cluster-integration-form' do
within '.js-cluster-details-form' do
fill_in('cluster_base_domain', with: 'test.com')
click_on 'Save changes'
end
......@@ -39,7 +39,7 @@ RSpec.describe 'Clusterable > Show page' do
end
it 'shows help text with the domain as an alternative to custom domain', :js do
within '.js-cluster-integration-form' do
within '.js-cluster-details-form' do
expect(find(cluster_ingress_help_text_selector).text).to include('192.168.1.100')
end
end
......@@ -49,7 +49,7 @@ RSpec.describe 'Clusterable > Show page' do
it 'alternative to custom domain is not shown' do
visit cluster_path
within '.js-cluster-integration-form' do
within '.js-cluster-details-form' do
expect(page).not_to have_selector(cluster_ingress_help_text_selector)
end
end
......
......@@ -97,7 +97,7 @@ RSpec.describe 'User Cluster', :js do
context 'when user disables the cluster' do
before do
page.find(:css, '.js-cluster-enable-toggle-area .js-project-feature-toggle').click
page.within('.js-cluster-integration-form') { click_button 'Save changes' }
page.within('.js-cluster-details-form') { click_button 'Save changes' }
end
it 'user sees the successful message' do
......
......@@ -119,7 +119,7 @@ RSpec.describe 'Gcp Cluster', :js do
context 'when user disables the cluster' do
before do
page.find(:css, '.js-cluster-enable-toggle-area .js-project-feature-toggle').click
page.within('.js-cluster-integration-form') { click_button 'Save changes' }
page.within('.js-cluster-details-form') { click_button 'Save changes' }
end
it 'user sees the successful message' do
......
......@@ -84,7 +84,7 @@ RSpec.describe 'User Cluster', :js do
context 'when user disables the cluster' do
before do
page.find(:css, '.js-cluster-enable-toggle-area .js-project-feature-toggle').click
page.within('.js-cluster-integration-form') { click_button 'Save changes' }
page.within('.js-cluster-details-form') { click_button 'Save changes' }
end
it 'user sees the successful message' do
......
......@@ -58,7 +58,7 @@ RSpec.describe 'Clusters', :js do
before do
click_link 'default-cluster'
fill_in 'cluster_environment_scope', with: 'production/*'
within '.js-cluster-integration-form' do
within '.js-cluster-details-form' do
click_button 'Save changes'
end
end
......@@ -149,7 +149,7 @@ RSpec.describe 'Clusters', :js do
before do
click_link 'default-cluster'
fill_in 'cluster_environment_scope', with: 'production/*'
within ".js-cluster-integration-form" do
within ".js-cluster-details-form" do
click_button 'Save changes'
end
end
......
......@@ -8,12 +8,11 @@ RSpec.describe Resolvers::Ci::RunnerSetupResolver do
describe '#resolve' do
let(:user) { create(:user) }
subject(:resolve_subject) { resolve(described_class, ctx: { current_user: user }, args: { platform: platform, architecture: 'amd64' }.merge(target_param)) }
subject(:resolve_subject) { resolve(described_class, ctx: { current_user: user }, args: { platform: platform, architecture: 'amd64' }) }
context 'with container platforms' do
let(:platform) { 'docker' }
let(:project) { create(:project) }
let(:target_param) { { project_id: project.to_global_id } }
it 'returns install instructions' do
expect(resolve_subject[:install_instructions]).not_to eq(nil)
......@@ -27,77 +26,9 @@ RSpec.describe Resolvers::Ci::RunnerSetupResolver do
context 'with regular platforms' do
let(:platform) { 'linux' }
context 'without target parameter' do
let(:target_param) { {} }
context 'when user is not admin' do
it 'returns access error' do
expect { resolve_subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
context 'when user is admin' do
before do
user.update!(admin: true)
end
it 'returns install and register instructions' do
expect(resolve_subject.keys).to contain_exactly(:install_instructions, :register_instructions)
expect(resolve_subject.values).not_to include(nil)
end
end
end
context 'with project target parameter' do
let(:project) { create(:project) }
let(:target_param) { { project_id: project.to_global_id } }
context 'when user has access to admin builds on project' do
before do
project.add_maintainer(user)
end
it 'returns install and register instructions' do
expect(resolve_subject.keys).to contain_exactly(:install_instructions, :register_instructions)
expect(resolve_subject.values).not_to include(nil)
end
end
context 'when user does not have access to admin builds on project' do
before do
project.add_developer(user)
end
it 'returns access error' do
expect { resolve_subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
end
context 'with group target parameter' do
let(:group) { create(:group) }
let(:target_param) { { group_id: group.to_global_id } }
context 'when user has access to admin builds on group' do
before do
group.add_owner(user)
end
it 'returns install and register instructions' do
expect(resolve_subject.keys).to contain_exactly(:install_instructions, :register_instructions)
expect(resolve_subject.values).not_to include(nil)
end
end
context 'when user does not have access to admin builds on group' do
before do
group.add_developer(user)
end
it 'returns access error' do
expect { resolve_subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
it 'returns install and register instructions' do
expect(resolve_subject.keys).to contain_exactly(:install_instructions, :register_instructions)
expect(resolve_subject.values).not_to include(nil)
end
end
end
......
......@@ -6,7 +6,6 @@ RSpec.describe Gitlab::Ci::RunnerInstructions do
using RSpec::Parameterized::TableSyntax
let(:params) { {} }
let(:user) { create(:user) }
describe 'OS' do
Gitlab::Ci::RunnerInstructions::OS.each do |name, subject|
......@@ -37,7 +36,7 @@ RSpec.describe Gitlab::Ci::RunnerInstructions do
end
describe '#install_script' do
subject { described_class.new(current_user: user, **params) }
subject { described_class.new(**params) }
context 'invalid params' do
where(:current_params, :expected_error_message) do
......@@ -106,117 +105,18 @@ RSpec.describe Gitlab::Ci::RunnerInstructions do
end
end
context 'group' do
let(:group) { create(:group) }
subject { described_class.new(current_user: user, group: group, **params) }
context 'user is owner' do
before do
group.add_owner(user)
end
with_them do
let(:params) { { os: commands.each_key.first, arch: 'foo' } }
it 'have correct configurations' do
result = subject.register_command
expect(result).to include("#{commands[commands.each_key.first]} register")
expect(result).to include("--registration-token $REGISTRATION_TOKEN")
expect(result).to include("--url #{Gitlab::Routing.url_helpers.root_url(only_path: false)}")
end
end
end
context 'user is not owner' do
where(:user_permission) do
[:maintainer, :developer, :reporter, :guest]
end
with_them do
before do
create(:group_member, user_permission, group: group, user: user)
end
it 'raises error' do
result = subject.register_command
expect(result).to be_nil
expect(subject.errors).to include("Gitlab::Access::AccessDeniedError")
end
end
end
end
context 'project' do
let(:project) { create(:project) }
subject { described_class.new(current_user: user, project: project, **params) }
context 'user is maintainer' do
before do
project.add_maintainer(user)
end
with_them do
let(:params) { { os: commands.each_key.first, arch: 'foo' } }
it 'have correct configurations' do
result = subject.register_command
expect(result).to include("#{commands[commands.each_key.first]} register")
expect(result).to include("--registration-token $REGISTRATION_TOKEN")
expect(result).to include("--url #{Gitlab::Routing.url_helpers.root_url(only_path: false)}")
end
end
end
context 'user is not maintainer' do
where(:user_permission) do
[:developer, :reporter, :guest]
end
with_them do
before do
create(:project_member, user_permission, project: project, user: user)
end
it 'raises error' do
result = subject.register_command
expect(result).to be_nil
expect(subject.errors).to include("Gitlab::Access::AccessDeniedError")
end
end
end
end
context 'instance' do
subject { described_class.new(current_user: user, **params) }
context 'user is admin' do
let(:user) { create(:user, :admin) }
with_them do
let(:params) { { os: commands.each_key.first, arch: 'foo' } }
subject { described_class.new(**params) }
it 'have correct configurations' do
result = subject.register_command
expect(result).to include("#{commands[commands.each_key.first]} register")
expect(result).to include("--registration-token $REGISTRATION_TOKEN")
expect(result).to include("--url #{Gitlab::Routing.url_helpers.root_url(only_path: false)}")
end
end
end
with_them do
let(:params) { { os: commands.each_key.first, arch: 'foo' } }
context 'user is not admin' do
it 'raises error' do
it 'have correct configurations' do
result = subject.register_command
expect(result).to be_nil
expect(subject.errors).to include("Gitlab::Access::AccessDeniedError")
expect(result).to include("#{commands[commands.each_key.first]} register")
expect(result).to include("--registration-token $REGISTRATION_TOKEN")
expect(result).to include("--url #{Gitlab::Routing.url_helpers.root_url(only_path: false)}")
end
end
end
......
# frozen_string_literal: true
require 'fast_spec_helper'
require 'spec_helper'
RSpec.describe Gitlab::ErrorTracking::Processor::ContextPayloadProcessor do
subject(:processor) { described_class.new }
before do
allow_next_instance_of(Gitlab::ErrorTracking::ContextPayloadGenerator) do |generator|
allow(generator).to receive(:generate).and_return(
user: { username: 'root' },
tags: { locale: 'en', program: 'test', feature_category: 'feature_a', correlation_id: 'cid' },
extra: { some_info: 'info' }
)
shared_examples 'processing an exception' do
before do
allow_next_instance_of(Gitlab::ErrorTracking::ContextPayloadGenerator) do |generator|
allow(generator).to receive(:generate).and_return(
user: { username: 'root' },
tags: { locale: 'en', program: 'test', feature_category: 'feature_a', correlation_id: 'cid' },
extra: { some_info: 'info' }
)
end
end
end
it 'merges the context payload into event payload' do
payload = {
user: { ip_address: '127.0.0.1' },
tags: { priority: 'high' },
extra: { sidekiq: { class: 'SomeWorker', args: ['[FILTERED]', 1, 2] } }
}
processor.process(payload)
expect(payload).to eql(
user: {
ip_address: '127.0.0.1',
username: 'root'
},
tags: {
priority: 'high',
locale: 'en',
program: 'test',
feature_category: 'feature_a',
correlation_id: 'cid'
},
extra: {
some_info: 'info',
sidekiq: { class: 'SomeWorker', args: ['[FILTERED]', 1, 2] }
let(:payload) do
{
user: { ip_address: '127.0.0.1' },
tags: { priority: 'high' },
extra: { sidekiq: { class: 'SomeWorker', args: ['[FILTERED]', 1, 2] } }
}
)
end
it 'merges the context payload into event payload', :aggregate_failures do
expect(result_hash[:user]).to eq(ip_address: '127.0.0.1', username: 'root')
expect(result_hash[:tags])
.to eq(priority: 'high',
locale: 'en',
program: 'test',
feature_category: 'feature_a',
correlation_id: 'cid')
expect(result_hash[:extra])
.to include(some_info: 'info',
sidekiq: { class: 'SomeWorker', args: ['[FILTERED]', 1, 2] })
end
end
describe '.call' do
let(:event) { Raven::Event.new(payload) }
let(:result_hash) { described_class.call(event).to_hash }
it_behaves_like 'processing an exception'
context 'when followed by #process' do
let(:result_hash) { described_class.new.process(described_class.call(event).to_hash) }
it_behaves_like 'processing an exception'
end
end
describe '#process' do
let(:event) { Raven::Event.new(payload) }
let(:result_hash) { described_class.new.process(event.to_hash) }
context 'with sentry_processors_before_send disabled' do
before do
stub_feature_flags(sentry_processors_before_send: false)
end
it_behaves_like 'processing an exception'
end
end
end
......@@ -3,73 +3,83 @@
require 'spec_helper'
RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
describe '#process' do
subject { described_class.new }
shared_examples 'processing an exception' do
context 'when there is no GRPC exception' do
let(:exception) { RuntimeError.new }
let(:data) { { fingerprint: ['ArgumentError', 'Missing arguments'] } }
it 'leaves data unchanged' do
expect(subject.process(data)).to eq(data)
expect(result_hash).to include(data)
end
end
context 'when there is a GPRC exception with a debug string' do
let(:exception) { GRPC::DeadlineExceeded.new('Deadline Exceeded', {}, '{"hello":1}') }
let(:data) do
{
exception: {
values: [
{
type: "GRPC::DeadlineExceeded",
value: "4:DeadlineExceeded. debug_error_string:{\"hello\":1}"
}
]
},
extra: {
caller: 'test'
},
fingerprint: [
"GRPC::DeadlineExceeded",
"4:Deadline Exceeded. debug_error_string:{\"created\":\"@1598938192.005782000\",\"description\":\"Error received from peer unix:/home/git/gitalypraefect.socket\",\"file\":\"src/core/lib/surface/call.cc\",\"file_line\":1055,\"grpc_message\":\"Deadline Exceeded\",\"grpc_status\":4}"
'GRPC::DeadlineExceeded',
'4:Deadline Exceeded. debug_error_string:{"created":"@1598938192.005782000","description":"Error received from peer unix:/home/git/gitalypraefect.socket","file":"src/core/lib/surface/call.cc","file_line":1055,"grpc_message":"Deadline Exceeded","grpc_status":4}'
]
}
end
let(:expected) do
{
fingerprint: [
"GRPC::DeadlineExceeded",
"4:Deadline Exceeded."
],
exception: {
values: [
{
type: "GRPC::DeadlineExceeded",
value: "4:DeadlineExceeded."
}
]
},
extra: {
caller: 'test',
grpc_debug_error_string: "{\"hello\":1}"
}
}
end
it 'removes the debug error string and stores it as an extra field' do
expect(subject.process(data)).to eq(expected)
expect(result_hash[:fingerprint])
.to eq(['GRPC::DeadlineExceeded', '4:Deadline Exceeded.'])
expect(result_hash[:exception][:values].first)
.to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
expect(result_hash[:extra])
.to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
end
context 'with no custom fingerprint' do
before do
data.delete(:fingerprint)
expected.delete(:fingerprint)
let(:data) do
{ extra: { caller: 'test' } }
end
it 'removes the debug error string and stores it as an extra field' do
expect(subject.process(data)).to eq(expected)
expect(result_hash).not_to include(:fingerprint)
expect(result_hash[:exception][:values].first)
.to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
expect(result_hash[:extra])
.to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
end
end
end
end
describe '.call' do
let(:event) { Raven::Event.from_exception(exception, data) }
let(:result_hash) { described_class.call(event).to_hash }
it_behaves_like 'processing an exception'
context 'when followed by #process' do
let(:result_hash) { described_class.new.process(described_class.call(event).to_hash) }
it_behaves_like 'processing an exception'
end
end
describe '#process' do
let(:event) { Raven::Event.from_exception(exception, data) }
let(:result_hash) { described_class.new.process(event.to_hash) }
context 'with sentry_processors_before_send disabled' do
before do
stub_feature_flags(sentry_processors_before_send: false)
end
it_behaves_like 'processing an exception'
end
end
end
......@@ -94,28 +94,37 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor do
end
end
describe '#process' do
shared_examples 'processing an exception' do
context 'when there is Sidekiq data' do
let(:wrapped_value) { { extra: { sidekiq: value } } }
shared_examples 'Sidekiq arguments' do |args_in_job_hash: true|
let(:path) { [:extra, :sidekiq, args_in_job_hash ? :job : nil, 'args'].compact }
let(:args) { [1, 'string', { a: 1 }, [1, 2]] }
it 'only allows numeric arguments for an unknown worker' do
value = { 'args' => args, 'class' => 'UnknownWorker' }
context 'for an unknown worker' do
let(:value) do
hash = { 'args' => args, 'class' => 'UnknownWorker' }
value = { job: value } if args_in_job_hash
args_in_job_hash ? { job: hash } : hash
end
expect(subject.process(extra_sidekiq(value)).dig(*path))
.to eq([1, described_class::FILTERED_STRING, described_class::FILTERED_STRING, described_class::FILTERED_STRING])
it 'only allows numeric arguments for an unknown worker' do
expect(result_hash.dig(*path))
.to eq([1, described_class::FILTERED_STRING, described_class::FILTERED_STRING, described_class::FILTERED_STRING])
end
end
it 'allows all argument types for a permitted worker' do
value = { 'args' => args, 'class' => 'PostReceive' }
context 'for a permitted worker' do
let(:value) do
hash = { 'args' => args, 'class' => 'PostReceive' }
value = { job: value } if args_in_job_hash
args_in_job_hash ? { job: hash } : hash
end
expect(subject.process(extra_sidekiq(value)).dig(*path))
.to eq(args)
it 'allows all argument types for a permitted worker' do
expect(result_hash.dig(*path)).to eq(args)
end
end
end
......@@ -127,39 +136,62 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor do
include_examples 'Sidekiq arguments', args_in_job_hash: false
end
it 'removes a jobstr field if present' do
value = {
job: { 'args' => [1] },
jobstr: { 'args' => [1] }.to_json
}
context 'when a jobstr field is present' do
let(:value) do
{
job: { 'args' => [1] },
jobstr: { 'args' => [1] }.to_json
}
end
expect(subject.process(extra_sidekiq(value)))
.to eq(extra_sidekiq(value.except(:jobstr)))
it 'removes the jobstr' do
expect(result_hash.dig(:extra, :sidekiq)).to eq(value.except(:jobstr))
end
end
it 'does nothing with no jobstr' do
value = { job: { 'args' => [1] } }
context 'when no jobstr value is present' do
let(:value) { { job: { 'args' => [1] } } }
expect(subject.process(extra_sidekiq(value)))
.to eq(extra_sidekiq(value))
it 'does nothing' do
expect(result_hash.dig(:extra, :sidekiq)).to eq(value)
end
end
end
context 'when there is no Sidekiq data' do
it 'does nothing' do
value = {
request: {
method: 'POST',
data: { 'key' => 'value' }
}
}
let(:value) { { tags: { foo: 'bar', baz: 'quux' } } }
let(:wrapped_value) { value }
expect(subject.process(value)).to eq(value)
it 'does nothing' do
expect(result_hash).to include(value)
expect(result_hash.dig(:extra, :sidekiq)).to be_nil
end
end
end
describe '.call' do
let(:event) { Raven::Event.new(wrapped_value) }
let(:result_hash) { described_class.call(event).to_hash }
it_behaves_like 'processing an exception'
context 'when followed by #process' do
let(:result_hash) { described_class.new.process(described_class.call(event).to_hash) }
it_behaves_like 'processing an exception'
end
end
describe '#process' do
let(:event) { Raven::Event.new(wrapped_value) }
let(:result_hash) { described_class.new.process(event.to_hash) }
context 'with sentry_processors_before_send disabled' do
before do
stub_feature_flags(sentry_processors_before_send: false)
end
def extra_sidekiq(hash)
{ extra: { sidekiq: hash } }
it_behaves_like 'processing an exception'
end
end
end
......@@ -7,6 +7,7 @@ require 'raven/transports/dummy'
RSpec.describe Gitlab::ErrorTracking do
let(:exception) { RuntimeError.new('boom') }
let(:issue_url) { 'http://gitlab.com/gitlab-org/gitlab-foss/issues/1' }
let(:extra) { { issue_url: issue_url, some_other_info: 'info' } }
let(:user) { create(:user) }
......@@ -42,6 +43,8 @@ RSpec.describe Gitlab::ErrorTracking do
}
end
let(:sentry_event) { Gitlab::Json.parse(Raven.client.transport.events.last[1]) }
before do
stub_sentry_settings
......@@ -133,8 +136,6 @@ RSpec.describe Gitlab::ErrorTracking do
end
describe '.track_exception' do
let(:extra) { { issue_url: issue_url, some_other_info: 'info' } }
subject(:track_exception) { described_class.track_exception(exception, extra) }
before do
......@@ -195,6 +196,55 @@ RSpec.describe Gitlab::ErrorTracking do
end
end
context 'when the error is kind of an `ActiveRecord::StatementInvalid`' do
let(:exception) { ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = $1') }
it 'injects the normalized sql query into extra' do
track_exception
expect(sentry_event.dig('extra', 'sql')).to eq('SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1')
end
end
context 'when the `ActiveRecord::StatementInvalid` is wrapped in another exception' do
it 'injects the normalized sql query into extra' do
allow(exception).to receive(:cause).and_return(ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = $1'))
track_exception
expect(sentry_event.dig('extra', 'sql')).to eq('SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1')
end
end
end
shared_examples 'event processors' do
subject(:track_exception) { described_class.track_exception(exception, extra) }
before do
allow(Raven).to receive(:capture_exception).and_call_original
allow(Gitlab::ErrorTracking::Logger).to receive(:error)
end
context 'custom GitLab context when using Raven.capture_exception directly' do
subject(:raven_capture_exception) { Raven.capture_exception(exception) }
it 'merges a default set of tags into the existing tags' do
allow(Raven.context).to receive(:tags).and_return(foo: 'bar')
raven_capture_exception
expect(sentry_event['tags']).to include('correlation_id', 'feature_category', 'foo', 'locale', 'program')
end
it 'merges the current user information into the existing user information' do
Raven.user_context(id: -1)
raven_capture_exception
expect(sentry_event['user']).to eq('id' => -1, 'username' => user.username)
end
end
context 'with sidekiq args' do
context 'when the args does not have anything sensitive' do
let(:extra) { { sidekiq: { 'class' => 'PostReceive', 'args' => [1, { 'id' => 2, 'name' => 'hello' }, 'some-value', 'another-value'] } } }
......@@ -211,16 +261,20 @@ RSpec.describe Gitlab::ErrorTracking do
)
)
end
it 'does not filter parameters when sending to Sentry' do
track_exception
expect(sentry_event.dig('extra', 'sidekiq', 'args')).to eq([1, { 'id' => 2, 'name' => 'hello' }, 'some-value', 'another-value'])
end
end
context 'when the args has sensitive information' do
let(:extra) { { sidekiq: { 'class' => 'UnknownWorker', 'args' => ['sensitive string', 1, 2] } } }
it 'filters sensitive arguments before sending' do
it 'filters sensitive arguments before sending and logging' do
track_exception
sentry_event = Gitlab::Json.parse(Raven.client.transport.events.last[1])
expect(sentry_event.dig('extra', 'sidekiq', 'args')).to eq(['[FILTERED]', 1, 2])
expect(Gitlab::ErrorTracking::Logger).to have_received(:error).with(
hash_including(
......@@ -234,28 +288,44 @@ RSpec.describe Gitlab::ErrorTracking do
end
end
context 'when the error is kind of an `ActiveRecord::StatementInvalid`' do
let(:exception) { ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = $1') }
context 'when the error is a GRPC error' do
context 'when the GRPC error contains a debug_error_string value' do
let(:exception) { GRPC::DeadlineExceeded.new('unknown cause', {}, '{"hello":1}') }
it 'injects the normalized sql query into extra' do
allow(Raven.client.transport).to receive(:send_event) do |event|
expect(event.extra).to include(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1')
it 'sets the GRPC debug error string in the Sentry event and adds a custom fingerprint' do
track_exception
expect(sentry_event.dig('extra', 'grpc_debug_error_string')).to eq('{"hello":1}')
expect(sentry_event['fingerprint']).to eq(['GRPC::DeadlineExceeded', '4:unknown cause.'])
end
end
track_exception
context 'when the GRPC error does not contain a debug_error_string value' do
let(:exception) { GRPC::DeadlineExceeded.new }
it 'does not do any processing on the event' do
track_exception
expect(sentry_event['extra']).not_to include('grpc_debug_error_string')
expect(sentry_event['fingerprint']).to eq(['GRPC::DeadlineExceeded', '4:unknown cause'])
end
end
end
end
context 'when the `ActiveRecord::StatementInvalid` is wrapped in another exception' do
let(:exception) { RuntimeError.new(cause: ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = $1')) }
context 'with sentry_processors_before_send enabled' do
before do
stub_feature_flags(sentry_processors_before_send: true)
end
it 'injects the normalized sql query into extra' do
allow(Raven.client.transport).to receive(:send_event) do |event|
expect(event.extra).to include(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1')
end
include_examples 'event processors'
end
track_exception
end
context 'with sentry_processors_before_send disabled' do
before do
stub_feature_flags(sentry_processors_before_send: false)
end
include_examples 'event processors'
end
end
......@@ -38,16 +38,16 @@ RSpec.describe API::Internal::Kubernetes do
end
shared_examples 'agent authentication' do
it 'returns 403 if Authorization header not sent' do
it 'returns 401 if Authorization header not sent' do
send_request
expect(response).to have_gitlab_http_status(:forbidden)
expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'returns 403 if Authorization is for non-existent agent' do
it 'returns 401 if Authorization is for non-existent agent' do
send_request(headers: { 'Authorization' => 'Bearer NONEXISTENT' })
expect(response).to have_gitlab_http_status(:forbidden)
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
......
......@@ -85,7 +85,7 @@ func TestConfigDefaults(t *testing.T) {
DocumentRoot: "public",
ProxyHeadersTimeout: 5 * time.Minute,
APIQueueTimeout: queueing.DefaultTimeout,
APICILongPollingDuration: 50 * time.Second,
APICILongPollingDuration: 50 * time.Nanosecond, // TODO this is meant to be 50*time.Second but it has been wrong for ages
ImageResizerConfig: config.DefaultImageResizerConfig,
}
......
......@@ -102,7 +102,7 @@ func buildConfig(arg0 string, args []string) (*bootConfig, *config.Config, error
fset.UintVar(&cfg.APILimit, "apiLimit", 0, "Number of API requests allowed at single time")
fset.UintVar(&cfg.APIQueueLimit, "apiQueueLimit", 0, "Number of API requests allowed to be queued")
fset.DurationVar(&cfg.APIQueueTimeout, "apiQueueDuration", queueing.DefaultTimeout, "Maximum queueing duration of requests")
fset.DurationVar(&cfg.APICILongPollingDuration, "apiCiLongPollingDuration", 50*time.Second, "Long polling duration for job requesting for runners (default 50s - enabled)")
fset.DurationVar(&cfg.APICILongPollingDuration, "apiCiLongPollingDuration", 50, "Long polling duration for job requesting for runners (default 50s - enabled)")
fset.BoolVar(&cfg.PropagateCorrelationID, "propagateCorrelationID", false, "Reuse existing Correlation-ID from the incoming request header `X-Request-ID` if present")
if err := fset.Parse(args); err != nil {
......