Commit 0810b3bf authored by GitLab Bot

Automatic merge of gitlab-org/gitlab master

parents 02dd9b1b f318bef3
......@@ -462,7 +462,7 @@ class ApplicationController < ActionController::Base
feature_category: feature_category) do
yield
ensure
@current_context = Labkit::Context.current.to_h
@current_context = Gitlab::ApplicationContext.current
end
end
......
......@@ -8,7 +8,7 @@ module Mutations
ADMIN_MESSAGE = 'You must be an admin to use this mutation'
Labkit::Context::KNOWN_KEYS.each do |key|
Gitlab::ApplicationContext::KNOWN_KEYS.each do |key|
argument key,
GraphQL::STRING_TYPE,
required: false,
......
......@@ -21,11 +21,17 @@ class BuildArtifactEntity < Grape::Entity
)
end
expose :keep_path, if: -> (*) { artifact.expiring? } do |artifact|
expose :keep_path, if: -> (*) { artifact.expiring? && show_duplicated_paths?(artifact.project) } do |artifact|
fast_keep_project_job_artifacts_path(artifact.project, artifact.job)
end
expose :browse_path do |artifact|
expose :browse_path, if: -> (*) { show_duplicated_paths?(artifact.project) } do |artifact|
fast_browse_project_job_artifacts_path(artifact.project, artifact.job)
end
private
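# keep_path and browse_path above are only exposed while the
# remove_duplicate_artifact_exposure_paths feature flag is disabled
# for the artifact's project.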
def show_duplicated_paths?(project)
!Gitlab::Ci::Features.remove_duplicate_artifact_exposure_paths?(project)
end
end
......@@ -18,7 +18,7 @@ module ApplicationWorker
set_queue
def structured_payload(payload = {})
context = Labkit::Context.current.to_h.merge(
context = Gitlab::ApplicationContext.current.merge(
'class' => self.class.name,
'job_status' => 'running',
'queue' => self.class.queue,
......
......@@ -15,7 +15,7 @@ module CronjobQueue
# Cronjobs never get scheduled with arguments, so this is safe to
# override
def context_for_arguments(_args)
return if Gitlab::ApplicationContext.current_context_include?('meta.caller_id')
return if Gitlab::ApplicationContext.current_context_include?(:caller_id)
Gitlab::ApplicationContext.new(caller_id: "Cronjob")
end
......
---
name: remove_duplicate_artifact_exposure_paths
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/54611
rollout_issue_url:
milestone: '13.10'
type: development
group: group::testing
default_enabled: false
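# Disabled by default. When enabled for a project, it removes the duplicated
# keep_path/browse_path exposure from BuildArtifactEntity (see
# Gitlab::Ci::Features.remove_duplicate_artifact_exposure_paths?).
# As a sketch, an administrator could toggle it from the Rails console with
# the same pattern the docs below use for other flags, for example:
#   Feature.enable(:remove_duplicate_artifact_exposure_paths)
#   Feature.disable(:remove_duplicate_artifact_exposure_paths)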
......@@ -340,6 +340,31 @@ As in other list types, click the trash icon to remove a list.
![Milestone lists](img/issue_board_milestone_lists_v13_6.png)
### Iteration lists **(PREMIUM)**
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/250479) in GitLab 13.10.
> - It's [deployed behind the `board_new_list` feature flag](../feature_flags.md), disabled by default.
> - It's disabled on GitLab.com.
> - It's not recommended for production use.
> - To use it in GitLab self-managed instances, ask a GitLab administrator to [enable it](#enable-or-disable-new-add-list-form).
WARNING:
This feature might not be available to you. Check the **version history** note above for details.
You can also create iteration lists. These lists filter issues by their
assigned iteration. To add an iteration list:
1. Select **Create list**.
1. Select **Iteration**.
1. In the dropdown, select an iteration.
1. Select **Add to board**.
Like the milestone lists, you can [drag issues](#drag-issues-between-lists)
to and from an iteration list to manipulate the iteration of the dragged issues.
![Iteration lists](img/issue_board_iteration_lists_v13_10.png)
### Group issues in swimlanes **(PREMIUM)**
> - Grouping by epic [introduced](https://gitlab.com/groups/gitlab-org/-/epics/3352) in [GitLab Premium](https://about.gitlab.com/pricing/) 13.6.
......@@ -649,3 +674,22 @@ To disable it:
```ruby
Feature.disable(:add_issues_button)
```
### Enable or disable new add list form **(FREE SELF)**
The new form for adding lists is under development and not ready for production use. It is
deployed behind a feature flag that is **disabled by default**.
[GitLab administrators with access to the GitLab Rails console](../../administration/feature_flags.md)
can enable it.
To enable it:
```ruby
Feature.enable(:board_new_list)
```
To disable it:
```ruby
Feature.disable(:board_new_list)
```
......@@ -38,7 +38,6 @@ import {
ERROR_MESSAGES,
SCANNER_PROFILES_QUERY,
SITE_PROFILES_QUERY,
SITE_PROFILES_EXTENDED_QUERY,
TYPE_SITE_PROFILE,
TYPE_SCANNER_PROFILE,
} from '../settings';
......@@ -101,15 +100,11 @@ export default {
'selectedScannerProfileId',
SCANNER_PROFILES_QUERY,
),
siteProfiles() {
return createProfilesApolloOptions(
'siteProfiles',
'selectedSiteProfileId',
this.glFeatures.securityDastSiteProfilesAdditionalFields
? SITE_PROFILES_EXTENDED_QUERY
: SITE_PROFILES_QUERY,
);
},
siteProfiles: createProfilesApolloOptions(
'siteProfiles',
'selectedSiteProfileId',
SITE_PROFILES_QUERY,
),
},
inject: {
dastSiteValidationDocsPath: {
......@@ -233,6 +228,9 @@ export default {
selectedSiteProfileId,
};
},
hasExcludedUrls() {
return this.selectedSiteProfile.excludedUrls?.length > 0;
},
},
created() {
const params = queryToObject(window.location.search);
......@@ -499,6 +497,10 @@ export default {
:label="s__('DastProfiles|Username')"
:value="selectedSiteProfile.auth.username"
/>
<profile-selector-summary-cell
:label="s__('DastProfiles|Password')"
value="••••••••"
/>
</div>
<div class="row">
<profile-selector-summary-cell
......@@ -513,12 +515,14 @@ export default {
</template>
<div class="row">
<profile-selector-summary-cell
v-if="hasExcludedUrls"
:label="s__('DastProfiles|Excluded URLs')"
:value="selectedSiteProfile.excludedUrls.join($options.EXCLUDED_URLS_SEPARATOR)"
/>
<profile-selector-summary-cell
v-if="selectedSiteProfile.requestHeaders"
:label="s__('DastProfiles|Request headers')"
:value="selectedSiteProfile.requestHeaders"
:value="__('[Redacted]')"
/>
</div>
</template>
......
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { resolvers } from 'ee/security_configuration/dast_profiles/graphql/provider';
import createDefaultClient from '~/lib/graphql';
Vue.use(VueApollo);
export default new VueApollo({
defaultClient: createDefaultClient(),
defaultClient: createDefaultClient(resolvers),
});
import dastScannerProfilesQuery from 'ee/security_configuration/dast_profiles/graphql/dast_scanner_profiles.query.graphql';
import dastSiteProfilesQuery from 'ee/security_configuration/dast_profiles/graphql/dast_site_profiles.query.graphql';
import dastSiteProfilesExtendedQuery from 'ee/security_configuration/dast_profiles/graphql/dast_site_profiles_extended.query.graphql';
import { s__ } from '~/locale';
export const ERROR_RUN_SCAN = 'ERROR_RUN_SCAN';
......@@ -29,10 +28,5 @@ export const SITE_PROFILES_QUERY = {
fetchError: ERROR_FETCH_SITE_PROFILES,
};
export const SITE_PROFILES_EXTENDED_QUERY = {
...SITE_PROFILES_QUERY,
fetchQuery: dastSiteProfilesExtendedQuery,
};
export const TYPE_SITE_PROFILE = 'DastSiteProfile';
export const TYPE_SCANNER_PROFILE = 'DastScannerProfile';
......@@ -17,6 +17,15 @@ query DastSiteProfiles($fullPath: ID!, $after: String, $before: String, $first:
editPath
validationStatus
referencedInSecurityPolicies
auth @client {
enabled
url
usernameField
passwordField
username
}
excludedUrls @client
requestHeaders @client
}
}
}
......
#import "~/graphql_shared/fragments/pageInfo.fragment.graphql"
query DastSiteProfiles($fullPath: ID!, $after: String, $before: String, $first: Int, $last: Int) {
project(fullPath: $fullPath) {
siteProfiles: dastSiteProfiles(after: $after, before: $before, first: $first, last: $last)
@connection(key: "dastSiteProfiles") {
pageInfo {
...PageInfo
}
edges {
cursor
node {
id
profileName
normalizedTargetUrl
targetUrl
editPath
validationStatus
auth @client {
enabled
url
usernameField
passwordField
username
}
excludedUrls @client
requestHeaders @client
referencedInSecurityPolicies
}
}
}
}
}
......@@ -4,6 +4,21 @@ import createDefaultClient from '~/lib/graphql';
Vue.use(VueApollo);
export const resolvers = {
DastSiteProfile: {
auth: () => ({
__typename: 'DastSiteProfileAuth',
enabled: true,
url: 'http://test.local/users/sign_in',
usernameField: 'username',
passwordField: 'password',
username: 'root',
}),
excludedUrls: () => ['http://test.local/sign_out', 'http://test.local/send_mail'],
requestHeaders: () => 'log-identifier: dast-active-scan',
},
};
export default new VueApollo({
defaultClient: createDefaultClient({}, { assumeImmutableResults: true }),
defaultClient: createDefaultClient(resolvers, { assumeImmutableResults: true }),
});
<script>
import { GlFormGroup, GlFormInput, GlFormCheckbox } from '@gitlab/ui';
import { initFormField } from 'ee/security_configuration/utils';
import { __ } from '~/locale';
import validation from '~/vue_shared/directives/validation';
export default {
......@@ -65,8 +64,8 @@ export default {
showValidationOrInEditMode() {
return this.showValidation || this.isEditMode;
},
sensitiveFieldPlaceholder() {
return this.isEditMode ? __('[Unchanged]') : '';
passwordFieldPlaceholder() {
return this.isEditMode ? '••••••••' : '';
},
},
watch: {
......@@ -132,7 +131,7 @@ export default {
autocomplete="off"
name="password"
type="password"
:placeholder="sensitiveFieldPlaceholder"
:placeholder="passwordFieldPlaceholder"
:required="isSensitiveFieldRequired"
:state="form.fields.password.state"
/>
......
......@@ -65,8 +65,7 @@ export default {
},
},
data() {
const { name = '', targetUrl = '', excludedUrls = [], requestHeaders = '', auth = {} } =
this.siteProfile || {};
const { name = '', targetUrl = '', excludedUrls = [], auth = {} } = this.siteProfile || {};
const form = {
state: false,
......@@ -80,7 +79,7 @@ export default {
skipValidation: true,
}),
requestHeaders: initFormField({
value: requestHeaders,
value: '',
required: false,
skipValidation: true,
}),
......@@ -108,6 +107,9 @@ export default {
isEdit() {
return Boolean(this.siteProfile?.id);
},
hasRequestHeaders() {
return Boolean(this.siteProfile?.requestHeaders);
},
i18n() {
const { isEdit } = this;
return {
......@@ -138,8 +140,10 @@ export default {
tooltip: s__(
'DastProfiles|Request header names and values. Headers are added to every request made by DAST.',
),
// eslint-disable-next-line @gitlab/require-i18n-strings
placeholder: 'Cache-control: no-cache, User-Agent: DAST/1.0',
placeholder: this.hasRequestHeaders
? __('[Redacted]')
: // eslint-disable-next-line @gitlab/require-i18n-strings
'Cache-control: no-cache, User-Agent: DAST/1.0',
},
};
},
......
......@@ -255,7 +255,10 @@ export default {
:default-branch-name="vulnerability.projectDefaultBranch"
/>
<div class="detail-page-header">
<div class="detail-page-header-body align-items-center">
<div
class="detail-page-header-body align-items-center"
data-testid="vulnerability-detail-body"
>
<gl-loading-icon v-if="isLoadingVulnerability" class="mr-2" />
<gl-badge v-else class="gl-mr-4 text-capitalize" :variant="stateVariant">
{{ vulnerability.state }}
......
......@@ -6,7 +6,7 @@ module Elastic
# This class should only be used with sidekiq workers which extend Elastic::IndexingControl module
class IndexingControlService
LIMIT = 1000
PROJECT_CONTEXT_KEY = "#{Labkit::Context::LOG_KEY}.project"
PROJECT_CONTEXT_KEY = "#{Gitlab::ApplicationContext::LOG_KEY}.project"
def initialize(klass)
raise ArgumentError, "passed class must extend Elastic::IndexingControl" unless klass.include?(Elastic::IndexingControl)
......@@ -93,7 +93,7 @@ module Elastic
end
def send_to_processing_queue(job)
Labkit::Context.with_context(job['context']) do
Gitlab::ApplicationContext.with_raw_context(job['context']) do
klass.perform_async(*job['args'])
end
end
......
......@@ -51,7 +51,7 @@ module Elastic
end
def current_context
Labkit::Context.current.to_h
Gitlab::ApplicationContext.current
end
end
end
......@@ -12,6 +12,7 @@ import dastSiteProfilesQuery from 'ee/security_configuration/dast_profiles/graph
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import createApolloProvider from 'helpers/mock_apollo_helper';
import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import { redirectTo, setUrlParams } from '~/lib/utils/url_utility';
import RefSelector from '~/ref/components/ref_selector.vue';
import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
......@@ -113,7 +114,6 @@ describe('OnDemandScansForm', () => {
dastSiteProfiles: jest.fn().mockResolvedValue(responses.dastSiteProfiles()),
...handlers,
};
return createApolloProvider([
[dastScannerProfilesQuery, requestHandlers.dastScannerProfiles],
[dastSiteProfilesQuery, requestHandlers.dastSiteProfiles],
......@@ -499,13 +499,15 @@ describe('OnDemandScansForm', () => {
`('when there is a single $profileType profile', ({ query, selector, profiles }) => {
const [profile] = profiles;
beforeEach(() => {
beforeEach(async () => {
mountShallowSubject(
{},
{
[query]: jest.fn().mockResolvedValue(responses[query]([profile])),
},
);
await waitForPromises();
});
it('automatically selects the only available profile', () => {
......@@ -534,14 +536,17 @@ describe('OnDemandScansForm', () => {
it('renders all fields correctly', async () => {
await selectSiteProfile(authEnabledProfile);
const summary = subject.find(SiteProfileSelector).text();
const defaultPassword = '••••••••';
const defaultRequestHeaders = '[Redacted]';
expect(summary).toMatch(authEnabledProfile.targetUrl);
expect(summary).toMatch(authEnabledProfile.excludedUrls.join(','));
expect(summary).toMatch(authEnabledProfile.requestHeaders);
expect(summary).toMatch(authEnabledProfile.auth.url);
expect(summary).toMatch(authEnabledProfile.auth.username);
expect(summary).toMatch(authEnabledProfile.auth.usernameField);
expect(summary).toMatch(authEnabledProfile.auth.passwordField);
expect(summary).toMatch(defaultPassword);
expect(summary).toMatch(defaultRequestHeaders);
});
});
......
......@@ -64,6 +64,10 @@ export const siteProfiles = [
validationStatus: 'PASSED_VALIDATION',
auth: {
enabled: false,
url: 'https://foo.com/login',
usernameField: 'username',
passwordField: 'password',
username: 'admin',
},
excludedUrls: ['https://bar.com/logout'],
requestHeaders: 'auth: gitlab-dast',
......
......@@ -175,6 +175,33 @@ describe('DastSiteProfileForm', () => {
expect(findExcludedUrlsInput().attributes('maxlength')).toBe('2048');
expect(findRequestHeadersInput().attributes('maxlength')).toBe('2048');
});
describe('should have correct placeholders', () => {
const defaultPlaceholder = 'Cache-control: no-cache, User-Agent: DAST/1.0';
it('when creating a new profile', async () => {
expect(findRequestHeadersInput().attributes('placeholder')).toBe(defaultPlaceholder);
});
it('when updating an existing profile with no request headers set', () => {
createFullComponent({
propsData: {
siteProfile: { ...siteProfileOne, requestHeaders: '' },
},
});
expect(findRequestHeadersInput().attributes('placeholder')).toBe(defaultPlaceholder);
});
it('when updating an existing profile', () => {
createFullComponent({
propsData: {
siteProfile: siteProfileOne,
},
});
expect(findRequestHeadersInput().attributes('placeholder')).toBe('[Redacted]');
expect(findByNameAttribute('password').attributes('placeholder')).toBe('••••••••');
});
});
});
describe.each`
......
export const mockIssueLink = {
assignee: null,
assignees: [],
author: { id: 1, name: 'Administrator', username: 'container', state: 'active' },
blocking_issues_count: 0,
closed_at: null,
closed_by: null,
confidential: true,
created_at: '2021-01-10T23:17:23.385Z',
description: 'description',
discussion_locked: null,
downvotes: 0,
due_date: null,
id: 501,
iid: 12,
labels: [],
merge_requests_count: 0,
milestone: null,
project_id: 23,
state: 'opened',
task_completion_status: { count: 0, completed_count: 0 },
time_stats: {
time_estimate: 0,
total_time_spent: 0,
human_time_estimate: null,
human_total_time_spent: null,
},
title: 'Investigate vulnerability: Improper Input Validation in xterm',
updated_at: '2021-01-10T23:17:23.385Z',
upvotes: 0,
user_notes_count: 0,
vulnerability_link_id: 53,
vulnerability_link_type: 'created',
web_url: 'http://gdk.test:3000/security-reports/dependency-list-test/-/issues/12',
weight: null,
};
import defaultRoutes from 'test_helpers/mock_server/routes';
/* eslint-disable global-require */
export default (server) => {
[require('./vulnerabilities')].forEach(({ default: setup }) => {
setup(server);
});
defaultRoutes(server);
};
import { mockIssueLink } from '../../mock_data/vulnerabilities_mock_data';
export default (server) => {
server.get('/api/v4/vulnerabilities/:id/issue_links', () => [mockIssueLink]);
};
export const mockVulnerability = {
id: 1,
title: 'Vulnerability Title',
description: 'Vulnerability Description',
created_at: new Date(2020, 0, 1).toISOString(),
severity: 'medium',
state: 'detected',
pipeline: {
id: 2,
created_at: new Date(2020, 0, 1).toISOString(),
},
project: {
full_path: '/project_full_path',
},
};
import { screen, within } from '@testing-library/dom';
import initVulnerabilities from 'ee/vulnerabilities/vulnerabilities_init';
import { waitForText } from 'helpers/wait_for_text';
import { mockIssueLink } from '../test_helpers/mock_data/vulnerabilities_mock_data';
import { mockVulnerability } from './mock_data';
describe('Vulnerability Report', () => {
let vm;
let container;
const createComponent = () => {
const el = document.createElement('div');
const elDataSet = {
vulnerability: JSON.stringify(mockVulnerability),
};
Object.assign(el.dataset, {
...elDataSet,
});
container.appendChild(el);
return initVulnerabilities(el);
};
beforeEach(() => {
setFixtures('<div class="vulnerability-details"></div>');
container = document.querySelector('.vulnerability-details');
vm = createComponent(container);
});
afterEach(() => {
vm.$destroy();
vm = null;
container = null;
});
it("displays the vulnerability's status", () => {
const headerBody = screen.getByTestId('vulnerability-detail-body');
expect(within(headerBody).getByText(mockVulnerability.state)).toBeInstanceOf(HTMLElement);
});
it("displays the vulnerability's severity", () => {
const severitySection = screen.getByTestId('severity');
const severityValue = within(severitySection).getByTestId('value');
expect(severityValue.textContent.toLowerCase()).toContain(
mockVulnerability.severity.toLowerCase(),
);
});
it("displays a heading containing the vulnerability's title", () => {
expect(screen.getByRole('heading', { name: mockVulnerability.title })).toBeInstanceOf(
HTMLElement,
);
});
it("displays the vulnerability's description", () => {
expect(screen.getByText(mockVulnerability.description)).toBeInstanceOf(HTMLElement);
});
it('displays related issues', async () => {
const relatedIssueTitle = await waitForText(mockIssueLink.title);
expect(relatedIssueTitle).toBeInstanceOf(HTMLElement);
});
});
......@@ -20,7 +20,7 @@ RSpec.describe Elastic::IndexingControlService, :clean_gitlab_redis_shared_state
end
let(:stored_context) do
{ "#{Labkit::Context::LOG_KEY}.project" => 'gitlab-org/gitlab' }
{ "#{Gitlab::ApplicationContext::LOG_KEY}.project" => 'gitlab-org/gitlab' }
end
let(:worker_args) { [1, 2] }
......@@ -133,7 +133,7 @@ RSpec.describe Elastic::IndexingControlService, :clean_gitlab_redis_shared_state
subject.add_to_waiting_queue!(j, worker_context)
end
expect(Labkit::Context).to receive(:with_context).with(stored_context).exactly(jobs.count).times.and_call_original
expect(Gitlab::ApplicationContext).to receive(:with_raw_context).with(stored_context).exactly(jobs.count).times.and_call_original
expect(worker_class).to receive(:perform_async).exactly(jobs.count).times
expect { subject.resume_processing! }.to change { subject.has_jobs_in_waiting_queue? }.from(true).to(false)
......
......@@ -75,7 +75,7 @@ RSpec.describe Elastic::IndexingControl do
expect_any_instance_of(Gitlab::Elastic::Indexer).not_to receive(:run)
expect(Elastic::IndexingControlService).to receive(:add_to_waiting_queue!).with(worker.class, worker_args, worker_context)
Labkit::Context.with_context(worker_context) do
Gitlab::ApplicationContext.with_raw_context(worker_context) do
worker.perform(*worker_args)
end
end
......
......@@ -27,15 +27,15 @@ RSpec.describe UpdateAllMirrorsWorker do
worker.perform
end
it 'removes metadata except correlation_id from the application context before scheduling mirrors' do
it 'removes metadata except correlation_id from the application context before scheduling mirrors', :context_aware do
inner_context = nil
outer_context = nil
Gitlab::ApplicationContext.with_context(project: build(:project)) do
outer_context = Labkit::Context.current.to_h
outer_context = Gitlab::ApplicationContext.current
expect(worker).to receive(:schedule_mirrors!) do
inner_context = Labkit::Context.current.to_h
inner_context = Gitlab::ApplicationContext.current
# `schedule_mirrors!` needs to return an integer.
0
......
......@@ -45,7 +45,8 @@ module.exports = (path) => {
'emojis(/.*).json': '<rootDir>/fixtures/emojis$1.json',
'^spec/test_constants$': '<rootDir>/spec/frontend/__helpers__/test_constants',
'^jest/(.*)$': '<rootDir>/spec/frontend/$1',
'test_helpers(/.*)$': '<rootDir>/spec/frontend_integration/test_helpers$1',
'^test_helpers(/.*)$': '<rootDir>/spec/frontend_integration/test_helpers$1',
'^ee_else_ce_test_helpers(/.*)$': '<rootDir>/spec/frontend_integration/test_helpers$1',
};
const collectCoverageFrom = ['<rootDir>/app/assets/javascripts/**/*.{js,vue}'];
......@@ -56,6 +57,7 @@ module.exports = (path) => {
'^ee(/.*)$': rootDirEE,
'^ee_component(/.*)$': rootDirEE,
'^ee_else_ce(/.*)$': rootDirEE,
'^ee_else_ce_test_helpers(/.*)$': '<rootDir>/ee/spec/frontend_integration/test_helpers$1',
'^ee_jest/(.*)$': '<rootDir>/ee/spec/frontend/$1',
[TEST_FIXTURES_PATTERN]: '<rootDir>/tmp/tests/frontend/fixtures-ee$1',
});
......
......@@ -12,11 +12,11 @@ module API
namespace 'queues' do
desc 'Drop jobs matching the given metadata from the Sidekiq queue'
params do
Labkit::Context::KNOWN_KEYS.each do |key|
Gitlab::ApplicationContext::KNOWN_KEYS.each do |key|
optional key, type: String, allow_blank: false
end
at_least_one_of(*Labkit::Context::KNOWN_KEYS)
at_least_one_of(*Gitlab::ApplicationContext::KNOWN_KEYS)
end
delete ':queue_name' do
result =
......
......@@ -8,6 +8,9 @@ module Gitlab
Attribute = Struct.new(:name, :type)
LOG_KEY = Labkit::Context::LOG_KEY
KNOWN_KEYS = Labkit::Context::KNOWN_KEYS
APPLICATION_ATTRIBUTES = [
Attribute.new(:project, Project),
Attribute.new(:namespace, Namespace),
......@@ -24,6 +27,10 @@ module Gitlab
application_context.use(&block)
end
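# Passes the given attributes to Labkit::Context.with_context unaltered,
# without building an ApplicationContext first. Useful for restoring an
# already-formatted context, such as a stored Sidekiq job context.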
def self.with_raw_context(attributes = {}, &block)
Labkit::Context.with_context(attributes, &block)
end
def self.push(args)
application_context = new(**args)
Labkit::Context.push(application_context.to_lazy_hash)
......
......@@ -71,6 +71,10 @@ module Gitlab
def self.ci_commit_pipeline_mini_graph_vue_enabled?(project)
::Feature.enabled?(:ci_commit_pipeline_mini_graph_vue, project, default_enabled: :yaml)
end
def self.remove_duplicate_artifact_exposure_paths?(project)
::Feature.enabled?(:remove_duplicate_artifact_exposure_paths, project, default_enabled: :yaml)
end
end
end
end
......@@ -215,7 +215,7 @@ module Gitlab
'client_name' => CLIENT_NAME
}
context_data = Labkit::Context.current&.to_h
context_data = Gitlab::ApplicationContext.current
feature_stack = Thread.current[:gitaly_feature_stack]
feature = feature_stack && feature_stack[0]
......
......@@ -6,7 +6,7 @@ module Gitlab
module Loggers
class ContextLogger < ::GrapeLogging::Loggers::Base
def parameters(_, _)
Labkit::Context.current.to_h
Gitlab::ApplicationContext.current
end
end
end
......
......@@ -21,7 +21,7 @@ module Gitlab
job_search_metadata =
search_metadata
.stringify_keys
.slice(*Labkit::Context::KNOWN_KEYS)
.slice(*Gitlab::ApplicationContext::KNOWN_KEYS)
.transform_keys { |key| "meta.#{key}" }
.compact
......
......@@ -35160,7 +35160,7 @@ msgstr ""
msgid "[No reason]"
msgstr ""
msgid "[Unchanged]"
msgid "[Redacted]"
msgstr ""
msgid "`end_time` should not exceed one month after `start_time`"
......
......@@ -898,7 +898,7 @@ RSpec.describe ApplicationController do
feature_category :issue_tracking
def index
Labkit::Context.with_context do |context|
Gitlab::ApplicationContext.with_raw_context do |context|
render json: context.to_h
end
end
......
......@@ -524,7 +524,7 @@ RSpec.describe SessionsController do
it 'sets the username and caller_id in the context' do
expect(controller).to receive(:destroy).and_wrap_original do |m, *args|
expect(Labkit::Context.current.to_h)
expect(Gitlab::ApplicationContext.current)
.to include('meta.user' => user.username,
'meta.caller_id' => 'SessionsController#destroy')
......@@ -538,9 +538,9 @@ RSpec.describe SessionsController do
context 'when not signed in' do
it 'sets the caller_id in the context' do
expect(controller).to receive(:new).and_wrap_original do |m, *args|
expect(Labkit::Context.current.to_h)
expect(Gitlab::ApplicationContext.current)
.to include('meta.caller_id' => 'SessionsController#new')
expect(Labkit::Context.current.to_h)
expect(Gitlab::ApplicationContext.current)
.not_to include('meta.user')
m.call(*args)
......@@ -557,9 +557,9 @@ RSpec.describe SessionsController do
it 'sets the caller_id in the context' do
allow_any_instance_of(User).to receive(:lock_access!).and_wrap_original do |m, *args|
expect(Labkit::Context.current.to_h)
expect(Gitlab::ApplicationContext.current)
.to include('meta.caller_id' => 'SessionsController#create')
expect(Labkit::Context.current.to_h)
expect(Gitlab::ApplicationContext.current)
.not_to include('meta.user')
m.call(*args)
......
import { Server, Model, RestSerializer } from 'miragejs';
import setupRoutes from 'ee_else_ce_test_helpers/mock_server/routes';
import {
getProject,
getEmptyProject,
......@@ -11,7 +12,6 @@ import {
getBlobImage,
getBlobZip,
} from 'test_helpers/fixtures';
import setupRoutes from './routes';
export const createMockServerOptions = () => ({
models: {
......
......@@ -27,6 +27,20 @@ RSpec.describe Gitlab::ApplicationContext do
end
end
describe '.with_raw_context' do
it 'yields the block' do
expect { |b| described_class.with_raw_context({}, &b) }.to yield_control
end
it 'passes the attributes unaltered on to labkit' do
attrs = { foo: :bar }
expect(Labkit::Context).to receive(:with_context).with(attrs)
described_class.with_raw_context(attrs) {}
end
end
describe '.push' do
it 'passes the expected context on to labkit' do
fake_proc = duck_type(:call)
......@@ -138,7 +152,7 @@ RSpec.describe Gitlab::ApplicationContext do
it 'does not cause queries' do
context = described_class.new(project: create(:project), namespace: create(:group, :nested), user: create(:user))
expect { context.use { Labkit::Context.current.to_h } }.not_to exceed_query_limit(0)
expect { context.use { Gitlab::ApplicationContext.current } }.not_to exceed_query_limit(0)
end
end
end
......@@ -30,7 +30,7 @@ RSpec.describe Gitlab::Metrics::BackgroundTransaction do
describe '#labels' do
it 'provides labels with endpoint_id and feature_category' do
Labkit::Context.with_context(feature_category: 'projects', caller_id: 'TestWorker') do
Gitlab::ApplicationContext.with_raw_context(feature_category: 'projects', caller_id: 'TestWorker') do
expect(transaction.labels).to eq({ endpoint_id: 'TestWorker', feature_category: 'projects' })
end
end
......@@ -41,7 +41,7 @@ RSpec.describe Gitlab::Metrics::BackgroundTransaction do
value = 1
expect(prometheus_metric).to receive(metric_method).with({ endpoint_id: 'TestWorker', feature_category: 'projects' }, value)
Labkit::Context.with_context(feature_category: 'projects', caller_id: 'TestWorker') do
Gitlab::ApplicationContext.with_raw_context(feature_category: 'projects', caller_id: 'TestWorker') do
transaction.send(metric_method, :test_metric, value)
end
end
......
......@@ -18,7 +18,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Server do
worker_context user: nil
def perform(identifier, *args)
self.class.contexts.merge!(identifier => Labkit::Context.current.to_h)
self.class.contexts.merge!(identifier => Gitlab::ApplicationContext.current)
end
end
end
......
......@@ -105,7 +105,7 @@ RSpec.describe API::API do
it 'logs all application context fields' do
allow_any_instance_of(Gitlab::GrapeLogging::Loggers::ContextLogger).to receive(:parameters) do
Labkit::Context.current.to_h.tap do |log_context|
Gitlab::ApplicationContext.current.tap do |log_context|
expect(log_context).to match('correlation_id' => an_instance_of(String),
'meta.caller_id' => '/api/:version/projects/:id/issues',
'meta.remote_ip' => an_instance_of(String),
......@@ -122,7 +122,7 @@ RSpec.describe API::API do
it 'skips fields that do not apply' do
allow_any_instance_of(Gitlab::GrapeLogging::Loggers::ContextLogger).to receive(:parameters) do
Labkit::Context.current.to_h.tap do |log_context|
Gitlab::ApplicationContext.current.tap do |log_context|
expect(log_context).to match('correlation_id' => an_instance_of(String),
'meta.caller_id' => '/api/:version/users',
'meta.remote_ip' => an_instance_of(String),
......
......@@ -21,15 +21,30 @@ RSpec.describe BuildArtifactEntity do
expect(subject).to include(:expired, :expire_at)
end
it 'contains paths to the artifacts' do
expect(subject[:path])
.to include "jobs/#{job.id}/artifacts/download?file_type=codequality"
it 'exposes the artifact download path' do
expect(subject[:path]).to include "jobs/#{job.id}/artifacts/download?file_type=codequality"
end
context 'with remove_duplicate_artifact_exposure_paths enabled' do
before do
stub_feature_flags(remove_duplicate_artifact_exposure_paths: true)
end
it 'has no keep or browse path' do
expect(subject).not_to include(:keep_path)
expect(subject).not_to include(:browse_path)
end
end
expect(subject[:keep_path])
.to include "jobs/#{job.id}/artifacts/keep"
context 'with remove_duplicate_artifact_exposure_paths disabled' do
before do
stub_feature_flags(remove_duplicate_artifact_exposure_paths: false)
end
expect(subject[:browse_path])
.to include "jobs/#{job.id}/artifacts/browse"
it 'has keep and browse paths' do
expect(subject[:keep_path]).to be_present
expect(subject[:browse_path]).to be_present
end
end
end
end
......@@ -333,10 +333,20 @@ RSpec.configure do |config|
RequestStore.clear!
end
config.around do |example|
# Wrap each example in its own context to make sure the contexts don't
# leak
Labkit::Context.with_context { example.run }
if ENV['SKIP_RSPEC_CONTEXT_WRAPPING']
config.around(:example, :context_aware) do |example|
# Wrap each example in its own context to make sure the contexts don't
# leak
Gitlab::ApplicationContext.with_raw_context { example.run }
end
else
config.around do |example|
if [:controller, :request, :feature].include?(example.metadata[:type]) || example.metadata[:context_aware]
Gitlab::ApplicationContext.with_raw_context { example.run }
else
example.run
end
end
end
config.around do |example|
......
# frozen_string_literal: true
RSpec.shared_examples 'API::CI::Runner application context metadata' do |api_route|
it 'contains correct context metadata' do
it 'contains correct context metadata', :context_aware do
# Avoids popping the context from the thread so we can
# check its content after the request.
allow(Labkit::Context).to receive(:pop)
send_request
Labkit::Context.with_context do |context|
Gitlab::ApplicationContext.with_raw_context do |context|
expected_context = {
'meta.caller_id' => api_route,
'meta.user' => job.user.username,
......
# frozen_string_literal: true
RSpec.shared_examples 'storing arguments in the application context' do
around do |example|
Labkit::Context.with_context { example.run }
end
it 'places the expected params in the application context' do
it 'places the expected params in the application context', :context_aware do
# Stub the clearing of the context so we can validate it later
# The `around` block above makes sure we do clean it up later
allow(Labkit::Context).to receive(:pop)
subject
Labkit::Context.with_context do |context|
expect(context.to_h)
.to include(log_hash(expected_params))
end
expect(Gitlab::ApplicationContext.current).to include(log_hash(expected_params))
end
def log_hash(hash)
......
......@@ -101,7 +101,7 @@ RSpec.describe BackgroundMigrationWorker, :clean_gitlab_redis_shared_state do
it 'sets the class that will be executed as the caller_id' do
expect(Gitlab::BackgroundMigration).to receive(:perform) do
expect(Labkit::Context.current.to_h).to include('meta.caller_id' => 'Foo')
expect(Gitlab::ApplicationContext.current).to include('meta.caller_id' => 'Foo')
end
worker.perform('Foo', [10, 20])
......
......@@ -103,7 +103,7 @@ RSpec.describe WorkerContext do
describe '#with_context' do
it 'allows modifying context when the job is running' do
worker.new.with_context(user: build_stubbed(:user, username: 'jane-doe')) do
expect(Labkit::Context.current.to_h).to include('meta.user' => 'jane-doe')
expect(Gitlab::ApplicationContext.current).to include('meta.user' => 'jane-doe')
end
end
......