Commit 5f9d3dfc authored by Mike Greiling

Merge branch 'revert-167eeb4a' into 'master'

Revert "Merge branch 'mg-webpack-middleware-pass-host' into 'master'"

See merge request gitlab-org/gitlab!46793
parents 0ec6653d a218a9f1
...@@ -17,10 +17,13 @@ export default { ...@@ -17,10 +17,13 @@ export default {
}, },
}, },
computed: { computed: {
seriesData() { barSeriesData() {
return { return [
full: this.formattedData.keys.map((val, idx) => [val, this.formattedData.values[idx]]), {
}; name: 'full',
data: this.formattedData.keys.map((val, idx) => [val, this.formattedData.values[idx]]),
},
];
}, },
}, },
}; };
...@@ -30,7 +33,7 @@ export default { ...@@ -30,7 +33,7 @@ export default {
<div class="gl-xs-w-full"> <div class="gl-xs-w-full">
<gl-column-chart <gl-column-chart
v-if="formattedData.keys" v-if="formattedData.keys"
:data="seriesData" :bars="barSeriesData"
:x-axis-title="__('Value')" :x-axis-title="__('Value')"
:y-axis-title="__('Number of events')" :y-axis-title="__('Number of events')"
:x-axis-type="'category'" :x-axis-type="'category'"
......
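Note: this hunk (and the similar ones below) migrates GlColumnChart consumers from a `:data` object keyed by series name to a `:bars` array of `{ name, data }` objects. A minimal sketch of the shape change, assuming a `formattedData` fixture with parallel `keys` and `values` arrays like the component above expects:

```javascript
// Assumed sample input mirroring the component's formattedData prop.
const formattedData = {
  keys: ['a', 'b', 'c'],
  values: [1, 2, 3],
};

// Old shape: an object keyed by series name, passed to GlColumnChart via :data.
const seriesData = {
  full: formattedData.keys.map((key, idx) => [key, formattedData.values[idx]]),
};

// New shape: an array of { name, data } series objects, passed via :bars.
const barSeriesData = [
  {
    name: 'full',
    data: formattedData.keys.map((key, idx) => [key, formattedData.values[idx]]),
  },
];

console.log(seriesData.full);       // [['a', 1], ['b', 2], ['c', 3]]
console.log(barSeriesData[0].data); // same pairs, now wrapped with an explicit series name
```

The data points themselves are unchanged; only the wrapper moves from an object key to an explicit `name` field.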
...@@ -35,18 +35,14 @@ export default { ...@@ -35,18 +35,14 @@ export default {
}; };
}, },
computed: { computed: {
chartData() { barChartData() {
const queryData = this.graphData.metrics.reduce((acc, query) => { return this.graphData.metrics.reduce((acc, query) => {
const series = makeDataSeries(query.result || [], { const series = makeDataSeries(query.result || [], {
name: this.formatLegendLabel(query), name: this.formatLegendLabel(query),
}); });
return acc.concat(series); return acc.concat(series);
}, []); }, []);
return {
values: queryData[0].data,
};
}, },
chartOptions() { chartOptions() {
const xAxis = getTimeAxisOptions({ timezone: this.timezone }); const xAxis = getTimeAxisOptions({ timezone: this.timezone });
...@@ -109,7 +105,7 @@ export default { ...@@ -109,7 +105,7 @@ export default {
<gl-column-chart <gl-column-chart
ref="columnChart" ref="columnChart"
v-bind="$attrs" v-bind="$attrs"
:data="chartData" :bars="barChartData"
:option="chartOptions" :option="chartOptions"
:width="width" :width="width"
:height="height" :height="height"
......
...@@ -61,14 +61,16 @@ export default { ...@@ -61,14 +61,16 @@ export default {
}, },
computed: { computed: {
chartData() { chartData() {
return this.graphData.metrics.map(({ result }) => { return this.graphData.metrics
// This needs a fix. Not only metrics[0] should be shown. .map(({ label: name, result }) => {
// See https://gitlab.com/gitlab-org/gitlab/-/issues/220492 // This needs a fix. Not only metrics[0] should be shown.
if (!result || result.length === 0) { // See https://gitlab.com/gitlab-org/gitlab/-/issues/220492
return []; if (!result || result.length === 0) {
} return [];
return result[0].values.map(val => val[1]); }
}); return { name, data: result[0].values.map(val => val[1]) };
})
.slice(0, 1);
}, },
xAxisTitle() { xAxisTitle() {
return this.graphData.x_label !== undefined ? this.graphData.x_label : ''; return this.graphData.x_label !== undefined ? this.graphData.x_label : '';
...@@ -136,7 +138,7 @@ export default { ...@@ -136,7 +138,7 @@ export default {
<gl-stacked-column-chart <gl-stacked-column-chart
ref="chart" ref="chart"
v-bind="$attrs" v-bind="$attrs"
:data="chartData" :bars="chartData"
:option="chartOptions" :option="chartOptions"
:x-axis-title="xAxisTitle" :x-axis-title="xAxisTitle"
:y-axis-title="yAxisTitle" :y-axis-title="yAxisTitle"
...@@ -144,7 +146,6 @@ export default { ...@@ -144,7 +146,6 @@ export default {
:group-by="groupBy" :group-by="groupBy"
:width="width" :width="width"
:height="height" :height="height"
:series-names="seriesNames"
:legend-layout="legendLayout" :legend-layout="legendLayout"
:legend-average-text="legendAverageText" :legend-average-text="legendAverageText"
:legend-current-text="legendCurrentText" :legend-current-text="legendCurrentText"
......
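For the stacked column chart, each series now carries its own `name`, so the separate `:series-names` prop is removed. A sketch of the `chartData` transform above, using an assumed `graphData` fixture shaped like the monitoring payload in this diff (`metrics` entries with a `label` and a `result` array of `values` pairs):

```javascript
// Assumed fixture: two metrics, the second with no result.
const graphData = {
  metrics: [
    { label: 'cpu', result: [{ values: [['t1', 1], ['t2', 2]] }] },
    { label: 'memory', result: [] },
  ],
};

const chartData = graphData.metrics
  .map(({ label: name, result }) => {
    // Mirrors the guard in the component: metrics without results yield an empty series.
    if (!result || result.length === 0) {
      return [];
    }
    return { name, data: result[0].values.map(val => val[1]) };
  })
  .slice(0, 1); // only the first metric is charted for now (see issue 220492 noted above)

console.log(chartData); // [{ name: 'cpu', data: [1, 2] }]
```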
...@@ -5,6 +5,8 @@ import { __ } from '~/locale'; ...@@ -5,6 +5,8 @@ import { __ } from '~/locale';
import CodeCoverage from '../components/code_coverage.vue'; import CodeCoverage from '../components/code_coverage.vue';
import SeriesDataMixin from './series_data_mixin'; import SeriesDataMixin from './series_data_mixin';
const seriesDataToBarData = raw => Object.entries(raw).map(([name, data]) => ({ name, data }));
document.addEventListener('DOMContentLoaded', () => { document.addEventListener('DOMContentLoaded', () => {
waitForCSSLoaded(() => { waitForCSSLoaded(() => {
const languagesContainer = document.getElementById('js-languages-chart'); const languagesContainer = document.getElementById('js-languages-chart');
...@@ -41,13 +43,13 @@ document.addEventListener('DOMContentLoaded', () => { ...@@ -41,13 +43,13 @@ document.addEventListener('DOMContentLoaded', () => {
}, },
computed: { computed: {
seriesData() { seriesData() {
return { full: this.chartData.map(d => [d.label, d.value]) }; return [{ name: 'full', data: this.chartData.map(d => [d.label, d.value]) }];
}, },
}, },
render(h) { render(h) {
return h(GlColumnChart, { return h(GlColumnChart, {
props: { props: {
data: this.seriesData, bars: this.seriesData,
xAxisTitle: __('Used programming language'), xAxisTitle: __('Used programming language'),
yAxisTitle: __('Percentage'), yAxisTitle: __('Percentage'),
xAxisType: 'category', xAxisType: 'category',
...@@ -86,7 +88,7 @@ document.addEventListener('DOMContentLoaded', () => { ...@@ -86,7 +88,7 @@ document.addEventListener('DOMContentLoaded', () => {
render(h) { render(h) {
return h(GlColumnChart, { return h(GlColumnChart, {
props: { props: {
data: this.seriesData, bars: seriesDataToBarData(this.seriesData),
xAxisTitle: __('Day of month'), xAxisTitle: __('Day of month'),
yAxisTitle: __('No. of commits'), yAxisTitle: __('No. of commits'),
xAxisType: 'category', xAxisType: 'category',
...@@ -113,13 +115,13 @@ document.addEventListener('DOMContentLoaded', () => { ...@@ -113,13 +115,13 @@ document.addEventListener('DOMContentLoaded', () => {
acc.push([key, weekDays[key]]); acc.push([key, weekDays[key]]);
return acc; return acc;
}, []); }, []);
return { full: data }; return [{ name: 'full', data }];
}, },
}, },
render(h) { render(h) {
return h(GlColumnChart, { return h(GlColumnChart, {
props: { props: {
data: this.seriesData, bars: this.seriesData,
xAxisTitle: __('Weekday'), xAxisTitle: __('Weekday'),
yAxisTitle: __('No. of commits'), yAxisTitle: __('No. of commits'),
xAxisType: 'category', xAxisType: 'category',
...@@ -143,7 +145,7 @@ document.addEventListener('DOMContentLoaded', () => { ...@@ -143,7 +145,7 @@ document.addEventListener('DOMContentLoaded', () => {
render(h) { render(h) {
return h(GlColumnChart, { return h(GlColumnChart, {
props: { props: {
data: this.seriesData, bars: seriesDataToBarData(this.seriesData),
xAxisTitle: __('Hour (UTC)'), xAxisTitle: __('Hour (UTC)'),
yAxisTitle: __('No. of commits'), yAxisTitle: __('No. of commits'),
xAxisType: 'category', xAxisType: 'category',
......
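The `seriesDataToBarData` helper added above converts the `{ seriesName: points }` object supplied by `SeriesDataMixin` into the new `bars` array format. A small usage sketch with an assumed input:

```javascript
const seriesDataToBarData = raw => Object.entries(raw).map(([name, data]) => ({ name, data }));

// Assumed mixin output: one 'full' series of [label, count] pairs.
const raw = {
  full: [['1', 10], ['2', 4], ['3', 7]],
};

console.log(seriesDataToBarData(raw));
// [{ name: 'full', data: [['1', 10], ['2', 4], ['3', 7]] }]
```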
...@@ -45,9 +45,12 @@ export default { ...@@ -45,9 +45,12 @@ export default {
}, },
data() { data() {
return { return {
timesChartTransformedData: { timesChartTransformedData: [
full: this.mergeLabelsAndValues(this.timesChartData.labels, this.timesChartData.values), {
}, name: 'full',
data: this.mergeLabelsAndValues(this.timesChartData.labels, this.timesChartData.values),
},
],
}; };
}, },
computed: { computed: {
...@@ -128,7 +131,7 @@ export default { ...@@ -128,7 +131,7 @@ export default {
<gl-column-chart <gl-column-chart
:height="$options.chartContainerHeight" :height="$options.chartContainerHeight"
:option="$options.timesChartOptions" :option="$options.timesChartOptions"
:data="timesChartTransformedData" :bars="timesChartTransformedData"
:y-axis-title="__('Minutes')" :y-axis-title="__('Minutes')"
:x-axis-title="__('Commit')" :x-axis-title="__('Commit')"
x-axis-type="category" x-axis-type="category"
......
...@@ -318,7 +318,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo ...@@ -318,7 +318,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
end end
def export_csv def export_csv
return render_404 unless Feature.enabled?(:export_merge_requests_as_csv, project) return render_404 unless Feature.enabled?(:export_merge_requests_as_csv, project, default_enabled: true)
IssuableExportCsvWorker.perform_async(:merge_request, current_user.id, project.id, finder_options.to_h) # rubocop:disable CodeReuse/Worker IssuableExportCsvWorker.perform_async(:merge_request, current_user.id, project.id, finder_options.to_h) # rubocop:disable CodeReuse/Worker
......
...@@ -18,14 +18,13 @@ module Projects ...@@ -18,14 +18,13 @@ module Projects
end end
def cleanup def cleanup
cleanup_params = params.require(:project).permit(:bfg_object_map) bfg_object_map = params.require(:project).require(:bfg_object_map)
result = Projects::UpdateService.new(project, current_user, cleanup_params).execute result = Projects::CleanupService.enqueue(project, current_user, bfg_object_map)
if result[:status] == :success if result[:status] == :success
RepositoryCleanupWorker.perform_async(project.id, current_user.id) # rubocop:disable CodeReuse/Worker
flash[:notice] = _('Repository cleanup has started. You will receive an email once the cleanup operation is complete.') flash[:notice] = _('Repository cleanup has started. You will receive an email once the cleanup operation is complete.')
else else
flash[:alert] = _('Failed to upload object map file') flash[:alert] = status.fetch(:message, _('Failed to upload object map file'))
end end
redirect_to project_settings_repository_path(project) redirect_to project_settings_repository_path(project)
......
...@@ -56,12 +56,9 @@ module Emails ...@@ -56,12 +56,9 @@ module Emails
subject: @message.subject) subject: @message.subject)
end end
def prometheus_alert_fired_email(project_id, user_id, alert_attributes) def prometheus_alert_fired_email(project, user, alert)
@project = ::Project.find(project_id) @project = project
user = ::User.find(user_id) @alert = alert.present
@alert = AlertManagement::Alert.new(alert_attributes.with_indifferent_access).present
return unless @alert.parsed_payload.has_required_attributes?
subject_text = "Alert: #{@alert.email_title}" subject_text = "Alert: #{@alert.email_title}"
mail(to: user.notification_email_for(@project.group), subject: subject(subject_text)) mail(to: user.notification_email_for(@project.group), subject: subject(subject_text))
......
...@@ -9,6 +9,10 @@ module AlertManagement ...@@ -9,6 +9,10 @@ module AlertManagement
return bad_request unless incoming_payload.has_required_attributes? return bad_request unless incoming_payload.has_required_attributes?
process_alert_management_alert process_alert_management_alert
return bad_request unless alert.persisted?
process_incident_issues if process_issues?
send_alert_email if send_email?
ServiceResponse.success ServiceResponse.success
end end
...@@ -30,8 +34,6 @@ module AlertManagement ...@@ -30,8 +34,6 @@ module AlertManagement
else else
create_alert_management_alert create_alert_management_alert
end end
process_incident_issues if process_issues?
end end
def reset_alert_management_alert_status def reset_alert_management_alert_status
...@@ -85,12 +87,17 @@ module AlertManagement ...@@ -85,12 +87,17 @@ module AlertManagement
end end
def process_incident_issues def process_incident_issues
return unless alert.persisted? return if alert.issue || alert.resolved?
return if alert.issue
IncidentManagement::ProcessAlertWorker.perform_async(nil, nil, alert.id) IncidentManagement::ProcessAlertWorker.perform_async(nil, nil, alert.id)
end end
def send_alert_email
notification_service
.async
.prometheus_alerts_fired(project, [alert])
end
def logger def logger
@logger ||= Gitlab::AppLogger @logger ||= Gitlab::AppLogger
end end
......
...@@ -601,7 +601,7 @@ class NotificationService ...@@ -601,7 +601,7 @@ class NotificationService
return if project.emails_disabled? return if project.emails_disabled?
owners_and_maintainers_without_invites(project).to_a.product(alerts).each do |recipient, alert| owners_and_maintainers_without_invites(project).to_a.product(alerts).each do |recipient, alert|
mailer.prometheus_alert_fired_email(project.id, recipient.user.id, alert).deliver_later mailer.prometheus_alert_fired_email(project, recipient.user, alert).deliver_later
end end
end end
......
...@@ -73,7 +73,7 @@ module Projects ...@@ -73,7 +73,7 @@ module Projects
end end
def process_incident_issues def process_incident_issues
return if alert.issue return if alert.issue || alert.resolved?
::IncidentManagement::ProcessAlertWorker.perform_async(nil, nil, alert.id) ::IncidentManagement::ProcessAlertWorker.perform_async(nil, nil, alert.id)
end end
...@@ -81,7 +81,7 @@ module Projects ...@@ -81,7 +81,7 @@ module Projects
def send_alert_email def send_alert_email
notification_service notification_service
.async .async
.prometheus_alerts_fired(project, [alert.attributes]) .prometheus_alerts_fired(project, [alert])
end end
def alert def alert
......
...@@ -11,6 +11,24 @@ module Projects ...@@ -11,6 +11,24 @@ module Projects
include Gitlab::Utils::StrongMemoize include Gitlab::Utils::StrongMemoize
class << self
def enqueue(project, current_user, bfg_object_map)
Projects::UpdateService.new(project, current_user, bfg_object_map: bfg_object_map).execute.tap do |result|
next unless result[:status] == :success
project.set_repository_read_only!
RepositoryCleanupWorker.perform_async(project.id, current_user.id)
end
rescue Project::RepositoryReadOnlyError => err
{ status: :error, message: (_('Failed to make repository read-only. %{reason}') % { reason: err.message }) }
end
def cleanup_after(project)
project.bfg_object_map.remove!
project.set_repository_writable!
end
end
# Attempt to clean up the project following the push. Warning: this is # Attempt to clean up the project following the push. Warning: this is
# destructive! # destructive!
# #
...@@ -29,7 +47,7 @@ module Projects ...@@ -29,7 +47,7 @@ module Projects
# time. Better to feel the pain immediately. # time. Better to feel the pain immediately.
project.repository.expire_all_method_caches project.repository.expire_all_method_caches
project.bfg_object_map.remove! self.class.cleanup_after(project)
end end
private private
......
...@@ -23,7 +23,6 @@ module Projects ...@@ -23,7 +23,6 @@ module Projects
return unauthorized unless valid_alert_manager_token?(token) return unauthorized unless valid_alert_manager_token?(token)
process_prometheus_alerts process_prometheus_alerts
send_alert_email if send_email?
ServiceResponse.success ServiceResponse.success
end end
...@@ -120,14 +119,6 @@ module Projects ...@@ -120,14 +119,6 @@ module Projects
ActiveSupport::SecurityUtils.secure_compare(expected, actual) ActiveSupport::SecurityUtils.secure_compare(expected, actual)
end end
def send_alert_email
return unless firings.any?
notification_service
.async
.prometheus_alerts_fired(project, alerts_attributes)
end
def process_prometheus_alerts def process_prometheus_alerts
alerts.each do |alert| alerts.each do |alert|
AlertManagement::ProcessPrometheusAlertService AlertManagement::ProcessPrometheusAlertService
...@@ -136,18 +127,6 @@ module Projects ...@@ -136,18 +127,6 @@ module Projects
end end
end end
def alerts_attributes
firings.map do |payload|
alert_params = Gitlab::AlertManagement::Payload.parse(
project,
payload,
monitoring_tool: Gitlab::AlertManagement::Payload::MONITORING_TOOLS[:prometheus]
).alert_params
AlertManagement::Alert.new(alert_params).attributes
end
end
def bad_request def bad_request
ServiceResponse.error(message: 'Bad Request', http_status: :bad_request) ServiceResponse.error(message: 'Bad Request', http_status: :bad_request)
end end
......
...@@ -50,11 +50,11 @@ ...@@ -50,11 +50,11 @@
= f.text_field :home_page_url, class: 'form-control', placeholder: 'http://company.example.com', :'aria-describedby' => 'home_help_block' = f.text_field :home_page_url, class: 'form-control', placeholder: 'http://company.example.com', :'aria-describedby' => 'home_help_block'
%span.form-text.text-muted#home_help_block We will redirect non-logged in users to this page %span.form-text.text-muted#home_help_block We will redirect non-logged in users to this page
.form-group .form-group
= f.label :after_sign_out_path, class: 'label-bold' = f.label :after_sign_out_path, _('After sign-out path'), class: 'label-bold'
= f.text_field :after_sign_out_path, class: 'form-control', placeholder: 'http://company.example.com', :'aria-describedby' => 'after_sign_out_path_help_block' = f.text_field :after_sign_out_path, class: 'form-control', placeholder: 'http://company.example.com', :'aria-describedby' => 'after_sign_out_path_help_block'
%span.form-text.text-muted#after_sign_out_path_help_block We will redirect users to this page after they sign out %span.form-text.text-muted#after_sign_out_path_help_block We will redirect users to this page after they sign out
.form-group .form-group
= f.label :sign_in_text, class: 'label-bold' = f.label :sign_in_text, _('Sign-in text'), class: 'label-bold'
= f.text_area :sign_in_text, class: 'form-control', rows: 4 = f.text_area :sign_in_text, class: 'form-control', rows: 4
.form-text.text-muted Markdown enabled .form-text.text-muted Markdown enabled
= f.submit 'Save changes', class: "gl-button btn btn-success" = f.submit 'Save changes', class: "gl-button btn btn-success"
- body = @alert.resolved? ? _('An alert has been resolved in %{project_path}.') : _('An alert has been triggered in %{project_path}.')
%p
= body % { project_path: @alert.project.full_path }
%p %p
= _('An alert has been triggered in %{project_path}.') % { project_path: @alert.project.full_path } = link_to(_('View alert details.'), @alert.details_url)
- if description = @alert.description - if description = @alert.description
%p %p
......
<%= _('An alert has been triggered in %{project_path}.') % { project_path: @alert.project.full_path } %>. <% body = @alert.resolved? ? _('An alert has been resolved in %{project_path}.') : _('An alert has been triggered in %{project_path}.') %>
<%= body % { project_path: @alert.project.full_path } %>
<%= _('View alert details at') %> <%= @alert.details_url %>
<% if description = @alert.description %> <% if description = @alert.description %>
<%= _('Description:') %> <%= description %> <%= _('Description:') %> <%= description %>
......
- if Feature.enabled?(:export_merge_requests_as_csv, @project) - if Feature.enabled?(:export_merge_requests_as_csv, @project, default_enabled: true)
.btn-group .btn-group
= render 'shared/issuable/csv_export/button', issuable_type: 'merge-requests' = render 'shared/issuable/csv_export/button', issuable_type: 'merge-requests'
...@@ -8,5 +8,5 @@ ...@@ -8,5 +8,5 @@
= link_to new_merge_request_path, class: "gl-button btn btn-success", title: "New merge request" do = link_to new_merge_request_path, class: "gl-button btn btn-success", title: "New merge request" do
New merge request New merge request
- if Feature.enabled?(:export_merge_requests_as_csv, @project) - if Feature.enabled?(:export_merge_requests_as_csv, @project, default_enabled: true)
= render 'shared/issuable/csv_export/modal', issuable_type: 'merge_requests' = render 'shared/issuable/csv_export/modal', issuable_type: 'merge_requests'
...@@ -27,8 +27,9 @@ class RepositoryCleanupWorker # rubocop:disable Scalability/IdempotentWorker ...@@ -27,8 +27,9 @@ class RepositoryCleanupWorker # rubocop:disable Scalability/IdempotentWorker
project = Project.find(project_id) project = Project.find(project_id)
user = User.find(user_id) user = User.find(user_id)
# Ensure the file is removed # Ensure the file is removed and the repository is made read-write again
project.bfg_object_map.remove! Projects::CleanupService.cleanup_after(project)
notification_service.repository_cleanup_failure(project, user, error) notification_service.repository_cleanup_failure(project, user, error)
end end
......
---
title: Make the repository read-only while running cleanup
merge_request: 45058
author:
type: changed
---
title: Corrected grammar in Sign-in restrictions text
merge_request: 46500
author:
type: other
---
title: Enable MR CSV export
merge_request: 46662
author:
type: added
---
title: Fix example responses for Project Issue Board creation API in the docs
merge_request: 46749
author: Takuya Noguchi
type: fixed
---
title: Improve messaging for emails from alerts
merge_request: 43054
author:
type: changed
...@@ -4,4 +4,4 @@ introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/45130 ...@@ -4,4 +4,4 @@ introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/45130
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/267129 rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/267129
type: development type: development
group: group::compliance group: group::compliance
default_enabled: false default_enabled: true
...@@ -16,7 +16,6 @@ if Gitlab.ee? && Gitlab.dev_or_test_env? ...@@ -16,7 +16,6 @@ if Gitlab.ee? && Gitlab.dev_or_test_env?
IGNORED_FEATURE_FLAGS = %i[ IGNORED_FEATURE_FLAGS = %i[
ci_secrets_management ci_secrets_management
feature_flags_related_issues feature_flags_related_issues
group_coverage_reports
group_wikis group_wikis
incident_sla incident_sla
swimlanes swimlanes
......
...@@ -203,48 +203,12 @@ Example response: ...@@ -203,48 +203,12 @@ Example response:
"web_url": "http://example.com/diaspora/diaspora-project-site" "web_url": "http://example.com/diaspora/diaspora-project-site"
}, },
"name": "newboard", "name": "newboard",
"milestone": { "lists" : [],
"id": 12 "group": null,
"title": "10.0" "milestone": null,
}, "assignee" : null,
"lists" : [ "labels" : [],
{ "weight" : null
"id" : 1,
"label" : {
"name" : "Testing",
"color" : "#F0AD4E",
"description" : null
},
"position" : 1,
"max_issue_count": 0,
"max_issue_weight": 0,
"limit_metric": null
},
{
"id" : 2,
"label" : {
"name" : "Ready",
"color" : "#FF0000",
"description" : null
},
"position" : 2,
"max_issue_count": 0,
"max_issue_weight": 0,
"limit_metric": null
},
{
"id" : 3,
"label" : {
"name" : "Production",
"color" : "#FF5F00",
"description" : null
},
"position" : 3,
"max_issue_count": 0,
"max_issue_weight": 0,
"limit_metric": null
}
]
} }
``` ```
......
...@@ -21,20 +21,20 @@ TIP: **Tip:** ...@@ -21,20 +21,20 @@ TIP: **Tip:**
When you enable 2FA, don't forget to back up your [recovery codes](#recovery-codes)! When you enable 2FA, don't forget to back up your [recovery codes](#recovery-codes)!
In addition to time-based one time passwords (TOTP), GitLab supports U2F In addition to time-based one time passwords (TOTP), GitLab supports U2F
(universal 2nd factor) devices as the second factor of authentication. Once (universal 2nd factor) and WebAuthn (experimental) devices as the second factor of authentication. Once
enabled, in addition to supplying your username and password to log in, you'll enabled, in addition to supplying your username and password to log in, you'll
be prompted to activate your U2F device (usually by pressing a button on it), be prompted to activate your U2F / WebAuthn device (usually by pressing a button on it),
and it will perform secure authentication on your behalf. and it will perform secure authentication on your behalf.
It is highly recommended that you set up 2FA with both a It is highly recommended that you set up 2FA with both a
[one-time password authenticator](#one-time-password) or use [FortiAuthenticator](#one-time-password-via-fortiauthenticator) [one-time password authenticator](#one-time-password) or use [FortiAuthenticator](#one-time-password-via-fortiauthenticator)
and a [U2F device](#u2f-device), so you can still access your account if you and a [U2F device](#u2f-device) or a [WebAuthn device](#webauthn-device), so you can still access your account
lose your U2F device. if you lose your U2F / WebAuthn device.
## Enabling 2FA ## Enabling 2FA
There are two ways to enable two-factor authentication: via a one time password authenticator There are multiple ways to enable two-factor authentication: via a one time password authenticator
or a U2F device. or a U2F / WebAuthn device.
### One-time password ### One-time password
...@@ -174,10 +174,46 @@ To set up 2FA with a U2F device: ...@@ -174,10 +174,46 @@ To set up 2FA with a U2F device:
You will see a message indicating that your device was successfully set up. You will see a message indicating that your device was successfully set up.
Click on **Register U2F Device** to complete the process. Click on **Register U2F Device** to complete the process.
### WebAuthn device
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/22506) in GitLab 13.4.
> - It's [deployed behind a feature flag](../../feature_flags.md), disabled by default.
> - It's disabled on GitLab.com.
> - It's not recommended for production use.
> - To use it in GitLab self-managed instances, ask a GitLab administrator to [enable it](#enable-or-disable-webauthn). **(CORE ONLY)**
The WebAuthn workflow is [supported by](https://caniuse.com/#search=webauthn) the
following desktop browsers:
- Chrome
- Edge
- Firefox
- Opera
- Safari
and the following mobile browsers:
- Chrome for Android
- Firefox for Android
- iOS Safari (since iOS 13.3)
To set up 2FA with a WebAuthn compatible device:
1. Sign in to your GitLab account.
1. Go to your [**Profile settings**](../index.md#profile-settings).
1. Go to **Account**.
1. Select **Enable Two-Factor Authentication**.
1. Plug in your WebAuthn device.
1. Select **Set up New WebAuthn Device**.
1. Depending on your device, you might need to press a button or touch a sensor.
You will see a message indicating that your device was successfully set up.
Recovery codes are not generated for WebAuthn devices.
## Recovery codes ## Recovery codes
NOTE: **Note:** NOTE: **Note:**
Recovery codes are not generated for U2F devices. Recovery codes are not generated for U2F / WebAuthn devices.
CAUTION: **Caution:** CAUTION: **Caution:**
Each code can be used only once to log in to your account. Each code can be used only once to log in to your account.
...@@ -215,6 +251,14 @@ To log in via a U2F device: ...@@ -215,6 +251,14 @@ To log in via a U2F device:
You will see a message indicating that your device responded to the authentication You will see a message indicating that your device responded to the authentication
request and you will be automatically logged in. request and you will be automatically logged in.
### Log in via WebAuthn device
In supported browsers you should be automatically prompted to activate your WebAuthn device
(e.g. by touching/pressing its button) after entering your credentials.
You will see a message indicating that your device responded to the authentication
request and you will be automatically logged in.
## Disabling 2FA ## Disabling 2FA
If you ever need to disable 2FA: If you ever need to disable 2FA:
...@@ -225,7 +269,7 @@ If you ever need to disable 2FA: ...@@ -225,7 +269,7 @@ If you ever need to disable 2FA:
1. Click **Disable**, under **Two-Factor Authentication**. 1. Click **Disable**, under **Two-Factor Authentication**.
This will clear all your two-factor authentication registrations, including mobile This will clear all your two-factor authentication registrations, including mobile
applications and U2F devices. applications and U2F / WebAuthn devices.
## Personal access tokens ## Personal access tokens
...@@ -331,7 +375,8 @@ Sign in and re-enable two-factor authentication as soon as possible. ...@@ -331,7 +375,8 @@ Sign in and re-enable two-factor authentication as soon as possible.
you may have cases where authorization always fails because of time differences. you may have cases where authorization always fails because of time differences.
- The GitLab U2F implementation does _not_ work when the GitLab instance is accessed from - The GitLab U2F implementation does _not_ work when the GitLab instance is accessed from
multiple hostnames, or FQDNs. Each U2F registration is linked to the _current hostname_ at multiple hostnames, or FQDNs. Each U2F registration is linked to the _current hostname_ at
the time of registration, and cannot be used for other hostnames/FQDNs. the time of registration, and cannot be used for other hostnames/FQDNs. The same applies to
WebAuthn registrations.
For example, if a user is trying to access a GitLab instance from `first.host.xyz` and `second.host.xyz`: For example, if a user is trying to access a GitLab instance from `first.host.xyz` and `second.host.xyz`:
...@@ -342,6 +387,25 @@ Sign in and re-enable two-factor authentication as soon as possible. ...@@ -342,6 +387,25 @@ Sign in and re-enable two-factor authentication as soon as possible.
- To enforce 2FA at the system or group levels see [Enforce Two-factor Authentication](../../../security/two_factor_authentication.md). - To enforce 2FA at the system or group levels see [Enforce Two-factor Authentication](../../../security/two_factor_authentication.md).
## Enable or disable WebAuthn **(CORE ONLY)**
Support for WebAuthn is under development and not ready for production use. It is
deployed behind a feature flag that is **disabled by default**.
[GitLab administrators with access to the GitLab Rails console](../../../administration/feature_flags.md)
can enable it.
To enable it:
```ruby
Feature.enable(:webauthn)
```
To disable it:
```ruby
Feature.disable(:webauthn)
```
## Troubleshooting ## Troubleshooting
If you are receiving an `invalid pin code` error, this may indicate that there is a time sync issue between the authentication application and the GitLab instance itself. If you are receiving an `invalid pin code` error, this may indicate that there is a time sync issue between the authentication application and the GitLab instance itself.
......
...@@ -7,10 +7,12 @@ info: To determine the technical writer assigned to the Stage/Group associated w ...@@ -7,10 +7,12 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Export Merge Requests to CSV **(CORE)** # Export Merge Requests to CSV **(CORE)**
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/3619) in GitLab 13.6. > - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/3619) in GitLab 13.6.
> - It's [deployed behind a feature flag](../../../administration/feature_flags.md), disabled by default. > - It was [deployed behind a feature flag](../../../administration/feature_flags.md), disabled by default.
> - It's disabled on GitLab.com. > - Became enabled by default in GitLab 13.6.
> - It's not recommended for production use. > - It's enabled on GitLab.com.
> - To use it in GitLab self-managed instances, ask a GitLab administrator to [enable it](#enable-or-disable-export-merge-requests-to-csv). **(CORE ONLY)** > - It's recommended for production use.
> - For GitLab self-managed instances, GitLab administrators can opt to [disable it](#enable-or-disable-export-merge-requests-to-csv). **(CORE ONLY)**
> - It can be enabled or disabled for a single project.
CAUTION: **Warning:** CAUTION: **Warning:**
This feature might not be available to you. Check the **version history** note above for details. This feature might not be available to you. Check the **version history** note above for details.
...@@ -55,25 +57,25 @@ The following table shows what attributes will be present in the CSV. ...@@ -55,25 +57,25 @@ The following table shows what attributes will be present in the CSV.
### Enable or disable Export Merge Requests to CSV **(CORE ONLY)** ### Enable or disable Export Merge Requests to CSV **(CORE ONLY)**
Export merge requests to CSV is under development and not ready for production use. It is Export merge requests to CSV is under development but ready for production use.
deployed behind a feature flag that is **disabled by default**. It is deployed behind a feature flag that is **enabled by default**.
[GitLab administrators with access to the GitLab Rails console](../../../administration/feature_flags.md) [GitLab administrators with access to the GitLab Rails console](../../../administration/feature_flags.md)
can enable it. can opt to disable it.
To enable it: To enable it:
```ruby ```ruby
# For the instance
Feature.enable(:export_merge_requests_as_csv) Feature.enable(:export_merge_requests_as_csv)
# For a single project
Feature.enable(:export_merge_requests_as_csv, Project.find(<project id>))
``` ```
To disable it: To disable it:
```ruby ```ruby
Feature.enable(:export_merge_requests_as_csv) # For the instance
``` Feature.disable(:export_merge_requests_as_csv)
# For a single project
Optionally, pass a project as an argument to enable for a single project. Feature.disable(:export_merge_requests_as_csv, Project.find(<project id>))
```ruby
Feature.enable(:export_merge_requests_as_csv, project)
``` ```
...@@ -202,6 +202,12 @@ To purge files from GitLab storage: ...@@ -202,6 +202,12 @@ To purge files from GitLab storage:
## Repository cleanup ## Repository cleanup
NOTE: **Note:**
Safely cleaning the repository requires it to be made read-only for the duration
of the operation. This happens automatically, but submitting the cleanup request
will fail if any writes are ongoing, so cancel any outstanding `git push`
operations before continuing.
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/19376) in GitLab 11.6. > [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/19376) in GitLab 11.6.
Repository cleanup allows you to upload a text file of objects and GitLab will remove internal Git Repository cleanup allows you to upload a text file of objects and GitLab will remove internal Git
......
...@@ -65,7 +65,7 @@ export default { ...@@ -65,7 +65,7 @@ export default {
}; };
}, },
seriesData() { seriesData() {
return { full: this.chartData }; return [{ name: 'full', data: this.chartData }];
}, },
}, },
methods: { methods: {
...@@ -102,7 +102,7 @@ export default { ...@@ -102,7 +102,7 @@ export default {
v-bind="$attrs" v-bind="$attrs"
:width="width" :width="width"
:height="height" :height="height"
:data="seriesData" :bars="seriesData"
:x-axis-title="xAxisTitle" :x-axis-title="xAxisTitle"
:y-axis-title="yAxisTitle" :y-axis-title="yAxisTitle"
x-axis-type="category" x-axis-type="category"
......
...@@ -15,21 +15,16 @@ export default { ...@@ -15,21 +15,16 @@ export default {
type: Array, type: Array,
required: true, required: true,
}, },
seriesNames: {
type: Array,
required: true,
},
}, },
}; };
</script> </script>
<template> <template>
<gl-stacked-column-chart <gl-stacked-column-chart
:data="data" :bars="data"
:group-by="groupBy" :group-by="groupBy"
x-axis-type="category" x-axis-type="category"
y-axis-type="value" y-axis-type="value"
:x-axis-title="__('Date')" :x-axis-title="__('Date')"
:y-axis-title="s__('CycleAnalytics|Number of tasks')" :y-axis-title="s__('CycleAnalytics|Number of tasks')"
:series-names="seriesNames"
/> />
</template> </template>
...@@ -87,7 +87,6 @@ export default { ...@@ -87,7 +87,6 @@ export default {
v-if="hasData" v-if="hasData"
:data="tasksByTypeChartData.data" :data="tasksByTypeChartData.data"
:group-by="tasksByTypeChartData.groupBy" :group-by="tasksByTypeChartData.groupBy"
:series-names="tasksByTypeChartData.seriesNames"
/> />
<gl-alert v-else variant="info" :dismissible="false" class="gl-mt-3"> <gl-alert v-else variant="info" :dismissible="false" class="gl-mt-3">
{{ error }} {{ error }}
......
...@@ -21,5 +21,5 @@ export const tasksByTypeChartData = ({ data = [] } = {}, _, rootState = {}) => { ...@@ -21,5 +21,5 @@ export const tasksByTypeChartData = ({ data = [] } = {}, _, rootState = {}) => {
startDate, startDate,
endDate, endDate,
}) })
: { groupBy: [], data: [], seriesNames: [] }; : { groupBy: [], data: [] };
}; };
...@@ -253,7 +253,6 @@ export const getTasksByTypeData = ({ data = [], startDate = null, endDate = null ...@@ -253,7 +253,6 @@ export const getTasksByTypeData = ({ data = [], startDate = null, endDate = null
return { return {
groupBy: [], groupBy: [],
data: [], data: [],
seriesNames: [],
}; };
} }
...@@ -269,14 +268,19 @@ export const getTasksByTypeData = ({ data = [], startDate = null, endDate = null ...@@ -269,14 +268,19 @@ export const getTasksByTypeData = ({ data = [], startDate = null, endDate = null
const transformed = data.reduce( const transformed = data.reduce(
(acc, curr) => { (acc, curr) => {
const { const {
label: { title }, label: { title: name },
series, series,
} = curr; } = curr;
acc.seriesNames = [...acc.seriesNames, title];
acc.data = [ acc.data = [
...acc.data, ...acc.data,
// adds 0 values for each data point and overrides with data from the series {
flattenTaskByTypeSeries({ ...zeroValuesForEachDataPoint, ...Object.fromEntries(series) }), name,
// adds 0 values for each data point and overrides with data from the series
data: flattenTaskByTypeSeries({
...zeroValuesForEachDataPoint,
...Object.fromEntries(series),
}),
},
]; ];
return acc; return acc;
}, },
......
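The tasks-by-type transform now emits one `{ name, data }` entry per label instead of collecting titles into a separate `seriesNames` array. A rough sketch of the reduce above; the zero-filled defaults and `flattenTaskByTypeSeries` below are simplified stand-ins for the module's own helpers, and the input is an assumed two-label fixture:

```javascript
// Stand-ins for the module's helpers (assumptions for this sketch only).
const zeroValuesForEachDataPoint = { '2020-01-01': 0, '2020-01-02': 0 };
const flattenTaskByTypeSeries = series => Object.values(series);

// Assumed raw tasks-by-type payload: one series entry per label.
const data = [
  { label: { title: 'Bug' }, series: [['2020-01-01', 3]] },
  { label: { title: 'Feature' }, series: [['2020-01-02', 5]] },
];

const transformed = data.reduce(
  (acc, { label: { title: name }, series }) => {
    acc.data = [
      ...acc.data,
      {
        name,
        // adds 0 values for each data point and overrides with data from the series
        data: flattenTaskByTypeSeries({
          ...zeroValuesForEachDataPoint,
          ...Object.fromEntries(series),
        }),
      },
    ];
    return acc;
  },
  { data: [] },
);

console.log(transformed.data);
// [{ name: 'Bug', data: [3, 0] }, { name: 'Feature', data: [0, 5] }]
```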
...@@ -206,7 +206,7 @@ export default { ...@@ -206,7 +206,7 @@ export default {
:chart-data="getColumnChartData(chartKeys.main)" :chart-data="getColumnChartData(chartKeys.main)"
> >
<gl-column-chart <gl-column-chart
:data="{ full: getColumnChartData(chartKeys.main) }" :bars="[{ name: 'full', data: getColumnChartData(chartKeys.main) }]"
:option="getColumnChartOption(chartKeys.main)" :option="getColumnChartOption(chartKeys.main)"
:y-axis-title="__('Merge requests')" :y-axis-title="__('Merge requests')"
:x-axis-title="__('Days')" :x-axis-title="__('Days')"
...@@ -257,7 +257,7 @@ export default { ...@@ -257,7 +257,7 @@ export default {
" "
> >
<gl-column-chart <gl-column-chart
:data="{ full: getColumnChartData(chartKeys.timeBasedHistogram) }" :bars="[{ name: 'full', data: getColumnChartData(chartKeys.timeBasedHistogram) }]"
:option="getColumnChartOption(chartKeys.timeBasedHistogram)" :option="getColumnChartOption(chartKeys.timeBasedHistogram)"
:y-axis-title="s__('ProductivityAnalytics|Merge requests')" :y-axis-title="s__('ProductivityAnalytics|Merge requests')"
:x-axis-title="s__('ProductivityAnalytics|Hours')" :x-axis-title="s__('ProductivityAnalytics|Hours')"
...@@ -283,7 +283,7 @@ export default { ...@@ -283,7 +283,7 @@ export default {
" "
> >
<gl-column-chart <gl-column-chart
:data="{ full: getColumnChartData(chartKeys.commitBasedHistogram) }" :bars="[{ name: 'full', data: getColumnChartData(chartKeys.commitBasedHistogram) }]"
:option="getColumnChartOption(chartKeys.commitBasedHistogram)" :option="getColumnChartOption(chartKeys.commitBasedHistogram)"
:y-axis-title="s__('ProductivityAanalytics|Merge requests')" :y-axis-title="s__('ProductivityAanalytics|Merge requests')"
:x-axis-title="getMetricLabel(chartKeys.commitBasedHistogram)" :x-axis-title="getMetricLabel(chartKeys.commitBasedHistogram)"
......
...@@ -134,7 +134,7 @@ export default { ...@@ -134,7 +134,7 @@ export default {
v-if="loaded && isColumnChart" v-if="loaded && isColumnChart"
v-bind="$attrs" v-bind="$attrs"
:height="$options.height" :height="$options.height"
:data="data.datasets" :bars="data.datasets"
x-axis-type="category" x-axis-type="category"
:x-axis-title="data.xAxisTitle" :x-axis-title="data.xAxisTitle"
:y-axis-title="data.yAxisTitle" :y-axis-title="data.yAxisTitle"
...@@ -145,9 +145,8 @@ export default { ...@@ -145,9 +145,8 @@ export default {
v-else-if="loaded && isStackedColumnChart" v-else-if="loaded && isStackedColumnChart"
v-bind="$attrs" v-bind="$attrs"
:height="$options.height" :height="$options.height"
:data="data.datasets" :bars="data.datasets"
:group-by="data.labels" :group-by="data.labels"
:series-names="data.seriesNames"
x-axis-type="category" x-axis-type="category"
:x-axis-title="data.xAxisTitle" :x-axis-title="data.xAxisTitle"
:y-axis-title="data.yAxisTitle" :y-axis-title="data.yAxisTitle"
......
...@@ -31,10 +31,21 @@ export const transformChartDataForGlCharts = ( ...@@ -31,10 +31,21 @@ export const transformChartDataForGlCharts = (
}; };
switch (type) { switch (type) {
case CHART_TYPES.BAR:
formattedData.datasets = [
{
name: 'all',
data: labels.map((label, i) => [label, datasets[0].data[i]]),
},
];
break;
case CHART_TYPES.STACKED_BAR: case CHART_TYPES.STACKED_BAR:
formattedData.datasets = datasets.map(dataset => dataset.data); formattedData.datasets.push(
formattedData.seriesNames = datasets.map(dataset => dataset.label); ...datasets.map(dataset => ({
name: dataset.label,
data: dataset.data,
})),
);
break; break;
case CHART_TYPES.LINE: case CHART_TYPES.LINE:
formattedData.datasets.push( formattedData.datasets.push(
...@@ -48,7 +59,6 @@ export const transformChartDataForGlCharts = ( ...@@ -48,7 +59,6 @@ export const transformChartDataForGlCharts = (
default: default:
formattedData.datasets = { all: labels.map((label, i) => [label, datasets[0].data[i]]) }; formattedData.datasets = { all: labels.map((label, i) => [label, datasets[0].data[i]]) };
} }
return formattedData; return formattedData;
}; };
......
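In the Insights transform, bar and stacked-bar datasets are likewise rebuilt as `[{ name, data }]`, replacing the old parallel `datasets`/`seriesNames` arrays. A sketch of the two reworked cases, using an assumed `labels`/`datasets` input mirroring the spec fixtures further down in this diff:

```javascript
const labels = ['January', 'February'];
const datasets = [
  { label: 'Dataset 1', data: [1, 2] },
  { label: 'Dataset 2', data: [3, 4] },
];

// CHART_TYPES.BAR: a single 'all' series of [label, value] pairs.
const barDatasets = [
  {
    name: 'all',
    data: labels.map((label, i) => [label, datasets[0].data[i]]),
  },
];

// CHART_TYPES.STACKED_BAR: one { name, data } entry per dataset.
const stackedDatasets = datasets.map(dataset => ({
  name: dataset.label,
  data: dataset.data,
}));

console.log(barDatasets);     // [{ name: 'all', data: [['January', 1], ['February', 2]] }]
console.log(stackedDatasets); // [{ name: 'Dataset 1', data: [1, 2] }, { name: 'Dataset 2', data: [3, 4] }]
```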
...@@ -182,7 +182,7 @@ export default { ...@@ -182,7 +182,7 @@ export default {
<gl-column-chart <gl-column-chart
data-qa-selector="issues_analytics_graph" data-qa-selector="issues_analytics_graph"
:data="{ Full: data }" :bars="[{ name: 'Full', data }]"
:option="chartOptions" :option="chartOptions"
:y-axis-title="s__('IssuesAnalytics|Issues opened')" :y-axis-title="s__('IssuesAnalytics|Issues opened')"
:x-axis-title="s__('IssuesAnalytics|Last 12 months') + ' (' + chartDateRange + ')'" :x-axis-title="s__('IssuesAnalytics|Last 12 months') + ' (' + chartDateRange + ')'"
......
---
title: Rename "cycle analytics" with "value stream analytics" under /ee/spec
merge_request: 46745
author: Takuya Noguchi
type: other
---
name: group_coverage_reports
introduced_by_url:
rollout_issue_url:
type: licensed
group: group::analytics
default_enabled: true
...@@ -3,7 +3,7 @@ ...@@ -3,7 +3,7 @@
exports[`Contribution Analytics Column Chart matches the snapshot 1`] = ` exports[`Contribution Analytics Column Chart matches the snapshot 1`] = `
<div> <div>
<gl-column-chart-stub <gl-column-chart-stub
bars="" bars="[object Object]"
data="[object Object]" data="[object Object]"
height="350" height="350"
lines="" lines=""
......
// Jest Snapshot v1, https://goo.gl/fbAQLP // Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Cycle analytics utils transformRawStages retains all the stage properties 1`] = ` exports[`Value Stream Analytics utils transformRawStages retains all the stage properties 1`] = `
Array [ Array [
Object { Object {
"custom": false, "custom": false,
......
...@@ -2,4 +2,4 @@ ...@@ -2,4 +2,4 @@
exports[`TasksByTypeChart no data available should render the no data available message 1`] = `"<gl-stacked-column-chart-stub data=\\"\\" bars=\\"\\" lines=\\"\\" secondarydata=\\"\\" option=\\"[object Object]\\" presentation=\\"stacked\\" groupby=\\"\\" xaxistype=\\"category\\" xaxistitle=\\"Date\\" yaxistitle=\\"Number of tasks\\" secondarydatatitle=\\"\\" seriesnames=\\"\\" legendaveragetext=\\"Avg\\" legendmaxtext=\\"Max\\" legendmintext=\\"Min\\" legendcurrenttext=\\"Current\\" legendlayout=\\"inline\\" y-axis-type=\\"value\\"></gl-stacked-column-chart-stub>"`; exports[`TasksByTypeChart no data available should render the no data available message 1`] = `"<gl-stacked-column-chart-stub data=\\"\\" bars=\\"\\" lines=\\"\\" secondarydata=\\"\\" option=\\"[object Object]\\" presentation=\\"stacked\\" groupby=\\"\\" xaxistype=\\"category\\" xaxistitle=\\"Date\\" yaxistitle=\\"Number of tasks\\" secondarydatatitle=\\"\\" seriesnames=\\"\\" legendaveragetext=\\"Avg\\" legendmaxtext=\\"Max\\" legendmintext=\\"Min\\" legendcurrenttext=\\"Current\\" legendlayout=\\"inline\\" y-axis-type=\\"value\\"></gl-stacked-column-chart-stub>"`;
exports[`TasksByTypeChart with data available should render the loading chart 1`] = `"<gl-stacked-column-chart-stub data=\\"0,1,2,5,2,3,2,4,1\\" bars=\\"\\" lines=\\"\\" secondarydata=\\"\\" option=\\"[object Object]\\" presentation=\\"stacked\\" groupby=\\"Group 1,Group 2,Group 3\\" xaxistype=\\"category\\" xaxistitle=\\"Date\\" yaxistitle=\\"Number of tasks\\" secondarydatatitle=\\"\\" seriesnames=\\"Cool label,Normal label\\" legendaveragetext=\\"Avg\\" legendmaxtext=\\"Max\\" legendmintext=\\"Min\\" legendcurrenttext=\\"Current\\" legendlayout=\\"inline\\" y-axis-type=\\"value\\"></gl-stacked-column-chart-stub>"`; exports[`TasksByTypeChart with data available should render the loading chart 1`] = `"<gl-stacked-column-chart-stub data=\\"\\" bars=\\"0,1,2,5,2,3,2,4,1\\" lines=\\"\\" secondarydata=\\"\\" option=\\"[object Object]\\" presentation=\\"stacked\\" groupby=\\"Group 1,Group 2,Group 3\\" xaxistype=\\"category\\" xaxistitle=\\"Date\\" yaxistitle=\\"Number of tasks\\" secondarydatatitle=\\"\\" seriesnames=\\"Cool label,Normal label\\" legendaveragetext=\\"Avg\\" legendmaxtext=\\"Max\\" legendmintext=\\"Min\\" legendcurrenttext=\\"Current\\" legendlayout=\\"inline\\" y-axis-type=\\"value\\"></gl-stacked-column-chart-stub>"`;
...@@ -83,7 +83,7 @@ describe('TypeOfWorkCharts', () => { ...@@ -83,7 +83,7 @@ describe('TypeOfWorkCharts', () => {
beforeEach(() => { beforeEach(() => {
wrapper = createComponent({ wrapper = createComponent({
initialGetters: { initialGetters: {
tasksByTypeChartData: () => ({ groupBy: [], data: [], seriesNames: [] }), tasksByTypeChartData: () => ({ groupBy: [], data: [] }),
}, },
}); });
}); });
......
...@@ -42,7 +42,7 @@ const stageEndpoint = ({ stageId }) => ...@@ -42,7 +42,7 @@ const stageEndpoint = ({ stageId }) =>
jest.mock('~/flash'); jest.mock('~/flash');
describe('Cycle analytics actions', () => { describe('Value Stream Analytics actions', () => {
let state; let state;
let mock; let mock;
......
...@@ -28,7 +28,7 @@ const milestoneValues = getFilterValues(filterMilestones); ...@@ -28,7 +28,7 @@ const milestoneValues = getFilterValues(filterMilestones);
const labelValues = getFilterValues(filterLabels); const labelValues = getFilterValues(filterLabels);
const userValues = getFilterValues(filterUsers, { prop: 'name' }); const userValues = getFilterValues(filterUsers, { prop: 'name' });
describe('Cycle analytics getters', () => { describe('Value Stream Analytics getters', () => {
describe('hasNoAccessError', () => { describe('hasNoAccessError', () => {
beforeEach(() => { beforeEach(() => {
state = { state = {
......
...@@ -19,7 +19,7 @@ describe('Type of work getters', () => { ...@@ -19,7 +19,7 @@ describe('Type of work getters', () => {
describe('with no data', () => { describe('with no data', () => {
it('returns all required properties', () => { it('returns all required properties', () => {
expect(tasksByTypeChartData()).toEqual({ groupBy: [], data: [], seriesNames: [] }); expect(tasksByTypeChartData()).toEqual({ groupBy: [], data: [] });
}); });
}); });
}); });
......
...@@ -6,7 +6,7 @@ import { apiTasksByTypeData, rawTasksByTypeData } from '../../../mock_data'; ...@@ -6,7 +6,7 @@ import { apiTasksByTypeData, rawTasksByTypeData } from '../../../mock_data';
let state = null; let state = null;
describe('Cycle analytics mutations', () => { describe('Value Stream Analytics mutations', () => {
beforeEach(() => { beforeEach(() => {
state = {}; state = {};
}); });
......
...@@ -16,7 +16,7 @@ import { ...@@ -16,7 +16,7 @@ import {
let state = null; let state = null;
describe('Cycle analytics mutations', () => { describe('Value Stream Analytics mutations', () => {
beforeEach(() => { beforeEach(() => {
state = {}; state = {};
}); });
......
...@@ -41,7 +41,7 @@ import { ...@@ -41,7 +41,7 @@ import {
const labelEventIds = labelEvents.map(ev => ev.identifier); const labelEventIds = labelEvents.map(ev => ev.identifier);
describe('Cycle analytics utils', () => { describe('Value Stream Analytics utils', () => {
describe('isStartEvent', () => { describe('isStartEvent', () => {
it('will return true for a valid start event', () => { it('will return true for a valid start event', () => {
expect(isStartEvent(startEvents[0])).toEqual(true); expect(isStartEvent(startEvents[0])).toEqual(true);
...@@ -227,10 +227,15 @@ describe('Cycle analytics utils', () => { ...@@ -227,10 +227,15 @@ describe('Cycle analytics utils', () => {
describe('getTasksByTypeData', () => { describe('getTasksByTypeData', () => {
let transformed = {}; let transformed = {};
const groupBy = getDatesInRange(startDate, endDate, toYmd); const groupBy = getDatesInRange(startDate, endDate, toYmd);
// only return the values, drop the date which is the first parameter
const extractSeriesValues = ({ series }) => series.map(kv => kv[1]); const extractSeriesValues = ({ label: { title: name }, series }) => {
return {
name,
data: series.map(kv => kv[1]),
};
};
const data = rawTasksByTypeData.map(extractSeriesValues); const data = rawTasksByTypeData.map(extractSeriesValues);
const labels = rawTasksByTypeData.map(d => { const labels = rawTasksByTypeData.map(d => {
...@@ -241,7 +246,7 @@ describe('Cycle analytics utils', () => { ...@@ -241,7 +246,7 @@ describe('Cycle analytics utils', () => {
it('will return blank arrays if given no data', () => { it('will return blank arrays if given no data', () => {
[{ data: [], startDate, endDate }, [], {}].forEach(chartData => { [{ data: [], startDate, endDate }, [], {}].forEach(chartData => {
transformed = getTasksByTypeData(chartData); transformed = getTasksByTypeData(chartData);
['seriesNames', 'data', 'groupBy'].forEach(key => { ['data', 'groupBy'].forEach(key => {
expect(transformed[key]).toEqual([]); expect(transformed[key]).toEqual([]);
}); });
}); });
...@@ -253,17 +258,11 @@ describe('Cycle analytics utils', () => { ...@@ -253,17 +258,11 @@ describe('Cycle analytics utils', () => {
}); });
it('will return an object with the properties needed for the chart', () => { it('will return an object with the properties needed for the chart', () => {
['seriesNames', 'data', 'groupBy'].forEach(key => { ['data', 'groupBy'].forEach(key => {
expect(transformed).toHaveProperty(key); expect(transformed).toHaveProperty(key);
}); });
}); });
describe('seriesNames', () => {
it('returns the names of all the labels in the dataset', () => {
expect(transformed.seriesNames).toEqual(labels);
});
});
describe('groupBy', () => { describe('groupBy', () => {
it('returns the date groupBy as an array', () => { it('returns the date groupBy as an array', () => {
expect(transformed.groupBy).toEqual(groupBy); expect(transformed.groupBy).toEqual(groupBy);
...@@ -289,7 +288,7 @@ describe('Cycle analytics utils', () => { ...@@ -289,7 +288,7 @@ describe('Cycle analytics utils', () => {
it('contains a value for each day in the groupBy', () => { it('contains a value for each day in the groupBy', () => {
transformed.data.forEach(d => { transformed.data.forEach(d => {
expect(d).toHaveLength(transformed.groupBy.length); expect(d.data).toHaveLength(transformed.groupBy.length);
}); });
}); });
}); });
......
...@@ -200,7 +200,7 @@ describe('Api', () => { ...@@ -200,7 +200,7 @@ describe('Api', () => {
}); });
}); });
describe('Cycle analytics', () => { describe('Value Stream Analytics', () => {
const createdBefore = '2019-11-18'; const createdBefore = '2019-11-18';
const createdAfter = '2019-08-18'; const createdAfter = '2019-08-18';
const groupId = 'counting-54321'; const groupId = 'counting-54321';
......
...@@ -12,9 +12,12 @@ export const chartInfo = { ...@@ -12,9 +12,12 @@ export const chartInfo = {
export const barChartData = { export const barChartData = {
labels: ['January', 'February'], labels: ['January', 'February'],
datasets: { datasets: [
all: [['January', 1], ['February', 2]], {
}, name: 'all',
data: [['January', 1], ['February', 2]],
},
],
xAxisTitle: 'Months', xAxisTitle: 'Months',
yAxisTitle: 'Issues', yAxisTitle: 'Issues',
}; };
...@@ -37,8 +40,16 @@ export const lineChartData = { ...@@ -37,8 +40,16 @@ export const lineChartData = {
export const stackedBarChartData = { export const stackedBarChartData = {
labels: ['January', 'February'], labels: ['January', 'February'],
datasets: [[1, 2], [1, 2]], datasets: [
seriesNames: ['Series 1', 'Series 2'], {
name: 'Series 1',
data: [1, 2],
},
{
name: 'Series 2',
data: [1, 2],
},
],
xAxisTitle: 'Months', xAxisTitle: 'Months',
yAxisTitle: 'Issues', yAxisTitle: 'Issues',
}; };
......
...@@ -39,22 +39,9 @@ describe('Insights helpers', () => { ...@@ -39,22 +39,9 @@ describe('Insights helpers', () => {
datasets: [{ label: 'Dataset 1', data: [1] }, { label: 'Dataset 2', data: [2] }], datasets: [{ label: 'Dataset 1', data: [1] }, { label: 'Dataset 2', data: [2] }],
}; };
expect(transformChartDataForGlCharts(chart, data).datasets).toEqual([[1], [2]]); expect(transformChartDataForGlCharts(chart, data).datasets).toEqual([
}); { name: 'Dataset 1', data: [1] },
{ name: 'Dataset 2', data: [2] },
it('copies the dataset labels to seriesNames for stacked bar charts', () => {
const chart = {
type: CHART_TYPES.STACKED_BAR,
query: { group_by: 'month', issuable_type: 'issue' },
};
const data = {
labels: ['January', 'February'],
datasets: [{ label: 'Dataset 1', data: [1] }, { label: 'Dataset 2', data: [2] }],
};
expect(transformChartDataForGlCharts(chart, data).seriesNames).toEqual([
'Dataset 1',
'Dataset 2',
]); ]);
}); });
...@@ -74,7 +61,7 @@ describe('Insights helpers', () => { ...@@ -74,7 +61,7 @@ describe('Insights helpers', () => {
]); ]);
}); });
it('creates an object of all containing an array of label / data pairs for bar charts', () => { it('creates an array of objects containing an array of label / data pairs and a name for bar charts', () => {
const chart = { const chart = {
type: CHART_TYPES.BAR, type: CHART_TYPES.BAR,
query: { group_by: 'month', issuable_type: 'issue' }, query: { group_by: 'month', issuable_type: 'issue' },
...@@ -84,9 +71,9 @@ describe('Insights helpers', () => { ...@@ -84,9 +71,9 @@ describe('Insights helpers', () => {
datasets: [{ data: [1, 2] }], datasets: [{ data: [1, 2] }],
}; };
expect(transformChartDataForGlCharts(chart, data).datasets).toEqual({ expect(transformChartDataForGlCharts(chart, data).datasets).toEqual([
all: [['January', 1], ['February', 2]], { name: 'all', data: [['January', 1], ['February', 2]] },
}); ]);
}); });
it('creates an object of all containing an array of label / data pairs for pie charts', () => { it('creates an object of all containing an array of label / data pairs for pie charts', () => {
......
...@@ -107,11 +107,11 @@ describe('Insights mutations', () => { ...@@ -107,11 +107,11 @@ describe('Insights mutations', () => {
}; };
const transformedData = { const transformedData = {
datasets: [[1], [2]], datasets: [{ name: 'Dataset 1', data: [1] }, { name: 'Dataset 2', data: [2] }],
labels: ['January', 'February'], labels: ['January', 'February'],
xAxisTitle: 'Months', xAxisTitle: 'Months',
yAxisTitle: 'Issues', yAxisTitle: 'Issues',
seriesNames: ['Dataset 1', 'Dataset 2'], seriesNames: [],
}; };
beforeEach(() => { beforeEach(() => {
......
...@@ -2294,6 +2294,9 @@ msgstr "" ...@@ -2294,6 +2294,9 @@ msgstr ""
msgid "After a successful password update, you will be redirected to the login page where you can log in with your new password." msgid "After a successful password update, you will be redirected to the login page where you can log in with your new password."
msgstr "" msgstr ""
msgid "After sign-out path"
msgstr ""
msgid "After that, you will not be able to use merge approvals or code quality as well as many other features." msgid "After that, you will not be able to use merge approvals or code quality as well as many other features."
msgstr "" msgstr ""
...@@ -2818,6 +2821,9 @@ msgstr "" ...@@ -2818,6 +2821,9 @@ msgstr ""
msgid "An administrator changed the password for your GitLab account on %{link_to}." msgid "An administrator changed the password for your GitLab account on %{link_to}."
msgstr "" msgstr ""
msgid "An alert has been resolved in %{project_path}."
msgstr ""
msgid "An alert has been triggered in %{project_path}." msgid "An alert has been triggered in %{project_path}."
msgstr "" msgstr ""
...@@ -11165,6 +11171,9 @@ msgstr "" ...@@ -11165,6 +11171,9 @@ msgstr ""
msgid "Failed to load stacktrace." msgid "Failed to load stacktrace."
msgstr "" msgstr ""
msgid "Failed to make repository read-only. %{reason}"
msgstr ""
msgid "Failed to mark this issue as a duplicate because referenced issue was not found." msgid "Failed to mark this issue as a duplicate because referenced issue was not found."
msgstr "" msgstr ""
...@@ -24723,6 +24732,9 @@ msgstr "" ...@@ -24723,6 +24732,9 @@ msgstr ""
msgid "Sign-in restrictions" msgid "Sign-in restrictions"
msgstr "" msgstr ""
msgid "Sign-in text"
msgstr ""
msgid "Sign-up restrictions" msgid "Sign-up restrictions"
msgstr "" msgstr ""
...@@ -29426,6 +29438,12 @@ msgstr "" ...@@ -29426,6 +29438,12 @@ msgstr ""
msgid "View Documentation" msgid "View Documentation"
msgstr "" msgstr ""
msgid "View alert details at"
msgstr ""
msgid "View alert details."
msgstr ""
msgid "View all issues" msgid "View all issues"
msgstr "" msgstr ""
......
...@@ -23,13 +23,15 @@ RSpec.describe Projects::Settings::RepositoryController do ...@@ -23,13 +23,15 @@ RSpec.describe Projects::Settings::RepositoryController do
describe 'PUT cleanup' do describe 'PUT cleanup' do
let(:object_map) { fixture_file_upload('spec/fixtures/bfg_object_map.txt') } let(:object_map) { fixture_file_upload('spec/fixtures/bfg_object_map.txt') }
it 'enqueues a RepositoryCleanupWorker' do it 'enqueues a project cleanup' do
allow(RepositoryCleanupWorker).to receive(:perform_async) expect(Projects::CleanupService)
.to receive(:enqueue)
.with(project, user, anything)
.and_return(status: :success)
put :cleanup, params: { namespace_id: project.namespace, project_id: project, project: { object_map: object_map } } put :cleanup, params: { namespace_id: project.namespace, project_id: project, project: { bfg_object_map: object_map } }
expect(response).to redirect_to project_settings_repository_path(project) expect(response).to redirect_to project_settings_repository_path(project)
expect(RepositoryCleanupWorker).to have_received(:perform_async).once
end end
end end
......
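The spec above implies the controller's cleanup action now delegates to Projects::CleanupService.enqueue with the uploaded bfg_object_map. A minimal sketch of what that action could look like, assuming standard strong-params handling; the controller body itself is not part of this diff:

def cleanup
  # Assumed param handling; only the service call and the redirect are visible in the spec.
  object_map = params.require(:project).permit(:bfg_object_map)[:bfg_object_map]

  Projects::CleanupService.enqueue(project, current_user, object_map)

  redirect_to project_settings_repository_path(project)
end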
...@@ -30,6 +30,7 @@ describe('Column component', () => { ...@@ -30,6 +30,7 @@ describe('Column component', () => {
}, },
metrics: [ metrics: [
{ {
label: 'Mock data',
result: [ result: [
{ {
metric: {}, metric: {},
...@@ -96,7 +97,7 @@ describe('Column component', () => { ...@@ -96,7 +97,7 @@ describe('Column component', () => {
describe('wrapped components', () => { describe('wrapped components', () => {
describe('GitLab UI column chart', () => { describe('GitLab UI column chart', () => {
it('receives data properties needed for proper chart render', () => { it('receives data properties needed for proper chart render', () => {
expect(chartProps('data').values).toEqual(dataValues); expect(chartProps('bars')).toEqual([{ name: 'Mock data', data: dataValues }]);
}); });
it('passes the y axis name correctly', () => { it('passes the y axis name correctly', () => {
......
...@@ -44,19 +44,19 @@ describe('Stacked column chart component', () => { ...@@ -44,19 +44,19 @@ describe('Stacked column chart component', () => {
}); });
it('data should match the graphData y value for each series', () => { it('data should match the graphData y value for each series', () => {
const data = findChart().props('data'); const data = findChart().props('bars');
data.forEach((series, index) => { data.forEach((series, index) => {
const { values } = stackedColumnMockedData.metrics[index].result[0]; const { values } = stackedColumnMockedData.metrics[index].result[0];
expect(series).toEqual(values.map(value => value[1])); expect(series.data).toEqual(values.map(value => value[1]));
}); });
}); });
it('series names should be the same as the graphData metrics labels', () => { it('data should be the same length as the graphData metrics labels', () => {
const seriesNames = findChart().props('seriesNames'); const barDataProp = findChart().props('bars');
expect(seriesNames).toHaveLength(stackedColumnMockedData.metrics.length); expect(barDataProp).toHaveLength(stackedColumnMockedData.metrics.length);
seriesNames.forEach((name, index) => { barDataProp.forEach(({ name }, index) => {
expect(stackedColumnMockedData.metrics[index].label).toBe(name); expect(stackedColumnMockedData.metrics[index].label).toBe(name);
}); });
}); });
......
...@@ -45,7 +45,7 @@ describe('ProjectsPipelinesChartsApp', () => { ...@@ -45,7 +45,7 @@ describe('ProjectsPipelinesChartsApp', () => {
expect(chart.exists()).toBeTruthy(); expect(chart.exists()).toBeTruthy();
expect(chart.props('yAxisTitle')).toBe('Minutes'); expect(chart.props('yAxisTitle')).toBe('Minutes');
expect(chart.props('xAxisTitle')).toBe('Commit'); expect(chart.props('xAxisTitle')).toBe('Commit');
expect(chart.props('data')).toBe(wrapper.vm.timesChartTransformedData); expect(chart.props('bars')).toBe(wrapper.vm.timesChartTransformedData);
expect(chart.props('option')).toBe(wrapper.vm.$options.timesChartOptions); expect(chart.props('option')).toBe(wrapper.vm.$options.timesChartOptions);
}); });
}); });
......
...@@ -32,19 +32,13 @@ RSpec.describe Emails::Projects do ...@@ -32,19 +32,13 @@ RSpec.describe Emails::Projects do
describe '#prometheus_alert_fired_email' do describe '#prometheus_alert_fired_email' do
let(:default_title) { Gitlab::AlertManagement::Payload::Generic::DEFAULT_TITLE } let(:default_title) { Gitlab::AlertManagement::Payload::Generic::DEFAULT_TITLE }
let(:payload) { { 'startsAt' => Time.now.rfc3339 } } let(:payload) { { 'startsAt' => Time.now.rfc3339 } }
let(:alert_attributes) { build(:alert_management_alert, :from_payload, payload: payload, project: project).attributes } let(:alert) { create(:alert_management_alert, :from_payload, payload: payload, project: project) }
subject do subject do
Notify.prometheus_alert_fired_email(project.id, user.id, alert_attributes) Notify.prometheus_alert_fired_email(project, user, alert)
end end
context 'missing required attributes' do context 'with empty payload' do
let(:alert_attributes) { build(:alert_management_alert, :prometheus, :from_payload, payload: payload, project: project).attributes }
it_behaves_like 'no email'
end
context 'with minimum required attributes' do
let(:payload) { {} } let(:payload) { {} }
it_behaves_like 'an email sent from GitLab' it_behaves_like 'an email sent from GitLab'
...@@ -58,6 +52,7 @@ RSpec.describe Emails::Projects do ...@@ -58,6 +52,7 @@ RSpec.describe Emails::Projects do
it 'has expected content' do it 'has expected content' do
is_expected.to have_body_text('An alert has been triggered') is_expected.to have_body_text('An alert has been triggered')
is_expected.to have_body_text(project.full_path) is_expected.to have_body_text(project.full_path)
is_expected.to have_body_text(alert.details_url)
is_expected.not_to have_body_text('Description:') is_expected.not_to have_body_text('Description:')
is_expected.not_to have_body_text('Environment:') is_expected.not_to have_body_text('Environment:')
is_expected.not_to have_body_text('Metric:') is_expected.not_to have_body_text('Metric:')
...@@ -78,6 +73,7 @@ RSpec.describe Emails::Projects do ...@@ -78,6 +73,7 @@ RSpec.describe Emails::Projects do
it 'has expected content' do it 'has expected content' do
is_expected.to have_body_text('An alert has been triggered') is_expected.to have_body_text('An alert has been triggered')
is_expected.to have_body_text(project.full_path) is_expected.to have_body_text(project.full_path)
is_expected.to have_body_text(alert.details_url)
is_expected.to have_body_text('Description:') is_expected.to have_body_text('Description:')
is_expected.to have_body_text('alert description') is_expected.to have_body_text('alert description')
is_expected.not_to have_body_text('Environment:') is_expected.not_to have_body_text('Environment:')
...@@ -101,6 +97,7 @@ RSpec.describe Emails::Projects do ...@@ -101,6 +97,7 @@ RSpec.describe Emails::Projects do
it 'has expected content' do it 'has expected content' do
is_expected.to have_body_text('An alert has been triggered') is_expected.to have_body_text('An alert has been triggered')
is_expected.to have_body_text(project.full_path) is_expected.to have_body_text(project.full_path)
is_expected.to have_body_text(alert.details_url)
is_expected.to have_body_text('Environment:') is_expected.to have_body_text('Environment:')
is_expected.to have_body_text(environment.name) is_expected.to have_body_text(environment.name)
is_expected.not_to have_body_text('Description:') is_expected.not_to have_body_text('Description:')
...@@ -112,7 +109,7 @@ RSpec.describe Emails::Projects do ...@@ -112,7 +109,7 @@ RSpec.describe Emails::Projects do
let_it_be(:prometheus_alert) { create(:prometheus_alert, project: project) } let_it_be(:prometheus_alert) { create(:prometheus_alert, project: project) }
let_it_be(:environment) { prometheus_alert.environment } let_it_be(:environment) { prometheus_alert.environment }
let(:alert_attributes) { build(:alert_management_alert, :prometheus, :from_payload, payload: payload, project: project).attributes } let(:alert) { create(:alert_management_alert, :prometheus, :from_payload, payload: payload, project: project) }
let(:title) { "#{prometheus_alert.title} #{prometheus_alert.computed_operator} #{prometheus_alert.threshold}" } let(:title) { "#{prometheus_alert.title} #{prometheus_alert.computed_operator} #{prometheus_alert.threshold}" }
let(:metrics_url) { metrics_project_environment_url(project, environment) } let(:metrics_url) { metrics_project_environment_url(project, environment) }
...@@ -135,6 +132,7 @@ RSpec.describe Emails::Projects do ...@@ -135,6 +132,7 @@ RSpec.describe Emails::Projects do
it 'has expected content' do it 'has expected content' do
is_expected.to have_body_text('An alert has been triggered') is_expected.to have_body_text('An alert has been triggered')
is_expected.to have_body_text(project.full_path) is_expected.to have_body_text(project.full_path)
is_expected.to have_body_text(alert.details_url)
is_expected.to have_body_text('Environment:') is_expected.to have_body_text('Environment:')
is_expected.to have_body_text(environment.name) is_expected.to have_body_text(environment.name)
is_expected.to have_body_text('Metric:') is_expected.to have_body_text('Metric:')
...@@ -143,5 +141,23 @@ RSpec.describe Emails::Projects do ...@@ -143,5 +141,23 @@ RSpec.describe Emails::Projects do
is_expected.not_to have_body_text('Description:') is_expected.not_to have_body_text('Description:')
end end
end end
context 'resolved' do
let_it_be(:alert) { create(:alert_management_alert, :resolved, project: project) }
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
it_behaves_like 'a user cannot unsubscribe through footer link'
it 'has expected subject' do
is_expected.to have_subject("#{project.name} | Alert: #{alert.title}")
end
it 'has expected content' do
is_expected.to have_body_text('An alert has been resolved')
is_expected.to have_body_text(project.full_path)
is_expected.to have_body_text(alert.details_url)
end
end
end end
end end
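A short usage sketch of the updated mailer signature: per the expectations above it now takes ActiveRecord objects instead of ids and attribute hashes, links to alert.details_url, and gains a resolved variant. The deliver_later call is standard ActionMailer usage, not taken from this diff:

alert = AlertManagement::Alert.last

Notify.prometheus_alert_fired_email(project, user, alert).deliver_later
# Fired alerts render "An alert has been triggered in <project.full_path>.";
# resolved alerts render "An alert has been resolved in <project.full_path>.";
# both bodies now include alert.details_url.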
...@@ -73,18 +73,20 @@ RSpec.describe API::Files do ...@@ -73,18 +73,20 @@ RSpec.describe API::Files do
describe "HEAD /projects/:id/repository/files/:file_path" do describe "HEAD /projects/:id/repository/files/:file_path" do
shared_examples_for 'repository files' do shared_examples_for 'repository files' do
let(:options) { {} }
it 'returns 400 when file path is invalid' do it 'returns 400 when file path is invalid' do
head api(route(rouge_file_path), current_user), params: params head api(route(rouge_file_path), current_user, **options), params: params
expect(response).to have_gitlab_http_status(:bad_request) expect(response).to have_gitlab_http_status(:bad_request)
end end
it_behaves_like 'when path is absolute' do it_behaves_like 'when path is absolute' do
subject { head api(route(absolute_path), current_user), params: params } subject { head api(route(absolute_path), current_user, **options), params: params }
end end
it 'returns file attributes in headers' do it 'returns file attributes in headers' do
head api(route(file_path), current_user), params: params head api(route(file_path), current_user, **options), params: params
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(response.headers['X-Gitlab-File-Path']).to eq(CGI.unescape(file_path)) expect(response.headers['X-Gitlab-File-Path']).to eq(CGI.unescape(file_path))
...@@ -98,7 +100,7 @@ RSpec.describe API::Files do ...@@ -98,7 +100,7 @@ RSpec.describe API::Files do
file_path = "files%2Fjs%2Fcommit%2Ejs%2Ecoffee" file_path = "files%2Fjs%2Fcommit%2Ejs%2Ecoffee"
params[:ref] = "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9" params[:ref] = "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9"
head api(route(file_path), current_user), params: params head api(route(file_path), current_user, **options), params: params
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(response.headers['X-Gitlab-File-Name']).to eq('commit.js.coffee') expect(response.headers['X-Gitlab-File-Name']).to eq('commit.js.coffee')
...@@ -107,7 +109,7 @@ RSpec.describe API::Files do ...@@ -107,7 +109,7 @@ RSpec.describe API::Files do
context 'when mandatory params are not given' do context 'when mandatory params are not given' do
it "responds with a 400 status" do it "responds with a 400 status" do
head api(route("any%2Ffile"), current_user) head api(route("any%2Ffile"), current_user, **options)
expect(response).to have_gitlab_http_status(:bad_request) expect(response).to have_gitlab_http_status(:bad_request)
end end
...@@ -117,7 +119,7 @@ RSpec.describe API::Files do ...@@ -117,7 +119,7 @@ RSpec.describe API::Files do
it "responds with a 404 status" do it "responds with a 404 status" do
params[:ref] = 'master' params[:ref] = 'master'
head api(route('app%2Fmodels%2Fapplication%2Erb'), current_user), params: params head api(route('app%2Fmodels%2Fapplication%2Erb'), current_user, **options), params: params
expect(response).to have_gitlab_http_status(:not_found) expect(response).to have_gitlab_http_status(:not_found)
end end
...@@ -127,7 +129,7 @@ RSpec.describe API::Files do ...@@ -127,7 +129,7 @@ RSpec.describe API::Files do
include_context 'disabled repository' include_context 'disabled repository'
it "responds with a 403 status" do it "responds with a 403 status" do
head api(route(file_path), current_user), params: params head api(route(file_path), current_user, **options), params: params
expect(response).to have_gitlab_http_status(:forbidden) expect(response).to have_gitlab_http_status(:forbidden)
end end
...@@ -154,8 +156,8 @@ RSpec.describe API::Files do ...@@ -154,8 +156,8 @@ RSpec.describe API::Files do
context 'when PATs are used' do context 'when PATs are used' do
it_behaves_like 'repository files' do it_behaves_like 'repository files' do
let(:token) { create(:personal_access_token, scopes: ['read_repository'], user: user) } let(:token) { create(:personal_access_token, scopes: ['read_repository'], user: user) }
let(:current_user) { user } let(:current_user) { nil }
let(:api_user) { { personal_access_token: token } } let(:options) { { personal_access_token: token } }
end end
end end
...@@ -174,21 +176,21 @@ RSpec.describe API::Files do ...@@ -174,21 +176,21 @@ RSpec.describe API::Files do
describe "GET /projects/:id/repository/files/:file_path" do describe "GET /projects/:id/repository/files/:file_path" do
shared_examples_for 'repository files' do shared_examples_for 'repository files' do
let(:api_user) { current_user } let(:options) { {} }
it 'returns 400 for invalid file path' do it 'returns 400 for invalid file path' do
get api(route(rouge_file_path), api_user), params: params get api(route(rouge_file_path), api_user, **options), params: params
expect(response).to have_gitlab_http_status(:bad_request) expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq(invalid_file_message) expect(json_response['error']).to eq(invalid_file_message)
end end
it_behaves_like 'when path is absolute' do it_behaves_like 'when path is absolute' do
subject { get api(route(absolute_path), api_user), params: params } subject { get api(route(absolute_path), api_user, **options), params: params }
end end
it 'returns file attributes as json' do it 'returns file attributes as json' do
get api(route(file_path), api_user), params: params get api(route(file_path), api_user, **options), params: params
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['file_path']).to eq(CGI.unescape(file_path)) expect(json_response['file_path']).to eq(CGI.unescape(file_path))
...@@ -201,10 +203,10 @@ RSpec.describe API::Files do ...@@ -201,10 +203,10 @@ RSpec.describe API::Files do
it 'returns json when file has txt extension' do it 'returns json when file has txt extension' do
file_path = "bar%2Fbranch-test.txt" file_path = "bar%2Fbranch-test.txt"
get api(route(file_path), api_user), params: params get api(route(file_path), api_user, **options), params: params
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq('application/json') expect(response.media_type).to eq('application/json')
end end
context 'with filename with pathspec characters' do context 'with filename with pathspec characters' do
...@@ -218,7 +220,7 @@ RSpec.describe API::Files do ...@@ -218,7 +220,7 @@ RSpec.describe API::Files do
it 'returns JSON with commit SHA' do it 'returns JSON with commit SHA' do
params[:ref] = 'master' params[:ref] = 'master'
get api(route(file_path), api_user), params: params get api(route(file_path), api_user, **options), params: params
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['file_path']).to eq(file_path) expect(json_response['file_path']).to eq(file_path)
...@@ -232,7 +234,7 @@ RSpec.describe API::Files do ...@@ -232,7 +234,7 @@ RSpec.describe API::Files do
file_path = "files%2Fjs%2Fcommit%2Ejs%2Ecoffee" file_path = "files%2Fjs%2Fcommit%2Ejs%2Ecoffee"
params[:ref] = "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9" params[:ref] = "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9"
get api(route(file_path), api_user), params: params get api(route(file_path), api_user, **options), params: params
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['file_name']).to eq('commit.js.coffee') expect(json_response['file_name']).to eq('commit.js.coffee')
...@@ -244,7 +246,7 @@ RSpec.describe API::Files do ...@@ -244,7 +246,7 @@ RSpec.describe API::Files do
url = route(file_path) + "/raw" url = route(file_path) + "/raw"
expect(Gitlab::Workhorse).to receive(:send_git_blob) expect(Gitlab::Workhorse).to receive(:send_git_blob)
get api(url, api_user), params: params get api(url, api_user, **options), params: params
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(headers[Gitlab::Workhorse::DETECT_HEADER]).to eq "true" expect(headers[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
...@@ -253,7 +255,7 @@ RSpec.describe API::Files do ...@@ -253,7 +255,7 @@ RSpec.describe API::Files do
it 'returns blame file info' do it 'returns blame file info' do
url = route(file_path) + '/blame' url = route(file_path) + '/blame'
get api(url, api_user), params: params get api(url, api_user, **options), params: params
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
end end
...@@ -261,14 +263,14 @@ RSpec.describe API::Files do ...@@ -261,14 +263,14 @@ RSpec.describe API::Files do
it 'sets inline content disposition by default' do it 'sets inline content disposition by default' do
url = route(file_path) + "/raw" url = route(file_path) + "/raw"
get api(url, api_user), params: params get api(url, api_user, **options), params: params
expect(headers['Content-Disposition']).to eq(%q(inline; filename="popen.rb"; filename*=UTF-8''popen.rb)) expect(headers['Content-Disposition']).to eq(%q(inline; filename="popen.rb"; filename*=UTF-8''popen.rb))
end end
context 'when mandatory params are not given' do context 'when mandatory params are not given' do
it_behaves_like '400 response' do it_behaves_like '400 response' do
let(:request) { get api(route("any%2Ffile"), current_user) } let(:request) { get api(route("any%2Ffile"), current_user, **options) }
end end
end end
...@@ -276,7 +278,7 @@ RSpec.describe API::Files do ...@@ -276,7 +278,7 @@ RSpec.describe API::Files do
let(:params) { { ref: 'master' } } let(:params) { { ref: 'master' } }
it_behaves_like '404 response' do it_behaves_like '404 response' do
let(:request) { get api(route('app%2Fmodels%2Fapplication%2Erb'), api_user), params: params } let(:request) { get api(route('app%2Fmodels%2Fapplication%2Erb'), api_user, **options), params: params }
let(:message) { '404 File Not Found' } let(:message) { '404 File Not Found' }
end end
end end
...@@ -285,7 +287,7 @@ RSpec.describe API::Files do ...@@ -285,7 +287,7 @@ RSpec.describe API::Files do
include_context 'disabled repository' include_context 'disabled repository'
it_behaves_like '403 response' do it_behaves_like '403 response' do
let(:request) { get api(route(file_path), api_user), params: params } let(:request) { get api(route(file_path), api_user, **options), params: params }
end end
end end
end end
...@@ -294,6 +296,7 @@ RSpec.describe API::Files do ...@@ -294,6 +296,7 @@ RSpec.describe API::Files do
it_behaves_like 'repository files' do it_behaves_like 'repository files' do
let(:project) { create(:project, :public, :repository) } let(:project) { create(:project, :public, :repository) }
let(:current_user) { nil } let(:current_user) { nil }
let(:api_user) { nil }
end end
end end
...@@ -301,7 +304,8 @@ RSpec.describe API::Files do ...@@ -301,7 +304,8 @@ RSpec.describe API::Files do
it_behaves_like 'repository files' do it_behaves_like 'repository files' do
let(:token) { create(:personal_access_token, scopes: ['read_repository'], user: user) } let(:token) { create(:personal_access_token, scopes: ['read_repository'], user: user) }
let(:current_user) { user } let(:current_user) { user }
let(:api_user) { { personal_access_token: token } } let(:api_user) { nil }
let(:options) { { personal_access_token: token } }
end end
end end
...@@ -315,6 +319,7 @@ RSpec.describe API::Files do ...@@ -315,6 +319,7 @@ RSpec.describe API::Files do
context 'when authenticated', 'as a developer' do context 'when authenticated', 'as a developer' do
it_behaves_like 'repository files' do it_behaves_like 'repository files' do
let(:current_user) { user } let(:current_user) { user }
let(:api_user) { user }
end end
end end
...@@ -687,7 +692,7 @@ RSpec.describe API::Files do ...@@ -687,7 +692,7 @@ RSpec.describe API::Files do
post api(route("new_file_with_author%2Etxt"), user), params: params post api(route("new_file_with_author%2Etxt"), user), params: params
expect(response).to have_gitlab_http_status(:created) expect(response).to have_gitlab_http_status(:created)
expect(response.content_type).to eq('application/json') expect(response.media_type).to eq('application/json')
last_commit = project.repository.commit.raw last_commit = project.repository.commit.raw
expect(last_commit.author_email).to eq(author_email) expect(last_commit.author_email).to eq(author_email)
expect(last_commit.author_name).to eq(author_name) expect(last_commit.author_name).to eq(author_name)
......
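The recurring change in this spec is that credentials are passed to the api helper as keyword options, with current_user/api_user left nil, and response.media_type replaces the deprecated content_type check. A condensed sketch of the pattern, reusing the names from the shared examples above:

let(:token)   { create(:personal_access_token, scopes: ['read_repository'], user: user) }
let(:options) { { personal_access_token: token } }

# The PAT travels through the **options splat rather than a user argument.
get api(route(file_path), nil, **options), params: params

expect(response).to have_gitlab_http_status(:ok)
expect(response.media_type).to eq('application/json')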
...@@ -11,9 +11,16 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do ...@@ -11,9 +11,16 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
describe '#execute' do describe '#execute' do
let(:service) { described_class.new(project, nil, payload) } let(:service) { described_class.new(project, nil, payload) }
let(:incident_management_setting) { double(auto_close_incident?: auto_close_incident, create_issue?: create_issue) }
let(:auto_close_incident) { true } let(:auto_close_incident) { true }
let(:create_issue) { true } let(:create_issue) { true }
let(:send_email) { true }
let(:incident_management_setting) do
double(
auto_close_incident?: auto_close_incident,
create_issue?: create_issue,
send_email?: send_email
)
end
before do before do
allow(service) allow(service)
...@@ -55,6 +62,7 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do ...@@ -55,6 +62,7 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
it_behaves_like 'adds an alert management alert event' it_behaves_like 'adds an alert management alert event'
it_behaves_like 'processes incident issues' it_behaves_like 'processes incident issues'
it_behaves_like 'Alert Notification Service sends notification email'
context 'existing alert is resolved' do context 'existing alert is resolved' do
let!(:alert) { create(:alert_management_alert, :resolved, project: project, fingerprint: fingerprint) } let!(:alert) { create(:alert_management_alert, :resolved, project: project, fingerprint: fingerprint) }
...@@ -92,28 +100,48 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do ...@@ -92,28 +100,48 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
end end
end end
context 'when auto-alert creation is disabled' do context 'when auto-creation of issues is disabled' do
let(:create_issue) { false } let(:create_issue) { false }
it_behaves_like 'does not process incident issues' it_behaves_like 'does not process incident issues'
end end
context 'when emails are disabled' do
let(:send_email) { false }
it 'does not send notification' do
expect(NotificationService).not_to receive(:new)
expect(subject).to be_success
end
end
end end
context 'when alert does not exist' do context 'when alert does not exist' do
context 'when alert can be created' do context 'when alert can be created' do
it_behaves_like 'creates an alert management alert' it_behaves_like 'creates an alert management alert'
it_behaves_like 'Alert Notification Service sends notification email'
it_behaves_like 'processes incident issues'
it 'creates a system note corresponding to alert creation' do it 'creates a system note corresponding to alert creation' do
expect { subject }.to change(Note, :count).by(1) expect { subject }.to change(Note, :count).by(1)
end end
it_behaves_like 'processes incident issues'
context 'when auto-alert creation is disabled' do context 'when auto-alert creation is disabled' do
let(:create_issue) { false } let(:create_issue) { false }
it_behaves_like 'does not process incident issues' it_behaves_like 'does not process incident issues'
end end
context 'when emails are disabled' do
let(:send_email) { false }
it 'does not send notification' do
expect(NotificationService).not_to receive(:new)
expect(subject).to be_success
end
end
end end
context 'when alert cannot be created' do context 'when alert cannot be created' do
...@@ -125,6 +153,9 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do ...@@ -125,6 +153,9 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
allow(service).to receive_message_chain(:alert, :errors).and_return(errors) allow(service).to receive_message_chain(:alert, :errors).and_return(errors)
end end
it_behaves_like 'Alert Notification Service sends no notifications', http_status: :bad_request
it_behaves_like 'does not process incident issues due to error', http_status: :bad_request
it 'writes a warning to the log' do it 'writes a warning to the log' do
expect(Gitlab::AppLogger).to receive(:warn).with( expect(Gitlab::AppLogger).to receive(:warn).with(
message: 'Unable to create AlertManagement::Alert', message: 'Unable to create AlertManagement::Alert',
...@@ -134,8 +165,6 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do ...@@ -134,8 +165,6 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
execute execute
end end
it_behaves_like 'does not process incident issues'
end end
it { is_expected.to be_success } it { is_expected.to be_success }
...@@ -148,6 +177,9 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do ...@@ -148,6 +177,9 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
context 'when auto_resolve_incident set to true' do context 'when auto_resolve_incident set to true' do
context 'when status can be changed' do context 'when status can be changed' do
it_behaves_like 'Alert Notification Service sends notification email'
it_behaves_like 'does not process incident issues'
it 'resolves an existing alert' do it 'resolves an existing alert' do
expect { execute }.to change { alert.reload.resolved? }.to(true) expect { execute }.to change { alert.reload.resolved? }.to(true)
end end
...@@ -185,6 +217,8 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do ...@@ -185,6 +217,8 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
execute execute
end end
it_behaves_like 'Alert Notification Service sends notification email'
end end
it { is_expected.to be_success } it { is_expected.to be_success }
...@@ -197,6 +231,16 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do ...@@ -197,6 +231,16 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
expect { execute }.not_to change { alert.reload.resolved? } expect { execute }.not_to change { alert.reload.resolved? }
end end
end end
context 'when emails are disabled' do
let(:send_email) { false }
it 'does not send notification' do
expect(NotificationService).not_to receive(:new)
expect(subject).to be_success
end
end
end end
context 'environment given' do context 'environment given' do
......
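The new send_email? flag suggests the service guards its notification call behind the project's incident management setting. A hypothetical sketch of that guard, reconstructed from the doubles and the 'does not send notification' examples above; the service internals are not shown in this diff:

def send_alert_email
  return unless incident_management_setting.send_email?

  # prometheus_alerts_fired takes the project and an array of alert records,
  # matching the NotificationService expectations below.
  notification_service.async.prometheus_alerts_fired(project, [alert])
end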
...@@ -3100,26 +3100,26 @@ RSpec.describe NotificationService, :mailer do ...@@ -3100,26 +3100,26 @@ RSpec.describe NotificationService, :mailer do
end end
describe '#prometheus_alerts_fired' do describe '#prometheus_alerts_fired' do
let!(:project) { create(:project) } let_it_be(:project) { create(:project) }
let!(:master) { create(:user) } let_it_be(:master) { create(:user) }
let!(:developer) { create(:user) } let_it_be(:developer) { create(:user) }
let(:alert_attributes) { build(:alert_management_alert, project: project).attributes } let_it_be(:alert) { create(:alert_management_alert, project: project) }
before do before do
project.add_maintainer(master) project.add_maintainer(master)
end end
it 'sends the email to owners and masters' do it 'sends the email to owners and masters' do
expect(Notify).to receive(:prometheus_alert_fired_email).with(project.id, master.id, alert_attributes).and_call_original expect(Notify).to receive(:prometheus_alert_fired_email).with(project, master, alert).and_call_original
expect(Notify).to receive(:prometheus_alert_fired_email).with(project.id, project.owner.id, alert_attributes).and_call_original expect(Notify).to receive(:prometheus_alert_fired_email).with(project, project.owner, alert).and_call_original
expect(Notify).not_to receive(:prometheus_alert_fired_email).with(project.id, developer.id, alert_attributes) expect(Notify).not_to receive(:prometheus_alert_fired_email).with(project, developer, alert)
subject.prometheus_alerts_fired(project, [alert_attributes]) subject.prometheus_alerts_fired(project, [alert])
end end
it_behaves_like 'project emails are disabled' do it_behaves_like 'project emails are disabled' do
let(:notification_target) { project } let(:notification_target) { project }
let(:notification_trigger) { subject.prometheus_alerts_fired(project, [alert_attributes]) } let(:notification_trigger) { subject.prometheus_alerts_fired(project, [alert]) }
around do |example| around do |example|
perform_enqueued_jobs { example.run } perform_enqueued_jobs { example.run }
......
...@@ -129,6 +129,12 @@ RSpec.describe Projects::Alerting::NotifyService do ...@@ -129,6 +129,12 @@ RSpec.describe Projects::Alerting::NotifyService do
it { expect { subject }.to change { issue.reload.state }.from('opened').to('closed') } it { expect { subject }.to change { issue.reload.state }.from('opened').to('closed') }
it { expect { subject }.to change(ResourceStateEvent, :count).by(1) } it { expect { subject }.to change(ResourceStateEvent, :count).by(1) }
end end
context 'with issue enabled' do
let(:issue_enabled) { true }
it_behaves_like 'does not process incident issues'
end
end end
end end
......
...@@ -3,14 +3,84 @@ ...@@ -3,14 +3,84 @@
require 'spec_helper' require 'spec_helper'
RSpec.describe Projects::CleanupService do RSpec.describe Projects::CleanupService do
let(:project) { create(:project, :repository, bfg_object_map: fixture_file_upload('spec/fixtures/bfg_object_map.txt')) } subject(:service) { described_class.new(project) }
let(:object_map) { project.bfg_object_map }
let(:cleaner) { service.__send__(:repository_cleaner) } describe '.enqueue' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
subject(:service) { described_class.new(project) } let(:object_map_file) { fixture_file_upload('spec/fixtures/bfg_object_map.txt') }
subject(:enqueue) { described_class.enqueue(project, user, object_map_file) }
it 'makes the repository read-only' do
expect { enqueue }
.to change(project, :repository_read_only?)
.from(false)
.to(true)
end
it 'sets the bfg_object_map of the project' do
enqueue
expect(project.bfg_object_map.read).to eq(object_map_file.read)
end
it 'enqueues a RepositoryCleanupWorker' do
enqueue
expect(RepositoryCleanupWorker.jobs.count).to eq(1)
end
it 'returns success' do
expect(enqueue[:status]).to eq(:success)
end
it 'returns an error if making the repository read-only fails' do
project.set_repository_read_only!
expect(enqueue[:status]).to eq(:error)
end
it 'returns an error if updating the project fails' do
expect_next_instance_of(Projects::UpdateService) do |service|
expect(service).to receive(:execute).and_return(status: :error)
end
expect(enqueue[:status]).to eq(:error)
expect(project.reload.repository_read_only?).to be_falsy
end
end
describe '.cleanup_after' do
let(:project) { create(:project, :repository, bfg_object_map: fixture_file_upload('spec/fixtures/bfg_object_map.txt')) }
subject(:cleanup_after) { described_class.cleanup_after(project) }
before do
project.set_repository_read_only!
end
it 'sets the repository read-write' do
expect { cleanup_after }.to change(project, :repository_read_only?).from(true).to(false)
end
it 'removes the BFG object map' do
cleanup_after
expect(project.bfg_object_map).not_to be_exist
end
end
describe '#execute' do describe '#execute' do
let(:project) { create(:project, :repository, bfg_object_map: fixture_file_upload('spec/fixtures/bfg_object_map.txt')) }
let(:object_map) { project.bfg_object_map }
let(:cleaner) { service.__send__(:repository_cleaner) }
before do
project.set_repository_read_only!
end
it 'runs the apply_bfg_object_map_stream gitaly RPC' do it 'runs the apply_bfg_object_map_stream gitaly RPC' do
expect(cleaner).to receive(:apply_bfg_object_map_stream).with(kind_of(IO)) expect(cleaner).to receive(:apply_bfg_object_map_stream).with(kind_of(IO))
...@@ -37,6 +107,13 @@ RSpec.describe Projects::CleanupService do ...@@ -37,6 +107,13 @@ RSpec.describe Projects::CleanupService do
expect(object_map.exists?).to be_falsy expect(object_map.exists?).to be_falsy
end end
it 'makes the repository read-write again' do
expect { service.execute }
.to change(project, :repository_read_only?)
.from(true)
.to(false)
end
context 'with a tainted merge request diff' do context 'with a tainted merge request diff' do
let(:merge_request) { create(:merge_request, source_project: project, target_project: project) } let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
let(:diff) { merge_request.merge_request_diff } let(:diff) { merge_request.merge_request_diff }
......
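Taken together, the .enqueue and .cleanup_after examples outline a lifecycle: mark the repository read-only and store the object map, run the cleanup in a worker, then restore write access and drop the map. A rough sketch of the two class methods, assuming Projects::UpdateService accepts these attributes; the bodies are reconstructed from the expectations, not copied from the implementation:

def self.enqueue(project, current_user, bfg_object_map)
  result = Projects::UpdateService
    .new(project, current_user, bfg_object_map: bfg_object_map, repository_read_only: true)
    .execute

  # Only queue the worker once the project update (and the read-only flag) succeeded.
  RepositoryCleanupWorker.perform_async(project.id, current_user.id) if result[:status] == :success

  result
end

def self.cleanup_after(project)
  project.bfg_object_map.remove!   # drop the uploaded BFG object map
  project.set_repository_writable! # assumed counterpart to set_repository_read_only!
end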
...@@ -40,6 +40,8 @@ RSpec.describe RepositoryCleanupWorker do ...@@ -40,6 +40,8 @@ RSpec.describe RepositoryCleanupWorker do
describe '#sidekiq_retries_exhausted' do describe '#sidekiq_retries_exhausted' do
let(:job) { { 'args' => [project.id, user.id], 'error_message' => 'Error' } } let(:job) { { 'args' => [project.id, user.id], 'error_message' => 'Error' } }
subject(:sidekiq_retries_exhausted) { described_class.sidekiq_retries_exhausted_block.call(job, StandardError.new) }
it 'does not send a failure notification for a RecordNotFound error' do it 'does not send a failure notification for a RecordNotFound error' do
expect(NotificationService).not_to receive(:new) expect(NotificationService).not_to receive(:new)
...@@ -51,7 +53,13 @@ RSpec.describe RepositoryCleanupWorker do ...@@ -51,7 +53,13 @@ RSpec.describe RepositoryCleanupWorker do
expect(service).to receive(:repository_cleanup_failure).with(project, user, 'Error') expect(service).to receive(:repository_cleanup_failure).with(project, user, 'Error')
end end
described_class.sidekiq_retries_exhausted_block.call(job, StandardError.new) sidekiq_retries_exhausted
end
it 'cleans up the attempt' do
expect(Projects::CleanupService).to receive(:cleanup_after).with(project)
sidekiq_retries_exhausted
end end
end end
end end
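The worker spec implies the retries-exhausted hook now both notifies the user and unwinds the read-only state via Projects::CleanupService.cleanup_after. A sketch under that assumption; the record lookup and error handling here are illustrative:

sidekiq_retries_exhausted do |job, _exception|
  project = Project.find_by_id(job['args'][0])
  user = User.find_by_id(job['args'][1])

  next unless project # a missing record should not raise or notify

  Projects::CleanupService.cleanup_after(project)
  NotificationService.new.repository_cleanup_failure(project, user, job['error_message']) if user
end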
...@@ -7590,6 +7590,13 @@ linkify-it@^2.0.0: ...@@ -7590,6 +7590,13 @@ linkify-it@^2.0.0:
dependencies: dependencies:
uc.micro "^1.0.1" uc.micro "^1.0.1"
linkify-it@^3.0.1:
version "3.0.2"
resolved "https://registry.yarnpkg.com/linkify-it/-/linkify-it-3.0.2.tgz#f55eeb8bc1d3ae754049e124ab3bb56d97797fb8"
integrity sha512-gDBO4aHNZS6coiZCKVhSNh43F9ioIL4JwRjLZPkoLIY4yZFwg264Y5lu2x6rb1Js42Gh6Yqm2f6L2AJcnkzinQ==
dependencies:
uc.micro "^1.0.1"
load-json-file@^1.0.0: load-json-file@^1.0.0:
version "1.1.0" version "1.1.0"
resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0"
...@@ -7971,14 +7978,14 @@ markdown-escapes@^1.0.0: ...@@ -7971,14 +7978,14 @@ markdown-escapes@^1.0.0:
resolved "https://registry.yarnpkg.com/markdown-escapes/-/markdown-escapes-1.0.2.tgz#e639cbde7b99c841c0bacc8a07982873b46d2122" resolved "https://registry.yarnpkg.com/markdown-escapes/-/markdown-escapes-1.0.2.tgz#e639cbde7b99c841c0bacc8a07982873b46d2122"
integrity sha512-lbRZ2mE3Q9RtLjxZBZ9+IMl68DKIXaVAhwvwn9pmjnPLS0h/6kyBMgNhqi1xFJ/2yv6cSyv0jbiZavZv93JkkA== integrity sha512-lbRZ2mE3Q9RtLjxZBZ9+IMl68DKIXaVAhwvwn9pmjnPLS0h/6kyBMgNhqi1xFJ/2yv6cSyv0jbiZavZv93JkkA==
markdown-it@10.0.0: markdown-it@11.0.0:
version "10.0.0" version "11.0.0"
resolved "https://registry.yarnpkg.com/markdown-it/-/markdown-it-10.0.0.tgz#abfc64f141b1722d663402044e43927f1f50a8dc" resolved "https://registry.yarnpkg.com/markdown-it/-/markdown-it-11.0.0.tgz#dbfc30363e43d756ebc52c38586b91b90046b876"
integrity sha512-YWOP1j7UbDNz+TumYP1kpwnP0aEa711cJjrAQrzd0UXlbJfc5aAq0F/PZHjiioqDC1NKgvIMX+o+9Bk7yuM2dg== integrity sha512-+CvOnmbSubmQFSA9dKz1BRiaSMV7rhexl3sngKqFyXSagoA3fBdJQ8oZWtRy2knXdpDXaBw44euz37DeJQ9asg==
dependencies: dependencies:
argparse "^1.0.7" argparse "^1.0.7"
entities "~2.0.0" entities "~2.0.0"
linkify-it "^2.0.0" linkify-it "^3.0.1"
mdurl "^1.0.1" mdurl "^1.0.1"
uc.micro "^1.0.5" uc.micro "^1.0.5"
...@@ -7998,10 +8005,10 @@ markdown-table@^1.1.0: ...@@ -7998,10 +8005,10 @@ markdown-table@^1.1.0:
resolved "https://registry.yarnpkg.com/markdown-table/-/markdown-table-1.1.2.tgz#c78db948fa879903a41bce522e3b96f801c63786" resolved "https://registry.yarnpkg.com/markdown-table/-/markdown-table-1.1.2.tgz#c78db948fa879903a41bce522e3b96f801c63786"
integrity sha512-NcWuJFHDA8V3wkDgR/j4+gZx+YQwstPgfQDV8ndUeWWzta3dnDTBxpVzqS9lkmJAuV5YX35lmyojl6HO5JXAgw== integrity sha512-NcWuJFHDA8V3wkDgR/j4+gZx+YQwstPgfQDV8ndUeWWzta3dnDTBxpVzqS9lkmJAuV5YX35lmyojl6HO5JXAgw==
markdownlint-cli@0.23.2: markdownlint-cli@0.24.0:
version "0.23.2" version "0.24.0"
resolved "https://registry.yarnpkg.com/markdownlint-cli/-/markdownlint-cli-0.23.2.tgz#43f0a96e2699542b74fb5e6188c025e4934357fc" resolved "https://registry.yarnpkg.com/markdownlint-cli/-/markdownlint-cli-0.24.0.tgz#d1c1d43cd53b87aaec93035b3234eef7097139a8"
integrity sha512-OSl5OZ8xzGN6z355cqRkiq67zPi3reJimklaF72p0554q85Dng5ToOjjSB9tDKZebSt85jX8cp+ruoQlPqOsPA== integrity sha512-AusUxaX4sFayUBFTCKeHc8+fq73KFqIUW+ZZZYyQ/BvY0MoGAnE2C/3xiawSE7WXmpmguaWzhrXRuY6IrOLX7A==
dependencies: dependencies:
commander "~2.9.0" commander "~2.9.0"
deep-extend "~0.5.1" deep-extend "~0.5.1"
...@@ -8012,23 +8019,23 @@ markdownlint-cli@0.23.2: ...@@ -8012,23 +8019,23 @@ markdownlint-cli@0.23.2:
jsonc-parser "~2.2.0" jsonc-parser "~2.2.0"
lodash.differencewith "~4.5.0" lodash.differencewith "~4.5.0"
lodash.flatten "~4.4.0" lodash.flatten "~4.4.0"
markdownlint "~0.20.4" markdownlint "~0.21.0"
markdownlint-rule-helpers "~0.11.0" markdownlint-rule-helpers "~0.12.0"
minimatch "~3.0.4" minimatch "~3.0.4"
minimist "~1.2.5" minimist "~1.2.5"
rc "~1.2.7" rc "~1.2.7"
markdownlint-rule-helpers@~0.11.0: markdownlint-rule-helpers@~0.12.0:
version "0.11.0" version "0.12.0"
resolved "https://registry.yarnpkg.com/markdownlint-rule-helpers/-/markdownlint-rule-helpers-0.11.0.tgz#faaaae4337771c7d88ca38ef6265afb4fbe0fb3f" resolved "https://registry.yarnpkg.com/markdownlint-rule-helpers/-/markdownlint-rule-helpers-0.12.0.tgz#c41d9b990c50911572e8eb2fba3e6975a5514b7e"
integrity sha512-PhGii9dOiDJDXxiRMpK8N0FM9powprvRPsXALgkjlSPTwLh6ymH+iF3iUe3nq8KGu26tclFBlLL5xAGy/zb7FA== integrity sha512-Q7qfAk+AJvx82ZY52OByC4yjoQYryOZt6D8TKrZJIwCfhZvcj8vCQNuwDqILushtDBTvGFmUPq+uhOb1KIMi6A==
markdownlint@~0.20.4: markdownlint@~0.21.0:
version "0.20.4" version "0.21.1"
resolved "https://registry.yarnpkg.com/markdownlint/-/markdownlint-0.20.4.tgz#3b34681494cdad174e14a27182b5cdefaa537823" resolved "https://registry.yarnpkg.com/markdownlint/-/markdownlint-0.21.1.tgz#9442afcf12bf65ce9d613212028cf85741677421"
integrity sha512-jpfaPgjT0OpeBbemjYNZbzGG3hCLcAIvrm/pEY3+q/szDScG6ZonDacqySVRJAv9glbo8y4wBPJ0wgW17+9GGA== integrity sha512-8kc88w5dyEzlmOWIElp8J17qBgzouOQfJ0LhCcpBFrwgyYK6JTKvILsk4FCEkiNqHkTxwxopT2RS2DYb/10qqg==
dependencies: dependencies:
markdown-it "10.0.0" markdown-it "11.0.0"
marked@^0.3.12, marked@~0.3.6: marked@^0.3.12, marked@~0.3.6:
version "0.3.19" version "0.3.19"
......