Commit 7c077d96 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent da35510c
......@@ -12,11 +12,18 @@ export default {
type: String,
required: true,
},
kubernetesIntegrationHelpPath: {
type: String,
required: true,
},
},
};
</script>
<template>
<eks-cluster-configuration-form
:gitlab-managed-cluster-help-path="gitlabManagedClusterHelpPath"
/>
<div class="js-create-eks-cluster">
<eks-cluster-configuration-form
:gitlab-managed-cluster-help-path="gitlabManagedClusterHelpPath"
:kubernetes-integration-help-path="kubernetesIntegrationHelpPath"
/>
</div>
</template>
......@@ -35,6 +35,10 @@ export default {
type: String,
required: true,
},
kubernetesIntegrationHelpPath: {
type: String,
required: true,
},
},
computed: {
...mapState([
......@@ -94,6 +98,20 @@ export default {
securityGroupDropdownDisabled() {
return !this.selectedVpc;
},
kubernetesIntegrationHelpText() {
const escapedUrl = _.escape(this.kubernetesIntegrationHelpPath);
return sprintf(
s__(
'ClusterIntegration|Read our %{link_start}help page%{link_end} on Kubernetes cluster integration.',
),
{
link_start: `<a href="${escapedUrl}" target="_blank" rel="noopener noreferrer">`,
link_end: '</a>',
},
false,
);
},
roleDropdownHelpText() {
return sprintf(
s__(
......@@ -212,6 +230,10 @@ export default {
</script>
<template>
<form name="eks-cluster-configuration-form">
<h2>
{{ s__('ClusterIntegration|Enter the details for your Amazon EKS Kubernetes cluster') }}
</h2>
<p v-html="kubernetesIntegrationHelpText"></p>
<div class="form-group">
<label class="label-bold" for="eks-cluster-name">{{
s__('ClusterIntegration|Kubernetes cluster name')
......
......@@ -5,25 +5,22 @@ import createStore from './store';
Vue.use(Vuex);
export default () =>
new Vue({
el: '.js-create-eks-cluster-form-container',
export default el => {
const { gitlabManagedClusterHelpPath, kubernetesIntegrationHelpPath } = el.dataset;
return new Vue({
el,
store: createStore(),
components: {
CreateEksCluster,
},
data() {
const { gitlabManagedClusterHelpPath } = document.querySelector(this.$options.el).dataset;
return {
gitlabManagedClusterHelpPath,
};
},
render(createElement) {
return createElement('create-eks-cluster', {
props: {
gitlabManagedClusterHelpPath: this.gitlabManagedClusterHelpPath,
gitlabManagedClusterHelpPath,
kubernetesIntegrationHelpPath,
},
});
},
});
};
......@@ -4,6 +4,8 @@ import { GlEmptyState, GlButton, GlLink, GlLoadingIcon, GlTable } from '@gitlab/
import Icon from '~/vue_shared/components/icon.vue';
import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
import { __ } from '~/locale';
import TrackEventDirective from '~/vue_shared/directives/track_event';
import { trackViewInSentryOptions, trackClickErrorLinkToSentryOptions } from '../utils';
export default {
fields: [
......@@ -21,6 +23,9 @@ export default {
Icon,
TimeAgo,
},
directives: {
TrackEvent: TrackEventDirective,
},
props: {
indexPath: {
type: String,
......@@ -53,6 +58,8 @@ export default {
},
methods: {
...mapActions(['startPolling', 'restartPolling']),
trackViewInSentryOptions,
trackClickErrorLinkToSentryOptions,
},
};
</script>
......@@ -65,7 +72,13 @@ export default {
</div>
<div v-else>
<div class="d-flex justify-content-end">
<gl-button class="my-3 ml-auto" variant="primary" :href="externalUrl" target="_blank">
<gl-button
v-track-event="trackViewInSentryOptions(externalUrl)"
class="my-3 ml-auto"
variant="primary"
:href="externalUrl"
target="_blank"
>
{{ __('View in Sentry') }}
<icon name="external-link" class="flex-shrink-0" />
</gl-button>
......@@ -80,7 +93,12 @@ export default {
</template>
<template slot="error" slot-scope="errors">
<div class="d-flex flex-column">
<gl-link :href="errors.item.externalUrl" class="d-flex text-dark" target="_blank">
<gl-link
v-track-event="trackClickErrorLinkToSentryOptions(errors.item.externalUrl)"
:href="errors.item.externalUrl"
class="d-flex text-dark"
target="_blank"
>
<strong class="text-truncate">{{ errors.item.title.trim() }}</strong>
<icon name="external-link" class="ml-1 flex-shrink-0" />
</gl-link>
......
/* eslint-disable @gitlab/i18n/no-non-i18n-strings */
/**
* Returns Snowplow event options for when the user clicks the "View in Sentry" button
* @param {String} url external URL that will be sent as a property for the event
*/
export const trackViewInSentryOptions = url => ({
category: 'Error Tracking',
action: 'click_view_in_sentry',
label: 'External Url',
property: url,
});
/**
* Returns Snowplow event options for when the user clicks an error link to Sentry
* @param {String} url external URL that will be sent as a property for the event
*/
export const trackClickErrorLinkToSentryOptions = url => ({
category: 'Error Tracking',
action: 'click_error_link_to_sentry',
label: 'Error Link',
property: url,
});
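These helpers only build the option objects; the `track_event` directive added later in this commit is what actually calls `Tracking.event`. As a rough sketch (not part of this commit, using the same import paths the specs use), the options could also be passed to `Tracking.event` directly:

```javascript
// Minimal sketch: consuming an option factory without the v-track-event directive.
import Tracking from '~/tracking';
import { trackViewInSentryOptions } from '~/error_tracking/utils';

const { category, action, label, property } = trackViewInSentryOptions(
  'https://sentry.example.com/organizations/example/issues/1/', // hypothetical Sentry URL
);

// Tracking.event takes the category, the action, and an object of extra event data.
Tracking.event(category, action, { label, property });
```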
......@@ -21,7 +21,14 @@ import MonitorSingleStatChart from './charts/single_stat.vue';
import GraphGroup from './graph_group.vue';
import EmptyState from './empty_state.vue';
import { sidebarAnimationDuration, timeWindows } from '../constants';
import { getTimeDiff, getTimeWindow } from '../utils';
import TrackEventDirective from '~/vue_shared/directives/track_event';
import {
getTimeDiff,
getTimeWindow,
downloadCSVOptions,
generateLinkToChartOptions,
} from '../utils';
let sidebarMutationObserver;
......@@ -43,6 +50,7 @@ export default {
directives: {
GlModal: GlModalDirective,
GlTooltip: GlTooltipDirective,
TrackEvent: TrackEventDirective,
},
props: {
externalDashboardUrl: {
......@@ -322,6 +330,8 @@ export default {
groupHasData(group) {
return this.chartsWithData(group.metrics).length > 0;
},
downloadCSVOptions,
generateLinkToChartOptions,
},
addMetric: {
title: s__('Metrics|Add metric'),
......@@ -552,10 +562,19 @@ export default {
<template slot="button-content">
<icon name="ellipsis_v" class="text-secondary" />
</template>
<gl-dropdown-item :href="downloadCsv(graphData)" download="chart_metrics.csv">
<gl-dropdown-item
v-track-event="downloadCSVOptions(graphData.title)"
:href="downloadCsv(graphData)"
download="chart_metrics.csv"
>
{{ __('Download CSV') }}
</gl-dropdown-item>
<gl-dropdown-item
v-track-event="
generateLinkToChartOptions(
generateLink(groupData.group, graphData.title, graphData.y_label),
)
"
class="js-chart-link"
:data-clipboard-text="
generateLink(groupData.group, graphData.title, graphData.y_label)
......
......@@ -13,6 +13,8 @@ import Icon from '~/vue_shared/components/icon.vue';
import MonitorTimeSeriesChart from './charts/time_series.vue';
import MonitorSingleStatChart from './charts/single_stat.vue';
import MonitorEmptyChart from './charts/empty_chart.vue';
import TrackEventDirective from '~/vue_shared/directives/track_event';
import { downloadCSVOptions, generateLinkToChartOptions } from '../utils';
export default {
components: {
......@@ -27,6 +29,7 @@ export default {
directives: {
GlModal: GlModalDirective,
GlTooltip: GlTooltipDirective,
TrackEvent: TrackEventDirective,
},
props: {
clipboardText: {
......@@ -84,6 +87,8 @@ export default {
showToast() {
this.$toast.show(__('Link copied'));
},
downloadCSVOptions,
generateLinkToChartOptions,
},
};
</script>
......@@ -121,13 +126,18 @@ export default {
<template slot="button-content">
<icon name="ellipsis_v" class="text-secondary" />
</template>
<gl-dropdown-item :href="downloadCsv" download="chart_metrics.csv">
<gl-dropdown-item
v-track-event="downloadCSVOptions(graphData.title)"
:href="downloadCsv"
download="chart_metrics.csv"
>
{{ __('Download CSV') }}
</gl-dropdown-item>
<gl-dropdown-item
v-track-event="generateLinkToChartOptions(clipboardText)"
class="js-chart-link"
:data-clipboard-text="clipboardText"
@click="showToast"
@click="showToast(clipboardText)"
>
{{ __('Generate link to chart') }}
</gl-dropdown-item>
......
......@@ -45,4 +45,47 @@ export const graphDataValidatorForValues = (isValues, graphData) => {
);
};
/* eslint-disable @gitlab/i18n/no-non-i18n-strings */
/**
* Checks whether the page that triggered the event is the cluster health dashboard
* @returns {Boolean}
*/
const isClusterHealthBoard = () => (document.body.dataset.page || '').includes(':clusters:show');
/**
* Returns Snowplow event options for when the user generates a link to a metric chart
* @param {String} chartLink chart link that will be sent as a property for the event
* @return {Object} config object for event tracking
*/
export const generateLinkToChartOptions = chartLink => {
const isOnClusterHealthBoard = isClusterHealthBoard();
const category = isOnClusterHealthBoard
? 'Cluster Monitoring'
: 'Incident Management::Embedded metrics';
const action = isOnClusterHealthBoard
? 'generate_link_to_cluster_metric_chart'
: 'generate_link_to_metrics_chart';
return { category, action, label: 'Chart link', property: chartLink };
};
/**
* Returns Snowplow event options for when the user downloads the CSV of a metric chart
* @param {String} title chart title that will be sent as a property for the event
* @return {Object} config object for event tracking
*/
export const downloadCSVOptions = title => {
const isOnClusterHealthBoard = isClusterHealthBoard();
const category = isOnClusterHealthBoard
? 'Cluster Monitoring'
: 'Incident Management::Embedded metrics';
const action = isOnClusterHealthBoard
? 'download_csv_of_cluster_metric_chart'
: 'download_csv_of_metrics_dashboard_chart';
return { category, action, label: 'Chart title', property: title };
};
export default {};
document.addEventListener('DOMContentLoaded', () => {
if (gon.features.createEksClusters) {
import(/* webpackChunkName: 'eks_cluster' */ '~/create_cluster/eks_cluster')
.then(({ default: initCreateEKSCluster }) => initCreateEKSCluster())
.then(({ default: initCreateEKSCluster }) => {
const el = document.querySelector('.js-create-eks-cluster-form-container');
if (el) {
initCreateEKSCluster(el);
}
})
.catch(() => {});
}
});
import Tracking from '~/tracking';
export default {
bind(el, binding) {
el.dataset.trackingOptions = JSON.stringify(binding.value || {});
el.addEventListener('click', () => {
const { category, action, label, property, value } = JSON.parse(el.dataset.trackingOptions);
if (!category || !action) {
return;
}
Tracking.event(category, action, { label, property, value });
});
},
update(el, binding) {
if (binding.value !== binding.oldValue) {
el.dataset.trackingOptions = JSON.stringify(binding.value || {});
}
},
};
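As a usage sketch (mirroring the directive spec later in this commit; the component name and option values here are hypothetical), a component registers the directive and binds it to an options object:

```javascript
// Hypothetical component; the directive needs at least `category` and `action` to emit an event.
import Vue from 'vue';
import TrackEvent from '~/vue_shared/directives/track_event';

export default Vue.component('example-tracked-button', {
  directives: {
    TrackEvent,
  },
  computed: {
    trackingOptions() {
      return {
        category: 'Example Category', // hypothetical values
        action: 'click_example_button',
        label: 'Example label',
      };
    },
  },
  template: '<button v-track-event="trackingOptions">Track me</button>',
});
```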
......@@ -13,9 +13,14 @@ module Projects
def update
result = ::Projects::Operations::UpdateService.new(project, current_user, update_params).execute
track_events(result)
render_update_response(result)
end
# overridden in EE
def track_events(result)
end
private
# overridden in EE
......
......@@ -10,6 +10,7 @@ module Issues
def add_link(link)
if can_add_link? && (link = parse_link(link))
track_meeting_added_event
success(_('Zoom meeting added'), append_to_description(link))
else
error(_('Failed to add a Zoom meeting'))
......@@ -22,6 +23,7 @@ module Issues
def remove_link
if can_remove_link?
track_meeting_removed_event
success(_('Zoom meeting removed'), remove_from_description)
else
error(_('Failed to remove a Zoom meeting'))
......@@ -44,6 +46,14 @@ module Issues
issue.description || ''
end
def track_meeting_added_event
::Gitlab::Tracking.event('IncidentManagement::ZoomIntegration', 'add_zoom_meeting', label: 'Issue ID', value: issue.id)
end
def track_meeting_removed_event
::Gitlab::Tracking.event('IncidentManagement::ZoomIntegration', 'remove_zoom_meeting', label: 'Issue ID', value: issue.id)
end
def success(message, description)
ServiceResponse
.success(message: message, payload: { description: description })
......
......@@ -3,6 +3,6 @@
- label = local_assigns.fetch(:label)
= link_to clusterable.new_path(provider: provider), class: 'btn gl-button btn-outline flex-fill d-inline-flex flex-column mr-3 justify-content-center align-items-center' do
= image_tag logo_path, alt: label, class: 'gl-w-13 gl-h-13'
.svg-content= image_tag logo_path, alt: label, class: 'gl-w-13 gl-h-13'
%span
= label
......@@ -6,6 +6,6 @@
= create_cluster_label
.d-flex
= render partial: 'clusters/clusters/cloud_providers/cloud_provider_button',
locals: { provider: 'gke', label: gke_label, logo_path: '' }
locals: { provider: 'eks', label: eks_label, logo_path: 'illustrations/logos/amazon_eks.svg' }
= render partial: 'clusters/clusters/cloud_providers/cloud_provider_button',
locals: { provider: 'eks', label: eks_label, logo_path: '' }
locals: { provider: 'gke', label: gke_label, logo_path: 'illustrations/logos/google_gke.svg' }
.js-create-eks-cluster-form-container{ data: { 'gitlab-managed-cluster-help-path' => help_page_path('user/project/clusters/index.md', anchor: 'gitlab-managed-clusters') } }
.js-create-eks-cluster-form-container{ data: { 'gitlab-managed-cluster-help-path' => help_page_path('user/project/clusters/index.md', anchor: 'gitlab-managed-clusters'),
'kubernetes-integration-help-path' => help_page_path('user/project/clusters/index') } }
......@@ -3,13 +3,12 @@
- zones_link_url = 'https://cloud.google.com/compute/docs/regions-zones/regions-zones'
- machine_type_link_url = 'https://cloud.google.com/compute/docs/machine-types'
- pricing_link_url = 'https://cloud.google.com/compute/pricing#machinetype'
- kubernetes_integration_url = help_page_path('user/project/clusters/index')
- help_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe
- help_link_end = ' %{external_link_icon}</a>'.html_safe % { external_link_icon: external_link_icon }
%p
- link_to_help_page = link_to(s_('ClusterIntegration|help page'),
help_page_path('user/project/clusters/index'), target: '_blank', rel: 'noopener noreferrer')
= s_('ClusterIntegration|Read our %{link_to_help_page} on Kubernetes cluster integration.').html_safe % { link_to_help_page: link_to_help_page }
= s_('ClusterIntegration|Read our %{link_start}help page%{link_end} on Kubernetes cluster integration.').html_safe % { link_start: help_link_start % { url: kubernetes_integration_url }, link_end: '</a>'.html_safe }
%p= link_to('Select a different Google account', @authorize_url)
......
- documentation_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: help_page_path("integration/google") }
- link_end = '</a>'.html_safe
= s_('Google authentication is not %{link_start}property configured%{link_end}. Ask your GitLab administrator if you want to use this service.').html_safe % { link_start: documentation_link_start, link_end: link_end }
.signin-with-google
- create_account_link = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: 'https://accounts.google.com/SignUpWithoutGmail?service=cloudconsole&continue=https%3A%2F%2Fconsole.cloud.google.com%2Ffreetrial%3Futm_campaign%3D2018_cpanel%26utm_source%3Dgitlab%26utm_medium%3Dreferral' }
= link_to(image_tag('auth_buttons/signin_with_google.png', width: '191px', alt: _('Sign in with Google')), @authorize_url)
= s_('or %{link_start}create a new Google account%{link_end}').html_safe % { link_start: create_account_link, link_end: '</a>'.html_safe }
......@@ -2,7 +2,9 @@
- page_title _('Kubernetes Cluster')
- create_eks_enabled = Feature.enabled?(:create_eks_clusters)
- active_tab = local_assigns.fetch(:active_tab, 'create')
- link_end = '</a>'.html_safe
- create_on_gke_tab_label = s_('ClusterIntegration|Create new Cluster on GKE')
- create_on_eks_tab_label = s_('ClusterIntegration|Create new Cluster on EKS')
- create_new_cluster_label = s_('ClusterIntegration|Create new Cluster')
= javascript_include_tag 'https://apis.google.com/js/api.js'
= render_gcp_signup_offer
......@@ -14,7 +16,16 @@
%ul.nav-links.nav-tabs.gitlab-tabs.nav{ role: 'tablist' }
%li.nav-item{ role: 'presentation' }
%a.nav-link{ href: '#create-cluster-pane', id: 'create-cluster-tab', class: active_when(active_tab == 'create'), data: { toggle: 'tab' }, role: 'tab' }
%span Create new Cluster on GKE
%span
- if create_eks_enabled
- if @gke_selected
= create_on_gke_tab_label
- elsif @eks_selected
= create_on_eks_tab_label
- else
= create_new_cluster_label
- else
= create_on_gke_tab_label
%li.nav-item{ role: 'presentation' }
%a.nav-link{ href: '#add-cluster-pane', id: 'add-cluster-tab', class: active_when(active_tab == 'add'), data: { toggle: 'tab' }, role: 'tab' }
%span Add existing cluster
......@@ -22,9 +33,14 @@
.tab-content.gitlab-tab-content
- if create_eks_enabled
.tab-pane{ id: 'create-cluster-pane', class: active_when(active_tab == 'create'), role: 'tabpanel' }
- if @gke_selected && @valid_gcp_token
- if @gke_selected
= render 'clusters/clusters/gcp/header'
= render 'clusters/clusters/gcp/form'
- if @valid_gcp_token
= render 'clusters/clusters/gcp/form'
- elsif @authorize_url
= render 'clusters/clusters/gcp/signin_with_google_button'
- else
= render 'clusters/clusters/gcp/gcp_not_configured'
- elsif @eks_selected
= render 'clusters/clusters/eks/index'
- else
......@@ -35,13 +51,9 @@
- if @valid_gcp_token
= render 'clusters/clusters/gcp/form'
- elsif @authorize_url
.signin-with-google
- create_account_link = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: 'https://accounts.google.com/SignUpWithoutGmail?service=cloudconsole&continue=https%3A%2F%2Fconsole.cloud.google.com%2Ffreetrial%3Futm_campaign%3D2018_cpanel%26utm_source%3Dgitlab%26utm_medium%3Dreferral' }
= link_to(image_tag('auth_buttons/signin_with_google.png', width: '191px', alt: _('Sign in with Google')), @authorize_url)
= s_('or %{link_start}create a new Google account%{link_end}').html_safe % { link_start: create_account_link, link_end: link_end }
= render 'clusters/clusters/gcp/signin_with_google_button'
- else
- documentation_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: help_page_path("integration/google") }
= s_('Google authentication is not %{link_start}property configured%{link_end}. Ask your GitLab administrator if you want to use this service.').html_safe % { link_start: documentation_link_start, link_end: link_end }
= render 'clusters/clusters/gcp/gcp_not_configured'
.tab-pane{ id: 'add-cluster-pane', class: active_when(active_tab == 'add'), role: 'tabpanel' }
= render 'clusters/clusters/user/header'
......
---
title: 'Snowplow custom events for Monitor: Health Product Categories'
merge_request: 18157
author:
type: added
......@@ -549,7 +549,7 @@ a few things that you need to do:
1. Configure [database lookup of SSH keys](../operations/fast_ssh_key_lookup.md)
to eliminate the need for a shared authorized_keys file.
1. Configure [object storage for job artifacts](../job_artifacts.md#using-object-storage)
including [live tracing](../job_traces.md#new-live-trace-architecture).
including [incremental logging](../job_logs.md#new-incremental-logging-architecture).
1. Configure [object storage for LFS objects](../../workflow/lfs/lfs_administration.md#storing-lfs-objects-in-remote-object-storage).
1. Configure [object storage for uploads](../uploads.md#using-object-storage-core-only).
......
......@@ -154,7 +154,7 @@ Learn how to install, configure, update, and maintain your GitLab instance.
- [Enable/disable GitLab CI/CD](../ci/enable_or_disable_ci.md#site-wide-admin-setting): Enable or disable GitLab CI/CD for your instance.
- [GitLab CI/CD admin settings](../user/admin_area/settings/continuous_integration.md): Enable or disable Auto DevOps site-wide and define the artifacts' max size and expiration time.
- [Job artifacts](job_artifacts.md): Enable, disable, and configure job artifacts (a set of files and directories which are outputted by a job when it completes successfully).
- [Job traces](job_traces.md): Information about the job traces (logs).
- [Job logs](job_logs.md): Information about the job logs.
- [Register Shared and specific Runners](../ci/runners/README.md#registering-a-shared-runner): Learn how to register and configure Shared and specific Runners to your own instance.
- [Shared Runners pipelines quota](../user/admin_area/settings/continuous_integration.md#shared-runners-pipeline-minutes-quota-starter-only): Limit the usage of pipeline minutes for Shared Runners. **(STARTER ONLY)**
- [Enable/disable Auto DevOps](../topics/autodevops/index.md#enablingdisabling-auto-devops): Enable or disable Auto DevOps for your instance.
......
......@@ -90,7 +90,7 @@ This configuration relies on valid AWS credentials to be configured already.
Use an object storage option like AWS S3 to store job artifacts.
DANGER: **Danger:**
If you're enabling S3 in [GitLab HA](high_availability/README.md), you will need to have an [NFS mount set up for CI traces and artifacts](high_availability/nfs.md#a-single-nfs-mount) or enable [live tracing](job_traces.md#new-live-trace-architecture). If these settings are not set, you will risk job traces disappearing or not being saved.
If you're enabling S3 in [GitLab HA](high_availability/README.md), you will need to have an [NFS mount set up for CI logs and artifacts](high_availability/nfs.md#a-single-nfs-mount) or enable [incremental logging](job_logs.md#new-incremental-logging-architecture). If these settings are not set, you will risk job logs disappearing or not being saved.
#### Object Storage Settings
......
# Job logs
> [Renamed from Job Traces to Job logs](https://gitlab.com/gitlab-org/gitlab/issues/29121) in 12.4.
Job logs (traces) are sent by GitLab Runner while it's processing a job. You can see
logs in job pages, pipelines, email notifications, etc.
## Data flow
In general, there are two states for job logs: `log` and `archived log`.
In the following table you can see the phases a log goes through:
| Phase | State | Condition | Data flow | Stored path |
| -------------- | ------------ | ----------------------- | -----------------------------------------| ----------- |
| 1: patching | log | When a job is running | GitLab Runner => Unicorn => file storage | `#{ROOT_PATH}/gitlab-ci/builds/#{YYYY_mm}/#{project_id}/#{job_id}.log` |
| 2: overwriting | log | When a job is finished | GitLab Runner => Unicorn => file storage | `#{ROOT_PATH}/gitlab-ci/builds/#{YYYY_mm}/#{project_id}/#{job_id}.log` |
| 3: archiving | archived log | After a job is finished | Sidekiq moves log to artifacts folder | `#{ROOT_PATH}/gitlab-rails/shared/artifacts/#{disk_hash}/#{YYYY_mm_dd}/#{job_id}/#{job_artifact_id}/job.log` |
| 4: uploading | archived log | After a log is archived | Sidekiq moves archived log to [object storage](#uploading-logs-to-object-storage) (if configured) | `#{bucket_name}/#{disk_hash}/#{YYYY_mm_dd}/#{job_id}/#{job_artifact_id}/job.log` |
The `ROOT_PATH` varies per environment. For Omnibus GitLab it
would be `/var/opt/gitlab`, and for installations from source
it would be `/home/git/gitlab`.
## Changing the job logs local location
To change the location where the job logs will be stored, follow the steps below.
**In Omnibus installations:**
1. Edit `/etc/gitlab/gitlab.rb` and add or amend the following line:
```ruby
gitlab_ci['builds_directory'] = '/mnt/to/gitlab-ci/builds'
```
1. Save the file and [reconfigure GitLab][] for the changes to take effect.
---
**In installations from source:**
1. Edit `/home/git/gitlab/config/gitlab.yml` and add or amend the following lines:
```yaml
gitlab_ci:
# The location where build logs are stored (default: builds/).
# Relative paths are relative to Rails.root.
builds_path: path/to/builds/
```
1. Save the file and [restart GitLab][] for the changes to take effect.
[reconfigure gitlab]: restart_gitlab.md#omnibus-gitlab-reconfigure "How to reconfigure Omnibus GitLab"
[restart gitlab]: restart_gitlab.md#installations-from-source "How to restart GitLab"
## Uploading logs to object storage
Archived logs are considered as [job artifacts](job_artifacts.md).
Therefore, when you [set up the object storage integration](job_artifacts.md#object-storage-settings),
job logs are automatically migrated to it along with the other job artifacts.
See "Phase 4: uploading" in [Data flow](#data-flow) to learn about the process.
## How to remove job logs
There isn't a way to automatically expire old job logs, but it's safe to remove
them if they're taking up too much space. If you remove the logs manually, the
job output in the UI will be empty.
## New incremental logging architecture
> [Introduced][ce-18169] in GitLab 10.4.
> [Announced as General availability][ce-46097] in GitLab 11.0.
NOTE: **Note:**
This feature is off by default. See below for how to [enable or disable](#enabling-incremental-logging) it.
By combining the process with object storage settings, we can completely bypass
the local file storage. This is a useful option if GitLab is installed as
cloud-native, for example on Kubernetes.
The data flow is the same as described in the [data flow section](#data-flow)
with one change: _the stored path of the first two phases is different_. This incremental
log architecture stores chunks of logs in Redis and a persistent store (object storage or database) instead of
file storage. Redis is used as first-class storage, and it stores up to 128KB
of data. Once the full chunk is sent, it is flushed to a persistent store, either object storage (in a temporary directory) or the database.
After a while, the data in Redis and the persistent store will be archived to [object storage](#uploading-logs-to-object-storage).
The data are stored in the following Redis namespace: `Gitlab::Redis::SharedState`.
Here is the detailed data flow:
1. GitLab Runner picks a job from GitLab
1. GitLab Runner sends a piece of log to GitLab
1. GitLab appends the data to Redis
1. Once the data in Redis reach 128KB, the data is flushed to a persistent store (object storage or the database).
1. The above steps are repeated until the job is finished.
1. Once the job is finished, GitLab schedules a Sidekiq worker to archive the log.
1. The Sidekiq worker archives the log to object storage and cleans up the log
in Redis and a persistent store (object storage or the database).
### Enabling incremental logging
The following commands are to be issued in a Rails console:
```sh
# Omnibus GitLab
gitlab-rails console
# Installation from source
cd /home/git/gitlab
sudo -u git -H bin/rails console RAILS_ENV=production
```
**To check if incremental logging (trace) is enabled:**
```ruby
Feature.enabled?('ci_enable_live_trace')
```
**To enable incremental logging (trace):**
```ruby
Feature.enable('ci_enable_live_trace')
```
NOTE: **Note:**
The transition period will be handled gracefully. Upcoming logs will be
generated with the incremental architecture, and on-going logs will stay with the
legacy architecture, which means that on-going logs won't be forcibly
re-generated with the incremental architecture.
**To disable incremental logging (trace):**
```ruby
Feature.disable('ci_enable_live_trace')
```
NOTE: **Note:**
The transition period will be handled gracefully. Upcoming logs will be generated
with the legacy architecture, and on-going incremental logs will stay with the incremental
architecture, which means that on-going incremental logs won't be forcibly re-generated
with the legacy architecture.
### Potential implications
In some cases, having data stored on Redis could incur data loss:
1. **Case 1: When all data in Redis are accidentally flushed**
- Ongoing incremental logs could be recovered by re-sending logs (this is
supported by all versions of the GitLab Runner).
- Finished jobs which have not archived incremental logs will lose the last part
(~128KB) of log data.
1. **Case 2: When Sidekiq workers fail to archive (e.g., a bug that
prevents the archiving process, Sidekiq inconsistency, etc.)**
- Currently, all log data in Redis is deleted after one week. If the
Sidekiq workers can't finish by the expiry date, that part of the log data will be lost.
Another issue that might arise is that it could consume all memory on the Redis
instance. If the number of jobs is 1000, 128MB (128KB * 1000) is consumed.
Also, it could increase database replication lag. `INSERT`s are generated to
indicate that we have a log chunk. `UPDATE`s with 128KB of data are issued once we
receive multiple chunks.
[ce-18169]: https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/18169
[ce-21193]: https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/21193
[ce-46097]: https://gitlab.com/gitlab-org/gitlab-foss/issues/46097
# Job traces (logs)
Job traces are sent by GitLab Runner while it's processing a job. You can see
traces in job pages, pipelines, email notifications, etc.
## Data flow
In general, there are two states in job traces: "live trace" and "archived trace".
In the following table you can see the phases a trace goes through.
| Phase | State | Condition | Data flow | Stored path |
| ----- | ----- | --------- | --------- | ----------- |
| 1: patching | Live trace | When a job is running | GitLab Runner => Unicorn => file storage |`#{ROOT_PATH}/gitlab-ci/builds/#{YYYY_mm}/#{project_id}/#{job_id}.log`|
| 2: overwriting | Live trace | When a job is finished | GitLab Runner => Unicorn => file storage |`#{ROOT_PATH}/gitlab-ci/builds/#{YYYY_mm}/#{project_id}/#{job_id}.log`|
| 3: archiving | Archived trace | After a job is finished | Sidekiq moves live trace to artifacts folder |`#{ROOT_PATH}/gitlab-rails/shared/artifacts/#{disk_hash}/#{YYYY_mm_dd}/#{job_id}/#{job_artifact_id}/job.log`|
| 4: uploading | Archived trace | After a trace is archived | Sidekiq moves archived trace to [object storage](#uploading-traces-to-object-storage) (if configured) |`#{bucket_name}/#{disk_hash}/#{YYYY_mm_dd}/#{job_id}/#{job_artifact_id}/job.log`|
The `ROOT_PATH` varies per your environment. For Omnibus GitLab it
would be `/var/opt/gitlab`, whereas for installations from source
it would be `/home/git/gitlab`.
## Changing the job traces local location
To change the location where the job logs will be stored, follow the steps below.
**In Omnibus installations:**
1. Edit `/etc/gitlab/gitlab.rb` and add or amend the following line:
```ruby
gitlab_ci['builds_directory'] = '/mnt/to/gitlab-ci/builds'
```
1. Save the file and [reconfigure GitLab][] for the changes to take effect.
---
redirect_to: 'job_logs.md'
---
**In installations from source:**
1. Edit `/home/git/gitlab/config/gitlab.yml` and add or amend the following lines:
```yaml
gitlab_ci:
# The location where build traces are stored (default: builds/).
# Relative paths are relative to Rails.root.
builds_path: path/to/builds/
```
1. Save the file and [restart GitLab][] for the changes to take effect.
[reconfigure gitlab]: restart_gitlab.md#omnibus-gitlab-reconfigure "How to reconfigure Omnibus GitLab"
[restart gitlab]: restart_gitlab.md#installations-from-source "How to restart GitLab"
## Uploading traces to object storage
Archived traces are considered as [job artifacts](job_artifacts.md).
Therefore, when you [set up the object storage integration](job_artifacts.md#object-storage-settings),
job traces are automatically migrated to it along with the other job artifacts.
See "Phase 4: uploading" in [Data flow](#data-flow) to learn about the process.
## How to remove job traces
There isn't a way to automatically expire old job logs, but it's safe to remove
them if they're taking up too much space. If you remove the logs manually, the
job output in the UI will be empty.
## New live trace architecture
> [Introduced][ce-18169] in GitLab 10.4.
> [Announced as General availability][ce-46097] in GitLab 11.0.
NOTE: **Note:**
This feature is off by default. Check below how to [enable/disable](#enabling-live-trace) it.
By combining the process with object storage settings, we can completely bypass
the local file storage. This is a useful option if GitLab is installed as
cloud-native, for example on Kubernetes.
The data flow is the same as described in the [data flow section](#data-flow)
with one change: _the stored path of the first two phases is different_. This new live
trace architecture stores chunks of traces in Redis and a persistent store (object storage or database) instead of
file storage. Redis is used as first-class storage, and it stores up-to 128KB
of data. Once the full chunk is sent, it is flushed to a persistent store, either object storage (in a temporary directory) or the database.
After a while, the data in Redis and the persistent store will be archived to [object storage](#uploading-traces-to-object-storage).
The data are stored in the following Redis namespace: `Gitlab::Redis::SharedState`.
Here is the detailed data flow:
1. GitLab Runner picks a job from GitLab
1. GitLab Runner sends a piece of trace to GitLab
1. GitLab appends the data to Redis
1. Once the data in Redis reach 128KB, the data is flushed to a persistent store (object storage or the database).
1. The above steps are repeated until the job is finished.
1. Once the job is finished, GitLab schedules a Sidekiq worker to archive the trace.
1. The Sidekiq worker archives the trace to object storage and cleans up the trace
in Redis and a persistent store (object storage or the database).
### Enabling live trace
The following commands are to be issued in a Rails console:
```sh
# Omnibus GitLab
gitlab-rails console
# Installation from source
cd /home/git/gitlab
sudo -u git -H bin/rails console RAILS_ENV=production
```
**To check if live trace is enabled:**
```ruby
Feature.enabled?('ci_enable_live_trace')
```
**To enable live trace:**
```ruby
Feature.enable('ci_enable_live_trace')
```
NOTE: **Note:**
The transition period will be handled gracefully. Upcoming traces will be
generated with the new architecture, and on-going live traces will stay with the
legacy architecture, which means that on-going live traces won't be forcibly
re-generated with the new architecture.
**To disable live trace:**
```ruby
Feature.disable('ci_enable_live_trace')
```
NOTE: **Note:**
The transition period will be handled gracefully. Upcoming traces will be generated
with the legacy architecture, and on-going live traces will stay with the new
architecture, which means that on-going live traces won't be forcibly re-generated
with the legacy architecture.
### Potential implications
In some cases, having data stored on Redis could incur data loss:
1. **Case 1: When all data in Redis are accidentally flushed**
- On going live traces could be recovered by re-sending traces (this is
supported by all versions of the GitLab Runner).
- Finished jobs which have not archived live traces will lose the last part
(~128KB) of trace data.
1. **Case 2: When Sidekiq workers fail to archive (e.g., there was a bug that
prevents archiving process, Sidekiq inconsistency, etc.)**
- Currently all trace data in Redis will be deleted after one week. If the
Sidekiq workers can't finish by the expiry date, the part of trace data will be lost.
Another issue that might arise is that it could consume all memory on the Redis
instance. If the number of jobs is 1000, 128MB (128KB * 1000) is consumed.
Also, it could pressure the database replication lag. `INSERT`s are generated to
indicate that we have trace chunk. `UPDATE`s with 128KB of data is issued once we
receive multiple chunks.
[ce-18169]: https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/18169
[ce-21193]: https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/21193
[ce-46097]: https://gitlab.com/gitlab-org/gitlab-foss/issues/46097
This document was moved to [another location](job_logs.md).
......@@ -537,9 +537,9 @@ Possible response status codes:
| 400 | Invalid path provided |
| 404 | Build not found or no file/artifacts |
## Get a trace file
## Get a log file
Get a trace of a specific job of a project
Get a log (trace) of a specific job of a project:
```
GET /projects/:id/jobs/:job_id/trace
......@@ -556,10 +556,10 @@ curl --location --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.ex
Possible response status codes:
| Status | Description |
|-----------|-----------------------------------|
| 200 | Serves the trace file |
| 404 | Build not found or no trace file |
| Status | Description |
|-----------|-------------------------------|
| 200 | Serves the log file |
| 404 | Job not found or no log file |
## Cancel a job
......@@ -661,7 +661,7 @@ Example of response
## Erase a job
Erase a single job of a project (remove job artifacts and a job trace)
Erase a single job of a project (remove job artifacts and a job log)
```
POST /projects/:id/jobs/:job_id/erase
......
......@@ -283,11 +283,11 @@ You can also access pipelines for a merge request by navigating to its **Pipelin
When you access a pipeline, you can see the related jobs for that pipeline.
Clicking on an individual job will show you its job trace, and allow you to:
Clicking on an individual job will show you its job log, and allow you to:
- Cancel the job.
- Retry the job.
- Erase the job trace.
- Erase the job log.
### Seeing the failure reason for jobs
......
......@@ -365,8 +365,8 @@ We're always looking for contributions that can mitigate these
### Resetting the registration token for a Project
If you think that registration token for a Project was revealed, you should
reset them. It's recommended because such token can be used to register another
Runner to the Project. It may be next used to obtain the values of secret
reset them. It's recommended because such a token can be used to register another
Runner to the Project. It may then be used to obtain the values of secret
variables or clone the project code, that normally may be unavailable for the
attacker.
......@@ -379,10 +379,10 @@ To reset the token:
1. After the page is refreshed, expand the **Runners settings** section
and check the registration token - it should be changed.
From now on the old token is not valid anymore and will not allow to register
a new Runner to the project. If you are using any tools to provision and
register new Runners, you should now update the token that is used to the
new value.
From now on the old token is no longer valid and will not register
any new Runners to the project. If you are using any tools to provision and
register new Runners, the tokens used in those tools should be updated to reflect the
value of the new token.
## Determining the IP address of a Runner
......
......@@ -35,8 +35,8 @@ with any type of [executor](https://docs.gitlab.com/runner/executors/)
if you are accessing a private GitLab repository.
NOTE: **Note:**
The private key will not be displayed in the job trace, unless you enable
[debug tracing](../variables/README.md#debug-tracing). You might also want to
The private key will not be displayed in the job log, unless you enable
[debug logging](../variables/README.md#debug-logging). You might also want to
check the [visibility of your pipelines](../../user/project/pipelines/settings.md#visibility-of-pipelines).
## SSH keys when using the Docker executor
......
......@@ -568,7 +568,7 @@ Below you can find supported syntax reference:
Precedence of operators follows standard Ruby 2.5 operation
[precedence](https://ruby-doc.org/core-2.5.0/doc/syntax/precedence_rdoc.html).
## Debug tracing
## Debug logging
> Introduced in GitLab Runner 1.7.
......@@ -576,24 +576,24 @@ CAUTION: **Warning:**
Enabling debug tracing can have severe security implications. The
output **will** contain the content of all your variables and any other
secrets! The output **will** be uploaded to the GitLab server and made visible
in job traces!
in job logs!
By default, GitLab Runner hides most of the details of what it is doing when
processing a job. This behavior keeps job traces short, and prevents secrets
from being leaked into the trace unless your script writes them to the screen.
processing a job. This behavior keeps job logs short, and prevents secrets
from being leaked into the log unless your script writes them to the screen.
If a job isn't working as expected, this can make the problem difficult to
investigate; in these cases, you can enable debug tracing in `.gitlab-ci.yml`.
Available on GitLab Runner v1.7+, this feature enables the shell's execution
trace, resulting in a verbose job trace listing all commands that were run,
log, resulting in a verbose job log listing all commands that were run,
variables that were set, etc.
Before enabling this, you should ensure jobs are visible to
[team members only](../../user/permissions.md#project-features). You should
also [erase](../pipelines.md#accessing-individual-jobs) all generated job traces
also [erase](../pipelines.md#accessing-individual-jobs) all generated job logs
before making them visible again.
To enable debug traces, set the `CI_DEBUG_TRACE` variable to `true`:
To enable debug logs (traces), set the `CI_DEBUG_TRACE` variable to `true`:
```yaml
job_name:
......@@ -601,7 +601,7 @@ job_name:
CI_DEBUG_TRACE: "true"
```
Example truncated output with debug trace set to true:
Example truncated output with `CI_DEBUG_TRACE` set to `true`:
```bash
...
......
......@@ -40,7 +40,7 @@ future GitLab releases.**
| `CI_COMMIT_TAG` | 9.0 | 0.5 | The commit tag name. Present only when building tags. |
| `CI_COMMIT_TITLE` | 10.8 | all | The title of the commit - the full first line of the message |
| `CI_CONFIG_PATH` | 9.4 | 0.5 | The path to CI config file. Defaults to `.gitlab-ci.yml` |
| `CI_DEBUG_TRACE` | all | 1.7 | Whether [debug tracing](README.md#debug-tracing) is enabled |
| `CI_DEBUG_TRACE` | all | 1.7 | Whether [debug logging (tracing)](README.md#debug-logging) is enabled |
| `CI_DEPLOY_PASSWORD` | 10.8 | all | Authentication password of the [GitLab Deploy Token][gitlab-deploy-token], only present if the Project has one related.|
| `CI_DEPLOY_USER` | 10.8 | all | Authentication username of the [GitLab Deploy Token][gitlab-deploy-token], only present if the Project has one related.|
| `CI_DISPOSABLE_ENVIRONMENT` | all | 10.1 | Marks that the job is executed in a disposable environment (something that is created only for this job and disposed of/destroyed after the execution - all executors except `shell` and `ssh`). If the environment is disposable, it is set to true, otherwise it is not defined at all. |
......
......@@ -242,7 +242,7 @@ For more information, see [Available settings for `services`](../docker/usin
`before_script` is used to define the command that should be run before all
jobs, including deploy jobs, but after the restoration of [artifacts](#artifacts).
This can be an array or a multi-line string.
This must be an array.
`after_script` is used to define the command that will be run after all
jobs, including failed ones. This has to be an array or a multi-line string.
......
......@@ -448,7 +448,7 @@ sudo -u git -H mkdir -p public/uploads/
# now that files in public/uploads are served by gitlab-workhorse
sudo chmod 0700 public/uploads
# Change the permissions of the directory where CI job traces are stored
# Change the permissions of the directory where CI job logs are stored
sudo chmod -R u+rwX builds/
# Change the permissions of the directory where CI artifacts are stored
......
......@@ -113,6 +113,25 @@ NOTE: **Note:** GitLab is unable to provide support for IdPs that are not listed
| OneLogin | [Use the OneLogin SAML Test Connector](https://onelogin.service-now.com/support?id=kb_article&sys_id=93f95543db109700d5505eea4b96198f) |
| Ping Identity | [Add and configure a new SAML application](https://support.pingidentity.com/s/document-item?bundleId=pingone&topicId=xsh1564020480660-1.html) |
When [configuring your identity provider](#configuring-your-identity-provider), please consider the notes below for specific providers to help avoid common issues and as a guide to the terminology used.
### OneLogin setup notes
NOTE: **Note:**
The GitLab app listed in the directory is for self-managed GitLab instances. Please use a generic SAML Test Connector.
| GitLab Setting | OneLogin Field |
|--------------|----------------|
| Identifier | Audience |
| Assertion consumer service URL | Recipient |
| Assertion consumer service URL | ACS (Consumer) URL |
| Assertion consumer service URL (escaped version) | ACS (Consumer) URL Validator |
| GitLab single sign on URL | Login URL |
Recommended `NameID` value: `OneLogin ID`.
Set parameters according to the [assertions table](#assertions).
## Linking SAML to your existing GitLab.com account
To link SAML to your existing GitLab.com account:
......
......@@ -5,8 +5,11 @@ module Gitlab
class ConflictFilesStitcher
include Enumerable
def initialize(rpc_response)
attr_reader :gitaly_repo
def initialize(rpc_response, gitaly_repo)
@rpc_response = rpc_response
@gitaly_repo = gitaly_repo
end
def each
......@@ -31,7 +34,7 @@ module Gitlab
def file_from_gitaly_header(header)
Gitlab::Git::Conflict::File.new(
Gitlab::GitalyClient::Util.git_repository(header.repository),
Gitlab::GitalyClient::Util.git_repository(gitaly_repo),
header.commit_oid,
conflict_from_gitaly_file_header(header),
''
......
......@@ -22,7 +22,7 @@ module Gitlab
)
response = GitalyClient.call(@repository.storage, :conflicts_service, :list_conflict_files, request, timeout: GitalyClient.long_timeout)
GitalyClient::ConflictFilesStitcher.new(response)
GitalyClient::ConflictFilesStitcher.new(response, @gitaly_repo)
end
def conflicts?
......
# frozen_string_literal: true
module Gitlab
module Tracking
module IncidentManagement
class << self
def track_from_params(incident_params)
return if incident_params.blank?
incident_params.each do |k, v|
prefix = ['', '0'].include?(v.to_s) ? 'disabled' : 'enabled'
key = tracking_keys.dig(k, :name)
label = tracking_keys.dig(k, :label)
next if key.blank?
details = label ? { label: label, property: v } : {}
::Gitlab::Tracking.event('IncidentManagement::Settings', "#{prefix}_#{key}", **details )
end
end
def tracking_keys
{
create_issue: {
name: 'issue_auto_creation_on_alerts'
},
issue_template_key: {
name: 'issue_template_on_alerts',
label: 'Template name'
},
send_email: {
name: 'sending_emails'
}
}.with_indifferent_access.freeze
end
end
end
end
end
......@@ -3528,6 +3528,15 @@ msgstr ""
msgid "ClusterIntegration|Create cluster on"
msgstr ""
msgid "ClusterIntegration|Create new Cluster"
msgstr ""
msgid "ClusterIntegration|Create new Cluster on EKS"
msgstr ""
msgid "ClusterIntegration|Create new Cluster on GKE"
msgstr ""
msgid "ClusterIntegration|Did you know?"
msgstr ""
......@@ -3540,6 +3549,9 @@ msgstr ""
msgid "ClusterIntegration|Enable this setting if using role-based access control (RBAC)."
msgstr ""
msgid "ClusterIntegration|Enter the details for your Amazon EKS Kubernetes cluster"
msgstr ""
msgid "ClusterIntegration|Enter the details for your Kubernetes cluster"
msgstr ""
......@@ -3801,7 +3813,7 @@ msgstr ""
msgid "ClusterIntegration|RBAC-enabled cluster"
msgstr ""
msgid "ClusterIntegration|Read our %{link_to_help_page} on Kubernetes cluster integration."
msgid "ClusterIntegration|Read our %{link_start}help page%{link_end} on Kubernetes cluster integration."
msgstr ""
msgid "ClusterIntegration|Region"
......@@ -4017,9 +4029,6 @@ msgstr ""
msgid "ClusterIntegration|documentation"
msgstr ""
msgid "ClusterIntegration|help page"
msgstr ""
msgid "ClusterIntegration|installed via %{installed_via}"
msgstr ""
......
# frozen_string_literal: true
require 'spec_helper'
describe 'AWS EKS Cluster', :js do
let(:project) { create(:project) }
let(:user) { create(:user) }
before do
project.add_maintainer(user)
gitlab_sign_in(user)
allow(Projects::ClustersController).to receive(:STATUS_POLLING_INTERVAL) { 100 }
end
context 'when user does not have a cluster and visits cluster index page' do
let(:project_id) { 'test-project-1234' }
before do
visit project_clusters_path(project)
click_link 'Add Kubernetes cluster'
end
context 'when user creates a cluster on AWS EKS' do
before do
click_link 'Amazon EKS'
end
it 'user sees a form to create an EKS cluster' do
expect(page).to have_selector(:css, '.js-create-eks-cluster')
end
end
end
end
......@@ -177,6 +177,7 @@ describe 'Gcp Cluster', :js do
context 'when user has not dismissed GCP signup offer' do
before do
stub_feature_flags(create_eks_clusters: false)
visit project_clusters_path(project)
end
......@@ -200,6 +201,7 @@ describe 'Gcp Cluster', :js do
context 'when user has dismissed GCP signup offer' do
before do
stub_feature_flags(create_eks_clusters: false)
visit project_clusters_path(project)
end
......
......@@ -74,7 +74,7 @@ describe 'Clusters', :js do
visit project_clusters_path(project)
click_link 'Add Kubernetes cluster'
click_link 'Create new Cluster on GKE'
click_link 'Create new Cluster'
end
it 'user sees a link to create a GKE cluster' do
......
......@@ -123,6 +123,7 @@ describe('EksClusterConfigurationForm', () => {
store,
propsData: {
gitlabManagedClusterHelpPath: '',
kubernetesIntegrationHelpPath: '',
},
});
});
......
import * as errorTrackingUtils from '~/error_tracking/utils';
const externalUrl = 'https://sentry.io/organizations/test-sentry-nk/issues/1/?project=1';
describe('Error Tracking Events', () => {
describe('trackViewInSentryOptions', () => {
it('should return correct event options', () => {
expect(errorTrackingUtils.trackViewInSentryOptions(externalUrl)).toEqual({
category: 'Error Tracking',
action: 'click_view_in_sentry',
label: 'External Url',
property: externalUrl,
});
});
});
describe('trackClickErrorLinkToSentryOptions', () => {
it('should return correct event options', () => {
expect(errorTrackingUtils.trackClickErrorLinkToSentryOptions(externalUrl)).toEqual({
category: 'Error Tracking',
action: 'click_error_link_to_sentry',
label: 'Error Link',
property: externalUrl,
});
});
});
});
import * as monitoringUtils from '~/monitoring/utils';
describe('Snowplow Events', () => {
const generatedLink = 'http://chart.link.com';
const chartTitle = 'Some metric chart';
describe('generateLinkToChartOptions', () => {
it('should return Cluster Monitoring options if located on Cluster Health Dashboard', () => {
document.body.dataset.page = 'groups:clusters:show';
expect(monitoringUtils.generateLinkToChartOptions(generatedLink)).toEqual({
category: 'Cluster Monitoring',
action: 'generate_link_to_cluster_metric_chart',
label: 'Chart link',
property: generatedLink,
});
});
it('should return Incident Management event options if located on Metrics Dashboard', () => {
document.body.dataset.page = 'metrics:show';
expect(monitoringUtils.generateLinkToChartOptions(generatedLink)).toEqual({
category: 'Incident Management::Embedded metrics',
action: 'generate_link_to_metrics_chart',
label: 'Chart link',
property: generatedLink,
});
});
});
describe('downloadCSVOptions', () => {
it('should return Cluster Monitoring options if located on Cluster Health Dashboard', () => {
document.body.dataset.page = 'groups:clusters:show';
expect(monitoringUtils.downloadCSVOptions(chartTitle)).toEqual({
category: 'Cluster Monitoring',
action: 'download_csv_of_cluster_metric_chart',
label: 'Chart title',
property: chartTitle,
});
});
it('should return Incident Management event options if located on Metrics Dashboard', () => {
document.body.dataset.page = 'metrics:show';
expect(monitoringUtils.downloadCSVOptions(chartTitle)).toEqual({
category: 'Incident Management::Embedded metrics',
action: 'download_csv_of_metrics_dashboard_chart',
label: 'Chart title',
property: chartTitle,
});
});
});
});
import Vue from 'vue';
import { createLocalVue, shallowMount } from '@vue/test-utils';
import Tracking from '~/tracking';
import TrackEvent from '~/vue_shared/directives/track_event';
jest.mock('~/tracking');
const Component = Vue.component('dummy-element', {
directives: {
TrackEvent,
},
data() {
return {
trackingOptions: null,
};
},
template: '<button id="trackable" v-track-event="trackingOptions"></button>',
});
const localVue = createLocalVue();
let wrapper;
let button;
describe('Track event directive', () => {
beforeEach(() => {
wrapper = shallowMount(localVue.extend(Component), {
localVue,
});
button = wrapper.find('#trackable');
});
it('should not track the event if required arguments are not provided', () => {
button.trigger('click');
expect(Tracking.event).not.toHaveBeenCalled();
});
it('should track event on click if tracking info provided', () => {
const trackingOptions = {
category: 'Tracking',
action: 'click_trackable_btn',
label: 'Trackable Info',
};
wrapper.setData({ trackingOptions });
const { category, action, label, property, value } = trackingOptions;
button.trigger('click');
expect(Tracking.event).toHaveBeenCalledWith(category, action, { label, property, value });
});
});
......@@ -32,7 +32,7 @@ describe Gitlab::GitalyClient::ConflictFilesStitcher do
double(files: [double(header: nil, content: content_2[11..-1])])
]
conflict_files = described_class.new(messages).to_a
conflict_files = described_class.new(messages, target_repository.gitaly_repository).to_a
expect(conflict_files.size).to be(2)
......
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Tracking::IncidentManagement do
describe '.track_from_params' do
shared_examples 'a tracked event' do |label, value = nil|
it 'creates the tracking event with the correct details' do
expect(::Gitlab::Tracking)
.to receive(:event)
.with(
'IncidentManagement::Settings',
label,
value || kind_of(Hash)
)
end
end
after do
described_class.track_from_params(params)
end
context 'known params' do
known_params = described_class.tracking_keys
known_params.each do |key, values|
context "param #{key}" do
let(:params) { { key => '1' } }
it_behaves_like 'a tracked event', "enabled_#{known_params[key][:name]}"
end
end
context 'different input values' do
shared_examples 'the correct prefixed event name' do |input, enabled|
let(:params) { { issue_template_key: input } }
it 'matches' do
expect(::Gitlab::Tracking)
.to receive(:event)
.with(
anything,
"#{enabled}_issue_template_on_alerts",
anything
)
end
end
it_behaves_like 'the correct prefixed event name', 1, 'enabled'
it_behaves_like 'the correct prefixed event name', '1', 'enabled'
it_behaves_like 'the correct prefixed event name', 'template', 'enabled'
it_behaves_like 'the correct prefixed event name', '', 'disabled'
it_behaves_like 'the correct prefixed event name', nil, 'disabled'
end
context 'param with label' do
let(:params) { { issue_template_key: '1' } }
it_behaves_like 'a tracked event', "enabled_issue_template_on_alerts", { label: 'Template name', property: '1' }
end
context 'param without label' do
let(:params) { { create_issue: '1' } }
it_behaves_like 'a tracked event', "enabled_issue_auto_creation_on_alerts", {}
end
end
context 'unknown params' do
let(:params) { { 'unknown' => '1' } }
it 'does not create the tracking event' do
expect(::Gitlab::Tracking)
.not_to receive(:event)
end
end
end
end
......@@ -51,6 +51,12 @@ describe Issues::ZoomLinkService do
expect(result.payload[:description])
.to eq("#{issue.description}\n\n#{zoom_link}")
end
it 'tracks the add event' do
expect(Gitlab::Tracking).to receive(:event)
.with('IncidentManagement::ZoomIntegration', 'add_zoom_meeting', label: 'Issue ID', value: issue.id)
result
end
end
shared_examples 'cannot add link' do
......@@ -135,6 +141,13 @@ describe Issues::ZoomLinkService do
.to eq(issue.description.delete_suffix("\n\n#{zoom_link}"))
end
it 'tracks the remove event' do
expect(Gitlab::Tracking).to receive(:event)
.with('IncidentManagement::ZoomIntegration', 'remove_zoom_meeting', label: 'Issue ID', value: issue.id)
result
end
context 'with insufficient permissions' do
include_context 'insufficient permissions'
include_examples 'cannot remove link'
......