Commit a431e373 authored by Bob Van Landuyt

Merge branch 'ce-to-ee-2018-09-27' into 'master'

CE upstream - 2018-09-27 21:21 UTC

Closes #1801, gitlab-ce#50181, and gitlab-ce#51925

See merge request gitlab-org/gitlab-ee!7570
parents 7b4269c0 bb8eb7f9
@@ -72,11 +72,13 @@ For a first-time step-by-step guide to the contribution process, please see
Looking for something to work on? Look for issues in the [Backlog (Accepting merge requests) milestone](#i-want-to-contribute).
-GitLab comes into two flavors, GitLab Community Edition (CE) our free and open
+GitLab comes in two flavors, GitLab Community Edition (CE) our free and open
source edition, and GitLab Enterprise Edition (EE) which is our commercial
edition. Throughout this guide you will see references to CE and EE for
abbreviation.
+To get an overview of GitLab community membership including those that would be reviewing or merging your contributions, please visit [the community roles page](doc/development/contributing/community_roles.md).
If you want to know how the GitLab [core team]
operates please see [the GitLab contributing process](PROCESS.md).
......
@@ -6,7 +6,7 @@ import { visitUrl } from './lib/utils/url_utility';
import bp from './breakpoints';
import { numberToHumanSize } from './lib/utils/number_utils';
import { setCiStatusFavicon } from './lib/utils/common_utils';
-import { isScrolledToBottom, scrollDown } from './lib/utils/scroll_utils';
+import { isScrolledToBottom, scrollDown, scrollUp } from './lib/utils/scroll_utils';
import LogOutputBehaviours from './lib/utils/logoutput_behaviours';
export default class Job extends LogOutputBehaviours {
@@ -80,7 +80,7 @@ export default class Job extends LogOutputBehaviours {
}
scrollToTop() {
-$(document).scrollTop(0);
+scrollUp();
this.hasBeenScrolled = true;
this.toggleScroll();
}
......
@@ -25,7 +25,7 @@
validator(value) {
return (
value === null ||
-(Object.prototype.hasOwnProperty.call(value, 'link') &&
+(Object.prototype.hasOwnProperty.call(value, 'path') &&
Object.prototype.hasOwnProperty.call(value, 'method') &&
Object.prototype.hasOwnProperty.call(value, 'title'))
);
@@ -63,7 +63,7 @@
class="text-center"
>
<a
-:href="action.link"
+:href="action.path"
:data-method="action.method"
class="js-job-empty-state-action btn btn-primary"
>
......
<script>
+import _ from 'underscore';
import TimeagoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
export default {
components: {
TimeagoTooltip,
},
props: {
-erasedByUser: {
-type: Boolean,
-required: true,
-},
-username: {
-type: String,
-required: false,
-default: null,
-},
-linkToUser: {
-type: String,
+user: {
+type: Object,
required: false,
-default: null,
+default: () => ({}),
},
erasedAt: {
type: String,
required: true,
},
},
-};
+computed: {
+isErasedByUser() {
+return !_.isEmpty(this.user);
+},
+},
+};
</script>
<template>
<div class="prepend-top-default js-build-erased">
<div class="erased alert alert-warning">
-<template v-if="erasedByUser">
+<template v-if="isErasedByUser">
{{ s__("Job|Job has been erased by") }}
-<a :href="linkToUser">
-{{ username }}
+<a :href="user.web_url">
+{{ user.username }}
</a>
</template>
<template v-else>
......
@@ -6,7 +6,7 @@
type: String,
required: true,
},
-isReceivingBuildTrace: {
+isComplete: {
type: Boolean,
required: true,
},
@@ -22,7 +22,7 @@
</code>
<div
-v-if="isReceivingBuildTrace"
+v-if="isComplete"
class="js-log-animation build-loader-animation"
>
<div class="dot"></div>
......
<script> <script>
import { polyfillSticky } from '~/lib/utils/sticky';
import Icon from '~/vue_shared/components/icon.vue'; import Icon from '~/vue_shared/components/icon.vue';
import tooltip from '~/vue_shared/directives/tooltip'; import tooltip from '~/vue_shared/directives/tooltip';
import { numberToHumanSize } from '~/lib/utils/number_utils'; import { numberToHumanSize } from '~/lib/utils/number_utils';
import { s__, sprintf } from '~/locale'; import { sprintf } from '~/locale';
export default { export default {
components: { components: {
...@@ -12,44 +13,48 @@ ...@@ -12,44 +13,48 @@
tooltip, tooltip,
}, },
props: { props: {
canEraseJob: { erasePath: {
type: Boolean, type: String,
required: true, required: false,
default: null,
}, },
size: { size: {
type: Number, type: Number,
required: true, required: true,
}, },
rawTracePath: { rawPath: {
type: String, type: String,
required: false, required: false,
default: null, default: null,
}, },
canScrollToTop: { isScrollTopDisabled: {
type: Boolean, type: Boolean,
required: true, required: true,
}, },
canScrollToBottom: { isScrollBottomDisabled: {
type: Boolean,
required: true,
},
isScrollingDown: {
type: Boolean,
required: true,
},
isTraceSizeVisible: {
type: Boolean, type: Boolean,
required: true, required: true,
}, },
}, },
computed: { computed: {
jobLogSize() { jobLogSize() {
return sprintf('Showing last %{startSpanTag} %{size} %{endSpanTag} of log -', { return sprintf('Showing last %{size} of log -', {
startSpanTag: '<span class="s-truncated-info-size truncated-info-size">',
endSpanTag: '</span>',
size: numberToHumanSize(this.size), size: numberToHumanSize(this.size),
}); });
}, },
}, },
methods: { mounted() {
handleEraseJobClick() { polyfillSticky(this.$el);
// eslint-disable-next-line no-alert
if (window.confirm(s__('Job|Are you sure you want to erase this job?'))) {
this.$emit('eraseJob');
}
}, },
methods: {
handleScrollToTop() { handleScrollToTop() {
this.$emit('scrollJobLogTop'); this.$emit('scrollJobLogTop');
}, },
...@@ -57,48 +62,52 @@ ...@@ -57,48 +62,52 @@
this.$emit('scrollJobLogBottom'); this.$emit('scrollJobLogBottom');
}, },
}, },
}; };
</script> </script>
<template> <template>
<div class="top-bar"> <div class="top-bar">
<!-- truncate information --> <!-- truncate information -->
<div class="js-truncated-info truncated-info d-none d-sm-block float-left"> <div class="js-truncated-info truncated-info d-none d-sm-block float-left">
<p v-html="jobLogSize"></p> <template v-if="isTraceSizeVisible">
{{ jobLogSize }}
<a <a
v-if="rawTracePath" v-if="rawPath"
:href="rawTracePath" :href="rawPath"
class="js-raw-link raw-link" class="js-raw-link raw-link"
> >
{{ s__("Job|Complete Raw") }} {{ s__("Job|Complete Raw") }}
</a> </a>
</template>
</div> </div>
<!-- eo truncate information --> <!-- eo truncate information -->
<div class="controllers float-right"> <div class="controllers float-right">
<!-- links --> <!-- links -->
<a <a
v-if="rawTracePath" v-if="rawPath"
v-tooltip v-tooltip
:title="s__('Job|Show complete raw')" :title="s__('Job|Show complete raw')"
:href="rawTracePath" :href="rawPath"
class="js-raw-link-controller controllers-buttons" class="js-raw-link-controller controllers-buttons"
data-container="body" data-container="body"
> >
<icon name="doc-text" /> <icon name="doc-text" />
</a> </a>
<button <a
v-if="canEraseJob" v-if="erasePath"
v-tooltip v-tooltip
:title="s__('Job|Erase job log')" :title="s__('Job|Erase job log')"
type="button" :href="erasePath"
data-confirm="__('Are you sure you want to erase this build?')"
class="js-erase-link controllers-buttons" class="js-erase-link controllers-buttons"
data-container="body" data-container="body"
@click="handleEraseJobClick" data-method="post"
> >
<icon name="remove" /> <icon name="remove" />
</button> </a>
<!-- eo links --> <!-- eo links -->
<!-- scroll buttons --> <!-- scroll buttons -->
...@@ -109,7 +118,7 @@ ...@@ -109,7 +118,7 @@
data-container="body" data-container="body"
> >
<button <button
:disabled="!canScrollToTop" :disabled="isScrollTopDisabled"
type="button" type="button"
class="js-scroll-top btn-scroll btn-transparent btn-blank" class="js-scroll-top btn-scroll btn-transparent btn-blank"
@click="handleScrollToTop" @click="handleScrollToTop"
...@@ -125,9 +134,10 @@ ...@@ -125,9 +134,10 @@
data-container="body" data-container="body"
> >
<button <button
:disabled="!canScrollToBottom" :disabled="isScrollBottomDisabled"
type="button" type="button"
class="js-scroll-bottom btn-scroll btn-transparent btn-blank" class="js-scroll-bottom btn-scroll btn-transparent btn-blank"
:class="{ animate: isScrollingDown }"
@click="handleScrollToBottom" @click="handleScrollToBottom"
> >
<icon name="scroll_down"/> <icon name="scroll_down"/>
......
@@ -24,14 +24,14 @@ export default {
<div class="bs-callout bs-callout-warning">
<p
v-if="hasNoRunnersForProject"
-class="js-stuck-no-runners"
+class="js-stuck-no-runners append-bottom-0"
>
{{ s__(`Job|This job is stuck, because the project
doesn't have any runners online assigned to it.`) }}
</p>
<p
v-else-if="tags.length"
-class="js-stuck-with-tags"
+class="js-stuck-with-tags append-bottom-0"
>
{{ s__(`This job is stuck, because you don't have
any active runners online with any of these tags assigned to them:`) }}
@@ -45,7 +45,7 @@ export default {
</p>
<p
v-else
-class="js-stuck-no-active-runner"
+class="js-stuck-no-active-runner append-bottom-0"
>
{{ s__(`This job is stuck, because you don't
have any active runners that can run this job.`) }}
......
import { mapState } from 'vuex';
import Vue from 'vue'; import Vue from 'vue';
import JobMediator from './job_details_mediator'; import Job from '../job';
import jobHeader from './components/header.vue'; import JobHeader from './components/header.vue';
import detailsBlock from './components/sidebar_details_block.vue'; import DetailsBlock from './components/sidebar_details_block.vue';
import createStore from './store';
export default () => { export default () => {
const { dataset } = document.getElementById('js-job-details-vue'); const { dataset } = document.getElementById('js-job-details-vue');
const mediator = new JobMediator({ endpoint: dataset.endpoint });
mediator.fetchJob(); // eslint-disable-next-line no-new
new Job();
const store = createStore();
store.dispatch('setJobEndpoint', dataset.endpoint);
store.dispatch('fetchJob');
// Header // Header
// eslint-disable-next-line no-new // eslint-disable-next-line no-new
new Vue({ new Vue({
el: '#js-build-header-vue', el: '#js-build-header-vue',
components: { components: {
jobHeader, JobHeader,
},
data() {
return {
mediator,
};
}, },
mounted() { store,
this.mediator.initBuildClass(); computed: {
...mapState(['job', 'isLoading']),
}, },
render(createElement) { render(createElement) {
return createElement('job-header', { return createElement('job-header', {
props: { props: {
isLoading: this.mediator.state.isLoading, isLoading: this.isLoading,
job: this.mediator.store.state.job, job: this.job,
}, },
}); });
}, },
...@@ -41,18 +43,17 @@ export default () => { ...@@ -41,18 +43,17 @@ export default () => {
new Vue({ new Vue({
el: detailsBlockElement, el: detailsBlockElement,
components: { components: {
detailsBlock, DetailsBlock,
}, },
data() { store,
return { computed: {
mediator, ...mapState(['job', 'isLoading']),
};
}, },
render(createElement) { render(createElement) {
return createElement('details-block', { return createElement('details-block', {
props: { props: {
isLoading: this.mediator.state.isLoading, isLoading: this.isLoading,
job: this.mediator.store.state.job, job: this.job,
runnerHelpUrl: dataset.runnerHelpUrl, runnerHelpUrl: dataset.runnerHelpUrl,
terminalPath: detailsBlockDataset.terminalPath, terminalPath: detailsBlockDataset.terminalPath,
}, },
......
import Visibility from 'visibilityjs';
import Flash from '../flash';
import Poll from '../lib/utils/poll';
import JobStore from './stores/job_store';
import JobService from './services/job_service';
import Job from '../job';
export default class JobMediator {
constructor(options = {}) {
this.options = options;
this.store = new JobStore();
this.service = new JobService(options.endpoint);
this.state = {
isLoading: false,
};
}
initBuildClass() {
this.build = new Job();
}
fetchJob() {
this.poll = new Poll({
resource: this.service,
method: 'getJob',
successCallback: response => this.successCallback(response),
errorCallback: () => this.errorCallback(),
});
if (!Visibility.hidden()) {
this.state.isLoading = true;
this.poll.makeRequest();
} else {
this.getJob();
}
Visibility.change(() => {
if (!Visibility.hidden()) {
this.poll.restart();
} else {
this.poll.stop();
}
});
}
getJob() {
return this.service
.getJob()
.then(response => this.successCallback(response))
.catch(() => this.errorCallback());
}
successCallback(response) {
this.state.isLoading = false;
return this.store.storeJob(response.data);
}
errorCallback() {
this.state.isLoading = false;
return new Flash('An error occurred while fetching the job.');
}
}
import axios from '../../lib/utils/axios_utils';
export default class JobService {
constructor(endpoint) {
this.job = endpoint;
}
getJob() {
return axios.get(this.job);
}
}
import * as types from './mutation_types';
export default {
+[types.SET_JOB_ENDPOINT](state, endpoint) {
+state.jobEndpoint = endpoint;
+},
[types.REQUEST_STATUS_FAVICON](state) {
state.fetchingStatusFavicon = true;
},
......
export default class JobStore {
constructor() {
this.state = {
job: {},
};
}
storeJob(job = {}) {
this.state.job = job;
}
}
import $ from 'jquery';
-import { canScroll, isScrolledToBottom, toggleDisableButton } from './scroll_utils';
+import {
+canScroll,
+isScrolledToBottom,
+isScrolledToTop,
+isScrolledToMiddle,
+toggleDisableButton,
+} from './scroll_utils';
export default class LogOutputBehaviours {
constructor() {
@@ -12,18 +18,13 @@ export default class LogOutputBehaviours {
}
toggleScroll() {
-const $document = $(document);
-const currentPosition = $document.scrollTop();
-const scrollHeight = $document.height();
-const windowHeight = $(window).height();
if (canScroll()) {
-if (currentPosition > 0 && scrollHeight - currentPosition !== windowHeight) {
+if (isScrolledToMiddle()) {
// User is in the middle of the log
toggleDisableButton(this.$scrollTopBtn, false);
toggleDisableButton(this.$scrollBottomBtn, false);
-} else if (currentPosition === 0) {
+} else if (isScrolledToTop()) {
// User is at Top of Log
toggleDisableButton(this.$scrollTopBtn, true);
......
@@ -4,6 +4,7 @@ export const canScroll = () => $(document).height() > $(window).height();
/**
* Checks if the entire page is scrolled down all the way to the bottom
+* @returns {Boolean}
*/
export const isScrolledToBottom = () => {
const $document = $(document);
@@ -16,11 +17,34 @@ export const isScrolledToBottom = () => {
return scrollHeight - currentPosition === windowHeight;
};
/**
* Checks if page is scrolled to the top
* @returns {Boolean}
*/
export const isScrolledToTop = () => $(document).scrollTop() === 0;
export const scrollDown = () => {
const $document = $(document);
$document.scrollTop($document.height());
};
export const scrollUp = () => {
$(document).scrollTop(0);
};
/**
* Checks if scroll position is in the middle of the page
* @returns {Boolean}
*/
export const isScrolledToMiddle = () => {
const $document = $(document);
const currentPosition = $document.scrollTop();
const scrollHeight = $document.height();
const windowHeight = $(window).height();
return currentPosition > 0 && scrollHeight - currentPosition !== windowHeight;
};
export const toggleDisableButton = ($button, disable) => { export const toggleDisableButton = ($button, disable) => {
if (disable && $button.prop('disabled')) return; if (disable && $button.prop('disabled')) return;
$button.prop('disabled', disable); $button.prop('disabled', disable);
......
@@ -42,7 +42,7 @@ export default {
keys: ['feature', 'request'],
},
],
-simpleMetrics: ['redis', 'sidekiq'],
+simpleMetrics: ['redis'],
data() {
return { currentRequestId: '' };
},
......
<script> <script>
export default { export default {
props: { props: {
currentRequest: { currentRequest: {
type: Object, type: Object,
...@@ -10,7 +10,20 @@ export default { ...@@ -10,7 +10,20 @@ export default {
required: true, required: true,
}, },
}, },
}; computed: {
duration() {
return (
this.currentRequest.details[this.metric] &&
this.currentRequest.details[this.metric].duration
);
},
calls() {
return (
this.currentRequest.details[this.metric] && this.currentRequest.details[this.metric].calls
);
},
},
};
</script> </script>
<template> <template>
<div <div
...@@ -21,9 +34,9 @@ export default { ...@@ -21,9 +34,9 @@ export default {
v-if="currentRequest.details" v-if="currentRequest.details"
class="bold" class="bold"
> >
{{ currentRequest.details[metric].duration }} {{ duration }}
/ /
{{ currentRequest.details[metric].calls }} {{ calls }}
</span> </span>
{{ metric }} {{ metric }}
</div> </div>
......
@@ -39,7 +39,7 @@
.table-section {
white-space: nowrap;
-$section-widths: 5 10 15 20 25 30 40 50 100;
+$section-widths: 5 10 15 20 25 30 40 50 60 100;
@each $width in $section-widths {
&.section-#{$width} {
flex: 0 0 #{$width + '%'};
......
@@ -56,7 +56,7 @@ $blue-50: #f6fafe;
$blue-100: #e4f0fb;
$blue-200: #b8d6f4;
$blue-300: #73afea;
-$blue-400: #2e87e0;
+$blue-400: #418cd8;
$blue-500: #1f78d1;
$blue-600: #1b69b6;
$blue-700: #17599c;
@@ -68,7 +68,7 @@ $orange-50: #fffaf4;
$orange-100: #fff1de;
$orange-200: #fed69f;
$orange-300: #fdbc60;
-$orange-400: #fca121;
+$orange-400: #fca429;
$orange-500: #fc9403;
$orange-600: #de7e00;
$orange-700: #c26700;
@@ -79,7 +79,7 @@ $orange-950: #592800;
$red-50: #fef6f5;
$red-100: #fbe5e1;
$red-200: #f2b4a9;
-$red-300: #e67664;
+$red-300: #ea8271;
$red-400: #e05842;
$red-500: #db3b21;
$red-600: #c0341d;
......
@@ -281,9 +281,10 @@ class ApplicationController < ActionController::Base
end
def event_filter
-# Split using comma to maintain backward compatibility Ex/ "filter1,filter2"
-filters = cookies['event_filter'].split(',')[0] if cookies['event_filter'].present?
-@event_filter ||= EventFilter.new(filters)
+@event_filter ||=
+EventFilter.new(params[:event_filter].presence || cookies[:event_filter]).tap do |new_event_filter|
+cookies[:event_filter] = new_event_filter.filter
+end
end
# JSON for infinite scroll via Pager object
......
@@ -41,7 +41,7 @@ class DashboardController < Dashboard::ApplicationController
end
@events = EventCollection
-.new(projects, offset: params[:offset].to_i, filter: @event_filter)
+.new(projects, offset: params[:offset].to_i, filter: event_filter)
.to_a
Events::RenderService.new(current_user).execute(@events)
...@@ -72,8 +72,12 @@ module Ci ...@@ -72,8 +72,12 @@ module Ci
'', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').archive) '', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').archive)
end end
scope :with_existing_job_artifacts, ->(query) do
where('EXISTS (?)', ::Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').merge(query))
end
scope :with_archived_trace, ->() do scope :with_archived_trace, ->() do
where('EXISTS (?)', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').trace) with_existing_job_artifacts(Ci::JobArtifact.trace)
end end
scope :without_archived_trace, ->() do scope :without_archived_trace, ->() do
...@@ -81,10 +85,12 @@ module Ci ...@@ -81,10 +85,12 @@ module Ci
end end
scope :with_test_reports, ->() do scope :with_test_reports, ->() do
includes(:job_artifacts_junit) # Prevent N+1 problem when iterating each ci_job_artifact row with_existing_job_artifacts(Ci::JobArtifact.test_reports)
.where('EXISTS (?)', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').test_reports) .eager_load_job_artifacts
end end
scope :eager_load_job_artifacts, -> { includes(:job_artifacts) }
scope :with_artifacts_stored_locally, -> { with_artifacts_archive.where(artifacts_file_store: [nil, LegacyArtifactUploader::Store::LOCAL]) } scope :with_artifacts_stored_locally, -> { with_artifacts_archive.where(artifacts_file_store: [nil, LegacyArtifactUploader::Store::LOCAL]) }
scope :with_archived_trace_stored_locally, -> { with_archived_trace.where(artifacts_file_store: [nil, LegacyArtifactUploader::Store::LOCAL]) } scope :with_archived_trace_stored_locally, -> { with_archived_trace.where(artifacts_file_store: [nil, LegacyArtifactUploader::Store::LOCAL]) }
scope :with_artifacts_not_expired, ->() { with_artifacts_archive.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) } scope :with_artifacts_not_expired, ->() { with_artifacts_archive.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
...@@ -408,8 +414,8 @@ module Ci ...@@ -408,8 +414,8 @@ module Ci
trace.exist? trace.exist?
end end
def has_test_reports? def has_job_artifacts?
job_artifacts.test_reports.any? job_artifacts.any?
end end
def has_old_trace? def has_old_trace?
...@@ -473,28 +479,23 @@ module Ci ...@@ -473,28 +479,23 @@ module Ci
end end
end end
def erase_artifacts! # and use that for `ExpireBuildInstanceArtifactsWorker`?
remove_artifacts_file! def erase_erasable_artifacts!
remove_artifacts_metadata! job_artifacts.erasable.destroy_all # rubocop: disable DestroyAll
save erase_old_artifacts!
end
def erase_test_reports!
# TODO: Use fast_destroy_all in the context of https://gitlab.com/gitlab-org/gitlab-ce/issues/35240
job_artifacts_junit&.destroy
end end
def erase(opts = {}) def erase(opts = {})
return false unless erasable? return false unless erasable?
erase_artifacts! job_artifacts.destroy_all # rubocop: disable DestroyAll
erase_test_reports! erase_old_artifacts!
erase_trace! erase_trace!
update_erased!(opts[:erased_by]) update_erased!(opts[:erased_by])
end end
def erasable? def erasable?
complete? && (artifacts? || has_test_reports? || has_trace?) complete? && (artifacts? || has_job_artifacts? || has_trace?)
end end
def erased? def erased?
...@@ -652,8 +653,8 @@ module Ci ...@@ -652,8 +653,8 @@ module Ci
def collect_test_reports!(test_reports) def collect_test_reports!(test_reports)
test_reports.get_suite(group_name).tap do |test_suite| test_reports.get_suite(group_name).tap do |test_suite|
each_test_report do |file_type, blob| each_report(Ci::JobArtifact::TEST_REPORT_FILE_TYPES) do |file_type, blob|
Gitlab::Ci::Parsers.fabricate!(file_type).parse!(blob, test_suite) Gitlab::Ci::Parsers::Test.fabricate!(file_type).parse!(blob, test_suite)
end end
end end
end end
...@@ -673,6 +674,13 @@ module Ci ...@@ -673,6 +674,13 @@ module Ci
private private
def erase_old_artifacts!
# TODO: To be removed once we get rid of
remove_artifacts_file!
remove_artifacts_metadata!
save
end
def successful_deployment_status def successful_deployment_status
if success? && last_deployment&.last? if success? && last_deployment&.last?
return :last return :last
...@@ -683,12 +691,17 @@ module Ci ...@@ -683,12 +691,17 @@ module Ci
:creating :creating
end end
def each_test_report def each_report(report_types)
Ci::JobArtifact::TEST_REPORT_FILE_TYPES.each do |file_type| job_artifacts_for_types(report_types).each do |report_artifact|
public_send("job_artifacts_#{file_type}").each_blob do |blob| # rubocop:disable GitlabSecurity/PublicSend report_artifact.each_blob do |blob|
yield file_type, blob yield report_artifact.file_type, blob
end
end end
end end
def job_artifacts_for_types(report_types)
# Use select to leverage cached associations and avoid N+1 queries
job_artifacts.select { |artifact| artifact.file_type.in?(report_types) }
end end
def update_artifacts_size def update_artifacts_size
......
...@@ -11,8 +11,28 @@ module Ci ...@@ -11,8 +11,28 @@ module Ci
NotSupportedAdapterError = Class.new(StandardError) NotSupportedAdapterError = Class.new(StandardError)
TEST_REPORT_FILE_TYPES = %w[junit].freeze TEST_REPORT_FILE_TYPES = %w[junit].freeze
DEFAULT_FILE_NAMES = { junit: 'junit.xml' }.freeze NON_ERASABLE_FILE_TYPES = %w[trace].freeze
TYPE_AND_FORMAT_PAIRS = { archive: :zip, metadata: :gzip, trace: :raw, junit: :gzip }.freeze DEFAULT_FILE_NAMES = {
archive: nil,
metadata: nil,
trace: nil,
junit: 'junit.xml',
sast: 'gl-sast-report.json',
dependency_scanning: 'gl-dependency-scanning-report.json',
container_scanning: 'gl-container-scanning-report.json',
dast: 'gl-dast-report.json'
}.freeze
TYPE_AND_FORMAT_PAIRS = {
archive: :zip,
metadata: :gzip,
trace: :raw,
junit: :gzip,
sast: :gzip,
dependency_scanning: :gzip,
container_scanning: :gzip,
dast: :gzip
}.freeze
belongs_to :project belongs_to :project
belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id
...@@ -30,8 +50,18 @@ module Ci ...@@ -30,8 +50,18 @@ module Ci
scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) } scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
scope :with_files_stored_remotely, -> { where(file_store: ::JobArtifactUploader::Store::REMOTE) } scope :with_files_stored_remotely, -> { where(file_store: ::JobArtifactUploader::Store::REMOTE) }
scope :with_file_types, -> (file_types) do
types = self.file_types.select { |file_type| file_types.include?(file_type) }.values
where(file_type: types)
end
scope :test_reports, -> do scope :test_reports, -> do
types = self.file_types.select { |file_type| TEST_REPORT_FILE_TYPES.include?(file_type) }.values with_file_types(TEST_REPORT_FILE_TYPES)
end
scope :erasable, -> do
types = self.file_types.reject { |file_type| NON_ERASABLE_FILE_TYPES.include?(file_type) }.values
where(file_type: types) where(file_type: types)
end end
...@@ -42,7 +72,11 @@ module Ci ...@@ -42,7 +72,11 @@ module Ci
archive: 1, archive: 1,
metadata: 2, metadata: 2,
trace: 3, trace: 3,
junit: 4 junit: 4,
sast: 5, ## EE-specific
dependency_scanning: 6, ## EE-specific
container_scanning: 7, ## EE-specific
dast: 8 ## EE-specific
} }
enum file_format: { enum file_format: {
......
@@ -73,6 +73,11 @@ module Clusters
"clientSecret" => oauth_application.secret,
"callbackUrl" => callback_url
}
+},
+"singleuser" => {
+"extraEnv" => {
+"GITLAB_CLUSTER_ID" => cluster.id
+}
}
}
end
......
@@ -9,6 +9,7 @@ class BuildDetailsEntity < JobEntity
expose :coverage, :erased_at, :duration
expose :tag_list, as: :tags
+expose :has_trace?, as: :has_trace
expose :user, using: UserEntity
expose :runner, using: RunnerEntity
expose :pipeline, using: PipelineEntity
......
@@ -2,7 +2,7 @@
module Files
class MultiService < Files::BaseService
-UPDATE_FILE_ACTIONS = %w(update move delete).freeze
+UPDATE_FILE_ACTIONS = %w(update move delete chmod).freeze
def create_commit!
transformer = Lfs::FileTransformer.new(project, @branch_name)
......
@@ -287,7 +287,6 @@ module QuickActions
end
params '#issue | !merge_request'
condition do
-issuable.persisted? &&
current_user.can?(:"update_#{issuable.to_ability_name}", issuable)
end
parse_params do |issuable_param|
......
@@ -117,7 +117,7 @@
= link_to edit_group_path(@group) do
.nav-icon-container
= sprite_icon('settings')
-%span.nav-item-name
+%span.nav-item-name.qa-settings-item
= _('Settings')
%ul.sidebar-sub-level-items
= nav_link(path: %w[groups#projects groups#edit badges#index ci_cd#show], html_options: { class: "fly-out-top-item" } ) do
......
@@ -2,13 +2,13 @@
.fade-left= icon('angle-left')
.fade-right= icon('angle-right')
%ul.nav-links.event-filter.scrolling-tabs.nav.nav-tabs
-= event_filter_link EventFilter.all, _('All'), s_('EventFilterBy|Filter by all')
+= event_filter_link EventFilter::ALL, _('All'), s_('EventFilterBy|Filter by all')
- if event_filter_visible(:repository)
-= event_filter_link EventFilter.push, _('Push events'), s_('EventFilterBy|Filter by push events')
+= event_filter_link EventFilter::PUSH, _('Push events'), s_('EventFilterBy|Filter by push events')
- if event_filter_visible(:merge_requests)
-= event_filter_link EventFilter.merged, _('Merge events'), s_('EventFilterBy|Filter by merge events')
+= event_filter_link EventFilter::MERGED, _('Merge events'), s_('EventFilterBy|Filter by merge events')
- if event_filter_visible(:issues)
-= event_filter_link EventFilter.issue, _('Issue events'), s_('EventFilterBy|Filter by issue events')
+= event_filter_link EventFilter::ISSUE, _('Issue events'), s_('EventFilterBy|Filter by issue events')
- if comments_visible?
-= event_filter_link EventFilter.comments, _('Comments'), s_('EventFilterBy|Filter by comments')
+= event_filter_link EventFilter::COMMENTS, _('Comments'), s_('EventFilterBy|Filter by comments')
-= event_filter_link EventFilter.team, _('Team'), s_('EventFilterBy|Filter by team')
+= event_filter_link EventFilter::TEAM, _('Team'), s_('EventFilterBy|Filter by team')
@@ -13,7 +13,7 @@ class ExpireBuildInstanceArtifactsWorker
return unless build&.project && !build.project.pending_delete
Rails.logger.info "Removing artifacts for build #{build.id}..."
-build.erase_artifacts!
+build.erase_erasable_artifacts!
end
# rubocop: enable CodeReuse/ActiveRecord
end
---
title: "Allow events filter to be set in the URL in addition to cookie"
merge_request: 21557
author: Igor @igas
type: added
---
title: Allows to chmod file with commits API
merge_request: 21866
author: Jacopo Beschi @jacopo-beschi
type: added
---
title: Don't create license_management build when not included in license
merge_request: 21958
author:
type: performance
---
title: Update all gitlab CI templates from gitlab-org/gitlab-ci-yml
merge_request: 21929
author:
type: added
---
title: Extracts scroll position check into reusable functions
merge_request:
author:
type: other
---
title: Uses Vuex store in job details page and removes old mediator pattern
merge_request:
author:
type: other
---
title: Fixes performance bar looking for a key in an undefined prop
merge_request:
author:
type: fixed
---
title: Expose has_trace in job API
merge_request: 21950
author:
type: other
---
title: Extend reports feature to support Security Products
merge_request: 21892
author:
type: added
---
title: Adds the user's public_email attribute to the API
merge_request: 21909
author: Alexis Reigel
type: added
---
title: Adds an extra width to the responsive tables
merge_request: 21928
author:
type: other
---
title: Fix blue, orange, and red color inconsistencies
merge_request: 21972
author:
type: other
---
title: Guard against a login attempt with invalid CSRF token
merge_request: 21934
author:
type: fixed
---
title: Allow /copy_metadata for new issues and MRs
merge_request: 21953
author:
type: changed
@@ -31,6 +31,11 @@ Rails.application.configure do |config|
Warden::Manager.before_logout(scope: :user) do |user, auth, opts|
user ||= auth.user
+# Rails CSRF protection may attempt to log out a user before that
+# user even logs in
+next unless user
activity = Gitlab::Auth::Activity.new(opts)
tracker = Gitlab::Auth::BlockedUserTracker.new(user, auth)
......
@@ -83,12 +83,13 @@ POST /projects/:id/repository/commits
| `actions[]` Attribute | Type | Required | Description |
| --------------------- | ---- | -------- | ----------- |
-| `action` | string | yes | The action to perform, `create`, `delete`, `move`, `update` |
+| `action` | string | yes | The action to perform, `create`, `delete`, `move`, `update`, `chmod`|
| `file_path` | string | yes | Full path to the file. Ex. `lib/class.rb` |
-| `previous_path` | string | no | Original full path to the file being moved. Ex. `lib/class1.rb` |
+| `previous_path` | string | no | Original full path to the file being moved. Ex. `lib/class1.rb`. Only considered for `move` action. |
-| `content` | string | no | File content, required for all except `delete`. Optional for `move` |
+| `content` | string | no | File content, required for all except `delete` and `chmod`. Optional for `move` |
| `encoding` | string | no | `text` or `base64`. `text` is default. |
| `last_commit_id` | string | no | Last known file commit id. Will be only considered in update, move and delete actions. |
+| `execute_filemode` | boolean | no | When `true/false` enables/disables the execute flag on the file. Only considered for `chmod` action. |
```bash
PAYLOAD=$(cat << 'JSON'
@@ -115,6 +116,11 @@ PAYLOAD=$(cat << 'JSON'
"action": "update",
"file_path": "foo/bar5",
"content": "new content"
+},
+{
+"action": "chmod",
+"file_path": "foo/bar5",
+"execute_filemode": true
}
]
}
......
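# Illustrative usage only (not part of the diff above): a payload like the one
# documented here would typically be submitted to the commits endpoint roughly
# as follows. The host, project ID, and token are placeholders.
curl --request POST \
     --header "PRIVATE-TOKEN: <your_access_token>" \
     --header "Content-Type: application/json" \
     --data "$PAYLOAD" \
     "https://gitlab.example.com/api/v4/projects/1/repository/commits"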
...@@ -46,19 +46,21 @@ Example of response ...@@ -46,19 +46,21 @@ Example of response
"status": "success", "status": "success",
"tag": false, "tag": false,
"user": { "user": {
"id": 1,
"name": "Administrator",
"username": "root",
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", "avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://gitlab.dev/root",
"created_at": "2015-12-21T13:14:24.077Z",
"bio": null, "bio": null,
"created_at": "2016-08-11T07:09:20.351Z",
"id": 1,
"linkedin": "",
"location": null, "location": null,
"name": "Administrator", "public_email": "",
"skype": "", "skype": "",
"state": "active", "linkedin": "",
"twitter": "", "twitter": "",
"username": "root", "website_url": "",
"web_url": "http://localhost:3000/root", "organization": ""
"website_url": ""
} }
}, },
"environment": { "environment": {
...@@ -103,19 +105,21 @@ Example of response ...@@ -103,19 +105,21 @@ Example of response
"status": "success", "status": "success",
"tag": false, "tag": false,
"user": { "user": {
"id": 1,
"name": "Administrator",
"username": "root",
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", "avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://gitlab.dev/root",
"created_at": "2015-12-21T13:14:24.077Z",
"bio": null, "bio": null,
"created_at": "2016-08-11T07:09:20.351Z",
"id": 1,
"linkedin": "",
"location": null, "location": null,
"name": "Administrator", "public_email": "",
"skype": "", "skype": "",
"state": "active", "linkedin": "",
"twitter": "", "twitter": "",
"username": "root", "website_url": "",
"web_url": "http://localhost:3000/root", "organization": ""
"website_url": ""
} }
}, },
"environment": { "environment": {
...@@ -188,19 +192,20 @@ Example of response ...@@ -188,19 +192,20 @@ Example of response
"started_at": null, "started_at": null,
"finished_at": "2016-08-11T11:32:35.145Z", "finished_at": "2016-08-11T11:32:35.145Z",
"user": { "user": {
"id": 1,
"name": "Administrator", "name": "Administrator",
"username": "root", "username": "root",
"id": 1,
"state": "active", "state": "active",
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", "avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://localhost:3000/root", "web_url": "http://gitlab.dev/root",
"created_at": "2016-08-11T07:09:20.351Z", "created_at": "2015-12-21T13:14:24.077Z",
"bio": null, "bio": null,
"location": null, "location": null,
"skype": "", "skype": "",
"linkedin": "", "linkedin": "",
"twitter": "", "twitter": "",
"website_url": "" "website_url": "",
"organization": ""
}, },
"commit": { "commit": {
"id": "a91957a858320c0e17f3a0eca7cfacbff50ea29a", "id": "a91957a858320c0e17f3a0eca7cfacbff50ea29a",
......
...@@ -53,18 +53,21 @@ Example of response ...@@ -53,18 +53,21 @@ Example of response
"tag": false, "tag": false,
"web_url": "https://example.com/foo/bar/-/jobs/6", "web_url": "https://example.com/foo/bar/-/jobs/6",
"user": { "user": {
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"bio": null,
"created_at": "2015-12-21T13:14:24.077Z",
"id": 1, "id": 1,
"linkedin": "",
"name": "Administrator", "name": "Administrator",
"skype": "",
"state": "active",
"twitter": "",
"username": "root", "username": "root",
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://gitlab.dev/root", "web_url": "http://gitlab.dev/root",
"website_url": "" "created_at": "2015-12-21T13:14:24.077Z",
"bio": null,
"location": null,
"public_email": "",
"skype": "",
"linkedin": "",
"twitter": "",
"website_url": "",
"organization": ""
} }
}, },
{ {
...@@ -109,18 +112,21 @@ Example of response ...@@ -109,18 +112,21 @@ Example of response
"tag": false, "tag": false,
"web_url": "https://example.com/foo/bar/-/jobs/7", "web_url": "https://example.com/foo/bar/-/jobs/7",
"user": { "user": {
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"bio": null,
"created_at": "2015-12-21T13:14:24.077Z",
"id": 1, "id": 1,
"linkedin": "",
"name": "Administrator", "name": "Administrator",
"skype": "",
"state": "active",
"twitter": "",
"username": "root", "username": "root",
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://gitlab.dev/root", "web_url": "http://gitlab.dev/root",
"website_url": "" "created_at": "2015-12-21T13:14:24.077Z",
"bio": null,
"location": null,
"public_email": "",
"skype": "",
"linkedin": "",
"twitter": "",
"website_url": "",
"organization": ""
} }
} }
] ]
...@@ -180,18 +186,21 @@ Example of response ...@@ -180,18 +186,21 @@ Example of response
"tag": false, "tag": false,
"web_url": "https://example.com/foo/bar/-/jobs/6", "web_url": "https://example.com/foo/bar/-/jobs/6",
"user": { "user": {
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"bio": null,
"created_at": "2015-12-21T13:14:24.077Z",
"id": 1, "id": 1,
"linkedin": "",
"name": "Administrator", "name": "Administrator",
"skype": "",
"state": "active",
"twitter": "",
"username": "root", "username": "root",
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://gitlab.dev/root", "web_url": "http://gitlab.dev/root",
"website_url": "" "created_at": "2015-12-21T13:14:24.077Z",
"bio": null,
"location": null,
"public_email": "",
"skype": "",
"linkedin": "",
"twitter": "",
"website_url": "",
"organization": ""
} }
}, },
{ {
...@@ -236,18 +245,21 @@ Example of response ...@@ -236,18 +245,21 @@ Example of response
"tag": false, "tag": false,
"web_url": "https://example.com/foo/bar/-/jobs/7", "web_url": "https://example.com/foo/bar/-/jobs/7",
"user": { "user": {
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"bio": null,
"created_at": "2015-12-21T13:14:24.077Z",
"id": 1, "id": 1,
"linkedin": "",
"name": "Administrator", "name": "Administrator",
"skype": "",
"state": "active",
"twitter": "",
"username": "root", "username": "root",
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://gitlab.dev/root", "web_url": "http://gitlab.dev/root",
"website_url": "" "created_at": "2015-12-21T13:14:24.077Z",
"bio": null,
"location": null,
"public_email": "",
"skype": "",
"linkedin": "",
"twitter": "",
"website_url": "",
"organization": ""
} }
} }
] ]
...@@ -305,18 +317,21 @@ Example of response ...@@ -305,18 +317,21 @@ Example of response
"tag": false, "tag": false,
"web_url": "https://example.com/foo/bar/-/jobs/8", "web_url": "https://example.com/foo/bar/-/jobs/8",
"user": { "user": {
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"bio": null,
"created_at": "2015-12-21T13:14:24.077Z",
"id": 1, "id": 1,
"linkedin": "",
"name": "Administrator", "name": "Administrator",
"skype": "",
"state": "active",
"twitter": "",
"username": "root", "username": "root",
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://gitlab.dev/root", "web_url": "http://gitlab.dev/root",
"website_url": "" "created_at": "2015-12-21T13:14:24.077Z",
"bio": null,
"location": null,
"public_email": "",
"skype": "",
"linkedin": "",
"twitter": "",
"website_url": "",
"organization": ""
} }
} }
``` ```
......
@@ -27,10 +27,16 @@ Parameters:
"web_url": "http://localhost:3000/john_smith",
"created_at": "2015-09-03T07:24:01.670Z",
"bio": null,
+"location": null,
+"public_email": "john@example.com",
"skype": "",
"linkedin": "",
"twitter": "",
"website_url": "",
+"organization": null,
+"last_sign_in_at": "2015-09-03T07:24:01.670Z",
+"confirmed_at": "2015-09-03T07:24:01.670Z",
+"last_activity_on": "2015-09-03",
"email": "john@example.com",
"theme_id": 2,
"color_scheme_id": 1,
@@ -40,6 +46,8 @@ Parameters:
"can_create_group": true,
"can_create_project": true,
"two_factor_enabled": false
+"external": false,
+"private_profile": null
}
}
```
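For reference, the user shown above would be fetched with a request along these lines (a sketch only; the host, user ID, and token are placeholders, and fields such as `email` and `last_sign_in_at` appear only for administrators):

```bash
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/users/1"
```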
@@ -439,6 +439,11 @@ Parameters:
"id" : 1,
"name" : "Administrator"
},
+"diff_refs": {
+"base_sha": "1111111111111111111111111111111111111111",
+"head_sha": "2222222222222222222222222222222222222222",
+"start_sha": "3333333333333333333333333333333333333333"
+},
"diverged_commits_count": 2
}
```
......
@@ -294,6 +294,7 @@ Example response:
"created_at": "2017-11-16T18:38:46.000Z",
"bio": null,
"location": null,
+"public_email": "",
"skype": "",
"linkedin": "",
"twitter": "",
......
@@ -201,6 +201,7 @@ Parameters:
"created_at": "2012-05-23T08:00:58Z",
"bio": null,
"location": null,
+"public_email": "john@example.com",
"skype": "",
"linkedin": "",
"twitter": "",
@@ -232,6 +233,7 @@ Parameters:
"is_admin": false,
"bio": null,
"location": null,
+"public_email": "john@example.com",
"skype": "",
"linkedin": "",
"twitter": "",
@@ -372,6 +374,7 @@ GET /user
"created_at": "2012-05-23T08:00:58Z",
"bio": null,
"location": null,
+"public_email": "john@example.com",
"skype": "",
"linkedin": "",
"twitter": "",
@@ -420,6 +423,7 @@ GET /user
"is_admin": false,
"bio": null,
"location": null,
+"public_email": "john@example.com",
"skype": "",
"linkedin": "",
"twitter": "",
......
@@ -314,8 +314,8 @@ build:
stage: build
script:
- docker pull $CONTAINER_IMAGE:latest || true
-- docker build --cache-from $CONTAINER_IMAGE:latest --tag $CONTAINER_IMAGE:$CI_BUILD_REF --tag $CONTAINER_IMAGE:latest .
+- docker build --cache-from $CONTAINER_IMAGE:latest --tag $CONTAINER_IMAGE:$CI_COMMIT_SHA --tag $CONTAINER_IMAGE:latest .
-- docker push $CONTAINER_IMAGE:$CI_BUILD_REF
+- docker push $CONTAINER_IMAGE:$CI_COMMIT_SHA
- docker push $CONTAINER_IMAGE:latest
```
......
### Community members & roles
GitLab community members and their privileges/responsibilities.
| Roles | Responsibilities | Requirements |
|-------|------------------|--------------|
| Maintainer | Accepts merge requests on several GitLab projects | Added to the [team page](https://about.gitlab.com/team/). An expert on code reviews and knows the product/code base |
| Reviewer | Performs code reviews on MRs | Added to the [team page](https://about.gitlab.com/team/) |
| Developer | Has access to GitLab internal infrastructure & issues (e.g. HR-related) | GitLab employee or a Core Team member (with an NDA) |
| Contributor | Can make contributions to all GitLab public projects | Have a GitLab.com account |
[List of current reviewers/maintainers](https://about.gitlab.com/handbook/engineering/projects/#gitlab-ce)
\ No newline at end of file
-<!-- START doctoc generated TOC please keep comment here to allow auto update -->
+# Implement design & UI elements
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)*
- [Implement design & UI elements](#implement-design--ui-elements)
- [Style guides](#style-guides)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
## Implement design & UI elements
For guidance on UX implementation at GitLab, please refer to our [Design System](https://design.gitlab.com/).
......
-<!-- START doctoc generated TOC please keep comment here to allow auto update -->
+# Contribute to GitLab
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)*
- [Contribute to GitLab](#contribute-to-gitlab)
- [Security vulnerability disclosure](#security-vulnerability-disclosure)
- [Code of conduct](#code-of-conduct)
- [Closing policy for issues and merge requests](#closing-policy-for-issues-and-merge-requests)
- [Helping others](#helping-others)
- [I want to contribute!](#i-want-to-contribute)
- [Contribution Flow](#contribution-flow)
- [Workflow labels](#workflow-labels)
- [Type labels](#type-labels)
- [Subject labels](#subject-labels)
- [Team labels](#team-labels)
- [Milestone labels](#milestone-labels)
- [Bug Priority labels](#bug-priority-labels)
- [Bug Severity labels](#bug-severity-labels)
- [Severity impact guidance](#severity-impact-guidance)
- [Label for community contributors](#label-for-community-contributors)
- [Implement design & UI elements](#implement-design--ui-elements)
- [Issue tracker](#issue-tracker)
- [Issue triaging](#issue-triaging)
- [Feature proposals](#feature-proposals)
- [Issue tracker guidelines](#issue-tracker-guidelines)
- [Issue weight](#issue-weight)
- [Regression issues](#regression-issues)
- [Technical and UX debt](#technical-and-ux-debt)
- [Stewardship](#stewardship)
- [Merge requests](#merge-requests)
- [Merge request guidelines](#merge-request-guidelines)
- [Contribution acceptance criteria](#contribution-acceptance-criteria)
- [Definition of done](#definition-of-done)
- [Style guides](#style-guides)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
## Contribute to GitLab
For a first-time step-by-step guide to the contribution process, see
["Contributing to GitLab"](https://about.gitlab.com/contributing/).
......
-<!-- START doctoc generated TOC please keep comment here to allow auto update -->
+# Workflow labels
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)*
- [Workflow labels](#workflow-labels)
- [Type labels](#type-labels)
- [Subject labels](#subject-labels)
- [Team labels](#team-labels)
- [Release Scoping labels](#release-scoping-labels)
- [Priority labels](#priority-labels)
- [Severity labels](#severity-labels)
- [Severity impact guidance](#severity-impact-guidance)
- [Label for community contributors](#label-for-community-contributors)
- [Issue triaging](#issue-triaging)
- [Feature proposals](#feature-proposals)
- [Issue tracker guidelines](#issue-tracker-guidelines)
- [Issue weight](#issue-weight)
- [Regression issues](#regression-issues)
- [Technical and UX debt](#technical-and-ux-debt)
- [Stewardship](#stewardship)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
## Workflow labels
To allow for asynchronous issue handling, we use [milestones][milestones-page] To allow for asynchronous issue handling, we use [milestones][milestones-page]
and [labels][labels-page]. Leads and product managers handle most of the and [labels][labels-page]. Leads and product managers handle most of the
...@@ -45,7 +22,7 @@ labels, you can _always_ add the team and type, and often also the subject. ...@@ -45,7 +22,7 @@ labels, you can _always_ add the team and type, and often also the subject.
[milestones-page]: https://gitlab.com/gitlab-org/gitlab-ce/milestones [milestones-page]: https://gitlab.com/gitlab-org/gitlab-ce/milestones
[labels-page]: https://gitlab.com/gitlab-org/gitlab-ce/labels [labels-page]: https://gitlab.com/gitlab-org/gitlab-ce/labels
### Type labels ## Type labels
Type labels are very important. They define what kind of issue this is. Every Type labels are very important. They define what kind of issue this is. Every
issue should have one or more. issue should have one or more.
...@@ -61,7 +38,7 @@ already reserved for subject labels). ...@@ -61,7 +38,7 @@ already reserved for subject labels).
The descriptions on the [labels page][labels-page] explain what falls under each type label. The descriptions on the [labels page][labels-page] explain what falls under each type label.
### Subject labels ## Subject labels
Subject labels are labels that define what area or feature of GitLab this issue Subject labels are labels that define what area or feature of GitLab this issue
hits. They are not always necessary, but very convenient. hits. They are not always necessary, but very convenient.
...@@ -75,7 +52,7 @@ issue is labeled with a subject label corresponding to your expertise. ...@@ -75,7 +52,7 @@ issue is labeled with a subject label corresponding to your expertise.
Subject labels are always all-lowercase. Subject labels are always all-lowercase.
### Team labels ## Team labels
Team labels specify what team is responsible for this issue. Team labels specify what team is responsible for this issue.
Assigning a team label makes sure issues get the attention of the appropriate Assigning a team label makes sure issues get the attention of the appropriate
...@@ -107,7 +84,7 @@ indicate if an issue needs backend work, frontend work, or both. ...@@ -107,7 +84,7 @@ indicate if an issue needs backend work, frontend work, or both.
Team labels are always capitalized so that they show up as the first label for Team labels are always capitalized so that they show up as the first label for
any issue. any issue.
### Release Scoping labels ## Release Scoping labels
Release Scoping labels help us clearly communicate expectations of the work for the Release Scoping labels help us clearly communicate expectations of the work for the
release. There are three levels of Release Scoping labels: release. There are three levels of Release Scoping labels:
...@@ -138,7 +115,7 @@ This label documents the planned timeline & urgency which is used to measure aga ...@@ -138,7 +115,7 @@ This label documents the planned timeline & urgency which is used to measure aga
| ~P3 | Medium Priority | Within the next 3 releases (approx one quarter) | | ~P3 | Medium Priority | Within the next 3 releases (approx one quarter) |
| ~P4 | Low Priority | Anything outside the next 3 releases (approx beyond one quarter) | | ~P4 | Low Priority | Anything outside the next 3 releases (approx beyond one quarter) |
### Severity labels ## Severity labels
Severity labels help us clearly communicate the impact of a ~bug on users. Severity labels help us clearly communicate the impact of a ~bug on users.
...@@ -149,7 +126,7 @@ Severity labels help us clearly communicate the impact of a ~bug on users. ...@@ -149,7 +126,7 @@ Severity labels help us clearly communicate the impact of a ~bug on users.
| ~S3 | Major Severity | Broken Feature, workaround acceptable | Can create merge requests only from the Merge Requests page, not through the Issue. | | ~S3 | Major Severity | Broken Feature, workaround acceptable | Can create merge requests only from the Merge Requests page, not through the Issue. |
| ~S4 | Low Severity | Functionality inconvenience or cosmetic issue | Label colors are incorrect / not being displayed. | | ~S4 | Low Severity | Functionality inconvenience or cosmetic issue | Label colors are incorrect / not being displayed. |
#### Severity impact guidance ### Severity impact guidance
Severity levels can be applied further depending on the facet of the impact; e.g. affected customers, GitLab.com availability, or performance. The table below is a guideline. Severity levels can be applied further depending on the facet of the impact; e.g. affected customers, GitLab.com availability, or performance. The table below is a guideline.
...@@ -160,7 +137,7 @@ Severity levels can be applied further depending on the facet of the impact; e.g ...@@ -160,7 +137,7 @@ Severity levels can be applied further depending on the facet of the impact; e.g
| ~S3 | A few users or a single paid customer affected | Limited impact on important portions of GitLab.com | Degradation is likely to occur in the near future | | ~S3 | A few users or a single paid customer affected | Limited impact on important portions of GitLab.com | Degradation is likely to occur in the near future |
| ~S4 | No paid users/customer affected, or expected to in the near future | Minor impact on GitLab.com | Degradation _may_ occur but it's not likely | | ~S4 | No paid users/customer affected, or expected to in the near future | Minor impact on GitLab.com | Degradation _may_ occur but it's not likely |
### Label for community contributors ## Label for community contributors
Issues that are beneficial to our users, 'nice to haves', that we currently do Issues that are beneficial to our users, 'nice to haves', that we currently do
not have the capacity for or want to give the priority to, are labeled as not have the capacity for or want to give the priority to, are labeled as
...@@ -210,8 +187,7 @@ any potential community contributor to @-mention per above. ...@@ -210,8 +187,7 @@ any potential community contributor to @-mention per above.
[up-for-grabs]: https://gitlab.com/gitlab-org/gitlab-ce/issues?label_name=Accepting+Merge+Requests&scope=all&sort=weight_asc&state=opened [up-for-grabs]: https://gitlab.com/gitlab-org/gitlab-ce/issues?label_name=Accepting+Merge+Requests&scope=all&sort=weight_asc&state=opened
[firt-timers]: https://gitlab.com/gitlab-org/gitlab-ce/issues?label_name%5B%5D=Accepting+Merge+Requests&scope=all&sort=upvotes_desc&state=opened&weight=1 [firt-timers]: https://gitlab.com/gitlab-org/gitlab-ce/issues?label_name%5B%5D=Accepting+Merge+Requests&scope=all&sort=upvotes_desc&state=opened&weight=1
## Issue triaging
### Issue triaging
Our issue triage policies are [described in our handbook]. You are very welcome Our issue triage policies are [described in our handbook]. You are very welcome
to help the GitLab team triage issues. We also organize [issue bash events] once to help the GitLab team triage issues. We also organize [issue bash events] once
...@@ -233,7 +209,7 @@ project. ...@@ -233,7 +209,7 @@ project.
[scheduled pipeline]: https://gitlab.com/gitlab-org/quality/triage-ops/pipeline_schedules/10512/edit [scheduled pipeline]: https://gitlab.com/gitlab-org/quality/triage-ops/pipeline_schedules/10512/edit
[quality/triage-ops]: https://gitlab.com/gitlab-org/quality/triage-ops [quality/triage-ops]: https://gitlab.com/gitlab-org/quality/triage-ops
### Feature proposals ## Feature proposals
To create a feature proposal for CE, open an issue on the To create a feature proposal for CE, open an issue on the
[issue tracker of CE][ce-tracker]. [issue tracker of CE][ce-tracker].
...@@ -259,7 +235,7 @@ need to ask one of the [core team] members to add the label, if you do not have ...@@ -259,7 +235,7 @@ need to ask one of the [core team] members to add the label, if you do not have
If you want to create something yourself, consider opening an issue first to If you want to create something yourself, consider opening an issue first to
discuss whether it is interesting to include this in GitLab. discuss whether it is interesting to include this in GitLab.
### Issue tracker guidelines ## Issue tracker guidelines
**[Search the issue tracker][ce-tracker]** for similar entries before **[Search the issue tracker][ce-tracker]** for similar entries before
submitting your own, there's a good chance somebody else had the same issue or submitting your own, there's a good chance somebody else had the same issue or
...@@ -271,7 +247,7 @@ The text in the parenthesis is there to help you with what to include. Omit it ...@@ -271,7 +247,7 @@ The text in the parenthesis is there to help you with what to include. Omit it
when submitting the actual issue. You can copy-paste it and then edit as you when submitting the actual issue. You can copy-paste it and then edit as you
see fit. see fit.
### Issue weight ## Issue weight
Issue weight allows us to get an idea of the amount of work required to solve Issue weight allows us to get an idea of the amount of work required to solve
one or multiple issues. This makes it possible to schedule work more accurately. one or multiple issues. This makes it possible to schedule work more accurately.
...@@ -293,7 +269,7 @@ is probably 1, adding a new Git Hook maybe 4 or 5, big features 7-9. ...@@ -293,7 +269,7 @@ is probably 1, adding a new Git Hook maybe 4 or 5, big features 7-9.
issues or chunks. You can simply not set the weight of a parent issue and set issues or chunks. You can simply not set the weight of a parent issue and set
weights to children issues. weights to children issues.
### Regression issues ## Regression issues
Every monthly release has a corresponding issue on the CE issue tracker to keep Every monthly release has a corresponding issue on the CE issue tracker to keep
track of functionality broken by that release and any fixes that need to be track of functionality broken by that release and any fixes that need to be
...@@ -313,7 +289,7 @@ addressed. ...@@ -313,7 +289,7 @@ addressed.
[8.3 Regressions]: https://gitlab.com/gitlab-org/gitlab-ce/issues/4127 [8.3 Regressions]: https://gitlab.com/gitlab-org/gitlab-ce/issues/4127
[update the notes]: https://gitlab.com/gitlab-org/release-tools/blob/master/doc/pro-tips.md#update-the-regression-issue [update the notes]: https://gitlab.com/gitlab-org/release-tools/blob/master/doc/pro-tips.md#update-the-regression-issue
### Technical and UX debt ## Technical and UX debt
In order to track things that can be improved in GitLab's codebase, In order to track things that can be improved in GitLab's codebase,
we use the ~"technical debt" label in [GitLab's issue tracker][ce-tracker]. we use the ~"technical debt" label in [GitLab's issue tracker][ce-tracker].
...@@ -337,7 +313,7 @@ for a release by the appropriate person. ...@@ -337,7 +313,7 @@ for a release by the appropriate person.
Make sure to mention the merge request that the ~"technical debt" issue or Make sure to mention the merge request that the ~"technical debt" issue or
~"UX debt" issue is associated with in the description of the issue. ~"UX debt" issue is associated with in the description of the issue.
### Stewardship ## Stewardship
For issues related to the open source stewardship of GitLab, For issues related to the open source stewardship of GitLab,
there is the ~"stewardship" label. there is the ~"stewardship" label.
......
<!-- START doctoc generated TOC please keep comment here to allow auto update --> # Merge requests
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)*
- [Merge requests](#merge-requests)
- [Merge request guidelines](#merge-request-guidelines)
- [Contribution acceptance criteria](#contribution-acceptance-criteria)
- [Definition of done](#definition-of-done)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
## Merge requests
We welcome merge requests with fixes and improvements to GitLab code, tests, We welcome merge requests with fixes and improvements to GitLab code, tests,
and/or documentation. The issues that are specifically suitable for and/or documentation. The issues that are specifically suitable for
...@@ -36,7 +25,7 @@ some potentially easy issues. ...@@ -36,7 +25,7 @@ some potentially easy issues.
To start with GitLab development download the [GitLab Development Kit][gdk] and To start with GitLab development download the [GitLab Development Kit][gdk] and
see the [Development section](../README.md) for some guidelines. see the [Development section](../README.md) for some guidelines.
### Merge request guidelines ## Merge request guidelines
If you can, please submit a merge request with the fix or improvements If you can, please submit a merge request with the fix or improvements
including tests. If you don't know how to fix the issue but can write a test including tests. If you don't know how to fix the issue but can write a test
...@@ -114,7 +103,7 @@ Please ensure that your merge request meets the contribution acceptance criteria ...@@ -114,7 +103,7 @@ Please ensure that your merge request meets the contribution acceptance criteria
When having your code reviewed and when reviewing merge requests please take the When having your code reviewed and when reviewing merge requests please take the
[code review guidelines](../code_review.md) into account. [code review guidelines](../code_review.md) into account.
### Contribution acceptance criteria ## Contribution acceptance criteria
1. The change is as small as possible 1. The change is as small as possible
1. Include proper tests and make all tests pass (unless it contains a test 1. Include proper tests and make all tests pass (unless it contains a test
......
...@@ -43,7 +43,7 @@ how to structure GitLab docs. ...@@ -43,7 +43,7 @@ how to structure GitLab docs.
Currently GitLab docs use Redcarpet as [markdown](../../user/markdown.md) engine, but there's an [open discussion](https://gitlab.com/gitlab-com/gitlab-docs/issues/50) for implementing Kramdown in the near future. Currently GitLab docs use Redcarpet as [markdown](../../user/markdown.md) engine, but there's an [open discussion](https://gitlab.com/gitlab-com/gitlab-docs/issues/50) for implementing Kramdown in the near future.
All the docs follow the [documentation style guidelines](styleguide.md). All the docs follow the [documentation style guidelines](styleguide.md). See [Linting](#linting) for help to follow the guidelines.
## Documentation directory structure ## Documentation directory structure
...@@ -223,6 +223,108 @@ redirect_from: 'https://docs.gitlab.com/my-old-location/README.html' ...@@ -223,6 +223,108 @@ redirect_from: 'https://docs.gitlab.com/my-old-location/README.html'
Note: it is necessary to include the file name in the `redirect_from` URL, Note: it is necessary to include the file name in the `redirect_from` URL,
even if it's `index.html` or `README.html`. even if it's `index.html` or `README.html`.
## Linting
To help adhere to the [documentation style guidelines](styleguide.md), and to improve the content
added to documentation, consider locally installing and running documentation linters. This helps
you catch common issues before raising merge requests for documentation review.
The following are suggested linters you can install locally, along with sample configurations:
- `proselint`
- `markdownlint`
NOTE: **Note:**
This list is not exhaustive; you can add other linters to your local documentation writing toolchain.
### `proselint`
`proselint` checks for common problems with English prose. It provides a
[plethora of checks](http://proselint.com/checks/) that are helpful for technical writing.
`proselint` can be used [on the command line](http://proselint.com/utility/), either on a single
Markdown file or on all Markdown files in a project. For example, to run `proselint` on all
documentation in the [`gitlab-ce` project](https://gitlab.com/gitlab-org/gitlab-ce), run the
following commands from within the `gitlab-ce` project:
```sh
cd doc
proselint **/*.md
```
`proselint` can also be run from within editors using plugins. For example, the following plugins
are available:
- [Sublime Text](https://packagecontrol.io/packages/SublimeLinter-contrib-proselint)
- [Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=PatrykPeszko.vscode-proselint)
- [Others](https://github.com/amperser/proselint#plugins-for-other-software)
#### Sample `proselint` configuration
All of the checks are good to use. However, excluding the `typography.symbols` checks might reduce
noise. The following sample `proselint` configuration disables the `typography.symbols` checks:
```json
{
"checks": {
"typography.symbols": false
}
}
```
A file with `proselint` configuration must be placed in a
[valid location](https://github.com/amperser/proselint#checks). For example, `~/.config/proselint/config`.
### `markdownlint`
`markdownlint` checks that certain rules ([example](https://github.com/DavidAnson/markdownlint/blob/master/README.md#rules--aliases))
are followed for Markdown syntax. Our [style guidelines](styleguide.md) elaborate on which choices
must be made when selecting Markdown syntax for GitLab documentation, and this tool helps
catch deviations from those guidelines.
`markdownlint` can be used [on the command line](https://github.com/igorshubovych/markdownlint-cli#markdownlint-cli--),
either on a single Markdown file or on all Markdown files in a project. For example, to run
`markdownlint` on all documentation in the [`gitlab-ce` project](https://gitlab.com/gitlab-org/gitlab-ce),
run the following commands from within the `gitlab-ce` project:
```sh
cd doc
markdownlint **/*.md
```
`markdownlint` can also be run from within editors using plugins. For example, the following plugins
are available:
- [Sublime Text](https://packagecontrol.io/packages/SublimeLinter-contrib-markdownlint)
- [Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=DavidAnson.vscode-markdownlint)
- [Others](https://github.com/DavidAnson/markdownlint#related)
#### Sample `markdownlint` configuration
The following sample `markdownlint` configuration modifies the available default rules to:
- Adhere to the [style guidelines](styleguide.md).
- Apply conventions found in the GitLab documentation.
```json
{
"default": true,
"header-style": { "style": "atx" },
"ul-style": { "style": "dash" },
"line-length": false,
"no-trailing-punctuation": false,
"ol-prefix": { "style": "one" },
"blanks-around-fences": false,
"hr-style": { "style": "---" },
"fenced-code-language": false
}
```
For [`markdownlint`](https://github.com/DavidAnson/markdownlint/), this configuration must be
placed in a [valid location](https://github.com/igorshubovych/markdownlint-cli#configuration). For
example, `~/.markdownlintrc`.
## Testing ## Testing
We treat documentation as code, thus have implemented some testing. We treat documentation as code, thus have implemented some testing.
...@@ -278,7 +380,6 @@ for GitLab Team members. ...@@ -278,7 +380,6 @@ for GitLab Team members.
- Label the MR `Documentation` - Label the MR `Documentation`
- Assign the correct milestone (see note below) - Assign the correct milestone (see note below)
NOTE: **Note:** NOTE: **Note:**
If the release version you want to add the documentation to has already been If the release version you want to add the documentation to has already been
frozen or released, use the label `Pick into X.Y` to get it merged into frozen or released, use the label `Pick into X.Y` to get it merged into
......
...@@ -10,6 +10,8 @@ GitLab documentation. Check the ...@@ -10,6 +10,8 @@ GitLab documentation. Check the
Check the GitLab handbook for the [writing styles guidelines](https://about.gitlab.com/handbook/communication/#writing-style-guidelines). Check the GitLab handbook for the [writing styles guidelines](https://about.gitlab.com/handbook/communication/#writing-style-guidelines).
For help adhering to the guidelines, see [Linting](index.md#linting).
## Files ## Files
- [Directory structure](index.md#location-and-naming-documents): place the docs - [Directory structure](index.md#location-and-naming-documents): place the docs
......
...@@ -591,10 +591,11 @@ This procedure assumes that: ...@@ -591,10 +591,11 @@ This procedure assumes that:
First make sure your backup tar file is in the backup directory described in the First make sure your backup tar file is in the backup directory described in the
`gitlab.rb` configuration `gitlab_rails['backup_path']`. The default is `gitlab.rb` configuration `gitlab_rails['backup_path']`. The default is
`/var/opt/gitlab/backups`. `/var/opt/gitlab/backups`. It needs to be owned by the `git` user.
```shell ```shell
sudo cp 11493107454_2018_04_25_10.6.4-ce_gitlab_backup.tar /var/opt/gitlab/backups/ sudo cp 11493107454_2018_04_25_10.6.4-ce_gitlab_backup.tar /var/opt/gitlab/backups/
sudo chown git.git /var/opt/gitlab/backups/11493107454_2018_04_25_10.6.4-ce_gitlab_backup.tar
``` ```
Stop the processes that are connected to the database. Leave the rest of GitLab Stop the processes that are connected to the database. Leave the rest of GitLab
......
...@@ -216,7 +216,7 @@ needs to trust the mitmproxy SSL certificates for this to work. ...@@ -216,7 +216,7 @@ needs to trust the mitmproxy SSL certificates for this to work.
The following installation instructions assume you are running Ubuntu: The following installation instructions assume you are running Ubuntu:
1. Install mitmproxy (see http://docs.mitmproxy.org/en/stable/install.html) 1. [Install mitmproxy](https://docs.mitmproxy.org/stable/overview-installation/).
1. Run `mitmproxy --port 9000` to generate its certificates. 1. Run `mitmproxy --port 9000` to generate its certificates.
Enter <kbd>CTRL</kbd>-<kbd>C</kbd> to quit. Enter <kbd>CTRL</kbd>-<kbd>C</kbd> to quit.
1. Install the certificate from `~/.mitmproxy` to your system: 1. Install the certificate from `~/.mitmproxy` to your system:
......
...@@ -73,7 +73,26 @@ module API ...@@ -73,7 +73,26 @@ module API
params do params do
requires :branch, type: String, desc: 'Name of the branch to commit into. To create a new branch, also provide `start_branch`.', allow_blank: false requires :branch, type: String, desc: 'Name of the branch to commit into. To create a new branch, also provide `start_branch`.', allow_blank: false
requires :commit_message, type: String, desc: 'Commit message' requires :commit_message, type: String, desc: 'Commit message'
requires :actions, type: Array[Hash], desc: 'Actions to perform in commit' requires :actions, type: Array, desc: 'Actions to perform in commit' do
requires :action, type: String, desc: 'The action to perform, `create`, `delete`, `move`, `update`, `chmod`', values: %w[create update move delete chmod].freeze
requires :file_path, type: String, desc: 'Full path to the file. Ex. `lib/class.rb`'
given action: ->(action) { action == 'move' } do
requires :previous_path, type: String, desc: 'Original full path to the file being moved. Ex. `lib/class1.rb`'
end
given action: ->(action) { %w[create move].include? action } do
optional :content, type: String, desc: 'File content'
end
given action: ->(action) { action == 'update' } do
requires :content, type: String, desc: 'File content'
end
optional :encoding, type: String, desc: '`text` or `base64`', default: 'text', values: %w[text base64]
given action: ->(action) { %w[update move delete].include? action } do
optional :last_commit_id, type: String, desc: 'Last known file commit id'
end
given action: ->(action) { action == 'chmod' } do
requires :execute_filemode, type: Boolean, desc: 'When `true/false` enables/disables the execute flag on the file.'
end
end
optional :start_branch, type: String, desc: 'Name of the branch to start the new commit from' optional :start_branch, type: String, desc: 'Name of the branch to start the new commit from'
optional :author_email, type: String, desc: 'Author email for commit' optional :author_email, type: String, desc: 'Author email for commit'
optional :author_name, type: String, desc: 'Author name for commit' optional :author_name, type: String, desc: 'Author name for commit'
......
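For illustration, a payload that satisfies the tightened per-action validation above might look like the following Ruby sketch. All paths, contents, and the commit SHA are hypothetical and not taken from the commit:

```ruby
# Hypothetical `actions` array for the commits API; every value below is
# illustrative only. Per the validation above, `update` requires `content`,
# `move` requires `previous_path`, and `chmod` requires `execute_filemode`.
actions = [
  { action: 'create', file_path: 'lib/foo.rb', content: "class Foo\nend\n" },
  { action: 'update', file_path: 'README.md', content: 'Updated text', last_commit_id: 'abc123' },
  { action: 'move',   file_path: 'lib/bar.rb', previous_path: 'lib/old_bar.rb' },
  { action: 'chmod',  file_path: 'bin/run', execute_filemode: true },
  { action: 'delete', file_path: 'tmp/obsolete.txt' }
]
```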
...@@ -53,7 +53,7 @@ module API ...@@ -53,7 +53,7 @@ module API
class User < UserBasic class User < UserBasic
expose :created_at, if: ->(user, opts) { Ability.allowed?(opts[:current_user], :read_user_profile, user) } expose :created_at, if: ->(user, opts) { Ability.allowed?(opts[:current_user], :read_user_profile, user) }
expose :bio, :location, :skype, :linkedin, :twitter, :website_url, :organization expose :bio, :location, :public_email, :skype, :linkedin, :twitter, :website_url, :organization
end end
class UserActivity < Grape::Entity class UserActivity < Grape::Entity
......
class EventFilter # frozen_string_literal: true
attr_accessor :params
class << self
def all
'all'
end
def push
'push'
end
def merged
'merged'
end
def issue class EventFilter
'issue' attr_accessor :filter
end
def comments ALL = 'all'
'comments' PUSH = 'push'
end MERGED = 'merged'
ISSUE = 'issue'
COMMENTS = 'comments'
TEAM = 'team'
FILTERS = [ALL, PUSH, MERGED, ISSUE, COMMENTS, TEAM].freeze
def team def initialize(filter)
'team' # Split using comma to maintain backward compatibility Ex/ "filter1,filter2"
end filter = filter.to_s.split(',')[0].to_s
@filter = FILTERS.include?(filter) ? filter : ALL
end end
def initialize(params) def active?(key)
@params = if params filter == key.to_s
params.dup
else
[] # EventFilter.default_filter
end
end end
# rubocop: disable CodeReuse/ActiveRecord # rubocop: disable CodeReuse/ActiveRecord
def apply_filter(events) def apply_filter(events)
return events if params.blank? || params == EventFilter.all case filter
when PUSH
case params
when EventFilter.push
events.where(action: Event::PUSHED) events.where(action: Event::PUSHED)
when EventFilter.merged when MERGED
events.where(action: Event::MERGED) events.where(action: Event::MERGED)
when EventFilter.comments when COMMENTS
events.where(action: Event::COMMENTED) events.where(action: Event::COMMENTED)
when EventFilter.team when TEAM
events.where(action: [Event::JOINED, Event::LEFT, Event::EXPIRED]) events.where(action: [Event::JOINED, Event::LEFT, Event::EXPIRED])
when EventFilter.issue when ISSUE
events.where(action: [Event::CREATED, Event::UPDATED, Event::CLOSED, Event::REOPENED]) events.where(action: [Event::CREATED, Event::UPDATED, Event::CLOSED, Event::REOPENED])
end
end
# rubocop: enable CodeReuse/ActiveRecord
def options(key)
filter = params.dup
if filter.include? key
filter.delete key
else else
filter << key events
end
filter
end
def active?(key)
if params.present?
params.include? key
else
key == EventFilter.all
end end
end end
# rubocop: enable CodeReuse/ActiveRecord
end end
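A minimal usage sketch of the rewritten filter, assuming an `Event` ActiveRecord relation is in scope; the comma-separated input shows the backward-compatibility path mentioned in the constructor comment:

```ruby
filter = EventFilter.new('push,comments') # only the first value is kept
filter.filter                  # => "push"
filter.active?(:push)          # => true
filter.active?('comments')     # => false
filter.apply_filter(Event.all) # => only events with action Event::PUSHED
```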
# frozen_string_literal: true
module Gitlab module Gitlab
module Ci module Ci
class Config class Config
...@@ -9,7 +11,7 @@ module Gitlab ...@@ -9,7 +11,7 @@ module Gitlab
include Validatable include Validatable
include Attributable include Attributable
ALLOWED_KEYS = %i[junit].freeze ALLOWED_KEYS = %i[junit sast dependency_scanning container_scanning dast].freeze
attributes ALLOWED_KEYS attributes ALLOWED_KEYS
...@@ -19,6 +21,10 @@ module Gitlab ...@@ -19,6 +21,10 @@ module Gitlab
with_options allow_nil: true do with_options allow_nil: true do
validates :junit, array_of_strings_or_string: true validates :junit, array_of_strings_or_string: true
validates :sast, array_of_strings_or_string: true
validates :dependency_scanning, array_of_strings_or_string: true
validates :container_scanning, array_of_strings_or_string: true
validates :dast, array_of_strings_or_string: true
end end
end end
......
module Gitlab
module Ci
module Parsers
def self.fabricate!(file_type)
"Gitlab::Ci::Parsers::#{file_type.classify}".constantize.new
end
end
end
end
module Gitlab
module Ci
module Parsers
class Junit
JunitParserError = Class.new(StandardError)
def parse!(xml_data, test_suite)
root = Hash.from_xml(xml_data)
all_cases(root) do |test_case|
test_case = create_test_case(test_case)
test_suite.add_test_case(test_case)
end
rescue REXML::ParseException => e
raise JunitParserError, "XML parsing failed: #{e.message}"
rescue => e
raise JunitParserError, "JUnit parsing failed: #{e.message}"
end
private
def all_cases(root, parent = nil, &blk)
return unless root.present?
[root].flatten.compact.map do |node|
next unless node.is_a?(Hash)
# we allow only one top-level 'testsuites'
all_cases(node['testsuites'], root, &blk) unless parent
# we require at least one level of testsuites or testsuite
each_case(node['testcase'], &blk) if parent
# we allow multiple nested 'testsuite' (eg. PHPUnit)
all_cases(node['testsuite'], root, &blk)
end
end
def each_case(testcase, &blk)
return unless testcase.present?
[testcase].flatten.compact.map(&blk)
end
def create_test_case(data)
if data['failure']
status = ::Gitlab::Ci::Reports::TestCase::STATUS_FAILED
system_output = data['failure']
else
status = ::Gitlab::Ci::Reports::TestCase::STATUS_SUCCESS
system_output = nil
end
::Gitlab::Ci::Reports::TestCase.new(
classname: data['classname'],
name: data['name'],
file: data['file'],
execution_time: data['time'],
status: status,
system_output: system_output
)
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module Ci
module Parsers
module Test
ParserNotFoundError = Class.new(StandardError)
PARSERS = {
junit: ::Gitlab::Ci::Parsers::Test::Junit
}.freeze
def self.fabricate!(file_type)
PARSERS.fetch(file_type.to_sym).new
rescue KeyError
raise ParserNotFoundError, "Cannot find any parser matching file type '#{file_type}'"
end
end
end
end
end
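With the parser registry now namespaced under `Test` and keyed by a frozen hash, fabricating a parser looks roughly like this (a hedged sketch; `nunit` is simply an example of an unregistered file type):

```ruby
Gitlab::Ci::Parsers::Test.fabricate!('junit')
# => instance of Gitlab::Ci::Parsers::Test::Junit

Gitlab::Ci::Parsers::Test.fabricate!('nunit')
# raises Gitlab::Ci::Parsers::Test::ParserNotFoundError,
#   "Cannot find any parser matching file type 'nunit'"
```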
# frozen_string_literal: true
module Gitlab
module Ci
module Parsers
module Test
class Junit
JunitParserError = Class.new(StandardError)
def parse!(xml_data, test_suite)
root = Hash.from_xml(xml_data)
all_cases(root) do |test_case|
test_case = create_test_case(test_case)
test_suite.add_test_case(test_case)
end
rescue REXML::ParseException => e
raise JunitParserError, "XML parsing failed: #{e.message}"
rescue => e
raise JunitParserError, "JUnit parsing failed: #{e.message}"
end
private
def all_cases(root, parent = nil, &blk)
return unless root.present?
[root].flatten.compact.map do |node|
next unless node.is_a?(Hash)
# we allow only one top-level 'testsuites'
all_cases(node['testsuites'], root, &blk) unless parent
# we require at least one level of testsuites or testsuite
each_case(node['testcase'], &blk) if parent
# we allow multiple nested 'testsuite' (eg. PHPUnit)
all_cases(node['testsuite'], root, &blk)
end
end
def each_case(testcase, &blk)
return unless testcase.present?
[testcase].flatten.compact.map(&blk)
end
def create_test_case(data)
if data['failure']
status = ::Gitlab::Ci::Reports::TestCase::STATUS_FAILED
system_output = data['failure']
else
status = ::Gitlab::Ci::Reports::TestCase::STATUS_SUCCESS
system_output = nil
end
::Gitlab::Ci::Reports::TestCase.new(
classname: data['classname'],
name: data['name'],
file: data['file'],
execution_time: data['time'],
status: status,
system_output: system_output
)
end
end
end
end
end
end
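The parser walks one top-level `testsuites` node, any number of nested `testsuite` nodes, and their `testcase` children. A rough sketch of exercising it on a tiny report follows, with a purely illustrative collector standing in for `Gitlab::Ci::Reports::TestSuite` (assumes ActiveSupport is loaded for `Hash.from_xml`):

```ruby
xml = <<~XML
  <testsuites>
    <testsuite name="rspec">
      <testcase classname="UserSpec" name="is valid" file="spec/models/user_spec.rb" time="0.01"/>
      <testcase classname="UserSpec" name="rejects blanks" time="0.02">
        <failure>expected true, got false</failure>
      </testcase>
    </testsuite>
  </testsuites>
XML

# Illustrative stand-in that only needs to respond to #add_test_case.
collector = Struct.new(:cases) do
  def add_test_case(test_case)
    cases << test_case
  end
end.new([])

Gitlab::Ci::Parsers::Test::Junit.new.parse!(xml, collector)
collector.cases.size # => 2 (one success, one failure)
```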
...@@ -49,7 +49,7 @@ variables: ...@@ -49,7 +49,7 @@ variables:
POSTGRES_DB: $CI_ENVIRONMENT_SLUG POSTGRES_DB: $CI_ENVIRONMENT_SLUG
KUBERNETES_VERSION: 1.8.6 KUBERNETES_VERSION: 1.8.6
HELM_VERSION: 2.6.1 HELM_VERSION: 2.10.0
DOCKER_DRIVER: overlay2 DOCKER_DRIVER: overlay2
...@@ -122,6 +122,9 @@ license_management: ...@@ -122,6 +122,9 @@ license_management:
paths: [gl-license-management-report.json] paths: [gl-license-management-report.json]
only: only:
- branches - branches
only:
variables:
- $GITLAB_FEATURES =~ /\blicense_management\b/
except: except:
variables: variables:
- $LICENSE_MANAGEMENT_DISABLED - $LICENSE_MANAGEMENT_DISABLED
...@@ -484,15 +487,11 @@ rollout 100%: ...@@ -484,15 +487,11 @@ rollout 100%:
} }
function license_management() { function license_management() {
if echo $GITLAB_FEATURES |grep license_management > /dev/null ; then
# Extract "MAJOR.MINOR" from CI_SERVER_VERSION and generate "MAJOR-MINOR-stable" # Extract "MAJOR.MINOR" from CI_SERVER_VERSION and generate "MAJOR-MINOR-stable"
LICENSE_MANAGEMENT_VERSION=$(echo "$CI_SERVER_VERSION" | sed 's/^\([0-9]*\)\.\([0-9]*\).*/\1-\2-stable/') LICENSE_MANAGEMENT_VERSION=$(echo "$CI_SERVER_VERSION" | sed 's/^\([0-9]*\)\.\([0-9]*\).*/\1-\2-stable/')
docker run --volume "$PWD:/code" \ docker run --volume "$PWD:/code" \
"registry.gitlab.com/gitlab-org/security-products/license-management:$LICENSE_MANAGEMENT_VERSION" analyze /code "registry.gitlab.com/gitlab-org/security-products/license-management:$LICENSE_MANAGEMENT_VERSION" analyze /code
else
echo "License management is not available in your subscription"
fi
} }
function sast() { function sast() {
...@@ -605,7 +604,6 @@ rollout 100%: ...@@ -605,7 +604,6 @@ rollout 100%:
--set postgresql.postgresPassword="$POSTGRES_PASSWORD" \ --set postgresql.postgresPassword="$POSTGRES_PASSWORD" \
--set postgresql.postgresDatabase="$POSTGRES_DB" \ --set postgresql.postgresDatabase="$POSTGRES_DB" \
--namespace="$KUBE_NAMESPACE" \ --namespace="$KUBE_NAMESPACE" \
--version="$CI_PIPELINE_ID-$CI_JOB_ID" \
"$name" \ "$name" \
chart/ chart/
......
# An example .gitlab-ci.yml file to test (and optionally report the coverage # This is an example .gitlab-ci.yml file to test (and optionally report the coverage
# results of) your [Julia][1] packages. Please refer to the [documentation][2] # results of) your [Julia][1] packages. Please refer to the [documentation][2]
# for more information about package development in Julia. # for more information about package development in Julia.
# #
...@@ -6,7 +6,7 @@ ...@@ -6,7 +6,7 @@
# whatever name you have given to your package. # whatever name you have given to your package.
# #
# [1]: http://julialang.org/ # [1]: http://julialang.org/
# [2]: http://julia.readthedocs.org/ # [2]: https://docs.julialang.org/en/v1/manual/documentation/index.html
# Below is the template to run your tests in Julia # Below is the template to run your tests in Julia
.test_template: &test_definition .test_template: &test_definition
...@@ -18,37 +18,59 @@ ...@@ -18,37 +18,59 @@
script: script:
# Let's run the tests. Substitute `coverage = false` below, if you do not # Let's run the tests. Substitute `coverage = false` below, if you do not
# want coverage results. # want coverage results.
- /opt/julia/bin/julia -e 'Pkg.clone(pwd()); Pkg.test("MyPackage", - julia -e 'using Pkg; Pkg.clone(pwd()); Pkg.build("MyPackage"); Pkg.test("MyPackage"; coverage = true)'
coverage = true)'
# Comment out below if you do not want coverage results. # Comment out below if you do not want coverage results.
- /opt/julia/bin/julia -e 'Pkg.add("Coverage"); cd(Pkg.dir("MyPackage")); - julia -e 'using Pkg; Pkg.add("Coverage");
import MyPackage; cd(joinpath(dirname(pathof(MyPackage)), ".."));
using Coverage; cl, tl = get_summary(process_folder()); using Coverage; cl, tl = get_summary(process_folder());
println("(", cl/tl*100, "%) covered")' println("(", cl/tl*100, "%) covered")'
# Name a test and select an appropriate image. # Name a test and select an appropriate image.
test:0.4.6: # images come from Docker Hub
image: julialang/julia:v0.4.6 test:0.7:
image: julia:0.7
<<: *test_definition <<: *test_definition
# Maybe you would like to test your package against the development branch: test:1.0:
test:0.5.0-dev: image: julia:1.0
image: julialang/julia:v0.5.0-dev
# ... allowing for failures, since we are testing against the development
# branch:
allow_failure: true
<<: *test_definition <<: *test_definition
# Maybe you would like to test your package against the development branch:
# test:1.1-dev (not sure there is such an image in docker, so not tested yet):
# image: julia:v1.1-dev
# # ... allowing for failures, since we are testing against the development
# # branch:
# allow_failure: true
# <<: *test_definition
# REMARK: Do not forget to enable the coverage feature for your project, if you # REMARK: Do not forget to enable the coverage feature for your project, if you
# are using code coverage reporting above. This can be done by # are using code coverage reporting above. This can be done by
# #
# - Navigating to the `CI/CD Pipelines` settings of your project, # - Navigating to the `CI/CD Pipelines` settings of your project,
# - Copying and pasting the default `Simplecov` regex example provided, i.e., # - Copying and pasting the default `Simplecov` regex example provided, i.e.,
# `\(\d+.\d+\%\) covered` in the `test coverage parsing` textfield. # `\(\d+.\d+\%\) covered` in the `test coverage parsing` textfield.
#
# WARNING: This template is using the `julialang/julia` images from [Docker # Example documentation deployment
pages:
image: julia:0.7
stage: deploy
script:
- apt-get update -qq && apt-get install -y git # needed by Documenter
- julia -e 'using Pkg; Pkg.clone(pwd()); Pkg.build("MyPackage");' # rebuild Julia (can be put somewhere else, I'm sure)
- julia -e 'using Pkg; import MyPackage; Pkg.add("Documenter")' # install Documenter
- julia --color=yes docs/make.jl # make documentation
- mv docs/build public # move to the directory picked up by GitLab Pages
artifacts:
paths:
- public
only:
- master
# WARNING: This template is using the `julia` images from [Docker
# Hub][3]. One can use custom Julia images and/or the official ones found # Hub][3]. One can use custom Julia images and/or the official ones found
# in the same place. However, care must be taken to correctly locate the binary # in the same place. However, care must be taken to correctly locate the binary
# file (`/opt/julia/bin/julia` above), which is usually given on the image's # file (`/opt/julia/bin/julia` above), which is usually given on the image's
# description page. # description page.
# #
# [3]: http://hub.docker.com/ # [3]: https://hub.docker.com/_/julia/
# Jigsaw is a simple static sites generator with Laravel's Blade.
#
# Full project: https://github.com/tightenco/jigsaw
image: php:7.2
# These folders are cached between builds
cache:
paths:
- vendor/
- node_modules/
before_script:
# Update packages
- apt-get update -yqq
# Install dependencies
- apt-get install -yqq gnupg zlib1g-dev libpng-dev
# Install Node 8
- curl -sL https://deb.nodesource.com/setup_8.x | bash -
- apt-get install -yqq nodejs
# Install php extensions
- docker-php-ext-install zip
# Install Composer and project dependencies.
- curl -sS https://getcomposer.org/installer | php
- php composer.phar install
# Install Node dependencies.
- npm install
pages:
script:
- npm run production
- mv build_production public
artifacts:
paths:
- public
only:
- master
# Lifted from: https://about.gitlab.com/2016/03/10/setting-up-gitlab-ci-for-ios-projects/ # Lifted from: https://about.gitlab.com/2016/03/10/setting-up-gitlab-ci-for-ios-projects/
# This file assumes an own GitLab CI runner, set up on a macOS system. # This file assumes an own GitLab CI runner, setup on a macOS system.
stages: stages:
- build - build
- archive - archive
......
...@@ -333,7 +333,8 @@ module Gitlab ...@@ -333,7 +333,8 @@ module Gitlab
action: action[:action].upcase.to_sym, action: action[:action].upcase.to_sym,
file_path: encode_binary(action[:file_path]), file_path: encode_binary(action[:file_path]),
previous_path: encode_binary(action[:previous_path]), previous_path: encode_binary(action[:previous_path]),
base64_content: action[:encoding] == 'base64' base64_content: action[:encoding] == 'base64',
execute_filemode: !!action[:execute_filemode]
) )
rescue RangeError rescue RangeError
raise ArgumentError, "Unknown action '#{action[:action]}'" raise ArgumentError, "Unknown action '#{action[:action]}'"
......
...@@ -15,7 +15,7 @@ namespace :gitlab do ...@@ -15,7 +15,7 @@ namespace :gitlab do
build.artifacts_file.migrate!(ObjectStorage::Store::REMOTE) build.artifacts_file.migrate!(ObjectStorage::Store::REMOTE)
build.artifacts_metadata.migrate!(ObjectStorage::Store::REMOTE) build.artifacts_metadata.migrate!(ObjectStorage::Store::REMOTE)
logger.info("Transferred artifacts of #{build.id} of #{build.artifacts_size} to object storage") logger.info("Transferred artifact ID #{build.id} with size #{build.artifacts_size} to object storage")
rescue => e rescue => e
logger.error("Failed to transfer artifacts of #{build.id} with error: #{e.message}") logger.error("Failed to transfer artifacts of #{build.id} with error: #{e.message}")
end end
......
...@@ -4212,9 +4212,6 @@ msgstr "" ...@@ -4212,9 +4212,6 @@ msgstr ""
msgid "Jobs" msgid "Jobs"
msgstr "" msgstr ""
msgid "Job|Are you sure you want to erase this job?"
msgstr ""
msgid "Job|Browse" msgid "Job|Browse"
msgstr "" msgstr ""
......
...@@ -108,6 +108,7 @@ module QA ...@@ -108,6 +108,7 @@ module QA
end end
def switch_to_register_tab def switch_to_register_tab
set_initial_password_if_present
click_element :register_tab click_element :register_tab
end end
......
...@@ -3,7 +3,7 @@ module QA ...@@ -3,7 +3,7 @@ module QA
module Project module Project
class Activity < Page::Base class Activity < Page::Base
view 'app/views/shared/_event_filter.html.haml' do view 'app/views/shared/_event_filter.html.haml' do
element :push_events, "event_filter_link EventFilter.push, _('Push events')" element :push_events, "event_filter_link EventFilter::PUSH, _('Push events')"
end end
def go_to_push_events def go_to_push_events
......
...@@ -6,9 +6,7 @@ module QA ...@@ -6,9 +6,7 @@ module QA
it 'succeeds' do it 'succeeds' do
Runtime::Browser.visit(:gitlab, Page::Main::Login) Runtime::Browser.visit(:gitlab, Page::Main::Login)
Page::Main::Login.perform do |main_login| expect(page).to have_text('Open source software to collaborate on code')
expect(main_login.sign_in_tab?).to be(true)
end
end end
end end
......
...@@ -337,6 +337,22 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do ...@@ -337,6 +337,22 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
end end
end end
end end
context 'when no trace is available' do
it 'has_trace is false' do
expect(response).to match_response_schema('job/job_details')
expect(json_response['has_trace']).to be false
end
end
context 'when job has trace' do
let(:job) { create(:ci_build, :running, :trace_live, pipeline: pipeline) }
it "has_trace is true" do
expect(response).to match_response_schema('job/job_details')
expect(json_response['has_trace']).to be true
end
end
end end
context 'when requesting JSON job is triggered' do context 'when requesting JSON job is triggered' do
......
...@@ -14,6 +14,33 @@ FactoryBot.define do ...@@ -14,6 +14,33 @@ FactoryBot.define do
artifact.project ||= artifact.job.project artifact.project ||= artifact.job.project
end end
trait :raw do
file_format :raw
after(:build) do |artifact, _|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/trace/sample_trace'), 'text/plain')
end
end
trait :zip do
file_format :zip
after(:build) do |artifact, _|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
end
end
trait :gzip do
file_format :gzip
after(:build) do |artifact, _|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'), 'application/x-gzip')
end
end
trait :archive do trait :archive do
file_type :archive file_type :archive
file_format :zip file_format :zip
......
...@@ -24,7 +24,7 @@ FactoryBot.define do ...@@ -24,7 +24,7 @@ FactoryBot.define do
factory :push_event, class: PushEvent do factory :push_event, class: PushEvent do
project factory: :project_empty_repo project factory: :project_empty_repo
author factory: :user author(factory: :user) { project.creator }
action Event::PUSHED action Event::PUSHED
end end
......
...@@ -3,8 +3,10 @@ require 'spec_helper' ...@@ -3,8 +3,10 @@ require 'spec_helper'
describe 'Projects > Activity > User sees activity' do describe 'Projects > Activity > User sees activity' do
let(:project) { create(:project, :repository, :public) } let(:project) { create(:project, :repository, :public) }
let(:user) { project.creator } let(:user) { project.creator }
let(:issue) { create(:issue, project: project) }
before do before do
create(:event, :created, project: project, target: issue, author: user)
event = create(:push_event, project: project, author: user) event = create(:push_event, project: project, author: user)
create(:push_event_payload, create(:push_event_payload,
event: event, event: event,
...@@ -12,10 +14,18 @@ describe 'Projects > Activity > User sees activity' do ...@@ -12,10 +14,18 @@ describe 'Projects > Activity > User sees activity' do
commit_to: '6d394385cf567f80a8fd85055db1ab4c5295806f', commit_to: '6d394385cf567f80a8fd85055db1ab4c5295806f',
ref: 'fix', ref: 'fix',
commit_count: 1) commit_count: 1)
visit activity_project_path(project)
end end
it 'shows the last push in the activity page', :js do it 'shows the last push in the activity page', :js do
visit activity_project_path(project)
expect(page).to have_content "#{user.name} pushed new branch fix" expect(page).to have_content "#{user.name} pushed new branch fix"
end end
it 'allows to filter event with the "event_filter=issue" URL param', :js do
visit activity_project_path(project, event_filter: 'issue')
expect(page).not_to have_content "#{user.name} pushed new branch fix"
expect(page).to have_content "#{user.name} opened issue #{issue.to_reference}"
end
end end
...@@ -3,12 +3,16 @@ ...@@ -3,12 +3,16 @@
{ "$ref": "job.json" } { "$ref": "job.json" }
], ],
"description": "An extension of job.json with more detailed information", "description": "An extension of job.json with more detailed information",
"required": [
"has_trace"
],
"properties": { "properties": {
"artifact": { "$ref": "artifact.json" }, "artifact": { "$ref": "artifact.json" },
"terminal_path": { "type": "string" }, "terminal_path": { "type": "string" },
"trigger": { "$ref": "trigger.json" }, "trigger": { "$ref": "trigger.json" },
"deployment_status": { "$ref": "deployment_status.json" }, "deployment_status": { "$ref": "deployment_status.json" },
"runner": { "$ref": "runner.json" }, "runner": { "$ref": "runner.json" },
"runners": { "type": "runners.json" } "runners": { "type": "runners.json" },
"has_trace": { "type": "boolean" }
} }
} }
...@@ -66,7 +66,7 @@ describe('Empty State', () => { ...@@ -66,7 +66,7 @@ describe('Empty State', () => {
...props, ...props,
content, content,
action: { action: {
link: 'runner', path: 'runner',
title: 'Check runner', title: 'Check runner',
method: 'post', method: 'post',
}, },
......
...@@ -18,9 +18,10 @@ describe('Erased block', () => { ...@@ -18,9 +18,10 @@ describe('Erased block', () => {
describe('with job erased by user', () => { describe('with job erased by user', () => {
beforeEach(() => { beforeEach(() => {
vm = mountComponent(Component, { vm = mountComponent(Component, {
erasedByUser: true, user: {
username: 'root', username: 'root',
linkToUser: 'gitlab.com/root', web_url: 'gitlab.com/root',
},
erasedAt, erasedAt,
}); });
}); });
...@@ -40,7 +41,6 @@ describe('Erased block', () => { ...@@ -40,7 +41,6 @@ describe('Erased block', () => {
describe('with erased job', () => { describe('with erased job', () => {
beforeEach(() => { beforeEach(() => {
vm = mountComponent(Component, { vm = mountComponent(Component, {
erasedByUser: false,
erasedAt, erasedAt,
}); });
}); });
......
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import JobMediator from '~/jobs/job_details_mediator';
import job from '../mock_data';
describe('JobMediator', () => {
let mediator;
let mock;
beforeEach(() => {
mediator = new JobMediator({ endpoint: 'jobs/40291672.json' });
mock = new MockAdapter(axios);
});
afterEach(() => {
mock.restore();
});
it('should set defaults', () => {
expect(mediator.store).toBeDefined();
expect(mediator.service).toBeDefined();
expect(mediator.options).toEqual({ endpoint: 'jobs/40291672.json' });
expect(mediator.state.isLoading).toEqual(false);
});
describe('request and store data', () => {
beforeEach(() => {
mock.onGet().reply(200, job, {});
});
it('should store received data', (done) => {
mediator.fetchJob();
setTimeout(() => {
expect(mediator.store.state.job).toEqual(job);
done();
}, 0);
});
});
});
...@@ -10,18 +10,21 @@ describe('Job log controllers', () => { ...@@ -10,18 +10,21 @@ describe('Job log controllers', () => {
vm.$destroy(); vm.$destroy();
}); });
describe('Truncate information', () => { const props = {
rawPath: '/raw',
erasePath: '/erase',
size: 511952,
isScrollTopDisabled: false,
isScrollBottomDisabled: false,
isScrollingDown: true,
isTraceSizeVisible: true,
};
describe('Truncate information', () => {
describe('with isTraceSizeVisible', () => {
beforeEach(() => { beforeEach(() => {
vm = mountComponent(Component, { vm = mountComponent(Component, props);
rawTracePath: '/raw',
canEraseJob: true,
size: 511952,
canScrollToTop: true,
canScrollToBottom: true,
});
}); });
it('renders size information', () => { it('renders size information', () => {
expect(vm.$el.querySelector('.js-truncated-info').textContent).toContain('499.95 KiB'); expect(vm.$el.querySelector('.js-truncated-info').textContent).toContain('499.95 KiB');
}); });
...@@ -29,31 +32,29 @@ describe('Job log controllers', () => { ...@@ -29,31 +32,29 @@ describe('Job log controllers', () => {
it('renders link to raw trace', () => { it('renders link to raw trace', () => {
expect(vm.$el.querySelector('.js-raw-link').getAttribute('href')).toEqual('/raw'); expect(vm.$el.querySelector('.js-raw-link').getAttribute('href')).toEqual('/raw');
}); });
});
}); });
describe('links section', () => { describe('links section', () => {
describe('with raw trace path', () => { describe('with raw trace path', () => {
it('renders raw trace link', () => { it('renders raw trace link', () => {
vm = mountComponent(Component, { vm = mountComponent(Component, props);
rawTracePath: '/raw',
canEraseJob: true,
size: 511952,
canScrollToTop: true,
canScrollToBottom: true,
});
expect(vm.$el.querySelector('.js-raw-link-controller').getAttribute('href')).toEqual('/raw'); expect(vm.$el.querySelector('.js-raw-link-controller').getAttribute('href')).toEqual(
'/raw',
);
}); });
}); });
describe('without raw trace path', () => { describe('without raw trace path', () => {
it('does not render raw trace link', () => { it('does not render raw trace link', () => {
vm = mountComponent(Component, { vm = mountComponent(Component, {
canEraseJob: true, erasePath: '/erase',
size: 511952, size: 511952,
canScrollToTop: true, isScrollTopDisabled: true,
canScrollToBottom: true, isScrollBottomDisabled: true,
isScrollingDown: false,
isTraceSizeVisible: true,
}); });
expect(vm.$el.querySelector('.js-raw-link-controller')).toBeNull(); expect(vm.$el.querySelector('.js-raw-link-controller')).toBeNull();
...@@ -62,52 +63,23 @@ describe('Job log controllers', () => { ...@@ -62,52 +63,23 @@ describe('Job log controllers', () => {
describe('when is erasable', () => { describe('when is erasable', () => {
beforeEach(() => { beforeEach(() => {
vm = mountComponent(Component, { vm = mountComponent(Component, props);
rawTracePath: '/raw',
canEraseJob: true,
size: 511952,
canScrollToTop: true,
canScrollToBottom: true,
});
}); });
it('renders erase job button', () => { it('renders erase job link', () => {
expect(vm.$el.querySelector('.js-erase-link')).not.toBeNull(); expect(vm.$el.querySelector('.js-erase-link')).not.toBeNull();
}); });
describe('on click', () => {
describe('when user confirms action', () => {
it('emits eraseJob event', () => {
spyOn(window, 'confirm').and.returnValue(true);
spyOn(vm, '$emit');
vm.$el.querySelector('.js-erase-link').click();
expect(vm.$emit).toHaveBeenCalledWith('eraseJob');
});
});
describe('when user does not confirm action', () => {
it('does not emit eraseJob event', () => {
spyOn(window, 'confirm').and.returnValue(false);
spyOn(vm, '$emit');
vm.$el.querySelector('.js-erase-link').click();
expect(vm.$emit).not.toHaveBeenCalledWith('eraseJob');
});
});
});
}); });
describe('when it is not erasable', () => { describe('when it is not erasable', () => {
it('does not render erase button', () => { it('does not render erase button', () => {
vm = mountComponent(Component, { vm = mountComponent(Component, {
rawTracePath: '/raw', rawPath: '/raw',
canEraseJob: false,
size: 511952, size: 511952,
canScrollToTop: true, isScrollTopDisabled: true,
canScrollToBottom: true, isScrollBottomDisabled: true,
isScrollingDown: false,
isTraceSizeVisible: true,
}); });
expect(vm.$el.querySelector('.js-erase-link')).toBeNull(); expect(vm.$el.querySelector('.js-erase-link')).toBeNull();
...@@ -119,13 +91,7 @@ describe('Job log controllers', () => { ...@@ -119,13 +91,7 @@ describe('Job log controllers', () => {
describe('scroll top button', () => { describe('scroll top button', () => {
describe('when user can scroll top', () => { describe('when user can scroll top', () => {
beforeEach(() => { beforeEach(() => {
vm = mountComponent(Component, { vm = mountComponent(Component, props);
rawTracePath: '/raw',
canEraseJob: true,
size: 511952,
canScrollToTop: true,
canScrollToBottom: true,
});
}); });
it('renders enabled scroll top button', () => { it('renders enabled scroll top button', () => {
...@@ -143,16 +109,20 @@ describe('Job log controllers', () => { ...@@ -143,16 +109,20 @@ describe('Job log controllers', () => {
describe('when user can not scroll top', () => { describe('when user can not scroll top', () => {
beforeEach(() => { beforeEach(() => {
vm = mountComponent(Component, { vm = mountComponent(Component, {
rawTracePath: '/raw', rawPath: '/raw',
canEraseJob: true, erasePath: '/erase',
size: 511952, size: 511952,
canScrollToTop: false, isScrollTopDisabled: true,
canScrollToBottom: true, isScrollBottomDisabled: false,
isScrollingDown: false,
isTraceSizeVisible: true,
}); });
}); });
it('renders disabled scroll top button', () => { it('renders disabled scroll top button', () => {
expect(vm.$el.querySelector('.js-scroll-top').getAttribute('disabled')).toEqual('disabled'); expect(vm.$el.querySelector('.js-scroll-top').getAttribute('disabled')).toEqual(
'disabled',
);
}); });
it('does not emit scrollJobLogTop event on click', () => { it('does not emit scrollJobLogTop event on click', () => {
...@@ -167,13 +137,7 @@ describe('Job log controllers', () => { ...@@ -167,13 +137,7 @@ describe('Job log controllers', () => {
describe('scroll bottom button', () => { describe('scroll bottom button', () => {
describe('when user can scroll bottom', () => { describe('when user can scroll bottom', () => {
beforeEach(() => { beforeEach(() => {
vm = mountComponent(Component, { vm = mountComponent(Component, props);
rawTracePath: '/raw',
canEraseJob: true,
size: 511952,
canScrollToTop: true,
canScrollToBottom: true,
});
}); });
it('renders enabled scroll bottom button', () => { it('renders enabled scroll bottom button', () => {
...@@ -191,17 +155,20 @@ describe('Job log controllers', () => { ...@@ -191,17 +155,20 @@ describe('Job log controllers', () => {
describe('when user can not scroll bottom', () => { describe('when user can not scroll bottom', () => {
beforeEach(() => { beforeEach(() => {
vm = mountComponent(Component, { vm = mountComponent(Component, {
rawTracePath: '/raw', rawPath: '/raw',
canEraseJob: true, erasePath: '/erase',
size: 511952, size: 511952,
canScrollToTop: true, isScrollTopDisabled: false,
canScrollToBottom: false, isScrollBottomDisabled: true,
isScrollingDown: false,
isTraceSizeVisible: true,
}); });
}); });
it('renders disabled scroll bottom button', () => { it('renders disabled scroll bottom button', () => {
expect(vm.$el.querySelector('.js-scroll-bottom').getAttribute('disabled')).toEqual('disabled'); expect(vm.$el.querySelector('.js-scroll-bottom').getAttribute('disabled')).toEqual(
'disabled',
);
}); });
it('does not emit scrollJobLogBottom event on click', () => { it('does not emit scrollJobLogBottom event on click', () => {
...@@ -211,7 +178,29 @@ describe('Job log controllers', () => { ...@@ -211,7 +178,29 @@ describe('Job log controllers', () => {
expect(vm.$emit).not.toHaveBeenCalledWith('scrollJobLogBottom'); expect(vm.$emit).not.toHaveBeenCalledWith('scrollJobLogBottom');
}); });
}); });
describe('while isScrollingDown is true', () => {
it('renders animate class for the scroll down button', () => {
vm = mountComponent(Component, props);
expect(vm.$el.querySelector('.js-scroll-bottom').className).toContain('animate');
}); });
}); });
});
describe('while isScrollingDown is false', () => {
it('does not render animate class for the scroll down button', () => {
vm = mountComponent(Component, {
rawPath: '/raw',
erasePath: '/erase',
size: 511952,
isScrollTopDisabled: true,
isScrollBottomDisabled: false,
isScrollingDown: false,
isTraceSizeVisible: true,
});
expect(vm.$el.querySelector('.js-scroll-bottom').className).not.toContain('animate');
});
});
});
});
});
...@@ -15,7 +15,7 @@ describe('Job Log', () => { ...@@ -15,7 +15,7 @@ describe('Job Log', () => {
it('renders provided trace', () => { it('renders provided trace', () => {
vm = mountComponent(Component, { vm = mountComponent(Component, {
trace, trace,
isReceivingBuildTrace: true, isComplete: true,
}); });
expect(vm.$el.querySelector('code').textContent).toContain('Running with gitlab-runner 11.1.0 (081978aa)'); expect(vm.$el.querySelector('code').textContent).toContain('Running with gitlab-runner 11.1.0 (081978aa)');
...@@ -25,7 +25,7 @@ describe('Job Log', () => { ...@@ -25,7 +25,7 @@ describe('Job Log', () => {
it('renders animation', () => { it('renders animation', () => {
vm = mountComponent(Component, { vm = mountComponent(Component, {
trace, trace,
isReceivingBuildTrace: true, isComplete: true,
}); });
expect(vm.$el.querySelector('.js-log-animation')).not.toBeNull(); expect(vm.$el.querySelector('.js-log-animation')).not.toBeNull();
...@@ -36,7 +36,7 @@ describe('Job Log', () => { ...@@ -36,7 +36,7 @@ describe('Job Log', () => {
it('does not render animation', () => { it('does not render animation', () => {
vm = mountComponent(Component, { vm = mountComponent(Component, {
trace, trace,
isReceivingBuildTrace: false, isComplete: false,
}); });
expect(vm.$el.querySelector('.js-log-animation')).toBeNull(); expect(vm.$el.querySelector('.js-log-animation')).toBeNull();
......
import JobStore from '~/jobs/stores/job_store';
import job from '../mock_data';
describe('Job Store', () => {
let store;
beforeEach(() => {
store = new JobStore();
});
it('should set defaults', () => {
expect(store.state.job).toEqual({});
});
describe('storeJob', () => {
it('should store empty object if none is provided', () => {
store.storeJob();
expect(store.state.job).toEqual({});
});
it('should store provided argument', () => {
store.storeJob(job);
expect(store.state.job).toEqual(job);
});
});
});
@@ -12,6 +12,13 @@ describe('Jobs Store Mutations', () => {
    stateCopy = state();
  });

  describe('SET_JOB_ENDPOINT', () => {
    it('should set jobEndpoint', () => {
      mutations[types.SET_JOB_ENDPOINT](stateCopy, 'job/21312321.json');

      expect(stateCopy.jobEndpoint).toEqual('job/21312321.json');
    });
  });

  describe('REQUEST_STATUS_FAVICON', () => {
    it('should set fetchingStatusFavicon to true', () => {
      mutations[types.REQUEST_STATUS_FAVICON](stateCopy);
...
require 'spec_helper'

describe EventFilter do
  describe 'FILTERS' do
    it 'returns a definite list of filters' do
      expect(described_class::FILTERS).to eq(%w[all push merged issue comments team])
    end
  end

  describe '#filter' do
    it 'returns "all" if given filter is nil' do
      expect(described_class.new(nil).filter).to eq(described_class::ALL)
    end

    it 'returns "all" if given filter is ""' do
      expect(described_class.new('').filter).to eq(described_class::ALL)
    end

    it 'returns "all" if given filter is "foo"' do
      expect(described_class.new('foo').filter).to eq('all')
    end
  end
  describe '#apply_filter' do
-   let(:source_user) { create(:user) }
-   let!(:public_project) { create(:project, :public) }
+   set(:public_project) { create(:project, :public) }

    set(:push_event) { create(:push_event, project: public_project) }
    set(:merged_event) { create(:event, :merged, project: public_project, target: public_project) }
    set(:created_event) { create(:event, :created, project: public_project, target: public_project) }
    set(:updated_event) { create(:event, :updated, project: public_project, target: public_project) }
    set(:closed_event) { create(:event, :closed, project: public_project, target: public_project) }
    set(:reopened_event) { create(:event, :reopened, project: public_project, target: public_project) }
    set(:comments_event) { create(:event, :commented, project: public_project, target: public_project) }
    set(:joined_event) { create(:event, :joined, project: public_project, target: public_project) }
    set(:left_event) { create(:event, :left, project: public_project, target: public_project) }

    let(:filtered_events) { described_class.new(filter).apply_filter(Event.all) }

-   let!(:push_event) { create(:push_event, project: public_project, author: source_user) }
-   let!(:merged_event) { create(:event, :merged, project: public_project, target: public_project, author: source_user) }
-   let!(:created_event) { create(:event, :created, project: public_project, target: public_project, author: source_user) }
-   let!(:updated_event) { create(:event, :updated, project: public_project, target: public_project, author: source_user) }
-   let!(:closed_event) { create(:event, :closed, project: public_project, target: public_project, author: source_user) }
-   let!(:reopened_event) { create(:event, :reopened, project: public_project, target: public_project, author: source_user) }
-   let!(:comments_event) { create(:event, :commented, project: public_project, target: public_project, author: source_user) }
-   let!(:joined_event) { create(:event, :joined, project: public_project, target: public_project, author: source_user) }
-   let!(:left_event) { create(:event, :left, project: public_project, target: public_project, author: source_user) }

    context 'with the "push" filter' do
      let(:filter) { described_class::PUSH }

-     it 'applies push filter' do
-       events = described_class.new(described_class.push).apply_filter(Event.all)
-       expect(events).to contain_exactly(push_event)
+     it 'filters push events only' do
+       expect(filtered_events).to contain_exactly(push_event)
      end
    end

    context 'with the "merged" filter' do
      let(:filter) { described_class::MERGED }

-     it 'applies merged filter' do
-       events = described_class.new(described_class.merged).apply_filter(Event.all)
-       expect(events).to contain_exactly(merged_event)
+     it 'filters merged events only' do
+       expect(filtered_events).to contain_exactly(merged_event)
      end
    end

    context 'with the "issue" filter' do
      let(:filter) { described_class::ISSUE }

-     it 'applies issue filter' do
-       events = described_class.new(described_class.issue).apply_filter(Event.all)
-       expect(events).to contain_exactly(created_event, updated_event, closed_event, reopened_event)
+     it 'filters issue events only' do
+       expect(filtered_events).to contain_exactly(created_event, updated_event, closed_event, reopened_event)
      end
    end

    context 'with the "comments" filter' do
      let(:filter) { described_class::COMMENTS }

-     it 'applies comments filter' do
-       events = described_class.new(described_class.comments).apply_filter(Event.all)
-       expect(events).to contain_exactly(comments_event)
+     it 'filters comment events only' do
+       expect(filtered_events).to contain_exactly(comments_event)
      end
    end

    context 'with the "team" filter' do
      let(:filter) { described_class::TEAM }

-     it 'applies team filter' do
-       events = described_class.new(described_class.team).apply_filter(Event.all)
-       expect(events).to contain_exactly(joined_event, left_event)
+     it 'filters team events only' do
+       expect(filtered_events).to contain_exactly(joined_event, left_event)
      end
    end

    context 'with the "all" filter' do
      let(:filter) { described_class::ALL }

-     it 'applies all filter' do
-       events = described_class.new(described_class.all).apply_filter(Event.all)
-       expect(events).to contain_exactly(push_event, merged_event, created_event, updated_event, closed_event, reopened_event, comments_event, joined_event, left_event)
+     it 'returns all events' do
+       expect(filtered_events).to eq(Event.all)
      end
    end

    context 'with an unknown filter' do
      let(:filter) { 'foo' }

      it 'returns all events' do
        expect(filtered_events).to eq(Event.all)
      end
    end

    context 'with a nil filter' do
      let(:filter) { nil }

      it 'returns all events' do
        expect(filtered_events).to eq(Event.all)
      end
    end
  end
  describe '#active?' do
    let(:event_filter) { described_class.new(described_class::TEAM) }

    it 'returns false if filter does not include the given key' do
      expect(event_filter.active?('foo')).to eq(false)
    end

-   it 'applies no filter' do
-     events = described_class.new(nil).apply_filter(Event.all)
-     expect(events).to contain_exactly(push_event, merged_event, created_event, updated_event, closed_event, reopened_event, comments_event, joined_event, left_event)
+   it 'returns false if the given key is nil' do
+     expect(event_filter.active?(nil)).to eq(false)
    end

-   it 'applies unknown filter' do
-     events = described_class.new('').apply_filter(Event.all)
-     expect(events).to contain_exactly(push_event, merged_event, created_event, updated_event, closed_event, reopened_event, comments_event, joined_event, left_event)
+   it 'returns true if filter includes the given key' do
+     expect(event_filter.active?(described_class::TEAM)).to eq(true)
    end
  end
end
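
For readers skimming the spec changes above, this is roughly how the EventFilter API reads once the refactor is applied. It is a sketch pieced together from the expectations in the spec (the EventFilter implementation itself is not part of this diff), so treat the return values as illustrative:

  filter = EventFilter.new(EventFilter::TEAM)

  filter.active?(EventFilter::TEAM)  # => true, per the '#active?' examples
  filter.active?('foo')              # => false
  filter.apply_filter(Event.all)     # => only the joined/left (team) events
  EventFilter.new('unknown').filter  # => "all" - unrecognised values fall back to the ALL filter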
@@ -3,13 +3,24 @@ require 'spec_helper'

describe Gitlab::Ci::Config::Entry::Reports do
  let(:entry) { described_class.new(config) }

  describe 'validates ALLOWED_KEYS' do
    let(:artifact_file_types) { Ci::JobArtifact.file_types }

    described_class::ALLOWED_KEYS.each do |keyword, _|
      it "expects #{keyword} to be an artifact file_type" do
        expect(artifact_file_types).to include(keyword)
      end
    end
  end
  describe 'validation' do
    context 'when entry config value is correct' do
-     let(:config) { { junit: %w[junit.xml] } }
+     using RSpec::Parameterized::TableSyntax

      shared_examples 'a valid entry' do |keyword, file|
        describe '#value' do
          it 'returns artifacts configuration' do
-           expect(entry.value).to eq config
+           expect(entry.value).to eq({ "#{keyword}": [file] })
          end
        end

@@ -18,12 +29,27 @@ describe Gitlab::Ci::Config::Entry::Reports do
          expect(entry).to be_valid
        end
      end
      end

      where(:keyword, :file) do
        :junit               | 'junit.xml'
        :sast                | 'gl-sast-report.json'
        :dependency_scanning | 'gl-dependency-scanning-report.json'
        :container_scanning  | 'gl-container-scanning-report.json'
        :dast                | 'gl-dast-report.json'
      end

      with_them do
        context 'when value is an array' do
          let(:config) { { "#{keyword}": [file] } }

          it_behaves_like 'a valid entry', params[:keyword], params[:file]
        end

        context 'when value is not array' do
-         let(:config) { { junit: 'junit.xml' } }
+         let(:config) { { "#{keyword}": file } }

-         it 'converts to array' do
-           expect(entry.value).to eq({ junit: ['junit.xml'] })
+         it_behaves_like 'a valid entry', params[:keyword], params[:file]
        end
      end
    end
@@ -31,11 +57,13 @@ describe Gitlab::Ci::Config::Entry::Reports do
    context 'when entry value is not correct' do
      describe '#errors' do
        context 'when value of attribute is invalid' do
-         let(:config) { { junit: 10 } }
+         where(key: described_class::ALLOWED_KEYS) do
+           let(:config) { { "#{key}": 10 } }

            it 'reports error' do
              expect(entry.errors)
-               .to include 'reports junit should be an array of strings or a string'
+               .to include "reports #{key} should be an array of strings or a string"
            end
          end
        end
...
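
The table syntax introduced above comes from the rspec-parameterized gem, which GitLab pulls in for its specs. As a standalone illustration of how a where table expands into one example group per row (the example names and values below are made up for this sketch):

  require 'rspec-parameterized'

  RSpec.describe 'where table expansion' do
    using RSpec::Parameterized::TableSyntax

    # Each row becomes its own context in which :a, :b and :sum are defined.
    where(:a, :b, :sum) do
      1 | 2 | 3
      4 | 5 | 9
    end

    with_them do
      it 'adds the row values' do
        expect(a + b).to eq(sum)
      end
    end
  end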
require 'fast_spec_helper'

- describe Gitlab::Ci::Parsers::Junit do
+ describe Gitlab::Ci::Parsers::Test::Junit do
  describe '#parse!' do
    subject { described_class.new.parse!(junit, test_suite) }
...
require 'spec_helper'

- describe Gitlab::Ci::Parsers do
+ describe Gitlab::Ci::Parsers::Test do
  describe '.fabricate!' do
    subject { described_class.fabricate!(file_type) }

@@ -16,7 +16,7 @@ describe Gitlab::Ci::Parsers do
      let(:file_type) { 'undefined' }

      it 'raises an error' do
-       expect { subject }.to raise_error(NameError)
+       expect { subject }.to raise_error(Gitlab::Ci::Parsers::Test::ParserNotFoundError)
      end
    end
  end
...
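
The renamed namespace and the new error class above suggest a small factory on Gitlab::Ci::Parsers::Test. A rough sketch of that shape follows; the registry and method body are assumptions for illustration only, since the spec shows nothing beyond fabricate! and ParserNotFoundError:

  module Gitlab
    module Ci
      module Parsers
        module Test
          ParserNotFoundError = Class.new(StandardError)

          class Junit
            def parse!(xml_data, test_suite)
              # real parsing logic lives in the actual class; stubbed here
            end
          end

          # Assumed registry - the spec only shows that an unknown file type raises.
          PARSERS = { junit: Junit }.freeze

          def self.fabricate!(file_type)
            parser = PARSERS[file_type.to_sym]
            raise ParserNotFoundError, "couldn't find parser for #{file_type}" unless parser

            parser.new
          end
        end
      end
    end
  end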
@@ -177,9 +177,7 @@ describe Ci::Build do
    it 'does not execute a query for selecting job artifact one by one' do
      recorded = ActiveRecord::QueryRecorder.new do
        subject.each do |build|
-         Ci::JobArtifact::TEST_REPORT_FILE_TYPES.each do |file_type|
-           build.public_send("job_artifacts_#{file_type}").file.exists?
-         end
+         build.job_artifacts.map { |a| a.file.exists? }
        end
      end

@@ -551,44 +549,22 @@ describe Ci::Build do
    end
  end
- describe '#has_test_reports?' do
-   subject { build.has_test_reports? }
+ describe '#has_job_artifacts?' do
+   subject { build.has_job_artifacts? }

-   context 'when build has a test report' do
-     let(:build) { create(:ci_build, :test_reports) }
+   context 'when build has a job artifact' do
+     let(:build) { create(:ci_build, :artifacts) }

      it { is_expected.to be_truthy }
    end

-   context 'when build does not have test reports' do
-     let(:build) { create(:ci_build, :artifacts) }
+   context 'when build does not have job artifacts' do
+     let(:build) { create(:ci_build, :legacy_artifacts) }

      it { is_expected.to be_falsy }
    end
  end

- describe '#erase_test_reports!' do
-   subject { build.erase_test_reports! }
-   context 'when build has a test report' do
-     let!(:build) { create(:ci_build, :test_reports) }
-     it 'removes a test report' do
-       subject
-       expect(build.has_test_reports?).to be_falsy
-     end
-   end
-   context 'when build does not have test reports' do
-     let!(:build) { create(:ci_build, :artifacts) }
-     it 'does not erase anything' do
-       expect { subject }.not_to change { Ci::JobArtifact.count }
-     end
-   end
- end
  describe '#has_old_trace?' do
    subject { build.has_old_trace? }

@@ -851,8 +827,8 @@ describe Ci::Build do
      expect(build.artifacts_metadata.exists?).to be_falsy
    end

-   it 'removes test reports' do
-     expect(build.job_artifacts.test_reports.count).to eq(0)
+   it 'removes all job_artifacts' do
+     expect(build.job_artifacts.count).to eq(0)
    end

    it 'erases build trace in trace file' do
@@ -1023,6 +999,32 @@ describe Ci::Build do
    end
  end
  describe '#erase_erasable_artifacts!' do
    let!(:build) { create(:ci_build, :success) }

    subject { build.erase_erasable_artifacts! }

    before do
      Ci::JobArtifact.file_types.keys.each do |file_type|
        create(:ci_job_artifact, job: build, file_type: file_type, file_format: Ci::JobArtifact::TYPE_AND_FORMAT_PAIRS[file_type.to_sym])
      end
    end

    it "erases erasable artifacts" do
      subject

      expect(build.job_artifacts.erasable).to be_empty
    end

    it "keeps non erasable artifacts" do
      subject

      Ci::JobArtifact::NON_ERASABLE_FILE_TYPES.each do |file_type|
        expect(build.send("job_artifacts_#{file_type}")).not_to be_nil
      end
    end
  end

  describe '#first_pending' do
    let!(:first) { create(:ci_build, pipeline: pipeline, status: 'pending', created_at: Date.yesterday) }
    let!(:second) { create(:ci_build, pipeline: pipeline, status: 'pending') }
@@ -2846,16 +2848,10 @@ describe Ci::Build do
      end

      it 'raises an error' do
-       expect { subject }.to raise_error(Gitlab::Ci::Parsers::Junit::JunitParserError)
+       expect { subject }.to raise_error(Gitlab::Ci::Parsers::Test::Junit::JunitParserError)
      end
    end
  end

- context 'when build does not have test reports' do
-   it 'raises an error' do
-     expect { subject }.to raise_error(NoMethodError)
-   end
- end
  end

  describe '#artifacts_metadata_entry' do
...
@@ -31,6 +31,22 @@ describe Ci::JobArtifact do
    end
  end

  describe '.erasable' do
    subject { described_class.erasable }

    context 'when there is an erasable artifact' do
      let!(:artifact) { create(:ci_job_artifact, :junit) }

      it { is_expected.to eq([artifact]) }
    end

    context 'when there are no erasable artifacts' do
      let!(:artifact) { create(:ci_job_artifact, :trace) }

      it { is_expected.to be_empty }
    end
  end
  describe 'callbacks' do
    subject { create(:ci_job_artifact, :archive) }

@@ -106,34 +122,46 @@ describe Ci::JobArtifact do
  describe 'validates file format' do
    subject { artifact }

-   context 'when archive type with zip format' do
-     let(:artifact) { build(:ci_job_artifact, :archive, file_format: :zip) }
+   described_class::TYPE_AND_FORMAT_PAIRS.except(:trace).each do |file_type, file_format|
+     context "when #{file_type} type with #{file_format} format" do
+       let(:artifact) { build(:ci_job_artifact, file_type: file_type, file_format: file_format) }

        it { is_expected.to be_valid }
      end

-     context 'when archive type with gzip format' do
-       let(:artifact) { build(:ci_job_artifact, :archive, file_format: :gzip) }
+     context "when #{file_type} type without format specification" do
+       let(:artifact) { build(:ci_job_artifact, file_type: file_type, file_format: nil) }

        it { is_expected.not_to be_valid }
      end

-     context 'when archive type without format specification' do
-       let(:artifact) { build(:ci_job_artifact, :archive, file_format: nil) }
+     context "when #{file_type} type with other formats" do
+       described_class.file_formats.except(file_format).values.each do |other_format|
+         let(:artifact) { build(:ci_job_artifact, file_type: file_type, file_format: other_format) }

          it { is_expected.not_to be_valid }
        end
      end
    end
  end
- context 'when junit type with zip format' do
-   let(:artifact) { build(:ci_job_artifact, :junit, file_format: :zip) }
-   it { is_expected.not_to be_valid }
+ describe 'validates DEFAULT_FILE_NAMES' do
+   subject { described_class::DEFAULT_FILE_NAMES }

+   described_class.file_types.each do |file_type, _|
+     it "expects #{file_type} to be included" do
+       is_expected.to include(file_type.to_sym)
      end
    end
  end

- context 'when junit type with gzip format' do
-   let(:artifact) { build(:ci_job_artifact, :junit, file_format: :gzip) }
-   it { is_expected.to be_valid }
+ describe 'validates TYPE_AND_FORMAT_PAIRS' do
+   subject { described_class::TYPE_AND_FORMAT_PAIRS }

+   described_class.file_types.each do |file_type, _|
+     it "expects #{file_type} to be included" do
+       expect(described_class.file_formats).to include(subject[file_type.to_sym])
      end
    end
  end
...
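
Taken together, the build and job-artifact examples above describe an "erasable" subset of artifacts. A schematic model sketch of that behaviour follows; the scope body and method body are assumptions for illustration, and only the names (erasable, NON_ERASABLE_FILE_TYPES, TYPE_AND_FORMAT_PAIRS, erase_erasable_artifacts!) come from the specs. file_type is treated as a plain string column here for simplicity:

  module Ci
    class JobArtifact < ActiveRecord::Base
      # Per the '.erasable' examples: trace artifacts survive an erase, junit
      # (and other report types) do not. The exact list is an assumption here.
      NON_ERASABLE_FILE_TYPES = %w[trace].freeze

      scope :erasable, -> { where.not(file_type: NON_ERASABLE_FILE_TYPES) }
    end

    class Build < ActiveRecord::Base
      has_many :job_artifacts

      # Matches the new '#erase_erasable_artifacts!' examples: erasable artifacts
      # are removed, non-erasable ones (e.g. the trace) are kept.
      def erase_erasable_artifacts!
        job_artifacts.erasable.destroy_all
      end
    end
  end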