Commit e1f53011 authored by Rémy Coutable

Merge branch 'ce-to-ee-2018-06-06' into 'master'

CE upstream - 2018-06-06 15:12 UTC

Closes #4811 and gitlab-ce#47280

See merge request gitlab-org/gitlab-ee!6012
parents c668cdfe 2f448ee7
...@@ -385,7 +385,7 @@ GEM ...@@ -385,7 +385,7 @@ GEM
grape-entity (0.7.1) grape-entity (0.7.1)
activesupport (>= 4.0) activesupport (>= 4.0)
multi_json (>= 1.3.2) multi_json (>= 1.3.2)
grape-path-helpers (1.0.1) grape-path-helpers (1.0.2)
activesupport (~> 4) activesupport (~> 4)
grape (~> 1.0) grape (~> 1.0)
rake (~> 12) rake (~> 12)
......
<script>
// IDE right-pane view rendering the build trace ("log") of the job currently
// selected in the `pipelines` Vuex module (`detailJob`), with a back button,
// a raw-log link and scroll-to-top/bottom controls.
import { mapActions, mapState } from 'vuex';
import _ from 'underscore';
import { __ } from '../../../locale';
import tooltip from '../../../vue_shared/directives/tooltip';
import Icon from '../../../vue_shared/components/icon.vue';
import ScrollButton from './detail/scroll_button.vue';
import JobDescription from './detail/description.vue';

// Sentinel values for the tracked scroll position of the trace container.
// The empty string is used for "somewhere in between" (see scrollBuildLog).
const scrollPositions = {
  top: 0,
  bottom: 1,
};

export default {
  directives: {
    tooltip,
  },
  components: {
    Icon,
    ScrollButton,
    JobDescription,
  },
  data() {
    return {
      // Last observed scroll position of the trace; drives the disabled
      // state of the scroll up/down buttons.
      scrollPos: scrollPositions.top,
    };
  },
  computed: {
    // `detailJob` is the job selected via the `setDetailJob` action.
    ...mapState('pipelines', ['detailJob']),
    isScrolledToBottom() {
      return this.scrollPos === scrollPositions.bottom;
    },
    isScrolledToTop() {
      return this.scrollPos === scrollPositions.top;
    },
    // Trace HTML, or a placeholder when nothing has been logged yet.
    // Rendered with v-html below — assumes `output` is trusted,
    // server-rendered HTML (see RECEIVE_JOB_TRACE_SUCCESS) — TODO confirm.
    jobOutput() {
      return this.detailJob.output || __('No messages were logged');
    },
  },
  mounted() {
    this.getTrace();
  },
  methods: {
    ...mapActions('pipelines', ['fetchJobTrace', 'setDetailJob']),
    // Scroll the trace container to the very bottom. Guarded because the
    // ref does not exist until the component has rendered.
    scrollDown() {
      if (this.$refs.buildTrace) {
        this.$refs.buildTrace.scrollTo(0, this.$refs.buildTrace.scrollHeight);
      }
    },
    // Scroll the trace container back to the top.
    scrollUp() {
      if (this.$refs.buildTrace) {
        this.$refs.buildTrace.scrollTo(0, 0);
      }
    },
    // Scroll handler recording whether the user sits at the top, the bottom
    // or the middle of the trace.
    // NOTE(review): despite the inner function's name this is a throttle,
    // and no wait interval is passed to _.throttle — confirm whether a
    // delay (e.g. 100ms) was intended.
    scrollBuildLog: _.throttle(function buildLogScrollDebounce() {
      const { scrollTop } = this.$refs.buildTrace;
      const { offsetHeight, scrollHeight } = this.$refs.buildTrace;
      if (scrollTop + offsetHeight === scrollHeight) {
        this.scrollPos = scrollPositions.bottom;
      } else if (scrollTop === 0) {
        this.scrollPos = scrollPositions.top;
      } else {
        this.scrollPos = '';
      }
    }),
    // Fetch the job trace, then jump to the end once it has loaded.
    getTrace() {
      return this.fetchJobTrace().then(() => this.scrollDown());
    },
  },
};
</script>

<template>
  <div class="ide-pipeline build-page d-flex flex-column flex-fill">
    <!-- Back button: clears the selected job, returning to the jobs list -->
    <header class="ide-job-header d-flex align-items-center">
      <button
        class="btn btn-default btn-sm d-flex"
        @click="setDetailJob(null)"
      >
        <icon
          name="chevron-left"
        />
        {{ __('View jobs') }}
      </button>
    </header>
    <div class="top-bar d-flex border-left-0">
      <job-description
        :job="detailJob"
      />
      <div class="controllers ml-auto">
        <!-- Opens the complete raw log in a new tab.
             NOTE(review): target="_blank" without rel="noopener noreferrer"
             — confirm whether the reverse-tabnabbing guard was intended. -->
        <a
          v-tooltip
          :title="__('Show complete raw log')"
          data-placement="top"
          data-container="body"
          class="controllers-buttons"
          :href="detailJob.rawPath"
          target="_blank"
        >
          <i
            aria-hidden="true"
            class="fa fa-file-text-o"
          ></i>
        </a>
        <scroll-button
          direction="up"
          :disabled="isScrolledToTop"
          @click="scrollUp"
        />
        <scroll-button
          direction="down"
          :disabled="isScrolledToBottom"
          @click="scrollDown"
        />
      </div>
    </div>
    <pre
      class="build-trace mb-0 h-100"
      ref="buildTrace"
      @scroll="scrollBuildLog"
    >
      <code
        class="bash"
        v-html="jobOutput"
      >
      </code>
      <div
        v-show="detailJob.isLoading"
        class="build-loader-animation"
      >
      </div>
    </pre>
  </div>
</template>
<script>
// Compact job summary: CI status icon, job name, and an external link to the
// job page showing the job id. Used in the IDE pipeline/job detail views.
import Icon from '../../../../vue_shared/components/icon.vue';
import CiIcon from '../../../../vue_shared/components/ci_icon.vue';

export default {
  components: {
    Icon,
    CiIcon,
  },
  props: {
    // Normalized job object; this component reads `status`, `name`, `path`
    // and `id` (shape produced by the store's normalizeJob — TODO confirm).
    job: {
      type: Object,
      required: true,
    },
  },
  computed: {
    // Human-readable job reference, e.g. "#123".
    jobId() {
      return `#${this.job.id}`;
    },
  },
};
</script>

<template>
  <div class="d-flex align-items-center">
    <ci-icon
      class="d-flex"
      :status="job.status"
      :borderless="true"
      :size="24"
    />
    <span class="prepend-left-8">
      {{ job.name }}
      <!-- Opens the job page in a new tab -->
      <a
        :href="job.path"
        target="_blank"
        class="ide-external-link"
      >
        {{ jobId }}
        <icon
          name="external-link"
          :size="12"
        />
      </a>
    </span>
  </div>
</template>
<script>
// Tooltip-wrapped scroll button (up or down) used by the IDE job trace view.
// Emits a plain `click` event; the parent decides what to scroll.
import { __ } from '../../../../locale';
import Icon from '../../../../vue_shared/components/icon.vue';
import tooltip from '../../../../vue_shared/directives/tooltip';

// The only accepted values for the `direction` prop.
const directions = {
  up: 'up',
  down: 'down',
};

export default {
  components: {
    Icon,
  },
  directives: {
    tooltip,
  },
  props: {
    // Which way this button scrolls; must be one of `directions`.
    direction: {
      type: String,
      required: true,
      validator: value => Object.keys(directions).includes(value),
    },
    // Disables the button, e.g. when already scrolled all the way.
    disabled: {
      type: Boolean,
      required: true,
    },
  },
  computed: {
    // Tooltip text matching the configured direction.
    tooltipTitle() {
      if (this.direction === directions.up) {
        return __('Scroll to top');
      }
      return __('Scroll to bottom');
    },
    // Sprite icon name, i.e. `scroll_up` or `scroll_down`.
    iconName() {
      return `scroll_${this.direction}`;
    },
  },
  methods: {
    // Forward the native click to the parent as a component event.
    clickedScroll() {
      this.$emit('click');
    },
  },
};
</script>

<template>
  <div
    v-tooltip
    class="controllers-buttons"
    data-container="body"
    data-placement="top"
    :title="tooltipTitle"
  >
    <button
      class="btn-scroll btn-transparent btn-blank"
      type="button"
      :disabled="disabled"
      @click="clickedScroll"
    >
      <icon
        :name="iconName"
      />
    </button>
  </div>
</template>
<script> <script>
import Icon from '../../../vue_shared/components/icon.vue'; import JobDescription from './detail/description.vue';
import CiIcon from '../../../vue_shared/components/ci_icon.vue';
export default { export default {
components: { components: {
Icon, JobDescription,
CiIcon,
}, },
props: { props: {
job: { job: {
...@@ -18,29 +16,29 @@ export default { ...@@ -18,29 +16,29 @@ export default {
return `#${this.job.id}`; return `#${this.job.id}`;
}, },
}, },
methods: {
clickViewLog() {
this.$emit('clickViewLog', this.job);
},
},
}; };
</script> </script>
<template> <template>
<div class="ide-job-item"> <div class="ide-job-item">
<ci-icon <job-description
:status="job.status" class="append-right-default"
:borderless="true" :job="job"
:size="24"
/> />
<span class="prepend-left-8"> <div class="ml-auto align-self-center">
{{ job.name }} <button
<a v-if="job.started"
:href="job.path" type="button"
target="_blank" class="btn btn-default btn-sm"
class="ide-external-link" @click="clickViewLog"
> >
{{ jobId }} {{ __('View log') }}
<icon </button>
name="external-link" </div>
:size="12"
/>
</a>
</span>
</div> </div>
</template> </template>
...@@ -19,7 +19,7 @@ export default { ...@@ -19,7 +19,7 @@ export default {
}, },
}, },
methods: { methods: {
...mapActions('pipelines', ['fetchJobs', 'toggleStageCollapsed']), ...mapActions('pipelines', ['fetchJobs', 'toggleStageCollapsed', 'setDetailJob']),
}, },
}; };
</script> </script>
...@@ -38,6 +38,7 @@ export default { ...@@ -38,6 +38,7 @@ export default {
:stage="stage" :stage="stage"
@fetch="fetchJobs" @fetch="fetchJobs"
@toggleCollapsed="toggleStageCollapsed" @toggleCollapsed="toggleStageCollapsed"
@clickViewLog="setDetailJob"
/> />
</template> </template>
</div> </div>
......
...@@ -48,6 +48,9 @@ export default { ...@@ -48,6 +48,9 @@ export default {
toggleCollapsed() { toggleCollapsed() {
this.$emit('toggleCollapsed', this.stage.id); this.$emit('toggleCollapsed', this.stage.id);
}, },
clickViewLog(job) {
this.$emit('clickViewLog', job);
},
}, },
}; };
</script> </script>
...@@ -101,6 +104,7 @@ export default { ...@@ -101,6 +104,7 @@ export default {
v-for="job in stage.jobs" v-for="job in stage.jobs"
:key="job.id" :key="job.id"
:job="job" :job="job"
@clickViewLog="clickViewLog"
/> />
</template> </template>
</div> </div>
......
...@@ -4,6 +4,7 @@ import tooltip from '../../../vue_shared/directives/tooltip'; ...@@ -4,6 +4,7 @@ import tooltip from '../../../vue_shared/directives/tooltip';
import Icon from '../../../vue_shared/components/icon.vue'; import Icon from '../../../vue_shared/components/icon.vue';
import { rightSidebarViews } from '../../constants'; import { rightSidebarViews } from '../../constants';
import PipelinesList from '../pipelines/list.vue'; import PipelinesList from '../pipelines/list.vue';
import JobsDetail from '../jobs/detail.vue';
export default { export default {
directives: { directives: {
...@@ -12,9 +13,16 @@ export default { ...@@ -12,9 +13,16 @@ export default {
components: { components: {
Icon, Icon,
PipelinesList, PipelinesList,
JobsDetail,
}, },
computed: { computed: {
...mapState(['rightPane']), ...mapState(['rightPane']),
pipelinesActive() {
return (
this.rightPane === rightSidebarViews.pipelines ||
this.rightPane === rightSidebarViews.jobsDetail
);
},
}, },
methods: { methods: {
...mapActions(['setRightPane']), ...mapActions(['setRightPane']),
...@@ -48,7 +56,7 @@ export default { ...@@ -48,7 +56,7 @@ export default {
:title="__('Pipelines')" :title="__('Pipelines')"
class="ide-sidebar-link is-right" class="ide-sidebar-link is-right"
:class="{ :class="{
active: rightPane === $options.rightSidebarViews.pipelines active: pipelinesActive
}" }"
type="button" type="button"
@click="clickTab($event, $options.rightSidebarViews.pipelines)" @click="clickTab($event, $options.rightSidebarViews.pipelines)"
......
...@@ -23,4 +23,5 @@ export const viewerTypes = { ...@@ -23,4 +23,5 @@ export const viewerTypes = {
export const rightSidebarViews = { export const rightSidebarViews = {
pipelines: 'pipelines-list', pipelines: 'pipelines-list',
jobsDetail: 'jobs-detail',
}; };
...@@ -4,6 +4,7 @@ import { __ } from '../../../../locale'; ...@@ -4,6 +4,7 @@ import { __ } from '../../../../locale';
import flash from '../../../../flash'; import flash from '../../../../flash';
import Poll from '../../../../lib/utils/poll'; import Poll from '../../../../lib/utils/poll';
import service from '../../../services'; import service from '../../../services';
import { rightSidebarViews } from '../../../constants';
import * as types from './mutation_types'; import * as types from './mutation_types';
let eTagPoll; let eTagPoll;
...@@ -77,4 +78,28 @@ export const fetchJobs = ({ dispatch }, stage) => { ...@@ -77,4 +78,28 @@ export const fetchJobs = ({ dispatch }, stage) => {
export const toggleStageCollapsed = ({ commit }, stageId) => export const toggleStageCollapsed = ({ commit }, stageId) =>
commit(types.TOGGLE_STAGE_COLLAPSE, stageId); commit(types.TOGGLE_STAGE_COLLAPSE, stageId);
// Select `job` for the detail pane (or clear it with `null`) and switch the
// IDE's right sidebar to the matching view.
export const setDetailJob = ({ commit, dispatch }, job) => {
  commit(types.SET_DETAIL_JOB, job);

  const view = job ? rightSidebarViews.jobsDetail : rightSidebarViews.pipelines;
  dispatch('setRightPane', view, { root: true });
};

// Mark the selected job's trace as loading.
export const requestJobTrace = ({ commit }) => commit(types.REQUEST_JOB_TRACE);

// Surface a flash message and reset the loading flag on trace failure.
export const receiveJobTraceError = ({ commit }) => {
  flash(__('Error fetching job trace'));
  commit(types.RECEIVE_JOB_TRACE_ERROR);
};

// Store the fetched trace payload on the selected job.
export const receiveJobTraceSuccess = ({ commit }, data) =>
  commit(types.RECEIVE_JOB_TRACE_SUCCESS, data);

// Fetch the selected job's trace as JSON and dispatch the success/error
// follow-up action. Resolves once the follow-up dispatch has run.
export const fetchJobTrace = ({ dispatch, state }) => {
  dispatch('requestJobTrace');

  const traceEndpoint = `${state.detailJob.path}/trace`;

  return axios
    .get(traceEndpoint, { params: { format: 'json' } })
    .then(response => dispatch('receiveJobTraceSuccess', response.data))
    .catch(() => dispatch('receiveJobTraceError'));
};
export default () => {}; export default () => {};
...@@ -7,3 +7,9 @@ export const RECEIVE_JOBS_ERROR = 'RECEIVE_JOBS_ERROR'; ...@@ -7,3 +7,9 @@ export const RECEIVE_JOBS_ERROR = 'RECEIVE_JOBS_ERROR';
export const RECEIVE_JOBS_SUCCESS = 'RECEIVE_JOBS_SUCCESS'; export const RECEIVE_JOBS_SUCCESS = 'RECEIVE_JOBS_SUCCESS';
export const TOGGLE_STAGE_COLLAPSE = 'TOGGLE_STAGE_COLLAPSE'; export const TOGGLE_STAGE_COLLAPSE = 'TOGGLE_STAGE_COLLAPSE';
// Mutation types for the IDE job detail pane and its trace fetch lifecycle.
export const SET_DETAIL_JOB = 'SET_DETAIL_JOB';
export const REQUEST_JOB_TRACE = 'REQUEST_JOB_TRACE';
export const RECEIVE_JOB_TRACE_ERROR = 'RECEIVE_JOB_TRACE_ERROR';
export const RECEIVE_JOB_TRACE_SUCCESS = 'RECEIVE_JOB_TRACE_SUCCESS';
...@@ -63,4 +63,17 @@ export default { ...@@ -63,4 +63,17 @@ export default {
isCollapsed: stage.id === id ? !stage.isCollapsed : stage.isCollapsed, isCollapsed: stage.id === id ? !stage.isCollapsed : stage.isCollapsed,
})); }));
}, },
  // Store a shallow copy of the job selected for the detail pane.
  // NOTE(review): spreading `null` yields `{}`, so clearing the selection
  // leaves an empty object rather than `null` — confirm intended.
  [types.SET_DETAIL_JOB](state, job) {
    state.detailJob = { ...job };
  },
  // Trace request lifecycle: toggle the per-job loading flag.
  [types.REQUEST_JOB_TRACE](state) {
    state.detailJob.isLoading = true;
  },
  [types.RECEIVE_JOB_TRACE_ERROR](state) {
    state.detailJob.isLoading = false;
  },
  // Store the rendered HTML trace delivered by the trace endpoint.
  [types.RECEIVE_JOB_TRACE_SUCCESS](state, data) {
    state.detailJob.isLoading = false;
    state.detailJob.output = data.html;
  },
}; };
...@@ -3,4 +3,5 @@ export default () => ({ ...@@ -3,4 +3,5 @@ export default () => ({
isLoadingJobs: false, isLoadingJobs: false,
latestPipeline: null, latestPipeline: null,
stages: [], stages: [],
detailJob: null,
}); });
...@@ -4,4 +4,8 @@ export const normalizeJob = job => ({ ...@@ -4,4 +4,8 @@ export const normalizeJob = job => ({
name: job.name, name: job.name,
status: job.status, status: job.status,
path: job.build_path, path: job.build_path,
rawPath: `${job.build_path}/raw`,
started: job.started,
output: '',
isLoading: false,
}); });
...@@ -58,7 +58,7 @@ class ImporterStatus { ...@@ -58,7 +58,7 @@ class ImporterStatus {
job.find('.import-target').html(`<a href="${data.full_path}">${data.full_path}</a>`); job.find('.import-target').html(`<a href="${data.full_path}">${data.full_path}</a>`);
$('table.import-jobs tbody').prepend(job); $('table.import-jobs tbody').prepend(job);
job.addClass('active'); job.addClass('table-active');
const connectingVerb = this.ciCdOnly ? __('connecting') : __('importing'); const connectingVerb = this.ciCdOnly ? __('connecting') : __('importing');
job.find('.import-actions').html(sprintf( job.find('.import-actions').html(sprintf(
_.escape(__('%{loadingIcon} Started')), { _.escape(__('%{loadingIcon} Started')), {
...@@ -67,7 +67,15 @@ class ImporterStatus { ...@@ -67,7 +67,15 @@ class ImporterStatus {
false, false,
)); ));
}) })
.catch(() => flash(__('An error occurred while importing project'))); .catch((error) => {
let details = error;
if (error.response && error.response.data && error.response.data.errors) {
details = error.response.data.errors;
}
flash(__(`An error occurred while importing project: ${details}`));
});
} }
autoUpdate() { autoUpdate() {
...@@ -81,7 +89,7 @@ class ImporterStatus { ...@@ -81,7 +89,7 @@ class ImporterStatus {
switch (job.import_status) { switch (job.import_status) {
case 'finished': case 'finished':
jobItem.removeClass('active').addClass('success'); jobItem.removeClass('table-active').addClass('table-success');
statusField.html(`<span><i class="fa fa-check"></i> ${__('Done')}</span>`); statusField.html(`<span><i class="fa fa-check"></i> ${__('Done')}</span>`);
break; break;
case 'scheduled': case 'scheduled':
......
...@@ -42,6 +42,9 @@ export default { ...@@ -42,6 +42,9 @@ export default {
jobStarted() { jobStarted() {
return !this.job.started === false; return !this.job.started === false;
}, },
headerTime() {
return this.jobStarted ? this.job.started : this.job.created_at;
},
}, },
watch: { watch: {
job() { job() {
...@@ -73,7 +76,7 @@ export default { ...@@ -73,7 +76,7 @@ export default {
:status="status" :status="status"
item-name="Job" item-name="Job"
:item-id="job.id" :item-id="job.id"
:time="job.created_at" :time="headerTime"
:user="job.user" :user="job.user"
:actions="actions" :actions="actions"
:has-sidebar-button="true" :has-sidebar-button="true"
......
...@@ -174,7 +174,10 @@ export default { ...@@ -174,7 +174,10 @@ export default {
:tags-path="tagsPath" :tags-path="tagsPath"
:show-legend="showLegend" :show-legend="showLegend"
:small-graph="forceSmallGraph" :small-graph="forceSmallGraph"
/> >
<!-- EE content -->
{{ null }}
</graph>
</graph-group> </graph-group>
</div> </div>
<empty-state <empty-state
......
...@@ -232,9 +232,14 @@ export default { ...@@ -232,9 +232,14 @@ export default {
@mouseover="showFlagContent = true" @mouseover="showFlagContent = true"
@mouseleave="showFlagContent = false" @mouseleave="showFlagContent = false"
> >
<h5 class="text-center graph-title"> <div class="prometheus-graph-header">
<h5 class="prometheus-graph-title">
{{ graphData.title }} {{ graphData.title }}
</h5> </h5>
<div class="prometheus-graph-widgets">
<slot></slot>
</div>
</div>
<div <div
class="prometheus-svg-container" class="prometheus-svg-container"
:style="paddingBottomRootSvg" :style="paddingBottomRootSvg"
......
...@@ -56,6 +56,7 @@ export default { ...@@ -56,6 +56,7 @@ export default {
<gl-modal <gl-modal
:id="`modal-peek-${metric}-details`" :id="`modal-peek-${metric}-details`"
:header-title-text="header" :header-title-text="header"
modal-size="lg"
class="performance-bar-modal" class="performance-bar-modal"
> >
<table <table
...@@ -70,7 +71,7 @@ export default { ...@@ -70,7 +71,7 @@ export default {
<td <td
v-for="key in keys" v-for="key in keys"
:key="key" :key="key"
class="break-word" class="break-word all-words"
> >
{{ item[key] }} {{ item[key] }}
</td> </td>
......
<script> <script>
const buttonVariants = ['danger', 'primary', 'success', 'warning']; const buttonVariants = ['danger', 'primary', 'success', 'warning'];
const sizeVariants = ['sm', 'md', 'lg'];
export default { export default {
name: 'GlModal', name: 'GlModal',
props: { props: {
id: { id: {
type: String, type: String,
required: false, required: false,
default: null, default: null,
}, },
modalSize: {
type: String,
required: false,
default: 'md',
validator: value => sizeVariants.includes(value),
},
headerTitleText: { headerTitleText: {
type: String, type: String,
required: false, required: false,
...@@ -27,7 +33,11 @@ export default { ...@@ -27,7 +33,11 @@ export default {
default: '', default: '',
}, },
}, },
computed: {
modalSizeClass() {
return this.modalSize === 'md' ? '' : `modal-${this.modalSize}`;
},
},
methods: { methods: {
emitCancel(event) { emitCancel(event) {
this.$emit('cancel', event); this.$emit('cancel', event);
...@@ -48,6 +58,7 @@ export default { ...@@ -48,6 +58,7 @@ export default {
> >
<div <div
class="modal-dialog" class="modal-dialog"
:class="modalSizeClass"
role="document" role="document"
> >
<div class="modal-content"> <div class="modal-content">
......
...@@ -150,6 +150,16 @@ table { ...@@ -150,6 +150,16 @@ table {
color: $gl-text-color-secondary !important; color: $gl-text-color-secondary !important;
} }
.bg-success,
.bg-primary,
.bg-info,
.bg-danger,
.bg-warning {
.card-header {
color: $white-light;
}
}
// Polyfill deprecated selectors // Polyfill deprecated selectors
.hidden { .hidden {
......
...@@ -456,6 +456,10 @@ img.emoji { ...@@ -456,6 +456,10 @@ img.emoji {
.break-word { .break-word {
word-wrap: break-word; word-wrap: break-word;
&.all-words {
word-break: break-word;
}
} }
/** COMMON CLASSES **/ /** COMMON CLASSES **/
......
...@@ -75,6 +75,7 @@ ...@@ -75,6 +75,7 @@
.top-bar { .top-bar {
height: 35px; height: 35px;
min-height: 35px;
background: $gray-light; background: $gray-light;
border: 1px solid $border-color; border: 1px solid $border-color;
color: $gl-text-color; color: $gl-text-color;
......
...@@ -23,7 +23,6 @@ ...@@ -23,7 +23,6 @@
} }
.btn-group { .btn-group {
> a { > a {
color: $gl-text-color-secondary; color: $gl-text-color-secondary;
} }
...@@ -407,6 +406,7 @@ ...@@ -407,6 +406,7 @@
.prometheus-graph { .prometheus-graph {
flex: 1 0 auto; flex: 1 0 auto;
min-width: 450px; min-width: 450px;
max-width: 100%;
padding: $gl-padding / 2; padding: $gl-padding / 2;
h5 { h5 {
...@@ -418,6 +418,17 @@ ...@@ -418,6 +418,17 @@
} }
} }
.prometheus-graph-header {
display: flex;
align-items: center;
justify-content: space-between;
margin-bottom: $gl-padding-8;
h5 {
margin: 0;
}
}
.prometheus-graph-cursor { .prometheus-graph-cursor {
position: absolute; position: absolute;
background: $theme-gray-600; background: $theme-gray-600;
......
...@@ -1146,8 +1146,13 @@ ...@@ -1146,8 +1146,13 @@
} }
.ide-external-link { .ide-external-link {
position: relative;
svg { svg {
display: none; display: none;
position: absolute;
top: 2px;
right: -$gl-padding;
} }
&:hover, &:hover,
...@@ -1178,6 +1183,8 @@ ...@@ -1178,6 +1183,8 @@
display: flex; display: flex;
flex-direction: column; flex-direction: column;
height: 100%; height: 100%;
margin-top: -$grid-size;
margin-bottom: -$grid-size;
.empty-state { .empty-state {
margin-top: auto; margin-top: auto;
...@@ -1194,6 +1201,17 @@ ...@@ -1194,6 +1201,17 @@
margin: 0; margin: 0;
} }
} }
.build-trace,
.top-bar {
margin-left: -$gl-padding;
}
&.build-page .top-bar {
top: 0;
font-size: 12px;
border-top-right-radius: $border-radius-default;
}
} }
.ide-pipeline-list { .ide-pipeline-list {
...@@ -1202,7 +1220,7 @@ ...@@ -1202,7 +1220,7 @@
} }
.ide-pipeline-header { .ide-pipeline-header {
min-height: 50px; min-height: 55px;
padding-left: $gl-padding; padding-left: $gl-padding;
padding-right: $gl-padding; padding-right: $gl-padding;
...@@ -1222,8 +1240,7 @@ ...@@ -1222,8 +1240,7 @@
.ci-status-icon { .ci-status-icon {
display: flex; display: flex;
justify-content: center; justify-content: center;
height: 20px; min-width: 24px;
margin-top: -2px;
overflow: hidden; overflow: hidden;
} }
} }
...@@ -1253,3 +1270,7 @@ ...@@ -1253,3 +1270,7 @@
overflow: hidden; overflow: hidden;
text-overflow: ellipsis; text-overflow: ellipsis;
} }
.ide-job-header {
min-height: 60px;
}
...@@ -102,10 +102,6 @@ ...@@ -102,10 +102,6 @@
.form-text.text-muted { .form-text.text-muted {
margin-top: 0; margin-top: 0;
} }
.label-light {
margin-bottom: 0;
}
} }
.settings-list-icon { .settings-list-icon {
......
...@@ -25,4 +25,8 @@ class Import::BaseController < ApplicationController ...@@ -25,4 +25,8 @@ class Import::BaseController < ApplicationController
current_user.namespace current_user.namespace
end end
def project_save_error(project)
project.errors.full_messages.join(', ')
end
end end
...@@ -55,7 +55,7 @@ class Import::BitbucketController < Import::BaseController ...@@ -55,7 +55,7 @@ class Import::BitbucketController < Import::BaseController
if project.persisted? if project.persisted?
render json: ProjectSerializer.new.represent(project) render json: ProjectSerializer.new.represent(project)
else else
render json: { errors: project.errors.full_messages }, status: :unprocessable_entity render json: { errors: project_save_error(project) }, status: :unprocessable_entity
end end
else else
render json: { errors: 'This namespace has already been taken! Please choose another one.' }, status: :unprocessable_entity render json: { errors: 'This namespace has already been taken! Please choose another one.' }, status: :unprocessable_entity
......
...@@ -66,7 +66,7 @@ class Import::FogbugzController < Import::BaseController ...@@ -66,7 +66,7 @@ class Import::FogbugzController < Import::BaseController
if project.persisted? if project.persisted?
render json: ProjectSerializer.new.represent(project) render json: ProjectSerializer.new.represent(project)
else else
render json: { errors: project.errors.full_messages }, status: :unprocessable_entity render json: { errors: project_save_error(project) }, status: :unprocessable_entity
end end
end end
......
...@@ -50,7 +50,7 @@ class Import::GithubController < Import::BaseController ...@@ -50,7 +50,7 @@ class Import::GithubController < Import::BaseController
if project.persisted? if project.persisted?
render json: ProjectSerializer.new.represent(project) render json: ProjectSerializer.new.represent(project)
else else
render json: { errors: project.errors.full_messages }, status: :unprocessable_entity render json: { errors: project_save_error(project) }, status: :unprocessable_entity
end end
else else
render json: { errors: 'This namespace has already been taken! Please choose another one.' }, status: :unprocessable_entity render json: { errors: 'This namespace has already been taken! Please choose another one.' }, status: :unprocessable_entity
......
...@@ -32,7 +32,7 @@ class Import::GitlabController < Import::BaseController ...@@ -32,7 +32,7 @@ class Import::GitlabController < Import::BaseController
if project.persisted? if project.persisted?
render json: ProjectSerializer.new.represent(project) render json: ProjectSerializer.new.represent(project)
else else
render json: { errors: project.errors.full_messages }, status: :unprocessable_entity render json: { errors: project_save_error(project) }, status: :unprocessable_entity
end end
else else
render json: { errors: 'This namespace has already been taken! Please choose another one.' }, status: :unprocessable_entity render json: { errors: 'This namespace has already been taken! Please choose another one.' }, status: :unprocessable_entity
......
...@@ -92,7 +92,7 @@ class Import::GoogleCodeController < Import::BaseController ...@@ -92,7 +92,7 @@ class Import::GoogleCodeController < Import::BaseController
if project.persisted? if project.persisted?
render json: ProjectSerializer.new.represent(project) render json: ProjectSerializer.new.represent(project)
else else
render json: { errors: project.errors.full_messages }, status: :unprocessable_entity render json: { errors: project_save_error(project) }, status: :unprocessable_entity
end end
end end
......
...@@ -31,15 +31,14 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo ...@@ -31,15 +31,14 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
end end
def show def show
validates_merge_request close_merge_request_if_no_source_project
close_merge_request_without_source_project mark_merge_request_mergeable
check_if_can_be_merged
# Return if the response has already been rendered
return if response_body
respond_to do |format| respond_to do |format|
format.html do format.html do
# use next to appease Rubocop
next render('invalid') if target_branch_missing?
# Build a note object for comment form # Build a note object for comment form
@note = @project.notes.new(noteable: @merge_request) @note = @project.notes.new(noteable: @merge_request)
...@@ -238,20 +237,6 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo ...@@ -238,20 +237,6 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
alias_method :issuable, :merge_request alias_method :issuable, :merge_request
alias_method :awardable, :merge_request alias_method :awardable, :merge_request
def validates_merge_request
# Show git not found page
# if there is no saved commits between source & target branch
if @merge_request.has_no_commits?
# and if target branch doesn't exist
return invalid_mr unless @merge_request.target_branch_exists?
end
end
def invalid_mr
# Render special view for MR with removed target branch
render 'invalid'
end
def merge_params def merge_params
params.permit(merge_params_attributes) params.permit(merge_params_attributes)
end end
...@@ -265,7 +250,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo ...@@ -265,7 +250,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
@merge_request.head_pipeline && @merge_request.head_pipeline.active? @merge_request.head_pipeline && @merge_request.head_pipeline.active?
end end
def close_merge_request_without_source_project def close_merge_request_if_no_source_project
if !@merge_request.source_project && @merge_request.open? if !@merge_request.source_project && @merge_request.open?
@merge_request.close @merge_request.close
end end
...@@ -273,7 +258,11 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo ...@@ -273,7 +258,11 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
private private
def check_if_can_be_merged def target_branch_missing?
@merge_request.has_no_commits? && !@merge_request.target_branch_exists?
end
def mark_merge_request_mergeable
@merge_request.check_if_can_be_merged @merge_request.check_if_can_be_merged
end end
......
...@@ -410,11 +410,11 @@ module ProjectsHelper ...@@ -410,11 +410,11 @@ module ProjectsHelper
def project_status_css_class(status) def project_status_css_class(status)
case status case status
when "started" when "started"
"active" "table-active"
when "failed" when "failed"
"danger" "table-danger"
when "finished" when "finished"
"success" "table-success"
end end
end end
......
...@@ -6,7 +6,7 @@ module WorkhorseHelper ...@@ -6,7 +6,7 @@ module WorkhorseHelper
headers.store(*Gitlab::Workhorse.send_git_blob(repository, blob)) headers.store(*Gitlab::Workhorse.send_git_blob(repository, blob))
headers['Content-Disposition'] = 'inline' headers['Content-Disposition'] = 'inline'
headers['Content-Type'] = safe_content_type(blob) headers['Content-Type'] = safe_content_type(blob)
head :ok # 'render nothing: true' messes up the Content-Type render plain: ""
end end
# Send a Git diff through Workhorse # Send a Git diff through Workhorse
......
...@@ -70,6 +70,7 @@ module Ci ...@@ -70,6 +70,7 @@ module Ci
scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) } scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + [:manual]) } scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + [:manual]) }
scope :ref_protected, -> { where(protected: true) } scope :ref_protected, -> { where(protected: true) }
scope :with_live_trace, -> { where('EXISTS (?)', Ci::BuildTraceChunk.where('ci_builds.id = ci_build_trace_chunks.build_id').select(1)) }
scope :matches_tag_ids, -> (tag_ids) do scope :matches_tag_ids, -> (tag_ids) do
matcher = ::ActsAsTaggableOn::Tagging matcher = ::ActsAsTaggableOn::Tagging
......
...@@ -47,6 +47,6 @@ module ExclusiveLeaseGuard ...@@ -47,6 +47,6 @@ module ExclusiveLeaseGuard
end end
def log_error(message, extra_args = {}) def log_error(message, extra_args = {})
logger.error(message) Rails.logger.error(message)
end end
end end
...@@ -68,6 +68,7 @@ module Projects ...@@ -68,6 +68,7 @@ module Projects
message = "Unable to save #{e.record.type}: #{e.record.errors.full_messages.join(", ")} " message = "Unable to save #{e.record.type}: #{e.record.errors.full_messages.join(", ")} "
fail(error: message) fail(error: message)
rescue => e rescue => e
@project.errors.add(:base, e.message) if @project
fail(error: e.message) fail(error: e.message)
end end
...@@ -146,7 +147,6 @@ module Projects ...@@ -146,7 +147,6 @@ module Projects
Rails.logger.error(log_message) Rails.logger.error(log_message)
if @project if @project
@project.errors.add(:base, message)
@project.mark_import_as_failed(message) if @project.persisted? && @project.import? @project.mark_import_as_failed(message) if @project.persisted? && @project.import?
end end
......
...@@ -67,7 +67,7 @@ ...@@ -67,7 +67,7 @@
%th Projects %th Projects
%th Jobs %th Jobs
%th Tags %th Tags
%th= link_to 'Last contact', admin_runners_path(params.slice(:search).merge(sort: 'contacted_asc')) %th= link_to 'Last contact', admin_runners_path(safe_params.slice(:search).merge(sort: 'contacted_asc'))
%th %th
- @runners.each do |runner| - @runners.each do |runner|
......
...@@ -43,7 +43,7 @@ ...@@ -43,7 +43,7 @@
.settings-header .settings-header
%h4 %h4
= _('Variables') = _('Variables')
= link_to icon('question-circle'), help_page_path('ci/variables/README', anchor: 'secret-variables'), target: '_blank', rel: 'noopener noreferrer' = link_to icon('question-circle'), help_page_path('ci/variables/README', anchor: 'variables'), target: '_blank', rel: 'noopener noreferrer'
%button.btn.js-settings-toggle{ type: 'button' } %button.btn.js-settings-toggle{ type: 'button' }
= expanded ? 'Collapse' : 'Expand' = expanded ? 'Collapse' : 'Expand'
%p.append-bottom-0 %p.append-bottom-0
......
...@@ -17,6 +17,7 @@ ...@@ -17,6 +17,7 @@
- cronjob:stuck_ci_jobs - cronjob:stuck_ci_jobs
- cronjob:stuck_import_jobs - cronjob:stuck_import_jobs
- cronjob:stuck_merge_jobs - cronjob:stuck_merge_jobs
- cronjob:ci_archive_traces_cron
- cronjob:trending_projects - cronjob:trending_projects
- cronjob:issue_due_scheduler - cronjob:issue_due_scheduler
......
module Ci
  # Cron worker that sweeps finished builds whose live trace still resides in
  # Redis or the database and archives it. This can happen when the regular
  # ArchiveTraceWorker sidekiq jobs were lost by receiving SIGKILL.
  # More details in https://gitlab.com/gitlab-org/gitlab-ce/issues/36791
  class ArchiveTracesCronWorker
    include ApplicationWorker
    include CronjobQueue

    def perform
      stale_builds.find_each(batch_size: 100) do |build|
        archive_stale_trace(build)
      end
    end

    private

    # Finished builds that still have a live (unarchived) trace.
    def stale_builds
      Ci::Build.finished.with_live_trace
    end

    # Archive a single build's trace; failures are counted and logged so one
    # bad build does not abort the whole sweep.
    def archive_stale_trace(build)
      build.trace.archive!
    rescue => e
      failed_archive_counter.increment
      Rails.logger.error "Failed to archive stale live trace. id: #{build.id} message: #{e.message}"
    end

    # Prometheus counter tracking archive failures.
    def failed_archive_counter
      @failed_archive_counter ||= Gitlab::Metrics.counter(:job_trace_archive_failed_total, "Counter of failed attempts of traces archiving")
    end
  end
end
...@@ -29,7 +29,7 @@ class GitGarbageCollectWorker ...@@ -29,7 +29,7 @@ class GitGarbageCollectWorker
task = task.to_sym task = task.to_sym
cmd = command(task) cmd = command(task)
gitaly_migrate(GITALY_MIGRATED_TASKS[task]) do |is_enabled| gitaly_migrate(GITALY_MIGRATED_TASKS[task], status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
if is_enabled if is_enabled
gitaly_call(task, project.repository.raw_repository) gitaly_call(task, project.repository.raw_repository)
else else
...@@ -114,8 +114,8 @@ class GitGarbageCollectWorker ...@@ -114,8 +114,8 @@ class GitGarbageCollectWorker
%W[git -c repack.writeBitmaps=#{config_value}] %W[git -c repack.writeBitmaps=#{config_value}]
end end
def gitaly_migrate(method, &block) def gitaly_migrate(method, status: Gitlab::GitalyClient::MigrationStatus::OPT_IN, &block)
Gitlab::GitalyClient.migrate(method, &block) Gitlab::GitalyClient.migrate(method, status: status, &block)
rescue GRPC::NotFound => e rescue GRPC::NotFound => e
Gitlab::GitLogger.error("#{method} failed:\nRepository not found") Gitlab::GitLogger.error("#{method} failed:\nRepository not found")
raise Gitlab::Git::Repository::NoRepository.new(e) raise Gitlab::Git::Repository::NoRepository.new(e)
......
---
title: Add variables to POST api/v4/projects/:id/pipeline
merge_request: 19124
author: Jacopo Beschi @jacopo-beschi
type: added
---
title: Add Avatar API
merge_request: 19121
author: Imre Farkas
type: added
---
title: Use Github repo visibility during import while respecting restricted visibility
levels
merge_request:
author:
type: fixed
---
title: Migrate any remaining jobs from deprecated `object_storage_upload` queue.
merge_request:
author:
type: deprecated
---
title: Add a cronworker to rescue stale live traces
merge_request: 18680
author:
type: performance
---
title: 'Rails 5 fix unknown keywords: changes, key_id, project, gl_repository, action,
secret_token, protocol'
merge_request: 19466
author: Jasper Maes
type: fixed
---
title: Rails 5 fix glob spec
merge_request: 19469
author: Jasper Maes
type: fixed
---
title: Show a more helpful error for import status
merge_request:
author:
type: other
...@@ -341,6 +341,9 @@ Settings.cron_jobs['geo_migrated_local_files_clean_up_worker']['job_class'] ||= ...@@ -341,6 +341,9 @@ Settings.cron_jobs['geo_migrated_local_files_clean_up_worker']['job_class'] ||=
Settings.cron_jobs['import_export_project_cleanup_worker'] ||= Settingslogic.new({}) Settings.cron_jobs['import_export_project_cleanup_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['import_export_project_cleanup_worker']['cron'] ||= '0 * * * *' Settings.cron_jobs['import_export_project_cleanup_worker']['cron'] ||= '0 * * * *'
Settings.cron_jobs['import_export_project_cleanup_worker']['job_class'] = 'ImportExportProjectCleanupWorker' Settings.cron_jobs['import_export_project_cleanup_worker']['job_class'] = 'ImportExportProjectCleanupWorker'
Settings.cron_jobs['ci_archive_traces_cron_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['ci_archive_traces_cron_worker']['cron'] ||= '17 * * * *'
Settings.cron_jobs['ci_archive_traces_cron_worker']['job_class'] = 'Ci::ArchiveTracesCronWorker'
Settings.cron_jobs['requests_profiles_worker'] ||= Settingslogic.new({}) Settings.cron_jobs['requests_profiles_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['requests_profiles_worker']['cron'] ||= '0 0 * * *' Settings.cron_jobs['requests_profiles_worker']['cron'] ||= '0 0 * * *'
Settings.cron_jobs['requests_profiles_worker']['job_class'] = 'RequestsProfilesWorker' Settings.cron_jobs['requests_profiles_worker']['job_class'] = 'RequestsProfilesWorker'
......
class FixupEnvironmentNameUniqueness < ActiveRecord::Migration class FixupEnvironmentNameUniqueness < ActiveRecord::Migration
include Gitlab::Database::ArelMethods
include Gitlab::Database::MigrationHelpers include Gitlab::Database::MigrationHelpers
DOWNTIME = true DOWNTIME = true
...@@ -41,7 +42,7 @@ class FixupEnvironmentNameUniqueness < ActiveRecord::Migration ...@@ -41,7 +42,7 @@ class FixupEnvironmentNameUniqueness < ActiveRecord::Migration
conflicts.each do |id, name| conflicts.each do |id, name|
update_sql = update_sql =
Arel::UpdateManager.new(ActiveRecord::Base) arel_update_manager
.table(environments) .table(environments)
.set(environments[:name] => name + "-" + id.to_s) .set(environments[:name] => name + "-" + id.to_s)
.where(environments[:id].eq(id)) .where(environments[:id].eq(id))
......
...@@ -2,6 +2,7 @@ ...@@ -2,6 +2,7 @@
# for more information on how to write migrations for GitLab. # for more information on how to write migrations for GitLab.
class AddEnvironmentSlug < ActiveRecord::Migration class AddEnvironmentSlug < ActiveRecord::Migration
include Gitlab::Database::ArelMethods
include Gitlab::Database::MigrationHelpers include Gitlab::Database::MigrationHelpers
DOWNTIME = true DOWNTIME = true
...@@ -19,7 +20,7 @@ class AddEnvironmentSlug < ActiveRecord::Migration ...@@ -19,7 +20,7 @@ class AddEnvironmentSlug < ActiveRecord::Migration
finder = environments.project(:id, :name) finder = environments.project(:id, :name)
connection.exec_query(finder.to_sql).rows.each do |id, name| connection.exec_query(finder.to_sql).rows.each do |id, name|
updater = Arel::UpdateManager.new(ActiveRecord::Base) updater = arel_update_manager
.table(environments) .table(environments)
.set(environments[:slug] => generate_slug(name)) .set(environments[:slug] => generate_slug(name))
.where(environments[:id].eq(id)) .where(environments[:id].eq(id))
......
class FixProjectRecordsWithInvalidVisibility < ActiveRecord::Migration class FixProjectRecordsWithInvalidVisibility < ActiveRecord::Migration
include Gitlab::Database::ArelMethods
include Gitlab::Database::MigrationHelpers include Gitlab::Database::MigrationHelpers
BATCH_SIZE = 500 BATCH_SIZE = 500
...@@ -33,7 +34,7 @@ class FixProjectRecordsWithInvalidVisibility < ActiveRecord::Migration ...@@ -33,7 +34,7 @@ class FixProjectRecordsWithInvalidVisibility < ActiveRecord::Migration
end end
updates.each do |visibility_level, project_ids| updates.each do |visibility_level, project_ids|
updater = Arel::UpdateManager.new(ActiveRecord::Base) updater = arel_update_manager
.table(projects) .table(projects)
.set(projects[:visibility_level] => visibility_level) .set(projects[:visibility_level] => visibility_level)
.where(projects[:id].in(project_ids)) .where(projects[:id].in(project_ids))
......
# rubocop:disable Migration/UpdateLargeTable # rubocop:disable Migration/UpdateLargeTable
class MigrateUserActivitiesToUsersLastActivityOn < ActiveRecord::Migration class MigrateUserActivitiesToUsersLastActivityOn < ActiveRecord::Migration
include Gitlab::Database::ArelMethods
include Gitlab::Database::MigrationHelpers include Gitlab::Database::MigrationHelpers
disable_ddl_transaction! disable_ddl_transaction!
...@@ -39,7 +40,7 @@ class MigrateUserActivitiesToUsersLastActivityOn < ActiveRecord::Migration ...@@ -39,7 +40,7 @@ class MigrateUserActivitiesToUsersLastActivityOn < ActiveRecord::Migration
activities = activities(day.at_beginning_of_day, day.at_end_of_day, page: page) activities = activities(day.at_beginning_of_day, day.at_end_of_day, page: page)
update_sql = update_sql =
Arel::UpdateManager.new(ActiveRecord::Base) arel_update_manager
.table(users_table) .table(users_table)
.set(users_table[:last_activity_on] => day.to_date) .set(users_table[:last_activity_on] => day.to_date)
.where(users_table[:username].in(activities.map(&:first))) .where(users_table[:username].in(activities.map(&:first)))
......
class MigrateObjectStorageUploadSidekiqQueue < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
sidekiq_queue_migrate 'object_storage_upload', to: 'object_storage:object_storage_background_move'
end
def down
# do not migrate any jobs back because we would migrate also
# jobs which were not part of the 'object_storage_upload'
end
end
# Avatar API
> [Introduced][ce-19121] in GitLab 11.0
## Get a single avatar URL
Get a single avatar URL for a given email addres. If user with matching public
email address is not found, results from external avatar services are returned.
This endpoint can be accessed without authentication. In case public visibility
is restricted, response will be `403 Forbidden` when unauthenticated.
```
GET /avatar?email=admin@example.com
```
| Attribute | Type | Required | Description |
| --------- | ------- | -------- | --------------------- |
| `email` | string | yes | Public email address of the user |
| `size` | integer | no | Single pixel dimension (since images are squares). Only used for avatar lookups at `Gravatar` or at the configured `Libravatar` server |
```bash
curl https://gitlab.example.com/api/v4/avatar?email=admin@example.com
```
Example response:
```json
{
"avatar_url": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon"
}
```
[ce-19121]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/19121
...@@ -102,6 +102,7 @@ POST /projects/:id/pipeline ...@@ -102,6 +102,7 @@ POST /projects/:id/pipeline
|------------|---------|----------|---------------------| |------------|---------|----------|---------------------|
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user | | `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
| `ref` | string | yes | Reference to commit | | `ref` | string | yes | Reference to commit |
| `variables` | array | no | An array containing the variables available in the pipeline, matching the structure [{ 'key' => 'UPLOAD_TO_S3', 'value' => 'true' }] |
``` ```
curl --request POST --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" "https://gitlab.example.com/api/v4/projects/1/pipeline?ref=master" curl --request POST --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" "https://gitlab.example.com/api/v4/projects/1/pipeline?ref=master"
......
...@@ -176,3 +176,20 @@ git push -u origin update-project-templates ...@@ -176,3 +176,20 @@ git push -u origin update-project-templates
``` ```
Now create a merge request and merge that to master. Now create a merge request and merge that to master.
## Generate route lists
To see the full list of API routes, you can run:
```shell
bundle exec rake grape:path_helpers
```
For the Rails controllers, run:
```shell
bundle exec rake routes
```
Since these take some time to create, it's often helpful to save the output to
a file for quick reference.
...@@ -80,8 +80,8 @@ More information can be found on the [yarn website](https://yarnpkg.com/en/docs/ ...@@ -80,8 +80,8 @@ More information can be found on the [yarn website](https://yarnpkg.com/en/docs/
### 5. Update Go ### 5. Update Go
NOTE: GitLab 9.2 and higher only supports Go 1.8.3 and dropped support for Go NOTE: GitLab 9.2 and higher only supports Go 1.9 and dropped support for Go
1.5.x through 1.7.x. Be sure to upgrade your installation if necessary. 1.5.x through 1.8.x. Be sure to upgrade your installation if necessary.
You can check which version you are running with `go version`. You can check which version you are running with `go version`.
...@@ -91,11 +91,11 @@ Download and install Go: ...@@ -91,11 +91,11 @@ Download and install Go:
# Remove former Go installation folder # Remove former Go installation folder
sudo rm -rf /usr/local/go sudo rm -rf /usr/local/go
curl --remote-name --progress https://storage.googleapis.com/golang/go1.8.3.linux-amd64.tar.gz curl --remote-name --progress https://storage.googleapis.com/golang/go1.9.linux-amd64.tar.gz
echo '1862f4c3d3907e59b04a757cfda0ea7aa9ef39274af99a784f5be843c80c6772 go1.8.3.linux-amd64.tar.gz' | shasum -a256 -c - && \ echo 'd70eadefce8e160638a9a6db97f7192d8463069ab33138893ad3bf31b0650a79 go1.9.linux-amd64.tar.gz' | shasum -a256 -c - && \
sudo tar -C /usr/local -xzf go1.8.3.linux-amd64.tar.gz sudo tar -C /usr/local -xzf go1.9.linux-amd64.tar.gz
sudo ln -sf /usr/local/go/bin/{go,godoc,gofmt} /usr/local/bin/ sudo ln -sf /usr/local/go/bin/{go,godoc,gofmt} /usr/local/bin/
rm go1.8.3.linux-amd64.tar.gz rm go1.9.linux-amd64.tar.gz
``` ```
### 6. Get latest code ### 6. Get latest code
......
...@@ -143,6 +143,24 @@ docker login registry.example.com -u <your_username> -p <your_access_token> ...@@ -143,6 +143,24 @@ docker login registry.example.com -u <your_username> -p <your_access_token>
for errors (e.g. `/var/log/gitlab/gitlab-rails/production.log`). You may be able to find clues for errors (e.g. `/var/log/gitlab/gitlab-rails/production.log`). You may be able to find clues
there. there.
#### Enable the registry debug server
The optional debug server can be enabled by setting the registry debug address
in your `gitlab.rb` configuration.
```ruby
registry['debug_addr'] = "localhost:5001"
```
After adding the setting, [reconfigure] GitLab to apply the change.
Use curl to request debug output from the debug server:
```bash
curl localhost:5001/debug/health
curl localhost:5001/debug/vars
```
### Advanced Troubleshooting ### Advanced Troubleshooting
>**NOTE:** The following section is only recommended for experts. >**NOTE:** The following section is only recommended for experts.
...@@ -275,3 +293,4 @@ Once the right permissions were set, the error will go away. ...@@ -275,3 +293,4 @@ Once the right permissions were set, the error will go away.
[docker-docs]: https://docs.docker.com/engine/userguide/intro/ [docker-docs]: https://docs.docker.com/engine/userguide/intro/
[pat]: ../profile/personal_access_tokens.md [pat]: ../profile/personal_access_tokens.md
[pdt]: ../project/deploy_tokens/index.md [pdt]: ../project/deploy_tokens/index.md
[reconfigure]: ../../administration/restart_gitlab.md#omnibus-gitlab-reconfigure
\ No newline at end of file
...@@ -90,6 +90,7 @@ module API ...@@ -90,6 +90,7 @@ module API
# Keep in alphabetical order # Keep in alphabetical order
mount ::API::AccessRequests mount ::API::AccessRequests
mount ::API::Applications mount ::API::Applications
mount ::API::Avatar
mount ::API::AwardEmoji mount ::API::AwardEmoji
mount ::API::Badges mount ::API::Badges
mount ::API::Boards mount ::API::Boards
......
module API
class Avatar < Grape::API
resource :avatar do
desc 'Return avatar url for a user' do
success Entities::Avatar
end
params do
requires :email, type: String, desc: 'Public email address of the user'
optional :size, type: Integer, desc: 'Single pixel dimension for Gravatar images'
end
get do
forbidden!('Unauthorized access') unless can?(current_user, :read_users_list)
user = User.find_by_public_email(params[:email])
user ||= User.new(email: params[:email])
present user, with: Entities::Avatar, size: params[:size]
end
end
end
end
...@@ -722,6 +722,12 @@ module API ...@@ -722,6 +722,12 @@ module API
expose :notes, using: Entities::Note expose :notes, using: Entities::Note
end end
class Avatar < Grape::Entity
expose :avatar_url do |avatarable, options|
avatarable.avatar_url(only_path: false, size: options[:size])
end
end
class AwardEmoji < Grape::Entity class AwardEmoji < Grape::Entity
expose :id expose :id
expose :name expose :name
......
...@@ -41,15 +41,20 @@ module API ...@@ -41,15 +41,20 @@ module API
end end
params do params do
requires :ref, type: String, desc: 'Reference' requires :ref, type: String, desc: 'Reference'
optional :variables, Array, desc: 'Array of variables available in the pipeline'
end end
post ':id/pipeline' do post ':id/pipeline' do
Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42124') Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42124')
authorize! :create_pipeline, user_project authorize! :create_pipeline, user_project
pipeline_params = declared_params(include_missing: false)
.merge(variables_attributes: params[:variables])
.except(:variables)
new_pipeline = Ci::CreatePipelineService.new(user_project, new_pipeline = Ci::CreatePipelineService.new(user_project,
current_user, current_user,
declared_params(include_missing: false)) pipeline_params)
.execute(:api, ignore_skip_ci: true, save_on_errors: false) .execute(:api, ignore_skip_ci: true, save_on_errors: false)
if new_pipeline.persisted? if new_pipeline.persisted?
......
module Gitlab module Gitlab
module Ci module Ci
class Trace class Trace
include ExclusiveLeaseGuard
LEASE_TIMEOUT = 1.hour
ArchiveError = Class.new(StandardError) ArchiveError = Class.new(StandardError)
attr_reader :job attr_reader :job
...@@ -105,6 +109,14 @@ module Gitlab ...@@ -105,6 +109,14 @@ module Gitlab
end end
def archive! def archive!
try_obtain_lease do
unsafe_archive!
end
end
private
def unsafe_archive!
raise ArchiveError, 'Already archived' if trace_artifact raise ArchiveError, 'Already archived' if trace_artifact
raise ArchiveError, 'Job is not finished yet' unless job.complete? raise ArchiveError, 'Job is not finished yet' unless job.complete?
...@@ -126,8 +138,6 @@ module Gitlab ...@@ -126,8 +138,6 @@ module Gitlab
end end
end end
private
def archive_stream!(stream) def archive_stream!(stream)
clone_file!(stream, JobArtifactUploader.workhorse_upload_path) do |clone_path| clone_file!(stream, JobArtifactUploader.workhorse_upload_path) do |clone_path|
create_build_trace!(job, clone_path) create_build_trace!(job, clone_path)
...@@ -206,6 +216,16 @@ module Gitlab ...@@ -206,6 +216,16 @@ module Gitlab
def trace_artifact def trace_artifact
job.job_artifacts_trace job.job_artifacts_trace
end end
# For ExclusiveLeaseGuard concern
def lease_key
@lease_key ||= "trace:archive:#{job.id}"
end
# For ExclusiveLeaseGuard concern
def lease_timeout
LEASE_TIMEOUT
end
end end
end end
end end
...@@ -60,6 +60,9 @@ module Gitlab ...@@ -60,6 +60,9 @@ module Gitlab
# Some weird thing? # Some weird thing?
return nil unless commit_id.is_a?(String) return nil unless commit_id.is_a?(String)
# This saves us an RPC round trip.
return nil if commit_id.include?(':')
commit = repo.gitaly_migrate(:find_commit) do |is_enabled| commit = repo.gitaly_migrate(:find_commit) do |is_enabled|
if is_enabled if is_enabled
repo.gitaly_commit_client.find_commit(commit_id) repo.gitaly_commit_client.find_commit(commit_id)
......
...@@ -191,6 +191,8 @@ module Gitlab ...@@ -191,6 +191,8 @@ module Gitlab
metadata['call_site'] = feature.to_s if feature metadata['call_site'] = feature.to_s if feature
metadata['gitaly-servers'] = address_metadata(remote_storage) if remote_storage metadata['gitaly-servers'] = address_metadata(remote_storage) if remote_storage
metadata.merge!(server_feature_flags)
result = { metadata: metadata } result = { metadata: metadata }
# nil timeout indicates that we should use the default # nil timeout indicates that we should use the default
...@@ -209,6 +211,14 @@ module Gitlab ...@@ -209,6 +211,14 @@ module Gitlab
result result
end end
SERVER_FEATURE_FLAGS = %w[gogit_findcommit].freeze
def self.server_feature_flags
SERVER_FEATURE_FLAGS.map do |f|
["gitaly-feature-#{f.tr('_', '-')}", feature_enabled?(f).to_s]
end.to_h
end
def self.token(storage) def self.token(storage)
params = Gitlab.config.repositories.storages[storage] params = Gitlab.config.repositories.storages[storage]
raise "storage not found: #{storage.inspect}" if params.nil? raise "storage not found: #{storage.inspect}" if params.nil?
...@@ -243,6 +253,10 @@ module Gitlab ...@@ -243,6 +253,10 @@ module Gitlab
else else
false false
end end
rescue => ex
# During application startup feature lookups in SQL can fail
Rails.logger.warn "exception while checking Gitaly feature status for #{feature_name}: #{ex}"
false
end end
# opt_into_all_features? returns true when the current environment # opt_into_all_features? returns true when the current environment
......
...@@ -54,7 +54,11 @@ module Gitlab ...@@ -54,7 +54,11 @@ module Gitlab
fingerprints = CurrentKeyChain.fingerprints_from_key(key) fingerprints = CurrentKeyChain.fingerprints_from_key(key)
GPGME::Key.find(:public, fingerprints).flat_map do |raw_key| GPGME::Key.find(:public, fingerprints).flat_map do |raw_key|
raw_key.uids.map { |uid| { name: uid.name, email: uid.email.downcase } } raw_key.uids.each_with_object([]) do |uid, arr|
name = uid.name.force_encoding('UTF-8')
email = uid.email.force_encoding('UTF-8')
arr << { name: name, email: email.downcase } if name.valid_encoding? && email.valid_encoding?
end
end end
end end
end end
......
...@@ -35,7 +35,10 @@ module Gitlab ...@@ -35,7 +35,10 @@ module Gitlab
end end
def visibility_level def visibility_level
repo.private ? Gitlab::VisibilityLevel::PRIVATE : Gitlab::CurrentSettings.default_project_visibility visibility_level = repo.private ? Gitlab::VisibilityLevel::PRIVATE : Gitlab::VisibilityLevel::PUBLIC
visibility_level = Gitlab::CurrentSettings.default_project_visibility if Gitlab::CurrentSettings.restricted_visibility_levels.include?(visibility_level)
visibility_level
end end
# #
......
...@@ -28,6 +28,15 @@ RUN apt-get update -q && apt-get install -y google-chrome-stable && apt-get clea ...@@ -28,6 +28,15 @@ RUN apt-get update -q && apt-get install -y google-chrome-stable && apt-get clea
RUN wget -q https://chromedriver.storage.googleapis.com/$(wget -q -O - https://chromedriver.storage.googleapis.com/LATEST_RELEASE)/chromedriver_linux64.zip RUN wget -q https://chromedriver.storage.googleapis.com/$(wget -q -O - https://chromedriver.storage.googleapis.com/LATEST_RELEASE)/chromedriver_linux64.zip
RUN unzip chromedriver_linux64.zip -d /usr/local/bin RUN unzip chromedriver_linux64.zip -d /usr/local/bin
##
# Install gcloud and kubectl CLI used in Auto DevOps test to create K8s
# clusters
#
RUN export CLOUD_SDK_REPO="cloud-sdk-$(lsb_release -c -s)" && \
echo "deb http://packages.cloud.google.com/apt $CLOUD_SDK_REPO main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && \
curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - && \
apt-get update -y && apt-get install google-cloud-sdk kubectl -y
WORKDIR /home/qa WORKDIR /home/qa
COPY ./Gemfile* ./ COPY ./Gemfile* ./
RUN bundle install RUN bundle install
......
...@@ -41,6 +41,7 @@ module QA ...@@ -41,6 +41,7 @@ module QA
autoload :SecretVariable, 'qa/factory/resource/secret_variable' autoload :SecretVariable, 'qa/factory/resource/secret_variable'
autoload :Runner, 'qa/factory/resource/runner' autoload :Runner, 'qa/factory/resource/runner'
autoload :PersonalAccessToken, 'qa/factory/resource/personal_access_token' autoload :PersonalAccessToken, 'qa/factory/resource/personal_access_token'
autoload :KubernetesCluster, 'qa/factory/resource/kubernetes_cluster'
end end
module Repository module Repository
...@@ -72,6 +73,7 @@ module QA ...@@ -72,6 +73,7 @@ module QA
module Integration module Integration
autoload :LDAP, 'qa/scenario/test/integration/ldap' autoload :LDAP, 'qa/scenario/test/integration/ldap'
autoload :Kubernetes, 'qa/scenario/test/integration/kubernetes'
autoload :Mattermost, 'qa/scenario/test/integration/mattermost' autoload :Mattermost, 'qa/scenario/test/integration/mattermost'
end end
...@@ -150,6 +152,15 @@ module QA ...@@ -150,6 +152,15 @@ module QA
autoload :Show, 'qa/page/project/issue/show' autoload :Show, 'qa/page/project/issue/show'
autoload :Index, 'qa/page/project/issue/index' autoload :Index, 'qa/page/project/issue/index'
end end
module Operations
module Kubernetes
autoload :Index, 'qa/page/project/operations/kubernetes/index'
autoload :Add, 'qa/page/project/operations/kubernetes/add'
autoload :AddExisting, 'qa/page/project/operations/kubernetes/add_existing'
autoload :Show, 'qa/page/project/operations/kubernetes/show'
end
end
end end
module Profile module Profile
...@@ -195,6 +206,7 @@ module QA ...@@ -195,6 +206,7 @@ module QA
# #
module Service module Service
autoload :Shellout, 'qa/service/shellout' autoload :Shellout, 'qa/service/shellout'
autoload :KubernetesCluster, 'qa/service/kubernetes_cluster'
autoload :Omnibus, 'qa/service/omnibus' autoload :Omnibus, 'qa/service/omnibus'
autoload :Runner, 'qa/service/runner' autoload :Runner, 'qa/service/runner'
end end
......
...@@ -15,7 +15,7 @@ module QA ...@@ -15,7 +15,7 @@ module QA
def initialize def initialize
@file_name = 'file.txt' @file_name = 'file.txt'
@file_content = '# This is test project' @file_content = '# This is test project'
@commit_message = "Add #{@file_name}" @commit_message = "This is a test commit"
@branch_name = 'master' @branch_name = 'master'
@new_branch = true @new_branch = true
end end
...@@ -24,6 +24,12 @@ module QA ...@@ -24,6 +24,12 @@ module QA
@remote_branch ||= branch_name @remote_branch ||= branch_name
end end
def directory=(dir)
raise "Must set directory as a Pathname" unless dir.is_a?(Pathname)
@directory = dir
end
def fabricate! def fabricate!
project.visit! project.visit!
...@@ -43,7 +49,14 @@ module QA ...@@ -43,7 +49,14 @@ module QA
repository.checkout(branch_name) repository.checkout(branch_name)
end end
if @directory
@directory.each_child do |f|
repository.add_file(f.basename, f.read) if f.file?
end
else
repository.add_file(file_name, file_content) repository.add_file(file_name, file_content)
end
repository.commit(commit_message) repository.commit(commit_message)
repository.push_changes("#{branch_name}:#{remote_branch}") repository.push_changes("#{branch_name}:#{remote_branch}")
end end
......
require 'securerandom'
module QA
module Factory
module Resource
class KubernetesCluster < Factory::Base
attr_writer :project, :cluster,
:install_helm_tiller, :install_ingress, :install_prometheus, :install_runner
product :ingress_ip do
Page::Project::Operations::Kubernetes::Show.perform do |page|
page.ingress_ip
end
end
def fabricate!
@project.visit!
Page::Menu::Side.act { click_operations_kubernetes }
Page::Project::Operations::Kubernetes::Index.perform do |page|
page.add_kubernetes_cluster
end
Page::Project::Operations::Kubernetes::Add.perform do |page|
page.add_existing_cluster
end
Page::Project::Operations::Kubernetes::AddExisting.perform do |page|
page.set_cluster_name(@cluster.cluster_name)
page.set_api_url(@cluster.api_url)
page.set_ca_certificate(@cluster.ca_certificate)
page.set_token(@cluster.token)
page.add_cluster!
end
if @install_helm_tiller
Page::Project::Operations::Kubernetes::Show.perform do |page|
# Helm must be installed before everything else
page.install!(:helm)
page.await_installed(:helm)
page.install!(:ingress) if @install_ingress
page.await_installed(:ingress) if @install_ingress
page.install!(:prometheus) if @install_prometheus
page.await_installed(:prometheus) if @install_prometheus
page.install!(:runner) if @install_runner
page.await_installed(:runner) if @install_runner
end
end
end
end
end
end
end
source 'https://rubygems.org'
gem 'rack'
gem 'rake'
GEM
remote: https://rubygems.org/
specs:
rack (2.0.4)
rake (12.3.0)
PLATFORMS
ruby
DEPENDENCIES
rack
rake
BUNDLED WITH
1.16.1
require 'rake/testtask'
task default: %w[test]
task :test do
puts "ok"
end
run lambda { |env| [200, { 'Content-Type' => 'text/plain' }, StringIO.new("Hello World!\n")] }
...@@ -7,9 +7,11 @@ module QA ...@@ -7,9 +7,11 @@ module QA
element :settings_link, 'link_to edit_project_path' element :settings_link, 'link_to edit_project_path'
element :repository_link, "title: 'Repository'" element :repository_link, "title: 'Repository'"
element :pipelines_settings_link, "title: 'CI / CD'" element :pipelines_settings_link, "title: 'CI / CD'"
element :operations_kubernetes_link, "title: _('Kubernetes')"
element :issues_link, /link_to.*shortcuts-issues/ element :issues_link, /link_to.*shortcuts-issues/
element :issues_link_text, "Issues" element :issues_link_text, "Issues"
element :top_level_items, '.sidebar-top-level-items' element :top_level_items, '.sidebar-top-level-items'
element :operations_section, "class: 'shortcuts-operations'"
element :activity_link, "title: 'Activity'" element :activity_link, "title: 'Activity'"
end end
...@@ -33,6 +35,14 @@ module QA ...@@ -33,6 +35,14 @@ module QA
end end
end end
def click_operations_kubernetes
hover_operations do
within_submenu do
click_link('Kubernetes')
end
end
end
def click_ci_cd_pipelines def click_ci_cd_pipelines
within_sidebar do within_sidebar do
click_link('CI / CD') click_link('CI / CD')
...@@ -61,6 +71,14 @@ module QA ...@@ -61,6 +71,14 @@ module QA
end end
end end
def hover_operations
within_sidebar do
find('.shortcuts-operations').hover
yield
end
end
def within_sidebar def within_sidebar
page.within('.sidebar-top-level-items') do page.within('.sidebar-top-level-items') do
yield yield
......
module QA
module Page
module Project
module Operations
module Kubernetes
class Add < Page::Base
view 'app/views/projects/clusters/new.html.haml' do
element :add_kubernetes_cluster_button, "link_to s_('ClusterIntegration|Add an existing Kubernetes cluster')"
end
def add_existing_cluster
click_on 'Add an existing Kubernetes cluster'
end
end
end
end
end
end
end
module QA
module Page
module Project
module Operations
module Kubernetes
class AddExisting < Page::Base
view 'app/views/projects/clusters/user/_form.html.haml' do
element :cluster_name, 'text_field :name'
element :api_url, 'text_field :api_url'
element :ca_certificate, 'text_area :ca_cert'
element :token, 'text_field :token'
element :add_cluster_button, "submit s_('ClusterIntegration|Add Kubernetes cluster')"
end
def set_cluster_name(name)
fill_in 'cluster_name', with: name
end
def set_api_url(api_url)
fill_in 'cluster_platform_kubernetes_attributes_api_url', with: api_url
end
def set_ca_certificate(ca_certificate)
fill_in 'cluster_platform_kubernetes_attributes_ca_cert', with: ca_certificate
end
def set_token(token)
fill_in 'cluster_platform_kubernetes_attributes_token', with: token
end
def add_cluster!
click_on 'Add Kubernetes cluster'
end
end
end
end
end
end
end
module QA
module Page
module Project
module Operations
module Kubernetes
class Index < Page::Base
view 'app/views/projects/clusters/_empty_state.html.haml' do
element :add_kubernetes_cluster_button, "link_to s_('ClusterIntegration|Add Kubernetes cluster')"
end
def add_kubernetes_cluster
click_on 'Add Kubernetes cluster'
end
end
end
end
end
end
end
module QA
module Page
module Project
module Operations
module Kubernetes
class Show < Page::Base
view 'app/assets/javascripts/clusters/components/application_row.vue' do
element :application_row, 'js-cluster-application-row-${this.id}'
element :install_button, "s__('ClusterIntegration|Install')"
element :installed_button, "s__('ClusterIntegration|Installed')"
end
view 'app/assets/javascripts/clusters/components/applications.vue' do
element :ingress_ip_address, 'id="ingress-ip-address"'
end
def install!(application_name)
within(".js-cluster-application-row-#{application_name}") do
click_on 'Install'
end
end
def await_installed(application_name)
within(".js-cluster-application-row-#{application_name}") do
page.has_text?('Installed', wait: 300)
end
end
def ingress_ip
# We need to wait longer since it can take some time before the
# ip address is assigned for the ingress controller
page.find('#ingress-ip-address', wait: 500).value
end
end
end
end
end
end
end
...@@ -24,10 +24,10 @@ module QA::Page ...@@ -24,10 +24,10 @@ module QA::Page
end end
end end
def has_build?(name, status: :success) def has_build?(name, status: :success, wait:)
within('.pipeline-graph') do within('.pipeline-graph') do
within('.ci-job-component', text: name) do within('.ci-job-component', text: name) do
has_selector?(".ci-status-icon-#{status}") has_selector?(".ci-status-icon-#{status}", wait: wait)
end end
end end
end end
......
...@@ -8,6 +8,13 @@ module QA # rubocop:disable Naming/FileName ...@@ -8,6 +8,13 @@ module QA # rubocop:disable Naming/FileName
view 'app/views/projects/settings/ci_cd/show.html.haml' do view 'app/views/projects/settings/ci_cd/show.html.haml' do
element :runners_settings, 'Runners settings' element :runners_settings, 'Runners settings'
element :secret_variables, 'Variables' element :secret_variables, 'Variables'
element :auto_devops_section, 'Auto DevOps'
end
view 'app/views/projects/settings/ci_cd/_autodevops_form.html.haml' do
element :enable_auto_devops_button, 'Enable Auto DevOps'
element :domain_input, 'Domain'
element :save_changes_button, "submit 'Save changes'"
end end
def expand_runners_settings(&block) def expand_runners_settings(&block)
...@@ -21,6 +28,14 @@ module QA # rubocop:disable Naming/FileName ...@@ -21,6 +28,14 @@ module QA # rubocop:disable Naming/FileName
Settings::SecretVariables.perform(&block) Settings::SecretVariables.perform(&block)
end end
end end
def enable_auto_devops_with_domain(domain)
expand_section('Auto DevOps') do
choose 'Enable Auto DevOps'
fill_in 'Domain', with: domain
click_on 'Save changes'
end
end
end end
end end
end end
......
module QA
module Scenario
module Test
module Integration
# Scenario entry point that runs the standard instance suite restricted
# to specs tagged :kubernetes (tests that need a live K8s cluster).
class Kubernetes < Test::Instance
tags :kubernetes
end
end
end
end
end
require 'securerandom'
require 'mkmf'
module QA
module Service
# Provisions a disposable Kubernetes cluster on GKE via the `gcloud` CLI
# and exposes the credentials (API URL, CA certificate, service token)
# that GitLab needs to connect to it.
class KubernetesCluster
include Service::Shellout
# Populated by #create!; consumed when registering the cluster in GitLab.
attr_reader :api_url, :ca_certificate, :token
# Collision-resistant cluster name: random hex plus a UTC timestamp so
# concurrent QA runs do not clash. Memoized so all commands in this
# instance refer to the same cluster.
def cluster_name
@cluster_name ||= "qa-cluster-#{SecureRandom.hex(4)}-#{Time.now.utc.strftime("%Y%m%d%H%M%S")}"
end
# Creates the cluster and harvests its connection details. Returns self
# so callers can chain. Requires `gcloud` and `kubectl` on PATH and an
# authenticated gcloud account (or the GCLOUD_ACCOUNT_* env vars).
def create!
validate_dependencies
login_if_not_already_logged_in
# Legacy (ABAC) authorization is enabled so the default service-account
# token extracted below has sufficient rights for the tests.
shell <<~CMD.tr("\n", ' ')
gcloud container clusters
create #{cluster_name}
--enable-legacy-authorization
--zone us-central1-a
&& gcloud container clusters
get-credentials #{cluster_name}
CMD
@api_url = `kubectl config view --minify -o jsonpath='{.clusters[].cluster.server}'`
# Secret data comes back base64-encoded; decode before handing it on.
# NOTE(review): Base64 and Tempfile are used without an explicit
# `require` in this file — presumably loaded elsewhere in the QA
# harness; confirm.
@ca_certificate = Base64.decode64(`kubectl get secrets -o jsonpath="{.items[0].data['ca\\.crt']}"`)
@token = Base64.decode64(`kubectl get secrets -o jsonpath='{.items[0].data.token}'`)
self
end
# Deletes the cluster; --async so teardown does not block the QA run.
def remove!
shell("gcloud container clusters delete #{cluster_name} --quiet --async")
end
private
# Fails fast with a descriptive error when a required CLI is missing.
def validate_dependencies
find_executable('gcloud') || raise("You must first install `gcloud` executable to run these tests.")
find_executable('kubectl') || raise("You must first install `kubectl` executable to run these tests.")
end
# Reuses an already-active gcloud account when one exists; otherwise
# falls back to service-account credentials from environment variables.
def login_if_not_already_logged_in
account = `gcloud auth list --filter=status:ACTIVE --format="value(account)"`
if account.empty?
attempt_login_with_env_vars
else
puts "gcloud account found. Using: #{account} for creating K8s cluster."
end
end
# Activates a service account from GCLOUD_ACCOUNT_EMAIL/GCLOUD_ACCOUNT_KEY.
# The key is written to a temp file because gcloud expects a key *file*;
# the ensure clause unlinks it even if activation fails.
def attempt_login_with_env_vars
puts "No gcloud account. Attempting to login from env vars GCLOUD_ACCOUNT_EMAIL and GCLOUD_ACCOUNT_KEY."
gcloud_account_key = Tempfile.new('gcloud-account-key')
gcloud_account_key.write(ENV.fetch("GCLOUD_ACCOUNT_KEY"))
gcloud_account_key.close
gcloud_account_email = ENV.fetch("GCLOUD_ACCOUNT_EMAIL")
shell("gcloud auth activate-service-account #{gcloud_account_email} --key-file #{gcloud_account_key.path}")
ensure
gcloud_account_key && gcloud_account_key.unlink
end
end
end
end
...@@ -3,7 +3,6 @@ require 'securerandom' ...@@ -3,7 +3,6 @@ require 'securerandom'
module QA module QA
module Service module Service
class Runner class Runner
include Scenario::Actable
include Service::Shellout include Service::Shellout
attr_accessor :token, :address, :tags, :image attr_accessor :token, :address, :tags, :image
......
module QA
# End-to-end Auto DevOps flow: push an Auto DevOps-compatible Rack app,
# attach a freshly created GKE cluster, enable Auto DevOps with a wildcard
# domain, then expect the build/test/production pipeline jobs to succeed.
feature 'Auto Devops', :kubernetes do
after do
# Tear down the GKE cluster if one was created (safe navigation in case
# cluster creation itself failed before @cluster was assigned).
@cluster&.remove!
end
scenario 'user creates a new project and runs auto devops' do
Runtime::Browser.visit(:gitlab, Page::Main::Login)
Page::Main::Login.act { sign_in_using_credentials }
project = Factory::Resource::Project.fabricate! do |p|
p.name = 'project-with-autodevops'
p.description = 'Project with Auto Devops'
end
# Create Auto Devops compatible repo
Factory::Repository::Push.fabricate! do |push|
push.project = project
push.directory = Pathname
.new(__dir__)
.join('../../../fixtures/auto_devops_rack')
push.commit_message = 'Create Auto DevOps compatible rack application'
end
Page::Project::Show.act { wait_for_push }
# Create and connect K8s cluster
@cluster = Service::KubernetesCluster.new.create!
kubernetes_cluster = Factory::Resource::KubernetesCluster.fabricate! do |cluster|
cluster.project = project
cluster.cluster = @cluster
cluster.install_helm_tiller = true
cluster.install_ingress = true
cluster.install_prometheus = true
cluster.install_runner = true
end
project.visit!
Page::Menu::Side.act { click_ci_cd_settings }
# nip.io resolves "<ip>.nip.io" back to <ip>, giving the apps a usable
# wildcard domain without real DNS setup.
Page::Project::Settings::CICD.perform do |p|
p.enable_auto_devops_with_domain("#{kubernetes_cluster.ingress_ip}.nip.io")
end
project.visit!
Page::Menu::Side.act { click_ci_cd_pipelines }
Page::Project::Pipeline::Index.act { go_to_latest_pipeline }
# Deploys on a fresh cluster are slow, hence the generous 600s waits.
Page::Project::Pipeline::Show.perform do |pipeline|
expect(pipeline).to have_build('build', status: :success, wait: 600)
expect(pipeline).to have_build('test', status: :success, wait: 600)
expect(pipeline).to have_build('production', status: :success, wait: 600)
end
end
end
end
...@@ -80,6 +80,16 @@ describe Projects::MergeRequestsController do ...@@ -80,6 +80,16 @@ describe Projects::MergeRequestsController do
)) ))
end end
end end
context "that is invalid" do
let(:merge_request) { create(:invalid_merge_request, target_project: project, source_project: project) }
it "renders merge request page" do
go(format: :html)
expect(response).to be_success
end
end
end end
describe 'as json' do describe 'as json' do
...@@ -106,6 +116,16 @@ describe Projects::MergeRequestsController do ...@@ -106,6 +116,16 @@ describe Projects::MergeRequestsController do
expect(response).to match_response_schema('entities/merge_request_widget') expect(response).to match_response_schema('entities/merge_request_widget')
end end
end end
context "that is invalid" do
let(:merge_request) { create(:invalid_merge_request, target_project: project, source_project: project) }
it "renders merge request page" do
go(format: :json)
expect(response).to be_success
end
end
end end
describe "as diff" do describe "as diff" do
......
...@@ -54,6 +54,11 @@ FactoryBot.define do ...@@ -54,6 +54,11 @@ FactoryBot.define do
state :opened state :opened
end end
trait :invalid do
source_branch "feature_one"
target_branch "feature_two"
end
trait :locked do trait :locked do
state :locked state :locked
end end
...@@ -104,6 +109,7 @@ FactoryBot.define do ...@@ -104,6 +109,7 @@ FactoryBot.define do
factory :merged_merge_request, traits: [:merged] factory :merged_merge_request, traits: [:merged]
factory :closed_merge_request, traits: [:closed] factory :closed_merge_request, traits: [:closed]
factory :reopened_merge_request, traits: [:opened] factory :reopened_merge_request, traits: [:opened]
factory :invalid_merge_request, traits: [:invalid]
factory :merge_request_with_diffs, traits: [:with_diffs] factory :merge_request_with_diffs, traits: [:with_diffs]
factory :merge_request_with_approver, traits: [:with_approver] factory :merge_request_with_approver, traits: [:with_approver]
factory :merge_request_with_diff_notes do factory :merge_request_with_diff_notes do
......
...@@ -24,7 +24,7 @@ require 'erb' ...@@ -24,7 +24,7 @@ require 'erb'
# #
# See the MarkdownFeature class for setup details. # See the MarkdownFeature class for setup details.
describe 'GitLab Markdown' do describe 'GitLab Markdown', :aggregate_failures do
include Capybara::Node::Matchers include Capybara::Node::Matchers
include MarkupHelper include MarkupHelper
include MarkdownMatchers include MarkdownMatchers
...@@ -53,112 +53,102 @@ describe 'GitLab Markdown' do ...@@ -53,112 +53,102 @@ describe 'GitLab Markdown' do
# Shared behavior that all pipelines should exhibit # Shared behavior that all pipelines should exhibit
shared_examples 'all pipelines' do shared_examples 'all pipelines' do
describe 'Redcarpet extensions' do it 'includes Redcarpet extensions' do
it 'does not parse emphasis inside of words' do aggregate_failures 'does not parse emphasis inside of words' do
expect(doc.to_html).not_to match('foo<em>bar</em>baz') expect(doc.to_html).not_to match('foo<em>bar</em>baz')
end end
it 'parses table Markdown' do aggregate_failures 'parses table Markdown' do
aggregate_failures do
expect(doc).to have_selector('th:contains("Header")') expect(doc).to have_selector('th:contains("Header")')
expect(doc).to have_selector('th:contains("Row")') expect(doc).to have_selector('th:contains("Row")')
expect(doc).to have_selector('th:contains("Example")') expect(doc).to have_selector('th:contains("Example")')
end end
end
it 'allows Markdown in tables' do aggregate_failures 'allows Markdown in tables' do
expect(doc.at_css('td:contains("Baz")').children.to_html) expect(doc.at_css('td:contains("Baz")').children.to_html)
.to eq '<strong>Baz</strong>' .to eq '<strong>Baz</strong>'
end end
it 'parses fenced code blocks' do aggregate_failures 'parses fenced code blocks' do
aggregate_failures do
expect(doc).to have_selector('pre.code.highlight.js-syntax-highlight.c') expect(doc).to have_selector('pre.code.highlight.js-syntax-highlight.c')
expect(doc).to have_selector('pre.code.highlight.js-syntax-highlight.python') expect(doc).to have_selector('pre.code.highlight.js-syntax-highlight.python')
end end
end
it 'parses mermaid code block' do aggregate_failures 'parses mermaid code block' do
aggregate_failures do
expect(doc).to have_selector('pre[lang=mermaid] > code.js-render-mermaid') expect(doc).to have_selector('pre[lang=mermaid] > code.js-render-mermaid')
end end
end
it 'parses strikethroughs' do aggregate_failures 'parses strikethroughs' do
expect(doc).to have_selector(%{del:contains("and this text doesn't")}) expect(doc).to have_selector(%{del:contains("and this text doesn't")})
end end
it 'parses superscript' do aggregate_failures 'parses superscript' do
expect(doc).to have_selector('sup', count: 2) expect(doc).to have_selector('sup', count: 2)
end end
end end
describe 'SanitizationFilter' do it 'includes SanitizationFilter' do
it 'permits b elements' do aggregate_failures 'permits b elements' do
expect(doc).to have_selector('b:contains("b tag")') expect(doc).to have_selector('b:contains("b tag")')
end end
it 'permits em elements' do aggregate_failures 'permits em elements' do
expect(doc).to have_selector('em:contains("em tag")') expect(doc).to have_selector('em:contains("em tag")')
end end
it 'permits code elements' do aggregate_failures 'permits code elements' do
expect(doc).to have_selector('code:contains("code tag")') expect(doc).to have_selector('code:contains("code tag")')
end end
it 'permits kbd elements' do aggregate_failures 'permits kbd elements' do
expect(doc).to have_selector('kbd:contains("s")') expect(doc).to have_selector('kbd:contains("s")')
end end
it 'permits strike elements' do aggregate_failures 'permits strike elements' do
expect(doc).to have_selector('strike:contains(Emoji)') expect(doc).to have_selector('strike:contains(Emoji)')
end end
it 'permits img elements' do aggregate_failures 'permits img elements' do
expect(doc).to have_selector('img[data-src*="smile.png"]') expect(doc).to have_selector('img[data-src*="smile.png"]')
end end
it 'permits br elements' do aggregate_failures 'permits br elements' do
expect(doc).to have_selector('br') expect(doc).to have_selector('br')
end end
it 'permits hr elements' do aggregate_failures 'permits hr elements' do
expect(doc).to have_selector('hr') expect(doc).to have_selector('hr')
end end
it 'permits span elements' do aggregate_failures 'permits span elements' do
expect(doc).to have_selector('span:contains("span tag")') expect(doc).to have_selector('span:contains("span tag")')
end end
it 'permits details elements' do aggregate_failures 'permits details elements' do
expect(doc).to have_selector('details:contains("Hiding the details")') expect(doc).to have_selector('details:contains("Hiding the details")')
end end
it 'permits summary elements' do aggregate_failures 'permits summary elements' do
expect(doc).to have_selector('details summary:contains("collapsible")') expect(doc).to have_selector('details summary:contains("collapsible")')
end end
it 'permits style attribute in th elements' do aggregate_failures 'permits style attribute in th elements' do
aggregate_failures do
expect(doc.at_css('th:contains("Header")')['style']).to eq 'text-align: center' expect(doc.at_css('th:contains("Header")')['style']).to eq 'text-align: center'
expect(doc.at_css('th:contains("Row")')['style']).to eq 'text-align: right' expect(doc.at_css('th:contains("Row")')['style']).to eq 'text-align: right'
expect(doc.at_css('th:contains("Example")')['style']).to eq 'text-align: left' expect(doc.at_css('th:contains("Example")')['style']).to eq 'text-align: left'
end end
end
it 'permits style attribute in td elements' do aggregate_failures 'permits style attribute in td elements' do
aggregate_failures do
expect(doc.at_css('td:contains("Foo")')['style']).to eq 'text-align: center' expect(doc.at_css('td:contains("Foo")')['style']).to eq 'text-align: center'
expect(doc.at_css('td:contains("Bar")')['style']).to eq 'text-align: right' expect(doc.at_css('td:contains("Bar")')['style']).to eq 'text-align: right'
expect(doc.at_css('td:contains("Baz")')['style']).to eq 'text-align: left' expect(doc.at_css('td:contains("Baz")')['style']).to eq 'text-align: left'
end end
end
it 'removes `rel` attribute from links' do aggregate_failures 'removes `rel` attribute from links' do
expect(doc).not_to have_selector('a[rel="bookmark"]') expect(doc).not_to have_selector('a[rel="bookmark"]')
end end
it "removes `href` from `a` elements if it's fishy" do aggregate_failures "removes `href` from `a` elements if it's fishy" do
expect(doc).not_to have_selector('a[href*="javascript"]') expect(doc).not_to have_selector('a[href*="javascript"]')
end end
end end
...@@ -185,26 +175,26 @@ describe 'GitLab Markdown' do ...@@ -185,26 +175,26 @@ describe 'GitLab Markdown' do
end end
end end
describe 'ExternalLinkFilter' do it 'includes ExternalLinkFilter' do
it 'adds nofollow to external link' do aggregate_failures 'adds nofollow to external link' do
link = doc.at_css('a:contains("Google")') link = doc.at_css('a:contains("Google")')
expect(link.attr('rel')).to include('nofollow') expect(link.attr('rel')).to include('nofollow')
end end
it 'adds noreferrer to external link' do aggregate_failures 'adds noreferrer to external link' do
link = doc.at_css('a:contains("Google")') link = doc.at_css('a:contains("Google")')
expect(link.attr('rel')).to include('noreferrer') expect(link.attr('rel')).to include('noreferrer')
end end
it 'adds _blank to target attribute for external links' do aggregate_failures 'adds _blank to target attribute for external links' do
link = doc.at_css('a:contains("Google")') link = doc.at_css('a:contains("Google")')
expect(link.attr('target')).to match('_blank') expect(link.attr('target')).to match('_blank')
end end
it 'ignores internal link' do aggregate_failures 'ignores internal link' do
link = doc.at_css('a:contains("GitLab Root")') link = doc.at_css('a:contains("GitLab Root")')
expect(link.attr('rel')).not_to match 'nofollow' expect(link.attr('rel')).not_to match 'nofollow'
...@@ -220,24 +210,24 @@ describe 'GitLab Markdown' do ...@@ -220,24 +210,24 @@ describe 'GitLab Markdown' do
it_behaves_like 'all pipelines' it_behaves_like 'all pipelines'
it 'includes RelativeLinkFilter' do it 'includes custom filters' do
aggregate_failures 'RelativeLinkFilter' do
expect(doc).to parse_relative_links expect(doc).to parse_relative_links
end end
it 'includes EmojiFilter' do aggregate_failures 'EmojiFilter' do
expect(doc).to parse_emoji expect(doc).to parse_emoji
end end
it 'includes TableOfContentsFilter' do aggregate_failures 'TableOfContentsFilter' do
expect(doc).to create_header_links expect(doc).to create_header_links
end end
it 'includes AutolinkFilter' do aggregate_failures 'AutolinkFilter' do
expect(doc).to create_autolinks expect(doc).to create_autolinks
end end
it 'includes all reference filters' do aggregate_failures 'all reference filters' do
aggregate_failures do
expect(doc).to reference_users expect(doc).to reference_users
expect(doc).to reference_issues expect(doc).to reference_issues
expect(doc).to reference_merge_requests expect(doc).to reference_merge_requests
...@@ -248,24 +238,24 @@ describe 'GitLab Markdown' do ...@@ -248,24 +238,24 @@ describe 'GitLab Markdown' do
expect(doc).to reference_milestones expect(doc).to reference_milestones
expect(doc).to reference_epics expect(doc).to reference_epics
end end
end
it 'includes TaskListFilter' do aggregate_failures 'TaskListFilter' do
expect(doc).to parse_task_lists expect(doc).to parse_task_lists
end end
it 'includes InlineDiffFilter' do aggregate_failures 'InlineDiffFilter' do
expect(doc).to parse_inline_diffs expect(doc).to parse_inline_diffs
end end
it 'includes VideoLinkFilter' do aggregate_failures 'VideoLinkFilter' do
expect(doc).to parse_video_links expect(doc).to parse_video_links
end end
it 'includes ColorFilter' do aggregate_failures 'ColorFilter' do
expect(doc).to parse_colors expect(doc).to parse_colors
end end
end end
end
context 'wiki pipeline' do context 'wiki pipeline' do
before do before do
...@@ -282,24 +272,24 @@ describe 'GitLab Markdown' do ...@@ -282,24 +272,24 @@ describe 'GitLab Markdown' do
it_behaves_like 'all pipelines' it_behaves_like 'all pipelines'
it 'includes RelativeLinkFilter' do it 'includes custom filters' do
aggregate_failures 'RelativeLinkFilter' do
expect(doc).not_to parse_relative_links expect(doc).not_to parse_relative_links
end end
it 'includes EmojiFilter' do aggregate_failures 'EmojiFilter' do
expect(doc).to parse_emoji expect(doc).to parse_emoji
end end
it 'includes TableOfContentsFilter' do aggregate_failures 'TableOfContentsFilter' do
expect(doc).to create_header_links expect(doc).to create_header_links
end end
it 'includes AutolinkFilter' do aggregate_failures 'AutolinkFilter' do
expect(doc).to create_autolinks expect(doc).to create_autolinks
end end
it 'includes all reference filters' do aggregate_failures 'all reference filters' do
aggregate_failures do
expect(doc).to reference_users expect(doc).to reference_users
expect(doc).to reference_issues expect(doc).to reference_issues
expect(doc).to reference_merge_requests expect(doc).to reference_merge_requests
...@@ -310,28 +300,28 @@ describe 'GitLab Markdown' do ...@@ -310,28 +300,28 @@ describe 'GitLab Markdown' do
expect(doc).to reference_milestones expect(doc).to reference_milestones
expect(doc).to reference_epics expect(doc).to reference_epics
end end
end
it 'includes TaskListFilter' do aggregate_failures 'TaskListFilter' do
expect(doc).to parse_task_lists expect(doc).to parse_task_lists
end end
it 'includes GollumTagsFilter' do aggregate_failures 'GollumTagsFilter' do
expect(doc).to parse_gollum_tags expect(doc).to parse_gollum_tags
end end
it 'includes InlineDiffFilter' do aggregate_failures 'InlineDiffFilter' do
expect(doc).to parse_inline_diffs expect(doc).to parse_inline_diffs
end end
it 'includes VideoLinkFilter' do aggregate_failures 'VideoLinkFilter' do
expect(doc).to parse_video_links expect(doc).to parse_video_links
end end
it 'includes ColorFilter' do aggregate_failures 'ColorFilter' do
expect(doc).to parse_colors expect(doc).to parse_colors
end end
end end
end
# Fake a `current_user` helper # Fake a `current_user` helper
def current_user def current_user
......
...@@ -5,9 +5,9 @@ describe ProjectsHelper do ...@@ -5,9 +5,9 @@ describe ProjectsHelper do
describe "#project_status_css_class" do describe "#project_status_css_class" do
it "returns appropriate class" do it "returns appropriate class" do
expect(project_status_css_class("started")).to eq("active") expect(project_status_css_class("started")).to eq("table-active")
expect(project_status_css_class("failed")).to eq("danger") expect(project_status_css_class("failed")).to eq("table-danger")
expect(project_status_css_class("finished")).to eq("success") expect(project_status_css_class("finished")).to eq("table-success")
end end
end end
......
import Vue from 'vue';
import Description from '~/ide/components/jobs/detail/description.vue';
import mountComponent from '../../../../helpers/vue_mount_component_helper';
import { jobs } from '../../../mock_data';
// Specs for the read-only job description shown in the Web IDE jobs panel.
describe('IDE job description', () => {
  let vm;

  beforeEach(() => {
    // Mount the component with the first mocked job as its props.
    vm = mountComponent(Vue.extend(Description), { job: jobs[0] });
  });

  afterEach(() => {
    vm.$destroy();
  });

  it('renders job details', () => {
    const renderedText = vm.$el.textContent;

    expect(renderedText).toContain('#1');
    expect(renderedText).toContain('test');
  });

  it('renders CI icon', () => {
    const icon = vm.$el.querySelector('.ci-status-icon .ic-status_passed_borderless');

    expect(icon).not.toBe(null);
  });
});
import Vue from 'vue';
import ScrollButton from '~/ide/components/jobs/detail/scroll_button.vue';
import mountComponent from '../../../../helpers/vue_mount_component_helper';
// Specs for the scroll-to-top / scroll-to-bottom button in the IDE job log.
describe('IDE job log scroll button', () => {
  let vm;

  beforeEach(() => {
    vm = mountComponent(Vue.extend(ScrollButton), {
      direction: 'up',
      disabled: false,
    });
  });

  afterEach(() => {
    vm.$destroy();
  });

  describe('iconName', () => {
    for (const direction of ['up', 'down']) {
      it(`returns icon name for ${direction}`, () => {
        vm.direction = direction;

        expect(vm.iconName).toBe(`scroll_${direction}`);
      });
    }
  });

  describe('tooltipTitle', () => {
    it('returns title for up', () => {
      expect(vm.tooltipTitle).toBe('Scroll to top');
    });

    it('returns title for down', () => {
      vm.direction = 'down';

      expect(vm.tooltipTitle).toBe('Scroll to bottom');
    });
  });

  it('emits click event on click', () => {
    spyOn(vm, '$emit');
    vm.$el.querySelector('.btn-scroll').click();

    expect(vm.$emit).toHaveBeenCalledWith('click');
  });

  it('disables button when disabled is true', done => {
    vm.disabled = true;

    vm.$nextTick(() => {
      expect(vm.$el.querySelector('.btn-scroll').hasAttribute('disabled')).toBe(true);

      done();
    });
  });
});
import Vue from 'vue';
import JobDetail from '~/ide/components/jobs/detail.vue';
import { createStore } from '~/ide/stores';
import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
import { jobs } from '../../mock_data';
// Specs for the IDE jobs detail view (job log panel): mounting, trace
// fetching, log rendering/loading state, and the scroll controls.
describe('IDE jobs detail view', () => {
const Component = Vue.extend(JobDetail);
let vm;
beforeEach(() => {
const store = createStore();
// Seed the store with a job that is still loading and has some output.
store.state.pipelines.detailJob = {
...jobs[0],
isLoading: true,
output: 'testing',
rawPath: `${gl.TEST_HOST}/raw`,
};
vm = createComponentWithStore(Component, store);
// Stub the trace fetch before mounting so mounted() makes no HTTP call.
spyOn(vm, 'fetchJobTrace').and.returnValue(Promise.resolve());
vm = vm.$mount();
// The test DOM has no real scrolling; spy so scroll calls can be asserted.
spyOn(vm.$refs.buildTrace, 'scrollTo');
});
afterEach(() => {
vm.$destroy();
});
it('calls fetchJobTrace on mount', () => {
expect(vm.fetchJobTrace).toHaveBeenCalled();
});
it('scrolls to bottom on mount', done => {
// setTimeout flushes the resolved fetchJobTrace promise before asserting.
setTimeout(() => {
expect(vm.$refs.buildTrace.scrollTo).toHaveBeenCalled();
done();
});
});
it('renders job output', () => {
expect(vm.$el.querySelector('.bash').textContent).toContain('testing');
});
it('renders empty message output', done => {
vm.$store.state.pipelines.detailJob.output = '';
vm.$nextTick(() => {
expect(vm.$el.querySelector('.bash').textContent).toContain('No messages were logged');
done();
});
});
it('renders loading icon', () => {
expect(vm.$el.querySelector('.build-loader-animation')).not.toBe(null);
expect(vm.$el.querySelector('.build-loader-animation').style.display).toBe('');
});
it('hide loading icon when isLoading is false', done => {
vm.$store.state.pipelines.detailJob.isLoading = false;
vm.$nextTick(() => {
expect(vm.$el.querySelector('.build-loader-animation').style.display).toBe('none');
done();
});
});
it('resets detailJob when clicking header button', () => {
spyOn(vm, 'setDetailJob');
vm.$el.querySelector('.btn').click();
expect(vm.setDetailJob).toHaveBeenCalledWith(null);
});
it('renders raw path link', () => {
expect(vm.$el.querySelector('.controllers-buttons').getAttribute('href')).toBe(
`${gl.TEST_HOST}/raw`,
);
});
describe('scroll buttons', () => {
it('triggers scrollDown when clicking down button', done => {
spyOn(vm, 'scrollDown');
// The second .btn-scroll is the "down" button.
vm.$el.querySelectorAll('.btn-scroll')[1].click();
vm.$nextTick(() => {
expect(vm.scrollDown).toHaveBeenCalled();
done();
});
});
it('triggers scrollUp when clicking up button', done => {
spyOn(vm, 'scrollUp');
// scrollPos = 1 marks the log as scrolled to bottom, enabling "up".
vm.scrollPos = 1;
vm
.$nextTick()
.then(() => vm.$el.querySelector('.btn-scroll').click())
.then(() => vm.$nextTick())
.then(() => {
expect(vm.scrollUp).toHaveBeenCalled();
})
.then(done)
.catch(done.fail);
});
});
describe('scrollDown', () => {
it('scrolls build trace to bottom', () => {
spyOnProperty(vm.$refs.buildTrace, 'scrollHeight').and.returnValue(1000);
vm.scrollDown();
expect(vm.$refs.buildTrace.scrollTo).toHaveBeenCalledWith(0, 1000);
});
});
describe('scrollUp', () => {
it('scrolls build trace to top', () => {
vm.scrollUp();
expect(vm.$refs.buildTrace.scrollTo).toHaveBeenCalledWith(0, 0);
});
});
describe('scrollBuildLog', () => {
beforeEach(() => {
// Fixed geometry: viewport 100px tall over 200px of content, so
// scrollTop 100 is the bottom and scrollTop 0 is the top.
spyOnProperty(vm.$refs.buildTrace, 'offsetHeight').and.returnValue(100);
spyOnProperty(vm.$refs.buildTrace, 'scrollHeight').and.returnValue(200);
});
it('sets scrollPos to bottom when at the bottom', done => {
spyOnProperty(vm.$refs.buildTrace, 'scrollTop').and.returnValue(100);
vm.scrollBuildLog();
// setTimeout waits out the throttle on scrollBuildLog.
setTimeout(() => {
expect(vm.scrollPos).toBe(1);
done();
});
});
it('sets scrollPos to top when at the top', done => {
spyOnProperty(vm.$refs.buildTrace, 'scrollTop').and.returnValue(0);
vm.scrollPos = 1;
vm.scrollBuildLog();
setTimeout(() => {
expect(vm.scrollPos).toBe(0);
done();
});
});
it('resets scrollPos when not at top or bottom', done => {
spyOnProperty(vm.$refs.buildTrace, 'scrollTop').and.returnValue(10);
vm.scrollBuildLog();
setTimeout(() => {
expect(vm.scrollPos).toBe('');
done();
});
});
});
});
...@@ -26,4 +26,14 @@ describe('IDE jobs item', () => { ...@@ -26,4 +26,14 @@ describe('IDE jobs item', () => {
it('renders CI icon', () => { it('renders CI icon', () => {
expect(vm.$el.querySelector('.ic-status_passed_borderless')).not.toBe(null); expect(vm.$el.querySelector('.ic-status_passed_borderless')).not.toBe(null);
}); });
it('does not render view logs button if not started', done => {
vm.job.started = false;
vm.$nextTick(() => {
expect(vm.$el.querySelector('.btn')).toBe(null);
done();
});
});
}); });
...@@ -75,6 +75,7 @@ export const jobs = [ ...@@ -75,6 +75,7 @@ export const jobs = [
}, },
stage: 'test', stage: 'test',
duration: 1, duration: 1,
started: new Date(),
}, },
{ {
id: 2, id: 2,
...@@ -86,6 +87,7 @@ export const jobs = [ ...@@ -86,6 +87,7 @@ export const jobs = [
}, },
stage: 'test', stage: 'test',
duration: 1, duration: 1,
started: new Date(),
}, },
{ {
id: 3, id: 3,
...@@ -97,6 +99,7 @@ export const jobs = [ ...@@ -97,6 +99,7 @@ export const jobs = [
}, },
stage: 'test', stage: 'test',
duration: 1, duration: 1,
started: new Date(),
}, },
{ {
id: 4, id: 4,
...@@ -108,6 +111,7 @@ export const jobs = [ ...@@ -108,6 +111,7 @@ export const jobs = [
}, },
stage: 'build', stage: 'build',
duration: 1, duration: 1,
started: new Date(),
}, },
]; ];
......
...@@ -13,9 +13,15 @@ import actions, { ...@@ -13,9 +13,15 @@ import actions, {
receiveJobsSuccess, receiveJobsSuccess,
fetchJobs, fetchJobs,
toggleStageCollapsed, toggleStageCollapsed,
setDetailJob,
requestJobTrace,
receiveJobTraceError,
receiveJobTraceSuccess,
fetchJobTrace,
} from '~/ide/stores/modules/pipelines/actions'; } from '~/ide/stores/modules/pipelines/actions';
import state from '~/ide/stores/modules/pipelines/state'; import state from '~/ide/stores/modules/pipelines/state';
import * as types from '~/ide/stores/modules/pipelines/mutation_types'; import * as types from '~/ide/stores/modules/pipelines/mutation_types';
import { rightSidebarViews } from '~/ide/constants';
import testAction from '../../../../helpers/vuex_action_helper'; import testAction from '../../../../helpers/vuex_action_helper';
import { pipelines, jobs } from '../../../mock_data'; import { pipelines, jobs } from '../../../mock_data';
...@@ -281,4 +287,133 @@ describe('IDE pipelines actions', () => { ...@@ -281,4 +287,133 @@ describe('IDE pipelines actions', () => {
); );
}); });
}); });
describe('setDetailJob', () => {
it('commits job', done => {
testAction(
setDetailJob,
'job',
mockedState,
[{ type: types.SET_DETAIL_JOB, payload: 'job' }],
[{ type: 'setRightPane' }],
done,
);
});
it('dispatches setRightPane as pipeline when job is null', done => {
testAction(
setDetailJob,
null,
mockedState,
[{ type: types.SET_DETAIL_JOB }],
[{ type: 'setRightPane', payload: rightSidebarViews.pipelines }],
done,
);
});
it('dispatches setRightPane as job', done => {
testAction(
setDetailJob,
'job',
mockedState,
[{ type: types.SET_DETAIL_JOB }],
[{ type: 'setRightPane', payload: rightSidebarViews.jobsDetail }],
done,
);
});
});
describe('requestJobTrace', () => {
it('commits request', done => {
testAction(requestJobTrace, null, mockedState, [{ type: types.REQUEST_JOB_TRACE }], [], done);
});
});
describe('receiveJobTraceError', () => {
it('commits error', done => {
testAction(
receiveJobTraceError,
null,
mockedState,
[{ type: types.RECEIVE_JOB_TRACE_ERROR }],
[],
done,
);
});
it('creates flash message', () => {
const flashSpy = spyOnDependency(actions, 'flash');
receiveJobTraceError({ commit() {} });
expect(flashSpy).toHaveBeenCalled();
});
});
describe('receiveJobTraceSuccess', () => {
it('commits data', done => {
testAction(
receiveJobTraceSuccess,
'data',
mockedState,
[{ type: types.RECEIVE_JOB_TRACE_SUCCESS, payload: 'data' }],
[],
done,
);
});
});
describe('fetchJobTrace', () => {
beforeEach(() => {
mockedState.detailJob = {
path: `${gl.TEST_HOST}/project/builds`,
};
});
describe('success', () => {
beforeEach(() => {
spyOn(axios, 'get').and.callThrough();
mock.onGet(`${gl.TEST_HOST}/project/builds/trace`).replyOnce(200, { html: 'html' });
});
it('dispatches request', done => {
testAction(
fetchJobTrace,
null,
mockedState,
[],
[
{ type: 'requestJobTrace' },
{ type: 'receiveJobTraceSuccess', payload: { html: 'html' } },
],
done,
);
});
it('sends get request to correct URL', () => {
fetchJobTrace({ state: mockedState, dispatch() {} });
expect(axios.get).toHaveBeenCalledWith(`${gl.TEST_HOST}/project/builds/trace`, {
params: { format: 'json' },
});
});
});
describe('error', () => {
beforeEach(() => {
mock.onGet(`${gl.TEST_HOST}/project/builds/trace`).replyOnce(500);
});
it('dispatches error', done => {
testAction(
fetchJobTrace,
null,
mockedState,
[],
[{ type: 'requestJobTrace' }, { type: 'receiveJobTraceError' }],
done,
);
});
});
});
}); });
...@@ -147,6 +147,10 @@ describe('IDE pipelines mutations', () => { ...@@ -147,6 +147,10 @@ describe('IDE pipelines mutations', () => {
name: job.name, name: job.name,
status: job.status, status: job.status,
path: job.build_path, path: job.build_path,
rawPath: `${job.build_path}/raw`,
started: job.started,
isLoading: false,
output: '',
})), })),
); );
}); });
...@@ -171,4 +175,49 @@ describe('IDE pipelines mutations', () => { ...@@ -171,4 +175,49 @@ describe('IDE pipelines mutations', () => {
expect(mockedState.stages[0].isCollapsed).toBe(false); expect(mockedState.stages[0].isCollapsed).toBe(false);
}); });
}); });
describe(types.SET_DETAIL_JOB, () => {
it('sets detail job', () => {
mutations[types.SET_DETAIL_JOB](mockedState, jobs[0]);
expect(mockedState.detailJob).toEqual(jobs[0]);
});
});
describe(types.REQUEST_JOB_TRACE, () => {
beforeEach(() => {
mockedState.detailJob = { ...jobs[0] };
});
it('sets loading on detail job', () => {
mutations[types.REQUEST_JOB_TRACE](mockedState);
expect(mockedState.detailJob.isLoading).toBe(true);
});
});
describe(types.RECEIVE_JOB_TRACE_ERROR, () => {
beforeEach(() => {
mockedState.detailJob = { ...jobs[0], isLoading: true };
});
it('sets loading to false on detail job', () => {
mutations[types.RECEIVE_JOB_TRACE_ERROR](mockedState);
expect(mockedState.detailJob.isLoading).toBe(false);
});
});
describe(types.RECEIVE_JOB_TRACE_SUCCESS, () => {
beforeEach(() => {
mockedState.detailJob = { ...jobs[0], isLoading: true };
});
it('sets output on detail job', () => {
mutations[types.RECEIVE_JOB_TRACE_SUCCESS](mockedState, { html: 'html' });
expect(mockedState.detailJob.output).toBe('html');
expect(mockedState.detailJob.isLoading).toBe(false);
});
});
}); });
...@@ -45,7 +45,25 @@ describe('Importer Status', () => { ...@@ -45,7 +45,25 @@ describe('Importer Status', () => {
currentTarget: document.querySelector('.js-add-to-import'), currentTarget: document.querySelector('.js-add-to-import'),
}) })
.then(() => { .then(() => {
expect(document.querySelector('tr').classList.contains('active')).toEqual(true); expect(document.querySelector('tr').classList.contains('table-active')).toEqual(true);
done();
})
.catch(done.fail);
});
it('shows error message after failed POST request', (done) => {
appendSetFixtures('<div class="flash-container"></div>');
mock.onPost(importUrl).reply(422, {
errors: 'You forgot your lunch',
});
instance.addToImport({
currentTarget: document.querySelector('.js-add-to-import'),
})
.then(() => {
const flashMessage = document.querySelector('.flash-text');
expect(flashMessage.textContent.trim()).toEqual('An error occurred while importing project: You forgot your lunch');
done(); done();
}) })
.catch(done.fail); .catch(done.fail);
......
...@@ -13,6 +13,9 @@ describe('Job details header', () => { ...@@ -13,6 +13,9 @@ describe('Job details header', () => {
const threeWeeksAgo = new Date(); const threeWeeksAgo = new Date();
threeWeeksAgo.setDate(threeWeeksAgo.getDate() - 21); threeWeeksAgo.setDate(threeWeeksAgo.getDate() - 21);
const twoDaysAgo = new Date();
twoDaysAgo.setDate(twoDaysAgo.getDate() - 2);
props = { props = {
job: { job: {
status: { status: {
...@@ -31,7 +34,7 @@ describe('Job details header', () => { ...@@ -31,7 +34,7 @@ describe('Job details header', () => {
email: 'foo@bar.com', email: 'foo@bar.com',
avatar_url: 'link', avatar_url: 'link',
}, },
started: '2018-01-08T09:48:27.319Z', started: twoDaysAgo.toISOString(),
new_issue_path: 'path', new_issue_path: 'path',
}, },
isLoading: false, isLoading: false,
...@@ -69,7 +72,7 @@ describe('Job details header', () => { ...@@ -69,7 +72,7 @@ describe('Job details header', () => {
.querySelector('.header-main-content') .querySelector('.header-main-content')
.textContent.replace(/\s+/g, ' ') .textContent.replace(/\s+/g, ' ')
.trim(), .trim(),
).toEqual('failed Job #123 triggered 3 weeks ago by Foo'); ).toEqual('failed Job #123 triggered 2 days ago by Foo');
}); });
it('should render new issue link', () => { it('should render new issue link', () => {
......
...@@ -18,9 +18,7 @@ const createComponent = propsData => { ...@@ -18,9 +18,7 @@ const createComponent = propsData => {
}).$mount(); }).$mount();
}; };
const convertedMetrics = convertDatesMultipleSeries( const convertedMetrics = convertDatesMultipleSeries(singleRowMetricsMultipleSeries);
singleRowMetricsMultipleSeries,
);
describe('Graph', () => { describe('Graph', () => {
beforeEach(() => { beforeEach(() => {
...@@ -36,7 +34,7 @@ describe('Graph', () => { ...@@ -36,7 +34,7 @@ describe('Graph', () => {
projectPath, projectPath,
}); });
expect(component.$el.querySelector('.text-center').innerText.trim()).toBe( expect(component.$el.querySelector('.prometheus-graph-title').innerText.trim()).toBe(
component.graphData.title, component.graphData.title,
); );
}); });
...@@ -52,9 +50,7 @@ describe('Graph', () => { ...@@ -52,9 +50,7 @@ describe('Graph', () => {
}); });
const transformedHeight = `${component.graphHeight - 100}`; const transformedHeight = `${component.graphHeight - 100}`;
expect(component.axisTransform.indexOf(transformedHeight)).not.toEqual( expect(component.axisTransform.indexOf(transformedHeight)).not.toEqual(-1);
-1,
);
}); });
it('outerViewBox gets a width and height property based on the DOM size of the element', () => { it('outerViewBox gets a width and height property based on the DOM size of the element', () => {
......
...@@ -190,4 +190,37 @@ describe('GlModal', () => { ...@@ -190,4 +190,37 @@ describe('GlModal', () => {
}); });
}); });
}); });
describe('handling sizes', () => {
it('should render modal-sm', () => {
vm = mountComponent(modalComponent, {
modalSize: 'sm',
});
expect(vm.$el.querySelector('.modal-dialog').classList.contains('modal-sm')).toEqual(true);
});
it('should render modal-lg', () => {
vm = mountComponent(modalComponent, {
modalSize: 'lg',
});
expect(vm.$el.querySelector('.modal-dialog').classList.contains('modal-lg')).toEqual(true);
});
it('should not add modal size classes when md size is passed', () => {
vm = mountComponent(modalComponent, {
modalSize: 'md',
});
expect(vm.$el.querySelector('.modal-dialog').classList.contains('modal-md')).toEqual(false);
});
it('should not add modal size classes by default', () => {
vm = mountComponent(modalComponent, {});
expect(vm.$el.querySelector('.modal-dialog').classList.contains('modal-sm')).toEqual(false);
expect(vm.$el.querySelector('.modal-dialog').classList.contains('modal-lg')).toEqual(false);
});
});
}); });
require 'spec_helper' require 'spec_helper'
describe Gitlab::Ci::Trace, :clean_gitlab_redis_cache do describe Gitlab::Ci::Trace, :clean_gitlab_redis_shared_state do
let(:build) { create(:ci_build) } let(:build) { create(:ci_build) }
let(:trace) { described_class.new(build) } let(:trace) { described_class.new(build) }
......
...@@ -74,6 +74,19 @@ describe Gitlab::Gpg do ...@@ -74,6 +74,19 @@ describe Gitlab::Gpg do
email: 'nannie.bernhard@example.com' email: 'nannie.bernhard@example.com'
}]) }])
end end
it 'rejects non UTF-8 names and addresses' do
public_key = double(:key)
fingerprints = double(:fingerprints)
email = "\xEEch@test.com".force_encoding('ASCII-8BIT')
uid = double(:uid, name: 'Test User', email: email)
raw_key = double(:raw_key, uids: [uid])
allow(Gitlab::Gpg::CurrentKeyChain).to receive(:fingerprints_from_key).with(public_key).and_return(fingerprints)
allow(GPGME::Key).to receive(:find).with(:public, anything).and_return([raw_key])
user_infos = described_class.user_infos_from_key(public_key)
expect(user_infos).to eq([])
end
end end
describe '.current_home_dir' do describe '.current_home_dir' do
......
...@@ -44,7 +44,34 @@ describe Gitlab::LegacyGithubImport::ProjectCreator do ...@@ -44,7 +44,34 @@ describe Gitlab::LegacyGithubImport::ProjectCreator do
end end
context 'when GitHub project is public' do context 'when GitHub project is public' do
it 'sets project visibility to public' do
repo.private = false
project = service.execute
expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
end
end
context 'when visibility level is restricted' do
context 'when GitHub project is private' do
before do before do
stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PRIVATE])
allow_any_instance_of(ApplicationSetting).to receive(:default_project_visibility).and_return(Gitlab::VisibilityLevel::INTERNAL)
end
it 'sets project visibility to the default project visibility' do
repo.private = true
project = service.execute
expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
end
end
context 'when GitHub project is public' do
before do
stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
allow_any_instance_of(ApplicationSetting).to receive(:default_project_visibility).and_return(Gitlab::VisibilityLevel::INTERNAL) allow_any_instance_of(ApplicationSetting).to receive(:default_project_visibility).and_return(Gitlab::VisibilityLevel::INTERNAL)
end end
...@@ -56,6 +83,7 @@ describe Gitlab::LegacyGithubImport::ProjectCreator do ...@@ -56,6 +83,7 @@ describe Gitlab::LegacyGithubImport::ProjectCreator do
expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL) expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
end end
end end
end
context 'when GitHub project has wiki' do context 'when GitHub project has wiki' do
it 'does not create the wiki repository' do it 'does not create the wiki repository' do
......
...@@ -35,8 +35,9 @@ describe Gitlab::SQL::Glob do ...@@ -35,8 +35,9 @@ describe Gitlab::SQL::Glob do
value = query("SELECT #{quote(string)} LIKE #{pattern}") value = query("SELECT #{quote(string)} LIKE #{pattern}")
.rows.flatten.first .rows.flatten.first
check = Gitlab.rails5? ? true : 't'
case value case value
when 't', 1 when check, 1
true true
else else
false false
......
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180603190921_migrate_object_storage_upload_sidekiq_queue.rb')
describe MigrateObjectStorageUploadSidekiqQueue, :sidekiq, :redis do
include Gitlab::Database::MigrationHelpers
context 'when there are jobs in the queue' do
it 'correctly migrates queue when migrating up' do
Sidekiq::Testing.disable! do
stubbed_worker(queue: 'object_storage_upload').perform_async('Something', [1])
stubbed_worker(queue: 'object_storage:object_storage_background_move').perform_async('Something', [1])
described_class.new.up
expect(sidekiq_queue_length('object_storage_upload')).to eq 0
expect(sidekiq_queue_length('object_storage:object_storage_background_move')).to eq 2
end
end
end
context 'when there are no jobs in the queues' do
it 'does not raise error when migrating up' do
expect { described_class.new.up }.not_to raise_error
end
end
def stubbed_worker(queue:)
Class.new do
include Sidekiq::Worker
sidekiq_options queue: queue
end
end
end
...@@ -117,6 +117,26 @@ describe Ci::Build do ...@@ -117,6 +117,26 @@ describe Ci::Build do
end end
end end
describe '.with_live_trace' do
subject { described_class.with_live_trace }
context 'when build has live trace' do
let!(:build) { create(:ci_build, :success, :trace_live) }
it 'selects the build' do
is_expected.to eq([build])
end
end
context 'when build does not have live trace' do
let!(:build) { create(:ci_build, :success, :trace_artifact) }
it 'does not select the build' do
is_expected.to be_empty
end
end
end
describe '#actionize' do describe '#actionize' do
context 'when build is a created' do context 'when build is a created' do
before do before do
......
require 'spec_helper'
describe API::Avatar do
let(:gravatar_service) { double('GravatarService') }
describe 'GET /avatar' do
context 'avatar uploaded to GitLab' do
context 'user with matching public email address' do
let(:user) { create(:user, :with_avatar, email: 'public@example.com', public_email: 'public@example.com') }
before do
user
end
it 'returns the avatar url' do
get api('/avatar'), { email: 'public@example.com' }
expect(response.status).to eq 200
expect(json_response['avatar_url']).to eql("#{::Settings.gitlab.base_url}#{user.avatar.local_url}")
end
end
context 'no user with matching public email address' do
before do
expect(GravatarService).to receive(:new).and_return(gravatar_service)
expect(gravatar_service).to(
receive(:execute)
.with('private@example.com', nil, 2, { username: nil })
.and_return('https://gravatar'))
end
it 'returns the avatar url from Gravatar' do
get api('/avatar'), { email: 'private@example.com' }
expect(response.status).to eq 200
expect(json_response['avatar_url']).to eq('https://gravatar')
end
end
end
context 'avatar uploaded to Gravatar' do
context 'user with matching public email address' do
let(:user) { create(:user, email: 'public@example.com', public_email: 'public@example.com') }
before do
user
expect(GravatarService).to receive(:new).and_return(gravatar_service)
expect(gravatar_service).to(
receive(:execute)
.with('public@example.com', nil, 2, { username: user.username })
.and_return('https://gravatar'))
end
it 'returns the avatar url from Gravatar' do
get api('/avatar'), { email: 'public@example.com' }
expect(response.status).to eq 200
expect(json_response['avatar_url']).to eq('https://gravatar')
end
end
context 'no user with matching public email address' do
before do
expect(GravatarService).to receive(:new).and_return(gravatar_service)
expect(gravatar_service).to(
receive(:execute)
.with('private@example.com', nil, 2, { username: nil })
.and_return('https://gravatar'))
end
it 'returns the avatar url from Gravatar' do
get api('/avatar'), { email: 'private@example.com' }
expect(response.status).to eq 200
expect(json_response['avatar_url']).to eq('https://gravatar')
end
end
context 'public visibility level restricted' do
let(:user) { create(:user, :with_avatar, email: 'public@example.com', public_email: 'public@example.com') }
before do
user
stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
end
context 'when authenticated' do
it 'returns the avatar url' do
get api('/avatar', user), { email: 'public@example.com' }
expect(response.status).to eq 200
expect(json_response['avatar_url']).to eql("#{::Settings.gitlab.base_url}#{user.avatar.local_url}")
end
end
context 'when unauthenticated' do
it_behaves_like '403 response' do
let(:request) { get api('/avatar'), { email: 'public@example.com' } }
end
end
end
end
end
end
...@@ -835,8 +835,7 @@ describe API::Internal do ...@@ -835,8 +835,7 @@ describe API::Internal do
end end
def push(key, project, protocol = 'ssh', env: nil) def push(key, project, protocol = 'ssh', env: nil)
post( params = {
api("/internal/allowed"),
changes: 'd14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master', changes: 'd14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master',
key_id: key.id, key_id: key.id,
project: project.full_path, project: project.full_path,
...@@ -845,7 +844,19 @@ describe API::Internal do ...@@ -845,7 +844,19 @@ describe API::Internal do
secret_token: secret_token, secret_token: secret_token,
protocol: protocol, protocol: protocol,
env: env env: env
}
if Gitlab.rails5?
post(
api("/internal/allowed"),
params: params
) )
else
post(
api("/internal/allowed"),
params
)
end
end end
def archive(key, project) def archive(key, project)
......
...@@ -285,6 +285,15 @@ describe API::Pipelines do ...@@ -285,6 +285,15 @@ describe API::Pipelines do
end end
describe 'POST /projects/:id/pipeline ' do describe 'POST /projects/:id/pipeline ' do
def expect_variables(variables, expected_variables)
variables.each_with_index do |variable, index|
expected_variable = expected_variables[index]
expect(variable.key).to eq(expected_variable['key'])
expect(variable.value).to eq(expected_variable['value'])
end
end
context 'authorized user' do context 'authorized user' do
context 'with gitlab-ci.yml' do context 'with gitlab-ci.yml' do
before do before do
...@@ -294,13 +303,62 @@ describe API::Pipelines do ...@@ -294,13 +303,62 @@ describe API::Pipelines do
it 'creates and returns a new pipeline' do it 'creates and returns a new pipeline' do
expect do expect do
post api("/projects/#{project.id}/pipeline", user), ref: project.default_branch post api("/projects/#{project.id}/pipeline", user), ref: project.default_branch
end.to change { Ci::Pipeline.count }.by(1) end.to change { project.pipelines.count }.by(1)
expect(response).to have_gitlab_http_status(201) expect(response).to have_gitlab_http_status(201)
expect(json_response).to be_a Hash expect(json_response).to be_a Hash
expect(json_response['sha']).to eq project.commit.id expect(json_response['sha']).to eq project.commit.id
end end
context 'variables given' do
let(:variables) { [{ 'key' => 'UPLOAD_TO_S3', 'value' => 'true' }] }
it 'creates and returns a new pipeline using the given variables' do
expect do
post api("/projects/#{project.id}/pipeline", user), ref: project.default_branch, variables: variables
end.to change { project.pipelines.count }.by(1)
expect_variables(project.pipelines.last.variables, variables)
expect(response).to have_gitlab_http_status(201)
expect(json_response).to be_a Hash
expect(json_response['sha']).to eq project.commit.id
expect(json_response).not_to have_key('variables')
end
end
describe 'using variables conditions' do
let(:variables) { [{ 'key' => 'STAGING', 'value' => 'true' }] }
before do
config = YAML.dump(test: { script: 'test', only: { variables: ['$STAGING'] } })
stub_ci_pipeline_yaml_file(config)
end
it 'creates and returns a new pipeline using the given variables' do
expect do
post api("/projects/#{project.id}/pipeline", user), ref: project.default_branch, variables: variables
end.to change { project.pipelines.count }.by(1)
expect_variables(project.pipelines.last.variables, variables)
expect(response).to have_gitlab_http_status(201)
expect(json_response).to be_a Hash
expect(json_response['sha']).to eq project.commit.id
expect(json_response).not_to have_key('variables')
end
context 'condition unmatch' do
let(:variables) { [{ 'key' => 'STAGING', 'value' => 'false' }] }
it "doesn't create a job" do
expect do
post api("/projects/#{project.id}/pipeline", user), ref: project.default_branch
end.not_to change { project.pipelines.count }
expect(response).to have_gitlab_http_status(400)
end
end
end
it 'fails when using an invalid ref' do it 'fails when using an invalid ref' do
post api("/projects/#{project.id}/pipeline", user), ref: 'invalid_ref' post api("/projects/#{project.id}/pipeline", user), ref: 'invalid_ref'
......
...@@ -118,14 +118,19 @@ shared_examples 'a GitHub-ish import controller: POST create' do ...@@ -118,14 +118,19 @@ shared_examples 'a GitHub-ish import controller: POST create' do
expect(response).to have_gitlab_http_status(200) expect(response).to have_gitlab_http_status(200)
end end
it 'returns 422 response when the project could not be imported' do it 'returns 422 response with the base error when the project could not be imported' do
project = build(:project)
project.errors.add(:name, 'is invalid')
project.errors.add(:path, 'is old')
allow(Gitlab::LegacyGithubImport::ProjectCreator) allow(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider) .to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider)
.and_return(double(execute: build(:project))) .and_return(double(execute: project))
post :create, format: :json post :create, format: :json
expect(response).to have_gitlab_http_status(422) expect(response).to have_gitlab_http_status(422)
expect(json_response['errors']).to eq('Name is invalid, Path is old')
end end
context "when the repository owner is the provider user" do context "when the repository owner is the provider user" do
......
...@@ -227,6 +227,42 @@ shared_examples_for 'common trace features' do ...@@ -227,6 +227,42 @@ shared_examples_for 'common trace features' do
end end
end end
end end
describe '#archive!' do
subject { trace.archive! }
context 'when build status is success' do
let!(:build) { create(:ci_build, :success, :trace_live) }
it 'does not have an archived trace yet' do
expect(build.job_artifacts_trace).to be_nil
end
context 'when archives' do
it 'has an archived trace' do
subject
build.reload
expect(build.job_artifacts_trace).to be_exist
end
context 'when another process has already been archiving', :clean_gitlab_redis_shared_state do
before do
Gitlab::ExclusiveLease.new("trace:archive:#{trace.job.id}", timeout: 1.hour).try_obtain
end
it 'blocks concurrent archiving' do
expect(Rails.logger).to receive(:error).with('Cannot obtain an exclusive lease. There must be another instance already in execution.')
subject
build.reload
expect(build.job_artifacts_trace).to be_nil
end
end
end
end
end
end end
shared_examples_for 'trace with disabled live trace feature' do shared_examples_for 'trace with disabled live trace feature' do
......
require 'spec_helper'
describe Ci::ArchiveTracesCronWorker do
subject { described_class.new.perform }
before do
stub_feature_flags(ci_enable_live_trace: true)
end
shared_examples_for 'archives trace' do
it do
subject
build.reload
expect(build.job_artifacts_trace).to be_exist
end
end
shared_examples_for 'does not archive trace' do
it do
subject
build.reload
expect(build.job_artifacts_trace).to be_nil
end
end
context 'when a job was succeeded' do
let!(:build) { create(:ci_build, :success, :trace_live) }
it_behaves_like 'archives trace'
context 'when archive raised an exception' do
let!(:build) { create(:ci_build, :success, :trace_artifact, :trace_live) }
let!(:build2) { create(:ci_build, :success, :trace_live) }
it 'archives valid targets' do
expect(Rails.logger).to receive(:error).with("Failed to archive stale live trace. id: #{build.id} message: Already archived")
subject
build2.reload
expect(build2.job_artifacts_trace).to be_exist
end
end
end
context 'when a job was cancelled' do
let!(:build) { create(:ci_build, :canceled, :trace_live) }
it_behaves_like 'archives trace'
end
context 'when a job is running' do
let!(:build) { create(:ci_build, :running, :trace_live) }
it_behaves_like 'does not archive trace'
end
end
...@@ -104,7 +104,7 @@ describe GitGarbageCollectWorker do ...@@ -104,7 +104,7 @@ describe GitGarbageCollectWorker do
it_should_behave_like 'flushing ref caches', true it_should_behave_like 'flushing ref caches', true
end end
context "with Gitaly turned off", :skip_gitaly_mock do context "with Gitaly turned off", :disable_gitaly do
it_should_behave_like 'flushing ref caches', false it_should_behave_like 'flushing ref caches', false
end end
......
...@@ -132,8 +132,10 @@ describe StuckCiJobsWorker do ...@@ -132,8 +132,10 @@ describe StuckCiJobsWorker do
end end
it 'cancels exclusive lease after worker perform' do it 'cancels exclusive lease after worker perform' do
expect(Gitlab::ExclusiveLease).to receive(:cancel).with(described_class::EXCLUSIVE_LEASE_KEY, exclusive_lease_uuid)
worker.perform worker.perform
expect(Gitlab::ExclusiveLease.new(described_class::EXCLUSIVE_LEASE_KEY, timeout: 1.hour))
.not_to be_exists
end end
end end
end end
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment