Commit e957ea4a authored by Kamil Trzcinski

Merge branch '38464-k8s-apps' of https://gitlab.com/gitlab-org/gitlab-ce into add-ingress-to-cluster-applications
parents 0a459db0 18ac8acb
 <script>
 import { mapActions, mapGetters } from 'vuex';
 import timeAgoMixin from '../../vue_shared/mixins/timeago';
+import skeletonLoadingContainer from '../../vue_shared/components/skeleton_loading_container.vue';

 export default {
   mixins: [
     timeAgoMixin,
   ],
+  components: {
+    skeletonLoadingContainer,
+  },
   props: {
     file: {
       type: Object,
@@ -16,6 +20,9 @@
     ...mapGetters([
       'isCollapsed',
     ]),
+    isSubmodule() {
+      return this.file.type === 'submodule';
+    },
     fileIcon() {
       return {
         'fa-spinner fa-spin': this.file.loading,
@@ -31,6 +38,9 @@
     shortId() {
       return this.file.id.substr(0, 8);
     },
+    submoduleColSpan() {
+      return !this.isCollapsed && this.isSubmodule ? 3 : 1;
+    },
   },
   methods: {
     ...mapActions([
@@ -44,7 +54,7 @@
   <tr
     class="file"
     @click.prevent="clickedTreeRow(file)">
-    <td>
+    <td :colspan="submoduleColSpan">
       <i
         class="fa fa-fw file-icon"
         :class="fileIcon"
@@ -58,7 +68,7 @@
       >
         {{ file.name }}
       </a>
-      <template v-if="file.type === 'submodule' && file.id">
+      <template v-if="isSubmodule && file.id">
        @
        <span class="commit-sha">
          <a
@@ -71,15 +81,20 @@
      </template>
    </td>
-    <template v-if="!isCollapsed">
+    <template v-if="!isCollapsed && !isSubmodule">
      <td class="hidden-sm hidden-xs">
        <a
+          v-if="file.lastCommit.message"
          @click.stop
          :href="file.lastCommit.url"
          class="commit-message"
        >
          {{ file.lastCommit.message }}
        </a>
+        <skeleton-loading-container
+          v-else
+          :small="true"
+        />
      </td>
      <td class="commit-update hidden-xs text-right">
@@ -89,6 +104,11 @@
        >
          {{ timeFormated(file.lastCommit.updatedAt) }}
        </span>
+        <skeleton-loading-container
+          v-else
+          class="animation-container-right"
+          :small="true"
+        />
      </td>
    </template>
  </tr>
......
 <script>
 import { mapGetters } from 'vuex';
+import skeletonLoadingContainer from '../../vue_shared/components/skeleton_loading_container.vue';

 export default {
+  components: {
+    skeletonLoadingContainer,
+  },
   computed: {
     ...mapGetters([
       'isCollapsed',
     ]),
   },
-  methods: {
-    lineOfCode(n) {
-      return `skeleton-line-${n}`;
-    },
-  },
 };
 </script>
@@ -21,36 +20,24 @@
     aria-label="Loading files"
   >
     <td>
-      <div
-        class="animation-container animation-container-small">
-        <div
-          v-for="n in 6"
-          :key="n"
-          :class="lineOfCode(n)">
-        </div>
-      </div>
+      <skeleton-loading-container
+        :small="true"
+      />
    </td>
    <template v-if="!isCollapsed">
      <td
        class="hidden-sm hidden-xs">
-        <div class="animation-container">
-          <div
-            v-for="n in 6"
-            :key="n"
-            :class="lineOfCode(n)">
-          </div>
-        </div>
+        <skeleton-loading-container
+          :small="true"
+        />
      </td>
      <td
        class="hidden-xs">
-        <div class="animation-container animation-container-small animation-container-right">
-          <div
-            v-for="n in 6"
-            :key="n"
-            :class="lineOfCode(n)">
-          </div>
-        </div>
+        <skeleton-loading-container
+          class="animation-container-right"
+          :small="true"
+        />
      </td>
    </template>
  </tr>
......
@@ -80,7 +80,7 @@ export default {
   />
   <repo-file
     v-for="(file, index) in treeList"
-    :key="index"
+    :key="file.key"
     :file="file"
   />
 </tbody>
......
@@ -30,4 +30,11 @@ export default {
   commit(projectId, payload) {
     return Api.commitMultiple(projectId, payload);
   },
+  getTreeLastCommit(endpoint) {
+    return Vue.http.get(endpoint, {
+      params: {
+        format: 'json',
+      },
+    });
+  },
 };
@@ -64,7 +64,7 @@ export const checkCommitStatus = ({ state }) => service.getBranchData(
   })
   .catch(() => flash('Error checking branch data. Please try again.'));

-export const commitChanges = ({ commit, state, dispatch }, { payload, newMr }) =>
+export const commitChanges = ({ commit, state, dispatch, getters }, { payload, newMr }) =>
   service.commit(state.project.id, payload)
     .then((data) => {
       const { branch } = payload;
@@ -73,12 +73,28 @@
         return;
       }

+      const lastCommit = {
+        commit_path: `${state.project.url}/commit/${data.id}`,
+        commit: {
+          message: data.message,
+          authored_date: data.committed_date,
+        },
+      };
+
       flash(`Your changes have been committed. Commit ${data.short_id} with ${data.stats.additions} additions, ${data.stats.deletions} deletions.`, 'notice');

       if (newMr) {
         redirectToUrl(`${state.endpoints.newMergeRequestUrl}${branch}`);
       } else {
         commit(types.SET_COMMIT_REF, data.id);

+        getters.changedFiles.forEach((entry) => {
+          commit(types.SET_LAST_COMMIT_DATA, {
+            entry,
+            lastCommit,
+          });
+        });
+
         dispatch('discardAllChanges');
         dispatch('closeAllFiles');
         dispatch('toggleEditMode');
......
@@ -27,6 +27,8 @@ export const closeFile = ({ commit, state, dispatch }, { file, force = false })
   } else if (!state.openFiles.length) {
     pushState(file.parentTreeUrl);
   }
+
+  dispatch('getLastCommitData');
 };

 export const setFileActive = ({ commit, state, getters, dispatch }, file) => {
......
@@ -7,10 +7,11 @@ import {
   setPageTitle,
   findEntry,
   createTemp,
+  createOrMergeEntry,
 } from '../utils';

 export const getTreeData = (
-  { commit, state },
+  { commit, state, dispatch },
   { endpoint = state.endpoints.rootEndpoint, tree = state } = {},
 ) => {
   commit(types.TOGGLE_LOADING, tree);
@@ -24,14 +25,20 @@
       return res.json();
     })
     .then((data) => {
+      const prevLastCommitPath = tree.lastCommitPath;
       if (!state.isInitialRoot) {
         commit(types.SET_ROOT, data.path === '/');
       }

-      commit(types.SET_DIRECTORY_DATA, { data, tree });
+      dispatch('updateDirectoryData', { data, tree });
       commit(types.SET_PARENT_TREE_URL, data.parent_tree_url);
+      commit(types.SET_LAST_COMMIT_URL, { tree, url: data.last_commit_path });
       commit(types.TOGGLE_LOADING, tree);

+      if (prevLastCommitPath !== null) {
+        dispatch('getLastCommitData', tree);
+      }
+
       pushState(endpoint);
     })
     .catch(() => {
@@ -48,7 +55,7 @@ export const toggleTreeOpen = ({ commit, dispatch }, { endpoint, tree }) => {
     pushState(tree.parentTreeUrl);

     commit(types.SET_PREVIOUS_URL, tree.parentTreeUrl);
-    commit(types.SET_DIRECTORY_DATA, { data, tree });
+    dispatch('updateDirectoryData', { data, tree });
   } else {
     commit(types.SET_PREVIOUS_URL, endpoint);
     dispatch('getTreeData', { endpoint, tree });
@@ -108,3 +115,48 @@ export const createTempTree = ({ state, commit, dispatch }, name) => {
     });
   }
 };
+
+export const getLastCommitData = ({ state, commit, dispatch, getters }, tree = state) => {
+  if (tree.lastCommitPath === null || getters.isCollapsed) return;
+
+  service.getTreeLastCommit(tree.lastCommitPath)
+    .then((res) => {
+      const lastCommitPath = normalizeHeaders(res.headers)['MORE-LOGS-URL'] || null;
+
+      commit(types.SET_LAST_COMMIT_URL, { tree, url: lastCommitPath });
+
+      return res.json();
+    })
+    .then((data) => {
+      data.forEach((lastCommit) => {
+        const entry = findEntry(tree, lastCommit.type, lastCommit.file_name);
+
+        if (entry) {
+          commit(types.SET_LAST_COMMIT_DATA, { entry, lastCommit });
+        }
+      });
+
+      dispatch('getLastCommitData', tree);
+    })
+    .catch(() => flash('Error fetching log data.'));
+};
+
+export const updateDirectoryData = ({ commit, state }, { data, tree }) => {
+  const level = tree.level !== undefined ? tree.level + 1 : 0;
+  const parentTreeUrl = data.parent_tree_url ? `${data.parent_tree_url}${data.path}` : state.endpoints.rootUrl;
+  const createEntry = (entry, type) => createOrMergeEntry({
+    tree,
+    entry,
+    level,
+    type,
+    parentTreeUrl,
+  });
+
+  const formattedData = [
+    ...data.trees.map(t => createEntry(t, 'tree')),
+    ...data.submodules.map(m => createEntry(m, 'submodule')),
+    ...data.blobs.map(b => createEntry(b, 'blob')),
+  ];
+
+  commit(types.SET_DIRECTORY_DATA, { tree, data: formattedData });
+};
@@ -4,11 +4,13 @@ export const SET_COMMIT_REF = 'SET_COMMIT_REF';
 export const SET_PARENT_TREE_URL = 'SET_PARENT_TREE_URL';
 export const SET_ROOT = 'SET_ROOT';
 export const SET_PREVIOUS_URL = 'SET_PREVIOUS_URL';
+export const SET_LAST_COMMIT_DATA = 'SET_LAST_COMMIT_DATA';

 // Tree mutation types
 export const SET_DIRECTORY_DATA = 'SET_DIRECTORY_DATA';
 export const TOGGLE_TREE_OPEN = 'TOGGLE_TREE_OPEN';
 export const CREATE_TMP_TREE = 'CREATE_TMP_TREE';
+export const SET_LAST_COMMIT_URL = 'SET_LAST_COMMIT_URL';

 // File mutation types
 export const SET_FILE_DATA = 'SET_FILE_DATA';
......
@@ -48,6 +48,13 @@ export default {
       previousUrl,
     });
   },
+  [types.SET_LAST_COMMIT_DATA](state, { entry, lastCommit }) {
+    Object.assign(entry.lastCommit, {
+      url: lastCommit.commit_path,
+      message: lastCommit.commit.message,
+      updatedAt: lastCommit.commit.authored_date,
+    });
+  },
   ...fileMutations,
   ...treeMutations,
   ...branchMutations,
......
 import * as types from '../mutation_types';
-import * as utils from '../utils';

 export default {
   [types.TOGGLE_TREE_OPEN](state, tree) {
@@ -8,30 +7,8 @@ export default {
     });
   },
   [types.SET_DIRECTORY_DATA](state, { data, tree }) {
-    const level = tree.level !== undefined ? tree.level + 1 : 0;
-    const parentTreeUrl = data.parent_tree_url ? `${data.parent_tree_url}${data.path}` : state.endpoints.rootUrl;
     Object.assign(tree, {
-      tree: [
-        ...data.trees.map(t => utils.decorateData({
-          ...t,
-          type: 'tree',
-          parentTreeUrl,
-          level,
-        }, state.project.url)),
-        ...data.submodules.map(m => utils.decorateData({
-          ...m,
-          type: 'submodule',
-          parentTreeUrl,
-          level,
-        }, state.project.url)),
-        ...data.blobs.map(b => utils.decorateData({
-          ...b,
-          type: 'blob',
-          parentTreeUrl,
-          level,
-        }, state.project.url)),
-      ],
+      tree: data,
     });
   },
   [types.SET_PARENT_TREE_URL](state, url) {
@@ -39,6 +16,11 @@
       parentTreeUrl: url,
     });
   },
+  [types.SET_LAST_COMMIT_URL](state, { tree = state, url }) {
+    Object.assign(tree, {
+      lastCommitPath: url,
+    });
+  },
   [types.CREATE_TMP_TREE](state, { parent, tmpEntry }) {
     parent.tree.push(tmpEntry);
   },
......
@@ -8,6 +8,7 @@ export default () => ({
   endpoints: {},
   isRoot: false,
   isInitialRoot: false,
+  lastCommitPath: '',
   loading: false,
   onTopOfBranch: false,
   openFiles: [],
......
 export const dataStructure = () => ({
   id: '',
+  key: '',
   type: '',
   name: '',
   url: '',
@@ -12,7 +13,12 @@
   opened: false,
   active: false,
   changed: false,
-  lastCommit: {},
+  lastCommitPath: '',
+  lastCommit: {
+    url: '',
+    message: '',
+    updatedAt: '',
+  },
   tree_url: '',
   blamePath: '',
   commitsPath: '',
@@ -27,14 +33,13 @@
   base64: false,
 });

-export const decorateData = (entity, projectUrl = '') => {
+export const decorateData = (entity) => {
   const {
     id,
     type,
     url,
     name,
     icon,
-    last_commit,
     tree_url,
     path,
     renderError,
@@ -51,6 +56,7 @@ export const decorateData = (entity, projectUrl = '') => {
   return {
     ...dataStructure(),
     id,
+    key: `${name}-${type}-${id}`,
     type,
     name,
     url,
@@ -66,12 +72,6 @@ export const decorateData = (entity, projectUrl = '') => {
     renderError,
     content,
     base64,
-    // eslint-disable-next-line camelcase
-    lastCommit: last_commit ? {
-      url: `${projectUrl}/commit/${last_commit.id}`,
-      message: last_commit.message,
-      updatedAt: last_commit.committed_date,
-    } : {},
   };
 };
@@ -106,3 +106,22 @@ export const createTemp = ({ name, path, type, level, changed, content, base64 }
     renderError: base64,
   });
 };
+
+export const createOrMergeEntry = ({ tree, entry, type, parentTreeUrl, level }) => {
+  const found = findEntry(tree, type, entry.name);
+
+  if (found) {
+    return Object.assign({}, found, {
+      id: entry.id,
+      url: entry.url,
+      tempFile: false,
+    });
+  }
+
+  return decorateData({
+    ...entry,
+    type,
+    parentTreeUrl,
+    level,
+  });
+};
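createOrMergeEntry lets a re-fetched directory keep any state already decorated onto an existing entry (for example an expanded folder), refreshing only its identifiers. An illustrative sketch of the expected behaviour; the object shapes below are hypothetical and findEntry is assumed to match entries by type and name:

// Hypothetical data, for illustration only.
const tree = { tree: [{ type: 'blob', name: 'README.md', opened: true, id: 'old-sha' }] };
const fresh = { id: 'new-sha', name: 'README.md', url: '/root/README.md' };

const entry = createOrMergeEntry({ tree, entry: fresh, type: 'blob', parentTreeUrl: '/root', level: 0 });
// `entry` keeps previously decorated fields (such as opened: true) and only picks up
// the new id and url; an entry that is not found would be freshly decorated instead.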
@@ -3,9 +3,10 @@
  * and controllable by a public API.
  */

-class SmartInterval {
+export default class SmartInterval {
   /**
-   * @param { function } opts.callback Function to be called on each iteration (required)
+   * @param { function } opts.callback Function that returns a promise, called on each iteration
+   *   unless still in progress (required)
    * @param { milliseconds } opts.startingInterval `currentInterval` is set to this initially
    * @param { milliseconds } opts.maxInterval `currentInterval` will be incremented to this
    * @param { milliseconds } opts.hiddenInterval `currentInterval` is set to this
@@ -42,13 +43,16 @@ class SmartInterval {
     const cfg = this.cfg;
     const state = this.state;

-    if (cfg.immediateExecution) {
+    if (cfg.immediateExecution && !this.isLoading) {
       cfg.immediateExecution = false;
-      cfg.callback();
+      this.triggerCallback();
     }

     state.intervalId = window.setInterval(() => {
-      cfg.callback();
+      if (this.isLoading) {
+        return;
+      }
+      this.triggerCallback();

       if (this.getCurrentInterval() === cfg.maxInterval) {
         return;
@@ -76,7 +80,7 @@ class SmartInterval {

   // start a timer, using the existing interval
   resume() {
-    this.stopTimer(); // stop exsiting timer, in case timer was not previously stopped
+    this.stopTimer(); // stop existing timer, in case timer was not previously stopped
     this.start();
   }
@@ -104,6 +108,18 @@
     this.initPageUnloadHandling();
   }

+  triggerCallback() {
+    this.isLoading = true;
+    this.cfg.callback()
+      .then(() => {
+        this.isLoading = false;
+      })
+      .catch((err) => {
+        this.isLoading = false;
+        throw err;
+      });
+  }
+
   initVisibilityChangeHandling() {
     // cancel interval when tab no longer shown (prevents cached pages from polling)
     document.addEventListener('visibilitychange', this.handleVisibilityChange.bind(this));
@@ -154,4 +170,3 @@ class SmartInterval {
   }
 }
-
-window.gl.SmartInterval = SmartInterval;
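With this change a SmartInterval callback is expected to return a promise; while that promise is pending, isLoading stays true and further ticks are skipped instead of stacking requests. A minimal usage sketch (the endpoint URL is hypothetical):

import SmartInterval from '~/smart_interval';

// Polls an endpoint, backing off from 10s to 30s; ticks that would overlap an
// in-flight request are skipped because the callback returns a promise.
const poller = new SmartInterval({
  callback: () => fetch('/example/status.json').then(res => res.json()),
  startingInterval: 10000,
  maxInterval: 30000,
  incrementByFactorOf: 2,
});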
+import SmartInterval from '~/smart_interval';
 import Flash from '../flash';
 import {
   WidgetHeader,
@@ -81,7 +82,7 @@ export default {
       return new MRWidgetService(endpoints);
     },
     checkStatus(cb) {
-      this.service.checkStatus()
+      return this.service.checkStatus()
         .then(res => res.json())
         .then((res) => {
           this.handleNotification(res);
@@ -97,7 +98,7 @@
         });
     },
     initPolling() {
-      this.pollingInterval = new gl.SmartInterval({
+      this.pollingInterval = new SmartInterval({
         callback: this.checkStatus,
         startingInterval: 10000,
         maxInterval: 30000,
@@ -106,7 +107,7 @@
       });
     },
     initDeploymentsPolling() {
-      this.deploymentsInterval = new gl.SmartInterval({
+      this.deploymentsInterval = new SmartInterval({
         callback: this.fetchDeployments,
         startingInterval: 30000,
         maxInterval: 120000,
@@ -121,7 +122,7 @@
       }
     },
     fetchDeployments() {
-      this.service.fetchDeployments()
+      return this.service.fetchDeployments()
         .then(res => res.json())
         .then((res) => {
           if (res.length) {
......
<script>
export default {
  props: {
    small: {
      type: Boolean,
      required: false,
      default: false,
    },
    lines: {
      type: Number,
      required: false,
      default: 6,
    },
  },
  computed: {
    lineClasses() {
      return new Array(this.lines).fill().map((_, i) => `skeleton-line-${i + 1}`);
    },
  },
};
</script>

<template>
  <div
    class="animation-container"
    :class="{
      'animation-container-small': small,
    }"
  >
    <div
      v-for="(css, index) in lineClasses"
      :key="index"
      :class="css"
    >
    </div>
  </div>
</template>
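The skeleton container renders a configurable number of pulsing placeholder lines. A usage sketch; the importing component and the '~/vue_shared' alias are assumptions:

<script>
import skeletonLoadingContainer from '~/vue_shared/components/skeleton_loading_container.vue';

export default {
  components: { skeletonLoadingContainer },
};
</script>

<template>
  <!-- Three small skeleton lines shown while the real content loads -->
  <skeleton-loading-container
    :small="true"
    :lines="3"
  />
</template>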
@@ -9,9 +9,7 @@ module IssuableActions
   def show
     respond_to do |format|
-      format.html do
-        render show_view
-      end
+      format.html

       format.json do
         render json: serializer.represent(issuable, serializer: params[:serializer])
       end
@@ -152,10 +150,6 @@ module IssuableActions
     end
   end

-  def show_view
-    'show'
-  end
-
   def serializer
     raise NotImplementedError
   end
......
@@ -4,58 +4,44 @@ module IssuableCollections
   include Gitlab::IssuableMetadata

   included do
-    helper_method :issues_finder
-    helper_method :merge_requests_finder
+    helper_method :finder
   end

   private

-  def set_issues_index
-    @collection_type = "Issue"
-    @issues = issues_collection
-    @issues = @issues.page(params[:page])
-    @issuable_meta_data = issuable_meta_data(@issues, @collection_type)
-    @total_pages = issues_page_count(@issues)
+  def set_issuables_index
+    @issuables = issuables_collection
+    @issuables = @issuables.page(params[:page])
+    @issuable_meta_data = issuable_meta_data(@issuables, collection_type)
+    @total_pages = issuable_page_count

-    return if redirect_out_of_range(@issues, @total_pages)
+    return if redirect_out_of_range(@total_pages)

     if params[:label_name].present?
-      @labels = LabelsFinder.new(current_user, project_id: @project.id, title: params[:label_name]).execute
+      labels_params = { project_id: @project.id, title: params[:label_name] }
+      @labels = LabelsFinder.new(current_user, labels_params).execute
     end

     @users = []
-  end

-  def issues_collection
-    issues_finder.execute.preload(:project, :author, :assignees, :labels, :milestone, project: :namespace)
-  end
+    if params[:assignee_id].present?
+      assignee = User.find_by_id(params[:assignee_id])
+      @users.push(assignee) if assignee
+    end

-  def merge_requests_collection
-    merge_requests_finder.execute.preload(
-      :source_project,
-      :target_project,
-      :author,
-      :assignee,
-      :labels,
-      :milestone,
-      head_pipeline: :project,
-      target_project: :namespace,
-      merge_request_diff: :merge_request_diff_commits
-    )
-  end
+    if params[:author_id].present?
+      author = User.find_by_id(params[:author_id])
+      @users.push(author) if author
+    end
+  end

-  def issues_finder
-    @issues_finder ||= issuable_finder_for(IssuesFinder)
-  end
-
-  def merge_requests_finder
-    @merge_requests_finder ||= issuable_finder_for(MergeRequestsFinder)
+  def issuables_collection
+    finder.execute.preload(preload_for_collection)
   end

-  def redirect_out_of_range(relation, total_pages)
+  def redirect_out_of_range(total_pages)
     return false if total_pages.zero?

-    out_of_range = relation.current_page > total_pages
+    out_of_range = @issuables.current_page > total_pages

     if out_of_range
       redirect_to(url_for(params.merge(page: total_pages, only_path: true)))
@@ -64,12 +50,8 @@ module IssuableCollections
     out_of_range
   end

-  def issues_page_count(relation)
-    page_count_for_relation(relation, issues_finder.row_count)
-  end
-
-  def merge_requests_page_count(relation)
-    page_count_for_relation(relation, merge_requests_finder.row_count)
+  def issuable_page_count
+    page_count_for_relation(@issuables, finder.row_count)
   end

   def page_count_for_relation(relation, row_count)
@@ -145,4 +127,31 @@
     else value
     end
   end
+
+  def finder
+    return @finder if defined?(@finder)
+
+    @finder = issuable_finder_for(@finder_type)
+  end
+
+  def collection_type
+    @collection_type ||= case finder
+                         when IssuesFinder
+                           'Issue'
+                         when MergeRequestsFinder
+                           'MergeRequest'
+                         end
+  end
+
+  def preload_for_collection
+    @preload_for_collection ||= case collection_type
+                                when 'Issue'
+                                  [:project, :author, :assignees, :labels, :milestone, project: :namespace]
+                                when 'MergeRequest'
+                                  [
+                                    :source_project, :target_project, :author, :assignee, :labels, :milestone,
+                                    head_pipeline: :project, target_project: :namespace, merge_request_diff: :merge_request_diff_commits
+                                  ]
+                                end
+  end
 end
@@ -3,14 +3,14 @@ module IssuesAction
   include IssuableCollections

   def issues
-    @label = issues_finder.labels.first
+    @finder_type = IssuesFinder
+    @label = finder.labels.first

-    @issues = issues_collection
+    @issues = issuables_collection
                 .non_archived
                 .page(params[:page])

-    @collection_type = "Issue"
-    @issuable_meta_data = issuable_meta_data(@issues, @collection_type)
+    @issuable_meta_data = issuable_meta_data(@issues, collection_type)

     respond_to do |format|
       format.html
......
@@ -3,13 +3,12 @@ module MergeRequestsAction
   include IssuableCollections

   def merge_requests
-    @label = merge_requests_finder.labels.first
+    @finder_type = MergeRequestsFinder
+    @label = finder.labels.first

-    @merge_requests = merge_requests_collection
-                      .page(params[:page])
+    @merge_requests = issuables_collection.page(params[:page])

-    @collection_type = "MergeRequest"
-    @issuable_meta_data = issuable_meta_data(@merge_requests, @collection_type)
+    @issuable_meta_data = issuable_meta_data(@merge_requests, collection_type)
   end

   private
......
@@ -10,7 +10,7 @@ class Projects::IssuesController < Projects::ApplicationController
   before_action :check_issues_available!
   before_action :issue, except: [:index, :new, :create, :bulk_update]
-  before_action :set_issues_index, only: [:index]
+  before_action :set_issuables_index, only: [:index]

   # Allow write(create) issue
   before_action :authorize_create_issue!, only: [:new, :create]
@@ -24,15 +24,7 @@
   respond_to :html

   def index
-    if params[:assignee_id].present?
-      assignee = User.find_by_id(params[:assignee_id])
-      @users.push(assignee) if assignee
-    end
-
-    if params[:author_id].present?
-      author = User.find_by_id(params[:author_id])
-      @users.push(author) if author
-    end
+    @issues = @issuables

     respond_to do |format|
       format.html
@@ -252,4 +244,9 @@
     update_params = issue_params.merge(spammable_params)

     Issues::UpdateService.new(project, current_user, update_params)
   end
+
+  def set_issuables_index
+    @finder_type = IssuesFinder
+
+    super
+  end
 end
@@ -10,33 +10,12 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationController
   before_action :authorize_update_issuable!, only: [:close, :edit, :update, :remove_wip, :sort]

+  before_action :set_issuables_index, only: [:index]
+
   before_action :authenticate_user!, only: [:assign_related_issues]

   def index
-    @collection_type = "MergeRequest"
-    @merge_requests = merge_requests_collection
-    @merge_requests = @merge_requests.page(params[:page])
-    @merge_requests = @merge_requests.preload(merge_request_diff: :merge_request)
-    @issuable_meta_data = issuable_meta_data(@merge_requests, @collection_type)
-    @total_pages = merge_requests_page_count(@merge_requests)
-
-    return if redirect_out_of_range(@merge_requests, @total_pages)
-
-    if params[:label_name].present?
-      labels_params = { project_id: @project.id, title: params[:label_name] }
-      @labels = LabelsFinder.new(current_user, labels_params).execute
-    end
-
-    @users = []
-
-    if params[:assignee_id].present?
-      assignee = User.find_by_id(params[:assignee_id])
-      @users.push(assignee) if assignee
-    end
-
-    if params[:author_id].present?
-      author = User.find_by_id(params[:author_id])
-      @users.push(author) if author
-    end
+    @merge_requests = @issuables

     respond_to do |format|
       format.html
@@ -338,4 +317,9 @@
     @target_project = @merge_request.target_project
     @target_branches = @merge_request.target_project.repository.branch_names
   end
+
+  def set_issuables_index
+    @finder_type = MergeRequestsFinder
+
+    super
+  end
 end
@@ -56,9 +56,12 @@ class Projects::RefsController < Projects::ApplicationController
     contents[@offset, @limit].to_a.map do |content|
       file = @path ? File.join(@path, content.name) : content.name
       last_commit = @repo.last_commit_for_path(@commit.id, file)
+      commit_path = project_commit_path(@project, last_commit) if last_commit
       {
         file_name: content.name,
-        commit: last_commit
+        commit: last_commit,
+        type: content.type,
+        commit_path: commit_path
       }
     end
   end
@@ -70,6 +73,11 @@
     respond_to do |format|
       format.html { render_404 }

+      format.json do
+        response.headers["More-Logs-Url"] = @more_log_url
+
+        render json: @logs
+      end
+
       format.js
     end
   end
......
@@ -275,7 +275,8 @@ class ProjectsController < Projects::ApplicationController
       @project_wiki = @project.wiki
       @wiki_home = @project_wiki.find_page('home', params[:version_id])
     elsif @project.feature_available?(:issues, current_user)
-      @issues = issues_collection.page(params[:page])
+      @finder_type = IssuesFinder
+      @issues = issuables_collection.page(params[:page])
       @collection_type = 'Issue'
       @issuable_meta_data = issuable_meta_data(@issues, @collection_type)
     end
......
@@ -249,8 +249,6 @@ module IssuablesHelper
   end

   def issuables_count_for_state(issuable_type, state)
-    finder = public_send("#{issuable_type}_finder") # rubocop:disable GitlabSecurity/PublicSend
-
     Gitlab::IssuablesCountForState.new(finder)[state]
   end
......
@@ -7,7 +7,7 @@ module Clusters
       default_value_for :zone, 'us-central1-a'
       default_value_for :num_nodes, 3
-      default_value_for :machine_type, 'n1-standard-4'
+      default_value_for :machine_type, 'n1-standard-2'

       attr_encrypted :access_token,
         mode: :per_attribute_iv,
......
@@ -14,7 +14,6 @@ class CommitStatus < ActiveRecord::Base
   delegate :sha, :short_sha, to: :pipeline

   validates :pipeline, presence: true, unless: :importing?
   validates :name, presence: true, unless: :importing?

   alias_attribute :author, :user
@@ -46,6 +45,17 @@
     runner_system_failure: 4
   }

+  ##
+  # We still create some CommitStatuses outside of CreatePipelineService.
+  #
+  # These are pages deployments and external statuses.
+  #
+  before_create unless: :importing? do
+    Ci::EnsureStageService.new(project, user).execute(self) do |stage|
+      self.run_after_commit { StageUpdateWorker.perform_async(stage.id) }
+    end
+  end
+
   state_machine :status do
     event :process do
       transition [:skipped, :manual] => :created
......
@@ -17,6 +17,8 @@ module Issuable
   include Importable
   include Editable
   include AfterCommitQueue
+  include Sortable
+  include CreatedAtFilterable

   # This object is used to gather issuable meta data for displaying
   # upvotes, downvotes, notes and closing merge requests count for issues and merge requests
......
@@ -5,11 +5,9 @@ class Issue < ActiveRecord::Base
   include Issuable
   include Noteable
   include Referable
-  include Sortable
   include Spammable
   include FasterCacheKeys
   include RelativePositioning
-  include CreatedAtFilterable
   include TimeTrackable

   DueDateStruct = Struct.new(:title, :name).freeze
......
@@ -3,9 +3,7 @@ class MergeRequest < ActiveRecord::Base
   include Issuable
   include Noteable
   include Referable
-  include Sortable
   include IgnorableColumn
-  include CreatedAtFilterable
   include TimeTrackable

   ignore_column :locked_at,
......
@@ -3,10 +3,6 @@ class BlobEntity < Grape::Entity
   expose :id, :path, :name, :mode

-  expose :last_commit do |blob|
-    request.project.repository.last_commit_for_path(blob.commit_id, blob.path)
-  end
-
   expose :icon do |blob|
     IconsHelper.file_type_icon_class('file', blob.mode, blob.name)
   end
......
@@ -3,10 +3,6 @@ class TreeEntity < Grape::Entity
   expose :id, :path, :name, :mode

-  expose :last_commit do |tree|
-    request.project.repository.last_commit_for_path(tree.commit_id, tree.path)
-  end
-
   expose :icon do |tree|
     IconsHelper.file_type_icon_class('folder', tree.mode, tree.name)
   end
......
@@ -18,4 +18,8 @@ class TreeRootEntity < Grape::Entity
     project_tree_path(request.project, File.join(request.ref, parent_tree_path))
   end
+
+  expose :last_commit_path do |tree|
+    logs_file_project_ref_path(request.project, request.ref, tree.path)
+  end
 end
module Ci
  ##
  # We call this service every time we persist a CI/CD job.
  #
  # In most cases a job should already have a stage assigned, but when it
  # doesn't, we need to either find an existing stage or create a brand new
  # one.
  #
  class EnsureStageService < BaseService
    def execute(build)
      @build = build

      return if build.stage_id.present?
      return if build.invalid?

      ensure_stage.tap do |stage|
        build.stage_id = stage.id

        yield stage if block_given?
      end
    end

    private

    def ensure_stage
      find_stage || create_stage
    end

    def find_stage
      @build.pipeline.stages.find_by(name: @build.stage)
    end

    def create_stage
      Ci::Stage.create!(name: @build.stage,
                        pipeline: @build.pipeline,
                        project: @build.project)
    end
  end
end
@@ -29,7 +29,7 @@
     .form-group
       = provider_gcp_field.label :machine_type, s_('ClusterIntegration|Machine type')
       = link_to(s_('ClusterIntegration|See machine types'), 'https://cloud.google.com/compute/docs/machine-types', target: '_blank', rel: 'noopener noreferrer')
-      = provider_gcp_field.text_field :machine_type, class: 'form-control', placeholder: 'n1-standard-4'
+      = provider_gcp_field.text_field :machine_type, class: 'form-control', placeholder: 'n1-standard-2'

     .form-group
       = field.submit s_('ClusterIntegration|Create cluster'), class: 'btn btn-save'
@@ -67,7 +67,7 @@
 - if @commit.last_pipeline
   - last_pipeline = @commit.last_pipeline
   .well-segment.pipeline-info
-    .status-icon-container{ class: "ci-status-icon-#{@commit.status}" }
+    .status-icon-container{ class: "ci-status-icon-#{last_pipeline.status}" }
     = link_to project_pipeline_path(@project, last_pipeline.id) do
       = ci_icon_for_status(last_pipeline.status)
       #{ _('Pipeline') }
......
@@ -3,8 +3,8 @@
 - if @can_bulk_update
   = button_tag "Edit issues", class: "btn btn-default append-right-10 js-bulk-update-toggle"
 = link_to "New issue", new_project_issue_path(@project,
-                                              issue: { assignee_id: issues_finder.assignee.try(:id),
-                                                       milestone_id: issues_finder.milestones.first.try(:id) }),
+                                              issue: { assignee_id: finder.assignee.try(:id),
+                                                       milestone_id: finder.milestones.first.try(:id) }),
                                               class: "btn btn-new",
                                               title: "New issue",
                                               id: "new_issue_link"

-- finder = controller.controller_name == 'issues' ? issues_finder : merge_requests_finder
 - boards_page = controller.controller_name == 'boards'

 .issues-filters
......
---
title: Update Merge Request polling so there is only one request at a time
merge_request: 15032
author:
type: fixed
---
title: Change default cluster machine type to n1-standard-2
merge_request: 39649
author: Fabio Busatto
type: changed
---
title: Fix commit pipeline showing wrong status
merge_request:
author:
type: fixed
---
title: Fix arguments Import/Export error importing project merge requests
merge_request:
author:
type: fixed
---
title: Fix TRIGGER checks for MySQL
merge_request:
author:
type: fixed
@@ -11,7 +11,7 @@
 #
 # It's strongly recommended that you check this file into your version control system.

-ActiveRecord::Schema.define(version: 20171106101200) do
+ActiveRecord::Schema.define(version: 20171101134435) do

   # These are extensions that must be enabled in order to support this database
   enable_extension "plpgsql"
......
@@ -166,12 +166,26 @@ board itself.

 ![Remove issue from list](img/issue_boards_remove_issue.png)

-## Re-ordering an issue in a list
+## Issue ordering in a list

-> Introduced in GitLab 9.0.
-
-Issues can be re-ordered inside of lists. This is as simple as dragging and dropping
-an issue into the order you want.
+When visiting a board, issues appear ordered in any list. You can change that
+order by dragging and dropping the issues. The changed order is saved, so that
+anybody who visits the same board later sees the reordering, with some exceptions.
+
+The first time a given issue appears in any board (that is, the first time a user
+loads a board containing that issue), it is ordered with respect to the other
+issues in that list according to [Priority order][label-priority].
+
+At that point, the system assigns the issue a relative order value representing
+its position relative to the other issues in the list. Any time you drag-and-drop
+reorder that issue, its relative order value changes accordingly.
+
+From then on, whenever that issue appears in any board loaded by any user, the
+updated relative order value is used for the ordering; only the first time an
+issue appears does it take its position from the Priority order mentioned above.
+This means that if issue `A` is reordered above issue `B` by any user in a given
+board inside your GitLab instance, that ordering is maintained whenever those two
+issues are subsequently loaded in any board in the same instance (for example, a
+different project board or a different group board).

 ## Filtering issues
......
@@ -8,7 +8,7 @@ module Gitlab
         end

         def icon
-          'warning'
+          'status_warning'
         end

         def group
......
@@ -6,28 +6,36 @@ module Gitlab
         if Database.postgresql?
           'information_schema.role_table_grants'
         else
-          'mysql.user'
+          'information_schema.schema_privileges'
         end

-      def self.scope_to_current_user
-        if Database.postgresql?
-          where('grantee = user')
-        else
-          where("CONCAT(User, '@', Host) = current_user()")
-        end
-      end
-
       # Returns true if the current user can create and execute triggers on the
       # given table.
       def self.create_and_execute_trigger?(table)
         priv =
           if Database.postgresql?
             where(privilege_type: 'TRIGGER', table_name: table)
+              .where('grantee = user')
           else
-            where(Trigger_priv: 'Y')
+            queries = [
+              Grant.select(1)
+                .from('information_schema.user_privileges')
+                .where("PRIVILEGE_TYPE = 'SUPER'")
+                .where("GRANTEE = CONCAT('\\'', REPLACE(CURRENT_USER(), '@', '\\'@\\''), '\\'')"),
+              Grant.select(1)
+                .from('information_schema.schema_privileges')
+                .where("PRIVILEGE_TYPE = 'TRIGGER'")
+                .where('TABLE_SCHEMA = ?', Gitlab::Database.database_name)
+                .where("GRANTEE = CONCAT('\\'', REPLACE(CURRENT_USER(), '@', '\\'@\\''), '\\'')")
+            ]
+
+            union = SQL::Union.new(queries).to_sql
+
+            Grant.from("(#{union}) privs")
           end

-        priv.scope_to_current_user.any?
+        priv.any?
       end
     end
   end
......
@@ -26,7 +26,7 @@ module Gitlab
       end

       def fetch_ref
-        @project.repository.fetch_ref(@project.repository.path, @diff_head_sha, @merge_request.source_branch)
+        @project.repository.fetch_ref(@project.repository, source_ref: @diff_head_sha, target_ref: @merge_request.source_branch)
       end

       def branch_exists?(branch_name)
......
@@ -4,7 +4,7 @@ module Gitlab
       attr_accessor :name

       def initialize(name, client)
-        self.name = name
+        @name = name
         @client = client
       end
......
@@ -33,24 +33,29 @@ namespace :gitlab do
     backup.unpack

     unless backup.skipped?('db')
-      unless ENV['force'] == 'yes'
-        warning = <<-MSG.strip_heredoc
-          Before restoring the database we recommend removing all existing
-          tables to avoid future upgrade problems. Be aware that if you have
-          custom tables in the GitLab database these tables and all data will be
-          removed.
-        MSG
-        puts warning.color(:red)
-        ask_to_continue
-        puts 'Removing all tables. Press `Ctrl-C` within 5 seconds to abort'.color(:yellow)
-        sleep(5)
-      end
+      begin
+        unless ENV['force'] == 'yes'
+          warning = <<-MSG.strip_heredoc
+            Before restoring the database, we will remove all existing
+            tables to avoid future upgrade problems. Be aware that if you have
+            custom tables in the GitLab database these tables and all data will be
+            removed.
+          MSG
+          puts warning.color(:red)
+          ask_to_continue
+          puts 'Removing all tables. Press `Ctrl-C` within 5 seconds to abort'.color(:yellow)
+          sleep(5)
+        end

-      # Drop all tables Load the schema to ensure we don't have any newer tables
-      # hanging out from a failed upgrade
-      $progress.puts 'Cleaning the database ... '.color(:blue)
-      Rake::Task['gitlab:db:drop_tables'].invoke
-      $progress.puts 'done'.color(:green)
-      Rake::Task['gitlab:backup:db:restore'].invoke
+        # Drop all tables Load the schema to ensure we don't have any newer tables
+        # hanging out from a failed upgrade
+        $progress.puts 'Cleaning the database ... '.color(:blue)
+        Rake::Task['gitlab:db:drop_tables'].invoke
+        $progress.puts 'done'.color(:green)
+        Rake::Task['gitlab:backup:db:restore'].invoke
+      rescue Gitlab::TaskAbortedByUserError
+        puts "Quitting...".color(:red)
+        exit 1
+      end
     end

     Rake::Task['gitlab:backup:repo:restore'].invoke unless backup.skipped?('repositories')
......
@@ -17,60 +17,6 @@ describe IssuableCollections do
     controller
   end

-  describe '#redirect_out_of_range' do
-    before do
-      allow(controller).to receive(:url_for)
-    end
-
-    it 'returns true and redirects if the offset is out of range' do
-      relation = double(:relation, current_page: 10)
-
-      expect(controller).to receive(:redirect_to)
-      expect(controller.send(:redirect_out_of_range, relation, 2)).to eq(true)
-    end
-
-    it 'returns false if the offset is not out of range' do
-      relation = double(:relation, current_page: 1)
-
-      expect(controller).not_to receive(:redirect_to)
-      expect(controller.send(:redirect_out_of_range, relation, 2)).to eq(false)
-    end
-  end
-
-  describe '#issues_page_count' do
-    it 'returns the number of issue pages' do
-      project = create(:project, :public)
-
-      create(:issue, project: project)
-
-      finder = IssuesFinder.new(user)
-      issues = finder.execute
-
-      allow(controller).to receive(:issues_finder)
-        .and_return(finder)
-
-      expect(controller.send(:issues_page_count, issues)).to eq(1)
-    end
-  end
-
-  describe '#merge_requests_page_count' do
-    it 'returns the number of merge request pages' do
-      project = create(:project, :public)
-
-      create(:merge_request, source_project: project, target_project: project)
-
-      finder = MergeRequestsFinder.new(user)
-      merge_requests = finder.execute
-
-      allow(controller).to receive(:merge_requests_finder)
-        .and_return(finder)
-
-      pages = controller.send(:merge_requests_page_count, merge_requests)
-
-      expect(pages).to eq(1)
-    end
-  end
-
   describe '#page_count_for_relation' do
     it 'returns the number of pages' do
       relation = double(:relation, limit_value: 20)
......
@@ -23,12 +23,15 @@ describe Projects::RefsController do
       xhr :get,
           :logs_tree,
           namespace_id: project.namespace.to_param,
-          project_id: project, id: 'master',
-          path: 'foo/bar/baz.html', format: format
+          project_id: project,
+          id: 'master',
+          path: 'foo/bar/baz.html',
+          format: format
     end

     it 'never throws MissingTemplate' do
       expect { default_get }.not_to raise_error
+      expect { xhr_get(:json) }.not_to raise_error
       expect { xhr_get }.not_to raise_error
     end
@@ -42,5 +45,12 @@ describe Projects::RefsController do
       xhr_get(:js)

       expect(response).to be_success
     end
+
+    it 'renders JSON' do
+      xhr_get(:json)
+
+      expect(response).to be_success
+      expect(json_response).to be_kind_of(Array)
+    end
   end
 end
 FactoryGirl.define do
   factory :commit_status, class: CommitStatus do
     name 'default'
+    stage 'test'
     status 'success'
     description 'commit status'
     pipeline factory: :ci_pipeline_with_one_job
......
@@ -12,6 +12,13 @@ feature 'Mini Pipeline Graph in Commit View', :js do
   end

   let(:build) { create(:ci_build, pipeline: pipeline) }

+  it 'display icon with status' do
+    build.run
+    visit project_commit_path(project, project.commit.id)
+
+    expect(page).to have_selector('.ci-status-icon-running')
+  end
+
   it 'displays a mini pipeline graph' do
     build.run
     visit project_commit_path(project, project.commit.id)
......
@@ -31,10 +31,5 @@ feature 'Multi-file editor new directory', :js do
     click_button('Commit 1 file')

     expect(page).to have_selector('td', text: 'commit message')
-
-    click_link('foldername')
-
-    expect(page).to have_selector('td', text: 'commit message', count: 2)
-    expect(page).to have_selector('td', text: '.gitkeep')
   end
 end
@@ -20,7 +20,7 @@ describe('RepoFile', () => {
     resetStore(vm.$store);
   });

-  it('renders link, icon, name and last commit details', () => {
+  it('renders link, icon and name', () => {
     const RepoFile = Vue.extend(repoFile);
     vm = new RepoFile({
       store,
@@ -37,10 +37,9 @@
     expect(vm.$el.querySelector(`.${vm.file.icon}`).style.marginLeft).toEqual('0px');
     expect(name.href).toMatch(`/${vm.file.url}`);
     expect(name.textContent.trim()).toEqual(vm.file.name);
-    expect(vm.$el.querySelector('.commit-message').textContent.trim()).toBe(vm.file.lastCommit.message);
-    expect(vm.$el.querySelector('.commit-update').textContent.trim()).toBe(updated);
     expect(fileIcon.classList.contains(vm.file.icon)).toBeTruthy();
     expect(fileIcon.style.marginLeft).toEqual(`${vm.file.level * 10}px`);
+    expect(vm.$el.querySelectorAll('.animation-container').length).toBe(2);
   });

   it('does render if hasFiles is true and is loading tree', () => {
......
-import '~/smart_interval';
+import SmartInterval from '~/smart_interval';

-(() => {
+describe('SmartInterval', function () {
   const DEFAULT_MAX_INTERVAL = 100;
   const DEFAULT_STARTING_INTERVAL = 5;
   const DEFAULT_SHORT_TIMEOUT = 75;
@@ -9,7 +9,7 @@ import '~/smart_interval';
   function createDefaultSmartInterval(config) {
     const defaultParams = {
-      callback: () => {},
+      callback: () => Promise.resolve(),
       startingInterval: DEFAULT_STARTING_INTERVAL,
       maxInterval: DEFAULT_MAX_INTERVAL,
       incrementByFactorOf: DEFAULT_INCREMENT_FACTOR,
@@ -22,158 +22,171 @@ import '~/smart_interval';
       _.extend(defaultParams, config);
     }

-    return new gl.SmartInterval(defaultParams);
+    return new SmartInterval(defaultParams);
   }

-  describe('SmartInterval', function () {
-    describe('Increment Interval', function () {
+  describe('Increment Interval', function () {
     beforeEach(function () {
       this.smartInterval = createDefaultSmartInterval();
     });

     it('should increment the interval delay', function (done) {
       const interval = this.smartInterval;
       setTimeout(() => {
         const intervalConfig = this.smartInterval.cfg;
         const iterationCount = 4;
         const maxIntervalAfterIterations = intervalConfig.startingInterval *
           (intervalConfig.incrementByFactorOf ** (iterationCount - 1)); // 40
         const currentInterval = interval.getCurrentInterval();

         // Provide some flexibility for performance of testing environment
         expect(currentInterval).toBeGreaterThan(intervalConfig.startingInterval);
         expect(currentInterval <= maxIntervalAfterIterations).toBeTruthy();

         done();
       }, DEFAULT_SHORT_TIMEOUT); // 4 iterations, increment by 2x = (5 + 10 + 20 + 40)
     });

     it('should not increment past maxInterval', function (done) {
       const interval = this.smartInterval;

       setTimeout(() => {
         const currentInterval = interval.getCurrentInterval();
         expect(currentInterval).toBe(interval.cfg.maxInterval);

         done();
       }, DEFAULT_LONG_TIMEOUT);
     });
+
+    it('does not increment while waiting for callback', function () {
+      jasmine.clock().install();
+
+      const smartInterval = createDefaultSmartInterval({
+        callback: () => new Promise($.noop),
+      });
+
+      jasmine.clock().tick(DEFAULT_SHORT_TIMEOUT);
+
+      const oneInterval = smartInterval.cfg.startingInterval * DEFAULT_INCREMENT_FACTOR;
+      expect(smartInterval.getCurrentInterval()).toEqual(oneInterval);
+
+      jasmine.clock().uninstall();
+    });
   });

   describe('Public methods', function () {
     beforeEach(function () {
       this.smartInterval = createDefaultSmartInterval();
     });

     it('should cancel an interval', function (done) {
       const interval = this.smartInterval;

       setTimeout(() => {
         interval.cancel();

         const intervalId = interval.state.intervalId;
         const currentInterval = interval.getCurrentInterval();
         const intervalLowerLimit = interval.cfg.startingInterval;

         expect(intervalId).toBeUndefined();
         expect(currentInterval).toBe(intervalLowerLimit);

         done();
       }, DEFAULT_SHORT_TIMEOUT);
     });

     it('should resume an interval', function (done) {
       const interval = this.smartInterval;

       setTimeout(() => {
         interval.cancel();

         interval.resume();

         const intervalId = interval.state.intervalId;

         expect(intervalId).toBeTruthy();

         done();
       }, DEFAULT_SHORT_TIMEOUT);
     });
   });

   describe('DOM Events', function () {
     beforeEach(function () {
       // This ensures DOM and DOM events are initialized for these specs.
       setFixtures('<div></div>');

       this.smartInterval = createDefaultSmartInterval();
     });

     it('should pause when page is not visible', function (done) {
       const interval = this.smartInterval;

       setTimeout(() => {
         expect(interval.state.intervalId).toBeTruthy();

         // simulates triggering of visibilitychange event
         interval.handleVisibilityChange({ target: { visibilityState: 'hidden' } });

         expect(interval.state.intervalId).toBeUndefined();

         done();
       }, DEFAULT_SHORT_TIMEOUT);
     });

     it('should change to the hidden interval when page is not visible', function (done) {
       const HIDDEN_INTERVAL = 1500;
       const interval = createDefaultSmartInterval({ hiddenInterval: HIDDEN_INTERVAL });

       setTimeout(() => {
         expect(interval.state.intervalId).toBeTruthy();
expect(interval.getCurrentInterval() >= DEFAULT_STARTING_INTERVAL && expect(interval.getCurrentInterval() >= DEFAULT_STARTING_INTERVAL &&
interval.getCurrentInterval() <= DEFAULT_MAX_INTERVAL).toBeTruthy(); interval.getCurrentInterval() <= DEFAULT_MAX_INTERVAL).toBeTruthy();
// simulates triggering of visibilitychange event // simulates triggering of visibilitychange event
interval.handleVisibilityChange({ target: { visibilityState: 'hidden' } }); interval.handleVisibilityChange({ target: { visibilityState: 'hidden' } });
expect(interval.state.intervalId).toBeTruthy(); expect(interval.state.intervalId).toBeTruthy();
expect(interval.getCurrentInterval()).toBe(HIDDEN_INTERVAL); expect(interval.getCurrentInterval()).toBe(HIDDEN_INTERVAL);
done(); done();
}, DEFAULT_SHORT_TIMEOUT); }, DEFAULT_SHORT_TIMEOUT);
}); });
it('should resume when page becomes visible at the previous interval', function (done) { it('should resume when page becomes visible at the previous interval', function (done) {
const interval = this.smartInterval; const interval = this.smartInterval;
setTimeout(() => { setTimeout(() => {
expect(interval.state.intervalId).toBeTruthy(); expect(interval.state.intervalId).toBeTruthy();
// simulates triggering of visibilitychange event // simulates triggering of visibilitychange event
interval.handleVisibilityChange({ target: { visibilityState: 'hidden' } }); interval.handleVisibilityChange({ target: { visibilityState: 'hidden' } });
expect(interval.state.intervalId).toBeUndefined(); expect(interval.state.intervalId).toBeUndefined();
// simulates triggering of visibilitychange event // simulates triggering of visibilitychange event
interval.handleVisibilityChange({ target: { visibilityState: 'visible' } }); interval.handleVisibilityChange({ target: { visibilityState: 'visible' } });
expect(interval.state.intervalId).toBeTruthy(); expect(interval.state.intervalId).toBeTruthy();
done(); done();
}, DEFAULT_SHORT_TIMEOUT); }, DEFAULT_SHORT_TIMEOUT);
}); });
it('should cancel on page unload', function (done) { it('should cancel on page unload', function (done) {
const interval = this.smartInterval; const interval = this.smartInterval;
setTimeout(() => { setTimeout(() => {
$(document).triggerHandler('beforeunload'); $(document).triggerHandler('beforeunload');
expect(interval.state.intervalId).toBeUndefined(); expect(interval.state.intervalId).toBeUndefined();
expect(interval.getCurrentInterval()).toBe(interval.cfg.startingInterval); expect(interval.getCurrentInterval()).toBe(interval.cfg.startingInterval);
done(); done();
}, DEFAULT_SHORT_TIMEOUT); }, DEFAULT_SHORT_TIMEOUT);
}); });
it('should execute callback before first interval', function () { it('should execute callback before first interval', function () {
const interval = createDefaultSmartInterval({ immediateExecution: true }); const interval = createDefaultSmartInterval({ immediateExecution: true });
expect(interval.cfg.immediateExecution).toBeFalsy(); expect(interval.cfg.immediateExecution).toBeFalsy();
});
}); });
}); });
})(window.gl || (window.gl = {})); });
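The refactor above drops the IIFE and the window.gl.SmartInterval global in favour of an ES module. A minimal usage sketch, assuming only the options and methods the spec exercises (startingInterval, maxInterval, incrementByFactorOf, hiddenInterval, immediateExecution, cancel, resume); the callback body and the interval values are illustrative and not taken from this diff:

import SmartInterval from '~/smart_interval';

// Poll an endpoint with exponential back-off. Per the new spec, the interval
// does not advance until the Promise returned by the callback resolves.
const poller = new SmartInterval({
  callback: () => fetch('/status.json').then(res => res.json()),
  startingInterval: 5000,    // first delay, in milliseconds
  maxInterval: 120000,       // never back off past this
  incrementByFactorOf: 2,    // 5s, 10s, 20s, ...
  hiddenInterval: 300000,    // slower cadence while the tab is hidden
  immediateExecution: true,  // run the callback once before the first delay
});

// The spec also covers explicit pause/resume:
poller.cancel();
poller.resume();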
...@@ -121,24 +121,28 @@ describe('mrWidgetOptions', () => { ...@@ -121,24 +121,28 @@ describe('mrWidgetOptions', () => {
describe('initPolling', () => { describe('initPolling', () => {
it('should call SmartInterval', () => { it('should call SmartInterval', () => {
spyOn(gl, 'SmartInterval').and.returnValue({ spyOn(vm, 'checkStatus').and.returnValue(Promise.resolve());
resume() {}, jasmine.clock().install();
stopTimer() {},
});
vm.initPolling(); vm.initPolling();
expect(vm.checkStatus).not.toHaveBeenCalled();
jasmine.clock().tick(10000);
expect(vm.pollingInterval).toBeDefined(); expect(vm.pollingInterval).toBeDefined();
expect(gl.SmartInterval).toHaveBeenCalled(); expect(vm.checkStatus).toHaveBeenCalled();
jasmine.clock().uninstall();
}); });
}); });
describe('initDeploymentsPolling', () => { describe('initDeploymentsPolling', () => {
it('should call SmartInterval', () => { it('should call SmartInterval', () => {
spyOn(gl, 'SmartInterval'); spyOn(vm, 'fetchDeployments').and.returnValue(Promise.resolve());
vm.initDeploymentsPolling(); vm.initDeploymentsPolling();
expect(vm.deploymentsInterval).toBeDefined(); expect(vm.deploymentsInterval).toBeDefined();
expect(gl.SmartInterval).toHaveBeenCalled(); expect(vm.fetchDeployments).toHaveBeenCalled();
}); });
}); });
......
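These specs no longer stub a gl.SmartInterval constructor; they spy on the widget's own callbacks (checkStatus, fetchDeployments) and drive time with jasmine.clock(). The component methods they exercise are not part of this diff, but presumably wire the module up roughly like this (a hypothetical sketch; the option values are guesses, not the widget's real configuration):

import SmartInterval from '~/smart_interval';

export default {
  methods: {
    initPolling() {
      this.pollingInterval = new SmartInterval({
        callback: () => this.checkStatus(),  // returns a Promise, as the spy in the spec does
        startingInterval: 10000,             // consistent with the 10000ms clock tick above
        maxInterval: 240000,
        incrementByFactorOf: 2,
      });
    },
    initDeploymentsPolling() {
      this.deploymentsInterval = new SmartInterval({
        callback: () => this.fetchDeployments(),
        startingInterval: 30000,
        immediateExecution: true,            // the spec expects the callback to fire right away
      });
    },
  },
};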
import Vue from 'vue';
import skeletonLoadingContainer from '~/vue_shared/components/skeleton_loading_container.vue';
import mountComponent from '../../helpers/vue_mount_component_helper';
describe('Skeleton loading container', () => {
let vm;
beforeEach(() => {
const component = Vue.extend(skeletonLoadingContainer);
vm = mountComponent(component);
});
afterEach(() => {
vm.$destroy();
});
it('renders 6 skeleton lines by default', () => {
expect(vm.$el.querySelector('.skeleton-line-6')).not.toBeNull();
});
it('renders in full mode by default', () => {
expect(vm.$el.classList.contains('animation-container-small')).toBeFalsy();
});
describe('small', () => {
beforeEach((done) => {
vm.small = true;
Vue.nextTick(done);
});
it('renders in small mode', () => {
expect(vm.$el.classList.contains('animation-container-small')).toBeTruthy();
});
});
describe('lines', () => {
beforeEach((done) => {
vm.lines = 5;
Vue.nextTick(done);
});
it('renders 5 lines', () => {
expect(vm.$el.querySelector('.skeleton-line-5')).not.toBeNull();
expect(vm.$el.querySelector('.skeleton-line-6')).toBeNull();
});
});
});
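The new spec mutates vm.small and vm.lines after mounting and waits for Vue.nextTick. The same component can also be exercised by passing the props up front; a sketch assuming the mountComponent helper forwards a props object as its second argument (the prop values are illustrative):

import Vue from 'vue';
import skeletonLoadingContainer from '~/vue_shared/components/skeleton_loading_container.vue';
import mountComponent from '../../helpers/vue_mount_component_helper';

it('renders a small container with three skeleton lines', () => {
  const Component = Vue.extend(skeletonLoadingContainer);
  const vm = mountComponent(Component, { small: true, lines: 3 });

  expect(vm.$el.classList.contains('animation-container-small')).toBeTruthy();
  expect(vm.$el.querySelector('.skeleton-line-3')).not.toBeNull();
  expect(vm.$el.querySelector('.skeleton-line-4')).toBeNull();

  vm.$destroy();
});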
...@@ -84,7 +84,7 @@ describe Gitlab::Ci::Status::Build::Factory do ...@@ -84,7 +84,7 @@ describe Gitlab::Ci::Status::Build::Factory do
it 'fabricates status with correct details' do it 'fabricates status with correct details' do
expect(status.text).to eq 'failed' expect(status.text).to eq 'failed'
expect(status.icon).to eq 'warning' expect(status.icon).to eq 'status_warning'
expect(status.favicon).to eq 'favicon_status_failed' expect(status.favicon).to eq 'favicon_status_failed'
expect(status.label).to eq 'failed (allowed to fail)' expect(status.label).to eq 'failed (allowed to fail)'
expect(status).to have_details expect(status).to have_details
......
...@@ -18,7 +18,7 @@ describe Gitlab::Ci::Status::Build::FailedAllowed do ...@@ -18,7 +18,7 @@ describe Gitlab::Ci::Status::Build::FailedAllowed do
describe '#icon' do describe '#icon' do
it 'returns a warning icon' do it 'returns a warning icon' do
expect(subject.icon).to eq 'warning' expect(subject.icon).to eq 'status_warning'
end end
end end
......
require 'spec_helper' require 'spec_helper'
describe Gitlab::Database::Grant do describe Gitlab::Database::Grant do
describe '.scope_to_current_user' do
it 'scopes the relation to the current user' do
user = Gitlab::Database.username
column = Gitlab::Database.postgresql? ? :grantee : :User
names = described_class.scope_to_current_user.pluck(column).uniq
expect(names).to eq([user])
end
end
describe '.create_and_execute_trigger' do describe '.create_and_execute_trigger' do
it 'returns true when the user can create and execute a trigger' do it 'returns true when the user can create and execute a trigger' do
# We assume the DB/user is set up correctly so that triggers can be # We assume the DB/user is set up correctly so that triggers can be
...@@ -18,13 +8,11 @@ describe Gitlab::Database::Grant do ...@@ -18,13 +8,11 @@ describe Gitlab::Database::Grant do
expect(described_class.create_and_execute_trigger?('users')).to eq(true) expect(described_class.create_and_execute_trigger?('users')).to eq(true)
end end
it 'returns false when the user can not create and/or execute a trigger' do it 'returns false when the user can not create and/or execute a trigger', :postgresql do
allow(described_class).to receive(:scope_to_current_user) # In case of MySQL the user may have SUPER permissions, making it
.and_return(described_class.none) # impossible to have `false` returned when running tests; hence we only
# run these tests on PostgreSQL.
result = described_class.create_and_execute_trigger?('kittens') expect(described_class.create_and_execute_trigger?('foo')).to eq(false)
expect(result).to eq(false)
end end
end end
end end
...@@ -13,7 +13,7 @@ describe Gitlab::ImportExport::MergeRequestParser do ...@@ -13,7 +13,7 @@ describe Gitlab::ImportExport::MergeRequestParser do
let(:parsed_merge_request) do let(:parsed_merge_request) do
described_class.new(project, described_class.new(project,
merge_request.diff_head_sha, 'abcd',
merge_request, merge_request,
merge_request.as_json).parse! merge_request.as_json).parse!
end end
...@@ -29,4 +29,14 @@ describe Gitlab::ImportExport::MergeRequestParser do ...@@ -29,4 +29,14 @@ describe Gitlab::ImportExport::MergeRequestParser do
it 'has a target branch' do it 'has a target branch' do
expect(project.repository.branch_exists?(parsed_merge_request.target_branch)).to be true expect(project.repository.branch_exists?(parsed_merge_request.target_branch)).to be true
end end
it 'parses a MR that has no source branch' do
allow_any_instance_of(described_class).to receive(:branch_exists?).and_call_original
allow_any_instance_of(described_class).to receive(:branch_exists?).with(merge_request.source_branch).and_return(false)
allow_any_instance_of(described_class).to receive(:fork_merge_request?).and_return(true)
allow(Gitlab::GitalyClient).to receive(:migrate).and_call_original
allow(Gitlab::GitalyClient).to receive(:migrate).with(:fetch_ref).and_return([nil, 0])
expect(parsed_merge_request).to eq(merge_request)
end
end end
...@@ -69,7 +69,7 @@ describe GoogleApi::CloudPlatform::Client do ...@@ -69,7 +69,7 @@ describe GoogleApi::CloudPlatform::Client do
let(:cluster_name) { 'test-cluster' } let(:cluster_name) { 'test-cluster' }
let(:cluster_size) { 1 } let(:cluster_size) { 1 }
let(:machine_type) { 'n1-standard-4' } let(:machine_type) { 'n1-standard-2' }
let(:operation) { double } let(:operation) { double }
before do before do
......
...@@ -10,7 +10,7 @@ describe Clusters::Providers::Gcp do ...@@ -10,7 +10,7 @@ describe Clusters::Providers::Gcp do
it "has default value" do it "has default value" do
expect(gcp.zone).to eq('us-central1-a') expect(gcp.zone).to eq('us-central1-a')
expect(gcp.num_nodes).to eq(3) expect(gcp.num_nodes).to eq(3)
expect(gcp.machine_type).to eq('n1-standard-4') expect(gcp.machine_type).to eq('n1-standard-2')
end end
end end
......
require 'spec_helper' require 'spec_helper'
describe CommitStatus do describe CommitStatus do
let(:project) { create(:project, :repository) } set(:project) { create(:project, :repository) }
let(:pipeline) do set(:pipeline) do
create(:ci_pipeline, project: project, sha: project.commit.id) create(:ci_pipeline, project: project, sha: project.commit.id)
end end
...@@ -464,4 +464,73 @@ describe CommitStatus do ...@@ -464,4 +464,73 @@ describe CommitStatus do
it { is_expected.to be_script_failure } it { is_expected.to be_script_failure }
end end
end end
describe 'ensure stage assignment' do
context 'when commit status has a stage_id assigned' do
let!(:stage) do
create(:ci_stage_entity, project: project, pipeline: pipeline)
end
let(:commit_status) do
create(:commit_status, stage_id: stage.id, name: 'rspec', stage: 'test')
end
it 'does not create a new stage' do
expect { commit_status }.not_to change { Ci::Stage.count }
expect(commit_status.stage_id).to eq stage.id
end
end
context 'when commit status does not have a stage_id assigned' do
let(:commit_status) do
create(:commit_status, name: 'rspec', stage: 'test', status: :success)
end
let(:stage) { Ci::Stage.first }
it 'creates a new stage' do
expect { commit_status }.to change { Ci::Stage.count }.by(1)
expect(stage.name).to eq 'test'
expect(stage.project).to eq commit_status.project
expect(stage.pipeline).to eq commit_status.pipeline
expect(stage.status).to eq commit_status.status
expect(commit_status.stage_id).to eq stage.id
end
end
context 'when commit status does not have stage but it exists' do
let!(:stage) do
create(:ci_stage_entity, project: project,
pipeline: pipeline,
name: 'test')
end
let(:commit_status) do
create(:commit_status, project: project,
pipeline: pipeline,
name: 'rspec',
stage: 'test',
status: :success)
end
it 'uses existing stage' do
expect { commit_status }.not_to change { Ci::Stage.count }
expect(commit_status.stage_id).to eq stage.id
expect(stage.reload.status).to eq commit_status.status
end
end
context 'when commit status is being imported' do
let(:commit_status) do
create(:commit_status, name: 'rspec', stage: 'test', importing: true)
end
it 'does not create a new stage' do
expect { commit_status }.not_to change { Ci::Stage.count }
expect(commit_status.stage_id).not_to be_present
end
end
end
end end
...@@ -107,7 +107,7 @@ describe PipelineDetailsEntity do ...@@ -107,7 +107,7 @@ describe PipelineDetailsEntity do
it 'contains stages' do it 'contains stages' do
expect(subject).to include(:details) expect(subject).to include(:details)
expect(subject[:details]).to include(:stages) expect(subject[:details]).to include(:stages)
expect(subject[:details][:stages].first).to include(name: 'external') expect(subject[:details][:stages].first).to include(name: 'test')
end end
end end
......
...@@ -94,6 +94,7 @@ module CycleAnalyticsHelpers ...@@ -94,6 +94,7 @@ module CycleAnalyticsHelpers
ref: 'master', ref: 'master',
tag: false, tag: false,
name: 'dummy', name: 'dummy',
stage: 'dummy',
pipeline: dummy_pipeline, pipeline: dummy_pipeline,
protected: false) protected: false)
end end
......