Commit 9dcb52fa authored by GitLab Bot

Merge remote-tracking branch 'upstream/master' into ce-to-ee-2018-09-27

# Conflicts:
#	spec/services/quick_actions/interpret_service_spec.rb

[ci skip]
parents bb65ff07 c02057b4
......@@ -72,11 +72,13 @@ For a first-time step-by-step guide to the contribution process, please see
Looking for something to work on? Look for issues in the [Backlog (Accepting merge requests) milestone](#i-want-to-contribute).
GitLab comes into two flavors, GitLab Community Edition (CE) our free and open
GitLab comes in two flavors, GitLab Community Edition (CE) our free and open
source edition, and GitLab Enterprise Edition (EE) which is our commercial
edition. Throughout this guide you will see references to CE and EE for
abbreviation.
To get an overview of GitLab community membership including those that would be reviewing or merging your contributions, please visit [the community roles page](doc/development/contributing/community_roles.md).
If you want to know how the GitLab [core team]
operates please see [the GitLab contributing process](PROCESS.md).
......
......@@ -6,7 +6,7 @@ import { visitUrl } from './lib/utils/url_utility';
import bp from './breakpoints';
import { numberToHumanSize } from './lib/utils/number_utils';
import { setCiStatusFavicon } from './lib/utils/common_utils';
import { isScrolledToBottom, scrollDown } from './lib/utils/scroll_utils';
import { isScrolledToBottom, scrollDown, scrollUp } from './lib/utils/scroll_utils';
import LogOutputBehaviours from './lib/utils/logoutput_behaviours';
export default class Job extends LogOutputBehaviours {
......@@ -80,7 +80,7 @@ export default class Job extends LogOutputBehaviours {
}
scrollToTop() {
$(document).scrollTop(0);
scrollUp();
this.hasBeenScrolled = true;
this.toggleScroll();
}
......
......@@ -25,7 +25,7 @@
validator(value) {
return (
value === null ||
(Object.prototype.hasOwnProperty.call(value, 'link') &&
(Object.prototype.hasOwnProperty.call(value, 'path') &&
Object.prototype.hasOwnProperty.call(value, 'method') &&
Object.prototype.hasOwnProperty.call(value, 'title'))
);
......@@ -63,7 +63,7 @@
class="text-center"
>
<a
:href="action.link"
:href="action.path"
:data-method="action.method"
class="js-job-empty-state-action btn btn-primary"
>
......
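For reference, a minimal sketch of an `action` object that satisfies the updated validator above — the three keys come from the diff, while the values are invented for illustration:

```javascript
// Illustrative only: the validator now accepts null or an object that has
// `path`, `method`, and `title` (previously `link` instead of `path`).
const action = {
  path: '/namespace/project/-/jobs/1/retry', // hypothetical URL
  method: 'post',
  title: 'Retry',
};
```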
<script>
import TimeagoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
import _ from 'underscore';
import TimeagoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
export default {
components: {
TimeagoTooltip,
},
props: {
erasedByUser: {
type: Boolean,
required: true,
export default {
components: {
TimeagoTooltip,
},
username: {
type: String,
required: false,
default: null,
props: {
user: {
type: Object,
required: false,
default: () => ({}),
},
erasedAt: {
type: String,
required: true,
},
},
linkToUser: {
type: String,
required: false,
default: null,
computed: {
isErasedByUser() {
return !_.isEmpty(this.user);
},
},
erasedAt: {
type: String,
required: true,
},
},
};
};
</script>
<template>
<div class="prepend-top-default js-build-erased">
<div class="erased alert alert-warning">
<template v-if="erasedByUser">
<template v-if="isErasedByUser">
{{ s__("Job|Job has been erased by") }}
<a :href="linkToUser">
{{ username }}
<a :href="user.web_url">
{{ user.username }}
</a>
</template>
<template v-else>
......
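A minimal sketch of the `user` prop the template above now binds to; the shape is inferred from the `user.web_url` and `user.username` bindings in the diff, and the values are invented:

```javascript
// Inferred from the template bindings above; values are illustrative.
const user = {
  username: 'root',
  web_url: 'https://gitlab.example.com/root', // hypothetical URL
};
// isErasedByUser() returns true whenever this object is non-empty,
// replacing the old erasedByUser boolean prop.
```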
......@@ -6,7 +6,7 @@
type: String,
required: true,
},
isReceivingBuildTrace: {
isComplete: {
type: Boolean,
required: true,
},
......@@ -22,7 +22,7 @@
</code>
<div
v-if="isReceivingBuildTrace"
v-if="isComplete"
class="js-log-animation build-loader-animation"
>
<div class="dot"></div>
......
<script>
import { polyfillSticky } from '~/lib/utils/sticky';
import Icon from '~/vue_shared/components/icon.vue';
import tooltip from '~/vue_shared/directives/tooltip';
import { numberToHumanSize } from '~/lib/utils/number_utils';
import { s__, sprintf } from '~/locale';
import { sprintf } from '~/locale';
export default {
components: {
......@@ -12,44 +13,48 @@
tooltip,
},
props: {
canEraseJob: {
type: Boolean,
required: true,
erasePath: {
type: String,
required: false,
default: null,
},
size: {
type: Number,
required: true,
},
rawTracePath: {
rawPath: {
type: String,
required: false,
default: null,
},
canScrollToTop: {
isScrollTopDisabled: {
type: Boolean,
required: true,
},
isScrollBottomDisabled: {
type: Boolean,
required: true,
},
isScrollingDown: {
type: Boolean,
required: true,
},
canScrollToBottom: {
isTraceSizeVisible: {
type: Boolean,
required: true,
},
},
computed: {
jobLogSize() {
return sprintf('Showing last %{startSpanTag} %{size} %{endSpanTag} of log -', {
startSpanTag: '<span class="s-truncated-info-size truncated-info-size">',
endSpanTag: '</span>',
return sprintf('Showing last %{size} of log -', {
size: numberToHumanSize(this.size),
});
},
},
mounted() {
polyfillSticky(this.$el);
},
methods: {
handleEraseJobClick() {
// eslint-disable-next-line no-alert
if (window.confirm(s__('Job|Are you sure you want to erase this job?'))) {
this.$emit('eraseJob');
}
},
handleScrollToTop() {
this.$emit('scrollJobLogTop');
},
......@@ -57,48 +62,52 @@
this.$emit('scrollJobLogBottom');
},
},
};
</script>
<template>
<div class="top-bar">
<!-- truncate information -->
<div class="js-truncated-info truncated-info d-none d-sm-block float-left">
<p v-html="jobLogSize"></p>
<template v-if="isTraceSizeVisible">
{{ jobLogSize }}
<a
v-if="rawTracePath"
:href="rawTracePath"
class="js-raw-link raw-link"
>
{{ s__("Job|Complete Raw") }}
</a>
<a
v-if="rawPath"
:href="rawPath"
class="js-raw-link raw-link"
>
{{ s__("Job|Complete Raw") }}
</a>
</template>
</div>
<!-- eo truncate information -->
<div class="controllers float-right">
<!-- links -->
<a
v-if="rawTracePath"
v-if="rawPath"
v-tooltip
:title="s__('Job|Show complete raw')"
:href="rawTracePath"
:href="rawPath"
class="js-raw-link-controller controllers-buttons"
data-container="body"
>
<icon name="doc-text" />
</a>
<button
v-if="canEraseJob"
<a
v-if="erasePath"
v-tooltip
:title="s__('Job|Erase job log')"
type="button"
:href="erasePath"
data-confirm="__('Are you sure you want to erase this build?')"
class="js-erase-link controllers-buttons"
data-container="body"
@click="handleEraseJobClick"
data-method="post"
>
<icon name="remove" />
</button>
</a>
<!-- eo links -->
<!-- scroll buttons -->
......@@ -109,7 +118,7 @@
data-container="body"
>
<button
:disabled="!canScrollToTop"
:disabled="isScrollTopDisabled"
type="button"
class="js-scroll-top btn-scroll btn-transparent btn-blank"
@click="handleScrollToTop"
......@@ -125,9 +134,10 @@
data-container="body"
>
<button
:disabled="!canScrollToBottom"
:disabled="isScrollBottomDisabled"
type="button"
class="js-scroll-bottom btn-scroll btn-transparent btn-blank"
:class="{ animate: isScrollingDown }"
@click="handleScrollToBottom"
>
<icon name="scroll_down"/>
......
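A hedged sketch of how a parent might wire up the renamed props and events of this top-bar component; only the prop and event names are taken from the diff above — the import path, values, and handlers are assumptions:

```javascript
// Sketch only: prop and event names match the diff; everything else is assumed.
import JobLogControllers from './job_log_controllers.vue'; // hypothetical path

export default {
  components: { JobLogControllers },
  data() {
    return {
      erasePath: '/namespace/project/-/jobs/1/erase', // hypothetical
      rawPath: '/namespace/project/-/jobs/1/raw', // hypothetical
      traceSize: 1024,
    };
  },
  methods: {
    // Placeholder handlers for the events emitted by the component.
    scrollUp() {},
    scrollDown() {},
  },
  template: `
    <job-log-controllers
      :erase-path="erasePath"
      :size="traceSize"
      :raw-path="rawPath"
      :is-scroll-top-disabled="false"
      :is-scroll-bottom-disabled="false"
      :is-scrolling-down="false"
      :is-trace-size-visible="true"
      @scrollJobLogTop="scrollUp"
      @scrollJobLogBottom="scrollDown"
    />
  `,
};
```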
......@@ -24,14 +24,14 @@ export default {
<div class="bs-callout bs-callout-warning">
<p
v-if="hasNoRunnersForProject"
class="js-stuck-no-runners"
class="js-stuck-no-runners append-bottom-0"
>
{{ s__(`Job|This job is stuck, because the project
doesn't have any runners online assigned to it.`) }}
</p>
<p
v-else-if="tags.length"
class="js-stuck-with-tags"
class="js-stuck-with-tags append-bottom-0"
>
{{ s__(`This job is stuck, because you don't have
any active runners online with any of these tags assigned to them:`) }}
......@@ -45,7 +45,7 @@ export default {
</p>
<p
v-else
class="js-stuck-no-active-runner"
class="js-stuck-no-active-runner append-bottom-0"
>
{{ s__(`This job is stuck, because you don't
have any active runners that can run this job.`) }}
......
import { mapState } from 'vuex';
import Vue from 'vue';
import JobMediator from './job_details_mediator';
import jobHeader from './components/header.vue';
import detailsBlock from './components/sidebar_details_block.vue';
import Job from '../job';
import JobHeader from './components/header.vue';
import DetailsBlock from './components/sidebar_details_block.vue';
import createStore from './store';
export default () => {
const { dataset } = document.getElementById('js-job-details-vue');
const mediator = new JobMediator({ endpoint: dataset.endpoint });
mediator.fetchJob();
// eslint-disable-next-line no-new
new Job();
const store = createStore();
store.dispatch('setJobEndpoint', dataset.endpoint);
store.dispatch('fetchJob');
// Header
// eslint-disable-next-line no-new
new Vue({
el: '#js-build-header-vue',
components: {
jobHeader,
},
data() {
return {
mediator,
};
JobHeader,
},
mounted() {
this.mediator.initBuildClass();
store,
computed: {
...mapState(['job', 'isLoading']),
},
render(createElement) {
return createElement('job-header', {
props: {
isLoading: this.mediator.state.isLoading,
job: this.mediator.store.state.job,
isLoading: this.isLoading,
job: this.job,
},
});
},
......@@ -41,18 +43,17 @@ export default () => {
new Vue({
el: detailsBlockElement,
components: {
detailsBlock,
DetailsBlock,
},
data() {
return {
mediator,
};
store,
computed: {
...mapState(['job', 'isLoading']),
},
render(createElement) {
return createElement('details-block', {
props: {
isLoading: this.mediator.state.isLoading,
job: this.mediator.store.state.job,
isLoading: this.isLoading,
job: this.job,
runnerHelpUrl: dataset.runnerHelpUrl,
terminalPath: detailsBlockDataset.terminalPath,
},
......
import Visibility from 'visibilityjs';
import Flash from '../flash';
import Poll from '../lib/utils/poll';
import JobStore from './stores/job_store';
import JobService from './services/job_service';
import Job from '../job';
export default class JobMediator {
constructor(options = {}) {
this.options = options;
this.store = new JobStore();
this.service = new JobService(options.endpoint);
this.state = {
isLoading: false,
};
}
initBuildClass() {
this.build = new Job();
}
fetchJob() {
this.poll = new Poll({
resource: this.service,
method: 'getJob',
successCallback: response => this.successCallback(response),
errorCallback: () => this.errorCallback(),
});
if (!Visibility.hidden()) {
this.state.isLoading = true;
this.poll.makeRequest();
} else {
this.getJob();
}
Visibility.change(() => {
if (!Visibility.hidden()) {
this.poll.restart();
} else {
this.poll.stop();
}
});
}
getJob() {
return this.service
.getJob()
.then(response => this.successCallback(response))
.catch(() => this.errorCallback());
}
successCallback(response) {
this.state.isLoading = false;
return this.store.storeJob(response.data);
}
errorCallback() {
this.state.isLoading = false;
return new Flash('An error occurred while fetching the job.');
}
}
import axios from '../../lib/utils/axios_utils';
export default class JobService {
constructor(endpoint) {
this.job = endpoint;
}
getJob() {
return axios.get(this.job);
}
}
import * as types from './mutation_types';
export default {
[types.SET_JOB_ENDPOINT](state, endpoint) {
state.jobEndpoint = endpoint;
},
[types.REQUEST_STATUS_FAVICON](state) {
state.fetchingStatusFavicon = true;
},
......
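For orientation, a hedged sketch of the store actions the bundle above dispatches (`setJobEndpoint`, `fetchJob`); only those names and the `SET_JOB_ENDPOINT` mutation appear in the diff — the action bodies and the other mutation names are assumptions:

```javascript
// Sketch only: the dispatch names match the bundle above; the bodies and the
// RECEIVE_JOB_* mutation names are assumptions, not the actual implementation.
import axios from '~/lib/utils/axios_utils';
import * as types from './mutation_types';

export const setJobEndpoint = ({ commit }, endpoint) =>
  commit(types.SET_JOB_ENDPOINT, endpoint);

export const fetchJob = ({ state, commit }) =>
  axios
    .get(state.jobEndpoint)
    .then(({ data }) => commit(types.RECEIVE_JOB_SUCCESS, data)) // hypothetical
    .catch(() => commit(types.RECEIVE_JOB_ERROR)); // hypothetical
```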
export default class JobStore {
constructor() {
this.state = {
job: {},
};
}
storeJob(job = {}) {
this.state.job = job;
}
}
import $ from 'jquery';
import { canScroll, isScrolledToBottom, toggleDisableButton } from './scroll_utils';
import {
canScroll,
isScrolledToBottom,
isScrolledToTop,
isScrolledToMiddle,
toggleDisableButton,
} from './scroll_utils';
export default class LogOutputBehaviours {
constructor() {
......@@ -12,18 +18,13 @@ export default class LogOutputBehaviours {
}
toggleScroll() {
const $document = $(document);
const currentPosition = $document.scrollTop();
const scrollHeight = $document.height();
const windowHeight = $(window).height();
if (canScroll()) {
if (currentPosition > 0 && scrollHeight - currentPosition !== windowHeight) {
if (isScrolledToMiddle()) {
// User is in the middle of the log
toggleDisableButton(this.$scrollTopBtn, false);
toggleDisableButton(this.$scrollBottomBtn, false);
} else if (currentPosition === 0) {
} else if (isScrolledToTop()) {
// User is at Top of Log
toggleDisableButton(this.$scrollTopBtn, true);
......
......@@ -4,6 +4,7 @@ export const canScroll = () => $(document).height() > $(window).height();
/**
* Checks if the entire page is scrolled down all the way to the bottom
* @returns {Boolean}
*/
export const isScrolledToBottom = () => {
const $document = $(document);
......@@ -16,11 +17,34 @@ export const isScrolledToBottom = () => {
return scrollHeight - currentPosition === windowHeight;
};
/**
* Checks if page is scrolled to the top
* @returns {Boolean}
*/
export const isScrolledToTop = () => $(document).scrollTop() === 0;
export const scrollDown = () => {
const $document = $(document);
$document.scrollTop($document.height());
};
export const scrollUp = () => {
$(document).scrollTop(0);
};
/**
* Checks if scroll position is in the middle of the page
* @returns {Boolean}
*/
export const isScrolledToMiddle = () => {
const $document = $(document);
const currentPosition = $document.scrollTop();
const scrollHeight = $document.height();
const windowHeight = $(window).height();
return currentPosition > 0 && scrollHeight - currentPosition !== windowHeight;
};
export const toggleDisableButton = ($button, disable) => {
if (disable && $button.prop('disabled')) return;
$button.prop('disabled', disable);
......
......@@ -42,7 +42,7 @@ export default {
keys: ['feature', 'request'],
},
],
simpleMetrics: ['redis', 'sidekiq'],
simpleMetrics: ['redis'],
data() {
return { currentRequestId: '' };
},
......
<script>
export default {
props: {
currentRequest: {
type: Object,
required: true,
export default {
props: {
currentRequest: {
type: Object,
required: true,
},
metric: {
type: String,
required: true,
},
},
metric: {
type: String,
required: true,
computed: {
duration() {
return (
this.currentRequest.details[this.metric] &&
this.currentRequest.details[this.metric].duration
);
},
calls() {
return (
this.currentRequest.details[this.metric] && this.currentRequest.details[this.metric].calls
);
},
},
},
};
};
</script>
<template>
<div
......@@ -21,9 +34,9 @@ export default {
v-if="currentRequest.details"
class="bold"
>
{{ currentRequest.details[metric].duration }}
{{ duration }}
/
{{ currentRequest.details[metric].calls }}
{{ calls }}
</span>
{{ metric }}
</div>
......
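The new computed properties exist to guard against a metric key that is missing from `currentRequest.details` (note that `sidekiq` was also dropped from `simpleMetrics` above). A minimal illustration with invented values:

```javascript
// Illustrative only: with the old template bindings, a metric missing from
// `details` would throw; the new computed guards just yield undefined.
const currentRequest = { details: { redis: { duration: '10ms', calls: 3 } } };
const metric = 'sidekiq'; // not present in details

const duration =
  currentRequest.details[metric] && currentRequest.details[metric].duration;
// => undefined, instead of "TypeError: Cannot read property 'duration' of undefined"
```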
......@@ -39,7 +39,7 @@
.table-section {
white-space: nowrap;
$section-widths: 5 10 15 20 25 30 40 50 100;
$section-widths: 5 10 15 20 25 30 40 50 60 100;
@each $width in $section-widths {
&.section-#{$width} {
flex: 0 0 #{$width + '%'};
......
......@@ -56,7 +56,7 @@ $blue-50: #f6fafe;
$blue-100: #e4f0fb;
$blue-200: #b8d6f4;
$blue-300: #73afea;
$blue-400: #2e87e0;
$blue-400: #418cd8;
$blue-500: #1f78d1;
$blue-600: #1b69b6;
$blue-700: #17599c;
......@@ -68,7 +68,7 @@ $orange-50: #fffaf4;
$orange-100: #fff1de;
$orange-200: #fed69f;
$orange-300: #fdbc60;
$orange-400: #fca121;
$orange-400: #fca429;
$orange-500: #fc9403;
$orange-600: #de7e00;
$orange-700: #c26700;
......@@ -79,7 +79,7 @@ $orange-950: #592800;
$red-50: #fef6f5;
$red-100: #fbe5e1;
$red-200: #f2b4a9;
$red-300: #e67664;
$red-300: #ea8271;
$red-400: #e05842;
$red-500: #db3b21;
$red-600: #c0341d;
......
......@@ -281,9 +281,10 @@ class ApplicationController < ActionController::Base
end
def event_filter
# Split using comma to maintain backward compatibility Ex/ "filter1,filter2"
filters = cookies['event_filter'].split(',')[0] if cookies['event_filter'].present?
@event_filter ||= EventFilter.new(filters)
@event_filter ||=
EventFilter.new(params[:event_filter].presence || cookies[:event_filter]).tap do |new_event_filter|
cookies[:event_filter] = new_event_filter.filter
end
end
# JSON for infinite scroll via Pager object
......
......@@ -41,7 +41,7 @@ class DashboardController < Dashboard::ApplicationController
end
@events = EventCollection
.new(projects, offset: params[:offset].to_i, filter: @event_filter)
.new(projects, offset: params[:offset].to_i, filter: event_filter)
.to_a
Events::RenderService.new(current_user).execute(@events)
......
......@@ -72,8 +72,12 @@ module Ci
'', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').archive)
end
scope :with_existing_job_artifacts, ->(query) do
where('EXISTS (?)', ::Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').merge(query))
end
scope :with_archived_trace, ->() do
where('EXISTS (?)', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').trace)
with_existing_job_artifacts(Ci::JobArtifact.trace)
end
scope :without_archived_trace, ->() do
......@@ -81,10 +85,12 @@ module Ci
end
scope :with_test_reports, ->() do
includes(:job_artifacts_junit) # Prevent N+1 problem when iterating each ci_job_artifact row
.where('EXISTS (?)', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').test_reports)
with_existing_job_artifacts(Ci::JobArtifact.test_reports)
.eager_load_job_artifacts
end
scope :eager_load_job_artifacts, -> { includes(:job_artifacts) }
scope :with_artifacts_stored_locally, -> { with_artifacts_archive.where(artifacts_file_store: [nil, LegacyArtifactUploader::Store::LOCAL]) }
scope :with_archived_trace_stored_locally, -> { with_archived_trace.where(artifacts_file_store: [nil, LegacyArtifactUploader::Store::LOCAL]) }
scope :with_artifacts_not_expired, ->() { with_artifacts_archive.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
......@@ -408,8 +414,8 @@ module Ci
trace.exist?
end
def has_test_reports?
job_artifacts.test_reports.any?
def has_job_artifacts?
job_artifacts.any?
end
def has_old_trace?
......@@ -473,28 +479,23 @@ module Ci
end
end
def erase_artifacts!
remove_artifacts_file!
remove_artifacts_metadata!
save
end
def erase_test_reports!
# TODO: Use fast_destroy_all in the context of https://gitlab.com/gitlab-org/gitlab-ce/issues/35240
job_artifacts_junit&.destroy
# and use that for `ExpireBuildInstanceArtifactsWorker`?
def erase_erasable_artifacts!
job_artifacts.erasable.destroy_all # rubocop: disable DestroyAll
erase_old_artifacts!
end
def erase(opts = {})
return false unless erasable?
erase_artifacts!
erase_test_reports!
job_artifacts.destroy_all # rubocop: disable DestroyAll
erase_old_artifacts!
erase_trace!
update_erased!(opts[:erased_by])
end
def erasable?
complete? && (artifacts? || has_test_reports? || has_trace?)
complete? && (artifacts? || has_job_artifacts? || has_trace?)
end
def erased?
......@@ -652,8 +653,8 @@ module Ci
def collect_test_reports!(test_reports)
test_reports.get_suite(group_name).tap do |test_suite|
each_test_report do |file_type, blob|
Gitlab::Ci::Parsers.fabricate!(file_type).parse!(blob, test_suite)
each_report(Ci::JobArtifact::TEST_REPORT_FILE_TYPES) do |file_type, blob|
Gitlab::Ci::Parsers::Test.fabricate!(file_type).parse!(blob, test_suite)
end
end
end
......@@ -673,6 +674,13 @@ module Ci
private
def erase_old_artifacts!
# TODO: To be removed once we get rid of
remove_artifacts_file!
remove_artifacts_metadata!
save
end
def successful_deployment_status
if success? && last_deployment&.last?
return :last
......@@ -683,14 +691,19 @@ module Ci
:creating
end
def each_test_report
Ci::JobArtifact::TEST_REPORT_FILE_TYPES.each do |file_type|
public_send("job_artifacts_#{file_type}").each_blob do |blob| # rubocop:disable GitlabSecurity/PublicSend
yield file_type, blob
def each_report(report_types)
job_artifacts_for_types(report_types).each do |report_artifact|
report_artifact.each_blob do |blob|
yield report_artifact.file_type, blob
end
end
end
def job_artifacts_for_types(report_types)
# Use select to leverage cached associations and avoid N+1 queries
job_artifacts.select { |artifact| artifact.file_type.in?(report_types) }
end
def update_artifacts_size
self.artifacts_size = legacy_artifacts_file&.size
end
......
......@@ -11,8 +11,28 @@ module Ci
NotSupportedAdapterError = Class.new(StandardError)
TEST_REPORT_FILE_TYPES = %w[junit].freeze
DEFAULT_FILE_NAMES = { junit: 'junit.xml' }.freeze
TYPE_AND_FORMAT_PAIRS = { archive: :zip, metadata: :gzip, trace: :raw, junit: :gzip }.freeze
NON_ERASABLE_FILE_TYPES = %w[trace].freeze
DEFAULT_FILE_NAMES = {
archive: nil,
metadata: nil,
trace: nil,
junit: 'junit.xml',
sast: 'gl-sast-report.json',
dependency_scanning: 'gl-dependency-scanning-report.json',
container_scanning: 'gl-container-scanning-report.json',
dast: 'gl-dast-report.json'
}.freeze
TYPE_AND_FORMAT_PAIRS = {
archive: :zip,
metadata: :gzip,
trace: :raw,
junit: :gzip,
sast: :gzip,
dependency_scanning: :gzip,
container_scanning: :gzip,
dast: :gzip
}.freeze
belongs_to :project
belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id
......@@ -30,8 +50,18 @@ module Ci
scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
scope :with_files_stored_remotely, -> { where(file_store: ::JobArtifactUploader::Store::REMOTE) }
scope :with_file_types, -> (file_types) do
types = self.file_types.select { |file_type| file_types.include?(file_type) }.values
where(file_type: types)
end
scope :test_reports, -> do
types = self.file_types.select { |file_type| TEST_REPORT_FILE_TYPES.include?(file_type) }.values
with_file_types(TEST_REPORT_FILE_TYPES)
end
scope :erasable, -> do
types = self.file_types.reject { |file_type| NON_ERASABLE_FILE_TYPES.include?(file_type) }.values
where(file_type: types)
end
......@@ -42,7 +72,11 @@ module Ci
archive: 1,
metadata: 2,
trace: 3,
junit: 4
junit: 4,
sast: 5, ## EE-specific
dependency_scanning: 6, ## EE-specific
container_scanning: 7, ## EE-specific
dast: 8 ## EE-specific
}
enum file_format: {
......
......@@ -73,6 +73,11 @@ module Clusters
"clientSecret" => oauth_application.secret,
"callbackUrl" => callback_url
}
},
"singleuser" => {
"extraEnv" => {
"GITLAB_CLUSTER_ID" => cluster.id
}
}
}
end
......
......@@ -9,6 +9,7 @@ class BuildDetailsEntity < JobEntity
expose :coverage, :erased_at, :duration
expose :tag_list, as: :tags
expose :has_trace?, as: :has_trace
expose :user, using: UserEntity
expose :runner, using: RunnerEntity
expose :pipeline, using: PipelineEntity
......
......@@ -2,7 +2,7 @@
module Files
class MultiService < Files::BaseService
UPDATE_FILE_ACTIONS = %w(update move delete).freeze
UPDATE_FILE_ACTIONS = %w(update move delete chmod).freeze
def create_commit!
transformer = Lfs::FileTransformer.new(project, @branch_name)
......
......@@ -287,8 +287,7 @@ module QuickActions
end
params '#issue | !merge_request'
condition do
issuable.persisted? &&
current_user.can?(:"update_#{issuable.to_ability_name}", issuable)
current_user.can?(:"update_#{issuable.to_ability_name}", issuable)
end
parse_params do |issuable_param|
extract_references(issuable_param, :issue).first ||
......
......@@ -117,7 +117,7 @@
= link_to edit_group_path(@group) do
.nav-icon-container
= sprite_icon('settings')
%span.nav-item-name
%span.nav-item-name.qa-settings-item
= _('Settings')
%ul.sidebar-sub-level-items
= nav_link(path: %w[groups#projects groups#edit badges#index ci_cd#show], html_options: { class: "fly-out-top-item" } ) do
......
......@@ -2,13 +2,13 @@
.fade-left= icon('angle-left')
.fade-right= icon('angle-right')
%ul.nav-links.event-filter.scrolling-tabs.nav.nav-tabs
= event_filter_link EventFilter.all, _('All'), s_('EventFilterBy|Filter by all')
= event_filter_link EventFilter::ALL, _('All'), s_('EventFilterBy|Filter by all')
- if event_filter_visible(:repository)
= event_filter_link EventFilter.push, _('Push events'), s_('EventFilterBy|Filter by push events')
= event_filter_link EventFilter::PUSH, _('Push events'), s_('EventFilterBy|Filter by push events')
- if event_filter_visible(:merge_requests)
= event_filter_link EventFilter.merged, _('Merge events'), s_('EventFilterBy|Filter by merge events')
= event_filter_link EventFilter::MERGED, _('Merge events'), s_('EventFilterBy|Filter by merge events')
- if event_filter_visible(:issues)
= event_filter_link EventFilter.issue, _('Issue events'), s_('EventFilterBy|Filter by issue events')
= event_filter_link EventFilter::ISSUE, _('Issue events'), s_('EventFilterBy|Filter by issue events')
- if comments_visible?
= event_filter_link EventFilter.comments, _('Comments'), s_('EventFilterBy|Filter by comments')
= event_filter_link EventFilter.team, _('Team'), s_('EventFilterBy|Filter by team')
= event_filter_link EventFilter::COMMENTS, _('Comments'), s_('EventFilterBy|Filter by comments')
= event_filter_link EventFilter::TEAM, _('Team'), s_('EventFilterBy|Filter by team')
......@@ -13,7 +13,7 @@ class ExpireBuildInstanceArtifactsWorker
return unless build&.project && !build.project.pending_delete
Rails.logger.info "Removing artifacts for build #{build.id}..."
build.erase_artifacts!
build.erase_erasable_artifacts!
end
# rubocop: enable CodeReuse/ActiveRecord
end
---
title: "Allow events filter to be set in the URL in addition to cookie"
merge_request: 21557
author: Igor @igas
type: added
---
title: Allow chmod of files with the commits API
merge_request: 21866
author: Jacopo Beschi @jacopo-beschi
type: added
---
title: Don't create license_management build when not included in license
merge_request: 21958
author:
type: performance
---
title: Update all gitlab CI templates from gitlab-org/gitlab-ci-yml
merge_request: 21929
author:
type: added
---
title: Extracts scroll position check into reusable functions
merge_request:
author:
type: other
---
title: Uses Vuex store in job details page and removes old mediator pattern
merge_request:
author:
type: other
---
title: Fixes performance bar looking for a key in an undefined prop
merge_request:
author:
type: fixed
---
title: Expose has_trace in job API
merge_request: 21950
author:
type: other
---
title: Extend reports feature to support Security Products
merge_request: 21892
author:
type: added
---
title: Adds the user's public_email attribute to the API
merge_request: 21909
author: Alexis Reigel
type: added
---
title: Adds an extra width to the responsive tables
merge_request: 21928
author:
type: other
---
title: Fix blue, orange, and red color inconsistencies
merge_request: 21972
author:
type: other
---
title: Guard against a login attempt with invalid CSRF token
merge_request: 21934
author:
type: fixed
---
title: Allow /copy_metadata for new issues and MRs
merge_request: 21953
author:
type: changed
......@@ -31,6 +31,11 @@ Rails.application.configure do |config|
Warden::Manager.before_logout(scope: :user) do |user, auth, opts|
user ||= auth.user
# Rails CSRF protection may attempt to log out a user before that
# user even logs in
next unless user
activity = Gitlab::Auth::Activity.new(opts)
tracker = Gitlab::Auth::BlockedUserTracker.new(user, auth)
......
......@@ -83,12 +83,13 @@ POST /projects/:id/repository/commits
| `actions[]` Attribute | Type | Required | Description |
| --------------------- | ---- | -------- | ----------- |
| `action` | string | yes | The action to perform, `create`, `delete`, `move`, `update` |
| `action` | string | yes | The action to perform, `create`, `delete`, `move`, `update`, `chmod`|
| `file_path` | string | yes | Full path to the file. Ex. `lib/class.rb` |
| `previous_path` | string | no | Original full path to the file being moved. Ex. `lib/class1.rb` |
| `content` | string | no | File content, required for all except `delete`. Optional for `move` |
| `previous_path` | string | no | Original full path to the file being moved. Ex. `lib/class1.rb`. Only considered for `move` action. |
| `content` | string | no | File content, required for all except `delete` and `chmod`. Optional for `move` |
| `encoding` | string | no | `text` or `base64`. `text` is default. |
| `last_commit_id` | string | no | Last known file commit id. Will be only considered in update, move and delete actions. |
| `execute_filemode` | boolean | no | When `true/false` enables/disables the execute flag on the file. Only considered for `chmod` action. |
```bash
PAYLOAD=$(cat << 'JSON'
......@@ -115,6 +116,11 @@ PAYLOAD=$(cat << 'JSON'
"action": "update",
"file_path": "foo/bar5",
"content": "new content"
},
{
"action": "chmod",
"file_path": "foo/bar5",
"execute_filemode": true
}
]
}
......
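A hedged sketch of sending the new `chmod` action to this endpoint from JavaScript; the endpoint shape and payload fields follow the docs above, while the host, token, and project ID are placeholders:

```javascript
// Sketch only: POST /projects/:id/repository/commits with a chmod action.
// Host, token, and project ID are placeholders, not real values.
const payload = {
  branch: 'master',
  commit_message: 'make bar5 executable',
  actions: [
    { action: 'chmod', file_path: 'foo/bar5', execute_filemode: true },
  ],
};

fetch('https://gitlab.example.com/api/v4/projects/1/repository/commits', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'PRIVATE-TOKEN': '<your_access_token>', // placeholder
  },
  body: JSON.stringify(payload),
});
```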
......@@ -46,19 +46,21 @@ Example of response
"status": "success",
"tag": false,
"user": {
"id": 1,
"name": "Administrator",
"username": "root",
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://gitlab.dev/root",
"created_at": "2015-12-21T13:14:24.077Z",
"bio": null,
"created_at": "2016-08-11T07:09:20.351Z",
"id": 1,
"linkedin": "",
"location": null,
"name": "Administrator",
"public_email": "",
"skype": "",
"state": "active",
"linkedin": "",
"twitter": "",
"username": "root",
"web_url": "http://localhost:3000/root",
"website_url": ""
"website_url": "",
"organization": ""
}
},
"environment": {
......@@ -103,19 +105,21 @@ Example of response
"status": "success",
"tag": false,
"user": {
"id": 1,
"name": "Administrator",
"username": "root",
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://gitlab.dev/root",
"created_at": "2015-12-21T13:14:24.077Z",
"bio": null,
"created_at": "2016-08-11T07:09:20.351Z",
"id": 1,
"linkedin": "",
"location": null,
"name": "Administrator",
"public_email": "",
"skype": "",
"state": "active",
"linkedin": "",
"twitter": "",
"username": "root",
"web_url": "http://localhost:3000/root",
"website_url": ""
"website_url": "",
"organization": ""
}
},
"environment": {
......@@ -188,19 +192,20 @@ Example of response
"started_at": null,
"finished_at": "2016-08-11T11:32:35.145Z",
"user": {
"id": 1,
"name": "Administrator",
"username": "root",
"id": 1,
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://localhost:3000/root",
"created_at": "2016-08-11T07:09:20.351Z",
"web_url": "http://gitlab.dev/root",
"created_at": "2015-12-21T13:14:24.077Z",
"bio": null,
"location": null,
"skype": "",
"linkedin": "",
"twitter": "",
"website_url": ""
"website_url": "",
"organization": ""
},
"commit": {
"id": "a91957a858320c0e17f3a0eca7cfacbff50ea29a",
......
......@@ -53,18 +53,21 @@ Example of response
"tag": false,
"web_url": "https://example.com/foo/bar/-/jobs/6",
"user": {
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"bio": null,
"created_at": "2015-12-21T13:14:24.077Z",
"id": 1,
"linkedin": "",
"name": "Administrator",
"skype": "",
"state": "active",
"twitter": "",
"username": "root",
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://gitlab.dev/root",
"website_url": ""
"created_at": "2015-12-21T13:14:24.077Z",
"bio": null,
"location": null,
"public_email": "",
"skype": "",
"linkedin": "",
"twitter": "",
"website_url": "",
"organization": ""
}
},
{
......@@ -109,18 +112,21 @@ Example of response
"tag": false,
"web_url": "https://example.com/foo/bar/-/jobs/7",
"user": {
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"bio": null,
"created_at": "2015-12-21T13:14:24.077Z",
"id": 1,
"linkedin": "",
"name": "Administrator",
"skype": "",
"state": "active",
"twitter": "",
"username": "root",
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://gitlab.dev/root",
"website_url": ""
"created_at": "2015-12-21T13:14:24.077Z",
"bio": null,
"location": null,
"public_email": "",
"skype": "",
"linkedin": "",
"twitter": "",
"website_url": "",
"organization": ""
}
}
]
......@@ -180,18 +186,21 @@ Example of response
"tag": false,
"web_url": "https://example.com/foo/bar/-/jobs/6",
"user": {
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"bio": null,
"created_at": "2015-12-21T13:14:24.077Z",
"id": 1,
"linkedin": "",
"name": "Administrator",
"skype": "",
"state": "active",
"twitter": "",
"username": "root",
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://gitlab.dev/root",
"website_url": ""
"created_at": "2015-12-21T13:14:24.077Z",
"bio": null,
"location": null,
"public_email": "",
"skype": "",
"linkedin": "",
"twitter": "",
"website_url": "",
"organization": ""
}
},
{
......@@ -236,18 +245,21 @@ Example of response
"tag": false,
"web_url": "https://example.com/foo/bar/-/jobs/7",
"user": {
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"bio": null,
"created_at": "2015-12-21T13:14:24.077Z",
"id": 1,
"linkedin": "",
"name": "Administrator",
"skype": "",
"state": "active",
"twitter": "",
"username": "root",
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://gitlab.dev/root",
"website_url": ""
"created_at": "2015-12-21T13:14:24.077Z",
"bio": null,
"location": null,
"public_email": "",
"skype": "",
"linkedin": "",
"twitter": "",
"website_url": "",
"organization": ""
}
}
]
......@@ -305,18 +317,21 @@ Example of response
"tag": false,
"web_url": "https://example.com/foo/bar/-/jobs/8",
"user": {
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"bio": null,
"created_at": "2015-12-21T13:14:24.077Z",
"id": 1,
"linkedin": "",
"name": "Administrator",
"skype": "",
"state": "active",
"twitter": "",
"username": "root",
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://gitlab.dev/root",
"website_url": ""
"created_at": "2015-12-21T13:14:24.077Z",
"bio": null,
"location": null,
"public_email": "",
"skype": "",
"linkedin": "",
"twitter": "",
"website_url": "",
"organization": ""
}
}
```
......
......@@ -27,10 +27,16 @@ Parameters:
"web_url": "http://localhost:3000/john_smith",
"created_at": "2015-09-03T07:24:01.670Z",
"bio": null,
"location": null,
"public_email": "john@example.com",
"skype": "",
"linkedin": "",
"twitter": "",
"website_url": "",
"organization": null,
"last_sign_in_at": "2015-09-03T07:24:01.670Z",
"confirmed_at": "2015-09-03T07:24:01.670Z",
"last_activity_on": "2015-09-03",
"email": "john@example.com",
"theme_id": 2,
"color_scheme_id": 1,
......@@ -40,6 +46,8 @@ Parameters:
"can_create_group": true,
"can_create_project": true,
"two_factor_enabled": false
"external": false,
"private_profile": null
}
}
```
......@@ -439,6 +439,11 @@ Parameters:
"id" : 1,
"name" : "Administrator"
},
"diff_refs": {
"base_sha": "1111111111111111111111111111111111111111",
"head_sha": "2222222222222222222222222222222222222222",
"start_sha": "3333333333333333333333333333333333333333"
},
"diverged_commits_count": 2
}
```
......
......@@ -294,6 +294,7 @@ Example response:
"created_at": "2017-11-16T18:38:46.000Z",
"bio": null,
"location": null,
"public_email": "",
"skype": "",
"linkedin": "",
"twitter": "",
......
......@@ -201,6 +201,7 @@ Parameters:
"created_at": "2012-05-23T08:00:58Z",
"bio": null,
"location": null,
"public_email": "john@example.com",
"skype": "",
"linkedin": "",
"twitter": "",
......@@ -232,6 +233,7 @@ Parameters:
"is_admin": false,
"bio": null,
"location": null,
"public_email": "john@example.com",
"skype": "",
"linkedin": "",
"twitter": "",
......@@ -372,6 +374,7 @@ GET /user
"created_at": "2012-05-23T08:00:58Z",
"bio": null,
"location": null,
"public_email": "john@example.com",
"skype": "",
"linkedin": "",
"twitter": "",
......@@ -420,6 +423,7 @@ GET /user
"is_admin": false,
"bio": null,
"location": null,
"public_email": "john@example.com",
"skype": "",
"linkedin": "",
"twitter": "",
......
......@@ -314,8 +314,8 @@ build:
stage: build
script:
- docker pull $CONTAINER_IMAGE:latest || true
- docker build --cache-from $CONTAINER_IMAGE:latest --tag $CONTAINER_IMAGE:$CI_BUILD_REF --tag $CONTAINER_IMAGE:latest .
- docker push $CONTAINER_IMAGE:$CI_BUILD_REF
- docker build --cache-from $CONTAINER_IMAGE:latest --tag $CONTAINER_IMAGE:$CI_COMMIT_SHA --tag $CONTAINER_IMAGE:latest .
- docker push $CONTAINER_IMAGE:$CI_COMMIT_SHA
- docker push $CONTAINER_IMAGE:latest
```
......
### Community members & roles
GitLab community members and their privileges/responsibilities.
| Roles | Responsibilities | Requirements |
|-------|------------------|--------------|
| Maintainer | Accepts merge requests on several GitLab projects | Added to the [team page](https://about.gitlab.com/team/). An expert on code reviews and knows the product/code base |
| Reviewer | Performs code reviews on MRs | Added to the [team page](https://about.gitlab.com/team/) |
| Developer |Has access to GitLab internal infrastructure & issues (e.g. HR-related) | GitLab employee or a Core Team member (with an NDA) |
| Contributor | Can make contributions to all GitLab public projects | Have a GitLab.com account |
[List of current reviewers/maintainers](https://about.gitlab.com/handbook/engineering/projects/#gitlab-ce)
\ No newline at end of file
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)*
- [Implement design & UI elements](#implement-design--ui-elements)
- [Style guides](#style-guides)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
## Implement design & UI elements
# Implement design & UI elements
For guidance on UX implementation at GitLab, please refer to our [Design System](https://design.gitlab.com/).
......@@ -34,27 +25,27 @@ In order to complete a product discovery issue in a release, you must complete t
## Style guides
1. [Ruby](https://github.com/bbatsov/ruby-style-guide).
Important sections include [Source Code Layout][rss-source] and
[Naming][rss-naming]. Use:
- multi-line method chaining style **Option A**: dot `.` on the second line
- string literal quoting style **Option A**: single quoted by default
1. [Rails](https://github.com/bbatsov/rails-style-guide)
1. [Newlines styleguide][newlines-styleguide]
1. [Testing][testing]
1. [JavaScript styleguide][js-styleguide]
1. [SCSS styleguide][scss-styleguide]
1. [Shell commands](../shell_commands.md) created by GitLab
contributors to enhance security
1. [Database Migrations](../migration_style_guide.md)
1. [Markdown](http://www.cirosantilli.com/markdown-styleguide)
1. [Documentation styleguide](https://docs.gitlab.com/ee/development/documentation/styleguide.html)
1. Interface text should be written subjectively instead of objectively. It
should be the GitLab core team addressing a person. It should be written in
present time and never use past tense (has been/was). For example instead
of _prohibited this user from being saved due to the following errors:_ the
text should be _sorry, we could not create your account because:_
1. Code should be written in [US English][us-english]
1. [Ruby](https://github.com/bbatsov/ruby-style-guide).
Important sections include [Source Code Layout][rss-source] and
[Naming][rss-naming]. Use:
- multi-line method chaining style **Option A**: dot `.` on the second line
- string literal quoting style **Option A**: single quoted by default
1. [Rails](https://github.com/bbatsov/rails-style-guide)
1. [Newlines styleguide][newlines-styleguide]
1. [Testing][testing]
1. [JavaScript styleguide][js-styleguide]
1. [SCSS styleguide][scss-styleguide]
1. [Shell commands](../shell_commands.md) created by GitLab
contributors to enhance security
1. [Database Migrations](../migration_style_guide.md)
1. [Markdown](http://www.cirosantilli.com/markdown-styleguide)
1. [Documentation styleguide](https://docs.gitlab.com/ee/development/documentation/styleguide.html)
1. Interface text should be written subjectively instead of objectively. It
should be the GitLab core team addressing a person. It should be written in
present time and never use past tense (has been/was). For example instead
of _prohibited this user from being saved due to the following errors:_ the
text should be _sorry, we could not create your account because:_
1. Code should be written in [US English][us-english]
This is also the style used by linting tools such as
[RuboCop](https://github.com/bbatsov/rubocop),
......
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)*
- [Contribute to GitLab](#contribute-to-gitlab)
- [Security vulnerability disclosure](#security-vulnerability-disclosure)
- [Code of conduct](#code-of-conduct)
- [Closing policy for issues and merge requests](#closing-policy-for-issues-and-merge-requests)
- [Helping others](#helping-others)
- [I want to contribute!](#i-want-to-contribute)
- [Contribution Flow](#contribution-flow)
- [Workflow labels](#workflow-labels)
- [Type labels](#type-labels)
- [Subject labels](#subject-labels)
- [Team labels](#team-labels)
- [Milestone labels](#milestone-labels)
- [Bug Priority labels](#bug-priority-labels)
- [Bug Severity labels](#bug-severity-labels)
- [Severity impact guidance](#severity-impact-guidance)
- [Label for community contributors](#label-for-community-contributors)
- [Implement design & UI elements](#implement-design--ui-elements)
- [Issue tracker](#issue-tracker)
- [Issue triaging](#issue-triaging)
- [Feature proposals](#feature-proposals)
- [Issue tracker guidelines](#issue-tracker-guidelines)
- [Issue weight](#issue-weight)
- [Regression issues](#regression-issues)
- [Technical and UX debt](#technical-and-ux-debt)
- [Stewardship](#stewardship)
- [Merge requests](#merge-requests)
- [Merge request guidelines](#merge-request-guidelines)
- [Contribution acceptance criteria](#contribution-acceptance-criteria)
- [Definition of done](#definition-of-done)
- [Style guides](#style-guides)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
## Contribute to GitLab
# Contribute to GitLab
For a first-time step-by-step guide to the contribution process, see
["Contributing to GitLab"](https://about.gitlab.com/contributing/).
......
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)*
- [Workflow labels](#workflow-labels)
- [Type labels](#type-labels)
- [Subject labels](#subject-labels)
- [Team labels](#team-labels)
- [Release Scoping labels](#release-scoping-labels)
- [Priority labels](#priority-labels)
- [Severity labels](#severity-labels)
- [Severity impact guidance](#severity-impact-guidance)
- [Label for community contributors](#label-for-community-contributors)
- [Issue triaging](#issue-triaging)
- [Feature proposals](#feature-proposals)
- [Issue tracker guidelines](#issue-tracker-guidelines)
- [Issue weight](#issue-weight)
- [Regression issues](#regression-issues)
- [Technical and UX debt](#technical-and-ux-debt)
- [Stewardship](#stewardship)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
## Workflow labels
# Workflow labels
To allow for asynchronous issue handling, we use [milestones][milestones-page]
and [labels][labels-page]. Leads and product managers handle most of the
......@@ -45,7 +22,7 @@ labels, you can _always_ add the team and type, and often also the subject.
[milestones-page]: https://gitlab.com/gitlab-org/gitlab-ce/milestones
[labels-page]: https://gitlab.com/gitlab-org/gitlab-ce/labels
### Type labels
## Type labels
Type labels are very important. They define what kind of issue this is. Every
issue should have one or more.
......@@ -61,7 +38,7 @@ already reserved for subject labels).
The descriptions on the [labels page][labels-page] explain what falls under each type label.
### Subject labels
## Subject labels
Subject labels are labels that define what area or feature of GitLab this issue
hits. They are not always necessary, but very convenient.
......@@ -75,7 +52,7 @@ issue is labeled with a subject label corresponding to your expertise.
Subject labels are always all-lowercase.
### Team labels
## Team labels
Team labels specify what team is responsible for this issue.
Assigning a team label makes sure issues get the attention of the appropriate
......@@ -107,7 +84,7 @@ indicate if an issue needs backend work, frontend work, or both.
Team labels are always capitalized so that they show up as the first label for
any issue.
### Release Scoping labels
## Release Scoping labels
Release Scoping labels help us clearly communicate expectations of the work for the
release. There are three levels of Release Scoping labels:
......@@ -138,7 +115,7 @@ This label documents the planned timeline & urgency which is used to measure aga
| ~P3 | Medium Priority | Within the next 3 releases (approx one quarter) |
| ~P4 | Low Priority | Anything outside the next 3 releases (approx beyond one quarter) |
### Severity labels
## Severity labels
Severity labels help us clearly communicate the impact of a ~bug on users.
......@@ -149,7 +126,7 @@ Severity labels help us clearly communicate the impact of a ~bug on users.
| ~S3 | Major Severity | Broken Feature, workaround acceptable | Can create merge requests only from the Merge Requests page, not through the Issue. |
| ~S4 | Low Severity | Functionality inconvenience or cosmetic issue | Label colors are incorrect / not being displayed. |
#### Severity impact guidance
### Severity impact guidance
Severity levels can be applied further depending on the facet of the impact; e.g. affected customers, GitLab.com availability, performance, etc. The below is a guideline.
......@@ -160,7 +137,7 @@ Severity levels can be applied further depending on the facet of the impact; e.g
| ~S3 | A few users or a single paid customer affected | Limited impact on important portions of GitLab.com | Degradation is likely to occur in the near future |
| ~S4 | No paid users/customer affected, or expected to in the near future | Minor impact on GitLab.com | Degradation _may_ occur but it's not likely |
### Label for community contributors
## Label for community contributors
Issues that are beneficial to our users, 'nice to haves', that we currently do
not have the capacity for or want to give the priority to, are labeled as
......@@ -210,8 +187,7 @@ any potential community contributor to @-mention per above.
[up-for-grabs]: https://gitlab.com/gitlab-org/gitlab-ce/issues?label_name=Accepting+Merge+Requests&scope=all&sort=weight_asc&state=opened
[firt-timers]: https://gitlab.com/gitlab-org/gitlab-ce/issues?label_name%5B%5D=Accepting+Merge+Requests&scope=all&sort=upvotes_desc&state=opened&weight=1
### Issue triaging
## Issue triaging
Our issue triage policies are [described in our handbook]. You are very welcome
to help the GitLab team triage issues. We also organize [issue bash events] once
......@@ -233,7 +209,7 @@ project.
[scheduled pipeline]: https://gitlab.com/gitlab-org/quality/triage-ops/pipeline_schedules/10512/edit
[quality/triage-ops]: https://gitlab.com/gitlab-org/quality/triage-ops
### Feature proposals
## Feature proposals
To create a feature proposal for CE, open an issue on the
[issue tracker of CE][ce-tracker].
......@@ -259,7 +235,7 @@ need to ask one of the [core team] members to add the label, if you do not have
If you want to create something yourself, consider opening an issue first to
discuss whether it is interesting to include this in GitLab.
### Issue tracker guidelines
## Issue tracker guidelines
**[Search the issue tracker][ce-tracker]** for similar entries before
submitting your own, there's a good chance somebody else had the same issue or
......@@ -271,7 +247,7 @@ The text in the parenthesis is there to help you with what to include. Omit it
when submitting the actual issue. You can copy-paste it and then edit as you
see fit.
### Issue weight
## Issue weight
Issue weight allows us to get an idea of the amount of work required to solve
one or multiple issues. This makes it possible to schedule work more accurately.
......@@ -293,7 +269,7 @@ is probably 1, adding a new Git Hook maybe 4 or 5, big features 7-9.
issues or chunks. You can simply not set the weight of a parent issue and set
weights to children issues.
### Regression issues
## Regression issues
Every monthly release has a corresponding issue on the CE issue tracker to keep
track of functionality broken by that release and any fixes that need to be
......@@ -313,7 +289,7 @@ addressed.
[8.3 Regressions]: https://gitlab.com/gitlab-org/gitlab-ce/issues/4127
[update the notes]: https://gitlab.com/gitlab-org/release-tools/blob/master/doc/pro-tips.md#update-the-regression-issue
### Technical and UX debt
## Technical and UX debt
In order to track things that can be improved in GitLab's codebase,
we use the ~"technical debt" label in [GitLab's issue tracker][ce-tracker].
......@@ -337,7 +313,7 @@ for a release by the appropriate person.
Make sure to mention the merge request that the ~"technical debt" issue or
~"UX debt" issue is associated with in the description of the issue.
### Stewardship
## Stewardship
For issues related to the open source stewardship of GitLab,
there is the ~"stewardship" label.
......
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)*
- [Merge requests](#merge-requests)
- [Merge request guidelines](#merge-request-guidelines)
- [Contribution acceptance criteria](#contribution-acceptance-criteria)
- [Definition of done](#definition-of-done)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
## Merge requests
# Merge requests
We welcome merge requests with fixes and improvements to GitLab code, tests,
and/or documentation. The issues that are specifically suitable for
......@@ -36,7 +25,7 @@ some potentially easy issues.
To start with GitLab development download the [GitLab Development Kit][gdk] and
see the [Development section](../README.md) for some guidelines.
### Merge request guidelines
## Merge request guidelines
If you can, please submit a merge request with the fix or improvements
including tests. If you don't know how to fix the issue but can write a test
......@@ -114,7 +103,7 @@ Please ensure that your merge request meets the contribution acceptance criteria
When having your code reviewed and when reviewing merge requests please take the
[code review guidelines](../code_review.md) into account.
### Contribution acceptance criteria
## Contribution acceptance criteria
1. The change is as small as possible
1. Include proper tests and make all tests pass (unless it contains a test
......
......@@ -43,13 +43,13 @@ how to structure GitLab docs.
Currently GitLab docs use Redcarpet as [markdown](../../user/markdown.md) engine, but there's an [open discussion](https://gitlab.com/gitlab-com/gitlab-docs/issues/50) for implementing Kramdown in the near future.
All the docs follow the [documentation style guidelines](styleguide.md).
All the docs follow the [documentation style guidelines](styleguide.md). See [Linting](#linting) for help to follow the guidelines.
## Documentation directory structure
The documentation is structured based on the GitLab UI structure itself,
separated by [`user`](https://gitlab.com/gitlab-org/gitlab-ce/tree/master/doc/user),
[`administrator`](https://gitlab.com/gitlab-org/gitlab-ce/tree/master/doc/administration), and [`contributor`](https://gitlab.com/gitlab-org/gitlab-ce/tree/master/doc/development).
[`administrator`](https://gitlab.com/gitlab-org/gitlab-ce/tree/master/doc/administration), and [`contributor`](https://gitlab.com/gitlab-org/gitlab-ce/tree/master/doc/development).
In order to have a [solid site structure](https://searchengineland.com/seo-benefits-developing-solid-site-structure-277456) for our documentation,
all docs should be linked. Every new document should be cross-linked to its related documentation, and linked from its topic-related index, when existent.
......@@ -223,6 +223,108 @@ redirect_from: 'https://docs.gitlab.com/my-old-location/README.html'
Note: it is necessary to include the file name in the `redirect_from` URL,
even if it's `index.html` or `README.html`.
## Linting
To help adhere to the [documentation style guidelines](styleguide.md), and to improve the content
added to documentation, consider locally installing and running documentation linters. This will
help you catch common issues before raising merge requests for review of documentation.
The following are some suggested linters you can install locally and sample configuration:
- `proselint`
- `markdownlint`
NOTE: **Note:**
This list does not limit what other linters you can add to your local documentation writing
toolchain.
### `proselint`
`proselint` checks for common problems with English prose. It provides a
[plethora of checks](http://proselint.com/checks/) that are helpful for technical writing.
`proselint` can be used [on the command line](http://proselint.com/utility/), either on a single
Markdown file or on all Markdown files in a project. For example, to run `proselint` on all
documentation in the [`gitlab-ce` project](https://gitlab.com/gitlab-org/gitlab-ce), run the
following commands from within the `gitlab-ce` project:
```sh
cd doc
proselint **/*.md
```
`proselint` can also be run from within editors using plugins. For example, the following plugins
are available:
- [Sublime Text](https://packagecontrol.io/packages/SublimeLinter-contrib-proselint)
- [Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=PatrykPeszko.vscode-proselint)
- [Others](https://github.com/amperser/proselint#plugins-for-other-software)
#### Sample `proselint` configuration
All of the checks are good to use. However, excluding the `typography.symbols` checks might reduce
noise. The following sample `proselint` configuration disables the `typography.symbols` checks:
```json
{
"checks": {
"typography.symbols": false
}
}
```
A file with `proselint` configuration must be placed in a
[valid location](https://github.com/amperser/proselint#checks). For example, `~/.config/proselint/config`.
### `markdownlint`
`markdownlint` checks that certain rules ([example](https://github.com/DavidAnson/markdownlint/blob/master/README.md#rules--aliases))
are followed for Markdown syntax. Our [style guidelines](styleguide.md) elaborate on which choices
must be made when selecting Markdown syntax for GitLab documentation and this tool helps
catch deviations from those guidelines.
`markdownlint` can be used [on the command line](https://github.com/igorshubovych/markdownlint-cli#markdownlint-cli--),
either on a single Markdown file or on all Markdown files in a project. For example, to run
`markdownlint` on all documentation in the [`gitlab-ce` project](https://gitlab.com/gitlab-org/gitlab-ce),
run the following commands from within the `gitlab-ce` project:
```sh
cd doc
markdownlint **/*.md
```
`markdownlint` can also be run from within editors using plugins. For example, the following plugins
are available:
- [Sublime Text](https://packagecontrol.io/packages/SublimeLinter-contrib-markdownlint)
- [Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=DavidAnson.vscode-markdownlint)
- [Others](https://github.com/DavidAnson/markdownlint#related)
#### Sample `markdownlint` configuration
The following sample `markdownlint` configuration modifies the available default rules to:
- Adhere to the [style guidelines](styleguide.md).
- Apply conventions found in the GitLab documentation.
```json
{
"default": true,
"header-style": { "style": "atx" },
"ul-style": { "style": "dash" },
"line-length": false,
"no-trailing-punctuation": false,
"ol-prefix": { "style": "one" },
"blanks-around-fences": false,
"hr-style": { "style": "---" },
"fenced-code-language": false
}
```
For [`markdownlint`](https://github.com/DavidAnson/markdownlint/), this configuration must be
placed in a [valid location](https://github.com/igorshubovych/markdownlint-cli#configuration). For
example, `~/.markdownlintrc`.
## Testing
We treat documentation as code, thus have implemented some testing.
......@@ -278,7 +380,6 @@ for GitLab Team members.
- Label the MR `Documentation`
- Assign the correct milestone (see note below)
NOTE: **Note:**
If the release version you want to add the documentation to has already been
frozen or released, use the label `Pick into X.Y` to get it merged into
......
......@@ -10,6 +10,8 @@ GitLab documentation. Check the
Check the GitLab handbook for the [writing styles guidelines](https://about.gitlab.com/handbook/communication/#writing-style-guidelines).
For help adhering to the guidelines, see [Linting](index.md#linting).
## Files
- [Directory structure](index.md#location-and-naming-documents): place the docs
......
......@@ -591,10 +591,11 @@ This procedure assumes that:
First make sure your backup tar file is in the backup directory described in the
`gitlab.rb` configuration `gitlab_rails['backup_path']`. The default is
`/var/opt/gitlab/backups`.
`/var/opt/gitlab/backups`. It needs to be owned by the `git` user.
```shell
sudo cp 11493107454_2018_04_25_10.6.4-ce_gitlab_backup.tar /var/opt/gitlab/backups/
sudo chown git.git /var/opt/gitlab/backups/11493107454_2018_04_25_10.6.4-ce_gitlab_backup.tar
```
Stop the processes that are connected to the database. Leave the rest of GitLab
......
......@@ -27,7 +27,7 @@ to enable it.
1. First, ask your system administrator to enable GitLab Container Registry
following the [administration documentation](../../administration/container_registry.md).
If you are using GitLab.com, this is enabled by default so you can start using
the Registry immediately. Currently there is a soft (10GB) size restriction for
registry on GitLab.com, as part of the [repository size limit](repository/index.html#repository-size).
1. Go to your [project's General settings](settings/index.md#sharing-and-permissions)
and enable the **Container Registry** feature on your project. For new
......@@ -216,7 +216,7 @@ needs to trust the mitmproxy SSL certificates for this to work.
The following installation instructions assume you are running Ubuntu:
1. Install mitmproxy (see http://docs.mitmproxy.org/en/stable/install.html)
1. [Install mitmproxy](https://docs.mitmproxy.org/stable/overview-installation/).
1. Run `mitmproxy --port 9000` to generate its certificates.
Enter <kbd>CTRL</kbd>-<kbd>C</kbd> to quit.
1. Install the certificate from `~/.mitmproxy` to your system:
......@@ -293,4 +293,4 @@ Once the right permissions were set, the error will go away.
[docker-docs]: https://docs.docker.com/engine/userguide/intro/
[pat]: ../profile/personal_access_tokens.md
[pdt]: ../project/deploy_tokens/index.md
[reconfigure]: ../../administration/restart_gitlab.md#omnibus-gitlab-reconfigure
\ No newline at end of file
[reconfigure]: ../../administration/restart_gitlab.md#omnibus-gitlab-reconfigure
......@@ -73,7 +73,26 @@ module API
params do
requires :branch, type: String, desc: 'Name of the branch to commit into. To create a new branch, also provide `start_branch`.', allow_blank: false
requires :commit_message, type: String, desc: 'Commit message'
requires :actions, type: Array[Hash], desc: 'Actions to perform in commit'
requires :actions, type: Array, desc: 'Actions to perform in commit' do
requires :action, type: String, desc: 'The action to perform, `create`, `delete`, `move`, `update`, `chmod`', values: %w[create update move delete chmod].freeze
requires :file_path, type: String, desc: 'Full path to the file. Ex. `lib/class.rb`'
given action: ->(action) { action == 'move' } do
requires :previous_path, type: String, desc: 'Original full path to the file being moved. Ex. `lib/class1.rb`'
end
given action: ->(action) { %w[create move].include? action } do
optional :content, type: String, desc: 'File content'
end
given action: ->(action) { action == 'update' } do
requires :content, type: String, desc: 'File content'
end
optional :encoding, type: String, desc: '`text` or `base64`', default: 'text', values: %w[text base64]
given action: ->(action) { %w[update move delete].include? action } do
optional :last_commit_id, type: String, desc: 'Last known file commit id'
end
given action: ->(action) { action == 'chmod' } do
requires :execute_filemode, type: Boolean, desc: 'When `true/false` enables/disables the execute flag on the file.'
end
end
optional :start_branch, type: String, desc: 'Name of the branch to start the new commit from'
optional :author_email, type: String, desc: 'Author email for commit'
optional :author_name, type: String, desc: 'Author name for commit'
......
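For context, the parameters above describe the JSON payload accepted by the commits API. The
following is a rough sketch of a request that exercises the new per-action validation; the project
ID, token, branch, and file paths are placeholders, not values taken from this change:
```sh
curl --request POST \
  --header "PRIVATE-TOKEN: <your_access_token>" \
  --header "Content-Type: application/json" \
  --data '{
    "branch": "master",
    "commit_message": "Make deploy script executable and move a class",
    "actions": [
      { "action": "chmod", "file_path": "scripts/deploy.sh", "execute_filemode": true },
      { "action": "move", "file_path": "lib/class1.rb", "previous_path": "lib/class.rb" }
    ]
  }' \
  "https://gitlab.example.com/api/v4/projects/1/repository/commits"
```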
......@@ -53,7 +53,7 @@ module API
class User < UserBasic
expose :created_at, if: ->(user, opts) { Ability.allowed?(opts[:current_user], :read_user_profile, user) }
expose :bio, :location, :skype, :linkedin, :twitter, :website_url, :organization
expose :bio, :location, :public_email, :skype, :linkedin, :twitter, :website_url, :organization
end
class UserActivity < Grape::Entity
......
class EventFilter
attr_accessor :params
class << self
def all
'all'
end
def push
'push'
end
def merged
'merged'
end
# frozen_string_literal: true
def issue
'issue'
end
def comments
'comments'
end
def team
'team'
end
class EventFilter
attr_accessor :filter
ALL = 'all'
PUSH = 'push'
MERGED = 'merged'
ISSUE = 'issue'
COMMENTS = 'comments'
TEAM = 'team'
FILTERS = [ALL, PUSH, MERGED, ISSUE, COMMENTS, TEAM].freeze
def initialize(filter)
# Split using comma to maintain backward compatibility Ex/ "filter1,filter2"
filter = filter.to_s.split(',')[0].to_s
@filter = FILTERS.include?(filter) ? filter : ALL
end
def initialize(params)
@params = if params
params.dup
else
[] # EventFilter.default_filter
end
def active?(key)
filter == key.to_s
end
# rubocop: disable CodeReuse/ActiveRecord
def apply_filter(events)
return events if params.blank? || params == EventFilter.all
case params
when EventFilter.push
case filter
when PUSH
events.where(action: Event::PUSHED)
when EventFilter.merged
when MERGED
events.where(action: Event::MERGED)
when EventFilter.comments
when COMMENTS
events.where(action: Event::COMMENTED)
when EventFilter.team
when TEAM
events.where(action: [Event::JOINED, Event::LEFT, Event::EXPIRED])
when EventFilter.issue
when ISSUE
events.where(action: [Event::CREATED, Event::UPDATED, Event::CLOSED, Event::REOPENED])
end
end
# rubocop: enable CodeReuse/ActiveRecord
def options(key)
filter = params.dup
if filter.include? key
filter.delete key
else
filter << key
end
filter
end
def active?(key)
if params.present?
params.include? key
else
key == EventFilter.all
events
end
end
# rubocop: enable CodeReuse/ActiveRecord
end
# frozen_string_literal: true
module Gitlab
module Ci
class Config
......@@ -9,7 +11,7 @@ module Gitlab
include Validatable
include Attributable
ALLOWED_KEYS = %i[junit].freeze
ALLOWED_KEYS = %i[junit sast dependency_scanning container_scanning dast].freeze
attributes ALLOWED_KEYS
......@@ -19,6 +21,10 @@ module Gitlab
with_options allow_nil: true do
validates :junit, array_of_strings_or_string: true
validates :sast, array_of_strings_or_string: true
validates :dependency_scanning, array_of_strings_or_string: true
validates :container_scanning, array_of_strings_or_string: true
validates :dast, array_of_strings_or_string: true
end
end
......
module Gitlab
module Ci
module Parsers
def self.fabricate!(file_type)
"Gitlab::Ci::Parsers::#{file_type.classify}".constantize.new
end
end
end
end
module Gitlab
module Ci
module Parsers
class Junit
JunitParserError = Class.new(StandardError)
def parse!(xml_data, test_suite)
root = Hash.from_xml(xml_data)
all_cases(root) do |test_case|
test_case = create_test_case(test_case)
test_suite.add_test_case(test_case)
end
rescue REXML::ParseException => e
raise JunitParserError, "XML parsing failed: #{e.message}"
rescue => e
raise JunitParserError, "JUnit parsing failed: #{e.message}"
end
private
def all_cases(root, parent = nil, &blk)
return unless root.present?
[root].flatten.compact.map do |node|
next unless node.is_a?(Hash)
# we allow only one top-level 'testsuites'
all_cases(node['testsuites'], root, &blk) unless parent
# we require at least one level of testsuites or testsuite
each_case(node['testcase'], &blk) if parent
# we allow multiple nested 'testsuite' (eg. PHPUnit)
all_cases(node['testsuite'], root, &blk)
end
end
def each_case(testcase, &blk)
return unless testcase.present?
[testcase].flatten.compact.map(&blk)
end
def create_test_case(data)
if data['failure']
status = ::Gitlab::Ci::Reports::TestCase::STATUS_FAILED
system_output = data['failure']
else
status = ::Gitlab::Ci::Reports::TestCase::STATUS_SUCCESS
system_output = nil
end
::Gitlab::Ci::Reports::TestCase.new(
classname: data['classname'],
name: data['name'],
file: data['file'],
execution_time: data['time'],
status: status,
system_output: system_output
)
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module Ci
module Parsers
module Test
ParserNotFoundError = Class.new(StandardError)
PARSERS = {
junit: ::Gitlab::Ci::Parsers::Test::Junit
}.freeze
def self.fabricate!(file_type)
PARSERS.fetch(file_type.to_sym).new
rescue KeyError
raise ParserNotFoundError, "Cannot find any parser matching file type '#{file_type}'"
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module Ci
module Parsers
module Test
class Junit
JunitParserError = Class.new(StandardError)
def parse!(xml_data, test_suite)
root = Hash.from_xml(xml_data)
all_cases(root) do |test_case|
test_case = create_test_case(test_case)
test_suite.add_test_case(test_case)
end
rescue REXML::ParseException => e
raise JunitParserError, "XML parsing failed: #{e.message}"
rescue => e
raise JunitParserError, "JUnit parsing failed: #{e.message}"
end
private
def all_cases(root, parent = nil, &blk)
return unless root.present?
[root].flatten.compact.map do |node|
next unless node.is_a?(Hash)
# we allow only one top-level 'testsuites'
all_cases(node['testsuites'], root, &blk) unless parent
# we require at least one level of testsuites or testsuite
each_case(node['testcase'], &blk) if parent
# we allow multiple nested 'testsuite' (eg. PHPUnit)
all_cases(node['testsuite'], root, &blk)
end
end
def each_case(testcase, &blk)
return unless testcase.present?
[testcase].flatten.compact.map(&blk)
end
def create_test_case(data)
if data['failure']
status = ::Gitlab::Ci::Reports::TestCase::STATUS_FAILED
system_output = data['failure']
else
status = ::Gitlab::Ci::Reports::TestCase::STATUS_SUCCESS
system_output = nil
end
::Gitlab::Ci::Reports::TestCase.new(
classname: data['classname'],
name: data['name'],
file: data['file'],
execution_time: data['time'],
status: status,
system_output: system_output
)
end
end
end
end
end
end
......@@ -49,7 +49,7 @@ variables:
POSTGRES_DB: $CI_ENVIRONMENT_SLUG
KUBERNETES_VERSION: 1.8.6
HELM_VERSION: 2.6.1
HELM_VERSION: 2.10.0
DOCKER_DRIVER: overlay2
......@@ -122,6 +122,9 @@ license_management:
paths: [gl-license-management-report.json]
only:
- branches
only:
variables:
- $GITLAB_FEATURES =~ /\blicense_management\b/
except:
variables:
- $LICENSE_MANAGEMENT_DISABLED
......@@ -484,15 +487,11 @@ rollout 100%:
}
function license_management() {
if echo $GITLAB_FEATURES |grep license_management > /dev/null ; then
# Extract "MAJOR.MINOR" from CI_SERVER_VERSION and generate "MAJOR-MINOR-stable"
LICENSE_MANAGEMENT_VERSION=$(echo "$CI_SERVER_VERSION" | sed 's/^\([0-9]*\)\.\([0-9]*\).*/\1-\2-stable/')
# Extract "MAJOR.MINOR" from CI_SERVER_VERSION and generate "MAJOR-MINOR-stable"
LICENSE_MANAGEMENT_VERSION=$(echo "$CI_SERVER_VERSION" | sed 's/^\([0-9]*\)\.\([0-9]*\).*/\1-\2-stable/')
docker run --volume "$PWD:/code" \
"registry.gitlab.com/gitlab-org/security-products/license-management:$LICENSE_MANAGEMENT_VERSION" analyze /code
else
echo "License management is not available in your subscription"
fi
docker run --volume "$PWD:/code" \
"registry.gitlab.com/gitlab-org/security-products/license-management:$LICENSE_MANAGEMENT_VERSION" analyze /code
}
function sast() {
......@@ -605,7 +604,6 @@ rollout 100%:
--set postgresql.postgresPassword="$POSTGRES_PASSWORD" \
--set postgresql.postgresDatabase="$POSTGRES_DB" \
--namespace="$KUBE_NAMESPACE" \
--version="$CI_PIPELINE_ID-$CI_JOB_ID" \
"$name" \
chart/
......
# An example .gitlab-ci.yml file to test (and optionally report the coverage
# This is an example .gitlab-ci.yml file to test (and optionally report the coverage
# results of) your [Julia][1] packages. Please refer to the [documentation][2]
# for more information about package development in Julia.
#
......@@ -6,7 +6,7 @@
# whatever name you have given to your package.
#
# [1]: http://julialang.org/
# [2]: http://julia.readthedocs.org/
# [2]: https://docs.julialang.org/en/v1/manual/documentation/index.html
# Below is the template to run your tests in Julia
.test_template: &test_definition
......@@ -18,25 +18,30 @@
script:
# Let's run the tests. Substitute `coverage = false` below, if you do not
# want coverage results.
- /opt/julia/bin/julia -e 'Pkg.clone(pwd()); Pkg.test("MyPackage",
coverage = true)'
- julia -e 'using Pkg; Pkg.clone(pwd()); Pkg.build("MyPackage"); Pkg.test("MyPackage"; coverage = true)'
# Comment out below if you do not want coverage results.
- /opt/julia/bin/julia -e 'Pkg.add("Coverage"); cd(Pkg.dir("MyPackage"));
- julia -e 'using Pkg; Pkg.add("Coverage");
import MyPackage; cd(joinpath(dirname(pathof(MyPackage)), ".."));
using Coverage; cl, tl = get_summary(process_folder());
println("(", cl/tl*100, "%) covered")'
# Name a test and select an appropriate image.
test:0.4.6:
image: julialang/julia:v0.4.6
# images come from Docker Hub
test:0.7:
image: julia:0.7
<<: *test_definition
test:1.0:
image: julia:1.0
<<: *test_definition
# Maybe you would like to test your package against the development branch:
test:0.5.0-dev:
image: julialang/julia:v0.5.0-dev
# ... allowing for failures, since we are testing against the development
# branch:
allow_failure: true
<<: *test_definition
# test:1.1-dev (not sure there is such an image in docker, so not tested yet):
# image: julia:v1.1-dev
# # ... allowing for failures, since we are testing against the development
# # branch:
# allow_failure: true
# <<: *test_definition
# REMARK: Do not forget to enable the coverage feature for your project, if you
# are using code coverage reporting above. This can be done by
......@@ -44,11 +49,28 @@ test:0.5.0-dev:
# - Navigating to the `CI/CD Pipelines` settings of your project,
# - Copying and pasting the default `Simplecov` regex example provided, i.e.,
# `\(\d+.\d+\%\) covered` in the `test coverage parsing` textfield.
#
# WARNING: This template is using the `julialang/julia` images from [Docker
# Example documentation deployment
pages:
image: julia:0.7
stage: deploy
script:
- apt-get update -qq && apt-get install -y git # needed by Documenter
- julia -e 'using Pkg; Pkg.clone(pwd()); Pkg.build("MyPackage");' # rebuild Julia (can be put somewhere else, I'm sure)
- julia -e 'using Pkg; import MyPackage; Pkg.add("Documenter")' # install Documenter
- julia --color=yes docs/make.jl # make documentation
- mv docs/build public # move to the directory picked up by GitLab Pages
artifacts:
paths:
- public
only:
- master
# WARNING: This template is using the `julia` images from [Docker
# Hub][3]. One can use custom Julia images and/or the official ones found
# in the same place. However, care must be taken to correctly locate the binary
# file (`/opt/julia/bin/julia` above), which is usually given on the image's
# description page.
#
# [3]: http://hub.docker.com/
# [3]: https://hub.docker.com/_/julia/
# Jigsaw is a simple static sites generator with Laravel's Blade.
#
# Full project: https://github.com/tightenco/jigsaw
image: php:7.2
# These folders are cached between builds
cache:
paths:
- vendor/
- node_modules/
before_script:
# Update packages
- apt-get update -yqq
# Install dependencies
- apt-get install -yqq gnupg zlib1g-dev libpng-dev
# Install Node 8
- curl -sL https://deb.nodesource.com/setup_8.x | bash -
- apt-get install -yqq nodejs
# Install php extensions
- docker-php-ext-install zip
# Install Composer and project dependencies.
- curl -sS https://getcomposer.org/installer | php
- php composer.phar install
# Install Node dependencies.
- npm install
pages:
script:
- npm run production
- mv build_production public
artifacts:
paths:
- public
only:
- master
# Lifted from: https://about.gitlab.com/2016/03/10/setting-up-gitlab-ci-for-ios-projects/
# This file assumes an own GitLab CI runner, set up on a macOS system.
# This file assumes an own GitLab CI runner, setup on a macOS system.
stages:
- build
- archive
......
......@@ -333,7 +333,8 @@ module Gitlab
action: action[:action].upcase.to_sym,
file_path: encode_binary(action[:file_path]),
previous_path: encode_binary(action[:previous_path]),
base64_content: action[:encoding] == 'base64'
base64_content: action[:encoding] == 'base64',
execute_filemode: !!action[:execute_filemode]
)
rescue RangeError
raise ArgumentError, "Unknown action '#{action[:action]}'"
......
......@@ -15,7 +15,7 @@ namespace :gitlab do
build.artifacts_file.migrate!(ObjectStorage::Store::REMOTE)
build.artifacts_metadata.migrate!(ObjectStorage::Store::REMOTE)
logger.info("Transferred artifacts of #{build.id} of #{build.artifacts_size} to object storage")
logger.info("Transferred artifact ID #{build.id} with size #{build.artifacts_size} to object storage")
rescue => e
logger.error("Failed to transfer artifacts of #{build.id} with error: #{e.message}")
end
......
......@@ -4212,9 +4212,6 @@ msgstr ""
msgid "Jobs"
msgstr ""
msgid "Job|Are you sure you want to erase this job?"
msgstr ""
msgid "Job|Browse"
msgstr ""
......
......@@ -108,6 +108,7 @@ module QA
end
def switch_to_register_tab
set_initial_password_if_present
click_element :register_tab
end
......
......@@ -3,7 +3,7 @@ module QA
module Project
class Activity < Page::Base
view 'app/views/shared/_event_filter.html.haml' do
element :push_events, "event_filter_link EventFilter.push, _('Push events')"
element :push_events, "event_filter_link EventFilter::PUSH, _('Push events')"
end
def go_to_push_events
......
......@@ -6,9 +6,7 @@ module QA
it 'succeeds' do
Runtime::Browser.visit(:gitlab, Page::Main::Login)
Page::Main::Login.perform do |main_login|
expect(main_login.sign_in_tab?).to be(true)
end
expect(page).to have_text('Open source software to collaborate on code')
end
end
......
......@@ -337,6 +337,22 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
end
end
end
context 'when no trace is available' do
it 'has_trace is false' do
expect(response).to match_response_schema('job/job_details')
expect(json_response['has_trace']).to be false
end
end
context 'when job has trace' do
let(:job) { create(:ci_build, :running, :trace_live, pipeline: pipeline) }
it "has_trace is true" do
expect(response).to match_response_schema('job/job_details')
expect(json_response['has_trace']).to be true
end
end
end
context 'when requesting JSON job is triggered' do
......
......@@ -14,6 +14,33 @@ FactoryBot.define do
artifact.project ||= artifact.job.project
end
trait :raw do
file_format :raw
after(:build) do |artifact, _|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/trace/sample_trace'), 'text/plain')
end
end
trait :zip do
file_format :zip
after(:build) do |artifact, _|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
end
end
trait :gzip do
file_format :gzip
after(:build) do |artifact, _|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'), 'application/x-gzip')
end
end
trait :archive do
file_type :archive
file_format :zip
......
......@@ -24,7 +24,7 @@ FactoryBot.define do
factory :push_event, class: PushEvent do
project factory: :project_empty_repo
author factory: :user
author(factory: :user) { project.creator }
action Event::PUSHED
end
......
......@@ -3,8 +3,10 @@ require 'spec_helper'
describe 'Projects > Activity > User sees activity' do
let(:project) { create(:project, :repository, :public) }
let(:user) { project.creator }
let(:issue) { create(:issue, project: project) }
before do
create(:event, :created, project: project, target: issue, author: user)
event = create(:push_event, project: project, author: user)
create(:push_event_payload,
event: event,
......@@ -12,10 +14,18 @@ describe 'Projects > Activity > User sees activity' do
commit_to: '6d394385cf567f80a8fd85055db1ab4c5295806f',
ref: 'fix',
commit_count: 1)
visit activity_project_path(project)
end
it 'shows the last push in the activity page', :js do
visit activity_project_path(project)
expect(page).to have_content "#{user.name} pushed new branch fix"
end
it 'allows to filter event with the "event_filter=issue" URL param', :js do
visit activity_project_path(project, event_filter: 'issue')
expect(page).not_to have_content "#{user.name} pushed new branch fix"
expect(page).to have_content "#{user.name} opened issue #{issue.to_reference}"
end
end
......@@ -3,12 +3,16 @@
{ "$ref": "job.json" }
],
"description": "An extension of job.json with more detailed information",
"required": [
"has_trace"
],
"properties": {
"artifact": { "$ref": "artifact.json" },
"terminal_path": { "type": "string" },
"trigger": { "$ref": "trigger.json" },
"deployment_status": { "$ref": "deployment_status.json" },
"runner": { "$ref": "runner.json" },
"runners": { "type": "runners.json" }
"runners": { "type": "runners.json" },
"has_trace": { "type": "boolean" }
}
}
......@@ -66,7 +66,7 @@ describe('Empty State', () => {
...props,
content,
action: {
link: 'runner',
path: 'runner',
title: 'Check runner',
method: 'post',
},
......
......@@ -18,9 +18,10 @@ describe('Erased block', () => {
describe('with job erased by user', () => {
beforeEach(() => {
vm = mountComponent(Component, {
erasedByUser: true,
username: 'root',
linkToUser: 'gitlab.com/root',
user: {
username: 'root',
web_url: 'gitlab.com/root',
},
erasedAt,
});
});
......@@ -40,7 +41,6 @@ describe('Erased block', () => {
describe('with erased job', () => {
beforeEach(() => {
vm = mountComponent(Component, {
erasedByUser: false,
erasedAt,
});
});
......
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import JobMediator from '~/jobs/job_details_mediator';
import job from '../mock_data';
describe('JobMediator', () => {
let mediator;
let mock;
beforeEach(() => {
mediator = new JobMediator({ endpoint: 'jobs/40291672.json' });
mock = new MockAdapter(axios);
});
afterEach(() => {
mock.restore();
});
it('should set defaults', () => {
expect(mediator.store).toBeDefined();
expect(mediator.service).toBeDefined();
expect(mediator.options).toEqual({ endpoint: 'jobs/40291672.json' });
expect(mediator.state.isLoading).toEqual(false);
});
describe('request and store data', () => {
beforeEach(() => {
mock.onGet().reply(200, job, {});
});
it('should store received data', (done) => {
mediator.fetchJob();
setTimeout(() => {
expect(mediator.store.state.job).toEqual(job);
done();
}, 0);
});
});
});
......@@ -10,50 +10,51 @@ describe('Job log controllers', () => {
vm.$destroy();
});
describe('Truncate information', () => {
const props = {
rawPath: '/raw',
erasePath: '/erase',
size: 511952,
isScrollTopDisabled: false,
isScrollBottomDisabled: false,
isScrollingDown: true,
isTraceSizeVisible: true,
};
beforeEach(() => {
vm = mountComponent(Component, {
rawTracePath: '/raw',
canEraseJob: true,
size: 511952,
canScrollToTop: true,
canScrollToBottom: true,
describe('Truncate information', () => {
describe('with isTraceSizeVisible', () => {
beforeEach(() => {
vm = mountComponent(Component, props);
});
it('renders size information', () => {
expect(vm.$el.querySelector('.js-truncated-info').textContent).toContain('499.95 KiB');
});
});
it('renders size information', () => {
expect(vm.$el.querySelector('.js-truncated-info').textContent).toContain('499.95 KiB');
});
it('renders link to raw trace', () => {
expect(vm.$el.querySelector('.js-raw-link').getAttribute('href')).toEqual('/raw');
it('renders link to raw trace', () => {
expect(vm.$el.querySelector('.js-raw-link').getAttribute('href')).toEqual('/raw');
});
});
});
describe('links section', () => {
describe('with raw trace path', () => {
it('renders raw trace link', () => {
vm = mountComponent(Component, {
rawTracePath: '/raw',
canEraseJob: true,
size: 511952,
canScrollToTop: true,
canScrollToBottom: true,
});
vm = mountComponent(Component, props);
expect(vm.$el.querySelector('.js-raw-link-controller').getAttribute('href')).toEqual('/raw');
expect(vm.$el.querySelector('.js-raw-link-controller').getAttribute('href')).toEqual(
'/raw',
);
});
});
describe('without raw trace path', () => {
it('does not render raw trace link', () => {
vm = mountComponent(Component, {
canEraseJob: true,
erasePath: '/erase',
size: 511952,
canScrollToTop: true,
canScrollToBottom: true,
isScrollTopDisabled: true,
isScrollBottomDisabled: true,
isScrollingDown: false,
isTraceSizeVisible: true,
});
expect(vm.$el.querySelector('.js-raw-link-controller')).toBeNull();
......@@ -62,52 +63,23 @@ describe('Job log controllers', () => {
describe('when is erasable', () => {
beforeEach(() => {
vm = mountComponent(Component, {
rawTracePath: '/raw',
canEraseJob: true,
size: 511952,
canScrollToTop: true,
canScrollToBottom: true,
});
vm = mountComponent(Component, props);
});
it('renders erase job button', () => {
it('renders erase job link', () => {
expect(vm.$el.querySelector('.js-erase-link')).not.toBeNull();
});
describe('on click', () => {
describe('when user confirms action', () => {
it('emits eraseJob event', () => {
spyOn(window, 'confirm').and.returnValue(true);
spyOn(vm, '$emit');
vm.$el.querySelector('.js-erase-link').click();
expect(vm.$emit).toHaveBeenCalledWith('eraseJob');
});
});
describe('when user does not confirm action', () => {
it('does not emit eraseJob event', () => {
spyOn(window, 'confirm').and.returnValue(false);
spyOn(vm, '$emit');
vm.$el.querySelector('.js-erase-link').click();
expect(vm.$emit).not.toHaveBeenCalledWith('eraseJob');
});
});
});
});
describe('when it is not erasable', () => {
it('does not render erase button', () => {
vm = mountComponent(Component, {
rawTracePath: '/raw',
canEraseJob: false,
rawPath: '/raw',
size: 511952,
canScrollToTop: true,
canScrollToBottom: true,
isScrollTopDisabled: true,
isScrollBottomDisabled: true,
isScrollingDown: false,
isTraceSizeVisible: true,
});
expect(vm.$el.querySelector('.js-erase-link')).toBeNull();
......@@ -119,13 +91,7 @@ describe('Job log controllers', () => {
describe('scroll top button', () => {
describe('when user can scroll top', () => {
beforeEach(() => {
vm = mountComponent(Component, {
rawTracePath: '/raw',
canEraseJob: true,
size: 511952,
canScrollToTop: true,
canScrollToBottom: true,
});
vm = mountComponent(Component, props);
});
it('renders enabled scroll top button', () => {
......@@ -143,16 +109,20 @@ describe('Job log controllers', () => {
describe('when user can not scroll top', () => {
beforeEach(() => {
vm = mountComponent(Component, {
rawTracePath: '/raw',
canEraseJob: true,
rawPath: '/raw',
erasePath: '/erase',
size: 511952,
canScrollToTop: false,
canScrollToBottom: true,
isScrollTopDisabled: true,
isScrollBottomDisabled: false,
isScrollingDown: false,
isTraceSizeVisible: true,
});
});
it('renders disabled scroll top button', () => {
expect(vm.$el.querySelector('.js-scroll-top').getAttribute('disabled')).toEqual('disabled');
expect(vm.$el.querySelector('.js-scroll-top').getAttribute('disabled')).toEqual(
'disabled',
);
});
it('does not emit scrollJobLogTop event on click', () => {
......@@ -167,13 +137,7 @@ describe('Job log controllers', () => {
describe('scroll bottom button', () => {
describe('when user can scroll bottom', () => {
beforeEach(() => {
vm = mountComponent(Component, {
rawTracePath: '/raw',
canEraseJob: true,
size: 511952,
canScrollToTop: true,
canScrollToBottom: true,
});
vm = mountComponent(Component, props);
});
it('renders enabled scroll bottom button', () => {
......@@ -191,17 +155,20 @@ describe('Job log controllers', () => {
describe('when user can not scroll bottom', () => {
beforeEach(() => {
vm = mountComponent(Component, {
rawTracePath: '/raw',
canEraseJob: true,
rawPath: '/raw',
erasePath: '/erase',
size: 511952,
canScrollToTop: true,
canScrollToBottom: false,
isScrollTopDisabled: false,
isScrollBottomDisabled: true,
isScrollingDown: false,
isTraceSizeVisible: true,
});
});
it('renders disabled scroll bottom button', () => {
expect(vm.$el.querySelector('.js-scroll-bottom').getAttribute('disabled')).toEqual('disabled');
expect(vm.$el.querySelector('.js-scroll-bottom').getAttribute('disabled')).toEqual(
'disabled',
);
});
it('does not emit scrollJobLogBottom event on click', () => {
......@@ -211,7 +178,29 @@ describe('Job log controllers', () => {
expect(vm.$emit).not.toHaveBeenCalledWith('scrollJobLogBottom');
});
});
describe('while isScrollingDown is true', () => {
it('renders animate class for the scroll down button', () => {
vm = mountComponent(Component, props);
expect(vm.$el.querySelector('.js-scroll-bottom').className).toContain('animate');
});
});
describe('while isScrollingDown is false', () => {
it('does not render animate class for the scroll down button', () => {
vm = mountComponent(Component, {
rawPath: '/raw',
erasePath: '/erase',
size: 511952,
isScrollTopDisabled: true,
isScrollBottomDisabled: false,
isScrollingDown: false,
isTraceSizeVisible: true,
});
expect(vm.$el.querySelector('.js-scroll-bottom').className).not.toContain('animate');
});
});
});
});
});
......@@ -15,7 +15,7 @@ describe('Job Log', () => {
it('renders provided trace', () => {
vm = mountComponent(Component, {
trace,
isReceivingBuildTrace: true,
isComplete: true,
});
expect(vm.$el.querySelector('code').textContent).toContain('Running with gitlab-runner 11.1.0 (081978aa)');
......@@ -25,7 +25,7 @@ describe('Job Log', () => {
it('renders animation', () => {
vm = mountComponent(Component, {
trace,
isReceivingBuildTrace: true,
isComplete: true,
});
expect(vm.$el.querySelector('.js-log-animation')).not.toBeNull();
......@@ -36,7 +36,7 @@ describe('Job Log', () => {
it('does not render animation', () => {
vm = mountComponent(Component, {
trace,
isReceivingBuildTrace: false,
isComplete: false,
});
expect(vm.$el.querySelector('.js-log-animation')).toBeNull();
......
import JobStore from '~/jobs/stores/job_store';
import job from '../mock_data';
describe('Job Store', () => {
let store;
beforeEach(() => {
store = new JobStore();
});
it('should set defaults', () => {
expect(store.state.job).toEqual({});
});
describe('storeJob', () => {
it('should store empty object if none is provided', () => {
store.storeJob();
expect(store.state.job).toEqual({});
});
it('should store provided argument', () => {
store.storeJob(job);
expect(store.state.job).toEqual(job);
});
});
});
......@@ -12,6 +12,13 @@ describe('Jobs Store Mutations', () => {
stateCopy = state();
});
describe('SET_JOB_ENDPOINT', () => {
it('should set jobEndpoint', () => {
mutations[types.SET_JOB_ENDPOINT](stateCopy, 'job/21312321.json');
expect(stateCopy.jobEndpoint).toEqual('job/21312321.json');
});
});
describe('REQUEST_STATUS_FAVICON', () => {
it('should set fetchingStatusFavicon to true', () => {
mutations[types.REQUEST_STATUS_FAVICON](stateCopy);
......
require 'spec_helper'
describe EventFilter do
describe 'FILTERS' do
it 'returns a definite list of filters' do
expect(described_class::FILTERS).to eq(%w[all push merged issue comments team])
end
end
describe '#filter' do
it 'returns "all" if given filter is nil' do
expect(described_class.new(nil).filter).to eq(described_class::ALL)
end
it 'returns "all" if given filter is ""' do
expect(described_class.new('').filter).to eq(described_class::ALL)
end
it 'returns "all" if given filter is "foo"' do
expect(described_class.new('foo').filter).to eq('all')
end
end
describe '#apply_filter' do
let(:source_user) { create(:user) }
let!(:public_project) { create(:project, :public) }
set(:public_project) { create(:project, :public) }
set(:push_event) { create(:push_event, project: public_project) }
set(:merged_event) { create(:event, :merged, project: public_project, target: public_project) }
set(:created_event) { create(:event, :created, project: public_project, target: public_project) }
set(:updated_event) { create(:event, :updated, project: public_project, target: public_project) }
set(:closed_event) { create(:event, :closed, project: public_project, target: public_project) }
set(:reopened_event) { create(:event, :reopened, project: public_project, target: public_project) }
set(:comments_event) { create(:event, :commented, project: public_project, target: public_project) }
set(:joined_event) { create(:event, :joined, project: public_project, target: public_project) }
set(:left_event) { create(:event, :left, project: public_project, target: public_project) }
let!(:push_event) { create(:push_event, project: public_project, author: source_user) }
let!(:merged_event) { create(:event, :merged, project: public_project, target: public_project, author: source_user) }
let!(:created_event) { create(:event, :created, project: public_project, target: public_project, author: source_user) }
let!(:updated_event) { create(:event, :updated, project: public_project, target: public_project, author: source_user) }
let!(:closed_event) { create(:event, :closed, project: public_project, target: public_project, author: source_user) }
let!(:reopened_event) { create(:event, :reopened, project: public_project, target: public_project, author: source_user) }
let!(:comments_event) { create(:event, :commented, project: public_project, target: public_project, author: source_user) }
let!(:joined_event) { create(:event, :joined, project: public_project, target: public_project, author: source_user) }
let!(:left_event) { create(:event, :left, project: public_project, target: public_project, author: source_user) }
let(:filtered_events) { described_class.new(filter).apply_filter(Event.all) }
it 'applies push filter' do
events = described_class.new(described_class.push).apply_filter(Event.all)
expect(events).to contain_exactly(push_event)
context 'with the "push" filter' do
let(:filter) { described_class::PUSH }
it 'filters push events only' do
expect(filtered_events).to contain_exactly(push_event)
end
end
it 'applies merged filter' do
events = described_class.new(described_class.merged).apply_filter(Event.all)
expect(events).to contain_exactly(merged_event)
context 'with the "merged" filter' do
let(:filter) { described_class::MERGED }
it 'filters merged events only' do
expect(filtered_events).to contain_exactly(merged_event)
end
end
it 'applies issue filter' do
events = described_class.new(described_class.issue).apply_filter(Event.all)
expect(events).to contain_exactly(created_event, updated_event, closed_event, reopened_event)
context 'with the "issue" filter' do
let(:filter) { described_class::ISSUE }
it 'filters issue events only' do
expect(filtered_events).to contain_exactly(created_event, updated_event, closed_event, reopened_event)
end
end
it 'applies comments filter' do
events = described_class.new(described_class.comments).apply_filter(Event.all)
expect(events).to contain_exactly(comments_event)
context 'with the "comments" filter' do
let(:filter) { described_class::COMMENTS }
it 'filters comment events only' do
expect(filtered_events).to contain_exactly(comments_event)
end
end
it 'applies team filter' do
events = described_class.new(described_class.team).apply_filter(Event.all)
expect(events).to contain_exactly(joined_event, left_event)
context 'with the "team" filter' do
let(:filter) { described_class::TEAM }
it 'filters team events only' do
expect(filtered_events).to contain_exactly(joined_event, left_event)
end
end
it 'applies all filter' do
events = described_class.new(described_class.all).apply_filter(Event.all)
expect(events).to contain_exactly(push_event, merged_event, created_event, updated_event, closed_event, reopened_event, comments_event, joined_event, left_event)
context 'with the "all" filter' do
let(:filter) { described_class::ALL }
it 'returns all events' do
expect(filtered_events).to eq(Event.all)
end
end
context 'with an unknown filter' do
let(:filter) { 'foo' }
it 'returns all events' do
expect(filtered_events).to eq(Event.all)
end
end
context 'with a nil filter' do
let(:filter) { nil }
it 'returns all events' do
expect(filtered_events).to eq(Event.all)
end
end
end
describe '#active?' do
let(:event_filter) { described_class.new(described_class::TEAM) }
it 'returns false if filter does not include the given key' do
expect(event_filter.active?('foo')).to eq(false)
end
it 'applies no filter' do
events = described_class.new(nil).apply_filter(Event.all)
expect(events).to contain_exactly(push_event, merged_event, created_event, updated_event, closed_event, reopened_event, comments_event, joined_event, left_event)
it 'returns false if the given key is nil' do
expect(event_filter.active?(nil)).to eq(false)
end
it 'applies unknown filter' do
events = described_class.new('').apply_filter(Event.all)
expect(events).to contain_exactly(push_event, merged_event, created_event, updated_event, closed_event, reopened_event, comments_event, joined_event, left_event)
it 'returns true if filter does not include the given key' do
expect(event_filter.active?(described_class::TEAM)).to eq(true)
end
end
end
......@@ -3,27 +3,53 @@ require 'spec_helper'
describe Gitlab::Ci::Config::Entry::Reports do
let(:entry) { described_class.new(config) }
describe 'validates ALLOWED_KEYS' do
let(:artifact_file_types) { Ci::JobArtifact.file_types }
described_class::ALLOWED_KEYS.each do |keyword, _|
it "expects #{keyword} to be an artifact file_type" do
expect(artifact_file_types).to include(keyword)
end
end
end
describe 'validation' do
context 'when entry config value is correct' do
let(:config) { { junit: %w[junit.xml] } }
using RSpec::Parameterized::TableSyntax
describe '#value' do
it 'returns artifacs configuration' do
expect(entry.value).to eq config
shared_examples 'a valid entry' do |keyword, file|
describe '#value' do
it 'returns artifacs configuration' do
expect(entry.value).to eq({ "#{keyword}": [file] } )
end
end
end
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
end
context 'when value is not array' do
let(:config) { { junit: 'junit.xml' } }
where(:keyword, :file) do
:junit | 'junit.xml'
:sast | 'gl-sast-report.json'
:dependency_scanning | 'gl-dependency-scanning-report.json'
:container_scanning | 'gl-container-scanning-report.json'
:dast | 'gl-dast-report.json'
end
with_them do
context 'when value is an array' do
let(:config) { { "#{keyword}": [file] } }
it 'converts to array' do
expect(entry.value).to eq({ junit: ['junit.xml'] } )
it_behaves_like 'a valid entry', params[:keyword], params[:file]
end
context 'when value is not array' do
let(:config) { { "#{keyword}": file } }
it_behaves_like 'a valid entry', params[:keyword], params[:file]
end
end
end
......@@ -31,11 +57,13 @@ describe Gitlab::Ci::Config::Entry::Reports do
context 'when entry value is not correct' do
describe '#errors' do
context 'when value of attribute is invalid' do
let(:config) { { junit: 10 } }
where(key: described_class::ALLOWED_KEYS) do
let(:config) { { "#{key}": 10 } }
it 'reports error' do
expect(entry.errors)
.to include 'reports junit should be an array of strings or a string'
it 'reports error' do
expect(entry.errors)
.to include "reports #{key} should be an array of strings or a string"
end
end
end
......
require 'fast_spec_helper'
describe Gitlab::Ci::Parsers::Junit do
describe Gitlab::Ci::Parsers::Test::Junit do
describe '#parse!' do
subject { described_class.new.parse!(junit, test_suite) }
......
require 'spec_helper'
describe Gitlab::Ci::Parsers do
describe Gitlab::Ci::Parsers::Test do
describe '.fabricate!' do
subject { described_class.fabricate!(file_type) }
......@@ -16,7 +16,7 @@ describe Gitlab::Ci::Parsers do
let(:file_type) { 'undefined' }
it 'raises an error' do
expect { subject }.to raise_error(NameError)
expect { subject }.to raise_error(Gitlab::Ci::Parsers::Test::ParserNotFoundError)
end
end
end
......
......@@ -177,9 +177,7 @@ describe Ci::Build do
it 'does not execute a query for selecting job artifact one by one' do
recorded = ActiveRecord::QueryRecorder.new do
subject.each do |build|
Ci::JobArtifact::TEST_REPORT_FILE_TYPES.each do |file_type|
build.public_send("job_artifacts_#{file_type}").file.exists?
end
build.job_artifacts.map { |a| a.file.exists? }
end
end
......@@ -551,44 +549,22 @@ describe Ci::Build do
end
end
describe '#has_test_reports?' do
subject { build.has_test_reports? }
describe '#has_job_artifacts?' do
subject { build.has_job_artifacts? }
context 'when build has a test report' do
let(:build) { create(:ci_build, :test_reports) }
context 'when build has a job artifact' do
let(:build) { create(:ci_build, :artifacts) }
it { is_expected.to be_truthy }
end
context 'when build does not have test reports' do
let(:build) { create(:ci_build, :artifacts) }
context 'when build does not have job artifacts' do
let(:build) { create(:ci_build, :legacy_artifacts) }
it { is_expected.to be_falsy }
end
end
describe '#erase_test_reports!' do
subject { build.erase_test_reports! }
context 'when build has a test report' do
let!(:build) { create(:ci_build, :test_reports) }
it 'removes a test report' do
subject
expect(build.has_test_reports?).to be_falsy
end
end
context 'when build does not have test reports' do
let!(:build) { create(:ci_build, :artifacts) }
it 'does not erase anything' do
expect { subject }.not_to change { Ci::JobArtifact.count }
end
end
end
describe '#has_old_trace?' do
subject { build.has_old_trace? }
......@@ -851,8 +827,8 @@ describe Ci::Build do
expect(build.artifacts_metadata.exists?).to be_falsy
end
it 'removes test reports' do
expect(build.job_artifacts.test_reports.count).to eq(0)
it 'removes all job_artifacts' do
expect(build.job_artifacts.count).to eq(0)
end
it 'erases build trace in trace file' do
......@@ -1023,6 +999,32 @@ describe Ci::Build do
end
end
describe '#erase_erasable_artifacts!' do
let!(:build) { create(:ci_build, :success) }
subject { build.erase_erasable_artifacts! }
before do
Ci::JobArtifact.file_types.keys.each do |file_type|
create(:ci_job_artifact, job: build, file_type: file_type, file_format: Ci::JobArtifact::TYPE_AND_FORMAT_PAIRS[file_type.to_sym])
end
end
it "erases erasable artifacts" do
subject
expect(build.job_artifacts.erasable).to be_empty
end
it "keeps non erasable artifacts" do
subject
Ci::JobArtifact::NON_ERASABLE_FILE_TYPES.each do |file_type|
expect(build.send("job_artifacts_#{file_type}")).not_to be_nil
end
end
end
describe '#first_pending' do
let!(:first) { create(:ci_build, pipeline: pipeline, status: 'pending', created_at: Date.yesterday) }
let!(:second) { create(:ci_build, pipeline: pipeline, status: 'pending') }
......@@ -2846,16 +2848,10 @@ describe Ci::Build do
end
it 'raises an error' do
expect { subject }.to raise_error(Gitlab::Ci::Parsers::Junit::JunitParserError)
expect { subject }.to raise_error(Gitlab::Ci::Parsers::Test::Junit::JunitParserError)
end
end
end
context 'when build does not have test reports' do
it 'raises an error' do
expect { subject }.to raise_error(NoMethodError)
end
end
end
describe '#artifacts_metadata_entry' do
......
......@@ -31,6 +31,22 @@ describe Ci::JobArtifact do
end
end
describe '.erasable' do
subject { described_class.erasable }
context 'when there is an erasable artifact' do
let!(:artifact) { create(:ci_job_artifact, :junit) }
it { is_expected.to eq([artifact]) }
end
context 'when there are no erasable artifacts' do
let!(:artifact) { create(:ci_job_artifact, :trace) }
it { is_expected.to be_empty }
end
end
describe 'callbacks' do
subject { create(:ci_job_artifact, :archive) }
......@@ -106,34 +122,46 @@ describe Ci::JobArtifact do
describe 'validates file format' do
subject { artifact }
context 'when archive type with zip format' do
let(:artifact) { build(:ci_job_artifact, :archive, file_format: :zip) }
described_class::TYPE_AND_FORMAT_PAIRS.except(:trace).each do |file_type, file_format|
context "when #{file_type} type with #{file_format} format" do
let(:artifact) { build(:ci_job_artifact, file_type: file_type, file_format: file_format) }
it { is_expected.to be_valid }
end
it { is_expected.to be_valid }
end
context 'when archive type with gzip format' do
let(:artifact) { build(:ci_job_artifact, :archive, file_format: :gzip) }
context "when #{file_type} type without format specification" do
let(:artifact) { build(:ci_job_artifact, file_type: file_type, file_format: nil) }
it { is_expected.not_to be_valid }
end
it { is_expected.not_to be_valid }
end
context 'when archive type without format specification' do
let(:artifact) { build(:ci_job_artifact, :archive, file_format: nil) }
context "when #{file_type} type with other formats" do
described_class.file_formats.except(file_format).values.each do |other_format|
let(:artifact) { build(:ci_job_artifact, file_type: file_type, file_format: other_format) }
it { is_expected.not_to be_valid }
it { is_expected.not_to be_valid }
end
end
end
end
context 'when junit type with zip format' do
let(:artifact) { build(:ci_job_artifact, :junit, file_format: :zip) }
describe 'validates DEFAULT_FILE_NAMES' do
subject { described_class::DEFAULT_FILE_NAMES }
it { is_expected.not_to be_valid }
described_class.file_types.each do |file_type, _|
it "expects #{file_type} to be included" do
is_expected.to include(file_type.to_sym)
end
end
end
context 'when junit type with gzip format' do
let(:artifact) { build(:ci_job_artifact, :junit, file_format: :gzip) }
describe 'validates TYPE_AND_FORMAT_PAIRS' do
subject { described_class::TYPE_AND_FORMAT_PAIRS }
it { is_expected.to be_valid }
described_class.file_types.each do |file_type, _|
it "expects #{file_type} to be included" do
expect(described_class.file_formats).to include(subject[file_type.to_sym])
end
end
end
......