Commit 45a4bc30 authored by GitLab Release Tools Bot

Merge branch 'master' of gitlab.com:gitlab-org/gitlab-ce

parents c7e6c8ef 4891e8a5
...@@ -8,6 +8,7 @@ require:
- rubocop-rspec
AllCops:
TargetRubyVersion: 2.5
TargetRailsVersion: 5.0
Exclude:
- 'vendor/**/*'
...@@ -184,3 +185,8 @@ Cop/InjectEnterpriseEditionModule:
Style/ReturnNil:
Enabled: true
# It isn't always safe to replace `=~` with `.match?`, especially when there are
# nil values on the left hand side
Performance/RegexpMatch:
Enabled: false
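The cop above is disabled because `nil =~ /regexp/` is valid Ruby while calling `.match?` on a possibly-nil left-hand side is not. A minimal sketch in plain Ruby (not GitLab code):

```ruby
# Minimal sketch, not GitLab code: why `=~` can't be blindly swapped for `.match?`.
value = nil

value =~ /\d+/         # => nil, no error raised
/\d+/.match?(value)    # => false (Regexp#match? tolerates nil)
value.match?(/\d+/)    # => NoMethodError: undefined method `match?' for nil
```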
<script>
import folderMixin from 'ee_else_ce/environments/mixins/environments_folder_view_mixin';
import environmentsMixin from '../mixins/environments_mixin';
import CIPaginationMixin from '../../vue_shared/mixins/ci_pagination_api_mixin';
import StopEnvironmentModal from '../components/stop_environment_modal.vue';
...@@ -8,7 +9,7 @@ export default {
StopEnvironmentModal,
},
mixins: [environmentsMixin, CIPaginationMixin],
mixins: [environmentsMixin, CIPaginationMixin, folderMixin],
props: {
endpoint: {
...@@ -41,7 +42,8 @@ export default {
<div v-if="!isLoading" class="top-area">
<h4 class="js-folder-name environments-folder-name">
{{ s__('Environments|Environments') }} / <b>{{ folderName }}</b>
{{ s__('Environments|Environments') }} /
<b>{{ folderName }}</b>
</h4>
<tabs :tabs="tabs" scope="environments" @onChangeTab="onChangeTab" />
...@@ -52,6 +54,11 @@ export default {
:environments="state.environments"
:pagination="state.paginationInformation"
:can-read-environment="canReadEnvironment"
:canary-deployment-feature-id="canaryDeploymentFeatureId"
:show-canary-deployment-callout="showCanaryDeploymentCallout"
:user-callouts-path="userCalloutsPath"
:lock-promotion-svg-path="lockPromotionSvgPath"
:help-canary-deployments-path="helpCanaryDeploymentsPath"
@onChangePage="onChangePage"
/>
</div>
......
import Vue from 'vue';
import canaryCalloutMixin from 'ee_else_ce/environments/mixins/canary_callout_mixin';
import environmentsComponent from './components/environments_app.vue';
import { parseBoolean } from '../lib/utils/common_utils';
import Translate from '../vue_shared/translate';
...@@ -11,6 +12,7 @@ export default () =>
components: {
environmentsComponent,
},
mixins: [canaryCalloutMixin],
data() {
const environmentsData = document.querySelector(this.$options.el).dataset;
...@@ -32,6 +34,7 @@ export default () =>
cssContainerClass: this.cssContainerClass,
canCreateEnvironment: this.canCreateEnvironment,
canReadEnvironment: this.canReadEnvironment,
...this.canaryCalloutProps,
},
});
},
......
export default {
computed: {
canaryCalloutProps() {},
},
};
export default {
props: {
canaryDeploymentFeatureId: {
type: String,
required: false,
default: '',
},
showCanaryDeploymentCallout: {
type: Boolean,
required: false,
default: false,
},
userCalloutsPath: {
type: String,
required: false,
default: '',
},
lockPromotionSvgPath: {
type: String,
required: false,
default: '',
},
helpCanaryDeploymentsPath: {
type: String,
required: false,
default: '',
},
},
};
...@@ -3,13 +3,13 @@
*/
import _ from 'underscore';
import Visibility from 'visibilityjs';
import EnvironmentsStore from 'ee_else_ce/environments/stores/environments_store';
import Poll from '../../lib/utils/poll';
import { getParameterByName } from '../../lib/utils/common_utils';
import { s__ } from '../../locale';
import Flash from '../../flash';
import eventHub from '../event_hub';
import EnvironmentsStore from '../stores/environments_store';
import EnvironmentsService from '../services/environments_service';
import tablePagination from '../../vue_shared/components/table_pagination.vue';
import environmentTable from '../components/environments_table.vue';
......
import { parseIntPagination, normalizeHeaders } from '~/lib/utils/common_utils';
import { setDeployBoard } from 'ee_else_ce/environments/stores/helpers';
/**
* Environments Store.
*
...@@ -31,6 +33,14 @@ export default class EnvironmentsStore {
* If the `size` is bigger than 1, it means it should be rendered as a folder.
* In those cases we add `isFolder` key in order to render it properly.
*
* Top level environments - when the size is 1 - with `rollout_status`
* can render a deploy board. We add `isDeployBoardVisible` and `deployBoardData`
* keys to those environments.
* The first key lets us know whether or not we should render the deploy board.
* It will be toggled when the user clicks to see the deploy board.
*
* The second key will allow us to update the environment with the received deploy board data.
*
* @param {Array} environments
* @returns {Array}
*/
...@@ -63,6 +73,7 @@ export default class EnvironmentsStore {
filtered = Object.assign(filtered, env);
}
filtered = setDeployBoard(oldEnvironmentState, filtered);
return filtered;
});
...@@ -71,6 +82,20 @@ export default class EnvironmentsStore {
return filteredEnvironments;
}
/**
* Stores the pagination information needed to render the pagination for the
* table.
*
* Normalizes the headers to uppercase since they can be provided either
* in uppercase or lowercase.
*
* Parses to an integer the normalized ones needed for the pagination component.
*
* Stores the normalized and parsed information.
*
* @param {Object} pagination = {}
* @return {Object}
*/
setPagination(pagination = {}) {
const normalizedHeaders = normalizeHeaders(pagination);
const paginationInformation = parseIntPagination(normalizedHeaders);
......
/**
* Deploy boards are EE only.
*
* @param {Object} environment
* @returns {Object}
*/
// eslint-disable-next-line import/prefer-default-export
export const setDeployBoard = (oldEnvironmentState, environment) => environment;
import initGroupDetails from '../shared/group_details';
document.addEventListener('DOMContentLoaded', () => {
initGroupDetails('details');
});
/* eslint-disable no-new */
import { getPagePath } from '~/lib/utils/common_utils';
import { ACTIVE_TAB_SHARED, ACTIVE_TAB_ARCHIVED } from '~/groups/constants';
import NewGroupChild from '~/groups/new_group_child';
import notificationsDropdown from '~/notifications_dropdown';
import NotificationsForm from '~/notifications_form';
import ProjectsList from '~/projects_list';
import ShortcutsNavigation from '~/behaviors/shortcuts/shortcuts_navigation';
import GroupTabs from './group_tabs';
export default function initGroupDetails(actionName = 'show') {
const newGroupChildWrapper = document.querySelector('.js-new-project-subgroup');
const loadableActions = [ACTIVE_TAB_SHARED, ACTIVE_TAB_ARCHIVED];
const paths = window.location.pathname.split('/');
const subpath = paths[paths.length - 1];
let action = loadableActions.includes(subpath) ? subpath : getPagePath(1);
if (actionName && action === actionName) {
action = 'show'; // 'show' resets GroupTabs to default action through base class
}
new GroupTabs({ parentEl: '.groups-listing', action });
new ShortcutsNavigation();
new NotificationsForm();
notificationsDropdown();
new ProjectsList();
if (newGroupChildWrapper) {
new NewGroupChild(newGroupChildWrapper);
}
}
/* eslint-disable no-new */
import initGroupDetails from '../shared/group_details';
import { getPagePath } from '~/lib/utils/common_utils';
import { ACTIVE_TAB_SHARED, ACTIVE_TAB_ARCHIVED } from '~/groups/constants';
import NewGroupChild from '~/groups/new_group_child';
import notificationsDropdown from '~/notifications_dropdown';
import NotificationsForm from '~/notifications_form';
import ProjectsList from '~/projects_list';
import ShortcutsNavigation from '~/behaviors/shortcuts/shortcuts_navigation';
import GroupTabs from './group_tabs';
document.addEventListener('DOMContentLoaded', () => {
const newGroupChildWrapper = document.querySelector('.js-new-project-subgroup');
initGroupDetails();
const loadableActions = [ACTIVE_TAB_SHARED, ACTIVE_TAB_ARCHIVED];
const paths = window.location.pathname.split('/');
const subpath = paths[paths.length - 1];
const action = loadableActions.includes(subpath) ? subpath : getPagePath(1);
new GroupTabs({ parentEl: '.groups-listing', action });
new ShortcutsNavigation();
new NotificationsForm();
notificationsDropdown();
new ProjectsList();
if (newGroupChildWrapper) {
new NewGroupChild(newGroupChildWrapper);
}
});
...@@ -91,6 +91,7 @@ export default class UserTabs {
this.actions = Object.keys(this.loaded);
this.bindEvents();
// TODO: refactor to make this configurable via constructor params with a default value of 'show'
if (this.action === 'show') {
this.action = this.defaultAction;
}
......
<script>
import _ from 'underscore';
import stageColumnMixin from 'ee_else_ce/pipelines/mixins/stage_column_mixin';
import JobItem from './job_item.vue';
import JobGroupDropdown from './job_group_dropdown.vue';
...@@ -8,6 +9,7 @@ export default {
JobItem,
JobGroupDropdown,
},
mixins: [stageColumnMixin],
props: {
title: {
type: String,
...@@ -32,9 +34,6 @@ export default {
groupId(group) {
return `ci-badge-${_.escape(group.name)}`;
},
buildConnnectorClass(index) {
return index === 0 && !this.isFirstColumn ? 'left-connector' : '';
},
pipelineActionRequestComplete() {
this.$emit('refreshPipelineGraph');
},
......
export default {
methods: {
clickTriggeredByPipeline() {},
clickTriggeredPipeline() {},
},
};
export default {
methods: {
buildConnnectorClass(index) {
return index === 0 && !this.isFirstColumn ? 'left-connector' : '';
},
},
};
...@@ -2,8 +2,9 @@ import Vue from 'vue';
import Flash from '~/flash';
import Translate from '~/vue_shared/translate';
import { __ } from '~/locale';
import pipelineGraph from 'ee_else_ce/pipelines/components/graph/graph_component.vue';
import GraphEEMixin from 'ee_else_ce/pipelines/mixins/graph_pipeline_bundle_mixin';
import PipelinesMediator from './pipeline_details_mediator';
import pipelineGraph from './components/graph/graph_component.vue';
import pipelineHeader from './components/header_component.vue';
import eventHub from './event_hub';
...@@ -22,6 +23,7 @@ export default () => {
components: {
pipelineGraph,
},
mixins: [GraphEEMixin],
data() {
return {
mediator,
...@@ -44,6 +46,10 @@ export default () => {
},
on: {
refreshPipelineGraph: this.requestRefreshPipelineGraph,
onClickTriggeredBy: (parentPipeline, pipeline) =>
this.clickTriggeredByPipeline(parentPipeline, pipeline),
onClickTriggered: (parentPipeline, pipeline) =>
this.clickTriggeredPipeline(parentPipeline, pipeline),
},
});
},
......
...@@ -376,18 +376,21 @@ img.emoji {
.prepend-top-default { margin-top: $gl-padding !important; }
.prepend-top-16 { margin-top: 16px; }
.prepend-top-20 { margin-top: 20px; }
.prepend-top-32 { margin-top: 32px; }
.prepend-left-4 { margin-left: 4px; }
.prepend-left-5 { margin-left: 5px; }
.prepend-left-8 { margin-left: 8px; }
.prepend-left-10 { margin-left: 10px; }
.prepend-left-default { margin-left: $gl-padding; }
.prepend-left-20 { margin-left: 20px; }
.prepend-left-32 { margin-left: 32px; }
.append-right-4 { margin-right: 4px; }
.append-right-5 { margin-right: 5px; }
.append-right-8 { margin-right: 8px; }
.append-right-10 { margin-right: 10px; }
.append-right-default { margin-right: $gl-padding; }
.append-right-20 { margin-right: 20px; }
.prepend-right-32 { margin-right: 32px; }
.append-bottom-0 { margin-bottom: 0; }
.append-bottom-4 { margin-bottom: $gl-padding-4; }
.append-bottom-5 { margin-bottom: 5px; }
...@@ -396,6 +399,7 @@ img.emoji {
.append-bottom-15 { margin-bottom: 15px; }
.append-bottom-20 { margin-bottom: 20px; }
.append-bottom-default { margin-bottom: $gl-padding; }
.prepend-bottom-32 { margin-bottom: 32px; }
.inline { display: inline-block; }
.center { text-align: center; }
.vertical-align-middle { vertical-align: middle; }
......
...@@ -693,10 +693,6 @@
}
}
.project-empty-note-panel {
border-bottom: 1px solid $border-color;
}
.project-stats,
.project-buttons {
.scrolling-tabs-container {
......
...@@ -58,11 +58,24 @@ class GroupsController < Groups::ApplicationController
def show
respond_to do |format|
format.html
format.html do
render_show_html
end
format.atom do
load_events
render layout: 'xml.atom'
render_details_view_atom
end
end
end
def details
respond_to do |format|
format.html do
render_details_html
end
format.atom do
render_details_view_atom
end
end
end
...@@ -119,6 +132,19 @@ class GroupsController < Groups::ApplicationController
protected
def render_show_html
render 'groups/show'
end
def render_details_html
render 'groups/show'
end
def render_details_view_atom
load_events
render layout: 'xml.atom', template: 'groups/show'
end
# rubocop: disable CodeReuse/ActiveRecord
def authorize_create_group!
allowed = if params[:parent_id].present?
......
...@@ -4,6 +4,7 @@ module GroupsHelper
def group_overview_nav_link_paths
%w[
groups#show
groups#details
groups#activity
groups#subgroups
analytics#show
......
...@@ -286,7 +286,6 @@ class MergeRequestDiff < ActiveRecord::Base
return yield(@external_diff_file) if @external_diff_file
external_diff.open do |file|
begin
@external_diff_file = file
yield(@external_diff_file)
...@@ -294,7 +293,6 @@ class MergeRequestDiff < ActiveRecord::Base
@external_diff_file = nil
end
end
end
private
......
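The hunk above, like many of the Ruby hunks in this commit, only drops a now-redundant `begin`: with `TargetRubyVersion: 2.5` set in `.rubocop.yml`, `rescue` and `ensure` can be used directly inside a `do ... end` block body. A minimal sketch of the idiom (plain Ruby, not GitLab code):

```ruby
# Minimal sketch, not GitLab code: Ruby 2.5+ allows rescue/ensure directly
# inside a block body, so the wrapping begin/end can be removed.
[1, 0, 2].each do |n|
  puts 10 / n
rescue ZeroDivisionError => e
  puts "skipped: #{e.message}"
end
```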
...@@ -23,6 +23,6 @@ class MergeRequestDiffFile < ActiveRecord::Base
super
end
binary? ? content.unpack('m0').first : content
binary? ? content.unpack1('m0') : content
end
end
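The `unpack1` change above relies on `String#unpack1` (Ruby 2.4+), which returns the first decoded value directly instead of an array. A small standalone illustration (not GitLab code):

```ruby
# Minimal sketch: unpack1 avoids the intermediate array from unpack(...).first.
encoded = ['hello'].pack('m0')   # => "aGVsbG8="
encoded.unpack('m0').first       # => "hello"
encoded.unpack1('m0')            # => "hello"
```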
...@@ -1209,13 +1209,11 @@ class Project < ActiveRecord::Base
def repo_exists?
strong_memoize(:repo_exists) do
begin
repository.exists?
rescue
false
end
end
end
def root_ref?(branch)
repository.root_ref == branch
......
...@@ -205,14 +205,12 @@ class JiraService < IssueTrackerService ...@@ -205,14 +205,12 @@ class JiraService < IssueTrackerService
# if any transition fails it will log the error message and stop the transition sequence # if any transition fails it will log the error message and stop the transition sequence
def transition_issue(issue) def transition_issue(issue)
jira_issue_transition_id.scan(Gitlab::Regex.jira_transition_id_regex).each do |transition_id| jira_issue_transition_id.scan(Gitlab::Regex.jira_transition_id_regex).each do |transition_id|
begin
issue.transitions.build.save!(transition: { id: transition_id }) issue.transitions.build.save!(transition: { id: transition_id })
rescue => error rescue => error
log_error("Issue transition failed", error: error.message, client_url: client_url) log_error("Issue transition failed", error: error.message, client_url: client_url)
return false return false
end end
end end
end
def add_issue_solved_comment(issue, commit_id, commit_url) def add_issue_solved_comment(issue, commit_id, commit_url)
link_title = "Solved by commit #{commit_id}." link_title = "Solved by commit #{commit_id}."
......
...@@ -265,7 +265,6 @@ class Repository ...@@ -265,7 +265,6 @@ class Repository
# to avoid unnecessary syncing. # to avoid unnecessary syncing.
def keep_around(*shas) def keep_around(*shas)
shas.each do |sha| shas.each do |sha|
begin
next unless sha.present? && commit_by(oid: sha) next unless sha.present? && commit_by(oid: sha)
next if kept_around?(sha) next if kept_around?(sha)
...@@ -276,7 +275,6 @@ class Repository ...@@ -276,7 +275,6 @@ class Repository
Rails.logger.error "Unable to create keep-around reference for repository #{disk_path}: #{ex}" Rails.logger.error "Unable to create keep-around reference for repository #{disk_path}: #{ex}"
end end
end end
end
def kept_around?(sha) def kept_around?(sha)
ref_exists?(keep_around_ref_name(sha)) ref_exists?(keep_around_ref_name(sha))
......
...@@ -26,7 +26,6 @@ class UserInteractedProject < ActiveRecord::Base ...@@ -26,7 +26,6 @@ class UserInteractedProject < ActiveRecord::Base
cached_exists?(attributes) do cached_exists?(attributes) do
transaction(requires_new: true) do transaction(requires_new: true) do
begin
where(attributes).select(1).first || create!(attributes) where(attributes).select(1).first || create!(attributes)
true # not caching the whole record here for now true # not caching the whole record here for now
rescue ActiveRecord::RecordNotUnique rescue ActiveRecord::RecordNotUnique
...@@ -38,7 +37,6 @@ class UserInteractedProject < ActiveRecord::Base ...@@ -38,7 +37,6 @@ class UserInteractedProject < ActiveRecord::Base
end end
end end
end end
end
# Check if we can safely call .track (table exists) # Check if we can safely call .track (table exists)
def available? def available?
......
...@@ -42,11 +42,7 @@ class ProjectPresenter < Gitlab::View::Presenter::Delegated
def empty_repo_statistics_anchors
[
license_anchor_data,
commits_anchor_data,
branches_anchor_data,
tags_anchor_data,
files_anchor_data
license_anchor_data
].compact.select { |item| item.is_link }
end
...@@ -55,9 +51,7 @@ class ProjectPresenter < Gitlab::View::Presenter::Delegated
new_file_anchor_data,
readme_anchor_data,
changelog_anchor_data,
contribution_guide_anchor_data,
autodevops_anchor_data,
kubernetes_cluster_anchor_data
contribution_guide_anchor_data
].compact.reject { |item| item.is_link }
end
......
...@@ -9,7 +9,6 @@ class DetailedStatusEntity < Grape::Entity ...@@ -9,7 +9,6 @@ class DetailedStatusEntity < Grape::Entity
expose :details_path expose :details_path
expose :illustration do |status| expose :illustration do |status|
begin
illustration = { illustration = {
image: ActionController::Base.helpers.image_path(status.illustration[:image]) image: ActionController::Base.helpers.image_path(status.illustration[:image])
} }
...@@ -19,7 +18,6 @@ class DetailedStatusEntity < Grape::Entity ...@@ -19,7 +18,6 @@ class DetailedStatusEntity < Grape::Entity
rescue NotImplementedError rescue NotImplementedError
# ignored # ignored
end end
end
expose :favicon do |status| expose :favicon do |status|
Gitlab::Favicon.status_overlay(status.favicon) Gitlab::Favicon.status_overlay(status.favicon)
......
...@@ -42,7 +42,6 @@ module Projects ...@@ -42,7 +42,6 @@ module Projects
def parse_response_links(objects_response) def parse_response_links(objects_response)
objects_response.each_with_object([]) do |entry, link_list| objects_response.each_with_object([]) do |entry, link_list|
begin
link = entry.dig('actions', DOWNLOAD_ACTION, 'href') link = entry.dig('actions', DOWNLOAD_ACTION, 'href')
raise DownloadLinkNotFound unless link raise DownloadLinkNotFound unless link
...@@ -54,7 +53,6 @@ module Projects ...@@ -54,7 +53,6 @@ module Projects
log_error("Link for Lfs Object with oid #{entry['oid']} not found or invalid.") log_error("Link for Lfs Object with oid #{entry['oid']} not found or invalid.")
end end
end end
end
def request_body(oids) def request_body(oids)
{ {
......
...@@ -75,7 +75,6 @@ module Projects ...@@ -75,7 +75,6 @@ module Projects
create_tmp_storage_dir create_tmp_storage_dir
File.open(tmp_filename, 'wb') do |file| File.open(tmp_filename, 'wb') do |file|
begin
yield file yield file
rescue StandardError => e rescue StandardError => e
# If the lfs file is successfully downloaded it will be removed # If the lfs file is successfully downloaded it will be removed
...@@ -87,7 +86,6 @@ module Projects ...@@ -87,7 +86,6 @@ module Projects
raise e raise e
end end
end end
end
def tmp_filename def tmp_filename
File.join(tmp_storage_dir, lfs_oid) File.join(tmp_storage_dir, lfs_oid)
......
...@@ -2,7 +2,7 @@
class ShaValidator < ActiveModel::EachValidator
def validate_each(record, attribute, value)
return if value.blank? || value.match(/\A\h{40}\z/)
return if value.blank? || Commit.valid_hash?(value)
record.errors.add(attribute, 'is not a valid SHA')
end
......
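`Commit.valid_hash?` is a GitLab-internal helper whose implementation is not part of this diff. A hypothetical sketch of what such a predicate might look like, assuming it accepts abbreviated SHAs (consistent with the BitBucket short-SHA changelog entry further down), would be:

```ruby
# Hypothetical sketch, NOT the actual GitLab implementation: accept
# abbreviated SHAs (7-40 hex characters) rather than exactly 40.
def valid_hash?(sha)
  sha.is_a?(String) && sha.match?(/\A\h{7,40}\z/)
end

valid_hash?('4891e8a5')       # => true (short SHA)
valid_hash?('c7e6c8ef' * 5)   # => true (full 40-character SHA)
valid_hash?('not-a-sha')      # => false
```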
...@@ -38,4 +38,4 @@
%li= link_to _('New project'), new_project_path, class: 'qa-global-new-project-link'
- if current_user.can_create_group?
%li= link_to _('New group'), new_group_path
%li= link_to _('New snippet'), new_snippet_path
%li= link_to _('New snippet'), new_snippet_path, class: 'qa-global-new-snippet-link'
...@@ -29,7 +29,7 @@
- if dashboard_nav_link?(:snippets)
= nav_link(controller: 'dashboard/snippets', html_options: { class: ["d-none d-xl-block", ("d-lg-block" unless has_extra_nav_icons?)] }) do
= link_to dashboard_snippets_path, class: 'dashboard-shortcuts-snippets', title: _('Snippets') do
= link_to dashboard_snippets_path, class: 'dashboard-shortcuts-snippets qa-snippets-link', title: _('Snippets') do
= _('Snippets')
- if any_dashboard_nav_link?([:groups, :milestones, :activity, :snippets])
......
...@@ -20,13 +20,14 @@
= _('Overview')
%ul.sidebar-sub-level-items
= nav_link(path: ['groups#show', 'groups#activity', 'groups#subgroups'], html_options: { class: "fly-out-top-item" } ) do
= nav_link(path: ['groups#show', 'groups#details', 'groups#activity', 'groups#subgroups'], html_options: { class: "fly-out-top-item" } ) do
= link_to group_path(@group) do
%strong.fly-out-top-item-name
= _('Overview')
%li.divider.fly-out-top-item
= nav_link(path: ['groups#show', 'groups#subgroups'], html_options: { class: 'home' }) do
= link_to group_path(@group), title: _('Group details') do
= nav_link(path: ['groups#show', 'groups#details', 'groups#subgroups'], html_options: { class: 'home' }) do
= link_to details_group_path(@group), title: _('Group details') do
%span
= _('Details')
...@@ -40,9 +41,9 @@
- if group_sidebar_link?(:contribution_analytics)
= nav_link(path: 'analytics#show') do
= link_to group_analytics_path(@group), title: 'Contribution Analytics', data: {placement: 'right'} do
= link_to group_analytics_path(@group), title: _('Contribution Analytics'), data: { placement: 'right' } do
%span
Contribution Analytics
= _('Contribution Analytics')
= render_if_exists "layouts/nav/ee/epic_link", group: @group
......
...@@ -5,4 +5,5 @@
- if current_user && can?(current_user, :download_code, project)
= render 'shared/no_ssh'
= render 'shared/no_password'
- unless project.empty_repo?
= render 'shared/auto_devops_implicitly_enabled_banner', project: project
...@@ -57,6 +57,9 @@
- if can?(current_user, :download_code, @project)
%nav.project-stats
.nav-links.quick-links
- if @project.empty_repo?
= render 'stat_anchor_list', anchors: @project.empty_repo_statistics_anchors
- else
= render 'stat_anchor_list', anchors: @project.statistics_anchors(show_auto_devops_callout: show_auto_devops_callout)
.home-panel-home-desc.mt-1
......
.file-header-content
= blob_icon blob.mode, blob.name
%strong.file-title-name
%strong.file-title-name.qa-file-title-name
= blob.name
= copy_file_path_button(blob.path)
......
...@@ -7,43 +7,22 @@
%div{ class: [container_class, ("limit-container-width" unless fluid_layout)] }
= render "home_panel"
.project-empty-note-panel
%h4.append-bottom-20
%h4.prepend-top-0.append-bottom-8
= _('The repository for this project is empty')
- if @project.can_current_user_push_code?
%p
%p.append-bottom-0
- link_to_cli = link_to _('command line instructions'), '#repo-command-line-instructions'
= _('If you already have files you can push them using the %{link_to_cli} below.').html_safe % { link_to_cli: link_to_cli }
= _('You can create files directly in GitLab using one of the following options.')
%p
%em
- link_to_protected_branches = link_to _('Learn more about protected branches'), help_page_path('user/project/protected_branches')
= _('Note that the master branch is automatically protected. %{link_to_protected_branches}').html_safe % { link_to_protected_branches: link_to_protected_branches }
%hr
%p
- link_to_auto_devops_settings = link_to(s_('AutoDevOps|enable Auto DevOps'), project_settings_ci_cd_path(@project, anchor: 'autodevops-settings'))
- link_to_add_kubernetes_cluster = link_to(s_('AutoDevOps|add a Kubernetes cluster'), new_project_cluster_path(@project))
= s_('AutoDevOps|You can automatically build and test your application if you %{link_to_auto_devops_settings} for this project. You can automatically deploy it as well, if you %{link_to_add_kubernetes_cluster}.').html_safe % { link_to_auto_devops_settings: link_to_auto_devops_settings, link_to_add_kubernetes_cluster: link_to_add_kubernetes_cluster }
%hr
%p
= _('Otherwise it is recommended you start with one of the options below.')
.prepend-top-20
%nav.project-buttons
.scrolling-tabs-container.inner-page-scroll-tabs.is-smaller.qa-quick-actions
.fade-left= icon('angle-left')
.fade-right= icon('angle-right')
.nav-links.scrolling-tabs.quick-links
.project-buttons.qa-quick-actions
= render 'stat_anchor_list', anchors: @project.empty_repo_statistics_buttons
- if can?(current_user, :push_code, @project)
%div
.prepend-top-20
.empty_wrapper
.empty-wrapper.prepend-top-32
%h3#repo-command-line-instructions.page-title-empty
= _('Command line instructions')
%p
= _('You can also upload existing files from your computer using the instructions below.')
.git-empty.js-git-empty
%fieldset
%h5= _('Git global setup')
...@@ -66,7 +45,7 @@
git push -u origin master
%fieldset
%h5= _('Existing folder')
%h5= _('Push an existing folder')
%pre.bg-light
:preserve
cd existing_folder
...@@ -79,7 +58,7 @@
git push -u origin master
%fieldset
%h5= _('Existing Git repository')
%h5= _('Push an existing Git repository')
%pre.bg-light
:preserve
cd existing_repo
...@@ -89,7 +68,3 @@
%span><
git push -u origin --all
git push -u origin --tags
- if can? current_user, :remove_project, @project
.prepend-top-20
= link_to _('Remove project'), [@project.namespace.becomes(Namespace), @project], data: { confirm: remove_project_message(@project)}, method: :delete, class: "btn btn-inverted btn-remove float-right"
.file-content.code.js-syntax-highlight
.file-content.code.js-syntax-highlight.qa-file-content
.line-numbers
- if blob.data.present?
- link_icon = icon('link')
......
...@@ -9,7 +9,7 @@
.form-group.row
= f.label :title, class: 'col-form-label col-sm-2'
.col-sm-10
= f.text_field :title, class: 'form-control', required: true, autofocus: true
= f.text_field :title, class: 'form-control qa-snippet-title', required: true, autofocus: true
= render 'shared/form_elements/description', model: @snippet, project: @project, form: f
...@@ -21,7 +21,7 @@
.col-sm-10
.file-holder.snippet
.js-file-title.file-title
= f.text_field :file_name, placeholder: "Optionally name this file to add code highlighting, e.g. example.rb for Ruby.", class: 'form-control snippet-file-name'
= f.text_field :file_name, placeholder: "Optionally name this file to add code highlighting, e.g. example.rb for Ruby.", class: 'form-control snippet-file-name qa-snippet-file-name'
.file-content.code
%pre#editor= @snippet.content
= f.hidden_field :content, class: 'snippet-file-content'
...@@ -31,7 +31,7 @@
.form-actions
- if @snippet.new_record?
= f.submit 'Create snippet', class: "btn-success btn"
= f.submit 'Create snippet', class: "btn-success btn qa-create-snippet-button"
- else
= f.submit 'Save changes', class: "btn-success btn"
......
.detail-page-header
.detail-page-header-body
.snippet-box.has-tooltip.inline.append-right-5{ title: snippet_visibility_level_description(@snippet.visibility_level, @snippet), data: { container: "body" } }
.snippet-box.qa-snippet-box.has-tooltip.inline.append-right-5{ title: snippet_visibility_level_description(@snippet.visibility_level, @snippet), data: { container: "body" } }
%span.sr-only
= visibility_level_label(@snippet.visibility_level)
= visibility_level_icon(@snippet.visibility_level, fw: false)
...@@ -17,11 +17,11 @@
= render "snippets/actions"
.snippet-header.limited-header-width
%h2.snippet-title.prepend-top-0.append-bottom-0
%h2.snippet-title.prepend-top-0.append-bottom-0.qa-snippet-title
= markdown_field(@snippet, :title)
- if @snippet.description.present?
.description
.description.qa-snippet-description
.wiki
= markdown_field(@snippet, :description)
%textarea.hidden.js-task-list-field
...@@ -34,7 +34,7 @@
.embed-snippet
.input-group
.input-group-prepend
%button.btn.btn-svg.embed-toggle.input-group-text{ 'data-toggle': 'dropdown', type: 'button' }
%button.btn.btn-svg.embed-toggle.input-group-text.qa-embed-type{ 'data-toggle': 'dropdown', type: 'button' }
%span.js-embed-action= _("Embed")
= sprite_icon('angle-down', size: 12, css_class: 'caret-down')
%ul.dropdown-menu.dropdown-menu-selectable.embed-toggle-list
......
...@@ -25,12 +25,10 @@ module WaitableWorker ...@@ -25,12 +25,10 @@ module WaitableWorker
failed = [] failed = []
args_list.each do |args| args_list.each do |args|
begin
new.perform(*args) new.perform(*args)
rescue rescue
failed << args failed << args
end end
end
bulk_perform_async(failed) if failed.present? bulk_perform_async(failed) if failed.present?
end end
......
...@@ -20,12 +20,10 @@ class CreateGpgSignatureWorker ...@@ -20,12 +20,10 @@ class CreateGpgSignatureWorker
# This calculates and caches the signature in the database # This calculates and caches the signature in the database
commits.each do |commit| commits.each do |commit|
begin
Gitlab::Gpg::Commit.new(commit).signature Gitlab::Gpg::Commit.new(commit).signature
rescue => e rescue => e
Rails.logger.error("Failed to create signature for commit #{commit.id}. Error: #{e.message}") Rails.logger.error("Failed to create signature for commit #{commit.id}. Error: #{e.message}")
end end
end end
end
# rubocop: enable CodeReuse/ActiveRecord # rubocop: enable CodeReuse/ActiveRecord
end end
...@@ -52,7 +52,6 @@ class EmailsOnPushWorker ...@@ -52,7 +52,6 @@ class EmailsOnPushWorker
end end
valid_recipients(recipients).each do |recipient| valid_recipients(recipients).each do |recipient|
begin
send_email( send_email(
recipient, recipient,
project_id, project_id,
...@@ -70,7 +69,6 @@ class EmailsOnPushWorker ...@@ -70,7 +69,6 @@ class EmailsOnPushWorker
rescue Net::SMTPFatalError, Net::SMTPSyntaxError => e rescue Net::SMTPFatalError, Net::SMTPSyntaxError => e
logger.info("Failed to send e-mail for project '#{project.full_name}' to #{recipient}: #{e}") logger.info("Failed to send e-mail for project '#{project.full_name}' to #{recipient}: #{e}")
end end
end
ensure ensure
@email = nil @email = nil
compare = nil compare = nil
......
...@@ -126,12 +126,10 @@ module ObjectStorage ...@@ -126,12 +126,10 @@ module ObjectStorage
def process_uploader(uploader) def process_uploader(uploader)
MigrationResult.new(uploader.upload).tap do |result| MigrationResult.new(uploader.upload).tap do |result|
begin
uploader.migrate!(@to_store) uploader.migrate!(@to_store)
rescue => e rescue => e
result.error = e result.error = e
end end
end end
end end
end
end end
...@@ -8,7 +8,7 @@ class PipelineScheduleWorker ...@@ -8,7 +8,7 @@ class PipelineScheduleWorker
def perform def perform
Ci::PipelineSchedule.active.where("next_run_at < ?", Time.now) Ci::PipelineSchedule.active.where("next_run_at < ?", Time.now)
.preload(:owner, :project).find_each do |schedule| .preload(:owner, :project).find_each do |schedule|
begin
Ci::CreatePipelineService.new(schedule.project, Ci::CreatePipelineService.new(schedule.project,
schedule.owner, schedule.owner,
ref: schedule.ref) ref: schedule.ref)
...@@ -19,7 +19,6 @@ class PipelineScheduleWorker ...@@ -19,7 +19,6 @@ class PipelineScheduleWorker
schedule.schedule_next_run! schedule.schedule_next_run!
end end
end end
end
# rubocop: enable CodeReuse/ActiveRecord # rubocop: enable CodeReuse/ActiveRecord
private private
......
...@@ -6,11 +6,9 @@ class RemoveExpiredMembersWorker ...@@ -6,11 +6,9 @@ class RemoveExpiredMembersWorker
def perform def perform
Member.expired.find_each do |member| Member.expired.find_each do |member|
begin
Members::DestroyService.new.execute(member, skip_authorization: true) Members::DestroyService.new.execute(member, skip_authorization: true)
rescue => ex rescue => ex
logger.error("Expired Member ID=#{member.id} cannot be removed - #{ex}") logger.error("Expired Member ID=#{member.id} cannot be removed - #{ex}")
end end
end end
end
end end
---
title: 'Project: Improve empty repository state UI'
merge_request: 26024
author:
type: other
---
title: Explicitly set master_auth setting to enable basic auth and client certificate
for new GKE clusters
merge_request: 26018
author:
type: other
---
title: Fix bug in BitBucket imports with SHA shorter than 40 chars
merge_request: 26050
author:
type: fixed
...@@ -14,10 +14,8 @@ module Rack ...@@ -14,10 +14,8 @@ module Rack
end end
gitlab_trusted_proxies = Array(Gitlab.config.gitlab.trusted_proxies).map do |proxy| gitlab_trusted_proxies = Array(Gitlab.config.gitlab.trusted_proxies).map do |proxy|
begin
IPAddr.new(proxy) IPAddr.new(proxy)
rescue IPAddr::InvalidAddressError rescue IPAddr::InvalidAddressError
end
end.compact end.compact
Rails.application.config.action_dispatch.trusted_proxies = ( Rails.application.config.action_dispatch.trusted_proxies = (
......
...@@ -6,6 +6,7 @@ const argumentsParser = require('commander');
const webpackConfig = require('./webpack.config.js');
const ROOT_PATH = path.resolve(__dirname, '..');
const SPECS_PATH = /^(?:\.[\\\/])?(ee[\\\/])?spec[\\\/]javascripts[\\\/]/;
function fatalError(message) {
console.error(chalk.red(`\nError: ${message}\n`));
...@@ -41,9 +42,19 @@ const specFilters = argumentsParser
)
.parse(process.argv).filterSpec;
const createContext = (specFiles, regex, suffix) => {
const newContext = specFiles.reduce((context, file) => {
const relativePath = file.replace(SPECS_PATH, '');
context[file] = `./${relativePath}`;
return context;
}, {});
webpackConfig.plugins.push(
new webpack.ContextReplacementPlugin(regex, path.join(ROOT_PATH, suffix), newContext),
);
};
if (specFilters.length) {
const specsPath = /^(?:\.[\\\/])?spec[\\\/]javascripts[\\\/]/;
// resolve filters
let filteredSpecFiles = specFilters.map(filter =>
glob
...@@ -64,23 +75,15 @@ if (specFilters.length) {
fatalError('Your filter did not match any test files.');
}
if (!filteredSpecFiles.every(file => specsPath.test(file))) {
if (!filteredSpecFiles.every(file => SPECS_PATH.test(file))) {
fatalError('Test files must be located within /spec/javascripts.');
}
const newContext = filteredSpecFiles.reduce((context, file) => {
const relativePath = file.replace(specsPath, '');
context[file] = `./${relativePath}`;
return context;
}, {});
webpackConfig.plugins.push(
new webpack.ContextReplacementPlugin(
/spec[\\\/]javascripts$/,
path.join(ROOT_PATH, 'spec/javascripts'),
newContext,
),
);
const CE_FILES = filteredSpecFiles.filter(file => !file.startsWith('ee'));
createContext(CE_FILES, /[^e]{2}[\\\/]spec[\\\/]javascripts$/, 'spec/javascripts');
const EE_FILES = filteredSpecFiles.filter(file => file.startsWith('ee'));
createContext(EE_FILES, /ee[\\\/]spec[\\\/]javascripts$/, 'ee/spec/javascripts');
}
// Karma configuration
...@@ -111,10 +114,20 @@ module.exports = function(config) {
],
preprocessors: {
'spec/javascripts/**/*.js': ['webpack', 'sourcemap'],
'ee/spec/javascripts/**/*.js': ['webpack', 'sourcemap'],
},
reporters: ['mocha'],
webpack: webpackConfig,
webpackMiddleware: { stats: 'errors-only' },
plugins: [
'karma-chrome-launcher',
'karma-coverage-istanbul-reporter',
'karma-jasmine',
'karma-junit-reporter',
'karma-mocha-reporter',
'karma-sourcemap-loader',
'karma-webpack',
],
};
if (process.env.CI) {
...@@ -123,6 +136,19 @@ module.exports = function(config) {
outputFile: 'junit_karma.xml',
useBrowserName: false,
};
} else {
// ignore 404s in local environment because we are not fixing them and they bloat the log
function ignore404() {
return (request, response /* next */) => {
response.writeHead(404);
return response.end('NOT FOUND');
};
}
karmaConfig.middleware = ['ignore-404'];
karmaConfig.plugins.push({
'middleware:ignore-404': ['factory', ignore404],
});
}
if (process.env.BABEL_ENV === 'coverage' || process.env.NODE_ENV === 'coverage') {
......
...@@ -14,6 +14,7 @@ constraints(::Constraints::GroupUrlConstrainer.new) do
get :issues, as: :issues_group
get :merge_requests, as: :merge_requests_group
get :projects, as: :projects_group
get :details, as: :details_group
get :activity, as: :activity_group
put :transfer, as: :transfer_group
# TODO: Remove as part of refactor in https://gitlab.com/gitlab-org/gitlab-ce/issues/49693
......
...@@ -324,6 +324,10 @@ module.exports = {
reportFilename: path.join(ROOT_PATH, 'webpack-report/index.html'),
statsFilename: path.join(ROOT_PATH, 'webpack-report/stats.json'),
}),
new webpack.DefinePlugin({
'process.env.EE': JSON.stringify(IS_EE),
}),
].filter(Boolean),
devServer: {
......
...@@ -126,12 +126,11 @@ class ProjectForeignKeysWithCascadingDeletes < ActiveRecord::Migration[4.2] ...@@ -126,12 +126,11 @@ class ProjectForeignKeysWithCascadingDeletes < ActiveRecord::Migration[4.2]
queues.each do |queue| queues.each do |queue|
# Stealing is racy so it's possible a pop might be called on an # Stealing is racy so it's possible a pop might be called on an
# already-empty queue. # already-empty queue.
begin
remove_orphans(*queue.pop(true)) remove_orphans(*queue.pop(true))
stolen = true stolen = true
rescue ThreadError rescue ThreadError
end end
end
break unless stolen break unless stolen
end end
......
...@@ -269,7 +269,7 @@ The `releases` directory will hold all our deployments:
echo 'Cloning repository'
[ -d {{ $releases_dir }} ] || mkdir {{ $releases_dir }}
git clone --depth 1 {{ $repository }} {{ $new_release_dir }}
cd {{ $releases_dir }}
cd {{ $new_release_dir }}
git reset --hard {{ $commit }}
@endtask
...@@ -347,7 +347,7 @@ At the end, our `Envoy.blade.php` file will look like this:
echo 'Cloning repository'
[ -d {{ $releases_dir }} ] || mkdir {{ $releases_dir }}
git clone --depth 1 {{ $repository }} {{ $new_release_dir }}
cd {{ $releases_dir }}
cd {{ $new_release_dir }}
git reset --hard {{ $commit }}
@endtask
......
...@@ -2561,4 +2561,4 @@ git push -o ci.skip
[environment]: ../environments.md "CI/CD environments"
[schedules]: ../../user/project/pipelines/schedules.md "Pipelines schedules"
[variables]: ../variables/README.md "CI/CD variables"
[push-option]: https://git-scm.com/docs/git-push#git-push--oltoptiongt
[push-option]: https://git-scm.com/docs/git-push#Documentation/git-push.txt--oltoptiongt
...@@ -119,6 +119,7 @@ This [documentation](merge_request_workflow.md) outlines the current merge reque
- [Merge request guidelines](merge_request_workflow.md#merge-request-guidelines)
- [Contribution acceptance criteria](merge_request_workflow.md#contribution-acceptance-criteria)
- [Definition of done](merge_request_workflow.md#definition-of-done)
- [Dependencies](merge_request_workflow.md#dependencies)
## Style guides
......
# Merge requests # Merge requests
We welcome merge requests with fixes and improvements to GitLab code, tests, We welcome merge requests from everyone, with fixes and improvements
and/or documentation. The issues that are specifically suitable for to GitLab code, tests, and documentation. The issues that are specifically suitable
community contributions are listed with for community contributions are listed with the [`Accepting merge requests`](issue_workflow.md#label-for-community-contributors)
[the `Accepting merge requests` label](issue_workflow.md#label-for-community-contributors), label, but you are free to contribute to any issue you want.
but you are free to contribute to any other issue you want.
Please note that if an issue is marked for the current milestone at any time, even
Please note that if an issue is marked for the current milestone either before when you are working on it, a GitLab Inc. team member may take over the merge request
or while you are working on it, a team member may take over the merge request
in order to ensure the work is finished before the release date. in order to ensure the work is finished before the release date.
If you want to add a new feature that is not labeled it is best to first create If you want to add a new feature that is not labeled, it is best to first create
a feedback issue (if there isn't one already) and leave a comment asking for it an issue (if there isn't one already) and leave a comment asking for it
to be marked as `Accepting Merge Requests`. Please include screenshots or to be marked as `Accepting Merge Requests`. Please include screenshots or
wireframes if the feature will also change the UI. wireframes of the proposed feature if it will also change the UI.
Merge requests should be opened at [GitLab.com][gitlab-mr-tracker]. Merge requests should be submitted to the appropriate project at GitLab.com, for example
[GitLab CE](https://gitlab.com/gitlab-org/gitlab-ce/merge_requests),
[GitLab EE](https://gitlab.com/gitlab-org/gitlab-ee/merge_requests),
[GitLab Runner](https://gitlab.com/gitlab-org/gitlab-runner/merge_requests),
[GitLab Omnibus](https://gitlab.com/gitlab-org/omnibus-gitlab/merge_requests), etc.
If you are new to GitLab development (or web development in general), see the If you are new to GitLab development (or web development in general), see the
[I want to contribute!](index.md#i-want-to-contribute) section to get you started with [I want to contribute!](index.md#i-want-to-contribute) section to get started with
some potentially easy issues. some potentially easy issues.
To start with GitLab development download the [GitLab Development Kit][gdk] and To start developing GitLab, download the [GitLab Development Kit](https://gitlab.com/gitlab-org/gitlab-development-kit)
see the [Development section](../../README.md) for some guidelines. and see the [Development section](../../README.md) for the required guidelines.
[gitlab-mr-tracker]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests
[gdk]: https://gitlab.com/gitlab-org/gitlab-development-kit
## Merge request guidelines ## Merge request guidelines
If you can, please submit a merge request with the fix or improvements If you find an issue, please submit a merge request with a fix or improvement, if
including tests. If you don't know how to fix the issue but can write a test you can, and include tests. If you don't know how to fix the issue but can write a test
that exposes the issue we will accept that as well. In general bug fixes that that exposes the issue, we will accept that as well. In general, bug fixes that
include a regression test are merged quickly while new features without proper include a regression test are merged quickly, while new features without proper
tests are least likely to receive timely feedback. The workflow to make a merge tests might be slower to receive feedback. The workflow to make a merge
request is as follows: request is as follows:
1. [Fork](../../workflow/forking_workflow.md#creating-a-fork) the project into
   your personal namespace (or group) on GitLab.com.
1. Create a feature branch in your fork (don't work off `master`).
1. Write [tests](../rake_tasks.md#run-tests) and code.
1. [Generate a changelog entry with `bin/changelog`](../changelog.md).
1. If you are writing documentation, make sure to follow the
   [documentation guidelines](../documentation/index.md).
1. Follow the [commit messages guidelines](#commit-messages-guidelines).
1. If you have multiple commits, combine them into a few logically organized
   commits by [squashing them](https://git-scm.com/book/en/v2/Git-Tools-Rewriting-History#_squashing),
   but do not rewrite the commit history if you're working on shared branches.
1. Push the commit(s) to your working branch in your fork (see the command
   sketch after this list for a typical local workflow).
1. Submit a merge request (MR) to the `master` branch in the main GitLab project.
1. Your merge request needs at least 1 approval, but feel free to require more.
   For instance, if you're touching both backend and frontend code, it's a good idea
   to require 2 approvals: 1 from a backend maintainer and 1 from a frontend
   maintainer.
1. If you're submitting changes to documentation, you'll need approval from a technical
   writer, based on the appropriate [product category](https://about.gitlab.com/handbook/product/categories/).
   Only assign the MR to them when it's ready for docs review.
1. You don't have to select any specific approvers, but you can if you really want
   specific people to approve your merge request.
1. The MR title should describe the change you want to make.
1. The MR description should give a reason for your change.
1. If you are contributing code, fill in the description according to the default
   template already provided in the "Description" field.
1. If you are contributing documentation, choose `Documentation` from the
   "Choose a template" menu and fill in the description according to the template.
1. Mention the issue(s) your merge request solves, using the `Solves #XXX` or
   `Closes #XXX` syntax to [auto-close](../../user/project/issues/automatic_issue_closing.md)
   the issue(s) once the merge request is merged.
1. If you're allowed to (Core team members, for example), set a relevant milestone
   and [labels](issue_workflow.md).
1. If the MR changes the UI, it should include *Before* and *After* screenshots.
1. If the MR changes CSS classes, please include the list of affected pages, which
   can be found by running `grep css-class ./app -R`.
1. Be prepared to answer questions and incorporate feedback into your MR with new
   commits. Once you have fully addressed a suggestion from a reviewer, click the
   "Resolve discussion" button beneath it to mark it resolved.
1. The merge request author resolves only the discussions they have fully addressed.
   If there's an open reply or discussion, a suggestion, a question, or anything else,
   the discussion should be left to be resolved by the reviewer.
1. If your MR touches code that executes shell commands, reads or opens files, or
   handles paths to files on disk, make sure it adheres to the
   [shell command guidelines](../shell_commands.md).
1. If your code creates new files on disk, please read the
   [shared files guidelines](../shared_files.md).
1. If your merge request adds one or more migrations, make sure to execute all
   migrations on a fresh database before the MR is reviewed. If the review leads
   to large changes in the MR, execute the migrations again once the review is complete.
1. Write tests for more complex migrations.
1. Merge requests **must** adhere to the [merge request performance guidelines](../merge_request_performance_guidelines.md).
1. For tests that use Capybara, read
   [how to write reliable, asynchronous integration tests](https://robots.thoughtbot.com/write-reliable-asynchronous-integration-tests-with-capybara).
1. If your merge request introduces changes that require additional steps when
   installing GitLab from source, add them to `doc/install/installation.md` in
   the same merge request.
...@@ -95,109 +99,117 @@ request is as follows:

   instructions are specific to a version, add them to the "Version specific
   upgrading instructions" section.
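The list above is tool-agnostic. As a non-authoritative sketch of the local side of
this workflow, the commands below show one typical pass through it; they assume your
fork is the `origin` remote, the upstream project is available as `upstream`, and
`my-feature-branch` is a placeholder branch name.

```sh
# Keep your fork's master in sync with the upstream project (assumed remote names).
git fetch upstream
git checkout master
git merge --ff-only upstream/master

# Create a feature branch instead of working off master.
git checkout -b my-feature-branch

# ...write tests and code, run the relevant part of the test suite...

# Generate a changelog entry for the change (title is an example).
bin/changelog "Add avatar caching"

# Optionally squash fixup commits into a few logically organized commits.
git rebase -i master

# Push the branch to your fork, then open a merge request against master.
git push -u origin my-feature-branch
```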
If you would like quick feedback on your merge request, feel free to mention someone
from the [core team](https://about.gitlab.com/community/core-team/) or one of the
[merge request coaches](https://about.gitlab.com/team/). When having your code reviewed
and when reviewing merge requests, please keep the [code review guidelines](../code_review.md)
in mind.

### Keep it simple

*Live by smaller iterations.* Please keep the amount of changes in a single MR **as small as possible**.
If you want to contribute a large feature, think very carefully about what the
[minimum viable change](https://about.gitlab.com/handbook/product/#the-minimally-viable-change)
is. Can you split the functionality into two smaller MRs? Can you submit only the
backend/API code? Can you start with a very simple UI? Can you do just a part of the
refactor?

Small MRs, which are more easily reviewed, lead to higher code quality, which is
more important to GitLab than having a minimal commit log. The smaller an MR is,
the more likely it will be merged quickly. After that you can send more MRs to
enhance and expand the feature. The [How to get faster PR reviews](https://github.com/kubernetes/kubernetes/blob/release-1.5/docs/devel/faster_reviews.md)
document from the Kubernetes team also has some great points regarding this.
### Commit messages guidelines
When writing commit messages, please follow the guidelines below:
- The commit subject must contain at least 3 words.
- The commit subject should ideally contain up to 50 characters,
and must not be longer than 72 characters.
- The commit subject must start with a capital letter.
- The commit subject must not end with a period.
- The commit subject and body must be separated by a blank line.
- The commit body must not contain more than 72 characters per line.
- Commits that change 30 or more lines across at least 3 files must
describe these changes in the commit body.
- The commit subject or body must not contain Emojis.
- Use issues and merge requests' full URLs instead of short references,
as they are displayed as plain text outside of GitLab.
- The merge request must not contain more than 10 commit messages.
If the guidelines are not met, the MR will not pass the
[Danger checks](https://gitlab.com/gitlab-org/gitlab-ce/blob/master/danger/commit_messages/Dangerfile).
For more information see [How to Write a Git Commit Message](https://chris.beams.io/posts/git-commit/).
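As an illustration only (the change, issue URL, and wording below are invented for
the example), a commit that satisfies these checks could be created like this:

```sh
# Subject: capitalized, at least 3 words, no trailing period, at most 72 characters.
# Body: separated from the subject by a blank line, wrapped at 72 characters,
# and referencing the issue by its full URL rather than a short "#123" reference.
git commit -F - <<'MSG'
Cache project avatars to reduce repeated disk reads

Avatar lookups currently hit the filesystem on every request, which
shows up as a hotspot when profiling. Store the resolved avatar path in
the request store so each avatar is read at most once per request.

Issue: https://gitlab.com/gitlab-org/gitlab-ce/issues/00000 (placeholder URL)
MSG
```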
## Contribution acceptance criteria

To make sure that your merge request can be approved, please ensure that it meets
the contribution acceptance criteria below:

1. The change is as small as possible.
1. Include proper tests and make all tests pass (unless it contains a test
   exposing a bug in existing code). Every new class should have corresponding
   unit tests, even if the class is exercised at a higher level, such as a feature test.
   - If a failing CI build seems to be unrelated to your contribution, you can try
     restarting the failing CI job, rebasing from master to bring in updates that
     may resolve the failure, or if it has not been fixed yet, ask a developer to
     help you fix the test.
1. The MR initially contains a few logically organized commits.
1. The changes can merge without problems. If not, you should rebase if you're the
   only one working on your feature branch, otherwise merge `master` (see the
   sketch after this list).
1. Only one specific issue is fixed or one specific feature is implemented. Do not
   combine things; send separate merge requests for each issue or feature.
1. Migrations should do only one thing (e.g., create a table, move data to a new
   table, or remove an old table) to aid retrying on failure.
1. Contains functionality that other users will benefit from.
1. Doesn't add configuration options or settings options since they complicate making
   and testing future changes.
1. Changes do not degrade performance:
   - Avoid repeated polling of endpoints that require a significant amount of overhead.
   - Check for N+1 queries via the SQL log or [`QueryRecorder`](../merge_request_performance_guidelines.md).
   - Avoid repeated access of the filesystem.
   - Use [polling with ETag caching](../polling.md) if needed to support real-time features.
1. If the merge request adds any new libraries (gems, JavaScript libraries, etc.),
   they should conform to our [Licensing guidelines](../licensing.md). See those
   instructions for help if the "license-finder" test fails with a
   `Dependencies that need approval` error. Also, make the reviewer aware of the new
   library and explain why you need it.
1. The merge request meets GitLab's [definition of done](#definition-of-done), below.
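As a rough sketch of the "changes can merge without problems" point above, and
assuming `origin` is your fork and `upstream` is the main project, the two ways to
bring your branch up to date are:

```sh
git fetch upstream

# Option 1: rebase, only if you are the sole author working on the branch.
git checkout my-feature-branch
git rebase upstream/master
git push --force-with-lease origin my-feature-branch

# Option 2: merge master, if the branch is shared with other people.
git checkout my-feature-branch
git merge upstream/master
git push origin my-feature-branch
```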
## Definition of done

If you contribute to GitLab, please know that changes involve more than just
code. We use the following [definition of done](https://www.agilealliance.org/glossary/definition-of-done).
Your contribution is not *done* until you have made sure it meets all of these
requirements.
1. Clear description explaining the relevancy of the contribution.
1. Working and clean code that is commented where needed.
1. [Unit, integration, and system tests](../testing_guide/index.md) that all pass
   on the CI server.
1. Performance/scalability implications have been considered, addressed, and tested.
1. [Documented](../documentation/index.md) in the `/doc` directory.
1. [Changelog entry added](../changelog.md), if necessary.
1. Reviewed by relevant (UX/FE/BE/tech writing) reviewers and all concerns are addressed.
1. Merged by a project maintainer.
1. Added to the [release post](https://about.gitlab.com/handbook/marketing/blog/release-posts/),
   if relevant.
1. Added to [the website](https://gitlab.com/gitlab-com/www-gitlab-com/blob/master/data/features.yml), if relevant.
1. Community questions answered.
1. Answers to questions radiated (in docs/wiki/support etc.).
1. [Black-box tests/end-to-end tests](../testing_guide/testing_levels.md#black-box-tests-at-the-system-level-aka-end-to-end-tests)
   added if required. Please contact [the quality team](https://about.gitlab.com/handbook/engineering/quality/#teams)
   with any questions.
## Dependencies

If you add a dependency in GitLab (such as an operating system package), please
consider updating the following, and note the applicability of each in your merge
request:

1. Note the addition in the [release blog post](https://about.gitlab.com/handbook/marketing/blog/release-posts/)
   (create one if it doesn't exist yet).
1. [The upgrade guide](../../update/upgrading_from_source.md).
1. The [GitLab Installation Guide](../../install/installation.md#1-packages-and-dependencies).
1. The [GitLab Development Kit](https://gitlab.com/gitlab-org/gitlab-development-kit).
1. The [CI environment preparation](https://gitlab.com/gitlab-org/gitlab-ce/blob/master/scripts/prepare_build.sh).
1. The [Omnibus package creator](https://gitlab.com/gitlab-org/omnibus-gitlab).
---

[Return to Contributing documentation](index.md)
...@@ -80,11 +80,10 @@ yield a useful result, and ensuring content is helpful and easy to consume.

## Text

- Splitting long lines (preferably up to 100 characters) can make it easier to provide feedback on small chunks of text.
- Insert an empty line for new paragraphs.
- Use sentence case for titles, headings, labels, menu items, and buttons.
- Insert an empty line between different markups (e.g., after every paragraph, header, list, etc.). Example:

```md
## Header
......
...@@ -15,7 +15,7 @@ information on general testing practices at GitLab.

## Jest

GitLab has started to migrate tests to the [Jest](https://jestjs.io)
testing framework. You can read a [detailed evaluation](https://gitlab.com/gitlab-org/gitlab-ce/issues/49171)
of Jest compared to our use of Karma and Jasmine. In summary, it will allow us
to improve the performance and consistency of our frontend tests.
......
...@@ -704,6 +704,7 @@ also be customized, and you can easily use a [custom buildpack](#custom-buildpac

| `INCREMENTAL_ROLLOUT_MODE`| From GitLab 11.4, this variable, if present, can be used to enable an [incremental rollout](#incremental-rollout-to-production-premium) of your application for the production environment.<br/>Set to: <ul><li>`manual`, for manual deployment jobs.</li><li>`timed`, for automatic rollout deployments with a 5 minute delay each one.</li></ul> |
| `TEST_DISABLED` | From GitLab 11.0, this variable can be used to disable the `test` job. If the variable is present, the job will not be created. |
| `CODE_QUALITY_DISABLED` | From GitLab 11.0, this variable can be used to disable the `codequality` job. If the variable is present, the job will not be created. |
| `LICENSE_MANAGEMENT_DISABLED` | From GitLab 11.0, this variable can be used to disable the `license_management` job. If the variable is present, the job will not be created. |
| `SAST_DISABLED` | From GitLab 11.0, this variable can be used to disable the `sast` job. If the variable is present, the job will not be created. |
| `DEPENDENCY_SCANNING_DISABLED` | From GitLab 11.0, this variable can be used to disable the `dependency_scanning` job. If the variable is present, the job will not be created. |
| `CONTAINER_SCANNING_DISABLED` | From GitLab 11.0, this variable can be used to disable the `sast:container` job. If the variable is present, the job will not be created. |
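As one illustrative (non-authoritative) example of making such a variable "present",
it can be defined as a project-level CI/CD variable, either through the project
settings UI or through the variables API; the token, host, and project ID below are
placeholders:

```sh
# Create a project-level CI/CD variable so the Auto DevOps 'test' job is skipped.
# <your-token>, gitlab.example.com, and 123 are placeholders for this example.
curl --request POST \
     --header "PRIVATE-TOKEN: <your-token>" \
     --form "key=TEST_DISABLED" --form "value=true" \
     "https://gitlab.example.com/api/v4/projects/123/variables"
```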
......
...@@ -63,6 +63,12 @@ are available:

- `%{commit_sha}`: ID of the most recent commit to the default branch of a
  project's repository

NOTE: **Note:**
Placeholders allow badges to expose otherwise-private information, such as the
default branch or commit SHA when the project is configured to have a private
repository. This is by design, as badges are intended to be used publicly. Avoid
using these placeholders if the information is sensitive.

## API

You can also configure badges via the GitLab API. As in the settings, there is
......
...@@ -75,6 +75,14 @@ new Kubernetes cluster to your project:

After a couple of minutes, your cluster will be ready to go. You can now proceed
to install some [pre-defined applications](#installing-applications).

NOTE: **Note:**
GitLab requires basic authentication to be enabled and a client certificate to be
issued for the cluster in order to set up an [initial service
account](#access-controls). Starting from [GitLab
11.10](https://gitlab.com/gitlab-org/gitlab-ce/issues/58208), the cluster
creation process explicitly requests that basic authentication and a client
certificate be enabled.

## Adding an existing Kubernetes cluster

To add an existing Kubernetes cluster to your project:
......
...@@ -96,7 +96,7 @@ all matching branches:

## Creating a protected branch

> [Introduced](https://gitlab.com/gitlab-org/gitlab-ce/issues/53361) in GitLab 11.9.

When a protected branch or wildcard protected branches are set to
[**No one** is **Allowed to push**](#using-the-allowed-to-merge-and-allowed-to-push-settings),
......
...@@ -103,7 +103,6 @@ module API ...@@ -103,7 +103,6 @@ module API
detail 'This feature was introduced in GitLab 11.9' detail 'This feature was introduced in GitLab 11.9'
end end
post ':id/milestones/:milestone_id/promote' do post ':id/milestones/:milestone_id/promote' do
begin
authorize! :admin_milestone, user_project authorize! :admin_milestone, user_project
authorize! :admin_milestone, user_project.group authorize! :admin_milestone, user_project.group
...@@ -116,5 +115,4 @@ module API ...@@ -116,5 +115,4 @@ module API
end end
end end
end end
end
end end
...@@ -89,12 +89,10 @@ module API ...@@ -89,12 +89,10 @@ module API
optional :format, type: String, desc: 'The archive format' optional :format, type: String, desc: 'The archive format'
end end
get ':id/repository/archive', requirements: { format: Gitlab::PathRegex.archive_formats_regex } do get ':id/repository/archive', requirements: { format: Gitlab::PathRegex.archive_formats_regex } do
begin
send_git_archive user_project.repository, ref: params[:sha], format: params[:format], append_sha: true send_git_archive user_project.repository, ref: params[:sha], format: params[:format], append_sha: true
rescue rescue
not_found!('File') not_found!('File')
end end
end
desc 'Compare two branches, tags, or commits' do desc 'Compare two branches, tags, or commits' do
success Entities::Compare success Entities::Compare
...@@ -118,13 +116,11 @@ module API ...@@ -118,13 +116,11 @@ module API
optional :sort, type: String, values: %w[asc desc], default: 'asc', desc: 'Sort by asc (ascending) or desc (descending)' optional :sort, type: String, values: %w[asc desc], default: 'asc', desc: 'Sort by asc (ascending) or desc (descending)'
end end
get ':id/repository/contributors' do get ':id/repository/contributors' do
begin
contributors = ::Kaminari.paginate_array(user_project.repository.contributors(order_by: params[:order_by], sort: params[:sort])) contributors = ::Kaminari.paginate_array(user_project.repository.contributors(order_by: params[:order_by], sort: params[:sort]))
present paginate(contributors), with: Entities::Contributor present paginate(contributors), with: Entities::Contributor
rescue rescue
not_found! not_found!
end end
end
desc 'Get the common ancestor between commits' do desc 'Get the common ancestor between commits' do
success Entities::Commit success Entities::Commit
......
...@@ -11,7 +11,7 @@ module Gitlab ...@@ -11,7 +11,7 @@ module Gitlab
# So we chose a way to use ::Ci::Build directly and we don't change the `archive!` method until 11.1 # So we chose a way to use ::Ci::Build directly and we don't change the `archive!` method until 11.1
::Ci::Build.finished.without_archived_trace ::Ci::Build.finished.without_archived_trace
.where(id: start_id..stop_id).find_each do |build| .where(id: start_id..stop_id).find_each do |build|
begin
build.trace.archive! build.trace.archive!
rescue => e rescue => e
Rails.logger.error "Failed to archive live trace. id: #{build.id} message: #{e.message}" Rails.logger.error "Failed to archive live trace. id: #{build.id} message: #{e.message}"
...@@ -19,5 +19,4 @@ module Gitlab ...@@ -19,5 +19,4 @@ module Gitlab
end end
end end
end end
end
end end
...@@ -302,7 +302,6 @@ module Gitlab ...@@ -302,7 +302,6 @@ module Gitlab
ldap_identities = Identity.where("provider like 'ldap%'").where(id: start_id..end_id) ldap_identities = Identity.where("provider like 'ldap%'").where(id: start_id..end_id)
ldap_identities.each do |identity| ldap_identities.each do |identity|
begin
identity.extern_uid = Gitlab::Auth::LDAP::DN.new(identity.extern_uid).to_normalized_s identity.extern_uid = Gitlab::Auth::LDAP::DN.new(identity.extern_uid).to_normalized_s
unless identity.save unless identity.save
Rails.logger.info "Unable to normalize \"#{identity.extern_uid}\". Skipping." Rails.logger.info "Unable to normalize \"#{identity.extern_uid}\". Skipping."
...@@ -311,7 +310,6 @@ module Gitlab ...@@ -311,7 +310,6 @@ module Gitlab
Rails.logger.info "Unable to normalize \"#{identity.extern_uid}\" due to \"#{e.message}\". Skipping." Rails.logger.info "Unable to normalize \"#{identity.extern_uid}\" due to \"#{e.message}\". Skipping."
end end
end end
end
def migrate? def migrate?
Identity.table_exists? Identity.table_exists?
......
...@@ -34,7 +34,6 @@ module Gitlab ...@@ -34,7 +34,6 @@ module Gitlab
def filter_error_files(files) def filter_error_files(files)
files.partition do |file| files.partition do |file|
begin
file.to_h file.to_h
true true
rescue => e rescue => e
...@@ -47,7 +46,6 @@ module Gitlab ...@@ -47,7 +46,6 @@ module Gitlab
false false
end end
end end
end
def filter_existing_uploads(files) def filter_existing_uploads(files)
paths = files.map(&:upload_path) paths = files.map(&:upload_path)
......
...@@ -79,7 +79,6 @@ module Gitlab ...@@ -79,7 +79,6 @@ module Gitlab
create_labels create_labels
client.issues(repo).each do |issue| client.issues(repo).each do |issue|
begin
description = '' description = ''
description += @formatter.author_line(issue.author) unless find_user_id(issue.author) description += @formatter.author_line(issue.author) unless find_user_id(issue.author)
description += issue.description description += issue.description
...@@ -105,7 +104,6 @@ module Gitlab ...@@ -105,7 +104,6 @@ module Gitlab
errors << { type: :issue, iid: issue.iid, errors: e.message } errors << { type: :issue, iid: issue.iid, errors: e.message }
end end
end end
end
# rubocop: enable CodeReuse/ActiveRecord # rubocop: enable CodeReuse/ActiveRecord
def import_issue_comments(issue, gitlab_issue) def import_issue_comments(issue, gitlab_issue)
...@@ -150,7 +148,6 @@ module Gitlab ...@@ -150,7 +148,6 @@ module Gitlab
pull_requests = client.pull_requests(repo) pull_requests = client.pull_requests(repo)
pull_requests.each do |pull_request| pull_requests.each do |pull_request|
begin
description = '' description = ''
description += @formatter.author_line(pull_request.author) unless find_user_id(pull_request.author) description += @formatter.author_line(pull_request.author) unless find_user_id(pull_request.author)
description += pull_request.description description += pull_request.description
...@@ -182,7 +179,6 @@ module Gitlab ...@@ -182,7 +179,6 @@ module Gitlab
errors << { type: :pull_request, iid: pull_request.iid, errors: e.message, trace: e.backtrace.join("\n"), raw_response: pull_request.raw } errors << { type: :pull_request, iid: pull_request.iid, errors: e.message, trace: e.backtrace.join("\n"), raw_response: pull_request.raw }
end end
end end
end
def import_pull_request_comments(pull_request, merge_request) def import_pull_request_comments(pull_request, merge_request)
comments = client.pull_request_comments(repo, pull_request.iid) comments = client.pull_request_comments(repo, pull_request.iid)
...@@ -211,7 +207,6 @@ module Gitlab ...@@ -211,7 +207,6 @@ module Gitlab
end end
inline_comments.each do |comment| inline_comments.each do |comment|
begin
attributes = pull_request_comment_attributes(comment) attributes = pull_request_comment_attributes(comment)
attributes[:discussion_id] = discussion_map[comment.parent_id] if comment.has_parent? attributes[:discussion_id] = discussion_map[comment.parent_id] if comment.has_parent?
...@@ -229,7 +224,6 @@ module Gitlab ...@@ -229,7 +224,6 @@ module Gitlab
errors << { type: :pull_request, iid: comment.iid, errors: e.message } errors << { type: :pull_request, iid: comment.iid, errors: e.message }
end end
end end
end
def build_position(merge_request, pr_comment) def build_position(merge_request, pr_comment)
params = { params = {
...@@ -245,13 +239,11 @@ module Gitlab ...@@ -245,13 +239,11 @@ module Gitlab
def import_standalone_pr_comments(pr_comments, merge_request) def import_standalone_pr_comments(pr_comments, merge_request)
pr_comments.each do |comment| pr_comments.each do |comment|
begin
merge_request.notes.create!(pull_request_comment_attributes(comment)) merge_request.notes.create!(pull_request_comment_attributes(comment))
rescue StandardError => e rescue StandardError => e
errors << { type: :pull_request, iid: comment.iid, errors: e.message } errors << { type: :pull_request, iid: comment.iid, errors: e.message }
end end
end end
end
def pull_request_comment_attributes(comment) def pull_request_comment_attributes(comment)
{ {
......
...@@ -162,7 +162,6 @@ module Gitlab ...@@ -162,7 +162,6 @@ module Gitlab
restore_branches(batch) if recover_missing_commits restore_branches(batch) if recover_missing_commits
batch.each do |pull_request| batch.each do |pull_request|
begin
import_bitbucket_pull_request(pull_request) import_bitbucket_pull_request(pull_request)
rescue StandardError => e rescue StandardError => e
backtrace = Gitlab::Profiler.clean_backtrace(e.backtrace) backtrace = Gitlab::Profiler.clean_backtrace(e.backtrace)
...@@ -172,11 +171,9 @@ module Gitlab ...@@ -172,11 +171,9 @@ module Gitlab
end end
end end
end end
end
def delete_temp_branches def delete_temp_branches
@temp_branches.each do |branch| @temp_branches.each do |branch|
begin
client.delete_branch(project_key, repository_slug, branch.name, branch.sha) client.delete_branch(project_key, repository_slug, branch.name, branch.sha)
project.repository.delete_branch(branch.name) project.repository.delete_branch(branch.name)
rescue BitbucketServer::Connection::ConnectionError => e rescue BitbucketServer::Connection::ConnectionError => e
...@@ -184,7 +181,6 @@ module Gitlab ...@@ -184,7 +181,6 @@ module Gitlab
@errors << { type: :delete_temp_branches, branch_name: branch.name, errors: e.message } @errors << { type: :delete_temp_branches, branch_name: branch.name, errors: e.message }
end end
end end
end
def import_bitbucket_pull_request(pull_request) def import_bitbucket_pull_request(pull_request)
log_info(stage: 'import_bitbucket_pull_requests', message: 'starting', iid: pull_request.iid) log_info(stage: 'import_bitbucket_pull_requests', message: 'starting', iid: pull_request.iid)
...@@ -323,7 +319,6 @@ module Gitlab ...@@ -323,7 +319,6 @@ module Gitlab
def import_standalone_pr_comments(pr_comments, merge_request) def import_standalone_pr_comments(pr_comments, merge_request)
pr_comments.each do |comment| pr_comments.each do |comment|
begin
merge_request.notes.create!(pull_request_comment_attributes(comment)) merge_request.notes.create!(pull_request_comment_attributes(comment))
comment.comments.each do |replies| comment.comments.each do |replies|
...@@ -334,7 +329,6 @@ module Gitlab ...@@ -334,7 +329,6 @@ module Gitlab
errors << { type: :pull_request, comment_id: comment.id, errors: e.message } errors << { type: :pull_request, comment_id: comment.id, errors: e.message }
end end
end end
end
def pull_request_comment_attributes(comment) def pull_request_comment_attributes(comment)
author = find_user_id(comment.author_email) author = find_user_id(comment.author_email)
......
...@@ -98,7 +98,7 @@ module Gitlab

      def read_uint32(gz)
        binary = gz.read(4)
        binary.unpack1('L>') if binary
      end

      def read_string(gz)
......
...@@ -76,7 +76,7 @@ module Gitlab

      postgresql? && version.to_f >= 9.4
    end

    def self.postgresql_minimum_supported_version?
      postgresql? && version.to_f >= 9.6
    end

...@@ -98,6 +98,10 @@ module Gitlab

      Gitlab::Database.postgresql_9_or_less? ? 'pg_last_xlog_replay_location' : 'pg_last_wal_replay_lsn'
    end

    def self.pg_last_xact_replay_timestamp
      'pg_last_xact_replay_timestamp'
    end

    def self.nulls_last_order(field, direction = 'ASC')
      order = "#{field} #{direction}"
......
...@@ -35,7 +35,6 @@ module Gitlab ...@@ -35,7 +35,6 @@ module Gitlab
threads = Array.new(thread_count) do threads = Array.new(thread_count) do
Thread.new do Thread.new do
pool.with_connection do |connection| pool.with_connection do |connection|
begin
Thread.current[MULTI_THREAD_AR_CONNECTION] = connection Thread.current[MULTI_THREAD_AR_CONNECTION] = connection
yield yield
ensure ensure
...@@ -43,7 +42,6 @@ module Gitlab ...@@ -43,7 +42,6 @@ module Gitlab
end end
end end
end end
end
threads.each(&:join) if join threads.each(&:join) if join
......
...@@ -22,7 +22,7 @@ module Gitlab

      # Casts binary data to a SHA1 in hexadecimal.
      def deserialize(value)
        value = super(value)
        value ? value.unpack1(PACK_FORMAT) : nil
      end

      # Casts a SHA1 in hexadecimal to the proper binary format.
......
...@@ -75,13 +75,11 @@ module Gitlab ...@@ -75,13 +75,11 @@ module Gitlab
@certs = stub_cert_paths.flat_map do |cert_file| @certs = stub_cert_paths.flat_map do |cert_file|
File.read(cert_file).scan(PEM_REGEX).map do |cert| File.read(cert_file).scan(PEM_REGEX).map do |cert|
begin
OpenSSL::X509::Certificate.new(cert).to_pem OpenSSL::X509::Certificate.new(cert).to_pem
rescue OpenSSL::OpenSSLError => e rescue OpenSSL::OpenSSLError => e
Rails.logger.error "Could not load certificate #{cert_file} #{e}" Rails.logger.error "Could not load certificate #{cert_file} #{e}"
Gitlab::Sentry.track_exception(e, extra: { cert_file: cert_file }) Gitlab::Sentry.track_exception(e, extra: { cert_file: cert_file })
nil nil
end
end.compact end.compact
end.uniq.join("\n") end.uniq.join("\n")
end end
......
...@@ -13,7 +13,6 @@ module Gitlab ...@@ -13,7 +13,6 @@ module Gitlab
current_blob_data = nil current_blob_data = nil
@rpc_response.each do |msg| @rpc_response.each do |msg|
begin
if msg.oid.blank? && msg.data.blank? if msg.oid.blank? && msg.data.blank?
next next
elsif msg.oid.present? elsif msg.oid.present?
...@@ -25,7 +24,6 @@ module Gitlab ...@@ -25,7 +24,6 @@ module Gitlab
current_blob_data[:data] << msg.data current_blob_data[:data] << msg.data
end end
end end
end
yield new_blob(current_blob_data) if current_blob_data yield new_blob(current_blob_data) if current_blob_data
end end
......
...@@ -89,14 +89,12 @@ module Gitlab ...@@ -89,14 +89,12 @@ module Gitlab
def import_labels def import_labels
fetch_resources(:labels, repo, per_page: 100) do |labels| fetch_resources(:labels, repo, per_page: 100) do |labels|
labels.each do |raw| labels.each do |raw|
begin
gh_label = LabelFormatter.new(project, raw) gh_label = LabelFormatter.new(project, raw)
gh_label.create! gh_label.create!
rescue => e rescue => e
errors << { type: :label, url: Gitlab::UrlSanitizer.sanitize(gh_label.url), errors: e.message } errors << { type: :label, url: Gitlab::UrlSanitizer.sanitize(gh_label.url), errors: e.message }
end end
end end
end
cache_labels! cache_labels!
end end
...@@ -104,7 +102,6 @@ module Gitlab ...@@ -104,7 +102,6 @@ module Gitlab
def import_milestones def import_milestones
fetch_resources(:milestones, repo, state: :all, per_page: 100) do |milestones| fetch_resources(:milestones, repo, state: :all, per_page: 100) do |milestones|
milestones.each do |raw| milestones.each do |raw|
begin
gh_milestone = MilestoneFormatter.new(project, raw) gh_milestone = MilestoneFormatter.new(project, raw)
gh_milestone.create! gh_milestone.create!
rescue => e rescue => e
...@@ -112,7 +109,6 @@ module Gitlab ...@@ -112,7 +109,6 @@ module Gitlab
end end
end end
end end
end
# rubocop: disable CodeReuse/ActiveRecord # rubocop: disable CodeReuse/ActiveRecord
def import_issues def import_issues
...@@ -223,7 +219,6 @@ module Gitlab ...@@ -223,7 +219,6 @@ module Gitlab
def create_comments(comments) def create_comments(comments)
ActiveRecord::Base.no_touching do ActiveRecord::Base.no_touching do
comments.each do |raw| comments.each do |raw|
begin
comment = CommentFormatter.new(project, raw, client) comment = CommentFormatter.new(project, raw, client)
# GH does not return info about comment's parent, so we guess it by checking its URL! # GH does not return info about comment's parent, so we guess it by checking its URL!
...@@ -243,7 +238,6 @@ module Gitlab ...@@ -243,7 +238,6 @@ module Gitlab
end end
end end
end end
end
# rubocop: enable CodeReuse/ActiveRecord # rubocop: enable CodeReuse/ActiveRecord
def discard_inserted_comments(comments, last_note) def discard_inserted_comments(comments, last_note)
...@@ -281,7 +275,6 @@ module Gitlab ...@@ -281,7 +275,6 @@ module Gitlab
def import_releases def import_releases
fetch_resources(:releases, repo, per_page: 100) do |releases| fetch_resources(:releases, repo, per_page: 100) do |releases|
releases.each do |raw| releases.each do |raw|
begin
gh_release = ReleaseFormatter.new(project, raw) gh_release = ReleaseFormatter.new(project, raw)
gh_release.create! if gh_release.valid? gh_release.create! if gh_release.valid?
rescue => e rescue => e
...@@ -289,7 +282,6 @@ module Gitlab ...@@ -289,7 +282,6 @@ module Gitlab
end end
end end
end end
end
def cache_labels! def cache_labels!
project.labels.select(:id, :title).find_each do |label| project.labels.select(:id, :title).find_each do |label|
......
...@@ -52,12 +52,10 @@ module Gitlab ...@@ -52,12 +52,10 @@ module Gitlab
pool&.with do |connection| pool&.with do |connection|
prepared.each_slice(settings[:packet_size]) do |slice| prepared.each_slice(settings[:packet_size]) do |slice|
begin
connection.write_points(slice) connection.write_points(slice)
rescue StandardError rescue StandardError
end end
end end
end
rescue Errno::EADDRNOTAVAIL, SocketError => ex rescue Errno::EADDRNOTAVAIL, SocketError => ex
Gitlab::EnvironmentLogger.error('Cannot resolve InfluxDB address. GitLab Performance Monitoring will not work.') Gitlab::EnvironmentLogger.error('Cannot resolve InfluxDB address. GitLab Performance Monitoring will not work.')
Gitlab::EnvironmentLogger.error(ex) Gitlab::EnvironmentLogger.error(ex)
......
...@@ -10,6 +10,7 @@ module GoogleApi ...@@ -10,6 +10,7 @@ module GoogleApi
class Client < GoogleApi::Auth class Client < GoogleApi::Auth
SCOPE = 'https://www.googleapis.com/auth/cloud-platform'.freeze SCOPE = 'https://www.googleapis.com/auth/cloud-platform'.freeze
LEAST_TOKEN_LIFE_TIME = 10.minutes LEAST_TOKEN_LIFE_TIME = 10.minutes
CLUSTER_MASTER_AUTH_USERNAME = 'admin'.freeze
class << self class << self
def session_key_for_token def session_key_for_token
...@@ -64,6 +65,12 @@ module GoogleApi ...@@ -64,6 +65,12 @@ module GoogleApi
"node_config": { "node_config": {
"machine_type": machine_type "machine_type": machine_type
}, },
"master_auth": {
"username": CLUSTER_MASTER_AUTH_USERNAME,
"client_certificate_config": {
issue_client_certificate: true
}
},
"legacy_abac": { "legacy_abac": {
"enabled": legacy_abac "enabled": legacy_abac
} }
......
...@@ -11,7 +11,7 @@ namespace :gitlab do ...@@ -11,7 +11,7 @@ namespace :gitlab do
Ci::Build.joins(:project) Ci::Build.joins(:project)
.with_artifacts_stored_locally .with_artifacts_stored_locally
.find_each(batch_size: 10) do |build| .find_each(batch_size: 10) do |build|
begin
build.artifacts_file.migrate!(ObjectStorage::Store::REMOTE) build.artifacts_file.migrate!(ObjectStorage::Store::REMOTE)
build.artifacts_metadata.migrate!(ObjectStorage::Store::REMOTE) build.artifacts_metadata.migrate!(ObjectStorage::Store::REMOTE)
...@@ -21,5 +21,4 @@ namespace :gitlab do ...@@ -21,5 +21,4 @@ namespace :gitlab do
end end
end end
end end
end
end end
...@@ -9,7 +9,7 @@ namespace :gitlab do ...@@ -9,7 +9,7 @@ namespace :gitlab do
LfsObject.with_files_stored_locally LfsObject.with_files_stored_locally
.find_each(batch_size: 10) do |lfs_object| .find_each(batch_size: 10) do |lfs_object|
begin
lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE) lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
logger.info("Transferred LFS object #{lfs_object.oid} of size #{lfs_object.size.to_i.bytes} to object storage") logger.info("Transferred LFS object #{lfs_object.oid} of size #{lfs_object.size.to_i.bytes} to object storage")
...@@ -18,5 +18,4 @@ namespace :gitlab do ...@@ -18,5 +18,4 @@ namespace :gitlab do
end end
end end
end end
end
end end
...@@ -26,7 +26,7 @@ namespace :gitlab do ...@@ -26,7 +26,7 @@ namespace :gitlab do
Ci::Build.joins(:project) Ci::Build.joins(:project)
.with_archived_trace_stored_locally .with_archived_trace_stored_locally
.find_each(batch_size: 10) do |build| .find_each(batch_size: 10) do |build|
begin
build.job_artifacts_trace.file.migrate!(ObjectStorage::Store::REMOTE) build.job_artifacts_trace.file.migrate!(ObjectStorage::Store::REMOTE)
logger.info("Transferred job trace of #{build.id} to object storage") logger.info("Transferred job trace of #{build.id} to object storage")
...@@ -35,5 +35,4 @@ namespace :gitlab do ...@@ -35,5 +35,4 @@ namespace :gitlab do
end end
end end
end end
end
end end
...@@ -19,12 +19,10 @@ unless Rails.env.production? ...@@ -19,12 +19,10 @@ unless Rails.env.production?
desc "GitLab | lint | Lint HAML files" desc "GitLab | lint | Lint HAML files"
task :haml do task :haml do
begin
Rake::Task['haml_lint'].invoke Rake::Task['haml_lint'].invoke
rescue RuntimeError # The haml_lint tasks raise a RuntimeError rescue RuntimeError # The haml_lint tasks raise a RuntimeError
exit(1) exit(1)
end end
end
desc "GitLab | lint | Run several lint checks" desc "GitLab | lint | Run several lint checks"
task :all do task :all do
......
...@@ -2,7 +2,6 @@ desc "GitLab | Build internal ids for issues and merge requests" ...@@ -2,7 +2,6 @@ desc "GitLab | Build internal ids for issues and merge requests"
task migrate_iids: :environment do task migrate_iids: :environment do
puts 'Issues'.color(:yellow) puts 'Issues'.color(:yellow)
Issue.where(iid: nil).find_each(batch_size: 100) do |issue| Issue.where(iid: nil).find_each(batch_size: 100) do |issue|
begin
issue.set_iid issue.set_iid
if issue.update_attribute(:iid, issue.iid) if issue.update_attribute(:iid, issue.iid)
...@@ -13,12 +12,10 @@ task migrate_iids: :environment do ...@@ -13,12 +12,10 @@ task migrate_iids: :environment do
rescue rescue
print 'F' print 'F'
end end
end
puts 'done' puts 'done'
puts 'Merge Requests'.color(:yellow) puts 'Merge Requests'.color(:yellow)
MergeRequest.where(iid: nil).find_each(batch_size: 100) do |mr| MergeRequest.where(iid: nil).find_each(batch_size: 100) do |mr|
begin
mr.set_iid mr.set_iid
if mr.update_attribute(:iid, mr.iid) if mr.update_attribute(:iid, mr.iid)
...@@ -29,12 +26,10 @@ task migrate_iids: :environment do ...@@ -29,12 +26,10 @@ task migrate_iids: :environment do
rescue rescue
print 'F' print 'F'
end end
end
puts 'done' puts 'done'
puts 'Milestones'.color(:yellow) puts 'Milestones'.color(:yellow)
Milestone.where(iid: nil).find_each(batch_size: 100) do |m| Milestone.where(iid: nil).find_each(batch_size: 100) do |m|
begin
m.set_iid m.set_iid
if m.update_attribute(:iid, m.iid) if m.update_attribute(:iid, m.iid)
...@@ -45,7 +40,6 @@ task migrate_iids: :environment do ...@@ -45,7 +40,6 @@ task migrate_iids: :environment do
rescue rescue
print 'F' print 'F'
end end
end
puts 'done' puts 'done'
end end
...@@ -966,15 +966,6 @@ msgstr "" ...@@ -966,15 +966,6 @@ msgstr ""
msgid "AutoDevOps|The Auto DevOps pipeline has been enabled and will be used if no alternative CI configuration file is found. %{more_information_link}" msgid "AutoDevOps|The Auto DevOps pipeline has been enabled and will be used if no alternative CI configuration file is found. %{more_information_link}"
msgstr "" msgstr ""
msgid "AutoDevOps|You can automatically build and test your application if you %{link_to_auto_devops_settings} for this project. You can automatically deploy it as well, if you %{link_to_add_kubernetes_cluster}."
msgstr ""
msgid "AutoDevOps|add a Kubernetes cluster"
msgstr ""
msgid "AutoDevOps|enable Auto DevOps"
msgstr ""
msgid "Automatically marked as default internal user" msgid "Automatically marked as default internal user"
msgstr "" msgstr ""
...@@ -2315,6 +2306,9 @@ msgstr "" ...@@ -2315,6 +2306,9 @@ msgstr ""
msgid "Contribution" msgid "Contribution"
msgstr "" msgstr ""
msgid "Contribution Analytics"
msgstr ""
msgid "Contribution Charts" msgid "Contribution Charts"
msgstr "" msgstr ""
...@@ -3373,12 +3367,6 @@ msgstr "" ...@@ -3373,12 +3367,6 @@ msgstr ""
msgid "Except policy:" msgid "Except policy:"
msgstr "" msgstr ""
msgid "Existing Git repository"
msgstr ""
msgid "Existing folder"
msgstr ""
msgid "Existing members and groups" msgid "Existing members and groups"
msgstr "" msgstr ""
...@@ -4073,9 +4061,6 @@ msgstr "" ...@@ -4073,9 +4061,6 @@ msgstr ""
msgid "If enabled" msgid "If enabled"
msgstr "" msgstr ""
msgid "If you already have files you can push them using the %{link_to_cli} below."
msgstr ""
msgid "If your HTTP repository is not publicly accessible, add authentication information to the URL: <code>https://username:password@gitlab.company.com/group/project.git</code>." msgid "If your HTTP repository is not publicly accessible, add authentication information to the URL: <code>https://username:password@gitlab.company.com/group/project.git</code>."
msgstr "" msgstr ""
...@@ -4540,9 +4525,6 @@ msgstr "" ...@@ -4540,9 +4525,6 @@ msgstr ""
msgid "Learn more about Kubernetes" msgid "Learn more about Kubernetes"
msgstr "" msgstr ""
msgid "Learn more about protected branches"
msgstr ""
msgid "Learn more about signing commits" msgid "Learn more about signing commits"
msgstr "" msgstr ""
...@@ -5210,9 +5192,6 @@ msgstr "" ...@@ -5210,9 +5192,6 @@ msgstr ""
msgid "Not started" msgid "Not started"
msgstr "" msgstr ""
msgid "Note that the master branch is automatically protected. %{link_to_protected_branches}"
msgstr ""
msgid "Note that this invitation was sent to %{mail_to_invite_email}, but you are signed in as %{link_to_current_user} with email %{mail_to_current_user}." msgid "Note that this invitation was sent to %{mail_to_invite_email}, but you are signed in as %{link_to_current_user} with email %{mail_to_current_user}."
msgstr "" msgstr ""
...@@ -5404,9 +5383,6 @@ msgstr "" ...@@ -5404,9 +5383,6 @@ msgstr ""
msgid "Other Labels" msgid "Other Labels"
msgstr "" msgstr ""
msgid "Otherwise it is recommended you start with one of the options below."
msgstr ""
msgid "Outbound requests" msgid "Outbound requests"
msgstr "" msgstr ""
...@@ -6265,6 +6241,12 @@ msgstr "" ...@@ -6265,6 +6241,12 @@ msgstr ""
msgid "Push" msgid "Push"
msgstr "" msgstr ""
msgid "Push an existing Git repository"
msgstr ""
msgid "Push an existing folder"
msgstr ""
msgid "Push events" msgid "Push events"
msgstr "" msgstr ""
...@@ -8768,6 +8750,12 @@ msgstr "" ...@@ -8768,6 +8750,12 @@ msgstr ""
msgid "You can also star a label to make it a priority label." msgid "You can also star a label to make it a priority label."
msgstr "" msgstr ""
msgid "You can also upload existing files from your computer using the instructions below."
msgstr ""
msgid "You can create files directly in GitLab using one of the following options."
msgstr ""
msgid "You can easily contribute to them by requesting to join these groups." msgid "You can easily contribute to them by requesting to join these groups."
msgstr "" msgstr ""
...@@ -8972,9 +8960,6 @@ msgstr "" ...@@ -8972,9 +8960,6 @@ msgstr ""
msgid "branch name" msgid "branch name"
msgstr "" msgstr ""
msgid "command line instructions"
msgstr ""
msgid "commented on %{link_to_project}" msgid "commented on %{link_to_project}"
msgstr "" msgstr ""
......
...@@ -61,6 +61,7 @@ module QA

    autoload :File, 'qa/resource/file'
    autoload :Fork, 'qa/resource/fork'
    autoload :SSHKey, 'qa/resource/ssh_key'
    autoload :Snippet, 'qa/resource/snippet'

    module Events
      autoload :Base, 'qa/resource/events/base'

...@@ -142,6 +143,12 @@ module QA

      module Dashboard
        autoload :Projects, 'qa/page/dashboard/projects'
        autoload :Groups, 'qa/page/dashboard/groups'

        module Snippet
          autoload :New, 'qa/page/dashboard/snippet/new'
          autoload :Index, 'qa/page/dashboard/snippet/index'
          autoload :Show, 'qa/page/dashboard/snippet/show'
        end
      end

      module Group
......
...@@ -2,8 +2,6 @@ module QA

  module Page
    module Dashboard
      class Projects < Page::Base
        view 'app/views/shared/projects/_search_form.html.haml' do
          element :form_filter_by_name, /form_tag.+id: 'project-filter-form'/ # rubocop:disable QA/ElementWithPattern
        end
......
# frozen_string_literal: true
module QA
module Page
module Dashboard
module Snippet
class Index < Page::Base
view 'app/views/layouts/header/_new_dropdown.haml' do
element :new_menu_toggle
element :global_new_snippet_link
end
def go_to_new_snippet_page
click_element :new_menu_toggle
click_element :global_new_snippet_link
end
end
end
end
end
end
# frozen_string_literal: true
module QA
module Page
module Dashboard
module Snippet
class New < Page::Base
view 'app/views/shared/form_elements/_description.html.haml' do
element :issuable_form_description
end
view 'app/views/shared/snippets/_form.html.haml' do
element :snippet_title
element :snippet_file_name
element :create_snippet_button
end
def fill_title(title)
fill_element :snippet_title, title
end
def fill_description(description)
fill_element :issuable_form_description, description
end
def set_visibility(visibility)
choose visibility
end
def fill_file_name(name)
finished_loading?
fill_element :snippet_file_name, name
end
def fill_file_content(content)
finished_loading?
text_area.set content
end
def create_snippet
click_element :create_snippet_button
end
private
def text_area
find('#editor>textarea', visible: false)
end
end
end
end
end
end
# frozen_string_literal: true
module QA
module Page
module Dashboard
module Snippet
class Show < Page::Base
view 'app/views/shared/snippets/_header.html.haml' do
element :snippet_title
element :snippet_description
element :embed_type
element :snippet_box
end
view 'app/views/projects/blob/_header_content.html.haml' do
element :file_title_name
end
view 'app/views/shared/_file_highlight.html.haml' do
element :file_content
end
def has_snippet_title?(snippet_title)
within_element(:snippet_title) do
has_text?(snippet_title)
end
end
def has_snippet_description?(snippet_description)
within_element(:snippet_description) do
has_text?(snippet_description)
end
end
def has_embed_type?(embed_type)
within_element(:embed_type) do
has_text?(embed_type)
end
end
def has_visibility_type?(visibility_type)
within_element(:snippet_box) do
has_text?(visibility_type)
end
end
def has_file_name?(file_name)
within_element(:file_title_name) do
has_text?(file_name)
end
end
def has_file_content?(file_content)
finished_loading?
within_element(:file_content) do
has_text?(file_content)
end
end
end
end
end
end
end
...@@ -19,6 +19,7 @@ module QA
element :admin_area_link
element :projects_dropdown
element :groups_dropdown
element :snippets_link
end
view 'app/views/layouts/nav/projects_dropdown/_show.html.haml' do
...@@ -66,6 +67,10 @@ module QA
end
end
def go_to_snippets
click_element :snippets_link
end
def has_personal_area?(wait: Capybara.default_max_wait_time)
has_element?(:user_avatar, wait: wait)
end
...
...@@ -43,12 +43,10 @@ module QA
def create_new_file_from_template(file_name, template)
click_element :new_file
within_element(:template_list) do
begin
click_on file_name
rescue Capybara::ElementNotFound
raise ElementNotFound, %Q(Couldn't find file template named "#{file_name}". Please confirm that it is a valid option.)
end
end
wait(reload: false) do
within_element(:file_templates_bar) do
...
# frozen_string_literal: true
module QA
module Resource
class Snippet < Base
attr_accessor :title, :description, :file_content, :visibility, :file_name
def initialize
@title = 'New snippet title'
@description = 'The snippet description'
@visibility = 'Public'
@file_content = 'The snippet content'
@file_name = 'New snippet file name'
end
def fabricate!
Page::Dashboard::Snippet::Index.perform(&:go_to_new_snippet_page)
Page::Dashboard::Snippet::New.perform do |page|
page.fill_title(@title)
page.fill_description(@description)
page.set_visibility(@visibility)
page.fill_file_name(@file_name)
page.fill_file_content(@file_content)
page.create_snippet
end
end
end
end
end
# frozen_string_literal: true
module QA
context 'Create', :smoke do
describe 'Snippet creation' do
it 'User creates a snippet' do
Runtime::Browser.visit(:gitlab, Page::Main::Login)
Page::Main::Login.perform(&:sign_in_using_credentials)
Page::Main::Menu.perform(&:go_to_snippets)
Resource::Snippet.fabricate_via_browser_ui! do |snippet|
snippet.title = 'Snippet title'
snippet.description = 'Snippet description'
snippet.visibility = 'Public'
snippet.file_name = 'New snippet file name'
snippet.file_content = 'Snippet file text'
end
Page::Dashboard::Snippet::Show.perform do |snippet|
expect(snippet).to have_snippet_title('Snippet title')
expect(snippet).to have_snippet_description('Snippet description')
expect(snippet).to have_embed_type('Embed')
expect(snippet).to have_visibility_type('Public')
expect(snippet).to have_file_name('New snippet file name')
expect(snippet).to have_file_content('Snippet file text')
end
end
end
end
end
# frozen_string_literal: true
module QA
context 'Create' do
# Failure issue: https://gitlab.com/gitlab-org/quality/staging/issues/46
context 'Create', :quarantine do
describe 'Web IDE file templates' do
include Runtime::Fixtures
...
...@@ -79,7 +79,6 @@ module GitalyTest
socket = read_socket_path
Integer(timeout / delay).times do
begin
UNIXSocket.new(socket)
puts ' OK'
...@@ -88,7 +87,6 @@ module GitalyTest
print '.'
sleep delay
end
end
puts ' FAILED'
...
...@@ -12,13 +12,11 @@ module SimpleCov
def resultset_hashes
resultset_files.map do |path|
begin
JSON.parse(File.read(path))
rescue
{}
end
end
end
def resultset
resultset_hashes.reduce({}, :merge)
...
...@@ -32,21 +32,46 @@ describe GroupsController do
end
end
shared_examples 'details view' do
it { is_expected.to render_template('groups/show') }
context 'as atom' do
let!(:event) { create(:event, project: project) }
let(:format) { :atom }
it { is_expected.to render_template('groups/show') }
it 'assigns events for all the projects in the group' do
subject
expect(assigns(:events)).to contain_exactly(event)
end
end
end
describe 'GET #show' do
before do
sign_in(user)
project
end
context 'as atom' do
it 'assigns events for all the projects in the group' do
create(:event, project: project)
get :show, params: { id: group.to_param }, format: :atom
expect(assigns(:events)).not_to be_empty
end
end
let(:format) { :html }
subject { get :show, params: { id: group.to_param }, format: format }
it_behaves_like 'details view'
end
describe 'GET #details' do
before do
sign_in(user)
project
end
let(:format) { :html }
subject { get :details, params: { id: group.to_param }, format: format }
it_behaves_like 'details view'
end
describe 'GET edit' do
...
...@@ -13,7 +13,7 @@ describe 'Projects > Show > User sees Git instructions' do
it 'shows Git command line instructions' do
click_link 'Create empty repository'
page.within '.empty_wrapper' do
page.within '.empty-wrapper' do
expect(page).to have_content('Command line instructions')
end
end
...
...@@ -20,18 +20,18 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
visit project_path(project)
end
it 'no Auto DevOps button if can not manage pipelines' do
it 'Project buttons are not visible' do
page.within('.project-buttons') do
expect(page).not_to have_link('Enable Auto DevOps')
expect(page).not_to have_link('Auto DevOps enabled')
end
end
it '"Auto DevOps enabled" button not linked' do
visit project_path(project)
page.within('.project-buttons') do
expect(page).to have_text('Auto DevOps enabled')
expect(page).not_to have_link('New file')
expect(page).not_to have_link('Add README')
expect(page).not_to have_link('Add CHANGELOG')
expect(page).not_to have_link('Add CONTRIBUTING')
expect(page).not_to have_link('Enable Auto DevOps')
expect(page).not_to have_link('Auto DevOps enabled')
expect(page).not_to have_link('Add Kubernetes cluster')
expect(page).not_to have_link('Kubernetes configured')
end
end
end
...@@ -61,46 +61,6 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
expect(page).to have_link('Add license', href: presenter.add_license_path)
end
end
describe 'Auto DevOps button' do
context 'when Auto DevOps is enabled' do
it '"Auto DevOps enabled" anchor linked to settings page' do
visit project_path(project)
page.within('.project-buttons') do
expect(page).to have_link('Auto DevOps enabled', href: project_settings_ci_cd_path(project, anchor: 'autodevops-settings'))
end
end
end
context 'when Auto DevOps is not enabled' do
let(:project) { create(:project, :public, :empty_repo, auto_devops_attributes: { enabled: false }) }
it '"Enable Auto DevOps" button linked to settings page' do
page.within('.project-buttons') do
expect(page).to have_link('Enable Auto DevOps', href: project_settings_ci_cd_path(project, anchor: 'autodevops-settings'))
end
end
end
end
describe 'Kubernetes cluster button' do
it '"Add Kubernetes cluster" button linked to clusters page' do
page.within('.project-buttons') do
expect(page).to have_link('Add Kubernetes cluster', href: new_project_cluster_path(project))
end
end
it '"Kubernetes cluster" anchor linked to cluster page' do
cluster = create(:cluster, :provided_by_gcp, projects: [project])
visit project_path(project)
page.within('.project-buttons') do
expect(page).to have_link('Kubernetes configured', href: project_cluster_path(project, cluster))
end
end
end
end
end
...
...@@ -98,11 +98,7 @@ describe 'Private Group access' do
let(:project) { create(:project, :public) }
before do
Projects::GroupLinks::CreateService.new(
project,
create(:user),
link_group_access: ProjectGroupLink::DEVELOPER
).execute(group)
create(:project_group_link, project: project, group: group)
end
subject { group_path(group) }
...
...@@ -6,6 +6,5 @@
"properties" : {
"id": { "type": "integer" },
"name": { "type": "string" }
},
"additionalProperties": false
}
...@@ -38,6 +38,5 @@
"items": { "$ref": "label.json" }
},
"assignees": { "type": ["array", "null"] }
},
"additionalProperties": false
}
...@@ -10,6 +10,5 @@
"items": { "$ref": "issue_board.json" }
},
"size": { "type": "integer" }
},
"additionalProperties": false
}
...@@ -52,6 +52,7 @@
"mergeable_discussions_state": { "type": "boolean" },
"conflicts_can_be_resolved_in_ui": { "type": "boolean" },
"branch_missing": { "type": "boolean" },
"commits_count": { "type": ["integer", "null"] },
"has_conflicts": { "type": "boolean" },
"can_be_merged": { "type": "boolean" },
"mergeable": { "type": "boolean" },
...@@ -125,6 +126,5 @@
"can_receive_suggestion": { "type": "boolean" },
"source_branch_protected": { "type": "boolean" },
"conflicts_docs_path": { "type": ["string", "null"] }
},
"additionalProperties": false
}
...@@ -28,7 +28,7 @@
"items": { "$ref": "entities/label.json" }
},
"assignee": {
"id": { "type": "integet" },
"id": { "type": "integer" },
"name": { "type": "string" },
"username": { "type": "string" },
"avatar_url": { "type": "uri" }
...@@ -52,6 +52,5 @@
}
},
"subscribed": { "type": ["boolean", "null"] }
},
"additionalProperties": false
}
...@@ -10,6 +10,5 @@
"items": { "$ref": "issue.json" }
},
"size": { "type": "integer" }
},
"additionalProperties": false
}
{
"type": "object",
"properties" : {
"properties" : {
"id": { "type": "integer" },
"iid": { "type": "integer" },
"project_id": { "type": "integer" },
"title": { "type": "string" },
"description": { "type": ["string", "null"] },
"state": { "type": "string" },
"merged_by": {
"type": ["object", "null"],
"properties": {
"name": { "type": "string" },
"username": { "type": "string" },
"id": { "type": "integer" },
"state": { "type": "string" },
"avatar_url": { "type": "uri" },
"web_url": { "type": "uri" }
},
"additionalProperties": false
},
"merged_at": { "type": ["date", "null"] },
"closed_by": {
"type": ["object", "null"],
"properties": {
"name": { "type": "string" },
"username": { "type": "string" },
"id": { "type": "integer" },
"state": { "type": "string" },
"avatar_url": { "type": "uri" },
"web_url": { "type": "uri" }
},
"additionalProperties": false
},
"closed_at": { "type": ["date", "null"] },
"created_at": { "type": "date" },
"updated_at": { "type": "date" },
"target_branch": { "type": "string" },
"source_branch": { "type": "string" },
"upvotes": { "type": "integer" },
"downvotes": { "type": "integer" },
"author": {
"type": "object",
"properties": {
"name": { "type": "string" },
"username": { "type": "string" },
"id": { "type": "integer" },
"state": { "type": "string" },
"avatar_url": { "type": "uri" },
"web_url": { "type": "uri" }
},
"additionalProperties": false
},
"assignee": {
"type": ["object", "null"],
"properties": {
"name": { "type": "string" },
"username": { "type": "string" },
"id": { "type": "integer" },
"state": { "type": "string" },
"avatar_url": { "type": "uri" },
"web_url": { "type": "uri" }
},
"additionalProperties": false
},
"source_project_id": { "type": "integer" },
"target_project_id": { "type": "integer" },
"labels": {
"type": "array",
"items": {
"type": "string"
}
},
"work_in_progress": { "type": "boolean" },
"milestone": {
"type": ["object", "null"],
"properties": {
"id": { "type": "integer" },
"iid": { "type": "integer" },
"project_id": { "type": ["integer", "null"] },
"group_id": { "type": ["integer", "null"] },
"title": { "type": "string" },
"description": { "type": ["string", "null"] },
"state": { "type": "string" },
"created_at": { "type": "date" },
"updated_at": { "type": "date" },
"due_date": { "type": "date" },
"start_date": { "type": "date" }
},
"additionalProperties": false
},
"merge_when_pipeline_succeeds": { "type": "boolean" },
"merge_status": { "type": "string" },
"sha": { "type": "string" },
"merge_commit_sha": { "type": ["string", "null"] },
"user_notes_count": { "type": "integer" },
"changes_count": { "type": "string" },
"should_remove_source_branch": { "type": ["boolean", "null"] },
"force_remove_source_branch": { "type": ["boolean", "null"] },
"discussion_locked": { "type": ["boolean", "null"] },
"web_url": { "type": "uri" },
"squash": { "type": "boolean" },
"time_stats": {
"time_estimate": { "type": "integer" },
"total_time_spent": { "type": "integer" },
"human_time_estimate": { "type": ["string", "null"] },
"human_total_time_spent": { "type": ["string", "null"] }
},
"allow_collaboration": { "type": ["boolean", "null"] },
"allow_maintainer_to_push": { "type": ["boolean", "null"] }
},
"required": [
"id", "iid", "project_id", "title", "description",
"state", "created_at", "updated_at", "target_branch",
"source_branch", "upvotes", "downvotes", "author",
"assignee", "source_project_id", "target_project_id",
"labels", "work_in_progress", "milestone", "merge_when_pipeline_succeeds",
"merge_status", "sha", "merge_commit_sha", "user_notes_count",
"should_remove_source_branch", "force_remove_source_branch",
"web_url", "squash"
]
}
}
{
"type": "array",
"items": {
"type": "object",
"$ref": "./merge_request.json"
"properties" : {
"id": { "type": "integer" },
"iid": { "type": "integer" },
"project_id": { "type": "integer" },
"title": { "type": "string" },
"description": { "type": ["string", "null"] },
"state": { "type": "string" },
"merged_by": {
"type": ["object", "null"],
"properties": {
"name": { "type": "string" },
"username": { "type": "string" },
"id": { "type": "integer" },
"state": { "type": "string" },
"avatar_url": { "type": "uri" },
"web_url": { "type": "uri" }
},
"additionalProperties": false
},
"merged_at": { "type": ["date", "null"] },
"closed_by": {
"type": ["object", "null"],
"properties": {
"name": { "type": "string" },
"username": { "type": "string" },
"id": { "type": "integer" },
"state": { "type": "string" },
"avatar_url": { "type": "uri" },
"web_url": { "type": "uri" }
},
"additionalProperties": false
},
"closed_at": { "type": ["date", "null"] },
"created_at": { "type": "date" },
"updated_at": { "type": "date" },
"target_branch": { "type": "string" },
"source_branch": { "type": "string" },
"upvotes": { "type": "integer" },
"downvotes": { "type": "integer" },
"author": {
"type": "object",
"properties": {
"name": { "type": "string" },
"username": { "type": "string" },
"id": { "type": "integer" },
"state": { "type": "string" },
"avatar_url": { "type": "uri" },
"web_url": { "type": "uri" }
},
"additionalProperties": false
},
"assignee": {
"type": ["object", "null"],
"properties": {
"name": { "type": "string" },
"username": { "type": "string" },
"id": { "type": "integer" },
"state": { "type": "string" },
"avatar_url": { "type": "uri" },
"web_url": { "type": "uri" }
},
"additionalProperties": false
},
"source_project_id": { "type": "integer" },
"target_project_id": { "type": "integer" },
"labels": {
"type": "array",
"items": {
"type": "string"
}
},
"work_in_progress": { "type": "boolean" },
"milestone": {
"type": ["object", "null"],
"properties": {
"id": { "type": "integer" },
"iid": { "type": "integer" },
"project_id": { "type": ["integer", "null"] },
"group_id": { "type": ["integer", "null"] },
"title": { "type": "string" },
"description": { "type": ["string", "null"] },
"state": { "type": "string" },
"created_at": { "type": "date" },
"updated_at": { "type": "date" },
"due_date": { "type": "date" },
"start_date": { "type": "date" }
},
"additionalProperties": false
},
"merge_when_pipeline_succeeds": { "type": "boolean" },
"merge_status": { "type": "string" },
"sha": { "type": "string" },
"merge_commit_sha": { "type": ["string", "null"] },
"user_notes_count": { "type": "integer" },
"changes_count": { "type": "string" },
"should_remove_source_branch": { "type": ["boolean", "null"] },
"force_remove_source_branch": { "type": ["boolean", "null"] },
"discussion_locked": { "type": ["boolean", "null"] },
"web_url": { "type": "uri" },
"squash": { "type": "boolean" },
"time_stats": {
"time_estimate": { "type": "integer" },
"total_time_spent": { "type": "integer" },
"human_time_estimate": { "type": ["string", "null"] },
"human_total_time_spent": { "type": ["string", "null"] }
},
"allow_collaboration": { "type": ["boolean", "null"] },
"allow_maintainer_to_push": { "type": ["boolean", "null"] }
},
"required": [
"id", "iid", "project_id", "title", "description",
"state", "created_at", "updated_at", "target_branch",
"source_branch", "upvotes", "downvotes", "author",
"assignee", "source_project_id", "target_project_id",
"labels", "work_in_progress", "milestone", "merge_when_pipeline_succeeds",
"merge_status", "sha", "merge_commit_sha", "user_notes_count",
"should_remove_source_branch", "force_remove_source_branch",
"web_url", "squash"
],
"additionalProperties": false
}
}
...@@ -34,13 +34,6 @@ describe('Store', () => {
expect(store.state.stoppedCounter).toEqual(2);
});
describe('store environments', () => {
it('should store environments', () => {
store.storeEnvironments(serverData);
expect(store.state.environments.length).toEqual(serverData.length);
});
it('should add folder keys when environment is a folder', () => {
const environment = {
name: 'bar',
...@@ -82,7 +75,6 @@ describe('Store', () => {
expect(store.state.environments[1].folderName).toEqual(serverData[1].name);
});
});
describe('toggleFolder', () => {
it('should toggle folder', () => {
...
...@@ -3,6 +3,7 @@ import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import environmentsFolderViewComponent from '~/environments/folder/environments_folder_view.vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import { removeBreakLine, removeWhitespace } from 'spec/helpers/vue_component_helper';
import { environmentsList } from '../mock_data';
describe('Environments Folder View', () => {
...@@ -15,6 +16,11 @@ describe('Environments Folder View', () => {
folderName: 'review',
canReadEnvironment: true,
cssContainerClass: 'container',
canaryDeploymentFeatureId: 'canary_deployment',
showCanaryDeploymentCallout: true,
userCalloutsPath: '/callouts',
lockPromotionSvgPath: '/assets/illustrations/lock-promotion.svg',
helpCanaryDeploymentsPath: 'help/canary-deployments',
};
beforeEach(() => {
...@@ -89,9 +95,11 @@ describe('Environments Folder View', () => {
it('should render parent folder name', done => {
setTimeout(() => {
expect(component.$el.querySelector('.js-folder-name').textContent.trim()).toContain(
'Environments / review',
);
expect(
removeBreakLine(
removeWhitespace(component.$el.querySelector('.js-folder-name').textContent.trim()),
),
).toContain('Environments / review');
done();
}, 0);
});
...
...@@ -69,7 +69,7 @@ window.gl = window.gl || {};
window.gl.TEST_HOST = TEST_HOST;
window.gon = window.gon || {};
window.gon.test_env = true;
window.gon.ee = false;
window.gon.ee = process.env.EE;
gon.relative_url_root = '';
let hasUnhandledPromiseRejections = false;
...@@ -122,10 +122,16 @@ afterEach(() => {
const axiosDefaultAdapter = getDefaultAdapter();
// render all of our tests
const testsContext = require.context('.', true, /_spec$/);
const testContexts = [require.context('spec', true, /_spec$/)];
testsContext.keys().forEach(function(path) {
if (process.env.EE) {
testContexts.push(require.context('ee_spec', true, /_spec$/));
}
testContexts.forEach(context => {
context.keys().forEach(path => {
try {
testsContext(path);
context(path);
} catch (err) {
console.log(err);
console.error('[GL SPEC RUNNER ERROR] Unable to load spec: ', path);
...@@ -135,6 +141,7 @@ testsContext.keys().forEach(function(path) {
});
});
}
});
});
describe('test errors', () => {
...@@ -204,19 +211,29 @@ if (process.env.BABEL_ENV === 'coverage') {
];
describe('Uncovered files', function() {
const sourceFiles = require.context('~', true, /\.(js|vue)$/);
const sourceFilesContexts = [require.context('~', true, /\.(js|vue)$/)];
if (process.env.EE) {
sourceFilesContexts.push(require.context('ee', true, /\.(js|vue)$/));
}
const allTestFiles = testContexts.reduce(
(accumulator, context) => accumulator.concat(context.keys()),
[],
);
$.holdReady(true);
sourceFiles.keys().forEach(function(path) {
sourceFilesContexts.forEach(context => {
context.keys().forEach(path => {
// ignore if there is a matching spec file
if (testsContext.keys().indexOf(`${path.replace(/\.(js|vue)$/, '')}_spec`) > -1) {
if (allTestFiles.indexOf(`${path.replace(/\.(js|vue)$/, '')}_spec`) > -1) {
return;
}
it(`includes '${path}'`, function() {
try {
sourceFiles(path);
context(path);
} catch (err) {
if (troubleMakers.indexOf(path) === -1) {
expect(err).toBeNull();
...@@ -225,4 +242,5 @@
});
});
});
});
}
...@@ -172,11 +172,9 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :m
let(:exception) { ActiveRecord::RecordNotFound }
let(:perform_ignoring_exceptions) do
begin
subject.perform(start_id, stop_id)
rescue described_class::Error
end
end
before do
allow_any_instance_of(described_class::MergeRequestDiff::ActiveRecord_Relation)
...
...@@ -95,6 +95,9 @@ describe Gitlab::BitbucketImport::Importer do
subject { described_class.new(project) }
describe '#import_pull_requests' do
let(:source_branch_sha) { sample.commits.last }
let(:target_branch_sha) { sample.commits.first }
before do
allow(subject).to receive(:import_wiki)
allow(subject).to receive(:import_issues)
...@@ -102,9 +105,9 @@
pull_request = instance_double(
Bitbucket::Representation::PullRequest,
iid: 10,
source_branch_sha: sample.commits.last,
source_branch_sha: source_branch_sha,
source_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.source_branch,
target_branch_sha: sample.commits.first,
target_branch_sha: target_branch_sha,
target_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.target_branch,
title: 'This is a title',
description: 'This is a test pull request',
...@@ -162,6 +165,19 @@
expect(reply_note).to be_a(DiffNote)
expect(reply_note.note).to eq(@reply.note)
end
context "when branches' sha is not found in the repository" do
let(:source_branch_sha) { 'a' * Commit::MIN_SHA_LENGTH }
let(:target_branch_sha) { 'b' * Commit::MIN_SHA_LENGTH }
it 'uses the pull request sha references' do
expect { subject.execute }.to change { MergeRequest.count }.by(1)
merge_request_diff = MergeRequest.first.merge_request_diff
expect(merge_request_diff.head_commit_sha).to eq source_branch_sha
expect(merge_request_diff.start_commit_sha).to eq target_branch_sha
end
end
end
context 'issues statuses' do
...
...@@ -87,6 +87,38 @@ describe Gitlab::Database do
end
end
describe '.postgresql_minimum_supported_version?' do
it 'returns false when not using PostgreSQL' do
allow(described_class).to receive(:postgresql?).and_return(false)
expect(described_class.postgresql_minimum_supported_version?).to eq(false)
end
context 'when using PostgreSQL' do
before do
allow(described_class).to receive(:postgresql?).and_return(true)
end
it 'returns false when using PostgreSQL 9.5' do
allow(described_class).to receive(:version).and_return('9.5')
expect(described_class.postgresql_minimum_supported_version?).to eq(false)
end
it 'returns true when using PostgreSQL 9.6' do
allow(described_class).to receive(:version).and_return('9.6')
expect(described_class.postgresql_minimum_supported_version?).to eq(true)
end
it 'returns true when using PostgreSQL 10 or newer' do
allow(described_class).to receive(:version).and_return('10')
expect(described_class.postgresql_minimum_supported_version?).to eq(true)
end
end
end
describe '.join_lateral_supported?' do
it 'returns false when using MySQL' do
allow(described_class).to receive(:postgresql?).and_return(false)
...@@ -195,6 +227,12 @@
end
end
describe '.pg_last_xact_replay_timestamp' do
it 'returns pg_last_xact_replay_timestamp' do
expect(described_class.pg_last_xact_replay_timestamp).to eq('pg_last_xact_replay_timestamp')
end
end
describe '.nulls_last_order' do
context 'when using PostgreSQL' do
before do
...
...@@ -37,7 +37,6 @@ describe Gitlab::Git::RepositoryCleaner do
let(:object_map) { Gitlab::HttpIO.new(url, object_map_data.size) }
around do |example|
begin
tempfile.write(object_map_data)
tempfile.close
...@@ -45,7 +44,6 @@
ensure
tempfile.unlink
end
end
it 'removes internal references' do
stub_remote_url_200(url, tempfile.path)
...
...@@ -25,7 +25,7 @@ describe Gitlab::RequestContext do
[200, {}, ["Hello"]]
end
Rails.application.middleware.build(endpoint).call(env)
described_class.new(endpoint).call(env)
expect(client_ip).to eq(load_balancer_ip)
end
...
...@@ -97,6 +97,12 @@ describe GoogleApi::CloudPlatform::Client do
"node_config": {
"machine_type": machine_type
},
"master_auth": {
"username": "admin",
"client_certificate_config": {
issue_client_certificate: true
}
},
"legacy_abac": { "legacy_abac": {
"enabled": true "enabled": true
} }
...@@ -122,6 +128,12 @@ describe GoogleApi::CloudPlatform::Client do ...@@ -122,6 +128,12 @@ describe GoogleApi::CloudPlatform::Client do
"node_config": { "node_config": {
"machine_type": machine_type "machine_type": machine_type
}, },
"master_auth": {
"username": "admin",
"client_certificate_config": {
issue_client_certificate: true
}
},
"legacy_abac": { "legacy_abac": {
"enabled": false "enabled": false
} }
......
...@@ -76,15 +76,10 @@ describe GroupPolicy do
context 'with no user and public project' do
let(:project) { create(:project, :public) }
let(:user) { create(:user) }
let(:current_user) { nil }
before do
Projects::GroupLinks::CreateService.new(
project,
user,
link_group_access: ProjectGroupLink::DEVELOPER
).execute(group)
create(:project_group_link, project: project, group: group)
end
it { expect_disallowed(:read_group) }
...@@ -96,11 +91,7 @@ describe GroupPolicy do
let(:current_user) { create(:user) }
before do
Projects::GroupLinks::CreateService.new(
project,
user,
link_group_access: ProjectGroupLink::DEVELOPER
).execute(group)
create(:project_group_link, project: project, group: group)
end
it { expect_disallowed(:read_group) }
...
...@@ -44,12 +44,10 @@ describe 'Puma' do
end
after(:all) do
begin
WebMock.disable_net_connect!(allow_localhost: true)
Process.kill('TERM', @puma_master_pid)
rescue Errno::ESRCH
end
end
def wait_puma_boot!(master_pid, ready_file)
# We have seen the boot timeout after 2 minutes in CI so let's set it to 5 minutes.
...
...@@ -17,6 +17,10 @@ describe "Groups", "routing" do
expect(get("/#{group_path}")).to route_to('groups#show', id: group_path)
end
it "to #details" do
expect(get("/groups/#{group_path}/-/details")).to route_to('groups#details', id: group_path)
end
it "to #activity" do it "to #activity" do
expect(get("/groups/#{group_path}/-/activity")).to route_to('groups#activity', id: group_path) expect(get("/groups/#{group_path}/-/activity")).to route_to('groups#activity', id: group_path)
end end
......
...@@ -128,11 +128,9 @@ describe Projects::DestroyService do
it 'keeps project team intact upon an error' do
perform_enqueued_jobs do
begin
destroy_project(project, user, {})
rescue ::Redis::CannotConnectError
end
end
expect(project.team.members.count).to eq 2
end
...
module SchemaPath
def self.expand(schema, dir = '')
def self.expand(schema, dir = nil)
if Gitlab.ee? && dir.nil?
ee_path = expand(schema, 'ee')
return ee_path if File.exist?(ee_path)
end
Rails.root.join(dir, 'spec', "fixtures/api/schemas/#{schema}.json").to_s
Rails.root.join(dir.to_s, 'spec', "fixtures/api/schemas/#{schema}.json").to_s
end
end
RSpec::Matchers.define :match_response_schema do |schema, dir: '', **options|
RSpec::Matchers.define :match_response_schema do |schema, dir: nil, **options|
match do |response|
@errors = JSON::Validator.fully_validate(
SchemaPath.expand(schema, dir), response.body, options)
...@@ -18,8 +24,16 @@ RSpec::Matchers.define :match_response_schema do |schema, dir: '', **options|
end
end
RSpec::Matchers.define :match_schema do |schema, dir: '', **options|
RSpec::Matchers.define :match_schema do |schema, dir: nil, **options|
match do |data|
JSON::Validator.validate!(SchemaPath.expand(schema, dir), data, options)
@errors = JSON::Validator.fully_validate(
SchemaPath.expand(schema, dir), data, options)
@errors.empty?
end
failure_message do |response|
"didn't match the schema defined by #{SchemaPath.expand(schema, dir)}" \
" The validation errors were:\n#{@errors.join("\n")}"
end
end
...@@ -18,13 +18,11 @@ module GraphqlHelpers
# Runs a block inside a BatchLoader::Executor wrapper
def batch(max_queries: nil, &blk)
wrapper = proc do
begin
BatchLoader::Executor.ensure_current
yield
ensure
BatchLoader::Executor.clear_current
end
end
if max_queries
result = nil
...
...@@ -23,7 +23,6 @@ module StubObjectStorage
Fog.mock!
::Fog::Storage.new(connection_params).tap do |connection|
begin
connection.directories.create(key: remote_directory)
# Cleanup remaining files
...@@ -33,7 +32,6 @@
rescue Excon::Error::Conflict
end
end
end
def stub_artifacts_object_storage(**params)
stub_object_storage_uploader(config: Gitlab.config.artifacts.object_store,
...
...@@ -202,13 +202,11 @@ module TestEnv
socket = Gitlab::GitalyClient.address('default').sub('unix:', '')
Integer(sleep_time / sleep_interval).times do
begin
Socket.unix(socket)
return
rescue
sleep sleep_interval
end
end
raise "could not connect to gitaly at #{socket.inspect} after #{sleep_time} seconds"
end
...
...@@ -4,7 +4,7 @@ require 'spec_helper'
describe ShaValidator do
let(:validator) { described_class.new(attributes: [:base_commit_sha]) }
let(:merge_diff) { build(:merge_request_diff) }
let!(:merge_diff) { build(:merge_request_diff) }
subject { validator.validate_each(merge_diff, :base_commit_sha, value) }
...@@ -12,6 +12,8 @@ describe ShaValidator do
let(:value) { nil }
it 'does not add any error if value is empty' do
expect(Commit).not_to receive(:valid_hash?)
subject
expect(merge_diff.errors).to be_empty
...@@ -21,7 +23,9 @@ describe ShaValidator do
context 'with valid sha' do
let(:value) { Digest::SHA1.hexdigest(SecureRandom.hex) }
it 'does not add any error if value is empty' do
it 'does not add any error' do
expect(Commit).to receive(:valid_hash?).and_call_original
subject
expect(merge_diff.errors).to be_empty
...@@ -32,6 +36,7 @@ describe ShaValidator do
let(:value) { 'foo' }
it 'adds error to the record' do
expect(Commit).to receive(:valid_hash?).and_call_original
expect(merge_diff.errors).to be_empty
subject
...