Commit 800ab47a authored by Filipa Lacerda

Merge branch 'master' into 22643-manual-job-page

* master: (50 commits)
  Prevent some specs from mangling the gitlab-shell checkout
  Line up search dropdown with other nav dropdowns
  Fix onion-skin re-entering state
  Remove related links in MR widget when empty state
  Show inline edit button for issues
  Fix tags in the Activity tab not being clickable
  Fix shortcut links on help page
  Don't link LFS-objects multiple times.
  [CE->EE] Fix spec/lib/gitlab/git/gitlab_projects_spec.rb
  Tidy up the documentation of Gitlab HA/Gitlab Application
  Make sure two except won't overwrite each other
  Update axios.md
  Remove transitionend event from GL dropdown
  Preserve gem path so that we use the same gems
  Load commit in batches for pipelines#index
  BlobViewer::PackageJson - if private link to homepage
  Do not generate links for private NPM modules in blob view
  Fix missing WHERE clause in 20171106135924_issues_milestone_id_foreign_key migration
  Inverse the has_multiple_clusters? helper usage
  Remove block styling from search dropdown
  ...
parents 572de0c1 febb0b9a
image: "dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.5-golang-1.8-git-2.14-chrome-63.0-node-8.x-yarn-1.2-postgresql-9.6"

+.dedicated-runner: &dedicated-runner
+  retry: 1
+  tags:
+    - gitlab-org

.default-cache: &default-cache
  key: "ruby-235-with-yarn"
  paths:
@@ -42,11 +47,6 @@ stages:
  - post-cleanup

# Predefined scopes
-.dedicated-runner: &dedicated-runner
-  retry: 1
-  tags:
-    - gitlab-org

.tests-metadata-state: &tests-metadata-state
  <<: *dedicated-runner
  variables:
@@ -80,11 +80,15 @@ stages:
  except:
    - /(^qa[\/-].*|.*-qa$)/

+.except-docs-and-qa: &except-docs-and-qa
+  except:
+    - /(^docs[\/-].*|.*-docs$)/
+    - /(^qa[\/-].*|.*-qa$)/

.rspec-metadata: &rspec-metadata
  <<: *dedicated-runner
+  <<: *except-docs-and-qa
  <<: *pull-cache
-  <<: *except-docs
-  <<: *except-qa
  stage: test
  script:
    - JOB_NAME=( $CI_JOB_NAME )
@@ -121,9 +125,8 @@ stages:
.spinach-metadata: &spinach-metadata
  <<: *dedicated-runner
+  <<: *except-docs-and-qa
  <<: *pull-cache
-  <<: *except-docs
-  <<: *except-qa
  stage: test
  script:
    - JOB_NAME=( $CI_JOB_NAME )
@@ -162,6 +165,7 @@ stages:
# Trigger a package build in omnibus-gitlab repository
#
package-qa:
+  <<: *dedicated-runner
  image: ruby:2.4-alpine
  before_script: []
  stage: build
@@ -175,6 +179,7 @@ package-qa:
# Review docs base
.review-docs: &review-docs
+  <<: *dedicated-runner
  <<: *except-qa
  image: ruby:2.4-alpine
  before_script:
@@ -220,8 +225,7 @@ review-docs-cleanup:
# Retrieve knapsack and rspec_flaky reports
retrieve-tests-metadata:
  <<: *tests-metadata-state
-  <<: *except-docs
-  <<: *except-qa
+  <<: *except-docs-and-qa
  stage: prepare
  cache:
    key: tests_metadata
@@ -284,9 +288,9 @@ flaky-examples-check:
    - scripts/detect-new-flaky-examples $NEW_FLAKY_SPECS_REPORT

setup-test-env:
-  <<: *use-pg
  <<: *dedicated-runner
  <<: *except-docs
+  <<: *use-pg
  stage: prepare
  cache:
    <<: *default-cache
@@ -375,19 +379,18 @@ spinach-mysql 3 4: *spinach-metadata-mysql
    SETUP_DB: "false"

.rake-exec: &rake-exec
-  <<: *ruby-static-analysis
  <<: *dedicated-runner
-  <<: *except-docs
-  <<: *except-qa
+  <<: *except-docs-and-qa
  <<: *pull-cache
+  <<: *ruby-static-analysis
  stage: test
  script:
    - bundle exec rake $CI_JOB_NAME

static-analysis:
-  <<: *ruby-static-analysis
  <<: *dedicated-runner
  <<: *except-docs
+  <<: *ruby-static-analysis
  stage: test
  script:
    - scripts/static-analysis
@@ -441,8 +444,7 @@ ee_compat_check:
# DB migration, rollback, and seed jobs
.db-migrate-reset: &db-migrate-reset
  <<: *dedicated-runner
-  <<: *except-docs
-  <<: *except-qa
+  <<: *except-docs-and-qa
  <<: *pull-cache
  stage: test
  script:
@@ -456,11 +458,16 @@ db:migrate:reset-mysql:
  <<: *db-migrate-reset
  <<: *use-mysql

+db:check-schema-pg:
+  <<: *db-migrate-reset
+  <<: *use-pg
+  script:
+    - source scripts/schema_changed.sh

.migration-paths: &migration-paths
  <<: *dedicated-runner
+  <<: *except-docs-and-qa
  <<: *pull-cache
-  <<: *except-docs
-  <<: *except-qa
  stage: test
  variables:
    SETUP_DB: "false"
@@ -486,8 +493,7 @@ migration:path-mysql:
.db-rollback: &db-rollback
  <<: *dedicated-runner
-  <<: *except-docs
-  <<: *except-qa
+  <<: *except-docs-and-qa
  <<: *pull-cache
  stage: test
  script:
@@ -504,8 +510,7 @@ db:rollback-mysql:
.db-seed_fu: &db-seed_fu
  <<: *dedicated-runner
-  <<: *except-docs
-  <<: *except-qa
+  <<: *except-docs-and-qa
  <<: *pull-cache
  stage: test
  variables:
@@ -530,17 +535,10 @@ db:seed_fu-mysql:
  <<: *db-seed_fu
  <<: *use-mysql

-db:check-schema-pg:
-  <<: *db-migrate-reset
-  <<: *use-pg
-  script:
-    - source scripts/schema_changed.sh

# Frontend-related jobs
gitlab:assets:compile:
  <<: *dedicated-runner
-  <<: *except-docs
-  <<: *except-qa
+  <<: *except-docs-and-qa
  <<: *pull-cache
  stage: test
  dependencies: []
@@ -561,11 +559,10 @@ gitlab:assets:compile:
    - webpack-report/

karma:
-  <<: *use-pg
  <<: *dedicated-runner
-  <<: *except-docs
-  <<: *except-qa
+  <<: *except-docs-and-qa
  <<: *pull-cache
+  <<: *use-pg
  stage: test
  variables:
    BABEL_ENV: "coverage"
@@ -604,6 +601,7 @@ codequality:
    paths: [codeclimate.json]

qa:internal:
+  <<: *dedicated-runner
  <<: *except-docs
  stage: test
  variables:
@@ -616,8 +614,7 @@ qa:internal:
coverage:
  <<: *dedicated-runner
-  <<: *except-docs
-  <<: *except-qa
+  <<: *except-docs-and-qa
  <<: *pull-cache
  stage: post-test
  services: []
@@ -636,8 +633,7 @@ coverage:
lint:javascript:report:
  <<: *dedicated-runner
-  <<: *except-docs
-  <<: *except-qa
+  <<: *except-docs-and-qa
  <<: *pull-cache
  stage: post-test
  dependencies:
@@ -695,9 +691,9 @@ cache gems:
    - master@gitlab-org/gitlab-ee

gitlab_git_test:
+  <<: *dedicated-runner
+  <<: *except-docs-and-qa
  <<: *pull-cache
-  <<: *except-docs
-  <<: *except-qa
  variables:
    SETUP_DB: "false"
  script:
......
@@ -263,7 +263,7 @@ gem 'gettext_i18n_rails', '~> 1.8.0'
gem 'gettext_i18n_rails_js', '~> 1.2.0'
gem 'gettext', '~> 3.2.2', require: false, group: :development

-gem 'batch-loader'
+gem 'batch-loader', '~> 1.2.1'

# Perf bar
gem 'peek', '~> 1.0.1'
......
@@ -78,7 +78,7 @@ GEM
      thread_safe (~> 0.3, >= 0.3.1)
    babosa (1.0.2)
    base32 (0.3.2)
-    batch-loader (1.1.1)
+    batch-loader (1.2.1)
    bcrypt (3.1.11)
    bcrypt_pbkdf (1.0.0)
    benchmark-ips (2.3.0)
@@ -988,7 +988,7 @@ DEPENDENCIES
  awesome_print (~> 1.2.0)
  babosa (~> 1.0.2)
  base32 (~> 0.3.0)
-  batch-loader
+  batch-loader (~> 1.2.1)
  bcrypt_pbkdf (~> 1.0)
  benchmark-ips (~> 2.3.0)
  better_errors (~> 2.1.0)
......
@@ -176,6 +176,7 @@ export default class ImageFile {
          left: dragTrackWidth
        });

+        $frameAdded.css('opacity', 1);
        framePadding = parseInt($frameAdded.css('right').replace('px', ''), 10);

        _this.initDraggable($dragger, framePadding, function(e, left) {
......
/* global CommentsStore */
-/* global notes */

import Vue from 'vue';
import collapseIcon from '../icons/collapse_icon.svg';
+import Notes from '../../notes';
import userAvatarImage from '../../vue_shared/components/user_avatar/user_avatar_image.vue';

const DiffNoteAvatars = Vue.extend({
@@ -129,7 +129,7 @@ const DiffNoteAvatars = Vue.extend({
  },
  methods: {
    clickedAvatar(e) {
-      notes.onAddDiffNote(e);
+      Notes.instance.onAddDiffNote(e);

      // Toggle the active state of the toggle all button
      this.toggleDiscussionsToggleState();
......
import Mousetrap from 'mousetrap';

function addMousetrapClick(el, key) {
  el.addEventListener('click', () => Mousetrap.trigger(key));
}

function domContentLoaded() {
  addMousetrapClick(document.querySelector('.js-trigger-shortcut'), '?');
  addMousetrapClick(document.querySelector('.js-trigger-search-bar'), 's');
}

document.addEventListener('DOMContentLoaded', domContentLoaded);
@@ -300,7 +300,7 @@ GitLabDropdown = (function() {
      return function(data) {
        _this.fullData = data;
        _this.parseData(_this.fullData);
-        _this.focusTextInput(true);
+        _this.focusTextInput();
        if (_this.options.filterable && _this.filter && _this.filter.input && _this.filter.input.val() && _this.filter.input.val().trim() !== '') {
          return _this.filter.input.trigger('input');
        }
@@ -790,9 +790,8 @@ GitLabDropdown = (function() {
    return [selectedObject, isMarking];
  };

-  GitLabDropdown.prototype.focusTextInput = function(triggerFocus = false) {
+  GitLabDropdown.prototype.focusTextInput = function() {
    if (this.options.filterable) {
-      this.dropdown.one('transitionend', () => {
      const initialScrollTop = $(window).scrollTop();

      if (this.dropdown.is('.open')) {
@@ -802,13 +801,6 @@ GitLabDropdown = (function() {
        if ($(window).scrollTop() < initialScrollTop) {
          $(window).scrollTop(initialScrollTop);
        }
-      });
-
-      if (triggerFocus) {
-        // This triggers after a ajax request
-        // in case of slow requests, the dropdown transition could already be finished
-        this.dropdown.trigger('transitionend');
-      }
    }
  };
......
-/* global Notes */
+import Notes from './notes';

export default () => {
  const dataEl = document.querySelector('.js-notes-data');
@@ -10,5 +10,7 @@ export default () => {
    autocomplete,
  } = JSON.parse(dataEl.innerHTML);

-  window.notes = new Notes(notesUrl, notesIds, now, diffView, autocomplete);
+  // Create a singleton so that we don't need to assign into the window object;
+  // we can just access the current instance with Notes.instance.
+  Notes.initialize(notesUrl, notesIds, now, diffView, autocomplete);
};
@@ -32,7 +32,7 @@ export default {
    showInlineEditButton: {
      type: Boolean,
      required: false,
-      default: false,
+      default: true,
    },
    showDeleteButton: {
      type: Boolean,
......
@@ -79,7 +79,7 @@
      v-tooltip
      v-if="showInlineEditButton && canUpdate"
      type="button"
-      class="btn btn-default btn-edit btn-svg"
+      class="btn btn-default btn-edit btn-svg js-issuable-edit"
      v-html="pencilIcon"
      title="Edit title and description"
      data-placement="bottom"
......
import Vue from 'vue';
-import eventHub from './event_hub';
import issuableApp from './components/app.vue';
import '../vue_shared/vue_resource_interceptor';
@@ -7,12 +6,6 @@ document.addEventListener('DOMContentLoaded', () => {
  const initialDataEl = document.getElementById('js-issuable-app-initial-data');
  const props = JSON.parse(initialDataEl.innerHTML.replace(/&quot;/g, '"'));

-  $('.js-issuable-edit').on('click', (e) => {
-    e.preventDefault();
-    eventHub.$emit('open.form');
-  });
-
  return new Vue({
    el: document.getElementById('js-issuable-app'),
    components: {
......
@@ -45,9 +45,7 @@ import './layout_nav';
import LazyLoader from './lazy_loader';
import './line_highlighter';
import initLogoAnimation from './logo';
-import './merge_request_tabs';
import './milestone_select';
-import './notes';
import './preview_markdown';
import './projects_dropdown';
import './render_gfm';
......
/* eslint-disable no-new, class-methods-use-this */
-/* global notes */

import Cookies from 'js-cookie';
import Flash from './flash';
@@ -16,6 +15,7 @@ import initDiscussionTab from './image_diff/init_discussion_tab';
import Diff from './diff';
import { localTimeAgo } from './lib/utils/datetime_utility';
import syntaxHighlight from './syntax_highlight';
+import Notes from './notes';

/* eslint-disable max-len */
// MergeRequestTabs
@@ -324,7 +324,7 @@ export default class MergeRequestTabs {
    if (anchor && anchor.length > 0) {
      const notesContent = anchor.closest('.notes_content');
      const lineType = notesContent.hasClass('new') ? 'new' : 'old';
-      notes.toggleDiffNote({
+      Notes.instance.toggleDiffNote({
        target: anchor,
        lineType,
        forceShow: true,
......
@@ -37,6 +37,12 @@ const MAX_VISIBLE_COMMIT_LIST_COUNT = 3;
const REGEX_QUICK_ACTIONS = /^\/\w+.*$/gm;

export default class Notes {
+  static initialize(notes_url, note_ids, last_fetched_at, view, enableGFM = true) {
+    if (!this.instance) {
+      this.instance = new Notes(notes_url, note_ids, last_fetched_at, view, enableGFM);
+    }
+  }
+
  constructor(notes_url, note_ids, last_fetched_at, view, enableGFM = true) {
    this.updateTargetButtons = this.updateTargetButtons.bind(this);
    this.updateComment = this.updateComment.bind(this);
......
@@ -51,7 +51,10 @@ export default class Shortcuts {
  }

  onToggleHelp(e) {
+    if (e.preventDefault) {
      e.preventDefault();
+    }

    Shortcuts.toggleHelp(this.enabledHelp);
  }
@@ -112,6 +115,9 @@ export default class Shortcuts {
  static focusSearch(e) {
    $('#search').focus();

+    if (e.preventDefault) {
      e.preventDefault();
    }
+  }
}
@@ -62,7 +62,7 @@ export default {
      return this.mr.hasCI;
    },
    shouldRenderRelatedLinks() {
-      return !!this.mr.relatedLinks;
+      return !!this.mr.relatedLinks && !this.mr.isNothingToMergeState;
    },
    shouldRenderDeployments() {
      return this.mr.deployments.length;
......
+import { stateKey } from './state_maps';
+
export default function deviseState(data) {
  if (data.project_archived) {
-    return 'archived';
+    return stateKey.archived;
  } else if (data.branch_missing) {
-    return 'missingBranch';
+    return stateKey.missingBranch;
  } else if (!data.commits_count) {
-    return 'nothingToMerge';
+    return stateKey.nothingToMerge;
  } else if (this.mergeStatus === 'unchecked') {
-    return 'checking';
+    return stateKey.checking;
  } else if (data.has_conflicts) {
-    return 'conflicts';
+    return stateKey.conflicts;
  } else if (data.work_in_progress) {
-    return 'workInProgress';
+    return stateKey.workInProgress;
  } else if (this.onlyAllowMergeIfPipelineSucceeds && this.isPipelineFailed) {
-    return 'pipelineFailed';
+    return stateKey.pipelineFailed;
  } else if (this.hasMergeableDiscussionsState) {
-    return 'unresolvedDiscussions';
+    return stateKey.unresolvedDiscussions;
  } else if (this.isPipelineBlocked) {
-    return 'pipelineBlocked';
+    return stateKey.pipelineBlocked;
  } else if (this.hasSHAChanged) {
-    return 'shaMismatch';
+    return stateKey.shaMismatch;
  } else if (this.mergeWhenPipelineSucceeds) {
-    return this.mergeError ? 'autoMergeFailed' : 'mergeWhenPipelineSucceeds';
+    return this.mergeError ? stateKey.autoMergeFailed : stateKey.mergeWhenPipelineSucceeds;
  } else if (!this.canMerge) {
-    return 'notAllowedToMerge';
+    return stateKey.notAllowedToMerge;
  } else if (this.canBeMerged) {
-    return 'readyToMerge';
+    return stateKey.readyToMerge;
  }

  return null;
}
import Timeago from 'timeago.js';
import { getStateKey } from '../dependencies';
+import { stateKey } from './state_maps';
import { formatDate } from '../../lib/utils/datetime_utility';

export default class MergeRequestStore {
@@ -120,6 +121,10 @@ export default class MergeRequestStore {
    }
  }

+  get isNothingToMergeState() {
+    return this.state === stateKey.nothingToMerge;
+  }
+
  static getEventObject(event) {
    return {
      author: MergeRequestStore.getAuthorObject(event),
......
@@ -31,6 +31,23 @@ const statesToShowHelpWidget = [
  'autoMergeFailed',
];

+export const stateKey = {
+  archived: 'archived',
+  missingBranch: 'missingBranch',
+  nothingToMerge: 'nothingToMerge',
+  checking: 'checking',
+  conflicts: 'conflicts',
+  workInProgress: 'workInProgress',
+  pipelineFailed: 'pipelineFailed',
+  unresolvedDiscussions: 'unresolvedDiscussions',
+  pipelineBlocked: 'pipelineBlocked',
+  shaMismatch: 'shaMismatch',
+  autoMergeFailed: 'autoMergeFailed',
+  mergeWhenPipelineSucceeds: 'mergeWhenPipelineSucceeds',
+  notAllowedToMerge: 'notAllowedToMerge',
+  readyToMerge: 'readyToMerge',
+};
+
export default {
  stateToComponentMap,
  statesToShowHelpWidget,
......
@@ -9,12 +9,6 @@
    padding-left: $contextual-sidebar-width;
  }

-  // Override position: absolute
-  .right-sidebar {
-    position: fixed;
-    height: calc(100% - #{$header-height});
-  }
-
  .issues-bulk-update.right-sidebar.right-sidebar-expanded .issuable-sidebar-header {
    padding: 10px 0 15px;
  }
......
@@ -16,27 +16,18 @@
@mixin set-visible {
  transform: translateY(0);
-  visibility: visible;
-  opacity: 1;
-  transition-duration: 100ms, 150ms, 25ms;
-  transition-delay: 35ms, 50ms, 25ms;
+  display: block;
}

@mixin set-invisible {
  transform: translateY(-10px);
-  visibility: hidden;
-  opacity: 0;
-  transition-property: opacity, transform, visibility;
-  transition-duration: 70ms, 250ms, 250ms;
-  transition-timing-function: linear, $dropdown-animation-timing;
-  transition-delay: 25ms, 50ms, 0ms;
+  display: none;
}

.open {
  .dropdown-menu,
  .dropdown-menu-nav {
    @include set-visible;
-    display: block;
    min-height: 40px;

    @media (max-width: $screen-xs-max) {
@@ -55,6 +46,11 @@
  }
}

+// Get search dropdown to line up with other nav dropdowns
+.search-input-container .dropdown-menu {
+  margin-top: 11px;
+}
+
.dropdown-toggle {
  padding: 6px 8px 6px 10px;
  background-color: $white-light;
@@ -214,7 +210,6 @@
.dropdown-menu,
.dropdown-menu-nav {
  @include set-invisible;
-  display: block;
  position: absolute;
  width: auto;
  top: 100%;
......
@@ -90,11 +90,6 @@
.right-sidebar {
  border-left: 1px solid $border-color;
  height: calc(100% - #{$header-height});
-
-  &.affix {
-    position: fixed;
-    top: $header-height;
-  }
}

.with-performance-bar .right-sidebar.affix {
......
@@ -122,7 +122,7 @@
}

.right-sidebar {
-  position: absolute;
+  position: fixed;
  top: $header-height;
  bottom: 0;
  right: 0;
@@ -502,7 +502,7 @@
  top: $header-height + $performance-bar-height;

  .issuable-sidebar {
-    height: calc(100% - #{$header-height} - #{$performance-bar-height});
+    height: calc(100% - #{$performance-bar-height});
  }
}
......
@@ -108,13 +108,6 @@ input[type="checkbox"]:hover {
  // Custom dropdown positioning
  .dropdown-menu {
-    transition-property: opacity, transform;
-    transition-duration: 250ms, 250ms;
-    transition-delay: 0ms, 25ms;
-    transition-timing-function: $dropdown-animation-timing;
-    transform: translateY(0);
-    opacity: 0;
-    display: block;
    left: -5px;
  }
@@ -152,13 +145,6 @@ input[type="checkbox"]:hover {
      background-color: $nav-badge-bg;
      border-color: $border-color;
    }
-
-    .dropdown-menu {
-      transition-duration: 100ms, 75ms;
-      transition-delay: 75ms, 100ms;
-      transform: translateY(7px);
-      opacity: 1;
-    }
  }

  &.has-value {
......
@@ -55,7 +55,6 @@ module IssuableActions
  def destroy
    Issuable::DestroyService.new(issuable.project, current_user).execute(issuable)
-    TodoService.new.destroy_issuable(issuable, current_user)
    name = issuable.human_class_name
    flash[:notice] = "The #{name} was successfully deleted."
......
@@ -39,6 +39,7 @@ class Projects::Clusters::GcpController < Projects::ApplicationController
      params.require(:cluster).permit(
        :enabled,
        :name,
+        :environment_scope,
        provider_gcp_attributes: [
          :gcp_project_id,
          :zone,
......
@@ -26,6 +26,7 @@ class Projects::Clusters::UserController < Projects::ApplicationController
      params.require(:cluster).permit(
        :enabled,
        :name,
+        :environment_scope,
        platform_kubernetes_attributes: [
          :namespace,
          :api_url,
......
@@ -87,6 +87,7 @@ class Projects::ClustersController < Projects::ApplicationController
    if cluster.managed?
      params.require(:cluster).permit(
        :enabled,
+        :environment_scope,
        platform_kubernetes_attributes: [
          :namespace
        ]
@@ -95,6 +96,7 @@ class Projects::ClustersController < Projects::ApplicationController
      params.require(:cluster).permit(
        :enabled,
        :name,
+        :environment_scope,
        platform_kubernetes_attributes: [
          :api_url,
          :token,
......
class Projects::PipelineSchedulesController < Projects::ApplicationController
  before_action :schedule, except: [:index, :new, :create]

+  before_action :play_rate_limit, only: [:play]
+  before_action :authorize_play_pipeline_schedule!, only: [:play]
  before_action :authorize_read_pipeline_schedule!
  before_action :authorize_create_pipeline_schedule!, only: [:new, :create]
-  before_action :authorize_update_pipeline_schedule!, except: [:index, :new, :create]
+  before_action :authorize_update_pipeline_schedule!, except: [:index, :new, :create, :play]
  before_action :authorize_admin_pipeline_schedule!, only: [:destroy]

  def index
@@ -40,6 +42,18 @@ class Projects::PipelineSchedulesController < Projects::ApplicationController
    end
  end

+  def play
+    job_id = RunPipelineScheduleWorker.perform_async(schedule.id, current_user.id)
+
+    if job_id
+      flash[:notice] = "Successfully scheduled a pipeline to run. Go to the <a href=\"#{project_pipelines_path(@project)}\">Pipelines page</a> for details.".html_safe
+    else
+      flash[:alert] = 'Unable to schedule a pipeline to run immediately'
+    end
+
+    redirect_to pipeline_schedules_path(@project)
+  end
+
  def take_ownership
    if schedule.update(owner: current_user)
      redirect_to pipeline_schedules_path(@project)
@@ -60,6 +74,17 @@ class Projects::PipelineSchedulesController < Projects::ApplicationController
  private

+  def play_rate_limit
+    return unless current_user
+
+    limiter = ::Gitlab::ActionRateLimiter.new(action: :play_pipeline_schedule)
+    return unless limiter.throttled?([current_user, schedule], 1)
+
+    flash[:alert] = 'You cannot play this scheduled pipeline at the moment. Please wait a minute.'
+    redirect_to pipeline_schedules_path(@project)
+  end
+
  def schedule
    @schedule ||= project.pipeline_schedules.find(params[:id])
  end
@@ -70,6 +95,10 @@ class Projects::PipelineSchedulesController < Projects::ApplicationController
      variables_attributes: [:id, :key, :value, :_destroy] )
  end

+  def authorize_play_pipeline_schedule!
+    return access_denied! unless can?(current_user, :play_pipeline_schedule, schedule)
+  end
+
  def authorize_update_pipeline_schedule!
    return access_denied! unless can?(current_user, :update_pipeline_schedule, schedule)
  end
......
@@ -29,6 +29,8 @@ class Projects::PipelinesController < Projects::ApplicationController
    @pipelines_count = PipelinesFinder
      .new(project).execute.count

+    @pipelines.map(&:commit) # List commits for batch loading
+
    respond_to do |format|
      format.html
      format.json do
......
module ClustersHelper
  def has_multiple_clusters?(project)
    false
  end
end
@@ -182,6 +182,11 @@ module GitlabRoutingHelper
    edit_project_pipeline_schedule_path(project, schedule)
  end

+  def play_pipeline_schedule_path(schedule, *args)
+    project = schedule.project
+    play_project_pipeline_schedule_path(project, schedule, *args)
+  end
+
  def take_ownership_pipeline_schedule_path(schedule, *args)
    project = schedule.project
    take_ownership_project_pipeline_schedule_path(project, schedule, *args)
......
@@ -27,10 +27,17 @@ module BlobViewer
    private

-    def package_name_from_json(key)
-      prepare!
+    def json_data
+      @json_data ||= begin
+        prepare!
+        JSON.parse(blob.data)
+      rescue
+        {}
+      end
+    end

-      JSON.parse(blob.data)[key] rescue nil
+    def package_name_from_json(key)
+      json_data[key]
    end

    def package_name_from_method_call(name)
......
@@ -16,7 +16,25 @@ module BlobViewer
      @package_name ||= package_name_from_json('name')
    end

+    def package_type
+      private? ? 'private package' : super
+    end
+
    def package_url
+      private? ? homepage : npm_url
+    end
+
+    private
+
+    def private?
+      !!json_data['private']
+    end
+
+    def homepage
+      json_data['homepage']
+    end
+
+    def npm_url
      "https://www.npmjs.com/package/#{package_name}"
    end
  end
end
......
@@ -287,8 +287,12 @@ module Ci
      Ci::Pipeline.truncate_sha(sha)
    end

+    # NOTE: This is loaded lazily and will never be nil, even if the commit
+    # cannot be found.
+    #
+    # Use constructs like: `pipeline.commit.present?`
    def commit
-      @commit ||= project.commit_by(oid: sha)
+      @commit ||= Commit.lazy(project, sha)
    end

    def branch?
@@ -338,12 +342,9 @@ module Ci
    end

    def latest?
-      return false unless ref
-
-      commit = project.commit(ref)
-      return false unless commit
+      return false unless ref && commit.present?

-      commit.sha == sha
+      project.commit(ref) == commit
    end

    def retried
......
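A minimal sketch of how the lazy `commit` above is meant to be used, assuming the `Commit.lazy` and `Repository#commits_by` methods introduced elsewhere in this diff; the project/pipeline lookups are purely illustrative:

```ruby
# Illustrative batch-loading walkthrough (the lookups are examples, not fixtures).
project = Project.find_by_full_path('group/project')

# Each pipeline.commit returns a BatchLoader proxy immediately; nothing hits Git yet.
pipelines = Ci::Pipeline.where(project: project).order(id: :desc).limit(30).to_a
commits = pipelines.map(&:commit)

# The first real method call on any proxy resolves the whole batch with a single
# Repository#commits_by(oids: [...]) call per project, instead of one lookup each.
commits.each do |commit|
  puts commit.present? ? commit.title : 'commit not found'
end
```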
@@ -86,6 +86,20 @@ class Commit
    def valid_hash?(key)
      !!(/\A#{COMMIT_SHA_PATTERN}\z/ =~ key)
    end

+    def lazy(project, oid)
+      BatchLoader.for({ project: project, oid: oid }).batch do |items, loader|
+        items_by_project = items.group_by { |i| i[:project] }
+
+        items_by_project.each do |project, commit_ids|
+          oids = commit_ids.map { |i| i[:oid] }
+
+          project.repository.commits_by(oids: oids).each do |commit|
+            loader.call({ project: commit.project, oid: commit.id }, commit) if commit
+          end
+        end
+      end
+    end
  end

  attr_accessor :raw
@@ -103,7 +117,7 @@ class Commit
  end

  def ==(other)
-    (self.class === other) && (raw == other.raw)
+    other.is_a?(self.class) && raw == other.raw
  end

  def self.reference_prefix
@@ -224,8 +238,8 @@ class Commit
    notes.includes(:author)
  end

-  def method_missing(m, *args, &block)
-    @raw.__send__(m, *args, &block) # rubocop:disable GitlabSecurity/PublicSend
+  def method_missing(method, *args, &block)
+    @raw.__send__(method, *args, &block) # rubocop:disable GitlabSecurity/PublicSend
  end

  def respond_to_missing?(method, include_private = false)
......
@@ -22,12 +22,9 @@ class DiffDiscussion < Discussion
  def merge_request_version_params
    return unless for_merge_request?
-    return {} if active?

-    if on_merge_request_commit?
-      { commit_id: commit_id }
-    else
-      noteable.version_params_for(position.diff_refs)
+    version_params.tap do |params|
+      params[:commit_id] = commit_id if on_merge_request_commit?
    end
  end
@@ -37,4 +34,12 @@ class DiffDiscussion < Discussion
      position: position.to_json
    )
  end
+
+  private
+
+  def version_params
+    return {} if active?
+
+    noteable.version_params_for(position.diff_refs)
+  end
end
@@ -118,6 +118,18 @@ class Repository
    @commit_cache[oid] = find_commit(oid)
  end

+  def commits_by(oids:)
+    return [] unless oids.present?
+
+    commits = Gitlab::Git::Commit.batch_by_oid(raw_repository, oids)
+
+    if commits.present?
+      Commit.decorate(commits, @project)
+    else
+      []
+    end
+  end
+
  def commits(ref, path: nil, limit: nil, offset: nil, skip_merges: false, after: nil, before: nil)
    options = {
      repo: raw_repository,
@@ -221,6 +233,12 @@ class Repository
    branch_names.include?(branch_name)
  end

+  def tag_exists?(tag_name)
+    return false unless raw_repository
+
+    tag_names.include?(tag_name)
+  end
+
  def ref_exists?(ref)
    !!raw_repository&.ref_exists?(ref)
  rescue ArgumentError
......
@@ -2,16 +2,18 @@ module Ci
  class PipelinePolicy < BasePolicy
    delegate { @subject.project }

-    condition(:protected_ref) do
-      access = ::Gitlab::UserAccess.new(@user, project: @subject.project)
-      if @subject.tag?
-        !access.can_create_tag?(@subject.ref)
+    condition(:protected_ref) { ref_protected?(@user, @subject.project, @subject.tag?, @subject.ref) }
+
+    rule { protected_ref }.prevent :update_pipeline
+
+    def ref_protected?(user, project, tag, ref)
+      access = ::Gitlab::UserAccess.new(user, project: project)
+
+      if tag
+        !access.can_create_tag?(ref)
      else
-        !access.can_update_branch?(@subject.ref)
+        !access.can_update_branch?(ref)
      end
    end
-
-    rule { protected_ref }.prevent :update_pipeline
  end
end
@@ -2,13 +2,23 @@ module Ci
  class PipelineSchedulePolicy < PipelinePolicy
    alias_method :pipeline_schedule, :subject

+    condition(:protected_ref) do
+      ref_protected?(@user, @subject.project, @subject.project.repository.tag_exists?(@subject.ref), @subject.ref)
+    end
+
    condition(:owner_of_schedule) do
      can?(:developer_access) && pipeline_schedule.owned_by?(@user)
    end

+    rule { can?(:developer_access) }.policy do
+      enable :play_pipeline_schedule
+    end
+
    rule { can?(:master_access) | owner_of_schedule }.policy do
      enable :update_pipeline_schedule
      enable :admin_pipeline_schedule
    end
+
+    rule { protected_ref }.prevent :play_pipeline_schedule
  end
end
module Issuable
  class DestroyService < IssuableBaseService
    def execute(issuable)
+      TodoService.new.destroy_target(issuable) do |issuable|
        if issuable.destroy
          issuable.update_project_counter_caches
        end
+      end
    end
  end
end
module Notes
  class DestroyService < BaseService
    def execute(note)
+      TodoService.new.destroy_target(note) do |note|
        note.destroy
+      end
    end
  end
end
@@ -5,7 +5,7 @@ module Projects
      if fork_source = @project.fork_source
        fork_source.lfs_objects.find_each do |lfs_object|
-          lfs_object.projects << @project
+          lfs_object.projects << @project unless lfs_object.projects.include?(@project)
        end

        refresh_forks_count(fork_source)
......
@@ -31,12 +31,20 @@ class TodoService
    mark_pending_todos_as_done(issue, current_user)
  end

-  # When we destroy an issuable we should:
+  # When we destroy a todo target we should:
  #
-  # * refresh the todos count cache for the current user
+  # * refresh the todos count cache for all users with todos on the target
  #
-  def destroy_issuable(issuable, user)
-    user.update_todos_count_cache
+  # This needs to yield back to the caller to destroy the target, because it
+  # collects the todo users before the todos themselves are deleted, then
+  # updates the todo counts for those users.
+  #
+  def destroy_target(target)
+    todo_users = User.where(id: target.todos.pending.select(:user_id)).to_a
+
+    yield target
+
+    todo_users.each(&:update_todos_count_cache)
  end

  # When we reassign an issue we should:
......
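The comment above describes the new `destroy_target` contract; a short sketch of a caller following the same pattern as the two destroy services changed in this diff (the issue lookup is only an example):

```ruby
# TodoService#destroy_target collects the users with pending todos on the
# target *before* yielding, because destroying the target also removes its
# todos; afterwards it refreshes those users' cached todo counts.
issue = Issue.find(123) # illustrative target; any model with todos works

TodoService.new.destroy_target(issue) do |issue|
  issue.destroy
end
```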
@@ -7,7 +7,8 @@
  %span.pushed #{event.action_name} #{event.ref_type}
  %strong
    - commits_link = project_commits_path(project, event.ref_name)
-    = link_to_if project.repository.branch_exists?(event.ref_name), event.ref_name, commits_link, class: 'ref-name'
+    - should_link = event.tag? ? project.repository.tag_exists?(event.ref_name) : project.repository.branch_exists?(event.ref_name)
+    = link_to_if should_link, event.ref_name, commits_link, class: 'ref-name'
= render "events/event_scope", event: event
......
+= webpack_bundle_tag 'docs'
+
%div
  - if current_application_settings.help_page_text.present?
    = markdown_field(current_application_settings, :help_page_text)
@@ -37,8 +39,12 @@
      Quick help
    %ul.well-list
      %li= link_to 'See our website for getting help', support_url
-      %li= link_to 'Use the search bar on the top of this page', '#', onclick: 'Shortcuts.focusSearch(event)'
-      %li= link_to 'Use shortcuts', '#', onclick: 'Shortcuts.toggleHelp()'
+      %li
+        %button.btn-blank.btn-link.js-trigger-search-bar{ type: 'button' }
+          Use the search bar on the top of this page
+      %li
+        %button.btn-blank.btn-link.js-trigger-shortcut{ type: 'button' }
+          Use shortcuts
      - unless current_application_settings.help_page_hide_commercial_content?
        %li= link_to 'Get a support subscription', 'https://about.gitlab.com/pricing/'
        %li= link_to 'Compare GitLab editions', 'https://about.gitlab.com/features/#compare'
@@ -6,6 +6,6 @@
  - if viewer.package_name
    and defines a #{viewer.package_type} named
    %strong<
-      = link_to viewer.package_name, viewer.package_url, target: '_blank', rel: 'noopener noreferrer'
+      = link_to_if viewer.package_url.present?, viewer.package_name, viewer.package_url, target: '_blank', rel: 'noopener noreferrer'
  = link_to 'Learn more', viewer.manager_url, target: '_blank', rel: 'noopener noreferrer'
@@ -7,6 +7,9 @@
    .form-group
      = field.label :name, s_('ClusterIntegration|Cluster name')
      = field.text_field :name, class: 'form-control', placeholder: s_('ClusterIntegration|Cluster name')
+    .form-group
+      = field.label :environment_scope, s_('ClusterIntegration|Environment scope')
+      = field.text_field :environment_scope, class: 'form-control', readonly: !has_multiple_clusters?(@project), placeholder: s_('ClusterIntegration|Environment scope')
    = field.fields_for :provider_gcp, @cluster.provider_gcp do |provider_gcp_field|
      .form-group
......
@@ -8,6 +8,11 @@
= form_for @cluster, url: namespace_project_cluster_path(@project.namespace, @project, @cluster), as: :cluster do |field|
  = form_errors(@cluster)
+  .form-group
+    = field.label :environment_scope, s_('ClusterIntegration|Environment scope')
+    = field.text_field :environment_scope, class: 'form-control js-select-on-focus', readonly: !has_multiple_clusters?(@project), placeholder: s_('ClusterIntegration|Environment scope')
  = field.fields_for :platform_kubernetes, @cluster.platform_kubernetes do |platform_kubernetes_field|
    .form-group
      = platform_kubernetes_field.label :api_url, s_('ClusterIntegration|API URL')
......
@@ -3,6 +3,9 @@
  .form-group
    = field.label :name, s_('ClusterIntegration|Cluster name')
    = field.text_field :name, class: 'form-control', placeholder: s_('ClusterIntegration|Cluster name')
+  .form-group
+    = field.label :environment_scope, s_('ClusterIntegration|Environment scope')
+    = field.text_field :environment_scope, class: 'form-control', readonly: !has_multiple_clusters?(@project), placeholder: s_('ClusterIntegration|Environment scope')
  = field.fields_for :platform_kubernetes, @cluster.platform_kubernetes do |platform_kubernetes_field|
    .form-group
......
@@ -4,6 +4,10 @@
    = field.label :name, s_('ClusterIntegration|Cluster name')
    = field.text_field :name, class: 'form-control', placeholder: s_('ClusterIntegration|Cluster name')
+  .form-group
+    = field.label :environment_scope, s_('ClusterIntegration|Environment scope')
+    = field.text_field :environment_scope, class: 'form-control js-select-on-focus', readonly: !has_multiple_clusters?(@project), placeholder: s_('ClusterIntegration|Environment scope')
  = field.fields_for :platform_kubernetes, @cluster.platform_kubernetes do |platform_kubernetes_field|
    .form-group
      = platform_kubernetes_field.label :api_url, s_('ClusterIntegration|API URL')
......
@@ -39,8 +39,6 @@
        = icon('caret-down')
      .dropdown-menu.dropdown-menu-align-right.hidden-lg
        %ul
-          - if can_update_issue
-            %li= link_to 'Edit', edit_project_issue_path(@project, @issue), class: 'js-issuable-edit'
          - unless current_user == @issue.author
            %li= link_to 'Report abuse', new_abuse_report_path(user_id: @issue.author.id, ref_url: issue_url(@issue))
          - if can_update_issue
@@ -52,9 +50,6 @@
          %li.divider
          %li= link_to 'New issue', new_project_issue_path(@project), title: 'New issue', id: 'new_issue_link'

-    - if can_update_issue
-      = link_to 'Edit', edit_project_issue_path(@project, @issue), class: 'hidden-xs hidden-sm btn btn-grouped js-issuable-edit'
    = render 'shared/issuable/close_reopen_button', issuable: @issue, can_update: can_update_issue
    - if can_report_spam
......
@@ -26,10 +26,12 @@
      = pipeline_schedule.owner&.name
  %td
    .pull-right.btn-group
+      - if can?(current_user, :play_pipeline_schedule, pipeline_schedule)
+        = link_to play_pipeline_schedule_path(pipeline_schedule), method: :post, title: s_('Play'), class: 'btn' do
+          = icon('play')
      - if can?(current_user, :update_pipeline_schedule, pipeline_schedule)
        = link_to take_ownership_pipeline_schedule_path(pipeline_schedule), method: :post, title: s_('PipelineSchedules|Take ownership'), class: 'btn' do
          = s_('PipelineSchedules|Take ownership')
-      - if can?(current_user, :update_pipeline_schedule, pipeline_schedule)
        = link_to edit_pipeline_schedule_path(pipeline_schedule), title: _('Edit'), class: 'btn' do
          = icon('pencil')
      - if can?(current_user, :admin_pipeline_schedule, pipeline_schedule)
......
#js-pipeline-header-vue.pipeline-header-container

-- if @commit
+- if @commit.present?
  .commit-box
    %h3.commit-title
      = markdown(@commit.title, pipeline: :single_line)
@@ -8,7 +8,7 @@
      %pre.commit-description
        = preserve(markdown(@commit.description, pipeline: :single_line))

.info-well
  - if @commit.status
    .well-segment.pipeline-info
      .icon-container
......
@@ -8,11 +8,12 @@
      = image_tag 'illustrations/issues.svg'
  .col-xs-12
    .text-content
-      - if has_button && current_user
+      - if current_user
        %h4
          = _("The Issue Tracker is the place to add things that need to be improved or solved in a project")
        %p
          = _("Issues can be bugs, tasks or ideas to be discussed. Also, issues are searchable and filterable.")
+        - if has_button
          .text-center
            - if project_select_button
              = render 'shared/new_project_item_select', path: 'issues/new', label: 'New issue', type: :issues
......
-- max_render = 3
-- max = [max_render, issue.assignees.length].min
+- max_render = 4
+- assignees_rendering_overflow = issue.assignees.size > max_render
+- render_count = assignees_rendering_overflow ? max_render - 1 : max_render
+- more_assignees_count = issue.assignees.size - render_count

-- issue.assignees.take(max).each do |assignee|
+- issue.assignees.take(render_count).each do |assignee|
  = link_to_member(@project, assignee, name: false, title: "Assigned to :name")

-- if issue.assignees.length > max_render
-  - counter = issue.assignees.length - max_render
-  %span{ class: 'avatar-counter has-tooltip', data: { container: 'body', placement: 'bottom', 'line-type' => 'old', 'original-title' => "+#{counter} more assignees" } }
-    - if counter < 99
-      = "+#{counter}"
-    - else
-      99+
+- if more_assignees_count.positive?
+  %span{ class: 'avatar-counter has-tooltip', data: { container: 'body', placement: 'bottom', 'line-type' => 'old', 'original-title' => "+#{more_assignees_count} more assignees" } } +#{more_assignees_count}
@@ -39,6 +39,7 @@
- pipeline_cache:expire_job_cache
- pipeline_cache:expire_pipeline_cache
- pipeline_creation:create_pipeline
+- pipeline_creation:run_pipeline_schedule
- pipeline_default:build_coverage
- pipeline_default:build_trace_sections
- pipeline_default:pipeline_metrics
......
@@ -13,7 +13,7 @@ class ExpirePipelineCacheWorker
    store.touch(project_pipelines_path(project))
    store.touch(project_pipeline_path(project, pipeline))
-    store.touch(commit_pipelines_path(project, pipeline.commit)) if pipeline.commit
+    store.touch(commit_pipelines_path(project, pipeline.commit)) unless pipeline.commit.nil?
    store.touch(new_merge_request_pipelines_path(project))

    each_pipelines_merge_request_path(project, pipeline) do |path|
      store.touch(path)
......
class RunPipelineScheduleWorker
  include ApplicationWorker
  include PipelineQueue

  queue_namespace :pipeline_creation

  def perform(schedule_id, user_id)
    schedule = Ci::PipelineSchedule.find_by(id: schedule_id)
    user = User.find_by(id: user_id)

    return unless schedule && user

    run_pipeline_schedule(schedule, user)
  end

  def run_pipeline_schedule(schedule, user)
    Ci::CreatePipelineService.new(schedule.project,
                                  user,
                                  ref: schedule.ref)
      .execute(:schedule, ignore_skip_ci: true, save_on_errors: false, schedule: schedule)
  end
end
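Taken together with the new route, controller action, and policy shown earlier, the worker above completes the "play schedule" flow. A hedged, console-style walkthrough; the record lookups are illustrative rather than taken from this diff:

```ruby
# Illustrative walkthrough of the play flow; ids and lookups are examples.
schedule = Ci::PipelineSchedule.find(42)
user     = schedule.owner

# The controller's play action is guarded by the new :play_pipeline_schedule
# ability and rate limited to one play per minute per (user, schedule) pair,
# then it simply enqueues this worker:
RunPipelineScheduleWorker.perform_async(schedule.id, user.id)

# Asynchronously, perform re-fetches both records and calls
# Ci::CreatePipelineService on schedule.ref with ignore_skip_ci: true,
# so a [ci skip] commit message does not suppress the manually played run.
```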
---
title: Fix tags in the Activity tab not being clickable
merge_request: 15996
author: Mario de la Ossa
type: fixed
---
title: Do not generate NPM links for private NPM modules in blob view
merge_request: 16002
author: Mario de la Ossa
type: added
---
title: List of avatars should never show +1
merge_request: 15972
author: Jacopo Beschi @jacopo-beschi
type: added
---
title: Reset todo counters when the target is deleted
merge_request: 15807
author:
type: fixed
---
title: Don't link LFS objects to a project when unlinking forks when they were already
linked
merge_request: 16006
author:
type: fixed
---
title: Fix shortcut links on help page
merge_request:
author:
type: fixed
---
title: Fix onion-skin re-entering state
merge_request:
author:
type: fixed
---
title: Remove related links in MR widget when empty state
merge_request:
author:
type: fixed
---
title: Add button to run scheduled pipeline immediately
merge_request:
author:
type: added
---
title: Move edit button to second row on issue page (and change it to a pencil icon)
merge_request:
author:
type: changed
@@ -179,6 +179,7 @@ constraints(ProjectUrlConstrainer.new) do
        resources :pipeline_schedules, except: [:show] do
          member do
+            post :play
            post :take_ownership
          end
        end
......
...@@ -36,6 +36,7 @@ var config = { ...@@ -36,6 +36,7 @@ var config = {
cycle_analytics: './cycle_analytics/cycle_analytics_bundle.js', cycle_analytics: './cycle_analytics/cycle_analytics_bundle.js',
commit_pipelines: './commit/pipelines/pipelines_bundle.js', commit_pipelines: './commit/pipelines/pipelines_bundle.js',
deploy_keys: './deploy_keys/index.js', deploy_keys: './deploy_keys/index.js',
docs: './docs/docs_bundle.js',
diff_notes: './diff_notes/diff_notes_bundle.js', diff_notes: './diff_notes/diff_notes_bundle.js',
environments: './environments/environments_bundle.js', environments: './environments/environments_bundle.js',
environments_folder: './environments/folder/environments_folder_bundle.js', environments_folder: './environments/folder/environments_folder_bundle.js',
......
...@@ -16,6 +16,7 @@ class IssuesMilestoneIdForeignKey < ActiveRecord::Migration ...@@ -16,6 +16,7 @@ class IssuesMilestoneIdForeignKey < ActiveRecord::Migration
def self.with_orphaned_milestones def self.with_orphaned_milestones
where('NOT EXISTS (SELECT true FROM milestones WHERE milestones.id = issues.milestone_id)') where('NOT EXISTS (SELECT true FROM milestones WHERE milestones.id = issues.milestone_id)')
.where('milestone_id IS NOT NULL')
end end
end end
......
# Configuring GitLab for HA # Configuring GitLab for HA
Assuming you have already configured a database, Redis, and NFS, you can Assuming you have already configured a [database](database.md), [Redis](redis.md), and [NFS](nfs.md), you can
configure the GitLab application server(s) now. Complete the steps below configure the GitLab application server(s) now. Complete the steps below
for each GitLab application server in your environment. for each GitLab application server in your environment.
...@@ -56,8 +56,7 @@ for each GitLab application server in your environment. ...@@ -56,8 +56,7 @@ for each GitLab application server in your environment.
high_availability['mountpoint'] = '/var/opt/gitlab/git-data' high_availability['mountpoint'] = '/var/opt/gitlab/git-data'
# Disable components that will not be on the GitLab application server # Disable components that will not be on the GitLab application server
postgresql['enable'] = false roles ['application_role']
redis['enable'] = false
# PostgreSQL connection details # PostgreSQL connection details
gitlab_rails['db_adapter'] = 'postgresql' gitlab_rails['db_adapter'] = 'postgresql'
......
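For reference, a minimal sketch of the resulting `/etc/gitlab/gitlab.rb` fragment after this change, assuming external PostgreSQL and Redis hosts; the values are illustrative placeholders taken from the surrounding hunk.

```ruby
# /etc/gitlab/gitlab.rb (sketch)
# Disable bundled components via the application role instead of flipping
# individual services off.
roles ['application_role']   # replaces postgresql['enable'] = false and redis['enable'] = false

high_availability['mountpoint'] = '/var/opt/gitlab/git-data'

# PostgreSQL connection details point at the external database host.
gitlab_rails['db_adapter'] = 'postgresql'
```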
...@@ -11,7 +11,7 @@ This exported module should be used instead of directly using `axios` to ensure ...@@ -11,7 +11,7 @@ This exported module should be used instead of directly using `axios` to ensure
## Usage ## Usage
```javascript ```javascript
import axios from '~/lib/utils/axios_utils'; import axios from './lib/utils/axios_utils';
axios.get(url) axios.get(url)
.then((response) => { .then((response) => {
......
...@@ -163,3 +163,11 @@ For Windows, you can use `wincred` or Microsoft's [Git Credential Manager for Wi ...@@ -163,3 +163,11 @@ For Windows, you can use `wincred` or Microsoft's [Git Credential Manager for Wi
More details about various methods of storing the user credentials can be found More details about various methods of storing the user credentials can be found
on [Git Credential Storage documentation](https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage). on [Git Credential Storage documentation](https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage).
### LFS objects are missing on push
GitLab checks files to detect LFS pointers on push. If LFS pointers are detected, GitLab tries to verify that those files already exist in LFS on GitLab.
Verify that LFS is installed locally and consider a manual push with `git lfs push --all`.
If you are storing LFS files outside of GitLab, you can disable LFS on the project by setting `lfs_enabled: false` with the [projects API](../../api/projects.md#edit-project).
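A hedged sketch of that API call in Ruby, assuming a reachable instance at `gitlab.example.com`, a project ID of `42`, and a personal access token in `GITLAB_TOKEN`; the `lfs_enabled` parameter is the one referenced in the Projects API link above.

```ruby
# Disable LFS on a single project via the Projects API (illustrative values).
require 'net/http'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/projects/42')
request = Net::HTTP::Put.new(uri)
request['PRIVATE-TOKEN'] = ENV['GITLAB_TOKEN']
request.set_form_data('lfs_enabled' => 'false')

Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |http|
  response = http.request(request)
  puts response.code # expect 200 when the update succeeds
end
```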
module Gitlab
# This class implements a simple rate limiter that can be used to throttle
# certain actions. Unlike Rack Attack and Rack::Throttle, which operate at
# the middleware level, this can be used at the controller level.
class ActionRateLimiter
TIME_TO_EXPIRE = 60 # 1 min
attr_accessor :action, :expiry_time
def initialize(action:, expiry_time: TIME_TO_EXPIRE)
@action = action
@expiry_time = expiry_time
end
# Increments the value stored under the given cache key by 1 and, on the first
# increment, sets the configured expiration time. Returns the new value.
#
# key - An array of ActiveRecord instances
def increment(key)
value = 0
Gitlab::Redis::Cache.with do |redis|
cache_key = action_key(key)
value = redis.incr(cache_key)
redis.expire(cache_key, expiry_time) if value == 1
end
value
end
# Increments the given key and returns true if the action should
# be throttled.
#
# key - An array of ActiveRecord instances
# threshold_value - The maximum number of times this action should occur in the given time interval
def throttled?(key, threshold_value)
self.increment(key) > threshold_value
end
private
def action_key(key)
serialized = key.map { |obj| "#{obj.class.model_name.to_s.underscore}:#{obj.id}" }.join(":")
"action_rate_limiter:#{action}:#{serialized}"
end
end
end
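A minimal usage sketch, not part of this diff: throttling an action to once per minute for a user/project pair. `current_user` and `project` are assumed to be ActiveRecord-backed objects, since the key serialization relies on `model_name` and `id`; the alert text mirrors the pipeline-schedule controller spec below.

```ruby
# Sketch only: limit :play_pipeline_schedule to one call per minute per key.
limiter = Gitlab::ActionRateLimiter.new(action: :play_pipeline_schedule)

if limiter.throttled?([current_user, project], 1)
  flash[:alert] = 'You cannot play this scheduled pipeline at the moment. Please wait a minute.'
else
  # Proceed with the action; the counter was already incremented by throttled?.
end
```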
...@@ -228,6 +228,19 @@ module Gitlab ...@@ -228,6 +228,19 @@ module Gitlab
end end
end end
end end
# Only to be used when the object ids will not necessarily have a
# relation to each other. The last 10 commits for a branch for example,
# should go through .where
def batch_by_oid(repo, oids)
repo.gitaly_migrate(:list_commits_by_oid) do |is_enabled|
if is_enabled
repo.gitaly_commit_client.list_commits_by_oid(oids)
else
oids.map { |oid| find(repo, oid) }.compact
end
end
end
end end
def initialize(repository, raw_commit, head = nil) def initialize(repository, raw_commit, head = nil)
......
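A short sketch of the intended call path, based on `batch_by_oid` above and the `Repository#commits_by` spec further down; `project` is assumed context and the SHAs are placeholders. Unrelated oids are fetched in a single Gitaly `ListCommitsByOid` round trip, and oids that cannot be found are silently dropped.

```ruby
# Sketch: batch-load a handful of unrelated commits in one round trip.
oids = %w[
  498214de67004b1da3d820901307bed2a68a8ef6
  c642fe9b8b9f28f9225d7ea953fe14e74748d53b
]
commits = project.repository.commits_by(oids: oids) # => Array of Commit instances
```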
...@@ -169,6 +169,15 @@ module Gitlab ...@@ -169,6 +169,15 @@ module Gitlab
consume_commits_response(response) consume_commits_response(response)
end end
def list_commits_by_oid(oids)
request = Gitaly::ListCommitsByOidRequest.new(repository: @gitaly_repo, oid: oids)
response = GitalyClient.call(@repository.storage, :commit_service, :list_commits_by_oid, request, timeout: GitalyClient.medium_timeout)
consume_commits_response(response)
rescue GRPC::Unknown # If no repository is found, happens mainly during testing
[]
end
def commits_by_message(query, revision: '', path: '', limit: 1000, offset: 0) def commits_by_message(query, revision: '', path: '', limit: 1000, offset: 0)
request = Gitaly::CommitsByMessageRequest.new( request = Gitaly::CommitsByMessageRequest.new(
repository: @gitaly_repo, repository: @gitaly_repo,
......
#!/usr/bin/env ruby #!/usr/bin/env ruby
gitaly_dir = 'tmp/tests/gitaly' gitaly_dir = 'tmp/tests/gitaly'
env = { 'HOME' => File.expand_path('tmp/tests') } env = { 'HOME' => File.expand_path('tmp/tests'),
'GEM_PATH' => Gem.path.join(':') }
args = %W[#{gitaly_dir}/gitaly #{gitaly_dir}/config.toml] args = %W[#{gitaly_dir}/gitaly #{gitaly_dir}/config.toml]
# Print the PID of the spawned process # Print the PID of the spawned process
......
...@@ -874,7 +874,7 @@ describe Projects::IssuesController do ...@@ -874,7 +874,7 @@ describe Projects::IssuesController do
end end
it 'delegates the update of the todos count cache to TodoService' do it 'delegates the update of the todos count cache to TodoService' do
expect_any_instance_of(TodoService).to receive(:destroy_issuable).with(issue, owner).once expect_any_instance_of(TodoService).to receive(:destroy_target).with(issue).once
delete :destroy, namespace_id: project.namespace, project_id: project, id: issue.iid delete :destroy, namespace_id: project.namespace, project_id: project, id: issue.iid
end end
......
...@@ -468,7 +468,7 @@ describe Projects::MergeRequestsController do ...@@ -468,7 +468,7 @@ describe Projects::MergeRequestsController do
end end
it 'delegates the update of the todos count cache to TodoService' do it 'delegates the update of the todos count cache to TodoService' do
expect_any_instance_of(TodoService).to receive(:destroy_issuable).with(merge_request, owner).once expect_any_instance_of(TodoService).to receive(:destroy_target).with(merge_request).once
delete :destroy, namespace_id: project.namespace, project_id: project, id: merge_request.iid delete :destroy, namespace_id: project.namespace, project_id: project, id: merge_request.iid
end end
......
...@@ -3,10 +3,12 @@ require 'spec_helper' ...@@ -3,10 +3,12 @@ require 'spec_helper'
describe Projects::PipelineSchedulesController do describe Projects::PipelineSchedulesController do
include AccessMatchersForController include AccessMatchersForController
set(:project) { create(:project, :public) } set(:project) { create(:project, :public, :repository) }
let!(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project) } set(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project) }
describe 'GET #index' do describe 'GET #index' do
render_views
let(:scope) { nil } let(:scope) { nil }
let!(:inactive_pipeline_schedule) do let!(:inactive_pipeline_schedule) do
create(:ci_pipeline_schedule, :inactive, project: project) create(:ci_pipeline_schedule, :inactive, project: project)
...@@ -96,7 +98,7 @@ describe Projects::PipelineSchedulesController do ...@@ -96,7 +98,7 @@ describe Projects::PipelineSchedulesController do
end end
end end
context 'when variables_attributes has two variables and duplicted' do context 'when variables_attributes has two variables and duplicated' do
let(:schedule) do let(:schedule) do
basic_param.merge({ basic_param.merge({
variables_attributes: [{ key: 'AAA', value: 'AAA123' }, { key: 'AAA', value: 'BBB123' }] variables_attributes: [{ key: 'AAA', value: 'AAA123' }, { key: 'AAA', value: 'BBB123' }]
...@@ -364,6 +366,65 @@ describe Projects::PipelineSchedulesController do ...@@ -364,6 +366,65 @@ describe Projects::PipelineSchedulesController do
end end
end end
describe 'POST #play', :clean_gitlab_redis_cache do
set(:user) { create(:user) }
let(:ref) { 'master' }
before do
project.add_developer(user)
sign_in(user)
end
context 'when an anonymous user makes the request' do
before do
sign_out(user)
end
it 'does not allow pipeline to be executed' do
expect(RunPipelineScheduleWorker).not_to receive(:perform_async)
post :play, namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id
expect(response).to have_gitlab_http_status(404)
end
end
context 'when a developer makes the request' do
it 'executes a new pipeline' do
expect(RunPipelineScheduleWorker).to receive(:perform_async).with(pipeline_schedule.id, user.id).and_return('job-123')
post :play, namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id
expect(flash[:notice]).to start_with 'Successfully scheduled a pipeline to run'
expect(response).to have_gitlab_http_status(302)
end
it 'prevents users from scheduling the same pipeline repeatedly' do
2.times do
post :play, namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id
end
expect(flash.to_a.size).to eq(2)
expect(flash[:alert]).to eq 'You cannot play this scheduled pipeline at the moment. Please wait a minute.'
expect(response).to have_gitlab_http_status(302)
end
end
context 'when a developer attempts to schedule a protected ref' do
it 'does not allow pipeline to be executed' do
create(:protected_branch, project: project, name: ref)
protected_schedule = create(:ci_pipeline_schedule, project: project, ref: ref)
expect(RunPipelineScheduleWorker).not_to receive(:perform_async)
post :play, namespace_id: project.namespace.to_param, project_id: project, id: protected_schedule.id
expect(response).to have_gitlab_http_status(404)
end
end
end
describe 'DELETE #destroy' do describe 'DELETE #destroy' do
set(:user) { create(:user) } set(:user) { create(:user) }
......
...@@ -17,13 +17,10 @@ describe Projects::PipelinesController do ...@@ -17,13 +17,10 @@ describe Projects::PipelinesController do
describe 'GET index.json' do describe 'GET index.json' do
before do before do
branch_head = project.commit %w(pending running created success).each_with_index do |status, index|
parent = branch_head.parent sha = project.commit("HEAD~#{index}")
create(:ci_empty_pipeline, status: status, project: project, sha: sha)
create(:ci_empty_pipeline, status: 'pending', project: project, sha: branch_head.id) end
create(:ci_empty_pipeline, status: 'running', project: project, sha: branch_head.id)
create(:ci_empty_pipeline, status: 'created', project: project, sha: parent.id)
create(:ci_empty_pipeline, status: 'success', project: project, sha: parent.id)
end end
subject do subject do
...@@ -46,7 +43,7 @@ describe Projects::PipelinesController do ...@@ -46,7 +43,7 @@ describe Projects::PipelinesController do
context 'when performing gitaly calls', :request_store do context 'when performing gitaly calls', :request_store do
it 'limits the Gitaly requests' do it 'limits the Gitaly requests' do
expect { subject }.to change { Gitlab::GitalyClient.get_request_count }.by(8) expect { subject }.to change { Gitlab::GitalyClient.get_request_count }.by(3)
end end
end end
end end
......
...@@ -32,6 +32,24 @@ describe 'Help Pages' do ...@@ -32,6 +32,24 @@ describe 'Help Pages' do
it_behaves_like 'help page', prefix: '/gitlab' it_behaves_like 'help page', prefix: '/gitlab'
end end
context 'quick link shortcuts', :js do
before do
visit help_path
end
it 'focuses search bar' do
find('.js-trigger-search-bar').click
expect(page).to have_selector('#search:focus')
end
it 'opens shortcuts help dialog' do
find('.js-trigger-shortcut').click
expect(page).to have_selector('#modal-shortcuts')
end
end
end end
context 'in a production environment with version check enabled', :js do context 'in a production environment with version check enabled', :js do
......
...@@ -24,7 +24,7 @@ feature 'Issue Detail', :js do ...@@ -24,7 +24,7 @@ feature 'Issue Detail', :js do
visit project_issue_path(project, issue) visit project_issue_path(project, issue)
wait_for_requests wait_for_requests
click_link 'Edit' page.find('.js-issuable-edit').click
fill_in 'issuable-title', with: 'issue title' fill_in 'issuable-title', with: 'issue title'
click_button 'Save' click_button 'Save'
wait_for_requests wait_for_requests
......
...@@ -8,6 +8,19 @@ describe 'Issues' do ...@@ -8,6 +8,19 @@ describe 'Issues' do
let(:user) { create(:user) } let(:user) { create(:user) }
let(:project) { create(:project, :public) } let(:project) { create(:project, :public) }
describe 'while user is signed out' do
describe 'empty state' do
it 'user sees empty state' do
visit project_issues_path(project)
expect(page).to have_content('Register / Sign In')
expect(page).to have_content('The Issue Tracker is the place to add things that need to be improved or solved in a project.')
expect(page).to have_content('You can register or sign in to create issues for this project.')
end
end
end
describe 'while user is signed in' do
before do before do
sign_in(user) sign_in(user)
user2 = create(:user) user2 = create(:user)
...@@ -15,6 +28,16 @@ describe 'Issues' do ...@@ -15,6 +28,16 @@ describe 'Issues' do
project.team << [[user, user2], :developer] project.team << [[user, user2], :developer]
end end
describe 'empty state' do
it 'user sees empty state' do
visit project_issues_path(project)
expect(page).to have_content('The Issue Tracker is the place to add things that need to be improved or solved in a project')
expect(page).to have_content('Issues can be bugs, tasks or ideas to be discussed. Also, issues are searchable and filterable.')
expect(page).to have_content('New issue')
end
end
describe 'Edit issue' do describe 'Edit issue' do
let!(:issue) do let!(:issue) do
create(:issue, create(:issue,
...@@ -733,4 +756,5 @@ describe 'Issues' do ...@@ -733,4 +756,5 @@ describe 'Issues' do
expect(page).not_to have_css('.is-active') expect(page).not_to have_css('.is-active')
end end
end end
end
end end
...@@ -10,8 +10,6 @@ feature 'image diff notes', :js do ...@@ -10,8 +10,6 @@ feature 'image diff notes', :js do
project.team << [user, :master] project.team << [user, :master]
sign_in user sign_in user
page.driver.set_cookie('sidebar_collapsed', 'true')
# Stub helper to return any blob file as image from public app folder. # Stub helper to return any blob file as image from public app folder.
# This is necessary to run this specs since we don't display repo images in capybara. # This is necessary to run this specs since we don't display repo images in capybara.
allow_any_instance_of(DiffHelper).to receive(:diff_file_blob_raw_path).and_return('/apple-touch-icon.png') allow_any_instance_of(DiffHelper).to receive(:diff_file_blob_raw_path).and_return('/apple-touch-icon.png')
...@@ -141,13 +139,13 @@ feature 'image diff notes', :js do ...@@ -141,13 +139,13 @@ feature 'image diff notes', :js do
end end
it 'allows expanding/collapsing the discussion notes' do it 'allows expanding/collapsing the discussion notes' do
page.all('.js-diff-notes-toggle')[0].trigger('click') page.all('.js-diff-notes-toggle')[0].click
page.all('.js-diff-notes-toggle')[1].trigger('click') page.all('.js-diff-notes-toggle')[1].click
expect(page).not_to have_content('image diff test comment') expect(page).not_to have_content('image diff test comment')
page.all('.js-diff-notes-toggle')[0].trigger('click') page.all('.js-diff-notes-toggle')[0].click
page.all('.js-diff-notes-toggle')[1].trigger('click') page.all('.js-diff-notes-toggle')[1].click
expect(page).to have_content('image diff test comment') expect(page).to have_content('image diff test comment')
end end
...@@ -196,13 +194,31 @@ feature 'image diff notes', :js do ...@@ -196,13 +194,31 @@ feature 'image diff notes', :js do
expect(find('.onion-skin-frame')['style']).to match('width: 228px; height: 240px;') expect(find('.onion-skin-frame')['style']).to match('width: 228px; height: 240px;')
end end
it 'resets onion skin view mode opacity when toggling between view modes' do
find('.view-modes-menu .onion-skin').click
# Simulate dragging onion-skin slider
drag_and_drop_by(find('.dragger'), -30, 0)
expect(find('.onion-skin-frame .frame.added', visible: false)['style']).not_to match('opacity: 1;')
find('.view-modes-menu .swipe').click
find('.view-modes-menu .onion-skin').click
expect(find('.onion-skin-frame .frame.added', visible: false)['style']).to match('opacity: 1;')
end
end end
end
def create_image_diff_note def drag_and_drop_by(element, right_by, down_by)
page.driver.browser.action.drag_and_drop_by(element.native, right_by, down_by).perform
end
def create_image_diff_note
find('.js-add-image-diff-note-button', match: :first).click find('.js-add-image-diff-note-button', match: :first).click
page.all('.js-add-image-diff-note-button')[0].trigger('click') page.all('.js-add-image-diff-note-button')[0].click
find('.diff-content .note-textarea').native.send_keys('image diff test comment') find('.diff-content .note-textarea').native.send_keys('image diff test comment')
click_button 'Comment' click_button 'Comment'
wait_for_requests wait_for_requests
end
end end
...@@ -2,15 +2,15 @@ require 'spec_helper' ...@@ -2,15 +2,15 @@ require 'spec_helper'
feature 'project owner sees a link to create a license file in empty project', :js do feature 'project owner sees a link to create a license file in empty project', :js do
let(:project_master) { create(:user) } let(:project_master) { create(:user) }
let(:project) { create(:project) } let(:project) { create(:project_empty_repo) }
background do background do
project.team << [project_master, :master] project.add_master(project_master)
sign_in(project_master) sign_in(project_master)
end end
scenario 'project master creates a license file from a template' do scenario 'project master creates a license file from a template' do
visit project_path(project) visit project_path(project)
click_link 'Create empty bare repository'
click_on 'LICENSE' click_on 'LICENSE'
expect(page).to have_content('New file') expect(page).to have_content('New file')
...@@ -26,8 +26,6 @@ feature 'project owner sees a link to create a license file in empty project', : ...@@ -26,8 +26,6 @@ feature 'project owner sees a link to create a license file in empty project', :
expect(file_content).to have_content("Copyright (c) #{Time.now.year} #{project.namespace.human_name}") expect(file_content).to have_content("Copyright (c) #{Time.now.year} #{project.namespace.human_name}")
fill_in :commit_message, with: 'Add a LICENSE file', visible: true fill_in :commit_message, with: 'Add a LICENSE file', visible: true
# Remove pre-receive hook so we can push without auth
FileUtils.rm_f(File.join(project.repository.path, 'hooks', 'pre-receive'))
click_button 'Commit changes' click_button 'Commit changes'
expect(current_path).to eq( expect(current_path).to eq(
......
...@@ -32,9 +32,7 @@ feature 'issuable templates', :js do ...@@ -32,9 +32,7 @@ feature 'issuable templates', :js do
message: 'added issue template', message: 'added issue template',
branch_name: 'master') branch_name: 'master')
visit project_issue_path project, issue visit project_issue_path project, issue
page.within('.js-issuable-actions') do page.find('.js-issuable-edit').click
click_on 'Edit'
end
fill_in :'issuable-title', with: 'test issue title' fill_in :'issuable-title', with: 'test issue title'
end end
...@@ -77,9 +75,7 @@ feature 'issuable templates', :js do ...@@ -77,9 +75,7 @@ feature 'issuable templates', :js do
message: 'added issue template', message: 'added issue template',
branch_name: 'master') branch_name: 'master')
visit project_issue_path project, issue visit project_issue_path project, issue
page.within('.js-issuable-actions') do page.find('.js-issuable-edit').click
click_on 'Edit'
end
fill_in :'issuable-title', with: 'test issue title' fill_in :'issuable-title', with: 'test issue title'
fill_in :'issue-description', with: prior_description fill_in :'issue-description', with: prior_description
end end
......
...@@ -4,18 +4,17 @@ feature 'Master views tags' do ...@@ -4,18 +4,17 @@ feature 'Master views tags' do
let(:user) { create(:user) } let(:user) { create(:user) }
before do before do
project.team << [user, :master] project.add_master(user)
sign_in(user) sign_in(user)
end end
context 'when project has no tags' do context 'when project has no tags' do
let(:project) { create(:project_empty_repo) } let(:project) { create(:project_empty_repo) }
before do before do
visit project_path(project) visit project_path(project)
click_on 'README' click_on 'README'
fill_in :commit_message, with: 'Add a README file', visible: true fill_in :commit_message, with: 'Add a README file', visible: true
# Remove pre-receive hook so we can push without auth
FileUtils.rm_f(File.join(project.repository.path, 'hooks', 'pre-receive'))
click_button 'Commit changes' click_button 'Commit changes'
visit project_tags_path(project) visit project_tags_path(project)
end end
......
...@@ -41,6 +41,7 @@ describe NotesHelper do ...@@ -41,6 +41,7 @@ describe NotesHelper do
describe '#discussion_path' do describe '#discussion_path' do
let(:project) { create(:project, :repository) } let(:project) { create(:project, :repository) }
let(:anchor) { discussion.line_code }
context 'for a merge request discussion' do context 'for a merge request discussion' do
let(:merge_request) { create(:merge_request, source_project: project, target_project: project, importing: true) } let(:merge_request) { create(:merge_request, source_project: project, target_project: project, importing: true) }
...@@ -151,6 +152,15 @@ describe NotesHelper do ...@@ -151,6 +152,15 @@ describe NotesHelper do
expect(helper.discussion_path(discussion)).to be_nil expect(helper.discussion_path(discussion)).to be_nil
end end
end end
context 'for a contextual commit discussion' do
let(:commit) { merge_request.commits.last }
let(:discussion) { create(:diff_note_on_merge_request, noteable: merge_request, project: project, commit_id: commit.id).to_discussion }
it 'returns the merge request diff discussion scoped in the commit' do
expect(helper.discussion_path(discussion)).to eq(diffs_project_merge_request_path(project, merge_request, commit_id: commit.id, anchor: anchor))
end
end
end end
context 'for a commit discussion' do context 'for a commit discussion' do
...@@ -160,7 +170,7 @@ describe NotesHelper do ...@@ -160,7 +170,7 @@ describe NotesHelper do
let(:discussion) { create(:diff_note_on_commit, project: project).to_discussion } let(:discussion) { create(:diff_note_on_commit, project: project).to_discussion }
it 'returns the commit path with the line code' do it 'returns the commit path with the line code' do
expect(helper.discussion_path(discussion)).to eq(project_commit_path(project, commit, anchor: discussion.line_code)) expect(helper.discussion_path(discussion)).to eq(project_commit_path(project, commit, anchor: anchor))
end end
end end
...@@ -168,7 +178,7 @@ describe NotesHelper do ...@@ -168,7 +178,7 @@ describe NotesHelper do
let(:discussion) { create(:legacy_diff_note_on_commit, project: project).to_discussion } let(:discussion) { create(:legacy_diff_note_on_commit, project: project).to_discussion }
it 'returns the commit path with the line code' do it 'returns the commit path with the line code' do
expect(helper.discussion_path(discussion)).to eq(project_commit_path(project, commit, anchor: discussion.line_code)) expect(helper.discussion_path(discussion)).to eq(project_commit_path(project, commit, anchor: anchor))
end end
end end
......
/* global Notes */
import 'autosize'; import 'autosize';
import '~/gl_form'; import '~/gl_form';
import '~/lib/utils/text_utility'; import '~/lib/utils/text_utility';
import '~/render_gfm'; import '~/render_gfm';
import '~/render_math'; import '~/render_math';
import '~/notes'; import Notes from '~/notes';
const upArrowKeyCode = 38; const upArrowKeyCode = 38;
......
/* eslint-disable no-var, comma-dangle, object-shorthand */ /* eslint-disable no-var, comma-dangle, object-shorthand */
/* global Notes */
import * as urlUtils from '~/lib/utils/url_utility'; import * as urlUtils from '~/lib/utils/url_utility';
import MergeRequestTabs from '~/merge_request_tabs'; import MergeRequestTabs from '~/merge_request_tabs';
...@@ -7,7 +6,7 @@ import '~/commit/pipelines/pipelines_bundle'; ...@@ -7,7 +6,7 @@ import '~/commit/pipelines/pipelines_bundle';
import '~/breakpoints'; import '~/breakpoints';
import '~/lib/utils/common_utils'; import '~/lib/utils/common_utils';
import Diff from '~/diff'; import Diff from '~/diff';
import '~/notes'; import Notes from '~/notes';
import 'vendor/jquery.scrollTo'; import 'vendor/jquery.scrollTo';
(function () { (function () {
...@@ -279,8 +278,8 @@ import 'vendor/jquery.scrollTo'; ...@@ -279,8 +278,8 @@ import 'vendor/jquery.scrollTo';
loadFixtures('merge_requests/diff_comment.html.raw'); loadFixtures('merge_requests/diff_comment.html.raw');
$('body').attr('data-page', 'projects:merge_requests:show'); $('body').attr('data-page', 'projects:merge_requests:show');
window.gl.ImageFile = () => {}; window.gl.ImageFile = () => {};
window.notes = new Notes('', []); Notes.initialize('', []);
spyOn(window.notes, 'toggleDiffNote').and.callThrough(); spyOn(Notes.instance, 'toggleDiffNote').and.callThrough();
}); });
afterEach(() => { afterEach(() => {
...@@ -338,7 +337,7 @@ import 'vendor/jquery.scrollTo'; ...@@ -338,7 +337,7 @@ import 'vendor/jquery.scrollTo';
this.class.loadDiff('/foo/bar/merge_requests/1/diffs'); this.class.loadDiff('/foo/bar/merge_requests/1/diffs');
expect(noteId.length).toBeGreaterThan(0); expect(noteId.length).toBeGreaterThan(0);
expect(window.notes.toggleDiffNote).toHaveBeenCalledWith({ expect(Notes.instance.toggleDiffNote).toHaveBeenCalledWith({
target: jasmine.any(Object), target: jasmine.any(Object),
lineType: 'old', lineType: 'old',
forceShow: true, forceShow: true,
...@@ -349,7 +348,7 @@ import 'vendor/jquery.scrollTo'; ...@@ -349,7 +348,7 @@ import 'vendor/jquery.scrollTo';
spyOn(urlUtils, 'getLocationHash').and.returnValue('note_something-that-does-not-exist'); spyOn(urlUtils, 'getLocationHash').and.returnValue('note_something-that-does-not-exist');
this.class.loadDiff('/foo/bar/merge_requests/1/diffs'); this.class.loadDiff('/foo/bar/merge_requests/1/diffs');
expect(window.notes.toggleDiffNote).not.toHaveBeenCalled(); expect(Notes.instance.toggleDiffNote).not.toHaveBeenCalled();
}); });
}); });
...@@ -359,7 +358,7 @@ import 'vendor/jquery.scrollTo'; ...@@ -359,7 +358,7 @@ import 'vendor/jquery.scrollTo';
this.class.loadDiff('/foo/bar/merge_requests/1/diffs'); this.class.loadDiff('/foo/bar/merge_requests/1/diffs');
expect(noteLineNumId.length).toBeGreaterThan(0); expect(noteLineNumId.length).toBeGreaterThan(0);
expect(window.notes.toggleDiffNote).not.toHaveBeenCalled(); expect(Notes.instance.toggleDiffNote).not.toHaveBeenCalled();
}); });
}); });
}); });
...@@ -393,7 +392,7 @@ import 'vendor/jquery.scrollTo'; ...@@ -393,7 +392,7 @@ import 'vendor/jquery.scrollTo';
this.class.loadDiff('/foo/bar/merge_requests/1/diffs'); this.class.loadDiff('/foo/bar/merge_requests/1/diffs');
expect(noteId.length).toBeGreaterThan(0); expect(noteId.length).toBeGreaterThan(0);
expect(window.notes.toggleDiffNote).toHaveBeenCalledWith({ expect(Notes.instance.toggleDiffNote).toHaveBeenCalledWith({
target: jasmine.any(Object), target: jasmine.any(Object),
lineType: 'new', lineType: 'new',
forceShow: true, forceShow: true,
...@@ -404,7 +403,7 @@ import 'vendor/jquery.scrollTo'; ...@@ -404,7 +403,7 @@ import 'vendor/jquery.scrollTo';
spyOn(urlUtils, 'getLocationHash').and.returnValue('note_something-that-does-not-exist'); spyOn(urlUtils, 'getLocationHash').and.returnValue('note_something-that-does-not-exist');
this.class.loadDiff('/foo/bar/merge_requests/1/diffs'); this.class.loadDiff('/foo/bar/merge_requests/1/diffs');
expect(window.notes.toggleDiffNote).not.toHaveBeenCalled(); expect(Notes.instance.toggleDiffNote).not.toHaveBeenCalled();
}); });
}); });
...@@ -414,7 +413,7 @@ import 'vendor/jquery.scrollTo'; ...@@ -414,7 +413,7 @@ import 'vendor/jquery.scrollTo';
this.class.loadDiff('/foo/bar/merge_requests/1/diffs'); this.class.loadDiff('/foo/bar/merge_requests/1/diffs');
expect(noteLineNumId.length).toBeGreaterThan(0); expect(noteLineNumId.length).toBeGreaterThan(0);
expect(window.notes.toggleDiffNote).not.toHaveBeenCalled(); expect(Notes.instance.toggleDiffNote).not.toHaveBeenCalled();
}); });
}); });
}); });
......
/* eslint-disable space-before-function-paren, no-unused-expressions, no-var, object-shorthand, comma-dangle, max-len */ /* eslint-disable space-before-function-paren, no-unused-expressions, no-var, object-shorthand, comma-dangle, max-len */
/* global Notes */
import * as urlUtils from '~/lib/utils/url_utility'; import * as urlUtils from '~/lib/utils/url_utility';
import 'autosize'; import 'autosize';
import '~/gl_form'; import '~/gl_form';
import '~/lib/utils/text_utility'; import '~/lib/utils/text_utility';
import '~/render_gfm'; import '~/render_gfm';
import '~/notes'; import Notes from '~/notes';
(function() { (function() {
window.gon || (window.gon = {}); window.gon || (window.gon = {});
......
...@@ -2,6 +2,7 @@ import Vue from 'vue'; ...@@ -2,6 +2,7 @@ import Vue from 'vue';
import mrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options'; import mrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options';
import eventHub from '~/vue_merge_request_widget/event_hub'; import eventHub from '~/vue_merge_request_widget/event_hub';
import notify from '~/lib/utils/notify'; import notify from '~/lib/utils/notify';
import { stateKey } from '~/vue_merge_request_widget/stores/state_maps';
import mockData from './mock_data'; import mockData from './mock_data';
import mountComponent from '../helpers/vue_mount_component_helper'; import mountComponent from '../helpers/vue_mount_component_helper';
...@@ -344,4 +345,31 @@ describe('mrWidgetOptions', () => { ...@@ -344,4 +345,31 @@ describe('mrWidgetOptions', () => {
expect(comps['mr-widget-merge-when-pipeline-succeeds']).toBeDefined(); expect(comps['mr-widget-merge-when-pipeline-succeeds']).toBeDefined();
}); });
}); });
describe('rendering relatedLinks', () => {
beforeEach((done) => {
vm.mr.relatedLinks = {
assignToMe: null,
closing: `
<a class="close-related-link" href="#'>
Close
</a>
`,
mentioned: '',
};
Vue.nextTick(done);
});
it('renders if there are relatedLinks', () => {
expect(vm.$el.querySelector('.close-related-link')).toBeDefined();
});
it('does not render if state is nothingToMerge', (done) => {
vm.mr.state = stateKey.nothingToMerge;
Vue.nextTick(() => {
expect(vm.$el.querySelector('.close-related-link')).toBeNull();
done();
});
});
});
}); });
import MergeRequestStore from '~/vue_merge_request_widget/stores/mr_widget_store'; import MergeRequestStore from '~/vue_merge_request_widget/stores/mr_widget_store';
import { stateKey } from '~/vue_merge_request_widget/stores/state_maps';
import mockData from '../mock_data'; import mockData from '../mock_data';
describe('MergeRequestStore', () => { describe('MergeRequestStore', () => {
...@@ -52,5 +53,17 @@ describe('MergeRequestStore', () => { ...@@ -52,5 +53,17 @@ describe('MergeRequestStore', () => {
expect(store.isPipelineSkipped).toBe(false); expect(store.isPipelineSkipped).toBe(false);
}); });
}); });
describe('isNothingToMergeState', () => {
it('returns true when nothingToMerge', () => {
store.state = stateKey.nothingToMerge;
expect(store.isNothingToMergeState).toEqual(true);
});
it('returns false when not nothingToMerge', () => {
store.state = 'state';
expect(store.isNothingToMergeState).toEqual(false);
});
});
}); });
}); });
require 'spec_helper'
describe Gitlab::ActionRateLimiter do
let(:redis) { double('redis') }
let(:user) { create(:user) }
let(:project) { create(:project) }
let(:key) { [user, project] }
let(:cache_key) { "action_rate_limiter:test_action:user:#{user.id}:project:#{project.id}" }
subject { described_class.new(action: :test_action, expiry_time: 100) }
before do
allow(Gitlab::Redis::Cache).to receive(:with).and_yield(redis)
end
it 'increases the throttle count and sets the expire time' do
expect(redis).to receive(:incr).with(cache_key).and_return(1)
expect(redis).to receive(:expire).with(cache_key, 100)
expect(subject.throttled?(key, 1)).to be false
end
it 'returns true if the key is throttled' do
expect(redis).to receive(:incr).with(cache_key).and_return(2)
expect(redis).not_to receive(:expire)
expect(subject.throttled?(key, 1)).to be true
end
end
...@@ -41,7 +41,8 @@ describe Gitlab::Git::GitlabProjects do ...@@ -41,7 +41,8 @@ describe Gitlab::Git::GitlabProjects do
end end
it "fails if the source path doesn't exist" do it "fails if the source path doesn't exist" do
expect(logger).to receive(:error).with("mv-project failed: source path <#{tmp_repos_path}/bad-src.git> does not exist.") expected_source_path = File.join(tmp_repos_path, 'bad-src.git')
expect(logger).to receive(:error).with("mv-project failed: source path <#{expected_source_path}> does not exist.")
result = build_gitlab_projects(tmp_repos_path, 'bad-src.git').mv_project('repo.git') result = build_gitlab_projects(tmp_repos_path, 'bad-src.git').mv_project('repo.git')
expect(result).to be_falsy expect(result).to be_falsy
...@@ -50,7 +51,8 @@ describe Gitlab::Git::GitlabProjects do ...@@ -50,7 +51,8 @@ describe Gitlab::Git::GitlabProjects do
it 'fails if the destination path already exists' do it 'fails if the destination path already exists' do
FileUtils.mkdir_p(File.join(tmp_repos_path, 'already-exists.git')) FileUtils.mkdir_p(File.join(tmp_repos_path, 'already-exists.git'))
message = "mv-project failed: destination path <#{tmp_repos_path}/already-exists.git> already exists." expected_distination_path = File.join(tmp_repos_path, 'already-exists.git')
message = "mv-project failed: destination path <#{expected_distination_path}> already exists."
expect(logger).to receive(:error).with(message) expect(logger).to receive(:error).with(message)
expect(gl_projects.mv_project('already-exists.git')).to be_falsy expect(gl_projects.mv_project('already-exists.git')).to be_falsy
......
...@@ -22,4 +22,51 @@ describe BlobViewer::PackageJson do ...@@ -22,4 +22,51 @@ describe BlobViewer::PackageJson do
expect(subject.package_name).to eq('module-name') expect(subject.package_name).to eq('module-name')
end end
end end
describe '#package_url' do
it 'returns the package URL' do
expect(subject).to receive(:prepare!)
expect(subject.package_url).to eq("https://www.npmjs.com/package/#{subject.package_name}")
end
end
describe '#package_type' do
it 'returns "package"' do
expect(subject).to receive(:prepare!)
expect(subject.package_type).to eq('package')
end
end
context 'when package.json has "private": true' do
let(:data) do
<<-SPEC.strip_heredoc
{
"name": "module-name",
"version": "10.3.1",
"private": true,
"homepage": "myawesomepackage.com"
}
SPEC
end
let(:blob) { fake_blob(path: 'package.json', data: data) }
subject { described_class.new(blob) }
describe '#package_url' do
it 'returns homepage if any' do
expect(subject).to receive(:prepare!)
expect(subject.package_url).to eq('myawesomepackage.com')
end
end
describe '#package_type' do
it 'returns "private package"' do
expect(subject).to receive(:prepare!)
expect(subject.package_type).to eq('private package')
end
end
end
end end
...@@ -13,6 +13,45 @@ describe Commit do ...@@ -13,6 +13,45 @@ describe Commit do
it { is_expected.to include_module(StaticModel) } it { is_expected.to include_module(StaticModel) }
end end
describe '.lazy' do
set(:project) { create(:project, :repository) }
context 'when the commits are found' do
let(:oids) do
%w(
498214de67004b1da3d820901307bed2a68a8ef6
c642fe9b8b9f28f9225d7ea953fe14e74748d53b
6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9
048721d90c449b244b7b4c53a9186b04330174ec
281d3a76f31c812dbf48abce82ccf6860adedd81
)
end
subject { oids.map { |oid| described_class.lazy(project, oid) } }
it 'batches requests for commits' do
expect(project.repository).to receive(:commits_by).once.and_call_original
subject.first.title
subject.last.title
end
it 'maintains ordering' do
subject.each_with_index do |commit, i|
expect(commit.id).to eq(oids[i])
end
end
end
context 'when not found' do
it 'returns nil as commit' do
commit = described_class.lazy(project, 'deadbeef').__sync
expect(commit).to be_nil
end
end
end
describe '#author' do describe '#author' do
it 'looks up the author in a case-insensitive way' do it 'looks up the author in a case-insensitive way' do
user = create(:user, email: commit.author_email.upcase) user = create(:user, email: commit.author_email.upcase)
......
...@@ -239,6 +239,54 @@ describe Repository do ...@@ -239,6 +239,54 @@ describe Repository do
end end
end end
describe '#commits_by' do
set(:project) { create(:project, :repository) }
shared_examples 'batch commits fetching' do
let(:oids) { TestEnv::BRANCH_SHA.values }
subject { project.repository.commits_by(oids: oids) }
it 'finds each commit' do
expect(subject).not_to include(nil)
expect(subject.size).to eq(oids.size)
end
it 'returns only Commit instances' do
expect(subject).to all( be_a(Commit) )
end
context 'when some commits are not found ' do
let(:oids) do
['deadbeef'] + TestEnv::BRANCH_SHA.values.first(10)
end
it 'returns only found commits' do
expect(subject).not_to include(nil)
expect(subject.size).to eq(10)
end
end
context 'when no oids are passed' do
let(:oids) { [] }
it 'does not call #batch_by_oid' do
expect(Gitlab::Git::Commit).not_to receive(:batch_by_oid)
subject
end
end
end
context 'when Gitaly list_commits_by_oid is enabled' do
it_behaves_like 'batch commits fetching'
end
context 'when Gitaly list_commits_by_oid is disabled', :disable_gitaly do
it_behaves_like 'batch commits fetching'
end
end
describe '#find_commits_by_message' do describe '#find_commits_by_message' do
shared_examples 'finding commits by message' do shared_examples 'finding commits by message' do
it 'returns commits with messages containing a given string' do it 'returns commits with messages containing a given string' do
...@@ -1163,6 +1211,15 @@ describe Repository do ...@@ -1163,6 +1211,15 @@ describe Repository do
end end
end end
describe '#tag_exists?' do
it 'uses tag_names' do
allow(repository).to receive(:tag_names).and_return(['foobar'])
expect(repository.tag_exists?('foobar')).to eq(true)
expect(repository.tag_exists?('master')).to eq(false)
end
end
describe '#branch_names', :use_clean_rails_memory_store_caching do describe '#branch_names', :use_clean_rails_memory_store_caching do
let(:fake_branch_names) { ['foobar'] } let(:fake_branch_names) { ['foobar'] }
......
require 'spec_helper'
describe Ci::PipelineSchedulePolicy, :models do
set(:user) { create(:user) }
set(:project) { create(:project, :repository) }
set(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly, project: project) }
let(:policy) do
described_class.new(user, pipeline_schedule)
end
describe 'rules' do
describe 'rules for protected ref' do
before do
project.add_developer(user)
end
context 'when no one can push or merge to the branch' do
before do
create(:protected_branch, :no_one_can_push,
name: pipeline_schedule.ref, project: project)
end
it 'does not include ability to play pipeline schedule' do
expect(policy).to be_disallowed :play_pipeline_schedule
end
end
context 'when developers can push to the branch' do
before do
create(:protected_branch, :developers_can_merge,
name: pipeline_schedule.ref, project: project)
end
it 'includes ability to update pipeline' do
expect(policy).to be_allowed :play_pipeline_schedule
end
end
context 'when no one can create the tag' do
let(:tag) { 'v1.0.0' }
before do
pipeline_schedule.update(ref: tag)
create(:protected_tag, :no_one_can_create,
name: pipeline_schedule.ref, project: project)
end
it 'does not include ability to play pipeline schedule' do
expect(policy).to be_disallowed :play_pipeline_schedule
end
end
context 'when no one can create the tag but it is not a tag' do
before do
create(:protected_tag, :no_one_can_create,
name: pipeline_schedule.ref, project: project)
end
it 'includes ability to play pipeline schedule' do
expect(policy).to be_allowed :play_pipeline_schedule
end
end
end
describe 'rules for owner of schedule' do
before do
project.add_developer(user)
pipeline_schedule.update(owner: user)
end
it 'includes abilities to do all operations on pipeline schedule' do
expect(policy).to be_allowed :play_pipeline_schedule
expect(policy).to be_allowed :update_pipeline_schedule
expect(policy).to be_allowed :admin_pipeline_schedule
end
end
describe 'rules for a master' do
before do
project.add_master(user)
end
it 'includes abilities to do all operations on pipeline schedule' do
expect(policy).to be_allowed :play_pipeline_schedule
expect(policy).to be_allowed :update_pipeline_schedule
expect(policy).to be_allowed :admin_pipeline_schedule
end
end
end
end
require 'spec_helper' require 'spec_helper'
describe PipelineSerializer do describe PipelineSerializer do
set(:project) { create(:project, :repository) }
set(:user) { create(:user) } set(:user) { create(:user) }
let(:serializer) do let(:serializer) do
...@@ -16,7 +17,7 @@ describe PipelineSerializer do ...@@ -16,7 +17,7 @@ describe PipelineSerializer do
end end
context 'when a single object is being serialized' do context 'when a single object is being serialized' do
let(:resource) { create(:ci_empty_pipeline) } let(:resource) { create(:ci_empty_pipeline, project: project) }
it 'serializes the pipeline object' do it 'serializes the pipeline object' do
expect(subject[:id]).to eq resource.id expect(subject[:id]).to eq resource.id
...@@ -24,7 +25,7 @@ describe PipelineSerializer do ...@@ -24,7 +25,7 @@ describe PipelineSerializer do
end end
context 'when multiple objects are being serialized' do context 'when multiple objects are being serialized' do
let(:resource) { create_list(:ci_pipeline, 2) } let(:resource) { create_list(:ci_pipeline, 2, project: project) }
it 'serializes the array of pipelines' do it 'serializes the array of pipelines' do
expect(subject).not_to be_empty expect(subject).not_to be_empty
...@@ -100,7 +101,6 @@ describe PipelineSerializer do ...@@ -100,7 +101,6 @@ describe PipelineSerializer do
context 'number of queries' do context 'number of queries' do
let(:resource) { Ci::Pipeline.all } let(:resource) { Ci::Pipeline.all }
let(:project) { create(:project) }
before do before do
# Since RequestStore.active? is true we have to allow the # Since RequestStore.active? is true we have to allow the
......
...@@ -2,7 +2,7 @@ require 'spec_helper' ...@@ -2,7 +2,7 @@ require 'spec_helper'
describe Issuable::DestroyService do describe Issuable::DestroyService do
let(:user) { create(:user) } let(:user) { create(:user) }
let(:project) { create(:project) } let(:project) { create(:project, :public) }
subject(:service) { described_class.new(project, user) } subject(:service) { described_class.new(project, user) }
...@@ -19,6 +19,13 @@ describe Issuable::DestroyService do ...@@ -19,6 +19,13 @@ describe Issuable::DestroyService do
service.execute(issue) service.execute(issue)
end end
it 'updates the todo caches for users with todos on the issue' do
create(:todo, target: issue, user: user, author: user, project: project)
expect { service.execute(issue) }
.to change { user.todos_pending_count }.from(1).to(0)
end
end end
context 'when issuable is a merge request' do context 'when issuable is a merge request' do
...@@ -33,6 +40,13 @@ describe Issuable::DestroyService do ...@@ -33,6 +40,13 @@ describe Issuable::DestroyService do
service.execute(merge_request) service.execute(merge_request)
end end
it 'updates the todo caches for users with todos on the merge request' do
create(:todo, target: merge_request, user: user, author: user, project: project)
expect { service.execute(merge_request) }
.to change { user.todos_pending_count }.from(1).to(0)
end
end end
end end
end end
require 'spec_helper' require 'spec_helper'
describe Notes::DestroyService do describe Notes::DestroyService do
set(:project) { create(:project, :public) }
set(:issue) { create(:issue, project: project) }
let(:user) { issue.author }
describe '#execute' do describe '#execute' do
it 'deletes a note' do it 'deletes a note' do
project = create(:project)
issue = create(:issue, project: project)
note = create(:note, project: project, noteable: issue) note = create(:note, project: project, noteable: issue)
described_class.new(project, note.author).execute(note) described_class.new(project, user).execute(note)
expect(project.issues.find(issue.id).notes).not_to include(note) expect(project.issues.find(issue.id).notes).not_to include(note)
end end
it 'updates the todo counts for users with todos for the note' do
note = create(:note, project: project, noteable: issue)
create(:todo, note: note, target: issue, user: user, author: user, project: project)
expect { described_class.new(project, user).execute(note) }
.to change { user.todos_pending_count }.from(1).to(0)
end
end end
end end
...@@ -62,6 +62,26 @@ describe Projects::UnlinkForkService do ...@@ -62,6 +62,26 @@ describe Projects::UnlinkForkService do
expect(source.forks_count).to be_zero expect(source.forks_count).to be_zero
end end
context 'when the source has LFS objects' do
let(:lfs_object) { create(:lfs_object) }
before do
lfs_object.projects << project
end
it 'links the fork to the lfs object before unlinking' do
subject.execute
expect(lfs_object.projects).to include(forked_project)
end
it 'does not fail if the lfs objects were already linked' do
lfs_object.projects << forked_project
expect { subject.execute }.not_to raise_error
end
end
context 'when the original project was deleted' do context 'when the original project was deleted' do
it 'does not fail when the original project is deleted' do it 'does not fail when the original project is deleted' do
source = forked_project.forked_from_project source = forked_project.forked_from_project
......
...@@ -248,11 +248,26 @@ describe TodoService do ...@@ -248,11 +248,26 @@ describe TodoService do
end end
end end
describe '#destroy_issuable' do describe '#destroy_target' do
it 'refresh the todos count cache for the user' do it 'refreshes the todos count cache for users with todos on the target' do
expect(john_doe).to receive(:update_todos_count_cache).and_call_original create(:todo, target: issue, user: john_doe, author: john_doe, project: issue.project)
expect_any_instance_of(User).to receive(:update_todos_count_cache).and_call_original
service.destroy_target(issue) { }
end
it 'does not refresh the todos count cache for users with only done todos on the target' do
create(:todo, :done, target: issue, user: john_doe, author: john_doe, project: issue.project)
expect_any_instance_of(User).not_to receive(:update_todos_count_cache)
service.destroy_target(issue) { }
end
service.destroy_issuable(issue, john_doe) it 'yields the target to the caller' do
expect { |b| service.destroy_target(issue, &b) }
.to yield_with_args(issue)
end end
end end
......
require 'spec_helper'
describe 'events/event/_push.html.haml' do
let(:event) { build_stubbed(:push_event) }
context 'with a branch' do
let(:payload) { build_stubbed(:push_event_payload, event: event) }
before do
allow(event).to receive(:push_event_payload).and_return(payload)
end
it 'links to the branch' do
allow(event.project.repository).to receive(:branch_exists?).with(event.ref_name).and_return(true)
link = project_commits_path(event.project, event.ref_name)
render partial: 'events/event/push', locals: { event: event }
expect(rendered).to have_link(event.ref_name, href: link)
end
context 'that has been deleted' do
it 'does not link to the branch' do
render partial: 'events/event/push', locals: { event: event }
expect(rendered).not_to have_link(event.ref_name)
end
end
end
context 'with a tag' do
let(:payload) { build_stubbed(:push_event_payload, event: event, ref_type: :tag, ref: 'v0.1.0') }
before do
allow(event).to receive(:push_event_payload).and_return(payload)
end
it 'links to the tag' do
allow(event.project.repository).to receive(:tag_exists?).with(event.ref_name).and_return(true)
link = project_commits_path(event.project, event.ref_name)
render partial: 'events/event/push', locals: { event: event }
expect(rendered).to have_link(event.ref_name, href: link)
end
context 'that has been deleted' do
it 'does not link to the tag' do
render partial: 'events/event/push', locals: { event: event }
expect(rendered).not_to have_link(event.ref_name)
end
end
end
end
require 'spec_helper'
describe RunPipelineScheduleWorker do
describe '#perform' do
set(:project) { create(:project) }
set(:user) { create(:user) }
set(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly, project: project ) }
let(:worker) { described_class.new }
context 'when a project is not found' do
it 'does not call the Service' do
expect(Ci::CreatePipelineService).not_to receive(:new)
expect(worker).not_to receive(:run_pipeline_schedule)
worker.perform(100000, user.id)
end
end
context 'when a user is not found' do
it 'does not call the Service' do
expect(Ci::CreatePipelineService).not_to receive(:new)
expect(worker).not_to receive(:run_pipeline_schedule)
worker.perform(pipeline_schedule.id, 10000)
end
end
context 'when everything is ok' do
let(:create_pipeline_service) { instance_double(Ci::CreatePipelineService) }
it 'calls the Service' do
expect(Ci::CreatePipelineService).to receive(:new).with(project, user, ref: pipeline_schedule.ref).and_return(create_pipeline_service)
expect(create_pipeline_service).to receive(:execute).with(:schedule, ignore_skip_ci: true, save_on_errors: false, schedule: pipeline_schedule)
worker.perform(pipeline_schedule.id, user.id)
end
end
end
end