Commit 4ab27ca5 authored by Alejandro Rodríguez

Merge remote-tracking branch 'ce/master' into ce-upstream

parents 516c323d 149df275
/* eslint-disable func-names, space-before-function-paren, wrap-iife, no-undef, quotes, no-var, padded-blocks, max-len */
(function() {
this.Activities = (function() {
function Activities() {
Pager.init(20, true, false, this.updateTooltips);
$(".event-filter-link").on("click", (function(_this) {
return function(event) {
event.preventDefault();
_this.toggleFilter($(event.currentTarget));
return _this.reloadActivities();
};
})(this));
}
Activities.prototype.updateTooltips = function() {
gl.utils.localTimeAgo($('.js-timeago', '.content_list'));
};
Activities.prototype.reloadActivities = function() {
$(".content_list").html('');
Pager.init(20, true, false, this.updateTooltips);
};
Activities.prototype.toggleFilter = function(sender) {
var filter = sender.attr("id").split("_")[0];
$('.event-filter .active').removeClass("active");
Cookies.set("event_filter", filter);
sender.closest('li').toggleClass("active");
};
return Activities;
})();
}).call(this);
/* eslint-disable no-param-reassign, class-methods-use-this */
/* global Pager, Cookies */
((global) => {
class Activities {
constructor() {
Pager.init(20, true, false, this.updateTooltips);
$('.event-filter-link').on('click', (e) => {
e.preventDefault();
this.toggleFilter(e.currentTarget);
this.reloadActivities();
});
}
updateTooltips() {
gl.utils.localTimeAgo($('.js-timeago', '.content_list'));
}
reloadActivities() {
$('.content_list').html('');
Pager.init(20, true, false, this.updateTooltips);
}
toggleFilter(sender) {
const $sender = $(sender);
const filter = $sender.attr('id').split('_')[0];
$('.event-filter .active').removeClass('active');
Cookies.set('event_filter', filter);
$sender.closest('li').toggleClass('active');
}
}
global.Activities = Activities;
})(window.gl || (window.gl = {}));
......@@ -111,10 +111,10 @@
Issuable.init();
break;
case 'dashboard:activity':
new Activities();
new gl.Activities();
break;
case 'dashboard:projects:starred':
new Activities();
new gl.Activities();
break;
case 'projects:commit:show':
new Commit();
......@@ -140,7 +140,7 @@
new gl.Pipelines();
break;
case 'groups:activity':
new Activities();
new gl.Activities();
break;
case 'groups:show':
shortcut_handler = new ShortcutsNavigation();
......
......@@ -157,17 +157,17 @@
<li v-bind:class="{ 'active': scope === undefined }">
<a :href="projectEnvironmentsPath">
Available
<span
class="badge js-available-environments-count"
v-html="state.availableCounter"></span>
<span class="badge js-available-environments-count">
{{state.availableCounter}}
</span>
</a>
</li>
<li v-bind:class="{ 'active' : scope === 'stopped' }">
<a :href="projectStoppedEnvironmentsPath">
Stopped
<span
class="badge js-stopped-environments-count"
v-html="state.stoppedCounter"></span>
<span class="badge js-stopped-environments-count">
{{state.stoppedCounter}}
</span>
</a>
</li>
</ul>
......@@ -183,8 +183,7 @@
<i class="fa fa-spinner spin"></i>
</div>
<div
class="blank-state blank-state-no-icon"
<div class="blank-state blank-state-no-icon"
v-if="!isLoading && state.environments.length === 0">
<h2 class="blank-state-title">
You don't have any environments right now.
......@@ -205,8 +204,7 @@
</a>
</div>
<div
class="table-holder"
<div class="table-holder"
v-if="!isLoading && state.environments.length > 0">
<table class="table ci-table environments">
<thead>
......@@ -234,7 +232,9 @@
is="environment-item"
v-for="children in model.children"
:model="children"
:toggleRow="toggleRow.bind(children)">
:toggleRow="toggleRow.bind(children)"
:can-create-deployment="canCreateDeploymentParsed"
:can-read-environment="canReadEnvironmentParsed">
</tr>
</template>
......
......@@ -43,8 +43,7 @@
<div class="inline">
<div class="dropdown">
<a class="dropdown-new btn btn-default" data-toggle="dropdown">
<span class="dropdown-play-icon-container">
</span>
<span class="dropdown-play-icon-container"></span>
<i class="fa fa-caret-down"></i>
</a>
......@@ -54,9 +53,10 @@
data-method="post"
rel="nofollow"
class="js-manual-action-link">
<span class="action-play-icon-container">
<span class="action-play-icon-container"></span>
<span>
{{action.name}}
</span>
<span v-html="action.name"></span>
</a>
</li>
</ul>
......
......@@ -389,11 +389,10 @@
template: `
<tr>
<td v-bind:class="{ 'children-row': isChildren}">
<a
v-if="!isFolder"
<a v-if="!isFolder"
class="environment-name"
:href="model.environment_path"
v-html="model.name">
:href="model.environment_path">
{{model.name}}
</a>
<span v-else v-on:click="toggleRow(model)" class="folder-name">
<span class="folder-icon">
......@@ -401,16 +400,19 @@
<i v-show="!model.isOpen" class="fa fa-caret-right"></i>
</span>
<span v-html="model.name"></span>
<span>
{{model.name}}
</span>
<span class="badge" v-html="childrenCounter"></span>
<span class="badge">
{{childrenCounter}}
</span>
</span>
</td>
<td class="deployment-column">
<span
v-if="shouldRenderDeploymentID"
v-html="deploymentInternalId">
<span v-if="shouldRenderDeploymentID">
{{deploymentInternalId}}
</span>
<span v-if="!isFolder && deploymentHasUser">
......@@ -427,8 +429,8 @@
<td>
<a v-if="shouldRenderBuildName"
class="build-link"
:href="model.last_deployment.deployable.build_path"
v-html="buildName">
:href="model.last_deployment.deployable.build_path">
{{buildName}}
</a>
</td>
......@@ -451,8 +453,8 @@
<td>
<span
v-if="!isFolder && model.last_deployment"
class="environment-created-date-timeago"
v-html="createdDate">
class="environment-created-date-timeago">
{{createdDate}}
</span>
</td>
......
......@@ -14,8 +14,7 @@
},
template: `
<a
class="btn stop-env-link"
<a class="btn stop-env-link"
:href="stop_url"
data-confirm="Are you sure you want to stop this environment?"
data-method="post"
......
......@@ -235,7 +235,7 @@
}
if (environment.deployed_at && environment.deployed_at_formatted) {
environment.deployed_at = gl.utils.getTimeago(environment.deployed_at) + '.';
environment.deployed_at = gl.utils.getTimeago().format(environment.deployed_at, 'gl_en') + '.';
} else {
$('.js-environment-timeago', $template).remove();
environment.name += '.';
......
/* eslint-disable func-names, space-before-function-paren, object-shorthand, quotes, no-undef, prefer-template, wrap-iife, comma-dangle, no-return-assign, no-else-return, consistent-return, no-unused-vars, padded-blocks, max-len */
(function() {
this.Pager = {
init: function(limit, preload, disable, callback) {
this.limit = limit != null ? limit : 0;
this.disable = disable != null ? disable : false;
this.callback = callback != null ? callback : $.noop;
this.loading = $('.loading').first();
if (preload) {
this.offset = 0;
this.getOld();
} else {
this.offset = this.limit;
}
return this.initLoadMore();
},
getOld: function() {
this.loading.show();
return $.ajax({
type: "GET",
url: $(".content_list").data('href') || location.href,
data: "limit=" + this.limit + "&offset=" + this.offset,
complete: (function(_this) {
return function() {
return _this.loading.hide();
};
})(this),
success: function(data) {
Pager.append(data.count, data.html);
return Pager.callback();
},
dataType: "json"
});
},
append: function(count, html) {
$(".content_list").append(html);
if (count > 0) {
return this.offset += count;
} else {
return this.disable = true;
}
},
initLoadMore: function() {
$(document).unbind('scroll');
return $(document).endlessScroll({
bottomPixels: 400,
fireDelay: 1000,
fireOnce: true,
ceaseFire: function() {
return Pager.disable;
},
callback: (function(_this) {
return function(i) {
if (!_this.loading.is(':visible')) {
_this.loading.show();
return Pager.getOld();
}
};
})(this)
});
}
};
}).call(this);
(() => {
const ENDLESS_SCROLL_BOTTOM_PX = 400;
const ENDLESS_SCROLL_FIRE_DELAY_MS = 1000;
const Pager = {
init(limit = 0, preload = false, disable = false, callback = $.noop) {
this.limit = limit;
this.offset = this.limit;
this.disable = disable;
this.callback = callback;
this.loading = $('.loading').first();
if (preload) {
this.offset = 0;
this.getOld();
}
this.initLoadMore();
},
getOld() {
this.loading.show();
$.ajax({
type: 'GET',
url: $('.content_list').data('href') || window.location.href,
data: `limit=${this.limit}&offset=${this.offset}`,
dataType: 'json',
error: () => this.loading.hide(),
success: (data) => {
this.append(data.count, data.html);
this.callback();
// keep loading until we've filled the viewport height
if (!this.disable && !this.isScrollable()) {
this.getOld();
} else {
this.loading.hide();
}
},
});
},
append(count, html) {
$('.content_list').append(html);
if (count > 0) {
this.offset += count;
} else {
this.disable = true;
}
},
isScrollable() {
const $w = $(window);
return $(document).height() > $w.height() + $w.scrollTop() + ENDLESS_SCROLL_BOTTOM_PX;
},
initLoadMore() {
$(document).unbind('scroll');
$(document).endlessScroll({
bottomPixels: ENDLESS_SCROLL_BOTTOM_PX,
fireDelay: ENDLESS_SCROLL_FIRE_DELAY_MS,
fireOnce: true,
ceaseFire: () => this.disable === true,
callback: () => {
if (!this.loading.is(':visible')) {
this.loading.show();
this.getOld();
}
},
});
},
};
window.Pager = Pager;
})();
......@@ -134,7 +134,7 @@ content on the Users#show page.
}
const $calendarWrap = this.$parentEl.find('.user-calendar');
$calendarWrap.load($calendarWrap.data('href'));
new Activities();
new gl.Activities();
return this.loaded['activity'] = true;
}
......
......@@ -138,16 +138,15 @@
<a v-if="hasRef"
class="monospace branch-name"
:href="ref.ref_url"
v-html="ref.name">
:href="ref.ref_url">
{{ref.name}}
</a>
<div class="icon-container commit-icon commit-icon-container">
</div>
<div class="icon-container commit-icon commit-icon-container"></div>
<a class="commit-id monospace"
:href="commit_url"
v-html="short_sha">
:href="commit_url">
{{short_sha}}
</a>
<p class="commit-title">
......@@ -156,14 +155,15 @@
class="avatar-image-container"
:href="author.web_url">
<img
class="avatar has-tooltip s20"
class="avatar has-tooltip s20"
:src="author.avatar_url"
:alt="userImageAltDescription"
:title="author.username" />
</a>
<a class="commit-row-message"
:href="commit_url" v-html="title">
:href="commit_url">
{{title}}
</a>
</span>
<span v-else>
......
......@@ -255,26 +255,3 @@
}
}
// For sign in pane only, to improve tab order, the following removes the submit button from
// normal document flow and pins it to the bottom of the form. For context, see !6867 & !6928
.login-box {
.new_user {
position: relative;
padding-bottom: 35px;
@media (min-width: $screen-sm-min) and (max-width: $screen-sm-max) {
.forgot-password {
float: none !important;
margin-top: 5px;
}
}
}
.move-submit-down {
position: absolute;
width: 100%;
bottom: 0;
}
}
......@@ -8,6 +8,10 @@ class Projects::CycleAnalyticsController < Projects::ApplicationController
def show
@cycle_analytics = ::CycleAnalytics.new(@project, from: start_date(cycle_analytics_params))
stats_values, cycle_analytics_json = generate_cycle_analytics_data
@cycle_analytics_no_data = stats_values.blank?
respond_to do |format|
format.html
format.json { render json: cycle_analytics_json }
......@@ -22,7 +26,9 @@ class Projects::CycleAnalyticsController < Projects::ApplicationController
{ start_date: params[:cycle_analytics][:start_date] }
end
def cycle_analytics_json
def generate_cycle_analytics_data
stats_values = []
cycle_analytics_view_data = [[:issue, "Issue", "Time before an issue gets scheduled"],
[:plan, "Plan", "Time before an issue starts implementation"],
[:code, "Code", "Time until first merge request"],
......@@ -34,11 +40,14 @@ class Projects::CycleAnalyticsController < Projects::ApplicationController
stats = cycle_analytics_view_data.reduce([]) do |stats, (stage_method, stage_text, stage_description)|
value = @cycle_analytics.send(stage_method).presence
stats_values << value.abs if value
stats << {
title: stage_text,
description: stage_description,
value: value && !value.zero? ? distance_of_time_in_words(value) : nil
}
stats
end
......@@ -52,9 +61,11 @@ class Projects::CycleAnalyticsController < Projects::ApplicationController
{ title: "Deploy".pluralize(deploys), value: deploys }
]
{
summary: summary,
stats: stats
cycle_analytics_hash = { summary: summary,
stats: stats,
permissions: @cycle_analytics.permissions(user: current_user)
}
[stats_values, cycle_analytics_hash]
end
end
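The controller refactor above splits JSON generation from the empty check: `generate_cycle_analytics_data` returns both the raw per-stage values and the hash rendered as JSON, and the hash gains a `permissions` key (see the "Added permissions per stage to cycle analytics endpoint" changelog entry further down). A minimal sketch of that return contract; the contents, and especially the permissions shape, are illustrative assumptions:

```ruby
# Sketch only: mirrors the [stats_values, cycle_analytics_hash] contract of
# generate_cycle_analytics_data without touching the real models.
stats_values = [3600, 7200] # absolute per-stage durations in seconds

cycle_analytics_hash = {
  summary: [{ title: "New Issue", value: 2 }],
  stats: [
    { title: "Issue", description: "Time before an issue gets scheduled", value: "1 hour" },
    { title: "Plan",  description: "Time before an issue starts implementation", value: "2 hours" }
  ],
  # Assumed shape for the new key: one entry per stage, as computed by the
  # permissions lookup for the current user.
  permissions: { issue: true, plan: true, code: false }
}

no_data = stats_values.empty? # the controller uses .blank? (ActiveSupport)
puts "show empty-state view: #{no_data}"
puts cycle_analytics_hash[:permissions].inspect
```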
......@@ -62,7 +62,11 @@ class Projects::MergeRequestsController < Projects::ApplicationController
format.html { define_discussion_vars }
format.json do
<<<<<<< HEAD
render json: MergeRequestSerializer.new.represent(@merge_request, type: :full)
=======
render json: MergeRequestSerializer.new.represent(@merge_request)
>>>>>>> ce/master
end
format.patch do
......@@ -84,12 +88,12 @@ class Projects::MergeRequestsController < Projects::ApplicationController
@merge_request_diff =
if params[:diff_id]
@merge_request.merge_request_diffs.find(params[:diff_id])
@merge_request.merge_request_diffs.viewable.find(params[:diff_id])
else
@merge_request.merge_request_diff
end
@merge_request_diffs = @merge_request.merge_request_diffs.select_without_diff
@merge_request_diffs = @merge_request.merge_request_diffs.viewable.select_without_diff
@comparable_diffs = @merge_request_diffs.select { |diff| diff.id < @merge_request_diff.id }
if params[:start_sha].present?
......@@ -442,7 +446,7 @@ class Projects::MergeRequestsController < Projects::ApplicationController
response = {
title: merge_request.title,
sha: merge_request.diff_head_commit.short_id,
sha: (merge_request.diff_head_commit.short_id if merge_request.diff_head_sha),
status: status,
coverage: coverage
}
......@@ -601,7 +605,7 @@ class Projects::MergeRequestsController < Projects::ApplicationController
def define_pipelines_vars
@pipelines = @merge_request.all_pipelines
if @pipelines.present?
if @pipelines.present? && @merge_request.commits.present?
@pipeline = @pipelines.first
@statuses = @pipeline.statuses.relevant
end
......
......@@ -197,7 +197,10 @@ class Projects::NotesController < Projects::ApplicationController
)
end
<<<<<<< HEAD
attrs[:commands_changes] = note.commands_changes unless attrs[:award]
=======
>>>>>>> ce/master
attrs
end
......
......@@ -17,6 +17,8 @@ module ServicesHelper
"Event will be triggered when a build status changes"
when "wiki_page"
"Event will be triggered when a wiki page is created/updated"
when "commit"
"Event will be triggered when a commit is created/updated"
end
end
......
......@@ -70,7 +70,11 @@ module Ci
environment: build.environment,
status_event: 'enqueue'
)
MergeRequests::AddTodoWhenBuildFailsService.new(build.project, nil).close(new_build)
MergeRequests::AddTodoWhenBuildFailsService
.new(build.project, nil)
.close(new_build)
build.pipeline.mark_as_processable_after_stage(build.stage_idx)
new_build
end
......@@ -484,6 +488,10 @@ module Ci
]
end
def credentials
Gitlab::Ci::Build::Credentials::Factory.new(self).create!
end
private
def update_artifacts_size
......
# == Mentionable concern
#
# Contains functionality related to objects that can mention Users, Issues, MergeRequests, or Commits by
# Contains functionality related to objects that can mention Users, Issues, MergeRequests, Commits or Snippets by
# GFM references.
#
# Used by Issue, Note, MergeRequest, and Commit.
......
class CycleAnalytics
STAGES = %i[issue plan code test review staging production].freeze
def initialize(project, from:)
@project = project
@from = from
......@@ -9,6 +11,10 @@ class CycleAnalytics
@summary ||= Summary.new(@project, from: @from)
end
def permissions(user:)
Gitlab::CycleAnalytics::Permissions.get(user: user, project: @project)
end
def issue
@fetcher.calculate_metric(:issue,
Issue.arel_table[:created_at],
......
......@@ -19,7 +19,7 @@ class Environment < ActiveRecord::Base
allow_nil: true,
addressable_url: true
delegate :stop_action, to: :last_deployment, allow_nil: true
delegate :stop_action, :manual_actions, to: :last_deployment, allow_nil: true
scope :available, -> { with_state(:available) }
scope :stopped, -> { with_state(:stopped) }
......@@ -99,4 +99,12 @@ class Environment < ActiveRecord::Base
stop
stop_action.play(current_user)
end
def actions_for(environment)
return [] unless manual_actions
manual_actions.select do |action|
action.expanded_environment_name == environment
end
end
end
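`Environment#actions_for` narrows the last deployment's manual actions (now delegated alongside `stop_action`) to those whose expanded name matches a given environment. A self-contained sketch of the same select, with structs standing in for the real CI build records:

```ruby
# Stand-ins for Ci::Build manual actions; not the real records.
ManualAction = Struct.new(:name, :expanded_environment_name)

manual_actions = [
  ManualAction.new("deploy:review",     "review/feature-x"),
  ManualAction.new("deploy:production", "production")
]

# Same filtering as Environment#actions_for above.
def actions_for(manual_actions, environment)
  return [] unless manual_actions
  manual_actions.select { |action| action.expanded_environment_name == environment }
end

p actions_for(manual_actions, "review/feature-x").map(&:name) # => ["deploy:review"]
```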
......@@ -11,6 +11,9 @@ class MergeRequestDiff < ActiveRecord::Base
belongs_to :merge_request
serialize :st_commits
serialize :st_diffs
state_machine :state, initial: :empty do
state :collected
state :overflow
......@@ -22,8 +25,7 @@ class MergeRequestDiff < ActiveRecord::Base
state :overflow_diff_lines_limit
end
serialize :st_commits
serialize :st_diffs
scope :viewable, -> { without_state(:empty) }
# All diff information is collected from repository after object is created.
# It allows you to override variables like head_commit_sha before getting diff.
......
......@@ -1196,7 +1196,7 @@ class Project < ActiveRecord::Base
"refs/heads/#{branch}",
force: true)
repository.copy_gitattributes(branch)
repository.expire_avatar_cache(branch)
repository.expire_avatar_cache
reload_default_branch
end
......
# == Schema Information
#
# Table name: services
#
# id :integer not null, primary key
# type :string(255)
# title :string(255)
# project_id :integer
# created_at :datetime
# updated_at :datetime
# active :boolean default(FALSE), not null
# properties :text
# template :boolean default(FALSE)
# push_events :boolean default(TRUE)
# issues_events :boolean default(TRUE)
# merge_requests_events :boolean default(TRUE)
# tag_push_events :boolean default(TRUE)
# note_events :boolean default(TRUE), not null
# build_events :boolean default(FALSE), not null
#
class JiraService < IssueTrackerService
include Gitlab::Routing.url_helpers
......@@ -30,6 +9,10 @@ class JiraService < IssueTrackerService
before_update :reset_password
def supported_events
%w(commit merge_request)
end
# {PROJECT-KEY}-{NUMBER} Examples: JIRA-1, PROJECT-1
def reference_pattern
@reference_pattern ||= %r{(?<issue>\b([A-Z][A-Z0-9_]+-)\d+)}
......@@ -137,19 +120,17 @@ class JiraService < IssueTrackerService
end
def create_cross_reference_note(mentioned, noteable, author)
jira_issue = jira_request { client.Issue.find(mentioned.id) }
unless can_cross_reference?(noteable)
return "Events for #{noteable.model_name.plural.humanize(capitalize: false)} are disabled."
end
return false unless jira_issue.present?
jira_issue = jira_request { client.Issue.find(mentioned.id) }
project = self.project
noteable_name = noteable.class.name.underscore.downcase
noteable_id = if noteable.is_a?(Commit)
noteable.id
else
noteable.iid
end
return unless jira_issue.present?
entity_url = build_entity_url(noteable_name.to_sym, noteable_id)
noteable_id = noteable.respond_to?(:iid) ? noteable.iid : noteable.id
noteable_type = noteable_name(noteable)
entity_url = build_entity_url(noteable_type, noteable_id)
data = {
user: {
......@@ -157,11 +138,11 @@ class JiraService < IssueTrackerService
url: resource_url(user_path(author)),
},
project: {
name: project.path_with_namespace,
url: resource_url(namespace_project_path(project.namespace, project))
name: self.project.path_with_namespace,
url: resource_url(namespace_project_path(project.namespace, self.project))
},
entity: {
name: noteable_name.humanize.downcase,
name: noteable_type.humanize.downcase,
url: entity_url,
title: noteable.title
}
......@@ -193,8 +174,16 @@ class JiraService < IssueTrackerService
private
def can_cross_reference?(noteable)
case noteable
when Commit then commit_events
when MergeRequest then merge_requests_events
else true
end
end
def close_issue(entity, issue)
return if issue.nil? || issue.resolution.present?
return if issue.nil? || issue.resolution.present? || !jira_issue_transition_id.present?
commit_id = if entity.is_a?(Commit)
entity.id
......@@ -290,18 +279,26 @@ class JiraService < IssueTrackerService
"#{Settings.gitlab.base_url.chomp("/")}#{resource}"
end
def build_entity_url(entity_name, entity_id)
def build_entity_url(noteable_type, entity_id)
polymorphic_url(
[
self.project.namespace.becomes(Namespace),
self.project,
entity_name
noteable_type.to_sym
],
id: entity_id,
host: Settings.gitlab.base_url
)
end
def noteable_name(noteable)
name = noteable.model_name.singular
# ProjectSnippet inherits from Snippet class so it causes
# routing error building the URL.
name == "project_snippet" ? "snippet" : name
end
# Handle errors when doing JIRA API calls
def jira_request
yield
......
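The JIRA snippet fix above hinges on `noteable_name`: `ProjectSnippet#model_name.singular` is `"project_snippet"`, for which no route helper exists, so it is collapsed to `"snippet"` before `build_entity_url` calls `polymorphic_url`. A standalone sketch of that mapping, with plain strings standing in for `ActiveModel::Name#singular`:

```ruby
# Same mapping as JiraService#noteable_name, outside Rails.
def noteable_name(singular_name)
  # ProjectSnippet inherits from Snippet, so its singular model name would
  # break polymorphic URL generation; collapse it to "snippet".
  singular_name == "project_snippet" ? "snippet" : singular_name
end

%w[commit merge_request project_snippet].each do |name|
  puts "#{name} -> #{noteable_name(name)}"
end
# commit -> commit
# merge_request -> merge_request
# project_snippet -> snippet
```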
......@@ -3,6 +3,7 @@ require 'securerandom'
require 'forwardable'
class Repository
<<<<<<< HEAD
include Elastic::RepositoriesSearch
class CommitError < StandardError; end
......@@ -13,11 +14,56 @@ class Repository
# Files to use as a project avatar in case no avatar was uploaded via the web
# UI.
AVATAR_FILES = %w{logo.png logo.jpg logo.gif}
=======
include Gitlab::ShellAdapter
attr_accessor :path_with_namespace, :project
class CommitError < StandardError; end
# Methods that cache data from the Git repository.
#
# Each entry in this Array should have a corresponding method with the exact
# same name. The cache key used by those methods must also match method's
# name.
#
# For example, for entry `:readme` there's a method called `readme` which
# stores its data in the `readme` cache key.
CACHED_METHODS = %i(size commit_count readme version contribution_guide
changelog license_blob license_key gitignore koding_yml
gitlab_ci_yml branch_names tag_names branch_count
tag_count avatar exists? empty? root_ref)
# Certain method caches should be refreshed when certain types of files are
# changed. This Hash maps file types (as returned by Gitlab::FileDetector) to
# the corresponding methods to call for refreshing caches.
METHOD_CACHES_FOR_FILE_TYPES = {
readme: :readme,
changelog: :changelog,
license: %i(license_blob license_key),
contributing: :contribution_guide,
version: :version,
gitignore: :gitignore,
koding: :koding_yml,
gitlab_ci: :gitlab_ci_yml,
avatar: :avatar
}
# Wraps around the given method and caches its output in Redis and an instance
# variable.
#
# This only works for methods that do not take any arguments.
def self.cache_method(name, fallback: nil)
original = :"_uncached_#{name}"
>>>>>>> ce/master
alias_method(original, name)
define_method(name) do
cache_method_output(name, fallback: fallback) { __send__(original) }
end
end
def self.storages
Gitlab.config.repositories.storages
end
......@@ -48,24 +94,6 @@ class Repository
)
end
def exists?
return @exists unless @exists.nil?
@exists = cache.fetch(:exists?) do
begin
raw_repository && raw_repository.rugged ? true : false
rescue Gitlab::Git::Repository::NoRepository
false
end
end
end
def empty?
return @empty unless @empty.nil?
@empty = cache.fetch(:empty?) { raw_repository.empty? }
end
#
# Git repository can contains some hidden refs like:
# /refs/notes/*
......@@ -278,10 +306,6 @@ class Repository
branch_names + tag_names
end
def branch_names
@branch_names ||= cache.fetch(:branch_names) { branches.map(&:name) }
end
def branch_exists?(branch_name)
branch_names.include?(branch_name)
end
......@@ -331,34 +355,6 @@ class Repository
ref_exists?(keep_around_ref_name(sha))
end
def tag_names
cache.fetch(:tag_names) { raw_repository.tag_names }
end
def commit_count
cache.fetch(:commit_count) do
begin
raw_repository.commit_count(self.root_ref)
rescue
0
end
end
end
def branch_count
@branch_count ||= cache.fetch(:branch_count) { branches.size }
end
def tag_count
@tag_count ||= cache.fetch(:tag_count) { raw_repository.rugged.tags.count }
end
# Return repo size in megabytes
# Cached in redis
def size
cache.fetch(:size) { raw_repository.size }
end
def diverging_commit_counts(branch)
root_ref_hash = raw_repository.rev_parse_target(root_ref).oid
cache.fetch(:"diverging_commit_counts_#{branch.name}") do
......@@ -374,48 +370,55 @@ class Repository
end
end
# Keys for data that can be affected for any commit push.
def cache_keys
%i(size commit_count
readme version contribution_guide changelog
license_blob license_key gitignore koding_yml)
def expire_tags_cache
expire_method_caches(%i(tag_names tag_count))
@tags = nil
end
# Keys for data on branch/tag operations.
def cache_keys_for_branches_and_tags
%i(branch_names tag_names branch_count tag_count)
def expire_branches_cache
expire_method_caches(%i(branch_names branch_count))
@local_branches = nil
end
def build_cache
(cache_keys + cache_keys_for_branches_and_tags).each do |key|
unless cache.exist?(key)
send(key)
end
end
def expire_statistics_caches
expire_method_caches(%i(size commit_count))
end
def expire_tags_cache
cache.expire(:tag_names)
@tags = nil
def expire_all_method_caches
expire_method_caches(CACHED_METHODS)
end
def expire_branches_cache
cache.expire(:branch_names)
@branch_names = nil
@local_branches = nil
# Expires the caches of a specific set of methods
def expire_method_caches(methods)
methods.each do |key|
cache.expire(key)
ivar = cache_instance_variable_name(key)
remove_instance_variable(ivar) if instance_variable_defined?(ivar)
end
end
def expire_cache(branch_name = nil, revision = nil)
cache_keys.each do |key|
cache.expire(key)
def expire_avatar_cache
expire_method_caches(%i(avatar))
end
# Refreshes the method caches of this repository.
#
# types - An Array of file types (e.g. `:readme`) used to refresh extra
# caches.
def refresh_method_caches(types)
to_refresh = []
types.each do |type|
methods = METHOD_CACHES_FOR_FILE_TYPES[type.to_sym]
to_refresh.concat(Array(methods)) if methods
end
expire_branch_cache(branch_name)
expire_avatar_cache(branch_name, revision)
expire_method_caches(to_refresh)
# This ensures this particular cache is flushed after the first commit to a
# new repository.
expire_emptiness_caches if empty?
to_refresh.each { |method| send(method) }
end
def expire_branch_cache(branch_name = nil)
......@@ -434,15 +437,14 @@ class Repository
end
def expire_root_ref_cache
cache.expire(:root_ref)
@root_ref = nil
expire_method_caches(%i(root_ref))
end
# Expires the cache(s) used to determine if a repository is empty or not.
def expire_emptiness_caches
cache.expire(:empty?)
@empty = nil
return unless empty?
expire_method_caches(%i(empty?))
expire_has_visible_content_cache
end
......@@ -451,51 +453,22 @@ class Repository
@has_visible_content = nil
end
def expire_branch_count_cache
cache.expire(:branch_count)
@branch_count = nil
end
def expire_tag_count_cache
cache.expire(:tag_count)
@tag_count = nil
end
def lookup_cache
@lookup_cache ||= {}
end
def expire_avatar_cache(branch_name = nil, revision = nil)
# Avatars are pulled from the default branch, thus if somebody pushes to a
# different branch there's no need to expire anything.
return if branch_name && branch_name != root_ref
# We don't want to flush the cache if the commit didn't actually make any
# changes to any of the possible avatar files.
if revision && commit = self.commit(revision)
return unless commit.raw_diffs(deltas_only: true).
any? { |diff| AVATAR_FILES.include?(diff.new_path) }
end
cache.expire(:avatar)
@avatar = nil
end
def expire_exists_cache
cache.expire(:exists?)
@exists = nil
expire_method_caches(%i(exists?))
end
# expire cache that doesn't depend on repository data (when expiring)
def expire_content_cache
expire_tags_cache
expire_tag_count_cache
expire_branches_cache
expire_branch_count_cache
expire_root_ref_cache
expire_emptiness_caches
expire_exists_cache
expire_statistics_caches
end
# Runs code after a repository has been created.
......@@ -510,9 +483,8 @@ class Repository
# Runs code just before a repository is deleted.
def before_delete
expire_exists_cache
expire_cache if exists?
expire_all_method_caches
expire_branch_cache if exists?
expire_content_cache
repository_event(:remove_repository)
......@@ -529,9 +501,9 @@ class Repository
# Runs code before pushing (= creating or removing) a tag.
def before_push_tag
expire_cache
expire_statistics_caches
expire_emptiness_caches
expire_tags_cache
expire_tag_count_cache
repository_event(:push_tag)
end
......@@ -539,7 +511,7 @@ class Repository
# Runs code before removing a tag.
def before_remove_tag
expire_tags_cache
expire_tag_count_cache
expire_statistics_caches
repository_event(:remove_tag)
end
......@@ -551,12 +523,14 @@ class Repository
# Runs code after a repository has been forked/imported.
def after_import
expire_content_cache
build_cache
expire_tags_cache
expire_branches_cache
end
# Runs code after a new commit has been pushed.
def after_push_commit(branch_name, revision)
expire_cache(branch_name, revision)
def after_push_commit(branch_name)
expire_statistics_caches
expire_branch_cache(branch_name)
repository_event(:push_commit, branch: branch_name)
end
......@@ -565,7 +539,6 @@ class Repository
def after_create_branch
expire_branches_cache
expire_has_visible_content_cache
expire_branch_count_cache
repository_event(:push_branch)
end
......@@ -580,7 +553,6 @@ class Repository
# Runs code after an existing branch has been removed.
def after_remove_branch
expire_has_visible_content_cache
expire_branch_count_cache
expire_branches_cache
end
......@@ -607,86 +579,127 @@ class Repository
Gitlab::Git::Blob.raw(self, oid)
end
def root_ref
if raw_repository
raw_repository.root_ref
else
# When the repo does not exist we raise this error so no data is cached.
raise Rugged::ReferenceError
end
end
cache_method :root_ref
def exists?
refs_directory_exists?
end
cache_method :exists?
def empty?
raw_repository.empty?
end
cache_method :empty?
# The size of this repository in megabytes.
def size
exists? ? raw_repository.size : 0.0
end
cache_method :size, fallback: 0.0
def commit_count
root_ref ? raw_repository.commit_count(root_ref) : 0
end
cache_method :commit_count, fallback: 0
def branch_names
branches.map(&:name)
end
cache_method :branch_names, fallback: []
def tag_names
raw_repository.tag_names
end
cache_method :tag_names, fallback: []
def branch_count
branches.size
end
cache_method :branch_count, fallback: 0
def tag_count
raw_repository.rugged.tags.count
end
cache_method :tag_count, fallback: 0
def avatar
if tree = file_on_head(:avatar)
tree.path
end
end
cache_method :avatar
def readme
cache.fetch(:readme) { tree(:head).readme }
if head = tree(:head)
head.readme
end
end
cache_method :readme
def version
cache.fetch(:version) do
tree(:head).blobs.find do |file|
file.name.casecmp('version').zero?
end
end
file_on_head(:version)
end
cache_method :version
def contribution_guide
cache.fetch(:contribution_guide) do
tree(:head).blobs.find do |file|
file.contributing?
end
end
file_on_head(:contributing)
end
cache_method :contribution_guide
def changelog
cache.fetch(:changelog) do
file_on_head(/\A(changelog|history|changes|news)/i)
end
file_on_head(:changelog)
end
cache_method :changelog
def license_blob
return nil unless head_exists?
cache.fetch(:license_blob) do
file_on_head(/\A(licen[sc]e|copying)(\..+|\z)/i)
end
file_on_head(:license)
end
cache_method :license_blob
def license_key
return nil unless head_exists?
return unless exists?
cache.fetch(:license_key) do
Licensee.license(path).try(:key)
end
Licensee.license(path).try(:key)
end
cache_method :license_key
def gitignore
return nil if !exists? || empty?
cache.fetch(:gitignore) do
file_on_head(/\A\.gitignore\z/)
end
file_on_head(:gitignore)
end
cache_method :gitignore
def koding_yml
return nil unless head_exists?
cache.fetch(:koding_yml) do
file_on_head(/\A\.koding\.yml\z/)
end
file_on_head(:koding)
end
cache_method :koding_yml
def gitlab_ci_yml
return nil unless head_exists?
@gitlab_ci_yml ||= tree(:head).blobs.find do |file|
file.name == '.gitlab-ci.yml'
end
rescue Rugged::ReferenceError
# For unknow reason spinach scenario "Scenario: I change project path"
# lead to "Reference 'HEAD' not found" exception from Repository#empty?
nil
file_on_head(:gitlab_ci)
end
cache_method :gitlab_ci_yml
def head_commit
@head_commit ||= commit(self.root_ref)
end
def head_tree
@head_tree ||= Tree.new(self, head_commit.sha, nil)
if head_commit
@head_tree ||= Tree.new(self, head_commit.sha, nil)
end
end
def tree(sha = :head, path = nil, recursive: false)
if sha == :head
return unless head_commit
if path.nil?
return head_tree
else
......@@ -861,10 +874,6 @@ class Repository
@tags ||= raw_repository.tags
end
def root_ref
@root_ref ||= cache.fetch(:root_ref) { raw_repository.root_ref }
end
def commit_dir(user, path, message, branch, author_email: nil, author_name: nil)
update_branch_with_hooks(user, branch) do |ref|
options = {
......@@ -1288,6 +1297,7 @@ class Repository
end
end
<<<<<<< HEAD
def main_language
return unless head_exists?
......@@ -1296,26 +1306,70 @@ class Repository
def avatar
return nil unless exists?
@avatar ||= cache.fetch(:avatar) do
AVATAR_FILES.find do |file|
blob_at_branch(root_ref, file)
=======
# Caches the supplied block both in a cache and in an instance variable.
#
# The cache key and instance variable are named the same way as the value of
# the `key` argument.
#
# This method will return `nil` if the corresponding instance variable is also
# set to `nil`. This ensures we don't keep yielding the block when it returns
# `nil`.
#
# key - The name of the key to cache the data in.
# fallback - A value to fall back to in the event of a Git error.
def cache_method_output(key, fallback: nil, &block)
ivar = cache_instance_variable_name(key)
>>>>>>> ce/master
if instance_variable_defined?(ivar)
instance_variable_get(ivar)
else
begin
instance_variable_set(ivar, cache.fetch(key, &block))
rescue Rugged::ReferenceError, Gitlab::Git::Repository::NoRepository
# if e.g. HEAD or the entire repository doesn't exist we want to
# gracefully handle this and not cache anything.
fallback
end
end
end
<<<<<<< HEAD
def head_exists?
exists? && !empty? && !rugged.head_unborn?
end
private
=======
def cache_instance_variable_name(key)
:"@#{key.to_s.tr('?!', '')}"
end
>>>>>>> ce/master
def cache
@cache ||= RepositoryCache.new(path_with_namespace, @project.id)
def file_on_head(type)
if head = tree(:head)
head.blobs.find do |file|
Gitlab::FileDetector.type_of(file.name) == type
end
end
end
<<<<<<< HEAD
def file_on_head(regex)
tree(:head).blobs.find { |file| file.name =~ regex }
=======
private
def refs_directory_exists?
return false unless path_with_namespace
File.exist?(File.join(path_to_repo, 'refs'))
end
def cache
@cache ||= RepositoryCache.new(path_with_namespace, @project.id)
>>>>>>> ce/master
end
def tags_sorted_by_committed_date
......
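The CE side of this diff replaces the hand-rolled `cache.fetch` plus instance-variable pairs with the `cache_method`/`cache_method_output` helpers and `expire_method_caches`. A condensed, self-contained sketch of that pattern; an in-memory Hash stands in for the Redis-backed `RepositoryCache`, and the error handling is reduced to a bare rescue:

```ruby
# Self-contained sketch of the Repository.cache_method pattern above.
class FakeRepository
  def self.cache_method(name, fallback: nil)
    original = :"_uncached_#{name}"
    alias_method(original, name)
    define_method(name) do
      cache_method_output(name, fallback: fallback) { __send__(original) }
    end
  end

  def initialize
    @cache = {} # stand-in for the Redis-backed RepositoryCache
  end

  def commit_count
    puts "expensive git call"
    42
  end
  cache_method :commit_count, fallback: 0

  # Same shape as Repository#expire_method_caches above.
  def expire_method_caches(methods)
    methods.each do |key|
      @cache.delete(key)
      ivar = :"@#{key.to_s.tr('?!', '')}"
      remove_instance_variable(ivar) if instance_variable_defined?(ivar)
    end
  end

  private

  def cache_method_output(key, fallback: nil)
    ivar = :"@#{key.to_s.tr('?!', '')}"
    return instance_variable_get(ivar) if instance_variable_defined?(ivar)
    instance_variable_set(ivar, @cache[key] ||= yield)
  rescue StandardError
    fallback # e.g. a missing repository: return the fallback, cache nothing
  end
end

repo = FakeRepository.new
repo.commit_count                          # "expensive git call" => 42
repo.commit_count                          # served from the instance variable
repo.expire_method_caches(%i(commit_count))
repo.commit_count                          # recomputed after expiry
```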
......@@ -8,6 +8,7 @@ class Service < ActiveRecord::Base
default_value_for :push_events, true
default_value_for :issues_events, true
default_value_for :confidential_issues_events, true
default_value_for :commit_events, true
default_value_for :merge_requests_events, true
default_value_for :tag_push_events, true
default_value_for :note_events, true
......
......@@ -7,6 +7,7 @@ class Snippet < ActiveRecord::Base
include Sortable
include Elastic::SnippetsSearch
include Awardable
include Mentionable
cache_markdown_field :title, pipeline: :single_line
cache_markdown_field :content
......
......@@ -18,7 +18,9 @@ class Tree
def readme
return @readme if defined?(@readme)
available_readmes = blobs.select(&:readme?)
available_readmes = blobs.select do |blob|
Gitlab::FileDetector.type_of(blob.name) == :readme
end
previewable_readmes = available_readmes.select do |blob|
previewable?(blob.name)
......
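`Tree#readme`, like the `file_on_head` rework in `Repository` above, now classifies well-known files by name through `Gitlab::FileDetector` instead of per-call regexes. A quick probe of the call this relies on, meant for a GitLab Rails console; the expected symbols are inferred from `METHOD_CACHES_FOR_FILE_TYPES` above and may not match the detector exactly:

```ruby
# Console sketch: Gitlab::FileDetector.type_of is the call used by
# Tree#readme and Repository#file_on_head in this diff.
%w[README.md CHANGELOG LICENSE .gitlab-ci.yml app.rb].each do |name|
  puts format('%-15s => %p', name, Gitlab::FileDetector.type_of(name))
end
# Expected (assumption): :readme, :changelog, :license, :gitlab_ci, nil
```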
......@@ -247,19 +247,19 @@ class User < ActiveRecord::Base
def filter(filter_name)
case filter_name
when 'admins'
self.admins
admins
when 'blocked'
self.blocked
blocked
when 'two_factor_disabled'
self.without_two_factor
without_two_factor
when 'two_factor_enabled'
self.with_two_factor
with_two_factor
when 'wop'
self.without_projects
without_projects
when 'external'
self.external
external
else
self.active
active
end
end
......@@ -378,7 +378,7 @@ class User < ActiveRecord::Base
end
def generate_password
if self.force_random_password
if force_random_password
self.password = self.password_confirmation = Devise.friendly_token.first(Devise.password_length.min)
end
end
......@@ -419,56 +419,55 @@ class User < ActiveRecord::Base
end
def two_factor_otp_enabled?
self.otp_required_for_login?
otp_required_for_login?
end
def two_factor_u2f_enabled?
self.u2f_registrations.exists?
u2f_registrations.exists?
end
def namespace_uniq
# Return early if username already failed the first uniqueness validation
return if self.errors.key?(:username) &&
self.errors[:username].include?('has already been taken')
return if errors.key?(:username) &&
errors[:username].include?('has already been taken')
namespace_name = self.username
existing_namespace = Namespace.by_path(namespace_name)
if existing_namespace && existing_namespace != self.namespace
self.errors.add(:username, 'has already been taken')
existing_namespace = Namespace.by_path(username)
if existing_namespace && existing_namespace != namespace
errors.add(:username, 'has already been taken')
end
end
def avatar_type
unless self.avatar.image?
self.errors.add :avatar, "only images allowed"
unless avatar.image?
errors.add :avatar, "only images allowed"
end
end
def unique_email
if !self.emails.exists?(email: self.email) && Email.exists?(email: self.email)
self.errors.add(:email, 'has already been taken')
if !emails.exists?(email: email) && Email.exists?(email: email)
errors.add(:email, 'has already been taken')
end
end
def owns_notification_email
return if self.temp_oauth_email?
return if temp_oauth_email?
self.errors.add(:notification_email, "is not an email you own") unless self.all_emails.include?(self.notification_email)
errors.add(:notification_email, "is not an email you own") unless all_emails.include?(notification_email)
end
def owns_public_email
return if self.public_email.blank?
return if public_email.blank?
self.errors.add(:public_email, "is not an email you own") unless self.all_emails.include?(self.public_email)
errors.add(:public_email, "is not an email you own") unless all_emails.include?(public_email)
end
def update_emails_with_primary_email
primary_email_record = self.emails.find_by(email: self.email)
primary_email_record = emails.find_by(email: email)
if primary_email_record
primary_email_record.destroy
self.emails.create(email: self.email_was)
emails.create(email: email_was)
self.update_secondary_emails!
update_secondary_emails!
end
end
......@@ -656,7 +655,7 @@ class User < ActiveRecord::Base
end
def project_deploy_keys
DeployKey.unscoped.in_projects(self.authorized_projects.pluck(:id)).distinct(:id)
DeployKey.unscoped.in_projects(authorized_projects.pluck(:id)).distinct(:id)
end
def accessible_deploy_keys
......@@ -672,38 +671,38 @@ class User < ActiveRecord::Base
end
def sanitize_attrs
%w(name username skype linkedin twitter).each do |attr|
value = self.send(attr)
self.send("#{attr}=", Sanitize.clean(value)) if value.present?
%w[name username skype linkedin twitter].each do |attr|
value = public_send(attr)
public_send("#{attr}=", Sanitize.clean(value)) if value.present?
end
end
def set_notification_email
if self.notification_email.blank? || !self.all_emails.include?(self.notification_email)
self.notification_email = self.email
if notification_email.blank? || !all_emails.include?(notification_email)
self.notification_email = email
end
end
def set_public_email
if self.public_email.blank? || !self.all_emails.include?(self.public_email)
if public_email.blank? || !all_emails.include?(public_email)
self.public_email = ''
end
end
def update_secondary_emails!
self.set_notification_email
self.set_public_email
self.save if self.notification_email_changed? || self.public_email_changed?
set_notification_email
set_public_email
save if notification_email_changed? || public_email_changed?
end
def set_projects_limit
# `User.select(:id)` raises
# `ActiveModel::MissingAttributeError: missing attribute: projects_limit`
# without this safeguard!
return unless self.has_attribute?(:projects_limit)
return unless has_attribute?(:projects_limit)
connection_default_value_defined = new_record? && !projects_limit_changed?
return unless self.projects_limit.nil? || connection_default_value_defined
return unless projects_limit.nil? || connection_default_value_defined
self.projects_limit = current_application_settings.default_projects_limit
end
......@@ -733,7 +732,7 @@ class User < ActiveRecord::Base
def with_defaults
User.defaults.each do |k, v|
self.send("#{k}=", v)
public_send("#{k}=", v)
end
self
......@@ -753,7 +752,7 @@ class User < ActiveRecord::Base
# Thus it will automatically generate a new fragment
# when the event is updated because the key changes.
def reset_events_cache
Event.where(author_id: self.id).
Event.where(author_id: id).
order('id DESC').limit(1000).
update_all(updated_at: Time.now)
end
......@@ -786,8 +785,8 @@ class User < ActiveRecord::Base
def all_emails
all_emails = []
all_emails << self.email unless self.temp_oauth_email?
all_emails.concat(self.emails.map(&:email))
all_emails << email unless temp_oauth_email?
all_emails.concat(emails.map(&:email))
all_emails
end
......@@ -801,21 +800,21 @@ class User < ActiveRecord::Base
def ensure_namespace_correct
# Ensure user has namespace
self.create_namespace!(path: self.username, name: self.username) unless self.namespace
create_namespace!(path: username, name: username) unless namespace
if self.username_changed?
self.namespace.update_attributes(path: self.username, name: self.username)
if username_changed?
namespace.update_attributes(path: username, name: username)
end
end
def post_create_hook
log_info("User \"#{self.name}\" (#{self.email}) was created")
notification_service.new_user(self, @reset_token) if self.created_by_id
log_info("User \"#{name}\" (#{email}) was created")
notification_service.new_user(self, @reset_token) if created_by_id
system_hook_service.execute_hooks_for(self, :create)
end
def post_destroy_hook
log_info("User \"#{self.name}\" (#{self.email}) was removed")
log_info("User \"#{name}\" (#{email}) was removed")
system_hook_service.execute_hooks_for(self, :destroy)
end
......@@ -863,7 +862,7 @@ class User < ActiveRecord::Base
end
def oauth_authorized_tokens
Doorkeeper::AccessToken.where(resource_owner_id: self.id, revoked_at: nil)
Doorkeeper::AccessToken.where(resource_owner_id: id, revoked_at: nil)
end
# Returns the projects a user contributed to in the last year.
......@@ -994,7 +993,7 @@ class User < ActiveRecord::Base
end
def ensure_external_user_rights
return unless self.external?
return unless external?
self.can_create_group = false
self.projects_limit = 0
......@@ -1006,7 +1005,7 @@ class User < ActiveRecord::Base
if current_application_settings.domain_blacklist_enabled?
blocked_domains = current_application_settings.domain_blacklist
if domain_matches?(blocked_domains, self.email)
if domain_matches?(blocked_domains, email)
error = 'is not from an allowed domain.'
valid = false
end
......@@ -1014,7 +1013,7 @@ class User < ActiveRecord::Base
allowed_domains = current_application_settings.domain_whitelist
unless allowed_domains.blank?
if domain_matches?(allowed_domains, self.email)
if domain_matches?(allowed_domains, email)
valid = true
else
error = "domain is not authorized for sign-up"
......@@ -1022,7 +1021,7 @@ class User < ActiveRecord::Base
end
end
self.errors.add(:email, error) unless valid
errors.add(:email, error) unless valid
valid
end
......
......@@ -13,8 +13,11 @@ class IssuableEntity < Grape::Entity
expose :created_at
expose :updated_at
expose :deleted_at
<<<<<<< HEAD
expose :time_estimate
expose :total_time_spent
expose :human_time_estimate
expose :human_total_time_spent
=======
>>>>>>> ce/master
end
......@@ -4,7 +4,10 @@ class IssueEntity < IssuableEntity
expose :due_date
expose :moved_to_id
expose :project_id
<<<<<<< HEAD
expose :weight
=======
>>>>>>> ce/master
expose :milestone, using: API::Entities::Milestone
expose :labels, using: LabelEntity
end
class MergeRequestEntity < IssuableEntity
<<<<<<< HEAD
expose :approvals_before_merge
=======
>>>>>>> ce/master
expose :in_progress_merge_commit_sha
expose :locked_at
expose :merge_commit_sha
......@@ -8,8 +11,11 @@ class MergeRequestEntity < IssuableEntity
expose :merge_status
expose :merge_user_id
expose :merge_when_build_succeeds
<<<<<<< HEAD
expose :rebase_commit_sha
expose :rebase_in_progress?, if: { type: :full }
=======
>>>>>>> ce/master
expose :source_branch
expose :source_project_id
expose :target_branch
......
......@@ -18,7 +18,7 @@ class GitPushService < BaseService
#
def execute
@project.repository.after_create if @project.empty_repo?
@project.repository.after_push_commit(branch_name, params[:newrev])
@project.repository.after_push_commit(branch_name)
if push_remove_branch?
@project.repository.after_remove_branch
......@@ -55,12 +55,32 @@ class GitPushService < BaseService
execute_related_hooks
perform_housekeeping
update_caches
end
def update_gitattributes
@project.repository.copy_gitattributes(params[:ref])
end
def update_caches
if is_default_branch?
paths = Set.new
@push_commits.each do |commit|
commit.raw_diffs(deltas_only: true).each do |diff|
paths << diff.new_path
end
end
types = Gitlab::FileDetector.types_in_paths(paths.to_a)
else
types = []
end
ProjectCacheWorker.perform_async(@project.id, types)
end
protected
def execute_related_hooks
......@@ -74,8 +94,12 @@ class GitPushService < BaseService
EventCreateService.new.push(@project, current_user, build_push_data)
@project.execute_hooks(build_push_data.dup, :push_hooks)
@project.execute_services(build_push_data.dup, :push_hooks)
<<<<<<< HEAD
Ci::CreatePipelineService.new(@project, current_user, build_push_data).execute(mirror_update: mirror_update)
ProjectCacheWorker.perform_async(@project.id)
=======
Ci::CreatePipelineService.new(@project, current_user, build_push_data).execute
>>>>>>> ce/master
if push_remove_branch?
AfterBranchDeleteService
......
module MergeRequests
class AddTodoWhenBuildFailsService < MergeRequests::BaseService
# Adds a todo to the parent merge_request when a CI build fails
#
def execute(commit_status)
return if commit_status.allow_failure?
commit_status_merge_requests(commit_status) do |merge_request|
todo_service.merge_request_build_failed(merge_request)
end
end
# Closes any pending build failed todos for the parent MRs when a build is retried
# Closes any pending build failed todos for the parent MRs when a
# build is retried
#
def close(commit_status)
commit_status_merge_requests(commit_status) do |merge_request|
todo_service.merge_request_build_retried(merge_request)
......
......@@ -48,11 +48,11 @@ module MergeRequests
end
# See if source and target branches exist
unless merge_request.source_project.commit(merge_request.source_branch)
if merge_request.source_branch.present? && !merge_request.source_project.commit(merge_request.source_branch)
messages << "Source branch \"#{merge_request.source_branch}\" does not exist"
end
unless merge_request.target_project.commit(merge_request.target_branch)
if merge_request.target_branch.present? && !merge_request.target_project.commit(merge_request.target_branch)
messages << "Target branch \"#{merge_request.target_branch}\" does not exist"
end
......
......@@ -61,7 +61,15 @@ module MergeRequests
merge_requests = filter_merge_requests(merge_requests)
merge_requests.each do |merge_request|
reload_diff(merge_request) unless branch_removed?
if merge_request.source_branch == @branch_name || force_push?
merge_request.reload_diff
else
mr_commit_ids = merge_request.commits.map(&:id)
push_commit_ids = @commits.map(&:id)
matches = mr_commit_ids & push_commit_ids
merge_request.reload_diff if matches.any?
end
merge_request.mark_as_unchecked
end
end
......@@ -180,16 +188,5 @@ module MergeRequests
def branch_removed?
Gitlab::Git.blank_ref?(@newrev)
end
def reload_diff(merge_request)
if merge_request.source_branch == @branch_name || force_push?
merge_request.reload_diff
else
mr_commit_ids = merge_request.commits.map(&:id)
push_commit_ids = @commits.map(&:id)
matches = mr_commit_ids & push_commit_ids
merge_request.reload_diff if matches.any?
end
end
end
end
......@@ -5,8 +5,6 @@
%div.form-group
= f.label :password
= f.password_field :password, class: "form-control bottom", required: true, title: "This field is required."
%div.submit-container.move-submit-down
= f.submit "Sign in", class: "btn btn-save"
- if devise_mapping.rememberable?
.remember-me.checkbox
%label{for: "user_remember_me"}
......@@ -14,3 +12,5 @@
%span Remember me
.pull-right.forgot-password
= link_to "Forgot your password?", new_password_path(resource_name)
%div.submit-container.move-submit-down
= f.submit "Sign in", class: "btn btn-save"
......@@ -13,7 +13,7 @@
= spinner
:javascript
var activity = new Activities();
var activity = new gl.Activities();
$(document).on('page:restore', function (event) {
activity.reloadActivities()
})
......@@ -11,12 +11,17 @@
= render 'projects/merge_requests/widget/open/geo'
- if @project.archived?
= render 'projects/merge_requests/widget/open/archived'
<<<<<<< HEAD
- elsif @project.above_size_limit?
= render 'projects/merge_requests/widget/open/size_limit_reached'
- elsif @merge_request.commits.blank?
= render 'projects/merge_requests/widget/open/nothing'
=======
>>>>>>> ce/master
- elsif @merge_request.branch_missing?
= render 'projects/merge_requests/widget/open/missing_branch'
- elsif @merge_request.commits.blank?
= render 'projects/merge_requests/widget/open/nothing'
- elsif @merge_request.unchecked?
= render 'projects/merge_requests/widget/open/check'
- elsif @merge_request.cannot_be_merged? && !resolved_conflicts
......
# Worker for updating any project specific caches.
#
# This worker runs at most once every 15 minutes per project. This is to ensure
# that multiple instances of jobs for this worker don't hammer the underlying
# storage engine as much.
class ProjectCacheWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
......@@ -10,46 +6,34 @@ class ProjectCacheWorker
LEASE_TIMEOUT = 15.minutes.to_i
def self.lease_for(project_id)
Gitlab::ExclusiveLease.
new("project_cache_worker:#{project_id}", timeout: LEASE_TIMEOUT)
end
# project_id - The ID of the project for which to flush the cache.
# refresh - An Array containing extra types of data to refresh such as
# `:readme` to flush the README and `:changelog` to flush the
# CHANGELOG.
def perform(project_id, refresh = [])
project = Project.find_by(id: project_id)
# Overwrite Sidekiq's implementation so we only schedule when actually needed.
def self.perform_async(project_id)
# If a lease for this project is still being held there's no point in
# scheduling a new job.
super unless lease_for(project_id).exists?
end
return unless project && project.repository.exists?
def perform(project_id)
if try_obtain_lease_for(project_id)
Rails.logger.
info("Obtained ProjectCacheWorker lease for project #{project_id}")
else
Rails.logger.
info("Could not obtain ProjectCacheWorker lease for project #{project_id}")
return
end
update_repository_size(project)
project.update_commit_count
update_caches(project_id)
project.repository.refresh_method_caches(refresh.map(&:to_sym))
end
def update_caches(project_id)
project = Project.find(project_id)
def update_repository_size(project)
return unless try_obtain_lease_for(project.id, :update_repository_size)
return unless project.repository.exists?
Rails.logger.info("Updating repository size for project #{project.id}")
project.update_repository_size
project.update_commit_count
if project.repository.root_ref
project.repository.build_cache
end
end
def try_obtain_lease_for(project_id)
self.class.lease_for(project_id).try_obtain
private
def try_obtain_lease_for(project_id, section)
Gitlab::ExclusiveLease.
new("project_cache_worker:#{project_id}:#{section}", timeout: LEASE_TIMEOUT).
try_obtain
end
end
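After the rework above the worker accepts a second argument: the file types whose method caches should be refreshed, which `GitPushService#update_caches` derives from the paths touched by the push via `Gitlab::FileDetector.types_in_paths`. A hedged sketch of enqueueing it from a GitLab console; the project lookup is hypothetical:

```ruby
# Console sketch of the reworked ProjectCacheWorker API.
project = Project.find_by(path: 'gitlab-ce') # hypothetical lookup

# No extra types: the worker only updates repository size and commit count.
ProjectCacheWorker.perform_async(project.id)

# A push that touched README.md and CHANGELOG also refreshes those method
# caches, mirroring what GitPushService#update_caches computes.
ProjectCacheWorker.perform_async(project.id, %i(readme changelog))
```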
---
title: Fix activity page endless scroll on large viewports
merge_request: 7608
author:
---
title: Fix regression causing bad error message to appear on Merge Request form
merge_request: 7599
author: Alex Sanford
---
title: Add deployment command to ChatOps
merge_request: 7619
author:
---
title: Add api endpoint for creating a pipeline
merge_request: 7209
author: Ido Leibovich
---
title: Fix 500 error when group name ends with git
merge_request: 7630
author:
---
title: Fix 404 on some group pages when name contains dot
merge_request: 7614
author:
---
title: Send credentials (currently for registry only) with build data to GitLab Runner
merge_request: 7474
author:
---
title: Added permissions per stage to cycle analytics endpoint
merge_request:
author:
---
title: Do not create a new TODO when failed build is allowed to fail
merge_request: 7618
author:
---
title: Do not create a MergeRequestDiff record when source branch is deleted
merge_request: 7481
author:
---
title: Fix errors happening when source branch of merge request is removed and then restored
merge_request: 7568
author:
---
title: Fix JIRA references for project snippets
merge_request:
author:
---
title: Allow enabling and disabling commit and MR events for JIRA
merge_request:
author:
---
title: Remove unnecessary self from user model
merge_request: 7551
author: Semyon Pupkov
---
title: Rework cache invalidation so only changed data is refreshed
merge_request: 7360
author:
......@@ -14,7 +14,9 @@ end
resources :groups, only: [:index, :new, :create]
scope(path: 'groups/:id', controller: :groups) do
scope(path: 'groups/:id',
controller: :groups,
constraints: { id: Gitlab::Regex.namespace_route_regex }) do
get :edit, as: :edit_group
get :issues, as: :issues_group
get :merge_requests, as: :merge_requests_group
......@@ -22,6 +24,7 @@ scope(path: 'groups/:id', controller: :groups) do
get :activity, as: :activity_group
end
<<<<<<< HEAD
scope(path: 'groups/:group_id', module: :groups, as: :group) do
## EE-specific
resource :analytics, only: [:show]
......@@ -34,6 +37,12 @@ scope(path: 'groups/:group_id', module: :groups, as: :group) do
resources :ldap_group_links, only: [:index, :create, :destroy]
## EE-specific
=======
scope(path: 'groups/:group_id',
module: :groups,
as: :group,
constraints: { group_id: Gitlab::Regex.namespace_route_regex }) do
>>>>>>> ce/master
resources :group_members, only: [:index, :create, :update, :destroy], concerns: :access_requestable do
post :resend_invite, on: :member
delete :leave, on: :collection
......@@ -61,4 +70,4 @@ scope(path: 'groups/:group_id', module: :groups, as: :group) do
end
# Must be last route in this file
get 'groups/:id' => 'groups#show', as: :group_canonical
get 'groups/:id' => 'groups#show', as: :group_canonical, constraints: { id: Gitlab::Regex.namespace_route_regex }
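The added `constraints: { id: Gitlab::Regex.namespace_route_regex }` options tie into the "group name ends with git" and "name contains dot" changelog entries above: the group routes only match ids the namespace regex accepts, so other paths fall through to later routes instead of erroring. A rough illustration with a hypothetical stand-in regex (the real pattern lives in `Gitlab::Regex` and is more involved):

```ruby
# Hypothetical stand-in for Gitlab::Regex.namespace_route_regex: allow dots in
# the id segment but refuse a trailing ".git" or ".atom".
NAMESPACE_ROUTE_REGEX = /[a-zA-Z0-9_.][a-zA-Z0-9_\-.]*(?<!\.git|\.atom)/

%w[gitlab-org my.group my.group.git].each do |id|
  verdict = /\A#{NAMESPACE_ROUTE_REGEX}\z/.match(id) ? 'matches the constraint' : 'does not match'
  puts "#{id}: #{verdict}"
end
```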
class AddCommitEventsToServices < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_column_with_default(:services, :commit_events, :boolean, default: true, allow_null: false)
end
def down
remove_column(:services, :commit_events)
end
end
......@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20161117114805) do
ActiveRecord::Schema.define(version: 20161118183841) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
......@@ -115,9 +115,6 @@ ActiveRecord::Schema.define(version: 20161117114805) do
t.integer "housekeeping_incremental_repack_period", default: 10, null: false
t.integer "housekeeping_full_repack_period", default: 50, null: false
t.integer "housekeeping_gc_period", default: 200, null: false
t.boolean "sidekiq_throttling_enabled", default: false
t.string "sidekiq_throttling_queues"
t.decimal "sidekiq_throttling_factor"
end
create_table "approvals", force: :cascade do |t|
......@@ -1214,6 +1211,7 @@ ActiveRecord::Schema.define(version: 20161117114805) do
t.boolean "wiki_page_events", default: true
t.boolean "pipeline_events", default: false, null: false
t.boolean "confidential_issues_events", default: true, null: false
t.boolean "commit_events", default: true, null: false
end
add_index "services", ["project_id"], name: "index_services_on_project_id", using: :btree
......
......@@ -114,6 +114,51 @@ Example of response
}
```
## Create a new pipeline
> [Introduced][ce-7209] in GitLab 8.14
```
POST /projects/:id/pipeline
```
| Attribute | Type | Required | Description |
|------------|---------|----------|---------------------|
| `id` | integer | yes | The ID of a project |
| `ref` | string | yes | Reference to commit |
```
curl --request POST --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" "https://gitlab.example.com/api/v3/projects/1/pipeline?ref=master"
```
Example of response
```json
{
"id": 61,
"sha": "384c444e840a515b23f21915ee5766b87068a70d",
"ref": "master",
"status": "pending",
"before_sha": "0000000000000000000000000000000000000000",
"tag": false,
"yaml_errors": null,
"user": {
"name": "Administrator",
"username": "root",
"id": 1,
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://localhost:3000/root"
},
"created_at": "2016-11-04T09:36:13.747Z",
"updated_at": "2016-11-04T09:36:13.977Z",
"started_at": null,
"finished_at": null,
"committed_at": null,
"duration": null
}
```
## Retry failed builds in a pipeline
> [Introduced][ce-5837] in GitLab 8.11
......@@ -205,3 +250,4 @@ Response:
```
[ce-5837]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/5837
[ce-7209]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/7209
......@@ -235,7 +235,7 @@ will help us achieve that.
As the name suggests, it is possible to create environments on the fly by just
declaring their names dynamically in `.gitlab-ci.yml`. Dynamic environments is
the basis of [Review apps](review_apps.md).
the basis of [Review apps](review_apps/index.md).
GitLab Runner exposes various [environment variables][variables] when a job runs,
and as such, you can use them as environment names. Let's add another job in
......
......@@ -289,7 +289,7 @@ The trick is to use the merge/pull request with multiple commits when your work
The commit message should reflect your intention, not the contents of the commit.
The contents of the commit can be easily seen anyway, the question is why you did it.
An example of a good commit message is: "Combine templates to dry up the user views.".
Some words that are bad commit messages because they don't contain munch information are: change, improve and refactor.
Some words that are bad commit messages because they don't contain much information are: change, improve and refactor.
The word fix or fixes is also a red flag, unless it comes after the commit sentence and references an issue number.
To see more information about the formatting of commit messages please see this great [blog post by Tim Pope](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html).
......
......@@ -24,9 +24,11 @@ you still have open.
A Todo appears in your Todos dashboard when:
- an issue or merge request is assigned to you
- an issue or merge request is assigned to you,
- you are `@mentioned` in an issue or merge request, be it the description of
the issue/merge request or in a comment
the issue/merge request or in a comment,
- a build in the CI pipeline running for your merge request failed, but this
build is not allowed to fail.
>**Note:** Commenting on a commit will _not_ trigger a Todo.
......
......@@ -22,6 +22,27 @@ module API
pipelines = PipelinesFinder.new(user_project).execute(scope: params[:scope])
present paginate(pipelines), with: Entities::Pipeline
end
desc 'Create a new pipeline' do
detail 'This feature was introduced in GitLab 8.14'
success Entities::Pipeline
end
params do
requires :ref, type: String, desc: 'Reference'
end
post ':id/pipeline' do
authorize! :create_pipeline, user_project
new_pipeline = Ci::CreatePipelineService.new(user_project,
current_user,
declared_params(include_missing: false))
.execute(ignore_skip_ci: true, save_on_errors: false)
if new_pipeline.persisted?
present new_pipeline, with: Entities::Pipeline
else
render_validation_error!(new_pipeline)
end
end
desc 'Gets a specific pipeline for the project' do
detail 'This feature was introduced in GitLab 8.11'
......
......@@ -3,6 +3,9 @@ module API
class ProjectSnippets < Grape::API
before { authenticate! }
params do
requires :id, type: String, desc: 'The ID of a project'
end
resource :projects do
helpers do
def handle_project_member_errors(errors)
......@@ -18,111 +21,108 @@ module API
end
end
# Get a project snippets
#
# Parameters:
# id (required) - The ID of a project
# Example Request:
# GET /projects/:id/snippets
desc 'Get all project snippets' do
success Entities::ProjectSnippet
end
get ":id/snippets" do
present paginate(snippets_for_current_user), with: Entities::ProjectSnippet
end
# Get a project snippet
#
# Parameters:
# id (required) - The ID of a project
# snippet_id (required) - The ID of a project snippet
# Example Request:
# GET /projects/:id/snippets/:snippet_id
desc 'Get a single project snippet' do
success Entities::ProjectSnippet
end
params do
requires :snippet_id, type: Integer, desc: 'The ID of a project snippet'
end
get ":id/snippets/:snippet_id" do
@snippet = snippets_for_current_user.find(params[:snippet_id])
present @snippet, with: Entities::ProjectSnippet
end
# Create a new project snippet
#
# Parameters:
# id (required) - The ID of a project
# title (required) - The title of a snippet
# file_name (required) - The name of a snippet file
# code (required) - The content of a snippet
# visibility_level (required) - The snippet's visibility
# Example Request:
# POST /projects/:id/snippets
snippet = snippets_for_current_user.find(params[:snippet_id])
present snippet, with: Entities::ProjectSnippet
end
desc 'Create a new project snippet' do
success Entities::ProjectSnippet
end
params do
requires :title, type: String, desc: 'The title of the snippet'
requires :file_name, type: String, desc: 'The file name of the snippet'
requires :code, type: String, desc: 'The content of the snippet'
requires :visibility_level, type: Integer,
values: [Gitlab::VisibilityLevel::PRIVATE,
Gitlab::VisibilityLevel::INTERNAL,
Gitlab::VisibilityLevel::PUBLIC],
desc: 'The visibility level of the snippet'
end
post ":id/snippets" do
authorize! :create_project_snippet, user_project
required_attributes! [:title, :file_name, :code, :visibility_level]
snippet_params = declared_params
snippet_params[:content] = snippet_params.delete(:code)
attrs = attributes_for_keys [:title, :file_name, :visibility_level]
attrs[:content] = params[:code] if params[:code].present?
@snippet = CreateSnippetService.new(user_project, current_user,
attrs).execute
snippet = CreateSnippetService.new(user_project, current_user, snippet_params).execute
if @snippet.errors.any?
render_validation_error!(@snippet)
if snippet.persisted?
present snippet, with: Entities::ProjectSnippet
else
present @snippet, with: Entities::ProjectSnippet
render_validation_error!(snippet)
end
end
# Update an existing project snippet
#
# Parameters:
# id (required) - The ID of a project
# snippet_id (required) - The ID of a project snippet
# title (optional) - The title of a snippet
# file_name (optional) - The name of a snippet file
# code (optional) - The content of a snippet
# visibility_level (optional) - The snippet's visibility
# Example Request:
# PUT /projects/:id/snippets/:snippet_id
desc 'Update an existing project snippet' do
success Entities::ProjectSnippet
end
params do
requires :snippet_id, type: Integer, desc: 'The ID of a project snippet'
optional :title, type: String, desc: 'The title of the snippet'
optional :file_name, type: String, desc: 'The file name of the snippet'
optional :code, type: String, desc: 'The content of the snippet'
optional :visibility_level, type: Integer,
values: [Gitlab::VisibilityLevel::PRIVATE,
Gitlab::VisibilityLevel::INTERNAL,
Gitlab::VisibilityLevel::PUBLIC],
desc: 'The visibility level of the snippet'
at_least_one_of :title, :file_name, :code, :visibility_level
end
put ":id/snippets/:snippet_id" do
@snippet = snippets_for_current_user.find(params[:snippet_id])
authorize! :update_project_snippet, @snippet
snippet = snippets_for_current_user.find_by(id: params.delete(:snippet_id))
not_found!('Snippet') unless snippet
authorize! :update_project_snippet, snippet
snippet_params = declared_params(include_missing: false)
snippet_params[:content] = snippet_params.delete(:code) if snippet_params[:code].present?
attrs = attributes_for_keys [:title, :file_name, :visibility_level]
attrs[:content] = params[:code] if params[:code].present?
UpdateSnippetService.new(user_project, current_user, snippet,
snippet_params).execute
UpdateSnippetService.new(user_project, current_user, @snippet,
attrs).execute
if @snippet.errors.any?
render_validation_error!(@snippet)
if snippet.persisted?
present snippet, with: Entities::ProjectSnippet
else
present @snippet, with: Entities::ProjectSnippet
render_validation_error!(snippet)
end
end
# Delete a project snippet
#
# Parameters:
# id (required) - The ID of a project
# snippet_id (required) - The ID of a project snippet
# Example Request:
# DELETE /projects/:id/snippets/:snippet_id
desc 'Delete a project snippet'
params do
requires :snippet_id, type: Integer, desc: 'The ID of a project snippet'
end
delete ":id/snippets/:snippet_id" do
begin
@snippet = snippets_for_current_user.find(params[:snippet_id])
authorize! :update_project_snippet, @snippet
@snippet.destroy
rescue
not_found!('Snippet')
end
snippet = snippets_for_current_user.find_by(id: params[:snippet_id])
not_found!('Snippet') unless snippet
authorize! :admin_project_snippet, snippet
snippet.destroy
end
# Get a raw project snippet
#
# Parameters:
# id (required) - The ID of a project
# snippet_id (required) - The ID of a project snippet
# Example Request:
# GET /projects/:id/snippets/:snippet_id/raw
desc 'Get a raw project snippet'
params do
requires :snippet_id, type: Integer, desc: 'The ID of a project snippet'
end
get ":id/snippets/:snippet_id/raw" do
@snippet = snippets_for_current_user.find(params[:snippet_id])
snippet = snippets_for_current_user.find_by(id: params[:snippet_id])
not_found!('Snippet') unless snippet
env['api.format'] = :txt
content_type 'text/plain'
present @snippet.content
present snippet.content
end
end
end
......
......@@ -33,7 +33,10 @@ module API
optional :active, type: Boolean, default: false, desc: 'Filters only active users'
optional :external, type: Boolean, default: false, desc: 'Filters only external users'
optional :blocked, type: Boolean, default: false, desc: 'Filters only blocked users'
optional :skip_ldap, type: Boolean, default: false, desc: 'Skip LDAP users'
end
get do
unless can?(current_user, :read_users_list, nil)
......@@ -45,7 +48,10 @@ module API
else
users = User.all
users = users.active if params[:active]
users = users.non_ldap if params[:skip_ldap]
users = users.search(params[:search]) if params[:search].present?
users = users.blocked if params[:blocked]
users = users.external if params[:external] && current_user.is_admin?
......
......@@ -32,6 +32,10 @@ module Ci
expose :artifacts_file, using: ArtifactFile, if: ->(build, _) { build.artifacts? }
end
class BuildCredentials < Grape::Entity
expose :type, :url, :username, :password
end
class BuildDetails < Build
expose :commands
expose :repo_url
......@@ -50,6 +54,8 @@ module Ci
expose :variables
expose :depends_on_builds, using: Build
expose :credentials, using: BuildCredentials
end
class Runner < Grape::Entity
......
......@@ -4,6 +4,7 @@ module Gitlab
COMMANDS = [
Gitlab::ChatCommands::IssueShow,
Gitlab::ChatCommands::IssueCreate,
Gitlab::ChatCommands::Deploy,
].freeze
def execute
......
module Gitlab
module ChatCommands
class Deploy < BaseCommand
def self.match(text)
/\Adeploy\s+(?<from>.*)\s+to+\s+(?<to>.*)\z/.match(text)
end
def self.help_message
'deploy <environment> to <target-environment>'
end
def self.available?(project)
project.builds_enabled?
end
def self.allowed?(project, user)
can?(user, :create_deployment, project)
end
def execute(match)
from = match[:from]
to = match[:to]
actions = find_actions(from, to)
return unless actions.present?
if actions.one?
actions.first.play(current_user)
else
Result.new(:error, 'Too many actions defined')
end
end
private
def find_actions(from, to)
environment = project.environments.find_by(name: from)
return unless environment
environment.actions_for(to).select(&:starts_environment?)
end
end
end
end
module Gitlab
module ChatCommands
Result = Struct.new(:type, :message)
end
end
module Gitlab
module Ci
module Build
module Credentials
class Base
def type
self.class.name.demodulize.underscore
end
end
end
end
end
end
module Gitlab
module Ci
module Build
module Credentials
class Factory
def initialize(build)
@build = build
end
def create!
credentials.select(&:valid?)
end
private
def credentials
providers.map { |provider| provider.new(@build) }
end
def providers
[Registry]
end
end
end
end
end
end
module Gitlab
module Ci
module Build
module Credentials
class Registry < Base
attr_reader :username, :password
def initialize(build)
@username = 'gitlab-ci-token'
@password = build.token
end
def url
Gitlab.config.registry.host_port
end
def valid?
Gitlab.config.registry.enabled
end
end
end
end
end
end
module Gitlab
module CycleAnalytics
class Permissions
STAGE_PERMISSIONS = {
issue: :read_issue,
code: :read_merge_request,
test: :read_build,
review: :read_merge_request,
staging: :read_build,
production: :read_issue,
}.freeze
def self.get(*args)
new(*args).get
end
def initialize(user:, project:)
@user = user
@project = project
@stage_permission_hash = {}
end
def get
::CycleAnalytics::STAGES.each do |stage|
@stage_permission_hash[stage] = authorized_stage?(stage)
end
@stage_permission_hash
end
private
def authorized_stage?(stage)
return false unless authorize_project(:read_cycle_analytics)
STAGE_PERMISSIONS[stage] ? authorize_project(STAGE_PERMISSIONS[stage]) : true
end
def authorize_project(permission)
Ability.allowed?(@user, permission, @project)
end
end
end
end
require 'set'
module Gitlab
# Module that can be used to detect if a path points to a special file such as
# a README or a CONTRIBUTING file.
module FileDetector
PATTERNS = {
readme: /\Areadme/i,
changelog: /\A(changelog|history|changes|news)/i,
license: /\A(licen[sc]e|copying)(\..+|\z)/i,
contributing: /\Acontributing/i,
version: 'version',
gitignore: '.gitignore',
koding: '.koding.yml',
gitlab_ci: '.gitlab-ci.yml',
avatar: /\Alogo\.(png|jpg|gif)\z/
}
# Returns an Array of file types based on the given paths.
#
# This method can be used to check if a list of file paths (e.g. of changed
# files) involve any special files such as a README or a LICENSE file.
#
# Example:
#
# types_in_paths(%w{README.md foo/bar.txt}) # => [:readme]
def self.types_in_paths(paths)
types = Set.new
paths.each do |path|
type = type_of(path)
types << type if type
end
types.to_a
end
# Returns the type of a file path, or nil if none could be detected.
#
# Returned types are Symbols such as `:readme`, `:version`, etc.
#
# Example:
#
# type_of('README.md') # => :readme
# type_of('VERSION') # => :version
def self.type_of(path)
name = File.basename(path)
PATTERNS.each do |type, search|
did_match = if search.is_a?(Regexp)
name =~ search
else
name.casecmp(search) == 0
end
return type if did_match
end
nil
end
end
end
......@@ -9,7 +9,7 @@ module Gitlab
# `NAMESPACE_REGEX_STR`, with the negative lookbehind assertion removed. This means that the client-side validation
# will pass for usernames ending in `.atom` and `.git`, but will be caught by the server-side validation.
NAMESPACE_REGEX_STR_SIMPLE = '[a-zA-Z0-9_\.][a-zA-Z0-9_\-\.]*[a-zA-Z0-9_\-]|[a-zA-Z0-9_]'.freeze
NAMESPACE_REGEX_STR = "(?:#{NAMESPACE_REGEX_STR_SIMPLE})(?<!\.git|\.atom)".freeze
NAMESPACE_REGEX_STR = '(?:' + NAMESPACE_REGEX_STR_SIMPLE + ')(?<!\.git|\.atom)'.freeze
def namespace_regex
@namespace_regex ||= /\A#{NAMESPACE_REGEX_STR}\z/.freeze
......
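To illustrate the behaviour described in the comment above, a rough sketch in Ruby; the regex strings are copied from the hunk, everything else is only for demonstration:

```ruby
# Client-side pattern (no lookbehind) vs. server-side pattern (with lookbehind).
NAMESPACE_REGEX_STR_SIMPLE = '[a-zA-Z0-9_\.][a-zA-Z0-9_\-\.]*[a-zA-Z0-9_\-]|[a-zA-Z0-9_]'.freeze
NAMESPACE_REGEX_STR = '(?:' + NAMESPACE_REGEX_STR_SIMPLE + ')(?<!\.git|\.atom)'.freeze

simple = /\A(?:#{NAMESPACE_REGEX_STR_SIMPLE})\z/ # what client-side validation effectively uses
full   = /\A#{NAMESPACE_REGEX_STR}\z/            # what the server enforces

!!('group.atom' =~ simple) # => true  -- passes the simplified pattern
!!('group.atom' =~ full)   # => false -- rejected by the (?<!\.git|\.atom) assertion
```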
......@@ -24,20 +24,22 @@ module Mattermost
end
end
def present(resource)
return not_found unless resource
if resource.respond_to?(:count)
if resource.count > 1
return multiple_resources(resource)
elsif resource.count == 0
return not_found
def present(subject)
return not_found unless subject
if subject.is_a?(Gitlab::ChatCommands::Result)
show_result(subject)
elsif subject.respond_to?(:count)
if subject.many?
multiple_resources(subject)
elsif subject.none?
not_found
else
resource = resource.first
single_resource(subject)
end
else
single_resource(subject)
end
single_resource(resource)
end
def access_denied
......@@ -46,6 +48,10 @@ module Mattermost
private
def show_result(result)
ephemeral_response(result.message)
end
def not_found
ephemeral_response("404 not found! GitLab couldn't find what you were looking for! :boom:")
end
......@@ -54,7 +60,7 @@ module Mattermost
return error(resource) if resource.errors.any? || !resource.persisted?
message = "### #{title(resource)}"
message << "\n\n#{resource.description}" if resource.description
message << "\n\n#{resource.description}" if resource.try(:description)
in_channel_response(message)
end
......@@ -74,7 +80,10 @@ module Mattermost
end
def title(resource)
"[#{resource.to_reference} #{resource.title}](#{url(resource)})"
reference = resource.try(:to_reference) || resource.try(:id)
title = resource.try(:title) || resource.try(:name)
"[#{reference} #{title}](#{url(resource)})"
end
def header_with_list(header, items)
......
diff --git a/db/migrate/20161010142410_create_project_authorizations.rb b/db/migrate/20161010142410_create_project_authorizations.rb
new file mode 100644
index 0000000..e095ab9
--- /dev/null
+++ b/db/migrate/20161010142410_create_project_authorizations.rb
@@ -0,0 +1,15 @@
+class CreateProjectAuthorizations < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def change
+ create_table :project_authorizations do |t|
+ t.references :user, foreign_key: { on_delete: :cascade }
+ t.references :project, foreign_key: { on_delete: :cascade }
+ t.integer :access_level
+
+ t.index [:user_id, :project_id, :access_level], unique: true, name: 'index_project_authorizations_on_user_id_project_id_access_level'
+ end
+ end
+end
diff --git a/db/migrate/20161017091941_add_authorized_projects_populated_to_users.rb b/db/migrate/20161017091941_add_authorized_projects_populated_to_users.rb
new file mode 100644
index 0000000..8f6be9d
--- /dev/null
+++ b/db/migrate/20161017091941_add_authorized_projects_populated_to_users.rb
@@ -0,0 +1,9 @@
+class AddAuthorizedProjectsPopulatedToUsers < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def change
+ add_column :users, :authorized_projects_populated, :boolean
+ end
+end
diff --git a/db/migrate/20161020083353_add_pipeline_id_to_merge_request_metrics.rb b/db/migrate/20161020083353_add_pipeline_id_to_merge_request_metrics.rb
new file mode 100644
index 0000000..f49df68
--- /dev/null
+++ b/db/migrate/20161020083353_add_pipeline_id_to_merge_request_metrics.rb
@@ -0,0 +1,33 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddPipelineIdToMergeRequestMetrics < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ disable_ddl_transaction!
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = true
+
+ # When a migration requires downtime you **must** uncomment the following
+ # constant and define a short and easy to understand explanation as to why the
+ # migration requires downtime.
+ DOWNTIME_REASON = 'Adding a foreign key'
+
+ # When using the methods "add_concurrent_index" or "add_column_with_default"
+ # you must disable the use of transactions as these methods can not run in an
+ # existing transaction. When using "add_concurrent_index" make sure that this
+ # method is the _only_ method called in the migration, any other changes
+ # should go in a separate migration. This ensures that upon failure _only_ the
+ # index creation fails and can be retried or reverted easily.
+ #
+ # To disable transactions uncomment the following line and remove these
+ # comments:
+ # disable_ddl_transaction!
+
+ def change
+ add_column :merge_request_metrics, :pipeline_id, :integer
+ add_concurrent_index :merge_request_metrics, :pipeline_id
+ add_foreign_key :merge_request_metrics, :ci_commits, column: :pipeline_id, on_delete: :cascade
+ end
+end
diff --git a/db/migrate/20161030005533_add_estimate_to_issuables.rb b/db/migrate/20161030005533_add_estimate_to_issuables.rb
new file mode 100644
index 0000000..96e7593
--- /dev/null
+++ b/db/migrate/20161030005533_add_estimate_to_issuables.rb
@@ -0,0 +1,35 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddEstimateToIssuables < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ # When a migration requires downtime you **must** uncomment the following
+ # constant and define a short and easy to understand explanation as to why the
+ # migration requires downtime.
+ # DOWNTIME_REASON = ''
+
+ # When using the methods "add_concurrent_index" or "add_column_with_default"
+ # you must disable the use of transactions as these methods can not run in an
+ # existing transaction. When using "add_concurrent_index" make sure that this
+ # method is the _only_ method called in the migration, any other changes
+ # should go in a separate migration. This ensures that upon failure _only_ the
+ # index creation fails and can be retried or reverted easily.
+ #
+ # To disable transactions uncomment the following line and remove these
+ # comments:
+ # disable_ddl_transaction!
+
+ def up
+ add_column :issues, :time_estimate, :integer
+ add_column :merge_requests, :time_estimate, :integer
+ end
+
+ def down
+ remove_column :issues, :time_estimate
+ remove_column :merge_requests, :time_estimate
+ end
+end
diff --git a/db/migrate/20161030020610_create_timelogs.rb b/db/migrate/20161030020610_create_timelogs.rb
new file mode 100644
index 0000000..31183ae
--- /dev/null
+++ b/db/migrate/20161030020610_create_timelogs.rb
@@ -0,0 +1,18 @@
+class CreateTimelogs < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def change
+ create_table :timelogs do |t|
+ t.integer :time_spent, null: false
+ t.references :trackable, polymorphic: true
+ t.references :user
+
+ t.timestamps null: false
+ end
+
+ add_index :timelogs, [:trackable_type, :trackable_id]
+ add_index :timelogs, :user_id
+ end
+end
diff --git a/db/migrate/20161031171301_add_project_id_to_subscriptions.rb b/db/migrate/20161031171301_add_project_id_to_subscriptions.rb
new file mode 100644
index 0000000..9753467
--- /dev/null
+++ b/db/migrate/20161031171301_add_project_id_to_subscriptions.rb
@@ -0,0 +1,14 @@
+class AddProjectIdToSubscriptions < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ add_column :subscriptions, :project_id, :integer
+ add_foreign_key :subscriptions, :projects, column: :project_id, on_delete: :cascade
+ end
+
+ def down
+ remove_column :subscriptions, :project_id
+ end
+end
diff --git a/db/migrate/20161031174110_migrate_subscriptions_project_id.rb b/db/migrate/20161031174110_migrate_subscriptions_project_id.rb
new file mode 100644
index 0000000..549145a
--- /dev/null
+++ b/db/migrate/20161031174110_migrate_subscriptions_project_id.rb
@@ -0,0 +1,44 @@
+class MigrateSubscriptionsProjectId < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = true
+ DOWNTIME_REASON = 'Subscriptions will not work as expected until this migration is complete.'
+
+ def up
+ execute <<-EOF.strip_heredoc
+ UPDATE subscriptions
+ SET project_id = (
+ SELECT issues.project_id
+ FROM issues
+ WHERE issues.id = subscriptions.subscribable_id
+ )
+ WHERE subscriptions.subscribable_type = 'Issue';
+ EOF
+
+ execute <<-EOF.strip_heredoc
+ UPDATE subscriptions
+ SET project_id = (
+ SELECT merge_requests.target_project_id
+ FROM merge_requests
+ WHERE merge_requests.id = subscriptions.subscribable_id
+ )
+ WHERE subscriptions.subscribable_type = 'MergeRequest';
+ EOF
+
+ execute <<-EOF.strip_heredoc
+ UPDATE subscriptions
+ SET project_id = (
+ SELECT projects.id
+ FROM labels INNER JOIN projects ON projects.id = labels.project_id
+ WHERE labels.id = subscriptions.subscribable_id
+ )
+ WHERE subscriptions.subscribable_type = 'Label';
+ EOF
+ end
+
+ def down
+ execute <<-EOF.strip_heredoc
+ UPDATE subscriptions SET project_id = NULL;
+ EOF
+ end
+end
diff --git a/db/migrate/20161031181638_add_unique_index_to_subscriptions.rb b/db/migrate/20161031181638_add_unique_index_to_subscriptions.rb
new file mode 100644
index 0000000..4b1b29e
--- /dev/null
+++ b/db/migrate/20161031181638_add_unique_index_to_subscriptions.rb
@@ -0,0 +1,18 @@
+class AddUniqueIndexToSubscriptions < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = true
+ DOWNTIME_REASON = 'This migration requires downtime because it changes a column to not accept null values.'
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :subscriptions, [:subscribable_id, :subscribable_type, :user_id, :project_id], { unique: true, name: 'index_subscriptions_on_subscribable_and_user_id_and_project_id' }
+ remove_index :subscriptions, name: 'subscriptions_user_id_and_ref_fields' if index_name_exists?(:subscriptions, 'subscriptions_user_id_and_ref_fields', false)
+ end
+
+ def down
+ add_concurrent_index :subscriptions, [:subscribable_id, :subscribable_type, :user_id], { unique: true, name: 'subscriptions_user_id_and_ref_fields' }
+ remove_index :subscriptions, name: 'index_subscriptions_on_subscribable_and_user_id_and_project_id' if index_name_exists?(:subscriptions, 'index_subscriptions_on_subscribable_and_user_id_and_project_id', false)
+ end
+end
diff --git a/db/migrate/20161113184239_create_user_chat_names_table.rb b/db/migrate/20161113184239_create_user_chat_names_table.rb
new file mode 100644
index 0000000..97b5976
--- /dev/null
+++ b/db/migrate/20161113184239_create_user_chat_names_table.rb
@@ -0,0 +1,21 @@
+class CreateUserChatNamesTable < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def change
+ create_table :chat_names do |t|
+ t.integer :user_id, null: false
+ t.integer :service_id, null: false
+ t.string :team_id, null: false
+ t.string :team_domain
+ t.string :chat_id, null: false
+ t.string :chat_name
+ t.datetime :last_used_at
+ t.timestamps null: false
+ end
+
+ add_index :chat_names, [:user_id, :service_id], unique: true
+ add_index :chat_names, [:service_id, :team_id, :chat_id], unique: true
+ end
+end
diff --git a/db/migrate/20161117114805_remove_undeleted_groups.rb b/db/migrate/20161117114805_remove_undeleted_groups.rb
new file mode 100644
index 0000000..ebc2d97
--- /dev/null
+++ b/db/migrate/20161117114805_remove_undeleted_groups.rb
@@ -0,0 +1,16 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class RemoveUndeletedGroups < ActiveRecord::Migration
+ DOWNTIME = false
+
+ def up
+ execute "DELETE FROM namespaces WHERE deleted_at IS NOT NULL;"
+ end
+
+ def down
+ # This is an irreversible migration;
+ # If someone is trying to rollback for other reasons, we should not throw an Exception.
+ # raise ActiveRecord::IrreversibleMigration
+ end
+end
#!/bin/bash
#!/bin/sh
retry() {
if eval "$@"; then
......@@ -24,11 +24,12 @@ if [ -f /.dockerenv ] || [ -f ./dockerinit ]; then
cp config/resque.yml.example config/resque.yml
sed -i 's/localhost/redis/g' config/resque.yml
export FLAGS=(--path vendor --retry 3 --quiet)
export FLAGS="--path vendor --retry 3 --quiet"
else
export PATH=$HOME/bin:/usr/local/bin:/usr/bin:/bin
rnd=$(awk 'BEGIN { srand() ; printf("%d\n",rand()*5) }')
export PATH="$HOME/bin:/usr/local/bin:/usr/bin:/bin"
cp config/database.yml.mysql config/database.yml
sed "s/username\:.*$/username\: runner/" -i config/database.yml
sed "s/password\:.*$/password\: 'password'/" -i config/database.yml
sed "s/gitlabhq_test/gitlabhq_test_$((RANDOM/5000))/" -i config/database.yml
sed "s/gitlabhq_test/gitlabhq_test_$rnd/" -i config/database.yml
fi
require 'spec_helper'
describe Projects::CycleAnalyticsController do
let(:project) { create(:project) }
let(:user) { create(:user) }
before do
sign_in(user)
project.team << [user, :master]
end
describe 'cycle analytics not set up flag' do
context 'with no data' do
it 'is true' do
get(:show,
namespace_id: project.namespace.to_param,
project_id: project.to_param)
expect(response).to be_success
expect(assigns(:cycle_analytics_no_data)).to eq(true)
end
end
context 'with data' do
before do
issue = create(:issue, project: project, created_at: 4.days.ago)
milestone = create(:milestone, project: project, created_at: 5.days.ago)
issue.update(milestone: milestone)
create_merge_request_closing_issue(issue)
end
it 'is false' do
get(:show,
namespace_id: project.namespace.to_param,
project_id: project.to_param)
expect(response).to be_success
expect(assigns(:cycle_analytics_no_data)).to eq(false)
end
end
end
end
......@@ -55,6 +55,12 @@ FactoryGirl.define do
self.when 'manual'
end
trait :teardown_environment do
options do
{ environment: { action: 'stop' } }
end
end
trait :allowed_to_fail do
allow_failure true
end
......
......@@ -106,4 +106,11 @@ feature 'Create New Merge Request', feature: true, js: true do
expect(page).to have_content "6049019_460s.jpg"
end
end
# Isolates a regression (see #24627)
it 'does not show error messages on initial form' do
visit new_namespace_project_merge_request_path(project.namespace, project)
expect(page).not_to have_selector('#error_explanation')
expect(page).not_to have_content('The form contains the following error')
end
end
require 'spec_helper'
describe 'Deleted source branch', feature: true, js: true do
let(:user) { create(:user) }
let(:merge_request) { create(:merge_request) }
before do
login_as user
merge_request.project.team << [user, :master]
merge_request.update!(source_branch: 'this-branch-does-not-exist')
visit namespace_project_merge_request_path(
merge_request.project.namespace,
merge_request.project, merge_request
)
end
it 'shows a message about missing source branch' do
expect(page).to have_content(
'Source branch this-branch-does-not-exist does not exist'
)
end
it 'hides Discussion, Commits and Changes tabs' do
within '.merge-request-details' do
expect(page).to have_no_content('Discussion')
expect(page).to have_no_content('Commits')
expect(page).to have_no_content('Changes')
end
end
end
......@@ -3,11 +3,12 @@ require 'spec_helper'
feature 'Merge Request versions', js: true, feature: true do
let(:merge_request) { create(:merge_request, importing: true) }
let(:project) { merge_request.source_project }
let!(:merge_request_diff1) { merge_request.merge_request_diffs.create(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') }
let!(:merge_request_diff2) { merge_request.merge_request_diffs.create(head_commit_sha: nil) }
let!(:merge_request_diff3) { merge_request.merge_request_diffs.create(head_commit_sha: '5937ac0a7beb003549fc5fd26fc247adbce4a52e') }
before do
login_as :admin
merge_request.merge_request_diffs.create(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9')
merge_request.merge_request_diffs.create(head_commit_sha: '5937ac0a7beb003549fc5fd26fc247adbce4a52e')
visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request)
end
......@@ -53,7 +54,7 @@ feature 'Merge Request versions', js: true, feature: true do
project.namespace,
project,
merge_request.iid,
diff_id: 2,
diff_id: merge_request_diff3.id,
start_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9'
)
end
......
......@@ -35,7 +35,7 @@
describe('Activities', () => {
beforeEach(() => {
fixture.load(fixtureTemplate);
new Activities();
new gl.Activities();
});
for(let i = 0; i < filters.length; i++) {
......
......@@ -135,7 +135,7 @@ describe('Environment item', () => {
});
it('should render environment name', () => {
expect(component.$el.querySelector('.environment-name').textContent).toEqual(environment.name);
expect(component.$el.querySelector('.environment-name').textContent).toContain(environment.name);
});
describe('With deployment', () => {
......
/* eslint-disable space-before-function-paren, quotes, comma-dangle, dot-notation, indent, quote-props, no-var, padded-blocks, max-len */
/*= require merge_request_widget */
/*= require lib/utils/timeago.js */
/*= require lib/utils/timeago */
/*= require lib/utils/datetime_utility */
(function() {
describe('MergeRequestWidget', function() {
......@@ -35,9 +36,9 @@
external_url_formatted: 'test-url.com'
}];
spyOn(jQuery, 'getJSON').and.callFake((req, cb) => {
spyOn(jQuery, 'getJSON').and.callFake(function(req, cb) {
cb(this.ciEnvironmentsStatusData);
});
}.bind(this));
});
it('should call renderEnvironments when the environments property is set', function() {
......@@ -54,6 +55,57 @@
});
});
describe('renderEnvironments', function() {
describe('should render correct timeago', function() {
beforeEach(function() {
this.environments = [{
id: 'test-environment-id',
url: 'testurl',
deployed_at: new Date().toISOString(),
deployed_at_formatted: true
}];
});
function getTimeagoText(template) {
var el = document.createElement('html');
el.innerHTML = template;
return el.querySelector('.js-environment-timeago').innerText.trim();
}
it('should render less than a minute ago text', function() {
spyOn(this.class.$widgetBody, 'before').and.callFake(function(template) {
expect(getTimeagoText(template)).toBe('less than a minute ago.');
});
this.class.renderEnvironments(this.environments);
});
it('should render about an hour ago text', function() {
var oneHourAgo = new Date();
oneHourAgo.setHours(oneHourAgo.getHours() - 1);
this.environments[0].deployed_at = oneHourAgo.toISOString();
spyOn(this.class.$widgetBody, 'before').and.callFake(function(template) {
expect(getTimeagoText(template)).toBe('about an hour ago.');
});
this.class.renderEnvironments(this.environments);
});
it('should render about 2 hours ago text', function() {
var twoHoursAgo = new Date();
twoHoursAgo.setHours(twoHoursAgo.getHours() - 2);
this.environments[0].deployed_at = twoHoursAgo.toISOString();
spyOn(this.class.$widgetBody, 'before').and.callFake(function(template) {
expect(getTimeagoText(template)).toBe('about 2 hours ago.');
});
this.class.renderEnvironments(this.environments);
});
});
});
return describe('getCIStatus', function() {
beforeEach(function() {
this.ciStatusData = {
......
......@@ -4,9 +4,9 @@ describe Gitlab::ChatCommands::Command, service: true do
let(:project) { create(:empty_project) }
let(:user) { create(:user) }
subject { described_class.new(project, user, params).execute }
describe '#execute' do
subject { described_class.new(project, user, params).execute }
context 'when no command is available' do
let(:params) { { text: 'issue show 1' } }
let(:project) { create(:project, has_external_issue_tracker: true) }
......@@ -51,5 +51,44 @@ describe Gitlab::ChatCommands::Command, service: true do
expect(subject[:text]).to match(/\/issues\/\d+/)
end
end
context 'when trying to do deployment' do
let(:params) { { text: 'deploy staging to production' } }
let!(:build) { create(:ci_build, project: project) }
let!(:staging) { create(:environment, name: 'staging', project: project) }
let!(:deployment) { create(:deployment, environment: staging, deployable: build) }
let!(:manual) do
create(:ci_build, :manual, project: project, pipeline: build.pipeline, name: 'first', environment: 'production')
end
context 'and user can not create deployment' do
it 'returns action' do
expect(subject[:response_type]).to be(:ephemeral)
expect(subject[:text]).to start_with('Whoops! That action is not allowed')
end
end
context 'and user does have deployment permission' do
before do
project.team << [user, :developer]
end
it 'returns action' do
expect(subject[:text]).to include(manual.name)
expect(subject[:response_type]).to be(:in_channel)
end
context 'when duplicate action exists' do
let!(:manual2) do
create(:ci_build, :manual, project: project, pipeline: build.pipeline, name: 'second', environment: 'production')
end
it 'returns error' do
expect(subject[:response_type]).to be(:ephemeral)
expect(subject[:text]).to include('Too many actions defined')
end
end
end
end
end
end
require 'spec_helper'
describe Gitlab::ChatCommands::Deploy, service: true do
describe '#execute' do
let(:project) { create(:empty_project) }
let(:user) { create(:user) }
let(:regex_match) { described_class.match('deploy staging to production') }
before do
project.team << [user, :master]
end
subject do
described_class.new(project, user).execute(regex_match)
end
context 'if no environment is defined' do
it 'returns nil' do
expect(subject).to be_nil
end
end
context 'with environment' do
let!(:staging) { create(:environment, name: 'staging', project: project) }
let!(:build) { create(:ci_build, project: project) }
let!(:deployment) { create(:deployment, environment: staging, deployable: build) }
context 'without actions' do
it 'returns nil' do
expect(subject).to be_nil
end
end
context 'with action' do
let!(:manual1) do
create(:ci_build, :manual, project: project, pipeline: build.pipeline, name: 'first', environment: 'production')
end
it 'returns action' do
expect(subject).to eq(manual1)
end
context 'when duplicate action exists' do
let!(:manual2) do
create(:ci_build, :manual, project: project, pipeline: build.pipeline, name: 'second', environment: 'production')
end
it 'returns error' do
expect(subject.message).to eq('Too many actions defined')
end
end
context 'when teardown action exists' do
let!(:teardown) do
create(:ci_build, :manual, :teardown_environment,
project: project, pipeline: build.pipeline,
name: 'teardown', environment: 'production')
end
it 'returns error' do
expect(subject).to eq(manual1)
end
end
end
end
end
describe 'self.match' do
it 'matches the environment' do
match = described_class.match('deploy staging to production')
expect(match[:from]).to eq('staging')
expect(match[:to]).to eq('production')
end
end
end
require 'spec_helper'
describe Gitlab::Ci::Build::Credentials::Factory do
let(:build) { create(:ci_build, name: 'spinach', stage: 'test', stage_idx: 0) }
subject { Gitlab::Ci::Build::Credentials::Factory.new(build).create! }
class TestProvider
def initialize(build); end
end
before do
allow_any_instance_of(Gitlab::Ci::Build::Credentials::Factory).to receive(:providers).and_return([TestProvider])
end
context 'when provider is valid' do
before do
allow_any_instance_of(TestProvider).to receive(:valid?).and_return(true)
end
it 'generates an array of credentials objects' do
is_expected.to be_kind_of(Array)
is_expected.not_to be_empty
expect(subject.first).to be_kind_of(TestProvider)
end
end
context 'when provider is not valid' do
before do
allow_any_instance_of(TestProvider).to receive(:valid?).and_return(false)
end
it 'generates an array without specific credential object' do
is_expected.to be_kind_of(Array)
is_expected.to be_empty
end
end
end
require 'spec_helper'
describe Gitlab::Ci::Build::Credentials::Registry do
let(:build) { create(:ci_build, name: 'spinach', stage: 'test', stage_idx: 0) }
let(:registry_url) { 'registry.example.com:5005' }
subject { Gitlab::Ci::Build::Credentials::Registry.new(build) }
before do
stub_container_registry_config(host_port: registry_url)
end
it 'contains valid DockerRegistry credentials' do
expect(subject).to be_kind_of(Gitlab::Ci::Build::Credentials::Registry)
expect(subject.username).to eq 'gitlab-ci-token'
expect(subject.password).to eq build.token
expect(subject.url).to eq registry_url
expect(subject.type).to eq 'registry'
end
describe '.valid?' do
subject { Gitlab::Ci::Build::Credentials::Registry.new(build).valid? }
context 'when registry is enabled' do
before do
stub_container_registry_config(enabled: true)
end
it { is_expected.to be_truthy }
end
context 'when registry is disabled' do
before do
stub_container_registry_config(enabled: false)
end
it { is_expected.to be_falsey }
end
end
end
require 'spec_helper'
describe Gitlab::CycleAnalytics::Permissions do
let(:project) { create(:empty_project) }
let(:user) { create(:user) }
subject { described_class.get(user: user, project: project) }
context 'user with no relation to the project' do
it 'has no permissions to issue stage' do
expect(subject[:issue]).to eq(false)
end
it 'has no permissions to test stage' do
expect(subject[:test]).to eq(false)
end
it 'has no permissions to staging stage' do
expect(subject[:staging]).to eq(false)
end
it 'has no permissions to production stage' do
expect(subject[:production]).to eq(false)
end
it 'has no permissions to code stage' do
expect(subject[:code]).to eq(false)
end
it 'has no permissions to review stage' do
expect(subject[:review]).to eq(false)
end
it 'has no permissions to plan stage' do
expect(subject[:plan]).to eq(false)
end
end
context 'user is master' do
before do
project.team << [user, :master]
end
it 'has permissions to issue stage' do
expect(subject[:issue]).to eq(true)
end
it 'has permissions to test stage' do
expect(subject[:test]).to eq(true)
end
it 'has permissions to staging stage' do
expect(subject[:staging]).to eq(true)
end
it 'has permissions to production stage' do
expect(subject[:production]).to eq(true)
end
it 'has permissions to code stage' do
expect(subject[:code]).to eq(true)
end
it 'has permissions to review stage' do
expect(subject[:review]).to eq(true)
end
it 'has permissions to plan stage' do
expect(subject[:plan]).to eq(true)
end
end
context 'user has no build permissions' do
before do
project.team << [user, :guest]
end
it 'has permissions to issue stage' do
expect(subject[:issue]).to eq(true)
end
it 'has no permissions to test stage' do
expect(subject[:test]).to eq(false)
end
it 'has no permissions to staging stage' do
expect(subject[:staging]).to eq(false)
end
end
context 'user has no merge request permissions' do
before do
project.team << [user, :guest]
end
it 'has permissions to issue stage' do
expect(subject[:issue]).to eq(true)
end
it 'has no permissions to code stage' do
expect(subject[:code]).to eq(false)
end
it 'has no permissions to review stage' do
expect(subject[:review]).to eq(false)
end
end
context 'user has no issue permissions' do
before do
project.team << [user, :developer]
project.project_feature.update_attribute(:issues_access_level, ProjectFeature::DISABLED)
end
it 'has permissions to code stage' do
expect(subject[:code]).to eq(true)
end
it 'has no permissions to issue stage' do
expect(subject[:issue]).to eq(false)
end
it 'has no permissions to production stage' do
expect(subject[:production]).to eq(false)
end
end
end
require 'spec_helper'
describe Gitlab::FileDetector do
describe '.types_in_paths' do
it 'returns the file types for the given paths' do
expect(described_class.types_in_paths(%w(README.md CHANGELOG VERSION VERSION))).
to eq(%i{readme changelog version})
end
it 'does not include unrecognized file paths' do
expect(described_class.types_in_paths(%w(README.md foo.txt))).
to eq(%i{readme})
end
end
describe '.type_of' do
it 'returns the type of a README file' do
expect(described_class.type_of('README.md')).to eq(:readme)
end
it 'returns the type of a changelog file' do
%w(CHANGELOG HISTORY CHANGES NEWS).each do |file|
expect(described_class.type_of(file)).to eq(:changelog)
end
end
it 'returns the type of a license file' do
%w(LICENSE LICENCE COPYING).each do |file|
expect(described_class.type_of(file)).to eq(:license)
end
end
it 'returns the type of a version file' do
expect(described_class.type_of('VERSION')).to eq(:version)
end
it 'returns the type of a .gitignore file' do
expect(described_class.type_of('.gitignore')).to eq(:gitignore)
end
it 'returns the type of a Koding config file' do
expect(described_class.type_of('.koding.yml')).to eq(:koding)
end
it 'returns the type of a GitLab CI config file' do
expect(described_class.type_of('.gitlab-ci.yml')).to eq(:gitlab_ci)
end
it 'returns the type of an avatar' do
%w(logo.gif logo.png logo.jpg).each do |file|
expect(described_class.type_of(file)).to eq(:avatar)
end
end
it 'returns nil for an unknown file' do
expect(described_class.type_of('foo.txt')).to be_nil
end
end
end
......@@ -262,6 +262,7 @@ Service:
- template
- push_events
- issues_events
- commit_events
- merge_requests_events
- tag_push_events
- note_events
......@@ -348,6 +349,7 @@ LabelPriority:
- priority
- created_at
- updated_at
Timelog:
- id
- time_spent
......@@ -356,3 +358,5 @@ Timelog:
- user_id
- created_at
- updated_at
require 'spec_helper'
describe BroadcastMessage, models: true do
subject { create(:broadcast_message) }
subject { build(:broadcast_message) }
it { is_expected.to be_valid }
......
......@@ -9,6 +9,7 @@ describe Environment, models: true do
it { is_expected.to delegate_method(:last_deployment).to(:deployments).as(:last) }
it { is_expected.to delegate_method(:stop_action).to(:last_deployment) }
it { is_expected.to delegate_method(:manual_actions).to(:last_deployment) }
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_uniqueness_of(:name).scoped_to(:project_id) }
......@@ -187,4 +188,15 @@ describe Environment, models: true do
it { is_expected.to be false }
end
end
describe '#actions_for' do
let(:deployment) { create(:deployment, environment: environment) }
let(:pipeline) { deployment.deployable.pipeline }
let!(:review_action) { create(:ci_build, :manual, name: 'review-apps', pipeline: pipeline, environment: 'review/$CI_BUILD_REF_NAME' )}
let!(:production_action) { create(:ci_build, :manual, name: 'production', pipeline: pipeline, environment: 'production' )}
it 'returns a list of actions with matching environment' do
expect(environment.actions_for('review/master')).to contain_exactly(review_action)
end
end
end
......@@ -83,7 +83,8 @@ describe JiraService, models: true do
url: 'http://jira.example.com',
username: 'gitlab_jira_username',
password: 'gitlab_jira_password',
project_key: 'GitLabProject'
project_key: 'GitLabProject',
jira_issue_transition_id: "custom-id"
)
# These stubs are needed to test JiraService#close_issue.
......@@ -177,11 +178,10 @@ describe JiraService, models: true do
end
it "calls the api with jira_issue_transition_id" do
@jira_service.jira_issue_transition_id = 'this-is-a-custom-id'
@jira_service.execute(merge_request, ExternalIssue.new("JIRA-123", project))
expect(WebMock).to have_requested(:post, @transitions_url).with(
body: /this-is-a-custom-id/
body: /custom-id/
).once
end
......
......@@ -1859,7 +1859,7 @@ describe Project, models: true do
end
it 'expires the avatar cache' do
expect(project.repository).to receive(:expire_avatar_cache).with(project.default_branch)
expect(project.repository).to receive(:expire_avatar_cache)
project.change_head(project.default_branch)
end
......
......@@ -464,11 +464,7 @@ describe Repository, models: true do
end
end
describe "#changelog" do
before do
repository.send(:cache).expire(:changelog)
end
describe "#changelog", caching: true do
it 'accepts changelog' do
expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('changelog')])
......@@ -500,17 +496,16 @@ describe Repository, models: true do
end
end
describe "#license_blob" do
describe "#license_blob", caching: true do
before do
repository.send(:cache).expire(:license_blob)
repository.remove_file(user, 'LICENSE', 'Remove LICENSE', 'master')
end
it 'handles when HEAD points to non-existent ref' do
repository.commit_file(user, 'LICENSE', 'Copyright!', 'Add LICENSE', 'master', false)
rugged = double('rugged')
expect(rugged).to receive(:head_unborn?).and_return(true)
expect(repository).to receive(:rugged).and_return(rugged)
allow(repository).to receive(:file_on_head).
and_raise(Rugged::ReferenceError)
expect(repository.license_blob).to be_nil
end
......@@ -537,22 +532,18 @@ describe Repository, models: true do
end
end
describe '#license_key' do
describe '#license_key', caching: true do
before do
repository.send(:cache).expire(:license_key)
repository.remove_file(user, 'LICENSE', 'Remove LICENSE', 'master')
end
it 'handles when HEAD points to non-existent ref' do
repository.commit_file(user, 'LICENSE', 'Copyright!', 'Add LICENSE', 'master', false)
rugged = double('rugged')
expect(rugged).to receive(:head_unborn?).and_return(true)
expect(repository).to receive(:rugged).and_return(rugged)
it 'returns nil when no license is detected' do
expect(repository.license_key).to be_nil
end
it 'returns nil when no license is detected' do
it 'returns nil when the repository does not exist' do
expect(repository).to receive(:exists?).and_return(false)
expect(repository.license_key).to be_nil
end
......@@ -569,7 +560,7 @@ describe Repository, models: true do
end
end
describe "#gitlab_ci_yml" do
describe "#gitlab_ci_yml", caching: true do
it 'returns valid file' do
files = [TestBlob.new('file'), TestBlob.new('.gitlab-ci.yml'), TestBlob.new('copying')]
expect(repository.tree).to receive(:blobs).and_return(files)
......@@ -583,7 +574,7 @@ describe Repository, models: true do
end
it 'returns nil for empty repository' do
expect(repository).to receive(:empty?).and_return(true)
allow(repository).to receive(:file_on_head).and_raise(Rugged::ReferenceError)
expect(repository.gitlab_ci_yml).to be_nil
end
end
......@@ -778,7 +769,6 @@ describe Repository, models: true do
expect(repository).not_to receive(:expire_emptiness_caches)
expect(repository).to receive(:expire_branches_cache)
expect(repository).to receive(:expire_has_visible_content_cache)
expect(repository).to receive(:expire_branch_count_cache)
repository.update_branch_with_hooks(user, 'new-feature') { new_rev }
end
......@@ -797,7 +787,6 @@ describe Repository, models: true do
expect(empty_repository).to receive(:expire_emptiness_caches)
expect(empty_repository).to receive(:expire_branches_cache)
expect(empty_repository).to receive(:expire_has_visible_content_cache)
expect(empty_repository).to receive(:expire_branch_count_cache)
empty_repository.commit_file(user, 'CHANGELOG', 'Changelog!',
'Updates file content', 'master', false)
......@@ -811,8 +800,7 @@ describe Repository, models: true do
end
it 'returns false when a repository does not exist' do
expect(repository.raw_repository).to receive(:rugged).
and_raise(Gitlab::Git::Repository::NoRepository)
allow(repository).to receive(:refs_directory_exists?).and_return(false)
expect(repository.exists?).to eq(false)
end
......@@ -916,34 +904,6 @@ describe Repository, models: true do
end
end
describe '#expire_cache' do
it 'expires all caches' do
expect(repository).to receive(:expire_branch_cache)
repository.expire_cache
end
it 'expires the caches for a specific branch' do
expect(repository).to receive(:expire_branch_cache).with('master')
repository.expire_cache('master')
end
it 'expires the emptiness caches for an empty repository' do
expect(repository).to receive(:empty?).and_return(true)
expect(repository).to receive(:expire_emptiness_caches)
repository.expire_cache
end
it 'does not expire the emptiness caches for a non-empty repository' do
expect(repository).to receive(:empty?).and_return(false)
expect(repository).not_to receive(:expire_emptiness_caches)
repository.expire_cache
end
end
describe '#expire_root_ref_cache' do
it 'expires the root reference cache' do
repository.root_ref
......@@ -1003,12 +963,23 @@ describe Repository, models: true do
describe '#expire_emptiness_caches' do
let(:cache) { repository.send(:cache) }
it 'expires the caches' do
it 'expires the caches for an empty repository' do
allow(repository).to receive(:empty?).and_return(true)
expect(cache).to receive(:expire).with(:empty?)
expect(repository).to receive(:expire_has_visible_content_cache)
repository.expire_emptiness_caches
end
it 'does not expire the cache for a non-empty repository' do
allow(repository).to receive(:empty?).and_return(false)
expect(cache).not_to receive(:expire).with(:empty?)
expect(repository).not_to receive(:expire_has_visible_content_cache)
repository.expire_emptiness_caches
end
end
describe 'skip_merged_commit' do
......@@ -1146,24 +1117,12 @@ describe Repository, models: true do
repository.before_delete
end
it 'flushes the tag count cache' do
expect(repository).to receive(:expire_tag_count_cache)
repository.before_delete
end
it 'flushes the branches cache' do
expect(repository).to receive(:expire_branches_cache)
repository.before_delete
end
it 'flushes the branch count cache' do
expect(repository).to receive(:expire_branch_count_cache)
repository.before_delete
end
it 'flushes the root ref cache' do
expect(repository).to receive(:expire_root_ref_cache)
......@@ -1188,36 +1147,18 @@ describe Repository, models: true do
allow(repository).to receive(:exists?).and_return(true)
end
it 'flushes the caches that depend on repository data' do
expect(repository).to receive(:expire_cache)
repository.before_delete
end
it 'flushes the tags cache' do
expect(repository).to receive(:expire_tags_cache)
repository.before_delete
end
it 'flushes the tag count cache' do
expect(repository).to receive(:expire_tag_count_cache)
repository.before_delete
end
it 'flushes the branches cache' do
expect(repository).to receive(:expire_branches_cache)
repository.before_delete
end
it 'flushes the branch count cache' do
expect(repository).to receive(:expire_branch_count_cache)
repository.before_delete
end
it 'flushes the root ref cache' do
expect(repository).to receive(:expire_root_ref_cache)
......@@ -1248,8 +1189,9 @@ describe Repository, models: true do
describe '#before_push_tag' do
it 'flushes the cache' do
expect(repository).to receive(:expire_cache)
expect(repository).to receive(:expire_tag_count_cache)
expect(repository).to receive(:expire_statistics_caches)
expect(repository).to receive(:expire_emptiness_caches)
expect(repository).to receive(:expire_tags_cache)
repository.before_push_tag
end
......@@ -1266,17 +1208,23 @@ describe Repository, models: true do
describe '#after_import' do
it 'flushes and builds the cache' do
expect(repository).to receive(:expire_content_cache)
expect(repository).to receive(:build_cache)
expect(repository).to receive(:expire_tags_cache)
expect(repository).to receive(:expire_branches_cache)
repository.after_import
end
end
describe '#after_push_commit' do
it 'flushes the cache' do
expect(repository).to receive(:expire_cache).with('master', '123')
it 'expires statistics caches' do
expect(repository).to receive(:expire_statistics_caches).
and_call_original
repository.after_push_commit('master', '123')
expect(repository).to receive(:expire_branch_cache).
with('master').
and_call_original
repository.after_push_commit('master')
end
end
......@@ -1370,7 +1318,8 @@ describe Repository, models: true do
describe '#before_remove_tag' do
it 'flushes the tag cache' do
expect(repository).to receive(:expire_tag_count_cache)
expect(repository).to receive(:expire_tags_cache).and_call_original
expect(repository).to receive(:expire_statistics_caches).and_call_original
repository.before_remove_tag
end
......@@ -1388,23 +1337,23 @@ describe Repository, models: true do
end
end
describe '#expire_branch_count_cache' do
let(:cache) { repository.send(:cache) }
describe '#expire_branches_cache' do
it 'expires the cache' do
expect(cache).to receive(:expire).with(:branch_count)
expect(repository).to receive(:expire_method_caches).
with(%i(branch_names branch_count)).
and_call_original
repository.expire_branch_count_cache
repository.expire_branches_cache
end
end
describe '#expire_tag_count_cache' do
let(:cache) { repository.send(:cache) }
describe '#expire_tags_cache' do
it 'expires the cache' do
expect(cache).to receive(:expire).with(:tag_count)
expect(repository).to receive(:expire_method_caches).
with(%i(tag_names tag_count)).
and_call_original
repository.expire_tag_count_cache
repository.expire_tags_cache
end
end
......@@ -1480,131 +1429,253 @@ describe Repository, models: true do
describe '#avatar' do
it 'returns nil if repo does not exist' do
expect(repository).to receive(:exists?).and_return(false)
expect(repository).to receive(:file_on_head).
and_raise(Rugged::ReferenceError)
expect(repository.avatar).to eq(nil)
end
it 'returns the first avatar file found in the repository' do
expect(repository).to receive(:blob_at_branch).
with('master', 'logo.png').
and_return(true)
expect(repository).to receive(:file_on_head).
with(:avatar).
and_return(double(:tree, path: 'logo.png'))
expect(repository.avatar).to eq('logo.png')
end
it 'caches the output' do
allow(repository).to receive(:blob_at_branch).
with('master', 'logo.png').
and_return(true)
expect(repository.avatar).to eq('logo.png')
expect(repository).to receive(:file_on_head).
with(:avatar).
once.
and_return(double(:tree, path: 'logo.png'))
expect(repository).not_to receive(:blob_at_branch)
expect(repository.avatar).to eq('logo.png')
2.times { expect(repository.avatar).to eq('logo.png') }
end
end
describe '#expire_avatar_cache' do
describe '#expire_exists_cache' do
let(:cache) { repository.send(:cache) }
before do
allow(repository).to receive(:cache).and_return(cache)
it 'expires the cache' do
expect(cache).to receive(:expire).with(:exists?)
repository.expire_exists_cache
end
end
context 'without a branch or revision' do
it 'flushes the cache' do
expect(cache).to receive(:expire).with(:avatar)
describe "#keep_around" do
it "does not fail if we attempt to reference bad commit" do
expect(repository.kept_around?('abc1234')).to be_falsey
end
repository.expire_avatar_cache
end
it "stores a reference to the specified commit sha so it isn't garbage collected" do
repository.keep_around(sample_commit.id)
expect(repository.kept_around?(sample_commit.id)).to be_truthy
end
it "attempting to call keep_around on truncated ref does not fail" do
repository.keep_around(sample_commit.id)
ref = repository.send(:keep_around_ref_name, sample_commit.id)
path = File.join(repository.path, ref)
# Corrupt the reference
File.truncate(path, 0)
expect(repository.kept_around?(sample_commit.id)).to be_falsey
repository.keep_around(sample_commit.id)
expect(repository.kept_around?(sample_commit.id)).to be_falsey
File.delete(path)
end
end
context 'with a branch' do
it 'does not flush the cache if the branch is not the default branch' do
expect(cache).not_to receive(:expire)
describe '#update_ref!' do
it 'can create a ref' do
repository.update_ref!('refs/heads/foobar', 'refs/heads/master', Gitlab::Git::BLANK_SHA)
expect(repository.find_branch('foobar')).not_to be_nil
end
repository.expire_avatar_cache('cats')
it 'raises CommitError when the ref update fails' do
expect do
repository.update_ref!('refs/heads/master', 'refs/heads/master', Gitlab::Git::BLANK_SHA)
end.to raise_error(Repository::CommitError)
end
end
describe '#contribution_guide', caching: true do
it 'returns and caches the output' do
expect(repository).to receive(:file_on_head).
with(:contributing).
and_return(Gitlab::Git::Tree.new(path: 'CONTRIBUTING.md')).
once
2.times do
expect(repository.contribution_guide).
to be_an_instance_of(Gitlab::Git::Tree)
end
end
end
it 'flushes the cache if the branch equals the default branch' do
expect(cache).to receive(:expire).with(:avatar)
describe '#gitignore', caching: true do
it 'returns and caches the output' do
expect(repository).to receive(:file_on_head).
with(:gitignore).
and_return(Gitlab::Git::Tree.new(path: '.gitignore')).
once
repository.expire_avatar_cache(repository.root_ref)
2.times do
expect(repository.gitignore).to be_an_instance_of(Gitlab::Git::Tree)
end
end
end
context 'with a branch and revision' do
let(:commit) { double(:commit) }
describe '#koding_yml', caching: true do
it 'returns and caches the output' do
expect(repository).to receive(:file_on_head).
with(:koding).
and_return(Gitlab::Git::Tree.new(path: '.koding.yml')).
once
before do
allow(repository).to receive(:commit).and_return(commit)
2.times do
expect(repository.koding_yml).to be_an_instance_of(Gitlab::Git::Tree)
end
end
end
it 'does not flush the cache if the commit does not change any logos' do
diff = double(:diff, new_path: 'test.txt')
describe '#readme', caching: true do
context 'with a non-existing repository' do
it 'returns nil' do
expect(repository).to receive(:tree).with(:head).and_return(nil)
expect(commit).to receive(:raw_diffs).and_return([diff])
expect(cache).not_to receive(:expire)
expect(repository.readme).to be_nil
end
end
repository.expire_avatar_cache(repository.root_ref, '123')
context 'with an existing repository' do
it 'returns the README' do
expect(repository.readme).to be_an_instance_of(Gitlab::Git::Blob)
end
end
end
it 'flushes the cache if the commit changes any of the logos' do
diff = double(:diff, new_path: Repository::AVATAR_FILES[0])
describe '#expire_statistics_caches' do
it 'expires the caches' do
expect(repository).to receive(:expire_method_caches).
with(%i(size commit_count))
expect(commit).to receive(:raw_diffs).and_return([diff])
expect(cache).to receive(:expire).with(:avatar)
repository.expire_statistics_caches
end
end
repository.expire_avatar_cache(repository.root_ref, '123')
end
describe '#expire_method_caches' do
it 'expires the caches of the given methods' do
expect_any_instance_of(RepositoryCache).to receive(:expire).with(:readme)
expect_any_instance_of(RepositoryCache).to receive(:expire).with(:gitignore)
repository.expire_method_caches(%i(readme gitignore))
end
end
describe '#expire_exists_cache' do
let(:cache) { repository.send(:cache) }
describe '#expire_all_method_caches' do
it 'expires the caches of all methods' do
expect(repository).to receive(:expire_method_caches).
with(Repository::CACHED_METHODS)
repository.expire_all_method_caches
end
end
describe '#expire_avatar_cache' do
it 'expires the cache' do
expect(cache).to receive(:expire).with(:exists?)
expect(repository).to receive(:expire_method_caches).with(%i(avatar))
repository.expire_exists_cache
repository.expire_avatar_cache
end
end
describe '#build_cache' do
let(:cache) { repository.send(:cache) }
describe '#file_on_head' do
context 'with a non-existing repository' do
it 'returns nil' do
expect(repository).to receive(:tree).with(:head).and_return(nil)
expect(repository.file_on_head(:readme)).to be_nil
end
end
it 'builds the caches if they do not already exist' do
cache_keys = repository.cache_keys + repository.cache_keys_for_branches_and_tags
context 'with a repository that has no blobs' do
it 'returns nil' do
expect_any_instance_of(Tree).to receive(:blobs).and_return([])
expect(cache).to receive(:exist?).
exactly(cache_keys.length).
times.
and_return(false)
expect(repository.file_on_head(:readme)).to be_nil
end
end
context 'with an existing repository' do
it 'returns a Gitlab::Git::Tree' do
expect(repository.file_on_head(:readme)).
to be_an_instance_of(Gitlab::Git::Tree)
end
end
end
describe '#head_tree' do
context 'with an existing repository' do
it 'returns a Tree' do
expect(repository.head_tree).to be_an_instance_of(Tree)
end
end
context 'with a non-existing repository' do
it 'returns nil' do
expect(repository).to receive(:head_commit).and_return(nil)
cache_keys.each do |key|
expect(repository).to receive(key)
expect(repository.head_tree).to be_nil
end
end
end
repository.build_cache
describe '#tree' do
context 'using a non-existing repository' do
before do
allow(repository).to receive(:head_commit).and_return(nil)
end
it 'returns nil' do
expect(repository.tree(:head)).to be_nil
end
it 'returns nil when using a path' do
expect(repository.tree(:head, 'README.md')).to be_nil
end
end
it 'does not build any caches that already exist' do
cache_keys = repository.cache_keys + repository.cache_keys_for_branches_and_tags
context 'using an existing repository' do
it 'returns a Tree' do
expect(repository.tree(:head)).to be_an_instance_of(Tree)
end
end
end
expect(cache).to receive(:exist?).
exactly(cache_keys.length).
times.
and_return(true)
describe '#size' do
context 'with a non-existing repository' do
it 'returns 0' do
expect(repository).to receive(:exists?).and_return(false)
cache_keys.each do |key|
expect(repository).not_to receive(key)
expect(repository.size).to eq(0.0)
end
end
repository.build_cache
context 'with an existing repository' do
it 'returns the repository size as a Float' do
expect(repository.size).to be_an_instance_of(Float)
end
end
end
<<<<<<< HEAD
describe '#push_remote_branches' do
it 'pushes branches to the remote repo' do
expect_any_instance_of(Gitlab::Shell).to receive(:push_remote_branches).
......@@ -1683,42 +1754,72 @@ describe Repository, models: true do
describe "#keep_around" do
it "does not fail if we attempt to reference bad commit" do
expect(repository.kept_around?('abc1234')).to be_falsey
=======
describe '#commit_count' do
context 'with a non-existing repository' do
it 'returns 0' do
expect(repository).to receive(:root_ref).and_return(nil)
expect(repository.commit_count).to eq(0)
end
>>>>>>> ce/master
end
it "stores a reference to the specified commit sha so it isn't garbage collected" do
repository.keep_around(sample_commit.id)
context 'with an existing repository' do
it 'returns the commit count' do
expect(repository.commit_count).to be_an_instance_of(Fixnum)
end
end
end
expect(repository.kept_around?(sample_commit.id)).to be_truthy
describe '#cache_method_output', caching: true do
context 'with a non-existing repository' do
let(:value) do
repository.cache_method_output(:cats, fallback: 10) do
raise Rugged::ReferenceError
end
end
it 'returns a fallback value' do
expect(value).to eq(10)
end
it 'does not cache the data' do
value
expect(repository.instance_variable_defined?(:@cats)).to eq(false)
expect(repository.send(:cache).exist?(:cats)).to eq(false)
end
end
it "attempting to call keep_around on truncated ref does not fail" do
repository.keep_around(sample_commit.id)
ref = repository.send(:keep_around_ref_name, sample_commit.id)
path = File.join(repository.path, ref)
# Corrupt the reference
File.truncate(path, 0)
context 'with an existing repository' do
it 'caches the output' do
object = double
expect(repository.kept_around?(sample_commit.id)).to be_falsey
expect(object).to receive(:number).once.and_return(10)
repository.keep_around(sample_commit.id)
2.times do
val = repository.cache_method_output(:cats) { object.number }
expect(repository.kept_around?(sample_commit.id)).to be_falsey
expect(val).to eq(10)
end
File.delete(path)
expect(repository.send(:cache).exist?(:cats)).to eq(true)
expect(repository.instance_variable_get(:@cats)).to eq(10)
end
end
end
describe '#update_ref!' do
it 'can create a ref' do
repository.update_ref!('refs/heads/foobar', 'refs/heads/master', Gitlab::Git::BLANK_SHA)
describe '#refresh_method_caches' do
it 'refreshes the caches of the given types' do
expect(repository).to receive(:expire_method_caches).
with(%i(readme license_blob license_key))
expect(repository.find_branch('foobar')).not_to be_nil
end
expect(repository).to receive(:readme)
expect(repository).to receive(:license_blob)
expect(repository).to receive(:license_key)
it 'raises CommitError when the ref update fails' do
expect do
repository.update_ref!('refs/heads/master', 'refs/heads/master', Gitlab::Git::BLANK_SHA)
end.to raise_error(Repository::CommitError)
repository.refresh_method_caches(%i(readme license))
end
end
......
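The repository specs above exercise a small method-cache API: cache_method_output wraps a computation in a cache with an optional fallback, expire_method_caches and expire_all_method_caches drop cached values, and refresh_method_caches expires and immediately rebuilds them. A minimal sketch of behaviour that would satisfy those expectations follows; it is not the shipped GitLab code, and the CACHES_BY_TYPE grouping plus the cache.fetch helper are assumptions inferred from the specs alone.

# Sketch only: mirrors the behaviour asserted above, not the real Repository model.
class Repository
  CACHED_METHODS = %i(readme gitignore license_blob license_key avatar).freeze

  # Hypothetical grouping, inferred from refresh_method_caches(%i(readme license)).
  CACHES_BY_TYPE = {
    readme: %i(readme),
    license: %i(license_blob license_key)
  }.freeze

  # Memoizes the block's result in the cache and in an ivar; when the block
  # raises Rugged::ReferenceError nothing is cached and the fallback is returned.
  def cache_method_output(key, fallback: nil)
    value = cache.fetch(key) { yield } # assumes a fetch(key, &block) helper on the cache
    instance_variable_set(ivar_for(key), value)
  rescue Rugged::ReferenceError
    fallback
  end

  # Drops the cached value (and the memoized ivar) for each given method.
  def expire_method_caches(methods)
    methods.each do |name|
      cache.expire(name)
      ivar = ivar_for(name)
      remove_instance_variable(ivar) if instance_variable_defined?(ivar)
    end
  end

  def expire_all_method_caches
    expire_method_caches(CACHED_METHODS)
  end

  # Expires and immediately recomputes the caches behind the given types,
  # e.g. refresh_method_caches(%i(readme license)).
  def refresh_method_caches(types)
    methods = types.flat_map { |type| CACHES_BY_TYPE.fetch(type, []) }

    expire_method_caches(methods)
    methods.each { |name| public_send(name) }
  end

  private

  def cache
    @cache ||= RepositoryCache.new(self)
  end

  def ivar_for(name)
    "@#{name.to_s.delete('?!')}"
  end
end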
......@@ -14,7 +14,7 @@ describe API::API, api: true do
describe "GET /projects/:id/repository/branches" do
it "returns an array of project branches" do
project.repository.expire_cache
project.repository.expire_all_method_caches
get api("/projects/#{project.id}/repository/branches", user)
expect(response).to have_http_status(200)
......
......@@ -41,6 +41,52 @@ describe API::API, api: true do
end
end
describe 'POST /projects/:id/pipeline ' do
context 'authorized user' do
context 'with gitlab-ci.yml' do
before { stub_ci_pipeline_to_return_yaml_file }
it 'creates and returns a new pipeline' do
expect do
post api("/projects/#{project.id}/pipeline", user), ref: project.default_branch
end.to change { Ci::Pipeline.count }.by(1)
expect(response).to have_http_status(201)
expect(json_response).to be_a Hash
expect(json_response['sha']).to eq project.commit.id
end
it 'fails when using an invalid ref' do
post api("/projects/#{project.id}/pipeline", user), ref: 'invalid_ref'
expect(response).to have_http_status(400)
expect(json_response['message']['base'].first).to eq 'Reference not found'
expect(json_response).not_to be_an Array
end
end
context 'without gitlab-ci.yml' do
it 'fails to create pipeline' do
post api("/projects/#{project.id}/pipeline", user), ref: project.default_branch
expect(response).to have_http_status(400)
expect(json_response['message']['base'].first).to eq 'Missing .gitlab-ci.yml file'
expect(json_response).not_to be_an Array
end
end
end
context 'unauthorized user' do
it 'does not create pipeline' do
post api("/projects/#{project.id}/pipeline", non_member), ref: project.default_branch
expect(response).to have_http_status(404)
expect(json_response['message']).to eq '404 Project Not Found'
expect(json_response).not_to be_an Array
end
end
end
describe 'GET /projects/:id/pipelines/:pipeline_id' do
context 'authorized user' do
it 'returns project pipelines' do
......
......@@ -3,10 +3,12 @@ require 'rails_helper'
describe API::API, api: true do
include ApiHelpers
let(:project) { create(:empty_project, :public) }
let(:admin) { create(:admin) }
describe 'GET /projects/:project_id/snippets/:id' do
# TODO (rspeicher): Deprecated; remove in 9.0
it 'always exposes expires_at as nil' do
admin = create(:admin)
snippet = create(:project_snippet, author: admin)
get api("/projects/#{snippet.project.id}/snippets/#{snippet.id}", admin)
......@@ -17,9 +19,9 @@ describe API::API, api: true do
end
describe 'GET /projects/:project_id/snippets/' do
let(:user) { create(:user) }
it 'returns all snippets available to team member' do
project = create(:project, :public)
user = create(:user)
project.team << [user, :developer]
public_snippet = create(:project_snippet, :public, project: project)
internal_snippet = create(:project_snippet, :internal, project: project)
......@@ -34,8 +36,6 @@ describe API::API, api: true do
end
it 'hides private snippets from regular user' do
project = create(:project, :public)
user = create(:user)
create(:project_snippet, :private, project: project)
get api("/projects/#{project.id}/snippets/", user)
......@@ -45,16 +45,16 @@ describe API::API, api: true do
end
describe 'POST /projects/:project_id/snippets/' do
it 'creates a new snippet' do
admin = create(:admin)
project = create(:project)
params = {
let(:params) do
{
title: 'Test Title',
file_name: 'test.rb',
code: 'puts "hello world"',
visibility_level: Gitlab::VisibilityLevel::PUBLIC
}
end
it 'creates a new snippet' do
post api("/projects/#{project.id}/snippets/", admin), params
expect(response).to have_http_status(201)
......@@ -64,12 +64,20 @@ describe API::API, api: true do
expect(snippet.file_name).to eq(params[:file_name])
expect(snippet.visibility_level).to eq(params[:visibility_level])
end
it 'returns 400 for missing parameters' do
params.delete(:title)
post api("/projects/#{project.id}/snippets/", admin), params
expect(response).to have_http_status(400)
end
end
describe 'PUT /projects/:project_id/snippets/:id/' do
let(:snippet) { create(:project_snippet, author: admin) }
it 'updates snippet' do
admin = create(:admin)
snippet = create(:project_snippet, author: admin)
new_content = 'New content'
put api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/", admin), code: new_content
......@@ -78,9 +86,24 @@ describe API::API, api: true do
snippet.reload
expect(snippet.content).to eq(new_content)
end
it 'returns 404 for invalid snippet id' do
put api("/projects/#{snippet.project.id}/snippets/1234", admin), title: 'foo'
expect(response).to have_http_status(404)
expect(json_response['message']).to eq('404 Snippet Not Found')
end
it 'returns 400 for missing parameters' do
put api("/projects/#{project.id}/snippets/1234", admin)
expect(response).to have_http_status(400)
end
end
describe 'DELETE /projects/:project_id/snippets/:id/' do
let(:snippet) { create(:project_snippet, author: admin) }
it 'deletes snippet' do
admin = create(:admin)
snippet = create(:project_snippet, author: admin)
......@@ -89,18 +112,31 @@ describe API::API, api: true do
expect(response).to have_http_status(200)
end
it 'returns 404 for invalid snippet id' do
delete api("/projects/#{snippet.project.id}/snippets/1234", admin)
expect(response).to have_http_status(404)
expect(json_response['message']).to eq('404 Snippet Not Found')
end
end
describe 'GET /projects/:project_id/snippets/:id/raw' do
it 'returns raw text' do
admin = create(:admin)
snippet = create(:project_snippet, author: admin)
let(:snippet) { create(:project_snippet, author: admin) }
it 'returns raw text' do
get api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/raw", admin)
expect(response).to have_http_status(200)
expect(response.content_type).to eq 'text/plain'
expect(response.body).to eq(snippet.content)
end
it 'returns 404 for invalid snippet id' do
delete api("/projects/#{snippet.project.id}/snippets/1234", admin)
expect(response).to have_http_status(404)
expect(json_response['message']).to eq('404 Snippet Not Found')
end
end
end
......@@ -17,6 +17,10 @@ describe Ci::API::API do
let!(:build) { create(:ci_build, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
let(:user_agent) { 'gitlab-ci-multi-runner 1.5.2 (1-5-stable; go1.6.3; linux/amd64)' }
before do
stub_container_registry_config(enabled: false)
end
shared_examples 'no builds available' do
context 'when runner sends version in User-Agent' do
context 'for stable version' do
......@@ -53,6 +57,41 @@ describe Ci::API::API do
it 'updates runner info' do
expect { register_builds }.to change { runner.reload.contacted_at }
end
context 'registry credentials' do
let(:registry_credentials) do
{ 'type' => 'registry',
'url' => 'registry.example.com:5005',
'username' => 'gitlab-ci-token',
'password' => build.token }
end
context 'when registry is enabled' do
before do
stub_container_registry_config(enabled: true, host_port: 'registry.example.com:5005')
end
it 'sends registry credentials key' do
register_builds info: { platform: :darwin }
expect(json_response).to have_key('credentials')
expect(json_response['credentials']).to include(registry_credentials)
end
end
context 'when registry is disabled' do
before do
stub_container_registry_config(enabled: false, host_port: 'registry.example.com:5005')
end
it 'does not send registry credentials' do
register_builds info: { platform: :darwin }
expect(json_response).to have_key('credentials')
expect(json_response['credentials']).not_to include(registry_credentials)
end
end
end
end
context 'when builds are finished' do
......
......@@ -261,20 +261,28 @@ describe "Authentication", "routing" do
end
describe "Groups", "routing" do
let(:name) { 'complex.group-namegit' }
it "to #show" do
expect(get("/groups/1")).to route_to('groups#show', id: '1')
expect(get("/groups/#{name}")).to route_to('groups#show', id: name)
end
it "also display group#show on the short path" do
allow(Group).to receive(:find_by).and_return(true)
expect(get('/1')).to route_to('groups#show', id: '1')
expect(get("/#{name}")).to route_to('groups#show', id: name)
end
it "also display group#show with dot in the path" do
allow(Group).to receive(:find_by).and_return(true)
it "to #activity" do
expect(get("/groups/#{name}/activity")).to route_to('groups#activity', id: name)
end
it "to #issues" do
expect(get("/groups/#{name}/issues")).to route_to('groups#issues', id: name)
end
expect(get('/group.with.dot')).to route_to('groups#show', id: 'group.with.dot')
it "to #members" do
expect(get("/groups/#{name}/group_members")).to route_to('groups/group_members#index', group_id: name)
end
end
......
......@@ -27,27 +27,14 @@ describe GitPushService, services: true do
it { is_expected.to be_truthy }
it 'flushes general cached data' do
expect(project.repository).to receive(:expire_cache).
with('master', newrev)
it 'calls the after_push_commit hook' do
expect(project.repository).to receive(:after_push_commit).with('master')
subject
end
it 'flushes the visible content cache' do
expect(project.repository).to receive(:expire_has_visible_content_cache)
subject
end
it 'flushes the branches cache' do
expect(project.repository).to receive(:expire_branches_cache)
subject
end
it 'flushes the branch count cache' do
expect(project.repository).to receive(:expire_branch_count_cache)
it 'calls the after_create_branch hook' do
expect(project.repository).to receive(:after_create_branch)
subject
end
......@@ -56,21 +43,8 @@ describe GitPushService, services: true do
context 'existing branch' do
it { is_expected.to be_truthy }
it 'flushes general cached data' do
expect(project.repository).to receive(:expire_cache).
with('master', newrev)
subject
end
it 'does not flush the branches cache' do
expect(project.repository).not_to receive(:expire_branches_cache)
subject
end
it 'does not flush the branch count cache' do
expect(project.repository).not_to receive(:expire_branch_count_cache)
it 'calls the after_push_commit hook' do
expect(project.repository).to receive(:after_push_commit).with('master')
subject
end
......@@ -81,27 +55,14 @@ describe GitPushService, services: true do
it { is_expected.to be_truthy }
it 'flushes the visible content cache' do
expect(project.repository).to receive(:expire_has_visible_content_cache)
subject
end
it 'flushes the branches cache' do
expect(project.repository).to receive(:expire_branches_cache)
subject
end
it 'flushes the branch count cache' do
expect(project.repository).to receive(:expire_branch_count_cache)
it 'calls the after_push_commit hook' do
expect(project.repository).to receive(:after_push_commit).with('master')
subject
end
it 'flushes general cached data' do
expect(project.repository).to receive(:expire_cache).
with('master', newrev)
it 'calls the after_remove_branch hook' do
expect(project.repository).to receive(:after_remove_branch)
subject
end
......@@ -622,6 +583,51 @@ describe GitPushService, services: true do
end
end
describe '#update_caches' do
let(:service) do
described_class.new(project,
user,
oldrev: sample_commit.parent_id,
newrev: sample_commit.id,
ref: 'refs/heads/master')
end
context 'on the default branch' do
before do
allow(service).to receive(:is_default_branch?).and_return(true)
end
it 'flushes the caches of any special files that have been changed' do
commit = double(:commit)
diff = double(:diff, new_path: 'README.md')
expect(commit).to receive(:raw_diffs).with(deltas_only: true).
and_return([diff])
service.push_commits = [commit]
expect(ProjectCacheWorker).to receive(:perform_async).
with(project.id, %i(readme))
service.update_caches
end
end
context 'on a non-default branch' do
before do
allow(service).to receive(:is_default_branch?).and_return(false)
end
it 'does not flush any conditional caches' do
expect(ProjectCacheWorker).to receive(:perform_async).
with(project.id, []).
and_call_original
service.update_caches
end
end
end
def execute_service(project, user, oldrev, newrev, ref)
service = described_class.new(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
service.execute
......
......@@ -18,7 +18,7 @@ describe GitTagPushService, services: true do
end
it 'flushes general cached data' do
expect(project.repository).to receive(:expire_cache)
expect(project.repository).to receive(:before_push_tag)
subject
end
......@@ -28,12 +28,6 @@ describe GitTagPushService, services: true do
subject
end
it 'flushes the tag count cache' do
expect(project.repository).to receive(:expire_tag_count_cache)
subject
end
end
describe "Git Tag Push Data" do
......
require 'spec_helper'
# Write specs in this file.
describe MergeRequests::AddTodoWhenBuildFailsService do
let(:user) { create(:user) }
let(:merge_request) { create(:merge_request) }
let(:project) { create(:project) }
let(:sha) { '1234567890abcdef1234567890abcdef12345678' }
let(:pipeline) { create(:ci_pipeline_with_one_job, ref: merge_request.source_branch, project: project, sha: sha) }
let(:service) { MergeRequests::AddTodoWhenBuildFailsService.new(project, user, commit_message: 'Awesome message') }
let(:ref) { merge_request.source_branch }
let(:pipeline) do
create(:ci_pipeline_with_one_job, ref: ref,
project: project,
sha: sha)
end
let(:service) do
described_class.new(project, user, commit_message: 'Awesome message')
end
let(:todo_service) { TodoService.new }
let(:merge_request) do
......@@ -23,7 +32,9 @@ describe MergeRequests::AddTodoWhenBuildFailsService do
describe '#execute' do
context 'commit status with ref' do
let(:commit_status) { create(:generic_commit_status, ref: merge_request.source_branch, pipeline: pipeline) }
let(:commit_status) do
create(:generic_commit_status, ref: ref, pipeline: pipeline)
end
it 'notifies the todo service' do
expect(todo_service).to receive(:merge_request_build_failed).with(merge_request)
......@@ -32,7 +43,7 @@ describe MergeRequests::AddTodoWhenBuildFailsService do
end
context 'commit status with non-HEAD ref' do
let(:commit_status) { create(:generic_commit_status, ref: merge_request.source_branch) }
let(:commit_status) { create(:generic_commit_status, ref: ref) }
it 'does not notify the todo service' do
expect(todo_service).not_to receive(:merge_request_build_failed)
......@@ -48,6 +59,18 @@ describe MergeRequests::AddTodoWhenBuildFailsService do
service.execute(commit_status)
end
end
context 'when commit status is a build allowed to fail' do
let(:commit_status) do
create(:ci_build, :allowed_to_fail, ref: ref, pipeline: pipeline)
end
it 'does not create todo' do
expect(todo_service).not_to receive(:merge_request_build_failed)
service.execute(commit_status)
end
end
end
describe '#close' do
......
......@@ -75,10 +75,14 @@ describe MergeRequests::MergeService, services: true do
include JiraServiceHelper
let(:jira_tracker) { project.create_jira_service }
let(:jira_issue) { ExternalIssue.new('JIRA-123', project) }
let(:commit) { double('commit', safe_message: "Fixes #{jira_issue.to_reference}") }
before do
project.update_attributes!(has_external_issue_tracker: true)
jira_service_settings
stub_jira_urls(jira_issue.id)
allow(merge_request).to receive(:commits).and_return([commit])
end
it 'closes issues on JIRA issue tracker' do
......@@ -92,6 +96,18 @@ describe MergeRequests::MergeService, services: true do
service.execute(merge_request)
end
context "when jira_issue_transition_id is not present" do
before { allow_any_instance_of(JIRA::Resource::Issue).to receive(:resolution).and_return(nil) }
it "does not close issue" do
allow(jira_tracker).to receive_messages(jira_issue_transition_id: nil)
expect_any_instance_of(JiraService).not_to receive(:transition_issue)
service.execute(merge_request)
end
end
context "wrong issue markdown" do
it 'does not close issues on JIRA issue tracker' do
jira_issue = ExternalIssue.new('#JIRA-123', project)
......
......@@ -302,16 +302,6 @@ describe MergeRequests::RefreshService, services: true do
end
end
context 'when the source branch is deleted' do
it 'does not create a MergeRequestDiff record' do
refresh_service = service.new(@project, @user)
expect do
refresh_service.execute(@oldrev, Gitlab::Git::BLANK_SHA, 'refs/heads/master')
end.not_to change { MergeRequestDiff.count }
end
end
def reload_mrs
@merge_request.reload
@fork_merge_request.reload
......
......@@ -543,25 +543,52 @@ describe SystemNoteService, services: true do
let(:project) { create(:jira_project) }
let(:author) { create(:user) }
let(:issue) { create(:issue, project: project) }
let(:mergereq) { create(:merge_request, :simple, target_project: project, source_project: project) }
let(:merge_request) { create(:merge_request, :simple, target_project: project, source_project: project) }
let(:jira_issue) { ExternalIssue.new("JIRA-1", project)}
let(:jira_tracker) { project.jira_service }
let(:commit) { project.commit }
let(:comment_url) { jira_api_comment_url(jira_issue.id) }
let(:success_message) { "JiraService SUCCESS: Successfully posted to http://jira.example.net." }
before { stub_jira_urls(jira_issue.id) }
before do
stub_jira_urls(jira_issue.id)
jira_service_settings
end
noteable_types = ["merge_requests", "commit"]
noteable_types.each do |type|
context "when noteable is a #{type}" do
it "blocks cross reference when #{type.underscore}_events is false" do
jira_tracker.update("#{type}_events" => false)
context 'in issue' do
before { jira_service_settings }
noteable = type == "commit" ? commit : merge_request
result = described_class.cross_reference(jira_issue, noteable, author)
expect(result).to eq("Events for #{noteable.class.to_s.underscore.humanize.pluralize.downcase} are disabled.")
end
describe "new reference" do
subject { described_class.cross_reference(jira_issue, commit, author) }
it "blocks cross reference when #{type.underscore}_events is true" do
jira_tracker.update("#{type}_events" => true)
it { is_expected.to eq(success_message) }
noteable = type == "commit" ? commit : merge_request
result = described_class.cross_reference(jira_issue, noteable, author)
expect(result).to eq(success_message)
end
end
end
describe "new reference" do
context 'for commits' do
it "creates comment" do
result = described_class.cross_reference(jira_issue, commit, author)
expect(result).to eq(success_message)
end
it "creates remote link" do
subject
described_class.cross_reference(jira_issue, commit, author)
expect(WebMock).to have_requested(:post, jira_api_remote_link_url(jira_issue)).with(
body: hash_including(
......@@ -576,18 +603,18 @@ describe SystemNoteService, services: true do
).once
end
end
end
context 'in commit' do
context 'in JIRA issue tracker' do
before { jira_service_settings }
context 'for issues' do
let(:issue) { create(:issue, project: project) }
subject { described_class.cross_reference(jira_issue, issue, author) }
it "creates comment" do
result = described_class.cross_reference(jira_issue, issue, author)
it { is_expected.to eq(success_message) }
expect(result).to eq(success_message)
end
it "creates remote link" do
subject
described_class.cross_reference(jira_issue, issue, author)
expect(WebMock).to have_requested(:post, jira_api_remote_link_url(jira_issue)).with(
body: hash_including(
......@@ -602,6 +629,32 @@ describe SystemNoteService, services: true do
).once
end
end
context 'for snippets' do
let(:snippet) { create(:snippet, project: project) }
it "creates comment" do
result = described_class.cross_reference(jira_issue, snippet, author)
expect(result).to eq(success_message)
end
it "creates remote link" do
described_class.cross_reference(jira_issue, snippet, author)
expect(WebMock).to have_requested(:post, jira_api_remote_link_url(jira_issue)).with(
body: hash_including(
GlobalID: "GitLab",
object: {
url: namespace_project_snippet_url(project.namespace, project, snippet),
title: "GitLab: Mentioned on snippet - #{snippet.title}",
icon: { title: "GitLab", url16x16: "https://gitlab.com/favicon.ico" },
status: { resolved: false }
}
)
).once
end
end
end
describe "existing reference" do
......@@ -610,9 +663,11 @@ describe SystemNoteService, services: true do
allow_any_instance_of(JIRA::Resource::Issue).to receive(:comments).and_return([OpenStruct.new(body: message)])
end
subject { described_class.cross_reference(jira_issue, commit, author) }
it "does not return success message" do
result = described_class.cross_reference(jira_issue, commit, author)
it { is_expected.not_to eq(success_message) }
expect(result).not_to eq(success_message)
end
it 'does not try to create comment and remote link' do
subject
......
......@@ -6,7 +6,8 @@ module JiraServiceHelper
properties = {
title: "JIRA tracker",
url: JIRA_URL,
project_key: "JIRA"
project_key: "JIRA",
jira_issue_transition_id: '1'
}
jira_tracker.update_attributes(properties: properties, active: true)
......
......@@ -2,53 +2,58 @@ require 'spec_helper'
describe ProjectCacheWorker do
let(:project) { create(:project) }
let(:worker) { described_class.new }
subject { described_class.new }
describe '.perform_async' do
it 'schedules the job when no lease exists' do
allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:exists?).
and_return(false)
describe '#perform' do
before do
allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).
and_return(true)
end
expect_any_instance_of(described_class).to receive(:perform)
context 'with a non-existing project' do
it 'does nothing' do
expect(worker).not_to receive(:update_repository_size)
described_class.perform_async(project.id)
worker.perform(-1)
end
end
it 'does not schedule the job when a lease exists' do
allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:exists?).
and_return(true)
context 'with an existing project without a repository' do
it 'does nothing' do
allow_any_instance_of(Repository).to receive(:exists?).and_return(false)
expect_any_instance_of(described_class).not_to receive(:perform)
expect(worker).not_to receive(:update_repository_size)
described_class.perform_async(project.id)
worker.perform(project.id)
end
end
end
describe '#perform' do
context 'when an exclusive lease can be obtained' do
before do
allow(subject).to receive(:try_obtain_lease_for).with(project.id).
and_return(true)
end
context 'with an existing project' do
it 'updates the repository size' do
expect(worker).to receive(:update_repository_size).and_call_original
it 'updates project cache data' do
expect_any_instance_of(Repository).to receive(:size)
expect_any_instance_of(Repository).to receive(:commit_count)
worker.perform(project.id)
end
expect_any_instance_of(Project).to receive(:update_repository_size)
expect_any_instance_of(Project).to receive(:update_commit_count)
it 'updates the commit count' do
expect_any_instance_of(Project).to receive(:update_commit_count).
and_call_original
<<<<<<< HEAD
expect_any_instance_of(Repository).to receive(:build_cache).and_call_original
subject.perform(project.id)
=======
worker.perform(project.id)
>>>>>>> ce/master
end
it 'handles missing repository data' do
expect_any_instance_of(Repository).to receive(:exists?).and_return(false)
expect_any_instance_of(Repository).not_to receive(:size)
it 'refreshes the method caches' do
expect_any_instance_of(Repository).to receive(:refresh_method_caches).
with(%i(readme)).
and_call_original
subject.perform(project.id)
worker.perform(project.id, %i(readme))
end
context 'when in Geo secondary node' do
......@@ -66,15 +71,30 @@ describe ProjectCacheWorker do
end
end
end
end
context 'when an exclusive lease can not be obtained' do
it 'does nothing' do
allow(subject).to receive(:try_obtain_lease_for).with(project.id).
describe '#update_repository_size' do
context 'when a lease could not be obtained' do
it 'does not update the repository size' do
allow(worker).to receive(:try_obtain_lease_for).
with(project.id, :update_repository_size).
and_return(false)
expect(subject).not_to receive(:update_caches)
expect(project).not_to receive(:update_repository_size)
subject.perform(project.id)
worker.update_repository_size(project)
end
end
context 'when a lease could be obtained' do
it 'updates the repository size' do
allow(worker).to receive(:try_obtain_lease_for).
with(project.id, :update_repository_size).
and_return(true)
expect(project).to receive(:update_repository_size).and_call_original
worker.update_repository_size(project)
end
end
end
......
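The ProjectCacheWorker specs above describe a lease-guarded refresh: the expensive repository size recalculation only runs when an exclusive lease can be obtained, while the commit count and any requested method caches are refreshed on every run. A rough sketch of that shape follows; the lease key, timeout and Sidekiq wiring are assumptions rather than the actual worker.

# Sketch of the behaviour the worker specs describe; details are assumed.
class ProjectCacheWorker
  include Sidekiq::Worker

  LEASE_TIMEOUT = 15.minutes.to_i # assumed duration

  def perform(project_id, refresh = [])
    project = Project.find_by(id: project_id)
    return unless project && project.repository.exists?

    update_repository_size(project)
    project.update_commit_count
    project.repository.refresh_method_caches(refresh.map(&:to_sym))
  end

  # Recalculates the repository size only when no other job holds the lease.
  def update_repository_size(project)
    return unless try_obtain_lease_for(project.id, :update_repository_size)

    project.update_repository_size
  end

  private

  def try_obtain_lease_for(id, section)
    Gitlab::ExclusiveLease.
      new("project_cache_worker:#{id}:#{section}", timeout: LEASE_TIMEOUT).
      try_obtain
  end
end

With that shape, worker.perform(project.id, %i(readme)) expires and rebuilds only the readme cache, matching the expectation asserted earlier in the spec.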