Commit 9ff500d3 authored by Alejandro Rodríguez

Merge remote-tracking branch 'ce/master' into ce_upstream

parents 794f3e88 01083870
......@@ -325,7 +325,11 @@ trigger_docs:
cache: {}
artifacts: {}
script:
<<<<<<< HEAD
- "curl -X POST -F token=${DOCS_TRIGGER_TOKEN} -F ref=master -F variables[PROJECT]=ee https://gitlab.com/api/v3/projects/1794617/trigger/builds"
=======
- "curl -X POST -F token=${DOCS_TRIGGER_TOKEN} -F ref=master -F variables[PROJECT]=ce https://gitlab.com/api/v3/projects/1794617/trigger/builds"
>>>>>>> ce/master
only:
- master@gitlab-org/gitlab-ee
......
......@@ -71,6 +71,7 @@ entry.
- Fix applying GitHub-imported labels when importing job is interrupted
- Allow searching for users by secondary email address in the admin interface (/admin/users) !7115 (YarNayar)
- Updated commit SHA styling on the branches page.
- Fix 404 when visiting the /projects page
## 8.13.3 (2016-11-02)
......
......@@ -26,7 +26,7 @@ gem 'omniauth-bitbucket', '~> 0.0.2'
gem 'omniauth-cas3', '~> 1.1.2'
gem 'omniauth-facebook', '~> 4.0.0'
gem 'omniauth-github', '~> 1.1.1'
gem 'omniauth-gitlab', '~> 1.0.0'
gem 'omniauth-gitlab', '~> 1.0.2'
gem 'omniauth-google-oauth2', '~> 0.4.1'
gem 'omniauth-kerberos', '~> 0.3.0', group: :kerberos
gem 'omniauth-saml', '~> 1.7.0'
......@@ -162,7 +162,7 @@ gem 'settingslogic', '~> 2.0.9'
gem 'version_sorter', '~> 2.1.0'
# Cache
gem 'redis-rails', '~> 4.0.0'
gem 'redis-rails', '~> 5.0.1'
# Redis
gem 'redis', '~> 3.2'
......
......@@ -480,7 +480,7 @@ GEM
omniauth-github (1.1.2)
omniauth (~> 1.0)
omniauth-oauth2 (~> 1.1)
omniauth-gitlab (1.0.1)
omniauth-gitlab (1.0.2)
omniauth (~> 1.0)
omniauth-oauth2 (~> 1.0)
omniauth-google-oauth2 (0.4.1)
......@@ -597,23 +597,23 @@ GEM
json
redcarpet (3.3.3)
redis (3.2.2)
redis-actionpack (4.0.1)
actionpack (~> 4)
redis-rack (~> 1.5.0)
redis-store (~> 1.1.0)
redis-activesupport (4.1.5)
activesupport (>= 3, < 5)
redis-store (~> 1.1.0)
redis-actionpack (5.0.1)
actionpack (>= 4.0, < 6)
redis-rack (>= 1, < 3)
redis-store (>= 1.1.0, < 1.4.0)
redis-activesupport (5.0.1)
activesupport (>= 3, < 6)
redis-store (~> 1.2.0)
redis-namespace (1.5.2)
redis (~> 3.0, >= 3.0.4)
redis-rack (1.5.0)
redis-rack (1.6.0)
rack (~> 1.5)
redis-store (~> 1.1.0)
redis-rails (4.0.0)
redis-actionpack (~> 4)
redis-activesupport (~> 4)
redis-store (~> 1.1.0)
redis-store (1.1.7)
redis-store (~> 1.2.0)
redis-rails (5.0.1)
redis-actionpack (~> 5.0.0)
redis-activesupport (~> 5.0.0)
redis-store (~> 1.2.0)
redis-store (1.2.0)
redis (>= 2.2)
request_store (1.3.1)
rerun (0.11.0)
......@@ -946,7 +946,7 @@ DEPENDENCIES
omniauth-cas3 (~> 1.1.2)
omniauth-facebook (~> 4.0.0)
omniauth-github (~> 1.1.1)
omniauth-gitlab (~> 1.0.0)
omniauth-gitlab (~> 1.0.2)
omniauth-google-oauth2 (~> 0.4.1)
omniauth-kerberos (~> 0.3.0)
omniauth-saml (~> 1.7.0)
......@@ -971,7 +971,7 @@ DEPENDENCIES
redcarpet (~> 3.3.3)
redis (~> 3.2)
redis-namespace (~> 1.5.2)
redis-rails (~> 4.0.0)
redis-rails (~> 5.0.1)
request_store (~> 1.3)
rerun (~> 0.11.0)
responders (~> 2.0)
......@@ -1028,4 +1028,4 @@ DEPENDENCIES
wikicloth (= 0.8.1)
BUNDLED WITH
1.13.5
1.13.6
......@@ -10,6 +10,7 @@
Issuable.initSearch();
Issuable.initChecks();
Issuable.initResetFilters();
Issuable.resetIncomingEmailToken();
return Issuable.initLabelFilterRemove();
},
initTemplates: function() {
......@@ -154,6 +155,27 @@
this.issuableBulkActions.willUpdateLabels = false;
}
return true;
},
resetIncomingEmailToken: function() {
$('.incoming-email-token-reset').on('click', function(e) {
e.preventDefault();
$.ajax({
type: 'PUT',
url: $('.incoming-email-token-reset').attr('href'),
dataType: 'json',
success: function(response) {
$('#issue_email').val(response.new_issue_address).focus();
},
beforeSend: function() {
$('.incoming-email-token-reset').text('resetting...');
},
complete: function() {
$('.incoming-email-token-reset').text('reset it');
}
});
});
}
};
......
......@@ -9,6 +9,8 @@
(function() {
$(function() {
if (!$(".network-graph").length) return;
var network_graph;
network_graph = new Network({
url: $(".network-graph").attr('data-url'),
......
......@@ -63,7 +63,7 @@
}
.select2-highlighted {
background: #3084bb !important;
background: $gl-link-color !important;
}
.select2-results li.select2-result-with-children > .select2-result-label {
......
......@@ -103,7 +103,7 @@ $gl-text-color-light: #8c8c8c;
$gl-text-green: #4a2;
$gl-text-red: #d12f19;
$gl-text-orange: #d90;
$gl-link-color: #3084bb;
$gl-link-color: #3777b0;
$gl-dark-link-color: #333;
$gl-placeholder-color: #8f8f8f;
$gl-icon-color: $gl-placeholder-color;
......@@ -197,7 +197,7 @@ $line-number-new: #ddfbe6;
$line-number-select: #fbf2da;
$match-line: $gray-light;
$table-border-gray: #f0f0f0;
$line-target-blue: #eaf3fc;
$line-target-blue: #f6faff;
$line-select-yellow: #fcf8e7;
$line-select-yellow-dark: #f0e2bd;
......
......@@ -23,6 +23,10 @@
color: $md-link-color;
}
.private-tokens-reset div.reset-action:not(:first-child) {
padding-top: 15px;
}
.oauth-buttons {
.btn-group {
margin-right: 10px;
......
......@@ -133,7 +133,16 @@ class Admin::ApplicationSettingsController < Admin::ApplicationController
:elasticsearch_port,
:usage_ping_enabled,
:enabled_git_access_protocol,
<<<<<<< HEAD
:repository_size_limit,
=======
:housekeeping_enabled,
:housekeeping_bitmaps_enabled,
:housekeeping_incremental_repack_period,
:housekeeping_full_repack_period,
:housekeeping_gc_period,
repository_storages: [],
>>>>>>> ce/master
restricted_visibility_levels: [],
import_sources: [],
disabled_oauth_sign_in_sources: [],
......
......@@ -26,7 +26,15 @@ class ProfilesController < Profiles::ApplicationController
def reset_private_token
if current_user.reset_authentication_token!
flash[:notice] = "Token was successfully updated"
flash[:notice] = "Private token was successfully reset"
end
redirect_to profile_account_path
end
def reset_incoming_email_token
if current_user.reset_incoming_email_token!
flash[:notice] = "Incoming email token was successfully reset"
end
redirect_to profile_account_path
......
......@@ -5,17 +5,29 @@ class Projects::NetworkController < Projects::ApplicationController
before_action :require_non_empty_project
before_action :assign_ref_vars
before_action :authorize_download_code!
before_action :assign_commit
def show
@url = namespace_project_network_path(@project.namespace, @project, @ref, @options.merge(format: :json))
@commit_url = namespace_project_commit_path(@project.namespace, @project, 'ae45ca32').gsub("ae45ca32", "%s")
respond_to do |format|
format.html
format.html do
if @options[:extended_sha1] && !@commit
flash.now[:alert] = "Git revision '#{@options[:extended_sha1]}' does not exist."
end
end
format.json do
@graph = Network::Graph.new(project, @ref, @commit, @options[:filter_ref])
end
end
end
def assign_commit
return if params[:extended_sha1].blank?
@options[:extended_sha1] = params[:extended_sha1]
@commit = @repo.commit(@options[:extended_sha1])
end
end
......@@ -2,9 +2,9 @@ class ProjectsController < Projects::ApplicationController
include IssuableCollections
include ExtractsPath
before_action :authenticate_user!, except: [:show, :activity, :refs]
before_action :project, except: [:new, :create]
before_action :repository, except: [:new, :create]
before_action :authenticate_user!, except: [:index, :show, :activity, :refs]
before_action :project, except: [:index, :new, :create]
before_action :repository, except: [:index, :new, :create]
before_action :assign_ref_vars, only: [:show], if: :repo_exists?
before_action :assign_tree_vars, only: [:show], if: [:repo_exists?, :project_view_files?]
......@@ -161,6 +161,13 @@ class ProjectsController < Projects::ApplicationController
end
end
def new_issue_address
return render_404 unless Gitlab::IncomingEmail.supports_issue_creation?
current_user.reset_incoming_email_token!
render json: { new_issue_address: @project.new_issue_address(current_user) }
end
def archive
return access_denied! unless can?(current_user, :archive_project, @project)
......
......@@ -16,7 +16,7 @@ class SearchController < ApplicationController
@group = nil unless can?(current_user, :read_group, @group)
end
return if params[:search].nil? || params[:search].blank?
return if params[:search].blank?
@search_term = params[:search]
......
module AccountsHelper
def incoming_email_token_enabled?
current_user.incoming_email_token && Gitlab::IncomingEmail.supports_issue_creation?
end
end
module ComponentsHelper
def gitlab_workhorse_version
if request.headers['Gitlab-Workhorse'].present?
request.headers['Gitlab-Workhorse'].split('-').first
else
Gitlab::Workhorse.version
end
end
end
......@@ -61,6 +61,10 @@ module TodosHelper
}
end
def todos_filter_empty?
todos_filter_params.values.none?
end
def todos_filter_path(options = {})
without = options.delete(:without)
......
class BaseMailer < ActionMailer::Base
add_template_helper ApplicationHelper
add_template_helper GitlabMarkdownHelper
helper ApplicationHelper
helper GitlabMarkdownHelper
attr_accessor :current_user
helper_method :current_user, :can?
......
......@@ -11,12 +11,12 @@ class Notify < BaseMailer
include Emails::Pipelines
include Emails::Members
add_template_helper MergeRequestsHelper
add_template_helper DiffHelper
add_template_helper BlobHelper
add_template_helper EmailsHelper
add_template_helper MembersHelper
add_template_helper GitlabRoutingHelper
helper MergeRequestsHelper
helper DiffHelper
helper BlobHelper
helper EmailsHelper
helper MembersHelper
helper GitlabRoutingHelper
def test_email(recipient_email, subject, body)
mail(to: recipient_email,
......
......@@ -97,6 +97,18 @@ class ApplicationSetting < ActiveRecord::Base
presence: { message: 'Domain blacklist cannot be empty if Blacklist is enabled.' },
if: :domain_blacklist_enabled?
validates :housekeeping_incremental_repack_period,
presence: true,
numericality: { only_integer: true, greater_than: 0 }
validates :housekeeping_full_repack_period,
presence: true,
numericality: { only_integer: true, greater_than: :housekeeping_incremental_repack_period }
validates :housekeeping_gc_period,
presence: true,
numericality: { only_integer: true, greater_than: :housekeeping_full_repack_period }
validates_each :restricted_visibility_levels do |record, attr, value|
unless value.nil?
value.each do |level|
......@@ -183,6 +195,11 @@ class ApplicationSetting < ActiveRecord::Base
usage_ping_enabled: true,
repository_storages: ['default'],
user_default_external: false,
housekeeping_enabled: true,
housekeeping_bitmaps_enabled: true,
housekeeping_incremental_repack_period: 10,
housekeeping_full_repack_period: 50,
housekeeping_gc_period: 200,
)
end
......@@ -221,11 +238,7 @@ class ApplicationSetting < ActiveRecord::Base
end
def repository_storages
value = read_attribute(:repository_storages)
value = [value] if value.is_a?(String)
value = [] if value.nil?
value
Array(read_attribute(:repository_storages))
end
# repository_storage is still required in the API. Remove in 9.0
......
......@@ -289,6 +289,11 @@ module Issuable
false
end
def assignee_or_author?(user)
# We're comparing IDs here so we don't need to load any associations.
author_id == user.id || assignee_id == user.id
end
def record_metrics
metrics = self.metrics || create_metrics
metrics.record!
......
......@@ -4,17 +4,21 @@ module TokenAuthenticatable
private
def write_new_token(token_field)
new_token = generate_token(token_field)
new_token = generate_available_token(token_field)
write_attribute(token_field, new_token)
end
def generate_token(token_field)
def generate_available_token(token_field)
loop do
token = Devise.friendly_token
token = generate_token(token_field)
break token unless self.class.unscoped.find_by(token_field => token)
end
end
def generate_token(token_field)
Devise.friendly_token
end
class_methods do
def authentication_token_fields
@token_fields || []
......
......@@ -29,6 +29,15 @@ class ExternalIssue
@project
end
def project_id
@project.id
end
# Pattern used to extract `JIRA-123` issue references from text
def self.reference_pattern
@reference_pattern ||= %r{(?<issue>\b([A-Z][A-Z0-9_]+-)\d+)}
end
def to_reference(_from_project = nil)
id
end
......
# IssueCollection can be used to reduce a list of issues down to a subset.
#
# IssueCollection is not meant to be some sort of Enumerable, instead it's meant
# to take a list of issues and return a new list of issues based on some
# criteria. For example, given a list of issues you may want to return a list of
# issues that can be read or updated by a given user.
class IssueCollection
attr_reader :collection
def initialize(collection)
@collection = collection
end
# Returns all the issues that can be updated by the user.
def updatable_by_user(user)
return collection if user.admin?
# Given all the issue projects we get a list of projects that the current
# user has at least reporter access to.
projects_with_reporter_access = user.
projects_with_reporter_access_limited_to(project_ids).
pluck(:id)
collection.select do |issue|
if projects_with_reporter_access.include?(issue.project_id)
true
elsif issue.is_a?(Issue)
issue.assignee_or_author?(user)
else
false
end
end
end
alias_method :visible_to, :updatable_by_user
private
def project_ids
@project_ids ||= collection.map(&:project_id).uniq
end
end
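A minimal usage sketch of the collection, assuming a `project`, a `user`, and a `commit` are in scope; it mirrors how ProcessCommitWorker consumes the class further down:
closed = IssueCollection.new(commit.closes_issues(user)).updatable_by_user(user)
closed.each do |issue|
  # Permission was already checked collection-wide, so the per-issue check is skipped.
  Issues::CloseService.new(project, user).close_issue(issue, commit: commit)
end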
......@@ -719,13 +719,12 @@ class Project < ActiveRecord::Base
end
def new_issue_address(author)
# This feature is disabled for the time being.
return nil
return unless Gitlab::IncomingEmail.supports_issue_creation? && author
if Gitlab::IncomingEmail.enabled? && author # rubocop:disable Lint/UnreachableCode
Gitlab::IncomingEmail.reply_address(
"#{path_with_namespace}+#{author.authentication_token}")
end
author.ensure_incoming_email_token!
Gitlab::IncomingEmail.reply_address(
"#{path_with_namespace}+#{author.incoming_email_token}")
end
def build_commit_note(commit)
......
......@@ -1212,6 +1212,10 @@ class Repository
end
def search_files(query, ref)
unless exists? && has_visible_content? && query.present?
return []
end
offset = 2
args = %W(#{Gitlab.config.git.bin_path} grep -i -I -n --before-context #{offset} --after-context #{offset} -E -e #{Regexp.escape(query)} #{ref || root_ref})
Gitlab::Popen.popen(args, path_to_repo).first.scrub.split(/^--$/)
......
......@@ -13,6 +13,7 @@ class User < ActiveRecord::Base
DEFAULT_NOTIFICATION_LEVEL = :participating
add_authentication_token_field :authentication_token
add_authentication_token_field :incoming_email_token
default_value_for :admin, false
default_value_for(:external) { current_application_settings.user_default_external }
......@@ -127,7 +128,7 @@ class User < ActiveRecord::Base
before_validation :set_public_email, if: ->(user) { user.public_email_changed? }
after_update :update_emails_with_primary_email, if: ->(user) { user.email_changed? }
before_save :ensure_authentication_token
before_save :ensure_authentication_token, :ensure_incoming_email_token
before_save :ensure_external_user_rights
after_save :ensure_namespace_correct
after_initialize :set_projects_limit
......@@ -483,6 +484,16 @@ class User < ActiveRecord::Base
Project.where("projects.id IN (#{projects_union(min_access_level).to_sql})")
end
# Returns the projects this user has reporter (or greater) access to, limited
# to at most the given projects.
#
# This method is useful when you have a list of projects and want to
# efficiently check to which of these projects the user has at least reporter
# access.
def projects_with_reporter_access_limited_to(projects)
authorized_projects(Gitlab::Access::REPORTER).where(id: projects)
end
def viewable_starred_projects
starred_projects.where("projects.visibility_level IN (?) OR projects.id IN (#{projects_union.to_sql})",
[Project::PUBLIC, Project::INTERNAL])
......@@ -989,4 +1000,13 @@ class User < ActiveRecord::Base
signup_domain =~ regexp
end
end
def generate_token(token_field)
if token_field == :incoming_email_token
# Needs to be all lowercase and alphanumeric because it will be used in an email address.
SecureRandom.hex.to_i(16).to_s(36)
else
super
end
end
end
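For illustration only (the value below is made up), the base-36 conversion above produces a string containing only lowercase letters and digits, which is what makes it safe to embed in an email address:
SecureRandom.hex.to_i(16).to_s(36) # => "3f0zq8k1..." (hypothetical value; lowercase alphanumeric only)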
......@@ -4,7 +4,7 @@ class IssuablePolicy < BasePolicy
end
def rules
if @user && (@subject.author == @user || @subject.assignee == @user)
if @user && @subject.assignee_or_author?(@user)
can! :"read_#{action_name}"
can! :"update_#{action_name}"
end
......
......@@ -8,9 +8,8 @@ class IssuePolicy < IssuablePolicy
if @subject.confidential? && !can_read_confidential?
cannot! :read_issue
cannot! :admin_issue
cannot! :update_issue
cannot! :read_issue
cannot! :admin_issue
end
end
......@@ -18,11 +17,7 @@ class IssuePolicy < IssuablePolicy
def can_read_confidential?
return false unless @user
return true if @user.admin?
return true if @subject.author == @user
return true if @subject.assignee == @user
return true if @subject.project.team.member?(@user, Gitlab::Access::REPORTER)
false
IssueCollection.new([@subject]).visible_to(@user).any?
end
end
......@@ -110,35 +110,11 @@ class GitPushService < BaseService
# Extract any GFM references from the pushed commit messages. If the configured issue-closing regex is matched,
# close the referenced Issue. Create cross-reference Notes corresponding to any other referenced Mentionables.
def process_commit_messages
is_default_branch = is_default_branch?
authors = Hash.new do |hash, commit|
email = commit.author_email
next hash[email] if hash.has_key?(email)
hash[email] = commit_user(commit)
end
default = is_default_branch?
@push_commits.each do |commit|
# Keep track of the issues that will be actually closed because they are on a default branch.
# Hence, when creating cross-reference notes, the not-closed issues (on non-default branches)
# will also have cross-reference.
closed_issues = []
if is_default_branch
# Close issues if these commits were pushed to the project's default branch and the commit message matches the
# closing regex. Exclude any mentioned Issues from cross-referencing even if the commits are being pushed to
# a different branch.
closed_issues = commit.closes_issues(current_user)
closed_issues.each do |issue|
if can?(current_user, :update_issue, issue)
Issues::CloseService.new(project, authors[commit], {}).execute(issue, commit: commit)
end
end
end
commit.create_cross_references!(authors[commit], closed_issues)
update_issue_metrics(commit, authors)
ProcessCommitWorker.
perform_async(project.id, current_user.id, commit.id, default)
end
end
......@@ -181,11 +157,4 @@ class GitPushService < BaseService
def branch_name
@branch_name ||= Gitlab::Git.ref_name(params[:ref])
end
def update_issue_metrics(commit, authors)
mentioned_issues = commit.all_references(authors[commit]).issues
Issue::Metrics.where(issue_id: mentioned_issues.map(&:id), first_mentioned_in_commit_at: nil).
update_all(first_mentioned_in_commit_at: commit.committed_date)
end
end
module Issues
class CloseService < Issues::BaseService
# Closes the supplied issue if the current user is able to do so.
def execute(issue, commit: nil, notifications: true, system_note: true)
return issue unless can?(current_user, :update_issue, issue)
close_issue(issue,
commit: commit,
notifications: notifications,
system_note: system_note)
end
# Closes the supplied issue without checking if the user is authorized to
# do so.
#
# The code calling this method is responsible for ensuring that a user is
# allowed to close the given issue.
def close_issue(issue, commit: nil, notifications: true, system_note: true)
if project.jira_tracker? && project.jira_service.active
project.jira_service.execute(commit, issue)
todo_service.close_issue(issue, current_user)
......
......@@ -7,6 +7,8 @@
#
module Projects
class HousekeepingService < BaseService
include Gitlab::CurrentSettings
LEASE_TIMEOUT = 3600
class LeaseTaken < StandardError
......@@ -20,13 +22,14 @@ module Projects
end
def execute
raise LeaseTaken unless try_obtain_lease
lease_uuid = try_obtain_lease
raise LeaseTaken unless lease_uuid.present?
execute_gitlab_shell_gc
execute_gitlab_shell_gc(lease_uuid)
end
def needed?
@project.pushes_since_gc >= 10
pushes_since_gc > 0 && period_match? && housekeeping_enabled?
end
def increment!
......@@ -37,19 +40,59 @@ module Projects
private
def execute_gitlab_shell_gc
GitGarbageCollectWorker.perform_async(@project.id)
def execute_gitlab_shell_gc(lease_uuid)
GitGarbageCollectWorker.perform_async(@project.id, task, lease_key, lease_uuid)
ensure
Gitlab::Metrics.measure(:reset_pushes_since_gc) do
@project.reset_pushes_since_gc
if pushes_since_gc >= gc_period
Gitlab::Metrics.measure(:reset_pushes_since_gc) do
@project.reset_pushes_since_gc
end
end
end
def try_obtain_lease
Gitlab::Metrics.measure(:obtain_housekeeping_lease) do
lease = ::Gitlab::ExclusiveLease.new("project_housekeeping:#{@project.id}", timeout: LEASE_TIMEOUT)
lease = ::Gitlab::ExclusiveLease.new(lease_key, timeout: LEASE_TIMEOUT)
lease.try_obtain
end
end
def lease_key
"project_housekeeping:#{@project.id}"
end
def pushes_since_gc
@project.pushes_since_gc
end
def task
if pushes_since_gc % gc_period == 0
:gc
elsif pushes_since_gc % full_repack_period == 0
:full_repack
else
:incremental_repack
end
end
def period_match?
[gc_period, full_repack_period, repack_period].any? { |period| pushes_since_gc % period == 0 }
end
def housekeeping_enabled?
current_application_settings.housekeeping_enabled
end
def gc_period
current_application_settings.housekeeping_gc_period
end
def full_repack_period
current_application_settings.housekeeping_full_repack_period
end
def repack_period
current_application_settings.housekeeping_incremental_repack_period
end
end
end
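A worked example of the task selection above, assuming the default periods (incremental repack: 10, full repack: 50, gc: 200):
# pushes_since_gc =  10 -> :incremental_repack (10 % 200 != 0, 10 % 50 != 0, 10 % 10 == 0)
# pushes_since_gc =  50 -> :full_repack        (50 % 200 != 0, 50 % 50 == 0)
# pushes_since_gc = 200 -> :gc                 (200 % 200 == 0)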
......@@ -483,5 +483,44 @@
Enable this option to include the name of the author of the issue,
merge request or comment in the email body instead.
%fieldset
%legend Automatic Git repository housekeeping
.form-group
.col-sm-offset-2.col-sm-10
.checkbox
= f.label :housekeeping_enabled do
= f.check_box :housekeeping_enabled
Enable automatic repository housekeeping (git repack, git gc)
.help-block
If you keep automatic housekeeping disabled for a long time, Git
repository access on your GitLab server will become slower and your
repositories will use more disk space. We recommend always leaving
this enabled.
.checkbox
= f.label :housekeeping_bitmaps_enabled do
= f.check_box :housekeeping_bitmaps_enabled
Enable Git pack file bitmap creation
.help-block
Creating pack file bitmaps makes housekeeping take a little longer but
bitmaps should accelerate 'git clone' performance.
.form-group
= f.label :housekeeping_incremental_repack_period, 'Incremental repack period', class: 'control-label col-sm-2'
.col-sm-10
= f.number_field :housekeeping_incremental_repack_period, class: 'form-control'
.help-block
Number of Git pushes after which an incremental 'git repack' is run.
.form-group
= f.label :housekeeping_full_repack_period, 'Full repack period', class: 'control-label col-sm-2'
.col-sm-10
= f.number_field :housekeeping_full_repack_period, class: 'form-control'
.help-block
Number of Git pushes after which a full 'git repack' is run.
.form-group
= f.label :housekeeping_gc_period, 'Git GC period', class: 'control-label col-sm-2'
.col-sm-10
= f.number_field :housekeeping_gc_period, class: 'form-control'
.help-block
Number of Git pushes after which 'git gc' is run.
.form-actions
= f.submit 'Save', class: 'btn btn-save'
......@@ -98,7 +98,7 @@
%p
GitLab Workhorse
%span.pull-right
= Gitlab::Workhorse.version
= gitlab_workhorse_version
%p
GitLab API
%span.pull-right
......
......@@ -82,15 +82,19 @@
- elsif current_user.todos.any?
.todos-all-done
= render "shared/empty_states/todos_all_done.svg"
%h4.text-center
Good job! Looks like you don't have any todos left.
%p.text-center
Are you looking for things to do? Take a look at
= succeed "," do
= link_to "the opened issues", issues_dashboard_path
contribute to
= link_to "merge requests", merge_requests_dashboard_path
or mention someone in a comment to assign a new todo automatically.
- if todos_filter_empty?
%h4.text-center
Good job! Looks like you don't have any todos left.
%p.text-center
Are you looking for things to do? Take a look at
= succeed "," do
= link_to "the opened issues", issues_dashboard_path
contribute to
= link_to "merge requests", merge_requests_dashboard_path
or mention someone in a comment to assign a new todo automatically.
- else
%h4.text-center
There are no todos to show.
- else
.todos-empty
.todos-empty-hero
......
......@@ -103,11 +103,11 @@
%td{style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;padding-right:5px;"}
%img{height: "13", src: image_url('mailers/ci_pipeline_notif_v1/icon-commit-gray.gif'), style: "display:block;", width: "13"}/
%td{style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;"}
%a{href: commit_url(@pipeline), style: "color:#3084bb;text-decoration:none;"}
%a{href: commit_url(@pipeline), style: "color:#3777b0;text-decoration:none;"}
= @pipeline.short_sha
- if @merge_request
in
%a{href: merge_request_url(@merge_request), style: "color:#3084bb;text-decoration:none;"}
%a{href: merge_request_url(@merge_request), style: "color:#3777b0;text-decoration:none;"}
= @merge_request.to_reference
.commit{style: "color:#5c5c5c;font-weight:300;"}
= @pipeline.git_commit_message.truncate(50)
......@@ -134,7 +134,7 @@
%tr.pre-section
%td{style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;color:#333333;font-size:15px;font-weight:400;line-height:1.4;padding:15px 0;"}
Pipeline
%a{href: pipeline_url(@pipeline), style: "color:#3084bb;text-decoration:none;"}
%a{href: pipeline_url(@pipeline), style: "color:#3777b0;text-decoration:none;"}
= "\##{@pipeline.id}"
had
= failed.size
......@@ -158,7 +158,7 @@
%td{style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;color:#8c8c8c;font-weight:500;font-size:15px;vertical-align:middle;"}
= build.stage
%td{align: "right", style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;padding:20px 0;color:#8c8c8c;font-weight:500;font-size:15px;"}
%a{href: pipeline_build_url(@pipeline, build), style: "color:#3084bb;text-decoration:none;"}
%a{href: pipeline_build_url(@pipeline, build), style: "color:#3777b0;text-decoration:none;"}
= build.name
%tr.build-log
%td{colspan: "2", style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;padding:0 0 15px;"}
......@@ -168,10 +168,10 @@
%td{style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;padding:25px 0;font-size:13px;line-height:1.6;color:#5c5c5c;"}
%img{alt: "GitLab", height: "33", src: image_url('mailers/ci_pipeline_notif_v1/gitlab-logo-full-horizontal.gif'), style: "display:block;margin:0 auto 1em;", width: "90"}/
%div
%a{href: profile_notifications_url, style: "color:#3084bb;text-decoration:none;"} Manage all notifications
%a{href: profile_notifications_url, style: "color:#3777b0;text-decoration:none;"} Manage all notifications
&middot;
%a{href: help_url, style: "color:#3084bb;text-decoration:none;"} Help
%a{href: help_url, style: "color:#3777b0;text-decoration:none;"} Help
%div
You're receiving this email because of your account on
= succeed "." do
%a{href: root_url, style: "color:#3084bb;text-decoration:none;"}= Gitlab.config.gitlab.host
%a{href: root_url, style: "color:#3777b0;text-decoration:none;"}= Gitlab.config.gitlab.host
......@@ -103,11 +103,11 @@
%td{style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;padding-right:5px;"}
%img{height: "13", src: image_url('mailers/ci_pipeline_notif_v1/icon-commit-gray.gif'), style: "display:block;", width: "13"}/
%td{style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;font-size:15px;line-height:1.4;vertical-align:middle;"}
%a{href: commit_url(@pipeline), style: "color:#3084bb;text-decoration:none;"}
%a{href: commit_url(@pipeline), style: "color:#3777b0;text-decoration:none;"}
= @pipeline.short_sha
- if @merge_request
in
%a{href: merge_request_url(@merge_request), style: "color:#3084bb;text-decoration:none;"}
%a{href: merge_request_url(@merge_request), style: "color:#3777b0;text-decoration:none;"}
= @merge_request.to_reference
.commit{style: "color:#5c5c5c;font-weight:300;"}
= @pipeline.git_commit_message.truncate(50)
......@@ -135,7 +135,7 @@
- build_count = @pipeline.statuses.latest.size
- stage_count = @pipeline.stages.size
Pipeline
%a{href: pipeline_url(@pipeline), style: "color:#3084bb;text-decoration:none;"}
%a{href: pipeline_url(@pipeline), style: "color:#3777b0;text-decoration:none;"}
= "\##{@pipeline.id}"
successfully completed
= "#{build_count} #{'build'.pluralize(build_count)}"
......@@ -145,10 +145,10 @@
%td{style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;padding:25px 0;font-size:13px;line-height:1.6;color:#5c5c5c;"}
%img{alt: "GitLab", height: "33", src: image_url('mailers/ci_pipeline_notif_v1/gitlab-logo-full-horizontal.gif'), style: "display:block;margin:0 auto 1em;", width: "90"}/
%div
%a{href: profile_notifications_url, style: "color:#3084bb;text-decoration:none;"} Manage all notifications
%a{href: profile_notifications_url, style: "color:#3777b0;text-decoration:none;"} Manage all notifications
&middot;
%a{href: help_url, style: "color:#3084bb;text-decoration:none;"} Help
%a{href: help_url, style: "color:#3777b0;text-decoration:none;"} Help
%div
You're receiving this email because of your account on
= succeed "." do
%a{href: root_url, style: "color:#3084bb;text-decoration:none;"}= Gitlab.config.gitlab.host
%a{href: root_url, style: "color:#3777b0;text-decoration:none;"}= Gitlab.config.gitlab.host
......@@ -8,24 +8,36 @@
.row.prepend-top-default
.col-lg-3.profile-settings-sidebar
%h4.prepend-top-0
Private Token
= incoming_email_token_enabled? ? "Private Tokens" : "Private Token"
%p
Your private token is used to access application resources without authentication.
.col-lg-9
= form_for @user, url: reset_private_token_profile_path, method: :put, html: { class: "private-token" } do |f|
Keep
= incoming_email_token_enabled? ? "these tokens" : "this token"
secret; anyone with access to them can interact with GitLab as if they were you.
.col-lg-9.private-tokens-reset
.reset-action
%p.cgray
- if current_user.private_token
= label_tag "token", "Private token", class: "label-light"
= text_field_tag "token", current_user.private_token, class: "form-control"
= label_tag "private-token", "Private token", class: "label-light"
= text_field_tag "private-token", current_user.private_token, class: "form-control", readonly: true, onclick: "this.select()"
- else
%span You don`t have one yet. Click generate to fix it.
%p.help-block
It can be used for atom feeds or the API. Keep it secret!
%span You don't have one yet. Click generate to fix it.
%p.help-block
Your private token is used to access the API and Atom feeds without username/password authentication.
.prepend-top-default
- if current_user.private_token
= f.submit 'Reset private token', data: { confirm: "Are you sure?" }, class: "btn btn-default"
= link_to 'Reset private token', reset_private_token_profile_path, method: :put, data: { confirm: "Are you sure?" }, class: "btn btn-default private-token"
- else
= f.submit 'Generate', class: "btn btn-default"
- if incoming_email_token_enabled?
.reset-action
%p.cgray
= label_tag "incoming-email-token", "Incoming Email Token", class: 'label-light'
= text_field_tag "incoming-email-token", current_user.incoming_email_token, class: "form-control", readonly: true, onclick: "this.select()"
%p.help-block
Your incoming email token is used to create new issues by email, and is included in your project-specific email addresses.
.prepend-top-default
= link_to 'Reset incoming email token', reset_incoming_email_token_profile_path, method: :put, data: { confirm: "Are you sure?" }, class: "btn btn-default incoming-email-token"
%hr
.row.prepend-top-default
.col-lg-3.profile-settings-sidebar
......
......@@ -12,16 +12,23 @@
Create new issue by email
.modal-body
%p
Write an email to the below email address. (This is a private email address, so keep it secret.)
You can create a new issue inside this project by sending an email to the following email address:
.email-modal-input-group.input-group
= text_field_tag :issue_email, email, class: "monospace js-select-on-focus form-control", readonly: true
.input-group-btn
= clipboard_button(clipboard_target: '#issue_email')
%p
Send an email to this address to create an issue.
%p
Use the subject line as the title of your issue.
The subject will be used as the title of the new issue, and the message will be the description.
= link_to 'Slash commands', help_page_path('user/project/slash_commands'), target: '_blank', tabindex: -1
and styling with
= link_to 'Markdown', help_page_path('user/markdown'), target: '_blank', tabindex: -1
are supported.
%p
Use the message as the body of your issue (feel free to include some nice
= succeed ")." do
= link_to "Markdown", help_page_path('markdown', 'markdown')
This is a private email address, generated just for you.
Anyone who gets ahold of it can create issues as if they were you.
You should
= link_to 'reset it', new_issue_address_namespace_project_path(@project.namespace, @project), class: 'incoming-email-token-reset'
if that ever happens.
......@@ -17,5 +17,6 @@
= check_box_tag :filter_ref, 1, @options[:filter_ref]
%span Begin with the selected commit
.network-graph{ data: { url: @url, commit_url: @commit_url, ref: @ref, commit_id: @commit.id } }
= spinner nil, true
- if @commit
.network-graph{ data: { url: @url, commit_url: @commit_url, ref: @ref, commit_id: @commit.id } }
= spinner nil, true
<<<<<<< HEAD
= render 'projects/commits/commit', project: commit.project, commit: commit, ref: nil
=======
= render 'projects/commits/commit', project: @project, commit: commit, ref: nil
>>>>>>> ce/master
class GitGarbageCollectWorker
include Sidekiq::Worker
include Gitlab::ShellAdapter
include DedicatedSidekiqQueue
include Gitlab::CurrentSettings
sidekiq_options retry: false
def perform(project_id)
def perform(project_id, task = :gc, lease_key = nil, lease_uuid = nil)
project = Project.find(project_id)
task = task.to_sym
cmd = command(task)
repo_path = project.repository.path_to_repo
description = "'#{cmd.join(' ')}' in #{repo_path}"
Gitlab::GitLogger.info(description)
output, status = Gitlab::Popen.popen(cmd, repo_path)
Gitlab::GitLogger.error("#{description} failed:\n#{output}") unless status.zero?
gitlab_shell.gc(project.repository_storage_path, project.path_with_namespace)
# Refresh the branch cache in case garbage collection caused a ref lookup to fail
flush_ref_caches(project) if task == :gc
ensure
Gitlab::ExclusiveLease.cancel(lease_key, lease_uuid) if lease_key.present? && lease_uuid.present?
end
private
def command(task)
case task
when :gc
git(write_bitmaps: bitmaps_enabled?) + %w[gc]
when :full_repack
git(write_bitmaps: bitmaps_enabled?) + %w[repack -A -d --pack-kept-objects]
when :incremental_repack
# Normal git repack fails when bitmaps are enabled. It is impossible to
# create a bitmap here anyway.
git(write_bitmaps: false) + %w[repack -d]
else
raise "Invalid gc task: #{task.inspect}"
end
end
def flush_ref_caches(project)
project.repository.after_create_branch
project.repository.branch_names
project.repository.has_visible_content?
end
def bitmaps_enabled?
current_application_settings.housekeeping_bitmaps_enabled
end
def git(write_bitmaps:)
config_value = write_bitmaps ? 'true' : 'false'
%W[git -c repack.writeBitmaps=#{config_value}]
end
end
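For reference, the commands assembled by `command(task)` above come out roughly as follows (bitmap writing depends on the `housekeeping_bitmaps_enabled` setting):
# :gc                 -> git -c repack.writeBitmaps=<true|false> gc
# :full_repack        -> git -c repack.writeBitmaps=<true|false> repack -A -d --pack-kept-objects
# :incremental_repack -> git -c repack.writeBitmaps=false repack -d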
# Worker for processing individual commit messages pushed to a repository.
#
# Jobs for this worker are scheduled for every commit that is being pushed. As a
# result of this the workload of this worker should be kept to a bare minimum.
# Consider using an extra worker if you need to add any extra (and potentially
# slow) processing of commits.
class ProcessCommitWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
# project_id - The ID of the project this commit belongs to.
# user_id - The ID of the user that pushed the commit.
# commit_sha - The SHA1 of the commit to process.
# default - The data was pushed to the default branch.
def perform(project_id, user_id, commit_sha, default = false)
project = Project.find_by(id: project_id)
return unless project
user = User.find_by(id: user_id)
return unless user
commit = find_commit(project, commit_sha)
return unless commit
author = commit.author || user
process_commit_message(project, commit, user, author, default)
update_issue_metrics(commit, author)
end
def process_commit_message(project, commit, user, author, default = false)
closed_issues = default ? commit.closes_issues(user) : []
unless closed_issues.empty?
close_issues(project, user, author, commit, closed_issues)
end
commit.create_cross_references!(author, closed_issues)
end
def close_issues(project, user, author, commit, issues)
# We don't want to run permission related queries for every single issue,
# therefore we use IssueCollection here and skip the authorization check in
# Issues::CloseService#execute.
IssueCollection.new(issues).updatable_by_user(user).each do |issue|
Issues::CloseService.new(project, author).
close_issue(issue, commit: commit)
end
end
def update_issue_metrics(commit, author)
mentioned_issues = commit.all_references(author).issues
Issue::Metrics.where(issue_id: mentioned_issues.map(&:id), first_mentioned_in_commit_at: nil).
update_all(first_mentioned_in_commit_at: commit.committed_date)
end
private
def find_commit(project, sha)
project.commit(sha)
end
end
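A minimal enqueue sketch, matching the call GitPushService now makes for each pushed commit; `default` is a boolean indicating whether the push targeted the default branch:
ProcessCommitWorker.perform_async(project.id, current_user.id, commit.id, default)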
---
title: Use the Gitlab Workhorse HTTP header in the admin dashboard
merge_request:
author: Chris Wright
---
title: Rewrite git blame spinach feature tests to rspec feature tests
merge_request: 7197
author: Lisanne Fellinger
---
title: Fix broken commits search
merge_request:
author:
---
title: Expose label IDs in API
merge_request: 7275
author: Rares Sfirlogea
---
title: Add an index for project_id in project_import_data to improve performance
merge_request:
author:
---
title: API: Ability to retrieve version information
merge_request: 7286
author: Robert Schilling
---
title: Return 400 when creating a system hook fails
merge_request: 7350
author: Robert Schilling
---
title: Fix broken link to observatory cli on Frontend Dev Guide
merge_request:
author: Sam Rose
---
title: Faster search inside Project
merge_request:
author:
---
title: Fix 404 on network page when entering non-existent git revision
merge_request: 7172
author: Hiroyuki Sato
---
title: Finer-grained Git garbage collection
merge_request: 6588
author:
---
title: Process commits using a dedicated Sidekiq worker
merge_request: 6802
author:
---
title: Bump omniauth-gitlab to 1.0.2 to fix incompatibility with omniauth-oauth2
merge_request:
author:
---
title: Set default Sidekiq retries to 3
merge_request: 7294
author:
---
title: Use separate email-token for incoming email and revert back the inactive feature
merge_request: 5914
author:
# Adds a draw method to Rails routing
# It allows us to keep routing split into separate files
class ActionDispatch::Routing::Mapper
def draw(routes_name)
instance_eval(File.read(Rails.root.join("config/routes/#{routes_name}.rb")))
end
end
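Individual route files under `config/routes/` can then be pulled in by name, as the project routes do further down with `draw :git_http`, `draw :wiki`, and `draw :repository`; a minimal sketch:
# config/routes.rb (sketch): loads config/routes/git_http.rb
Rails.application.routes.draw do
  draw :git_http
end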
......@@ -2,6 +2,9 @@
redis_config_hash = Gitlab::Redis.params
redis_config_hash[:namespace] = Gitlab::Redis::SIDEKIQ_NAMESPACE
# Default is to retry 25 times with exponential backoff. That's too much.
Sidekiq.default_worker_options = { retry: 3 }
Sidekiq.configure_server do |config|
config.redis = redis_config_hash
......
......@@ -2,12 +2,6 @@ require 'sidekiq/web'
require 'sidekiq/cron/web'
require 'api/api'
class ActionDispatch::Routing::Mapper
def draw(routes_name)
instance_eval(File.read(Rails.root.join("config/routes/#{routes_name}.rb")))
end
end
Rails.application.routes.draw do
concern :access_requestable do
post :request_access, on: :collection
......
scope constraints: { id: /.+\.git/, format: nil } do
# Git HTTP clients ('git clone' etc.)
get '/info/refs', to: 'git_http#info_refs'
post '/git-upload-pack', to: 'git_http#git_upload_pack'
post '/git-receive-pack', to: 'git_http#git_receive_pack'
# Git LFS API (metadata)
post '/info/lfs/objects/batch', to: 'lfs_api#batch'
post '/info/lfs/objects', to: 'lfs_api#deprecated'
get '/info/lfs/objects/*oid', to: 'lfs_api#deprecated'
# GitLab LFS object storage
scope constraints: { oid: /[a-f0-9]{64}/ } do
get '/gitlab-lfs/objects/*oid', to: 'lfs_storage#download'
scope constraints: { size: /[0-9]+/ } do
put '/gitlab-lfs/objects/*oid/*size/authorize', to: 'lfs_storage#upload_authorize'
put '/gitlab-lfs/objects/*oid/*size', to: 'lfs_storage#upload_finalize'
end
end
end
# Allow /info/refs, /info/refs?service=git-upload-pack, and
# /info/refs?service=git-receive-pack, but nothing else.
#
git_http_handshake = lambda do |request|
request.query_string.blank? ||
request.query_string.match(/\Aservice=git-(upload|receive)-pack\z/)
end
ref_redirect = redirect do |params, request|
path = "#{params[:namespace_id]}/#{params[:project_id]}.git/info/refs"
path << "?#{request.query_string}" unless request.query_string.blank?
path
end
get '/info/refs', constraints: git_http_handshake, to: ref_redirect
......@@ -3,7 +3,7 @@ require 'constraints/group_url_constrainer'
constraints(GroupUrlConstrainer.new) do
scope(path: ':id',
as: :group,
constraints: { id: /[a-zA-Z.0-9_\-]+(?<!\.atom)/ },
constraints: { id: Gitlab::Regex.namespace_route_regex },
controller: :groups) do
get '/', action: :show
patch '/', action: :update
......@@ -12,15 +12,9 @@ constraints(GroupUrlConstrainer.new) do
end
end
scope constraints: { id: /[a-zA-Z.0-9_\-]+(?<!\.atom)/ } do
resources :groups, except: [:show] do
member do
get :issues
get :merge_requests
get :projects
get :activity
end
resources :groups, only: [:index, :new, :create]
<<<<<<< HEAD
scope module: :groups do
## EE-specific
resource :analytics, only: [:show]
......@@ -58,4 +52,26 @@ scope constraints: { id: /[a-zA-Z.0-9_\-]+(?<!\.atom)/ } do
end
get 'groups/:id' => 'groups#show', as: :group_canonical
=======
scope(path: 'groups/:id', controller: :groups) do
get :edit, as: :edit_group
get :issues, as: :issues_group
get :merge_requests, as: :merge_requests_group
get :projects, as: :projects_group
get :activity, as: :activity_group
end
scope(path: 'groups/:group_id', module: :groups, as: :group) do
resources :group_members, only: [:index, :create, :update, :destroy], concerns: :access_requestable do
post :resend_invite, on: :member
delete :leave, on: :collection
end
resource :avatar, only: [:destroy]
resources :milestones, constraints: { id: /[^\/]+/ }, only: [:index, :show, :update, :new, :create]
resources :labels, except: [:show], constraints: { id: /\d+/ }
>>>>>>> ce/master
end
# Must be last route in this file
get 'groups/:id' => 'groups#show', as: :group_canonical
......@@ -4,6 +4,7 @@ resource :profile, only: [:show, :update] do
get :applications, to: 'oauth/applications#index'
put :reset_private_token
put :reset_incoming_email_token
put :update_username
end
......
......@@ -18,152 +18,17 @@ resources :namespaces, path: '/', constraints: { id: /[a-zA-Z.0-9_\-]+/ }, only:
get :autocomplete_sources
get :activity
get :refs
put :new_issue_address
end
scope module: :projects do
scope constraints: { id: /.+\.git/, format: nil } do
# Git HTTP clients ('git clone' etc.)
get '/info/refs', to: 'git_http#info_refs'
post '/git-upload-pack', to: 'git_http#git_upload_pack'
post '/git-receive-pack', to: 'git_http#git_receive_pack'
# Git LFS API (metadata)
post '/info/lfs/objects/batch', to: 'lfs_api#batch'
post '/info/lfs/objects', to: 'lfs_api#deprecated'
get '/info/lfs/objects/*oid', to: 'lfs_api#deprecated'
# GitLab LFS object storage
scope constraints: { oid: /[a-f0-9]{64}/ } do
get '/gitlab-lfs/objects/*oid', to: 'lfs_storage#download'
scope constraints: { size: /[0-9]+/ } do
put '/gitlab-lfs/objects/*oid/*size/authorize', to: 'lfs_storage#upload_authorize'
put '/gitlab-lfs/objects/*oid/*size', to: 'lfs_storage#upload_finalize'
end
end
end
# Allow /info/refs, /info/refs?service=git-upload-pack, and
# /info/refs?service=git-receive-pack, but nothing else.
#
git_http_handshake = lambda do |request|
request.query_string.blank? ||
request.query_string.match(/\Aservice=git-(upload|receive)-pack\z/)
end
ref_redirect = redirect do |params, request|
path = "#{params[:namespace_id]}/#{params[:project_id]}.git/info/refs"
path << "?#{request.query_string}" unless request.query_string.blank?
path
end
get '/info/refs', constraints: git_http_handshake, to: ref_redirect
# Blob routes:
get '/new/*id', to: 'blob#new', constraints: { id: /.+/ }, as: 'new_blob'
post '/create/*id', to: 'blob#create', constraints: { id: /.+/ }, as: 'create_blob'
get '/edit/*id', to: 'blob#edit', constraints: { id: /.+/ }, as: 'edit_blob'
put '/update/*id', to: 'blob#update', constraints: { id: /.+/ }, as: 'update_blob'
post '/preview/*id', to: 'blob#preview', constraints: { id: /.+/ }, as: 'preview_blob'
draw :git_http
#
# Templates
#
get '/templates/:template_type/:key' => 'templates#show', as: :template
scope do
get(
'/blob/*id/diff',
to: 'blob#diff',
constraints: { id: /.+/, format: false },
as: :blob_diff
)
get(
'/blob/*id',
to: 'blob#show',
constraints: { id: /.+/, format: false },
as: :blob
)
delete(
'/blob/*id',
to: 'blob#destroy',
constraints: { id: /.+/, format: false }
)
put(
'/blob/*id',
to: 'blob#update',
constraints: { id: /.+/, format: false }
)
post(
'/blob/*id',
to: 'blob#create',
constraints: { id: /.+/, format: false }
)
end
scope do
get(
'/raw/*id',
to: 'raw#show',
constraints: { id: /.+/, format: /(html|js)/ },
as: :raw
)
end
scope do
get(
'/tree/*id',
to: 'tree#show',
constraints: { id: /.+/, format: /(html|js)/ },
as: :tree
)
end
scope do
get(
'/find_file/*id',
to: 'find_file#show',
constraints: { id: /.+/, format: /html/ },
as: :find_file
)
end
scope do
get(
'/files/*id',
to: 'find_file#list',
constraints: { id: /(?:[^.]|\.(?!json$))+/, format: /json/ },
as: :files
)
end
scope do
post(
'/create_dir/*id',
to: 'tree#create_dir',
constraints: { id: /.+/ },
as: 'create_dir'
)
end
scope do
get(
'/blame/*id',
to: 'blame#show',
constraints: { id: /.+/, format: /(html|js)/ },
as: :blame
)
end
scope do
get(
'/commits/*id',
to: 'commits#show',
constraints: { id: /.+/, format: false },
as: :commits
)
end
resource :avatar, only: [:show, :destroy]
resources :commit, only: [:show], constraints: { id: /\h{7,40}/ } do
member do
......@@ -212,29 +77,6 @@ resources :namespaces, path: '/', constraints: { id: /[a-zA-Z.0-9_\-]+/ }, only:
end
end
WIKI_SLUG_ID = { id: /\S+/ } unless defined? WIKI_SLUG_ID
scope do
# Order matters to give priority to these matches
get '/wikis/git_access', to: 'wikis#git_access'
get '/wikis/pages', to: 'wikis#pages', as: 'wiki_pages'
post '/wikis', to: 'wikis#create'
get '/wikis/*id/history', to: 'wikis#history', as: 'wiki_history', constraints: WIKI_SLUG_ID
get '/wikis/*id/edit', to: 'wikis#edit', as: 'wiki_edit', constraints: WIKI_SLUG_ID
get '/wikis/*id', to: 'wikis#show', as: 'wiki', constraints: WIKI_SLUG_ID
delete '/wikis/*id', to: 'wikis#destroy', constraints: WIKI_SLUG_ID
put '/wikis/*id', to: 'wikis#update', constraints: WIKI_SLUG_ID
post '/wikis/*id/preview_markdown', to: 'wikis#preview_markdown', constraints: WIKI_SLUG_ID, as: 'wiki_preview_markdown'
end
resource :repository, only: [:create] do
member do
get 'archive', constraints: { format: Gitlab::Regex.archive_formats_regex }
end
end
resources :services, constraints: { id: /[^\/]+/ }, only: [:index, :edit, :update] do
member do
get :test
......@@ -251,23 +93,6 @@ resources :namespaces, path: '/', constraints: { id: /[a-zA-Z.0-9_\-]+/ }, only:
resources :forks, only: [:index, :new, :create]
resource :import, only: [:new, :create, :show]
resources :refs, only: [] do
collection do
get 'switch'
end
member do
# tree viewer logs
get 'logs_tree', constraints: { id: Gitlab::Regex.git_reference_regex }
# Directories with leading dots erroneously get rejected if git
# ref regex used in constraints. Regex verification now done in controller.
get 'logs_tree/*path' => 'refs#logs_tree', as: :logs_file, constraints: {
id: /.*/,
path: /.*/
}
end
end
resources :merge_requests, concerns: :awardable, constraints: { id: /\d+/ } do
member do
get :commits
......@@ -513,9 +338,16 @@ resources :namespaces, path: '/', constraints: { id: /[a-zA-Z.0-9_\-]+/ }, only:
end
end
<<<<<<< HEAD
## EE-specific
resources :audit_events, only: [:index]
## EE-specific
=======
# Since both wiki and repository routing contains wildcard characters
# its preferable to keep it below all other project routes
draw :wiki
draw :repository
>>>>>>> ce/master
end
end
end
# All routing related to repository browsing
resource :repository, only: [:create] do
member do
get 'archive', constraints: { format: Gitlab::Regex.archive_formats_regex }
end
end
resources :refs, only: [] do
collection do
get 'switch'
end
member do
# tree viewer logs
get 'logs_tree', constraints: { id: Gitlab::Regex.git_reference_regex }
# Directories with leading dots erroneously get rejected if git
# ref regex used in constraints. Regex verification now done in controller.
get 'logs_tree/*path' => 'refs#logs_tree', as: :logs_file, constraints: {
id: /.*/,
path: /.*/
}
end
end
get '/new/*id', to: 'blob#new', constraints: { id: /.+/ }, as: 'new_blob'
post '/create/*id', to: 'blob#create', constraints: { id: /.+/ }, as: 'create_blob'
get '/edit/*id', to: 'blob#edit', constraints: { id: /.+/ }, as: 'edit_blob'
put '/update/*id', to: 'blob#update', constraints: { id: /.+/ }, as: 'update_blob'
post '/preview/*id', to: 'blob#preview', constraints: { id: /.+/ }, as: 'preview_blob'
scope do
get(
'/blob/*id/diff',
to: 'blob#diff',
constraints: { id: /.+/, format: false },
as: :blob_diff
)
get(
'/blob/*id',
to: 'blob#show',
constraints: { id: /.+/, format: false },
as: :blob
)
delete(
'/blob/*id',
to: 'blob#destroy',
constraints: { id: /.+/, format: false }
)
put(
'/blob/*id',
to: 'blob#update',
constraints: { id: /.+/, format: false }
)
post(
'/blob/*id',
to: 'blob#create',
constraints: { id: /.+/, format: false }
)
get(
'/raw/*id',
to: 'raw#show',
constraints: { id: /.+/, format: /(html|js)/ },
as: :raw
)
get(
'/tree/*id',
to: 'tree#show',
constraints: { id: /.+/, format: /(html|js)/ },
as: :tree
)
get(
'/find_file/*id',
to: 'find_file#show',
constraints: { id: /.+/, format: /html/ },
as: :find_file
)
get(
'/files/*id',
to: 'find_file#list',
constraints: { id: /(?:[^.]|\.(?!json$))+/, format: /json/ },
as: :files
)
post(
'/create_dir/*id',
to: 'tree#create_dir',
constraints: { id: /.+/ },
as: 'create_dir'
)
get(
'/blame/*id',
to: 'blame#show',
constraints: { id: /.+/, format: /(html|js)/ },
as: :blame
)
# File/dir history
get(
'/commits/*id',
to: 'commits#show',
constraints: { id: /.+/, format: false },
as: :commits
)
end
......@@ -23,31 +23,32 @@ end
constraints(UserUrlConstrainer.new) do
scope(path: ':username',
as: :user,
constraints: { username: /[a-zA-Z.0-9_\-]+(?<!\.atom)/ },
constraints: { username: Gitlab::Regex.namespace_route_regex },
controller: :users) do
get '/', action: :show
end
end
scope(path: 'users/:username',
as: :user,
constraints: { username: /[a-zA-Z.0-9_\-]+(?<!\.atom)/ },
controller: :users) do
get :calendar
get :calendar_activities
get :groups
get :projects
get :contributed, as: :contributed_projects
get :snippets
get :exists
get '/', to: redirect('/%{username}')
end
scope(constraints: { username: Gitlab::Regex.namespace_route_regex }) do
scope(path: 'users/:username',
as: :user,
controller: :users) do
get :calendar
get :calendar_activities
get :groups
get :projects
get :contributed, as: :contributed_projects
get :snippets
get :exists
get '/', to: redirect('/%{username}')
end
# Compatibility with old routing
# TODO (dzaporozhets): remove in 10.0
get '/u/:username', to: redirect('/%{username}'), constraints: { username: /[a-zA-Z.0-9_\-]+(?<!\.atom)/ }
# TODO (dzaporozhets): remove in 9.0
get '/u/:username/groups', to: redirect('/users/%{username}/groups'), constraints: { username: /[a-zA-Z.0-9_\-]+/ }
get '/u/:username/projects', to: redirect('/users/%{username}/projects'), constraints: { username: /[a-zA-Z.0-9_\-]+/ }
get '/u/:username/snippets', to: redirect('/users/%{username}/snippets'), constraints: { username: /[a-zA-Z.0-9_\-]+/ }
get '/u/:username/contributed', to: redirect('/users/%{username}/contributed'), constraints: { username: /[a-zA-Z.0-9_\-]+/ }
# Compatibility with old routing
# TODO (dzaporozhets): remove in 10.0
get '/u/:username', to: redirect('/%{username}')
# TODO (dzaporozhets): remove in 9.0
get '/u/:username/groups', to: redirect('/users/%{username}/groups')
get '/u/:username/projects', to: redirect('/users/%{username}/projects')
get '/u/:username/snippets', to: redirect('/users/%{username}/snippets')
get '/u/:username/contributed', to: redirect('/users/%{username}/contributed')
end
WIKI_SLUG_ID = { id: /\S+/ } unless defined? WIKI_SLUG_ID
scope do
# Order matters to give priority to these matches
get '/wikis/git_access', to: 'wikis#git_access'
get '/wikis/pages', to: 'wikis#pages', as: 'wiki_pages'
post '/wikis', to: 'wikis#create'
get '/wikis/*id/history', to: 'wikis#history', as: 'wiki_history', constraints: WIKI_SLUG_ID
get '/wikis/*id/edit', to: 'wikis#edit', as: 'wiki_edit', constraints: WIKI_SLUG_ID
get '/wikis/*id', to: 'wikis#show', as: 'wiki', constraints: WIKI_SLUG_ID
delete '/wikis/*id', to: 'wikis#destroy', constraints: WIKI_SLUG_ID
put '/wikis/*id', to: 'wikis#update', constraints: WIKI_SLUG_ID
post '/wikis/*id/preview_markdown', to: 'wikis#preview_markdown', constraints: WIKI_SLUG_ID, as: 'wiki_preview_markdown'
end
......@@ -21,6 +21,7 @@
- [post_receive, 5]
- [merge, 5]
- [update_merge_requests, 3]
- [process_commit, 2]
- [new_note, 2]
- [build, 2]
- [pipeline, 2]
......
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddIncomingEmailTokenToUsers < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
disable_ddl_transaction!
def change
add_column :users, :incoming_email_token, :string
add_concurrent_index :users, :incoming_email_token
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddHousekeepingToApplicationSettings < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
# When a migration requires downtime you **must** uncomment the following
# constant and define a short and easy to understand explanation as to why the
# migration requires downtime.
# DOWNTIME_REASON = ''
disable_ddl_transaction!
def up
add_column_with_default(:application_settings, :housekeeping_enabled, :boolean, default: true, allow_null: false)
add_column_with_default(:application_settings, :housekeeping_bitmaps_enabled, :boolean, default: true, allow_null: false)
add_column_with_default(:application_settings, :housekeeping_incremental_repack_period, :integer, default: 10, allow_null: false)
add_column_with_default(:application_settings, :housekeeping_full_repack_period, :integer, default: 50, allow_null: false)
add_column_with_default(:application_settings, :housekeeping_gc_period, :integer, default: 200, allow_null: false)
end
def down
remove_column(:application_settings, :housekeeping_enabled, :boolean, default: true, allow_null: false)
remove_column(:application_settings, :housekeeping_bitmaps_enabled, :boolean, default: true, allow_null: false)
remove_column(:application_settings, :housekeeping_incremental_repack_period, :integer, default: 10, allow_null: false)
remove_column(:application_settings, :housekeeping_full_repack_period, :integer, default: 50, allow_null: false)
remove_column(:application_settings, :housekeeping_gc_period, :integer, default: 200, allow_null: false)
end
end
......@@ -5,12 +5,12 @@ class RenameRepositoryStorageColumn < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
DOWNTIME = true
# When a migration requires downtime you **must** uncomment the following
# constant and define a short and easy to understand explanation as to why the
# migration requires downtime.
# DOWNTIME_REASON = ''
DOWNTIME_REASON = 'Renaming the application_settings.repository_storage column'
# When using the methods "add_concurrent_index" or "add_column_with_default"
# you must disable the use of transactions as these methods can not run in an
......
class AddProjectImportDataProjectIndex < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
disable_ddl_transaction!
def change
add_concurrent_index :project_import_data, :project_id
end
end
......@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20161103171205) do
ActiveRecord::Schema.define(version: 20161106185620) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
......@@ -107,6 +107,11 @@ ActiveRecord::Schema.define(version: 20161103171205) do
t.text "help_page_text_html"
t.text "shared_runners_text_html"
t.text "after_sign_up_text_html"
t.boolean "housekeeping_enabled", default: true, null: false
t.boolean "housekeeping_bitmaps_enabled", default: true, null: false
t.integer "housekeeping_incremental_repack_period", default: 10, null: false
t.integer "housekeeping_full_repack_period", default: 50, null: false
t.integer "housekeeping_gc_period", default: 200, null: false
end
create_table "approvals", force: :cascade do |t|
......@@ -991,6 +996,8 @@ ActiveRecord::Schema.define(version: 20161103171205) do
t.string "encrypted_credentials_salt"
end
add_index "project_import_data", ["project_id"], name: "index_project_import_data_on_project_id", using: :btree
create_table "projects", force: :cascade do |t|
t.string "name"
t.string "path"
......@@ -1363,6 +1370,7 @@ ActiveRecord::Schema.define(version: 20161103171205) do
t.boolean "ldap_email", default: false, null: false
t.boolean "external", default: false
t.string "organization"
t.string "incoming_email_token"
end
add_index "users", ["admin"], name: "index_users_on_admin", using: :btree
......@@ -1372,6 +1380,7 @@ ActiveRecord::Schema.define(version: 20161103171205) do
add_index "users", ["current_sign_in_at"], name: "index_users_on_current_sign_in_at", using: :btree
add_index "users", ["email"], name: "index_users_on_email", unique: true, using: :btree
add_index "users", ["email"], name: "index_users_on_email_trigram", using: :gin, opclasses: {"email"=>"gin_trgm_ops"}
add_index "users", ["incoming_email_token"], name: "index_users_on_incoming_email_token", using: :btree
add_index "users", ["name"], name: "index_users_on_name", using: :btree
add_index "users", ["name"], name: "index_users_on_name_trigram", using: :gin, opclasses: {"name"=>"gin_trgm_ops"}
add_index "users", ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true, using: :btree
......
......@@ -3,6 +3,14 @@
> [Introduced][ce-2371] in GitLab 8.4.
---
## Automatic housekeeping
GitLab automatically runs `git gc` and `git repack` on repositories
after Git pushes. If needed, you can change how often this happens, or
turn it off entirely, in **Admin area > Settings**
(`/admin/application_settings`).
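The frequency is controlled by the new `housekeeping_*` application settings added in this merge. A minimal sketch of how period settings like these can be applied (illustrative only; the `pushes_since_gc` counter and the method name are assumptions, not the actual service):

```ruby
# Illustrative sketch: choose a housekeeping task from how many pushes have
# happened since the last `git gc`. All names here are hypothetical.
def housekeeping_task(settings, pushes_since_gc)
  return if pushes_since_gc.zero?

  if (pushes_since_gc % settings.housekeeping_gc_period).zero?
    :gc
  elsif (pushes_since_gc % settings.housekeeping_full_repack_period).zero?
    :full_repack
  elsif (pushes_since_gc % settings.housekeeping_incremental_repack_period).zero?
    :incremental_repack
  end
end
```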
## Manual housekeeping
The housekeeping function runs `git gc` ([man page][man]) on the current
project Git repository.
......
......@@ -20,46 +20,61 @@ Example response:
```json
[
{
"name" : "bug",
"color" : "#d9534f",
"description": "Bug reported by user",
"open_issues_count": 1,
"closed_issues_count": 0,
"open_merge_requests_count": 1
},
{
"color" : "#d9534f",
"name" : "confirmed",
"description": "Confirmed issue",
"open_issues_count": 2,
"closed_issues_count": 5,
"open_merge_requests_count": 0
},
{
"name" : "critical",
"color" : "#d9534f",
"description": "Critical issue. Need fix ASAP",
"open_issues_count": 1,
"closed_issues_count": 3,
"open_merge_requests_count": 1
},
{
"name" : "documentation",
"color" : "#f0ad4e",
"description": "Issue about documentation",
"open_issues_count": 1,
"closed_issues_count": 0,
"open_merge_requests_count": 2
},
{
"color" : "#5cb85c",
"name" : "enhancement",
"description": "Enhancement proposal",
"open_issues_count": 1,
"closed_issues_count": 0,
"open_merge_requests_count": 1
}
{
"id" : 1,
"name" : "bug",
"color" : "#d9534f",
"description": "Bug reported by user",
"open_issues_count": 1,
"closed_issues_count": 0,
"open_merge_requests_count": 1,
"subscribed": false,
"priority": 10
},
{
"id" : 4,
"color" : "#d9534f",
"name" : "confirmed",
"description": "Confirmed issue",
"open_issues_count": 2,
"closed_issues_count": 5,
"open_merge_requests_count": 0,
"subscribed": false,
"priority": null
},
{
"id" : 7,
"name" : "critical",
"color" : "#d9534f",
"description": "Critical issue. Need fix ASAP",
"open_issues_count": 1,
"closed_issues_count": 3,
"open_merge_requests_count": 1,
"subscribed": false,
"priority": null
},
{
"id" : 8,
"name" : "documentation",
"color" : "#f0ad4e",
"description": "Issue about documentation",
"open_issues_count": 1,
"closed_issues_count": 0,
"open_merge_requests_count": 2,
"subscribed": false,
"priority": null
},
{
"id" : 9,
"color" : "#5cb85c",
"name" : "enhancement",
"description": "Enhancement proposal",
"open_issues_count": 1,
"closed_issues_count": 0,
"open_merge_requests_count": 1,
"subscribed": true,
"priority": null
}
]
```
......@@ -80,6 +95,7 @@ POST /projects/:id/labels
| `name` | string | yes | The name of the label |
| `color` | string | yes | The color of the label in 6-digit hex notation with leading `#` sign |
| `description` | string | no | The description of the label |
| `priority` | integer | no | The priority of the label. Must be greater than or equal to zero, or `null` to remove the priority. |
```bash
curl --data "name=feature&color=#5843AD" --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" "https://gitlab.example.com/api/v3/projects/1/labels"
......@@ -89,9 +105,15 @@ Example response:
```json
{
"name" : "feature",
"color" : "#5843AD",
"description":null
"id" : 10,
"name" : "feature",
"color" : "#5843AD",
"description":null,
"open_issues_count": 0,
"closed_issues_count": 0,
"open_merge_requests_count": 0,
"subscribed": false,
"priority": null
}
```
......@@ -120,14 +142,15 @@ Example response:
```json
{
"title" : "feature",
"color" : "#5843AD",
"description": "New feature proposal",
"updated_at" : "2015-11-03T21:22:30.737Z",
"template" : false,
"project_id" : 1,
"created_at" : "2015-11-03T21:22:30.737Z",
"id" : 9
"id" : 1,
"name" : "bug",
"color" : "#d9534f",
"description": "Bug reported by user",
"open_issues_count": 1,
"closed_issues_count": 0,
"open_merge_requests_count": 1,
"subscribed": false,
"priority": null
}
```
......@@ -151,6 +174,8 @@ PUT /projects/:id/labels
| `new_name` | string | yes if `color` is not provided | The new name of the label |
| `color` | string | yes if `new_name` is not provided | The new color of the label in 6-digit hex notation with leading `#` sign |
| `description` | string | no | The new description of the label |
| `priority` | integer | no | The new priority of the label. Must be greater than or equal to zero, or `null` to remove the priority. |
```bash
curl --request PUT --data "name=documentation&new_name=docs&color=#8E44AD&description=Documentation" --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" "https://gitlab.example.com/api/v3/projects/1/labels"
......@@ -160,9 +185,15 @@ Example response:
```json
{
"color" : "#8E44AD",
"name" : "docs",
"description": "Documentation"
"id" : 8,
"name" : "docs",
"color" : "#8E44AD",
"description": "Documentation",
"open_issues_count": 1,
"closed_issues_count": 0,
"open_merge_requests_count": 2,
"subscribed": false,
"priority": null
}
```
......@@ -191,13 +222,15 @@ Example response:
```json
{
"name": "Docs",
"color": "#cc0033",
"description": "",
"open_issues_count": 0,
"closed_issues_count": 0,
"open_merge_requests_count": 0,
"subscribed": true
"id" : 1,
"name" : "bug",
"color" : "#d9534f",
"description": "Bug reported by user",
"open_issues_count": 1,
"closed_issues_count": 0,
"open_merge_requests_count": 1,
"subscribed": true,
"priority": null
}
```
......@@ -226,12 +259,14 @@ Example response:
```json
{
"name": "Docs",
"color": "#cc0033",
"description": "",
"open_issues_count": 0,
"closed_issues_count": 0,
"open_merge_requests_count": 0,
"subscribed": false
"id" : 1,
"name" : "bug",
"color" : "#d9534f",
"description": "Bug reported by user",
"open_issues_count": 1,
"closed_issues_count": 0,
"open_merge_requests_count": 1,
"subscribed": false,
"priority": null
}
```
......@@ -228,7 +228,7 @@ For our currently-supported browsers, see our [requirements][requirements].
[page-specific-js-example]: https://gitlab.com/gitlab-org/gitlab-ce/blob/13bb9ed77f405c5f6ee4fdbc964ecf635c9a223f/app/views/projects/graphs/_head.html.haml#L6-8
[chrome-accessibility-developer-tools]: https://github.com/GoogleChrome/accessibility-developer-tools
[audit-rules]: https://github.com/GoogleChrome/accessibility-developer-tools/wiki/Audit-Rules
[observatory-cli]: https://github.com/mozilla/http-observatory-cli)
[observatory-cli]: https://github.com/mozilla/http-observatory-cli
[qualys-ssl]: https://www.ssllabs.com/ssltest/analyze.html
[secure_headers]: https://github.com/twitter/secureheaders
[mdn-csp]: https://developer.mozilla.org/en-US/docs/Web/Security/CSP
......
......@@ -66,6 +66,12 @@ producing errors whenever it tries to use the `dummy` column.
As a result of the above, downtime _is_ required when removing a column, even
when using PostgreSQL.
## Renaming Columns
Renaming columns requires downtime because running GitLab instances will keep
using the old column name until the new version is deployed. This can result
in those instances producing errors, which in turn can impact the user experience.
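A sketch of what such a rename looks like in practice (the table, column, and
class names below are hypothetical, not taken from this merge):

```ruby
# Illustrative only: a plain rename, which requires downtime as explained
# above, so the migration declares it together with a reason.
class RenameUsersExampleColumn < ActiveRecord::Migration
  DOWNTIME = true
  DOWNTIME_REASON = 'Renaming users.example; running instances error until redeployed'

  def change
    rename_column :users, :example, :example_renamed
  end
end
```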
## Changing Column Constraints
Generally changing column constraints requires checking all rows in the table to
......
......@@ -59,11 +59,6 @@ Feature: Profile
When I unsuccessfully change my password
Then I should see a password error message
Scenario: I reset my token
Given I visit profile account page
Then I reset my token
And I should see new token
Scenario: I visit history tab
Given I have activity
When I visit Audit Log page
......
......@@ -43,4 +43,4 @@ Feature: Project Network Graph
Scenario: I should fail to look for a commit
When I look for a commit by ";"
Then page status code should be 404
Then I should see non-existent git revision error message
Feature: Project Source Git Blame
Background:
Given I sign in as a user
And I own project "Shop"
Given I visit project source page
Scenario: I blame file
Given I click on ".gitignore" file in repo
And I click Blame button
Then I should see git file blame
......@@ -162,7 +162,7 @@ class Spinach::Features::Groups < Spinach::FeatureSteps
end
step 'I visit group "NonExistentGroup" page' do
visit group_path(-1)
visit group_path("NonExistentGroup")
end
step 'the archived project have some issues' do
......
......@@ -104,18 +104,6 @@ class Spinach::Features::Profile < Spinach::FeatureSteps
end
end
step 'I reset my token' do
page.within '.private-token' do
@old_token = @user.private_token
click_button "Reset private token"
end
end
step 'I should see new token' do
expect(find("#token").value).not_to eq @old_token
expect(find("#token").value).to eq @user.reload.private_token
end
step 'I have activity' do
create(:closed_issue_event, author: current_user)
end
......
......@@ -109,4 +109,8 @@ class Spinach::Features::ProjectNetworkGraph < Spinach::FeatureSteps
find('button').click
end
end
step 'I should see non-existent git revision error message' do
expect(page).to have_selector '.flash-alert', text: "Git revision ';' does not exist."
end
end
......@@ -468,11 +468,14 @@ module API
end
class LabelBasic < Grape::Entity
expose :name, :color, :description
expose :id, :name, :color, :description
end
class Label < LabelBasic
expose :open_issues_count, :closed_issues_count, :open_merge_requests_count
expose :priority do |label, options|
label.priority(options[:project])
end
expose :subscribed do |label, options|
label.subscribed?(options[:current_user])
......
......@@ -11,7 +11,7 @@ module API
success Entities::Label
end
get ':id/labels' do
present available_labels, with: Entities::Label, current_user: current_user
present available_labels, with: Entities::Label, current_user: current_user, project: user_project
end
desc 'Create a new label' do
......@@ -21,6 +21,7 @@ module API
requires :name, type: String, desc: 'The name of the label to be created'
requires :color, type: String, desc: "The color of the label given in 6-digit hex notation with leading '#' sign (e.g. #FFAABB)"
optional :description, type: String, desc: 'The description of the label to be created'
optional :priority, type: Integer, desc: 'The priority of the label', allow_blank: true
end
post ':id/labels' do
authorize! :admin_label, user_project
......@@ -28,10 +29,15 @@ module API
label = available_labels.find_by(title: params[:name])
conflict!('Label already exists') if label
label = user_project.labels.create(declared(params, include_parent_namespaces: false).to_h)
priority = params.delete(:priority)
label_params = declared(params,
include_parent_namespaces: false,
include_missing: false).to_h
label = user_project.labels.create(label_params)
if label.valid?
present label, with: Entities::Label, current_user: current_user
label.prioritize!(user_project, priority) if priority
present label, with: Entities::Label, current_user: current_user, project: user_project
else
render_validation_error!(label)
end
......@@ -49,7 +55,7 @@ module API
label = user_project.labels.find_by(title: params[:name])
not_found!('Label') unless label
present label.destroy, with: Entities::Label, current_user: current_user
present label.destroy, with: Entities::Label, current_user: current_user, project: user_project
end
desc 'Update an existing label. At least one optional parameter is required.' do
......@@ -60,7 +66,8 @@ module API
optional :new_name, type: String, desc: 'The new name of the label'
optional :color, type: String, desc: "The new color of the label given in 6-digit hex notation with leading '#' sign (e.g. #FFAABB)"
optional :description, type: String, desc: 'The new description of the label'
at_least_one_of :new_name, :color, :description
optional :priority, type: Integer, desc: 'The priority of the label', allow_blank: true
at_least_one_of :new_name, :color, :description, :priority
end
put ':id/labels' do
authorize! :admin_label, user_project
......@@ -68,17 +75,25 @@ module API
label = user_project.labels.find_by(title: params[:name])
not_found!('Label not found') unless label
update_params = declared(params,
include_parent_namespaces: false,
include_missing: false).to_h
update_priority = params.key?(:priority)
priority = params.delete(:priority)
label_params = declared(params,
include_parent_namespaces: false,
include_missing: false).to_h
# Rename new name to the actual label attribute name
update_params['name'] = update_params.delete('new_name') if update_params.key?('new_name')
label_params[:name] = label_params.delete('new_name') if label_params.key?('new_name')
if label.update(update_params)
present label, with: Entities::Label, current_user: current_user
else
render_validation_error!(label)
render_validation_error!(label) unless label.update(label_params)
if update_priority
if priority.nil?
label.unprioritize!(user_project)
else
label.prioritize!(user_project, priority)
end
end
present label, with: Entities::Label, current_user: current_user, project: user_project
end
end
end
......
......@@ -32,7 +32,7 @@ module API
if hook.save
present hook, with: Entities::Hook
else
not_found!
render_validation_error!(hook)
end
end
......
class NamespaceUrlConstrainer
def matches?(request)
id = request.path
id = id.sub(/\A#{relative_url_root}/, '') if relative_url_root
id = id.sub(/\A\/+/, '').split('/').first
id = id.sub(/.atom\z/, '') if id
if id =~ Gitlab::Regex.namespace_regex
find_resource(id)
end
end
def find_resource(id)
Namespace.find_by_path(id)
module ConstrainerHelper
def extract_resource_path(path)
id = path.dup
id.sub!(/\A#{relative_url_root}/, '') if relative_url_root
id.sub(/\A\/+/, '').sub(/\/+\z/, '').sub(/.atom\z/, '')
end
private
......
require 'constraints/namespace_url_constrainer'
require_relative 'constrainer_helper'
class GroupUrlConstrainer < NamespaceUrlConstrainer
def find_resource(id)
Group.find_by_path(id)
class GroupUrlConstrainer
include ConstrainerHelper
def matches?(request)
id = extract_resource_path(request.path)
if id =~ Gitlab::Regex.namespace_regex
Group.find_by(path: id).present?
else
false
end
end
end
require 'constraints/namespace_url_constrainer'
require_relative 'constrainer_helper'
class UserUrlConstrainer < NamespaceUrlConstrainer
def find_resource(id)
User.find_by('lower(username) = ?', id.downcase)
class UserUrlConstrainer
include ConstrainerHelper
def matches?(request)
id = extract_resource_path(request.path)
if id =~ Gitlab::Regex.namespace_regex
User.find_by('lower(username) = ?', id.downcase).present?
else
false
end
end
end
......@@ -106,7 +106,7 @@ module ExtractsPath
# resolved (e.g., when a user inserts an invalid path or ref).
def assign_ref_vars
# assign allowed options
allowed_options = ["filter_ref", "extended_sha1"]
allowed_options = ["filter_ref"]
@options = params.select {|key, value| allowed_options.include?(key) && !value.blank? }
@options = HashWithIndifferentAccess.new(@options)
......@@ -114,17 +114,13 @@ module ExtractsPath
@ref, @path = extract_ref(@id)
@repo = @project.repository
if @options[:extended_sha1].present?
@commit = @repo.commit(@options[:extended_sha1])
else
@commit = @repo.commit(@ref)
@commit = @repo.commit(@ref)
if @path.empty? && !@commit && @id.ends_with?('.atom')
@id = @ref = extract_ref_without_atom(@id)
@commit = @repo.commit(@ref)
if @path.empty? && !@commit && @id.ends_with?('.atom')
@id = @ref = extract_ref_without_atom(@id)
@commit = @repo.commit(@ref)
request.format = :atom if @commit
end
request.format = :atom if @commit
end
raise InvalidPathError unless @commit
......
......@@ -188,19 +188,6 @@ module Gitlab
'rm-project', storage, "#{name}.git"])
end
# Gc repository
#
# storage - project storage path
# path - project path with namespace
#
# Ex.
# gc("/path/to/storage", "gitlab/gitlab-ci")
#
def gc(storage, path)
Gitlab::Utils.system_silent([gitlab_shell_projects_path, 'gc',
storage, "#{path}.git"])
end
# Add new key to gitlab-shell
#
# Ex.
......
......@@ -4,8 +4,7 @@ require 'gitlab/email/handler/create_issue_handler'
module Gitlab
module Email
module Handler
# The `CreateIssueHandler` feature is disabled for the time being.
HANDLERS = [CreateNoteHandler]
HANDLERS = [CreateNoteHandler, CreateIssueHandler]
def self.for(mail, mail_key)
HANDLERS.find do |klass|
......
......@@ -5,16 +5,16 @@ module Gitlab
module Email
module Handler
class CreateIssueHandler < BaseHandler
attr_reader :project_path, :authentication_token
attr_reader :project_path, :incoming_email_token
def initialize(mail, mail_key)
super(mail, mail_key)
@project_path, @authentication_token =
@project_path, @incoming_email_token =
mail_key && mail_key.split('+', 2)
end
def can_handle?
!authentication_token.nil?
!incoming_email_token.nil?
end
def execute
......@@ -29,7 +29,7 @@ module Gitlab
end
def author
@author ||= User.find_by(authentication_token: authentication_token)
@author ||= User.find_by(incoming_email_token: incoming_email_token)
end
def project
......
require 'securerandom'
module Gitlab
# This class implements an 'exclusive lease'. We call it a 'lease'
# because it has a set expiry time. We call it 'exclusive' because only
# one caller may obtain a lease for a given key at a time. The
# implementation is intended to work across GitLab processes and across
# servers. It is a 'cheap' alternative to using SQL queries and updates:
# servers. It is a cheap alternative to using SQL queries and updates:
# you do not need to change the SQL schema to start using
# ExclusiveLease.
#
# It is important to choose the timeout wisely. If the timeout is very
# high (1 hour) then the throughput of your operation gets very low (at
# most once an hour). If the timeout is lower than how long your
# operation may take then you cannot count on exclusivity. For example,
# if the timeout is 10 seconds and you do an operation which may take 20
# seconds then two overlapping operations may hold a lease for the same
# key at the same time.
#
# This class has no 'cancel' method. I originally decided against adding
# it because it would add complexity and a false sense of security. The
# complexity: instead of setting '1' we would have to set a UUID, and to
# delete it we would have to execute Lua on the Redis server to only
# delete the key if the value was our own UUID. Otherwise there is a
# chance that when you intend to cancel your lease you actually delete
# someone else's. The false sense of security: you cannot design your
# system to rely too much on the lease being cancelled after use because
# the calling (Ruby) process may crash or be killed. You _cannot_ count
# on begin/ensure blocks to cancel a lease, because the 'ensure' does
# not always run. Think of 'kill -9' from the Unicorn master for
# instance.
#
# If you find that leases are getting in your way, ask yourself: would
# it be enough to lower the lease timeout? Another thing that might be
# appropriate is to only use a lease for bulk/automated operations, and
# to ignore the lease when you get a single 'manual' user request (a
# button click).
#
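# Usage sketch (editor illustration, not part of the original file): obtain
# the lease around a bulk operation and cancel it afterwards. The key and the
# `run_bulk_operation` call below are hypothetical.
#
#   lease = Gitlab::ExclusiveLease.new('project:42:housekeeping', timeout: 600)
#   if (uuid = lease.try_obtain)
#     begin
#       run_bulk_operation
#     ensure
#       Gitlab::ExclusiveLease.cancel('project:42:housekeeping', uuid)
#     end
#   end
#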
class ExclusiveLease
LUA_CANCEL_SCRIPT = <<-EOS
local key, uuid = KEYS[1], ARGV[1]
if redis.call("get", key) == uuid then
redis.call("del", key)
end
EOS
def self.cancel(key, uuid)
Gitlab::Redis.with do |redis|
redis.eval(LUA_CANCEL_SCRIPT, keys: [redis_key(key)], argv: [uuid])
end
end
def self.redis_key(key)
"gitlab:exclusive_lease:#{key}"
end
def initialize(key, timeout:)
@key, @timeout = key, timeout
@redis_key = self.class.redis_key(key)
@timeout = timeout
@uuid = SecureRandom.uuid
end
# Try to obtain the lease. Return true on success,
# Try to obtain the lease. Return lease UUID on success,
# false if the lease is already taken.
def try_obtain
# Performing a single SET is atomic
Gitlab::Redis.with do |redis|
!!redis.set(redis_key, '1', nx: true, ex: @timeout)
redis.set(@redis_key, @uuid, nx: true, ex: @timeout) && @uuid
end
end
# Returns true if the key for this lease is set.
def exists?
Gitlab::Redis.with do |redis|
redis.exists(redis_key)
redis.exists(@redis_key)
end
end
# No #cancel method. See comments above!
private
def redis_key
"gitlab:exclusive_lease:#{@key}"
end
end
end
module Gitlab
module IncomingEmail
WILDCARD_PLACEHOLDER = '%{key}'.freeze
class << self
FALLBACK_MESSAGE_ID_REGEX = /\Areply\-(.+)@#{Gitlab.config.gitlab.host}\Z/.freeze
......@@ -7,8 +9,16 @@ module Gitlab
config.enabled && config.address
end
def supports_wildcard?
config.address && config.address.include?(WILDCARD_PLACEHOLDER)
end
def supports_issue_creation?
enabled? && supports_wildcard?
end
def reply_address(key)
config.address.gsub('%{key}', key)
config.address.gsub(WILDCARD_PLACEHOLDER, key)
end
def key_from_address(address)
......
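To make the wildcard handling above concrete, here is a minimal sketch of the substitution `reply_address` performs, assuming a configured address of `incoming+%{key}@example.com` (both the address and the token below are made up):

```ruby
# Sketch of the %{key} wildcard substitution; all values are hypothetical.
config_address = 'incoming+%{key}@example.com'
key = 'gitlab-org/gitlab-ce+user_incoming_email_token'
puts config_address.gsub('%{key}', key)
# => incoming+gitlab-org/gitlab-ce+user_incoming_email_token@example.com
```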
......@@ -5,11 +5,7 @@ module Gitlab
def initialize(current_user, project, query, repository_ref = nil)
@current_user = current_user
@project = project
@repository_ref = if repository_ref.present?
repository_ref
else
nil
end
@repository_ref = repository_ref.presence
@query = query
end
......@@ -47,33 +43,31 @@ module Gitlab
private
def blobs
if project.empty_repo? || query.blank?
[]
else
project.repository.search_files(query, repository_ref)
end
@blobs ||= project.repository.search_files(query, repository_ref)
end
def wiki_blobs
if project.wiki_enabled? && query.present?
project_wiki = ProjectWiki.new(project)
@wiki_blobs ||= begin
if project.wiki_enabled? && query.present?
project_wiki = ProjectWiki.new(project)
unless project_wiki.empty?
project_wiki.search_files(query)
unless project_wiki.empty?
project_wiki.search_files(query)
else
[]
end
else
[]
end
else
[]
end
end
def notes
project.notes.user.search(query, as_user: @current_user).order('updated_at DESC')
@notes ||= project.notes.user.search(query, as_user: @current_user).order('updated_at DESC')
end
def commits
project.repository.find_commits_by_message(query)
@commits ||= project.repository.find_commits_by_message(query)
end
def project_ids_relation
......
......@@ -8,6 +8,10 @@ module Gitlab
@namespace_regex ||= /\A#{NAMESPACE_REGEX_STR}\z/.freeze
end
def namespace_route_regex
@namespace_route_regex ||= /#{NAMESPACE_REGEX_STR}/.freeze
end
def namespace_regex_message
"can contain only letters, digits, '_', '-' and '.'. " \
"Cannot start with '-' or end in '.', '.git' or '.atom'." \
......
......@@ -7,6 +7,26 @@ describe ProjectsController do
let(:jpg) { fixture_file_upload(Rails.root + 'spec/fixtures/rails_sample.jpg', 'image/jpg') }
let(:txt) { fixture_file_upload(Rails.root + 'spec/fixtures/doc_sample.txt', 'text/plain') }
describe 'GET index' do
context 'as a user' do
it 'redirects to root page' do
sign_in(user)
get :index
expect(response).to redirect_to(root_path)
end
end
context 'as a guest' do
it 'redirects to Explore page' do
get :index
expect(response).to redirect_to(explore_root_path)
end
end
end
describe "GET show" do
context "user not project member" do
before { sign_in(user) }
......@@ -285,6 +305,33 @@ describe ProjectsController do
end
end
describe 'PUT #new_issue_address' do
subject do
put :new_issue_address,
namespace_id: project.namespace.to_param,
id: project.to_param
user.reload
end
before do
sign_in(user)
project.team << [user, :developer]
allow(Gitlab.config.incoming_email).to receive(:enabled).and_return(true)
end
it 'has http status 200' do
expect(response).to have_http_status(200)
end
it 'changes the user incoming email token' do
expect { subject }.to change { user.incoming_email_token }
end
it 'changes projects new issue address' do
expect { subject }.to change { project.new_issue_address(user) }
end
end
describe "POST #toggle_star" do
it "toggles star if user is signed in" do
sign_in(user)
......
require 'spec_helper'
<<<<<<< HEAD
feature 'Global elastic search', feature: true do
=======
feature 'Global search', feature: true do
>>>>>>> ce/master
let(:user) { create(:user) }
let(:project) { create(:project, namespace: user.namespace) }
before do
<<<<<<< HEAD
stub_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
=======
>>>>>>> ce/master
project.team << [user, :master]
login_with(user)
end
<<<<<<< HEAD
after do
Gitlab::Elastic::Helper.delete_index
stub_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
......@@ -22,6 +30,12 @@ feature 'Global elastic search', feature: true do
create_list(:issue, 21, project: project, title: 'initial')
Gitlab::Elastic::Helper.refresh_index
=======
describe 'I search through the issues and I see pagination' do
before do
allow_any_instance_of(Gitlab::SearchResults).to receive(:per_page).and_return(1)
create_list(:issue, 2, project: project, title: 'initial')
>>>>>>> ce/master
end
it "has a pagination" do
......@@ -34,6 +48,7 @@ feature 'Global elastic search', feature: true do
expect(page).to have_selector('.gl-pagination .page', count: 2)
end
end
<<<<<<< HEAD
describe 'I search through the blobs' do
before do
......@@ -71,4 +86,6 @@ feature 'Global elastic search', feature: true do
expect(page).to have_selector('.commit-row-description')
end
end
=======
>>>>>>> ce/master
end
......@@ -18,8 +18,8 @@ feature 'Start new branch from an issue', feature: true do
end
context "when there is a referenced merge request" do
let(:note) do
create(:note, :on_issue, :system, project: project,
let!(:note) do
create(:note, :on_issue, :system, project: project, noteable: issue,
note: "Mentioned in !#{referenced_mr.iid}")
end
let(:referenced_mr) do
......@@ -28,12 +28,13 @@ feature 'Start new branch from an issue', feature: true do
end
before do
issue.notes << note
referenced_mr.cache_merge_request_closes_issues!(user)
visit namespace_project_issue_path(project.namespace, project, issue)
end
it "hides the new branch button", js: true do
expect(page).to have_css('#new-branch .unavailable')
expect(page).not_to have_css('#new-branch .available')
expect(page).to have_content /1 Related Merge Request/
end
......
......@@ -3,6 +3,7 @@ require 'spec_helper'
describe 'Issues', feature: true do
include IssueHelpers
include SortingHelper
include WaitForAjax
let(:project) { create(:project) }
......@@ -368,6 +369,26 @@ describe 'Issues', feature: true do
end
end
describe 'when I want to reset my incoming email token' do
let(:project1) { create(:project, namespace: @user.namespace) }
before do
allow(Gitlab.config.incoming_email).to receive(:enabled).and_return(true)
project1.team << [@user, :master]
visit namespace_project_issues_path(@user.namespace, project1)
end
it 'changes incoming email address token', js: true do
find('.issue-email-modal-btn').click
previous_token = find('input#issue_email').value
find('.incoming-email-token-reset').click
wait_for_ajax
expect(find('input#issue_email').value).not_to eq(previous_token)
end
end
describe 'update labels from issue#show', js: true do
let(:issue) { create(:issue, project: project, author: @user, assignee: @user) }
let!(:label) { create(:label, project: project) }
......@@ -574,7 +595,7 @@ describe 'Issues', feature: true do
end
end
xdescribe 'new issue by email' do
describe 'new issue by email' do
shared_examples 'show the email in the modal' do
before do
stub_incoming_email_setting(enabled: true, address: "p+%{key}@gl.ab")
......
......@@ -32,4 +32,33 @@ describe 'Profile account page', feature: true do
expect(current_path).to eq(profile_account_path)
end
end
describe 'when I reset private token' do
before do
visit profile_account_path
end
it 'resets private token' do
previous_token = find("#private-token").value
click_link('Reset private token')
expect(find('#private-token').value).not_to eq(previous_token)
end
end
describe 'when I reset incoming email token' do
before do
allow(Gitlab.config.incoming_email).to receive(:enabled).and_return(true)
visit profile_account_path
end
it 'resets incoming email token' do
previous_token = find('#incoming-email-token').value
click_link('Reset incoming email token')
expect(find('#incoming-email-token').value).not_to eq(previous_token)
end
end
end
class Spinach::Features::ProjectSourceGitBlame < Spinach::FeatureSteps
include SharedAuthentication
include SharedProject
include SharedPaths
require 'spec_helper'
step 'I click on ".gitignore" file in repo' do
click_link ".gitignore"
end
feature 'user checks git blame', feature: true do
let(:project) { create(:project) }
let(:user) { create(:user) }
step 'I click Blame button' do
click_link 'Blame'
before do
project.team << [user, :master]
login_with(user)
visit namespace_project_tree_path(project.namespace, project, project.default_branch)
end
step 'I should see git file blame' do
scenario "can see blame of '.gitignore'" do
click_link ".gitignore"
click_link 'Blame'
expect(page).to have_content "*.rb"
expect(page).to have_content "Dmitriy Zaporozhets"
expect(page).to have_content "Initial commit"
......
......@@ -100,6 +100,32 @@ describe "Search", feature: true do
expect(page).to have_link(snippet.title)
end
it 'finds a commit' do
visit namespace_project_path(project.namespace, project)
page.within '.search' do
fill_in 'search', with: 'add'
click_button 'Go'
end
click_link "Commits"
expect(page).to have_selector('.commit-row-description')
end
it 'finds a code' do
visit namespace_project_path(project.namespace, project)
page.within '.search' do
fill_in 'search', with: 'def'
click_button 'Go'
end
click_link "Code"
expect(page).to have_selector('.file-content .code')
end
end
describe 'Right header search field', feature: true do
......
......@@ -3,7 +3,7 @@ require 'spec_helper'
describe "Internal Project Snippets Access", feature: true do
include AccessMatchers
let(:project) { create(:project, :internal) }
let(:project) { create(:empty_project, :internal) }
let(:owner) { project.owner }
let(:master) { create(:user) }
......@@ -48,31 +48,63 @@ describe "Internal Project Snippets Access", feature: true do
it { is_expected.to be_denied_for :visitor }
end
describe "GET /:project_path/snippets/:id for an internal snippet" do
subject { namespace_project_snippet_path(project.namespace, project, internal_snippet) }
describe "GET /:project_path/snippets/:id" do
context "for an internal snippet" do
subject { namespace_project_snippet_path(project.namespace, project, internal_snippet) }
it { is_expected.to be_allowed_for :admin }
it { is_expected.to be_allowed_for owner }
it { is_expected.to be_allowed_for master }
it { is_expected.to be_allowed_for developer }
it { is_expected.to be_allowed_for reporter }
it { is_expected.to be_allowed_for guest }
it { is_expected.to be_allowed_for :user }
it { is_expected.to be_denied_for :external }
it { is_expected.to be_denied_for :visitor }
it { is_expected.to be_allowed_for :admin }
it { is_expected.to be_allowed_for owner }
it { is_expected.to be_allowed_for master }
it { is_expected.to be_allowed_for developer }
it { is_expected.to be_allowed_for reporter }
it { is_expected.to be_allowed_for guest }
it { is_expected.to be_allowed_for :user }
it { is_expected.to be_denied_for :external }
it { is_expected.to be_denied_for :visitor }
end
context "for a private snippet" do
subject { namespace_project_snippet_path(project.namespace, project, private_snippet) }
it { is_expected.to be_allowed_for :admin }
it { is_expected.to be_allowed_for owner }
it { is_expected.to be_allowed_for master }
it { is_expected.to be_allowed_for developer }
it { is_expected.to be_allowed_for reporter }
it { is_expected.to be_allowed_for guest }
it { is_expected.to be_denied_for :user }
it { is_expected.to be_denied_for :external }
it { is_expected.to be_denied_for :visitor }
end
end
describe "GET /:project_path/snippets/:id for a private snippet" do
subject { namespace_project_snippet_path(project.namespace, project, private_snippet) }
describe "GET /:project_path/snippets/:id/raw" do
context "for an internal snippet" do
subject { raw_namespace_project_snippet_path(project.namespace, project, internal_snippet) }
it { is_expected.to be_allowed_for :admin }
it { is_expected.to be_allowed_for owner }
it { is_expected.to be_allowed_for master }
it { is_expected.to be_allowed_for developer }
it { is_expected.to be_allowed_for reporter }
it { is_expected.to be_allowed_for guest }
it { is_expected.to be_denied_for :user }
it { is_expected.to be_denied_for :external }
it { is_expected.to be_denied_for :visitor }
it { is_expected.to be_allowed_for :admin }
it { is_expected.to be_allowed_for owner }
it { is_expected.to be_allowed_for master }
it { is_expected.to be_allowed_for developer }
it { is_expected.to be_allowed_for reporter }
it { is_expected.to be_allowed_for guest }
it { is_expected.to be_allowed_for :user }
it { is_expected.to be_denied_for :external }
it { is_expected.to be_denied_for :visitor }
end
context "for a private snippet" do
subject { raw_namespace_project_snippet_path(project.namespace, project, private_snippet) }
it { is_expected.to be_allowed_for :admin }
it { is_expected.to be_allowed_for owner }
it { is_expected.to be_allowed_for master }
it { is_expected.to be_allowed_for developer }
it { is_expected.to be_allowed_for reporter }
it { is_expected.to be_allowed_for guest }
it { is_expected.to be_denied_for :user }
it { is_expected.to be_denied_for :external }
it { is_expected.to be_denied_for :visitor }
end
end
end
......@@ -3,7 +3,7 @@ require 'spec_helper'
describe "Private Project Snippets Access", feature: true do
include AccessMatchers
let(:project) { create(:project, :private) }
let(:project) { create(:empty_project, :private) }
let(:owner) { project.owner }
let(:master) { create(:user) }
......@@ -60,4 +60,18 @@ describe "Private Project Snippets Access", feature: true do
it { is_expected.to be_denied_for :external }
it { is_expected.to be_denied_for :visitor }
end
describe "GET /:project_path/snippets/:id/raw for a private snippet" do
subject { raw_namespace_project_snippet_path(project.namespace, project, private_snippet) }
it { is_expected.to be_allowed_for :admin }
it { is_expected.to be_allowed_for owner }
it { is_expected.to be_allowed_for master }
it { is_expected.to be_allowed_for developer }
it { is_expected.to be_allowed_for reporter }
it { is_expected.to be_allowed_for guest }
it { is_expected.to be_denied_for :user }
it { is_expected.to be_denied_for :external }
it { is_expected.to be_denied_for :visitor }
end
end
......@@ -3,7 +3,7 @@ require 'spec_helper'
describe "Public Project Snippets Access", feature: true do
include AccessMatchers
let(:project) { create(:project, :public) }
let(:project) { create(:empty_project, :public) }
let(:owner) { project.owner }
let(:master) { create(:user) }
......@@ -49,45 +49,91 @@ describe "Public Project Snippets Access", feature: true do
it { is_expected.to be_denied_for :visitor }
end
describe "GET /:project_path/snippets/:id for a public snippet" do
subject { namespace_project_snippet_path(project.namespace, project, public_snippet) }
describe "GET /:project_path/snippets/:id" do
context "for a public snippet" do
subject { namespace_project_snippet_path(project.namespace, project, public_snippet) }
it { is_expected.to be_allowed_for :admin }
it { is_expected.to be_allowed_for owner }
it { is_expected.to be_allowed_for master }
it { is_expected.to be_allowed_for developer }
it { is_expected.to be_allowed_for reporter }
it { is_expected.to be_allowed_for guest }
it { is_expected.to be_allowed_for :user }
it { is_expected.to be_allowed_for :external }
it { is_expected.to be_allowed_for :visitor }
end
it { is_expected.to be_allowed_for :admin }
it { is_expected.to be_allowed_for owner }
it { is_expected.to be_allowed_for master }
it { is_expected.to be_allowed_for developer }
it { is_expected.to be_allowed_for reporter }
it { is_expected.to be_allowed_for guest }
it { is_expected.to be_allowed_for :user }
it { is_expected.to be_allowed_for :external }
it { is_expected.to be_allowed_for :visitor }
end
describe "GET /:project_path/snippets/:id for an internal snippet" do
subject { namespace_project_snippet_path(project.namespace, project, internal_snippet) }
context "for an internal snippet" do
subject { namespace_project_snippet_path(project.namespace, project, internal_snippet) }
it { is_expected.to be_allowed_for :admin }
it { is_expected.to be_allowed_for owner }
it { is_expected.to be_allowed_for master }
it { is_expected.to be_allowed_for developer }
it { is_expected.to be_allowed_for reporter }
it { is_expected.to be_allowed_for guest }
it { is_expected.to be_allowed_for :user }
it { is_expected.to be_denied_for :external }
it { is_expected.to be_denied_for :visitor }
it { is_expected.to be_allowed_for :admin }
it { is_expected.to be_allowed_for owner }
it { is_expected.to be_allowed_for master }
it { is_expected.to be_allowed_for developer }
it { is_expected.to be_allowed_for reporter }
it { is_expected.to be_allowed_for guest }
it { is_expected.to be_allowed_for :user }
it { is_expected.to be_denied_for :external }
it { is_expected.to be_denied_for :visitor }
end
context "for a private snippet" do
subject { namespace_project_snippet_path(project.namespace, project, private_snippet) }
it { is_expected.to be_allowed_for :admin }
it { is_expected.to be_allowed_for owner }
it { is_expected.to be_allowed_for master }
it { is_expected.to be_allowed_for developer }
it { is_expected.to be_allowed_for reporter }
it { is_expected.to be_allowed_for guest }
it { is_expected.to be_denied_for :user }
it { is_expected.to be_denied_for :external }
it { is_expected.to be_denied_for :visitor }
end
end
describe "GET /:project_path/snippets/:id for a private snippet" do
subject { namespace_project_snippet_path(project.namespace, project, private_snippet) }
describe "GET /:project_path/snippets/:id/raw" do
context "for a public snippet" do
subject { raw_namespace_project_snippet_path(project.namespace, project, public_snippet) }
it { is_expected.to be_allowed_for :admin }
it { is_expected.to be_allowed_for owner }
it { is_expected.to be_allowed_for master }
it { is_expected.to be_allowed_for developer }
it { is_expected.to be_allowed_for reporter }
it { is_expected.to be_allowed_for guest }
it { is_expected.to be_denied_for :user }
it { is_expected.to be_denied_for :external }
it { is_expected.to be_denied_for :visitor }
it { is_expected.to be_allowed_for :admin }
it { is_expected.to be_allowed_for owner }
it { is_expected.to be_allowed_for master }
it { is_expected.to be_allowed_for developer }
it { is_expected.to be_allowed_for reporter }
it { is_expected.to be_allowed_for guest }
it { is_expected.to be_allowed_for :user }
it { is_expected.to be_allowed_for :external }
it { is_expected.to be_allowed_for :visitor }
end
context "for an internal snippet" do
subject { raw_namespace_project_snippet_path(project.namespace, project, internal_snippet) }
it { is_expected.to be_allowed_for :admin }
it { is_expected.to be_allowed_for owner }
it { is_expected.to be_allowed_for master }
it { is_expected.to be_allowed_for developer }
it { is_expected.to be_allowed_for reporter }
it { is_expected.to be_allowed_for guest }
it { is_expected.to be_allowed_for :user }
it { is_expected.to be_denied_for :external }
it { is_expected.to be_denied_for :visitor }
end
context "for a private snippet" do
subject { raw_namespace_project_snippet_path(project.namespace, project, private_snippet) }
it { is_expected.to be_allowed_for :admin }
it { is_expected.to be_allowed_for owner }
it { is_expected.to be_allowed_for master }
it { is_expected.to be_allowed_for developer }
it { is_expected.to be_allowed_for reporter }
it { is_expected.to be_allowed_for guest }
it { is_expected.to be_denied_for :user }
it { is_expected.to be_denied_for :external }
it { is_expected.to be_denied_for :visitor }
end
end
end
require 'spec_helper'
describe ComponentsHelper do
describe '#gitlab_workhorse_version' do
context 'without a Gitlab-Workhorse header' do
it 'shows the version from Gitlab::Workhorse.version' do
expect(helper.gitlab_workhorse_version).to eq Gitlab::Workhorse.version
end
end
context 'with a Gitlab-Workhorse header' do
before do
helper.request.headers['Gitlab-Workhorse'] = '42.42.0-rc3'
end
it 'shows the actual GitLab Workhorse version currently in use' do
expect(helper.gitlab_workhorse_version).to eq '42.42.0'
end
end
end
end
......@@ -13,8 +13,9 @@
//= require boards/stores/boards_store
//= require ./mock_data
(() => {
describe('Store', () => {
beforeEach(() => {
Vue.http.interceptors.push(boardsMockInterceptor);
gl.boardService = new BoardService('/test/issue-boards/board', '1');
gl.issueBoards.BoardsStore.create();
......@@ -24,145 +25,147 @@
});
});
describe('Store', () => {
it('starts with a blank state', () => {
expect(gl.issueBoards.BoardsStore.state.lists.length).toBe(0);
});
afterEach(() => {
Vue.http.interceptors = _.without(Vue.http.interceptors, boardsMockInterceptor);
});
describe('lists', () => {
it('creates new list without persisting to DB', () => {
gl.issueBoards.BoardsStore.addList(listObj);
it('starts with a blank state', () => {
expect(gl.issueBoards.BoardsStore.state.lists.length).toBe(0);
});
expect(gl.issueBoards.BoardsStore.state.lists.length).toBe(1);
});
describe('lists', () => {
it('creates new list without persisting to DB', () => {
gl.issueBoards.BoardsStore.addList(listObj);
it('finds list by ID', () => {
gl.issueBoards.BoardsStore.addList(listObj);
const list = gl.issueBoards.BoardsStore.findList('id', 1);
expect(gl.issueBoards.BoardsStore.state.lists.length).toBe(1);
});
expect(list.id).toBe(1);
});
it('finds list by ID', () => {
gl.issueBoards.BoardsStore.addList(listObj);
const list = gl.issueBoards.BoardsStore.findList('id', 1);
it('finds list by type', () => {
gl.issueBoards.BoardsStore.addList(listObj);
const list = gl.issueBoards.BoardsStore.findList('type', 'label');
expect(list.id).toBe(1);
});
expect(list).toBeDefined();
});
it('finds list by type', () => {
gl.issueBoards.BoardsStore.addList(listObj);
const list = gl.issueBoards.BoardsStore.findList('type', 'label');
it('finds list limited by type', () => {
gl.issueBoards.BoardsStore.addList({
id: 1,
position: 0,
title: 'Test',
list_type: 'backlog'
});
const list = gl.issueBoards.BoardsStore.findList('id', 1, 'backlog');
expect(list).toBeDefined();
});
expect(list).toBeDefined();
it('finds list limited by type', () => {
gl.issueBoards.BoardsStore.addList({
id: 1,
position: 0,
title: 'Test',
list_type: 'backlog'
});
const list = gl.issueBoards.BoardsStore.findList('id', 1, 'backlog');
it('gets issue when new list added', (done) => {
gl.issueBoards.BoardsStore.addList(listObj);
const list = gl.issueBoards.BoardsStore.findList('id', 1);
expect(list).toBeDefined();
});
expect(gl.issueBoards.BoardsStore.state.lists.length).toBe(1);
it('gets issue when new list added', (done) => {
gl.issueBoards.BoardsStore.addList(listObj);
const list = gl.issueBoards.BoardsStore.findList('id', 1);
setTimeout(() => {
expect(list.issues.length).toBe(1);
expect(list.issues[0].id).toBe(1);
done();
}, 0);
});
expect(gl.issueBoards.BoardsStore.state.lists.length).toBe(1);
it('persists new list', (done) => {
gl.issueBoards.BoardsStore.new({
title: 'Test',
type: 'label',
label: {
id: 1,
title: 'Testing',
color: 'red',
description: 'testing;'
}
});
expect(gl.issueBoards.BoardsStore.state.lists.length).toBe(1);
setTimeout(() => {
const list = gl.issueBoards.BoardsStore.findList('id', 1);
expect(list).toBeDefined();
expect(list.id).toBe(1);
expect(list.position).toBe(0);
done();
}, 0);
});
setTimeout(() => {
expect(list.issues.length).toBe(1);
expect(list.issues[0].id).toBe(1);
done();
}, 0);
});
it('check for blank state adding', () => {
expect(gl.issueBoards.BoardsStore.shouldAddBlankState()).toBe(true);
it('persists new list', (done) => {
gl.issueBoards.BoardsStore.new({
title: 'Test',
type: 'label',
label: {
id: 1,
title: 'Testing',
color: 'red',
description: 'testing;'
}
});
expect(gl.issueBoards.BoardsStore.state.lists.length).toBe(1);
it('check for blank state not adding', () => {
gl.issueBoards.BoardsStore.addList(listObj);
expect(gl.issueBoards.BoardsStore.shouldAddBlankState()).toBe(false);
});
setTimeout(() => {
const list = gl.issueBoards.BoardsStore.findList('id', 1);
expect(list).toBeDefined();
expect(list.id).toBe(1);
expect(list.position).toBe(0);
done();
}, 0);
});
it('check for blank state adding when backlog & done list exist', () => {
gl.issueBoards.BoardsStore.addList({
list_type: 'backlog'
});
gl.issueBoards.BoardsStore.addList({
list_type: 'done'
});
it('check for blank state adding', () => {
expect(gl.issueBoards.BoardsStore.shouldAddBlankState()).toBe(true);
});
it('check for blank state not adding', () => {
gl.issueBoards.BoardsStore.addList(listObj);
expect(gl.issueBoards.BoardsStore.shouldAddBlankState()).toBe(false);
});
expect(gl.issueBoards.BoardsStore.shouldAddBlankState()).toBe(true);
it('check for blank state adding when backlog & done list exist', () => {
gl.issueBoards.BoardsStore.addList({
list_type: 'backlog'
});
gl.issueBoards.BoardsStore.addList({
list_type: 'done'
});
expect(gl.issueBoards.BoardsStore.shouldAddBlankState()).toBe(true);
});
it('adds the blank state', () => {
gl.issueBoards.BoardsStore.addBlankState();
it('adds the blank state', () => {
gl.issueBoards.BoardsStore.addBlankState();
const list = gl.issueBoards.BoardsStore.findList('type', 'blank', 'blank');
expect(list).toBeDefined();
});
const list = gl.issueBoards.BoardsStore.findList('type', 'blank', 'blank');
expect(list).toBeDefined();
});
it('removes list from state', () => {
gl.issueBoards.BoardsStore.addList(listObj);
it('removes list from state', () => {
gl.issueBoards.BoardsStore.addList(listObj);
expect(gl.issueBoards.BoardsStore.state.lists.length).toBe(1);
expect(gl.issueBoards.BoardsStore.state.lists.length).toBe(1);
gl.issueBoards.BoardsStore.removeList(1, 'label');
gl.issueBoards.BoardsStore.removeList(1, 'label');
expect(gl.issueBoards.BoardsStore.state.lists.length).toBe(0);
});
expect(gl.issueBoards.BoardsStore.state.lists.length).toBe(0);
});
it('moves the position of lists', () => {
const listOne = gl.issueBoards.BoardsStore.addList(listObj),
listTwo = gl.issueBoards.BoardsStore.addList(listObjDuplicate);
it('moves the position of lists', () => {
const listOne = gl.issueBoards.BoardsStore.addList(listObj),
listTwo = gl.issueBoards.BoardsStore.addList(listObjDuplicate);
expect(gl.issueBoards.BoardsStore.state.lists.length).toBe(2);
expect(gl.issueBoards.BoardsStore.state.lists.length).toBe(2);
gl.issueBoards.BoardsStore.moveList(listOne, ['2', '1']);
gl.issueBoards.BoardsStore.moveList(listOne, ['2', '1']);
expect(listOne.position).toBe(1);
});
expect(listOne.position).toBe(1);
});
it('moves an issue from one list to another', (done) => {
const listOne = gl.issueBoards.BoardsStore.addList(listObj),
listTwo = gl.issueBoards.BoardsStore.addList(listObjDuplicate);
it('moves an issue from one list to another', (done) => {
const listOne = gl.issueBoards.BoardsStore.addList(listObj),
listTwo = gl.issueBoards.BoardsStore.addList(listObjDuplicate);
expect(gl.issueBoards.BoardsStore.state.lists.length).toBe(2);
expect(gl.issueBoards.BoardsStore.state.lists.length).toBe(2);
setTimeout(() => {
expect(listOne.issues.length).toBe(1);
expect(listTwo.issues.length).toBe(1);
setTimeout(() => {
expect(listOne.issues.length).toBe(1);
expect(listTwo.issues.length).toBe(1);
gl.issueBoards.BoardsStore.moveIssueToList(listOne, listTwo, listOne.findIssue(1));
gl.issueBoards.BoardsStore.moveIssueToList(listOne, listTwo, listOne.findIssue(1));
expect(listOne.issues.length).toBe(0);
expect(listTwo.issues.length).toBe(1);
expect(listOne.issues.length).toBe(0);
expect(listTwo.issues.length).toBe(1);
done();
}, 0);
});
done();
}, 0);
});
});
})();
});
......@@ -17,12 +17,17 @@ describe('List model', () => {
let list;
beforeEach(() => {
Vue.http.interceptors.push(boardsMockInterceptor);
gl.boardService = new BoardService('/test/issue-boards/board', '1');
gl.issueBoards.BoardsStore.create();
list = new List(listObj);
});
afterEach(() => {
Vue.http.interceptors = _.without(Vue.http.interceptors, boardsMockInterceptor);
});
it('gets issues when created', (done) => {
setTimeout(() => {
expect(list.issues.length).toBe(1);
......
......@@ -48,10 +48,10 @@ const BoardsMockData = {
}
};
Vue.http.interceptors.push((request, next) => {
const boardsMockInterceptor = (request, next) => {
const body = BoardsMockData[request.method][request.url];
next(request.respondWith(JSON.stringify(body), {
status: 200
}));
});
};
require 'spec_helper'
describe ConstrainerHelper, lib: true do
include ConstrainerHelper
describe '#extract_resource_path' do
it { expect(extract_resource_path('/gitlab/')).to eq('gitlab') }
it { expect(extract_resource_path('///gitlab//')).to eq('gitlab') }
it { expect(extract_resource_path('/gitlab.atom')).to eq('gitlab') }
context 'relative url' do
before do
allow(Gitlab::Application.config).to receive(:relative_url_root) { '/gitlab' }
end
it { expect(extract_resource_path('/gitlab/foo')).to eq('foo') }
it { expect(extract_resource_path('/foo/bar')).to eq('foo/bar') }
end
end
end
require 'spec_helper'
describe GroupUrlConstrainer, lib: true do
let!(:username) { create(:group, path: 'gitlab-org') }
let!(:group) { create(:group, path: 'gitlab') }
describe '#find_resource' do
it { expect(!!subject.find_resource('gitlab-org')).to be_truthy }
it { expect(!!subject.find_resource('gitlab-com')).to be_falsey }
describe '#matches?' do
context 'root group' do
it { expect(subject.matches?(request '/gitlab')).to be_truthy }
it { expect(subject.matches?(request '/gitlab.atom')).to be_truthy }
it { expect(subject.matches?(request '/gitlab/edit')).to be_falsey }
it { expect(subject.matches?(request '/gitlab-ce')).to be_falsey }
it { expect(subject.matches?(request '/.gitlab')).to be_falsey }
end
end
def request(path)
double(:request, path: path)
end
end
require 'spec_helper'
describe NamespaceUrlConstrainer, lib: true do
let!(:group) { create(:group, path: 'gitlab') }
describe '#matches?' do
context 'existing namespace' do
it { expect(subject.matches?(request '/gitlab')).to be_truthy }
it { expect(subject.matches?(request '/gitlab.atom')).to be_truthy }
it { expect(subject.matches?(request '/gitlab/')).to be_truthy }
it { expect(subject.matches?(request '//gitlab/')).to be_truthy }
end
context 'non-existing namespace' do
it { expect(subject.matches?(request '/gitlab-ce')).to be_falsey }
it { expect(subject.matches?(request '/gitlab.ce')).to be_falsey }
it { expect(subject.matches?(request '/g/gitlab')).to be_falsey }
it { expect(subject.matches?(request '/.gitlab')).to be_falsey }
end
context 'relative url' do
before do
allow(Gitlab::Application.config).to receive(:relative_url_root) { '/gitlab' }
end
it { expect(subject.matches?(request '/gitlab/gitlab')).to be_truthy }
it { expect(subject.matches?(request '/gitlab/gitlab-ce')).to be_falsey }
it { expect(subject.matches?(request '/gitlab/')).to be_falsey }
end
end
def request(path)
OpenStruct.new(path: path)
end
end
......@@ -3,8 +3,14 @@ require 'spec_helper'
describe UserUrlConstrainer, lib: true do
let!(:username) { create(:user, username: 'dz') }
describe '#find_resource' do
it { expect(!!subject.find_resource('dz')).to be_truthy }
it { expect(!!subject.find_resource('john')).to be_falsey }
describe '#matches?' do
it { expect(subject.matches?(request '/dz')).to be_truthy }
it { expect(subject.matches?(request '/dz.atom')).to be_truthy }
it { expect(subject.matches?(request '/dz/projects')).to be_falsey }
it { expect(subject.matches?(request '/gitlab')).to be_falsey }
end
def request(path)
double(:request, path: path)
end
end
......@@ -14,7 +14,6 @@ describe Gitlab::Shell, lib: true do
it { is_expected.to respond_to :add_repository }
it { is_expected.to respond_to :remove_repository }
it { is_expected.to respond_to :fork_repository }
it { is_expected.to respond_to :gc }
it { is_expected.to respond_to :add_namespace }
it { is_expected.to respond_to :rm_namespace }
it { is_expected.to respond_to :mv_namespace }
......
require 'spec_helper'
require_relative '../email_shared_blocks'
xdescribe Gitlab::Email::Handler::CreateIssueHandler, lib: true do
describe Gitlab::Email::Handler::CreateIssueHandler, lib: true do
include_context :email_shared_context
it_behaves_like :email_shared_examples
......@@ -18,7 +18,7 @@ xdescribe Gitlab::Email::Handler::CreateIssueHandler, lib: true do
create(
:user,
email: 'jake@adventuretime.ooo',
authentication_token: 'auth_token'
incoming_email_token: 'auth_token'
)
end
......@@ -60,8 +60,8 @@ xdescribe Gitlab::Email::Handler::CreateIssueHandler, lib: true do
end
end
context "when we can't find the authentication_token" do
let(:email_raw) { fixture_file("emails/wrong_authentication_token.eml") }
context "when we can't find the incoming_email_token" do
let(:email_raw) { fixture_file("emails/wrong_incoming_email_token.eml") }
it "raises an UserNotFoundError" do
expect { receiver.execute }.to raise_error(Gitlab::Email::UserNotFoundError)
......
......@@ -5,32 +5,47 @@ describe Gitlab::ExclusiveLease, type: :redis do
describe '#try_obtain' do
it 'cannot obtain twice before the lease has expired' do
lease = Gitlab::ExclusiveLease.new(unique_key, timeout: 3600)
expect(lease.try_obtain).to eq(true)
lease = described_class.new(unique_key, timeout: 3600)
expect(lease.try_obtain).to be_present
expect(lease.try_obtain).to eq(false)
end
it 'can obtain after the lease has expired' do
timeout = 1
lease = Gitlab::ExclusiveLease.new(unique_key, timeout: timeout)
lease = described_class.new(unique_key, timeout: timeout)
lease.try_obtain # start the lease
sleep(2 * timeout) # lease should have expired now
expect(lease.try_obtain).to eq(true)
expect(lease.try_obtain).to be_present
end
end
describe '#exists?' do
it 'returns true for an existing lease' do
lease = Gitlab::ExclusiveLease.new(unique_key, timeout: 3600)
lease = described_class.new(unique_key, timeout: 3600)
lease.try_obtain
expect(lease.exists?).to eq(true)
end
it 'returns false for a lease that does not exist' do
lease = Gitlab::ExclusiveLease.new(unique_key, timeout: 3600)
lease = described_class.new(unique_key, timeout: 3600)
expect(lease.exists?).to eq(false)
end
end
describe '.cancel' do
it 'can cancel a lease' do
uuid = new_lease(unique_key)
expect(uuid).to be_present
expect(new_lease(unique_key)).to eq(false)
described_class.cancel(unique_key, uuid)
expect(new_lease(unique_key)).to be_present
end
def new_lease(key)
described_class.new(key, timeout: 3600).try_obtain
end
end
end
......@@ -98,6 +98,24 @@ describe ApplicationSetting, models: true do
end
end
end
context 'housekeeping settings' do
it { is_expected.not_to allow_value(0).for(:housekeeping_incremental_repack_period) }
it 'wants the full repack period to be longer than the incremental repack period' do
subject.housekeeping_incremental_repack_period = 2
subject.housekeeping_full_repack_period = 1
expect(subject).not_to be_valid
end
it 'wants the gc period to be longer than the full repack period' do
subject.housekeeping_full_repack_period = 2
subject.housekeeping_gc_period = 1
expect(subject).not_to be_valid
end
end
end
context 'restricted signup domains' do
......
......@@ -358,4 +358,25 @@ describe Issue, "Issuable" do
expect(Issue.with_label([bug.title, enhancement.title])).to match_array([issue2])
end
end
describe '#assignee_or_author?' do
let(:user) { build(:user, id: 1) }
let(:issue) { build(:issue) }
it 'returns true for a user that is assigned to an issue' do
issue.assignee = user
expect(issue.assignee_or_author?(user)).to eq(true)
end
it 'returns true for a user that is the author of an issue' do
issue.author = user
expect(issue.assignee_or_author?(user)).to eq(true)
end
it 'returns false for a user that is not the assignee or author' do
expect(issue.assignee_or_author?(user)).to eq(false)
end
end
end
require 'spec_helper'
describe ExternalIssue, models: true do
let(:project) { double('project', to_reference: 'namespace1/project1') }
let(:project) { double('project', id: 1, to_reference: 'namespace1/project1') }
let(:issue) { described_class.new('EXT-1234', project) }
describe 'modules' do
......@@ -36,4 +36,10 @@ describe ExternalIssue, models: true do
end
end
end
describe '#project_id' do
it 'returns the ID of the project' do
expect(issue.project_id).to eq(project.id)
end
end
end
require 'spec_helper'
describe IssueCollection do
let(:user) { create(:user) }
let(:project) { create(:project) }
let(:issue1) { create(:issue, project: project) }
let(:issue2) { create(:issue, project: project) }
let(:collection) { described_class.new([issue1, issue2]) }
describe '#collection' do
it 'returns the issues in the same order as the input Array' do
expect(collection.collection).to eq([issue1, issue2])
end
end
describe '#updatable_by_user' do
context 'using an admin user' do
it 'returns all issues' do
user = create(:admin)
expect(collection.updatable_by_user(user)).to eq([issue1, issue2])
end
end
context 'using a user that has no access to the project' do
it 'returns no issues when the user is not an assignee or author' do
expect(collection.updatable_by_user(user)).to be_empty
end
it 'returns the issues the user is assigned to' do
issue1.assignee = user
expect(collection.updatable_by_user(user)).to eq([issue1])
end
it 'returns the issues for which the user is the author' do
issue1.author = user
expect(collection.updatable_by_user(user)).to eq([issue1])
end
end
context 'using a user that has reporter access to the project' do
it 'returns the issues of the project' do
project.team << [user, :reporter]
expect(collection.updatable_by_user(user)).to eq([issue1, issue2])
end
end
context 'using a user that is the owner of a project' do
it 'returns the issues of the project' do
expect(collection.updatable_by_user(project.namespace.owner)).
to eq([issue1, issue2])
end
end
end
describe '#visible_to' do
it 'is an alias for updatable_by_user' do
updatable_by_user = described_class.instance_method(:updatable_by_user)
visible_to = described_class.instance_method(:visible_to)
expect(visible_to).to eq(updatable_by_user)
end
end
end
......@@ -296,6 +296,7 @@ describe Project, models: true do
end
end
<<<<<<< HEAD
describe "#kerberos_url_to_repo" do
let(:project) { create(:empty_project, path: "somewhere") }
......@@ -305,6 +306,9 @@ describe Project, models: true do
end
xdescribe "#new_issue_address" do
=======
describe "#new_issue_address" do
>>>>>>> ce/master
let(:project) { create(:empty_project, path: "somewhere") }
let(:user) { create(:user) }
......@@ -314,8 +318,7 @@ describe Project, models: true do
end
it 'returns the address to create a new issue' do
token = user.authentication_token
address = "p+#{project.namespace.path}/#{project.path}+#{token}@gl.ab"
address = "p+#{project.path_with_namespace}+#{user.incoming_email_token}@gl.ab"
expect(project.new_issue_address(user)).to eq(address)
end
......
......@@ -362,6 +362,19 @@ describe Repository, models: true do
expect(results.first).not_to start_with('fatal:')
end
it 'properly handles when query is not present' do
results = repository.search_files('', 'master')
expect(results).to match_array([])
end
it 'properly handles query when repo is empty' do
repository = create(:empty_project).repository
results = repository.search_files('test', 'master')
expect(results).to match_array([])
end
describe 'result' do
subject { results.first }
......
......@@ -1240,4 +1240,40 @@ describe User, models: true do
expect(user.viewable_starred_projects).not_to include(private_project)
end
end
describe '#projects_with_reporter_access_limited_to' do
let(:project1) { create(:project) }
let(:project2) { create(:project) }
let(:user) { create(:user) }
before do
project1.team << [user, :reporter]
project2.team << [user, :guest]
end
it 'returns the projects when using a single project ID' do
projects = user.projects_with_reporter_access_limited_to(project1.id)
expect(projects).to eq([project1])
end
it 'returns the projects when using an Array of project IDs' do
projects = user.projects_with_reporter_access_limited_to([project1.id])
expect(projects).to eq([project1])
end
it 'returns the projects when using an ActiveRecord relation' do
projects = user.
projects_with_reporter_access_limited_to(Project.select(:id))
expect(projects).to eq([project1])
end
it 'does not return projects you do not have reporter access to' do
projects = user.projects_with_reporter_access_limited_to(project2.id)
expect(projects).to be_empty
end
end
end
require 'spec_helper'
describe IssuePolicy, models: true do
let(:user) { create(:user) }
describe '#rules' do
context 'using a regular issue' do
let(:project) { create(:project, :public) }
let(:issue) { create(:issue, project: project) }
let(:policies) { described_class.abilities(user, issue).to_set }
context 'with a regular user' do
it 'includes the read_issue permission' do
expect(policies).to include(:read_issue)
end
it 'does not include the admin_issue permission' do
expect(policies).not_to include(:admin_issue)
end
it 'does not include the update_issue permission' do
expect(policies).not_to include(:update_issue)
end
end
context 'with a user that is a project reporter' do
before do
project.team << [user, :reporter]
end
it 'includes the read_issue permission' do
expect(policies).to include(:read_issue)
end
it 'includes the admin_issue permission' do
expect(policies).to include(:admin_issue)
end
it 'includes the update_issue permission' do
expect(policies).to include(:update_issue)
end
end
context 'with a user that is a project guest' do
before do
project.team << [user, :guest]
end
it 'includes the read_issue permission' do
expect(policies).to include(:read_issue)
end
it 'does not include the admin_issue permission' do
expect(policies).not_to include(:admin_issue)
end
it 'does not include the update_issue permission' do
expect(policies).not_to include(:update_issue)
end
end
end
context 'using a confidential issue' do
let(:issue) { create(:issue, :confidential) }
context 'with a regular user' do
let(:policies) { described_class.abilities(user, issue).to_set }
it 'does not include the read_issue permission' do
expect(policies).not_to include(:read_issue)
end
it 'does not include the admin_issue permission' do
expect(policies).not_to include(:admin_issue)
end
it 'does not include the update_issue permission' do
expect(policies).not_to include(:update_issue)
end
end
context 'with a user that is a project member' do
let(:policies) { described_class.abilities(user, issue).to_set }
before do
issue.project.team << [user, :reporter]
end
it 'includes the read_issue permission' do
expect(policies).to include(:read_issue)
end
it 'includes the admin_issue permission' do
expect(policies).to include(:admin_issue)
end
it 'includes the update_issue permission' do
expect(policies).to include(:update_issue)
end
end
context 'without a user' do
let(:policies) { described_class.abilities(nil, issue).to_set }
it 'does not include the read_issue permission' do
expect(policies).not_to include(:read_issue)
end
it 'does not include the admin_issue permission' do
expect(policies).not_to include(:admin_issue)
end
it 'does not include the update_issue permission' do
expect(policies).not_to include(:update_issue)
end
end
end
end
end
......@@ -6,6 +6,7 @@ describe API::API, api: true do
let(:user) { create(:user) }
let(:project) { create(:project, creator_id: user.id, namespace: user.namespace) }
let!(:label1) { create(:label, title: 'label1', project: project) }
let!(:priority_label) { create(:label, title: 'bug', project: project, priority: 3) }
before do
project.team << [user, :master]
......@@ -16,13 +17,27 @@ describe API::API, api: true do
group = create(:group)
group_label = create(:group_label, group: group)
project.update(group: group)
expected_keys = [
'id', 'name', 'color', 'description',
'open_issues_count', 'closed_issues_count', 'open_merge_requests_count',
'subscribed', 'priority'
]
get api("/projects/#{project.id}/labels", user)
expect(response).to have_http_status(200)
expect(json_response).to be_an Array
expect(json_response.size).to eq(2)
expect(json_response.map { |l| l['name'] }).to match_array([group_label.name, label1.name])
expect(json_response.size).to eq(3)
expect(json_response.first.keys).to match_array expected_keys
expect(json_response.map { |l| l['name'] }).to match_array([group_label.name, priority_label.name, label1.name])
expect(json_response.last['name']).to eq(label1.name)
expect(json_response.last['color']).to be_present
expect(json_response.last['description']).to be_nil
expect(json_response.last['open_issues_count']).to eq(0)
expect(json_response.last['closed_issues_count']).to eq(0)
expect(json_response.last['open_merge_requests_count']).to eq(0)
expect(json_response.last['priority']).to be_nil
expect(json_response.last['subscribed']).to be_falsey
end
end
......@@ -31,21 +46,39 @@ describe API::API, api: true do
post api("/projects/#{project.id}/labels", user),
name: 'Foo',
color: '#FFAABB',
description: 'test'
description: 'test',
priority: 2
expect(response).to have_http_status(201)
expect(json_response['name']).to eq('Foo')
expect(json_response['color']).to eq('#FFAABB')
expect(json_response['description']).to eq('test')
expect(json_response['priority']).to eq(2)
end
it 'returns created label when only required params are given' do
post api("/projects/#{project.id}/labels", user),
name: 'Foo & Bar',
color: '#FFAABB'
expect(response.status).to eq(201)
expect(json_response['name']).to eq('Foo & Bar')
expect(json_response['color']).to eq('#FFAABB')
expect(json_response['description']).to be_nil
expect(json_response['priority']).to be_nil
end
it 'creates a prioritized label' do
post api("/projects/#{project.id}/labels", user),
name: 'Foo & Bar',
color: '#FFAABB',
priority: 3
expect(response.status).to eq(201)
expect(json_response['name']).to eq('Foo & Bar')
expect(json_response['color']).to eq('#FFAABB')
expect(json_response['description']).to be_nil
expect(json_response['priority']).to eq(3)
end
it 'returns a 400 bad request if name not given' do
......@@ -95,6 +128,15 @@ describe API::API, api: true do
expect(json_response['message']).to eq('Label already exists')
end
it 'returns 400 for invalid priority' do
post api("/projects/#{project.id}/labels", user),
name: 'Foo',
color: '#FFAAFFFF',
priority: 'foo'
expect(response).to have_http_status(400)
end
it 'returns 409 if label already exists in project' do
post api("/projects/#{project.id}/labels", user),
name: 'label1',
......@@ -155,11 +197,43 @@ describe API::API, api: true do
it 'returns 200 if description is changed' do
put api("/projects/#{project.id}/labels", user),
name: 'label1',
name: 'bug',
description: 'test'
expect(response).to have_http_status(200)
expect(json_response['name']).to eq(label1.name)
expect(json_response['name']).to eq(priority_label.name)
expect(json_response['description']).to eq('test')
expect(json_response['priority']).to eq(3)
end
it 'returns 200 if priority is changed' do
put api("/projects/#{project.id}/labels", user),
name: 'bug',
priority: 10
expect(response.status).to eq(200)
expect(json_response['name']).to eq(priority_label.name)
expect(json_response['priority']).to eq(10)
end
it 'returns 200 if a priority is added' do
put api("/projects/#{project.id}/labels", user),
name: 'label1',
priority: 3
expect(response.status).to eq(200)
expect(json_response['name']).to eq(label1.name)
expect(json_response['priority']).to eq(3)
end
it 'returns 200 if the priority is removed' do
put api("/projects/#{project.id}/labels", user),
name: priority_label.name,
priority: nil
expect(response.status).to eq(200)
expect(json_response['name']).to eq(priority_label.name)
expect(json_response['priority']).to be_nil
end
it 'returns 404 if label does not exist' do
......@@ -178,7 +252,7 @@ describe API::API, api: true do
it 'returns 400 if no new parameters given' do
put api("/projects/#{project.id}/labels", user), name: 'label1'
expect(response).to have_http_status(400)
expect(json_response['error']).to eq('new_name, color, description are missing, '\
expect(json_response['error']).to eq('new_name, color, description, priority are missing, '\
'at least one parameter must be provided')
end
......@@ -206,6 +280,14 @@ describe API::API, api: true do
expect(response).to have_http_status(400)
expect(json_response['message']['color']).to eq(['must be a valid color code'])
end
it 'returns 400 for invalid priority' do
post api("/projects/#{project.id}/labels", user),
name: 'Foo',
priority: 'foo'
expect(response).to have_http_status(400)
end
end
describe "POST /projects/:id/labels/:label_id/subscription" do
......
......@@ -52,6 +52,12 @@ describe API::API, api: true do
expect(response).to have_http_status(400)
end
it "responds with 400 if url is invalid" do
post api("/hooks", admin), url: 'hp://mep.mep'
expect(response).to have_http_status(400)
end
it "does not create new hook without url" do
expect do
post api("/hooks", admin)
......
......@@ -266,13 +266,13 @@ describe "Groups", "routing" do
end
it "also display group#show on the short path" do
allow(Group).to receive(:find_by_path).and_return(true)
allow(Group).to receive(:find_by).and_return(true)
expect(get('/1')).to route_to('groups#show', id: '1')
end
it "also display group#show with dot in the path" do
allow(Group).to receive(:find_by_path).and_return(true)
allow(Group).to receive(:find_by).and_return(true)
expect(get('/group.with.dot')).to route_to('groups#show', id: 'group.with.dot')
end
......
......@@ -326,6 +326,9 @@ describe GitPushService, services: true do
author_email: commit_author.email
)
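# The commit above is stubbed, so make ProcessCommitWorker return it directly
# instead of looking the commit up by ID.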
allow_any_instance_of(ProcessCommitWorker).to receive(:find_commit).
and_return(commit)
allow(project.repository).to receive(:commits_between).and_return([commit])
end
......@@ -381,6 +384,9 @@ describe GitPushService, services: true do
committed_date: commit_time
)
allow_any_instance_of(ProcessCommitWorker).to receive(:find_commit).
and_return(commit)
allow(project.repository).to receive(:commits_between).and_return([commit])
end
......@@ -417,6 +423,9 @@ describe GitPushService, services: true do
allow(project.repository).to receive(:commits_between).
and_return([closing_commit])
allow_any_instance_of(ProcessCommitWorker).to receive(:find_commit).
and_return(closing_commit)
project.team << [commit_author, :master]
end
......@@ -562,9 +571,16 @@ describe GitPushService, services: true do
let(:housekeeping) { Projects::HousekeepingService.new(project) }
before do
# Flush any raw Redis data stored by the housekeeping code.
Gitlab::Redis.with { |conn| conn.flushall }
allow(Projects::HousekeepingService).to receive(:new).and_return(housekeeping)
end
after do
Gitlab::Redis.with { |conn| conn.flushall }
end
it 'does not perform housekeeping when not needed' do
expect(housekeeping).not_to receive(:execute)
......
......@@ -15,10 +15,39 @@ describe Issues::CloseService, services: true do
end
describe '#execute' do
let(:service) { described_class.new(project, user) }
it 'checks if the user is authorized to update the issue' do
expect(service).to receive(:can?).with(user, :update_issue, issue).
and_call_original
service.execute(issue)
end
it 'does not close the issue when the user is not authorized to do so' do
allow(service).to receive(:can?).with(user, :update_issue, issue).
and_return(false)
expect(service).not_to receive(:close_issue)
expect(service.execute(issue)).to eq(issue)
end
it 'closes the issue when the user is authorized to do so' do
allow(service).to receive(:can?).with(user, :update_issue, issue).
and_return(true)
expect(service).to receive(:close_issue).
with(issue, commit: nil, notifications: true, system_note: true)
service.execute(issue)
end
end
describe '#close_issue' do
context "valid params" do
before do
perform_enqueued_jobs do
described_class.new(project, user).execute(issue)
described_class.new(project, user).close_issue(issue)
end
end
......@@ -41,24 +70,12 @@ describe Issues::CloseService, services: true do
end
end
context 'current user is not authorized to close issue' do
before do
perform_enqueued_jobs do
described_class.new(project, guest).execute(issue)
end
end
it 'does not close the issue' do
expect(issue).to be_open
end
end
context 'when issue is not confidential' do
it 'executes issue hooks' do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks)
expect(project).to receive(:execute_services).with(an_instance_of(Hash), :issue_hooks)
described_class.new(project, user).execute(issue)
described_class.new(project, user).close_issue(issue)
end
end
......@@ -69,14 +86,14 @@ describe Issues::CloseService, services: true do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks)
expect(project).to receive(:execute_services).with(an_instance_of(Hash), :confidential_issue_hooks)
described_class.new(project, user).execute(issue)
described_class.new(project, user).close_issue(issue)
end
end
context 'external issue tracker' do
before do
allow(project).to receive(:default_issues_tracker?).and_return(false)
described_class.new(project, user).execute(issue)
described_class.new(project, user).close_issue(issue)
end
it { expect(issue).to be_valid }
......
......@@ -14,8 +14,10 @@ describe Projects::HousekeepingService do
describe '#execute' do
it 'enqueues a sidekiq job' do
expect(subject).to receive(:try_obtain_lease).and_return(true)
expect(GitGarbageCollectWorker).to receive(:perform_async).with(project.id)
expect(subject).to receive(:try_obtain_lease).and_return(:the_uuid)
expect(subject).to receive(:lease_key).and_return(:the_lease_key)
expect(subject).to receive(:task).and_return(:the_task)
expect(GitGarbageCollectWorker).to receive(:perform_async).with(project.id, :the_task, :the_lease_key, :the_uuid)
subject.execute
expect(project.reload.pushes_since_gc).to eq(0)
......@@ -58,4 +60,26 @@ describe Projects::HousekeepingService do
end.to change { project.pushes_since_gc }.from(0).to(1)
end
end
it 'uses all three kinds of housekeeping we offer' do
allow(subject).to receive(:try_obtain_lease).and_return(:the_uuid)
allow(subject).to receive(:lease_key).and_return(:the_lease_key)
# At push 200
expect(GitGarbageCollectWorker).to receive(:perform_async).with(project.id, :gc, :the_lease_key, :the_uuid).
exactly(1).times
# At push 50, 100, 150
expect(GitGarbageCollectWorker).to receive(:perform_async).with(project.id, :full_repack, :the_lease_key, :the_uuid).
exactly(3).times
# At push 10, 20, ... (except those above)
expect(GitGarbageCollectWorker).to receive(:perform_async).with(project.id, :incremental_repack, :the_lease_key, :the_uuid).
exactly(16).times
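# 201 pushes cross the 10-push threshold 20 times; one of those (200) is the gc
# run and three (50, 100, 150) are full repacks, leaving 16 incremental repacks.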
201.times do
subject.increment!
subject.execute if subject.needed?
end
expect(project.pushes_since_gc).to eq(1)
end
end
......@@ -29,7 +29,11 @@ RSpec.configure do |config|
config.include Devise::Test::ControllerHelpers, type: :controller
config.include Warden::Test::Helpers, type: :request
config.include LoginHelpers, type: :feature
<<<<<<< HEAD
config.include SearchHelpers, type: :feature
=======
config.include SearchHelpers, type: :feature
>>>>>>> ce/master
config.include StubConfiguration
config.include EmailHelpers
config.include TestEnv
......
......@@ -205,20 +205,18 @@ module TestEnv
end
def set_repo_refs(repo_path, branch_sha)
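# Batch every ref update into a single `git update-ref --stdin -z` call: each
# NUL-terminated record below points one branch head at its seed SHA.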
instructions = branch_sha.map {|branch, sha| "update refs/heads/#{branch}\x00#{sha}\x00" }.join("\x00") << "\x00"
update_refs = %W(#{Gitlab.config.git.bin_path} update-ref --stdin -z)
reset = proc do
IO.popen(update_refs, "w") {|io| io.write(instructions) }
$?.success?
end
Dir.chdir(repo_path) do
branch_sha.each do |branch, sha|
# Try to reset without fetching to avoid using the network.
reset = %W(#{Gitlab.config.git.bin_path} update-ref refs/heads/#{branch} #{sha})
unless system(*reset)
if system(*%W(#{Gitlab.config.git.bin_path} fetch origin))
unless system(*reset)
raise 'The fetched test seed '\
'does not contain the required revision.'
end
else
raise 'Could not fetch test seed repository.'
end
end
# Try to reset without fetching to avoid using the network.
unless reset.call
raise 'Could not fetch test seed repository.' unless system(*%W(#{Gitlab.config.git.bin_path} fetch origin))
raise 'The fetched test seed does not contain the required revision.' unless reset.call
end
end
end
......
require 'fileutils'
require 'spec_helper'
describe GitGarbageCollectWorker do
......@@ -6,16 +8,12 @@ describe GitGarbageCollectWorker do
subject { GitGarbageCollectWorker.new }
before do
allow(subject).to receive(:gitlab_shell).and_return(shell)
end
describe "#perform" do
it "runs `git gc`" do
expect(shell).to receive(:gc).with(
project.repository_storage_path,
project.path_with_namespace).
and_return(true)
it "flushes ref caches when the task is 'gc'" do
expect(subject).to receive(:command).with(:gc).and_return([:the, :command])
expect(Gitlab::Popen).to receive(:popen).
with([:the, :command], project.repository.path_to_repo).and_return(["", 0])
expect_any_instance_of(Repository).to receive(:after_create_branch).and_call_original
expect_any_instance_of(Repository).to receive(:branch_names).and_call_original
expect_any_instance_of(Repository).to receive(:branch_count).and_call_original
......@@ -23,5 +21,110 @@ describe GitGarbageCollectWorker do
subject.perform(project.id)
end
shared_examples 'gc tasks' do
before { allow(subject).to receive(:bitmaps_enabled?).and_return(bitmaps_enabled) }
it 'incremental repack adds a new packfile' do
create_objects(project)
before_packs = packs(project)
expect(before_packs.count).to be >= 1
subject.perform(project.id, 'incremental_repack')
after_packs = packs(project)
# Exactly one new pack should have been created
expect(after_packs.count).to eq(before_packs.count + 1)
# Previously existing packs are still around
expect(before_packs & after_packs).to eq(before_packs)
end
it 'full repack consolidates into 1 packfile' do
create_objects(project)
subject.perform(project.id, 'incremental_repack')
before_packs = packs(project)
expect(before_packs.count).to be >= 2
subject.perform(project.id, 'full_repack')
after_packs = packs(project)
expect(after_packs.count).to eq(1)
# Previously existing packs should be gone now
expect(after_packs - before_packs).to eq(after_packs)
expect(File.exist?(bitmap_path(after_packs.first))).to eq(bitmaps_enabled)
end
it 'gc consolidates into 1 packfile and updates packed-refs' do
create_objects(project)
before_packs = packs(project)
before_packed_refs = packed_refs(project)
expect(before_packs.count).to be >= 1
subject.perform(project.id, 'gc')
after_packed_refs = packed_refs(project)
after_packs = packs(project)
expect(after_packs.count).to eq(1)
# Previously existing packs should be gone now
expect(after_packs - before_packs).to eq(after_packs)
# The packed-refs file should have been updated during 'git gc'
expect(before_packed_refs).not_to eq(after_packed_refs)
expect(File.exist?(bitmap_path(after_packs.first))).to eq(bitmaps_enabled)
end
end
context 'with bitmaps enabled' do
let(:bitmaps_enabled) { true }
include_examples 'gc tasks'
end
context 'with bitmaps disabled' do
let(:bitmaps_enabled) { false }
include_examples 'gc tasks'
end
end
# Create a new commit on a random new branch
def create_objects(project)
rugged = project.repository.rugged
old_commit = rugged.branches.first.target
new_commit_sha = Rugged::Commit.create(
rugged,
message: "hello world #{SecureRandom.hex(6)}",
author: Gitlab::Git::committer_hash(email: 'foo@bar', name: 'baz'),
committer: Gitlab::Git::committer_hash(email: 'foo@bar', name: 'baz'),
tree: old_commit.tree,
parents: [old_commit],
)
project.repository.update_ref!(
"refs/heads/#{SecureRandom.hex(6)}",
new_commit_sha,
Gitlab::Git::BLANK_SHA
)
end
def packs(project)
Dir["#{project.repository.path_to_repo}/objects/pack/*.pack"]
end
def packed_refs(project)
path = "#{project.repository.path_to_repo}/packed-refs"
FileUtils.touch(path)
File.read(path)
end
def bitmap_path(pack)
pack.sub(/\.pack\z/, '.bitmap')
end
end
require 'spec_helper'
describe ProcessCommitWorker do
let(:worker) { described_class.new }
let(:user) { create(:user) }
let(:project) { create(:project, :public) }
let(:issue) { create(:issue, project: project, author: user) }
let(:commit) { project.commit }
describe '#perform' do
it 'does not process the commit when the project does not exist' do
expect(worker).not_to receive(:close_issues)
worker.perform(-1, user.id, commit.id)
end
it 'does not process the commit when the user does not exist' do
expect(worker).not_to receive(:close_issues)
worker.perform(project.id, -1, commit.id)
end
it 'does not process the commit when the commit no longer exists' do
expect(worker).not_to receive(:close_issues)
worker.perform(project.id, user.id, 'this-should-not-exist')
end
it 'processes the commit message' do
expect(worker).to receive(:process_commit_message).and_call_original
worker.perform(project.id, user.id, commit.id)
end
it 'updates the issue metrics' do
expect(worker).to receive(:update_issue_metrics).and_call_original
worker.perform(project.id, user.id, commit.id)
end
end
describe '#process_commit_message' do
context 'when pushing to the default branch' do
it 'closes issues that should be closed per the commit message' do
allow(commit).to receive(:safe_message).
and_return("Closes #{issue.to_reference}")
expect(worker).to receive(:close_issues).
with(project, user, user, commit, [issue])
worker.process_commit_message(project, commit, user, user, true)
end
end
context 'when pushing to a non-default branch' do
it 'does not close any issues' do
allow(commit).to receive(:safe_message).
and_return("Closes #{issue.to_reference}")
expect(worker).not_to receive(:close_issues)
worker.process_commit_message(project, commit, user, user, false)
end
end
it 'creates cross references' do
expect(commit).to receive(:create_cross_references!)
worker.process_commit_message(project, commit, user, user)
end
end
describe '#close_issues' do
context 'when the user can update the issues' do
it 'closes the issues' do
worker.close_issues(project, user, user, commit, [issue])
issue.reload
expect(issue.closed?).to eq(true)
end
end
context 'when the user can not update the issues' do
it 'does not close the issues' do
other_user = create(:user)
worker.close_issues(project, other_user, other_user, commit, [issue])
issue.reload
expect(issue.closed?).to eq(false)
end
end
end
describe '#update_issue_metrics' do
it 'updates any existing issue metrics' do
allow(commit).to receive(:safe_message).
and_return("Closes #{issue.to_reference}")
worker.update_issue_metrics(commit, user)
metric = Issue::Metrics.first
expect(metric.first_mentioned_in_commit_at).to eq(commit.committed_date)
end
end
end