Commit 977fdbe8 authored by Jacob Vosmaer

Merge branch 'master' of https://gitlab.com/gitlab-org/gitlab-ce into cache-clear

parents 9805fe19 7f7eef2a
@@ -62,6 +62,8 @@ v 8.5.0 (unreleased)
   - Replaces "Create merge request" link with one to the "Merge Request" when one exists
   - Fix CI builds badge, add a new link to builds badge, deprecate the old one
   - Fix broken link to project in build notification emails
+  - Ability to see and sort on vote count from Issues and MR lists
+  - Fix builds scheduler when first build in stage was allowed to fail

v 8.4.4
   - Update omniauth-saml gem to 1.4.2
...
@@ -112,7 +112,7 @@ gem 'diffy', '~> 3.0.3'

# Application server
group :unicorn do
-  gem "unicorn", '~> 4.8.2'
+  gem "unicorn", '~> 4.9.0'
  gem 'unicorn-worker-killer', '~> 0.4.2'
end
...
@@ -833,7 +833,7 @@ GEM
    unf (0.1.4)
      unf_ext
    unf_ext (0.0.7.1)
-    unicorn (4.8.3)
+    unicorn (4.9.0)
      kgio (~> 2.6)
      rack
      raindrops (~> 0.7)
@@ -1034,7 +1034,7 @@ DEPENDENCIES
  uglifier (~> 2.7.2)
  underscore-rails (~> 1.8.0)
  unf (~> 0.1.4)
-  unicorn (~> 4.8.2)
+  unicorn (~> 4.9.0)
  unicorn-worker-killer (~> 0.4.2)
  version_sorter (~> 2.0.0)
  virtus (~> 1.0.1)
...
@@ -15,3 +15,5 @@ class @IssuableContext
      block.find('.selectbox').show()
      block.find('.value').hide()
      block.find('.js-select2').select2("open")
+
+    $(".right-sidebar").niceScroll()
@@ -117,4 +117,4 @@ body {
  &.ui_violet {
    @include gitlab-theme(#9988CC, $theme-violet, #443366, #332255);
  }
-}
\ No newline at end of file
+}
@@ -24,7 +24,7 @@
    display: inline-block;
  }

-  .issue-no-comments {
+  .issue-no-comments, .issue-no-votes {
    opacity: 0.5;
  }
}
...
@@ -163,7 +163,7 @@
    display: inline-block;
  }

-  .merge-request-no-comments {
+  .merge-request-no-comments, .merge-request-no-votes {
    opacity: 0.5;
  }
}
@@ -236,4 +236,4 @@
      }
    }
  }
-}
\ No newline at end of file
+}
@@ -3,6 +3,7 @@ class Projects::ImportsController < Projects::ApplicationController
  before_action :authorize_admin_project!
  before_action :require_no_repo, only: [:new, :create]
  before_action :redirect_if_progress, only: [:new, :create]
+  before_action :redirect_if_no_import, only: :show

  def new
  end
@@ -63,14 +64,19 @@ class Projects::ImportsController < Projects::ApplicationController
  def require_no_repo
    if @project.repository_exists?
-      redirect_to(namespace_project_path(@project.namespace, @project))
+      redirect_to namespace_project_path(@project.namespace, @project)
    end
  end

  def redirect_if_progress
    if @project.import_in_progress?
-      redirect_to namespace_project_import_path(@project.namespace, @project) &&
-        return
+      redirect_to namespace_project_import_path(@project.namespace, @project)
    end
  end
+
+  def redirect_if_no_import
+    if @project.repository_exists? && @project.no_import?
+      redirect_to namespace_project_path(@project.namespace, @project)
+    end
+  end
end
@@ -11,7 +11,9 @@ class Projects::RepositoriesController < Projects::ApplicationController
  end

  def archive
-    render json: ArchiveRepositoryService.new(@project, params[:ref], params[:format]).execute
+    RepositoryArchiveCacheWorker.perform_async
+    headers.store(*Gitlab::Workhorse.send_git_archive(@project, params[:ref], params[:format]))
+    head :ok
  rescue => ex
    logger.error("#{self.class.name}: #{ex}")
    return git_not_found!
...
@@ -69,7 +69,7 @@ module DiffHelper
  end

  def line_comments
-    @line_comments ||= @line_notes.select(&:active?).group_by(&:line_code)
+    @line_comments ||= @line_notes.select(&:active?).sort_by(&:created_at).group_by(&:line_code)
  end

  def organize_comments(type_left, type_right, line_code_left, line_code_right)
...
@@ -11,6 +11,8 @@ module SortingHelper
      sort_value_largest_repo => sort_title_largest_repo,
      sort_value_recently_signin => sort_title_recently_signin,
      sort_value_oldest_signin => sort_title_oldest_signin,
+      sort_value_downvotes => sort_title_downvotes,
+      sort_value_upvotes => sort_title_upvotes
    }
  end
@@ -54,6 +56,14 @@ module SortingHelper
    'Oldest sign in'
  end

+  def sort_title_downvotes
+    'Least popular'
+  end
+
+  def sort_title_upvotes
+    'Most popular'
+  end
+
  def sort_value_oldest_updated
    'updated_asc'
  end
@@ -93,4 +103,12 @@ module SortingHelper
  def sort_value_oldest_signin
    'oldest_sign_in'
  end
+
+  def sort_value_downvotes
+    'downvotes_desc'
+  end
+
+  def sort_value_upvotes
+    'upvotes_desc'
+  end
end
@@ -69,10 +69,35 @@ module Issuable
      case method.to_s
      when 'milestone_due_asc' then order_milestone_due_asc
      when 'milestone_due_desc' then order_milestone_due_desc
+      when 'downvotes_desc' then order_downvotes_desc
+      when 'upvotes_desc' then order_upvotes_desc
      else
        order_by(method)
      end
    end
+
+    def order_downvotes_desc
+      order_votes_desc('thumbsdown')
+    end
+
+    def order_upvotes_desc
+      order_votes_desc('thumbsup')
+    end
+
+    def order_votes_desc(award_emoji_name)
+      issuable_table = self.arel_table
+      note_table = Note.arel_table
+
+      join_clause = issuable_table.join(note_table, Arel::Nodes::OuterJoin).on(
+        note_table[:noteable_id].eq(issuable_table[:id]).and(
+          note_table[:noteable_type].eq(self.name).and(
+            note_table[:is_award].eq(true).and(note_table[:note].eq(award_emoji_name))
+          )
+        )
+      ).join_sources
+
+      joins(join_clause).group(issuable_table[:id]).reorder("COUNT(notes.id) DESC")
+    end
  end

  def today?
...
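For orientation, a hedged sketch of how these new class methods are reached, assuming the `Issuable` concern is included in `Issue` and `MergeRequest` as elsewhere in this merge; the exact scope chain is illustrative, not part of the diff:

```ruby
# 'upvotes_desc' / 'downvotes_desc' are the new sort values wired up in
# SortingHelper; Issuable's sort method dispatches them to order_*_desc above.
Issue.opened.sort('upvotes_desc')          # issues with the most 'thumbsup' award notes first
MergeRequest.opened.order_downvotes_desc   # merge requests with the most 'thumbsdown' first
```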
@@ -137,7 +137,7 @@ class MergeRequest < ActiveRecord::Base
  scope :by_milestone, ->(milestone) { where(milestone_id: milestone) }
  scope :in_projects, ->(project_ids) { where("source_project_id in (:project_ids) OR target_project_id in (:project_ids)", project_ids: project_ids) }
  scope :of_projects, ->(ids) { where(target_project_id: ids) }
-  scope :opened, -> { with_state(:opened) }
+  scope :opened, -> { with_states(:opened, :reopened) }
  scope :merged, -> { with_state(:merged) }
  scope :closed, -> { with_state(:closed) }
  scope :closed_and_merged, -> { with_states(:closed, :merged) }
...
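A hedged illustration of the effect of the scope change, mirroring the controller spec added later in this merge (`mr` stands for any persisted merge request; the state events are the ones already used in the codebase):

```ruby
mr.close!
mr.reopen!                          # record is now in the :reopened state
MergeRequest.opened.exists?(mr.id)  # => true with with_states(:opened, :reopened),
                                    #    false with the old with_state(:opened)
```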
@@ -382,6 +382,10 @@ class Project < ActiveRecord::Base
    external_import? || forked?
  end

+  def no_import?
+    import_status == 'none'
+  end
+
  def external_import?
    import_url.present?
  end
...
@@ -238,6 +238,15 @@ class Repository
    expire_branch_cache(branch_name)
  end

+  # Expires _all_ caches, including those that would normally only be expired
+  # under specific conditions.
+  def expire_all_caches!
+    expire_cache
+    expire_root_ref_cache
+    expire_emptiness_caches
+    expire_has_visible_content_cache
+  end
+
  def expire_branch_cache(branch_name = nil)
    # When we push to the root branch we have to flush the cache for all other
    # branches as their statistics are based on the commits relative to the
@@ -258,6 +267,14 @@ class Repository
    @root_ref = nil
  end

+  # Expires the cache(s) used to determine if a repository is empty or not.
+  def expire_emptiness_caches
+    cache.expire(:empty?)
+    @empty = nil
+
+    expire_has_visible_content_cache
+  end
+
  def expire_has_visible_content_cache
    cache.expire(:has_visible_content?)
    @has_visible_content = nil
@@ -611,6 +628,8 @@ class Repository
  end

  def merge_base(first_commit_id, second_commit_id)
+    first_commit_id = commit(first_commit_id).try(:id) || first_commit_id
+    second_commit_id = commit(second_commit_id).try(:id) || second_commit_id
    rugged.merge_base(first_commit_id, second_commit_id)
  rescue Rugged::ReferenceError
    nil
...
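A hedged usage note on the `merge_base` change above: because both arguments now pass through `commit(...).try(:id)`, branch names and abbreviated SHAs are resolved to full commit ids before reaching Rugged. The calls below are illustrative:

```ruby
repo = project.repository
repo.merge_base('master', 'feature')   # branch names are resolved via commit(...)
repo.merge_base('8a2a6eb2', 'master')  # short SHAs are expanded the same way
# Unresolvable inputs fall through unchanged and still hit the
# Rugged::ReferenceError rescue, so the method returns nil as before.
```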
class ArchiveRepositoryService
attr_reader :project, :ref, :format
def initialize(project, ref, format)
format ||= 'tar.gz'
@project, @ref, @format = project, ref, format.downcase
end
def execute(options = {})
RepositoryArchiveCacheWorker.perform_async
metadata = project.repository.archive_metadata(ref, storage_path, format)
raise "Repository or ref not found" if metadata.empty?
metadata
end
private
def storage_path
Gitlab.config.gitlab.repository_downloads_path
end
end
@@ -34,6 +34,7 @@ module Ci
        build = commit.builds.create!(build_attrs)
        build.execute_hooks
+        build
      end
    end
  end
...
class GitPushService class GitPushService < BaseService
attr_accessor :project, :user, :push_data, :push_commits attr_accessor :push_data, :push_commits
include Gitlab::CurrentSettings include Gitlab::CurrentSettings
include Gitlab::Access include Gitlab::Access
# This method will be called after each git update # This method will be called after each git update
# and only if the provided user and project is present in GitLab. # and only if the provided user and project are present in GitLab.
# #
# All callbacks for post receive action should be placed here. # All callbacks for post receive action should be placed here.
# #
...@@ -15,67 +15,67 @@ class GitPushService ...@@ -15,67 +15,67 @@ class GitPushService
# 4. Executes the project's web hooks # 4. Executes the project's web hooks
# 5. Executes the project's services # 5. Executes the project's services
# #
def execute(project, user, oldrev, newrev, ref) def execute
@project, @user = project, user @project.repository.expire_cache(branch_name)
branch_name = Gitlab::Git.ref_name(ref)
project.repository.expire_cache(branch_name)
if push_remove_branch?(ref, newrev)
project.repository.expire_has_visible_content_cache
if push_remove_branch?
@project.repository.expire_has_visible_content_cache
@push_commits = [] @push_commits = []
elsif push_to_new_branch?(ref, oldrev) elsif push_to_new_branch?
project.repository.expire_has_visible_content_cache @project.repository.expire_has_visible_content_cache
# Re-find the pushed commits. # Re-find the pushed commits.
if is_default_branch?(ref) if is_default_branch?
# Initial push to the default branch. Take the full history of that branch as "newly pushed". # Initial push to the default branch. Take the full history of that branch as "newly pushed".
@push_commits = project.repository.commits(newrev) process_default_branch
# Ensure HEAD points to the default branch in case it is not master
project.change_head(branch_name)
# Set protection on the default branch if configured
if (current_application_settings.default_branch_protection != PROTECTION_NONE)
developers_can_push = current_application_settings.default_branch_protection == PROTECTION_DEV_CAN_PUSH ? true : false
project.protected_branches.create({ name: project.default_branch, developers_can_push: developers_can_push })
end
else else
# Use the pushed commits that aren't reachable by the default branch # Use the pushed commits that aren't reachable by the default branch
# as a heuristic. This may include more commits than are actually pushed, but # as a heuristic. This may include more commits than are actually pushed, but
# that shouldn't matter because we check for existing cross-references later. # that shouldn't matter because we check for existing cross-references later.
@push_commits = project.repository.commits_between(project.default_branch, newrev) @push_commits = @project.repository.commits_between(@project.default_branch, params[:newrev])
# don't process commits for the initial push to the default branch # don't process commits for the initial push to the default branch
process_commit_messages(ref) process_commit_messages
end end
elsif push_to_existing_branch?(ref, oldrev) elsif push_to_existing_branch?
# Collect data for this git push # Collect data for this git push
@push_commits = project.repository.commits_between(oldrev, newrev) @push_commits = @project.repository.commits_between(params[:oldrev], params[:newrev])
process_commit_messages(ref) process_commit_messages
end end
# Update merge requests that may be affected by this push. A new branch # Update merge requests that may be affected by this push. A new branch
# could cause the last commit of a merge request to change. # could cause the last commit of a merge request to change.
project.update_merge_requests(oldrev, newrev, ref, @user) update_merge_requests
end
@push_data = build_push_data(oldrev, newrev, ref) protected
EventCreateService.new.push(project, user, @push_data) def update_merge_requests
project.execute_hooks(@push_data.dup, :push_hooks) @project.update_merge_requests(params[:oldrev], params[:newrev], params[:ref], current_user)
project.execute_services(@push_data.dup, :push_hooks)
CreateCommitBuildsService.new.execute(project, @user, @push_data) EventCreateService.new.push(@project, current_user, build_push_data)
ProjectCacheWorker.perform_async(project.id) @project.execute_hooks(build_push_data.dup, :push_hooks)
@project.execute_services(build_push_data.dup, :push_hooks)
CreateCommitBuildsService.new.execute(@project, current_user, build_push_data)
ProjectCacheWorker.perform_async(@project.id)
end end
protected def process_default_branch
@push_commits = project.repository.commits(params[:newrev])
# Ensure HEAD points to the default branch in case it is not master
project.change_head(branch_name)
# Set protection on the default branch if configured
if (current_application_settings.default_branch_protection != PROTECTION_NONE)
developers_can_push = current_application_settings.default_branch_protection == PROTECTION_DEV_CAN_PUSH ? true : false
@project.protected_branches.create({ name: @project.default_branch, developers_can_push: developers_can_push })
end
end
# Extract any GFM references from the pushed commit messages. If the configured issue-closing regex is matched, # Extract any GFM references from the pushed commit messages. If the configured issue-closing regex is matched,
# close the referenced Issue. Create cross-reference Notes corresponding to any other referenced Mentionables. # close the referenced Issue. Create cross-reference Notes corresponding to any other referenced Mentionables.
def process_commit_messages(ref) def process_commit_messages
is_default_branch = is_default_branch?(ref) is_default_branch = is_default_branch?
authors = Hash.new do |hash, commit| authors = Hash.new do |hash, commit|
email = commit.author_email email = commit.author_email
...@@ -94,7 +94,7 @@ class GitPushService ...@@ -94,7 +94,7 @@ class GitPushService
# Close issues if these commits were pushed to the project's default branch and the commit message matches the # Close issues if these commits were pushed to the project's default branch and the commit message matches the
# closing regex. Exclude any mentioned Issues from cross-referencing even if the commits are being pushed to # closing regex. Exclude any mentioned Issues from cross-referencing even if the commits are being pushed to
# a different branch. # a different branch.
closed_issues = commit.closes_issues(user) closed_issues = commit.closes_issues(current_user)
closed_issues.each do |issue| closed_issues.each do |issue|
Issues::CloseService.new(project, authors[commit], {}).execute(issue, commit) Issues::CloseService.new(project, authors[commit], {}).execute(issue, commit)
end end
...@@ -104,34 +104,38 @@ class GitPushService ...@@ -104,34 +104,38 @@ class GitPushService
end end
end end
def build_push_data(oldrev, newrev, ref) def build_push_data
Gitlab::PushDataBuilder. @push_data ||= Gitlab::PushDataBuilder.
build(project, user, oldrev, newrev, ref, push_commits) build(@project, current_user, params[:oldrev], params[:newrev], params[:ref], push_commits)
end end
def push_to_existing_branch?(ref, oldrev) def push_to_existing_branch?
# Return if this is not a push to a branch (e.g. new commits) # Return if this is not a push to a branch (e.g. new commits)
Gitlab::Git.branch_ref?(ref) && !Gitlab::Git.blank_ref?(oldrev) Gitlab::Git.branch_ref?(params[:ref]) && !Gitlab::Git.blank_ref?(params[:oldrev])
end end
def push_to_new_branch?(ref, oldrev) def push_to_new_branch?
Gitlab::Git.branch_ref?(ref) && Gitlab::Git.blank_ref?(oldrev) Gitlab::Git.branch_ref?(params[:ref]) && Gitlab::Git.blank_ref?(params[:oldrev])
end end
def push_remove_branch?(ref, newrev) def push_remove_branch?
Gitlab::Git.branch_ref?(ref) && Gitlab::Git.blank_ref?(newrev) Gitlab::Git.branch_ref?(params[:ref]) && Gitlab::Git.blank_ref?(params[:newrev])
end end
def push_to_branch?(ref) def push_to_branch?
Gitlab::Git.branch_ref?(ref) Gitlab::Git.branch_ref?(params[:ref])
end end
def is_default_branch?(ref) def is_default_branch?
Gitlab::Git.branch_ref?(ref) && Gitlab::Git.branch_ref?(params[:ref]) &&
(Gitlab::Git.ref_name(ref) == project.default_branch || project.default_branch.nil?) (Gitlab::Git.ref_name(params[:ref]) == project.default_branch || project.default_branch.nil?)
end end
def commit_user(commit) def commit_user(commit)
commit.author || user commit.author || current_user
end
def branch_name
@branch_name ||= Gitlab::Git.ref_name(params[:ref])
end end
end end
@@ -16,11 +16,15 @@ module Projects
      return false unless can?(current_user, :remove_project, project)

      project.team.truncate
-      project.repository.expire_cache unless project.empty_repo?

      repo_path = project.path_with_namespace
      wiki_path = repo_path + '.wiki'

+      # Flush the cache for both repositories. This has to be done _before_
+      # removing the physical repositories as some expiration code depends on
+      # Git data (e.g. a list of branch names).
+      flush_caches(project, wiki_path)
+
      Project.transaction do
        project.destroy!
@@ -70,5 +74,13 @@ module Projects
    def removal_path(path)
      "#{path}+#{project.id}#{DELETED_FLAG}"
    end
+
+    def flush_caches(project, wiki_path)
+      project.repository.expire_all_caches! if project.repository.exists?
+
+      wiki_repo = Repository.new(wiki_path, project)
+      wiki_repo.expire_all_caches! if wiki_repo.exists?
+    end
  end
end
@@ -36,7 +36,7 @@
      = render "download", blob: blob
  - elsif blob.text?
    - if blob_svg?(blob)
-      = render "image", blob: sanitize_svg(blob)
+      = render "image", blob: blob
    - else
      = render "text", blob: blob
- elsif blob.image?
...
.file-content.image_file
-  %img{ src: namespace_project_raw_path(@project.namespace, @project, @id)}
+  - if blob_svg?(blob)
+    - # We need to scrub SVG but we cannot do so in the RawController: it would
+    - # be wrong/strange if RawController modified the data.
+    - blob.load_all_data!(@repository)
+    - blob = sanitize_svg(blob)
+    %img{src: "data:#{blob.mime_type};base64,#{Base64.encode64(blob.data)}"}
+  - else
+    %img{src: namespace_project_raw_path(@project.namespace, @project, @id)}
- diff = diff_file.diff - diff = diff_file.diff
- file.load_all_data!(@project.repository) - file_raw_path = namespace_project_raw_path(@project.namespace, @project, tree_join(@commit.id, diff.new_path))
- old_file_raw_path = namespace_project_raw_path(@project.namespace, @project, tree_join(@commit.parent_id, diff.old_path))
- if diff.renamed_file || diff.new_file || diff.deleted_file - if diff.renamed_file || diff.new_file || diff.deleted_file
.image .image
%span.wrap %span.wrap
.frame{class: image_diff_class(diff)} .frame{class: image_diff_class(diff)}
%img{src: "data:#{file.mime_type};base64,#{Base64.encode64(file.data)}"} %img{src: diff.deleted_file ? old_file_raw_path : file_raw_path}
%p.image-info= "#{number_to_human_size file.size}" %p.image-info= "#{number_to_human_size file.size}"
- else - else
- old_file.load_all_data!(@project.repository)
.image .image
%div.two-up.view %div.two-up.view
%span.wrap %span.wrap
.frame.deleted .frame.deleted
%a{href: namespace_project_blob_path(@project.namespace, @project, tree_join(@commit.parent_id, diff.old_path))} %a{href: namespace_project_blob_path(@project.namespace, @project, tree_join(@commit.parent_id, diff.old_path))}
%img{src: "data:#{old_file.mime_type};base64,#{Base64.encode64(old_file.data)}"} %img{src: old_file_raw_path}
%p.image-info.hide %p.image-info.hide
%span.meta-filesize= "#{number_to_human_size old_file.size}" %span.meta-filesize= "#{number_to_human_size old_file.size}"
| |
...@@ -25,7 +25,7 @@ ...@@ -25,7 +25,7 @@
%span.wrap %span.wrap
.frame.added .frame.added
%a{href: namespace_project_blob_path(@project.namespace, @project, tree_join(@commit.id, diff.new_path))} %a{href: namespace_project_blob_path(@project.namespace, @project, tree_join(@commit.id, diff.new_path))}
%img{src: "data:#{file.mime_type};base64,#{Base64.encode64(file.data)}"} %img{src: file_raw_path}
%p.image-info.hide %p.image-info.hide
%span.meta-filesize= "#{number_to_human_size file.size}" %span.meta-filesize= "#{number_to_human_size file.size}"
| |
...@@ -38,10 +38,10 @@ ...@@ -38,10 +38,10 @@
%div.swipe.view.hide %div.swipe.view.hide
.swipe-frame .swipe-frame
.frame.deleted .frame.deleted
%img{src: "data:#{old_file.mime_type};base64,#{Base64.encode64(old_file.data)}"} %img{src: old_file_raw_path}
.swipe-wrap .swipe-wrap
.frame.added .frame.added
%img{src: "data:#{file.mime_type};base64,#{Base64.encode64(file.data)}"} %img{src: file_raw_path}
%span.swipe-bar %span.swipe-bar
%span.top-handle %span.top-handle
%span.bottom-handle %span.bottom-handle
...@@ -49,9 +49,9 @@ ...@@ -49,9 +49,9 @@
%div.onion-skin.view.hide %div.onion-skin.view.hide
.onion-skin-frame .onion-skin-frame
.frame.deleted .frame.deleted
%img{src: "data:#{old_file.mime_type};base64,#{Base64.encode64(old_file.data)}"} %img{src: old_file_raw_path}
.frame.added .frame.added
%img{src: "data:#{file.mime_type};base64,#{Base64.encode64(file.data)}"} %img{src: file_raw_path}
.controls .controls
.transparent .transparent
.drag-track .drag-track
......
@@ -119,13 +119,13 @@
  .col-sm-offset-2.col-sm-10
    %p Get recent application code using the following command:
    .radio
-      = f.label :build_allow_git_fetch do
+      = f.label :build_allow_git_fetch_false do
        = f.radio_button :build_allow_git_fetch, 'false'
        %strong git clone
        %br
        %span.descr Slower but makes sure you have a clean dir before every build
    .radio
-      = f.label :build_allow_git_fetch do
+      = f.label :build_allow_git_fetch_true do
        = f.radio_button :build_allow_git_fetch, 'true'
        %strong git fetch
        %br
...
@@ -15,6 +15,25 @@
      %li
        = link_to_member(@project, issue.assignee, name: false, title: "Assigned to :name")

+    - upvotes, downvotes = issue.upvotes, issue.downvotes
+    - if upvotes > 0 || downvotes > 0
+      %li
+        = icon('thumbs-up')
+        = upvotes
+    - else
+      %li{ class: 'issue-no-votes' }
+        = icon('thumbs-up')
+        = upvotes
+
+    - if upvotes > 0 || downvotes > 0
+      %li
+        = icon('thumbs-down')
+        = downvotes
+    - else
+      %li{ class: 'issue-no-votes' }
+        = icon('thumbs-down')
+        = downvotes
+
    - note_count = issue.notes.user.count
    - if note_count > 0
      %li
...
$('aside.right-sidebar')[0].outerHTML = "#{escape_javascript(render 'shared/issuable/sidebar', issuable: @issue)}";
$('aside.right-sidebar').effect('highlight');
-new Issue();
\ No newline at end of file
+new IssuableContext();
@@ -24,6 +24,25 @@
      %li
        = link_to_member(merge_request.source_project, merge_request.assignee, name: false, title: "Assigned to :name")

+    - upvotes, downvotes = merge_request.upvotes, merge_request.downvotes
+    - if upvotes > 0 || downvotes > 0
+      %li
+        = icon('thumbs-up')
+        = upvotes
+    - else
+      %li{ class: 'merge-request-no-votes' }
+        = icon('thumbs-up')
+        = upvotes
+
+    - if upvotes > 0 || downvotes > 0
+      %li
+        = icon('thumbs-down')
+        = downvotes
+    - else
+      %li{ class: 'merge-request-no-votes' }
+        = icon('thumbs-down')
+        = downvotes
+
    - note_count = merge_request.mr_and_commit_notes.user.count
    - if note_count > 0
      %li
...
-$('aside.right-sidebar')[0].outerHTML= "#{escape_javascript(render 'shared/issuable/sidebar', issuable: @merge_request)}";
-$('aside.right-sidebar').effect('highlight')
-merge_request = new MergeRequest();
+$('aside.right-sidebar')[0].outerHTML = "#{escape_javascript(render 'shared/issuable/sidebar', issuable: @merge_request)}";
+$('aside.right-sidebar').effect('highlight');
+new IssuableContext();
@@ -20,3 +20,7 @@
        = sort_title_milestone_soon
      = link_to page_filter_path(sort: sort_value_milestone_later) do
        = sort_title_milestone_later
+      = link_to page_filter_path(sort: sort_value_upvotes) do
+        = sort_title_upvotes
+      = link_to page_filter_path(sort: sort_value_downvotes) do
+        = sort_title_downvotes
@@ -47,7 +47,7 @@
  .block.milestone
    .sidebar-collapsed-icon
-      = icon('balance-scale')
+      = icon('clock-o')
    %span
      - if issuable.milestone
        = issuable.milestone.title
...
@@ -38,7 +38,7 @@ class PostReceive
        if Gitlab::Git.tag_ref?(ref)
          GitTagPushService.new.execute(project, @user, oldrev, newrev, ref)
        else
-          GitPushService.new.execute(project, @user, oldrev, newrev, ref)
+          GitPushService.new(project, @user, oldrev: oldrev, newrev: newrev, ref: ref).execute
        end
      end
    end
...
@@ -27,6 +27,7 @@ class RepositoryForkWorker
        return
      end

+      project.repository.expire_emptiness_caches
      project.import_finish
    end
  end
@@ -18,6 +18,7 @@ class RepositoryImportWorker
      return
    end

+    project.repository.expire_emptiness_caches
    project.import_finish
  end
end
@@ -428,8 +428,30 @@ artifacts:
    - binaries/
```

-The artifacts will be send after a successful build success to GitLab, and will
-be accessible in the GitLab UI to download.
+You may want to create artifacts only for tagged releases to avoid filling the
+build server storage with temporary build artifacts.
+
+Create artifacts only for tags (`default-job` will not create artifacts):
+
+```yaml
+default-job:
+  script:
+    - mvn test -U
+  except:
+    - tags
+
+release-job:
+  script:
+    - mvn package -U
+  artifacts:
+    paths:
+    - target/*.war
+  only:
+    - tags
+```
+
+The artifacts will be sent to GitLab after a successful build and will
+be available for download in the GitLab UI.

### cache
...
@@ -15,7 +15,7 @@ or inconsistencies and guard for that. Try to make as little assumptions as poss
about the state of the database.

Please don't depend on GitLab specific code since it can change in future versions.
-If needed copy-paste GitLab code into the migration to make make it forward compatible.
+If needed copy-paste GitLab code into the migration to make it forward compatible.

## Comments in the migration
...
@@ -353,7 +353,7 @@ GitLab Shell is an SSH access and repository management software developed speci
    cd /home/git
    sudo -u git -H git clone https://gitlab.com/gitlab-org/gitlab-workhorse.git
    cd gitlab-workhorse
-    sudo -u git -H git checkout 0.6.4
+    sudo -u git -H git checkout 0.6.5
    sudo -u git -H make

### Initialize Database and Activate Advanced Features
...
@@ -82,6 +82,32 @@ There are new configuration options available for [`gitlab.yml`](config/gitlab.y
git diff origin/8-4-stable:config/gitlab.yml.example origin/8-5-stable:config/gitlab.yml.example
```

+#### Nginx configuration
+
+Ensure you're still up-to-date with the latest NGINX configuration changes:
+
+```sh
+# For HTTPS configurations
+git diff origin/8-4-stable:lib/support/nginx/gitlab-ssl origin/8-5-stable:lib/support/nginx/gitlab-ssl
+
+# For HTTP configurations
+git diff origin/8-4-stable:lib/support/nginx/gitlab origin/8-5-stable:lib/support/nginx/gitlab
+```
+
+If you are using Apache instead of NGINX please see the updated [Apache templates].
+Also note that because Apache does not support upstreams behind Unix sockets you
+will need to let gitlab-workhorse listen on a TCP port. You can do this
+via [/etc/default/gitlab].
+
+[Apache templates]: https://gitlab.com/gitlab-org/gitlab-recipes/tree/master/web-server/apache
+[/etc/default/gitlab]: https://gitlab.com/gitlab-org/gitlab-ce/blob/8-5-stable/lib/support/init.d/gitlab.default.example#L37
+
+#### Init script
+
+Ensure you're still up-to-date with the latest init script changes:
+
+    sudo cp lib/support/init.d/gitlab /etc/init.d/gitlab
+
### 8. Start application

    sudo service gitlab start
...
@@ -88,6 +88,16 @@ Feature: Project Issues
    And I visit dashboard merge requests page
    Then The list should be sorted by "Oldest updated"

+  @javascript
+  Scenario: Sort issues by upvotes/downvotes
+    Given project "Shop" have "Bugfix" open issue
+    And issue "Release 0.4" have 2 upvotes and 1 downvote
+    And issue "Tweet control" have 1 upvote and 2 downvotes
+    And I sort the list by "Most popular"
+    Then The list should be sorted by "Most popular"
+    And I sort the list by "Least popular"
+    Then The list should be sorted by "Least popular"
+
  @javascript
  Scenario: I search issue
    Given I fill in issue search with "Re"
...
@@ -107,6 +107,17 @@ Feature: Project Merge Requests
    And I visit dashboard merge requests page
    Then The list should be sorted by "Oldest updated"

+  @javascript
+  Scenario: Sort merge requests by upvotes/downvotes
+    Given project "Shop" have "Bug NS-05" open merge request with diffs inside
+    And project "Shop" have "Bug NS-06" open merge request
+    And merge request "Bug NS-04" have 2 upvotes and 1 downvote
+    And merge request "Bug NS-06" have 1 upvote and 2 downvotes
+    And I sort the list by "Most popular"
+    Then The list should be sorted by "Most popular"
+    And I sort the list by "Least popular"
+    Then The list should be sorted by "Least popular"
+
  @javascript
  Scenario: Visiting Merge Requests after commenting on diffs
    Given project "Shop" have "Bug NS-05" open merge request with diffs inside
...
...@@ -174,6 +174,13 @@ class Spinach::Features::ProjectIssues < Spinach::FeatureSteps ...@@ -174,6 +174,13 @@ class Spinach::Features::ProjectIssues < Spinach::FeatureSteps
author: project.users.first) author: project.users.first)
end end
step 'project "Shop" have "Bugfix" open issue' do
create(:issue,
title: "Bugfix",
project: project,
author: project.users.first)
end
step 'project "Shop" have "Release 0.3" closed issue' do step 'project "Shop" have "Release 0.3" closed issue' do
create(:closed_issue, create(:closed_issue,
title: "Release 0.3", title: "Release 0.3",
...@@ -181,6 +188,56 @@ class Spinach::Features::ProjectIssues < Spinach::FeatureSteps ...@@ -181,6 +188,56 @@ class Spinach::Features::ProjectIssues < Spinach::FeatureSteps
author: project.users.first) author: project.users.first)
end end
step 'issue "Release 0.4" have 2 upvotes and 1 downvote' do
issue = Issue.find_by(title: 'Release 0.4')
create_list(:upvote_note, 2, project: project, noteable: issue)
create(:downvote_note, project: project, noteable: issue)
end
step 'issue "Tweet control" have 1 upvote and 2 downvotes' do
issue = Issue.find_by(title: 'Tweet control')
create(:upvote_note, project: project, noteable: issue)
create_list(:downvote_note, 2, project: project, noteable: issue)
end
step 'The list should be sorted by "Least popular"' do
page.within '.issues-list' do
page.within 'li.issue:nth-child(1)' do
expect(page).to have_content 'Tweet control'
expect(page).to have_content '1 2'
end
page.within 'li.issue:nth-child(2)' do
expect(page).to have_content 'Release 0.4'
expect(page).to have_content '2 1'
end
page.within 'li.issue:nth-child(3)' do
expect(page).to have_content 'Bugfix'
expect(page).to have_content '0 0'
end
end
end
step 'The list should be sorted by "Most popular"' do
page.within '.issues-list' do
page.within 'li.issue:nth-child(1)' do
expect(page).to have_content 'Release 0.4'
expect(page).to have_content '2 1'
end
page.within 'li.issue:nth-child(2)' do
expect(page).to have_content 'Tweet control'
expect(page).to have_content '1 2'
end
page.within 'li.issue:nth-child(3)' do
expect(page).to have_content 'Bugfix'
expect(page).to have_content '0 0'
end
end
end
step 'empty project "Empty Project"' do step 'empty project "Empty Project"' do
create :empty_project, name: 'Empty Project', namespace: @user.namespace create :empty_project, name: 'Empty Project', namespace: @user.namespace
end end
......
...@@ -138,6 +138,56 @@ class Spinach::Features::ProjectMergeRequests < Spinach::FeatureSteps ...@@ -138,6 +138,56 @@ class Spinach::Features::ProjectMergeRequests < Spinach::FeatureSteps
author: project.users.first) author: project.users.first)
end end
step 'merge request "Bug NS-04" have 2 upvotes and 1 downvote' do
merge_request = MergeRequest.find_by(title: 'Bug NS-04')
create_list(:upvote_note, 2, project: project, noteable: merge_request)
create(:downvote_note, project: project, noteable: merge_request)
end
step 'merge request "Bug NS-06" have 1 upvote and 2 downvotes' do
merge_request = MergeRequest.find_by(title: 'Bug NS-06')
create(:upvote_note, project: project, noteable: merge_request)
create_list(:downvote_note, 2, project: project, noteable: merge_request)
end
step 'The list should be sorted by "Least popular"' do
page.within '.mr-list' do
page.within 'li.merge-request:nth-child(1)' do
expect(page).to have_content 'Bug NS-06'
expect(page).to have_content '1 2'
end
page.within 'li.merge-request:nth-child(2)' do
expect(page).to have_content 'Bug NS-04'
expect(page).to have_content '2 1'
end
page.within 'li.merge-request:nth-child(3)' do
expect(page).to have_content 'Bug NS-05'
expect(page).to have_content '0 0'
end
end
end
step 'The list should be sorted by "Most popular"' do
page.within '.mr-list' do
page.within 'li.merge-request:nth-child(1)' do
expect(page).to have_content 'Bug NS-04'
expect(page).to have_content '2 1'
end
page.within 'li.merge-request:nth-child(2)' do
expect(page).to have_content 'Bug NS-06'
expect(page).to have_content '1 2'
end
page.within 'li.merge-request:nth-child(3)' do
expect(page).to have_content 'Bug NS-05'
expect(page).to have_content '0 0'
end
end
end
step 'I click on the Changes tab' do step 'I click on the Changes tab' do
page.within '.merge-request-tabs' do page.within '.merge-request-tabs' do
click_link 'Changes' click_link 'Changes'
......
@@ -113,6 +113,22 @@ module SharedIssuable
    end
  end

+  step 'I sort the list by "Least popular"' do
+    find('button.dropdown-toggle.btn').click
+    page.within('ul.dropdown-menu.dropdown-menu-align-right li') do
+      click_link 'Least popular'
+    end
+  end
+
+  step 'I sort the list by "Most popular"' do
+    find('button.dropdown-toggle.btn').click
+    page.within('ul.dropdown-menu.dropdown-menu-align-right li') do
+      click_link 'Most popular'
+    end
+  end
+
  step 'The list should be sorted by "Oldest updated"' do
    page.within('div.dropdown.inline.prepend-left-10') do
      expect(page.find('button.dropdown-toggle.btn')).to have_content('Oldest updated')
...
@@ -98,11 +98,8 @@ module API
        authorize! :download_code, user_project

        begin
-          ArchiveRepositoryService.new(
-            user_project,
-            params[:sha],
-            params[:format]
-          ).execute
+          RepositoryArchiveCacheWorker.perform_async
+          header *Gitlab::Workhorse.send_git_archive(user_project, params[:sha], params[:format])
        rescue
          not_found!('File')
        end
...
module Ci
  class Status
    def self.get_status(statuses)
-      statuses.reject! { |status| status.try(&:allow_failure?) }
-
      if statuses.none?
        'skipped'
-      elsif statuses.all?(&:success?)
+      elsif statuses.all? { |status| status.success? || status.ignored? }
        'success'
      elsif statuses.all?(&:pending?)
        'pending'
...
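A hedged illustration of the intended behaviour: a failed build that is allowed to fail no longer prevents the stage from counting as successful. The `Struct` below is a toy stand-in for the real build status model, and `ignored?` is assumed to mean "failed but allowed to fail", as the new factory traits and specs later in this merge suggest.

```ruby
# Toy stand-in for a build status; not the real CommitStatus model.
Build = Struct.new(:status, :allow_failure) do
  def success?; status == 'success'; end
  def pending?; status == 'pending'; end
  def ignored?; status == 'failed' && allow_failure; end
end

builds = [Build.new('success', false), Build.new('failed', true)]
builds.all? { |b| b.success? || b.ignored? } # => true, so the stage counts as 'success'
```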
@@ -22,6 +22,8 @@ module Gitlab
      # }
      #
      def build(project, user, oldrev, newrev, ref, commits = [], message = nil)
+        commits = Array(commits)
+
        # Total commits count
        commits_count = commits.size
...
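The added `Array(commits)` guard makes a `nil` commits argument behave like an empty push, which is what the spec added near the end of this merge exercises. A quick illustration:

```ruby
Array(nil)       # => []          -> commits_count becomes 0 instead of raising
Array([:a, :b])  # => [:a, :b]    -> arrays pass through unchanged
```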
@@ -3,19 +3,38 @@ require 'json'

module Gitlab
  class Workhorse
+    SEND_DATA_HEADER = 'Gitlab-Workhorse-Send-Data'
+
    class << self
      def send_git_blob(repository, blob)
-        params_hash = {
+        params = {
          'RepoPath' => repository.path_to_repo,
          'BlobId' => blob.id,
        }
-        params = Base64.urlsafe_encode64(JSON.dump(params_hash))

        [
-          'Gitlab-Workhorse-Send-Data',
-          "git-blob:#{params}",
+          SEND_DATA_HEADER,
+          "git-blob:#{encode(params)}",
        ]
      end

+      def send_git_archive(project, ref, format)
+        format ||= 'tar.gz'
+        format.downcase!
+        params = project.repository.archive_metadata(ref, Gitlab.config.gitlab.repository_downloads_path, format)
+        raise "Repository or ref not found" if params.empty?
+
+        [
+          SEND_DATA_HEADER,
+          "git-archive:#{encode(params)}",
+        ]
+      end
+
+      protected
+
+      def encode(hash)
+        Base64.urlsafe_encode64(JSON.dump(hash))
+      end
    end
  end
end
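The API specs further down read this header back through a `workhorse_send_data` test helper whose implementation is not shown in this diff. A minimal sketch of such a helper, assuming it simply reverses the encoding above (the method signature here is hypothetical):

```ruby
require 'base64'
require 'json'

# Hypothetical helper (the real WorkhorseHelpers module is not part of this
# diff): split a Gitlab-Workhorse-Send-Data value into its type prefix and
# the Base64/JSON-encoded params hash.
def workhorse_send_data(header_value)
  type, encoded = header_value.split(':', 2)  # e.g. "git-archive:<base64 JSON>"
  [type, JSON.parse(Base64.urlsafe_decode64(encoded))]
end
```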
@@ -16,7 +16,6 @@ namespace :gitlab do
      check_git_config
      check_database_config_exists
-      check_database_is_not_sqlite
      check_migrations_are_up
      check_orphaned_group_members
      check_gitlab_config_exists
...
@@ -104,6 +104,18 @@ describe Projects::ImportsController do
        end
      end
    end
+
+    context 'when import never happened' do
+      before do
+        project.update_attribute(:import_status, :none)
+      end
+
+      it 'redirects to namespace_project_path' do
+        get :show, namespace_id: project.namespace.to_param, project_id: project.to_param
+
+        expect(response).to redirect_to namespace_project_path(project.namespace, project)
+      end
+    end
    end
  end
end
...@@ -123,6 +123,40 @@ describe Projects::MergeRequestsController do ...@@ -123,6 +123,40 @@ describe Projects::MergeRequestsController do
end end
end end
describe 'GET #index' do
def get_merge_requests
get :index,
namespace_id: project.namespace.to_param,
project_id: project.to_param,
state: 'opened'
end
context 'when filtering by opened state' do
context 'with opened merge requests' do
it 'should list those merge requests' do
get_merge_requests
expect(assigns(:merge_requests)).to include(merge_request)
end
end
context 'with reopened merge requests' do
before do
merge_request.close!
merge_request.reopen!
end
it 'should list those merge requests' do
get_merge_requests
expect(assigns(:merge_requests)).to include(merge_request)
end
end
end
end
describe 'GET diffs' do describe 'GET diffs' do
def go(format: 'html') def go(format: 'html')
get :diffs, get :diffs,
......
@@ -8,15 +8,10 @@ describe Projects::RepositoriesController do
    before do
      sign_in(user)
      project.team << [user, :developer]
-      allow(ArchiveRepositoryService).to receive(:new).and_return(service)
    end

-    let(:service) { ArchiveRepositoryService.new(project, "master", "zip") }
-
-    it "executes ArchiveRepositoryService" do
-      expect(ArchiveRepositoryService).to receive(:new).with(project, "master", "zip")
-      expect(service).to receive(:execute)
+    it "uses Gitlab::Workhorse" do
+      expect(Gitlab::Workhorse).to receive(:send_git_archive).with(project, "master", "zip")

      get :archive, namespace_id: project.namespace.path, project_id: project.path, ref: "master", format: "zip"
    end
@@ -24,7 +19,7 @@ describe Projects::RepositoriesController do
    context "when the service raises an error" do
      before do
-        allow(service).to receive(:execute).and_raise("Archive failed")
+        allow(Gitlab::Workhorse).to receive(:send_git_archive).and_raise("Archive failed")
      end

      it "renders Not Found" do
...
@@ -16,10 +16,30 @@ FactoryGirl.define do
    commit factory: :ci_commit

+    trait :success do
+      status 'success'
+    end
+
+    trait :failed do
+      status 'failed'
+    end
+
    trait :canceled do
      status 'canceled'
    end

+    trait :running do
+      status 'running'
+    end
+
+    trait :pending do
+      status 'pending'
+    end
+
+    trait :allowed_to_fail do
+      allow_failure true
+    end
+
    after(:build) do |build, evaluator|
      build.project = build.commit.project
    end
...
@@ -34,6 +34,8 @@ FactoryGirl.define do
    factory :note_on_merge_request_diff, traits: [:on_merge_request, :on_diff]
    factory :note_on_project_snippet, traits: [:on_project_snippet]
    factory :system_note, traits: [:system]
+    factory :downvote_note, traits: [:award, :downvote]
+    factory :upvote_note, traits: [:award, :upvote]

    trait :on_commit do
      project
@@ -65,6 +67,18 @@ FactoryGirl.define do
      system true
    end

+    trait :award do
+      is_award true
+    end
+
+    trait :downvote do
+      note "thumbsdown"
+    end
+
+    trait :upvote do
+      note "thumbsup"
+    end
+
    trait :with_attachment do
      attachment { fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "`/png") }
    end
...
require 'spec_helper'
describe Ci::Status do
describe '.get_status' do
subject { described_class.get_status(builds) }
context 'all builds successful' do
let(:builds) { Array.new(2) { create(:ci_build, :success) } }
it { is_expected.to eq 'success' }
end
context 'at least one build failed' do
let(:builds) { [create(:ci_build, :success), create(:ci_build, :failed)] }
it { is_expected.to eq 'failed' }
end
context 'at least one running' do
let(:builds) { [create(:ci_build, :success), create(:ci_build, :running)] }
it { is_expected.to eq 'running' }
end
context 'at least one pending' do
let(:builds) { [create(:ci_build, :success), create(:ci_build, :pending)] }
it { is_expected.to eq 'running' }
end
context 'build success and failed but allowed to fail' do
let(:builds) { [create(:ci_build, :success), create(:ci_build, :failed, :allowed_to_fail)] }
it { is_expected.to eq 'success' }
end
context 'one build failed but allowed to fail' do
let(:builds) { [create(:ci_build, :failed, :allowed_to_fail)] }
it { is_expected.to eq 'success' }
end
end
end
require 'spec_helper' require 'spec_helper'
describe 'Gitlab::PushDataBuilder', lib: true do describe Gitlab::PushDataBuilder, lib: true do
let(:project) { create(:project) } let(:project) { create(:project) }
let(:user) { create(:user) } let(:user) { create(:user) }
describe :build_sample do describe '.build_sample' do
let(:data) { Gitlab::PushDataBuilder.build_sample(project, user) } let(:data) { described_class.build_sample(project, user) }
it { expect(data).to be_a(Hash) } it { expect(data).to be_a(Hash) }
it { expect(data[:before]).to eq('6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') } it { expect(data[:before]).to eq('6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') }
...@@ -22,13 +22,11 @@ describe 'Gitlab::PushDataBuilder', lib: true do ...@@ -22,13 +22,11 @@ describe 'Gitlab::PushDataBuilder', lib: true do
include_examples 'deprecated repository hook data' include_examples 'deprecated repository hook data'
end end
describe :build do describe '.build' do
let(:data) do let(:data) do
Gitlab::PushDataBuilder.build(project, described_class.build(project, user, Gitlab::Git::BLANK_SHA,
user, '8a2a6eb295bb170b34c24c76c49ed0e9b2eaf34b',
Gitlab::Git::BLANK_SHA, 'refs/tags/v1.1.0')
'8a2a6eb295bb170b34c24c76c49ed0e9b2eaf34b',
'refs/tags/v1.1.0')
end end
it { expect(data).to be_a(Hash) } it { expect(data).to be_a(Hash) }
...@@ -38,5 +36,10 @@ describe 'Gitlab::PushDataBuilder', lib: true do ...@@ -38,5 +36,10 @@ describe 'Gitlab::PushDataBuilder', lib: true do
it { expect(data[:ref]).to eq('refs/tags/v1.1.0') } it { expect(data[:ref]).to eq('refs/tags/v1.1.0') }
it { expect(data[:commits]).to be_empty } it { expect(data[:commits]).to be_empty }
it { expect(data[:total_commits_count]).to be_zero } it { expect(data[:total_commits_count]).to be_zero }
it 'does not raise an error when given nil commits' do
expect { described_class.build(spy, spy, spy, spy, spy, nil) }.
not_to raise_error
end
end end
end end
require 'spec_helper'

-describe ArchiveRepositoryService, services: true do
+describe Gitlab::Workhorse, lib: true do
  let(:project) { create(:project) }
-  subject { ArchiveRepositoryService.new(project, "master", "zip") }
-
-  describe "#execute" do
-    it "cleans old archives" do
-      expect(RepositoryArchiveCacheWorker).to receive(:perform_async)
-
-      subject.execute(timeout: 0.0)
-    end
+  let(:subject) { Gitlab::Workhorse }

+  describe "#send_git_archive" do
    context "when the repository doesn't have an archive file path" do
      before do
        allow(project.repository).to receive(:archive_metadata).and_return(Hash.new)
      end

      it "raises an error" do
-        expect { subject.execute(timeout: 0.0) }.to raise_error(RuntimeError)
+        expect { subject.send_git_archive(project, "master", "zip") }.to raise_error(RuntimeError)
      end
    end
  end
end
...@@ -247,6 +247,35 @@ describe Ci::Commit, models: true do ...@@ -247,6 +247,35 @@ describe Ci::Commit, models: true do
end end
end end
context 'custom stage with first job allowed to fail' do
let(:yaml) do
{
stages: ['clean', 'test'],
clean_job: {
stage: 'clean',
allow_failure: true,
script: 'BUILD',
},
test_job: {
stage: 'test',
script: 'TEST',
},
}
end
before do
stub_ci_commit_yaml_file(YAML.dump(yaml))
create_builds
end
it 'properly schedules builds' do
expect(commit.builds.pluck(:status)).to contain_exactly('pending')
commit.builds.running_or_pending.each(&:drop)
expect(commit.builds.pluck(:status)).to contain_exactly('pending', 'failed')
end
end
context 'properly creates builds when "when" is defined' do context 'properly creates builds when "when" is defined' do
let(:yaml) do let(:yaml) do
{ {
......
@@ -355,6 +355,17 @@ describe Repository, models: true do
    end
  end

+  describe '#expire_emptiness_caches' do
+    let(:cache) { repository.send(:cache) }
+
+    it 'expires the caches' do
+      expect(cache).to receive(:expire).with(:empty?)
+      expect(repository).to receive(:expire_has_visible_content_cache)
+
+      repository.expire_emptiness_caches
+    end
+  end
+
  describe :skip_merged_commit do
    subject { repository.commits(Gitlab::Git::BRANCH_REF_PREFIX + "'test'", nil, 100, 0, true).map{ |k| k.id } }
...
...@@ -4,6 +4,7 @@ require 'mime/types' ...@@ -4,6 +4,7 @@ require 'mime/types'
describe API::API, api: true do describe API::API, api: true do
include ApiHelpers include ApiHelpers
include RepoHelpers include RepoHelpers
include WorkhorseHelpers
let(:user) { create(:user) } let(:user) { create(:user) }
let(:user2) { create(:user) } let(:user2) { create(:user) }
...@@ -91,21 +92,27 @@ describe API::API, api: true do ...@@ -91,21 +92,27 @@ describe API::API, api: true do
get api("/projects/#{project.id}/repository/archive", user) get api("/projects/#{project.id}/repository/archive", user)
repo_name = project.repository.name.gsub("\.git", "") repo_name = project.repository.name.gsub("\.git", "")
expect(response.status).to eq(200) expect(response.status).to eq(200)
expect(json_response['ArchivePath']).to match(/#{repo_name}\-[^\.]+\.tar.gz/) type, params = workhorse_send_data
expect(type).to eq('git-archive')
expect(params['ArchivePath']).to match(/#{repo_name}\-[^\.]+\.tar.gz/)
end end
it "should get the archive.zip" do it "should get the archive.zip" do
get api("/projects/#{project.id}/repository/archive.zip", user) get api("/projects/#{project.id}/repository/archive.zip", user)
repo_name = project.repository.name.gsub("\.git", "") repo_name = project.repository.name.gsub("\.git", "")
expect(response.status).to eq(200) expect(response.status).to eq(200)
expect(json_response['ArchivePath']).to match(/#{repo_name}\-[^\.]+\.zip/) type, params = workhorse_send_data
expect(type).to eq('git-archive')
expect(params['ArchivePath']).to match(/#{repo_name}\-[^\.]+\.zip/)
end end
it "should get the archive.tar.bz2" do it "should get the archive.tar.bz2" do
get api("/projects/#{project.id}/repository/archive.tar.bz2", user) get api("/projects/#{project.id}/repository/archive.tar.bz2", user)
repo_name = project.repository.name.gsub("\.git", "") repo_name = project.repository.name.gsub("\.git", "")
expect(response.status).to eq(200) expect(response.status).to eq(200)
expect(json_response['ArchivePath']).to match(/#{repo_name}\-[^\.]+\.tar.bz2/) type, params = workhorse_send_data
expect(type).to eq('git-archive')
expect(params['ArchivePath']).to match(/#{repo_name}\-[^\.]+\.tar.bz2/)
end end
it "should return 404 for invalid sha" do it "should return 404 for invalid sha" do
......
require 'spec_helper'
describe Ci::CreateBuildsService, services: true do
let(:commit) { create(:ci_commit) }
let(:user) { create(:user) }
describe '#execute' do
# Using stubbed .gitlab-ci.yml created in commit factory
#
subject do
described_class.new.execute(commit, 'test', 'master', nil, user, nil, status)
end
context 'next builds available' do
let(:status) { 'success' }
it { is_expected.to be_an_instance_of Array }
it { is_expected.to all(be_an_instance_of Ci::Build) }
end
context 'builds skipped' do
let(:status) { 'skipped' }
it { is_expected.to be_empty }
end
end
end
...@@ -5,7 +5,6 @@ describe GitPushService, services: true do
  let(:user) { create :user }
  let(:project) { create :project }
- let(:service) { GitPushService.new }

  before do
    @blankrev = Gitlab::Git::BLANK_SHA
...@@ -15,10 +14,17 @@ describe GitPushService, services: true do
  end

  describe 'Push branches' do
+   let(:oldrev) { @oldrev }
+   let(:newrev) { @newrev }
+
+   subject do
+     execute_service(project, user, oldrev, newrev, @ref)
+   end
+
    context 'new branch' do
-     subject do
-       service.execute(project, user, @blankrev, @newrev, @ref)
-     end
+     let(:oldrev) { @blankrev }

      it { is_expected.to be_truthy }
...@@ -36,9 +42,6 @@ describe GitPushService, services: true do
    end

    context 'existing branch' do
-     subject do
-       service.execute(project, user, @oldrev, @newrev, @ref)
-     end

      it { is_expected.to be_truthy }
...@@ -50,9 +53,8 @@ describe GitPushService, services: true do
    end

    context 'rm branch' do
-     subject do
-       service.execute(project, user, @oldrev, @blankrev, @ref)
-     end
+     let(:newrev) { @blankrev }

      it { is_expected.to be_truthy }
...@@ -72,7 +74,7 @@ describe GitPushService, services: true do
  describe "Git Push Data" do
    before do
-     service.execute(project, user, @oldrev, @newrev, @ref)
+     service = execute_service(project, user, @oldrev, @newrev, @ref)
      @push_data = service.push_data
      @commit = project.commit(@newrev)
    end
...@@ -134,20 +136,21 @@ describe GitPushService, services: true do
  describe "Push Event" do
    before do
-     service.execute(project, user, @oldrev, @newrev, @ref)
+     service = execute_service(project, user, @oldrev, @newrev, @ref)
      @event = Event.last
+     @push_data = service.push_data
    end

    it { expect(@event).not_to be_nil }
    it { expect(@event.project).to eq(project) }
    it { expect(@event.action).to eq(Event::PUSHED) }
-   it { expect(@event.data).to eq(service.push_data) }
+   it { expect(@event.data).to eq(@push_data) }

    context "Updates merge requests" do
      it "when pushing a new branch for the first time" do
        expect(project).to receive(:update_merge_requests).
          with(@blankrev, 'newrev', 'refs/heads/master', user)
-       service.execute(project, user, @blankrev, 'newrev', 'refs/heads/master')
+       execute_service(project, user, @blankrev, 'newrev', 'refs/heads/master')
      end
    end
  end
...@@ -158,7 +161,7 @@ describe GitPushService, services: true do
      expect(project).to receive(:execute_hooks)
      expect(project.default_branch).to eq("master")
      expect(project.protected_branches).to receive(:create).with({ name: "master", developers_can_push: false })
-     service.execute(project, user, @blankrev, 'newrev', 'refs/heads/master')
+     execute_service(project, user, @blankrev, 'newrev', 'refs/heads/master')
    end

    it "when pushing a branch for the first time with default branch protection disabled" do
...@@ -167,7 +170,7 @@ describe GitPushService, services: true do
      expect(project).to receive(:execute_hooks)
      expect(project.default_branch).to eq("master")
      expect(project.protected_branches).not_to receive(:create)
-     service.execute(project, user, @blankrev, 'newrev', 'refs/heads/master')
+     execute_service(project, user, @blankrev, 'newrev', 'refs/heads/master')
    end

    it "when pushing a branch for the first time with default branch protection set to 'developers can push'" do
...@@ -176,12 +179,12 @@ describe GitPushService, services: true do
      expect(project).to receive(:execute_hooks)
      expect(project.default_branch).to eq("master")
      expect(project.protected_branches).to receive(:create).with({ name: "master", developers_can_push: true })
-     service.execute(project, user, @blankrev, 'newrev', 'refs/heads/master')
+     execute_service(project, user, @blankrev, 'newrev', 'refs/heads/master')
    end

    it "when pushing new commits to existing branch" do
      expect(project).to receive(:execute_hooks)
-     service.execute(project, user, 'oldrev', 'newrev', 'refs/heads/master')
+     execute_service(project, user, 'oldrev', 'newrev', 'refs/heads/master')
    end
  end
end
...@@ -204,7 +207,7 @@ describe GitPushService, services: true do
    it "creates a note if a pushed commit mentions an issue" do
      expect(SystemNoteService).to receive(:cross_reference).with(issue, commit, commit_author)
-     service.execute(project, user, @oldrev, @newrev, @ref)
+     execute_service(project, user, @oldrev, @newrev, @ref)
    end

    it "only creates a cross-reference note if one doesn't already exist" do
...@@ -212,7 +215,7 @@ describe GitPushService, services: true do
      expect(SystemNoteService).not_to receive(:cross_reference).with(issue, commit, commit_author)
-     service.execute(project, user, @oldrev, @newrev, @ref)
+     execute_service(project, user, @oldrev, @newrev, @ref)
    end

    it "defaults to the pushing user if the commit's author is not known" do
...@@ -222,7 +225,7 @@ describe GitPushService, services: true do
      )

      expect(SystemNoteService).to receive(:cross_reference).with(issue, commit, user)
-     service.execute(project, user, @oldrev, @newrev, @ref)
+     execute_service(project, user, @oldrev, @newrev, @ref)
    end

    it "finds references in the first push to a non-default branch" do
...@@ -231,7 +234,7 @@ describe GitPushService, services: true do
      expect(SystemNoteService).to receive(:cross_reference).with(issue, commit, commit_author)
-     service.execute(project, user, @blankrev, @newrev, 'refs/heads/other')
+     execute_service(project, user, @blankrev, @newrev, 'refs/heads/other')
    end
  end
...@@ -255,18 +258,18 @@ describe GitPushService, services: true do
    context "to default branches" do
      it "closes issues" do
-       service.execute(project, user, @oldrev, @newrev, @ref)
+       execute_service(project, user, @oldrev, @newrev, @ref)
        expect(Issue.find(issue.id)).to be_closed
      end

      it "adds a note indicating that the issue is now closed" do
        expect(SystemNoteService).to receive(:change_status).with(issue, project, commit_author, "closed", closing_commit)
-       service.execute(project, user, @oldrev, @newrev, @ref)
+       execute_service(project, user, @oldrev, @newrev, @ref)
      end

      it "doesn't create additional cross-reference notes" do
        expect(SystemNoteService).not_to receive(:cross_reference)
-       service.execute(project, user, @oldrev, @newrev, @ref)
+       execute_service(project, user, @oldrev, @newrev, @ref)
      end

      it "doesn't close issues when external issue tracker is in use" do
...@@ -274,7 +277,7 @@ describe GitPushService, services: true do
        # The push still shouldn't create cross-reference notes.
        expect do
-         service.execute(project, user, @oldrev, @newrev, 'refs/heads/hurf')
+         execute_service(project, user, @oldrev, @newrev, 'refs/heads/hurf')
        end.not_to change { Note.where(project_id: project.id, system: true).count }
      end
    end
...@@ -287,11 +290,11 @@ describe GitPushService, services: true do
      it "creates cross-reference notes" do
        expect(SystemNoteService).to receive(:cross_reference).with(issue, closing_commit, commit_author)
-       service.execute(project, user, @oldrev, @newrev, @ref)
+       execute_service(project, user, @oldrev, @newrev, @ref)
      end

      it "doesn't close issues" do
-       service.execute(project, user, @oldrev, @newrev, @ref)
+       execute_service(project, user, @oldrev, @newrev, @ref)
        expect(Issue.find(issue.id)).to be_opened
      end
    end
...@@ -328,7 +331,7 @@ describe GitPushService, services: true do
      let(:message) { "this is some work.\n\nrelated to JIRA-1" }

      it "should initiate one api call to jira server to mention the issue" do
-       service.execute(project, user, @oldrev, @newrev, @ref)
+       execute_service(project, user, @oldrev, @newrev, @ref)

        expect(WebMock).to have_requested(:post, jira_api_comment_url).with(
          body: /mentioned this issue in/
...@@ -346,7 +349,7 @@ describe GitPushService, services: true do
          }
        }.to_json

-       service.execute(project, user, @oldrev, @newrev, @ref)
+       execute_service(project, user, @oldrev, @newrev, @ref)

        expect(WebMock).to have_requested(:post, jira_api_transition_url).with(
          body: transition_body
        ).once
...@@ -357,7 +360,7 @@ describe GitPushService, services: true do
          body: "Issue solved with [#{closing_commit.id}|http://localhost/#{project.path_with_namespace}/commit/#{closing_commit.id}]."
        }.to_json

-       service.execute(project, user, @oldrev, @newrev, @ref)
+       execute_service(project, user, @oldrev, @newrev, @ref)

        expect(WebMock).to have_requested(:post, jira_api_comment_url).with(
          body: comment_body
        ).once
...@@ -376,7 +379,13 @@ describe GitPushService, services: true do
    end

    it 'push to first branch updates HEAD' do
-     service.execute(project, user, @blankrev, @newrev, new_ref)
+     execute_service(project, user, @blankrev, @newrev, new_ref)
    end
  end

+ def execute_service(project, user, oldrev, newrev, ref)
+   service = described_class.new(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
+   service.execute
+   service
+ end
end
module WorkhorseHelpers
  extend self

  # Returns [type, params] decoded from the Gitlab::Workhorse::SEND_DATA_HEADER
  # response header, whose value has the form "<type>:<urlsafe-base64-encoded JSON>".
  def workhorse_send_data
    @_workhorse_send_data ||= begin
      header = response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]
      split_header = header.split(':')
      type = split_header.shift
      header = split_header.join(':')

      [
        type,
        JSON.parse(Base64.urlsafe_decode64(header)),
      ]
    end
  end
end
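For reference, a minimal standalone sketch (not part of this commit) of the decoding that workhorse_send_data performs, assuming a send-data header shaped like "<type>:<urlsafe-base64 JSON>"; the "gitlab-test.tar.gz" value is invented purely for illustration.

require 'json'
require 'base64'

# Build a header shaped like the one assumed above:
# "<type>:<urlsafe-base64-encoded JSON params>".
params = { 'ArchivePath' => 'gitlab-test.tar.gz' }
header = "git-archive:#{Base64.urlsafe_encode64(params.to_json)}"

# Decode it the same way the helper does: type before the first colon,
# JSON params after it.
split_header = header.split(':')
type         = split_header.shift
decoded      = JSON.parse(Base64.urlsafe_decode64(split_header.join(':')))

puts type                    # => "git-archive"
puts decoded['ArchivePath']  # => "gitlab-test.tar.gz"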
...@@ -19,6 +19,18 @@ describe RepositoryForkWorker do
                      fork_project.namespace.path)
    end

+   it 'flushes the empty caches' do
+     expect_any_instance_of(Gitlab::Shell).to receive(:fork_repository).
+       with(project.path_with_namespace, fork_project.namespace.path).
+       and_return(true)
+
+     expect_any_instance_of(Repository).to receive(:expire_emptiness_caches).
+       and_call_original
+
+     subject.perform(project.id, project.path_with_namespace,
+                     fork_project.namespace.path)
+   end
+
    it "handles bad fork" do
      expect_any_instance_of(Gitlab::Shell).to receive(:fork_repository).and_return(false)
      subject.perform(
...
require 'spec_helper'

describe RepositoryImportWorker do
  let(:project) { create(:project) }

  subject { described_class.new }

  describe '#perform' do
    it 'imports a project' do
      expect_any_instance_of(Projects::ImportService).to receive(:execute).
        and_return({ status: :ok })
      expect_any_instance_of(Repository).to receive(:expire_emptiness_caches)
      expect_any_instance_of(Project).to receive(:import_finish)

      subject.perform(project.id)
    end
  end
end