Commit 2d9670b5 authored by Matija Čupić

Merge remote-tracking branch 'origin/master' into list-multiple-clusters

parents fc4f3164 8c77ae2d
......@@ -586,6 +586,7 @@ codequality:
paths: [codeclimate.json]
qa:internal:
<<: *except-docs
stage: test
variables:
SETUP_DB: "false"
......
......@@ -298,18 +298,21 @@ import ProjectVariables from './project_variables';
break;
case 'projects:snippets:show':
initNotes();
new ZenMode();
break;
case 'projects:snippets:new':
case 'projects:snippets:edit':
case 'projects:snippets:create':
case 'projects:snippets:update':
new GLForm($('.snippet-form'), true);
new ZenMode();
break;
case 'snippets:new':
case 'snippets:edit':
case 'snippets:create':
case 'snippets:update':
new GLForm($('.snippet-form'), false);
new ZenMode();
break;
case 'projects:releases:edit':
new ZenMode();
......@@ -546,6 +549,7 @@ import ProjectVariables from './project_variables';
new LineHighlighter();
new BlobViewer();
initNotes();
new ZenMode();
break;
case 'import:fogbugz:new_user_map':
new UsersSelect();
......
......@@ -4,8 +4,8 @@ class Admin::AppearancesController < Admin::ApplicationController
def show
end
def preview
render 'preview', layout: 'devise'
def preview_sign_in
render 'preview_sign_in', layout: 'devise'
end
def create
......@@ -52,7 +52,7 @@ class Admin::AppearancesController < Admin::ApplicationController
def appearance_params
params.require(:appearance).permit(
:title, :description, :logo, :logo_cache, :header_logo, :header_logo_cache,
:updated_by
:new_project_guidelines, :updated_by
)
end
end
module AppearancesHelper
def brand_title
if brand_item && brand_item.title
brand_item.title
else
'GitLab Community Edition'
end
brand_item&.title.presence || 'GitLab Community Edition'
end
def brand_image
if brand_item.logo?
image_tag brand_item.logo
else
nil
end
image_tag(brand_item.logo) if brand_item&.logo?
end
def brand_text
markdown_field(brand_item, :description)
end
def brand_new_project_guidelines
markdown_field(brand_item, :new_project_guidelines)
end
def brand_item
@appearance ||= Appearance.current
end
def brand_header_logo
if brand_item && brand_item.header_logo?
if brand_item&.header_logo?
image_tag brand_item.header_logo
else
render 'shared/logo.svg'
......@@ -33,7 +29,7 @@ module AppearancesHelper
# Skip the 'GitLab' type logo when a custom brand logo is set
def brand_header_logo_type
unless brand_item && brand_item.header_logo?
unless brand_item&.header_logo?
render 'shared/logo_type.svg'
end
end
......
......@@ -2,9 +2,8 @@ class Appearance < ActiveRecord::Base
include CacheMarkdownField
cache_markdown_field :description
cache_markdown_field :new_project_guidelines
validates :title, presence: true
validates :description, presence: true
validates :logo, file_size: { maximum: 1.megabyte }
validates :header_logo, file_size: { maximum: 1.megabyte }
......
module Ci
class Build < CommitStatus
prepend ArtifactMigratable
include TokenAuthenticatable
include AfterCommitQueue
include Presentable
......@@ -10,9 +11,14 @@ module Ci
belongs_to :erased_by, class_name: 'User'
has_many :deployments, as: :deployable
has_one :last_deployment, -> { order('deployments.id DESC') }, as: :deployable, class_name: 'Deployment'
has_many :trace_sections, class_name: 'Ci::BuildTraceSection'
has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_one :job_artifacts_archive, -> { where(file_type: Ci::JobArtifact.file_types[:archive]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
has_one :job_artifacts_metadata, -> { where(file_type: Ci::JobArtifact.file_types[:metadata]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
# The "environment" field for builds is a String, and is the unexpanded name
def persisted_environment
@persisted_environment ||= Environment.find_by(
......@@ -31,15 +37,37 @@ module Ci
scope :unstarted, ->() { where(runner_id: nil) }
scope :ignore_failures, ->() { where(allow_failure: false) }
scope :with_artifacts, ->() { where.not(artifacts_file: [nil, '']) }
scope :with_artifacts, ->() do
where('(artifacts_file IS NOT NULL AND artifacts_file <> ?) OR EXISTS (?)',
'', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id'))
end
scope :with_artifacts_not_expired, ->() { with_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
scope :with_expired_artifacts, ->() { with_artifacts.where('artifacts_expire_at < ?', Time.now) }
scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + [:manual]) }
scope :ref_protected, -> { where(protected: true) }
mount_uploader :artifacts_file, ArtifactUploader
mount_uploader :artifacts_metadata, ArtifactUploader
scope :matches_tag_ids, -> (tag_ids) do
matcher = ::ActsAsTaggableOn::Tagging
.where(taggable_type: CommitStatus)
.where(context: 'tags')
.where('taggable_id = ci_builds.id')
.where.not(tag_id: tag_ids).select('1')
where("NOT EXISTS (?)", matcher)
end
scope :with_any_tags, -> do
matcher = ::ActsAsTaggableOn::Tagging
.where(taggable_type: CommitStatus)
.where(context: 'tags')
.where('taggable_id = ci_builds.id').select('1')
where("EXISTS (?)", matcher)
end
mount_uploader :legacy_artifacts_file, LegacyArtifactUploader, mount_on: :artifacts_file
mount_uploader :legacy_artifacts_metadata, LegacyArtifactUploader, mount_on: :artifacts_metadata
acts_as_taggable
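A rough sketch of what the two tag scopes above compile to, assuming a runner whose tag IDs are `[1, 2]`; the exact SQL depends on the adapter, but the `NOT EXISTS` anti-join is the key idea: a build is pickable only if none of its taggings fall outside the runner's tag set.

```ruby
# Illustrative console session; the generated SQL is approximated.
Ci::Build.matches_tag_ids([1, 2]).to_sql
# => SELECT "ci_builds".* FROM "ci_builds"
#    WHERE NOT EXISTS (
#      SELECT 1 FROM "taggings"
#      WHERE "taggings"."taggable_type" = 'CommitStatus'
#        AND "taggings"."context" = 'tags'
#        AND (taggable_id = ci_builds.id)
#        AND "taggings"."tag_id" NOT IN (1, 2))
```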
......@@ -326,14 +354,6 @@ module Ci
project.running_or_pending_build_count(force: true)
end
def artifacts?
!artifacts_expired? && artifacts_file.exists?
end
def artifacts_metadata?
artifacts? && artifacts_metadata.exists?
end
def artifacts_metadata_entry(path, **options)
metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
artifacts_metadata.path,
......@@ -386,6 +406,7 @@ module Ci
def keep_artifacts!
self.update(artifacts_expire_at: nil)
self.job_artifacts.update_all(expire_at: nil)
end
def coverage_regex
......@@ -473,11 +494,7 @@ module Ci
private
def update_artifacts_size
self.artifacts_size = if artifacts_file.exists?
artifacts_file.size
else
nil
end
self.artifacts_size = legacy_artifacts_file&.size
end
def erase_trace!
......
module Ci
class JobArtifact < ActiveRecord::Base
extend Gitlab::Ci::Model
belongs_to :project
belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id
before_save :set_size, if: :file_changed?
mount_uploader :file, JobArtifactUploader
enum file_type: {
archive: 1,
metadata: 2
}
def self.artifacts_size_for(project)
self.where(project: project).sum(:size)
end
def set_size
self.size = file.size
end
def expire_in
expire_at - Time.now if expire_at
end
def expire_in=(value)
self.expire_at =
if value
ChronicDuration.parse(value)&.seconds&.from_now
end
end
end
end
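A hedged sketch of the `expire_in` accessors above: `ChronicDuration.parse` turns a human-readable duration into seconds, and the safe-navigation chain leaves `expire_at` nil for nil input:

```ruby
# Illustrative usage only.
artifact = Ci::JobArtifact.new
artifact.expire_in = '7 days' # expire_at becomes roughly 7.days.from_now
artifact.expire_in            # => remaining seconds until expiry (Float)
artifact.expire_in = nil      # clears expire_at
```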
......@@ -112,7 +112,7 @@ module Ci
def can_pick?(build)
return false if self.ref_protected? && !build.protected?
assignable_for?(build.project) && accepting_tags?(build)
assignable_for?(build.project_id) && accepting_tags?(build)
end
def only_for?(project)
......@@ -171,8 +171,8 @@ module Ci
end
end
def assignable_for?(project)
is_shared? || projects.exists?(id: project.id)
def assignable_for?(project_id)
is_shared? || projects.exists?(id: project_id)
end
def accepting_tags?(build)
......
# Adapter class to unify the interface between mounted uploaders and the
# Ci::JobArtifact model.
# Meant to be prepended so the interface can stay the same.
module ArtifactMigratable
def artifacts_file
job_artifacts_archive&.file || legacy_artifacts_file
end
def artifacts_metadata
job_artifacts_metadata&.file || legacy_artifacts_metadata
end
def artifacts?
!artifacts_expired? && artifacts_file.exists?
end
def artifacts_metadata?
artifacts? && artifacts_metadata.exists?
end
def artifacts_file_changed?
job_artifacts_archive&.file_changed? || attribute_changed?(:artifacts_file)
end
def remove_artifacts_file!
if job_artifacts_archive
job_artifacts_archive.destroy
else
remove_legacy_artifacts_file!
end
end
def remove_artifacts_metadata!
if job_artifacts_metadata
job_artifacts_metadata.destroy
else
remove_legacy_artifacts_metadata!
end
end
def artifacts_size
read_attribute(:artifacts_size).to_i +
job_artifacts_archive&.size.to_i + job_artifacts_metadata&.size.to_i
end
end
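The adapter works because of `Module#prepend`: the prepended module is placed before the class in the ancestor chain, so its methods shadow the mounted-uploader methods of the same name while the legacy readers stay reachable. A standalone sketch of the pattern, with illustrative names:

```ruby
module Migratable
  def file
    new_file || super # prefer the new storage, fall back to the original method
  end
end

class Attachment
  prepend Migratable

  attr_accessor :new_file

  def file
    'legacy.zip'
  end
end

Attachment.new.file # => "legacy.zip" (no new file, so super is used)
```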
......@@ -2,6 +2,7 @@ require 'carrierwave/orm/activerecord'
class Group < Namespace
include Gitlab::ConfigHelper
include AfterCommitQueue
include AccessRequestable
include Avatarable
include Referable
......
......@@ -2,6 +2,7 @@ require 'digest/md5'
class Key < ActiveRecord::Base
include Gitlab::CurrentSettings
include AfterCommitQueue
include Sortable
belongs_to :user
......
class Member < ActiveRecord::Base
include AfterCommitQueue
include Sortable
include Importable
include Expirable
......
......@@ -35,7 +35,9 @@ class ProjectStatistics < ActiveRecord::Base
end
def update_build_artifacts_size
self.build_artifacts_size = project.builds.sum(:artifacts_size)
self.build_artifacts_size =
project.builds.sum(:artifacts_size) +
Ci::JobArtifact.artifacts_size_for(self)
end
def update_storage_size
......
......@@ -211,7 +211,7 @@ class Service < ActiveRecord::Base
def async_execute(data)
return unless supported_events.include?(data[:object_kind])
Sidekiq::Client.enqueue(ProjectServiceWorker, id, data)
ProjectServiceWorker.perform_async(id, data)
end
def issue_tracker?
......
......@@ -7,6 +7,7 @@ class User < ActiveRecord::Base
include Gitlab::ConfigHelper
include Gitlab::CurrentSettings
include Gitlab::SQL::Pattern
include AfterCommitQueue
include Avatarable
include Referable
include Sortable
......@@ -903,6 +904,7 @@ class User < ActiveRecord::Base
def post_destroy_hook
log_info("User \"#{name}\" (#{email}) was removed")
system_hook_service.execute_hooks_for(self, :destroy)
end
......
......@@ -22,6 +22,16 @@ module Ci
valid = true
if Feature.enabled?('ci_job_request_with_tags_matcher')
# pick builds that do not have tags other than the runner's
builds = builds.matches_tag_ids(runner.tags.ids)
# pick builds that have at least one tag
unless runner.run_untagged?
builds = builds.with_any_tags
end
end
builds.find do |build|
next unless runner.can_pick?(build)
......
......@@ -18,7 +18,7 @@ module Projects
@status.enqueue!
@status.run!
raise 'missing pages artifacts' unless build.artifacts_file?
raise 'missing pages artifacts' unless build.artifacts?
raise 'pages are outdated' unless latest?
# Create temporary directory in which we will extract the artifacts
......
class SystemHooksService
def execute_hooks_for(model, event)
execute_hooks(build_event_data(model, event))
data = build_event_data(model, event)
model.run_after_commit_or_now do
SystemHooksService.new.execute_hooks(data)
end
end
def execute_hooks(data, hooks_scope = :all)
......
......@@ -63,7 +63,7 @@ class WebHookService
end
def async_execute
Sidekiq::Client.enqueue(WebHookWorker, hook.id, data, hook_name)
WebHookWorker.perform_async(hook.id, data, hook_name)
end
private
......
class JobArtifactUploader < GitlabUploader
storage :file
def self.local_store_path
Gitlab.config.artifacts.path
end
def self.artifacts_upload_path
File.join(self.local_store_path, 'tmp/uploads/')
end
def size
return super if model.size.nil?
model.size
end
def store_dir
default_local_path
end
def cache_dir
File.join(self.class.local_store_path, 'tmp/cache')
end
def work_dir
File.join(self.class.local_store_path, 'tmp/work')
end
private
def default_local_path
File.join(self.class.local_store_path, default_path)
end
def default_path
creation_date = model.created_at.utc.strftime('%Y_%m_%d')
File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
creation_date, model.job_id.to_s, model.id.to_s)
end
def disk_hash
@disk_hash ||= Digest::SHA2.hexdigest(model.project_id.to_s)
end
end
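A worked example of the fan-out layout built by `default_path` above, with hypothetical values (`project_id` 42, `job_id` 1000, `id` 5, created 2017-11-28):

```ruby
Digest::SHA2.hexdigest('42')
# => "73475cb40a568e8da8a045ced110137e159f890ac4da883b6b17dc651b3a8049"
# Store dir relative to the artifacts path (hash[0..1]/hash[2..3]/hash/date/job_id/id):
# 73/47/73475cb40a568e8da8a045ced110137e159f890ac4da883b6b17dc651b3a8049/2017_11_28/1000/5
```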
class ArtifactUploader < GitlabUploader
class LegacyArtifactUploader < GitlabUploader
storage :file
attr_reader :job, :field
def self.local_artifacts_store
def self.local_store_path
Gitlab.config.artifacts.path
end
def self.artifacts_upload_path
File.join(self.local_artifacts_store, 'tmp/uploads/')
end
def initialize(job, field)
@job, @field = job, field
File.join(self.local_store_path, 'tmp/uploads/')
end
def store_dir
......@@ -20,20 +14,20 @@ class ArtifactUploader < GitlabUploader
end
def cache_dir
File.join(self.class.local_artifacts_store, 'tmp/cache')
File.join(self.class.local_store_path, 'tmp/cache')
end
def work_dir
File.join(self.class.local_artifacts_store, 'tmp/work')
File.join(self.class.local_store_path, 'tmp/work')
end
private
def default_local_path
File.join(self.class.local_artifacts_store, default_path)
File.join(self.class.local_store_path, default_path)
end
def default_path
File.join(job.created_at.utc.strftime('%Y_%m'), job.project_id.to_s, job.id.to_s)
File.join(model.created_at.utc.strftime('%Y_%m'), model.project_id.to_s, model.id.to_s)
end
end
= form_for @appearance, url: admin_appearances_path, html: { class: 'form-horizontal'} do |f|
= form_errors(@appearance)
%fieldset.app_logo
%legend
Navigation bar:
.form-group
= f.label :header_logo, 'Header logo', class: 'control-label'
.col-sm-10
- if @appearance.header_logo?
= image_tag @appearance.header_logo_url, class: 'appearance-light-logo-preview'
- if @appearance.persisted?
%br
= link_to 'Remove header logo', header_logos_admin_appearances_path, data: { confirm: "Header logo will be removed. Are you sure?"}, method: :delete, class: "btn btn-remove btn-sm remove-logo"
%hr
= f.hidden_field :header_logo_cache
= f.file_field :header_logo, class: ""
.hint
Maximum file size is 1MB. Pages are optimized for a 28px-tall header logo.
%fieldset.sign-in
%legend
Sign in/Sign up pages:
......@@ -28,27 +45,22 @@
.hint
Maximum file size is 1MB. Pages are optimized for a 640x360 px logo.
%fieldset.app_logo
%fieldset
%legend
Navigation bar:
New project pages:
.form-group
= f.label :header_logo, 'Header logo', class: 'control-label'
= f.label :new_project_guidelines, class: 'control-label'
.col-sm-10
- if @appearance.header_logo?
= image_tag @appearance.header_logo_url, class: 'appearance-light-logo-preview'
- if @appearance.persisted?
%br
= link_to 'Remove header logo', header_logos_admin_appearances_path, data: { confirm: "Header logo will be removed. Are you sure?"}, method: :delete, class: "btn btn-remove btn-sm remove-logo"
%hr
= f.hidden_field :header_logo_cache
= f.file_field :header_logo, class: ""
= f.text_area :new_project_guidelines, class: "form-control", rows: 10
.hint
Maximum file size is 1MB. Pages are optimized for a 28px-tall header logo.
Guidelines parsed with #{link_to "GitLab Flavored Markdown", help_page_path('user/markdown'), target: '_blank'}.
.form-actions
= f.submit 'Save', class: 'btn btn-save append-right-10'
- if @appearance.persisted?
= link_to 'Preview last save', preview_admin_appearances_path, class: 'btn', target: '_blank', rel: 'noopener noreferrer'
Preview last save:
= link_to 'Sign-in page', preview_sign_in_admin_appearances_path, class: 'btn', target: '_blank', rel: 'noopener noreferrer'
= link_to 'New project page', new_project_path, class: 'btn', target: '_blank', rel: 'noopener noreferrer'
- if @appearance.updated_at
%span.pull-right
......
......@@ -15,8 +15,8 @@
.col-sm-7.brand-holder.pull-left
%h1
= brand_title
- if brand_item
= brand_image
- if brand_item&.description?
= brand_text
- else
%h3 Open source software to collaborate on code
......
......@@ -25,7 +25,7 @@
= markdown_toolbar_button({ icon: "list-bulleted", data: { "md-tag" => "* ", "md-prepend" => true }, title: "Add a bullet list" })
= markdown_toolbar_button({ icon: "list-numbered", data: { "md-tag" => "1. ", "md-prepend" => true }, title: "Add a numbered list" })
= markdown_toolbar_button({ icon: "task-done", data: { "md-tag" => "* [ ] ", "md-prepend" => true }, title: "Add a task list" })
%button.toolbar-btn.toolbar-fullscreen-btn.js-zen-enter.has-tooltip{ type: "button", tabindex: -1, aria: { label: "Go full screen" }, title: "Go full screen", data: { container: "body" } }
%button.toolbar-btn.toolbar-fullscreen-btn.js-zen-enter.has-tooltip{ type: "button", tabindex: -1, "aria-label": "Go full screen", title: "Go full screen", data: { container: "body" } }
= sprite_icon("screen-full")
.md-write-holder
......
......@@ -18,6 +18,7 @@
A project is where you house your files (repository), plan your work (issues), and publish your documentation (wiki), #{link_to 'among other things', help_page_path("user/project/index.md", anchor: "projects-features"), target: '_blank'}.
%p
All features are enabled when you create a project, but you can disable the ones you don’t need in the project settings.
= brand_new_project_guidelines
.col-lg-9.js-toggle-container
%ul.nav-links.gitlab-tabs{ role: 'tablist' }
%li.active{ role: 'presentation' }
......
class AdminEmailWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform
......
class AuthorizedProjectsWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
# Schedules multiple jobs and waits for them to be completed.
def self.bulk_perform_and_wait(args_list)
......@@ -17,11 +16,6 @@ class AuthorizedProjectsWorker
waiter.wait
end
# Schedules multiple jobs to run in sidekiq without waiting for completion
def self.bulk_perform_async(args_list)
Sidekiq::Client.push_bulk('class' => self, 'queue' => sidekiq_options['queue'], 'args' => args_list)
end
# Performs multiple jobs directly. Failed jobs will be put into sidekiq so
# they can benefit from retries
def self.bulk_perform_inline(args_list)
......
class BackgroundMigrationWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
# Enqueues a number of jobs in bulk.
#
# The `jobs` argument should be an Array of Arrays, each sub-array must be in
# the form:
#
# [migration-class, [arg1, arg2, ...]]
def self.perform_bulk(jobs)
Sidekiq::Client.push_bulk('class' => self,
'queue' => sidekiq_options['queue'],
'args' => jobs)
end
# Schedules multiple jobs in bulk, with a delay.
#
def self.perform_bulk_in(delay, jobs)
now = Time.now.to_i
schedule = now + delay.to_i
if schedule <= now
raise ArgumentError, 'The schedule time must be in the future!'
end
Sidekiq::Client.push_bulk('class' => self,
'queue' => sidekiq_options['queue'],
'args' => jobs,
'at' => schedule)
end
include ApplicationWorker
# Performs the background migration.
#
......
class BuildCoverageWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
def perform(build_id)
......
class BuildFinishedWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :processing
......
class BuildHooksWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :hooks
......
class BuildQueueWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :processing
......
class BuildSuccessWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :processing
......
class BuildTraceSectionsWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
def perform(build_id)
......
class ClusterInstallAppWorker
include Sidekiq::Worker
include ApplicationWorker
include ClusterQueue
include ClusterApplications
......
class ClusterProvisionWorker
include Sidekiq::Worker
include ApplicationWorker
include ClusterQueue
def perform(cluster_id)
......
class ClusterWaitForAppInstallationWorker
include Sidekiq::Worker
include ApplicationWorker
include ClusterQueue
include ClusterApplications
......
Sidekiq::Worker.extend ActiveSupport::Concern
module ApplicationWorker
extend ActiveSupport::Concern
include Sidekiq::Worker
included do
sidekiq_options queue: base_queue_name
end
module ClassMethods
def base_queue_name
name
.sub(/\AGitlab::/, '')
.sub(/Worker\z/, '')
.underscore
.tr('/', '_')
end
def queue
get_sidekiq_options['queue'].to_s
end
def bulk_perform_async(args_list)
Sidekiq::Client.push_bulk('class' => self, 'args' => args_list)
end
def bulk_perform_in(delay, args_list)
now = Time.now.to_i
schedule = now + delay.to_i
if schedule <= now
raise ArgumentError, 'The schedule time must be in the future!'
end
Sidekiq::Client.push_bulk('class' => self, 'args' => args_list, 'at' => schedule)
end
end
end
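A sketch of what including `ApplicationWorker` gives a worker, using a hypothetical class name:

```ruby
module Gitlab
  module Example
    class ProcessThingWorker
      include ApplicationWorker
    end
  end
end

Gitlab::Example::ProcessThingWorker.queue
# => "example_process_thing" ("Gitlab::" prefix and "Worker" suffix stripped,
#    namespace separators turned into underscores)

# One Redis round-trip for many jobs; each inner array is one job's arguments:
Gitlab::Example::ProcessThingWorker.bulk_perform_async([[1], [2], [3]])
Gitlab::Example::ProcessThingWorker.bulk_perform_in(5.minutes, [[4], [5]])
```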
# Concern that sets the queue of a Sidekiq worker based on the worker's class
# name/namespace.
module DedicatedSidekiqQueue
extend ActiveSupport::Concern
included do
sidekiq_options queue: name.sub(/Worker\z/, '').underscore.tr('/', '_')
end
end
......@@ -8,7 +8,7 @@ module Gitlab
extend ActiveSupport::Concern
included do
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
include ReschedulingMethods
include NotifyUponDeath
......
class CreateGpgSignatureWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(commit_sha, project_id)
project = Project.find_by(id: project_id)
......
class CreatePipelineWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :creation
......
class DeleteMergedBranchesWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(project_id, user_id)
begin
......
class DeleteUserWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(current_user_id, delete_user_id, options = {})
delete_user = User.find(delete_user_id)
......
class EmailReceiverWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(raw)
return unless Gitlab::IncomingEmail.enabled?
......
class EmailsOnPushWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
attr_reader :email, :skip_premailer
......
class ExpireBuildArtifactsWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform
......@@ -8,6 +8,6 @@ class ExpireBuildArtifactsWorker
build_ids = Ci::Build.with_expired_artifacts.pluck(:id)
build_ids = build_ids.map { |build_id| [build_id] }
Sidekiq::Client.push_bulk('class' => ExpireBuildInstanceArtifactsWorker, 'args' => build_ids )
ExpireBuildInstanceArtifactsWorker.bulk_perform_async(build_ids)
end
end
class ExpireBuildInstanceArtifactsWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(build_id)
build = Ci::Build
......
class ExpireJobCacheWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :cache
......
class ExpirePipelineCacheWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :cache
......
class GitGarbageCollectWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include Gitlab::CurrentSettings
sidekiq_options retry: false
......
......@@ -7,7 +7,7 @@ module Gitlab
# been completed this worker will advance the import process to the next
# stage.
class AdvanceStageWorker
include Sidekiq::Worker
include ApplicationWorker
sidekiq_options queue: 'github_importer_advance_stage', dead: false
......
......@@ -3,7 +3,7 @@
module Gitlab
module GithubImport
class RefreshImportJidWorker
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
# The interval to schedule new instances of this job at.
......
......@@ -4,7 +4,7 @@ module Gitlab
module GithubImport
module Stage
class FinishImportWorker
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
include StageMethods
......
......@@ -4,7 +4,7 @@ module Gitlab
module GithubImport
module Stage
class ImportBaseDataWorker
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
include StageMethods
......
......@@ -4,7 +4,7 @@ module Gitlab
module GithubImport
module Stage
class ImportIssuesAndDiffNotesWorker
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
include StageMethods
......
......@@ -4,7 +4,7 @@ module Gitlab
module GithubImport
module Stage
class ImportNotesWorker
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
include StageMethods
......
......@@ -4,7 +4,7 @@ module Gitlab
module GithubImport
module Stage
class ImportPullRequestsWorker
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
include StageMethods
......
......@@ -4,7 +4,7 @@ module Gitlab
module GithubImport
module Stage
class ImportRepositoryWorker
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
include StageMethods
......
class GitlabShellWorker
include Sidekiq::Worker
include ApplicationWorker
include Gitlab::ShellAdapter
include DedicatedSidekiqQueue
def perform(action, *arg)
gitlab_shell.__send__(action, *arg) # rubocop:disable GitlabSecurity/PublicSend
......
class GitlabUsagePingWorker
LEASE_TIMEOUT = 86400
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform
......
class GroupDestroyWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include ExceptionBacktrace
def perform(group_id, user_id)
......
class ImportExportProjectCleanupWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform
......
class InvalidGpgSignatureUpdateWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(gpg_key_id)
gpg_key = GpgKey.find_by(id: gpg_key_id)
......
......@@ -2,8 +2,7 @@ require 'json'
require 'socket'
class IrkerWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(project_id, chans, colors, push_data, settings)
project = Project.find(project_id)
......
class MergeWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(merge_request_id, current_user_id, params)
params = params.with_indifferent_access
......
......@@ -5,14 +5,9 @@
# The worker will reject doing anything for projects that *do* have a
# namespace. For those use ProjectDestroyWorker instead.
class NamespacelessProjectDestroyWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include ExceptionBacktrace
def self.bulk_perform_async(args_list)
Sidekiq::Client.push_bulk('class' => self, 'queue' => sidekiq_options['queue'], 'args' => args_list)
end
def perform(project_id)
begin
project = Project.unscoped.find(project_id)
......
class NewIssueWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include NewIssuable
def perform(issue_id, user_id)
......
class NewMergeRequestWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include NewIssuable
def perform(merge_request_id, user_id)
......
class NewNoteWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
# Keep extra parameter to preserve backwards compatibility with
# old `NewNoteWorker` jobs (can remove later)
......
class PagesWorker
include Sidekiq::Worker
include ApplicationWorker
sidekiq_options queue: :pages, retry: false
......
class PipelineHooksWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :hooks
......
class PipelineMetricsWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
def perform(pipeline_id)
......
class PipelineNotificationWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
def perform(pipeline_id, recipients = nil)
......
class PipelineProcessWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :processing
......
class PipelineScheduleWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform
......
class PipelineSuccessWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :processing
......
class PipelineUpdateWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :processing
......
class PostReceive
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(gl_repository, identifier, changes)
project, is_wiki = Gitlab::GlRepository.parse(gl_repository)
......
......@@ -5,8 +5,7 @@
# Consider using an extra worker if you need to add any extra (and potentially
# slow) processing of commits.
class ProcessCommitWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
# project_id - The ID of the project this commit belongs to.
# user_id - The ID of the user that pushed the commit.
......
# Worker for updating any project specific caches.
class ProjectCacheWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
LEASE_TIMEOUT = 15.minutes.to_i
......
class ProjectDestroyWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include ExceptionBacktrace
def perform(project_id, user_id, params)
......
class ProjectExportWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include ExceptionBacktrace
sidekiq_options retry: 3
......
class ProjectMigrateHashedStorageWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
LEASE_TIMEOUT = 30.seconds.to_i
......
class ProjectServiceWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
sidekiq_options dead: false
......
# Worker for updating any project specific caches.
class PropagateServiceTemplateWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
LEASE_TIMEOUT = 4.hours.to_i
......
class PruneOldEventsWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform
......
class ReactiveCachingWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(class_name, id, *args)
klass = begin
......
class RemoveExpiredGroupLinksWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform
......
class RemoveExpiredMembersWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform
......
class RemoveOldWebHookLogsWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
WEB_HOOK_LOG_LIFETIME = 2.days
......
class RemoveUnreferencedLfsObjectsWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform
......
class RepositoryArchiveCacheWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform
......
module RepositoryCheck
class BatchWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
RUN_TIME = 3600
......
module RepositoryCheck
class ClearWorker
include Sidekiq::Worker
include ApplicationWorker
include RepositoryCheckQueue
def perform
......
module RepositoryCheck
class SingleRepositoryWorker
include Sidekiq::Worker
include ApplicationWorker
include RepositoryCheckQueue
def perform(project_id)
......
class RepositoryForkWorker
ForkError = Class.new(StandardError)
include Sidekiq::Worker
include ApplicationWorker
include Gitlab::ShellAdapter
include DedicatedSidekiqQueue
include ProjectStartImport
sidekiq_options status_expiration: StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION
......
class RepositoryImportWorker
ImportError = Class.new(StandardError)
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include ExceptionBacktrace
include ProjectStartImport
......
class RequestsProfilesWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform
......
class ScheduleUpdateUserActivityWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform(batch_size = 500)
......
class StageUpdateWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :processing
......
class StorageMigratorWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
BATCH_SIZE = 100
......
class StuckCiJobsWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
EXCLUSIVE_LEASE_KEY = 'stuck_ci_builds_worker_lease'.freeze
......
class StuckImportJobsWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
IMPORT_JOBS_EXPIRATION = 15.hours.to_i
......
class StuckMergeJobsWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform
......
class SystemHookPushWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(push_data, hook_id)
SystemHooksService.new.execute_hooks(push_data, hook_id)
......
class TrendingProjectsWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform
......
class UpdateMergeRequestsWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
LOG_TIME_THRESHOLD = 90 # seconds
......
class UpdateUserActivityWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(pairs)
pairs = cast_data(pairs)
......
class UploadChecksumWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(upload_id)
upload = Upload.find(upload_id)
......
class WaitForClusterCreationWorker
include Sidekiq::Worker
include ApplicationWorker
include ClusterQueue
def perform(cluster_id)
......
class WebHookWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
sidekiq_options retry: 4, dead: false
......
---
title: Initialize Zen mode on snippet pages
merge_request:
author:
type: fixed
---
title: Add custom brand text on new project pages
merge_request: 15541
author: Markus Koller
type: changed
---
title: Perform SQL matching of build and runner tags to greatly speed up job picking
merge_request:
author:
type: performance
......@@ -649,6 +649,8 @@ test:
# user: YOUR_USERNAME
pages:
path: tmp/tests/pages
artifacts:
path: tmp/tests/artifacts
repositories:
storages:
default:
......
......@@ -13,13 +13,9 @@ module Sidekiq
module ClassMethods
module NoSchedulingFromTransactions
NESTING = ::Rails.env.test? ? 1 : 0
%i(perform_async perform_at perform_in).each do |name|
define_method(name) do |*args|
return super(*args) if Sidekiq::Worker.skip_transaction_check
return super(*args) unless ActiveRecord::Base.connection.open_transactions > NESTING
if !Sidekiq::Worker.skip_transaction_check && AfterCommitQueue.inside_transaction?
raise <<-MSG.strip_heredoc
`#{self}.#{name}` cannot be called inside a transaction as this can lead to
race conditions when the worker runs before the transaction is committed and
......@@ -28,6 +24,9 @@ module Sidekiq
Use an `after_commit` hook, or include `AfterCommitQueue` and use a `run_after_commit` block instead.
MSG
end
super(*args)
end
end
end
......
......@@ -64,13 +64,13 @@ end
# The Sidekiq client API always adds the queue to the Sidekiq queue
# list, but mail_room and gitlab-shell do not. This is only necessary
# for monitoring.
config = YAML.load_file(Rails.root.join('config', 'sidekiq_queues.yml').to_s)
begin
queues = Gitlab::SidekiqConfig.worker_queues
Sidekiq.redis do |conn|
conn.pipelined do
config[:queues].each do |queue|
conn.sadd('queues', queue[0])
queues.each do |queue|
conn.sadd('queues', queue)
end
end
end
......
......@@ -97,7 +97,7 @@ namespace :admin do
resource :appearances, only: [:show, :create, :update], path: 'appearance' do
member do
get :preview
get :preview_sign_in
delete :logo
delete :header_logos
end
......
......@@ -124,11 +124,11 @@ class Gitlab::Seeder::Pipelines
return unless %w[build test].include?(build.stage)
artifacts_cache_file(artifacts_archive_path) do |file|
build.artifacts_file = file
build.job_artifacts.build(project: build.project, file_type: :archive, file: file)
end
artifacts_cache_file(artifacts_metadata_path) do |file|
build.artifacts_metadata = file
build.job_artifacts.build(project: build.project, file_type: :metadata, file: file)
end
end
......
class CreateJobArtifacts < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
create_table :ci_job_artifacts do |t|
t.belongs_to :project, null: false, index: true, foreign_key: { on_delete: :cascade }
t.integer :job_id, null: false
t.integer :file_type, null: false
t.integer :size, limit: 8
t.datetime_with_timezone :created_at, null: false
t.datetime_with_timezone :updated_at, null: false
t.datetime_with_timezone :expire_at
t.string :file
t.foreign_key :ci_builds, column: :job_id, on_delete: :cascade
t.index [:job_id, :file_type], unique: true
end
end
end
class AddNewProjectGuidelinesToAppearances < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
change_table :appearances do |t|
t.text :new_project_guidelines
t.text :new_project_guidelines_html
end
end
end
......@@ -25,14 +25,14 @@ class ScheduleEventMigrations < ActiveRecord::Migration
# We push multiple jobs at a time to reduce the time spent in
# Sidekiq/Redis operations. We're using this buffer-based approach so we
# don't need to run additional queries for every range.
BackgroundMigrationWorker.perform_bulk(jobs)
BackgroundMigrationWorker.bulk_perform_async(jobs)
jobs.clear
end
jobs << ['MigrateEventsToPushEventPayloads', [min, max]]
end
BackgroundMigrationWorker.perform_bulk(jobs) unless jobs.empty?
BackgroundMigrationWorker.bulk_perform_async(jobs) unless jobs.empty?
end
def down
......
......@@ -19,7 +19,7 @@ class ScheduleCreateGpgKeySubkeysFromGpgKeys < ActiveRecord::Migration
[MIGRATION, [id]]
end
BackgroundMigrationWorker.perform_bulk(jobs)
BackgroundMigrationWorker.bulk_perform_async(jobs)
end
end
......
......@@ -36,6 +36,8 @@ ActiveRecord::Schema.define(version: 20171124150326) do
t.datetime_with_timezone "updated_at", null: false
t.text "description_html"
t.integer "cached_markdown_version"
t.text "new_project_guidelines"
t.text "new_project_guidelines_html"
end
create_table "application_settings", force: :cascade do |t|
......@@ -319,6 +321,20 @@ ActiveRecord::Schema.define(version: 20171124150326) do
add_index "ci_group_variables", ["group_id", "key"], name: "index_ci_group_variables_on_group_id_and_key", unique: true, using: :btree
create_table "ci_job_artifacts", force: :cascade do |t|
t.integer "project_id", null: false
t.integer "job_id", null: false
t.integer "file_type", null: false
t.integer "size", limit: 8
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
t.datetime_with_timezone "expire_at"
t.string "file"
end
add_index "ci_job_artifacts", ["job_id", "file_type"], name: "index_ci_job_artifacts_on_job_id_and_file_type", unique: true, using: :btree
add_index "ci_job_artifacts", ["project_id"], name: "index_ci_job_artifacts_on_project_id", using: :btree
create_table "ci_pipeline_schedule_variables", force: :cascade do |t|
t.string "key", null: false
t.text "value"
......@@ -1909,6 +1925,8 @@ ActiveRecord::Schema.define(version: 20171124150326) do
add_foreign_key "ci_builds", "ci_stages", column: "stage_id", name: "fk_3a9eaa254d", on_delete: :cascade
add_foreign_key "ci_builds", "projects", name: "fk_befce0568a", on_delete: :cascade
add_foreign_key "ci_group_variables", "namespaces", column: "group_id", name: "fk_33ae4d58d8", on_delete: :cascade
add_foreign_key "ci_job_artifacts", "ci_builds", column: "job_id", on_delete: :cascade
add_foreign_key "ci_job_artifacts", "projects", on_delete: :cascade
add_foreign_key "ci_pipeline_schedule_variables", "ci_pipeline_schedules", column: "pipeline_schedule_id", name: "fk_41c35fda51", on_delete: :cascade
add_foreign_key "ci_pipeline_schedules", "projects", name: "fk_8ead60fcc4", on_delete: :cascade
add_foreign_key "ci_pipeline_schedules", "users", column: "owner_id", name: "fk_9ea99f58d2", on_delete: :nullify
......
......@@ -189,6 +189,7 @@ have access to GitLab administration tools and settings.
- [Issue closing pattern](administration/issue_closing_pattern.md): Customize how to close an issue from commit messages.
- [Libravatar](customization/libravatar.md): Use Libravatar instead of Gravatar for user avatars.
- [Welcome message](customization/welcome_message.md): Add a custom welcome message to the sign-in page.
- [New project page](customization/new_project_page.md): Customize the new project page.
### Admin tools
......
......@@ -58,7 +58,9 @@ Runs the following rake tasks:
It will check that each component was setup according to the installation guide and suggest fixes for issues found.
You may also have a look at our [Trouble Shooting Guide](https://github.com/gitlabhq/gitlab-public-wiki/wiki/Trouble-Shooting-Guide).
You may also have a look at our Troubleshooting Guides:
- [Troubleshooting Guide (GitLab)](http://docs.gitlab.com/ee/README.html#troubleshooting)
- [Troubleshooting Guide (Omnibus GitLab)](http://docs.gitlab.com/omnibus/README.html#troubleshooting)
**Omnibus Installation**
......
# Customizing the new project page
It is possible to add a Markdown-formatted message to your GitLab
new project page.
By default, the new project page shows a sidebar with general information:
![](new_project_page/default_new_project_page.png)
## Changing the appearance of the new project page
Navigate to the **Admin** area and go to the **Appearance** page.
Fill in your project guidelines:
![](new_project_page/appearance_settings.png)
After saving the page, your new project page will show the guidelines in the sidebar, below the general information:
![](new_project_page/custom_new_project_page.png)
......@@ -68,10 +68,10 @@ BackgroundMigrationWorker.perform_async('BackgroundMigrationClassName', [arg1, a
```
Usually it's better to enqueue jobs in bulk; for this you can use
`BackgroundMigrationWorker.perform_bulk`:
`BackgroundMigrationWorker.bulk_perform_async`:
```ruby
BackgroundMigrationWorker.perform_bulk(
BackgroundMigrationWorker.bulk_perform_async(
[['BackgroundMigrationClassName', [1]],
['BackgroundMigrationClassName', [2]]]
)
......@@ -85,13 +85,13 @@ updates. Removals in turn can be handled by simply defining foreign keys with
cascading deletes.
If you would like to schedule jobs in bulk with a delay, you can use
`BackgroundMigrationWorker.perform_bulk_in`:
`BackgroundMigrationWorker.bulk_perform_in`:
```ruby
jobs = [['BackgroundMigrationClassName', [1]],
['BackgroundMigrationClassName', [2]]]
BackgroundMigrationWorker.perform_bulk_in(5.minutes, jobs)
BackgroundMigrationWorker.bulk_perform_in(5.minutes, jobs)
```
## Cleaning Up
......@@ -201,7 +201,7 @@ class ScheduleExtractServicesUrl < ActiveRecord::Migration
['ExtractServicesUrl', [id]]
end
BackgroundMigrationWorker.perform_bulk(jobs)
BackgroundMigrationWorker.bulk_perform_async(jobs)
end
end
......
......@@ -3,6 +3,12 @@
This document outlines various guidelines that should be followed when adding or
modifying Sidekiq workers.
## ApplicationWorker
All workers should include `ApplicationWorker` instead of `Sidekiq::Worker`,
which adds some convenience methods and automatically sets the queue based on
the worker's name.
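For instance, a minimal worker now looks like this (the class name is illustrative):

```ruby
class ProcessSomethingWorker
  include ApplicationWorker

  def perform(something_id)
    # ...
  end
end
```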
## Default Queue
Use of the "default" queue is not allowed. Every worker should use a queue that
......@@ -13,19 +19,10 @@ A list of all available queues can be found in `config/sidekiq_queues.yml`.
## Dedicated Queues
Most workers should use their own queue. To ease this process a worker can
include the `DedicatedSidekiqQueue` concern as follows:
```ruby
class ProcessSomethingWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
end
```
This will set the queue name based on the class' name, minus the `Worker`
suffix. In the above example this would lead to the queue being
`process_something`.
Most workers should use their own queue, which is automatically set based on the
worker class name. For a worker named `ProcessSomethingWorker`, the queue name
would be `process_something`. If you're not sure what a worker's queue name is,
you can find it using `SomeWorker.queue`.
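For example:

```ruby
ProcessSomethingWorker.queue # => "process_something"
```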
In some cases multiple workers do use the same queue. For example, the various
workers for updating CI pipelines all use the `pipeline` queue. Adding workers
......
......@@ -17,6 +17,9 @@ Taking the trigger term as `project-name`, the commands are:
| `/project-name issue search <query>` | Shows up to 5 issues matching `<query>` |
| `/project-name deploy <from> to <to>` | Deploy from the `<from>` environment to the `<to>` environment |
Note that if you are using the [GitLab Slack application](https://docs.gitlab.com/ee/user/project/integrations/gitlab_slack_application.html) for
your GitLab.com projects, you need to [add the `gitlab` keyword at the beginning of the command](https://docs.gitlab.com/ee/user/project/integrations/gitlab_slack_application.html#usage).
## Issue commands
It is possible to create a new issue, display issue details, and search up to 5 issues.
......
......@@ -44,8 +44,8 @@ class Spinach::Features::ProjectPages < Spinach::FeatureSteps
project: @project,
pipeline: pipeline,
ref: 'HEAD',
artifacts_file: fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip'),
artifacts_metadata: fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip.meta')
legacy_artifacts_file: fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip'),
legacy_artifacts_metadata: fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip.meta')
)
result = ::Projects::UpdatePagesService.new(@project, build).execute
......
......@@ -37,13 +37,13 @@ module SharedBuilds
step 'recent build has artifacts available' do
artifacts = Rails.root + 'spec/fixtures/ci_build_artifacts.zip'
archive = fixture_file_upload(artifacts, 'application/zip')
@build.update_attributes(artifacts_file: archive)
@build.update_attributes(legacy_artifacts_file: archive)
end
step 'recent build has artifacts metadata available' do
metadata = Rails.root + 'spec/fixtures/ci_build_artifacts_metadata.gz'
gzip = fixture_file_upload(metadata, 'application/x-gzip')
@build.update_attributes(artifacts_metadata: gzip)
@build.update_attributes(legacy_artifacts_metadata: gzip)
end
step 'recent build has a build trace' do
......
......@@ -6,12 +6,34 @@ module AfterCommitQueue
after_rollback :_clear_after_commit_queue
end
def run_after_commit(method = nil, &block)
_after_commit_queue << proc { self.send(method) } if method # rubocop:disable GitlabSecurity/PublicSend
def run_after_commit(&block)
_after_commit_queue << block if block
true
end
def run_after_commit_or_now(&block)
if AfterCommitQueue.inside_transaction?
run_after_commit(&block)
else
instance_eval(&block)
end
true
end
def self.open_transactions_baseline
if ::Rails.env.test?
return DatabaseCleaner.connections.count { |conn| conn.strategy.is_a?(DatabaseCleaner::ActiveRecord::Transaction) }
end
0
end
def self.inside_transaction?
ActiveRecord::Base.connection.open_transactions > open_transactions_baseline
end
protected
def _run_after_commit_queue
......
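A sketch of how `run_after_commit_or_now` is meant to be used (compare the `SystemHooksService` change earlier in this commit). Inside a transaction the block is queued and replayed after commit; outside one it runs immediately:

```ruby
# Illustrative only; assumes the model includes AfterCommitQueue and
# `event_data` was built beforehand.
User.transaction do
  user.destroy
  user.run_after_commit_or_now do
    SystemHooksService.new.execute_hooks(event_data)
  end
end
# The hooks fire only once the surrounding transaction has committed.
```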
......@@ -1050,13 +1050,9 @@ module API
expose :type, :url, :username, :password
end
class ArtifactFile < Grape::Entity
expose :filename, :size
end
class Dependency < Grape::Entity
expose :id, :name, :token
expose :artifacts_file, using: ArtifactFile, if: ->(job, _) { job.artifacts? }
expose :artifacts_file, using: JobArtifactFile, if: ->(job, _) { job.artifacts? }
end
class Response < Grape::Entity
......
......@@ -215,18 +215,20 @@ module API
job = authenticate_job!
forbidden!('Job is not running!') unless job.running?
artifacts_upload_path = ArtifactUploader.artifacts_upload_path
artifacts_upload_path = JobArtifactUploader.artifacts_upload_path
artifacts = uploaded_file(:file, artifacts_upload_path)
metadata = uploaded_file(:metadata, artifacts_upload_path)
bad_request!('Missing artifacts file!') unless artifacts
file_to_large! unless artifacts.size < max_artifacts_size
job.artifacts_file = artifacts
job.artifacts_metadata = metadata
job.artifacts_expire_in = params['expire_in'] ||
expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
job.build_job_artifacts_archive(project: job.project, file_type: :archive, file: artifacts, expire_in: expire_in)
job.build_job_artifacts_metadata(project: job.project, file_type: :metadata, file: metadata, expire_in: expire_in) if metadata
job.artifacts_expire_in = expire_in
if job.save
present job, with: Entities::JobRequest::Response
else
......
......@@ -3,7 +3,7 @@ require 'backup/files'
module Backup
class Artifacts < Files
def initialize
super('artifacts', ArtifactUploader.local_artifacts_store)
super('artifacts', LegacyArtifactUploader.local_store_path)
end
def create_files_dir
......
......@@ -50,6 +50,10 @@ module Gitlab
postgresql? && version.to_f >= 9.3
end
def self.replication_slots_supported?
postgresql? && version.to_f >= 9.4
end
def self.nulls_last_order(field, direction = 'ASC')
order = "#{field} #{direction}"
......
......@@ -703,14 +703,14 @@ into similar problems in the future (e.g. when new tables are created).
# We push multiple jobs at a time to reduce the time spent in
# Sidekiq/Redis operations. We're using this buffer-based approach so we
# don't need to run additional queries for every range.
BackgroundMigrationWorker.perform_bulk(jobs)
BackgroundMigrationWorker.bulk_perform_async(jobs)
jobs.clear
end
jobs << [job_class_name, [start_id, end_id]]
end
BackgroundMigrationWorker.perform_bulk(jobs) unless jobs.empty?
BackgroundMigrationWorker.bulk_perform_async(jobs) unless jobs.empty?
end
# Queues background migration jobs for an entire table, batched by ID range.
......
......@@ -213,6 +213,10 @@ module Gitlab
end
def shas_with_signatures(repository, shas)
GitalyClient.migrate(:filter_shas_with_signatures) do |is_enabled|
if is_enabled
Gitlab::GitalyClient::CommitService.new(repository).filter_shas_with_signatures(shas)
else
shas.select do |sha|
begin
Rugged::Commit.extract_signature(repository.rugged, sha)
......@@ -222,6 +226,8 @@ module Gitlab
end
end
end
end
end
def initialize(repository, raw_commit, head = nil)
raise "Nil as raw commit passed" unless raw_commit
......
......@@ -250,6 +250,26 @@ module Gitlab
consume_commits_response(response)
end
def filter_shas_with_signatures(shas)
request = Gitaly::FilterShasWithSignaturesRequest.new(repository: @gitaly_repo)
enum = Enumerator.new do |y|
shas.each_slice(20) do |revs|
request.shas = GitalyClient.encode_repeated(revs)
y.yield request
request = Gitaly::FilterShasWithSignaturesRequest.new
end
end
response = GitalyClient.call(@repository.storage, :commit_service, :filter_shas_with_signatures, enum)
response.flat_map do |msg|
msg.shas.map { |sha| EncodingHelper.encode!(sha) }
end
end
private
def call_commit_diff(request_params, options = {})
......
require 'yaml'
module Gitlab
module SidekiqConfig
def self.redis_queues
@redis_queues ||= Sidekiq::Queue.all.map(&:name)
end
# This method is called by `bin/sidekiq-cluster` in EE, which runs outside
# of bundler/Rails context, so we cannot use any gem or Rails methods.
def self.config_queues(rails_path = Rails.root.to_s)
@config_queues ||= begin
config = YAML.load_file(File.join(rails_path, 'config', 'sidekiq_queues.yml'))
config[:queues].map(&:first)
end
end
def self.cron_workers
@cron_workers ||= Settings.cron_jobs.map { |job_name, options| options['job_class'].constantize }
end
def self.workers
@workers ||= find_workers(Rails.root.join('app', 'workers'))
end
def self.default_queues
[ActionMailer::DeliveryJob.queue_name, 'default']
end
def self.worker_queues
@worker_queues ||= (workers.map(&:queue) + default_queues).uniq
end
def self.find_workers(root)
concerns = root.join('concerns').to_s
workers = Dir[root.join('**', '*.rb')]
.reject { |path| path.start_with?(concerns) }
workers.map! do |path|
ns = Pathname.new(path).relative_path_from(root).to_s.gsub('.rb', '')
ns.camelize.constantize
end
# Skip concerns
workers.select { |w| w < Sidekiq::Worker }
end
end
end
......@@ -58,7 +58,7 @@ module Gitlab
end
def artifact_upload_ok
{ TempPath: ArtifactUploader.artifacts_upload_path }
{ TempPath: JobArtifactUploader.artifacts_upload_path }
end
def send_git_blob(repository, blob)
......
......@@ -4,5 +4,6 @@ FactoryGirl.define do
factory :appearance do
title "MepMep"
description "This is my Community Edition instance"
new_project_guidelines "Custom project guidelines"
end
end
......@@ -154,36 +154,29 @@ FactoryGirl.define do
runner factory: :ci_runner
end
trait :artifacts do
trait :legacy_artifacts do
after(:create) do |build, _|
build.artifacts_file =
fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'),
'application/zip')
build.artifacts_metadata =
fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'),
'application/x-gzip')
build.save!
build.update!(
legacy_artifacts_file: fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip'),
legacy_artifacts_metadata: fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'), 'application/x-gzip')
)
end
end
trait :artifacts_expired do
after(:create) do |build, _|
build.artifacts_file =
fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'),
'application/zip')
build.artifacts_metadata =
fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'),
'application/x-gzip')
build.artifacts_expire_at = 1.minute.ago
build.save!
trait :artifacts do
after(:create) do |build|
create(:ci_job_artifact, :archive, job: build)
create(:ci_job_artifact, :metadata, job: build)
build.reload
end
end
trait :expired do
artifacts_expire_at 1.minute.ago
end
trait :with_commit do
after(:build) do |build|
allow(build).to receive(:commit).and_return build(:commit, :without_author)
......
include ActionDispatch::TestProcess
FactoryGirl.define do
factory :ci_job_artifact, class: Ci::JobArtifact do
job factory: :ci_build
file_type :archive
after :build do |artifact|
artifact.project ||= artifact.job.project
end
trait :archive do
file_type :archive
after(:build) do |artifact, _|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
end
end
trait :metadata do
file_type :metadata
after(:build) do |artifact, _|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'), 'application/x-gzip')
end
end
end
end
......@@ -9,6 +9,7 @@ feature 'Admin Appearance' do
fill_in 'appearance_title', with: 'MyCompany'
fill_in 'appearance_description', with: 'dev server'
fill_in 'appearance_new_project_guidelines', with: 'Custom project guidelines'
click_button 'Save'
expect(current_path).to eq admin_appearances_path
......@@ -16,21 +17,39 @@ feature 'Admin Appearance' do
expect(page).to have_field('appearance_title', with: 'MyCompany')
expect(page).to have_field('appearance_description', with: 'dev server')
expect(page).to have_field('appearance_new_project_guidelines', with: 'Custom project guidelines')
expect(page).to have_content 'Last edit'
end
scenario 'Preview appearance' do
scenario 'Preview sign-in page appearance' do
sign_in(create(:admin))
visit admin_appearances_path
click_link "Preview"
click_link "Sign-in page"
expect_page_has_custom_appearance(appearance)
expect_custom_sign_in_appearance(appearance)
end
scenario 'Preview new project page appearance' do
sign_in(create(:admin))
visit admin_appearances_path
click_link "New project page"
expect_custom_new_project_appearance(appearance)
end
scenario 'Custom sign-in page' do
visit new_user_session_path
expect_page_has_custom_appearance(appearance)
expect_custom_sign_in_appearance(appearance)
end
scenario 'Custom new project page' do
sign_in create(:user)
visit new_project_path
expect_custom_new_project_appearance(appearance)
end
scenario 'Appearance logo' do
......@@ -57,11 +76,15 @@ feature 'Admin Appearance' do
expect(page).not_to have_css(header_logo_selector)
end
def expect_page_has_custom_appearance(appearance)
def expect_custom_sign_in_appearance(appearance)
expect(page).to have_content appearance.title
expect(page).to have_content appearance.description
end
def expect_custom_new_project_appearance(appearance)
expect(page).to have_content appearance.new_project_guidelines
end
def logo_selector
'//img[data-src^="/uploads/-/system/appearance/logo"]'
end
......
......@@ -89,7 +89,7 @@ describe 'Commits' do
context 'Download artifacts' do
before do
build.update_attributes(artifacts_file: artifacts_file)
build.update_attributes(legacy_artifacts_file: artifacts_file)
end
it do
......@@ -146,7 +146,7 @@ describe 'Commits' do
context "when logged as reporter" do
before do
project.team << [user, :reporter]
build.update_attributes(artifacts_file: artifacts_file)
build.update_attributes(legacy_artifacts_file: artifacts_file)
visit pipeline_path(pipeline)
end
......@@ -168,7 +168,7 @@ describe 'Commits' do
project.update(
visibility_level: Gitlab::VisibilityLevel::INTERNAL,
public_builds: false)
build.update_attributes(artifacts_file: artifacts_file)
build.update_attributes(legacy_artifacts_file: artifacts_file)
visit pipeline_path(pipeline)
end
......
......@@ -28,14 +28,14 @@ feature 'Mini Pipeline Graph', :js do
let(:artifacts_file2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png') }
before do
create(:ci_build, pipeline: pipeline, artifacts_file: artifacts_file1)
create(:ci_build, pipeline: pipeline, legacy_artifacts_file: artifacts_file1)
create(:ci_build, pipeline: pipeline, when: 'manual')
end
it 'avoids repeated database queries' do
before = ActiveRecord::QueryRecorder.new { visit_merge_request(:json) }
create(:ci_build, pipeline: pipeline, artifacts_file: artifacts_file2)
create(:ci_build, pipeline: pipeline, legacy_artifacts_file: artifacts_file2)
create(:ci_build, pipeline: pipeline, when: 'manual')
after = ActiveRecord::QueryRecorder.new { visit_merge_request(:json) }
......
......@@ -187,7 +187,7 @@ feature 'Jobs' do
context "Download artifacts" do
before do
job.update_attributes(artifacts_file: artifacts_file)
job.update_attributes(legacy_artifacts_file: artifacts_file)
visit project_job_path(project, job)
end
......@@ -198,7 +198,7 @@ feature 'Jobs' do
context 'Artifacts expire date' do
before do
job.update_attributes(artifacts_file: artifacts_file,
job.update_attributes(legacy_artifacts_file: artifacts_file,
artifacts_expire_at: expire_at)
visit project_job_path(project, job)
......@@ -422,14 +422,14 @@ feature 'Jobs' do
describe "GET /:project/jobs/:id/download" do
before do
job.update_attributes(artifacts_file: artifacts_file)
job.update_attributes(legacy_artifacts_file: artifacts_file)
visit project_job_path(project, job)
click_link 'Download'
end
context "Build from other project" do
before do
job2.update_attributes(artifacts_file: artifacts_file)
job2.update_attributes(legacy_artifacts_file: artifacts_file)
visit download_project_job_artifacts_path(project, job2)
end
......
......@@ -304,7 +304,7 @@ describe 'Pipelines', :js do
context 'with artifacts expired' do
let!(:with_artifacts_expired) do
create(:ci_build, :artifacts_expired, :success,
create(:ci_build, :expired, :success,
pipeline: pipeline,
name: 'rspec',
stage: 'test')
......
......@@ -39,6 +39,11 @@ describe 'Project snippets', :js do
expect(page).to have_selector('.atwho-view')
end
it 'has zen mode' do
find('.js-zen-enter').click
expect(page).to have_selector('.fullscreen')
end
end
end
end
......@@ -942,8 +942,8 @@ describe Gitlab::Database::MigrationHelpers do
end
it 'queues jobs in groups of buffer size 1' do
expect(BackgroundMigrationWorker).to receive(:perform_bulk).with([['FooJob', [id1, id2]]])
expect(BackgroundMigrationWorker).to receive(:perform_bulk).with([['FooJob', [id3, id3]]])
expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([['FooJob', [id1, id2]]])
expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([['FooJob', [id3, id3]]])
model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2)
end
......@@ -960,7 +960,7 @@ describe Gitlab::Database::MigrationHelpers do
end
it 'queues jobs in bulk all at once (big buffer size)' do
expect(BackgroundMigrationWorker).to receive(:perform_bulk).with([['FooJob', [id1, id2]],
expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([['FooJob', [id1, id2]],
['FooJob', [id3, id3]]])
model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2)
......
......@@ -73,6 +73,28 @@ describe Gitlab::Database do
end
end
describe '.replication_slots_supported?' do
it 'returns false when using MySQL' do
allow(described_class).to receive(:postgresql?).and_return(false)
expect(described_class.replication_slots_supported?).to eq(false)
end
it 'returns false when using PostgreSQL 9.3' do
allow(described_class).to receive(:postgresql?).and_return(true)
allow(described_class).to receive(:version).and_return('9.3.1')
expect(described_class.replication_slots_supported?).to eq(false)
end
it 'returns true when using PostgreSQL 9.4.0 or newer' do
allow(described_class).to receive(:postgresql?).and_return(true)
allow(described_class).to receive(:version).and_return('9.4.0')
expect(described_class.replication_slots_supported?).to eq(true)
end
end
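A minimal implementation consistent with these three examples could look like the sketch below; the method body is an assumption, not part of this diff.

# Replication slots arrived in PostgreSQL 9.4, so MySQL and any older
# PostgreSQL version report false.
def self.replication_slots_supported?
  postgresql? && Gem::Version.new(version) >= Gem::Version.new('9.4')
end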
describe '.nulls_last_order' do
context 'when using PostgreSQL' do
before do
......
......@@ -278,6 +278,35 @@ describe Gitlab::Git::Commit, seed_helper: true do
it { is_expected.not_to include(SeedRepo::FirstCommit::ID) }
end
shared_examples '.shas_with_signatures' do
let(:signed_shas) { %w[5937ac0a7beb003549fc5fd26fc247adbce4a52e 570e7b2abdd848b95f2f578043fc23bd6f6fd24d] }
let(:unsigned_shas) { %w[19e2e9b4ef76b422ce1154af39a91323ccc57434 c642fe9b8b9f28f9225d7ea953fe14e74748d53b] }
let(:first_signed_shas) { %w[5937ac0a7beb003549fc5fd26fc247adbce4a52e c642fe9b8b9f28f9225d7ea953fe14e74748d53b] }
it 'has 2 signed shas' do
ret = described_class.shas_with_signatures(repository, signed_shas)
expect(ret).to eq(signed_shas)
end
it 'has 0 signed shas' do
ret = described_class.shas_with_signatures(repository, unsigned_shas)
expect(ret).to eq([])
end
it 'has 1 signed sha' do
ret = described_class.shas_with_signatures(repository, first_signed_shas)
expect(ret).to contain_exactly(first_signed_shas.first)
end
end
describe '.shas_with_signatures with gitaly on' do
it_should_behave_like '.shas_with_signatures'
end
describe '.shas_with_signatures with gitaly disabled', :disable_gitaly do
it_should_behave_like '.shas_with_signatures'
end
describe '.find_all' do
shared_examples 'finding all commits' do
it 'returns a collection of commits' do
......
require 'rails_helper'
describe Gitlab::SidekiqConfig do
describe '.workers' do
it 'includes all workers' do
workers = described_class.workers
expect(workers).to include(PostReceive)
expect(workers).to include(MergeWorker)
end
end
describe '.worker_queues' do
it 'includes all queues' do
queues = described_class.worker_queues
expect(queues).to include('post_receive')
expect(queues).to include('merge')
expect(queues).to include('cronjob')
expect(queues).to include('mailers')
expect(queues).to include('default')
end
end
end
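One plausible shape for the module under test, assuming workers are discovered from app/workers the same way the removed spec helper did; this is a sketch, not the diff's implementation.

module Gitlab
  module SidekiqConfig
    # Every worker class under app/workers, skipping concerns; class names
    # are derived from the file paths.
    def self.workers
      root = Rails.root.join('app', 'workers')
      paths = Dir[root.join('**', '*.rb')]
        .reject { |path| path.start_with?(root.join('concerns').to_s) }

      paths.map do |path|
        Pathname.new(path).relative_path_from(root).to_s
          .sub(/\.rb\z/, '').camelize.constantize
      end
    end

    # The queue name each worker declares through its Sidekiq options.
    def self.worker_queues
      workers.map(&:queue)
    end
  end
end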
......@@ -16,20 +16,22 @@ describe MigrateOldArtifacts do
end
context 'with migratable data' do
let(:project1) { create(:project, ci_id: 2) }
let(:project2) { create(:project, ci_id: 3) }
let(:project3) { create(:project) }
set(:project1) { create(:project, ci_id: 2) }
set(:project2) { create(:project, ci_id: 3) }
set(:project3) { create(:project) }
let(:pipeline1) { create(:ci_empty_pipeline, project: project1) }
let(:pipeline2) { create(:ci_empty_pipeline, project: project2) }
let(:pipeline3) { create(:ci_empty_pipeline, project: project3) }
set(:pipeline1) { create(:ci_empty_pipeline, project: project1) }
set(:pipeline2) { create(:ci_empty_pipeline, project: project2) }
set(:pipeline3) { create(:ci_empty_pipeline, project: project3) }
let!(:build_with_legacy_artifacts) { create(:ci_build, pipeline: pipeline1) }
let!(:build_without_artifacts) { create(:ci_build, pipeline: pipeline1) }
let!(:build2) { create(:ci_build, :artifacts, pipeline: pipeline2) }
let!(:build3) { create(:ci_build, :artifacts, pipeline: pipeline3) }
let!(:build2) { create(:ci_build, pipeline: pipeline2) }
let!(:build3) { create(:ci_build, pipeline: pipeline3) }
before do
setup_builds(build2, build3)
store_artifacts_in_legacy_path(build_with_legacy_artifacts)
end
......@@ -38,7 +40,7 @@ describe MigrateOldArtifacts do
end
it "legacy artifacts are set" do
expect(build_with_legacy_artifacts.artifacts_file_identifier).not_to be_nil
expect(build_with_legacy_artifacts.legacy_artifacts_file_identifier).not_to be_nil
end
describe '#min_id' do
......@@ -113,5 +115,24 @@ describe MigrateOldArtifacts do
build.project.ci_id.to_s,
build.id.to_s)
end
def new_legacy_path(build)
File.join(directory,
build.created_at.utc.strftime('%Y_%m'),
build.project_id.to_s,
build.id.to_s)
end
def setup_builds(*builds)
builds.each do |build|
FileUtils.mkdir_p(new_legacy_path(build))
build.update_columns(
artifacts_file: 'ci_build_artifacts.zip',
artifacts_metadata: 'ci_build_artifacts_metadata.gz')
build.reload
end
end
end
end
......@@ -5,9 +5,6 @@ describe Appearance do
it { is_expected.to be_valid }
it { is_expected.to validate_presence_of(:title) }
it { is_expected.to validate_presence_of(:description) }
it { is_expected.to have_many(:uploads).dependent(:destroy) }
describe '.current', :use_clean_rails_memory_store_caching do
......
......@@ -23,6 +23,8 @@ describe Ci::Build do
it { is_expected.to respond_to(:has_trace?) }
it { is_expected.to respond_to(:trace) }
it { is_expected.to be_a(ArtifactMigratable) }
describe 'callbacks' do
context 'when running after_create callback' do
it 'triggers asynchronous build hooks worker' do
......@@ -130,37 +132,58 @@ describe Ci::Build do
end
describe '#artifacts?' do
context 'when new artifacts are used' do
let(:build) { create(:ci_build, :artifacts) }
subject { build.artifacts? }
context 'artifacts archive does not exist' do
before do
build.update_attributes(artifacts_file: nil)
end
let(:build) { create(:ci_build) }
it { is_expected.to be_falsy }
end
context 'artifacts archive exists' do
let(:build) { create(:ci_build, :artifacts) }
it { is_expected.to be_truthy }
context 'is expired' do
before do
build.update(artifacts_expire_at: Time.now - 7.days)
end
let!(:build) { create(:ci_build, :artifacts, :expired) }
it { is_expected.to be_falsy }
end
context 'is not expired' do
before do
build.update(artifacts_expire_at: Time.now + 7.days)
it { is_expected.to be_truthy }
end
end
end
context 'when legacy artifacts are used' do
let(:build) { create(:ci_build, :legacy_artifacts) }
subject { build.artifacts? }
context 'artifacts archive does not exist' do
let(:build) { create(:ci_build) }
it { is_expected.to be_falsy }
end
context 'artifacts archive exists' do
it { is_expected.to be_truthy }
context 'is expired' do
let!(:build) { create(:ci_build, :legacy_artifacts, :expired) }
it { is_expected.to be_falsy }
end
context 'is not expired' do
it { is_expected.to be_truthy }
end
end
end
end
describe '#artifacts_expired?' do
subject { build.artifacts_expired? }
......@@ -612,11 +635,13 @@ describe Ci::Build do
describe '#erasable?' do
subject { build.erasable? }
it { is_expected.to eq false }
end
end
context 'build is erasable' do
context 'new artifacts' do
let!(:build) { create(:ci_build, :trace, :success, :artifacts) }
describe '#erase' do
......@@ -683,6 +708,77 @@ describe Ci::Build do
end
end
context 'old artifacts' do
context 'build is erasable' do
context 'new artifacts' do
let!(:build) { create(:ci_build, :trace, :success, :legacy_artifacts) }
describe '#erase' do
before do
build.erase(erased_by: user)
end
context 'erased by user' do
let!(:user) { create(:user, username: 'eraser') }
include_examples 'erasable'
it 'records user who erased a build' do
expect(build.erased_by).to eq user
end
end
context 'erased by system' do
let(:user) { nil }
include_examples 'erasable'
it 'does not set user who erased a build' do
expect(build.erased_by).to be_nil
end
end
end
describe '#erasable?' do
subject { build.erasable? }
it { is_expected.to be_truthy }
end
describe '#erased?' do
let!(:build) { create(:ci_build, :trace, :success, :legacy_artifacts) }
subject { build.erased? }
context 'job has not been erased' do
it { is_expected.to be_falsey }
end
context 'job has been erased' do
before do
build.erase
end
it { is_expected.to be_truthy }
end
end
context 'metadata and build trace are not available' do
let!(:build) { create(:ci_build, :success, :legacy_artifacts) }
before do
build.remove_artifacts_metadata!
end
describe '#erase' do
it 'does not raise error' do
expect { build.erase }.not_to raise_error
end
end
end
end
end
end
end
describe '#first_pending' do
let!(:first) { create(:ci_build, pipeline: pipeline, status: 'pending', created_at: Date.yesterday) }
let!(:second) { create(:ci_build, pipeline: pipeline, status: 'pending') }
......@@ -912,11 +1008,23 @@ describe Ci::Build do
describe '#keep_artifacts!' do
let(:build) { create(:ci_build, artifacts_expire_at: Time.now + 7.days) }
subject { build.keep_artifacts! }
it 'resets expire_at' do
build.keep_artifacts!
subject
expect(build.artifacts_expire_at).to be_nil
end
context 'when having artifacts files' do
let!(:artifact) { create(:ci_job_artifact, job: build, expire_in: '7 days') }
it 'resets dependent objects' do
subject
expect(artifact.reload.expire_at).to be_nil
end
end
end
describe '#merge_request' do
......@@ -1813,4 +1921,77 @@ describe Ci::Build do
end
end
end
describe '.matches_tag_ids' do
set(:build) { create(:ci_build, project: project, user: user) }
let(:tag_ids) { ::ActsAsTaggableOn::Tag.named_any(tag_list).ids }
subject { described_class.where(id: build).matches_tag_ids(tag_ids) }
before do
build.update(tag_list: build_tag_list)
end
context 'when the build has different tags' do
let(:build_tag_list) { %w(A B) }
let(:tag_list) { %w(C D) }
it "does not match a build" do
is_expected.not_to contain_exactly(build)
end
end
context 'when the build has a subset of the tags' do
let(:build_tag_list) { %w(A B) }
let(:tag_list) { %w(A B C D) }
it "does match a build" do
is_expected.to contain_exactly(build)
end
end
context 'when build does not have tags' do
let(:build_tag_list) { [] }
let(:tag_list) { %w(C D) }
it "does match a build" do
is_expected.to contain_exactly(build)
end
end
context 'when the build does not have a subset of the tags' do
let(:build_tag_list) { %w(A B C) }
let(:tag_list) { %w(C D) }
it "does not match a build" do
is_expected.not_to contain_exactly(build)
end
end
end
describe '.matches_tags' do
set(:build) { create(:ci_build, project: project, user: user) }
subject { described_class.where(id: build).with_any_tags }
before do
build.update(tag_list: tag_list)
end
context 'when the build has tags' do
let(:tag_list) { %w(A B) }
it "does match a build" do
is_expected.to contain_exactly(build)
end
end
context 'when the build has no tags' do
let(:tag_list) { [] }
it "does not match a build" do
is_expected.not_to contain_exactly(build)
end
end
end
end
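The scopes exercised above could be implemented along the following lines against the acts-as-taggable-on taggings table; this is a hedged sketch, and everything apart from the scope names is an assumption.

# A build matches the given tag ids when none of its taggings fall
# outside that set (an untagged build therefore matches trivially).
scope :matches_tag_ids, ->(tag_ids) do
  matcher = ::ActsAsTaggableOn::Tagging
    .where(taggable_type: CommitStatus.name)
    .where(context: 'tags')
    .where('taggable_id = ci_builds.id')
    .where.not(tag_id: tag_ids)
    .select('1')

  where('NOT EXISTS (?)', matcher)
end

# A build matches when it carries at least one tag.
scope :with_any_tags, -> do
  matcher = ::ActsAsTaggableOn::Tagging
    .where(taggable_type: CommitStatus.name)
    .where(context: 'tags')
    .where('taggable_id = ci_builds.id')
    .select('1')

  where('EXISTS (?)', matcher)
end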
require 'spec_helper'
describe Ci::JobArtifact do
set(:artifact) { create(:ci_job_artifact, :archive) }
describe "Associations" do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:job) }
end
it { is_expected.to respond_to(:file) }
it { is_expected.to respond_to(:created_at) }
it { is_expected.to respond_to(:updated_at) }
describe '#set_size' do
it 'sets the size' do
expect(artifact.size).to eq(106365)
end
end
describe '#file' do
subject { artifact.file }
context 'the uploader api' do
it { is_expected.to respond_to(:store_dir) }
it { is_expected.to respond_to(:cache_dir) }
it { is_expected.to respond_to(:work_dir) }
end
end
describe '#expire_in' do
subject { artifact.expire_in }
it { is_expected.to be_nil }
context 'when expire_at is specified' do
let(:expire_at) { Time.now + 7.days }
before do
artifact.expire_at = expire_at
end
it { is_expected.to be_within(5).of(expire_at - Time.now) }
end
end
describe '#expire_in=' do
subject { artifact.expire_in }
it 'sets the expiry when assigning a valid duration' do
artifact.expire_in = '7 days'
is_expected.to be_within(10).of(7.days.to_i)
end
it 'raises an error when assigning an invalid duration' do
expect { artifact.expire_in = '7 elephants' }.to raise_error(ChronicDuration::DurationParseError)
is_expected.to be_nil
end
it 'clears the expiry when resetting the value' do
artifact.expire_in = nil
is_expected.to be_nil
end
it 'clears the expiry when setting it to 0' do
artifact.expire_in = '0'
is_expected.to be_nil
end
end
end
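Taken together, the #expire_in examples pin the accessor pair down quite tightly; below is a sketch consistent with them, assuming ChronicDuration (whose parse returns nil for a zero duration and raises ChronicDuration::DurationParseError on unparseable input).

# Remaining lifetime in seconds, derived from the stored timestamp.
def expire_in
  expire_at - Time.now if expire_at
end

# Accepts human-readable durations such as '7 days'; nil and '0' clear it.
def expire_in=(value)
  self.expire_at =
    if value
      ChronicDuration.parse(value)&.seconds&.from_now
    end
end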
......@@ -133,15 +133,29 @@ describe ProjectStatistics do
describe '#update_build_artifacts_size' do
let!(:pipeline) { create(:ci_pipeline, project: project) }
let!(:build1) { create(:ci_build, pipeline: pipeline, artifacts_size: 45.megabytes) }
let!(:build2) { create(:ci_build, pipeline: pipeline, artifacts_size: 56.megabytes) }
context 'when new job artifacts are calculated' do
let(:ci_build) { create(:ci_build, pipeline: pipeline) }
before do
create(:ci_job_artifact, :archive, project: pipeline.project, job: ci_build)
end
it "stores the size of related build artifacts" do
statistics.update_build_artifacts_size
expect(statistics.build_artifacts_size).to be(106365)
end
end
context 'when legacy artifacts are used' do
let!(:ci_build) { create(:ci_build, pipeline: pipeline, artifacts_size: 10.megabytes) }
it "stores the size of related build artifacts" do
expect(statistics.build_artifacts_size).to eq 101.megabytes
statistics.update_build_artifacts_size
expect(statistics.build_artifacts_size).to eq(10.megabytes)
end
end
end
......
......@@ -945,7 +945,7 @@ describe API::Runner do
context 'when artifacts are being stored inside of tmp path' do
before do
# by configuring this path we allow passing a temp file from any path
allow(ArtifactUploader).to receive(:artifacts_upload_path).and_return('/')
allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return('/')
end
context 'when job has been erased' do
......@@ -985,15 +985,6 @@ describe API::Runner do
it_behaves_like 'successful artifacts upload'
end
context 'when updates artifact' do
before do
upload_artifacts(file_upload2, headers_with_token)
upload_artifacts(file_upload, headers_with_token)
end
it_behaves_like 'successful artifacts upload'
end
context 'when using runners token' do
it 'responds with forbidden' do
upload_artifacts(file_upload, headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.project.runners_token))
......@@ -1106,7 +1097,7 @@ describe API::Runner do
expect(response).to have_gitlab_http_status(201)
expect(stored_artifacts_file.original_filename).to eq(artifacts.original_filename)
expect(stored_metadata_file.original_filename).to eq(metadata.original_filename)
expect(stored_artifacts_size).to eq(71759)
expect(stored_artifacts_size).to eq(72821)
end
end
......@@ -1131,7 +1122,7 @@ describe API::Runner do
# by configuring this path we allow passing a file from @tmpdir only
# but all temporary files are stored in the system tmp directory
@tmpdir = Dir.mktmpdir
allow(ArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir)
allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir)
end
after do
......
require 'spec_helper'
describe PipelineSerializer do
let(:user) { create(:user) }
set(:user) { create(:user) }
let(:serializer) do
described_class.new(current_user: user)
......@@ -117,7 +117,7 @@ describe PipelineSerializer do
shared_examples 'no N+1 queries' do
it 'verifies number of queries', :request_store do
recorded = ActiveRecord::QueryRecorder.new { subject }
expect(recorded.count).to be_within(1).of(57)
expect(recorded.count).to be_within(1).of(36)
expect(recorded.cached_count).to eq(0)
end
end
......
......@@ -15,16 +15,14 @@ module Ci
describe '#execute' do
context 'runner follow tag list' do
it "picks build with the same tag" do
pending_job.tag_list = ["linux"]
pending_job.save
specific_runner.tag_list = ["linux"]
pending_job.update(tag_list: ["linux"])
specific_runner.update(tag_list: ["linux"])
expect(execute(specific_runner)).to eq(pending_job)
end
it "does not pick build with different tag" do
pending_job.tag_list = ["linux"]
pending_job.save
specific_runner.tag_list = ["win32"]
pending_job.update(tag_list: ["linux"])
specific_runner.update(tag_list: ["win32"])
expect(execute(specific_runner)).to be_falsey
end
......@@ -33,13 +31,12 @@ module Ci
end
it "does not pick build with tag" do
pending_job.tag_list = ["linux"]
pending_job.save
pending_job.update(tag_list: ["linux"])
expect(execute(specific_runner)).to be_falsey
end
it "pick build without tag" do
specific_runner.tag_list = ["win32"]
specific_runner.update(tag_list: ["win32"])
expect(execute(specific_runner)).to eq(pending_job)
end
end
......@@ -172,7 +169,7 @@ module Ci
context 'when first build is stalled' do
before do
pending_job.lock_version = 10
pending_job.update(lock_version: 0)
end
subject { described_class.new(specific_runner).execute }
......@@ -182,7 +179,7 @@ module Ci
before do
allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_specific_runner)
.and_return([pending_job, other_build])
.and_return(Ci::Build.where(id: [pending_job, other_build]))
end
it "receives second build from the queue" do
......@@ -194,7 +191,7 @@ module Ci
context 'when single build is in queue' do
before do
allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_specific_runner)
.and_return([pending_job])
.and_return(Ci::Build.where(id: pending_job))
end
it "does not receive any valid result" do
......@@ -205,7 +202,7 @@ module Ci
context 'when there is no build in queue' do
before do
allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_specific_runner)
.and_return([])
.and_return(Ci::Build.none)
end
it "does not receive builds but result is valid" do
......
......@@ -17,7 +17,7 @@ describe Ci::RetryBuildService do
%i[id status user token coverage trace runner artifacts_expire_at
artifacts_file artifacts_metadata artifacts_size created_at
updated_at started_at finished_at queued_at erased_by
erased_at auto_canceled_by].freeze
erased_at auto_canceled_by job_artifacts job_artifacts_archive job_artifacts_metadata].freeze
IGNORE_ACCESSORS =
%i[type lock_version target_url base_tags trace_sections
......@@ -34,7 +34,7 @@ describe Ci::RetryBuildService do
end
let(:build) do
create(:ci_build, :failed, :artifacts_expired, :erased,
create(:ci_build, :failed, :artifacts, :expired, :erased,
:queued, :coverage, :tags, :allowed_to_fail, :on_tag,
:triggered, :trace, :teardown_environment,
description: 'my-job', stage: 'test', pipeline: pipeline,
......
require "spec_helper"
describe Projects::UpdatePagesService do
let(:project) { create(:project, :repository) }
let(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
let(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') }
set(:project) { create(:project, :repository) }
set(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
set(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') }
let(:invalid_file) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png') }
let(:extension) { 'zip' }
let(:file) { fixture_file_upload(Rails.root + "spec/fixtures/pages.#{extension}") }
let(:empty_file) { fixture_file_upload(Rails.root + "spec/fixtures/pages_empty.#{extension}") }
let(:metadata) do
filename = Rails.root + "spec/fixtures/pages.#{extension}.meta"
fixture_file_upload(filename) if File.exist?(filename)
end
subject { described_class.new(project, build) }
......@@ -12,18 +20,85 @@ describe Projects::UpdatePagesService do
project.remove_pages
end
context 'legacy artifacts' do
%w(tar.gz zip).each do |format|
let(:extension) { format }
context "for valid #{format}" do
let(:file) { fixture_file_upload(Rails.root + "spec/fixtures/pages.#{format}") }
let(:empty_file) { fixture_file_upload(Rails.root + "spec/fixtures/pages_empty.#{format}") }
let(:metadata) do
filename = Rails.root + "spec/fixtures/pages.#{format}.meta"
fixture_file_upload(filename) if File.exist?(filename)
before do
build.update_attributes(legacy_artifacts_file: file)
build.update_attributes(legacy_artifacts_metadata: metadata)
end
describe 'pages artifacts' do
context 'with expiry date' do
before do
build.update_attributes(artifacts_file: file)
build.update_attributes(artifacts_metadata: metadata)
build.artifacts_expire_in = "2 days"
end
it "doesn't delete artifacts" do
expect(execute).to eq(:success)
expect(build.reload.artifacts?).to eq(true)
end
end
context 'without expiry date' do
it "does delete artifacts" do
expect(execute).to eq(:success)
expect(build.reload.artifacts?).to eq(false)
end
end
end
it 'succeeds' do
expect(project.pages_deployed?).to be_falsey
expect(execute).to eq(:success)
expect(project.pages_deployed?).to be_truthy
# Check that all expected files are extracted
%w[index.html zero .hidden/file].each do |filename|
expect(File.exist?(File.join(project.public_pages_path, filename))).to be_truthy
end
end
it 'limits pages size' do
stub_application_setting(max_pages_size: 1)
expect(execute).not_to eq(:success)
end
it 'removes pages after destroy' do
expect(PagesWorker).to receive(:perform_in)
expect(project.pages_deployed?).to be_falsey
expect(execute).to eq(:success)
expect(project.pages_deployed?).to be_truthy
project.destroy
expect(project.pages_deployed?).to be_falsey
end
it 'fails if sha on branch is not latest' do
build.update_attributes(ref: 'feature')
expect(execute).not_to eq(:success)
end
it 'fails for an empty file' do
build.update_attributes(legacy_artifacts_file: empty_file)
expect(execute).not_to eq(:success)
end
end
end
end
context 'for new artifacts' do
context "for a valid job" do
before do
create(:ci_job_artifact, file: file, job: build)
create(:ci_job_artifact, file_type: :metadata, file: metadata, job: build)
build.reload
end
describe 'pages artifacts' do
......@@ -35,7 +110,7 @@ describe Projects::UpdatePagesService do
it "doesn't delete artifacts" do
expect(execute).to eq(:success)
expect(build.reload.artifacts_file?).to eq(true)
expect(build.artifacts?).to eq(true)
end
end
......@@ -43,7 +118,7 @@ describe Projects::UpdatePagesService do
it "does delete artifacts" do
expect(execute).to eq(:success)
expect(build.reload.artifacts_file?).to eq(false)
expect(build.reload.artifacts?).to eq(false)
end
end
end
......@@ -74,13 +149,14 @@ describe Projects::UpdatePagesService do
end
it 'fails if sha on branch is not latest' do
pipeline.update_attributes(sha: 'old_sha')
build.update_attributes(artifacts_file: file)
build.update_attributes(ref: 'feature')
expect(execute).not_to eq(:success)
end
it 'fails for an empty file' do
build.update_attributes(artifacts_file: empty_file)
build.job_artifacts_archive.update_attributes(file: empty_file)
expect(execute).not_to eq(:success)
end
end
......@@ -97,7 +173,7 @@ describe Projects::UpdatePagesService do
end
it 'fails for invalid archive' do
build.update_attributes(artifacts_file: invalid_file)
build.update_attributes(legacy_artifacts_file: invalid_file)
expect(execute).not_to eq(:success)
end
......@@ -108,8 +184,8 @@ describe Projects::UpdatePagesService do
file = fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip')
metafile = fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip.meta')
build.update_attributes(artifacts_file: file)
build.update_attributes(artifacts_metadata: metafile)
build.update_attributes(legacy_artifacts_file: file)
build.update_attributes(legacy_artifacts_metadata: metafile)
allow(build).to receive(:artifacts_metadata_entry)
.and_return(metadata)
......
......@@ -146,7 +146,7 @@ describe WebHookService do
let(:system_hook) { create(:system_hook) }
it 'enqueue WebHookWorker' do
expect(Sidekiq::Client).to receive(:enqueue).with(WebHookWorker, project_hook.id, data, 'push_hooks')
expect(WebHookWorker).to receive(:perform_async).with(project_hook.id, data, 'push_hooks')
described_class.new(project_hook, data, 'push_hooks').async_execute
end
......
......@@ -120,6 +120,7 @@ module TestEnv
FileUtils.mkdir_p(repos_path)
FileUtils.mkdir_p(backup_path)
FileUtils.mkdir_p(pages_path)
FileUtils.mkdir_p(artifacts_path)
end
def clean_gitlab_test_path
......@@ -233,6 +234,10 @@ module TestEnv
Gitlab.config.pages.path
end
def artifacts_path
Gitlab.config.artifacts.path
end
# When no cached assets exist, manually hit the root path to create them
#
# Otherwise they'd be created by the first test, often timing out and
......
require 'spec_helper'
describe JobArtifactUploader do
let(:job_artifact) { create(:ci_job_artifact) }
let(:uploader) { described_class.new(job_artifact, :file) }
let(:local_path) { Gitlab.config.artifacts.path }
describe '#store_dir' do
subject { uploader.store_dir }
let(:path) { "#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/#{job_artifact.project_id}/#{job_artifact.id}" }
context 'when using local storage' do
it { is_expected.to start_with(local_path) }
it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
it { is_expected.to end_with(path) }
end
end
describe '#cache_dir' do
subject { uploader.cache_dir }
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('/tmp/cache') }
end
describe '#work_dir' do
subject { uploader.work_dir }
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('/tmp/work') }
end
context 'file is stored in valid local_path' do
let(:file) do
fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
end
before do
uploader.store!(file)
end
subject { uploader.file.path }
it { is_expected.to start_with(local_path) }
it { is_expected.to include("/#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/") }
it { is_expected.to include("/#{job_artifact.project_id}/") }
it { is_expected.to end_with("ci_build_artifacts.zip") }
end
end
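The matchers above imply a hashed directory layout; one store_dir consistent with them is sketched below, assuming Digest::SHA2 over the project ID (model here is the CarrierWave mounting model, Ci::JobArtifact).

# <artifacts path>/<2 hex>/<2 hex>/<sha256 of project_id>/<YYYY_MM_DD>/<project_id>/<artifact_id>
def store_dir
  disk_hash = Digest::SHA2.hexdigest(model.project_id.to_s)
  creation_date = model.created_at.utc.strftime('%Y_%m_%d')

  File.join(Gitlab.config.artifacts.path,
            disk_hash[0..1], disk_hash[2..3], disk_hash,
            creation_date,
            model.project_id.to_s, model.id.to_s)
end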
require 'rails_helper'
describe ArtifactUploader do
describe LegacyArtifactUploader do
let(:job) { create(:ci_build) }
let(:uploader) { described_class.new(job, :artifacts_file) }
let(:path) { Gitlab.config.artifacts.path }
let(:uploader) { described_class.new(job, :legacy_artifacts_file) }
let(:local_path) { Gitlab.config.artifacts.path }
describe '.local_artifacts_store' do
subject { described_class.local_artifacts_store }
describe '.local_store_path' do
subject { described_class.local_store_path }
it "delegate to artifacts path" do
expect(Gitlab.config.artifacts).to receive(:path)
......@@ -18,28 +18,32 @@ describe ArtifactUploader do
describe '.artifacts_upload_path' do
subject { described_class.artifacts_upload_path }
it { is_expected.to start_with(path) }
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('tmp/uploads/') }
end
describe '#store_dir' do
subject { uploader.store_dir }
it { is_expected.to start_with(path) }
it { is_expected.to end_with("#{job.project_id}/#{job.id}") }
let(:path) { "#{job.created_at.utc.strftime('%Y_%m')}/#{job.project_id}/#{job.id}" }
context 'when using local storage' do
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with(path) }
end
end
describe '#cache_dir' do
subject { uploader.cache_dir }
it { is_expected.to start_with(path) }
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('/tmp/cache') }
end
describe '#work_dir' do
subject { uploader.work_dir }
it { is_expected.to start_with(path) }
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('/tmp/work') }
end
......@@ -51,11 +55,23 @@ describe ArtifactUploader do
subject { uploader.filename }
it { is_expected.to be_nil }
end
context 'with artifacts' do
let(:job) { create(:ci_build, :artifacts) }
context 'file is stored in valid path' do
let(:file) do
fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
end
it { is_expected.not_to be_nil }
before do
uploader.store!(file)
end
subject { uploader.file.path }
it { is_expected.to start_with(local_path) }
it { is_expected.to include("/#{job.created_at.utc.strftime('%Y_%m')}/") }
it { is_expected.to include("/#{job.project_id}/") }
it { is_expected.to end_with("ci_build_artifacts.zip") }
end
end
......@@ -65,7 +65,6 @@ describe AuthorizedProjectsWorker do
args_list = build_args_list(project.owner.id)
push_bulk_args = {
'class' => described_class,
'queue' => described_class.sidekiq_options['queue'],
'args' => args_list
}
......
......@@ -10,35 +10,4 @@ describe BackgroundMigrationWorker, :sidekiq do
described_class.new.perform('Foo', [10, 20])
end
end
describe '.perform_bulk' do
it 'enqueues background migrations in bulk' do
Sidekiq::Testing.fake! do
described_class.perform_bulk([['Foo', [1]], ['Foo', [2]]])
expect(described_class.jobs.count).to eq 2
expect(described_class.jobs).to all(include('enqueued_at'))
end
end
end
describe '.perform_bulk_in' do
context 'when delay is valid' do
it 'correctly schedules background migrations' do
Sidekiq::Testing.fake! do
described_class.perform_bulk_in(1.minute, [['Foo', [1]], ['Foo', [2]]])
expect(described_class.jobs.count).to eq 2
expect(described_class.jobs).to all(include('at'))
end
end
end
context 'when delay is invalid' do
it 'raises an ArgumentError exception' do
expect { described_class.perform_bulk_in(-60, [['Foo']]) }
.to raise_error(ArgumentError)
end
end
end
end
require 'spec_helper'
describe ApplicationWorker do
let(:worker) do
Class.new do
def self.name
'Gitlab::Foo::Bar::DummyWorker'
end
include ApplicationWorker
end
end
describe 'Sidekiq options' do
it 'sets the queue name based on the class name' do
expect(worker.sidekiq_options['queue']).to eq('foo_bar_dummy')
end
end
describe '.queue' do
it 'returns the queue name' do
worker.sidekiq_options queue: :some_queue
expect(worker.queue).to eq('some_queue')
end
end
describe '.bulk_perform_async' do
it 'enqueues jobs in bulk' do
Sidekiq::Testing.fake! do
worker.bulk_perform_async([['Foo', [1]], ['Foo', [2]]])
expect(worker.jobs.count).to eq 2
expect(worker.jobs).to all(include('enqueued_at'))
end
end
end
describe '.bulk_perform_in' do
context 'when delay is valid' do
it 'correctly schedules jobs' do
Sidekiq::Testing.fake! do
worker.bulk_perform_in(1.minute, [['Foo', [1]], ['Foo', [2]]])
expect(worker.jobs.count).to eq 2
expect(worker.jobs).to all(include('at'))
end
end
end
context 'when delay is invalid' do
it 'raises an ArgumentError exception' do
expect { worker.bulk_perform_in(-60, [['Foo']]) }
.to raise_error(ArgumentError)
end
end
end
end
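These examples suggest class methods built on Sidekiq::Client.push_bulk; here is a sketch under that assumption (the error message is also assumed).

# Enqueue many jobs for this worker in a single Redis round trip.
def bulk_perform_async(args_list)
  Sidekiq::Client.push_bulk('class' => self, 'args' => args_list)
end

# As above, but scheduled `delay` from now; non-positive delays are rejected.
def bulk_perform_in(delay, args_list)
  now = Time.now.to_i
  schedule = now + delay.to_i

  raise ArgumentError, 'The schedule time must be in the future!' if schedule <= now

  Sidekiq::Client.push_bulk('class' => self, 'args' => args_list, 'at' => schedule)
end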
......@@ -3,7 +3,11 @@ require 'spec_helper'
describe ClusterQueue do
let(:worker) do
Class.new do
include Sidekiq::Worker
def self.name
'DummyWorker'
end
include ApplicationWorker
include ClusterQueue
end
end
......
......@@ -3,7 +3,11 @@ require 'spec_helper'
describe CronjobQueue do
let(:worker) do
Class.new do
include Sidekiq::Worker
def self.name
'DummyWorker'
end
include ApplicationWorker
include CronjobQueue
end
end
......
require 'spec_helper'
describe DedicatedSidekiqQueue do
let(:worker) do
Class.new do
def self.name
'Foo::Bar::DummyWorker'
end
include Sidekiq::Worker
include DedicatedSidekiqQueue
end
end
describe 'queue names' do
it 'sets the queue name based on the class name' do
expect(worker.sidekiq_options['queue']).to eq('foo_bar_dummy')
end
end
end
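The expected queue name suggests a concern roughly like the sketch below; the exact name transformation is an assumption.

# Derives the queue name from the worker class name, for example
# Foo::Bar::DummyWorker => foo_bar_dummy.
module DedicatedSidekiqQueue
  extend ActiveSupport::Concern

  included do
    queue_name = name.sub(/\AGitlab::/, '').sub(/Worker\z/, '')
    sidekiq_options queue: queue_name.underscore.tr('/', '_')
  end
end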
......@@ -3,6 +3,10 @@ require 'spec_helper'
describe Gitlab::GithubImport::ObjectImporter do
let(:worker) do
Class.new do
def self.name
'DummyWorker'
end
include(Gitlab::GithubImport::ObjectImporter)
def counter_name
......
......@@ -3,7 +3,11 @@ require 'spec_helper'
describe Gitlab::GithubImport::Queue do
it 'sets the Sidekiq options for the worker' do
worker = Class.new do
include Sidekiq::Worker
def self.name
'DummyWorker'
end
include ApplicationWorker
include Gitlab::GithubImport::Queue
end
......
......@@ -3,7 +3,11 @@ require 'spec_helper'
describe PipelineQueue do
let(:worker) do
Class.new do
include Sidekiq::Worker
def self.name
'DummyWorker'
end
include ApplicationWorker
include PipelineQueue
end
end
......
......@@ -3,7 +3,11 @@ require 'spec_helper'
describe RepositoryCheckQueue do
let(:worker) do
Class.new do
include Sidekiq::Worker
def self.name
'DummyWorker'
end
include ApplicationWorker
include RepositoryCheckQueue
end
end
......
require 'spec_helper'
describe 'Every Sidekiq worker' do
let(:workers) do
root = Rails.root.join('app', 'workers')
concerns = root.join('concerns').to_s
workers = Dir[root.join('**', '*.rb')]
.reject { |path| path.start_with?(concerns) }
workers.map do |path|
ns = Pathname.new(path).relative_path_from(root).to_s.gsub('.rb', '')
ns.camelize.constantize
end
it 'includes ApplicationWorker' do
expect(Gitlab::SidekiqConfig.workers).to all(include(ApplicationWorker))
end
it 'does not use the default queue' do
workers.each do |worker|
expect(worker.sidekiq_options['queue'].to_s).not_to eq('default')
end
expect(Gitlab::SidekiqConfig.workers.map(&:queue)).not_to include('default')
end
it 'uses the cronjob queue when the worker runs as a cronjob' do
cron_workers = Settings.cron_jobs
.map { |job_name, options| options['job_class'].constantize }
.to_set
workers.each do |worker|
next unless cron_workers.include?(worker)
expect(worker.sidekiq_options['queue'].to_s).to eq('cronjob')
end
expect(Gitlab::SidekiqConfig.cron_workers.map(&:queue)).to all(eq('cronjob'))
end
it 'defines the queue in the Sidekiq configuration file' do
config = YAML.load_file(Rails.root.join('config', 'sidekiq_queues.yml').to_s)
queue_names = config[:queues].map { |(queue, _)| queue }.to_set
config_queue_names = Gitlab::SidekiqConfig.config_queues.to_set
workers.each do |worker|
expect(queue_names).to include(worker.sidekiq_options['queue'].to_s)
end
expect(Gitlab::SidekiqConfig.worker_queues).to all(be_in(config_queue_names))
end
end
......@@ -11,12 +11,8 @@ describe ExpireBuildInstanceArtifactsWorker do
end
context 'with expired artifacts' do
let(:artifacts_expiry) { { artifacts_expire_at: Time.now - 7.days } }
context 'when associated project is valid' do
let(:build) do
create(:ci_build, :artifacts, artifacts_expiry)
end
let(:build) { create(:ci_build, :artifacts, :expired) }
it 'does expire' do
expect(build.reload.artifacts_expired?).to be_truthy
......@@ -26,14 +22,14 @@ describe ExpireBuildInstanceArtifactsWorker do
expect(build.reload.artifacts_file.exists?).to be_falsey
end
it 'does nullify artifacts_file column' do
expect(build.reload.artifacts_file_identifier).to be_nil
it 'does remove the job artifact record' do
expect(build.reload.job_artifacts_archive).to be_nil
end
end
end
context 'with not yet expired artifacts' do
let(:build) do
set(:build) do
create(:ci_build, :artifacts, artifacts_expire_at: Time.now + 7.days)
end
......@@ -45,8 +41,8 @@ describe ExpireBuildInstanceArtifactsWorker do
expect(build.reload.artifacts_file.exists?).to be_truthy
end
it 'does not nullify artifacts_file column' do
expect(build.reload.artifacts_file_identifier).not_to be_nil
it 'does not remove the job artifact record' do
expect(build.reload.job_artifacts_archive).not_to be_nil
end
end
......@@ -61,13 +57,13 @@ describe ExpireBuildInstanceArtifactsWorker do
expect(build.reload.artifacts_file.exists?).to be_truthy
end
it 'does not nullify artifacts_file column' do
expect(build.reload.artifacts_file_identifier).not_to be_nil
it 'does not remove the job artifact record' do
expect(build.reload.job_artifacts_archive).not_to be_nil
end
end
context 'for expired artifacts' do
let(:build) { create(:ci_build, artifacts_expire_at: Time.now - 7.days) }
let(:build) { create(:ci_build, :expired) }
it 'is still expired' do
expect(build.reload.artifacts_expired?).to be_truthy
......