Commit 88bee24b authored by Kamil Trzciński

Merge branch 'an-sidekiq-job-feature-attribution' into 'master'

Attribute Sidekiq workers to feature categories

Closes gitlab-com/gl-infra/scalability#32

See merge request gitlab-org/gitlab!18462
parents fc9e1625 049a4d4c
# frozen_string_literal: true

module WorkerAttributes
  extend ActiveSupport::Concern

  class_methods do
    def feature_category(value)
      raise "Invalid category. Use `feature_category_not_owned!` to mark a worker as not owned" if value == :not_owned

      worker_attributes[:feature_category] = value
    end

    # Special case: mark this worker as not associated with a feature category.
    # This should be used for cross-cutting concerns, such as mailer workers.
    def feature_category_not_owned!
      worker_attributes[:feature_category] = :not_owned
    end

    def get_feature_category
      get_worker_attribute(:feature_category)
    end

    def feature_category_not_owned?
      get_worker_attribute(:feature_category) == :not_owned
    end

    protected

    # Returns a worker attribute declared on this class or its parent class.
    # This approach allows declared attributes to be inherited by
    # child classes.
    def get_worker_attribute(name)
      worker_attributes[name] || superclass_worker_attributes(name)
    end

    private

    def worker_attributes
      @attributes ||= {}
    end

    def superclass_worker_attributes(name)
      return unless superclass.include? WorkerAttributes

      superclass.get_worker_attribute(name)
    end
  end
end
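
For context, a minimal sketch of how the concern behaves once included (the classes below are hypothetical and only for illustration; in the application the module arrives through `ApplicationWorker`). Attributes are looked up on the class itself first, then on ancestor classes:

```ruby
# Hypothetical example classes, not part of this merge request.
class ExampleBaseWorker
  include WorkerAttributes

  feature_category :continuous_integration
end

class ExampleChildWorker < ExampleBaseWorker
end

ExampleBaseWorker.get_feature_category         # => :continuous_integration
ExampleChildWorker.get_feature_category        # => :continuous_integration (inherited via the superclass lookup)
ExampleChildWorker.feature_category_not_owned? # => false
```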
......@@ -4,6 +4,8 @@ class AdminEmailWorker
include ApplicationWorker
include CronjobQueue
feature_category_not_owned!
def perform
send_repository_check_mail if Gitlab::CurrentSettings.repository_checks_enabled
end
......
......@@ -4,6 +4,8 @@ class AuthorizedProjectsWorker
include ApplicationWorker
prepend WaitableWorker
feature_category :authentication_and_authorization
# This is a workaround for a Ruby 2.3.7 bug. rspec-mocks cannot restore the
# visibility of prepended modules. See https://github.com/rspec/rspec-mocks/issues/1231
# for more details.
......
......@@ -4,6 +4,7 @@ class AutoMergeProcessWorker
include ApplicationWorker
queue_namespace :auto_merge
feature_category :continuous_delivery
def perform(merge_request_id)
MergeRequest.find_by_id(merge_request_id).try do |merge_request|
......
......@@ -3,6 +3,8 @@
class BackgroundMigrationWorker
include ApplicationWorker
feature_category_not_owned!
# The minimum amount of time between processing two jobs of the same migration
# class.
#
......
......@@ -5,6 +5,7 @@ class BuildHooksWorker
include PipelineQueue
queue_namespace :pipeline_hooks
feature_category :continuous_integration
# rubocop: disable CodeReuse/ActiveRecord
def perform(build_id)
......
......@@ -5,6 +5,7 @@ class BuildQueueWorker
include PipelineQueue
queue_namespace :pipeline_processing
feature_category :continuous_integration
# rubocop: disable CodeReuse/ActiveRecord
def perform(build_id)
......
......@@ -3,6 +3,8 @@
class ChatNotificationWorker
include ApplicationWorker
feature_category :chatops
RESCHEDULE_INTERVAL = 2.seconds
# rubocop: disable CodeReuse/ActiveRecord
......
......@@ -5,6 +5,8 @@ module Ci
include ApplicationWorker
include CronjobQueue
feature_category :continuous_integration
# rubocop: disable CodeReuse/ActiveRecord
def perform
# Archive stale live traces which still reside in redis or database
......
......@@ -6,6 +6,7 @@ module Ci
include PipelineQueue
queue_namespace :pipeline_processing
feature_category :continuous_integration
def perform(build_id)
Ci::Build.find_by_id(build_id).try do |build|
......
......@@ -6,6 +6,7 @@ module Ci
include PipelineQueue
queue_namespace :pipeline_processing
feature_category :continuous_integration
def perform(build_id)
::Ci::Build.find_by_id(build_id).try do |build|
......
......@@ -4,6 +4,7 @@ class CleanupContainerRepositoryWorker
include ApplicationWorker
queue_namespace :container_repository
feature_category :container_registry
attr_reader :container_repository, :current_user
......
......@@ -8,6 +8,7 @@ module ApplicationWorker
extend ActiveSupport::Concern
include Sidekiq::Worker # rubocop:disable Cop/IncludeSidekiqWorker
include WorkerAttributes
included do
set_queue
......
......@@ -5,5 +5,6 @@ module AutoDevopsQueue
included do
queue_namespace :auto_devops
feature_category :auto_devops
end
end
......@@ -5,5 +5,6 @@ module ChaosQueue
included do
queue_namespace :chaos
feature_category :chaos_engineering
end
end
......@@ -8,5 +8,6 @@ module ClusterQueue
included do
queue_namespace :gcp_cluster
feature_category :kubernetes_configuration
end
end
......@@ -12,6 +12,8 @@ module Gitlab
include GithubImport::Queue
include ReschedulingMethods
include NotifyUponDeath
feature_category :importers
end
# project - An instance of `Project` to import the data into.
......
......@@ -7,6 +7,7 @@ module Gitlab
included do
queue_namespace :github_importer
feature_category :importers
# If a job produces an error it may block a stage from advancing
# forever. To prevent this from happening we prevent jobs from going to
......
......@@ -8,5 +8,6 @@ module ObjectPoolQueue
included do
queue_namespace :object_pool
feature_category :gitaly
end
end
......@@ -8,5 +8,6 @@ module PipelineBackgroundQueue
included do
queue_namespace :pipeline_background
feature_category :continuous_integration
end
end
......@@ -8,5 +8,6 @@ module PipelineQueue
included do
queue_namespace :pipeline_default
feature_category :continuous_integration
end
end
......@@ -6,7 +6,7 @@ module RepositoryCheckQueue
included do
queue_namespace :repository_check
sidekiq_options retry: false
feature_category :source_code_management
end
end
......@@ -8,5 +8,6 @@ module TodosDestroyerQueue
included do
queue_namespace :todos_destroyer
feature_category :issue_tracking
end
end
......@@ -3,6 +3,8 @@
class CreateEvidenceWorker
include ApplicationWorker
feature_category :release_governance
def perform(release_id)
release = Release.find_by_id(release_id)
return unless release
......
......@@ -3,6 +3,8 @@
class CreateGpgSignatureWorker
include ApplicationWorker
feature_category :source_code_management
# rubocop: disable CodeReuse/ActiveRecord
def perform(commit_shas, project_id)
# Older versions of Git::BranchPushService may push a single commit ID on
......
......@@ -3,6 +3,8 @@
class CreateNoteDiffFileWorker
include ApplicationWorker
feature_category :source_code_management
def perform(diff_note_id)
diff_note = DiffNote.find(diff_note_id)
......
......@@ -5,6 +5,7 @@ class CreatePipelineWorker
include PipelineQueue
queue_namespace :pipeline_creation
feature_category :continuous_integration
def perform(project_id, user_id, ref, source, params = {})
project = Project.find(project_id)
......
......@@ -5,6 +5,7 @@ class DeleteContainerRepositoryWorker
include ExclusiveLeaseGuard
queue_namespace :container_repository
feature_category :container_registry
LEASE_TIMEOUT = 1.hour
......
......@@ -3,6 +3,8 @@
class DeleteDiffFilesWorker
include ApplicationWorker
feature_category :source_code_management
# rubocop: disable CodeReuse/ActiveRecord
def perform(merge_request_diff_id)
merge_request_diff = MergeRequestDiff.find(merge_request_diff_id)
......
......@@ -3,6 +3,8 @@
class DeleteMergedBranchesWorker
include ApplicationWorker
feature_category :source_code_management
def perform(project_id, user_id)
begin
project = Project.find(project_id)
......
......@@ -3,6 +3,8 @@
class DeleteStoredFilesWorker
include ApplicationWorker
feature_category_not_owned!
def perform(class_name, keys)
klass = begin
class_name.constantize
......
......@@ -3,6 +3,8 @@
class DeleteUserWorker
include ApplicationWorker
feature_category :authentication_and_authorization
def perform(current_user_id, delete_user_id, options = {})
delete_user = User.find(delete_user_id)
current_user = User.find(current_user_id)
......
......@@ -5,6 +5,7 @@ module Deployments
include ApplicationWorker
queue_namespace :deployment
feature_category :continuous_delivery
def perform(deployment_id)
Deployment.find_by_id(deployment_id).try(:execute_hooks)
......
......@@ -5,6 +5,7 @@ module Deployments
include ApplicationWorker
queue_namespace :deployment
feature_category :continuous_delivery
def perform(deployment_id)
Deployment.find_by_id(deployment_id).try do |deployment|
......
......@@ -6,6 +6,7 @@ class DetectRepositoryLanguagesWorker
include ExclusiveLeaseGuard
sidekiq_options retry: 1
feature_category :source_code_management
LEASE_TIMEOUT = 300
......
......@@ -3,6 +3,8 @@
class EmailReceiverWorker
include ApplicationWorker
feature_category :issue_tracking
def perform(raw)
return unless Gitlab::IncomingEmail.enabled?
......
......@@ -5,6 +5,8 @@ class EmailsOnPushWorker
attr_reader :email, :skip_premailer
feature_category :source_code_management
def perform(project_id, recipients, push_data, options = {})
options.symbolize_keys!
options.reverse_merge!(
......
......@@ -4,6 +4,8 @@ class ExpireBuildArtifactsWorker
include ApplicationWorker
include CronjobQueue
feature_category :continuous_integration
def perform
if Feature.enabled?(:ci_new_expire_job_artifacts_service, default_enabled: true)
perform_efficient_artifacts_removal
......
......@@ -3,6 +3,8 @@
class ExpireBuildInstanceArtifactsWorker
include ApplicationWorker
feature_category :continuous_integration
# rubocop: disable CodeReuse/ActiveRecord
def perform(build_id)
build = Ci::Build
......
......@@ -4,6 +4,7 @@ class GitGarbageCollectWorker
include ApplicationWorker
sidekiq_options retry: false
feature_category :gitaly
# Timeout set to 24h
LEASE_TIMEOUT = 86400
......
......@@ -10,6 +10,7 @@ module Gitlab
include ApplicationWorker
sidekiq_options dead: false
feature_category :importers
INTERVAL = 30.seconds.to_i
......
......@@ -4,6 +4,8 @@ class GitlabShellWorker
include ApplicationWorker
include Gitlab::ShellAdapter
feature_category :source_code_management
def perform(action, *arg)
gitlab_shell.__send__(action, *arg) # rubocop:disable GitlabSecurity/PublicSend
end
......
......@@ -6,6 +6,8 @@ class GitlabUsagePingWorker
include ApplicationWorker
include CronjobQueue
feature_category_not_owned!
# Retry for up to approximately three hours then give up.
sidekiq_options retry: 10, dead: false
......
......@@ -4,6 +4,8 @@ class GroupDestroyWorker
include ApplicationWorker
include ExceptionBacktrace
feature_category :groups
def perform(group_id, user_id)
begin
group = Group.find(group_id)
......
......@@ -3,6 +3,9 @@
module HashedStorage
class BaseWorker
include ExclusiveLeaseGuard
include WorkerAttributes
feature_category :source_code_management
LEASE_TIMEOUT = 30.seconds.to_i
LEASE_KEY_SEGMENT = 'project_migrate_hashed_storage_worker'
......
......@@ -5,6 +5,7 @@ module HashedStorage
include ApplicationWorker
queue_namespace :hashed_storage
feature_category :source_code_management
# @param [Integer] start initial ID of the batch
# @param [Integer] finish last ID of the batch
......
......@@ -5,6 +5,7 @@ module HashedStorage
include ApplicationWorker
queue_namespace :hashed_storage
feature_category :source_code_management
# @param [Integer] start initial ID of the batch
# @param [Integer] finish last ID of the batch
......
......@@ -4,6 +4,8 @@ class ImportExportProjectCleanupWorker
include ApplicationWorker
include CronjobQueue
feature_category :importers
def perform
ImportExportCleanUpService.new.execute
end
......
......@@ -3,6 +3,8 @@
class ImportIssuesCsvWorker
include ApplicationWorker
feature_category :issue_tracking
sidekiq_retries_exhausted do |job|
Upload.find(job['args'][2]).destroy
end
......
......@@ -3,6 +3,8 @@
class InvalidGpgSignatureUpdateWorker
include ApplicationWorker
feature_category :source_code_management
# rubocop: disable CodeReuse/ActiveRecord
def perform(gpg_key_id)
gpg_key = GpgKey.find_by(id: gpg_key_id)
......
......@@ -6,6 +6,8 @@ require 'socket'
class IrkerWorker
include ApplicationWorker
feature_category :integrations
def perform(project_id, chans, colors, push_data, settings)
project = Project.find(project_id)
......
......@@ -4,6 +4,8 @@ class IssueDueSchedulerWorker
include ApplicationWorker
include CronjobQueue
feature_category :issue_tracking
# rubocop: disable CodeReuse/ActiveRecord
def perform
project_ids = Issue.opened.due_tomorrow.group(:project_id).pluck(:project_id).map { |id| [id] }
......
......@@ -5,6 +5,8 @@ module MailScheduler
include ApplicationWorker
include MailSchedulerQueue
feature_category :issue_tracking
# rubocop: disable CodeReuse/ActiveRecord
def perform(project_id)
Issue.opened.due_tomorrow.in_projects(project_id).preload(:project).find_each do |issue|
......
......@@ -7,6 +7,8 @@ module MailScheduler
include ApplicationWorker
include MailSchedulerQueue
feature_category :issue_tracking
def perform(meth, *args)
check_arguments!(args)
......
......@@ -3,6 +3,8 @@
class MergeWorker
include ApplicationWorker
feature_category :source_code_management
def perform(merge_request_id, current_user_id, params)
params = params.with_indifferent_access
current_user = User.find(current_user_id)
......
......@@ -3,6 +3,8 @@
class MigrateExternalDiffsWorker
include ApplicationWorker
feature_category :source_code_management
def perform(merge_request_diff_id)
diff = MergeRequestDiff.find_by_id(merge_request_diff_id)
return unless diff
......
......@@ -10,6 +10,8 @@ class NamespacelessProjectDestroyWorker
include ApplicationWorker
include ExceptionBacktrace
feature_category :authentication_and_authorization
def perform(project_id)
begin
project = Project.unscoped.find(project_id)
......
......@@ -5,6 +5,8 @@ module Namespaces
include ApplicationWorker
include CronjobQueue
feature_category :source_code_management
# Worker to prune pending rows on Namespace::AggregationSchedule
# It's scheduled to run once a day at 1:05am.
def perform
......
......@@ -5,6 +5,7 @@ module Namespaces
include ApplicationWorker
queue_namespace :update_namespace_statistics
feature_category :source_code_management
def perform(namespace_id)
namespace = Namespace.find(namespace_id)
......
......@@ -5,6 +5,7 @@ module Namespaces
include ApplicationWorker
queue_namespace :update_namespace_statistics
feature_category :source_code_management
def perform(namespace_id)
return unless aggregation_schedules_table_exists?
......
......@@ -4,6 +4,8 @@ class NewIssueWorker
include ApplicationWorker
include NewIssuable
feature_category :issue_tracking
def perform(issue_id, user_id)
return unless objects_found?(issue_id, user_id)
......
......@@ -4,6 +4,8 @@ class NewMergeRequestWorker
include ApplicationWorker
include NewIssuable
feature_category :source_code_management
def perform(merge_request_id, user_id)
return unless objects_found?(merge_request_id, user_id)
......
......@@ -3,6 +3,8 @@
class NewNoteWorker
include ApplicationWorker
feature_category :issue_tracking
# Keep extra parameter to preserve backwards compatibility with
# old `NewNoteWorker` jobs (can remove later)
# rubocop: disable CodeReuse/ActiveRecord
......
......@@ -4,6 +4,7 @@ class NewReleaseWorker
include ApplicationWorker
queue_namespace :notifications
feature_category :release_orchestration
def perform(release_id)
release = Release.with_project_and_namespace.find_by_id(release_id)
......
......@@ -6,6 +6,7 @@ module ObjectStorage
include ObjectStorageQueue
sidekiq_options retry: 5
feature_category_not_owned!
def perform(uploader_class_name, subject_class_name, file_field, subject_id)
uploader_class = uploader_class_name.constantize
......
......@@ -5,6 +5,8 @@ module ObjectStorage
include ApplicationWorker
include ObjectStorageQueue
feature_category_not_owned!
SanityCheckError = Class.new(StandardError)
class MigrationResult
......
......@@ -4,6 +4,8 @@ class PagesDomainRemovalCronWorker
include ApplicationWorker
include CronjobQueue
feature_category :pages
def perform
PagesDomain.for_removal.find_each do |domain|
domain.destroy!
......
......@@ -4,6 +4,8 @@ class PagesDomainSslRenewalCronWorker
include ApplicationWorker
include CronjobQueue
feature_category :pages
def perform
return unless ::Gitlab::LetsEncrypt.enabled?
......
......@@ -3,6 +3,8 @@
class PagesDomainSslRenewalWorker
include ApplicationWorker
feature_category :pages
def perform(domain_id)
domain = PagesDomain.find_by_id(domain_id)
return unless domain&.enabled?
......
......@@ -4,6 +4,8 @@ class PagesDomainVerificationCronWorker
include ApplicationWorker
include CronjobQueue
feature_category :pages
def perform
return if Gitlab::Database.read_only?
......
......@@ -3,6 +3,8 @@
class PagesDomainVerificationWorker
include ApplicationWorker
feature_category :pages
# rubocop: disable CodeReuse/ActiveRecord
def perform(domain_id)
return if Gitlab::Database.read_only?
......
......@@ -4,6 +4,7 @@ class PagesWorker
include ApplicationWorker
sidekiq_options retry: 3
feature_category :pages
def perform(action, *arg)
send(action, *arg) # rubocop:disable GitlabSecurity/PublicSend
......
......@@ -5,6 +5,7 @@ class PipelineProcessWorker
include PipelineQueue
queue_namespace :pipeline_processing
feature_category :continuous_integration
# rubocop: disable CodeReuse/ActiveRecord
def perform(pipeline_id, build_ids = nil)
......
......@@ -4,6 +4,8 @@ class PipelineScheduleWorker
include ApplicationWorker
include CronjobQueue
feature_category :continuous_integration
def perform
Ci::PipelineSchedule.runnable_schedules.preloaded.find_in_batches do |schedules|
schedules.each do |schedule|
......
......@@ -4,6 +4,7 @@ class PluginWorker
include ApplicationWorker
sidekiq_options retry: false
feature_category :integrations
def perform(file_name, data)
success, message = Gitlab::Plugin.execute(file_name, data)
......
......@@ -3,6 +3,8 @@
class PostReceive
include ApplicationWorker
feature_category :source_code_management
def perform(gl_repository, identifier, changes, push_options = {})
project, repo_type = Gitlab::GlRepository.parse(gl_repository)
......
......@@ -10,6 +10,8 @@
class ProcessCommitWorker
include ApplicationWorker
feature_category :source_code_management
# project_id - The ID of the project this commit belongs to.
# user_id - The ID of the user that pushed the commit.
# commit_hash - Hash containing commit details to use for constructing a
......
......@@ -5,6 +5,8 @@ class ProjectCacheWorker
include ApplicationWorker
LEASE_TIMEOUT = 15.minutes.to_i
feature_category :source_code_management
# project_id - The ID of the project for which to flush the cache.
# files - An Array containing extra types of files to refresh such as
# `:readme` to flush the README and `:changelog` to flush the
......
......@@ -3,6 +3,8 @@
class ProjectDailyStatisticsWorker
include ApplicationWorker
feature_category :source_code_management
def perform(project_id)
project = Project.find_by_id(project_id)
......
......@@ -4,6 +4,8 @@ class ProjectDestroyWorker
include ApplicationWorker
include ExceptionBacktrace
feature_category :source_code_management
def perform(project_id, user_id, params)
project = Project.find(project_id)
user = User.find(user_id)
......
......@@ -5,6 +5,7 @@ class ProjectExportWorker
include ExceptionBacktrace
sidekiq_options retry: 3
feature_category :source_code_management
def perform(current_user_id, project_id, after_export_strategy = {}, params = {})
current_user = User.find(current_user_id)
......
......@@ -4,6 +4,7 @@ class ProjectServiceWorker
include ApplicationWorker
sidekiq_options dead: false
feature_category :integrations
def perform(hook_id, data)
data = data.with_indifferent_access
......
......@@ -4,6 +4,8 @@
class PropagateServiceTemplateWorker
include ApplicationWorker
feature_category :source_code_management
LEASE_TIMEOUT = 4.hours.to_i
# rubocop: disable CodeReuse/ActiveRecord
......
......@@ -4,6 +4,8 @@ class PruneOldEventsWorker
include ApplicationWorker
include CronjobQueue
feature_category_not_owned!
# rubocop: disable CodeReuse/ActiveRecord
def perform
# The contribution calendar shows at most 12 months of events; we retain 3 years for data integrity.
......
......@@ -6,6 +6,8 @@ class PruneWebHookLogsWorker
include ApplicationWorker
include CronjobQueue
feature_category :integrations
# The maximum number of rows to remove in a single job.
DELETE_LIMIT = 50_000
......
......@@ -3,6 +3,8 @@
class ReactiveCachingWorker
include ApplicationWorker
feature_category_not_owned!
def perform(class_name, id, *args)
klass = begin
class_name.constantize
......
......@@ -5,6 +5,8 @@
class RebaseWorker
include ApplicationWorker
feature_category :source_code_management
def perform(merge_request_id, current_user_id)
current_user = User.find(current_user_id)
merge_request = MergeRequest.find(merge_request_id)
......
......@@ -3,6 +3,8 @@
class RemoteMirrorNotificationWorker
include ApplicationWorker
feature_category :source_code_management
def perform(remote_mirror_id)
remote_mirror = RemoteMirror.find_by_id(remote_mirror_id)
......
......@@ -4,6 +4,8 @@ class RemoveExpiredGroupLinksWorker
include ApplicationWorker
include CronjobQueue
feature_category :authentication_and_authorization
def perform
ProjectGroupLink.expired.destroy_all # rubocop: disable DestroyAll
end
......
......@@ -4,6 +4,8 @@ class RemoveExpiredMembersWorker
include ApplicationWorker
include CronjobQueue
feature_category :authentication_and_authorization
def perform
Member.expired.find_each do |member|
Members::DestroyService.new.execute(member, skip_authorization: true)
......
......@@ -4,6 +4,8 @@ class RemoveUnreferencedLfsObjectsWorker
include ApplicationWorker
include CronjobQueue
feature_category :source_code_management
def perform
LfsObject.destroy_unreferenced
end
......
......@@ -4,6 +4,8 @@ class RepositoryArchiveCacheWorker
include ApplicationWorker
include CronjobQueue
feature_category :source_code_management
def perform
RepositoryArchiveCleanUpService.new.execute
end
......
......@@ -7,6 +7,8 @@ module RepositoryCheck
include ::EachShardWorker
include ExclusiveLeaseGuard
feature_category :source_code_management
LEASE_TIMEOUT = 1.hour
def perform
......
......@@ -4,6 +4,7 @@ class RepositoryCleanupWorker
include ApplicationWorker
sidekiq_options retry: 3
feature_category :source_code_management
sidekiq_retries_exhausted do |msg, err|
next if err.is_a?(ActiveRecord::RecordNotFound)
......
......@@ -6,6 +6,8 @@ class RepositoryForkWorker
include ProjectStartImport
include ProjectImportOptions
feature_category :source_code_management
def perform(*args)
target_project_id = args.shift
target_project = Project.find(target_project_id)
......
......@@ -6,6 +6,8 @@ class RepositoryImportWorker
include ProjectStartImport
include ProjectImportOptions
feature_category :importers
# technical debt: https://gitlab.com/gitlab-org/gitlab/issues/33991
sidekiq_options memory_killer_memory_growth_kb: ENV.fetch('MEMORY_KILLER_REPOSITORY_IMPORT_WORKER_MEMORY_GROWTH_KB', 50).to_i
sidekiq_options memory_killer_max_memory_growth_kb: ENV.fetch('MEMORY_KILLER_REPOSITORY_IMPORT_WORKER_MAX_MEMORY_GROWTH_KB', 300_000).to_i
......
......@@ -4,6 +4,8 @@ class RepositoryRemoveRemoteWorker
include ApplicationWorker
include ExclusiveLeaseGuard
feature_category :source_code_management
LEASE_TIMEOUT = 1.hour
attr_reader :project, :remote_name
......
......@@ -7,6 +7,7 @@ class RepositoryUpdateRemoteMirrorWorker
include Gitlab::ExclusiveLeaseHelpers
sidekiq_options retry: 3, dead: false
feature_category :source_code_management
LOCK_WAIT_TIME = 30.seconds
MAX_TRIES = 3
......
......@@ -4,6 +4,8 @@ class RequestsProfilesWorker
include ApplicationWorker
include CronjobQueue
feature_category :source_code_management
def perform
Gitlab::RequestProfiler.remove_all_profiles
end
......
......@@ -5,6 +5,7 @@ class RunPipelineScheduleWorker
include PipelineQueue
queue_namespace :pipeline_creation
feature_category :continuous_integration
# rubocop: disable CodeReuse/ActiveRecord
def perform(schedule_id, user_id)
......
......@@ -5,6 +5,8 @@ class ScheduleMigrateExternalDiffsWorker
include CronjobQueue
include Gitlab::ExclusiveLeaseHelpers
feature_category :source_code_management
def perform
in_lock(self.class.name.underscore, ttl: 2.hours, retries: 0) do
MergeRequests::MigrateExternalDiffsService.enqueue!
......
......@@ -4,6 +4,8 @@ class StuckCiJobsWorker
include ApplicationWorker
include CronjobQueue
feature_category :continuous_integration
EXCLUSIVE_LEASE_KEY = 'stuck_ci_builds_worker_lease'
BUILD_RUNNING_OUTDATED_TIMEOUT = 1.hour
......
......@@ -4,6 +4,8 @@ class StuckImportJobsWorker
include ApplicationWorker
include CronjobQueue
feature_category :importers
IMPORT_JOBS_EXPIRATION = 15.hours.to_i
def perform
......
......@@ -4,6 +4,8 @@ class StuckMergeJobsWorker
include ApplicationWorker
include CronjobQueue
feature_category :source_code_management
def self.logger
Rails.logger # rubocop:disable Gitlab/RailsLogger
end
......
......@@ -3,6 +3,8 @@
class SystemHookPushWorker
include ApplicationWorker
feature_category :source_code_management
def perform(push_data, hook_id)
SystemHooksService.new.execute_hooks(push_data, hook_id)
end
......
......@@ -4,6 +4,8 @@ class TrendingProjectsWorker
include ApplicationWorker
include CronjobQueue
feature_category :source_code_management
def perform
Rails.logger.info('Refreshing trending projects') # rubocop:disable Gitlab/RailsLogger
......
......@@ -3,6 +3,8 @@
class UpdateExternalPullRequestsWorker
include ApplicationWorker
feature_category :source_code_management
def perform(project_id, user_id, ref)
project = Project.find_by_id(project_id)
return unless project
......
......@@ -5,6 +5,7 @@ class UpdateHeadPipelineForMergeRequestWorker
include PipelineQueue
queue_namespace :pipeline_processing
feature_category :continuous_integration
def perform(merge_request_id)
MergeRequest.find_by_id(merge_request_id).try do |merge_request|
......
......@@ -3,6 +3,8 @@
class UpdateMergeRequestsWorker
include ApplicationWorker
feature_category :source_code_management
LOG_TIME_THRESHOLD = 90 # seconds
# rubocop: disable CodeReuse/ActiveRecord
......
......@@ -4,6 +4,8 @@
class UpdateProjectStatisticsWorker
include ApplicationWorker
feature_category :source_code_management
# project_id - The ID of the project for which to flush the cache.
# statistics - An Array containing columns from ProjectStatistics to
# refresh, if empty all columns will be refreshed
......
......@@ -3,6 +3,8 @@
class UploadChecksumWorker
include ApplicationWorker
feature_category :geo_replication
def perform(upload_id)
upload = Upload.find(upload_id)
upload.calculate_checksum!
......
......@@ -3,6 +3,7 @@
class WebHookWorker
include ApplicationWorker
feature_category :integrations
sidekiq_options retry: 4, dead: false
def perform(hook_id, data, hook_name)
......
---
title: Attribute each Sidekiq worker to a feature category
merge_request: 18462
author:
type: other
#
# This file contains a list of all feature categories in GitLab
# It is generated from the stages file at https://gitlab.com/gitlab-com/www-gitlab-com/raw/master/data/stages.yml.
# If you would like to update it, please run
# `./scripts/update-feature-categories` to generate a new copy
#
# PLEASE DO NOT EDIT THIS FILE MANUALLY.
#
---
- accessibility_testing
- account-management
- agile_portfolio_management
- analysis
- audit_management
- authentication_and_authorization
- auto_devops
- backup_restore
- behavior_analytics
- chaos_engineering
- chatops
- cloud_native_installation
- cluster_cost_optimization
- cluster_monitoring
- code_analytics
- code_quality
- code_review
- collection
- container_network_security
- container_registry
- container_scanning
- continuous_delivery
- continuous_integration
- data_loss_prevention
- dependency_proxy
- dependency_scanning
- design_management
- devops_score
- disaster_recovery
- dynamic_application_security_testing
- error_tracking
- feature_flags
- fuzzing
- geo_replication
- gitaly
- gitter
- groups
- helm_chart_registry
- importers
- incident_management
- incremental_rollout
- infrastructure_as_code
- integration_testing
- integrations
- interactive_application_security_testing
- internationalization
- issue_tracking
- kanban_boards
- kubernetes_configuration
- language_specific
- license_compliance
- live_coding
- load_testing
- logging
- metrics
- omnibus_package
- package_registry
- pages
- quality_management
- release_governance
- release_orchestration
- requirements_management
- review_apps
- runbooks
- runner
- runtime_application_self_protection
- sdk
- search
- secret_detection
- secrets_management
- serverless
- service_desk
- snippets
- source_code_management
- static_application_security_testing
- status_page
- storage_security
- synthetic_monitoring
- system_testing
- templates
- threat_detection
- time_tracking
- tracing
- unit_testing
- usability_testing
- users
- value_stream_management
- vulnerability_database
- vulnerability_management
- web_firewall
- web_ide
- web_performance
- wiki
- workflow_policies
......@@ -61,6 +61,56 @@ the extra jobs will take resources away from jobs from workers that were already
there, if the resources available to the Sidekiq process handling the namespace
are not adjusted appropriately.
## Feature Categorization
Each Sidekiq worker, or one of its ancestor classes, must declare a
`feature_category` attribute. This attribute maps each worker to a feature
category. This is done for error budgeting, alert routing, and team attribution
for Sidekiq workers.
The declaration uses the `feature_category` class method, as shown below.
```ruby
class SomeScheduledTaskWorker
  include ApplicationWorker

  # Declares that this feature is part of the
  # `continuous_integration` feature category
  feature_category :continuous_integration

  # ...
end
```
The list of valid values can be found in the file `config/feature_categories.yml`.
This file is, in turn, generated from the [`stages.yml` from the GitLab Company Handbook
source](https://gitlab.com/gitlab-com/www-gitlab-com/blob/master/data/stages.yml).
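
To quickly check whether a category name is valid, you can load the generated file directly (a sketch, assuming it is run from the repository root; outside Rails the plain path is used instead of `Rails.root`):

```ruby
require 'set'
require 'yaml'

# Load the generated list of categories as symbols, mirroring what the
# worker spec does.
valid_categories = YAML.load_file('config/feature_categories.yml').map(&:to_sym).to_set

valid_categories.include?(:continuous_integration) # => true
valid_categories.include?(:not_a_real_category)    # => false
```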
### Updating `config/feature_categories.yml`
Occasionally new features will be added to GitLab stages. When this occurs, you
can automatically update `config/feature_categories.yml` by running
`scripts/update-feature-categories`. This script will fetch and parse
[`stages.yml`](https://gitlab.com/gitlab-com/www-gitlab-com/blob/master/data/stages.yml)
and generate a new version of the file, which needs to be checked into source control.
### Excluding Sidekiq workers from feature categorization
A few Sidekiq workers that are used across all features cannot be mapped to a
single category. These should be declared as such using the `feature_category_not_owned!`
declaration, as shown below:
```ruby
class SomeCrossCuttingConcernWorker
  include ApplicationWorker

  # Declares that this worker does not map to a feature category
  feature_category_not_owned!

  # ...
end
```
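
Once declared, the attribute can be read back from any worker class, which is a quick way to sanity-check a declaration in a Rails console (expected values shown for workers categorized in this merge request):

```ruby
MergeWorker.get_feature_category             # => :source_code_management
BuildQueueWorker.get_feature_category        # => :continuous_integration
AdminEmailWorker.feature_category_not_owned? # => true
```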
## Tests
Each Sidekiq worker must be tested using RSpec, just like any other class. These
......
......@@ -3,6 +3,8 @@
class AdminEmailsWorker
include ApplicationWorker
feature_category_not_owned!
# rubocop: disable CodeReuse/ActiveRecord
def perform(recipient_id, subject, body)
recipient_list(recipient_id).pluck(:id).uniq.each do |user_id|
......
......@@ -6,6 +6,8 @@ class ClearSharedRunnersMinutesWorker
include ApplicationWorker
include CronjobQueue
feature_category :continuous_integration
def perform
return unless try_obtain_lease
......
......@@ -6,5 +6,6 @@ module GeoQueue
included do
queue_namespace :geo
feature_category :geo_replication
end
end
......@@ -4,6 +4,8 @@ class CreateGithubWebhookWorker
include ApplicationWorker
include GrapePathHelpers::NamedRouteMatcher
feature_category :integrations
attr_reader :project
def perform(project_id)
......
......@@ -4,6 +4,8 @@ module DesignManagement
class NewVersionWorker
include ApplicationWorker
feature_category :design_management
def perform(version_id)
version = DesignManagement::Version.find(version_id)
......
......@@ -3,6 +3,8 @@
class ElasticBatchProjectIndexerWorker
include ApplicationWorker
feature_category :search
# Batch indexing is generally a one-time option, so give finer control over
# queuing and concurrency
......
......@@ -3,6 +3,7 @@
class ElasticCommitIndexerWorker
include ApplicationWorker
feature_category :search
sidekiq_options retry: 2
def perform(project_id, oldrev = nil, newrev = nil, wiki = false)
......
......@@ -8,6 +8,7 @@ class ElasticFullIndexWorker
include ApplicationWorker
sidekiq_options retry: 2
feature_category :search
def perform(start_id, end_id)
return true unless Gitlab::CurrentSettings.elasticsearch_indexing?
......
......@@ -4,6 +4,7 @@ class ElasticIndexerWorker
include Elasticsearch::Model::Client::ClassMethods
sidekiq_options retry: 2
feature_category :search
def perform(operation, class_name, record_id, es_id, options = {})
return true unless Gitlab::CurrentSettings.elasticsearch_indexing?
......
......@@ -3,6 +3,7 @@
class ElasticNamespaceIndexerWorker
include ApplicationWorker
feature_category :search
sidekiq_options retry: 2
def perform(namespace_id, operation)
......
......@@ -3,6 +3,8 @@
class ExportCsvWorker
include ApplicationWorker
feature_category :issue_tracking
def perform(current_user_id, project_id, params)
@current_user = User.find(current_user_id)
@project = Project.find(project_id)
......
......@@ -6,6 +6,8 @@ module Geo
include ExclusiveLeaseGuard
include CronjobQueue
feature_category :geo_replication
LEASE_TIMEOUT = 5.minutes
def perform
......
......@@ -7,6 +7,8 @@ module Geo
include ::Gitlab::Utils::StrongMemoize
include ::Gitlab::Geo::LogHelpers
feature_category :geo_replication
LEASE_TIMEOUT = 5.minutes
def perform
......
......@@ -8,6 +8,8 @@ module Geo
include ::Gitlab::Geo::LogHelpers
include ::EachShardWorker
feature_category :geo_replication
def perform
each_eligible_shard { |shard_name| schedule_job(shard_name) }
end
......
......@@ -5,6 +5,8 @@ module Geo
include ApplicationWorker
include CronjobQueue
feature_category :geo_replication
def perform
Gitlab::Geo::CronManager.new.execute
end
......
......@@ -4,6 +4,8 @@ class HistoricalDataWorker
include ApplicationWorker
include CronjobQueue
feature_category :license_compliance
def perform
return if License.current.nil? || License.current&.trial?
......
......@@ -4,6 +4,7 @@ class ImportSoftwareLicensesWorker
include ApplicationWorker
queue_namespace :cronjob
feature_category :license_compliance
def perform
catalogue.each do |spdx_license|
......
......@@ -5,6 +5,7 @@ module IncidentManagement
include ApplicationWorker
queue_namespace :incident_management
feature_category :incident_management
def perform(project_id, alert)
project = find_project(project_id)
......
......@@ -5,6 +5,7 @@ module IncidentManagement
include ApplicationWorker
queue_namespace :incident_management
feature_category :incident_management
def perform(project_id, alert_hash)
project = find_project(project_id)
......
......@@ -5,6 +5,7 @@ module JiraConnect
include ApplicationWorker
queue_namespace :jira_connect
feature_category :integrations
def perform(project_id, branch_name, commit_shas)
project = Project.find_by_id(project_id)
......
......@@ -5,6 +5,7 @@ module JiraConnect
include ApplicationWorker
queue_namespace :jira_connect
feature_category :integrations
def perform(merge_request_id)
merge_request = MergeRequest.find_by_id(merge_request_id)
......
......@@ -4,6 +4,8 @@ class LdapAllGroupsSyncWorker
include ApplicationWorker
include CronjobQueue
feature_category :authentication_and_authorization
def perform
return unless Gitlab::Auth::LDAP::Config.group_sync_enabled?
......
......@@ -3,6 +3,8 @@
class LdapGroupSyncWorker
include ApplicationWorker
feature_category :authentication_and_authorization
# rubocop: disable CodeReuse/ActiveRecord
def perform(group_ids, provider = nil)
return unless Gitlab::Auth::LDAP::Config.group_sync_enabled?
......
......@@ -4,6 +4,8 @@ class LdapSyncWorker
include ApplicationWorker
include CronjobQueue
feature_category :authentication_and_authorization
# rubocop: disable CodeReuse/ActiveRecord
# rubocop: disable Gitlab/RailsLogger
def perform
......
......@@ -4,6 +4,8 @@ class NewEpicWorker
include ApplicationWorker
include NewIssuable
feature_category :agile_portfolio_management
def perform(epic_id, user_id)
return unless objects_found?(epic_id, user_id)
......
......@@ -6,6 +6,7 @@ class ProjectImportScheduleWorker
include ApplicationWorker
prepend WaitableWorker
feature_category :importers
sidekiq_options retry: false
# rubocop: disable CodeReuse/ActiveRecord
......
......@@ -3,6 +3,8 @@
class ProjectUpdateRepositoryStorageWorker
include ApplicationWorker
feature_category :source_code_management
def perform(project_id, new_repository_storage_key)
project = Project.find(project_id)
......
......@@ -4,6 +4,8 @@ class PseudonymizerWorker
include ApplicationWorker
include CronjobQueue
feature_category :integrations
def perform
return unless Gitlab::CurrentSettings.pseudonymizer_enabled?
......
......@@ -3,6 +3,8 @@
class RefreshLicenseComplianceChecksWorker
include ApplicationWorker
feature_category :license_compliance
def perform(project_id)
project = Project.find(project_id)
project_approval_rule = project
......
......@@ -3,6 +3,8 @@
class RepositoryPushAuditEventWorker
include ApplicationWorker
feature_category :authentication_and_authorization
def perform(changes, project_id, user_id)
project = Project.find(project_id)
user = User.find(user_id)
......
......@@ -7,6 +7,8 @@ class RepositoryUpdateMirrorWorker
include Gitlab::ShellAdapter
include ProjectStartImport
feature_category :importers
# Retry not necessary. It will try again at the next update interval.
sidekiq_options retry: false, status_expiration: StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION
......
......@@ -6,6 +6,8 @@ class SyncSecurityReportsToReportApprovalRulesWorker
include ApplicationWorker
include PipelineQueue
feature_category :static_application_security_testing
def perform(pipeline_id)
pipeline = Ci::Pipeline.find_by_id(pipeline_id)
return unless pipeline
......
......@@ -4,6 +4,8 @@ class UpdateAllMirrorsWorker
include ApplicationWorker
include CronjobQueue
feature_category :source_code_management
LEASE_TIMEOUT = 5.minutes
SCHEDULE_WAIT_TIMEOUT = 4.minutes
LEASE_KEY = 'update_all_mirrors'.freeze
......
......@@ -4,6 +4,8 @@ class UpdateMaxSeatsUsedForGitlabComSubscriptionsWorker
include ApplicationWorker
include CronjobQueue
feature_category :license_compliance
# rubocop: disable CodeReuse/ActiveRecord
def perform
return if ::Gitlab::Database.read_only?
......
......@@ -23,6 +23,8 @@ module Gitlab
include ProjectImportOptions # This marks the project as failed after too many tries
include Gitlab::ExclusiveLeaseHelpers
feature_category :importers
class << self
def schedule(project_id, *args)
perform_async(project_id, *args)
......
#!/usr/bin/env ruby

require 'uri'
require 'net/http'
require 'yaml'

url = URI("https://gitlab.com/gitlab-com/www-gitlab-com/raw/master/data/stages.yml")

http = Net::HTTP.new(url.host, url.port)
http.use_ssl = true

request = Net::HTTP::Get.new(url)
response = http.request(request)

stages_doc = YAML.safe_load(response.read_body)
feature_categories = stages_doc["stages"].values
                       .flat_map { |stage| stage["groups"].values }
                       .flat_map { |group| group["categories"] }
                       .select(&:itself)
                       .uniq
                       .sort

File.open("#{__dir__}/../config/feature_categories.yml", 'w') do |file|
  file.puts(<<~HEADER_COMMENT)
    #
    # This file contains a list of all feature categories in GitLab
    # It is generated from the stages file at #{url}.
    # If you would like to update it, please run
    # `./scripts/update-feature-categories` to generate a new copy
    #
    # PLEASE DO NOT EDIT THIS FILE MANUALLY.
    #
  HEADER_COMMENT

  file.write(feature_categories.to_yaml)
end
......@@ -35,4 +35,32 @@ describe 'Every Sidekiq worker' do
expect(config_queues).to include(queue).or(include(queue_namespace))
end
end
describe "feature category declarations" do
let(:feature_categories) do
YAML.load_file(Rails.root.join('config', 'feature_categories.yml')).map(&:to_sym).to_set
end
# All Sidekiq worker classes should declare a valid `feature_category`
# or explicitely be excluded with the `feature_category_not_owned!` annotation.
# Please see doc/development/sidekiq_style_guide.md#Feature-Categorization for more details.
it 'has a feature_category or feature_category_not_owned! attribute', :aggregate_failures do
Gitlab::SidekiqConfig.workers.each do |worker|
expect(worker.get_feature_category).to be_a(Symbol), "expected #{worker.inspect} to declare a feature_category or feature_category_not_owned!"
end
end
# All Sidekiq worker classes should declare a valid `feature_category`.
# The category should match a value in `config/feature_categories.yml`.
# Please see doc/development/sidekiq_style_guide.md#Feature-Categorization for more details.
it 'has a feature_category that maps to a value in feature_categories.yml', :aggregate_failures do
workers_with_feature_categories = Gitlab::SidekiqConfig.workers
.select(&:get_feature_category)
.reject(&:feature_category_not_owned?)
workers_with_feature_categories.each do |worker|
expect(feature_categories).to include(worker.get_feature_category), "expected #{worker.inspect} to declare a valid feature_category, but got #{worker.get_feature_category}"
end
end
end
end