Commit aea69299 authored by Allison Browne's avatar Allison Browne

Switch to nested module and class definition

Switch to nested module and classes instead of compact
to follow this guideline:

https://github.com/rubocop/ruby-style-guide#namespace-definition
parent a6515f14
# frozen_string_literal: true

module Ci
  module JobArtifacts
    # Destroys all expired job artifacts across the instance in small batches,
    # under an exclusive lease so only one CRON-triggered run executes at a time.
    class DestroyAllExpiredService
      include ::Gitlab::ExclusiveLeaseHelpers
      include ::Gitlab::LoopHelpers

      BATCH_SIZE = 100
      LOOP_TIMEOUT = 5.minutes
      LOOP_LIMIT = 1000
      EXCLUSIVE_LOCK_KEY = 'expired_job_artifacts:destroy:lock'
      LOCK_TIMEOUT = 6.minutes

      def initialize
        # Running total of artifacts destroyed, accumulated across batches.
        @removed_artifacts_count = 0
      end

      ##
      # Destroy expired job artifacts on GitLab instance
      #
      # This destroy process cannot run for more than 6 minutes. This is for
      # preventing multiple `ExpireBuildArtifactsWorker` CRON jobs run concurrently,
      # which is scheduled every 7 minutes.
      #
      # Returns the total number of artifacts destroyed in this run.
      def execute
        in_lock(EXCLUSIVE_LOCK_KEY, ttl: LOCK_TIMEOUT, retries: 1) do
          destroy_job_artifacts_with_slow_iteration(Time.current)
        end

        @removed_artifacts_count
      end

      private

      # Iterates expired artifacts in batches ordered by expire_at, destroying
      # each batch until either LOOP_TIMEOUT elapses or LOOP_LIMIT batches ran.
      def destroy_job_artifacts_with_slow_iteration(start_at)
        Ci::JobArtifact.expired_before(start_at).each_batch(of: BATCH_SIZE, column: :expire_at, order: :desc) do |relation, index|
          # For performance reasons, join with ci_pipelines after the batch is queried.
          # See: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/47496
          artifacts = relation.unlocked

          service_response = destroy_batch_async(artifacts)
          @removed_artifacts_count += service_response[:destroyed_artifacts_count]

          break if loop_timeout?(start_at)
          break if index >= LOOP_LIMIT
        end
      end

      # Delegates actual destruction of one batch to DestroyBatchService.
      def destroy_batch_async(artifacts)
        Ci::JobArtifacts::DestroyBatchService.new(artifacts).execute
      end

      # True once more than LOOP_TIMEOUT has elapsed since the run started.
      def loop_timeout?(start_at)
        Time.current > start_at + LOOP_TIMEOUT
      end
    end
  end
end
# frozen_string_literal: true

module Ci
  module JobArtifacts
    # Destroys one batch of job artifacts: queues their object-storage files
    # for asynchronous deletion, removes the database rows, and updates
    # project statistics and monitoring counters.
    class DestroyBatchService
      include BaseServiceUtility
      include ::Gitlab::Utils::StrongMemoize

      # Danger: Private - Should only be called in Ci Services that pass a batch of job artifacts
      # Not for use outside of the Ci:: namespace
      # Adds the passed batch of job artifacts to the `ci_deleted_objects` table
      # for asynchronous destruction of the objects in Object Storage via the `Ci::DeleteObjectsService`
      # and then deletes the batch of related `ci_job_artifacts` records.
      # Params:
      # +job_artifacts+:: A relation of job artifacts to destroy (fewer than MAX_JOB_ARTIFACT_BATCH_SIZE)
      # +pick_up_at+:: When to pick up for deletion of files
      # Returns:
      # +Hash+:: A hash with status and destroyed_artifacts_count keys
      def initialize(job_artifacts, pick_up_at: nil)
        # Materialize with destroy preloads so later grouping/summing does no extra queries.
        @job_artifacts = job_artifacts.with_destroy_preloads.to_a
        @pick_up_at = pick_up_at
      end

      # rubocop: disable CodeReuse/ActiveRecord
      def execute
        return success(destroyed_artifacts_count: artifacts_count) if @job_artifacts.empty?

        Ci::DeletedObject.transaction do
          Ci::DeletedObject.bulk_import(@job_artifacts, @pick_up_at)
          Ci::JobArtifact.id_in(@job_artifacts.map(&:id)).delete_all
          destroy_related_records(@job_artifacts)
        end

        # This is executed outside of the transaction because it depends on Redis
        update_project_statistics
        increment_monitoring_statistics(artifacts_count)

        success(destroyed_artifacts_count: artifacts_count)
      end
      # rubocop: enable CodeReuse/ActiveRecord

      private

      # This method is implemented in EE and it must do only database work
      def destroy_related_records(artifacts); end

      # Decrements each affected project's artifact-size statistic by the
      # total size of that project's destroyed artifacts.
      def update_project_statistics
        artifacts_by_project = @job_artifacts.group_by(&:project)
        artifacts_by_project.each do |project, artifacts|
          delta = -artifacts.sum { |artifact| artifact.size.to_i }
          ProjectStatistics.increment_statistic(
            project, Ci::JobArtifact.project_statistics_name, delta)
        end
      end

      def increment_monitoring_statistics(size)
        metrics.increment_destroyed_artifacts(size)
      end

      def metrics
        @metrics ||= ::Gitlab::Ci::Artifacts::Metrics.new
      end

      # Memoized batch size; safe because @job_artifacts is frozen-in-place
      # as an Array after initialize.
      def artifacts_count
        strong_memoize(:artifacts_count) do
          @job_artifacts.count
        end
      end
    end
  end
end
......
...@@ -2,20 +2,22 @@ ...@@ -2,20 +2,22 @@
module EE
  module Ci
    module JobArtifacts
      # EE extension of Ci::JobArtifacts::DestroyBatchService: additionally
      # deletes security findings tied to the destroyed artifacts' builds.
      # Must do only database work (runs inside the caller's transaction).
      module DestroyBatchService
        extend ::Gitlab::Utils::Override

        private

        override :destroy_related_records
        def destroy_related_records(artifacts)
          destroy_security_findings(artifacts)
        end

        # Bulk-deletes Security::Finding rows for the builds that produced
        # the given artifacts.
        def destroy_security_findings(artifacts)
          job_ids = artifacts.map(&:job_id)

          ::Security::Finding.by_build_ids(job_ids).delete_all
        end
      end
    end
  end
end
......
...@@ -24,7 +24,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s ...@@ -24,7 +24,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
job = create(:ci_build, pipeline: artifact.job.pipeline) job = create(:ci_build, pipeline: artifact.job.pipeline)
create(:ci_job_artifact, :archive, :expired, job: job) create(:ci_job_artifact, :archive, :expired, job: job)
stub_const('Ci::JobArtifacts::DestroyAllExpiredService::LOOP_LIMIT', 1) stub_const("#{described_class}::LOOP_LIMIT", 1)
end end
it 'performs the smallest number of queries for job_artifacts' do it 'performs the smallest number of queries for job_artifacts' do
...@@ -113,7 +113,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s ...@@ -113,7 +113,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
context 'when failed to destroy artifact' do context 'when failed to destroy artifact' do
before do before do
stub_const('Ci::JobArtifacts::DestroyAllExpiredService::LOOP_LIMIT', 10) stub_const("#{described_class}::LOOP_LIMIT", 10)
end end
context 'when the import fails' do context 'when the import fails' do
...@@ -159,8 +159,8 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s ...@@ -159,8 +159,8 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) } let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
before do before do
stub_const('Ci::JobArtifacts::DestroyAllExpiredService::LOOP_TIMEOUT', 0.seconds) stub_const("#{described_class}::LOOP_TIMEOUT", 0.seconds)
stub_const('Ci::JobArtifacts::DestroyAllExpiredService::BATCH_SIZE', 1) stub_const("#{described_class}::BATCH_SIZE", 1)
second_artifact.job.pipeline.unlocked! second_artifact.job.pipeline.unlocked!
end end
...@@ -176,8 +176,8 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s ...@@ -176,8 +176,8 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
context 'when loop reached loop limit' do context 'when loop reached loop limit' do
before do before do
stub_const('Ci::JobArtifacts::DestroyAllExpiredService::LOOP_LIMIT', 1) stub_const("#{described_class}::LOOP_LIMIT", 1)
stub_const('Ci::JobArtifacts::DestroyAllExpiredService::BATCH_SIZE', 1) stub_const("#{described_class}::BATCH_SIZE", 1)
second_artifact.job.pipeline.unlocked! second_artifact.job.pipeline.unlocked!
end end
...@@ -209,7 +209,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s ...@@ -209,7 +209,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
context 'when there are artifacts more than batch sizes' do context 'when there are artifacts more than batch sizes' do
before do before do
stub_const('Ci::JobArtifacts::DestroyAllExpiredService::BATCH_SIZE', 1) stub_const("#{described_class}::BATCH_SIZE", 1)
second_artifact.job.pipeline.unlocked! second_artifact.job.pipeline.unlocked!
end end
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment