Commit aa6fe0e3 authored by Igor Drozdov

Merge branch '345755-add-package-files-cleanup-background-jobs' into 'master'

Add package file cleanup jobs

See merge request gitlab-org/gitlab!77212
parents 19964f23 d039bfa2
......@@ -365,6 +365,10 @@ class ApplicationSetting < ApplicationRecord
allow_nil: false,
numericality: { only_integer: true, greater_than_or_equal_to: 0 }
+validates :packages_cleanup_package_file_worker_capacity,
+allow_nil: false,
+numericality: { only_integer: true, greater_than_or_equal_to: 0 }
validates :invisible_captcha_enabled,
inclusion: { in: [true, false], message: _('must be a boolean value') }
......
# frozen_string_literal: true
module Packages
module Destructible
extend ActiveSupport::Concern
class_methods do
def next_pending_destruction(order_by: nil)
set = pending_destruction.limit(1).lock('FOR UPDATE SKIP LOCKED')
set = set.order(order_by) if order_by
set.take
end
end
end
end
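The 'FOR UPDATE SKIP LOCKED' clause is what makes this concern safe to call from many workers at once: each caller locks the single row it selects, and a competing transaction skips locked rows instead of blocking on them. A minimal sketch of how two concurrent workers end up claiming distinct rows (illustrative only, not part of the MR; Packages::PackageFile stands in for any model that includes the concern):

# Sketch: run inside a transaction so the FOR UPDATE row lock is held
# while the item is claimed.
Packages::PackageFile.transaction do
  file = Packages::PackageFile.next_pending_destruction(order_by: :updated_at)
  # A second worker running the same query now skips this locked row
  # (SKIP LOCKED) and takes the next pending_destruction row instead.
  file&.update!(status: :processing)
end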
......@@ -7,16 +7,10 @@ module TtlExpirable
validates :status, presence: true
default_value_for :read_at, Time.zone.now
-enum status: { default: 0, expired: 1, processing: 2, error: 3 }
+enum status: { default: 0, pending_destruction: 1, processing: 2, error: 3 }
scope :read_before, ->(number_of_days) { where("read_at <= ?", Time.zone.now - number_of_days.days) }
scope :active, -> { where(status: :default) }
-scope :lock_next_by, ->(sort) do
-order(sort)
-.limit(1)
-.lock('FOR UPDATE SKIP LOCKED')
-end
end
def read!
......
......@@ -3,6 +3,7 @@
class DependencyProxy::Blob < ApplicationRecord
include FileStoreMounter
include TtlExpirable
+include Packages::Destructible
include EachBatch
belongs_to :group
......
......@@ -3,6 +3,7 @@
class DependencyProxy::Manifest < ApplicationRecord
include FileStoreMounter
include TtlExpirable
+include Packages::Destructible
include EachBatch
belongs_to :group
......
......@@ -3,6 +3,7 @@ class Packages::PackageFile < ApplicationRecord
include UpdateProjectStatistics
include FileStoreMounter
include Packages::Installable
+include Packages::Destructible
INSTALLABLE_STATUSES = [:default].freeze
......@@ -11,7 +12,7 @@ class Packages::PackageFile < ApplicationRecord
delegate :file_type, :dsc?, :component, :architecture, :fields, to: :debian_file_metadatum, prefix: :debian
delegate :channel, :metadata, to: :helm_file_metadatum, prefix: :helm
-enum status: { default: 0, pending_destruction: 1 }
+enum status: { default: 0, pending_destruction: 1, processing: 2, error: 3 }
belongs_to :package
......
......@@ -444,6 +444,15 @@
:weight: 1
:idempotent:
:tags: []
+- :name: cronjob:packages_cleanup_package_registry
+:worker_name: Packages::CleanupPackageRegistryWorker
+:feature_category: :package_registry
+:has_external_dependencies:
+:urgency: :low
+:resource_boundary: :unknown
+:weight: 1
+:idempotent: true
+:tags: []
- :name: cronjob:packages_composer_cache_cleanup
:worker_name: Packages::Composer::CacheCleanupWorker
:feature_category: :package_registry
......@@ -1366,6 +1375,15 @@
:weight: 1
:idempotent:
:tags: []
+- :name: package_cleanup:packages_cleanup_package_file
+:worker_name: Packages::CleanupPackageFileWorker
+:feature_category: :package_registry
+:has_external_dependencies:
+:urgency: :low
+:resource_boundary: :unknown
+:weight: 1
+:idempotent: true
+:tags: []
- :name: package_repositories:packages_debian_generate_distribution
:worker_name: Packages::Debian::GenerateDistributionWorker
:feature_category: :package_registry
......
......@@ -10,7 +10,7 @@ module DependencyProxy
def expire_artifacts(collection)
collection.each_batch(of: UPDATE_BATCH_SIZE) do |batch|
-batch.update_all(status: :expired)
+batch.update_all(status: :pending_destruction)
end
end
end
......
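Worth noting about the hunk above: update_all issues one direct UPDATE per each_batch id range and bypasses ActiveRecord callbacks and timestamp bumping, so rows keep their original updated_at. That is what later lets next_pending_destruction(order_by: :updated_at) drain the oldest entries first. A rough sketch of the difference (the WHERE shape is an approximation of each_batch internals):

# update_all emits plain SQL and leaves updated_at untouched:
#   UPDATE dependency_proxy_blobs SET status = 1 /* pending_destruction */
#   WHERE id BETWEEN <batch start> AND <batch end>
# whereas batch.each { |row| row.update!(status: :pending_destruction) }
# would run callbacks and bump updated_at on every row.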
# frozen_string_literal: true
-module DependencyProxy
-module CleanupWorker
+module Packages
+module CleanupArtifactWorker
extend ActiveSupport::Concern
include LimitedCapacity::Worker
include Gitlab::Utils::StrongMemoize
def perform_work
......@@ -15,12 +16,8 @@ module DependencyProxy
artifact&.error!
end
-def max_running_jobs
-::Gitlab::CurrentSettings.dependency_proxy_ttl_group_policy_worker_capacity
-end
def remaining_work_count
-expired_artifacts.limit(max_running_jobs + 1).count
+artifacts_pending_destruction.limit(max_running_jobs + 1).count
end
private
......@@ -52,12 +49,12 @@ module DependencyProxy
end
end
-def expired_artifacts
-model.expired
+def artifacts_pending_destruction
+model.pending_destruction
end
def next_item
-expired_artifacts.lock_next_by(:updated_at).first
+model.next_pending_destruction(order_by: :updated_at)
end
end
end
......@@ -3,9 +3,8 @@
module DependencyProxy
class CleanupBlobWorker
include ApplicationWorker
-include LimitedCapacity::Worker
+include ::Packages::CleanupArtifactWorker
include Gitlab::Utils::StrongMemoize
-include DependencyProxy::CleanupWorker
data_consistency :always
......@@ -17,6 +16,10 @@ module DependencyProxy
worker_resource_boundary :unknown
idempotent!
+def max_running_jobs
+::Gitlab::CurrentSettings.dependency_proxy_ttl_group_policy_worker_capacity
+end
private
def model
......
......@@ -11,8 +11,8 @@ module DependencyProxy
feature_category :dependency_proxy
def perform
-enqueue_blob_cleanup_job if DependencyProxy::Blob.expired.any?
-enqueue_manifest_cleanup_job if DependencyProxy::Manifest.expired.any?
+enqueue_blob_cleanup_job if DependencyProxy::Blob.pending_destruction.any?
+enqueue_manifest_cleanup_job if DependencyProxy::Manifest.pending_destruction.any?
end
private
......
......@@ -3,9 +3,8 @@
module DependencyProxy
class CleanupManifestWorker
include ApplicationWorker
-include LimitedCapacity::Worker
+include ::Packages::CleanupArtifactWorker
include Gitlab::Utils::StrongMemoize
-include DependencyProxy::CleanupWorker
data_consistency :always
......@@ -17,6 +16,10 @@ module DependencyProxy
worker_resource_boundary :unknown
idempotent!
+def max_running_jobs
+::Gitlab::CurrentSettings.dependency_proxy_ttl_group_policy_worker_capacity
+end
private
def model
......
......@@ -26,8 +26,8 @@ module DependencyProxy
def log_counts
use_replica_if_available do
-expired_blob_count = DependencyProxy::Blob.expired.count
-expired_manifest_count = DependencyProxy::Manifest.expired.count
+expired_blob_count = DependencyProxy::Blob.pending_destruction.count
+expired_manifest_count = DependencyProxy::Manifest.pending_destruction.count
processing_blob_count = DependencyProxy::Blob.processing.count
processing_manifest_count = DependencyProxy::Manifest.processing.count
error_blob_count = DependencyProxy::Blob.error.count
......
# frozen_string_literal: true
module Packages
class CleanupPackageFileWorker
include ApplicationWorker
include ::Packages::CleanupArtifactWorker
include Gitlab::Utils::StrongMemoize
data_consistency :always
queue_namespace :package_cleanup
feature_category :package_registry
urgency :low
worker_resource_boundary :unknown
idempotent!
def max_running_jobs
::Gitlab::CurrentSettings.packages_cleanup_package_file_worker_capacity
end
private
def model
Packages::PackageFile
end
def next_item
model.next_pending_destruction
end
def log_metadata(package_file)
log_extra_metadata_on_done(:package_file_id, package_file.id)
log_extra_metadata_on_done(:package_id, package_file.package_id)
end
def log_cleanup_item(package_file)
logger.info(
structured_payload(
package_id: package_file.package_id,
package_file_id: package_file.id
)
)
end
end
end
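For orientation, the LimitedCapacity::Worker machinery pulled in through ::Packages::CleanupArtifactWorker is what gives max_running_jobs meaning: perform_with_capacity fans out up to that many parallel jobs, each job processes one item via perform_work, and jobs re-enqueue themselves while remaining_work_count reports rows left. A simplified lifecycle sketch (the real scheduling logic lives in the framework; this is not its implementation):

# Simplified view of the LimitedCapacity flow:
Packages::CleanupPackageFileWorker.perform_with_capacity
# 1. reads max_running_jobs (the new application setting, default 2)
# 2. enqueues up to that many jobs in parallel
# 3. each job calls perform_work, destroying one pending_destruction file
# 4. each job re-enqueues itself while remaining_work_count > 0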
# frozen_string_literal: true
module Packages
class CleanupPackageRegistryWorker
include ApplicationWorker
include CronjobQueue # rubocop:disable Scalability/CronWorkerContext
data_consistency :always
idempotent!
feature_category :package_registry
def perform
enqueue_package_file_cleanup_job if Packages::PackageFile.pending_destruction.exists?
log_counts
end
private
def enqueue_package_file_cleanup_job
Packages::CleanupPackageFileWorker.perform_with_capacity
end
def log_counts
use_replica_if_available do
pending_destruction_package_files_count = Packages::PackageFile.pending_destruction.count
processing_package_files_count = Packages::PackageFile.processing.count
error_package_files_count = Packages::PackageFile.error.count
log_extra_metadata_on_done(:pending_destruction_package_files_count, pending_destruction_package_files_count)
log_extra_metadata_on_done(:processing_package_files_count, processing_package_files_count)
log_extra_metadata_on_done(:error_package_files_count, error_package_files_count)
end
end
def use_replica_if_available(&block)
::Gitlab::Database::LoadBalancing::Session.current.use_replicas_for_read_queries(&block)
end
end
end
......@@ -545,6 +545,10 @@ Settings.cron_jobs['image_ttl_group_policy_worker']['job_class'] = 'DependencyPr
Settings.cron_jobs['cleanup_dependency_proxy_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['cleanup_dependency_proxy_worker']['cron'] ||= '20 3,15 * * *'
Settings.cron_jobs['cleanup_dependency_proxy_worker']['job_class'] = 'DependencyProxy::CleanupDependencyProxyWorker'
+Settings.cron_jobs['cleanup_package_registry_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['cleanup_package_registry_worker']['cron'] ||= '20 0,12 * * *'
+Settings.cron_jobs['cleanup_package_registry_worker']['job_class'] = 'Packages::CleanupPackageRegistryWorker'
Settings.cron_jobs['x509_issuer_crl_check_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['x509_issuer_crl_check_worker']['cron'] ||= '30 1 * * *'
Settings.cron_jobs['x509_issuer_crl_check_worker']['job_class'] = 'X509IssuerCrlCheckWorker'
......
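Decoded (cron fields are minute, hour, day of month, month, day of week), the schedule added above runs the new cleanup twice a day, offset from the dependency proxy cleanup:

# '20 0,12 * * *'  => 00:20 and 12:20 server time (package registry cleanup)
# '20 3,15 * * *'  => 03:20 and 15:20 server time (dependency proxy cleanup)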
......@@ -297,6 +297,8 @@
- 1
- - object_storage
- 1
+- - package_cleanup
+  - 1
- - package_repositories
- 1
- - packages_composer_cache_update
......
# frozen_string_literal: true
class AddPackagesCleanupPackageFileWorkerCapacityToApplicationSettings < Gitlab::Database::Migration[1.0]
enable_lock_retries!
def change
add_column :application_settings,
:packages_cleanup_package_file_worker_capacity,
:smallint,
default: 2,
null: false
end
end
# frozen_string_literal: true
class AddPackagesCleanupPackageFileWorkerCapacityCheckConstraintToAppSettings < Gitlab::Database::Migration[1.0]
CONSTRAINT_NAME = 'app_settings_p_cleanup_package_file_worker_capacity_positive'
disable_ddl_transaction!
def up
add_check_constraint :application_settings, 'packages_cleanup_package_file_worker_capacity >= 0', CONSTRAINT_NAME
end
def down
remove_check_constraint :application_settings, CONSTRAINT_NAME
end
end
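Because the migration disables the DDL transaction, GitLab's add_check_constraint helper can follow the usual two-step PostgreSQL pattern: add the constraint NOT VALID (no long blocking scan), then validate it in a separate statement. Approximately the SQL involved, sketched as an assumption rather than the helper's exact output:

# ALTER TABLE application_settings
#   ADD CONSTRAINT app_settings_p_cleanup_package_file_worker_capacity_positive
#   CHECK (packages_cleanup_package_file_worker_capacity >= 0) NOT VALID;
# ALTER TABLE application_settings
#   VALIDATE CONSTRAINT app_settings_p_cleanup_package_file_worker_capacity_positive;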
# frozen_string_literal: true
class AddStatusOnlyIndexToPackagesPackageFiles < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
INDEX_NAME = 'index_packages_package_files_on_status'
def up
add_concurrent_index :packages_package_files, :status, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :packages_package_files, name: INDEX_NAME
end
end
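Similarly, add_concurrent_index wraps PostgreSQL's CREATE INDEX CONCURRENTLY, which cannot run inside a transaction (hence disable_ddl_transaction!) and builds the index without blocking writes to packages_package_files. Roughly the equivalent SQL (a sketch):

# CREATE INDEX CONCURRENTLY index_packages_package_files_on_status
#   ON packages_package_files USING btree (status);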
3709c5c229e45ff0f594d6291d0b2b9a167b3bf4f5b29158b9abdac333a638b8
\ No newline at end of file
f4ac776ec4213d6fcd07ccfa913f51e1386ff212bf47d27817b24b501a78443b
\ No newline at end of file
f427f4c0d75308f7c97685e10e27a735dcf284714acd49659f62a6f7752234ef
\ No newline at end of file
......@@ -10482,9 +10482,11 @@ CREATE TABLE application_settings (
static_objects_external_storage_auth_token_encrypted text,
future_subscriptions jsonb DEFAULT '[]'::jsonb NOT NULL,
user_email_lookup_limit integer DEFAULT 60 NOT NULL,
+packages_cleanup_package_file_worker_capacity smallint DEFAULT 2 NOT NULL,
CONSTRAINT app_settings_container_reg_cleanup_tags_max_list_size_positive CHECK ((container_registry_cleanup_tags_service_max_list_size >= 0)),
CONSTRAINT app_settings_dep_proxy_ttl_policies_worker_capacity_positive CHECK ((dependency_proxy_ttl_group_policy_worker_capacity >= 0)),
CONSTRAINT app_settings_ext_pipeline_validation_service_url_text_limit CHECK ((char_length(external_pipeline_validation_service_url) <= 255)),
+CONSTRAINT app_settings_p_cleanup_package_file_worker_capacity_positive CHECK ((packages_cleanup_package_file_worker_capacity >= 0)),
CONSTRAINT app_settings_registry_exp_policies_worker_capacity_positive CHECK ((container_registry_expiration_policies_worker_capacity >= 0)),
CONSTRAINT app_settings_yaml_max_depth_positive CHECK ((max_yaml_depth > 0)),
CONSTRAINT app_settings_yaml_max_size_positive CHECK ((max_yaml_size_bytes > 0)),
......@@ -27017,6 +27019,8 @@ CREATE INDEX index_packages_package_files_on_package_id_id ON packages_package_f
CREATE INDEX index_packages_package_files_on_package_id_status_and_id ON packages_package_files USING btree (package_id, status, id);
+CREATE INDEX index_packages_package_files_on_status ON packages_package_files USING btree (status);
CREATE INDEX index_packages_package_files_on_verification_state ON packages_package_files USING btree (verification_state);
CREATE INDEX index_packages_packages_on_creator_id ON packages_packages USING btree (creator_id);
......@@ -8,8 +8,8 @@ FactoryBot.define do
file_name { 'a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4.gz' }
status { :default }
-trait :expired do
-status { :expired }
+trait :pending_destruction do
+status { :pending_destruction }
end
end
......@@ -22,8 +22,8 @@ FactoryBot.define do
content_type { 'application/vnd.docker.distribution.manifest.v2+json' }
status { :default }
-trait :expired do
-status { :expired }
+trait :pending_destruction do
+status { :pending_destruction }
end
end
end
......@@ -80,6 +80,9 @@ RSpec.describe ApplicationSetting do
it { is_expected.to validate_numericality_of(:dependency_proxy_ttl_group_policy_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.not_to allow_value(nil).for(:dependency_proxy_ttl_group_policy_worker_capacity) }
+it { is_expected.to validate_numericality_of(:packages_cleanup_package_file_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
+it { is_expected.not_to allow_value(nil).for(:packages_cleanup_package_file_worker_capacity) }
it { is_expected.to validate_numericality_of(:snippet_size_limit).only_integer.is_greater_than(0) }
it { is_expected.to validate_numericality_of(:wiki_page_max_content_bytes).only_integer.is_greater_than_or_equal_to(1024) }
it { is_expected.to validate_presence_of(:max_artifacts_size) }
......
......@@ -3,6 +3,7 @@ require 'spec_helper'
RSpec.describe DependencyProxy::Blob, type: :model do
it_behaves_like 'ttl_expirable'
+it_behaves_like 'destructible', factory: :dependency_proxy_blob
describe 'relationships' do
it { is_expected.to belong_to(:group) }
......
......@@ -3,6 +3,7 @@ require 'spec_helper'
RSpec.describe DependencyProxy::Manifest, type: :model do
it_behaves_like 'ttl_expirable'
+it_behaves_like 'destructible', factory: :dependency_proxy_manifest
describe 'relationships' do
it { is_expected.to belong_to(:group) }
......
......@@ -11,6 +11,7 @@ RSpec.describe Packages::PackageFile, type: :model do
let_it_be(:debian_package) { create(:debian_package, project: project) }
it_behaves_like 'having unique enum values'
+it_behaves_like 'destructible', factory: :package_file
describe 'relationships' do
it { is_expected.to belong_to(:package) }
......
......@@ -91,9 +91,9 @@ RSpec.describe DependencyProxy::FindCachedManifestService do
it_behaves_like 'returning no manifest'
end
-context 'when the cached manifest is expired' do
+context 'when the cached manifest is pending destruction' do
before do
-dependency_proxy_manifest.update_column(:status, DependencyProxy::Manifest.statuses[:expired])
+dependency_proxy_manifest.update_column(:status, DependencyProxy::Manifest.statuses[:pending_destruction])
stub_manifest_head(image, tag, headers: headers)
stub_manifest_download(image, tag, headers: headers)
end
......
# frozen_string_literal: true
RSpec.shared_examples 'destructible' do |factory:|
let_it_be(:item1) { create(factory, created_at: 1.month.ago, updated_at: 1.day.ago) }
let_it_be(:item2) { create(factory, created_at: 1.year.ago, updated_at: 1.year.ago) }
let_it_be(:item3) { create(factory, :pending_destruction, created_at: 2.years.ago, updated_at: 1.month.ago) }
let_it_be(:item4) { create(factory, :pending_destruction, created_at: 3.years.ago, updated_at: 2.weeks.ago) }
describe '.next_pending_destruction' do
it 'returns the oldest item pending destruction based on updated_at' do
expect(described_class.next_pending_destruction(order_by: :updated_at)).to eq(item3)
end
it 'returns the oldest item pending destruction based on created_at' do
expect(described_class.next_pending_destruction(order_by: :created_at)).to eq(item4)
end
end
end
......@@ -29,7 +29,7 @@ RSpec.shared_examples 'ttl_expirable' do
describe '.active' do
# rubocop:disable Rails/SaveBang
let_it_be(:item1) { create(class_symbol) }
-let_it_be(:item2) { create(class_symbol, :expired) }
+let_it_be(:item2) { create(class_symbol, :pending_destruction) }
let_it_be(:item3) { create(class_symbol, status: :error) }
# rubocop:enable Rails/SaveBang
......@@ -38,17 +38,6 @@ RSpec.shared_examples 'ttl_expirable' do
end
end
-describe '.lock_next_by' do
-let_it_be(:item1) { create(class_symbol, created_at: 1.month.ago, updated_at: 1.day.ago) }
-let_it_be(:item2) { create(class_symbol, created_at: 1.year.ago, updated_at: 1.year.ago) }
-let_it_be(:item3) { create(class_symbol, created_at: 2.years.ago, updated_at: 1.month.ago) }
-it 'returns the first item sorted by the argument' do
-expect(described_class.lock_next_by(:updated_at)).to contain_exactly(item2)
-expect(described_class.lock_next_by(:created_at)).to contain_exactly(item3)
-end
-end
describe '#read', :freeze_time do
let_it_be(:old_read_at) { 1.day.ago }
let_it_be(:item1) { create(class_symbol, read_at: old_read_at) }
......
......@@ -13,12 +13,12 @@ RSpec.shared_examples 'dependency_proxy_cleanup_worker' do
end
context 'with work to do' do
-let_it_be(:artifact1) { create(factory_type, :expired, group: group) }
-let_it_be(:artifact2) { create(factory_type, :expired, group: group, updated_at: 6.months.ago, created_at: 2.years.ago) }
-let_it_be_with_reload(:artifact3) { create(factory_type, :expired, group: group, updated_at: 1.year.ago, created_at: 1.year.ago) }
+let_it_be(:artifact1) { create(factory_type, :pending_destruction, group: group) }
+let_it_be(:artifact2) { create(factory_type, :pending_destruction, group: group, updated_at: 6.months.ago, created_at: 2.years.ago) }
+let_it_be_with_reload(:artifact3) { create(factory_type, :pending_destruction, group: group, updated_at: 1.year.ago, created_at: 1.year.ago) }
let_it_be(:artifact4) { create(factory_type, group: group, updated_at: 2.years.ago, created_at: 2.years.ago) }
-it 'deletes the oldest expired artifact based on updated_at', :aggregate_failures do
+it 'deletes the oldest artifact pending destruction based on updated_at', :aggregate_failures do
expect(worker).to receive(:log_extra_metadata_on_done).with("#{factory_type}_id".to_sym, artifact3.id)
expect(worker).to receive(:log_extra_metadata_on_done).with(:group_id, group.id)
......@@ -40,10 +40,8 @@ RSpec.shared_examples 'dependency_proxy_cleanup_worker' do
end
describe '#remaining_work_count' do
-let_it_be(:expired_artifacts) do
-(1..3).map do |_|
-create(factory_type, :expired, group: group)
-end
+before(:context) do
+create_list(factory_type, 3, :pending_destruction, group: group)
end
subject { worker.remaining_work_count }
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ::Packages::CleanupArtifactWorker do
let_it_be(:worker_class) do
Class.new do
def self.name
'Gitlab::Foo::Bar::DummyWorker'
end
include ApplicationWorker
include ::Packages::CleanupArtifactWorker
end
end
let(:worker) { worker_class.new }
describe '#model' do
subject { worker.send(:model) }
it { expect { subject }.to raise_error(NotImplementedError) }
end
describe '#log_metadata' do
subject { worker.send(:log_metadata) }
it { expect { subject }.to raise_error(NotImplementedError) }
end
describe '#log_cleanup_item' do
subject { worker.send(:log_cleanup_item) }
it { expect { subject }.to raise_error(NotImplementedError) }
end
end
......@@ -9,8 +9,8 @@ RSpec.describe DependencyProxy::CleanupDependencyProxyWorker do
context 'when there are records to be deleted' do
it_behaves_like 'an idempotent worker' do
it 'queues the cleanup jobs', :aggregate_failures do
-create(:dependency_proxy_blob, :expired)
-create(:dependency_proxy_manifest, :expired)
+create(:dependency_proxy_blob, :pending_destruction)
+create(:dependency_proxy_manifest, :pending_destruction)
expect(DependencyProxy::CleanupBlobWorker).to receive(:perform_with_capacity).twice
expect(DependencyProxy::CleanupManifestWorker).to receive(:perform_with_capacity).twice
......
......@@ -17,19 +17,19 @@ RSpec.describe DependencyProxy::ImageTtlGroupPolicyWorker do
let_it_be_with_reload(:new_blob) { create(:dependency_proxy_blob, group: group) }
let_it_be_with_reload(:new_manifest) { create(:dependency_proxy_manifest, group: group) }
-it 'updates the old images to expired' do
+it 'updates the old images to pending_destruction' do
expect { subject }
-.to change { old_blob.reload.status }.from('default').to('expired')
-.and change { old_manifest.reload.status }.from('default').to('expired')
+.to change { old_blob.reload.status }.from('default').to('pending_destruction')
+.and change { old_manifest.reload.status }.from('default').to('pending_destruction')
.and not_change { new_blob.reload.status }
.and not_change { new_manifest.reload.status }
end
end
context 'counts logging' do
-let_it_be(:expired_blob) { create(:dependency_proxy_blob, :expired, group: group) }
-let_it_be(:expired_blob2) { create(:dependency_proxy_blob, :expired, group: group) }
-let_it_be(:expired_manifest) { create(:dependency_proxy_manifest, :expired, group: group) }
+let_it_be(:expired_blob) { create(:dependency_proxy_blob, :pending_destruction, group: group) }
+let_it_be(:expired_blob2) { create(:dependency_proxy_blob, :pending_destruction, group: group) }
+let_it_be(:expired_manifest) { create(:dependency_proxy_manifest, :pending_destruction, group: group) }
let_it_be(:processing_blob) { create(:dependency_proxy_blob, status: :processing, group: group) }
let_it_be(:processing_manifest) { create(:dependency_proxy_manifest, status: :processing, group: group) }
let_it_be(:error_blob) { create(:dependency_proxy_blob, status: :error, group: group) }
......
......@@ -361,6 +361,7 @@ RSpec.describe 'Every Sidekiq worker' do
'ObjectPool::ScheduleJoinWorker' => 3,
'ObjectStorage::BackgroundMoveWorker' => 5,
'ObjectStorage::MigrateUploadsWorker' => 3,
+'Packages::CleanupPackageFileWorker' => 0,
'Packages::Composer::CacheUpdateWorker' => false,
'Packages::Go::SyncPackagesWorker' => 3,
'Packages::Maven::Metadata::SyncWorker' => 3,
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Packages::CleanupPackageFileWorker do
let_it_be(:package) { create(:package) }
let(:worker) { described_class.new }
describe '#perform_work' do
subject { worker.perform_work }
context 'with no work to do' do
it { is_expected.to be_nil }
end
context 'with work to do' do
let_it_be(:package_file1) { create(:package_file, package: package) }
let_it_be(:package_file2) { create(:package_file, :pending_destruction, package: package) }
let_it_be(:package_file3) { create(:package_file, :pending_destruction, package: package, updated_at: 1.year.ago, created_at: 1.year.ago) }
it 'deletes the oldest package file pending destruction based on id', :aggregate_failures do
# NOTE: The worker doesn't explicitly look for the lower id value, but this is how PostgreSQL works when
# using LIMIT without ORDER BY.
expect(worker).to receive(:log_extra_metadata_on_done).with(:package_file_id, package_file2.id)
expect(worker).to receive(:log_extra_metadata_on_done).with(:package_id, package.id)
expect { subject }.to change { Packages::PackageFile.count }.by(-1)
end
end
context 'with an error during the destroy' do
let_it_be(:package_file) { create(:package_file, :pending_destruction) }
before do
expect(worker).to receive(:log_metadata).and_raise('Error!')
end
it 'handles the error' do
expect { subject }.to change { Packages::PackageFile.error.count }.from(0).to(1)
expect(package_file.reload).to be_error
end
end
end
describe '#max_running_jobs' do
let(:capacity) { 5 }
subject { worker.max_running_jobs }
before do
stub_application_setting(packages_cleanup_package_file_worker_capacity: capacity)
end
it { is_expected.to eq(capacity) }
end
describe '#remaining_work_count' do
before(:context) do
create_list(:package_file, 3, :pending_destruction, package: package)
end
subject { worker.remaining_work_count }
it { is_expected.to eq(3) }
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Packages::CleanupPackageRegistryWorker do
describe '#perform' do
let_it_be_with_reload(:package_files) { create_list(:package_file, 2, :pending_destruction) }
let(:worker) { described_class.new }
subject(:perform) { worker.perform }
context 'with package files pending destruction' do
it_behaves_like 'an idempotent worker'
it 'queues the cleanup job' do
expect(Packages::CleanupPackageFileWorker).to receive(:perform_with_capacity)
perform
end
end
context 'with no package files pending destruction' do
before do
::Packages::PackageFile.update_all(status: :default)
end
it_behaves_like 'an idempotent worker'
it 'does not queue the cleanup job' do
expect(Packages::CleanupPackageFileWorker).not_to receive(:perform_with_capacity)
perform
end
end
describe 'counts logging' do
let_it_be(:processing_package_file) { create(:package_file, status: :processing) }
it 'logs all the counts', :aggregate_failures do
expect(worker).to receive(:log_extra_metadata_on_done).with(:pending_destruction_package_files_count, 2)
expect(worker).to receive(:log_extra_metadata_on_done).with(:processing_package_files_count, 1)
expect(worker).to receive(:log_extra_metadata_on_done).with(:error_package_files_count, 0)
perform
end
context 'with load balancing enabled', :db_load_balancing do
it 'reads the count from the replica' do
expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_replicas_for_read_queries).and_call_original
perform
end
end
end
end
end
......@@ -25,11 +25,11 @@ RSpec.describe PurgeDependencyProxyCacheWorker do
include_examples 'an idempotent worker' do
let(:job_args) { [user.id, group_id] }
-it 'expires the blobs and returns ok', :aggregate_failures do
+it 'marks the blobs as pending_destruction and returns ok', :aggregate_failures do
subject
-expect(blob).to be_expired
-expect(manifest).to be_expired
+expect(blob).to be_pending_destruction
+expect(manifest).to be_pending_destruction
end
end
end
......