Commit 6e74fa07 authored by Marius Bobin

Destroy job artifacts when deleting their pipeline


Changelog: fixed
parent 4a08a9f3
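
In short, a single destroy call is now responsible for both the artifact rows and the statistics bookkeeping. A rough sketch of the intended caller-side effect, based on the diff below (`pipeline_id` is hypothetical; the FastDestroyAll plumbing itself is assumed and not shown here):

# Sketch of the intended effect of this commit, not a literal trace.
pipeline = Ci::Pipeline.find(pipeline_id)
pipeline.destroy!
# => its job artifact rows are deleted in batches of 100 (BATCH_SIZE below),
#    Ci::DeletedObject rows are bulk-imported so the files can be cleaned up later, and
#    each affected project's :build_artifacts_size statistic is decremented afterwards,
#    outside the destroy transaction, because that update relies on Redis.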
@@ -261,6 +261,22 @@ module Ci
self.where(project: project).sum(:size)
end
##
# FastDestroyAll concerns
# rubocop: disable CodeReuse/ServiceClass
def self.begin_fast_destroy
service = ::Ci::JobArtifacts::DestroyAssociationsService.new(self)
service.destroy_records
service
end
# rubocop: enable CodeReuse/ServiceClass
##
# FastDestroyAll concerns
def self.finalize_fast_destroy(service)
service.update_statistics
end
def local_store?
[nil, ::JobArtifactUploader::Store::LOCAL].include?(self.file_store)
end
......
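
The two class methods above are Ci::JobArtifact's half of the FastDestroyAll contract: `begin_fast_destroy` must do database work only and returns carry-over state, while `finalize_fast_destroy` later applies the deferred, Redis-backed statistics updates. A minimal sketch of the assumed pairing (the concern's exact call sites may differ):

# Assumed pairing of the two hooks.
state = Ci::JobArtifact.begin_fast_destroy      # batched deletes; per-project size deltas collected
# ... the owning pipeline is destroyed in between ...
Ci::JobArtifact.finalize_fast_destroy(state)    # deferred ProjectStatistics updates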
@@ -17,6 +17,7 @@ module Ci
include FromUnion
include UpdatedAtFilterable
include EachBatch
include FastDestroyAll::Helpers
MAX_OPEN_MERGE_REQUESTS_REFS = 4
@@ -126,6 +127,8 @@
after_create :keep_around_commits, unless: :importing?
use_fast_destroy :job_artifacts
# We use `Enums::Ci::Pipeline.sources` here so that EE can more easily extend
# this `Hash` with new values.
enum_with_nil source: Enums::Ci::Pipeline.sources
......
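
`use_fast_destroy :job_artifacts` ties the pipeline to the two hooks defined on Ci::JobArtifact. Roughly, the helper is assumed to register callbacks along these lines (illustrative only, not the concern's actual implementation):

# Illustrative only: what `use_fast_destroy :job_artifacts` is assumed to arrange.
class Ci::Pipeline
  before_destroy do
    # run the database-only batched deletes and keep the carry-over state
    @fast_destroy_state = job_artifacts.begin_fast_destroy
  end

  after_commit on: :destroy do
    # settle the Redis-backed project statistics once the delete has committed
    job_artifacts.finalize_fast_destroy(@fast_destroy_state)
  end
end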
@@ -2578,6 +2578,12 @@ class Project < ApplicationRecord
Gitlab::Routing.url_helpers.activity_project_path(self)
end
def increment_statistic_value(statistic, delta)
return if pending_delete?
ProjectStatistics.increment_statistic(self, statistic, delta)
end
private
def set_container_registry_access_level
......
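
`increment_statistic_value` gives callers one place to adjust a project statistic while silently skipping projects that are already scheduled for removal. A small usage example (the delta is illustrative):

# No-op when the project is pending_delete?, otherwise delegates to ProjectStatistics.
project.increment_statistic_value(:build_artifacts_size, -10)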
# frozen_string_literal: true
module Ci
module JobArtifacts
class DestroyAssociationsService
BATCH_SIZE = 100
def initialize(job_artifacts_relation)
@job_artifacts_relation = job_artifacts_relation
@statistics = {}
end
def destroy_records
@job_artifacts_relation.each_batch(of: BATCH_SIZE) do |relation|
service = Ci::JobArtifacts::DestroyBatchService.new(relation, pick_up_at: Time.current)
result = service.execute(update_stats: false)
updates = result[:statistics_updates]
@statistics.merge!(updates) { |_key, oldval, newval| newval + oldval }
end
end
def update_statistics
@statistics.each do |project, delta|
project.increment_statistic_value(Ci::JobArtifact.project_statistics_name, delta)
end
end
end
end
end
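
The service splits destruction into two phases so the caller controls when each runs: `destroy_records` performs the batched, database-only deletes and accumulates one size delta per project, and `update_statistics` applies those deltas later, outside any transaction. A minimal usage sketch (assuming a pipeline with artifacts):

# Two-phase usage: database work first, deferred statistics afterwards.
service = Ci::JobArtifacts::DestroyAssociationsService.new(pipeline.job_artifacts)
service.destroy_records     # batches of 100 via DestroyBatchService, statistics only collected
# ... the surrounding destroy transaction can commit here ...
service.update_statistics   # one increment_statistic_value call per affected project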
@@ -23,8 +23,8 @@ module Ci
end
# rubocop: disable CodeReuse/ActiveRecord
def execute
return success(destroyed_artifacts_count: artifacts_count) if @job_artifacts.empty?
def execute(update_stats: true)
return success(destroyed_artifacts_count: 0, statistics_updates: {}) if @job_artifacts.empty?
Ci::DeletedObject.transaction do
Ci::DeletedObject.bulk_import(@job_artifacts, @pick_up_at)
@@ -33,10 +33,11 @@
end
# This is executed outside of the transaction because it depends on Redis
update_project_statistics
update_project_statistics! if update_stats
increment_monitoring_statistics(artifacts_count)
success(destroyed_artifacts_count: artifacts_count)
success(destroyed_artifacts_count: artifacts_count,
statistics_updates: affected_project_statistics)
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -45,12 +46,20 @@
# This method is implemented in EE and it must do only database work
def destroy_related_records(artifacts); end
def update_project_statistics
# using ! here since this can't be called inside a transaction
def update_project_statistics!
affected_project_statistics.each do |project, delta|
project.increment_statistic_value(Ci::JobArtifact.project_statistics_name, delta)
end
end
def affected_project_statistics
strong_memoize(:affected_project_statistics) do
artifacts_by_project = @job_artifacts.group_by(&:project)
artifacts_by_project.each do |project, artifacts|
artifacts_by_project.each.with_object({}) do |(project, artifacts), accumulator|
delta = -artifacts.sum { |artifact| artifact.size.to_i }
ProjectStatistics.increment_statistic(
project, Ci::JobArtifact.project_statistics_name, delta)
accumulator[project] = delta
end
end
end
......
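
With `update_stats: false` the batch service no longer touches ProjectStatistics itself and instead hands the per-project size deltas back to the caller. The return shape matches the specs below; the byte counts here are made up:

# Illustrative consumption of the new return value (sizes are made up).
result = Ci::JobArtifacts::DestroyBatchService.new(artifacts).execute(update_stats: false)
result[:status]                    # => :success
result[:destroyed_artifacts_count] # => 3
result[:statistics_updates]        # => { <Project id:1> => -1024, <Project id:2> => -2048 }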
---
title: Fast destroy job artifacts when destroying a pipeline
merge_request: 60391
author:
type: fixed
@@ -602,6 +602,34 @@ RSpec.describe Ci::JobArtifact do
end
end
context 'FastDestroyAll' do
let_it_be(:project) { create(:project) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
let_it_be(:job) { create(:ci_build, pipeline: pipeline, project: project) }
let!(:job_artifact) { create(:ci_job_artifact, :archive, job: job) }
let(:subjects) { pipeline.job_artifacts }
describe '.use_fast_destroy' do
it 'performs cascading delete with fast_destroy_all' do
expect(Ci::DeletedObject.count).to eq(0)
expect(subjects.count).to be > 0
expect { pipeline.destroy! }.not_to raise_error
expect(subjects.count).to eq(0)
expect(Ci::DeletedObject.count).to be > 0
end
it 'updates project statistics' do
expect(ProjectStatistics).to receive(:increment_statistic).once
.with(project, :build_artifacts_size, -job_artifact.file.size)
pipeline.destroy!
end
end
end
def file_type_limit_failure_message(type, limit_name)
<<~MSG
The artifact type `#{type}` is missing its counterpart plan limit which is expected to be named `#{limit_name}`.
......
@@ -6936,6 +6936,32 @@ RSpec.describe Project, factory_default: :keep do
end
end
describe '#increment_statistic_value' do
let(:project) { build_stubbed(:project) }
subject(:increment) do
project.increment_statistic_value(:build_artifacts_size, -10)
end
it 'increments the value' do
expect(ProjectStatistics)
.to receive(:increment_statistic)
.with(project, :build_artifacts_size, -10)
increment
end
context 'when the project is scheduled for removal' do
let(:project) { build_stubbed(:project, pending_delete: true) }
it 'does not increment the value' do
expect(ProjectStatistics).not_to receive(:increment_statistic)
increment
end
end
end
def finish_job(export_job)
export_job.start
export_job.finish
......
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe ::Ci::DestroyPipelineService do
let(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
let!(:pipeline) { create(:ci_pipeline, :success, project: project, sha: project.commit.id) }
subject { described_class.new(project, user).execute(pipeline) }
@@ -60,6 +61,10 @@ RSpec.describe ::Ci::DestroyPipelineService do
expect { artifact.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
it 'inserts deleted objects for object storage files' do
expect { subject }.to change { Ci::DeletedObject.count }
end
end
end
end
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Ci::JobArtifacts::DestroyAssociationsService do
let(:artifacts) { Ci::JobArtifact.all }
let(:service) { described_class.new(artifacts) }
let_it_be(:artifact, refind: true) do
create(:ci_job_artifact)
end
before do
artifact.file = fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
artifact.save!
end
describe '#destroy_records' do
it 'removes artifacts without updating statistics' do
expect(ProjectStatistics).not_to receive(:increment_statistic)
expect { service.destroy_records }.to change { Ci::JobArtifact.count }
end
context 'when there are no artifacts' do
let(:artifacts) { Ci::JobArtifact.none }
it 'does not raise error' do
expect { service.destroy_records }.not_to raise_error
end
end
end
describe '#update_statistics' do
before do
service.destroy_records
end
it 'updates project statistics' do
expect(ProjectStatistics).to receive(:increment_statistic).once
.with(artifact.project, :build_artifacts_size, -artifact.file.size)
service.update_statistics
end
context 'when there are no artifacts' do
let(:artifacts) { Ci::JobArtifact.none }
it 'does not raise error' do
expect { service.update_statistics }.not_to raise_error
end
end
end
end
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe Ci::JobArtifacts::DestroyBatchService do
include ExclusiveLeaseHelpers
let(:artifacts) { Ci::JobArtifact.all }
let(:service) { described_class.new(artifacts, pick_up_at: Time.current) }
@@ -25,6 +23,19 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do
expect { subject }.to change { Ci::DeletedObject.count }.by(1)
end
it 'does not remove the files' do
expect { execute }.not_to change { artifact.file.exists? }
end
it 'reports metrics for destroyed artifacts' do
expect_next_instance_of(Gitlab::Ci::Artifacts::Metrics) do |metrics|
expect(metrics).to receive(:increment_destroyed_artifacts).with(1).and_call_original
end
execute
end
context 'ProjectStatistics' do
it 'resets project statistics' do
expect(ProjectStatistics).to receive(:increment_statistic).once
.with(artifact.project, :build_artifacts_size, -artifact.file.size)
@@ -33,16 +44,18 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do
execute
end
it 'does not remove the files' do
expect { execute }.not_to change { artifact.file.exists? }
end
context 'with update_stats: false' do
it 'does not update project statistics' do
expect(ProjectStatistics).not_to receive(:increment_statistic)
it 'reports metrics for destroyed artifacts' do
expect_next_instance_of(Gitlab::Ci::Artifacts::Metrics) do |metrics|
expect(metrics).to receive(:increment_destroyed_artifacts).with(1).and_call_original
service.execute(update_stats: false)
end
execute
it 'returns size statistics' do
expect(service.execute(update_stats: false)).to match(
a_hash_including(statistics_updates: { artifact.project => -artifact.file.size }))
end
end
end
end
@@ -65,16 +78,12 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do
context 'when there are no artifacts' do
let(:artifacts) { Ci::JobArtifact.none }
before do
artifact.destroy!
end
it 'does not raise error' do
expect { execute }.not_to raise_error
end
it 'reports the number of destroyed artifacts' do
is_expected.to eq(destroyed_artifacts_count: 0, status: :success)
is_expected.to eq(destroyed_artifacts_count: 0, statistics_updates: {}, status: :success)
end
end
end
......