Commit 5f99ecc9 authored by Stan Hu

Merge branch '4596-fix-geo-repositoriescleanupworker-to-work-with-hashed-storage' into 'master'

Resolve "Fix Geo::RepositoriesCleanUpWorker to work with hashed storage"

Closes #4596

See merge request gitlab-org/gitlab-ee!4059
parents df530ed0 448933c6
---
title: Geo - Fix repository clean up when selective replication changes with hashed storage enabled
merge_request: 4059
author:
type: fixed
@@ -30,12 +30,12 @@ module Geo
       # There is a possibility project does not have repository or wiki
       return true unless gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")
 
-      job_id = ::GeoRepositoryDestroyWorker.perform_async(project.id, project.name, project.full_path)
+      job_id = ::GeoRepositoryDestroyWorker.perform_async(project.id, project.name, project.disk_path, project.repository.storage)
 
       if job_id
-        log_info('Repository cleaned up', project_id: project.id, full_path: project.full_path, job_id: job_id)
+        log_info('Repository cleaned up', project_id: project.id, shard: project.repository.storage, disk_path: project.disk_path, job_id: job_id)
       else
-        log_error('Could not clean up repository', project_id: project.id, full_path: project.full_path)
+        log_error('Could not clean up repository', project_id: project.id, shard: project.repository.storage, disk_path: project.disk_path)
       end
     end
......
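Note on the change above (illustrative, not part of the commit): with hashed storage enabled a project's on-disk location no longer matches its full_path, so the cleanup worker now hands GeoRepositoryDestroyWorker the project's disk_path and its storage shard instead of the full path. The following is a minimal sketch of how a consumer could resolve the repository directory from those two arguments; the storage_paths mapping and repository_directory helper are hypothetical, not GitLab's actual implementation:

require 'fileutils'

# Hypothetical helper: map a (shard, disk_path) pair to the repository
# directory, mirroring the arguments now passed to perform_async above.
def repository_directory(storage_paths, shard, disk_path)
  base = storage_paths.fetch(shard)   # e.g. 'default' => its base directory
  File.join(base, "#{disk_path}.git")
end

# Assumed example values:
storage_paths = { 'default' => '/var/opt/gitlab/git-data/repositories' }
dir = repository_directory(storage_paths, 'default', '@hashed/6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b')
FileUtils.rm_rf(dir) if Dir.exist?(dir)   # remove the repository from the secondary's disk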
@@ -12,18 +12,44 @@ describe Geo::RepositoriesCleanUpWorker do
     let(:synced_group) { create(:group) }
     let(:geo_node) { create(:geo_node, namespaces: [synced_group]) }
 
-    it 'performs GeoRepositoryDestroyWorker for each project that does not belong to selected namespaces to replicate' do
-      project_in_synced_group = create(:project, group: synced_group)
-      unsynced_project = create(:project, :repository)
-
-      expect(GeoRepositoryDestroyWorker).to receive(:perform_async)
-        .with(unsynced_project.id, unsynced_project.name, unsynced_project.full_path)
-        .once.and_return(1)
-
-      expect(GeoRepositoryDestroyWorker).not_to receive(:perform_async)
-        .with(project_in_synced_group.id, project_in_synced_group.name, project_in_synced_group.full_path)
-
-      subject.perform(geo_node.id)
-    end
+    context 'legacy storage' do
+      it 'performs GeoRepositoryDestroyWorker for each project that does not belong to selected namespaces to replicate' do
+        project_in_synced_group = create(:project, group: synced_group)
+        unsynced_project = create(:project, :repository)
+        disk_path = "#{unsynced_project.namespace.full_path}/#{unsynced_project.path}"
+
+        expect(GeoRepositoryDestroyWorker).to receive(:perform_async)
+          .with(unsynced_project.id, unsynced_project.name, disk_path, unsynced_project.repository.storage)
+          .once.and_return(1)
+
+        expect(GeoRepositoryDestroyWorker).not_to receive(:perform_async)
+          .with(project_in_synced_group.id, project_in_synced_group.name, project_in_synced_group.disk_path, project_in_synced_group.repository.storage)
+
+        subject.perform(geo_node.id)
+      end
+    end
+
+    context 'hashed storage' do
+      before do
+        stub_application_setting(hashed_storage_enabled: true)
+      end
+
+      it 'performs GeoRepositoryDestroyWorker for each project that does not belong to selected namespaces to replicate' do
+        project_in_synced_group = create(:project, group: synced_group)
+        unsynced_project = create(:project, :repository)
+
+        hash = Digest::SHA2.hexdigest(unsynced_project.id.to_s)
+        disk_path = "@hashed/#{hash[0..1]}/#{hash[2..3]}/#{hash}"
+
+        expect(GeoRepositoryDestroyWorker).to receive(:perform_async)
+          .with(unsynced_project.id, unsynced_project.name, disk_path, unsynced_project.repository.storage)
+          .once.and_return(1)
+
+        expect(GeoRepositoryDestroyWorker).not_to receive(:perform_async)
+          .with(project_in_synced_group.id, project_in_synced_group.name, project_in_synced_group.disk_path, project_in_synced_group.repository.storage)
+
+        subject.perform(geo_node.id)
+      end
+    end
 
     it 'does not perform GeoRepositoryDestroyWorker when repository does not exist' do
......
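For reference, the hashed-storage path asserted in the spec above is derived from the SHA-256 hex digest of the project ID. Below is a minimal standalone sketch of that derivation; the project_id value is an assumed example:

require 'digest'

# Hashed storage lays repositories out as "@hashed/<first 2>/<next 2>/<full hash>",
# where the hash is the SHA-256 hex digest of the project ID, as in the spec above.
project_id = 1                                    # assumed example value
hash = Digest::SHA2.hexdigest(project_id.to_s)    # SHA-256 by default
disk_path = "@hashed/#{hash[0..1]}/#{hash[2..3]}/#{hash}"
# disk_path == "@hashed/6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b"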