Commit 13590b3f authored by Stan Hu

Merge branch 'concurrent-export-requests' into 'master'

Fix logic to determine export status

Closes #32203

See merge request gitlab-org/gitlab!23664
parents a99f6c34 a309861a
# frozen_string_literal: true

module Projects
  class ExportJobFinder
    InvalidExportJobStatusError = Class.new(StandardError)

    attr_reader :project, :params

    def initialize(project, params = {})
      @project = project
      @params = params
    end

    def execute
      export_jobs = project.export_jobs
      export_jobs = by_status(export_jobs)

      export_jobs
    end

    private

    def by_status(export_jobs)
      return export_jobs unless params[:status]
      raise InvalidExportJobStatusError, 'Invalid export job status' unless ProjectExportJob.state_machines[:status].states.map(&:name).include?(params[:status])

      export_jobs.with_status(params[:status])
    end
  end
end
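For orientation, a minimal usage sketch of the finder, assuming a `project` record is in scope (this mirrors how `Project#export_in_progress?` calls it later in this merge request):

# Illustrative only: scope export jobs to a project, optionally filtered by state-machine status.
Projects::ExportJobFinder.new(project).execute                                 # all export jobs for the project
Projects::ExportJobFinder.new(project, { status: :started }).execute           # only jobs currently running
Projects::ExportJobFinder.new(project, { status: :started }).execute.present?  # => true/false

# An unrecognised status raises instead of silently returning an empty relation:
Projects::ExportJobFinder.new(project, { status: :bogus }).execute
# => raises Projects::ExportJobFinder::InvalidExportJobStatusError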
@@ -186,6 +186,7 @@ class Project < ApplicationRecord
   has_one :import_state, autosave: true, class_name: 'ProjectImportState', inverse_of: :project
   has_one :import_export_upload, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+  has_many :export_jobs, class_name: 'ProjectExportJob'
   has_one :project_repository, inverse_of: :project
   has_one :incident_management_setting, inverse_of: :project, class_name: 'IncidentManagement::ProjectIncidentManagementSetting'
   has_one :error_tracking_setting, inverse_of: :project, class_name: 'ErrorTracking::ProjectErrorTrackingSetting'
@@ -1850,10 +1851,12 @@ class Project < ApplicationRecord
   end

   def export_status
-    if export_in_progress?
+    if regeneration_in_progress?
+      :regeneration_in_progress
+    elsif export_enqueued?
+      :queued
+    elsif export_in_progress?
       :started
-    elsif after_export_in_progress?
-      :after_export_action
     elsif export_file_exists?
       :finished
     else
@@ -1862,11 +1865,19 @@
   end

   def export_in_progress?
-    import_export_shared.active_export_count > 0
+    strong_memoize(:export_in_progress) do
+      ::Projects::ExportJobFinder.new(self, { status: :started }).execute.present?
+    end
+  end
+
+  def export_enqueued?
+    strong_memoize(:export_enqueued) do
+      ::Projects::ExportJobFinder.new(self, { status: :queued }).execute.present?
+    end
   end

-  def after_export_in_progress?
-    import_export_shared.after_export_in_progress?
+  def regeneration_in_progress?
+    (export_enqueued? || export_in_progress?) && export_file_exists?
   end

   def remove_exports
...
# frozen_string_literal: true

class ProjectExportJob < ApplicationRecord
  belongs_to :project

  validates :project, :jid, :status, presence: true

  state_machine :status, initial: :queued do
    event :start do
      transition [:queued] => :started
    end

    event :finish do
      transition [:started] => :finished
    end

    event :fail_op do
      transition [:queued, :started] => :failed
    end

    state :queued, value: 0
    state :started, value: 1
    state :finished, value: 2
    state :failed, value: 3
  end
end
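As a rough lifecycle sketch (hypothetical console session; `project` is assumed to be an existing record), a job created by the export worker moves through these states:

# Illustrative only: the status column stores the integer value declared for each state.
job = ProjectExportJob.create!(project: project, jid: SecureRandom.hex(8))
job.queued?  # => true (initial state, stored as 0)
job.start    # queued -> started
job.finish   # started -> finished
job.fail_op  # => false, because finished is not a valid source state for fail_op

# state_machines also generates status scopes, which the finder above and
# StuckExportJobsWorker below rely on:
ProjectExportJob.with_status([:queued, :started]) # => jobs still in flight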
@@ -234,6 +234,13 @@
   :resource_boundary: :cpu
   :weight: 1
   :idempotent:
+- :name: cronjob:stuck_export_jobs
+  :feature_category: :importers
+  :has_external_dependencies:
+  :urgency: :default
+  :resource_boundary: :cpu
+  :weight: 1
+  :idempotent:
 - :name: cronjob:stuck_import_jobs
   :feature_category: :importers
   :has_external_dependencies:
...
# frozen_string_literal: true

module ProjectExportOptions
  extend ActiveSupport::Concern

  EXPORT_RETRY_COUNT = 3

  included do
    sidekiq_options retry: EXPORT_RETRY_COUNT, status_expiration: StuckExportJobsWorker::EXPORT_JOBS_EXPIRATION

    # We mark the project export as failed once we have exhausted all retries
    sidekiq_retries_exhausted do |job|
      project = Project.find(job['args'][1])
      # rubocop: disable CodeReuse/ActiveRecord
      job = project.export_jobs.find_by(jid: job["jid"])
      # rubocop: enable CodeReuse/ActiveRecord

      if job&.fail_op
        Sidekiq.logger.info "Job #{job['jid']} for project #{project.id} has been set to failed state"
      else
        Sidekiq.logger.error "Failed to set Job #{job['jid']} for project #{project.id} to failed state"
      end
    end
  end
end
@@ -3,17 +3,24 @@
 class ProjectExportWorker # rubocop:disable Scalability/IdempotentWorker
   include ApplicationWorker
   include ExceptionBacktrace
+  include ProjectExportOptions

-  sidekiq_options retry: 3
   feature_category :importers
   worker_resource_boundary :memory

   def perform(current_user_id, project_id, after_export_strategy = {}, params = {})
     current_user = User.find(current_user_id)
     project = Project.find(project_id)
+    export_job = project.export_jobs.safe_find_or_create_by(jid: self.jid)
     after_export = build!(after_export_strategy)

+    export_job&.start
     ::Projects::ImportExport::ExportService.new(project, current_user, params).execute(after_export)
+    export_job&.finish
+  rescue ActiveRecord::RecordNotFound, Gitlab::ImportExport::AfterExportStrategyBuilder::StrategyNotFoundError => e
+    logger.error("Failed to export project #{project_id}: #{e.message}")
   end

   private
...
# frozen_string_literal: true

# rubocop:disable Scalability/IdempotentWorker
class StuckExportJobsWorker
  include ApplicationWorker
  # rubocop:disable Scalability/CronWorkerContext
  # This worker updates export states inline and does not schedule
  # other jobs.
  include CronjobQueue
  # rubocop:enable Scalability/CronWorkerContext

  feature_category :importers
  worker_resource_boundary :cpu

  EXPORT_JOBS_EXPIRATION = 6.hours.to_i

  def perform
    failed_jobs_count = mark_stuck_jobs_as_failed!

    Gitlab::Metrics.add_event(:stuck_export_jobs,
                              failed_jobs_count: failed_jobs_count)
  end

  private

  # rubocop: disable CodeReuse/ActiveRecord
  def mark_stuck_jobs_as_failed!
    jids_and_ids = enqueued_exports.pluck(:jid, :id).to_h

    completed_jids = Gitlab::SidekiqStatus.completed_jids(jids_and_ids.keys)

    return unless completed_jids.any?

    completed_ids = jids_and_ids.values_at(*completed_jids)

    # We select the export states again, because they may have transitioned from
    # started to finished while we were looking up their Sidekiq status.
    completed_jobs = enqueued_exports.where(id: completed_ids)

    Sidekiq.logger.info(
      message: 'Marked stuck export jobs as failed',
      job_ids: completed_jobs.map(&:jid)
    )

    completed_jobs.each do |job|
      job.fail_op
    end.count
  end
  # rubocop: enable CodeReuse/ActiveRecord

  def enqueued_exports
    ProjectExportJob.with_status([:started, :queued])
  end
end
# rubocop:enable Scalability/IdempotentWorker
---
title: Fix logic to determine project export state and add regeneration_in_progress state
merge_request: 23664
author:
type: fixed
@@ -453,6 +453,9 @@ Settings.cron_jobs['remove_unreferenced_lfs_objects_worker']['job_class'] = 'Rem
 Settings.cron_jobs['stuck_import_jobs_worker'] ||= Settingslogic.new({})
 Settings.cron_jobs['stuck_import_jobs_worker']['cron'] ||= '15 * * * *'
 Settings.cron_jobs['stuck_import_jobs_worker']['job_class'] = 'StuckImportJobsWorker'
+Settings.cron_jobs['stuck_export_jobs_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['stuck_export_jobs_worker']['cron'] ||= '30 * * * *'
+Settings.cron_jobs['stuck_export_jobs_worker']['job_class'] = 'StuckExportJobsWorker'
 Settings.cron_jobs['gitlab_usage_ping_worker'] ||= Settingslogic.new({})
 Settings.cron_jobs['gitlab_usage_ping_worker']['cron'] ||= nil # This is dynamically loaded in the sidekiq initializer
 Settings.cron_jobs['gitlab_usage_ping_worker']['job_class'] = 'GitlabUsagePingWorker'
...
# frozen_string_literal: true

class CreateProjectExportJobs < ActiveRecord::Migration[6.0]
  DOWNTIME = false

  def change
    create_table :project_export_jobs do |t|
      t.references :project, index: false, null: false, foreign_key: { on_delete: :cascade }
      t.timestamps_with_timezone null: false
      t.integer :status, limit: 2, null: false, default: 0
      t.string :jid, limit: 100, null: false, unique: true

      t.index [:project_id, :jid]
      t.index [:jid], unique: true
      t.index [:status]
      t.index [:project_id, :status]
    end
  end
end
@@ -10,7 +10,7 @@
 #
 # It's strongly recommended that you check this file into your version control system.

-ActiveRecord::Schema.define(version: 2020_03_10_135823) do
+ActiveRecord::Schema.define(version: 2020_03_11_165635) do
   # These are extensions that must be enabled in order to support this database
   enable_extension "pg_trgm"
@@ -3242,6 +3242,18 @@ ActiveRecord::Schema.define(version: 2020_03_10_135823) do
     t.string "organization_name"
   end

+  create_table "project_export_jobs", force: :cascade do |t|
+    t.bigint "project_id", null: false
+    t.datetime_with_timezone "created_at", null: false
+    t.datetime_with_timezone "updated_at", null: false
+    t.integer "status", limit: 2, default: 0, null: false
+    t.string "jid", limit: 100, null: false
+    t.index ["jid"], name: "index_project_export_jobs_on_jid", unique: true
+    t.index ["project_id", "jid"], name: "index_project_export_jobs_on_project_id_and_jid"
+    t.index ["project_id", "status"], name: "index_project_export_jobs_on_project_id_and_status"
+    t.index ["status"], name: "index_project_export_jobs_on_status"
+  end
+
   create_table "project_feature_usages", primary_key: "project_id", id: :integer, default: nil, force: :cascade do |t|
     t.datetime "jira_dvcs_cloud_last_sync_at"
     t.datetime "jira_dvcs_server_last_sync_at"
@@ -5017,6 +5029,7 @@ ActiveRecord::Schema.define(version: 2020_03_10_135823) do
   add_foreign_key "project_deploy_tokens", "deploy_tokens", on_delete: :cascade
   add_foreign_key "project_deploy_tokens", "projects", on_delete: :cascade
   add_foreign_key "project_error_tracking_settings", "projects", on_delete: :cascade
+  add_foreign_key "project_export_jobs", "projects", on_delete: :cascade
   add_foreign_key "project_feature_usages", "projects", on_delete: :cascade
   add_foreign_key "project_features", "projects", name: "fk_18513d9b92", on_delete: :cascade
   add_foreign_key "project_group_links", "projects", name: "fk_daa8cee94c", on_delete: :cascade
...
@@ -61,14 +61,20 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/ap
 Status can be one of:

 - `none`
+- `queued`
 - `started`
-- `after_export_action`
 - `finished`
+- `regeneration_in_progress`

-The `after_export_action` state represents that the export process has been completed successfully and
-the platform is performing some actions on the resulted file. For example, sending
-an email notifying the user to download the file, uploading the exported file
-to a web server, etc.
+The `queued` state means the export request has been received and is waiting in the queue to be processed.
+
+The `started` state means the export process has started and is currently in progress.
+It covers the export itself plus any actions performed on the resulting file, such as sending
+an email notifying the user to download the file or uploading the exported file to a web server.
+
+The `finished` state means the export process has completed and the user has been notified.
+
+The `regeneration_in_progress` state means an export file is already available to download while a request to generate a new export is being processed.

 `_links` are only present when export has finished.
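To make the polling flow concrete, here is a rough client-side sketch against the export status endpoint (illustrative only: the project ID, URL, token handling, and sleep interval are assumptions, not part of this merge request):

require 'net/http'
require 'json'

# Assumed values for the sketch
uri = URI('https://gitlab.example.com/api/v4/projects/1/export')
token = ENV['PRIVATE_TOKEN']

loop do
  request = Net::HTTP::Get.new(uri)
  request['PRIVATE-TOKEN'] = token

  response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(request) }
  status = JSON.parse(response.body)['export_status']

  # The download `_links` only appear once the status is `finished`.
  break if %w[finished none].include?(status)

  sleep 10 # queued, started, or regeneration_in_progress: check again later
end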
...
@@ -3,19 +3,8 @@
 require 'spec_helper'

 describe EE::Gitlab::ImportExport::AfterExportStrategies::CustomTemplateExportImportStrategy do
-  let!(:project_template) { create(:project, :repository, :with_export) }
-  let(:project) { create(:project, :import_scheduled, import_type: 'gitlab_custom_project_template') }
-  let(:user) { build(:user) }
-  let(:repository_import_worker) { RepositoryImportWorker.new }
-
   subject { described_class.new(export_into_project_id: project.id) }

-  before do
-    stub_licensed_features(custom_project_templates: true)
-
-    allow(RepositoryImportWorker).to receive(:new).and_return(repository_import_worker)
-    allow(repository_import_worker).to receive(:perform)
-  end
-
   describe 'validations' do
     it 'export_into_project_id must be present' do
       expect(described_class.new(export_into_project_id: nil)).to be_invalid
@@ -24,6 +13,21 @@ describe EE::Gitlab::ImportExport::AfterExportStrategies::CustomTemplateExportIm
   end

   describe '#execute' do
+    before do
+      allow_next_instance_of(ProjectExportWorker) do |job|
+        allow(job).to receive(:jid).and_return(SecureRandom.hex(8))
+      end
+
+      stub_licensed_features(custom_project_templates: true)
+
+      allow(RepositoryImportWorker).to receive(:new).and_return(repository_import_worker)
+      allow(repository_import_worker).to receive(:perform)
+    end
+
+    let!(:project_template) { create(:project, :repository, :with_export) }
+    let(:project) { create(:project, :import_scheduled, import_type: 'gitlab_custom_project_template') }
+    let(:user) { build(:user) }
+    let(:repository_import_worker) { RepositoryImportWorker.new }
+
     it 'updates the project import_source with the path to import' do
       file = fixture_file_upload('spec/fixtures/project_export.tar.gz')
...
@@ -1140,7 +1140,7 @@ describe ProjectsController do
       end

       it 'prevents requesting project export' do
-        get action, params: { namespace_id: project.namespace, id: project }
+        post action, params: { namespace_id: project.namespace, id: project }

         expect(flash[:alert]).to eq('This endpoint has been requested too many times. Try again later.')
         expect(response).to have_gitlab_http_status(:found)
@@ -1152,7 +1152,7 @@
     context 'when project export is enabled' do
       it 'returns 302' do
-        get action, params: { namespace_id: project.namespace, id: project }
+        post action, params: { namespace_id: project.namespace, id: project }

         expect(response).to have_gitlab_http_status(:found)
       end
@@ -1164,7 +1164,7 @@
       end

       it 'returns 404' do
-        get action, params: { namespace_id: project.namespace, id: project }
+        post action, params: { namespace_id: project.namespace, id: project }

         expect(response).to have_gitlab_http_status(:not_found)
       end
...
# frozen_string_literal: true

FactoryBot.define do
  factory :project_export_job do
    project
    jid { SecureRandom.hex(8) }
  end
end
# frozen_string_literal: true

require 'spec_helper'

describe Projects::ExportJobFinder do
  let(:project) { create(:project) }
  let(:project_export_job1) { create(:project_export_job, project: project) }
  let(:project_export_job2) { create(:project_export_job, project: project) }

  describe '#execute' do
    subject { described_class.new(project, params).execute }

    context 'when queried for a project' do
      let(:params) { {} }

      it 'scopes to the project' do
        expect(subject).to contain_exactly(
          project_export_job1, project_export_job2
        )
      end
    end

    context 'when queried by job id' do
      let(:params) { { jid: project_export_job1.jid } }

      it 'filters records' do
        expect(subject).to contain_exactly(project_export_job1)
      end
    end

    context 'when queried by status' do
      let(:params) { { status: :started } }

      before do
        project_export_job2.start!
      end

      it 'filters records' do
        expect(subject).to contain_exactly(project_export_job2)
      end
    end

    context 'when queried by invalid status' do
      let(:params) { { status: '1234ad' } }

      it 'raises exception' do
        expect { subject }.to raise_error(described_class::InvalidExportJobStatusError, 'Invalid export job status')
      end
    end
  end
end
@@ -13,9 +13,10 @@
     "type": "string",
     "enum": [
       "none",
+      "queued",
       "started",
       "finished",
-      "after_export_action"
+      "regeneration_in_progress"
     ]
   }
 }
...
@@ -3,6 +3,12 @@
 require 'spec_helper'

 describe Gitlab::ImportExport::AfterExportStrategies::BaseAfterExportStrategy do
+  before do
+    allow_next_instance_of(ProjectExportWorker) do |job|
+      allow(job).to receive(:jid).and_return(SecureRandom.hex(8))
+    end
+  end
+
   let!(:service) { described_class.new }
   let!(:project) { create(:project, :with_export) }
   let(:shared) { project.import_export_shared }
...
@@ -5,6 +5,12 @@ require 'spec_helper'
 describe Gitlab::ImportExport::AfterExportStrategies::WebUploadStrategy do
   include StubRequests

+  before do
+    allow_next_instance_of(ProjectExportWorker) do |job|
+      allow(job).to receive(:jid).and_return(SecureRandom.hex(8))
+    end
+  end
+
   let(:example_url) { 'http://www.example.com' }
   let(:strategy) { subject.new(url: example_url, http_method: 'post') }
   let!(:project) { create(:project, :with_export) }
...
@@ -469,6 +469,7 @@ project:
 - autoclose_referenced_issues
 - status_page_setting
 - requirements
+- export_jobs
 award_emoji:
 - awardable
 - user
...
# frozen_string_literal: true

require 'spec_helper'

describe ProjectExportJob, type: :model do
  let(:project) { create(:project) }
  let!(:job1) { create(:project_export_job, project: project, status: 0) }
  let!(:job2) { create(:project_export_job, project: project, status: 2) }

  describe 'associations' do
    it { expect(job1).to belong_to(:project) }
  end

  describe 'validations' do
    it { expect(job1).to validate_presence_of(:project) }
    it { expect(job1).to validate_presence_of(:jid) }
    it { expect(job1).to validate_presence_of(:status) }
  end
end
@@ -3957,6 +3957,12 @@ describe Project do
   describe '#remove_export' do
     let(:project) { create(:project, :with_export) }

+    before do
+      allow_next_instance_of(ProjectExportWorker) do |job|
+        allow(job).to receive(:jid).and_return(SecureRandom.hex(8))
+      end
+    end
+
     it 'removes the export' do
       project.remove_exports
@@ -5813,6 +5819,86 @@ describe Project do
     end
   end

+  describe '#add_export_job' do
+    context 'if not already present' do
+      it 'starts project export job' do
+        user = create(:user)
+        project = build(:project)
+
+        expect(ProjectExportWorker).to receive(:perform_async).with(user.id, project.id, nil, {})
+
+        project.add_export_job(current_user: user)
+      end
+    end
+  end
+
+  describe '#export_in_progress?' do
+    let(:project) { build(:project) }
+    let!(:project_export_job) { create(:project_export_job, project: project) }
+
+    context 'when project export is enqueued' do
+      it { expect(project.export_in_progress?).to be false }
+    end
+
+    context 'when project export is in progress' do
+      before do
+        project_export_job.start!
+      end
+
+      it { expect(project.export_in_progress?).to be true }
+    end
+
+    context 'when project export is completed' do
+      before do
+        finish_job(project_export_job)
+      end
+
+      it { expect(project.export_in_progress?).to be false }
+    end
+  end
+
+  describe '#export_status' do
+    let(:project) { build(:project) }
+    let!(:project_export_job) { create(:project_export_job, project: project) }
+
+    context 'when project export is enqueued' do
+      it { expect(project.export_status).to eq :queued }
+    end
+
+    context 'when project export is in progress' do
+      before do
+        project_export_job.start!
+      end
+
+      it { expect(project.export_status).to eq :started }
+    end
+
+    context 'when project export is completed' do
+      before do
+        finish_job(project_export_job)
+        allow(project).to receive(:export_file).and_return(double(ImportExportUploader, file: 'exists.zip'))
+      end
+
+      it { expect(project.export_status).to eq :finished }
+    end
+
+    context 'when project export is being regenerated' do
+      let!(:new_project_export_job) { create(:project_export_job, project: project) }
+
+      before do
+        finish_job(project_export_job)
+        allow(project).to receive(:export_file).and_return(double(ImportExportUploader, file: 'exists.zip'))
+      end
+
+      it { expect(project.export_status).to eq :regeneration_in_progress }
+    end
+  end
+
+  def finish_job(export_job)
+    export_job.start
+    export_job.finish
+  end
+
   def rugged_config
     rugged_repo(project.repository).config
   end
...
@@ -27,12 +27,9 @@ describe API::ProjectExport, :clean_gitlab_redis_cache do
   before do
     allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)

-    # simulate exporting work directory
-    FileUtils.mkdir_p File.join(project_started.export_path, 'securerandom-hex')
-
-    # simulate in after export action
-    FileUtils.touch File.join(project_after_export.import_export_shared.lock_files_path, SecureRandom.hex)
+    allow_next_instance_of(ProjectExportWorker) do |job|
+      allow(job).to receive(:jid).and_return(SecureRandom.hex(8))
+    end
   end

   after do
@@ -82,28 +79,42 @@
        expect(json_response['export_status']).to eq('none')
      end

-      it 'is started' do
-        get api(path_started, user)
+      context 'when project export has started' do
+        before do
+          create(:project_export_job, project: project_started, status: 1)
+        end

-        expect(response).to have_gitlab_http_status(:ok)
-        expect(response).to match_response_schema('public_api/v4/project/export_status')
-        expect(json_response['export_status']).to eq('started')
+        it 'returns status started' do
+          get api(path_started, user)
+
+          expect(response).to have_gitlab_http_status(:ok)
+          expect(response).to match_response_schema('public_api/v4/project/export_status')
+          expect(json_response['export_status']).to eq('started')
+        end
      end

-      it 'is after_export' do
-        get api(path_after_export, user)
+      context 'when project export has finished' do
+        it 'returns status finished' do
+          get api(path_finished, user)

-        expect(response).to have_gitlab_http_status(:ok)
-        expect(response).to match_response_schema('public_api/v4/project/export_status')
-        expect(json_response['export_status']).to eq('after_export_action')
+          expect(response).to have_gitlab_http_status(:ok)
+          expect(response).to match_response_schema('public_api/v4/project/export_status')
+          expect(json_response['export_status']).to eq('finished')
+        end
      end

-      it 'is finished' do
-        get api(path_finished, user)
+      context 'when project export is being regenerated' do
+        before do
+          create(:project_export_job, project: project_finished, status: 1)
+        end

-        expect(response).to have_gitlab_http_status(:ok)
-        expect(response).to match_response_schema('public_api/v4/project/export_status')
-        expect(json_response['export_status']).to eq('finished')
+        it 'returns status regeneration_in_progress' do
+          get api(path_finished, user)
+
+          expect(response).to have_gitlab_http_status(:ok)
+          expect(response).to match_response_schema('public_api/v4/project/export_status')
+          expect(json_response['export_status']).to eq('regeneration_in_progress')
+        end
      end
    end
...
# frozen_string_literal: true

require 'spec_helper'

describe ProjectExportOptions do
  let(:project) { create(:project) }
  let(:project_export_job) { create(:project_export_job, project: project, jid: '123', status: 1) }
  let(:job) { { 'args' => [project.owner.id, project.id, nil, nil], 'jid' => '123' } }
  let(:worker_class) do
    Class.new do
      include Sidekiq::Worker
      include ProjectExportOptions
    end
  end

  it 'sets default retry limit' do
    expect(worker_class.sidekiq_options['retry']).to eq(ProjectExportOptions::EXPORT_RETRY_COUNT)
  end

  it 'sets default status expiration' do
    expect(worker_class.sidekiq_options['status_expiration']).to eq(StuckExportJobsWorker::EXPORT_JOBS_EXPIRATION)
  end

  describe '.sidekiq_retries_exhausted' do
    it 'marks status as failed' do
      expect { worker_class.sidekiq_retries_exhausted_block.call(job) }.to change { project_export_job.reload.status }.from(1).to(3)
    end

    context 'when status update fails' do
      before do
        project_export_job.update(status: 2)
      end

      it 'logs an error' do
        expect(Sidekiq.logger).to receive(:error).with("Failed to set Job #{job['jid']} for project #{project.id} to failed state")

        worker_class.sidekiq_retries_exhausted_block.call(job)
      end
    end
  end
end
@@ -9,21 +9,59 @@ describe ProjectExportWorker do
   subject { described_class.new }

   describe '#perform' do
+    before do
+      allow_next_instance_of(described_class) do |job|
+        allow(job).to receive(:jid).and_return(SecureRandom.hex(8))
+      end
+    end
+
     context 'when it succeeds' do
       it 'calls the ExportService' do
         expect_any_instance_of(::Projects::ImportExport::ExportService).to receive(:execute)

         subject.perform(user.id, project.id, { 'klass' => 'Gitlab::ImportExport::AfterExportStrategies::DownloadNotificationStrategy' })
       end
+
+      context 'export job' do
+        before do
+          allow_any_instance_of(::Projects::ImportExport::ExportService).to receive(:execute)
+        end
+
+        it 'creates an export job record for the project' do
+          expect { subject.perform(user.id, project.id, {}) }.to change { project.export_jobs.count }.from(0).to(1)
+        end
+
+        it 'sets the export job status to started' do
+          expect_next_instance_of(ProjectExportJob) do |job|
+            expect(job).to receive(:start)
+          end
+
+          subject.perform(user.id, project.id, {})
+        end
+
+        it 'sets the export job status to finished' do
+          expect_next_instance_of(ProjectExportJob) do |job|
+            expect(job).to receive(:finish)
+          end
+
+          subject.perform(user.id, project.id, {})
+        end
+      end
     end

     context 'when it fails' do
-      it 'raises an exception when params are invalid' do
+      it 'does not raise an exception when strategy is invalid' do
         expect_any_instance_of(::Projects::ImportExport::ExportService).not_to receive(:execute)

-        expect { subject.perform(1234, project.id, {}) }.to raise_exception(ActiveRecord::RecordNotFound)
-        expect { subject.perform(user.id, 1234, {}) }.to raise_exception(ActiveRecord::RecordNotFound)
-        expect { subject.perform(user.id, project.id, { 'klass' => 'Whatever' }) }.to raise_exception(Gitlab::ImportExport::AfterExportStrategyBuilder::StrategyNotFoundError)
+        expect { subject.perform(user.id, project.id, { 'klass' => 'Whatever' }) }.not_to raise_error
+      end
+
+      it 'does not raise error when project cannot be found' do
+        expect { subject.perform(user.id, -234, {}) }.not_to raise_error
+      end
+
+      it 'does not raise error when user cannot be found' do
+        expect { subject.perform(-863, project.id, {}) }.not_to raise_error
       end
     end
   end
...
# frozen_string_literal: true

require 'spec_helper'

describe StuckExportJobsWorker do
  let(:worker) { described_class.new }

  shared_examples 'project export job detection' do
    context 'when the job has completed' do
      context 'when the export status was already updated' do
        before do
          allow(Gitlab::SidekiqStatus).to receive(:completed_jids) do
            project_export_job.start
            project_export_job.finish

            [project_export_job.jid]
          end
        end

        it 'does not mark the export as failed' do
          worker.perform

          expect(project_export_job.reload.finished?).to be true
        end
      end

      context 'when the export status was not updated' do
        before do
          allow(Gitlab::SidekiqStatus).to receive(:completed_jids) do
            project_export_job.start

            [project_export_job.jid]
          end
        end

        it 'marks the project as failed' do
          worker.perform

          expect(project_export_job.reload.failed?).to be true
        end
      end

      context 'when the job is not in queue and db record in queued state' do
        before do
          allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return([project_export_job.jid])
        end

        it 'marks the project as failed' do
          expect(project_export_job.queued?).to be true

          worker.perform

          expect(project_export_job.reload.failed?).to be true
        end
      end
    end

    context 'when the job is running in Sidekiq' do
      before do
        allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return([])
      end

      it 'does not mark the project export as failed' do
        expect { worker.perform }.not_to change { project_export_job.reload.status }
      end
    end
  end

  describe 'with started export status' do
    it_behaves_like 'project export job detection' do
      let(:project) { create(:project) }
      let!(:project_export_job) { create(:project_export_job, project: project, jid: '123') }
    end
  end
end