Commit 08344e29 authored by Arturo Herrero's avatar Arturo Herrero

Merge branch 'georgekoltsov/add-group-relation-export-models' into 'master'

Add Group relations export models

See merge request gitlab-org/gitlab!59976
parents 2c386afb 9d24ed8e
# frozen_string_literal: true
module BulkImports
# Tracks a single relation export (e.g. labels, milestones) for either a
# group or a project during a bulk import/export.
class Export < ApplicationRecord
include Gitlab::Utils::StrongMemoize
self.table_name = 'bulk_import_exports'
# Exactly one of project/group must be set (see validations below).
belongs_to :project, optional: true
belongs_to :group, optional: true
# The produced export archive, once the export finishes.
has_one :upload, class_name: 'BulkImports::ExportUpload'
# Mutually-conditional presence: requires project OR group, never neither.
validates :project, presence: true, unless: :group
validates :group, presence: true, unless: :project
validates :relation, :status, presence: true
validate :exportable_relation?
# Integer-backed status column: 0 started, 1 finished, -1 failed.
state_machine :status, initial: :started do
state :started, value: 0
state :finished, value: 1
state :failed, value: -1
event :start do
transition any => :started
end
event :finish do
# A failed export stays failed even if :finish is fired.
transition started: :finished
transition failed: :failed
end
event :fail_op do
transition any => :failed
end
end
# Validation: relation must be one of the top-level relations declared in
# the import/export YAML for this exportable type.
def exportable_relation?
return unless exportable
errors.add(:relation, 'Unsupported exportable relation') unless config.exportable_relations.include?(relation)
end
# The record being exported: the project if present, otherwise the group.
def exportable
strong_memoize(:exportable) do
project || group
end
end
# Subtree of the import/export tree describing this relation's includes.
def relation_definition
config.exportable_tree[:include].find { |include| include[relation.to_sym] }
end
# Type-specific config object (paths, YAML, permissions) for the exportable.
def config
strong_memoize(:config) do
case exportable
when ::Project
Exports::ProjectConfig.new(exportable)
when ::Group
Exports::GroupConfig.new(exportable)
end
end
end
end
end
# frozen_string_literal: true
module BulkImports
# Stores the uploaded export archive produced for a BulkImports::Export.
class ExportUpload < ApplicationRecord
include WithUploads
include ObjectStorage::BackgroundMove
self.table_name = 'bulk_import_export_uploads'
belongs_to :export, class_name: 'BulkImports::Export'
# CarrierWave mount for the exported archive (ndjson.gz — see ExportUploader).
mount_uploader :export_file, ExportUploader
# Locates the Upload record backing this model's mounted file.
# The identifier argument is unused: lookup is by model + path.
def retrieve_upload(_identifier, paths)
Upload.find_by(model: self, path: paths)
end
end
end
# frozen_string_literal: true

module BulkImports
  module Exports
    # Abstract base for per-type export configuration. Subclasses
    # (GroupConfig, ProjectConfig) supply the import/export YAML, the
    # required ability and the on-disk base path.
    class BaseConfig
      include Gitlab::Utils::StrongMemoize

      # @param exportable [Group, Project] the record being exported
      def initialize(exportable)
        @exportable = exportable
      end

      # Root of the import/export relation tree for the exportable type.
      def exportable_tree
        attributes_finder.find_root(exportable_class_sym)
      end

      # Returns truthy when the user holds the required ability on the
      # exportable; raises Gitlab::ImportExport::Error otherwise.
      def validate_user_permissions!(user)
        user.can?(ability, exportable) ||
          raise(::Gitlab::ImportExport::Error.permission_error(user, exportable))
      end

      # Unique temporary directory for this export run. Memoized so every
      # caller within one export sees the same random path.
      def export_path
        strong_memoize(:export_path) do
          relative_path = File.join(base_export_path, SecureRandom.hex)
          ::Gitlab::ImportExport.export_path(relative_path: relative_path)
        end
      end

      # Names (as strings) of the top-level relations that may be exported.
      def exportable_relations
        import_export_config.dig(:tree, exportable_class_sym).keys.map(&:to_s)
      end

      private

      attr_reader :exportable

      def attributes_finder
        strong_memoize(:attributes_finder) do
          ::Gitlab::ImportExport::AttributesFinder.new(config: import_export_config)
        end
      end

      # Memoized: parsing the import/export YAML is relatively expensive and
      # this is called from both #attributes_finder and #exportable_relations.
      def import_export_config
        strong_memoize(:import_export_config) do
          ::Gitlab::ImportExport::Config.new(config: import_export_yaml).to_h
        end
      end

      def exportable_class
        @exportable_class ||= exportable.class
      end

      # :group or :project — key used throughout the import/export config.
      def exportable_class_sym
        @exportable_class_sym ||= exportable_class.to_s.downcase.to_sym
      end

      # Path to the import/export YAML definition. Subclass responsibility.
      def import_export_yaml
        raise NotImplementedError
      end

      # Ability symbol checked in #validate_user_permissions!. Subclass responsibility.
      def ability
        raise NotImplementedError
      end

      # Base directory under which export files are written. Subclass responsibility.
      def base_export_path
        raise NotImplementedError
      end
    end
  end
end
# frozen_string_literal: true
module BulkImports
module Exports
# Group-specific export configuration; see BaseConfig for the shared API.
class GroupConfig < BaseConfig
private
# Groups are laid out on disk by their full (namespace) path.
def base_export_path
exportable.full_path
end
# YAML describing which group relations/attributes are exportable.
def import_export_yaml
::Gitlab::ImportExport.group_config_file
end
# Ability required to export a group.
def ability
:admin_group
end
end
end
end
# frozen_string_literal: true
module BulkImports
module Exports
# Project-specific export configuration; see BaseConfig for the shared API.
class ProjectConfig < BaseConfig
private
# Projects use their hashed/legacy disk path, not the URL path.
def base_export_path
exportable.disk_path
end
# YAML describing which project relations/attributes are exportable.
def import_export_yaml
::Gitlab::ImportExport.config_file
end
# Ability required to export a project.
def ability
:admin_project
end
end
end
end
......@@ -67,6 +67,8 @@ class Group < Namespace
has_one :import_state, class_name: 'GroupImportState', inverse_of: :group
has_many :bulk_import_exports, class_name: 'BulkImports::Export', inverse_of: :group
has_many :group_deploy_keys_groups, inverse_of: :group
has_many :group_deploy_keys, through: :group_deploy_keys_groups
has_many :group_deploy_tokens
......
# frozen_string_literal: true
module BulkImports
# Uploader for relation export archives; only gzipped NDJSON is accepted.
class ExportUploader < ImportExportUploader
EXTENSION_WHITELIST = %w[ndjson.gz].freeze
end
end
---
title: Add Group relations export models
merge_request: 59976
author:
type: added
# frozen_string_literal: true

# Creates bulk_import_exports: one row per relation export for a group or
# project, with text columns capped via CHECK constraints.
class AddBulkImportExportsTable < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  def up
    create_table_with_constraints :bulk_import_exports do |t|
      t.bigint :group_id
      t.bigint :project_id
      t.timestamps_with_timezone null: false
      # Mirrors the model's state_machine values: 0 started, 1 finished, -1 failed.
      t.integer :status, limit: 2, null: false, default: 0
      t.text :relation, null: false
      # NOTE: the original declared `unique: true` here, which is not a valid
      # column option and was silently ignored by Rails — removed to avoid
      # implying a uniqueness guarantee the schema does not have. Uniqueness
      # is enforced per (group_id|project_id, relation) by a later migration.
      t.text :jid
      t.text :error

      # text_limit adds CHECK constraints capping text column lengths.
      t.text_limit :relation, 255
      t.text_limit :jid, 255
      t.text_limit :error, 255
    end
  end

  def down
    drop_table :bulk_import_exports
  end
end
# frozen_string_literal: true
# Adds the FK from bulk_import_exports.project_id to projects (CASCADE delete).
class AddForeignKeyToBulkImportExportsOnProject < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
# The FK is validated concurrently, so this cannot run in a DDL transaction.
disable_ddl_transaction!
def up
add_concurrent_foreign_key :bulk_import_exports, :projects, column: :project_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :bulk_import_exports, column: :project_id
end
end
end
# frozen_string_literal: true
# Adds the FK from bulk_import_exports.group_id to namespaces (CASCADE delete).
# Groups are stored in the namespaces table, hence the target table name.
class AddForeignKeyToBulkImportExportsOnGroup < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
# The FK is validated concurrently, so this cannot run in a DDL transaction.
disable_ddl_transaction!
def up
add_concurrent_foreign_key :bulk_import_exports, :namespaces, column: :group_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :bulk_import_exports, column: :group_id
end
end
end
# frozen_string_literal: true
# Partial unique indexes guaranteeing at most one export row per relation
# for a given group or project (each partial index only covers rows where
# its owner column is non-NULL).
class AddBulkImportExportsTableIndexes < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
disable_ddl_transaction!
GROUP_INDEX_NAME = 'partial_index_bulk_import_exports_on_group_id_and_relation'
PROJECT_INDEX_NAME = 'partial_index_bulk_import_exports_on_project_id_and_relation'
def up
add_concurrent_index :bulk_import_exports,
[:group_id, :relation],
unique: true,
where: 'group_id IS NOT NULL',
name: GROUP_INDEX_NAME
add_concurrent_index :bulk_import_exports,
[:project_id, :relation],
unique: true,
where: 'project_id IS NOT NULL',
name: PROJECT_INDEX_NAME
end
def down
remove_concurrent_index_by_name(:bulk_import_exports, GROUP_INDEX_NAME)
remove_concurrent_index_by_name(:bulk_import_exports, PROJECT_INDEX_NAME)
end
end
# frozen_string_literal: true
# Creates bulk_import_export_uploads, which stores the file reference for a
# finished export; rows cascade-delete with their parent export.
class AddBulkImportExportUploadsTable < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
def up
create_table_with_constraints :bulk_import_export_uploads do |t|
t.references :export, index: true, null: false, foreign_key: { to_table: :bulk_import_exports, on_delete: :cascade }
t.datetime_with_timezone :updated_at, null: false
t.text :export_file
# CHECK constraint capping the stored file path length.
t.text_limit :export_file, 255
end
end
def down
drop_table :bulk_import_export_uploads
end
end
4950567ba7071183bc008936e4bbe1391dd0100c5caa2a6821be85dc3d2423fc
\ No newline at end of file
202409998a03fd29c52e3ee9546ab8ec7aa3c56173ee755e9342f1cc6a5f1f6b
\ No newline at end of file
2343decc3abb79b38bcde6aba5a8fd208842096d7fb7a4c51872f66f1a125296
\ No newline at end of file
4db08c0fecd210b329492596cf029518484d256bdb06efff233b3a38677fd6a6
\ No newline at end of file
f306cf9553e4bd237cfdff31d5432d4ff44302a923e475c477f76d32ccb4d257
\ No newline at end of file
......@@ -10212,6 +10212,47 @@ CREATE SEQUENCE bulk_import_entities_id_seq
ALTER SEQUENCE bulk_import_entities_id_seq OWNED BY bulk_import_entities.id;
CREATE TABLE bulk_import_export_uploads (
id bigint NOT NULL,
export_id bigint NOT NULL,
updated_at timestamp with time zone NOT NULL,
export_file text,
CONSTRAINT check_5add76239d CHECK ((char_length(export_file) <= 255))
);
CREATE SEQUENCE bulk_import_export_uploads_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE bulk_import_export_uploads_id_seq OWNED BY bulk_import_export_uploads.id;
CREATE TABLE bulk_import_exports (
id bigint NOT NULL,
group_id bigint,
project_id bigint,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
status smallint DEFAULT 0 NOT NULL,
relation text NOT NULL,
jid text,
error text,
CONSTRAINT check_24cb010672 CHECK ((char_length(relation) <= 255)),
CONSTRAINT check_8f0f357334 CHECK ((char_length(error) <= 255)),
CONSTRAINT check_9ee6d14d33 CHECK ((char_length(jid) <= 255))
);
CREATE SEQUENCE bulk_import_exports_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE bulk_import_exports_id_seq OWNED BY bulk_import_exports.id;
CREATE TABLE bulk_import_failures (
id bigint NOT NULL,
bulk_import_entity_id bigint NOT NULL,
......@@ -19268,6 +19309,10 @@ ALTER TABLE ONLY bulk_import_configurations ALTER COLUMN id SET DEFAULT nextval(
ALTER TABLE ONLY bulk_import_entities ALTER COLUMN id SET DEFAULT nextval('bulk_import_entities_id_seq'::regclass);
ALTER TABLE ONLY bulk_import_export_uploads ALTER COLUMN id SET DEFAULT nextval('bulk_import_export_uploads_id_seq'::regclass);
ALTER TABLE ONLY bulk_import_exports ALTER COLUMN id SET DEFAULT nextval('bulk_import_exports_id_seq'::regclass);
ALTER TABLE ONLY bulk_import_failures ALTER COLUMN id SET DEFAULT nextval('bulk_import_failures_id_seq'::regclass);
ALTER TABLE ONLY bulk_import_trackers ALTER COLUMN id SET DEFAULT nextval('bulk_import_trackers_id_seq'::regclass);
......@@ -20387,6 +20432,12 @@ ALTER TABLE ONLY bulk_import_configurations
ALTER TABLE ONLY bulk_import_entities
ADD CONSTRAINT bulk_import_entities_pkey PRIMARY KEY (id);
ALTER TABLE ONLY bulk_import_export_uploads
ADD CONSTRAINT bulk_import_export_uploads_pkey PRIMARY KEY (id);
ALTER TABLE ONLY bulk_import_exports
ADD CONSTRAINT bulk_import_exports_pkey PRIMARY KEY (id);
ALTER TABLE ONLY bulk_import_failures
ADD CONSTRAINT bulk_import_failures_pkey PRIMARY KEY (id);
......@@ -22207,6 +22258,8 @@ CREATE INDEX index_bulk_import_entities_on_parent_id ON bulk_import_entities USI
CREATE INDEX index_bulk_import_entities_on_project_id ON bulk_import_entities USING btree (project_id);
CREATE INDEX index_bulk_import_export_uploads_on_export_id ON bulk_import_export_uploads USING btree (export_id);
CREATE INDEX index_bulk_import_failures_on_bulk_import_entity_id ON bulk_import_failures USING btree (bulk_import_entity_id);
CREATE INDEX index_bulk_import_failures_on_correlation_id_value ON bulk_import_failures USING btree (correlation_id_value);
......@@ -24511,6 +24564,10 @@ CREATE INDEX packages_packages_needs_verification ON packages_package_files USIN
CREATE INDEX packages_packages_pending_verification ON packages_package_files USING btree (verified_at NULLS FIRST) WHERE (verification_state = 0);
CREATE UNIQUE INDEX partial_index_bulk_import_exports_on_group_id_and_relation ON bulk_import_exports USING btree (group_id, relation) WHERE (group_id IS NOT NULL);
CREATE UNIQUE INDEX partial_index_bulk_import_exports_on_project_id_and_relation ON bulk_import_exports USING btree (project_id, relation) WHERE (project_id IS NOT NULL);
CREATE INDEX partial_index_ci_builds_on_scheduled_at_with_scheduled_jobs ON ci_builds USING btree (scheduled_at) WHERE ((scheduled_at IS NOT NULL) AND ((type)::text = 'Ci::Build'::text) AND ((status)::text = 'scheduled'::text));
CREATE INDEX partial_index_deployments_for_legacy_successful_deployments ON deployments USING btree (id) WHERE ((finished_at IS NULL) AND (status = 2));
......@@ -25002,6 +25059,9 @@ ALTER TABLE ONLY sprints
ALTER TABLE ONLY push_event_payloads
ADD CONSTRAINT fk_36c74129da FOREIGN KEY (event_id) REFERENCES events(id) ON DELETE CASCADE;
ALTER TABLE ONLY bulk_import_exports
ADD CONSTRAINT fk_39c726d3b5 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY ci_builds
ADD CONSTRAINT fk_3a9eaa254d FOREIGN KEY (stage_id) REFERENCES ci_stages(id) ON DELETE CASCADE;
......@@ -25197,6 +25257,9 @@ ALTER TABLE ONLY issues
ALTER TABLE ONLY protected_branch_merge_access_levels
ADD CONSTRAINT fk_8a3072ccb3 FOREIGN KEY (protected_branch_id) REFERENCES protected_branches(id) ON DELETE CASCADE;
ALTER TABLE ONLY bulk_import_exports
ADD CONSTRAINT fk_8c6f33cebe FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY releases
ADD CONSTRAINT fk_8e4456f90f FOREIGN KEY (author_id) REFERENCES users(id) ON DELETE SET NULL;
......@@ -26859,6 +26922,9 @@ ALTER TABLE ONLY incident_management_oncall_shifts
ALTER TABLE ONLY analytics_cycle_analytics_group_stages
ADD CONSTRAINT fk_rails_dfb37c880d FOREIGN KEY (end_event_label_id) REFERENCES labels(id) ON DELETE CASCADE;
ALTER TABLE ONLY bulk_import_export_uploads
ADD CONSTRAINT fk_rails_dfbfb45eca FOREIGN KEY (export_id) REFERENCES bulk_import_exports(id) ON DELETE CASCADE;
ALTER TABLE ONLY label_priorities
ADD CONSTRAINT fk_rails_e161058b0f FOREIGN KEY (label_id) REFERENCES labels(id) ON DELETE CASCADE;
......@@ -6,6 +6,7 @@ module Gitlab
USER_UPLOADS_OBJECT_TYPES = %i[
attachment
avatar
bulk_imports/export
design_management/design_v432x230
favicon
file
......
......@@ -8,6 +8,7 @@ FactoryBot.define do
trait(:attachment) { file_type { :attachment } }
trait(:avatar) { file_type { :avatar } }
trait(:'bulk_imports/export') { file_type { :'bulk_imports/export' } }
trait(:favicon) { file_type { :favicon } }
trait(:file) { file_type { :file } }
trait(:import_export) { file_type { :import_export } }
......
......@@ -463,6 +463,13 @@ RSpec.describe Geo::FileDownloadService do
it_behaves_like 'a service that handles orphaned uploads', 'import_export'
end
context 'with bulk imports export upload' do
let(:file) { create(:upload, model: build(:bulk_import_export_upload)) }
it_behaves_like 'a service that downloads the file and registers the sync result', :'bulk_imports/export'
it_behaves_like 'a service that handles orphaned uploads', :'bulk_imports/export'
end
context 'bad object type' do
it 'raises an error' do
expect { described_class.new(:bad, 1).execute }.to raise_error(NotImplementedError)
......
......@@ -234,5 +234,30 @@ RSpec.describe Geo::FileUploadService do
include_examples 'no decoded params'
end
context 'bulk imports export file' do
let_it_be(:type) { :'bulk_imports/export' }
let_it_be(:export) { create(:bulk_import_export) }
let_it_be(:file) { fixture_file_upload('spec/fixtures/bulk_imports/labels.ndjson.gz') }
let(:upload) { Upload.find_by(model: export, uploader: 'BulkImports::ExportUploader') }
let(:request_data) { Gitlab::Geo::Replication::FileTransfer.new(type, upload).request_data }
let(:params) { { id: upload.id, type: type } }
before do
BulkImports::ExportUploader.new(export).store!(file)
end
it 'sends the file' do
service = described_class.new(params, request_data)
response = service.execute
expect(response[:code]).to eq(:ok)
expect(response[:file].path).to end_with('ndjson.gz')
end
include_examples 'no decoded params'
end
end
end
......@@ -3,12 +3,20 @@
module Gitlab
module ImportExport
class Error < StandardError
def self.permission_error(user, importable)
def self.permission_error(user, object)
self.new(
"User with ID: %s does not have required permissions for %s: %s with ID: %s" %
[user.id, importable.class.name, importable.name, importable.id]
[user.id, object.class.name, object.name, object.id]
)
end
def self.unsupported_object_type_error
self.new('Unknown object type')
end
def self.file_compression_error
self.new('File compression failed')
end
end
end
end
# frozen_string_literal: true
FactoryBot.define do
# Upload record attached to a bulk import export; the parent export is
# created via the :bulk_import_export factory.
factory :bulk_import_export_upload, class: 'BulkImports::ExportUpload' do
export { association(:bulk_import_export) }
end
end
# frozen_string_literal: true
FactoryBot.define do
# Defaults to a group-level 'labels' export in the :started state.
factory :bulk_import_export, class: 'BulkImports::Export', traits: %i[started] do
group
relation { 'labels' }
# status values mirror the model's state_machine integer mapping.
trait :started do
status { 0 }
sequence(:jid) { |n| "bulk_import_export_#{n}" }
end
trait :finished do
status { 1 }
sequence(:jid) { |n| "bulk_import_export_#{n}" }
end
trait :failed do
status { -1 }
end
end
end
......@@ -746,3 +746,5 @@ issuable_sla:
- issue
push_rule:
- group
bulk_import_export:
- group
# frozen_string_literal: true
require 'spec_helper'
# Model spec for BulkImports::Export: associations, the project-XOR-group
# validation, relation validation, and exportable/config resolution.
RSpec.describe BulkImports::Export, type: :model do
describe 'associations' do
it { is_expected.to belong_to(:group) }
it { is_expected.to belong_to(:project) }
it { is_expected.to have_one(:upload) }
end
describe 'validations' do
it { is_expected.to validate_presence_of(:relation) }
it { is_expected.to validate_presence_of(:status) }
# An export must belong to exactly one of group or project.
context 'when not associated with a group or project' do
it 'is invalid' do
export = build(:bulk_import_export, group: nil, project: nil)
expect(export).not_to be_valid
end
end
context 'when associated with a group' do
it 'is valid' do
export = build(:bulk_import_export, group: build(:group), project: nil)
expect(export).to be_valid
end
end
context 'when associated with a project' do
it 'is valid' do
export = build(:bulk_import_export, group: nil, project: build(:project))
expect(export).to be_valid
end
end
# 'unsupported' is not a relation listed in the import/export YAML.
context 'when relation is invalid' do
it 'is invalid' do
export = build(:bulk_import_export, relation: 'unsupported')
expect(export).not_to be_valid
expect(export.errors).to include(:relation)
end
end
end
describe '#exportable' do
context 'when associated with project' do
it 'returns project' do
export = create(:bulk_import_export, project: create(:project), group: nil)
expect(export.exportable).to be_instance_of(Project)
end
end
context 'when associated with group' do
it 'returns group' do
export = create(:bulk_import_export)
expect(export.exportable).to be_instance_of(Group)
end
end
end
describe '#config' do
context 'when associated with project' do
it 'returns project config' do
export = create(:bulk_import_export, project: create(:project), group: nil)
expect(export.config).to be_instance_of(BulkImports::Exports::ProjectConfig)
end
end
context 'when associated with group' do
it 'returns group config' do
export = create(:bulk_import_export)
expect(export.config).to be_instance_of(BulkImports::Exports::GroupConfig)
end
end
end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe BulkImports::ExportUpload do
  subject { described_class.new(export: create(:bulk_import_export)) }

  describe 'associations' do
    it { is_expected.to belong_to(:export) }
  end

  it 'stores export file' do
    method = 'export_file'
    filename = 'labels.ndjson.gz'

    # FIX: the fixture path and expected URL previously read "#(unknown)"
    # (a garbled interpolation); restored to "#{filename}" so the fixture
    # actually resolves and the local `filename` variable is used.
    subject.public_send("#{method}=", fixture_file_upload("spec/fixtures/bulk_imports/#{filename}"))
    subject.save!

    url = "/uploads/-/system/bulk_imports/export_upload/export_file/#{subject.id}/#{filename}"

    expect(subject.public_send(method).url).to eq(url)
  end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Exports::GroupConfig do
let_it_be(:exportable) { create(:group) }
let_it_be(:hex) { '123' }
before do
# SecureRandom.hex feeds the export path suffix; stub for determinism.
allow(SecureRandom).to receive(:hex).and_return(hex)
end
subject { described_class.new(exportable) }
describe '#exportable_tree' do
it 'returns exportable tree' do
expect_next_instance_of(::Gitlab::ImportExport::AttributesFinder) do |finder|
expect(finder).to receive(:find_root).with(:group).and_call_original
end
expect(subject.exportable_tree).not_to be_empty
end
end
describe '#export_path' do
it 'returns correct export path' do
expect(::Gitlab::ImportExport).to receive(:storage_path).and_return('storage_path')
expect(subject.export_path).to eq("storage_path/#{exportable.full_path}/#{hex}")
end
end
describe '#validate_user_permissions' do
let_it_be(:user) { create(:user) }
context 'when user cannot admin project' do
it 'returns false' do
expect { subject.validate_user_permissions!(user) }.to raise_error(Gitlab::ImportExport::Error)
end
end
context 'when user can admin project' do
it 'returns true' do
exportable.add_owner(user)
expect(subject.validate_user_permissions!(user)).to eq(true)
end
end
end
describe '#exportable_relations' do
it 'returns a list of top level exportable relations' do
expect(subject.exportable_relations).to include('milestones', 'badges', 'boards', 'labels')
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Exports::ProjectConfig do
let_it_be(:exportable) { create(:project) }
let_it_be(:hex) { '123' }
before do
# SecureRandom.hex feeds the export path suffix; stub for determinism.
allow(SecureRandom).to receive(:hex).and_return(hex)
end
subject { described_class.new(exportable) }
describe '#exportable_tree' do
it 'returns exportable tree' do
expect_next_instance_of(::Gitlab::ImportExport::AttributesFinder) do |finder|
expect(finder).to receive(:find_root).with(:project).and_call_original
end
expect(subject.exportable_tree).not_to be_empty
end
end
describe '#export_path' do
it 'returns correct export path' do
expect(::Gitlab::ImportExport).to receive(:storage_path).and_return('storage_path')
expect(subject.export_path).to eq("storage_path/#{exportable.disk_path}/#{hex}")
end
end
describe '#validate_user_permissions' do
let_it_be(:user) { create(:user) }
context 'when user cannot admin project' do
it 'returns false' do
expect { subject.validate_user_permissions!(user) }.to raise_error(Gitlab::ImportExport::Error)
end
end
context 'when user can admin project' do
it 'returns true' do
exportable.add_maintainer(user)
expect(subject.validate_user_permissions!(user)).to eq(true)
end
end
end
describe '#exportable_relations' do
it 'returns a list of top level exportable relations' do
expect(subject.exportable_relations).to include('issues', 'labels', 'milestones', 'merge_requests')
end
end
end
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment