Commit c1fac2c8 authored by Sean McGivern

Merge branch 'try-to-support-ndjson-in-a-generic-way' into 'master'

Refactor JSON reader/writer for import/export

See merge request gitlab-org/gitlab!27679
parents 0a81eafa 1c5fadf1
......@@ -4,12 +4,13 @@ module Gitlab
module ImportExport
module Group
class TreeRestorer
include Gitlab::Utils::StrongMemoize
attr_reader :user
attr_reader :shared
attr_reader :group
def initialize(user:, shared:, group:, group_hash:)
@path = File.join(shared.export_path, 'group.json')
@user = user
@shared = shared
@group = group
......@@ -17,17 +18,14 @@ module Gitlab
end
def restore
@relation_reader ||=
if @group_hash.present?
ImportExport::JSON::LegacyReader::User.new(@group_hash, reader.group_relation_names)
else
ImportExport::JSON::LegacyReader::File.new(@path, reader.group_relation_names)
end
@group_attributes = relation_reader.consume_attributes(nil)
@group_members = relation_reader.consume_relation(nil, 'members')
@group_members = @relation_reader.consume_relation('members')
@children = @relation_reader.consume_attribute('children')
@relation_reader.consume_attribute('name')
@relation_reader.consume_attribute('path')
# We need to remove `name` and `path` as we did consume it in previous pass
@group_attributes.delete('name')
@group_attributes.delete('path')
@children = @group_attributes.delete('children')
if members_mapper.map && restorer.restore
@children&.each do |group_hash|
......@@ -53,16 +51,32 @@ module Gitlab
private
def relation_reader
strong_memoize(:relation_reader) do
if @group_hash.present?
ImportExport::JSON::LegacyReader::Hash.new(
@group_hash,
relation_names: reader.group_relation_names)
else
ImportExport::JSON::LegacyReader::File.new(
File.join(shared.export_path, 'group.json'),
relation_names: reader.group_relation_names)
end
end
end
def restorer
@relation_tree_restorer ||= RelationTreeRestorer.new(
user: @user,
shared: @shared,
importable: @group,
relation_reader: @relation_reader,
relation_reader: relation_reader,
members_mapper: members_mapper,
object_builder: object_builder,
relation_factory: relation_factory,
reader: reader
reader: reader,
importable: @group,
importable_attributes: @group_attributes,
importable_path: nil
)
end
......@@ -88,7 +102,11 @@ module Gitlab
end
def members_mapper
@members_mapper ||= Gitlab::ImportExport::MembersMapper.new(exported_members: @group_members, user: @user, importable: @group)
@members_mapper ||= Gitlab::ImportExport::MembersMapper.new(
exported_members: @group_members,
user: @user,
importable: @group
)
end
def relation_factory
......
......@@ -5,19 +5,25 @@ module Gitlab
module JSON
class LegacyReader
class File < LegacyReader
def initialize(path, relation_names)
include Gitlab::Utils::StrongMemoize
def initialize(path, relation_names:, allowed_path: nil)
@path = path
super(relation_names)
super(
relation_names: relation_names,
allowed_path: allowed_path)
end
def valid?
def exist?
::File.exist?(@path)
end
private
protected
def tree_hash
@tree_hash ||= read_hash
strong_memoize(:tree_hash) do
read_hash
end
end
def read_hash
......@@ -28,13 +34,15 @@ module Gitlab
end
end
class User < LegacyReader
def initialize(tree_hash, relation_names)
class Hash < LegacyReader
def initialize(tree_hash, relation_names:, allowed_path: nil)
@tree_hash = tree_hash
super(relation_names)
super(
relation_names: relation_names,
allowed_path: allowed_path)
end
def valid?
def exist?
@tree_hash.present?
end
......@@ -43,11 +51,16 @@ module Gitlab
attr_reader :tree_hash
end
def initialize(relation_names)
def initialize(relation_names:, allowed_path:)
@relation_names = relation_names.map(&:to_s)
# This is legacy reader, to be used in transition
# period before `.ndjson`,
# we strong validate what is being readed
@allowed_path = allowed_path
end
def valid?
def exist?
raise NotImplementedError
end
......@@ -55,15 +68,22 @@ module Gitlab
true
end
def root_attributes(excluded_attributes = [])
attributes.except(*excluded_attributes.map(&:to_s))
def consume_attributes(importable_path)
unless importable_path == @allowed_path
raise ArgumentError, "Invalid #{importable_path} passed to `consume_attributes`. Use #{@allowed_path} instead."
end
attributes
end
def consume_relation(importable_path, key)
unless importable_path == @allowed_path
raise ArgumentError, "Invalid #{importable_name} passed to `consume_relation`. Use #{@allowed_path} instead."
end
def consume_relation(key)
value = relations.delete(key)
return value unless block_given?
return if value.nil?
if value.is_a?(Array)
......@@ -75,17 +95,13 @@ module Gitlab
end
end
def consume_attribute(key)
attributes.delete(key)
end
def sort_ci_pipelines_by_id
relations['ci_pipelines']&.sort_by! { |hash| hash['id'] }
end
private
attr_reader :relation_names
attr_reader :relation_names, :allowed_path
def tree_hash
raise NotImplementedError
......
......@@ -8,11 +8,15 @@ module Gitlab
attr_reader :path
def initialize(path)
def initialize(path, allowed_path:)
@path = path
@last_array = nil
@keys = Set.new
# This is legacy writer, to be used in transition
# period before `.ndjson`,
# we strong validate what is being written
@allowed_path = allowed_path
mkdir_p(File.dirname(@path))
file.write('{}')
end
......@@ -22,12 +26,44 @@ module Gitlab
@file = nil
end
def set(hash)
def write_attributes(exportable_path, hash)
unless exportable_path == @allowed_path
raise ArgumentError, "Invalid #{exportable_path}"
end
hash.each do |key, value|
write(key, value)
end
end
def write_relation(exportable_path, key, value)
unless exportable_path == @allowed_path
raise ArgumentError, "Invalid #{exportable_path}"
end
write(key, value)
end
def write_relation_array(exportable_path, key, items)
unless exportable_path == @allowed_path
raise ArgumentError, "Invalid #{exportable_path}"
end
write(key, [])
# rewind by two bytes, to overwrite ']}'
file.pos = file.size - 2
items.each_with_index do |item, idx|
file.write(',') if idx > 0
file.write(item.to_json)
end
file.write(']}')
end
private
def write(key, value)
raise ArgumentError, "key '#{key}' already written" if @keys.include?(key)
......@@ -41,29 +77,8 @@ module Gitlab
file.write('}')
@keys.add(key)
@last_array = nil
@last_array_count = nil
end
def append(key, value)
unless @last_array == key
write(key, [])
@last_array = key
@last_array_count = 0
end
# rewind by two bytes, to overwrite ']}'
file.pos = file.size - 2
file.write(',') if @last_array_count > 0
file.write(value.to_json)
file.write(']}')
@last_array_count += 1
end
private
def file
@file ||= File.open(@path, "wb")
end
......
......@@ -14,8 +14,9 @@ module Gitlab
end
end
def initialize(exportable, relations_schema, json_writer)
def initialize(exportable, relations_schema, json_writer, exportable_path:)
@exportable = exportable
@exportable_path = exportable_path
@relations_schema = relations_schema
@json_writer = json_writer
end
......@@ -35,7 +36,7 @@ module Gitlab
def serialize_root
attributes = exportable.as_json(
relations_schema.merge(include: nil, preloads: nil))
json_writer.set(attributes)
json_writer.write_attributes(@exportable_path, attributes)
end
def serialize_relation(definition)
......@@ -53,20 +54,22 @@ module Gitlab
end
def serialize_many_relations(key, records, options)
enumerator = Enumerator.new do |items|
key_preloads = preloads&.dig(key)
records = records.preload(key_preloads) if key_preloads
records.find_each(batch_size: BATCH_SIZE) do |record|
json = Raw.new(record.to_json(options))
json_writer.append(key, json)
items << Raw.new(record.to_json(options))
end
end
json_writer.write_relation_array(@exportable_path, key, enumerator)
end
def serialize_single_relation(key, record, options)
json = Raw.new(record.to_json(options))
json_writer.write(key, json)
json_writer.write_relation(@exportable_path, key, json)
end
def includes
......
......@@ -4,6 +4,8 @@ module Gitlab
module ImportExport
module Project
class TreeRestorer
include Gitlab::Utils::StrongMemoize
attr_reader :user
attr_reader :shared
attr_reader :project
......@@ -15,9 +17,8 @@ module Gitlab
end
def restore
@relation_reader = ImportExport::JSON::LegacyReader::File.new(File.join(shared.export_path, 'project.json'), reader.project_relation_names)
@project_members = @relation_reader.consume_relation('project_members')
@project_attributes = relation_reader.consume_attributes(importable_path)
@project_members = relation_reader.consume_relation(importable_path, 'project_members')
if relation_tree_restorer.restore
import_failure_service.with_retry(action: 'set_latest_merge_request_diff_ids!') do
......@@ -35,16 +36,28 @@ module Gitlab
private
def relation_reader
strong_memoize(:relation_reader) do
ImportExport::JSON::LegacyReader::File.new(
File.join(shared.export_path, 'project.json'),
relation_names: reader.project_relation_names,
allowed_path: importable_path
)
end
end
def relation_tree_restorer
@relation_tree_restorer ||= RelationTreeRestorer.new(
user: @user,
shared: @shared,
importable: @project,
relation_reader: @relation_reader,
relation_reader: relation_reader,
object_builder: object_builder,
members_mapper: members_mapper,
relation_factory: relation_factory,
reader: reader
reader: reader,
importable: @project,
importable_attributes: @project_attributes,
importable_path: importable_path
)
end
......@@ -69,6 +82,10 @@ module Gitlab
def import_failure_service
@import_failure_service ||= ImportFailureService.new(@project)
end
def importable_path
"project"
end
end
end
end
......
......@@ -15,10 +15,17 @@ module Gitlab
end
def save
json_writer = ImportExport::JSON::LegacyWriter.new(@full_path)
serializer = ImportExport::JSON::StreamingSerializer.new(exportable, reader.project_tree, json_writer)
serializer.execute
json_writer = ImportExport::JSON::LegacyWriter.new(
@full_path,
allowed_path: "project"
)
ImportExport::JSON::StreamingSerializer.new(
exportable,
reader.project_tree,
json_writer,
exportable_path: "project"
).execute
true
rescue => e
......
......@@ -11,7 +11,15 @@ module Gitlab
attr_reader :importable
attr_reader :relation_reader
def initialize(user:, shared:, importable:, relation_reader:, members_mapper:, object_builder:, relation_factory:, reader:)
def initialize( # rubocop:disable Metrics/ParameterLists
user:, shared:, relation_reader:,
members_mapper:, object_builder:,
relation_factory:,
reader:,
importable:,
importable_attributes:,
importable_path:
)
@user = user
@shared = shared
@importable = importable
......@@ -20,6 +28,8 @@ module Gitlab
@object_builder = object_builder
@relation_factory = relation_factory
@reader = reader
@importable_attributes = importable_attributes
@importable_path = importable_path
end
def restore
......@@ -57,7 +67,7 @@ module Gitlab
end
def process_relation!(relation_key, relation_definition)
@relation_reader.consume_relation(relation_key) do |data_hash, relation_index|
@relation_reader.consume_relation(@importable_path, relation_key) do |data_hash, relation_index|
process_relation_item!(relation_key, relation_definition, relation_index, data_hash)
end
end
......@@ -94,7 +104,7 @@ module Gitlab
end
def update_params!
params = @relation_reader.root_attributes(relations.keys)
params = @importable_attributes.except(*relations.keys.map(&:to_s))
params = params.merge(present_override_params)
# Cleaning all imported and overridden params
......
# frozen_string_literal: true
require 'spec_helper'
require_relative 'shared_example.rb'
# Specs for the file-backed legacy JSON reader used during import.
# Reuses the shared reader examples; for the File reader, `data` is the
# fixture path itself (the Hash reader passes a parsed hash instead).
describe Gitlab::ImportExport::JSON::LegacyReader::File do
it_behaves_like 'import/export json legacy reader' do
let(:valid_path) { 'spec/fixtures/lib/gitlab/import_export/light/project.json' }
let(:data) { valid_path }
# Expected content, parsed independently of the reader under test.
let(:json_data) { JSON.parse(File.read(valid_path)) }
end
# `exist?` delegates to ::File.exist? on the configured path.
describe '#exist?' do
let(:legacy_reader) do
described_class.new(path, relation_names: [])
end
subject { legacy_reader.exist? }
context 'given valid path' do
let(:path) { 'spec/fixtures/lib/gitlab/import_export/light/project.json' }
it { is_expected.to be true }
end
context 'given invalid path' do
let(:path) { 'spec/non-existing-folder/do-not-create-this-file.json' }
it { is_expected.to be false }
end
end
end
# frozen_string_literal: true
require 'spec_helper'
require_relative 'shared_example.rb'
# Specs for the in-memory (hash-backed) legacy JSON reader, renamed from
# `LegacyReader::User` in this change.
describe Gitlab::ImportExport::JSON::LegacyReader::Hash do
it_behaves_like 'import/export json legacy reader' do
let(:path) { 'spec/fixtures/lib/gitlab/import_export/light/project.json' }
# the hash is modified by the `LegacyReader`
# we need to deep-dup it
# (`data` and `json_data` therefore each parse the fixture separately, so
# mutations performed by the reader cannot leak into the expectation.)
let(:json_data) { JSON.parse(File.read(path)) }
let(:data) { JSON.parse(File.read(path)) }
end
# `exist?` is simply presence of the supplied tree hash.
describe '#exist?' do
let(:legacy_reader) do
described_class.new(tree_hash, relation_names: [])
end
subject { legacy_reader.exist? }
context 'tree_hash is nil' do
let(:tree_hash) { nil }
it { is_expected.to be_falsey }
end
context 'tree_hash presents' do
let(:tree_hash) { { "issues": [] } }
it { is_expected.to be_truthy }
end
end
end
# frozen_string_literal: true
# Shared examples exercising the LegacyReader contract, included by both the
# `File` and `Hash` reader specs. Includers must define:
#   - `data`      — the constructor input (a path for File, a hash for Hash)
#   - `json_data` — the parsed fixture the reader is expected to expose
RSpec.shared_examples 'import/export json legacy reader' do
  let(:relation_names) { [] }

  let(:legacy_reader) do
    described_class.new(
      data,
      relation_names: relation_names,
      allowed_path: "project")
  end

  describe '#consume_attributes' do
    context 'when valid path is passed' do
      subject { legacy_reader.consume_attributes("project") }

      context 'no excluded attributes' do
        let(:relation_names) { [] }

        it 'returns the whole tree from parsed JSON' do
          expect(subject).to eq(json_data)
        end
      end

      context 'some attributes are excluded' do
        # Keys listed in `relation_names` are treated as relations and are
        # stripped from the attribute hash the reader returns.
        let(:relation_names) { %w[milestones labels] }

        it 'returns hash without excluded attributes and relations' do
          expect(subject).not_to include('milestones', 'labels')
        end
      end
    end

    context 'when invalid path is passed' do
      it 'raises an exception' do
        expect { legacy_reader.consume_attributes("invalid-path") }
          .to raise_error(ArgumentError)
      end
    end
  end

  describe '#consume_relation' do
    context 'when valid path is passed' do
      let(:key) { 'description' }

      context 'block not given' do
        it 'returns value of the key' do
          expect(legacy_reader).to receive(:relations).and_return({ key => 'test value' })
          expect(legacy_reader.consume_relation("project", key)).to eq('test value')
        end
      end

      context 'key has been consumed' do
        before do
          legacy_reader.consume_relation("project", key)
        end

        it 'does not yield' do
          expect do |blk|
            legacy_reader.consume_relation("project", key, &blk)
          end.not_to yield_control
        end
      end

      context 'value is nil' do
        before do
          expect(legacy_reader).to receive(:relations).and_return({ key => nil })
        end

        it 'does not yield' do
          expect do |blk|
            legacy_reader.consume_relation("project", key, &blk)
          end.not_to yield_control
        end
      end

      context 'value is not array' do
        before do
          expect(legacy_reader).to receive(:relations).and_return({ key => 'value' })
        end

        it 'yield the value with index 0' do
          expect do |blk|
            legacy_reader.consume_relation("project", key, &blk)
          end.to yield_with_args('value', 0)
        end
      end

      context 'value is an array' do
        before do
          expect(legacy_reader).to receive(:relations).and_return({ key => %w[item1 item2 item3] })
        end

        it 'yield each array element with index' do
          expect do |blk|
            legacy_reader.consume_relation("project", key, &blk)
          end.to yield_successive_args(['item1', 0], ['item2', 1], ['item3', 2])
        end
      end
    end

    context 'when invalid path is passed' do
      it 'raises an exception' do
        # NOTE(review): only the path is passed here, so the ArgumentError this
        # asserts comes from the call's wrong arity, not from the reader's path
        # check. Passing a key as well currently surfaces a NameError because
        # the mismatch message interpolates undefined `importable_name` — fix
        # that message, then tighten this to `consume_relation("invalid", key)`.
        expect { legacy_reader.consume_relation("invalid") }
          .to raise_error(ArgumentError)
      end
    end
  end
end
# frozen_string_literal: true
require 'spec_helper'
# Legacy spec for the pre-refactor reader API: in this change
# `LegacyReader::User` is renamed to `LegacyReader::Hash`, `#valid?` to
# `#exist?`, and `relation_names` becomes a keyword argument.
describe Gitlab::ImportExport::JSON::LegacyReader::User do
let(:relation_names) { [] }
let(:legacy_reader) { described_class.new(tree_hash, relation_names) }
# `valid?` reports whether the in-memory tree hash is present.
describe '#valid?' do
subject { legacy_reader.valid? }
context 'tree_hash not present' do
let(:tree_hash) { nil }
it { is_expected.to be false }
end
context 'tree_hash presents' do
let(:tree_hash) { { "issues": [] } }
it { is_expected.to be true }
end
end
end
# Legacy spec for the pre-refactor file reader API: `#valid?` (now `#exist?`),
# `#root_attributes` (replaced by `#consume_attributes`), positional
# `relation_names`, and `#consume_relation(key)` without a path argument.
describe Gitlab::ImportExport::JSON::LegacyReader::File do
let(:fixture) { 'spec/fixtures/lib/gitlab/import_export/light/project.json' }
# Expected content, parsed independently of the reader under test.
let(:project_tree) { JSON.parse(File.read(fixture)) }
let(:relation_names) { [] }
let(:legacy_reader) { described_class.new(path, relation_names) }
# `valid?` delegates to ::File.exist? on the configured path.
describe '#valid?' do
subject { legacy_reader.valid? }
context 'given valid path' do
let(:path) { fixture }
it { is_expected.to be true }
end
context 'given invalid path' do
let(:path) { 'spec/non-existing-folder/do-not-create-this-file.json' }
it { is_expected.to be false }
end
end
# `root_attributes` returns top-level attributes minus the given exclusions
# and any configured relation names.
describe '#root_attributes' do
let(:path) { fixture }
subject { legacy_reader.root_attributes(excluded_attributes) }
context 'No excluded attributes' do
let(:excluded_attributes) { [] }
let(:relation_names) { [] }
it 'returns the whole tree from parsed JSON' do
expect(subject).to eq(project_tree)
end
end
context 'Some attributes are excluded' do
let(:excluded_attributes) { %w[milestones labels issues services snippets] }
let(:relation_names) { %w[import_type archived] }
it 'returns hash without excluded attributes and relations' do
expect(subject).not_to include('milestones', 'labels', 'issues', 'services', 'snippets', 'import_type', 'archived')
end
end
end
# `consume_relation` deletes the key from the relations hash, so a second
# call for the same key yields nothing.
describe '#consume_relation' do
let(:path) { fixture }
let(:key) { 'description' }
context 'block not given' do
it 'returns value of the key' do
expect(legacy_reader).to receive(:relations).and_return({ key => 'test value' })
expect(legacy_reader.consume_relation(key)).to eq('test value')
end
end
context 'key has been consumed' do
before do
legacy_reader.consume_relation(key)
end
it 'does not yield' do
expect do |blk|
legacy_reader.consume_relation(key, &blk)
end.not_to yield_control
end
end
context 'value is nil' do
before do
expect(legacy_reader).to receive(:relations).and_return({ key => nil })
end
it 'does not yield' do
expect do |blk|
legacy_reader.consume_relation(key, &blk)
end.not_to yield_control
end
end
context 'value is not array' do
before do
expect(legacy_reader).to receive(:relations).and_return({ key => 'value' })
end
it 'yield the value with index 0' do
expect do |blk|
legacy_reader.consume_relation(key, &blk)
end.to yield_with_args('value', 0)
end
end
context 'value is an array' do
before do
expect(legacy_reader).to receive(:relations).and_return({ key => %w[item1 item2 item3] })
end
it 'yield each array element with index' do
expect do |blk|
legacy_reader.consume_relation(key, &blk)
end.to yield_successive_args(['item1', 0], ['item2', 1], ['item3', 2])
end
end
end
# `tree_hash` is private (hence `send`); it lazily parses the file.
describe '#tree_hash' do
let(:path) { fixture }
subject { legacy_reader.send(:tree_hash) }
it 'parses the JSON into the expected tree' do
expect(subject).to eq(project_tree)
end
context 'invalid JSON' do
let(:path) { 'spec/fixtures/lib/gitlab/import_export/invalid_json/project.json' }
it 'raise Exception' do
expect { subject }.to raise_exception(Gitlab::ImportExport::Error, 'Incorrect JSON format')
end
end
end
end
......@@ -5,20 +5,37 @@ require 'spec_helper'
describe Gitlab::ImportExport::JSON::LegacyWriter do
let(:path) { "#{Dir.tmpdir}/legacy_writer_spec/test.json" }
subject { described_class.new(path) }
subject do
described_class.new(path, allowed_path: "project")
end
after do
FileUtils.rm_rf(path)
end
describe "#write" do
describe "#write_attributes" do
it "writes correct json" do
expected_hash = { "key" => "value_1", "key_1" => "value_2" }
subject.write_attributes("project", expected_hash)
expect(subject_json).to eq(expected_hash)
end
context 'when invalid path is used' do
it 'raises an exception' do
expect { subject.write_attributes("invalid", { "key" => "value" }) }
.to raise_error(ArgumentError)
end
end
end
describe "#write_relation" do
context "when key is already written" do
it "raises exception" do
key = "key"
value = "value"
subject.write(key, value)
subject.write_relation("project", "key", "old value")
expect { subject.write(key, "new value") }.to raise_exception("key '#{key}' already written")
expect { subject.write_relation("project", "key", "new value") }
.to raise_exception("key 'key' already written")
end
end
......@@ -27,53 +44,58 @@ describe Gitlab::ImportExport::JSON::LegacyWriter do
it "writes correct json" do
expected_hash = { "key" => "value_1", "key_1" => "value_2" }
expected_hash.each do |key, value|
subject.write(key, value)
subject.write_relation("project", key, value)
end
subject.close
expect(saved_json(path)).to eq(expected_hash)
end
expect(subject_json).to eq(expected_hash)
end
end
end
describe "#append" do
context "when key is already written" do
it "appends values under a given key" do
key = "key"
values = %w(value_1 value_2)
expected_hash = { key => values }
values.each do |value|
subject.append(key, value)
context 'when invalid path is used' do
it 'raises an exception' do
expect { subject.write_relation("invalid", "key", "value") }
.to raise_error(ArgumentError)
end
subject.close
end
end
describe "#write_relation_array" do
context 'when array is used' do
it 'writes correct json' do
subject.write_relation_array("project", "key", ["value"])
expect(saved_json(path)).to eq(expected_hash)
expect(subject_json).to eq({ "key" => ["value"] })
end
end
context "when key is not already written" do
it "writes correct json" do
expected_hash = { "key" => ["value"] }
subject.append("key", "value")
subject.close
context 'when enumerable is used' do
it 'writes correct json' do
values = %w(value1 value2)
expect(saved_json(path)).to eq(expected_hash)
enumerator = Enumerator.new do |items|
values.each { |value| items << value }
end
subject.write_relation_array("project", "key", enumerator)
expect(subject_json).to eq({ "key" => values })
end
end
describe "#set" do
it "writes correct json" do
expected_hash = { "key" => "value_1", "key_1" => "value_2" }
subject.set(expected_hash)
subject.close
context "when key is already written" do
it "raises an exception" do
subject.write_relation_array("project", "key", %w(old_value))
expect(saved_json(path)).to eq(expected_hash)
expect { subject.write_relation_array("project", "key", %w(new_value)) }
.to raise_error(ArgumentError)
end
end
end
def subject_json
subject.close
def saved_json(filename)
::JSON.parse(IO.read(filename))
::JSON.parse(IO.read(subject.path))
end
end
......@@ -14,11 +14,10 @@ describe Gitlab::ImportExport::RelationTreeRestorer do
let(:user) { create(:user) }
let(:shared) { Gitlab::ImportExport::Shared.new(importable) }
let(:members_mapper) { Gitlab::ImportExport::MembersMapper.new(exported_members: {}, user: user, importable: importable) }
let(:attributes) { {} }
let(:importable_hash) do
json = IO.read(path)
ActiveSupport::JSON.decode(json)
let(:members_mapper) do
Gitlab::ImportExport::MembersMapper.new(exported_members: {}, user: user, importable: importable)
end
let(:relation_tree_restorer) do
......@@ -26,11 +25,13 @@ describe Gitlab::ImportExport::RelationTreeRestorer do
user: user,
shared: shared,
relation_reader: relation_reader,
importable: importable,
object_builder: object_builder,
members_mapper: members_mapper,
relation_factory: relation_factory,
reader: reader
reader: reader,
importable: importable,
importable_path: nil,
importable_attributes: attributes
)
end
......@@ -100,7 +101,14 @@ describe Gitlab::ImportExport::RelationTreeRestorer do
let(:reader) { Gitlab::ImportExport::Reader.new(shared: shared) }
context 'using legacy reader' do
let(:relation_reader) { Gitlab::ImportExport::JSON::LegacyReader::File.new(path, reader.project_relation_names) }
let(:relation_reader) do
Gitlab::ImportExport::JSON::LegacyReader::File.new(
path,
relation_names: reader.project_relation_names
)
end
let(:attributes) { relation_reader.consume_attributes(nil) }
it_behaves_like 'import project successfully'
......@@ -119,7 +127,7 @@ describe Gitlab::ImportExport::RelationTreeRestorer do
let(:importable) { create(:group, parent: group) }
let(:object_builder) { Gitlab::ImportExport::Group::ObjectBuilder }
let(:relation_factory) { Gitlab::ImportExport::Group::RelationFactory }
let(:relation_reader) { Gitlab::ImportExport::JSON::LegacyReader::File.new(path, reader.group_relation_names) }
let(:relation_reader) { Gitlab::ImportExport::JSON::LegacyReader::File.new(path, relation_names: reader.group_relation_names) }
let(:reader) do
Gitlab::ImportExport::Reader.new(
shared: shared,
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment