Commit 96db744c authored by Illya Klymov

Implement separate status for bulk imports

- return a separate status for each top-level group

Changelog: added
parent e4d749f0
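In practice, this replaces the single bulk-import job id returned by the create endpoint with one result per requested top-level group, which the client resolver then maps onto per-group progress. A minimal sketch of the new contract, assuming the field names shown in the controller and resolver diffs below (the endpoint path, variable names, and concrete values are illustrative only):

// Request body sent by the client resolver (one entry per selected group):
//   { bulk_import: [{ source_type: 'group_entity', source_full_path, destination_name, destination_namespace }, ...] }
// Response returned by Import::BulkImportsController#create, in request order:
const exampleResponses = [
  { success: true, id: 42, message: null }, // mapped to STATUSES.CREATED
  { success: false, id: null, message: 'Record invalid' }, // mapped to STATUSES.FAILED; message is surfaced in the target cell alert
];
// The resolver pairs each request with its response by index (responses[idx]) and
// caches the resulting progress ({ id, status, message }) per group in local storage.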
......@@ -314,9 +314,8 @@ export default {
variables: { importRequests },
});
} catch (error) {
const message = error?.networkError?.response?.data?.error ?? i18n.ERROR_IMPORT;
createFlash({
message,
message: i18n.ERROR_IMPORT,
captureError: true,
error,
});
......
......@@ -32,8 +32,10 @@ export default {
fullPath() {
return this.group.importTarget.targetNamespace.fullPath || s__('BulkImport|No parent');
},
invalidNameValidationMessage() {
return getInvalidNameValidationMessage(this.group.importTarget);
validationMessage() {
return (
this.group.progress?.message || getInvalidNameValidationMessage(this.group.importTarget)
);
},
},
};
......@@ -93,10 +95,11 @@ export default {
@input="$emit('update-new-name', $event)"
/>
<p
v-if="group.flags.isAvailableForImport && group.flags.isInvalid"
v-if="group.flags.isAvailableForImport && (group.flags.isInvalid || validationMessage)"
class="gl-text-red-500 gl-m-0 gl-mt-2"
role="alert"
>
{{ invalidNameValidationMessage }}
{{ validationMessage }}
</p>
</div>
</div>
......
......@@ -142,9 +142,7 @@ export function createResolvers({ endpoints }) {
};
});
const {
data: { id: jobId },
} = await axios.post(endpoints.createBulkImport, {
const { data: responses } = await axios.post(endpoints.createBulkImport, {
bulk_import: importOperations.map((op) => ({
source_type: 'group_entity',
source_full_path: op.group.fullPath,
......@@ -153,15 +151,17 @@ export function createResolvers({ endpoints }) {
})),
});
return importOperations.map((op) => {
return importOperations.map((op, idx) => {
const response = responses[idx];
const lastImportTarget = {
targetNamespace: op.targetNamespace,
newName: op.newName,
};
const progress = {
id: jobId,
status: STATUSES.CREATED,
id: response.id || `local-${Date.now()}-${idx}`,
status: response.success ? STATUSES.CREATED : STATUSES.FAILED,
message: response.message || null,
};
localStorageCache.set(op.group.webUrl, { progress, lastImportTarget });
......
......@@ -9,6 +9,7 @@ mutation importGroups($importRequests: [ImportGroupInput!]!) {
progress {
id
status
message
}
}
}
......@@ -22,7 +22,14 @@ export class LocalStorageCache {
loadCacheFromStorage() {
try {
return JSON.parse(this.storage.getItem(KEY)) ?? {};
const storage = JSON.parse(this.storage.getItem(KEY)) ?? {};
Object.values(storage).forEach((entry) => {
if (entry.progress && !('message' in entry.progress)) {
// eslint-disable-next-line no-param-reassign
entry.progress.message = '';
}
});
return storage;
} catch {
return {};
}
......
......@@ -16,6 +16,7 @@ type ClientBulkImportSourceGroupConnection {
type ClientBulkImportProgress {
id: ID!
status: String!
message: String
}
type ClientBulkImportValidationError {
......
......@@ -40,13 +40,9 @@ class Import::BulkImportsController < ApplicationController
end
def create
response = ::BulkImports::CreateService.new(current_user, create_params, credentials).execute
responses = create_params.map { |entry| ::BulkImports::CreateService.new(current_user, [entry], credentials).execute }
if response.success?
render json: response.payload.to_json(only: [:id])
else
render json: { error: response.message }, status: response.http_status
end
render json: responses.map { |response| { success: response.success?, id: response.payload[:id], message: response.message } }
end
def realtime_changes
......
......@@ -215,9 +215,13 @@ RSpec.describe Import::BulkImportsController do
let(:pat) { "fake-pat" }
let(:bulk_import_params) do
[{ "source_type" => "group_entity",
"source_full_path" => "full_path",
"destination_name" => "destination_name",
"destination_namespace" => "root" }]
"source_full_path" => "full_path",
"destination_name" => "destination_name",
"destination_namespace" => "root" },
{ "source_type" => "group_entity2",
"source_full_path" => "full_path2",
"destination_name" => "destination_name2",
"destination_namespace" => "root" }]
end
before do
......@@ -225,29 +229,23 @@ RSpec.describe Import::BulkImportsController do
session[:bulk_import_gitlab_url] = instance_url
end
it 'executes BulkImpors::CreatetService' do
it 'executes BulkImports::CreateService' do
error_response = ServiceResponse.error(message: 'Record invalid', http_status: :unprocessable_entity)
expect_next_instance_of(
::BulkImports::CreateService, user, bulk_import_params, { url: instance_url, access_token: pat }) do |service|
::BulkImports::CreateService, user, [bulk_import_params[0]], { url: instance_url, access_token: pat }) do |service|
allow(service).to receive(:execute).and_return(ServiceResponse.success(payload: bulk_import))
end
post :create, params: { bulk_import: bulk_import_params }
expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to eq({ id: bulk_import.id }.to_json)
end
it 'returns error when validation fails' do
error_response = ServiceResponse.error(message: 'Record invalid', http_status: :unprocessable_entity)
expect_next_instance_of(
::BulkImports::CreateService, user, bulk_import_params, { url: instance_url, access_token: pat }) do |service|
::BulkImports::CreateService, user, [bulk_import_params[1]], { url: instance_url, access_token: pat }) do |service|
allow(service).to receive(:execute).and_return(error_response)
end
post :create, params: { bulk_import: bulk_import_params }
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(response.body).to eq({ error: 'Record invalid' }.to_json)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq([{ "success" => true, "id" => bulk_import.id, "message" => nil },
{ "success" => false, "id" => nil, "message" => "Record invalid" }])
end
end
end
......
......@@ -123,13 +123,22 @@ describe('import target cell', () => {
});
describe('when entity is available for import', () => {
const FAKE_PROGRESS_MESSAGE = 'progress message';
beforeEach(() => {
group = generateFakeTableEntry({ id: 1, flags: { isAvailableForImport: true } });
group = generateFakeTableEntry({
id: 1,
flags: { isAvailableForImport: true },
progress: { message: FAKE_PROGRESS_MESSAGE },
});
createComponent({ group });
});
it('renders namespace dropdown as enabled', () => {
expect(findNamespaceDropdown().attributes('disabled')).toBe(undefined);
});
it('renders progress message as error if it exists', () => {
expect(wrapper.find('[role=alert]').text()).toBe(FAKE_PROGRESS_MESSAGE);
});
});
});
......@@ -163,12 +163,14 @@ describe('Bulk import resolvers', () => {
});
describe('mutations', () => {
beforeEach(() => {
axiosMockAdapter.onPost(FAKE_ENDPOINTS.createBulkImport).reply(httpStatus.OK, { id: 1 });
});
beforeEach(() => {});
describe('importGroup', () => {
it('sets import status to CREATED when request completes', async () => {
it('sets import status to CREATED for successful groups when request completes', async () => {
axiosMockAdapter
.onPost(FAKE_ENDPOINTS.createBulkImport)
.reply(httpStatus.OK, [{ success: true, id: 1 }]);
await client.mutate({
mutation: importGroupsMutation,
variables: {
......@@ -185,9 +187,37 @@ describe('Bulk import resolvers', () => {
await axios.waitForAll();
expect(results[0].progress.status).toBe(STATUSES.CREATED);
});
it('sets import status to FAILED and sets progress message for failed groups when request completes', async () => {
const FAKE_ERROR_MESSAGE = 'foo';
axiosMockAdapter
.onPost(FAKE_ENDPOINTS.createBulkImport)
.reply(httpStatus.OK, [{ success: false, id: 1, message: FAKE_ERROR_MESSAGE }]);
await client.mutate({
mutation: importGroupsMutation,
variables: {
importRequests: [
{
sourceGroupId: statusEndpointFixture.importable_data[0].id,
newName: 'test',
targetNamespace: 'root',
},
],
},
});
await axios.waitForAll();
expect(results[0].progress.status).toBe(STATUSES.FAILED);
expect(results[0].progress.message).toBe(FAKE_ERROR_MESSAGE);
});
});
it('updateImportStatus updates status', async () => {
axiosMockAdapter
.onPost(FAKE_ENDPOINTS.createBulkImport)
.reply(httpStatus.OK, [{ success: true, id: 1 }]);
const NEW_STATUS = 'dummy';
await client.mutate({
mutation: importGroupsMutation,
......@@ -216,6 +246,7 @@ describe('Bulk import resolvers', () => {
expect(statusInResponse).toStrictEqual({
__typename: clientTypenames.BulkImportProgress,
id,
message: null,
status: NEW_STATUS,
});
});
......
import { STATUSES } from '~/import_entities/constants';
import { clientTypenames } from '~/import_entities/import_groups/graphql/client_factory';
export const generateFakeEntry = ({ id, status, ...rest }) => ({
export const generateFakeEntry = ({ id, status, message, ...rest }) => ({
__typename: clientTypenames.BulkImportSourceGroup,
webUrl: `https://fake.host/${id}`,
fullPath: `fake_group_${id}`,
......@@ -18,6 +18,7 @@ export const generateFakeEntry = ({ id, status, ...rest }) => ({
: {
id,
status,
message: message || '',
},
...rest,
});
......