Commit 219eead2 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 7c38405b
......@@ -305,7 +305,6 @@ linters:
- 'app/views/shared/_milestone_expired.html.haml'
- 'app/views/shared/_no_password.html.haml'
- 'app/views/shared/_no_ssh.html.haml'
- 'app/views/shared/_outdated_browser.html.haml'
- 'app/views/shared/_ping_consent.html.haml'
- 'app/views/shared/_project_limit.html.haml'
- 'app/views/shared/boards/components/_board.html.haml'
......
......@@ -25,6 +25,8 @@ import { severityLevel, severityLevelVariant, errorStatus } from './constants';
import query from '../queries/details.query.graphql';
const SENTRY_TIMEOUT = 10000;
export default {
components: {
GlButton,
......@@ -87,6 +89,8 @@ export default {
if (res.data.project?.sentryErrors?.detailedError) {
this.$apollo.queries.error.stopPolling();
this.setStatus(this.error.status);
} else {
this.onNoApolloResult();
}
},
},
......@@ -94,6 +98,8 @@ export default {
data() {
return {
error: null,
errorLoading: true,
errorPollTimeout: 0,
issueCreationInProgress: false,
isAlertVisible: false,
closedIssueId: null,
......@@ -158,8 +164,19 @@ export default {
return this.errorStatus !== errorStatus.RESOLVED ? __('Resolve') : __('Unresolve');
},
},
watch: {
error(val) {
if (val) {
this.errorLoading = false;
}
},
},
mounted() {
this.startPollingStacktrace(this.issueStackTracePath);
this.errorPollTimeout = Date.now() + SENTRY_TIMEOUT;
this.$apollo.queries.error.setOptions({
fetchPolicy: 'cache-and-network',
});
},
methods: {
...mapActions('details', [
......@@ -191,6 +208,13 @@ export default {
}
});
},
onNoApolloResult() {
if (Date.now() > this.errorPollTimeout) {
this.$apollo.queries.error.stopPolling();
this.errorLoading = false;
createFlash(__('Could not connect to Sentry. Refresh the page to try again.'), 'warning');
}
},
formatDate(date) {
return `${this.timeFormatted(date)} (${dateFormat(date, 'UTC:yyyy-mm-dd h:MM:ssTT Z')})`;
},
......@@ -200,7 +224,7 @@ export default {
<template>
<div>
<div v-if="$apollo.queries.error.loading" class="py-3">
<div v-if="errorLoading" class="py-3">
<gl-loading-icon :size="3" />
</div>
<div v-else-if="error" class="error-details">
......
# frozen_string_literal: true
class Import::GitlabProjectsController < Import::BaseController
include WorkhorseRequest
before_action :whitelist_query_limiting, only: [:create]
before_action :verify_gitlab_project_import_enabled
skip_before_action :verify_authenticity_token, only: [:authorize]
before_action :verify_workhorse_api!, only: [:authorize]
def new
@namespace = Namespace.find(project_params[:namespace_id])
return render_404 unless current_user.can?(:create_projects, @namespace)
......@@ -28,10 +33,29 @@ class Import::GitlabProjectsController < Import::BaseController
end
end
def authorize
set_workhorse_internal_api_content_type
authorized = ImportExportUploader.workhorse_authorize(
has_length: false,
maximum_size: Gitlab::CurrentSettings.max_attachment_size.megabytes.to_i)
render json: authorized
rescue SocketError
render json: _("Error uploading file"), status: :internal_server_error
end
private
def file_is_valid?
return false unless project_params[:file] && project_params[:file].respond_to?(:read)
# TODO: remove the condition and the private method after the WH version including
# https://gitlab.com/gitlab-org/gitlab-workhorse/-/merge_requests/470
# is released and GITLAB_WORKHORSE_VERSION is updated accordingly.
if with_workhorse_upload_acceleration?
return false unless project_params[:file].is_a?(::UploadedFile)
else
return false unless project_params[:file] && project_params[:file].respond_to?(:read)
end
filename = project_params[:file].original_filename
......@@ -51,4 +75,8 @@ class Import::GitlabProjectsController < Import::BaseController
def whitelist_query_limiting
Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-foss/issues/42437')
end
def with_workhorse_upload_acceleration?
request.headers[Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER].present?
end
end
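For reference, a rough sketch of the two payload shapes the new authorize action hands back to Workhorse, with key names taken from the controller spec added later in this commit and all values assumed for illustration:

# Local storage (direct upload disabled): Workhorse writes the upload under TempPath.
local_response = {
  'TempPath' => ImportExportUploader.workhorse_local_upload_path
}

# Object storage with direct upload enabled: Workhorse streams straight to the
# presigned URLs described under RemoteObject instead of touching local disk.
remote_response = {
  'RemoteObject' => {
    'ID' => 'upload-id',           # assumed example value
    'GetURL' => 'https://...',     # presigned GET
    'StoreURL' => 'https://...',   # presigned PUT
    'DeleteURL' => 'https://...',  # presigned DELETE
    'MultipartUpload' => {}        # present when the store supports multipart uploads
  }
}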
......@@ -227,7 +227,7 @@ module ApplicationHelper
end
def outdated_browser?
browser.ie? && browser.version.to_i < 10
browser.ie?
end
def path_to_key(key, admin = false)
......
......@@ -111,8 +111,8 @@ module BulkInsertSafe
end
def _bulk_insert_reject_primary_key!(attributes, primary_key)
if attributes.delete(primary_key)
raise PrimaryKeySetError, "Primary key set: #{primary_key}:#{attributes[primary_key]}\n" \
if existing_pk = attributes.delete(primary_key)
raise PrimaryKeySetError, "Primary key set: #{primary_key}:#{existing_pk}\n" \
"Bulk-inserts are only supported for rows that don't already have PK set"
end
end
......
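The BulkInsertSafe change above fixes a subtle bug in the raised message: the old code interpolated attributes[primary_key] after Hash#delete had already removed the key, so the reported value was always nil. A minimal sketch with a plain Hash (no Rails required) showing the difference:

attributes = { 'id' => 42, 'name' => 'foo' }

# Old behaviour: delete first, read afterwards -- the value is gone by the time
# the message is built, so the interpolation was always empty.
attributes.delete('id')  # => 42
attributes['id']         # => nil

# New behaviour: capture the return value of #delete so the error message can
# include the offending primary key value.
attributes = { 'id' => 42, 'name' => 'foo' }
if existing_pk = attributes.delete('id')
  raise "Primary key set: id:#{existing_pk}"
end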
......@@ -93,11 +93,14 @@ module BulkInsertableAssociations
end
def _bulk_insert_configure_foreign_key(reflection, items)
primary_key = self[reflection.active_record_primary_key]
raise "Classes including `BulkInsertableAssociations` must define a `primary_key`" unless primary_key
primary_key_column = reflection.active_record_primary_key
raise "Classes including `BulkInsertableAssociations` must define a `primary_key`" unless primary_key_column
primary_key_value = self[primary_key_column]
raise "No value found for primary key `#{primary_key_column}`" unless primary_key_value
items.each do |item|
item[reflection.foreign_key] = primary_key
item[reflection.foreign_key] = primary_key_value
if reflection.type
item[reflection.type] = self.class.polymorphic_name
......
......@@ -14,23 +14,23 @@ module NotificationRecipients
end
def self.build_recipients(*args)
Builder::Default.new(*args).notification_recipients
::NotificationRecipients::Builder::Default.new(*args).notification_recipients
end
def self.build_new_note_recipients(*args)
Builder::NewNote.new(*args).notification_recipients
::NotificationRecipients::Builder::NewNote.new(*args).notification_recipients
end
def self.build_merge_request_unmergeable_recipients(*args)
Builder::MergeRequestUnmergeable.new(*args).notification_recipients
::NotificationRecipients::Builder::MergeRequestUnmergeable.new(*args).notification_recipients
end
def self.build_project_maintainers_recipients(*args)
Builder::ProjectMaintainers.new(*args).notification_recipients
::NotificationRecipients::Builder::ProjectMaintainers.new(*args).notification_recipients
end
def self.build_new_release_recipients(*args)
Builder::NewRelease.new(*args).notification_recipients
::NotificationRecipients::Builder::NewRelease.new(*args).notification_recipients
end
end
end
......
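A hedged aside on the fully qualified constants above (the motivation is assumed here, not stated in the commit): inside a nested module, an unqualified reference such as Builder::Default is resolved through the lexical scope first, so it can pick up a different constant than intended or confuse autoloading; prefixing with :: pins the lookup to the top-level namespace. A small, self-contained illustration:

module NotificationRecipients
  module Builder
    class Default; end
  end

  module Service
    def self.build_recipients
      Builder::Default                            # resolved via the enclosing lexical scope
      ::NotificationRecipients::Builder::Default  # always the explicit top-level constant
    end
  end
end

NotificationRecipients::Service.build_recipients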
......@@ -2,9 +2,9 @@
- if defined?(nav) && nav
= render "layouts/nav/sidebar/#{nav}"
.content-wrapper{ class: "#{@content_wrapper_class}" }
= render 'shared/outdated_browser'
.mobile-overlay
.alert-wrapper
= render 'shared/outdated_browser'
= render_if_exists "layouts/header/ee_license_banner"
= render "layouts/broadcast"
= render "layouts/header/read_only_banner"
......
......@@ -5,9 +5,9 @@
= render 'peek/bar'
= header_message
= render partial: "layouts/header/default", locals: { project: @project, group: @group }
= render 'shared/outdated_browser'
.mobile-overlay
.alert-wrapper
= render 'shared/outdated_browser'
= render "layouts/broadcast"
= yield :flash_message
= render "layouts/flash"
......
- if outdated_browser?
.flash-container
.flash-alert.text-center
GitLab may not work properly because you are using an outdated web browser.
.gl-alert.gl-alert-danger.outdated-browser{ :role => "alert" }
= sprite_icon('error', size: 16, css_class: "gl-alert-icon gl-alert-icon-no-title gl-icon")
.gl-alert-body
- if browser.ie? && browser.version.to_i == 11
- feedback_link_url = 'https://gitlab.com/gitlab-org/gitlab/issues/197987'
- feedback_link_start = '<a href="%{url}" class="gl-link" target="_blank" rel="noopener noreferrer">'.html_safe % { url: feedback_link_url }
= s_('OutdatedBrowser|From May 2020 GitLab no longer supports Internet Explorer 11.')
%br
= s_('OutdatedBrowser|You can provide feedback %{feedback_link_start}on this issue%{feedback_link_end} or via your usual support channels.').html_safe % { feedback_link_start: feedback_link_start, feedback_link_end: '</a>'.html_safe }
- else
= s_('OutdatedBrowser|GitLab may not work properly, because you are using an outdated web browser.')
%br
Please install a
= link_to 'supported web browser', help_page_path('install/requirements', anchor: 'supported-web-browsers')
for a better experience.
- browser_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: help_page_path('install/requirements', anchor: 'supported-web-browsers') }
= s_('OutdatedBrowser|Please install a %{browser_link_start}supported web browser%{browser_link_end} for a better experience.').html_safe % { browser_link_start: browser_link_start, browser_link_end: '</a>'.html_safe }
---
title: Fix infinite spinner on error detail page
merge_request: 26188
author:
type: fixed
---
title: Remove unused file_type column from packages_package_files
merge_request: 26527
author:
type: changed
---
title: Use Workhorse acceleration for Project Import file upload via UI
merge_request: 26278
author:
type: performance
---
title: Fix package file finder for conan packages with a conan_package_reference filter
merge_request: 26240
author:
type: fixed
---
title: Default to generating blob links for missing paths
merge_request: 26817
author:
type: fixed
......@@ -1226,6 +1226,8 @@ test:
gitaly:
client_path: tmp/tests/gitaly
token: secret
workhorse:
secret_file: tmp/tests/gitlab_workhorse_secret
backup:
path: tmp/tests/backups
pseudonymizer:
......
......@@ -60,6 +60,7 @@ namespace :import do
resource :gitlab_project, only: [:create, :new] do
post :create
post :authorize
end
resource :manifest, only: [:create, :new], controller: :manifest do
......
# frozen_string_literal: true
class RemoveFileTypeFromPackagesPackageFiles < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
remove_column :packages_package_files, :file_type, :integer
end
end
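A brief note on the migration's third argument (general Active Record behaviour, not something stated in this commit): remove_column inside a change method is only auto-reversible when the column type is supplied, so a rollback can recreate the column, though not its data. Roughly:

# forward (rails db:migrate):
remove_column :packages_package_files, :file_type, :integer

# inverse generated on rails db:rollback -- the column comes back empty:
add_column :packages_package_files, :file_type, :integer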
......@@ -3029,7 +3029,6 @@ ActiveRecord::Schema.define(version: 2020_03_06_170531) do
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
t.bigint "size"
t.integer "file_type"
t.integer "file_store"
t.binary "file_md5"
t.binary "file_sha1"
......
......@@ -236,18 +236,21 @@ For reference, GitLab.com's [auto-scaling shared runner](../user/gitlab_com/inde
## Supported web browsers
CAUTION: **Caution:** With GitLab 13.0 (May 2020) we are removing official support for Internet Explorer 11.
With the release of GitLab 13.4 (September 2020) we will remove all code that supports Internet Explorer 11.
You can provide feedback [on this issue](https://gitlab.com/gitlab-org/gitlab/issues/197987) or via your usual support channels.
GitLab supports the following web browsers:
- Firefox
- Chrome/Chromium
- Safari
- Microsoft Edge
- Internet Explorer 11
- Internet Explorer 11 (until May 2020)
For the listed web browsers, GitLab supports:
- The current and previous major versions of browsers except Internet Explorer.
- Only version 11 of Internet Explorer.
- The current minor version of a supported major version.
NOTE: **Note:** We do not support running GitLab with JavaScript disabled in the browser and have no plans of supporting that
......
......@@ -74,6 +74,8 @@ You can view the exact JSON payload in the administration panel. To view the pay
You can see how [the usage ping data maps to different stages of the product](https://gitlab.com/gitlab-data/analytics/blob/master/transform/snowflake-dbt/data/version_usage_stats_to_stage_mappings.csv).
Usage ping is important to GitLab as we use it to calculate our [Action Monthly Active Users (AMAU)](https://about.gitlab.com/handbook/product/metrics/#action-monthly-active-users-amau) which helps us measure the success of our features.
### Request flow example
The following example shows a basic request/response flow between the self-managed GitLab instance, GitLab Version Application,
......
......@@ -131,7 +131,7 @@ module Banzai
path = cleaned_file_path(uri)
nested_path = relative_file_path(uri)
file_exists?(nested_path) ? nested_path : path
path_exists?(nested_path) ? nested_path : path
end
def cleaned_file_path(uri)
......@@ -190,12 +190,12 @@ module Banzai
parts.push(path).join('/')
end
def file_exists?(path)
path.present? && uri_type(path).present?
def path_exists?(path)
path.present? && @uri_types[path] != :unknown
end
def uri_type(path)
@uri_types[path] == :unknown ? "" : @uri_types[path]
@uri_types[path] == :unknown ? :blob : @uri_types[path]
end
def current_commit
......
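Read together with the changelog entry 'Default to generating blob links for missing paths' below, the filter change means a relative link to a path that does not exist in the repository now falls back to a blob URL rather than an unprefixed one. A rough sketch of the new fallback, assuming @uri_types maps known paths to symbols and unresolvable paths to :unknown:

uri_types = { 'doc/api/README.md' => :blob, 'missing-file' => :unknown }

def uri_type(uri_types, path)
  # Unknown paths used to return "" (yielding a bare /<project>/<ref>/<path> link);
  # they now default to :blob so the generated URL still resolves.
  uri_types[path] == :unknown ? :blob : uri_types[path]
end

uri_type(uri_types, 'doc/api/README.md') # => :blob
uri_type(uri_types, 'missing-file')      # => :blob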
......@@ -5609,6 +5609,9 @@ msgstr ""
msgid "Could not connect to FogBugz, check your URL"
msgstr ""
msgid "Could not connect to Sentry. Refresh the page to try again."
msgstr ""
msgid "Could not connect to Web IDE file mirror service."
msgstr ""
......@@ -13697,6 +13700,18 @@ msgstr ""
msgid "Outbound requests"
msgstr ""
msgid "OutdatedBrowser|From May 2020 GitLab no longer supports Internet Explorer 11."
msgstr ""
msgid "OutdatedBrowser|GitLab may not work properly, because you are using an outdated web browser."
msgstr ""
msgid "OutdatedBrowser|Please install a %{browser_link_start}supported web browser%{browser_link_end} for a better experience."
msgstr ""
msgid "OutdatedBrowser|You can provide feedback %{feedback_link_start}on this issue%{feedback_link_end} or via your usual support channels."
msgstr ""
msgid "Overview"
msgstr ""
......
......@@ -39,4 +39,62 @@ describe Import::GitlabProjectsController do
it_behaves_like 'project import rate limiter'
end
describe 'POST authorize' do
let(:workhorse_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
before do
request.headers['GitLab-Workhorse'] = '1.0'
request.headers[Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER] = workhorse_token
end
it 'authorizes importing project with workhorse header' do
post :authorize, format: :json
expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
end
it 'rejects requests that bypassed gitlab-workhorse or have invalid header' do
request.headers[Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER] = 'INVALID_HEADER'
expect { post :authorize, format: :json }.to raise_error(JWT::DecodeError)
end
context 'when using remote storage' do
context 'when direct upload is enabled' do
before do
stub_uploads_object_storage(ImportExportUploader, enabled: true, direct_upload: true)
end
it 'responds with status 200, location of file remote store and object details' do
post :authorize, format: :json
expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response).not_to have_key('TempPath')
expect(json_response['RemoteObject']).to have_key('ID')
expect(json_response['RemoteObject']).to have_key('GetURL')
expect(json_response['RemoteObject']).to have_key('StoreURL')
expect(json_response['RemoteObject']).to have_key('DeleteURL')
expect(json_response['RemoteObject']).to have_key('MultipartUpload')
end
end
context 'when direct upload is disabled' do
before do
stub_uploads_object_storage(ImportExportUploader, enabled: true, direct_upload: false)
end
it 'handles as a local file' do
post :authorize, format: :json
expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response['TempPath']).to eq(ImportExportUploader.workhorse_local_upload_path)
expect(json_response['RemoteObject']).to be_nil
end
end
end
end
end
......@@ -156,7 +156,7 @@ describe "User creates issue" do
expect(page.find_field("issue_description").value).not_to match /\n\n$/
end
it "cancels a file upload correctly" do
it "cancels a file upload correctly", :capybara_ignore_server_errors do
slow_requests do
dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')], 0, false)
......
......@@ -68,7 +68,7 @@ describe 'Project > Tags', :js do
end
end
it 'shows "Attaching a file" message on uploading 1 file', :js do
it 'shows "Attaching a file" message on uploading 1 file', :js, :capybara_ignore_server_errors do
slow_requests do
dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')], 0, false)
......
......@@ -22,7 +22,7 @@ describe 'User uploads file to note' do
end
end
context 'uploading is in progress' do
context 'uploading is in progress', :capybara_ignore_server_errors do
it 'cancels uploading on clicking to "Cancel" button', :js do
slow_requests do
dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')], 0, false)
......
......@@ -59,7 +59,9 @@ describe('diffs/components/commit_item', () => {
expect(titleElement.text()).toBe(commit.title_html);
});
it('renders commit description', () => {
// https://gitlab.com/gitlab-org/gitlab/-/issues/209776
// eslint-disable-next-line jest/no-disabled-tests
it.skip('renders commit description', () => {
const descElement = getDescElement();
const descExpandElement = getDescExpandElement();
......
import { createLocalVue, shallowMount } from '@vue/test-utils';
import Vuex from 'vuex';
import { __ } from '~/locale';
import createFlash from '~/flash';
import {
GlButton,
GlLoadingIcon,
......@@ -18,6 +19,8 @@ import {
errorStatus,
} from '~/error_tracking/components/constants';
jest.mock('~/flash');
const localVue = createLocalVue();
localVue.use(Vuex);
......@@ -49,18 +52,6 @@ describe('ErrorDetails', () => {
csrfToken: 'fakeToken',
},
});
wrapper.setData({
error: {
id: 'gid://gitlab/Gitlab::ErrorTracking::DetailedError/129381',
sentryId: 129381,
title: 'Issue title',
externalUrl: 'http://sentry.gitlab.net/gitlab',
firstSeen: '2017-05-26T13:32:48Z',
lastSeen: '2018-05-26T13:32:48Z',
count: 12,
userCount: 2,
},
});
}
beforeEach(() => {
......@@ -78,6 +69,7 @@ describe('ErrorDetails', () => {
const state = {
stacktraceData: {},
loadingStacktrace: true,
errorStatus: '',
};
store = new Vuex.Store({
......@@ -99,6 +91,7 @@ describe('ErrorDetails', () => {
error: {
loading: true,
stopPolling: jest.fn(),
setOptions: jest.fn(),
},
},
},
......@@ -123,10 +116,61 @@ describe('ErrorDetails', () => {
});
});
describe('sentry response timeout', () => {
const initTime = 300000;
const endTime = initTime + 10000;
beforeEach(() => {
mocks.$apollo.queries.error.loading = false;
jest.spyOn(Date, 'now').mockReturnValue(initTime);
mountComponent();
});
it('when before timeout, still shows loading', () => {
Date.now.mockReturnValue(endTime - 1);
wrapper.vm.onNoApolloResult();
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
expect(createFlash).not.toHaveBeenCalled();
expect(mocks.$apollo.queries.error.stopPolling).not.toHaveBeenCalled();
});
});
it('when timeout is hit and no apollo result, stops loading and shows flash', () => {
Date.now.mockReturnValue(endTime + 1);
wrapper.vm.onNoApolloResult();
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
expect(wrapper.find(GlLink).exists()).toBe(false);
expect(createFlash).toHaveBeenCalledWith(
'Could not connect to Sentry. Refresh the page to try again.',
'warning',
);
expect(mocks.$apollo.queries.error.stopPolling).toHaveBeenCalled();
});
});
});
describe('Error details', () => {
beforeEach(() => {
mocks.$apollo.queries.error.loading = false;
mountComponent();
wrapper.setData({
error: {
id: 'gid://gitlab/Gitlab::ErrorTracking::DetailedError/129381',
sentryId: 129381,
title: 'Issue title',
externalUrl: 'http://sentry.gitlab.net/gitlab',
firstSeen: '2017-05-26T13:32:48Z',
lastSeen: '2018-05-26T13:32:48Z',
count: 12,
userCount: 2,
},
});
});
it('should show Sentry error details without stacktrace', () => {
......@@ -232,10 +276,6 @@ describe('ErrorDetails', () => {
});
describe('When a user clicks the create issue button', () => {
beforeEach(() => {
mountComponent();
});
it('should send sentry_issue_identifier', () => {
const sentryErrorIdInput = findInput(
'issue[sentry_issue_attributes][sentry_issue_identifier]',
......@@ -275,7 +315,8 @@ describe('ErrorDetails', () => {
describe('when error is unresolved', () => {
beforeEach(() => {
store.state.details.errorStatus = errorStatus.UNRESOLVED;
mountComponent();
return wrapper.vm.$nextTick();
});
it('displays Ignore and Resolve buttons', () => {
......@@ -301,7 +342,8 @@ describe('ErrorDetails', () => {
describe('when error is ignored', () => {
beforeEach(() => {
store.state.details.errorStatus = errorStatus.IGNORED;
mountComponent();
return wrapper.vm.$nextTick();
});
it('displays Undo Ignore and Resolve buttons', () => {
......@@ -327,7 +369,8 @@ describe('ErrorDetails', () => {
describe('when error is resolved', () => {
beforeEach(() => {
store.state.details.errorStatus = errorStatus.RESOLVED;
mountComponent();
return wrapper.vm.$nextTick();
});
it('displays Ignore and Unresolve buttons', () => {
......
......@@ -114,7 +114,7 @@ describe MarkupHelper do
let(:requested_path) { nil }
it 'returns the link to the image path as a relative path' do
expanded_path = "/#{project.full_path}/master/./#{image_file}"
expanded_path = "/#{project.full_path}/-/blob/master/./#{image_file}"
expect(subject.css('a')[0].attr('href')).to eq(expanded_path)
end
......
......@@ -145,7 +145,7 @@ describe Banzai::Filter::RepositoryLinkFilter do
it 'ignores ref if commit is passed' do
doc = filter(link('non/existent.file'), commit: project.commit('empty-branch') )
expect(doc.at_css('a')['href'])
.to eq "/#{project_path}/#{ref}/non/existent.file" # non-existent files have no leading blob/raw/tree
.to eq "/#{project_path}/-/blob/#{ref}/non/existent.file"
end
shared_examples :valid_repository do
......@@ -201,6 +201,12 @@ describe Banzai::Filter::RepositoryLinkFilter do
.to eq "/#{project_path}/-/blob/#{ref}/doc/api/README.md"
end
it 'rebuilds relative URL for a missing file in the repo' do
doc = filter(link('missing-file'))
expect(doc.at_css('a')['href'])
.to eq "/#{project_path}/-/blob/#{ref}/missing-file"
end
it 'rebuilds relative URL for a file in the repo with leading ./' do
doc = filter(link('./doc/api/README.md'))
expect(doc.at_css('a')['href'])
......
......@@ -57,16 +57,12 @@ describe BulkInsertableAssociations do
end
end
before do
ActiveRecord::Base.connection.execute('TRUNCATE bulk_foos RESTART IDENTITY')
end
context 'saving bulk insertable associations' do
let(:parent) { BulkParent.new(name: 'parent') }
context 'when items already have IDs' do
it 'stores nothing and raises an error' do
build_items(parent: parent) { |n, item| item.id = 100 + n }
build_items(parent: parent) { |n, item| item.id = n }
expect { save_with_bulk_inserts(parent) }.to raise_error(BulkInsertSafe::PrimaryKeySetError)
expect(BulkFoo.count).to eq(0)
......@@ -79,7 +75,7 @@ describe BulkInsertableAssociations do
expect(BulkFoo).to receive(:bulk_insert!).once.and_call_original
expect { save_with_bulk_inserts(parent) }.to change { BulkFoo.count }.from(0).to(items.size)
expect(parent.bulk_foos.pluck(:id)).to contain_exactly(*(1..10))
expect(parent.bulk_foos.pluck(:id)).to all(be_a Integer)
end
end
......
......@@ -23,6 +23,18 @@ JS_CONSOLE_FILTER = Regexp.union([
CAPYBARA_WINDOW_SIZE = [1366, 768].freeze
# Run Workhorse on the given host and port, proxying to Puma on a UNIX socket,
# for a closer-to-production experience
Capybara.register_server :puma_via_workhorse do |app, port, host, **options|
file = Tempfile.new
socket_path = file.path
file.close! # We just want the filename
TestEnv.with_workhorse(TestEnv.workhorse_dir, host, port, socket_path) do
Capybara.servers[:puma].call(app, nil, socket_path, **options)
end
end
Capybara.register_driver :chrome do |app|
capabilities = Selenium::WebDriver::Remote::Capabilities.chrome(
# This enables access to logs with `page.driver.manage.get_log(:browser)`
......@@ -60,7 +72,7 @@ Capybara.register_driver :chrome do |app|
)
end
Capybara.server = :puma
Capybara.server = :puma_via_workhorse
Capybara.javascript_driver = :chrome
Capybara.default_max_wait_time = timeout
Capybara.ignore_hidden_elements = true
......@@ -101,6 +113,18 @@ RSpec.configure do |config|
end
end
# The :capybara_ignore_server_errors metadata means unhandled exceptions raised
# by the application under test will not necessarily fail the server. This is
# useful when testing conditions that are expected to raise a 500 error in
# production; it should not be used on the happy path.
config.around(:each, :capybara_ignore_server_errors) do |example|
Capybara.raise_server_errors = false
example.run
ensure
Capybara.raise_server_errors = true
end
config.after(:example, :js) do |example|
# when a test fails, display any messages in the browser's console
# but fail don't add the message if the failure is a pending test that got
......
......@@ -104,6 +104,9 @@ module TestEnv
setup_gitaly
# Feature specs are run through Workhorse
setup_workhorse
# Create repository for FactoryBot.create(:project)
setup_factory_repo
......@@ -218,6 +221,52 @@ module TestEnv
ENV.fetch('GITALY_REPO_URL', nil)
end
def setup_workhorse
install_workhorse_args = [workhorse_dir, workhorse_url].compact.join(',')
component_timed_setup(
'GitLab Workhorse',
install_dir: workhorse_dir,
version: Gitlab::Workhorse.version,
task: "gitlab:workhorse:install[#{install_workhorse_args}]"
)
end
def workhorse_dir
@workhorse_path ||= File.join('tmp', 'tests', 'gitlab-workhorse')
end
def with_workhorse(workhorse_dir, host, port, upstream, &blk)
host = "[#{host}]" if host.include?(':')
listen_addr = [host, port].join(':')
workhorse_pid = spawn(
File.join(workhorse_dir, 'gitlab-workhorse'),
'-authSocket', upstream,
'-documentRoot', Rails.root.join('public').to_s,
'-listenAddr', listen_addr,
'-secretPath', Gitlab::Workhorse.secret_path.to_s,
# TODO: Needed for workhorse + redis features.
# https://gitlab.com/gitlab-org/gitlab/-/issues/209245
#
# '-config', '',
'-logFile', 'log/workhorse-test.log',
'-logFormat', 'structured',
'-developmentMode' # to serve assets and rich error messages
)
begin
yield
ensure
Process.kill('TERM', workhorse_pid)
Process.wait(workhorse_pid)
end
end
def workhorse_url
ENV.fetch('GITLAB_WORKHORSE_URL', nil)
end
def setup_factory_repo
setup_repo(factory_repo_path, factory_repo_path_bare, factory_repo_name,
BRANCH_SHA)
......@@ -347,6 +396,8 @@ module TestEnv
gitlab-test_bare
gitlab-test-fork
gitlab-test-fork_bare
gitlab-workhorse
gitlab_workhorse_secret
]
end
......
......@@ -20,7 +20,7 @@ RSpec.shared_examples 'wiki file attachments' do
end
end
context 'uploading is in progress' do
context 'uploading is in progress', :capybara_ignore_server_errors do
it 'cancels uploading on clicking to "Cancel" button' do
slow_requests do
attach_with_dropzone
......