Commit b9245625, authored Sep 30, 2021 by GitLab Bot
Automatic merge of gitlab-org/gitlab master
Parents: 100709af, e413e8c8
Showing 14 changed files with 291 additions and 43 deletions (+291, -43).
.gitlab/ci/global.gitlab-ci.yml                            +2   -0
app/assets/javascripts/notebook/cells/markdown.vue         +7   -2
app/models/bulk_imports/tracker.rb                         +2   -0
app/workers/bulk_imports/pipeline_worker.rb                +27  -5
lib/bulk_imports/clients/graphql.rb                        +2   -0
lib/bulk_imports/clients/http.rb                           +2   -2
lib/bulk_imports/network_error.rb                          +61  -0
lib/gitlab/cache/import/caching.rb                         +3   -1
qa/qa/support/formatters/test_stats_formatter.rb           +13  -2
qa/spec/support/formatters/test_stats_formatter_spec.rb    +8   -3
spec/lib/bulk_imports/clients/http_spec.rb                 +7   -3
spec/lib/bulk_imports/network_error_spec.rb                +72  -0
spec/lib/gitlab/cache/import/caching_spec.rb               +10  -0
spec/workers/bulk_imports/pipeline_worker_spec.rb          +75  -25
.gitlab/ci/global.gitlab-ci.yml
@@ -193,10 +193,12 @@
 .storybook-yarn-cache:
   cache:
     - *node-modules-cache
+    - *storybook-node-modules-cache

 .storybook-yarn-cache-push:
   cache:
     - *node-modules-cache
     # We don't push this cache as it's already rebuilt by `update-yarn-cache`
+    - *storybook-node-modules-cache-push

 .use-pg11:
app/assets/javascripts/notebook/cells/markdown.vue
 <script>
 import katex from 'katex';
 import marked from 'marked';
+import { GlSafeHtmlDirective as SafeHtml } from '@gitlab/ui';
 import { sanitize } from '~/lib/dompurify';
 import { hasContent, markdownConfig } from '~/lib/utils/text_utility';
 import Prompt from './prompt.vue';
@@ -138,6 +139,9 @@ export default {
   components: {
     prompt: Prompt,
   },
+  directives: {
+    SafeHtml,
+  },
   inject: ['relativeRawPath'],
   props: {
     cell: {
@@ -150,16 +154,17 @@ export default {
       renderer.attachments = this.cell.attachments;
       renderer.relativeRawPath = this.relativeRawPath;
-      return sanitize(marked(this.cell.source.join('').replace(/\\/g, '\\\\')), markdownConfig);
+      return marked(this.cell.source.join('').replace(/\\/g, '\\\\'));
     },
   },
+  markdownConfig,
 };
 </script>

 <template>
   <div class="cell text-cell">
     <prompt />
-    <div class="markdown" v-html="markdown /* eslint-disable-line vue/no-v-html */"></div>
+    <div v-safe-html:[$options.markdownConfig]="markdown" class="markdown"></div>
   </div>
 </template>
app/models/bulk_imports/tracker.rb
@@ -50,6 +50,8 @@ class BulkImports::Tracker < ApplicationRecord
     event :start do
       transition created: :started
+      # To avoid errors when re-starting a pipeline in case of network errors
+      transition started: :started
     end

     event :finish do
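The new `started: :started` self-transition is what lets a re-enqueued job call `start!` on a tracker that is already running. A minimal sketch of that behaviour, assuming the state_machines gem that backs this model; DemoTracker is illustrative only, not GitLab code:

require 'state_machines'

class DemoTracker
  state_machine :status, initial: :created do
    event :start do
      transition created: :started
      # Without this self-transition, start! on an already-started tracker
      # raises StateMachines::InvalidTransition.
      transition started: :started
    end
  end
end

tracker = DemoTracker.new
tracker.start!  # created -> started
tracker.start!  # started -> started: safe to call again after a retry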
app/workers/bulk_imports/pipeline_worker.rb
@@ -16,7 +16,7 @@ module BulkImports
     def perform(pipeline_tracker_id, stage, entity_id)
       pipeline_tracker = ::BulkImports::Tracker
-        .with_status(:created)
+        .with_status(:created, :started)
         .find_by_id(pipeline_tracker_id)

       if pipeline_tracker.present?
@@ -59,18 +59,35 @@ module BulkImports
       pipeline_tracker.pipeline_class.new(context).run

       pipeline_tracker.finish!
+    rescue BulkImports::NetworkError => e
+      if e.retriable?(pipeline_tracker)
+        logger.error(
+          worker: self.class.name,
+          entity_id: pipeline_tracker.entity.id,
+          pipeline_name: pipeline_tracker.pipeline_name,
+          message: "Retrying error: #{e.message}"
+        )
+
+        reenqueue(pipeline_tracker, delay: e.retry_delay)
+      else
+        fail_tracker(pipeline_tracker, e)
+      end
     rescue StandardError => e
+      fail_tracker(pipeline_tracker, e)
+    end
+
+    def fail_tracker(pipeline_tracker, exception)
       pipeline_tracker.update!(status_event: 'fail_op', jid: jid)

       logger.error(
         worker: self.class.name,
         entity_id: pipeline_tracker.entity.id,
         pipeline_name: pipeline_tracker.pipeline_name,
-        message: e.message
+        message: exception.message
       )

       Gitlab::ErrorTracking.track_exception(
-        e,
+        exception,
         entity_id: pipeline_tracker.entity.id,
         pipeline_name: pipeline_tracker.pipeline_name
       )
@@ -88,8 +105,13 @@ module BulkImports
       (Time.zone.now - pipeline_tracker.entity.created_at) > Pipeline::NDJSON_EXPORT_TIMEOUT
     end

-    def reenqueue(pipeline_tracker)
-      self.class.perform_in(NDJSON_PIPELINE_PERFORM_DELAY, pipeline_tracker.id, pipeline_tracker.stage, pipeline_tracker.entity.id)
+    def reenqueue(pipeline_tracker, delay: NDJSON_PIPELINE_PERFORM_DELAY)
+      self.class.perform_in(
+        delay,
+        pipeline_tracker.id,
+        pipeline_tracker.stage,
+        pipeline_tracker.entity.id
+      )
     end
   end
 end
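Worth noting: the order of the rescue clauses matters, since Ruby dispatches to the first matching clause and BulkImports::NetworkError is itself a StandardError. A standalone sketch with stand-in classes, not GitLab code:

class DemoNetworkError < StandardError; end

def run_pipeline
  yield
rescue DemoNetworkError => e
  "retry path: #{e.message}"  # mirrors the reenqueue branch above
rescue StandardError => e
  "fail path: #{e.message}"   # mirrors fail_tracker
end

puts run_pipeline { raise DemoNetworkError, 'HTTP 429' }  # => retry path: HTTP 429
puts run_pipeline { raise 'boom' }                         # => fail path: boom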
lib/bulk_imports/clients/graphql.rb
@@ -17,6 +17,8 @@ module BulkImports
         )

         ::Gitlab::Json.parse(response.body)
+      rescue *Gitlab::HTTP::HTTP_ERRORS => e
+        raise ::BulkImports::NetworkError, e
       end
     end

     private_constant :HTTP
lib/bulk_imports/clients/http.rb
@@ -113,11 +113,11 @@ module BulkImports
     def with_error_handling
       response = yield

-      raise(::BulkImports::Error, "Error #{response.code}") unless response.success?
+      raise ::BulkImports::NetworkError.new(response: response) unless response.success?

       response
     rescue *Gitlab::HTTP::HTTP_ERRORS => e
-      raise(::BulkImports::Error, e)
+      raise ::BulkImports::NetworkError, e
     end

     def api_url
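Because NetworkError subclasses BulkImports::Error (see the new file below), call sites that already rescue BulkImports::Error keep working after this change; only callers that want retry behaviour need to rescue the narrower class. A small sketch with stand-in classes:

class DemoError < StandardError; end
class DemoNetworkError < DemoError; end  # mirrors NetworkError < Error

begin
  raise DemoNetworkError, 'Error 503'
rescue DemoError => e
  puts "broad rescue still catches it: #{e.class}"  # => DemoNetworkError
end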
lib/bulk_imports/network_error.rb (new file, mode 0 → 100644)
# frozen_string_literal: true

module BulkImports
  class NetworkError < Error
    COUNTER_KEY = 'bulk_imports/%{entity_id}/%{stage}/%{tracker_id}/network_error/%{error}'

    RETRIABLE_EXCEPTIONS = Gitlab::HTTP::HTTP_TIMEOUT_ERRORS
    RETRIABLE_HTTP_CODES = [429].freeze

    DEFAULT_RETRY_DELAY_SECONDS = 60

    MAX_RETRIABLE_COUNT = 3

    def initialize(message = nil, response: nil)
      raise ArgumentError, 'message or response required' if message.blank? && response.blank?

      super(message)

      @response = response
    end

    def retriable?(tracker)
      if retriable_exception? || retriable_http_code?
        increment(tracker) <= MAX_RETRIABLE_COUNT
      else
        false
      end
    end

    def retry_delay
      if response&.code == 429
        response.headers.fetch('Retry-After', DEFAULT_RETRY_DELAY_SECONDS).to_i
      else
        DEFAULT_RETRY_DELAY_SECONDS
      end.seconds
    end

    private

    attr_reader :response

    def retriable_exception?
      RETRIABLE_EXCEPTIONS.include?(cause&.class)
    end

    def retriable_http_code?
      RETRIABLE_HTTP_CODES.include?(response&.code)
    end

    def increment(tracker)
      key = COUNTER_KEY % {
        stage: tracker.stage,
        tracker_id: tracker.id,
        entity_id: tracker.entity.id,
        error: cause.class.name
      }

      Gitlab::Cache::Import::Caching.increment(key)
    end
  end
end
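retriable_exception? inspects `cause`, relying on Ruby's implicit exception chaining: when the clients re-raise `::BulkImports::NetworkError, e` inside a rescue block, the original transport error is recorded as the new exception's cause automatically. A small sketch of that mechanism, not GitLab code:

require 'timeout'

class DemoNetworkError < StandardError; end

begin
  begin
    raise Timeout::Error, 'execution expired'
  rescue Timeout::Error => e
    raise DemoNetworkError, e.message  # cause is attached implicitly
  end
rescue DemoNetworkError => wrapped
  puts wrapped.cause.class  # => Timeout::Error
end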
lib/gitlab/cache/import/caching.rb
@@ -84,8 +84,10 @@ module Gitlab
       key = cache_key_for(raw_key)

       Redis::Cache.with do |redis|
-        redis.incr(key)
+        value = redis.incr(key)
         redis.expire(key, timeout)
+
+        value
       end
     end
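This change matters because NetworkError#retriable? compares the value returned by `.increment` against MAX_RETRIABLE_COUNT; previously the block returned the result of `redis.expire` rather than the counter. A Redis-free sketch of the counting logic, illustrative only:

MAX_RETRIABLE_COUNT = 3

counter = 0
increment = -> { counter += 1 }  # stand-in for Gitlab::Cache::Import::Caching.increment(key)

4.times do
  attempts = increment.call
  puts attempts <= MAX_RETRIABLE_COUNT ? "retry ##{attempts}" : 'give up'
end
# => retry #1, retry #2, retry #3, give up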
qa/qa/support/formatters/test_stats_formatter.rb
@@ -57,19 +57,22 @@ module QA
       # @param [RSpec::Core::Example] example
       # @return [Hash]
       def test_stats(example)
+        file_path = example.metadata[:file_path].gsub('./qa/specs/features', '')
+
         {
           name: 'test-stats',
           time: time,
           tags: {
             name: example.full_description,
-            file_path: example.metadata[:file_path].gsub('./qa/specs/features', ''),
+            file_path: file_path,
             status: example.execution_result.status,
             reliable: example.metadata.key?(:reliable).to_s,
             quarantined: example.metadata.key?(:quarantine).to_s,
             retried: ((example.metadata[:retry_attempts] || 0) > 0).to_s,
             job_name: job_name,
             merge_request: merge_request,
-            run_type: env('QA_RUN_TYPE') || run_type
+            run_type: env('QA_RUN_TYPE') || run_type,
+            stage: devops_stage(file_path)
           },
           fields: {
             id: example.id,
@@ -150,6 +153,14 @@ module QA
         ENV[name]
       end
+
+      # Get spec devops stage
+      #
+      # @param [String] location
+      # @return [String, nil]
+      def devops_stage(file_path)
+        file_path.match(%r{(\d{1,2}_\w+)/})&.captures&.first
+      end
     end
   end
 end
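For reference, the devops_stage regex pulls the `<digits>_<word>` directory out of the already prefix-stripped spec path. The paths below are made-up examples, not from the repository:

def devops_stage(file_path)
  file_path.match(%r{(\d{1,2}_\w+)/})&.captures&.first
end

puts devops_stage('/1_manage/subfolder/some_spec.rb')  # => "1_manage"
puts devops_stage('/12_systems/foo_spec.rb')           # => "12_systems"
p devops_stage('browser_ui/misc_spec.rb')              # => nil (no stage directory)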
qa/spec/support/formatters/test_stats_formatter_spec.rb
@@ -18,6 +18,8 @@ describe QA::Support::Formatters::TestStatsFormatter do
   let(:quarantined) { 'false' }
   let(:influx_client) { instance_double('InfluxDB2::Client', create_write_api: influx_write_api) }
   let(:influx_write_api) { instance_double('InfluxDB2::WriteApi', write: nil) }
+  let(:stage) { '1_manage' }
+  let(:file_path) { "./qa/specs/features/#{stage}/subfolder/some_spec.rb" }

   let(:influx_client_args) do
     {
@@ -34,14 +36,15 @@ describe QA::Support::Formatters::TestStatsFormatter do
       time: DateTime.strptime(ci_timestamp).to_time,
       tags: {
         name: 'stats export spec',
-        file_path: './spec/support/formatters/test_stats_formatter_spec.rb',
+        file_path: file_path.gsub('./qa/specs/features', ''),
         status: :passed,
         reliable: reliable,
         quarantined: quarantined,
         retried: "false",
         job_name: "test-job",
         merge_request: "false",
-        run_type: run_type
+        run_type: run_type,
+        stage: stage
       },
       fields: {
         id: './spec/support/formatters/test_stats_formatter_spec.rb[1:1]',
@@ -57,7 +60,9 @@ describe QA::Support::Formatters::TestStatsFormatter do
   def run_spec(&spec)
     spec ||= -> { it('spec') {} }

-    describe_successfully('stats export', &spec)
+    describe_successfully('stats export', &spec).tap do |example_group|
+      example_group.examples.each { |ex| ex.metadata[:file_path] = file_path }
+    end
+
     send_stop_notification
   end
spec/lib/bulk_imports/clients/http_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe BulkImports::Clients::HTTP do
       it 'raises BulkImports::Error' do
         allow(Gitlab::HTTP).to receive(method).and_raise(Errno::ECONNREFUSED)

-        expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::Error)
+        expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::NetworkError)
       end
     end
@@ -42,7 +42,7 @@ RSpec.describe BulkImports::Clients::HTTP do
         allow(Gitlab::HTTP).to receive(method).and_return(response_double)

-        expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::Error)
+        expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::NetworkError)
       end
     end
   end
@@ -180,7 +180,11 @@ RSpec.describe BulkImports::Clients::HTTP do
       let(:version) { '13.0.0' }

       it 'raises an error' do
-        expect { subject.get(resource) }.to raise_error(::BulkImports::Error, "Unsupported GitLab Version. Minimum Supported Gitlab Version #{BulkImport::MINIMUM_GITLAB_MAJOR_VERSION}.")
+        expect { subject.get(resource) }
+          .to raise_error(
+            ::BulkImports::Error,
+            "Unsupported GitLab Version. Minimum Supported Gitlab Version #{BulkImport::MINIMUM_GITLAB_MAJOR_VERSION}."
+          )
       end
     end
spec/lib/bulk_imports/network_error_spec.rb (new file, mode 0 → 100644)
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe BulkImports::NetworkError, :clean_gitlab_redis_cache do
  let(:tracker) { double(id: 1, stage: 2, entity: double(id: 3)) }

  describe '.new' do
    it 'requires either a message or a HTTP response' do
      expect { described_class.new }
        .to raise_error(ArgumentError, 'message or response required')
    end
  end

  describe '#retriable?' do
    it 'returns true for MAX_RETRIABLE_COUNT times when cause if one of RETRIABLE_EXCEPTIONS' do
      raise described_class::RETRIABLE_EXCEPTIONS.sample
    rescue StandardError => cause
      begin
        raise described_class, cause
      rescue StandardError => exception
        described_class::MAX_RETRIABLE_COUNT.times do
          expect(exception.retriable?(tracker)).to eq(true)
        end

        expect(exception.retriable?(tracker)).to eq(false)
      end
    end

    it 'returns true for MAX_RETRIABLE_COUNT times when response is one of RETRIABLE_CODES' do
      exception = described_class.new(response: double(code: 429))

      described_class::MAX_RETRIABLE_COUNT.times do
        expect(exception.retriable?(tracker)).to eq(true)
      end

      expect(exception.retriable?(tracker)).to eq(false)
    end

    it 'returns false for other exceptions' do
      raise StandardError
    rescue StandardError => cause
      begin
        raise described_class, cause
      rescue StandardError => exception
        expect(exception.retriable?(tracker)).to eq(false)
      end
    end
  end

  describe '#retry_delay' do
    it 'returns the default value when there is not a rate limit error' do
      exception = described_class.new('foo')

      expect(exception.retry_delay).to eq(described_class::DEFAULT_RETRY_DELAY_SECONDS.seconds)
    end

    context 'when the exception is a rate limit error' do
      it 'returns the "Retry-After"' do
        exception = described_class.new(response: double(code: 429, headers: { 'Retry-After' => 20 }))

        expect(exception.retry_delay).to eq(20.seconds)
      end

      it 'returns the default value when there is no "Retry-After" header' do
        exception = described_class.new(response: double(code: 429, headers: {}))

        expect(exception.retry_delay).to eq(described_class::DEFAULT_RETRY_DELAY_SECONDS.seconds)
      end
    end
  end
end
spec/lib/gitlab/cache/import/caching_spec.rb
@@ -58,6 +58,16 @@ RSpec.describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache do
     end
   end

+  describe '.increment' do
+    it 'increment a key and returns the current value' do
+      expect(described_class.increment('foo')).to eq(1)
+
+      value = Gitlab::Redis::Cache.with { |r| r.get(described_class.cache_key_for('foo')) }
+
+      expect(value.to_i).to eq(1)
+    end
+  end
+
   describe '.set_add' do
     it 'adds a value to a set' do
       described_class.set_add('foo', 10)
spec/workers/bulk_imports/pipeline_worker_spec.rb
@@ -27,42 +27,59 @@ RSpec.describe BulkImports::PipelineWorker do
       .and_return([[0, pipeline_class]])
   end

-  it 'runs the given pipeline successfully' do
-    pipeline_tracker = create(
-      :bulk_import_tracker,
-      entity: entity,
-      pipeline_name: 'FakePipeline'
-    )
-
-    expect_next_instance_of(Gitlab::Import::Logger) do |logger|
-      expect(logger)
-        .to receive(:info)
-        .with(
-          worker: described_class.name,
-          pipeline_name: 'FakePipeline',
-          entity_id: entity.id
-        )
-    end
-
-    expect(BulkImports::EntityWorker)
-      .to receive(:perform_async)
-      .with(entity.id, pipeline_tracker.stage)
-
-    expect(subject).to receive(:jid).and_return('jid')
-
-    subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
-
-    pipeline_tracker.reload
-
-    expect(pipeline_tracker.status_name).to eq(:finished)
-    expect(pipeline_tracker.jid).to eq('jid')
+  shared_examples 'successfully runs the pipeline' do
+    it 'runs the given pipeline successfully' do
+      expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+        expect(logger)
+          .to receive(:info)
+          .with(
+            worker: described_class.name,
+            pipeline_name: 'FakePipeline',
+            entity_id: entity.id
+          )
+      end
+
+      expect(BulkImports::EntityWorker)
+        .to receive(:perform_async)
+        .with(entity.id, pipeline_tracker.stage)
+
+      expect(subject).to receive(:jid).and_return('jid')
+
+      subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+      pipeline_tracker.reload
+
+      expect(pipeline_tracker.status_name).to eq(:finished)
+      expect(pipeline_tracker.jid).to eq('jid')
+    end
+  end
+
+  it_behaves_like 'successfully runs the pipeline' do
+    let(:pipeline_tracker) do
+      create(
+        :bulk_import_tracker,
+        entity: entity,
+        pipeline_name: 'FakePipeline'
+      )
+    end
+  end
+
+  it_behaves_like 'successfully runs the pipeline' do
+    let(:pipeline_tracker) do
+      create(
+        :bulk_import_tracker,
+        :started,
+        entity: entity,
+        pipeline_name: 'FakePipeline'
+      )
+    end
   end

   context 'when the pipeline cannot be found' do
     it 'logs the error' do
       pipeline_tracker = create(
         :bulk_import_tracker,
-        :started,
+        :finished,
         entity: entity,
         pipeline_name: 'FakePipeline'
       )
@@ -126,6 +143,39 @@ RSpec.describe BulkImports::PipelineWorker do
       expect(pipeline_tracker.status_name).to eq(:failed)
       expect(pipeline_tracker.jid).to eq('jid')
     end
+
+    context 'when it is a network error' do
+      it 'reenqueue on retriable network errors' do
+        pipeline_tracker = create(
+          :bulk_import_tracker,
+          entity: entity,
+          pipeline_name: 'FakePipeline'
+        )
+
+        exception = BulkImports::NetworkError.new(
+          response: double(code: 429, headers: {})
+        )
+
+        expect_next_instance_of(pipeline_class) do |pipeline|
+          expect(pipeline)
+            .to receive(:run)
+            .and_raise(exception)
+        end
+
+        expect(subject).to receive(:jid).and_return('jid')
+
+        expect(described_class)
+          .to receive(:perform_in)
+          .with(
+            60.seconds,
+            pipeline_tracker.id,
+            pipeline_tracker.stage,
+            pipeline_tracker.entity.id
+          )
+
+        subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+      end
+    end
   end

   context 'when ndjson pipeline' do