Commit 83a79a28 authored by Grzegorz Bizon

Merge branch 'ce-to-ee-2018-07-06' into 'master'

CE upstream - 2018-07-06 12:21 UTC

See merge request gitlab-org/gitlab-ee!6407
parents c502c355 e60f1769
@@ -102,12 +102,12 @@ const Api = {
   },
   // Return Merge Request for project
-  mergeRequest(projectPath, mergeRequestId) {
+  mergeRequest(projectPath, mergeRequestId, params = {}) {
     const url = Api.buildUrl(Api.mergeRequestPath)
       .replace(':id', encodeURIComponent(projectPath))
       .replace(':mrid', mergeRequestId);
-    return axios.get(url);
+    return axios.get(url, { params });
   },
   mergeRequests(params = {}) {
......
<script>
import { mapGetters } from 'vuex';
import Icon from '../../../vue_shared/components/icon.vue';
import TitleComponent from '../../../issue_show/components/title.vue';
import DescriptionComponent from '../../../issue_show/components/description.vue';
export default {
components: {
Icon,
TitleComponent,
DescriptionComponent,
},
computed: {
...mapGetters(['currentMergeRequest']),
},
};
</script>
<template>
<div class="ide-merge-request-info h-100 d-flex flex-column">
<div class="detail-page-header">
<icon
name="git-merge"
class="align-self-center append-right-8"
/>
<strong>
!{{ currentMergeRequest.iid }}
</strong>
</div>
<div class="issuable-details">
<title-component
:issuable-ref="currentMergeRequest.iid"
:title-html="currentMergeRequest.title_html"
:title-text="currentMergeRequest.title"
/>
<description-component
:description-html="currentMergeRequest.description_html"
:description-text="currentMergeRequest.description"
:can-update="false"
/>
</div>
</div>
</template>
@@ -5,6 +5,7 @@ import Icon from '../../../vue_shared/components/icon.vue';
 import { rightSidebarViews } from '../../constants';
 import PipelinesList from '../pipelines/list.vue';
 import JobsDetail from '../jobs/detail.vue';
+import MergeRequestInfo from '../merge_requests/info.vue';
 import ResizablePanel from '../resizable_panel.vue';
 export default {
@@ -16,9 +17,10 @@ export default {
     PipelinesList,
     JobsDetail,
     ResizablePanel,
+    MergeRequestInfo,
   },
   computed: {
-    ...mapState(['rightPane']),
+    ...mapState(['rightPane', 'currentMergeRequestId']),
     pipelinesActive() {
       return (
         this.rightPane === rightSidebarViews.pipelines ||
@@ -54,10 +56,33 @@ export default {
     </resizable-panel>
     <nav class="ide-activity-bar">
       <ul class="list-unstyled">
+        <li
+          v-if="currentMergeRequestId"
+        >
+          <button
+            v-tooltip
+            :title="__('Merge Request')"
+            :aria-label="__('Merge Request')"
+            :class="{
+              active: rightPane === $options.rightSidebarViews.mergeRequestInfo
+            }"
+            data-container="body"
+            data-placement="left"
+            class="ide-sidebar-link is-right"
+            type="button"
+            @click="clickTab($event, $options.rightSidebarViews.mergeRequestInfo)"
+          >
+            <icon
+              :size="16"
+              name="text-description"
+            />
+          </button>
+        </li>
         <li>
           <button
             v-tooltip
             :title="__('Pipelines')"
+            :aria-label="__('Pipelines')"
             :class="{
               active: pipelinesActive
             }"
......
@@ -31,6 +31,7 @@ export const diffModes = {
 export const rightSidebarViews = {
   pipelines: 'pipelines-list',
   jobsDetail: 'jobs-detail',
+  mergeRequestInfo: 'merge-request-info',
 };
 export const stageKeys = {
......
@@ -40,8 +40,8 @@ export default {
   getProjectData(namespace, project) {
     return Api.project(`${namespace}/${project}`);
   },
-  getProjectMergeRequestData(projectId, mergeRequestId) {
-    return Api.mergeRequest(projectId, mergeRequestId);
+  getProjectMergeRequestData(projectId, mergeRequestId, params = {}) {
+    return Api.mergeRequest(projectId, mergeRequestId, params);
   },
   getProjectMergeRequestChanges(projectId, mergeRequestId) {
     return Api.mergeRequestChanges(projectId, mergeRequestId);
......
@@ -9,7 +9,7 @@ export const getMergeRequestData = (
   new Promise((resolve, reject) => {
     if (!state.projects[projectId].mergeRequests[mergeRequestId] || force) {
       service
-        .getProjectMergeRequestData(projectId, mergeRequestId)
+        .getProjectMergeRequestData(projectId, mergeRequestId, { render_html: true })
         .then(({ data }) => {
           commit(types.SET_MERGE_REQUEST, {
             projectPath: projectId,
......
 <script>
 import animateMixin from '../mixins/animate';
 import eventHub from '../event_hub';
 import tooltip from '../../vue_shared/directives/tooltip';
 import { spriteIcon } from '../../lib/utils/common_utils';
 export default {
   directives: {
     tooltip,
   },
   mixins: [animateMixin],
   props: {
     issuableRef: {
-      type: String,
+      type: [String, Number],
       required: true,
     },
     canUpdate: {
       required: false,
       type: Boolean,
       default: false,
     },
     titleHtml: {
       type: String,
       required: true,
     },
     titleText: {
       type: String,
       required: true,
     },
     showInlineEditButton: {
       type: Boolean,
       required: false,
       default: false,
     },
   },
   data() {
     return {
       preAnimation: false,
       pulseAnimation: false,
       titleEl: document.querySelector('title'),
     };
   },
   computed: {
     pencilIcon() {
       return spriteIcon('pencil', 'link-highlight');
     },
   },
   watch: {
     titleHtml() {
       this.setPageTitle();
       this.animateChange();
     },
   },
   methods: {
     setPageTitle() {
       const currentPageTitleScope = this.titleEl.innerText.split('·');
       currentPageTitleScope[0] = `${this.titleText} (${this.issuableRef}) `;
       this.titleEl.textContent = currentPageTitleScope.join('·');
     },
     edit() {
       eventHub.$emit('open.form');
     },
   },
 };
 </script>
 <template>
......
@@ -1329,3 +1329,14 @@
   line-height: 16px;
   color: $gl-text-color-secondary;
 }
+
+.ide-merge-request-info {
+  .detail-page-header {
+    line-height: initial;
+    min-height: 38px;
+  }
+
+  .issuable-details {
+    overflow: auto;
+  }
+}
@@ -390,6 +390,10 @@ module Ci
       trace.exist?
     end

+    def has_old_trace?
+      old_trace.present?
+    end
+
     def trace=(data)
       raise NotImplementedError
     end
@@ -399,6 +403,8 @@ module Ci
     end

     def erase_old_trace!
+      return unless has_old_trace?
+
       update_column(:trace, nil)
     end
......
 module Ci
   class BuildTraceChunk < ActiveRecord::Base
     include FastDestroyAll
+    include ::Gitlab::ExclusiveLeaseHelpers
     extend Gitlab::Ci::Model

     belongs_to :build, class_name: "Ci::Build", foreign_key: :build_id

     default_value_for :data_store, :redis

-    WriteError = Class.new(StandardError)
-
     CHUNK_SIZE = 128.kilobytes
-    CHUNK_REDIS_TTL = 1.week
     WRITE_LOCK_RETRY = 10
     WRITE_LOCK_SLEEP = 0.01.seconds
     WRITE_LOCK_TTL = 1.minute

+    # Note: The ordering of this enum is related to the precedence of persist store.
+    # The bottom item takes the highest precedence, and the top item takes the lowest precedence.
     enum data_store: {
       redis: 1,
-      db: 2
+      database: 2,
+      fog: 3
     }

     class << self
-      def redis_data_key(build_id, chunk_index)
-        "gitlab:ci:trace:#{build_id}:chunks:#{chunk_index}"
-      end
-
-      def redis_data_keys
-        redis.pluck(:build_id, :chunk_index).map do |data|
-          redis_data_key(data.first, data.second)
-        end
-      end
-
-      def redis_delete_data(keys)
-        return if keys.empty?
-
-        Gitlab::Redis::SharedState.with do |redis|
-          redis.del(keys)
-        end
+      def all_stores
+        @all_stores ||= self.data_stores.keys
+      end
+
+      def persistable_store
+        # get first available store from the back of the list
+        all_stores.reverse.find { |store| get_store_class(store).available? }
+      end
+
+      def get_store_class(store)
+        @stores ||= {}
+        @stores[store] ||= "Ci::BuildTraceChunks::#{store.capitalize}".constantize.new
       end

       ##
       # FastDestroyAll concerns
       def begin_fast_destroy
-        redis_data_keys
+        all_stores.each_with_object({}) do |store, result|
+          relation = public_send(store) # rubocop:disable GitlabSecurity/PublicSend
+          keys = get_store_class(store).keys(relation)
+          result[store] = keys if keys.present?
+        end
       end

       ##
       # FastDestroyAll concerns
       def finalize_fast_destroy(keys)
-        redis_delete_data(keys)
+        keys.each do |store, value|
+          get_store_class(store).delete_keys(value)
+        end
       end
     end
@@ -66,10 +70,15 @@ module Ci
     end

     def append(new_data, offset)
+      raise ArgumentError, 'New data is missing' unless new_data
       raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0
       raise ArgumentError, 'Chunk size overflow' if CHUNK_SIZE < (offset + new_data.bytesize)

-      set_data(data.byteslice(0, offset) + new_data)
+      in_lock(*lock_params) do # Write operation is atomic
+        unsafe_set_data!(data.byteslice(0, offset) + new_data)
+      end
+
+      schedule_to_persist if full?
     end

     def size
@@ -88,93 +97,63 @@ module Ci
       (start_offset...end_offset)
     end

-    def use_database!
-      in_lock do
-        break if db?
-        break unless size > 0
-
-        self.update!(raw_data: data, data_store: :db)
-        self.class.redis_delete_data([redis_data_key])
+    def persist_data!
+      in_lock(*lock_params) do # Write operation is atomic
+        unsafe_persist_to!(self.class.persistable_store)
       end
     end

     private

-    def get_data
-      if redis?
-        redis_data
-      elsif db?
-        raw_data
-      else
-        raise 'Unsupported data store'
-      end&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default
-    end
-
-    def set_data(value)
-      raise ArgumentError, 'too much data' if value.bytesize > CHUNK_SIZE
-
-      in_lock do
-        if redis?
-          redis_set_data(value)
-        elsif db?
-          self.raw_data = value
-        else
-          raise 'Unsupported data store'
-        end
-
-        @data = value
-        save! if changed?
-      end
-
-      schedule_to_db if full?
-    end
-
-    def schedule_to_db
-      return if db?
-
-      Ci::BuildTraceChunkFlushWorker.perform_async(id)
-    end
+    def unsafe_persist_to!(new_store)
+      return if data_store == new_store.to_s
+      raise ArgumentError, 'Can not persist empty data' unless size > 0
+
+      old_store_class = self.class.get_store_class(data_store)
+
+      get_data.tap do |the_data|
+        self.raw_data = nil
+        self.data_store = new_store
+        unsafe_set_data!(the_data)
+      end
+
+      old_store_class.delete_data(self)
+    end

-    def full?
-      size == CHUNK_SIZE
-    end
+    def get_data
+      self.class.get_store_class(data_store).data(self)&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default
+    rescue Excon::Error::NotFound
+      # If the data store is :fog and the file does not exist in the object storage, this method returns nil.
+    end

-    def redis_data
-      Gitlab::Redis::SharedState.with do |redis|
-        redis.get(redis_data_key)
-      end
-    end
+    def unsafe_set_data!(value)
+      raise ArgumentError, 'New data size exceeds chunk size' if value.bytesize > CHUNK_SIZE

-    def redis_set_data(data)
-      Gitlab::Redis::SharedState.with do |redis|
-        redis.set(redis_data_key, data, ex: CHUNK_REDIS_TTL)
-      end
-    end
+      self.class.get_store_class(data_store).set_data(self, value)
+      @data = value

-    def redis_data_key
-      self.class.redis_data_key(build_id, chunk_index)
-    end
+      save! if changed?
+    end

-    def in_lock
-      write_lock_key = "trace_write:#{build_id}:chunks:#{chunk_index}"
-      lease = Gitlab::ExclusiveLease.new(write_lock_key, timeout: WRITE_LOCK_TTL)
-      retry_count = 0
-
-      until uuid = lease.try_obtain
-        # Keep trying until we obtain the lease. To prevent hammering Redis too
-        # much we'll wait for a bit between retries.
-        sleep(WRITE_LOCK_SLEEP)
-        break if WRITE_LOCK_RETRY < (retry_count += 1)
-      end
-
-      raise WriteError, 'Failed to obtain write lock' unless uuid
-
-      self.reload if self.persisted?
-      return yield
-    ensure
-      Gitlab::ExclusiveLease.cancel(write_lock_key, uuid)
-    end
+    def schedule_to_persist
+      return if data_persisted?
+
+      Ci::BuildTraceChunkFlushWorker.perform_async(id)
+    end
+
+    def data_persisted?
+      !redis?
+    end
+
+    def full?
+      size == CHUNK_SIZE
+    end
+
+    def lock_params
+      ["trace_write:#{build_id}:chunks:#{chunk_index}",
+       { ttl: WRITE_LOCK_TTL,
+         retries: WRITE_LOCK_RETRY,
+         sleep_sec: WRITE_LOCK_SLEEP }]
+    end
   end
 end
module Ci
module BuildTraceChunks
class Database
def available?
true
end
def keys(relation)
[]
end
def delete_keys(keys)
# no-op
end
def data(model)
model.raw_data
end
def set_data(model, data)
model.raw_data = data
end
def delete_data(model)
model.update_columns(raw_data: nil) unless model.raw_data.nil?
end
end
end
end
module Ci
module BuildTraceChunks
class Fog
def available?
object_store.enabled
end
def data(model)
connection.get_object(bucket_name, key(model))[:body]
end
def set_data(model, data)
connection.put_object(bucket_name, key(model), data)
end
def delete_data(model)
delete_keys([[model.build_id, model.chunk_index]])
end
def keys(relation)
return [] unless available?
relation.pluck(:build_id, :chunk_index)
end
def delete_keys(keys)
keys.each do |key|
connection.delete_object(bucket_name, key_raw(*key))
end
end
private
def key(model)
key_raw(model.build_id, model.chunk_index)
end
def key_raw(build_id, chunk_index)
"tmp/builds/#{build_id.to_i}/chunks/#{chunk_index.to_i}.log"
end
def bucket_name
return unless available?
object_store.remote_directory
end
def connection
return unless available?
@connection ||= ::Fog::Storage.new(object_store.connection.to_hash.deep_symbolize_keys)
end
def object_store
Gitlab.config.artifacts.object_store
end
end
end
end
module Ci
module BuildTraceChunks
class Redis
CHUNK_REDIS_TTL = 1.week
def available?
true
end
def data(model)
Gitlab::Redis::SharedState.with do |redis|
redis.get(key(model))
end
end
def set_data(model, data)
Gitlab::Redis::SharedState.with do |redis|
redis.set(key(model), data, ex: CHUNK_REDIS_TTL)
end
end
def delete_data(model)
delete_keys([[model.build_id, model.chunk_index]])
end
def keys(relation)
relation.pluck(:build_id, :chunk_index)
end
def delete_keys(keys)
return if keys.empty?
keys = keys.map { |key| key_raw(*key) }
Gitlab::Redis::SharedState.with do |redis|
redis.del(keys)
end
end
private
def key(model)
key_raw(model.build_id, model.chunk_index)
end
def key_raw(build_id, chunk_index)
"gitlab:ci:trace:#{build_id.to_i}:chunks:#{chunk_index.to_i}"
end
end
end
end
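Taken together, the `Redis`, `Database`, and `Fog` classes above expose the same small interface (`available?`, `data`, `set_data`, `delete_data`, `keys`, `delete_keys`), which is what lets `Ci::BuildTraceChunk.persistable_store` treat them interchangeably. The sketch below illustrates only the precedence rule implied by the enum ordering; the availability map is a made-up stand-in for the real `available?` checks.

```ruby
# Illustrative sketch of the precedence rule in Ci::BuildTraceChunk.persistable_store:
# the enum order is redis < database < fog, and the last *available* store wins.
stores = %w[redis database fog]

# Hypothetical availability: object storage (fog) is disabled in this example.
available = { 'redis' => true, 'database' => true, 'fog' => false }

persistable = stores.reverse.find { |store| available[store] }
puts persistable # => "database"; with object storage enabled it would be "fog"
```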
@@ -135,9 +135,10 @@ class Milestone < ActiveRecord::Base
       rel.order(:project_id, :due_date).select('DISTINCT ON (project_id) id')
     else
       rel
-        .group(:project_id)
+        .group(:project_id, :due_date, :id)
         .having('due_date = MIN(due_date)')
         .pluck(:id, :project_id, :due_date)
+        .uniq(&:second)
         .map(&:first)
     end
   end
......
@@ -139,6 +139,8 @@ class NotificationService
   #
   # In EE, approvers of the merge request are also included
   #
+  # In EE, approvers of the merge request are also included
+  #
   def new_merge_request(merge_request, current_user)
     new_resource_email(merge_request, :new_merge_request_email)
   end
......
@@ -7,7 +7,7 @@ module Ci
     def perform(build_trace_chunk_id)
       ::Ci::BuildTraceChunk.find_by(id: build_trace_chunk_id).try do |build_trace_chunk|
-        build_trace_chunk.use_database!
+        build_trace_chunk.persist_data!
       end
     end
   end
......
---
title: Load Devise with Omniauth when auto_sign_in_with_provider is configured
merge_request: 20302
author:
type: fixed
---
title: "[Rails5] Fix milestone GROUP BY query"
merge_request: 20256
author: "@blackst0ne"
type: fixed
---
title: Use object storage as the first class persistable store for new live trace architecture
merge_request: 19515
author:
type: changed
---
title: Display merge request title & description in Web IDE
merge_request:
author:
type: added
---
title: Fix cross-project label references.
merge_request:
author:
type: fixed
---
title: Remove redundant query when removing trace
merge_request: 20324
author:
type: performance
@@ -219,7 +219,7 @@ Devise.setup do |config|
     end
   end

-  if Gitlab.config.omniauth.enabled
+  if Gitlab::OmniauthInitializer.enabled?
     Gitlab::OmniauthInitializer.new(config).execute(Gitlab.config.omniauth.providers)
   end
 end
@@ -17,7 +17,7 @@ OmniAuth.config.before_request_phase do |env|
   Gitlab::RequestForgeryProtection.call(env)
 end

-if Gitlab.config.omniauth.enabled
+if Gitlab::OmniauthInitializer.enabled?
   provider_names = Gitlab.config.omniauth.providers.map(&:name)
   Gitlab::Auth.omniauth_setup_providers(provider_names)
 end
class EnqueueFixCrossProjectLabelLinks < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
BATCH_SIZE = 100
MIGRATION = 'FixCrossProjectLabelLinks'
DELAY_INTERVAL = 5.minutes
disable_ddl_transaction!
class Label < ActiveRecord::Base
self.table_name = 'labels'
end
class Namespace < ActiveRecord::Base
self.table_name = 'namespaces'
include ::EachBatch
default_scope { where(type: 'Group', id: Label.where(type: 'GroupLabel').select('distinct group_id')) }
end
def up
queue_background_migration_jobs_by_range_at_intervals(Namespace, MIGRATION, DELAY_INTERVAL, batch_size: BATCH_SIZE)
end
def down
# noop
end
end
@@ -77,10 +77,10 @@ cloud-native, for example on Kubernetes.

 The data flow is the same as described in the [data flow section](#data-flow)
 with one change: _the stored path of the first two phases is different_. This new live
-trace architecture stores chunks of traces in Redis and the database instead of
+trace architecture stores chunks of traces in Redis and a persistent store (object storage or database) instead of
 file storage. Redis is used as first-class storage, and it stores up to 128KB
-of data. Once the full chunk is sent, it is flushed to database. After a while,
-the data in Redis and database will be archived to [object storage](#uploading-traces-to-object-storage).
+of data. Once the full chunk is sent, it is flushed to a persistent store, either object storage (temporary directory) or the database.
+After a while, the data in Redis and the persistent store will be archived to [object storage](#uploading-traces-to-object-storage).

 The data are stored in the following Redis namespace: `Gitlab::Redis::SharedState`.

@@ -89,11 +89,11 @@ Here is the detailed data flow:

 1. GitLab Runner picks a job from GitLab
 1. GitLab Runner sends a piece of trace to GitLab
 1. GitLab appends the data to Redis
-1. Once the data in Redis reach 128KB, the data is flushed to the database.
+1. Once the data in Redis reaches 128KB, the data is flushed to a persistent store (object storage or the database).
 1. The above steps are repeated until the job is finished.
 1. Once the job is finished, GitLab schedules a Sidekiq worker to archive the trace.
 1. The Sidekiq worker archives the trace to object storage and cleans up the trace
-   in Redis and the database.
+   in Redis and the persistent store (object storage or the database).
### Enabling live trace
......
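The chunked flow documented above can be pictured with a small sketch. This is an illustrative model only, assuming a hypothetical `TraceChunk` class with plain Redis and store objects; it is not the real `Ci::BuildTraceChunk` implementation.

```ruby
# Illustrative model of the live-trace chunk flow (hypothetical class, not the
# real Ci::BuildTraceChunk): data accumulates in Redis until the 128KB chunk is
# full, then it is handed off to a persistent store (object storage or database).
class TraceChunk
  CHUNK_SIZE = 128 * 1024 # matches CHUNK_SIZE in Ci::BuildTraceChunk

  def initialize(redis, persistent_store, key)
    @redis = redis                       # first-class store for in-progress data
    @persistent_store = persistent_store # object storage or database
    @key = key
    @buffer = +''
  end

  def append(data)
    @buffer << data
    @redis.set(@key, @buffer)
    flush! if @buffer.bytesize >= CHUNK_SIZE # full chunk: persist and clear Redis
  end

  private

  def flush!
    # In GitLab this step runs asynchronously via Ci::BuildTraceChunkFlushWorker.
    @persistent_store.write(@key, @buffer)
    @redis.del(@key)
  end
end
```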
@@ -359,6 +359,7 @@ Parameters:

 - `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user
 - `merge_request_iid` (required) - The internal ID of the merge request
+- `render_html` (optional) - If `true`, response includes rendered HTML for title and description

 ```json
 {
......
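As a usage sketch, `render_html` is passed like any other query parameter. The host, token, project ID, and merge request IID below are placeholders, not values from this merge request.

```ruby
require 'net/http'
require 'json'

# Fetch a single merge request with server-rendered HTML for title and description.
uri = URI('https://gitlab.example.com/api/v4/projects/42/merge_requests/7')
uri.query = URI.encode_www_form(render_html: true)

request = Net::HTTP::Get.new(uri)
request['Private-Token'] = '<your_access_token>' # placeholder token

response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(request) }
merge_request = JSON.parse(response.body)
puts merge_request['title_html'] # only present because render_html=true was requested
```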
@@ -534,6 +534,12 @@ module API
     end

     class MergeRequestBasic < ProjectEntity
+      expose :title_html, if: -> (_, options) { options[:render_html] } do |entity|
+        MarkupHelper.markdown_field(entity, :title)
+      end
+      expose :description_html, if: -> (_, options) { options[:render_html] } do |entity|
+        MarkupHelper.markdown_field(entity, :description)
+      end
       expose :target_branch, :source_branch
       expose :upvotes do |merge_request, options|
         if options[:issuable_metadata]
......
@@ -234,6 +234,7 @@ module API
       params do
         requires :merge_request_iid, type: Integer, desc: 'The IID of a merge request'
+        optional :render_html, type: Boolean, desc: 'Returns the description and title rendered HTML'
       end
       desc 'Get a single merge request' do
         success Entities::MergeRequest
@@ -241,7 +242,7 @@ module API
       get ':id/merge_requests/:merge_request_iid' do
         merge_request = find_merge_request_with_access(params[:merge_request_iid])

-        present merge_request, with: Entities::MergeRequest, current_user: current_user, project: user_project
+        present merge_request, with: Entities::MergeRequest, current_user: current_user, project: user_project, render_html: params[:render_html]
       end

       desc 'Get the participants of a merge request' do
......
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class FixCrossProjectLabelLinks
GROUP_NESTED_LEVEL = 10.freeze
class Project < ActiveRecord::Base
self.table_name = 'projects'
end
class Label < ActiveRecord::Base
self.table_name = 'labels'
end
class LabelLink < ActiveRecord::Base
self.table_name = 'label_links'
end
class Issue < ActiveRecord::Base
self.table_name = 'issues'
end
class MergeRequest < ActiveRecord::Base
self.table_name = 'merge_requests'
end
class Namespace < ActiveRecord::Base
self.table_name = 'namespaces'
def self.groups_with_descendants_ids(start_id, stop_id)
# To isolate migration code, we avoid usage of
# Gitlab::GroupHierarchy#base_and_descendants which already
# does this job better
ids = Namespace.where(type: 'Group', id: Label.where(type: 'GroupLabel').select('distinct group_id')).where(id: start_id..stop_id).pluck(:id)
group_ids = ids
GROUP_NESTED_LEVEL.times do
ids = Namespace.where(type: 'Group', parent_id: ids).pluck(:id)
break if ids.empty?
group_ids += ids
end
group_ids.uniq
end
end
def perform(start_id, stop_id)
group_ids = Namespace.groups_with_descendants_ids(start_id, stop_id)
project_ids = Project.where(namespace_id: group_ids).select(:id)
fix_issues(project_ids)
fix_merge_requests(project_ids)
end
private
# select IDs of issues which reference a label which is:
# a) a project label of a different project, or
# b) a group label of a different group than issue's project group
def fix_issues(project_ids)
issue_ids = Label
.joins('INNER JOIN label_links ON label_links.label_id = labels.id AND label_links.target_type = \'Issue\'
INNER JOIN issues ON issues.id = label_links.target_id
INNER JOIN projects ON projects.id = issues.project_id')
.where('issues.project_id in (?)', project_ids)
.where('(labels.project_id is not null and labels.project_id != issues.project_id) '\
'or (labels.group_id is not null and labels.group_id != projects.namespace_id)')
.select('distinct issues.id')
Issue.where(id: issue_ids).find_each { |issue| check_resource_labels(issue, issue.project_id) }
end
# select IDs of MRs which reference a label which is:
# a) a project label of a different project, or
# b) a group label of a different group than MR's project group
def fix_merge_requests(project_ids)
mr_ids = Label
.joins('INNER JOIN label_links ON label_links.label_id = labels.id AND label_links.target_type = \'MergeRequest\'
INNER JOIN merge_requests ON merge_requests.id = label_links.target_id
INNER JOIN projects ON projects.id = merge_requests.target_project_id')
.where('merge_requests.target_project_id in (?)', project_ids)
.where('(labels.project_id is not null and labels.project_id != merge_requests.target_project_id) '\
'or (labels.group_id is not null and labels.group_id != projects.namespace_id)')
.select('distinct merge_requests.id')
MergeRequest.where(id: mr_ids).find_each { |merge_request| check_resource_labels(merge_request, merge_request.target_project_id) }
end
def check_resource_labels(resource, project_id)
local_labels = available_labels(project_id)
# get all label links for the given resource (issue/MR)
# which reference a label not included in available_labels
# (other than its project labels and labels of ancestor groups)
cross_labels = LabelLink
.select('label_id, labels.title as title, labels.color as color, label_links.id as label_link_id')
.joins('INNER JOIN labels ON labels.id = label_links.label_id')
.where(target_type: resource.class.name.demodulize, target_id: resource.id)
.where('labels.id not in (?)', local_labels.select(:id))
cross_labels.each do |label|
matching_label = local_labels.find {|l| l.title == label.title && l.color == label.color}
next unless matching_label
Rails.logger.info "#{resource.class.name.demodulize} #{resource.id}: replacing #{label.label_id} with #{matching_label.id}"
LabelLink.update(label.label_link_id, label_id: matching_label.id)
end
end
# get all labels available for the project (including
# group labels of ancestor groups)
def available_labels(project_id)
@labels ||= {}
@labels[project_id] ||= Label
.where("(type = 'GroupLabel' and group_id in (?)) or (type = 'ProjectLabel' and id = ?)",
project_group_ids(project_id),
project_id)
end
def project_group_ids(project_id)
ids = [Project.find(project_id).namespace_id]
GROUP_NESTED_LEVEL.times do
group = Namespace.find(ids.last)
break unless group.parent_id
ids << group.parent_id
end
ids
end
end
end
end
@@ -101,14 +101,17 @@ module Gitlab
     end

     def erase!
-      trace_artifact&.destroy
+      ##
+      # Erase the archived trace
+      trace_artifact&.destroy!

-      paths.each do |trace_path|
-        FileUtils.rm(trace_path, force: true)
-      end
-
-      job.trace_chunks.fast_destroy_all
-      job.erase_old_trace!
+      ##
+      # Erase the live trace
+      job.trace_chunks.fast_destroy_all # Destroy chunks of a live trace
+      FileUtils.rm_f(current_path) if current_path # Remove a trace file of a live trace
+      job.erase_old_trace! if job.has_old_trace? # Remove a trace in database of a live trace
+    ensure
+      @current_path = nil
     end

     def archive!
......
module Gitlab
# This module provides helper methods which are integrated with GitLab::ExclusiveLease
module ExclusiveLeaseHelpers
FailedToObtainLockError = Class.new(StandardError)
##
# This helper method blocks a process/thread until the other process cancels the obtained lease key.
#
# Note: It's basically discouraged to use this method in a Unicorn thread,
# because it holds the connection until all `retries` are consumed.
# This could potentially eat up all connection pools.
def in_lock(key, ttl: 1.minute, retries: 10, sleep_sec: 0.01.seconds)
lease = Gitlab::ExclusiveLease.new(key, timeout: ttl)
until uuid = lease.try_obtain
# Keep trying until we obtain the lease. To prevent hammering Redis too
# much we'll wait for a bit.
sleep(sleep_sec)
break if (retries -= 1) < 0
end
raise FailedToObtainLockError, 'Failed to obtain a lock' unless uuid
return yield
ensure
Gitlab::ExclusiveLease.cancel(key, uuid)
end
end
end
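A minimal usage sketch of the helper above (the class and key name are hypothetical): include the module and wrap the critical section in `in_lock`.

```ruby
class TraceChunkFlusher
  include ::Gitlab::ExclusiveLeaseHelpers

  def flush(chunk)
    # Only one process may flush a given chunk at a time; a concurrent caller
    # retries and eventually raises FailedToObtainLockError if the lease is held.
    in_lock("example:flush:#{chunk.id}", ttl: 1.minute, retries: 5, sleep_sec: 0.1.seconds) do
      chunk.persist_data!
    end
  end
end
```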
@@ -63,12 +63,8 @@ module Gitlab
         # This saves us an RPC round trip.
         return nil if commit_id.include?(':')

-        commit = repo.gitaly_migrate(:find_commit) do |is_enabled|
-          if is_enabled
-            repo.gitaly_commit_client.find_commit(commit_id)
-          else
-            rugged_find(repo, commit_id)
-          end
+        commit = repo.wrapped_gitaly_errors do
+          repo.gitaly_commit_client.find_commit(commit_id)
         end

         decorate(repo, commit) if commit
@@ -78,12 +74,6 @@ module Gitlab
         nil
       end

-      def rugged_find(repo, commit_id)
-        obj = repo.rev_parse_target(commit_id)
-
-        obj.is_a?(Rugged::Commit) ? obj : nil
-      end
-
       # Get last commit for HEAD
       #
       # Ex.
......
@@ -12,14 +12,8 @@ module Gitlab
     end

     def conflicts
-      @conflicts ||= begin
-        @target_repository.gitaly_migrate(:conflicts_list_conflict_files) do |is_enabled|
-          if is_enabled
-            gitaly_conflicts_client(@target_repository).list_conflict_files.to_a
-          else
-            rugged_list_conflict_files
-          end
-        end
+      @conflicts ||= @target_repository.wrapped_gitaly_errors do
+        gitaly_conflicts_client(@target_repository).list_conflict_files.to_a
       end
     rescue GRPC::FailedPrecondition => e
       raise Gitlab::Git::Conflict::Resolver::ConflictSideMissing.new(e.message)
@@ -28,12 +22,8 @@ module Gitlab
     end

     def resolve_conflicts(source_repository, resolution, source_branch:, target_branch:)
-      source_repository.gitaly_migrate(:conflicts_resolve_conflicts) do |is_enabled|
-        if is_enabled
-          gitaly_conflicts_client(source_repository).resolve_conflicts(@target_repository, resolution, source_branch, target_branch)
-        else
-          rugged_resolve_conflicts(source_repository, resolution, source_branch, target_branch)
-        end
+      source_repository.wrapped_gitaly_errors do
+        gitaly_conflicts_client(source_repository).resolve_conflicts(@target_repository, resolution, source_branch, target_branch)
       end
     end
@@ -61,57 +51,6 @@ module Gitlab
     def gitaly_conflicts_client(repository)
       repository.gitaly_conflicts_client(@our_commit_oid, @their_commit_oid)
     end
-
-    def write_resolved_file_to_index(repository, index, file, params)
-      if params[:sections]
-        resolved_lines = file.resolve_lines(params[:sections])
-        new_file = resolved_lines.map { |line| line[:full_line] }.join("\n")
-
-        new_file << "\n" if file.our_blob.data.end_with?("\n")
-      elsif params[:content]
-        new_file = file.resolve_content(params[:content])
-      end
-
-      our_path = file.our_path
-
-      oid = repository.rugged.write(new_file, :blob)
-      index.add(path: our_path, oid: oid, mode: file.our_mode)
-      index.conflict_remove(our_path)
-    end
-
-    def rugged_list_conflict_files
-      target_index = @target_repository.rugged.merge_commits(@our_commit_oid, @their_commit_oid)
-
-      # We don't need to do `with_repo_branch_commit` here, because the target
-      # project always fetches source refs when creating merge request diffs.
-      conflict_files(@target_repository, target_index)
-    end
-
-    def rugged_resolve_conflicts(source_repository, resolution, source_branch, target_branch)
-      source_repository.with_repo_branch_commit(@target_repository, target_branch) do
-        index = source_repository.rugged.merge_commits(@our_commit_oid, @their_commit_oid)
-        conflicts = conflict_files(source_repository, index)
-
-        resolution.files.each do |file_params|
-          conflict_file = conflict_for_path(conflicts, file_params[:old_path], file_params[:new_path])
-
-          write_resolved_file_to_index(source_repository, index, conflict_file, file_params)
-        end
-
-        unless index.conflicts.empty?
-          missing_files = index.conflicts.map { |file| file[:ours][:path] }
-
-          raise ResolutionError, "Missing resolutions for the following files: #{missing_files.join(', ')}"
-        end
-
-        commit_params = {
-          message: resolution.commit_message,
-          parents: [@our_commit_oid, @their_commit_oid]
-        }
-
-        source_repository.commit_index(resolution.user, source_branch, index, commit_params)
-      end
-    end
   end
 end
 end
......
 module Gitlab
   class OmniauthInitializer
+    def self.enabled?
+      Gitlab.config.omniauth.enabled ||
+        Gitlab.config.omniauth.auto_sign_in_with_provider.present?
+    end
+
     def initialize(devise_config)
       @devise_config = devise_config
     end
......
@@ -18,7 +18,7 @@
     "webpack-prod": "NODE_ENV=production webpack --config config/webpack.config.js"
   },
   "dependencies": {
-    "@gitlab-org/gitlab-svgs": "^1.24.0",
+    "@gitlab-org/gitlab-svgs": "^1.25.0",
     "autosize": "^4.0.0",
     "axios": "^0.17.1",
     "babel-core": "^6.26.3",
......
@@ -3,5 +3,53 @@ FactoryBot.define do
     build factory: :ci_build
     chunk_index 0
     data_store :redis
+
+    trait :redis_with_data do
+      data_store :redis
+
+      transient do
+        initial_data 'test data'
+      end
+
+      after(:create) do |build_trace_chunk, evaluator|
+        Ci::BuildTraceChunks::Redis.new.set_data(build_trace_chunk, evaluator.initial_data)
+      end
+    end
+
+    trait :redis_without_data do
+      data_store :redis
+    end
+
+    trait :database_with_data do
+      data_store :database
+
+      transient do
+        initial_data 'test data'
+      end
+
+      after(:build) do |build_trace_chunk, evaluator|
+        Ci::BuildTraceChunks::Database.new.set_data(build_trace_chunk, evaluator.initial_data)
+      end
+    end
+
+    trait :database_without_data do
+      data_store :database
+    end
+
+    trait :fog_with_data do
+      data_store :fog
+
+      transient do
+        initial_data 'test data'
+      end
+
+      after(:create) do |build_trace_chunk, evaluator|
+        Ci::BuildTraceChunks::Fog.new.set_data(build_trace_chunk, evaluator.initial_data)
+      end
+    end
+
+    trait :fog_without_data do
+      data_store :fog
+    end
   end
 end
import Vue from 'vue';
import '~/behaviors/markdown/render_gfm';
import { createStore } from '~/ide/stores';
import Info from '~/ide/components/merge_requests/info.vue';
import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
describe('IDE merge request details', () => {
let Component;
let vm;
beforeAll(() => {
Component = Vue.extend(Info);
});
beforeEach(() => {
const store = createStore();
store.state.currentProjectId = 'gitlab-ce';
store.state.currentMergeRequestId = 1;
store.state.projects['gitlab-ce'] = {
mergeRequests: {
1: {
iid: 1,
title: 'Testing',
title_html: '<span class="title-html">Testing</span>',
description: 'Description',
description_html: '<p class="description-html">Description HTML</p>',
},
},
};
vm = createComponentWithStore(Component, store).$mount();
});
afterEach(() => {
vm.$destroy();
});
it('renders merge request IID', () => {
expect(vm.$el.querySelector('.detail-page-header').textContent).toContain('!1');
});
it('renders title as HTML', () => {
expect(vm.$el.querySelector('.title-html')).not.toBe(null);
expect(vm.$el.querySelector('.title').textContent).toContain('Testing');
});
it('renders description as HTML', () => {
expect(vm.$el.querySelector('.description-html')).not.toBe(null);
expect(vm.$el.querySelector('.description').textContent).toContain('Description HTML');
});
});
import Vue from 'vue';
import '~/behaviors/markdown/render_gfm';
import { createStore } from '~/ide/stores';
import RightPane from '~/ide/components/panes/right.vue';
import { rightSidebarViews } from '~/ide/constants';
import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
describe('IDE right pane', () => {
let Component;
let vm;
beforeAll(() => {
Component = Vue.extend(RightPane);
});
beforeEach(() => {
const store = createStore();
vm = createComponentWithStore(Component, store).$mount();
});
afterEach(() => {
vm.$destroy();
});
describe('active', () => {
it('renders merge request button as active', done => {
vm.$store.state.rightPane = rightSidebarViews.mergeRequestInfo;
vm.$store.state.currentMergeRequestId = '123';
vm.$store.state.currentProjectId = 'gitlab-ce';
vm.$store.state.currentMergeRequestId = 1;
vm.$store.state.projects['gitlab-ce'] = {
mergeRequests: {
1: {
iid: 1,
title: 'Testing',
title_html: '<span class="title-html">Testing</span>',
description: 'Description',
description_html: '<p class="description-html">Description HTML</p>',
},
},
};
vm.$nextTick(() => {
expect(vm.$el.querySelector('.ide-sidebar-link.active')).not.toBe(null);
expect(
vm.$el.querySelector('.ide-sidebar-link.active').getAttribute('data-original-title'),
).toBe('Merge Request');
done();
});
});
});
describe('click', () => {
beforeEach(() => {
spyOn(vm, 'setRightPane');
});
it('sets view to merge request', done => {
vm.$store.state.currentMergeRequestId = '123';
vm.$nextTick(() => {
vm.$el.querySelector('.ide-sidebar-link').click();
expect(vm.setRightPane).toHaveBeenCalledWith(rightSidebarViews.mergeRequestInfo);
done();
});
});
});
});
@@ -39,7 +39,9 @@ describe('IDE store merge request actions', () => {
       store
         .dispatch('getMergeRequestData', { projectId: 'abcproject', mergeRequestId: 1 })
         .then(() => {
-          expect(service.getProjectMergeRequestData).toHaveBeenCalledWith('abcproject', 1);
+          expect(service.getProjectMergeRequestData).toHaveBeenCalledWith('abcproject', 1, {
+            render_html: true,
+          });
           done();
         })
......
require 'spec_helper'
describe Gitlab::BackgroundMigration::FixCrossProjectLabelLinks, :migration, schema: 20180702120647 do
let(:namespaces_table) { table(:namespaces) }
let(:projects_table) { table(:projects) }
let(:issues_table) { table(:issues) }
let(:merge_requests_table) { table(:merge_requests) }
let(:labels_table) { table(:labels) }
let(:label_links_table) { table(:label_links) }
let!(:group1) { namespaces_table.create(id: 10, type: 'Group', name: 'group1', path: 'group1') }
let!(:group2) { namespaces_table.create(id: 20, type: 'Group', name: 'group2', path: 'group2') }
let!(:project1) { projects_table.create(id: 1, name: 'project1', path: 'group1/project1', namespace_id: 10) }
let!(:project2) { projects_table.create(id: 3, name: 'project2', path: 'group1/project2', namespace_id: 20) }
let!(:label1) { labels_table.create(id: 1, title: 'bug', color: 'red', group_id: 10, type: 'GroupLabel') }
let!(:label2) { labels_table.create(id: 2, title: 'bug', color: 'red', group_id: 20, type: 'GroupLabel') }
def create_merge_request(id, project_id)
merge_requests_table.create(id: id,
target_project_id: project_id,
target_branch: 'master',
source_project_id: project_id,
source_branch: 'mr name',
title: "mr name#{id}")
end
def create_issue(id, project_id)
issues_table.create(id: id, title: "issue#{id}", project_id: project_id)
end
def create_resource(target_type, id, project_id)
target_type == 'Issue' ? create_issue(id, project_id) : create_merge_request(id, project_id)
end
shared_examples_for 'resource with cross-project labels' do
it 'updates only cross-project label links which exist in the local project or group' do
create_resource(target_type, 1, 1)
create_resource(target_type, 2, 3)
labels_table.create(id: 3, title: 'bug', color: 'red', project_id: 3, type: 'ProjectLabel')
link = label_links_table.create(label_id: 2, target_type: target_type, target_id: 1)
link2 = label_links_table.create(label_id: 3, target_type: target_type, target_id: 2)
subject.perform(1, 100)
expect(link.reload.label_id).to eq(1)
expect(link2.reload.label_id).to eq(3)
end
it 'ignores cross-project label links if label color is different' do
labels_table.create(id: 3, title: 'bug', color: 'green', group_id: 20, type: 'GroupLabel')
create_resource(target_type, 1, 1)
link = label_links_table.create(label_id: 3, target_type: target_type, target_id: 1)
subject.perform(1, 100)
expect(link.reload.label_id).to eq(3)
end
it 'ignores cross-project label links if label name is different' do
labels_table.create(id: 3, title: 'bug1', color: 'red', group_id: 20, type: 'GroupLabel')
create_resource(target_type, 1, 1)
link = label_links_table.create(label_id: 3, target_type: target_type, target_id: 1)
subject.perform(1, 100)
expect(link.reload.label_id).to eq(3)
end
context 'with nested group' do
before do
namespaces_table.create(id: 11, type: 'Group', name: 'subgroup1', path: 'group1/subgroup1', parent_id: 10)
projects_table.create(id: 2, name: 'subproject1', path: 'group1/subgroup1/subproject1', namespace_id: 11)
create_resource(target_type, 1, 2)
end
it 'ignores label links referencing ancestor group labels', :nested_groups do
labels_table.create(id: 4, title: 'bug', color: 'red', project_id: 2, type: 'ProjectLabel')
label_links_table.create(label_id: 4, target_type: target_type, target_id: 1)
link = label_links_table.create(label_id: 1, target_type: target_type, target_id: 1)
subject.perform(1, 100)
expect(link.reload.label_id).to eq(1)
end
it 'checks also issues and MRs in subgroups', :nested_groups do
link = label_links_table.create(label_id: 2, target_type: target_type, target_id: 1)
subject.perform(1, 100)
expect(link.reload.label_id).to eq(1)
end
end
end
context 'resource is Issue' do
it_behaves_like 'resource with cross-project labels' do
let(:target_type) { 'Issue' }
end
end
context 'resource is Merge Request' do
it_behaves_like 'resource with cross-project labels' do
let(:target_type) { 'MergeRequest' }
end
end
end
require 'spec_helper'
describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state do
include ::ExclusiveLeaseHelpers
let(:class_instance) { (Class.new { include ::Gitlab::ExclusiveLeaseHelpers }).new }
let(:unique_key) { SecureRandom.hex(10) }
describe '#in_lock' do
subject { class_instance.in_lock(unique_key, **options) { } }
let(:options) { {} }
context 'when the lease is not obtained yet' do
before do
stub_exclusive_lease(unique_key, 'uuid')
end
it 'calls the given block' do
expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once
end
it 'calls the given block continuously' do
expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once
expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once
expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once
end
it 'cancels the exclusive lease after the block' do
expect_to_cancel_exclusive_lease(unique_key, 'uuid')
subject
end
end
context 'when the lease is obtained already' do
let!(:lease) { stub_exclusive_lease_taken(unique_key) }
it 'retries to obtain a lease and raises an error' do
expect(lease).to receive(:try_obtain).exactly(11).times
expect { subject }.to raise_error('Failed to obtain a lock')
end
context 'when ttl is specified' do
let(:options) { { ttl: 10.minutes } }
it 'receives the specified argument' do
expect(Gitlab::ExclusiveLease).to receive(:new).with(unique_key, { timeout: 10.minutes })
expect { subject }.to raise_error('Failed to obtain a lock')
end
end
context 'when retry count is specified' do
let(:options) { { retries: 3 } }
it 'retries for the specified times' do
expect(lease).to receive(:try_obtain).exactly(4).times
expect { subject }.to raise_error('Failed to obtain a lock')
end
end
context 'when sleep second is specified' do
let(:options) { { retries: 0, sleep_sec: 0.05.seconds } }
it 'receives the specified argument' do
expect(class_instance).to receive(:sleep).with(0.05.seconds).once
expect { subject }.to raise_error('Failed to obtain a lock')
end
end
end
end
end
@@ -533,6 +533,22 @@ describe Ci::Build do
     end
   end

+  describe '#has_old_trace?' do
+    subject { build.has_old_trace? }
+
+    context 'when old trace exists' do
+      before do
+        build.update_column(:trace, 'old trace')
+      end
+
+      it { is_expected.to be_truthy }
+    end
+
+    context 'when old trace does not exist' do
+      it { is_expected.to be_falsy }
+    end
+  end
+
   describe '#trace=' do
     it "expect to fail trace=" do
       expect { build.trace = "new" }.to raise_error(NotImplementedError)
@@ -552,16 +568,32 @@ describe Ci::Build do
   end

   describe '#erase_old_trace!' do
-    subject { build.send(:read_attribute, :trace) }
+    subject { build.erase_old_trace! }

-    before do
-      build.send(:write_attribute, :trace, 'old trace')
-    end
+    context 'when old trace exists' do
+      before do
+        build.update_column(:trace, 'old trace')
+      end
+
+      it "erases old trace" do
+        subject
+
+        expect(build.old_trace).to be_nil
+      end
+
+      it "executes UPDATE query" do
+        recorded = ActiveRecord::QueryRecorder.new { subject }
+
+        expect(recorded.log.select { |l| l.match?(/UPDATE.*ci_builds/) }.count).to eq(1)
+      end
+    end

-    it "expect to receive data from database" do
-      build.erase_old_trace!
-
-      is_expected.to be_nil
-    end
+    context 'when old trace does not exist' do
+      it 'does not execute UPDATE query' do
+        recorded = ActiveRecord::QueryRecorder.new { subject }
+
+        expect(recorded.log.select { |l| l.match?(/UPDATE.*ci_builds/) }.count).to eq(0)
+      end
+    end
   end
......
@@ -14,6 +14,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
   before do
     stub_feature_flags(ci_enable_live_trace: true)
+    stub_artifacts_object_storage
   end

   context 'FastDestroyAll' do
@@ -37,6 +38,22 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
     end
   end

+  describe '.all_stores' do
+    subject { described_class.all_stores }
+
+    it 'returns a correctly ordered array' do
+      is_expected.to eq(%w[redis database fog])
+    end
+
+    it 'returns redis store as the lowest precedence' do
+      expect(subject.first).to eq('redis')
+    end
+
+    it 'returns fog store as the highest precedence' do
+      expect(subject.last).to eq('fog')
+    end
+  end
+
   describe '#data' do
     subject { build_trace_chunk.data }
@@ -44,181 +61,269 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
       let(:data_store) { :redis }

       before do
-        build_trace_chunk.send(:redis_set_data, 'Sample data in redis')
+        build_trace_chunk.send(:unsafe_set_data!, 'Sample data in redis')
       end

       it { is_expected.to eq('Sample data in redis') }
     end

     context 'when data_store is database' do
-      let(:data_store) { :db }
-      let(:raw_data) { 'Sample data in db' }
+      let(:data_store) { :database }
+      let(:raw_data) { 'Sample data in database' }

-      it { is_expected.to eq('Sample data in db') }
-    end
-  end
-
-  describe '#set_data' do
-    subject { build_trace_chunk.send(:set_data, value) }
-
-    let(:value) { 'Sample data' }
-
-    context 'when value bytesize is bigger than CHUNK_SIZE' do
-      let(:value) { 'a' * (described_class::CHUNK_SIZE + 1) }
-
-      it { expect { subject }.to raise_error('too much data') }
-    end
-
-    context 'when data_store is redis' do
-      let(:data_store) { :redis }
-
-      it do
-        expect(build_trace_chunk.send(:redis_data)).to be_nil
-
-        subject
-
-        expect(build_trace_chunk.send(:redis_data)).to eq(value)
-      end
-
-      context 'when fulfilled chunk size' do
-        let(:value) { 'a' * described_class::CHUNK_SIZE }
-
-        it 'schedules stashing data' do
-          expect(Ci::BuildTraceChunkFlushWorker).to receive(:perform_async).once
-
-          subject
-        end
-      end
-    end
-
-    context 'when data_store is database' do
-      let(:data_store) { :db }
-
-      it 'sets data' do
-        expect(build_trace_chunk.raw_data).to be_nil
-
-        subject
-
-        expect(build_trace_chunk.raw_data).to eq(value)
-        expect(build_trace_chunk.persisted?).to be_truthy
-      end
-
-      context 'when raw_data is not changed' do
-        it 'does not execute UPDATE' do
-          expect(build_trace_chunk.raw_data).to be_nil
-          build_trace_chunk.save!
-
-          # First set
-          expect(ActiveRecord::QueryRecorder.new { subject }.count).to be > 0
-          expect(build_trace_chunk.raw_data).to eq(value)
-          expect(build_trace_chunk.persisted?).to be_truthy
-
-          # Second set
-          build_trace_chunk.reload
-          expect(ActiveRecord::QueryRecorder.new { subject }.count).to be(0)
-        end
-      end
-
-      context 'when fulfilled chunk size' do
-        it 'does not schedule stashing data' do
-          expect(Ci::BuildTraceChunkFlushWorker).not_to receive(:perform_async)
-
-          subject
-        end
-      end
-    end
-  end
-
-  describe '#truncate' do
-    subject { build_trace_chunk.truncate(offset) }
-
-    shared_examples_for 'truncates' do
-      context 'when offset is negative' do
-        let(:offset) { -1 }
-
-        it { expect { subject }.to raise_error('Offset is out of range') }
-      end
-
-      context 'when offset is bigger than data size' do
-        let(:offset) { data.bytesize + 1 }
-
-        it { expect { subject }.to raise_error('Offset is out of range') }
-      end
-
-      context 'when offset is 10' do
-        let(:offset) { 10 }
-
-        it 'truncates' do
-          subject
-
-          expect(build_trace_chunk.data).to eq(data.byteslice(0, offset))
-        end
-      end
-    end
-
-    context 'when data_store is redis' do
-      let(:data_store) { :redis }
-      let(:data) { 'Sample data in redis' }
-
-      before do
-        build_trace_chunk.send(:redis_set_data, data)
-      end
-
-      it_behaves_like 'truncates'
-    end
-
-    context 'when data_store is database' do
-      let(:data_store) { :db }
-      let(:raw_data) { 'Sample data in db' }
-      let(:data) { raw_data }
-
-      it_behaves_like 'truncates'
-    end
-  end
-
-  describe '#append' do
-    subject { build_trace_chunk.append(new_data, offset) }
-
-    let(:new_data) { 'Sample new data' }
-    let(:offset) { 0 }
-    let(:total_data) { data + new_data }
-
-    shared_examples_for 'appends' do
-      context 'when offset is negative' do
-        let(:offset) { -1 }
-
-        it { expect { subject }.to raise_error('Offset is out of range') }
-      end
-
-      context 'when offset is bigger than data size' do
-        let(:offset) { data.bytesize + 1 }
-
-        it { expect { subject }.to raise_error('Offset is out of range') }
-      end
-
-      context 'when new data is bigger than chunk size' do
-        let(:new_data) { 'a' * (described_class::CHUNK_SIZE + 1) }
-
-        it { expect { subject }.to raise_error('Chunk size overflow') }
-      end
-
-      context 'when offset is EOF' do
-        let(:offset) { data.bytesize }
-
-        it 'appends' do
-          subject
-
-          expect(build_trace_chunk.data).to eq(total_data)
-        end
-      end
-
-      context 'when offset is 10' do
-        let(:offset) { 10 }
-
-        it 'appends' do
-          subject
-
-          expect(build_trace_chunk.data).to eq(data.byteslice(0, offset) + new_data)
-        end
-      end
-    end
+      it { is_expected.to eq('Sample data in database') }
+    end
+
+    context 'when data_store is fog' do
+      let(:data_store) { :fog }
+
+      before do
+        build_trace_chunk.send(:unsafe_set_data!, 'Sample data in fog')
+      end
+
+      it { is_expected.to eq('Sample data in fog') }
+    end
+  end
+
+  describe '#append' do
+    subject { build_trace_chunk.append(new_data, offset) }
+
+    let(:new_data) { 'Sample new data' }
+    let(:offset) { 0 }
+    let(:merged_data) { data + new_data.to_s }
+
+    shared_examples_for 'Appending correctly' do
+      context 'when offset is negative' do
+        let(:offset) { -1 }
+
+        it { expect { subject }.to raise_error('Offset is out of range') }
+      end
+
+      context 'when offset is bigger than data size' do
+        let(:offset) { data.bytesize + 1 }
+
+        it { expect { subject }.to raise_error('Offset is out of range') }
+      end
+
+      context 'when new data overflows chunk size' do
+        let(:new_data) { 'a' * (described_class::CHUNK_SIZE + 1) }
+
+        it { expect { subject }.to raise_error('Chunk size overflow') }
+      end
+
+      context 'when offset is EOF' do
+        let(:offset) { data.bytesize }
+
+        it 'appends' do
+          subject
+
+          expect(build_trace_chunk.data).to eq(merged_data)
+        end
+
+        context 'when the other process is appending' do
+          let(:lease_key) { "trace_write:#{build_trace_chunk.build.id}:chunks:#{build_trace_chunk.chunk_index}" }
+
+          before do
+            stub_exclusive_lease_taken(lease_key)
+          end
+
+          it 'raises an error' do
+            expect { subject }.to raise_error('Failed to obtain a lock')
+          end
+        end
+
+        context 'when new_data is nil' do
+          let(:new_data) { nil }
+
+          it 'raises an error' do
+            expect { subject }.to raise_error('New data is missing')
+          end
+        end
+
+        context 'when new_data is empty' do
+          let(:new_data) { '' }
+
+          it 'does not append' do
+            subject
+
+            expect(build_trace_chunk.data).to eq(data)
+          end
+
+          it 'does not execute UPDATE' do
+            ActiveRecord::QueryRecorder.new { subject }.log.map do |query|
+              expect(query).not_to include('UPDATE')
+            end
+          end
+        end
+      end
+
+      context 'when offset is middle of data size' do
+        let(:offset) { data.bytesize / 2 }
+
+        it 'appends' do
+          subject
+
+          expect(build_trace_chunk.data).to eq(data.byteslice(0, offset) + new_data)
+        end
+      end
+    end
+
+    shared_examples_for 'Scheduling sidekiq worker to flush data to persist store' do
+      context 'when new data fulfilled chunk size' do
+        let(:new_data) { 'a' * described_class::CHUNK_SIZE }
+
+        it 'schedules trace chunk flush worker' do
+          expect(Ci::BuildTraceChunkFlushWorker).to receive(:perform_async).once
+
+          subject
+        end
+
+        it 'migrates data to object storage' do
+          Sidekiq::Testing.inline! do
+            subject
+
+            build_trace_chunk.reload
+            expect(build_trace_chunk.fog?).to be_truthy
+            expect(build_trace_chunk.data).to eq(new_data)
+          end
+        end
+      end
+    end
+
+    shared_examples_for 'Scheduling no sidekiq worker' do
+      context 'when new data fulfilled chunk size' do
+        let(:new_data) { 'a' * described_class::CHUNK_SIZE }
+
+        it 'does not schedule trace chunk flush worker' do
+          expect(Ci::BuildTraceChunkFlushWorker).not_to receive(:perform_async)
+
+          subject
+        end
+
+        it 'does not migrate data to object storage' do
+          Sidekiq::Testing.inline! do
+            data_store = build_trace_chunk.data_store
+
+            subject
+
+            build_trace_chunk.reload
+            expect(build_trace_chunk.data_store).to eq(data_store)
+          end
+        end
+      end
+    end
+
+    context 'when data_store is redis' do
+      let(:data_store) { :redis }
+
+      context 'when there are no data' do
+        let(:data) { '' }
+
+        it 'has no data' do
+          expect(build_trace_chunk.data).to be_empty
+        end
+
+        it_behaves_like 'Appending correctly'
+        it_behaves_like 'Scheduling sidekiq worker to flush data to persist store'
+      end
+
+      context 'when there are some data' do
+        let(:data) { 'Sample data in redis' }
+
+        before do
+          build_trace_chunk.send(:unsafe_set_data!, data)
+        end
+
+        it 'has data' do
+          expect(build_trace_chunk.data).to eq(data)
+        end
+
+        it_behaves_like 'Appending correctly'
+        it_behaves_like 'Scheduling sidekiq worker to flush data to persist store'
+      end
+    end
+
+    context 'when data_store is database' do
+      let(:data_store) { :database }
+
+      context 'when there are no data' do
+        let(:data) { '' }
+
+        it 'has no data' do
+          expect(build_trace_chunk.data).to be_empty
+        end
+
+        it_behaves_like 'Appending correctly'
+        it_behaves_like 'Scheduling no sidekiq worker'
+      end
+
+      context 'when there are some data' do
+        let(:raw_data) { 'Sample data in database' }
+        let(:data) { raw_data }
+
+        it 'has data' do
+          expect(build_trace_chunk.data).to eq(data)
+        end
+
+        it_behaves_like 'Appending correctly'
+        it_behaves_like 'Scheduling no sidekiq worker'
+      end
+    end
+
+    context 'when data_store is fog' do
+      let(:data_store) { :fog }
+
+      context 'when there are no data' do
+        let(:data) { '' }
+
+        it 'has no data' do
+          expect(build_trace_chunk.data).to be_empty
+        end
+
+        it_behaves_like 'Appending correctly'
+        it_behaves_like 'Scheduling no sidekiq worker'
+      end
+
+      context 'when there are some data' do
+        let(:data) { 'Sample data in fog' }
+
+        before do
+          build_trace_chunk.send(:unsafe_set_data!, data)
+        end
+
+        it 'has data' do
+          expect(build_trace_chunk.data).to eq(data)
+        end
+
+        it_behaves_like 'Appending correctly'
+        it_behaves_like 'Scheduling no sidekiq worker'
+      end
+    end
+  end
+
+  describe '#truncate' do
+    subject { build_trace_chunk.truncate(offset) }
+
+    shared_examples_for 'truncates' do
+      context 'when offset is negative' do
+        let(:offset) { -1 }
+
+        it { expect { subject }.to raise_error('Offset is out of range') }
+      end
+
+      context 'when offset is bigger than data size' do
+        let(:offset) { data.bytesize + 1 }
+
+        it { expect { subject }.to raise_error('Offset is out of range') }
+      end
+
+      context 'when offset is 10' do
+        let(:offset) { 10 }
+
+        it 'truncates' do
+          subject
+
+          expect(build_trace_chunk.data).to eq(data.byteslice(0, offset))
+        end
+      end
+    end
@@ -228,18 +333,29 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
      let(:data) { 'Sample data in redis' }

      before do
        build_trace_chunk.send(:unsafe_set_data!, data)
      end

      it_behaves_like 'truncates'
    end

    context 'when data_store is database' do
      let(:data_store) { :database }
      let(:raw_data) { 'Sample data in database' }
      let(:data) { raw_data }

      it_behaves_like 'truncates'
    end

    context 'when data_store is fog' do
      let(:data_store) { :fog }
      let(:data) { 'Sample data in fog' }

      before do
        build_trace_chunk.send(:unsafe_set_data!, data)
      end

      it_behaves_like 'truncates'
    end
  end
@@ -253,7 +369,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
      let(:data) { 'Sample data in redis' }

      before do
        build_trace_chunk.send(:unsafe_set_data!, data)
      end

      it { is_expected.to eq(data.bytesize) }
@@ -265,10 +381,10 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
    end

    context 'when data_store is database' do
      let(:data_store) { :database }

      context 'when data exists' do
        let(:raw_data) { 'Sample data in database' }
        let(:data) { raw_data }

        it { is_expected.to eq(data.bytesize) }
@@ -278,10 +394,43 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
        it { is_expected.to eq(0) }
      end
    end
context 'when data_store is fog' do
let(:data_store) { :fog }
context 'when data exists' do
let(:data) { 'Sample data in fog' }
let(:key) { "tmp/builds/#{build.id}/chunks/#{chunk_index}.log" }
before do
build_trace_chunk.send(:unsafe_set_data!, data)
end
it { is_expected.to eq(data.bytesize) }
end
context 'when data does not exist' do
it { is_expected.to eq(0) }
end
end
end end
  describe '#persist_data!' do
    subject { build_trace_chunk.persist_data! }
shared_examples_for 'Atomic operation' do
context 'when the other process is persisting' do
let(:lease_key) { "trace_write:#{build_trace_chunk.build.id}:chunks:#{build_trace_chunk.chunk_index}" }
before do
stub_exclusive_lease_taken(lease_key)
end
it 'raises an error' do
expect { subject }.to raise_error('Failed to obtain a lock')
end
end
end
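The 'Atomic operation' shared example pins down the concurrency contract: a writer that cannot take the exclusive lease for a chunk must fail fast with 'Failed to obtain a lock' rather than write anyway. A self-contained toy model of that contract follows; it is a sketch that mirrors the shared example's assertion, not GitLab's real ExclusiveLease API.

require 'set'

# Toy lease registry: taking an already-held key raises, mirroring the
# behavior the shared example asserts. All names here are illustrative.
class ToyLease
  @held = Set.new

  def self.with_lease(key)
    raise 'Failed to obtain a lock' unless @held.add?(key)

    begin
      yield
    ensure
      @held.delete(key)
    end
  end
end

# Usage: the same lease key shape as the spec above, for one build's chunk 0.
ToyLease.with_lease('trace_write:42:chunks:0') do
  # persist the chunk; a concurrent caller holding this key would raise
end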
  context 'when data_store is redis' do
    let(:data_store) { :redis }

@@ -290,46 +439,93 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
      let(:data) { 'Sample data in redis' }

      before do
        build_trace_chunk.send(:unsafe_set_data!, data)
      end

      it 'persists the data' do
        expect(build_trace_chunk.redis?).to be_truthy
        expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to eq(data)
        expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
        expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)

        subject

        expect(build_trace_chunk.fog?).to be_truthy
        expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
        expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
        expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
      end

      it_behaves_like 'Atomic operation'
    end

    context 'when data does not exist' do
      it 'does not persist' do
        expect { subject }.to raise_error('Can not persist empty data')
      end
    end
  end
  context 'when data_store is database' do
    let(:data_store) { :database }

    context 'when data exists' do
      let(:data) { 'Sample data in database' }

      before do
        build_trace_chunk.send(:unsafe_set_data!, data)
      end

      it 'persists the data' do
        expect(build_trace_chunk.database?).to be_truthy
        expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
        expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to eq(data)
        expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)

        subject

        expect(build_trace_chunk.fog?).to be_truthy
        expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
        expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
        expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
      end

      it_behaves_like 'Atomic operation'
    end

    context 'when data does not exist' do
      it 'does not persist' do
        expect { subject }.to raise_error('Can not persist empty data')
      end
    end
  end

  context 'when data_store is fog' do
    let(:data_store) { :fog }

    context 'when data exists' do
      let(:data) { 'Sample data in fog' }

      before do
        build_trace_chunk.send(:unsafe_set_data!, data)
      end

      it 'does not change data store' do
        expect(build_trace_chunk.fog?).to be_truthy
        expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
        expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
        expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)

        subject

        expect(build_trace_chunk.fog?).to be_truthy
        expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
        expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
        expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
      end

      it_behaves_like 'Atomic operation'
    end
  end
end
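Read together, the three persist_data! contexts describe a one-way lifecycle: live chunk data starts in Redis or the database, is copied to object storage exactly once, and the live copy is deleted; persisting an empty chunk is an error, and persisting an already-persisted chunk changes nothing. A self-contained toy model of that flow follows; it mirrors the spec's assertions only and is not GitLab's implementation.

# Toy chunk with three backing stores; persist_data! moves the payload to
# :fog and records the new store, as the expectations above require.
class ToyChunk
  STORES = { redis: {}, database: {}, fog: {} }

  attr_reader :data_store

  def initialize(data_store)
    @data_store = data_store
  end

  def set_data(value)
    STORES[@data_store][object_id] = value
  end

  def data
    STORES[@data_store][object_id]
  end

  def persist_data!
    raise 'Can not persist empty data' if data.nil? || data.empty?
    return if @data_store == :fog # already persisted; nothing to move

    STORES[:fog][object_id] = STORES[@data_store].delete(object_id)
    @data_store = :fog
  end
end

chunk = ToyChunk.new(:redis)
chunk.set_data('Sample data in redis')
chunk.persist_data!
puts chunk.data_store # => fog
puts chunk.data       # => Sample data in redis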
......
require 'spec_helper'
describe Ci::BuildTraceChunks::Database do
let(:data_store) { described_class.new }
describe '#available?' do
subject { data_store.available? }
it { is_expected.to be_truthy }
end
describe '#data' do
subject { data_store.data(model) }
context 'when data exists' do
let(:model) { create(:ci_build_trace_chunk, :database_with_data, initial_data: 'sample data in database') }
it 'returns the data' do
is_expected.to eq('sample data in database')
end
end
context 'when data does not exist' do
let(:model) { create(:ci_build_trace_chunk, :database_without_data) }
it 'returns nil' do
is_expected.to be_nil
end
end
end
describe '#set_data' do
subject { data_store.set_data(model, data) }
let(:data) { 'abc123' }
context 'when data exists' do
let(:model) { create(:ci_build_trace_chunk, :database_with_data, initial_data: 'sample data in database') }
it 'overwrites data' do
expect(data_store.data(model)).to eq('sample data in database')
subject
expect(data_store.data(model)).to eq('abc123')
end
end
context 'when data does not exist' do
let(:model) { create(:ci_build_trace_chunk, :database_without_data) }
it 'sets new data' do
expect(data_store.data(model)).to be_nil
subject
expect(data_store.data(model)).to eq('abc123')
end
end
end
describe '#delete_data' do
subject { data_store.delete_data(model) }
context 'when data exists' do
let(:model) { create(:ci_build_trace_chunk, :database_with_data, initial_data: 'sample data in database') }
it 'deletes data' do
expect(data_store.data(model)).to eq('sample data in database')
subject
expect(data_store.data(model)).to be_nil
end
end
context 'when data does not exist' do
let(:model) { create(:ci_build_trace_chunk, :database_without_data) }
it 'does nothing' do
expect(data_store.data(model)).to be_nil
subject
expect(data_store.data(model)).to be_nil
end
end
end
describe '#keys' do
subject { data_store.keys(relation) }
let(:build) { create(:ci_build) }
let(:relation) { build.trace_chunks }
before do
create(:ci_build_trace_chunk, :database_with_data, chunk_index: 0, build: build)
create(:ci_build_trace_chunk, :database_with_data, chunk_index: 1, build: build)
end
it 'returns empty array' do
is_expected.to eq([])
end
end
end
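Note the asymmetry in #keys: two chunks with data exist, yet the database store reports none. That is by design; the payload lives in the chunk row itself, so there is no external object for a cleanup job to track, whereas the Redis and Fog specs below return one [build_id, chunk_index] pair per chunk. A small illustration, with assumed build IDs:

# Assumed shapes, inferred from the three store specs in this file set:
keys_by_store = {
  database: [],                 # payload lives in the row; nothing external
  redis:    [[42, 0], [42, 1]], # one [build_id, chunk_index] pair per chunk
  fog:      [[42, 0], [42, 1]]
}

keys_by_store.each { |store, keys| puts "#{store}: #{keys.inspect}" }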
require 'spec_helper'
describe Ci::BuildTraceChunks::Fog do
let(:data_store) { described_class.new }
before do
stub_artifacts_object_storage
end
describe '#available?' do
subject { data_store.available? }
context 'when object storage is enabled' do
it { is_expected.to be_truthy }
end
context 'when object storage is disabled' do
before do
stub_artifacts_object_storage(enabled: false)
end
it { is_expected.to be_falsy }
end
end
describe '#data' do
subject { data_store.data(model) }
context 'when data exists' do
let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog') }
it 'returns the data' do
is_expected.to eq('sample data in fog')
end
end
context 'when data does not exist' do
let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
it 'raises an error' do
expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound)
end
end
end
describe '#set_data' do
subject { data_store.set_data(model, data) }
let(:data) { 'abc123' }
context 'when data exists' do
let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog') }
it 'overwrites data' do
expect(data_store.data(model)).to eq('sample data in fog')
subject
expect(data_store.data(model)).to eq('abc123')
end
end
context 'when data does not exist' do
let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
it 'sets new data' do
expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound)
subject
expect(data_store.data(model)).to eq('abc123')
end
end
end
describe '#delete_data' do
subject { data_store.delete_data(model) }
context 'when data exists' do
let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog') }
it 'deletes data' do
expect(data_store.data(model)).to eq('sample data in fog')
subject
expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound)
end
end
context 'when data does not exist' do
let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
it 'does nothing' do
expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound)
subject
expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound)
end
end
end
describe '#keys' do
subject { data_store.keys(relation) }
let(:build) { create(:ci_build) }
let(:relation) { build.trace_chunks }
before do
create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 0, build: build)
create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 1, build: build)
end
it 'returns keys' do
is_expected.to eq([[build.id, 0], [build.id, 1]])
end
end
describe '#delete_keys' do
subject { data_store.delete_keys(keys) }
let(:build) { create(:ci_build) }
let(:relation) { build.trace_chunks }
let(:keys) { data_store.keys(relation) }
before do
create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 0, build: build)
create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 1, build: build)
end
it 'deletes multiple data' do
::Fog::Storage.new(JobArtifactUploader.object_store_credentials).tap do |connection|
expect(connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/0.log")[:body]).to be_present
expect(connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/1.log")[:body]).to be_present
end
subject
::Fog::Storage.new(JobArtifactUploader.object_store_credentials).tap do |connection|
expect { connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/0.log")[:body] }.to raise_error(Excon::Error::NotFound)
expect { connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/1.log")[:body] }.to raise_error(Excon::Error::NotFound)
end
end
end
end
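These examples also fix the object-storage layout: bucket 'artifacts', object path tmp/builds/<build_id>/chunks/<chunk_index>.log. A one-method helper reproducing that mapping (the helper name is illustrative, not part of the codebase):

# Rebuilds the object path asserted in the #delete_keys example above.
def chunk_object_path(build_id, chunk_index)
  "tmp/builds/#{build_id}/chunks/#{chunk_index}.log"
end

puts chunk_object_path(42, 0) # => tmp/builds/42/chunks/0.log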
require 'spec_helper'
describe Ci::BuildTraceChunks::Redis, :clean_gitlab_redis_shared_state do
let(:data_store) { described_class.new }
describe '#available?' do
subject { data_store.available? }
it { is_expected.to be_truthy }
end
describe '#data' do
subject { data_store.data(model) }
context 'when data exists' do
let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'sample data in redis') }
it 'returns the data' do
is_expected.to eq('sample data in redis')
end
end
context 'when data does not exist' do
let(:model) { create(:ci_build_trace_chunk, :redis_without_data) }
it 'returns nil' do
is_expected.to be_nil
end
end
end
describe '#set_data' do
subject { data_store.set_data(model, data) }
let(:data) { 'abc123' }
context 'when data exists' do
let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'sample data in redis') }
it 'overwrites data' do
expect(data_store.data(model)).to eq('sample data in redis')
subject
expect(data_store.data(model)).to eq('abc123')
end
end
context 'when data does not exist' do
let(:model) { create(:ci_build_trace_chunk, :redis_without_data) }
it 'sets new data' do
expect(data_store.data(model)).to be_nil
subject
expect(data_store.data(model)).to eq('abc123')
end
end
end
describe '#delete_data' do
subject { data_store.delete_data(model) }
context 'when data exists' do
let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'sample data in redis') }
it 'deletes data' do
expect(data_store.data(model)).to eq('sample data in redis')
subject
expect(data_store.data(model)).to be_nil
end
end
context 'when data does not exist' do
let(:model) { create(:ci_build_trace_chunk, :redis_without_data) }
it 'does nothing' do
expect(data_store.data(model)).to be_nil
subject
expect(data_store.data(model)).to be_nil
end
end
end
describe '#keys' do
subject { data_store.keys(relation) }
let(:build) { create(:ci_build) }
let(:relation) { build.trace_chunks }
before do
create(:ci_build_trace_chunk, :redis_with_data, chunk_index: 0, build: build)
create(:ci_build_trace_chunk, :redis_with_data, chunk_index: 1, build: build)
end
it 'returns keys' do
is_expected.to eq([[build.id, 0], [build.id, 1]])
end
end
describe '#delete_keys' do
subject { data_store.delete_keys(keys) }
let(:build) { create(:ci_build) }
let(:relation) { build.trace_chunks }
let(:keys) { data_store.keys(relation) }
before do
create(:ci_build_trace_chunk, :redis_with_data, chunk_index: 0, build: build)
create(:ci_build_trace_chunk, :redis_with_data, chunk_index: 1, build: build)
end
it 'deletes multiple data' do
Gitlab::Redis::SharedState.with do |redis|
expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:0")).to be_truthy
expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:1")).to be_truthy
end
subject
Gitlab::Redis::SharedState.with do |redis|
expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:0")).to be_falsy
expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:1")).to be_falsy
end
end
end
end
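The Redis counterpart pins the key scheme, gitlab:ci:trace:<build_id>:chunks:<chunk_index>, kept in the SharedState instance. A sketch of what #delete_keys has to do, written with plain redis-rb; the pipelined batching here is an assumption for illustration, not the verbatim implementation:

require 'redis'

# Deletes the chunk keys asserted above in a single round trip.
def delete_chunk_keys(redis, keys)
  redis.pipelined do |pipeline|
    keys.each do |build_id, chunk_index|
      pipeline.del("gitlab:ci:trace:#{build_id}:chunks:#{chunk_index}")
    end
  end
end

delete_chunk_keys(Redis.new, [[42, 0], [42, 1]])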
@@ -306,6 +306,14 @@ describe API::MergeRequests do
    expect(json_response['changes_count']).to eq(merge_request.merge_request_diff.real_size)
  end
it 'exposes description and title html when render_html is true' do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), render_html: true
expect(response).to have_gitlab_http_status(200)
expect(json_response).to include('title_html', 'description_html')
end
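For API consumers, the new parameter means a single call can fetch pre-rendered HTML alongside the raw fields. A minimal client sketch against /api/v4; the host, project ID, MR iid, and token are placeholders, while the endpoint shape and render_html parameter come from the spec above.

require 'net/http'
require 'json'

uri = URI('https://gitlab.example.com/api/v4/projects/1/merge_requests/1?render_html=true')
request = Net::HTTP::Get.new(uri)
request['PRIVATE-TOKEN'] = '<access-token>'

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) do |http|
  http.request(request)
end

merge_request = JSON.parse(response.body)
puts merge_request['title_html']       # server-rendered title
puts merge_request['description_html'] # server-rendered description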
  context 'merge_request_metrics' do
    before do
      merge_request.metrics.update!(merged_by: user,
......
@@ -84,23 +84,5 @@ describe MergeRequests::Conflicts::ListService do
      expect(service.can_be_resolved_in_ui?).to be_falsey
    end
context 'with gitaly disabled', :skip_gitaly_mock do
it 'returns a falsey value when the MR has a missing ref after a force push' do
merge_request = create_merge_request('conflict-resolvable')
service = conflicts_service(merge_request)
allow_any_instance_of(Rugged::Repository).to receive(:merge_commits).and_raise(Rugged::OdbError)
expect(service.can_be_resolved_in_ui?).to be_falsey
end
it 'returns a falsey value when the MR has a missing revision after a force push' do
merge_request = create_merge_request('conflict-resolvable')
service = conflicts_service(merge_request)
allow(merge_request).to receive_message_chain(:target_branch_head, :raw, :id).and_return(Gitlab::Git::BLANK_SHA)
expect(service.can_be_resolved_in_ui?).to be_falsey
end
end
  end
end
@@ -123,17 +123,6 @@ describe MergeRequests::Conflicts::ResolveService do
      expect(merge_request_from_fork.source_branch_head.parents.map(&:id))
        .to eq(['404fa3fc7c2c9b5dacff102f353bdf55b1be2813', target_head])
    end
context 'when gitaly is disabled', :skip_gitaly_mock do
it 'gets conflicts from the source project' do
# REFACTOR NOTE: We used to test that `project.repository.rugged` wasn't
# used in this case, but since the refactor, for simplification,
# we always use that repository for read only operations.
expect(forked_project.repository.rugged).to receive(:merge_commits).and_call_original
subject
end
end
  end
end
......
@@ -25,6 +25,11 @@ module StubObjectStorage
    ::Fog::Storage.new(connection_params).tap do |connection|
      begin
        connection.directories.create(key: remote_directory)

        # Cleanup remaining files
        connection.directories.each do |directory|
          directory.files.map(&:destroy)
        end
      rescue Excon::Error::Conflict
      end
    end
......
@@ -611,6 +611,55 @@ shared_examples_for 'trace with disabled live trace feature' do
      end
    end
  end
describe '#erase!' do
subject { trace.erase! }
context 'when it is a live trace' do
context 'when trace is stored in database' do
let(:build) { create(:ci_build) }
before do
build.update_column(:trace, 'sample trace')
end
it { expect(trace.raw).not_to be_nil }
it "removes trace" do
subject
expect(trace.raw).to be_nil
end
end
context 'when trace is stored in file storage' do
let(:build) { create(:ci_build, :trace_live) }
it { expect(trace.raw).not_to be_nil }
it "removes trace" do
subject
expect(trace.raw).to be_nil
end
end
end
context 'when it is an archived trace' do
let(:build) { create(:ci_build, :trace_artifact) }
it "has trace at first" do
expect(trace.raw).not_to be_nil
end
it "removes trace" do
subject
build.reload
expect(trace.raw).to be_nil
end
end
end
end

shared_examples_for 'trace with enabled live trace feature' do
@@ -798,4 +847,35 @@ shared_examples_for 'trace with enabled live trace feature' do
      end
    end
  end
describe '#erase!' do
subject { trace.erase! }
context 'when it is a live trace' do
let(:build) { create(:ci_build, :trace_live) }
it { expect(trace.raw).not_to be_nil }
it "removes trace" do
subject
expect(trace.raw).to be_nil
end
end
context 'when it is an archived trace' do
let(:build) { create(:ci_build, :trace_artifact) }
it "has trace at first" do
expect(trace.raw).not_to be_nil
end
it "removes trace" do
subject
build.reload
expect(trace.raw).to be_nil
end
end
end
end
@@ -78,9 +78,9 @@
    lodash "^4.2.0"
    to-fast-properties "^2.0.0"

"@gitlab-org/gitlab-svgs@^1.25.0":
  version "1.25.0"
  resolved "https://registry.yarnpkg.com/@gitlab-org/gitlab-svgs/-/gitlab-svgs-1.25.0.tgz#1a82b1be43e1a46e6b0767ef46f26f5fd6bbd101"

"@sindresorhus/is@^0.7.0":
  version "0.7.0"
......