Commit 51e3e5f6 authored by GitLab Bot

Merge remote-tracking branch 'upstream/master' into ce-to-ee-2018-06-07

# Conflicts:
#	app/views/admin/appearances/_form.html.haml
#	app/views/groups/labels/index.html.haml
#	app/views/projects/pipelines/_with_tabs.html.haml
#	app/views/shared/_label_row.html.haml
#	db/schema.rb
#	doc/topics/autodevops/index.md
#	lib/gitlab/favicon.rb
#	spec/lib/gitlab/favicon_spec.rb
#	spec/lib/gitlab/usage_data_spec.rb

[ci skip]
parents 1d2993a7 ba4dc01e
@@ -85,9 +85,9 @@ export function redirectTo(url) {
}

export function webIDEUrl(route = undefined) {
-  let returnUrl = `${gon.relative_url_root}/-/ide/`;
+  let returnUrl = `${gon.relative_url_root || ''}/-/ide/`;
   if (route) {
-    returnUrl += `project${route}`;
+    returnUrl += `project${route.replace(new RegExp(`^${gon.relative_url_root || ''}`), '')}`;
   }
   return returnUrl;
}
@@ -139,6 +139,8 @@
}

.nav {
  flex-wrap: nowrap;

  > li:not(.d-none) a {
    @include media-breakpoint-down(xs) {
      margin-left: 0;

@@ -158,11 +160,12 @@
}

.navbar-toggler {
  position: relative;
  right: -10px;
  border-radius: 0;
  min-width: 45px;
  padding: 0;
- margin-right: -7px;
+ margin: $gl-padding-8 -7px $gl-padding-8 0;
  font-size: 14px;
  text-align: center;
  color: currentColor;

@@ -186,6 +189,7 @@
  display: -webkit-flex;
  display: flex;
  padding-right: 10px;
  flex-direction: row;
}

li {

@@ -290,6 +294,10 @@
      margin: 8px;
    }
  }

  .dropdown-menu {
    position: absolute;
  }
}

.navbar-sub-nav {
......
@@ -15,6 +15,7 @@
  color: $perf-bar-text;

  select {
    color: $perf-bar-text;
    width: 200px;
  }
......
@@ -42,6 +42,6 @@ class Projects::Clusters::ApplicationsController < Projects::ApplicationControll
        owner: current_user
      }

-     Applications::CreateService.new(current_user, oauth_application_params).execute
+     Applications::CreateService.new(current_user, oauth_application_params).execute(request)
    end
  end
@@ -64,7 +64,7 @@ class NotificationRecipient
      return false unless @target
      return false unless @target.respond_to?(:subscriptions)

-     subscription = @target.subscriptions.find_by_user_id(@user.id)
+     subscription = @target.subscriptions.find { |subscription| subscription.user_id == @user.id }
      subscription && !subscription.subscribed
    end
......
@@ -6,6 +6,8 @@ class ProjectAutoDevops < ActiveRecord::Base
  validates :domain, allow_blank: true, hostname: { allow_numeric_hostname: true }

  after_save :create_gitlab_deploy_token, if: :needs_to_create_deploy_token?

  def instance_domain
    Gitlab::CurrentSettings.auto_devops_domain
  end

@@ -22,4 +24,23 @@ class ProjectAutoDevops < ActiveRecord::Base
      end
    end
  end

  private

  def create_gitlab_deploy_token
    project.deploy_tokens.create!(
      name: DeployToken::GITLAB_DEPLOY_TOKEN_NAME,
      read_registry: true
    )
  end

  def needs_to_create_deploy_token?
    auto_devops_enabled? &&
      !project.public? &&
      !project.deploy_tokens.find_by(name: DeployToken::GITLAB_DEPLOY_TOKEN_NAME).present?
  end

  def auto_devops_enabled?
    Gitlab::CurrentSettings.auto_devops_enabled? || enabled?
  end
end
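
Reviewer note: a minimal sketch of how the new callback chain is expected to behave for a non-public project. Only the model API comes from the diff above; the project lookup and console flow are assumptions.

```ruby
# Illustrative console session (assumed setup, not part of the diff).
project = Project.find_by_full_path('group/internal-project') # any non-public project

auto_devops = project.build_auto_devops(enabled: true)
auto_devops.save! # after_save runs create_gitlab_deploy_token when needs_to_create_deploy_token? is true

project.deploy_tokens.find_by(name: DeployToken::GITLAB_DEPLOY_TOKEN_NAME)
# => a read_registry deploy token, created once and skipped on later saves
```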
@@ -1060,7 +1060,10 @@ class User < ActiveRecord::Base
  def notification_settings_for(source)
    if notification_settings.loaded?
-     notification_settings.find { |notification| notification.source == source }
+     notification_settings.find do |notification|
+       notification.source_type == source.class.base_class.name &&
+         notification.source_id == source.id
+     end
    else
      notification_settings.find_or_initialize_by(source: source)
    end
......
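
Reviewer note: the point of the new block is that it only compares attributes already held in memory. A hedged sketch of the intended usage; the `project` variable and the explicit preload are assumptions, not lines from this diff.

```ruby
# Assumed example: with the association preloaded, no per-setting query is needed.
user.notification_settings.load

setting = user.notification_settings.find do |notification|
  notification.source_type == project.class.base_class.name &&
    notification.source_id == project.id
end
# Previously `notification.source == source` could instantiate each setting's
# source record just to compare it, adding queries per notification setting.
```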
@@ -7,7 +7,7 @@ module Applications
    @params = params.except(:ip_address)
  end

- def execute(request = nil)
+ def execute(request)
    Doorkeeper::Application.create(@params)
  end
end
......
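
Reviewer note: with the default removed from `execute`, every caller now has to pass the request explicitly, as the clusters controller above does. A hedged usage sketch; the parameter values are made up.

```ruby
# Hypothetical caller, mirroring the controller change in this diff.
oauth_application_params = {
  name: 'GitLab Runner',                  # assumed values
  redirect_uri: 'https://example.com/cb', # assumed
  scopes: '',
  owner: current_user
}

Applications::CreateService
  .new(current_user, oauth_application_params)
  .execute(request) # the request object is now a required argument
```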
@@ -37,8 +37,11 @@
%br
The resulting favicons will be cropped to be square and scaled down to a size of 32x32 px.
<<<<<<< HEAD
= render partial: 'admin/appearances/system_header_footer_form', locals: { form: f }
=======
>>>>>>> upstream/master
%fieldset.sign-in
%legend
Sign in/Sign up pages:
......
<<<<<<< HEAD
- page_title 'Labels'
- can_admin_label = can?(current_user, :admin_label, @group)
- issuables = ['issues', 'merge requests'] + (@group&.feature_available?(:epics) ? ['epics'] : [])
@@ -7,6 +8,20 @@
.nav-controls
= link_to _('New label'), new_group_label_path(@group), class: "btn btn-new"
=======
- @no_container = true
- page_title "Labels"
- can_admin_label = can?(current_user, :admin_label, @group)
- hide_class = ''
- hide = @available_labels.empty? || (params[:page].present? && params[:page] != '1')
- issuables = ['issues', 'merge requests']
- if can_admin_label
- content_for(:header_content) do
.nav-controls
= link_to _('New label'), new_group_label_path(@group), class: "btn btn-new"
>>>>>>> upstream/master
- if @labels.exists?
#promote-label-modal
%div{ class: container_class }
@@ -16,7 +31,12 @@
.labels-container.prepend-top-5
.other-labels
<<<<<<< HEAD
%h5 Labels
=======
- if can_admin_label
%h5{ class: ('hide' if hide) } Labels
>>>>>>> upstream/master
%ul.content-list.manage-labels-list.js-other-labels
= render partial: 'shared/label', subject: @group, collection: @labels, as: :label, locals: { use_label_priority: false }
= paginate @labels, theme: 'gitlab'
......
@@ -15,7 +15,7 @@
= field.fields_for :provider_gcp, @cluster.provider_gcp do |provider_gcp_field|
.form-group
- = provider_gcp_field.label :gcp_project_id, s_('ClusterIntegration|Google Cloud Platform project ID')
+ = provider_gcp_field.label :gcp_project_id, s_('ClusterIntegration|Google Cloud Platform project')
.js-gcp-project-id-dropdown-entry-point{ data: { docsUrl: 'https://console.cloud.google.com/home/dashboard' } }
= provider_gcp_field.hidden_field :gcp_project_id
.dropdown
......
= form_for @cluster, url: user_namespace_project_clusters_path(@project.namespace, @project), as: :cluster do |field|
= form_errors(@cluster)
.form-group
- = field.label :name, s_('ClusterIntegration|Kubernetes cluster name')
+ = field.label :name, s_('ClusterIntegration|Kubernetes cluster name'), class: 'label-light'
= field.text_field :name, class: 'form-control', placeholder: s_('ClusterIntegration|Kubernetes cluster name')
.form-group
- = field.label :environment_scope, s_('ClusterIntegration|Environment scope')
+ = field.label :environment_scope, s_('ClusterIntegration|Environment scope'), class: 'label-light'
= field.text_field :environment_scope, class: 'form-control', readonly: !has_multiple_clusters?(@project), placeholder: s_('ClusterIntegration|Environment scope')
= field.fields_for :platform_kubernetes, @cluster.platform_kubernetes do |platform_kubernetes_field|
.form-group
- = platform_kubernetes_field.label :api_url, s_('ClusterIntegration|API URL')
+ = platform_kubernetes_field.label :api_url, s_('ClusterIntegration|API URL'), class: 'label-light'
= platform_kubernetes_field.text_field :api_url, class: 'form-control', placeholder: s_('ClusterIntegration|API URL')
.form-group
- = platform_kubernetes_field.label :ca_cert, s_('ClusterIntegration|CA Certificate')
+ = platform_kubernetes_field.label :ca_cert, s_('ClusterIntegration|CA Certificate'), class: 'label-light'
= platform_kubernetes_field.text_area :ca_cert, class: 'form-control', placeholder: s_('ClusterIntegration|Certificate Authority bundle (PEM format)')
.form-group
- = platform_kubernetes_field.label :token, s_('ClusterIntegration|Token')
+ = platform_kubernetes_field.label :token, s_('ClusterIntegration|Token'), class: 'label-light'
= platform_kubernetes_field.text_field :token, class: 'form-control', placeholder: s_('ClusterIntegration|Service token'), autocomplete: 'off'
.form-group
- = platform_kubernetes_field.label :namespace, s_('ClusterIntegration|Project namespace (optional, unique)')
+ = platform_kubernetes_field.label :namespace, s_('ClusterIntegration|Project namespace (optional, unique)'), class: 'label-light'
= platform_kubernetes_field.text_field :namespace, class: 'form-control', placeholder: s_('ClusterIntegration|Project namespace')
.form-group
......
= form_for @cluster, url: namespace_project_cluster_path(@project.namespace, @project, @cluster), as: :cluster do |field|
= form_errors(@cluster)
.form-group
- = field.label :name, s_('ClusterIntegration|Kubernetes cluster name')
+ = field.label :name, s_('ClusterIntegration|Kubernetes cluster name'), class: 'label-light'
= field.text_field :name, class: 'form-control', placeholder: s_('ClusterIntegration|Kubernetes cluster name')
= field.fields_for :platform_kubernetes, @cluster.platform_kubernetes do |platform_kubernetes_field|
.form-group
- = platform_kubernetes_field.label :api_url, s_('ClusterIntegration|API URL')
+ = platform_kubernetes_field.label :api_url, s_('ClusterIntegration|API URL'), class: 'label-light'
= platform_kubernetes_field.text_field :api_url, class: 'form-control', placeholder: s_('ClusterIntegration|API URL')
.form-group
- = platform_kubernetes_field.label :ca_cert, s_('ClusterIntegration|CA Certificate')
+ = platform_kubernetes_field.label :ca_cert, s_('ClusterIntegration|CA Certificate'), class: 'label-light'
= platform_kubernetes_field.text_area :ca_cert, class: 'form-control', placeholder: s_('ClusterIntegration|Certificate Authority bundle (PEM format)')
.form-group
- = platform_kubernetes_field.label :token, s_('ClusterIntegration|Token')
+ = platform_kubernetes_field.label :token, s_('ClusterIntegration|Token'), class: 'label-light'
.input-group
= platform_kubernetes_field.text_field :token, class: 'form-control js-cluster-token', type: 'password', placeholder: s_('ClusterIntegration|Token'), autocomplete: 'off'
%span.input-group-append.clipboard-addon
@@ -23,7 +23,7 @@
= s_('ClusterIntegration|Show')
.form-group
- = platform_kubernetes_field.label :namespace, s_('ClusterIntegration|Project namespace (optional, unique)')
+ = platform_kubernetes_field.label :namespace, s_('ClusterIntegration|Project namespace (optional, unique)'), class: 'label-light'
= platform_kubernetes_field.text_field :namespace, class: 'form-control', placeholder: s_('ClusterIntegration|Project namespace')
.form-group
......
@@ -84,6 +84,7 @@
%pre.build-trace.build-trace-rounded
%code.bash.js-build-output
= build_summary(build)
<<<<<<< HEAD
- if expose_sast_data || expose_dependency_data
#js-tab-security.build-security.tab-pane
#js-security-report-app{ data: { endpoint: expose_sast_data ? sast_artifact_url(@pipeline) : nil,
@@ -94,3 +95,6 @@
vulnerability_feedback_help_path: help_page_path("user/project/merge_requests/index", anchor: "interacting-with-security-reports-ultimate"),
sast_help_path: help_page_path('user/project/merge_requests/sast'),
dependency_scanning_help_path: help_page_path('user/project/merge_requests/dependency_scanning')} }
=======
>>>>>>> upstream/master
- subject = local_assigns[:subject]
- force_priority = local_assigns.fetch(:force_priority, false)
<<<<<<< HEAD
- show_label_epics_link = @group&.feature_available?(:epics)
=======
>>>>>>> upstream/master
- show_label_issues_link = show_label_issuables_link?(label, :issues, project: @project)
- show_label_merge_requests_link = show_label_issuables_link?(label, :merge_requests, project: @project)
@@ -12,6 +15,7 @@
.description-text.append-bottom-10
= markdown_field(label, :description)
%ul.label-links
<<<<<<< HEAD
- if show_label_epics_link
%li.label-link-item.inline
= link_to 'Epics', group_epics_path(@group, label_name: [label.name])
@@ -24,6 +28,15 @@
%li.label-link-item.inline
= link_to_label(label, subject: subject, type: :merge_request) { _('Merge requests') }
&middot;
=======
- if show_label_issues_link
%li.label-link-item.inline
= link_to_label(label, subject: subject) { 'Issues' }
- if show_label_merge_requests_link
&middot;
%li.label-link-item.inline
= link_to_label(label, subject: subject, type: :merge_request) { _('Merge requests') }
>>>>>>> upstream/master
- if force_priority
%li.label-link-item.js-priority-badge.inline.prepend-left-10
.label-badge.label-badge-blue= _('Prioritized label')
class StorageMigratorWorker
  include ApplicationWorker

- BATCH_SIZE = 100

  def perform(start, finish)
-   projects = build_relation(start, finish)
-
-   projects.with_route.find_each(batch_size: BATCH_SIZE) do |project|
-     Rails.logger.info "Starting storage migration of #{project.full_path} (ID=#{project.id})..."
-     begin
-       project.migrate_to_hashed_storage!
-     rescue => err
-       Rails.logger.error("#{err.message} migrating storage of #{project.full_path} (ID=#{project.id}), trace - #{err.backtrace}")
-     end
-   end
- end
-
- def build_relation(start, finish)
-   relation = Project
-   table = Project.arel_table
-   relation = relation.where(table[:id].gteq(start)) if start
-   relation = relation.where(table[:id].lteq(finish)) if finish
-   relation
+   migrator = Gitlab::HashedStorage::Migrator.new
+   migrator.bulk_migrate(start, finish)
  end
end
---
title: Improve Failed Jobs tab in the Pipeline detail page
merge_request:
author:
type: changed
---
title: Stop logging email information when emails are disabled
merge_request: 18521
author: Marc Shaw
type: fixed
---
title: Automatize Deploy Token creation for Auto Devops
merge_request: 19507
author:
type: added
---
title: 'Hashed Storage: migration rake task now can be executed to specific project'
merge_request: 19268
author:
type: changed
---
title: Improve performance of LFS integrity check
merge_request: 19494
author:
type: performance
---
title: Fixes Web IDE button on merge requests when GitLab is installed with relative
  URL
merge_request:
author:
type: fixed
---
title: Make CI job update entrypoint to work as keep-alive endpoint
merge_request: 19543
author:
type: changed
---
title: Fix some sources of excessive query counts when calculating notification recipients
merge_request:
author:
type: performance
# Interceptor in lib/disable_email_interceptor.rb
- ActionMailer::Base.register_interceptor(DisableEmailInterceptor) unless Gitlab.config.gitlab.email_enabled
+ unless Gitlab.config.gitlab.email_enabled
    ActionMailer::Base.register_interceptor(DisableEmailInterceptor)
    ActionMailer::Base.logger = nil
  end
@@ -67,6 +67,10 @@ Sidekiq::Testing.inline! do
        skip_disk_validation: true
      }

      if i % 2 == 0
        params[:storage_version] = Project::LATEST_STORAGE_VERSION
      end

      project = Projects::CreateService.new(User.first, params).execute

      # Seed-Fu runs this entire fixture in a transaction, so the `after_commit`
      # hook won't run until after the fixture is loaded. That is too late
......
# We want to enable hashed storage for every new project in development
# Details https://gitlab.com/gitlab-org/gitlab-ce/issues/46241
Gitlab::Seeder.quiet do
  ApplicationSetting.create_from_defaults unless ApplicationSetting.current_without_cache
  ApplicationSetting.current_without_cache.update!(hashed_storage_enabled: true)

  print '.'
end
@@ -40,12 +40,15 @@ ActiveRecord::Schema.define(version: 20180605213516) do
    t.text "new_project_guidelines"
    t.text "new_project_guidelines_html"
    t.string "favicon"
<<<<<<< HEAD
    t.text "header_message"
    t.text "header_message_html"
    t.text "footer_message"
    t.text "footer_message_html"
    t.text "message_background_color"
    t.text "message_font_color"
=======
>>>>>>> upstream/master
  end

  create_table "application_setting_terms", force: :cascade do |t|
......
@@ -17,13 +17,21 @@ This task will schedule all your existing projects and attachments associated wi

**Omnibus Installation**

```bash
- gitlab-rake gitlab:storage:migrate_to_hashed
+ sudo gitlab-rake gitlab:storage:migrate_to_hashed
```

**Source Installation**

```bash
- rake gitlab:storage:migrate_to_hashed
+ sudo -u git -H bundle exec rake gitlab:storage:migrate_to_hashed RAILS_ENV=production
```

They both also accept a range as environment variable:

```bash
# to migrate any non migrated project from ID 20 to 50.
export ID_FROM=20
export ID_TO=50
```

You can monitor the progress in the _Admin > Monitoring > Background jobs_ screen.
@@ -44,13 +52,13 @@ To have a simple summary of projects using **Legacy** storage:

**Omnibus Installation**

```bash
- gitlab-rake gitlab:storage:legacy_projects
+ sudo gitlab-rake gitlab:storage:legacy_projects
```

**Source Installation**

```bash
- rake gitlab:storage:legacy_projects
+ sudo -u git -H bundle exec rake gitlab:storage:legacy_projects RAILS_ENV=production
```

------

@@ -60,13 +68,13 @@ To list projects using **Legacy** storage:

**Omnibus Installation**

```bash
- gitlab-rake gitlab:storage:list_legacy_projects
+ sudo gitlab-rake gitlab:storage:list_legacy_projects
```

**Source Installation**

```bash
- rake gitlab:storage:list_legacy_projects
+ sudo -u git -H bundle exec rake gitlab:storage:list_legacy_projects RAILS_ENV=production
```

@@ -77,13 +85,13 @@ To have a simple summary of projects using **Hashed** storage:

**Omnibus Installation**

```bash
- gitlab-rake gitlab:storage:hashed_projects
+ sudo gitlab-rake gitlab:storage:hashed_projects
```

**Source Installation**

```bash
- rake gitlab:storage:hashed_projects
+ sudo -u git -H bundle exec rake gitlab:storage:hashed_projects RAILS_ENV=production
```

------

@@ -93,14 +101,13 @@ To list projects using **Hashed** storage:

**Omnibus Installation**

```bash
- gitlab-rake gitlab:storage:list_hashed_projects
+ sudo gitlab-rake gitlab:storage:list_hashed_projects
```

**Source Installation**

```bash
- rake gitlab:storage:list_hashed_projects
+ sudo -u git -H bundle exec rake gitlab:storage:list_hashed_projects RAILS_ENV=production
```

## List attachments on Legacy storage

@@ -110,13 +117,13 @@ To have a simple summary of project attachments using **Legacy** storage:

**Omnibus Installation**

```bash
- gitlab-rake gitlab:storage:legacy_attachments
+ sudo gitlab-rake gitlab:storage:legacy_attachments
```

**Source Installation**

```bash
- rake gitlab:storage:legacy_attachments
+ sudo -u git -H bundle exec rake gitlab:storage:legacy_attachments RAILS_ENV=production
```

------

@@ -126,13 +133,13 @@ To list project attachments using **Legacy** storage:

**Omnibus Installation**

```bash
- gitlab-rake gitlab:storage:list_legacy_attachments
+ sudo gitlab-rake gitlab:storage:list_legacy_attachments
```

**Source Installation**

```bash
- rake gitlab:storage:list_legacy_attachments
+ sudo -u git -H bundle exec rake gitlab:storage:list_legacy_attachments RAILS_ENV=production
```

## List attachments on Hashed storage

@@ -142,13 +149,13 @@ To have a simple summary of project attachments using **Hashed** storage:

**Omnibus Installation**

```bash
- gitlab-rake gitlab:storage:hashed_attachments
+ sudo gitlab-rake gitlab:storage:hashed_attachments
```

**Source Installation**

```bash
- rake gitlab:storage:hashed_attachments
+ sudo -u git -H bundle exec rake gitlab:storage:hashed_attachments RAILS_ENV=production
```

------

@@ -158,13 +165,13 @@ To list project attachments using **Hashed** storage:

**Omnibus Installation**

```bash
- gitlab-rake gitlab:storage:list_hashed_attachments
+ sudo gitlab-rake gitlab:storage:list_hashed_attachments
```

**Source Installation**

```bash
- rake gitlab:storage:list_hashed_attachments
+ sudo -u git -H bundle exec rake gitlab:storage:list_hashed_attachments RAILS_ENV=production
```

[storage-types]: ../repository_storage_types.md
......
@@ -114,7 +114,7 @@ Let's now see how that information is exposed within GitLab.

## Viewing the current status of an environment

- The environment list under your project's **Pipelines ➔ Environments**, is
+ The environment list under your project's **Operations > Environments**, is
where you can find information of the last deployment status of an environment.

Here's how the Environments page looks so far.

@@ -167,7 +167,7 @@ that works.

You can't control everything, so sometimes things go wrong. When that unfortunate
time comes GitLab has you covered. Simply by clicking the **Rollback** button
that can be found in the deployments page
- (**Pipelines ➔ Environments ➔ `environment name`**) you can relaunch the
+ (**Operations > Environments > `environment name`**) you can relaunch the
job with the commit associated with it.

>**Note:**
......
@@ -293,7 +293,11 @@ report is created, it's uploaded as an artifact which you can later download and
check out.

In GitLab Ultimate, any security warnings are also
<<<<<<< HEAD
[shown in the merge request widget](../../user/project/merge_requests/sast.md).
=======
[shown in the merge request widget](https://docs.gitlab.com/ee//user/project/merge_requests/sast.html).
>>>>>>> upstream/master

### Auto Dependency Scanning **[ULTIMATE]**

@@ -306,7 +310,11 @@ report is created, it's uploaded as an artifact which you can later download and
check out.

In GitLab Ultimate, any security warnings are also
<<<<<<< HEAD
[shown in the merge request widget](../../user/project/merge_requests/dependency_scanning.md).
=======
[shown in the merge request widget](https://docs.gitlab.com/ee//user/project/merge_requests/dependency_scanning.html).
>>>>>>> upstream/master

### Auto License Management **[ULTIMATE]**

@@ -319,7 +327,11 @@ report is created, it's uploaded as an artifact which you can later download and
check out.

In GitLab Ultimate, any licenses are also
<<<<<<< HEAD
[shown in the merge request widget](../../user/project/merge_requests/license_management.md).
=======
[shown in the merge request widget](https://docs.gitlab.com/ee//user/project/merge_requests/license_management.html).
>>>>>>> upstream/master

### Auto Container Scanning

@@ -332,7 +344,11 @@ created, it's uploaded as an artifact which you can later download and
check out.

In GitLab Ultimate, any security warnings are also
<<<<<<< HEAD
[shown in the merge request widget](../../user/project/merge_requests/container_scanning.md).
=======
[shown in the merge request widget](https://docs.gitlab.com/ee//user/project/merge_requests/container_scanning.html).
>>>>>>> upstream/master

### Auto Review Apps
@@ -371,7 +387,11 @@ issues. Once the report is created, it's uploaded as an artifact which you can
later download and check out.

In GitLab Ultimate, any security warnings are also
<<<<<<< HEAD
[shown in the merge request widget](../../user/project/merge_requests/dast.md).
=======
[shown in the merge request widget](https://docs.gitlab.com/ee//user/project/merge_requests/dast.html).
>>>>>>> upstream/master

### Auto Browser Performance Testing **[PREMIUM]**

@@ -386,7 +406,11 @@ Auto Browser Performance Testing utilizes the [Sitespeed.io container](https://h
```

In GitLab Premium, performance differences between the source
<<<<<<< HEAD
and target branches are [shown in the merge request widget](../../user/project/merge_requests/browser_performance_testing.md).
=======
and target branches are [shown in the merge request widget](https://docs.gitlab.com/ee//user/project/merge_requests/browser_performance_testing.html).
>>>>>>> upstream/master

### Auto Deploy
@@ -426,6 +450,15 @@ no longer be valid as soon as the deployment job finishes. This means that
Kubernetes can run the application, but in case it should be restarted or
executed somewhere else, it cannot be accessed again.

> [Introduced][ce-19507] in GitLab 11.0.

For internal and private projects a [GitLab Deploy Token](../../user/project/deploy_tokens/index.md#gitlab-deploy-token)
will be automatically created when Auto DevOps is enabled and the Auto DevOps settings are saved. This Deploy Token
can be used for permanent access to the registry.

NOTE: **Note:**
When the GitLab Deploy Token has been manually revoked, it won't be automatically created.

### Auto Monitoring

NOTE: **Note:**
@@ -503,7 +536,7 @@ repo or by specifying a project variable:
  file in it, Auto DevOps will detect the chart and use it instead of the [default
  one](https://gitlab.com/charts/charts.gitlab.io/tree/master/charts/auto-deploy-app).
  This can be a great way to control exactly how your application is deployed.
- - **Project variable** - Create a [variable](../../ci/variables/README.md#variables)
+ - **Project variable** - Create a [project variable](../../ci/variables/README.md#secret-variables)
  `AUTO_DEVOPS_CHART` with the URL of a custom chart to use.

### Customizing `.gitlab-ci.yml`
@@ -565,18 +598,17 @@ also be customized, and you can easily use a [custom buildpack](#custom-buildpac
| `CANARY_ENABLED` | From GitLab 11.0, this variable can be used to define a [deploy policy for canary environments](#deploy-policy-for-canary-environments). |
| `INCREMENTAL_ROLLOUT_ENABLED`| From GitLab 10.8, this variable can be used to enable an [incremental rollout](#incremental-rollout-to-production) of your application for the production environment. |
| `TEST_DISABLED` | From GitLab 11.0, this variable can be used to disable the `test` job. If the variable is present, the job will not be created. |
- | `CODE_QUALITY_DISABLED` | From GitLab 11.0, this variable can be used to disable the `code_quality` job. If the variable is present, the job will not be created. |
+ | `CODEQUALITY_DISABLED` | From GitLab 11.0, this variable can be used to disable the `codequality` job. If the variable is present, the job will not be created. |
| `LICENSE_MANAGEMENT_DISABLED` | From GitLab 11.0, this variable can be used to disable the `license_management` job. If the variable is present, the job will not be created. |
| `SAST_DISABLED` | From GitLab 11.0, this variable can be used to disable the `sast` job. If the variable is present, the job will not be created. |
| `DEPENDENCY_SCANNING_DISABLED` | From GitLab 11.0, this variable can be used to disable the `dependency_scanning` job. If the variable is present, the job will not be created. |
- | `CONTAINER_SCANNING_DISABLED` | From GitLab 11.0, this variable can be used to disable the `container_scanning` job. If the variable is present, the job will not be created. |
+ | `CONTAINER_SCANNING_DISABLED` | From GitLab 11.0, this variable can be used to disable the `sast:container` job. If the variable is present, the job will not be created. |
| `REVIEW_DISABLED` | From GitLab 11.0, this variable can be used to disable the `review` and the manual `review:stop` job. If the variable is present, these jobs will not be created. |
| `DAST_DISABLED` | From GitLab 11.0, this variable can be used to disable the `dast` job. If the variable is present, the job will not be created. |
| `PERFORMANCE_DISABLED` | From GitLab 11.0, this variable can be used to disable the `performance` job. If the variable is present, the job will not be created. |

TIP: **Tip:**
Set up the replica variables using a
- [project variable](../../ci/variables/README.md#variables)
+ [project variable](../../ci/variables/README.md#secret-variables)
and scale your application by just redeploying it!

CAUTION: **Caution:**
@@ -651,7 +683,7 @@ staging environment and deploy to production manually. For this scenario, the
`STAGING_ENABLED` environment variable was introduced.

If `STAGING_ENABLED` is defined in your project (e.g., set `STAGING_ENABLED` to
- `1` as a variable), then the application will be automatically deployed
+ `1` as a secret variable), then the application will be automatically deployed
to a `staging` environment, and a `production_manual` job will be created for
you when you're ready to manually deploy to production.

@@ -664,7 +696,7 @@ A [canary environment](https://docs.gitlab.com/ee/user/project/canary_deployment
before any changes are deployed to production.

If `CANARY_ENABLED` is defined in your project (e.g., set `CANARY_ENABLED` to
- `1` as a variable) then two manual jobs will be created:
+ `1` as a secret variable) then two manual jobs will be created:

- `canary` which will deploy the application to the canary environment
- `production_manual` which is to be used by you when you're ready to manually

@@ -680,7 +712,7 @@ This will allow you to first check how the app is behaving, and later manually
increasing the rollout up to 100%.

If `INCREMENTAL_ROLLOUT_ENABLED` is defined in your project (e.g., set
- `INCREMENTAL_ROLLOUT_ENABLED` to `1` as a variable), then instead of the
+ `INCREMENTAL_ROLLOUT_ENABLED` to `1` as a secret variable), then instead of the
standard `production` job, 4 different
[manual jobs](../../ci/pipelines.md#manual-actions-from-the-pipeline-graph)
will be created:

@@ -810,3 +842,4 @@ curl --data "value=true" --header "PRIVATE-TOKEN: personal_access_token" https:/
[Auto DevOps template]: https://gitlab.com/gitlab-org/gitlab-ci-yml/blob/master/Auto-DevOps.gitlab-ci.yml
[GitLab Omnibus Helm Chart]: ../../install/kubernetes/gitlab_omnibus.md
[ee]: https://about.gitlab.com/products/
[ce-19507]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/19507
@@ -60,6 +60,7 @@ Below are the current settings regarding [GitLab CI/CD](../../ci/README.md).

| Setting | GitLab.com | Default |
| ----------- | ----------------- | ------------- |
| Artifacts maximum size | 1G | 100M |
| Artifacts [expiry time](../../ci/yaml/README.md#artifacts-expire_in) | kept forever | deleted after 30 days unless otherwise specified |

## Repository size limit
......
@@ -38,7 +38,7 @@ Give the project a name, and then select `Create project`.

## Connecting the EKS cluster

- From the left side bar, hover over `CI/CD` and select `Kubernetes`, then click on `Add Kubernetes cluster`, and finally `Add an existing Kubernetes cluster`.
+ From the left side bar, hover over `Operations` and select `Kubernetes`, then click on `Add Kubernetes cluster`, and finally `Add an existing Kubernetes cluster`.

A few details from the EKS cluster will be required to connect it to GitLab.
......
@@ -39,7 +39,7 @@ Before proceeding, make sure the following requirements are met:
If all of the above requirements are met, you can proceed to create and add a
new Kubernetes cluster that will be hosted on GKE to your project:

- 1. Navigate to your project's **CI/CD > Kubernetes** page.
+ 1. Navigate to your project's **Operations > Kubernetes** page.
1. Click on **Add Kubernetes cluster**.
1. Click on **Create with GKE**.
1. Connect your Google account if you haven't done already by clicking the

@@ -70,7 +70,7 @@ You need Maintainer [permissions] and above to access the Kubernetes page.

To add an existing Kubernetes cluster to your project:

- 1. Navigate to your project's **CI/CD > Kubernetes** page.
+ 1. Navigate to your project's **Operations > Kubernetes** page.
1. Click on **Add Kubernetes cluster**.
1. Click on **Add an existing Kubernetes cluster** and fill in the details:
   - **Kubernetes cluster name** (required) - The name you wish to give the cluster.
......
@@ -29,7 +29,9 @@ The following aspects of a project are imported:
* Regular issue and pull request comments

References to pull requests and issues are preserved (GitLab.com & 8.7+), and
- each imported repository defaults to `private` but [can be made public](../settings/index.md#sharing-and-permissions), as needed.
+ each imported repository maintains visibility level unless that [visibility
+ level is restricted](../../../public_access/public_access.md#restricting-the-use-of-public-or-internal-projects),
+ in which case it defaults to the default project visibility.

## How it works
......
@@ -30,7 +30,7 @@ GitLab can seamlessly deploy and manage Prometheus on a [connected Kubernetes cl
Once you have a connected Kubernetes cluster with Helm installed, deploying a managed Prometheus is as easy as a single click.

- 1. Go to the `CI/CD > Kubernetes` page, to view your connected clusters
+ 1. Go to the `Operations > Kubernetes` page, to view your connected clusters
1. Select the cluster you would like to deploy Prometheus to
1. Click the **Install** button to deploy Prometheus to the cluster
......
@@ -61,6 +61,11 @@ module API
      def max_artifacts_size
        Gitlab::CurrentSettings.max_artifacts_size.megabytes.to_i
      end

      def job_forbidden!(job, reason)
        header 'Job-Status', job.status
        forbidden!(reason)
      end
    end
  end
end
@@ -125,7 +125,7 @@ module API
      end

      put '/:id' do
        job = authenticate_job!
-       forbidden!('Job is not running') unless job.running?
+       job_forbidden!(job, 'Job is not running') unless job.running?

        job.trace.set(params[:trace]) if params[:trace]

@@ -133,6 +133,8 @@
          project: job.project.full_path)

        case params[:state].to_s
        when 'running'
          job.touch if job.needs_touch?
        when 'success'
          job.success!
        when 'failed'

@@ -152,7 +154,7 @@
      end

      patch '/:id/trace' do
        job = authenticate_job!
-       forbidden!('Job is not running') unless job.running?
+       job_forbidden!(job, 'Job is not running') unless job.running?

        error!('400 Missing header Content-Range', 400) unless request.headers.key?('Content-Range')
        content_range = request.headers['Content-Range']
......
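
Reviewer note: a hedged, spec-style sketch of what the new `Job-Status` header enables. The canceled-job setup, path, and matchers are assumptions based on how GitLab request specs are typically written; none of these lines come from this diff.

```ruby
# Assumed request spec: a runner updating a job that is no longer running
# now gets the job status back in a header instead of a bare 403 body.
put api("/jobs/#{job.id}"), params: { token: job.token, state: 'success' }

expect(response).to have_gitlab_http_status(403)
expect(response.headers['Job-Status']).to eq('canceled') # assuming the job was canceled
```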
@@ -24,7 +24,22 @@ module Gitlab
    private

    def ensure_application_settings!
      cached_application_settings || uncached_application_settings
    end

    def cached_application_settings
      return in_memory_application_settings if ENV['IN_MEMORY_APPLICATION_SETTINGS'] == 'true'

      begin
        ::ApplicationSetting.cached
      rescue
        # In case Redis isn't running
        # or the Redis UNIX socket file is not available
        # or the DB is not running (we use migrations in the cache key)
      end
    end

    def uncached_application_settings
      return fake_application_settings unless connect_to_db?

      current_settings = ::ApplicationSetting.current
......
@@ -8,7 +8,11 @@ module Gitlab
      if Gitlab::Utils.to_boolean(ENV['CANARY'])
        'favicon-yellow.png'
      elsif Rails.env.development?
<<<<<<< HEAD
        'favicon-green.png'
=======
        'favicon-blue.png'
>>>>>>> upstream/master
      else
        'favicon.png'
      end
......
@@ -30,7 +30,7 @@ module Gitlab
      def git_new_pointers(object_limit, not_in)
        @new_pointers ||= begin
-         rev_list.new_objects(not_in: not_in, require_path: true) do |object_ids|
+         rev_list.new_objects(rev_list_params(not_in: not_in)) do |object_ids|
            object_ids = object_ids.take(object_limit) if object_limit

            Gitlab::Git::Blob.batch_lfs_pointers(@repository, object_ids)

@@ -39,9 +39,12 @@ module Gitlab
      end

      def git_all_pointers
-       params = { options: ["--filter=blob:limit=#{Gitlab::Git::Blob::LFS_POINTER_MAX_SIZE}"], require_path: true }
+       params = {}
+       if rev_list_supports_new_options?
+         params[:options] = ["--filter=blob:limit=#{Gitlab::Git::Blob::LFS_POINTER_MAX_SIZE}"]
+       end

-       rev_list.all_objects(params) do |object_ids|
+       rev_list.all_objects(rev_list_params(params)) do |object_ids|
          Gitlab::Git::Blob.batch_lfs_pointers(@repository, object_ids)
        end
      end

@@ -49,6 +52,23 @@ module Gitlab
      def rev_list
        Gitlab::Git::RevList.new(@repository, newrev: @newrev)
      end

      # We're passing the `--in-commit-order` arg to ensure we don't wait
      # for git to traverse all commits before returning pointers.
      # This is required in order to improve the performance of LFS integrity check
      def rev_list_params(params = {})
        params[:options] ||= []
        params[:options] << "--in-commit-order" if rev_list_supports_new_options?
        params[:require_path] = true

        params
      end

      def rev_list_supports_new_options?
        return @option_supported if defined?(@option_supported)

        @option_supported = Gitlab::Git.version >= Gitlab::VersionInfo.parse('2.16.0')
      end
    end
  end
end
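
Reviewer note: roughly what `rev_list_params` is expected to produce on a host with Git 2.16 or newer. The input values are illustrative, and since the method is private this is a conceptual sketch rather than a real call site.

```ruby
# Conceptual result on Git >= 2.16 (assumed), for the new_objects path:
rev_list_params(not_in: ['refs/heads/master'])
# => { not_in: ["refs/heads/master"], options: ["--in-commit-order"], require_path: true }

# On older Git versions the --in-commit-order option is simply omitted:
# => { not_in: ["refs/heads/master"], options: [], require_path: true }
```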
@@ -27,9 +27,10 @@ module Gitlab
      #
      # When given a block it will yield objects as a lazy enumerator so
      # the caller can limit work done instead of processing megabytes of data
-     def new_objects(require_path: nil, not_in: nil, &lazy_block)
+     def new_objects(options: [], require_path: nil, not_in: nil, &lazy_block)
        opts = {
          including: newrev,
          options: options,
          excluding: not_in.nil? ? :all : not_in,
          require_path: require_path
        }
......
module Gitlab
  module HashedStorage
    # Hashed Storage Migrator
    #
    # This is responsible for scheduling and flagging projects
    # to be migrated from Legacy to Hashed storage, either one by one or in bulk.
    class Migrator
      BATCH_SIZE = 100

      # Schedule a range of projects to be bulk migrated with #bulk_migrate asynchronously
      #
      # @param [Object] start first project id for the range
      # @param [Object] finish last project id for the range
      def bulk_schedule(start, finish)
        StorageMigratorWorker.perform_async(start, finish)
      end

      # Start migration of projects from specified range
      #
      # Flagging a project to be migrated is a synchronous action,
      # but the migration runs through async jobs
      #
      # @param [Object] start first project id for the range
      # @param [Object] finish last project id for the range
      def bulk_migrate(start, finish)
        projects = build_relation(start, finish)

        projects.with_route.find_each(batch_size: BATCH_SIZE) do |project|
          migrate(project)
        end
      end

      # Flag a project to be migrated
      #
      # @param [Object] project that will be migrated
      def migrate(project)
        Rails.logger.info "Starting storage migration of #{project.full_path} (ID=#{project.id})..."

        project.migrate_to_hashed_storage!
      rescue => err
        Rails.logger.error("#{err.message} migrating storage of #{project.full_path} (ID=#{project.id}), trace - #{err.backtrace}")
      end

      private

      def build_relation(start, finish)
        relation = Project
        table = Project.arel_table

        relation = relation.where(table[:id].gteq(start)) if start
        relation = relation.where(table[:id].lteq(finish)) if finish

        relation
      end
    end
  end
end
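
Reviewer note: a usage sketch of the new migrator, built only from the methods defined above; the ID range and project are example values.

```ruby
migrator = Gitlab::HashedStorage::Migrator.new

# Enqueue a StorageMigratorWorker job covering projects 1..100.
migrator.bulk_schedule(1, 100)

# Or flag the same range synchronously, batch by batch.
migrator.bulk_migrate(1, 100)

# Or migrate a single project, which is what the updated rake task does
# when ID_FROM equals ID_TO.
migrator.migrate(Project.find(42))
```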
@@ -9,8 +9,20 @@ module Gitlab
      ENV.fetch('LIMIT', 500).to_i
    end

    def self.range_from
      ENV['ID_FROM']
    end

    def self.range_to
      ENV['ID_TO']
    end

    def self.range_single_item?
      !range_from.nil? && range_from == range_to
    end

    def self.project_id_batches(&block)
-     Project.with_unmigrated_storage.in_batches(of: batch_size, start: ENV['ID_FROM'], finish: ENV['ID_TO']) do |relation| # rubocop: disable Cop/InBatches
+     Project.with_unmigrated_storage.in_batches(of: batch_size, start: range_from, finish: range_to) do |relation| # rubocop: disable Cop/InBatches
        ids = relation.pluck(:id)

        yield ids.min, ids.max
......
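
Reviewer note: a hedged sketch of how the rake task is expected to consume these helpers; the environment values and the block body are examples, not lines from the diff.

```ruby
# Assumed example: bounding the migration to projects 20..50 via the env vars.
ENV['ID_FROM'] = '20'
ENV['ID_TO']   = '50'

helper = Gitlab::HashedStorage::RakeHelper
helper.range_single_item? # => false, since the two bounds differ

helper.project_id_batches do |start_id, finish_id|
  # Each yielded pair covers at most `batch_size` unmigrated projects in range.
  Gitlab::HashedStorage::Migrator.new.bulk_schedule(start_id, finish_id)
end
```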
@@ -13,15 +13,15 @@ module Gitlab
    # All available Themes
    THEMES = [
      Theme.new(1, 'Indigo', 'ui-indigo'),
-     Theme.new(2, 'Light Indigo', 'ui-light-indigo'),
+     Theme.new(6, 'Light Indigo', 'ui-light-indigo'),
-     Theme.new(3, 'Blue', 'ui-blue'),
+     Theme.new(4, 'Blue', 'ui-blue'),
-     Theme.new(4, 'Light Blue', 'ui-light-blue'),
+     Theme.new(7, 'Light Blue', 'ui-light-blue'),
      Theme.new(5, 'Green', 'ui-green'),
-     Theme.new(6, 'Light Green', 'ui-light-green'),
+     Theme.new(8, 'Light Green', 'ui-light-green'),
-     Theme.new(7, 'Red', 'ui-red'),
+     Theme.new(9, 'Red', 'ui-red'),
-     Theme.new(8, 'Light Red', 'ui-light-red'),
+     Theme.new(10, 'Light Red', 'ui-light-red'),
-     Theme.new(9, 'Dark', 'ui-dark'),
+     Theme.new(2, 'Dark', 'ui-dark'),
-     Theme.new(10, 'Light', 'ui-light')
+     Theme.new(3, 'Light', 'ui-light')
    ].freeze

    # Convenience method to get a space-separated String of all the theme
......
@@ -2,9 +2,26 @@ namespace :gitlab do
  namespace :storage do
    desc 'GitLab | Storage | Migrate existing projects to Hashed Storage'
    task migrate_to_hashed: :environment do
-     legacy_projects_count = Project.with_unmigrated_storage.count
+     storage_migrator = Gitlab::HashedStorage::Migrator.new
      helper = Gitlab::HashedStorage::RakeHelper

      if helper.range_single_item?
        project = Project.with_unmigrated_storage.find_by(id: helper.range_from)

        unless project
          puts "There are no projects requiring storage migration with ID=#{helper.range_from}"
          next
        end

        puts "Enqueueing storage migration of #{project.full_path} (ID=#{project.id})..."
        storage_migrator.migrate(project)

        next
      end

      legacy_projects_count = Project.with_unmigrated_storage.count

      if legacy_projects_count == 0
        puts 'There are no projects requiring storage migration. Nothing to do!'

@@ -14,7 +31,7 @@ namespace :gitlab do
      print "Enqueuing migration of #{legacy_projects_count} projects in batches of #{helper.batch_size}"

      helper.project_id_batches do |start, finish|
-       StorageMigratorWorker.perform_async(start, finish)
+       storage_migrator.bulk_schedule(start, finish)

        print '.'
      end
......
...@@ -1297,7 +1297,7 @@ msgstr "" ...@@ -1297,7 +1297,7 @@ msgstr ""
msgid "ClusterIntegration|GitLab Runner" msgid "ClusterIntegration|GitLab Runner"
msgstr "" msgstr ""
msgid "ClusterIntegration|Google Cloud Platform project ID" msgid "ClusterIntegration|Google Cloud Platform project"
msgstr "" msgstr ""
msgid "ClusterIntegration|Google Kubernetes Engine" msgid "ClusterIntegration|Google Kubernetes Engine"
......
...@@ -3,5 +3,9 @@ FactoryBot.define do ...@@ -3,5 +3,9 @@ FactoryBot.define do
project project
enabled true enabled true
domain "example.com" domain "example.com"
trait :disabled do
enabled false
end
end end
end end
...@@ -31,7 +31,7 @@ describe PreferencesHelper do ...@@ -31,7 +31,7 @@ describe PreferencesHelper do
describe '#user_application_theme' do describe '#user_application_theme' do
context 'with a user' do context 'with a user' do
it "returns user's theme's css_class" do it "returns user's theme's css_class" do
stub_user(theme_id: 10) stub_user(theme_id: 3)
expect(helper.user_application_theme).to eq 'ui-light' expect(helper.user_application_theme).to eq 'ui-light'
end end
......
import { webIDEUrl } from '~/lib/utils/url_utility';
describe('URL utility', () => {
describe('webIDEUrl', () => {
afterEach(() => {
gon.relative_url_root = '';
});
describe('without relative_url_root', () => {
it('returns IDE path with route', () => {
expect(webIDEUrl('/gitlab-org/gitlab-ce/merge_requests/1')).toBe(
'/-/ide/project/gitlab-org/gitlab-ce/merge_requests/1',
);
});
});
describe('with relative_url_root', () => {
beforeEach(() => {
gon.relative_url_root = '/gitlab';
});
it('returns IDE path with route', () => {
expect(webIDEUrl('/gitlab/gitlab-org/gitlab-ce/merge_requests/1')).toBe(
'/gitlab/-/ide/project/gitlab-org/gitlab-ce/merge_requests/1',
);
});
});
});
});
...@@ -12,6 +12,7 @@ describe('MRWidgetHeader', () => { ...@@ -12,6 +12,7 @@ describe('MRWidgetHeader', () => {
afterEach(() => { afterEach(() => {
vm.$destroy(); vm.$destroy();
gon.relative_url_root = '';
}); });
describe('computed', () => { describe('computed', () => {
...@@ -145,7 +146,16 @@ describe('MRWidgetHeader', () => { ...@@ -145,7 +146,16 @@ describe('MRWidgetHeader', () => {
const button = vm.$el.querySelector('.js-web-ide'); const button = vm.$el.querySelector('.js-web-ide');
expect(button.textContent.trim()).toEqual('Web IDE'); expect(button.textContent.trim()).toEqual('Web IDE');
expect(button.getAttribute('href')).toEqual('undefined/-/ide/projectabc'); expect(button.getAttribute('href')).toEqual('/-/ide/projectabc');
});
it('renders web ide button with relative URL', () => {
gon.relative_url_root = '/gitlab';
const button = vm.$el.querySelector('.js-web-ide');
expect(button.textContent.trim()).toEqual('Web IDE');
expect(button.getAttribute('href')).toEqual('/-/ide/projectabc');
}); });
it('renders download dropdown with links', () => { it('renders download dropdown with links', () => {
......
...@@ -5,6 +5,13 @@ describe Gitlab::CurrentSettings do ...@@ -5,6 +5,13 @@ describe Gitlab::CurrentSettings do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false') stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
end end
shared_context 'with settings in cache' do
before do
create(:application_setting)
described_class.current_application_settings # warm the cache
end
end
describe '#current_application_settings', :use_clean_rails_memory_store_caching do describe '#current_application_settings', :use_clean_rails_memory_store_caching do
it 'allows keys to be called directly' do it 'allows keys to be called directly' do
db_settings = create(:application_setting, db_settings = create(:application_setting,
...@@ -31,16 +38,29 @@ describe Gitlab::CurrentSettings do ...@@ -31,16 +38,29 @@ describe Gitlab::CurrentSettings do
end end
context 'with DB unavailable' do context 'with DB unavailable' do
before do context 'and settings in cache' do
# For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(false)` causes issues include_context 'with settings in cache'
# during the initialization phase of the test suite, so instead let's mock the internals of it
allow(ActiveRecord::Base.connection).to receive(:active?).and_return(false) it 'fetches the settings from cache without issuing any query' do
expect(ActiveRecord::QueryRecorder.new { described_class.current_application_settings }.count).to eq(0)
end
end end
it 'returns an in-memory ApplicationSetting object' do context 'and no settings in cache' do
expect(ApplicationSetting).not_to receive(:current) before do
# For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(false)` causes issues
# during the initialization phase of the test suite, so instead let's mock the internals of it
allow(ActiveRecord::Base.connection).to receive(:active?).and_return(false)
expect(ApplicationSetting).not_to receive(:current)
end
expect(described_class.current_application_settings).to be_a(Gitlab::FakeApplicationSettings) it 'returns an in-memory ApplicationSetting object' do
expect(described_class.current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
end
it 'does not issue any query' do
expect(ActiveRecord::QueryRecorder.new { described_class.current_application_settings }.count).to eq(0)
end
end end
end end
...@@ -52,73 +72,86 @@ describe Gitlab::CurrentSettings do ...@@ -52,73 +72,86 @@ describe Gitlab::CurrentSettings do
ar_wrapped_defaults.slice(*::ApplicationSetting.defaults.keys) ar_wrapped_defaults.slice(*::ApplicationSetting.defaults.keys)
end end
before do context 'and settings in cache' do
# For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(true)` causes issues include_context 'with settings in cache'
# during the initialization phase of the test suite, so instead let's mock the internals of it
allow(ActiveRecord::Base.connection).to receive(:active?).and_return(true)
allow(ActiveRecord::Base.connection).to receive(:cached_table_exists?).with('application_settings').and_return(true)
end
it 'creates default ApplicationSettings if none are present' do it 'fetches the settings from cache' do
settings = described_class.current_application_settings # For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(true)` causes issues
# during the initialization phase of the test suite, so instead let's mock the internals of it
expect(settings).to be_a(ApplicationSetting) expect(ActiveRecord::Base.connection).not_to receive(:active?)
expect(settings).to be_persisted expect(ActiveRecord::Base.connection).not_to receive(:cached_table_exists?)
expect(settings).to have_attributes(settings_from_defaults) expect(ActiveRecord::Migrator).not_to receive(:needs_migration?)
expect(ActiveRecord::QueryRecorder.new { described_class.current_application_settings }.count).to eq(0)
end
end end
context 'with migrations pending' do context 'and no settings in cache' do
before do before do
expect(ActiveRecord::Migrator).to receive(:needs_migration?).and_return(true) allow(ActiveRecord::Base.connection).to receive(:active?).and_return(true)
allow(ActiveRecord::Base.connection).to receive(:cached_table_exists?).with('application_settings').and_return(true)
end end
it 'returns an in-memory ApplicationSetting object' do it 'creates default ApplicationSettings if none are present' do
settings = described_class.current_application_settings settings = described_class.current_application_settings
expect(settings).to be_a(Gitlab::FakeApplicationSettings) expect(settings).to be_a(ApplicationSetting)
expect(settings.sign_in_enabled?).to eq(settings.sign_in_enabled) expect(settings).to be_persisted
expect(settings.sign_up_enabled?).to eq(settings.sign_up_enabled) expect(settings).to have_attributes(settings_from_defaults)
end end
it 'uses the existing database settings and falls back to defaults' do context 'with migrations pending' do
db_settings = create(:application_setting, before do
home_page_url: 'http://mydomain.com', expect(ActiveRecord::Migrator).to receive(:needs_migration?).and_return(true)
signup_enabled: false) end
settings = described_class.current_application_settings
app_defaults = ApplicationSetting.last it 'returns an in-memory ApplicationSetting object' do
settings = described_class.current_application_settings
expect(settings).to be_a(Gitlab::FakeApplicationSettings)
expect(settings.home_page_url).to eq(db_settings.home_page_url) expect(settings).to be_a(Gitlab::FakeApplicationSettings)
expect(settings.signup_enabled?).to be_falsey expect(settings.sign_in_enabled?).to eq(settings.sign_in_enabled)
expect(settings.signup_enabled).to be_falsey expect(settings.sign_up_enabled?).to eq(settings.sign_up_enabled)
end
# Check that unspecified values use the defaults
settings.reject! { |key, _| [:home_page_url, :signup_enabled].include? key } it 'uses the existing database settings and falls back to defaults' do
settings.each { |key, _| expect(settings[key]).to eq(app_defaults[key]) } db_settings = create(:application_setting,
home_page_url: 'http://mydomain.com',
signup_enabled: false)
settings = described_class.current_application_settings
app_defaults = ApplicationSetting.last
expect(settings).to be_a(Gitlab::FakeApplicationSettings)
expect(settings.home_page_url).to eq(db_settings.home_page_url)
expect(settings.signup_enabled?).to be_falsey
expect(settings.signup_enabled).to be_falsey
# Check that unspecified values use the defaults
settings.reject! { |key, _| [:home_page_url, :signup_enabled].include? key }
settings.each { |key, _| expect(settings[key]).to eq(app_defaults[key]) }
end
end end
end
context 'when ApplicationSettings.current is present' do context 'when ApplicationSettings.current is present' do
it 'returns the existing application settings' do it 'returns the existing application settings' do
expect(ApplicationSetting).to receive(:current).and_return(:current_settings) expect(ApplicationSetting).to receive(:current).and_return(:current_settings)
expect(described_class.current_application_settings).to eq(:current_settings) expect(described_class.current_application_settings).to eq(:current_settings)
end
end end
end
 context 'when the application_settings table does not exist' do context 'when the application_settings table does not exist' do
it 'returns an in-memory ApplicationSetting object' do it 'returns an in-memory ApplicationSetting object' do
expect(ApplicationSetting).to receive(:create_from_defaults).and_raise(ActiveRecord::StatementInvalid) expect(ApplicationSetting).to receive(:create_from_defaults).and_raise(ActiveRecord::StatementInvalid)
expect(described_class.current_application_settings).to be_a(Gitlab::FakeApplicationSettings) expect(described_class.current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
end
end end
end
context 'when the application_settings table is not fully migrated' do context 'when the application_settings table is not fully migrated' do
it 'returns an in-memory ApplicationSetting object' do it 'returns an in-memory ApplicationSetting object' do
expect(ApplicationSetting).to receive(:create_from_defaults).and_raise(ActiveRecord::UnknownAttributeError) expect(ApplicationSetting).to receive(:create_from_defaults).and_raise(ActiveRecord::UnknownAttributeError)
expect(described_class.current_application_settings).to be_a(Gitlab::FakeApplicationSettings) expect(described_class.current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
end
end end
end end
end end
......
...@@ -7,9 +7,15 @@ RSpec.describe Gitlab::Favicon, :request_store do ...@@ -7,9 +7,15 @@ RSpec.describe Gitlab::Favicon, :request_store do
expect(described_class.main).to match_asset_path '/assets/favicon.png' expect(described_class.main).to match_asset_path '/assets/favicon.png'
end end
<<<<<<< HEAD
it 'has green favicon for development' do it 'has green favicon for development' do
allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('development')) allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('development'))
expect(described_class.main).to match_asset_path 'favicon-green.png' expect(described_class.main).to match_asset_path 'favicon-green.png'
=======
it 'has blue favicon for development' do
allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('development'))
expect(described_class.main).to match_asset_path '/assets/favicon-blue.png'
>>>>>>> upstream/master
end end
it 'has yellow favicon for canary' do it 'has yellow favicon for canary' do
......
require 'spec_helper'
describe Gitlab::HashedStorage::Migrator do
describe '#bulk_schedule' do
it 'schedules job to StorageMigratorWorker' do
Sidekiq::Testing.fake! do
expect { subject.bulk_schedule(1, 5) }.to change(StorageMigratorWorker.jobs, :size).by(1)
end
end
end
describe '#bulk_migrate' do
let(:projects) { create_list(:project, 2, :legacy_storage) }
let(:ids) { projects.map(&:id) }
    it 'enqueues jobs to ProjectMigrateHashedStorageWorker' do
Sidekiq::Testing.fake! do
expect { subject.bulk_migrate(ids.min, ids.max) }.to change(ProjectMigrateHashedStorageWorker.jobs, :size).by(2)
end
end
it 'sets projects as read only' do
allow(ProjectMigrateHashedStorageWorker).to receive(:perform_async).twice
subject.bulk_migrate(ids.min, ids.max)
projects.each do |project|
expect(project.reload.repository_read_only?).to be_truthy
end
end
    it 'rescues and logs exceptions' do
allow_any_instance_of(Project).to receive(:migrate_to_hashed_storage!).and_raise(StandardError)
expect { subject.bulk_migrate(ids.min, ids.max) }.not_to raise_error
end
it 'delegates each project in specified range to #migrate' do
projects.each do |project|
expect(subject).to receive(:migrate).with(project)
end
subject.bulk_migrate(ids.min, ids.max)
end
end
describe '#migrate' do
let(:project) { create(:project, :legacy_storage, :empty_repo) }
it 'enqueues job to ProjectMigrateHashedStorageWorker' do
Sidekiq::Testing.fake! do
expect { subject.migrate(project) }.to change(ProjectMigrateHashedStorageWorker.jobs, :size).by(1)
end
end
    it 'rescues and logs exceptions' do
allow(project).to receive(:migrate_to_hashed_storage!).and_raise(StandardError)
expect { subject.migrate(project) }.not_to raise_error
end
it 'sets project as read only' do
allow(ProjectMigrateHashedStorageWorker).to receive(:perform_async)
subject.migrate(project)
expect(project.reload.repository_read_only?).to be_truthy
end
    it 'migrates the project' do
Sidekiq::Testing.inline! do
subject.migrate(project)
end
expect(project.reload.hashed_storage?(:attachments)).to be_truthy
end
end
end
...@@ -6,7 +6,7 @@ describe Gitlab::Themes, lib: true do ...@@ -6,7 +6,7 @@ describe Gitlab::Themes, lib: true do
css = described_class.body_classes css = described_class.body_classes
expect(css).to include('ui-indigo') expect(css).to include('ui-indigo')
expect(css).to include('ui-dark ') expect(css).to include('ui-dark')
expect(css).to include('ui-blue') expect(css).to include('ui-blue')
end end
end end
...@@ -14,7 +14,7 @@ describe Gitlab::Themes, lib: true do ...@@ -14,7 +14,7 @@ describe Gitlab::Themes, lib: true do
describe '.by_id' do describe '.by_id' do
it 'returns a Theme by its ID' do it 'returns a Theme by its ID' do
expect(described_class.by_id(1).name).to eq 'Indigo' expect(described_class.by_id(1).name).to eq 'Indigo'
expect(described_class.by_id(10).name).to eq 'Light' expect(described_class.by_id(3).name).to eq 'Light'
end end
end end
......
...@@ -190,7 +190,10 @@ describe Gitlab::UsageData do ...@@ -190,7 +190,10 @@ describe Gitlab::UsageData do
expect(subject[:license_md5]).to eq(Digest::MD5.hexdigest(license.data)) expect(subject[:license_md5]).to eq(Digest::MD5.hexdigest(license.data))
expect(subject[:license_id]).to eq(license.license_id) expect(subject[:license_id]).to eq(license.license_id)
expect(subject[:version]).to eq(Gitlab::VERSION) expect(subject[:version]).to eq(Gitlab::VERSION)
<<<<<<< HEAD
expect(subject[:licensee]).to eq(license.licensee) expect(subject[:licensee]).to eq(license.licensee)
=======
>>>>>>> upstream/master
expect(subject[:installation_type]).to eq(Gitlab::INSTALLATION_TYPE) expect(subject[:installation_type]).to eq(Gitlab::INSTALLATION_TYPE)
expect(subject[:active_user_count]).to eq(User.active.count) expect(subject[:active_user_count]).to eq(User.active.count)
expect(subject[:licensee]).to eq(license.licensee) expect(subject[:licensee]).to eq(license.licensee)
......
...@@ -71,4 +71,97 @@ describe ProjectAutoDevops do ...@@ -71,4 +71,97 @@ describe ProjectAutoDevops do
{ key: 'AUTO_DEVOPS_DOMAIN', value: 'example.com', public: true } { key: 'AUTO_DEVOPS_DOMAIN', value: 'example.com', public: true }
end end
end end
describe '#set_gitlab_deploy_token' do
let(:auto_devops) { build(:project_auto_devops, project: project) }
context 'when the project is public' do
let(:project) { create(:project, :repository, :public) }
it 'should not create a gitlab deploy token' do
expect do
auto_devops.save
end.not_to change { DeployToken.count }
end
end
context 'when the project is internal' do
let(:project) { create(:project, :repository, :internal) }
it 'should create a gitlab deploy token' do
expect do
auto_devops.save
end.to change { DeployToken.count }.by(1)
end
end
context 'when the project is private' do
let(:project) { create(:project, :repository, :private) }
it 'should create a gitlab deploy token' do
expect do
auto_devops.save
end.to change { DeployToken.count }.by(1)
end
end
context 'when autodevops is enabled at project level' do
let(:project) { create(:project, :repository, :internal) }
let(:auto_devops) { build(:project_auto_devops, project: project) }
it 'should create a deploy token' do
expect do
auto_devops.save
end.to change { DeployToken.count }.by(1)
end
end
    context 'when autodevops is enabled at instance level' do
let(:project) { create(:project, :repository, :internal) }
let(:auto_devops) { build(:project_auto_devops, :disabled, project: project) }
it 'should create a deploy token' do
allow(Gitlab::CurrentSettings).to receive(:auto_devops_enabled?).and_return(true)
expect do
auto_devops.save
end.to change { DeployToken.count }.by(1)
end
end
context 'when autodevops is disabled' do
let(:project) { create(:project, :repository, :internal) }
let(:auto_devops) { build(:project_auto_devops, :disabled, project: project) }
it 'should not create a deploy token' do
expect do
auto_devops.save
end.not_to change { DeployToken.count }
end
end
context 'when the project already has an active gitlab-deploy-token' do
let(:project) { create(:project, :repository, :internal) }
let!(:deploy_token) { create(:deploy_token, :gitlab_deploy_token, projects: [project]) }
let(:auto_devops) { build(:project_auto_devops, project: project) }
it 'should not create a deploy token' do
expect do
auto_devops.save
end.not_to change { DeployToken.count }
end
end
context 'when the project already has a revoked gitlab-deploy-token' do
let(:project) { create(:project, :repository, :internal) }
let!(:deploy_token) { create(:deploy_token, :gitlab_deploy_token, :expired, projects: [project]) }
let(:auto_devops) { build(:project_auto_devops, project: project) }
it 'should not create a deploy token' do
expect do
auto_devops.save
end.not_to change { DeployToken.count }
end
end
end
end end
...@@ -821,6 +821,18 @@ describe API::Runner, :clean_gitlab_redis_shared_state do ...@@ -821,6 +821,18 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
expect(job.reload.trace.raw).to eq 'BUILD TRACE' expect(job.reload.trace.raw).to eq 'BUILD TRACE'
end end
context 'when running state is sent' do
    it 'updates updated_at value' do
expect { update_job_after_time }.to change { job.reload.updated_at }
end
end
context 'when other state is sent' do
it "doesn't update update_at value" do
expect { update_job_after_time(20.minutes, state: 'success') }.not_to change { job.reload.updated_at }
end
end
end end
context 'when job has been erased' do context 'when job has been erased' do
...@@ -843,6 +855,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do ...@@ -843,6 +855,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
update_job(state: 'success', trace: 'BUILD TRACE UPDATED') update_job(state: 'success', trace: 'BUILD TRACE UPDATED')
expect(response).to have_gitlab_http_status(403) expect(response).to have_gitlab_http_status(403)
expect(response.header['Job-Status']).to eq 'failed'
expect(job.trace.raw).to eq 'Job failed' expect(job.trace.raw).to eq 'Job failed'
expect(job).to be_failed expect(job).to be_failed
end end
...@@ -852,6 +865,12 @@ describe API::Runner, :clean_gitlab_redis_shared_state do ...@@ -852,6 +865,12 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
new_params = params.merge(token: token) new_params = params.merge(token: token)
put api("/jobs/#{job.id}"), new_params put api("/jobs/#{job.id}"), new_params
end end
    def update_job_after_time(update_interval = 20.minutes, state: 'running')
Timecop.travel(job.updated_at + update_interval) do
update_job(job.token, state: state)
end
end
end end
describe 'PATCH /api/v4/jobs/:id/trace' do describe 'PATCH /api/v4/jobs/:id/trace' do
...@@ -984,6 +1003,17 @@ describe API::Runner, :clean_gitlab_redis_shared_state do ...@@ -984,6 +1003,17 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end end
end end
end end
context 'when the job is canceled' do
before do
job.cancel
patch_the_trace
end
it 'receives status in header' do
expect(response.header['Job-Status']).to eq 'canceled'
end
end
end end
context 'when Runner makes a force-patch' do context 'when Runner makes a force-patch' do
......
require 'spec_helper'
describe NotificationRecipientService do
let(:service) { described_class }
let(:assignee) { create(:user) }
let(:project) { create(:project, :public) }
let(:other_projects) { create_list(:project, 5, :public) }
describe '#build_new_note_recipients' do
let(:issue) { create(:issue, project: project, assignees: [assignee]) }
let(:note) { create(:note_on_issue, noteable: issue, project_id: issue.project_id) }
def create_watcher
watcher = create(:user)
create(:notification_setting, project: project, user: watcher, level: :watch)
other_projects.each do |other_project|
create(:notification_setting, project: other_project, user: watcher, level: :watch)
end
end
it 'avoids N+1 queries' do
create_watcher
service.build_new_note_recipients(note)
control_count = ActiveRecord::QueryRecorder.new do
service.build_new_note_recipients(note)
end
create_watcher
expect { service.build_new_note_recipients(note) }.not_to exceed_query_limit(control_count)
end
end
end
require 'rake_helper' require 'rake_helper'
describe 'gitlab:storage:*' do describe 'rake gitlab:storage:*' do
before do before do
Rake.application.rake_require 'tasks/gitlab/storage' Rake.application.rake_require 'tasks/gitlab/storage'
...@@ -44,16 +44,18 @@ describe 'gitlab:storage:*' do ...@@ -44,16 +44,18 @@ describe 'gitlab:storage:*' do
end end
describe 'gitlab:storage:migrate_to_hashed' do describe 'gitlab:storage:migrate_to_hashed' do
let(:task) { 'gitlab:storage:migrate_to_hashed' }
context '0 legacy projects' do context '0 legacy projects' do
it 'does nothing' do it 'does nothing' do
expect(StorageMigratorWorker).not_to receive(:perform_async) expect(StorageMigratorWorker).not_to receive(:perform_async)
run_rake_task('gitlab:storage:migrate_to_hashed') run_rake_task(task)
end end
end end
context '3 legacy projects' do context '3 legacy projects' do
let(:projects) { create_list(:project, 3, storage_version: 0) } let(:projects) { create_list(:project, 3, :legacy_storage) }
context 'in batches of 1' do context 'in batches of 1' do
before do before do
...@@ -65,7 +67,7 @@ describe 'gitlab:storage:*' do ...@@ -65,7 +67,7 @@ describe 'gitlab:storage:*' do
expect(StorageMigratorWorker).to receive(:perform_async).with(project.id, project.id) expect(StorageMigratorWorker).to receive(:perform_async).with(project.id, project.id)
end end
run_rake_task('gitlab:storage:migrate_to_hashed') run_rake_task(task)
end end
end end
...@@ -80,23 +82,48 @@ describe 'gitlab:storage:*' do ...@@ -80,23 +82,48 @@ describe 'gitlab:storage:*' do
expect(StorageMigratorWorker).to receive(:perform_async).with(first, last) expect(StorageMigratorWorker).to receive(:perform_async).with(first, last)
end end
run_rake_task('gitlab:storage:migrate_to_hashed') run_rake_task(task)
end end
end end
end end
context 'with same id in range' do
    it 'displays a message when the project cannot be found' do
stub_env('ID_FROM', 99999)
stub_env('ID_TO', 99999)
expect { run_rake_task(task) }.to output(/There are no projects requiring storage migration with ID=99999/).to_stdout
end
    it 'displays a message when the project exists but is already migrated' do
project = create(:project)
stub_env('ID_FROM', project.id)
stub_env('ID_TO', project.id)
expect { run_rake_task(task) }.to output(/There are no projects requiring storage migration with ID=#{project.id}/).to_stdout
end
it 'enqueues migration when project can be found' do
project = create(:project, :legacy_storage)
stub_env('ID_FROM', project.id)
stub_env('ID_TO', project.id)
expect { run_rake_task(task) }.to output(/Enqueueing storage migration .* \(ID=#{project.id}\)/).to_stdout
end
end
end end
describe 'gitlab:storage:legacy_projects' do describe 'gitlab:storage:legacy_projects' do
it_behaves_like 'rake entities summary', 'projects', 'Legacy' do it_behaves_like 'rake entities summary', 'projects', 'Legacy' do
let(:task) { 'gitlab:storage:legacy_projects' } let(:task) { 'gitlab:storage:legacy_projects' }
let(:create_collection) { create_list(:project, 3, storage_version: 0) } let(:create_collection) { create_list(:project, 3, :legacy_storage) }
end end
end end
describe 'gitlab:storage:list_legacy_projects' do describe 'gitlab:storage:list_legacy_projects' do
it_behaves_like 'rake listing entities', 'projects', 'Legacy' do it_behaves_like 'rake listing entities', 'projects', 'Legacy' do
let(:task) { 'gitlab:storage:list_legacy_projects' } let(:task) { 'gitlab:storage:list_legacy_projects' }
let(:create_collection) { create_list(:project, 3, storage_version: 0) } let(:create_collection) { create_list(:project, 3, :legacy_storage) }
end end
end end
...@@ -133,7 +160,7 @@ describe 'gitlab:storage:*' do ...@@ -133,7 +160,7 @@ describe 'gitlab:storage:*' do
describe 'gitlab:storage:hashed_attachments' do describe 'gitlab:storage:hashed_attachments' do
it_behaves_like 'rake entities summary', 'attachments', 'Hashed' do it_behaves_like 'rake entities summary', 'attachments', 'Hashed' do
let(:task) { 'gitlab:storage:hashed_attachments' } let(:task) { 'gitlab:storage:hashed_attachments' }
let(:project) { create(:project, storage_version: 2) } let(:project) { create(:project) }
let(:create_collection) { create_list(:upload, 3, model: project) } let(:create_collection) { create_list(:upload, 3, model: project) }
end end
end end
...@@ -141,7 +168,7 @@ describe 'gitlab:storage:*' do ...@@ -141,7 +168,7 @@ describe 'gitlab:storage:*' do
describe 'gitlab:storage:list_hashed_attachments' do describe 'gitlab:storage:list_hashed_attachments' do
it_behaves_like 'rake listing entities', 'attachments', 'Hashed' do it_behaves_like 'rake listing entities', 'attachments', 'Hashed' do
let(:task) { 'gitlab:storage:list_hashed_attachments' } let(:task) { 'gitlab:storage:list_hashed_attachments' }
let(:project) { create(:project, storage_version: 2) } let(:project) { create(:project) }
let(:create_collection) { create_list(:upload, 3, model: project) } let(:create_collection) { create_list(:upload, 3, model: project) }
end end
end end
......
...@@ -2,29 +2,24 @@ require 'spec_helper' ...@@ -2,29 +2,24 @@ require 'spec_helper'
describe StorageMigratorWorker do describe StorageMigratorWorker do
subject(:worker) { described_class.new } subject(:worker) { described_class.new }
let(:projects) { create_list(:project, 2, :legacy_storage) } let(:projects) { create_list(:project, 2, :legacy_storage, :empty_repo) }
let(:ids) { projects.map(&:id) }
describe '#perform' do describe '#perform' do
let(:ids) { projects.map(&:id) } it 'delegates to MigratorService' do
expect_any_instance_of(Gitlab::HashedStorage::Migrator).to receive(:bulk_migrate).with(5, 10)
it 'enqueue jobs to ProjectMigrateHashedStorageWorker' do worker.perform(5, 10)
expect(ProjectMigrateHashedStorageWorker).to receive(:perform_async).twice
worker.perform(ids.min, ids.max)
end end
it 'sets projects as read only' do it 'migrates projects in the specified range' do
allow(ProjectMigrateHashedStorageWorker).to receive(:perform_async).twice Sidekiq::Testing.inline! do
worker.perform(ids.min, ids.max) worker.perform(ids.min, ids.max)
end
projects.each do |project| projects.each do |project|
expect(project.reload.repository_read_only?).to be_truthy expect(project.reload.hashed_storage?(:attachments)).to be_truthy
end end
end end
it 'rescues and log exceptions' do
allow_any_instance_of(Project).to receive(:migrate_to_hashed_storage!).and_raise(StandardError)
expect { worker.perform(ids.min, ids.max) }.not_to raise_error
end
end end
end end