Commit 56c73eaf authored by Valery Sizov

Merge branch 'ce-to-ee' into 'master'

CE Upstream - Tuesday

Closes #2890, omnibus-gitlab#2545, and gitlab-qa#56

See merge request !2402
parents d16c3015 89c2d7c9
...@@ -209,69 +209,69 @@ setup-test-env: ...@@ -209,69 +209,69 @@ setup-test-env:
- public/assets - public/assets
- tmp/tests - tmp/tests
rspec-pg 0 20: *rspec-knapsack-pg rspec-pg 0 25: *rspec-knapsack-pg
rspec-pg 1 20: *rspec-knapsack-pg rspec-pg 1 25: *rspec-knapsack-pg
rspec-pg 2 20: *rspec-knapsack-pg rspec-pg 2 25: *rspec-knapsack-pg
rspec-pg 3 20: *rspec-knapsack-pg rspec-pg 3 25: *rspec-knapsack-pg
rspec-pg 4 20: *rspec-knapsack-pg rspec-pg 4 25: *rspec-knapsack-pg
rspec-pg 5 20: *rspec-knapsack-pg rspec-pg 5 25: *rspec-knapsack-pg
rspec-pg 6 20: *rspec-knapsack-pg rspec-pg 6 25: *rspec-knapsack-pg
rspec-pg 7 20: *rspec-knapsack-pg rspec-pg 7 25: *rspec-knapsack-pg
rspec-pg 8 20: *rspec-knapsack-pg rspec-pg 8 25: *rspec-knapsack-pg
rspec-pg 9 20: *rspec-knapsack-pg rspec-pg 9 25: *rspec-knapsack-pg
rspec-pg 10 20: *rspec-knapsack-pg rspec-pg 10 25: *rspec-knapsack-pg
rspec-pg 11 20: *rspec-knapsack-pg rspec-pg 11 25: *rspec-knapsack-pg
rspec-pg 12 20: *rspec-knapsack-pg rspec-pg 12 25: *rspec-knapsack-pg
rspec-pg 13 20: *rspec-knapsack-pg rspec-pg 13 25: *rspec-knapsack-pg
rspec-pg 14 20: *rspec-knapsack-pg rspec-pg 14 25: *rspec-knapsack-pg
rspec-pg 15 20: *rspec-knapsack-pg rspec-pg 15 25: *rspec-knapsack-pg
rspec-pg 16 20: *rspec-knapsack-pg rspec-pg 16 25: *rspec-knapsack-pg
rspec-pg 17 20: *rspec-knapsack-pg rspec-pg 17 25: *rspec-knapsack-pg
rspec-pg 18 20: *rspec-knapsack-pg rspec-pg 18 25: *rspec-knapsack-pg
rspec-pg 19 20: *rspec-knapsack-pg rspec-pg 19 25: *rspec-knapsack-pg
rspec-pg 20 25: *rspec-knapsack-pg
rspec-mysql 0 20: *rspec-knapsack-mysql rspec-pg 21 25: *rspec-knapsack-pg
rspec-mysql 1 20: *rspec-knapsack-mysql rspec-pg 22 25: *rspec-knapsack-pg
rspec-mysql 2 20: *rspec-knapsack-mysql rspec-pg 23 25: *rspec-knapsack-pg
rspec-mysql 3 20: *rspec-knapsack-mysql rspec-pg 24 25: *rspec-knapsack-pg
rspec-mysql 4 20: *rspec-knapsack-mysql
rspec-mysql 5 20: *rspec-knapsack-mysql rspec-mysql 0 25: *rspec-knapsack-mysql
rspec-mysql 6 20: *rspec-knapsack-mysql rspec-mysql 1 25: *rspec-knapsack-mysql
rspec-mysql 7 20: *rspec-knapsack-mysql rspec-mysql 2 25: *rspec-knapsack-mysql
rspec-mysql 8 20: *rspec-knapsack-mysql rspec-mysql 3 25: *rspec-knapsack-mysql
rspec-mysql 9 20: *rspec-knapsack-mysql rspec-mysql 4 25: *rspec-knapsack-mysql
rspec-mysql 10 20: *rspec-knapsack-mysql rspec-mysql 5 25: *rspec-knapsack-mysql
rspec-mysql 11 20: *rspec-knapsack-mysql rspec-mysql 6 25: *rspec-knapsack-mysql
rspec-mysql 12 20: *rspec-knapsack-mysql rspec-mysql 7 25: *rspec-knapsack-mysql
rspec-mysql 13 20: *rspec-knapsack-mysql rspec-mysql 8 25: *rspec-knapsack-mysql
rspec-mysql 14 20: *rspec-knapsack-mysql rspec-mysql 9 25: *rspec-knapsack-mysql
rspec-mysql 15 20: *rspec-knapsack-mysql rspec-mysql 10 25: *rspec-knapsack-mysql
rspec-mysql 16 20: *rspec-knapsack-mysql rspec-mysql 11 25: *rspec-knapsack-mysql
rspec-mysql 17 20: *rspec-knapsack-mysql rspec-mysql 12 25: *rspec-knapsack-mysql
rspec-mysql 18 20: *rspec-knapsack-mysql rspec-mysql 13 25: *rspec-knapsack-mysql
rspec-mysql 19 20: *rspec-knapsack-mysql rspec-mysql 14 25: *rspec-knapsack-mysql
rspec-mysql 15 25: *rspec-knapsack-mysql
spinach-pg 0 10: *spinach-knapsack-pg rspec-mysql 16 25: *rspec-knapsack-mysql
spinach-pg 1 10: *spinach-knapsack-pg rspec-mysql 17 25: *rspec-knapsack-mysql
spinach-pg 2 10: *spinach-knapsack-pg rspec-mysql 18 25: *rspec-knapsack-mysql
spinach-pg 3 10: *spinach-knapsack-pg rspec-mysql 19 25: *rspec-knapsack-mysql
spinach-pg 4 10: *spinach-knapsack-pg rspec-mysql 20 25: *rspec-knapsack-mysql
spinach-pg 5 10: *spinach-knapsack-pg rspec-mysql 21 25: *rspec-knapsack-mysql
spinach-pg 6 10: *spinach-knapsack-pg rspec-mysql 22 25: *rspec-knapsack-mysql
spinach-pg 7 10: *spinach-knapsack-pg rspec-mysql 23 25: *rspec-knapsack-mysql
spinach-pg 8 10: *spinach-knapsack-pg rspec-mysql 24 25: *rspec-knapsack-mysql
spinach-pg 9 10: *spinach-knapsack-pg
spinach-pg 0 5: *spinach-knapsack-pg
spinach-mysql 0 10: *spinach-knapsack-mysql spinach-pg 1 5: *spinach-knapsack-pg
spinach-mysql 1 10: *spinach-knapsack-mysql spinach-pg 2 5: *spinach-knapsack-pg
spinach-mysql 2 10: *spinach-knapsack-mysql spinach-pg 3 5: *spinach-knapsack-pg
spinach-mysql 3 10: *spinach-knapsack-mysql spinach-pg 4 5: *spinach-knapsack-pg
spinach-mysql 4 10: *spinach-knapsack-mysql
spinach-mysql 5 10: *spinach-knapsack-mysql spinach-mysql 0 5: *spinach-knapsack-mysql
spinach-mysql 6 10: *spinach-knapsack-mysql spinach-mysql 1 5: *spinach-knapsack-mysql
spinach-mysql 7 10: *spinach-knapsack-mysql spinach-mysql 2 5: *spinach-knapsack-mysql
spinach-mysql 8 10: *spinach-knapsack-mysql spinach-mysql 3 5: *spinach-knapsack-mysql
spinach-mysql 9 10: *spinach-knapsack-mysql spinach-mysql 4 5: *spinach-knapsack-mysql
# Static analysis jobs # Static analysis jobs
.ruby-static-analysis: &ruby-static-analysis .ruby-static-analysis: &ruby-static-analysis
......
...@@ -2,7 +2,6 @@ source 'https://rubygems.org' ...@@ -2,7 +2,6 @@ source 'https://rubygems.org'
gem 'rails', '4.2.8' gem 'rails', '4.2.8'
gem 'rails-deprecated_sanitizer', '~> 1.0.3' gem 'rails-deprecated_sanitizer', '~> 1.0.3'
gem 'bootsnap', '~> 1.1'
# Responders respond_to and respond_with # Responders respond_to and respond_with
gem 'responders', '~> 2.0' gem 'responders', '~> 2.0'
......
...@@ -91,8 +91,6 @@ GEM ...@@ -91,8 +91,6 @@ GEM
bindata (2.3.5) bindata (2.3.5)
binding_of_caller (0.7.2) binding_of_caller (0.7.2)
debug_inspector (>= 0.0.1) debug_inspector (>= 0.0.1)
bootsnap (1.1.1)
msgpack (~> 1.0)
bootstrap-sass (3.3.6) bootstrap-sass (3.3.6)
autoprefixer-rails (>= 5.2.1) autoprefixer-rails (>= 5.2.1)
sass (>= 3.3.4) sass (>= 3.3.4)
...@@ -493,7 +491,6 @@ GEM ...@@ -493,7 +491,6 @@ GEM
minitest (5.7.0) minitest (5.7.0)
mmap2 (2.2.7) mmap2 (2.2.7)
mousetrap-rails (1.4.6) mousetrap-rails (1.4.6)
msgpack (1.1.0)
multi_json (1.12.1) multi_json (1.12.1)
multi_xml (0.6.0) multi_xml (0.6.0)
multipart-post (2.0.0) multipart-post (2.0.0)
...@@ -960,7 +957,6 @@ DEPENDENCIES ...@@ -960,7 +957,6 @@ DEPENDENCIES
benchmark-ips (~> 2.3.0) benchmark-ips (~> 2.3.0)
better_errors (~> 2.1.0) better_errors (~> 2.1.0)
binding_of_caller (~> 0.7.2) binding_of_caller (~> 0.7.2)
bootsnap (~> 1.1)
bootstrap-sass (~> 3.3.0) bootstrap-sass (~> 3.3.0)
bootstrap_form (~> 2.7.0) bootstrap_form (~> 2.7.0)
brakeman (~> 3.6.0) brakeman (~> 3.6.0)
......
...@@ -46,6 +46,8 @@ export default { ...@@ -46,6 +46,8 @@ export default {
}, },
methods: { methods: {
changePage(e) { changePage(e) {
if (e.target.parentElement.classList.contains('disabled')) return;
const text = e.target.innerText; const text = e.target.innerText;
const { totalPages, nextPage, previousPage } = this.pageInfo; const { totalPages, nextPage, previousPage } = this.pageInfo;
...@@ -82,7 +84,9 @@ export default { ...@@ -82,7 +84,9 @@ export default {
const page = this.pageInfo.page; const page = this.pageInfo.page;
const items = []; const items = [];
if (page > 1) items.push({ title: FIRST }); if (page > 1) {
items.push({ title: FIRST, first: true });
}
if (page > 1) { if (page > 1) {
items.push({ title: PREV, prev: true }); items.push({ title: PREV, prev: true });
...@@ -110,7 +114,9 @@ export default { ...@@ -110,7 +114,9 @@ export default {
items.push({ title: NEXT, next: true }); items.push({ title: NEXT, next: true });
} }
if (total - page >= 1) items.push({ title: LAST, last: true }); if (total - page >= 1) {
items.push({ title: LAST, last: true });
}
return items; return items;
}, },
...@@ -124,13 +130,15 @@ export default { ...@@ -124,13 +130,15 @@ export default {
v-for="item in getItems" v-for="item in getItems"
:class="{ :class="{
page: item.page, page: item.page,
prev: item.prev, 'js-previous-button': item.prev,
next: item.next, 'js-next-button': item.next,
'js-last-button': item.last,
'js-first-button': item.first,
separator: item.separator, separator: item.separator,
active: item.active, active: item.active,
disabled: item.disabled disabled: item.disabled
}"> }">
<a @click="changePage($event)">{{item.title}}</a> <a @click.prevent="changePage($event)">{{item.title}}</a>
</li> </li>
</ul> </ul>
</div> </div>
......
...@@ -125,7 +125,7 @@ ...@@ -125,7 +125,7 @@
.dropdown-menu { .dropdown-menu {
margin-top: 11px; margin-top: 11px;
z-index: 200; z-index: 300;
} }
.ci-action-icon-wrapper { .ci-action-icon-wrapper {
......
module IssuableCollections module IssuableCollections
extend ActiveSupport::Concern extend ActiveSupport::Concern
include SortingHelper include SortingHelper
include Gitlab::IssuableMetadata
included do included do
helper_method :issues_finder helper_method :issues_finder
...@@ -9,39 +10,6 @@ module IssuableCollections ...@@ -9,39 +10,6 @@ module IssuableCollections
private private
def issuable_meta_data(issuable_collection, collection_type)
# map has to be used here since using pluck or select will
# throw an error when ordering issuables by priority which inserts
# a new order into the collection.
# We cannot use reorder to not mess up the paginated collection.
issuable_ids = issuable_collection.map(&:id)
return {} if issuable_ids.empty?
issuable_note_count = Note.count_for_collection(issuable_ids, @collection_type)
issuable_votes_count = AwardEmoji.votes_for_collection(issuable_ids, @collection_type)
issuable_merge_requests_count =
if collection_type == 'Issue'
MergeRequestsClosingIssues.count_for_collection(issuable_ids)
else
[]
end
issuable_ids.each_with_object({}) do |id, issuable_meta|
downvotes = issuable_votes_count.find { |votes| votes.awardable_id == id && votes.downvote? }
upvotes = issuable_votes_count.find { |votes| votes.awardable_id == id && votes.upvote? }
notes = issuable_note_count.find { |notes| notes.noteable_id == id }
merge_requests = issuable_merge_requests_count.find { |mr| mr.first == id }
issuable_meta[id] = Issuable::IssuableMeta.new(
upvotes.try(:count).to_i,
downvotes.try(:count).to_i,
notes.try(:count).to_i,
merge_requests.try(:last).to_i
)
end
end
def issues_collection def issues_collection
issues_finder.execute.preload(:project, :author, :assignees, :labels, :milestone, project: :namespace) issues_finder.execute.preload(:project, :author, :assignees, :labels, :milestone, project: :namespace)
end end
......
module RequiresHealthToken module RequiresWhitelistedMonitoringClient
extend ActiveSupport::Concern extend ActiveSupport::Concern
included do included do
before_action :validate_health_check_access! before_action :validate_ip_whitelisted_or_valid_token!
end end
private private
def validate_health_check_access! def validate_ip_whitelisted_or_valid_token!
render_404 unless token_valid? render_404 unless client_ip_whitelisted? || valid_token?
end end
def token_valid? def client_ip_whitelisted?
ip_whitelist.any? { |e| e.include?(Gitlab::RequestContext.client_ip) }
end
def ip_whitelist
@ip_whitelist ||= Settings.monitoring.ip_whitelist.map(&IPAddr.method(:new))
end
def valid_token?
token = params[:token].presence || request.headers['TOKEN'] token = params[:token].presence || request.headers['TOKEN']
token.present? && token.present? &&
ActiveSupport::SecurityUtils.variable_size_secure_compare( ActiveSupport::SecurityUtils.variable_size_secure_compare(
......
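The new `RequiresWhitelistedMonitoringClient` concern above matches the request IP against the configured CIDR entries using `IPAddr`. A minimal sketch of that matching, with assumed whitelist values standing in for `Settings.monitoring.ip_whitelist`:

```ruby
require 'ipaddr'

# Assumed entries; in GitLab they come from Settings.monitoring.ip_whitelist.
whitelist = ['127.0.0.0/8', '10.0.0.0/24'].map { |entry| IPAddr.new(entry) }

def whitelisted?(whitelist, client_ip)
  # IPAddr#include? checks whether an address falls inside a CIDR range.
  whitelist.any? { |range| range.include?(IPAddr.new(client_ip)) }
end

whitelisted?(whitelist, '127.0.0.1')   # => true  (inside 127.0.0.0/8)
whitelisted?(whitelist, '192.168.1.5') # => false (the request gets a 404)
```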
class HealthCheckController < HealthCheck::HealthCheckController class HealthCheckController < HealthCheck::HealthCheckController
include RequiresHealthToken include RequiresWhitelistedMonitoringClient
end end
class HealthController < ActionController::Base class HealthController < ActionController::Base
protect_from_forgery with: :exception protect_from_forgery with: :exception
include RequiresHealthToken include RequiresWhitelistedMonitoringClient
CHECKS = [ CHECKS = [
Gitlab::HealthChecks::DbCheck, Gitlab::HealthChecks::DbCheck,
......
class MetricsController < ActionController::Base class MetricsController < ActionController::Base
include RequiresHealthToken include RequiresWhitelistedMonitoringClient
protect_from_forgery with: :exception protect_from_forgery with: :exception
before_action :validate_prometheus_metrics before_action :validate_prometheus_metrics
def index def index
render text: metrics_service.metrics_text, content_type: 'text/plain; verssion=0.0.4' render text: metrics_service.metrics_text, content_type: 'text/plain; version=0.0.4'
end end
private private
......
...@@ -197,6 +197,9 @@ class ApplicationSetting < ActiveRecord::Base ...@@ -197,6 +197,9 @@ class ApplicationSetting < ActiveRecord::Base
Rails.cache.fetch(CACHE_KEY) do Rails.cache.fetch(CACHE_KEY) do
ApplicationSetting.last ApplicationSetting.last
end end
rescue
# Fall back to an uncached value if there are any problems (e.g. redis down)
ApplicationSetting.last
end end
def self.expire def self.expire
......
...@@ -4,7 +4,7 @@ module Ci ...@@ -4,7 +4,7 @@ module Ci
prepend EE::Ci::Runner prepend EE::Ci::Runner
RUNNER_QUEUE_EXPIRY_TIME = 60.minutes RUNNER_QUEUE_EXPIRY_TIME = 60.minutes
LAST_CONTACT_TIME = 1.hour.ago ONLINE_CONTACT_TIMEOUT = 1.hour
AVAILABLE_SCOPES = %w[specific shared active paused online].freeze AVAILABLE_SCOPES = %w[specific shared active paused online].freeze
FORM_EDITABLE = %i[description tag_list active run_untagged locked].freeze FORM_EDITABLE = %i[description tag_list active run_untagged locked].freeze
...@@ -20,7 +20,7 @@ module Ci ...@@ -20,7 +20,7 @@ module Ci
scope :shared, ->() { where(is_shared: true) } scope :shared, ->() { where(is_shared: true) }
scope :active, ->() { where(active: true) } scope :active, ->() { where(active: true) }
scope :paused, ->() { where(active: false) } scope :paused, ->() { where(active: false) }
scope :online, ->() { where('contacted_at > ?', LAST_CONTACT_TIME) } scope :online, ->() { where('contacted_at > ?', contact_time_deadline) }
scope :ordered, ->() { order(id: :desc) } scope :ordered, ->() { order(id: :desc) }
scope :owned_or_shared, ->(project_id) do scope :owned_or_shared, ->(project_id) do
...@@ -60,6 +60,10 @@ module Ci ...@@ -60,6 +60,10 @@ module Ci
where(t[:token].matches(pattern).or(t[:description].matches(pattern))) where(t[:token].matches(pattern).or(t[:description].matches(pattern)))
end end
def self.contact_time_deadline
ONLINE_CONTACT_TIMEOUT.ago
end
def set_default_values def set_default_values
self.token = SecureRandom.hex(15) if self.token.blank? self.token = SecureRandom.hex(15) if self.token.blank?
end end
...@@ -81,7 +85,7 @@ module Ci ...@@ -81,7 +85,7 @@ module Ci
end end
def online? def online?
contacted_at && contacted_at > LAST_CONTACT_TIME contacted_at && contacted_at > self.class.contact_time_deadline
end end
def status def status
......
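The "Fix offline runner detection" changelog entry below appears to correspond to this change: the old `LAST_CONTACT_TIME` constant captured a timestamp once at class-load time, while the new `contact_time_deadline` method recomputes it on every call. A small sketch of the difference, assuming ActiveSupport's time helpers:

```ruby
require 'active_support/all'

class RunnerSketch
  ONLINE_CONTACT_TIMEOUT = 1.hour

  # Frozen when the class loads: a runner contacted shortly after boot keeps
  # comparing as "online" no matter how much time has since passed.
  STALE_DEADLINE = ONLINE_CONTACT_TIMEOUT.ago

  # Recomputed per call: the cutoff always trails the current time by one hour.
  def self.contact_time_deadline
    ONLINE_CONTACT_TIMEOUT.ago
  end
end

contacted_at = 2.hours.ago
contacted_at > RunnerSketch.contact_time_deadline # => false: correctly reported offline
```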
---
title: Deprecate Healthcheck Access Token in favor of IP whitelist
merge_request:
author:
---
title: Prevent bad data being added to application settings when Redis is unavailable
merge_request: 12750
author:
---
title: Prevent disabled pagination button from being clicked
merge_request:
author:
---
title: Fix offline runner detection
merge_request: 11751
author: Alessio Caiazza
---
title: Remove remaining N+1 queries in merge requests API with emojis and labels
merge_request:
author:
---
title: Bump bootsnap to 1.1.1
merge_request: 12425
author: @blackst0ne
...@@ -19,7 +19,7 @@ an ERB file and then loads the resulting YML as its configuration. ...@@ -19,7 +19,7 @@ an ERB file and then loads the resulting YML as its configuration.
This file is called `resque.yml` for historical reasons. We are **NOT** This file is called `resque.yml` for historical reasons. We are **NOT**
using Resque at the moment. It is used to specify Redis configuration using Resque at the moment. It is used to specify Redis configuration
values instead when a single database instance of Redis is desired. values when a single database instance of Redis is desired.
# Advanced Redis configuration files # Advanced Redis configuration files
......
...@@ -5,12 +5,6 @@ ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../../Gemfile', __FILE__) ...@@ -5,12 +5,6 @@ ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../../Gemfile', __FILE__)
require 'bundler/setup' if File.exist?(ENV['BUNDLE_GEMFILE']) require 'bundler/setup' if File.exist?(ENV['BUNDLE_GEMFILE'])
begin
require 'bootsnap/setup'
rescue SystemCallError => exception
$stderr.puts "WARNING: Bootsnap failed to setup: #{exception.message}"
end
# set default directory for multiproces metrics gathering # set default directory for multiproces metrics gathering
if ENV['RAILS_ENV'] == 'development' || ENV['RAILS_ENV'] == 'test' if ENV['RAILS_ENV'] == 'development' || ENV['RAILS_ENV'] == 'test'
ENV['prometheus_multiproc_dir'] ||= 'tmp/prometheus_multiproc_dir' ENV['prometheus_multiproc_dir'] ||= 'tmp/prometheus_multiproc_dir'
......
...@@ -634,10 +634,15 @@ production: &base ...@@ -634,10 +634,15 @@ production: &base
# enabled: true # enabled: true
# host: localhost # host: localhost
# port: 3808 # port: 3808
prometheus:
## Monitoring
# Built in monitoring settings
monitoring:
# Time between sampling of unicorn socket metrics, in seconds # Time between sampling of unicorn socket metrics, in seconds
# unicorn_sampler_interval: 10 # unicorn_sampler_interval: 10
# IP whitelist to access monitoring endpoints
ip_whitelist:
- 127.0.0.0/8
## GitLab Geo settings (EE-only) ## GitLab Geo settings (EE-only)
geo_primary_role: geo_primary_role:
......
...@@ -590,10 +590,11 @@ Settings.webpack.dev_server['host'] ||= 'localhost' ...@@ -590,10 +590,11 @@ Settings.webpack.dev_server['host'] ||= 'localhost'
Settings.webpack.dev_server['port'] ||= 3808 Settings.webpack.dev_server['port'] ||= 3808
# #
# Prometheus metrics settings # Monitoring settings
# #
Settings['prometheus'] ||= Settingslogic.new({}) Settings['monitoring'] ||= Settingslogic.new({})
Settings.prometheus['unicorn_sampler_interval'] ||= 10 Settings.monitoring['ip_whitelist'] ||= ['127.0.0.1/8']
Settings.monitoring['unicorn_sampler_interval'] ||= 10
# #
# Testing settings # Testing settings
......
...@@ -141,7 +141,7 @@ def instrument_classes(instrumentation) ...@@ -141,7 +141,7 @@ def instrument_classes(instrumentation)
end end
# rubocop:enable Metrics/AbcSize # rubocop:enable Metrics/AbcSize
Gitlab::Metrics::UnicornSampler.initialize_instance(Settings.prometheus.unicorn_sampler_interval).start Gitlab::Metrics::UnicornSampler.initialize_instance(Settings.monitoring.unicorn_sampler_interval).start
Gitlab::Application.configure do |config| Gitlab::Application.configure do |config|
# 0 should be Sentry to catch errors in this middleware # 0 should be Sentry to catch errors in this middleware
......
...@@ -3,4 +3,6 @@ require 'flipper/middleware/memoizer' ...@@ -3,4 +3,6 @@ require 'flipper/middleware/memoizer'
unless Rails.env.test? unless Rails.env.test?
Rails.application.config.middleware.use Flipper::Middleware::Memoizer, Rails.application.config.middleware.use Flipper::Middleware::Memoizer,
lambda { Feature.flipper } lambda { Feature.flipper }
Feature.register_feature_groups
end end
...@@ -78,5 +78,5 @@ begin ...@@ -78,5 +78,5 @@ begin
end end
end end
end end
rescue ::Redis::BaseError, SocketError, Errno::ENOENT, Errno::EAFNOSUPPORT, Errno::ECONNRESET, Errno::ECONNREFUSED rescue Redis::BaseError, SocketError, Errno::ENOENT, Errno::EADDRNOTAVAIL, Errno::EAFNOSUPPORT, Errno::ECONNRESET, Errno::ECONNREFUSED
end end
...@@ -420,6 +420,12 @@ GitLab Shell is an SSH access and repository management software developed speci ...@@ -420,6 +420,12 @@ GitLab Shell is an SSH access and repository management software developed speci
**Note:** Make sure your hostname can be resolved on the machine itself by either a proper DNS record or an additional line in /etc/hosts ("127.0.0.1 hostname"). This might be necessary for example if you set up GitLab behind a reverse proxy. If the hostname cannot be resolved, the final installation check will fail with "Check GitLab API access: FAILED. code: 401" and pushing commits will be rejected with "[remote rejected] master -> master (hook declined)". **Note:** Make sure your hostname can be resolved on the machine itself by either a proper DNS record or an additional line in /etc/hosts ("127.0.0.1 hostname"). This might be necessary for example if you set up GitLab behind a reverse proxy. If the hostname cannot be resolved, the final installation check will fail with "Check GitLab API access: FAILED. code: 401" and pushing commits will be rejected with "[remote rejected] master -> master (hook declined)".
**Note:** GitLab Shell application startup time can be greatly reduced by disabling RubyGems. This can be done in several ways:
* Export the `RUBYOPT=--disable-gems` environment variable for the relevant processes
* Compile Ruby with `configure --disable-rubygems` to disable RubyGems by default. Not recommended for system-wide Ruby.
* Omnibus GitLab [replaces the *shebang* line of the `gitlab-shell/bin/*` scripts](https://gitlab.com/gitlab-org/omnibus-gitlab/merge_requests/1707)
### Install gitlab-workhorse ### Install gitlab-workhorse
GitLab-Workhorse uses [GNU Make](https://www.gnu.org/software/make/). The GitLab-Workhorse uses [GNU Make](https://www.gnu.org/software/make/). The
......
...@@ -5,6 +5,8 @@ ...@@ -5,6 +5,8 @@
- The `health_check` endpoint was [introduced][ce-3888] in GitLab 8.8 and will - The `health_check` endpoint was [introduced][ce-3888] in GitLab 8.8 and will
be deprecated in GitLab 9.1. Read more in the [old behavior](#old-behavior) be deprecated in GitLab 9.1. Read more in the [old behavior](#old-behavior)
section. section.
- [Access token](#access-token) has been deprecated in GitLab 9.4
in favor of [IP Whitelist](#ip-whitelist).
GitLab provides liveness and readiness probes to indicate service health and GitLab provides liveness and readiness probes to indicate service health and
reachability to required services. These probes report on the status of the reachability to required services. These probes report on the status of the
...@@ -12,7 +14,19 @@ database connection, Redis connection, and access to the filesystem. These ...@@ -12,7 +14,19 @@ database connection, Redis connection, and access to the filesystem. These
endpoints [can be provided to schedulers like Kubernetes][kubernetes] to hold endpoints [can be provided to schedulers like Kubernetes][kubernetes] to hold
traffic until the system is ready or restart the container as needed. traffic until the system is ready or restart the container as needed.
## Access Token ## IP Whitelist
To access monitoring resources, the client IP needs to be included in the whitelist.
To add or remove hosts or IP ranges from the list, edit `gitlab.rb` or `gitlab.yml`.
Example whitelist configuration:
```yaml
monitoring:
ip_whitelist:
- 127.0.0.0/8 # by default only local IPs are allowed to access monitoring resources
```
## Access Token (Deprecated)
An access token needs to be provided while accessing the probe endpoints. The current An access token needs to be provided while accessing the probe endpoints. The current
accepted token can be found under the **Admin area ➔ Monitoring ➔ Health check** accepted token can be found under the **Admin area ➔ Monitoring ➔ Health check**
...@@ -47,10 +61,10 @@ which will then provide a report of system health in JSON format: ...@@ -47,10 +61,10 @@ which will then provide a report of system health in JSON format:
## Using the Endpoint ## Using the Endpoint
Once you have the access token, the probes can be accessed: With default whitelist settings, the probes can be accessed from localhost:
- `https://gitlab.example.com/-/readiness?token=ACCESS_TOKEN` - `http://localhost/-/readiness`
- `https://gitlab.example.com/-/liveness?token=ACCESS_TOKEN` - `http://localhost/-/liveness`
## Status ## Status
...@@ -71,8 +85,8 @@ the database connection, the state of the database migrations, and the ability t ...@@ -71,8 +85,8 @@ the database connection, the state of the database migrations, and the ability t
and access the cache. This endpoint can be provided to uptime monitoring services like and access the cache. This endpoint can be provided to uptime monitoring services like
[Pingdom][pingdom], [Nagios][nagios-health], and [NewRelic][newrelic-health]. [Pingdom][pingdom], [Nagios][nagios-health], and [NewRelic][newrelic-health].
Once you have the [access token](#access-token), health information can be Once you have the [access token](#access-token) or your client IP is [whitelisted](#ip-whitelist),
retrieved as plain text, JSON, or XML using the `health_check` endpoint: health information can be retrieved as plain text, JSON, or XML using the `health_check` endpoint:
- `https://gitlab.example.com/health_check?token=ACCESS_TOKEN` - `https://gitlab.example.com/health_check?token=ACCESS_TOKEN`
- `https://gitlab.example.com/health_check.json?token=ACCESS_TOKEN` - `https://gitlab.example.com/health_check.json?token=ACCESS_TOKEN`
......
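With the documented default whitelist (`127.0.0.0/8`), the probes above can be queried locally without a token. A hedged sketch using Ruby's standard library, reusing the endpoint and response keys shown in the docs and specs:

```ruby
require 'net/http'
require 'json'

# Query the readiness probe from the GitLab host itself; 127.0.0.1 falls into
# the default whitelist, while non-whitelisted clients receive a 404.
response = Net::HTTP.get_response(URI('http://localhost/-/readiness'))

if response.code == '200'
  checks = JSON.parse(response.body)
  puts checks['db_check']['status'] # e.g. "ok"
else
  puts "not reachable or not whitelisted: HTTP #{response.code}"
end
```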
...@@ -355,10 +355,26 @@ module API ...@@ -355,10 +355,26 @@ module API
class MergeRequestBasic < ProjectEntity class MergeRequestBasic < ProjectEntity
expose :target_branch, :source_branch expose :target_branch, :source_branch
expose :upvotes, :downvotes expose :upvotes do |merge_request, options|
if options[:issuable_metadata]
options[:issuable_metadata][merge_request.id].upvotes
else
merge_request.upvotes
end
end
expose :downvotes do |merge_request, options|
if options[:issuable_metadata]
options[:issuable_metadata][merge_request.id].downvotes
else
merge_request.downvotes
end
end
expose :author, :assignee, using: Entities::UserBasic expose :author, :assignee, using: Entities::UserBasic
expose :source_project_id, :target_project_id expose :source_project_id, :target_project_id
expose :label_names, as: :labels expose :labels do |merge_request, options|
# Avoids an N+1 query since labels are preloaded
merge_request.labels.map(&:title).sort
end
expose :work_in_progress?, as: :work_in_progress expose :work_in_progress?, as: :work_in_progress
expose :milestone, using: Entities::Milestone expose :milestone, using: Entities::Milestone
expose :merge_when_pipeline_succeeds expose :merge_when_pipeline_succeeds
......
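The new `labels` exposure above maps titles in Ruby instead of going back to the database, which only avoids the N+1 because the API hunk below adds `:labels` to the preload list. A rough sketch of the difference:

```ruby
# Without preloading, every merge request issues its own labels query (N+1).
MergeRequest.limit(20).each { |mr| mr.labels.map(&:title) }

# With preloading, labels for the whole page are loaded in one extra query,
# and the mapping below runs purely in memory.
MergeRequest.preload(:labels).limit(20).map do |mr|
  mr.labels.map(&:title).sort
end
```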
...@@ -10,6 +10,8 @@ module API ...@@ -10,6 +10,8 @@ module API
resource :projects, requirements: { id: %r{[^/]+} } do resource :projects, requirements: { id: %r{[^/]+} } do
include TimeTrackingEndpoints include TimeTrackingEndpoints
helpers ::Gitlab::IssuableMetadata
helpers do helpers do
def handle_merge_request_errors!(errors) def handle_merge_request_errors!(errors)
if errors[:project_access].any? if errors[:project_access].any?
...@@ -48,10 +50,9 @@ module API ...@@ -48,10 +50,9 @@ module API
args[:label_name] = args.delete(:labels) args[:label_name] = args.delete(:labels)
merge_requests = MergeRequestsFinder.new(current_user, args).execute merge_requests = MergeRequestsFinder.new(current_user, args).execute
.inc_notes_with_associations merge_requests = merge_requests.reorder(args[:order_by] => args[:sort])
.preload(:target_project, :author, :assignee, :milestone, :merge_request_diff) paginate(merge_requests)
.preload(:notes, :target_project, :author, :assignee, :milestone, :merge_request_diff, :labels)
merge_requests.reorder(args[:order_by] => args[:sort])
end end
params :optional_params_ce do params :optional_params_ce do
...@@ -94,8 +95,9 @@ module API ...@@ -94,8 +95,9 @@ module API
authorize! :read_merge_request, user_project authorize! :read_merge_request, user_project
merge_requests = find_merge_requests(project_id: user_project.id) merge_requests = find_merge_requests(project_id: user_project.id)
issuable_metadata = issuable_meta_data(merge_requests, 'MergeRequest')
present paginate(merge_requests), with: Entities::MergeRequestBasic, current_user: current_user, project: user_project present merge_requests, with: Entities::MergeRequestBasic, current_user: current_user, project: user_project, issuable_metadata: issuable_metadata
end end
desc 'Create a merge request' do desc 'Create a merge request' do
......
...@@ -57,5 +57,11 @@ class Feature ...@@ -57,5 +57,11 @@ class Feature
Flipper.new(adapter) Flipper.new(adapter)
end end
end end
# This method is called from config/initializers/flipper.rb and can be used
# to register Flipper groups.
# See https://docs.gitlab.com/ee/development/feature_flags.html#feature-groups
def register_feature_groups
end
end end
end end
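The `register_feature_groups` hook added above is deliberately empty; the initializer change earlier calls it outside the test environment. A hypothetical sketch of how an override could register a Flipper group (the `:staff` group and `admin?` check are illustrative, not part of this commit):

```ruby
# Hypothetical override, e.g. prepended from EE or defined in an initializer.
def self.register_feature_groups
  # Flipper.register takes a group name and a block deciding membership.
  Flipper.register(:staff) { |actor| actor.respond_to?(:admin?) && actor.admin? }
end
```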
...@@ -25,7 +25,7 @@ module Gitlab ...@@ -25,7 +25,7 @@ module Gitlab
def cached_application_settings def cached_application_settings
begin begin
::ApplicationSetting.cached ::ApplicationSetting.cached
rescue ::Redis::BaseError, ::Errno::ENOENT rescue ::Redis::BaseError, ::Errno::ENOENT, ::Errno::EADDRNOTAVAIL
# In case Redis isn't running or the Redis UNIX socket file is not available # In case Redis isn't running or the Redis UNIX socket file is not available
end end
end end
...@@ -33,12 +33,7 @@ module Gitlab ...@@ -33,12 +33,7 @@ module Gitlab
def uncached_application_settings def uncached_application_settings
return fake_application_settings unless connect_to_db? return fake_application_settings unless connect_to_db?
# This loads from the database into the cache, so handle Redis errors db_settings = ::ApplicationSetting.current
begin
db_settings = ::ApplicationSetting.current
rescue ::Redis::BaseError, ::Errno::ENOENT
# In case Redis isn't running or the Redis UNIX socket file is not available
end
# If there are pending migrations, it's possible there are columns that # If there are pending migrations, it's possible there are columns that
# need to be added to the application settings. To prevent Rake tasks # need to be added to the application settings. To prevent Rake tasks
......
...@@ -337,7 +337,7 @@ module Gitlab ...@@ -337,7 +337,7 @@ module Gitlab
# In the EE repo # In the EE repo
$ git push origin #{ee_branch_prefix} $ git push origin #{ee_branch_prefix}
⚠️ Also, don't forget to create a new merge request on gitlab-ce and ⚠️ Also, don't forget to create a new merge request on gitlab-ee and
cross-link it with the CE merge request. cross-link it with the CE merge request.
Once this is done, you can retry this failed build, and it should pass. Once this is done, you can retry this failed build, and it should pass.
......
module Gitlab
module IssuableMetadata
def issuable_meta_data(issuable_collection, collection_type)
# map has to be used here since using pluck or select will
# throw an error when ordering issuables by priority which inserts
# a new order into the collection.
# We cannot use reorder to not mess up the paginated collection.
issuable_ids = issuable_collection.map(&:id)
return {} if issuable_ids.empty?
issuable_note_count = ::Note.count_for_collection(issuable_ids, collection_type)
issuable_votes_count = ::AwardEmoji.votes_for_collection(issuable_ids, collection_type)
issuable_merge_requests_count =
if collection_type == 'Issue'
::MergeRequestsClosingIssues.count_for_collection(issuable_ids)
else
[]
end
issuable_ids.each_with_object({}) do |id, issuable_meta|
downvotes = issuable_votes_count.find { |votes| votes.awardable_id == id && votes.downvote? }
upvotes = issuable_votes_count.find { |votes| votes.awardable_id == id && votes.upvote? }
notes = issuable_note_count.find { |notes| notes.noteable_id == id }
merge_requests = issuable_merge_requests_count.find { |mr| mr.first == id }
issuable_meta[id] = ::Issuable::IssuableMeta.new(
upvotes.try(:count).to_i,
downvotes.try(:count).to_i,
notes.try(:count).to_i,
merge_requests.try(:last).to_i
)
end
end
end
end
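A hypothetical caller of the extracted module: include it and index the returned hash by issuable id (the spec near the end of this diff exercises the same shape):

```ruby
class IssuesReport
  include Gitlab::IssuableMetadata

  def counts_for(issues)
    meta = issuable_meta_data(issues, 'Issue')
    issues.map do |issue|
      data = meta[issue.id]
      { id: issue.id,
        upvotes: data.upvotes,
        downvotes: data.downvotes,
        notes: data.notes_count,
        closing_merge_requests: data.merge_requests_count }
    end
  end
end
```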
...@@ -20,13 +20,8 @@ module Gitlab ...@@ -20,13 +20,8 @@ module Gitlab
def token def token
Gitlab::Redis::SharedState.with do |redis| Gitlab::Redis::SharedState.with do |redis|
token = redis.get(redis_shared_state_key) token = redis.get(redis_shared_state_key)
token ||= Devise.friendly_token(TOKEN_LENGTH)
if token redis.set(redis_shared_state_key, token, ex: EXPIRY_TIME)
redis.expire(redis_shared_state_key, EXPIRY_TIME)
else
token = Devise.friendly_token(TOKEN_LENGTH)
redis.set(redis_shared_state_key, token, ex: EXPIRY_TIME)
end
token token
end end
......
...@@ -49,7 +49,6 @@ module QA ...@@ -49,7 +49,6 @@ module QA
autoload :Entry, 'qa/page/main/entry' autoload :Entry, 'qa/page/main/entry'
autoload :Menu, 'qa/page/main/menu' autoload :Menu, 'qa/page/main/menu'
autoload :Groups, 'qa/page/main/groups' autoload :Groups, 'qa/page/main/groups'
autoload :Projects, 'qa/page/main/projects'
end end
module Project module Project
......
...@@ -14,6 +14,13 @@ module QA ...@@ -14,6 +14,13 @@ module QA
within_user_menu { click_link 'Admin area' } within_user_menu { click_link 'Admin area' }
end end
def go_to_new_project
within_user_menu do
find('.header-new-dropdown-toggle').click
click_link('New project')
end
end
def sign_out def sign_out
within_user_menu do within_user_menu do
find('.header-user-dropdown-toggle').click find('.header-user-dropdown-toggle').click
......
module QA
module Page
module Main
class Projects < Page::Base
def go_to_new_project
##
# There are 'New Project' and 'New project' buttons on the projects
# page, so we can't use `click_on`.
#
button = find('a', text: /^new project$/i)
button.click
end
end
end
end
end
...@@ -14,8 +14,7 @@ module QA ...@@ -14,8 +14,7 @@ module QA
def perform def perform
Page::Main::Menu.act { go_to_groups } Page::Main::Menu.act { go_to_groups }
Page::Main::Groups.act { prepare_test_namespace } Page::Main::Groups.act { prepare_test_namespace }
Page::Main::Menu.act { go_to_projects } Page::Main::Menu.act { go_to_new_project }
Page::Main::Projects.act { go_to_new_project }
Page::Project::New.perform do |page| Page::Project::New.perform do |page|
page.choose_test_namespace page.choose_test_namespace
......
...@@ -25,27 +25,15 @@ module QA ...@@ -25,27 +25,15 @@ module QA
def configure_rspec! def configure_rspec!
RSpec.configure do |config| RSpec.configure do |config|
config.expect_with :rspec do |expectations| config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`.
expectations.include_chain_clauses_in_custom_matcher_descriptions = true expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end end
config.mock_with :rspec do |mocks| config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true mocks.verify_partial_doubles = true
end end
# Run specs in random order to surface order dependencies.
config.order = :random config.order = :random
Kernel.srand config.seed Kernel.srand config.seed
# config.before(:all) do
# page.current_window.resize_to(1200, 1800)
# end
config.formatter = :documentation config.formatter = :documentation
config.color = true config.color = true
end end
...@@ -56,7 +44,7 @@ module QA ...@@ -56,7 +44,7 @@ module QA
capabilities = Selenium::WebDriver::Remote::Capabilities.chrome( capabilities = Selenium::WebDriver::Remote::Capabilities.chrome(
'chromeOptions' => { 'chromeOptions' => {
'binary' => '/usr/bin/google-chrome-stable', 'binary' => '/usr/bin/google-chrome-stable',
'args' => %w[headless no-sandbox disable-gpu] 'args' => %w[headless no-sandbox disable-gpu window-size=1280,1024]
} }
) )
...@@ -64,6 +52,10 @@ module QA ...@@ -64,6 +52,10 @@ module QA
.new(app, browser: :chrome, desired_capabilities: capabilities) .new(app, browser: :chrome, desired_capabilities: capabilities)
end end
Capybara::Screenshot.register_driver(:chrome) do |driver, path|
driver.browser.save_screenshot(path)
end
Capybara.configure do |config| Capybara.configure do |config|
config.app_host = @address config.app_host = @address
config.default_driver = :chrome config.default_driver = :chrome
......
...@@ -45,6 +45,9 @@ else # Assume it's mysql ...@@ -45,6 +45,9 @@ else # Assume it's mysql
sed -i 's/# host:.*/host: mysql/g' config/database_geo.yml sed -i 's/# host:.*/host: mysql/g' config/database_geo.yml
fi fi
cp config/resque.yml.example config/resque.yml
sed -i 's/localhost/redis/g' config/resque.yml
cp config/redis.cache.yml.example config/redis.cache.yml cp config/redis.cache.yml.example config/redis.cache.yml
sed -i 's/localhost/redis/g' config/redis.cache.yml sed -i 's/localhost/redis/g' config/redis.cache.yml
......
...@@ -3,52 +3,79 @@ require 'spec_helper' ...@@ -3,52 +3,79 @@ require 'spec_helper'
describe HealthCheckController do describe HealthCheckController do
include StubENV include StubENV
let(:token) { current_application_settings.health_check_access_token }
let(:json_response) { JSON.parse(response.body) } let(:json_response) { JSON.parse(response.body) }
let(:xml_response) { Hash.from_xml(response.body)['hash'] } let(:xml_response) { Hash.from_xml(response.body)['hash'] }
let(:token) { current_application_settings.health_check_access_token }
let(:whitelisted_ip) { '127.0.0.1' }
let(:not_whitelisted_ip) { '127.0.0.2' }
before do before do
allow(Settings.monitoring).to receive(:ip_whitelist).and_return([whitelisted_ip])
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false') stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
end end
describe 'GET #index' do describe 'GET #index' do
context 'when services are up but NO access token' do context 'when services are up but accessed from outside whitelisted ips' do
before do
allow(Gitlab::RequestContext).to receive(:client_ip).and_return(not_whitelisted_ip)
end
it 'returns a not found page' do it 'returns a not found page' do
get :index get :index
expect(response).to be_not_found expect(response).to be_not_found
end end
context 'when services are accessed with token' do
it 'supports passing the token in the header' do
request.headers['TOKEN'] = token
get :index
expect(response).to be_success
expect(response.content_type).to eq 'text/plain'
end
it 'supports passing the token in query params' do
get :index, token: token
expect(response).to be_success
expect(response.content_type).to eq 'text/plain'
end
end
end end
context 'when services are up and an access token is provided' do context 'when services are up and accessed from whitelisted ips' do
it 'supports passing the token in the header' do before do
request.headers['TOKEN'] = token allow(Gitlab::RequestContext).to receive(:client_ip).and_return(whitelisted_ip)
get :index
expect(response).to be_success
expect(response.content_type).to eq 'text/plain'
end end
it 'supports successful plaintest response' do it 'supports successful plaintext response' do
get :index, token: token get :index
expect(response).to be_success expect(response).to be_success
expect(response.content_type).to eq 'text/plain' expect(response.content_type).to eq 'text/plain'
end end
it 'supports successful json response' do it 'supports successful json response' do
get :index, token: token, format: :json get :index, format: :json
expect(response).to be_success expect(response).to be_success
expect(response.content_type).to eq 'application/json' expect(response.content_type).to eq 'application/json'
expect(json_response['healthy']).to be true expect(json_response['healthy']).to be true
end end
it 'supports successful xml response' do it 'supports successful xml response' do
get :index, token: token, format: :xml get :index, format: :xml
expect(response).to be_success expect(response).to be_success
expect(response.content_type).to eq 'application/xml' expect(response.content_type).to eq 'application/xml'
expect(xml_response['healthy']).to be true expect(xml_response['healthy']).to be true
end end
it 'supports successful responses for specific checks' do it 'supports successful responses for specific checks' do
get :index, token: token, checks: 'email', format: :json get :index, checks: 'email', format: :json
expect(response).to be_success expect(response).to be_success
expect(response.content_type).to eq 'application/json' expect(response.content_type).to eq 'application/json'
expect(json_response['healthy']).to be true expect(json_response['healthy']).to be true
...@@ -58,33 +85,29 @@ describe HealthCheckController do ...@@ -58,33 +85,29 @@ describe HealthCheckController do
context 'when a service is down but NO access token' do context 'when a service is down but NO access token' do
it 'returns a not found page' do it 'returns a not found page' do
get :index get :index
expect(response).to be_not_found expect(response).to be_not_found
end end
end end
context 'when a service is down and an access token is provided' do context 'when a service is down and an endpoint is accessed from whitelisted ip' do
before do before do
allow(HealthCheck::Utils).to receive(:process_checks).with(['standard']).and_return('The server is on fire') allow(HealthCheck::Utils).to receive(:process_checks).with(['standard']).and_return('The server is on fire')
allow(HealthCheck::Utils).to receive(:process_checks).with(['email']).and_return('Email is on fire') allow(HealthCheck::Utils).to receive(:process_checks).with(['email']).and_return('Email is on fire')
allow(Gitlab::RequestContext).to receive(:client_ip).and_return(whitelisted_ip)
end end
it 'supports passing the token in the header' do it 'supports failure plaintext response' do
request.headers['TOKEN'] = token
get :index get :index
expect(response).to have_http_status(500)
expect(response.content_type).to eq 'text/plain'
expect(response.body).to include('The server is on fire')
end
it 'supports failure plaintest response' do
get :index, token: token
expect(response).to have_http_status(500) expect(response).to have_http_status(500)
expect(response.content_type).to eq 'text/plain' expect(response.content_type).to eq 'text/plain'
expect(response.body).to include('The server is on fire') expect(response.body).to include('The server is on fire')
end end
it 'supports failure json response' do it 'supports failure json response' do
get :index, token: token, format: :json get :index, format: :json
expect(response).to have_http_status(500) expect(response).to have_http_status(500)
expect(response.content_type).to eq 'application/json' expect(response.content_type).to eq 'application/json'
expect(json_response['healthy']).to be false expect(json_response['healthy']).to be false
...@@ -92,7 +115,8 @@ describe HealthCheckController do ...@@ -92,7 +115,8 @@ describe HealthCheckController do
end end
it 'supports failure xml response' do it 'supports failure xml response' do
get :index, token: token, format: :xml get :index, format: :xml
expect(response).to have_http_status(500) expect(response).to have_http_status(500)
expect(response.content_type).to eq 'application/xml' expect(response.content_type).to eq 'application/xml'
expect(xml_response['healthy']).to be false expect(xml_response['healthy']).to be false
...@@ -100,7 +124,8 @@ describe HealthCheckController do ...@@ -100,7 +124,8 @@ describe HealthCheckController do
end end
it 'supports failure responses for specific checks' do it 'supports failure responses for specific checks' do
get :index, token: token, checks: 'email', format: :json get :index, checks: 'email', format: :json
expect(response).to have_http_status(500) expect(response).to have_http_status(500)
expect(response.content_type).to eq 'application/json' expect(response.content_type).to eq 'application/json'
expect(json_response['healthy']).to be false expect(json_response['healthy']).to be false
......
...@@ -3,21 +3,25 @@ require 'spec_helper' ...@@ -3,21 +3,25 @@ require 'spec_helper'
describe HealthController do describe HealthController do
include StubENV include StubENV
let(:token) { current_application_settings.health_check_access_token }
let(:json_response) { JSON.parse(response.body) } let(:json_response) { JSON.parse(response.body) }
let(:token) { current_application_settings.health_check_access_token }
let(:whitelisted_ip) { '127.0.0.1' }
let(:not_whitelisted_ip) { '127.0.0.2' }
before do before do
allow(Settings.monitoring).to receive(:ip_whitelist).and_return([whitelisted_ip])
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false') stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
end end
describe '#readiness' do describe '#readiness' do
context 'authorization token provided' do shared_context 'endpoint responding with readiness data' do
before do let(:request_params) { {} }
request.headers['TOKEN'] = token
end subject { get :readiness, request_params }
it 'responds with readiness checks data' do
subject
it 'returns proper response' do
get :readiness
expect(json_response['db_check']['status']).to eq('ok') expect(json_response['db_check']['status']).to eq('ok')
expect(json_response['cache_check']['status']).to eq('ok') expect(json_response['cache_check']['status']).to eq('ok')
expect(json_response['queues_check']['status']).to eq('ok') expect(json_response['queues_check']['status']).to eq('ok')
...@@ -27,22 +31,50 @@ describe HealthController do ...@@ -27,22 +31,50 @@ describe HealthController do
end end
end end
context 'without authorization token' do context 'accessed from whitelisted ip' do
it 'returns proper response' do before do
allow(Gitlab::RequestContext).to receive(:client_ip).and_return(whitelisted_ip)
end
it_behaves_like 'endpoint responding with readiness data'
end
context 'accessed from not whitelisted ip' do
before do
allow(Gitlab::RequestContext).to receive(:client_ip).and_return(not_whitelisted_ip)
end
it 'responds with resource not found' do
get :readiness get :readiness
expect(response.status).to eq(404) expect(response.status).to eq(404)
end end
context 'accessed with valid token' do
context 'token passed in request header' do
before do
request.headers['TOKEN'] = token
end
it_behaves_like 'endpoint responding with readiness data'
end
end
context 'token passed as URL param' do
it_behaves_like 'endpoint responding with readiness data' do
let(:request_params) { { token: token } }
end
end
end end
end end
describe '#liveness' do describe '#liveness' do
context 'authorization token provided' do shared_context 'endpoint responding with liveness data' do
before do subject { get :liveness }
request.headers['TOKEN'] = token
end it 'responds with liveness checks data' do
subject
it 'returns proper response' do
get :liveness
expect(json_response['db_check']['status']).to eq('ok') expect(json_response['db_check']['status']).to eq('ok')
expect(json_response['cache_check']['status']).to eq('ok') expect(json_response['cache_check']['status']).to eq('ok')
expect(json_response['queues_check']['status']).to eq('ok') expect(json_response['queues_check']['status']).to eq('ok')
...@@ -51,11 +83,40 @@ describe HealthController do ...@@ -51,11 +83,40 @@ describe HealthController do
end end
end end
context 'without authorization token' do context 'accessed from whitelisted ip' do
it 'returns proper response' do before do
allow(Gitlab::RequestContext).to receive(:client_ip).and_return(whitelisted_ip)
end
it_behaves_like 'endpoint responding with liveness data'
end
context 'accessed from not whitelisted ip' do
before do
allow(Gitlab::RequestContext).to receive(:client_ip).and_return(not_whitelisted_ip)
end
it 'responds with resource not found' do
get :liveness get :liveness
expect(response.status).to eq(404) expect(response.status).to eq(404)
end end
context 'accessed with valid token' do
context 'token passed in request header' do
before do
request.headers['TOKEN'] = token
end
it_behaves_like 'endpoint responding with liveness data'
end
context 'token passed as URL param' do
it_behaves_like 'endpoint responding with liveness data' do
subject { get :liveness, token: token }
end
end
end
end end
end end
end end
...@@ -3,22 +3,22 @@ require 'spec_helper' ...@@ -3,22 +3,22 @@ require 'spec_helper'
describe MetricsController do describe MetricsController do
include StubENV include StubENV
let(:token) { current_application_settings.health_check_access_token }
let(:json_response) { JSON.parse(response.body) } let(:json_response) { JSON.parse(response.body) }
let(:metrics_multiproc_dir) { Dir.mktmpdir } let(:metrics_multiproc_dir) { Dir.mktmpdir }
let(:whitelisted_ip) { '127.0.0.1' }
let(:whitelisted_ip_range) { '10.0.0.0/24' }
let(:ip_in_whitelisted_range) { '10.0.0.1' }
let(:not_whitelisted_ip) { '10.0.1.1' }
before do before do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false') stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
stub_env('prometheus_multiproc_dir', metrics_multiproc_dir) stub_env('prometheus_multiproc_dir', metrics_multiproc_dir)
allow(Gitlab::Metrics).to receive(:prometheus_metrics_enabled?).and_return(true) allow(Gitlab::Metrics).to receive(:prometheus_metrics_enabled?).and_return(true)
allow(Settings.monitoring).to receive(:ip_whitelist).and_return([whitelisted_ip, whitelisted_ip_range])
end end
describe '#index' do describe '#index' do
context 'authorization token provided' do shared_examples_for 'endpoint providing metrics' do
before do
request.headers['TOKEN'] = token
end
it 'returns DB ping metrics' do it 'returns DB ping metrics' do
get :index get :index
...@@ -83,7 +83,27 @@ describe MetricsController do ...@@ -83,7 +83,27 @@ describe MetricsController do
end end
end end
context 'without authorization token' do context 'accessed from whitelisted ip' do
before do
allow(Gitlab::RequestContext).to receive(:client_ip).and_return(whitelisted_ip)
end
it_behaves_like 'endpoint providing metrics'
end
context 'accessed from ip in whitelisted range' do
before do
allow(Gitlab::RequestContext).to receive(:client_ip).and_return(ip_in_whitelisted_range)
end
it_behaves_like 'endpoint providing metrics'
end
context 'accessed from not whitelisted ip' do
before do
allow(Gitlab::RequestContext).to receive(:client_ip).and_return(not_whitelisted_ip)
end
it 'returns proper response' do it 'returns proper response' do
get :index get :index
......
...@@ -143,6 +143,7 @@ import '~/lib/utils/common_utils'; ...@@ -143,6 +143,7 @@ import '~/lib/utils/common_utils';
it('should return valid parameter', () => { it('should return valid parameter', () => {
const value = gl.utils.getParameterByName('scope'); const value = gl.utils.getParameterByName('scope');
expect(gl.utils.getParameterByName('p')).toEqual('2');
expect(value).toBe('all'); expect(value).toBe('all');
}); });
......
import Vue from 'vue'; import Vue from 'vue';
import paginationComp from '~/vue_shared/components/table_pagination.vue'; import paginationComp from '~/vue_shared/components/table_pagination.vue';
import '~/lib/utils/common_utils';
describe('Pagination component', () => { describe('Pagination component', () => {
let component; let component;
let PaginationComponent; let PaginationComponent;
let spy;
const changeChanges = { let mountComponet;
one: '',
};
const change = (one) => {
changeChanges.one = one;
};
beforeEach(() => { beforeEach(() => {
spy = jasmine.createSpy('spy');
PaginationComponent = Vue.extend(paginationComp); PaginationComponent = Vue.extend(paginationComp);
});
it('should render and start at page 1', () => {
component = new PaginationComponent({
propsData: {
pageInfo: {
totalPages: 10,
nextPage: 2,
previousPage: '',
},
change,
},
}).$mount();
expect(component.$el.classList).toContain('gl-pagination'); mountComponet = function (props) {
return new PaginationComponent({
component.changePage({ target: { innerText: '1' } }); propsData: props,
}).$mount();
expect(changeChanges.one).toEqual(1); };
}); });
it('should go to the previous page', () => { describe('render', () => {
component = new PaginationComponent({ describe('prev button', () => {
propsData: { it('should be disabled and non clickable', () => {
component = mountComponet({
pageInfo: {
nextPage: 2,
page: 1,
perPage: 20,
previousPage: NaN,
total: 84,
totalPages: 5,
},
change: spy,
});
expect(
component.$el.querySelector('.js-previous-button').classList.contains('disabled'),
).toEqual(true);
component.$el.querySelector('.js-previous-button a').click();
expect(spy).not.toHaveBeenCalled();
});
it('should be enabled and clickable', () => {
component = mountComponet({
pageInfo: {
nextPage: 3,
page: 2,
perPage: 20,
previousPage: 1,
total: 84,
totalPages: 5,
},
change: spy,
});
component.$el.querySelector('.js-previous-button a').click();
expect(spy).toHaveBeenCalledWith(1);
});
});
describe('first button', () => {
it('should call the change callback with the first page', () => {
component = mountComponet({
pageInfo: {
nextPage: 3,
page: 2,
perPage: 20,
previousPage: 1,
total: 84,
totalPages: 5,
},
change: spy,
});
const button = component.$el.querySelector('.js-first-button a');
expect(button.textContent.trim()).toEqual('« First');
button.click();
expect(spy).toHaveBeenCalledWith(1);
});
});
describe('last button', () => {
it('should call the change callback with the last page', () => {
component = mountComponet({
pageInfo: {
nextPage: 3,
page: 2,
perPage: 20,
previousPage: 1,
total: 84,
totalPages: 5,
},
change: spy,
});
const button = component.$el.querySelector('.js-last-button a');
expect(button.textContent.trim()).toEqual('Last »');
button.click();
expect(spy).toHaveBeenCalledWith(5);
});
});
describe('next button', () => {
it('should be disabled and non clickable', () => {
component = mountComponet({
pageInfo: {
nextPage: 5,
page: 5,
perPage: 20,
previousPage: 1,
total: 84,
totalPages: 5,
},
change: spy,
});
expect(
component.$el.querySelector('.js-next-button').textContent.trim(),
).toEqual('Next');
component.$el.querySelector('.js-next-button a').click();
expect(spy).not.toHaveBeenCalled();
});
it('should be enabled and clickable', () => {
component = mountComponet({
pageInfo: {
nextPage: 4,
page: 3,
perPage: 20,
previousPage: 2,
total: 84,
totalPages: 5,
},
change: spy,
});
component.$el.querySelector('.js-next-button a').click();
expect(spy).toHaveBeenCalledWith(4);
});
});
describe('numbered buttons', () => {
it('should render 5 pages', () => {
component = mountComponet({
pageInfo: {
nextPage: 4,
page: 3,
perPage: 20,
previousPage: 2,
total: 84,
totalPages: 5,
},
change: spy,
});
expect(component.$el.querySelectorAll('.page').length).toEqual(5);
});
});
it('should render the spread operator', () => {
component = mountComponet({
pageInfo: { pageInfo: {
nextPage: 4,
page: 3,
perPage: 20,
previousPage: 2,
total: 84,
totalPages: 10, totalPages: 10,
nextPage: 3,
previousPage: 1,
}, },
change, change: spy,
}, });
}).$mount();
component.changePage({ target: { innerText: 'Prev' } });
expect(changeChanges.one).toEqual(1);
});
it('should go to the next page', () => {
component = new PaginationComponent({
propsData: {
pageInfo: {
totalPages: 10,
nextPage: 5,
previousPage: 3,
},
change,
},
}).$mount();
component.changePage({ target: { innerText: 'Next' } });
expect(changeChanges.one).toEqual(5);
});
it('should go to the last page', () => {
component = new PaginationComponent({
propsData: {
pageInfo: {
totalPages: 10,
nextPage: 5,
previousPage: 3,
},
change,
},
}).$mount();
component.changePage({ target: { innerText: 'Last »' } });
expect(changeChanges.one).toEqual(10);
});
it('should go to the first page', () => {
component = new PaginationComponent({
propsData: {
pageInfo: {
totalPages: 10,
nextPage: 5,
previousPage: 3,
},
change,
},
}).$mount();
component.changePage({ target: { innerText: '« First' } });
expect(changeChanges.one).toEqual(1);
});
it('should do nothing', () => {
component = new PaginationComponent({
propsData: {
pageInfo: {
totalPages: 10,
nextPage: 2,
previousPage: '',
},
change,
},
}).$mount();
component.changePage({ target: { innerText: '...' } });
expect(changeChanges.one).toEqual(1);
});
});
describe('paramHelper', () => {
afterEach(() => {
window.history.pushState({}, null, '');
});
it('can parse url parameters correctly', () => {
window.history.pushState({}, null, '?scope=all&p=2');
const scope = gl.utils.getParameterByName('scope');
const p = gl.utils.getParameterByName('p');
expect(scope).toEqual('all');
expect(p).toEqual('2');
});
it('returns null if param not in url', () => {
window.history.pushState({}, null, '?p=2');
const scope = gl.utils.getParameterByName('scope');
const p = gl.utils.getParameterByName('p');
expect(scope).toEqual(null); expect(component.$el.querySelector('.separator').textContent.trim()).toEqual('...');
expect(p).toEqual('2'); });
}); });
}); });
...@@ -13,6 +13,14 @@ describe Gitlab::CurrentSettings do ...@@ -13,6 +13,14 @@ describe Gitlab::CurrentSettings do
allow_any_instance_of(described_class).to receive(:connect_to_db?).and_return(true) allow_any_instance_of(described_class).to receive(:connect_to_db?).and_return(true)
end end
# This method returns the ::ApplicationSetting.defaults hash
# but with respect of custom attribute accessors of ApplicationSetting model
def settings_from_defaults
defaults = ::ApplicationSetting.defaults
ar_wrapped_defaults = ::ApplicationSetting.new(defaults).attributes
ar_wrapped_defaults.slice(*defaults.keys)
end
it 'attempts to use cached values first' do
expect(ApplicationSetting).to receive(:cached)
...@@ -27,10 +35,23 @@ describe Gitlab::CurrentSettings do
end
it 'falls back to DB if Caching fails' do
db_settings = ApplicationSetting.create!(ApplicationSetting.defaults)
expect(ApplicationSetting).to receive(:cached).and_raise(::Redis::BaseError)
expect(Rails.cache).to receive(:fetch).with(ApplicationSetting::CACHE_KEY).and_raise(Redis::BaseError)
expect(current_application_settings).to eq(db_settings)
end
it 'creates default ApplicationSettings if none are present' do
expect(ApplicationSetting).to receive(:cached).and_raise(::Redis::BaseError)
expect(Rails.cache).to receive(:fetch).with(ApplicationSetting::CACHE_KEY).and_raise(Redis::BaseError)
settings = current_application_settings
expect(settings).to be_a(ApplicationSetting)
expect(settings).to be_persisted
expect(settings).to have_attributes(settings_from_defaults)
end
context 'with migrations pending' do
...
require 'spec_helper'
describe Gitlab::IssuableMetadata, lib: true do
let(:user) { create(:user) }
let!(:project) { create(:project, :public, :repository, creator: user, namespace: user.namespace) }
subject { Class.new { include Gitlab::IssuableMetadata }.new }
it 'returns an empty Hash if an empty collection is provided' do
expect(subject.issuable_meta_data(Issue.none, 'Issue')).to eq({})
end
context 'issues' do
let!(:issue) { create(:issue, author: user, project: project) }
let!(:closed_issue) { create(:issue, state: :closed, author: user, project: project) }
let!(:downvote) { create(:award_emoji, :downvote, awardable: closed_issue) }
let!(:upvote) { create(:award_emoji, :upvote, awardable: issue) }
let!(:merge_request) { create(:merge_request, :simple, author: user, assignee: user, source_project: project, target_project: project, title: "Test") }
let!(:closing_issues) { create(:merge_requests_closing_issues, issue: issue, merge_request: merge_request) }
it 'aggregates stats on issues' do
data = subject.issuable_meta_data(Issue.all, 'Issue')
expect(data.count).to eq(2)
expect(data[issue.id].upvotes).to eq(1)
expect(data[issue.id].downvotes).to eq(0)
expect(data[issue.id].notes_count).to eq(0)
expect(data[issue.id].merge_requests_count).to eq(1)
expect(data[closed_issue.id].upvotes).to eq(0)
expect(data[closed_issue.id].downvotes).to eq(1)
expect(data[closed_issue.id].notes_count).to eq(0)
expect(data[closed_issue.id].merge_requests_count).to eq(0)
end
end
context 'merge requests' do
let!(:merge_request) { create(:merge_request, :simple, author: user, assignee: user, source_project: project, target_project: project, title: "Test") }
let!(:merge_request_closed) { create(:merge_request, state: "closed", source_project: project, target_project: project, title: "Closed Test") }
let!(:downvote) { create(:award_emoji, :downvote, awardable: merge_request) }
let!(:upvote) { create(:award_emoji, :upvote, awardable: merge_request) }
let!(:note) { create(:note_on_merge_request, author: user, project: project, noteable: merge_request, note: "a comment on a MR") }
it 'aggregates stats on merge requests' do
data = subject.issuable_meta_data(MergeRequest.all, 'MergeRequest')
expect(data.count).to eq(2)
expect(data[merge_request.id].upvotes).to eq(1)
expect(data[merge_request.id].downvotes).to eq(1)
expect(data[merge_request.id].notes_count).to eq(1)
expect(data[merge_request.id].merge_requests_count).to eq(0)
expect(data[merge_request_closed.id].upvotes).to eq(0)
expect(data[merge_request_closed.id].downvotes).to eq(0)
expect(data[merge_request_closed.id].notes_count).to eq(0)
expect(data[merge_request_closed.id].merge_requests_count).to eq(0)
end
end
end
...@@ -174,6 +174,18 @@ describe ApplicationSetting, models: true do
end
end
describe '.current' do
context 'redis unavailable' do
it 'returns an ApplicationSetting' do
allow(Rails.cache).to receive(:fetch).and_call_original
allow(ApplicationSetting).to receive(:last).and_return(:last)
expect(Rails.cache).to receive(:fetch).with(ApplicationSetting::CACHE_KEY).and_raise(ArgumentError)
expect(ApplicationSetting.current).to eq(:last)
end
end
end
context 'restricted signup domains' do
it 'sets single domain' do
setting.domain_whitelist_raw = 'example.com'
...
...@@ -16,7 +16,11 @@ describe API::MergeRequests do
let!(:label) do
create(:label, title: 'label', color: '#FFAABB', project: project)
end
let!(:label2) { create(:label, title: 'a-test', color: '#FFFFFF', project: project) }
let!(:label_link) { create(:label_link, label: label, target: merge_request) }
let!(:label_link2) { create(:label_link, label: label2, target: merge_request) }
let!(:downvote) { create(:award_emoji, :downvote, awardable: merge_request) }
let!(:upvote) { create(:award_emoji, :upvote, awardable: merge_request) }
before do
project.team << [user, :reporter]
...@@ -32,6 +36,18 @@ describe API::MergeRequests do
end
context "when authenticated" do
it 'avoids N+1 queries' do
control_count = ActiveRecord::QueryRecorder.new do
get api("/projects/#{project.id}/merge_requests", user)
end.count
create(:merge_request, state: 'closed', milestone: milestone1, author: user, assignee: user, source_project: project, target_project: project, title: "Test", created_at: base_time)
expect do
get api("/projects/#{project.id}/merge_requests", user)
end.not_to exceed_query_limit(control_count)
end
it "returns an array of all merge_requests" do it "returns an array of all merge_requests" do
get api("/projects/#{project.id}/merge_requests", user) get api("/projects/#{project.id}/merge_requests", user)
...@@ -44,6 +60,9 @@ describe API::MergeRequests do ...@@ -44,6 +60,9 @@ describe API::MergeRequests do
expect(json_response.last['sha']).to eq(merge_request.diff_head_sha) expect(json_response.last['sha']).to eq(merge_request.diff_head_sha)
expect(json_response.last['merge_commit_sha']).to be_nil
expect(json_response.last['merge_commit_sha']).to eq(merge_request.merge_commit_sha)
expect(json_response.last['merge_commit_sha']).to eq(merge_request.merge_commit_sha) expect(json_response.last['merge_commit_sha']).to eq(merge_request.merge_commit_sha)
expect(json_response.last['downvotes']).to eq(1)
expect(json_response.last['upvotes']).to eq(1)
expect(json_response.last['labels']).to eq([label2.title, label.title])
expect(json_response.first['title']).to eq(merge_request_merged.title)
expect(json_response.first['sha']).to eq(merge_request_merged.diff_head_sha)
expect(json_response.first['merge_commit_sha']).not_to be_nil
...@@ -146,7 +165,7 @@ describe API::MergeRequests do
expect(response).to have_http_status(200)
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
expect(json_response.first['labels']).to eq([label2.title, label.title])
end
it 'returns an array of labeled merge requests where all labels match' do
...@@ -237,8 +256,8 @@ describe API::MergeRequests do
expect(json_response['author']).to be_a Hash
expect(json_response['target_branch']).to eq(merge_request.target_branch)
expect(json_response['source_branch']).to eq(merge_request.source_branch)
expect(json_response['upvotes']).to eq(1)
expect(json_response['downvotes']).to eq(1)
expect(json_response['source_project_id']).to eq(merge_request.source_project.id)
expect(json_response['target_project_id']).to eq(merge_request.target_project.id)
expect(json_response['work_in_progress']).to be_falsy
...
...@@ -208,6 +208,7 @@ module TestEnv
# Otherwise they'd be created by the first test, often timing out and
# causing a transient test failure
def eager_load_driver_server
return unless ENV['CI']
return unless defined?(Capybara)
puts "Starting the Capybara driver server..."
...