Commit 4156a269 authored by Rémy Coutable's avatar Rémy Coutable

Merge remote-tracking branch 'origin/master' into ce-to-ee-2018-07-30

Signed-off-by: Rémy Coutable <remy@rymai.me>
parents 93f30155 979f8686
......@@ -602,8 +602,8 @@ rspec-pg-ee-rails5 0 2: *rspec-ee-pg-rails5
rspec-pg-ee-rails5 1 2: *rspec-ee-pg-rails5
rspec-mysql-ee-rails5 0 2: *rspec-ee-mysql-rails5
rspec-mysql-ee-rails5 1 2: *rspec-ee-mysql-rails5
rspec-pg-geo-rails5 0 2: *rspec-metadata-pg-geo-9-6-rails5
rspec-pg-geo-rails5 1 2: *rspec-metadata-pg-geo-10-2-rails5
rspec-pg-geo-rails5 0 1: *rspec-metadata-pg-geo-9-6-rails5
rspec-pg-10-geo-rails5 0 1: *rspec-metadata-pg-geo-10-2-rails5
## EE jobs
rspec-pg 0 30: *rspec-metadata-pg
......
Please view this file on the master branch, on stable branches it's out of date.
## 11.1.4 (2018-07-30)
- No changes.
## 11.1.3 (2018-07-27)
### Fixed (1 change)
......
......@@ -72,14 +72,14 @@ GEM
encryptor (~> 3.0.0)
attr_required (1.0.0)
awesome_print (1.8.0)
aws-sdk (2.11.64)
aws-sdk-resources (= 2.11.64)
aws-sdk-core (2.11.64)
aws-sdk (2.9.32)
aws-sdk-resources (= 2.9.32)
aws-sdk-core (2.9.32)
aws-sigv4 (~> 1.0)
jmespath (~> 1.0)
aws-sdk-resources (2.11.64)
aws-sdk-core (= 2.11.64)
aws-sigv4 (1.0.2)
aws-sdk-resources (2.9.32)
aws-sdk-core (= 2.9.32)
aws-sigv4 (1.0.0)
axiom-types (0.1.1)
descendants_tracker (~> 0.0.4)
ice_nine (~> 0.11.0)
......@@ -227,8 +227,8 @@ GEM
multipart-post (>= 1.2, < 3)
faraday_middleware (0.12.2)
faraday (>= 0.7.4, < 1.0)
faraday_middleware-aws-signers-v4 (0.1.9)
aws-sdk-resources (>= 2, < 3)
faraday_middleware-aws-signers-v4 (0.1.7)
aws-sdk-resources (~> 2)
faraday (~> 0.9)
faraday_middleware-multi_json (0.0.6)
faraday_middleware
......@@ -459,7 +459,7 @@ GEM
activesupport
multipart-post
oauth (~> 0.5, >= 0.5.0)
jmespath (1.4.0)
jmespath (1.3.1)
jquery-atwho-rails (1.3.2)
json (1.8.6)
json-jwt (1.9.4)
......
......@@ -128,13 +128,7 @@ class IssuesFinder < IssuableFinder
end
def by_assignee(items)
if assignees.any?
assignees.each do |assignee|
items = items.assigned_to(assignee)
end
items
elsif assignee && assignees.empty?
if assignee
items.assigned_to(assignee)
elsif no_assignee?
items.unassigned
......@@ -144,17 +138,4 @@ class IssuesFinder < IssuableFinder
items
end
end
def assignees
return @assignees if defined?(@assignees)
@assignees =
if params[:assignee_ids]
User.where(id: params[:assignee_ids])
elsif params[:assignee_username]
User.where(username: params[:assignee_username])
else
[]
end
end
end
......@@ -37,18 +37,6 @@ class MergeRequestsFinder < IssuableFinder
private
def by_assignee(items)
if assignee
items = items.where(assignee_id: assignee.id)
elsif no_assignee?
items = items.where(assignee_id: nil)
elsif assignee_id? || assignee_username? # assignee not found
items = items.none
end
items
end
def source_branch
@source_branch ||= params[:source_branch].presence
end
......
......@@ -114,7 +114,7 @@ module ApplicationSettingsHelper
def circuitbreaker_failure_count_help_text
health_link = link_to(s_('AdminHealthPageLink|health page'), admin_health_check_path)
api_link = link_to(s_('CircuitBreakerApiLink|circuitbreaker api'), help_page_path("api/repository_storage_health"))
message = _("The number of failures of after which GitLab will completely "\
message = _("The number of failures after which GitLab will completely "\
"prevent access to the storage. The number of failures can be "\
"reset in the admin interface: %{link_to_health_page} or using "\
"the %{api_documentation_link}.")
......@@ -138,8 +138,8 @@ module ApplicationSettingsHelper
end
def circuitbreaker_check_interval_help_text
_("The time in seconds between storage checks. When a previous check did "\
"complete yet, GitLab will skip a check.")
_("The time in seconds between storage checks. If a check did "\
"not complete yet, GitLab will skip the next check.")
end
def visible_attributes
......@@ -178,8 +178,8 @@ module ApplicationSettingsHelper
:enabled_git_access_protocol,
:enforce_terms,
:gitaly_timeout_default,
:gitaly_timeout_medium,
:gitaly_timeout_fast,
:gitaly_timeout_medium,
:gravatar_enabled,
:hashed_storage_enabled,
:help_page_hide_commercial_content,
......@@ -209,8 +209,8 @@ module ApplicationSettingsHelper
:metrics_timeout,
:mirror_available,
:pages_domain_verification_enabled,
:password_authentication_enabled_for_web,
:password_authentication_enabled_for_git,
:password_authentication_enabled_for_web,
:performance_bar_allowed_group_path,
:performance_bar_enabled,
:plantuml_enabled,
......@@ -239,15 +239,24 @@ module ApplicationSettingsHelper
:signup_enabled,
:terminal_max_session_time,
:terms,
<<<<<<< HEAD
:throttle_unauthenticated_enabled,
:throttle_unauthenticated_requests_per_period,
:throttle_unauthenticated_period_in_seconds,
:throttle_authenticated_web_enabled,
:throttle_authenticated_web_requests_per_period,
:throttle_authenticated_web_period_in_seconds,
=======
>>>>>>> origin/master
:throttle_authenticated_api_enabled,
:throttle_authenticated_api_requests_per_period,
:throttle_authenticated_api_period_in_seconds,
:throttle_authenticated_api_requests_per_period,
:throttle_authenticated_web_enabled,
:throttle_authenticated_web_period_in_seconds,
:throttle_authenticated_web_requests_per_period,
:throttle_unauthenticated_enabled,
:throttle_unauthenticated_period_in_seconds,
:throttle_unauthenticated_requests_per_period,
:two_factor_grace_period,
:unique_ips_limit_enabled,
:unique_ips_limit_per_user,
......
......@@ -230,6 +230,7 @@ class ApplicationSetting < ActiveRecord::Base
after_sign_up_text: nil,
allow_local_requests_from_hooks_and_services: false,
akismet_enabled: false,
allow_local_requests_from_hooks_and_services: false,
authorized_keys_enabled: true, # TODO default to false if the instance is configured to use AuthorizedKeysCommand
container_registry_token_expire_delay: 5,
default_artifacts_expire_in: '30 days',
......@@ -274,8 +275,8 @@ class ApplicationSetting < ActiveRecord::Base
repository_storages: ['default'],
require_two_factor_authentication: false,
restricted_visibility_levels: Settings.gitlab['restricted_visibility_levels'],
session_expire_delay: Settings.gitlab['session_expire_delay'],
send_user_confirmation_email: false,
session_expire_delay: Settings.gitlab['session_expire_delay'],
shared_runners_enabled: Settings.gitlab_ci['shared_runners_enabled'],
shared_runners_text: nil,
sidekiq_throttling_enabled: false,
......
# frozen_string_literal: true
class ProjectPresenter < Gitlab::View::Presenter::Delegated
prepend EE::ProjectPresenter
include ActionView::Helpers::NumberHelper
include ActionView::Helpers::UrlHelper
include GitlabRoutingHelper
......
......@@ -2,6 +2,7 @@ module EE
module IssuesFinder
extend ActiveSupport::Concern
extend ::Gitlab::Utils::Override
include ::Gitlab::Utils::StrongMemoize
module ClassMethods
extend ::Gitlab::Utils::Override
......@@ -42,5 +43,30 @@ module EE
def filter_by_any_weight?
params[:weight] == ::Issue::WEIGHT_ANY
end
override :by_assignee
def by_assignee(items)
if assignees.any?
assignees.each do |assignee|
items = items.assigned_to(assignee)
end
return items
end
super
end
def assignees
strong_memoize(:assignees) do
if params[:assignee_ids]
::User.where(id: params[:assignee_ids])
elsif params[:assignee_username]
::User.where(username: params[:assignee_username])
else
[]
end
end
end
end
end
......@@ -50,6 +50,7 @@ module EE
override :visible_attributes
def visible_attributes
super + [
:allow_group_owners_to_manage_ldap,
:check_namespace_plan,
:elasticsearch_aws,
:elasticsearch_aws_access_key,
......@@ -61,14 +62,13 @@ module EE
:elasticsearch_url,
:geo_status_timeout,
:help_text,
:pseudonymizer_enabled,
:repository_size_limit,
:shared_runners_minutes,
:slack_app_enabled,
:slack_app_id,
:slack_app_secret,
:slack_app_verification_token,
:allow_group_owners_to_manage_ldap,
:pseudonymizer_enabled
:slack_app_verification_token
]
end
......@@ -82,13 +82,13 @@ module EE
def self.external_authorization_service_attributes
[
:external_authorization_service_enabled,
:external_authorization_service_url,
:external_authorization_service_default_label,
:external_authorization_service_timeout,
:external_auth_client_cert,
:external_auth_client_key,
:external_auth_client_key_pass
:external_auth_client_key_pass,
:external_authorization_service_default_label,
:external_authorization_service_enabled,
:external_authorization_service_timeout,
:external_authorization_service_url
]
end
......
......@@ -96,12 +96,12 @@ module EE
mirror_capacity_threshold: Settings.gitlab['mirror_capacity_threshold'],
mirror_max_capacity: Settings.gitlab['mirror_max_capacity'],
mirror_max_delay: Settings.gitlab['mirror_max_delay'],
pseudonymizer_enabled: false,
repository_size_limit: 0,
slack_app_enabled: false,
slack_app_id: nil,
slack_app_secret: nil,
slack_app_verification_token: nil,
pseudonymizer_enabled: false
slack_app_verification_token: nil
)
end
end
......
......@@ -71,7 +71,7 @@ class Geo::ProjectRegistry < Geo::BaseRegistry
"#{type}_retry_at" => next_retry_time(new_count))
end
def finish_sync!(type)
def finish_sync!(type, missing_on_primary = false)
update!(
# Indicate that the sync succeeded (but separately mark as synced atomically)
"last_#{type}_successful_sync_at" => Time.now,
......@@ -79,6 +79,7 @@ class Geo::ProjectRegistry < Geo::BaseRegistry
"#{type}_retry_at" => nil,
"force_to_redownload_#{type}" => false,
"last_#{type}_sync_failure" => nil,
"#{type}_missing_on_primary" => missing_on_primary,
# Indicate that repository verification needs to be done again
"#{type}_verification_checksum_sha" => nil,
......
......@@ -204,7 +204,7 @@ class GeoNodeStatus < ActiveRecord::Base
def load_secondary_data
if Gitlab::Geo.secondary?
self.db_replication_lag_seconds = Gitlab::Geo::HealthCheck.db_replication_lag_seconds
self.cursor_last_event_id = Geo::EventLogState.last_processed&.event_id
self.cursor_last_event_id = current_cursor_last_event_id
self.cursor_last_event_date = Geo::EventLog.find_by(id: self.cursor_last_event_id)&.created_at
self.repositories_synced_count = projects_finder.count_synced_repositories
self.repositories_failed_count = projects_finder.count_failed_repositories
......@@ -241,6 +241,15 @@ class GeoNodeStatus < ActiveRecord::Base
end
end
def current_cursor_last_event_id
return unless Gitlab::Geo.secondary?
min_gap_id = ::Gitlab::Geo::EventGapTracking.min_gap_id
last_processed_id = Geo::EventLogState.last_processed&.event_id
[min_gap_id, last_processed_id].compact.min
end
def healthy?
!outdated? && status_message_healthy?
end
......
......@@ -50,6 +50,9 @@ module EE
@subject.feature_available?(:pod_logs, @user)
end
with_scope :subject
condition(:security_reports_feature_available) { @subject.security_reports_feature_available? }
condition(:prometheus_alerts_enabled) do
@subject.feature_available?(:prometheus_alerts, @user)
end
......@@ -91,9 +94,10 @@ module EE
rule { can?(:developer_access) }.policy do
enable :admin_board
enable :admin_vulnerability_feedback
enable :read_project_security_dashboard
end
rule { can?(:developer_access) & security_reports_feature_available }.enable :read_project_security_dashboard
rule { can?(:read_project) }.enable :read_vulnerability_feedback
rule { license_management_enabled & can?(:read_project) }.enable :read_software_license_policy
......
# frozen_string_literal: true
module EE
  # EE extension of ProjectPresenter: appends a Security Dashboard anchor to
  # the project statistics bar when the user may view it.
  module ProjectPresenter
    extend ::Gitlab::Utils::Override

    override :statistics_anchors
    def statistics_anchors(show_auto_devops_callout:)
      super + extra_statistics_anchors
    end

    # Anchors contributed by EE; empty when the dashboard is not available.
    def extra_statistics_anchors
      return [] unless can?(current_user, :read_project_security_dashboard, project)
      return [] unless project.latest_pipeline_with_security_reports

      [security_dashboard_data]
    end

    private

    def security_dashboard_data
      OpenStruct.new(
        enabled: true,
        label: _('Security Dashboard'),
        link: project_security_dashboard_path(project)
      )
    end
  end
end
......@@ -126,10 +126,10 @@ module Geo
@registry ||= Geo::ProjectRegistry.find_or_initialize_by(project_id: project.id)
end
def mark_sync_as_successful
def mark_sync_as_successful(missing_on_primary: false)
log_info("Marking #{type} sync as successful")
persisted = registry.finish_sync!(type)
persisted = registry.finish_sync!(type, missing_on_primary)
reschedule_sync unless persisted
......
......@@ -15,7 +15,7 @@ module Geo
# If it does not exist we should consider it as successfully downloaded.
if e.message.include? Gitlab::GitAccess::ERROR_MESSAGES[:no_repo]
log_info('Repository is not found, marking it as successfully synced')
mark_sync_as_successful
mark_sync_as_successful(missing_on_primary: true)
else
fail_registry!('Error syncing repository', e)
end
......
......@@ -13,7 +13,7 @@ module Geo
# If it does not exist we should consider it as successfully downloaded.
if e.message.include? Gitlab::GitAccess::ERROR_MESSAGES[:no_repo]
log_info('Wiki repository is not found, marking it as successfully synced')
mark_sync_as_successful
mark_sync_as_successful(missing_on_primary: true)
else
fail_registry!('Error syncing wiki repository', e)
end
......
- return unless @project.security_reports_feature_available? && can?(current_user, :read_project_security_dashboard, @project)
- return unless can?(current_user, :read_project_security_dashboard, @project)
= nav_link(path: 'projects/security/dashboard#show') do
= link_to project_security_dashboard_path(@project), title: _('Security Dashboard'), class: 'shortcuts-project-security-dashboard' do
......
---
title: Add Security Dashboard to project quick links
merge_request: 6652
author:
type: added
---
title: Ensure that Create issue button is shown in vulnerability dialog
merge_request: 6708
author:
type: fixed
---
title: Track registries marked as synced when repository is not found
merge_request: 6694
author:
type: other
---
title: Use same gem versions for Rails 5 as for Rails 4
merge_request: 6712
author: Jasper Maes
type: fixed
---
title: Rails5 correct wrong geo job name
merge_request: 6713
author: Jasper Maes
type: fixed
---
title: Track the Geo event log gaps in redis and handle them later
merge_request: 6640
author:
type: changed
# frozen_string_literal: true
class AddMissingOnPrimaryToProjectRegistry < ActiveRecord::Migration
  # Adds boolean flags recording that the repository (or wiki) was absent on
  # the Geo primary node the last time a secondary synced it.
  def change
    # One column per repository type, both nullable booleans.
    %i[repository wiki].each do |type|
      add_column :project_registry, :"#{type}_missing_on_primary", :boolean
    end
  end
end
......@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20180613184349) do
ActiveRecord::Schema.define(version: 20180727221937) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
......@@ -78,6 +78,8 @@ ActiveRecord::Schema.define(version: 20180613184349) do
t.datetime_with_timezone "last_repository_check_at"
t.datetime_with_timezone "resync_repository_was_scheduled_at"
t.datetime_with_timezone "resync_wiki_was_scheduled_at"
t.boolean "repository_missing_on_primary"
t.boolean "wiki_missing_on_primary"
end
add_index "project_registry", ["last_repository_successful_sync_at"], name: "index_project_registry_on_last_repository_successful_sync_at", using: :btree
......
module Gitlab
  module Geo
    # Tracks gaps in the Geo event log using a Redis sorted set.
    #
    # When the log cursor consumes events whose ids are not contiguous, the
    # skipped ids ("gaps") are stored in Redis, scored with the time they were
    # detected. Gaps older than GAP_GRACE_PERIOD are offered for replay via
    # #fill_gaps; gaps older than GAP_OUTDATED_PERIOD are dropped.
    class EventGapTracking
      include Utils::StrongMemoize
      include ::Gitlab::Geo::LogHelpers

      # Id of the last event processed; callers set this between batches.
      attr_accessor :previous_id

      GEO_EVENT_LOG_GAPS = 'geo:event_log:gaps'.freeze
      # Gaps younger than this are left alone — the event may simply not have
      # replicated yet.
      GAP_GRACE_PERIOD = 10.minutes
      # Gaps older than this are considered lost and are discarded.
      GAP_OUTDATED_PERIOD = 1.hour

      class << self
        # Returns the numerically smallest tracked gap id, or nil when there
        # are none.
        def min_gap_id
          with_redis do |redis|
            # ZRANGE returns members as strings; compare numerically. A plain
            # String#min is lexicographic (e.g. "13" < "9") and would return
            # the wrong id once gap ids differ in digit count.
            redis.zrange(GEO_EVENT_LOG_GAPS, 0, -1).map(&:to_i).min
          end
        end

        # Number of gap ids currently tracked in the sorted set.
        def gap_count
          with_redis do |redis|
            redis.zcount(GEO_EVENT_LOG_GAPS, '-inf', '+inf')
          end
        end

        def with_redis
          ::Gitlab::Redis::SharedState.with { |redis| yield redis }
        end
      end

      delegate :with_redis, to: :class

      def initialize(logger = ::Gitlab::Geo::Logger)
        @logger = logger
        @previous_id = 0
      end

      # Records any gap between previous_id and current_id, then advances
      # previous_id — the ensure guarantees the advance even if tracking fails.
      def check!(current_id)
        return unless previous_id > 0
        return unless gap?(current_id)

        track_gaps(current_id)
      ensure
        self.previous_id = current_id
      end

      # Drops outdated gaps, then yields each remaining gap id older than the
      # grace period. The block should return whether the event was handled;
      # handled ids are removed from the set.
      def fill_gaps
        with_redis do |redis|
          redis.zremrangebyscore(GEO_EVENT_LOG_GAPS, '-inf', outdated_timestamp)
          gap_ids = redis.zrangebyscore(GEO_EVENT_LOG_GAPS, '-inf', grace_timestamp, with_scores: true)

          # Score (detection time) is not needed here; only the member id is.
          gap_ids.each do |event_id, _score|
            handled = yield event_id.to_i
            redis.zrem(GEO_EVENT_LOG_GAPS, event_id) if handled
          end
        end
      end

      # Stores every id strictly between previous_id and current_id, all
      # scored with the same detection timestamp.
      def track_gaps(current_id)
        log_info("Event log gap detected", previous_event_id: previous_id, current_event_id: current_id)

        with_redis do |redis|
          expire_time = Time.now.to_i

          ((previous_id + 1)..(current_id - 1)).each do |gap_id|
            redis.zadd(GEO_EVENT_LOG_GAPS, expire_time, gap_id)
          end
        end
      end

      # True when at least one id was skipped between previous_id and
      # current_id. A non-positive previous_id means "no baseline yet".
      def gap?(current_id)
        return false if previous_id <= 0

        current_id > (previous_id + 1)
      end

      private

      def grace_timestamp
        (Time.now - GAP_GRACE_PERIOD).to_i
      end

      def outdated_timestamp
        (Time.now - GAP_OUTDATED_PERIOD).to_i
      end
    end
  end
end
......@@ -11,7 +11,7 @@ module Gitlab
@filename = upload.absolute_path
@request_data = build_request_data(upload)
rescue ObjectStorage::RemoteStoreError
Rails.logger.warn "Cannot transfer a remote object."
::Gitlab::Geo::Logger.warn "Cannot transfer a remote object."
end
private
......
......@@ -34,6 +34,8 @@ module Gitlab
end
def run_once!
gap_tracking.fill_gaps { |event_id| handle_gap_event(event_id) }
# Wrap this with the connection to make it possible to reconnect if
# PGbouncer dies: https://github.com/rails/rails/issues/29189
ActiveRecord::Base.connection_pool.with_connection do
......@@ -41,44 +43,50 @@ module Gitlab
end
end
private
def handle_events(batch, last_id)
def handle_events(batch, previous_batch_last_id)
logger.info("Handling events", first_id: batch.first.id, last_id: batch.last.id)
last_event_id = last_id
gap_tracking.previous_id = previous_batch_last_id
batch.each_with_index do |event_log, index|
event = event_log.event
batch.each do |event_log|
gap_tracking.check!(event_log.id)
# If a project is deleted, the event log and its associated event data
# could be purged from the log. We ignore this and move along.
unless event
logger.warn("Unknown event", event_log_id: event_log.id)
next
end
handle_single_event(event_log)
end
end
check_event_id(last_event_id, event_log.id) if last_event_id > 0
last_event_id = event_log.id
def handle_single_event(event_log)
event = event_log.event
unless can_replay?(event_log)
logger.event_info(event_log.created_at, 'Skipped event', event_data(event_log))
next
end
# If a project is deleted, the event log and its associated event data
# could be purged from the log. We ignore this and move along.
unless event
logger.warn("Unknown event", event_log_id: event_log.id)
return
end
begin
event_klass_for(event).new(event, event_log.created_at, logger).process
rescue NoMethodError => e
logger.error(e.message)
raise e
end
unless can_replay?(event_log)
logger.event_info(event_log.created_at, 'Skipped event', event_data(event_log))
return
end
process_event(event, event_log)
end
def check_event_id(last_event_id, current_log_id)
if last_event_id + 1 != current_log_id
logger.info("Event log gap", previous_event_log_id: last_event_id, event_log_id: current_log_id)
end
def process_event(event, event_log)
event_klass_for(event).new(event, event_log.created_at, logger).process
rescue NoMethodError => e
logger.error(e.message)
raise e
end
def handle_gap_event(event_id)
event_log = ::Geo::EventLog.find_by(id: event_id)
return false unless event_log
handle_single_event(event_log)
true
end
def event_klass_for(event)
......@@ -120,6 +128,10 @@ module Gitlab
sleep(delay + rand(1..20) * 0.1)
end
def gap_tracking
@gap_tracking ||= ::Gitlab::Geo::EventGapTracking.new(logger)
end
def logger
strong_memoize(:logger) do
Gitlab::Geo::LogCursor::Logger.new(self.class, log_level)
......
require 'spec_helper'

# Unit coverage for the Redis-backed Geo event-log gap tracker:
# detection (#check!/#gap?), persistence (#track_gaps), replay (#fill_gaps)
# and the class-level readers (.min_gap_id, .gap_count). Timecop shifts the
# clock so the grace (10 min) and outdated (1 hour) windows can be exercised.
describe Gitlab::Geo::EventGapTracking, :clean_gitlab_redis_cache do
  let(:previous_event_id) { 7 }
  let(:gap_id) { previous_event_id + 1 }
  let(:event_id_with_gap) { previous_event_id + 2 }

  subject(:gap_tracking) { described_class.new }

  before do
    gap_tracking.previous_id = previous_event_id
  end

  describe '.min_gap_id' do
    it 'returns nil when there are no gaps' do
      expect(described_class.min_gap_id).to eq(nil)
    end

    it 'returns the lowest gap id' do
      # Two gaps recorded at different past times: 19 (18->20) and 13 (12->14).
      Timecop.travel(50.minutes.ago) do
        gap_tracking.previous_id = 18
        gap_tracking.track_gaps(20)
      end

      Timecop.travel(40.minutes.ago) do
        gap_tracking.previous_id = 12
        gap_tracking.track_gaps(14)
      end

      expect(described_class.min_gap_id).to eq(13)
    end
  end

  describe '.gap_count' do
    it 'returns 0 when there are no gaps' do
      expect(described_class.gap_count).to be_zero
    end

    it 'returns the number of gaps' do
      gap_tracking.previous_id = 18
      gap_tracking.track_gaps(20)

      gap_tracking.previous_id = 12
      gap_tracking.track_gaps(14)

      expect(described_class.gap_count).to eq(2)
    end
  end

  describe '#check!' do
    it 'does nothing when previous id not valid' do
      gap_tracking.previous_id = 0

      expect(gap_tracking).not_to receive(:gap?)

      gap_tracking.check!(event_id_with_gap)

      # previous_id must still advance even when no gap check runs.
      expect(gap_tracking.previous_id).to eq(event_id_with_gap)
    end

    it 'does nothing when there is no gap' do
      expect(gap_tracking).not_to receive(:track_gaps)

      gap_tracking.check!(previous_event_id + 1)

      expect(gap_tracking.previous_id).to eq(previous_event_id + 1)
    end

    it 'tracks the gap if there is one' do
      expect(gap_tracking).to receive(:track_gaps)

      gap_tracking.check!(event_id_with_gap)

      expect(gap_tracking.previous_id).to eq(event_id_with_gap)
    end
  end

  describe '#fill_gaps' do
    it 'ignore gaps that are less than 10 minutes old' do
      Timecop.freeze do
        gap_tracking.check!(event_id_with_gap)

        expect { |blk| gap_tracking.fill_gaps(&blk) }.not_to yield_with_args(anything)
      end
    end

    it 'handles gaps that are more than 10 minutes old' do
      gap_tracking.check!(event_id_with_gap)

      Timecop.travel(12.minutes) do
        expect { |blk| gap_tracking.fill_gaps(&blk) }.to yield_with_args(gap_id)
      end
    end

    it 'drops gaps older than 1 hour' do
      gap_tracking.check!(event_id_with_gap)

      Timecop.travel(62.minutes) do
        expect { |blk| gap_tracking.fill_gaps(&blk) }.not_to yield_with_args(anything)
      end

      expect(read_gaps).to be_empty
    end
  end

  describe '#track_gaps' do
    it 'logs a message' do
      expect(gap_tracking).to receive(:log_info).with(/gap detected/, hash_including(previous_event_id: previous_event_id, current_event_id: event_id_with_gap))

      gap_tracking.track_gaps(event_id_with_gap)
    end

    it 'saves the gap id in redis' do
      Timecop.freeze do
        gap_tracking.track_gaps(event_id_with_gap)

        # Redis stores members as strings, scored with the detection time.
        expect(read_gaps).to contain_exactly([gap_id.to_s, Time.now.to_i])
      end
    end

    it 'saves a range of gaps id in redis' do
      Timecop.freeze do
        gap_tracking.track_gaps(event_id_with_gap + 3)

        expected_gaps = ((previous_event_id + 1)..(event_id_with_gap + 2)).collect { |id| [id.to_s, Time.now.to_i] }

        expect(read_gaps).to match_array(expected_gaps)
      end
    end

    it 'saves the gaps in order' do
      expected_gaps = []

      Timecop.freeze do
        gap_tracking.track_gaps(event_id_with_gap)

        expected_gaps << [gap_id.to_s, Time.now.to_i]
      end

      Timecop.travel(2.minutes) do
        gap_tracking.previous_id = 17
        gap_tracking.track_gaps(19)

        expected_gaps << [18.to_s, Time.now.to_i]
      end

      expect(read_gaps).to eq(expected_gaps)
    end
  end

  describe '#gap?' do
    it 'returns false when current_id is the previous +1' do
      expect(gap_tracking.gap?(previous_event_id + 1)).to be_falsy
    end

    it 'returns true when current_id is the previous +2' do
      expect(gap_tracking.gap?(previous_event_id + 2)).to be_truthy
    end

    it 'returns false when current_id is equal to the previous' do
      expect(gap_tracking.gap?(previous_event_id)).to be_falsy
    end

    it 'returns false when current_id less than the previous' do
      expect(gap_tracking.gap?(previous_event_id - 1)).to be_falsy
    end

    it 'returns false when previous id is 0' do
      gap_tracking.previous_id = 0

      expect(gap_tracking.gap?(100)).to be_falsy
    end
  end

  # Helper: reads the full gap sorted set as [member, score] pairs.
  def read_gaps
    ::Gitlab::Redis::SharedState.with do |redis|
      redis.zrangebyscore(described_class::GEO_EVENT_LOG_GAPS, '-inf', '+inf', with_scores: true)
    end
  end
end
......@@ -84,6 +84,15 @@ describe Gitlab::Geo::LogCursor::Daemon, :postgresql, :clean_gitlab_redis_shared
daemon.run_once!
end
it 'calls #handle_gap_event for each gap the gap tracking finds' do
allow(daemon.gap_tracking).to receive(:fill_gaps).and_yield(1).and_yield(5)
expect(daemon).to receive(:handle_gap_event).with(1)
expect(daemon).to receive(:handle_gap_event).with(5)
daemon.run_once!
end
end
context 'when node has namespace restrictions' do
......@@ -116,32 +125,28 @@ describe Gitlab::Geo::LogCursor::Daemon, :postgresql, :clean_gitlab_redis_shared
daemon.run_once!
end
it "logs a message if an event was skipped" do
it 'detects when an event was skipped' do
updated_event = create(:geo_repository_updated_event, project: project)
new_event = create(:geo_event_log, id: event_log.id + 2, repository_updated_event: updated_event)
expect(Gitlab::Geo::Logger).to receive(:info)
.with(hash_including(
class: 'Gitlab::Geo::LogCursor::Daemon',
message: 'Event log gap',
previous_event_log_id: event_log.id,
event_log_id: new_event.id))
daemon.run_once!
expect(read_gaps).to eq([event_log.id + 1])
expect(::Geo::EventLogState.last_processed.id).to eq(new_event.id)
end
it 'detects when an event was skipped between batches' do
updated_event = create(:geo_repository_updated_event, project: project)
new_event = create(:geo_event_log, repository_updated_event: updated_event)
# Test that the cursor picks up from the last stored ID
third_event = create(:geo_event_log, id: new_event.id + 3, repository_updated_event: updated_event)
daemon.run_once!
expect(Gitlab::Geo::Logger).to receive(:info)
.with(hash_including(
class: 'Gitlab::Geo::LogCursor::Daemon',
message: 'Event log gap',
previous_event_log_id: new_event.id,
event_log_id: third_event.id))
create(:geo_event_log, id: new_event.id + 3, repository_updated_event: updated_event)
daemon.run_once!
expect(read_gaps).to eq([new_event.id + 1, new_event.id + 2])
end
it "logs a message if an associated event can't be found" do
......@@ -183,4 +188,67 @@ describe Gitlab::Geo::LogCursor::Daemon, :postgresql, :clean_gitlab_redis_shared
end
end
end
describe '#handle_events' do
let(:batch) { create_list(:geo_event_log, 2) }
it 'passes the previous batch id on to gap tracking' do
expect(daemon.gap_tracking).to receive(:previous_id=).with(55).ordered
batch.each do |event_log|
expect(daemon.gap_tracking).to receive(:previous_id=).with(event_log.id).ordered
end
daemon.handle_events(batch, 55)
end
it 'checks for gaps for each id in batch' do
batch.each do |event_log|
expect(daemon.gap_tracking).to receive(:check!).with(event_log.id)
end
daemon.handle_events(batch, 55)
end
it 'handles every single event' do
batch.each do |event_log|
expect(daemon).to receive(:handle_single_event).with(event_log)
end
daemon.handle_events(batch, 55)
end
end
describe '#handle_single_event' do
set(:event_log) { create(:geo_event_log, :updated_event) }
it 'skips execution when no event data is found' do
event_log = build(:geo_event_log)
expect(daemon).not_to receive(:can_replay?)
daemon.handle_single_event(event_log)
end
it 'checks if it can replay the event' do
expect(daemon).to receive(:can_replay?)
daemon.handle_single_event(event_log)
end
it 'processes event when it is replayable' do
allow(daemon).to receive(:can_replay?).and_return(true)
expect(daemon).to receive(:process_event).with(event_log.event, event_log)
daemon.handle_single_event(event_log)
end
end
# Collects the event log ids that gap tracking currently reports as missing.
# Travels 12 minutes into the future first, so tracked gaps are old enough
# for fill_gaps to yield them back.
def read_gaps
  [].tap do |collected|
    Timecop.travel(12.minutes) do
      daemon.gap_tracking.fill_gaps { |id| collected << id }
    end
  end
end
end
......@@ -406,11 +406,14 @@ describe Geo::ProjectRegistry do
it 'resets sync state' do
subject.finish_sync!(type)
# NOTE(review): the five individual expectations below duplicate the
# have_attributes matcher that follows — this looks like merge/diff residue;
# consider keeping only the have_attributes form.
expect(subject.reload.resync_repository).to be false
expect(subject.reload.repository_retry_count).to be_nil
expect(subject.reload.repository_retry_at).to be_nil
expect(subject.reload.force_to_redownload_repository).to be false
expect(subject.reload.last_repository_sync_failure).to be_nil
# A finished sync clears every retry/failure flag in a single assertion.
expect(subject.reload).to have_attributes(
resync_repository: false,
repository_retry_count: be_nil,
repository_retry_at: be_nil,
force_to_redownload_repository: false,
last_repository_sync_failure: be_nil,
repository_missing_on_primary: false
)
end
it 'resets verification state' do
......@@ -421,6 +424,14 @@ describe Geo::ProjectRegistry do
expect(subject.reload.last_repository_verification_failure).to be_nil
end
context 'when a repository was missing on primary' do
it 'sets repository_missing_on_primary as true' do
# The second argument to finish_sync! flags the missing-on-primary state.
subject.finish_sync!(type, true)
expect(subject.reload.repository_missing_on_primary).to be true
end
end
context 'when a repository sync was scheduled after the last sync began' do
before do
subject.update!(resync_repository_was_scheduled_at: subject.last_repository_synced_at + 1.minute)
......@@ -470,11 +481,14 @@ describe Geo::ProjectRegistry do
it 'resets sync state' do
subject.finish_sync!(type)
# NOTE(review): the five individual expectations below duplicate the
# have_attributes matcher that follows — this looks like merge/diff residue;
# consider keeping only the have_attributes form.
expect(subject.reload.resync_wiki).to be false
expect(subject.reload.wiki_retry_count).to be_nil
expect(subject.reload.wiki_retry_at).to be_nil
expect(subject.reload.force_to_redownload_wiki).to be false
expect(subject.reload.last_wiki_sync_failure).to be_nil
# A finished sync clears every retry/failure flag in a single assertion.
expect(subject.reload).to have_attributes(
resync_wiki: false,
wiki_retry_count: be_nil,
wiki_retry_at: be_nil,
force_to_redownload_wiki: false,
last_wiki_sync_failure: be_nil,
wiki_missing_on_primary: false
)
end
it 'resets verification state' do
......@@ -485,6 +499,14 @@ describe Geo::ProjectRegistry do
expect(subject.reload.last_wiki_verification_failure).to be_nil
end
context 'when a wiki was missing on primary' do
it 'sets wiki_missing_on_primary as true' do
# The second argument to finish_sync! flags the missing-on-primary state.
subject.finish_sync!(type, true)
expect(subject.reload.wiki_missing_on_primary).to be true
end
end
context 'when a wiki sync was scheduled after the last sync began' do
before do
subject.update!(resync_wiki_was_scheduled_at: subject.last_wiki_synced_at + 1.minute)
......
......@@ -305,6 +305,10 @@ describe ProjectPolicy do
end
describe 'read_project_security_dashboard' do
before do
allow(project).to receive(:security_reports_feature_available?).and_return(true)
end
subject { described_class.new(current_user, project) }
context 'with admin' do
......@@ -329,6 +333,14 @@ describe ProjectPolicy do
let(:current_user) { developer }
it { is_expected.to be_allowed(:read_project_security_dashboard) }
context 'when security reports features are not available' do
before do
allow(project).to receive(:security_reports_feature_available?).and_return(false)
end
it { is_expected.to be_disallowed(:read_project_security_dashboard) }
end
end
context 'with reporter' do
......
# frozen_string_literal: true
require 'spec_helper'
describe ProjectPresenter do
include Gitlab::Routing.url_helpers
let(:user) { create(:user) }
# #extra_statistics_anchors should expose a Security Dashboard link only when
# the user may read the dashboard AND a pipeline with security reports exists.
describe '#extra_statistics_anchors' do
let(:project) { create(:project) }
let(:pipeline) { create(:ci_pipeline, project: project) }
let(:presenter) { described_class.new(project, current_user: user) }
# Expected anchor entry; matched by value equality against the presenter's
# returned collection.
let(:security_dashboard_data) do
OpenStruct.new(enabled: true,
label: _('Security Dashboard'),
link: project_security_dashboard_path(project))
end
before do
# Happy path by default: permission granted and a security-report pipeline present.
allow(Ability).to receive(:allowed?).with(user, :read_project_security_dashboard, project).and_return(true)
allow(project).to receive(:latest_pipeline_with_security_reports).and_return(pipeline)
end
it 'has security dashboard link' do
expect(presenter.extra_statistics_anchors).to include(security_dashboard_data)
end
shared_examples 'has no security dashboard link' do
it do
expect(presenter.extra_statistics_anchors).not_to include(security_dashboard_data)
end
end
context 'user is not allowed to read security dashboard' do
before do
allow(Ability).to receive(:allowed?).with(user, :read_project_security_dashboard, project).and_return(false)
end
it_behaves_like 'has no security dashboard link'
end
context 'no pipeline having security reports' do
before do
allow(project).to receive(:latest_pipeline_with_security_reports).and_return(nil)
end
it_behaves_like 'has no security dashboard link'
end
end
end
......@@ -105,8 +105,10 @@ describe Geo::RepositorySyncService do
subject.execute
expect(Geo::ProjectRegistry.last.resync_repository).to be true
expect(Geo::ProjectRegistry.last.repository_retry_count).to eq(1)
expect(Geo::ProjectRegistry.last).to have_attributes(
resync_repository: true,
repository_retry_count: 1
)
end
it 'marks sync as successful if no repository found' do
......@@ -118,8 +120,11 @@ describe Geo::RepositorySyncService do
subject.execute
expect(registry.reload.resync_repository).to be false
expect(registry.reload.last_repository_successful_sync_at).not_to be nil
expect(registry.reload).to have_attributes(
resync_repository: false,
last_repository_successful_sync_at: be_present,
repository_missing_on_primary: true
)
end
it 'marks resync as true after a failure' do
......
......@@ -84,8 +84,10 @@ RSpec.describe Geo::WikiSyncService do
subject.execute
expect(Geo::ProjectRegistry.last.resync_wiki).to be true
expect(Geo::ProjectRegistry.last.wiki_retry_count).to eq(1)
expect(Geo::ProjectRegistry.last).to have_attributes(
resync_wiki: true,
wiki_retry_count: 1
)
end
it 'marks sync as successful if no repository found' do
......@@ -97,8 +99,11 @@ RSpec.describe Geo::WikiSyncService do
subject.execute
expect(registry.reload.resync_wiki).to be false
expect(registry.last_wiki_successful_sync_at).not_to be nil
expect(registry.reload).to have_attributes(
resync_wiki: false,
last_wiki_successful_sync_at: be_present,
wiki_missing_on_primary: true
)
end
it 'marks resync as true after a failure' do
......
......@@ -62,7 +62,11 @@ module API
requires :housekeeping_incremental_repack_period, type: Integer, desc: "Number of Git pushes after which an incremental 'git repack' is run."
end
optional :html_emails_enabled, type: Boolean, desc: 'By default GitLab sends emails in HTML and plain text formats so mail clients can choose what format to use. Disable this option if you only want to send emails in plain text format.'
optional :import_sources, type: Array[String], values: %w[github bitbucket gitlab google_code fogbugz git gitlab_project manifest],
desc: 'Enabled sources for code import during project creation. OmniAuth must be configured for GitHub, Bitbucket, and GitLab.com'
optional :koding_enabled, type: Boolean, desc: 'Enable Koding'
given koding_enabled: ->(val) { val } do
......@@ -76,8 +80,13 @@ module API
requires :metrics_host, type: String, desc: 'The InfluxDB host'
requires :metrics_method_call_threshold, type: Integer, desc: 'A method call is only tracked when it takes longer to complete than the given amount of milliseconds.'
requires :metrics_packet_size, type: Integer, desc: 'The amount of points to store in a single UDP packet'
requires :metrics_pool_size, type: Integer, desc: 'The amount of InfluxDB connections to open'
requires :metrics_port, type: Integer, desc: 'The UDP port to use for connecting to InfluxDB'
requires :metrics_sample_interval, type: Integer, desc: 'The sampling interval in seconds'
requires :metrics_timeout, type: Integer, desc: 'The amount of seconds after which an InfluxDB connection will time out'
end
......@@ -97,8 +106,8 @@ module API
optional :prometheus_metrics_enabled, type: Boolean, desc: 'Enable Prometheus metrics'
optional :recaptcha_enabled, type: Boolean, desc: 'Helps prevent bots from creating accounts'
given recaptcha_enabled: ->(val) { val } do
requires :recaptcha_site_key, type: String, desc: 'Generate site key at http://www.google.com/recaptcha'
requires :recaptcha_private_key, type: String, desc: 'Generate private key at http://www.google.com/recaptcha'
requires :recaptcha_site_key, type: String, desc: 'Generate site key at http://www.google.com/recaptcha'
end
optional :repository_checks_enabled, type: Boolean, desc: "GitLab will periodically run 'git fsck' in all project and wiki repositories to look for silent disk corruption issues."
optional :repository_storages, type: Array[String], desc: 'Storage paths for new projects'
......@@ -139,22 +148,22 @@ module API
end
## EE-only START
optional :email_additional_text, type: String, desc: 'Additional text added to the bottom of every email for legal/auditing/compliance reasons'
optional :help_text, type: String, desc: 'GitLab server administrator information'
optional :elasticsearch_indexing, type: Boolean, desc: 'Enable Elasticsearch indexing'
given elasticsearch_indexing: ->(val) { val } do
optional :elasticsearch_search, type: Boolean, desc: 'Enable Elasticsearch search'
requires :elasticsearch_url, type: String, desc: 'The url to use for connecting to Elasticsearch. Use a comma-separated list to support clustering (e.g., "http://localhost:9200, http://localhost:9201")'
end
optional :elasticsearch_aws, type: Boolean, desc: 'Enable support for AWS hosted elasticsearch'
given elasticsearch_aws: ->(val) { val } do
requires :elasticsearch_aws_region, type: String, desc: 'The AWS region the elasticsearch domain is configured'
optional :elasticsearch_aws_access_key, type: String, desc: 'AWS IAM access key'
requires :elasticsearch_aws_region, type: String, desc: 'The AWS region the elasticsearch domain is configured'
optional :elasticsearch_aws_secret_access_key, type: String, desc: 'AWS IAM secret access key'
end
optional :usage_ping_enabled, type: Boolean, desc: 'Every week GitLab will report license usage back to GitLab, Inc.'
optional :repository_storages, type: Array[String], desc: 'A list of names of enabled storage paths, taken from `gitlab.yml`. New projects will be created in one of these stores, chosen at random.'
optional :elasticsearch_indexing, type: Boolean, desc: 'Enable Elasticsearch indexing'
given elasticsearch_indexing: ->(val) { val } do
optional :elasticsearch_search, type: Boolean, desc: 'Enable Elasticsearch search'
requires :elasticsearch_url, type: String, desc: 'The url to use for connecting to Elasticsearch. Use a comma-separated list to support clustering (e.g., "http://localhost:9200, http://localhost:9201")'
end
optional :email_additional_text, type: String, desc: 'Additional text added to the bottom of every email for legal/auditing/compliance reasons'
optional :help_text, type: String, desc: 'GitLab server administrator information'
optional :repository_size_limit, type: Integer, desc: 'Size limit per repository (MB)'
optional :repository_storages, type: Array[String], desc: 'A list of names of enabled storage paths, taken from `gitlab.yml`. New projects will be created in one of these stores, chosen at random.'
optional :usage_ping_enabled, type: Boolean, desc: 'Every week GitLab will report license usage back to GitLab, Inc.'
## EE-only END
optional_attributes = ::ApplicationSettingsHelper.visible_attributes << :performance_bar_allowed_group_id
......
......@@ -6151,7 +6151,7 @@ msgstr ""
msgid "The number of attempts GitLab will make to access a storage."
msgstr ""
msgid "The number of failures of after which GitLab will completely prevent access to the storage. The number of failures can be reset in the admin interface: %{link_to_health_page} or using the %{api_documentation_link}."
msgid "The number of failures after which GitLab will completely prevent access to the storage. The number of failures can be reset in the admin interface: %{link_to_health_page} or using the %{api_documentation_link}."
msgstr ""
msgid "The passphrase required to decrypt the private key. This is optional and the value is encrypted at rest."
......@@ -6211,7 +6211,7 @@ msgstr ""
msgid "The time in seconds GitLab will try to access storage. After this time a timeout error will be raised."
msgstr ""
msgid "The time in seconds between storage checks. When a previous check did complete yet, GitLab will skip a check."
msgid "The time in seconds between storage checks. If a check did not complete yet, GitLab will skip the next check."
msgstr ""
msgid "The time taken by each data entry gathered by that stage."
......
......@@ -80,14 +80,14 @@ function deploy() {
replicas="1"
service_enabled="false"
postgres_enabled="$POSTGRES_ENABLED"
gitlab_migrations_image_repository="registry.gitlab.com/gitlab-org/build/cng/gitlab-rails-ce"
gitlab_sidekiq_image_repository="registry.gitlab.com/gitlab-org/build/cng/gitlab-sidekiq-ce"
gitlab_unicorn_image_repository="registry.gitlab.com/gitlab-org/build/cng/gitlab-unicorn-ce"
gitlab_migrations_image_repository="registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-rails-ce"
gitlab_sidekiq_image_repository="registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-sidekiq-ce"
gitlab_unicorn_image_repository="registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-unicorn-ce"
if [[ "$CI_PROJECT_NAME" == "gitlab-ee" ]]; then
gitlab_migrations_image_repository="registry.gitlab.com/gitlab-org/build/cng/gitlab-rails-ee"
gitlab_sidekiq_image_repository="registry.gitlab.com/gitlab-org/build/cng/gitlab-sidekiq-ee"
gitlab_unicorn_image_repository="registry.gitlab.com/gitlab-org/build/cng/gitlab-unicorn-ee"
gitlab_migrations_image_repository="registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-rails-ee"
gitlab_sidekiq_image_repository="registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-sidekiq-ee"
gitlab_unicorn_image_repository="registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-unicorn-ee"
fi
# canary uses stable db
......@@ -138,9 +138,9 @@ function deploy() {
--set gitlab.sidekiq.image.tag="$CI_COMMIT_REF_NAME" \
--set gitlab.unicorn.image.repository="$gitlab_unicorn_image_repository" \
--set gitlab.unicorn.image.tag="$CI_COMMIT_REF_NAME" \
--set gitlab.gitaly.image.repository="registry.gitlab.com/gitlab-org/build/cng/gitaly" \
--set gitlab.gitaly.image.repository="registry.gitlab.com/gitlab-org/build/cng-mirror/gitaly" \
--set gitlab.gitaly.image.tag="v$GITALY_VERSION" \
--set gitlab.gitlab-shell.image.repository="registry.gitlab.com/gitlab-org/build/cng/gitlab-shell" \
--set gitlab.gitlab-shell.image.repository="registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-shell" \
--set gitlab.gitlab-shell.image.tag="v$GITLAB_SHELL_VERSION" \
--namespace="$KUBE_NAMESPACE" \
--version="$CI_PIPELINE_ID-$CI_JOB_ID" \
......
......@@ -6,7 +6,7 @@ require 'cgi'
module Trigger
OMNIBUS_PROJECT_PATH = 'gitlab-org/omnibus-gitlab'.freeze
CNG_PROJECT_PATH = 'gitlab-org/build/CNG'.freeze
CNG_PROJECT_PATH = 'gitlab-org/build/CNG-mirror'.freeze
TOKEN = ENV['BUILD_TRIGGER_TOKEN']
def self.ee?
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment