Commit 48c62d42 authored by Yorick Peterse

Move EE specific code out of Project

This moves all EE-specific code out of the Project model into separate
modules. State machine logic is injected using the
EE::Project::ImportStatusStateMachine module. By moving all of this code
out of the Project model, we reduce the number of possible merge
conflicts.
parent ee4b5fc6
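
For reviewers unfamiliar with the pattern: the EE modules below are prepended onto the CE Project class, so an EE override sits ahead of the CE method in the ancestor chain and can fall back to the CE implementation with `super`. A minimal, self-contained sketch (plain Ruby, simplified; not part of this diff) of how the moved `remove_import_data` behaves after this change:

```ruby
# Illustrative sketch only -- no Rails/ActiveSupport involved.
class Project
  # CE implementation: always drops the import data.
  def remove_import_data
    @import_data = nil
  end
end

module EE
  module Project
    # Prepended module: runs first and can delegate to CE via `super`.
    def remove_import_data
      super unless mirror? # EE keeps import data for mirrored projects
    end

    def mirror?
      false # stand-in for the real EE mirror flag
    end
  end
end

Project.prepend(EE::Project)
Project.new.remove_import_data # EE guard runs, then the CE method via `super`
```
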
@@ -11,10 +11,27 @@ module EE
ignore_column :sync_time
before_validation :mark_remote_mirrors_for_removal
after_save :create_mirror_data, if: ->(project) { project.mirror? && project.mirror_changed? }
after_save :destroy_mirror_data, if: ->(project) { !project.mirror? && project.mirror_changed? }
after_update :remove_mirror_repository_reference,
if: ->(project) { project.mirror? && project.import_url_updated? }
belongs_to :mirror_user, foreign_key: 'mirror_user_id', class_name: 'User'
has_one :mirror_data, dependent: :delete, autosave: true, class_name: 'ProjectMirrorData'
has_one :push_rule, dependent: :destroy
has_one :index_status, dependent: :destroy
has_one :jenkins_service, dependent: :destroy
has_one :jenkins_deprecated_service, dependent: :destroy
has_many :approvers, as: :target, dependent: :destroy
has_many :approver_groups, as: :target, dependent: :destroy
has_many :audit_events, as: :entity, dependent: :destroy
has_many :remote_mirrors, inverse_of: :project, dependent: :destroy
has_many :path_locks, dependent: :destroy
scope :with_shared_runners_limit_enabled, -> { with_shared_runners.non_public_only }
@@ -29,11 +46,94 @@ module EE
{ limit: 20.minutes.ago })
end
scope :mirror, -> { where(mirror: true) }
scope :with_remote_mirrors, -> { joins(:remote_mirrors).where(remote_mirrors: { enabled: true }).distinct }
scope :with_wiki_enabled, -> { with_feature_enabled(:wiki) }
delegate :shared_runners_minutes, :shared_runners_seconds, :shared_runners_seconds_last_reset,
to: :statistics, allow_nil: true
delegate :actual_shared_runners_minutes_limit,
:shared_runners_minutes_used?, to: :namespace
validates :repository_size_limit,
numericality: { only_integer: true, greater_than_or_equal_to: 0, allow_nil: true }
validates :approvals_before_merge, numericality: true, allow_blank: true
accepts_nested_attributes_for :remote_mirrors,
allow_destroy: true,
reject_if: ->(attrs) { attrs[:id].blank? && attrs[:url].blank? }
with_options if: :mirror? do |project|
project.validates :import_url, presence: true
project.validates :mirror_user, presence: true
end
end
module ClassMethods
def search_by_visibility(level)
where(visibility_level: ::Gitlab::VisibilityLevel.string_options[level])
end
end
def mirror_updated?
mirror? && self.mirror_last_update_at
end
def updating_mirror?
return false unless mirror? && !empty_repo?
return true if import_in_progress?
self.mirror_data.next_execution_timestamp < Time.now
end
def mirror_last_update_status
return unless mirror_updated?
if self.mirror_last_update_at == self.mirror_last_successful_update_at
:success
else
:failed
end
end
def mirror_last_update_success?
mirror_last_update_status == :success
end
def mirror_last_update_failed?
mirror_last_update_status == :failed
end
def mirror_ever_updated_successfully?
mirror_updated? && self.mirror_last_successful_update_at
end
def has_remote_mirror?
remote_mirrors.enabled.exists?
end
def updating_remote_mirror?
remote_mirrors.enabled.started.exists?
end
def update_remote_mirrors
remote_mirrors.each(&:sync)
end
def mark_stuck_remote_mirrors_as_failed!
remote_mirrors.stuck.update_all(
update_status: :failed,
last_error: 'The remote mirror took too long to complete.',
last_update_at: Time.now
)
end
def fetch_mirror
return unless mirror?
repository.fetch_upstream(self.import_url)
end
def shared_runners_available?
@@ -84,6 +184,150 @@ module EE
end
end
def cache_has_external_issue_tracker
super unless ::Gitlab::Geo.secondary?
end
def cache_has_external_wiki
super unless ::Gitlab::Geo.secondary?
end
def execute_hooks(data, hooks_scope = :push_hooks)
super
if group
group.hooks.send(hooks_scope).each do |hook|
hook.async_execute(data, hooks_scope.to_s)
end
end
end
# No need to have a Kerberos Web url. Kerberos URL will be used only to
# clone
def kerberos_url_to_repo
"#{::Gitlab.config.build_gitlab_kerberos_url + ::Gitlab::Application.routes.url_helpers.namespace_project_path(self.namespace, self)}.git"
end
def group_ldap_synced?
if group
group.ldap_synced?
else
false
end
end
def reference_issue_tracker?
default_issues_tracker? || jira_tracker_active?
end
def approver_ids=(value)
value.split(",").map(&:strip).each do |user_id|
approvers.find_or_create_by(user_id: user_id, target_id: id)
end
end
def approver_group_ids=(value)
value.split(",").map(&:strip).each do |group_id|
approver_groups.find_or_initialize_by(group_id: group_id, target_id: id)
end
end
def find_path_lock(path, exact_match: false, downstream: false)
@path_lock_finder ||= ::Gitlab::PathLocksFinder.new(self)
@path_lock_finder.find(path, exact_match: exact_match, downstream: downstream)
end
def merge_method
if self.merge_requests_ff_only_enabled
:ff
elsif self.merge_requests_rebase_enabled
:rebase_merge
else
:merge
end
end
def merge_method=(method)
case method.to_s
when "ff"
self.merge_requests_ff_only_enabled = true
self.merge_requests_rebase_enabled = true
when "rebase_merge"
self.merge_requests_ff_only_enabled = false
self.merge_requests_rebase_enabled = true
when "merge"
self.merge_requests_ff_only_enabled = false
self.merge_requests_rebase_enabled = false
end
end
def ff_merge_must_be_possible?
self.merge_requests_ff_only_enabled || self.merge_requests_rebase_enabled
end
def import_url_updated?
# check if import_url has been updated and it's not just the first assignment
import_url_changed? && changes['import_url'].first
end
def remove_mirror_repository_reference
repository.remove_remote(Repository::MIRROR_REMOTE)
end
def import_url_availability
if remote_mirrors.find_by(url: import_url)
errors.add(:import_url, 'is already in use by a remote mirror')
end
end
def mark_remote_mirrors_for_removal
remote_mirrors.each(&:mark_for_delete_if_blank_url)
end
def change_repository_storage(new_repository_storage_key)
return if repository_read_only?
return if repository_storage == new_repository_storage_key
raise ArgumentError unless ::Gitlab.config.repositories.storages.keys.include?(new_repository_storage_key)
run_after_commit { ProjectUpdateRepositoryStorageWorker.perform_async(id, new_repository_storage_key) }
self.repository_read_only = true
end
def repository_and_lfs_size
statistics.total_repository_size
end
def above_size_limit?
return false unless size_limit_enabled?
repository_and_lfs_size > actual_size_limit
end
def size_to_remove
repository_and_lfs_size - actual_size_limit
end
def actual_size_limit
return namespace.actual_size_limit if repository_size_limit.nil?
repository_size_limit
end
def size_limit_enabled?
actual_size_limit != 0
end
def changes_will_exceed_size_limit?(size_in_bytes)
size_limit_enabled? &&
(size_in_bytes > actual_size_limit ||
size_in_bytes + repository_and_lfs_size > actual_size_limit)
end
def remove_import_data
super unless mirror?
end
private
def licensed_feature_available?(feature)
...
module EE
module Project
module ImportStatusStateMachine
extend ActiveSupport::Concern
included do
state_machine :import_status, initial: :none do
before_transition [:none, :finished, :failed] => :scheduled do |project, _|
project.mirror_data&.last_update_scheduled_at = Time.now
end
before_transition scheduled: :started do |project, _|
project.mirror_data&.last_update_started_at = Time.now
end
before_transition scheduled: :failed do |project, _|
if project.mirror?
timestamp = Time.now
project.mirror_last_update_at = timestamp
project.mirror_data.next_execution_timestamp = timestamp
end
end
after_transition [:scheduled, :started] => [:finished, :failed] do |project, _|
::Gitlab::Mirror.decrement_capacity(project.id) if project.mirror?
end
before_transition started: :failed do |project, _|
if project.mirror?
project.mirror_last_update_at = Time.now
mirror_data = project.mirror_data
mirror_data.increment_retry_count!
mirror_data.set_next_execution_timestamp!
end
end
before_transition started: :finished do |project, _|
if project.mirror?
timestamp = Time.now
project.mirror_last_update_at = timestamp
project.mirror_last_successful_update_at = timestamp
mirror_data = project.mirror_data
mirror_data.reset_retry_count!
mirror_data.set_next_execution_timestamp!
end
if current_application_settings.elasticsearch_indexing?
ElasticCommitIndexerWorker.perform_async(project.id)
end
end
after_transition [:finished, :failed] => [:scheduled, :started] do |project, _|
::Gitlab::Mirror.increment_capacity(project.id) if project.mirror?
end
end
end
end
end
end
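
The concern above only contributes callbacks; the `state_machine :import_status` block in the CE model keeps defining the states and events, and this module reopens the same machine to attach the mirror-specific transitions. A rough, self-contained sketch of that mechanism (hypothetical class and attribute names; requires the state_machines and activesupport gems; uses `include` rather than `prepend` for simplicity):

```ruby
require 'state_machines'          # standalone state machine DSL
require 'active_support/concern'

module MirrorCallbacks
  extend ActiveSupport::Concern

  included do
    # Reopen the :status machine defined on the including class and add a
    # callback, mirroring how EE::Project::ImportStatusStateMachine layers
    # extra transitions onto Project's import_status machine.
    state_machine :status do
      before_transition none: :scheduled do |record, _transition|
        record.scheduled_at = Time.now
      end
    end
  end
end

class Import
  attr_accessor :scheduled_at

  # Base machine: states and events live here (the "CE" side).
  state_machine :status, initial: :none do
    state :scheduled
    event :schedule do
      transition none: :scheduled
    end
  end

  include MirrorCallbacks
end

import = Import.new
import.schedule
puts import.scheduled_at # set by the callback contributed by the concern
```
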
@@ -13,13 +13,16 @@ class Project < ActiveRecord::Base
include AfterCommitQueue
include CaseSensitivity
include TokenAuthenticatable
include Elastic::ProjectsSearch
include ValidAttribute
include ProjectFeaturesCompatibility
include SelectForProjectAuthorization
include Routable
# EE specific modules
include Elastic::ProjectsSearch
prepend EE::GeoAwareAvatar
prepend EE::Project
prepend EE::Project::ImportStatusStateMachine
extend Gitlab::ConfigHelper
@@ -66,8 +69,6 @@ class Project < ActiveRecord::Base
# update visibility_level of forks
after_update :update_forks_visibility_level
after_update :remove_mirror_repository_reference,
if: ->(project) { project.mirror? && project.import_url_updated? }
after_validation :check_pending_delete
@@ -83,9 +84,7 @@ class Project < ActiveRecord::Base
belongs_to :creator, class_name: 'User'
belongs_to :group, -> { where(type: 'Group') }, foreign_key: 'namespace_id'
belongs_to :namespace
belongs_to :mirror_user, foreign_key: 'mirror_user_id', class_name: 'User'
has_one :push_rule, dependent: :destroy
has_one :last_event, -> {order 'events.created_at DESC'}, class_name: 'Event'
has_many :boards, dependent: :destroy
@@ -103,10 +102,8 @@ class Project < ActiveRecord::Base
has_one :gemnasium_service, dependent: :destroy
has_one :mattermost_slash_commands_service, dependent: :destroy
has_one :mattermost_service, dependent: :destroy
has_one :slack_service, dependent: :destroy
has_one :jenkins_service, dependent: :destroy
has_one :jenkins_deprecated_service, dependent: :destroy
has_one :slack_slash_commands_service, dependent: :destroy
has_one :slack_service, dependent: :destroy
has_one :buildkite_service, dependent: :destroy
has_one :bamboo_service, dependent: :destroy
has_one :teamcity_service, dependent: :destroy
@@ -119,7 +116,6 @@ class Project < ActiveRecord::Base
has_one :external_wiki_service, dependent: :destroy
has_one :kubernetes_service, dependent: :destroy, inverse_of: :project
has_one :prometheus_service, dependent: :destroy, inverse_of: :project
has_one :index_status, dependent: :destroy
has_one :mock_ci_service, dependent: :destroy
has_one :mock_deployment_service, dependent: :destroy
has_one :mock_monitoring_service, dependent: :destroy
@@ -156,8 +152,6 @@ class Project < ActiveRecord::Base
has_many :deploy_keys, through: :deploy_keys_projects
has_many :users_star_projects, dependent: :destroy
has_many :starrers, through: :users_star_projects, source: :user
has_many :approvers, as: :target, dependent: :destroy
has_many :approver_groups, as: :target, dependent: :destroy
has_many :releases, dependent: :destroy
has_many :lfs_objects_projects, dependent: :destroy
has_many :lfs_objects, through: :lfs_objects_projects
@@ -165,7 +159,6 @@ class Project < ActiveRecord::Base
has_many :invited_groups, through: :project_group_links, source: :group
has_many :pages_domains, dependent: :destroy
has_many :todos, dependent: :destroy
has_many :audit_events, as: :entity, dependent: :destroy
has_many :notification_settings, dependent: :destroy, as: :source
has_one :import_data, dependent: :delete, class_name: 'ProjectImportData'
@@ -180,7 +173,6 @@ class Project < ActiveRecord::Base
has_many :runners, through: :runner_projects, source: :runner, class_name: 'Ci::Runner'
has_many :variables, class_name: 'Ci::Variable'
has_many :triggers, dependent: :destroy, class_name: 'Ci::Trigger'
has_many :remote_mirrors, inverse_of: :project, dependent: :destroy
has_many :environments, dependent: :destroy
has_many :deployments, dependent: :destroy
has_many :pipeline_schedules, dependent: :destroy, class_name: 'Ci::PipelineSchedule'
@@ -188,14 +180,9 @@ class Project < ActiveRecord::Base
has_many :sourced_pipelines, class_name: Ci::Sources::Pipeline, foreign_key: :source_project_id
has_many :source_pipelines, class_name: Ci::Sources::Pipeline, foreign_key: :project_id
has_many :path_locks, dependent: :destroy
has_many :active_runners, -> { active }, through: :runner_projects, source: :runner, class_name: 'Ci::Runner'
accepts_nested_attributes_for :variables, allow_destroy: true
accepts_nested_attributes_for :remote_mirrors,
allow_destroy: true, reject_if: ->(attrs) { attrs[:id].blank? && attrs[:url].blank? }
accepts_nested_attributes_for :project_feature
delegate :name, to: :owner, allow_nil: true, prefix: true
@@ -230,7 +217,6 @@ class Project < ActiveRecord::Base
validate :avatar_type,
if: ->(project) { project.avatar.present? && project.avatar_changed? }
validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
validates :approvals_before_merge, numericality: true, allow_blank: true
validate :visibility_level_allowed_by_group
validate :visibility_level_allowed_as_fork
validate :check_wiki_path_conflict
@@ -238,17 +224,8 @@ class Project < ActiveRecord::Base
presence: true,
inclusion: { in: ->(_object) { Gitlab.config.repositories.storages.keys } }
validates :repository_size_limit,
numericality: { only_integer: true, greater_than_or_equal_to: 0, allow_nil: true }
with_options if: :mirror? do |project|
project.validates :import_url, presence: true
project.validates :mirror_user, presence: true
end
add_authentication_token_field :runners_token
before_save :ensure_runners_token
before_validation :mark_remote_mirrors_for_removal
mount_uploader :avatar, AvatarUploader
has_many :uploads, as: :model, dependent: :destroy
@@ -267,10 +244,8 @@ class Project < ActiveRecord::Base
scope :starred_by, ->(user) { joins(:users_star_projects).where('users_star_projects.user_id': user.id) }
scope :visible_to_user, ->(user) { where(id: user.authorized_projects.select(:id).reorder(nil)) }
scope :non_archived, -> { where(archived: false) }
scope :mirror, -> { where(mirror: true) }
scope :for_milestones, ->(ids) { joins(:milestones).where('milestones.id' => ids).distinct }
scope :with_push, -> { joins(:events).where('events.action = ?', Event::PUSHED) }
scope :with_remote_mirrors, -> { joins(:remote_mirrors).where(remote_mirrors: { enabled: true }).distinct }
scope :with_project_feature, -> { joins('LEFT JOIN project_features ON projects.id = project_features.project_id') }
scope :with_statistics, -> { includes(:statistics) }
scope :with_shared_runners, -> { where(shared_runners_enabled: true) }
@@ -297,9 +272,6 @@ class Project < ActiveRecord::Base
scope :with_issues_enabled, -> { with_feature_enabled(:issues) }
scope :with_merge_requests_enabled, -> { with_feature_enabled(:merge_requests) }
# EE
scope :with_wiki_enabled, -> { with_feature_enabled(:wiki) }
enum auto_cancel_pending_pipelines: { disabled: 0, enabled: 1 }
# project features may be "disabled", "internal" or "enabled". If "internal",
@@ -349,61 +321,11 @@ class Project < ActiveRecord::Base
state :finished
state :failed
before_transition [:none, :finished, :failed] => :scheduled do |project, _|
project.mirror_data&.last_update_scheduled_at = Time.now
end
after_transition [:none, :finished, :failed] => :scheduled do |project, _|
project.run_after_commit { add_import_job }
end
before_transition scheduled: :started do |project, _|
project.mirror_data&.last_update_started_at = Time.now
end
before_transition scheduled: :failed do |project, _|
if project.mirror?
timestamp = Time.now
project.mirror_last_update_at = timestamp
project.mirror_data.next_execution_timestamp = timestamp
end
end
after_transition [:scheduled, :started] => [:finished, :failed] do |project, _|
Gitlab::Mirror.decrement_capacity(project.id) if project.mirror?
end
before_transition started: :failed do |project, _|
if project.mirror?
project.mirror_last_update_at = Time.now
mirror_data = project.mirror_data
mirror_data.increment_retry_count!
mirror_data.set_next_execution_timestamp!
end
end
before_transition started: :finished do |project, _|
if project.mirror?
timestamp = Time.now
project.mirror_last_update_at = timestamp
project.mirror_last_successful_update_at = timestamp
mirror_data = project.mirror_data
mirror_data.reset_retry_count!
mirror_data.set_next_execution_timestamp!
end
if current_application_settings.elasticsearch_indexing?
ElasticCommitIndexerWorker.perform_async(project.id)
end
end
after_transition started: :finished, do: :reset_cache_and_import_attrs
after_transition [:finished, :failed] => [:scheduled, :started] do |project, _|
Gitlab::Mirror.increment_capacity(project.id) if project.mirror?
end
end
class << self
@@ -564,7 +486,7 @@ class Project < ActiveRecord::Base
end
def remove_import_data
import_data&.destroy unless mirror?
import_data&.destroy
end
def import_url=(value)
@@ -644,65 +566,6 @@ class Project < ActiveRecord::Base
Gitlab::UrlSanitizer.new(import_url).masked_url
end
def mirror_updated?
mirror? && self.mirror_last_update_at
end
def updating_mirror?
return false unless mirror? && !empty_repo?
return true if import_in_progress?
self.mirror_data.next_execution_timestamp < Time.now
end
def mirror_last_update_status
return unless mirror_updated?
if self.mirror_last_update_at == self.mirror_last_successful_update_at
:success
else
:failed
end
end
def mirror_last_update_success?
mirror_last_update_status == :success
end
def mirror_last_update_failed?
mirror_last_update_status == :failed
end
def mirror_ever_updated_successfully?
mirror_updated? && self.mirror_last_successful_update_at
end
def has_remote_mirror?
remote_mirrors.enabled.exists?
end
def updating_remote_mirror?
remote_mirrors.enabled.started.exists?
end
def update_remote_mirrors
remote_mirrors.each(&:sync)
end
def mark_stuck_remote_mirrors_as_failed!
remote_mirrors.stuck.update_all(
update_status: :failed,
last_error: 'The remote mirror took too long to complete.',
last_update_at: Time.now
)
end
def fetch_mirror
return unless mirror?
repository.fetch_upstream(self.import_url)
end
def gitlab_project_import?
import_type == 'gitlab_project'
end
@@ -855,7 +718,7 @@ class Project < ActiveRecord::Base
end
def cache_has_external_issue_tracker
update_column(:has_external_issue_tracker, services.external_issue_trackers.any?) unless Gitlab::Geo.secondary?
update_column(:has_external_issue_tracker, services.external_issue_trackers.any?)
end
def has_wiki?
@@ -875,7 +738,7 @@ class Project < ActiveRecord::Base
end
def cache_has_external_wiki
update_column(:has_external_wiki, services.external_wikis.any?) unless Gitlab::Geo.secondary?
update_column(:has_external_wiki, services.external_wikis.any?)
end
def find_or_initialize_services
@@ -943,10 +806,6 @@ class Project < ActiveRecord::Base
issues_tracker.to_param == 'jira'
end
def redmine_tracker?
issues_tracker.to_param == 'redmine'
end
def avatar_type
unless self.avatar.image?
self.errors.add :avatar, 'only images allowed'
@@ -995,11 +854,6 @@ class Project < ActiveRecord::Base
hooks.send(hooks_scope).each do |hook|
hook.async_execute(data, hooks_scope.to_s)
end
if group
group.hooks.send(hooks_scope).each do |hook|
hook.async_execute(data, hooks_scope.to_s)
end
end
end
def execute_services(data, hooks_scope = :push_hooks)
@@ -1042,11 +896,6 @@ class Project < ActiveRecord::Base
"#{web_url}.git"
end
# No need to have a Kerberos Web url. Kerberos URL will be used only to clone
def kerberos_url_to_repo
"#{Gitlab.config.build_gitlab_kerberos_url + Gitlab::Application.routes.url_helpers.namespace_project_path(self.namespace, self)}.git"
end
def user_can_push_to_empty_repo?(user)
!ProtectedBranch.default_branch_protected? || team.max_member_access(user.id) > Gitlab::Access::DEVELOPER
end
@@ -1192,14 +1041,6 @@ class Project < ActiveRecord::Base
merge_requests.where(source_project_id: self.id)
end
def group_ldap_synced?
if group
group.ldap_synced?
else
false
end
end
def create_repository
# Forked import is handled asynchronously
unless forked?
@@ -1217,6 +1058,17 @@ class Project < ActiveRecord::Base
!!repository.exists?
end
def update_forks_visibility_level
return unless visibility_level < visibility_level_was
forks.each do |forked_project|
if forked_project.visibility_level > visibility_level
forked_project.visibility_level = visibility_level
forked_project.save!
end
end
end
def create_wiki
ProjectWiki.new(self, self.owner).wiki
true
@@ -1233,18 +1085,6 @@ class Project < ActiveRecord::Base
jira_tracker? && jira_service.active
end
def approver_ids=(value)
value.split(",").map(&:strip).each do |user_id|
approvers.find_or_create_by(user_id: user_id, target_id: id)
end
end
def approver_group_ids=(value)
value.split(",").map(&:strip).each do |group_id|
approver_groups.find_or_initialize_by(group_id: group_id, target_id: id)
end
end
def allowed_to_share_with_group?
!namespace.share_with_group_lock
end
@@ -1322,11 +1162,6 @@ class Project < ActiveRecord::Base
Dir.exist?(public_pages_path)
end
def find_path_lock(path, exact_match: false, downstream: false)
@path_lock_finder ||= Gitlab::PathLocksFinder.new(self)
@path_lock_finder.find(path, exact_match: exact_match, downstream: downstream)
end
def pages_url
subdomain, _, url_path = full_path.partition('/')
@@ -1368,64 +1203,6 @@ class Project < ActiveRecord::Base
end
end
def merge_method
if self.merge_requests_ff_only_enabled
:ff
elsif self.merge_requests_rebase_enabled
:rebase_merge
else
:merge
end
end
def merge_method=(method)
case method.to_s
when "ff"
self.merge_requests_ff_only_enabled = true
self.merge_requests_rebase_enabled = true
when "rebase_merge"
self.merge_requests_ff_only_enabled = false
self.merge_requests_rebase_enabled = true
when "merge"
self.merge_requests_ff_only_enabled = false
self.merge_requests_rebase_enabled = false
end
end
def ff_merge_must_be_possible?
self.merge_requests_ff_only_enabled || self.merge_requests_rebase_enabled
end
def import_url_updated?
# check if import_url has been updated and it's not just the first assignment
import_url_changed? && changes['import_url'].first
end
def update_forks_visibility_level
return unless visibility_level < visibility_level_was
forks.each do |forked_project|
if forked_project.visibility_level > visibility_level
forked_project.visibility_level = visibility_level
forked_project.save!
end
end
end
def remove_mirror_repository_reference
repository.remove_remote(Repository::MIRROR_REMOTE)
end
def import_url_availability
if remote_mirrors.find_by(url: import_url)
errors.add(:import_url, 'is already in use by a remote mirror')
end
end
def mark_remote_mirrors_for_removal
remote_mirrors.each(&:mark_for_delete_if_blank_url)
end
def running_or_pending_build_count(force: false)
Rails.cache.fetch(['projects', id, 'running_or_pending_build_count'], force: force) do
builds.running_or_pending.count(:all)
@@ -1533,16 +1310,6 @@ class Project < ActiveRecord::Base
handle_update_attribute_error(e, value)
end
def change_repository_storage(new_repository_storage_key)
return if repository_read_only?
return if repository_storage == new_repository_storage_key
raise ArgumentError unless Gitlab.config.repositories.storages.keys.include?(new_repository_storage_key)
run_after_commit { ProjectUpdateRepositoryStorageWorker.perform_async(id, new_repository_storage_key) }
self.repository_read_only = true
end
def pushes_since_gc
Gitlab::Redis.with { |redis| redis.get(pushes_since_gc_redis_key).to_i }
end
@@ -1555,36 +1322,6 @@ class Project < ActiveRecord::Base
Gitlab::Redis.with { |redis| redis.del(pushes_since_gc_redis_key) }
end
def repository_and_lfs_size
statistics.total_repository_size
end
def above_size_limit?
return false unless size_limit_enabled?
repository_and_lfs_size > actual_size_limit
end
def size_to_remove
repository_and_lfs_size - actual_size_limit
end
def actual_size_limit
return namespace.actual_size_limit if repository_size_limit.nil?
repository_size_limit
end
def size_limit_enabled?
actual_size_limit != 0
end
def changes_will_exceed_size_limit?(size_in_bytes)
size_limit_enabled? &&
(size_in_bytes > actual_size_limit ||
size_in_bytes + repository_and_lfs_size > actual_size_limit)
end
def route_map_for(commit_sha)
@route_maps_by_commit ||= Hash.new do |h, sha|
h[sha] = begin
...
@@ -176,7 +176,9 @@ describe ProjectsController do
context 'project repo over limit' do
before do
allow_any_instance_of(Project).to receive(:above_size_limit?).and_return(true)
allow_any_instance_of(EE::Project).
to receive(:above_size_limit?).and_return(true)
project.team << [user, :master]
end
...
@@ -665,7 +665,7 @@ describe 'Git HTTP requests', lib: true do
end
end
end
context "when Kerberos token is provided" do
let(:env) { { spnego_request_token: 'opaque_request_token' } }
@@ -768,7 +768,8 @@ describe 'Git HTTP requests', lib: true do
end
it 'responds with status 403 Forbidden' do
allow_any_instance_of(Project).to receive(:above_size_limit?).and_return(true)
allow_any_instance_of(EE::Project).
to receive(:above_size_limit?).and_return(true)
upload(path, env) do |response|
expect(response).to have_http_status(:forbidden)
...
@@ -710,7 +710,7 @@ describe 'Git LFS API and storage' do
context 'and project is above the limit' do
let(:update_lfs_permissions) do
allow_any_instance_of(Project).to receive_messages(
allow_any_instance_of(EE::Project).to receive_messages(
repository_and_lfs_size: 100.megabytes,
actual_size_limit: 99.megabytes)
end
@@ -726,7 +726,7 @@ describe 'Git LFS API and storage' do
context 'and project will go over the limit' do
let(:update_lfs_permissions) do
allow_any_instance_of(Project).to receive_messages(
allow_any_instance_of(EE::Project).to receive_messages(
repository_and_lfs_size: 200.megabytes,
actual_size_limit: 300.megabytes)
end
@@ -982,7 +982,10 @@ describe 'Git LFS API and storage' do
context 'and project has limit enabled but will stay under the limit' do
before do
allow_any_instance_of(Project).to receive_messages(actual_size_limit: 200, size_limit_enabled?: true)
allow_any_instance_of(EE::Project).to receive_messages(
actual_size_limit: 200,
size_limit_enabled?: true)
put_finalize
end
...