Commit 2d9670b5 authored by Matija Čupić

Merge remote-tracking branch 'origin/master' into list-multiple-clusters

parents fc4f3164 8c77ae2d
@@ -586,6 +586,7 @@ codequality:
    paths: [codeclimate.json]

qa:internal:
+  <<: *except-docs
  stage: test
  variables:
    SETUP_DB: "false"
......
@@ -298,18 +298,21 @@ import ProjectVariables from './project_variables';
      break;
    case 'projects:snippets:show':
      initNotes();
+      new ZenMode();
      break;
    case 'projects:snippets:new':
    case 'projects:snippets:edit':
    case 'projects:snippets:create':
    case 'projects:snippets:update':
      new GLForm($('.snippet-form'), true);
+      new ZenMode();
      break;
    case 'snippets:new':
    case 'snippets:edit':
    case 'snippets:create':
    case 'snippets:update':
      new GLForm($('.snippet-form'), false);
+      new ZenMode();
      break;
    case 'projects:releases:edit':
      new ZenMode();
@@ -546,6 +549,7 @@ import ProjectVariables from './project_variables';
      new LineHighlighter();
      new BlobViewer();
      initNotes();
+      new ZenMode();
      break;
    case 'import:fogbugz:new_user_map':
      new UsersSelect();
......
@@ -4,8 +4,8 @@ class Admin::AppearancesController < Admin::ApplicationController
  def show
  end

-  def preview
-    render 'preview', layout: 'devise'
+  def preview_sign_in
+    render 'preview_sign_in', layout: 'devise'
  end

  def create
@@ -52,7 +52,7 @@ class Admin::AppearancesController < Admin::ApplicationController
  def appearance_params
    params.require(:appearance).permit(
      :title, :description, :logo, :logo_cache, :header_logo, :header_logo_cache,
-      :updated_by
+      :new_project_guidelines, :updated_by
    )
  end
end
module AppearancesHelper
  def brand_title
-    if brand_item && brand_item.title
-      brand_item.title
-    else
-      'GitLab Community Edition'
-    end
+    brand_item&.title.presence || 'GitLab Community Edition'
  end

  def brand_image
-    if brand_item.logo?
-      image_tag brand_item.logo
-    else
-      nil
-    end
+    image_tag(brand_item.logo) if brand_item&.logo?
  end

  def brand_text
    markdown_field(brand_item, :description)
  end

+  def brand_new_project_guidelines
+    markdown_field(brand_item, :new_project_guidelines)
+  end

  def brand_item
    @appearance ||= Appearance.current
  end

  def brand_header_logo
-    if brand_item && brand_item.header_logo?
+    if brand_item&.header_logo?
      image_tag brand_item.header_logo
    else
      render 'shared/logo.svg'
@@ -33,7 +29,7 @@ module AppearancesHelper
  # Skip the 'GitLab' type logo when custom brand logo is set
  def brand_header_logo_type
-    unless brand_item && brand_item.header_logo?
+    unless brand_item&.header_logo?
      render 'shared/logo_type.svg'
    end
  end
......
@@ -2,9 +2,8 @@ class Appearance < ActiveRecord::Base
  include CacheMarkdownField

  cache_markdown_field :description
+  cache_markdown_field :new_project_guidelines

-  validates :title, presence: true
-  validates :description, presence: true
  validates :logo, file_size: { maximum: 1.megabyte }
  validates :header_logo, file_size: { maximum: 1.megabyte }
......
module Ci
  class Build < CommitStatus
+    prepend ArtifactMigratable
    include TokenAuthenticatable
    include AfterCommitQueue
    include Presentable
@@ -10,9 +11,14 @@ module Ci
    belongs_to :erased_by, class_name: 'User'

    has_many :deployments, as: :deployable
    has_one :last_deployment, -> { order('deployments.id DESC') }, as: :deployable, class_name: 'Deployment'
    has_many :trace_sections, class_name: 'Ci::BuildTraceSection'

+    has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+    has_one :job_artifacts_archive, -> { where(file_type: Ci::JobArtifact.file_types[:archive]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
+    has_one :job_artifacts_metadata, -> { where(file_type: Ci::JobArtifact.file_types[:metadata]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id

    # The "environment" field for builds is a String, and is the unexpanded name
    def persisted_environment
      @persisted_environment ||= Environment.find_by(
@@ -31,15 +37,37 @@ module Ci
    scope :unstarted, ->() { where(runner_id: nil) }
    scope :ignore_failures, ->() { where(allow_failure: false) }
-    scope :with_artifacts, ->() { where.not(artifacts_file: [nil, '']) }
+    scope :with_artifacts, ->() do
+      where('(artifacts_file IS NOT NULL AND artifacts_file <> ?) OR EXISTS (?)',
+        '', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id'))
+    end
    scope :with_artifacts_not_expired, ->() { with_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
    scope :with_expired_artifacts, ->() { with_artifacts.where('artifacts_expire_at < ?', Time.now) }
    scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
    scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + [:manual]) }
    scope :ref_protected, -> { where(protected: true) }

-    mount_uploader :artifacts_file, ArtifactUploader
-    mount_uploader :artifacts_metadata, ArtifactUploader
+    scope :matches_tag_ids, -> (tag_ids) do
+      matcher = ::ActsAsTaggableOn::Tagging
+        .where(taggable_type: CommitStatus)
+        .where(context: 'tags')
+        .where('taggable_id = ci_builds.id')
+        .where.not(tag_id: tag_ids).select('1')
+
+      where("NOT EXISTS (?)", matcher)
+    end
+
+    scope :with_any_tags, -> do
+      matcher = ::ActsAsTaggableOn::Tagging
+        .where(taggable_type: CommitStatus)
+        .where(context: 'tags')
+        .where('taggable_id = ci_builds.id').select('1')
+
+      where("EXISTS (?)", matcher)
+    end
+
+    mount_uploader :legacy_artifacts_file, LegacyArtifactUploader, mount_on: :artifacts_file
+    mount_uploader :legacy_artifacts_metadata, LegacyArtifactUploader, mount_on: :artifacts_metadata

    acts_as_taggable
@@ -326,14 +354,6 @@ module Ci
      project.running_or_pending_build_count(force: true)
    end

-    def artifacts?
-      !artifacts_expired? && artifacts_file.exists?
-    end
-
-    def artifacts_metadata?
-      artifacts? && artifacts_metadata.exists?
-    end

    def artifacts_metadata_entry(path, **options)
      metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
        artifacts_metadata.path,
@@ -386,6 +406,7 @@ module Ci
    def keep_artifacts!
      self.update(artifacts_expire_at: nil)
+      self.job_artifacts.update_all(expire_at: nil)
    end

    def coverage_regex
@@ -473,11 +494,7 @@ module Ci
    private

    def update_artifacts_size
-      self.artifacts_size = if artifacts_file.exists?
-                              artifacts_file.size
-                            else
-                              nil
-                            end
+      self.artifacts_size = legacy_artifacts_file&.size
    end

    def erase_trace!
......
module Ci
  class JobArtifact < ActiveRecord::Base
    extend Gitlab::Ci::Model

    belongs_to :project
    belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id

    before_save :set_size, if: :file_changed?

    mount_uploader :file, JobArtifactUploader

    enum file_type: {
      archive: 1,
      metadata: 2
    }

    def self.artifacts_size_for(project)
      self.where(project: project).sum(:size)
    end

    def set_size
      self.size = file.size
    end

    def expire_in
      expire_at - Time.now if expire_at
    end

    def expire_in=(value)
      self.expire_at =
        if value
          ChronicDuration.parse(value)&.seconds&.from_now
        end
    end
  end
end
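The expire_in accessors above store a human-readable duration as an absolute expire_at timestamp via ChronicDuration. A minimal usage sketch (illustrative values, not part of the diff):

  artifact = Ci::JobArtifact.new(file_type: :archive)
  artifact.expire_in = '7 days' # ChronicDuration.parse('7 days').seconds.from_now becomes expire_at
  artifact.expire_in            # => seconds remaining until expire_at, or nil when unset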
@@ -112,7 +112,7 @@ module Ci
    def can_pick?(build)
      return false if self.ref_protected? && !build.protected?

-      assignable_for?(build.project) && accepting_tags?(build)
+      assignable_for?(build.project_id) && accepting_tags?(build)
    end

    def only_for?(project)
@@ -171,8 +171,8 @@ module Ci
      end
    end

-    def assignable_for?(project)
-      is_shared? || projects.exists?(id: project.id)
+    def assignable_for?(project_id)
+      is_shared? || projects.exists?(id: project_id)
    end

    def accepting_tags?(build)
......
# Adapter class to unify the interface between mounted uploaders and the
# Ci::Artifact model
# Meant to be prepended so the interface can stay the same
module ArtifactMigratable
  def artifacts_file
    job_artifacts_archive&.file || legacy_artifacts_file
  end

  def artifacts_metadata
    job_artifacts_metadata&.file || legacy_artifacts_metadata
  end

  def artifacts?
    !artifacts_expired? && artifacts_file.exists?
  end

  def artifacts_metadata?
    artifacts? && artifacts_metadata.exists?
  end

  def artifacts_file_changed?
    job_artifacts_archive&.file_changed? || attribute_changed?(:artifacts_file)
  end

  def remove_artifacts_file!
    if job_artifacts_archive
      job_artifacts_archive.destroy
    else
      remove_legacy_artifacts_file!
    end
  end

  def remove_artifacts_metadata!
    if job_artifacts_metadata
      job_artifacts_metadata.destroy
    else
      remove_legacy_artifacts_metadata!
    end
  end

  def artifacts_size
    read_attribute(:artifacts_size).to_i +
      job_artifacts_archive&.size.to_i + job_artifacts_metadata&.size.to_i
  end
end
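Because Ci::Build prepends this module (see the new prepend ArtifactMigratable line above), these methods sit in front of the legacy column-backed accessors and only fall back to them when no Ci::JobArtifact row exists. A rough sketch of the resulting behaviour (build is a placeholder instance, not from the diff):

  build.artifacts_file  # archive job artifact's file when present, else legacy_artifacts_file
  build.artifacts_size  # legacy artifacts_size column plus the sizes of any new job artifacts
  build.artifacts?      # true only when an unexpired artifacts file actually exists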
@@ -2,6 +2,7 @@ require 'carrierwave/orm/activerecord'
class Group < Namespace
  include Gitlab::ConfigHelper
+  include AfterCommitQueue
  include AccessRequestable
  include Avatarable
  include Referable
......
@@ -2,6 +2,7 @@ require 'digest/md5'
class Key < ActiveRecord::Base
  include Gitlab::CurrentSettings
+  include AfterCommitQueue
  include Sortable

  belongs_to :user
......
class Member < ActiveRecord::Base
+  include AfterCommitQueue
  include Sortable
  include Importable
  include Expirable
......
@@ -35,7 +35,9 @@ class ProjectStatistics < ActiveRecord::Base
  end

  def update_build_artifacts_size
-    self.build_artifacts_size = project.builds.sum(:artifacts_size)
+    self.build_artifacts_size =
+      project.builds.sum(:artifacts_size) +
+        Ci::JobArtifact.artifacts_size_for(self)
  end

  def update_storage_size
......
@@ -211,7 +211,7 @@ class Service < ActiveRecord::Base
  def async_execute(data)
    return unless supported_events.include?(data[:object_kind])

-    Sidekiq::Client.enqueue(ProjectServiceWorker, id, data)
+    ProjectServiceWorker.perform_async(id, data)
  end

  def issue_tracker?
......
@@ -7,6 +7,7 @@ class User < ActiveRecord::Base
  include Gitlab::ConfigHelper
  include Gitlab::CurrentSettings
  include Gitlab::SQL::Pattern
+  include AfterCommitQueue
  include Avatarable
  include Referable
  include Sortable
@@ -903,6 +904,7 @@ class User < ActiveRecord::Base
  def post_destroy_hook
    log_info("User \"#{name}\" (#{email}) was removed")

    system_hook_service.execute_hooks_for(self, :destroy)
  end
......
@@ -22,6 +22,16 @@ module Ci
      valid = true

+      if Feature.enabled?('ci_job_request_with_tags_matcher')
+        # pick builds that does not have other tags than runner's one
+        builds = builds.matches_tag_ids(runner.tags.ids)
+
+        # pick builds that have at least one tag
+        unless runner.run_untagged?
+          builds = builds.with_any_tags
+        end
+      end

      builds.find do |build|
        next unless runner.can_pick?(build)
......
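The tag matching above is gated behind a feature flag, so the new scopes only apply once the flag is enabled. A hedged sketch of toggling it from a Rails console, using GitLab's standard Feature API with the flag name taken from the diff:

  Feature.enable('ci_job_request_with_tags_matcher')
  Feature.enabled?('ci_job_request_with_tags_matcher') # => true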
@@ -18,7 +18,7 @@ module Projects
      @status.enqueue!
      @status.run!

-      raise 'missing pages artifacts' unless build.artifacts_file?
+      raise 'missing pages artifacts' unless build.artifacts?
      raise 'pages are outdated' unless latest?

      # Create temporary directory in which we will extract the artifacts
......
class SystemHooksService
  def execute_hooks_for(model, event)
-    execute_hooks(build_event_data(model, event))
+    data = build_event_data(model, event)
+
+    model.run_after_commit_or_now do
+      SystemHooksService.new.execute_hooks(data)
+    end
  end

  def execute_hooks(data, hooks_scope = :all)
......
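run_after_commit_or_now is provided by the AfterCommitQueue concern that Group, Key, Member and User gain elsewhere in this commit; it defers the block until the surrounding database transaction commits, or runs it immediately when no transaction is open. A minimal sketch (model is a placeholder record):

  model.run_after_commit_or_now do
    # runs only after COMMIT, so the system hook never sees an uncommitted record
  end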
@@ -63,7 +63,7 @@ class WebHookService
  end

  def async_execute
-    Sidekiq::Client.enqueue(WebHookWorker, hook.id, data, hook_name)
+    WebHookWorker.perform_async(hook.id, data, hook_name)
  end

  private
......
class JobArtifactUploader < GitlabUploader
  storage :file

  def self.local_store_path
    Gitlab.config.artifacts.path
  end

  def self.artifacts_upload_path
    File.join(self.local_store_path, 'tmp/uploads/')
  end

  def size
    return super if model.size.nil?

    model.size
  end

  def store_dir
    default_local_path
  end

  def cache_dir
    File.join(self.class.local_store_path, 'tmp/cache')
  end

  def work_dir
    File.join(self.class.local_store_path, 'tmp/work')
  end

  private

  def default_local_path
    File.join(self.class.local_store_path, default_path)
  end

  def default_path
    creation_date = model.created_at.utc.strftime('%Y_%m_%d')

    File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
              creation_date, model.job_id.to_s, model.id.to_s)
  end

  def disk_hash
    @disk_hash ||= Digest::SHA2.hexdigest(model.project_id.to_s)
  end
end
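default_path above shards artifacts on disk by a SHA-256 digest of the project id, then by creation date, job id and artifact id. A rough sketch of the resulting layout (illustrative values only, not taken from the diff):

  hash = Digest::SHA2.hexdigest(model.project_id.to_s) # SHA-256 hex digest, e.g. "ab12..."
  File.join(hash[0..1], hash[2..3], hash, '2017_12_05', model.job_id.to_s, model.id.to_s)
  # => "ab/12/ab12.../2017_12_05/<job_id>/<artifact_id>" under Gitlab.config.artifacts.path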
-class ArtifactUploader < GitlabUploader
+class LegacyArtifactUploader < GitlabUploader
  storage :file

-  attr_reader :job, :field
-
-  def self.local_artifacts_store
+  def self.local_store_path
    Gitlab.config.artifacts.path
  end

  def self.artifacts_upload_path
-    File.join(self.local_artifacts_store, 'tmp/uploads/')
+    File.join(self.local_store_path, 'tmp/uploads/')
  end

-  def initialize(job, field)
-    @job, @field = job, field
-  end

  def store_dir
@@ -20,20 +14,20 @@ class ArtifactUploader < GitlabUploader
  end

  def cache_dir
-    File.join(self.class.local_artifacts_store, 'tmp/cache')
+    File.join(self.class.local_store_path, 'tmp/cache')
  end

  def work_dir
-    File.join(self.class.local_artifacts_store, 'tmp/work')
+    File.join(self.class.local_store_path, 'tmp/work')
  end

  private

  def default_local_path
-    File.join(self.class.local_artifacts_store, default_path)
+    File.join(self.class.local_store_path, default_path)
  end

  def default_path
-    File.join(job.created_at.utc.strftime('%Y_%m'), job.project_id.to_s, job.id.to_s)
+    File.join(model.created_at.utc.strftime('%Y_%m'), model.project_id.to_s, model.id.to_s)
  end
end
= form_for @appearance, url: admin_appearances_path, html: { class: 'form-horizontal'} do |f|
  = form_errors(@appearance)

+  %fieldset.app_logo
+    %legend
+      Navigation bar:
+    .form-group
+      = f.label :header_logo, 'Header logo', class: 'control-label'
+      .col-sm-10
+        - if @appearance.header_logo?
+          = image_tag @appearance.header_logo_url, class: 'appearance-light-logo-preview'
+          - if @appearance.persisted?
+            %br
+            = link_to 'Remove header logo', header_logos_admin_appearances_path, data: { confirm: "Header logo will be removed. Are you sure?"}, method: :delete, class: "btn btn-remove btn-sm remove-logo"
+          %hr
+        = f.hidden_field :header_logo_cache
+        = f.file_field :header_logo, class: ""
+        .hint
+          Maximum file size is 1MB. Pages are optimized for a 28px tall header logo

  %fieldset.sign-in
    %legend
      Sign in/Sign up pages:
@@ -28,27 +45,22 @@
        .hint
          Maximum file size is 1MB. Pages are optimized for a 640x360 px logo.

-  %fieldset.app_logo
+  %fieldset
    %legend
-      Navigation bar:
+      New project pages:
    .form-group
-      = f.label :header_logo, 'Header logo', class: 'control-label'
+      = f.label :new_project_guidelines, class: 'control-label'
      .col-sm-10
-        - if @appearance.header_logo?
-          = image_tag @appearance.header_logo_url, class: 'appearance-light-logo-preview'
-          - if @appearance.persisted?
-            %br
-            = link_to 'Remove header logo', header_logos_admin_appearances_path, data: { confirm: "Header logo will be removed. Are you sure?"}, method: :delete, class: "btn btn-remove btn-sm remove-logo"
-          %hr
-        = f.hidden_field :header_logo_cache
-        = f.file_field :header_logo, class: ""
+        = f.text_area :new_project_guidelines, class: "form-control", rows: 10
        .hint
-          Maximum file size is 1MB. Pages are optimized for a 28px tall header logo
+          Guidelines parsed with #{link_to "GitLab Flavored Markdown", help_page_path('user/markdown'), target: '_blank'}.

  .form-actions
    = f.submit 'Save', class: 'btn btn-save append-right-10'
    - if @appearance.persisted?
-      = link_to 'Preview last save', preview_admin_appearances_path, class: 'btn', target: '_blank', rel: 'noopener noreferrer'
+      Preview last save:
+      = link_to 'Sign-in page', preview_sign_in_admin_appearances_path, class: 'btn', target: '_blank', rel: 'noopener noreferrer'
+      = link_to 'New project page', new_project_path, class: 'btn', target: '_blank', rel: 'noopener noreferrer'

    - if @appearance.updated_at
      %span.pull-right
......
@@ -15,8 +15,8 @@
  .col-sm-7.brand-holder.pull-left
    %h1
      = brand_title
-    - if brand_item
    = brand_image
+    - if brand_item&.description?
      = brand_text
  - else
    %h3 Open source software to collaborate on code
......
@@ -25,7 +25,7 @@
      = markdown_toolbar_button({ icon: "list-bulleted", data: { "md-tag" => "* ", "md-prepend" => true }, title: "Add a bullet list" })
      = markdown_toolbar_button({ icon: "list-numbered", data: { "md-tag" => "1. ", "md-prepend" => true }, title: "Add a numbered list" })
      = markdown_toolbar_button({ icon: "task-done", data: { "md-tag" => "* [ ] ", "md-prepend" => true }, title: "Add a task list" })
-      %button.toolbar-btn.toolbar-fullscreen-btn.js-zen-enter.has-tooltip{ type: "button", tabindex: -1, aria: { label: "Go full screen" }, title: "Go full screen", data: { container: "body" } }
+      %button.toolbar-btn.toolbar-fullscreen-btn.js-zen-enter.has-tooltip{ type: "button", tabindex: -1, "aria-label": "Go full screen", title: "Go full screen", data: { container: "body" } }
        = sprite_icon("screen-full")
    .md-write-holder
......
@@ -18,6 +18,7 @@
          A project is where you house your files (repository), plan your work (issues), and publish your documentation (wiki), #{link_to 'among other things', help_page_path("user/project/index.md", anchor: "projects-features"), target: '_blank'}.
        %p
          All features are enabled when you create a project, but you can disable the ones you don’t need in the project settings.
+        = brand_new_project_guidelines
  .col-lg-9.js-toggle-container
    %ul.nav-links.gitlab-tabs{ role: 'tablist' }
      %li.active{ role: 'presentation' }
......
class AdminEmailWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include CronjobQueue

  def perform
......
class AuthorizedProjectsWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker

  # Schedules multiple jobs and waits for them to be completed.
  def self.bulk_perform_and_wait(args_list)
@@ -17,11 +16,6 @@ class AuthorizedProjectsWorker
    waiter.wait
  end

-  # Schedules multiple jobs to run in sidekiq without waiting for completion
-  def self.bulk_perform_async(args_list)
-    Sidekiq::Client.push_bulk('class' => self, 'queue' => sidekiq_options['queue'], 'args' => args_list)
-  end

  # Performs multiple jobs directly. Failed jobs will be put into sidekiq so
  # they can benefit from retries
  def self.bulk_perform_inline(args_list)
......
class BackgroundMigrationWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker

-  # Enqueues a number of jobs in bulk.
-  #
-  # The `jobs` argument should be an Array of Arrays, each sub-array must be in
-  # the form:
-  #
-  # [migration-class, [arg1, arg2, ...]]
-  def self.perform_bulk(jobs)
-    Sidekiq::Client.push_bulk('class' => self,
-                              'queue' => sidekiq_options['queue'],
-                              'args' => jobs)
-  end

-  # Schedules multiple jobs in bulk, with a delay.
-  #
-  def self.perform_bulk_in(delay, jobs)
-    now = Time.now.to_i
-    schedule = now + delay.to_i
-
-    if schedule <= now
-      raise ArgumentError, 'The schedule time must be in the future!'
-    end
-
-    Sidekiq::Client.push_bulk('class' => self,
-                              'queue' => sidekiq_options['queue'],
-                              'args' => jobs,
-                              'at' => schedule)
-  end

  # Performs the background migration.
  #
......
class BuildCoverageWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include PipelineQueue

  def perform(build_id)
......
class BuildFinishedWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include PipelineQueue

  enqueue_in group: :processing
......
class BuildHooksWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include PipelineQueue

  enqueue_in group: :hooks
......
class BuildQueueWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include PipelineQueue

  enqueue_in group: :processing
......
class BuildSuccessWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include PipelineQueue

  enqueue_in group: :processing
......
class BuildTraceSectionsWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include PipelineQueue

  def perform(build_id)
......
class ClusterInstallAppWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include ClusterQueue
  include ClusterApplications
......
class ClusterProvisionWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include ClusterQueue

  def perform(cluster_id)
......
class ClusterWaitForAppInstallationWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include ClusterQueue
  include ClusterApplications
......
Sidekiq::Worker.extend ActiveSupport::Concern

module ApplicationWorker
  extend ActiveSupport::Concern

  include Sidekiq::Worker

  included do
    sidekiq_options queue: base_queue_name
  end

  module ClassMethods
    def base_queue_name
      name
        .sub(/\AGitlab::/, '')
        .sub(/Worker\z/, '')
        .underscore
        .tr('/', '_')
    end

    def queue
      get_sidekiq_options['queue'].to_s
    end

    def bulk_perform_async(args_list)
      Sidekiq::Client.push_bulk('class' => self, 'args' => args_list)
    end

    def bulk_perform_in(delay, args_list)
      now = Time.now.to_i
      schedule = now + delay.to_i

      if schedule <= now
        raise ArgumentError, 'The schedule time must be in the future!'
      end

      Sidekiq::Client.push_bulk('class' => self, 'args' => args_list, 'at' => schedule)
    end
  end
end
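This concern takes over what the per-worker DedicatedSidekiqQueue include and the ad-hoc Sidekiq::Client.push_bulk helpers removed above used to do: the queue name is derived from the class name, and bulk scheduling becomes available to every worker. A sketch with a hypothetical worker (ExampleWorker is not part of the diff):

  class ExampleWorker
    include ApplicationWorker # queue defaults to "example" via base_queue_name

    def perform(id); end
  end

  ExampleWorker.queue                          # => "example"
  ExampleWorker.bulk_perform_async([[1], [2]]) # one Sidekiq job per argument array
  ExampleWorker.bulk_perform_in(1.hour, [[3]]) # scheduled; raises unless the delay is in the future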
# Concern that sets the queue of a Sidekiq worker based on the worker's class
# name/namespace.
module DedicatedSidekiqQueue
  extend ActiveSupport::Concern

  included do
    sidekiq_options queue: name.sub(/Worker\z/, '').underscore.tr('/', '_')
  end
end
@@ -8,7 +8,7 @@ module Gitlab
      extend ActiveSupport::Concern

      included do
-        include Sidekiq::Worker
+        include ApplicationWorker
        include GithubImport::Queue
        include ReschedulingMethods
        include NotifyUponDeath
......
class CreateGpgSignatureWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker

  def perform(commit_sha, project_id)
    project = Project.find_by(id: project_id)
class CreatePipelineWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include PipelineQueue

  enqueue_in group: :creation
......
class DeleteMergedBranchesWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker

  def perform(project_id, user_id)
    begin
......
class DeleteUserWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker

  def perform(current_user_id, delete_user_id, options = {})
    delete_user = User.find(delete_user_id)
......
class EmailReceiverWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker

  def perform(raw)
    return unless Gitlab::IncomingEmail.enabled?
......
class EmailsOnPushWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker

  attr_reader :email, :skip_premailer
......
class ExpireBuildArtifactsWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include CronjobQueue

  def perform
@@ -8,6 +8,6 @@ class ExpireBuildArtifactsWorker
    build_ids = Ci::Build.with_expired_artifacts.pluck(:id)
    build_ids = build_ids.map { |build_id| [build_id] }

-    Sidekiq::Client.push_bulk('class' => ExpireBuildInstanceArtifactsWorker, 'args' => build_ids )
+    ExpireBuildInstanceArtifactsWorker.bulk_perform_async(build_ids)
  end
end
class ExpireBuildInstanceArtifactsWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker

  def perform(build_id)
    build = Ci::Build
......
class ExpireJobCacheWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include PipelineQueue

  enqueue_in group: :cache
......
class ExpirePipelineCacheWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include PipelineQueue

  enqueue_in group: :cache
......
class GitGarbageCollectWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
  include Gitlab::CurrentSettings

  sidekiq_options retry: false
......
@@ -7,7 +7,7 @@ module Gitlab
    # been completed this worker will advance the import process to the next
    # stage.
    class AdvanceStageWorker
-      include Sidekiq::Worker
+      include ApplicationWorker

      sidekiq_options queue: 'github_importer_advance_stage', dead: false
......
@@ -3,7 +3,7 @@
module Gitlab
  module GithubImport
    class RefreshImportJidWorker
-      include Sidekiq::Worker
+      include ApplicationWorker
      include GithubImport::Queue

      # The interval to schedule new instances of this job at.
......
@@ -4,7 +4,7 @@ module Gitlab
  module GithubImport
    module Stage
      class FinishImportWorker
-        include Sidekiq::Worker
+        include ApplicationWorker
        include GithubImport::Queue
        include StageMethods
......
@@ -4,7 +4,7 @@ module Gitlab
  module GithubImport
    module Stage
      class ImportBaseDataWorker
-        include Sidekiq::Worker
+        include ApplicationWorker
        include GithubImport::Queue
        include StageMethods
......
@@ -4,7 +4,7 @@ module Gitlab
  module GithubImport
    module Stage
      class ImportIssuesAndDiffNotesWorker
-        include Sidekiq::Worker
+        include ApplicationWorker
        include GithubImport::Queue
        include StageMethods
......
@@ -4,7 +4,7 @@ module Gitlab
  module GithubImport
    module Stage
      class ImportNotesWorker
-        include Sidekiq::Worker
+        include ApplicationWorker
        include GithubImport::Queue
        include StageMethods
......
@@ -4,7 +4,7 @@ module Gitlab
  module GithubImport
    module Stage
      class ImportPullRequestsWorker
-        include Sidekiq::Worker
+        include ApplicationWorker
        include GithubImport::Queue
        include StageMethods
......
@@ -4,7 +4,7 @@ module Gitlab
  module GithubImport
    module Stage
      class ImportRepositoryWorker
-        include Sidekiq::Worker
+        include ApplicationWorker
        include GithubImport::Queue
        include StageMethods
......
class GitlabShellWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include Gitlab::ShellAdapter
-  include DedicatedSidekiqQueue

  def perform(action, *arg)
    gitlab_shell.__send__(action, *arg) # rubocop:disable GitlabSecurity/PublicSend
......
class GitlabUsagePingWorker
  LEASE_TIMEOUT = 86400

-  include Sidekiq::Worker
+  include ApplicationWorker
  include CronjobQueue

  def perform
......
class GroupDestroyWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
  include ExceptionBacktrace

  def perform(group_id, user_id)
......
class ImportExportProjectCleanupWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include CronjobQueue

  def perform
......
class InvalidGpgSignatureUpdateWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker

  def perform(gpg_key_id)
    gpg_key = GpgKey.find_by(id: gpg_key_id)
......
@@ -2,8 +2,7 @@ require 'json'
require 'socket'

class IrkerWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker

  def perform(project_id, chans, colors, push_data, settings)
    project = Project.find(project_id)
......
class MergeWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker

  def perform(merge_request_id, current_user_id, params)
    params = params.with_indifferent_access
......
@@ -5,14 +5,9 @@
# The worker will reject doing anything for projects that *do* have a
# namespace. For those use ProjectDestroyWorker instead.
class NamespacelessProjectDestroyWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
  include ExceptionBacktrace

-  def self.bulk_perform_async(args_list)
-    Sidekiq::Client.push_bulk('class' => self, 'queue' => sidekiq_options['queue'], 'args' => args_list)
-  end

  def perform(project_id)
    begin
      project = Project.unscoped.find(project_id)
......
class NewIssueWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
  include NewIssuable

  def perform(issue_id, user_id)
......
class NewMergeRequestWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
  include NewIssuable

  def perform(merge_request_id, user_id)
......
class NewNoteWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker

  # Keep extra parameter to preserve backwards compatibility with
  # old `NewNoteWorker` jobs (can remove later)
......
class PagesWorker
-  include Sidekiq::Worker
+  include ApplicationWorker

  sidekiq_options queue: :pages, retry: false
......
class PipelineHooksWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include PipelineQueue

  enqueue_in group: :hooks
......
class PipelineMetricsWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include PipelineQueue

  def perform(pipeline_id)
......
class PipelineNotificationWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include PipelineQueue

  def perform(pipeline_id, recipients = nil)
......
class PipelineProcessWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include PipelineQueue

  enqueue_in group: :processing
......
class PipelineScheduleWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include CronjobQueue

  def perform
......
class PipelineSuccessWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include PipelineQueue

  enqueue_in group: :processing
......
class PipelineUpdateWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include PipelineQueue

  enqueue_in group: :processing
......
class PostReceive
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker

  def perform(gl_repository, identifier, changes)
    project, is_wiki = Gitlab::GlRepository.parse(gl_repository)
......
@@ -5,8 +5,7 @@
# Consider using an extra worker if you need to add any extra (and potentially
# slow) processing of commits.
class ProcessCommitWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker

  # project_id - The ID of the project this commit belongs to.
  # user_id - The ID of the user that pushed the commit.
......
# Worker for updating any project specific caches.
class ProjectCacheWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker

  LEASE_TIMEOUT = 15.minutes.to_i
......
class ProjectDestroyWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
  include ExceptionBacktrace

  def perform(project_id, user_id, params)
......
class ProjectExportWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
  include ExceptionBacktrace

  sidekiq_options retry: 3
......
class ProjectMigrateHashedStorageWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker

  LEASE_TIMEOUT = 30.seconds.to_i
......
class ProjectServiceWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker

  sidekiq_options dead: false
......
# Worker for updating any project specific caches.
class PropagateServiceTemplateWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker

  LEASE_TIMEOUT = 4.hours.to_i
......
class PruneOldEventsWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include CronjobQueue

  def perform
......
class ReactiveCachingWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker

  def perform(class_name, id, *args)
    klass = begin
......
class RemoveExpiredGroupLinksWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include CronjobQueue

  def perform
......
class RemoveExpiredMembersWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include CronjobQueue

  def perform
......
class RemoveOldWebHookLogsWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include CronjobQueue

  WEB_HOOK_LOG_LIFETIME = 2.days
......
class RemoveUnreferencedLfsObjectsWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include CronjobQueue

  def perform
......
class RepositoryArchiveCacheWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include CronjobQueue

  def perform
......
module RepositoryCheck
  class BatchWorker
-    include Sidekiq::Worker
+    include ApplicationWorker
    include CronjobQueue

    RUN_TIME = 3600
......
module RepositoryCheck
  class ClearWorker
-    include Sidekiq::Worker
+    include ApplicationWorker
    include RepositoryCheckQueue

    def perform
......
module RepositoryCheck
  class SingleRepositoryWorker
-    include Sidekiq::Worker
+    include ApplicationWorker
    include RepositoryCheckQueue

    def perform(project_id)
......
class RepositoryForkWorker
  ForkError = Class.new(StandardError)

-  include Sidekiq::Worker
+  include ApplicationWorker
  include Gitlab::ShellAdapter
-  include DedicatedSidekiqQueue
  include ProjectStartImport

  sidekiq_options status_expiration: StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION
......
class RepositoryImportWorker
  ImportError = Class.new(StandardError)

-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
  include ExceptionBacktrace
  include ProjectStartImport
......
class RequestsProfilesWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
  include CronjobQueue

  def perform
......