Commit ea20437d authored by Dmitriy Zaporozhets

Merge branch 'ce-to-ee' into 'master'

CE Upstream - Monday

Closes #3121, gitlab-ce#31706, gitlab-ce#36066, and gitlab-ce#31698

See merge request !2674
parents aa7951eb 7161aac3
......@@ -67,7 +67,7 @@ gem 'gpgme'
# LDAP Auth
# GitLab fork with several improvements to original library. For full list of changes
# see https://github.com/intridea/omniauth-ldap/compare/master...gitlabhq:master
gem 'gitlab_omniauth-ldap', '~> 2.0.3', require: 'omniauth-ldap'
gem 'gitlab_omniauth-ldap', '~> 2.0.4', require: 'omniauth-ldap'
gem 'net-ldap'
# Git Wiki
......@@ -419,7 +419,7 @@ group :ed25519 do
end
# Gitaly GRPC client
gem 'gitaly', '~> 0.26.0'
gem 'gitaly', '~> 0.27.0'
gem 'toml-rb', '~> 0.3.15', require: false
......
......@@ -125,7 +125,7 @@ GEM
activesupport (>= 4.0.0)
mime-types (>= 1.16)
cause (0.1)
charlock_holmes (0.7.3)
charlock_holmes (0.7.4)
chronic (0.10.2)
chronic_duration (0.10.6)
numerizer (~> 0.1.1)
......@@ -301,7 +301,7 @@ GEM
po_to_json (>= 1.0.0)
rails (>= 3.2.0)
gherkin-ruby (0.3.2)
gitaly (0.26.0)
gitaly (0.27.0)
google-protobuf (~> 3.1)
grpc (~> 1.0)
github-linguist (4.7.6)
......@@ -321,7 +321,7 @@ GEM
posix-spawn (~> 0.3)
gitlab-license (1.0.0)
gitlab-markup (1.5.1)
gitlab_omniauth-ldap (2.0.3)
gitlab_omniauth-ldap (2.0.4)
net-ldap (~> 0.16)
omniauth (~> 1.3)
pyu-ruby-sasl (>= 0.0.3.3, < 0.1)
......@@ -1051,12 +1051,12 @@ DEPENDENCIES
gettext (~> 3.2.2)
gettext_i18n_rails (~> 1.8.0)
gettext_i18n_rails_js (~> 1.2.0)
gitaly (~> 0.26.0)
gitaly (~> 0.27.0)
github-linguist (~> 4.7.0)
gitlab-flowdock-git-hook (~> 1.0.1)
gitlab-license (~> 1.0)
gitlab-markup (~> 1.5.1)
gitlab_omniauth-ldap (~> 2.0.3)
gitlab_omniauth-ldap (~> 2.0.4)
gollum-lib (~> 4.2)
gollum-rugged_adapter (~> 0.4.4)
gon (~> 6.1.0)
......
......@@ -378,6 +378,11 @@ import initGroupAnalytics from './init_group_analytics';
if ($('#tree-slider').length) new TreeView();
if ($('.blob-viewer').length) new BlobViewer();
if ($('.project-show-activity').length) new gl.Activities();
$('#tree-slider').waitForImages(function() {
gl.utils.ajaxGet(document.querySelector('.js-tree-content').dataset.logsPath);
});
initGeoInfoModal();
break;
case 'projects:edit':
......
import Cookies from 'js-cookie';
import bp from './breakpoints';
const HIDE_INTERVAL_TIMEOUT = 300;
const IS_OVER_CLASS = 'is-over';
const IS_ABOVE_CLASS = 'is-above';
const IS_SHOWING_FLY_OUT_CLASS = 'is-showing-fly-out';
let currentOpenMenu = null;
let menuCornerLocs;
let timeoutId;
export const mousePos = [];
export const setOpenMenu = (menu = null) => { currentOpenMenu = menu; };
export const slope = (a, b) => (b.y - a.y) / (b.x - a.x);
export const canShowActiveSubItems = (el) => {
const isHiddenByMedia = bp.getBreakpointSize() === 'sm' || bp.getBreakpointSize() === 'md';
......@@ -10,8 +24,28 @@ export const canShowActiveSubItems = (el) => {
return true;
};
export const canShowSubItems = () => bp.getBreakpointSize() === 'sm' || bp.getBreakpointSize() === 'md' || bp.getBreakpointSize() === 'lg';
export const getHideSubItemsInterval = () => {
if (!currentOpenMenu) return 0;
const currentMousePos = mousePos[mousePos.length - 1];
const prevMousePos = mousePos[0];
const currentMousePosY = currentMousePos.y;
const [menuTop, menuBottom] = menuCornerLocs;
if (currentMousePosY < menuTop.y ||
currentMousePosY > menuBottom.y) return 0;
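// Only delay hiding when the cursor appears to be moving toward the fly-out:
// the slope from the mouse to the menu's bottom corner is increasing while the
// slope to its top corner is decreasing, so the pointer's path converges on the menu.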
if (slope(prevMousePos, menuBottom) < slope(currentMousePos, menuBottom) &&
slope(prevMousePos, menuTop) > slope(currentMousePos, menuTop)) {
return HIDE_INTERVAL_TIMEOUT;
}
return 0;
};
export const calculateTop = (boundingRect, outerHeight) => {
const windowHeight = window.innerHeight;
const bottomOverflow = windowHeight - (boundingRect.top + outerHeight);
......@@ -20,45 +54,118 @@ export const calculateTop = (boundingRect, outerHeight) => {
boundingRect.top;
};
export const showSubLevelItems = (el) => {
const subItems = el.querySelector('.sidebar-sub-level-items');
export const hideMenu = (el) => {
if (!el) return;
if (!subItems || !canShowSubItems() || !canShowActiveSubItems(el)) return;
const parentEl = el.parentNode;
subItems.style.display = 'block';
el.classList.add('is-showing-fly-out');
el.classList.add('is-over');
el.style.display = ''; // eslint-disable-line no-param-reassign
el.style.transform = ''; // eslint-disable-line no-param-reassign
el.classList.remove(IS_ABOVE_CLASS);
parentEl.classList.remove(IS_OVER_CLASS);
parentEl.classList.remove(IS_SHOWING_FLY_OUT_CLASS);
setOpenMenu();
};
export const moveSubItemsToPosition = (el, subItems) => {
const boundingRect = el.getBoundingClientRect();
const top = calculateTop(boundingRect, subItems.offsetHeight);
const isAbove = top < boundingRect.top;
subItems.classList.add('fly-out-list');
subItems.style.transform = `translate3d(0, ${Math.floor(top)}px, 0)`;
subItems.style.transform = `translate3d(0, ${Math.floor(top)}px, 0)`; // eslint-disable-line no-param-reassign
const subItemsRect = subItems.getBoundingClientRect();
menuCornerLocs = [
{
x: subItemsRect.left, // left position of the sub items
y: subItemsRect.top, // top position of the sub items
},
{
x: subItemsRect.left, // left position of the sub items
y: subItemsRect.top + subItemsRect.height, // bottom position of the sub items
},
];
if (isAbove) {
subItems.classList.add('is-above');
subItems.classList.add(IS_ABOVE_CLASS);
}
};
export const hideSubLevelItems = (el) => {
export const showSubLevelItems = (el) => {
const subItems = el.querySelector('.sidebar-sub-level-items');
if (!canShowSubItems() || !canShowActiveSubItems(el)) return;
el.classList.add(IS_OVER_CLASS);
if (!subItems) return;
subItems.style.display = 'block';
el.classList.add(IS_SHOWING_FLY_OUT_CLASS);
setOpenMenu(subItems);
moveSubItemsToPosition(el, subItems);
};
export const mouseEnterTopItems = (el) => {
clearTimeout(timeoutId);
timeoutId = setTimeout(() => {
if (currentOpenMenu) hideMenu(currentOpenMenu);
showSubLevelItems(el);
}, getHideSubItemsInterval());
};
export const mouseLeaveTopItem = (el) => {
const subItems = el.querySelector('.sidebar-sub-level-items');
if (!subItems || !canShowSubItems() || !canShowActiveSubItems(el)) return;
if (!canShowSubItems() || !canShowActiveSubItems(el) ||
(subItems && subItems === currentOpenMenu)) return;
el.classList.remove(IS_OVER_CLASS);
};
export const documentMouseMove = (e) => {
mousePos.push({
x: e.clientX,
y: e.clientY,
});
el.classList.remove('is-showing-fly-out');
el.classList.remove('is-over');
subItems.style.display = '';
subItems.style.transform = '';
subItems.classList.remove('is-above');
if (mousePos.length > 6) mousePos.shift();
};
export default () => {
const items = [...document.querySelectorAll('.sidebar-top-level-items > li')]
.filter(el => el.querySelector('.sidebar-sub-level-items'));
const sidebar = document.querySelector('.sidebar-top-level-items');
if (!sidebar) return;
const items = [...sidebar.querySelectorAll('.sidebar-top-level-items > li')];
sidebar.addEventListener('mouseleave', () => {
clearTimeout(timeoutId);
timeoutId = setTimeout(() => {
if (currentOpenMenu) hideMenu(currentOpenMenu);
}, getHideSubItemsInterval());
});
items.forEach((el) => {
el.addEventListener('mouseenter', e => showSubLevelItems(e.currentTarget));
el.addEventListener('mouseleave', e => hideSubLevelItems(e.currentTarget));
const subItems = el.querySelector('.sidebar-sub-level-items');
if (subItems) {
subItems.addEventListener('mouseleave', () => {
clearTimeout(timeoutId);
hideMenu(currentOpenMenu);
});
}
el.addEventListener('mouseenter', e => mouseEnterTopItems(e.currentTarget));
el.addEventListener('mouseleave', e => mouseLeaveTopItem(e.currentTarget));
});
document.addEventListener('mousemove', documentMouseMove);
};
......@@ -36,7 +36,7 @@ const bindEvents = () => {
$('.how_to_import_link').on('click', (e) => {
e.preventDefault();
$('.how_to_import_link').next('.modal').show();
$(e.currentTarget).next('.modal').show();
});
$('.modal-header .close').on('click', () => {
......
......@@ -725,9 +725,9 @@
}
// TODO: change global style and remove mixin
@mixin new-style-dropdown {
.dropdown-menu,
.dropdown-menu-nav {
@mixin new-style-dropdown($selector: '') {
#{$selector}.dropdown-menu,
#{$selector}.dropdown-menu-nav {
.divider {
margin: 6px 0;
}
......@@ -773,7 +773,7 @@
}
}
.dropdown-menu-align-right {
#{$selector}.dropdown-menu-align-right {
margin-top: 2px;
}
}
......@@ -339,6 +339,8 @@ a > code {
@extend .ref-name;
}
@include new-style-dropdown('.git-revision-dropdown');
/**
* Apply Markdown typography
*
......
......@@ -103,6 +103,7 @@ $new-sidebar-collapsed-width: 50px;
&.sidebar-icons-only {
width: $new-sidebar-collapsed-width;
overflow-x: hidden;
.badge,
.project-title {
......@@ -249,32 +250,13 @@ $new-sidebar-collapsed-width: 50px;
position: absolute;
top: -30px;
bottom: -30px;
left: 0;
left: -10px;
right: -30px;
z-index: -1;
}
&::after {
content: "";
position: absolute;
top: 44px;
left: -30px;
right: 35px;
bottom: 0;
height: 100%;
max-height: 150px;
z-index: -1;
transform: skew(33deg);
}
&.is-above {
margin-top: 1px;
&::after {
top: auto;
bottom: 44px;
transform: skew(-30deg);
}
}
> .active {
......@@ -321,8 +303,7 @@ $new-sidebar-collapsed-width: 50px;
}
}
&:not(.active):hover > a,
> a:hover,
&.active > a:hover,
&.is-over > a {
background-color: $white-light;
}
......
......@@ -108,6 +108,7 @@
background-color: $orange-50;
border-radius: $border-radius-default $border-radius-default 0 0;
border: 1px solid $border-gray-normal;
border-bottom: none;
padding: 3px 12px;
margin: auto;
align-items: center;
......@@ -132,22 +133,9 @@
}
}
.not-confidential {
padding: 0;
border-top: none;
}
.right-sidebar-expanded {
.md-area {
border-radius: 0;
border-top: none;
}
}
.right-sidebar-collapsed {
.confidential-issue-warning {
border-bottom: none;
}
.confidential-issue-warning + .md-area {
border-top-left-radius: 0;
border-top-right-radius: 0;
}
.discussion-form {
......
......@@ -854,6 +854,7 @@ a.linked-pipeline-mini-item {
* Top arrow in the dropdown in the mini pipeline graph
*/
.mini-pipeline-graph-dropdown-menu {
z-index: 200;
&::before,
&::after {
......
......@@ -45,7 +45,7 @@ class Admin::AppearancesController < Admin::ApplicationController
# Use callbacks to share common setup or constraints between actions.
def set_appearance
@appearance = Appearance.last || Appearance.new
@appearance = Appearance.current || Appearance.new
end
# Only allow a trusted parameter "white list" through.
......
......@@ -69,7 +69,7 @@ module AuthenticatesWithTwoFactor
if U2fRegistration.authenticate(user, u2f_app_id, user_params[:device_response], session[:challenge])
# Remove any lingering user data from login
session.delete(:otp_user_id)
session.delete(:challenges)
session.delete(:challenge)
remember_me(user) if user_params[:remember_me] == '1'
sign_in(user)
......
......@@ -52,8 +52,10 @@ class Dashboard::ProjectsController < Dashboard::ApplicationController
end
def load_events
@events = Event.in_projects(load_projects(params.merge(non_public: true)))
@events = event_filter.apply_filter(@events).with_associations
@events = @events.limit(20).offset(params[:offset] || 0)
projects = load_projects(params.merge(non_public: true))
@events = EventCollection
.new(projects, offset: params[:offset].to_i, filter: event_filter)
.to_a
end
end
......@@ -29,9 +29,9 @@ class DashboardController < Dashboard::ApplicationController
current_user.authorized_projects
end
@events = Event.in_projects(projects)
@events = @event_filter.apply_filter(@events).with_associations
@events = @events.limit(20).offset(params[:offset] || 0)
@events = EventCollection
.new(projects, offset: params[:offset].to_i, filter: @event_filter)
.to_a
end
def set_show_full_reference
......
......@@ -167,9 +167,9 @@ class GroupsController < Groups::ApplicationController
end
def load_events
@events = Event.in_projects(@projects)
@events = event_filter.apply_filter(@events).with_associations
@events = @events.limit(20).offset(params[:offset] || 0)
@events = EventCollection
.new(@projects, offset: params[:offset].to_i, filter: event_filter)
.to_a
end
def user_actions
......
......@@ -8,6 +8,8 @@ class ProjectsController < Projects::ApplicationController
before_action :repository, except: [:index, :new, :create]
before_action :assign_ref_vars, only: [:show], if: :repo_exists?
before_action :assign_tree_vars, only: [:show], if: [:repo_exists?, :project_view_files?]
before_action :tree, only: [:show], if: [:repo_exists?, :project_view_files?]
before_action :project_export_enabled, only: [:export, :download_export, :remove_export, :generate_new_export]
# Authorize
before_action :authorize_admin_project!, only: [:edit, :update, :housekeeping, :download_export, :export, :remove_export, :generate_new_export]
......@@ -303,10 +305,11 @@ class ProjectsController < Projects::ApplicationController
end
def load_events
@events = @project.events.recent
@events = event_filter.apply_filter(@events).with_associations
limit = (params[:limit] || 20).to_i
@events = @events.limit(limit).offset(params[:offset] || 0)
projects = Project.where(id: @project.id)
@events = EventCollection
.new(projects, offset: params[:offset].to_i, filter: event_filter)
.to_a
end
def project_params
......@@ -398,4 +401,8 @@ class ProjectsController < Projects::ApplicationController
url_for(params)
end
def project_export_enabled
render_404 unless current_application_settings.project_export_enabled?
end
end
# :nocov:
if Rails.env.test?
class UnicornTestController < ActionController::Base
def pid
......@@ -10,3 +11,4 @@ if Rails.env.test?
end
end
end
# :nocov:
......@@ -18,7 +18,7 @@ class Admin::ProjectsFinder
end
def execute
items = Project.with_statistics
items = Project.without_deleted.with_statistics
items = items.in_namespace(namespace_id) if namespace_id.present?
items = items.where(visibility_level: visibility_level) if visibility_level.present?
items = items.with_push if with_push.present?
......
......@@ -20,7 +20,7 @@ module AppearancesHelper
end
def brand_item
@appearance ||= Appearance.first
@appearance ||= Appearance.current
end
def brand_header_logo
......
......@@ -148,6 +148,7 @@ module ApplicationSettingsHelper
:plantuml_enabled,
:plantuml_url,
:polling_interval_multiplier,
:project_export_enabled,
:prometheus_metrics_enabled,
:recaptcha_enabled,
:recaptcha_private_key,
......
......@@ -8,7 +8,27 @@ class Appearance < ActiveRecord::Base
validates :logo, file_size: { maximum: 1.megabyte }
validates :header_logo, file_size: { maximum: 1.megabyte }
validate :single_appearance_row, on: :create
mount_uploader :logo, AttachmentUploader
mount_uploader :header_logo, AttachmentUploader
has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
CACHE_KEY = 'current_appearance'.freeze
after_commit :flush_redis_cache
def self.current
Rails.cache.fetch(CACHE_KEY) { first }
end
def flush_redis_cache
Rails.cache.delete(CACHE_KEY)
end
def single_appearance_row
if self.class.any?
errors.add(:single_appearance_row, 'Only 1 appearances row can exist')
end
end
end
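A brief usage sketch of the caching pattern above, using only what the class defines; `Rails.cache` is assumed to be backed by Redis, as the hook name suggests.

# The first call runs `Appearance.first` and stores the row under CACHE_KEY.
appearance = Appearance.current

# Subsequent calls are served straight from the cache.
Appearance.current

# Any committed change fires flush_redis_cache, deleting CACHE_KEY so the next
# Appearance.current reads from the database again.
appearance&.update(title: 'Custom title')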
......@@ -254,6 +254,7 @@ class ApplicationSetting < ActiveRecord::Base
performance_bar_allowed_group_id: nil,
plantuml_enabled: false,
plantuml_url: nil,
project_export_enabled: true,
recaptcha_enabled: false,
repository_checks_enabled: true,
repository_storages: ['default'],
......
......@@ -14,9 +14,15 @@ class BroadcastMessage < ActiveRecord::Base
default_value_for :color, '#E75E40'
default_value_for :font, '#FFFFFF'
CACHE_KEY = 'broadcast_message_current'.freeze
after_commit :flush_redis_cache
def self.current
Rails.cache.fetch("broadcast_message_current", expires_in: 1.minute) do
where('ends_at > :now AND starts_at <= :now', now: Time.zone.now).order([:created_at, :id]).to_a
Rails.cache.fetch(CACHE_KEY) do
where('ends_at > :now AND starts_at <= :now', now: Time.zone.now)
.reorder(id: :asc)
.to_a
end
end
......@@ -31,4 +37,8 @@ class BroadcastMessage < ActiveRecord::Base
def ended?
ends_at < Time.zone.now
end
def flush_redis_cache
Rails.cache.delete(CACHE_KEY)
end
end
......@@ -48,6 +48,7 @@ class Event < ActiveRecord::Base
belongs_to :author, class_name: "User"
belongs_to :project
belongs_to :target, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
has_one :push_event_payload, foreign_key: :event_id
# For Hash only
serialize :data # rubocop:disable Cop/ActiveRecordSerialize
......@@ -55,16 +56,28 @@ class Event < ActiveRecord::Base
# Callbacks
after_create :reset_project_activity
after_create :set_last_repository_updated_at, if: :push?
after_create :replicate_event_for_push_events_migration
# Scopes
scope :recent, -> { reorder(id: :desc) }
scope :code_push, -> { where(action: PUSHED) }
scope :in_projects, ->(projects) do
where(project_id: projects.pluck(:id)).recent
scope :in_projects, -> (projects) do
sub_query = projects
.except(:order)
.select(1)
.where('projects.id = events.project_id')
where('EXISTS (?)', sub_query).recent
end
scope :with_associations, -> do
# We're using preload for "push_event_payload" as otherwise the association
# is not always available (depending on the query being built).
includes(:author, :project, project: :namespace)
.preload(:target, :push_event_payload)
end
scope :with_associations, -> { includes(:author, :project, project: :namespace).preload(:target) }
scope :for_milestone_id, ->(milestone_id) { where(target_type: "Milestone", target_id: milestone_id) }
scope :issues, -> { where(target_type: 'Issue') }
scope :merge_requests, -> { where(target_type: 'MergeRequest') }
......@@ -73,7 +86,27 @@ class Event < ActiveRecord::Base
scope :merged, -> { where(action: MERGED) }
scope :totals_by_author, -> { group(:author_id).count }
self.inheritance_column = 'action'
class << self
def find_sti_class(action)
if action.to_i == PUSHED
PushEvent
else
Event
end
end
def subclass_from_attributes(attrs)
# Without this, Rails will keep calling this method on the returned class,
# resulting in an infinite loop.
return unless self == Event
action = attrs.with_indifferent_access[inheritance_column].to_i
PushEvent if action == PUSHED
end
# Update Gitlab::ContributionsCalendar#activity_dates if this changes
def contributions
where("action = ? OR (target_type IN (?) AND action IN (?)) OR (target_type = ? AND action = ?)",
......@@ -296,6 +329,16 @@ class Event < ActiveRecord::Base
@commits ||= (data[:commits] || []).reverse
end
def commit_title
commit = commits.last
commit[:message] if commit
end
def commit_id
commit_to || commit_from
end
def commits_count
data[:total_commits_count] || commits.count || 0
end
......@@ -391,6 +434,16 @@ class Event < ActiveRecord::Base
user ? author_id == user.id : false
end
# We're manually replicating data into the new table since database triggers
# are not dumped to db/schema.rb. This could mean that a new installation
# would not have the triggers in place, thus losing events data in GitLab
# 10.0.
def replicate_event_for_push_events_migration
new_attributes = attributes.with_indifferent_access.except(:title, :data)
EventForMigration.create!(new_attributes)
end
private
def recent_update?
......
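As a hedged aside on the STI hooks added to Event above (`find_sti_class` and `subclass_from_attributes`): rows whose action is PUSHED are built and loaded as PushEvent, while every other action stays a plain Event.

# Building an event with the PUSHED action yields a PushEvent instance via
# subclass_from_attributes.
Event.new(action: Event::PUSHED).is_a?(PushEvent) # => true

# Loading goes through find_sti_class, so pushed rows come back as PushEvent.
Event.where(action: Event::PUSHED).first&.class # => PushEvent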
# A collection of events to display in an event list.
#
# An EventCollection is meant to be used for displaying events to a user (e.g.
# in a controller); it's not suitable for building queries that are used for
# building other queries.
class EventCollection
# To prevent users from putting too much pressure on the database by cycling
# through thousands of events, we put a limit on the number of pages.
MAX_PAGE = 10
# projects - An ActiveRecord::Relation object that returns the projects for
# which to retrieve events.
# filter - An EventFilter instance to use for filtering events.
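# limit - The maximum number of events to return per page (defaults to 20).
# offset - The number of events to skip, used for pagination (defaults to 0).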
def initialize(projects, limit: 20, offset: 0, filter: nil)
@projects = projects
@limit = limit
@offset = offset
@filter = filter
end
# Returns an Array containing the events.
def to_a
return [] if current_page > MAX_PAGE
relation = if Gitlab::Database.join_lateral_supported?
relation_with_join_lateral
else
relation_without_join_lateral
end
relation.with_associations.to_a
end
private
# Returns the events relation to use when JOIN LATERAL is not supported.
#
# This relation simply gets all the events for all authorized projects, then
# limits that set.
def relation_without_join_lateral
events = filtered_events.in_projects(projects)
paginate_events(events)
end
# Returns the events relation to use when JOIN LATERAL is supported.
#
# This relation is built using JOIN LATERAL, producing faster queries than a
# regular LIMIT + OFFSET approach.
def relation_with_join_lateral
projects_for_lateral = projects.select(:id).to_sql
lateral = filtered_events
.limit(limit_for_join_lateral)
.where('events.project_id = projects_for_lateral.id')
.to_sql
# The outer query does not need to re-apply the filters since the JOIN
# LATERAL body already takes care of this.
outer = base_relation
.from("(#{projects_for_lateral}) projects_for_lateral")
.joins("JOIN LATERAL (#{lateral}) AS #{Event.table_name} ON true")
paginate_events(outer)
end
def filtered_events
@filter ? @filter.apply_filter(base_relation) : base_relation
end
def paginate_events(events)
events.limit(@limit).offset(@offset)
end
def base_relation
# We want to have absolute control over the event queries being built, thus
# we're explicitly opting out of any default scopes that may be set.
Event.unscoped.recent
end
def limit_for_join_lateral
# Applying the OFFSET on the inside of a JOIN LATERAL leads to incorrect
# results. To work around this we need to increase the inner limit for every
# page.
#
# This means that on page 1 we use LIMIT 20, and an outer OFFSET of 0. On
# page 2 we use LIMIT 40 and an outer OFFSET of 20.
@limit + @offset
end
def current_page
(@offset / @limit) + 1
end
def projects
@projects.except(:order)
end
end
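A hedged usage sketch; `current_user` and the `event_filter` helper are assumed from the controller changes above, and the numbers follow the defaults in this class.

projects = current_user.authorized_projects

# Page 1: LIMIT 20, OFFSET 0.
EventCollection.new(projects, filter: event_filter).to_a

# Page 2: the lateral sub-query uses LIMIT 40 (limit + offset) and the outer
# query applies OFFSET 20, as described in limit_for_join_lateral.
EventCollection.new(projects, offset: 20, filter: event_filter).to_a

# Past MAX_PAGE (offset 200 with the default limit of 20) the collection
# short-circuits and returns an empty Array.
EventCollection.new(projects, offset: 200, filter: event_filter).to_a # => []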
# This model is used to replicate events between the old "events" table and the
# new "events_for_migration" table that will replace "events" in GitLab 10.0.
class EventForMigration < ActiveRecord::Base
self.table_name = 'events_for_migration'
end
......@@ -279,6 +279,13 @@ class Member < ActiveRecord::Base
@notification_setting ||= user.notification_settings_for(source)
end
def notifiable?(type, opts = {})
# always notify when there isn't a user yet
return true if user.blank?
NotificationRecipientService.notifiable?(user, type, notifiable_options.merge(opts))
end
private
def send_invite
......@@ -335,4 +342,8 @@ class Member < ActiveRecord::Base
def notification_service
NotificationService.new
end
def notifiable_options
{}
end
end
......@@ -35,6 +35,10 @@ class GroupMember < Member
'Group'
end
def notifiable_options
{ group: group }
end
private
def send_invite
......
......@@ -88,6 +88,10 @@ class ProjectMember < Member
project.owner == user
end
def notifiable_options
{ project: project }
end
private
def delete_member_todos
......
......@@ -5,14 +5,22 @@ class NotificationRecipient
custom_action: nil,
target: nil,
acting_user: nil,
project: nil
project: nil,
group: nil,
skip_read_ability: false
)
unless NotificationSetting.levels.key?(type) || type == :subscription
raise ArgumentError, "invalid type: #{type.inspect}"
end
@custom_action = custom_action
@acting_user = acting_user
@target = target
@project = project || @target&.project
@project = project || default_project
@group = group || @project&.group
@user = user
@type = type
@skip_read_ability = skip_read_ability
end
def notification_setting
......@@ -77,6 +85,8 @@ class NotificationRecipient
def has_access?
DeclarativePolicy.subject_scope do
return false unless user.can?(:receive_notifications)
return true if @skip_read_ability
return false if @project && !user.can?(:read_project, @project)
return true unless read_ability
......@@ -96,6 +106,7 @@ class NotificationRecipient
private
def read_ability
return nil if @skip_read_ability
return @read_ability if instance_variable_defined?(:@read_ability)
@read_ability =
......@@ -111,12 +122,18 @@ class NotificationRecipient
end
end
def default_project
return nil if @target.nil?
return @target if @target.is_a?(Project)
return @target.project if @target.respond_to?(:project)
end
def find_notification_setting
project_setting = @project && user.notification_settings_for(@project)
return project_setting unless project_setting.nil? || project_setting.global?
group_setting = @project&.group && user.notification_settings_for(@project.group)
group_setting = @group && user.notification_settings_for(@group)
return group_setting unless group_setting.nil? || group_setting.global?
......
class PushEvent < Event
# This validation exists so we can't accidentally use PushEvent with a
# different "action" value.
validate :validate_push_action
# Authors are required as they're used to display who pushed data.
#
# We're just validating the presence of the ID here as foreign key constraints
# should ensure the ID points to a valid user.
validates :author_id, presence: true
# The project is required to build links to commits, commit ranges, etc.
#
# We're just validating the presence of the ID here as foreign key constraints
# should ensure the ID points to a valid project.
validates :project_id, presence: true
# The "data" field must not be set for push events since it's not used and a
# waste of space.
validates :data, absence: true
# These fields are also not used for push events, thus storing them would be a
# waste.
validates :target_id, absence: true
validates :target_type, absence: true
def self.sti_name
PUSHED
end
def push?
true
end
def push_with_commits?
!!(commit_from && commit_to)
end
def tag?
return super unless push_event_payload
push_event_payload.tag?
end
def branch?
return super unless push_event_payload
push_event_payload.branch?
end
def valid_push?
return super unless push_event_payload
push_event_payload.ref.present?
end
def new_ref?
return super unless push_event_payload
push_event_payload.created?
end
def rm_ref?
return super unless push_event_payload
push_event_payload.removed?
end
def commit_from
return super unless push_event_payload
push_event_payload.commit_from
end
def commit_to
return super unless push_event_payload
push_event_payload.commit_to
end
def ref_name
return super unless push_event_payload
push_event_payload.ref
end
def ref_type
return super unless push_event_payload
push_event_payload.ref_type
end
def branch_name
return super unless push_event_payload
ref_name
end
def tag_name
return super unless push_event_payload
ref_name
end
def commit_title
return super unless push_event_payload
push_event_payload.commit_title
end
def commit_id
commit_to || commit_from
end
def commits_count
return super unless push_event_payload
push_event_payload.commit_count
end
def validate_push_action
return if action == PUSHED
errors.add(:action, "the action #{action.inspect} is not valid")
end
end
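A short sketch of the fallback pattern used throughout PushEvent above: readers delegate to the payload row when one exists, and `super` falls back to Event's legacy serialized data for rows the background migration has not processed yet. `push_event_id` is a placeholder.

event = PushEvent.find(push_event_id) # placeholder id

# With a push_event_payload row, the readers delegate to it:
event.ref_name      # => push_event_payload.ref
event.commits_count # => push_event_payload.commit_count

# Without a payload, `super` is used instead, e.g. commits_count falls back to
# data[:total_commits_count] on the legacy Event columns.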
class PushEventPayload < ActiveRecord::Base
include ShaAttribute
belongs_to :event, inverse_of: :push_event_payload
validates :event_id, :commit_count, :action, :ref_type, presence: true
validates :commit_title, length: { maximum: 70 }
sha_attribute :commit_from
sha_attribute :commit_to
enum action: {
created: 0,
removed: 1,
pushed: 2
}
enum ref_type: {
branch: 0,
tag: 1
}
end
......@@ -1092,6 +1092,7 @@ class User < ActiveRecord::Base
# Added according to https://github.com/plataformatec/devise/blob/7df57d5081f9884849ca15e4fde179ef164a575f/README.md#activejob-integration
def send_devise_notification(notification, *args)
return true unless can?(:receive_notifications)
devise_mailer.send(notification, self, *args).deliver_later
end
......
......@@ -71,7 +71,14 @@ class EventCreateService
end
def push(project, current_user, push_data)
create_event(project, current_user, Event::PUSHED, data: push_data)
# We're using an explicit transaction here so that any errors that may occur
# when creating push payload data will result in the event creation being
# rolled back as well.
Event.transaction do
event = create_event(project, current_user, Event::PUSHED)
PushEventPayloadService.new(event, push_data).execute
end
Users::ActivityService.new(current_user, 'push').execute
end
......
......@@ -12,9 +12,11 @@ class NotificationService
# only if ssh key is not deploy key
#
# This is security email so it will be sent
# even if the user disabled notifications
# even if the user disabled notifications. However,
# it won't be sent to internal users like the
# ghost user or the EE support bot.
def new_key(key)
if key.user
if key.user&.can?(:receive_notifications)
mailer.new_ssh_key_email(key.id).deliver_later
end
end
......@@ -24,14 +26,14 @@ class NotificationService
# This is a security email so it will be sent even if the user disabled
# notifications
def new_gpg_key(gpg_key)
if gpg_key.user
if gpg_key.user&.can?(:receive_notifications)
mailer.new_gpg_key_email(gpg_key.id).deliver_later
end
end
# Always notify user about email added to profile
def new_email(email)
if email.user
if email.user&.can?(:receive_notifications)
mailer.new_email_email(email.id).deliver_later
end
end
......@@ -204,6 +206,8 @@ class NotificationService
# Notify new user with email after creation
def new_user(user, token = nil)
return true unless notifiable?(user, :mention)
# Don't email omniauth created users
mailer.new_user_email(user.id, token).deliver_later unless user.identities.any?
end
......@@ -229,19 +233,27 @@ class NotificationService
# Members
def new_access_request(member)
return true unless member.notifiable?(:subscription)
mailer.member_access_requested_email(member.real_source_type, member.id).deliver_later
end
def decline_access_request(member)
return true unless member.notifiable?(:subscription)
mailer.member_access_denied_email(member.real_source_type, member.source_id, member.user_id).deliver_later
end
# Project invite
def invite_project_member(project_member, token)
return true unless project_member.notifiable?(:subscription)
mailer.member_invited_email(project_member.real_source_type, project_member.id, token).deliver_later
end
def accept_project_invite(project_member)
return true unless project_member.notifiable?(:subscription)
mailer.member_invite_accepted_email(project_member.real_source_type, project_member.id).deliver_later
end
......@@ -255,10 +267,14 @@ class NotificationService
end
def new_project_member(project_member)
return true unless project_member.notifiable?(:mention, skip_read_ability: true)
mailer.member_access_granted_email(project_member.real_source_type, project_member.id).deliver_later
end
def update_project_member(project_member)
return true unless project_member.notifiable?(:mention)
mailer.member_access_granted_email(project_member.real_source_type, project_member.id).deliver_later
end
......@@ -272,6 +288,9 @@ class NotificationService
end
def decline_group_invite(group_member)
# always send this one, since it's a response to the user's own
# action
mailer.member_invite_declined_email(
group_member.real_source_type,
group_member.group.id,
......@@ -281,15 +300,19 @@ class NotificationService
end
def new_group_member(group_member)
return true unless group_member.notifiable?(:mention)
mailer.member_access_granted_email(group_member.real_source_type, group_member.id).deliver_later
end
def update_group_member(group_member)
return true unless group_member.notifiable?(:mention)
mailer.member_access_granted_email(group_member.real_source_type, group_member.id).deliver_later
end
def project_was_moved(project, old_path_with_namespace)
recipients = NotificationRecipientService.notifiable_users(project.team.members, :mention, project: project)
recipients = notifiable_users(project.team.members, :mention, project: project)
recipients.each do |recipient|
mailer.project_was_moved_email(
......@@ -311,10 +334,14 @@ class NotificationService
end
def project_exported(project, current_user)
return true unless notifiable?(current_user, :mention, project: project)
mailer.project_was_exported_email(current_user, project).deliver_later
end
def project_not_exported(project, current_user, errors)
return true unless notifiable?(current_user, :mention, project: project)
mailer.project_was_not_exported_email(current_user, project, errors).deliver_later
end
......@@ -323,7 +350,7 @@ class NotificationService
return unless mailer.respond_to?(email_template)
recipients ||= NotificationRecipientService.notifiable_users(
recipients ||= notifiable_users(
[pipeline.user], :watch,
custom_action: :"#{pipeline.status}_pipeline",
target: pipeline
......@@ -392,7 +419,7 @@ class NotificationService
def relabeled_resource_email(target, labels, current_user, method)
recipients = labels.flat_map { |l| l.subscribers(target.project) }
recipients = NotificationRecipientService.notifiable_users(
recipients = notifiable_users(
recipients, :subscription,
target: target,
acting_user: current_user
......@@ -448,4 +475,14 @@ class NotificationService
object.previous_changes[attribute].first
end
end
private
def notifiable?(*args)
NotificationRecipientService.notifiable?(*args)
end
def notifiable_users(*args)
NotificationRecipientService.notifiable_users(*args)
end
end
# Service class for creating push event payloads as stored in the
# "push_event_payloads" table.
#
# Example:
#
# data = Gitlab::DataBuilder::Push.build(...)
# event = Event.create(...)
#
# PushEventPayloadService.new(event, data).execute
class PushEventPayloadService
# event - The event this push payload belongs to.
# push_data - A Hash produced by `Gitlab::DataBuilder::Push.build` to use for
# building the push payload.
def initialize(event, push_data)
@event = event
@push_data = push_data
end
# Creates and returns a new PushEventPayload row.
#
# This method will raise upon encountering validation errors.
#
# Returns an instance of PushEventPayload.
def execute
@event.build_push_event_payload(
commit_count: commit_count,
action: action,
ref_type: ref_type,
commit_from: commit_from_id,
commit_to: commit_to_id,
ref: trimmed_ref,
commit_title: commit_title,
event_id: @event.id
)
@event.push_event_payload.save!
@event.push_event_payload
end
# Returns the commit title to use.
#
# The commit title is limited to the first line and a maximum of 70
# characters.
def commit_title
commit = @push_data.fetch(:commits).last
return nil unless commit && commit[:message]
raw_msg = commit[:message]
# Find where the first line ends, without turning the entire message into an
# Array of lines (this is a waste of memory for large commit messages).
index = raw_msg.index("\n")
message = index ? raw_msg[0..index] : raw_msg
message.strip.truncate(70)
end
def commit_from_id
if create?
nil
else
revision_before
end
end
def commit_to_id
if remove?
nil
else
revision_after
end
end
def commit_count
@push_data.fetch(:total_commits_count)
end
def ref
@push_data.fetch(:ref)
end
def revision_before
@push_data.fetch(:before)
end
def revision_after
@push_data.fetch(:after)
end
def trimmed_ref
Gitlab::Git.ref_name(ref)
end
def create?
Gitlab::Git.blank_ref?(revision_before)
end
def remove?
Gitlab::Git.blank_ref?(revision_after)
end
def action
if create?
:created
elsif remove?
:removed
else
:pushed
end
end
def ref_type
if Gitlab::Git.tag_ref?(ref)
:tag
else
:branch
end
end
end
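A hedged walk-through of how a push data hash maps through the service above; the keys follow the `fetch` calls in this class, the all-zero SHA is what `Gitlab::Git.blank_ref?` treats as a blank ref (so this records a branch creation), and `event` stands for a previously created PUSHED event as in EventCreateService#push.

push_data = {
  before: '0' * 40,                 # blank ref => create? is true, so commit_from stays nil
  after:  'f' * 40,                 # placeholder SHA recorded as commit_to
  ref:    'refs/heads/feature',     # trimmed_ref becomes "feature", ref_type :branch
  total_commits_count: 3,
  commits: [{ message: "Fix the thing\n\nLonger body..." }]
}

payload = PushEventPayloadService.new(event, push_data).execute

payload.action       # => "created"
payload.commit_count # => 3
payload.commit_title # => "Fix the thing" (first line only, capped at 70 characters)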
......@@ -4,7 +4,7 @@ class PersonalFileUploader < FileUploader
end
def self.base_dir
File.join(root_dir, 'system')
File.join(root_dir, '-', 'system')
end
private
......
......@@ -48,6 +48,13 @@
= select(:application_setting, :enabled_git_access_protocol, [['Both SSH and HTTP(S)', nil], ['Only SSH', 'ssh'], ['Only HTTP(S)', 'http']], {}, class: 'form-control')
%span.help-block#clone-protocol-help
Allow only the selected protocols to be used for Git access.
.form-group
.col-sm-offset-2.col-sm-10
.checkbox
= f.label :project_export_enabled do
= f.check_box :project_export_enabled
Project export enabled
- if ldap_enabled?
.form-group
= f.label :allow_group_owners_to_manage_ldap, 'LDAP settings', class: 'control-label col-sm-2'
......
%li.commit
.commit-row-title
= link_to truncate_sha(commit[:id]), project_commit_path(project, commit[:id]), class: "commit-sha", alt: '', title: truncate_sha(commit[:id])
= link_to truncate_sha(event.commit_id), project_commit_path(project, event.commit_id), class: "commit-sha", alt: '', title: truncate_sha(event.commit_id)
&middot;
= markdown event_commit_title(commit[:message]), project: project, pipeline: :single_line, author: event.author
= markdown event_commit_title(event.commit_title), project: project, pipeline: :single_line, author: event.author
%div{ xmlns: "http://www.w3.org/1999/xhtml" }
- event.commits.first(15).each do |commit|
%p
%strong= commit[:author][:name]
= link_to "(##{truncate_sha(commit[:id])})", project_commit_path(event.project, id: commit[:id])
%strong= event.author_name
= link_to "(#{truncate_sha(event.commit_id)})", project_commit_path(event.project, event.commit_id)
%i
at
= commit[:timestamp].to_time.to_s(:short)
%blockquote= markdown(escape_once(commit[:message]), pipeline: :atom, project: event.project, author: event.author)
- if event.commits_count > 15
= event.created_at.to_s(:short)
%blockquote= markdown(escape_once(event.commit_title), pipeline: :atom, project: event.project, author: event.author)
- if event.commits_count > 1
%p
%i
\... and
= pluralize(event.commits_count - 15, "more commit")
= pluralize(event.commits_count - 1, "more commit")
......@@ -14,9 +14,7 @@
- if event.push_with_commits?
.event-body
%ul.well-list.event_commits
- few_commits = event.commits[0...2]
- few_commits.each do |commit|
= render "events/commit", commit: commit, project: project, event: event
= render "events/commit", project: project, event: event
- create_mr = event.new_ref? && create_mr_button?(project.default_branch, event.ref_name, project) && event.authored_by?(current_user)
- if event.commits_count > 1
......@@ -44,9 +42,6 @@
= link_to create_mr_path(project.default_branch, event.ref_name, project) do
Create Merge Request
- elsif event.rm_ref?
- repository = project.repository
- last_commit = repository.commit(event.commit_from)
- if last_commit
.event-body
%ul.well-list.event_commits
= render "events/commit", commit: last_commit, project: project, event: event
= render "events/commit", project: project, event: event
- return unless current_application_settings.project_export_enabled?
- project = local_assigns.fetch(:project)
- expanded = Rails.env.test?
%section.settings
.settings-header
%h4
Export project
%button.btn.js-settings-toggle
= expanded ? 'Collapse' : 'Expand'
%p
Export this project with all its related data in order to move your project to a new GitLab instance. Once the export is finished, you can import the file from the "New Project" page.
.settings-content.no-animate{ class: ('expanded' if expanded) }
.bs-callout.bs-callout-info
%p.append-bottom-0
%p
The following items will be exported:
%ul
%li Project and wiki repositories
%li Project uploads
%li Project configuration including web hooks and services
%li Issues with comments, merge requests with diffs and comments, labels, milestones, snippets, and other project entities
%p
The following items will NOT be exported:
%ul
%li Job traces and artifacts
%li LFS objects
%li Container registry images
%li CI variables
%li Any encrypted tokens
%p
Once the exported file is ready, you will receive a notification email with a download link.
- if project.export_project_path
= link_to 'Download export', download_export_project_path(project),
rel: 'nofollow', download: '', method: :get, class: "btn btn-default"
= link_to 'Generate new export', generate_new_export_project_path(project),
method: :post, class: "btn btn-default"
- else
= link_to 'Export project', export_project_path(project),
method: :post, class: "btn btn-default"
- referenced_users = local_assigns.fetch(:referenced_users, nil)
- if defined?(@issue) && @issue.confidential?
%li.confidential-issue-warning
.confidential-issue-warning
= confidential_icon(@issue)
%span This is a confidential issue. Your comment will not be visible to the public.
- else
%li.confidential-issue-warning.not-confidential
.md-area
.md-header
......
......@@ -182,42 +182,7 @@
enabled: "#{@project.service_desk_enabled}",
incoming_email: (@project.service_desk_address if @project.service_desk_enabled) } }
%section.settings
.settings-header
%h4
Export project
%button.btn.js-settings-toggle
= expanded ? 'Collapse' : 'Expand'
%p
Export this project with all its related data in order to move your project to a new GitLab instance. Once the export is finished, you can import the file from the "New Project" page.
.settings-content.no-animate{ class: ('expanded' if expanded) }
.bs-callout.bs-callout-info
%p.append-bottom-0
%p
The following items will be exported:
%ul
%li Project and wiki repositories
%li Project uploads
%li Project configuration including web hooks and services
%li Issues with comments, merge requests with diffs and comments, labels, milestones, snippets, and other project entities
%p
The following items will NOT be exported:
%ul
%li Job traces and artifacts
%li LFS objects
%li Container registry images
%li CI variables
%li Any encrypted tokens
%p
Once the exported file is ready, you will receive a notification email with a download link.
- if @project.export_project_path
= link_to 'Download export', download_export_project_path(@project),
rel: 'nofollow', download: '', method: :get, class: "btn btn-default"
= link_to 'Generate new export', generate_new_export_project_path(@project),
method: :post, class: "btn btn-default"
- else
= link_to 'Export project', export_project_path(@project),
method: :post, class: "btn btn-default"
= render 'export', project: @project
%section.settings.advanced-settings
.settings-header
......
......@@ -75,7 +75,7 @@
Pipeline
= link_to "##{@build.pipeline.id}", project_pipeline_path(@project, @build.pipeline), class: 'link-commit'
from
= link_to "#{@build.pipeline.ref}", project_branch_path(@project, @build.pipeline.ref), class: 'link-commit'
= link_to "#{@build.pipeline.ref}", project_ref_path(@project, @build.pipeline.ref), class: 'link-commit ref-name'
%button.dropdown-menu-toggle{ type: 'button', 'data-toggle' => 'dropdown' }
%span.stage-selection More
= icon('chevron-down')
......
---
title: Disabling notifications globally now properly turns off group/project added
emails
merge_request: 13325
author: @jneen
type: fixed
---
title: Include the `is_admin` field in the `GET /users/:id` API when current user
is an admin
merge_request:
author:
type: fixed
---
title: Cache Appearance instances in Redis
merge_request:
author:
---
title: Better caching and indexing of broadcast messages
merge_request:
author:
---
title: Bumps omniauth-ldap gem version to 2.0.4
merge_request: 13465
author:
---
title: Don't rename namespace called system when upgrading from 9.1.x to 9.5
merge_request: 13228
author:
---
title: Add option to disable project export on instance
merge_request: 13211
author: Robin Bobbitt
---
title: Make GPGME temporary directory handling thread safe
merge_request: 13481
author: Alexis Reigel
---
title: Migrate events into a new format to reduce the storage necessary and improve performance
merge_request:
author:
---
title: Use a specialized class for querying events to improve performance
merge_request:
author:
......@@ -5,12 +5,12 @@ scope path: :uploads do
constraints: { model: /note|user|group|project/, mounted_as: /avatar|attachment/, filename: /[^\/]+/ }
# show uploads for models, snippets (notes) available for now
get 'system/:model/:id/:secret/:filename',
get '-/system/:model/:id/:secret/:filename',
to: 'uploads#show',
constraints: { model: /personal_snippet/, id: /\d+/, filename: /[^\/]+/ }
# show temporary uploads
get 'system/temp/:secret/:filename',
get '-/system/temp/:secret/:filename',
to: 'uploads#show',
constraints: { filename: /[^\/]+/ }
......
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class RenameSystemNamespaces < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
include Gitlab::ShellAdapter
disable_ddl_transaction!
class User < ActiveRecord::Base
self.table_name = 'users'
end
class Namespace < ActiveRecord::Base
self.table_name = 'namespaces'
belongs_to :parent, class_name: 'RenameSystemNamespaces::Namespace'
has_one :route, as: :source
has_many :children, class_name: 'RenameSystemNamespaces::Namespace', foreign_key: :parent_id
belongs_to :owner, class_name: 'RenameSystemNamespaces::User'
# Overridden to have the correct `source_type` for the `route` relation
def self.name
'Namespace'
end
def full_path
if route && route.path.present?
@full_path ||= route.path
else
update_route if persisted?
build_full_path
end
end
def build_full_path
if parent && path
parent.full_path + '/' + path
else
path
end
end
def update_route
prepare_route
route.save
end
def prepare_route
route || build_route(source: self)
route.path = build_full_path
route.name = build_full_name
@full_path = nil
@full_name = nil
end
def build_full_name
if parent && name
parent.human_name + ' / ' + name
else
name
end
end
def human_name
owner&.name
end
end
class Route < ActiveRecord::Base
self.table_name = 'routes'
belongs_to :source, polymorphic: true
end
class Project < ActiveRecord::Base
self.table_name = 'projects'
def repository_storage_path
Gitlab.config.repositories.storages[repository_storage]['path']
end
end
DOWNTIME = false
def up
return unless system_namespace
old_path = system_namespace.path
old_full_path = system_namespace.full_path
# Only remove the last occurrence of the path name to get the parent namespace path
namespace_path = remove_last_occurrence(old_full_path, old_path)
new_path = rename_path(namespace_path, old_path)
new_full_path = join_namespace_path(namespace_path, new_path)
Namespace.where(id: system_namespace).update_all(path: new_path) # skips callbacks & validations
replace_statement = replace_sql(Route.arel_table[:path], old_full_path, new_full_path)
route_matches = [old_full_path, "#{old_full_path}/%"]
update_column_in_batches(:routes, :path, replace_statement) do |table, query|
query.where(Route.arel_table[:path].matches_any(route_matches))
end
clear_cache_for_namespace(system_namespace)
# tasks here are based on `Namespace#move_dir`
move_repositories(system_namespace, old_full_path, new_full_path)
move_namespace_folders(uploads_dir, old_full_path, new_full_path) if file_storage?
move_namespace_folders(pages_dir, old_full_path, new_full_path)
end
def down
# nothing to do
end
def remove_last_occurrence(string, pattern)
string.reverse.sub(pattern.reverse, "").reverse
end
def move_namespace_folders(directory, old_relative_path, new_relative_path)
old_path = File.join(directory, old_relative_path)
return unless File.directory?(old_path)
new_path = File.join(directory, new_relative_path)
FileUtils.mv(old_path, new_path)
end
def move_repositories(namespace, old_full_path, new_full_path)
repo_paths_for_namespace(namespace).each do |repository_storage_path|
# Ensure old directory exists before moving it
gitlab_shell.add_namespace(repository_storage_path, old_full_path)
unless gitlab_shell.mv_namespace(repository_storage_path, old_full_path, new_full_path)
say "Exception moving path #{repository_storage_path} from #{old_full_path} to #{new_full_path}"
end
end
end
def rename_path(namespace_path, path_was)
counter = 0
path = "#{path_was}#{counter}"
while route_exists?(join_namespace_path(namespace_path, path))
counter += 1
path = "#{path_was}#{counter}"
end
path
end
def route_exists?(full_path)
Route.where(Route.arel_table[:path].matches(full_path)).any?
end
def join_namespace_path(namespace_path, path)
if namespace_path.present?
File.join(namespace_path, path)
else
path
end
end
def system_namespace
@system_namespace ||= Namespace.where(parent_id: nil)
.where(arel_table[:path].matches(system_namespace_path))
.first
end
def system_namespace_path
"system"
end
def clear_cache_for_namespace(namespace)
project_ids = projects_for_namespace(namespace).pluck(:id)
update_column_in_batches(:projects, :description_html, nil) do |table, query|
query.where(table[:id].in(project_ids))
end
update_column_in_batches(:issues, :description_html, nil) do |table, query|
query.where(table[:project_id].in(project_ids))
end
update_column_in_batches(:merge_requests, :description_html, nil) do |table, query|
query.where(table[:target_project_id].in(project_ids))
end
update_column_in_batches(:notes, :note_html, nil) do |table, query|
query.where(table[:project_id].in(project_ids))
end
update_column_in_batches(:milestones, :description_html, nil) do |table, query|
query.where(table[:project_id].in(project_ids))
end
end
def projects_for_namespace(namespace)
namespace_ids = child_ids_for_parent(namespace, ids: [namespace.id])
namespace_or_children = Project.arel_table[:namespace_id].in(namespace_ids)
Project.unscoped.where(namespace_or_children)
end
# This won't scale to huge trees, but it should do for a handful of namespaces
# called `system`.
def child_ids_for_parent(namespace, ids: [])
namespace.children.each do |child|
ids << child.id
child_ids_for_parent(child, ids: ids) if child.children.any?
end
ids
end
def repo_paths_for_namespace(namespace)
projects_for_namespace(namespace).distinct
.select(:repository_storage).map(&:repository_storage_path)
end
def uploads_dir
File.join(Rails.root, "public", "uploads")
end
def pages_dir
Settings.pages.path
end
def file_storage?
CarrierWave::Uploader::Base.storage == CarrierWave::Storage::File
end
def arel_table
Namespace.arel_table
end
end
......@@ -54,6 +54,6 @@ class MoveUploadsToSystemDir < ActiveRecord::Migration
end
def new_upload_dir
File.join(base_directory, "public", "uploads", "system")
File.join(base_directory, "public", "uploads", "-", "system")
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class PrepareEventsTableForPushEventsMigration < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
disable_ddl_transaction!
def up
# The order of these columns is deliberate and results in the following
# columns and sizes:
#
# * id (4 bytes)
# * project_id (4 bytes)
# * author_id (4 bytes)
# * target_id (4 bytes)
# * created_at (8 bytes)
# * updated_at (8 bytes)
# * action (2 bytes)
# * target_type (variable)
#
# Unfortunately we can't make the "id" column a bigint/bigserial as Rails 4
# does not support this properly.
create_table :events_for_migration do |t|
t.references :project,
index: true,
foreign_key: { on_delete: :cascade }
t.integer :author_id, index: true, null: false
t.integer :target_id
t.timestamps_with_timezone null: false
t.integer :action, null: false, limit: 2, index: true
t.string :target_type
t.index %i[target_type target_id]
end
# t.references doesn't like it when the column name doesn't match the table
# name, so we have to add the foreign key separately.
add_concurrent_foreign_key(:events_for_migration, :users, column: :author_id)
end
def down
drop_table :events_for_migration
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class CreatePushEventPayloadsTables < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
disable_ddl_transaction!
def up
create_table :push_event_payloads, id: false do |t|
t.bigint :commit_count, null: false
t.integer :event_id, null: false
t.integer :action, null: false, limit: 2
t.integer :ref_type, null: false, limit: 2
t.binary :commit_from
t.binary :commit_to
t.text :ref
t.string :commit_title, limit: 70
t.index :event_id, unique: true
end
# We're adding a foreign key to the _shadow_ table, and this is deliberate.
# By using the shadow table we don't have to recreate/revalidate this
# foreign key after swapping the "events_for_migration" and "events" tables.
#
# The "events_for_migration" table has a foreign key to "projects.id"
# ensuring that project removals also remove events from the shadow table
# (and thus also from this table).
add_concurrent_foreign_key(
:push_event_payloads,
:events_for_migration,
column: :event_id
)
end
def down
drop_table :push_event_payloads
end
end
......@@ -15,6 +15,11 @@ class MoveSystemUploadFolder < ActiveRecord::Migration
return
end
if File.directory?(new_directory)
say "#{new_directory} already exists. No need to redo the move."
return
end
FileUtils.mkdir_p(File.join(base_directory, '-'))
say "Moving #{old_directory} -> #{new_directory}"
......@@ -33,6 +38,11 @@ class MoveSystemUploadFolder < ActiveRecord::Migration
return
end
if !File.symlink?(old_directory) && File.directory?(old_directory)
say "#{old_directory} already exists and is not a symlink, no need to revert."
return
end
if File.symlink?(old_directory)
say "Removing #{old_directory} -> #{new_directory} symlink"
FileUtils.rm(old_directory)
......
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddIndexOnEventsProjectIdId < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
COLUMNS = %i[project_id id].freeze
TABLES = %i[events events_for_migration].freeze
disable_ddl_transaction!
def up
TABLES.each do |table|
add_concurrent_index(table, COLUMNS) unless index_exists?(table, COLUMNS)
# We remove the index _after_ adding the new one since MySQL doesn't let
# you remove an index when a foreign key exists for the same column.
if index_exists?(table, :project_id)
remove_concurrent_index(table, :project_id)
end
end
end
def down
TABLES.each do |table|
unless index_exists?(table, :project_id)
add_concurrent_index(table, :project_id)
end
unless index_exists?(table, COLUMNS)
remove_concurrent_index(table, COLUMNS)
end
end
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddBroadcastMessagesIndex < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
disable_ddl_transaction!
COLUMNS = %i[starts_at ends_at id].freeze
def up
add_concurrent_index :broadcast_messages, COLUMNS
end
def down
remove_concurrent_index :broadcast_messages, COLUMNS
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddBroadcastMessageNotNullConstraints < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
COLUMNS = %i[starts_at ends_at created_at updated_at message_html]
def change
COLUMNS.each do |column|
change_column_null :broadcast_messages, column, false
end
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class CleanupAppearancesSchema < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
NOT_NULL_COLUMNS = %i[title description description_html created_at updated_at]
TIME_COLUMNS = %i[created_at updated_at]
def up
NOT_NULL_COLUMNS.each do |column|
change_column_null :appearances, column, false
end
TIME_COLUMNS.each do |column|
change_column :appearances, column, :datetime_with_timezone
end
end
def down
NOT_NULL_COLUMNS.each do |column|
change_column_null :appearances, column, true
end
TIME_COLUMNS.each do |column|
change_column :appearances, column, :datetime # rubocop: disable Migration/Datetime
end
end
end
class AddProjectExportEnabledToApplicationSettings < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
disable_ddl_transaction!
DOWNTIME = false
def up
add_column_with_default(:application_settings, :project_export_enabled, :boolean, default: true)
end
def down
remove_column(:application_settings, :project_export_enabled)
end
end
......@@ -48,7 +48,7 @@ class UpdateUploadPathsToSystem < ActiveRecord::Migration
end
def new_upload_dir
File.join(base_directory, "system")
File.join(base_directory, "-", "system")
end
def arel_table
......
......@@ -47,6 +47,6 @@ class CleanUploadSymlinks < ActiveRecord::Migration
end
def new_upload_dir
File.join(base_directory, "public", "uploads", "system")
File.join(base_directory, "public", "uploads", "-", "system")
end
end
......@@ -52,6 +52,6 @@ class MoveAppearanceToSystemDir < ActiveRecord::Migration
end
def new_upload_dir
File.join(base_directory, "public", "uploads", "system")
File.join(base_directory, "public", "uploads", "-", "system")
end
end
......@@ -10,7 +10,7 @@ class MovePersonalSnippetsFiles < ActiveRecord::Migration
return unless file_storage?
@source_relative_location = File.join('/uploads', 'personal_snippet')
@destination_relative_location = File.join('/uploads', 'system', 'personal_snippet')
@destination_relative_location = File.join('/uploads', '-', 'system', 'personal_snippet')
move_personal_snippet_files
end
......@@ -18,7 +18,7 @@ class MovePersonalSnippetsFiles < ActiveRecord::Migration
def down
return unless file_storage?
@source_relative_location = File.join('/uploads', 'system', 'personal_snippet')
@source_relative_location = File.join('/uploads', '-', 'system', 'personal_snippet')
@destination_relative_location = File.join('/uploads', 'personal_snippet')
move_personal_snippet_files
......
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class ScheduleEventMigrations < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
BUFFER_SIZE = 1000
disable_ddl_transaction!
class Event < ActiveRecord::Base
include EachBatch
self.table_name = 'events'
end
def up
jobs = []
Event.each_batch(of: 1000) do |relation|
min, max = relation.pluck('MIN(id), MAX(id)').first
if jobs.length == BUFFER_SIZE
# We push multiple jobs at a time to reduce the time spent in
# Sidekiq/Redis operations. We're using this buffer based approach so we
# don't need to run additional queries for every range.
BackgroundMigrationWorker.perform_bulk(jobs)
jobs.clear
end
jobs << ['MigrateEventsToPushEventPayloads', [min, max]]
end
BackgroundMigrationWorker.perform_bulk(jobs) unless jobs.empty?
end
def down
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class MovePersonalSnippetFilesIntoCorrectFolder < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
disable_ddl_transaction!
DOWNTIME = false
NEW_DIRECTORY = File.join('/uploads', '-', 'system', 'personal_snippet')
OLD_DIRECTORY = File.join('/uploads', 'system', 'personal_snippet')
def up
return unless file_storage?
BackgroundMigrationWorker.perform_async('MovePersonalSnippetFiles',
[OLD_DIRECTORY, NEW_DIRECTORY])
end
def down
return unless file_storage?
BackgroundMigrationWorker.perform_async('MovePersonalSnippetFiles',
[NEW_DIRECTORY, OLD_DIRECTORY])
end
def file_storage?
CarrierWave::Uploader::Base.storage == CarrierWave::Storage::File
end
end
......@@ -28,14 +28,14 @@ ActiveRecord::Schema.define(version: 20170811203342) do
end
create_table "appearances", force: :cascade do |t|
t.string "title"
t.text "description"
t.string "title", null: false
t.text "description", null: false
t.string "header_logo"
t.string "logo"
t.integer "updated_by"
t.datetime "created_at"
t.datetime "updated_at"
t.string "header_logo"
t.text "description_html"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.text "description_html", null: false
t.integer "cached_markdown_version"
end
......@@ -150,6 +150,7 @@ ActiveRecord::Schema.define(version: 20170811203342) do
t.integer "performance_bar_allowed_group_id"
t.boolean "password_authentication_enabled"
t.boolean "allow_group_owners_to_manage_ldap", default: true, null: false
t.boolean "project_export_enabled", default: true, null: false
end
create_table "approvals", force: :cascade do |t|
......@@ -220,16 +221,18 @@ ActiveRecord::Schema.define(version: 20170811203342) do
create_table "broadcast_messages", force: :cascade do |t|
t.text "message", null: false
t.datetime "starts_at"
t.datetime "ends_at"
t.datetime "created_at"
t.datetime "updated_at"
t.datetime "starts_at", null: false
t.datetime "ends_at", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "color"
t.string "font"
t.text "message_html"
t.text "message_html", null: false
t.integer "cached_markdown_version"
end
add_index "broadcast_messages", ["starts_at", "ends_at", "id"], name: "index_broadcast_messages_on_starts_at_and_ends_at_and_id", using: :btree
create_table "chat_names", force: :cascade do |t|
t.integer "user_id", null: false
t.integer "service_id", null: false
......@@ -603,10 +606,25 @@ ActiveRecord::Schema.define(version: 20170811203342) do
add_index "events", ["action"], name: "index_events_on_action", using: :btree
add_index "events", ["author_id"], name: "index_events_on_author_id", using: :btree
add_index "events", ["created_at"], name: "index_events_on_created_at", using: :btree
add_index "events", ["project_id"], name: "index_events_on_project_id", using: :btree
add_index "events", ["project_id", "id"], name: "index_events_on_project_id_and_id", using: :btree
add_index "events", ["target_id"], name: "index_events_on_target_id", using: :btree
add_index "events", ["target_type"], name: "index_events_on_target_type", using: :btree
create_table "events_for_migration", force: :cascade do |t|
t.integer "project_id"
t.integer "author_id", null: false
t.integer "target_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.integer "action", limit: 2, null: false
t.string "target_type"
end
add_index "events_for_migration", ["action"], name: "index_events_for_migration_on_action", using: :btree
add_index "events_for_migration", ["author_id"], name: "index_events_for_migration_on_author_id", using: :btree
add_index "events_for_migration", ["project_id", "id"], name: "index_events_for_migration_on_project_id_and_id", using: :btree
add_index "events_for_migration", ["target_type", "target_id"], name: "index_events_for_migration_on_target_type_and_target_id", using: :btree
create_table "feature_gates", force: :cascade do |t|
t.string "feature_key", null: false
t.string "key", null: false
......@@ -1562,6 +1580,19 @@ ActiveRecord::Schema.define(version: 20170811203342) do
add_index "push_rules", ["project_id"], name: "index_push_rules_on_project_id", using: :btree
create_table "push_event_payloads", id: false, force: :cascade do |t|
t.integer "commit_count", limit: 8, null: false
t.integer "event_id", null: false
t.integer "action", limit: 2, null: false
t.integer "ref_type", limit: 2, null: false
t.binary "commit_from"
t.binary "commit_to"
t.text "ref"
t.string "commit_title", limit: 70
end
add_index "push_event_payloads", ["event_id"], name: "index_push_event_payloads_on_event_id", unique: true, using: :btree
create_table "redirect_routes", force: :cascade do |t|
t.integer "source_id", null: false
t.string "source_type", null: false
......@@ -2011,6 +2042,8 @@ ActiveRecord::Schema.define(version: 20170811203342) do
add_foreign_key "deployments", "projects", name: "fk_b9a3851b82", on_delete: :cascade
add_foreign_key "environments", "projects", name: "fk_d1c8c1da6a", on_delete: :cascade
add_foreign_key "events", "projects", name: "fk_0434b48643", on_delete: :cascade
add_foreign_key "events_for_migration", "projects", on_delete: :cascade
add_foreign_key "events_for_migration", "users", column: "author_id", name: "fk_edfd187b6f", on_delete: :cascade
add_foreign_key "forked_project_links", "projects", column: "forked_to_project_id", name: "fk_434510edb0", on_delete: :cascade
add_foreign_key "geo_event_log", "geo_repositories_changed_events", column: "repositories_changed_event_id", name: "fk_4a99ebfd60", on_delete: :cascade
add_foreign_key "geo_event_log", "geo_repository_deleted_events", column: "repository_deleted_event_id", name: "fk_c4b1c1f66e", on_delete: :cascade
......@@ -2075,6 +2108,7 @@ ActiveRecord::Schema.define(version: 20170811203342) do
add_foreign_key "protected_tag_create_access_levels", "users"
add_foreign_key "protected_tags", "projects", name: "fk_8e4af87648", on_delete: :cascade
add_foreign_key "push_rules", "projects", name: "fk_83b29894de", on_delete: :cascade
add_foreign_key "push_event_payloads", "events_for_migration", column: "event_id", name: "fk_36c74129da", on_delete: :cascade
add_foreign_key "releases", "projects", name: "fk_47fe2a0596", on_delete: :cascade
add_foreign_key "remote_mirrors", "projects", name: "fk_43a9aa4ca8", on_delete: :cascade
add_foreign_key "services", "projects", name: "fk_71cce407f9", on_delete: :cascade
......
......@@ -79,7 +79,6 @@ Example response:
"target_id":160,
"target_type":"Issue",
"author_id":25,
"data":null,
"target_title":"Qui natus eos odio tempore et quaerat consequuntur ducimus cupiditate quis.",
"created_at":"2017-02-09T10:43:19.667Z",
"author":{
......@@ -99,7 +98,6 @@ Example response:
"target_id":159,
"target_type":"Issue",
"author_id":21,
"data":null,
"target_title":"Nostrum enim non et sed optio illo deleniti non.",
"created_at":"2017-02-09T10:43:19.426Z",
"author":{
......@@ -151,7 +149,6 @@ Example response:
"target_id": 830,
"target_type": "Issue",
"author_id": 1,
"data": null,
"target_title": "Public project search field",
"author": {
"name": "Dmitriy Zaporozhets",
......@@ -166,7 +163,7 @@ Example response:
{
"title": null,
"project_id": 15,
"action_name": "opened",
"action_name": "pushed",
"target_id": null,
"target_type": null,
"author_id": 1,
......@@ -179,31 +176,14 @@ Example response:
"web_url": "http://localhost:3000/root"
},
"author_username": "john",
"data": {
"before": "50d4420237a9de7be1304607147aec22e4a14af7",
"after": "c5feabde2d8cd023215af4d2ceeb7a64839fc428",
"ref": "refs/heads/master",
"user_id": 1,
"user_name": "Dmitriy Zaporozhets",
"repository": {
"name": "gitlabhq",
"url": "git@dev.gitlab.org:gitlab/gitlabhq.git",
"description": "GitLab: self hosted Git management software. \r\nDistributed under the MIT License.",
"homepage": "https://dev.gitlab.org/gitlab/gitlabhq"
},
"commits": [
{
"id": "c5feabde2d8cd023215af4d2ceeb7a64839fc428",
"message": "Add simple search to projects in public area",
"timestamp": "2013-05-13T18:18:08+00:00",
"url": "https://dev.gitlab.org/gitlab/gitlabhq/commit/c5feabde2d8cd023215af4d2ceeb7a64839fc428",
"author": {
"name": "Dmitriy Zaporozhets",
"email": "dmitriy.zaporozhets@gmail.com"
}
}
],
"total_commits_count": 1
"push_data": {
"commit_count": 1,
"action": "pushed",
"ref_type": "branch",
"commit_from": "50d4420237a9de7be1304607147aec22e4a14af7",
"commit_to": "c5feabde2d8cd023215af4d2ceeb7a64839fc428",
"ref": "master",
"commit_title": "Add simple search to projects in public area"
},
"target_title": null
},
......@@ -214,7 +194,6 @@ Example response:
"target_id": 840,
"target_type": "Issue",
"author_id": 1,
"data": null,
"target_title": "Finish & merge Code search PR",
"author": {
"name": "Dmitriy Zaporozhets",
......@@ -233,7 +212,6 @@ Example response:
"target_id": 1312,
"target_type": "Note",
"author_id": 1,
"data": null,
"target_title": null,
"created_at": "2015-12-04T10:33:58.089Z",
"note": {
......@@ -305,7 +283,6 @@ Example response:
"target_iid":160,
"target_type":"Issue",
"author_id":25,
"data":null,
"target_title":"Qui natus eos odio tempore et quaerat consequuntur ducimus cupiditate quis.",
"created_at":"2017-02-09T10:43:19.667Z",
"author":{
......@@ -326,7 +303,6 @@ Example response:
"target_iid":159,
"target_type":"Issue",
"author_id":21,
"data":null,
"target_title":"Nostrum enim non et sed optio illo deleniti non.",
"created_at":"2017-02-09T10:43:19.426Z",
"author":{
......
......@@ -328,6 +328,15 @@ complexity of RSpec expectations. They should be placed under
a certain type of specs only (e.g. features, requests, etc.) but shouldn't be if
they apply to multiple types of specs.
#### have_gitlab_http_status
Prefer `have_gitlab_http_status` over `have_http_status` because the former
also shows the response body when the status doesn't match. This is very
useful when tests suddenly start failing and we want to know why without
editing the source and rerunning them.
It is especially helpful when the response is a 500 Internal Server Error.
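As a minimal illustration (the endpoint, the `api` request helper, and the factory below are assumed for the example, not taken from this merge request):
```ruby
# Illustrative request spec; endpoint and helpers are assumptions.
describe 'GET /users/:id' do
  let(:user) { create(:user) }

  it 'returns the user' do
    get api("/users/#{user.id}", user)

    # On a mismatch this matcher also prints the response body,
    # unlike the plain have_http_status matcher.
    expect(response).to have_gitlab_http_status(200)
  end
end
```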
### Shared contexts
All shared contexts should be placed under `spec/support/shared_contexts/`.
......
......@@ -106,6 +106,10 @@ features of GitLab work with MySQL/MariaDB:
1. [Zero downtime migrations][zero] do not work with MySQL
1. [Database load balancing](../administration/database_load_balancing.md) is
supported only for PostgreSQL.
1. GitLab [optimizes the loading of dashboard events](https://gitlab.com/gitlab-org/gitlab-ce/issues/31806) using [PostgreSQL LATERAL JOINs](https://blog.heapanalytics.com/postgresqls-powerful-new-join-type-lateral/).
1. In general, SQL optimized for PostgreSQL may run much slower in MySQL due to
differences in query planners. For example, subqueries that work well in PostgreSQL
may not be [performant in MySQL](https://dev.mysql.com/doc/refman/5.7/en/optimizing-subqueries.html).
1. We expect this list to grow over time.
Existing users using GitLab with MySQL/MariaDB are advised to
......
......@@ -56,21 +56,17 @@ sudo -u git -H bundle clean
# Run database migrations
sudo -u git -H bundle exec rake db:migrate RAILS_ENV=production
### 4. Compile GetText PO files
Internationalization was added in `v9.2.0` so these commands are only
required for versions equal to or newer than it.
```bash
# Compile GetText PO files
# Internationalization was added in `v9.2.0` so these commands are only
# required for versions equal to or newer than it.
sudo -u git -H bundle exec rake gettext:pack RAILS_ENV=production
sudo -u git -H bundle exec rake gettext:po_to_json RAILS_ENV=production
```
# Clean up assets and cache
sudo -u git -H bundle exec rake yarn:install gitlab:assets:clean gitlab:assets:compile cache:clear RAILS_ENV=production NODE_ENV=production
```
### 5. Update gitlab-workhorse to the corresponding version
### 4. Update gitlab-workhorse to the corresponding version
```bash
cd /home/git/gitlab
......@@ -78,7 +74,7 @@ cd /home/git/gitlab
sudo -u git -H bundle exec rake "gitlab:workhorse:install[/home/git/gitlab-workhorse]" RAILS_ENV=production
```
### 6. Update gitlab-shell to the corresponding version
### 5. Update gitlab-shell to the corresponding version
```bash
cd /home/git/gitlab-shell
......@@ -88,14 +84,14 @@ sudo -u git -H git checkout v`cat /home/git/gitlab/GITLAB_SHELL_VERSION` -b v`ca
sudo -u git -H sh -c 'if [ -x bin/compile ]; then bin/compile; fi'
```
### 7. Start application
### 6. Start application
```bash
sudo service gitlab start
sudo service nginx restart
```
### 8. Check application status
### 7. Check application status
Check if GitLab and its environment are configured correctly:
......
......@@ -9,6 +9,9 @@
> application settings (`/admin/application_settings`) under 'Import sources'.
> Ask your administrator if you don't see the **GitLab export** button when
> creating a new project.
> - Starting with GitLab 10.0, administrators can disable the project export option
> on the GitLab instance in application settings (`/admin/application_settings`)
> under 'Visibility and Access Controls'.
> - You can find some useful raketasks if you are an administrator in the
> [import_export](../../../administration/raketasks/project_import_export.md)
> raketask.
......
......@@ -71,28 +71,14 @@ module SharedProject
step 'project "Shop" has push event' do
@project = Project.find_by(name: "Shop")
data = {
before: Gitlab::Git::BLANK_SHA,
after: "6d394385cf567f80a8fd85055db1ab4c5295806f",
ref: "refs/heads/fix",
user_id: @user.id,
user_name: @user.name,
repository: {
name: @project.name,
url: "localhost/rubinius",
description: "",
homepage: "localhost/rubinius",
private: true
}
}
@event = Event.create(
project: @project,
action: Event::PUSHED,
data: data,
author_id: @user.id
)
@event = create(:push_event, project: @project, author: @user)
create(:push_event_payload,
event: @event,
action: :created,
commit_to: '6d394385cf567f80a8fd85055db1ab4c5295806f',
ref: 'fix',
commit_count: 1)
end
step 'I should see project "Shop" activity feed' do
......
......@@ -559,14 +559,24 @@ module API
expose :author, using: Entities::UserBasic
end
class PushEventPayload < Grape::Entity
expose :commit_count, :action, :ref_type, :commit_from, :commit_to
expose :ref, :commit_title
end
class Event < Grape::Entity
expose :title, :project_id, :action_name
expose :target_id, :target_iid, :target_type, :author_id
expose :data, :target_title
expose :target_title
expose :created_at
expose :note, using: Entities::Note, if: ->(event, options) { event.note? }
expose :author, using: Entities::UserBasic, if: ->(event, options) { event.author }
expose :push_event_payload,
as: :push_data,
using: PushEventPayload,
if: -> (event, _) { event.push? }
expose :author_username do |event, options|
event.author&.username
end
......
......@@ -284,7 +284,15 @@ module API
message << " " << trace.join("\n ")
API.logger.add Logger::FATAL, message
rack_response({ 'message' => '500 Internal Server Error' }.to_json, 500)
response_message =
if Rails.env.test?
message
else
'500 Internal Server Error'
end
rack_response({ 'message' => response_message }.to_json, 500)
end
# project helpers
......
......@@ -29,6 +29,7 @@ module API
desc: 'Enabled sources for code import during project creation. OmniAuth must be configured for GitHub, Bitbucket, and GitLab.com'
optional :disabled_oauth_sign_in_sources, type: Array[String], desc: 'Disable certain OAuth sign-in sources'
optional :enabled_git_access_protocol, type: String, values: %w[ssh http nil], desc: 'Allow only the selected protocols to be used for Git access.'
optional :project_export_enabled, type: Boolean, desc: 'Enable project export'
optional :gravatar_enabled, type: Boolean, desc: 'Flag indicating if the Gravatar service is enabled'
optional :default_projects_limit, type: Integer, desc: 'The maximum number of personal projects'
optional :max_attachment_size, type: Integer, desc: 'Maximum attachment size in MB'
......
......@@ -85,22 +85,17 @@ module API
end
desc 'Get a single user' do
success Entities::UserBasic
success Entities::User
end
params do
requires :id, type: Integer, desc: 'The ID of the user'
end
get ":id" do
user = User.find_by(id: params[:id])
not_found!('User') unless user
not_found!('User') unless user && can?(current_user, :read_user, user)
if current_user && current_user.admin?
present user, with: Entities::UserPublic
elsif can?(current_user, :read_user, user)
present user, with: Entities::User
else
render_api_error!("User not found.", 404)
end
opts = current_user&.admin? ? { with: Entities::UserWithAdmin } : {}
present user, opts
end
desc 'Create a user. Available only for admins.' do
......
......@@ -25,14 +25,24 @@ module API
expose(:downvote?) { |note| false }
end
class PushEventPayload < Grape::Entity
expose :commit_count, :action, :ref_type, :commit_from, :commit_to
expose :ref, :commit_title
end
class Event < Grape::Entity
expose :title, :project_id, :action_name
expose :target_id, :target_type, :author_id
expose :data, :target_title
expose :target_title
expose :created_at
expose :note, using: Entities::Note, if: ->(event, options) { event.note? }
expose :author, using: ::API::Entities::UserBasic, if: ->(event, options) { event.author }
expose :push_event_payload,
as: :push_data,
using: PushEventPayload,
if: -> (event, _) { event.push? }
expose :author_username do |event, options|
event.author&.username
end
......
class FileStreamer #:nodoc:
attr_reader :to_path
def initialize(path)
@to_path = path
end
# Stream the file's contents if Rack::Sendfile isn't present.
def each
File.open(to_path, 'rb') do |file|
while chunk = file.read(16384)
yield chunk
end
end
end
end
......@@ -85,6 +85,8 @@ module Gitlab
diff_hash.tap do |hash|
diff_text = hash[:diff]
hash[:too_large] = !!hash[:too_large]
if diff_text.encoding == Encoding::BINARY && !diff_text.ascii_only?
hash[:binary] = true
hash[:diff] = [diff_text].pack('m0')
......
module Gitlab
module BackgroundMigration
# Class that migrates events for the new push event payloads setup. All
# events are copied to a shadow table, and push events will also have a row
# created in the push_event_payloads table.
class MigrateEventsToPushEventPayloads
class Event < ActiveRecord::Base
self.table_name = 'events'
serialize :data
BLANK_REF = ('0' * 40).freeze
TAG_REF_PREFIX = 'refs/tags/'.freeze
MAX_INDEX = 69
PUSHED = 5
def push_event?
action == PUSHED && data.present?
end
def commit_title
commit = commits.last
return nil unless commit && commit[:message]
index = commit[:message].index("\n")
message = index ? commit[:message][0..index] : commit[:message]
message.strip.truncate(70)
end
def commit_from_sha
if create?
nil
else
data[:before]
end
end
def commit_to_sha
if remove?
nil
else
data[:after]
end
end
def data
super || {}
end
def commits
data[:commits] || []
end
def commit_count
data[:total_commits_count] || 0
end
def ref
data[:ref]
end
def trimmed_ref_name
if ref_type == :tag
# Strip the "refs/tags/" prefix (10 characters).
ref[10..-1]
else
# Strip the "refs/heads/" prefix (11 characters).
ref[11..-1]
end
end
def create?
data[:before] == BLANK_REF
end
def remove?
data[:after] == BLANK_REF
end
def push_action
if create?
:created
elsif remove?
:removed
else
:pushed
end
end
def ref_type
if ref.start_with?(TAG_REF_PREFIX)
:tag
else
:branch
end
end
end
class EventForMigration < ActiveRecord::Base
self.table_name = 'events_for_migration'
end
class PushEventPayload < ActiveRecord::Base
self.table_name = 'push_event_payloads'
enum action: {
created: 0,
removed: 1,
pushed: 2
}
enum ref_type: {
branch: 0,
tag: 1
}
end
# start_id - The start ID of the range of events to process
# end_id - The end ID of the range to process.
def perform(start_id, end_id)
return unless migrate?
find_events(start_id, end_id).each { |event| process_event(event) }
end
def process_event(event)
replicate_event(event)
create_push_event_payload(event) if event.push_event?
end
def replicate_event(event)
new_attributes = event.attributes
.with_indifferent_access.except(:title, :data)
EventForMigration.create!(new_attributes)
rescue ActiveRecord::InvalidForeignKey
# A foreign key error means the associated event was removed. In this
# case we'll just skip migrating the event.
end
def create_push_event_payload(event)
commit_from = pack(event.commit_from_sha)
commit_to = pack(event.commit_to_sha)
PushEventPayload.create!(
event_id: event.id,
commit_count: event.commit_count,
ref_type: event.ref_type,
action: event.push_action,
commit_from: commit_from,
commit_to: commit_to,
ref: event.trimmed_ref_name,
commit_title: event.commit_title
)
rescue ActiveRecord::InvalidForeignKey
# A foreign key error means the associated event was removed. In this
# case we'll just skip migrating the event.
end
def find_events(start_id, end_id)
Event
.where('NOT EXISTS (SELECT true FROM events_for_migration WHERE events_for_migration.id = events.id)')
.where(id: start_id..end_id)
end
def migrate?
Event.table_exists? && PushEventPayload.table_exists? &&
EventForMigration.table_exists?
end
# Packs a 40-character hex SHA into its 20-byte binary form so it fits
# the binary commit_from/commit_to columns; nil is passed through.
def pack(value)
value ? [value].pack('H*') : nil
end
end
end
end
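As a reference for the `pack` helper above, a quick plain-Ruby sketch of the hex-to-binary round trip (no GitLab code involved):
```ruby
sha = '6d394385cf567f80a8fd85055db1ab4c5295806f'

# A 40-character hex SHA packs into a 20-byte binary string,
# which is what the binary commit_from/commit_to columns store.
packed = [sha].pack('H*')
packed.bytesize # => 20

# Unpacking restores the original hex representation.
packed.unpack('H*').first == sha # => true
```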
module Gitlab
module BackgroundMigration
class MovePersonalSnippetFiles
delegate :select_all, :execute, :quote_string, to: :connection
def perform(relative_source, relative_destination)
@source_relative_location = relative_source
@destination_relative_location = relative_destination
move_personal_snippet_files
end
def move_personal_snippet_files
query = "SELECT uploads.path, uploads.model_id FROM uploads "\
"INNER JOIN snippets ON snippets.id = uploads.model_id WHERE uploader = 'PersonalFileUploader'"
select_all(query).each do |upload|
secret = upload['path'].split('/')[0]
file_name = upload['path'].split('/')[1]
move_file(upload['model_id'], secret, file_name)
update_markdown(upload['model_id'], secret, file_name)
end
end
def move_file(snippet_id, secret, file_name)
source_dir = File.join(base_directory, @source_relative_location, snippet_id.to_s, secret)
destination_dir = File.join(base_directory, @destination_relative_location, snippet_id.to_s, secret)
source_file_path = File.join(source_dir, file_name)
destination_file_path = File.join(destination_dir, file_name)
unless File.exist?(source_file_path)
say "Source file `#{source_file_path}` doesn't exist. Skipping."
return
end
say "Moving file #{source_file_path} -> #{destination_file_path}"
FileUtils.mkdir_p(destination_dir)
FileUtils.move(source_file_path, destination_file_path)
end
def update_markdown(snippet_id, secret, file_name)
source_markdown_path = File.join(@source_relative_location, snippet_id.to_s, secret, file_name)
destination_markdown_path = File.join(@destination_relative_location, snippet_id.to_s, secret, file_name)
source_markdown = "](#{source_markdown_path})"
destination_markdown = "](#{destination_markdown_path})"
quoted_source = quote_string(source_markdown)
quoted_destination = quote_string(destination_markdown)
execute("UPDATE snippets "\
"SET description = replace(snippets.description, '#{quoted_source}', '#{quoted_destination}'), description_html = NULL "\
"WHERE id = #{snippet_id}")
query = "SELECT id, note FROM notes WHERE noteable_id = #{snippet_id} "\
"AND noteable_type = 'Snippet' AND note IS NOT NULL"
select_all(query).each do |note|
text = note['note'].gsub(source_markdown, destination_markdown)
quoted_text = quote_string(text)
execute("UPDATE notes SET note = '#{quoted_text}', note_html = NULL WHERE id = #{note['id']}")
end
end
def base_directory
File.join(Rails.root, 'public')
end
def connection
ActiveRecord::Base.connection
end
def say(message)
Rails.logger.debug(message)
end
end
end
end
......@@ -5,8 +5,14 @@ module Gitlab
return false if project.empty_repo?
# Created or deleted branch
if Gitlab::Git.blank_ref?(oldrev) || Gitlab::Git.blank_ref?(newrev)
false
return false if Gitlab::Git.blank_ref?(oldrev) || Gitlab::Git.blank_ref?(newrev)
GitalyClient.migrate(:force_push) do |is_enabled|
if is_enabled
!project
.repository
.gitaly_commit_client
.is_ancestor(oldrev, newrev)
else
Gitlab::Git::RevList.new(
path_to_repo: project.repository.path_to_repo,
......@@ -15,4 +21,5 @@ module Gitlab
end
end
end
end
end
......@@ -25,6 +25,10 @@ module Gitlab
database_version.match(/\A(?:PostgreSQL |)([^\s]+).*\z/)[1]
end
def self.join_lateral_supported?
postgresql? && version.to_f >= 9.3
end
def self.nulls_last_order(field, direction = 'ASC')
order = "#{field} #{direction}"
......
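A hypothetical caller could branch on this helper to pick a query strategy; the method name and SQL below are illustrative only, not part of this merge request:
```ruby
# Sketch: use a LATERAL JOIN only when the database supports it.
def dashboard_events_sql
  if Gitlab::Database.join_lateral_supported?
    # PostgreSQL 9.3+: fetch the newest events per project in one query.
    <<~SQL
      SELECT recent_events.*
      FROM projects
      JOIN LATERAL (
        SELECT *
        FROM events
        WHERE events.project_id = projects.id
        ORDER BY events.id DESC
        LIMIT 20
      ) AS recent_events ON true
    SQL
  else
    # MySQL (or older PostgreSQL): fall back to a simpler query.
    'SELECT * FROM events ORDER BY id DESC LIMIT 20'
  end
end
```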
......@@ -324,6 +324,23 @@ module Gitlab
raw_log(options).map { |c| Commit.decorate(self, c) }
end
# Used in gitaly-ruby
def raw_log(options)
actual_ref = options[:ref] || root_ref
begin
sha = sha_from_ref(actual_ref)
rescue Rugged::OdbError, Rugged::InvalidError, Rugged::ReferenceError
# Return an empty array if the ref wasn't found
return []
end
if log_using_shell?(options)
log_by_shell(sha, options)
else
log_by_walk(sha, options)
end
end
def count_commits(options)
gitaly_migrate(:count_commits) do |is_enabled|
if is_enabled
......@@ -733,22 +750,6 @@ module Gitlab
sort_branches(branches, sort_by)
end
def raw_log(options)
actual_ref = options[:ref] || root_ref
begin
sha = sha_from_ref(actual_ref)
rescue Rugged::OdbError, Rugged::InvalidError, Rugged::ReferenceError
# Return an empty array if the ref wasn't found
return []
end
if log_using_shell?(options)
log_by_shell(sha, options)
else
log_by_walk(sha, options)
end
end
def log_using_shell?(options)
options[:path].present? ||
options[:disable_walk] ||
......
......@@ -5,7 +5,9 @@ module Gitlab
def repository(repository_storage, relative_path)
Gitaly::Repository.new(
storage_name: repository_storage,
relative_path: relative_path
relative_path: relative_path,
git_object_directory: Gitlab::Git::Env['GIT_OBJECT_DIRECTORY'].to_s,
git_alternate_object_directories: Array.wrap(Gitlab::Git::Env['GIT_ALTERNATE_OBJECT_DIRECTORIES'])
)
end
end
......
......@@ -2,6 +2,8 @@ module Gitlab
module Gpg
extend self
MUTEX = Mutex.new
module CurrentKeyChain
extend self
......@@ -42,21 +44,37 @@ module Gitlab
end
end
def using_tmp_keychain
Dir.mktmpdir do |dir|
@original_dirs ||= [GPGME::Engine.dirinfo('homedir')]
@original_dirs.push(dir)
GPGME::Engine.home_dir = dir
return_value = yield
# Allows thread safe switching of temporary keychain files
#
# 1. The current thread may use nesting of temporary keychain
# 2. Another thread needs to wait for the lock to be released
def using_tmp_keychain(&block)
if MUTEX.locked? && MUTEX.owned?
optimistic_using_tmp_keychain(&block)
else
MUTEX.synchronize do
optimistic_using_tmp_keychain(&block)
end
end
end
@original_dirs.pop
# 1. Returns the custom home directory if one has been set by calling
# `GPGME::Engine.home_dir=`
# 2. Returns the default home directory otherwise
def current_home_dir
GPGME::Engine.info.first.home_dir || GPGME::Engine.dirinfo('homedir')
end
GPGME::Engine.home_dir = @original_dirs[-1]
private
return_value
def optimistic_using_tmp_keychain
previous_dir = current_home_dir
Dir.mktmpdir do |dir|
GPGME::Engine.home_dir = dir
yield
end
ensure
GPGME::Engine.home_dir = previous_dir
end
end
end
......@@ -3,18 +3,22 @@ project_tree:
- labels:
:priorities
- milestones:
- :events
- events:
- :push_event_payload
- issues:
- :events
- events:
- :push_event_payload
- :timelogs
- notes:
- :author
- :events
- events:
- :push_event_payload
- label_links:
- label:
:priorities
- milestone:
- :events
- events:
- :push_event_payload
- snippets:
- :award_emoji
- notes:
......@@ -25,21 +29,25 @@ project_tree:
- merge_requests:
- notes:
- :author
- :events
- events:
- :push_event_payload
- merge_request_diff:
- :merge_request_diff_commits
- :merge_request_diff_files
- :events
- events:
- :push_event_payload
- :timelogs
- label_links:
- label:
:priorities
- milestone:
- :events
- events:
- :push_event_payload
- pipelines:
- notes:
- :author
- :events
- events:
- :push_event_payload
- :stages
- :statuses
- :triggers
......@@ -111,6 +119,8 @@ excluded_attributes:
statuses:
- :trace
- :token
push_event_payload:
- :event_id
methods:
labels:
......
# This Rack middleware is intended to proxy the webpack assets directory to the
# webpack-dev-server. It is only intended for use in development.
# :nocov:
module Gitlab
module Middleware
class WebpackProxy < Rack::Proxy
......@@ -22,3 +23,4 @@ module Gitlab
end
end
end
# :nocov:
# :nocov:
module Gitlab
module OAuth
module Session
......@@ -15,3 +16,4 @@ module Gitlab
end
end
end
# :nocov:
# :nocov:
module DeliverNever
def deliver_later
self
......@@ -21,3 +22,4 @@ module Gitlab
end
end
end
# :nocov:
......@@ -417,7 +417,7 @@ msgstr[0] "Fourche"
msgstr[1] "Fourches"
msgid "ForkedFromProjectPath|Forked from"
msgstr "Fouché depuis"
msgstr "Fourché depuis"
msgid "From issue creation until deploy to production"
msgstr "Depuis la création de l'incident jusqu'au déploiement en production"
......
......@@ -12,12 +12,24 @@ describe Admin::ProjectsController do
it 'retrieves the project for the given visibility level' do
get :index, visibility_level: [Gitlab::VisibilityLevel::PUBLIC]
expect(response.body).to match(project.name)
end
it 'does not retrieve the project' do
get :index, visibility_level: [Gitlab::VisibilityLevel::INTERNAL]
expect(response.body).not_to match(project.name)
end
it 'does not respond with projects pending deletion' do
pending_delete_project = create(:project, pending_delete: true)
get :index
expect(response).to have_http_status(200)
expect(response.body).not_to match(pending_delete_project.name)
expect(response.body).to match(project.name)
end
end
end