Commit 20f39b1f authored by Achilleas Pipinellis's avatar Achilleas Pipinellis

Merge branch 'master' into pages_user_docs

parents 9a623ca0 808fba83
......@@ -7,6 +7,7 @@ v 8.6.0 (unreleased)
- [Elastic] Removing repository and wiki index after removing project
- [Elastic] Update index on push to wiki
- [Elastic] Use subprocesses for ElasticSearch index jobs
- [Elastic] More accurate as_indexed_json (More stable database indexer)
v 8.5.4
- [Elastic][Security] Notes exposure
......
# Admin UI controller for managing GitLab Geo nodes (list, add, remove).
class Admin::GeoNodesController < Admin::ApplicationController
  # Lists all Geo nodes and prepares a blank node for the "add node" form.
  def index
    @nodes = GeoNode.all
    @node = GeoNode.new
  end

  # Creates a Geo node from the submitted form parameters.
  # On validation failure, reloads the node list and re-renders the index.
  def create
    @node = GeoNode.new(geo_node_params)
    if @node.save
      redirect_to admin_geo_nodes_path, notice: 'Node was successfully created.'
    else
      @nodes = GeoNode.all
      render :index
    end
  end

  # Removes an existing Geo node.
  def destroy
    @node = GeoNode.find(params[:id])
    @node.destroy
    redirect_to admin_geo_nodes_path, notice: 'Node was successfully removed.'
  end

  private

  # Strong parameters for GeoNode. Made private so Rails does not expose
  # it as a routable controller action.
  def geo_node_params
    params.require(:geo_node).permit(:url, :primary, geo_node_key_attributes: [:key])
  end
end
......@@ -107,7 +107,7 @@ class ApplicationController < ActionController::Base
end
def after_sign_out_path_for(resource)
if Gitlab::Geo.readonly?
if Gitlab::Geo.secondary?
Gitlab::Geo.primary_node.url
else
current_application_settings.after_sign_out_path || new_user_session_path
......
......@@ -91,7 +91,7 @@ class SessionsController < Devise::SessionsController
end
def gitlab_geo_login
if !signed_in? && Gitlab::Geo.enabled? && Gitlab::Geo.readonly?
if !signed_in? && Gitlab::Geo.enabled? && Gitlab::Geo.secondary?
# share full url with primary node by shared session
user_return_to = URI.join(root_url, session[:user_return_to]).to_s
session[:geo_node_return_to] = @redirect_to || user_return_to
......
......@@ -11,20 +11,65 @@
#
# GeoNode represents one node (primary or secondary) of a GitLab Geo
# cluster, together with the SSH key the node uses for repository sync.
class GeoNode < ActiveRecord::Base
# NOTE(review): the `default_value_for` calls below AND the `default_values`
# call a few lines down both set defaults — this looks like an unresolved
# merge artifact; only one of the two mechanisms should remain. Confirm
# which one the codebase uses.
default_value_for :schema, 'http'
default_value_for :port, 80
default_value_for :relative_url_root, ''
default_value_for :primary, false
belongs_to :geo_node_key, dependent: :destroy
default_values schema: 'http',
host: lambda { Gitlab.config.gitlab.host },
port: 80,
relative_url_root: '',
primary: false
accepts_nested_attributes_for :geo_node_key
validates :host, host: true, presence: true, uniqueness: { case_sensitive: false, scope: :port }
# Only one node may be flagged primary at a time.
validates :primary, uniqueness: { message: 'primary node already exists' }, if: :primary
validates :schema, inclusion: %w(http https)
validates :relative_url_root, length: { minimum: 0, allow_nil: false }
after_initialize :check_geo_node_key
after_save :refresh_bulk_notify_worker_status
after_destroy :refresh_bulk_notify_worker_status
before_validation :change_geo_node_key_title
# Builds a URI for this node from schema/host/port/relative_url_root.
def uri
# NOTE(review): the next line's result is discarded — it appears to be
# the pre-merge implementation left in place by the merge; the
# URI::Generic.build expression below is the effective one.
URI.parse("#{schema}://#{host}:#{port}/#{relative_url_root}")
if relative_url_root
relative_url = relative_url_root.starts_with?('/') ? relative_url_root : "/#{relative_url_root}"
end
URI.parse(URI::Generic.build(scheme: schema, host: host, port: port, path: relative_url).normalize.to_s)
end
# String form of #uri.
def url
uri.to_s
end
# Decomposes a full URL into the individual schema/host/port/path fields.
def url=(new_url)
new_uri = URI.parse(new_url)
self.schema = new_uri.scheme
self.host = new_uri.host
self.port = new_uri.port
self.relative_url_root = new_uri.path != '/' ? new_uri.path : ''
end
# API endpoint on this node that receives project-refresh notifications.
def notify_url
URI.join(uri, "#{uri.path}/", "api/#{API::API.version}/geo/refresh_projects").to_s
end
private
# Enables the bulk-notify cron job on the primary node, disables it elsewhere.
def refresh_bulk_notify_worker_status
if Gitlab::Geo.primary?
Gitlab::Geo.bulk_notify_job.try(:enable!)
else
Gitlab::Geo.bulk_notify_job.try(:disable!)
end
end
# Ensures a nested GeoNodeKey exists so the form can render its fields.
def check_geo_node_key
self.build_geo_node_key if geo_node_key.nil?
end
# Keeps the key's title in sync with the node's URL.
def change_geo_node_key_title
self.geo_node_key.title = "Geo node: #{self.url}" if self.geo_node_key
end
end
# == Schema Information
#
# Table name: keys
#
# id :integer not null, primary key
# user_id :integer
# created_at :datetime
# updated_at :datetime
# key :text
# title :string(255)
# type :string(255)
# fingerprint :string(255)
# public :boolean default(FALSE), not null
#
# An SSH key owned by a Geo node (rather than a user), used to authorize
# repository synchronization between Geo nodes.
class GeoNodeKey < Key
has_one :geo_node
# NOTE(review): the predicates below call `geo_nodes` (plural) but the
# association declared above is `has_one :geo_node` (singular) — as
# written these methods would raise NoMethodError. The factories/specs
# elsewhere also use the plural form; confirm whether the association
# should be `has_many :geo_nodes` or the calls made singular.
def orphaned?
self.geo_nodes.length == 0
end
def almost_orphaned?
self.geo_nodes.length == 1
end
def destroyed_when_orphaned?
true
end
end
......@@ -6,6 +6,7 @@ class Repository
class CommitError < StandardError; end
MIRROR_REMOTE = "upstream"
MIRROR_GEO = "geo"
include Gitlab::ShellAdapter
......@@ -181,10 +182,23 @@ class Repository
raw_repository.remote_update(name, url: url)
end
def set_remote_as_mirror(name)
remote_config = raw_repository.rugged.config
# This is used by Gitlab Geo to define repository as equivalent as "git clone --mirror"
remote_config["remote.#{name}.fetch"] = 'refs/*:refs/*'
remote_config["remote.#{name}.mirror"] = true
remote_config["remote.#{name}.prune"] = true
end
def fetch_remote(remote)
gitlab_shell.fetch_remote(path_with_namespace, remote)
end
def fetch_remote_forced!(remote)
gitlab_shell.fetch_remote(path_with_namespace, remote, true)
end
def branch_names
cache.fetch(:branch_names) { raw_repository.branch_names }
end
......@@ -742,6 +756,12 @@ class Repository
fetch_remote(Repository::MIRROR_REMOTE)
end
def fetch_geo_mirror(url)
add_remote(Repository::MIRROR_GEO, url)
set_remote_as_mirror(Repository::MIRROR_GEO)
fetch_remote_forced!(Repository::MIRROR_GEO)
end
def upstream_branches
rugged.references.each("refs/remotes/#{Repository::MIRROR_REMOTE}/*").map do |ref|
name = ref.name.sub(/\Arefs\/remotes\/#{Repository::MIRROR_REMOTE}\//, "")
......
module Geo
  # Shared base class for Geo services. Sets up the Redis-backed update
  # queue (@queue) that concrete services read from and write to.
  class BaseService
    def initialize
      @queue = Gitlab::Geo::UpdateQueue.new
    end
  end
end
module Geo
  # Enqueues a changed project (its id and clone URL) onto the Geo update
  # queue so secondary nodes can later be told to refresh it.
  class EnqueueUpdateService < Geo::BaseService
    attr_reader :project

    def initialize(project)
      super()
      @project = project
    end

    def execute
      payload = { id: project.id, clone_url: project.url_to_repo }
      @queue.store(payload)
    end
  end
end
module Geo
# Drains a batch of updated projects from the Geo queue and POSTs them to
# every secondary node's refresh endpoint. Batches that fail to deliver
# are put back on the queue for a later retry.
class NotifyNodesService < Geo::BaseService
include HTTParty
# HTTParty timeout
default_timeout Gitlab.config.gitlab.webhook_timeout
def execute
return if @queue.empty?
projects = @queue.fetch_batched_data
::Gitlab::Geo.secondary_nodes.each do |node|
success, message = notify_updated_projects(node, projects)
unless success
Rails.logger.error("GitLab failed to notify #{node.url} : #{message}")
# Requeue the whole batch so it is retried on the next run.
@queue.store_batched_data(projects)
end
end
end
private
# POSTs the project batch to one node.
# Returns [success_boolean, sanitized_message].
def notify_updated_projects(node, projects)
response = self.class.post(node.notify_url,
body: { projects: projects }.to_json,
headers: {
'Content-Type' => 'application/json',
'PRIVATE-TOKEN' => private_token
})
[(response.code >= 200 && response.code < 300), ActionView::Base.full_sanitizer.sanitize(response.to_s)]
rescue HTTParty::Error, Errno::ECONNREFUSED => e
[false, ActionView::Base.full_sanitizer.sanitize(e.message)]
end
# Token used to authenticate against the secondary node's admin-only API.
# NOTE(review): `User.find_by(admin: true)` returns nil when no admin
# exists, which would raise NoMethodError here — consider guarding.
def private_token
# TODO: should we ask admin user to be defined as part of configuration?
@private_token ||= User.find_by(admin: true).authentication_token
end
end
end
module Geo
  # Fans out one async repository-update job per project hash
  # ({'id' => ..., 'clone_url' => ...}) received from the primary node.
  class ScheduleRepoUpdateService
    attr_reader :projects

    def initialize(projects)
      @projects = projects
    end

    def execute
      projects.each do |item|
        GeoRepositoryUpdateWorker.perform_async(item['id'], item['clone_url'])
      end
    end
  end
end
-# Admin UI: form to register a new Geo node (with its public key) and a
-# list of the existing nodes with a remove action.
- page_title 'Geo nodes'
%h3.page-title
Geo Nodes
%p.light
With #{link_to 'GitLab Geo', help_page_path('gitlab-geo', 'overview'), class: 'vlink'} you can install a special
read-only and replicated instance anywhere.
%hr
= form_for @node, as: :geo_node, url: admin_geo_nodes_path, html: { class: 'form-horizontal' } do |f|
-if @node.errors.any?
.alert.alert-danger
- @node.errors.full_messages.each do |msg|
%p= msg
.form-group
.col-sm-offset-2.col-sm-10
.checkbox
= f.label :primary do
= f.check_box :primary
%strong This is a primary node
.form-group
= f.label :url, 'URL', class: 'control-label'
.col-sm-10
= f.text_field :url, class: 'form-control'
= f.fields_for :geo_node_key, @node.geo_node_key do |fg|
.form-group
= fg.label :key, 'Public Key', class: 'control-label'
.col-sm-10
= fg.text_area :key, class: 'form-control thin_area', rows: 5
%p.help-block
Paste a machine public key here for the GitLab user this node runs on. Read more about how to generate it
= link_to "here", help_page_path("ssh", "README")
.form-actions
= f.submit 'Add Node', class: 'btn btn-create'
%hr
-if @nodes.any?
.panel.panel-default
.panel-heading
Geo nodes (#{@nodes.count})
%ul.well-list
- @nodes.each do |node|
%li
.list-item-name
%strong= node.url
%p #{node.primary ? 'Primary node' : 'Secondary node'}
.pull-right
= link_to 'Remove', admin_geo_node_path(node), data: { confirm: 'Are you sure?' }, method: :delete, class: 'btn btn-remove btn-sm'
......@@ -85,6 +85,12 @@
%span
Labels
= nav_link(controller: :geo_nodes) do
= link_to admin_geo_nodes_path, title: 'Geo Nodes' do
= icon('globe fw')
%span
Geo Nodes
= nav_link(controller: :abuse_reports) do
= link_to admin_abuse_reports_path, title: "Abuse Reports" do
= icon('exclamation-circle fw')
......
# Cron-scheduled Sidekiq job (see geo_bulk_notify_worker in the cron
# config) that pushes queued project updates out to secondary Geo nodes.
class GeoBulkNotifyWorker
  include Sidekiq::Worker

  sidekiq_options queue: :default

  def perform
    service = Geo::NotifyNodesService.new
    service.execute
  end
end
# Sidekiq job run on a secondary Geo node: mirrors one project's
# repository from the given clone URL.
class GeoRepositoryUpdateWorker
  include Sidekiq::Worker
  include Gitlab::ShellAdapter

  sidekiq_options queue: :default

  attr_accessor :project

  def perform(project_id, clone_url)
    @project = Project.find(project_id)
    update_repository(@project, clone_url)
  end

  private

  # Creates the repository locally when missing, then fetches it as a
  # mirror of the remote.
  def update_repository(project, remote_url)
    project.create_repository unless project.repository_exists?
    project.repository.fetch_geo_mirror(remote_url)
  end
end
......@@ -37,6 +37,9 @@ class PostReceive
return false
end
# Triggers repository update on secondary nodes when Geo is enabled
Gitlab::Geo.notify_update(project) if Gitlab::Geo.enabled?
if Gitlab::Git.tag_ref?(ref)
GitTagPushService.new.execute(project, @user, oldrev, newrev, ref)
else
......
......@@ -196,6 +196,11 @@ production: &base
ldap_sync_worker:
cron: "30 1 * * *"
# Gitlab Geo nodes notification worker
# NOTE: This will only take effect if Geo is enabled
geo_bulk_notify_worker:
cron: "*/10 * * * * *"
#
# 2. GitLab CI settings
......
......@@ -331,6 +331,9 @@ Settings.cron_jobs['update_all_mirrors_worker']['job_class'] = 'UpdateAllMirrors
Settings.cron_jobs['ldap_sync_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['ldap_sync_worker']['cron'] ||= '30 1 * * *'
Settings.cron_jobs['ldap_sync_worker']['job_class'] = 'LdapSyncWorker'
Settings.cron_jobs['geo_bulk_notify_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['geo_bulk_notify_worker']['cron'] ||= '*/10 * * * * *'
Settings.cron_jobs['geo_bulk_notify_worker']['job_class'] ||= 'GeoBulkNotifyWorker'
#
# GitLab Shell
......
......@@ -25,6 +25,9 @@ Sidekiq.configure_server do |config|
cron_jobs.each { |k,v| cron_jobs[k]['class'] = cron_jobs[k].delete('job_class') }
Sidekiq::Cron::Job.load_from_hash! cron_jobs
# Gitlab Geo: enable bulk notify job only on primary node
Gitlab::Geo.bulk_notify_job.disable! unless Gitlab::Geo.primary?
# Database pool should be at least `sidekiq_concurrency` + 2
# For more info, see: https://github.com/mperham/sidekiq/blob/master/4.0-Upgrade.md
config = ActiveRecord::Base.configurations[Rails.env] ||
......
......@@ -277,6 +277,8 @@ Rails.application.routes.draw do
get :download, on: :member
end
resources :geo_nodes, only: [:index, :create, :destroy]
resources :labels
resources :runners, only: [:index, :show, :update, :destroy] do
......
# Adds a geo_node_key_id reference column (with index) to geo_nodes.
class AddGeoNodeKeyToGeoNode < ActiveRecord::Migration
  def change
    add_reference :geo_nodes, :geo_node_key, index: true
  end
end
......@@ -413,8 +413,10 @@ ActiveRecord::Schema.define(version: 20160302141317) do
t.integer "port"
t.string "relative_url_root"
t.boolean "primary"
t.integer "geo_node_key_id"
end
add_index "geo_nodes", ["geo_node_key_id"], name: "index_geo_nodes_on_geo_node_key_id", using: :btree
add_index "geo_nodes", ["host"], name: "index_geo_nodes_on_host", using: :btree
add_index "geo_nodes", ["primary"], name: "index_geo_nodes_on_primary", using: :btree
......
......@@ -27,6 +27,7 @@ module API
helpers Helpers
mount Geo
mount Groups
mount GroupMembers
mount Users
......
module API
# Internal Geo API, admin-only: receives batched project-update
# notifications from the primary node.
class Geo < Grape::API
before { authenticated_as_admin! }
resource :geo do
# Enqueue a batch of IDs of modified projects to have their
# repositories updated
#
# Example request:
# POST /refresh_projects
post 'refresh_projects' do
required_attributes! [:projects]
::Geo::ScheduleRepoUpdateService.new(params[:projects]).execute
end
end
end
end
......@@ -27,12 +27,18 @@ module Elastic
end
def as_indexed_json(options = {})
as_json(
include: {
project: { only: :id },
author: { only: :id }
}
).merge({ updated_at_sort: updated_at })
data = {}
# We don't use as_json(only: ...) because it calls all virtual and serialized attributes
# https://gitlab.com/gitlab-org/gitlab-ee/issues/349
[:id, :iid, :title, :description, :created_at, :updated_at, :state, :project_id, :author_id].each do |attr|
data[attr.to_s] = self.send(attr)
end
data['project'] = { 'id' => project.id }
data['author'] = { 'id' => author.id }
data['updated_at_sort'] = updated_at
data
end
def self.elastic_search(query, options: {})
......
......@@ -34,8 +34,11 @@ module Elastic
end
def as_indexed_json(options = {})
as_json({
only: [
# We don't use as_json(only: ...) because it calls all virtual and serialized attributes
# https://gitlab.com/gitlab-org/gitlab-ee/issues/349
data = {}
[
:id,
:iid,
:target_branch,
......@@ -49,13 +52,15 @@ module Elastic
:source_project_id,
:target_project_id,
:author_id
],
include: {
source_project: { only: :id },
target_project: { only: :id },
author: { only: :id }
}
}).merge({ updated_at_sort: updated_at })
].each do |attr|
data[attr.to_s] = self.send(attr)
end
data['source_project'] = { 'id' => source_project_id }
data['target_project'] = { 'id' => target_project_id }
data['author'] = { 'id' => author.id }
data['updated_at_sort'] = updated_at
data
end
def self.elastic_search(query, options: {})
......
......@@ -16,9 +16,16 @@ module Elastic
end
def as_indexed_json(options = {})
as_json(
only: [:id, :note, :project_id, :created_at]
).merge({ updated_at_sort: updated_at })
data = {}
# We don't use as_json(only: ...) because it calls all virtual and serialized attributes
# https://gitlab.com/gitlab-org/gitlab-ee/issues/349
[:id, :note, :project_id, :created_at].each do |attr|
data[attr.to_s] = self.send(attr)
end
data['updated_at_sort'] = updated_at
data
end
def self.elastic_search(query, options: {})
......
......@@ -29,8 +29,11 @@ module Elastic
end
def as_indexed_json(options = {})
as_json({
only: [
# We don't use as_json(only: ...) because it calls all virtual and serialized attributes
# https://gitlab.com/gitlab-org/gitlab-ee/issues/349
data = {}
[
:id,
:name,
:path,
......@@ -40,12 +43,13 @@ module Elastic
:archived,
:visibility_level,
:last_activity_at,
:last_pushed_at
]
}).merge({
name_with_namespace: name_with_namespace,
path_with_namespace: path_with_namespace
})
:name_with_namespace,
:path_with_namespace
].each do |attr|
data[attr.to_s] = self.send(attr)
end
data
end
def self.elastic_search(query, options: {})
......@@ -55,28 +59,6 @@ module Elastic
filters = []
if options[:abandoned]
filters << {
range: {
last_pushed_at: {
lte: "now-6M/m"
}
}
}
end
if options[:with_push]
filters << {
not: {
missing: {
field: :last_pushed_at,
existence: true,
null_value: true
}
}
}
end
if options[:namespace_id]
filters << {
terms: {
......
......@@ -45,12 +45,15 @@ module Gitlab
#
# name - project path with namespace
# remote - remote name
# forced - should we use --force flag?
#
# Ex.
# fetch_remote("gitlab/gitlab-ci", "upstream")
#
def fetch_remote(name, remote)
output, status = Popen::popen([gitlab_shell_projects_path, 'fetch-remote', "#{name}.git", remote, '600'])
def fetch_remote(name, remote, forced: false)
args = [gitlab_shell_projects_path, 'fetch-remote', "#{name}.git", remote, '600']
args << '--force' if forced
output, status = Popen::popen(args)
raise Error, output unless status.zero?
true
end
......
......@@ -9,19 +9,35 @@ module Gitlab
end
def self.primary_node
RequestStore.store[:geo_node_primary] ||= GeoNode.find_by(primary: true)
RequestStore.store[:geo_primary_node] ||= GeoNode.find_by(primary: true)
end
def self.secondary_nodes
RequestStore.store[:geo_secondary_nodes] ||= GeoNode.where(primary: false)
end
def self.enabled?
RequestStore.store[:geo_node_enabled] ||= GeoNode.exists?
end
def self.readonly?
RequestStore.store[:geo_node_readonly] ||= self.enabled? && !self.current_node.primary?
def self.primary?
RequestStore.store[:geo_node_primary?] ||= self.enabled? && self.current_node && self.current_node.primary?
end
def self.secondary?
RequestStore.store[:geo_node_secondary] ||= self.enabled? && self.current_node && !self.current_node.primary?
end
def self.geo_node?(host:, port:)
GeoNode.where(host: host, port: port).exists?
end
def self.notify_update(project)
::Geo::EnqueueUpdateService.new(project).execute
end
def self.bulk_notify_job
Sidekiq::Cron::Job.find('geo_bulk_notify_worker')
end
end
end
module Gitlab
  module Geo
    # Redis-backed FIFO queue of projects whose repositories changed on the
    # primary node and must be refreshed on secondary nodes. Items are
    # stored as JSON-serialized hashes.
    class UpdateQueue
      BATCH_SIZE = 250
      NAMESPACE = 'geo:gitlab'
      QUEUE = 'updated_projects'

      # Appends one item to the tail of the queue.
      def store(data)
        redis.rpush(QUEUE, data.to_json)
        expire_queue_size!
      end

      # Oldest enqueued item, or nil when the queue is empty.
      def first
        data = fetch(0, 0)
        data.first unless data.empty?
      end

      # Newest enqueued item, or nil when the queue is empty.
      def last
        data = fetch(-1, -1)
        data.first unless data.empty?
      end

      # Atomically pops up to BATCH_SIZE items from the head of the queue.
      def fetch_batched_data
        projects = []
        bsize = batch_size
        redis.multi do
          projects = redis.lrange(QUEUE, 0, bsize - 1)
          redis.ltrim(QUEUE, bsize, -1)
        end
        expire_queue_size!
        # Inside MULTI the lrange call returns a Redis::Future; resolve it
        # with #value once the transaction has executed.
        deserialize(projects.value)
      end

      # Pushes items back onto the head of the queue preserving their
      # original order (used to requeue a batch after a failed delivery).
      def store_batched_data(projects)
        redis.pipelined do
          projects.reverse_each do |project|
            # enqueue again to the head of the queue
            redis.lpush(QUEUE, project.to_json)
          end
        end
        expire_queue_size!
      end

      # Number of items the next fetch_batched_data call will take.
      def batch_size
        [queue_size, BATCH_SIZE].min
      end

      # Cached queue length; invalidated by mutating operations.
      def queue_size
        @queue_size ||= fetch_queue_size
      end

      def empty?
        queue_size == 0
      end

      # Removes every item from the queue.
      def empty!
        redis.del(QUEUE)
      end

      protected

      def fetch(start, stop)
        deserialize(redis.lrange(QUEUE, start, stop))
      end

      def fetch_queue_size
        redis.llen(QUEUE)
      end

      def expire_queue_size!
        @queue_size = nil
      end

      def deserialize(data)
        data.map! { |item| JSON.parse(item) } unless data.empty?
        data
      end

      def redis
        self.class.redis
      end

      # Builds a namespaced Redis connection from config/resque.yml when
      # present, falling back to a local Redis instance.
      def self.redis_connection
        redis_config_file = Rails.root.join('config', 'resque.yml')
        # File.exist? — File.exists? is deprecated.
        redis_url_string = if File.exist?(redis_config_file)
                             YAML.load_file(redis_config_file)[Rails.env]
                           else
                             'redis://localhost:6379'
                           end
        Redis::Namespace.new(NAMESPACE, redis: Redis.new(url: redis_url_string))
      end

      def self.redis
        @redis ||= redis_connection
      end
    end
  end
end
......@@ -20,6 +20,8 @@ module Gitlab
actor
when DeployKey
nil
when GeoNodeKey
nil
when Key
actor.user
end
......@@ -29,6 +31,10 @@ module Gitlab
actor if actor.is_a?(DeployKey)
end
def geo_node_key
actor if actor.is_a?(GeoNodeKey)
end
def can_push_to_branch?(ref)
return false unless user
......@@ -44,6 +50,8 @@ module Gitlab
user.can?(:read_project, project)
elsif deploy_key
deploy_key.projects.include?(project)
elsif geo_node_key
true
else
false
end
......@@ -77,7 +85,7 @@ module Gitlab
def download_access_check
if user
user_download_access_check
elsif deploy_key
elsif deploy_key || geo_node_key
build_status_object(true)
else
raise 'Wrong actor'
......@@ -85,6 +93,11 @@ module Gitlab
end
def push_access_check(changes)
if Gitlab::Geo.enabled? && Gitlab::Geo.secondary?
return build_status_object(false, "You can't push code on a secondary GitLab Geo node.")
end
return build_status_object(true) if git_annex_branch_sync?(changes)
if user
......@@ -323,6 +336,10 @@ module Gitlab
return build_status_object(false, "Repository does not exist")
end
if Gitlab::Geo.enabled? && Gitlab::Geo.secondary?
return build_status_object(false, "You can't use git-annex with a secondary GitLab Geo node.")
end
if user.can?(:push_code, project)
build_status_object(true)
else
......
module Gitlab
class GitAccessWiki < GitAccess
def change_access_check(change)
if user.can?(:create_wiki, project)
if Gitlab::Geo.enabled? && Gitlab::Geo.secondary?
build_status_object(false, "You can't push code to a secondary GitLab Geo node.")
elsif user.can?(:create_wiki, project)
build_status_object(true)
else
build_status_object(false, "You are not allowed to write to this project's wiki.")
......
......@@ -10,10 +10,10 @@ module Gitlab
def call(env)
@env = env
if disallowed_request? && Gitlab::Geo.readonly?
if disallowed_request? && Gitlab::Geo.secondary?
Rails.logger.debug('Gitlab Geo: preventing possible non readonly operation')
rack_flash.alert = 'You cannot do writing operations on a readonly Gitlab Geo instance'
rack_flash.alert = 'You cannot do writing operations on a secondary Gitlab Geo instance'
rack_session['flash'] = rack_flash.to_session_value
return [301, { 'Location' => last_visited_url }, []]
......@@ -25,7 +25,7 @@ module Gitlab
private
def disallowed_request?
DISALLOWED_METHODS.include?(@env['REQUEST_METHOD']) && !logout_route
DISALLOWED_METHODS.include?(@env['REQUEST_METHOD']) && !whitelisted_routes
end
def rack_flash
......@@ -48,6 +48,11 @@ module Gitlab
@route_hash ||= Rails.application.routes.recognize_path(request.url, { method: request.request_method }) rescue {}
end
def whitelisted_routes
whitelisted = %w(api/v3/internal api/v3/geo/refresh_projects)
logout_route || whitelisted.any? { |path| @request.path.include?(path) }
end
def logout_route
route_hash[:controller] == 'sessions' && route_hash[:action] == 'destroy'
end
......
# Factory for GeoNodeKey test records. The :another_key trait supplies a
# second, distinct public key so two keys can coexist in one example.
FactoryGirl.define do
factory :geo_node_key, class: 'GeoNodeKey' do
title
key do
"ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIEAiPWx6WM4lhHNedGfBpPJNPpZ7yKu+dnn1SJejgt4596k6YjzGGphH2TUxwKzxcKDKKezwkpfnxPkSMkuEspGRt/aZZ9wa++Oi7Qkr8prgHc4soW6NUlfDzpvZK2H5E7eQaSeP3SAwGmQKUFHCddNaP0L+hM7zhFNzjFvpaMgJw0= dummy@gitlab.com"
end
trait :another_key do
key do
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDmTillFzNTrrGgwaCKaSj+QCz81E6jBc/s9av0+3b1Hwfxgkqjl4nAK/OD2NjgyrONDTDfR8cRN4eAAy6nY8GLkOyYBDyuc5nTMqs5z3yVuTwf3koGm/YQQCmo91psZ2BgDFTor8SVEE5Mm1D1k3JDMhDFxzzrOtRYFPci9lskTJaBjpqWZ4E9rDTD2q/QZntCqbC3wE9uSemRQB5f8kik7vD/AD8VQXuzKladrZKkzkONCPWsXDspUitjM8HkQdOf0PsYn1CMUC1xKYbCxkg5TkEosIwGv6CoEArUrdu/4+10LVslq494mAvEItywzrluCLCnwELfW+h/m8UHoVhZ"
end
end
end
end
......@@ -2,10 +2,12 @@ FactoryGirl.define do
# Factory for GeoNode test records. The :primary trait marks the node as
# primary and gives it the configured GitLab port and a distinct key.
factory :geo_node do
host { Gitlab.config.gitlab.host }
sequence(:port) {|n| n}
association :geo_node_key
trait :primary do
primary true
port { Gitlab.config.gitlab.port }
association :geo_node_key, :another_key
end
end
end
......@@ -27,4 +27,18 @@ describe "Issue", elastic: true do
expect(Issue.elastic_search('term', options: options).total_count).to eq(2)
end
it "returns json with all needed elements" do
project = create :empty_project
issue = create :issue, project: project
expected_hash = issue.attributes.extract!('id', 'iid', 'title', 'description', 'created_at',
'updated_at', 'state', 'project_id', 'author_id')
expected_hash['project'] = { "id" => project.id }
expected_hash['author'] = { "id" => issue.author_id }
expected_hash['updated_at_sort'] = issue.updated_at
expect(issue.as_indexed_json).to eq(expected_hash)
end
end
......@@ -27,4 +27,31 @@ describe "MergeRequest", elastic: true do
expect(MergeRequest.elastic_search('term', options: options).total_count).to eq(2)
end
it "returns json with all needed elements" do
merge_request = create :merge_request
expected_hash = merge_request.attributes.extract!(
'id',
'iid',
'target_branch',
'source_branch',
'title',
'description',
'created_at',
'updated_at',
'state',
'merge_status',
'source_project_id',
'target_project_id',
'author_id'
)
expected_hash['source_project'] = { 'id' => merge_request.source_project_id }
expected_hash['target_project'] = { 'id' => merge_request.target_project_id }
expected_hash['author'] = { 'id' => merge_request.author.id }
expected_hash['updated_at_sort'] = merge_request.updated_at
expect(merge_request.as_indexed_json).to eq(expected_hash)
end
end
......@@ -27,4 +27,20 @@ describe "Milestone", elastic: true do
expect(Milestone.elastic_search('term', options: options).total_count).to eq(2)
end
it "returns json with all needed elements" do
milestone = create :milestone
expected_hash = milestone.attributes.extract!(
'id',
'title',
'description',
'project_id',
'created_at'
)
expected_hash[:updated_at_sort] = milestone.updated_at
expect(milestone.as_indexed_json).to eq(expected_hash)
end
end
......@@ -26,4 +26,19 @@ describe "Note", elastic: true do
expect(Note.elastic_search('term', options: options).total_count).to eq(1)
end
it "returns json with all needed elements" do
note = create :note
expected_hash = note.attributes.extract!(
'id',
'note',
'project_id',
'created_at'
)
expected_hash['updated_at_sort'] = note.updated_at
expect(note.as_indexed_json).to eq(expected_hash)
end
end
......@@ -24,4 +24,25 @@ describe "Projects", elastic: true do
expect(Project.elastic_search('test1', options: { pids: @project_ids }).total_count).to eq(1)
expect(Project.elastic_search('someone_elses_project', options: { pids: @project_ids }).total_count).to eq(0)
end
it "returns json with all needed elements" do
project = create :project
expected_hash = project.attributes.extract!(
'id',
'name',
'path',
'description',
'namespace_id',
'created_at',
'archived',
'visibility_level',
'last_activity_at'
)
expected_hash['name_with_namespace'] = project.name_with_namespace
expected_hash['path_with_namespace'] = project.path_with_namespace
expect(project.as_indexed_json).to eq(expected_hash)
end
end
......@@ -44,4 +44,25 @@ describe "Snippet", elastic: true do
expect(Snippet.elastic_search('home', options: options).total_count).to eq(1)
expect(Snippet.elastic_search('index.php', options: options).total_count).to eq(1)
end
it "returns json with all needed elements" do
snippet = create :project_snippet
expected_hash = snippet.attributes.extract!(
'id',
'title',
'file_name',
'content',
'created_at',
'updated_at',
'state',
'project_id',
'author_id',
)
expected_hash['project'] = { 'id' => snippet.project.id }
expected_hash['author'] = { 'id' => snippet.author.id }
expect(snippet.as_indexed_json).to eq(expected_hash)
end
end
# Spec for the Redis-backed Geo update queue: covers enqueue/dequeue
# ordering, batch operations, and size accounting.
describe Gitlab::Geo::UpdateQueue do
subject { described_class.new }
let(:dummy_data) { { 'id' => 1, 'clone_url' => 'git@localhost:repo/path.git' } }
let(:dummy_data2) { { 'id' => 99, 'clone_url' => 'git@localhost:other_repo/path.git' } }
let(:multiple_dummy_data) { [dummy_data, dummy_data2] * 10 }
# Start every example from an empty queue.
before(:each) { subject.empty! }
describe '#store' do
before(:each) { subject.store(dummy_data) }
it 'stores data to the queue' do
expect(subject).not_to be_empty
end
it 'stored data is equal to original' do
expect(subject.first).to eq(dummy_data)
end
end
context 'when queue has elements' do
before(:each) do
subject.store(dummy_data)
subject.store(dummy_data2)
end
describe '#first' do
it { expect(subject.first).to eq(dummy_data) }
end
describe '#last' do
it { expect(subject.last).to eq(dummy_data2) }
end
end
describe '#fetch_batched_data' do
before(:each) { subject.store_batched_data(multiple_dummy_data) }
it 'returns same stored data' do
expect(subject.fetch_batched_data).to eq(multiple_dummy_data)
end
end
describe '#store_batched_data' do
let(:ordered_data) { [{ 'a' => 1 }, { 'a' => 2 }, { 'a' => 3 }, { 'a' => 4 }, { 'a' => 5 }] }
it 'stores multiple items to the queue' do
expect { subject.store_batched_data(multiple_dummy_data) }.to change { subject.batch_size }.by(multiple_dummy_data.size)
end
# store_batched_data lpushes in reverse, so original order is preserved.
it 'returns data in equal order to original' do
subject.store_batched_data(ordered_data)
expect(subject.first).to eq(ordered_data.first)
expect(subject.last).to eq(ordered_data.last)
end
end
describe '#batch_size' do
before(:each) { allow(subject).to receive(:queue_size) { queue_size } }
context 'when queue size is smaller than BATCH_SIZE' do
let(:queue_size) { described_class::BATCH_SIZE - 20 }
it 'equals to the queue size' do
expect(subject.batch_size).to eq(queue_size)
end
end
context 'when queue size is bigger than BATCH_SIZE' do
let(:queue_size) { described_class::BATCH_SIZE + 20 }
it 'equals to the BATCH_SIZE' do
expect(subject.batch_size).to eq(described_class::BATCH_SIZE)
end
end
end
describe '#queue_size' do
it 'returns the ammount of items in queue' do
expect { subject.store(dummy_data) }.to change { subject.queue_size }.by(1)
end
end
describe '#empty?' do
it 'returns true when empty' do
is_expected.to be_empty
end
it 'returns false when there are enqueue data' do
subject.store(dummy_data)
is_expected.not_to be_empty
end
end
end
......@@ -44,8 +44,8 @@ describe Gitlab::Geo, lib: true do
before(:each) { secondary_node }
it 'returns true' do
expect(described_class).to receive(:current_node) { secondary_node }
expect(described_class.readonly?).to be_truthy
allow(described_class).to receive(:current_node) { secondary_node }
expect(described_class.secondary?).to be_truthy
end
end
......@@ -53,8 +53,8 @@ describe Gitlab::Geo, lib: true do
before(:each) { primary_node }
it 'returns false when ' do
expect(described_class).to receive(:current_node) { primary_node }
expect(described_class.readonly?).to be_falsey
allow(described_class).to receive(:current_node) { primary_node }
expect(described_class.secondary?).to be_falsey
end
end
end
......@@ -68,4 +68,15 @@ describe Gitlab::Geo, lib: true do
expect(described_class.geo_node?(host: 'inexistent', port: 1234)).to be_falsey
end
end
describe 'notify_update' do
let(:project) { FactoryGirl.build(:project) }
it 'delegates to NotifyService' do
expect(Geo::EnqueueUpdateService).to receive(:new).with(project).and_call_original
expect_any_instance_of(Geo::EnqueueUpdateService).to receive(:execute)
described_class.notify_update(project)
end
end
end
......@@ -125,6 +125,17 @@ describe Gitlab::GitAccess, lib: true do
it { expect(subject.allowed?).to be_truthy }
end
end
describe 'geo node key permissions' do
let(:key) { build(:geo_node_key) }
let(:actor) { key }
context 'pull code' do
subject { access.download_access_check }
it { expect(subject.allowed?).to be_truthy }
end
end
end
describe 'push_access_check' do
......@@ -249,9 +260,41 @@ describe Gitlab::GitAccess, lib: true do
end
end
context "when in a secondary gitlab geo node" do
before do
allow(Gitlab::Geo).to receive(:enabled?) { true }
allow(Gitlab::Geo).to receive(:secondary?) { true }
end
permissions_matrix.keys.each do |role|
describe "#{role} access" do
before { protect_feature_branch }
before { project.team << [user, role] }
permissions_matrix[role].each do |action, allowed|
context action do
subject { access.push_access_check(changes[action]) }
it { expect(subject.allowed?).to be_falsey }
end
end
end
end
end
context "when using git annex" do
before { project.team << [user, :master] }
describe 'and gitlab geo is enabled in a secondary node' do
before do
allow(Gitlab.config.gitlab_shell).to receive(:git_annex_enabled).and_return(true)
allow(Gitlab::Geo).to receive(:enabled?) { true }
allow(Gitlab::Geo).to receive(:secondary?) { true }
end
it { expect(access.push_access_check(git_annex_changes)).not_to be_allowed }
end
describe 'and git hooks unset' do
describe 'git annex enabled' do
before { allow(Gitlab.config.gitlab_shell).to receive(:git_annex_enabled).and_return(true) }
......
......@@ -4,8 +4,10 @@ describe Gitlab::GitAccessWiki, lib: true do
let(:access) { Gitlab::GitAccessWiki.new(user, project) }
let(:project) { create(:project) }
let(:user) { create(:user) }
let(:changes) { ['6f6d7e7ed 570e7b2ab refs/heads/master'] }
describe 'push_allowed?' do
describe '#push_access_check' do
context 'when user can :create_wiki' do
before do
create(:protected_branch, name: 'master', project: project)
project.team << [user, :developer]
......@@ -14,9 +16,16 @@ describe Gitlab::GitAccessWiki, lib: true do
subject { access.push_access_check(changes) }
it { expect(subject.allowed?).to be_truthy }
context 'when in a secondary gitlab geo node' do
before do
allow(Gitlab::Geo).to receive(:enabled?) { true }
allow(Gitlab::Geo).to receive(:secondary?) { true }
end
it { expect(subject.allowed?).to be_falsey }
end
end
# Fake ref update in "<old-sha> <new-sha> <ref>" form, targeting the
# repository's default (master) branch.
def changes
  ref_update = '6f6d7e7ed 570e7b2ab refs/heads/master'
  [ref_update]
end
end
......@@ -31,8 +31,8 @@ describe Gitlab::Middleware::ReadonlyGeo, lib: true do
let(:fake_app) { lambda { |env| [200, { 'Content-Type' => 'text/plain' }, ['OK']] } }
let(:request) { @request ||= Rack::MockRequest.new(rack_stack) }
context 'when in Gitlab Geo readonly node' do
before(:each) { allow(Gitlab::Geo).to receive(:readonly?) { true } }
context 'when in secondary Gitlab Geo node' do
before(:each) { allow(Gitlab::Geo).to receive(:secondary?) { true } }
it 'expects PATCH requests to be disallowed' do
response = request.patch('/test_request')
......
# == Schema Information
#
# Table name: keys
#
# id :integer not null, primary key
# user_id :integer
# created_at :datetime
# updated_at :datetime
# key :text
# title :string(255)
# type :string(255)
# fingerprint :string(255)
# public :boolean default(FALSE), not null
#
require 'spec_helper'

# Specs for GeoNodeKey, the SSH key a Geo node uses.
#
# The previous version declared `let(:geo_node)` and `let(:geo_node_key)`
# that no example ever referenced (the association example below uses the
# implicit `described_class.new` subject), and the factory call passed a
# plural `geo_nodes:` option contradicting the singular `have_one`
# association asserted here. Both unused lets are removed.
describe GeoNodeKey, models: true do
  describe 'Associations' do
    # A key is attached to exactly one Geo node.
    it { is_expected.to have_one(:geo_node) }
  end
end
require 'spec_helper'

# Specs for the GeoNode model: default attribute values and the
# uri/url/notify_url accessors that compose and decompose node URLs.
describe GeoNode, type: :model do
  # Canonical URL used to cross-check uri/url round-tripping below.
  let(:dummy_url) { 'https://localhost:3000/gitlab' }

  context 'associations' do
    it { is_expected.to belong_to(:geo_node_key).dependent(:destroy) }
  end

  context 'default values' do
    let(:gitlab_host) { 'gitlabhost' }

    # The default host is taken from this instance's own GitLab config.
    before(:each) { allow(Gitlab.config.gitlab).to receive(:host) { gitlab_host } }

    it 'defines a default schema' do
      expect(subject.schema).to eq('http')
    end

    it 'defines a default host' do
      expect(subject.host).to eq(gitlab_host)
    end

    it 'defines a default port' do
      expect(subject.port).to eq(80)
    end

    it 'defines a default relative_url_root' do
      expect(subject.relative_url_root).to eq('')
    end

    it 'defines a default primary flag' do
      expect(subject.primary).to eq(false)
    end
  end

  describe '#uri' do
    context 'when all fields are filled' do
      subject { GeoNode.new(schema: 'https', host: 'localhost', port: 3000, relative_url_root: 'gitlab') }

      it 'returns an URI object' do
        expect(subject.uri).to be_a URI
      end

      # Description fixed: was "includes schema home port and relative_url".
      it 'includes schema, host, port and relative_url_root' do
        expected_uri = URI.parse(dummy_url)
        expect(subject.uri).to eq(expected_uri)
      end
    end

    context 'when required fields are not filled' do
      subject { GeoNode.new(schema: nil, host: nil, port: nil, relative_url_root: nil) }

      # Even with nil fields the reader should still build a URI rather
      # than raise.
      it 'returns an URI object' do
        expect(subject.uri).to be_a URI
      end
    end
  end

  describe '#url' do
    subject { GeoNode.new(schema: 'https', host: 'localhost', port: 3000, relative_url_root: 'gitlab') }

    it 'returns a string' do
      expect(subject.url).to be_a String
    end

    it 'includes schema, host, port and relative_url_root' do
      expected_url = 'https://localhost:3000/gitlab'
      expect(subject.url).to eq(expected_url)
    end
  end

  describe '#url=' do
    subject { GeoNode.new }

    before(:each) { subject.url = dummy_url }

    it 'sets schema field based on url' do
      expect(subject.schema).to eq('https')
    end

    it 'sets host field based on url' do
      expect(subject.host).to eq('localhost')
    end

    # Description fixed: was "sets port field based on specified by url".
    it 'sets port field based on the port specified by url' do
      expect(subject.port).to eq(3000)
    end

    context 'when unspecified ports' do
      let(:dummy_http) { 'http://example.com/' }
      let(:dummy_https) { 'https://example.com/' }

      it 'sets port 80 when http and no port is specified' do
        subject.url = dummy_http
        expect(subject.port).to eq(80)
      end

      it 'sets port 443 when https and no port is specified' do
        subject.url = dummy_https
        expect(subject.port).to eq(443)
      end
    end
  end

  describe '#notify_url' do
    subject { GeoNode.new(schema: 'https', host: 'localhost', port: 3000, relative_url_root: 'gitlab') }
    let(:refresh_url) { 'https://localhost:3000/gitlab/api/v3/geo/refresh_projects' }

    it 'returns api url based on node uri' do
      expect(subject.notify_url).to eq(refresh_url)
    end
  end
end
require 'spec_helper'

# API specs for the Geo project-refresh notification endpoint.
describe API::API, api: true do
  include ApiHelpers

  let(:admin) { create(:admin) }
  let(:user) { create(:user) }

  describe 'POST /geo/refresh_projects' do
    # Stub the scheduling service so no background update is really enqueued.
    before(:each) { allow_any_instance_of(::Geo::ScheduleRepoUpdateService).to receive(:execute) }

    # Description fixed: the old text ("should retrieve the license
    # information if admin is logged in") was copy-pasted from the license
    # API specs; this endpoint schedules project refreshes.
    it 'should schedule the refresh of the given projects if admin is logged in' do
      post api('/geo/refresh_projects', admin), projects: ['1', '2', '3']

      expect(response.status).to eq 201
    end

    it 'should deny access if not admin' do
      post api('/geo/refresh_projects', user)

      expect(response.status).to eq 403
    end
  end
end
# Specs for Geo::EnqueueUpdateService: enqueuing a project's id and
# clone URL onto the Geo redis-backed update queue.
describe Geo::EnqueueUpdateService, service: true do
  subject { described_class.new(project) }

  let(:project) { double(:project) }
  let(:fake_url) { 'git@localhost:repo/path.git' }
  let(:fake_id) { 999 }

  # Reach into the service for its backing queue so stored payloads can
  # be inspected directly.
  let(:queue) { subject.instance_variable_get(:@queue) }

  before do
    queue.empty!

    expect(project).to receive(:url_to_repo) { fake_url }
    expect(project).to receive(:id) { fake_id }
  end

  describe '#execute' do
    let(:stored_data) { queue.first }

    before { subject.execute }

    it 'persists id and clone_url to redis queue' do
      expect(stored_data).to have_key('id')
      expect(stored_data).to have_key('clone_url')
    end

    it 'persisted id is equal to original' do
      expect(stored_data['id']).to eq(fake_id)
    end

    it 'persisted clone_url is equal to original' do
      expect(stored_data['clone_url']).to eq(fake_url)
    end
  end
end
......@@ -95,9 +95,16 @@ describe Projects::ImportService, services: true do
def stub_github_omniauth_provider
provider = OpenStruct.new(
name: 'github',
app_id: 'asd123',
app_secret: 'asd123'
'name' => 'github',
'app_id' => 'asd123',
'app_secret' => 'asd123',
'args' => {
'client_options' => {
'site' => 'https://github.com/api/v3',
'authorize_url' => 'https://github.com/login/oauth/authorize',
'token_url' => 'https://github.com/login/oauth/access_token'
}
}
)
Gitlab.config.omniauth.providers << provider
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment