Commit 05542079 authored by Rémy Coutable's avatar Rémy Coutable

Merge branch 'ce-to-ee-2018-07-04' into 'master'

CE upstream - 2018-07-04 12:21 UTC

Closes gitaly#329, gitaly#320, gitaly#323 and gitaly#328

See merge request gitlab-org/gitlab-ee!6381
parents 8078f779 c32af9da
...@@ -116,7 +116,7 @@ GEM ...@@ -116,7 +116,7 @@ GEM
capybara-screenshot (1.0.14) capybara-screenshot (1.0.14)
capybara (>= 1.0, < 3) capybara (>= 1.0, < 3)
launchy launchy
carrierwave (1.2.1) carrierwave (1.2.3)
activemodel (>= 4.0.0) activemodel (>= 4.0.0)
activesupport (>= 4.0.0) activesupport (>= 4.0.0)
mime-types (>= 1.16) mime-types (>= 1.16)
......
import $ from 'jquery'; import $ from 'jquery';
import { sprintf, __ } from '~/locale'; import { sprintf, __ } from '~/locale';
import flash from '~/flash'; import flash from '~/flash';
import { stripHtml } from '~/lib/utils/text_utility';
import * as rootTypes from '../../mutation_types'; import * as rootTypes from '../../mutation_types';
import { createCommitPayload, createNewMergeRequestUrl } from '../../utils'; import { createCommitPayload, createNewMergeRequestUrl } from '../../utils';
import router from '../../../ide_router'; import router from '../../../ide_router';
...@@ -198,11 +197,18 @@ export const commitChanges = ({ commit, state, getters, dispatch, rootState, roo ...@@ -198,11 +197,18 @@ export const commitChanges = ({ commit, state, getters, dispatch, rootState, roo
if (err.response.status === 400) { if (err.response.status === 400) {
$('#ide-create-branch-modal').modal('show'); $('#ide-create-branch-modal').modal('show');
} else { } else {
let errMsg = __('Error committing changes. Please try again.'); dispatch(
if (err.response.data && err.response.data.message) { 'setErrorMessage',
errMsg += ` (${stripHtml(err.response.data.message)})`; {
} text: __('An error occurred whilst committing your changes.'),
flash(errMsg, 'alert', document, null, false, true); action: () =>
dispatch('commitChanges').then(() =>
dispatch('setErrorMessage', null, { root: true }),
),
actionText: __('Please try again'),
},
{ root: true },
);
window.dispatchEvent(new Event('resize')); window.dispatchEvent(new Event('resize'));
} }
......
import { __ } from '../../../../locale'; import { __ } from '../../../../locale';
import Api from '../../../../api'; import Api from '../../../../api';
import flash from '../../../../flash';
import router from '../../../ide_router'; import router from '../../../ide_router';
import { scopes } from './constants'; import { scopes } from './constants';
import * as types from './mutation_types'; import * as types from './mutation_types';
...@@ -8,8 +7,20 @@ import * as rootTypes from '../../mutation_types'; ...@@ -8,8 +7,20 @@ import * as rootTypes from '../../mutation_types';
export const requestMergeRequests = ({ commit }, type) => export const requestMergeRequests = ({ commit }, type) =>
commit(types.REQUEST_MERGE_REQUESTS, type); commit(types.REQUEST_MERGE_REQUESTS, type);
export const receiveMergeRequestsError = ({ commit }, type) => { export const receiveMergeRequestsError = ({ commit, dispatch }, { type, search }) => {
flash(__('Error loading merge requests.')); dispatch(
'setErrorMessage',
{
text: __('Error loading merge requests.'),
action: payload =>
dispatch('fetchMergeRequests', payload).then(() =>
dispatch('setErrorMessage', null, { root: true }),
),
actionText: __('Please try again'),
actionPayload: { type, search },
},
{ root: true },
);
commit(types.RECEIVE_MERGE_REQUESTS_ERROR, type); commit(types.RECEIVE_MERGE_REQUESTS_ERROR, type);
}; };
export const receiveMergeRequestsSuccess = ({ commit }, { type, data }) => export const receiveMergeRequestsSuccess = ({ commit }, { type, data }) =>
...@@ -22,7 +33,7 @@ export const fetchMergeRequests = ({ dispatch, state: { state } }, { type, searc ...@@ -22,7 +33,7 @@ export const fetchMergeRequests = ({ dispatch, state: { state } }, { type, searc
Api.mergeRequests({ scope, state, search }) Api.mergeRequests({ scope, state, search })
.then(({ data }) => dispatch('receiveMergeRequestsSuccess', { type, data })) .then(({ data }) => dispatch('receiveMergeRequestsSuccess', { type, data }))
.catch(() => dispatch('receiveMergeRequestsError', type)); .catch(() => dispatch('receiveMergeRequestsError', { type, search }));
}; };
export const resetMergeRequests = ({ commit }, type) => commit(types.RESET_MERGE_REQUESTS, type); export const resetMergeRequests = ({ commit }, type) => commit(types.RESET_MERGE_REQUESTS, type);
......
import Visibility from 'visibilityjs'; import Visibility from 'visibilityjs';
import axios from 'axios'; import axios from 'axios';
import httpStatus from '../../../../lib/utils/http_status';
import { __ } from '../../../../locale'; import { __ } from '../../../../locale';
import flash from '../../../../flash';
import Poll from '../../../../lib/utils/poll'; import Poll from '../../../../lib/utils/poll';
import service from '../../../services'; import service from '../../../services';
import { rightSidebarViews } from '../../../constants'; import { rightSidebarViews } from '../../../constants';
...@@ -18,10 +18,27 @@ export const stopPipelinePolling = () => { ...@@ -18,10 +18,27 @@ export const stopPipelinePolling = () => {
export const restartPipelinePolling = () => { export const restartPipelinePolling = () => {
if (eTagPoll) eTagPoll.restart(); if (eTagPoll) eTagPoll.restart();
}; };
export const forcePipelineRequest = () => {
if (eTagPoll) eTagPoll.makeRequest();
};
export const requestLatestPipeline = ({ commit }) => commit(types.REQUEST_LATEST_PIPELINE); export const requestLatestPipeline = ({ commit }) => commit(types.REQUEST_LATEST_PIPELINE);
export const receiveLatestPipelineError = ({ commit, dispatch }) => { export const receiveLatestPipelineError = ({ commit, dispatch }, err) => {
flash(__('There was an error loading latest pipeline')); if (err.status !== httpStatus.NOT_FOUND) {
dispatch(
'setErrorMessage',
{
text: __('An error occurred whilst fetching the latest pipeline.'),
action: () =>
dispatch('forcePipelineRequest').then(() =>
dispatch('setErrorMessage', null, { root: true }),
),
actionText: __('Please try again'),
actionPayload: null,
},
{ root: true },
);
}
commit(types.RECEIVE_LASTEST_PIPELINE_ERROR); commit(types.RECEIVE_LASTEST_PIPELINE_ERROR);
dispatch('stopPipelinePolling'); dispatch('stopPipelinePolling');
}; };
...@@ -46,7 +63,7 @@ export const fetchLatestPipeline = ({ dispatch, rootGetters }) => { ...@@ -46,7 +63,7 @@ export const fetchLatestPipeline = ({ dispatch, rootGetters }) => {
method: 'lastCommitPipelines', method: 'lastCommitPipelines',
data: { getters: rootGetters }, data: { getters: rootGetters },
successCallback: ({ data }) => dispatch('receiveLatestPipelineSuccess', data), successCallback: ({ data }) => dispatch('receiveLatestPipelineSuccess', data),
errorCallback: () => dispatch('receiveLatestPipelineError'), errorCallback: err => dispatch('receiveLatestPipelineError', err),
}); });
if (!Visibility.hidden()) { if (!Visibility.hidden()) {
...@@ -63,9 +80,21 @@ export const fetchLatestPipeline = ({ dispatch, rootGetters }) => { ...@@ -63,9 +80,21 @@ export const fetchLatestPipeline = ({ dispatch, rootGetters }) => {
}; };
export const requestJobs = ({ commit }, id) => commit(types.REQUEST_JOBS, id); export const requestJobs = ({ commit }, id) => commit(types.REQUEST_JOBS, id);
export const receiveJobsError = ({ commit }, id) => { export const receiveJobsError = ({ commit, dispatch }, stage) => {
flash(__('There was an error loading jobs')); dispatch(
commit(types.RECEIVE_JOBS_ERROR, id); 'setErrorMessage',
{
{ text: __('An error occurred whilst loading the pipelines jobs.'),
action: payload =>
dispatch('fetchJobs', payload).then(() =>
dispatch('setErrorMessage', null, { root: true }),
),
actionText: __('Please try again'),
actionPayload: stage,
},
{ root: true },
);
commit(types.RECEIVE_JOBS_ERROR, stage.id);
}; };
export const receiveJobsSuccess = ({ commit }, { id, data }) => export const receiveJobsSuccess = ({ commit }, { id, data }) =>
commit(types.RECEIVE_JOBS_SUCCESS, { id, data }); commit(types.RECEIVE_JOBS_SUCCESS, { id, data });
...@@ -76,7 +105,7 @@ export const fetchJobs = ({ dispatch }, stage) => { ...@@ -76,7 +105,7 @@ export const fetchJobs = ({ dispatch }, stage) => {
axios axios
.get(stage.dropdownPath) .get(stage.dropdownPath)
.then(({ data }) => dispatch('receiveJobsSuccess', { id: stage.id, data })) .then(({ data }) => dispatch('receiveJobsSuccess', { id: stage.id, data }))
.catch(() => dispatch('receiveJobsError', stage.id)); .catch(() => dispatch('receiveJobsError', stage));
}; };
export const toggleStageCollapsed = ({ commit }, stageId) => export const toggleStageCollapsed = ({ commit }, stageId) =>
...@@ -90,8 +119,18 @@ export const setDetailJob = ({ commit, dispatch }, job) => { ...@@ -90,8 +119,18 @@ export const setDetailJob = ({ commit, dispatch }, job) => {
}; };
export const requestJobTrace = ({ commit }) => commit(types.REQUEST_JOB_TRACE); export const requestJobTrace = ({ commit }) => commit(types.REQUEST_JOB_TRACE);
export const receiveJobTraceError = ({ commit }) => { export const receiveJobTraceError = ({ commit, dispatch }) => {
flash(__('Error fetching job trace')); dispatch(
'setErrorMessage',
{
{ text: __('An error occurred whilst fetching the job trace.'),
action: () =>
dispatch('fetchJobTrace').then(() => dispatch('setErrorMessage', null, { root: true })),
actionText: __('Please try again'),
actionPayload: null,
},
{ root: true },
);
commit(types.RECEIVE_JOB_TRACE_ERROR); commit(types.RECEIVE_JOB_TRACE_ERROR);
}; };
export const receiveJobTraceSuccess = ({ commit }, data) => export const receiveJobTraceSuccess = ({ commit }, data) =>
......
...@@ -15,7 +15,7 @@ class Projects::PipelinesController < Projects::ApplicationController ...@@ -15,7 +15,7 @@ class Projects::PipelinesController < Projects::ApplicationController
def index def index
@scope = params[:scope] @scope = params[:scope]
@pipelines = PipelinesFinder @pipelines = PipelinesFinder
.new(project, scope: @scope) .new(project, current_user, scope: @scope)
.execute .execute
.page(params[:page]) .page(params[:page])
.per(30) .per(30)
...@@ -180,7 +180,7 @@ class Projects::PipelinesController < Projects::ApplicationController ...@@ -180,7 +180,7 @@ class Projects::PipelinesController < Projects::ApplicationController
end end
def limited_pipelines_count(project, scope = nil) def limited_pipelines_count(project, scope = nil)
finder = PipelinesFinder.new(project, scope: scope) finder = PipelinesFinder.new(project, current_user, scope: scope)
view_context.limited_counter_with_delimiter(finder.execute) view_context.limited_counter_with_delimiter(finder.execute)
end end
......
...@@ -74,7 +74,7 @@ module Projects ...@@ -74,7 +74,7 @@ module Projects
.ordered .ordered
.page(params[:page]).per(20) .page(params[:page]).per(20)
@shared_runners = ::Ci::Runner.shared.active @shared_runners = ::Ci::Runner.instance_type.active
@shared_runners_count = @shared_runners.count(:all) @shared_runners_count = @shared_runners.count(:all)
......
class PipelinesFinder class PipelinesFinder
attr_reader :project, :pipelines, :params attr_reader :project, :pipelines, :params, :current_user
ALLOWED_INDEXED_COLUMNS = %w[id status ref user_id].freeze ALLOWED_INDEXED_COLUMNS = %w[id status ref user_id].freeze
def initialize(project, params = {}) def initialize(project, current_user, params = {})
@project = project @project = project
@current_user = current_user
@pipelines = project.pipelines @pipelines = project.pipelines
@params = params @params = params
end end
def execute def execute
unless Ability.allowed?(current_user, :read_pipeline, project)
return Ci::Pipeline.none
end
items = pipelines items = pipelines
items = by_scope(items) items = by_scope(items)
items = by_status(items) items = by_status(items)
......
...@@ -2,7 +2,10 @@ class GitlabSchema < GraphQL::Schema ...@@ -2,7 +2,10 @@ class GitlabSchema < GraphQL::Schema
use BatchLoader::GraphQL use BatchLoader::GraphQL
use Gitlab::Graphql::Authorize use Gitlab::Graphql::Authorize
use Gitlab::Graphql::Present use Gitlab::Graphql::Present
use Gitlab::Graphql::Connections
query(Types::QueryType) query(Types::QueryType)
default_max_page_size 100
# mutation(Types::MutationType) # mutation(Types::MutationType)
end end
module ResolvesPipelines
extend ActiveSupport::Concern
included do
type [Types::Ci::PipelineType], null: false
argument :status,
Types::Ci::PipelineStatusEnum,
required: false,
description: "Filter pipelines by their status"
argument :ref,
GraphQL::STRING_TYPE,
required: false,
description: "Filter pipelines by the ref they are run for"
argument :sha,
GraphQL::STRING_TYPE,
required: false,
description: "Filter pipelines by the sha of the commit they are run for"
end
def resolve_pipelines(project, params = {})
PipelinesFinder.new(project, context[:current_user], params).execute
end
end
module Resolvers
class MergeRequestPipelinesResolver < BaseResolver
include ::ResolvesPipelines
alias_method :merge_request, :object
def resolve(**args)
resolve_pipelines(project, args)
.merge(merge_request.all_pipelines)
end
def project
merge_request.source_project
end
end
end
module Resolvers
class ProjectPipelinesResolver < BaseResolver
include ResolvesPipelines
alias_method :project, :object
def resolve(**args)
resolve_pipelines(project, args)
end
end
end
module Types
module Ci
class PipelineStatusEnum < BaseEnum
::Ci::Pipeline.all_state_names.each do |state_symbol|
value state_symbol.to_s.upcase, value: state_symbol.to_s
end
end
end
end
module Types
module Ci
class PipelineType < BaseObject
expose_permissions Types::PermissionTypes::Ci::Pipeline
graphql_name 'Pipeline'
field :id, GraphQL::ID_TYPE, null: false
field :iid, GraphQL::ID_TYPE, null: false
field :sha, GraphQL::STRING_TYPE, null: false
field :before_sha, GraphQL::STRING_TYPE, null: true
field :status, PipelineStatusEnum, null: false
field :duration,
GraphQL::INT_TYPE,
null: true,
description: "Duration of the pipeline in seconds"
field :coverage,
GraphQL::FLOAT_TYPE,
null: true,
description: "Coverage percentage"
field :created_at, Types::TimeType, null: false
field :updated_at, Types::TimeType, null: false
field :started_at, Types::TimeType, null: true
field :finished_at, Types::TimeType, null: true
field :committed_at, Types::TimeType, null: true
# TODO: Add triggering user as a type
end
end
end
...@@ -45,5 +45,11 @@ module Types ...@@ -45,5 +45,11 @@ module Types
field :upvotes, GraphQL::INT_TYPE, null: false field :upvotes, GraphQL::INT_TYPE, null: false
field :downvotes, GraphQL::INT_TYPE, null: false field :downvotes, GraphQL::INT_TYPE, null: false
field :subscribed, GraphQL::BOOLEAN_TYPE, method: :subscribed?, null: false field :subscribed, GraphQL::BOOLEAN_TYPE, method: :subscribed?, null: false
field :head_pipeline, Types::Ci::PipelineType, null: true, method: :actual_head_pipeline do
authorize :read_pipeline
end
field :pipelines, Types::Ci::PipelineType.connection_type,
resolver: Resolvers::MergeRequestPipelinesResolver
end end
end end
module Types
module PermissionTypes
module Ci
class Pipeline < BasePermissionType
graphql_name 'PipelinePermissions'
abilities :update_pipeline, :admin_pipeline, :destroy_pipeline
end
end
end
end
...@@ -70,5 +70,10 @@ module Types ...@@ -70,5 +70,10 @@ module Types
resolver: Resolvers::MergeRequestResolver do resolver: Resolvers::MergeRequestResolver do
authorize :read_merge_request authorize :read_merge_request
end end
field :pipelines,
Types::Ci::PipelineType.connection_type,
null: false,
resolver: Resolvers::ProjectPipelinesResolver
end end
end end
...@@ -122,7 +122,7 @@ module CiStatusHelper ...@@ -122,7 +122,7 @@ module CiStatusHelper
def no_runners_for_project?(project) def no_runners_for_project?(project)
project.runners.blank? && project.runners.blank? &&
Ci::Runner.shared.blank? Ci::Runner.instance_type.blank?
end end
def render_status_with_link(type, status, path = nil, tooltip_placement: 'left', cssclass: '', container: 'body') def render_status_with_link(type, status, path = nil, tooltip_placement: 'left', cssclass: '', container: 'body')
......
...@@ -2,6 +2,7 @@ module Ci ...@@ -2,6 +2,7 @@ module Ci
class Runner < ActiveRecord::Base class Runner < ActiveRecord::Base
extend Gitlab::Ci::Model extend Gitlab::Ci::Model
include Gitlab::SQL::Pattern include Gitlab::SQL::Pattern
include IgnorableColumn
include RedisCacheable include RedisCacheable
include ChronicDurationAttribute include ChronicDurationAttribute
prepend EE::Ci::Runner prepend EE::Ci::Runner
...@@ -12,6 +13,8 @@ module Ci ...@@ -12,6 +13,8 @@ module Ci
AVAILABLE_SCOPES = %w[specific shared active paused online].freeze AVAILABLE_SCOPES = %w[specific shared active paused online].freeze
FORM_EDITABLE = %i[description tag_list active run_untagged locked access_level maximum_timeout_human_readable].freeze FORM_EDITABLE = %i[description tag_list active run_untagged locked access_level maximum_timeout_human_readable].freeze
ignore_column :is_shared
has_many :builds has_many :builds
has_many :runner_projects, inverse_of: :runner, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent has_many :runner_projects, inverse_of: :runner, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :projects, through: :runner_projects has_many :projects, through: :runner_projects
...@@ -22,13 +25,16 @@ module Ci ...@@ -22,13 +25,16 @@ module Ci
before_validation :set_default_values before_validation :set_default_values
scope :specific, -> { where(is_shared: false) }
scope :shared, -> { where(is_shared: true) }
scope :active, -> { where(active: true) } scope :active, -> { where(active: true) }
scope :paused, -> { where(active: false) } scope :paused, -> { where(active: false) }
scope :online, -> { where('contacted_at > ?', contact_time_deadline) } scope :online, -> { where('contacted_at > ?', contact_time_deadline) }
scope :ordered, -> { order(id: :desc) } scope :ordered, -> { order(id: :desc) }
# BACKWARD COMPATIBILITY: There are needed to maintain compatibility with `AVAILABLE_SCOPES` used by `lib/api/runners.rb`
scope :deprecated_shared, -> { instance_type }
# this should get replaced with `project_type.or(group_type)` once using Rails5
scope :deprecated_specific, -> { where(runner_type: [runner_types[:project_type], runner_types[:group_type]]) }
scope :belonging_to_project, -> (project_id) { scope :belonging_to_project, -> (project_id) {
joins(:runner_projects).where(ci_runner_projects: { project_id: project_id }) joins(:runner_projects).where(ci_runner_projects: { project_id: project_id })
} }
...@@ -40,9 +46,9 @@ module Ci ...@@ -40,9 +46,9 @@ module Ci
joins(:groups).where(namespaces: { id: hierarchy_groups }) joins(:groups).where(namespaces: { id: hierarchy_groups })
} }
scope :owned_or_shared, -> (project_id) do scope :owned_or_instance_wide, -> (project_id) do
union = Gitlab::SQL::Union.new( union = Gitlab::SQL::Union.new(
[belonging_to_project(project_id), belonging_to_parent_group_of_project(project_id), shared], [belonging_to_project(project_id), belonging_to_parent_group_of_project(project_id), instance_type],
remove_duplicates: false remove_duplicates: false
) )
from("(#{union.to_sql}) ci_runners") from("(#{union.to_sql}) ci_runners")
...@@ -64,7 +70,6 @@ module Ci ...@@ -64,7 +70,6 @@ module Ci
validate :no_groups, unless: :group_type? validate :no_groups, unless: :group_type?
validate :any_project, if: :project_type? validate :any_project, if: :project_type?
validate :exactly_one_group, if: :group_type? validate :exactly_one_group, if: :group_type?
validate :validate_is_shared
acts_as_taggable acts_as_taggable
...@@ -114,8 +119,7 @@ module Ci ...@@ -114,8 +119,7 @@ module Ci
end end
def assign_to(project, current_user = nil) def assign_to(project, current_user = nil)
if shared? if instance_type?
self.is_shared = false if shared?
self.runner_type = :project_type self.runner_type = :project_type
elsif group_type? elsif group_type?
raise ArgumentError, 'Transitioning a group runner to a project runner is not supported' raise ArgumentError, 'Transitioning a group runner to a project runner is not supported'
...@@ -138,10 +142,6 @@ module Ci ...@@ -138,10 +142,6 @@ module Ci
description description
end end
def shared?
is_shared
end
def online? def online?
contacted_at && contacted_at > self.class.contact_time_deadline contacted_at && contacted_at > self.class.contact_time_deadline
end end
...@@ -160,10 +160,6 @@ module Ci ...@@ -160,10 +160,6 @@ module Ci
runner_projects.count == 1 runner_projects.count == 1
end end
def specific?
!shared?
end
def assigned_to_group? def assigned_to_group?
runner_namespaces.any? runner_namespaces.any?
end end
...@@ -261,7 +257,7 @@ module Ci ...@@ -261,7 +257,7 @@ module Ci
end end
def assignable_for?(project_id) def assignable_for?(project_id)
self.class.owned_or_shared(project_id).where(id: self.id).any? self.class.owned_or_instance_wide(project_id).where(id: self.id).any?
end end
def no_projects def no_projects
...@@ -288,12 +284,6 @@ module Ci ...@@ -288,12 +284,6 @@ module Ci
end end
end end
def validate_is_shared
unless is_shared? == instance_type?
errors.add(:is_shared, 'is not equal to instance_type?')
end
end
def accepting_tags?(build) def accepting_tags?(build)
(run_untagged? || build.has_tags?) && (build.tag_list - tag_list).empty? (run_untagged? || build.has_tags?) && (build.tag_list - tag_list).empty?
end end
......
...@@ -1440,7 +1440,7 @@ class Project < ActiveRecord::Base ...@@ -1440,7 +1440,7 @@ class Project < ActiveRecord::Base
end end
def shared_runners def shared_runners
@shared_runners ||= shared_runners_available? ? Ci::Runner.shared : Ci::Runner.none @shared_runners ||= shared_runners_available? ? Ci::Runner.instance_type : Ci::Runner.none
end end
def group_runners def group_runners
......
...@@ -1054,7 +1054,7 @@ class User < ActiveRecord::Base ...@@ -1054,7 +1054,7 @@ class User < ActiveRecord::Base
union = Gitlab::SQL::Union.new([project_runner_ids, group_runner_ids]) union = Gitlab::SQL::Union.new([project_runner_ids, group_runner_ids])
Ci::Runner.specific.where("ci_runners.id IN (#{union.to_sql})") # rubocop:disable GitlabSecurity/SqlInjection Ci::Runner.where("ci_runners.id IN (#{union.to_sql})") # rubocop:disable GitlabSecurity/SqlInjection
end end
end end
......
...@@ -4,7 +4,7 @@ class RunnerEntity < Grape::Entity ...@@ -4,7 +4,7 @@ class RunnerEntity < Grape::Entity
expose :id, :description expose :id, :description
expose :edit_path, expose :edit_path,
if: -> (*) { can?(request.current_user, :admin_build, project) && runner.specific? } do |runner| if: -> (*) { can?(request.current_user, :admin_build, project) && runner.project_type? } do |runner|
edit_project_runner_path(project, runner) edit_project_runner_path(project, runner)
end end
......
...@@ -17,7 +17,7 @@ module Ci ...@@ -17,7 +17,7 @@ module Ci
def execute def execute
builds = builds =
if runner.shared? if runner.instance_type?
builds_for_shared_runner builds_for_shared_runner
elsif runner.group_type? elsif runner.group_type?
builds_for_group_runner builds_for_group_runner
...@@ -101,7 +101,7 @@ module Ci ...@@ -101,7 +101,7 @@ module Ci
end end
def running_builds_for_shared_runners def running_builds_for_shared_runners
Ci::Build.running.where(runner: Ci::Runner.shared) Ci::Build.running.where(runner: Ci::Runner.instance_type)
.group(:project_id).select(:project_id, 'count(*) AS running_builds') .group(:project_id).select(:project_id, 'count(*) AS running_builds')
end end
...@@ -117,7 +117,7 @@ module Ci ...@@ -117,7 +117,7 @@ module Ci
end end
def register_success(job) def register_success(job)
labels = { shared_runner: runner.shared?, labels = { shared_runner: runner.instance_type?,
jobs_running_for_project: jobs_running_for_project(job) } jobs_running_for_project: jobs_running_for_project(job) }
job_queue_duration_seconds.observe(labels, Time.now - job.queued_at) unless job.queued_at.nil? job_queue_duration_seconds.observe(labels, Time.now - job.queued_at) unless job.queued_at.nil?
...@@ -125,10 +125,10 @@ module Ci ...@@ -125,10 +125,10 @@ module Ci
end end
def jobs_running_for_project(job) def jobs_running_for_project(job)
return '+Inf' unless runner.shared? return '+Inf' unless runner.instance_type?
# excluding currently started job # excluding currently started job
running_jobs_count = job.project.builds.running.where(runner: Ci::Runner.shared) running_jobs_count = job.project.builds.running.where(runner: Ci::Runner.instance_type)
.limit(JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET + 1).count - 1 .limit(JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET + 1).count - 1
running_jobs_count < JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET ? running_jobs_count : "#{JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET}+" running_jobs_count < JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET ? running_jobs_count : "#{JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET}+"
end end
......
%tr{ id: dom_id(runner) } %tr{ id: dom_id(runner) }
%td %td
- if runner.shared? - if runner.instance_type?
%span.badge.badge-success shared %span.badge.badge-success shared
- elsif runner.group_type? - elsif runner.group_type?
%span.badge.badge-success group %span.badge.badge-success group
...@@ -21,7 +21,7 @@ ...@@ -21,7 +21,7 @@
%td %td
= runner.ip_address = runner.ip_address
%td %td
- if runner.shared? || runner.group_type? - if runner.instance_type? || runner.group_type?
n/a n/a
- else - else
= runner.projects.count(:all) = runner.projects.count(:all)
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
%h3.project-title %h3.project-title
Runner ##{@runner.id} Runner ##{@runner.id}
.float-right .float-right
- if @runner.shared? - if @runner.instance_type?
%span.runner-state.runner-state-shared %span.runner-state.runner-state-shared
Shared Shared
- else - else
...@@ -13,7 +13,7 @@ ...@@ -13,7 +13,7 @@
- breadcrumb_title "##{@runner.id}" - breadcrumb_title "##{@runner.id}"
- @no_container = true - @no_container = true
- if @runner.shared? - if @runner.instance_type?
.bs-callout.bs-callout-success .bs-callout.bs-callout-success
%h4 This Runner will process jobs from ALL UNASSIGNED projects %h4 This Runner will process jobs from ALL UNASSIGNED projects
%p %p
......
...@@ -26,7 +26,7 @@ ...@@ -26,7 +26,7 @@
- else - else
- runner_project = @project.runner_projects.find_by(runner_id: runner) - runner_project = @project.runner_projects.find_by(runner_id: runner)
= link_to _('Disable for this project'), project_runner_project_path(@project, runner_project), data: { confirm: _("Are you sure?") }, method: :delete, class: 'btn btn-danger btn-sm' = link_to _('Disable for this project'), project_runner_project_path(@project, runner_project), data: { confirm: _("Are you sure?") }, method: :delete, class: 'btn btn-danger btn-sm'
- elsif !(runner.is_shared? || runner.group_type?) # We can simplify this to `runner.project_type?` when migrating #runner_type is complete - elsif runner.project_type?
= form_for [@project.namespace.becomes(Namespace), @project, @project.runner_projects.new] do |f| = form_for [@project.namespace.becomes(Namespace), @project, @project.runner_projects.new] do |f|
= f.hidden_field :runner_id, value: runner.id = f.hidden_field :runner_id, value: runner.id
= f.submit _('Enable for this project'), class: 'btn btn-sm' = f.submit _('Enable for this project'), class: 'btn btn-sm'
......
...@@ -3,7 +3,7 @@ ...@@ -3,7 +3,7 @@
%h3.page-title %h3.page-title
Runner ##{@runner.id} Runner ##{@runner.id}
.float-right .float-right
- if @runner.shared? - if @runner.instance_type?
%span.runner-state.runner-state-shared %span.runner-state.runner-state-shared
Shared Shared
- elsif @runner.group_type? - elsif @runner.group_type?
......
---
title: Bump carrierwave gem version to 1.2.3
merge_request: 20287
author:
type: performance
---
title: Add pipeline lists to GraphQL
merge_request: 20249
author:
type: added
---
title: Invalidate merge request diffs cache if diff data change.
merge_request:
author:
type: fixed
---
title: Remove the use of `is_shared` of `Ci::Runner`
merge_request:
author:
type: other
...@@ -47,7 +47,8 @@ module Gitlab ...@@ -47,7 +47,8 @@ module Gitlab
#{config.root}/app/workers/concerns #{config.root}/app/workers/concerns
#{config.root}/app/services/concerns #{config.root}/app/services/concerns
#{config.root}/app/serializers/concerns #{config.root}/app/serializers/concerns
#{config.root}/app/finders/concerns]) #{config.root}/app/finders/concerns
#{config.root}/app/graphql/resolvers/concerns])
config.generators.templates.push("#{config.root}/generator_templates") config.generators.templates.push("#{config.root}/generator_templates")
......
# This fixes the problem https://gitlab.com/gitlab-org/gitlab-ce/issues/46182 that carrierwave eagerly loads upoloading files into memory
# There is an PR https://github.com/carrierwaveuploader/carrierwave/pull/2314 which has the identical change.
module CarrierWave
  module Storage
    class Fog < Abstract
      class File
        # Patches CarrierWave's Fog file wrapper so that reading an upload
        # whose local source file is still on disk reads it from disk
        # instead of eagerly pulling the whole remote object into memory.
        module MonkeyPatch
          ##
          # Read content of file from service
          #
          # === Returns
          #
          # [String] contents of file
          def read
            file_body = file.body

            return if file_body.nil?
            return file_body unless file_body.is_a?(::File)

            # Fog::Storage::XXX::File#body may hand back the local source
            # ::File that was uploaded to the remote server. If it still
            # exists, read it from disk. The `return` here is essential:
            # without it the local read result is discarded and the remote
            # object is fetched below anyway, defeating the patch.
            return read_source_file(file_body) if ::File.exist?(file_body.path)

            # The source file no longer exists, so fall back to the remote
            # content. Clearing @file forces a fresh fetch from the service.
            @file = nil # rubocop:disable Gitlab/ModuleWithInstanceVariables
            file.body
          end

          ##
          # Write file to service
          #
          # === Returns
          #
          # [Boolean] true on success or raises error
          def store(new_file)
            if new_file.is_a?(self.class) # rubocop:disable Cop/LineBreakAroundConditionalBlock
              new_file.copy_to(path)
            else
              # Prefer streaming the backing ::File (if any) over reading
              # the entire upload into memory.
              fog_file = new_file.to_file
              @content_type ||= new_file.content_type # rubocop:disable Gitlab/ModuleWithInstanceVariables
              @file = directory.files.create({ # rubocop:disable Gitlab/ModuleWithInstanceVariables
                :body => fog_file ? fog_file : new_file.read, # rubocop:disable Style/HashSyntax
                :content_type => @content_type, # rubocop:disable Style/HashSyntax,Gitlab/ModuleWithInstanceVariables
                :key => path, # rubocop:disable Style/HashSyntax
                :public => @uploader.fog_public # rubocop:disable Style/HashSyntax,Gitlab/ModuleWithInstanceVariables
              }.merge(@uploader.fog_attributes)) # rubocop:disable Gitlab/ModuleWithInstanceVariables
              fog_file.close if fog_file && !fog_file.closed?
            end

            true
          end

          private

          # Reads the local source file from disk, reopening it first when
          # it has already been closed; always closes the handle afterwards.
          def read_source_file(file_body)
            return unless ::File.exist?(file_body.path)

            begin
              file_body = ::File.open(file_body.path) if file_body.closed? # Reopen if it's already closed
              file_body.read
            ensure
              file_body.close
            end
          end
        end

        prepend MonkeyPatch
      end
    end
  end
end
...@@ -51,7 +51,7 @@ _The uploads are stored by default in ...@@ -51,7 +51,7 @@ _The uploads are stored by default in
### Using object storage **[PREMIUM]** ### Using object storage **[PREMIUM]**
>**Notes:** >**Notes:**
- [Introduced][ee-3867] in [GitLab Premium][eep] 10.5. - [Introduced][ee-3867] in [GitLab Enterprise Edition Premium][eep] 10.5.
- Since version 11.1, we support direct_upload to S3. - Since version 11.1, we support direct_upload to S3.
If you don't want to use the local disk where GitLab is installed to store the If you don't want to use the local disk where GitLab is installed to store the
...@@ -66,7 +66,7 @@ For source installations the following settings are nested under `uploads:` and ...@@ -66,7 +66,7 @@ For source installations the following settings are nested under `uploads:` and
|---------|-------------|---------| |---------|-------------|---------|
| `enabled` | Enable/disable object storage | `false` | | `enabled` | Enable/disable object storage | `false` |
| `remote_directory` | The bucket name where Uploads will be stored| | | `remote_directory` | The bucket name where Uploads will be stored| |
| `direct_upload` | Set to true to enable direct upload of Uploads without the need of local shared storage. Option may be removed once we decide to support only single storage for all files. If enabled Workhorse uploads files directly to the object storage | `false` | | `direct_upload` | Set to true to enable direct upload of Uploads without the need of local shared storage. Option may be removed once we decide to support only single storage for all files. | `false` |
| `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 | `true` | | `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 | `true` |
| `proxy_download` | Set to true to enable proxying all files served. Option allows to reduce egress traffic as this allows clients to download directly from remote storage instead of proxying all data | `false` | | `proxy_download` | Set to true to enable proxying all files served. Option allows to reduce egress traffic as this allows clients to download directly from remote storage instead of proxying all data | `false` |
| `connection` | Various connection options described below | | | `connection` | Various connection options described below | |
......
...@@ -54,6 +54,94 @@ a new presenter specifically for GraphQL. ...@@ -54,6 +54,94 @@ a new presenter specifically for GraphQL.
The presenter is initialized using the object resolved by a field, and The presenter is initialized using the object resolved by a field, and
the context. the context.
### Connection Types
GraphQL uses [cursor based
pagination](https://graphql.org/learn/pagination/#pagination-and-edges)
to expose collections of items. This provides the clients with a lot
of flexibility while also allowing the backend to use different
pagination models.
To expose a collection of resources we can use a connection type. This wraps the array with default pagination fields. For example, a query for a project's pipelines could look like this:
```
query($project_path: ID!) {
project(fullPath: $project_path) {
pipelines(first: 2) {
pageInfo {
hasNextPage
hasPreviousPage
}
edges {
cursor
node {
id
status
}
}
}
}
}
```
This would return the first 2 pipelines of a project and related
pagination info, ordered by descending ID. The returned data would
look like this:
```json
{
"data": {
"project": {
"pipelines": {
"pageInfo": {
"hasNextPage": true,
"hasPreviousPage": false
},
"edges": [
{
"cursor": "Nzc=",
"node": {
"id": "77",
"status": "FAILED"
}
},
{
"cursor": "Njc=",
"node": {
"id": "67",
"status": "FAILED"
}
}
]
}
}
}
}
```
To get the next page, the cursor of the last known element could be
passed:
```
query($project_path: ID!) {
project(fullPath: $project_path) {
pipelines(first: 2, after: "Njc=") {
pageInfo {
hasNextPage
hasPreviousPage
}
edges {
cursor
node {
id
status
}
}
}
}
}
```
### Exposing permissions for a type ### Exposing permissions for a type
To expose permissions the current user has on a resource, you can call To expose permissions the current user has on a resource, you can call
......
...@@ -38,7 +38,7 @@ module EE ...@@ -38,7 +38,7 @@ module EE
end end
def shared_runners_minutes_limit_enabled? def shared_runners_minutes_limit_enabled?
runner && runner.shared? && project.shared_runners_minutes_limit_enabled? runner && runner.instance_type? && project.shared_runners_minutes_limit_enabled?
end end
def stick_build_if_status_changed def stick_build_if_status_changed
......
...@@ -1043,7 +1043,7 @@ module API ...@@ -1043,7 +1043,7 @@ module API
expose :description expose :description
expose :ip_address expose :ip_address
expose :active expose :active
expose :is_shared expose :instance_type?, as: :is_shared
expose :name expose :name
expose :online?, as: :online expose :online?, as: :online
expose :status expose :status
...@@ -1057,7 +1057,7 @@ module API ...@@ -1057,7 +1057,7 @@ module API
expose :access_level expose :access_level
expose :version, :revision, :platform, :architecture expose :version, :revision, :platform, :architecture
expose :contacted_at expose :contacted_at
expose :token, if: lambda { |runner, options| options[:current_user].admin? || !runner.is_shared? } expose :token, if: lambda { |runner, options| options[:current_user].admin? || !runner.instance_type? }
expose :projects, with: Entities::BasicProjectDetails do |runner, options| expose :projects, with: Entities::BasicProjectDetails do |runner, options|
if options[:current_user].admin? if options[:current_user].admin?
runner.projects runner.projects
......
...@@ -31,7 +31,7 @@ module API ...@@ -31,7 +31,7 @@ module API
get ':id/pipelines' do get ':id/pipelines' do
authorize! :read_pipeline, user_project authorize! :read_pipeline, user_project
pipelines = PipelinesFinder.new(user_project, params).execute pipelines = PipelinesFinder.new(user_project, current_user, params).execute
present paginate(pipelines), with: Entities::PipelineBasic present paginate(pipelines), with: Entities::PipelineBasic
end end
......
...@@ -24,13 +24,13 @@ module API ...@@ -24,13 +24,13 @@ module API
attributes = attributes =
if runner_registration_token_valid? if runner_registration_token_valid?
# Create shared runner. Requires admin access # Create shared runner. Requires admin access
attributes.merge(is_shared: true, runner_type: :instance_type) attributes.merge(runner_type: :instance_type)
elsif project = Project.find_by(runners_token: params[:token]) elsif project = Project.find_by(runners_token: params[:token])
# Create a specific runner for the project # Create a specific runner for the project
attributes.merge(is_shared: false, runner_type: :project_type, projects: [project]) attributes.merge(runner_type: :project_type, projects: [project])
elsif group = Group.find_by(runners_token: params[:token]) elsif group = Group.find_by(runners_token: params[:token])
# Create a specific runner for the group # Create a specific runner for the group
attributes.merge(is_shared: false, runner_type: :group_type, groups: [group]) attributes.merge(runner_type: :group_type, groups: [group])
else else
forbidden! forbidden!
end end
......
...@@ -119,7 +119,7 @@ module API ...@@ -119,7 +119,7 @@ module API
use :pagination use :pagination
end end
get ':id/runners' do get ':id/runners' do
runners = filter_runners(Ci::Runner.owned_or_shared(user_project.id), params[:scope]) runners = filter_runners(Ci::Runner.owned_or_instance_wide(user_project.id), params[:scope])
present paginate(runners), with: Entities::Runner present paginate(runners), with: Entities::Runner
end end
...@@ -170,6 +170,11 @@ module API ...@@ -170,6 +170,11 @@ module API
render_api_error!('Scope contains invalid value', 400) render_api_error!('Scope contains invalid value', 400)
end end
# Support deprecated scopes
if runners.respond_to?("deprecated_#{scope}")
scope = "deprecated_#{scope}"
end
runners.public_send(scope) # rubocop:disable GitlabSecurity/PublicSend runners.public_send(scope) # rubocop:disable GitlabSecurity/PublicSend
end end
...@@ -180,7 +185,7 @@ module API ...@@ -180,7 +185,7 @@ module API
end end
def authenticate_show_runner!(runner) def authenticate_show_runner!(runner)
return if runner.is_shared || current_user.admin? return if runner.instance_type? || current_user.admin?
forbidden!("No access granted") unless can?(current_user, :read_runner, runner) forbidden!("No access granted") unless can?(current_user, :read_runner, runner)
end end
......
...@@ -55,7 +55,7 @@ module Gitlab ...@@ -55,7 +55,7 @@ module Gitlab
id: runner.id, id: runner.id,
description: runner.description, description: runner.description,
active: runner.active?, active: runner.active?,
is_shared: runner.is_shared? is_shared: runner.instance_type?
} }
end end
end end
......
...@@ -34,7 +34,7 @@ module Gitlab ...@@ -34,7 +34,7 @@ module Gitlab
end end
def cache_key def cache_key
[@merge_request_diff, 'highlighted-diff-files', diff_options] [@merge_request_diff, 'highlighted-diff-files', Gitlab::Diff::Line::SERIALIZE_KEYS, diff_options]
end end
private private
......
module Gitlab module Gitlab
module Diff module Diff
class Line class Line
SERIALIZE_KEYS = %i(line_code text type index old_pos new_pos).freeze
attr_reader :line_code, :type, :index, :old_pos, :new_pos attr_reader :line_code, :type, :index, :old_pos, :new_pos
attr_writer :rich_text attr_writer :rich_text
attr_accessor :text attr_accessor :text
...@@ -19,13 +21,9 @@ module Gitlab ...@@ -19,13 +21,9 @@ module Gitlab
new(hash[:text], hash[:type], hash[:index], hash[:old_pos], hash[:new_pos], line_code: hash[:line_code]) new(hash[:text], hash[:type], hash[:index], hash[:old_pos], hash[:new_pos], line_code: hash[:line_code])
end end
def serialize_keys
@serialize_keys ||= %i(line_code text type index old_pos new_pos)
end
def to_hash def to_hash
hash = {} hash = {}
serialize_keys.each { |key| hash[key] = send(key) } # rubocop:disable GitlabSecurity/PublicSend SERIALIZE_KEYS.each { |key| hash[key] = send(key) } # rubocop:disable GitlabSecurity/PublicSend
hash hash
end end
......
# Gitaly note: JV: 1 RPC, migration in progress.
# Gitlab::Git::CommitStats counts the additions, deletions, and total changes # Gitlab::Git::CommitStats counts the additions, deletions, and total changes
# in a commit. # in a commit.
module Gitlab module Gitlab
...@@ -16,12 +14,8 @@ module Gitlab ...@@ -16,12 +14,8 @@ module Gitlab
@deletions = 0 @deletions = 0
@total = 0 @total = 0
repo.gitaly_migrate(:commit_stats) do |is_enabled| repo.wrapped_gitaly_errors do
if is_enabled gitaly_stats(repo, commit)
gitaly_stats(repo, commit)
else
rugged_stats(commit)
end
end end
end end
...@@ -31,12 +25,6 @@ module Gitlab ...@@ -31,12 +25,6 @@ module Gitlab
@deletions = stats.deletions @deletions = stats.deletions
@total = @additions + @deletions @total = @additions + @deletions
end end
def rugged_stats(commit)
diff = commit.rugged_diff_from_parent
_files_changed, @additions, @deletions = diff.stat
@total = @additions + @deletions
end
end end
end end
end end
...@@ -251,7 +251,6 @@ module Gitlab ...@@ -251,7 +251,6 @@ module Gitlab
# Returns an Array of Tags # Returns an Array of Tags
# #
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/390
def tags def tags
wrapped_gitaly_errors do wrapped_gitaly_errors do
gitaly_ref_client.tags gitaly_ref_client.tags
...@@ -598,17 +597,9 @@ module Gitlab ...@@ -598,17 +597,9 @@ module Gitlab
# @repository.submodule_url_for('master', 'rack') # @repository.submodule_url_for('master', 'rack')
# # => git@localhost:rack.git # # => git@localhost:rack.git
# #
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/329
def submodule_url_for(ref, path) def submodule_url_for(ref, path)
Gitlab::GitalyClient.migrate(:submodule_url_for) do |is_enabled| wrapped_gitaly_errors do
if is_enabled gitaly_submodule_url_for(ref, path)
gitaly_submodule_url_for(ref, path)
else
if submodules(ref).any?
submodule = submodules(ref)[path]
submodule['url'] if submodule
end
end
end end
end end
...@@ -833,22 +824,14 @@ module Gitlab ...@@ -833,22 +824,14 @@ module Gitlab
# Ex. # Ex.
# repo.ls_files('master') # repo.ls_files('master')
# #
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/327
def ls_files(ref) def ls_files(ref)
gitaly_commit_client.ls_files(ref) gitaly_commit_client.ls_files(ref)
end end
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/328
def copy_gitattributes(ref) def copy_gitattributes(ref)
Gitlab::GitalyClient.migrate(:apply_gitattributes) do |is_enabled| wrapped_gitaly_errors do
if is_enabled gitaly_repository_client.apply_gitattributes(ref)
gitaly_copy_gitattributes(ref)
else
rugged_copy_gitattributes(ref)
end
end end
rescue GRPC::InvalidArgument
raise InvalidRef
end end
def info_attributes def info_attributes
......
# Gitaly note: JV: needs 1 RPC, migration is in progress.
module Gitlab module Gitlab
module Git module Git
class Tree class Tree
...@@ -17,12 +15,8 @@ module Gitlab ...@@ -17,12 +15,8 @@ module Gitlab
def where(repository, sha, path = nil, recursive = false) def where(repository, sha, path = nil, recursive = false)
path = nil if path == '' || path == '/' path = nil if path == '' || path == '/'
Gitlab::GitalyClient.migrate(:tree_entries) do |is_enabled| repository.wrapped_gitaly_errors do
if is_enabled repository.gitaly_commit_client.tree_entries(repository, sha, path, recursive)
repository.gitaly_commit_client.tree_entries(repository, sha, path, recursive)
else
tree_entries_from_rugged(repository, sha, path, recursive)
end
end end
end end
......
...@@ -48,6 +48,8 @@ module Gitlab ...@@ -48,6 +48,8 @@ module Gitlab
def apply_gitattributes(revision) def apply_gitattributes(revision)
request = Gitaly::ApplyGitattributesRequest.new(repository: @gitaly_repo, revision: encode_binary(revision)) request = Gitaly::ApplyGitattributesRequest.new(repository: @gitaly_repo, revision: encode_binary(revision))
GitalyClient.call(@storage, :repository_service, :apply_gitattributes, request) GitalyClient.call(@storage, :repository_service, :apply_gitattributes, request)
rescue GRPC::InvalidArgument => ex
raise Gitlab::Git::Repository::InvalidRef, ex
end end
def info_attributes def info_attributes
......
module Gitlab
  module Graphql
    # Registers GitLab's connection implementations with graphql-ruby.
    module Connections
      # Hooks keyset-based pagination up for ActiveRecord relations.
      # The schema argument is required by the plugin interface but unused.
      def self.use(_schema)
        implementation = Gitlab::Graphql::Connections::KeysetConnection

        GraphQL::Relay::BaseConnection
          .register_connection_implementation(ActiveRecord::Relation, implementation)
      end
    end
  end
end
module Gitlab
  module Graphql
    module Connections
      # Keyset (cursor) based pagination for ActiveRecord relations.
      #
      # Cursors encode the value of the relation's first ORDER BY column
      # (falling back to the primary key), so paging does not rely on
      # OFFSET and stays stable while rows are inserted or deleted.
      class KeysetConnection < GraphQL::Relay::BaseConnection
        # A node's cursor is its order-field value, encoded by the base class.
        def cursor_from_node(node)
          encode(node[order_field].to_s)
        end

        # Relation with the `before`/`after` cursors applied as WHERE clauses.
        def sliced_nodes
          @sliced_nodes ||=
            begin
              sliced = nodes
              sliced = sliced.where(before_slice) if before.present?
              sliced = sliced.where(after_slice) if after.present?
              sliced
            end
        end

        # Applies the `first`/`last` page-size limits on the sliced relation.
        # Providing both is rejected as ambiguous.
        def paged_nodes
          if first && last
            raise Gitlab::Graphql::Errors::ArgumentError.new("Can only provide either `first` or `last`, not both")
          end

          if last
            sliced_nodes.last(limit_value)
          else
            sliced_nodes.limit(limit_value)
          end
        end

        private

        # Arel condition selecting rows strictly before the `before` cursor,
        # respecting the sort direction.
        def before_slice
          if sort_direction == :asc
            table[order_field].lt(decode(before))
          else
            table[order_field].gt(decode(before))
          end
        end

        # Arel condition selecting rows strictly after the `after` cursor,
        # respecting the sort direction.
        def after_slice
          if sort_direction == :asc
            table[order_field].gt(decode(after))
          else
            table[order_field].lt(decode(after))
          end
        end

        # Effective page size: the smallest of `first`, `last` and the
        # schema's max_page_size.
        def limit_value
          @limit_value ||= [first, last, max_page_size].compact.min
        end

        def table
          nodes.arel_table
        end

        # First ORDER BY clause of the relation, if any.
        def order_info
          @order_info ||= nodes.order_values.first
        end

        # Column used for the cursor; defaults to the primary key.
        def order_field
          @order_field ||= order_info&.expr&.name || nodes.primary_key
        end

        # :asc or :desc; defaults to :desc when the relation is unordered.
        # Memoized under @sort_direction to match the method name (was
        # inconsistently stored in @order_direction).
        def sort_direction
          @sort_direction ||= order_info&.direction || :desc
        end
      end
    end
  end
end
module Gitlab
  module Graphql
    # Error classes surfaced to GraphQL clients as execution errors.
    module Errors
      class BaseError < GraphQL::ExecutionError
      end

      class ArgumentError < BaseError
      end
    end
  end
end
...@@ -3,6 +3,8 @@ module Gitlab ...@@ -3,6 +3,8 @@ module Gitlab
module Present module Present
class Instrumentation class Instrumentation
def instrument(type, field) def instrument(type, field)
return field unless field.metadata[:type_class]
presented_in = field.metadata[:type_class].owner presented_in = field.metadata[:type_class].owner
return field unless presented_in.respond_to?(:presenter_class) return field unless presented_in.respond_to?(:presenter_class)
return field unless presented_in.presenter_class return field unless presented_in.presenter_class
......
...@@ -42,10 +42,10 @@ module Gitlab ...@@ -42,10 +42,10 @@ module Gitlab
key, value = parsed_field.first key, value = parsed_field.first
if value.nil? if value.nil?
value = open_file(tmp_path, @request.params["#{key}.name"]) value = open_file(@request.params, key)
@open_files << value @open_files << value
else else
value = decorate_params_value(value, @request.params[key], tmp_path) value = decorate_params_value(value, @request.params[key])
end end
@request.update_param(key, value) @request.update_param(key, value)
...@@ -57,7 +57,7 @@ module Gitlab ...@@ -57,7 +57,7 @@ module Gitlab
end end
# This function calls itself recursively # This function calls itself recursively
def decorate_params_value(path_hash, value_hash, tmp_path) def decorate_params_value(path_hash, value_hash)
unless path_hash.is_a?(Hash) && path_hash.count == 1 unless path_hash.is_a?(Hash) && path_hash.count == 1
raise "invalid path: #{path_hash.inspect}" raise "invalid path: #{path_hash.inspect}"
end end
...@@ -70,19 +70,21 @@ module Gitlab ...@@ -70,19 +70,21 @@ module Gitlab
case path_value case path_value
when nil when nil
value_hash[path_key] = open_file(tmp_path, value_hash.dig(path_key, '.name')) value_hash[path_key] = open_file(value_hash.dig(path_key), '')
@open_files << value_hash[path_key] @open_files << value_hash[path_key]
value_hash value_hash
when Hash when Hash
decorate_params_value(path_value, value_hash[path_key], tmp_path) decorate_params_value(path_value, value_hash[path_key])
value_hash value_hash
else else
raise "unexpected path value: #{path_value.inspect}" raise "unexpected path value: #{path_value.inspect}"
end end
end end
def open_file(path, name) def open_file(params, key)
::UploadedFile.new(path, filename: name || File.basename(path), content_type: 'application/octet-stream') ::UploadedFile.from_params(
params, key,
Gitlab.config.uploads.storage_path)
end end
end end
......
...@@ -64,6 +64,10 @@ module QA ...@@ -64,6 +64,10 @@ module QA
def gcloud_zone def gcloud_zone
ENV.fetch('GCLOUD_ZONE') ENV.fetch('GCLOUD_ZONE')
end end
def has_gcloud_credentials?
%w[GCLOUD_ACCOUNT_KEY GCLOUD_ACCOUNT_EMAIL].none? { |var| ENV[var].to_s.empty? }
end
end end
end end
end end
...@@ -50,11 +50,15 @@ module QA ...@@ -50,11 +50,15 @@ module QA
end end
def login_if_not_already_logged_in def login_if_not_already_logged_in
account = `gcloud auth list --filter=status:ACTIVE --format="value(account)"` if Runtime::Env.has_gcloud_credentials?
if account.empty?
attempt_login_with_env_vars attempt_login_with_env_vars
else else
puts "gcloud account found. Using: #{account} for creating K8s cluster." account = `gcloud auth list --filter=status:ACTIVE --format="value(account)"`
if account.empty?
raise "Failed to login to gcloud. No credentials provided in environment and no credentials found locally."
else
puts "gcloud account found. Using: #{account} for creating K8s cluster."
end
end end
end end
......
...@@ -4,7 +4,7 @@ describe Projects::PipelinesController do ...@@ -4,7 +4,7 @@ describe Projects::PipelinesController do
include ApiHelpers include ApiHelpers
set(:user) { create(:user) } set(:user) { create(:user) }
set(:project) { create(:project, :public, :repository) } let(:project) { create(:project, :public, :repository) }
let(:feature) { ProjectFeature::DISABLED } let(:feature) { ProjectFeature::DISABLED }
before do before do
...@@ -74,7 +74,7 @@ describe Projects::PipelinesController do ...@@ -74,7 +74,7 @@ describe Projects::PipelinesController do
expect(stages.count).to eq 3 expect(stages.count).to eq 3
end end
expect(queries.count).to be_within(3).of(30) expect(queries.count).to be_within(5).of(30)
end end
end end
...@@ -91,6 +91,24 @@ describe Projects::PipelinesController do ...@@ -91,6 +91,24 @@ describe Projects::PipelinesController do
end end
end end
context 'when the project is private' do
let(:project) { create(:project, :private, :repository) }
it 'returns `not_found` when the user does not have access' do
sign_in(create(:user))
get_pipelines_index_json
expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns the pipelines when the user has access' do
get_pipelines_index_json
expect(json_response['pipelines'].size).to eq(5)
end
end
def get_pipelines_index_json def get_pipelines_index_json
get :index, namespace_id: project.namespace, get :index, namespace_id: project.namespace,
project_id: project, project_id: project,
......
...@@ -6,7 +6,6 @@ FactoryBot.define do ...@@ -6,7 +6,6 @@ FactoryBot.define do
active true active true
access_level :not_protected access_level :not_protected
is_shared true
runner_type :instance_type runner_type :instance_type
trait :online do trait :online do
...@@ -14,12 +13,10 @@ FactoryBot.define do ...@@ -14,12 +13,10 @@ FactoryBot.define do
end end
trait :instance do trait :instance do
is_shared true
runner_type :instance_type runner_type :instance_type
end end
trait :group do trait :group do
is_shared false
runner_type :group_type runner_type :group_type
after(:build) do |runner, evaluator| after(:build) do |runner, evaluator|
...@@ -28,7 +25,6 @@ FactoryBot.define do ...@@ -28,7 +25,6 @@ FactoryBot.define do
end end
trait :project do trait :project do
is_shared false
runner_type :project_type runner_type :project_type
after(:build) do |runner, evaluator| after(:build) do |runner, evaluator|
......
require 'spec_helper' require 'spec_helper'
describe PipelinesFinder do describe PipelinesFinder do
let(:project) { create(:project, :repository) } let(:project) { create(:project, :public, :repository) }
let(:current_user) { nil }
subject { described_class.new(project, params).execute } let(:params) { {} }
subject { described_class.new(project, current_user, params).execute }
describe "#execute" do describe "#execute" do
context 'when params is empty' do context 'when params is empty' do
...@@ -223,5 +224,27 @@ describe PipelinesFinder do ...@@ -223,5 +224,27 @@ describe PipelinesFinder do
end end
end end
end end
context 'when the project has limited access to piplines' do
let(:project) { create(:project, :private, :repository) }
let(:current_user) { create(:user) }
let!(:pipelines) { create_list(:ci_pipeline, 2, project: project) }
context 'when the user has access' do
before do
project.add_developer(current_user)
end
it 'is expected to return pipelines' do
is_expected.to contain_exactly(*pipelines)
end
end
context 'the user is not allowed to read pipelines' do
it 'returns empty' do
is_expected.to be_empty
end
end
end
end end
end end
...@@ -27,6 +27,12 @@ describe GitlabSchema do ...@@ -27,6 +27,12 @@ describe GitlabSchema do
expect(described_class.query).to eq(::Types::QueryType.to_graphql) expect(described_class.query).to eq(::Types::QueryType.to_graphql)
end end
it 'paginates active record relations using `Gitlab::Graphql::Connections::KeysetConnection`' do
connection = GraphQL::Relay::BaseConnection::CONNECTION_IMPLEMENTATIONS[ActiveRecord::Relation.name]
expect(connection).to eq(Gitlab::Graphql::Connections::KeysetConnection)
end
def field_instrumenters def field_instrumenters
described_class.instrumenters[:field] described_class.instrumenters[:field]
end end
......
require 'spec_helper'

# Spec for the shared ResolvesPipelines concern: builds an anonymous
# resolver class that mixes it in and checks the filtering it provides.
describe ResolvesPipelines do
  include GraphqlHelpers

  subject(:resolver) do
    Class.new(Resolvers::BaseResolver) do
      include ResolvesPipelines

      def resolve(**args)
        resolve_pipelines(object, args)
      end
    end
  end

  let(:current_user) { create(:user) }

  # One pipeline per filterable attribute, all on the same private project.
  set(:project) { create(:project, :private) }
  set(:pipeline) { create(:ci_pipeline, project: project) }
  set(:failed_pipeline) { create(:ci_pipeline, :failed, project: project) }
  set(:ref_pipeline) { create(:ci_pipeline, project: project, ref: 'awesome-feature') }
  set(:sha_pipeline) { create(:ci_pipeline, project: project, sha: 'deadbeef') }

  before do
    project.add_developer(current_user)
  end

  it { is_expected.to have_graphql_arguments(:status, :ref, :sha) }

  it 'finds all pipelines' do
    expect(resolve_pipelines).to contain_exactly(pipeline, failed_pipeline, ref_pipeline, sha_pipeline)
  end

  it 'allows filtering by status' do
    expect(resolve_pipelines(status: 'failed')).to contain_exactly(failed_pipeline)
  end

  it 'allows filtering by ref' do
    expect(resolve_pipelines(ref: 'awesome-feature')).to contain_exactly(ref_pipeline)
  end

  it 'allows filtering by sha' do
    expect(resolve_pipelines(sha: 'deadbeef')).to contain_exactly(sha_pipeline)
  end

  it 'does not return any pipelines if the user does not have access' do
    # Empty context: no current_user, so the private project's pipelines
    # should be invisible.
    expect(resolve_pipelines({}, {})).to be_empty
  end

  # Resolves through the anonymous resolver; defaults to an authorized context.
  def resolve_pipelines(args = {}, context = { current_user: current_user })
    resolve(resolver, obj: project, args: args, ctx: context)
  end
end
require 'spec_helper'

describe Resolvers::MergeRequestPipelinesResolver do
  include GraphqlHelpers

  set(:merge_request) { create(:merge_request) }
  # Pipeline on the MR's own source branch and head sha: the one that
  # should be resolved.
  set(:pipeline) do
    create(
      :ci_pipeline,
      project: merge_request.source_project,
      ref: merge_request.source_branch,
      sha: merge_request.diff_head_sha
    )
  end
  # Same project; presumably its factory ref/sha do not match the MR,
  # so it should be excluded — confirm against the ci_pipeline factory.
  set(:other_project_pipeline) { create(:ci_pipeline, project: merge_request.source_project) }
  # Pipeline on an unrelated project; should also be excluded.
  set(:other_pipeline) { create(:ci_pipeline) }
  let(:current_user) { create(:user) }

  before do
    merge_request.project.add_developer(current_user)
  end

  def resolve_pipelines
    resolve(described_class, obj: merge_request, ctx: { current_user: current_user })
  end

  it 'resolves only MRs for the passed merge request' do
    expect(resolve_pipelines).to contain_exactly(pipeline)
  end
end
require 'spec_helper'

describe Resolvers::ProjectPipelinesResolver do
  include GraphqlHelpers

  set(:project) { create(:project) }
  set(:pipeline) { create(:ci_pipeline, project: project) }
  # Pipeline on an unrelated project; must not be resolved.
  set(:other_pipeline) { create(:ci_pipeline) }
  let(:current_user) { create(:user) }

  before do
    project.add_developer(current_user)
  end

  def resolve_pipelines
    resolve(described_class, obj: project, ctx: { current_user: current_user })
  end

  # Description fixed: this resolver returns project pipelines, not MRs
  # (the old wording was copy-pasted from the merge request resolver spec).
  it 'resolves only pipelines for the passed project' do
    expect(resolve_pipelines).to contain_exactly(pipeline)
  end
end
require 'spec_helper'

# Schema-level checks for the GraphQL pipeline type.
describe Types::Ci::PipelineType do
  # The type is exposed in the schema under the name `Pipeline`.
  it { expect(described_class.graphql_name).to eq('Pipeline') }

  it { expect(described_class).to expose_permissions_using(Types::PermissionTypes::Ci::Pipeline) }
end
require 'spec_helper' require 'spec_helper'
describe Types::MergeRequestType do describe GitlabSchema.types['MergeRequest'] do
it { expect(described_class).to expose_permissions_using(Types::PermissionTypes::MergeRequest) } it { expect(described_class).to expose_permissions_using(Types::PermissionTypes::MergeRequest) }
describe 'head pipeline' do
it 'has a head pipeline field' do
expect(described_class).to have_graphql_field(:head_pipeline)
end
it 'authorizes the field' do
expect(described_class.fields['headPipeline'])
.to require_graphql_authorizations(:read_pipeline)
end
end
end end
...@@ -13,4 +13,6 @@ describe GitlabSchema.types['Project'] do ...@@ -13,4 +13,6 @@ describe GitlabSchema.types['Project'] do
.to require_graphql_authorizations(:read_merge_request) .to require_graphql_authorizations(:read_merge_request)
end end
end end
it { is_expected.to have_graphql_field(:pipelines) }
end end
...@@ -2,7 +2,7 @@ import MockAdapter from 'axios-mock-adapter'; ...@@ -2,7 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils'; import axios from '~/lib/utils/axios_utils';
import state from '~/ide/stores/modules/merge_requests/state'; import state from '~/ide/stores/modules/merge_requests/state';
import * as types from '~/ide/stores/modules/merge_requests/mutation_types'; import * as types from '~/ide/stores/modules/merge_requests/mutation_types';
import actions, { import {
requestMergeRequests, requestMergeRequests,
receiveMergeRequestsError, receiveMergeRequestsError,
receiveMergeRequestsSuccess, receiveMergeRequestsSuccess,
...@@ -41,28 +41,26 @@ describe('IDE merge requests actions', () => { ...@@ -41,28 +41,26 @@ describe('IDE merge requests actions', () => {
}); });
describe('receiveMergeRequestsError', () => { describe('receiveMergeRequestsError', () => {
let flashSpy;
beforeEach(() => {
flashSpy = spyOnDependency(actions, 'flash');
});
it('should should commit error', done => { it('should should commit error', done => {
testAction( testAction(
receiveMergeRequestsError, receiveMergeRequestsError,
'created', { type: 'created', search: '' },
mockedState, mockedState,
[{ type: types.RECEIVE_MERGE_REQUESTS_ERROR, payload: 'created' }], [{ type: types.RECEIVE_MERGE_REQUESTS_ERROR, payload: 'created' }],
[], [
{
type: 'setErrorMessage',
payload: {
text: 'Error loading merge requests.',
action: jasmine.any(Function),
actionText: 'Please try again',
actionPayload: { type: 'created', search: '' },
},
},
],
done, done,
); );
}); });
it('creates flash message', () => {
receiveMergeRequestsError({ commit() {} }, 'created');
expect(flashSpy).toHaveBeenCalled();
});
}); });
describe('receiveMergeRequestsSuccess', () => { describe('receiveMergeRequestsSuccess', () => {
......
import Visibility from 'visibilityjs'; import Visibility from 'visibilityjs';
import MockAdapter from 'axios-mock-adapter'; import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils'; import axios from '~/lib/utils/axios_utils';
import actions, { import {
requestLatestPipeline, requestLatestPipeline,
receiveLatestPipelineError, receiveLatestPipelineError,
receiveLatestPipelineSuccess, receiveLatestPipelineSuccess,
...@@ -59,7 +59,7 @@ describe('IDE pipelines actions', () => { ...@@ -59,7 +59,7 @@ describe('IDE pipelines actions', () => {
it('commits error', done => { it('commits error', done => {
testAction( testAction(
receiveLatestPipelineError, receiveLatestPipelineError,
null, { status: 404 },
mockedState, mockedState,
[{ type: types.RECEIVE_LASTEST_PIPELINE_ERROR }], [{ type: types.RECEIVE_LASTEST_PIPELINE_ERROR }],
[{ type: 'stopPipelinePolling' }], [{ type: 'stopPipelinePolling' }],
...@@ -67,12 +67,26 @@ describe('IDE pipelines actions', () => { ...@@ -67,12 +67,26 @@ describe('IDE pipelines actions', () => {
); );
}); });
it('creates flash message', () => { it('dispatches setErrorMessage is not 404', done => {
const flashSpy = spyOnDependency(actions, 'flash'); testAction(
receiveLatestPipelineError,
receiveLatestPipelineError({ commit() {}, dispatch() {} }); { status: 500 },
mockedState,
expect(flashSpy).toHaveBeenCalled(); [{ type: types.RECEIVE_LASTEST_PIPELINE_ERROR }],
[
{
type: 'setErrorMessage',
payload: {
text: 'An error occured whilst fetching the latest pipline.',
action: jasmine.any(Function),
actionText: 'Please try again',
actionPayload: null,
},
},
{ type: 'stopPipelinePolling' },
],
done,
);
}); });
}); });
...@@ -181,7 +195,10 @@ describe('IDE pipelines actions', () => { ...@@ -181,7 +195,10 @@ describe('IDE pipelines actions', () => {
new Promise(resolve => requestAnimationFrame(resolve)) new Promise(resolve => requestAnimationFrame(resolve))
.then(() => { .then(() => {
expect(dispatch.calls.argsFor(1)).toEqual(['receiveLatestPipelineError']); expect(dispatch.calls.argsFor(1)).toEqual([
'receiveLatestPipelineError',
jasmine.anything(),
]);
}) })
.then(done) .then(done)
.catch(done.fail); .catch(done.fail);
...@@ -199,21 +216,23 @@ describe('IDE pipelines actions', () => { ...@@ -199,21 +216,23 @@ describe('IDE pipelines actions', () => {
it('commits error', done => { it('commits error', done => {
testAction( testAction(
receiveJobsError, receiveJobsError,
1, { id: 1 },
mockedState, mockedState,
[{ type: types.RECEIVE_JOBS_ERROR, payload: 1 }], [{ type: types.RECEIVE_JOBS_ERROR, payload: 1 }],
[], [
{
type: 'setErrorMessage',
payload: {
text: 'An error occured whilst loading the pipelines jobs.',
action: jasmine.anything(),
actionText: 'Please try again',
actionPayload: { id: 1 },
},
},
],
done, done,
); );
}); });
it('creates flash message', () => {
const flashSpy = spyOnDependency(actions, 'flash');
receiveJobsError({ commit() {} }, 1);
expect(flashSpy).toHaveBeenCalled();
});
}); });
describe('receiveJobsSuccess', () => { describe('receiveJobsSuccess', () => {
...@@ -268,7 +287,7 @@ describe('IDE pipelines actions', () => { ...@@ -268,7 +287,7 @@ describe('IDE pipelines actions', () => {
[], [],
[ [
{ type: 'requestJobs', payload: stage.id }, { type: 'requestJobs', payload: stage.id },
{ type: 'receiveJobsError', payload: stage.id }, { type: 'receiveJobsError', payload: stage },
], ],
done, done,
); );
...@@ -337,18 +356,20 @@ describe('IDE pipelines actions', () => { ...@@ -337,18 +356,20 @@ describe('IDE pipelines actions', () => {
null, null,
mockedState, mockedState,
[{ type: types.RECEIVE_JOB_TRACE_ERROR }], [{ type: types.RECEIVE_JOB_TRACE_ERROR }],
[], [
{
type: 'setErrorMessage',
payload: {
text: 'An error occured whilst fetching the job trace.',
action: jasmine.any(Function),
actionText: 'Please try again',
actionPayload: null,
},
},
],
done, done,
); );
}); });
it('creates flash message', () => {
const flashSpy = spyOnDependency(actions, 'flash');
receiveJobTraceError({ commit() {} });
expect(flashSpy).toHaveBeenCalled();
});
}); });
describe('receiveJobTraceSuccess', () => { describe('receiveJobTraceSuccess', () => {
......
...@@ -20,6 +20,15 @@ describe Gitlab::Diff::FileCollection::MergeRequestDiff do ...@@ -20,6 +20,15 @@ describe Gitlab::Diff::FileCollection::MergeRequestDiff do
diff_files diff_files
end end
it 'it uses a different cache key if diff line keys change' do
mr_diff = described_class.new(merge_request.merge_request_diff, diff_options: nil)
key = mr_diff.cache_key
stub_const('Gitlab::Diff::Line::SERIALIZE_KEYS', [:foo])
expect(mr_diff.cache_key).not_to eq(key)
end
shared_examples 'initializes a DiffCollection' do shared_examples 'initializes a DiffCollection' do
it 'returns a valid instance of a DiffCollection' do it 'returns a valid instance of a DiffCollection' do
expect(diff_files).to be_a(Gitlab::Git::DiffCollection) expect(diff_files).to be_a(Gitlab::Git::DiffCollection)
......
...@@ -1402,94 +1402,84 @@ describe Gitlab::Git::Repository, seed_helper: true do ...@@ -1402,94 +1402,84 @@ describe Gitlab::Git::Repository, seed_helper: true do
end end
describe "#copy_gitattributes" do describe "#copy_gitattributes" do
shared_examples 'applying git attributes' do let(:attributes_path) { File.join(SEED_STORAGE_PATH, TEST_REPO_PATH, 'info/attributes') }
let(:attributes_path) { File.join(SEED_STORAGE_PATH, TEST_REPO_PATH, 'info/attributes') }
after do after do
FileUtils.rm_rf(attributes_path) if Dir.exist?(attributes_path) FileUtils.rm_rf(attributes_path) if Dir.exist?(attributes_path)
end end
it "raises an error with invalid ref" do
expect { repository.copy_gitattributes("invalid") }.to raise_error(Gitlab::Git::Repository::InvalidRef)
end
context 'when forcing encoding issues' do
let(:branch_name) { "ʕ•ᴥ•ʔ" }
before do it "raises an error with invalid ref" do
repository.create_branch(branch_name, "master") expect { repository.copy_gitattributes("invalid") }.to raise_error(Gitlab::Git::Repository::InvalidRef)
end end
after do context 'when forcing encoding issues' do
repository.rm_branch(branch_name, user: build(:admin)) let(:branch_name) { "ʕ•ᴥ•ʔ" }
end
it "doesn't raise with a valid unicode ref" do before do
expect { repository.copy_gitattributes(branch_name) }.not_to raise_error repository.create_branch(branch_name, "master")
end
repository after do
end repository.rm_branch(branch_name, user: build(:admin))
end end
context "with no .gitattrbutes" do it "doesn't raise with a valid unicode ref" do
before do expect { repository.copy_gitattributes(branch_name) }.not_to raise_error
repository.copy_gitattributes("master")
end
it "does not have an info/attributes" do repository
expect(File.exist?(attributes_path)).to be_falsey
end
end end
end
context "with .gitattrbutes" do context "with no .gitattrbutes" do
before do before do
repository.copy_gitattributes("gitattributes") repository.copy_gitattributes("master")
end end
it "has an info/attributes" do it "does not have an info/attributes" do
expect(File.exist?(attributes_path)).to be_truthy expect(File.exist?(attributes_path)).to be_falsey
end end
end
it "has the same content in info/attributes as .gitattributes" do context "with .gitattrbutes" do
contents = File.open(attributes_path, "rb") { |f| f.read } before do
expect(contents).to eq("*.md binary\n") repository.copy_gitattributes("gitattributes")
end
end end
context "with updated .gitattrbutes" do it "has an info/attributes" do
before do expect(File.exist?(attributes_path)).to be_truthy
repository.copy_gitattributes("gitattributes") end
repository.copy_gitattributes("gitattributes-updated")
end
it "has an info/attributes" do it "has the same content in info/attributes as .gitattributes" do
expect(File.exist?(attributes_path)).to be_truthy contents = File.open(attributes_path, "rb") { |f| f.read }
end expect(contents).to eq("*.md binary\n")
end
end
it "has the updated content in info/attributes" do context "with updated .gitattrbutes" do
contents = File.read(attributes_path) before do
expect(contents).to eq("*.txt binary\n") repository.copy_gitattributes("gitattributes")
end repository.copy_gitattributes("gitattributes-updated")
end end
context "with no .gitattrbutes in HEAD but with previous info/attributes" do it "has an info/attributes" do
before do expect(File.exist?(attributes_path)).to be_truthy
repository.copy_gitattributes("gitattributes") end
repository.copy_gitattributes("master")
end
it "does not have an info/attributes" do it "has the updated content in info/attributes" do
expect(File.exist?(attributes_path)).to be_falsey contents = File.read(attributes_path)
end expect(contents).to eq("*.txt binary\n")
end end
end end
context 'when gitaly is enabled' do context "with no .gitattrbutes in HEAD but with previous info/attributes" do
it_behaves_like 'applying git attributes' before do
end repository.copy_gitattributes("gitattributes")
repository.copy_gitattributes("master")
end
context 'when gitaly is disabled', :disable_gitaly do it "does not have an info/attributes" do
it_behaves_like 'applying git attributes' expect(File.exist?(attributes_path)).to be_falsey
end
end end
end end
......
require 'spec_helper'
describe Gitlab::Graphql::Connections::KeysetConnection do
let(:nodes) { Project.all.order(id: :asc) }
let(:arguments) { {} }
subject(:connection) do
described_class.new(nodes, arguments, max_page_size: 3)
end
def encoded_property(value)
Base64.strict_encode64(value.to_s)
end
describe '#cursor_from_nodes' do
let(:project) { create(:project) }
it 'returns an encoded ID' do
expect(connection.cursor_from_node(project))
.to eq(encoded_property(project.id))
end
context 'when an order was specified' do
let(:nodes) { Project.order(:updated_at) }
it 'returns the encoded value of the order' do
expect(connection.cursor_from_node(project))
.to eq(encoded_property(project.updated_at))
end
end
end
describe '#sliced_nodes' do
let(:projects) { create_list(:project, 4) }
context 'when before is passed' do
let(:arguments) { { before: encoded_property(projects[1].id) } }
it 'only returns the project before the selected one' do
expect(subject.sliced_nodes).to contain_exactly(projects.first)
end
context 'when the sort order is descending' do
let(:nodes) { Project.all.order(id: :desc) }
it 'returns the correct nodes' do
expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
end
end
end
context 'when after is passed' do
let(:arguments) { { after: encoded_property(projects[1].id) } }
it 'only returns the project before the selected one' do
expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
end
context 'when the sort order is descending' do
let(:nodes) { Project.all.order(id: :desc) }
it 'returns the correct nodes' do
expect(subject.sliced_nodes).to contain_exactly(projects.first)
end
end
end
context 'when both before and after are passed' do
let(:arguments) do
{
after: encoded_property(projects[1].id),
before: encoded_property(projects[3].id)
}
end
it 'returns the expected set' do
expect(subject.sliced_nodes).to contain_exactly(projects[2])
end
end
end
describe '#paged_nodes' do
let!(:projects) { create_list(:project, 5) }
it 'returns the collection limited to max page size' do
expect(subject.paged_nodes.size).to eq(3)
end
context 'when `first` is passed' do
let(:arguments) { { first: 2 } }
it 'returns only the first elements' do
expect(subject.paged_nodes).to contain_exactly(projects.first, projects.second)
end
end
context 'when `last` is passed' do
let(:arguments) { { last: 2 } }
it 'returns only the last elements' do
expect(subject.paged_nodes).to contain_exactly(projects[3], projects[4])
end
end
context 'when both are passed' do
let(:arguments) { { first: 2, last: 2 } }
it 'raises an error' do
expect { subject.paged_nodes }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
end
end
end
end
...@@ -7,18 +7,47 @@ describe Gitlab::Middleware::Multipart do ...@@ -7,18 +7,47 @@ describe Gitlab::Middleware::Multipart do
let(:middleware) { described_class.new(app) } let(:middleware) { described_class.new(app) }
let(:original_filename) { 'filename' } let(:original_filename) { 'filename' }
it 'opens top-level files' do shared_examples_for 'multipart upload files' do
Tempfile.open('top-level') do |tempfile| it 'opens top-level files' do
env = post_env({ 'file' => tempfile.path }, { 'file.name' => original_filename }, Gitlab::Workhorse.secret, 'gitlab-workhorse') Tempfile.open('top-level') do |tempfile|
env = post_env({ 'file' => tempfile.path }, { 'file.name' => original_filename, 'file.path' => tempfile.path, 'file.remote_id' => remote_id }, Gitlab::Workhorse.secret, 'gitlab-workhorse')
expect_uploaded_file(tempfile, %w(file))
middleware.call(env)
end
end
it 'opens files one level deep' do
Tempfile.open('one-level') do |tempfile|
in_params = { 'user' => { 'avatar' => { '.name' => original_filename, '.path' => tempfile.path, '.remote_id' => remote_id } } }
env = post_env({ 'user[avatar]' => tempfile.path }, in_params, Gitlab::Workhorse.secret, 'gitlab-workhorse')
expect_uploaded_file(tempfile, %w(user avatar))
middleware.call(env)
end
end
it 'opens files two levels deep' do
Tempfile.open('two-levels') do |tempfile|
in_params = { 'project' => { 'milestone' => { 'themesong' => { '.name' => original_filename, '.path' => tempfile.path, '.remote_id' => remote_id } } } }
env = post_env({ 'project[milestone][themesong]' => tempfile.path }, in_params, Gitlab::Workhorse.secret, 'gitlab-workhorse')
expect_uploaded_file(tempfile, %w(project milestone themesong))
middleware.call(env)
end
end
def expect_uploaded_file(tempfile, path, remote: false)
expect(app).to receive(:call) do |env| expect(app).to receive(:call) do |env|
file = Rack::Request.new(env).params['file'] file = Rack::Request.new(env).params.dig(*path)
expect(file).to be_a(::UploadedFile) expect(file).to be_a(::UploadedFile)
expect(file.path).to eq(tempfile.path) expect(file.path).to eq(tempfile.path)
expect(file.original_filename).to eq(original_filename) expect(file.original_filename).to eq(original_filename)
expect(file.remote_id).to eq(remote_id)
end end
middleware.call(env)
end end
end end
...@@ -34,36 +63,16 @@ describe Gitlab::Middleware::Multipart do ...@@ -34,36 +63,16 @@ describe Gitlab::Middleware::Multipart do
expect { middleware.call(env) }.to raise_error(JWT::InvalidIssuerError) expect { middleware.call(env) }.to raise_error(JWT::InvalidIssuerError)
end end
it 'opens files one level deep' do context 'with remote file' do
Tempfile.open('one-level') do |tempfile| let(:remote_id) { 'someid' }
in_params = { 'user' => { 'avatar' => { '.name' => original_filename } } }
env = post_env({ 'user[avatar]' => tempfile.path }, in_params, Gitlab::Workhorse.secret, 'gitlab-workhorse')
expect(app).to receive(:call) do |env| it_behaves_like 'multipart upload files'
file = Rack::Request.new(env).params['user']['avatar']
expect(file).to be_a(::UploadedFile)
expect(file.path).to eq(tempfile.path)
expect(file.original_filename).to eq(original_filename)
end
middleware.call(env)
end
end end
it 'opens files two levels deep' do context 'with local file' do
Tempfile.open('two-levels') do |tempfile| let(:remote_id) { nil }
in_params = { 'project' => { 'milestone' => { 'themesong' => { '.name' => original_filename } } } }
env = post_env({ 'project[milestone][themesong]' => tempfile.path }, in_params, Gitlab::Workhorse.secret, 'gitlab-workhorse')
expect(app).to receive(:call) do |env| it_behaves_like 'multipart upload files'
file = Rack::Request.new(env).params['project']['milestone']['themesong']
expect(file).to be_a(::UploadedFile)
expect(file.path).to eq(tempfile.path)
expect(file.original_filename).to eq(original_filename)
end
middleware.call(env)
end
end end
def post_env(rewritten_fields, params, secret, issuer) def post_env(rewritten_fields, params, secret, issuer)
......
...@@ -105,7 +105,7 @@ describe Ci::Runner do ...@@ -105,7 +105,7 @@ describe Ci::Runner do
end end
end end
describe '.shared' do describe '.instance_type' do
let(:group) { create(:group) } let(:group) { create(:group) }
let(:project) { create(:project) } let(:project) { create(:project) }
let!(:group_runner) { create(:ci_runner, :group, groups: [group]) } let!(:group_runner) { create(:ci_runner, :group, groups: [group]) }
...@@ -113,7 +113,7 @@ describe Ci::Runner do ...@@ -113,7 +113,7 @@ describe Ci::Runner do
let!(:shared_runner) { create(:ci_runner, :instance) } let!(:shared_runner) { create(:ci_runner, :instance) }
it 'returns only shared runners' do it 'returns only shared runners' do
expect(described_class.shared).to contain_exactly(shared_runner) expect(described_class.instance_type).to contain_exactly(shared_runner)
end end
end end
...@@ -155,7 +155,7 @@ describe Ci::Runner do ...@@ -155,7 +155,7 @@ describe Ci::Runner do
end end
end end
describe '.owned_or_shared' do describe '.owned_or_instance_wide' do
it 'returns a globally shared, a project specific and a group specific runner' do it 'returns a globally shared, a project specific and a group specific runner' do
# group specific # group specific
group = create(:group) group = create(:group)
...@@ -168,7 +168,7 @@ describe Ci::Runner do ...@@ -168,7 +168,7 @@ describe Ci::Runner do
# globally shared # globally shared
shared_runner = create(:ci_runner, :instance) shared_runner = create(:ci_runner, :instance)
expect(described_class.owned_or_shared(project.id)).to contain_exactly( expect(described_class.owned_or_instance_wide(project.id)).to contain_exactly(
group_runner, project_runner, shared_runner group_runner, project_runner, shared_runner
) )
end end
...@@ -202,7 +202,6 @@ describe Ci::Runner do ...@@ -202,7 +202,6 @@ describe Ci::Runner do
it 'transitions shared runner to project runner and assigns project' do it 'transitions shared runner to project runner and assigns project' do
expect(subject).to be_truthy expect(subject).to be_truthy
expect(runner).to be_specific
expect(runner).to be_project_type expect(runner).to be_project_type
expect(runner.projects).to eq([project]) expect(runner.projects).to eq([project])
expect(runner.only_for?(project)).to be_truthy expect(runner.only_for?(project)).to be_truthy
......
...@@ -67,4 +67,28 @@ describe 'getting merge request information nested in a project' do ...@@ -67,4 +67,28 @@ describe 'getting merge request information nested in a project' do
expect(merge_request_graphql_data).to be_nil expect(merge_request_graphql_data).to be_nil
end end
end end
context 'when there are pipelines' do
before do
pipeline = create(
:ci_pipeline,
project: merge_request.source_project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha
)
merge_request.update!(head_pipeline: pipeline)
end
it 'has a head pipeline' do
post_graphql(query, current_user: current_user)
expect(merge_request_graphql_data['headPipeline']).to be_present
end
it 'has pipeline connections' do
post_graphql(query, current_user: current_user)
expect(merge_request_graphql_data['pipelines']['edges'].size).to eq(1)
end
end
end end
...@@ -26,6 +26,18 @@ describe 'getting project information' do ...@@ -26,6 +26,18 @@ describe 'getting project information' do
post_graphql(query, current_user: current_user) post_graphql(query, current_user: current_user)
end end
end end
context 'when there are pipelines present' do
before do
create(:ci_pipeline, project: project)
end
it 'is included in the pipelines connection' do
post_graphql(query, current_user: current_user)
expect(graphql_data['project']['pipelines']['edges'].size).to eq(1)
end
end
end end
context 'when the user does not have access to the project' do context 'when the user does not have access to the project' do
......
...@@ -89,6 +89,17 @@ describe API::Runners do ...@@ -89,6 +89,17 @@ describe API::Runners do
end end
end end
it 'filters runners by scope' do
get api('/runners/all?scope=shared', admin)
shared = json_response.all? { |r| r['is_shared'] }
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response[0]).to have_key('ip_address')
expect(shared).to be_truthy
end
it 'filters runners by scope' do it 'filters runners by scope' do
get api('/runners/all?scope=specific', admin) get api('/runners/all?scope=specific', admin)
...@@ -136,7 +147,7 @@ describe API::Runners do ...@@ -136,7 +147,7 @@ describe API::Runners do
delete api("/runners/#{unused_project_runner.id}", admin) delete api("/runners/#{unused_project_runner.id}", admin)
expect(response).to have_gitlab_http_status(204) expect(response).to have_gitlab_http_status(204)
end.to change { Ci::Runner.specific.count }.by(-1) end.to change { Ci::Runner.project_type.count }.by(-1)
end end
end end
...@@ -300,7 +311,7 @@ describe API::Runners do ...@@ -300,7 +311,7 @@ describe API::Runners do
delete api("/runners/#{shared_runner.id}", admin) delete api("/runners/#{shared_runner.id}", admin)
expect(response).to have_gitlab_http_status(204) expect(response).to have_gitlab_http_status(204)
end.to change { Ci::Runner.shared.count }.by(-1) end.to change { Ci::Runner.instance_type.count }.by(-1)
end end
it_behaves_like '412 response' do it_behaves_like '412 response' do
...@@ -314,7 +325,7 @@ describe API::Runners do ...@@ -314,7 +325,7 @@ describe API::Runners do
delete api("/runners/#{project_runner.id}", admin) delete api("/runners/#{project_runner.id}", admin)
expect(response).to have_http_status(204) expect(response).to have_http_status(204)
end.to change { Ci::Runner.specific.count }.by(-1) end.to change { Ci::Runner.project_type.count }.by(-1)
end end
end end
...@@ -349,7 +360,7 @@ describe API::Runners do ...@@ -349,7 +360,7 @@ describe API::Runners do
delete api("/runners/#{project_runner.id}", user) delete api("/runners/#{project_runner.id}", user)
expect(response).to have_http_status(204) expect(response).to have_http_status(204)
end.to change { Ci::Runner.specific.count }.by(-1) end.to change { Ci::Runner.project_type.count }.by(-1)
end end
it_behaves_like '412 response' do it_behaves_like '412 response' do
...@@ -584,12 +595,12 @@ describe API::Runners do ...@@ -584,12 +595,12 @@ describe API::Runners do
end end
end end
it 'enables a shared runner' do it 'enables a instance type runner' do
expect do expect do
post api("/projects/#{project.id}/runners", admin), runner_id: shared_runner.id post api("/projects/#{project.id}/runners", admin), runner_id: shared_runner.id
end.to change { project.runners.count }.by(1) end.to change { project.runners.count }.by(1)
expect(shared_runner.reload).not_to be_shared expect(shared_runner.reload).not_to be_instance_type
expect(response).to have_gitlab_http_status(201) expect(response).to have_gitlab_http_status(201)
end end
end end
......
...@@ -57,12 +57,12 @@ module GraphqlHelpers ...@@ -57,12 +57,12 @@ module GraphqlHelpers
type.fields.map do |name, field| type.fields.map do |name, field|
# We can't guess arguments, so skip fields that require them # We can't guess arguments, so skip fields that require them
next if field.arguments.any? next if required_arguments?(field)
if scalar?(field) if nested_fields?(field)
name
else
"#{name} { #{all_graphql_fields_for(field_type(field))} }" "#{name} { #{all_graphql_fields_for(field_type(field))} }"
else
name
end end
end.compact.join("\n") end.compact.join("\n")
end end
...@@ -85,10 +85,22 @@ module GraphqlHelpers ...@@ -85,10 +85,22 @@ module GraphqlHelpers
json_response['data'] json_response['data']
end end
def nested_fields?(field)
!scalar?(field) && !enum?(field)
end
def scalar?(field) def scalar?(field)
field_type(field).kind.scalar? field_type(field).kind.scalar?
end end
def enum?(field)
field_type(field).kind.enum?
end
def required_arguments?(field)
field.arguments.values.any? { |argument| argument.type.non_null? }
end
def field_type(field) def field_type(field)
if field.type.respond_to?(:of_type) if field.type.respond_to?(:of_type)
field.type.of_type field.type.of_type
......
...@@ -136,8 +136,9 @@ shared_examples 'merge requests list' do ...@@ -136,8 +136,9 @@ shared_examples 'merge requests list' do
it 'returns an array of merge requests in given milestone' do it 'returns an array of merge requests in given milestone' do
get api(endpoint_path, user), milestone: '0.9' get api(endpoint_path, user), milestone: '0.9'
expect(json_response.first['title']).to eq merge_request_closed.title closed_issues = json_response.select { |mr| mr['id'] == merge_request_closed.id }
expect(json_response.first['id']).to eq merge_request_closed.id expect(closed_issues.length).to eq(1)
expect(closed_issues.first['title']).to eq merge_request_closed.title
end end
it 'returns an array of merge requests matching state in milestone' do it 'returns an array of merge requests matching state in milestone' do
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment