Commit 05542079 authored by Rémy Coutable's avatar Rémy Coutable

Merge branch 'ce-to-ee-2018-07-04' into 'master'

CE upstream - 2018-07-04 12:21 UTC

Closes gitaly#329, gitaly#320, gitaly#323 and gitaly#328

See merge request gitlab-org/gitlab-ee!6381
parents 8078f779 c32af9da
......@@ -116,7 +116,7 @@ GEM
capybara-screenshot (1.0.14)
capybara (>= 1.0, < 3)
launchy
carrierwave (1.2.1)
carrierwave (1.2.3)
activemodel (>= 4.0.0)
activesupport (>= 4.0.0)
mime-types (>= 1.16)
......
import $ from 'jquery';
import { sprintf, __ } from '~/locale';
import flash from '~/flash';
import { stripHtml } from '~/lib/utils/text_utility';
import * as rootTypes from '../../mutation_types';
import { createCommitPayload, createNewMergeRequestUrl } from '../../utils';
import router from '../../../ide_router';
......@@ -198,11 +197,18 @@ export const commitChanges = ({ commit, state, getters, dispatch, rootState, roo
if (err.response.status === 400) {
$('#ide-create-branch-modal').modal('show');
} else {
let errMsg = __('Error committing changes. Please try again.');
if (err.response.data && err.response.data.message) {
errMsg += ` (${stripHtml(err.response.data.message)})`;
}
flash(errMsg, 'alert', document, null, false, true);
dispatch(
'setErrorMessage',
{
text: __('An error occurred whilst committing your changes.'),
action: () =>
dispatch('commitChanges').then(() =>
dispatch('setErrorMessage', null, { root: true }),
),
actionText: __('Please try again'),
},
{ root: true },
);
window.dispatchEvent(new Event('resize'));
}
......
import { __ } from '../../../../locale';
import Api from '../../../../api';
import flash from '../../../../flash';
import router from '../../../ide_router';
import { scopes } from './constants';
import * as types from './mutation_types';
......@@ -8,8 +7,20 @@ import * as rootTypes from '../../mutation_types';
export const requestMergeRequests = ({ commit }, type) =>
commit(types.REQUEST_MERGE_REQUESTS, type);
export const receiveMergeRequestsError = ({ commit }, type) => {
flash(__('Error loading merge requests.'));
export const receiveMergeRequestsError = ({ commit, dispatch }, { type, search }) => {
dispatch(
'setErrorMessage',
{
text: __('Error loading merge requests.'),
action: payload =>
dispatch('fetchMergeRequests', payload).then(() =>
dispatch('setErrorMessage', null, { root: true }),
),
actionText: __('Please try again'),
actionPayload: { type, search },
},
{ root: true },
);
commit(types.RECEIVE_MERGE_REQUESTS_ERROR, type);
};
export const receiveMergeRequestsSuccess = ({ commit }, { type, data }) =>
......@@ -22,7 +33,7 @@ export const fetchMergeRequests = ({ dispatch, state: { state } }, { type, searc
Api.mergeRequests({ scope, state, search })
.then(({ data }) => dispatch('receiveMergeRequestsSuccess', { type, data }))
.catch(() => dispatch('receiveMergeRequestsError', type));
.catch(() => dispatch('receiveMergeRequestsError', { type, search }));
};
export const resetMergeRequests = ({ commit }, type) => commit(types.RESET_MERGE_REQUESTS, type);
......
import Visibility from 'visibilityjs';
import axios from 'axios';
import httpStatus from '../../../../lib/utils/http_status';
import { __ } from '../../../../locale';
import flash from '../../../../flash';
import Poll from '../../../../lib/utils/poll';
import service from '../../../services';
import { rightSidebarViews } from '../../../constants';
......@@ -18,10 +18,27 @@ export const stopPipelinePolling = () => {
export const restartPipelinePolling = () => {
if (eTagPoll) eTagPoll.restart();
};
export const forcePipelineRequest = () => {
if (eTagPoll) eTagPoll.makeRequest();
};
export const requestLatestPipeline = ({ commit }) => commit(types.REQUEST_LATEST_PIPELINE);
export const receiveLatestPipelineError = ({ commit, dispatch }) => {
flash(__('There was an error loading latest pipeline'));
export const receiveLatestPipelineError = ({ commit, dispatch }, err) => {
if (err.status !== httpStatus.NOT_FOUND) {
dispatch(
'setErrorMessage',
{
text: __('An error occurred whilst fetching the latest pipeline.'),
action: () =>
dispatch('forcePipelineRequest').then(() =>
dispatch('setErrorMessage', null, { root: true }),
),
actionText: __('Please try again'),
actionPayload: null,
},
{ root: true },
);
}
commit(types.RECEIVE_LASTEST_PIPELINE_ERROR);
dispatch('stopPipelinePolling');
};
......@@ -46,7 +63,7 @@ export const fetchLatestPipeline = ({ dispatch, rootGetters }) => {
method: 'lastCommitPipelines',
data: { getters: rootGetters },
successCallback: ({ data }) => dispatch('receiveLatestPipelineSuccess', data),
errorCallback: () => dispatch('receiveLatestPipelineError'),
errorCallback: err => dispatch('receiveLatestPipelineError', err),
});
if (!Visibility.hidden()) {
......@@ -63,9 +80,21 @@ export const fetchLatestPipeline = ({ dispatch, rootGetters }) => {
};
export const requestJobs = ({ commit }, id) => commit(types.REQUEST_JOBS, id);
export const receiveJobsError = ({ commit }, id) => {
flash(__('There was an error loading jobs'));
commit(types.RECEIVE_JOBS_ERROR, id);
export const receiveJobsError = ({ commit, dispatch }, stage) => {
dispatch(
'setErrorMessage',
{
text: __('An error occurred whilst loading the pipeline jobs.'),
action: payload =>
dispatch('fetchJobs', payload).then(() =>
dispatch('setErrorMessage', null, { root: true }),
),
actionText: __('Please try again'),
actionPayload: stage,
},
{ root: true },
);
commit(types.RECEIVE_JOBS_ERROR, stage.id);
};
export const receiveJobsSuccess = ({ commit }, { id, data }) =>
commit(types.RECEIVE_JOBS_SUCCESS, { id, data });
......@@ -76,7 +105,7 @@ export const fetchJobs = ({ dispatch }, stage) => {
axios
.get(stage.dropdownPath)
.then(({ data }) => dispatch('receiveJobsSuccess', { id: stage.id, data }))
.catch(() => dispatch('receiveJobsError', stage.id));
.catch(() => dispatch('receiveJobsError', stage));
};
export const toggleStageCollapsed = ({ commit }, stageId) =>
......@@ -90,8 +119,18 @@ export const setDetailJob = ({ commit, dispatch }, job) => {
};
export const requestJobTrace = ({ commit }) => commit(types.REQUEST_JOB_TRACE);
export const receiveJobTraceError = ({ commit }) => {
flash(__('Error fetching job trace'));
export const receiveJobTraceError = ({ commit, dispatch }) => {
dispatch(
'setErrorMessage',
{
text: __('An error occurred whilst fetching the job trace.'),
action: () =>
dispatch('fetchJobTrace').then(() => dispatch('setErrorMessage', null, { root: true })),
actionText: __('Please try again'),
actionPayload: null,
},
{ root: true },
);
commit(types.RECEIVE_JOB_TRACE_ERROR);
};
export const receiveJobTraceSuccess = ({ commit }, data) =>
......
......@@ -15,7 +15,7 @@ class Projects::PipelinesController < Projects::ApplicationController
def index
@scope = params[:scope]
@pipelines = PipelinesFinder
.new(project, scope: @scope)
.new(project, current_user, scope: @scope)
.execute
.page(params[:page])
.per(30)
......@@ -180,7 +180,7 @@ class Projects::PipelinesController < Projects::ApplicationController
end
def limited_pipelines_count(project, scope = nil)
finder = PipelinesFinder.new(project, scope: scope)
finder = PipelinesFinder.new(project, current_user, scope: scope)
view_context.limited_counter_with_delimiter(finder.execute)
end
......
......@@ -74,7 +74,7 @@ module Projects
.ordered
.page(params[:page]).per(20)
@shared_runners = ::Ci::Runner.shared.active
@shared_runners = ::Ci::Runner.instance_type.active
@shared_runners_count = @shared_runners.count(:all)
......
class PipelinesFinder
attr_reader :project, :pipelines, :params
attr_reader :project, :pipelines, :params, :current_user
ALLOWED_INDEXED_COLUMNS = %w[id status ref user_id].freeze
def initialize(project, params = {})
def initialize(project, current_user, params = {})
@project = project
@current_user = current_user
@pipelines = project.pipelines
@params = params
end
def execute
unless Ability.allowed?(current_user, :read_pipeline, project)
return Ci::Pipeline.none
end
items = pipelines
items = by_scope(items)
items = by_status(items)
......
......@@ -2,7 +2,10 @@ class GitlabSchema < GraphQL::Schema
use BatchLoader::GraphQL
use Gitlab::Graphql::Authorize
use Gitlab::Graphql::Present
use Gitlab::Graphql::Connections
query(Types::QueryType)
default_max_page_size 100
# mutation(Types::MutationType)
end
module ResolvesPipelines
extend ActiveSupport::Concern
included do
type [Types::Ci::PipelineType], null: false
argument :status,
Types::Ci::PipelineStatusEnum,
required: false,
description: "Filter pipelines by their status"
argument :ref,
GraphQL::STRING_TYPE,
required: false,
description: "Filter pipelines by the ref they are run for"
argument :sha,
GraphQL::STRING_TYPE,
required: false,
description: "Filter pipelines by the sha of the commit they are run for"
end
def resolve_pipelines(project, params = {})
PipelinesFinder.new(project, context[:current_user], params).execute
end
end
module Resolvers
class MergeRequestPipelinesResolver < BaseResolver
include ::ResolvesPipelines
alias_method :merge_request, :object
def resolve(**args)
resolve_pipelines(project, args)
.merge(merge_request.all_pipelines)
end
def project
merge_request.source_project
end
end
end
module Resolvers
class ProjectPipelinesResolver < BaseResolver
include ResolvesPipelines
alias_method :project, :object
def resolve(**args)
resolve_pipelines(project, args)
end
end
end
module Types
module Ci
class PipelineStatusEnum < BaseEnum
::Ci::Pipeline.all_state_names.each do |state_symbol|
value state_symbol.to_s.upcase, value: state_symbol.to_s
end
end
end
end
module Types
module Ci
class PipelineType < BaseObject
expose_permissions Types::PermissionTypes::Ci::Pipeline
graphql_name 'Pipeline'
field :id, GraphQL::ID_TYPE, null: false
field :iid, GraphQL::ID_TYPE, null: false
field :sha, GraphQL::STRING_TYPE, null: false
field :before_sha, GraphQL::STRING_TYPE, null: true
field :status, PipelineStatusEnum, null: false
field :duration,
GraphQL::INT_TYPE,
null: true,
description: "Duration of the pipeline in seconds"
field :coverage,
GraphQL::FLOAT_TYPE,
null: true,
description: "Coverage percentage"
field :created_at, Types::TimeType, null: false
field :updated_at, Types::TimeType, null: false
field :started_at, Types::TimeType, null: true
field :finished_at, Types::TimeType, null: true
field :committed_at, Types::TimeType, null: true
# TODO: Add triggering user as a type
end
end
end
......@@ -45,5 +45,11 @@ module Types
field :upvotes, GraphQL::INT_TYPE, null: false
field :downvotes, GraphQL::INT_TYPE, null: false
field :subscribed, GraphQL::BOOLEAN_TYPE, method: :subscribed?, null: false
field :head_pipeline, Types::Ci::PipelineType, null: true, method: :actual_head_pipeline do
authorize :read_pipeline
end
field :pipelines, Types::Ci::PipelineType.connection_type,
resolver: Resolvers::MergeRequestPipelinesResolver
end
end
module Types
module PermissionTypes
module Ci
class Pipeline < BasePermissionType
graphql_name 'PipelinePermissions'
abilities :update_pipeline, :admin_pipeline, :destroy_pipeline
end
end
end
end
......@@ -70,5 +70,10 @@ module Types
resolver: Resolvers::MergeRequestResolver do
authorize :read_merge_request
end
field :pipelines,
Types::Ci::PipelineType.connection_type,
null: false,
resolver: Resolvers::ProjectPipelinesResolver
end
end
......@@ -122,7 +122,7 @@ module CiStatusHelper
def no_runners_for_project?(project)
project.runners.blank? &&
Ci::Runner.shared.blank?
Ci::Runner.instance_type.blank?
end
def render_status_with_link(type, status, path = nil, tooltip_placement: 'left', cssclass: '', container: 'body')
......
......@@ -2,6 +2,7 @@ module Ci
class Runner < ActiveRecord::Base
extend Gitlab::Ci::Model
include Gitlab::SQL::Pattern
include IgnorableColumn
include RedisCacheable
include ChronicDurationAttribute
prepend EE::Ci::Runner
......@@ -12,6 +13,8 @@ module Ci
AVAILABLE_SCOPES = %w[specific shared active paused online].freeze
FORM_EDITABLE = %i[description tag_list active run_untagged locked access_level maximum_timeout_human_readable].freeze
ignore_column :is_shared
has_many :builds
has_many :runner_projects, inverse_of: :runner, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :projects, through: :runner_projects
......@@ -22,13 +25,16 @@ module Ci
before_validation :set_default_values
scope :specific, -> { where(is_shared: false) }
scope :shared, -> { where(is_shared: true) }
scope :active, -> { where(active: true) }
scope :paused, -> { where(active: false) }
scope :online, -> { where('contacted_at > ?', contact_time_deadline) }
scope :ordered, -> { order(id: :desc) }
# BACKWARD COMPATIBILITY: These are needed to maintain compatibility with `AVAILABLE_SCOPES` used by `lib/api/runners.rb`
scope :deprecated_shared, -> { instance_type }
# This should be replaced with `project_type.or(group_type)` once on Rails 5
scope :deprecated_specific, -> { where(runner_type: [runner_types[:project_type], runner_types[:group_type]]) }
scope :belonging_to_project, -> (project_id) {
joins(:runner_projects).where(ci_runner_projects: { project_id: project_id })
}
......@@ -40,9 +46,9 @@ module Ci
joins(:groups).where(namespaces: { id: hierarchy_groups })
}
scope :owned_or_shared, -> (project_id) do
scope :owned_or_instance_wide, -> (project_id) do
union = Gitlab::SQL::Union.new(
[belonging_to_project(project_id), belonging_to_parent_group_of_project(project_id), shared],
[belonging_to_project(project_id), belonging_to_parent_group_of_project(project_id), instance_type],
remove_duplicates: false
)
from("(#{union.to_sql}) ci_runners")
......@@ -64,7 +70,6 @@ module Ci
validate :no_groups, unless: :group_type?
validate :any_project, if: :project_type?
validate :exactly_one_group, if: :group_type?
validate :validate_is_shared
acts_as_taggable
......@@ -114,8 +119,7 @@ module Ci
end
def assign_to(project, current_user = nil)
if shared?
self.is_shared = false if shared?
if instance_type?
self.runner_type = :project_type
elsif group_type?
raise ArgumentError, 'Transitioning a group runner to a project runner is not supported'
......@@ -138,10 +142,6 @@ module Ci
description
end
def shared?
is_shared
end
def online?
contacted_at && contacted_at > self.class.contact_time_deadline
end
......@@ -160,10 +160,6 @@ module Ci
runner_projects.count == 1
end
def specific?
!shared?
end
def assigned_to_group?
runner_namespaces.any?
end
......@@ -261,7 +257,7 @@ module Ci
end
def assignable_for?(project_id)
self.class.owned_or_shared(project_id).where(id: self.id).any?
self.class.owned_or_instance_wide(project_id).where(id: self.id).any?
end
def no_projects
......@@ -288,12 +284,6 @@ module Ci
end
end
def validate_is_shared
unless is_shared? == instance_type?
errors.add(:is_shared, 'is not equal to instance_type?')
end
end
def accepting_tags?(build)
(run_untagged? || build.has_tags?) && (build.tag_list - tag_list).empty?
end
......
......@@ -1440,7 +1440,7 @@ class Project < ActiveRecord::Base
end
def shared_runners
@shared_runners ||= shared_runners_available? ? Ci::Runner.shared : Ci::Runner.none
@shared_runners ||= shared_runners_available? ? Ci::Runner.instance_type : Ci::Runner.none
end
def group_runners
......
......@@ -1054,7 +1054,7 @@ class User < ActiveRecord::Base
union = Gitlab::SQL::Union.new([project_runner_ids, group_runner_ids])
Ci::Runner.specific.where("ci_runners.id IN (#{union.to_sql})") # rubocop:disable GitlabSecurity/SqlInjection
Ci::Runner.where("ci_runners.id IN (#{union.to_sql})") # rubocop:disable GitlabSecurity/SqlInjection
end
end
......
......@@ -4,7 +4,7 @@ class RunnerEntity < Grape::Entity
expose :id, :description
expose :edit_path,
if: -> (*) { can?(request.current_user, :admin_build, project) && runner.specific? } do |runner|
if: -> (*) { can?(request.current_user, :admin_build, project) && runner.project_type? } do |runner|
edit_project_runner_path(project, runner)
end
......
......@@ -17,7 +17,7 @@ module Ci
def execute
builds =
if runner.shared?
if runner.instance_type?
builds_for_shared_runner
elsif runner.group_type?
builds_for_group_runner
......@@ -101,7 +101,7 @@ module Ci
end
def running_builds_for_shared_runners
Ci::Build.running.where(runner: Ci::Runner.shared)
Ci::Build.running.where(runner: Ci::Runner.instance_type)
.group(:project_id).select(:project_id, 'count(*) AS running_builds')
end
......@@ -117,7 +117,7 @@ module Ci
end
def register_success(job)
labels = { shared_runner: runner.shared?,
labels = { shared_runner: runner.instance_type?,
jobs_running_for_project: jobs_running_for_project(job) }
job_queue_duration_seconds.observe(labels, Time.now - job.queued_at) unless job.queued_at.nil?
......@@ -125,10 +125,10 @@ module Ci
end
def jobs_running_for_project(job)
return '+Inf' unless runner.shared?
return '+Inf' unless runner.instance_type?
# excluding currently started job
running_jobs_count = job.project.builds.running.where(runner: Ci::Runner.shared)
running_jobs_count = job.project.builds.running.where(runner: Ci::Runner.instance_type)
.limit(JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET + 1).count - 1
running_jobs_count < JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET ? running_jobs_count : "#{JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET}+"
end
......
%tr{ id: dom_id(runner) }
%td
- if runner.shared?
- if runner.instance_type?
%span.badge.badge-success shared
- elsif runner.group_type?
%span.badge.badge-success group
......@@ -21,7 +21,7 @@
%td
= runner.ip_address
%td
- if runner.shared? || runner.group_type?
- if runner.instance_type? || runner.group_type?
n/a
- else
= runner.projects.count(:all)
......
......@@ -2,7 +2,7 @@
%h3.project-title
Runner ##{@runner.id}
.float-right
- if @runner.shared?
- if @runner.instance_type?
%span.runner-state.runner-state-shared
Shared
- else
......@@ -13,7 +13,7 @@
- breadcrumb_title "##{@runner.id}"
- @no_container = true
- if @runner.shared?
- if @runner.instance_type?
.bs-callout.bs-callout-success
%h4 This Runner will process jobs from ALL UNASSIGNED projects
%p
......
......@@ -26,7 +26,7 @@
- else
- runner_project = @project.runner_projects.find_by(runner_id: runner)
= link_to _('Disable for this project'), project_runner_project_path(@project, runner_project), data: { confirm: _("Are you sure?") }, method: :delete, class: 'btn btn-danger btn-sm'
- elsif !(runner.is_shared? || runner.group_type?) # We can simplify this to `runner.project_type?` when migrating #runner_type is complete
- elsif runner.project_type?
= form_for [@project.namespace.becomes(Namespace), @project, @project.runner_projects.new] do |f|
= f.hidden_field :runner_id, value: runner.id
= f.submit _('Enable for this project'), class: 'btn btn-sm'
......
......@@ -3,7 +3,7 @@
%h3.page-title
Runner ##{@runner.id}
.float-right
- if @runner.shared?
- if @runner.instance_type?
%span.runner-state.runner-state-shared
Shared
- elsif @runner.group_type?
......
---
title: Bump carrierwave gem version to 1.2.3
merge_request: 20287
author:
type: performance
---
title: Add pipeline lists to GraphQL
merge_request: 20249
author:
type: added
---
title: Invalidate merge request diffs cache if diff data changes.
merge_request:
author:
type: fixed
---
title: Remove the use of `is_shared` of `Ci::Runner`
merge_request:
author:
type: other
......@@ -47,7 +47,8 @@ module Gitlab
#{config.root}/app/workers/concerns
#{config.root}/app/services/concerns
#{config.root}/app/serializers/concerns
#{config.root}/app/finders/concerns])
#{config.root}/app/finders/concerns
#{config.root}/app/graphql/resolvers/concerns])
config.generators.templates.push("#{config.root}/generator_templates")
......
# This fixes https://gitlab.com/gitlab-org/gitlab-ce/issues/46182, where carrierwave eagerly loads uploaded files into memory.
# There is a PR https://github.com/carrierwaveuploader/carrierwave/pull/2314 with the identical change.
module CarrierWave
module Storage
class Fog < Abstract
class File
module MonkeyPatch
##
# Read content of file from service
#
# === Returns
#
# [String] contents of file
def read
file_body = file.body
return if file_body.nil?
return file_body unless file_body.is_a?(::File)
# Fog::Storage::XXX::File#body could return the source file which was uploaded to the remote server.
read_source_file(file_body) if ::File.exist?(file_body.path)
# If the source file doesn't exist, the remote content is read
@file = nil # rubocop:disable Gitlab/ModuleWithInstanceVariables
file.body
end
##
# Write file to service
#
# === Returns
#
# [Boolean] true on success or raises error
def store(new_file)
if new_file.is_a?(self.class) # rubocop:disable Cop/LineBreakAroundConditionalBlock
new_file.copy_to(path)
else
fog_file = new_file.to_file
@content_type ||= new_file.content_type # rubocop:disable Gitlab/ModuleWithInstanceVariables
@file = directory.files.create({ # rubocop:disable Gitlab/ModuleWithInstanceVariables
:body => fog_file ? fog_file : new_file.read, # rubocop:disable Style/HashSyntax
:content_type => @content_type, # rubocop:disable Style/HashSyntax,Gitlab/ModuleWithInstanceVariables
:key => path, # rubocop:disable Style/HashSyntax
:public => @uploader.fog_public # rubocop:disable Style/HashSyntax,Gitlab/ModuleWithInstanceVariables
}.merge(@uploader.fog_attributes)) # rubocop:disable Gitlab/ModuleWithInstanceVariables
fog_file.close if fog_file && !fog_file.closed?
end
true
end
private
def read_source_file(file_body)
return unless ::File.exist?(file_body.path)
begin
file_body = ::File.open(file_body.path) if file_body.closed? # Reopen if it's already closed
file_body.read
ensure
file_body.close
end
end
end
prepend MonkeyPatch
end
end
end
end
......@@ -51,7 +51,7 @@ _The uploads are stored by default in
### Using object storage **[PREMIUM]**
>**Notes:**
- [Introduced][ee-3867] in [GitLab Premium][eep] 10.5.
- [Introduced][ee-3867] in [GitLab Enterprise Edition Premium][eep] 10.5.
- Since version 11.1, we support direct_upload to S3.
If you don't want to use the local disk where GitLab is installed to store the
......@@ -66,7 +66,7 @@ For source installations the following settings are nested under `uploads:` and
|---------|-------------|---------|
| `enabled` | Enable/disable object storage | `false` |
| `remote_directory` | The bucket name where Uploads will be stored| |
| `direct_upload` | Set to true to enable direct upload of Uploads without the need of local shared storage. Option may be removed once we decide to support only single storage for all files. If enabled Workhorse uploads files directly to the object storage | `false` |
| `direct_upload` | Set to true to enable direct upload of Uploads without the need of local shared storage. Option may be removed once we decide to support only single storage for all files. | `false` |
| `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 | `true` |
| `proxy_download` | Set to true to proxy all files served. When disabled, clients download directly from remote storage instead of having all data proxied, which reduces egress traffic | `false` |
| `connection` | Various connection options described below | |
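For Omnibus installations the equivalent settings are set in `/etc/gitlab/gitlab.rb`. A minimal sketch, assuming the `uploads_object_store_*` key naming used for the other object storage types (adjust the connection hash for your provider):

```ruby
# /etc/gitlab/gitlab.rb: store uploads in an S3-compatible bucket
gitlab_rails['uploads_object_store_enabled'] = true
gitlab_rails['uploads_object_store_remote_directory'] = "uploads"
gitlab_rails['uploads_object_store_connection'] = {
  'provider' => 'AWS',
  'region' => 'eu-central-1',
  'aws_access_key_id' => 'AWS_ACCESS_KEY_ID',
  'aws_secret_access_key' => 'AWS_SECRET_ACCESS_KEY'
}
```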
......
......@@ -54,6 +54,94 @@ a new presenter specifically for GraphQL.
The presenter is initialized using the object resolved by a field, and
the context.
### Connection Types
GraphQL uses [cursor-based
pagination](https://graphql.org/learn/pagination/#pagination-and-edges)
to expose collections of items. This gives clients a lot of
flexibility while also allowing the backend to use different
pagination models.
To expose a collection of resources, we can use a connection type. This wraps the array with default pagination fields. For example, a query for a project's pipelines could look like this:
```
query($project_path: ID!) {
project(fullPath: $project_path) {
pipelines(first: 2) {
pageInfo {
hasNextPage
hasPreviousPage
}
edges {
cursor
node {
id
status
}
}
}
}
}
```
This would return the first 2 pipelines of a project, ordered by
descending ID, along with the related pagination info. The returned
data would look like this:
```json
{
"data": {
"project": {
"pipelines": {
"pageInfo": {
"hasNextPage": true,
"hasPreviousPage": false
},
"edges": [
{
"cursor": "Nzc=",
"node": {
"id": "77",
"status": "FAILED"
}
},
{
"cursor": "Njc=",
"node": {
"id": "67",
"status": "FAILED"
}
}
]
}
}
}
}
```
To get the next page, the cursor of the last known element could be
passed:
```
query($project_path: ID!) {
project(fullPath: $project_path) {
pipelines(first: 2, after: "Njc=") {
pageInfo {
hasNextPage
hasPreviousPage
}
edges {
cursor
node {
id
status
}
}
}
}
}
```
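On the backend, a collection like this is exposed by declaring the field with the type's `connection_type`, optionally backed by a resolver. The pipelines connection queried above is defined on the project type in this change:

```ruby
field :pipelines,
      Types::Ci::PipelineType.connection_type,
      null: false,
      resolver: Resolvers::ProjectPipelinesResolver
```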
### Exposing permissions for a type
To expose permissions the current user has on a resource, you can call
......
......@@ -38,7 +38,7 @@ module EE
end
def shared_runners_minutes_limit_enabled?
runner && runner.shared? && project.shared_runners_minutes_limit_enabled?
runner && runner.instance_type? && project.shared_runners_minutes_limit_enabled?
end
def stick_build_if_status_changed
......
......@@ -1043,7 +1043,7 @@ module API
expose :description
expose :ip_address
expose :active
expose :is_shared
expose :instance_type?, as: :is_shared
expose :name
expose :online?, as: :online
expose :status
......@@ -1057,7 +1057,7 @@ module API
expose :access_level
expose :version, :revision, :platform, :architecture
expose :contacted_at
expose :token, if: lambda { |runner, options| options[:current_user].admin? || !runner.is_shared? }
expose :token, if: lambda { |runner, options| options[:current_user].admin? || !runner.instance_type? }
expose :projects, with: Entities::BasicProjectDetails do |runner, options|
if options[:current_user].admin?
runner.projects
......
......@@ -31,7 +31,7 @@ module API
get ':id/pipelines' do
authorize! :read_pipeline, user_project
pipelines = PipelinesFinder.new(user_project, params).execute
pipelines = PipelinesFinder.new(user_project, current_user, params).execute
present paginate(pipelines), with: Entities::PipelineBasic
end
......
......@@ -24,13 +24,13 @@ module API
attributes =
if runner_registration_token_valid?
# Create shared runner. Requires admin access
attributes.merge(is_shared: true, runner_type: :instance_type)
attributes.merge(runner_type: :instance_type)
elsif project = Project.find_by(runners_token: params[:token])
# Create a specific runner for the project
attributes.merge(is_shared: false, runner_type: :project_type, projects: [project])
attributes.merge(runner_type: :project_type, projects: [project])
elsif group = Group.find_by(runners_token: params[:token])
# Create a specific runner for the group
attributes.merge(is_shared: false, runner_type: :group_type, groups: [group])
attributes.merge(runner_type: :group_type, groups: [group])
else
forbidden!
end
......
......@@ -119,7 +119,7 @@ module API
use :pagination
end
get ':id/runners' do
runners = filter_runners(Ci::Runner.owned_or_shared(user_project.id), params[:scope])
runners = filter_runners(Ci::Runner.owned_or_instance_wide(user_project.id), params[:scope])
present paginate(runners), with: Entities::Runner
end
......@@ -170,6 +170,11 @@ module API
render_api_error!('Scope contains invalid value', 400)
end
# Support deprecated scopes
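# (e.g. `scope=shared` resolves to `deprecated_shared`, an alias for the `instance_type` scope)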
if runners.respond_to?("deprecated_#{scope}")
scope = "deprecated_#{scope}"
end
runners.public_send(scope) # rubocop:disable GitlabSecurity/PublicSend
end
......@@ -180,7 +185,7 @@ module API
end
def authenticate_show_runner!(runner)
return if runner.is_shared || current_user.admin?
return if runner.instance_type? || current_user.admin?
forbidden!("No access granted") unless can?(current_user, :read_runner, runner)
end
......
......@@ -55,7 +55,7 @@ module Gitlab
id: runner.id,
description: runner.description,
active: runner.active?,
is_shared: runner.is_shared?
is_shared: runner.instance_type?
}
end
end
......
......@@ -34,7 +34,7 @@ module Gitlab
end
def cache_key
[@merge_request_diff, 'highlighted-diff-files', diff_options]
[@merge_request_diff, 'highlighted-diff-files', Gitlab::Diff::Line::SERIALIZE_KEYS, diff_options]
end
private
......
module Gitlab
module Diff
class Line
SERIALIZE_KEYS = %i(line_code text type index old_pos new_pos).freeze
attr_reader :line_code, :type, :index, :old_pos, :new_pos
attr_writer :rich_text
attr_accessor :text
......@@ -19,13 +21,9 @@ module Gitlab
new(hash[:text], hash[:type], hash[:index], hash[:old_pos], hash[:new_pos], line_code: hash[:line_code])
end
def serialize_keys
@serialize_keys ||= %i(line_code text type index old_pos new_pos)
end
def to_hash
hash = {}
serialize_keys.each { |key| hash[key] = send(key) } # rubocop:disable GitlabSecurity/PublicSend
SERIALIZE_KEYS.each { |key| hash[key] = send(key) } # rubocop:disable GitlabSecurity/PublicSend
hash
end
......
# Gitaly note: JV: 1 RPC, migration in progress.
# Gitlab::Git::CommitStats counts the additions, deletions, and total changes
# in a commit.
module Gitlab
......@@ -16,12 +14,8 @@ module Gitlab
@deletions = 0
@total = 0
repo.gitaly_migrate(:commit_stats) do |is_enabled|
if is_enabled
gitaly_stats(repo, commit)
else
rugged_stats(commit)
end
repo.wrapped_gitaly_errors do
gitaly_stats(repo, commit)
end
end
......@@ -31,12 +25,6 @@ module Gitlab
@deletions = stats.deletions
@total = @additions + @deletions
end
def rugged_stats(commit)
diff = commit.rugged_diff_from_parent
_files_changed, @additions, @deletions = diff.stat
@total = @additions + @deletions
end
end
end
end
......@@ -251,7 +251,6 @@ module Gitlab
# Returns an Array of Tags
#
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/390
def tags
wrapped_gitaly_errors do
gitaly_ref_client.tags
......@@ -598,17 +597,9 @@ module Gitlab
# @repository.submodule_url_for('master', 'rack')
# # => git@localhost:rack.git
#
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/329
def submodule_url_for(ref, path)
Gitlab::GitalyClient.migrate(:submodule_url_for) do |is_enabled|
if is_enabled
gitaly_submodule_url_for(ref, path)
else
if submodules(ref).any?
submodule = submodules(ref)[path]
submodule['url'] if submodule
end
end
wrapped_gitaly_errors do
gitaly_submodule_url_for(ref, path)
end
end
......@@ -833,22 +824,14 @@ module Gitlab
# Ex.
# repo.ls_files('master')
#
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/327
def ls_files(ref)
gitaly_commit_client.ls_files(ref)
end
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/328
def copy_gitattributes(ref)
Gitlab::GitalyClient.migrate(:apply_gitattributes) do |is_enabled|
if is_enabled
gitaly_copy_gitattributes(ref)
else
rugged_copy_gitattributes(ref)
end
wrapped_gitaly_errors do
gitaly_repository_client.apply_gitattributes(ref)
end
rescue GRPC::InvalidArgument
raise InvalidRef
end
def info_attributes
......
# Gitaly note: JV: needs 1 RPC, migration is in progress.
module Gitlab
module Git
class Tree
......@@ -17,12 +15,8 @@ module Gitlab
def where(repository, sha, path = nil, recursive = false)
path = nil if path == '' || path == '/'
Gitlab::GitalyClient.migrate(:tree_entries) do |is_enabled|
if is_enabled
repository.gitaly_commit_client.tree_entries(repository, sha, path, recursive)
else
tree_entries_from_rugged(repository, sha, path, recursive)
end
repository.wrapped_gitaly_errors do
repository.gitaly_commit_client.tree_entries(repository, sha, path, recursive)
end
end
......
......@@ -48,6 +48,8 @@ module Gitlab
def apply_gitattributes(revision)
request = Gitaly::ApplyGitattributesRequest.new(repository: @gitaly_repo, revision: encode_binary(revision))
GitalyClient.call(@storage, :repository_service, :apply_gitattributes, request)
rescue GRPC::InvalidArgument => ex
raise Gitlab::Git::Repository::InvalidRef, ex
end
def info_attributes
......
module Gitlab
module Graphql
module Connections
def self.use(_schema)
GraphQL::Relay::BaseConnection.register_connection_implementation(
ActiveRecord::Relation,
Gitlab::Graphql::Connections::KeysetConnection
)
end
end
end
end
module Gitlab
module Graphql
module Connections
class KeysetConnection < GraphQL::Relay::BaseConnection
def cursor_from_node(node)
encode(node[order_field].to_s)
end
def sliced_nodes
@sliced_nodes ||=
begin
sliced = nodes
sliced = sliced.where(before_slice) if before.present?
sliced = sliced.where(after_slice) if after.present?
sliced
end
end
def paged_nodes
if first && last
raise Gitlab::Graphql::Errors::ArgumentError.new("Can only provide either `first` or `last`, not both")
end
if last
sliced_nodes.last(limit_value)
else
sliced_nodes.limit(limit_value)
end
end
private
def before_slice
if sort_direction == :asc
table[order_field].lt(decode(before))
else
table[order_field].gt(decode(before))
end
end
def after_slice
if sort_direction == :asc
table[order_field].gt(decode(after))
else
table[order_field].lt(decode(after))
end
end
def limit_value
@limit_value ||= [first, last, max_page_size].compact.min
end
def table
nodes.arel_table
end
def order_info
@order_info ||= nodes.order_values.first
end
def order_field
@order_field ||= order_info&.expr&.name || nodes.primary_key
end
def sort_direction
@order_direction ||= order_info&.direction || :desc
end
end
end
end
end
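# Illustration (assumption, not part of this change): how these cursors
# round-trip. `cursor_from_node` Base64-encodes the value of the ordered
# column, falling back to the primary key, and the slice predicates
# compare against the decoded value:
#
#   Base64.strict_encode64('77')            # => "Nzc=" (cursor for id 77)
#   table[:id].gt(Base64.decode64('Nzc='))  # after_slice predicate for ascending order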
module Gitlab
module Graphql
module Errors
BaseError = Class.new(GraphQL::ExecutionError)
ArgumentError = Class.new(BaseError)
end
end
end
......@@ -3,6 +3,8 @@ module Gitlab
module Present
class Instrumentation
def instrument(type, field)
return field unless field.metadata[:type_class]
presented_in = field.metadata[:type_class].owner
return field unless presented_in.respond_to?(:presenter_class)
return field unless presented_in.presenter_class
......
......@@ -42,10 +42,10 @@ module Gitlab
key, value = parsed_field.first
if value.nil?
value = open_file(tmp_path, @request.params["#{key}.name"])
value = open_file(@request.params, key)
@open_files << value
else
value = decorate_params_value(value, @request.params[key], tmp_path)
value = decorate_params_value(value, @request.params[key])
end
@request.update_param(key, value)
......@@ -57,7 +57,7 @@ module Gitlab
end
# This function calls itself recursively
def decorate_params_value(path_hash, value_hash, tmp_path)
def decorate_params_value(path_hash, value_hash)
unless path_hash.is_a?(Hash) && path_hash.count == 1
raise "invalid path: #{path_hash.inspect}"
end
......@@ -70,19 +70,21 @@ module Gitlab
case path_value
when nil
value_hash[path_key] = open_file(tmp_path, value_hash.dig(path_key, '.name'))
value_hash[path_key] = open_file(value_hash.dig(path_key), '')
@open_files << value_hash[path_key]
value_hash
when Hash
decorate_params_value(path_value, value_hash[path_key], tmp_path)
decorate_params_value(path_value, value_hash[path_key])
value_hash
else
raise "unexpected path value: #{path_value.inspect}"
end
end
def open_file(path, name)
::UploadedFile.new(path, filename: name || File.basename(path), content_type: 'application/octet-stream')
def open_file(params, key)
::UploadedFile.from_params(
params, key,
Gitlab.config.uploads.storage_path)
end
end
......
......@@ -64,6 +64,10 @@ module QA
def gcloud_zone
ENV.fetch('GCLOUD_ZONE')
end
def has_gcloud_credentials?
%w[GCLOUD_ACCOUNT_KEY GCLOUD_ACCOUNT_EMAIL].none? { |var| ENV[var].to_s.empty? }
end
end
end
end
......@@ -50,11 +50,15 @@ module QA
end
def login_if_not_already_logged_in
account = `gcloud auth list --filter=status:ACTIVE --format="value(account)"`
if account.empty?
if Runtime::Env.has_gcloud_credentials?
attempt_login_with_env_vars
else
puts "gcloud account found. Using: #{account} for creating K8s cluster."
account = `gcloud auth list --filter=status:ACTIVE --format="value(account)"`
if account.empty?
raise "Failed to login to gcloud. No credentials provided in environment and no credentials found locally."
else
puts "gcloud account found. Using: #{account} for creating K8s cluster."
end
end
end
......
......@@ -4,7 +4,7 @@ describe Projects::PipelinesController do
include ApiHelpers
set(:user) { create(:user) }
set(:project) { create(:project, :public, :repository) }
let(:project) { create(:project, :public, :repository) }
let(:feature) { ProjectFeature::DISABLED }
before do
......@@ -74,7 +74,7 @@ describe Projects::PipelinesController do
expect(stages.count).to eq 3
end
expect(queries.count).to be_within(3).of(30)
expect(queries.count).to be_within(5).of(30)
end
end
......@@ -91,6 +91,24 @@ describe Projects::PipelinesController do
end
end
context 'when the project is private' do
let(:project) { create(:project, :private, :repository) }
it 'returns `not_found` when the user does not have access' do
sign_in(create(:user))
get_pipelines_index_json
expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns the pipelines when the user has access' do
get_pipelines_index_json
expect(json_response['pipelines'].size).to eq(5)
end
end
def get_pipelines_index_json
get :index, namespace_id: project.namespace,
project_id: project,
......
......@@ -6,7 +6,6 @@ FactoryBot.define do
active true
access_level :not_protected
is_shared true
runner_type :instance_type
trait :online do
......@@ -14,12 +13,10 @@ FactoryBot.define do
end
trait :instance do
is_shared true
runner_type :instance_type
end
trait :group do
is_shared false
runner_type :group_type
after(:build) do |runner, evaluator|
......@@ -28,7 +25,6 @@ FactoryBot.define do
end
trait :project do
is_shared false
runner_type :project_type
after(:build) do |runner, evaluator|
......
require 'spec_helper'
describe PipelinesFinder do
let(:project) { create(:project, :repository) }
subject { described_class.new(project, params).execute }
let(:project) { create(:project, :public, :repository) }
let(:current_user) { nil }
let(:params) { {} }
subject { described_class.new(project, current_user, params).execute }
describe "#execute" do
context 'when params is empty' do
......@@ -223,5 +224,27 @@ describe PipelinesFinder do
end
end
end
context 'when the project has limited access to pipelines' do
let(:project) { create(:project, :private, :repository) }
let(:current_user) { create(:user) }
let!(:pipelines) { create_list(:ci_pipeline, 2, project: project) }
context 'when the user has access' do
before do
project.add_developer(current_user)
end
it 'is expected to return pipelines' do
is_expected.to contain_exactly(*pipelines)
end
end
context 'when the user is not allowed to read pipelines' do
it 'returns empty' do
is_expected.to be_empty
end
end
end
end
end
......@@ -27,6 +27,12 @@ describe GitlabSchema do
expect(described_class.query).to eq(::Types::QueryType.to_graphql)
end
it 'paginates active record relations using `Gitlab::Graphql::Connections::KeysetConnection`' do
connection = GraphQL::Relay::BaseConnection::CONNECTION_IMPLEMENTATIONS[ActiveRecord::Relation.name]
expect(connection).to eq(Gitlab::Graphql::Connections::KeysetConnection)
end
def field_instrumenters
described_class.instrumenters[:field]
end
......
require 'spec_helper'
describe ResolvesPipelines do
include GraphqlHelpers
subject(:resolver) do
Class.new(Resolvers::BaseResolver) do
include ResolvesPipelines
def resolve(**args)
resolve_pipelines(object, args)
end
end
end
let(:current_user) { create(:user) }
set(:project) { create(:project, :private) }
set(:pipeline) { create(:ci_pipeline, project: project) }
set(:failed_pipeline) { create(:ci_pipeline, :failed, project: project) }
set(:ref_pipeline) { create(:ci_pipeline, project: project, ref: 'awesome-feature') }
set(:sha_pipeline) { create(:ci_pipeline, project: project, sha: 'deadbeef') }
before do
project.add_developer(current_user)
end
it { is_expected.to have_graphql_arguments(:status, :ref, :sha) }
it 'finds all pipelines' do
expect(resolve_pipelines).to contain_exactly(pipeline, failed_pipeline, ref_pipeline, sha_pipeline)
end
it 'allows filtering by status' do
expect(resolve_pipelines(status: 'failed')).to contain_exactly(failed_pipeline)
end
it 'allows filtering by ref' do
expect(resolve_pipelines(ref: 'awesome-feature')).to contain_exactly(ref_pipeline)
end
it 'allows filtering by sha' do
expect(resolve_pipelines(sha: 'deadbeef')).to contain_exactly(sha_pipeline)
end
it 'does not return any pipelines if the user does not have access' do
expect(resolve_pipelines({}, {})).to be_empty
end
def resolve_pipelines(args = {}, context = { current_user: current_user })
resolve(resolver, obj: project, args: args, ctx: context)
end
end
require 'spec_helper'
describe Resolvers::MergeRequestPipelinesResolver do
include GraphqlHelpers
set(:merge_request) { create(:merge_request) }
set(:pipeline) do
create(
:ci_pipeline,
project: merge_request.source_project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha
)
end
set(:other_project_pipeline) { create(:ci_pipeline, project: merge_request.source_project) }
set(:other_pipeline) { create(:ci_pipeline) }
let(:current_user) { create(:user) }
before do
merge_request.project.add_developer(current_user)
end
def resolve_pipelines
resolve(described_class, obj: merge_request, ctx: { current_user: current_user })
end
it 'resolves only pipelines for the passed merge request' do
expect(resolve_pipelines).to contain_exactly(pipeline)
end
end
require 'spec_helper'
describe Resolvers::ProjectPipelinesResolver do
include GraphqlHelpers
set(:project) { create(:project) }
set(:pipeline) { create(:ci_pipeline, project: project) }
set(:other_pipeline) { create(:ci_pipeline) }
let(:current_user) { create(:user) }
before do
project.add_developer(current_user)
end
def resolve_pipelines
resolve(described_class, obj: project, ctx: { current_user: current_user })
end
it 'resolves only pipelines for the passed project' do
expect(resolve_pipelines).to contain_exactly(pipeline)
end
end
require 'spec_helper'
describe Types::Ci::PipelineType do
it { expect(described_class.graphql_name).to eq('Pipeline') }
it { expect(described_class).to expose_permissions_using(Types::PermissionTypes::Ci::Pipeline) }
end
require 'spec_helper'
describe Types::MergeRequestType do
describe GitlabSchema.types['MergeRequest'] do
it { expect(described_class).to expose_permissions_using(Types::PermissionTypes::MergeRequest) }
describe 'head pipeline' do
it 'has a head pipeline field' do
expect(described_class).to have_graphql_field(:head_pipeline)
end
it 'authorizes the field' do
expect(described_class.fields['headPipeline'])
.to require_graphql_authorizations(:read_pipeline)
end
end
end
......@@ -13,4 +13,6 @@ describe GitlabSchema.types['Project'] do
.to require_graphql_authorizations(:read_merge_request)
end
end
it { is_expected.to have_graphql_field(:pipelines) }
end
......@@ -2,7 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import state from '~/ide/stores/modules/merge_requests/state';
import * as types from '~/ide/stores/modules/merge_requests/mutation_types';
import actions, {
import {
requestMergeRequests,
receiveMergeRequestsError,
receiveMergeRequestsSuccess,
......@@ -41,28 +41,26 @@ describe('IDE merge requests actions', () => {
});
describe('receiveMergeRequestsError', () => {
let flashSpy;
beforeEach(() => {
flashSpy = spyOnDependency(actions, 'flash');
});
it('should should commit error', done => {
testAction(
receiveMergeRequestsError,
'created',
{ type: 'created', search: '' },
mockedState,
[{ type: types.RECEIVE_MERGE_REQUESTS_ERROR, payload: 'created' }],
[],
[
{
type: 'setErrorMessage',
payload: {
text: 'Error loading merge requests.',
action: jasmine.any(Function),
actionText: 'Please try again',
actionPayload: { type: 'created', search: '' },
},
},
],
done,
);
});
it('creates flash message', () => {
receiveMergeRequestsError({ commit() {} }, 'created');
expect(flashSpy).toHaveBeenCalled();
});
});
describe('receiveMergeRequestsSuccess', () => {
......
import Visibility from 'visibilityjs';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import actions, {
import {
requestLatestPipeline,
receiveLatestPipelineError,
receiveLatestPipelineSuccess,
......@@ -59,7 +59,7 @@ describe('IDE pipelines actions', () => {
it('commits error', done => {
testAction(
receiveLatestPipelineError,
null,
{ status: 404 },
mockedState,
[{ type: types.RECEIVE_LASTEST_PIPELINE_ERROR }],
[{ type: 'stopPipelinePolling' }],
......@@ -67,12 +67,26 @@ describe('IDE pipelines actions', () => {
);
});
it('creates flash message', () => {
const flashSpy = spyOnDependency(actions, 'flash');
receiveLatestPipelineError({ commit() {}, dispatch() {} });
expect(flashSpy).toHaveBeenCalled();
it('dispatches setErrorMessage if status is not 404', done => {
testAction(
receiveLatestPipelineError,
{ status: 500 },
mockedState,
[{ type: types.RECEIVE_LASTEST_PIPELINE_ERROR }],
[
{
type: 'setErrorMessage',
payload: {
text: 'An error occurred whilst fetching the latest pipeline.',
action: jasmine.any(Function),
actionText: 'Please try again',
actionPayload: null,
},
},
{ type: 'stopPipelinePolling' },
],
done,
);
});
});
......@@ -181,7 +195,10 @@ describe('IDE pipelines actions', () => {
new Promise(resolve => requestAnimationFrame(resolve))
.then(() => {
expect(dispatch.calls.argsFor(1)).toEqual(['receiveLatestPipelineError']);
expect(dispatch.calls.argsFor(1)).toEqual([
'receiveLatestPipelineError',
jasmine.anything(),
]);
})
.then(done)
.catch(done.fail);
......@@ -199,21 +216,23 @@ describe('IDE pipelines actions', () => {
it('commits error', done => {
testAction(
receiveJobsError,
1,
{ id: 1 },
mockedState,
[{ type: types.RECEIVE_JOBS_ERROR, payload: 1 }],
[],
[
{
type: 'setErrorMessage',
payload: {
text: 'An error occurred whilst loading the pipeline jobs.',
action: jasmine.anything(),
actionText: 'Please try again',
actionPayload: { id: 1 },
},
},
],
done,
);
});
it('creates flash message', () => {
const flashSpy = spyOnDependency(actions, 'flash');
receiveJobsError({ commit() {} }, 1);
expect(flashSpy).toHaveBeenCalled();
});
});
describe('receiveJobsSuccess', () => {
......@@ -268,7 +287,7 @@ describe('IDE pipelines actions', () => {
[],
[
{ type: 'requestJobs', payload: stage.id },
{ type: 'receiveJobsError', payload: stage.id },
{ type: 'receiveJobsError', payload: stage },
],
done,
);
......@@ -337,18 +356,20 @@ describe('IDE pipelines actions', () => {
null,
mockedState,
[{ type: types.RECEIVE_JOB_TRACE_ERROR }],
[],
[
{
type: 'setErrorMessage',
payload: {
text: 'An error occurred whilst fetching the job trace.',
action: jasmine.any(Function),
actionText: 'Please try again',
actionPayload: null,
},
},
],
done,
);
});
it('creates flash message', () => {
const flashSpy = spyOnDependency(actions, 'flash');
receiveJobTraceError({ commit() {} });
expect(flashSpy).toHaveBeenCalled();
});
});
describe('receiveJobTraceSuccess', () => {
......
......@@ -20,6 +20,15 @@ describe Gitlab::Diff::FileCollection::MergeRequestDiff do
diff_files
end
it 'uses a different cache key if diff line keys change' do
mr_diff = described_class.new(merge_request.merge_request_diff, diff_options: nil)
key = mr_diff.cache_key
stub_const('Gitlab::Diff::Line::SERIALIZE_KEYS', [:foo])
expect(mr_diff.cache_key).not_to eq(key)
end
shared_examples 'initializes a DiffCollection' do
it 'returns a valid instance of a DiffCollection' do
expect(diff_files).to be_a(Gitlab::Git::DiffCollection)
......
......@@ -1402,94 +1402,84 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
describe "#copy_gitattributes" do
shared_examples 'applying git attributes' do
let(:attributes_path) { File.join(SEED_STORAGE_PATH, TEST_REPO_PATH, 'info/attributes') }
let(:attributes_path) { File.join(SEED_STORAGE_PATH, TEST_REPO_PATH, 'info/attributes') }
after do
FileUtils.rm_rf(attributes_path) if Dir.exist?(attributes_path)
end
it "raises an error with invalid ref" do
expect { repository.copy_gitattributes("invalid") }.to raise_error(Gitlab::Git::Repository::InvalidRef)
end
context 'when forcing encoding issues' do
let(:branch_name) { "ʕ•ᴥ•ʔ" }
after do
FileUtils.rm_rf(attributes_path) if Dir.exist?(attributes_path)
end
before do
repository.create_branch(branch_name, "master")
end
it "raises an error with invalid ref" do
expect { repository.copy_gitattributes("invalid") }.to raise_error(Gitlab::Git::Repository::InvalidRef)
end
after do
repository.rm_branch(branch_name, user: build(:admin))
end
context 'when forcing encoding issues' do
let(:branch_name) { "ʕ•ᴥ•ʔ" }
it "doesn't raise with a valid unicode ref" do
expect { repository.copy_gitattributes(branch_name) }.not_to raise_error
before do
repository.create_branch(branch_name, "master")
end
repository
end
after do
repository.rm_branch(branch_name, user: build(:admin))
end
context "with no .gitattrbutes" do
before do
repository.copy_gitattributes("master")
end
it "doesn't raise with a valid unicode ref" do
expect { repository.copy_gitattributes(branch_name) }.not_to raise_error
it "does not have an info/attributes" do
expect(File.exist?(attributes_path)).to be_falsey
end
repository
end
end
context "with .gitattrbutes" do
before do
repository.copy_gitattributes("gitattributes")
end
context "with no .gitattrbutes" do
before do
repository.copy_gitattributes("master")
end
it "has an info/attributes" do
expect(File.exist?(attributes_path)).to be_truthy
end
it "does not have an info/attributes" do
expect(File.exist?(attributes_path)).to be_falsey
end
end
it "has the same content in info/attributes as .gitattributes" do
contents = File.open(attributes_path, "rb") { |f| f.read }
expect(contents).to eq("*.md binary\n")
end
context "with .gitattrbutes" do
before do
repository.copy_gitattributes("gitattributes")
end
context "with updated .gitattrbutes" do
before do
repository.copy_gitattributes("gitattributes")
repository.copy_gitattributes("gitattributes-updated")
end
it "has an info/attributes" do
expect(File.exist?(attributes_path)).to be_truthy
end
it "has an info/attributes" do
expect(File.exist?(attributes_path)).to be_truthy
end
it "has the same content in info/attributes as .gitattributes" do
contents = File.open(attributes_path, "rb") { |f| f.read }
expect(contents).to eq("*.md binary\n")
end
end
it "has the updated content in info/attributes" do
contents = File.read(attributes_path)
expect(contents).to eq("*.txt binary\n")
end
context "with updated .gitattrbutes" do
before do
repository.copy_gitattributes("gitattributes")
repository.copy_gitattributes("gitattributes-updated")
end
context "with no .gitattrbutes in HEAD but with previous info/attributes" do
before do
repository.copy_gitattributes("gitattributes")
repository.copy_gitattributes("master")
end
it "has an info/attributes" do
expect(File.exist?(attributes_path)).to be_truthy
end
it "does not have an info/attributes" do
expect(File.exist?(attributes_path)).to be_falsey
end
it "has the updated content in info/attributes" do
contents = File.read(attributes_path)
expect(contents).to eq("*.txt binary\n")
end
end
context 'when gitaly is enabled' do
it_behaves_like 'applying git attributes'
end
context "with no .gitattrbutes in HEAD but with previous info/attributes" do
before do
repository.copy_gitattributes("gitattributes")
repository.copy_gitattributes("master")
end
context 'when gitaly is disabled', :disable_gitaly do
it_behaves_like 'applying git attributes'
it "does not have an info/attributes" do
expect(File.exist?(attributes_path)).to be_falsey
end
end
end
......
require 'spec_helper'
describe Gitlab::Graphql::Connections::KeysetConnection do
let(:nodes) { Project.all.order(id: :asc) }
let(:arguments) { {} }
subject(:connection) do
described_class.new(nodes, arguments, max_page_size: 3)
end
def encoded_property(value)
Base64.strict_encode64(value.to_s)
end
describe '#cursor_from_node' do
let(:project) { create(:project) }
it 'returns an encoded ID' do
expect(connection.cursor_from_node(project))
.to eq(encoded_property(project.id))
end
context 'when an order was specified' do
let(:nodes) { Project.order(:updated_at) }
it 'returns the encoded value of the order' do
expect(connection.cursor_from_node(project))
.to eq(encoded_property(project.updated_at))
end
end
end
describe '#sliced_nodes' do
let(:projects) { create_list(:project, 4) }
context 'when before is passed' do
let(:arguments) { { before: encoded_property(projects[1].id) } }
it 'only returns the project before the selected one' do
expect(subject.sliced_nodes).to contain_exactly(projects.first)
end
context 'when the sort order is descending' do
let(:nodes) { Project.all.order(id: :desc) }
it 'returns the correct nodes' do
expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
end
end
end
context 'when after is passed' do
let(:arguments) { { after: encoded_property(projects[1].id) } }
it 'only returns the projects after the selected one' do
expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
end
context 'when the sort order is descending' do
let(:nodes) { Project.all.order(id: :desc) }
it 'returns the correct nodes' do
expect(subject.sliced_nodes).to contain_exactly(projects.first)
end
end
end
context 'when both before and after are passed' do
let(:arguments) do
{
after: encoded_property(projects[1].id),
before: encoded_property(projects[3].id)
}
end
it 'returns the expected set' do
expect(subject.sliced_nodes).to contain_exactly(projects[2])
end
end
end
describe '#paged_nodes' do
let!(:projects) { create_list(:project, 5) }
it 'returns the collection limited to max page size' do
expect(subject.paged_nodes.size).to eq(3)
end
context 'when `first` is passed' do
let(:arguments) { { first: 2 } }
it 'returns only the first elements' do
expect(subject.paged_nodes).to contain_exactly(projects.first, projects.second)
end
end
context 'when `last` is passed' do
let(:arguments) { { last: 2 } }
it 'returns only the last elements' do
expect(subject.paged_nodes).to contain_exactly(projects[3], projects[4])
end
end
context 'when both are passed' do
let(:arguments) { { first: 2, last: 2 } }
it 'raises an error' do
expect { subject.paged_nodes }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
end
end
end
end
......@@ -7,18 +7,47 @@ describe Gitlab::Middleware::Multipart do
let(:middleware) { described_class.new(app) }
let(:original_filename) { 'filename' }
it 'opens top-level files' do
Tempfile.open('top-level') do |tempfile|
env = post_env({ 'file' => tempfile.path }, { 'file.name' => original_filename }, Gitlab::Workhorse.secret, 'gitlab-workhorse')
shared_examples_for 'multipart upload files' do
it 'opens top-level files' do
Tempfile.open('top-level') do |tempfile|
env = post_env({ 'file' => tempfile.path }, { 'file.name' => original_filename, 'file.path' => tempfile.path, 'file.remote_id' => remote_id }, Gitlab::Workhorse.secret, 'gitlab-workhorse')
expect_uploaded_file(tempfile, %w(file))
middleware.call(env)
end
end
it 'opens files one level deep' do
Tempfile.open('one-level') do |tempfile|
in_params = { 'user' => { 'avatar' => { '.name' => original_filename, '.path' => tempfile.path, '.remote_id' => remote_id } } }
env = post_env({ 'user[avatar]' => tempfile.path }, in_params, Gitlab::Workhorse.secret, 'gitlab-workhorse')
expect_uploaded_file(tempfile, %w(user avatar))
middleware.call(env)
end
end
it 'opens files two levels deep' do
Tempfile.open('two-levels') do |tempfile|
in_params = { 'project' => { 'milestone' => { 'themesong' => { '.name' => original_filename, '.path' => tempfile.path, '.remote_id' => remote_id } } } }
env = post_env({ 'project[milestone][themesong]' => tempfile.path }, in_params, Gitlab::Workhorse.secret, 'gitlab-workhorse')
expect_uploaded_file(tempfile, %w(project milestone themesong))
middleware.call(env)
end
end
def expect_uploaded_file(tempfile, path, remote: false)
expect(app).to receive(:call) do |env|
file = Rack::Request.new(env).params['file']
file = Rack::Request.new(env).params.dig(*path)
expect(file).to be_a(::UploadedFile)
expect(file.path).to eq(tempfile.path)
expect(file.original_filename).to eq(original_filename)
expect(file.remote_id).to eq(remote_id)
end
middleware.call(env)
end
end
......@@ -34,36 +63,16 @@ describe Gitlab::Middleware::Multipart do
expect { middleware.call(env) }.to raise_error(JWT::InvalidIssuerError)
end
it 'opens files one level deep' do
Tempfile.open('one-level') do |tempfile|
in_params = { 'user' => { 'avatar' => { '.name' => original_filename } } }
env = post_env({ 'user[avatar]' => tempfile.path }, in_params, Gitlab::Workhorse.secret, 'gitlab-workhorse')
context 'with remote file' do
let(:remote_id) { 'someid' }
expect(app).to receive(:call) do |env|
file = Rack::Request.new(env).params['user']['avatar']
expect(file).to be_a(::UploadedFile)
expect(file.path).to eq(tempfile.path)
expect(file.original_filename).to eq(original_filename)
end
middleware.call(env)
end
it_behaves_like 'multipart upload files'
end
it 'opens files two levels deep' do
Tempfile.open('two-levels') do |tempfile|
in_params = { 'project' => { 'milestone' => { 'themesong' => { '.name' => original_filename } } } }
env = post_env({ 'project[milestone][themesong]' => tempfile.path }, in_params, Gitlab::Workhorse.secret, 'gitlab-workhorse')
context 'with local file' do
let(:remote_id) { nil }
expect(app).to receive(:call) do |env|
file = Rack::Request.new(env).params['project']['milestone']['themesong']
expect(file).to be_a(::UploadedFile)
expect(file.path).to eq(tempfile.path)
expect(file.original_filename).to eq(original_filename)
end
middleware.call(env)
end
it_behaves_like 'multipart upload files'
end
def post_env(rewritten_fields, params, secret, issuer)
......
......@@ -105,7 +105,7 @@ describe Ci::Runner do
end
end
describe '.shared' do
describe '.instance_type' do
let(:group) { create(:group) }
let(:project) { create(:project) }
let!(:group_runner) { create(:ci_runner, :group, groups: [group]) }
......@@ -113,7 +113,7 @@ describe Ci::Runner do
let!(:shared_runner) { create(:ci_runner, :instance) }
it 'returns only shared runners' do
expect(described_class.shared).to contain_exactly(shared_runner)
expect(described_class.instance_type).to contain_exactly(shared_runner)
end
end
......@@ -155,7 +155,7 @@ describe Ci::Runner do
end
end
describe '.owned_or_shared' do
describe '.owned_or_instance_wide' do
it 'returns a globally shared, a project specific and a group specific runner' do
# group specific
group = create(:group)
......@@ -168,7 +168,7 @@ describe Ci::Runner do
# globally shared
shared_runner = create(:ci_runner, :instance)
expect(described_class.owned_or_shared(project.id)).to contain_exactly(
expect(described_class.owned_or_instance_wide(project.id)).to contain_exactly(
group_runner, project_runner, shared_runner
)
end
......@@ -202,7 +202,6 @@ describe Ci::Runner do
it 'transitions shared runner to project runner and assigns project' do
expect(subject).to be_truthy
expect(runner).to be_specific
expect(runner).to be_project_type
expect(runner.projects).to eq([project])
expect(runner.only_for?(project)).to be_truthy
......
......@@ -67,4 +67,28 @@ describe 'getting merge request information nested in a project' do
expect(merge_request_graphql_data).to be_nil
end
end
context 'when there are pipelines' do
before do
pipeline = create(
:ci_pipeline,
project: merge_request.source_project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha
)
merge_request.update!(head_pipeline: pipeline)
end
it 'has a head pipeline' do
post_graphql(query, current_user: current_user)
expect(merge_request_graphql_data['headPipeline']).to be_present
end
it 'has pipeline connections' do
post_graphql(query, current_user: current_user)
expect(merge_request_graphql_data['pipelines']['edges'].size).to eq(1)
end
end
end
......@@ -26,6 +26,18 @@ describe 'getting project information' do
post_graphql(query, current_user: current_user)
end
end
context 'when there are pipelines present' do
before do
create(:ci_pipeline, project: project)
end
it 'is included in the pipelines connection' do
post_graphql(query, current_user: current_user)
expect(graphql_data['project']['pipelines']['edges'].size).to eq(1)
end
end
end
context 'when the user does not have access to the project' do
......
......@@ -89,6 +89,17 @@ describe API::Runners do
end
end
it 'filters shared runners by scope' do
get api('/runners/all?scope=shared', admin)
shared = json_response.all? { |r| r['is_shared'] }
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response[0]).to have_key('ip_address')
expect(shared).to be_truthy
end
it 'filters runners by scope' do
get api('/runners/all?scope=specific', admin)
......@@ -136,7 +147,7 @@ describe API::Runners do
delete api("/runners/#{unused_project_runner.id}", admin)
expect(response).to have_gitlab_http_status(204)
end.to change { Ci::Runner.specific.count }.by(-1)
end.to change { Ci::Runner.project_type.count }.by(-1)
end
end
......@@ -300,7 +311,7 @@ describe API::Runners do
delete api("/runners/#{shared_runner.id}", admin)
expect(response).to have_gitlab_http_status(204)
end.to change { Ci::Runner.shared.count }.by(-1)
end.to change { Ci::Runner.instance_type.count }.by(-1)
end
it_behaves_like '412 response' do
......@@ -314,7 +325,7 @@ describe API::Runners do
delete api("/runners/#{project_runner.id}", admin)
expect(response).to have_http_status(204)
end.to change { Ci::Runner.specific.count }.by(-1)
end.to change { Ci::Runner.project_type.count }.by(-1)
end
end
......@@ -349,7 +360,7 @@ describe API::Runners do
delete api("/runners/#{project_runner.id}", user)
expect(response).to have_http_status(204)
end.to change { Ci::Runner.specific.count }.by(-1)
end.to change { Ci::Runner.project_type.count }.by(-1)
end
it_behaves_like '412 response' do
......@@ -584,12 +595,12 @@ describe API::Runners do
end
end
it 'enables a shared runner' do
it 'enables an instance type runner' do
expect do
post api("/projects/#{project.id}/runners", admin), runner_id: shared_runner.id
end.to change { project.runners.count }.by(1)
expect(shared_runner.reload).not_to be_shared
expect(shared_runner.reload).not_to be_instance_type
expect(response).to have_gitlab_http_status(201)
end
end
......
......@@ -57,12 +57,12 @@ module GraphqlHelpers
type.fields.map do |name, field|
# We can't guess arguments, so skip fields that require them
next if field.arguments.any?
next if required_arguments?(field)
if scalar?(field)
name
else
if nested_fields?(field)
"#{name} { #{all_graphql_fields_for(field_type(field))} }"
else
name
end
end.compact.join("\n")
end
......@@ -85,10 +85,22 @@ module GraphqlHelpers
json_response['data']
end
def nested_fields?(field)
!scalar?(field) && !enum?(field)
end
def scalar?(field)
field_type(field).kind.scalar?
end
def enum?(field)
field_type(field).kind.enum?
end
def required_arguments?(field)
field.arguments.values.any? { |argument| argument.type.non_null? }
end
def field_type(field)
if field.type.respond_to?(:of_type)
field.type.of_type
......
......@@ -136,8 +136,9 @@ shared_examples 'merge requests list' do
it 'returns an array of merge requests in given milestone' do
get api(endpoint_path, user), milestone: '0.9'
expect(json_response.first['title']).to eq merge_request_closed.title
expect(json_response.first['id']).to eq merge_request_closed.id
closed_mrs = json_response.select { |mr| mr['id'] == merge_request_closed.id }
expect(closed_mrs.length).to eq(1)
expect(closed_mrs.first['title']).to eq merge_request_closed.title
end
it 'returns an array of merge requests matching state in milestone' do
......