Commit a6caf898 authored by Luke Duncalfe

Merge branch 'ajk-13984-resolver-test-changes' into 'master'

Forwards compatible test changes for framework authorization [RUN AS-IF-FOSS]

See merge request gitlab-org/gitlab!48080
parents 4106da77 2ef928ca
......@@ -43,8 +43,10 @@
# (i.e. `resolve(**args).sync == query_for(query_input(**args)).to_a`).
#
# Classes may implement:
# - #item_found(A, R) (return value is ignored)
# - max_union_size Integer (the maximum number of queries to run in any one union)
# - preload -> Preloads|NilClass (a set of preloads to apply to each query)
# - #item_found(A, R) (return value is ignored)
# - allowed?(R) -> Boolean (if this method returns false, the value is not resolved)
module CachingArrayResolver
MAX_UNION_SIZE = 50
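# A minimal, hedged sketch of a resolver built on the contract documented
# above. The class name, the ::Issue.where(project_id: ...) query and the
# preload hash are illustrative assumptions, not part of this MR; the hook
# names are the ones listed above and defined below.
class ExampleIssuesByProjectResolver < ::Resolvers::BaseResolver
  include CachingArrayResolver

  type [::Types::IssueType], null: true

  # The model the batched UNION query is built from.
  def model_class
    ::Issue
  end

  # The value this call is cached under; equal inputs share one query.
  def query_input(**args)
    object.id
  end

  # The query to run for a single input; inputs are combined via from_union.
  def query_for(project_id)
    ::Issue.where(project_id: project_id) # rubocop: disable CodeReuse/ActiveRecord
  end

  # Associations to preload on each batched query.
  def preload
    { project: [:namespace] }
  end

  # Per-item authorization filter.
  def allowed?(issue)
    issue.visible_to_user?(context[:current_user])
  end
end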
......@@ -62,6 +64,7 @@ module CachingArrayResolver
queries.in_groups_of(max_union_size, false).each do |group|
by_id = model_class
.from_union(tag(group), remove_duplicates: false)
.preload(preload) # rubocop: disable CodeReuse/ActiveRecord
.group_by { |r| r[primary_key] }
by_id.values.each do |item_group|
......@@ -75,6 +78,16 @@ module CachingArrayResolver
end
end
# Override to apply filters on a per-item basis
def allowed?(item)
true
end
# Override to specify preloads for each query
def preload
nil
end
# Override this to intercept the items once they are found
def item_found(query_input, item)
end
......@@ -94,6 +107,8 @@ module CachingArrayResolver
end
def found(loader, key, value)
return unless allowed?(value)
loader.call(key) do |vs|
item_found(key, value)
vs << value
......
# frozen_string_literal: true
module Resolvers
module Projects
class JiraImportsResolver < BaseResolver
prepend ::ManualAuthorization
include Gitlab::Graphql::Authorize::AuthorizeResource
type Types::JiraImportType.connection_type, null: true
alias_method :project, :object
def resolve(**args)
authorize!(project)
project.jira_imports
end
def authorized_resource?(project)
context[:current_user].present? && Ability.allowed?(context[:current_user], :read_project, project)
end
end
end
end
......@@ -2,7 +2,7 @@
module Types
module AlertManagement
class PrometheusIntegrationType < BaseObject
class PrometheusIntegrationType < ::Types::BaseObject
include ::Gitlab::Routing
graphql_name 'AlertManagementPrometheusIntegration'
......
......@@ -2,8 +2,7 @@
module Types
# rubocop: disable Graphql/AuthorizeTypes
# Authorization is at project level for owners or admins,
# so it is added directly to the Resolvers::JiraImportsResolver
# Authorization is at project level for owners or admins
class JiraImportType < BaseObject
graphql_name 'JiraImport'
......
......@@ -22,6 +22,7 @@ class Issue < ApplicationRecord
include Presentable
include IssueAvailableFeatures
include Todoable
include FromUnion
DueDateStruct = Struct.new(:title, :name).freeze
NoDueDate = DueDateStruct.new('No Due Date', '0').freeze
......
......@@ -2,12 +2,30 @@
module Resolvers
class EpicIssuesResolver < BaseResolver
include CachingArrayResolver
type Types::EpicIssueType.connection_type, null: true
alias_method :epic, :object
def resolve(**args)
epic.issues_readable_by(context[:current_user], preload: { project: [:namespace, :project_feature] })
def model_class
::Issue
end
def allowed?(issue)
DeclarativePolicy.user_scope { issue.visible_to_user?(current_user) }
end
def query_input(**args)
epic.id
end
def query_for(id)
::Epic.related_issues(ids: id)
end
def preload
{ project: [:namespace, :project_feature] }
end
end
end
......@@ -5,6 +5,8 @@ module Gitlab
module Aggregations
module VulnerabilityStatistics
class LazyAggregate
include ::Gitlab::Graphql::Deferred
attr_reader :vulnerable, :lazy_state
def initialize(query_ctx, vulnerable, include_subgroups: false)
......
......@@ -23,11 +23,11 @@ RSpec.describe Resolvers::IssuesResolver do
let_it_be(:weight_issue4) { create(:issue, project: project, weight: nil) }
it 'sorts issues ascending' do
expect(resolve_issues(sort: :weight_asc)).to eq [weight_issue3, weight_issue1, weight_issue4, weight_issue2]
expect(resolve_issues(sort: :weight_asc).to_a).to eq [weight_issue3, weight_issue1, weight_issue4, weight_issue2]
end
it 'sorts issues descending' do
expect(resolve_issues(sort: :weight_desc)).to eq [weight_issue1, weight_issue3, weight_issue4, weight_issue2]
expect(resolve_issues(sort: :weight_desc).to_a).to eq [weight_issue1, weight_issue3, weight_issue4, weight_issue2]
end
end
......@@ -36,11 +36,11 @@ RSpec.describe Resolvers::IssuesResolver do
let_it_be(:published) { create(:issue, :published, project: project) }
it 'sorts issues ascending' do
expect(resolve_issues(sort: :published_asc)).to eq [not_published, published]
expect(resolve_issues(sort: :published_asc).to_a).to eq [not_published, published]
end
it 'sorts issues descending' do
expect(resolve_issues(sort: :published_desc)).to eq [published, not_published]
expect(resolve_issues(sort: :published_desc).to_a).to eq [published, not_published]
end
end
......@@ -54,11 +54,11 @@ RSpec.describe Resolvers::IssuesResolver do
end
it 'sorts issues ascending' do
expect(resolve_issues(sort: :sla_due_at_asc)).to eq [sla_due_first, sla_due_last]
expect(resolve_issues(sort: :sla_due_at_asc).to_a).to eq [sla_due_first, sla_due_last]
end
it 'sorts issues descending' do
expect(resolve_issues(sort: :sla_due_at_desc)).to eq [sla_due_last, sla_due_first]
expect(resolve_issues(sort: :sla_due_at_desc).to_a).to eq [sla_due_last, sla_due_first]
end
end
end
......@@ -69,7 +69,7 @@ RSpec.describe Resolvers::IssuesResolver do
let_it_be(:issue_without_iteration) { create(:issue, project: project) }
it 'returns issues with iteration' do
expect(resolve_issues(iteration_id: iteration1.id)).to eq [issue_with_iteration]
expect(resolve_issues(iteration_id: [iteration1.id])).to contain_exactly(issue_with_iteration)
end
end
......
......@@ -69,7 +69,7 @@ RSpec.describe Resolvers::BoardGroupings::EpicsResolver do
it 'finds only epics for issues matching issue filters' do
result = resolve_board_epics(
group_board, { issue_filters: { label_name: label1.title, not: { label_name: label2.title } } })
group_board, { issue_filters: { label_name: [label1.title], not: { label_name: [label2.title] } } })
expect(result).to match_array([epic1])
end
......@@ -82,13 +82,15 @@ RSpec.describe Resolvers::BoardGroupings::EpicsResolver do
end
it 'accepts negated issue params' do
filters = { label_name: ['foo'], not: { label_name: %w(foo bar) } }
expect(Boards::Issues::ListService).to receive(:new).with(
group_board.resource_parent,
current_user,
{ all_lists: true, board_id: group_board.id, label_name: 'foo', not: { label_name: %w(foo bar) } }
{ all_lists: true, board_id: group_board.id, **filters }
).and_call_original
resolve_board_epics(group_board, { issue_filters: { label_name: 'foo', not: { label_name: %w(foo bar) } } })
resolve_board_epics(group_board, { issue_filters: filters })
end
it 'raises an exception if both epic_id and epic_wildcard_id are present' do
......
......@@ -14,32 +14,39 @@ RSpec.describe Resolvers::Clusters::AgentsResolver do
end
describe '#resolve' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:maintainer) { create(:user, maintainer_projects: [project]) }
let_it_be(:developer) { create(:user, developer_projects: [project]) }
let_it_be(:agents) { create_list(:cluster_agent, 2, project: project) }
let(:finder) { double(execute: relation) }
let(:relation) { double }
let(:project) { create(:project) }
let(:args) { Hash(key: 'value') }
let(:ctx) { Hash(current_user: user) }
let(:ctx) { { current_user: current_user } }
let(:lookahead) do
double(selects?: true).tap do |selection|
allow(selection).to receive(:selection).and_return(selection)
end
before do
stub_licensed_features(cluster_agents: true)
end
subject { resolve(described_class, obj: project, args: args.merge(lookahead: lookahead), ctx: ctx) }
subject { resolve_agents }
context 'the current user has access to clusters' do
let(:current_user) { maintainer }
it 'calls the agents finder' do
expect(::Clusters::AgentsFinder).to receive(:new)
.with(project, user, params: args).and_return(finder)
it 'finds all agents' do
expect(subject).to match_array(agents)
end
end
expect(relation).to receive(:preload)
.with(:agent_tokens).and_return(relation)
context 'the current user does not have access to clusters' do
let(:current_user) { developer }
expect(subject).to eq(relation)
it 'returns an empty result' do
expect(subject).to be_empty
end
end
end
def resolve_agents(args = {})
resolve(described_class, obj: project, ctx: ctx, lookahead: positive_lookahead, args: args)
end
end
RSpec.describe Resolvers::Clusters::AgentsResolver.single do
......
......@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe Resolvers::EpicIssuesResolver do
include ::Gitlab::Graphql::Laziness
include GraphqlHelpers
let_it_be(:current_user) { create(:user) }
......@@ -21,20 +22,22 @@ RSpec.describe Resolvers::EpicIssuesResolver do
let_it_be(:epic_issue3) { create(:epic_issue, epic: epic2, issue: issue3, relative_position: 1) }
let_it_be(:epic_issue4) { create(:epic_issue, epic: epic2, issue: issue4, relative_position: nil) }
specify do
expect(described_class).to have_nullable_graphql_type(Types::EpicIssueType.connection_type)
end
before do
group.add_developer(current_user)
stub_licensed_features(epics: true)
end
specify do
expect(described_class).to have_nullable_graphql_type(Types::EpicIssueType.connection_type)
end
describe '#resolve' do
let(:epics) { [epic1, epic2] }
it 'finds all epic issues' do
result = [resolve_epic_issues(epic1), resolve_epic_issues(epic2)]
result = epics.map { |epic| resolve_epic_issues(epic).to_a }
expect(result).to contain_exactly([issue2, issue1], [issue3, issue4])
expect(result).to eq [[issue2, issue1], [issue3, issue4]]
end
it 'finds only epic issues that user can read' do
......@@ -42,15 +45,15 @@ RSpec.describe Resolvers::EpicIssuesResolver do
result =
[
resolve_epic_issues(epic1, {}, { current_user: guest }),
resolve_epic_issues(epic2, {}, { current_user: guest })
resolve_epic_issues(epic1, user: guest).to_a,
resolve_epic_issues(epic2, user: guest).to_a
]
expect(result).to contain_exactly([], [issue1])
expect(result).to eq([[issue1], []])
end
end
def resolve_epic_issues(object, args = {}, context = { current_user: current_user })
resolve(described_class, obj: object, args: args, ctx: context)
def resolve_epic_issues(object, user: current_user)
force(resolve(described_class, obj: object, ctx: { current_user: user }))
end
end
......@@ -44,7 +44,7 @@ RSpec.describe Resolvers::EpicsResolver do
context 'with iids' do
it 'finds a specific epic with iids' do
expect(resolve_epics(iids: epic1.iid)).to contain_exactly(epic1)
expect(resolve_epics(iids: [epic1.iid.to_s])).to contain_exactly(epic1)
end
it 'finds multiple epics with iids' do
......
......@@ -45,11 +45,11 @@ RSpec.describe Resolvers::RequirementsManagement::RequirementsResolver do
describe 'sorting' do
context 'when sorting by created_at' do
it 'sorts requirements ascending' do
expect(resolve_requirements(sort: 'created_asc')).to eq([requirement1, requirement3, requirement2])
expect(resolve_requirements(sort: 'created_asc').to_a).to eq([requirement1, requirement3, requirement2])
end
it 'sorts requirements descending' do
expect(resolve_requirements(sort: 'created_desc')).to eq([requirement2, requirement3, requirement1])
expect(resolve_requirements(sort: 'created_desc').to_a).to eq([requirement2, requirement3, requirement1])
end
end
end
......@@ -89,7 +89,7 @@ RSpec.describe Resolvers::RequirementsManagement::RequirementsResolver do
context 'single author exists' do
let(:params) do
{ author_username: other_user.username }
{ author_username: [other_user.username] }
end
it 'filters requirements by author' do
......@@ -99,7 +99,7 @@ RSpec.describe Resolvers::RequirementsManagement::RequirementsResolver do
context 'single nonexistent author' do
let(:params) do
{ author_username: "nonsense" }
{ author_username: ["nonsense"] }
end
it_behaves_like 'returns no items'
......@@ -121,14 +121,6 @@ RSpec.describe Resolvers::RequirementsManagement::RequirementsResolver do
it_behaves_like 'returns unfiltered'
end
context 'single author is nil' do
let(:params) do
{ author_username: nil }
end
it_behaves_like 'returns unfiltered'
end
context 'an empty array' do
let(:params) do
{ author_username: [] }
......
......@@ -29,10 +29,10 @@ RSpec.describe Resolvers::SecurityReportSummaryResolver do
pipeline,
expected_selection_info
) do |summary_service|
expect(summary_service).to receive(:execute)
expect(summary_service).to receive(:execute).and_return({})
end
resolve(described_class, obj: pipeline, args: { lookahead: lookahead })
resolve(described_class, obj: pipeline, lookahead: lookahead)
end
end
end
......
......@@ -4,8 +4,11 @@ require 'spec_helper'
RSpec.describe Resolvers::VulnerabilitiesGradeResolver do
include GraphqlHelpers
include ::Gitlab::Graphql::Laziness
subject { resolve(described_class, obj: group, args: args, ctx: { current_user: user }).execute }
subject do
force(resolve(described_class, obj: group, args: args, ctx: { current_user: user }))
end
let_it_be(:group) { create(:group) }
let_it_be(:subgroup) { create(:group, parent: group) }
......
......@@ -34,15 +34,15 @@ RSpec.describe 'Getting issues for an epic' do
NODE
end
def epic_query(params = {})
def epic_query(params = {}, epic_fields = epic_node)
graphql_query_for("group", { "fullPath" => group.full_path },
query_graphql_field("epics", params, epic_node)
query_graphql_field("epics", params, epic_fields)
)
end
def issue_ids
node_array(epics_data).each_with_object({}) do |node, result|
result[node['iid'].to_i] = node_array(node['issues']['edges'], 'id')
def issue_ids(epics = epics_data)
node_array(epics).to_h do |node|
[node['iid'].to_i, node_array(node['issues']['edges'], 'id')]
end
end
......@@ -75,52 +75,21 @@ RSpec.describe 'Getting issues for an epic' do
end
context 'pagination' do
let(:after_cursor) { '' }
let(:epic_node) do
<<~NODE
edges {
node {
iid
issues(first: 1, after: "#{after_cursor}") {
pageInfo {
hasNextPage
hasPreviousPage
startCursor
endCursor
},
edges {
node {
id
}
}
}
}
}
NODE
end
context 'without a cursor' do
it 'return first page of issues' do
post_graphql(epic_query(iid: epic.iid), current_user: user)
let(:data_path) { %i[group epics nodes] + [0] + %i[issues] }
expect(response).to have_gitlab_http_status(:success)
expect(first_epic_issues_page_info['hasNextPage']).to be_truthy
expect(first_epic_issues_page_info['endCursor']).to eq 'MQ'
expect(issue_ids[epic.iid]).to eq [issue.to_global_id.to_s]
end
def pagination_query(args)
epic_query({ iid: epic.iid }, epic_fields(args))
end
context 'with an after cursor' do
let(:after_cursor) { 'MQ' }
it 'return first page after the cursor' do
post_graphql(epic_query(iid: epic.iid), current_user: user)
def epic_fields(args)
query_graphql_field(:nodes, query_nodes(:issues, :id, args: args, include_pagination_info: true))
end
expect(response).to have_gitlab_http_status(:success)
expect(first_epic_issues_page_info['hasNextPage']).to be_falsey
expect(first_epic_issues_page_info['endCursor']).to eq 'Mg'
expect(issue_ids[epic.iid]).to eq [confidential_issue.to_global_id.to_s]
end
it_behaves_like 'sorted paginated query' do
let(:current_user) { user }
let(:sort_param) { }
let(:first_param) { 1 }
let(:expected_results) { [issue, confidential_issue].map { |i| global_id_of(i) } }
end
end
end
......@@ -144,11 +113,8 @@ RSpec.describe 'Getting issues for an epic' do
let_it_be(:epic_issue3) { create(:epic_issue, epic: epic2, issue: issue2, relative_position: 3) }
let(:params) { { iids: [epic.iid, epic2.iid] } }
before do
project.add_developer(user)
end
it 'returns issues for each epic' do
project.add_developer(user)
post_graphql(epic_query(params), current_user: user)
expect(response).to have_gitlab_http_status(:success)
......@@ -157,17 +123,17 @@ RSpec.describe 'Getting issues for an epic' do
expect(result[epic2.iid]).to eq [issue2.to_global_id.to_s]
end
# TODO remove the pending state of this spec when
# we have an efficient way of preloading data on GraphQL.
# For more information check: https://gitlab.com/gitlab-org/gitlab/-/issues/207898
xit 'avoids N+1 queries' do
control_count = ActiveRecord::QueryRecorder.new do
post_graphql(epic_query(iid: epic.iid), current_user: user)
end.count
it 'avoids N+1 queries' do
user_1 = create(:user, developer_projects: [project])
user_2 = create(:user, developer_projects: [project])
control_count = ActiveRecord::QueryRecorder.new(query_recorder_debug: true) do
post_graphql(epic_query(iid: epic.iid), current_user: user_1)
end
expect do
post_graphql(epic_query(params), current_user: user)
end.not_to exceed_query_limit(control_count)
post_graphql(epic_query(params), current_user: user_2)
end.not_to exceed_query_limit(control_count).ignoring(/FROM "namespaces"/)
expect(graphql_errors).to be_nil
end
......
......@@ -9,8 +9,6 @@ RSpec.describe 'Query.instanceSecurityDashboard.projects' do
let_it_be(:other_project) { create(:project) }
let_it_be(:user) { create(:user, security_dashboard_projects: [project]) }
let(:current_user) { user }
before do
project.add_developer(user)
other_project.add_developer(user)
......@@ -18,28 +16,26 @@ RSpec.describe 'Query.instanceSecurityDashboard.projects' do
stub_licensed_features(security_dashboard: true)
end
let(:fields) do
<<~QUERY
nodes {
id
}
QUERY
end
let(:query) do
graphql_query_for('instanceSecurityDashboard', nil, query_graphql_field('projects', {}, fields))
graphql_query_for(:instance_security_dashboard, dashboard_fields)
end
subject(:projects) { graphql_data.dig('instanceSecurityDashboard', 'projects', 'nodes') }
context 'with logged in user' do
it_behaves_like 'a working graphql query' do
before do
post_graphql(query, current_user: current_user)
end
let(:current_user) { user }
context 'requesting projects in the dashboard' do
let(:dashboard_fields) { query_graphql_path(%i[projects nodes], 'id') }
it 'finds only projects that were added to instance security dashboard' do
expect(projects).to eq([{ "id" => GitlabSchema.id_from_object(project).to_s }])
subject(:projects) { graphql_data_at(:instance_security_dashboard, :projects, :nodes) }
it_behaves_like 'a working graphql query' do
before do
post_graphql(query, current_user: current_user)
end
it 'finds only projects that were added to instance security dashboard' do
expect(projects).to contain_exactly({ "id" => global_id_of(project) })
end
end
end
......@@ -188,6 +184,10 @@ RSpec.describe 'Query.instanceSecurityDashboard.projects' do
context 'with no user' do
let(:current_user) { nil }
let(:dashboard_fields) { nil }
subject { graphql_data_at(:instance_security_dashboard) }
it_behaves_like 'a working graphql query' do
before do
post_graphql(query, current_user: current_user)
......
# frozen_string_literal: true
# A marker interface that allows us to lazily resolve a wider range of values
module Gitlab
module Graphql
module Deferred
def execute
raise NotImplementedError, 'Deferred classes must provide an execute method'
end
end
end
end
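# A hedged sketch of how the marker is intended to be used: a value object
# includes Deferred and implements #execute, and forcing the value (see the
# `when ::Gitlab::Graphql::Deferred` branch added to the lazy-forcing code
# later in this diff) evaluates it on demand. ExampleDeferredCount is
# illustrative only, not part of this MR.
class ExampleDeferredCount
  include ::Gitlab::Graphql::Deferred

  def initialize(relation)
    @relation = relation
  end

  # Runs the count only when the deferred value is actually forced.
  def execute
    @count ||= @relation.count
  end
end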
......@@ -3,12 +3,12 @@
module Gitlab
module Graphql
class ExternallyPaginatedArray < Array
attr_reader :previous_cursor, :next_cursor
attr_reader :start_cursor, :end_cursor
def initialize(previous_cursor, next_cursor, *args)
super(args)
@previous_cursor = previous_cursor
@next_cursor = next_cursor
@start_cursor = previous_cursor
@end_cursor = next_cursor
end
end
end
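# A hedged usage sketch of the renamed cursor readers; the cursors and array
# contents are illustrative only. With #start_cursor/#end_cursor exposed here,
# the externally paginated connection can delegate straight to the array (see
# the connection change later in this diff).
list = Gitlab::Graphql::ExternallyPaginatedArray.new('prev-cursor', 'next-cursor', 1, 2, 3)
list.to_a         # => [1, 2, 3]
list.start_cursor # => "prev-cursor"
list.end_cursor   # => "next-cursor"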
......
# frozen_string_literal: true
module Gitlab
module Graphql
# This module allows your class to easily defer and force values.
# Its methods are just sugar for calls to the Gitlab::Graphql::Lazy class.
#
# example:
#
# class MyAwesomeClass
# include ::Gitlab::Graphql::Laziness
#
#   # takes a list of ids and a list of factors, and computes
# # sum of [SomeObject[i]#value * factor[i]]
# def resolve(ids:, factors:)
# ids.zip(factors)
# .map { |id, factor| promise_an_int(id, factor) }
# .map(&method(:force))
# .sum
# end
#
# # returns a promise for an Integer
#   def promise_an_int(id, factor)
# thunk = SomeObject.lazy_find(id)
# defer { force(thunk).value * factor }
# end
# end
#
# In the example above, we use defer to delay forcing the batch-loaded
# item until we need it, and then we use `force` to consume the lazy values
#
# If `SomeObject.lazy_find(id)` batches correctly, calling
# `resolve` will only perform one batched load for all objects, rather than
# loading them individually before combining the results.
#
module Laziness
def defer(&block)
::Gitlab::Graphql::Lazy.new(&block)
end
def force(lazy)
::Gitlab::Graphql::Lazy.force(lazy)
end
end
end
end
......@@ -24,6 +24,8 @@ module Gitlab
value.force
when ::BatchLoader::GraphQL
value.sync
when ::Gitlab::Graphql::Deferred
value.execute
when ::GraphQL::Execution::Lazy
value.value # part of the private api, but we can force this as well
when ::Concurrent::Promise
......
......@@ -8,13 +8,7 @@ module Gitlab
include ::Gitlab::Graphql::ConnectionCollectionMethods
prepend ::Gitlab::Graphql::ConnectionRedaction
def start_cursor
items.previous_cursor
end
def end_cursor
items.next_cursor
end
delegate :start_cursor, :end_cursor, to: :items
def next_page?
end_cursor.present?
......
......@@ -33,7 +33,7 @@ RSpec.describe Resolvers::AlertManagement::AlertResolver do
end
context 'finding by status' do
let(:args) { { status: [Types::AlertManagement::StatusEnum.values['IGNORED'].value] } }
let(:args) { { statuses: [Types::AlertManagement::StatusEnum.values['IGNORED'].value] } }
it { is_expected.to contain_exactly(ignored_alert) }
end
......
......@@ -29,7 +29,7 @@ RSpec.describe Resolvers::BoardListIssuesResolver do
end
it 'finds only issues matching filters' do
result = resolve_board_list_issues(args: { filters: { label_name: label.title, not: { label_name: label2.title } } }).items
result = resolve_board_list_issues(args: { filters: { label_name: [label.title], not: { label_name: [label2.title] } } }).items
expect(result).to match_array([issue1, issue3])
end
......
......@@ -17,10 +17,14 @@ RSpec.describe Resolvers::Ci::JobsResolver do
describe '#resolve' do
context 'when security_report_types is empty' do
it "returns all of the pipeline's jobs" do
jobs = resolve(described_class, obj: pipeline, args: {}, ctx: {})
job_names = jobs.map(&:name)
expect(job_names).to contain_exactly('Normal job', 'DAST job', 'SAST job', 'Container scanning job')
jobs = resolve(described_class, obj: pipeline)
expect(jobs).to contain_exactly(
have_attributes(name: 'Normal job'),
have_attributes(name: 'DAST job'),
have_attributes(name: 'SAST job'),
have_attributes(name: 'Container scanning job')
)
end
end
......@@ -30,10 +34,12 @@ RSpec.describe Resolvers::Ci::JobsResolver do
::Types::Security::ReportTypeEnum.values['SAST'].value,
::Types::Security::ReportTypeEnum.values['DAST'].value
]
jobs = resolve(described_class, obj: pipeline, args: { security_report_types: report_types }, ctx: {})
jobs = resolve(described_class, obj: pipeline, args: { security_report_types: report_types })
job_names = jobs.map(&:name)
expect(job_names).to contain_exactly('DAST job', 'SAST job')
expect(jobs).to contain_exactly(
have_attributes(name: 'DAST job'),
have_attributes(name: 'SAST job')
)
end
end
end
......
......@@ -50,6 +50,6 @@ RSpec.describe Resolvers::CommitPipelinesResolver do
it 'resolves pipelines for commit and ref' do
pipelines = resolve_pipelines
expect(pipelines).to eq([pipeline2, pipeline])
expect(pipelines.to_a).to eq([pipeline2, pipeline])
end
end
......@@ -4,18 +4,25 @@ require 'spec_helper'
RSpec.describe ::CachingArrayResolver do
include GraphqlHelpers
include Gitlab::Graphql::Laziness
let_it_be(:non_admins) { create_list(:user, 4, admin: false) }
let(:query_context) { {} }
let_it_be(:admins) { create_list(:user, 4, admin: true) }
let(:query_context) { { current_user: admins.first } }
let(:max_page_size) { 10 }
let(:field) { double('Field', max_page_size: max_page_size) }
let(:schema) { double('Schema', default_max_page_size: 3) }
let(:schema) do
Class.new(GitlabSchema) do
default_max_page_size 3
end
end
let_it_be(:caching_resolver) do
mod = described_class
Class.new(::Resolvers::BaseResolver) do
include mod
type [::Types::UserType], null: true
argument :is_admin, ::GraphQL::BOOLEAN_TYPE, required: false
def query_input(is_admin:)
is_admin
......@@ -44,6 +51,8 @@ RSpec.describe ::CachingArrayResolver do
Class.new(::Resolvers::BaseResolver) do
include mod
type [::Types::UserType], null: true
argument :username, ::GraphQL::STRING_TYPE, required: false
def query_input(username:)
username
......@@ -72,7 +81,7 @@ RSpec.describe ::CachingArrayResolver do
expect(User).to receive(:from_union).twice.and_call_original
results = users.in_groups_of(2, false).map do |users|
resolve(resolver, args: { username: users.map(&:username) }, field: field, schema: schema)
resolve(resolver, args: { username: users.map(&:username) }, schema: schema)
end
expect(results.flat_map(&method(:force))).to match_array(users)
......@@ -80,19 +89,19 @@ RSpec.describe ::CachingArrayResolver do
end
context 'all queries return results' do
let_it_be(:admins) { create_list(:admin, 3) }
let_it_be(:non_admins) { create_list(:user, 3, admin: false) }
it 'batches the queries' do
expect do
[resolve_users(true), resolve_users(false)].each(&method(:force))
end.to issue_same_number_of_queries_as { force(resolve_users(nil)) }
[resolve_users(admin: true), resolve_users(admin: false)].each(&method(:force))
end.to issue_same_number_of_queries_as { force(resolve_users(admin: nil)) }
end
it 'finds the correct values' do
found_admins = resolve_users(true)
found_others = resolve_users(false)
admins_again = resolve_users(true)
found_all = resolve_users(nil)
found_admins = resolve_users(admin: true)
found_others = resolve_users(admin: false)
admins_again = resolve_users(admin: true)
found_all = resolve_users(admin: nil)
expect(force(found_admins)).to match_array(admins)
expect(force(found_others)).to match_array(non_admins)
......@@ -104,37 +113,37 @@ RSpec.describe ::CachingArrayResolver do
it 'does not perform a union of a query with itself' do
expect(User).to receive(:where).once.and_call_original
[resolve_users(false), resolve_users(false)].each(&method(:force))
[resolve_users(admin: false), resolve_users(admin: false)].each(&method(:force))
end
context 'one of the queries returns no results' do
it 'finds the correct values' do
found_admins = resolve_users(true)
found_others = resolve_users(false)
found_all = resolve_users(nil)
found_admins = resolve_users(admin: true)
found_others = resolve_users(admin: false)
found_all = resolve_users(admin: nil)
expect(force(found_admins)).to be_empty
expect(force(found_others)).to match_array(non_admins)
expect(force(found_all)).to match_array(non_admins)
expect(force(found_admins)).to match_array(admins)
expect(force(found_others)).to be_empty
expect(force(found_all)).to match_array(admins)
end
end
context 'one of the queries has already been cached' do
before do
force(resolve_users(nil))
force(resolve_users(admin: nil))
end
it 'avoids further queries' do
expect do
repeated_find = resolve_users(nil)
repeated_find = resolve_users(admin: nil)
expect(force(repeated_find)).to match_array(non_admins)
expect(force(repeated_find)).to match_array(admins)
end.not_to exceed_query_limit(0)
end
end
context 'the resolver overrides item_found' do
let_it_be(:admins) { create_list(:admin, 2) }
let_it_be(:non_admins) { create_list(:user, 2, admin: false) }
let(:query_context) do
{
found: { true => [], false => [], nil => [] }
......@@ -150,14 +159,14 @@ RSpec.describe ::CachingArrayResolver do
end
it 'receives item_found for each key the item mapped to' do
found_admins = resolve_users(true, with_item_found)
found_all = resolve_users(nil, with_item_found)
found_admins = resolve_users(admin: true, resolver: with_item_found)
found_all = resolve_users(admin: nil, resolver: with_item_found)
[found_admins, found_all].each(&method(:force))
expect(query_context[:found]).to match({
false => be_empty,
true => match_array(admins),
false => be_empty,
nil => match_array(admins + non_admins)
})
end
......@@ -167,11 +176,11 @@ RSpec.describe ::CachingArrayResolver do
let(:max_page_size) { 2 }
it 'respects the max_page_size, on a per subset basis' do
found_all = resolve_users(nil)
found_others = resolve_users(false)
found_all = resolve_users(admin: nil)
found_admins = resolve_users(admin: true)
expect(force(found_all).size).to eq(2)
expect(force(found_others).size).to eq(2)
expect(force(found_admins).size).to eq(2)
end
end
......@@ -179,11 +188,11 @@ RSpec.describe ::CachingArrayResolver do
let(:max_page_size) { nil }
it 'takes the page size from schema.default_max_page_size' do
found_all = resolve_users(nil)
found_others = resolve_users(false)
found_all = resolve_users(admin: nil)
found_admins = resolve_users(admin: true)
expect(force(found_all).size).to eq(schema.default_max_page_size)
expect(force(found_others).size).to eq(schema.default_max_page_size)
expect(force(found_admins).size).to eq(schema.default_max_page_size)
end
end
......@@ -197,12 +206,10 @@ RSpec.describe ::CachingArrayResolver do
end
end
def resolve_users(is_admin, resolver = caching_resolver)
args = { is_admin: is_admin }
resolve(resolver, args: args, field: field, ctx: query_context, schema: schema)
end
def force(lazy)
::Gitlab::Graphql::Lazy.force(lazy)
def resolve_users(admin:, resolver: caching_resolver)
args = { is_admin: admin }
opts = resolver.field_options
allow(resolver).to receive(:field_options).and_return(opts.merge(max_page_size: max_page_size))
resolve(resolver, args: args, ctx: query_context, schema: schema, field: field)
end
end
......@@ -15,7 +15,7 @@ RSpec.describe Resolvers::DesignManagement::Version::DesignAtVersionResolver do
let(:all_singular_args) do
{
design_at_version_id: global_id_of(dav(design)),
id: global_id_of(dav(design)),
design_id: global_id_of(design),
filename: design.filename
}
......@@ -50,7 +50,7 @@ RSpec.describe Resolvers::DesignManagement::Version::DesignAtVersionResolver do
end
end
%i[design_at_version_id design_id filename].each do |arg|
%i[id design_id filename].each do |arg|
describe "passing #{arg}" do
let(:args) { all_singular_args.slice(arg) }
......@@ -71,7 +71,7 @@ RSpec.describe Resolvers::DesignManagement::Version::DesignAtVersionResolver do
describe 'attempting to retrieve an object not visible at this version' do
let(:design) { design_d }
%i[design_at_version_id design_id filename].each do |arg|
%i[id design_id filename].each do |arg|
describe "passing #{arg}" do
let(:args) { all_singular_args.slice(arg) }
......
......@@ -32,7 +32,7 @@ RSpec.describe Resolvers::DesignManagement::VersionInCollectionResolver do
end
context 'we pass an id' do
let(:params) { { version_id: global_id_of(first_version) } }
let(:params) { { id: global_id_of(first_version) } }
it { is_expected.to eq(first_version) }
end
......@@ -44,13 +44,13 @@ RSpec.describe Resolvers::DesignManagement::VersionInCollectionResolver do
end
context 'we pass an inconsistent mixture of sha and version id' do
let(:params) { { sha: first_version.sha, version_id: global_id_of(create(:design_version)) } }
let(:params) { { sha: first_version.sha, id: global_id_of(create(:design_version)) } }
it { is_expected.to be_nil }
end
context 'we pass the id of something that is not a design_version' do
let(:params) { { version_id: global_id_of(project) } }
let(:params) { { id: global_id_of(project) } }
let(:appropriate_error) { ::GraphQL::CoercionError }
it 'raises an appropriate error' do
......
......@@ -27,7 +27,7 @@ RSpec.describe Resolvers::DesignManagement::VersionsResolver do
end
shared_examples 'a source of versions' do
subject(:result) { resolve_versions(object) }
subject(:result) { resolve_versions(object)&.to_a }
let_it_be(:all_versions) { object.versions.ordered }
......@@ -39,7 +39,7 @@ RSpec.describe Resolvers::DesignManagement::VersionsResolver do
context 'without constraints' do
it 'returns the ordered versions' do
expect(result).to eq(all_versions)
expect(result.to_a).to eq(all_versions)
end
end
......@@ -51,13 +51,13 @@ RSpec.describe Resolvers::DesignManagement::VersionsResolver do
end
context 'by earlier_or_equal_to_id' do
let(:params) { { id: global_id_of(first_version) } }
let(:params) { { earlier_or_equal_to_id: global_id_of(first_version) } }
it_behaves_like 'a query for all_versions up to the first_version'
end
context 'by earlier_or_equal_to_sha' do
let(:params) { { sha: first_version.sha } }
let(:params) { { earlier_or_equal_to_sha: first_version.sha } }
it_behaves_like 'a query for all_versions up to the first_version'
end
......@@ -68,8 +68,8 @@ RSpec.describe Resolvers::DesignManagement::VersionsResolver do
# return successfully
let(:params) do
{
sha: first_version.sha,
id: global_id_of(first_version)
earlier_or_equal_to_sha: first_version.sha,
earlier_or_equal_to_id: global_id_of(first_version)
}
end
......@@ -79,8 +79,8 @@ RSpec.describe Resolvers::DesignManagement::VersionsResolver do
context 'and they do not match' do
let(:params) do
{
sha: first_version.sha,
id: global_id_of(other_version)
earlier_or_equal_to_sha: first_version.sha,
earlier_or_equal_to_id: global_id_of(other_version)
}
end
......@@ -111,7 +111,7 @@ RSpec.describe Resolvers::DesignManagement::VersionsResolver do
end
def resolve_versions(obj, context = { current_user: current_user })
eager_resolve(resolver, obj: obj, args: params.merge(parent: parent), ctx: context)
eager_resolve(resolver, obj: obj, parent: parent, args: params, ctx: context)
end
end
end
......@@ -88,8 +88,8 @@ RSpec.describe Resolvers::ErrorTracking::SentryErrorsResolver do
it 'sets the pagination variables' do
result = resolve_errors
expect(result.next_cursor).to eq 'next'
expect(result.previous_cursor).to eq 'prev'
expect(result.end_cursor).to eq 'next'
expect(result.start_cursor).to eq 'prev'
end
it 'returns an externally paginated array' do
......
......@@ -62,22 +62,13 @@ RSpec.describe Resolvers::IssueStatusCountsResolver do
end
it 'filters by issue type', :aggregate_failures do
result = resolve_issue_status_counts(issue_types: ['incident'])
result = resolve_issue_status_counts(types: ['incident'])
expect(result.all).to eq 1
expect(result.opened).to eq 0
expect(result.closed).to eq 1
end
# The state param is ignored in IssuableFinder#count_by_state
it 'ignores state filter', :aggregate_failures do
result = resolve_issue_status_counts(state: 'closed')
expect(result.all).to eq 2
expect(result.opened).to eq 1
expect(result.closed).to eq 1
end
private
def resolve_issue_status_counts(args = {}, context = { current_user: current_user })
......
......@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Resolvers::IssuesResolver do
include GraphqlHelpers
let(:current_user) { create(:user) }
let_it_be(:current_user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
......@@ -25,7 +25,7 @@ RSpec.describe Resolvers::IssuesResolver do
end
context "with a project" do
before do
before_all do
project.add_developer(current_user)
create(:label_link, label: label1, target: issue1)
create(:label_link, label: label1, target: issue2)
......@@ -43,11 +43,11 @@ RSpec.describe Resolvers::IssuesResolver do
end
it 'filters by milestone' do
expect(resolve_issues(milestone_title: milestone.title)).to contain_exactly(issue1)
expect(resolve_issues(milestone_title: [milestone.title])).to contain_exactly(issue1)
end
it 'filters by assignee_username' do
expect(resolve_issues(assignee_username: assignee.username)).to contain_exactly(issue2)
expect(resolve_issues(assignee_username: [assignee.username])).to contain_exactly(issue2)
end
it 'filters by two assignees' do
......@@ -112,15 +112,19 @@ RSpec.describe Resolvers::IssuesResolver do
describe 'filters by issue_type' do
it 'filters by a single type' do
expect(resolve_issues(issue_types: ['incident'])).to contain_exactly(issue1)
expect(resolve_issues(types: %w[incident])).to contain_exactly(issue1)
end
it 'filters by a single type, negative assertion' do
expect(resolve_issues(types: %w[issue])).not_to include(issue1)
end
it 'filters by more than one type' do
expect(resolve_issues(issue_types: %w(incident issue))).to contain_exactly(issue1, issue2)
expect(resolve_issues(types: %w[incident issue])).to contain_exactly(issue1, issue2)
end
it 'ignores the filter if none given' do
expect(resolve_issues(issue_types: [])).to contain_exactly(issue1, issue2)
expect(resolve_issues(types: [])).to contain_exactly(issue1, issue2)
end
end
......@@ -143,44 +147,44 @@ RSpec.describe Resolvers::IssuesResolver do
describe 'sorting' do
context 'when sorting by created' do
it 'sorts issues ascending' do
expect(resolve_issues(sort: 'created_asc')).to eq [issue1, issue2]
expect(resolve_issues(sort: 'created_asc').to_a).to eq [issue1, issue2]
end
it 'sorts issues descending' do
expect(resolve_issues(sort: 'created_desc')).to eq [issue2, issue1]
expect(resolve_issues(sort: 'created_desc').to_a).to eq [issue2, issue1]
end
end
context 'when sorting by due date' do
let_it_be(:project) { create(:project) }
let_it_be(:project) { create(:project, :public) }
let_it_be(:due_issue1) { create(:issue, project: project, due_date: 3.days.from_now) }
let_it_be(:due_issue2) { create(:issue, project: project, due_date: nil) }
let_it_be(:due_issue3) { create(:issue, project: project, due_date: 2.days.ago) }
let_it_be(:due_issue4) { create(:issue, project: project, due_date: nil) }
it 'sorts issues ascending' do
expect(resolve_issues(sort: :due_date_asc)).to eq [due_issue3, due_issue1, due_issue4, due_issue2]
expect(resolve_issues(sort: :due_date_asc).to_a).to eq [due_issue3, due_issue1, due_issue4, due_issue2]
end
it 'sorts issues descending' do
expect(resolve_issues(sort: :due_date_desc)).to eq [due_issue1, due_issue3, due_issue4, due_issue2]
expect(resolve_issues(sort: :due_date_desc).to_a).to eq [due_issue1, due_issue3, due_issue4, due_issue2]
end
end
context 'when sorting by relative position' do
let_it_be(:project) { create(:project) }
let_it_be(:project) { create(:project, :public) }
let_it_be(:relative_issue1) { create(:issue, project: project, relative_position: 2000) }
let_it_be(:relative_issue2) { create(:issue, project: project, relative_position: nil) }
let_it_be(:relative_issue3) { create(:issue, project: project, relative_position: 1000) }
let_it_be(:relative_issue4) { create(:issue, project: project, relative_position: nil) }
it 'sorts issues ascending' do
expect(resolve_issues(sort: :relative_position_asc)).to eq [relative_issue3, relative_issue1, relative_issue4, relative_issue2]
expect(resolve_issues(sort: :relative_position_asc).to_a).to eq [relative_issue3, relative_issue1, relative_issue4, relative_issue2]
end
end
context 'when sorting by priority' do
let_it_be(:project) { create(:project) }
let_it_be(:project) { create(:project, :public) }
let_it_be(:early_milestone) { create(:milestone, project: project, due_date: 10.days.from_now) }
let_it_be(:late_milestone) { create(:milestone, project: project, due_date: 30.days.from_now) }
let_it_be(:priority_label1) { create(:label, project: project, priority: 1) }
......@@ -200,7 +204,7 @@ RSpec.describe Resolvers::IssuesResolver do
end
context 'when sorting by label priority' do
let_it_be(:project) { create(:project) }
let_it_be(:project) { create(:project, :public) }
let_it_be(:label1) { create(:label, project: project, priority: 1) }
let_it_be(:label2) { create(:label, project: project, priority: 5) }
let_it_be(:label3) { create(:label, project: project, priority: 10) }
......@@ -219,7 +223,7 @@ RSpec.describe Resolvers::IssuesResolver do
end
context 'when sorting by milestone due date' do
let_it_be(:project) { create(:project) }
let_it_be(:project) { create(:project, :public) }
let_it_be(:early_milestone) { create(:milestone, project: project, due_date: 10.days.from_now) }
let_it_be(:late_milestone) { create(:milestone, project: project, due_date: 30.days.from_now) }
let_it_be(:milestone_issue1) { create(:issue, project: project) }
......@@ -236,17 +240,17 @@ RSpec.describe Resolvers::IssuesResolver do
end
context 'when sorting by severity' do
let_it_be(:project) { create(:project) }
let_it_be(:project) { create(:project, :public) }
let_it_be(:issue_high_severity) { create_issue_with_severity(project, severity: :high) }
let_it_be(:issue_low_severity) { create_issue_with_severity(project, severity: :low) }
let_it_be(:issue_no_severity) { create(:incident, project: project) }
it 'sorts issues ascending' do
expect(resolve_issues(sort: :severity_asc)).to eq([issue_no_severity, issue_low_severity, issue_high_severity])
expect(resolve_issues(sort: :severity_asc).to_a).to eq([issue_no_severity, issue_low_severity, issue_high_severity])
end
it 'sorts issues descending' do
expect(resolve_issues(sort: :severity_desc)).to eq([issue_high_severity, issue_low_severity, issue_no_severity])
expect(resolve_issues(sort: :severity_desc).to_a).to eq([issue_high_severity, issue_low_severity, issue_no_severity])
end
end
end
......@@ -267,7 +271,9 @@ RSpec.describe Resolvers::IssuesResolver do
it 'batches queries that only include IIDs', :request_store do
result = batch_sync(max_queries: 2) do
resolve_issues(iid: issue1.iid) + resolve_issues(iids: issue2.iid)
[issue1, issue2]
.map { |issue| resolve_issues(iid: issue.iid.to_s) }
.flat_map(&:to_a)
end
expect(result).to contain_exactly(issue1, issue2)
......
......@@ -75,9 +75,9 @@ RSpec.describe Resolvers::MergeRequestsResolver do
it 'can batch-resolve merge requests from different projects' do
# 2 queries for project_authorizations, and 2 for merge_requests
result = batch_sync(max_queries: queries_per_project * 2) do
resolve_mr(project, iids: iid_1) +
resolve_mr(project, iids: iid_2) +
resolve_mr(other_project, iids: other_iid)
resolve_mr(project, iids: [iid_1]) +
resolve_mr(project, iids: [iid_2]) +
resolve_mr(other_project, iids: [other_iid])
end
expect(result).to contain_exactly(merge_request_1, merge_request_2, other_merge_request)
......@@ -110,7 +110,7 @@ RSpec.describe Resolvers::MergeRequestsResolver do
context 'by source branches' do
it 'takes one argument' do
result = resolve_mr(project, source_branch: [merge_request_3.source_branch])
result = resolve_mr(project, source_branches: [merge_request_3.source_branch])
expect(result).to contain_exactly(merge_request_3)
end
......@@ -118,7 +118,7 @@ RSpec.describe Resolvers::MergeRequestsResolver do
it 'takes more than one argument' do
mrs = [merge_request_3, merge_request_4]
branches = mrs.map(&:source_branch)
result = resolve_mr(project, source_branch: branches )
result = resolve_mr(project, source_branches: branches )
expect(result).to match_array(mrs)
end
......@@ -126,7 +126,7 @@ RSpec.describe Resolvers::MergeRequestsResolver do
context 'by target branches' do
it 'takes one argument' do
result = resolve_mr(project, target_branch: [merge_request_3.target_branch])
result = resolve_mr(project, target_branches: [merge_request_3.target_branch])
expect(result).to contain_exactly(merge_request_3)
end
......@@ -134,7 +134,7 @@ RSpec.describe Resolvers::MergeRequestsResolver do
it 'takes more than one argument' do
mrs = [merge_request_3, merge_request_4]
branches = mrs.map(&:target_branch)
result = resolve_mr(project, target_branch: branches )
result = resolve_mr(project, target_branches: branches )
expect(result.compact).to match_array(mrs)
end
......@@ -153,13 +153,13 @@ RSpec.describe Resolvers::MergeRequestsResolver do
let_it_be(:with_label) { create(:labeled_merge_request, :closed, labels: [label], **common_attrs) }
it 'takes one argument' do
result = resolve_mr(project, label_name: [label.title])
result = resolve_mr(project, labels: [label.title])
expect(result).to contain_exactly(merge_request_6, with_label)
end
it 'takes multiple arguments, with semantics of ALL MUST MATCH' do
result = resolve_mr(project, label_name: merge_request_6.labels.map(&:title))
result = resolve_mr(project, labels: merge_request_6.labels.map(&:title))
expect(result).to contain_exactly(merge_request_6)
end
......@@ -201,7 +201,7 @@ RSpec.describe Resolvers::MergeRequestsResolver do
it 'requires all filters' do
create(:merge_request, :closed, source_project: project, target_project: project, source_branch: merge_request_4.source_branch)
result = resolve_mr(project, source_branch: [merge_request_4.source_branch], state: 'locked')
result = resolve_mr(project, source_branches: [merge_request_4.source_branch], state: 'locked')
expect(result.compact).to contain_exactly(merge_request_4)
end
......@@ -236,7 +236,7 @@ RSpec.describe Resolvers::MergeRequestsResolver do
end
def resolve_mr_single(project, iid)
resolve_mr(project, resolver: described_class.single, iids: iid)
resolve_mr(project, resolver: described_class.single, iid: iid.to_s)
end
def resolve_mr(project, resolver: described_class, user: current_user, **args)
......
......@@ -28,7 +28,7 @@ RSpec.describe Resolvers::ProjectMergeRequestsResolver do
it 'filters merge requests by assignee username' do
result = resolve_mr(project, assignee_username: other_user.username)
expect(result).to eq([merge_request])
expect(result).to contain_exactly(merge_request)
end
it 'does not find anything' do
......@@ -42,7 +42,7 @@ RSpec.describe Resolvers::ProjectMergeRequestsResolver do
it 'filters merge requests by author username' do
result = resolve_mr(project, author_username: other_user.username)
expect(result).to eq([merge_request])
expect(result).to contain_exactly(merge_request)
end
it 'does not find anything' do
......@@ -56,7 +56,7 @@ RSpec.describe Resolvers::ProjectMergeRequestsResolver do
it 'filters merge requests by reviewer username' do
result = resolve_mr(project, reviewer_username: reviewer.username)
expect(result).to eq([merge_request])
expect(result).to contain_exactly(merge_request)
end
it 'does not find anything' do
......
......@@ -6,16 +6,18 @@ RSpec.describe Resolvers::Projects::SnippetsResolver do
include GraphqlHelpers
describe '#resolve' do
let_it_be(:current_user) { create(:user) }
let_it_be(:user) { create(:user) }
let_it_be(:other_user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:personal_snippet) { create(:personal_snippet, :private, author: current_user) }
let_it_be(:project_snippet) { create(:project_snippet, :internal, author: current_user, project: project) }
let_it_be(:personal_snippet) { create(:personal_snippet, :private, author: user) }
let_it_be(:project_snippet) { create(:project_snippet, :internal, author: user, project: project) }
let_it_be(:other_project_snippet) { create(:project_snippet, :public, author: other_user, project: project) }
before do
project.add_developer(current_user)
let(:current_user) { user }
before_all do
project.add_developer(user)
end
it 'calls SnippetsFinder' do
......@@ -42,20 +44,26 @@ RSpec.describe Resolvers::Projects::SnippetsResolver do
end
it 'returns the snippets by gid' do
snippets = resolve_snippets(args: { ids: project_snippet.to_global_id })
snippets = resolve_snippets(args: { ids: [global_id_of(project_snippet)] })
expect(snippets).to contain_exactly(project_snippet)
end
it 'returns the snippets by array of gid' do
args = {
ids: [project_snippet.to_global_id, other_project_snippet.to_global_id]
ids: [global_id_of(project_snippet), global_id_of(other_project_snippet)]
}
snippets = resolve_snippets(args: args)
expect(snippets).to contain_exactly(project_snippet, other_project_snippet)
end
it 'returns an error if the gid is invalid' do
expect do
resolve_snippets(args: { ids: ['foo'] })
end.to raise_error(GraphQL::CoercionError)
end
end
context 'when no project is provided' do
......@@ -65,8 +73,10 @@ RSpec.describe Resolvers::Projects::SnippetsResolver do
end
context 'when provided user is not current user' do
let(:current_user) { other_user }
it 'returns no snippets' do
expect(resolve_snippets(context: { current_user: other_user }, args: { ids: project_snippet.to_global_id })).to be_empty
expect(resolve_snippets(args: { ids: [global_id_of(project_snippet)] })).to be_empty
end
end
......
......@@ -17,6 +17,7 @@ RSpec.describe Resolvers::ReleasesResolver do
let_it_be(:public_user) { create(:user) }
let(:args) { { sort: :released_at_desc } }
let(:all_releases) { [release_v1, release_v2, release_v3] }
before do
project.add_developer(developer)
......@@ -27,7 +28,7 @@ RSpec.describe Resolvers::ReleasesResolver do
let(:current_user) { public_user }
it 'returns an empty array' do
expect(resolve_releases).to eq([])
expect(resolve_releases).to be_empty
end
end
......@@ -35,7 +36,7 @@ RSpec.describe Resolvers::ReleasesResolver do
let(:current_user) { developer }
it 'returns all releases associated to the project' do
expect(resolve_releases).to eq([release_v3, release_v2, release_v1])
expect(resolve_releases).to match_array(all_releases)
end
describe 'sorting behavior' do
......@@ -43,7 +44,9 @@ RSpec.describe Resolvers::ReleasesResolver do
let(:args) { { sort: :released_at_desc } }
it 'returns the releases ordered by released_at in descending order' do
expect(resolve_releases).to eq([release_v3, release_v2, release_v1])
expect(resolve_releases.to_a)
.to match_array(all_releases)
.and be_sorted(:released_at, :desc)
end
end
......@@ -51,7 +54,9 @@ RSpec.describe Resolvers::ReleasesResolver do
let(:args) { { sort: :released_at_asc } }
it 'returns the releases ordered by released_at in ascending order' do
expect(resolve_releases).to eq([release_v1, release_v2, release_v3])
expect(resolve_releases.to_a)
.to match_array(all_releases)
.and be_sorted(:released_at, :asc)
end
end
......@@ -59,7 +64,9 @@ RSpec.describe Resolvers::ReleasesResolver do
let(:args) { { sort: :created_desc } }
it 'returns the releases ordered by created_at in descending order' do
expect(resolve_releases).to eq([release_v1, release_v3, release_v2])
expect(resolve_releases.to_a)
.to match_array(all_releases)
.and be_sorted(:created_at, :desc)
end
end
......@@ -67,7 +74,9 @@ RSpec.describe Resolvers::ReleasesResolver do
let(:args) { { sort: :created_asc } }
it 'returns the releases ordered by created_at in ascending order' do
expect(resolve_releases).to eq([release_v2, release_v3, release_v1])
expect(resolve_releases.to_a)
.to match_array(all_releases)
.and be_sorted(:created_at, :asc)
end
end
end
......
......@@ -84,19 +84,18 @@ RSpec.describe Resolvers::SnippetsResolver do
end
it 'returns the snippets by single gid' do
snippets = resolve_snippets(args: { ids: personal_snippet.to_global_id })
snippets = resolve_snippets(args: { ids: [global_id_of(personal_snippet)] })
expect(snippets).to contain_exactly(personal_snippet)
end
it 'returns the snippets by array of gid' do
args = {
ids: [personal_snippet.to_global_id, project_snippet.to_global_id]
}
snippets = [personal_snippet, project_snippet]
args = { ids: snippets.map { |s| global_id_of(s) } }
snippets = resolve_snippets(args: args)
found = resolve_snippets(args: args)
expect(snippets).to contain_exactly(personal_snippet, project_snippet)
expect(found).to match_array(snippets)
end
it 'returns an error if the id cannot be coerced' do
......
......@@ -20,7 +20,7 @@ RSpec.describe Resolvers::TodoResolver do
it 'calls TodosFinder' do
expect_next_instance_of(TodosFinder) do |finder|
expect(finder).to receive(:execute)
expect(finder).to receive(:execute).and_call_original
end
resolve_todos
......@@ -48,7 +48,7 @@ RSpec.describe Resolvers::TodoResolver do
end
it 'returns the todos for single filter' do
todos = resolve_todos(type: 'MergeRequest')
todos = resolve_todos(type: ['MergeRequest'])
expect(todos).to contain_exactly(merge_request_todo_pending)
end
......
......@@ -51,24 +51,23 @@ RSpec.describe Resolvers::Users::SnippetsResolver do
end
it 'returns the snippets by single gid' do
snippets = resolve_snippets(args: { ids: private_personal_snippet.to_global_id })
snippets = resolve_snippets(args: { ids: [global_id_of(private_personal_snippet)] })
expect(snippets).to contain_exactly(private_personal_snippet)
end
it 'returns the snippets by array of gid' do
args = {
ids: [private_personal_snippet.to_global_id, public_personal_snippet.to_global_id]
}
snippets = [private_personal_snippet, public_personal_snippet]
args = { ids: snippets.map { |s| global_id_of(s) } }
snippets = resolve_snippets(args: args)
found = resolve_snippets(args: args)
expect(snippets).to contain_exactly(private_personal_snippet, public_personal_snippet)
expect(found).to match_array(snippets)
end
it 'returns an error if the gid is invalid' do
args = {
ids: [private_personal_snippet.to_global_id, 'foo']
ids: [global_id_of(private_personal_snippet), 'foo']
}
expect do
......
......@@ -11,11 +11,14 @@ RSpec.describe GitlabSchema.types['AlertManagementPrometheusIntegration'] do
describe 'resolvers' do
shared_examples_for 'has field with value' do |field_name|
it 'correctly renders the field' do
expect(resolve_field(field_name, integration)).to eq(value)
result = resolve_field(field_name, integration, current_user: user)
expect(result).to eq(value)
end
end
let_it_be_with_reload(:integration) { create(:prometheus_service) }
let_it_be(:user) { create(:user, maintainer_projects: [integration.project]) }
it_behaves_like 'has field with value', 'name' do
let(:value) { integration.title }
......
......@@ -6,8 +6,9 @@ RSpec.describe 'Deleting Sidekiq jobs', :clean_gitlab_redis_queues do
include GraphqlHelpers
let_it_be(:admin) { create(:admin) }
let(:queue) { 'authorized_projects' }
let(:variables) { { user: admin.username, queue_name: 'authorized_projects' } }
let(:variables) { { user: admin.username, queue_name: queue } }
let(:mutation) { graphql_mutation(:admin_sidekiq_queues_delete_jobs, variables) }
def mutation_response
......@@ -26,18 +27,19 @@ RSpec.describe 'Deleting Sidekiq jobs', :clean_gitlab_redis_queues do
context 'valid request' do
around do |example|
Sidekiq::Queue.new('authorized_projects').clear
Sidekiq::Queue.new(queue).clear
Sidekiq::Testing.disable!(&example)
Sidekiq::Queue.new('authorized_projects').clear
Sidekiq::Queue.new(queue).clear
end
def add_job(user, args)
Sidekiq::Client.push(
'class' => 'AuthorizedProjectsWorker',
'queue' => 'authorized_projects',
'queue' => queue,
'args' => args,
'meta.user' => user.username
)
raise 'Not enqueued!' if Sidekiq::Queue.new(queue).size.zero?
end
it 'returns info about the deleted jobs' do
......@@ -55,7 +57,7 @@ RSpec.describe 'Deleting Sidekiq jobs', :clean_gitlab_redis_queues do
end
context 'when no required params are provided' do
let(:variables) { { queue_name: 'authorized_projects' } }
let(:variables) { { queue_name: queue } }
it_behaves_like 'a mutation that returns errors in the response',
errors: ['No metadata provided']
......
......@@ -163,9 +163,15 @@ RSpec.describe 'Creating a Snippet' do
context 'when there are uploaded files' do
shared_examples 'expected files argument' do |file_value, expected_value|
let(:uploaded_files) { file_value }
let(:snippet) { build(:snippet) }
let(:creation_response) do
::ServiceResponse.error(message: 'urk', payload: { snippet: snippet })
end
it do
expect(::Snippets::CreateService).to receive(:new).with(nil, user, hash_including(files: expected_value))
expect(::Snippets::CreateService).to receive(:new)
.with(nil, user, hash_including(files: expected_value))
.and_return(double(execute: creation_response))
subject
end
......
......@@ -42,7 +42,6 @@ RSpec.describe 'Query.project(fullPath).issue(iid).designCollection.version(sha)
describe 'scalar fields' do
let(:path) { path_prefix }
let(:version_fields) { query_graphql_field(:sha) }
before do
post_query
......@@ -50,7 +49,7 @@ RSpec.describe 'Query.project(fullPath).issue(iid).designCollection.version(sha)
{ id: ->(x) { x.to_global_id.to_s }, sha: ->(x) { x.sha } }.each do |field, value|
describe ".#{field}" do
let(:version_fields) { query_graphql_field(field) }
let(:version_fields) { field }
it "retrieves the #{field}" do
expect(data).to match(a_hash_including(field.to_s => value[version]))
......
......@@ -29,8 +29,8 @@ RSpec.describe 'Query.project(fullPath).issue(iid)' do
let(:design_fields) do
[
query_graphql_field(:filename),
query_graphql_field(:project, nil, query_graphql_field(:id))
:filename,
query_graphql_field(:project, :id)
]
end
......@@ -173,7 +173,7 @@ RSpec.describe 'Query.project(fullPath).issue(iid)' do
let(:result_fields) { { 'version' => id_hash(version) } }
let(:object_fields) do
design_fields + [query_graphql_field(:version, nil, query_graphql_field(:id))]
design_fields + [query_graphql_field(:version, :id)]
end
let(:no_argument_error) { missing_required_argument(path, :id) }
......
......@@ -36,7 +36,7 @@ RSpec.describe 'Query.project(fullPath).release(tagName)' do
let(:path) { path_prefix }
let(:release_fields) do
query_graphql_field(%{
%{
tagName
tagPath
description
......@@ -45,7 +45,7 @@ RSpec.describe 'Query.project(fullPath).release(tagName)' do
createdAt
releasedAt
upcomingRelease
})
}
end
before do
......@@ -233,7 +233,7 @@ RSpec.describe 'Query.project(fullPath).release(tagName)' do
let(:path) { path_prefix }
let(:release_fields) do
query_graphql_field('description')
'description'
end
before do
......@@ -394,10 +394,10 @@ RSpec.describe 'Query.project(fullPath).release(tagName)' do
let(:current_user) { developer }
let(:release_fields) do
query_graphql_field(%{
%{
releasedAt
upcomingRelease
})
}
end
before do
......
......@@ -53,32 +53,32 @@ RSpec.describe 'query terraform states' do
post_graphql(query, current_user: current_user)
end
it 'returns terraform state data', :aggregate_failures do
state = data.dig('nodes', 0)
version = state['latestVersion']
expect(state['id']).to eq(terraform_state.to_global_id.to_s)
expect(state['name']).to eq(terraform_state.name)
expect(state['lockedAt']).to eq(terraform_state.locked_at.iso8601)
expect(state['createdAt']).to eq(terraform_state.created_at.iso8601)
expect(state['updatedAt']).to eq(terraform_state.updated_at.iso8601)
expect(state.dig('lockedByUser', 'id')).to eq(terraform_state.locked_by_user.to_global_id.to_s)
expect(version['id']).to eq(latest_version.to_global_id.to_s)
expect(version['serial']).to eq(latest_version.version)
expect(version['downloadPath']).to eq(
expose_path(
api_v4_projects_terraform_state_versions_path(
id: project.id,
name: terraform_state.name,
serial: latest_version.version
)
it 'returns terraform state data' do
download_path = expose_path(
api_v4_projects_terraform_state_versions_path(
id: project.id,
name: terraform_state.name,
serial: latest_version.version
)
)
expect(version['createdAt']).to eq(latest_version.created_at.iso8601)
expect(version['updatedAt']).to eq(latest_version.updated_at.iso8601)
expect(version.dig('createdByUser', 'id')).to eq(latest_version.created_by_user.to_global_id.to_s)
expect(version.dig('job', 'name')).to eq(latest_version.build.name)
expect(data['nodes']).to contain_exactly({
'id' => global_id_of(terraform_state),
'name' => terraform_state.name,
'lockedAt' => terraform_state.locked_at.iso8601,
'createdAt' => terraform_state.created_at.iso8601,
'updatedAt' => terraform_state.updated_at.iso8601,
'lockedByUser' => { 'id' => global_id_of(terraform_state.locked_by_user) },
'latestVersion' => {
'id' => eq(global_id_of(latest_version)),
'serial' => eq(latest_version.version),
'downloadPath' => eq(download_path),
'createdAt' => eq(latest_version.created_at.iso8601),
'updatedAt' => eq(latest_version.updated_at.iso8601),
'createdByUser' => { 'id' => eq(global_id_of(latest_version.created_by_user)) },
'job' => { 'name' => eq(latest_version.build.name) }
}
})
end
it 'returns count of terraform states' do
......
......@@ -5,36 +5,34 @@ require 'spec_helper'
RSpec.describe 'getting project information' do
include GraphqlHelpers
let(:group) { create(:group) }
let(:project) { create(:project, :repository, group: group) }
let(:current_user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :repository, group: group) }
let_it_be(:current_user) { create(:user) }
let(:fields) { all_graphql_fields_for(Project, max_depth: 2, excluded: %w(jiraImports services)) }
let(:query) do
graphql_query_for(
'project',
{ 'fullPath' => project.full_path },
all_graphql_fields_for('project'.to_s.classify, excluded: %w(jiraImports services))
)
graphql_query_for(:project, { full_path: project.full_path }, fields)
end
context 'when the user has full access to the project' do
let(:full_access_query) do
graphql_query_for('project', 'fullPath' => project.full_path)
graphql_query_for(:project, { full_path: project.full_path },
all_graphql_fields_for('Project', max_depth: 2))
end
before do
project.add_maintainer(current_user)
end
it 'includes the project' do
post_graphql(query, current_user: current_user)
it 'includes the project', :use_clean_rails_memory_store_caching, :request_store do
post_graphql(full_access_query, current_user: current_user)
expect(graphql_data['project']).not_to be_nil
end
end
context 'when the user has access to the project' do
before do
context 'when the user has access to the project', :use_clean_rails_memory_store_caching, :request_store do
before_all do
project.add_developer(current_user)
end
......@@ -55,10 +53,12 @@ RSpec.describe 'getting project information' do
create(:ci_pipeline, project: project)
end
let(:fields) { query_nodes(:pipelines) }
it 'is included in the pipelines connection' do
post_graphql(query, current_user: current_user)
expect(graphql_data['project']['pipelines']['edges'].size).to eq(1)
expect(graphql_data_at(:project, :pipelines, :nodes)).to contain_exactly(a_kind_of(Hash))
end
end
......@@ -109,7 +109,7 @@ RSpec.describe 'getting project information' do
end
describe 'performance' do
before do
before_all do
project.add_developer(current_user)
mrs = create_list(:merge_request, 10, :closed, :with_head_pipeline,
source_project: project,
......
......@@ -26,27 +26,35 @@ RSpec.shared_examples 'a GraphQL type with design fields' do
end
describe '#image' do
let_it_be(:current_user) { create(:user) }
let(:schema) { GitlabSchema }
let(:query) { GraphQL::Query.new(schema) }
let(:context) { double('Context', schema: schema, query: query, parent: nil) }
let(:context) { query.context }
let(:field) { described_class.fields['image'] }
let(:args) { GraphQL::Query::Arguments::NO_ARGS }
let(:instance) do
let(:instance) { instantiate(object_id) }
let(:instance_b) { instantiate(object_id_b) }
def instantiate(object_id)
object = GitlabSchema.sync_lazy(GitlabSchema.object_from_id(object_id))
object_type.authorized_new(object, query.context)
end
let(:instance_b) do
object_b = GitlabSchema.sync_lazy(GitlabSchema.object_from_id(object_id_b))
object_type.authorized_new(object_b, query.context)
def resolve_image(instance)
field.resolve_field(instance, args, context)
end
before do
context[:current_user] = current_user
allow(Ability).to receive(:allowed?).with(current_user, :read_design, anything).and_return(true)
allow(context).to receive(:parent).and_return(nil)
end
it 'resolves to the design image URL' do
image = field.resolve_field(instance, args, context)
sha = design.versions.first.sha
url = ::Gitlab::Routing.url_helpers.project_design_management_designs_raw_image_url(design.project, design, sha)
expect(image).to eq(url)
expect(resolve_image(instance)).to eq(url)
end
it 'has better than O(N) performance', :request_store do
......@@ -68,10 +76,10 @@ RSpec.shared_examples 'a GraphQL type with design fields' do
# = 10
expect(instance).not_to eq(instance_b) # preload designs themselves.
expect do
image_a = field.resolve_field(instance, args, context)
image_b = field.resolve_field(instance, args, context)
image_c = field.resolve_field(instance_b, args, context)
image_d = field.resolve_field(instance_b, args, context)
image_a = resolve_image(instance)
image_b = resolve_image(instance)
image_c = resolve_image(instance_b)
image_d = resolve_image(instance_b)
expect(image_a).to eq(image_b)
expect(image_c).not_to eq(image_b)
expect(image_c).to eq(image_d)
......
# frozen_string_literal: true
RSpec.shared_examples 'no Jira import data present' do
it 'returns none' do
expect(resolve_imports).to eq JiraImportState.none
end
end
RSpec.shared_examples 'no Jira import access' do
it 'raises error' do
expect do
resolve_imports
end.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
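A minimal sketch of how a resolver spec might include the Jira import shared examples above (the described class, factories, and the GraphqlHelpers resolve call are assumptions, not part of this change; the including spec only has to define resolve_imports):
# Hypothetical including spec — class and helper names are assumptions, not part of this diff.
RSpec.describe Resolvers::Projects::JiraImportsResolver do
  include GraphqlHelpers
  let_it_be(:user) { create(:user) }
  let_it_be(:project) { create(:project, :private) }
  # The shared examples call `resolve_imports`, so the including spec defines it.
  def resolve_imports(context = { current_user: user })
    resolve(described_class, obj: project, ctx: context)
  end
  context 'when the user cannot read the project' do
    it_behaves_like 'no Jira import access'
  end
  context 'when the user can read the project but no imports exist' do
    before_all do
      project.add_maintainer(user)
    end
    it_behaves_like 'no Jira import data present'
  end
end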
......@@ -13,6 +13,8 @@ RSpec.shared_examples 'a mutation that returns top-level errors' do |errors: []|
it do
post_graphql_mutation(mutation, current_user: current_user)
expect(graphql_errors).to be_present
error_messages = graphql_errors.map { |e| e['message'] }
expect(error_messages).to match_errors
......
......@@ -13,7 +13,8 @@ end
RSpec.shared_examples 'can raise spam flag' do
it 'spam parameters are passed to the service' do
expect(service).to receive(:new).with(anything, anything, hash_including(api: true, request: instance_of(ActionDispatch::Request)))
args = [anything, anything, hash_including(api: true, request: instance_of(ActionDispatch::Request))]
expect(service).to receive(:new).with(*args).and_call_original
subject
end
......@@ -39,7 +40,9 @@ RSpec.shared_examples 'can raise spam flag' do
end
it 'request parameter is not passed to the service' do
expect(service).to receive(:new).with(anything, anything, hash_not_including(request: instance_of(ActionDispatch::Request)))
expect(service).to receive(:new)
.with(anything, anything, hash_not_including(request: instance_of(ActionDispatch::Request)))
.and_call_original
subject
end
......
......@@ -2,7 +2,7 @@
RSpec.shared_examples 'no project services' do
it 'returns empty collection' do
expect(resolve_services).to eq []
expect(resolve_services).to be_empty
end
end
......