Commit d7a028e2 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 0d0cddc9
Please view this file on the master branch, on stable branches it's out of date.
## 12.9.2 (2020-03-31)
### Fixed (4 changes)
- Fix direct access to individual design on deprecated issue route. !27650
- Fix error when viewing events from design notes on project activity page. !27840
- Allow Seat Link to be disabled through configuration or admin toggle. !28015
- Allow active_users param to be optional for SyncSeatLinkRequestWorker#perform. !28241
### Changed (1 change)
- Send active users for each day in seat link POST request. !27481
## 12.9.1 (2020-03-26)
### Security (1 change)
......
......@@ -2,6 +2,25 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.
## 12.9.2 (2020-03-31)
### Fixed (5 changes)
- Ensure import by URL works after a failed import. !27546
- Fix issue/MR state not being preserved when importing a project using Project Import/Export. !27816
- Leave upload Content-Type unchanged. !27864
- Disable archive rate limit by default. !28264
- Fix rake gitlab:setup failing on new installs. !28270
### Changed (1 change)
- Rename feature on the FE and locale.
### Performance (1 change)
- Index issues on sent_notifications table. !27034
## 12.9.1 (2020-03-26)
### Security (16 changes)
......
......@@ -77,9 +77,10 @@ export default {
return name;
},
fields() {
const tagClass = this.isDesktop ? 'w-25' : '';
return [
{ key: LIST_KEY_CHECKBOX, label: '', class: 'gl-w-16' },
{ key: LIST_KEY_TAG, label: LIST_LABEL_TAG, class: 'w-25' },
{ key: LIST_KEY_TAG, label: LIST_LABEL_TAG, class: `${tagClass} js-tag-column` },
{ key: LIST_KEY_IMAGE_ID, label: LIST_LABEL_IMAGE_ID },
{ key: LIST_KEY_SIZE, label: LIST_LABEL_SIZE },
{ key: LIST_KEY_LAST_UPDATED, label: LIST_LABEL_LAST_UPDATED },
......
.navbar-gitlab {
padding: 0 16px;
z-index: 1000;
z-index: $header-zindex;
margin-bottom: 0;
min-height: $header-height;
border: 0;
......
......@@ -418,6 +418,7 @@ $browser-scrollbar-size: 10px;
* Misc
*/
$header-height: 40px;
$header-zindex: 1000;
$suggestion-header-height: 46px;
$ide-statusbar-height: 25px;
$fixed-layout-width: 1280px;
......
......@@ -221,3 +221,7 @@
.editor-title-row {
margin-bottom: 20px;
}
.popover.suggest-gitlab-ci-yml {
z-index: $header-zindex - 1;
}
# frozen_string_literal: true
module Mutations
module JiraImport
class Start < BaseMutation
include Mutations::ResolvesProject
graphql_name 'JiraImportStart'
field :jira_import,
Types::JiraImportType,
null: true,
description: 'The Jira import data after mutation'
argument :project_path, GraphQL::ID_TYPE,
required: true,
description: 'The project to import the Jira project into'
argument :jira_project_key, GraphQL::STRING_TYPE,
required: true,
description: 'Project key of the imported Jira project'
argument :jira_project_name, GraphQL::STRING_TYPE,
required: false,
description: 'Project name of the imported Jira project'
def resolve(project_path:, jira_project_key:)
project = find_project!(project_path: project_path)
raise_resource_not_available_error! unless project
service_response = ::JiraImport::StartImportService
.new(context[:current_user], project, jira_project_key)
.execute
import_data = service_response.payload[:import_data]
{
jira_import: import_data.errors.blank? ? import_data.projects.last : nil,
errors: errors_on_object(import_data)
}
end
private
def find_project!(project_path:)
return unless project_path.present?
authorized_find!(full_path: project_path)
end
def find_object(full_path:)
resolve_project(full_path: full_path)
end
def authorized_resource?(project)
Ability.allowed?(context[:current_user], :admin_project, project)
end
end
end
end
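Once mounted on the mutation type (below), this field is exposed as `jiraImportStart`. A hedged sketch of exercising it end to end, for example from a Rails console: `GitlabSchema.execute`, the example user, and the project path are illustrative assumptions, while the argument and field names match the schema added in this commit.

```ruby
# Illustrative sketch only. Argument and field names come from the generated
# schema in this commit; the user and project path are placeholders.
user = User.find_by!(username: 'root')

query = <<~GRAPHQL
  mutation($projectPath: ID!, $jiraProjectKey: String!) {
    jiraImportStart(input: { projectPath: $projectPath, jiraProjectKey: $jiraProjectKey }) {
      jiraImport { jiraProjectKey }
      errors
    }
  }
GRAPHQL

result = GitlabSchema.execute(
  query,
  context: { current_user: user },
  variables: { 'projectPath' => 'group/project', 'jiraProjectKey' => 'AA' }
)

puts result.to_h.dig('data', 'jiraImportStart', 'errors')
```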
......@@ -39,6 +39,7 @@ module Types
mount_mutation Mutations::Snippets::Update
mount_mutation Mutations::Snippets::Create
mount_mutation Mutations::Snippets::MarkAsSpam
mount_mutation Mutations::JiraImport::Start
end
end
......
......@@ -914,7 +914,7 @@ module Ci
def dependencies
strong_memoize(:dependencies) do
Ci::Processable::Dependencies.new(self)
Ci::BuildDependencies.new(self)
end
end
......
# frozen_string_literal: true
module Ci
class BuildDependencies
attr_reader :processable
def initialize(processable)
@processable = processable
end
def all
(local + cross_pipeline).uniq
end
# Dependencies local to the given pipeline
def local
return [] if no_local_dependencies_specified?
deps = model_class.where(pipeline_id: processable.pipeline_id).latest
deps = from_previous_stages(deps)
deps = from_needs(deps)
deps = from_dependencies(deps)
deps
end
# Dependencies that are defined in other pipelines
def cross_pipeline
[]
end
def invalid_local
local.reject(&:valid_dependency?)
end
def valid?
valid_local? && valid_cross_pipeline?
end
private
# Dependencies can only be of Ci::Build type because only builds
# can create artifacts
def model_class
::Ci::Build
end
def valid_local?
return true if Feature.enabled?('ci_disable_validates_dependencies')
local.all?(&:valid_dependency?)
end
def valid_cross_pipeline?
true
end
def project
processable.project
end
def no_local_dependencies_specified?
processable.options[:dependencies]&.empty?
end
def from_previous_stages(scope)
scope.before_stage(processable.stage_idx)
end
def from_needs(scope)
return scope unless Feature.enabled?(:ci_dag_support, project, default_enabled: true)
return scope unless processable.scheduling_type_dag?
needs_names = processable.needs.artifacts.select(:name)
scope.where(name: needs_names)
end
def from_dependencies(scope)
return scope unless processable.options[:dependencies].present?
scope.where(name: processable.options[:dependencies])
end
end
end
Ci::BuildDependencies.prepend_if_ee('EE::Ci::BuildDependencies')
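The build model change above swaps `Ci::Processable::Dependencies` for this class inside `Ci::Build#dependencies`. A minimal usage sketch, assuming `build` is a persisted `Ci::Build`:

```ruby
# Sketch only: resolve artifact dependencies the same way Ci::Build#dependencies
# now does. `build` is assumed to be a persisted Ci::Build.
deps = Ci::BuildDependencies.new(build)

deps.all           # local plus cross-pipeline dependencies, deduplicated
deps.valid?        # false when any local dependency is no longer a valid artifact source
deps.invalid_local # the local dependencies that fail valid_dependency?
```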
# frozen_string_literal: true
module Ci
class Processable
class Dependencies
attr_reader :processable
def initialize(processable)
@processable = processable
end
def all
(local + cross_pipeline).uniq
end
# Dependencies local to the given pipeline
def local
return [] if no_local_dependencies_specified?
deps = model_class.where(pipeline_id: processable.pipeline_id).latest
deps = from_previous_stages(deps)
deps = from_needs(deps)
deps = from_dependencies(deps)
deps
end
# Dependencies that are defined in other pipelines
def cross_pipeline
[]
end
def invalid_local
local.reject(&:valid_dependency?)
end
def valid?
valid_local? && valid_cross_pipeline?
end
private
# Dependencies can only be of Ci::Build type because only builds
# can create artifacts
def model_class
::Ci::Build
end
def valid_local?
return true if Feature.enabled?('ci_disable_validates_dependencies')
local.all?(&:valid_dependency?)
end
def valid_cross_pipeline?
true
end
def project
processable.project
end
def no_local_dependencies_specified?
processable.options[:dependencies]&.empty?
end
def from_previous_stages(scope)
scope.before_stage(processable.stage_idx)
end
def from_needs(scope)
return scope unless Feature.enabled?(:ci_dag_support, project, default_enabled: true)
return scope unless processable.scheduling_type_dag?
needs_names = processable.needs.artifacts.select(:name)
scope.where(name: needs_names)
end
def from_dependencies(scope)
return scope unless processable.options[:dependencies].present?
scope.where(name: processable.options[:dependencies])
end
end
end
end
Ci::Processable::Dependencies.prepend_if_ee('EE::Ci::Processable::Dependencies')
# frozen_string_literal: true
class LabelNote < Note
class LabelNote < SyntheticNote
attr_accessor :resource_parent
attr_reader :events
def self.from_events(events, resource: nil, resource_parent: nil)
resource ||= events.first.issuable
attrs = {
system: true,
author: events.first.user,
created_at: events.first.created_at,
discussion_id: events.first.discussion_id,
noteable: resource,
system_note_metadata: SystemNoteMetadata.new(action: 'label'),
events: events,
resource_parent: resource_parent
}
if resource_parent.is_a?(Project)
attrs[:project_id] = resource_parent.id
end
attrs = note_attributes('label', events.first, resource, resource_parent).merge(events: events)
LabelNote.new(attrs)
end
......@@ -35,22 +22,10 @@ class LabelNote < Note
true
end
def note
@note ||= note_text
end
def note_html
@note_html ||= "<p dir=\"auto\">#{note_text(html: true)}</p>"
end
def project
resource_parent if resource_parent.is_a?(Project)
end
def group
resource_parent if resource_parent.is_a?(Group)
end
private
def update_outdated_markdown
......
# frozen_string_literal: true
class MilestoneNote < ::Note
attr_accessor :resource_parent, :event, :milestone
class MilestoneNote < SyntheticNote
attr_accessor :milestone
def self.from_event(event, resource: nil, resource_parent: nil)
resource ||= event.resource
attrs = {
system: true,
author: event.user,
created_at: event.created_at,
noteable: resource,
milestone: event.milestone,
discussion_id: event.discussion_id,
event: event,
system_note_metadata: ::SystemNoteMetadata.new(action: 'milestone'),
resource_parent: resource_parent
}
if resource_parent.is_a?(Project)
attrs[:project_id] = resource_parent.id
end
attrs = note_attributes('milestone', event, resource, resource_parent).merge(milestone: event.milestone)
MilestoneNote.new(attrs)
end
def note
@note ||= note_text
end
def note_html
@note_html ||= Banzai::Renderer.cacheless_render_field(self, :note, { group: group, project: project })
end
def project
resource_parent if resource_parent.is_a?(Project)
end
def group
resource_parent if resource_parent.is_a?(Group)
end
private
def note_text(html: false)
......
# frozen_string_literal: true
class SyntheticNote < Note
attr_accessor :resource_parent, :event
self.abstract_class = true
def self.note_attributes(action, event, resource, resource_parent)
resource ||= event.resource
attrs = {
system: true,
author: event.user,
created_at: event.created_at,
discussion_id: event.discussion_id,
noteable: resource,
event: event,
system_note_metadata: ::SystemNoteMetadata.new(action: action),
resource_parent: resource_parent
}
if resource_parent.is_a?(Project)
attrs[:project_id] = resource_parent.id
end
attrs
end
def project
resource_parent if resource_parent.is_a?(Project)
end
def group
resource_parent if resource_parent.is_a?(Group)
end
def note
@note ||= note_text
end
def note_html
raise NotImplementedError
end
private
def note_text(html: false)
raise NotImplementedError
end
end
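`LabelNote` and `MilestoneNote` above now build their shared attributes through `SyntheticNote.note_attributes`. As a purely hypothetical illustration (not part of this commit), another resource-event note type could follow the same pattern:

```ruby
# Hypothetical subclass for illustration only; it is not added by this commit.
class StateNote < SyntheticNote
  def self.from_event(event, resource: nil, resource_parent: nil)
    StateNote.new(note_attributes('state', event, resource, resource_parent))
  end

  def note_html
    @note_html ||= "<p dir=\"auto\">#{note_text(html: true)}</p>"
  end

  private

  def note_text(html: false)
    'changed state' # placeholder; a real subclass would derive the text from the event
  end
end
```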
......@@ -7,6 +7,15 @@
= f.check_box :gravatar_enabled, class: 'form-check-input'
= f.label :gravatar_enabled, class: 'form-check-label' do
= _('Gravatar enabled')
.form-group
= f.label :namespace_storage_size_limit, class: 'label-bold' do
= _('Maximum namespace storage (MB)')
= f.number_field :namespace_storage_size_limit, class: 'form-control', min: 0
%span.form-text.text-muted
= _('Includes repository storage, wiki storage, LFS objects, build artifacts and packages. 0 for unlimited.')
= link_to _('More information'), help_page_path('user/admin_area/settings/account_and_limit_settings', anchor: 'maximum-namespace-storage-size'), target: '_blank'
.form-group
= f.label :default_projects_limit, _('Default projects limit'), class: 'label-bold'
= f.number_field :default_projects_limit, class: 'form-control', title: _('Maximum number of projects.'), data: { toggle: 'tooltip', container: 'body' }
......
---
title: Adds filter by name to the packages list
merge_request: 27586
author:
type: added
---
title: Add jira_imports table to track current Jira import progress as well as historical import data
merge_request: 28108
author:
type: added
---
title: Leave upload Content-Type unchanged
merge_request: 27864
author:
type: fixed
---
title: Fix issue/MR state not being preserved when importing a project using Project
Import/Export
merge_request: 27816
author:
type: fixed
---
title: Index issues on sent_notifications table
merge_request: 27034
author:
type: performance
---
title: Rename feature on the FE and locale
title: Add namespace storage size limit setting
merge_request:
author:
type: changed
type: added
---
title: Fix rake gitlab:setup failing on new installs
merge_request: 28270
author:
type: fixed
---
title: Disable archive rate limit by default
merge_request: 28264
author:
type: fixed
---
title: Ensure import by URL works after a failed import
merge_request: 27546
author:
type: fixed
---
title: Allow starting a Jira import through a GraphQL mutation
merge_request: 27684
author:
type: added
# frozen_string_literal: true
class AddScannedResourcesCountToSecurityScan < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_column :security_scans, :scanned_resources_count, :integer
end
def down
remove_column :security_scans, :scanned_resources_count
end
end
# frozen_string_literal: true
class CreateJiraImportsTable < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def change
create_table :jira_imports do |t|
t.integer :project_id, null: false, limit: 8
t.integer :user_id, limit: 8
t.integer :label_id, limit: 8
t.timestamps_with_timezone
t.datetime_with_timezone :finished_at
t.integer :jira_project_xid, null: false, limit: 8
t.integer :total_issue_count, null: false, default: 0, limit: 4
t.integer :imported_issues_count, null: false, default: 0, limit: 4
t.integer :failed_to_import_count, null: false, default: 0, limit: 4
t.integer :status, limit: 2, null: false, default: 0
t.string :jid, limit: 255
t.string :jira_project_key, null: false, limit: 255
t.string :jira_project_name, null: false, limit: 255
end
add_index :jira_imports, [:project_id, :jira_project_key], name: 'index_jira_imports_on_project_id_and_jira_project_key'
end
end
# frozen_string_literal: true
class AddProjectsFkToJiraImportsTable < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_foreign_key :jira_imports, :projects, on_delete: :cascade # rubocop:disable Migration/AddConcurrentForeignKey
end
end
def down
with_lock_retries do
remove_foreign_key :jira_imports, :projects
end
end
end
# frozen_string_literal: true
class AddUsersFkToJiraImportsTable < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_foreign_key :jira_imports, :users, on_delete: :nullify # rubocop:disable Migration/AddConcurrentForeignKey
end
end
def down
with_lock_retries do
remove_foreign_key :jira_imports, :users
end
end
end
# frozen_string_literal: true
class AddUsersFkIndexOnJiraImportsTable < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_index :jira_imports, :user_id
end
def down
remove_concurrent_index :jira_imports, :user_id
end
end
# frozen_string_literal: true
class AddLabelsFkToJiraImportsTable < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_foreign_key :jira_imports, :labels, on_delete: :nullify # rubocop:disable Migration/AddConcurrentForeignKey
end
end
def down
with_lock_retries do
remove_foreign_key :jira_imports, :labels
end
end
end
# frozen_string_literal: true
class AddLabelsFkIndexOnJiraImportsTable < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_index :jira_imports, :label_id
end
def down
remove_concurrent_index :jira_imports, :label_id
end
end
......@@ -3304,6 +3304,33 @@ CREATE SEQUENCE public.jira_connect_subscriptions_id_seq
ALTER SEQUENCE public.jira_connect_subscriptions_id_seq OWNED BY public.jira_connect_subscriptions.id;
CREATE TABLE public.jira_imports (
id bigint NOT NULL,
project_id bigint NOT NULL,
user_id bigint,
label_id bigint,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
finished_at timestamp with time zone,
jira_project_xid bigint NOT NULL,
total_issue_count integer DEFAULT 0 NOT NULL,
imported_issues_count integer DEFAULT 0 NOT NULL,
failed_to_import_count integer DEFAULT 0 NOT NULL,
status smallint DEFAULT 0 NOT NULL,
jid character varying(255),
jira_project_key character varying(255) NOT NULL,
jira_project_name character varying(255) NOT NULL
);
CREATE SEQUENCE public.jira_imports_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE public.jira_imports_id_seq OWNED BY public.jira_imports.id;
CREATE TABLE public.jira_tracker_data (
id bigint NOT NULL,
service_id integer NOT NULL,
......@@ -5558,7 +5585,8 @@ CREATE TABLE public.security_scans (
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
build_id bigint NOT NULL,
scan_type smallint NOT NULL
scan_type smallint NOT NULL,
scanned_resources_count integer
);
CREATE SEQUENCE public.security_scans_id_seq
......@@ -7119,6 +7147,8 @@ ALTER TABLE ONLY public.jira_connect_installations ALTER COLUMN id SET DEFAULT n
ALTER TABLE ONLY public.jira_connect_subscriptions ALTER COLUMN id SET DEFAULT nextval('public.jira_connect_subscriptions_id_seq'::regclass);
ALTER TABLE ONLY public.jira_imports ALTER COLUMN id SET DEFAULT nextval('public.jira_imports_id_seq'::regclass);
ALTER TABLE ONLY public.jira_tracker_data ALTER COLUMN id SET DEFAULT nextval('public.jira_tracker_data_id_seq'::regclass);
ALTER TABLE ONLY public.keys ALTER COLUMN id SET DEFAULT nextval('public.keys_id_seq'::regclass);
......@@ -7872,6 +7902,9 @@ ALTER TABLE ONLY public.jira_connect_installations
ALTER TABLE ONLY public.jira_connect_subscriptions
ADD CONSTRAINT jira_connect_subscriptions_pkey PRIMARY KEY (id);
ALTER TABLE ONLY public.jira_imports
ADD CONSTRAINT jira_imports_pkey PRIMARY KEY (id);
ALTER TABLE ONLY public.jira_tracker_data
ADD CONSTRAINT jira_tracker_data_pkey PRIMARY KEY (id);
......@@ -9243,6 +9276,12 @@ CREATE UNIQUE INDEX index_jira_connect_installations_on_client_key ON public.jir
CREATE INDEX index_jira_connect_subscriptions_on_namespace_id ON public.jira_connect_subscriptions USING btree (namespace_id);
CREATE INDEX index_jira_imports_on_label_id ON public.jira_imports USING btree (label_id);
CREATE INDEX index_jira_imports_on_project_id_and_jira_project_key ON public.jira_imports USING btree (project_id, jira_project_key);
CREATE INDEX index_jira_imports_on_user_id ON public.jira_imports USING btree (user_id);
CREATE INDEX index_jira_tracker_data_on_service_id ON public.jira_tracker_data USING btree (service_id);
CREATE UNIQUE INDEX index_keys_on_fingerprint ON public.keys USING btree (fingerprint);
......@@ -11218,6 +11257,9 @@ ALTER TABLE ONLY public.deployment_clusters
ALTER TABLE ONLY public.evidences
ADD CONSTRAINT fk_rails_6388b435a6 FOREIGN KEY (release_id) REFERENCES public.releases(id) ON DELETE CASCADE;
ALTER TABLE ONLY public.jira_imports
ADD CONSTRAINT fk_rails_63cbe52ada FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY public.vulnerability_occurrence_pipelines
ADD CONSTRAINT fk_rails_6421e35d7d FOREIGN KEY (pipeline_id) REFERENCES public.ci_pipelines(id) ON DELETE CASCADE;
......@@ -11257,6 +11299,9 @@ ALTER TABLE ONLY public.operations_feature_flags_clients
ALTER TABLE ONLY public.web_hook_logs
ADD CONSTRAINT fk_rails_666826e111 FOREIGN KEY (web_hook_id) REFERENCES public.web_hooks(id) ON DELETE CASCADE;
ALTER TABLE ONLY public.jira_imports
ADD CONSTRAINT fk_rails_675d38c03b FOREIGN KEY (label_id) REFERENCES public.labels(id) ON DELETE SET NULL;
ALTER TABLE ONLY public.geo_hashed_storage_migrated_events
ADD CONSTRAINT fk_rails_687ed7d7c5 FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE;
......@@ -11671,6 +11716,9 @@ ALTER TABLE ONLY public.vulnerability_issue_links
ALTER TABLE ONLY public.geo_hashed_storage_attachments_events
ADD CONSTRAINT fk_rails_d496b088e9 FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY public.jira_imports
ADD CONSTRAINT fk_rails_da617096ce FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE SET NULL;
ALTER TABLE ONLY public.dependency_proxy_blobs
ADD CONSTRAINT fk_rails_db58bbc5d7 FOREIGN KEY (group_id) REFERENCES public.namespaces(id) ON DELETE CASCADE;
......@@ -12826,6 +12874,7 @@ COPY "schema_migrations" (version) FROM STDIN;
20200312163407
20200313101649
20200313123934
20200314060834
20200316111759
20200316162648
20200316173312
......@@ -12854,5 +12903,11 @@ COPY "schema_migrations" (version) FROM STDIN;
20200325152327
20200325160952
20200325183636
20200326114443
20200326124443
20200326134443
20200326135443
20200326144443
20200326145443
\.
......@@ -4155,6 +4155,51 @@ type JiraImportEdge {
node: JiraImport
}
"""
Autogenerated input type of JiraImportStart
"""
input JiraImportStartInput {
"""
A unique identifier for the client performing the mutation.
"""
clientMutationId: String
"""
Project key of the imported Jira project
"""
jiraProjectKey: String!
"""
Project name of the imported Jira project
"""
jiraProjectName: String
"""
The project to import the Jira project into
"""
projectPath: ID!
}
"""
Autogenerated return type of JiraImportStart
"""
type JiraImportStartPayload {
"""
A unique identifier for the client performing the mutation.
"""
clientMutationId: String
"""
Reasons why the mutation failed.
"""
errors: [String!]!
"""
The Jira import data after mutation
"""
jiraImport: JiraImport
}
type Label {
"""
Background color of the label
......@@ -5180,6 +5225,7 @@ type Mutation {
issueSetConfidential(input: IssueSetConfidentialInput!): IssueSetConfidentialPayload
issueSetDueDate(input: IssueSetDueDateInput!): IssueSetDueDatePayload
issueSetWeight(input: IssueSetWeightInput!): IssueSetWeightPayload
jiraImportStart(input: JiraImportStartInput!): JiraImportStartPayload
markAsSpamSnippet(input: MarkAsSpamSnippetInput!): MarkAsSpamSnippetPayload
mergeRequestSetAssignees(input: MergeRequestSetAssigneesInput!): MergeRequestSetAssigneesPayload
mergeRequestSetLabels(input: MergeRequestSetLabelsInput!): MergeRequestSetLabelsPayload
......
......@@ -11810,6 +11810,132 @@
"enumValues": null,
"possibleTypes": null
},
{
"kind": "INPUT_OBJECT",
"name": "JiraImportStartInput",
"description": "Autogenerated input type of JiraImportStart",
"fields": null,
"inputFields": [
{
"name": "projectPath",
"description": "The project to import the Jira project into",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "ID",
"ofType": null
}
},
"defaultValue": null
},
{
"name": "jiraProjectKey",
"description": "Project key of the imported Jira project",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
},
"defaultValue": null
},
{
"name": "jiraProjectName",
"description": "Project name of the imported Jira project",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
},
{
"name": "clientMutationId",
"description": "A unique identifier for the client performing the mutation.",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
}
],
"interfaces": null,
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "JiraImportStartPayload",
"description": "Autogenerated return type of JiraImportStart",
"fields": [
{
"name": "clientMutationId",
"description": "A unique identifier for the client performing the mutation.",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "errors",
"description": "Reasons why the mutation failed.",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "jiraImport",
"description": "The Jira import data after mutation",
"args": [
],
"type": {
"kind": "OBJECT",
"name": "JiraImport",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "Label",
......@@ -15290,6 +15416,33 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "jiraImportStart",
"description": null,
"args": [
{
"name": "input",
"description": null,
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "INPUT_OBJECT",
"name": "JiraImportStartInput",
"ofType": null
}
},
"defaultValue": null
}
],
"type": {
"kind": "OBJECT",
"name": "JiraImportStartPayload",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "markAsSpamSnippet",
"description": null,
......
......@@ -614,6 +614,16 @@ Autogenerated return type of IssueSetWeight
| `scheduledAt` | Time | Timestamp of when the Jira import was created/started |
| `scheduledBy` | User | User that started the Jira import |
## JiraImportStartPayload
Autogenerated return type of JiraImportStart
| Name | Type | Description |
| --- | ---- | ---------- |
| `clientMutationId` | String | A unique identifier for the client performing the mutation. |
| `errors` | String! => Array | Reasons why the mutation failed. |
| `jiraImport` | JiraImport | The Jira import data after mutation |
## Label
| Name | Type | Description |
......
......@@ -303,6 +303,13 @@ use of extensions and concurrent index removal, you need at least PostgreSQL 9.2
sudo apt-get install -y postgresql postgresql-client libpq-dev postgresql-contrib
```
1. Start the PostgreSQL service and confirm that the service is running:
```shell
sudo service postgresql start
sudo service postgresql status
```
1. Create a database user for GitLab:
```shell
......
......@@ -15,6 +15,19 @@ If you choose a size larger than what is currently configured for the web server
you will likely get errors. See the [troubleshooting section](#troubleshooting) for more
details.
## Maximum namespace storage size
This sets a maximum size limit on each namespace. The following are included in the namespace size:
- repository
- wiki
- LFS objects
- build artifacts
- packages
NOTE: **Note:**
This limit is not currently enforced but will be in a future release.
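For operators who want to check the configured value from scripts, here is a hedged Rails console sketch; it assumes the value is persisted on `ApplicationSetting` as `namespace_storage_size_limit` (in MB, with 0 meaning unlimited), which is what the admin form field added in this commit suggests:

```ruby
# Assumption: the setting is readable through Gitlab::CurrentSettings, like
# other ApplicationSetting columns. The value is in MB; 0 disables the limit.
limit_mb = Gitlab::CurrentSettings.namespace_storage_size_limit

if limit_mb.to_i.zero?
  puts 'Namespace storage is unlimited'
else
  puts "Namespace storage is capped at #{limit_mb} MB"
end
```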
## Repository size limit **(STARTER ONLY)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/740) in [GitLab Enterprise Edition 8.12](https://about.gitlab.com/releases/2016/09/22/gitlab-8-12-released/#limit-project-size-ee).
......
......@@ -16,15 +16,9 @@ module Gitlab
Gitlab::Ci::Pipeline::Chain::Config::Content::AutoDevops
].freeze
LEGACY_SOURCES = [
Gitlab::Ci::Pipeline::Chain::Config::Content::Bridge,
Gitlab::Ci::Pipeline::Chain::Config::Content::LegacyRepository,
Gitlab::Ci::Pipeline::Chain::Config::Content::LegacyAutoDevops
].freeze
def perform!
if config = find_config
@pipeline.build_pipeline_config(content: config.content) if ci_root_config_content_enabled?
@pipeline.build_pipeline_config(content: config.content)
@command.config_content = config.content
@pipeline.config_source = config.source
else
......@@ -39,21 +33,13 @@ module Gitlab
private
def find_config
sources.each do |source|
SOURCES.each do |source|
config = source.new(@pipeline, @command)
return config if config.exists?
end
nil
end
def sources
ci_root_config_content_enabled? ? SOURCES : LEGACY_SOURCES
end
def ci_root_config_content_enabled?
Feature.enabled?(:ci_root_config_content, @command.project, default_enabled: true)
end
end
end
end
......
# frozen_string_literal: true
module Gitlab
module Ci
module Pipeline
module Chain
module Config
class Content
class LegacyAutoDevops < Source
def content
strong_memoize(:content) do
next unless project&.auto_devops_enabled?
template = Gitlab::Template::GitlabCiYmlTemplate.find(template_name)
template.content
end
end
def source
:auto_devops_source
end
private
def template_name
'Auto-DevOps'
end
end
end
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module Ci
module Pipeline
module Chain
module Config
class Content
class LegacyRepository < Source
def content
strong_memoize(:content) do
next unless project
next unless @pipeline.sha
next unless ci_config_path
project.repository.gitlab_ci_yml_for(@pipeline.sha, ci_config_path)
rescue GRPC::NotFound, GRPC::Internal
nil
end
end
def source
:repository_source
end
end
end
end
end
end
end
end
......@@ -2270,6 +2270,9 @@ msgstr ""
msgid "April"
msgstr ""
msgid "Archive"
msgstr ""
msgid "Archive jobs"
msgstr ""
......@@ -10942,6 +10945,9 @@ msgstr ""
msgid "Includes an MVC structure, mvnw and pom.xml to help you get started."
msgstr ""
msgid "Includes repository storage, wiki storage, LFS objects, build artifacts and packages. 0 for unlimited."
msgstr ""
msgid "Incoming email"
msgstr ""
......@@ -12045,6 +12051,9 @@ msgstr ""
msgid "Live preview"
msgstr ""
msgid "Loading"
msgstr ""
msgid "Loading blob"
msgstr ""
......@@ -12393,6 +12402,9 @@ msgstr ""
msgid "Maximum lifetime allowable for Personal Access Tokens is active, your expire date must be set before %{maximum_allowable_date}."
msgstr ""
msgid "Maximum namespace storage (MB)"
msgstr ""
msgid "Maximum number of %{name} (%{count}) exceeded"
msgstr ""
......@@ -14075,6 +14087,9 @@ msgstr ""
msgid "PackageRegistry|Delete package"
msgstr ""
msgid "PackageRegistry|Filter by name"
msgstr ""
msgid "PackageRegistry|For more information on the Conan registry, %{linkStart}see the documentation%{linkEnd}."
msgstr ""
......@@ -14123,6 +14138,9 @@ msgstr ""
msgid "PackageRegistry|Remove package"
msgstr ""
msgid "PackageRegistry|Sorry, your filter produced no results"
msgstr ""
msgid "PackageRegistry|There are no %{packageType} packages yet"
msgstr ""
......@@ -14132,6 +14150,9 @@ msgstr ""
msgid "PackageRegistry|There was a problem fetching the details for this package."
msgstr ""
msgid "PackageRegistry|To widen your search, change or remove the filters above."
msgstr ""
msgid "PackageRegistry|Unable to load package"
msgstr ""
......@@ -18645,6 +18666,9 @@ msgstr ""
msgid "Something went wrong while fetching related merge requests."
msgstr ""
msgid "Something went wrong while fetching requirements list."
msgstr ""
msgid "Something went wrong while fetching the environments for this merge request. Please try again."
msgstr ""
......@@ -20136,6 +20160,9 @@ msgstr ""
msgid "There are no archived projects yet"
msgstr ""
msgid "There are no archived requirements"
msgstr ""
msgid "There are no changes"
msgstr ""
......@@ -20169,6 +20196,9 @@ msgstr ""
msgid "There are no open merge requests"
msgstr ""
msgid "There are no open requirements"
msgstr ""
msgid "There are no packages yet"
msgstr ""
......@@ -23916,6 +23946,9 @@ msgstr ""
msgid "created"
msgstr ""
msgid "created %{timeAgo}"
msgstr ""
msgid "customize"
msgstr ""
......@@ -24784,6 +24817,9 @@ msgstr ""
msgid "updated"
msgstr ""
msgid "updated %{timeAgo}"
msgstr ""
msgid "updated %{time_ago}"
msgstr ""
......
......@@ -36,7 +36,7 @@
"updated_at": { "type": "date" },
"system": { "type": "boolean" },
"noteable_id": { "type": "integer" },
"noteable_iid": { "type": "integer" },
"noteable_iid": { "type": ["integer", "null"] },
"noteable_type": { "type": "string" },
"resolved": { "type": "boolean" },
"resolvable": { "type": "boolean" },
......
......@@ -29,10 +29,11 @@ describe('Details Page', () => {
const findAllDeleteButtons = () => wrapper.findAll('.js-delete-registry');
const findAllCheckboxes = () => wrapper.findAll('.js-row-checkbox');
const findCheckedCheckboxes = () => findAllCheckboxes().filter(c => c.attributes('checked'));
const findFirstTagColumn = () => wrapper.find('.js-tag-column');
const routeId = window.btoa(JSON.stringify({ name: 'foo', tags_path: 'bar' }));
beforeEach(() => {
const mountComponent = options => {
wrapper = mount(component, {
store,
stubs: {
......@@ -49,7 +50,11 @@ describe('Details Page', () => {
},
$toast,
},
...options,
});
};
beforeEach(() => {
dispatchSpy = jest.spyOn(store, 'dispatch');
store.dispatch('receiveTagsListSuccess', tagsListResponse);
jest.spyOn(Tracking, 'event');
......@@ -61,6 +66,7 @@ describe('Details Page', () => {
describe('when isLoading is true', () => {
beforeEach(() => {
mountComponent();
store.dispatch('receiveTagsListSuccess', { ...tagsListResponse, data: [] });
store.commit(SET_MAIN_LOADING, true);
});
......@@ -81,6 +87,10 @@ describe('Details Page', () => {
});
describe('table', () => {
beforeEach(() => {
mountComponent();
});
it.each([
'rowCheckbox',
'rowName',
......@@ -93,6 +103,10 @@ describe('Details Page', () => {
});
describe('header checkbox', () => {
beforeEach(() => {
mountComponent();
});
it('exists', () => {
expect(findMainCheckbox().exists()).toBe(true);
});
......@@ -116,6 +130,10 @@ describe('Details Page', () => {
});
describe('row checkbox', () => {
beforeEach(() => {
mountComponent();
});
it('if selected adds item to selectedItems', () => {
findFirstRowItem('rowCheckbox').vm.$emit('change');
return wrapper.vm.$nextTick().then(() => {
......@@ -135,6 +153,10 @@ describe('Details Page', () => {
});
describe('header delete button', () => {
beforeEach(() => {
mountComponent();
});
it('exists', () => {
expect(findBulkDeleteButton().exists()).toBe(true);
});
......@@ -182,6 +204,10 @@ describe('Details Page', () => {
});
describe('row delete button', () => {
beforeEach(() => {
mountComponent();
});
it('exists', () => {
expect(
findAllDeleteButtons()
......@@ -213,9 +239,39 @@ describe('Details Page', () => {
});
});
});
describe('tag cell', () => {
describe('on desktop viewport', () => {
beforeEach(() => {
mountComponent();
});
it('has class w-25', () => {
expect(findFirstTagColumn().classes()).toContain('w-25');
});
});
describe('on mobile viewport', () => {
beforeEach(() => {
mountComponent({
data() {
return { isDesktop: false };
},
});
});
it('does not have class w-25', () => {
expect(findFirstTagColumn().classes()).not.toContain('w-25');
});
});
});
});
describe('pagination', () => {
beforeEach(() => {
mountComponent();
});
it('exists', () => {
expect(findPagination().exists()).toBe(true);
});
......@@ -238,6 +294,10 @@ describe('Details Page', () => {
});
describe('modal', () => {
beforeEach(() => {
mountComponent();
});
it('exists', () => {
expect(findDeleteModal().exists()).toBe(true);
});
......
......@@ -10,146 +10,6 @@ describe Gitlab::Ci::Pipeline::Chain::Config::Content do
subject { described_class.new(pipeline, command) }
describe '#perform!' do
context 'when feature flag is disabled' do
before do
stub_feature_flags(ci_root_config_content: false)
end
context 'when bridge job is passed in as parameter' do
let(:ci_config_path) { nil }
let(:bridge) { create(:ci_bridge) }
before do
command.bridge = bridge
end
context 'when bridge job has downstream yaml' do
before do
allow(bridge).to receive(:yaml_for_downstream).and_return('the-yaml')
end
it 'returns the content already available in command' do
subject.perform!
expect(pipeline.config_source).to eq 'bridge_source'
expect(command.config_content).to eq 'the-yaml'
end
end
context 'when bridge job does not have downstream yaml' do
before do
allow(bridge).to receive(:yaml_for_downstream).and_return(nil)
end
it 'returns the next available source' do
subject.perform!
expect(pipeline.config_source).to eq 'auto_devops_source'
template = Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps')
expect(command.config_content).to eq(template.content)
end
end
end
context 'when config is defined in a custom path in the repository' do
let(:ci_config_path) { 'path/to/config.yml' }
before do
expect(project.repository)
.to receive(:gitlab_ci_yml_for)
.with(pipeline.sha, ci_config_path)
.and_return('the-content')
end
it 'returns the content of the YAML file' do
subject.perform!
expect(pipeline.config_source).to eq 'repository_source'
expect(pipeline.pipeline_config).to be_nil
expect(command.config_content).to eq('the-content')
end
end
context 'when config is defined remotely' do
let(:ci_config_path) { 'http://example.com/path/to/ci/config.yml' }
it 'does not support URLs and default to AutoDevops' do
subject.perform!
expect(pipeline.config_source).to eq 'auto_devops_source'
expect(pipeline.pipeline_config).to be_nil
template = Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps')
expect(command.config_content).to eq(template.content)
end
end
context 'when config is defined in a separate repository' do
let(:ci_config_path) { 'path/to/.gitlab-ci.yml@another-group/another-repo' }
it 'does not support YAML from external repository and default to AutoDevops' do
subject.perform!
expect(pipeline.config_source).to eq 'auto_devops_source'
expect(pipeline.pipeline_config).to be_nil
template = Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps')
expect(command.config_content).to eq(template.content)
end
end
context 'when config is defined in the default .gitlab-ci.yml' do
let(:ci_config_path) { nil }
before do
expect(project.repository)
.to receive(:gitlab_ci_yml_for)
.with(pipeline.sha, '.gitlab-ci.yml')
.and_return('the-content')
end
it 'returns the content of the canonical config file' do
subject.perform!
expect(pipeline.config_source).to eq 'repository_source'
expect(pipeline.pipeline_config).to be_nil
expect(command.config_content).to eq('the-content')
end
end
context 'when config is the Auto-Devops template' do
let(:ci_config_path) { nil }
before do
expect(project).to receive(:auto_devops_enabled?).and_return(true)
end
it 'returns the content of AutoDevops template' do
subject.perform!
expect(pipeline.config_source).to eq 'auto_devops_source'
expect(pipeline.pipeline_config).to be_nil
template = Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps')
expect(command.config_content).to eq(template.content)
end
end
context 'when config is not defined anywhere' do
let(:ci_config_path) { nil }
before do
expect(project).to receive(:auto_devops_enabled?).and_return(false)
end
it 'builds root config including the auto-devops template' do
subject.perform!
expect(pipeline.config_source).to eq('unknown_source')
expect(pipeline.pipeline_config).to be_nil
expect(command.config_content).to be_nil
expect(pipeline.errors.full_messages).to include('Missing CI config file')
end
end
end
context 'when bridge job is passed in as parameter' do
let(:ci_config_path) { nil }
let(:bridge) { create(:ci_bridge) }
......
......@@ -2,7 +2,7 @@
require 'spec_helper'
describe Ci::Processable::Dependencies do
describe Ci::BuildDependencies do
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :repository) }
......
# frozen_string_literal: true
require 'spec_helper'
describe 'Starting a Jira Import' do
include GraphqlHelpers
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project) }
let(:jira_project_key) { 'AA' }
let(:project_path) { project.full_path }
let(:mutation) do
variables = {
jira_project_key: jira_project_key,
project_path: project_path
}
graphql_mutation(:jira_import_start, variables)
end
def mutation_response
graphql_mutation_response(:jira_import_start)
end
def jira_import
mutation_response['jiraImport']
end
context 'when the user does not have permission' do
before do
stub_feature_flags(jira_issue_import: true)
end
shared_examples 'Jira import does not start' do
it 'does not start the Jira import' do
post_graphql_mutation(mutation, current_user: current_user)
expect(project.reload.import_state).to be nil
expect(project.reload.import_data).to be nil
end
end
context 'with anonymous user' do
let(:current_user) { nil }
it_behaves_like 'Jira import does not start'
it_behaves_like 'a mutation that returns top-level errors',
errors: [Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR]
end
context 'with user without permissions' do
let(:current_user) { user }
let(:project_path) { project.full_path }
before do
project.add_developer(current_user)
end
it_behaves_like 'Jira import does not start'
it_behaves_like 'a mutation that returns top-level errors',
errors: [Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR]
end
end
context 'when the user has permission' do
let(:current_user) { user }
before do
project.add_maintainer(current_user)
end
context 'with project' do
context 'when the project path is invalid' do
let(:project_path) { 'foobar' }
it 'returns an error' do
post_graphql_mutation(mutation, current_user: current_user)
errors = json_response['errors']
expect(errors.first['message']).to eq(Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR)
end
end
context 'when the jira_issue_import feature flag is disabled' do
before do
stub_feature_flags(jira_issue_import: false)
end
it_behaves_like 'a mutation that returns errors in the response', errors: ['Jira import feature is disabled.']
end
context 'when the jira_issue_import feature flag is enabled' do
before do
stub_feature_flags(jira_issue_import: true)
end
context 'when project has no Jira service' do
it_behaves_like 'a mutation that returns errors in the response', errors: ['Jira integration not configured.']
end
context 'when project has Jira service' do
let!(:service) { create(:jira_service, project: project) }
before do
project.reload
end
context 'when jira_project_key not provided' do
let(:jira_project_key) { '' }
it_behaves_like 'a mutation that returns errors in the response', errors: ['Unable to find Jira project to import data from.']
end
context 'when jira import successfully scheduled' do
it 'schedules a Jira import' do
post_graphql_mutation(mutation, current_user: current_user)
expect(jira_import['jiraProjectKey']).to eq 'AA'
expect(jira_import['scheduledBy']['username']).to eq current_user.username
expect(project.import_state).not_to be nil
expect(project.import_state.status).to eq 'scheduled'
expect(project.import_data.becomes(JiraImportData).projects.last.scheduled_by['user_id']).to eq current_user.id
end
end
end
end
end
end
end
......@@ -5,9 +5,9 @@ require 'spec_helper'
describe API::ProjectClusters do
include KubernetesHelpers
let(:current_user) { create(:user) }
let(:developer_user) { create(:user) }
let(:project) { create(:project) }
let_it_be(:current_user) { create(:user) }
let_it_be(:developer_user) { create(:user) }
let_it_be(:project) { create(:project) }
before do
project.add_maintainer(current_user)
......@@ -15,10 +15,10 @@ describe API::ProjectClusters do
end
describe 'GET /projects/:id/clusters' do
let!(:extra_cluster) { create(:cluster, :provided_by_gcp, :project) }
let_it_be(:extra_cluster) { create(:cluster, :provided_by_gcp, :project) }
let!(:clusters) do
create_list(:cluster, 5, :provided_by_gcp, :project, :production_environment,
let_it_be(:clusters) do
create_list(:cluster, 2, :provided_by_gcp, :project, :production_environment,
projects: [project])
end
......@@ -35,17 +35,15 @@ describe API::ProjectClusters do
get api("/projects/#{project.id}/clusters", current_user)
end
it 'responds with 200' do
expect(response).to have_gitlab_http_status(:ok)
end
it 'includes pagination headers' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
end
it 'only includes authorized clusters' do
cluster_ids = json_response.map { |cluster| cluster['id'] }
expect(response).to have_gitlab_http_status(:ok)
expect(cluster_ids).to match_array(clusters.pluck(:id))
expect(cluster_ids).not_to include(extra_cluster.id)
end
......@@ -139,7 +137,7 @@ describe API::ProjectClusters do
end
context 'with non-existing cluster' do
let(:cluster_id) { 123 }
let(:cluster_id) { 0 }
it 'returns 404' do
expect(response).to have_gitlab_http_status(:not_found)
......@@ -185,14 +183,11 @@ describe API::ProjectClusters do
end
context 'with valid params' do
it 'responds with 201' do
expect(response).to have_gitlab_http_status(:created)
end
it 'creates a new Cluster::Cluster' do
cluster_result = Clusters::Cluster.find(json_response["id"])
platform_kubernetes = cluster_result.platform
expect(response).to have_gitlab_http_status(:created)
expect(cluster_result).to be_user
expect(cluster_result).to be_kubernetes
expect(cluster_result.project).to eq(project)
......@@ -235,15 +230,9 @@ describe API::ProjectClusters do
context 'with invalid params' do
let(:namespace) { 'invalid_namespace' }
it 'responds with 400' do
expect(response).to have_gitlab_http_status(:bad_request)
end
it 'does not create a new Clusters::Cluster' do
expect(response).to have_gitlab_http_status(:bad_request)
expect(project.reload.clusters).to be_empty
end
it 'returns validation errors' do
expect(json_response['message']['platform_kubernetes.namespace'].first).to be_present
end
end
......@@ -259,8 +248,8 @@ describe API::ProjectClusters do
it 'responds with 400' do
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['base'].first).to eq(_('Instance does not support multiple Kubernetes clusters'))
expect(json_response['message']['base'].first)
.to eq(_('Instance does not support multiple Kubernetes clusters'))
end
end
......@@ -271,7 +260,6 @@ describe API::ProjectClusters do
it 'responds with 403' do
expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden')
end
end
......@@ -281,7 +269,7 @@ describe API::ProjectClusters do
let(:api_url) { 'https://kubernetes.example.com' }
let(:namespace) { 'new-namespace' }
let(:platform_kubernetes_attributes) { { namespace: namespace } }
let(:management_project) { create(:project, namespace: project.namespace) }
let_it_be(:management_project) { create(:project, namespace: project.namespace) }
let(:management_project_id) { management_project.id }
let(:update_params) do
......@@ -321,11 +309,8 @@ describe API::ProjectClusters do
end
context 'with valid params' do
it 'responds with 200' do
expect(response).to have_gitlab_http_status(:ok)
end
it 'updates cluster attributes' do
expect(response).to have_gitlab_http_status(:ok)
expect(cluster.domain).to eq('new-domain.com')
expect(cluster.platform_kubernetes.namespace).to eq('new-namespace')
expect(cluster.management_project).to eq(management_project)
......@@ -335,29 +320,24 @@ describe API::ProjectClusters do
context 'with invalid params' do
let(:namespace) { 'invalid_namespace' }
it 'responds with 400' do
expect(response).to have_gitlab_http_status(:bad_request)
end
it 'does not update cluster attributes' do
expect(response).to have_gitlab_http_status(:bad_request)
expect(cluster.domain).not_to eq('new_domain.com')
expect(cluster.platform_kubernetes.namespace).not_to eq('invalid_namespace')
expect(cluster.management_project).not_to eq(management_project)
end
it 'returns validation errors' do
expect(json_response['message']['platform_kubernetes.namespace'].first).to match('can contain only lowercase letters')
expect(json_response['message']['platform_kubernetes.namespace'].first)
.to match('can contain only lowercase letters')
end
end
context 'current user does not have access to management_project_id' do
let(:management_project_id) { create(:project).id }
it 'responds with 400' do
expect(response).to have_gitlab_http_status(:bad_request)
end
let_it_be(:management_project_id) { create(:project).id }
it 'returns validation errors' do
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['management_project_id'].first).to match('don\'t have permission')
end
end
......@@ -371,12 +351,10 @@ describe API::ProjectClusters do
}
end
it 'responds with 400' do
expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns validation error' do
expect(json_response['message']['platform_kubernetes.base'].first).to eq(_('Cannot modify managed Kubernetes cluster'))
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['platform_kubernetes.base'].first)
.to eq(_('Cannot modify managed Kubernetes cluster'))
end
end
......@@ -412,13 +390,10 @@ describe API::ProjectClusters do
}
end
it 'responds with 200' do
expect(response).to have_gitlab_http_status(:ok)
end
it 'updates platform kubernetes attributes' do
platform_kubernetes = cluster.platform_kubernetes
expect(response).to have_gitlab_http_status(:ok)
expect(cluster.name).to eq('new-name')
expect(platform_kubernetes.namespace).to eq('new-namespace')
expect(platform_kubernetes.api_url).to eq('https://new-api-url.com')
......@@ -439,7 +414,7 @@ describe API::ProjectClusters do
describe 'DELETE /projects/:id/clusters/:cluster_id' do
let(:cluster_params) { { cluster_id: cluster.id } }
let(:cluster) do
let_it_be(:cluster) do
create(:cluster, :project, :provided_by_gcp,
projects: [project])
end
......@@ -457,11 +432,8 @@ describe API::ProjectClusters do
delete api("/projects/#{project.id}/clusters/#{cluster.id}", current_user), params: cluster_params
end
it 'responds with 204' do
expect(response).to have_gitlab_http_status(:no_content)
end
it 'deletes the cluster' do
expect(response).to have_gitlab_http_status(:no_content)
expect(Clusters::Cluster.exists?(id: cluster.id)).to be_falsy
end
......
......@@ -30,12 +30,10 @@ module StubGitlabCalls
# Stub the first call to `include:[local: .gitlab-ci.yml]` when
# evaluating the CI root config content.
if Feature.enabled?(:ci_root_config_content, default_enabled: true)
allow_any_instance_of(Gitlab::Ci::Config::External::File::Local)
.to receive(:content)
.and_return(ci_yaml_content)
end
end
def stub_pipeline_modified_paths(pipeline, modified_paths)
allow(pipeline).to receive(:modified_paths).and_return(modified_paths)
......