Commit 7aada820 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent b5ad0617
...@@ -264,7 +264,7 @@ gem 'licensee', '~> 8.9' ...@@ -264,7 +264,7 @@ gem 'licensee', '~> 8.9'
gem 'ace-rails-ap', '~> 4.1.0' gem 'ace-rails-ap', '~> 4.1.0'
# Detect and convert string character encoding # Detect and convert string character encoding
gem 'charlock_holmes', '~> 0.7.5' gem 'charlock_holmes', '~> 0.7.7'
# Detect mime content type from content # Detect mime content type from content
gem 'mimemagic', '~> 0.3.2' gem 'mimemagic', '~> 0.3.2'
......
...@@ -142,7 +142,7 @@ GEM ...@@ -142,7 +142,7 @@ GEM
mime-types (>= 1.16) mime-types (>= 1.16)
cause (0.1) cause (0.1)
character_set (1.1.2) character_set (1.1.2)
charlock_holmes (0.7.6) charlock_holmes (0.7.7)
childprocess (0.9.0) childprocess (0.9.0)
ffi (~> 1.0, >= 1.0.11) ffi (~> 1.0, >= 1.0.11)
chronic (0.10.2) chronic (0.10.2)
...@@ -1143,7 +1143,7 @@ DEPENDENCIES ...@@ -1143,7 +1143,7 @@ DEPENDENCIES
capybara (~> 3.22.0) capybara (~> 3.22.0)
capybara-screenshot (~> 1.0.22) capybara-screenshot (~> 1.0.22)
carrierwave (~> 1.3) carrierwave (~> 1.3)
charlock_holmes (~> 0.7.5) charlock_holmes (~> 0.7.7)
chronic (~> 0.10.2) chronic (~> 0.10.2)
commonmarker (~> 0.20) commonmarker (~> 0.20)
concurrent-ruby (~> 1.1) concurrent-ruby (~> 1.1)
......
...@@ -93,7 +93,7 @@ class List { ...@@ -93,7 +93,7 @@ class List {
entityType = 'milestone_id'; entityType = 'milestone_id';
} }
return gl.boardService return boardsStore
.createList(entity.id, entityType) .createList(entity.id, entityType)
.then(res => res.data) .then(res => res.data)
.then(data => { .then(data => {
...@@ -111,14 +111,14 @@ class List { ...@@ -111,14 +111,14 @@ class List {
boardsStore.state.lists.splice(index, 1); boardsStore.state.lists.splice(index, 1);
boardsStore.updateNewListDropdown(this.id); boardsStore.updateNewListDropdown(this.id);
gl.boardService.destroyList(this.id).catch(() => { boardsStore.destroyList(this.id).catch(() => {
// TODO: handle request error // TODO: handle request error
}); });
} }
update() { update() {
const collapsed = !this.isExpanded; const collapsed = !this.isExpanded;
return gl.boardService.updateList(this.id, this.position, collapsed).catch(() => { return boardsStore.updateList(this.id, this.position, collapsed).catch(() => {
// TODO: handle request error // TODO: handle request error
}); });
} }
...@@ -147,7 +147,7 @@ class List { ...@@ -147,7 +147,7 @@ class List {
this.loading = true; this.loading = true;
} }
return gl.boardService return boardsStore
.getIssuesForList(this.id, data) .getIssuesForList(this.id, data)
.then(res => res.data) .then(res => res.data)
.then(data => { .then(data => {
...@@ -168,7 +168,7 @@ class List { ...@@ -168,7 +168,7 @@ class List {
this.addIssue(issue, null, 0); this.addIssue(issue, null, 0);
this.issuesSize += 1; this.issuesSize += 1;
return gl.boardService return boardsStore
.newIssue(this.id, issue) .newIssue(this.id, issue)
.then(res => res.data) .then(res => res.data)
.then(data => this.onNewIssueResponse(issue, data)); .then(data => this.onNewIssueResponse(issue, data));
...@@ -276,7 +276,7 @@ class List { ...@@ -276,7 +276,7 @@ class List {
this.issues.splice(oldIndex, 1); this.issues.splice(oldIndex, 1);
this.issues.splice(newIndex, 0, issue); this.issues.splice(newIndex, 0, issue);
gl.boardService.moveIssue(issue.id, null, null, moveBeforeId, moveAfterId).catch(() => { boardsStore.moveIssue(issue.id, null, null, moveBeforeId, moveAfterId).catch(() => {
// TODO: handle request error // TODO: handle request error
}); });
} }
...@@ -287,7 +287,7 @@ class List { ...@@ -287,7 +287,7 @@ class List {
}); });
this.issues.splice(newIndex, 0, ...issues); this.issues.splice(newIndex, 0, ...issues);
gl.boardService boardsStore
.moveMultipleIssues({ .moveMultipleIssues({
ids: issues.map(issue => issue.id), ids: issues.map(issue => issue.id),
fromListId: null, fromListId: null,
...@@ -299,15 +299,13 @@ class List { ...@@ -299,15 +299,13 @@ class List {
} }
updateIssueLabel(issue, listFrom, moveBeforeId, moveAfterId) { updateIssueLabel(issue, listFrom, moveBeforeId, moveAfterId) {
gl.boardService boardsStore.moveIssue(issue.id, listFrom.id, this.id, moveBeforeId, moveAfterId).catch(() => {
.moveIssue(issue.id, listFrom.id, this.id, moveBeforeId, moveAfterId)
.catch(() => {
// TODO: handle request error // TODO: handle request error
}); });
} }
updateMultipleIssues(issues, listFrom, moveBeforeId, moveAfterId) { updateMultipleIssues(issues, listFrom, moveBeforeId, moveAfterId) {
gl.boardService boardsStore
.moveMultipleIssues({ .moveMultipleIssues({
ids: issues.map(issue => issue.id), ids: issues.map(issue => issue.id),
fromListId: listFrom.id, fromListId: listFrom.id,
...@@ -359,7 +357,7 @@ class List { ...@@ -359,7 +357,7 @@ class List {
if (this.issuesSize > 1) { if (this.issuesSize > 1) {
const moveBeforeId = this.issues[1].id; const moveBeforeId = this.issues[1].id;
gl.boardService.moveIssue(issue.id, null, null, null, moveBeforeId); boardsStore.moveIssue(issue.id, null, null, null, moveBeforeId);
} }
} }
} }
......
...@@ -9,6 +9,12 @@ module Mutations ...@@ -9,6 +9,12 @@ module Mutations
GitlabSchema.object_from_id(id) GitlabSchema.object_from_id(id)
end end
def map_to_global_ids(ids)
return [] if ids.blank?
ids.map { |id| to_global_id(id) }
end
def to_global_id(id) def to_global_id(id)
::URI::GID.build(app: GlobalID.app, model_name: Todo.name, model_id: id, params: nil).to_s ::URI::GID.build(app: GlobalID.app, model_name: Todo.name, model_id: id, params: nil).to_s
end end
......
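For reference, a minimal sketch of what these base-class helpers produce, assuming the standard GlobalID configuration in the GitLab Rails app (where `GlobalID.app` is `gitlab`); the calls would run inside a mutation that inherits from this base class and are not part of the change itself:

```ruby
# Minimal sketch, console-style; output values are illustrative.
to_global_id(42)
# => "gid://gitlab/Todo/42"

map_to_global_ids([1, 2])
# => ["gid://gitlab/Todo/1", "gid://gitlab/Todo/2"]

map_to_global_ids(nil)
# => []
```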
# frozen_string_literal: true
module Mutations
module Todos
class MarkAllDone < ::Mutations::Todos::Base
graphql_name 'TodosMarkAllDone'
authorize :update_user
field :updated_ids,
[GraphQL::ID_TYPE],
null: false,
description: 'Ids of the updated todos'
def resolve
authorize!(current_user)
updated_ids = mark_all_todos_done
{
updated_ids: map_to_global_ids(updated_ids),
errors: []
}
end
private
def mark_all_todos_done
return [] unless current_user
TodoService.new.mark_all_todos_as_done_by_user(current_user)
end
end
end
end
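As a usage sketch, the new mutation takes an empty input object (the only input field, `clientMutationId`, is optional) and returns the global IDs of the todos it marked done. The field names match the schema additions further down; holding the query in a heredoc and posting it to the instance's `/api/graphql` endpoint as the authenticated user is an assumption, just one way a client or spec might use it:

```ruby
# Minimal sketch of the GraphQL document for the new mutation.
TODOS_MARK_ALL_DONE_MUTATION = <<~GRAPHQL
  mutation {
    todosMarkAllDone(input: {}) {
      updatedIds   # e.g. ["gid://gitlab/Todo/1", "gid://gitlab/Todo/3"]
      errors
    }
  }
GRAPHQL
```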
...@@ -22,6 +22,7 @@ module Types ...@@ -22,6 +22,7 @@ module Types
mount_mutation Mutations::Notes::Destroy mount_mutation Mutations::Notes::Destroy
mount_mutation Mutations::Todos::MarkDone mount_mutation Mutations::Todos::MarkDone
mount_mutation Mutations::Todos::Restore mount_mutation Mutations::Todos::Restore
mount_mutation Mutations::Todos::MarkAllDone
end end
end end
......
...@@ -174,6 +174,11 @@ class TodoService ...@@ -174,6 +174,11 @@ class TodoService
mark_todos_as_done(todos, current_user) mark_todos_as_done(todos, current_user)
end end
def mark_all_todos_as_done_by_user(current_user)
todos = TodosFinder.new(current_user).execute
mark_todos_as_done(todos, current_user)
end
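A console-style sketch of the new service method in isolation (the user lookup is hypothetical); as the specs below confirm, it returns the ids of the todos whose state actually changed:

```ruby
# Minimal sketch, assuming a Rails console in the GitLab application.
user = User.find_by(username: 'some-user') # hypothetical lookup

updated_ids = TodoService.new.mark_all_todos_as_done_by_user(user)
# => ids of the user's previously pending todos, now marked done, e.g. [1, 3]
```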
# When user marks some todos as pending # When user marks some todos as pending
def mark_todos_as_pending(todos, current_user) def mark_todos_as_pending(todos, current_user)
update_todos_state(todos, current_user, :pending) update_todos_state(todos, current_user, :pending)
......
---
title: Add GraphQL mutation to mark all todos done for a user
merge_request: 19482
author:
type: added
---
title: Removes references of BoardService in list file
merge_request: 20145
author: nuwe1
type: other
...@@ -3520,6 +3520,7 @@ type Mutation { ...@@ -3520,6 +3520,7 @@ type Mutation {
removeAwardEmoji(input: RemoveAwardEmojiInput!): RemoveAwardEmojiPayload removeAwardEmoji(input: RemoveAwardEmojiInput!): RemoveAwardEmojiPayload
todoMarkDone(input: TodoMarkDoneInput!): TodoMarkDonePayload todoMarkDone(input: TodoMarkDoneInput!): TodoMarkDonePayload
todoRestore(input: TodoRestoreInput!): TodoRestorePayload todoRestore(input: TodoRestoreInput!): TodoRestorePayload
todosMarkAllDone(input: TodosMarkAllDoneInput!): TodosMarkAllDonePayload
toggleAwardEmoji(input: ToggleAwardEmojiInput!): ToggleAwardEmojiPayload toggleAwardEmoji(input: ToggleAwardEmojiInput!): ToggleAwardEmojiPayload
updateEpic(input: UpdateEpicInput!): UpdateEpicPayload updateEpic(input: UpdateEpicInput!): UpdateEpicPayload
updateNote(input: UpdateNoteInput!): UpdateNotePayload updateNote(input: UpdateNoteInput!): UpdateNotePayload
...@@ -5060,6 +5061,36 @@ enum TodoTargetEnum { ...@@ -5060,6 +5061,36 @@ enum TodoTargetEnum {
MERGEREQUEST MERGEREQUEST
} }
"""
Autogenerated input type of TodosMarkAllDone
"""
input TodosMarkAllDoneInput {
"""
A unique identifier for the client performing the mutation.
"""
clientMutationId: String
}
"""
Autogenerated return type of TodosMarkAllDone
"""
type TodosMarkAllDonePayload {
"""
A unique identifier for the client performing the mutation.
"""
clientMutationId: String
"""
Reasons why the mutation failed.
"""
errors: [String!]!
"""
Ids of the updated todos
"""
updatedIds: [ID!]!
}
""" """
Autogenerated input type of ToggleAwardEmoji Autogenerated input type of ToggleAwardEmoji
""" """
......
...@@ -14355,6 +14355,33 @@ ...@@ -14355,6 +14355,33 @@
"isDeprecated": false, "isDeprecated": false,
"deprecationReason": null "deprecationReason": null
}, },
{
"name": "todosMarkAllDone",
"description": null,
"args": [
{
"name": "input",
"description": null,
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "INPUT_OBJECT",
"name": "TodosMarkAllDoneInput",
"ofType": null
}
},
"defaultValue": null
}
],
"type": {
"kind": "OBJECT",
"name": "TodosMarkAllDonePayload",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{ {
"name": "toggleAwardEmoji", "name": "toggleAwardEmoji",
"description": null, "description": null,
...@@ -16825,6 +16852,106 @@ ...@@ -16825,6 +16852,106 @@
"enumValues": null, "enumValues": null,
"possibleTypes": null "possibleTypes": null
}, },
{
"kind": "OBJECT",
"name": "TodosMarkAllDonePayload",
"description": "Autogenerated return type of TodosMarkAllDone",
"fields": [
{
"name": "clientMutationId",
"description": "A unique identifier for the client performing the mutation.",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "errors",
"description": "Reasons why the mutation failed.",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "updatedIds",
"description": "Ids of the updated todos",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "ID",
"ofType": null
}
}
}
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "INPUT_OBJECT",
"name": "TodosMarkAllDoneInput",
"description": "Autogenerated input type of TodosMarkAllDone",
"fields": null,
"inputFields": [
{
"name": "clientMutationId",
"description": "A unique identifier for the client performing the mutation.",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
}
],
"interfaces": null,
"enumValues": null,
"possibleTypes": null
},
{ {
"kind": "OBJECT", "kind": "OBJECT",
"name": "DesignManagementUploadPayload", "name": "DesignManagementUploadPayload",
......
...@@ -785,6 +785,14 @@ The API can be explored interactively using the [GraphiQL IDE](../index.md#graph ...@@ -785,6 +785,14 @@ The API can be explored interactively using the [GraphiQL IDE](../index.md#graph
| `errors` | String! => Array | Reasons why the mutation failed. | | `errors` | String! => Array | Reasons why the mutation failed. |
| `todo` | Todo! | The requested todo | | `todo` | Todo! | The requested todo |
### TodosMarkAllDonePayload
| Name | Type | Description |
| --- | ---- | ---------- |
| `clientMutationId` | String | A unique identifier for the client performing the mutation. |
| `errors` | String! => Array | Reasons why the mutation failed. |
| `updatedIds` | ID! => Array | Ids of the updated todos |
### ToggleAwardEmojiPayload ### ToggleAwardEmojiPayload
| Name | Type | Description | | Name | Type | Description |
......
...@@ -1539,9 +1539,14 @@ cache: ...@@ -1539,9 +1539,14 @@ cache:
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/18986) in GitLab v12.5. > [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/18986) in GitLab v12.5.
If `cache:key:files` is added, one or two files must be defined with it. The cache `key` The `cache:key:files` keyword extends the `cache:key` functionality by making it easier
will be a SHA computed from the most recent commits (one or two) that changed the to reuse some caches, and rebuild them less often, which will speed up subsequent pipeline
given files. If neither file was changed in any commits, the key will be `default`. runs.
When you include `cache:key:files`, you must also list the project files that will be used to generate the key, up to a maximum of two files.
The cache `key` will be a SHA checksum computed from the most recent commits (up to two, if two files are listed)
that changed the given files. If neither file was changed in any commits,
the fallback key will be `default`.
```yaml ```yaml
cache: cache:
...@@ -1554,20 +1559,26 @@ cache: ...@@ -1554,20 +1559,26 @@ cache:
- node_modules - node_modules
``` ```
In this example we are creating a cache for Ruby and Nodejs dependencies that
is tied to current versions of the `Gemfile.lock` and `package.json` files. Whenever one of
these files changes, a new cache key is computed and a new cache is created. Any future
job runs using the same `Gemfile.lock` and `package.json` with `cache:key:files` will
use the new cache, instead of rebuilding the dependencies.
##### `cache:key:prefix` ##### `cache:key:prefix`
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/18986) in GitLab v12.5. > [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/18986) in GitLab v12.5.
The `prefix` parameter adds extra functionality to `key:files` by allowing the key to The `prefix` parameter adds extra functionality to `key:files` by allowing the key to
be composed of the given `prefix` combined with the SHA computed for `cache:key:files`. be composed of the given `prefix` combined with the SHA computed for `cache:key:files`.
For example, adding a `prefix` of `rspec`, will For example, adding a `prefix` of `test`, will cause keys to look like: `test-feef9576d21ee9b6a32e30c5c79d0a0ceb68d1e5`.
cause keys to look like: `rspec-feef9576d21ee9b6a32e30c5c79d0a0ceb68d1e5`. If neither If neither file was changed in any commits, the prefix is added to `default`, so the
file was changed in any commits, the prefix is added to `default`, so the key in the key in the example would be `test-default`.
example would be `rspec-default`.
`prefix` follows the same restrictions as `key`, so it can use any of the Like `cache:key`, `prefix` can use any of the [predefined variables](../variables/README.md),
[predefined variables](../variables/README.md). Similarly, the `/` character or the but the following are not allowed:
equivalent URI-encoded `%2F`, or a value made only of `.` or `%2E`, is not allowed.
- the `/` character (or the equivalent URI-encoded `%2F`)
- a value made only of `.` (or the equivalent URI-encoded `%2E`)
```yaml ```yaml
cache: cache:
...@@ -1577,8 +1588,20 @@ cache: ...@@ -1577,8 +1588,20 @@ cache:
prefix: ${CI_JOB_NAME} prefix: ${CI_JOB_NAME}
paths: paths:
- vendor/ruby - vendor/ruby
rspec:
script:
- bundle exec rspec
``` ```
For example, adding a `prefix` of `$CI_JOB_NAME` will
cause the key to look like: `rspec-feef9576d21ee9b6a32e30c5c79d0a0ceb68d1e5` and
the job cache is shared across different branches. If a branch changes
`Gemfile.lock`, that branch will have a new SHA checksum for `cache:key:files`. A new cache key
will be generated, and a new cache will be created for that key.
If `Gemfile.lock` is not found, the prefix is added to
`default`, so the key in the example would be `rspec-default`.
#### `cache:untracked` #### `cache:untracked`
Set `untracked: true` to cache all files that are untracked in your Git Set `untracked: true` to cache all files that are untracked in your Git
......
...@@ -7,7 +7,7 @@ module Quality ...@@ -7,7 +7,7 @@ module Quality
class HelmClient class HelmClient
CommandFailedError = Class.new(StandardError) CommandFailedError = Class.new(StandardError)
attr_reader :namespace attr_reader :tiller_namespace, :namespace
RELEASE_JSON_ATTRIBUTES = %w[Name Revision Updated Status Chart AppVersion Namespace].freeze RELEASE_JSON_ATTRIBUTES = %w[Name Revision Updated Status Chart AppVersion Namespace].freeze
...@@ -24,7 +24,8 @@ module Quality ...@@ -24,7 +24,8 @@ module Quality
# A single page of data and the corresponding page number. # A single page of data and the corresponding page number.
Page = Struct.new(:releases, :number) Page = Struct.new(:releases, :number)
def initialize(namespace:) def initialize(tiller_namespace:, namespace:)
@tiller_namespace = tiller_namespace
@namespace = namespace @namespace = namespace
end end
...@@ -35,7 +36,7 @@ module Quality ...@@ -35,7 +36,7 @@ module Quality
def delete(release_name:) def delete(release_name:)
run_command([ run_command([
'delete', 'delete',
%(--tiller-namespace "#{namespace}"), %(--tiller-namespace "#{tiller_namespace}"),
'--purge', '--purge',
release_name release_name
]) ])
...@@ -60,7 +61,7 @@ module Quality ...@@ -60,7 +61,7 @@ module Quality
command = [ command = [
'list', 'list',
%(--namespace "#{namespace}"), %(--namespace "#{namespace}"),
%(--tiller-namespace "#{namespace}" --output json), %(--tiller-namespace "#{tiller_namespace}" --output json),
*args *args
] ]
json = JSON.parse(run_command(command)) json = JSON.parse(run_command(command))
......
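With the new keyword argument, callers pass the Tiller namespace explicitly instead of reusing the release namespace. A minimal sketch, assuming `Quality::HelmClient` has been required and a reachable `helm`/Tiller setup; the namespace and release values are placeholders:

```ruby
client = Quality::HelmClient.new(
  tiller_namespace: 'review-apps-ee', # where Tiller runs
  namespace: 'review-apps-ee'         # where the releases live
)

releases = client.releases(args: ['--deployed']).to_a # shells out to `helm list`
client.delete(release_name: 'review-1234-abcdef')     # shells out to `helm delete --purge`
```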
...@@ -25,7 +25,6 @@ class AutomatedCleanup ...@@ -25,7 +25,6 @@ class AutomatedCleanup
def initialize(project_path: ENV['CI_PROJECT_PATH'], gitlab_token: ENV['GITLAB_BOT_REVIEW_APPS_CLEANUP_TOKEN']) def initialize(project_path: ENV['CI_PROJECT_PATH'], gitlab_token: ENV['GITLAB_BOT_REVIEW_APPS_CLEANUP_TOKEN'])
@project_path = project_path @project_path = project_path
@gitlab_token = gitlab_token @gitlab_token = gitlab_token
ENV['TILLER_NAMESPACE'] ||= review_apps_namespace
end end
def gitlab def gitlab
...@@ -45,7 +44,9 @@ class AutomatedCleanup ...@@ -45,7 +44,9 @@ class AutomatedCleanup
end end
def helm def helm
@helm ||= Quality::HelmClient.new(namespace: review_apps_namespace) @helm ||= Quality::HelmClient.new(
tiller_namespace: review_apps_namespace,
namespace: review_apps_namespace)
end end
def kubernetes def kubernetes
......
[[ "$TRACE" ]] && set -x [[ "$TRACE" ]] && set -x
export TILLER_NAMESPACE="$KUBE_NAMESPACE"
function deploy_exists() { function deploy_exists() {
local namespace="${1}" local namespace="${1}"
local deploy="${2}" local release="${2}"
echoinfo "Checking if ${deploy} exists in the ${namespace} namespace..." true local deploy_exists
helm status --tiller-namespace "${namespace}" "${deploy}" >/dev/null 2>&1 echoinfo "Checking if ${release} exists in the ${namespace} namespace..." true
local deploy_exists=$?
echoinfo "Deployment status for ${deploy} is ${deploy_exists}" helm status --tiller-namespace "${namespace}" "${release}" >/dev/null 2>&1
deploy_exists=$?
echoinfo "Deployment status for ${release} is ${deploy_exists}"
return $deploy_exists return $deploy_exists
} }
function previous_deploy_failed() { function previous_deploy_failed() {
local deploy="${1}" local namespace="${1}"
echoinfo "Checking for previous deployment of ${deploy}" true local release="${2}"
echoinfo "Checking for previous deployment of ${release}" true
helm status "${deploy}" >/dev/null 2>&1 helm status --tiller-namespace "${namespace}" "${release}" >/dev/null 2>&1
local status=$? local status=$?
# if `status` is `0`, deployment exists, has a status # if `status` is `0`, deployment exists, has a status
if [ $status -eq 0 ]; then if [ $status -eq 0 ]; then
echoinfo "Previous deployment found, checking status..." echoinfo "Previous deployment found, checking status..."
deployment_status=$(helm status "${deploy}" | grep ^STATUS | cut -d' ' -f2) deployment_status=$(helm status --tiller-namespace "${namespace}" "${release}" | grep ^STATUS | cut -d' ' -f2)
echoinfo "Previous deployment state: ${deployment_status}" echoinfo "Previous deployment state: ${deployment_status}"
if [[ "$deployment_status" == "FAILED" || "$deployment_status" == "PENDING_UPGRADE" || "$deployment_status" == "PENDING_INSTALL" ]]; then if [[ "$deployment_status" == "FAILED" || "$deployment_status" == "PENDING_UPGRADE" || "$deployment_status" == "PENDING_INSTALL" ]]; then
status=0; status=0;
...@@ -37,30 +40,34 @@ function previous_deploy_failed() { ...@@ -37,30 +40,34 @@ function previous_deploy_failed() {
} }
function delete_release() { function delete_release() {
if [ -z "$CI_ENVIRONMENT_SLUG" ]; then local namespace="${KUBE_NAMESPACE}"
local release="${CI_ENVIRONMENT_SLUG}"
if [ -z "${release}" ]; then
echoerr "No release given, aborting the delete!" echoerr "No release given, aborting the delete!"
return return
fi fi
local name="$CI_ENVIRONMENT_SLUG" echoinfo "Deleting release '${release}'..." true
echoinfo "Deleting release '$name'..." true
helm delete --purge "$name" helm delete --tiller-namespace "${namespace}" --purge "${release}"
} }
function delete_failed_release() { function delete_failed_release() {
if [ -z "$CI_ENVIRONMENT_SLUG" ]; then local namespace="${KUBE_NAMESPACE}"
local release="${CI_ENVIRONMENT_SLUG}"
if [ -z "${release}" ]; then
echoerr "No release given, aborting the delete!" echoerr "No release given, aborting the delete!"
return return
fi fi
if ! deploy_exists "${KUBE_NAMESPACE}" "${CI_ENVIRONMENT_SLUG}"; then if ! deploy_exists "${namespace}" "${release}"; then
echoinfo "No Review App with ${CI_ENVIRONMENT_SLUG} is currently deployed." echoinfo "No Review App with ${release} is currently deployed."
else else
# Cleanup and previous installs, as FAILED and PENDING_UPGRADE will cause errors with `upgrade` # Cleanup and previous installs, as FAILED and PENDING_UPGRADE will cause errors with `upgrade`
if previous_deploy_failed "$CI_ENVIRONMENT_SLUG" ; then if previous_deploy_failed "${namespace}" "${release}" ; then
echoinfo "Review App deployment in bad state, cleaning up $CI_ENVIRONMENT_SLUG" echoinfo "Review App deployment in bad state, cleaning up ${release}"
delete_release delete_release
else else
echoinfo "Review App deployment in good state" echoinfo "Review App deployment in good state"
...@@ -70,9 +77,12 @@ function delete_failed_release() { ...@@ -70,9 +77,12 @@ function delete_failed_release() {
function get_pod() { function get_pod() {
local namespace="${KUBE_NAMESPACE}"
local release="${CI_ENVIRONMENT_SLUG}"
local app_name="${1}" local app_name="${1}"
local status="${2-Running}" local status="${2-Running}"
get_pod_cmd="kubectl get pods -n ${KUBE_NAMESPACE} --field-selector=status.phase=${status} -lapp=${app_name},release=${CI_ENVIRONMENT_SLUG} --no-headers -o=custom-columns=NAME:.metadata.name | tail -n 1"
get_pod_cmd="kubectl get pods --namespace ${namespace} --field-selector=status.phase=${status} -lapp=${app_name},release=${release} --no-headers -o=custom-columns=NAME:.metadata.name | tail -n 1"
echoinfo "Waiting till '${app_name}' pod is ready" true echoinfo "Waiting till '${app_name}' pod is ready" true
echoinfo "Running '${get_pod_cmd}'" echoinfo "Running '${get_pod_cmd}'"
...@@ -111,19 +121,24 @@ function check_kube_domain() { ...@@ -111,19 +121,24 @@ function check_kube_domain() {
} }
function ensure_namespace() { function ensure_namespace() {
echoinfo "Ensuring the ${KUBE_NAMESPACE} namespace exists..." true local namespace="${KUBE_NAMESPACE}"
echoinfo "Ensuring the ${namespace} namespace exists..." true
kubectl describe namespace "$KUBE_NAMESPACE" || kubectl create namespace "$KUBE_NAMESPACE" kubectl describe namespace "${namespace}" || kubectl create namespace "${namespace}"
} }
function install_tiller() { function install_tiller() {
echoinfo "Checking deployment/tiller-deploy status in the ${TILLER_NAMESPACE} namespace..." true local namespace="${KUBE_NAMESPACE}"
echoinfo "Checking deployment/tiller-deploy status in the ${namespace} namespace..." true
echoinfo "Initiating the Helm client..." echoinfo "Initiating the Helm client..."
helm init --client-only helm init --client-only
# Set toleration for Tiller to be installed on a specific node pool # Set toleration for Tiller to be installed on a specific node pool
helm init \ helm init \
--tiller-namespace "${namespace}" \
--wait \ --wait \
--upgrade \ --upgrade \
--node-selectors "app=helm" \ --node-selectors "app=helm" \
...@@ -133,34 +148,38 @@ function install_tiller() { ...@@ -133,34 +148,38 @@ function install_tiller() {
--override "spec.template.spec.tolerations[0].value"="helm" \ --override "spec.template.spec.tolerations[0].value"="helm" \
--override "spec.template.spec.tolerations[0].effect"="NoSchedule" --override "spec.template.spec.tolerations[0].effect"="NoSchedule"
kubectl rollout status -n "$TILLER_NAMESPACE" -w "deployment/tiller-deploy" kubectl rollout status --namespace "${namespace}" --watch "deployment/tiller-deploy"
if ! helm version --debug; then if ! helm version --tiller-namespace "${namespace}" --debug; then
echo "Failed to init Tiller." echo "Failed to init Tiller."
return 1 return 1
fi fi
} }
function install_external_dns() { function install_external_dns() {
local release_name="dns-gitlab-review-app" local namespace="${KUBE_NAMESPACE}"
local release="dns-gitlab-review-app"
local domain local domain
domain=$(echo "${REVIEW_APPS_DOMAIN}" | awk -F. '{printf "%s.%s", $(NF-1), $NF}') domain=$(echo "${REVIEW_APPS_DOMAIN}" | awk -F. '{printf "%s.%s", $(NF-1), $NF}')
echoinfo "Installing external DNS for domain ${domain}..." true echoinfo "Installing external DNS for domain ${domain}..." true
if ! deploy_exists "${KUBE_NAMESPACE}" "${release_name}" || previous_deploy_failed "${release_name}" ; then if ! deploy_exists "${namespace}" "${release}" || previous_deploy_failed "${namespace}" "${release}" ; then
echoinfo "Installing external-dns Helm chart" echoinfo "Installing external-dns Helm chart"
helm repo update helm repo update --tiller-namespace "${namespace}"
# Default requested: CPU => 0, memory => 0 # Default requested: CPU => 0, memory => 0
helm install stable/external-dns --version '^2.2.1' \ helm install stable/external-dns \
-n "${release_name}" \ --tiller-namespace "${namespace}" \
--namespace "${KUBE_NAMESPACE}" \ --namespace "${namespace}" \
--version '^2.2.1' \
--name "${release}" \
--set provider="aws" \ --set provider="aws" \
--set aws.credentials.secretKey="${REVIEW_APPS_AWS_SECRET_KEY}" \ --set aws.credentials.secretKey="${REVIEW_APPS_AWS_SECRET_KEY}" \
--set aws.credentials.accessKey="${REVIEW_APPS_AWS_ACCESS_KEY}" \ --set aws.credentials.accessKey="${REVIEW_APPS_AWS_ACCESS_KEY}" \
--set aws.zoneType="public" \ --set aws.zoneType="public" \
--set aws.batchChangeSize=400 \ --set aws.batchChangeSize=400 \
--set domainFilters[0]="${domain}" \ --set domainFilters[0]="${domain}" \
--set txtOwnerId="${KUBE_NAMESPACE}" \ --set txtOwnerId="${namespace}" \
--set rbac.create="true" \ --set rbac.create="true" \
--set policy="sync" \ --set policy="sync" \
--set resources.requests.cpu=50m \ --set resources.requests.cpu=50m \
...@@ -173,21 +192,24 @@ function install_external_dns() { ...@@ -173,21 +192,24 @@ function install_external_dns() {
} }
function create_application_secret() { function create_application_secret() {
echoinfo "Creating the ${CI_ENVIRONMENT_SLUG}-gitlab-initial-root-password secret in the ${KUBE_NAMESPACE} namespace..." true local namespace="${KUBE_NAMESPACE}"
local release="${CI_ENVIRONMENT_SLUG}"
echoinfo "Creating the ${release}-gitlab-initial-root-password secret in the ${namespace} namespace..." true
kubectl create secret generic -n "$KUBE_NAMESPACE" \ kubectl create secret generic --namespace "${namespace}" \
"${CI_ENVIRONMENT_SLUG}-gitlab-initial-root-password" \ "${release}-gitlab-initial-root-password" \
--from-literal="password=${REVIEW_APPS_ROOT_PASSWORD}" \ --from-literal="password=${REVIEW_APPS_ROOT_PASSWORD}" \
--dry-run -o json | kubectl apply -f - --dry-run -o json | kubectl apply -f -
if [ -z "${REVIEW_APPS_EE_LICENSE}" ]; then echo "License not found" && return; fi if [ -z "${REVIEW_APPS_EE_LICENSE}" ]; then echo "License not found" && return; fi
echoinfo "Creating the ${CI_ENVIRONMENT_SLUG}-gitlab-license secret in the ${KUBE_NAMESPACE} namespace..." true echoinfo "Creating the ${release}-gitlab-license secret in the ${namespace} namespace..." true
echo "${REVIEW_APPS_EE_LICENSE}" > /tmp/license.gitlab echo "${REVIEW_APPS_EE_LICENSE}" > /tmp/license.gitlab
kubectl create secret generic -n "$KUBE_NAMESPACE" \ kubectl create secret generic --namespace "${namespace}" \
"${CI_ENVIRONMENT_SLUG}-gitlab-license" \ "${release}-gitlab-license" \
--from-file=license=/tmp/license.gitlab \ --from-file=license=/tmp/license.gitlab \
--dry-run -o json | kubectl apply -f - --dry-run -o json | kubectl apply -f -
} }
...@@ -213,13 +235,14 @@ function base_config_changed() { ...@@ -213,13 +235,14 @@ function base_config_changed() {
} }
function deploy() { function deploy() {
local name="$CI_ENVIRONMENT_SLUG" local namespace="${KUBE_NAMESPACE}"
local release="${CI_ENVIRONMENT_SLUG}"
local edition="${GITLAB_EDITION-ce}" local edition="${GITLAB_EDITION-ce}"
local base_config_file_ref="master" local base_config_file_ref="master"
if [[ "$(base_config_changed)" == "true" ]]; then base_config_file_ref="$CI_COMMIT_SHA"; fi if [[ "$(base_config_changed)" == "true" ]]; then base_config_file_ref="${CI_COMMIT_SHA}"; fi
local base_config_file="https://gitlab.com/gitlab-org/gitlab/raw/${base_config_file_ref}/scripts/review_apps/base-config.yaml" local base_config_file="https://gitlab.com/gitlab-org/gitlab/raw/${base_config_file_ref}/scripts/review_apps/base-config.yaml"
echoinfo "Deploying ${name}..." true echoinfo "Deploying ${release}..." true
IMAGE_REPOSITORY="registry.gitlab.com/gitlab-org/build/cng-mirror" IMAGE_REPOSITORY="registry.gitlab.com/gitlab-org/build/cng-mirror"
gitlab_migrations_image_repository="${IMAGE_REPOSITORY}/gitlab-rails-${edition}" gitlab_migrations_image_repository="${IMAGE_REPOSITORY}/gitlab-rails-${edition}"
...@@ -233,47 +256,49 @@ function deploy() { ...@@ -233,47 +256,49 @@ function deploy() {
create_application_secret create_application_secret
HELM_CMD=$(cat << EOF HELM_CMD=$(cat << EOF
helm upgrade --install \ helm upgrade \
--tiller-namespace="${namespace}" \
--namespace="${namespace}" \
--install \
--wait \ --wait \
--timeout 900 \ --timeout 900 \
--set ci.branch="$CI_COMMIT_REF_NAME" \ --set ci.branch="${CI_COMMIT_REF_NAME}" \
--set ci.commit.sha="$CI_COMMIT_SHORT_SHA" \ --set ci.commit.sha="${CI_COMMIT_SHORT_SHA}" \
--set ci.job.url="$CI_JOB_URL" \ --set ci.job.url="${CI_JOB_URL}" \
--set ci.pipeline.url="$CI_PIPELINE_URL" \ --set ci.pipeline.url="${CI_PIPELINE_URL}" \
--set releaseOverride="$CI_ENVIRONMENT_SLUG" \ --set releaseOverride="${release}" \
--set global.hosts.hostSuffix="$HOST_SUFFIX" \ --set global.hosts.hostSuffix="${HOST_SUFFIX}" \
--set global.hosts.domain="$REVIEW_APPS_DOMAIN" \ --set global.hosts.domain="${REVIEW_APPS_DOMAIN}" \
--set gitlab.migrations.image.repository="$gitlab_migrations_image_repository" \ --set gitlab.migrations.image.repository="${gitlab_migrations_image_repository}" \
--set gitlab.migrations.image.tag="$CI_COMMIT_REF_SLUG" \ --set gitlab.migrations.image.tag="${CI_COMMIT_REF_SLUG}" \
--set gitlab.gitaly.image.repository="$gitlab_gitaly_image_repository" \ --set gitlab.gitaly.image.repository="${gitlab_gitaly_image_repository}" \
--set gitlab.gitaly.image.tag="v$GITALY_VERSION" \ --set gitlab.gitaly.image.tag="v${GITALY_VERSION}" \
--set gitlab.gitlab-shell.image.repository="$gitlab_shell_image_repository" \ --set gitlab.gitlab-shell.image.repository="${gitlab_shell_image_repository}" \
--set gitlab.gitlab-shell.image.tag="v$GITLAB_SHELL_VERSION" \ --set gitlab.gitlab-shell.image.tag="v${GITLAB_SHELL_VERSION}" \
--set gitlab.sidekiq.image.repository="$gitlab_sidekiq_image_repository" \ --set gitlab.sidekiq.image.repository="${gitlab_sidekiq_image_repository}" \
--set gitlab.sidekiq.image.tag="$CI_COMMIT_REF_SLUG" \ --set gitlab.sidekiq.image.tag="${CI_COMMIT_REF_SLUG}" \
--set gitlab.unicorn.image.repository="$gitlab_unicorn_image_repository" \ --set gitlab.unicorn.image.repository="${gitlab_unicorn_image_repository}" \
--set gitlab.unicorn.image.tag="$CI_COMMIT_REF_SLUG" \ --set gitlab.unicorn.image.tag="${CI_COMMIT_REF_SLUG}" \
--set gitlab.unicorn.workhorse.image="$gitlab_workhorse_image_repository" \ --set gitlab.unicorn.workhorse.image="${gitlab_workhorse_image_repository}" \
--set gitlab.unicorn.workhorse.tag="$CI_COMMIT_REF_SLUG" \ --set gitlab.unicorn.workhorse.tag="${CI_COMMIT_REF_SLUG}" \
--set gitlab.task-runner.image.repository="$gitlab_task_runner_image_repository" \ --set gitlab.task-runner.image.repository="${gitlab_task_runner_image_repository}" \
--set gitlab.task-runner.image.tag="$CI_COMMIT_REF_SLUG" --set gitlab.task-runner.image.tag="${CI_COMMIT_REF_SLUG}"
EOF EOF
) )
if [ -n "${REVIEW_APPS_EE_LICENSE}" ]; then if [ -n "${REVIEW_APPS_EE_LICENSE}" ]; then
HELM_CMD=$(cat << EOF HELM_CMD=$(cat << EOF
${HELM_CMD} \ ${HELM_CMD} \
--set global.gitlab.license.secret="${CI_ENVIRONMENT_SLUG}-gitlab-license" --set global.gitlab.license.secret="${release}-gitlab-license"
EOF EOF
) )
fi fi
HELM_CMD=$(cat << EOF HELM_CMD=$(cat << EOF
${HELM_CMD} \ ${HELM_CMD} \
--namespace="$KUBE_NAMESPACE" \
--version="${CI_PIPELINE_ID}-${CI_JOB_ID}" \ --version="${CI_PIPELINE_ID}-${CI_JOB_ID}" \
-f "${base_config_file}" \ -f "${base_config_file}" \
"${name}" . "${release}" .
EOF EOF
) )
...@@ -284,11 +309,14 @@ EOF ...@@ -284,11 +309,14 @@ EOF
} }
function display_deployment_debug() { function display_deployment_debug() {
local namespace="${KUBE_NAMESPACE}"
local release="${CI_ENVIRONMENT_SLUG}"
# Get all pods for this release # Get all pods for this release
echoinfo "Pods for release ${CI_ENVIRONMENT_SLUG}" echoinfo "Pods for release ${release}"
kubectl get pods -n "$KUBE_NAMESPACE" -lrelease=${CI_ENVIRONMENT_SLUG} kubectl get pods --namespace "${namespace}" -lrelease=${release}
# Get all non-completed jobs # Get all non-completed jobs
echoinfo "Unsuccessful Jobs for release ${CI_ENVIRONMENT_SLUG}" echoinfo "Unsuccessful Jobs for release ${release}"
kubectl get jobs -n "$KUBE_NAMESPACE" -lrelease=${CI_ENVIRONMENT_SLUG} --field-selector=status.successful!=1 kubectl get jobs --namespace "${namespace}" -lrelease=${release} --field-selector=status.successful!=1
} }
# frozen_string_literal: true
require 'spec_helper'
describe Mutations::Todos::MarkAllDone do
include GraphqlHelpers
let_it_be(:current_user) { create(:user) }
let_it_be(:author) { create(:user) }
let_it_be(:other_user) { create(:user) }
let_it_be(:todo1) { create(:todo, user: current_user, author: author, state: :pending) }
let_it_be(:todo2) { create(:todo, user: current_user, author: author, state: :done) }
let_it_be(:todo3) { create(:todo, user: current_user, author: author, state: :pending) }
let_it_be(:other_user_todo) { create(:todo, user: other_user, author: author, state: :pending) }
let_it_be(:user3) { create(:user) }
describe '#resolve' do
it 'marks all pending todos as done' do
updated_todo_ids = mutation_for(current_user).resolve.dig(:updated_ids)
expect(todo1.reload.state).to eq('done')
expect(todo2.reload.state).to eq('done')
expect(todo3.reload.state).to eq('done')
expect(other_user_todo.reload.state).to eq('pending')
expect(updated_todo_ids).to contain_exactly(global_id_of(todo1), global_id_of(todo3))
end
it 'behaves as expected if there are no todos for the requesting user' do
updated_todo_ids = mutation_for(user3).resolve.dig(:updated_ids)
expect(todo1.reload.state).to eq('pending')
expect(todo2.reload.state).to eq('done')
expect(todo3.reload.state).to eq('pending')
expect(other_user_todo.reload.state).to eq('pending')
expect(updated_todo_ids).to be_empty
end
context 'when user is not logged in' do
it 'fails with the expected error' do
expect { mutation_for(nil).resolve }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
end
def mutation_for(user)
described_class.new(object: nil, context: { current_user: user })
end
end
...@@ -3,6 +3,8 @@ ...@@ -3,6 +3,8 @@
require 'spec_helper' require 'spec_helper'
describe Mutations::Todos::MarkDone do describe Mutations::Todos::MarkDone do
include GraphqlHelpers
let_it_be(:current_user) { create(:user) } let_it_be(:current_user) { create(:user) }
let_it_be(:author) { create(:user) } let_it_be(:author) { create(:user) }
let_it_be(:other_user) { create(:user) } let_it_be(:other_user) { create(:user) }
...@@ -59,8 +61,4 @@ describe Mutations::Todos::MarkDone do ...@@ -59,8 +61,4 @@ describe Mutations::Todos::MarkDone do
def mark_done_mutation(todo) def mark_done_mutation(todo)
mutation.resolve(id: global_id_of(todo)) mutation.resolve(id: global_id_of(todo))
end end
def global_id_of(todo)
todo.to_global_id.to_s
end
end end
...@@ -11,7 +11,7 @@ import '~/boards/models/list'; ...@@ -11,7 +11,7 @@ import '~/boards/models/list';
import '~/boards/services/board_service'; import '~/boards/services/board_service';
import boardsStore from '~/boards/stores/boards_store'; import boardsStore from '~/boards/stores/boards_store';
import eventHub from '~/boards/eventhub'; import eventHub from '~/boards/eventhub';
import { listObj, listObjDuplicate, boardsMockInterceptor, mockBoardService } from './mock_data'; import { listObj, listObjDuplicate, boardsMockInterceptor } from './mock_data';
import waitForPromises from '../../frontend/helpers/wait_for_promises'; import waitForPromises from '../../frontend/helpers/wait_for_promises';
describe('Store', () => { describe('Store', () => {
...@@ -20,17 +20,16 @@ describe('Store', () => { ...@@ -20,17 +20,16 @@ describe('Store', () => {
beforeEach(() => { beforeEach(() => {
mock = new MockAdapter(axios); mock = new MockAdapter(axios);
mock.onAny().reply(boardsMockInterceptor); mock.onAny().reply(boardsMockInterceptor);
gl.boardService = mockBoardService();
boardsStore.create(); boardsStore.create();
spyOn(gl.boardService, 'moveIssue').and.callFake( spyOn(boardsStore, 'moveIssue').and.callFake(
() => () =>
new Promise(resolve => { new Promise(resolve => {
resolve(); resolve();
}), }),
); );
spyOn(gl.boardService, 'moveMultipleIssues').and.callFake( spyOn(boardsStore, 'moveMultipleIssues').and.callFake(
() => () =>
new Promise(resolve => { new Promise(resolve => {
resolve(); resolve();
...@@ -263,7 +262,7 @@ describe('Store', () => { ...@@ -263,7 +262,7 @@ describe('Store', () => {
expect(listOne.issues.length).toBe(0); expect(listOne.issues.length).toBe(0);
expect(listTwo.issues.length).toBe(2); expect(listTwo.issues.length).toBe(2);
expect(listTwo.issues[0].id).toBe(2); expect(listTwo.issues[0].id).toBe(2);
expect(gl.boardService.moveIssue).toHaveBeenCalledWith(2, listOne.id, listTwo.id, null, 1); expect(boardsStore.moveIssue).toHaveBeenCalledWith(2, listOne.id, listTwo.id, null, 1);
done(); done();
}, 0); }, 0);
...@@ -286,7 +285,7 @@ describe('Store', () => { ...@@ -286,7 +285,7 @@ describe('Store', () => {
expect(listOne.issues.length).toBe(0); expect(listOne.issues.length).toBe(0);
expect(listTwo.issues.length).toBe(2); expect(listTwo.issues.length).toBe(2);
expect(listTwo.issues[1].id).toBe(2); expect(listTwo.issues[1].id).toBe(2);
expect(gl.boardService.moveIssue).toHaveBeenCalledWith(2, listOne.id, listTwo.id, 1, null); expect(boardsStore.moveIssue).toHaveBeenCalledWith(2, listOne.id, listTwo.id, 1, null);
done(); done();
}, 0); }, 0);
...@@ -311,7 +310,7 @@ describe('Store', () => { ...@@ -311,7 +310,7 @@ describe('Store', () => {
boardsStore.moveIssueInList(list, issue, 0, 1, [1, 2]); boardsStore.moveIssueInList(list, issue, 0, 1, [1, 2]);
expect(list.issues[0].id).toBe(2); expect(list.issues[0].id).toBe(2);
expect(gl.boardService.moveIssue).toHaveBeenCalledWith(2, null, null, 1, null); expect(boardsStore.moveIssue).toHaveBeenCalledWith(2, null, null, 1, null);
done(); done();
}); });
...@@ -495,7 +494,7 @@ describe('Store', () => { ...@@ -495,7 +494,7 @@ describe('Store', () => {
expect(list.issues[0].id).toBe(issue1.id); expect(list.issues[0].id).toBe(issue1.id);
expect(gl.boardService.moveMultipleIssues).toHaveBeenCalledWith({ expect(boardsStore.moveMultipleIssues).toHaveBeenCalledWith({
ids: [issue1.id, issue2.id], ids: [issue1.id, issue2.id],
fromListId: null, fromListId: null,
toListId: null, toListId: null,
......
...@@ -12,7 +12,7 @@ import '~/boards/models/issue'; ...@@ -12,7 +12,7 @@ import '~/boards/models/issue';
import '~/boards/models/list'; import '~/boards/models/list';
import '~/boards/services/board_service'; import '~/boards/services/board_service';
import boardsStore from '~/boards/stores/boards_store'; import boardsStore from '~/boards/stores/boards_store';
import { listObj, listObjDuplicate, boardsMockInterceptor, mockBoardService } from './mock_data'; import { listObj, listObjDuplicate, boardsMockInterceptor } from './mock_data';
describe('List model', () => { describe('List model', () => {
let list; let list;
...@@ -21,9 +21,6 @@ describe('List model', () => { ...@@ -21,9 +21,6 @@ describe('List model', () => {
beforeEach(() => { beforeEach(() => {
mock = new MockAdapter(axios); mock = new MockAdapter(axios);
mock.onAny().reply(boardsMockInterceptor); mock.onAny().reply(boardsMockInterceptor);
gl.boardService = mockBoardService({
bulkUpdatePath: '/test/issue-boards/board/1/lists',
});
boardsStore.create(); boardsStore.create();
list = new List(listObj); list = new List(listObj);
...@@ -110,11 +107,11 @@ describe('List model', () => { ...@@ -110,11 +107,11 @@ describe('List model', () => {
list.issues.push(issue); list.issues.push(issue);
listDup.issues.push(issue); listDup.issues.push(issue);
spyOn(gl.boardService, 'moveIssue').and.callThrough(); spyOn(boardsStore, 'moveIssue').and.callThrough();
listDup.updateIssueLabel(issue, list); listDup.updateIssueLabel(issue, list);
expect(gl.boardService.moveIssue).toHaveBeenCalledWith( expect(boardsStore.moveIssue).toHaveBeenCalledWith(
issue.id, issue.id,
list.id, list.id,
listDup.id, listDup.id,
...@@ -172,7 +169,7 @@ describe('List model', () => { ...@@ -172,7 +169,7 @@ describe('List model', () => {
describe('newIssue', () => { describe('newIssue', () => {
beforeEach(() => { beforeEach(() => {
spyOn(gl.boardService, 'newIssue').and.returnValue( spyOn(boardsStore, 'newIssue').and.returnValue(
Promise.resolve({ Promise.resolve({
data: { data: {
id: 42, id: 42,
......
...@@ -3,7 +3,8 @@ ...@@ -3,7 +3,8 @@
require 'fast_spec_helper' require 'fast_spec_helper'
RSpec.describe Quality::HelmClient do RSpec.describe Quality::HelmClient do
let(:namespace) { 'review-apps-ee' } let(:tiller_namespace) { 'review-apps-ee' }
let(:namespace) { tiller_namespace }
let(:release_name) { 'my-release' } let(:release_name) { 'my-release' }
let(:raw_helm_list_page1) do let(:raw_helm_list_page1) do
<<~OUTPUT <<~OUTPUT
...@@ -30,12 +31,12 @@ RSpec.describe Quality::HelmClient do ...@@ -30,12 +31,12 @@ RSpec.describe Quality::HelmClient do
OUTPUT OUTPUT
end end
subject { described_class.new(namespace: namespace) } subject { described_class.new(tiller_namespace: tiller_namespace, namespace: namespace) }
describe '#releases' do describe '#releases' do
it 'raises an error if the Helm command fails' do it 'raises an error if the Helm command fails' do
expect(Gitlab::Popen).to receive(:popen_with_detail) expect(Gitlab::Popen).to receive(:popen_with_detail)
.with([%(helm list --namespace "#{namespace}" --tiller-namespace "#{namespace}" --output json)]) .with([%(helm list --namespace "#{namespace}" --tiller-namespace "#{tiller_namespace}" --output json)])
.and_return(Gitlab::Popen::Result.new([], '', '', double(success?: false))) .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: false)))
expect { subject.releases.to_a }.to raise_error(described_class::CommandFailedError) expect { subject.releases.to_a }.to raise_error(described_class::CommandFailedError)
...@@ -43,7 +44,7 @@ RSpec.describe Quality::HelmClient do ...@@ -43,7 +44,7 @@ RSpec.describe Quality::HelmClient do
it 'calls helm list with default arguments' do it 'calls helm list with default arguments' do
expect(Gitlab::Popen).to receive(:popen_with_detail) expect(Gitlab::Popen).to receive(:popen_with_detail)
.with([%(helm list --namespace "#{namespace}" --tiller-namespace "#{namespace}" --output json)]) .with([%(helm list --namespace "#{namespace}" --tiller-namespace "#{tiller_namespace}" --output json)])
.and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true))) .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
subject.releases.to_a subject.releases.to_a
...@@ -51,7 +52,7 @@ RSpec.describe Quality::HelmClient do ...@@ -51,7 +52,7 @@ RSpec.describe Quality::HelmClient do
it 'calls helm list with extra arguments' do it 'calls helm list with extra arguments' do
expect(Gitlab::Popen).to receive(:popen_with_detail) expect(Gitlab::Popen).to receive(:popen_with_detail)
.with([%(helm list --namespace "#{namespace}" --tiller-namespace "#{namespace}" --output json --deployed)]) .with([%(helm list --namespace "#{namespace}" --tiller-namespace "#{tiller_namespace}" --output json --deployed)])
.and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true))) .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
subject.releases(args: ['--deployed']).to_a subject.releases(args: ['--deployed']).to_a
...@@ -59,7 +60,7 @@ RSpec.describe Quality::HelmClient do ...@@ -59,7 +60,7 @@ RSpec.describe Quality::HelmClient do
it 'returns a list of Release objects' do it 'returns a list of Release objects' do
expect(Gitlab::Popen).to receive(:popen_with_detail) expect(Gitlab::Popen).to receive(:popen_with_detail)
.with([%(helm list --namespace "#{namespace}" --tiller-namespace "#{namespace}" --output json --deployed)]) .with([%(helm list --namespace "#{namespace}" --tiller-namespace "#{tiller_namespace}" --output json --deployed)])
.and_return(Gitlab::Popen::Result.new([], raw_helm_list_page2, '', double(success?: true))) .and_return(Gitlab::Popen::Result.new([], raw_helm_list_page2, '', double(success?: true)))
releases = subject.releases(args: ['--deployed']).to_a releases = subject.releases(args: ['--deployed']).to_a
...@@ -78,10 +79,10 @@ RSpec.describe Quality::HelmClient do ...@@ -78,10 +79,10 @@ RSpec.describe Quality::HelmClient do
it 'automatically paginates releases' do it 'automatically paginates releases' do
expect(Gitlab::Popen).to receive(:popen_with_detail).ordered expect(Gitlab::Popen).to receive(:popen_with_detail).ordered
.with([%(helm list --namespace "#{namespace}" --tiller-namespace "#{namespace}" --output json)]) .with([%(helm list --namespace "#{namespace}" --tiller-namespace "#{tiller_namespace}" --output json)])
.and_return(Gitlab::Popen::Result.new([], raw_helm_list_page1, '', double(success?: true))) .and_return(Gitlab::Popen::Result.new([], raw_helm_list_page1, '', double(success?: true)))
expect(Gitlab::Popen).to receive(:popen_with_detail).ordered expect(Gitlab::Popen).to receive(:popen_with_detail).ordered
.with([%(helm list --namespace "#{namespace}" --tiller-namespace "#{namespace}" --output json --offset review-6709-group-t40qbv)]) .with([%(helm list --namespace "#{namespace}" --tiller-namespace "#{tiller_namespace}" --output json --offset review-6709-group-t40qbv)])
.and_return(Gitlab::Popen::Result.new([], raw_helm_list_page2, '', double(success?: true))) .and_return(Gitlab::Popen::Result.new([], raw_helm_list_page2, '', double(success?: true)))
releases = subject.releases.to_a releases = subject.releases.to_a
...@@ -94,7 +95,7 @@ RSpec.describe Quality::HelmClient do ...@@ -94,7 +95,7 @@ RSpec.describe Quality::HelmClient do
describe '#delete' do describe '#delete' do
it 'raises an error if the Helm command fails' do it 'raises an error if the Helm command fails' do
expect(Gitlab::Popen).to receive(:popen_with_detail) expect(Gitlab::Popen).to receive(:popen_with_detail)
.with([%(helm delete --tiller-namespace "#{namespace}" --purge #{release_name})]) .with([%(helm delete --tiller-namespace "#{tiller_namespace}" --purge #{release_name})])
.and_return(Gitlab::Popen::Result.new([], '', '', double(success?: false))) .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: false)))
expect { subject.delete(release_name: release_name) }.to raise_error(described_class::CommandFailedError) expect { subject.delete(release_name: release_name) }.to raise_error(described_class::CommandFailedError)
...@@ -102,7 +103,7 @@ RSpec.describe Quality::HelmClient do ...@@ -102,7 +103,7 @@ RSpec.describe Quality::HelmClient do
it 'calls helm delete with default arguments' do it 'calls helm delete with default arguments' do
expect(Gitlab::Popen).to receive(:popen_with_detail) expect(Gitlab::Popen).to receive(:popen_with_detail)
.with([%(helm delete --tiller-namespace "#{namespace}" --purge #{release_name})]) .with([%(helm delete --tiller-namespace "#{tiller_namespace}" --purge #{release_name})])
.and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true))) .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
expect(subject.delete(release_name: release_name)).to eq('') expect(subject.delete(release_name: release_name)).to eq('')
...@@ -113,7 +114,7 @@ RSpec.describe Quality::HelmClient do ...@@ -113,7 +114,7 @@ RSpec.describe Quality::HelmClient do
it 'raises an error if the Helm command fails' do it 'raises an error if the Helm command fails' do
expect(Gitlab::Popen).to receive(:popen_with_detail) expect(Gitlab::Popen).to receive(:popen_with_detail)
.with([%(helm delete --tiller-namespace "#{namespace}" --purge #{release_name.join(' ')})]) .with([%(helm delete --tiller-namespace "#{tiller_namespace}" --purge #{release_name.join(' ')})])
.and_return(Gitlab::Popen::Result.new([], '', '', double(success?: false))) .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: false)))
expect { subject.delete(release_name: release_name) }.to raise_error(described_class::CommandFailedError) expect { subject.delete(release_name: release_name) }.to raise_error(described_class::CommandFailedError)
...@@ -121,7 +122,7 @@ RSpec.describe Quality::HelmClient do ...@@ -121,7 +122,7 @@ RSpec.describe Quality::HelmClient do
it 'calls helm delete with multiple release names' do it 'calls helm delete with multiple release names' do
expect(Gitlab::Popen).to receive(:popen_with_detail) expect(Gitlab::Popen).to receive(:popen_with_detail)
.with([%(helm delete --tiller-namespace "#{namespace}" --purge #{release_name.join(' ')})]) .with([%(helm delete --tiller-namespace "#{tiller_namespace}" --purge #{release_name.join(' ')})])
.and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true))) .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
expect(subject.delete(release_name: release_name)).to eq('') expect(subject.delete(release_name: release_name)).to eq('')
......
# frozen_string_literal: true
require 'spec_helper'
describe 'Marking all todos done' do
include GraphqlHelpers
let_it_be(:current_user) { create(:user) }
let_it_be(:author) { create(:user) }
let_it_be(:other_user) { create(:user) }
let_it_be(:other_user2) { create(:user) }
let_it_be(:todo1) { create(:todo, user: current_user, author: author, state: :pending) }
let_it_be(:todo2) { create(:todo, user: current_user, author: author, state: :done) }
let_it_be(:todo3) { create(:todo, user: current_user, author: author, state: :pending) }
let_it_be(:other_user_todo) { create(:todo, user: other_user, author: author, state: :pending) }
let(:input) { {} }
let(:mutation) do
graphql_mutation(:todos_mark_all_done, input,
<<-QL.strip_heredoc
clientMutationId
errors
updatedIds
QL
)
end
def mutation_response
graphql_mutation_response(:todos_mark_all_done)
end
it 'marks all pending todos as done' do
post_graphql_mutation(mutation, current_user: current_user)
expect(todo1.reload.state).to eq('done')
expect(todo2.reload.state).to eq('done')
expect(todo3.reload.state).to eq('done')
expect(other_user_todo.reload.state).to eq('pending')
updated_todo_ids = mutation_response['updatedIds']
expect(updated_todo_ids).to contain_exactly(global_id_of(todo1), global_id_of(todo3))
end
it 'behaves as expected if there are no todos for the requesting user' do
post_graphql_mutation(mutation, current_user: other_user2)
expect(todo1.reload.state).to eq('pending')
expect(todo2.reload.state).to eq('done')
expect(todo3.reload.state).to eq('pending')
expect(other_user_todo.reload.state).to eq('pending')
updated_todo_ids = mutation_response['updatedIds']
expect(updated_todo_ids).to be_empty
end
context 'when user is not logged in' do
let(:current_user) { nil }
it_behaves_like 'a mutation that returns top-level errors',
errors: ['The resource that you are attempting to access does not exist or you don\'t have permission to perform this action']
end
end
...@@ -1015,6 +1015,21 @@ describe TodoService do ...@@ -1015,6 +1015,21 @@ describe TodoService do
end end
end end
describe '#mark_all_todos_as_done_by_user' do
it 'marks all todos done' do
todo1 = create(:todo, user: john_doe, state: :pending)
todo2 = create(:todo, user: john_doe, state: :done)
todo3 = create(:todo, user: john_doe, state: :pending)
ids = described_class.new.mark_all_todos_as_done_by_user(john_doe)
expect(ids).to contain_exactly(todo1.id, todo3.id)
expect(todo1.reload.state).to eq('done')
expect(todo2.reload.state).to eq('done')
expect(todo3.reload.state).to eq('done')
end
end
describe '#mark_todos_as_done_by_ids' do describe '#mark_todos_as_done_by_ids' do
let(:issue) { create(:issue, project: project, author: author, assignees: [john_doe]) } let(:issue) { create(:issue, project: project, author: author, assignees: [john_doe]) }
let(:another_issue) { create(:issue, project: project, author: author, assignees: [john_doe]) } let(:another_issue) { create(:issue, project: project, author: author, assignees: [john_doe]) }
......
...@@ -297,6 +297,10 @@ module GraphqlHelpers ...@@ -297,6 +297,10 @@ module GraphqlHelpers
extract_attribute ? item['node'][extract_attribute] : item['node'] extract_attribute ? item['node'][extract_attribute] : item['node']
end end
end end
def global_id_of(model)
model.to_global_id.to_s
end
end end
# This warms our schema, doing this as part of loading the helpers to avoid # This warms our schema, doing this as part of loading the helpers to avoid
......