Commit ba5f3957 authored by GitLab Bot

Merge remote-tracking branch 'upstream/master' into ce-to-ee-2018-03-06

# Conflicts:
#	.gitlab-ci.yml
#	app/assets/javascripts/boards/components/board_new_issue.vue
#	app/assets/javascripts/boards/components/sidebar/remove_issue.js
#	app/assets/javascripts/boards/filtered_search_boards.js
#	app/assets/javascripts/boards/index.js
#	app/assets/javascripts/boards/mixins/sortable_default_options.js
#	app/assets/javascripts/boards/models/issue.js
#	app/controllers/groups/boards_controller.rb
#	app/models/board.rb
#	app/models/group.rb
#	app/models/project.rb
#	app/views/groups/labels/index.html.haml
#	app/views/shared/issuable/_search_bar.html.haml
#	config/routes/group.rb
#	config/routes/project.rb
#	config/sidekiq_queues.yml
#	db/schema.rb
#	doc/README.md
#	doc/api/group_boards.md
#	lib/api/group_boards.rb
#	lib/api/job_artifacts.rb
#	lib/api/merge_requests.rb
#	lib/gitlab/ci/trace.rb
#	spec/controllers/groups/boards_controller_spec.rb
#	spec/factories/boards.rb
#	spec/javascripts/api_spec.js
#	spec/lib/gitlab/ci/trace_spec.rb
#	spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb

[ci skip]
parents 25fb4798 35f6efae
...@@ -40,8 +40,14 @@ variables:
before_script:
- bundle --version
- date
- source scripts/utils.sh
- date
- source scripts/prepare_build.sh
- date
after_script:
- date
stages:
- build
...@@ -101,6 +107,26 @@ stages:
- /(^docs[\/-].*|.*-docs$)/
- /(^qa[\/-].*|.*-qa$)/
# Jobs that only need to pull cache
.dedicated-no-docs-pull-cache-job: &dedicated-no-docs-pull-cache-job
<<: *dedicated-runner
<<: *except-docs-and-qa
<<: *pull-cache
dependencies:
- setup-test-env
stage: test
# Jobs that do not need a DB
.dedicated-no-docs-no-db-pull-cache-job: &dedicated-no-docs-no-db-pull-cache-job
<<: *dedicated-no-docs-pull-cache-job
variables:
SETUP_DB: "false"
.rake-exec: &rake-exec
<<: *dedicated-no-docs-no-db-pull-cache-job
script:
- bundle exec rake $CI_JOB_NAME
.rspec-metadata: &rspec-metadata
<<: *dedicated-runner
<<: *except-docs-and-qa
...@@ -208,21 +234,23 @@ stages:
- master@gitlab/gitlabhq
- master@gitlab/gitlab-ee
##
# Trigger a package build in omnibus-gitlab repository
#
package-qa:
<<: *dedicated-runner
image: ruby:2.4-alpine
before_script: []
stage: build
cache: {}
when: manual
script:
- scripts/trigger-build-omnibus
only:
- //@gitlab-org/gitlab-ce
- //@gitlab-org/gitlab-ee
.gitlab-setup: &gitlab-setup
<<: *dedicated-no-docs-pull-cache-job
<<: *use-pg
variables:
CREATE_DB_USER: "true"
script:
# Manually clone gitlab-test and only seed this project in
# db/fixtures/development/04_project.rb thanks to SIZE=1 below
- git clone https://gitlab.com/gitlab-org/gitlab-test.git
/home/git/repositories/gitlab-org/gitlab-test.git
- scripts/gitaly-test-spawn
- force=yes SIZE=1 FIXTURE_PATH="db/fixtures/development" bundle exec rake gitlab:setup
artifacts:
when: on_failure
expire_in: 1d
paths:
- log/development.log
# Review docs base
.review-docs: &review-docs
...@@ -245,6 +273,47 @@ package-qa:
only:
- branches
# DB migration, rollback, and seed jobs
.db-migrate-reset: &db-migrate-reset
<<: *dedicated-no-docs-pull-cache-job
script:
- bundle exec rake db:migrate:reset
.migration-paths: &migration-paths
<<: *dedicated-no-docs-pull-cache-job
variables:
CREATE_DB_USER: "true"
script:
- git fetch https://gitlab.com/gitlab-org/gitlab-ce.git v9.3.0
- git checkout -f FETCH_HEAD
- bundle install $BUNDLE_INSTALL_FLAGS
- date
- cp config/gitlab.yml.example config/gitlab.yml
- bundle exec rake db:drop db:create db:schema:load db:seed_fu
- date
- git checkout $CI_COMMIT_SHA
- bundle install $BUNDLE_INSTALL_FLAGS
- date
- . scripts/prepare_build.sh
- date
- bundle exec rake db:migrate
##
# Trigger a package build in omnibus-gitlab repository
#
package-qa:
<<: *dedicated-runner
image: ruby:2.4-alpine
before_script: []
stage: build
cache: {}
when: manual
script:
- scripts/trigger-build-omnibus
only:
- //@gitlab-org/gitlab-ce
- //@gitlab-org/gitlab-ee
# Trigger a docs build in gitlab-docs
# Useful to preview the docs changes live
review-docs-deploy:
...@@ -324,7 +393,7 @@ update-tests-metadata:
flaky-examples-check:
<<: *dedicated-runner
image: ruby:2.3-alpine
image: ruby:2.4-alpine
services: []
before_script: []
variables:
...@@ -358,7 +427,9 @@ compile-assets:
<<: *default-cache
script:
- node --version
- date
- yarn install --frozen-lockfile --cache-folder .yarn-cache
- date
- bundle exec rake gitlab:assets:compile
artifacts:
expire_in: 7d
...@@ -454,26 +525,11 @@ spinach-pg 1 2: *spinach-metadata-pg
spinach-mysql 0 2: *spinach-metadata-mysql
spinach-mysql 1 2: *spinach-metadata-mysql
# Static analysis jobs
.ruby-static-analysis: &ruby-static-analysis
variables:
SIMPLECOV: "false"
SETUP_DB: "false"
.rake-exec: &rake-exec
<<: *dedicated-runner
<<: *except-docs-and-qa
<<: *pull-cache
<<: *ruby-static-analysis
stage: test
script:
- bundle exec rake $CI_JOB_NAME
static-analysis:
<<: *dedicated-runner
<<: *except-docs
<<: *ruby-static-analysis
stage: test
<<: *dedicated-no-docs-no-db-pull-cache-job
dependencies:
- compile-assets
- setup-test-env
script:
- scripts/static-analysis
cache:
...@@ -530,15 +586,6 @@ ee_compat_check:
paths:
- ee_compat_check/patches/*.patch
# DB migration, rollback, and seed jobs
.db-migrate-reset: &db-migrate-reset
<<: *dedicated-runner
<<: *except-docs-and-qa
<<: *pull-cache
stage: test
script:
- bundle exec rake db:migrate:reset
db:migrate:reset-pg:
<<: *db-migrate-reset
<<: *use-pg
...@@ -553,6 +600,7 @@ db:check-schema-pg:
script:
- source scripts/schema_changed.sh
<<<<<<< HEAD
.migration-paths: &migration-paths
<<: *dedicated-runner
<<: *except-docs-and-qa
...@@ -572,6 +620,8 @@ db:check-schema-pg:
- . scripts/prepare_build.sh
- bundle exec rake db:migrate
=======
>>>>>>> upstream/master
migration:path-pg:
<<: *migration-paths
<<: *use-pg
...@@ -581,10 +631,7 @@ migration:path-mysql:
<<: *use-mysql
.db-rollback: &db-rollback
<<: *dedicated-runner
<<: *except-docs-and-qa
<<: *pull-cache
stage: test
<<: *dedicated-no-docs-pull-cache-job
script:
- bundle exec rake db:rollback STEP=119
- bundle exec rake db:migrate
...@@ -597,27 +644,6 @@ db:rollback-mysql:
<<: *db-rollback
<<: *use-mysql
.gitlab-setup: &gitlab-setup
<<: *dedicated-runner
<<: *except-docs-and-qa
<<: *pull-cache
stage: test
variables:
SIZE: "1"
SETUP_DB: "false"
CREATE_DB_USER: "true"
FIXTURE_PATH: db/fixtures/development
script:
- git clone https://gitlab.com/gitlab-org/gitlab-test.git
/home/git/repositories/gitlab-org/gitlab-test.git
- scripts/gitaly-test-spawn
- force=yes bundle exec rake gitlab:setup
artifacts:
when: on_failure
expire_in: 1d
paths:
- log/development.log
gitlab:setup-pg:
<<: *gitlab-setup
<<: *use-pg
...@@ -628,10 +654,7 @@ gitlab:setup-mysql:
# Frontend-related jobs
gitlab:assets:compile:
<<: *dedicated-runner
<<: *except-docs-and-qa
<<: *pull-cache
stage: test
<<: *dedicated-no-docs-no-db-pull-cache-job
dependencies: []
variables:
NODE_ENV: "production"
...@@ -641,7 +664,9 @@ gitlab:assets:compile:
WEBPACK_REPORT: "true"
NO_COMPRESSION: "true"
script:
- date
- yarn install --frozen-lockfile --production --cache-folder .yarn-cache
- date
- bundle exec rake gitlab:assets:compile
artifacts:
name: webpack-report
...@@ -650,17 +675,16 @@ gitlab:assets:compile:
- webpack-report/
karma:
<<: *dedicated-runner
<<: *except-docs-and-qa
<<: *pull-cache
<<: *dedicated-no-docs-pull-cache-job
<<: *use-pg
stage: test
variables:
BABEL_ENV: "coverage"
CHROME_LOG_FILE: "chrome_debug.log"
dependencies:
- compile-assets
- setup-test-env
script:
- export BABEL_ENV=coverage CHROME_LOG_FILE=chrome_debug.log
- date
- scripts/gitaly-test-spawn
- bundle exec rake gettext:po_to_json
- date
- bundle exec rake karma
coverage: '/^Statements *: (\d+\.\d+%)/'
artifacts:
...@@ -672,9 +696,7 @@ karma:
- coverage-javascript/
codequality:
<<: *except-docs
<<: *pull-cache
stage: test
<<: *dedicated-no-docs-no-db-pull-cache-job
image: docker:latest
before_script: []
services:
...@@ -706,11 +728,7 @@ sast:
paths: [gl-sast-report.json]
qa:internal:
<<: *dedicated-runner
<<: *except-docs
stage: test
variables:
SETUP_DB: "false"
<<: *dedicated-no-docs-no-db-pull-cache-job
services: []
script:
- cd qa/
...@@ -718,11 +736,7 @@ qa:internal:
- bundle exec rspec
qa:selectors:
<<: *dedicated-runner
<<: *except-docs
stage: test
variables:
SETUP_DB: "false"
<<: *dedicated-no-docs-no-db-pull-cache-job
services: []
script:
- cd qa/
...@@ -730,14 +744,8 @@ qa:selectors:
- bundle exec bin/qa Test::Sanity::Selectors
coverage:
<<: *dedicated-runner
<<: *except-docs-and-qa
<<: *pull-cache
<<: *dedicated-no-docs-no-db-pull-cache-job
stage: post-test
services: []
variables:
SETUP_DB: "false"
USE_BUNDLE_INSTALL: "true"
script:
- bundle exec scripts/merge-simplecov
coverage: '/LOC \((\d+\.\d+%)\) covered.$/'
...@@ -749,16 +757,16 @@ coverage:
- coverage/assets/
lint:javascript:report:
<<: *dedicated-runner
<<: *except-docs-and-qa
<<: *pull-cache
<<: *dedicated-no-docs-no-db-pull-cache-job
stage: post-test
dependencies:
- compile-assets
- setup-test-env
before_script: []
script:
- date
- find app/ spec/ -name '*.js' -exec sed --in-place 's|/\* eslint-disable .*\*/||' {} \; # run report over all files
- date
- yarn run eslint-report || true # ignore exit code
artifacts:
name: eslint-report
...@@ -767,8 +775,7 @@ lint:javascript:report:
- eslint-report.html
pages:
<<: *dedicated-runner
<<: *pull-cache
<<: *dedicated-no-docs-no-db-pull-cache-job
before_script: []
stage: pages
dependencies:
...@@ -793,10 +800,7 @@ pages:
# Insurance in case a gem needed by one of our releases gets yanked from
# rubygems.org in the future.
cache gems:
<<: *dedicated-runner
<<: *pull-cache
variables:
SETUP_DB: "false"
<<: *dedicated-no-docs-no-db-pull-cache-job
script:
- bundle package --all --all-platforms
artifacts:
......
<script>
import eventHub from '../eventhub';
<<<<<<< HEAD
import ProjectSelect from 'ee/boards/components/project_select.vue'; // eslint-disable-line import/first
=======
import ProjectSelect from './project_select.vue';
>>>>>>> upstream/master
import ListIssue from '../models/issue';
const Store = gl.issueBoards.BoardsStore;
...@@ -140,3 +144,4 @@ export default {
</div>
</div>
</template>
<script>
/* global ListIssue */
import _ from 'underscore';
import eventHub from '../eventhub';
import loadingIcon from '../../vue_shared/components/loading_icon.vue';
import Api from '../../api';
export default {
name: 'BoardProjectSelect',
components: {
loadingIcon,
},
props: {
groupId: {
type: Number,
required: true,
default: 0,
},
},
data() {
return {
loading: true,
selectedProject: {},
};
},
computed: {
selectedProjectName() {
return this.selectedProject.name || 'Select a project';
},
},
mounted() {
$(this.$refs.projectsDropdown).glDropdown({
filterable: true,
filterRemote: true,
search: {
fields: ['name_with_namespace'],
},
clicked: ({ $el, e }) => {
e.preventDefault();
this.selectedProject = {
id: $el.data('project-id'),
name: $el.data('project-name'),
};
eventHub.$emit('setSelectedProject', this.selectedProject);
},
selectable: true,
data: (term, callback) => {
this.loading = true;
return Api.groupProjects(this.groupId, term, (projects) => {
this.loading = false;
callback(projects);
});
},
renderRow(project) {
return `
<li>
<a href='#' class='dropdown-menu-link' data-project-id="${project.id}" data-project-name="${project.name}">
${_.escape(project.name)}
</a>
</li>
`;
},
text: project => project.name,
});
},
};
</script>
<template>
<div>
<label class="label-light prepend-top-10">
Project
</label>
<div
ref="projectsDropdown"
class="dropdown"
>
<button
class="dropdown-menu-toggle wide"
type="button"
data-toggle="dropdown"
aria-expanded="false"
>
{{ selectedProjectName }}
<i
class="fa fa-chevron-down"
aria-hidden="true"
>
</i>
</button>
<div class="dropdown-menu dropdown-menu-selectable dropdown-menu-full-width">
<div class="dropdown-title">
<span>Projects</span>
<button
aria-label="Close"
type="button"
class="dropdown-title-button dropdown-menu-close"
>
<i
aria-hidden="true"
data-hidden="true"
class="fa fa-times dropdown-menu-close-icon"
>
</i>
</button>
</div>
<div class="dropdown-input">
<input
class="dropdown-input-field"
type="search"
placeholder="Search projects"
/>
<i
aria-hidden="true"
data-hidden="true"
class="fa fa-search dropdown-input-search"
>
</i>
</div>
<div class="dropdown-content"></div>
<div class="dropdown-loading">
<loading-icon />
</div>
</div>
</div>
</div>
</template>
...@@ -43,6 +43,7 @@ gl.issueBoards.RemoveIssueBtn = Vue.extend({
labelIds = [''];
}
<<<<<<< HEAD
let assigneeIds = issue.assignees
.map(assignee => assignee.id)
.filter(id => id !== board.assignee.id);
...@@ -51,6 +52,8 @@ gl.issueBoards.RemoveIssueBtn = Vue.extend({
assigneeIds = ['0'];
}
=======
>>>>>>> upstream/master
const data = {
issue: {
label_ids: labelIds,
...@@ -58,6 +61,7 @@ gl.issueBoards.RemoveIssueBtn = Vue.extend({
},
};
<<<<<<< HEAD
if (board.milestone_id) {
data.issue.milestone_id = -1;
}
...@@ -66,6 +70,8 @@ gl.issueBoards.RemoveIssueBtn = Vue.extend({
data.issue.weight = null;
}
=======
>>>>>>> upstream/master
// Post the remove data
Vue.http.patch(this.updateUrl, data).catch(() => {
Flash(__('Failed to remove issue from board, please try again.'));
......
...@@ -7,8 +7,11 @@ export default class FilteredSearchBoards extends FilteredSearchManager {
constructor(store, updateUrl = false, cantEdit = []) {
super({
page: 'boards',
<<<<<<< HEAD
isGroup: true,
filteredSearchTokenKeys: FilteredSearchTokenKeysIssues,
=======
>>>>>>> upstream/master
stateFiltersSelector: '.issues-state-filters',
});
......
...@@ -13,6 +13,7 @@ import sidebarEventHub from '~/sidebar/event_hub'; // eslint-disable-line import
import './models/issue';
import './models/list';
import './models/milestone';
import './models/project';
import './models/assignee';
import './stores/boards_store';
import './stores/modal_store';
...@@ -258,8 +259,11 @@ export default () => {
return {
modal: ModalStore.store,
store: Store.state,
<<<<<<< HEAD
isFullscreen: false,
focusModeAvailable: $boardApp.hasAttribute('data-focus-mode-available'),
=======
>>>>>>> upstream/master
canAdminList: this.$options.el.hasAttribute('data-can-admin-list'),
};
},
......
/* eslint-disable no-unused-vars, no-mixed-operators, comma-dangle */
/* global DocumentTouch */
<<<<<<< HEAD
import sortableConfig from 'ee/sortable/sortable_config';
=======
import sortableConfig from '../../sortable/sortable_config';
>>>>>>> upstream/master
window.gl = window.gl || {};
window.gl.issueBoards = window.gl.issueBoards || {};
......
...@@ -4,7 +4,11 @@
/* global ListAssignee */
import Vue from 'vue';
<<<<<<< HEAD
import IssueProject from 'ee/boards/models/project';
=======
import IssueProject from './project';
>>>>>>> upstream/master
class ListIssue {
constructor (obj, defaultAvatar) {
...@@ -29,7 +33,10 @@ class ListIssue {
this.toggleSubscriptionEndpoint = obj.toggle_subscription_endpoint;
this.milestone_id = obj.milestone_id;
this.project_id = obj.project_id;
<<<<<<< HEAD
this.weight = obj.weight;
=======
>>>>>>> upstream/master
if (obj.project) {
this.project = new IssueProject(obj.project);
......
export default class IssueProject {
constructor(obj) {
this.id = obj.id;
this.path = obj.path;
}
}
...@@ -117,7 +117,10 @@
</script>
<template>
<section class="settings no-animate expanded">
<section
id="cluster-applications"
class="settings no-animate expanded"
>
<div class="settings-header">
<h4>
{{ s__('ClusterIntegration|Applications') }}
......
...@@ -7,34 +7,82 @@
import EmptyState from './empty_state.vue';
import MonitoringStore from '../stores/monitoring_store';
import eventHub from '../event_hub';
import { convertPermissionToBoolean } from '../../lib/utils/common_utils';
export default {
components: {
Graph,
GraphGroup,
EmptyState,
},
data() {
const metricsData = document.querySelector('#prometheus-graphs').dataset;
const store = new MonitoringStore();
props: {
hasMetrics: {
type: Boolean,
required: false,
default: true,
},
showLegend: {
type: Boolean,
required: false,
default: true,
},
showPanels: {
type: Boolean,
required: false,
default: true,
},
forceSmallGraph: {
type: Boolean,
required: false,
default: false,
},
documentationPath: {
type: String,
required: true,
},
settingsPath: {
type: String,
required: true,
},
clustersPath: {
type: String,
required: true,
},
tagsPath: {
type: String,
required: true,
},
projectPath: {
type: String,
required: true,
},
metricsEndpoint: {
type: String,
required: true,
},
deploymentEndpoint: {
type: String,
required: false,
default: null,
},
emptyGettingStartedSvgPath: {
type: String,
required: true,
},
emptyLoadingSvgPath: {
type: String,
required: true,
},
emptyUnableToConnectSvgPath: {
type: String,
required: true,
},
},
data() {
return {
store,
store: new MonitoringStore(),
state: 'gettingStarted',
hasMetrics: convertPermissionToBoolean(metricsData.hasMetrics),
documentationPath: metricsData.documentationPath,
settingsPath: metricsData.settingsPath,
clustersPath: metricsData.clustersPath,
tagsPath: metricsData.tagsPath,
projectPath: metricsData.projectPath,
metricsEndpoint: metricsData.additionalMetrics,
deploymentEndpoint: metricsData.deploymentEndpoint,
emptyGettingStartedSvgPath: metricsData.emptyGettingStartedSvgPath,
emptyLoadingSvgPath: metricsData.emptyLoadingSvgPath,
emptyUnableToConnectSvgPath: metricsData.emptyUnableToConnectSvgPath,
showEmptyState: true,
updateAspectRatio: false,
updatedAspectRatios: 0,
...@@ -67,6 +115,7 @@
window.addEventListener('resize', this.resizeThrottled, false);
}
},
methods: {
getGraphsData() {
this.state = 'loading';
...@@ -115,6 +164,7 @@
v-for="(groupData, index) in store.groups"
:key="index"
:name="groupData.group"
:show-panels="showPanels"
>
<graph
v-for="(graphData, index) in groupData.metrics"
...@@ -125,6 +175,8 @@
:deployment-data="store.deploymentData"
:project-path="projectPath"
:tags-path="tagsPath"
:show-legend="showLegend"
:small-graph="forceSmallGraph"
/>
</graph-group>
</div>
......
...@@ -52,6 +52,16 @@
type: String,
required: true,
},
showLegend: {
type: Boolean,
required: false,
default: true,
},
smallGraph: {
type: Boolean,
required: false,
default: false,
},
},
data() {
...@@ -130,7 +140,7 @@
const breakpointSize = bp.getBreakpointSize();
const query = this.graphData.queries[0];
this.margin = measurements.large.margin;
if (breakpointSize === 'xs' || breakpointSize === 'sm') {
if (this.smallGraph || breakpointSize === 'xs' || breakpointSize === 'sm') {
this.graphHeight = 300;
this.margin = measurements.small.margin;
this.measurements = measurements.small;
...@@ -182,7 +192,9 @@
this.graphHeightOffset,
);
if (this.timeSeries.length > 3) {
if (!this.showLegend) {
this.baseGraphHeight -= 50;
} else if (this.timeSeries.length > 3) {
this.baseGraphHeight = this.baseGraphHeight += (this.timeSeries.length - 3) * 20;
}
...@@ -255,6 +267,7 @@
:time-series="timeSeries"
:unit-of-display="unitOfDisplay"
:current-data-index="currentDataIndex"
:show-legend-group="showLegend"
/>
<svg
class="graph-data"
......
...@@ -39,6 +39,11 @@
type: Number,
required: true,
},
showLegendGroup: {
type: Boolean,
required: false,
default: true,
},
},
data() {
return {
...@@ -57,8 +62,9 @@
},
rectTransform() {
const yCoordinate = ((this.graphHeight - this.margin.top) / 2)
+ (this.yLabelWidth / 2) + 10 || 0;
const yCoordinate = (((this.graphHeight - this.margin.top)
+ this.measurements.axisLabelLineOffset) / 2)
+ (this.yLabelWidth / 2) || 0;
return `translate(0, ${yCoordinate}) rotate(-90)`;
},
...@@ -166,6 +172,7 @@
>
Time
</text>
<template v-if="showLegendGroup">
<g
class="legend-group"
v-for="(series, index) in timeSeries"
...@@ -200,5 +207,6 @@
{{ legendTitle }} {{ formatMetricUsage(series) }}
</text>
</g>
</template>
</g>
</template>
...@@ -5,12 +5,20 @@
type: String,
required: true,
},
showPanels: {
type: Boolean,
required: false,
default: true,
},
},
};
</script>
<template>
<div class="panel panel-default prometheus-panel">
<div
v-if="showPanels"
class="panel panel-default prometheus-panel"
>
<div class="panel-heading">
<h4>{{ name }}</h4>
</div>
...@@ -18,4 +26,10 @@
<slot></slot>
</div>
</div>
<div
v-else
class="prometheus-graph-group"
>
<slot></slot>
</div>
</template>
import Vue from 'vue';
import { convertPermissionToBoolean } from '~/lib/utils/common_utils';
import Dashboard from './components/dashboard.vue';
export default () => new Vue({
el: '#prometheus-graphs',
render: createElement => createElement(Dashboard),
});
export default () => {
const el = document.getElementById('prometheus-graphs');
if (el && el.dataset) {
// eslint-disable-next-line no-new
new Vue({
el,
render(createElement) {
return createElement(Dashboard, {
props: {
...el.dataset,
hasMetrics: convertPermissionToBoolean(el.dataset.hasMetrics),
},
});
},
});
}
};
...@@ -40,6 +40,9 @@ export default class MonitoringService {
}
getDeploymentData() {
if (!this.deploymentEndpoint) {
return Promise.resolve([]);
}
return backOffRequest(() => axios.get(this.deploymentEndpoint))
.then(resp => resp.data)
.then((response) => {
......
export default {
animation: 200,
forceFallback: true,
fallbackClass: 'is-dragging',
fallbackOnBody: true,
ghostClass: 'is-ghost',
};
...@@ -529,7 +529,8 @@
}
> text {
font-size: 12px;
fill: $theme-gray-600;
font-size: 10px;
}
}
......
class Groups::BoardsController < Groups::ApplicationController
<<<<<<< HEAD
prepend EE::Boards::BoardsController
=======
>>>>>>> upstream/master
include BoardsResponses
before_action :assign_endpoint_vars
......
...@@ -24,7 +24,7 @@ class Projects::DeploymentsController < Projects::ApplicationController
end
def additional_metrics
return render_404 unless deployment.has_additional_metrics?
return render_404 unless deployment.has_metrics?
respond_to do |format|
format.json do
......
...@@ -2,11 +2,12 @@ module Projects
module Prometheus
class MetricsController < Projects::ApplicationController
before_action :authorize_admin_project!
before_action :require_prometheus_metrics!
def active_common
respond_to do |format|
format.json do
matched_metrics = prometheus_service.matched_metrics || {}
matched_metrics = prometheus_adapter.query(:matched_metrics) || {}
if matched_metrics.any?
render json: matched_metrics
...@@ -19,8 +20,12 @@ module Projects
private
def prometheus_service
@prometheus_service ||= project.find_or_initialize_service('prometheus')
def prometheus_adapter
@prometheus_adapter ||= ::Prometheus::AdapterService.new(project).prometheus_adapter
end
def require_prometheus_metrics!
render_404 unless prometheus_adapter.can_query?
end
end
end
......
class Board < ActiveRecord::Base
<<<<<<< HEAD
prepend EE::Board
=======
>>>>>>> upstream/master
belongs_to :group
belongs_to :project
......
module Clusters
module Applications
class Prometheus < ActiveRecord::Base
include PrometheusAdapter
VERSION = "2.0.0".freeze
self.table_name = 'clusters_applications_prometheus'
...@@ -39,7 +41,7 @@ module Clusters
)
end
def proxy_client
def prometheus_client
return unless kube_client
proxy_url = kube_client.proxy_url('service', service_name, service_port, Gitlab::Kubernetes::Helm::NAMESPACE)
......
...@@ -53,9 +53,6 @@ module Clusters
scope :enabled, -> { where(enabled: true) }
scope :disabled, -> { where(enabled: false) }
scope :for_environment, -> (env) { where(environment_scope: ['*', '', env.slug]) }
scope :for_all_environments, -> { where(environment_scope: ['*', '']) }
def status_name
if provider
provider.status_name
......
...@@ -9,6 +9,7 @@ class Commit
include Mentionable
include Referable
include StaticModel
include ::Gitlab::Utils::StrongMemoize
attr_mentionable :safe_message, pipeline: :single_line
...@@ -225,11 +226,13 @@ class Commit
end
def parents
@parents ||= parent_ids.map { |id| project.commit(id) }
@parents ||= parent_ids.map { |oid| Commit.lazy(project, oid) }
end
def parent
@parent ||= project.commit(self.parent_id) if self.parent_id
strong_memoize(:parent) do
project.commit_by(oid: self.parent_id) if self.parent_id
end
end
def notes
......
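Editor's note on the `Commit#parent` hunk above: swapping `@parent ||=` for `strong_memoize(:parent)` matters when the memoized value can legitimately be `nil` (a root commit has no parent). A minimal, hypothetical sketch — the `Demo` class and its counter are made up, only `Gitlab::Utils::StrongMemoize` comes from the diff:

```ruby
# Illustrative only: `||=` re-runs the lookup whenever the cached value is nil
# or false, while strong_memoize caches the nil result as well.
class Demo
  include ::Gitlab::Utils::StrongMemoize

  attr_reader :lookups

  def initialize
    @lookups = 0
  end

  def parent_with_or_equals
    @parent ||= expensive_lookup # re-evaluated on every call when the lookup returns nil
  end

  def parent_with_strong_memoize
    strong_memoize(:parent) { expensive_lookup } # block runs once, even when it returns nil
  end

  private

  def expensive_lookup
    @lookups += 1
    nil # e.g. a root commit with no parent
  end
end
```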
module PrometheusAdapter
extend ActiveSupport::Concern
included do
include ReactiveCaching
self.reactive_cache_key = ->(adapter) { [adapter.class.model_name.singular, adapter.id] }
self.reactive_cache_lease_timeout = 30.seconds
self.reactive_cache_refresh_interval = 30.seconds
self.reactive_cache_lifetime = 1.minute
def prometheus_client
raise NotImplementedError
end
def prometheus_client_wrapper
Gitlab::PrometheusClient.new(prometheus_client)
end
def can_query?
prometheus_client.present?
end
def query(query_name, *args)
return unless can_query?
query_class = Gitlab::Prometheus::Queries.const_get("#{query_name.to_s.classify}Query")
args.map!(&:id)
with_reactive_cache(query_class.name, *args, &query_class.method(:transform_reactive_result))
end
# Cache metrics for specific environment
def calculate_reactive_cache(query_class_name, *args)
return unless prometheus_client
data = Kernel.const_get(query_class_name).new(prometheus_client_wrapper).query(*args)
{
success: true,
data: data,
last_update: Time.now.utc
}
rescue Gitlab::PrometheusClient::Error => err
{ success: false, result: err.message }
end
end
end
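A rough usage sketch of the `PrometheusAdapter` concern added above. The adapter class below is hypothetical; only the concern's API (`prometheus_client`, `can_query?`, `query`) and `RestClient::Resource` come from the diff itself:

```ruby
# Hypothetical adapter built on the PrometheusAdapter concern.
# The concern only asks the including class to provide #prometheus_client;
# #can_query?, #query and the reactive-cache plumbing come from the concern.
class ManualPrometheus < ActiveRecord::Base
  include PrometheusAdapter

  def prometheus_client
    # Raw REST resource; nil (so can_query? is false) when nothing is configured.
    RestClient::Resource.new(api_url) if api_url.present?
  end
end

# adapter = ManualPrometheus.first
# adapter.query(:environment, environment) if adapter.can_query?
# => typically nil until the reactive cache has been populated, then the
#    transformed result of Gitlab::Prometheus::Queries::EnvironmentQuery
```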
...@@ -98,28 +98,29 @@ class Deployment < ActiveRecord::Base
end
def has_metrics?
project.monitoring_service.present?
prometheus_adapter&.can_query?
end
def metrics
return {} unless has_metrics?
project.monitoring_service.deployment_metrics(self)
end
def has_additional_metrics?
project.prometheus_service.present?
metrics = prometheus_adapter.query(:deployment, self)
metrics&.merge(deployment_time: created_at.to_i) || {}
end
def additional_metrics
return {} unless project.prometheus_service.present?
metrics = project.prometheus_service.additional_deployment_metrics(self)
return {} unless has_metrics?
metrics = prometheus_adapter.query(:additional_metrics_deployment, self)
metrics&.merge(deployment_time: created_at.to_i) || {}
end
private
def prometheus_adapter
environment.prometheus_adapter
end
def ref_path
File.join(environment.ref_path, 'deployments', iid.to_s)
end
......
...@@ -154,21 +154,19 @@ class Environment < ActiveRecord::Base
end
def has_metrics?
project.monitoring_service.present? && available? && last_deployment.present?
prometheus_adapter&.can_query? && available? && last_deployment.present?
end
def metrics
project.monitoring_service.environment_metrics(self) if has_metrics?
end
def has_additional_metrics?
project.prometheus_service.present? && available? && last_deployment.present?
prometheus_adapter.query(:environment, self) if has_metrics?
end
def additional_metrics
if has_additional_metrics?
project.prometheus_service.additional_environment_metrics(self)
end
prometheus_adapter.query(:additional_metrics_environment, self) if has_metrics?
def prometheus_adapter
@prometheus_adapter ||= Prometheus::AdapterService.new(project, deployment_platform).prometheus_adapter
end
def slug
...@@ -234,6 +232,10 @@ class Environment < ActiveRecord::Base
self.environment_type || self.name
end
def deployment_platform
project.deployment_platform
end
private
# Slugifying a name may remove the uniqueness guarantee afforded by it being
......
...@@ -37,10 +37,13 @@ class Group < Namespace
has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :boards
<<<<<<< HEAD
# We cannot simply set `has_many :audit_events, as: :entity, dependent: :destroy`
# here since Group inherits from Namespace, the entity_type would be set to `Namespace`.
has_many :audit_events, -> { where(entity_type: Group) }, foreign_key: 'entity_id'
=======
>>>>>>> upstream/master
has_many :badges, class_name: 'GroupBadge'
accepts_nested_attributes_for :variables, allow_destroy: true
......
...@@ -281,7 +281,8 @@ class Project < ActiveRecord::Base
scope :without_storage_feature, ->(feature) { where('storage_version < :version OR storage_version IS NULL', version: HASHED_STORAGE_FEATURES[feature]) }
scope :with_unmigrated_storage, -> { where('storage_version < :version OR storage_version IS NULL', version: LATEST_STORAGE_VERSION) }
scope :sorted_by_activity, -> { reorder(last_activity_at: :desc) }
# last_activity_at is throttled every minute, but last_repository_updated_at is updated with every push
scope :sorted_by_activity, -> { reorder("GREATEST(COALESCE(last_activity_at, '1970-01-01'), COALESCE(last_repository_updated_at, '1970-01-01')) DESC") }
scope :sorted_by_stars, -> { reorder('projects.star_count DESC') }
scope :in_namespace, ->(namespace_ids) { where(namespace_id: namespace_ids) }
...@@ -789,7 +790,7 @@ class Project < ActiveRecord::Base
end
def last_activity_date
last_repository_updated_at || last_activity_at || updated_at
[last_activity_at, last_repository_updated_at, updated_at].compact.max
end
def project_id
...@@ -1696,6 +1697,13 @@ class Project < ActiveRecord::Base
# Overridden on EE module
def multiple_issue_boards_available?
false
<<<<<<< HEAD
=======
end
def issue_board_milestone_available?(user = nil)
feature_available?(:issue_board_milestone, user)
>>>>>>> upstream/master
end
def full_path_was
......
...@@ -9,11 +9,11 @@ class MonitoringService < Service
%w()
end
def environment_metrics(environment)
def can_query?
raise NotImplementedError
end
def deployment_metrics(deployment)
def query(_, *_)
raise NotImplementedError
end
end
class PrometheusService < MonitoringService
include ReactiveService
self.reactive_cache_lease_timeout = 30.seconds
self.reactive_cache_refresh_interval = 30.seconds
self.reactive_cache_lifetime = 1.minute
include PrometheusAdapter
# Access to prometheus is directly through the API
prop_accessor :api_url
...@@ -13,7 +9,7 @@ class PrometheusService < MonitoringService
validates :api_url, url: true
end
before_save :synchronize_service_state!
before_save :synchronize_service_state
after_save :clear_reactive_cache!
...@@ -66,63 +62,15 @@ class PrometheusService < MonitoringService
# Check we can connect to the Prometheus API
def test(*args)
client.ping
Gitlab::PrometheusClient.new(prometheus_client).ping
{ success: true, result: 'Checked API endpoint' }
rescue Gitlab::PrometheusClient::Error => err
{ success: false, result: err }
end
def environment_metrics(environment)
with_reactive_cache(Gitlab::Prometheus::Queries::EnvironmentQuery.name, environment.id, &rename_field(:data, :metrics))
def prometheus_client
RestClient::Resource.new(api_url) if api_url && manual_configuration? && active?
end
def deployment_metrics(deployment)
metrics = with_reactive_cache(Gitlab::Prometheus::Queries::DeploymentQuery.name, deployment.environment.id, deployment.id, &rename_field(:data, :metrics))
metrics&.merge(deployment_time: deployment.created_at.to_i) || {}
end
def additional_environment_metrics(environment)
with_reactive_cache(Gitlab::Prometheus::Queries::AdditionalMetricsEnvironmentQuery.name, environment.id, &:itself)
end
def additional_deployment_metrics(deployment)
with_reactive_cache(Gitlab::Prometheus::Queries::AdditionalMetricsDeploymentQuery.name, deployment.environment.id, deployment.id, &:itself)
end
def matched_metrics
with_reactive_cache(Gitlab::Prometheus::Queries::MatchedMetricsQuery.name, &:itself)
end
# Cache metrics for specific environment
def calculate_reactive_cache(query_class_name, *args)
return unless active? && project && !project.pending_delete?
environment_id = args.first
client = client(environment_id)
data = Kernel.const_get(query_class_name).new(client).query(*args)
{
success: true,
data: data,
last_update: Time.now.utc
}
rescue Gitlab::PrometheusClient::Error => err
{ success: false, result: err.message }
end
def client(environment_id = nil)
if manual_configuration?
Gitlab::PrometheusClient.new(RestClient::Resource.new(api_url))
else
cluster = cluster_with_prometheus(environment_id)
raise Gitlab::PrometheusClient::Error, "couldn't find cluster with Prometheus installed" unless cluster
rest_client = client_from_cluster(cluster)
raise Gitlab::PrometheusClient::Error, "couldn't create proxy Prometheus client" unless rest_client
Gitlab::PrometheusClient.new(rest_client)
end
end end
def prometheus_installed?
...@@ -134,32 +82,7 @@ class PrometheusService < MonitoringService
private
def cluster_with_prometheus(environment_id = nil)
def synchronize_service_state
clusters = if environment_id
::Environment.find_by(id: environment_id).try do |env|
# sort results by descending order based on environment_scope being longer
# thus more closely matching environment slug
project.clusters.enabled.for_environment(env).sort_by { |c| c.environment_scope&.length }.reverse!
end
else
project.clusters.enabled.for_all_environments
end
clusters&.detect { |cluster| cluster.application_prometheus&.installed? }
end
def client_from_cluster(cluster)
cluster.application_prometheus.proxy_client
end
def rename_field(old_field, new_field)
-> (metrics) do
metrics[new_field] = metrics.delete(old_field)
metrics
end
end
def synchronize_service_state!
self.active = prometheus_installed? || manual_configuration?
true
......
module Prometheus
class AdapterService
def initialize(project, deployment_platform = nil)
@project = project
@deployment_platform = if deployment_platform
deployment_platform
else
project.deployment_platform
end
end
attr_reader :deployment_platform, :project
def prometheus_adapter
@prometheus_adapter ||= if service_prometheus_adapter.can_query?
service_prometheus_adapter
else
cluster_prometheus_adapter
end
end
def service_prometheus_adapter
project.find_or_initialize_service('prometheus')
end
def cluster_prometheus_adapter
return unless deployment_platform.respond_to?(:cluster)
cluster = deployment_platform.cluster
return unless cluster.application_prometheus&.installed?
cluster.application_prometheus
end
end
end
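For orientation, a hedged sketch of how the `Prometheus::AdapterService` above is meant to be consumed, mirroring `Environment#prometheus_adapter` and `Deployment#prometheus_adapter` from earlier hunks; the `report_source` helper is made up for illustration:

```ruby
# Illustrative only: pick whichever Prometheus integration can currently answer
# queries -- the manually configured PrometheusService wins, otherwise the
# Prometheus application installed on the project's deployment platform.
adapter = Prometheus::AdapterService.new(project, project.deployment_platform).prometheus_adapter

metrics = adapter.query(:environment, environment) if adapter&.can_query?

# Hypothetical helper that just names the precedence in one place.
def report_source(project)
  case Prometheus::AdapterService.new(project).prometheus_adapter
  when PrometheusService then :manual_configuration
  when Clusters::Applications::Prometheus then :cluster_application
  else :none
  end
end
```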
- page_title 'Labels'
<<<<<<< HEAD
- issuables = ['issues', 'merge requests'] + (@group&.feature_available?(:epics) ? ['epics'] : [])
=======
- issuables = ['issues', 'merge requests']
>>>>>>> upstream/master
.top-area.adjust
.nav-text
......
...@@ -15,7 +15,8 @@
"empty-getting-started-svg-path": image_path('illustrations/monitoring/getting_started.svg'),
"empty-loading-svg-path": image_path('illustrations/monitoring/loading.svg'),
"empty-unable-to-connect-svg-path": image_path('illustrations/monitoring/unable_to_connect.svg'),
"additional-metrics": additional_metrics_project_environment_path(@project, @environment, format: :json),
"metrics-endpoint": additional_metrics_project_environment_path(@project, @environment, format: :json),
"deployment-endpoint": project_environment_deployments_path(@project, @environment, format: :json),
"project-path": project_path(@project),
"tags-path": project_tags_path(@project),
"has-metrics": "#{@environment.has_metrics?}", deployment_endpoint: project_environment_deployments_path(@project, @environment, format: :json) } }
"has-metrics": "#{@environment.has_metrics?}" } }
- @no_container = true
- @sort ||= sort_value_recently_updated
- page_title s_('TagsPage|Tags')
- add_to_breadcrumbs("Repository", project_tree_path(@project))
.flex-list{ class: container_class }
.top-area.adjust
......
...@@ -137,6 +137,9 @@
= dropdown_loading
- if @project
#js-add-issues-btn.prepend-left-10{ data: { can_admin_list: can?(current_user, :admin_list, @project) } }
<<<<<<< HEAD
#js-toggle-focus-btn.prepend-left-10
=======
>>>>>>> upstream/master
- elsif type != :boards_modal
= render 'shared/sort_dropdown'
...@@ -48,7 +48,6 @@
- pipeline_default:build_trace_sections
- pipeline_default:pipeline_metrics
- pipeline_default:pipeline_notification
- pipeline_default:update_head_pipeline_for_merge_request
- pipeline_hooks:build_hooks
- pipeline_hooks:pipeline_hooks
- pipeline_processing:build_finished
...@@ -58,6 +57,7 @@
- pipeline_processing:pipeline_success
- pipeline_processing:pipeline_update
- pipeline_processing:stage_update
- pipeline_processing:update_head_pipeline_for_merge_request
- repository_check:repository_check_clear
- repository_check:repository_check_single_repository
......
...@@ -2,6 +2,8 @@ class UpdateHeadPipelineForMergeRequestWorker
include ApplicationWorker
include PipelineQueue
queue_namespace :pipeline_processing
def perform(merge_request_id)
merge_request = MergeRequest.find(merge_request_id)
pipeline = Ci::Pipeline.where(project: merge_request.source_project, ref: merge_request.source_branch).last
......
---
title: Remove extra breadcrumb on tags
merge_request: 17562
author: Takuya Noguchi
type: fixed
---
title: Started translation into Turkish, Indonesian and Filipino
merge_request: 17526
author:
type: other
---
title: Add one group board to Libre
merge_request:
author:
type: added
---
title: Fix project dashboard showing the wrong timestamps
merge_request:
author:
type: fixed
...@@ -59,6 +59,7 @@ constraints(GroupUrlConstrainer.new) do
end
end
<<<<<<< HEAD
## EE-specific
resource :analytics, only: [:show]
resource :ldap, only: [] do
...@@ -101,6 +102,10 @@ constraints(GroupUrlConstrainer.new) do
## EE-specific
resource :roadmap, only: [:show], controller: 'roadmap'
=======
# On CE only index and show actions are needed
resources :boards, only: [:index, :show]
>>>>>>> upstream/master
end
scope(path: '*id',
......
...@@ -423,7 +423,11 @@ constraints(ProjectUrlConstrainer.new) do
get 'noteable/:target_type/:target_id/notes' => 'notes#index', as: 'noteable_notes'
# On CE only index and show are needed
<<<<<<< HEAD
resources :boards, only: [:index, :show, :create, :update, :destroy]
=======
resources :boards, only: [:index, :show]
>>>>>>> upstream/master
resources :todos, only: [:create]
......
...@@ -70,6 +70,7 @@
- [pages_domain_verification, 1]
- [plugin, 1]
- [pipeline_background, 1]
<<<<<<< HEAD
# EE-specific queues
- [ldap_group_sync, 2]
...@@ -86,3 +87,5 @@
- [export_csv, 1]
- [object_storage_upload, 1]
- [object_storage, 1]
=======
>>>>>>> upstream/master
class AddGroupIdToBoards < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
disable_ddl_transaction!
DOWNTIME = false
def up
return if group_id_exists?
add_column :boards, :group_id, :integer
add_foreign_key :boards, :namespaces, column: :group_id, on_delete: :cascade
add_concurrent_index :boards, :group_id
change_column_null :boards, :project_id, true
end
def down
return unless group_id_exists?
remove_foreign_key :boards, column: :group_id
remove_index :boards, :group_id if index_exists? :boards, :group_id
remove_column :boards, :group_id
execute "DELETE from boards WHERE project_id IS NULL"
change_column_null :boards, :project_id, false
end
private
def group_id_exists?
column_exists?(:boards, :group_id)
end
end
class MigrateUpdateHeadPipelineForMergeRequestSidekiqQueue < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
sidekiq_queue_migrate 'pipeline_default:update_head_pipeline_for_merge_request',
to: 'pipeline_processing:update_head_pipeline_for_merge_request'
end
def down
sidekiq_queue_migrate 'pipeline_processing:update_head_pipeline_for_merge_request',
to: 'pipeline_default:update_head_pipeline_for_merge_request'
end
end
...@@ -11,7 +11,11 @@
#
# It's strongly recommended that you check this file into your version control system.
<<<<<<< HEAD
ActiveRecord::Schema.define(version: 20180306074045) do
=======
ActiveRecord::Schema.define(version: 20180307012445) do
>>>>>>> upstream/master
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
...@@ -272,6 +276,7 @@ ActiveRecord::Schema.define(version: 20180306074045) do
t.integer "project_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
<<<<<<< HEAD
t.string "name", default: "Development", null: false
t.integer "milestone_id"
t.integer "group_id"
...@@ -280,6 +285,12 @@ ActiveRecord::Schema.define(version: 20180306074045) do
add_index "boards", ["group_id"], name: "index_boards_on_group_id", using: :btree
add_index "boards", ["milestone_id"], name: "index_boards_on_milestone_id", using: :btree
=======
t.integer "group_id"
end
add_index "boards", ["group_id"], name: "index_boards_on_group_id", using: :btree
>>>>>>> upstream/master
add_index "boards", ["project_id"], name: "index_boards_on_project_id", using: :btree
create_table "broadcast_messages", force: :cascade do |t|
...@@ -2518,11 +2529,15 @@ ActiveRecord::Schema.define(version: 20180306074045) do
add_foreign_key "approver_groups", "namespaces", column: "group_id", on_delete: :cascade
add_foreign_key "badges", "namespaces", column: "group_id", on_delete: :cascade
add_foreign_key "badges", "projects", on_delete: :cascade
<<<<<<< HEAD
add_foreign_key "board_assignees", "boards", on_delete: :cascade
add_foreign_key "board_assignees", "users", column: "assignee_id", on_delete: :cascade
add_foreign_key "board_labels", "boards", on_delete: :cascade
add_foreign_key "board_labels", "labels", on_delete: :cascade
add_foreign_key "boards", "namespaces", column: "group_id", name: "fk_1e9a074a35", on_delete: :cascade
=======
add_foreign_key "boards", "namespaces", column: "group_id", on_delete: :cascade
>>>>>>> upstream/master
add_foreign_key "boards", "projects", name: "fk_f15266b5f9", on_delete: :cascade
add_foreign_key "chat_teams", "namespaces", on_delete: :cascade
add_foreign_key "ci_build_trace_section_names", "projects", on_delete: :cascade
......
...@@ -88,8 +88,12 @@ Manage your [repositories](user/project/repository/index.md) from the UI (user i ...@@ -88,8 +88,12 @@ Manage your [repositories](user/project/repository/index.md) from the UI (user i
- [Discussions](user/discussions/index.md): Threads, comments, and resolvable discussions in issues, commits, and merge requests. - [Discussions](user/discussions/index.md): Threads, comments, and resolvable discussions in issues, commits, and merge requests.
- [Issues](user/project/issues/index.md) - [Issues](user/project/issues/index.md)
- [Project issue Board](user/project/issue_board.md) - [Project issue Board](user/project/issue_board.md)
<<<<<<< HEAD
- [Group Issue Boards](user/project/issue_board.md#group-issue-boards) - [Group Issue Boards](user/project/issue_board.md#group-issue-boards)
- **(Starter/Premium)** [Related Issues](user/project/issues/related_issues.md): create a relationship between issues - **(Starter/Premium)** [Related Issues](user/project/issues/related_issues.md): create a relationship between issues
=======
- [Group Issue Board](user/project/issue_board.md#group-issue-board)
>>>>>>> upstream/master
- [Issues and merge requests templates](user/project/description_templates.md): Create templates for submitting new issues and merge requests. - [Issues and merge requests templates](user/project/description_templates.md): Create templates for submitting new issues and merge requests.
- [Labels](user/project/labels.md): Categorize your issues or merge requests based on descriptive titles. - [Labels](user/project/labels.md): Categorize your issues or merge requests based on descriptive titles.
- [Merge Requests](user/project/merge_requests/index.md) - [Merge Requests](user/project/merge_requests/index.md)
......
...@@ -27,12 +27,16 @@ Example response: ...@@ -27,12 +27,16 @@ Example response:
[ [
{ {
"id": 1, "id": 1,
<<<<<<< HEAD
"name:": "group issue board", "name:": "group issue board",
"group_id": 5, "group_id": 5,
"milestone": { "milestone": {
"id": 12 "id": 12
"title": "10.0" "title": "10.0"
}, },
=======
"group_id": 5,
>>>>>>> upstream/master
"lists" : [ "lists" : [
{ {
"id" : 1, "id" : 1,
...@@ -88,6 +92,7 @@ Example response: ...@@ -88,6 +92,7 @@ Example response:
```json ```json
{ {
"id": 1, "id": 1,
<<<<<<< HEAD
"name:": "group issue board", "name:": "group issue board",
"group_id": 5, "group_id": 5,
"milestone": { "milestone": {
...@@ -154,6 +159,9 @@ Example response: ...@@ -154,6 +159,9 @@ Example response:
"id": 12 "id": 12
"title": "10.0" "title": "10.0"
}, },
=======
"group_id": 5,
>>>>>>> upstream/master
"lists" : [ "lists" : [
{ {
"id" : 1, "id" : 1,
...@@ -186,6 +194,7 @@ Example response: ...@@ -186,6 +194,7 @@ Example response:
} }
``` ```
<<<<<<< HEAD
## Delete a board ## Delete a board
Deletes a board. Deletes a board.
...@@ -203,6 +212,8 @@ DELETE /groups/:id/boards/:board_id ...@@ -203,6 +212,8 @@ DELETE /groups/:id/boards/:board_id
curl --request DELETE --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/groups/5/boards/1 curl --request DELETE --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/groups/5/boards/1
``` ```
=======
>>>>>>> upstream/master
## List board lists ## List board lists
Get a list of the board's lists. Get a list of the board's lists.
......
...@@ -139,6 +139,14 @@ CREATE EXTENSION pg_trgm; ...@@ -139,6 +139,14 @@ CREATE EXTENSION pg_trgm;
On some systems you may need to install an additional package (e.g. On some systems you may need to install an additional package (e.g.
`postgresql-contrib`) for this extension to become available. `postgresql-contrib`) for this extension to become available.
#### Additional requirements for GitLab Geo
If you are using [GitLab Geo](https://docs.gitlab.com/ee/development/geo.html), the [tracking database](https://docs.gitlab.com/ee/development/geo.html#geo-tracking-database) also requires the `postgres_fdw` extension.
```
CREATE EXTENSION postgres_fdw;
```
## Unicorn Workers ## Unicorn Workers
It's possible to increase the amount of unicorn workers and this will usually help to reduce the response time of the applications and increase the ability to handle parallel requests. It's possible to increase the amount of unicorn workers and this will usually help to reduce the response time of the applications and increase the ability to handle parallel requests.
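For a source installation the worker count is usually set in `config/unicorn.rb`; Omnibus installations use `unicorn['worker_processes']` in `/etc/gitlab/gitlab.rb`. A hedged sketch, not official sizing guidance:
```ruby
# config/unicorn.rb (source installations); tune to your CPU cores and RAM.
worker_processes 4

# Omnibus equivalent in /etc/gitlab/gitlab.rb:
#   unicorn['worker_processes'] = 4
```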
......
...@@ -329,6 +329,16 @@ Click the button at the top right to toggle focus mode on and off. In focus mode ...@@ -329,6 +329,16 @@ Click the button at the top right to toggle focus mode on and off. In focus mode
[Developers and up](../permissions.md) can use all the functionality of the [Developers and up](../permissions.md) can use all the functionality of the
Issue Board, that is create/delete lists and drag issues around. Issue Board, that is create/delete lists and drag issues around.
## Group Issue Board
> Introduced in GitLab 10.6
A group issue board is analogous to a project-level issue board and is accessible from the group
navigation level. A group-level issue board lets you view all issues from all projects in that group
(it does not currently show issues from projects in subgroups). Similarly, you can only filter by group labels on these
boards, and when updating an issue's milestone or labels through the sidebar, only
group-level objects are available.
## Tips ## Tips
A few things to remember: A few things to remember:
......
...@@ -132,6 +132,7 @@ module API ...@@ -132,6 +132,7 @@ module API
mount ::API::Events mount ::API::Events
mount ::API::Features mount ::API::Features
mount ::API::Files mount ::API::Files
mount ::API::GroupBoards
mount ::API::Groups mount ::API::Groups
mount ::API::GroupBoards mount ::API::GroupBoards
mount ::API::GroupMilestones mount ::API::GroupMilestones
......
module API module API
class GroupBoards < Grape::API class GroupBoards < Grape::API
include BoardsResponses include BoardsResponses
<<<<<<< HEAD
include EE::API::BoardsResponses include EE::API::BoardsResponses
=======
>>>>>>> upstream/master
include PaginationParams include PaginationParams
before do before do
...@@ -21,7 +24,11 @@ module API ...@@ -21,7 +24,11 @@ module API
resource :groups, requirements: API::PROJECT_ENDPOINT_REQUIREMENTS do resource :groups, requirements: API::PROJECT_ENDPOINT_REQUIREMENTS do
segment ':id/boards' do segment ':id/boards' do
desc 'Find a group board' do desc 'Find a group board' do
<<<<<<< HEAD
detail 'This feature was introduced in 10.4' detail 'This feature was introduced in 10.4'
=======
detail 'This feature was introduced in 10.6'
>>>>>>> upstream/master
success ::API::Entities::Board success ::API::Entities::Board
end end
get '/:board_id' do get '/:board_id' do
...@@ -29,7 +36,11 @@ module API ...@@ -29,7 +36,11 @@ module API
end end
desc 'Get all group boards' do desc 'Get all group boards' do
<<<<<<< HEAD
detail 'This feature was introduced in 10.4' detail 'This feature was introduced in 10.4'
=======
detail 'This feature was introduced in 10.6'
>>>>>>> upstream/master
success Entities::Board success Entities::Board
end end
params do params do
...@@ -45,7 +56,11 @@ module API ...@@ -45,7 +56,11 @@ module API
end end
segment ':id/boards/:board_id' do segment ':id/boards/:board_id' do
desc 'Get the lists of a group board' do desc 'Get the lists of a group board' do
<<<<<<< HEAD
detail 'Does not include backlog and closed lists. This feature was introduced in 10.4' detail 'Does not include backlog and closed lists. This feature was introduced in 10.4'
=======
detail 'Does not include backlog and closed lists. This feature was introduced in 10.6'
>>>>>>> upstream/master
success Entities::List success Entities::List
end end
params do params do
...@@ -56,7 +71,11 @@ module API ...@@ -56,7 +71,11 @@ module API
end end
desc 'Get a list of a group board' do desc 'Get a list of a group board' do
<<<<<<< HEAD
detail 'This feature was introduced in 10.4' detail 'This feature was introduced in 10.4'
=======
detail 'This feature was introduced in 10.6'
>>>>>>> upstream/master
success Entities::List success Entities::List
end end
params do params do
...@@ -67,7 +86,11 @@ module API ...@@ -67,7 +86,11 @@ module API
end end
desc 'Create a new board list' do desc 'Create a new board list' do
<<<<<<< HEAD
detail 'This feature was introduced in 10.4' detail 'This feature was introduced in 10.4'
=======
detail 'This feature was introduced in 10.6'
>>>>>>> upstream/master
success Entities::List success Entities::List
end end
params do params do
...@@ -84,7 +107,11 @@ module API ...@@ -84,7 +107,11 @@ module API
end end
desc 'Moves a board list to a new position' do desc 'Moves a board list to a new position' do
<<<<<<< HEAD
detail 'This feature was introduced in 10.4' detail 'This feature was introduced in 10.4'
=======
detail 'This feature was introduced in 10.6'
>>>>>>> upstream/master
success Entities::List success Entities::List
end end
params do params do
...@@ -100,7 +127,11 @@ module API ...@@ -100,7 +127,11 @@ module API
end end
desc 'Delete a board list' do desc 'Delete a board list' do
<<<<<<< HEAD
detail 'This feature was introduced in 10.4' detail 'This feature was introduced in 10.4'
=======
detail 'This feature was introduced in 10.6'
>>>>>>> upstream/master
success Entities::List success Entities::List
end end
params do params do
......
...@@ -9,8 +9,11 @@ module API ...@@ -9,8 +9,11 @@ module API
end end
end end
<<<<<<< HEAD
prepend EE::API::JobArtifacts prepend EE::API::JobArtifacts
=======
>>>>>>> upstream/master
params do params do
requires :id, type: String, desc: 'The ID of a project' requires :id, type: String, desc: 'The ID of a project'
end end
......
...@@ -32,8 +32,11 @@ module API ...@@ -32,8 +32,11 @@ module API
] ]
end end
<<<<<<< HEAD
prepend EE::API::MergeRequests prepend EE::API::MergeRequests
=======
>>>>>>> upstream/master
helpers do helpers do
def find_merge_requests(args = {}) def find_merge_requests(args = {})
args = declared_params.merge(args) args = declared_params.merge(args)
......
...@@ -137,7 +137,12 @@ module Gitlab ...@@ -137,7 +137,12 @@ module Gitlab
job.create_job_artifacts_trace!( job.create_job_artifacts_trace!(
project: job.project, project: job.project,
file_type: :trace, file_type: :trace,
<<<<<<< HEAD
file: stream) file: stream)
=======
file: stream,
file_sha256: Digest::SHA256.file(path).hexdigest)
>>>>>>> upstream/master
end end
end end
......
...@@ -18,7 +18,10 @@ module Gitlab ...@@ -18,7 +18,10 @@ module Gitlab
'uk' => 'Українська', 'uk' => 'Українська',
'ja' => '日本語', 'ja' => '日本語',
'ko' => '한국어', 'ko' => '한국어',
'nl_NL' => 'Nederlands' 'nl_NL' => 'Nederlands',
'tr_TR' => 'Türkçe',
'id_ID' => 'Bahasa Indonesia',
'fil_PH' => 'Filipino'
}.freeze }.freeze
def available_locales def available_locales
......
module Gitlab module Gitlab
module Prometheus module Prometheus
module AdditionalMetricsParser module AdditionalMetricsParser
CONFIG_ROOT = 'config/prometheus'.freeze
MUTEX = Mutex.new
extend self extend self
def load_groups_from_yaml def load_groups_from_yaml(file_name = 'additional_metrics.yml')
additional_metrics_raw.map(&method(:group_from_entry)) yaml_metrics_raw(file_name).map(&method(:group_from_entry))
end end
private private
...@@ -22,13 +24,20 @@ module Gitlab ...@@ -22,13 +24,20 @@ module Gitlab
MetricGroup.new(entry).tap(&method(:validate!)) MetricGroup.new(entry).tap(&method(:validate!))
end end
def additional_metrics_raw def yaml_metrics_raw(file_name)
load_yaml_file&.map(&:deep_symbolize_keys).freeze load_yaml_file(file_name)&.map(&:deep_symbolize_keys).freeze
end end
def load_yaml_file # rubocop:disable Gitlab/ModuleWithInstanceVariables
@loaded_yaml_file ||= YAML.load_file(Rails.root.join('config/prometheus/additional_metrics.yml')) def load_yaml_file(file_name)
return YAML.load_file(Rails.root.join(CONFIG_ROOT, file_name)) if Rails.env.development?
MUTEX.synchronize do
@loaded_yaml_cache ||= {}
@loaded_yaml_cache[file_name] ||= YAML.load_file(Rails.root.join(CONFIG_ROOT, file_name))
end
end end
# rubocop:enable Gitlab/ModuleWithInstanceVariables
end end
end end
end end
...@@ -4,7 +4,7 @@ module Gitlab ...@@ -4,7 +4,7 @@ module Gitlab
class AdditionalMetricsDeploymentQuery < BaseQuery class AdditionalMetricsDeploymentQuery < BaseQuery
include QueryAdditionalMetrics include QueryAdditionalMetrics
def query(environment_id, deployment_id) def query(deployment_id)
Deployment.find_by(id: deployment_id).try do |deployment| Deployment.find_by(id: deployment_id).try do |deployment|
query_metrics( query_metrics(
deployment.project, deployment.project,
......
...@@ -20,6 +20,10 @@ module Gitlab ...@@ -20,6 +20,10 @@ module Gitlab
def query(*args) def query(*args)
raise NotImplementedError raise NotImplementedError
end end
def self.transform_reactive_result(result)
result
end
end end
end end
end end
......
...@@ -2,7 +2,7 @@ module Gitlab ...@@ -2,7 +2,7 @@ module Gitlab
module Prometheus module Prometheus
module Queries module Queries
class DeploymentQuery < BaseQuery class DeploymentQuery < BaseQuery
def query(environment_id, deployment_id) def query(deployment_id)
Deployment.find_by(id: deployment_id).try do |deployment| Deployment.find_by(id: deployment_id).try do |deployment|
environment_slug = deployment.environment.slug environment_slug = deployment.environment.slug
...@@ -25,6 +25,11 @@ module Gitlab ...@@ -25,6 +25,11 @@ module Gitlab
} }
end end
end end
def self.transform_reactive_result(result)
result[:metrics] = result.delete :data
result
end
end end
end end
end end
......
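The new `self.transform_reactive_result` hook on `BaseQuery` lets each query class reshape the reactively cached payload before it is handed back; `DeploymentQuery` uses it to rename `:data` to `:metrics`. A standalone sketch of the pattern (class names mirror the diff, but this is an illustration rather than the full implementation):
```ruby
class BaseQuery
  def self.transform_reactive_result(result)
    result # default: pass the cached payload through unchanged
  end
end

class DeploymentQuery < BaseQuery
  def self.transform_reactive_result(result)
    result[:metrics] = result.delete(:data)
    result
  end
end

p DeploymentQuery.transform_reactive_result(success: true, data: { cpu_values: [] })
# => {:success=>true, :metrics=>{:cpu_values=>[]}}
```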
...@@ -19,6 +19,11 @@ module Gitlab ...@@ -19,6 +19,11 @@ module Gitlab
} }
end end
end end
def self.transform_reactive_result(result)
result[:metrics] = result.delete :data
result
end
end end
end end
end end
......
module Gitlab module Gitlab
module Prometheus module Prometheus
module Queries module Queries
class MatchedMetricsQuery < BaseQuery class MatchedMetricQuery < BaseQuery
MAX_QUERY_ITEMS = 40.freeze MAX_QUERY_ITEMS = 40.freeze
def query def query
......
...@@ -3,9 +3,16 @@ module Gitlab ...@@ -3,9 +3,16 @@ module Gitlab
module Queries module Queries
module QueryAdditionalMetrics module QueryAdditionalMetrics
def query_metrics(project, query_context) def query_metrics(project, query_context)
matched_metrics(project).map(&query_group(query_context))
.select(&method(:group_with_any_metrics))
end
protected
def query_group(query_context)
query_processor = method(:process_query).curry[query_context] query_processor = method(:process_query).curry[query_context]
groups = matched_metrics(project).map do |group| lambda do |group|
metrics = group.metrics.map do |metric| metrics = group.metrics.map do |metric|
{ {
title: metric.title, title: metric.title,
...@@ -21,8 +28,6 @@ module Gitlab ...@@ -21,8 +28,6 @@ module Gitlab
metrics: metrics.select(&method(:metric_with_any_queries)) metrics: metrics.select(&method(:metric_with_any_queries))
} }
end end
groups.select(&method(:group_with_any_metrics))
end end
private private
...@@ -72,12 +77,17 @@ module Gitlab ...@@ -72,12 +77,17 @@ module Gitlab
end end
def common_query_context(environment, timeframe_start:, timeframe_end:) def common_query_context(environment, timeframe_start:, timeframe_end:)
{ base_query_context(timeframe_start, timeframe_end).merge({
timeframe_start: timeframe_start,
timeframe_end: timeframe_end,
ci_environment_slug: environment.slug, ci_environment_slug: environment.slug,
kube_namespace: environment.project.deployment_platform(environment: environment)&.actual_namespace || '', kube_namespace: environment.project.deployment_platform(environment: environment)&.actual_namespace || '',
environment_filter: %{container_name!="POD",environment="#{environment.slug}"} environment_filter: %{container_name!="POD",environment="#{environment.slug}"}
})
end
def base_query_context(timeframe_start, timeframe_end)
{
timeframe_start: timeframe_start,
timeframe_end: timeframe_end
} }
end end
end end
......
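The `query_group` refactor relies on `Method#curry`: `method(:process_query).curry[query_context]` pre-binds the query context so the resulting lambda only needs the remaining argument and can be mapped over each metric. A tiny standalone illustration with hypothetical names:
```ruby
def process_query(context, query)
  { query: query, context: context }
end

# Pre-bind the first argument; the curried proc waits for the rest.
query_processor = method(:process_query).curry[{ environment: 'production' }]

p query_processor.call('up')
# => {:query=>"up", :context=>{:environment=>"production"}}
```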
...@@ -57,7 +57,11 @@ module Gitlab ...@@ -57,7 +57,11 @@ module Gitlab
rescue OpenSSL::SSL::SSLError rescue OpenSSL::SSL::SSLError
raise PrometheusClient::Error, "#{rest_client.url} contains invalid SSL data" raise PrometheusClient::Error, "#{rest_client.url} contains invalid SSL data"
rescue RestClient::ExceptionWithResponse => ex rescue RestClient::ExceptionWithResponse => ex
if ex.response
handle_exception_response(ex.response) handle_exception_response(ex.response)
else
raise PrometheusClient::Error, "Network connection error"
end
rescue RestClient::Exception rescue RestClient::Exception
raise PrometheusClient::Error, "Network connection error" raise PrometheusClient::Error, "Network connection error"
end end
......
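The added guard accounts for `RestClient::ExceptionWithResponse` being raised without a response object, for example when the connection drops mid-request; in that case it is treated as a network error instead of being dereferenced. A hedged standalone sketch of the same pattern:
```ruby
require 'rest-client'

# Illustration of the nil-response guard; not the GitLab client itself.
def safe_get(url)
  RestClient.get(url)
rescue RestClient::ExceptionWithResponse => ex
  if ex.response
    "HTTP error: #{ex.response.code}"
  else
    "Network connection error"
  end
rescue RestClient::Exception
  "Network connection error"
end
```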
...@@ -39,7 +39,11 @@ describe Groups::BoardsController do ...@@ -39,7 +39,11 @@ describe Groups::BoardsController do
context 'when format is JSON' do context 'when format is JSON' do
it 'return an array with one group board' do it 'return an array with one group board' do
<<<<<<< HEAD
create(:board, group: group, milestone: create(:milestone, group: group)) create(:board, group: group, milestone: create(:milestone, group: group))
=======
create(:board, group: group)
>>>>>>> upstream/master
list_boards format: :json list_boards format: :json
......
...@@ -129,10 +129,10 @@ describe Projects::DeploymentsController do ...@@ -129,10 +129,10 @@ describe Projects::DeploymentsController do
end end
context 'when metrics are enabled' do context 'when metrics are enabled' do
let(:prometheus_service) { double('prometheus_service') } let(:prometheus_adapter) { double('prometheus_adapter', can_query?: true) }
before do before do
allow(deployment.project).to receive(:prometheus_service).and_return(prometheus_service) allow(deployment).to receive(:prometheus_adapter).and_return(prometheus_adapter)
end end
context 'when environment has no metrics' do context 'when environment has no metrics' do
......
...@@ -4,21 +4,22 @@ describe Projects::Prometheus::MetricsController do ...@@ -4,21 +4,22 @@ describe Projects::Prometheus::MetricsController do
let(:user) { create(:user) } let(:user) { create(:user) }
let(:project) { create(:project) } let(:project) { create(:project) }
let(:prometheus_service) { double('prometheus_service') } let(:prometheus_adapter) { double('prometheus_adapter', can_query?: true) }
before do before do
allow(controller).to receive(:project).and_return(project)
allow(project).to receive(:find_or_initialize_service).with('prometheus').and_return(prometheus_service)
project.add_master(user) project.add_master(user)
sign_in(user) sign_in(user)
end end
describe 'GET #active_common' do describe 'GET #active_common' do
before do
allow(controller).to receive(:prometheus_adapter).and_return(prometheus_adapter)
end
context 'when prometheus metrics are enabled' do context 'when prometheus metrics are enabled' do
context 'when data is not present' do context 'when data is not present' do
before do before do
allow(prometheus_service).to receive(:matched_metrics).and_return({}) allow(prometheus_adapter).to receive(:query).with(:matched_metrics).and_return({})
end end
it 'returns no content response' do it 'returns no content response' do
...@@ -32,7 +33,7 @@ describe Projects::Prometheus::MetricsController do ...@@ -32,7 +33,7 @@ describe Projects::Prometheus::MetricsController do
let(:sample_response) { { some_data: 1 } } let(:sample_response) { { some_data: 1 } }
before do before do
allow(prometheus_service).to receive(:matched_metrics).and_return(sample_response) allow(prometheus_adapter).to receive(:query).with(:matched_metrics).and_return(sample_response)
end end
it 'returns no content response' do it 'returns no content response' do
...@@ -53,6 +54,18 @@ describe Projects::Prometheus::MetricsController do ...@@ -53,6 +54,18 @@ describe Projects::Prometheus::MetricsController do
end end
end end
describe '#prometheus_adapter' do
before do
allow(controller).to receive(:project).and_return(project)
end
it 'calls prometheus adapter service' do
expect_any_instance_of(::Prometheus::AdapterService).to receive(:prometheus_adapter)
subject.__send__(:prometheus_adapter)
end
end
def project_params(opts = {}) def project_params(opts = {})
opts.reverse_merge(namespace_id: project.namespace, project_id: project) opts.reverse_merge(namespace_id: project.namespace, project_id: project)
end end
......
FactoryBot.define do FactoryBot.define do
factory :board do factory :board do
<<<<<<< HEAD
sequence(:name) { |n| "board#{n}" } sequence(:name) { |n| "board#{n}" }
=======
>>>>>>> upstream/master
transient do transient do
project nil project nil
group nil group nil
......
...@@ -37,6 +37,14 @@ feature 'Dashboard Projects' do ...@@ -37,6 +37,14 @@ feature 'Dashboard Projects' do
expect(page).to have_xpath("//time[@datetime='#{project.last_repository_updated_at.getutc.iso8601}']") expect(page).to have_xpath("//time[@datetime='#{project.last_repository_updated_at.getutc.iso8601}']")
end end
it 'shows the last_activity_at attribute as the update date' do
project.update_attributes!(last_repository_updated_at: 1.hour.ago, last_activity_at: Time.now)
visit dashboard_projects_path
expect(page).to have_xpath("//time[@datetime='#{project.last_activity_at.getutc.iso8601}']")
end
end end
context 'when last_repository_updated_at and last_activity_at are missing' do context 'when last_repository_updated_at and last_activity_at are missing' do
......
...@@ -138,10 +138,17 @@ describe('Api', () => { ...@@ -138,10 +138,17 @@ describe('Api', () => {
}); });
}); });
<<<<<<< HEAD
it('creates a new group label', (done) => { it('creates a new group label', (done) => {
const namespace = 'some namespace'; const namespace = 'some namespace';
const labelData = { some: 'data' }; const labelData = { some: 'data' };
const expectedUrl = Api.buildUrl(Api.groupLabelsPath).replace(':namespace_path', namespace); const expectedUrl = Api.buildUrl(Api.groupLabelsPath).replace(':namespace_path', namespace);
=======
it('creates a group label', (done) => {
const namespace = 'group/subgroup';
const labelData = { some: 'data' };
const expectedUrl = `${dummyUrlRoot}/groups/${namespace}/-/labels`;
>>>>>>> upstream/master
const expectedData = { const expectedData = {
label: labelData, label: labelData,
}; };
...@@ -153,7 +160,11 @@ describe('Api', () => { ...@@ -153,7 +160,11 @@ describe('Api', () => {
}]; }];
}); });
<<<<<<< HEAD
Api.newLabel(namespace, null, labelData, (response) => { Api.newLabel(namespace, null, labelData, (response) => {
=======
Api.newLabel(namespace, undefined, labelData, (response) => {
>>>>>>> upstream/master
expect(response.name).toBe('test'); expect(response.name).toBe('test');
done(); done();
}); });
......
require 'spec_helper'
describe Projects::EnvironmentsController, '(JavaScript fixtures)', type: :controller do
include JavaScriptFixturesHelpers
let(:admin) { create(:admin) }
let(:namespace) { create(:namespace, name: 'frontend-fixtures' )}
let(:project) { create(:project_empty_repo, namespace: namespace, path: 'environments-project') }
let(:environment) { create(:environment, name: 'production', project: project) }
render_views
before(:all) do
clean_frontend_fixtures('environments/metrics')
end
before do
sign_in(admin)
end
it 'environments/metrics/metrics.html.raw' do |example|
get :metrics,
namespace_id: project.namespace,
project_id: project,
id: environment.id
expect(response).to be_success
store_frontend_fixture(response, example.description)
end
end
...@@ -5,24 +5,35 @@ import axios from '~/lib/utils/axios_utils'; ...@@ -5,24 +5,35 @@ import axios from '~/lib/utils/axios_utils';
import { metricsGroupsAPIResponse, mockApiEndpoint } from './mock_data'; import { metricsGroupsAPIResponse, mockApiEndpoint } from './mock_data';
describe('Dashboard', () => { describe('Dashboard', () => {
const fixtureName = 'environments/metrics/metrics.html.raw';
let DashboardComponent; let DashboardComponent;
let component;
preloadFixtures(fixtureName); const propsData = {
hasMetrics: false,
documentationPath: '/path/to/docs',
settingsPath: '/path/to/settings',
clustersPath: '/path/to/clusters',
tagsPath: '/path/to/tags',
projectPath: '/path/to/project',
metricsEndpoint: mockApiEndpoint,
deploymentEndpoint: null,
emptyGettingStartedSvgPath: '/path/to/getting-started.svg',
emptyLoadingSvgPath: '/path/to/loading.svg',
emptyUnableToConnectSvgPath: '/path/to/unable-to-connect.svg',
};
beforeEach(() => { beforeEach(() => {
loadFixtures(fixtureName); setFixtures('<div class="prometheus-graphs"></div>');
DashboardComponent = Vue.extend(Dashboard); DashboardComponent = Vue.extend(Dashboard);
}); });
describe('no metrics are available yet', () => { describe('no metrics are available yet', () => {
it('shows a getting started empty state when no metrics are present', () => { it('shows a getting started empty state when no metrics are present', () => {
component = new DashboardComponent({ const component = new DashboardComponent({
el: document.querySelector('#prometheus-graphs'), el: document.querySelector('.prometheus-graphs'),
propsData,
}); });
component.$mount(); expect(component.$el.querySelector('.prometheus-graphs')).toBe(null);
expect(component.$el.querySelector('#prometheus-graphs')).toBe(null);
expect(component.state).toEqual('gettingStarted'); expect(component.state).toEqual('gettingStarted');
}); });
}); });
...@@ -30,11 +41,8 @@ describe('Dashboard', () => { ...@@ -30,11 +41,8 @@ describe('Dashboard', () => {
describe('requests information to the server', () => { describe('requests information to the server', () => {
let mock; let mock;
beforeEach(() => { beforeEach(() => {
document.querySelector('#prometheus-graphs').setAttribute('data-has-metrics', 'true');
mock = new MockAdapter(axios); mock = new MockAdapter(axios);
mock.onGet(mockApiEndpoint).reply(200, { mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
metricsGroupsAPIResponse,
});
}); });
afterEach(() => { afterEach(() => {
...@@ -42,14 +50,43 @@ describe('Dashboard', () => { ...@@ -42,14 +50,43 @@ describe('Dashboard', () => {
}); });
it('shows up a loading state', (done) => { it('shows up a loading state', (done) => {
component = new DashboardComponent({ const component = new DashboardComponent({
el: document.querySelector('#prometheus-graphs'), el: document.querySelector('.prometheus-graphs'),
propsData: { ...propsData, hasMetrics: true },
}); });
component.$mount();
Vue.nextTick(() => { Vue.nextTick(() => {
expect(component.state).toEqual('loading'); expect(component.state).toEqual('loading');
done(); done();
}); });
}); });
it('hides the legend when showLegend is false', (done) => {
const component = new DashboardComponent({
el: document.querySelector('.prometheus-graphs'),
propsData: { ...propsData, hasMetrics: true, showLegend: false },
});
setTimeout(() => {
expect(component.showEmptyState).toEqual(false);
expect(component.$el.querySelector('.legend-group')).toEqual(null);
expect(component.$el.querySelector('.prometheus-graph-group')).toBeTruthy();
done();
});
});
it('hides the group panels when showPanels is false', (done) => {
const component = new DashboardComponent({
el: document.querySelector('.prometheus-graphs'),
propsData: { ...propsData, hasMetrics: true, showPanels: false },
});
setTimeout(() => {
expect(component.showEmptyState).toEqual(false);
expect(component.$el.querySelector('.prometheus-panel')).toEqual(null);
expect(component.$el.querySelector('.prometheus-graph-group')).toBeTruthy();
done();
});
});
}); });
}); });
...@@ -413,7 +413,12 @@ describe Gitlab::Ci::Trace do ...@@ -413,7 +413,12 @@ describe Gitlab::Ci::Trace do
expect(build.job_artifacts_trace.file.filename).to eq('job.log') expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(File.exist?(src_path)).to be_falsy expect(File.exist?(src_path)).to be_falsy
expect(src_checksum) expect(src_checksum)
<<<<<<< HEAD
.to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).digest) .to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).digest)
=======
.to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest)
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
>>>>>>> upstream/master
end end
end end
...@@ -438,7 +443,12 @@ describe Gitlab::Ci::Trace do ...@@ -438,7 +443,12 @@ describe Gitlab::Ci::Trace do
expect(build.job_artifacts_trace.file.filename).to eq('job.log') expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(build.old_trace).to be_nil expect(build.old_trace).to be_nil
expect(src_checksum) expect(src_checksum)
<<<<<<< HEAD
.to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).digest) .to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).digest)
=======
.to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest)
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
>>>>>>> upstream/master
end end
end end
...@@ -457,7 +467,11 @@ describe Gitlab::Ci::Trace do ...@@ -457,7 +467,11 @@ describe Gitlab::Ci::Trace do
context 'when trace file stored in default path' do context 'when trace file stored in default path' do
let!(:build) { create(:ci_build, :success, :trace_live) } let!(:build) { create(:ci_build, :success, :trace_live) }
let!(:src_path) { trace.read { |s| return s.path } } let!(:src_path) { trace.read { |s| return s.path } }
<<<<<<< HEAD
let!(:src_checksum) { Digest::SHA256.file(src_path).digest } let!(:src_checksum) { Digest::SHA256.file(src_path).digest }
=======
let!(:src_checksum) { Digest::SHA256.file(src_path).hexdigest }
>>>>>>> upstream/master
it_behaves_like 'archive trace file' it_behaves_like 'archive trace file'
...@@ -483,7 +497,11 @@ describe Gitlab::Ci::Trace do ...@@ -483,7 +497,11 @@ describe Gitlab::Ci::Trace do
context 'when trace is stored in database' do context 'when trace is stored in database' do
let(:build) { create(:ci_build, :success) } let(:build) { create(:ci_build, :success) }
let(:trace_content) { 'Sample trace' } let(:trace_content) { 'Sample trace' }
<<<<<<< HEAD
let!(:src_checksum) { Digest::SHA256.digest(trace_content) } let!(:src_checksum) { Digest::SHA256.digest(trace_content) }
=======
let!(:src_checksum) { Digest::SHA256.hexdigest(trace_content) }
>>>>>>> upstream/master
before do before do
build.update_column(:trace, trace_content) build.update_column(:trace, trace_content)
......
...@@ -7,7 +7,7 @@ describe Gitlab::Prometheus::Queries::AdditionalMetricsDeploymentQuery do ...@@ -7,7 +7,7 @@ describe Gitlab::Prometheus::Queries::AdditionalMetricsDeploymentQuery do
include_examples 'additional metrics query' do include_examples 'additional metrics query' do
let(:deployment) { create(:deployment, environment: environment) } let(:deployment) { create(:deployment, environment: environment) }
let(:query_params) { [environment.id, deployment.id] } let(:query_params) { [deployment.id] }
it 'queries using specific time' do it 'queries using specific time' do
expect(client).to receive(:query_range).with(anything, expect(client).to receive(:query_range).with(anything,
......
...@@ -31,7 +31,7 @@ describe Gitlab::Prometheus::Queries::DeploymentQuery do ...@@ -31,7 +31,7 @@ describe Gitlab::Prometheus::Queries::DeploymentQuery do
expect(client).to receive(:query).with('avg(rate(container_cpu_usage_seconds_total{container_name!="POD",environment="environment-slug"}[30m])) * 100', expect(client).to receive(:query).with('avg(rate(container_cpu_usage_seconds_total{container_name!="POD",environment="environment-slug"}[30m])) * 100',
time: stop_time) time: stop_time)
expect(subject.query(environment.id, deployment.id)).to eq(memory_values: nil, memory_before: nil, memory_after: nil, expect(subject.query(deployment.id)).to eq(memory_values: nil, memory_before: nil, memory_after: nil,
cpu_values: nil, cpu_before: nil, cpu_after: nil) cpu_values: nil, cpu_before: nil, cpu_after: nil)
end end
end end
require 'spec_helper' require 'spec_helper'
describe Gitlab::Prometheus::Queries::MatchedMetricsQuery do describe Gitlab::Prometheus::Queries::MatchedMetricQuery do
include Prometheus::MetricBuilders include Prometheus::MetricBuilders
let(:metric_group_class) { Gitlab::Prometheus::MetricGroup } let(:metric_group_class) { Gitlab::Prometheus::MetricGroup }
......
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180307012445_migrate_update_head_pipeline_for_merge_request_sidekiq_queue.rb')
describe MigrateUpdateHeadPipelineForMergeRequestSidekiqQueue, :sidekiq, :redis do
include Gitlab::Database::MigrationHelpers
context 'when there are jobs in the queues' do
it 'correctly migrates queue when migrating up' do
Sidekiq::Testing.disable! do
stubbed_worker(queue: 'pipeline_default:update_head_pipeline_for_merge_request').perform_async('Something', [1])
stubbed_worker(queue: 'pipeline_processing:update_head_pipeline_for_merge_request').perform_async('Something', [1])
described_class.new.up
expect(sidekiq_queue_length('pipeline_default:update_head_pipeline_for_merge_request')).to eq 0
expect(sidekiq_queue_length('pipeline_processing:update_head_pipeline_for_merge_request')).to eq 2
end
end
it 'does not affect other queues under the same namespace' do
Sidekiq::Testing.disable! do
stubbed_worker(queue: 'pipeline_default:build_coverage').perform_async('Something', [1])
stubbed_worker(queue: 'pipeline_default:build_trace_sections').perform_async('Something', [1])
stubbed_worker(queue: 'pipeline_default:pipeline_metrics').perform_async('Something', [1])
stubbed_worker(queue: 'pipeline_default:pipeline_notification').perform_async('Something', [1])
described_class.new.up
expect(sidekiq_queue_length('pipeline_default:build_coverage')).to eq 1
expect(sidekiq_queue_length('pipeline_default:build_trace_sections')).to eq 1
expect(sidekiq_queue_length('pipeline_default:pipeline_metrics')).to eq 1
expect(sidekiq_queue_length('pipeline_default:pipeline_notification')).to eq 1
end
end
it 'correctly migrates queue when migrating down' do
Sidekiq::Testing.disable! do
stubbed_worker(queue: 'pipeline_processing:update_head_pipeline_for_merge_request').perform_async('Something', [1])
described_class.new.down
expect(sidekiq_queue_length('pipeline_default:update_head_pipeline_for_merge_request')).to eq 1
expect(sidekiq_queue_length('pipeline_processing:update_head_pipeline_for_merge_request')).to eq 0
end
end
end
context 'when there are no jobs in the queues' do
it 'does not raise error when migrating up' do
expect { described_class.new.up }.not_to raise_error
end
it 'does not raise error when migrating down' do
expect { described_class.new.down }.not_to raise_error
end
end
def stubbed_worker(queue:)
Class.new do
include Sidekiq::Worker
sidekiq_options queue: queue
end
end
end
...@@ -22,11 +22,11 @@ describe Clusters::Applications::Prometheus do ...@@ -22,11 +22,11 @@ describe Clusters::Applications::Prometheus do
end end
end end
describe '#proxy_client' do describe '#prometheus_client' do
context 'cluster is nil' do context 'cluster is nil' do
it 'returns nil' do it 'returns nil' do
expect(subject.cluster).to be_nil expect(subject.cluster).to be_nil
expect(subject.proxy_client).to be_nil expect(subject.prometheus_client).to be_nil
end end
end end
...@@ -35,7 +35,7 @@ describe Clusters::Applications::Prometheus do ...@@ -35,7 +35,7 @@ describe Clusters::Applications::Prometheus do
subject { create(:clusters_applications_prometheus, cluster: cluster) } subject { create(:clusters_applications_prometheus, cluster: cluster) }
it 'returns nil' do it 'returns nil' do
expect(subject.proxy_client).to be_nil expect(subject.prometheus_client).to be_nil
end end
end end
...@@ -63,15 +63,15 @@ describe Clusters::Applications::Prometheus do ...@@ -63,15 +63,15 @@ describe Clusters::Applications::Prometheus do
end end
it 'creates proxy prometheus rest client' do it 'creates proxy prometheus rest client' do
expect(subject.proxy_client).to be_instance_of(RestClient::Resource) expect(subject.prometheus_client).to be_instance_of(RestClient::Resource)
end end
it 'creates proper url' do it 'creates proper url' do
expect(subject.proxy_client.url).to eq('http://example.com/api/v1/proxy/namespaces/gitlab-managed-apps/service/prometheus-prometheus-server:80') expect(subject.prometheus_client.url).to eq('http://example.com/api/v1/proxy/namespaces/gitlab-managed-apps/service/prometheus-prometheus-server:80')
end end
it 'copies options and headers from kube client to proxy client' do it 'copies options and headers from kube client to proxy client' do
expect(subject.proxy_client.options).to eq(kube_client.rest_client.options.merge(headers: kube_client.headers)) expect(subject.prometheus_client.options).to eq(kube_client.rest_client.options.merge(headers: kube_client.headers))
end end
end end
end end
......
require 'spec_helper'
describe PrometheusAdapter, :use_clean_rails_memory_store_caching do
include PrometheusHelpers
include ReactiveCachingHelpers
class TestClass
include PrometheusAdapter
end
let(:project) { create(:prometheus_project) }
let(:service) { project.prometheus_service }
let(:described_class) { TestClass }
let(:environment_query) { Gitlab::Prometheus::Queries::EnvironmentQuery }
describe '#query' do
describe 'environment' do
let(:environment) { build_stubbed(:environment, slug: 'env-slug') }
around do |example|
Timecop.freeze { example.run }
end
context 'with valid data' do
subject { service.query(:environment, environment) }
before do
stub_reactive_cache(service, prometheus_data, environment_query, environment.id)
end
it 'returns reactive data' do
is_expected.to eq(prometheus_metrics_data)
end
end
end
describe 'matched_metrics' do
let(:matched_metrics_query) { Gitlab::Prometheus::Queries::MatchedMetricQuery }
let(:prometheus_client_wrapper) { double(:prometheus_client_wrapper, label_values: nil) }
context 'with valid data' do
subject { service.query(:matched_metrics) }
before do
allow(service).to receive(:prometheus_client_wrapper).and_return(prometheus_client_wrapper)
synchronous_reactive_cache(service)
end
it 'returns reactive data' do
expect(subject[:success]).to be_truthy
expect(subject[:data]).to eq([])
end
end
end
describe 'deployment' do
let(:deployment) { build_stubbed(:deployment) }
let(:deployment_query) { Gitlab::Prometheus::Queries::DeploymentQuery }
around do |example|
Timecop.freeze { example.run }
end
context 'with valid data' do
subject { service.query(:deployment, deployment) }
before do
stub_reactive_cache(service, prometheus_data, deployment_query, deployment.id)
end
it 'returns reactive data' do
expect(subject).to eq(prometheus_metrics_data)
end
end
end
end
describe '#calculate_reactive_cache' do
let(:environment) { create(:environment, slug: 'env-slug') }
before do
service.manual_configuration = true
service.active = true
end
subject do
service.calculate_reactive_cache(environment_query.name, environment.id)
end
around do |example|
Timecop.freeze { example.run }
end
context 'when service is inactive' do
before do
service.active = false
end
it { is_expected.to be_nil }
end
context 'when Prometheus responds with valid data' do
before do
stub_all_prometheus_requests(environment.slug)
end
it { expect(subject.to_json).to eq(prometheus_data.to_json) }
it { expect(subject.to_json).to eq(prometheus_data.to_json) }
end
[404, 500].each do |status|
context "when Prometheus responds with #{status}" do
before do
stub_all_prometheus_requests(environment.slug, status: status, body: "QUERY FAILED!")
end
it { is_expected.to eq(success: false, result: %(#{status} - "QUERY FAILED!")) }
end
end
end
end
...@@ -64,6 +64,7 @@ describe Deployment do ...@@ -64,6 +64,7 @@ describe Deployment do
describe '#metrics' do describe '#metrics' do
let(:deployment) { create(:deployment) } let(:deployment) { create(:deployment) }
let(:prometheus_adapter) { double('prometheus_adapter', can_query?: true) }
subject { deployment.metrics } subject { deployment.metrics }
...@@ -76,17 +77,16 @@ describe Deployment do ...@@ -76,17 +77,16 @@ describe Deployment do
{ {
success: true, success: true,
metrics: {}, metrics: {},
last_update: 42, last_update: 42
deployment_time: 1494408956
} }
end end
before do before do
allow(deployment.project).to receive_message_chain(:monitoring_service, :deployment_metrics) allow(deployment).to receive(:prometheus_adapter).and_return(prometheus_adapter)
.with(any_args).and_return(simple_metrics) allow(prometheus_adapter).to receive(:query).with(:deployment, deployment).and_return(simple_metrics)
end end
it { is_expected.to eq(simple_metrics) } it { is_expected.to eq(simple_metrics.merge({ deployment_time: deployment.created_at.to_i })) }
end end
end end
...@@ -109,11 +109,11 @@ describe Deployment do ...@@ -109,11 +109,11 @@ describe Deployment do
} }
end end
let(:prometheus_service) { double('prometheus_service') } let(:prometheus_adapter) { double('prometheus_adapter', can_query?: true) }
before do before do
allow(project).to receive(:prometheus_service).and_return(prometheus_service) allow(deployment).to receive(:prometheus_adapter).and_return(prometheus_adapter)
allow(prometheus_service).to receive(:additional_deployment_metrics).and_return(simple_metrics) allow(prometheus_adapter).to receive(:query).with(:additional_metrics_deployment, deployment).and_return(simple_metrics)
end end
it { is_expected.to eq(simple_metrics.merge({ deployment_time: deployment.created_at.to_i })) } it { is_expected.to eq(simple_metrics.merge({ deployment_time: deployment.created_at.to_i })) }
......
require 'spec_helper' require 'spec_helper'
describe Environment do describe Environment do
set(:project) { create(:project) } let(:project) { create(:project) }
subject(:environment) { create(:environment, project: project) } subject(:environment) { create(:environment, project: project) }
it { is_expected.to belong_to(:project) } it { is_expected.to belong_to(:project) }
...@@ -553,8 +553,8 @@ describe Environment do ...@@ -553,8 +553,8 @@ describe Environment do
end end
it 'returns the metrics from the deployment service' do it 'returns the metrics from the deployment service' do
expect(project.monitoring_service) expect(environment.prometheus_adapter)
.to receive(:environment_metrics).with(environment) .to receive(:query).with(:environment, environment)
.and_return(:fake_metrics) .and_return(:fake_metrics)
is_expected.to eq(:fake_metrics) is_expected.to eq(:fake_metrics)
...@@ -609,12 +609,12 @@ describe Environment do ...@@ -609,12 +609,12 @@ describe Environment do
context 'when the environment has additional metrics' do context 'when the environment has additional metrics' do
before do before do
allow(environment).to receive(:has_additional_metrics?).and_return(true) allow(environment).to receive(:has_metrics?).and_return(true)
end end
it 'returns the additional metrics from the deployment service' do it 'returns the additional metrics from the deployment service' do
expect(project.prometheus_service).to receive(:additional_environment_metrics) expect(environment.prometheus_adapter).to receive(:query)
.with(environment) .with(:additional_metrics_environment, environment)
.and_return(:fake_metrics) .and_return(:fake_metrics)
is_expected.to eq(:fake_metrics) is_expected.to eq(:fake_metrics)
...@@ -623,46 +623,13 @@ describe Environment do ...@@ -623,46 +623,13 @@ describe Environment do
context 'when the environment does not have metrics' do context 'when the environment does not have metrics' do
before do before do
allow(environment).to receive(:has_additional_metrics?).and_return(false) allow(environment).to receive(:has_metrics?).and_return(false)
end end
it { is_expected.to be_nil } it { is_expected.to be_nil }
end end
end end
describe '#has_additional_metrics??' do
subject { environment.has_additional_metrics? }
context 'when the enviroment is available' do
context 'with a deployment service' do
let(:project) { create(:prometheus_project) }
context 'and a deployment' do
let!(:deployment) { create(:deployment, environment: environment) }
it { is_expected.to be_truthy }
end
context 'but no deployments' do
it { is_expected.to be_falsy }
end
end
context 'without a monitoring service' do
it { is_expected.to be_falsy }
end
end
context 'when the environment is unavailable' do
let(:project) { create(:prometheus_project) }
before do
environment.stop
end
it { is_expected.to be_falsy }
end
end
describe '#slug' do describe '#slug' do
it "is automatically generated" do it "is automatically generated" do
expect(environment.slug).not_to be_nil expect(environment.slug).not_to be_nil
...@@ -755,4 +722,12 @@ describe Environment do ...@@ -755,4 +722,12 @@ describe Environment do
end end
end end
end end
describe '#prometheus_adapter' do
it 'calls prometheus adapter service' do
expect_any_instance_of(Prometheus::AdapterService).to receive(:prometheus_adapter)
subject.prometheus_adapter
end
end
end end
...@@ -6,7 +6,6 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do ...@@ -6,7 +6,6 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
let(:project) { create(:prometheus_project) } let(:project) { create(:prometheus_project) }
let(:service) { project.prometheus_service } let(:service) { project.prometheus_service }
let(:environment_query) { Gitlab::Prometheus::Queries::EnvironmentQuery }
describe "Associations" do describe "Associations" do
it { is_expected.to belong_to :project } it { is_expected.to belong_to :project }
...@@ -55,197 +54,31 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do ...@@ -55,197 +54,31 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
end end
end end
describe '#environment_metrics' do describe '#prometheus_client' do
let(:environment) { build_stubbed(:environment, slug: 'env-slug') }
around do |example|
Timecop.freeze { example.run }
end
context 'with valid data' do
subject { service.environment_metrics(environment) }
before do
stub_reactive_cache(service, prometheus_data, environment_query, environment.id)
end
it 'returns reactive data' do
is_expected.to eq(prometheus_metrics_data)
end
end
end
describe '#matched_metrics' do
let(:matched_metrics_query) { Gitlab::Prometheus::Queries::MatchedMetricsQuery }
let(:client) { double(:client, label_values: nil) }
context 'with valid data' do
subject { service.matched_metrics }
before do
allow(service).to receive(:client).and_return(client)
synchronous_reactive_cache(service)
end
it 'returns reactive data' do
expect(subject[:success]).to be_truthy
expect(subject[:data]).to eq([])
end
end
end
describe '#deployment_metrics' do
let(:deployment) { build_stubbed(:deployment) }
let(:deployment_query) { Gitlab::Prometheus::Queries::DeploymentQuery }
around do |example|
Timecop.freeze { example.run }
end
context 'with valid data' do
subject { service.deployment_metrics(deployment) }
let(:fake_deployment_time) { 10 }
before do
stub_reactive_cache(service, prometheus_data, deployment_query, deployment.environment.id, deployment.id)
end
it 'returns reactive data' do
expect(deployment).to receive(:created_at).and_return(fake_deployment_time)
expect(subject).to eq(prometheus_metrics_data.merge(deployment_time: fake_deployment_time))
end
end
end
describe '#calculate_reactive_cache' do
let(:environment) { create(:environment, slug: 'env-slug') }
before do
service.manual_configuration = true
service.active = true
end
subject do
service.calculate_reactive_cache(environment_query.name, environment.id)
end
around do |example|
Timecop.freeze { example.run }
end
context 'when service is inactive' do
before do
service.active = false
end
it { is_expected.to be_nil }
end
context 'when Prometheus responds with valid data' do
before do
stub_all_prometheus_requests(environment.slug)
end
it { expect(subject.to_json).to eq(prometheus_data.to_json) }
it { expect(subject.to_json).to eq(prometheus_data.to_json) }
end
[404, 500].each do |status|
context "when Prometheus responds with #{status}" do
before do
stub_all_prometheus_requests(environment.slug, status: status, body: "QUERY FAILED!")
end
it { is_expected.to eq(success: false, result: %(#{status} - "QUERY FAILED!")) }
end
end
end
describe '#client' do
context 'manual configuration is enabled' do context 'manual configuration is enabled' do
let(:api_url) { 'http://some_url' } let(:api_url) { 'http://some_url' }
before do before do
subject.active = true
subject.manual_configuration = true subject.manual_configuration = true
subject.api_url = api_url subject.api_url = api_url
end end
it 'returns simple rest client from api_url' do it 'returns rest client from api_url' do
expect(subject.client).to be_instance_of(Gitlab::PrometheusClient) expect(subject.prometheus_client.url).to eq(api_url)
expect(subject.client.rest_client.url).to eq(api_url)
end end
end end
context 'manual configuration is disabled' do context 'manual configuration is disabled' do
let!(:cluster_for_all) { create(:cluster, environment_scope: '*', projects: [project]) } let(:api_url) { 'http://some_url' }
let!(:cluster_for_dev) { create(:cluster, environment_scope: 'dev', projects: [project]) }
let!(:prometheus_for_dev) { create(:clusters_applications_prometheus, :installed, cluster: cluster_for_dev) }
let(:proxy_client) { double('proxy_client') }
before do before do
service.manual_configuration = false subject.manual_configuration = false
end subject.api_url = api_url
context 'with cluster for all environments with prometheus installed' do
let!(:prometheus_for_all) { create(:clusters_applications_prometheus, :installed, cluster: cluster_for_all) }
context 'without environment supplied' do
it 'returns client handling all environments' do
expect(service).to receive(:client_from_cluster).with(cluster_for_all).and_return(proxy_client).twice
expect(service.client).to be_instance_of(Gitlab::PrometheusClient)
expect(service.client.rest_client).to eq(proxy_client)
end
end
context 'with dev environment supplied' do
let!(:environment) { create(:environment, project: project, name: 'dev') }
it 'returns dev cluster client' do
expect(service).to receive(:client_from_cluster).with(cluster_for_dev).and_return(proxy_client).twice
expect(service.client(environment.id)).to be_instance_of(Gitlab::PrometheusClient)
expect(service.client(environment.id).rest_client).to eq(proxy_client)
end
end
context 'with prod environment supplied' do
let!(:environment) { create(:environment, project: project, name: 'prod') }
it 'returns dev cluster client' do
expect(service).to receive(:client_from_cluster).with(cluster_for_all).and_return(proxy_client).twice
expect(service.client(environment.id)).to be_instance_of(Gitlab::PrometheusClient)
expect(service.client(environment.id).rest_client).to eq(proxy_client)
end
end
end
context 'with cluster for all environments without prometheus installed' do
context 'without environment supplied' do
it 'raises PrometheusClient::Error because cluster was not found' do
expect { service.client }.to raise_error(Gitlab::PrometheusClient::Error, /couldn't find cluster with Prometheus installed/)
end
end
context 'with dev environment supplied' do
let!(:environment) { create(:environment, project: project, name: 'dev') }
it 'returns dev cluster client' do
expect(service).to receive(:client_from_cluster).with(cluster_for_dev).and_return(proxy_client).twice
expect(service.client(environment.id)).to be_instance_of(Gitlab::PrometheusClient)
expect(service.client(environment.id).rest_client).to eq(proxy_client)
end
end end
context 'with prod environment supplied' do it 'no client provided' do
let!(:environment) { create(:environment, project: project, name: 'prod') } expect(subject.prometheus_client).to be_nil
it 'raises PrometheusClient::Error because cluster was not found' do
expect { service.client }.to raise_error(Gitlab::PrometheusClient::Error, /couldn't find cluster with Prometheus installed/)
end
end
end end
end end
end end
...@@ -284,7 +117,7 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do ...@@ -284,7 +117,7 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
end end
end end
describe '#synchronize_service_state! before_save callback' do describe '#synchronize_service_state before_save callback' do
context 'no clusters with prometheus are installed' do context 'no clusters with prometheus are installed' do
context 'when service is inactive' do context 'when service is inactive' do
before do before do
......
...@@ -564,6 +564,20 @@ describe Project do ...@@ -564,6 +564,20 @@ describe Project do
it 'returns the project\'s last update date if it has no events' do it 'returns the project\'s last update date if it has no events' do
expect(project.last_activity_date).to eq(project.updated_at) expect(project.last_activity_date).to eq(project.updated_at)
end end
it 'returns the most recent timestamp' do
project.update_attributes(updated_at: nil,
last_activity_at: timestamp,
last_repository_updated_at: timestamp - 1.hour)
expect(project.last_activity_date).to eq(timestamp)
project.update_attributes(updated_at: timestamp,
last_activity_at: timestamp - 1.hour,
last_repository_updated_at: nil)
expect(project.last_activity_date).to eq(timestamp)
end
end end
end end
......
require 'spec_helper'
describe Prometheus::AdapterService do
let(:project) { create(:project) }
subject { described_class.new(project) }
describe '#prometheus_adapter' do
let(:cluster) { create(:cluster, :provided_by_user, environment_scope: '*', projects: [project]) }
context 'prometheus service can execute queries' do
let(:prometheus_service) { double(:prometheus_service, can_query?: true) }
before do
allow(project).to receive(:find_or_initialize_service).with('prometheus').and_return prometheus_service
end
it 'return prometheus service as prometheus adapter' do
expect(subject.prometheus_adapter).to eq(prometheus_service)
end
end
context "prometheus service can't execute queries" do
let(:prometheus_service) { double(:prometheus_service, can_query?: false) }
context 'with cluster with prometheus installed' do
let!(:prometheus) { create(:clusters_applications_prometheus, :installed, cluster: cluster) }
it 'returns application handling all environments' do
expect(subject.prometheus_adapter).to eq(prometheus)
end
end
context 'with cluster without prometheus installed' do
it 'returns nil' do
expect(subject.prometheus_adapter).to be_nil
end
end
end
end
end
...@@ -13,6 +13,7 @@ describe 'layouts/nav/sidebar/_project' do ...@@ -13,6 +13,7 @@ describe 'layouts/nav/sidebar/_project' do
describe 'issue boards' do describe 'issue boards' do
it 'has board tab' do it 'has board tab' do
<<<<<<< HEAD
render render
expect(rendered).to have_css('a[title="Boards"]') expect(rendered).to have_css('a[title="Boards"]')
...@@ -22,6 +23,8 @@ describe 'layouts/nav/sidebar/_project' do ...@@ -22,6 +23,8 @@ describe 'layouts/nav/sidebar/_project' do
allow(License).to receive(:feature_available?).and_call_original allow(License).to receive(:feature_available?).and_call_original
allow(License).to receive(:feature_available?).with(:multiple_project_issue_boards) { false } allow(License).to receive(:feature_available?).with(:multiple_project_issue_boards) { false }
=======
>>>>>>> upstream/master
render render
expect(rendered).to have_css('a[title="Board"]') expect(rendered).to have_css('a[title="Board"]')
......