Commit a8e34125 authored by Bob Van Landuyt's avatar Bob Van Landuyt

Merge branch 'master' into ce-to-ee-2018-03-08

parents f8fdb263 73463b08
......@@ -253,19 +253,29 @@
}
}
.loading-metrics,
.empty-metrics {
padding: 30px 10px;
.custom-monitored-metrics {
.panel-title {
display: flex;
align-items: center;
p,
.btn {
margin-top: 10px;
margin-bottom: 0;
> .btn-success {
margin-left: auto;
}
}
.custom-metric {
display: flex;
align-items: center;
}
.custom-metric-link-bold {
font-weight: $gl-font-weight-bold;
text-decoration: none;
}
}
.loading-metrics .metrics-load-spinner {
color: $gl-text-color-secondary;
}
.metrics-list {
......
......@@ -18,8 +18,85 @@ module Projects
end
end
def validate_query
respond_to do |format|
format.json do
result = prometheus_adapter.query(:validate, params[:query])
if result.any?
render json: result
else
head :no_content
end
end
end
end
def new
@metric = project.prometheus_metrics.new
end
def index
respond_to do |format|
format.json do
metrics = project.prometheus_metrics
response = {}
if metrics.any?
response[:metrics] = PrometheusMetricSerializer.new(project: project)
.represent(metrics.order(created_at: :asc))
end
render json: response
end
end
end
def create
@metric = project.prometheus_metrics.create(metrics_params)
if @metric.persisted?
redirect_to edit_project_service_path(project, PrometheusService),
notice: 'Metric was successfully added.'
else
render 'new'
end
end
def update
@metric = project.prometheus_metrics.find(params[:id])
@metric.update(metrics_params)
if @metric.persisted?
redirect_to edit_project_service_path(project, PrometheusService),
notice: 'Metric was successfully updated.'
else
render 'edit'
end
end
def edit
@metric = project.prometheus_metrics.find(params[:id])
end
def destroy
metric = project.prometheus_metrics.find(params[:id])
metric.destroy
respond_to do |format|
format.html do
redirect_to edit_project_service_path(project, PrometheusService), status: 303
end
format.json do
head :ok
end
end
end
private
def metrics_params
params.require(:prometheus_metric).permit(:title, :query, :y_label, :unit, :legend, :group)
end
def prometheus_adapter
@prometheus_adapter ||= ::Prometheus::AdapterService.new(project).prometheus_adapter
end
......
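For context, a minimal sketch of how the JSON actions above behave, written in the style of the controller spec included later in this diff (project_params is the spec helper defined there; values are examples only):

# POST #validate_query renders the adapter result, or responds 204 while it is not ready yet
post :validate_query, project_params(format: :json, query: 'avg(metric)')
# => { "valid" => true } or an empty 204 response

# GET #index returns { "metrics" => [...] } when custom metrics exist, {} otherwise
get :index, project_params(format: :json)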
......@@ -26,7 +26,13 @@ module PrometheusAdapter
query_class = Gitlab::Prometheus::Queries.const_get("#{query_name.to_s.classify}Query")
args.map! do |arg|
if arg.respond_to?(:id)
arg.id
else
arg
end
end
with_reactive_cache(query_class.name, *args, &query_class.method(:transform_reactive_result))
end
......
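The map! change above lets query arguments be either ActiveRecord objects (reduced to their id, as before) or plain values such as a PromQL string. A hedged illustration (receiver and variable names are placeholders):

prometheus_adapter.query(:environment, environment)  # AR object => environment.id is passed to the query class
prometheus_adapter.query(:validate, 'avg(metric)')   # plain string is passed through unchanged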
......@@ -200,6 +200,8 @@ class Project < ActiveRecord::Base
has_one :cluster_project, class_name: 'Clusters::Project'
has_many :clusters, through: :cluster_project, class_name: 'Clusters::Cluster'
has_many :prometheus_metrics
# Container repositories need to remove data from the container registry,
# which is not managed by the DB. Hence we're still using dependent: :destroy
# here.
......
......@@ -3,13 +3,37 @@
%h4.prepend-top-0
= s_('PrometheusService|Metrics')
%p
= s_('PrometheusService|Common metrics are automatically monitored based on a library of metrics from popular exporters.')
= link_to s_('PrometheusService|More information'), help_page_path('user/project/integrations/prometheus_library/metrics'), target: '_blank', rel: "noopener noreferrer"
.col-lg-9
.panel.panel-default.custom-monitored-metrics.js-panel-custom-monitored-metrics{ data: { active_custom_metrics: project_prometheus_metrics_path(@project), environments_data: environments_list_data } }
.panel-heading
%h3.panel-title
= s_('PrometheusService|Custom metrics')
%span.badge.js-custom-monitored-count 0
= link_to s_('PrometheusService|New metric'), new_project_prometheus_metric_path(@project), class: 'btn btn-success js-new-metric-button hidden'
.panel-body
.flash-container.hidden
.flash-warning
.flash-text
.loading-metrics.js-loading-custom-metrics
%p.prepend-top-10.prepend-left-10
= icon('spinner spin', class: 'metrics-load-spinner')
= s_('PrometheusService|Finding custom metrics...')
.empty-metrics.hidden.js-empty-custom-metrics
= link_to s_('PrometheusService|New metric'), new_project_prometheus_metric_path(@project), class: 'btn btn-success prepend-top-10 prepend-left-10'
%ul.list-unstyled.metrics-list.hidden.js-custom-metrics-list
.panel.panel-default.js-panel-monitored-metrics{ data: { active_metrics: active_common_project_prometheus_metrics_path(@project, :json), metrics_help_path: help_page_path('user/project/integrations/prometheus_library/metrics') } }
.panel-heading
%h3.panel-title
= s_('PrometheusService|Common metrics')
%span.badge.js-monitored-count 0
.panel-body
......
......@@ -113,6 +113,7 @@
- cronjob:geo_prune_event_log
- cronjob:geo_repository_sync
- cronjob:geo_repository_verification_primary_batch
- cronjob:geo_repository_verification_secondary_scheduler
- cronjob:geo_sidekiq_cron_config
- cronjob:historical_data
- cronjob:ldap_all_groups_sync
......@@ -133,6 +134,7 @@
- geo:geo_repository_shard_sync
- geo:geo_repository_verification_primary_shard
- geo:geo_repository_verification_primary_single
- geo:geo_repository_verification_secondary_single
- object_storage_upload
- object_storage:object_storage_background_move
......
......@@ -479,6 +479,9 @@ Settings.cron_jobs['geo_prune_event_log_worker']['job_class'] ||= 'Geo::PruneEve
Settings.cron_jobs['geo_repository_verification_primary_batch_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['geo_repository_verification_primary_batch_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['geo_repository_verification_primary_batch_worker']['job_class'] ||= 'Geo::RepositoryVerification::Primary::BatchWorker'
Settings.cron_jobs['geo_repository_verification_secondary_scheduler_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['geo_repository_verification_secondary_scheduler_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['geo_repository_verification_secondary_scheduler_worker']['job_class'] ||= 'Geo::RepositoryVerification::Secondary::SchedulerWorker'
Settings.cron_jobs['import_export_project_cleanup_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['import_export_project_cleanup_worker']['cron'] ||= '0 * * * *'
Settings.cron_jobs['import_export_project_cleanup_worker']['job_class'] = 'ImportExportProjectCleanupWorker'
......
......@@ -78,7 +78,8 @@ constraints(ProjectUrlConstrainer.new) do
resource :mattermost, only: [:new, :create]
namespace :prometheus do
resources :metrics, constraints: { id: %r{[^\/]+} }, only: [:index, :new, :create, :edit, :update, :destroy] do
post :validate_query, on: :collection
get :active_common, on: :collection
end
end
......
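The expanded resources block generates the path helpers used throughout this diff; the validate_query helper name is inferred from the collection route:

# project_prometheus_metrics_path(@project)                        # index (custom metrics panel data endpoint)
# new_project_prometheus_metric_path(@project)                     # new ('New metric' links)
# edit_project_prometheus_metric_path(@project, metric)            # edit (exposed as edit_path by the serializer)
# project_prometheus_metric_path(@project, metric)                 # destroy (the 'Delete' link in the form)
# validate_query_project_prometheus_metrics_path(@project)         # POST validate_query, on the collection (inferred name)
# active_common_project_prometheus_metrics_path(@project, :json)   # GET active_common, on the collection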
......@@ -1047,6 +1047,10 @@ ActiveRecord::Schema.define(version: 20180307164427) do
t.integer "job_artifacts_failed_count"
t.string "version"
t.string "revision"
t.integer "repositories_verified_count"
t.integer "repositories_verification_failed_count"
t.integer "wikis_verified_count"
t.integer "wikis_verification_failed_count"
end
add_index "geo_node_statuses", ["geo_node_id"], name: "index_geo_node_statuses_on_geo_node_id", unique: true, using: :btree
......@@ -1531,7 +1535,10 @@ ActiveRecord::Schema.define(version: 20180307164427) do
t.string "merge_jid"
t.boolean "discussion_locked"
t.integer "latest_merge_request_diff_id"
t.string "rebase_commit_sha"
t.boolean "allow_maintainer_to_push"
end
......@@ -2001,6 +2008,21 @@ ActiveRecord::Schema.define(version: 20180307164427) do
add_index "projects", ["star_count"], name: "index_projects_on_star_count", using: :btree
add_index "projects", ["visibility_level"], name: "index_projects_on_visibility_level", using: :btree
create_table "prometheus_metrics", force: :cascade do |t|
t.integer "project_id"
t.string "title", null: false
t.string "query", null: false
t.string "y_label"
t.string "unit"
t.string "legend"
t.integer "group", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
add_index "prometheus_metrics", ["group"], name: "index_prometheus_metrics_on_group", using: :btree
add_index "prometheus_metrics", ["project_id"], name: "index_prometheus_metrics_on_project_id", using: :btree
create_table "protected_branch_merge_access_levels", force: :cascade do |t|
t.integer "protected_branch_id", null: false
t.integer "access_level", default: 40
......@@ -2701,6 +2723,7 @@ ActiveRecord::Schema.define(version: 20180307164427) do
add_foreign_key "project_mirror_data", "projects", name: "fk_d1aad367d7", on_delete: :cascade
add_foreign_key "project_repository_states", "projects", on_delete: :cascade
add_foreign_key "project_statistics", "projects", on_delete: :cascade
add_foreign_key "prometheus_metrics", "projects", on_delete: :cascade
add_foreign_key "protected_branch_merge_access_levels", "namespaces", column: "group_id", name: "fk_98f3d044fe", on_delete: :cascade
add_foreign_key "protected_branch_merge_access_levels", "protected_branches", name: "fk_8a3072ccb3", on_delete: :cascade
add_foreign_key "protected_branch_merge_access_levels", "users"
......
......@@ -203,6 +203,22 @@
</li>
<template v-if="showAdvanceItems">
<template v-if="node.primary">
<geo-node-detail-item
:item-title="s__('GeoNodes|Repositories checksummed:')"
:success-label="s__('GeoNodes|Checksummed')"
:neutral-label="s__('GeoNodes|Not checksummed')"
:failure-label="s__('GeoNodes|Failed')"
:item-value="nodeDetails.verifiedRepositories"
:item-value-type="valueType.GRAPH"
/>
<geo-node-detail-item
:item-title="s__('GeoNodes|Wikis checksummed:')"
:success-label="s__('GeoNodes|Checksummed')"
:neutral-label="s__('GeoNodes|Not checksummed')"
:failure-label="s__('GeoNodes|Failed')"
:item-value="nodeDetails.verifiedWikis"
:item-value-type="valueType.GRAPH"
/>
<geo-node-detail-item
:item-title="s__('GeoNodes|Replication slots:')"
:success-label="s__('GeoNodes|Used slots')"
......@@ -219,6 +235,22 @@
/>
</template>
<template v-else>
<geo-node-detail-item
:item-title="s__('GeoNodes|Repository checksums verified:')"
:success-label="s__('GeoNodes|Verified')"
:neutral-label="s__('GeoNodes|Unverified')"
:failure-label="s__('GeoNodes|Failed')"
:item-value="nodeDetails.verifiedRepositories"
:item-value-type="valueType.GRAPH"
/>
<geo-node-detail-item
:item-title="s__('GeoNodes|Wiki checksums verified:')"
:success-label="s__('GeoNodes|Verified')"
:neutral-label="s__('GeoNodes|Unverified')"
:failure-label="s__('GeoNodes|Failed')"
:item-value="nodeDetails.verifiedWikis"
:item-value-type="valueType.GRAPH"
/>
<geo-node-detail-item
css-class="node-detail-value-bold"
:item-title="s__('GeoNodes|Database replication lag:')"
......
......@@ -87,6 +87,16 @@ export default class GeoNodesStore {
successCount: rawNodeDetails.wikis_synced_count || 0,
failureCount: rawNodeDetails.wikis_failed_count || 0,
},
verifiedRepositories: {
totalCount: rawNodeDetails.repositories_count || 0,
successCount: rawNodeDetails.repositories_verified_count || 0,
failureCount: rawNodeDetails.repositories_verification_failed_count || 0,
},
verifiedWikis: {
totalCount: rawNodeDetails.wikis_count || 0,
successCount: rawNodeDetails.wikis_verified_count || 0,
failureCount: rawNodeDetails.wikis_verification_failed_count || 0,
},
lfs: {
totalCount: rawNodeDetails.lfs_objects_count || 0,
successCount: rawNodeDetails.lfs_objects_synced_count || 0,
......
import IntegrationSettingsForm from '~/integrations/integration_settings_form';
import PrometheusMetrics from 'ee/prometheus_metrics/prometheus_metrics';
document.addEventListener('DOMContentLoaded', () => {
const integrationSettingsForm = new IntegrationSettingsForm('.js-integration-settings-form');
integrationSettingsForm.init();
const prometheusSettingsWrapper = document.querySelector('.js-prometheus-metrics-monitoring');
if (prometheusSettingsWrapper) {
const prometheusMetrics = new PrometheusMetrics('.js-prometheus-metrics-monitoring');
prometheusMetrics.loadActiveCustomMetrics();
}
});
import _ from 'underscore';
import PrometheusMetrics from '~/prometheus_metrics/prometheus_metrics';
import PANEL_STATE from '~/prometheus_metrics/constants';
import axios from '~/lib/utils/axios_utils';
import { s__ } from '~/locale';
import { capitalizeFirstCharacter } from '~/lib/utils/text_utility';
export default class EEPrometheusMetrics extends PrometheusMetrics {
constructor(wrapperSelector) {
super(wrapperSelector);
this.$wrapperCustomMetrics = $(wrapperSelector);
this.$monitoredCustomMetricsPanel = this.$wrapperCustomMetrics.find('.js-panel-custom-monitored-metrics');
this.$monitoredCustomMetricsCount = this.$monitoredCustomMetricsPanel.find('.js-custom-monitored-count');
this.$monitoredCustomMetricsLoading = this.$monitoredCustomMetricsPanel.find('.js-loading-custom-metrics');
this.$monitoredCustomMetricsEmpty = this.$monitoredCustomMetricsPanel.find('.js-empty-custom-metrics');
this.$monitoredCustomMetricsList = this.$monitoredCustomMetricsPanel.find('.js-custom-metrics-list');
this.$newCustomMetricButton = this.$monitoredCustomMetricsPanel.find('.js-new-metric-button');
this.$flashCustomMetricsContainer = this.$wrapperCustomMetrics.find('.flash-container');
this.customMetrics = [];
this.environmentsData = [];
this.activeCustomMetricsEndpoint = this.$monitoredCustomMetricsPanel.data('active-custom-metrics');
this.environmentsDataEndpoint = this.$monitoredCustomMetricsPanel.data('environments-data-endpoint');
}
showMonitoringCustomMetricsPanelState(stateName) {
switch (stateName) {
case PANEL_STATE.LOADING:
this.$monitoredCustomMetricsLoading.removeClass('hidden');
this.$monitoredCustomMetricsEmpty.addClass('hidden');
this.$monitoredCustomMetricsList.addClass('hidden');
this.$newCustomMetricButton.addClass('hidden');
break;
case PANEL_STATE.LIST:
this.$monitoredCustomMetricsLoading.addClass('hidden');
this.$monitoredCustomMetricsEmpty.addClass('hidden');
this.$newCustomMetricButton.removeClass('hidden');
this.$monitoredCustomMetricsList.removeClass('hidden');
break;
default:
this.$monitoredCustomMetricsLoading.addClass('hidden');
this.$monitoredCustomMetricsEmpty.removeClass('hidden');
this.$monitoredCustomMetricsList.addClass('hidden');
this.$newCustomMetricButton.addClass('hidden');
break;
}
}
populateCustomMetrics() {
const sortedMetrics = _(this.customMetrics).chain()
.map(metric => ({ ...metric, group: capitalizeFirstCharacter(metric.group) }))
.sortBy('title')
.sortBy('group')
.value();
sortedMetrics.forEach((metric) => {
this.$monitoredCustomMetricsList.append(EEPrometheusMetrics.customMetricTemplate(metric));
});
this.$monitoredCustomMetricsCount.text(this.customMetrics.length);
this.showMonitoringCustomMetricsPanelState(PANEL_STATE.LIST);
if (!this.environmentsData) {
this.showFlashMessage(s__('PrometheusService|These metrics will only be monitored after your first deployment to an environment'));
}
}
showFlashMessage(message) {
this.$flashCustomMetricsContainer.removeClass('hidden');
this.$flashCustomMetricsContainer.find('.flash-text').text(message);
}
loadActiveCustomMetrics() {
super.loadActiveMetrics();
Promise.all([
axios.get(this.activeCustomMetricsEndpoint),
axios.get(this.environmentsDataEndpoint),
])
.then(([customMetrics, environmentsData]) => {
this.environmentsData = environmentsData.data.environments;
if (!customMetrics.data || !customMetrics.data.metrics) {
this.showMonitoringCustomMetricsPanelState(PANEL_STATE.EMPTY);
} else {
this.customMetrics = customMetrics.data.metrics;
this.populateCustomMetrics(customMetrics.data.metrics);
}
})
.catch((customMetricError) => {
this.showFlashMessage(customMetricError);
this.showMonitoringCustomMetricsPanelState(PANEL_STATE.EMPTY);
});
}
static customMetricTemplate(metric) {
return `
<li class="custom-metric">
<a href="${_.escape(metric.edit_path)}" class="custom-metric-link-bold">
${_.escape(metric.group)} / ${_.escape(metric.title)} (${_.escape(metric.unit)})
</a>
</li>
`;
}
}
......@@ -377,6 +377,8 @@ export default {
v-if="shouldRenderRelatedLinks"
:related-links="mr.relatedLinks"
/>
<mr-widget-maintainer-edit
:maintainerEditAllowed="mr.maintainerEditAllowed" />
</div>
<div class="mr-widget-footer" v-if="shouldRenderMergeHelp">
<mr-widget-merge-help />
......
......@@ -6,7 +6,8 @@ module EE::Admin::LogsController
def loggers
strong_memoize(:loggers) do
super + [
Gitlab::GeoLogger,
Gitlab::Geo::RepositoryVerificationLogger
]
end
end
......
......@@ -39,14 +39,58 @@ module Geo
end
def find_failed_project_registries(type = nil)
if selective_sync?
legacy_find_filtered_failed_projects(type)
else
find_filtered_failed_project_registries(type)
end
end
def count_verified_repositories
relation =
if use_legacy_queries?
legacy_find_verified_repositories
else
find_verified_repositories
end
relation.count
end
def count_verified_wikis
relation =
if use_legacy_queries?
legacy_find_verified_wikis
else
find_verified_wikis
end
relation.count
end
def count_verification_failed_repositories
find_verification_failed_project_registries('repository').count
end
def count_verification_failed_wikis
find_verification_failed_project_registries('wiki').count
end
def find_verification_failed_project_registries(type = nil)
if use_legacy_queries?
legacy_find_filtered_verification_failed_projects(type)
else
find_filtered_verification_failed_project_registries(type)
end
end
# find all registries that need a repository or wiki verified
def find_registries_to_verify
if use_legacy_queries?
legacy_find_registries_to_verify
else
fdw_find_registries_to_verify
end
end
def find_unsynced_projects(batch_size:)
......@@ -77,6 +121,14 @@ module Geo
Geo::ProjectRegistry.synced_repos
end
def find_verified_repositories
Geo::ProjectRegistry.verified_repos
end
def find_verified_wikis
Geo::ProjectRegistry.verified_wikis
end
def find_filtered_failed_project_registries(type = nil)
case type
when 'repository'
......@@ -88,17 +140,52 @@ module Geo
end
end
def find_filtered_verification_failed_project_registries(type = nil)
case type
when 'repository'
Geo::ProjectRegistry.verification_failed_repos
when 'wiki'
Geo::ProjectRegistry.verification_failed_wikis
else
Geo::ProjectRegistry.verification_failed
end
end
def conditions_for_verification(type, use_fdw = true)
last_verification_failed = "last_#{type}_verification_failed".to_sym
verification_checksum = "#{type}_verification_checksum".to_sym
last_verification_at = "last_#{type}_verification_at".to_sym
state_arel = use_fdw ? fdw_repository_state_arel : legacy_repository_state_arel
# primary verification did not fail
primary_verification_not_failed = state_arel[last_verification_failed].eq(false)
# primary checksum is not NULL
primary_has_checksum = state_arel[verification_checksum].not_eq(nil)
# primary was verified later than the secondary verification
primary_recently_verified = state_arel[last_verification_at].gt(registry_arel[last_verification_at])
.or(registry_arel[last_verification_at].eq(nil))
# secondary verification failed and the last verification was over 24 hours ago;
# this allows us to retry any verification failures if they haven't already corrected themselves
secondary_failure_period = registry_arel[last_verification_at].lt(24.hours.ago)
.and(registry_arel[last_verification_failed].eq(true))
primary_verification_not_failed
.and(primary_has_checksum)
.and(primary_recently_verified)
.or(secondary_failure_period)
end
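Read together, the Arel above selects registries where the primary verification succeeded with a checksum and is newer than the secondary's, or where the secondary's last attempt failed more than 24 hours ago. A rough boolean sketch (illustration only, using placeholder primary/secondary readers, not the generated SQL):

verify_now =
  (!primary.last_verification_failed &&
   !primary.verification_checksum.nil? &&
   (secondary.last_verification_at.nil? ||
    primary.last_verification_at > secondary.last_verification_at)) ||
  (secondary.last_verification_failed &&
   secondary.last_verification_at < 24.hours.ago)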
#
# FDW accessors
#
def fdw_table
Geo::Fdw::Project.table_name
end
# @return [ActiveRecord::Relation<Geo::Fdw::Project>]
def fdw_find_unsynced_projects
Geo::Fdw::Project.joins("LEFT OUTER JOIN project_registry ON project_registry.project_id = #{fdw_table}.id")
Geo::Fdw::Project.joins("LEFT OUTER JOIN project_registry ON project_registry.project_id = #{fdw_project_table}.id")
.where(project_registry: { project_id: nil })
end
......@@ -121,11 +208,19 @@ module Geo
# @return [ActiveRecord::Relation<Geo::Fdw::Project>]
def fdw_find_projects_updated_recently
Geo::Fdw::Project.joins("INNER JOIN project_registry ON project_registry.project_id = #{fdw_table}.id")
Geo::Fdw::Project.joins("INNER JOIN project_registry ON project_registry.project_id = #{fdw_project_table}.id")
.merge(Geo::ProjectRegistry.dirty)
.merge(Geo::ProjectRegistry.retry_due)
end
# find all registries that need a repository or wiki verified
# @return [ActiveRecord::Relation<Geo::ProjectRegistry>] list of registries that need verification
def fdw_find_registries_to_verify
Geo::ProjectRegistry
.joins("LEFT OUTER JOIN #{fdw_repository_state_table} ON #{fdw_repository_state_table}.project_id = project_registry.project_id")
.where(conditions_for_verification(:repository, true).or(conditions_for_verification(:wiki, true)))
end
#
# Legacy accessors (non FDW)
#
......@@ -176,6 +271,16 @@ module Geo
)
end
# @return [ActiveRecord::Relation<Geo::ProjectRegistry>] list of verified projects
def legacy_find_verified_repositories
legacy_find_project_registries(Geo::ProjectRegistry.verified_repos)
end
# @return [ActiveRecord::Relation<Geo::ProjectRegistry>] list of verified projects
def legacy_find_verified_wikis
legacy_find_project_registries(Geo::ProjectRegistry.verified_wikis)
end
# @return [ActiveRecord::Relation<Project>] list of synced projects
def legacy_find_project_registries(project_registries)
legacy_inner_join_registry_ids(
......@@ -194,5 +299,65 @@ module Geo
foreign_key: :project_id
)
end
# @return [ActiveRecord::Relation<Project>] list of projects whose verification has failed
def legacy_find_filtered_verification_failed_projects(type = nil)
legacy_inner_join_registry_ids(
find_filtered_verification_failed_project_registries(type),
current_node.projects.pluck(:id),
Geo::ProjectRegistry,
foreign_key: :project_id
)
end
# @return [ActiveRecord::Relation<Geo::ProjectRegistry>] list of registries that need verification
def legacy_find_registries_to_verify
registries = Geo::ProjectRegistry
.pluck(:project_id, :last_repository_verification_at, :last_wiki_verification_at,
:last_repository_verification_failed, :last_wiki_verification_failed)
return Geo::ProjectRegistry.none if registries.empty?
id_and_values = registries.map do |project_id, repo_at, wiki_at, repo_failed, wiki_failed|
"(#{project_id}, to_timestamp(#{repo_at.to_i}), to_timestamp(#{wiki_at.to_i}),
#{quote_value(repo_failed)}, #{quote_value(wiki_failed)})"
end
joined_relation = ProjectRepositoryState.joins(<<~SQL)
INNER JOIN
(VALUES #{id_and_values.join(',')})
project_registry(project_id, last_repository_verification_at, last_wiki_verification_at,
last_repository_verification_failed, last_wiki_verification_failed)
ON #{ProjectRepositoryState.table_name}.project_id = project_registry.project_id
SQL
project_ids = joined_relation
.where(conditions_for_verification(:repository, false).or(conditions_for_verification(:wiki, false)))
.pluck(:project_id)
::Geo::ProjectRegistry.where(project_id: project_ids)
end
private
def registry_arel
Geo::ProjectRegistry.arel_table
end
def fdw_repository_state_arel
Geo::Fdw::ProjectRepositoryState.arel_table
end
def legacy_repository_state_arel
::ProjectRepositoryState.arel_table
end
def fdw_project_table
Geo::Fdw::Project.table_name
end
def fdw_repository_state_table
Geo::Fdw::ProjectRepositoryState.table_name
end
end
end
......@@ -19,6 +19,22 @@ module Geo
.limit(batch_size)
end
def count_verified_repositories
Project.verified_repos.count
end
def count_verified_wikis
Project.verified_wikis.count
end
def count_verification_failed_repositories
Project.verification_failed_repos.count
end
def count_verification_failed_wikis
Project.verification_failed_wikis.count
end
protected
def projects_table
......
......@@ -54,7 +54,12 @@ module EE
end
scope :with_remote_mirrors, -> { joins(:remote_mirrors).where(remote_mirrors: { enabled: true }).distinct }
scope :with_wiki_enabled, -> { with_feature_enabled(:wiki) }
scope :verified_repos, -> { joins(:repository_state).merge(ProjectRepositoryState.verified_repos) }
scope :verified_wikis, -> { joins(:repository_state).merge(ProjectRepositoryState.verified_wikis) }
scope :verification_failed_repos, -> { joins(:repository_state).merge(ProjectRepositoryState.verification_failed_repos) }
scope :verification_failed_wikis, -> { joins(:repository_state).merge(ProjectRepositoryState.verification_failed_wikis) }
delegate :shared_runners_minutes, :shared_runners_seconds, :shared_runners_seconds_last_reset,
to: :statistics, allow_nil: true
......
module Geo
module Fdw
class ProjectRepositoryState < ::Geo::BaseFdw
self.table_name = Gitlab::Geo::Fdw.table('project_repository_states')
end
end
end
class Geo::ProjectRegistry < Geo::BaseRegistry
include ::EachBatch
belongs_to :project
validates :project, presence: true, uniqueness: true
......@@ -6,6 +8,8 @@ class Geo::ProjectRegistry < Geo::BaseRegistry
scope :dirty, -> { where(arel_table[:resync_repository].eq(true).or(arel_table[:resync_wiki].eq(true))) }
scope :failed_repos, -> { where(arel_table[:repository_retry_count].gt(0)) }
scope :failed_wikis, -> { where(arel_table[:wiki_retry_count].gt(0)) }
scope :verification_failed_repos, -> { where(arel_table[:last_repository_verification_failed].eq(true)) }
scope :verification_failed_wikis, -> { where(arel_table[:last_wiki_verification_failed].eq(true)) }
def self.failed
repository_sync_failed = arel_table[:repository_retry_count].gt(0)
......@@ -14,6 +18,13 @@ class Geo::ProjectRegistry < Geo::BaseRegistry
where(repository_sync_failed.or(wiki_sync_failed))
end
def self.verification_failed
repository_verification_failed = arel_table[:last_repository_verification_failed].eq(true)
wiki_verification_failed = arel_table[:last_wiki_verification_failed].eq(true)
where(repository_verification_failed.or(wiki_verification_failed))
end
def self.retry_due
where(
arel_table[:repository_retry_at].lt(Time.now)
......@@ -33,6 +44,16 @@ class Geo::ProjectRegistry < Geo::BaseRegistry
.where(resync_wiki: false)
end
def self.verified_repos
where.not(last_repository_verification_at: nil, repository_verification_checksum: nil)
.where(last_repository_verification_failed: false)
end
def self.verified_wikis
where.not(last_wiki_verification_at: nil, wiki_verification_checksum: nil)
.where(last_wiki_verification_failed: false)
end
def repository_sync_due?(scheduled_time)
never_synced_repository? || repository_sync_needed?(scheduled_time)
end
......@@ -41,6 +62,22 @@ class Geo::ProjectRegistry < Geo::BaseRegistry
project.wiki_enabled? && (never_synced_wiki? || wiki_sync_needed?(scheduled_time))
end
delegate :repository_state, to: :project
delegate :repository_verification_checksum, :last_repository_verification_at,
:wiki_verification_checksum, :last_wiki_verification_at,
to: :repository_state, allow_nil: true, prefix: :project
def repository_path(type)
repo_path = project.disk_path
case type
when :repository
repo_path
when :wiki
"#{repo_path}.wiki"
end
end
private
def never_synced_repository?
......
......@@ -24,6 +24,10 @@ class GeoNodeStatus < ActiveRecord::Base
wikis_count: 'Total number of wikis available on primary',
wikis_synced_count: 'Number of wikis synced on secondary',
wikis_failed_count: 'Number of wikis failed to sync on secondary',
repositories_verified_count: 'Number of repositories verified on secondary',
repositories_verification_failed_count: 'Number of repositories failed to verify on secondary',
wikis_verified_count: 'Number of wikis verified on secondary',
wikis_verification_failed_count: 'Number of wikis failed to verify on secondary',
lfs_objects_count: 'Total number of local LFS objects available on primary',
lfs_objects_synced_count: 'Number of local LFS objects synced on secondary',
lfs_objects_failed_count: 'Number of local LFS objects failed to sync on secondary',
......@@ -136,6 +140,10 @@ class GeoNodeStatus < ActiveRecord::Base
self.replication_slots_count = geo_node.replication_slots_count
self.replication_slots_used_count = geo_node.replication_slots_used_count
self.replication_slots_max_retained_wal_bytes = geo_node.replication_slots_max_retained_wal_bytes
self.repositories_verified_count = repository_verification_finder.count_verified_repositories
self.repositories_verification_failed_count = repository_verification_finder.count_verification_failed_repositories
self.wikis_verified_count = repository_verification_finder.count_verified_wikis
self.wikis_verification_failed_count = repository_verification_finder.count_verification_failed_wikis
end
end
......@@ -148,6 +156,10 @@ class GeoNodeStatus < ActiveRecord::Base
self.repositories_failed_count = projects_finder.count_failed_repositories
self.wikis_synced_count = projects_finder.count_synced_wikis
self.wikis_failed_count = projects_finder.count_failed_wikis
self.repositories_verified_count = projects_finder.count_verified_repositories
self.repositories_verification_failed_count = projects_finder.count_verification_failed_repositories
self.wikis_verified_count = projects_finder.count_verified_wikis
self.wikis_verification_failed_count = projects_finder.count_verification_failed_wikis
self.lfs_objects_synced_count = lfs_objects_finder.count_synced_lfs_objects
self.lfs_objects_failed_count = lfs_objects_finder.count_failed_lfs_objects
self.job_artifacts_synced_count = job_artifacts_finder.count_synced_job_artifacts
......@@ -199,6 +211,14 @@ class GeoNodeStatus < ActiveRecord::Base
calc_percentage(wikis_count, wikis_synced_count)
end
def repositories_verified_in_percentage
calc_percentage(repositories_count, repositories_verified_count)
end
def wikis_verified_in_percentage
calc_percentage(wikis_count, wikis_verified_count)
end
def lfs_objects_synced_in_percentage
calc_percentage(lfs_objects_count, lfs_objects_synced_count)
end
......@@ -277,6 +297,10 @@ class GeoNodeStatus < ActiveRecord::Base
@projects_finder ||= Geo::ProjectRegistryFinder.new(current_node: geo_node)
end
def repository_verification_finder
@repository_verification_finder ||= Geo::RepositoryVerificationFinder.new
end
def calc_percentage(total, count)
return 0 if !total.present? || total.zero?
......
......@@ -8,6 +8,9 @@ class ProjectRepositoryState < ActiveRecord::Base
validates :project, presence: true, uniqueness: true
scope :verification_failed_repos, -> { where(arel_table[:last_repository_verification_failed].eq(true)) }
scope :verification_failed_wikis, -> { where(arel_table[:last_wiki_verification_failed].eq(true)) }
def repository_checksum_outdated?(timestamp)
repository_verification_checksum.nil? || recalculate_repository_checksum?(timestamp)
end
......@@ -18,7 +21,15 @@ class ProjectRepositoryState < ActiveRecord::Base
wiki_verification_checksum.nil? || recalculate_wiki_checksum?(timestamp)
end
private
def self.verified_repos
where.not(repository_verification_checksum: nil)
.where(last_repository_verification_failed: false)
end
def self.verified_wikis
where.not(wiki_verification_checksum: nil)
.where(last_wiki_verification_failed: false)
end
def recalculate_repository_checksum?(timestamp)
last_repository_verification_at.nil? || timestamp > last_repository_verification_at
......
class PrometheusMetric < ActiveRecord::Base
belongs_to :project, required: true, validate: true, inverse_of: :prometheus_metrics
enum group: [:business, :response, :system]
validates :title, presence: true
validates :query, presence: true
validates :group, presence: true
validates :y_label, presence: true
validates :unit, presence: true
GROUP_TITLES = {
business: _('Business'),
response: _('Response'),
system: _('System')
}.freeze
def group_title
GROUP_TITLES[group.to_sym]
end
def to_query_metric
Gitlab::Prometheus::Metric.new(title: title, required_metrics: [], weight: 0, y_label: y_label, queries: build_queries)
end
private
def build_queries
[
{
query_range: query,
unit: unit,
label: legend
}
]
end
end
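Illustrative usage of the model above (attribute values are examples only):

metric = PrometheusMetric.new(project: project, title: 'Throughput', group: :business,
                              query: 'rate(http_requests_total[5m])',
                              y_label: 'req/sec', unit: 'req/sec', legend: 'HTTP')
metric.group_title      # => "Business"
metric.to_query_metric  # => Gitlab::Prometheus::Metric built from the title, y_label and query above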
class PrometheusMetricEntity < Grape::Entity
include RequestAwareEntity
expose :id
expose :title
expose :group
expose :group_title
expose :unit
expose :edit_path do |prometheus_metric|
edit_project_prometheus_metric_path(prometheus_metric.project, prometheus_metric)
end
end
class PrometheusMetricSerializer < BaseSerializer
entity PrometheusMetricEntity
end
......@@ -112,6 +112,11 @@ module Geo
attrs["last_#{type}_synced_at"] = started_at
attrs["#{type}_retry_count"] = retry_count + 1
attrs["#{type}_retry_at"] = next_retry_time(attrs["#{type}_retry_count"])
# indicate that repository verification needs to be done again
attrs["#{type}_verification_checksum"] = nil
attrs["last_#{type}_verification_at"] = nil
attrs["last_#{type}_verification_failure"] = nil
end
if finished_at
......
# rubocop:disable GitlabSecurity/PublicSend
module Geo
class RepositoryVerifySecondaryService
include Gitlab::Geo::RepositoryVerificationLogHelpers
delegate :project, to: :registry
def initialize(registry, type)
@registry = registry
@type = type.to_sym
end
def execute
return unless Gitlab::Geo.geo_database_configured?
return unless Gitlab::Geo.secondary?
return unless should_verify_checksum?
verify_checksum
end
# This is primarily a guard method, to reduce the chance of false failures (which could happen
# for repositories that change very rapidly)
def should_verify_checksum?
primary_checksum = registry.repository_state.public_send("#{type}_verification_checksum")
secondary_checksum = registry.public_send("#{type}_verification_checksum")
primary_last_verification_at = registry.repository_state.public_send("last_#{type}_verification_at")
secondary_last_verification_at = registry.public_send("last_#{type}_verification_at") || Time.at(0)
secondary_last_successful_sync_at = registry.public_send("last_#{type}_successful_sync_at")
# primary repository was verified (even if its checksum is nil).
# note: we allow a nil primary checksum so that we still run through the checksum
# verification and set the verification date on the secondary. Otherwise, we'd keep
# revisiting this record over and over.
return false if primary_last_verification_at.nil?
# secondary repository checksum does not equal the primary repository checksum
return false if secondary_checksum == primary_checksum && !primary_checksum.nil?
# primary was verified later than the secondary verification
return false if primary_last_verification_at < secondary_last_verification_at
# secondary repository was successfully synced after the last secondary verification
return false if secondary_last_successful_sync_at.nil? || secondary_last_successful_sync_at < secondary_last_verification_at
true
end
private
attr_reader :registry, :type
def verify_checksum
checksum = calculate_checksum(project.repository_storage, repository_path)
if mismatch?(checksum)
record_status(error_msg: "#{type.to_s.capitalize} checksum mismatch: #{repository_path}")
else
record_status(checksum: checksum)
end
rescue ::Gitlab::Git::Repository::NoRepository, ::Gitlab::Git::Checksum::Failure, Timeout::Error => e
record_status(error_msg: "Error verifying #{type.to_s.capitalize} checksum: #{repository_path}", exception: e)
end
def mismatch?(checksum)
checksum != registry.public_send("project_#{type}_verification_checksum")
end
def calculate_checksum(storage, relative_path)
Gitlab::Git::Checksum.new(storage, relative_path).calculate
end
# note: `last_#{type}_verification_at` is always set, recording when we last
# _did_ a verification, whether it succeeded or failed
def record_status(checksum: nil, error_msg: nil, exception: nil, details: {})
attrs = {
"#{type}_verification_checksum" => checksum,
"last_#{type}_verification_at" => DateTime.now,
"last_#{type}_verification_failure" => nil,
"last_#{type}_verification_failed" => false
}
if error_msg
attrs["last_#{type}_verification_failed"] = true
attrs["last_#{type}_verification_failure"] = error_msg
log_error(error_msg, exception, type: type, repository_path: repository_path, full_path: path_to_repo)
end
registry.update!(attrs)
end
def repository_path
registry.repository_path(type)
end
def path_to_repo
case type
when :repository
project.repository.path_to_repo
when :wiki
project.wiki.repository.path_to_repo
end
end
end
end
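The service is invoked per registry and per repository type by the secondary verification worker added later in this diff (sketch):

Geo::RepositoryVerifySecondaryService.new(registry, :repository).execute
Geo::RepositoryVerifySecondaryService.new(registry, :wiki).execute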
- project = local_assigns.fetch(:project)
- metric = local_assigns.fetch(:metric)
- save_button_text = metric.persisted? ? _('Save changes') : s_('Metrics|Create metric')
.row.prepend-top-default.append-bottom-default
%h3.page-title.text-center
- if metric.persisted?
= s_('Metrics|Edit metric')
- else
= s_('Metrics|New metric')
= form_for [project.namespace.becomes(Namespace), project, metric], html: { class: 'col-lg-8 col-lg-offset-2' } do |f|
= form_errors(metric)
.form-group
= f.label :title, s_('Metrics|Name'), class: 'label-light'
= f.text_field :title, required: true, class: 'form-control', placeholder: s_('Metrics|e.g. Throughput'), autofocus: true
%span.help-block
= s_('Metrics|Used as a title for the chart')
.form-group
= label_tag :group, s_("Metrics|Type"), class: 'append-bottom-10'
.form-group.append-bottom-0
= f.radio_button :group, :business, checked: true
= f.label :group_business, s_("Metrics|Business"), class: 'label-light append-right-10'
= f.radio_button :group, :response
= f.label :group_response, s_("Metrics|Response"), class: 'label-light append-right-10'
= f.radio_button :group, :system
= f.label :group_system, s_("Metrics|System"), class: 'label-light'
%p.text-tertiary
= s_('Metrics|For grouping similar metrics')
.form-group
= f.label :query, s_('Metrics|Query'), class: 'label-light'
= f.text_field :query, required: true, class: 'form-control', placeholder: s_('Metrics|e.g. rate(http_requests_total[5m])')
%span.help-block
= s_('Metrics|Must be a valid PromQL query.')
= link_to "https://prometheus.io/docs/prometheus/latest/querying/basics/", target: "_blank", rel: "noopener noreferrer" do
= sprite_icon("external-link", size: 12)
= s_('Metrics|Prometheus Query Documentation')
.form-group
= f.label :y_label, s_('Metrics|Y-axis label'), class: 'label-light'
= f.text_field :y_label, class: 'form-control', placeholder: s_('Metrics|e.g. Requests/second')
%span.help-block
= s_("Metrics|Label of the chart's vertical axis. Usually the type of the unit being charted. The horizontal axis (X-axis) always represents time.")
.form-group
= f.label :unit, s_('Metrics|Unit label'), class: 'label-light'
= f.text_field :unit, class: 'form-control', placeholder: s_('Metrics|e.g. req/sec')
.form-group
= f.label :legend, s_('Metrics|Legend label (optional)'), class: 'label-light'
= f.text_field :legend, class: 'form-control', placeholder: s_('Metrics|e.g. HTTP requests')
%span.help-block
= s_('Metrics|Used if the query returns a single series. If it returns multiple series, their legend labels will be picked up from the response.')
.form-actions
= f.submit save_button_text, class: 'btn btn-success'
= link_to _('Cancel'), edit_project_service_path(project, PrometheusService), class: 'btn btn-default pull-right'
- if metric.persisted?
= link_to _('Delete'), project_prometheus_metric_path(project, metric), data: { confirm: s_("This will delete the custom metric, Are you sure?") }, method: :delete, class: "btn btn-danger pull-right append-right-default"
- add_to_breadcrumbs _("Settings"), edit_project_path(@project)
- add_to_breadcrumbs _("Integrations"), project_settings_integrations_path(@project)
- add_to_breadcrumbs "Prometheus", edit_project_service_path(@project, PrometheusService)
- breadcrumb_title = s_('Metrics|Edit metric')
- page_title @metric.title, s_('Metrics|Edit metric')
= render 'form', project: @project, metric: @metric
- add_to_breadcrumbs _("Settings"), edit_project_path(@project)
- add_to_breadcrumbs _("Integrations"), project_settings_integrations_path(@project)
- add_to_breadcrumbs "Prometheus", edit_project_service_path(@project, PrometheusService)
- breadcrumb_title = s_('Metrics|New metric')
- page_title s_('Metrics|New metric')
= render 'form', project: @project, metric: @metric
module Geo
module RepositoryVerification
module Secondary
class SchedulerWorker < Geo::Scheduler::SecondaryWorker
include CronjobQueue
MAX_CAPACITY = 1000
def perform
return unless Feature.enabled?('geo_repository_verification')
super
end
private
def max_capacity
MAX_CAPACITY
end
def load_pending_resources
finder.find_registries_to_verify.limit(db_retrieve_batch_size).pluck(:id)
end
def schedule_job(registry_id)
job_id = Geo::RepositoryVerification::Secondary::SingleWorker.perform_async(registry_id)
{ id: registry_id, job_id: job_id } if job_id
end
def finder
@finder ||= Geo::ProjectRegistryFinder.new
end
end
end
end
end
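How the scheduler is wired up, per the configuration changes earlier in this diff (summary, not new behaviour):

# Scheduled every minute by the geo_repository_verification_secondary_scheduler_worker
# cron entry, runs only on Geo secondaries (Geo::Scheduler::SecondaryWorker), and bails
# out unless the 'geo_repository_verification' feature flag is enabled.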
module Geo
module RepositoryVerification
module Secondary
class SingleWorker
include ApplicationWorker
include GeoQueue
include ExclusiveLeaseGuard
include Gitlab::Geo::ProjectLogHelpers
LEASE_TIMEOUT = 1.hour.to_i
attr_reader :registry
private :registry
delegate :project, to: :registry
def perform(registry_id)
@registry = Geo::ProjectRegistry.find(registry_id)
return if registry.nil? || project.pending_delete?
try_obtain_lease do
verify_checksum(:repository)
verify_checksum(:wiki)
end
end
private
def verify_checksum(type)
Geo::RepositoryVerifySecondaryService.new(registry, type).execute
rescue => e
log_error('Error verifying the repository checksum', e, type: type)
raise e
end
def lease_key
"geo:repository_verification:secondary:single_worker:#{project.id}"
end
def lease_timeout
LEASE_TIMEOUT
end
end
end
end
end
---
title: Geo - Verify repository checksums on the secondary node
merge_request: 4749
author:
type: added
---
title: Add ability to add Custom Metrics to environment and deployment metrics dashboards
merge_request: 3799
author:
type: added
class AddRepositoryVerificationToProjectRegistry < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
def change
add_column :project_registry, :repository_verification_checksum, :string
add_column :project_registry, :last_repository_verification_at, :datetime_with_timezone
add_column :project_registry, :last_repository_verification_failed, :boolean, null: false, default: false
add_column :project_registry, :last_repository_verification_failure, :string
add_column :project_registry, :wiki_verification_checksum, :string
add_column :project_registry, :last_wiki_verification_at, :datetime_with_timezone
add_column :project_registry, :last_wiki_verification_failed, :boolean, null: false, default: false
add_column :project_registry, :last_wiki_verification_failure, :string
end
end
......@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20180201154345) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
......@@ -52,6 +52,14 @@ ActiveRecord::Schema.define(version: 20171115143841) do
t.boolean "force_to_redownload_wiki"
t.string "last_repository_sync_failure"
t.string "last_wiki_sync_failure"
t.string "repository_verification_checksum"
t.datetime_with_timezone "last_repository_verification_at"
t.boolean "last_repository_verification_failed", default: false, null: false
t.string "last_repository_verification_failure"
t.string "wiki_verification_checksum"
t.datetime_with_timezone "last_wiki_verification_at"
t.boolean "last_wiki_verification_failed", default: false, null: false
t.string "last_wiki_verification_failure"
end
add_index "project_registry", ["last_repository_successful_sync_at"], name: "index_project_registry_on_last_repository_successful_sync_at", using: :btree
......
class CreatePrometheusMetrics < ActiveRecord::Migration
DOWNTIME = false
def change
create_table :prometheus_metrics do |t|
t.references :project, index: true, foreign_key: { on_delete: :cascade }, null: false
t.string :title, null: false
t.string :query, null: false
t.string :y_label
t.string :unit
t.string :legend
t.integer :group, null: false, index: true
t.timestamps_with_timezone null: false
end
end
end
class AddGeoNodeVerificationStatus < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
def change
add_column :geo_node_statuses, :repositories_verified_count, :integer
add_column :geo_node_statuses, :repositories_verification_failed_count, :integer
add_column :geo_node_statuses, :wikis_verified_count, :integer
add_column :geo_node_statuses, :wikis_verification_failed_count, :integer
end
end
......@@ -293,6 +293,18 @@ module EE
number_to_percentage(node.wikis_synced_in_percentage, precision: 2)
end
expose :repositories_verification_failed_count
expose :repositories_verified_count
expose :repositories_verified_in_percentage do |node|
number_to_percentage(node.repositories_verified_in_percentage, precision: 2)
end
expose :wikis_verification_failed_count
expose :wikis_verified_count
expose :wikis_verified_in_percentage do |node|
number_to_percentage(node.wikis_verified_in_percentage, precision: 2)
end
expose :replication_slots_count
expose :replication_slots_used_count
expose :replication_slots_used_in_percentage do |node|
......
......@@ -6,6 +6,7 @@ module Gitlab
SECONDARY_JOBS = %w[
geo_repository_sync_worker
geo_file_download_dispatch_worker
geo_repository_verification_secondary_scheduler_worker
].freeze
GEO_JOBS = (COMMON_JOBS + PRIMARY_JOBS + SECONDARY_JOBS).freeze
......
......@@ -4,14 +4,14 @@ module Gitlab
def log_info(message, details = {})
data = base_log_data(message)
data.merge!(details) if details
geo_logger.info(data)
end
def log_error(message, error = nil, details = {})
data = base_log_data(message)
data[:error] = error.to_s if error
data.merge!(details) if details
geo_logger.error(data)
end
protected
......@@ -22,6 +22,10 @@ module Gitlab
message: message
}
end
def geo_logger
Gitlab::Geo::Logger
end
end
end
end
module Gitlab
module Geo
module RepositoryVerificationLogHelpers
include ProjectLogHelpers
protected
def geo_logger
Gitlab::Geo::RepositoryVerificationLogger
end
end
end
end
module Gitlab
module Geo
class RepositoryVerificationLogger < ::Gitlab::Geo::Logger
def self.file_name_noext
'geo_repository_verification'
end
end
end
end
module Gitlab
module Prometheus
module Queries
class ValidateQuery < BaseQuery
def query(query)
client_query(query)
{ valid: true }
rescue Gitlab::PrometheusClient::QueryError => ex
{ valid: false, error: ex.message }
end
def self.transform_reactive_result(result)
result[:query] = result.delete :data
result
end
end
end
end
end
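How this query class is reached at runtime, matching the controller and adapter changes above (summary):

# Projects::Prometheus::MetricsController#validate_query calls
#   prometheus_adapter.query(:validate, params[:query])
# PrometheusAdapter#query resolves that to Gitlab::Prometheus::Queries::ValidateQuery via
# const_get and with_reactive_cache, ultimately returning { valid: true } or
# { valid: false, error: '...' }.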
require 'spec_helper'
describe Projects::Prometheus::MetricsController do
let(:user) { create(:user) }
let(:project) { create(:prometheus_project) }
let(:prometheus_adapter) { double('prometheus_adapter', can_query?: true) }
before do
allow(controller).to receive(:project).and_return(project)
allow(controller).to receive(:prometheus_adapter).and_return(prometheus_adapter)
project.add_master(user)
sign_in(user)
end
describe 'POST #validate_query' do
before do
allow(prometheus_adapter).to receive(:query).with(:validate, query) { validation_result }
end
let(:query) { 'avg(metric)' }
context 'validation information is ready' do
let(:validation_result) { { valid: true } }
it 'validation data is returned' do
post :validate_query, project_params(format: :json, query: query)
expect(json_response).to eq('valid' => true)
end
end
context 'validation information is not ready' do
let(:validation_result) { {} }
it 'validation data is returned' do
post :validate_query, project_params(format: :json, query: query)
expect(response).to have_gitlab_http_status(204)
end
end
end
describe 'GET #index' do
context 'with custom metric present' do
let!(:prometheus_metric) { create(:prometheus_metric, project: project) }
it 'returns a list of metrics' do
get :index, project_params(format: :json)
expect(response).to have_gitlab_http_status(200)
expect(response).to match_response_schema('prometheus/metrics', dir: 'ee')
end
end
context 'without custom metrics ' do
it 'returns an empty json' do
get :index, project_params(format: :json)
expect(response).to have_gitlab_http_status(200)
expect(json_response).to eq({})
end
end
end
describe 'POST #create' do
context 'metric is valid' do
let(:valid_metric) { { prometheus_metric: { title: 'title', query: 'query', group: 'business', y_label: 'label', unit: 'u', legend: 'legend' } } }
it 'shows a success flash message' do
post :create, project_params(valid_metric)
expect(flash[:notice]).to include('Metric was successfully added.')
expect(response).to redirect_to(edit_project_service_path(project, PrometheusService))
end
end
context 'metric is invalid' do
let(:invalid_metric) { { prometheus_metric: { title: 'title' } } }
it 'renders new metric page' do
post :create, project_params(invalid_metric)
expect(response).to have_gitlab_http_status(200)
expect(response).to render_template('new')
end
end
end
describe 'DELETE #destroy' do
context 'format html' do
let!(:metric) { create(:prometheus_metric, project: project) }
it 'destroys the metric' do
delete :destroy, project_params(id: metric.id)
expect(response).to redirect_to(edit_project_service_path(project, PrometheusService))
expect(PrometheusMetric.find_by(id: metric.id)).to be_nil
end
end
context 'format json' do
let!(:metric) { create(:prometheus_metric, project: project) }
it 'destroys the metric' do
delete :destroy, project_params(id: metric.id, format: :json)
expect(response).to have_gitlab_http_status(200)
expect(PrometheusMetric.find_by(id: metric.id)).to be_nil
end
end
end
def project_params(opts = {})
opts.reverse_merge(namespace_id: project.namespace, project_id: project)
end
end
......@@ -72,5 +72,31 @@ FactoryBot.define do
wiki_sync_failed
wiki_retry_count 0
end
trait :repository_verified do
repository_verification_checksum 'f079a831cab27bcda7d81cd9b48296d0c3dd92ee'
last_repository_verification_failed false
last_repository_verification_at { 5.days.ago }
end
trait :wiki_verified do
wiki_verification_checksum 'e079a831cab27bcda7d81cd9b48296d0c3dd92ef'
last_wiki_verification_failed false
last_wiki_verification_at { 5.days.ago }
end
trait :repository_verification_failed do
repository_verification_checksum nil
last_repository_verification_at { 5.days.ago }
last_repository_verification_failed true
last_repository_verification_failure 'Repository checksum did not match'
end
trait :wiki_verification_failed do
wiki_verification_checksum nil
last_wiki_verification_at { 5.days.ago }
last_wiki_verification_failed true
last_wiki_verification_failure 'Wiki checksum did not match'
end
end
end
FactoryBot.define do
factory :prometheus_metric, class: PrometheusMetric do
title 'title'
query 'avg(metric)'
y_label 'y_label'
unit 'm/s'
group :business
project
legend 'legend'
end
end
{
"type": "object",
"properties": {
"metrics": {
"type": "array",
"items": {
"type": "object",
"properties": {
"id": {
"type": "integer"
},
"title": {
"type": "string"
},
"group": {
"type": "string"
},
"group_title": {
"type": "string"
},
"edit_path": {
"type": "string"
}
}
}
}
}
}
......@@ -22,6 +22,12 @@
"wikis_count",
"wikis_failed_count",
"wikis_synced_count",
"repositories_verified_count",
"repositories_verification_failed_count",
"repositories_verified_in_percentage",
"wikis_verified_count",
"wikis_verification_failed_count",
"wikis_verified_in_percentage",
"replication_slots_count",
"replication_slots_used_count",
"replication_slots_used_in_percentage",
......@@ -64,6 +70,12 @@
"wikis_failed_count": { "type": ["integer", "null"] },
"wikis_synced_count": { "type": ["integer", "null"] },
"wikis_synced_in_percentage": { "type": "string" },
"repositories_verified_count": { "type": ["integer", "null"] },
"repositories_verification_failed_count": { "type": ["integer", "null"] },
"repositories_verified_in_percentage": { "type": "string" },
"wikis_verified_count": { "type": ["integer", "null"] },
"wikis_verification_failed_count": { "type": ["integer", "null"] },
"wikis_verified_in_percentage": { "type": "string" },
"replication_slots_count": { "type": ["integer", "null"] },
"replication_slots_used_count": { "type": ["integer", "null"] },
"replication_slots_used_in_percentage": { "type": "string" },
......
......@@ -28,6 +28,7 @@ describe Gitlab::Geo::CronManager, :geo do
geo_repository_verification_primary_batch_worker
geo_repository_sync_worker
geo_file_download_dispatch_worker
geo_repository_verification_secondary_scheduler_worker
geo_metrics_update_worker
].freeze
......@@ -46,7 +47,8 @@ describe Gitlab::Geo::CronManager, :geo do
let(:secondary_jobs) do
[
job('geo_file_download_dispatch_worker'),
job('geo_repository_sync_worker'),
job('geo_repository_verification_secondary_scheduler_worker')
]
end
......
require 'spec_helper'
describe Gitlab::Prometheus::Queries::ValidateQuery do
let(:client) { double('prometheus_client') }
let(:query) { 'avg(metric)' }
subject { described_class.new(client) }
context 'valid query' do
before do
allow(client).to receive(:query).with(query)
end
it 'passes query to prometheus' do
expect(subject.query(query)).to eq(valid: true)
expect(client).to have_received(:query).with(query)
end
end
context 'invalid query' do
let(:message) { 'message' }
before do
allow(client).to receive(:query).with(query).and_raise(Gitlab::PrometheusClient::QueryError.new(message))
end
it 'passes query to prometheus' do
expect(subject.query(query)).to eq(valid: false, error: message)
expect(client).to have_received(:query).with(query)
end
end
end
......@@ -45,6 +45,54 @@ describe Geo::ProjectRegistry do
end
end
describe '.verified_repos' do
it 'returns projects that are verified' do
create(:geo_project_registry, :repository_verification_failed)
create(:geo_project_registry, :wiki_verified)
create(:geo_project_registry, :wiki_verification_failed)
repository_verified = create(:geo_project_registry, :repository_verified)
expect(described_class.verified_repos).to match_array([repository_verified])
end
end
describe '.verification_failed_repos' do
it 'returns projects where last attempt to verify failed' do
create(:geo_project_registry, :repository_verified)
create(:geo_project_registry, :wiki_verified)
create(:geo_project_registry, :wiki_verification_failed)
repository_verification_failed = create(:geo_project_registry, :repository_verification_failed)
expect(described_class.verification_failed_repos).to match_array([repository_verification_failed])
end
end
describe '.verified_wikis' do
it 'returns projects that are verified' do
create(:geo_project_registry, :repository_verification_failed)
create(:geo_project_registry, :repository_verified)
create(:geo_project_registry, :wiki_verification_failed)
wiki_verified = create(:geo_project_registry, :wiki_verified)
expect(described_class.verified_wikis).to match_array([wiki_verified])
end
end
describe '.verification_failed_wikis' do
it 'returns projects where last attempt to verify failed' do
create(:geo_project_registry, :repository_verified)
create(:geo_project_registry, :wiki_verified)
create(:geo_project_registry, :repository_verification_failed)
wiki_verification_failed = create(:geo_project_registry, :wiki_verification_failed)
expect(described_class.verification_failed_wikis).to match_array([wiki_verification_failed])
end
end
describe '.retry_due' do
it 'returns projects that should be synced' do
create(:geo_project_registry, repository_retry_at: Date.yesterday, wiki_retry_at: Date.yesterday)
......
require 'spec_helper'
describe PrometheusMetric do
subject { build(:prometheus_metric) }
it { is_expected.to belong_to(:project) }
it { is_expected.to validate_presence_of(:title) }
it { is_expected.to validate_presence_of(:query) }
it { is_expected.to validate_presence_of(:group) }
describe '#group_title' do
shared_examples 'group_title' do |group, title|
subject { build(:prometheus_metric, group: group).group_title }
it "returns text #{title} for group #{group}" do
expect(subject).to eq(title)
end
end
it_behaves_like 'group_title', :business, 'Business'
it_behaves_like 'group_title', :response, 'Response'
it_behaves_like 'group_title', :system, 'System'
end
describe '#to_query_metric' do
it 'converts to queryable metric object' do
expect(subject.to_query_metric).to be_instance_of(Gitlab::Prometheus::Metric)
end
it 'queryable metric object has title' do
expect(subject.to_query_metric.title).to eq(subject.title)
end
it 'queryable metric object has y_label' do
expect(subject.to_query_metric.y_label).to eq(subject.y_label)
end
it 'queryable metric has no required_metrics' do
expect(subject.to_query_metric.required_metrics).to eq([])
end
it 'queryable metric has weight 0' do
expect(subject.to_query_metric.weight).to eq(0)
end
it 'queryable metric has query description' do
queries = [
{
query_range: subject.query,
unit: subject.unit,
label: subject.legend
}
]
expect(subject.to_query_metric.queries).to eq(queries)
end
end
end
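Taken together, these expectations describe #to_query_metric almost completely; a sketch that would satisfy them follows. The keyword arguments accepted by Gitlab::Prometheus::Metric are an assumption here; only the values come from the spec.
# Sketch only: a PrometheusMetric#to_query_metric shape that satisfies the examples above.
def to_query_metric
  Gitlab::Prometheus::Metric.new(
    title: title,
    y_label: y_label,
    required_metrics: [],  # custom metrics declare no prerequisite exporter metrics
    weight: 0,             # custom metrics carry no ordering weight
    queries: [{ query_range: query, unit: unit, label: legend }]
  )
end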
require 'spec_helper'
describe CiCd::GithubSetupService do
let(:repo_full_name) { "MyUser/my-project" }
let(:api_token) { "abcdefghijk123" }
let(:import_url) { "https://#{api_token}@github.com/#{repo_full_name}.git" }
let(:credentials) { { user: api_token } }
let(:project) do
create(:project, import_source: repo_full_name,
import_url: import_url,
import_data_attributes: { credentials: credentials } )
end
subject do
described_class.new(project)
......
require 'spec_helper'
describe Geo::RepositoryVerifySecondaryService, :geo do
include ::EE::GeoHelpers
let(:primary) { create(:geo_node, :primary) }
let(:secondary) { create(:geo_node) }
before do
stub_current_geo_node(secondary)
end
describe '#execute' do
let(:repository_state) { create(:repository_state, project: create(:project, :repository)) }
let(:registry) do
registry = create(:geo_project_registry, project: repository_state.project)
registry.project.last_repository_updated_at = 7.hours.ago
registry.project.repository_state.last_repository_verification_at = 5.hours.ago
registry.last_repository_successful_sync_at = 5.hours.ago
registry.project.repository_state.repository_verification_checksum = 'my_checksum'
registry
end
let(:service) { described_class.new(registry, :repository) }
it 'only works on the secondary' do
stub_current_geo_node(primary)
expect(service).not_to receive(:log_info)
service.execute
end
it 'sets checksum when the checksum matches' do
allow(service).to receive(:calculate_checksum).and_return('my_checksum')
expect(service).to receive(:record_status).once.with(checksum: 'my_checksum')
service.execute
end
it 'sets failure message when the checksum does not match' do
allow(service).to receive(:calculate_checksum).and_return('not_my_checksum')
expect(service).to receive(:record_status).once.with(error_msg: start_with('Repository checksum mismatch'))
service.execute
end
end
shared_examples 'should_verify_checksum? for repositories/wikis' do |type|
let(:repository_state) { create(:repository_state, project: create(:project, :repository)) }
let(:registry) do
registry = create(:geo_project_registry, project: repository_state.project)
registry.project.last_repository_updated_at = 7.hours.ago
registry.project.repository_state.public_send("last_#{type}_verification_at=", 5.hours.ago)
registry.public_send("last_#{type}_successful_sync_at=", 5.hours.ago)
registry.project.repository_state.public_send("#{type}_verification_checksum=", 'my_checksum')
registry
end
let(:service) { described_class.new(registry, type) }
it 'verifies the repository' do
expect(service.should_verify_checksum?).to be_truthy
end
it 'does not verify if primary was never verified' do
registry.project.repository_state.public_send("last_#{type}_verification_at=", nil)
expect(service.should_verify_checksum?).to be_falsy
end
it 'does not verify if the checksums already match' do
registry.project.repository_state.public_send("#{type}_verification_checksum=", 'my_checksum')
registry.public_send("#{type}_verification_checksum=", 'my_checksum')
expect(service.should_verify_checksum?).to be_falsy
end
it 'does not verify if the primary was verified before the secondary' do
registry.project.repository_state.public_send("last_#{type}_verification_at=", 50.minutes.ago)
registry.public_send("last_#{type}_verification_at=", 30.minutes.ago)
expect(service.should_verify_checksum?).to be_falsy
end
it 'does verify if the secondary was never verified' do
registry.public_send("last_#{type}_verification_at=", nil)
expect(service.should_verify_checksum?).to be_truthy
end
it 'does not verify if never synced' do
registry.public_send("last_#{type}_successful_sync_at=", nil)
expect(service.should_verify_checksum?).to be_falsy
end
it 'does not verify if the secondary synced before the last secondary verification' do
registry.public_send("last_#{type}_verification_at=", 50.minutes.ago)
registry.public_send("last_#{type}_successful_sync_at=", 30.minutes.ago)
expect(service.should_verify_checksum?).to be_falsy
end
it 'has been at least 6 hours since the primary repository was updated' do
registry.project.last_repository_updated_at = 7.hours.ago
expect(service.should_verify_checksum?).to be_truthy
end
end
describe '#should_verify_checksum?' do
context 'repository' do
include_examples 'should_verify_checksum? for repositories/wikis', :repository
end
context 'wiki' do
include_examples 'should_verify_checksum? for repositories/wikis', :wiki
end
end
shared_examples 'record_status for repositories/wikis' do |type|
it 'records a successful verification' do
service.send(:record_status, checksum: 'my_checksum')
registry.reload
expect(registry.public_send("#{type}_verification_checksum")).to eq 'my_checksum'
expect(registry.public_send("last_#{type}_verification_at")).not_to be_nil
expect(registry.public_send("last_#{type}_verification_failure")).to be_nil
expect(registry.public_send("last_#{type}_verification_failed")).to be_falsey
end
it 'records a failure' do
service.send(:record_status, error_msg: 'Repository checksum did not match')
registry.reload
expect(registry.public_send("#{type}_verification_checksum")).to be_nil
expect(registry.public_send("last_#{type}_verification_at")).not_to be_nil
expect(registry.public_send("last_#{type}_verification_failure")).to eq 'Repository checksum did not match'
expect(registry.public_send("last_#{type}_verification_failed")).to be_truthy
end
end
describe '#record_status' do
let(:registry) { create(:geo_project_registry) }
context 'for a repository' do
let(:service) { described_class.new(registry, :repository) }
include_examples 'record_status for repositories/wikis', :repository
end
context 'for a wiki' do
let(:service) { described_class.new(registry, :wiki) }
include_examples 'record_status for repositories/wikis', :wiki
end
end
end
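The examples above constrain the observable behaviour of #execute and #record_status fairly tightly. The hedged sketch below matches them; calculate_checksum, record_status, registry and type appear in the spec itself, while Gitlab::Geo.secondary?, the guard order, and the update call are assumptions.
# Sketch only: behaviour reconstructed from the examples above.
def execute
  return unless Gitlab::Geo.secondary? # the service is a no-op on the primary

  checksum = calculate_checksum
  primary_checksum = registry.project.repository_state.public_send("#{type}_verification_checksum")

  if checksum == primary_checksum
    record_status(checksum: checksum)
  else
    record_status(error_msg: "Repository checksum mismatch: #{primary_checksum} != #{checksum}")
  end
end

def record_status(checksum: nil, error_msg: nil)
  registry.update!(
    "#{type}_verification_checksum" => checksum,
    "last_#{type}_verification_at" => Time.now,
    "last_#{type}_verification_failure" => error_msg,
    "last_#{type}_verification_failed" => error_msg.present?
  )
end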
......@@ -65,6 +65,7 @@ project_tree:
- :create_access_levels
- :project_feature
- :custom_attributes
- :prometheus_metrics
- :project_badges
# Only include the following attributes for the models specified.
......
......@@ -10,9 +10,14 @@ module Gitlab
AdditionalMetricsParser.load_groups_from_yaml
end
# EE only
def self.for_project(_)
common_metrics
def self.for_project(project)
common_metrics + custom_metrics(project)
end
def self.custom_metrics(project)
project.prometheus_metrics.all.group_by(&:group_title).map do |name, metrics|
MetricGroup.new(name: name, priority: 0, metrics: metrics.map(&:to_query_metric))
end
end
end
end
......
......@@ -8,8 +8,13 @@ msgid ""
msgstr ""
"Project-Id-Version: gitlab 1.0.0\n"
"Report-Msgid-Bugs-To: \n"
<<<<<<< HEAD
"POT-Creation-Date: 2018-03-08 08:54+0100\n"
"PO-Revision-Date: 2018-03-08 08:54+0100\n"
=======
"POT-Creation-Date: 2018-03-08 00:12+0100\n"
"PO-Revision-Date: 2018-03-08 00:12+0100\n"
>>>>>>> master
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"Language: \n"
......@@ -3112,6 +3117,15 @@ msgstr ""
msgid "Promote to Group Milestone"
msgstr ""
msgid "Promote"
msgstr ""
msgid "Promote to Group Label"
msgstr ""
msgid "Promote to Group Milestone"
msgstr ""
msgid "Protip:"
msgstr ""
......
......@@ -8,13 +8,15 @@ describe 'User creates a merge request', :js do
merge_requests_template: 'This merge request should contain the following.')
end
let(:user) { create(:user) }
let(:approver) { create(:user) }
let(:user2) { create(:user) }
before do
project.add_master(user)
project.add_master(approver)
sign_in(user)
project.approvers.create(user_id: user.id)
project.approvers.create(user_id: approver.id)
visit(project_new_merge_request_path(project))
end
......@@ -34,7 +36,7 @@ describe 'User creates a merge request', :js do
# Approvers
page.within('ul .unsaved-approvers') do
expect(page).to have_content(user.name)
expect(page).to have_content(approver.name)
end
page.within('.suggested-approvers') do
......
......@@ -80,6 +80,12 @@ export const rawMockNodeDetails = {
wikis_failed_count: 0,
wikis_synced_count: 12,
wikis_synced_in_percentage: '100.00%',
repositories_verification_failed_count: 0,
repositories_verified_count: 12,
repositories_verified_in_percentage: '100.00%',
wikis_verification_failed_count: 0,
wikis_verified_count: 12,
wikis_verified_in_percentage: '100.00%',
replication_slots_count: null,
replication_slots_used_count: null,
replication_slots_used_in_percentage: '0.00%',
......
const metrics = [
{
edit_path: '/root/prometheus-test/prometheus/metrics/3/edit',
id: 3,
title: 'Requests',
group: 'Business',
},
{
edit_path: '/root/prometheus-test/prometheus/metrics/2/edit',
id: 2,
title: 'Sales by the hour',
group: 'Business',
},
{
edit_path: '/root/prometheus-test/prometheus/metrics/1/edit',
id: 1,
title: 'Requests',
group: 'Business',
},
];
export default metrics;
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import PrometheusMetrics from 'ee/prometheus_metrics/prometheus_metrics';
import PANEL_STATE from '~/prometheus_metrics/constants';
import metrics from './mock_data';
describe('PrometheusMetrics EE', () => {
const FIXTURE = 'services/prometheus/prometheus_service.html.raw';
const customMetricsEndpoint = 'http://test.host/frontend-fixtures/services-project/prometheus/metrics';
let mock;
preloadFixtures(FIXTURE);
beforeEach(() => {
mock = new MockAdapter(axios);
mock.onGet(customMetricsEndpoint).reply(200, {
metrics,
});
loadFixtures(FIXTURE);
});
afterEach(() => {
mock.restore();
});
describe('Custom Metrics EE', () => {
let prometheusMetrics;
beforeEach(() => {
prometheusMetrics = new PrometheusMetrics('.js-prometheus-metrics-monitoring');
});
it('should initialize wrapper element refs on the class object', () => {
expect(prometheusMetrics.$wrapperCustomMetrics).not.toBeNull();
expect(prometheusMetrics.$monitoredCustomMetricsPanel).not.toBeNull();
expect(prometheusMetrics.$monitoredCustomMetricsCount).not.toBeNull();
expect(prometheusMetrics.$monitoredCustomMetricsLoading).not.toBeNull();
expect(prometheusMetrics.$monitoredCustomMetricsEmpty).not.toBeNull();
expect(prometheusMetrics.$monitoredCustomMetricsList).not.toBeNull();
expect(prometheusMetrics.$newCustomMetricButton).not.toBeNull();
expect(prometheusMetrics.$flashCustomMetricsContainer).not.toBeNull();
});
it('should contain api endpoints', () => {
expect(prometheusMetrics.activeCustomMetricsEndpoint).toEqual(customMetricsEndpoint);
});
it('should show loading state when called with `loading`', () => {
prometheusMetrics.showMonitoringCustomMetricsPanelState(PANEL_STATE.LOADING);
expect(prometheusMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toEqual(false);
expect(prometheusMetrics.$monitoredCustomMetricsEmpty.hasClass('hidden')).toBeTruthy();
expect(prometheusMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toBeTruthy();
});
it('should show metrics list when called with `list`', () => {
prometheusMetrics.showMonitoringCustomMetricsPanelState(PANEL_STATE.LIST);
expect(prometheusMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toBeTruthy();
expect(prometheusMetrics.$monitoredCustomMetricsEmpty.hasClass('hidden')).toBeTruthy();
expect(prometheusMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toEqual(false);
});
it('should show empty state when called with `empty`', () => {
prometheusMetrics.showMonitoringCustomMetricsPanelState(PANEL_STATE.EMPTY);
expect(prometheusMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toBeTruthy();
expect(prometheusMetrics.$monitoredCustomMetricsEmpty.hasClass('hidden')).toEqual(false);
expect(prometheusMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toBeTruthy();
});
it('should show monitored metrics list', () => {
prometheusMetrics.customMetrics = metrics;
prometheusMetrics.populateCustomMetrics();
const $metricsListLi = prometheusMetrics.$monitoredCustomMetricsList.find('li');
expect(prometheusMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toBeTruthy();
expect(prometheusMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toEqual(false);
expect($metricsListLi.length).toEqual(metrics.length);
});
});
});
......@@ -309,6 +309,7 @@ project:
- fork_network_member
- fork_network
- custom_attributes
- prometheus_metrics
- lfs_file_locks
- project_badges
- source_of_merge_requests
......@@ -317,6 +318,8 @@ award_emoji:
- user
priorities:
- label
prometheus_metrics:
- project
timelogs:
- issue
- merge_request
......
......@@ -553,6 +553,17 @@ ProjectCustomAttribute:
- project_id
- key
- value
PrometheusMetric:
- id
- created_at
- updated_at
- project_id
- y_label
- unit
- legend
- title
- query
- group
LfsFileLock:
- id
- path
......
......@@ -75,6 +75,29 @@ describe PrometheusAdapter, :use_clean_rails_memory_store_caching do
end
end
end
describe 'validate_query' do
let(:environment) { build_stubbed(:environment, slug: 'env-slug') }
let(:validation_query) { Gitlab::Prometheus::Queries::ValidateQuery.name }
let(:query) { 'avg(response)' }
let(:validation_response) { { data: { valid: true } } }
around do |example|
Timecop.freeze { example.run }
end
context 'with valid data' do
subject { service.query(:validate, query) }
before do
stub_reactive_cache(service, validation_response, validation_query, query)
end
it 'returns query data' do
is_expected.to eq(query: { valid: true })
end
end
end
end
describe '#calculate_reactive_cache' do
......
......@@ -118,7 +118,7 @@ describe PipelineSerializer do
it 'verifies number of queries', :request_store do
recorded = ActiveRecord::QueryRecorder.new { subject }
expect(recorded.count).to be_within(1).of(40)
expect(recorded.count).to be_within(2).of(40)
expect(recorded.cached_count).to eq(0)
end
end
......
......@@ -115,6 +115,50 @@ RSpec.shared_examples 'additional metrics query' do
end
end
context 'with custom metrics' do
let!(:metric) { create(:prometheus_metric, project: project) }
before do
allow(client).to receive(:query_range).with('avg(metric)', any_args).and_return(query_range_result)
end
context 'without common metrics' do
before do
allow(metric_group_class).to receive(:common_metrics).and_return([])
end
it 'returns group data for custom metric' do
queries_with_result = { queries: [{ query_range: 'avg(metric)', unit: 'm/s', label: 'legend', result: query_range_result }] }
expect(query_result).to match_schema('prometheus/additional_metrics_query_result')
expect(query_result.count).to eq(1)
expect(query_result.first[:metrics].count).to eq(1)
expect(query_result.first[:metrics].first).to include(queries_with_result)
end
end
context 'with common metrics' do
before do
allow(client).to receive(:query_range).with('query_range_a', any_args).and_return(query_range_result)
allow(metric_group_class).to receive(:common_metrics).and_return([simple_metric_group(metrics: [simple_metric])])
end
it 'returns group data for common and custom metrics' do
custom_queries_with_result = { queries: [{ query_range: 'avg(metric)', unit: 'm/s', label: 'legend', result: query_range_result }] }
common_queries_with_result = { queries: [{ query_range: 'query_range_a', result: query_range_result }] }
expect(query_result).to match_schema('prometheus/additional_metrics_query_result')
expect(query_result.count).to eq(2)
expect(query_result).to all(satisfy { |r| r[:metrics].count == 1 })
expect(query_result[0][:metrics].first).to include(common_queries_with_result)
expect(query_result[1][:metrics].first).to include(custom_queries_with_result)
end
end
end
context 'with two groups with one metric each' do
let(:metrics) { [simple_metric(queries: [simple_query])] }
......