Commit 830c770a authored by Shinya Maeda

Merge branch 'master' into feature/sm/35954-create-kubernetes-cluster-on-gke-from-k8s-service

parents 3e26b0dc 7f8e720f
@@ -27,7 +27,7 @@ variables:
  GET_SOURCES_ATTEMPTS: "3"
  KNAPSACK_RSPEC_SUITE_REPORT_PATH: knapsack/${CI_PROJECT_NAME}/rspec_report-master.json
  KNAPSACK_SPINACH_SUITE_REPORT_PATH: knapsack/${CI_PROJECT_NAME}/spinach_report-master.json
-  FLAKY_RSPEC_SUITE_REPORT_PATH: rspec_flaky/${CI_PROJECT_NAME}/report-master.json
+  FLAKY_RSPEC_SUITE_REPORT_PATH: rspec_flaky/report-suite.json

before_script:
  - bundle --version
@@ -87,12 +87,13 @@ stages:
    - export CI_NODE_TOTAL=${JOB_NAME[-1]}
    - export KNAPSACK_REPORT_PATH=knapsack/${CI_PROJECT_NAME}/${JOB_NAME[0]}_node_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json
    - export KNAPSACK_GENERATE_REPORT=true
-    - export ALL_FLAKY_RSPEC_REPORT_PATH=rspec_flaky/${CI_PROJECT_NAME}/all_node_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json
-    - export NEW_FLAKY_RSPEC_REPORT_PATH=rspec_flaky/${CI_PROJECT_NAME}/new_node_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json
+    - export SUITE_FLAKY_RSPEC_REPORT_PATH=${FLAKY_RSPEC_SUITE_REPORT_PATH}
+    - export FLAKY_RSPEC_REPORT_PATH=rspec_flaky/all_${JOB_NAME[0]}_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json
+    - export NEW_FLAKY_RSPEC_REPORT_PATH=rspec_flaky/new_${JOB_NAME[0]}_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json
    - export FLAKY_RSPEC_GENERATE_REPORT=true
    - export CACHE_CLASSES=true
    - cp ${KNAPSACK_RSPEC_SUITE_REPORT_PATH} ${KNAPSACK_REPORT_PATH}
-    - cp ${FLAKY_RSPEC_SUITE_REPORT_PATH} ${ALL_FLAKY_RSPEC_REPORT_PATH}
+    - '[[ -f $FLAKY_RSPEC_REPORT_PATH ]] || echo "{}" > ${FLAKY_RSPEC_REPORT_PATH}'
    - '[[ -f $NEW_FLAKY_RSPEC_REPORT_PATH ]] || echo "{}" > ${NEW_FLAKY_RSPEC_REPORT_PATH}'
    - scripts/gitaly-test-spawn
    - knapsack rspec "--color --format documentation"
@@ -233,7 +234,7 @@ retrieve-tests-metadata:
    - wget -O $KNAPSACK_SPINACH_SUITE_REPORT_PATH http://${TESTS_METADATA_S3_BUCKET}.s3.amazonaws.com/$KNAPSACK_SPINACH_SUITE_REPORT_PATH || rm $KNAPSACK_SPINACH_SUITE_REPORT_PATH
    - '[[ -f $KNAPSACK_RSPEC_SUITE_REPORT_PATH ]] || echo "{}" > ${KNAPSACK_RSPEC_SUITE_REPORT_PATH}'
    - '[[ -f $KNAPSACK_SPINACH_SUITE_REPORT_PATH ]] || echo "{}" > ${KNAPSACK_SPINACH_SUITE_REPORT_PATH}'
-    - mkdir -p rspec_flaky/${CI_PROJECT_NAME}/
+    - mkdir -p rspec_flaky/
    - wget -O $FLAKY_RSPEC_SUITE_REPORT_PATH http://${TESTS_METADATA_S3_BUCKET}.s3.amazonaws.com/$FLAKY_RSPEC_SUITE_REPORT_PATH || rm $FLAKY_RSPEC_SUITE_REPORT_PATH
    - '[[ -f $FLAKY_RSPEC_SUITE_REPORT_PATH ]] || echo "{}" > ${FLAKY_RSPEC_SUITE_REPORT_PATH}'
@@ -252,22 +253,21 @@ update-tests-metadata:
    - retry gem install fog-aws mime-types
    - scripts/merge-reports ${KNAPSACK_RSPEC_SUITE_REPORT_PATH} knapsack/${CI_PROJECT_NAME}/rspec-pg_node_*.json
    - scripts/merge-reports ${KNAPSACK_SPINACH_SUITE_REPORT_PATH} knapsack/${CI_PROJECT_NAME}/spinach-pg_node_*.json
-    - scripts/merge-reports ${FLAKY_RSPEC_SUITE_REPORT_PATH} rspec_flaky/${CI_PROJECT_NAME}/all_node_*.json
+    - scripts/merge-reports ${FLAKY_RSPEC_SUITE_REPORT_PATH} rspec_flaky/all_*_*.json
    - '[[ -z ${TESTS_METADATA_S3_BUCKET} ]] || scripts/sync-reports put $TESTS_METADATA_S3_BUCKET $KNAPSACK_RSPEC_SUITE_REPORT_PATH $KNAPSACK_SPINACH_SUITE_REPORT_PATH'
    - '[[ -z ${TESTS_METADATA_S3_BUCKET} ]] || scripts/sync-reports put $TESTS_METADATA_S3_BUCKET $FLAKY_RSPEC_SUITE_REPORT_PATH'
    - rm -f knapsack/${CI_PROJECT_NAME}/*_node_*.json
-    - rm -f rspec_flaky/${CI_PROJECT_NAME}/*_node_*.json
+    - rm -f rspec_flaky/all_*.json rspec_flaky/new_*.json

flaky-examples-check:
  <<: *dedicated-runner
  image: ruby:2.3-alpine
  services: []
  before_script: []
+  cache: {}
  variables:
    SETUP_DB: "false"
    USE_BUNDLE_INSTALL: "false"
-    NEW_FLAKY_SPECS_REPORT: rspec_flaky/${CI_PROJECT_NAME}/new_rspec_flaky_examples.json
+    NEW_FLAKY_SPECS_REPORT: rspec_flaky/report-new.json
  stage: post-test
  allow_failure: yes
  only:
@@ -281,7 +281,7 @@ flaky-examples-check:
      - rspec_flaky/
  script:
    - '[[ -f $NEW_FLAKY_SPECS_REPORT ]] || echo "{}" > ${NEW_FLAKY_SPECS_REPORT}'
-    - scripts/merge-reports $NEW_FLAKY_SPECS_REPORT rspec_flaky/${CI_PROJECT_NAME}/new_node_*.json
+    - scripts/merge-reports ${NEW_FLAKY_SPECS_REPORT} rspec_flaky/new_*_*.json
    - scripts/detect-new-flaky-examples $NEW_FLAKY_SPECS_REPORT

setup-test-env:
......
@@ -23,7 +23,7 @@ gem 'faraday', '~> 0.12'
# Authentication libraries
gem 'devise', '~> 4.2'
gem 'doorkeeper', '~> 4.2.0'
-gem 'doorkeeper-openid_connect', '~> 1.1.0'
+gem 'doorkeeper-openid_connect', '~> 1.2.0'
gem 'omniauth', '~> 1.4.2'
gem 'omniauth-auth0', '~> 1.4.1'
gem 'omniauth-azure-oauth2', '~> 0.0.9'
......
@@ -80,7 +80,7 @@ GEM
      coderay (>= 1.0.0)
      erubis (>= 2.6.6)
      rack (>= 0.9.0)
-    bindata (2.3.5)
+    bindata (2.4.1)
    binding_of_caller (0.7.2)
      debug_inspector (>= 0.0.1)
    bootstrap-sass (3.3.6)
@@ -166,9 +166,9 @@ GEM
    docile (1.1.5)
    domain_name (0.5.20161021)
      unf (>= 0.0.5, < 1.0.0)
-    doorkeeper (4.2.0)
+    doorkeeper (4.2.6)
      railties (>= 4.2)
-    doorkeeper-openid_connect (1.1.2)
+    doorkeeper-openid_connect (1.2.0)
      doorkeeper (~> 4.0)
      json-jwt (~> 1.6)
    dropzonejs-rails (0.7.2)
@@ -410,7 +410,7 @@ GEM
      railties (>= 4.2.0)
      thor (>= 0.14, < 2.0)
    json (1.8.6)
-    json-jwt (1.7.1)
+    json-jwt (1.7.2)
      activesupport
      bindata
      multi_json (>= 1.3)
@@ -681,7 +681,7 @@ GEM
    rainbow (2.2.2)
      rake
    raindrops (0.18.0)
-    rake (12.0.0)
+    rake (12.1.0)
    rblineprof (0.3.6)
      debugger-ruby_core_source (~> 1.3)
    rbnacl (4.0.2)
@@ -1002,7 +1002,7 @@ DEPENDENCIES
  devise-two-factor (~> 3.0.0)
  diffy (~> 3.1.0)
  doorkeeper (~> 4.2.0)
-  doorkeeper-openid_connect (~> 1.1.0)
+  doorkeeper-openid_connect (~> 1.2.0)
  dropzonejs-rails (~> 0.7.1)
  email_reply_trimmer (~> 0.1)
  email_spec (~> 1.6.0)
......
@@ -20,7 +20,7 @@
<template v-if="time.days">{{ time.days }} <span>{{ n__('day', 'days', time.days) }}</span></template>
<template v-if="time.hours">{{ time.hours }} <span>{{ n__('Time|hr', 'Time|hrs', time.hours) }}</span></template>
<template v-if="time.mins && !time.days">{{ time.mins }} <span>{{ n__('Time|min', 'Time|mins', time.mins) }}</span></template>
-<template v-if="time.seconds && hasDa === 1 || time.seconds === 0">{{ time.seconds }} <span>{{ s__('Time|s') }}</span></template>
+<template v-if="time.seconds && hasData === 1 || time.seconds === 0">{{ time.seconds }} <span>{{ s__('Time|s') }}</span></template>
</template>
<template v-else>
--
......
@@ -4,6 +4,7 @@ import sprintf from './sprintf';
const langAttribute = document.querySelector('html').getAttribute('lang');
const lang = (langAttribute || 'en').replace(/-/g, '_');
const locale = new Jed(window.translations || {});
+delete window.translations;

/**
  Translates `text`
......
@@ -3,7 +3,7 @@
import GraphLegend from './graph/legend.vue';
import GraphFlag from './graph/flag.vue';
import GraphDeployment from './graph/deployment.vue';
-import GraphPath from './graph_path.vue';
+import GraphPath from './graph/path.vue';
import MonitoringMixin from '../mixins/monitoring_mixins';
import eventHub from '../event_hub';
import measurements from '../utils/measurements';
@@ -69,7 +69,7 @@
    },
    computed: {
-      outterViewBox() {
+      outerViewBox() {
        return `0 0 ${this.baseGraphWidth} ${this.baseGraphHeight}`;
      },
@@ -137,17 +137,19 @@
      },
      renderAxesPaths() {
-        this.timeSeries = createTimeSeries(this.graphData.queries[0],
-          this.graphWidth,
-          this.graphHeight,
-          this.graphHeightOffset);
+        this.timeSeries = createTimeSeries(
+          this.graphData.queries[0],
+          this.graphWidth,
+          this.graphHeight,
+          this.graphHeightOffset,
+        );
        if (this.timeSeries.length > 3) {
          this.baseGraphHeight = this.baseGraphHeight += (this.timeSeries.length - 3) * 20;
        }
        const axisXScale = d3.time.scale()
-          .range([0, this.graphWidth]);
+          .range([0, this.graphWidth - 70]);
        const axisYScale = d3.scale.linear()
          .range([this.graphHeight - this.graphHeightOffset, 0]);
@@ -214,7 +216,7 @@
    class="prometheus-svg-container"
    :style="paddingBottomRootSvg">
    <svg
-      :viewBox="outterViewBox"
+      :viewBox="outerViewBox"
      ref="baseSvg">
      <g
        class="x-axis"
......
@@ -72,6 +72,13 @@
      :title="pipeline.yaml_errors">
      yaml invalid
    </span>
+    <span
+      v-if="pipeline.flags.failure_reason"
+      v-tooltip
+      class="js-pipeline-url-failure label label-danger"
+      :title="pipeline.failure_reason">
+      error
+    </span>
    <a
      v-if="pipeline.flags.auto_devops"
      tabindex="0"
......
@@ -291,8 +291,14 @@
  fill: $black;
}

-.tick > text {
-  font-size: 12px;
+.tick {
+  > line {
+    stroke: $gray-darker;
+  }
+
+  > text {
+    font-size: 12px;
+  }
}

.text-metric-title {
......
@@ -108,6 +108,15 @@
    }
  }

+  .subkeys-list {
+    @include basic-list;
+
+    li {
+      padding: 3px 0;
+      border: none;
+    }
+  }
+
  .key-list-item {
    .key-list-item-info {
      @media (min-width: $screen-sm-min) {
......
@@ -2,7 +2,7 @@ class Profiles::GpgKeysController < Profiles::ApplicationController
  before_action :set_gpg_key, only: [:destroy, :revoke]

  def index
-    @gpg_keys = current_user.gpg_keys
+    @gpg_keys = current_user.gpg_keys.with_subkeys
    @gpg_key = GpgKey.new
  end
......
@@ -5,6 +5,7 @@ module Ci
    include Importable
    include AfterCommitQueue
    include Presentable
+    include Gitlab::OptimisticLocking

    belongs_to :project
    belongs_to :user
@@ -58,6 +59,11 @@ module Ci
      auto_devops_source: 2
    }

+    enum failure_reason: {
+      unknown_failure: 0,
+      config_error: 1
+    }
+
    state_machine :status, initial: :created do
      event :enqueue do
        transition created: :pending
@@ -109,6 +115,12 @@ module Ci
        pipeline.auto_canceled_by = nil
      end

+      before_transition any => :failed do |pipeline, transition|
+        transition.args.first.try do |reason|
+          pipeline.failure_reason = reason
+        end
+      end
+
      after_transition [:created, :pending] => :running do |pipeline|
        pipeline.run_after_commit { PipelineMetricsWorker.perform_async(pipeline.id) }
      end
@@ -263,7 +275,7 @@ module Ci
    end

    def cancel_running
-      Gitlab::OptimisticLocking.retry_lock(cancelable_statuses) do |cancelable|
+      retry_optimistic_lock(cancelable_statuses) do |cancelable|
        cancelable.find_each do |job|
          yield(job) if block_given?
          job.cancel
@@ -312,6 +324,10 @@ module Ci
      @stage_seeds ||= config_processor.stage_seeds(self)
    end

+    def seeds_size
+      @seeds_size ||= stage_seeds.sum(&:size)
+    end
+
    def has_kubernetes_active?
      project.kubernetes_service&.active?
    end
@@ -403,7 +419,7 @@ module Ci
    end

    def update_status
-      Gitlab::OptimisticLocking.retry_lock(self) do
+      retry_optimistic_lock(self) do
        case latest_builds_status
        when 'pending' then enqueue
        when 'running' then run
......
@@ -81,6 +81,7 @@ module HasStatus
    scope :canceled, -> { where(status: 'canceled') }
    scope :skipped, -> { where(status: 'skipped') }
    scope :manual, -> { where(status: 'manual') }
+    scope :alive, -> { where(status: [:created, :pending, :running]) }
    scope :created_or_pending, -> { where(status: [:created, :pending]) }
    scope :running_or_pending, -> { where(status: [:running, :pending]) }
    scope :finished, -> { where(status: [:success, :failed, :canceled]) }
......
@@ -9,6 +9,9 @@ class GpgKey < ActiveRecord::Base
  belongs_to :user
  has_many :gpg_signatures
+  has_many :subkeys, class_name: 'GpgKeySubkey'
+
+  scope :with_subkeys, -> { includes(:subkeys) }

  validates :user, presence: true
@@ -36,10 +39,12 @@ class GpgKey < ActiveRecord::Base
  before_validation :extract_fingerprint, :extract_primary_keyid
  after_commit :update_invalid_gpg_signatures, on: :create
+  after_create :generate_subkeys

  def primary_keyid
    super&.upcase
  end
+  alias_method :keyid, :primary_keyid

  def fingerprint
    super&.upcase
@@ -49,6 +54,10 @@ class GpgKey < ActiveRecord::Base
    super(value&.strip)
  end

+  def keyids
+    [keyid].concat(subkeys.map(&:keyid))
+  end
+
  def user_infos
    @user_infos ||= Gitlab::Gpg.user_infos_from_key(key)
  end
@@ -82,10 +91,11 @@ class GpgKey < ActiveRecord::Base
  def revoke
    GpgSignature
-      .where(gpg_key: self)
+      .with_key_and_subkeys(self)
      .where.not(verification_status: GpgSignature.verification_statuses[:unknown_key])
      .update_all(
        gpg_key_id: nil,
+        gpg_key_subkey_id: nil,
        verification_status: GpgSignature.verification_statuses[:unknown_key],
        updated_at: Time.zone.now
      )
@@ -106,4 +116,12 @@ class GpgKey < ActiveRecord::Base
    # only allows one key
    self.primary_keyid = Gitlab::Gpg.primary_keyids_from_key(key).first
  end
+
+  def generate_subkeys
+    gpg_subkeys = Gitlab::Gpg.subkeys_from_key(key)
+
+    gpg_subkeys[primary_keyid]&.each do |subkey_data|
+      subkeys.create!(keyid: subkey_data[:keyid], fingerprint: subkey_data[:fingerprint])
+    end
+  end
end
class GpgKeySubkey < ActiveRecord::Base
include ShaAttribute
sha_attribute :keyid
sha_attribute :fingerprint
belongs_to :gpg_key
validates :gpg_key_id, presence: true
validates :fingerprint, :keyid, presence: true, uniqueness: true
delegate :key, :user, :user_infos, :verified?, :verified_user_infos,
:verified_and_belongs_to_email?, to: :gpg_key
def keyid
super&.upcase
end
def fingerprint
super&.upcase
end
end
@@ -15,11 +15,42 @@ class GpgSignature < ActiveRecord::Base
  belongs_to :project
  belongs_to :gpg_key
+  belongs_to :gpg_key_subkey

  validates :commit_sha, presence: true
  validates :project_id, presence: true
  validates :gpg_key_primary_keyid, presence: true

+  def self.with_key_and_subkeys(gpg_key)
+    subkey_ids = gpg_key.subkeys.pluck(:id)
+
+    where(
+      arel_table[:gpg_key_id].eq(gpg_key.id).or(
+        arel_table[:gpg_key_subkey_id].in(subkey_ids)
+      )
+    )
+  end
+
+  def gpg_key=(model)
+    case model
+    when GpgKey
+      super
+    when GpgKeySubkey
+      self.gpg_key_subkey = model
+    when NilClass
+      super
+      self.gpg_key_subkey = nil
+    end
+  end
+
+  def gpg_key
+    if gpg_key_id
+      super
+    elsif gpg_key_subkey_id
+      gpg_key_subkey
+    end
+  end
+
  def gpg_key_primary_keyid
    super&.upcase
  end
......
module Ci
  class PipelinePresenter < Gitlab::View::Presenter::Delegated
+    FAILURE_REASONS = {
+      config_error: 'CI/CD YAML configuration error!'
+    }.freeze
+
    presents :pipeline

+    def failure_reason
+      return unless pipeline.failure_reason?
+
+      FAILURE_REASONS[pipeline.failure_reason.to_sym] ||
+        pipeline.failure_reason
+    end
+
    def status_title
      if auto_canceled?
        "Pipeline is redundant and is auto-canceled by Pipeline ##{auto_canceled_by_id}"
......
@@ -20,6 +20,7 @@ class PipelineEntity < Grape::Entity
    expose :has_yaml_errors?, as: :yaml_errors
    expose :can_retry?, as: :retryable
    expose :can_cancel?, as: :cancelable
+    expose :failure_reason?, as: :failure_reason
  end

  expose :details do
@@ -44,6 +45,11 @@ class PipelineEntity < Grape::Entity
  end

  expose :commit, using: CommitEntity
+  expose :yaml_errors, if: -> (pipeline, _) { pipeline.has_yaml_errors? }
+
+  expose :failure_reason, if: -> (pipeline, _) { pipeline.failure_reason? } do |pipeline|
+    pipeline.present.failure_reason
+  end

  expose :retry_path, if: -> (*) { can_retry? } do |pipeline|
    retry_project_pipeline_path(pipeline.project, pipeline)
@@ -53,8 +59,6 @@ class PipelineEntity < Grape::Entity
    cancel_project_pipeline_path(pipeline.project, pipeline)
  end

-  expose :yaml_errors, if: -> (pipeline, _) { pipeline.has_yaml_errors? }
-
  private

  alias_method :pipeline, :object
......
@@ -37,7 +37,7 @@
- if content_for?(:library_javascripts)
  = yield :library_javascripts

-= javascript_include_tag asset_path("locale/#{I18n.locale.to_s || I18n.default_locale.to_s}/app.js")
+= javascript_include_tag asset_path("locale/#{I18n.locale.to_s || I18n.default_locale.to_s}/app.js") unless I18n.locale == :en
= webpack_bundle_tag "webpack_runtime"
= webpack_bundle_tag "common"
= webpack_bundle_tag "main"
......
@@ -7,6 +7,13 @@
  .description
    %code= key.fingerprint
+  - if key.subkeys.present?
+    .subkeys
+      %span.bold Subkeys:
+      %ul.subkeys-list
+        - key.subkeys.each do |subkey|
+          %li
+            %code= subkey.fingerprint
  .pull-right
    %span.key-created-at
      created #{time_ago_with_tooltip(key.created_at)}
......
- @content_class = "limit-container-width limited-inner-width-container" unless fluid_layout
-- add_to_breadcrumbs "Snippets", dashboard_snippets_path
+- add_to_breadcrumbs "Snippets", project_snippets_path(@project)
- breadcrumb_title @snippet.to_reference
- page_title "#{@snippet.title} (#{@snippet.to_reference})", "Snippets"
......
---
title: Add support for GPG subkeys in signature verification
merge_request: 14517
author:
type: added
---
title: Fix incorrect X-axis labels in Prometheus graphs
merge_request: 14258
author:
type: fixed
---
title: Fix project snippets breadcrumb link
merge_request:
author:
type: fixed
---
title: Upgrade doorkeeper-openid_connect
merge_request: 14372
author: Markus Koller
type: other
---
title: Fix typo in cycle analytics breaking time component
merge_request:
author:
type: fixed
---
title: Prevent branches or tags from starting with invalid characters (e.g. -, .)
merge_request:
author:
type: fixed
Doorkeeper::OpenidConnect.configure do
  issuer Gitlab.config.gitlab.url

-  jws_private_key Rails.application.secrets.jws_private_key
+  signing_key Rails.application.secrets.openid_connect_signing_key

  resource_owner_from_access_token do |access_token|
    User.active.find_by(id: access_token.resource_owner_id)
......
@@ -25,7 +25,7 @@ def create_tokens
    secret_key_base: file_secret_key || generate_new_secure_token,
    otp_key_base: env_secret_key || file_secret_key || generate_new_secure_token,
    db_key_base: generate_new_secure_token,
-    jws_private_key: generate_new_rsa_private_key
+    openid_connect_signing_key: generate_new_rsa_private_key
  }

  missing_secrets = set_missing_keys(defaults)
......
@@ -2,7 +2,7 @@
require 'gitlab/current_settings'

-if Rails.env.production?
+def configure_sentry
  # allow it to fail: it may do so when create_from_defaults is executed before migrations are actually done
  begin
    sentry_enabled = Gitlab::CurrentSettings.current_application_settings.sentry_enabled
@@ -23,3 +23,5 @@ if Rails.env.production?
    end
  end
end
+
+configure_sentry if Rails.env.production?
class CreateGpgKeySubkeys < ActiveRecord::Migration
DOWNTIME = false
def up
create_table :gpg_key_subkeys do |t|
t.references :gpg_key, null: false, index: true, foreign_key: { on_delete: :cascade }
t.binary :keyid
t.binary :fingerprint
t.index :keyid, unique: true, length: Gitlab::Database.mysql? ? 20 : nil
t.index :fingerprint, unique: true, length: Gitlab::Database.mysql? ? 20 : nil
end
add_reference :gpg_signatures, :gpg_key_subkey, index: true, foreign_key: { on_delete: :nullify }
end
def down
remove_reference(:gpg_signatures, :gpg_key_subkey, index: true, foreign_key: true)
drop_table :gpg_key_subkeys
end
end
class AddFailureReasonToPipelines < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
add_column :ci_pipelines, :failure_reason, :integer
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class ScheduleCreateGpgKeySubkeysFromGpgKeys < ActiveRecord::Migration
disable_ddl_transaction!
DOWNTIME = false
MIGRATION = 'CreateGpgKeySubkeysFromGpgKeys'
class GpgKey < ActiveRecord::Base
self.table_name = 'gpg_keys'
include EachBatch
end
def up
GpgKey.select(:id).each_batch do |gpg_keys|
jobs = gpg_keys.pluck(:id).map do |id|
[MIGRATION, [id]]
end
BackgroundMigrationWorker.perform_bulk(jobs)
end
end
def down
end
end
@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.

-ActiveRecord::Schema.define(version: 20171004121444) do
+ActiveRecord::Schema.define(version: 20171005130944) do

  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"
@@ -342,6 +342,7 @@ ActiveRecord::Schema.define(version: 20171004121444) do
    t.integer "source"
    t.integer "config_source"
    t.boolean "protected"
+    t.integer "failure_reason"
  end

  add_index "ci_pipelines", ["auto_canceled_by_id"], name: "index_ci_pipelines_on_auto_canceled_by_id", using: :btree
@@ -608,6 +609,16 @@ ActiveRecord::Schema.define(version: 20171004121444) do
  add_index "gcp_clusters", ["project_id"], name: "index_gcp_clusters_on_project_id", unique: true, using: :btree

+  create_table "gpg_key_subkeys", force: :cascade do |t|
+    t.integer "gpg_key_id", null: false
+    t.binary "keyid"
+    t.binary "fingerprint"
+  end
+
+  add_index "gpg_key_subkeys", ["fingerprint"], name: "index_gpg_key_subkeys_on_fingerprint", unique: true, using: :btree
+  add_index "gpg_key_subkeys", ["gpg_key_id"], name: "index_gpg_key_subkeys_on_gpg_key_id", using: :btree
+  add_index "gpg_key_subkeys", ["keyid"], name: "index_gpg_key_subkeys_on_keyid", unique: true, using: :btree
+
  create_table "gpg_keys", force: :cascade do |t|
    t.datetime_with_timezone "created_at", null: false
    t.datetime_with_timezone "updated_at", null: false
@@ -631,11 +642,13 @@ ActiveRecord::Schema.define(version: 20171004121444) do
    t.text "gpg_key_user_name"
    t.text "gpg_key_user_email"
    t.integer "verification_status", limit: 2, default: 0, null: false
+    t.integer "gpg_key_subkey_id"
  end

  add_index "gpg_signatures", ["commit_sha"], name: "index_gpg_signatures_on_commit_sha", unique: true, using: :btree
  add_index "gpg_signatures", ["gpg_key_id"], name: "index_gpg_signatures_on_gpg_key_id", using: :btree
  add_index "gpg_signatures", ["gpg_key_primary_keyid"], name: "index_gpg_signatures_on_gpg_key_primary_keyid", using: :btree
+  add_index "gpg_signatures", ["gpg_key_subkey_id"], name: "index_gpg_signatures_on_gpg_key_subkey_id", using: :btree
  add_index "gpg_signatures", ["project_id"], name: "index_gpg_signatures_on_project_id", using: :btree

  create_table "identities", force: :cascade do |t|
@@ -1758,7 +1771,9 @@ ActiveRecord::Schema.define(version: 20171004121444) do
  add_foreign_key "gcp_clusters", "projects", on_delete: :cascade
  add_foreign_key "gcp_clusters", "services", on_delete: :nullify
  add_foreign_key "gcp_clusters", "users", on_delete: :nullify
+  add_foreign_key "gpg_key_subkeys", "gpg_keys", on_delete: :cascade
  add_foreign_key "gpg_keys", "users", on_delete: :cascade
+  add_foreign_key "gpg_signatures", "gpg_key_subkeys", on_delete: :nullify
  add_foreign_key "gpg_signatures", "gpg_keys", on_delete: :nullify
  add_foreign_key "gpg_signatures", "projects", on_delete: :cascade
  add_foreign_key "issue_assignees", "issues", name: "fk_b7d881734a", on_delete: :cascade
......
@@ -133,9 +133,9 @@ Remove the old Ruby 1.8 if present:
Download Ruby and compile it:

    mkdir /tmp/ruby && cd /tmp/ruby
-    curl --remote-name --progress https://cache.ruby-lang.org/pub/ruby/2.3/ruby-2.3.3.tar.gz
-    echo '1014ee699071aa2ddd501907d18cbe15399c997d ruby-2.3.3.tar.gz' | shasum -c - && tar xzf ruby-2.3.3.tar.gz
-    cd ruby-2.3.3
+    curl --remote-name --progress https://cache.ruby-lang.org/pub/ruby/2.3/ruby-2.3.5.tar.gz
+    echo '3247e217d6745c27ef23bdc77b6abdb4b57a118f ruby-2.3.5.tar.gz' | shasum -c - && tar xzf ruby-2.3.5.tar.gz
+    cd ruby-2.3.5
    ./configure --disable-install-rdoc
    make
    sudo make install
......
# From 10.0 to 10.1
Make sure you view this update guide from the tag (version) of GitLab you would
like to install. In most cases this should be the highest numbered production
tag (without rc in it). You can select the tag in the version dropdown at the
top left corner of GitLab (below the menu bar).
If the highest numbered stable branch is unclear, please check the
[GitLab Blog](https://about.gitlab.com/blog/archives.html) for installation
guide links by version.
### 1. Stop server
```bash
sudo service gitlab stop
```
### 2. Backup
```bash
cd /home/git/gitlab
sudo -u git -H bundle exec rake gitlab:backup:create RAILS_ENV=production
```
### 3. Update Ruby
NOTE: GitLab 9.0 and higher only support Ruby 2.3.x and dropped support for Ruby 2.1.x. Be
sure to upgrade your interpreter if necessary.
You can check which version you are running with `ruby -v`.
Download and compile Ruby:
```bash
mkdir /tmp/ruby && cd /tmp/ruby
curl --remote-name --progress https://cache.ruby-lang.org/pub/ruby/2.3/ruby-2.3.5.tar.gz
echo '3247e217d6745c27ef23bdc77b6abdb4b57a118f ruby-2.3.5.tar.gz' | shasum -c - && tar xzf ruby-2.3.5.tar.gz
cd ruby-2.3.5
./configure --disable-install-rdoc
make
sudo make install
```
Install Bundler:
```bash
sudo gem install bundler --no-ri --no-rdoc
```
### 4. Update Node
GitLab now runs [webpack](http://webpack.js.org) to compile frontend assets and
it has a minimum requirement of node v4.3.0.
You can check which version you are running with `node -v`. If you are running
a version older than `v4.3.0` you will need to update to a newer version. You
can find instructions to install from community maintained packages or compile
from source at the nodejs.org website.
<https://nodejs.org/en/download/>
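For example, on a Debian-based system you could install a recent Node.js from the community-maintained NodeSource packages (a sketch of one possible approach; the 8.x line here is an assumption, pick whichever supported version suits your installation):

```bash
# Example only: install Node.js 8.x via NodeSource, then verify the version
curl -sL https://deb.nodesource.com/setup_8.x | sudo -E bash -
sudo apt-get install -y nodejs
node -v   # should print a version >= v4.3.0
```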
Since 8.17, GitLab requires the use of yarn `>= v0.17.0` to manage
JavaScript dependencies.
```bash
curl --silent --show-error https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add -
echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list
sudo apt-get update
sudo apt-get install yarn
```
More information can be found on the [yarn website](https://yarnpkg.com/en/docs/install).
### 5. Update Go
NOTE: GitLab 9.2 and higher only supports Go 1.8.3 and dropped support for Go
1.5.x through 1.7.x. Be sure to upgrade your installation if necessary.
You can check which version you are running with `go version`.
Download and install Go:
```bash
# Remove former Go installation folder
sudo rm -rf /usr/local/go
curl --remote-name --progress https://storage.googleapis.com/golang/go1.8.3.linux-amd64.tar.gz
echo '1862f4c3d3907e59b04a757cfda0ea7aa9ef39274af99a784f5be843c80c6772 go1.8.3.linux-amd64.tar.gz' | shasum -a256 -c - && \
sudo tar -C /usr/local -xzf go1.8.3.linux-amd64.tar.gz
sudo ln -sf /usr/local/go/bin/{go,godoc,gofmt} /usr/local/bin/
rm go1.8.3.linux-amd64.tar.gz
```
### 6. Get latest code
```bash
cd /home/git/gitlab
sudo -u git -H git fetch --all
sudo -u git -H git checkout -- db/schema.rb # local changes will be restored automatically
sudo -u git -H git checkout -- locale
```
For GitLab Community Edition:
```bash
cd /home/git/gitlab
sudo -u git -H git checkout 10-1-stable
```
OR
For GitLab Enterprise Edition:
```bash
cd /home/git/gitlab
sudo -u git -H git checkout 10-1-stable-ee
```
### 7. Update gitlab-shell
```bash
cd /home/git/gitlab-shell
sudo -u git -H git fetch --all --tags
sudo -u git -H git checkout v$(</home/git/gitlab/GITLAB_SHELL_VERSION)
sudo -u git -H bin/compile
```
### 8. Update gitlab-workhorse
Install and compile gitlab-workhorse. GitLab-Workhorse uses
[GNU Make](https://www.gnu.org/software/make/).
If you are not using Linux you may have to run `gmake` instead of
`make` below.
```bash
cd /home/git/gitlab-workhorse
sudo -u git -H git fetch --all --tags
sudo -u git -H git checkout v$(</home/git/gitlab/GITLAB_WORKHORSE_VERSION)
sudo -u git -H make
```
### 9. Update Gitaly
#### New Gitaly configuration options required
In order to function, Gitaly needs some additional configuration information. Below we assume you installed Gitaly in `/home/git/gitaly` and GitLab Shell in `/home/git/gitlab-shell`.
```shell
echo '
[gitaly-ruby]
dir = "/home/git/gitaly/ruby"
[gitlab-shell]
dir = "/home/git/gitlab-shell"
' | sudo -u git tee -a /home/git/gitaly/config.toml
```
#### Check Gitaly configuration
Due to a bug in the `rake gitlab:gitaly:install` script, your Gitaly
configuration file may contain syntax errors. The block name
`[[storages]]`, which may occur more than once in your `config.toml`
file, should be `[[storage]]` instead.
```shell
sudo -u git -H sed -i.pre-10.1 's/\[\[storages\]\]/[[storage]]/' /home/git/gitaly/config.toml
```
#### Compile Gitaly
```shell
cd /home/git/gitaly
sudo -u git -H git fetch --all --tags
sudo -u git -H git checkout v$(</home/git/gitlab/GITALY_SERVER_VERSION)
sudo -u git -H make
```
### 10. Update MySQL permissions
If you are using MySQL you need to grant the GitLab user the necessary
permissions on the database:
```bash
mysql -u root -p -e "GRANT TRIGGER ON \`gitlabhq_production\`.* TO 'git'@'localhost';"
```
If you use MySQL with replication, or just have MySQL configured with binary logging,
you will need to also run the following on all of your MySQL servers:
```bash
mysql -u root -p -e "SET GLOBAL log_bin_trust_function_creators = 1;"
```
You can make this setting permanent by adding it to your `my.cnf`:
```
log_bin_trust_function_creators=1
```
### 11. Update configuration files
#### New configuration options for `gitlab.yml`
There might be configuration options available for [`gitlab.yml`][yaml]. View them with the command below and apply them manually to your current `gitlab.yml`:
```sh
cd /home/git/gitlab
git diff origin/10-0-stable:config/gitlab.yml.example origin/10-1-stable:config/gitlab.yml.example
```
#### Nginx configuration
Ensure you're still up-to-date with the latest NGINX configuration changes:
```sh
cd /home/git/gitlab
# For HTTPS configurations
git diff origin/10-0-stable:lib/support/nginx/gitlab-ssl origin/10-1-stable:lib/support/nginx/gitlab-ssl
# For HTTP configurations
git diff origin/10-0-stable:lib/support/nginx/gitlab origin/10-1-stable:lib/support/nginx/gitlab
```
If you are using Strict-Transport-Security in your installation, you must enable it in your NGINX
configuration to continue using it, as the GitLab application no longer handles setting it.
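For example, a minimal way to keep sending the header from NGINX is the following (a sketch; adjust `max-age` and any additional directives to your own HSTS policy):

```
# Inside your HTTPS server block
add_header Strict-Transport-Security "max-age=31536000";
```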
If you are using Apache instead of NGINX please see the updated [Apache templates].
Also note that because Apache does not support upstreams behind Unix sockets you
will need to let gitlab-workhorse listen on a TCP port. You can do this
via [/etc/default/gitlab].
[Apache templates]: https://gitlab.com/gitlab-org/gitlab-recipes/tree/master/web-server/apache
[/etc/default/gitlab]: https://gitlab.com/gitlab-org/gitlab-ce/blob/10-1-stable/lib/support/init.d/gitlab.default.example#L38
#### SMTP configuration
If you're installing from source and use SMTP to deliver mail, you will need to add the following line
to config/initializers/smtp_settings.rb:
```ruby
ActionMailer::Base.delivery_method = :smtp
```
See [smtp_settings.rb.sample] as an example.
[smtp_settings.rb.sample]: https://gitlab.com/gitlab-org/gitlab-ce/blob/10-1-stable/config/initializers/smtp_settings.rb.sample#L13
#### Init script
There might be new configuration options available for [`gitlab.default.example`][gl-example]. View them with the command below and apply them manually to your current `/etc/default/gitlab`:
```sh
cd /home/git/gitlab
git diff origin/10-0-stable:lib/support/init.d/gitlab.default.example origin/10-1-stable:lib/support/init.d/gitlab.default.example
```
Ensure you're still up-to-date with the latest init script changes:
```bash
cd /home/git/gitlab
sudo cp lib/support/init.d/gitlab /etc/init.d/gitlab
```
For Ubuntu 16.04.1 LTS:
```bash
sudo systemctl daemon-reload
```
### 12. Install libs, migrations, etc.
```bash
cd /home/git/gitlab
# MySQL installations (note: the line below states '--without postgres')
sudo -u git -H bundle install --without postgres development test --deployment
# PostgreSQL installations (note: the line below states '--without mysql')
sudo -u git -H bundle install --without mysql development test --deployment
# Optional: clean up old gems
sudo -u git -H bundle clean
# Run database migrations
sudo -u git -H bundle exec rake db:migrate RAILS_ENV=production
# Compile GetText PO files
sudo -u git -H bundle exec rake gettext:compile RAILS_ENV=production
# Update node dependencies and recompile assets
sudo -u git -H bundle exec rake yarn:install gitlab:assets:clean gitlab:assets:compile RAILS_ENV=production NODE_ENV=production
# Clean up cache
sudo -u git -H bundle exec rake cache:clear RAILS_ENV=production
```
**MySQL installations**: Run through the `MySQL strings limits` and `Tables and data conversion to utf8mb4` [tasks](../install/database_mysql.md).
### 13. Start application
```bash
sudo service gitlab start
sudo service nginx restart
```
### 14. Check application status
Check if GitLab and its environment are configured correctly:
```bash
cd /home/git/gitlab
sudo -u git -H bundle exec rake gitlab:env:info RAILS_ENV=production
```
To make sure you didn't miss anything run a more thorough check:
```bash
cd /home/git/gitlab
sudo -u git -H bundle exec rake gitlab:check RAILS_ENV=production
```
If all items are green, then congratulations, the upgrade is complete!
## Things went south? Revert to previous version (9.5)
### 1. Revert the code to the previous version
Follow the [upgrade guide from 9.4 to 9.5](9.4-to-9.5.md), except for the
database migration (the backup is already migrated to the previous version).
### 2. Restore from the backup
```bash
cd /home/git/gitlab
sudo -u git -H bundle exec rake gitlab:backup:restore RAILS_ENV=production
```
If you have more than one backup `*.tar` file, please add `BACKUP=timestamp_of_backup` to the command above.
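For example, assuming a hypothetical backup file named `1504000000_2017_08_29_gitlab_backup.tar` (the timestamp format is illustrative), the restore command would be:

```bash
cd /home/git/gitlab
sudo -u git -H bundle exec rake gitlab:backup:restore BACKUP=1504000000_2017_08_29 RAILS_ENV=production
```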
[yaml]: https://gitlab.com/gitlab-org/gitlab-ce/blob/10-1-stable/config/gitlab.yml.example
[gl-example]: https://gitlab.com/gitlab-org/gitlab-ce/blob/10-1-stable/lib/support/init.d/gitlab.default.example
-*** NOTE: These instructions should be considered deprecated. In GitLab 10.0 we will be releasing new migration instructions using [pgloader](http://pgloader.io/).
-
-# Migrating GitLab from MySQL to Postgres
-
-*Make sure you view this [guide from the `master` branch](https://gitlab.com/gitlab-org/gitlab-ce/blob/master/doc/update/mysql_to_postgresql.md#migrating-gitlab-from-mysql-to-postgres) for the most up to date instructions.*
-
-If you are replacing MySQL with Postgres while keeping GitLab on the same server all you need to do is to export from MySQL, convert the resulting SQL file, and import it into Postgres. If you are also moving GitLab to another server, or if you are switching to omnibus-gitlab, you may want to use a GitLab backup file. The second part of this document explains the procedure to do this.
-
-## Export from MySQL and import into Postgres
-
-Use this if you are keeping GitLab on the same server.
-
-```
-sudo service gitlab stop
-
-# Update /home/git/gitlab/config/database.yml
-
-git clone https://github.com/gitlabhq/mysql-postgresql-converter.git -b gitlab
-cd mysql-postgresql-converter
-mysqldump --compatible=postgresql --default-character-set=utf8 -r gitlabhq_production.mysql -u root gitlabhq_production -p
-python db_converter.py gitlabhq_production.mysql gitlabhq_production.psql
-ed -s gitlabhq_production.psql < move_drop_indexes.ed
-
-# Import the database dump as the application database user
-sudo -u git psql -f gitlabhq_production.psql -d gitlabhq_production
-
-# Install gems for PostgreSQL (note: the line below states '--without ... mysql')
-sudo -u git -H bundle install --without development test mysql --deployment
-
-sudo service gitlab start
-```
-
-## Converting a GitLab backup file from MySQL to Postgres
-
-**Note:** Please make sure to have Python 2.7.x (or higher) installed.
-
-GitLab backup files (`<timestamp>_gitlab_backup.tar`) contain a SQL dump. Using the lanyrd database converter we can replace a MySQL database dump inside the tar file with a Postgres database dump. This can be useful if you are moving to another server.
-
-```
-# Stop GitLab
-sudo service gitlab stop
-
-# Create the backup
-cd /home/git/gitlab
-sudo -u git -H bundle exec rake gitlab:backup:create RAILS_ENV=production
-
-# Note the filename of the backup that was created. We will call it
-# TIMESTAMP_gitlab_backup.tar below.
-
-# Move the backup file we will convert to its own directory
-sudo -u git -H mkdir -p tmp/backups/postgresql
-sudo -u git -H mv tmp/backups/TIMESTAMP_gitlab_backup.tar tmp/backups/postgresql/
-
-# Create a separate database dump with PostgreSQL compatibility
-cd tmp/backups/postgresql
-sudo -u git -H mysqldump --compatible=postgresql --default-character-set=utf8 -r gitlabhq_production.mysql -u root gitlabhq_production -p
-
-# Clone the database converter
-sudo -u git -H git clone https://github.com/gitlabhq/mysql-postgresql-converter.git -b gitlab
-
-# Convert gitlabhq_production.mysql
-sudo -u git -H mkdir db
-sudo -u git -H python mysql-postgresql-converter/db_converter.py gitlabhq_production.mysql db/database.sql
-sudo -u git -H ed -s db/database.sql < mysql-postgresql-converter/move_drop_indexes.ed
-
-# Compress database backup
-# Warning: If you have GitLab 7.12.0 or older skip this step and import the database.sql directly into the backup with:
-# sudo -u git -H tar rf TIMESTAMP_gitlab_backup.tar db/database.sql
-# The compressed database dump is not supported at 7.12.0 and older.
-sudo -u git -H gzip db/database.sql
-
-# Replace the MySQL dump in TIMESTAMP_gitlab_backup.tar.
-# Warning: if you forget to replace TIMESTAMP below, tar will create a new file
-# 'TIMESTAMP_gitlab_backup.tar' without giving an error.
-sudo -u git -H tar rf TIMESTAMP_gitlab_backup.tar db/database.sql.gz
-
-# Done! TIMESTAMP_gitlab_backup.tar can now be restored into a Postgres GitLab
-# installation.
-# See https://gitlab.com/gitlab-org/gitlab-ce/blob/master/doc/raketasks/backup_restore.md for more information about backups.
-```
+---
+last_updated: 2017-10-05
+---
+
+# Migrating from MySQL to PostgreSQL
+
+> **Note:** This guide assumes you have a working Omnibus GitLab instance with
+> MySQL and want to migrate to the bundled PostgreSQL database.
+
+## Prerequisites
+
+First, we'll need to enable the bundled PostgreSQL database with up-to-date
+schema. Next, we'll use [pgloader](http://pgloader.io) to migrate the data
+from the old MySQL database to the new PostgreSQL one.
+
+Here's what you'll need to have installed:
+
+- pgloader 3.4.1+
+- Omnibus GitLab
+- MySQL
+
+## Enable bundled PostgreSQL database
+
+1. Stop GitLab:
+
+    ``` bash
+    sudo gitlab-ctl stop
+    ```
+
+1. Edit `/etc/gitlab/gitlab.rb` to enable bundled PostgreSQL:
+
+    ```
+    postgresql['enable'] = true
+    ```
+
+1. Edit `/etc/gitlab/gitlab.rb` to use the bundled PostgreSQL. Please check
+   all the settings beginning with `db_`, such as `gitlab_rails['db_adapter']`
+   and the like. You could just comment all of them out so that we'll just use
+   the defaults.
+
+1. [Reconfigure GitLab] for the changes to take effect:
+
+    ``` bash
+    sudo gitlab-ctl reconfigure
+    ```
+
+1. Start Unicorn and PostgreSQL so that we can prepare the schema:
+
+    ``` bash
+    sudo gitlab-ctl start unicorn
+    sudo gitlab-ctl start postgresql
+    ```
+
+1. Run the following commands to prepare the schema:
+
+    ``` bash
+    sudo gitlab-rake db:create db:migrate
+    ```
+
+1. Stop Unicorn in case it's interfering with the next step:
+
+    ``` bash
+    sudo gitlab-ctl stop unicorn
+    ```
+
+After these steps, you'll have a fresh PostgreSQL database with up-to-date schema.
+
+## Migrate data from MySQL to PostgreSQL
+
+Now, you can use pgloader to migrate the data from MySQL to PostgreSQL:
+
+1. Save the following snippet in a `commands.load` file, and edit with your
+   database `username`, `password` and `host`:
+
+    ```
+    LOAD DATABASE
+         FROM mysql://username:password@host/gitlabhq_production
+         INTO postgresql://gitlab-psql@unix://var/opt/gitlab/postgresql:/gitlabhq_production
+
+    WITH include no drop, truncate, disable triggers, create no tables,
+         create no indexes, preserve index names, no foreign keys,
+         data only
+
+    ALTER SCHEMA 'gitlabhq_production' RENAME TO 'public'
+    ;
+    ```
+
+1. Start the migration:
+
+    ``` bash
+    sudo -u gitlab-psql pgloader commands.load
+    ```
+
+1. Once the migration finishes, start GitLab:
+
+    ``` bash
+    sudo gitlab-ctl start
+    ```
+
+Now, you can verify that everything worked by visiting GitLab.
+
+## Troubleshooting
+
+### Experiencing 500 errors after the migration
+
+If you experience 500 errors after the migration, try to clear the cache:
+
+``` bash
+sudo gitlab-rake cache:clear
+```
+
+[reconfigure GitLab]: ../administration/restart_gitlab.md#omnibus-gitlab-reconfigure
@@ -287,7 +287,7 @@ module API
      if sentry_enabled? && report_exception?(exception)
        define_params_for_grape_middleware
        sentry_context
-        Raven.capture_exception(exception)
+        Raven.capture_exception(exception, extra: params)
      end

      # lifted from https://github.com/rails/rails/blob/master/actionpack/lib/action_dispatch/middleware/debug_exceptions.rb#L60
......
class Gitlab::BackgroundMigration::CreateGpgKeySubkeysFromGpgKeys
class GpgKey < ActiveRecord::Base
self.table_name = 'gpg_keys'
include EachBatch
include ShaAttribute
sha_attribute :primary_keyid
sha_attribute :fingerprint
has_many :subkeys, class_name: 'GpgKeySubkey'
end
class GpgKeySubkey < ActiveRecord::Base
self.table_name = 'gpg_key_subkeys'
include ShaAttribute
sha_attribute :keyid
sha_attribute :fingerprint
end
def perform(gpg_key_id)
gpg_key = GpgKey.find_by(id: gpg_key_id)
return if gpg_key.nil?
return if gpg_key.subkeys.any?
create_subkeys(gpg_key)
update_signatures(gpg_key)
end
private
def create_subkeys(gpg_key)
gpg_subkeys = Gitlab::Gpg.subkeys_from_key(gpg_key.key)
gpg_subkeys[gpg_key.primary_keyid.upcase]&.each do |subkey_data|
gpg_key.subkeys.build(keyid: subkey_data[:keyid], fingerprint: subkey_data[:fingerprint])
end
# Improve latency by doing all INSERTs in a single call
GpgKey.transaction do
gpg_key.save!
end
end
def update_signatures(gpg_key)
return unless gpg_key.subkeys.exists?
InvalidGpgSignatureUpdateWorker.perform_async(gpg_key.id)
end
end
@@ -13,7 +13,7 @@ module Gitlab
          end

          if @command.save_incompleted && @pipeline.has_yaml_errors?
-            @pipeline.drop
+            @pipeline.drop!(:config_error)
          end

          return error(@pipeline.yaml_errors)
......
@@ -3,7 +3,9 @@ module Gitlab
    module Stage
      class Seed
        attr_reader :pipeline
+
        delegate :project, to: :pipeline
+        delegate :size, to: :@jobs

        def initialize(pipeline, stage, jobs)
          @pipeline = pipeline
......
@@ -2,7 +2,7 @@
module Gitlab
  # Checks if a set of migrations requires downtime or not.
  class EeCompatCheck
-    CE_REPO = 'https://gitlab.com/gitlab-org/gitlab-ce.git'.freeze
+    DEFAULT_CE_REPO = 'https://gitlab.com/gitlab-org/gitlab-ce.git'.freeze
    EE_REPO = 'https://gitlab.com/gitlab-org/gitlab-ee.git'.freeze
    CHECK_DIR = Rails.root.join('ee_compat_check')
    IGNORED_FILES_REGEX = /(VERSION|CHANGELOG\.md:\d+)/.freeze
@@ -20,7 +20,7 @@ module Gitlab
    attr_reader :ee_repo_dir, :patches_dir, :ce_repo, :ce_branch, :ee_branch_found
    attr_reader :failed_files

-    def initialize(branch:, ce_repo: CE_REPO)
+    def initialize(branch:, ce_repo: DEFAULT_CE_REPO)
      @ee_repo_dir = CHECK_DIR.join('ee-repo')
      @patches_dir = CHECK_DIR.join('patches')
      @ce_branch = branch
@@ -132,7 +132,7 @@ module Gitlab
    def check_patch(patch_path)
      step("Checking out master", %w[git checkout master])
      step("Resetting to latest master", %w[git reset --hard origin/master])
-      step("Fetching CE/#{ce_branch}", %W[git fetch #{CE_REPO} #{ce_branch}])
+      step("Fetching CE/#{ce_branch}", %W[git fetch #{ce_repo} #{ce_branch}])
      step(
        "Checking if #{patch_path} applies cleanly to EE/master",
        # Don't use --check here because it can result in a 0-exit status even
......
@@ -11,7 +11,7 @@ module Gitlab
      return false if ref_name.start_with?('refs/remotes/')

      Gitlab::Utils.system_silent(
-        %W(#{Gitlab.config.git.bin_path} check-ref-format refs/#{ref_name}))
+        %W(#{Gitlab.config.git.bin_path} check-ref-format --branch #{ref_name}))
    end
  end
end
@@ -34,6 +34,21 @@ module Gitlab
      end
    end

+    def subkeys_from_key(key)
+      using_tmp_keychain do
+        fingerprints = CurrentKeyChain.fingerprints_from_key(key)
+        raw_keys = GPGME::Key.find(:public, fingerprints)
+
+        raw_keys.each_with_object({}) do |raw_key, grouped_subkeys|
+          primary_subkey_id = raw_key.primary_subkey.keyid
+
+          grouped_subkeys[primary_subkey_id] = raw_key.subkeys[1..-1].map do |s|
+            { keyid: s.keyid, fingerprint: s.fingerprint }
+          end
+        end
+      end
+    end
+
    def user_infos_from_key(key)
      using_tmp_keychain do
        fingerprints = CurrentKeyChain.fingerprints_from_key(key)
......
...@@ -43,7 +43,9 @@ module Gitlab ...@@ -43,7 +43,9 @@ module Gitlab
# key belonging to the keyid. # key belonging to the keyid.
# This way we can add the key to the temporary keychain and extract # This way we can add the key to the temporary keychain and extract
# the proper signature. # the proper signature.
gpg_key = GpgKey.find_by(primary_keyid: verified_signature.fingerprint) # NOTE: the invoked method is #fingerprint but it's only returning
# 16 characters (the format used by keyid) instead of 40.
gpg_key = find_gpg_key(verified_signature.fingerprint)
if gpg_key if gpg_key
Gitlab::Gpg::CurrentKeyChain.add(gpg_key.key) Gitlab::Gpg::CurrentKeyChain.add(gpg_key.key)
...@@ -74,7 +76,7 @@ module Gitlab ...@@ -74,7 +76,7 @@ module Gitlab
commit_sha: @commit.sha, commit_sha: @commit.sha,
project: @commit.project, project: @commit.project,
gpg_key: gpg_key, gpg_key: gpg_key,
gpg_key_primary_keyid: gpg_key&.primary_keyid || verified_signature.fingerprint, gpg_key_primary_keyid: gpg_key&.keyid || verified_signature.fingerprint,
gpg_key_user_name: user_infos[:name], gpg_key_user_name: user_infos[:name],
gpg_key_user_email: user_infos[:email], gpg_key_user_email: user_infos[:email],
verification_status: verification_status verification_status: verification_status
...@@ -98,6 +100,10 @@ module Gitlab ...@@ -98,6 +100,10 @@ module Gitlab
def user_infos(gpg_key) def user_infos(gpg_key)
gpg_key&.verified_user_infos&.first || gpg_key&.user_infos&.first || {} gpg_key&.verified_user_infos&.first || gpg_key&.user_infos&.first || {}
end end
def find_gpg_key(keyid)
GpgKey.find_by(primary_keyid: keyid) || GpgKeySubkey.find_by(keyid: keyid)
end
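# Illustrative note: verified_signature.fingerprint only yields the
# 16-character keyid (see the NOTE above), and that keyid may identify either
# a primary key or a subkey — hence the two-step lookup in #find_gpg_key.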
end end
end end
end end
...@@ -9,7 +9,7 @@ module Gitlab ...@@ -9,7 +9,7 @@ module Gitlab
GpgSignature GpgSignature
.select(:id, :commit_sha, :project_id) .select(:id, :commit_sha, :project_id)
.where('gpg_key_id IS NULL OR verification_status <> ?', GpgSignature.verification_statuses[:verified]) .where('gpg_key_id IS NULL OR verification_status <> ?', GpgSignature.verification_statuses[:verified])
.where(gpg_key_primary_keyid: @gpg_key.primary_keyid) .where(gpg_key_primary_keyid: @gpg_key.keyids)
.find_each { |sig| sig.gpg_commit.update_signature!(sig) } .find_each { |sig| sig.gpg_commit.update_signature!(sig) }
end end
end end
......
require 'json'
module RspecFlaky
class Config
def self.generate_report?
ENV['FLAKY_RSPEC_GENERATE_REPORT'] == 'true'
end
def self.suite_flaky_examples_report_path
ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'] || Rails.root.join("rspec_flaky/suite-report.json")
end
def self.flaky_examples_report_path
ENV['FLAKY_RSPEC_REPORT_PATH'] || Rails.root.join("rspec_flaky/report.json")
end
def self.new_flaky_examples_report_path
ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] || Rails.root.join("rspec_flaky/new-report.json")
end
end
end
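# Illustrative usage (paths hypothetical): each helper prefers its env
# variable and falls back to a default under rspec_flaky/:
#
#   ENV['FLAKY_RSPEC_REPORT_PATH'] = 'tmp/flaky.json'
#   RspecFlaky::Config.flaky_examples_report_path # => "tmp/flaky.json"
#
#   ENV.delete('FLAKY_RSPEC_REPORT_PATH')
#   RspecFlaky::Config.flaky_examples_report_path
#   # => Rails.root.join("rspec_flaky/report.json")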
...@@ -9,24 +9,21 @@ module RspecFlaky ...@@ -9,24 +9,21 @@ module RspecFlaky
line: example.line, line: example.line,
description: example.description, description: example.description,
last_attempts_count: example.attempts, last_attempts_count: example.attempts,
flaky_reports: 1) flaky_reports: 0)
else else
super super
end end
end end
def first_flaky_at def update_flakiness!(last_attempts_count: nil)
self[:first_flaky_at] || Time.now self.first_flaky_at ||= Time.now
end self.last_flaky_at = Time.now
self.flaky_reports += 1
def last_flaky_at self.last_attempts_count = last_attempts_count if last_attempts_count
Time.now
end
def last_flaky_job if ENV['CI_PROJECT_URL'] && ENV['CI_JOB_ID']
return unless ENV['CI_PROJECT_URL'] && ENV['CI_JOB_ID'] self.last_flaky_job = "#{ENV['CI_PROJECT_URL']}/-/jobs/#{ENV['CI_JOB_ID']}"
end
"#{ENV['CI_PROJECT_URL']}/-/jobs/#{ENV['CI_JOB_ID']}"
end end
def to_h def to_h
......
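# Illustrative flow of the new #update_flakiness! (values hypothetical):
#
#   example.update_flakiness!(last_attempts_count: 3)
#   example.first_flaky_at      # set on the first report, preserved afterwards
#   example.last_flaky_at       # refreshed to Time.now on every report
#   example.flaky_reports       # incremented by 1
#   example.last_attempts_count # => 3
#   example.last_flaky_job      # CI job URL when CI_PROJECT_URL/CI_JOB_ID are set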
require 'json'
module RspecFlaky
class FlakyExamplesCollection < SimpleDelegator
def self.from_json(json)
new(JSON.parse(json))
end
def initialize(collection = {})
unless collection.is_a?(Hash)
raise ArgumentError, "`collection` must be a Hash, #{collection.class} given!"
end
collection_of_flaky_examples =
collection.map do |uid, example|
[
uid,
example.is_a?(RspecFlaky::FlakyExample) ? example : RspecFlaky::FlakyExample.new(example)
]
end
super(Hash[collection_of_flaky_examples])
end
def to_report
Hash[map { |uid, example| [uid, example.to_h] }].deep_symbolize_keys
end
def -(other)
unless other.respond_to?(:key?)
raise ArgumentError, "`other` must respond to `#key?`, #{other.class} does not!"
end
self.class.new(reject { |uid, _| other.key?(uid) })
end
end
end
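# Illustrative usage (variable names hypothetical): collections round-trip
# through JSON, and subtraction isolates examples absent from another
# collection — which the Listener below uses to derive newly flaky examples:
#
#   suite = RspecFlaky::FlakyExamplesCollection.from_json(File.read(path))
#   new_ones = current_run_collection - suite
#   new_ones.to_report # => plain, JSON-ready hash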
...@@ -2,11 +2,15 @@ require 'json' ...@@ -2,11 +2,15 @@ require 'json'
module RspecFlaky module RspecFlaky
class Listener class Listener
attr_reader :all_flaky_examples, :new_flaky_examples # - suite_flaky_examples: contains all the currently tracked flaky examples
# for the whole RSpec suite
def initialize # - flaky_examples: contains the examples detected as flaky during the
@new_flaky_examples = {} # current RSpec run
@all_flaky_examples = init_all_flaky_examples attr_reader :suite_flaky_examples, :flaky_examples
def initialize(suite_flaky_examples_json = nil)
@flaky_examples = FlakyExamplesCollection.new
@suite_flaky_examples = init_suite_flaky_examples(suite_flaky_examples_json)
end end
def example_passed(notification) def example_passed(notification)
...@@ -14,29 +18,21 @@ module RspecFlaky ...@@ -14,29 +18,21 @@ module RspecFlaky
return unless current_example.attempts > 1 return unless current_example.attempts > 1
flaky_example_hash = all_flaky_examples[current_example.uid] flaky_example = suite_flaky_examples.fetch(current_example.uid) { FlakyExample.new(current_example) }
flaky_example.update_flakiness!(last_attempts_count: current_example.attempts)
all_flaky_examples[current_example.uid] =
if flaky_example_hash flaky_examples[current_example.uid] = flaky_example
FlakyExample.new(flaky_example_hash).tap do |ex|
ex.last_attempts_count = current_example.attempts
ex.flaky_reports += 1
end
else
FlakyExample.new(current_example).tap do |ex|
new_flaky_examples[current_example.uid] = ex
end
end
end end
def dump_summary(_) def dump_summary(_)
write_report_file(all_flaky_examples, all_flaky_examples_report_path) write_report_file(flaky_examples, RspecFlaky::Config.flaky_examples_report_path)
new_flaky_examples = flaky_examples - suite_flaky_examples
if new_flaky_examples.any? if new_flaky_examples.any?
Rails.logger.warn "\nNew flaky examples detected:\n" Rails.logger.warn "\nNew flaky examples detected:\n"
Rails.logger.warn JSON.pretty_generate(to_report(new_flaky_examples)) Rails.logger.warn JSON.pretty_generate(new_flaky_examples.to_report)
write_report_file(new_flaky_examples, new_flaky_examples_report_path) write_report_file(new_flaky_examples, RspecFlaky::Config.new_flaky_examples_report_path)
end end
end end
...@@ -46,30 +42,23 @@ module RspecFlaky ...@@ -46,30 +42,23 @@ module RspecFlaky
private private
def init_all_flaky_examples def init_suite_flaky_examples(suite_flaky_examples_json = nil)
return {} unless File.exist?(all_flaky_examples_report_path) unless suite_flaky_examples_json
return {} unless File.exist?(RspecFlaky::Config.suite_flaky_examples_report_path)
all_flaky_examples = JSON.parse(File.read(all_flaky_examples_report_path)) suite_flaky_examples_json = File.read(RspecFlaky::Config.suite_flaky_examples_report_path)
end
Hash[(all_flaky_examples || {}).map { |k, ex| [k, FlakyExample.new(ex)] }] FlakyExamplesCollection.from_json(suite_flaky_examples_json)
end end
def write_report_file(examples, file_path) def write_report_file(examples_collection, file_path)
return unless ENV['FLAKY_RSPEC_GENERATE_REPORT'] == 'true' return unless RspecFlaky::Config.generate_report?
report_path_dir = File.dirname(file_path) report_path_dir = File.dirname(file_path)
FileUtils.mkdir_p(report_path_dir) unless Dir.exist?(report_path_dir) FileUtils.mkdir_p(report_path_dir) unless Dir.exist?(report_path_dir)
File.write(file_path, JSON.pretty_generate(to_report(examples)))
end
def all_flaky_examples_report_path
@all_flaky_examples_report_path ||= ENV['ALL_FLAKY_RSPEC_REPORT_PATH'] ||
Rails.root.join("rspec_flaky/all-report.json")
end
def new_flaky_examples_report_path File.write(file_path, JSON.pretty_generate(examples_collection.to_report))
@new_flaky_examples_report_path ||= ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] ||
Rails.root.join("rspec_flaky/new-report.json")
end end
end end
end end
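# Illustrative end-to-end flow (assumed): RSpec wires the listener up, retried
# examples are recorded as they pass, and dump_summary writes both reports:
#
#   listener = RspecFlaky::Listener.new(File.read(suite_report_path))
#   # ... #example_passed fires for every passing example; only examples with
#   # attempts > 1 are tracked ...
#   listener.dump_summary(nil)
#   # writes RspecFlaky::Config.flaky_examples_report_path and, when new flaky
#   # examples were found, new_flaky_examples_report_path as well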
...@@ -4,7 +4,10 @@ namespace :gitlab do ...@@ -4,7 +4,10 @@ namespace :gitlab do
task :ee_compat_check, [:branch] => :environment do |_, args| task :ee_compat_check, [:branch] => :environment do |_, args|
opts = opts =
if ENV['CI'] if ENV['CI']
{ branch: ENV['CI_COMMIT_REF_NAME'] } {
ce_repo: ENV['CI_REPOSITORY_URL'],
branch: ENV['CI_COMMIT_REF_NAME']
}
else else
unless args[:branch] unless args[:branch]
puts "Must specify a branch as an argument".color(:red) puts "Must specify a branch as an argument".color(:red)
......
...@@ -47,6 +47,7 @@ FactoryGirl.define do ...@@ -47,6 +47,7 @@ FactoryGirl.define do
trait :invalid do trait :invalid do
config(rspec: nil) config(rspec: nil)
failure_reason :config_error
end end
trait :blocked do trait :blocked do
......
require_relative '../support/gpg_helpers'
FactoryGirl.define do
factory :gpg_key_subkey do
gpg_key
sequence(:keyid) { |n| "keyid-#{n}" }
sequence(:fingerprint) { |n| "fingerprint-#{n}" }
end
end
...@@ -4,5 +4,9 @@ FactoryGirl.define do ...@@ -4,5 +4,9 @@ FactoryGirl.define do
factory :gpg_key do factory :gpg_key do
key GpgHelpers::User1.public_key key GpgHelpers::User1.public_key
user user
factory :gpg_key_with_subkeys do
key GpgHelpers::User1.public_key_with_extra_signing_key
end
end end
end end
...@@ -5,7 +5,7 @@ FactoryGirl.define do ...@@ -5,7 +5,7 @@ FactoryGirl.define do
commit_sha { Digest::SHA1.hexdigest(SecureRandom.hex) } commit_sha { Digest::SHA1.hexdigest(SecureRandom.hex) }
project project
gpg_key gpg_key
gpg_key_primary_keyid { gpg_key.primary_keyid } gpg_key_primary_keyid { gpg_key.keyid }
verification_status :verified verification_status :verified
end end
end end
...@@ -20,6 +20,18 @@ feature 'Profile > GPG Keys' do ...@@ -20,6 +20,18 @@ feature 'Profile > GPG Keys' do
expect(page).to have_content('bette.cartwright@example.net Unverified') expect(page).to have_content('bette.cartwright@example.net Unverified')
expect(page).to have_content(GpgHelpers::User2.fingerprint) expect(page).to have_content(GpgHelpers::User2.fingerprint)
end end
scenario 'with multiple subkeys' do
fill_in('Key', with: GpgHelpers::User3.public_key)
click_button('Add key')
expect(page).to have_content('john.doe@example.com Unverified')
expect(page).to have_content(GpgHelpers::User3.fingerprint)
GpgHelpers::User3.subkey_fingerprints.each do |fingerprint|
expect(page).to have_content(fingerprint)
end
end
end end
scenario 'User sees their key' do scenario 'User sees their key' do
......
...@@ -162,6 +162,16 @@ describe 'Pipelines', :js do ...@@ -162,6 +162,16 @@ describe 'Pipelines', :js do
expect(page).to have_selector( expect(page).to have_selector(
%Q{span[data-original-title="#{pipeline.yaml_errors}"]}) %Q{span[data-original-title="#{pipeline.yaml_errors}"]})
end end
it 'contains badge that indicates failure reason' do
expect(page).to have_content 'error'
end
it 'contains badge with tooltip which contains failure reason' do
expect(pipeline.failure_reason?).to eq true
expect(page).to have_selector(
%Q{span[data-original-title="#{pipeline.present.failure_reason}"]})
end
end end
context 'with manual actions' do context 'with manual actions' do
......
...@@ -36,10 +36,10 @@ describe 'create_tokens' do ...@@ -36,10 +36,10 @@ describe 'create_tokens' do
expect(keys).to all(match(HEX_KEY)) expect(keys).to all(match(HEX_KEY))
end end
it 'generates an RSA key for jws_private_key' do it 'generates an RSA key for openid_connect_signing_key' do
create_tokens create_tokens
keys = secrets.values_at(:jws_private_key) keys = secrets.values_at(:openid_connect_signing_key)
expect(keys.uniq).to eq(keys) expect(keys.uniq).to eq(keys)
expect(keys).to all(match(RSA_KEY)) expect(keys).to all(match(RSA_KEY))
...@@ -49,7 +49,7 @@ describe 'create_tokens' do ...@@ -49,7 +49,7 @@ describe 'create_tokens' do
expect(self).to receive(:warn_missing_secret).with('secret_key_base') expect(self).to receive(:warn_missing_secret).with('secret_key_base')
expect(self).to receive(:warn_missing_secret).with('otp_key_base') expect(self).to receive(:warn_missing_secret).with('otp_key_base')
expect(self).to receive(:warn_missing_secret).with('db_key_base') expect(self).to receive(:warn_missing_secret).with('db_key_base')
expect(self).to receive(:warn_missing_secret).with('jws_private_key') expect(self).to receive(:warn_missing_secret).with('openid_connect_signing_key')
create_tokens create_tokens
end end
...@@ -61,7 +61,7 @@ describe 'create_tokens' do ...@@ -61,7 +61,7 @@ describe 'create_tokens' do
expect(new_secrets['secret_key_base']).to eq(secrets.secret_key_base) expect(new_secrets['secret_key_base']).to eq(secrets.secret_key_base)
expect(new_secrets['otp_key_base']).to eq(secrets.otp_key_base) expect(new_secrets['otp_key_base']).to eq(secrets.otp_key_base)
expect(new_secrets['db_key_base']).to eq(secrets.db_key_base) expect(new_secrets['db_key_base']).to eq(secrets.db_key_base)
expect(new_secrets['jws_private_key']).to eq(secrets.jws_private_key) expect(new_secrets['openid_connect_signing_key']).to eq(secrets.openid_connect_signing_key)
end end
create_tokens create_tokens
...@@ -77,7 +77,7 @@ describe 'create_tokens' do ...@@ -77,7 +77,7 @@ describe 'create_tokens' do
context 'when the other secrets all exist' do context 'when the other secrets all exist' do
before do before do
secrets.db_key_base = 'db_key_base' secrets.db_key_base = 'db_key_base'
secrets.jws_private_key = 'jws_private_key' secrets.openid_connect_signing_key = 'openid_connect_signing_key'
allow(File).to receive(:exist?).with('.secret').and_return(true) allow(File).to receive(:exist?).with('.secret').and_return(true)
allow(File).to receive(:read).with('.secret').and_return('file_key') allow(File).to receive(:read).with('.secret').and_return('file_key')
...@@ -88,7 +88,7 @@ describe 'create_tokens' do ...@@ -88,7 +88,7 @@ describe 'create_tokens' do
stub_env('SECRET_KEY_BASE', 'env_key') stub_env('SECRET_KEY_BASE', 'env_key')
secrets.secret_key_base = 'secret_key_base' secrets.secret_key_base = 'secret_key_base'
secrets.otp_key_base = 'otp_key_base' secrets.otp_key_base = 'otp_key_base'
secrets.jws_private_key = 'jws_private_key' secrets.openid_connect_signing_key = 'openid_connect_signing_key'
end end
it 'does not issue a warning' do it 'does not issue a warning' do
...@@ -114,7 +114,7 @@ describe 'create_tokens' do ...@@ -114,7 +114,7 @@ describe 'create_tokens' do
before do before do
secrets.secret_key_base = 'secret_key_base' secrets.secret_key_base = 'secret_key_base'
secrets.otp_key_base = 'otp_key_base' secrets.otp_key_base = 'otp_key_base'
secrets.jws_private_key = 'jws_private_key' secrets.openid_connect_signing_key = 'openid_connect_signing_key'
end end
it 'does not write any files' do it 'does not write any files' do
...@@ -129,7 +129,7 @@ describe 'create_tokens' do ...@@ -129,7 +129,7 @@ describe 'create_tokens' do
expect(secrets.secret_key_base).to eq('secret_key_base') expect(secrets.secret_key_base).to eq('secret_key_base')
expect(secrets.otp_key_base).to eq('otp_key_base') expect(secrets.otp_key_base).to eq('otp_key_base')
expect(secrets.db_key_base).to eq('db_key_base') expect(secrets.db_key_base).to eq('db_key_base')
expect(secrets.jws_private_key).to eq('jws_private_key') expect(secrets.openid_connect_signing_key).to eq('openid_connect_signing_key')
end end
it 'deletes the .secret file' do it 'deletes the .secret file' do
...@@ -153,7 +153,7 @@ describe 'create_tokens' do ...@@ -153,7 +153,7 @@ describe 'create_tokens' do
expect(new_secrets['secret_key_base']).to eq('file_key') expect(new_secrets['secret_key_base']).to eq('file_key')
expect(new_secrets['otp_key_base']).to eq('file_key') expect(new_secrets['otp_key_base']).to eq('file_key')
expect(new_secrets['db_key_base']).to eq('db_key_base') expect(new_secrets['db_key_base']).to eq('db_key_base')
expect(new_secrets['jws_private_key']).to eq('jws_private_key') expect(new_secrets['openid_connect_signing_key']).to eq('openid_connect_signing_key')
end end
create_tokens create_tokens
......
import Vue from 'vue';
import component from '~/cycle_analytics/components/total_time_component.vue';
import mountComponent from '../helpers/vue_mount_component_helper';
describe('Total time component', () => {
let vm;
let Component;
beforeEach(() => {
Component = Vue.extend(component);
});
afterEach(() => {
vm.$destroy();
});
describe('With data', () => {
it('should render information for days and hours', () => {
vm = mountComponent(Component, {
time: {
days: 3,
hours: 4,
},
});
expect(vm.$el.textContent.trim()).toEqual('3 days 4 hrs');
});
it('should render information for hours and minutes', () => {
vm = mountComponent(Component, {
time: {
hours: 4,
mins: 35,
},
});
expect(vm.$el.textContent.trim()).toEqual('4 hrs 35 mins');
});
it('should render information for seconds', () => {
vm = mountComponent(Component, {
time: {
seconds: 45,
},
});
expect(vm.$el.textContent.trim()).toEqual('45 s');
});
});
describe('Without data', () => {
it('should render no information', () => {
vm = mountComponent(Component);
expect(vm.$el.textContent.trim()).toEqual('--');
});
});
});
import Vue from 'vue'; import Vue from 'vue';
import GraphPath from '~/monitoring/components/graph_path.vue'; import GraphPath from '~/monitoring/components/graph/path.vue';
import createTimeSeries from '~/monitoring/utils/multiple_time_series'; import createTimeSeries from '~/monitoring/utils/multiple_time_series';
import { singleRowMetricsMultipleSeries, convertDatesMultipleSeries } from './mock_data'; import { singleRowMetricsMultipleSeries, convertDatesMultipleSeries } from './mock_data';
......
...@@ -44,7 +44,7 @@ describe('Graph', () => { ...@@ -44,7 +44,7 @@ describe('Graph', () => {
.not.toEqual(-1); .not.toEqual(-1);
}); });
it('outterViewBox gets a width and height property based on the DOM size of the element', () => { it('outerViewBox gets a width and height property based on the DOM size of the element', () => {
const component = createComponent({ const component = createComponent({
graphData: convertedMetrics[1], graphData: convertedMetrics[1],
classType: 'col-md-6', classType: 'col-md-6',
...@@ -52,8 +52,8 @@ describe('Graph', () => { ...@@ -52,8 +52,8 @@ describe('Graph', () => {
deploymentData, deploymentData,
}); });
const viewBoxArray = component.outterViewBox.split(' '); const viewBoxArray = component.outerViewBox.split(' ');
expect(typeof component.outterViewBox).toEqual('string'); expect(typeof component.outerViewBox).toEqual('string');
expect(viewBoxArray[2]).toEqual(component.graphWidth.toString()); expect(viewBoxArray[2]).toEqual(component.graphWidth.toString());
expect(viewBoxArray[3]).toEqual(component.graphHeight.toString()); expect(viewBoxArray[3]).toEqual(component.graphHeight.toString());
}); });
......
...@@ -125,4 +125,23 @@ describe('Pipeline Url Component', () => { ...@@ -125,4 +125,23 @@ describe('Pipeline Url Component', () => {
component.$el.querySelector('.js-pipeline-url-autodevops').textContent.trim(), component.$el.querySelector('.js-pipeline-url-autodevops').textContent.trim(),
).toEqual('Auto DevOps'); ).toEqual('Auto DevOps');
}); });
it('should render error badge when pipeline has a failure reason set', () => {
const component = new PipelineUrlComponent({
propsData: {
pipeline: {
id: 1,
path: 'foo',
flags: {
failure_reason: true,
},
failure_reason: 'some reason',
},
autoDevopsHelpPath: 'foo',
},
}).$mount();
expect(component.$el.querySelector('.js-pipeline-url-failure').textContent).toContain('error');
expect(component.$el.querySelector('.js-pipeline-url-failure').getAttribute('data-original-title')).toContain('some reason');
});
}); });
require 'spec_helper'
describe Gitlab::BackgroundMigration::CreateGpgKeySubkeysFromGpgKeys, :migration, schema: 20171005130944 do
context 'when GpgKey exists' do
let!(:gpg_key) { create(:gpg_key, key: GpgHelpers::User3.public_key) }
before do
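# Subkeys were already generated by the gpg_key factory's after_create hook
# (assumed from the GpgKey specs in this commit); delete them so #perform can
# recreate them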
GpgKeySubkey.destroy_all
end
it 'generates the subkeys' do
expect do
described_class.new.perform(gpg_key.id)
end.to change { gpg_key.subkeys.count }.from(0).to(2)
end
it 'schedules the signature update worker' do
expect(InvalidGpgSignatureUpdateWorker).to receive(:perform_async).with(gpg_key.id)
described_class.new.perform(gpg_key.id)
end
end
context 'when GpgKey does not exist' do
it 'does not do anything' do
expect(Gitlab::Gpg).not_to receive(:subkeys_from_key)
expect(InvalidGpgSignatureUpdateWorker).not_to receive(:perform_async)
described_class.new.perform(123)
end
end
end
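# Illustrative summary (assumed from the expectations above): #perform loads
# the GpgKey, extracts subkeys via Gitlab::Gpg.subkeys_from_key, persists
# them, and enqueues InvalidGpgSignatureUpdateWorker for the key; a missing
# key id is a no-op.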
...@@ -55,6 +55,10 @@ describe Gitlab::Ci::Pipeline::Chain::Validate::Config do ...@@ -55,6 +55,10 @@ describe Gitlab::Ci::Pipeline::Chain::Validate::Config do
it 'fails the pipeline' do it 'fails the pipeline' do
expect(pipeline.reload).to be_failed expect(pipeline.reload).to be_failed
end end
it 'sets a config error failure reason' do
expect(pipeline.reload.config_error?).to eq true
end
end end
context 'when saving incomplete pipeline is not allowed' do context 'when saving incomplete pipeline is not allowed' do
......
...@@ -11,6 +11,12 @@ describe Gitlab::Ci::Stage::Seed do ...@@ -11,6 +11,12 @@ describe Gitlab::Ci::Stage::Seed do
described_class.new(pipeline, 'test', builds) described_class.new(pipeline, 'test', builds)
end end
describe '#size' do
it 'returns the number of jobs in the stage' do
expect(subject.size).to eq 2
end
end
describe '#stage' do describe '#stage' do
it 'returns hash attributes of a stage' do it 'returns hash attributes of a stage' do
expect(subject.stage).to be_a Hash expect(subject.stage).to be_a Hash
......
...@@ -4,6 +4,7 @@ describe Gitlab::GitRefValidator do ...@@ -4,6 +4,7 @@ describe Gitlab::GitRefValidator do
it { expect(described_class.validate('feature/new')).to be_truthy } it { expect(described_class.validate('feature/new')).to be_truthy }
it { expect(described_class.validate('implement_@all')).to be_truthy } it { expect(described_class.validate('implement_@all')).to be_truthy }
it { expect(described_class.validate('my_new_feature')).to be_truthy } it { expect(described_class.validate('my_new_feature')).to be_truthy }
it { expect(described_class.validate('my-branch')).to be_truthy }
it { expect(described_class.validate('#1')).to be_truthy } it { expect(described_class.validate('#1')).to be_truthy }
it { expect(described_class.validate('feature/refs/heads/foo')).to be_truthy } it { expect(described_class.validate('feature/refs/heads/foo')).to be_truthy }
it { expect(described_class.validate('feature/~new/')).to be_falsey } it { expect(described_class.validate('feature/~new/')).to be_falsey }
...@@ -22,4 +23,8 @@ describe Gitlab::GitRefValidator do ...@@ -22,4 +23,8 @@ describe Gitlab::GitRefValidator do
it { expect(described_class.validate('refs/remotes/')).to be_falsey } it { expect(described_class.validate('refs/remotes/')).to be_falsey }
it { expect(described_class.validate('refs/heads/feature')).to be_falsey } it { expect(described_class.validate('refs/heads/feature')).to be_falsey }
it { expect(described_class.validate('refs/remotes/origin')).to be_falsey } it { expect(described_class.validate('refs/remotes/origin')).to be_falsey }
it { expect(described_class.validate('-')).to be_falsey }
it { expect(described_class.validate('-branch')).to be_falsey }
it { expect(described_class.validate('.tag')).to be_falsey }
it { expect(described_class.validate('my branch')).to be_falsey }
end end
...@@ -63,6 +63,45 @@ describe Gitlab::Gpg::Commit do ...@@ -63,6 +63,45 @@ describe Gitlab::Gpg::Commit do
it_behaves_like 'returns the cached signature on second call' it_behaves_like 'returns the cached signature on second call'
end end
context 'commit signed with a subkey' do
let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: GpgHelpers::User3.emails.first }
let!(:user) { create(:user, email: GpgHelpers::User3.emails.first) }
let!(:gpg_key) do
create :gpg_key, key: GpgHelpers::User3.public_key, user: user
end
let(:gpg_key_subkey) do
gpg_key.subkeys.find_by(fingerprint: '0522DD29B98F167CD8421752E38FFCAF75ABD92A')
end
before do
allow(Rugged::Commit).to receive(:extract_signature)
.with(Rugged::Repository, commit_sha)
.and_return(
[
GpgHelpers::User3.signed_commit_signature,
GpgHelpers::User3.signed_commit_base_data
]
)
end
it 'returns a valid signature' do
expect(described_class.new(commit).signature).to have_attributes(
commit_sha: commit_sha,
project: project,
gpg_key: gpg_key_subkey,
gpg_key_primary_keyid: gpg_key_subkey.keyid,
gpg_key_user_name: GpgHelpers::User3.names.first,
gpg_key_user_email: GpgHelpers::User3.emails.first,
verification_status: 'verified'
)
end
it_behaves_like 'returns the cached signature on second call'
end
context 'user email does not match the committer email, but is the same user' do context 'user email does not match the committer email, but is the same user' do
let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: GpgHelpers::User2.emails.first } let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: GpgHelpers::User2.emails.first }
......
...@@ -2,17 +2,16 @@ require 'rails_helper' ...@@ -2,17 +2,16 @@ require 'rails_helper'
RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do
describe '#run' do describe '#run' do
let!(:commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33' } let(:signature) { [GpgHelpers::User1.signed_commit_signature, GpgHelpers::User1.signed_commit_base_data] }
let!(:project) { create :project, :repository, path: 'sample-project' } let(:committer_email) { GpgHelpers::User1.emails.first }
let!(:commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33' }
let!(:project) { create :project, :repository, path: 'sample-project' }
let!(:raw_commit) do let!(:raw_commit) do
raw_commit = double( raw_commit = double(
:raw_commit, :raw_commit,
signature: [ signature: signature,
GpgHelpers::User1.signed_commit_signature,
GpgHelpers::User1.signed_commit_base_data
],
sha: commit_sha, sha: commit_sha,
committer_email: GpgHelpers::User1.emails.first committer_email: committer_email
) )
allow(raw_commit).to receive :save! allow(raw_commit).to receive :save!
...@@ -29,12 +28,7 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do ...@@ -29,12 +28,7 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do
allow(Rugged::Commit).to receive(:extract_signature) allow(Rugged::Commit).to receive(:extract_signature)
.with(Rugged::Repository, commit_sha) .with(Rugged::Repository, commit_sha)
.and_return( .and_return(signature)
[
GpgHelpers::User1.signed_commit_signature,
GpgHelpers::User1.signed_commit_base_data
]
)
end end
context 'gpg signature did have an associated gpg key which was removed later' do context 'gpg signature did have an associated gpg key which was removed later' do
...@@ -183,5 +177,34 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do ...@@ -183,5 +177,34 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do
) )
end end
end end
context 'gpg signature did not have an associated gpg subkey' do
let(:signature) { [GpgHelpers::User3.signed_commit_signature, GpgHelpers::User3.signed_commit_base_data] }
let(:committer_email) { GpgHelpers::User3.emails.first }
let!(:user) { create :user, email: GpgHelpers::User3.emails.first }
let!(:invalid_gpg_signature) do
create :gpg_signature,
project: project,
commit_sha: commit_sha,
gpg_key: nil,
gpg_key_primary_keyid: GpgHelpers::User3.subkey_fingerprints.last[24..-1],
verification_status: 'unknown_key'
end
it 'updates the signature to be valid when the missing gpg key is added' do
# InvalidGpgSignatureUpdater is called by the after_create hook
gpg_key = create(:gpg_key, key: GpgHelpers::User3.public_key, user: user)
subkey = gpg_key.subkeys.last
expect(invalid_gpg_signature.reload).to have_attributes(
project: project,
commit_sha: commit_sha,
gpg_key_subkey_id: subkey.id,
gpg_key_primary_keyid: subkey.keyid,
verification_status: 'verified'
)
end
end
end end
end end
...@@ -28,6 +28,23 @@ describe Gitlab::Gpg do ...@@ -28,6 +28,23 @@ describe Gitlab::Gpg do
end end
end end
describe '.subkeys_from_key' do
it 'returns the subkeys by primary key' do
all_subkeys = described_class.subkeys_from_key(GpgHelpers::User1.public_key)
subkeys = all_subkeys[GpgHelpers::User1.primary_keyid]
expect(subkeys).to be_present
expect(subkeys.first[:keyid]).to be_present
expect(subkeys.first[:fingerprint]).to be_present
end
it 'returns an empty array when there are no subkeys' do
all_subkeys = described_class.subkeys_from_key(GpgHelpers::User4.public_key)
expect(all_subkeys[GpgHelpers::User4.primary_keyid]).to be_empty
end
end
describe '.user_infos_from_key' do describe '.user_infos_from_key' do
it 'returns the names and emails' do it 'returns the names and emails' do
user_infos = described_class.user_infos_from_key(GpgHelpers::User1.public_key) user_infos = described_class.user_infos_from_key(GpgHelpers::User1.public_key)
......
...@@ -224,6 +224,7 @@ Ci::Pipeline: ...@@ -224,6 +224,7 @@ Ci::Pipeline:
- auto_canceled_by_id - auto_canceled_by_id
- pipeline_schedule_id - pipeline_schedule_id
- config_source - config_source
- failure_reason
- protected - protected
Ci::Stage: Ci::Stage:
- id - id
......
require 'spec_helper'
describe RspecFlaky::Config, :aggregate_failures do
before do
# Stub these env variables, otherwise specs don't behave the same on CI
stub_env('FLAKY_RSPEC_GENERATE_REPORT', nil)
stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', nil)
stub_env('FLAKY_RSPEC_REPORT_PATH', nil)
stub_env('NEW_FLAKY_RSPEC_REPORT_PATH', nil)
end
describe '.generate_report?' do
context "when ENV['FLAKY_RSPEC_GENERATE_REPORT'] is not set" do
it 'returns false' do
expect(described_class).not_to be_generate_report
end
end
context "when ENV['FLAKY_RSPEC_GENERATE_REPORT'] is set to 'false'" do
before do
stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'false')
end
it 'returns false' do
expect(described_class).not_to be_generate_report
end
end
context "when ENV['FLAKY_RSPEC_GENERATE_REPORT'] is set to 'true'" do
before do
stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'true')
end
it 'returns true' do
expect(described_class).to be_generate_report
end
end
end
describe '.suite_flaky_examples_report_path' do
context "when ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'] is not set" do
it 'returns the default path' do
expect(Rails.root).to receive(:join).with('rspec_flaky/suite-report.json')
.and_return('root/rspec_flaky/suite-report.json')
expect(described_class.suite_flaky_examples_report_path).to eq('root/rspec_flaky/suite-report.json')
end
end
context "when ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'] is set" do
before do
stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', 'foo/suite-report.json')
end
it 'returns the value of the env variable' do
expect(described_class.suite_flaky_examples_report_path).to eq('foo/suite-report.json')
end
end
end
describe '.flaky_examples_report_path' do
context "when ENV['FLAKY_RSPEC_REPORT_PATH'] is not set" do
it 'returns the default path' do
expect(Rails.root).to receive(:join).with('rspec_flaky/report.json')
.and_return('root/rspec_flaky/report.json')
expect(described_class.flaky_examples_report_path).to eq('root/rspec_flaky/report.json')
end
end
context "when ENV['FLAKY_RSPEC_REPORT_PATH'] is set" do
before do
stub_env('FLAKY_RSPEC_REPORT_PATH', 'foo/report.json')
end
it 'returns the value of the env variable' do
expect(described_class.flaky_examples_report_path).to eq('foo/report.json')
end
end
end
describe '.new_flaky_examples_report_path' do
context "when ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] is not set" do
it 'returns the default path' do
expect(Rails.root).to receive(:join).with('rspec_flaky/new-report.json')
.and_return('root/rspec_flaky/new-report.json')
expect(described_class.new_flaky_examples_report_path).to eq('root/rspec_flaky/new-report.json')
end
end
context "when ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] is set" do
before do
stub_env('NEW_FLAKY_RSPEC_REPORT_PATH', 'foo/new-report.json')
end
it 'returns the value of the env variable' do
expect(described_class.new_flaky_examples_report_path).to eq('foo/new-report.json')
end
end
end
end
require 'spec_helper' require 'spec_helper'
describe RspecFlaky::FlakyExample do describe RspecFlaky::FlakyExample, :aggregate_failures do
let(:flaky_example_attrs) do let(:flaky_example_attrs) do
{ {
example_id: 'spec/foo/bar_spec.rb:2', example_id: 'spec/foo/bar_spec.rb:2',
...@@ -9,6 +9,7 @@ describe RspecFlaky::FlakyExample do ...@@ -9,6 +9,7 @@ describe RspecFlaky::FlakyExample do
description: 'hello world', description: 'hello world',
first_flaky_at: 1234, first_flaky_at: 1234,
last_flaky_at: 2345, last_flaky_at: 2345,
last_flaky_job: 'https://gitlab.com/gitlab-org/gitlab-ce/-/jobs/12',
last_attempts_count: 2, last_attempts_count: 2,
flaky_reports: 1 flaky_reports: 1
} }
...@@ -27,57 +28,78 @@ describe RspecFlaky::FlakyExample do ...@@ -27,57 +28,78 @@ describe RspecFlaky::FlakyExample do
end end
let(:example) { double(example_attrs) } let(:example) { double(example_attrs) }
before do
# Stub these env variables, otherwise specs don't behave the same on CI
stub_env('CI_PROJECT_URL', nil)
stub_env('CI_JOB_ID', nil)
end
describe '#initialize' do describe '#initialize' do
shared_examples 'a valid FlakyExample instance' do shared_examples 'a valid FlakyExample instance' do
it 'returns valid attributes' do let(:flaky_example) { described_class.new(args) }
flaky_example = described_class.new(args)
it 'returns valid attributes' do
expect(flaky_example.uid).to eq(flaky_example_attrs[:uid]) expect(flaky_example.uid).to eq(flaky_example_attrs[:uid])
expect(flaky_example.example_id).to eq(flaky_example_attrs[:example_id]) expect(flaky_example.file).to eq(flaky_example_attrs[:file])
expect(flaky_example.line).to eq(flaky_example_attrs[:line])
expect(flaky_example.description).to eq(flaky_example_attrs[:description])
expect(flaky_example.first_flaky_at).to eq(expected_first_flaky_at)
expect(flaky_example.last_flaky_at).to eq(expected_last_flaky_at)
expect(flaky_example.last_attempts_count).to eq(flaky_example_attrs[:last_attempts_count])
expect(flaky_example.flaky_reports).to eq(expected_flaky_reports)
end end
end end
context 'when given an Rspec::Example' do context 'when given an Rspec::Example' do
let(:args) { example } it_behaves_like 'a valid FlakyExample instance' do
let(:args) { example }
it_behaves_like 'a valid FlakyExample instance' let(:expected_first_flaky_at) { nil }
let(:expected_last_flaky_at) { nil }
let(:expected_flaky_reports) { 0 }
end
end end
context 'when given a hash' do context 'when given a hash' do
let(:args) { flaky_example_attrs } it_behaves_like 'a valid FlakyExample instance' do
let(:args) { flaky_example_attrs }
it_behaves_like 'a valid FlakyExample instance' let(:expected_flaky_reports) { flaky_example_attrs[:flaky_reports] }
let(:expected_first_flaky_at) { flaky_example_attrs[:first_flaky_at] }
let(:expected_last_flaky_at) { flaky_example_attrs[:last_flaky_at] }
end
end end
end end
describe '#to_h' do describe '#update_flakiness!' do
before do shared_examples 'an up-to-date FlakyExample instance' do
# Stub these env variables otherwise specs don't behave the same on the CI let(:flaky_example) { described_class.new(args) }
stub_env('CI_PROJECT_URL', nil)
stub_env('CI_JOB_ID', nil)
end
shared_examples 'a valid FlakyExample hash' do it 'updates the first_flaky_at' do
let(:additional_attrs) { {} } now = Time.now
expected_first_flaky_at = flaky_example.first_flaky_at ? flaky_example.first_flaky_at : now
Timecop.freeze(now) { flaky_example.update_flakiness! }
it 'returns a valid hash' do expect(flaky_example.first_flaky_at).to eq(expected_first_flaky_at)
flaky_example = described_class.new(args) end
final_hash = flaky_example_attrs
.merge(last_flaky_at: instance_of(Time), last_flaky_job: nil) it 'updates the last_flaky_at' do
.merge(additional_attrs) now = Time.now
Timecop.freeze(now) { flaky_example.update_flakiness! }
expect(flaky_example.to_h).to match(hash_including(final_hash)) expect(flaky_example.last_flaky_at).to eq(now)
end end
end
context 'when given an Rspec::Example' do it 'updates the flaky_reports' do
let(:args) { example } expected_flaky_reports = flaky_example.first_flaky_at ? flaky_example.flaky_reports + 1 : 1
expect { flaky_example.update_flakiness! }.to change { flaky_example.flaky_reports }.by(1)
expect(flaky_example.flaky_reports).to eq(expected_flaky_reports)
end
context 'when passed a :last_attempts_count' do
it 'updates the last_attempts_count' do
flaky_example.update_flakiness!(last_attempts_count: 42)
context 'when run locally' do expect(flaky_example.last_attempts_count).to eq(42)
it_behaves_like 'a valid FlakyExample hash' do
let(:additional_attrs) do
{ first_flaky_at: instance_of(Time) }
end
end end
end end
...@@ -87,10 +109,45 @@ describe RspecFlaky::FlakyExample do ...@@ -87,10 +109,45 @@ describe RspecFlaky::FlakyExample do
stub_env('CI_JOB_ID', 42) stub_env('CI_JOB_ID', 42)
end end
it_behaves_like 'a valid FlakyExample hash' do it 'updates the last_flaky_job' do
let(:additional_attrs) do flaky_example.update_flakiness!
{ first_flaky_at: instance_of(Time), last_flaky_job: "https://gitlab.com/gitlab-org/gitlab-ce/-/jobs/42" }
end expect(flaky_example.last_flaky_job).to eq('https://gitlab.com/gitlab-org/gitlab-ce/-/jobs/42')
end
end
end
context 'when given an Rspec::Example' do
it_behaves_like 'an up-to-date FlakyExample instance' do
let(:args) { example }
end
end
context 'when given a hash' do
it_behaves_like 'an up-to-date FlakyExample instance' do
let(:args) { flaky_example_attrs }
end
end
end
describe '#to_h' do
shared_examples 'a valid FlakyExample hash' do
let(:additional_attrs) { {} }
it 'returns a valid hash' do
flaky_example = described_class.new(args)
final_hash = flaky_example_attrs.merge(additional_attrs)
expect(flaky_example.to_h).to eq(final_hash)
end
end
context 'when given an Rspec::Example' do
let(:args) { example }
it_behaves_like 'a valid FlakyExample hash' do
let(:additional_attrs) do
{ first_flaky_at: nil, last_flaky_at: nil, last_flaky_job: nil, flaky_reports: 0 }
end end
end end
end end
......
require 'spec_helper'
describe RspecFlaky::FlakyExamplesCollection, :aggregate_failures do
let(:collection_hash) do
{
a: { example_id: 'spec/foo/bar_spec.rb:2' },
b: { example_id: 'spec/foo/baz_spec.rb:3' }
}
end
let(:collection_report) do
{
a: {
example_id: 'spec/foo/bar_spec.rb:2',
first_flaky_at: nil,
last_flaky_at: nil,
last_flaky_job: nil
},
b: {
example_id: 'spec/foo/baz_spec.rb:3',
first_flaky_at: nil,
last_flaky_at: nil,
last_flaky_job: nil
}
}
end
describe '.from_json' do
it 'accepts a JSON string' do
collection = described_class.from_json(JSON.pretty_generate(collection_hash))
expect(collection.to_report).to eq(described_class.new(collection_hash).to_report)
end
end
describe '#initialize' do
it 'accepts no argument' do
expect { described_class.new }.not_to raise_error
end
it 'accepts a hash' do
expect { described_class.new(collection_hash) }.not_to raise_error
end
it 'does not accept anything else' do
expect { described_class.new([1, 2, 3]) }.to raise_error(ArgumentError, "`collection` must be a Hash, Array given!")
end
end
describe '#to_report' do
it 'calls #to_h on the values' do
collection = described_class.new(collection_hash)
expect(collection.to_report).to eq(collection_report)
end
end
describe '#-' do
it 'returns only examples that are not present in the given collection' do
collection1 = described_class.new(collection_hash)
collection2 = described_class.new(
a: { example_id: 'spec/foo/bar_spec.rb:2' },
c: { example_id: 'spec/bar/baz_spec.rb:4' })
expect((collection2 - collection1).to_report).to eq(
c: {
example_id: 'spec/bar/baz_spec.rb:4',
first_flaky_at: nil,
last_flaky_at: nil,
last_flaky_job: nil
})
end
it 'fails if the given collection does not respond to `#key?`' do
collection = described_class.new(collection_hash)
expect { collection - [1, 2, 3] }.to raise_error(ArgumentError, "`other` must respond to `#key?`, Array does not!")
end
end
end
require 'spec_helper' require 'spec_helper'
describe RspecFlaky::Listener do describe RspecFlaky::Listener, :aggregate_failures do
let(:flaky_example_report) do let(:already_flaky_example_uid) { '6e869794f4cfd2badd93eb68719371d1' }
let(:suite_flaky_example_report) do
{ {
'abc123' => { already_flaky_example_uid => {
example_id: 'spec/foo/bar_spec.rb:2', example_id: 'spec/foo/bar_spec.rb:2',
file: 'spec/foo/bar_spec.rb', file: 'spec/foo/bar_spec.rb',
line: 2, line: 2,
description: 'hello world', description: 'hello world',
first_flaky_at: 1234, first_flaky_at: 1234,
last_flaky_at: instance_of(Time), last_flaky_at: 4321,
last_attempts_count: 2, last_attempts_count: 3,
flaky_reports: 1, flaky_reports: 1,
last_flaky_job: nil last_flaky_job: nil
} }
} }
end end
let(:example_attrs) do let(:already_flaky_example_attrs) do
{
id: 'spec/foo/bar_spec.rb:2',
metadata: {
file_path: 'spec/foo/bar_spec.rb',
line_number: 2,
full_description: 'hello world'
},
execution_result: double(status: 'passed', exception: nil)
}
end
let(:already_flaky_example) { RspecFlaky::FlakyExample.new(suite_flaky_example_report[already_flaky_example_uid]) }
let(:new_example_attrs) do
{ {
id: 'spec/foo/baz_spec.rb:3', id: 'spec/foo/baz_spec.rb:3',
metadata: { metadata: {
...@@ -36,14 +49,14 @@ describe RspecFlaky::Listener do ...@@ -36,14 +49,14 @@ describe RspecFlaky::Listener do
describe '#initialize' do describe '#initialize' do
shared_examples 'a valid Listener instance' do shared_examples 'a valid Listener instance' do
let(:expected_all_flaky_examples) { {} } let(:expected_suite_flaky_examples) { {} }
it 'returns a valid Listener instance' do it 'returns a valid Listener instance' do
listener = described_class.new listener = described_class.new
expect(listener.to_report(listener.all_flaky_examples)) expect(listener.to_report(listener.suite_flaky_examples))
.to match(hash_including(expected_all_flaky_examples)) .to eq(expected_suite_flaky_examples)
expect(listener.new_flaky_examples).to eq({}) expect(listener.flaky_examples).to eq({})
end end
end end
...@@ -51,16 +64,16 @@ describe RspecFlaky::Listener do ...@@ -51,16 +64,16 @@ describe RspecFlaky::Listener do
it_behaves_like 'a valid Listener instance' it_behaves_like 'a valid Listener instance'
end end
context 'when a report file exists and set by ALL_FLAKY_RSPEC_REPORT_PATH' do context 'when a report file exists and set by SUITE_FLAKY_RSPEC_REPORT_PATH' do
let(:report_file) do let(:report_file) do
Tempfile.new(%w[rspec_flaky_report .json]).tap do |f| Tempfile.new(%w[rspec_flaky_report .json]).tap do |f|
f.write(JSON.pretty_generate(flaky_example_report)) f.write(JSON.pretty_generate(suite_flaky_example_report))
f.rewind f.rewind
end end
end end
before do before do
stub_env('ALL_FLAKY_RSPEC_REPORT_PATH', report_file.path) stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', report_file.path)
end end
after do after do
...@@ -69,74 +82,122 @@ describe RspecFlaky::Listener do ...@@ -69,74 +82,122 @@ describe RspecFlaky::Listener do
end end
it_behaves_like 'a valid Listener instance' do it_behaves_like 'a valid Listener instance' do
let(:expected_all_flaky_examples) { flaky_example_report } let(:expected_suite_flaky_examples) { suite_flaky_example_report }
end end
end end
end end
describe '#example_passed' do describe '#example_passed' do
let(:rspec_example) { double(example_attrs) } let(:rspec_example) { double(new_example_attrs) }
let(:notification) { double(example: rspec_example) } let(:notification) { double(example: rspec_example) }
let(:listener) { described_class.new(suite_flaky_example_report.to_json) }
shared_examples 'a non-flaky example' do shared_examples 'a non-flaky example' do
it 'does not change the flaky examples hash' do it 'does not change the flaky examples hash' do
expect { subject.example_passed(notification) } expect { listener.example_passed(notification) }
.not_to change { subject.all_flaky_examples } .not_to change { listener.flaky_examples }
end end
end end
describe 'when the RSpec example does not respond to attempts' do shared_examples 'an existing flaky example' do
it_behaves_like 'a non-flaky example' let(:expected_flaky_example) do
end {
example_id: 'spec/foo/bar_spec.rb:2',
file: 'spec/foo/bar_spec.rb',
line: 2,
description: 'hello world',
first_flaky_at: 1234,
last_attempts_count: 2,
flaky_reports: 2,
last_flaky_job: nil
}
end
describe 'when the RSpec example has 1 attempt' do it 'changes the flaky examples hash' do
let(:rspec_example) { double(example_attrs.merge(attempts: 1)) } new_example = RspecFlaky::Example.new(rspec_example)
it_behaves_like 'a non-flaky example' now = Time.now
Timecop.freeze(now) do
expect { listener.example_passed(notification) }
.to change { listener.flaky_examples[new_example.uid].to_h }
end
expect(listener.flaky_examples[new_example.uid].to_h)
.to eq(expected_flaky_example.merge(last_flaky_at: now))
end
end end
describe 'when the RSpec example has 2 attempts' do shared_examples 'a new flaky example' do
let(:rspec_example) { double(example_attrs.merge(attempts: 2)) } let(:expected_flaky_example) do
let(:expected_new_flaky_example) do
{ {
example_id: 'spec/foo/baz_spec.rb:3', example_id: 'spec/foo/baz_spec.rb:3',
file: 'spec/foo/baz_spec.rb', file: 'spec/foo/baz_spec.rb',
line: 3, line: 3,
description: 'hello GitLab', description: 'hello GitLab',
first_flaky_at: instance_of(Time),
last_flaky_at: instance_of(Time),
last_attempts_count: 2, last_attempts_count: 2,
flaky_reports: 1, flaky_reports: 1,
last_flaky_job: nil last_flaky_job: nil
} }
end end
it 'does not change the flaky examples hash' do it 'changes the flaky examples hash' do
expect { subject.example_passed(notification) }
.to change { subject.all_flaky_examples }
new_example = RspecFlaky::Example.new(rspec_example) new_example = RspecFlaky::Example.new(rspec_example)
expect(subject.all_flaky_examples[new_example.uid].to_h) now = Time.now
.to match(hash_including(expected_new_flaky_example)) Timecop.freeze(now) do
expect { listener.example_passed(notification) }
.to change { listener.flaky_examples[new_example.uid].to_h }
end
expect(listener.flaky_examples[new_example.uid].to_h)
.to eq(expected_flaky_example.merge(first_flaky_at: now, last_flaky_at: now))
end
end
describe 'when the RSpec example does not respond to attempts' do
it_behaves_like 'a non-flaky example'
end
describe 'when the RSpec example has 1 attempt' do
let(:rspec_example) { double(new_example_attrs.merge(attempts: 1)) }
it_behaves_like 'a non-flaky example'
end
describe 'when the RSpec example has 2 attempts' do
let(:rspec_example) { double(new_example_attrs.merge(attempts: 2)) }
it_behaves_like 'a new flaky example'
context 'with an existing flaky example' do
let(:rspec_example) { double(already_flaky_example_attrs.merge(attempts: 2)) }
it_behaves_like 'an existing flaky example'
end end
end end
end end
describe '#dump_summary' do describe '#dump_summary' do
let(:rspec_example) { double(example_attrs) } let(:listener) { described_class.new(suite_flaky_example_report.to_json) }
let(:notification) { double(example: rspec_example) } let(:new_flaky_rspec_example) { double(new_example_attrs.merge(attempts: 2)) }
let(:already_flaky_rspec_example) { double(already_flaky_example_attrs.merge(attempts: 2)) }
let(:notification_new_flaky_rspec_example) { double(example: new_flaky_rspec_example) }
let(:notification_already_flaky_rspec_example) { double(example: already_flaky_rspec_example) }
context 'when a report file path is set by ALL_FLAKY_RSPEC_REPORT_PATH' do context 'when a report file path is set by FLAKY_RSPEC_REPORT_PATH' do
let(:report_file_path) { Rails.root.join('tmp', 'rspec_flaky_report.json') } let(:report_file_path) { Rails.root.join('tmp', 'rspec_flaky_report.json') }
let(:new_report_file_path) { Rails.root.join('tmp', 'rspec_flaky_new_report.json') }
before do before do
stub_env('ALL_FLAKY_RSPEC_REPORT_PATH', report_file_path) stub_env('FLAKY_RSPEC_REPORT_PATH', report_file_path)
stub_env('NEW_FLAKY_RSPEC_REPORT_PATH', new_report_file_path)
FileUtils.rm(report_file_path) if File.exist?(report_file_path) FileUtils.rm(report_file_path) if File.exist?(report_file_path)
FileUtils.rm(new_report_file_path) if File.exist?(new_report_file_path)
end end
after do after do
FileUtils.rm(report_file_path) if File.exist?(report_file_path) FileUtils.rm(report_file_path) if File.exist?(report_file_path)
FileUtils.rm(new_report_file_path) if File.exist?(new_report_file_path)
end end
context 'when FLAKY_RSPEC_GENERATE_REPORT == "false"' do context 'when FLAKY_RSPEC_GENERATE_REPORT == "false"' do
...@@ -144,12 +205,13 @@ describe RspecFlaky::Listener do ...@@ -144,12 +205,13 @@ describe RspecFlaky::Listener do
stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'false') stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'false')
end end
it 'does not write the report file' do it 'does not write any report file' do
subject.example_passed(notification) listener.example_passed(notification_new_flaky_rspec_example)
subject.dump_summary(nil) listener.dump_summary(nil)
expect(File.exist?(report_file_path)).to be(false) expect(File.exist?(report_file_path)).to be(false)
expect(File.exist?(new_report_file_path)).to be(false)
end end
end end
...@@ -158,21 +220,39 @@ describe RspecFlaky::Listener do ...@@ -158,21 +220,39 @@ describe RspecFlaky::Listener do
stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'true') stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'true')
end end
it 'writes the report file' do around do |example|
subject.example_passed(notification) Timecop.freeze { example.run }
end
it 'writes the report files' do
listener.example_passed(notification_new_flaky_rspec_example)
listener.example_passed(notification_already_flaky_rspec_example)
subject.dump_summary(nil) listener.dump_summary(nil)
expect(File.exist?(report_file_path)).to be(true) expect(File.exist?(report_file_path)).to be(true)
expect(File.exist?(new_report_file_path)).to be(true)
expect(File.read(report_file_path))
.to eq(JSON.pretty_generate(listener.to_report(listener.flaky_examples)))
new_example = RspecFlaky::Example.new(notification_new_flaky_rspec_example)
new_flaky_example = RspecFlaky::FlakyExample.new(new_example)
new_flaky_example.update_flakiness!
expect(File.read(new_report_file_path))
.to eq(JSON.pretty_generate(listener.to_report(new_example.uid => new_flaky_example)))
end end
end end
end end
end end
describe '#to_report' do describe '#to_report' do
let(:listener) { described_class.new(suite_flaky_example_report.to_json) }
it 'transforms the internal hash to a JSON-ready hash' do it 'transforms the internal hash to a JSON-ready hash' do
expect(subject.to_report('abc123' => RspecFlaky::FlakyExample.new(flaky_example_report['abc123']))) expect(listener.to_report(already_flaky_example_uid => already_flaky_example))
.to match(hash_including(flaky_example_report)) .to match(hash_including(suite_flaky_example_report))
end end
end end
end end
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20171005130944_schedule_create_gpg_key_subkeys_from_gpg_keys')
describe ScheduleCreateGpgKeySubkeysFromGpgKeys, :migration, :sidekiq do
matcher :be_scheduled_migration do |*expected|
match do |migration|
BackgroundMigrationWorker.jobs.any? do |job|
job['args'] == [migration, expected]
end
end
failure_message do |migration|
"Migration `#{migration}` with args `#{expected.inspect}` not scheduled!"
end
end
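# Illustrative note: under Sidekiq::Testing.fake!, scheduled jobs accumulate in
# BackgroundMigrationWorker.jobs instead of running, so the matcher above can
# assert on entries shaped like ['CreateGpgKeySubkeysFromGpgKeys', [1]]
# (migration class name assumed from described_class::MIGRATION).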
before do
create(:gpg_key, id: 1, key: GpgHelpers::User1.public_key)
create(:gpg_key, id: 2, key: GpgHelpers::User3.public_key)
# Delete all subkeys so they can be recreated
GpgKeySubkey.destroy_all
end
it 'correctly schedules background migrations' do
Sidekiq::Testing.fake! do
migrate!
expect(described_class::MIGRATION).to be_scheduled_migration(1)
expect(described_class::MIGRATION).to be_scheduled_migration(2)
expect(BackgroundMigrationWorker.jobs.size).to eq(2)
end
end
it 'performs the scheduled background migrations inline' do
Sidekiq::Testing.inline! do
expect(GpgKeySubkey.count).to eq(0)
migrate!
expect(GpgKeySubkey.count).to eq(3)
end
end
end
...@@ -238,7 +238,7 @@ describe Ci::Pipeline, :mailer do ...@@ -238,7 +238,7 @@ describe Ci::Pipeline, :mailer do
describe '#stage_seeds' do describe '#stage_seeds' do
let(:pipeline) do let(:pipeline) do
create(:ci_pipeline, config: { rspec: { script: 'rake' } }) build(:ci_pipeline, config: { rspec: { script: 'rake' } })
end end
it 'returns preseeded stage seeds object' do it 'returns preseeded stage seeds object' do
...@@ -247,6 +247,14 @@ describe Ci::Pipeline, :mailer do ...@@ -247,6 +247,14 @@ describe Ci::Pipeline, :mailer do
end end
end end
describe '#seeds_size' do
let(:pipeline) { build(:ci_pipeline_with_one_job) }
it 'returns the number of jobs in stage seeds' do
expect(pipeline.seeds_size).to eq 1
end
end
describe '#legacy_stages' do describe '#legacy_stages' do
subject { pipeline.legacy_stages } subject { pipeline.legacy_stages }
......
...@@ -231,6 +231,18 @@ describe HasStatus do ...@@ -231,6 +231,18 @@ describe HasStatus do
end end
end end
describe '.alive' do
subject { CommitStatus.alive }
%i[running pending created].each do |status|
it_behaves_like 'containing the job', status
end
%i[failed success].each do |status|
it_behaves_like 'not containing the job', status
end
end
describe '.created_or_pending' do describe '.created_or_pending' do
subject { CommitStatus.created_or_pending } subject { CommitStatus.created_or_pending }
......
...@@ -3,6 +3,7 @@ require 'rails_helper' ...@@ -3,6 +3,7 @@ require 'rails_helper'
describe GpgKey do describe GpgKey do
describe "associations" do describe "associations" do
it { is_expected.to belong_to(:user) } it { is_expected.to belong_to(:user) }
it { is_expected.to have_many(:subkeys) }
end end
describe "validation" do describe "validation" do
...@@ -38,6 +39,14 @@ describe GpgKey do ...@@ -38,6 +39,14 @@ describe GpgKey do
expect(gpg_key.primary_keyid).to eq GpgHelpers::User1.primary_keyid expect(gpg_key.primary_keyid).to eq GpgHelpers::User1.primary_keyid
end end
end end
describe 'generate_subkeys' do
it 'extracts the subkeys from the gpg key' do
gpg_key = create(:gpg_key, key: GpgHelpers::User1.public_key_with_extra_signing_key)
expect(gpg_key.subkeys.count).to eq(2)
end
end
end end
describe '#key=' do describe '#key=' do
...@@ -182,5 +191,29 @@ describe GpgKey do ...@@ -182,5 +191,29 @@ describe GpgKey do
expect(unrelated_gpg_key.destroyed?).to be false expect(unrelated_gpg_key.destroyed?).to be false
end end
it 'deletes all the associated subkeys' do
gpg_key = create :gpg_key, key: GpgHelpers::User3.public_key
expect(gpg_key.subkeys).to be_present
gpg_key.revoke
expect(gpg_key.subkeys(true)).to be_blank
end
it 'invalidates all signatures associated to the subkeys' do
gpg_key = create :gpg_key, key: GpgHelpers::User3.public_key
gpg_key_subkey = gpg_key.subkeys.last
gpg_signature = create :gpg_signature, verification_status: :verified, gpg_key: gpg_key_subkey
gpg_key.revoke
expect(gpg_signature.reload).to have_attributes(
verification_status: 'unknown_key',
gpg_key: nil,
gpg_key_subkey: nil
)
end
end end
end end
require 'rails_helper'
describe GpgKeySubkey do
subject { build(:gpg_key_subkey) }
describe 'associations' do
it { is_expected.to belong_to(:gpg_key) }
end
describe 'validations' do
it { is_expected.to validate_presence_of(:gpg_key_id) }
it { is_expected.to validate_presence_of(:fingerprint) }
it { is_expected.to validate_presence_of(:keyid) }
end
end
require 'rails_helper' require 'rails_helper'
RSpec.describe GpgSignature do RSpec.describe GpgSignature do
let(:gpg_key) { create(:gpg_key) }
let(:gpg_key_subkey) { create(:gpg_key_subkey) }
describe 'associations' do describe 'associations' do
it { is_expected.to belong_to(:project) } it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:gpg_key) } it { is_expected.to belong_to(:gpg_key) }
it { is_expected.to belong_to(:gpg_key_subkey) }
end end
describe 'validation' do describe 'validation' do
...@@ -25,4 +29,26 @@ RSpec.describe GpgSignature do ...@@ -25,4 +29,26 @@ RSpec.describe GpgSignature do
gpg_signature.commit gpg_signature.commit
end end
end end
describe '#gpg_key=' do
it 'supports the assignment of a GpgKey' do
gpg_signature = create(:gpg_signature, gpg_key: gpg_key)
expect(gpg_signature.gpg_key).to be_an_instance_of(GpgKey)
end
it 'supports the assignment of a GpgKeySubkey' do
gpg_signature = create(:gpg_signature, gpg_key: gpg_key_subkey)
expect(gpg_signature.gpg_key).to be_an_instance_of(GpgKeySubkey)
end
it 'clears gpg_key and gpg_key_subkey_id when passing nil' do
gpg_signature = create(:gpg_signature, gpg_key: gpg_key_subkey)
gpg_signature.update_attribute(:gpg_key, nil)
expect(gpg_signature.gpg_key_id).to be_nil
expect(gpg_signature.gpg_key_subkey_id).to be_nil
end
end
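# Illustrative note (assumed from the examples above): #gpg_key= dispatches on
# the record's class, populating gpg_key_id for a GpgKey and gpg_key_subkey_id
# for a GpgKeySubkey, while assigning nil clears both columns.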
end end
@@ -51,4 +51,21 @@ describe Ci::PipelinePresenter do
end
end
end
context '#failure_reason' do
context 'when pipeline has failure reason' do
it 'represents a failure reason sentence' do
pipeline.failure_reason = :config_error
expect(presenter.failure_reason)
.to eq 'CI/CD YAML configuration error!'
end
end
context 'when pipeline does not have failure reason' do
it 'returns nil' do
expect(presenter.failure_reason).to be_nil
end
end
end
end
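A minimal sketch of the presenter method these examples exercise, assuming a frozen message map keyed by the failure-reason enum value (the constant name is illustrative):

class Ci::PipelinePresenter < Gitlab::View::Presenter::Delegated
  FAILURE_REASONS = {
    config_error: 'CI/CD YAML configuration error!'
  }.freeze

  presents :pipeline

  # Translate the machine-readable failure reason into a sentence, or
  # return nil when the pipeline has no failure reason set.
  def failure_reason
    return unless pipeline.failure_reason?

    FAILURE_REASONS[pipeline.failure_reason.to_sym] || pipeline.failure_reason
  end
end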
require 'spec_helper'
require 'raven/transports/dummy'
require_relative '../../../config/initializers/sentry'
describe API::Helpers do
include API::APIGuard::HelperMethods
@@ -476,7 +478,7 @@ describe API::Helpers do
allow(exception).to receive(:backtrace).and_return(caller)
expect_any_instance_of(self.class).to receive(:sentry_context)
expect(Raven).to receive(:capture_exception).with(exception, extra: {})
handle_api_exception(exception)
end
@@ -501,6 +503,30 @@ describe API::Helpers do
expect(json_response['message']).to start_with("\nRuntimeError (Runtime Error!):")
end
end
context 'extra information' do
# Sentry events are an array of the form [auth_header, data, options]
let(:event_data) { Raven.client.transport.events.first[1] }
before do
stub_application_setting(
sentry_enabled: true,
sentry_dsn: "dummy://12345:67890@sentry.localdomain/sentry/42"
)
configure_sentry
Raven.client.configuration.encoding = 'json'
end
it 'sends the params, excluding confidential values' do
expect(Gitlab::Sentry).to receive(:enabled?).twice.and_return(true)
expect(ProjectsFinder).to receive(:new).and_raise('Runtime Error!')
get api('/projects', user), password: 'dont_send_this', other_param: 'send_this'
expect(event_data).to include('other_param=send_this')
expect(event_data).to include('password=********')
end
end
end
describe '.authenticate_non_get!' do
...
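The 'extra information' examples depend on two pieces working together: handle_api_exception forwarding request context to Sentry via the extra: keyword, and Raven's default SanitizeData processor masking confidential fields such as password before the event is serialized. Roughly, and with sentry_extra_data as an assumed hook, the forwarding side could look like:

def handle_api_exception(exception)
  if Gitlab::Sentry.enabled?
    # Raven's processors (e.g. Raven::Processor::SanitizeData) run on this
    # payload, which is why the spec sees 'password=********' in the event
    # while 'other_param=send_this' passes through untouched.
    extra = exception.respond_to?(:sentry_extra_data) ? exception.sentry_extra_data : {}
    Raven.capture_exception(exception, extra: extra)
  end
end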
@@ -108,5 +108,18 @@ describe PipelineEntity do
expect(subject[:ref][:path]).to be_nil
end
end
context 'when pipeline has a failure reason set' do
let(:pipeline) { create(:ci_empty_pipeline) }
before do
pipeline.drop!(:config_error)
end
it 'has a correct failure reason' do
expect(subject[:failure_reason])
.to eq 'CI/CD YAML configuration error!'
end
end
end
end
@@ -18,4 +18,14 @@ describe GpgKeys::CreateService do
it 'creates a gpg key' do
expect { subject.execute }.to change { user.gpg_keys.where(params).count }.by(1)
end
context 'when the public key contains subkeys' do
let(:params) { attributes_for(:gpg_key_with_subkeys) }
it 'generates the gpg subkeys' do
gpg_key = subject.execute
expect(gpg_key.subkeys.count).to eq(2)
end
end
end
@@ -81,7 +81,10 @@ RSpec.configure do |config|
if ENV['CI']
# This includes the first try, i.e. tests will be run 4 times before failing.
config.default_retry_count = 4
config.reporter.register_listener(
RspecFlaky::Listener.new,
:example_passed,
:dump_summary)
end
config.before(:suite) do
...
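For reference, config.reporter.register_listener only requires the listener to respond to the notification names it subscribes to; RSpec's reporter calls each subscribed method with a notification object. A minimal illustrative listener (not the real RspecFlaky::Listener), assuming rspec-retry records attempts in example metadata:

class MinimalFlakyListener
  # Invoked once per passing example; a flaky detector checks here whether
  # the example only passed after one or more retries.
  def example_passed(notification)
    example = notification.example
    warn "flaky: #{example.description}" if example.metadata[:retry_attempts].to_i > 0
  end

  # Invoked at the end of the run; this is where reports get written out.
  def dump_summary(_notification)
  end
end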
@@ -92,6 +92,46 @@ module GpgHelpers
KEY
end
def public_key_with_extra_signing_key
<<~KEY.strip
-----BEGIN PGP PUBLIC KEY BLOCK-----
Version: GnuPG v1
mI0EWK7VJwEEANSFayuVYenl7sBKUjmIxwDRc3jd+K+FWUZgknLgiLcevaLh/mxV
98dLxDKGDHHNKc/B7Y4qdlZYv1wfNQVuIbd8dqUQFOOkH7ukbgcGBTxH+2IM67y+
QBH618luS5Gz1d4bd0YoFf/xZGEh9G5xicz7TiXYzLKjnMjHu2EmbFePABEBAAG0
LU5hbm5pZSBCZXJuaGFyZCA8bmFubmllLmJlcm5oYXJkQGV4YW1wbGUuY29tPoi4
BBMBAgAiBQJYrtUnAhsDBgsJCAcDAgYVCAIJCgsEFgIDAQIeAQIXgAAKCRDM++Gf
AKyLHaeSA/99oUWpb02PlfkALcx5RncboMHkgczYEU9wOFIgvXIReswThCMOvPZa
piui+ItyJfV3ijJfO8IvbbFcvU7jjGA073Bb7tbzAEOQLA16mWgBLQlGaRWbHDW4
uwFxvkJKA0GzEsadEXeniESaZPc4rOXKPO3+/MSQWS2bmvvGsBTEuriNBFiu1ScB
BADIXkITf+kKCkD+n8tMsdTLInefu8KrJ8p7YRYCCabEXnWRsDb5zxUAG2VXCVUh
Yl6QXQybkNiBaduS+uxilz7gtYZUMFJvQ09+fV7D2N9B7u/1bGdIYz+cDFJnEJit
LY4w/nju2Sno5CL5Ead8sZuslKetSXPYHR/kbW462EOw5wARAQABiJ8EGAECAAkF
Aliu1ScCGwwACgkQzPvhnwCsix2WRQQAtOXpBS60myrBUXhlcqabDQgSTw+Spbgb
61hEMckyzpk7SfMNLz0EbYMvj9SU6znBG8RGeUljPTVMxPGr9yIpoFMSPKAUi/0K
AgRmH3tVpxlMipwXjST1Jukk2eHckt/3jGw3E1ElMSFtULe6u5p4gu578hHukEwT
IKzj0ZyC7DK5AQ0EWcx23AEIANwpAq85bT10JCBuNhOMyF2jKVt5wHbI9wBtjWYG
fgJFBkRvm6IsbmR0Y5DSBvF/of0UX1iGMfx6mvCDJkb1okquhCUef6MONWRpzXYE
CIZDm1TXu6yv0D35tkLfPo+/sY9UHHp1zGRcPAU46e8ztRwoD+zEJwy7lobLHGOL
9OdWtCGjsutLOTqKRK4jsifr8n3rePU09rejhDkRONNs7ufn9GRcWMN7RWiFDtpU
gNe84AJ38qaXPU8GHNTrDtDtRRPmn68ezMmE1qTNsxQxD4Isexe5Wsfc4+ElaP9s
zaHgij7npX1HS9RpmhnOa2h1ESroM9cqDh3IJVhf+eP6/uMAEQEAAYkBxAQYAQIA
DwUCWcx23AIbAgUJAeEzgAEpCRDM++GfAKyLHcBdIAQZAQIABgUCWcx23AAKCRDk
garE0uOuES7DCAC2Kgl6zO+NqIBIS6McgcEN0sGyvOvZ8Ps4hBiMwCyDAnsIRAUi
v4KZMtQMAyl9njJ3YjPWBsdieuTz45O06DDnrzJpZO5rUGJjAcEue4zvRRWIyu3H
qHC8MsvkslsNCygJHoWlknm+HucroskTNtxHQ+FdKZ6Tey+twl1u+PhV8PQVyFkl
4G1chO90EP4dvYrye26CC+ik2JkvC7Vy5M+U0PJikme8pFMjcdNks25BnAKcdqKU
AU8RTkSjoYvb8qSmZyldJjYjQRkTPRX1ZdaOID1EdiWl+s5cn0Oypo3z7BChcEMx
IWB/gmXQJQXVr5qNQnJObyMO/RczXYi9qNnyGMED/2EJJERLR0nefjHQalwDKQVP
s5lX1OKAcf2CrV6ZarckqaQgtqjZbuV2C2mrOHUs5uojlXaopj5gA4yJSGDcYhj1
Rg9jdHWBtkHBj3eL32ZqrHDs3ap8ErZMmPE8A+mn9TTnQS+FY2QF7vBjJKM3qPT7
DMVGWrg4m1NF8N6yMPMP
=RB1y
-----END PGP PUBLIC KEY BLOCK-----
KEY
end
def primary_keyid
fingerprint[-16..-1]
end
@@ -201,4 +241,277 @@ module GpgHelpers
['bette.cartwright@example.com', 'bette.cartwright@example.net']
end
end
# GPG Key with extra signing key
module User3
extend self
def signed_commit_signature
<<~SIGNATURE
-----BEGIN PGP SIGNATURE-----
iQEzBAABCAAdFiEEBSLdKbmPFnzYQhdS44/8r3Wr2SoFAlnNlT8ACgkQ44/8r3Wr
2SqP1wf9FC4J2S8LIHs/fpxgkYzsyCp5lCbS7JuoD4pqmI2KWyBx+vi9/3mZPCsm
Fj9f0vFEtNOb39GNGZbaA8DdGw30/WAS6kI6yco0WSK53KHrLw9Kqd+3e/NAVSsl
991Gq4n8X1U5izSH+gZOMtEEUBGqIlZKgRrEh7lhNcz0G7JTF2VCE4NNtZdq7GDA
N6jOQxDGUwi9wQBYORQzIBc3NihfhGloII1hXf0XzrgUY3zNYHTT7QipCxKAmH54
skwW+wi8RpBedar4saf7fs5xZbP/0yyVz98MJMdHBL68++Xt1AIHoqrb7eWszqnd
PCo/fnz1iHKCig602KLj0/zhADcNkg==
=LsTi
-----END PGP SIGNATURE-----
SIGNATURE
end
def signed_commit_base_data
<<~SIGNEDDATA
tree 86ec18bfe87ad42a782fdabd8310f9b7ac750f51
parent 2d1096e3a0ecf1d2baf6dee036cc80775d4940ba
author John Doe <john.doe@example.com> 1506645311 -0500
committer John Doe <john.doe@example.com> 1506645311 -0500
Commit signed with subkey by John Doe
SIGNEDDATA
end
def public_key
<<~KEY.strip
-----BEGIN PGP PUBLIC KEY BLOCK-----
mQENBFnNgbIBCAC9/WblcR4s/pFTwh9cm2iS59YRhtGfbrnfNE6QMIFIRFaK0u6J
LDy+scipXLoGg7X0XNFLW6Y457UUpkaPDVLPuVMONhMXdVqndGVvk9jq3D4oDtRa
ukucuXr9F84lHnjL3EosmAUiK3xBmHOGHm8+bvKbgPOdvre48YxoJ1POTen+chfo
YtLUfnC9EEGY/bn00aaXm3NV+zZK2zio5bFX9YLWSNh/JaXxuJsLoHR/lVrU7CLt
FCaGcPQ9SU46LHPshEYWO7ZsjEYJsYYOIOEzfcfR47T2EDTa6mVc++gC5TCoY3Ql
jccgm+EM0LvyEHwupEpxzCg2VsT0yoedhUhtABEBAAG0H0pvaG4gRG9lIDxqb2hu
LmRvZUBleGFtcGxlLmNvbT6JAVQEEwEIAD4WIQTqP4uIlyqP1HSHLy8RWzrxqtPt
ugUCWc2BsgIbAwUJA8JnAAULCQgHAgYVCAkKCwIEFgIDAQIeAQIXgAAKCRARWzrx
qtPturMDCACc1Pi1sLJFcCnJEc9sCInCO4LH8fntNjRLN3MTPU5YCYmFM3fAl5ly
vXPZ4jNWZxKbQVeFnkDOg5Ti8bzmFEMc8KbZuguktVFizxnLdFCTTRO89i3HDVSN
bIosrs5HJwRKOzul6i2whn3dsr8/P8WJczYjZGiw29hGwH3md4Thn/aAGbzjoCEF
IfIb1kccyHMJkaj79S8B2agsbEJLuTSfsXC3kGZIJyKG1DNmDUHW/ZE6ro/Kkhik
3w6Jw14cMsKUIOBkOgsD/gXgX9xxWjYHmKrbCXucTKUevNlaCy5tzwrC0Am3prl9
OJj3macOA8hNaTVDflEHNCwHOwqnVQYyuQENBFnNgbIBCAC59MmKc0cqPRPTpCZ5
arKRoj23SNKWMDWaxSELdU91Wd/NJk4wF25urk9BtBuwjzaBMqb/xPD88yJC3ucs
2LwCyAb5C/dHcPOpys8Pd+KrdHDR3zAMjcASsizlW/qFI9MtjhcU9Yd6iTUejAZG
NEC76WALJ3SLYzCaDkHFjWpH3Xq6ck3/9jpL3csn/5GLCsZQUDYGrZSXvHAIigwW
Xo6tMs5LCCO9CZg2qGDpvqlzcmy6CRkf0h/UFYJzGqfbJtxeCIxa93WIPE8eGwao
aneDlNtIoYiP6krC3OLsaPWT58QltNKaQuZSpjwtQBHa4JIt55vx+FcvRb7Kflgf
fT8bABEBAAGJATwEGAEIACYWIQTqP4uIlyqP1HSHLy8RWzrxqtPtugUCWc2BsgIb
DAUJA8JnAAAKCRARWzrxqtPtuqJjCACj+Z4qtgMpJXx3u58wCzkVLl5IylD/tEPA
cNIrj8QS8ec+woTJaMGVCh96VC2FPl8KR4Hjhy0yaupyPbTI6VWib63S/NcDfG7r
tviRFG2Gf8yduERebyC0cpgnmjVgFfJs7N3K3ncz6myOr9idNI05OC9poL73sDUv
jRXhm7uy938bT/R4MQdpYuxucgU3MiwvfG5ht+oJ4Yp+/IrR2PTqRGqMCsodnroa
TBKq2kW565TtCvrFkNub/ytorDbIVN9VrIMcuTiOv8sLoQRDJO9HvWUhYAqMY6Uh
dy12jR9FrquZnGsDKKs9V0Y6J4Wi8vnmdgWVZUc40pfXq3APAB6suQENBFnNgeAB
CADFqQRxLHxLIQ7B72diTMI2tPk9d5c67k+Gzkrg1QYoxBLdRCmhM4ydYqZzvIz4
1npkK20w4somOCwvyAOjO46IGb3f/Wk8mY8o5HMpI1miAfze0YTZKzRo2DmrvwbV
/h8jvZNCISwtrOgaaszWSVSuEQQCA1jf4qixfCb3ReETvZc3MTZXhw8IUbszXh5d
a6CYqq/fr5Zw4Dc19ZSoHSTh0Wn03mEm/kaYtia/wt1I+xVvTSaC2Pf/kUtr7UEf
3NMc0YF0s4KgeW8KwjHa7Sz9wzydLPRH5kJ26SDUGUhrFf1jNLIegtDsoISV/86G
ztXcVq5GY6lXMwmsggRe++7xABEBAAGJAmwEGAEIACAWIQTqP4uIlyqP1HSHLy8R
WzrxqtPtugUCWc2B4AIbAgFACRARWzrxqtPtusB0IAQZAQgAHRYhBAUi3Sm5jxZ8
2EIXUuOP/K91q9kqBQJZzYHgAAoJEOOP/K91q9kqlHcH+wbvD14ITYDMfgIfy67O
4Qcmgf1qzGXhpsABz/i/EPgRD990eNBI0YGuvoKRJfetEGn7LerrrCB8Z+ICFPHF
rzXoe10zm+QTREck0OB8nPFRycJ+Fbl6JX+cnkEx27Mmg1aVb7+H5LMDaWO1KjLs
g2wIDo/jrDfW7NoZzy4XTd7jFCOt4fftL/dhiujQmhLzugZXCxRISOVdmgilDJQo
Tz1sEm34ida98JFjdzSgkUvJ/pFTZ21ThCNxlUf01Hr2Pdcg1e2/97sZocMFTY2J
KwmiW2LG3B05/VrRtdvsCVj8G49coxgPPKty+m71ovAXdS/CvVqE7TefCplsYJ1d
V3abwwf/Xl2SxzbAKbrYMgZfdGzpPg2u6982WvfGIVfjFJh9veHZAbfkPcjdAD2X
e67Y4BeKG2OQQqeOY2y+81A7PaehgHzbFHJG/4RjqB66efrZAg4DgIdbr4oyMoUJ
VVsl0wfYSIvnd4kvWXYICVwk53HLA3wIowZAsJ1LT2byAKbUzayLzTekrTzGcwQk
g2XT798ev2QuR5Ki5x8MULBFX4Lhd03+uGOxjhNPQD6DAAHCQLaXQhaGuyMgt5hD
t0nF3yuw3Eg4Ygcbtm24rZXOHJc9bDKeRiWZz1dIyYYVJmHSQwOVXkAwJlF1sIgy
/jQYeOgFDKq20x86WulkvsUtBoaZJg==
=Q5Z7
-----END PGP PUBLIC KEY BLOCK-----
KEY
end
def secret_key
<<~SECRET
-----BEGIN PGP PRIVATE KEY BLOCK-----
lQPGBFnNgbIBCAC9/WblcR4s/pFTwh9cm2iS59YRhtGfbrnfNE6QMIFIRFaK0u6J
LDy+scipXLoGg7X0XNFLW6Y457UUpkaPDVLPuVMONhMXdVqndGVvk9jq3D4oDtRa
ukucuXr9F84lHnjL3EosmAUiK3xBmHOGHm8+bvKbgPOdvre48YxoJ1POTen+chfo
YtLUfnC9EEGY/bn00aaXm3NV+zZK2zio5bFX9YLWSNh/JaXxuJsLoHR/lVrU7CLt
FCaGcPQ9SU46LHPshEYWO7ZsjEYJsYYOIOEzfcfR47T2EDTa6mVc++gC5TCoY3Ql
jccgm+EM0LvyEHwupEpxzCg2VsT0yoedhUhtABEBAAH+BwMCOqjIWtWBMo3mjIP1
OnIbZ+YJxSUZ/B8wU2loMv4XiKmeXLbjD6h3jojxYlnreSHA9QvoY8uNaWElL/n2
jv6bxluivk8tA9FWJVv4HaSlMDd2J2YmUW17r8z9Kvm7b7pFVSrEoYV93Wdj5FJ7
ciKrFhYNSD7tH1sHwkrFAbiv6aHyk9h48YmR3kx2wBvz+pWk7M2srCJx2b6DXkj/
fsj1c/vnzUUGooOJgOvYAWrpg/rJUNxSsFypAHf8Xtk+xt8S1aZ9jaCmYk6I1B2L
m00HP43cXUpKcmETW1zXvfMLKjjoUEAJhSJhbCwiEzGL4ojQTarl8qbb+MisakEJ
DkPYtrhiiuVzUIFfqE86yO0UKidtzBmJAW3c6zeiUATvACzU09aGyUY1cJi93oXD
w4PCyVZ+nMvGD1wx+gyYdDINwpX4y6od9RDr06DGCzwu+S2vxsu1T8LdSv52fhBr
U0FY3Z3VN1ytay4SHi/8Y9VBYQFBh7R7Ch0gEMxLVKXVNqOXHUdGrKWV/WmyLKuZ
W9DEnWU4Mpz/di5jU8EDW7EB9DZZhVk3mQw3nuAZrBGD4azmmD5mgSgLeBGmKZ1e
q/9IWO44mRBBUtNv+rAkmmYF3MCNHuc7EMj+c/IgBUC7d5qBzGWA3UJ0vKX4xcIQ
X/PnU+nGxNvBrdqQaMLczeg28SerojxuX79prOsoySctLAbajd9HshW5SfOZ0rvb
BNHPqolQDijYEHGxANh4BbamRMGi60Rop7vJsZOLAemz17x/mvCtAHISOJT77/IM
oWC+IksJ5XsA/klJGe/tkx11aRQDDmKvIJXmMuRdvnIR23UBbzRQlWWq0l6CdoF6
6SQ9BJBFq0WY32No9WZAPnDO3buUzWc1Y3uwn/+h7TVYVyTlEqzpYJ9FoJwBHbor
0663eoyz6+AUtB9Kb2huIERvZSA8am9obi5kb2VAZXhhbXBsZS5jb20+iQFUBBMB
CAA+FiEE6j+LiJcqj9R0hy8vEVs68arT7boFAlnNgbICGwMFCQPCZwAFCwkIBwIG
FQgJCgsCBBYCAwECHgECF4AACgkQEVs68arT7bqzAwgAnNT4tbCyRXApyRHPbAiJ
wjuCx/H57TY0SzdzEz1OWAmJhTN3wJeZcr1z2eIzVmcSm0FXhZ5AzoOU4vG85hRD
HPCm2boLpLVRYs8Zy3RQk00TvPYtxw1UjWyKLK7ORycESjs7peotsIZ93bK/Pz/F
iXM2I2RosNvYRsB95neE4Z/2gBm846AhBSHyG9ZHHMhzCZGo+/UvAdmoLGxCS7k0
n7Fwt5BmSCcihtQzZg1B1v2ROq6PypIYpN8OicNeHDLClCDgZDoLA/4F4F/ccVo2
B5iq2wl7nEylHrzZWgsubc8KwtAJt6a5fTiY95mnDgPITWk1Q35RBzQsBzsKp1UG
Mp0DxgRZzYGyAQgAufTJinNHKj0T06QmeWqykaI9t0jSljA1msUhC3VPdVnfzSZO
MBdubq5PQbQbsI82gTKm/8Tw/PMiQt7nLNi8AsgG+Qv3R3DzqcrPD3fiq3Rw0d8w
DI3AErIs5Vv6hSPTLY4XFPWHeok1HowGRjRAu+lgCyd0i2Mwmg5BxY1qR916unJN
//Y6S93LJ/+RiwrGUFA2Bq2Ul7xwCIoMFl6OrTLOSwgjvQmYNqhg6b6pc3JsugkZ
H9If1BWCcxqn2ybcXgiMWvd1iDxPHhsGqGp3g5TbSKGIj+pKwtzi7Gj1k+fEJbTS
mkLmUqY8LUAR2uCSLeeb8fhXL0W+yn5YH30/GwARAQAB/gcDAuYn/gmAA3OC5p5Q
Pat5kE7MtmSvSPmdjVA2o+6RtqZf81JqtAgtDVDwj7SPFsH6ue5P+iAn9938YYek
WQU2+0GXeUbSJt+u4VAchgwA5mYsEnEr1/E5KEfWPWO3jJol1rJG99adrjkMxvug
QJmwieqhu0368w1FU0tKstxYbr3Tz3nPCPDJoigMEUkXiFklDCUgeNk0g+zd5ytE
lXuuLYcGZX7njxL5jD+cMIKqua5zv8WbvNf/BhM1nCarxp4qzKWim8J8jY+iR+/E
qOar4aliGRez0j+qh/r8ywgPwfOO89zrKrMfaclL7dN9yuecmBHKWZvfrP5JKMHj
yTU3nRMhUGbfVUaaZI2Ccz2rNOU4oF9wuzpzQi8qOysZixRmH61Nw3ULIKoQgiWp
0p5A3L94OaEfZEq3plVaIXI2YWYFSEAlIAc2dq4GxynousLdhNACi9bHhXrfFUhK
ckw1QlbhguO/j63/x8ygsmLZVwHG0fJZtMhT3+EGam9cuMBibIYyu3ufJRy7kMKt
kmyuk02X+hYJ7w8Pu6b8zYHBXbsEKamipMgd4oKtc8WnXILZo4lwDSArgs7ZVCBa
vGBbpTOsr54WjsyuCdX/wv0F2l31J87UxVtTKXx/+nfMfCE02zd+NsTgqvgqmkaA
Sy3qvv326kJNx7p+5hRwDzlAZ7vGJjj5TwCbGYDvctIf6MFrGDRNYwrGwNkPc3TG
rturfeL/ioua0Smj8LIbOv9Ir93gUIseNpxv8tXV/lffdIplcw802b3aXIKyv4fq
b9y3Oq/pDHFukKuBe9WTXJvjT0+ME+a0C8KIb/sts95pmjZsgN1kPmvuT0ReQwUR
eGrqz387bnVUzo4RgM3IERs/0EYzPzE8A2vc1e4/87b5J+1Xnov8Phd29vW8Td5l
ApiFrFO2r+/Np4kBPAQYAQgAJhYhBOo/i4iXKo/UdIcvLxFbOvGq0+26BQJZzYGy
AhsMBQkDwmcAAAoJEBFbOvGq0+26omMIAKP5niq2AyklfHe7nzALORUuXkjKUP+0
Q8Bw0iuPxBLx5z7ChMlowZUKH3pULYU+XwpHgeOHLTJq6nI9tMjpVaJvrdL81wN8
buu2+JEUbYZ/zJ24RF5vILRymCeaNWAV8mzs3credzPqbI6v2J00jTk4L2mgvvew
NS+NFeGbu7L3fxtP9HgxB2li7G5yBTcyLC98bmG36gnhin78itHY9OpEaowKyh2e
uhpMEqraRbnrlO0K+sWQ25v/K2isNshU31Wsgxy5OI6/ywuhBEMk70e9ZSFgCoxj
pSF3LXaNH0Wuq5mcawMoqz1XRjonhaLy+eZ2BZVlRzjSl9ercA8AHqydA8YEWc2B
4AEIAMWpBHEsfEshDsHvZ2JMwja0+T13lzruT4bOSuDVBijEEt1EKaEzjJ1ipnO8
jPjWemQrbTDiyiY4LC/IA6M7jogZvd/9aTyZjyjkcykjWaIB/N7RhNkrNGjYOau/
BtX+HyO9k0IhLC2s6BpqzNZJVK4RBAIDWN/iqLF8JvdF4RO9lzcxNleHDwhRuzNe
Hl1roJiqr9+vlnDgNzX1lKgdJOHRafTeYSb+Rpi2Jr/C3Uj7FW9NJoLY9/+RS2vt
QR/c0xzRgXSzgqB5bwrCMdrtLP3DPJ0s9EfmQnbpINQZSGsV/WM0sh6C0OyghJX/
zobO1dxWrkZjqVczCayCBF777vEAEQEAAf4HAwKESvCIDq5QNeadnSvpkZemItPO
lDf+7Wiue2gt776D5xkVyT7WkgTQv+IGWGtqz7pCCO2rMp/F9u1BghdjY46jtrK6
MMFKta4YENUhRliH6M2YmRjq5p7xZgH6UOnDlqsafbfyUx30t59tbQj+07aMnH5J
LMm37nVkDvo3wpPQPuo7L6qizYsrHrQKeJZ8636u41UjC99lVH7vXzqXw68FJImi
XdMZbEVBIprYfCDem+fD6gJBA4JBqWJMxuFMfhWp+1WtYoeNojDm4KxBzc2fvYV/
HOIUfLFBvACD/UwU5ovllHN39/O8SMgyLm9ymx2/qXcdIkUz4l7fhOCY1OW12DMu
5OFrrTteGK/Sj4Z8pYRdMdaKyjIlxuVzEQGWsU5+J2ALao5atEHguqwlD3cKh3G8
1sA/l5eTFDt84erYv1MVStV0BhZaCE4mNL4WpnQGDdW05yoGq9jIyLcurb/k/atU
TUkAF1csgNlJlR3IP+7U9xfHkjMO5+SV82xoNf9nBjz06TRdnvOSKsMNKp0RxC/L
Hbiee9o7Rxqdiyv0ly6bCCymwfvlsEIqo3YKssBfe3XI5yQI2hF9QZaH1ywzmgaH
o+rbME/XxddRJueS79eipT7K05Z3ulSHTXzpDw+jZcdUV0Ac72Q9FTDPMl3xc6NW
DrYwWw/3+kyZ4SkP56l7KlGczTyNPvU9iou4Cj/cAZk/pHx68Chq8ZZNznFm/bIF
gWt3fqE/n+y78B6MI8qTjGJOR0jycxrLH82Z2F+FpMShI2C5NnOa/Ilkv3e2Q5U6
MOAwaCIz6RHhcI99O/yta2vLelWZqn2g86rLzTG0HlIABTCPYotwetHh0hsrkSv9
Kh6rOzGB4i8lRqcBVY+alMSiRBlzkwpL4YUcO6f3vEDncQ9evE1AQCpD4jUJjB1H
JSSJAmwEGAEIACAWIQTqP4uIlyqP1HSHLy8RWzrxqtPtugUCWc2B4AIbAgFACRAR
WzrxqtPtusB0IAQZAQgAHRYhBAUi3Sm5jxZ82EIXUuOP/K91q9kqBQJZzYHgAAoJ
EOOP/K91q9kqlHcH+wbvD14ITYDMfgIfy67O4Qcmgf1qzGXhpsABz/i/EPgRD990
eNBI0YGuvoKRJfetEGn7LerrrCB8Z+ICFPHFrzXoe10zm+QTREck0OB8nPFRycJ+
Fbl6JX+cnkEx27Mmg1aVb7+H5LMDaWO1KjLsg2wIDo/jrDfW7NoZzy4XTd7jFCOt
4fftL/dhiujQmhLzugZXCxRISOVdmgilDJQoTz1sEm34ida98JFjdzSgkUvJ/pFT
Z21ThCNxlUf01Hr2Pdcg1e2/97sZocMFTY2JKwmiW2LG3B05/VrRtdvsCVj8G49c
oxgPPKty+m71ovAXdS/CvVqE7TefCplsYJ1dV3abwwf/Xl2SxzbAKbrYMgZfdGzp
Pg2u6982WvfGIVfjFJh9veHZAbfkPcjdAD2Xe67Y4BeKG2OQQqeOY2y+81A7Paeh
gHzbFHJG/4RjqB66efrZAg4DgIdbr4oyMoUJVVsl0wfYSIvnd4kvWXYICVwk53HL
A3wIowZAsJ1LT2byAKbUzayLzTekrTzGcwQkg2XT798ev2QuR5Ki5x8MULBFX4Lh
d03+uGOxjhNPQD6DAAHCQLaXQhaGuyMgt5hDt0nF3yuw3Eg4Ygcbtm24rZXOHJc9
bDKeRiWZz1dIyYYVJmHSQwOVXkAwJlF1sIgy/jQYeOgFDKq20x86WulkvsUtBoaZ
Jg==
=TKlF
-----END PGP PRIVATE KEY BLOCK-----
SECRET
end
def fingerprint
'EA3F8B88972A8FD474872F2F115B3AF1AAD3EDBA'
end
def subkey_fingerprints
%w(159AD5DDF199591D67D2B87AA3CEC5C0A7C270EC 0522DD29B98F167CD8421752E38FFCAF75ABD92A)
end
def names
['John Doe']
end
def emails
['john.doe@example.com']
end
end
# GPG Key containing just the main key
module User4
extend self
def public_key
<<~KEY.strip
-----BEGIN PGP PUBLIC KEY BLOCK-----
mQENBFnWcesBCAC6Y8FXl9ZJ9HPa6dIYcgQrvjIQcwoQCUEsaXNRpc+206RPCIXK
aIYr0nTD8GeovMuUONXTj+DdueQU2GAAqHHOqvDDVXqRrW3xfWnSwix7sTuhG1Ew
PLHYmjLENqaTsdyliEo3N8VWy2k0QRbC3R6xvop4Ooa87D5vcATIl0gYFtSiHIL+
TervYvTG9Eq1qSLZHbe2x4IzeqX2luikPKokL7j8FTZaCmC5MezIUur1ulfyYY/j
SkST/1aUFc5QXJJSZA0MYJWZX6x7Y3l7yl0dkHqmK8OTuo8RPWd3ybEiuvRsOL8K
GAv/PmVJRGDAf7GGbwXXsE9MiZ5GzVPxHnexABEBAAG0G0pvaG4gRG9lIDxqb2hu
QGV4YW1wbGUuY29tPokBTgQTAQgAOBYhBAh0izYM0lwuzJnVlAcBbPnhOj+bBQJZ
1nHrAhsDBQsJCAcCBhUICQoLAgQWAgMBAh4BAheAAAoJEAcBbPnhOj+bkywH/i4w
OwpDxoTjUQlPlqGAGuzvWaPzSJndawgmMTr68oRsD+wlQmQQTR5eqxCpUIyV4aYb
D697RYzoqbT4mlU49ymzfKSAxFe88r1XQWdm81DcofHVPmw2GBrIqaX3Du4Z7xkI
Q9/S43orwknh5FoVwU8Nau7qBuv9vbw2apSkuA1oBj3spQ8hqwLavACyQ+fQloAT
hSDNqPiCZj6L0dwM1HYiqVoN3Q7qjgzzeBzlXzljJoWblhxllvMK20bVoa7H+uR2
lczFHfsX8VTIMjyTGP7R3oHN91DEahlQybVVNLmNSDKZM2P/0d28BRUmWxQJ4Ws3
J4hOWDKnLMed3VOIWzM=
=xVuW
-----END PGP PUBLIC KEY BLOCK-----
KEY
end
def secret_key
<<~KEY.strip
-----BEGIN PGP PRIVATE KEY BLOCK-----
lQPGBFnWcesBCAC6Y8FXl9ZJ9HPa6dIYcgQrvjIQcwoQCUEsaXNRpc+206RPCIXK
aIYr0nTD8GeovMuUONXTj+DdueQU2GAAqHHOqvDDVXqRrW3xfWnSwix7sTuhG1Ew
PLHYmjLENqaTsdyliEo3N8VWy2k0QRbC3R6xvop4Ooa87D5vcATIl0gYFtSiHIL+
TervYvTG9Eq1qSLZHbe2x4IzeqX2luikPKokL7j8FTZaCmC5MezIUur1ulfyYY/j
SkST/1aUFc5QXJJSZA0MYJWZX6x7Y3l7yl0dkHqmK8OTuo8RPWd3ybEiuvRsOL8K
GAv/PmVJRGDAf7GGbwXXsE9MiZ5GzVPxHnexABEBAAH+BwMC4UwgHgH5Cp7meY39
G5Q3GV2xtwADoaAvlOvPOLPK2fQqxQfb4WN4eZECp2wQuMRBMj52c4i9yphab1mQ
vOzoPIRGvkcJoxG++OxQ0kRk0C0gX6wM6SGVdb1nQnfZnoJCCU3IwCaSGktkLDs1
jwdI+VmXJbSugUbd25bakHQcE2BaNHuRBlQWQfFbhGBy0+uMfNDBZ6FRipBu47hO
f/wm/xXuV8N8BSgvNR/qtAqSQI34CdsnWAhMYm9rqmTNyt0nq4dveX+E0YzVn4lH
lOEa7cpYeuBwIL8L3EvSPNCICiJlF3gVqiYzyqRElnCkv1OGc0x3W5onY/agHgGZ
KYyi/ubOdqqDgBR+eMt0JKSGH2EPxUAGFPY5F37u4erdxH86GzIinAExLSmADiVR
KtxluZP6S2KLbETN5uVbrfa+HVcMbbUZaBHHtL+YbY8PqaFUIvIUR1HM2SK7IrFw
KuQ8ibRgooyP7VgMNiPzlFpY4NXUv+FXIrNJ6ELuIaENi0izJ7aIbVBM8SijDz6u
5EEmodnDvmU2hmQNZJ17TxggE7oeT0rKdDGHM5zBvqZ3deqE9sgKx/aTKcj61ID3
M80ZkHPDFazUCohLpYgFN20bYYSmxU4LeNFy8YEiuic8QQKaAFxSf9Lf87UFQwyF
dduI1RWEbjMsbEJXwlmGM02ssQHsgoVKwZxijq5A5R1Ul6LowazQ8obPiwRS4NZ4
Z+QKDon79MMXiFEeh1jeG/MKKWPxFg3pdtCWhC7WdH4hfkBsCVKf+T58yB2Gzziy
fOHvAl7v3PtdZgf1xikF8spGYGCWo4B2lxC79xIflKAb2U6myb5I4dpUYxzxoMxT
zxHwxEie3NxzZGUyXSt3LqYe2r4CxWnOCXWjIxxRlLue1BE5Za1ycnDRjgUO24+Z
uDQne6KLkhAotBtKb2huIERvZSA8am9obkBleGFtcGxlLmNvbT6JAU4EEwEIADgW
IQQIdIs2DNJcLsyZ1ZQHAWz54To/mwUCWdZx6wIbAwULCQgHAgYVCAkKCwIEFgID
AQIeAQIXgAAKCRAHAWz54To/m5MsB/4uMDsKQ8aE41EJT5ahgBrs71mj80iZ3WsI
JjE6+vKEbA/sJUJkEE0eXqsQqVCMleGmGw+ve0WM6Km0+JpVOPcps3ykgMRXvPK9
V0FnZvNQ3KHx1T5sNhgayKml9w7uGe8ZCEPf0uN6K8JJ4eRaFcFPDWru6gbr/b28
NmqUpLgNaAY97KUPIasC2rwAskPn0JaAE4Ugzaj4gmY+i9HcDNR2IqlaDd0O6o4M
83gc5V85YyaFm5YcZZbzCttG1aGux/rkdpXMxR37F/FUyDI8kxj+0d6BzfdQxGoZ
UMm1VTS5jUgymTNj/9HdvAUVJlsUCeFrNyeITlgypyzHnd1TiFsz
=/37z
-----END PGP PRIVATE KEY BLOCK-----
KEY
end
def primary_keyid
fingerprint[-16..-1]
end
def fingerprint
'08748B360CD25C2ECC99D59407016CF9E13A3F9B'
end
end
end