Commit 830c770a authored by Shinya Maeda

Merge branch 'master' into feature/sm/35954-create-kubernetes-cluster-on-gke-from-k8s-service

parents 3e26b0dc 7f8e720f
......@@ -27,7 +27,7 @@ variables:
GET_SOURCES_ATTEMPTS: "3"
KNAPSACK_RSPEC_SUITE_REPORT_PATH: knapsack/${CI_PROJECT_NAME}/rspec_report-master.json
KNAPSACK_SPINACH_SUITE_REPORT_PATH: knapsack/${CI_PROJECT_NAME}/spinach_report-master.json
FLAKY_RSPEC_SUITE_REPORT_PATH: rspec_flaky/${CI_PROJECT_NAME}/report-master.json
FLAKY_RSPEC_SUITE_REPORT_PATH: rspec_flaky/report-suite.json
before_script:
- bundle --version
......@@ -87,12 +87,13 @@ stages:
- export CI_NODE_TOTAL=${JOB_NAME[-1]}
- export KNAPSACK_REPORT_PATH=knapsack/${CI_PROJECT_NAME}/${JOB_NAME[0]}_node_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json
- export KNAPSACK_GENERATE_REPORT=true
- export ALL_FLAKY_RSPEC_REPORT_PATH=rspec_flaky/${CI_PROJECT_NAME}/all_node_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json
- export NEW_FLAKY_RSPEC_REPORT_PATH=rspec_flaky/${CI_PROJECT_NAME}/new_node_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json
- export SUITE_FLAKY_RSPEC_REPORT_PATH=${FLAKY_RSPEC_SUITE_REPORT_PATH}
- export FLAKY_RSPEC_REPORT_PATH=rspec_flaky/all_${JOB_NAME[0]}_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json
- export NEW_FLAKY_RSPEC_REPORT_PATH=rspec_flaky/new_${JOB_NAME[0]}_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json
- export FLAKY_RSPEC_GENERATE_REPORT=true
- export CACHE_CLASSES=true
- cp ${KNAPSACK_RSPEC_SUITE_REPORT_PATH} ${KNAPSACK_REPORT_PATH}
- cp ${FLAKY_RSPEC_SUITE_REPORT_PATH} ${ALL_FLAKY_RSPEC_REPORT_PATH}
- '[[ -f $FLAKY_RSPEC_REPORT_PATH ]] || echo "{}" > ${FLAKY_RSPEC_REPORT_PATH}'
- '[[ -f $NEW_FLAKY_RSPEC_REPORT_PATH ]] || echo "{}" > ${NEW_FLAKY_RSPEC_REPORT_PATH}'
- scripts/gitaly-test-spawn
- knapsack rspec "--color --format documentation"
......@@ -233,7 +234,7 @@ retrieve-tests-metadata:
- wget -O $KNAPSACK_SPINACH_SUITE_REPORT_PATH http://${TESTS_METADATA_S3_BUCKET}.s3.amazonaws.com/$KNAPSACK_SPINACH_SUITE_REPORT_PATH || rm $KNAPSACK_SPINACH_SUITE_REPORT_PATH
- '[[ -f $KNAPSACK_RSPEC_SUITE_REPORT_PATH ]] || echo "{}" > ${KNAPSACK_RSPEC_SUITE_REPORT_PATH}'
- '[[ -f $KNAPSACK_SPINACH_SUITE_REPORT_PATH ]] || echo "{}" > ${KNAPSACK_SPINACH_SUITE_REPORT_PATH}'
- mkdir -p rspec_flaky/${CI_PROJECT_NAME}/
- mkdir -p rspec_flaky/
- wget -O $FLAKY_RSPEC_SUITE_REPORT_PATH http://${TESTS_METADATA_S3_BUCKET}.s3.amazonaws.com/$FLAKY_RSPEC_SUITE_REPORT_PATH || rm $FLAKY_RSPEC_SUITE_REPORT_PATH
- '[[ -f $FLAKY_RSPEC_SUITE_REPORT_PATH ]] || echo "{}" > ${FLAKY_RSPEC_SUITE_REPORT_PATH}'
......@@ -252,22 +253,21 @@ update-tests-metadata:
- retry gem install fog-aws mime-types
- scripts/merge-reports ${KNAPSACK_RSPEC_SUITE_REPORT_PATH} knapsack/${CI_PROJECT_NAME}/rspec-pg_node_*.json
- scripts/merge-reports ${KNAPSACK_SPINACH_SUITE_REPORT_PATH} knapsack/${CI_PROJECT_NAME}/spinach-pg_node_*.json
- scripts/merge-reports ${FLAKY_RSPEC_SUITE_REPORT_PATH} rspec_flaky/${CI_PROJECT_NAME}/all_node_*.json
- scripts/merge-reports ${FLAKY_RSPEC_SUITE_REPORT_PATH} rspec_flaky/all_*_*.json
- '[[ -z ${TESTS_METADATA_S3_BUCKET} ]] || scripts/sync-reports put $TESTS_METADATA_S3_BUCKET $KNAPSACK_RSPEC_SUITE_REPORT_PATH $KNAPSACK_SPINACH_SUITE_REPORT_PATH'
- '[[ -z ${TESTS_METADATA_S3_BUCKET} ]] || scripts/sync-reports put $TESTS_METADATA_S3_BUCKET $FLAKY_RSPEC_SUITE_REPORT_PATH'
- rm -f knapsack/${CI_PROJECT_NAME}/*_node_*.json
- rm -f rspec_flaky/${CI_PROJECT_NAME}/*_node_*.json
- rm -f rspec_flaky/all_*.json rspec_flaky/new_*.json
flaky-examples-check:
<<: *dedicated-runner
image: ruby:2.3-alpine
services: []
before_script: []
cache: {}
variables:
SETUP_DB: "false"
USE_BUNDLE_INSTALL: "false"
NEW_FLAKY_SPECS_REPORT: rspec_flaky/${CI_PROJECT_NAME}/new_rspec_flaky_examples.json
NEW_FLAKY_SPECS_REPORT: rspec_flaky/report-new.json
stage: post-test
allow_failure: yes
only:
......@@ -281,7 +281,7 @@ flaky-examples-check:
- rspec_flaky/
script:
- '[[ -f $NEW_FLAKY_SPECS_REPORT ]] || echo "{}" > ${NEW_FLAKY_SPECS_REPORT}'
- scripts/merge-reports $NEW_FLAKY_SPECS_REPORT rspec_flaky/${CI_PROJECT_NAME}/new_node_*.json
- scripts/merge-reports ${NEW_FLAKY_SPECS_REPORT} rspec_flaky/new_*_*.json
- scripts/detect-new-flaky-examples $NEW_FLAKY_SPECS_REPORT
setup-test-env:
......
......@@ -23,7 +23,7 @@ gem 'faraday', '~> 0.12'
# Authentication libraries
gem 'devise', '~> 4.2'
gem 'doorkeeper', '~> 4.2.0'
gem 'doorkeeper-openid_connect', '~> 1.1.0'
gem 'doorkeeper-openid_connect', '~> 1.2.0'
gem 'omniauth', '~> 1.4.2'
gem 'omniauth-auth0', '~> 1.4.1'
gem 'omniauth-azure-oauth2', '~> 0.0.9'
......
......@@ -80,7 +80,7 @@ GEM
coderay (>= 1.0.0)
erubis (>= 2.6.6)
rack (>= 0.9.0)
bindata (2.3.5)
bindata (2.4.1)
binding_of_caller (0.7.2)
debug_inspector (>= 0.0.1)
bootstrap-sass (3.3.6)
......@@ -166,9 +166,9 @@ GEM
docile (1.1.5)
domain_name (0.5.20161021)
unf (>= 0.0.5, < 1.0.0)
doorkeeper (4.2.0)
doorkeeper (4.2.6)
railties (>= 4.2)
doorkeeper-openid_connect (1.1.2)
doorkeeper-openid_connect (1.2.0)
doorkeeper (~> 4.0)
json-jwt (~> 1.6)
dropzonejs-rails (0.7.2)
......@@ -410,7 +410,7 @@ GEM
railties (>= 4.2.0)
thor (>= 0.14, < 2.0)
json (1.8.6)
json-jwt (1.7.1)
json-jwt (1.7.2)
activesupport
bindata
multi_json (>= 1.3)
......@@ -681,7 +681,7 @@ GEM
rainbow (2.2.2)
rake
raindrops (0.18.0)
rake (12.0.0)
rake (12.1.0)
rblineprof (0.3.6)
debugger-ruby_core_source (~> 1.3)
rbnacl (4.0.2)
......@@ -1002,7 +1002,7 @@ DEPENDENCIES
devise-two-factor (~> 3.0.0)
diffy (~> 3.1.0)
doorkeeper (~> 4.2.0)
doorkeeper-openid_connect (~> 1.1.0)
doorkeeper-openid_connect (~> 1.2.0)
dropzonejs-rails (~> 0.7.1)
email_reply_trimmer (~> 0.1)
email_spec (~> 1.6.0)
......
......@@ -20,7 +20,7 @@
<template v-if="time.days">{{ time.days }} <span>{{ n__('day', 'days', time.days) }}</span></template>
<template v-if="time.hours">{{ time.hours }} <span>{{ n__('Time|hr', 'Time|hrs', time.hours) }}</span></template>
<template v-if="time.mins && !time.days">{{ time.mins }} <span>{{ n__('Time|min', 'Time|mins', time.mins) }}</span></template>
<template v-if="time.seconds && hasDa === 1 || time.seconds === 0">{{ time.seconds }} <span>{{ s__('Time|s') }}</span></template>
<template v-if="time.seconds && hasData === 1 || time.seconds === 0">{{ time.seconds }} <span>{{ s__('Time|s') }}</span></template>
</template>
<template v-else>
--
......
......@@ -4,6 +4,7 @@ import sprintf from './sprintf';
const langAttribute = document.querySelector('html').getAttribute('lang');
const lang = (langAttribute || 'en').replace(/-/g, '_');
const locale = new Jed(window.translations || {});
delete window.translations;
/**
Translates `text`
......
......@@ -3,7 +3,7 @@
import GraphLegend from './graph/legend.vue';
import GraphFlag from './graph/flag.vue';
import GraphDeployment from './graph/deployment.vue';
import GraphPath from './graph_path.vue';
import GraphPath from './graph/path.vue';
import MonitoringMixin from '../mixins/monitoring_mixins';
import eventHub from '../event_hub';
import measurements from '../utils/measurements';
......@@ -69,7 +69,7 @@
},
computed: {
outterViewBox() {
outerViewBox() {
return `0 0 ${this.baseGraphWidth} ${this.baseGraphHeight}`;
},
......@@ -137,17 +137,19 @@
},
renderAxesPaths() {
this.timeSeries = createTimeSeries(this.graphData.queries[0],
this.graphWidth,
this.graphHeight,
this.graphHeightOffset);
this.timeSeries = createTimeSeries(
this.graphData.queries[0],
this.graphWidth,
this.graphHeight,
this.graphHeightOffset,
);
if (this.timeSeries.length > 3) {
this.baseGraphHeight += (this.timeSeries.length - 3) * 20;
}
const axisXScale = d3.time.scale()
.range([0, this.graphWidth]);
.range([0, this.graphWidth - 70]);
const axisYScale = d3.scale.linear()
.range([this.graphHeight - this.graphHeightOffset, 0]);
......@@ -214,7 +216,7 @@
class="prometheus-svg-container"
:style="paddingBottomRootSvg">
<svg
:viewBox="outterViewBox"
:viewBox="outerViewBox"
ref="baseSvg">
<g
class="x-axis"
......
......@@ -72,6 +72,13 @@
:title="pipeline.yaml_errors">
yaml invalid
</span>
<span
v-if="pipeline.flags.failure_reason"
v-tooltip
class="js-pipeline-url-failure label label-danger"
:title="pipeline.failure_reason">
error
</span>
<a
v-if="pipeline.flags.auto_devops"
tabindex="0"
......
......@@ -291,8 +291,14 @@
fill: $black;
}
.tick > text {
font-size: 12px;
.tick {
> line {
stroke: $gray-darker;
}
> text {
font-size: 12px;
}
}
.text-metric-title {
......
......@@ -108,6 +108,15 @@
}
}
.subkeys-list {
@include basic-list;
li {
padding: 3px 0;
border: none;
}
}
.key-list-item {
.key-list-item-info {
@media (min-width: $screen-sm-min) {
......
......@@ -2,7 +2,7 @@ class Profiles::GpgKeysController < Profiles::ApplicationController
before_action :set_gpg_key, only: [:destroy, :revoke]
def index
@gpg_keys = current_user.gpg_keys
@gpg_keys = current_user.gpg_keys.with_subkeys
@gpg_key = GpgKey.new
end
......
......@@ -5,6 +5,7 @@ module Ci
include Importable
include AfterCommitQueue
include Presentable
include Gitlab::OptimisticLocking
belongs_to :project
belongs_to :user
......@@ -58,6 +59,11 @@ module Ci
auto_devops_source: 2
}
enum failure_reason: {
unknown_failure: 0,
config_error: 1
}
state_machine :status, initial: :created do
event :enqueue do
transition created: :pending
......@@ -109,6 +115,12 @@ module Ci
pipeline.auto_canceled_by = nil
end
before_transition any => :failed do |pipeline, transition|
transition.args.first.try do |reason|
pipeline.failure_reason = reason
end
end
after_transition [:created, :pending] => :running do |pipeline|
pipeline.run_after_commit { PipelineMetricsWorker.perform_async(pipeline.id) }
end
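The `before_transition` hook above pulls the failure reason out of the event arguments, so callers can hand it straight to the bang event. A minimal sketch of the intended flow, grounded in the `drop!(:config_error)` call that appears later in this diff (`some_pipeline_id` is a placeholder):

```ruby
# Dropping a pipeline with an explicit failure reason.
pipeline = Ci::Pipeline.find(some_pipeline_id)

# The argument is forwarded to the before_transition hook above,
# which assigns it to the failure_reason enum before saving.
pipeline.drop!(:config_error)

pipeline.failure_reason  # => "config_error"
pipeline.config_error?   # => true (predicate generated by `enum failure_reason:`)
```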
......@@ -263,7 +275,7 @@ module Ci
end
def cancel_running
Gitlab::OptimisticLocking.retry_lock(cancelable_statuses) do |cancelable|
retry_optimistic_lock(cancelable_statuses) do |cancelable|
cancelable.find_each do |job|
yield(job) if block_given?
job.cancel
......@@ -312,6 +324,10 @@ module Ci
@stage_seeds ||= config_processor.stage_seeds(self)
end
def seeds_size
@seeds_size ||= stage_seeds.sum(&:size)
end
def has_kubernetes_active?
project.kubernetes_service&.active?
end
......@@ -403,7 +419,7 @@ module Ci
end
def update_status
Gitlab::OptimisticLocking.retry_lock(self) do
retry_optimistic_lock(self) do
case latest_builds_status
when 'pending' then enqueue
when 'running' then run
......
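Both `cancel_running` and `update_status` now call `retry_optimistic_lock`, the instance helper made available by including `Gitlab::OptimisticLocking` at the top of this diff. A sketch of the assumed shape of that concern (its definition is not part of this diff):

```ruby
# Assumed shape only; the real module lives in lib/gitlab/optimistic_locking.rb.
module Gitlab
  module OptimisticLocking
    # Retries the block when ActiveRecord::StaleObjectError is raised,
    # reloading the subject between attempts.
    def retry_optimistic_lock(subject, &block)
      Gitlab::OptimisticLocking.retry_lock(subject, &block)
    end
  end
end
```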
......@@ -81,6 +81,7 @@ module HasStatus
scope :canceled, -> { where(status: 'canceled') }
scope :skipped, -> { where(status: 'skipped') }
scope :manual, -> { where(status: 'manual') }
scope :alive, -> { where(status: [:created, :pending, :running]) }
scope :created_or_pending, -> { where(status: [:created, :pending]) }
scope :running_or_pending, -> { where(status: [:running, :pending]) }
scope :finished, -> { where(status: [:success, :failed, :canceled]) }
......
......@@ -9,6 +9,9 @@ class GpgKey < ActiveRecord::Base
belongs_to :user
has_many :gpg_signatures
has_many :subkeys, class_name: 'GpgKeySubkey'
scope :with_subkeys, -> { includes(:subkeys) }
validates :user, presence: true
......@@ -36,10 +39,12 @@ class GpgKey < ActiveRecord::Base
before_validation :extract_fingerprint, :extract_primary_keyid
after_commit :update_invalid_gpg_signatures, on: :create
after_create :generate_subkeys
def primary_keyid
super&.upcase
end
alias_method :keyid, :primary_keyid
def fingerprint
super&.upcase
......@@ -49,6 +54,10 @@ class GpgKey < ActiveRecord::Base
super(value&.strip)
end
def keyids
[keyid].concat(subkeys.map(&:keyid))
end
def user_infos
@user_infos ||= Gitlab::Gpg.user_infos_from_key(key)
end
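`keyids` returns the primary keyid together with every subkey keyid, which is what lets the signature updater further down in this diff match signatures made with any of them. A rough sketch with hypothetical keyid values:

```ruby
# Hypothetical keyids, for illustration only.
gpg_key.primary_keyid        # => "4F4840A503964251"
gpg_key.subkeys.map(&:keyid) # => ["E38FFCAF75ABD92A"]
gpg_key.keyids               # => ["4F4840A503964251", "E38FFCAF75ABD92A"]
```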
......@@ -82,10 +91,11 @@ class GpgKey < ActiveRecord::Base
def revoke
GpgSignature
.where(gpg_key: self)
.with_key_and_subkeys(self)
.where.not(verification_status: GpgSignature.verification_statuses[:unknown_key])
.update_all(
gpg_key_id: nil,
gpg_key_subkey_id: nil,
verification_status: GpgSignature.verification_statuses[:unknown_key],
updated_at: Time.zone.now
)
......@@ -106,4 +116,12 @@ class GpgKey < ActiveRecord::Base
# only allows one key
self.primary_keyid = Gitlab::Gpg.primary_keyids_from_key(key).first
end
def generate_subkeys
gpg_subkeys = Gitlab::Gpg.subkeys_from_key(key)
gpg_subkeys[primary_keyid]&.each do |subkey_data|
subkeys.create!(keyid: subkey_data[:keyid], fingerprint: subkey_data[:fingerprint])
end
end
end
class GpgKeySubkey < ActiveRecord::Base
include ShaAttribute
sha_attribute :keyid
sha_attribute :fingerprint
belongs_to :gpg_key
validates :gpg_key_id, presence: true
validates :fingerprint, :keyid, presence: true, uniqueness: true
delegate :key, :user, :user_infos, :verified?, :verified_user_infos,
:verified_and_belongs_to_email?, to: :gpg_key
def keyid
super&.upcase
end
def fingerprint
super&.upcase
end
end
......@@ -15,11 +15,42 @@ class GpgSignature < ActiveRecord::Base
belongs_to :project
belongs_to :gpg_key
belongs_to :gpg_key_subkey
validates :commit_sha, presence: true
validates :project_id, presence: true
validates :gpg_key_primary_keyid, presence: true
def self.with_key_and_subkeys(gpg_key)
subkey_ids = gpg_key.subkeys.pluck(:id)
where(
arel_table[:gpg_key_id].eq(gpg_key.id).or(
arel_table[:gpg_key_subkey_id].in(subkey_ids)
)
)
end
def gpg_key=(model)
case model
when GpgKey
super
when GpgKeySubkey
self.gpg_key_subkey = model
when NilClass
super
self.gpg_key_subkey = nil
end
end
def gpg_key
if gpg_key_id
super
elsif gpg_key_subkey_id
gpg_key_subkey
end
end
def gpg_key_primary_keyid
super&.upcase
end
......
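The custom `gpg_key=` writer routes a `GpgKeySubkey` into the separate `gpg_key_subkey` association, and the reader hides the distinction again. A minimal sketch of the resulting behaviour, assuming `subkey` is a persisted `GpgKeySubkey`:

```ruby
signature = GpgSignature.new

signature.gpg_key = subkey   # stored on gpg_key_subkey, not gpg_key
signature.gpg_key_id         # => nil
signature.gpg_key_subkey_id  # => subkey.id
signature.gpg_key            # => subkey (reader falls back to the subkey)
signature.gpg_key.user       # => delegated through the subkey to its parent GpgKey
```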
module Ci
class PipelinePresenter < Gitlab::View::Presenter::Delegated
FAILURE_REASONS = {
config_error: 'CI/CD YAML configuration error!'
}.freeze
presents :pipeline
def failure_reason
return unless pipeline.failure_reason?
FAILURE_REASONS[pipeline.failure_reason.to_sym] ||
pipeline.failure_reason
end
def status_title
if auto_canceled?
"Pipeline is redundant and is auto-canceled by Pipeline ##{auto_canceled_by_id}"
......
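`failure_reason` on the presenter maps known enum values to a human-readable message, falling back to the raw enum string for any reason without an entry in `FAILURE_REASONS`. Continuing the earlier sketch:

```ruby
pipeline.failure_reason          # => "config_error" (raw enum value)
pipeline.present.failure_reason  # => "CI/CD YAML configuration error!"

# A reason missing from FAILURE_REASONS, e.g. "unknown_failure",
# would be returned unchanged.
```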
......@@ -20,6 +20,7 @@ class PipelineEntity < Grape::Entity
expose :has_yaml_errors?, as: :yaml_errors
expose :can_retry?, as: :retryable
expose :can_cancel?, as: :cancelable
expose :failure_reason?, as: :failure_reason
end
expose :details do
......@@ -44,6 +45,11 @@ class PipelineEntity < Grape::Entity
end
expose :commit, using: CommitEntity
expose :yaml_errors, if: -> (pipeline, _) { pipeline.has_yaml_errors? }
expose :failure_reason, if: -> (pipeline, _) { pipeline.failure_reason? } do |pipeline|
pipeline.present.failure_reason
end
expose :retry_path, if: -> (*) { can_retry? } do |pipeline|
retry_project_pipeline_path(pipeline.project, pipeline)
......@@ -53,8 +59,6 @@ class PipelineEntity < Grape::Entity
cancel_project_pipeline_path(pipeline.project, pipeline)
end
expose :yaml_errors, if: -> (pipeline, _) { pipeline.has_yaml_errors? }
private
alias_method :pipeline, :object
......
......@@ -37,7 +37,7 @@
- if content_for?(:library_javascripts)
= yield :library_javascripts
= javascript_include_tag asset_path("locale/#{I18n.locale.to_s || I18n.default_locale.to_s}/app.js")
= javascript_include_tag asset_path("locale/#{I18n.locale.to_s || I18n.default_locale.to_s}/app.js") unless I18n.locale == :en
= webpack_bundle_tag "webpack_runtime"
= webpack_bundle_tag "common"
= webpack_bundle_tag "main"
......
......@@ -7,6 +7,13 @@
.description
%code= key.fingerprint
- if key.subkeys.present?
.subkeys
%span.bold Subkeys:
%ul.subkeys-list
- key.subkeys.each do |subkey|
%li
%code= subkey.fingerprint
.pull-right
%span.key-created-at
created #{time_ago_with_tooltip(key.created_at)}
......
- @content_class = "limit-container-width limited-inner-width-container" unless fluid_layout
- add_to_breadcrumbs "Snippets", dashboard_snippets_path
- add_to_breadcrumbs "Snippets", project_snippets_path(@project)
- breadcrumb_title @snippet.to_reference
- page_title "#{@snippet.title} (#{@snippet.to_reference})", "Snippets"
......
---
title: Add support for GPG subkeys in signature verification
merge_request: 14517
author:
type: added
---
title: Fix incorrect X-axis labels in Prometheus graphs
merge_request: 14258
author:
type: fixed
---
title: Fix project snippets breadcrumb link
merge_request:
author:
type: fixed
---
title: Upgrade doorkeeper-openid_connect
merge_request: 14372
author: Markus Koller
type: other
---
title: Fix typo in cycle analytics breaking time component
merge_request:
author:
type: fixed
---
title: Prevent branches or tags from starting with invalid characters (e.g. -, .)
merge_request:
author:
type: fixed
Doorkeeper::OpenidConnect.configure do
issuer Gitlab.config.gitlab.url
jws_private_key Rails.application.secrets.jws_private_key
signing_key Rails.application.secrets.openid_connect_signing_key
resource_owner_from_access_token do |access_token|
User.active.find_by(id: access_token.resource_owner_id)
......
......@@ -25,7 +25,7 @@ def create_tokens
secret_key_base: file_secret_key || generate_new_secure_token,
otp_key_base: env_secret_key || file_secret_key || generate_new_secure_token,
db_key_base: generate_new_secure_token,
jws_private_key: generate_new_rsa_private_key
openid_connect_signing_key: generate_new_rsa_private_key
}
missing_secrets = set_missing_keys(defaults)
......
......@@ -2,7 +2,7 @@
require 'gitlab/current_settings'
if Rails.env.production?
def configure_sentry
# allow it to fail: it may do so when create_from_defaults is executed before migrations are actually done
begin
sentry_enabled = Gitlab::CurrentSettings.current_application_settings.sentry_enabled
......@@ -23,3 +23,5 @@ if Rails.env.production?
end
end
end
configure_sentry if Rails.env.production?
class CreateGpgKeySubkeys < ActiveRecord::Migration
DOWNTIME = false
def up
create_table :gpg_key_subkeys do |t|
t.references :gpg_key, null: false, index: true, foreign_key: { on_delete: :cascade }
t.binary :keyid
t.binary :fingerprint
t.index :keyid, unique: true, length: Gitlab::Database.mysql? ? 20 : nil
t.index :fingerprint, unique: true, length: Gitlab::Database.mysql? ? 20 : nil
end
add_reference :gpg_signatures, :gpg_key_subkey, index: true, foreign_key: { on_delete: :nullify }
end
def down
remove_reference(:gpg_signatures, :gpg_key_subkey, index: true, foreign_key: true)
drop_table :gpg_key_subkeys
end
end
class AddFailureReasonToPipelines < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
add_column :ci_pipelines, :failure_reason, :integer
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class ScheduleCreateGpgKeySubkeysFromGpgKeys < ActiveRecord::Migration
disable_ddl_transaction!
DOWNTIME = false
MIGRATION = 'CreateGpgKeySubkeysFromGpgKeys'
class GpgKey < ActiveRecord::Base
self.table_name = 'gpg_keys'
include EachBatch
end
def up
GpgKey.select(:id).each_batch do |gpg_keys|
jobs = gpg_keys.pluck(:id).map do |id|
[MIGRATION, [id]]
end
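# Each entry is a [migration class name, arguments] pair; one background
# migration job is enqueued per entry.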
BackgroundMigrationWorker.perform_bulk(jobs)
end
end
def down
end
end
......@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20171004121444) do
ActiveRecord::Schema.define(version: 20171005130944) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
......@@ -342,6 +342,7 @@ ActiveRecord::Schema.define(version: 20171004121444) do
t.integer "source"
t.integer "config_source"
t.boolean "protected"
t.integer "failure_reason"
end
add_index "ci_pipelines", ["auto_canceled_by_id"], name: "index_ci_pipelines_on_auto_canceled_by_id", using: :btree
......@@ -608,6 +609,16 @@ ActiveRecord::Schema.define(version: 20171004121444) do
add_index "gcp_clusters", ["project_id"], name: "index_gcp_clusters_on_project_id", unique: true, using: :btree
create_table "gpg_key_subkeys", force: :cascade do |t|
t.integer "gpg_key_id", null: false
t.binary "keyid"
t.binary "fingerprint"
end
add_index "gpg_key_subkeys", ["fingerprint"], name: "index_gpg_key_subkeys_on_fingerprint", unique: true, using: :btree
add_index "gpg_key_subkeys", ["gpg_key_id"], name: "index_gpg_key_subkeys_on_gpg_key_id", using: :btree
add_index "gpg_key_subkeys", ["keyid"], name: "index_gpg_key_subkeys_on_keyid", unique: true, using: :btree
create_table "gpg_keys", force: :cascade do |t|
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
......@@ -631,11 +642,13 @@ ActiveRecord::Schema.define(version: 20171004121444) do
t.text "gpg_key_user_name"
t.text "gpg_key_user_email"
t.integer "verification_status", limit: 2, default: 0, null: false
t.integer "gpg_key_subkey_id"
end
add_index "gpg_signatures", ["commit_sha"], name: "index_gpg_signatures_on_commit_sha", unique: true, using: :btree
add_index "gpg_signatures", ["gpg_key_id"], name: "index_gpg_signatures_on_gpg_key_id", using: :btree
add_index "gpg_signatures", ["gpg_key_primary_keyid"], name: "index_gpg_signatures_on_gpg_key_primary_keyid", using: :btree
add_index "gpg_signatures", ["gpg_key_subkey_id"], name: "index_gpg_signatures_on_gpg_key_subkey_id", using: :btree
add_index "gpg_signatures", ["project_id"], name: "index_gpg_signatures_on_project_id", using: :btree
create_table "identities", force: :cascade do |t|
......@@ -1758,7 +1771,9 @@ ActiveRecord::Schema.define(version: 20171004121444) do
add_foreign_key "gcp_clusters", "projects", on_delete: :cascade
add_foreign_key "gcp_clusters", "services", on_delete: :nullify
add_foreign_key "gcp_clusters", "users", on_delete: :nullify
add_foreign_key "gpg_key_subkeys", "gpg_keys", on_delete: :cascade
add_foreign_key "gpg_keys", "users", on_delete: :cascade
add_foreign_key "gpg_signatures", "gpg_key_subkeys", on_delete: :nullify
add_foreign_key "gpg_signatures", "gpg_keys", on_delete: :nullify
add_foreign_key "gpg_signatures", "projects", on_delete: :cascade
add_foreign_key "issue_assignees", "issues", name: "fk_b7d881734a", on_delete: :cascade
......
......@@ -133,9 +133,9 @@ Remove the old Ruby 1.8 if present:
Download Ruby and compile it:
mkdir /tmp/ruby && cd /tmp/ruby
curl --remote-name --progress https://cache.ruby-lang.org/pub/ruby/2.3/ruby-2.3.3.tar.gz
echo '1014ee699071aa2ddd501907d18cbe15399c997d ruby-2.3.3.tar.gz' | shasum -c - && tar xzf ruby-2.3.3.tar.gz
cd ruby-2.3.3
curl --remote-name --progress https://cache.ruby-lang.org/pub/ruby/2.3/ruby-2.3.5.tar.gz
echo '3247e217d6745c27ef23bdc77b6abdb4b57a118f ruby-2.3.5.tar.gz' | shasum -c - && tar xzf ruby-2.3.5.tar.gz
cd ruby-2.3.5
./configure --disable-install-rdoc
make
sudo make install
......
*** NOTE: These instructions should be considered deprecated. In GitLab 10.0 we will be releasing new migration instructions using [pgloader](http://pgloader.io/).
---
last_updated: 2017-10-05
---
# Migrating GitLab from MySQL to Postgres
*Make sure you view this [guide from the `master` branch](https://gitlab.com/gitlab-org/gitlab-ce/blob/master/doc/update/mysql_to_postgresql.md#migrating-gitlab-from-mysql-to-postgres) for the most up to date instructions.*
# Migrating from MySQL to PostgreSQL
If you are replacing MySQL with Postgres while keeping GitLab on the same server, all you need to do is export from MySQL, convert the resulting SQL file, and import it into Postgres. If you are also moving GitLab to another server, or if you are switching to omnibus-gitlab, you may want to use a GitLab backup file. The second part of this document explains the procedure to do this.
> **Note:** This guide assumes you have a working Omnibus GitLab instance with
> MySQL and want to migrate to the bundled PostgreSQL database.
## Export from MySQL and import into Postgres
## Prerequisites
Use this if you are keeping GitLab on the same server.
First, we'll need to enable the bundled PostgreSQL database with an up-to-date
schema. Next, we'll use [pgloader](http://pgloader.io) to migrate the data
from the old MySQL database to the new PostgreSQL one.
```
sudo service gitlab stop
Here's what you'll need to have installed:
# Update /home/git/gitlab/config/database.yml
- pgloader 3.4.1+
- Omnibus GitLab
- MySQL
git clone https://github.com/gitlabhq/mysql-postgresql-converter.git -b gitlab
cd mysql-postgresql-converter
mysqldump --compatible=postgresql --default-character-set=utf8 -r gitlabhq_production.mysql -u root gitlabhq_production -p
python db_converter.py gitlabhq_production.mysql gitlabhq_production.psql
ed -s gitlabhq_production.psql < move_drop_indexes.ed
## Enable bundled PostgreSQL database
# Import the database dump as the application database user
sudo -u git psql -f gitlabhq_production.psql -d gitlabhq_production
1. Stop GitLab:
# Install gems for PostgreSQL (note: the line below states '--without ... mysql')
sudo -u git -H bundle install --without development test mysql --deployment
``` bash
sudo gitlab-ctl stop
```
sudo service gitlab start
```
1. Edit `/etc/gitlab/gitlab.rb` to enable bundled PostgreSQL:
## Converting a GitLab backup file from MySQL to Postgres
**Note:** Please make sure to have Python 2.7.x (or higher) installed.
```
postgresql['enable'] = true
```
GitLab backup files (`<timestamp>_gitlab_backup.tar`) contain a SQL dump. Using the Lanyrd database converter, we can replace a MySQL database dump inside the tar file with a Postgres database dump. This can be useful if you are moving to another server.
1. Edit `/etc/gitlab/gitlab.rb` to use the bundled PostgreSQL. Please check
all the settings beginning with `db_`, such as `gitlab_rails['db_adapter']`
and the like. You can simply comment them all out so that the defaults are
used.
```
# Stop GitLab
sudo service gitlab stop
1. [Reconfigure GitLab] for the changes to take effect:
``` bash
sudo gitlab-ctl reconfigure
```
1. Start Unicorn and PostgreSQL so that we can prepare the schema:
``` bash
sudo gitlab-ctl start unicorn
sudo gitlab-ctl start postgresql
```
1. Run the following commands to prepare the schema:
``` bash
sudo gitlab-rake db:create db:migrate
```
1. Stop Unicorn in case it interferes with the next step:
# Create the backup
cd /home/git/gitlab
sudo -u git -H bundle exec rake gitlab:backup:create RAILS_ENV=production
``` bash
sudo gitlab-ctl stop unicorn
```
# Note the filename of the backup that was created. We will call it
# TIMESTAMP_gitlab_backup.tar below.
After these steps, you'll have a fresh PostgreSQL database with an up-to-date schema.
# Move the backup file we will convert to its own directory
sudo -u git -H mkdir -p tmp/backups/postgresql
sudo -u git -H mv tmp/backups/TIMESTAMP_gitlab_backup.tar tmp/backups/postgresql/
## Migrate data from MySQL to PostgreSQL
# Create a separate database dump with PostgreSQL compatibility
cd tmp/backups/postgresql
sudo -u git -H mysqldump --compatible=postgresql --default-character-set=utf8 -r gitlabhq_production.mysql -u root gitlabhq_production -p
Now, you can use pgloader to migrate the data from MySQL to PostgreSQL:
# Clone the database converter
sudo -u git -H git clone https://github.com/gitlabhq/mysql-postgresql-converter.git -b gitlab
1. Save the following snippet in a `commands.load` file, and edit with your
database `username`, `password` and `host`:
# Convert gitlabhq_production.mysql
sudo -u git -H mkdir db
sudo -u git -H python mysql-postgresql-converter/db_converter.py gitlabhq_production.mysql db/database.sql
sudo -u git -H ed -s db/database.sql < mysql-postgresql-converter/move_drop_indexes.ed
```
LOAD DATABASE
FROM mysql://username:password@host/gitlabhq_production
INTO postgresql://gitlab-psql@unix://var/opt/gitlab/postgresql:/gitlabhq_production
# Compress database backup
# Warning: If you have GitLab 7.12.0 or older, skip this step and import the database.sql directly into the backup with:
# sudo -u git -H tar rf TIMESTAMP_gitlab_backup.tar db/database.sql
# A compressed database dump is not supported at 7.12.0 and older.
sudo -u git -H gzip db/database.sql
WITH include no drop, truncate, disable triggers, create no tables,
create no indexes, preserve index names, no foreign keys,
data only
# Replace the MySQL dump in TIMESTAMP_gitlab_backup.tar.
ALTER SCHEMA 'gitlabhq_production' RENAME TO 'public'
# Warning: if you forget to replace TIMESTAMP below, tar will create a new file
# 'TIMESTAMP_gitlab_backup.tar' without giving an error.
;
```
sudo -u git -H tar rf TIMESTAMP_gitlab_backup.tar db/database.sql.gz
1. Start the migration:
# Done! TIMESTAMP_gitlab_backup.tar can now be restored into a Postgres GitLab
# installation.
# See https://gitlab.com/gitlab-org/gitlab-ce/blob/master/doc/raketasks/backup_restore.md for more information about backups.
``` bash
sudo -u gitlab-psql pgloader commands.load
```
1. Once the migration finishes, start GitLab:
``` bash
sudo gitlab-ctl start
```
Now, you can verify that everything worked by visiting GitLab.
## Troubleshooting
### Experiencing 500 errors after the migration
If you experience 500 errors after the migration, try to clear the cache:
``` bash
sudo gitlab-rake cache:clear
```
[reconfigure GitLab]: ../administration/restart_gitlab.md#omnibus-gitlab-reconfigure
......@@ -287,7 +287,7 @@ module API
if sentry_enabled? && report_exception?(exception)
define_params_for_grape_middleware
sentry_context
Raven.capture_exception(exception)
Raven.capture_exception(exception, extra: params)
end
# lifted from https://github.com/rails/rails/blob/master/actionpack/lib/action_dispatch/middleware/debug_exceptions.rb#L60
......
class Gitlab::BackgroundMigration::CreateGpgKeySubkeysFromGpgKeys
class GpgKey < ActiveRecord::Base
self.table_name = 'gpg_keys'
include EachBatch
include ShaAttribute
sha_attribute :primary_keyid
sha_attribute :fingerprint
has_many :subkeys, class_name: 'GpgKeySubkey'
end
class GpgKeySubkey < ActiveRecord::Base
self.table_name = 'gpg_key_subkeys'
include ShaAttribute
sha_attribute :keyid
sha_attribute :fingerprint
end
def perform(gpg_key_id)
gpg_key = GpgKey.find_by(id: gpg_key_id)
return if gpg_key.nil?
return if gpg_key.subkeys.any?
create_subkeys(gpg_key)
update_signatures(gpg_key)
end
private
def create_subkeys(gpg_key)
gpg_subkeys = Gitlab::Gpg.subkeys_from_key(gpg_key.key)
gpg_subkeys[gpg_key.primary_keyid.upcase]&.each do |subkey_data|
gpg_key.subkeys.build(keyid: subkey_data[:keyid], fingerprint: subkey_data[:fingerprint])
end
# Improve latency by doing all INSERTs in a single call
GpgKey.transaction do
gpg_key.save!
end
end
def update_signatures(gpg_key)
return unless gpg_key.subkeys.exists?
InvalidGpgSignatureUpdateWorker.perform_async(gpg_key.id)
end
end
......@@ -13,7 +13,7 @@ module Gitlab
end
if @command.save_incompleted && @pipeline.has_yaml_errors?
@pipeline.drop
@pipeline.drop!(:config_error)
end
return error(@pipeline.yaml_errors)
......
......@@ -3,7 +3,9 @@ module Gitlab
module Stage
class Seed
attr_reader :pipeline
delegate :project, to: :pipeline
delegate :size, to: :@jobs
def initialize(pipeline, stage, jobs)
@pipeline = pipeline
......
......@@ -2,7 +2,7 @@
module Gitlab
# Checks if a set of migrations requires downtime or not.
class EeCompatCheck
CE_REPO = 'https://gitlab.com/gitlab-org/gitlab-ce.git'.freeze
DEFAULT_CE_REPO = 'https://gitlab.com/gitlab-org/gitlab-ce.git'.freeze
EE_REPO = 'https://gitlab.com/gitlab-org/gitlab-ee.git'.freeze
CHECK_DIR = Rails.root.join('ee_compat_check')
IGNORED_FILES_REGEX = /(VERSION|CHANGELOG\.md:\d+)/.freeze
......@@ -20,7 +20,7 @@ module Gitlab
attr_reader :ee_repo_dir, :patches_dir, :ce_repo, :ce_branch, :ee_branch_found
attr_reader :failed_files
def initialize(branch:, ce_repo: CE_REPO)
def initialize(branch:, ce_repo: DEFAULT_CE_REPO)
@ee_repo_dir = CHECK_DIR.join('ee-repo')
@patches_dir = CHECK_DIR.join('patches')
@ce_branch = branch
......@@ -132,7 +132,7 @@ module Gitlab
def check_patch(patch_path)
step("Checking out master", %w[git checkout master])
step("Resetting to latest master", %w[git reset --hard origin/master])
step("Fetching CE/#{ce_branch}", %W[git fetch #{CE_REPO} #{ce_branch}])
step("Fetching CE/#{ce_branch}", %W[git fetch #{ce_repo} #{ce_branch}])
step(
"Checking if #{patch_path} applies cleanly to EE/master",
# Don't use --check here because it can result in a 0-exit status even
......
......@@ -11,7 +11,7 @@ module Gitlab
return false if ref_name.start_with?('refs/remotes/')
Gitlab::Utils.system_silent(
%W(#{Gitlab.config.git.bin_path} check-ref-format refs/#{ref_name}))
%W(#{Gitlab.config.git.bin_path} check-ref-format --branch #{ref_name}))
end
end
end
......@@ -34,6 +34,21 @@ module Gitlab
end
end
def subkeys_from_key(key)
using_tmp_keychain do
fingerprints = CurrentKeyChain.fingerprints_from_key(key)
raw_keys = GPGME::Key.find(:public, fingerprints)
raw_keys.each_with_object({}) do |raw_key, grouped_subkeys|
primary_subkey_id = raw_key.primary_subkey.keyid
grouped_subkeys[primary_subkey_id] = raw_key.subkeys[1..-1].map do |s|
{ keyid: s.keyid, fingerprint: s.fingerprint }
end
end
end
end
def user_infos_from_key(key)
using_tmp_keychain do
fingerprints = CurrentKeyChain.fingerprints_from_key(key)
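`subkeys_from_key` groups the subkeys of each key under the keyid of its primary subkey, skipping the primary itself (`subkeys[1..-1]`). A sketch of the returned structure, with hypothetical keyids (the fingerprint is the one used in the specs below):

```ruby
Gitlab::Gpg.subkeys_from_key(public_key)
# => {
#      "4F4840A503964251" => [
#        { keyid: "E38FFCAF75ABD92A",
#          fingerprint: "0522DD29B98F167CD8421752E38FFCAF75ABD92A" }
#      ]
#    }
# A key without subkeys maps to an empty array.
```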
......
......@@ -43,7 +43,9 @@ module Gitlab
# key belonging to the keyid.
# This way we can add the key to the temporary keychain and extract
# the proper signature.
gpg_key = GpgKey.find_by(primary_keyid: verified_signature.fingerprint)
# NOTE: the invoked method is #fingerprint but it's only returning
# 16 characters (the format used by keyid) instead of 40.
gpg_key = find_gpg_key(verified_signature.fingerprint)
if gpg_key
Gitlab::Gpg::CurrentKeyChain.add(gpg_key.key)
......@@ -74,7 +76,7 @@ module Gitlab
commit_sha: @commit.sha,
project: @commit.project,
gpg_key: gpg_key,
gpg_key_primary_keyid: gpg_key&.primary_keyid || verified_signature.fingerprint,
gpg_key_primary_keyid: gpg_key&.keyid || verified_signature.fingerprint,
gpg_key_user_name: user_infos[:name],
gpg_key_user_email: user_infos[:email],
verification_status: verification_status
......@@ -98,6 +100,10 @@ module Gitlab
def user_infos(gpg_key)
gpg_key&.verified_user_infos&.first || gpg_key&.user_infos&.first || {}
end
def find_gpg_key(keyid)
GpgKey.find_by(primary_keyid: keyid) || GpgKeySubkey.find_by(keyid: keyid)
end
end
end
end
......@@ -9,7 +9,7 @@ module Gitlab
GpgSignature
.select(:id, :commit_sha, :project_id)
.where('gpg_key_id IS NULL OR verification_status <> ?', GpgSignature.verification_statuses[:verified])
.where(gpg_key_primary_keyid: @gpg_key.primary_keyid)
.where(gpg_key_primary_keyid: @gpg_key.keyids)
.find_each { |sig| sig.gpg_commit.update_signature!(sig) }
end
end
......
require 'json'
module RspecFlaky
class Config
def self.generate_report?
ENV['FLAKY_RSPEC_GENERATE_REPORT'] == 'true'
end
def self.suite_flaky_examples_report_path
ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'] || Rails.root.join("rspec_flaky/suite-report.json")
end
def self.flaky_examples_report_path
ENV['FLAKY_RSPEC_REPORT_PATH'] || Rails.root.join("rspec_flaky/report.json")
end
def self.new_flaky_examples_report_path
ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] || Rails.root.join("rspec_flaky/new-report.json")
end
end
end
......@@ -9,24 +9,21 @@ module RspecFlaky
line: example.line,
description: example.description,
last_attempts_count: example.attempts,
flaky_reports: 1)
flaky_reports: 0)
else
super
end
end
def first_flaky_at
self[:first_flaky_at] || Time.now
end
def last_flaky_at
Time.now
end
def update_flakiness!(last_attempts_count: nil)
self.first_flaky_at ||= Time.now
self.last_flaky_at = Time.now
self.flaky_reports += 1
self.last_attempts_count = last_attempts_count if last_attempts_count
def last_flaky_job
return unless ENV['CI_PROJECT_URL'] && ENV['CI_JOB_ID']
"#{ENV['CI_PROJECT_URL']}/-/jobs/#{ENV['CI_JOB_ID']}"
if ENV['CI_PROJECT_URL'] && ENV['CI_JOB_ID']
self.last_flaky_job = "#{ENV['CI_PROJECT_URL']}/-/jobs/#{ENV['CI_JOB_ID']}"
end
end
def to_h
......
require 'json'
module RspecFlaky
class FlakyExamplesCollection < SimpleDelegator
def self.from_json(json)
new(JSON.parse(json))
end
def initialize(collection = {})
unless collection.is_a?(Hash)
raise ArgumentError, "`collection` must be a Hash, #{collection.class} given!"
end
collection_of_flaky_examples =
collection.map do |uid, example|
[
uid,
example.is_a?(RspecFlaky::FlakyExample) ? example : RspecFlaky::FlakyExample.new(example)
]
end
super(Hash[collection_of_flaky_examples])
end
def to_report
Hash[map { |uid, example| [uid, example.to_h] }].deep_symbolize_keys
end
def -(other)
unless other.respond_to?(:key?)
raise ArgumentError, "`other` must respond to `#key?`, #{other.class} does not!"
end
self.class.new(reject { |uid, _| other.key?(uid) })
end
end
end
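The custom `#-` turns new-flaky detection into a set difference, which is exactly how the listener in the next hunk computes its new-examples report. A small sketch, where `attrs` stands for a hash of flaky-example attributes like the one used in the specs below:

```ruby
suite   = RspecFlaky::FlakyExamplesCollection.new('uid-1' => attrs)
current = RspecFlaky::FlakyExamplesCollection.new(
  'uid-1' => attrs,
  'uid-2' => attrs
)

(current - suite).keys # => ['uid-2'], only the examples absent from the suite report
```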
......@@ -2,11 +2,15 @@ require 'json'
module RspecFlaky
class Listener
attr_reader :all_flaky_examples, :new_flaky_examples
def initialize
@new_flaky_examples = {}
@all_flaky_examples = init_all_flaky_examples
# - suite_flaky_examples: contains all the currently tracked flaky examples
# for the whole RSpec suite
# - flaky_examples: contains the examples detected as flaky during the
# current RSpec run
attr_reader :suite_flaky_examples, :flaky_examples
def initialize(suite_flaky_examples_json = nil)
@flaky_examples = FlakyExamplesCollection.new
@suite_flaky_examples = init_suite_flaky_examples(suite_flaky_examples_json)
end
def example_passed(notification)
......@@ -14,29 +18,21 @@ module RspecFlaky
return unless current_example.attempts > 1
flaky_example_hash = all_flaky_examples[current_example.uid]
all_flaky_examples[current_example.uid] =
if flaky_example_hash
FlakyExample.new(flaky_example_hash).tap do |ex|
ex.last_attempts_count = current_example.attempts
ex.flaky_reports += 1
end
else
FlakyExample.new(current_example).tap do |ex|
new_flaky_examples[current_example.uid] = ex
end
end
flaky_example = suite_flaky_examples.fetch(current_example.uid) { FlakyExample.new(current_example) }
flaky_example.update_flakiness!(last_attempts_count: current_example.attempts)
flaky_examples[current_example.uid] = flaky_example
end
def dump_summary(_)
write_report_file(all_flaky_examples, all_flaky_examples_report_path)
write_report_file(flaky_examples, RspecFlaky::Config.flaky_examples_report_path)
new_flaky_examples = flaky_examples - suite_flaky_examples
if new_flaky_examples.any?
Rails.logger.warn "\nNew flaky examples detected:\n"
Rails.logger.warn JSON.pretty_generate(to_report(new_flaky_examples))
Rails.logger.warn JSON.pretty_generate(new_flaky_examples.to_report)
write_report_file(new_flaky_examples, new_flaky_examples_report_path)
write_report_file(new_flaky_examples, RspecFlaky::Config.new_flaky_examples_report_path)
end
end
......@@ -46,30 +42,23 @@ module RspecFlaky
private
def init_all_flaky_examples
return {} unless File.exist?(all_flaky_examples_report_path)
def init_suite_flaky_examples(suite_flaky_examples_json = nil)
unless suite_flaky_examples_json
return {} unless File.exist?(RspecFlaky::Config.suite_flaky_examples_report_path)
all_flaky_examples = JSON.parse(File.read(all_flaky_examples_report_path))
suite_flaky_examples_json = File.read(RspecFlaky::Config.suite_flaky_examples_report_path)
end
Hash[(all_flaky_examples || {}).map { |k, ex| [k, FlakyExample.new(ex)] }]
FlakyExamplesCollection.from_json(suite_flaky_examples_json)
end
def write_report_file(examples, file_path)
return unless ENV['FLAKY_RSPEC_GENERATE_REPORT'] == 'true'
def write_report_file(examples_collection, file_path)
return unless RspecFlaky::Config.generate_report?
report_path_dir = File.dirname(file_path)
FileUtils.mkdir_p(report_path_dir) unless Dir.exist?(report_path_dir)
File.write(file_path, JSON.pretty_generate(to_report(examples)))
end
def all_flaky_examples_report_path
@all_flaky_examples_report_path ||= ENV['ALL_FLAKY_RSPEC_REPORT_PATH'] ||
Rails.root.join("rspec_flaky/all-report.json")
end
def new_flaky_examples_report_path
@new_flaky_examples_report_path ||= ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] ||
Rails.root.join("rspec_flaky/new-report.json")
File.write(file_path, JSON.pretty_generate(examples_collection.to_report))
end
end
end
......@@ -4,7 +4,10 @@ namespace :gitlab do
task :ee_compat_check, [:branch] => :environment do |_, args|
opts =
if ENV['CI']
{ branch: ENV['CI_COMMIT_REF_NAME'] }
{
ce_repo: ENV['CI_REPOSITORY_URL'],
branch: ENV['CI_COMMIT_REF_NAME']
}
else
unless args[:branch]
puts "Must specify a branch as an argument".color(:red)
......
......@@ -47,6 +47,7 @@ FactoryGirl.define do
trait :invalid do
config(rspec: nil)
failure_reason :config_error
end
trait :blocked do
......
require_relative '../support/gpg_helpers'
FactoryGirl.define do
factory :gpg_key_subkey do
gpg_key
sequence(:keyid) { |n| "keyid-#{n}" }
sequence(:fingerprint) { |n| "fingerprint-#{n}" }
end
end
......@@ -4,5 +4,9 @@ FactoryGirl.define do
factory :gpg_key do
key GpgHelpers::User1.public_key
user
factory :gpg_key_with_subkeys do
key GpgHelpers::User1.public_key_with_extra_signing_key
end
end
end
......@@ -5,7 +5,7 @@ FactoryGirl.define do
commit_sha { Digest::SHA1.hexdigest(SecureRandom.hex) }
project
gpg_key
gpg_key_primary_keyid { gpg_key.primary_keyid }
gpg_key_primary_keyid { gpg_key.keyid }
verification_status :verified
end
end
......@@ -20,6 +20,18 @@ feature 'Profile > GPG Keys' do
expect(page).to have_content('bette.cartwright@example.net Unverified')
expect(page).to have_content(GpgHelpers::User2.fingerprint)
end
scenario 'with multiple subkeys' do
fill_in('Key', with: GpgHelpers::User3.public_key)
click_button('Add key')
expect(page).to have_content('john.doe@example.com Unverified')
expect(page).to have_content(GpgHelpers::User3.fingerprint)
GpgHelpers::User3.subkey_fingerprints.each do |fingerprint|
expect(page).to have_content(fingerprint)
end
end
end
scenario 'User sees their key' do
......
......@@ -162,6 +162,16 @@ describe 'Pipelines', :js do
expect(page).to have_selector(
%Q{span[data-original-title="#{pipeline.yaml_errors}"]})
end
it 'contains badge that indicates failure reason' do
expect(page).to have_content 'error'
end
it 'contains badge with tooltip which contains failure reason' do
expect(pipeline.failure_reason?).to eq true
expect(page).to have_selector(
%Q{span[data-original-title="#{pipeline.present.failure_reason}"]})
end
end
context 'with manual actions' do
......
......@@ -36,10 +36,10 @@ describe 'create_tokens' do
expect(keys).to all(match(HEX_KEY))
end
it 'generates an RSA key for jws_private_key' do
it 'generates an RSA key for openid_connect_signing_key' do
create_tokens
keys = secrets.values_at(:jws_private_key)
keys = secrets.values_at(:openid_connect_signing_key)
expect(keys.uniq).to eq(keys)
expect(keys).to all(match(RSA_KEY))
......@@ -49,7 +49,7 @@ describe 'create_tokens' do
expect(self).to receive(:warn_missing_secret).with('secret_key_base')
expect(self).to receive(:warn_missing_secret).with('otp_key_base')
expect(self).to receive(:warn_missing_secret).with('db_key_base')
expect(self).to receive(:warn_missing_secret).with('jws_private_key')
expect(self).to receive(:warn_missing_secret).with('openid_connect_signing_key')
create_tokens
end
......@@ -61,7 +61,7 @@ describe 'create_tokens' do
expect(new_secrets['secret_key_base']).to eq(secrets.secret_key_base)
expect(new_secrets['otp_key_base']).to eq(secrets.otp_key_base)
expect(new_secrets['db_key_base']).to eq(secrets.db_key_base)
expect(new_secrets['jws_private_key']).to eq(secrets.jws_private_key)
expect(new_secrets['openid_connect_signing_key']).to eq(secrets.openid_connect_signing_key)
end
create_tokens
......@@ -77,7 +77,7 @@ describe 'create_tokens' do
context 'when the other secrets all exist' do
before do
secrets.db_key_base = 'db_key_base'
secrets.jws_private_key = 'jws_private_key'
secrets.openid_connect_signing_key = 'openid_connect_signing_key'
allow(File).to receive(:exist?).with('.secret').and_return(true)
allow(File).to receive(:read).with('.secret').and_return('file_key')
......@@ -88,7 +88,7 @@ describe 'create_tokens' do
stub_env('SECRET_KEY_BASE', 'env_key')
secrets.secret_key_base = 'secret_key_base'
secrets.otp_key_base = 'otp_key_base'
secrets.jws_private_key = 'jws_private_key'
secrets.openid_connect_signing_key = 'openid_connect_signing_key'
end
it 'does not issue a warning' do
......@@ -114,7 +114,7 @@ describe 'create_tokens' do
before do
secrets.secret_key_base = 'secret_key_base'
secrets.otp_key_base = 'otp_key_base'
secrets.jws_private_key = 'jws_private_key'
secrets.openid_connect_signing_key = 'openid_connect_signing_key'
end
it 'does not write any files' do
......@@ -129,7 +129,7 @@ describe 'create_tokens' do
expect(secrets.secret_key_base).to eq('secret_key_base')
expect(secrets.otp_key_base).to eq('otp_key_base')
expect(secrets.db_key_base).to eq('db_key_base')
expect(secrets.jws_private_key).to eq('jws_private_key')
expect(secrets.openid_connect_signing_key).to eq('openid_connect_signing_key')
end
it 'deletes the .secret file' do
......@@ -153,7 +153,7 @@ describe 'create_tokens' do
expect(new_secrets['secret_key_base']).to eq('file_key')
expect(new_secrets['otp_key_base']).to eq('file_key')
expect(new_secrets['db_key_base']).to eq('db_key_base')
expect(new_secrets['jws_private_key']).to eq('jws_private_key')
expect(new_secrets['openid_connect_signing_key']).to eq('openid_connect_signing_key')
end
create_tokens
......
import Vue from 'vue';
import component from '~/cycle_analytics/components/total_time_component.vue';
import mountComponent from '../helpers/vue_mount_component_helper';
describe('Total time component', () => {
let vm;
let Component;
beforeEach(() => {
Component = Vue.extend(component);
});
afterEach(() => {
vm.$destroy();
});
describe('With data', () => {
it('should render information for days and hours', () => {
vm = mountComponent(Component, {
time: {
days: 3,
hours: 4,
},
});
expect(vm.$el.textContent.trim()).toEqual('3 days 4 hrs');
});
it('should render information for hours and minutes', () => {
vm = mountComponent(Component, {
time: {
hours: 4,
mins: 35,
},
});
expect(vm.$el.textContent.trim()).toEqual('4 hrs 35 mins');
});
it('should render information for seconds', () => {
vm = mountComponent(Component, {
time: {
seconds: 45,
},
});
expect(vm.$el.textContent.trim()).toEqual('45 s');
});
});
describe('Without data', () => {
it('should render no information', () => {
vm = mountComponent(Component);
expect(vm.$el.textContent.trim()).toEqual('--');
});
});
});
import Vue from 'vue';
import GraphPath from '~/monitoring/components/graph_path.vue';
import GraphPath from '~/monitoring/components/graph/path.vue';
import createTimeSeries from '~/monitoring/utils/multiple_time_series';
import { singleRowMetricsMultipleSeries, convertDatesMultipleSeries } from './mock_data';
......
......@@ -44,7 +44,7 @@ describe('Graph', () => {
.not.toEqual(-1);
});
it('outterViewBox gets a width and height property based on the DOM size of the element', () => {
it('outerViewBox gets a width and height property based on the DOM size of the element', () => {
const component = createComponent({
graphData: convertedMetrics[1],
classType: 'col-md-6',
......@@ -52,8 +52,8 @@ describe('Graph', () => {
deploymentData,
});
const viewBoxArray = component.outterViewBox.split(' ');
expect(typeof component.outterViewBox).toEqual('string');
const viewBoxArray = component.outerViewBox.split(' ');
expect(typeof component.outerViewBox).toEqual('string');
expect(viewBoxArray[2]).toEqual(component.graphWidth.toString());
expect(viewBoxArray[3]).toEqual(component.graphHeight.toString());
});
......
......@@ -125,4 +125,23 @@ describe('Pipeline Url Component', () => {
component.$el.querySelector('.js-pipeline-url-autodevops').textContent.trim(),
).toEqual('Auto DevOps');
});
it('should render error badge when pipeline has a failure reason set', () => {
const component = new PipelineUrlComponent({
propsData: {
pipeline: {
id: 1,
path: 'foo',
flags: {
failure_reason: true,
},
failure_reason: 'some reason',
},
autoDevopsHelpPath: 'foo',
},
}).$mount();
expect(component.$el.querySelector('.js-pipeline-url-failure').textContent).toContain('error');
expect(component.$el.querySelector('.js-pipeline-url-failure').getAttribute('data-original-title')).toContain('some reason');
});
});
require 'spec_helper'
describe Gitlab::BackgroundMigration::CreateGpgKeySubkeysFromGpgKeys, :migration, schema: 20171005130944 do
context 'when GpgKey exists' do
let!(:gpg_key) { create(:gpg_key, key: GpgHelpers::User3.public_key) }
before do
GpgKeySubkey.destroy_all
end
it 'generates the subkeys' do
expect do
described_class.new.perform(gpg_key.id)
end.to change { gpg_key.subkeys.count }.from(0).to(2)
end
it 'schedules the signature update worker' do
expect(InvalidGpgSignatureUpdateWorker).to receive(:perform_async).with(gpg_key.id)
described_class.new.perform(gpg_key.id)
end
end
context 'when GpgKey does not exist' do
it 'does not do anything' do
expect(Gitlab::Gpg).not_to receive(:subkeys_from_key)
expect(InvalidGpgSignatureUpdateWorker).not_to receive(:perform_async)
described_class.new.perform(123)
end
end
end
......@@ -55,6 +55,10 @@ describe Gitlab::Ci::Pipeline::Chain::Validate::Config do
it 'fails the pipeline' do
expect(pipeline.reload).to be_failed
end
it 'sets a config error failure reason' do
expect(pipeline.reload.config_error?).to eq true
end
end
context 'when saving incomplete pipeline is not allowed' do
......
......@@ -11,6 +11,12 @@ describe Gitlab::Ci::Stage::Seed do
described_class.new(pipeline, 'test', builds)
end
describe '#size' do
it 'returns the number of jobs in the stage' do
expect(subject.size).to eq 2
end
end
describe '#stage' do
it 'returns hash attributes of a stage' do
expect(subject.stage).to be_a Hash
......
......@@ -4,6 +4,7 @@ describe Gitlab::GitRefValidator do
it { expect(described_class.validate('feature/new')).to be_truthy }
it { expect(described_class.validate('implement_@all')).to be_truthy }
it { expect(described_class.validate('my_new_feature')).to be_truthy }
it { expect(described_class.validate('my-branch')).to be_truthy }
it { expect(described_class.validate('#1')).to be_truthy }
it { expect(described_class.validate('feature/refs/heads/foo')).to be_truthy }
it { expect(described_class.validate('feature/~new/')).to be_falsey }
......@@ -22,4 +23,8 @@ describe Gitlab::GitRefValidator do
it { expect(described_class.validate('refs/remotes/')).to be_falsey }
it { expect(described_class.validate('refs/heads/feature')).to be_falsey }
it { expect(described_class.validate('refs/remotes/origin')).to be_falsey }
it { expect(described_class.validate('-')).to be_falsey }
it { expect(described_class.validate('-branch')).to be_falsey }
it { expect(described_class.validate('.tag')).to be_falsey }
it { expect(described_class.validate('my branch')).to be_falsey }
end
......@@ -63,6 +63,45 @@ describe Gitlab::Gpg::Commit do
it_behaves_like 'returns the cached signature on second call'
end
context 'commit signed with a subkey' do
let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: GpgHelpers::User3.emails.first }
let!(:user) { create(:user, email: GpgHelpers::User3.emails.first) }
let!(:gpg_key) do
create :gpg_key, key: GpgHelpers::User3.public_key, user: user
end
let(:gpg_key_subkey) do
gpg_key.subkeys.find_by(fingerprint: '0522DD29B98F167CD8421752E38FFCAF75ABD92A')
end
before do
allow(Rugged::Commit).to receive(:extract_signature)
.with(Rugged::Repository, commit_sha)
.and_return(
[
GpgHelpers::User3.signed_commit_signature,
GpgHelpers::User3.signed_commit_base_data
]
)
end
it 'returns a valid signature' do
expect(described_class.new(commit).signature).to have_attributes(
commit_sha: commit_sha,
project: project,
gpg_key: gpg_key_subkey,
gpg_key_primary_keyid: gpg_key_subkey.keyid,
gpg_key_user_name: GpgHelpers::User3.names.first,
gpg_key_user_email: GpgHelpers::User3.emails.first,
verification_status: 'verified'
)
end
it_behaves_like 'returns the cached signature on second call'
end
context 'user email does not match the committer email, but is the same user' do
let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: GpgHelpers::User2.emails.first }
......
......@@ -2,17 +2,16 @@ require 'rails_helper'
RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do
describe '#run' do
let!(:commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33' }
let!(:project) { create :project, :repository, path: 'sample-project' }
let(:signature) { [GpgHelpers::User1.signed_commit_signature, GpgHelpers::User1.signed_commit_base_data] }
let(:committer_email) { GpgHelpers::User1.emails.first }
let!(:commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33' }
let!(:project) { create :project, :repository, path: 'sample-project' }
let!(:raw_commit) do
raw_commit = double(
:raw_commit,
signature: [
GpgHelpers::User1.signed_commit_signature,
GpgHelpers::User1.signed_commit_base_data
],
signature: signature,
sha: commit_sha,
committer_email: GpgHelpers::User1.emails.first
committer_email: committer_email
)
allow(raw_commit).to receive :save!
......@@ -29,12 +28,7 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do
allow(Rugged::Commit).to receive(:extract_signature)
.with(Rugged::Repository, commit_sha)
.and_return(
[
GpgHelpers::User1.signed_commit_signature,
GpgHelpers::User1.signed_commit_base_data
]
)
.and_return(signature)
end
context 'gpg signature did have an associated gpg key which was removed later' do
......@@ -183,5 +177,34 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do
)
end
end
context 'gpg signature did not have an associated gpg subkey' do
let(:signature) { [GpgHelpers::User3.signed_commit_signature, GpgHelpers::User3.signed_commit_base_data] }
let(:committer_email) { GpgHelpers::User3.emails.first }
let!(:user) { create :user, email: GpgHelpers::User3.emails.first }
let!(:invalid_gpg_signature) do
create :gpg_signature,
project: project,
commit_sha: commit_sha,
gpg_key: nil,
gpg_key_primary_keyid: GpgHelpers::User3.subkey_fingerprints.last[24..-1],
verification_status: 'unknown_key'
end
it 'updates the signature to being valid when the missing gpg key is added' do
# InvalidGpgSignatureUpdater is called by the after_create hook
gpg_key = create(:gpg_key, key: GpgHelpers::User3.public_key, user: user)
subkey = gpg_key.subkeys.last
expect(invalid_gpg_signature.reload).to have_attributes(
project: project,
commit_sha: commit_sha,
gpg_key_subkey_id: subkey.id,
gpg_key_primary_keyid: subkey.keyid,
verification_status: 'verified'
)
end
end
end
end
......@@ -28,6 +28,23 @@ describe Gitlab::Gpg do
end
end
describe '.subkeys_from_key' do
it 'returns the subkeys by primary key' do
all_subkeys = described_class.subkeys_from_key(GpgHelpers::User1.public_key)
subkeys = all_subkeys[GpgHelpers::User1.primary_keyid]
expect(subkeys).to be_present
expect(subkeys.first[:keyid]).to be_present
expect(subkeys.first[:fingerprint]).to be_present
end
it 'returns an empty array when there are no subkeys' do
all_subkeys = described_class.subkeys_from_key(GpgHelpers::User4.public_key)
expect(all_subkeys[GpgHelpers::User4.primary_keyid]).to be_empty
end
end
describe '.user_infos_from_key' do
it 'returns the names and emails' do
user_infos = described_class.user_infos_from_key(GpgHelpers::User1.public_key)
......
......@@ -224,6 +224,7 @@ Ci::Pipeline:
- auto_canceled_by_id
- pipeline_schedule_id
- config_source
- failure_reason
- protected
Ci::Stage:
- id
......
require 'spec_helper'
describe RspecFlaky::Config, :aggregate_failures do
before do
# Stub these env variables otherwise specs don't behave the same on the CI
stub_env('FLAKY_RSPEC_GENERATE_REPORT', nil)
stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', nil)
stub_env('FLAKY_RSPEC_REPORT_PATH', nil)
stub_env('NEW_FLAKY_RSPEC_REPORT_PATH', nil)
end
describe '.generate_report?' do
context "when ENV['FLAKY_RSPEC_GENERATE_REPORT'] is not set" do
it 'returns false' do
expect(described_class).not_to be_generate_report
end
end
context "when ENV['FLAKY_RSPEC_GENERATE_REPORT'] is set to 'false'" do
before do
stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'false')
end
it 'returns false' do
expect(described_class).not_to be_generate_report
end
end
context "when ENV['FLAKY_RSPEC_GENERATE_REPORT'] is set to 'true'" do
before do
stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'true')
end
it 'returns true' do
expect(described_class).to be_generate_report
end
end
end
describe '.suite_flaky_examples_report_path' do
context "when ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'] is not set" do
it 'returns the default path' do
expect(Rails.root).to receive(:join).with('rspec_flaky/suite-report.json')
.and_return('root/rspec_flaky/suite-report.json')
expect(described_class.suite_flaky_examples_report_path).to eq('root/rspec_flaky/suite-report.json')
end
end
context "when ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'] is set" do
before do
stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', 'foo/suite-report.json')
end
it 'returns the value of the env variable' do
expect(described_class.suite_flaky_examples_report_path).to eq('foo/suite-report.json')
end
end
end
describe '.flaky_examples_report_path' do
context "when ENV['FLAKY_RSPEC_REPORT_PATH'] is not set" do
it 'returns the default path' do
expect(Rails.root).to receive(:join).with('rspec_flaky/report.json')
.and_return('root/rspec_flaky/report.json')
expect(described_class.flaky_examples_report_path).to eq('root/rspec_flaky/report.json')
end
end
context "when ENV['FLAKY_RSPEC_REPORT_PATH'] is set" do
before do
stub_env('FLAKY_RSPEC_REPORT_PATH', 'foo/report.json')
end
it 'returns the value of the env variable' do
expect(described_class.flaky_examples_report_path).to eq('foo/report.json')
end
end
end
describe '.new_flaky_examples_report_path' do
context "when ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] is not set" do
it 'returns the default path' do
expect(Rails.root).to receive(:join).with('rspec_flaky/new-report.json')
.and_return('root/rspec_flaky/new-report.json')
expect(described_class.new_flaky_examples_report_path).to eq('root/rspec_flaky/new-report.json')
end
end
context "when ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] is set" do
before do
stub_env('NEW_FLAKY_RSPEC_REPORT_PATH', 'foo/new-report.json')
end
it 'returns the value of the env variable' do
expect(described_class.new_flaky_examples_report_path).to eq('foo/new-report.json')
end
end
end
end
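The three path readers follow one pattern: prefer the environment variable, otherwise fall back to a default under rspec_flaky/. A sketch under that assumption:

def self.suite_flaky_examples_report_path
  ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'] || Rails.root.join('rspec_flaky/suite-report.json')
end

def self.flaky_examples_report_path
  ENV['FLAKY_RSPEC_REPORT_PATH'] || Rails.root.join('rspec_flaky/report.json')
end

def self.new_flaky_examples_report_path
  ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] || Rails.root.join('rspec_flaky/new-report.json')
end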
require 'spec_helper'
describe RspecFlaky::FlakyExample do
describe RspecFlaky::FlakyExample, :aggregate_failures do
let(:flaky_example_attrs) do
{
example_id: 'spec/foo/bar_spec.rb:2',
......@@ -9,6 +9,7 @@ describe RspecFlaky::FlakyExample do
description: 'hello world',
first_flaky_at: 1234,
last_flaky_at: 2345,
last_flaky_job: 'https://gitlab.com/gitlab-org/gitlab-ce/-/jobs/12',
last_attempts_count: 2,
flaky_reports: 1
}
......@@ -27,57 +28,78 @@ describe RspecFlaky::FlakyExample do
end
let(:example) { double(example_attrs) }
before do
# Stub these env variables, otherwise specs don't behave the same on CI
stub_env('CI_PROJECT_URL', nil)
stub_env('CI_JOB_ID', nil)
end
describe '#initialize' do
shared_examples 'a valid FlakyExample instance' do
it 'returns valid attributes' do
flaky_example = described_class.new(args)
let(:flaky_example) { described_class.new(args) }
it 'returns valid attributes' do
expect(flaky_example.uid).to eq(flaky_example_attrs[:uid])
expect(flaky_example.example_id).to eq(flaky_example_attrs[:example_id])
expect(flaky_example.file).to eq(flaky_example_attrs[:file])
expect(flaky_example.line).to eq(flaky_example_attrs[:line])
expect(flaky_example.description).to eq(flaky_example_attrs[:description])
expect(flaky_example.first_flaky_at).to eq(expected_first_flaky_at)
expect(flaky_example.last_flaky_at).to eq(expected_last_flaky_at)
expect(flaky_example.last_attempts_count).to eq(flaky_example_attrs[:last_attempts_count])
expect(flaky_example.flaky_reports).to eq(expected_flaky_reports)
end
end
context 'when given an Rspec::Example' do
let(:args) { example }
it_behaves_like 'a valid FlakyExample instance'
it_behaves_like 'a valid FlakyExample instance' do
let(:args) { example }
let(:expected_first_flaky_at) { nil }
let(:expected_last_flaky_at) { nil }
let(:expected_flaky_reports) { 0 }
end
end
context 'when given a hash' do
let(:args) { flaky_example_attrs }
it_behaves_like 'a valid FlakyExample instance'
it_behaves_like 'a valid FlakyExample instance' do
let(:args) { flaky_example_attrs }
let(:expected_flaky_reports) { flaky_example_attrs[:flaky_reports] }
let(:expected_first_flaky_at) { flaky_example_attrs[:first_flaky_at] }
let(:expected_last_flaky_at) { flaky_example_attrs[:last_flaky_at] }
end
end
end
describe '#to_h' do
before do
# Stub these env variables, otherwise specs don't behave the same on CI
stub_env('CI_PROJECT_URL', nil)
stub_env('CI_JOB_ID', nil)
end
describe '#update_flakiness!' do
shared_examples 'an up-to-date FlakyExample instance' do
let(:flaky_example) { described_class.new(args) }
shared_examples 'a valid FlakyExample hash' do
let(:additional_attrs) { {} }
it 'updates the first_flaky_at' do
now = Time.now
expected_first_flaky_at = flaky_example.first_flaky_at ? flaky_example.first_flaky_at : now
Timecop.freeze(now) { flaky_example.update_flakiness! }
it 'returns a valid hash' do
flaky_example = described_class.new(args)
final_hash = flaky_example_attrs
.merge(last_flaky_at: instance_of(Time), last_flaky_job: nil)
.merge(additional_attrs)
expect(flaky_example.first_flaky_at).to eq(expected_first_flaky_at)
end
it 'updates the last_flaky_at' do
now = Time.now
Timecop.freeze(now) { flaky_example.update_flakiness! }
expect(flaky_example.to_h).to match(hash_including(final_hash))
expect(flaky_example.last_flaky_at).to eq(now)
end
end
context 'when given an Rspec::Example' do
let(:args) { example }
it 'updates the flaky_reports' do
expected_flaky_reports = flaky_example.first_flaky_at ? flaky_example.flaky_reports + 1 : 1
expect { flaky_example.update_flakiness! }.to change { flaky_example.flaky_reports }.by(1)
expect(flaky_example.flaky_reports).to eq(expected_flaky_reports)
end
context 'when passed a :last_attempts_count' do
it 'updates the last_attempts_count' do
flaky_example.update_flakiness!(last_attempts_count: 42)
context 'when run locally' do
it_behaves_like 'a valid FlakyExample hash' do
let(:additional_attrs) do
{ first_flaky_at: instance_of(Time) }
end
expect(flaky_example.last_attempts_count).to eq(42)
end
end
......@@ -87,10 +109,45 @@ describe RspecFlaky::FlakyExample do
stub_env('CI_JOB_ID', 42)
end
it_behaves_like 'a valid FlakyExample hash' do
let(:additional_attrs) do
{ first_flaky_at: instance_of(Time), last_flaky_job: "https://gitlab.com/gitlab-org/gitlab-ce/-/jobs/42" }
end
it 'updates the last_flaky_job' do
flaky_example.update_flakiness!
expect(flaky_example.last_flaky_job).to eq('https://gitlab.com/gitlab-org/gitlab-ce/-/jobs/42')
end
end
end
context 'when given an Rspec::Example' do
it_behaves_like 'an up-to-date FlakyExample instance' do
let(:args) { example }
end
end
context 'when given a hash' do
it_behaves_like 'an up-to-date FlakyExample instance' do
let(:args) { flaky_example_attrs }
end
end
end
describe '#to_h' do
shared_examples 'a valid FlakyExample hash' do
let(:additional_attrs) { {} }
it 'returns a valid hash' do
flaky_example = described_class.new(args)
final_hash = flaky_example_attrs.merge(additional_attrs)
expect(flaky_example.to_h).to eq(final_hash)
end
end
context 'when given an Rspec::Example' do
let(:args) { example }
it_behaves_like 'a valid FlakyExample hash' do
let(:additional_attrs) do
{ first_flaky_at: nil, last_flaky_at: nil, last_flaky_job: nil, flaky_reports: 0 }
end
end
end
......
require 'spec_helper'
describe RspecFlaky::FlakyExamplesCollection, :aggregate_failures do
let(:collection_hash) do
{
a: { example_id: 'spec/foo/bar_spec.rb:2' },
b: { example_id: 'spec/foo/baz_spec.rb:3' }
}
end
let(:collection_report) do
{
a: {
example_id: 'spec/foo/bar_spec.rb:2',
first_flaky_at: nil,
last_flaky_at: nil,
last_flaky_job: nil
},
b: {
example_id: 'spec/foo/baz_spec.rb:3',
first_flaky_at: nil,
last_flaky_at: nil,
last_flaky_job: nil
}
}
end
describe '.from_json' do
it 'accepts a JSON string' do
collection = described_class.from_json(JSON.pretty_generate(collection_hash))
expect(collection.to_report).to eq(described_class.new(collection_hash).to_report)
end
end
describe '#initialize' do
it 'accepts no argument' do
expect { described_class.new }.not_to raise_error
end
it 'accepts a hash' do
expect { described_class.new(collection_hash) }.not_to raise_error
end
it 'does not accept anything else' do
expect { described_class.new([1, 2, 3]) }.to raise_error(ArgumentError, "`collection` must be a Hash, Array given!")
end
end
describe '#to_report' do
it 'calls #to_h on the values' do
collection = described_class.new(collection_hash)
expect(collection.to_report).to eq(collection_report)
end
end
describe '#-' do
it 'returns only examples that are not present in the given collection' do
collection1 = described_class.new(collection_hash)
collection2 = described_class.new(
a: { example_id: 'spec/foo/bar_spec.rb:2' },
c: { example_id: 'spec/bar/baz_spec.rb:4' })
expect((collection2 - collection1).to_report).to eq(
c: {
example_id: 'spec/bar/baz_spec.rb:4',
first_flaky_at: nil,
last_flaky_at: nil,
last_flaky_job: nil
})
end
it 'fails if the given collection does not respond to `#key?`' do
collection = described_class.new(collection_hash)
expect { collection - [1, 2, 3] }.to raise_error(ArgumentError, "`other` must respond to `#key?`, Array does not!")
end
end
end
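Given the ArgumentError messages asserted above, the collection behaves like a Hash keyed by example UID, and #- keeps only entries whose key is absent from the other collection. A sketch under that assumption:

def -(other)
  unless other.respond_to?(:key?)
    raise ArgumentError, "`other` must respond to `#key?`, #{other.class} does not!"
  end

  # Assumes the collection delegates enumeration to its underlying Hash
  self.class.new(reject { |uid, _| other.key?(uid) })
end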
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20171005130944_schedule_create_gpg_key_subkeys_from_gpg_keys')
describe ScheduleCreateGpgKeySubkeysFromGpgKeys, :migration, :sidekiq do
matcher :be_scheduled_migration do |*expected|
match do |migration|
BackgroundMigrationWorker.jobs.any? do |job|
job['args'] == [migration, expected]
end
end
failure_message do |migration|
"Migration `#{migration}` with args `#{expected.inspect}` not scheduled!"
end
end
before do
create(:gpg_key, id: 1, key: GpgHelpers::User1.public_key)
create(:gpg_key, id: 2, key: GpgHelpers::User3.public_key)
# Delete all subkeys so they can be recreated
GpgKeySubkey.destroy_all
end
it 'correctly schedules background migrations' do
Sidekiq::Testing.fake! do
migrate!
expect(described_class::MIGRATION).to be_scheduled_migration(1)
expect(described_class::MIGRATION).to be_scheduled_migration(2)
expect(BackgroundMigrationWorker.jobs.size).to eq(2)
end
end
it 'performs the background migrations' do
Sidekiq::Testing.inline! do
expect(GpgKeySubkey.count).to eq(0)
migrate!
expect(GpgKeySubkey.count).to eq(3)
end
end
end
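For the be_scheduled_migration matcher to match, every queued job's args must equal [MIGRATION, [id]]. The migration presumably enqueues one background job per GPG key, along these lines (a sketch; the constant's value and the query are assumptions):

MIGRATION = 'CreateGpgKeySubkeysFromGpgKeys'.freeze

def up
  GpgKey.pluck(:id).each do |id|
    BackgroundMigrationWorker.perform_async(MIGRATION, [id])
  end
end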
......@@ -238,7 +238,7 @@ describe Ci::Pipeline, :mailer do
describe '#stage_seeds' do
let(:pipeline) do
create(:ci_pipeline, config: { rspec: { script: 'rake' } })
build(:ci_pipeline, config: { rspec: { script: 'rake' } })
end
it 'returns preseeded stage seeds object' do
......@@ -247,6 +247,14 @@ describe Ci::Pipeline, :mailer do
end
end
describe '#seeds_size' do
let(:pipeline) { build(:ci_pipeline_with_one_job) }
it 'returns number of jobs in stage seeds' do
expect(pipeline.seeds_size).to eq 1
end
end
describe '#legacy_stages' do
subject { pipeline.legacy_stages }
......
......@@ -231,6 +231,18 @@ describe HasStatus do
end
end
describe '.alive' do
subject { CommitStatus.alive }
%i[running pending created].each do |status|
it_behaves_like 'containing the job', status
end
%i[failed success].each do |status|
it_behaves_like 'not containing the job', status
end
end
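A scope consistent with the shared examples above, with the status names taken from the spec (the actual definition in HasStatus may differ):

scope :alive, -> { where(status: %w[running pending created]) }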
describe '.created_or_pending' do
subject { CommitStatus.created_or_pending }
......
......@@ -3,6 +3,7 @@ require 'rails_helper'
describe GpgKey do
describe "associations" do
it { is_expected.to belong_to(:user) }
it { is_expected.to have_many(:subkeys) }
end
describe "validation" do
......@@ -38,6 +39,14 @@ describe GpgKey do
expect(gpg_key.primary_keyid).to eq GpgHelpers::User1.primary_keyid
end
end
describe 'generate_subkeys' do
it 'extracts the subkeys from the gpg key' do
gpg_key = create(:gpg_key, key: GpgHelpers::User1.public_key_with_extra_signing_key)
expect(gpg_key.subkeys.count).to eq(2)
end
end
end
describe '#key=' do
......@@ -182,5 +191,29 @@ describe GpgKey do
expect(unrelated_gpg_key.destroyed?).to be false
end
it 'deletes all the associated subkeys' do
gpg_key = create :gpg_key, key: GpgHelpers::User3.public_key
expect(gpg_key.subkeys).to be_present
gpg_key.revoke
expect(gpg_key.subkeys(true)).to be_blank
end
it 'invalidates all signatures associated to the subkeys' do
gpg_key = create :gpg_key, key: GpgHelpers::User3.public_key
gpg_key_subkey = gpg_key.subkeys.last
gpg_signature = create :gpg_signature, verification_status: :verified, gpg_key: gpg_key_subkey
gpg_key.revoke
expect(gpg_signature.reload).to have_attributes(
verification_status: 'unknown_key',
gpg_key: nil,
gpg_key_subkey: nil
)
end
end
end
require 'rails_helper'
describe GpgKeySubkey do
subject { build(:gpg_key_subkey) }
describe 'associations' do
it { is_expected.to belong_to(:gpg_key) }
end
describe 'validations' do
it { is_expected.to validate_presence_of(:gpg_key_id) }
it { is_expected.to validate_presence_of(:fingerprint) }
it { is_expected.to validate_presence_of(:keyid) }
end
end
require 'rails_helper'
RSpec.describe GpgSignature do
let(:gpg_key) { create(:gpg_key) }
let(:gpg_key_subkey) { create(:gpg_key_subkey) }
describe 'associations' do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:gpg_key) }
it { is_expected.to belong_to(:gpg_key_subkey) }
end
describe 'validation' do
......@@ -25,4 +29,26 @@ RSpec.describe GpgSignature do
gpg_signature.commit
end
end
describe '#gpg_key=' do
it 'supports the assignment of a GpgKey' do
gpg_signature = create(:gpg_signature, gpg_key: gpg_key)
expect(gpg_signature.gpg_key).to be_an_instance_of(GpgKey)
end
it 'supports the assignment of a GpgKeySubkey' do
gpg_signature = create(:gpg_signature, gpg_key: gpg_key_subkey)
expect(gpg_signature.gpg_key).to be_an_instance_of(GpgKeySubkey)
end
it 'clears gpg_key and gpg_key_subkey_id when passing nil' do
gpg_signature = create(:gpg_signature, gpg_key: gpg_key_subkey)
gpg_signature.update_attribute(:gpg_key, nil)
expect(gpg_signature.gpg_key_id).to be_nil
expect(gpg_signature.gpg_key_subkey_id).to be_nil
end
end
end
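The three examples above imply a polymorphic writer that routes a GpgKeySubkey to its own foreign key and clears both columns on nil. A hedged sketch:

def gpg_key=(model)
  case model
  when GpgKey
    super(model)
  when GpgKeySubkey
    self.gpg_key_subkey = model
  when NilClass
    super(nil)
    self.gpg_key_subkey = nil
  end
end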
......@@ -51,4 +51,21 @@ describe Ci::PipelinePresenter do
end
end
end
context '#failure_reason' do
context 'when pipeline has failure reason' do
it 'represents a failure reason sentence' do
pipeline.failure_reason = :config_error
expect(presenter.failure_reason)
.to eq 'CI/CD YAML configuration error!'
end
end
context 'when pipeline does not have failure reason' do
it 'returns nil' do
expect(presenter.failure_reason).to be_nil
end
end
end
end
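A presenter method matching both contexts could translate the failure_reason enum into a human-readable sentence via a lookup table; a sketch with an assumed mapping:

FAILURE_REASONS = {
  config_error: 'CI/CD YAML configuration error!'
}.freeze

def failure_reason
  return unless pipeline.failure_reason?

  FAILURE_REASONS[pipeline.failure_reason.to_sym] || pipeline.failure_reason
end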
require 'spec_helper'
require 'raven/transports/dummy'
require_relative '../../../config/initializers/sentry'
describe API::Helpers do
include API::APIGuard::HelperMethods
......@@ -476,7 +478,7 @@ describe API::Helpers do
allow(exception).to receive(:backtrace).and_return(caller)
expect_any_instance_of(self.class).to receive(:sentry_context)
expect(Raven).to receive(:capture_exception).with(exception)
expect(Raven).to receive(:capture_exception).with(exception, extra: {})
handle_api_exception(exception)
end
......@@ -501,6 +503,30 @@ describe API::Helpers do
expect(json_response['message']).to start_with("\nRuntimeError (Runtime Error!):")
end
end
context 'extra information' do
# Sentry events are an array of the form [auth_header, data, options]
let(:event_data) { Raven.client.transport.events.first[1] }
before do
stub_application_setting(
sentry_enabled: true,
sentry_dsn: "dummy://12345:67890@sentry.localdomain/sentry/42"
)
configure_sentry
Raven.client.configuration.encoding = 'json'
end
it 'sends the params, masking confidential values' do
expect(Gitlab::Sentry).to receive(:enabled?).twice.and_return(true)
expect(ProjectsFinder).to receive(:new).and_raise('Runtime Error!')
get api('/projects', user), password: 'dont_send_this', other_param: 'send_this'
expect(event_data).to include('other_param=send_this')
expect(event_data).to include('password=********')
end
end
end
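The password=******** assertion points at sentry-raven's sanitizer rather than at the helper itself: the helper presumably forwards the request parameters as extra data, and Raven's SanitizeData processor masks sensitive fields before the event is serialized. Conceptually:

Raven.capture_exception(exception, extra: { params: request.query_string })
# Raven::Processor::SanitizeData then rewrites "password=dont_send_this"
# to "password=********" on its way out.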
describe '.authenticate_non_get!' do
......
......@@ -108,5 +108,18 @@ describe PipelineEntity do
expect(subject[:ref][:path]).to be_nil
end
end
context 'when pipeline has a failure reason set' do
let(:pipeline) { create(:ci_empty_pipeline) }
before do
pipeline.drop!(:config_error)
end
it 'has a correct failure reason' do
expect(subject[:failure_reason])
.to eq 'CI/CD YAML configuration error!'
end
end
end
end
......@@ -18,4 +18,14 @@ describe GpgKeys::CreateService do
it 'creates a gpg key' do
expect { subject.execute }.to change { user.gpg_keys.where(params).count }.by(1)
end
context 'when the public key contains subkeys' do
let(:params) { attributes_for(:gpg_key_with_subkeys) }
it 'generates the gpg subkeys' do
gpg_key = subject.execute
expect(gpg_key.subkeys.count).to eq(2)
end
end
end
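Tying back to Gitlab::Gpg.subkeys_from_key earlier in this diff, the service (or a callback it triggers on GpgKey) presumably walks the parsed subkeys and persists one record each. A sketch:

def generate_subkeys
  gpg_subkeys = Gitlab::Gpg.subkeys_from_key(key)

  gpg_subkeys.fetch(primary_keyid, []).each do |subkey|
    subkeys.create!(keyid: subkey[:keyid], fingerprint: subkey[:fingerprint])
  end
end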
......@@ -81,7 +81,10 @@ RSpec.configure do |config|
if ENV['CI']
# This includes the first try, i.e. tests will be run 4 times before failing.
config.default_retry_count = 4
config.reporter.register_listener(RspecFlaky::Listener.new, :example_passed, :dump_summary)
config.reporter.register_listener(
RspecFlaky::Listener.new,
:example_passed,
:dump_summary)
end
config.before(:suite) do
......