Commit be5a7e70 authored by GitLab Bot

Automatic merge of gitlab-org/gitlab master

parents df17e87e 85d475bf
......@@ -647,6 +647,10 @@ module Ci
!artifacts_expired? && artifacts_file&.exists?
end
def locked_artifacts?
pipeline.artifacts_locked? && artifacts_file&.exists?
end
# This method is similar to #artifacts? but it includes the artifacts
# locking mechanics. A new method was created to prevent breaking existing
# behavior and avoid introducing N+1s.
......
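For context, a minimal sketch of how the two predicates differ. This assumes `Ci::Pipeline` declares `enum locked: { unlocked: 0, artifacts_locked: 1 }`, which would generate the `artifacts_locked?` predicate used above:

```ruby
# Sketch only; the enum declaration on Ci::Pipeline is assumed, not shown here.
build.artifacts?        # true while the artifacts archive exists and has not expired
build.locked_artifacts? # true while the archive exists and its pipeline locks
                        # artifacts, regardless of the expiry date
```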
......@@ -867,6 +867,10 @@ module Ci
builds.latest.with_reports(reports_scope)
end
def builds_with_coverage
builds.with_coverage
end
def has_reports?(reports_scope)
complete? && latest_report_builds(reports_scope).exists?
end
......
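The `with_coverage` scope itself is not shown in this diff; presumably it filters builds on the `coverage` column, along the lines of:

```ruby
# Assumed definition (not part of this diff):
scope :with_coverage, -> { where.not(coverage: nil) }
```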
......@@ -295,7 +295,7 @@ class MergeRequest < ApplicationRecord
alias_attribute :auto_merge_enabled, :merge_when_pipeline_succeeds
alias_method :issuing_parent, :target_project
delegate :active?, to: :head_pipeline, prefix: true, allow_nil: true
delegate :active?, :builds_with_coverage, to: :head_pipeline, prefix: true, allow_nil: true
delegate :success?, :active?, to: :actual_head_pipeline, prefix: true, allow_nil: true
RebaseLockTimeout = Class.new(StandardError)
......
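The updated `delegate` line generates a `head_pipeline_builds_with_coverage` method. With `allow_nil: true`, it is roughly equivalent to this hand-written version:

```ruby
# Rough hand-written equivalent of the generated delegator:
def head_pipeline_builds_with_coverage
  head_pipeline&.builds_with_coverage
end
```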
# frozen_string_literal: true
class BuildCoverageEntity < Grape::Entity
expose :name, :coverage
end
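A hypothetical usage sketch of the new entity; the build attributes are illustrative:

```ruby
build = Ci::Build.new(name: 'rspec', coverage: 97.1)
BuildCoverageEntity.new(build).as_json
# => { name: "rspec", coverage: 97.1 }
```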
......@@ -27,15 +27,15 @@ class BuildDetailsEntity < JobEntity
end
expose :artifact, if: -> (*) { can?(current_user, :read_build, build) } do
expose :download_path, if: -> (*) { build.pipeline.artifacts_locked? || build.artifacts? } do |build|
expose :download_path, if: -> (*) { build.locked_artifacts? || build.artifacts? } do |build|
download_project_job_artifacts_path(project, build)
end
expose :browse_path, if: -> (*) { build.pipeline.artifacts_locked? || build.browsable_artifacts? } do |build|
expose :browse_path, if: -> (*) { build.locked_artifacts? || build.browsable_artifacts? } do |build|
browse_project_job_artifacts_path(project, build)
end
expose :keep_path, if: -> (*) { build.has_expiring_archive_artifacts? && can?(current_user, :update_build, build) } do |build|
expose :keep_path, if: -> (*) { (build.locked_artifacts? || build.has_expiring_archive_artifacts?) && can?(current_user, :update_build, build) } do |build|
keep_project_job_artifacts_path(project, build)
end
......
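Restated outside the entity DSL, the new `keep_path` condition reads: show the Keep action when the user can update the build and the artifacts are either locked or expiring. A sketch with the same semantics, assuming the same helpers:

```ruby
# Same logic as the `if:` lambda above, written out as a predicate:
def show_keep_path?(build, current_user)
  (build.locked_artifacts? || build.has_expiring_archive_artifacts?) &&
    can?(current_user, :update_build, build)
end
```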
......@@ -73,6 +73,8 @@ class MergeRequestPollWidgetEntity < Grape::Entity
presenter(merge_request).pipeline_coverage_delta
end
expose :head_pipeline_builds_with_coverage, as: :builds_with_coverage, using: BuildCoverageEntity
expose :cancel_auto_merge_path do |merge_request|
presenter(merge_request).cancel_auto_merge_path
end
......
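The widget payload gains a `builds_with_coverage` array rendered through `BuildCoverageEntity`; an illustrative fragment, with invented values:

```ruby
# Illustrative payload fragment only:
# {
#   "builds_with_coverage" => [
#     { "name" => "rspec", "coverage" => 97.1 },
#     { "name" => "jest",  "coverage" => 94.1 }
#   ]
# }
```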
---
title: Create `security_findings` table
merge_request: 40368
author:
type: added
---
title: Return builds with coverage in MR widget JSON response
merge_request: 40533
author:
type: added
---
title: Show keep button for locked artifacts.
merge_request: 40962
author:
type: changed
# frozen_string_literal: true
class CreateSecurityFindingsTable < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
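# NOTE: `add_text_limit` (called in #up) adds the CHECK constraint as
# NOT VALID and then validates it in a separate statement; that sequence
# cannot run inside a transaction, hence `disable_ddl_transaction!` above.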
def up
unless table_exists?(:security_findings)
create_table :security_findings do |t|
t.references :scan, null: false
t.references :scanner, null: false
t.integer :severity, limit: 2, index: true, null: false
t.integer :confidence, limit: 2, index: true, null: false
t.text :project_fingerprint, index: true, null: false
end
end
add_text_limit :security_findings, :project_fingerprint, 40
end
def down
drop_table :security_findings
end
end
# frozen_string_literal: true
class AddForeignKeyOnScanIdToSecurityScans < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
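# `with_lock_retries` takes the table lock under a short lock_timeout and
# retries on failure, so adding the foreign key does not block concurrent
# writes for long.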
with_lock_retries do
add_foreign_key :security_findings, :security_scans, column: :scan_id, on_delete: :cascade # rubocop:disable Migration/AddConcurrentForeignKey
end
end
def down
with_lock_retries do
remove_foreign_key :security_findings, column: :scan_id
end
end
end
# frozen_string_literal: true
class AddForeignKeyOnScannerIdToVulnerabilityScanners < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_foreign_key :security_findings, :vulnerability_scanners, column: :scanner_id, on_delete: :cascade # rubocop:disable Migration/AddConcurrentForeignKey
end
end
def down
with_lock_retries do
remove_foreign_key :security_findings, column: :scanner_id
end
end
end
d5e81848257b3391d99b198b177531a4c190ca6f19b27c9aedaa931f6eb3165a
\ No newline at end of file
b3ee994231a8da694dbcda227b37e19a2112be666648d918425b064ec19d239e
\ No newline at end of file
b575558752206149171a05231e4167e1ac3e1295f76d800edfe3d61c1b996b52
\ No newline at end of file
......@@ -15387,6 +15387,25 @@ CREATE SEQUENCE public.scim_oauth_access_tokens_id_seq
ALTER SEQUENCE public.scim_oauth_access_tokens_id_seq OWNED BY public.scim_oauth_access_tokens.id;
CREATE TABLE public.security_findings (
id bigint NOT NULL,
scan_id bigint NOT NULL,
scanner_id bigint NOT NULL,
severity smallint NOT NULL,
confidence smallint NOT NULL,
project_fingerprint text NOT NULL,
CONSTRAINT check_b9508c6df8 CHECK ((char_length(project_fingerprint) <= 40))
);
CREATE SEQUENCE public.security_findings_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE public.security_findings_id_seq OWNED BY public.security_findings.id;
CREATE TABLE public.security_scans (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
......@@ -17386,6 +17405,8 @@ ALTER TABLE ONLY public.scim_identities ALTER COLUMN id SET DEFAULT nextval('pub
ALTER TABLE ONLY public.scim_oauth_access_tokens ALTER COLUMN id SET DEFAULT nextval('public.scim_oauth_access_tokens_id_seq'::regclass);
ALTER TABLE ONLY public.security_findings ALTER COLUMN id SET DEFAULT nextval('public.security_findings_id_seq'::regclass);
ALTER TABLE ONLY public.security_scans ALTER COLUMN id SET DEFAULT nextval('public.security_scans_id_seq'::regclass);
ALTER TABLE ONLY public.self_managed_prometheus_alert_events ALTER COLUMN id SET DEFAULT nextval('public.self_managed_prometheus_alert_events_id_seq'::regclass);
......@@ -18648,6 +18669,9 @@ ALTER TABLE ONLY public.scim_identities
ALTER TABLE ONLY public.scim_oauth_access_tokens
ADD CONSTRAINT scim_oauth_access_tokens_pkey PRIMARY KEY (id);
ALTER TABLE ONLY public.security_findings
ADD CONSTRAINT security_findings_pkey PRIMARY KEY (id);
ALTER TABLE ONLY public.security_scans
ADD CONSTRAINT security_scans_pkey PRIMARY KEY (id);
......@@ -20820,6 +20844,16 @@ CREATE INDEX index_secure_ci_builds_on_user_id_created_at_parser_features ON pub
CREATE INDEX index_security_ci_builds_on_name_and_id_parser_features ON public.ci_builds USING btree (name, id) WHERE (((name)::text = ANY (ARRAY[('container_scanning'::character varying)::text, ('dast'::character varying)::text, ('dependency_scanning'::character varying)::text, ('license_management'::character varying)::text, ('sast'::character varying)::text, ('secret_detection'::character varying)::text, ('coverage_fuzzing'::character varying)::text, ('license_scanning'::character varying)::text])) AND ((type)::text = 'Ci::Build'::text));
CREATE INDEX index_security_findings_on_confidence ON public.security_findings USING btree (confidence);
CREATE INDEX index_security_findings_on_project_fingerprint ON public.security_findings USING btree (project_fingerprint);
CREATE INDEX index_security_findings_on_scan_id ON public.security_findings USING btree (scan_id);
CREATE INDEX index_security_findings_on_scanner_id ON public.security_findings USING btree (scanner_id);
CREATE INDEX index_security_findings_on_severity ON public.security_findings USING btree (severity);
CREATE INDEX index_self_managed_prometheus_alert_events_on_environment_id ON public.self_managed_prometheus_alert_events USING btree (environment_id);
CREATE INDEX index_sent_notifications_on_noteable_type_noteable_id ON public.sent_notifications USING btree (noteable_id) WHERE ((noteable_type)::text = 'Issue'::text);
......@@ -22697,6 +22731,9 @@ ALTER TABLE ONLY public.list_user_preferences
ALTER TABLE ONLY public.project_custom_attributes
ADD CONSTRAINT fk_rails_719c3dccc5 FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY public.security_findings
ADD CONSTRAINT fk_rails_729b763a54 FOREIGN KEY (scanner_id) REFERENCES public.vulnerability_scanners(id) ON DELETE CASCADE;
ALTER TABLE ONLY public.dast_scanner_profiles
ADD CONSTRAINT fk_rails_72a8ba7141 FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE;
......@@ -23030,6 +23067,9 @@ ALTER TABLE ONLY public.approval_project_rules_users
ALTER TABLE ONLY public.lists
ADD CONSTRAINT fk_rails_baed5f39b7 FOREIGN KEY (milestone_id) REFERENCES public.milestones(id) ON DELETE CASCADE;
ALTER TABLE ONLY public.security_findings
ADD CONSTRAINT fk_rails_bb63863cf1 FOREIGN KEY (scan_id) REFERENCES public.security_scans(id) ON DELETE CASCADE;
ALTER TABLE ONLY public.approval_merge_request_rules_users
ADD CONSTRAINT fk_rails_bc8972fa55 FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE;
......
......@@ -348,50 +348,51 @@ are supported and can be added if needed.
## Configure Gitaly
Deploying Gitaly in its own server can benefit GitLab installations that are
larger than a single machine. Gitaly node requirements are dependent on data,
specifically the number of projects and their sizes. It's recommended that each
Gitaly node store no more than 5TB of data. Your 2K setup may require one or more
nodes depending on your repository storage requirements.
We strongly recommend that all Gitaly nodes should be set up with SSD disks with a throughput of at least
8,000 IOPS for read operations and 2,000 IOPS for write, as Gitaly has heavy I/O.
These IOPS values are recommended only as a starter as with time they may be
adjusted higher or lower depending on the scale of your environment's workload.
If you're running the environment on a Cloud provider
you may need to refer to their documentation on how configure IOPS correctly.
Some things to note:
- The GitLab Rails application shards repositories into [repository storages](../repository_storage_paths.md).
- A Gitaly server can host one or more storages.
- A GitLab server can use one or more Gitaly servers.
- Gitaly addresses must be specified in such a way that they resolve
correctly for ALL Gitaly clients.
[Gitaly](../gitaly/index.md) server node requirements are dependent on data,
specifically the number of projects and those projects' sizes. It's recommended
that a Gitaly server node stores no more than 5TB of data. Although this
reference architecture includes a single Gitaly server node, you may require
additional nodes depending on your repository storage requirements.
Due to Gitaly having notable input and output requirements, we strongly
recommend that all Gitaly nodes use solid-state drives (SSDs). These SSDs
should have a throughput of at least 8,000
input/output operations per second (IOPS) for read operations and 2,000 IOPS
for write operations. These IOPS values are initial recommendations, and may be
adjusted to greater or lesser values depending on the scale of your
environment's workload. If you're running the environment on a Cloud provider,
refer to their documentation about how to configure IOPS correctly.
Be sure to note the following items:
- The GitLab Rails application shards repositories into
[repository storage paths](../repository_storage_paths.md).
- A Gitaly server can host one or more storage paths.
- A GitLab server can use one or more Gitaly server nodes.
- Gitaly addresses must be specified to be correctly resolvable for *all*
Gitaly clients.
- Gitaly servers must not be exposed to the public internet, as Gitaly's network
traffic is unencrypted by default. The use of a firewall is highly recommended
to restrict access to the Gitaly server. Another option is to
[use TLS](#gitaly-tls-support).
TIP: **Tip:**
For more information about Gitaly's history and network architecture, see the
[standalone Gitaly documentation](../gitaly/index.md).
Note: **Note:** The token referred to throughout the Gitaly documentation is
just an arbitrary password selected by the administrator. It is unrelated to
tokens created for the GitLab API or other similar web API tokens.
NOTE: **Note:**
The token referred to throughout the Gitaly documentation is an arbitrary
password selected by the administrator. This token is unrelated to tokens
created for the GitLab API or other similar web API tokens.
Below we describe how to configure one Gitaly server `gitaly1.internal` with
secret token `gitalysecret`. We assume your GitLab installation has two
repository storages: `default` and `storage1`.
The following procedure describes how to configure a single Gitaly server named
`gitaly1.internal` with the secret token `gitalysecret`. We assume your GitLab
installation has two repository storages: `default` and `storage1`.
To configure the Gitaly server:
1. [Download/Install](https://about.gitlab.com/install/) the Omnibus GitLab
package you want using **steps 1 and 2** from the GitLab downloads page but
**without** providing the `EXTERNAL_URL` value.
1. Edit `/etc/gitlab/gitlab.rb` to configure storage paths, enable
the network listener and configure the token:
1. On the server node you want to use for Gitaly,
[download and install](https://about.gitlab.com/install/) your selected
Omnibus GitLab package using *steps 1 and 2* from the GitLab downloads page,
but *without* providing the `EXTERNAL_URL` value.
1. Edit the Gitaly server node's `/etc/gitlab/gitlab.rb` file to configure
storage paths, enable the network listener, and to configure the token:
<!--
updates to following example must also be made at
......@@ -440,11 +441,7 @@ To configure the Gitaly server:
# Set the network addresses that the exporters used for monitoring will listen on
node_exporter['listen_address'] = '0.0.0.0:9100'
```
1. Append the following to `/etc/gitlab/gitlab.rb` on `gitaly1.internal`:
```ruby
git_data_dirs({
'default' => {
'path' => '/var/opt/gitlab/git-data'
......@@ -455,12 +452,7 @@ To configure the Gitaly server:
})
```
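The elided middle of the snippet above lists both storages; a sketch of the full block, assuming `storage1` is mounted at `/mnt/gitlab/git-data` (the path is illustrative):

```ruby
git_data_dirs({
  'default' => {
    'path' => '/var/opt/gitlab/git-data'
  },
  'storage1' => {
    'path' => '/mnt/gitlab/git-data'
  },
})
```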
<!--
updates to following example must also be made at
https://gitlab.com/gitlab-org/charts/gitlab/blob/master/doc/advanced/external-gitaly/external-omnibus-gitaly.md#configure-omnibus-gitlab
-->
1. Save the file and [reconfigure GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure).
1. Save the file, and then [reconfigure GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure).
1. Confirm that Gitaly can perform callbacks to the internal API:
```shell
......
......@@ -216,7 +216,7 @@ module EE
]
# Adds all the SSF Data Types automatically
replicator_classes.each do |replicator_class|
enabled_replicator_classes.each do |replicator_class|
replicable_types.push(
{
title: replicator_class.replicable_title,
......@@ -231,8 +231,8 @@ module EE
replicable_types
end
def replicator_classes
::Gitlab::Geo.replicator_classes
def enabled_replicator_classes
::Gitlab::Geo.enabled_replicator_classes
end
end
end
# frozen_string_literal: true
# This model represents the vulnerability findings
# discovered for all pipelines, for use in the
# pipeline security tab.
#
# Unlike the `Vulnerabilities::Finding` model, this one
# stores only the metadata needed to determine which
# report artifact to download and parse.
module Security
class Finding < ApplicationRecord
self.table_name = 'security_findings'
belongs_to :scan, inverse_of: :findings, optional: false
belongs_to :scanner, class_name: 'Vulnerabilities::Scanner', inverse_of: :security_findings, optional: false
# TODO: These enums are duplicated between this model and Vulnerabilities::Finding;
# we should create a shared module to encapsulate them in one place.
enum confidence: Vulnerabilities::Finding::CONFIDENCE_LEVELS, _prefix: :confidence
enum severity: Vulnerabilities::Finding::SEVERITY_LEVELS, _prefix: :severity
validates :project_fingerprint, presence: true, length: { maximum: 40 }
end
end
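Because both enums are declared with `_prefix`, the generated scopes and predicates are namespaced. A hypothetical usage sketch:

```ruby
# Hypothetical usage; `_prefix` makes Rails generate prefixed helpers:
Security::Finding.severity_critical                    # scope on the severity enum
Security::Finding.confidence_high.exists?              # scope on the confidence enum
Security::Finding.new(severity: :high).severity_high?  # => true
```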
......@@ -8,8 +8,11 @@ module Security
validates :scan_type, presence: true
belongs_to :build, class_name: 'Ci::Build'
has_one :pipeline, class_name: 'Ci::Pipeline', through: :build
has_many :findings, inverse_of: :scan
enum scan_type: {
sast: 1,
dependency_scanning: 2,
......
......@@ -5,6 +5,7 @@ module Vulnerabilities
self.table_name = "vulnerability_scanners"
has_many :findings, class_name: 'Vulnerabilities::Finding', inverse_of: :scanner
has_many :security_findings, class_name: 'Security::Finding', inverse_of: :scanner
belongs_to :project
......
......@@ -19,7 +19,7 @@
%span
= _('Designs')
- if Feature.enabled?(:geo_self_service_framework)
- Gitlab::Geo.replicator_classes.each do |replicator_class|
- Gitlab::Geo.enabled_replicator_classes.each do |replicator_class|
= nav_link(page: [{ controller: 'admin/geo/replicables', action: 'index', replicable_name_plural: replicator_class.replicable_name_plural }], html_options: { class: 'gl-pr-2' }) do
= link_to admin_geo_replicables_path(replicable_name_plural: replicator_class.replicable_name_plural), title: replicator_class.replicable_title_plural do
%span
......
......@@ -78,7 +78,7 @@ module Geo
end
def replicator_classes
Gitlab::Geo.replicator_classes
Gitlab::Geo.enabled_replicator_classes
end
end
end
......@@ -166,7 +166,7 @@ module Gitlab
_(template) % { url: url }
end
def self.replicator_classes
def self.enabled_replicator_classes
REPLICATOR_CLASSES.select(&:enabled?)
end
end
......
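`REPLICATOR_CLASSES.select(&:enabled?)` assumes every replicator class answers `.enabled?`; in the self-service framework this is typically backed by a feature flag. A hypothetical example (the class and flag names are invented for illustration):

```ruby
module Geo
  class WidgetReplicator < Gitlab::Geo::Replicator
    # Hypothetical: gate replication of this data type behind a feature flag.
    def self.enabled?
      ::Feature.enabled?(:geo_widget_replication, default_enabled: true)
    end
  end
end
```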
......@@ -164,7 +164,7 @@ module Gitlab
end
def print_replicators_status
Gitlab::Geo.replicator_classes.each do |replicator_class|
Gitlab::Geo.enabled_replicator_classes.each do |replicator_class|
print "#{replicator_class.replicable_title_plural}: ".rjust(GEO_STATUS_COLUMN_WIDTH)
show_failed_value(replicator_class.failed_count)
......@@ -246,7 +246,7 @@ module Gitlab
end
def print_replicators_checked_status
Gitlab::Geo.replicator_classes.each do |replicator_class|
Gitlab::Geo.enabled_replicator_classes.each do |replicator_class|
print "#{replicator_class.replicable_title_plural} Checked: ".rjust(GEO_STATUS_COLUMN_WIDTH)
show_failed_value(replicator_class.checksum_failed_count)
print "#{replicator_class.checksummed_count}/#{replicator_class.registry_count} "
......@@ -275,7 +275,7 @@ module Gitlab
r.push current_node_status.container_repositories_failed_count
end
Gitlab::Geo.replicator_classes.each do |replicator_class|
Gitlab::Geo.enabled_replicator_classes.each do |replicator_class|
r.push replicator_class.failed_count
end
end
......@@ -294,7 +294,7 @@ module Gitlab
v.push current_node_status.repositories_checked_failed_count
end
Gitlab::Geo.replicator_classes.each do |replicator_class|
Gitlab::Geo.enabled_replicator_classes.each do |replicator_class|
v.push replicator_class.checksum_failed_count
end
end
......
......@@ -45,7 +45,7 @@ RSpec.describe 'admin Geo Replication Nav', :js, :geo do
end
describe 'visit admin/geo/replication/*' do
Gitlab::Geo.replicator_classes.each do |replicator_class|
Gitlab::Geo.enabled_replicator_classes.each do |replicator_class|
it_behaves_like 'active sidebar link', replicator_class.replicable_title_plural do
let(:path) { admin_geo_replicables_path(replicable_name_plural: replicator_class.replicable_name_plural) }
end
......
......@@ -65,7 +65,7 @@ RSpec.describe 'admin Geo Sidebar', :js, :geo do
end
describe 'visiting geo replicables' do
Gitlab::Geo.replicator_classes.each do |replicator_class|
Gitlab::Geo.enabled_replicator_classes.each do |replicator_class|
it_behaves_like 'active sidebar link', 'Replication' do
let(:path) { admin_geo_replicables_path(replicable_name_plural: replicator_class.replicable_name_plural) }
end
......
......@@ -39,7 +39,7 @@ RSpec.describe EE::GeoHelper do
end
it 'includes replicator types' do
expected_names = helper.replicator_classes.map { |c| c.replicable_name_plural }
expected_names = helper.enabled_replicator_classes.map { |c| c.replicable_name_plural }
expect(names).to include(*expected_names)
end
......
......@@ -336,9 +336,9 @@ RSpec.describe Gitlab::Geo, :geo, :request_store do
end
end
describe '.replicator_classes' do
describe '.enabled_replicator_classes' do
it 'returns an Array of replicator classes' do
result = described_class.replicator_classes
result = described_class.enabled_replicator_classes
expect(result).to be_an(Array)
expect(result).to include(Geo::PackageFileReplicator)
......@@ -350,7 +350,7 @@ RSpec.describe Gitlab::Geo, :geo, :request_store do
end
it 'does not return the replicator class' do
expect(described_class.replicator_classes).not_to include(Geo::PackageFileReplicator)
expect(described_class.enabled_replicator_classes).not_to include(Geo::PackageFileReplicator)
end
end
end
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Finding do
describe 'associations' do
it { is_expected.to belong_to(:scan).required }
it { is_expected.to belong_to(:scanner).required }
end
describe 'validations' do
it { is_expected.to validate_presence_of(:project_fingerprint) }
it { is_expected.to validate_length_of(:project_fingerprint).is_at_most(40) }
end
end
......@@ -6,6 +6,7 @@ RSpec.describe Security::Scan do
describe 'associations' do
it { is_expected.to belong_to(:build) }
it { is_expected.to have_one(:pipeline).through(:build).class_name('Ci::Pipeline') }
it { is_expected.to have_many(:findings) }
end
describe 'validations' do
......
......@@ -4,8 +4,9 @@ require 'spec_helper'
RSpec.describe Vulnerabilities::Scanner do
describe 'associations' do
it { is_expected.to have_many(:findings).class_name('Vulnerabilities::Finding') }
it { is_expected.to belong_to(:project) }
it { is_expected.to have_many(:findings).class_name('Vulnerabilities::Finding') }
it { is_expected.to have_many(:security_findings).class_name('Security::Finding') }
end
describe 'validations' do
......
......@@ -45,7 +45,7 @@ RSpec.describe 'EE-specific admin routing' do
end
describe Admin::Geo::ReplicablesController, 'routing' do
Gitlab::Geo.replicator_classes.map(&:replicable_name_plural).each do |replicable_name_plural|
Gitlab::Geo.enabled_replicator_classes.map(&:replicable_name_plural).each do |replicable_name_plural|
it "routes /admin/geo/replication/#{replicable_name_plural} to replicables#index" do
expect(get("/admin/geo/replication/#{replicable_name_plural}")).to route_to('admin/geo/replicables#index', replicable_name_plural: replicable_name_plural)
end
......
......@@ -201,33 +201,61 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
end
context 'when job has artifacts' do
before do
get_show_json
end
context 'with no expiry date'
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
it 'exposes needed information' do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['artifact']['download_path']).to match(%r{artifacts/download})
expect(json_response['artifact']['browse_path']).to match(%r{artifacts/browse})
expect(json_response['artifact']).not_to have_key('keep_path')
expect(json_response['artifact']).not_to have_key('expired')
expect(json_response['artifact']).not_to have_key('expired_at')
end
end
context 'with expiry date' do
context 'with expired artifacts' do
let(:job) { create(:ci_build, :success, :artifacts, :expired, pipeline: pipeline) }
it 'exposes needed information' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['artifact']).not_to have_key('download_path')
expect(json_response['artifact']).not_to have_key('browse_path')
expect(json_response['artifact']['expired']).to eq(true)
expect(json_response['artifact']['expire_at']).not_to be_empty
context 'when artifacts are unlocked' do
before do
job.pipeline.unlocked!
end
it 'exposes needed information' do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['artifact']).not_to have_key('download_path')
expect(json_response['artifact']).not_to have_key('browse_path')
expect(json_response['artifact']).not_to have_key('keep_path')
expect(json_response['artifact']['expired']).to eq(true)
expect(json_response['artifact']['expire_at']).not_to be_empty
expect(json_response['artifact']['locked']).to eq(false)
end
end
context 'when artifacts are locked' do
before do
job.pipeline.artifacts_locked!
end
it 'exposes needed information' do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['artifact']).to have_key('download_path')
expect(json_response['artifact']).to have_key('browse_path')
expect(json_response['artifact']).to have_key('keep_path')
expect(json_response['artifact']['expired']).to eq(true)
expect(json_response['artifact']['expire_at']).not_to be_empty
expect(json_response['artifact']['locked']).to eq(true)
end
end
end
end
......
......@@ -49,6 +49,7 @@ RSpec.describe Projects::MergeRequests::ContentController do
do_request(:widget)
expect(response).to match_response_schema('entities/merge_request_poll_widget')
expect(response.headers['Poll-Interval']).to eq('10000')
end
......@@ -64,6 +65,20 @@ RSpec.describe Projects::MergeRequests::ContentController do
expect(response.headers['Poll-Interval']).to eq('300000')
end
end
context 'with coverage data' do
let(:merge_request) { create(:merge_request, target_project: project, source_project: project, head_pipeline: head_pipeline) }
let!(:base_pipeline) { create(:ci_empty_pipeline, project: project, ref: merge_request.target_branch, sha: merge_request.diff_base_sha) }
let!(:head_pipeline) { create(:ci_empty_pipeline, project: project) }
let!(:rspec_base) { create(:ci_build, name: 'rspec', coverage: 93.1, pipeline: base_pipeline) }
let!(:rspec_head) { create(:ci_build, name: 'rspec', coverage: 97.1, pipeline: head_pipeline) }
it 'renders widget MR entity as json' do
do_request(:widget)
expect(response).to match_response_schema('entities/merge_request_poll_widget')
end
end
end
end
......
......@@ -373,13 +373,29 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
let(:expire_at) { Time.now + 7.days }
context 'when user has ability to update job' do
it 'keeps artifacts when keep button is clicked' do
expect(page).to have_content 'The artifacts will be removed in'
context 'when artifacts are unlocked' do
before do
job.pipeline.unlocked!
end
click_link 'Keep'
it 'keeps artifacts when keep button is clicked' do
expect(page).to have_content 'The artifacts will be removed in'
expect(page).to have_no_link 'Keep'
expect(page).to have_no_content 'The artifacts will be removed in'
click_link 'Keep'
expect(page).to have_no_link 'Keep'
expect(page).to have_no_content 'The artifacts will be removed in'
end
end
context 'when artifacts are locked' do
before do
job.pipeline.artifacts_locked!
end
it 'shows the keep button' do
expect(page).to have_link 'Keep'
end
end
end
......@@ -395,9 +411,26 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
context 'when artifacts expired' do
let(:expire_at) { Time.now - 7.days }
it 'does not have the Keep button' do
expect(page).to have_content 'The artifacts were removed'
expect(page).not_to have_link 'Keep'
context 'when artifacts are unlocked' do
before do
job.pipeline.unlocked!
end
it 'does not have the Keep button' do
expect(page).to have_content 'The artifacts were removed'
expect(page).not_to have_link 'Keep'
end
end
context 'when artifacts are locked' do
before do
job.pipeline.artifacts_locked!
end
it 'has the Keep button' do
expect(page).not_to have_content 'The artifacts were removed'
expect(page).to have_link 'Keep'
end
end
end
end
......
......@@ -22,6 +22,14 @@
"only_allow_merge_if_pipeline_succeeds": { "type": "boolean" },
"has_ci": { "type": "boolean" },
"ci_status": { "type": ["string", "null"] },
"pipeline_coverage_delta": { "type": ["float", "null"] },
"builds_with_coverage": {
"type": ["array", "null"],
"items": {
"type": "object",
"required": ["name", "coverage"]
}
},
"cancel_auto_merge_path": { "type": ["string", "null"] },
"test_reports_path": { "type": ["string", "null"] },
"create_issue_to_resolve_discussions_path": { "type": ["string", "null"] },
......
......@@ -612,6 +612,46 @@ RSpec.describe Ci::Build do
end
end
describe '#locked_artifacts?' do
subject(:locked_artifacts) { build.locked_artifacts? }
context 'when pipeline is artifacts_locked' do
before do
build.pipeline.artifacts_locked!
end
context 'artifacts archive does not exist' do
let(:build) { create(:ci_build) }
it { is_expected.to be_falsy }
end
context 'artifacts archive exists' do
let(:build) { create(:ci_build, :artifacts) }
it { is_expected.to be_truthy }
end
end
context 'when pipeline is unlocked' do
before do
build.pipeline.unlocked!
end
context 'artifacts archive does not exist' do
let(:build) { create(:ci_build) }
it { is_expected.to be_falsy }
end
context 'artifacts archive exists' do
let(:build) { create(:ci_build, :artifacts) }
it { is_expected.to be_falsy }
end
end
end
describe '#available_artifacts?' do
let(:build) { create(:ci_build) }
......
......@@ -3441,4 +3441,17 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
it { is_expected.to eq(Gitlab::Git::TAG_REF_PREFIX + pipeline.source_ref.to_s) }
end
end
describe "#builds_with_coverage" do
it 'returns builds with coverage only' do
rspec = create(:ci_build, name: 'rspec', coverage: 97.1, pipeline: pipeline)
jest = create(:ci_build, name: 'jest', coverage: 94.1, pipeline: pipeline)
karma = create(:ci_build, name: 'karma', coverage: nil, pipeline: pipeline)
builds = pipeline.builds_with_coverage
expect(builds).to include(rspec, jest)
expect(builds).not_to include(karma)
end
end
end
......@@ -4112,4 +4112,14 @@ RSpec.describe MergeRequest, factory_default: :keep do
expect(context[:label_url_method]).to eq(:project_merge_requests_url)
end
end
describe '#head_pipeline_builds_with_coverage' do
it 'delegates to head_pipeline' do
expect(subject)
.to delegate_method(:builds_with_coverage)
.to(:head_pipeline)
.with_prefix
.with_arguments(allow_nil: true)
end
end
end
......@@ -188,25 +188,31 @@ RSpec.describe BuildDetailsEntity do
context 'when the build has expired artifacts' do
let!(:build) { create(:ci_build, :artifacts, artifacts_expire_at: 7.days.ago) }
it 'does not expose any artifact actions path' do
expect(subject[:artifact].keys).not_to include(:download_path, :browse_path, :keep_path)
end
context 'when pipeline is unlocked' do
before do
build.pipeline.unlocked!
end
it 'artifact locked is false' do
expect(subject.dig(:artifact, :locked)).to eq(false)
end
it 'artifact locked is false' do
expect(subject.dig(:artifact, :locked)).to eq(false)
it 'does not expose any artifact actions path' do
expect(subject[:artifact].keys).not_to include(:download_path, :browse_path, :keep_path)
end
end
context 'when the pipeline is artifacts_locked' do
before do
build.pipeline.update!(locked: :artifacts_locked)
build.pipeline.artifacts_locked!
end
it 'artifact locked is true' do
expect(subject.dig(:artifact, :locked)).to eq(true)
end
it 'exposes download and browse artifact actions path' do
expect(subject[:artifact].keys).to include(:download_path, :browse_path)
it 'exposes download, browse and keep artifact actions path' do
expect(subject[:artifact].keys).to include(:download_path, :browse_path, :keep_path)
end
end
end
......
......@@ -285,4 +285,20 @@ RSpec.describe MergeRequestPollWidgetEntity do
end
end
end
describe '#builds_with_coverage' do
it 'serializes the builds with coverage' do
allow(resource).to receive(:head_pipeline_builds_with_coverage).and_return([
double(name: 'rspec', coverage: 91.5),
double(name: 'jest', coverage: 94.1)
])
result = subject[:builds_with_coverage]
expect(result).to eq([
{ name: 'rspec', coverage: 91.5 },
{ name: 'jest', coverage: 94.1 }
])
end
end
end
......@@ -11,6 +11,10 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
let(:service) { described_class.new }
let!(:artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
before do
artifact.job.pipeline.unlocked!
end
context 'when artifact is expired' do
context 'when artifact is not locked' do
before do
......@@ -88,6 +92,8 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
before do
stub_const('Ci::DestroyExpiredJobArtifactsService::LOOP_LIMIT', 1)
stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
second_artifact.job.pipeline.unlocked!
end
let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
......@@ -102,7 +108,9 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
end
context 'when there are no artifacts' do
let!(:artifact) { }
before do
artifact.destroy!
end
it 'does not raise error' do
expect { subject }.not_to raise_error
......@@ -112,6 +120,8 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
context 'when there are artifacts more than batch sizes' do
before do
stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
second_artifact.job.pipeline.unlocked!
end
let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
......@@ -126,6 +136,10 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
let!(:pipeline_artifact_1) { create(:ci_pipeline_artifact, expire_at: 1.week.ago) }
let!(:pipeline_artifact_2) { create(:ci_pipeline_artifact, expire_at: 1.week.ago) }
before do
[pipeline_artifact_1, pipeline_artifact_2].each { |pipeline_artifact| pipeline_artifact.pipeline.unlocked! }
end
it 'destroys pipeline artifacts' do
expect { subject }.to change { Ci::PipelineArtifact.count }.by(-2)
end
......@@ -135,10 +149,26 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
let!(:pipeline_artifact_1) { create(:ci_pipeline_artifact, expire_at: 2.days) }
let!(:pipeline_artifact_2) { create(:ci_pipeline_artifact, expire_at: 2.days) }
it 'do not destroy pipeline artifacts' do
before do
[pipeline_artifact_1, pipeline_artifact_2].each { |pipeline_artifact| pipeline_artifact.pipeline.unlocked! }
end
it 'does not destroy pipeline artifacts' do
expect { subject }.not_to change { Ci::PipelineArtifact.count }
end
end
end
context 'when some artifacts are locked' do
before do
pipeline = create(:ci_pipeline, locked: :artifacts_locked)
job = create(:ci_build, pipeline: pipeline)
create(:ci_job_artifact, expire_at: 1.day.ago, job: job)
end
it 'destroys only unlocked artifacts' do
expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
end
end
end
end