Commit 44e28e7d authored by GitLab Bot's avatar GitLab Bot

Automatic merge of gitlab-org/gitlab master

parents d8f9f357 bd51ce5c
......@@ -62,6 +62,8 @@ eslint-report.html
/public/assets/
/public/uploads.*
/public/uploads/
/public/sitemap.xml
/public/sitemap.xml.gz
/shared/artifacts/
/spec/examples.txt
/rails_best_practices_output.html
......
.tests-metadata-state:
variables:
TESTS_METADATA_S3_BUCKET: "gitlab-ce-cache"
before_script:
- source scripts/utils.sh
artifacts:
......
cf8e99ccc104f0a43f41e54896ee46a5e1b15a0a
dfdc9b7725eb710dab8ae9970e98cc5118e65c49
<script>
import { mapActions } from 'vuex';
import { GlModal, GlIcon } from '@gitlab/ui';
import { GlModal, GlIcon, GlTooltipDirective } from '@gitlab/ui';
import { __, sprintf } from '~/locale';
import tooltip from '~/vue_shared/directives/tooltip';
import ListItem from './list_item.vue';
export default {
......@@ -12,7 +11,7 @@ export default {
GlModal,
},
directives: {
tooltip,
GlTooltip: GlTooltipDirective,
},
props: {
fileList: {
......@@ -73,7 +72,7 @@ export default {
<div class="d-flex ml-auto">
<button
v-if="!stagedList"
v-tooltip
v-gl-tooltip
:title="__('Discard all changes')"
:aria-label="__('Discard all changes')"
:disabled="!filesLength"
......
......@@ -12,6 +12,7 @@
# only_owned: boolean
# only_shared: boolean
# limit: integer
# include_subgroups: boolean
# params:
# sort: string
# visibility_level: int
......
......@@ -378,7 +378,7 @@ class Project < ApplicationRecord
delegate :feature_available?, :builds_enabled?, :wiki_enabled?,
:merge_requests_enabled?, :forking_enabled?, :issues_enabled?,
:pages_enabled?, :public_pages?, :private_pages?,
:pages_enabled?, :snippets_enabled?, :public_pages?, :private_pages?,
:merge_requests_access_level, :forking_access_level, :issues_access_level,
:wiki_access_level, :snippets_access_level, :builds_access_level,
:repository_access_level, :pages_access_level, :metrics_dashboard_access_level,
......
......@@ -9,7 +9,7 @@ class BulkUpdateIntegrationService
# rubocop: disable CodeReuse/ActiveRecord
def execute
Service.transaction do
batch.update_all(service_hash)
Service.where(id: batch.select(:id)).update_all(service_hash)
if integration.data_fields_present?
integration.data_fields.class.where(service_id: batch.select(:id)).update_all(data_fields_hash)
......
---
title: Add default sitemap generator for gitlab-org group
merge_request: 45645
author:
type: added
---
title: "Migrate tooltip in app/assets/javascripts/ide/components/commit_sidebar/list.vue"
merge_request: 46148
author:
type: other
......@@ -6,7 +6,7 @@ Our current CI parallelization setup is as follows:
1. The `retrieve-tests-metadata` job in the `prepare` stage ensures we have a
`knapsack/report-master.json` file:
- The `knapsack/report-master.json` file is fetched from S3, if it's not here
- The `knapsack/report-master.json` file is fetched from the latest `master` artifacts, if it's not here
we initialize the file with `{}`.
1. Each `[rspec|rspec-ee] [unit|integration|system|geo] n m` job are run with
`knapsack rspec` and should have an evenly distributed share of tests:
......@@ -19,7 +19,7 @@ Our current CI parallelization setup is as follows:
1. The `update-tests-metadata` job (which only runs on scheduled pipelines for
[the canonical project](https://gitlab.com/gitlab-org/gitlab)) takes all the
`knapsack/rspec*_pg_*.json` files and merge them all together into a single
`knapsack/report-master.json` file that is then uploaded to S3.
`knapsack/report-master.json` file that is saved as an artifact.
After that, the next pipeline will use the up-to-date `knapsack/report-master.json` file.
......
......@@ -30,6 +30,11 @@ repository is too large the import can timeout.
There is also the option of [connecting your external repository to get CI/CD benefits](../../../ci/ci_cd_for_external_repos/index.md). **(PREMIUM)**
## LFS authentication
When importing a project that contains LFS objects, if the project has an [`.lfsconfig`](https://github.com/git-lfs/git-lfs/blob/master/docs/man/git-lfs-config.5.ronn)
file with a URL host (`lfs.url`) different from the repository URL host, LFS files are not downloaded.
## Migrating from self-managed GitLab to GitLab.com
If you only need to migrate Git repositories, you can [import each project by URL](repo_by_url.md). Issues and merge requests can't be imported.
......
---
title: Don't use docker-in-docker by default for API Fuzzing
merge_request: 45827
author:
type: changed
# frozen_string_literal: true

# Builder template for the sitemap XML document.
#
# This fragment is read and `instance_eval`ed by
# Gitlab::Sitemaps::SitemapFile#render, so `xml_builder`, `urls` and
# `lastmod` resolve to methods on that object, not locals here.
xml_builder.instruct!
xml_builder.urlset xmlns: 'http://www.sitemaps.org/schemas/sitemap/0.9' do
  # `urls` may contain nested arrays (one per add_elements call) and nils
  # (unsupported element types); normalize before rendering.
  urls.flatten.compact.each do |url|
    xml_builder.url do
      xml_builder.loc url
      xml_builder.lastmod lastmod
    end
  end
end
# frozen_string_literal: true

module Gitlab
  module Sitemaps
    # Generates the sitemap for GitLab.com: a few generic explore URLs plus
    # the public gitlab-org group, its public subgroups and its non-archived
    # projects. Returns an error String when generation is not possible.
    class Generator
      class << self
        include Gitlab::Routing

        GITLAB_ORG_NAMESPACE = 'gitlab-org'.freeze

        # Builds the sitemap file and writes it to disk.
        #
        # Returns an error message String when not running on GitLab.com or
        # when the gitlab-org group cannot be found (missing or not public).
        def execute
          unless Gitlab.com?
            return "The sitemap can only be generated for Gitlab.com"
          end

          file = Sitemaps::SitemapFile.new

          if gitlab_org_group
            file.add_elements(generic_urls)
            file.add_elements(gitlab_org_group)
            file.add_elements(gitlab_org_subgroups)
            file.add_elements(gitlab_org_projects)
            file.save
          else
            "The group '#{GITLAB_ORG_NAMESPACE}' was not found"
          end
        end

        private

        # Static explore URLs that are always included in the sitemap.
        def generic_urls
          [
            explore_projects_url,
            explore_snippets_url,
            explore_groups_url
          ]
        end

        # The root gitlab-org group; nil unless it exists and is public.
        # Uses GITLAB_ORG_NAMESPACE instead of duplicating the path literal.
        def gitlab_org_group
          @gitlab_org_group ||= GroupFinder.new(nil).execute(
            path: GITLAB_ORG_NAMESPACE,
            parent_id: nil,
            visibility_level: Gitlab::VisibilityLevel::PUBLIC
          )
        end

        # Descendant groups of gitlab-org visible to anonymous users
        # (finder is invoked with a nil current user).
        def gitlab_org_subgroups
          GroupsFinder.new(
            nil,
            parent: gitlab_org_group,
            include_parent_descendants: true
          ).execute
        end

        # Non-archived projects of gitlab-org and its subgroups visible to
        # anonymous users; preloads project features and routes because
        # UrlExtractor checks feature flags and builds URLs per project.
        def gitlab_org_projects
          GroupProjectsFinder.new(
            current_user: nil,
            group: gitlab_org_group,
            params: { non_archived: true },
            options: { include_subgroups: true }
          ).execute.include_project_feature.inc_routes
        end
      end
    end
  end
end
# frozen_string_literal: true

module Gitlab
  module Sitemaps
    # Accumulates sitemap URL entries and renders/saves them as
    # public/sitemap.xml using the fragments/sitemap_file.xml.builder
    # template.
    class SitemapFile
      SITEMAP_FILE_PATH = File.join(Rails.public_path, 'sitemap.xml').freeze

      attr_accessor :urls

      def initialize
        @urls = []
      end

      # Extracts the URL(s) for each given element (String, Group or
      # Project) and appends them to the collection. Accepts a single
      # element or an array; no-op when nothing is given.
      def add_elements(elements = [])
        elements = Array(elements)
        return if elements.empty?

        # Use `map` (not `map!`): Kernel#Array returns the argument itself
        # when it is already an Array, so `map!` would mutate the caller's
        # array as a side effect.
        urls << elements.map { |element| Sitemaps::UrlExtractor.extract(element) }
      end

      # Writes the rendered sitemap to SITEMAP_FILE_PATH; no-op when no
      # URLs have been collected.
      def save
        return if urls.empty?

        File.write(SITEMAP_FILE_PATH, render)
      end

      # Renders the XML by instance_eval'ing the builder fragment, which
      # calls the xml_builder/lastmod helpers and reads #urls.
      def render
        fragment = File.read(File.expand_path("fragments/sitemap_file.xml.builder", __dir__))

        instance_eval fragment
      end

      private

      def xml_builder
        @xml_builder ||= Builder::XmlMarkup.new(indent: 2)
      end

      # All entries share the generation date as their <lastmod> value.
      def lastmod
        @lastmod ||= Date.today.iso8601
      end
    end
  end
end
# frozen_string_literal: true

module Gitlab
  module Sitemaps
    # Maps sitemap elements (raw URL strings, groups, projects) to the
    # list of URLs that should appear in the sitemap for each of them.
    class UrlExtractor
      class << self
        include Gitlab::Routing

        # Returns the URL(s) for the given element: the string itself,
        # the group/project URL list, or nil for unsupported types.
        def extract(element)
          if element.is_a?(String)
            element
          elsif element.is_a?(Group)
            extract_from_group(element)
          elsif element.is_a?(Project)
            extract_from_project(element)
          end
        end

        # URLs of the main pages of a group.
        def extract_from_group(group)
          [
            group_url(group),
            issues_group_url(group),
            merge_requests_group_url(group),
            group_packages_url(group),
            group_epics_url(group)
          ]
        end

        # URLs of the main pages of a project. Snippet and wiki URLs are
        # only appended when the corresponding feature is enabled.
        def extract_from_project(project)
          project_urls = [
            project_url(project),
            project_issues_url(project),
            project_merge_requests_url(project)
          ]

          project_urls << project_snippets_url(project) if project.snippets_enabled?
          project_urls << project_wiki_url(project, Wiki::HOMEPAGE) if project.wiki_enabled?

          project_urls
        end
      end
    end
  end
end
......@@ -9,7 +9,6 @@ RSpec.describe 'Subscriptions Content Security Policy' do
let_it_be(:zuora_url) { 'https://*.zuora.com' }
before do
stub_experiment_for_user(signup_flow: true)
stub_request(:get, /.*gitlab_plans.*/).to_return(status: 200, body: "{}")
expect_next_instance_of(SubscriptionsController) do |controller|
......
......@@ -9,6 +9,7 @@ RSpec.describe 'API-Fuzzing.gitlab-ci.yml' do
let(:template_filename) { Rails.root.join("lib/gitlab/ci/templates/" + template.full_name) }
let(:contents) { File.read(template_filename) }
let(:production_registry) { 'registry.gitlab.com/gitlab-org/security-products/analyzers/api-fuzzing:${FUZZAPI_VERSION}-engine' }
let(:staging_registry) { 'registry.gitlab.com/gitlab-org/security-products/analyzers/api-fuzzing-src:${FUZZAPI_VERSION}-engine' }
# Make sure future changes to the template use the production container registry.
#
......@@ -19,6 +20,10 @@ RSpec.describe 'API-Fuzzing.gitlab-ci.yml' do
it 'uses the production repository' do
expect( contents.include?(production_registry) ).to be true
end
it 'doesn\'t use the staging repository' do
expect( contents.include?(staging_registry) ).to be false
end
end
describe 'the created pipeline' do
......@@ -55,12 +60,12 @@ RSpec.describe 'API-Fuzzing.gitlab-ci.yml' do
end
context 'by default' do
it 'includes no jobs' do
it 'includes no job' do
expect { pipeline }.to raise_error(Ci::CreatePipelineService::CreateError)
end
end
context 'when FUZZAPI_HAR is present' do
context 'when configured with HAR' do
before do
create(:ci_variable, project: project, key: 'FUZZAPI_HAR', value: 'testing.har')
create(:ci_variable, project: project, key: 'FUZZAPI_TARGET_URL', value: 'http://example.com')
......@@ -71,9 +76,9 @@ RSpec.describe 'API-Fuzzing.gitlab-ci.yml' do
end
end
context 'when FUZZAPI_OPENAPI is present' do
context 'when configured with OpenAPI' do
before do
create(:ci_variable, project: project, key: 'FUZZAPI_OPENAPI', value: 'openapi.json')
create(:ci_variable, project: project, key: 'FUZZAPI_OPENAPI', value: 'testing.json')
create(:ci_variable, project: project, key: 'FUZZAPI_TARGET_URL', value: 'http://example.com')
end
......@@ -81,13 +86,38 @@ RSpec.describe 'API-Fuzzing.gitlab-ci.yml' do
expect(build_names).to match_array(%w[apifuzzer_fuzz])
end
end
context 'when FUZZAPI_D_TARGET_IMAGE is present' do
before do
create(:ci_variable, project: project, key: 'FUZZAPI_D_TARGET_IMAGE', value: 'imagename:latest')
create(:ci_variable, project: project, key: 'FUZZAPI_HAR', value: 'testing.har')
create(:ci_variable, project: project, key: 'FUZZAPI_TARGET_URL', value: 'http://example.com')
end
it 'includes dnd job' do
expect(build_names).to match_array(%w[apifuzzer_fuzz_dnd])
end
end
end
context 'when API_FUZZING_DISABLED=1' do
before do
create(:ci_variable, project: project, key: 'API_FUZZING_DISABLED', value: '1')
create(:ci_variable, project: project, key: 'FUZZAPI_HAR', value: 'testing.har')
create(:ci_variable, project: project, key: 'FUZZAPI_TARGET_URL', value: 'http://example.com')
end
it 'includes no jobs' do
expect { pipeline }.to raise_error(Ci::CreatePipelineService::CreateError)
end
end
context 'when API_FUZZING_DISABLED=1 with DnD' do
before do
create(:ci_variable, project: project, key: 'API_FUZZING_DISABLED', value: '1')
create(:ci_variable, project: project, key: 'FUZZAPI_D_TARGET_IMAGE', value: 'imagename:latest')
create(:ci_variable, project: project, key: 'FUZZAPI_HAR', value: 'testing.har')
create(:ci_variable, project: project, key: 'FUZZAPI_TARGET_URL', value: 'http://example.com')
end
it 'includes no jobs' do
......
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Sitemaps::Generator do
  subject { described_class.execute }

  it 'returns error if the env is not .com' do
    expect(Gitlab).to receive(:com?).and_return(false)

    expect(subject).to eq "The sitemap can only be generated for Gitlab.com"
  end

  context 'when env is .com' do
    before do
      expect(Gitlab).to receive(:com?).and_return(true)
    end

    it 'returns error if group gitlab-org is not found' do
      expect(subject).to eq "The group 'gitlab-org' was not found"
    end

    # Fixed descriptions: the outer context previously claimed the group
    # was "present and public" while containing a non-public case, the
    # non-public example said "returns and error", and the public context
    # description carried a trailing space.
    context 'when group gitlab-org is present' do
      context 'and it is not public' do
        it 'returns an error' do
          create(:group, :internal, path: 'gitlab-org', name: "Gitlab Org Group")

          expect(subject).to eq "The group 'gitlab-org' was not found"
        end
      end

      context 'and it is public' do
        let_it_be(:gitlab_org_group) { create(:group, :public, path: 'gitlab-org', name: "Gitlab Org Group") }
        let_it_be(:public_gitlab_org_project) { create(:project, :public, namespace: gitlab_org_group) }
        let_it_be(:internal_gitlab_org_project) { create(:project, :internal, namespace: gitlab_org_group) }
        let_it_be(:private_gitlab_org_project) { create(:project, :private, namespace: gitlab_org_group) }
        let_it_be(:public_subgroup) { create(:group, :public, path: 'group1', name: 'group1', parent: gitlab_org_group) }
        let_it_be(:internal_subgroup) { create(:group, :internal, path: 'group2', name: 'group2', parent: gitlab_org_group) }
        let_it_be(:public_subgroup_public_project) { create(:project, :public, namespace: public_subgroup) }
        let_it_be(:public_subgroup_internal_project) { create(:project, :internal, namespace: public_subgroup) }
        let_it_be(:internal_subgroup_private_project) { create(:project, :private, namespace: internal_subgroup) }
        let_it_be(:internal_subgroup_internal_project) { create(:project, :internal, namespace: internal_subgroup) }

        it 'includes default explore routes and gitlab-org group routes' do
          # Redirect the sitemap output to a temporary path so the real
          # public/sitemap.xml is never touched by the test suite.
          new_path = Rails.root.join('tmp/tests/sitemap.xml')
          stub_const('Gitlab::Sitemaps::SitemapFile::SITEMAP_FILE_PATH', new_path)

          subject

          content = File.read(new_path)

          expect(content).to include('/explore/projects')
          expect(content).to include('/explore/groups')
          expect(content).to include('/explore/snippets')
          expect(content).to include(gitlab_org_group.full_path)
          expect(content).to include(public_subgroup.full_path)
          expect(content).to include(public_gitlab_org_project.full_path)
          expect(content).to include(public_subgroup_public_project.full_path)
          expect(content).not_to include(internal_gitlab_org_project.full_path)
          expect(content).not_to include(private_gitlab_org_project.full_path)
          expect(content).not_to include(internal_subgroup.full_path)
          expect(content).not_to include(public_subgroup_internal_project.full_path)
          expect(content).not_to include(internal_subgroup_private_project.full_path)
          expect(content).not_to include(internal_subgroup_internal_project.full_path)

          File.delete(new_path)
        end
      end
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Sitemaps::SitemapFile do
  subject do
    described_class.new.tap do |file|
      file.add_elements("https://gitlab.com")
    end
  end

  describe '#render' do
    it 'generates a valid sitemap file' do
      # freeze_time so the expected <lastmod> date cannot roll over
      # between building the expectation and rendering.
      freeze_time do
        content = subject.render

        expected_content = <<~EOS
          <?xml version="1.0" encoding="UTF-8"?>
          <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
            <url>
              <loc>https://gitlab.com</loc>
              <lastmod>#{Date.today.iso8601}</lastmod>
            </url>
          </urlset>
        EOS

        expect(content).to eq expected_content
      end
    end
  end

  describe '#save' do
    # Description fixed: "no elements has been provided" -> "have been".
    it 'returns if no elements have been provided' do
      expect(File).not_to receive(:write)

      described_class.new.save # rubocop: disable Rails/SaveBang
    end

    it 'stores the content in the public root folder' do
      expect(subject).to receive(:render).and_call_original
      expect(File).to receive(:write).with(
        File.join(Rails.public_path, 'sitemap.xml'),
        anything)

      subject.save # rubocop: disable Rails/SaveBang
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Sitemaps::UrlExtractor do
  before do
    stub_default_url_options(host: 'localhost')
  end

  describe '.extract' do
    subject { described_class.extract(element) }

    context 'when element is a string' do
      let(:element) { "https://gitlab.com" }

      it 'returns the string without any processing' do
        expect(subject).to eq element
      end
    end

    context 'when element is a group' do
      let(:element) { build(:group) }

      it 'calls .extract_from_group' do
        expect(described_class).to receive(:extract_from_group)

        subject
      end
    end

    context 'when element is a project' do
      let(:element) { build(:project) }

      it 'calls .extract_from_project' do
        expect(described_class).to receive(:extract_from_project)

        subject
      end
    end

    context 'when element is unknown' do
      let(:element) { build(:user) }

      it 'returns nil' do
        expect(subject).to be_nil
      end
    end
  end

  describe '.extract_from_group' do
    let(:group) { build(:group) }

    subject { described_class.extract_from_group(group) }

    it 'returns several group urls' do
      expected_urls = [
        "http://localhost/#{group.full_path}",
        "http://localhost/groups/#{group.full_path}/-/issues",
        "http://localhost/groups/#{group.full_path}/-/merge_requests",
        "http://localhost/groups/#{group.full_path}/-/packages",
        "http://localhost/groups/#{group.full_path}/-/epics"
      ]

      expect(subject).to match_array(expected_urls)
    end
  end

  describe '.extract_from_project' do
    let(:project) { build(:project) }

    subject { described_class.extract_from_project(project) }

    it 'returns several project urls' do
      expected_urls = [
        "http://localhost/#{project.full_path}",
        "http://localhost/#{project.full_path}/-/issues",
        "http://localhost/#{project.full_path}/-/merge_requests",
        "http://localhost/#{project.full_path}/-/snippets",
        "http://localhost/#{project.full_path}/-/wikis/home"
      ]

      expect(subject).to match_array(expected_urls)
    end

    context 'when wiki is disabled' do
      let(:project) { build(:project, :wiki_disabled) }

      # Fixed assertion: the wiki URL is "/-/wikis/home" (see the example
      # above); the previous "/-/wiki_home" path made this check vacuous.
      it 'does not include wiki url' do
        expect(subject).not_to include("http://localhost/#{project.full_path}/-/wikis/home")
      end
    end

    context 'when snippets are disabled' do
      let(:project) { build(:project, :snippets_disabled) }

      # Fixed copy-paste bug: this example previously asserted the absence
      # of the wiki URL instead of the snippets URL.
      it 'does not include snippets url' do
        expect(subject).not_to include("http://localhost/#{project.full_path}/-/snippets")
      end
    end
  end
end
# Read more about this feature here: https://docs.gitlab.com/ee/user/application_security/api_fuzzing/
# Configure the scanning tool through the environment variables.
# List of the variables: https://docs.gitlab.com/ee/user/application_security/api_fuzzing/#available-variables
# How to set: https://docs.gitlab.com/ee/ci/yaml/#variables
stages:
- build
- test
......@@ -7,7 +13,7 @@ stages:
variables:
FUZZAPI_PROFILE: Quick
FUZZAPI_VERSION: latest
FUZZAPI_CONFIG: "/app/.gitlab-api-fuzzing.yml"
FUZZAPI_CONFIG: .gitlab-api-fuzzing.yml
FUZZAPI_TIMEOUT: 30
FUZZAPI_REPORT: gl-api-fuzzing-report.xml
#
......@@ -17,8 +23,52 @@ variables:
# available (non 500 response to HTTP(s))
FUZZAPI_SERVICE_START_TIMEOUT: "300"
#
FUZZAPI_IMAGE: registry.gitlab.com/gitlab-org/security-products/analyzers/api-fuzzing:${FUZZAPI_VERSION}-engine
#
apifuzzer_fuzz:
stage: fuzz
image:
name: $FUZZAPI_IMAGE
entrypoint: ["/bin/bash", "-l", "-c"]
variables:
FUZZAPI_PROJECT: $CI_PROJECT_PATH
FUZZAPI_API: http://apifuzzer:80
TZ: America/Los_Angeles
services:
- name: $FUZZAPI_IMAGE
alias: apifuzzer
entrypoint: ["dotnet", "/peach/Peach.Web.dll"]
allow_failure: true
rules:
- if: $FUZZAPI_D_TARGET_IMAGE
when: never
- if: $FUZZAPI_D_WORKER_IMAGE
when: never
- if: $API_FUZZING_DISABLED
when: never
- if: $API_FUZZING_DISABLED_FOR_DEFAULT_BRANCH &&
$CI_DEFAULT_BRANCH == $CI_COMMIT_REF_NAME
when: never
- if: $FUZZAPI_HAR == null && $FUZZAPI_OPENAPI == null
when: never
- if: $GITLAB_FEATURES =~ /\bapi_fuzzing\b/
script:
#
# Run user provided pre-script
- sh -c "$FUZZAPI_PRE_SCRIPT"
#
# Start scanning
- worker-entry
#
# Run user provided post-script
- sh -c "$FUZZAPI_POST_SCRIPT"
#
artifacts:
reports:
junit: $FUZZAPI_REPORT
apifuzzer_fuzz_dnd:
stage: fuzz
image: docker:19.03.12
variables:
......@@ -28,19 +78,20 @@ apifuzzer_fuzz:
FUZZAPI_API: http://apifuzzer:80
allow_failure: true
rules:
- if: $FUZZAPI_D_TARGET_IMAGE == null && $FUZZAPI_D_WORKER_IMAGE == null
when: never
- if: $API_FUZZING_DISABLED
when: never
- if: $API_FUZZING_DISABLED_FOR_DEFAULT_BRANCH &&
$CI_DEFAULT_BRANCH == $CI_COMMIT_REF_NAME
when: never
- if: $FUZZAPI_HAR == null &&
$FUZZAPI_OPENAPI == null &&
$FUZZAPI_D_WORKER_IMAGE == null
- if: $FUZZAPI_HAR == null && $FUZZAPI_OPENAPI == null
when: never
- if: $GITLAB_FEATURES =~ /\bapi_fuzzing\b/
services:
- docker:19.03.12-dind
script:
#
#
- docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY
#
......@@ -56,30 +107,13 @@ apifuzzer_fuzz:
--network $FUZZAPI_D_NETWORK \
-e Proxy:Port=8000 \
-e TZ=America/Los_Angeles \
-e FUZZAPI_API=http://127.0.0.1:80 \
-e FUZZAPI_PROJECT \
-e FUZZAPI_PROFILE \
-e FUZZAPI_CONFIG \
-e FUZZAPI_REPORT \
-e FUZZAPI_HAR \
-e FUZZAPI_OPENAPI \
-e FUZZAPI_TARGET_URL \
-e FUZZAPI_OVERRIDES_FILE \
-e FUZZAPI_OVERRIDES_ENV \
-e FUZZAPI_OVERRIDES_CMD \
-e FUZZAPI_OVERRIDES_INTERVAL \
-e FUZZAPI_TIMEOUT \
-e FUZZAPI_VERBOSE \
-e FUZZAPI_SERVICE_START_TIMEOUT \
-e FUZZAPI_HTTP_USERNAME \
-e FUZZAPI_HTTP_PASSWORD \
-e GITLAB_FEATURES \
-v $CI_PROJECT_DIR:/app \
-p 80:80 \
-p 8000:8000 \
-p 514:514 \
--restart=no \
registry.gitlab.com/gitlab-org/security-products/analyzers/api-fuzzing:${FUZZAPI_VERSION}-engine
$FUZZAPI_IMAGE \
dotnet /peach/Peach.Web.dll
#
# Start target container
- |
......@@ -94,7 +128,7 @@ apifuzzer_fuzz:
$FUZZAPI_D_TARGET_IMAGE \
; fi
#
# Start worker container
# Start worker container if provided
- |
if [ "$FUZZAPI_D_WORKER_IMAGE" != "" ]; then \
echo "Starting worker image $FUZZAPI_D_WORKER_IMAGE" \
......@@ -104,9 +138,20 @@ apifuzzer_fuzz:
-e FUZZAPI_API=http://apifuzzer:80 \
-e FUZZAPI_PROJECT \
-e FUZZAPI_PROFILE \
-e FUZZAPI_AUTOMATION_CMD \
-e FUZZAPI_CONFIG \
-e FUZZAPI_REPORT \
-e FUZZAPI_HAR \
-e FUZZAPI_OPENAPI \
-e FUZZAPI_TARGET_URL \
-e FUZZAPI_OVERRIDES_FILE \
-e FUZZAPI_OVERRIDES_ENV \
-e FUZZAPI_OVERRIDES_CMD \
-e FUZZAPI_OVERRIDES_INTERVAL \
-e FUZZAPI_TIMEOUT \
-e FUZZAPI_VERBOSE \
-e FUZZAPI_SERVICE_START_TIMEOUT \
-e FUZZAPI_HTTP_USERNAME \
-e FUZZAPI_HTTP_PASSWORD \
-e CI_COMMIT_BRANCH=${CI_COMMIT_BRANCH} \
$FUZZAPI_D_WORKER_ENV \
$FUZZAPI_D_WORKER_PORTS \
......@@ -115,13 +160,43 @@ apifuzzer_fuzz:
$FUZZAPI_D_WORKER_IMAGE \
; fi
#
# Wait for testing to complete if api fuzzer is scanning
- if [ "$FUZZAPI_HAR$FUZZAPI_OPENAPI" != "" ]; then echo "Waiting for API Fuzzer to exit"; docker wait apifuzzer; fi
# Start API Fuzzing provided worker if no other worker present
- |
if [ "$FUZZAPI_D_WORKER_IMAGE" == "" ]; then \
docker run \
--name worker \
--network $FUZZAPI_D_NETWORK \
-e TZ=America/Los_Angeles \
-e FUZZAPI_API=http://apifuzzer:80 \
-e FUZZAPI_PROJECT \
-e FUZZAPI_PROFILE \
-e FUZZAPI_CONFIG \
-e FUZZAPI_REPORT \
-e FUZZAPI_HAR \
-e FUZZAPI_OPENAPI \
-e FUZZAPI_TARGET_URL \
-e FUZZAPI_OVERRIDES_FILE \
-e FUZZAPI_OVERRIDES_ENV \
-e FUZZAPI_OVERRIDES_CMD \
-e FUZZAPI_OVERRIDES_INTERVAL \
-e FUZZAPI_TIMEOUT \
-e FUZZAPI_VERBOSE \
-e FUZZAPI_SERVICE_START_TIMEOUT \
-e FUZZAPI_HTTP_USERNAME \
-e FUZZAPI_HTTP_PASSWORD \
-v $CI_PROJECT_DIR:/app \
-p 81:80 \
-p 8001:8000 \
-p 515:514 \
--restart=no \
$FUZZAPI_IMAGE \
worker-entry \
; fi
#
# Propagate exit code from api fuzzer (if any)
- if [[ $(docker inspect apifuzzer --format='{{.State.ExitCode}}') != "0" ]]; then echo "API Fuzzing exited with an error. Logs are available as job artifacts."; docker logs apifuzzer; exit 1; fi
# Propagate exit code from api fuzzing scanner (if any)
- if [[ $(docker inspect apifuzzer --format='{{.State.ExitCode}}') != "0" ]]; then echo "API Fuzzing scanner exited with an error. Logs are available as job artifacts."; exit 1; fi
#
# Run user provided pre-script
# Run user provided post-script
- sh -c "$FUZZAPI_POST_SCRIPT"
#
after_script:
......@@ -129,13 +204,13 @@ apifuzzer_fuzz:
# Shutdown all containers
- echo "Stopping all containers"
- if [ "$FUZZAPI_D_TARGET_IMAGE" != "" ]; then docker stop target; fi
- if [ "$FUZZAPI_D_WORKER_IMAGE" != "" ]; then docker stop worker; fi
- docker stop worker
- docker stop apifuzzer
#
# Save docker logs
- docker logs apifuzzer &> gl-api_fuzzing-logs.log
- if [ "$FUZZAPI_D_TARGET_IMAGE" != "" ]; then docker logs target &> gl-api_fuzzing-target-logs.log; fi
- if [ "$FUZZAPI_D_WORKER_IMAGE" != "" ]; then docker logs worker &> gl-api_fuzzing-worker-logs.log; fi
- docker logs worker &> gl-api_fuzzing-worker-logs.log
#
artifacts:
when: always
......
......@@ -4,11 +4,11 @@ function retrieve_tests_metadata() {
mkdir -p knapsack/ rspec_flaky/ rspec_profiling/
if [[ ! -f "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" ]]; then
wget -O "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" "http://${TESTS_METADATA_S3_BUCKET}.s3.amazonaws.com/${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" || echo "{}" > "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}"
wget -O "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" "https://gitlab.com/gitlab-org/gitlab/-/jobs/artifacts/master/raw/${KNAPSACK_RSPEC_SUITE_REPORT_PATH}?job=retrieve-tests-metadata" || echo "{}" > "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}"
fi
if [[ ! -f "${FLAKY_RSPEC_SUITE_REPORT_PATH}" ]]; then
wget -O "${FLAKY_RSPEC_SUITE_REPORT_PATH}" "http://${TESTS_METADATA_S3_BUCKET}.s3.amazonaws.com/${FLAKY_RSPEC_SUITE_REPORT_PATH}" || echo "{}" > "${FLAKY_RSPEC_SUITE_REPORT_PATH}"
wget -O "${FLAKY_RSPEC_SUITE_REPORT_PATH}" "https://gitlab.com/gitlab-org/gitlab/-/jobs/artifacts/master/raw/${FLAKY_RSPEC_SUITE_REPORT_PATH}?job=retrieve-tests-metadata" || echo "{}" > "${FLAKY_RSPEC_SUITE_REPORT_PATH}"
fi
}
......@@ -16,29 +16,11 @@ function update_tests_metadata() {
echo "{}" > "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}"
scripts/merge-reports "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" knapsack/rspec*.json
if [[ -n "${TESTS_METADATA_S3_BUCKET}" ]]; then
if [[ "$CI_PIPELINE_SOURCE" == "schedule" ]]; then
scripts/sync-reports put "${TESTS_METADATA_S3_BUCKET}" "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}"
else
echo "Not uplaoding report to S3 as the pipeline is not a scheduled one."
fi
fi
rm -f knapsack/rspec*.json
scripts/merge-reports "${FLAKY_RSPEC_SUITE_REPORT_PATH}" rspec_flaky/all_*.json
export FLAKY_RSPEC_GENERATE_REPORT="true"
scripts/merge-reports "${FLAKY_RSPEC_SUITE_REPORT_PATH}" rspec_flaky/all_*.json
scripts/flaky_examples/prune-old-flaky-examples "${FLAKY_RSPEC_SUITE_REPORT_PATH}"
if [[ -n ${TESTS_METADATA_S3_BUCKET} ]]; then
if [[ "$CI_PIPELINE_SOURCE" == "schedule" ]]; then
scripts/sync-reports put "${TESTS_METADATA_S3_BUCKET}" "${FLAKY_RSPEC_SUITE_REPORT_PATH}"
else
echo "Not uploading report to S3 as the pipeline is not a scheduled one."
fi
fi
rm -f rspec_flaky/all_*.json rspec_flaky/new_*.json
if [[ "$CI_PIPELINE_SOURCE" == "schedule" ]]; then
......
......@@ -10,60 +10,69 @@ RSpec.describe BulkUpdateIntegrationService do
end
let(:excluded_attributes) { %w[id project_id group_id inherit_from_id instance template created_at updated_at] }
let(:batch) do
Service.inherited_descendants_from_self_or_ancestors_from(subgroup_integration).where(id: group_integration.id..integration.id)
end
let_it_be(:group) { create(:group) }
let_it_be(:subgroup) { create(:group, parent: group) }
let_it_be(:group_integration) do
JiraService.create!(
group: group,
active: true,
push_events: true,
url: 'http://update-jira.instance.com',
username: 'user',
password: 'secret'
url: 'http://group.jira.com'
)
end
let_it_be(:subgroup_integration) do
JiraService.create!(
inherit_from_id: group_integration.id,
group: create(:group, parent: group),
active: true,
push_events: true,
url: 'http://update-jira.instance.com',
username: 'user',
password: 'secret'
group: subgroup,
url: 'http://subgroup.jira.com',
push_events: true
)
end
let_it_be(:excluded_integration) do
JiraService.create!(
group: create(:group),
url: 'http://another.jira.com',
push_events: false
)
end
let_it_be(:integration) do
JiraService.create!(
project: create(:project),
instance: false,
active: true,
push_events: false,
url: 'http://jira.instance.com',
username: 'user',
password: 'secret'
project: create(:project, group: subgroup),
inherit_from_id: subgroup_integration.id,
url: 'http://project.jira.com',
push_events: false
)
end
context 'with inherited integration' do
it 'updates the integration' do
described_class.new(subgroup_integration, Service.where.not(project: nil)).execute
it 'updates the integration', :aggregate_failures do
described_class.new(subgroup_integration, batch).execute
expect(integration.reload.inherit_from_id).to eq(group_integration.id)
expect(integration.attributes.except(*excluded_attributes))
expect(integration.reload.attributes.except(*excluded_attributes))
.to eq(subgroup_integration.attributes.except(*excluded_attributes))
expect(excluded_integration.reload.inherit_from_id).not_to eq(group_integration.id)
expect(excluded_integration.reload.attributes.except(*excluded_attributes))
.not_to eq(subgroup_integration.attributes.except(*excluded_attributes))
end
context 'with integration with data fields' do
let(:excluded_attributes) { %w[id service_id created_at updated_at] }
it 'updates the data fields from the integration' do
described_class.new(subgroup_integration, Service.where.not(project: nil)).execute
it 'updates the data fields from the integration', :aggregate_failures do
described_class.new(subgroup_integration, batch).execute
expect(integration.reload.data_fields.attributes.except(*excluded_attributes))
expect(integration.data_fields.attributes.except(*excluded_attributes))
.to eq(subgroup_integration.data_fields.attributes.except(*excluded_attributes))
expect(integration.data_fields.attributes.except(*excluded_attributes))
.not_to eq(excluded_integration.data_fields.attributes.except(*excluded_attributes))
end
end
end
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment