Commit 4411440e authored by Nick Thomas

Merge branch 'master' into ce-to-ee-2018-12-07

parents ffb7d267 f67cf8af
......@@ -122,7 +122,7 @@ module Ci
acts_as_taggable
add_authentication_token_field :token
add_authentication_token_field :token, encrypted: true, fallback: true
before_save :update_artifacts_size, if: :artifacts_file_changed?
before_save :ensure_token
......
......@@ -70,13 +70,14 @@ module FastDestroyAll
module Helpers
extend ActiveSupport::Concern
include AfterCommitQueue
class_methods do
##
# This method is to be defined on models which have fast-destroyable models as children,
# and lets us avoid using the `dependent: :destroy` hook
def use_fast_destroy(relation)
before_destroy(prepend: true) do
def use_fast_destroy(relation, opts = {})
set_callback :destroy, :before, opts.merge(prepend: true) do
perform_fast_destroy(public_send(relation)) # rubocop:disable GitlabSecurity/PublicSend
end
end
......
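The new `opts` hash is forwarded to `set_callback`, so callers can attach standard Rails callback options such as `if:`. A minimal sketch of gating fast destroy behind a predicate (the `Model` class here is hypothetical; it mirrors the `WithUploads` usage in the next hunk):

```ruby
# Hypothetical model illustrating the new opts parameter; `if:` is a
# standard Rails callback option merged via opts.merge(prepend: true).
class Model < ActiveRecord::Base
  include FastDestroyAll::Helpers

  # Runs perform_fast_destroy(file_uploads) in a prepended before-destroy
  # callback, but only when #fast_destroy_enabled? returns true.
  use_fast_destroy :file_uploads, if: :fast_destroy_enabled?

  def fast_destroy_enabled?
    Feature.enabled?(:fast_destroy_uploads, self)
  end
end
```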
......@@ -17,6 +17,8 @@
module WithUploads
extend ActiveSupport::Concern
include FastDestroyAll::Helpers
include FeatureGate
# Currently there is no simple way to select only non-mounted
# uploads; they should all be FileUploaders, so we select them by
......@@ -25,21 +27,40 @@ module WithUploads
included do
has_many :uploads, as: :model
has_many :file_uploads, -> { where(uploader: FILE_UPLOADERS) }, class_name: 'Upload', as: :model
before_destroy :destroy_file_uploads
# TODO: when the feature flag is removed, we can use just the
# `dependent: :destroy` option on :file_uploads
before_destroy :remove_file_uploads
use_fast_destroy :file_uploads, if: :fast_destroy_enabled?
end
def retrieve_upload(_identifier, paths)
uploads.find_by(path: paths)
end
private
# Mounted uploads are deleted in CarrierWave's after_commit hook, but
# FileUploaders which are not mounted must be deleted explicitly. This
# cannot be done in after_commit because FileUploader requires loading the
# associated model on destroy (and the model is already deleted by then)
def destroy_file_uploads
self.uploads.where(uploader: FILE_UPLOADERS).find_each do |upload|
def remove_file_uploads
fast_destroy_enabled? ? delete_uploads : destroy_uploads
end
def delete_uploads
file_uploads.delete_all(:delete_all)
end
def destroy_uploads
file_uploads.find_each do |upload|
upload.destroy
end
end
def retrieve_upload(_identifier, paths)
uploads.find_by(path: paths)
def fast_destroy_enabled?
Feature.enabled?(:fast_destroy_uploads, self)
end
end
......@@ -25,6 +25,25 @@ class Upload < ActiveRecord::Base
Digest::SHA256.file(path).hexdigest
end
class << self
##
# FastDestroyAll concerns
def begin_fast_destroy
{
Uploads::Local => Uploads::Local.new.keys(with_files_stored_locally),
Uploads::Fog => Uploads::Fog.new.keys(with_files_stored_remotely)
}
end
##
# FastDestroyAll concerns
def finalize_fast_destroy(keys)
keys.each do |store_class, paths|
store_class.new.delete_keys_async(paths)
end
end
end
def absolute_path
raise ObjectStorage::RemoteStoreError, "Remote object has no absolute path." unless local?
return path unless relative_path?
......
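`begin_fast_destroy` and `finalize_fast_destroy` form the two-phase contract of the `FastDestroyAll` concern: deletion keys are collected while the upload rows still exist, the rows are bulk-deleted without per-row callbacks, and the actual file removal is scheduled only afterwards. A hedged sketch of that flow (the explicit `delete_all` stands in for whatever the concern performs; these call sites are not in this commit):

```ruby
keys = Upload.begin_fast_destroy        # { Uploads::Local => [...], Uploads::Fog => [...] }
uploads_relation.delete_all             # fast bulk delete, no per-row callbacks
Upload.finalize_fast_destroy(keys)      # enqueues DeleteStoredFilesWorker batches per store
```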
# frozen_string_literal: true
module Uploads
class Base
BATCH_SIZE = 100
attr_reader :logger
def initialize(logger: nil)
@logger = logger || Rails.logger # honor an injected logger; default to Rails.logger
end
def delete_keys_async(keys_to_delete)
keys_to_delete.each_slice(BATCH_SIZE) do |batch|
DeleteStoredFilesWorker.perform_async(self.class, batch)
end
end
end
end
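`delete_keys_async` fans the keys out to Sidekiq in slices of `BATCH_SIZE`, so a large relation never becomes one oversized job. For example (`paths_to_delete` is a placeholder array of paths):

```ruby
# With 250 keys, each_slice(100) enqueues three DeleteStoredFilesWorker
# jobs: two with 100 keys and one with 50.
Uploads::Local.new.delete_keys_async(paths_to_delete)
```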
# frozen_string_literal: true
module Uploads
class Fog < Base
include ::Gitlab::Utils::StrongMemoize
def available?
object_store.enabled
end
def keys(relation)
return [] unless available?
relation.pluck(:path)
end
def delete_keys(keys)
keys.each do |key|
connection.delete_object(bucket_name, key)
end
end
private
def object_store
Gitlab.config.uploads.object_store
end
def bucket_name
return unless available?
object_store.remote_directory
end
def connection
return unless available?
strong_memoize(:connection) do
::Fog::Storage.new(object_store.connection.to_hash.deep_symbolize_keys)
end
end
end
end
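A hedged usage sketch tying `keys` and `delete_keys` together; `with_files_stored_remotely` is the scope `Upload.begin_fast_destroy` uses above:

```ruby
store = Uploads::Fog.new
keys  = store.keys(Upload.with_files_stored_remotely) # object paths, or [] if object storage is off
store.delete_keys(keys)                               # synchronous per-object deletion
store.delete_keys_async(keys)                         # or batch it through DeleteStoredFilesWorker
```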
# frozen_string_literal: true
module Uploads
class Local < Base
def keys(relation)
relation.includes(:model).find_each.map(&:absolute_path)
end
def delete_keys(keys)
keys.each do |path|
delete_file(path)
end
end
private
def delete_file(path)
unless exists?(path)
logger.warn("File '#{path}' doesn't exist, skipping")
return
end
unless in_uploads?(path)
message = "Path '#{path}' is not in uploads dir, skipping"
logger.warn(message)
Gitlab::Sentry.track_exception(RuntimeError.new(message), extra: { uploads_dir: storage_dir })
return
end
FileUtils.rm(path)
delete_dir!(File.dirname(path))
end
def exists?(path)
path.present? && File.exist?(path)
end
def in_uploads?(path)
path.start_with?(storage_dir)
end
def delete_dir!(path)
Dir.rmdir(path)
rescue Errno::ENOENT
# Ignore: path does not exist
rescue Errno::ENOTDIR
# Ignore: path is not a dir
rescue Errno::ENOTEMPTY, Errno::EEXIST
# Ignore: dir is not empty
end
def storage_dir
@storage_dir ||= File.realpath(Gitlab.config.uploads.storage_path)
end
end
end
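`delete_file` removes the file and then best-effort prunes its parent directory: `Dir.rmdir` only succeeds on empty directories, so `ENOTEMPTY`/`EEXIST` are expected and swallowed. An illustrative call (the path is hypothetical):

```ruby
Uploads::Local.new.delete_keys(['/var/opt/gitlab/uploads/project/abc/file.png'])
# removes file.png, then rmdir('/var/opt/gitlab/uploads/project/abc') if now empty;
# paths outside the uploads dir are skipped and reported to Sentry
```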
......@@ -134,3 +134,4 @@
- delete_diff_files
- detect_repository_languages
- repository_cleanup
- delete_stored_files
# frozen_string_literal: true
class DeleteStoredFilesWorker
include ApplicationWorker
def perform(class_name, keys)
klass = begin
class_name.constantize
rescue NameError
nil
end
unless klass
message = "Unknown class '#{class_name}'"
logger.error(message)
Gitlab::Sentry.track_exception(RuntimeError.new(message))
return
end
klass.new(logger: logger).delete_keys(keys)
end
end
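Sidekiq serializes job arguments to JSON, so the class object passed to `perform_async` arrives as its string name and must be restored with `constantize`; unknown names are logged and reported rather than raised, so the job is not retried forever. A sketch of the round trip (paths are placeholders):

```ruby
DeleteStoredFilesWorker.perform_async(Uploads::Local, ['/uploads/a', '/uploads/b'])
# executes as: perform('Uploads::Local', ['/uploads/a', '/uploads/b'])
# => Uploads::Local.new(logger: logger).delete_keys(['/uploads/a', '/uploads/b'])
```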
---
title: Encrypt CI/CD builds authentication tokens
merge_request: 23436
author:
type: security
......@@ -82,6 +82,7 @@
- [detect_repository_languages, 1]
- [auto_devops, 2]
- [repository_cleanup, 1]
- [delete_stored_files, 1]
# EE-specific queues
- [ldap_group_sync, 2]
......
# frozen_string_literal: true
class AddTokenEncryptedToCiBuilds < ActiveRecord::Migration[5.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
add_column :ci_builds, :token_encrypted, :string
end
end
# frozen_string_literal: true
class AddIndexToCiBuildsTokenEncrypted < ActiveRecord::Migration[5.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_index :ci_builds, :token_encrypted, unique: true, where: 'token_encrypted IS NOT NULL'
end
def down
remove_concurrent_index :ci_builds, :token_encrypted
end
end
......@@ -439,6 +439,7 @@ ActiveRecord::Schema.define(version: 20181204135932) do
t.boolean "protected"
t.integer "failure_reason"
t.datetime_with_timezone "scheduled_at"
t.string "token_encrypted"
t.index ["artifacts_expire_at"], name: "index_ci_builds_on_artifacts_expire_at", where: "(artifacts_file <> ''::text)", using: :btree
t.index ["auto_canceled_by_id"], name: "index_ci_builds_on_auto_canceled_by_id", using: :btree
t.index ["commit_id", "stage_idx", "created_at"], name: "index_ci_builds_on_commit_id_and_stage_idx_and_created_at", using: :btree
......@@ -456,6 +457,7 @@ ActiveRecord::Schema.define(version: 20181204135932) do
t.index ["stage_id"], name: "index_ci_builds_on_stage_id", using: :btree
t.index ["status", "type", "runner_id"], name: "index_ci_builds_on_status_and_type_and_runner_id", using: :btree
t.index ["token"], name: "index_ci_builds_on_token", unique: true, using: :btree
t.index ["token_encrypted"], name: "index_ci_builds_on_token_encrypted", unique: true, where: "(token_encrypted IS NOT NULL)", using: :btree
t.index ["updated_at"], name: "index_ci_builds_on_updated_at", using: :btree
t.index ["user_id"], name: "index_ci_builds_on_user_id", using: :btree
end
......
......@@ -132,7 +132,8 @@ in three places:
- either under the project's CI/CD settings while [enabling Auto DevOps](#enabling-auto-devops)
- or in instance-wide settings in the **admin area > Settings** under the "Continuous Integration and Delivery" section
- or at the project or group level as a variable: `AUTO_DEVOPS_DOMAIN` (required if you want to use [multiple clusters](#using-multiple-kubernetes-clusters))
- or at the project level as a variable: `AUTO_DEVOPS_DOMAIN` (required if you want to use [multiple clusters](#using-multiple-kubernetes-clusters))
- or at the group level as a variable: `AUTO_DEVOPS_DOMAIN`
A wildcard DNS A record matching the base domain(s) is required, for example,
given a base domain of `example.com`, you'd need a DNS entry like:
......@@ -203,6 +204,12 @@ and verifying that your app is deployed as a review app in the Kubernetes
cluster with the `review/*` environment scope. Similarly, you can check the
other environments.
NOTE: **Note:**
Auto DevOps is not supported for a group with multiple clusters, as it
is not possible to set `AUTO_DEVOPS_DOMAIN` per environment on the group
level. This will be resolved in
[issue 52363](https://gitlab.com/gitlab-org/gitlab-ce/issues/52363).
## Enabling/Disabling Auto DevOps
When first using Auto DevOps, review the [requirements](#requirements) to ensure all necessary components to make
......
# Group-level Kubernetes clusters
> [Introduced](https://gitlab.com/gitlab-org/gitlab-ce/issues/34758) in GitLab 11.6.
CAUTION: **Warning:**
Group Cluster integration is currently in **Beta**.
## Overview
Similar to [project Kubernetes
clusters](../../project/clusters/index.md), group-level Kubernetes
clusters allow you to connect a Kubernetes cluster to your group,
enabling you to use the same cluster across multiple projects.
## Installing applications
GitLab provides a one-click install for various applications that can be
added directly to your cluster.
NOTE: **Note:**
Applications will be installed in a dedicated namespace called
`gitlab-managed-apps`. If you have added an existing Kubernetes cluster
with Tiller already installed, you should be careful as GitLab cannot
detect it. In this event, installing Tiller via the applications will
result in the cluster having it twice. This can lead to confusion during
deployments.
| Application | GitLab version | Description | Helm Chart |
| ----------- | -------------- | ----------- | ---------- |
| [Helm Tiller](https://docs.helm.sh) | 10.2+ | Helm is a package manager for Kubernetes and is required to install all the other applications. It is installed in its own pod inside the cluster which can run the `helm` CLI in a safe environment. | n/a |
| [Ingress](https://kubernetes.io/docs/concepts/services-networking/ingress) | 10.2+ | Ingress can provide load balancing, SSL termination, and name-based virtual hosting. It acts as a web proxy for your applications and is useful if you want to use [Auto DevOps](../../../topics/autodevops/index.md) or deploy your own web apps. | [stable/nginx-ingress](https://github.com/helm/charts/tree/master/stable/nginx-ingress) |
## RBAC compatibility
For each project under a group with a Kubernetes cluster, GitLab will
create a restricted service account with [`edit`
privileges](https://kubernetes.io/docs/reference/access-authn-authz/rbac/#user-facing-roles)
in the project namespace.
NOTE: **Note:**
RBAC support was introduced in
[GitLab 11.4](https://gitlab.com/gitlab-org/gitlab-ce/issues/29398), and
Project namespace restriction was introduced in
[GitLab 11.5](https://gitlab.com/gitlab-org/gitlab-ce/issues/51716).
## Cluster precedence
GitLab will use the project's cluster before using any cluster belonging
to the group containing the project if the project's cluster is available and not disabled.
In the case of sub-groups, GitLab will use the cluster of the closest ancestor group
to the project, provided the cluster is not disabled.
## Multiple Kubernetes clusters **[PREMIUM]**
With GitLab Premium, you can associate more than one Kubernetes cluster with your
group. That way you can have different clusters for different environments,
like dev, staging, production, etc.
Add another cluster similar to the first one and make sure to
[set an environment scope](#environment-scopes) that will
differentiate the new cluster from the rest.
NOTE: **Note:**
Auto DevOps is not supported for a group with multiple clusters, as it
is not possible to set `AUTO_DEVOPS_DOMAIN` per environment on the group
level. This will be resolved in
[issue 52363](https://gitlab.com/gitlab-org/gitlab-ce/issues/52363).
## Environment scopes **[PREMIUM]**
When adding more than one Kubernetes cluster to your project, you need
to differentiate them with an environment scope. The environment scope
associates clusters with [environments](../../../ci/environments.md)
similar to how the [environment-specific
variables](../../../ci/variables/README.md#limiting-environment-scopes-of-variables)
work.
While evaluating which environment matches the environment scope of a
cluster, [cluster precedence](#cluster-precedence) will take
effect. The cluster at the project level will take precedence, followed
by the closest ancestor group, followed by that group's parent, and so
on.
For example, let's say we have the following Kubernetes clusters:
| Cluster | Environment scope | Where |
| ---------- | ------------------- | ----------|
| Project | `*` | Project |
| Staging | `staging/*` | Project |
| Production | `production/*` | Project |
| Test | `test` | Group |
| Development| `*` | Group |
And the following environments are set in [`.gitlab-ci.yml`](../../../ci/yaml/README.md):
```yaml
stages:
- test
- deploy
test:
stage: test
script: sh test
deploy to staging:
stage: deploy
script: make deploy
environment:
name: staging/$CI_COMMIT_REF_NAME
url: https://staging.example.com/
deploy to production:
stage: deploy
script: make deploy
environment:
name: production/$CI_COMMIT_REF_NAME
url: https://example.com/
```
The result will then be:
- The Project cluster will be used for the `test` job.
- The Staging cluster will be used for the `deploy to staging` job.
- The Production cluster will be used for the `deploy to production` job.
......@@ -334,9 +334,10 @@ Define project templates at a group-level by setting a group as a template sourc
- **Projects**: view all projects within that group, add members to each project,
access each project's settings, and remove any project from the same screen.
- **Webhooks**: configure [webhooks](../project/integrations/webhooks.md) to your group.
- **Kubernetes cluster integration**: connect your GitLab group with [Kubernetes clusters](clusters/index.md).
- **Audit Events**: view [Audit Events](https://docs.gitlab.com/ee/administration/audit_events.html#audit-events)
for the group. **[STARTER ONLY]**
- **Pipelines quota**: keep track of the [pipeline quota](../admin_area/settings/continuous_integration.md) for the group
- **Pipelines quota**: keep track of the [pipeline quota](../admin_area/settings/continuous_integration.md) for the group.
## User contribution analysis **[STARTER]**
......@@ -350,4 +351,3 @@ With [GitLab Issues Analytics](issues_analytics/index.md), in groups, you can se
[ee]: https://about.gitlab.com/pricing/
[ee-2534]: https://gitlab.com/gitlab-org/gitlab-ee/issues/2534
......@@ -17,6 +17,11 @@ your account with Google Kubernetes Engine (GKE) so that you can [create new
clusters](#adding-and-creating-a-new-gke-cluster-via-gitlab) from within GitLab,
or provide the credentials to an [existing Kubernetes cluster](#adding-an-existing-kubernetes-cluster).
NOTE: **Note:**
From [GitLab 11.6](https://gitlab.com/gitlab-org/gitlab-ce/issues/34758) you
can also associate a Kubernetes cluster to your groups. Learn more about
[group Kubernetes clusters](../../group/clusters/index.md).
## Adding and creating a new GKE cluster via GitLab
TIP: **Tip:**
......@@ -246,16 +251,18 @@ install it manually.
## Installing applications
GitLab provides a one-click install for various applications which will be
added directly to your configured cluster. Those applications are needed for
[Review Apps](../../../ci/review_apps/index.md) and [deployments](../../../ci/environments.md).
GitLab provides a one-click install for various applications which can
be added directly to your configured cluster. Those applications are
needed for [Review Apps](../../../ci/review_apps/index.md) and
[deployments](../../../ci/environments.md).
NOTE: **Note:**
With the exception of Knative, the applications will be installed in a dedicated namespace called
`gitlab-managed-apps`. In case you have added an existing Kubernetes cluster
with Tiller already installed, you should be careful as GitLab cannot
detect it. By installing it via the applications will result into having it
twice, which can lead to confusion during deployments.
detect it. In this event, installing Tiller via the applications will
result in the cluster having it twice. This can lead to confusion during
deployments.
| Application | GitLab version | Description | Helm Chart |
| ----------- | :------------: | ----------- | --------------- |
......@@ -362,15 +369,11 @@ differentiate the new cluster with the rest.
## Setting the environment scope **[PREMIUM]**
NOTE: **Note:**
This is only available for [GitLab Premium][ee] where you can add more than
one Kubernetes cluster.
When adding more than one Kubernetes clusters to your project, you need to
differentiate them with an environment scope. The environment scope associates
clusters and [environments](../../../ci/environments.md) in an 1:1 relationship
similar to how the
[environment-specific variables](../../../ci/variables/README.md#limiting-environment-scopes-of-variables)
When adding more than one Kubernetes cluster to your project, you need
to differentiate them with an environment scope. The environment scope
associates clusters with [environments](../../../ci/environments.md)
similar to how the [environment-specific
variables](../../../ci/variables/README.md#limiting-environment-scopes-of-variables)
work.
The default environment scope is `*`, which means all jobs, regardless of their
......@@ -416,9 +419,9 @@ deploy to production:
The result will then be:
* The development cluster will be used for the "test" job.
* The staging cluster will be used for the "deploy to staging" job.
* The production cluster will be used for the "deploy to production" job.
- The development cluster will be used for the "test" job.
- The staging cluster will be used for the "deploy to staging" job.
- The production cluster will be used for the "deploy to production" job.
## Deployment variables
......
......@@ -6,6 +6,7 @@ import Tabs from '~/vue_shared/components/tabs/tabs';
import Tab from '~/vue_shared/components/tabs/tab.vue';
import IssueModal from 'ee/vue_shared/security_reports/components/modal.vue';
import SecurityDashboardTable from './security_dashboard_table.vue';
import VulnerabilityChart from './vulnerability_chart.vue';
import VulnerabilityCountList from './vulnerability_count_list.vue';
import Icon from '~/vue_shared/components/icon.vue';
import popover from '~/vue_shared/directives/popover';
......@@ -21,6 +22,7 @@ export default {
SecurityDashboardTable,
Tab,
Tabs,
VulnerabilityChart,
VulnerabilityCountList,
},
props: {
......@@ -40,6 +42,10 @@ export default {
type: String,
required: true,
},
vulnerabilitiesHistoryEndpoint: {
type: String,
required: true,
},
vulnerabilityFeedbackHelpPath: {
type: String,
required: true,
......@@ -73,15 +79,20 @@ export default {
html: true,
};
},
chartFlagEnabled() {
return gon.features && gon.features.groupSecurityDashboardHistory;
},
},
created() {
this.setVulnerabilitiesEndpoint(this.vulnerabilitiesEndpoint);
this.setVulnerabilitiesCountEndpoint(this.vulnerabilitiesCountEndpoint);
this.setVulnerabilitiesHistoryEndpoint(this.vulnerabilitiesHistoryEndpoint);
this.fetchVulnerabilitiesCount();
},
methods: {
...mapActions('vulnerabilities', [
'setVulnerabilitiesCountEndpoint',
'setVulnerabilitiesHistoryEndpoint',
'setVulnerabilitiesEndpoint',
'fetchVulnerabilitiesCount',
'createIssue',
......@@ -108,7 +119,11 @@ export default {
</span>
</template>
<vulnerability-count-list />
<h5 class="mt-4 mb-4">{{ __('Vulnerability List') }}</h5>
<template v-if="chartFlagEnabled">
<h4 class="my-4">{{ __('Vulnerability Chart') }}</h4>
<vulnerability-chart />
</template>
<h4 class="my-4">{{ __('Vulnerability List') }}</h4>
<security-dashboard-table
:dashboard-documentation="dashboardDocumentation"
:empty-state-svg-path="emptyStateSvgPath"
......
<script>
import dateFormat from 'dateformat';
import { mapState, mapActions } from 'vuex';
import { GlChart } from '@gitlab/ui';
import ChartTooltip from './vulnerability_chart_tooltip.vue';
export default {
name: 'VulnerabilityChart',
components: {
GlChart,
ChartTooltip,
},
data: () => ({
tooltipTitle: '',
tooltipEntries: [],
lines: [
{
name: 'Critical',
color: '#C0341D',
},
{
name: 'High',
color: '#DE7E00',
},
{
name: 'Medium',
color: '#6E49CB',
},
{
name: 'Low',
color: '#4F4F4F',
},
{
name: 'Total',
color: '#1F78D1',
},
],
}),
computed: {
...mapState('vulnerabilities', ['vulnerabilitiesHistory']),
series() {
return this.lines.map(line => {
const { name, color } = line;
const history = this.vulnerabilitiesHistory[name.toLowerCase()];
const data = history ? Object.entries(history) : [];
return {
borderWidth: 2,
color,
data,
name,
symbol: 'circle',
symbolSize: 6,
type: 'line',
};
});
},
options() {
return {
grid: {
bottom: 85,
left: 75,
right: 15,
top: 10,
},
tooltip: {
backgroundColor: '#fff',
borderColor: 'rgba(0, 0, 0, 0.1)',
borderWidth: 1,
confine: true,
formatter: this.renderTooltip,
padding: 0,
textStyle: {
color: '#4F4F4F',
},
trigger: 'axis',
},
xAxis: {
axisLabel: {
color: '#707070',
formatter: date => dateFormat(date, 'd mmm'),
margin: 8,
rotate: 45,
},
axisLine: {
lineStyle: {
color: '#dedede',
width: 2,
},
},
axisTick: {
show: false,
},
maxInterval: 1000 * 60 * 60 * 24 * 7,
min: Date.now() - 1000 * 60 * 60 * 24 * 28,
name: 'Date',
nameGap: 50,
nameLocation: 'center',
nameTextStyle: {
color: '#2e2e2e',
fontWeight: 'bold',
},
splitNumber: 12,
type: 'time',
},
yAxis: {
axisLabel: {
color: '#707070',
},
axisLine: {
lineStyle: {
color: '#dedede',
width: 2,
},
},
axisTick: {
show: false,
},
interval: 25,
name: 'Vulnerabilities',
nameGap: 42,
nameLocation: 'center',
nameRotation: 90,
nameTextStyle: {
color: '#2e2e2e',
fontWeight: 'bold',
},
type: 'value',
},
legend: {
bottom: 0,
icon: 'path://M0,0H120V40H0Z',
itemGap: 15,
left: 70,
textStyle: {
color: '#4F4F4F',
fontWeight: 'bold',
},
type: 'scroll',
},
series: this.series,
};
},
},
created() {
this.fetchVulnerabilitiesHistory();
},
methods: {
...mapActions('vulnerabilities', ['fetchVulnerabilitiesHistory']),
renderTooltip(params, ticket, callback) {
this.tooltipTitle = dateFormat(params[0].axisValue, 'd mmmm');
this.tooltipEntries = params;
this.$nextTick(() => callback(ticket, this.$refs.tooltip.$el.innerHTML));
return ' ';
},
},
};
</script>
<template>
<div class="vulnerabilities-chart">
<div class="vulnerabilities-chart-wrapper">
<gl-chart :options="options" :width="1240" />
<chart-tooltip v-show="false" ref="tooltip" :title="tooltipTitle" :entries="tooltipEntries" />
</div>
</div>
</template>
<script>
export default {
name: 'VulnerabilityChartLabel',
props: {
name: {
type: String,
required: true,
},
color: {
type: String,
required: true,
},
value: {
type: [Number],
required: false,
default: null,
},
},
};
</script>
<template>
<div class="d-flex align-items-center mb-1 js-chart-label">
<div class="js-color" :style="{ backgroundColor: color, width: '12px', height: '4px' }"></div>
<strong class="ml-2 mr-3 text-capitalize js-name">{{ name }}</strong>
<span v-if="value !== null" class="ml-auto js-value">{{ value }}</span>
</div>
</template>
<script>
import VulnerabilityChartLabel from './vulnerability_chart_label.vue';
export default {
name: 'VulnerabilityChartTooltip',
components: {
VulnerabilityChartLabel,
},
props: {
title: {
type: String,
required: false,
default: '',
},
entries: {
type: Array,
required: true,
},
},
};
</script>
<template>
<div class="card">
<div class="card-header">
<strong> {{ title }} </strong>
</div>
<div class="card-body">
<vulnerability-chart-label
v-for="entry in entries"
:key="entry.seriesId + entry.dataIndex"
:name="entry.seriesName"
:value="entry.data[1]"
:color="entry.color"
/>
</div>
</div>
</template>
......@@ -19,6 +19,7 @@ export default () => {
vulnerabilityFeedbackHelpPath: el.dataset.vulnerabilityFeedbackHelpPath,
vulnerabilitiesEndpoint: el.dataset.vulnerabilitiesEndpoint,
vulnerabilitiesCountEndpoint: el.dataset.vulnerabilitiesSummaryEndpoint,
vulnerabilitiesHistoryEndpoint: el.dataset.vulnerabilitiesHistoryEndpoint,
},
});
},
......
......@@ -204,4 +204,38 @@ export const receiveRevertDismissalError = ({ commit }, { flashError }) => {
}
};
export const setVulnerabilitiesHistoryEndpoint = ({ commit }, endpoint) => {
commit(types.SET_VULNERABILITIES_HISTORY_ENDPOINT, endpoint);
};
export const fetchVulnerabilitiesHistory = ({ state, dispatch }) => {
dispatch('requestVulnerabilitiesHistory');
axios({
method: 'GET',
url: state.vulnerabilitiesHistoryEndpoint,
})
.then(response => {
const { data } = response;
dispatch('receiveVulnerabilitiesHistorySuccess', { data });
})
.catch(() => {
dispatch('receiveVulnerabilitiesHistoryError');
});
};
export const requestVulnerabilitiesHistory = ({ commit }) => {
commit(types.REQUEST_VULNERABILITIES_HISTORY);
};
export const receiveVulnerabilitiesHistorySuccess = ({ commit }, { data }) => {
commit(types.RECEIVE_VULNERABILITIES_HISTORY_SUCCESS, data);
};
export const receiveVulnerabilitiesHistoryError = ({ commit }) => {
commit(types.RECEIVE_VULNERABILITIES_HISTORY_ERROR);
};
// prevent babel-plugin-rewire from generating an invalid default during karma tests
// This is no longer needed after gitlab-ce#52179 is merged
export default () => {};
......@@ -8,6 +8,11 @@ export const REQUEST_VULNERABILITIES_COUNT = 'REQUEST_VULNERABILITIES_COUNT';
export const RECEIVE_VULNERABILITIES_COUNT_SUCCESS = 'RECEIVE_VULNERABILITIES_COUNT_SUCCESS';
export const RECEIVE_VULNERABILITIES_COUNT_ERROR = 'RECEIVE_VULNERABILITIES_COUNT_ERROR';
export const SET_VULNERABILITIES_HISTORY_ENDPOINT = 'SET_VULNERABILITIES_HISTORY_ENDPOINT';
export const REQUEST_VULNERABILITIES_HISTORY = 'REQUEST_VULNERABILITIES_HISTORY';
export const RECEIVE_VULNERABILITIES_HISTORY_SUCCESS = 'RECEIVE_VULNERABILITIES_HISTORY_SUCCESS';
export const RECEIVE_VULNERABILITIES_HISTORY_ERROR = 'RECEIVE_VULNERABILITIES_HISTORY_ERROR';
export const SET_MODAL_DATA = 'SET_MODAL_DATA';
export const REQUEST_CREATE_ISSUE = 'REQUEST_CREATE_ISSUE';
......
......@@ -35,6 +35,21 @@ export default {
state.isLoadingVulnerabilitiesCount = false;
state.errorLoadingVulnerabilitiesCount = true;
},
[types.SET_VULNERABILITIES_HISTORY_ENDPOINT](state, payload) {
state.vulnerabilitiesHistoryEndpoint = payload;
},
[types.REQUEST_VULNERABILITIES_HISTORY](state) {
state.isLoadingVulnerabilitiesHistory = true;
state.errorLoadingVulnerabilitiesHistory = false;
},
[types.RECEIVE_VULNERABILITIES_HISTORY_SUCCESS](state, payload) {
state.isLoadingVulnerabilitiesHistory = false;
state.vulnerabilitiesHistory = payload;
},
[types.RECEIVE_VULNERABILITIES_HISTORY_ERROR](state) {
state.isLoadingVulnerabilitiesHistory = false;
state.errorLoadingVulnerabilitiesHistory = true;
},
[types.SET_MODAL_DATA](state, payload) {
const { vulnerability } = payload;
......
......@@ -3,12 +3,16 @@ import { s__ } from '~/locale';
export default () => ({
isLoadingVulnerabilities: true,
errorLoadingVulnerabilities: false,
vulnerabilities: [],
isLoadingVulnerabilitiesCount: true,
errorLoadingVulnerabilitiesCount: false,
pageInfo: {},
vulnerabilities: [],
vulnerabilitiesCount: {},
isLoadingVulnerabilitiesHistory: true,
errorLoadingVulnerabilitiesHistory: false,
vulnerabilitiesHistory: {},
pageInfo: {},
vulnerabilitiesCountEndpoint: null,
vulnerabilitiesHistoryEndpoint: null,
vulnerabilitiesEndpoint: null,
activeVulnerability: null,
modal: {
......
$trans-white: rgba(255, 255, 255, 0);
.vulnerabilities-chart-wrapper {
-webkit-overflow-scrolling: touch;
overflow: scroll;
}
@media screen and (max-width: 1240px) {
.vulnerabilities-chart {
position: relative;
}
.vulnerabilities-chart::after {
background-image: linear-gradient(to right, $trans-white, $gl-gray-350);
bottom: 0;
content: '';
height: 310px;
position: absolute;
right: -1px;
top: 10px;
width: 32px;
}
}
# frozen_string_literal: true
class Groups::Security::DashboardController < Groups::Security::ApplicationController
layout 'group'
before_action do
push_frontend_feature_flag(:group_security_dashboard_history, group)
end
end
- breadcrumb_title _("Security Dashboard")
- page_title _("Security Dashboard")
- vulnerabilities_history_endpoint = Feature.enabled?(:group_security_dashboard_history, @group) ? history_group_security_vulnerabilities_path(@group) : ''
#js-group-security-dashboard{ data: { vulnerabilities_endpoint: group_security_vulnerabilities_path(@group),
vulnerabilities_summary_endpoint: summary_group_security_vulnerabilities_path(@group),
vulnerabilities_history_endpoint: vulnerabilities_history_endpoint,
vulnerability_feedback_help_path: help_page_path("user/project/merge_requests/index", anchor: "interacting-with-security-reports-ultimate"),
empty_state_svg_path: image_path('illustrations/security-dashboard-empty-state.svg'),
dashboard_documentation: help_page_path('user/group/security_dashboard/index') } }
---
title: Adds group security dashboard metrics chart
merge_request: 8631
author:
type: added
......@@ -7,21 +7,19 @@ module EE
JOB_TOKEN_HEADER = "HTTP_JOB_TOKEN".freeze
JOB_TOKEN_PARAM = :job_token
# rubocop: disable CodeReuse/ActiveRecord
def find_user_from_job_token
return unless route_authentication_setting[:job_token_allowed]
token = (params[JOB_TOKEN_PARAM] || env[JOB_TOKEN_HEADER]).to_s
return unless token.present?
job = ::Ci::Build.find_by(token: token)
job = ::Ci::Build.find_by_token(token)
raise ::Gitlab::Auth::UnauthorizedError unless job
@job_token_authentication = true # rubocop:disable Gitlab/ModuleWithInstanceVariables
job.user
end
# rubocop: enable CodeReuse/ActiveRecord
end
end
end
......
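Switching from `find_by(token: token)` to the generated `find_by_token` routes the lookup through the encrypted strategy, with cleartext fallback, declared by `add_authentication_token_field :token, encrypted: true, fallback: true`. A hedged, simplified sketch of what that lookup does (not the literal implementation):

```ruby
# Simplified sketch; the deterministic AES-256-GCM encryption (see the
# TokenAuthenticatable specs later in this diff) is what makes an
# equality lookup on token_encrypted possible.
def self.find_by_token(token)
  encrypted = Gitlab::CryptoHelper.aes256_gcm_encrypt(token)
  find_by(token_encrypted: encrypted) || find_by(token: token) # fallback to legacy cleartext
end
```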
......@@ -7,8 +7,8 @@ module Gitlab
ParserNotFoundError = Class.new(StandardError)
PARSERS = {
sast: ::Gitlab::Ci::Parsers::Security::Common,
dependency_scanning: ::Gitlab::Ci::Parsers::Security::Common
sast: ::Gitlab::Ci::Parsers::Security::Sast,
dependency_scanning: ::Gitlab::Ci::Parsers::Security::DependencyScanning
}.freeze
def self.fabricate!(file_type)
......
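`fabricate!` (whose body is outside this hunk) resolves a report file type against this registry, so splitting `Common` into `Sast` and `DependencyScanning` subclasses only changes the mapping. A hedged sketch of the dispatch:

```ruby
# Hedged sketch of the registry dispatch; the real method raises
# ParserNotFoundError (defined above) for unknown file types.
def self.fabricate!(file_type)
  PARSERS.fetch(file_type.to_sym).new
rescue KeyError
  raise ParserNotFoundError, "Cannot find a parser for file type: #{file_type}"
end
```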
......@@ -7,8 +7,6 @@ module Gitlab
class Common
SecurityReportParserError = Class.new(StandardError)
METADATA_VERSION = '1.2'
def parse!(json_data, report)
vulnerabilities = JSON.parse!(json_data)
......@@ -41,7 +39,7 @@ module Gitlab
raw_metadata: data.to_json,
# Version is hardcoded here until provided in the report.
# See https://gitlab.com/gitlab-org/gitlab-ee/issues/8025
metadata_version: METADATA_VERSION
metadata_version: metadata_version(data)
)
end
......@@ -80,10 +78,6 @@ module Gitlab
input.blank? ? 'undefined' : input.downcase
end
def generate_location_fingerprint(location)
Digest::SHA1.hexdigest("#{location['file']}:#{location['start_line']}:#{location['end_line']}")
end
def generate_project_fingerprint(compare_key)
Digest::SHA1.hexdigest(compare_key)
end
......
# frozen_string_literal: true
module Gitlab
module Ci
module Parsers
module Security
class DependencyScanning < Common
private
def metadata_version(vulnerability)
'1.3'
end
def generate_location_fingerprint(location)
Digest::SHA1.hexdigest("#{location['file']}:#{location['dependency']['package']['name']}")
end
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module Ci
module Parsers
module Security
class Sast < Common
private
def metadata_version(vulnerability)
'1.2'
end
def generate_location_fingerprint(location)
Digest::SHA1.hexdigest("#{location['file']}:#{location['start_line']}:#{location['end_line']}")
end
end
end
end
end
end
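The two subclasses differ only in their template-method overrides. A worked example of the resulting location fingerprints, using hypothetical location hashes shaped the way the parsers expect:

```ruby
require 'digest'

sast_location = { 'file' => 'app/main.rb', 'start_line' => 10, 'end_line' => 12 }
Digest::SHA1.hexdigest("#{sast_location['file']}:#{sast_location['start_line']}:#{sast_location['end_line']}")
# => fingerprint over file and line span (SAST)

ds_location = { 'file' => 'Gemfile.lock', 'dependency' => { 'package' => { 'name' => 'rack' } } }
Digest::SHA1.hexdigest("#{ds_location['file']}:#{ds_location['dependency']['package']['name']}")
# => fingerprint over file and package name (Dependency Scanning)
```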
import Vue from 'vue';
import component from 'ee/security_dashboard/components/vulnerability_chart_label.vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
function hexToRgb(hex) {
const cleanHex = hex.replace('#', '');
const [r, g, b] = [
cleanHex.substring(0, 2),
cleanHex.substring(2, 4),
cleanHex.substring(4, 6),
].map(rgb => parseInt(rgb, 16));
return `rgb(${r}, ${g}, ${b})`;
}
describe('Vulnerability Chart Label component', () => {
const Component = Vue.extend(component);
let vm;
const props = {
name: 'Chuck Norris',
color: '#BADA55',
value: 42,
};
describe('default', () => {
beforeEach(() => {
vm = mountComponent(Component, props);
});
afterEach(() => {
vm.$destroy();
});
it('should render the name', () => {
const name = vm.$el.querySelector('.js-name');
expect(name.textContent).toContain(props.name);
});
it('should render the value', () => {
const value = vm.$el.querySelector('.js-value');
expect(value.textContent).toContain(props.value);
});
it('should render the color', () => {
const color = vm.$el.querySelector('.js-color');
expect(color.style.backgroundColor).toBe(hexToRgb(props.color));
});
});
describe('when the value is 0', () => {
const newProps = { ...props, value: 0 };
beforeEach(() => {
vm = mountComponent(Component, newProps);
});
afterEach(() => {
vm.$destroy();
});
it('should still render the value, but show a "0"', () => {
const value = vm.$el.querySelector('.js-value');
expect(value.textContent).toContain(newProps.value);
});
});
});
import Vue from 'vue';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import component from 'ee/security_dashboard/components/vulnerability_chart.vue';
import createStore from 'ee/security_dashboard/store';
import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
import waitForPromises from 'spec/helpers/wait_for_promises';
import { resetStore } from '../helpers';
import mockDataVulnerabilitiesHistory from '../store/vulnerabilities/data/mock_data_vulnerabilities_history.json';
describe('Vulnerabilities Chart', () => {
const Component = Vue.extend(component);
const vulnerabilitiesHistoryEndpoint = '/vulnerabilitiesEndpoint.json';
let store;
let mock;
let vm;
beforeEach(() => {
store = createStore();
store.state.vulnerabilities.vulnerabilitiesHistoryEndpoint = vulnerabilitiesHistoryEndpoint;
mock = new MockAdapter(axios);
mock.onGet(vulnerabilitiesHistoryEndpoint).replyOnce(200, mockDataVulnerabilitiesHistory);
vm = mountComponentWithStore(Component, { store });
});
afterEach(() => {
resetStore(store);
vm.$destroy();
mock.restore();
});
it('should render the e-chart instance', done => {
waitForPromises()
.then(() => {
expect(vm.$el.querySelector('[_echarts_instance_]')).not.toBeNull();
done();
})
.catch(done.fail);
});
});
import Vue from 'vue';
import component from 'ee/security_dashboard/components/vulnerability_chart_tooltip.vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
describe('Vulnerability Chart Tooltip component', () => {
const Component = Vue.extend(component);
const props = {
title: 'Tooltip Title',
entries: [
{
dataIndex: 1,
seriesId: 'critical_0',
seriesName: 'critical',
color: '#00f',
data: ['critical', 32],
},
{
dataIndex: 1,
seriesId: 'high_0',
seriesName: 'high',
color: '#0f0',
data: ['high', 22],
},
{
dataIndex: 1,
seriesId: 'low_0',
seriesName: 'low',
color: '#f00',
data: ['low', 2],
},
],
};
let vm;
beforeEach(() => {
vm = mountComponent(Component, props);
});
afterEach(() => {
vm.$destroy();
});
it('should render the title', () => {
const header = vm.$el.querySelector('.card-header');
expect(header.textContent).toContain(props.title);
});
it('should render three legends', () => {
const legends = vm.$el.querySelectorAll('.js-chart-label');
expect(legends).toHaveLength(3);
});
});
......@@ -9,6 +9,7 @@ import * as actions from 'ee/security_dashboard/store/modules/vulnerabilities/ac
import mockDataVulnerabilities from './data/mock_data_vulnerabilities.json';
import mockDataVulnerabilitiesCount from './data/mock_data_vulnerabilities_count.json';
import mockDataVulnerabilitiesHistory from './data/mock_data_vulnerabilities_history.json';
describe('vulnerabilities count actions', () => {
const data = mockDataVulnerabilitiesCount;
......@@ -634,3 +635,130 @@ describe('revert vulnerability dismissal', () => {
});
});
});
describe('vulnerabilities history actions', () => {
const data = mockDataVulnerabilitiesHistory;
describe('setVulnerabilitiesHistoryEndpoint', () => {
it('should commit the correct mutation', done => {
const state = initialState;
const endpoint = 'fakepath.json';
testAction(
actions.setVulnerabilitiesHistoryEndpoint,
endpoint,
state,
[
{
type: types.SET_VULNERABILITIES_HISTORY_ENDPOINT,
payload: endpoint,
},
],
[],
done,
);
});
});
describe('fetchVulnerabilitiesHistory', () => {
let mock;
const state = initialState;
beforeEach(() => {
state.vulnerabilitiesHistoryEndpoint = `${TEST_HOST}/vulnerabilities_history.json`;
mock = new MockAdapter(axios);
});
afterEach(() => {
mock.restore();
});
describe('on success', () => {
beforeEach(() => {
mock.onGet(state.vulnerabilitiesHistoryEndpoint).replyOnce(200, data);
});
it('should dispatch the request and success actions', done => {
testAction(
actions.fetchVulnerabilitiesHistory,
{},
state,
[],
[
{ type: 'requestVulnerabilitiesHistory' },
{
type: 'receiveVulnerabilitiesHistorySuccess',
payload: { data },
},
],
done,
);
});
});
describe('on error', () => {
beforeEach(() => {
mock.onGet(state.vulnerabilitiesHistoryEndpoint).replyOnce(404, {});
});
it('should dispatch the request and error actions', done => {
testAction(
actions.fetchVulnerabilitiesHistory,
{},
state,
[],
[
{ type: 'requestVulnerabilitiesHistory' },
{ type: 'receiveVulnerabilitiesHistoryError' },
],
done,
);
});
});
});
describe('requestVulnerabilitiesHistory', () => {
it('should commit the request mutation', done => {
const state = initialState;
testAction(
actions.requestVulnerabilitiesHistory,
{},
state,
[{ type: types.REQUEST_VULNERABILITIES_HISTORY }],
[],
done,
);
});
});
describe('receiveVulnerabilitiesHistorySuccess', () => {
it('should commit the success mutation', done => {
const state = initialState;
testAction(
actions.receiveVulnerabilitiesHistorySuccess,
{ data },
state,
[{ type: types.RECEIVE_VULNERABILITIES_HISTORY_SUCCESS, payload: data }],
[],
done,
);
});
});
describe('receiveVulnerabilitiesHistoryError', () => {
it('should commit the error mutation', done => {
const state = initialState;
testAction(
actions.receiveVulnerabilitiesHistoryError,
{},
state,
[{ type: types.RECEIVE_VULNERABILITIES_HISTORY_ERROR }],
[],
done,
);
});
});
});
......@@ -131,6 +131,66 @@ describe('vulnerabilities module mutations', () => {
});
});
describe('SET_VULNERABILITIES_HISTORY_ENDPOINT', () => {
it('should set `vulnerabilitiesHistoryEndpoint` to `fakepath.json`', () => {
const state = createState();
const endpoint = 'fakepath.json';
mutations[types.SET_VULNERABILITIES_HISTORY_ENDPOINT](state, endpoint);
expect(state.vulnerabilitiesHistoryEndpoint).toEqual(endpoint);
});
});
describe('REQUEST_VULNERABILITIES_HISTORY', () => {
let state;
beforeEach(() => {
state = {
...createState(),
errorLoadingVulnerabilitiesHistory: true,
};
mutations[types.REQUEST_VULNERABILITIES_HISTORY](state);
});
it('should set `isLoadingVulnerabilitiesHistory` to `true`', () => {
expect(state.isLoadingVulnerabilitiesHistory).toBeTruthy();
});
it('should set `errorLoadingVulnerabilitiesHistory` to `false`', () => {
expect(state.errorLoadingVulnerabilitiesHistory).toBeFalsy();
});
});
describe('RECEIVE_VULNERABILITIES_HISTORY_SUCCESS', () => {
let payload;
let state;
beforeEach(() => {
payload = mockData;
state = createState();
mutations[types.RECEIVE_VULNERABILITIES_HISTORY_SUCCESS](state, payload);
});
it('should set `isLoadingVulnerabilitiesHistory` to `false`', () => {
expect(state.isLoadingVulnerabilitiesHistory).toBeFalsy();
});
it('should set `vulnerabilitiesHistory`', () => {
expect(state.vulnerabilitiesHistory).toBe(payload);
});
});
describe('RECEIVE_VULNERABILITIES_HISTORY_ERROR', () => {
it('should set `isLoadingVulnerabilitiesHistory` to `false`', () => {
const state = createState();
mutations[types.RECEIVE_VULNERABILITIES_HISTORY_ERROR](state);
expect(state.isLoadingVulnerabilitiesHistory).toBeFalsy();
});
});
describe('SET_MODAL_DATA', () => {
describe('with all the data', () => {
const vulnerability = mockData[0];
......
......@@ -2,10 +2,11 @@
require 'spec_helper'
describe Gitlab::Ci::Parsers::Security::Common do
describe Gitlab::Ci::Parsers::Security::DependencyScanning do
describe '#parse!' do
let(:project) { artifact.project }
let(:pipeline) { artifact.job.pipeline }
let(:artifact) { create(:ee_ci_job_artifact, :dependency_scanning) }
let(:report) { Gitlab::Ci::Reports::Security::Report.new(artifact.file_type) }
let(:parser) { described_class.new }
......@@ -15,24 +16,18 @@ describe Gitlab::Ci::Parsers::Security::Common do
end
end
context 'sast report' do
let(:artifact) { create(:ee_ci_job_artifact, :sast) }
it "parses all identifiers and occurrences" do
expect(report.occurrences.length).to eq(3)
expect(report.identifiers.length).to eq(4)
expect(report.scanners.length).to eq(3)
end
it "parses all identifiers and occurrences" do
expect(report.occurrences.length).to eq(4)
expect(report.identifiers.length).to eq(7)
expect(report.scanners.length).to eq(2)
end
context 'dependency_scanning report' do
let(:artifact) { create(:ee_ci_job_artifact, :dependency_scanning) }
it "generates expected location fingerprint" do
expect(report.occurrences.first[:location_fingerprint]).to eq('2773f8cc955346ab1f756b94aa310db8e17c0944')
end
it "parses all identifiers and occurrences" do
expect(report.occurrences.length).to eq(4)
expect(report.identifiers.length).to eq(7)
expect(report.scanners.length).to eq(2)
end
it "generates expected metadata_version" do
expect(report.occurrences.first[:metadata_version]).to eq('1.3')
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Ci::Parsers::Security::Sast do
describe '#parse!' do
let(:project) { artifact.project }
let(:pipeline) { artifact.job.pipeline }
let(:artifact) { create(:ee_ci_job_artifact, :sast) }
let(:report) { Gitlab::Ci::Reports::Security::Report.new(artifact.file_type) }
let(:parser) { described_class.new }
before do
artifact.each_blob do |blob|
parser.parse!(blob, report)
end
end
it "parses all identifiers and occurrences" do
expect(report.occurrences.length).to eq(3)
expect(report.identifiers.length).to eq(4)
expect(report.scanners.length).to eq(3)
end
it "generates expected location fingerprint" do
expect(report.occurrences.first[:location_fingerprint]).to eq('6b6bb283d43cc510d7d1e73e2882b3652cb34bd5')
end
it "generates expected metadata_version" do
expect(report.occurrences.first[:metadata_version]).to eq('1.2')
end
end
end
......@@ -156,6 +156,7 @@ excluded_attributes:
statuses:
- :trace
- :token
- :token_encrypted
- :when
- :artifacts_file
- :artifacts_metadata
......
......@@ -9388,6 +9388,9 @@ msgstr ""
msgid "VisibilityLevel|Unknown"
msgstr ""
msgid "Vulnerability Chart"
msgstr ""
msgid "Vulnerability List"
msgstr ""
......
......@@ -19,12 +19,14 @@ describe Projects::Settings::RepositoryController do
end
describe 'PUT cleanup' do
before do
allow(RepositoryCleanupWorker).to receive(:perform_async)
end
def do_put!
object_map = fixture_file_upload('spec/fixtures/bfg_object_map.txt')
Sidekiq::Testing.fake! do
put :cleanup, namespace_id: project.namespace, project_id: project, project: { object_map: object_map }
end
put :cleanup, namespace_id: project.namespace, project_id: project, project: { object_map: object_map }
end
context 'feature enabled' do
......@@ -34,7 +36,7 @@ describe Projects::Settings::RepositoryController do
do_put!
expect(response).to redirect_to project_settings_repository_path(project)
expect(RepositoryCleanupWorker.jobs.count).to eq(1)
expect(RepositoryCleanupWorker).to have_received(:perform_async).once
end
end
......
......@@ -287,6 +287,7 @@ project:
- statistics
- container_repositories
- uploads
- file_uploads
- import_state
- members_and_requesters
- build_trace_section_names
......
......@@ -20,7 +20,7 @@ describe Appearance do
end
context 'with uploads' do
it_behaves_like 'model with mounted uploader', false do
it_behaves_like 'model with uploads', false do
let(:model_object) { create(:appearance, :with_logo) }
let(:upload_attribute) { :logo }
let(:uploader_class) { AttachmentUploader }
......
......@@ -1926,7 +1926,7 @@ describe Ci::Build do
context 'when token is empty' do
before do
build.token = nil
build.update_columns(token: nil, token_encrypted: nil)
end
it { is_expected.to be_nil}
......@@ -2142,7 +2142,7 @@ describe Ci::Build do
end
before do
build.token = 'my-token'
build.set_token('my-token')
build.yaml_variables = []
end
......
......@@ -351,3 +351,89 @@ describe PersonalAccessToken, 'TokenAuthenticatable' do
end
end
end
describe Ci::Build, 'TokenAuthenticatable' do
let(:token_field) { :token }
let(:build) { FactoryBot.build(:ci_build) }
it_behaves_like 'TokenAuthenticatable'
describe 'generating new token' do
context 'token is not generated yet' do
describe 'token field accessor' do
it 'makes it possible to access token' do
expect(build.token).to be_nil
build.save!
expect(build.token).to be_present
end
end
describe "ensure_token" do
subject { build.ensure_token }
it { is_expected.to be_a String }
it { is_expected.not_to be_blank }
it 'does not persist token' do
expect(build).not_to be_persisted
end
end
describe 'ensure_token!' do
it 'persists a new token' do
expect(build.ensure_token!).to eq build.reload.token
expect(build).to be_persisted
end
it 'persists new token as an encrypted string' do
build.ensure_token!
encrypted = Gitlab::CryptoHelper.aes256_gcm_encrypt(build.token)
expect(build.read_attribute('token_encrypted')).to eq encrypted
end
it 'does not persist a token in clear text' do
build.ensure_token!
expect(build.read_attribute('token')).to be_nil
end
end
end
describe '#reset_token!' do
it 'persists a new token' do
build.save!
build.token.yield_self do |previous_token|
build.reset_token!
expect(build.token).not_to eq previous_token
expect(build.token).to be_a String
end
end
end
end
describe 'setting a new token' do
subject { build.set_token('0123456789') }
it 'returns the token' do
expect(subject).to eq '0123456789'
end
it 'writes a new encrypted token' do
expect(build.read_attribute('token_encrypted')).to be_nil
expect(subject).to eq '0123456789'
expect(build.read_attribute('token_encrypted')).to be_present
end
it 'does not write a new cleartext token' do
expect(build.read_attribute('token')).to be_nil
expect(subject).to eq '0123456789'
expect(build.read_attribute('token')).to be_nil
end
end
end
......@@ -739,7 +739,7 @@ describe Group do
end
context 'with uploads' do
it_behaves_like 'model with mounted uploader', true do
it_behaves_like 'model with uploads', true do
let(:model_object) { create(:group, :with_avatar) }
let(:upload_attribute) { :avatar }
let(:uploader_class) { AttachmentUploader }
......
......@@ -4202,7 +4202,7 @@ describe Project do
end
context 'with uploads' do
it_behaves_like 'model with mounted uploader', true do
it_behaves_like 'model with uploads', true do
let(:model_object) { create(:project, :with_avatar) }
let(:upload_attribute) { :avatar }
let(:uploader_class) { AttachmentUploader }
......
# frozen_string_literal: true
require 'spec_helper'
describe Uploads::Fog do
let(:data_store) { described_class.new }
before do
stub_uploads_object_storage(FileUploader)
end
describe '#available?' do
subject { data_store.available? }
context 'when object storage is enabled' do
it { is_expected.to be_truthy }
end
context 'when object storage is disabled' do
before do
stub_uploads_object_storage(FileUploader, enabled: false)
end
it { is_expected.to be_falsy }
end
end
context 'model with uploads' do
let(:project) { create(:project) }
let(:relation) { project.uploads }
describe '#keys' do
let!(:uploads) { create_list(:upload, 2, :object_storage, uploader: FileUploader, model: project) }
subject { data_store.keys(relation) }
it 'returns keys' do
is_expected.to match_array(relation.pluck(:path))
end
end
describe '#delete_keys' do
let(:keys) { data_store.keys(relation) }
let!(:uploads) { create_list(:upload, 2, :with_file, :issuable_upload, model: project) }
subject { data_store.delete_keys(keys) }
before do
uploads.each { |upload| upload.build_uploader.migrate!(2) }
end
it 'deletes multiple data' do
paths = relation.pluck(:path)
::Fog::Storage.new(FileUploader.object_store_credentials).tap do |connection|
paths.each do |path|
expect(connection.get_object('uploads', path)[:body]).not_to be_nil
end
end
subject
::Fog::Storage.new(FileUploader.object_store_credentials).tap do |connection|
paths.each do |path|
expect { connection.get_object('uploads', path)[:body] }.to raise_error(Excon::Error::NotFound)
end
end
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Uploads::Local do
let(:data_store) { described_class.new }
before do
stub_uploads_object_storage(FileUploader)
end
context 'model with uploads' do
let(:project) { create(:project) }
let(:relation) { project.uploads }
describe '#keys' do
let!(:uploads) { create_list(:upload, 2, uploader: FileUploader, model: project) }
subject { data_store.keys(relation) }
it 'returns keys' do
is_expected.to match_array(relation.map(&:absolute_path))
end
end
describe '#delete_keys' do
let(:keys) { data_store.keys(relation) }
let!(:uploads) { create_list(:upload, 2, :with_file, :issuable_upload, model: project) }
subject { data_store.delete_keys(keys) }
it 'deletes multiple data' do
paths = relation.map(&:absolute_path)
paths.each do |path|
expect(File.exist?(path)).to be_truthy
end
subject
paths.each do |path|
expect(File.exist?(path)).to be_falsey
end
end
end
end
end
......@@ -3313,7 +3313,7 @@ describe User do
end
context 'with uploads' do
it_behaves_like 'model with mounted uploader', false do
it_behaves_like 'model with uploads', false do
let(:model_object) { create(:user, :with_avatar) }
let(:upload_attribute) { :avatar }
let(:uploader_class) { AttachmentUploader }
......
......@@ -20,9 +20,9 @@ describe Ci::RetryBuildService do
CLONE_ACCESSORS = described_class::CLONE_ACCESSORS
REJECT_ACCESSORS =
%i[id status user token coverage trace runner artifacts_expire_at
artifacts_file artifacts_metadata artifacts_size created_at
updated_at started_at finished_at queued_at erased_by
%i[id status user token token_encrypted coverage trace runner
artifacts_expire_at artifacts_file artifacts_metadata artifacts_size
created_at updated_at started_at finished_at queued_at erased_by
erased_at auto_canceled_by job_artifacts job_artifacts_archive
job_artifacts_metadata job_artifacts_trace job_artifacts_junit
job_artifacts_sast job_artifacts_dependency_scanning
......
require 'spec_helper'
shared_examples_for 'model with mounted uploader' do |supports_fileuploads|
shared_examples_for 'model with uploads' do |supports_fileuploads|
describe '.destroy' do
before do
stub_uploads_object_storage(uploader_class)
......@@ -8,16 +8,62 @@ shared_examples_for 'model with mounted uploader' do |supports_fileuploads|
model_object.public_send(upload_attribute).migrate!(ObjectStorage::Store::REMOTE)
end
it 'deletes remote uploads' do
expect_any_instance_of(CarrierWave::Storage::Fog::File).to receive(:delete).and_call_original
context 'with mounted uploader' do
it 'deletes remote uploads' do
expect_any_instance_of(CarrierWave::Storage::Fog::File).to receive(:delete).and_call_original
expect { model_object.destroy }.to change { Upload.count }.by(-1)
expect { model_object.destroy }.to change { Upload.count }.by(-1)
end
end
it 'deletes any FileUploader uploads which are not mounted', skip: !supports_fileuploads do
create(:upload, uploader: FileUploader, model: model_object)
context 'with not mounted uploads', :sidekiq, skip: !supports_fileuploads do
context 'with local files' do
let!(:uploads) { create_list(:upload, 2, uploader: FileUploader, model: model_object) }
expect { model_object.destroy }.to change { Upload.count }.by(-2)
it 'deletes any FileUploader uploads which are not mounted' do
expect { model_object.destroy }.to change { Upload.count }.by(-3)
end
it 'deletes local files' do
expect_any_instance_of(Uploads::Local).to receive(:delete_keys).with(uploads.map(&:absolute_path))
model_object.destroy
end
end
context 'with remote files' do
let!(:uploads) { create_list(:upload, 2, :object_storage, uploader: FileUploader, model: model_object) }
it 'deletes any FileUploader uploads which are not mounted' do
expect { model_object.destroy }.to change { Upload.count }.by(-3)
end
it 'deletes remote files' do
expect_any_instance_of(Uploads::Fog).to receive(:delete_keys).with(uploads.map(&:path))
model_object.destroy
end
end
describe 'destroy strategy depending on feature flag' do
let!(:upload) { create(:upload, uploader: FileUploader, model: model_object) }
it 'does not destroy uploads by default' do
expect(model_object).to receive(:delete_uploads)
expect(model_object).not_to receive(:destroy_uploads)
model_object.destroy
end
it 'uses before destroy callback if feature flag is disabled' do
stub_feature_flags(fast_destroy_uploads: false)
expect(model_object).to receive(:destroy_uploads)
expect(model_object).not_to receive(:delete_uploads)
model_object.destroy
end
end
end
end
end