Commit c6db08cd authored by GitLab Bot

Automatic merge of gitlab-org/gitlab master

parents 748cedb4 cccc9bad
......@@ -18,7 +18,13 @@ module Projects
if environment
render 'projects/environments/metrics'
elsif default_environment
redirect_to project_metrics_dashboard_path(project, environment: default_environment)
redirect_to project_metrics_dashboard_path(
project,
**permitted_params
.to_h
.symbolize_keys
.merge(environment: default_environment)
)
else
render 'projects/environments/empty_metrics'
end
......@@ -26,9 +32,14 @@ module Projects
private
def permitted_params
@permitted_params ||= params.permit(:dashboard_path, :environment, :page)
end
def environment
strong_memoize(:environment) do
project.environments.find(params[:environment]) if params[:environment]
env = permitted_params[:environment]
project.environments.find(env) if env
end
end
......
......@@ -107,7 +107,7 @@ class User < ApplicationRecord
has_many :group_deploy_keys
has_many :gpg_keys
has_many :emails, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :emails
has_many :personal_access_tokens, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :identities, dependent: :destroy, autosave: true # rubocop:disable Cop/ActiveRecordDependent
has_many :u2f_registrations, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
......
......@@ -42,7 +42,7 @@ module Git
push_service_class = push_service_class_for(ref_type)
create_bulk_push_event = changes.size > Gitlab::CurrentSettings.push_event_activities_limit
merge_request_branches = merge_request_branches_for(changes)
merge_request_branches = merge_request_branches_for(ref_type, changes)
changes.each do |change|
push_service_class.new(
......@@ -74,8 +74,10 @@ module Git
Git::BranchPushService
end
def merge_request_branches_for(changes)
@merge_requests_branches ||= MergeRequests::PushedBranchesService.new(project, current_user, changes: changes).execute
def merge_request_branches_for(ref_type, changes)
return [] if ref_type == :tag
MergeRequests::PushedBranchesService.new(project, current_user, changes: changes).execute
end
end
end
---
title: Fix Metrics dashboard embeds when using new URLs
merge_request: 39876
author:
type: fixed
---
title: Add emails user_id foreign key with cascade delete
merge_request: 39899
author:
type: other
---
title: Fix wrong caching logic in ProcessRefChangesService
merge_request: 40821
author:
type: fixed
# frozen_string_literal: true
class AddEmailsUserIdForeignKey < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
CONSTRAINT_NAME = 'fk_emails_user_id'
def up
with_lock_retries do
add_foreign_key :emails, :users, on_delete: :cascade, validate: false, name: CONSTRAINT_NAME
end
end
def down
with_lock_retries do
remove_foreign_key_if_exists :emails, column: :user_id, name: CONSTRAINT_NAME
end
end
end
# frozen_string_literal: true
class RemoveOrphanedEmails < ActiveRecord::Migration[6.0]
DOWNTIME = false
def up
execute <<~SQL
DELETE FROM emails
WHERE not exists (
SELECT 1 FROM users WHERE users.id = emails.user_id
);
SQL
execute 'DELETE FROM emails WHERE user_id IS NULL;'
end
def down
# no-op
end
end
# frozen_string_literal: true
class ValidateEmailsUserIdForeignKey < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
CONSTRAINT_NAME = 'fk_emails_user_id'
def up
validate_foreign_key :emails, :user_id, name: CONSTRAINT_NAME
end
def down
# no op
end
end
5a5278fdd9539d33a6de226a84ed39b7c5a26929cec68ec5e8d193afb3cfafa2
\ No newline at end of file
476bce9b18177f37b31e15d42f5a1391c0bfbbd312a513c1d5b43085b90afb3e
\ No newline at end of file
5e2dfdf725ad0a3d90b240ced74cf5a872f7126b716847f9f9e99b4ad2a22109
\ No newline at end of file
......@@ -22016,6 +22016,9 @@ ALTER TABLE ONLY public.events
ALTER TABLE ONLY public.vulnerabilities
ADD CONSTRAINT fk_efb96ab1e2 FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY public.emails
ADD CONSTRAINT fk_emails_user_id FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE;
ALTER TABLE ONLY public.clusters
ADD CONSTRAINT fk_f05c5e5a42 FOREIGN KEY (management_project_id) REFERENCES public.projects(id) ON DELETE SET NULL;
......
......@@ -1725,11 +1725,73 @@ type ClusterAgentDeletePayload {
errors: [String!]!
}
type ClusterAgentToken {
"""
Cluster agent this token is associated with
"""
clusterAgent: ClusterAgent
"""
Timestamp the token was created
"""
createdAt: Time
"""
Global ID of the token
"""
id: ClustersAgentTokenID!
}
"""
Autogenerated input type of ClusterAgentTokenCreate
"""
input ClusterAgentTokenCreateInput {
"""
A unique identifier for the client performing the mutation.
"""
clientMutationId: String
"""
Global ID of the cluster agent that will be associated with the new token
"""
clusterAgentId: ClustersAgentID!
}
"""
Autogenerated return type of ClusterAgentTokenCreate
"""
type ClusterAgentTokenCreatePayload {
"""
A unique identifier for the client performing the mutation.
"""
clientMutationId: String
"""
Errors encountered during execution of the mutation.
"""
errors: [String!]!
"""
Token secret value. Make sure you save it - you won't be able to access it again
"""
secret: String
"""
Token created after mutation
"""
token: ClusterAgentToken
}
"""
Identifier of Clusters::Agent
"""
scalar ClustersAgentID
"""
Identifier of Clusters::AgentToken
"""
scalar ClustersAgentTokenID
type Commit {
"""
Author of the commit
......@@ -9797,6 +9859,7 @@ type Mutation {
boardListCreate(input: BoardListCreateInput!): BoardListCreatePayload
boardListUpdateLimitMetrics(input: BoardListUpdateLimitMetricsInput!): BoardListUpdateLimitMetricsPayload
clusterAgentDelete(input: ClusterAgentDeleteInput!): ClusterAgentDeletePayload
clusterAgentTokenCreate(input: ClusterAgentTokenCreateInput!): ClusterAgentTokenCreatePayload
commitCreate(input: CommitCreateInput!): CommitCreatePayload
configureSast(input: ConfigureSastInput!): ConfigureSastPayload
createAlertIssue(input: CreateAlertIssueInput!): CreateAlertIssuePayload
......
......@@ -4729,6 +4729,181 @@
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "ClusterAgentToken",
"description": null,
"fields": [
{
"name": "clusterAgent",
"description": "Cluster agent this token is associated with",
"args": [
],
"type": {
"kind": "OBJECT",
"name": "ClusterAgent",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "createdAt",
"description": "Timestamp the token was created",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "id",
"description": "Global ID of the token",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "ClustersAgentTokenID",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "INPUT_OBJECT",
"name": "ClusterAgentTokenCreateInput",
"description": "Autogenerated input type of ClusterAgentTokenCreate",
"fields": null,
"inputFields": [
{
"name": "clusterAgentId",
"description": "Global ID of the cluster agent that will be associated with the new token",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "ClustersAgentID",
"ofType": null
}
},
"defaultValue": null
},
{
"name": "clientMutationId",
"description": "A unique identifier for the client performing the mutation.",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
}
],
"interfaces": null,
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "ClusterAgentTokenCreatePayload",
"description": "Autogenerated return type of ClusterAgentTokenCreate",
"fields": [
{
"name": "clientMutationId",
"description": "A unique identifier for the client performing the mutation.",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "errors",
"description": "Errors encountered during execution of the mutation.",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "secret",
"description": "Token secret value. Make sure you save it - you won't be able to access it again",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "token",
"description": "Token created after mutation",
"args": [
],
"type": {
"kind": "OBJECT",
"name": "ClusterAgentToken",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "SCALAR",
"name": "ClustersAgentID",
......@@ -4739,6 +4914,16 @@
"enumValues": null,
"possibleTypes": null
},
{
"kind": "SCALAR",
"name": "ClustersAgentTokenID",
"description": "Identifier of Clusters::AgentToken",
"fields": null,
"inputFields": null,
"interfaces": null,
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "Commit",
......@@ -27747,6 +27932,33 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "clusterAgentTokenCreate",
"description": null,
"args": [
{
"name": "input",
"description": null,
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "INPUT_OBJECT",
"name": "ClusterAgentTokenCreateInput",
"ofType": null
}
},
"defaultValue": null
}
],
"type": {
"kind": "OBJECT",
"name": "ClusterAgentTokenCreatePayload",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "commitCreate",
"description": null,
......@@ -295,6 +295,25 @@ Autogenerated return type of ClusterAgentDelete
| `clientMutationId` | String | A unique identifier for the client performing the mutation. |
| `errors` | String! => Array | Errors encountered during execution of the mutation. |
## ClusterAgentToken
| Name | Type | Description |
| --- | ---- | ---------- |
| `clusterAgent` | ClusterAgent | Cluster agent this token is associated with |
| `createdAt` | Time | Timestamp the token was created |
| `id` | ClustersAgentTokenID! | Global ID of the token |
## ClusterAgentTokenCreatePayload
Autogenerated return type of ClusterAgentTokenCreate
| Name | Type | Description |
| --- | ---- | ---------- |
| `clientMutationId` | String | A unique identifier for the client performing the mutation. |
| `errors` | String! => Array | Errors encountered during execution of the mutation. |
| `secret` | String | Token secret value. Make sure you save it - you won't be able to access it again |
| `token` | ClusterAgentToken | Token created after mutation |
## Commit
| Name | Type | Description |
......
......@@ -750,3 +750,64 @@ code review. For docs changes in merge requests, whenever a change to files unde
is made, Danger Bot leaves a comment with further instructions about the documentation
process. This is configured in the `Dangerfile` in the GitLab repository under
[/danger/documentation/](https://gitlab.com/gitlab-org/gitlab/tree/master/danger/documentation).
## Automatic screenshot generator
You can set up an automatic screenshot generator to take and compress screenshots, with the
help of a configuration file known as a **screenshot generator**.
### Use the tool
To run the tool on an existing screenshot generator, take the following steps:
1. Set up the [GitLab Development Kit (GDK)](https://gitlab.com/gitlab-org/gitlab-development-kit/blob/master/doc/howto/gitlab_docs.md).
1. Navigate to the subdirectory with your cloned GitLab repository, typically `gdk/gitlab`.
1. Make sure that your GDK database is fully migrated: `bin/rake db:migrate RAILS_ENV=development`.
1. Install [`pngquant`](https://pngquant.org/). See the tool's website for more information.
1. Run `scripts/docs_screenshots.rb spec/docs_screenshots/<name_of_screenshot_generator>.rb <milestone-version>`.
1. Identify the location of the screenshots, based on the `gitlab/doc` location defined by the `it` parameter in your script.
1. Commit the newly created screenshots.
### Extending the tool
To add an additional **screenshot generator**, take the following steps:
- Locate the `spec/docs_screenshots` directory.
- Add a new file with a `_docs.rb` extension.
- Be sure to include the following in the file:
```ruby
require 'spec_helper'
RSpec.describe '<What I am taking screenshots of>', :js do
include DocsScreenshotHelpers # Helper that enables the screenshot-taking mechanism
before do
page.driver.browser.manage.window.resize_to(1366, 1024) # width and height of the browser window
end
```
- In addition, every `it` block must include the path where the screenshot is saved:
```ruby
it 'user/packages/container_registry/img/project_image_repositories_list'
```
#### Full page screenshots
To take a full page screenshot, simply `visit the page` and perform any expectation on real content (so that Capybara waits until the page is ready instead of capturing a blank screenshot).
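The sketch below shows a minimal full page example, based on the container registry generator added elsewhere in this commit; the visited path and the expected text are specific to that page and only illustrative here:

```ruby
context 'project container_registry' do
  # The `it` description is the path (under gitlab/doc) where the screenshot is saved.
  it 'user/packages/container_registry/img/project_empty_page' do
    visit project_container_registry_index_path(project)

    # Asserting on real page content makes Capybara wait until the page has
    # rendered, so the screenshot taken in the after hook is not blank.
    expect(page).to have_content _('There are no container images stored for this project')
  end
end
```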
#### Element screenshot
To have the screenshot focus on a specific element, a few more steps are needed (see the sketch after this list):
- **find the area**: `screenshot_area = find('#js-registry-policies')`
- **scroll the area in focus**: `scroll_to screenshot_area`
- **wait for the content**: `expect(screenshot_area).to have_content 'Expiration interval'`
- **set the crop area**: `set_crop_data(screenshot_area, 20)`
In particular, `set_crop_data` accepts two arguments: a `DOM` element and a padding. The padding is added around the element, enlarging the screenshot area.
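Putting these steps together, an element screenshot block looks like the following sketch, taken from the container registry generator added in this commit:

```ruby
context 'expiration policy settings' do
  it 'user/packages/container_registry/img/expiration_policy_form' do
    visit project_settings_ci_cd_path(project)

    # Find the area, scroll it into view, wait for its content, then crop
    # the final screenshot to the element plus 20 pixels of padding.
    screenshot_area = find('#js-registry-policies')
    scroll_to screenshot_area
    expect(screenshot_area).to have_content 'Expiration interval'
    set_crop_data(screenshot_area, 20)
  end
end
```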
#### Live example
Please use `spec/docs_screenshots/container_registry_docs.rb` as a guide and as an example to create your own scripts.
......@@ -20,15 +20,28 @@ metrics to others, and you want to have relevant information directly available.
NOTE: **Note:**
Requires [Kubernetes](../../user/project/integrations/prometheus_library/kubernetes.md) metrics.
NOTE: **Note:**
In GitLab versions 13.3 and earlier, metrics dashboard links were in the form
`https://<root_url>/<project>/-/environments/<environment_id>/metrics`. These links
are still supported, and can be used to embed metric charts.
To display metric charts, include a link of the form
`https://<root_url>/<project>/-/environments/<environment_id>/metrics` in a field
`https://<root_url>/<project>/-/metrics?environment=<environment_id>` in a field
that supports GitLab-flavored Markdown:
![Embedded Metrics Markdown](img/embedded_metrics_markdown_v12_8.png)
```markdown
### Summary
**Start time:** 2020-01-21T12:00:31+00:00
### Metrics
https://gitlab.com/gitlab-org/monitor/tanuki-inc/-/metrics?environment=1118134
```
GitLab unfurls the link as an embedded metrics panel:
![Embedded Metrics Rendered](img/embedded_metrics_rendered_v12_8.png)
![Embedded Metrics Rendered](img/embedded_metrics_rendered_v13_4.png)
You can also embed a single chart. To get a link to a chart, click the
**{ellipsis_v}** **More actions** menu in the upper right corner of the chart,
......
<script>
import { uniqueId } from 'lodash';
import { GlDeprecatedButton, GlLink, GlModal, GlModalDirective, GlIntersperse } from '@gitlab/ui';
import { GlButton, GlLink, GlModal, GlModalDirective, GlIntersperse } from '@gitlab/ui';
import { sprintf, s__ } from '~/locale';
// If there are more licenses than this count, a counter will be displayed for the remaining licenses
......@@ -12,7 +12,7 @@ const MODAL_ID_PREFIX = 'dependency-license-link-modal-';
export default {
components: {
GlIntersperse,
GlDeprecatedButton,
GlButton,
GlLink,
GlModal,
},
......@@ -72,12 +72,12 @@ export default {
<gl-link v-if="license.url" :href="license.url" target="_blank">{{ license.name }}</gl-link>
<template v-else>{{ license.name }}</template>
</span>
<gl-deprecated-button
<gl-button
v-if="hasLicensesInModal"
v-gl-modal-directive="modalId"
variant="link"
class="align-baseline js-license-links-modal-trigger"
>{{ modalButtonText }}</gl-deprecated-button
>{{ modalButtonText }}</gl-button
>
</gl-intersperse>
<div class="js-license-links-modal">
......
......@@ -8,6 +8,7 @@ module EE
prepended do
mount_mutation ::Mutations::Clusters::Agents::Create
mount_mutation ::Mutations::Clusters::Agents::Delete
mount_mutation ::Mutations::Clusters::AgentTokens::Create
mount_mutation ::Mutations::Issues::SetIteration
mount_mutation ::Mutations::Issues::SetWeight
mount_mutation ::Mutations::Issues::SetEpic
......
# frozen_string_literal: true
module Mutations
module Clusters
module AgentTokens
class Create < BaseMutation
graphql_name 'ClusterAgentTokenCreate'
authorize :create_cluster
argument :cluster_agent_id,
::Types::GlobalIDType[::Clusters::Agent],
required: true,
description: 'Global ID of the cluster agent that will be associated with the new token'
field :secret,
GraphQL::STRING_TYPE,
null: true,
description: "Token secret value. Make sure you save it - you won't be able to access it again"
field :token,
Types::Clusters::AgentTokenType,
null: true,
description: 'Token created after mutation'
def resolve(cluster_agent_id:)
cluster_agent = authorized_find!(id: cluster_agent_id)
result = ::Clusters::AgentTokens::CreateService
.new(container: cluster_agent.project, current_user: current_user)
.execute(cluster_agent)
payload = result.payload
{
secret: payload[:secret],
token: payload[:token],
errors: Array.wrap(result.message)
}
end
private
def find_object(id:)
GitlabSchema.find_by_gid(id)
end
end
end
end
end
# frozen_string_literal: true
module Types
module Clusters
class AgentTokenType < BaseObject
graphql_name 'ClusterAgentToken'
authorize :admin_cluster
field :cluster_agent,
Types::Clusters::AgentType,
description: 'Cluster agent this token is associated with',
null: true,
resolve: -> (token, _args, _context) { Gitlab::Graphql::Loaders::BatchModelLoader.new(::Clusters::Agent, token.agent_id).find }
field :created_at,
Types::TimeType,
null: true,
description: 'Timestamp the token was created'
field :id,
::Types::GlobalIDType[::Clusters::AgentToken],
null: false,
description: 'Global ID of the token'
end
end
end
# frozen_string_literal: true
module Clusters
class AgentTokenPolicy < BasePolicy
alias_method :token, :subject
delegate { token.agent }
end
end
# frozen_string_literal: true
module Clusters
module AgentTokens
class CreateService < ::BaseContainerService
def execute(cluster_agent)
return error_feature_not_available unless container.feature_available?(:cluster_agents)
return error_no_permissions unless current_user.can?(:create_cluster, container)
token = ::Clusters::AgentToken.new(agent: cluster_agent)
if token.save
ServiceResponse.success(payload: { secret: token.token, token: token })
else
ServiceResponse.error(message: token.errors.full_messages)
end
end
private
def error_feature_not_available
ServiceResponse.error(message: s_('ClusterAgent|This feature is only available for premium plans'))
end
def error_no_permissions
ServiceResponse.error(message: s_('ClusterAgent|User has insufficient permissions to create a token for this project'))
end
end
end
end
---
title: Button migration to component on dependency list
merge_request: 38624
author:
type: changed
---
title: Cluster token create mutation for GraphQL
merge_request: 38820
author:
type: added
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Mutations::Clusters::AgentTokens::Create do
subject(:mutation) { described_class.new(object: nil, context: context, field: nil) }
let_it_be(:cluster_agent) { create(:cluster_agent) }
let_it_be(:user) { create(:user) }
let(:context) do
GraphQL::Query::Context.new(
query: OpenStruct.new(schema: nil),
values: { current_user: user },
object: nil
)
end
specify { expect(described_class).to require_graphql_authorizations(:create_cluster) }
describe '#resolve' do
subject { mutation.resolve(cluster_agent_id: cluster_agent.to_global_id) }
context 'without token permissions' do
it 'raises an error if the resource is not accessible to the user' do
expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
context 'without premium plan' do
before do
stub_licensed_features(cluster_agents: false)
cluster_agent.project.add_maintainer(user)
end
it { expect(subject[:secret]).to be_nil }
it { expect(subject[:errors]).to eq(['This feature is only available for premium plans']) }
end
context 'with premium plan and user permissions' do
before do
stub_licensed_features(cluster_agents: true)
cluster_agent.project.add_maintainer(user)
end
it 'creates a new token', :aggregate_failures do
expect { subject }.to change { ::Clusters::AgentToken.count }.by(1)
expect(subject[:secret]).not_to be_nil
expect(subject[:errors]).to eq([])
end
context 'invalid params' do
subject { mutation.resolve(cluster_agent_id: cluster_agent.id) }
it 'generates an error message when id invalid', :aggregate_failures do
expect { subject }.to raise_error(NoMethodError)
end
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe GitlabSchema.types['ClusterAgentToken'] do
let(:fields) { %i[cluster_agent created_at id] }
it { expect(described_class.graphql_name).to eq('ClusterAgentToken') }
it { expect(described_class).to require_graphql_authorizations(:admin_cluster) }
it { expect(described_class).to have_graphql_fields(fields) }
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Clusters::AgentTokenPolicy do
let_it_be(:token) { create(:cluster_agent_token) }
let(:user) { create(:user) }
let(:policy) { described_class.new(user, token) }
let(:project) { token.agent.project }
describe 'rules' do
context 'when developer' do
before do
project.add_developer(user)
end
it { expect(policy).to be_disallowed :admin_cluster }
it { expect(policy).to be_disallowed :read_cluster }
end
context 'when maintainer' do
before do
project.add_maintainer(user)
end
it { expect(policy).to be_allowed :admin_cluster }
it { expect(policy).to be_allowed :read_cluster }
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'Create a new cluster agent token' do
include GraphqlHelpers
let_it_be(:cluster_agent) { create(:cluster_agent) }
let_it_be(:current_user) { create(:user) }
let(:mutation) do
graphql_mutation(
:cluster_agent_token_create,
{ cluster_agent_id: cluster_agent.to_global_id.to_s }
)
end
def mutation_response
graphql_mutation_response(:cluster_agent_token_create)
end
context 'without user permissions' do
it_behaves_like 'a mutation that returns top-level errors',
errors: ["The resource that you are attempting to access does not exist "\
"or you don't have permission to perform this action"]
it 'does not create a token' do
expect { post_graphql_mutation(mutation, current_user: current_user) }.not_to change(Clusters::AgentToken, :count)
end
end
context 'without premium plan' do
before do
stub_licensed_features(cluster_agents: false)
cluster_agent.project.add_maintainer(current_user)
end
it 'does not create a token and returns error message', :aggregate_failures do
expect { post_graphql_mutation(mutation, current_user: current_user) }.not_to change(Clusters::AgentToken, :count)
expect(mutation_response['errors']).to eq(['This feature is only available for premium plans'])
end
end
context 'with project permissions' do
before do
stub_licensed_features(cluster_agents: true)
cluster_agent.project.add_maintainer(current_user)
end
it 'creates a new token', :aggregate_failures do
expect { post_graphql_mutation(mutation, current_user: current_user) }.to change { Clusters::AgentToken.count }.by(1)
expect(mutation_response['secret']).not_to be_nil
expect(mutation_response['errors']).to eq([])
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Clusters::AgentTokens::CreateService do
subject(:service) { described_class.new(container: project, current_user: user) }
let_it_be(:user) { create(:user) }
let(:cluster_agent) { create(:cluster_agent) }
let(:project) { cluster_agent.project }
before do
stub_licensed_features(cluster_agents: false)
end
describe '#execute' do
context 'without premium plan' do
it 'does not create a new token' do
expect { service.execute(cluster_agent) }.not_to change(Clusters::AgentToken, :count)
end
it 'returns missing license error' do
result = service.execute(cluster_agent)
expect(result.status).to eq(:error)
expect(result.message).to eq('This feature is only available for premium plans')
end
context 'with premium plan' do
before do
stub_licensed_features(cluster_agents: true)
end
it 'does not create a new token due to user permissions' do
expect { service.execute(cluster_agent) }.not_to change(::Clusters::AgentToken, :count)
end
it 'returns permission errors', :aggregate_failures do
result = service.execute(cluster_agent)
expect(result.status).to eq(:error)
expect(result.message).to eq('User has insufficient permissions to create a token for this project')
end
context 'with user permissions' do
before do
project.add_maintainer(user)
end
it 'creates a new token' do
expect { service.execute(cluster_agent) }.to change { ::Clusters::AgentToken.count }.by(1)
end
it 'returns success status', :aggregate_failures do
result = service.execute(cluster_agent)
expect(result.status).to eq(:success)
expect(result.payload[:secret]).not_to be_nil
end
end
end
end
end
end
......@@ -10,7 +10,6 @@ module Banzai
# the cost of doing a full regex match.
def xpath_search
"descendant-or-self::a[contains(@href,'metrics') and \
contains(@href,'environments') and \
starts-with(@href, '#{gitlab_domain}')]"
end
......@@ -29,7 +28,7 @@ module Banzai
params['project'],
params['environment'],
embedded: true,
**query_params(params['url'])
**query_params(params['url']).except(:environment)
)
end
end
......
......@@ -10,20 +10,23 @@ module Gitlab
QUERY_PATTERN = '(?<query>\?[a-zA-Z0-9%.()+_=-]+(&[a-zA-Z0-9%.()+_=-]+)*)?'
ANCHOR_PATTERN = '(?<anchor>\#[a-z0-9_-]+)?'
OPTIONAL_DASH_PATTERN = '(?:/-)?'
DASH_PATTERN = '(?:/-)'
# Matches urls for a metrics dashboard. This could be
# either the /metrics endpoint or the /metrics_dashboard
# endpoint.
# Matches urls for a metrics dashboard.
# This regex needs to match the old metrics URL, the new metrics URL,
# and the dashboard URL (inline_metrics_redactor_filter.rb
# uses this regex to match against the dashboard URL.)
#
# EX - https://<host>/<namespace>/<project>/environments/<env_id>/metrics
# EX - Old URL: https://<host>/<namespace>/<project>/environments/<env_id>/metrics
# OR
# New URL: https://<host>/<namespace>/<project>/-/metrics?environment=<env_id>
# OR
# dashboard URL: https://<host>/<namespace>/<project>/environments/<env_id>/metrics_dashboard
def metrics_regex
strong_memoize(:metrics_regex) do
regex_for_project_metrics(
%r{
/environments
/(?<environment>\d+)
/(metrics_dashboard|metrics)
( #{environment_metrics_regex} ) | ( #{non_environment_metrics_regex} )
}x
)
end
......@@ -36,6 +39,7 @@ module Gitlab
strong_memoize(:grafana_regex) do
regex_for_project_metrics(
%r{
#{DASH_PATTERN}?
/grafana
/metrics_dashboard
}x
......@@ -44,16 +48,22 @@ module Gitlab
end
# Matches dashboard urls for a metric chart embed
# for cluster metrics
# for cluster metrics.
# This regex needs to match the dashboard URL as well, not just the trigger URL.
# The inline_metrics_redactor_filter.rb uses this regex to match against
# the dashboard URL.
#
# EX - https://<host>/<namespace>/<project>/-/clusters/<cluster_id>/?group=Cluster%20Health&title=Memory%20Usage&y_label=Memory%20(GiB)
# dashboard URL - https://<host>/<namespace>/<project>/-/clusters/<cluster_id>/metrics_dashboard?group=Cluster%20Health&title=Memory%20Usage&y_label=Memory%20(GiB)
def clusters_regex
strong_memoize(:clusters_regex) do
regex_for_project_metrics(
%r{
#{DASH_PATTERN}?
/clusters
/(?<cluster_id>\d+)
/?
( (/metrics) | ( /metrics_dashboard\.json ) )?
}x
)
end
......@@ -67,10 +77,11 @@ module Gitlab
strong_memoize(:alert_regex) do
regex_for_project_metrics(
%r{
#{DASH_PATTERN}?
/prometheus
/alerts
/(?<alert>\d+)
/metrics_dashboard
/metrics_dashboard(\.json)?
}x
)
end
......@@ -95,16 +106,37 @@ module Gitlab
private
def environment_metrics_regex
%r{
#{DASH_PATTERN}?
/environments
/(?<environment>\d+)
/(metrics_dashboard|metrics)
}x
end
def non_environment_metrics_regex
%r{
#{DASH_PATTERN}
/metrics
(?= # Lookahead to ensure there is an environment query param
\?
.*
environment=(?<environment>\d+)
.*
)
}x
end
def regex_for_project_metrics(path_suffix_pattern)
%r{
(?<url>
^(?<url>
#{gitlab_host_pattern}
#{project_path_pattern}
#{OPTIONAL_DASH_PATTERN}
#{path_suffix_pattern}
#{QUERY_PATTERN}
#{ANCHOR_PATTERN}
)
)$
}x
end
......
......@@ -2,10 +2,10 @@ return if Rails.env.production?
require 'png_quantizator'
require 'parallel'
require_relative '../../tooling/lib/tooling/images'
# The amount of variance (in bytes) allowed in
# file size when testing for compression size
TOLERANCE = 10000
namespace :pngquant do
# Returns an array of all images eligible for compression
......@@ -13,55 +13,13 @@ namespace :pngquant do
Dir.glob('doc/**/*.png', File::FNM_CASEFOLD)
end
# Runs pngquant on an image and optionally
# writes the result to disk
def compress_image(file, overwrite_original)
compressed_file = "#{file}.compressed"
FileUtils.copy(file, compressed_file)
pngquant_file = PngQuantizator::Image.new(compressed_file)
# Run the image repeatedly through pngquant until
# the change in file size is within TOLERANCE
loop do
before = File.size(compressed_file)
pngquant_file.quantize!
after = File.size(compressed_file)
break if before - after <= TOLERANCE
end
savings = File.size(file) - File.size(compressed_file)
is_uncompressed = savings > TOLERANCE
if is_uncompressed && overwrite_original
FileUtils.copy(compressed_file, file)
end
FileUtils.remove(compressed_file)
[is_uncompressed, savings]
end
# Ensures pngquant is available and prints an error if not
def check_executable
unless system('pngquant --version', out: File::NULL)
warn(
'Error: pngquant executable was not detected in the system.'.color(:red),
'Download pngquant at https://pngquant.org/ and place the executable in /usr/local/bin'.color(:green)
)
abort
end
end
desc 'GitLab | Pngquant | Compress all documentation PNG images using pngquant'
task :compress do
check_executable
files = doc_images
puts "Compressing #{files.size} PNG files in doc/**"
Parallel.each(files) do |file|
was_uncompressed, savings = compress_image(file, true)
was_uncompressed, savings = Tooling::Image.compress_image(file)
if was_uncompressed
puts "#{file} was reduced by #{savings} bytes"
......@@ -71,13 +29,11 @@ namespace :pngquant do
desc 'GitLab | Pngquant | Checks that all documentation PNG images have been compressed with pngquant'
task :lint do
check_executable
files = doc_images
puts "Checking #{files.size} PNG files in doc/**"
uncompressed_files = Parallel.map(files) do |file|
is_uncompressed, _ = compress_image(file, false)
is_uncompressed, _ = Tooling::Image.compress_image(file, true)
if is_uncompressed
puts "Uncompressed file detected: ".color(:red) + file
file
......
......@@ -5159,6 +5159,9 @@ msgstr ""
msgid "ClusterAgent|This feature is only available for premium plans"
msgstr ""
msgid "ClusterAgent|User has insufficient permissions to create a token for this project"
msgstr ""
msgid "ClusterAgent|You have insufficient permissions to create a cluster agent for this project"
msgstr ""
......
......@@ -21,7 +21,7 @@ module RuboCop
private
def acceptable_file_path?(path)
File.fnmatch?('*_spec.rb', path) || File.fnmatch?('*/frontend/fixtures/*', path)
File.fnmatch?('*_spec.rb', path) || File.fnmatch?('*/frontend/fixtures/*', path) || File.fnmatch?('*/docs_screenshots/*_docs.rb', path)
end
def shared_example?(node)
......
#!/usr/bin/env ruby
# frozen_string_literal: true
require 'png_quantizator'
require 'open3'
require 'parallel'
require_relative '../tooling/lib/tooling/images.rb'
generator = ARGV[0]
milestone = ARGV[1]
unless generator
warn('Error: missing generator, please supply one')
abort
end
unless milestone
warn('Error: missing milestone, please supply one')
abort
end
def rename_image(file, milestone)
path = File.dirname(file)
basename = File.basename(file)
final_name = File.join(path, "#{basename}_v#{milestone}.png")
FileUtils.mv(file, final_name)
end
system('spring', 'rspec', generator)
files = []
Open3.popen3("git diff --name-only -- '*.png'") do |stdin, stdout, stderr, thread|
files.concat stdout.read.chomp.split("\n")
end
Open3.popen3("git status --porcelain -- '*.png'") do |stdin, stdout, stderr, thread|
files.concat stdout.read.chomp.split("?? ")
end
files.reject!(&:empty?)
if files.empty?
puts "No file generated, did you select the right screenshot generator?"
else
puts "Compressing newly generated screenshots"
Parallel.each(files) do |file|
file_path = File.join(Dir.pwd, file.to_s.strip)
was_uncompressed, savings = Tooling::Image.compress_image(file_path)
rename_image(file_path, milestone)
if was_uncompressed
puts "#{file} was reduced by #{savings} bytes."
else
puts "Skipping already compressed file: #{file}."
end
end
end
......@@ -35,7 +35,6 @@ RSpec.describe 'Database schema' do
deploy_keys_projects: %w[deploy_key_id],
deployments: %w[deployable_id environment_id user_id],
draft_notes: %w[discussion_id commit_id],
emails: %w[user_id],
epics: %w[updated_by_id last_edited_by_id state_id],
events: %w[target_id],
forked_project_links: %w[forked_from_project_id],
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'Container Registry', :js do
include DocsScreenshotHelpers
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:project) { create(:project, namespace: group) }
before do
page.driver.browser.manage.window.resize_to(1366, 1024)
group.add_owner(user)
sign_in(user)
stub_container_registry_config(enabled: true)
stub_container_registry_tags(repository: :any, tags: [])
end
context 'expiration policy settings' do
it 'user/packages/container_registry/img/expiration_policy_form' do
visit project_settings_ci_cd_path(project)
screenshot_area = find('#js-registry-policies')
scroll_to screenshot_area
expect(screenshot_area).to have_content 'Expiration interval'
set_crop_data(screenshot_area, 20)
end
end
context 'project container_registry' do
it 'user/packages/container_registry/img/project_empty_page' do
visit_project_container_registry
expect(page).to have_content _('There are no container images stored for this project')
end
context 'with a list of repositories' do
before do
stub_container_registry_tags(repository: %r{my/image}, tags: %w[latest], with_manifest: true)
create_list(:container_repository, 12, project: project)
end
it 'user/packages/container_registry/img/project_image_repositories_list' do
visit_project_container_registry
expect(page).to have_content 'Image Repositories'
end
it 'user/packages/container_registry/img/project_image_repositories_list_with_commands_open' do
visit_project_container_registry
click_on 'CLI Commands'
end
end
end
context 'group container_registry' do
it 'user/packages/container_registry/img/group_empty_page' do
visit_group_container_registry
expect(page).to have_content 'There are no container images available in this group'
end
context 'with a list of repositories' do
before do
stub_container_registry_tags(repository: %r{my/image}, tags: %w[latest], with_manifest: true)
create_list(:container_repository, 12, project: project)
end
it 'user/packages/container_registry/img/group_image_repositories_list' do
visit_group_container_registry
expect(page).to have_content 'Image Repositories'
end
end
end
def visit_project_container_registry
visit project_container_registry_index_path(project)
end
def visit_group_container_registry
visit group_container_registries_path(group)
end
end
......@@ -5,25 +5,68 @@ require 'spec_helper'
RSpec.describe Banzai::Filter::InlineMetricsFilter do
include FilterSpecHelper
let(:params) { ['foo', 'bar', 12] }
let(:query_params) { {} }
let(:trigger_url) { urls.metrics_namespace_project_environment_url(*params, query_params) }
let(:environment_id) { 12 }
let(:dashboard_url) { urls.metrics_dashboard_namespace_project_environment_url(*params, **query_params, embedded: true) }
it_behaves_like 'a metrics embed filter'
let(:query_params) do
{
dashboard: 'config/prometheus/common_metrics.yml',
group: 'System metrics (Kubernetes)',
title: 'Core Usage (Pod Average)',
y_label: 'Cores per Pod'
}
end
context 'with /-/environments/:environment_id/metrics URL' do
let(:params) { ['group', 'project', environment_id] }
let(:trigger_url) { urls.metrics_namespace_project_environment_url(*params, **query_params) }
context 'with no query params' do
let(:query_params) { {} }
it_behaves_like 'a metrics embed filter'
end
context 'with query params' do
it_behaves_like 'a metrics embed filter'
end
end
context 'with query params specified' do
let(:query_params) do
{
dashboard: 'config/prometheus/common_metrics.yml',
group: 'System metrics (Kubernetes)',
title: 'Core Usage (Pod Average)',
y_label: 'Cores per Pod'
}
context 'with /-/metrics?environment=:environment_id URL' do
let(:params) { %w(group project) }
let(:trigger_url) { urls.namespace_project_metrics_dashboard_url(*params, **query_params) }
let(:dashboard_url) do
urls.metrics_dashboard_namespace_project_environment_url(
*params.append(environment_id),
**query_params.except(:environment),
embedded: true
)
end
it_behaves_like 'a metrics embed filter'
context 'with query params' do
it_behaves_like 'a metrics embed filter' do
before do
query_params.merge!(environment: environment_id)
end
end
end
context 'with only environment in query params' do
let(:query_params) { { environment: environment_id } }
it_behaves_like 'a metrics embed filter'
end
context 'with no query params' do
let(:query_params) { {} }
it 'ignores metrics URL without environment parameter' do
input = %(<a href="#{trigger_url}">example</a>)
filtered_input = filter(input).to_s
expect(CGI.unescape_html(filtered_input)).to eq(input)
end
end
end
it 'leaves links to other dashboards unchanged' do
......
......@@ -22,6 +22,13 @@ RSpec.describe Banzai::Filter::InlineMetricsRedactorFilter do
it_behaves_like 'redacts the embed placeholder'
it_behaves_like 'retains the embed placeholder when applicable'
context 'with /-/metrics?environment=:environment_id URL' do
let(:url) { urls.project_metrics_dashboard_url(project, embedded: true, environment: 1) }
it_behaves_like 'redacts the embed placeholder'
it_behaves_like 'retains the embed placeholder when applicable'
end
context 'for a grafana dashboard' do
let(:url) { urls.project_grafana_api_metrics_dashboard_url(project, embedded: true) }
......@@ -33,7 +40,7 @@ RSpec.describe Banzai::Filter::InlineMetricsRedactorFilter do
let_it_be(:cluster) { create(:cluster, :provided_by_gcp, :project, projects: [project]) }
let(:params) { [project.namespace.path, project.path, cluster.id] }
let(:query_params) { { group: 'Cluster Health', title: 'CPU Usage', y_label: 'CPU (cores)' } }
let(:url) { urls.metrics_dashboard_namespace_project_cluster_url(*params, **query_params) }
let(:url) { urls.metrics_dashboard_namespace_project_cluster_url(*params, **query_params, format: :json) }
context 'with user who can read cluster' do
it_behaves_like 'redacts the embed placeholder'
......
......@@ -6,11 +6,12 @@ RSpec.describe Gitlab::Metrics::Dashboard::Url do
include Gitlab::Routing.url_helpers
describe '#metrics_regex' do
let(:environment_id) { 1 }
let(:url_params) do
[
'foo',
'bar',
1,
environment_id,
{
start: '2019-08-02T05:43:09.000Z',
dashboard: 'config/prometheus/common_metrics.yml',
......@@ -33,12 +34,42 @@ RSpec.describe Gitlab::Metrics::Dashboard::Url do
subject { described_class.metrics_regex }
context 'for metrics route' do
context 'for /-/environments/:environment_id/metrics route' do
let(:url) { metrics_namespace_project_environment_url(*url_params) }
it_behaves_like 'regex which matches url when expected'
end
context 'for /-/metrics?environment=:environment_id route' do
let(:url) { namespace_project_metrics_dashboard_url(*url_params) }
let(:url_params) do
[
'namespace1',
'project1',
{
environment: environment_id,
start: '2019-08-02T05:43:09.000Z',
dashboard: 'config/prometheus/common_metrics.yml',
group: 'awesome group',
anchor: 'title'
}
]
end
let(:expected_params) do
{
'url' => url,
'namespace' => 'namespace1',
'project' => 'project1',
'environment' => "#{environment_id}",
'query' => "?dashboard=config%2Fprometheus%2Fcommon_metrics.yml&environment=#{environment_id}&group=awesome+group&start=2019-08-02T05%3A43%3A09.000Z",
'anchor' => '#title'
}
end
it_behaves_like 'regex which matches url when expected'
end
context 'for metrics_dashboard route' do
let(:url) { metrics_dashboard_namespace_project_environment_url(*url_params) }
......@@ -47,16 +78,19 @@ RSpec.describe Gitlab::Metrics::Dashboard::Url do
end
describe '#clusters_regex' do
let(:url) do
Gitlab::Routing.url_helpers.namespace_project_cluster_url(
let(:url) { Gitlab::Routing.url_helpers.namespace_project_cluster_url(*url_params) }
let(:url_params) do
[
'foo',
'bar',
'1',
group: 'Cluster Health',
title: 'Memory Usage',
y_label: 'Memory 20(GiB)',
anchor: 'title'
)
{
group: 'Cluster Health',
title: 'Memory Usage',
y_label: 'Memory 20(GiB)',
anchor: 'title'
}
]
end
let(:expected_params) do
......@@ -73,6 +107,27 @@ RSpec.describe Gitlab::Metrics::Dashboard::Url do
subject { described_class.clusters_regex }
it_behaves_like 'regex which matches url when expected'
context 'for metrics_dashboard route' do
let(:url) do
metrics_dashboard_namespace_project_cluster_url(
*url_params, cluster_type: :project, embedded: true, format: :json
)
end
let(:expected_params) do
{
'url' => url,
'namespace' => 'foo',
'project' => 'bar',
'cluster_id' => '1',
'query' => '?cluster_type=project&embedded=true',
'anchor' => nil
}
end
it_behaves_like 'regex which matches url when expected'
end
end
describe '#grafana_regex' do
......@@ -103,15 +158,18 @@ RSpec.describe Gitlab::Metrics::Dashboard::Url do
end
describe '#alert_regex' do
let(:url) do
Gitlab::Routing.url_helpers.metrics_dashboard_namespace_project_prometheus_alert_url(
let(:url) { Gitlab::Routing.url_helpers.metrics_dashboard_namespace_project_prometheus_alert_url(*url_params) }
let(:url_params) do
[
'foo',
'bar',
'1',
start: '2020-02-10T12:59:49.938Z',
end: '2020-02-10T20:59:49.938Z',
anchor: "anchor"
)
{
start: '2020-02-10T12:59:49.938Z',
end: '2020-02-10T20:59:49.938Z',
anchor: "anchor"
}
]
end
let(:expected_params) do
......@@ -128,6 +186,21 @@ RSpec.describe Gitlab::Metrics::Dashboard::Url do
subject { described_class.alert_regex }
it_behaves_like 'regex which matches url when expected'
it_behaves_like 'regex which matches url when expected' do
let(:url) { Gitlab::Routing.url_helpers.metrics_dashboard_namespace_project_prometheus_alert_url(*url_params, format: :json) }
let(:expected_params) do
{
'url' => url,
'namespace' => 'foo',
'project' => 'bar',
'alert' => '1',
'query' => nil,
'anchor' => nil
}
end
end
end
describe '#build_dashboard_url' do
......
......@@ -14,7 +14,7 @@ RSpec.describe 'Projects::MetricsDashboardController' do
end
describe 'GET /:namespace/:project/-/metrics' do
it 'returns 200' do
it "redirects to default environment's metrics dashboard" do
send_request
expect(response).to redirect_to(dashboard_route(environment: environment))
end
......@@ -24,6 +24,18 @@ RSpec.describe 'Projects::MetricsDashboardController' do
expect(assigns(:default_environment).id).to eq(environment.id)
end
it 'retains existing parameters when redirecting' do
get "#{dashboard_route(dashboard_path: '.gitlab/dashboards/dashboard_path.yml')}/panel/new"
expect(response).to redirect_to(
dashboard_route(
dashboard_path: '.gitlab/dashboards/dashboard_path.yml',
page: 'panel/new',
environment: environment
)
)
end
context 'with anonymous user and public dashboard visibility' do
let(:anonymous_user) { create(:user) }
let(:project) do
......
......@@ -172,23 +172,31 @@ RSpec.describe Git::ProcessRefChangesService do
[
{ index: 0, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789012', ref: "#{ref_prefix}/create1" },
{ index: 1, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789013', ref: "#{ref_prefix}/create2" },
{ index: 2, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789014', ref: "#{ref_prefix}/create3" }
{ index: 2, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789014', ref: "#{ref_prefix}/create3" },
{ index: 3, oldrev: '789015', newrev: '789016', ref: "#{ref_prefix}/changed1" },
{ index: 4, oldrev: '789017', newrev: '789018', ref: "#{ref_prefix}/changed2" },
{ index: 5, oldrev: '789019', newrev: Gitlab::Git::BLANK_SHA, ref: "#{ref_prefix}/removed1" },
{ index: 6, oldrev: '789020', newrev: Gitlab::Git::BLANK_SHA, ref: "#{ref_prefix}/removed2" }
]
end
let(:git_changes) { double(branch_changes: branch_changes, tag_changes: tag_changes) }
it 'schedules job for existing merge requests' do
expect_next_instance_of(MergeRequests::PushedBranchesService) do |service|
expect(service).to receive(:execute).and_return(%w(create1 create2))
end
before do
allow(MergeRequests::PushedBranchesService).to receive(:new).and_return(
double(execute: %w(create1 create2)), double(execute: %w(changed1)), double(execute: %w(removed2))
)
end
it 'schedules job for existing merge requests' do
expect(UpdateMergeRequestsWorker).to receive(:perform_async)
.with(project.id, user.id, Gitlab::Git::BLANK_SHA, '789012', "#{ref_prefix}/create1").ordered
expect(UpdateMergeRequestsWorker).to receive(:perform_async)
.with(project.id, user.id, Gitlab::Git::BLANK_SHA, '789013', "#{ref_prefix}/create2").ordered
expect(UpdateMergeRequestsWorker).not_to receive(:perform_async)
.with(project.id, user.id, Gitlab::Git::BLANK_SHA, '789014', "#{ref_prefix}/create3").ordered
expect(UpdateMergeRequestsWorker).to receive(:perform_async)
.with(project.id, user.id, '789015', '789016', "#{ref_prefix}/changed1").ordered
expect(UpdateMergeRequestsWorker).to receive(:perform_async)
.with(project.id, user.id, '789020', Gitlab::Git::BLANK_SHA, "#{ref_prefix}/removed2").ordered
subject.execute
end
......
......@@ -100,6 +100,10 @@ RSpec.configure do |config|
metadata[:enable_admin_mode] = true if location =~ %r{(ee)?/spec/controllers/admin/}
end
config.define_derived_metadata(file_path: %r{(ee)?/spec/.+_docs\.rb\z}) do |metadata|
metadata[:type] = :feature
end
config.include LicenseHelpers
config.include ActiveJob::TestHelper
config.include ActiveSupport::Testing::TimeHelpers
......
# frozen_string_literal: true
require 'fileutils'
require 'mini_magick'
module DocsScreenshotHelpers
extend ActiveSupport::Concern
def set_crop_data(element, padding)
@crop_element = element
@crop_padding = padding
end
def crop_image_screenshot(path)
element_rect = @crop_element.evaluate_script("this.getBoundingClientRect()")
width = element_rect['width'] + (@crop_padding * 2)
height = element_rect['height'] + (@crop_padding * 2)
x = element_rect['x'] - @crop_padding
y = element_rect['y'] - @crop_padding
image = MiniMagick::Image.new(path)
image.crop "#{width}x#{height}+#{x}+#{y}"
end
included do |base|
after do |example|
filename = "#{example.description}.png"
path = File.expand_path(filename, 'doc/')
page.save_screenshot(path)
if @crop_element
crop_image_screenshot(path)
set_crop_data(nil, nil)
end
end
end
end
# frozen_string_literal: true
module Tooling
module Image
# Tolerance (in bytes): pngquant is rerun until the size change falls below this value
TOLERANCE = 10000
def self.check_executables
unless system('pngquant --version', out: File::NULL)
warn(
'Error: pngquant executable was not detected in the system.',
'Download pngquant at https://pngquant.org/ and place the executable in /usr/local/bin'
)
abort
end
unless system('gm version', out: File::NULL)
warn(
'Error: gm executable was not detected in the system.',
'Please install imagemagick: brew install imagemagick or sudo apt install imagemagick'
)
abort
end
end
def self.compress_image(file, keep_original = false)
check_executables
compressed_file = "#{file}.compressed"
FileUtils.copy(file, compressed_file)
pngquant_file = PngQuantizator::Image.new(compressed_file)
# Run the image repeatedly through pngquant until
# the change in file size is within TOLERANCE
# or the loop count is above 1000
1000.times do
before = File.size(compressed_file)
pngquant_file.quantize!
after = File.size(compressed_file)
break if before - after <= TOLERANCE
end
savings = File.size(file) - File.size(compressed_file)
is_uncompressed = savings > TOLERANCE
if is_uncompressed && !keep_original
FileUtils.copy(compressed_file, file)
end
FileUtils.remove(compressed_file)
[is_uncompressed, savings]
end
end
end