Commit a02b11b2 authored by Kamil Trzciński's avatar Kamil Trzciński

Merge branch 'pages-limit-file-count' into 'master'

Add pages_file_entries to plan_limits

See merge request gitlab-org/gitlab!64925
parents bc897734 3584d5b1
......@@ -37,14 +37,13 @@ module Projects
job.run!
end
raise InvalidStateError, 'missing pages artifacts' unless build.artifacts?
raise InvalidStateError, 'build SHA is outdated for this ref' unless latest?
validate_state!
validate_max_size!
validate_max_entries!
build.artifacts_file.use_file do |artifacts_path|
deploy_to_legacy_storage(artifacts_path)
create_pages_deployment(artifacts_path, build)
success
end
rescue InvalidStateError => e
......@@ -92,8 +91,10 @@ module Projects
# Check if we extracted the public directory
archive_public_path = File.join(tmp_path, PUBLIC_DIR)
raise InvalidStateError, 'pages miss the public folder' unless Dir.exist?(archive_public_path)
raise InvalidStateError, 'build SHA is outdated for this ref' unless latest?
validate_outdated_sha!
deploy_page!(archive_public_path)
end
......@@ -108,15 +109,6 @@ module Projects
end
def extract_zip_archive!(artifacts_path, temp_path)
raise InvalidStateError, 'missing artifacts metadata' unless build.artifacts_metadata?
# Calculate page size after extraction
public_entry = build.artifacts_metadata_entry(PUBLIC_DIR + '/', recursive: true)
if public_entry.total_size > max_size
raise InvalidStateError, "artifacts for pages are too large: #{public_entry.total_size}"
end
SafeZip::Extract.new(artifacts_path)
.extract(directories: [PUBLIC_DIR], to: temp_path)
rescue SafeZip::Extract::Error => e
......@@ -151,10 +143,6 @@ module Projects
end
def create_pages_deployment(artifacts_path, build)
# we're using the full archive and the pages daemon needs to read it
# so we want the total count of entries, not only the "public/" directory,
# because it better approximates the work needed before we can serve the site
entries_count = build.artifacts_metadata_entry("", recursive: true).entries.count
sha256 = build.job_artifacts_archive.file_sha256
deployment = nil
......@@ -163,7 +151,7 @@ module Projects
file_count: entries_count,
file_sha256: sha256)
raise InvalidStateError, 'build SHA is outdated for this ref' unless latest?
validate_outdated_sha!
project.update_pages_deployment!(deployment)
end
......@@ -175,29 +163,6 @@ module Projects
)
end
def latest?
# check if the SHA for the ref is still the most recent one
# this helps when multiple deployments happen
sha == latest_sha
end
def blocks
# Calculate dd parameters: we limit the size of pages
1 + max_size / BLOCK_SIZE
end
def max_size_from_settings
Gitlab::CurrentSettings.max_pages_size.megabytes
end
def max_size
max_pages_size = max_size_from_settings
return ::Gitlab::Pages::MAX_SIZE if max_pages_size == 0
max_pages_size
end
def tmp_path
@tmp_path ||= File.join(::Settings.pages.path, TMP_EXTRACT_PATH)
end
......@@ -262,6 +227,65 @@ module Projects
def tmp_dir_prefix
"project-#{project.id}-build-#{build.id}-"
end
def validate_state!
raise InvalidStateError, 'missing pages artifacts' unless build.artifacts?
raise InvalidStateError, 'missing artifacts metadata' unless build.artifacts_metadata?
validate_outdated_sha!
end
def validate_outdated_sha!
raise InvalidStateError, 'build SHA is outdated for this ref' unless latest?
end
def latest?
# check if the SHA for the ref is still the most recent one
# this helps when multiple deployments happen
sha == latest_sha
end
def validate_max_size!
if total_size > max_size
raise InvalidStateError, "artifacts for pages are too large: #{total_size}"
end
end
# Calculate page size after extraction
def total_size
@total_size ||= build.artifacts_metadata_entry(PUBLIC_DIR + '/', recursive: true).total_size
end
def max_size_from_settings
Gitlab::CurrentSettings.max_pages_size.megabytes
end
def max_size
max_pages_size = max_size_from_settings
return ::Gitlab::Pages::MAX_SIZE if max_pages_size == 0
max_pages_size
end
def validate_max_entries!
if pages_file_entries_limit > 0 && entries_count > pages_file_entries_limit
raise InvalidStateError, "pages site contains #{entries_count} file entries, while limit is set to #{pages_file_entries_limit}"
end
end
def entries_count
# we're using the full archive and the pages daemon needs to read it
# so we want the total count of entries, not only the "public/" directory,
# because it better approximates the work needed before we can serve the site
@entries_count = build.artifacts_metadata_entry("", recursive: true).entries.count
end
def pages_file_entries_limit
return 0 unless Feature.enabled?(:pages_limit_entries_count, project, default_enabled: :yaml)
project.actual_limits.pages_file_entries
end
end
end
......
---
name: pages_limit_entries_count
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/64925/diffs
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/334765
milestone: '14.1'
type: development
group: group::release
default_enabled: false
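Because `default_enabled` is `false`, the entries-count check stays dormant until the flag is turned on for the instance or a project. A minimal sketch of toggling it from the Rails console, using the standard `Feature` API (the project path below is a placeholder):

```ruby
# Enable the check for the whole instance
Feature.enable(:pages_limit_entries_count)

# Or enable it for a single project, matching the actor used in
# Feature.enabled?(:pages_limit_entries_count, project, default_enabled: :yaml)
Feature.enable(:pages_limit_entries_count, Project.find_by_full_path('group/project'))

# Turn the check off again
Feature.disable(:pages_limit_entries_count)
```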
# frozen_string_literal: true
class AddPagesFileEntriesToPlanLimits < ActiveRecord::Migration[6.1]
def change
add_column(:plan_limits, :pages_file_entries, :integer, default: 200_000, null: false)
end
end
28b31b6e8aba1b8feec2b9a29b5f91f7145431be5d8b9875bddb8183f89700f7
\ No newline at end of file
......@@ -16505,7 +16505,8 @@ CREATE TABLE plan_limits (
ci_daily_pipeline_schedule_triggers integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_running_container_scanning integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_cluster_image_scanning integer DEFAULT 0 NOT NULL,
ci_jobs_trace_size_limit integer DEFAULT 100 NOT NULL
ci_jobs_trace_size_limit integer DEFAULT 100 NOT NULL,
pages_file_entries integer DEFAULT 200000 NOT NULL
);
CREATE SEQUENCE plan_limits_id_seq
......@@ -457,6 +457,21 @@ installation, run the following in the [GitLab Rails console](operations/rails_c
Plan.default.actual_limits.update!(ci_max_artifact_size_junit: 10)
```
### Number of files per GitLab Pages website
The total number of file entries (including directories and symlinks) is limited to `200000` per
GitLab Pages website.
This is the default limit for all [GitLab self-managed and SaaS plans](https://about.gitlab.com/pricing/).
You can update the limit in your self-managed instance using the
[GitLab Rails console](operations/rails_console.md#starting-a-rails-console-session).
For example, to change the limit to `100`:
```ruby
Plan.default.actual_limits.update!(pages_file_entries: 100)
```
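To read the effective value back in the same console session (a quick sanity check, not an official documented step):

```ruby
Plan.default.actual_limits.pages_file_entries
# => 100
```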
### Number of registered runners per scope
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/321368) in GitLab 13.12.
......
......@@ -158,6 +158,21 @@ RSpec.describe Projects::UpdatePagesService do
expect(execute).not_to eq(:success)
end
it 'limits pages file count' do
create(:plan_limits, :default_plan, pages_file_entries: 2)
expect(execute).not_to eq(:success)
expect(GenericCommitStatus.last.description).to eq("pages site contains 3 file entries, while limit is set to 2")
end
it 'does not limit pages file count if feature is disabled' do
stub_feature_flags(pages_limit_entries_count: false)
create(:plan_limits, :default_plan, pages_file_entries: 2)
expect(execute).to eq(:success)
end
it 'removes pages after destroy' do
expect(PagesWorker).to receive(:perform_in)
expect(project.pages_deployed?).to be_falsey
......@@ -339,9 +354,15 @@ RSpec.describe Projects::UpdatePagesService do
create(:ci_job_artifact, :archive, file: file, job: build)
create(:ci_job_artifact, :metadata, file: metafile, job: build)
allow(build).to receive(:artifacts_metadata_entry)
allow(build).to receive(:artifacts_metadata_entry).with('public/', recursive: true)
.and_return(metadata)
allow(metadata).to receive(:total_size).and_return(100)
# to pass entries count check
root_metadata = double('root metadata')
allow(build).to receive(:artifacts_metadata_entry).with('', recursive: true)
.and_return(root_metadata)
allow(root_metadata).to receive_message_chain(:entries, :count).and_return(10)
end
it 'raises an error' do
......