Commit 9b10bed9 authored by Ash McKenzie's avatar Ash McKenzie

Merge branch 'mc/feature/spread-ci-minute-reset-more' into 'master'

Spread runner CI minute reset to 8 hours

See merge request gitlab-org/gitlab!50265
parents 3893b5c2 0ead05ed
......@@ -10,6 +10,7 @@ class ClearSharedRunnersMinutesWorker # rubocop:disable Scalability/IdempotentWo
feature_category :continuous_integration
LEASE_TIMEOUT = 3600
TIME_SPREAD = 8.hours.seconds.freeze
BATCH_SIZE = 100_000
def perform
......@@ -17,7 +18,7 @@ class ClearSharedRunnersMinutesWorker # rubocop:disable Scalability/IdempotentWo
start_id = Namespace.minimum(:id)
last_id = Namespace.maximum(:id)
execution_offset = 3.hours.seconds / ((last_id - start_id) / BATCH_SIZE)
execution_offset = TIME_SPREAD / ((last_id - start_id) / BATCH_SIZE)
(start_id..last_id).step(BATCH_SIZE).with_index do |batch_start_id, batch_index|
batch_end_id = batch_start_id + BATCH_SIZE - 1
......
---
title: CI minutes are spread across 8 hours instead of 3.
merge_request: 50265
author:
type: performance
......@@ -147,12 +147,12 @@ RSpec.describe ClearSharedRunnersMinutesWorker do
end
it 'runs a worker per batch' do
# Spread evenly across 3 hours (10800 seconds)
# Spread evenly across 8 hours (28800 seconds)
expect(Ci::BatchResetMinutesWorker).to receive(:perform_in).with(0.seconds, 2, 4)
expect(Ci::BatchResetMinutesWorker).to receive(:perform_in).with(2700.seconds, 5, 7)
expect(Ci::BatchResetMinutesWorker).to receive(:perform_in).with(5400.seconds, 8, 10)
expect(Ci::BatchResetMinutesWorker).to receive(:perform_in).with(8100.seconds, 11, 13)
expect(Ci::BatchResetMinutesWorker).to receive(:perform_in).with(10800.seconds, 14, 16)
expect(Ci::BatchResetMinutesWorker).to receive(:perform_in).with(7200.seconds, 5, 7)
expect(Ci::BatchResetMinutesWorker).to receive(:perform_in).with(14400.seconds, 8, 10)
expect(Ci::BatchResetMinutesWorker).to receive(:perform_in).with(21600.seconds, 11, 13)
expect(Ci::BatchResetMinutesWorker).to receive(:perform_in).with(28800.seconds, 14, 16)
subject
end
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment