Commit 2e2bdeb7 authored by Eugenia Grieff's avatar Eugenia Grieff

Update cache value without performing count query

- The new count will be calculated by adding the number of
updated issues to, or subtracting it from, the existing
cached value
- Add specs
parent fa464083
......@@ -702,12 +702,6 @@ class Group < Namespace
Gitlab::ServiceDesk.supported? && all_projects.service_desk_enabled.exists?
end
# rubocop: disable CodeReuse/ServiceClass
# Refreshes this group's cached open-issues count via the count
# service; the value is re-cached only when over the threshold.
def update_group_issues_counter_cache(current_user)
  counter_service = Groups::OpenIssuesCountService.new(self, current_user)
  counter_service.refresh_cache_over_threshold
end
# rubocop: enable CodeReuse/ServiceClass
private
def update_two_factor_requirement
......
......@@ -25,25 +25,29 @@ module Groups
cached_count = Rails.cache.read(cache_key)
return cached_count unless cached_count.blank?
refresh_cache_over_threshold(reset_cache: false)
refresh_cache_over_threshold
end
# Builds the Rails cache key for this group's open-issues count.
# NOTE(review): the `key` parameter is accepted but never used in
# this body — presumably kept for interface compatibility with a
# base count service; confirm against the superclass before removing.
def cache_key(key = nil)
['groups', 'open_issues_count_service', VERSION, group.id, cache_key_name]
end
# Refreshes the cached open-issues count.
#
# new_count - a precomputed count to store; when nil, falls back to a
#             fresh `uncached_count` query.
#
# The value is cached only when it exceeds CACHED_COUNT_THRESHOLD;
# otherwise any existing cache entry is deleted so small counts are
# computed on demand. Returns the count that was considered.
#
# (Fix: removed stale pre-refactor lines — the old keyword-argument
# signature and `elsif reset_cache` branch — that were interleaved
# with the new implementation and made the method unparsable.)
def refresh_cache_over_threshold(new_count = nil)
  new_count ||= uncached_count

  if new_count > CACHED_COUNT_THRESHOLD
    update_cache_for_key(cache_key) { new_count }
  else
    delete_cache
  end

  new_count
end
# Reads the currently cached open-issues count, or nil when nothing
# is cached (Rails.cache.read returns nil on a cache miss).
def cached_count
Rails.cache.read(cache_key)
end
private
def cache_key_name
......
......@@ -16,8 +16,9 @@ module Issuable
ids = params.delete(:issuable_ids).split(",")
set_update_params(type)
updated_issues_count = update_issuables(type, ids)
if updated_issues_count > 0 && requires_issues_count_cache_refresh?(type)
update_group_cached_counts
update_group_cached_counts(updated_issues_count)
end
response_success(payload: { count: updated_issues_count })
......@@ -86,15 +87,25 @@ module Issuable
end
# Only issue state changes (close/reopen) within a group affect the
# group's cached open-issues count, so refresh only in that case.
#
# (Fix: removed the stale pre-refactor body line — missing the
# `group.present?` guard — that was interleaved with the new one.)
def requires_issues_count_cache_refresh?(type)
  type == 'issue' && params.include?(:state_event) && group.present?
end
# Adjusts the group's cached open-issues count by the number of
# issues changed in this bulk update, avoiding a fresh count query.
# No-op when there is no cached value to adjust.
def update_group_cached_counts(updated_issues_count)
  service = Groups::OpenIssuesCountService.new(group, current_user)
  current = service.cached_count
  return if current.blank?

  adjusted = compute_new_cached_count(current, updated_issues_count)
  service.refresh_cache_over_threshold(adjusted)
end
# Resolves the group the updated issuables belong to: the parent
# itself when it already is a Group, otherwise the parent's group
# (nil-safe for parents without one).
def group
  if parent.is_a?(Group)
    parent
  else
    parent&.group
  end
end
def update_group_cached_counts
group&.update_group_issues_counter_cache(current_user)
# Derives the new cached open-issues count from the existing one:
# closing issues decrements the count, any other state event
# (e.g. reopening) increments it.
def compute_new_cached_count(cached_count, updated_issues_count)
  base = cached_count.to_i
  delta = updated_issues_count.to_i

  params[:state_event] == 'closed' ? base - delta : base + delta
end
end
end
......
......@@ -2376,31 +2376,4 @@ RSpec.describe Group do
it_behaves_like 'model with Debian distributions'
end
# Verifies Group.ids_with_disabled_email: of the given groups, only
# child_1 (whose parent has emails_disabled: true) is returned, so
# the setting is evaluated through the parent, as a Set of ids.
describe '.ids_with_disabled_email' do
let!(:parent_1) { create(:group, emails_disabled: true) }
let!(:child_1) { create(:group, parent: parent_1) }
let!(:parent_2) { create(:group, emails_disabled: false) }
let!(:child_2) { create(:group, parent: parent_2) }
let!(:other_group) { create(:group, emails_disabled: false) }
subject(:group_ids_where_email_is_disabled) { described_class.ids_with_disabled_email([child_1, child_2, other_group]) }
it { is_expected.to eq(Set.new([child_1.id])) }
end
# Verifies Group#update_group_issues_counter_cache delegates to
# Groups::OpenIssuesCountService#refresh_cache_over_threshold
# (and lets the real implementation run via and_call_original).
describe '#update_group_issues_counter_cache' do
let_it_be(:group) { create(:group) }
let_it_be(:user) { create(:user) }
it 'updates group issues count cache if over threshold' do
expect_any_instance_of(Groups::OpenIssuesCountService)
.to receive(:refresh_cache_over_threshold)
.and_call_original
group.update_group_issues_counter_cache(user)
end
end
end
......@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Issuable::BulkUpdateService do
RSpec.describe Issuable::BulkUpdateService, :clean_gitlab_redis_cache do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository, namespace: user.namespace) }
......@@ -282,61 +282,72 @@ RSpec.describe Issuable::BulkUpdateService do
end
# Covers group open-issues cache maintenance during bulk issue state
# updates: the cache is adjusted in place (no recount) when a value
# exists, deleted when the count falls under the threshold, and left
# empty when nothing was cached.
#
# (Fix: this span mixed removed and added diff lines — duplicate
# stub_const calls, the stale "over cache threshold" context and its
# mock-based expectations — making it unparsable; reconstructed the
# post-change version of the context.)
context 'when project belongs to a group' do
  let_it_be(:group) { create(:group) }
  let_it_be(:project2) { create(:project, :repository, group: group) }
  let_it_be(:issues) { create_list(:issue, 2, :opened, project: project2) }

  let(:parent) { project2 }
  let(:count_service_class) { Groups::OpenIssuesCountService }
  let(:count_service) { count_service_class.new(group, user) }

  before do
    project2.add_reporter(user)
  end

  shared_examples 'refreshing cached open issues count with state updates' do |state|
    let(:public_count_key) { count_service.cache_key(count_service_class::PUBLIC_COUNT_KEY) }
    let(:existing_cache) { 5 }

    context 'when cache is empty' do
      before do
        stub_const("#{count_service_class}::CACHED_COUNT_THRESHOLD", 1)
        Rails.cache.delete(public_count_key)
      end

      it 'does not change the counts cache' do
        bulk_update(issues, state_event: state)

        expect(Rails.cache.read(public_count_key)).to be_nil
      end
    end

    context 'when new issues count is over cache threshold' do
      before do
        stub_const("#{count_service_class}::CACHED_COUNT_THRESHOLD", 1)
      end

      it 'updates existing issues counts cache' do
        Rails.cache.write(public_count_key, existing_cache)
        bulk_update(issues, state_event: state)

        expect(Rails.cache.read(public_count_key)).to eq(adjusted_count)
      end
    end

    context 'when new issues count is under cache threshold' do
      before do
        stub_const("#{count_service_class}::CACHED_COUNT_THRESHOLD", 10)
      end

      it 'deletes existing cache' do
        Rails.cache.write(public_count_key, existing_cache)
        bulk_update(issues, state_event: state)

        expect(Rails.cache.read(public_count_key)).to be_nil
      end
    end
  end

  context 'when closing issues', :use_clean_rails_memory_store_caching do
    it_behaves_like 'refreshing cached open issues count with state updates', 'closed' do
      let(:adjusted_count) { existing_cache - issues.size }
    end
  end

  context 'when reopening issues', :use_clean_rails_memory_store_caching do
    before do
      issues.map { |issue| issue.update!(state: 'closed') }
    end

    it_behaves_like 'refreshing cached open issues count with state updates', 'reopened' do
      let(:adjusted_count) { existing_cache + issues.size }
    end
  end
end
end
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment