Commit 95e25141 authored by Dylan Griffith, committed by Sean McGivern

Implement Redis Hash Cache Gitlab::Cache::GroupedCache

This will later be used to cache the result of use_elasticsearch? for
projects and namespaces. As per
https://gitlab.com/gitlab-org/gitlab/-/issues/214440.
parent 79561ab2
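
For orientation, here is a minimal sketch of how the new cache is meant to be consumed, based on the diff below; the `project` variable and the surrounding settings object are illustrative rather than part of this commit:

# Read-through caching of the "is this project indexed?" check.
enabled = ::Gitlab::Elastic::ElasticsearchEnabledCache.fetch(:project, project.id) do
  # Expensive check, only evaluated on a cache miss; the boolean result is
  # stored in a Redis Hash keyed by the record id.
  elasticsearch_limited_projects.exists?(project.id)
end

# Bust the whole :project cache whenever the set of indexed containers changes.
::Gitlab::Elastic::ElasticsearchEnabledCache.delete(:project)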
......@@ -6,6 +6,11 @@ module ElasticsearchIndexedContainer
  included do
    after_commit :index, on: :create
    after_commit :delete_from_index, on: :destroy
    after_commit :invalidate_elasticsearch_indexes_project_cache!, on: [:create, :destroy]
  end

  def invalidate_elasticsearch_indexes_project_cache!
    self.class.invalidate_elasticsearch_indexes_project_cache!
  end

  class_methods do
......@@ -18,5 +23,9 @@ module ElasticsearchIndexedContainer
        batch.destroy_all # rubocop:disable Cop/DestroyAll
      end
    end

    def invalidate_elasticsearch_indexes_project_cache!
      ::Gitlab::CurrentSettings.invalidate_elasticsearch_indexes_project_cache!
    end
  end
end
......@@ -135,8 +135,16 @@ module EE
      return false unless elasticsearch_indexing?
      return true unless elasticsearch_limit_indexing?
      return elasticsearch_limited_projects.exists?(project.id) unless ::Feature.enabled?(:elasticsearch_indexes_project_cache, default_enabled: true)

      ::Gitlab::Elastic::ElasticsearchEnabledCache.fetch(:project, project.id) do
        elasticsearch_limited_projects.exists?(project.id)
      end
    end

    def invalidate_elasticsearch_indexes_project_cache!
      ::Gitlab::Elastic::ElasticsearchEnabledCache.delete(:project)
    end

    def elasticsearch_indexes_namespace?(namespace)
      return false unless elasticsearch_indexing?
......
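
Note that the cached path is guarded by the `elasticsearch_indexes_project_cache` feature flag (enabled by default above), so the old uncached behaviour can be restored at runtime. A sketch, assuming the usual GitLab `Feature` interface from a Rails console:

# Fall back to querying elasticsearch_limited_projects on every call.
Feature.disable(:elasticsearch_indexes_project_cache)

# Re-enable the Redis Hash cache.
Feature.enable(:elasticsearch_indexes_project_cache)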
......@@ -38,6 +38,7 @@ class ElasticsearchIndexedNamespace < ApplicationRecord
      end

      Gitlab::Database.bulk_insert(table_name, insert_rows)
      invalidate_elasticsearch_indexes_project_cache!

      jobs = batch_ids.map { |id| [id, :index] }
......@@ -55,6 +56,7 @@ class ElasticsearchIndexedNamespace < ApplicationRecord
    ids.in_groups_of(BATCH_OPERATION_SIZE, false) do |batch_ids|
      where(namespace_id: batch_ids).delete_all
      invalidate_elasticsearch_indexes_project_cache!

      jobs = batch_ids.map { |id| [id, :delete] }
......
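
Taken together, the model changes above wire up the following invalidation path (a sketch; `ElasticsearchIndexedProject` stands in for any model that includes the `ElasticsearchIndexedContainer` concern):

# Creating or destroying an indexed container busts the whole :project cache.
ElasticsearchIndexedProject.create!(project_id: project.id)
#   after_commit :invalidate_elasticsearch_indexes_project_cache!, on: [:create, :destroy]
#   -> self.class.invalidate_elasticsearch_indexes_project_cache!
#   -> ::Gitlab::CurrentSettings.invalidate_elasticsearch_indexes_project_cache!
#   -> ::Gitlab::Elastic::ElasticsearchEnabledCache.delete(:project)
#
# The bulk operations in ElasticsearchIndexedNamespace call
# invalidate_elasticsearch_indexes_project_cache! directly, once per batch.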
---
title: Fix caching performance and some cache bugs with elasticsearch enabled check
merge_request: 29751
author:
type: fixed
# frozen_string_literal: true

module Gitlab
  module Elastic
    # Efficient cache for checking if Elasticsearch integration is enabled for
    # a resource. This presents a similar API to the Rails cache but only
    # accepts booleans as values and sets the cache expiry only on the initial
    # write for the overall resource cache. As such the cache expires roughly
    # daily, so it doesn't grow unbounded with cached values for records that
    # have not been accessed recently.
    #
    # Under the hood this is implemented with a Redis Hash, and deleting is
    # just a `DEL` of the entire Hash. This kind of cache is preferred to the
    # normal Rails cache (plain Redis key/value pairs) because we need to
    # invalidate the entire cache at once, which would be too inefficient
    # without a Hash.
    class ElasticsearchEnabledCache
      TTL_UNSET = -1
      EXPIRES_IN = 1.day

      class << self
        # Just like Rails.cache.fetch but you provide the type of resource as
        # well as the key for the specific record.
        #
        # @param type [Symbol] the type of resource, `:project` or `:namespace`
        # @param record_id [Integer] the id of the record
        # @return [true, false]
        def fetch(type, record_id, &blk)
          Gitlab::Redis::Cache.with do |redis|
            redis_key = redis_key(type)
            cached_result = redis.hget(redis_key, record_id)
            break Gitlab::Redis::Boolean.decode(cached_result) unless cached_result.nil?

            value = yield
            redis.hset(redis_key, record_id, Gitlab::Redis::Boolean.encode(value))

            # There is a race condition here where we may end up setting the
            # expiry twice in short succession. This is not really a problem
            # since the key will still expire after roughly the same amount of
            # time.
            if redis.ttl(redis_key) == TTL_UNSET
              # Set an expiry only the first time we create the hash. If we
              # refreshed the expiry on every write it could grow forever and
              # never expire. It's best to let it expire roughly daily so it
              # doesn't get too large.
              redis.expire(redis_key, EXPIRES_IN)
            end

            value
          end
        end

        # Deletes the entire cache for this type. All keys in the cache will
        # be removed.
        #
        # @param type [Symbol] the type of resource, `:project` or `:namespace`
        def delete(type)
          Gitlab::Redis::Cache.with { |redis| redis.del(redis_key(type)) }
        end

        private

        def redis_key(type)
          "elasticsearch_enabled_cache:#{type}"
        end
      end
    end
  end
end
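
At the Redis level the class above boils down to a handful of Hash commands. The following sketch is illustrative only: it uses a bare redis-rb client instead of `Gitlab::Redis::Cache.with`, and a plain string value where the real code goes through `Gitlab::Redis::Boolean`:

require 'redis'

redis = Redis.new
key = 'elasticsearch_enabled_cache:project'

# Cache miss: HGET returns nil, so compute the value and store it in the hash.
if redis.hget(key, 42).nil?
  redis.hset(key, 42, 'true')

  # Set the expiry only when the hash has none yet (a TTL of -1 means "no
  # expiry"), so the hash as a whole expires roughly a day after it was first
  # written, regardless of how often it is updated afterwards.
  redis.expire(key, 86_400) if redis.ttl(key) == -1
end

# Invalidation: DEL drops the entire hash, i.e. every cached record at once.
redis.del(key)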
# frozen_string_literal: true

require 'spec_helper'

describe Gitlab::Elastic::ElasticsearchEnabledCache, :clean_gitlab_redis_cache do
  describe '.fetch' do
    it 'remembers the result of the first invocation' do
      expect(described_class.fetch(:project, 1) { true }).to eq(true)
      expect(described_class.fetch(:project, 2) { false }).to eq(false)

      expect { |b| described_class.fetch(:project, 1, &b) }.not_to yield_control
      expect { |b| described_class.fetch(:project, 2, &b) }.not_to yield_control

      expect(described_class.fetch(:project, 1) { false }).to eq(true)
      expect(described_class.fetch(:project, 2) { true }).to eq(false)
    end

    it 'sets an expiry on the key the first time it creates the hash' do
      stub_const('::Gitlab::Elastic::ElasticsearchEnabledCache::EXPIRES_IN', 0)

      expect(described_class.fetch(:project, 1) { true }).to eq(true)
      expect(described_class.fetch(:project, 2) { false }).to eq(false)

      expect(described_class.fetch(:project, 1) { false }).to eq(false)
      expect(described_class.fetch(:project, 2) { true }).to eq(true)
    end

    it 'does not set an expiry on the key after the hash is already created' do
      expect(described_class.fetch(:project, 1) { true }).to eq(true)

      stub_const('::Gitlab::Elastic::ElasticsearchEnabledCache::EXPIRES_IN', 0)

      expect(described_class.fetch(:project, 2) { false }).to eq(false)

      expect(described_class.fetch(:project, 1) { false }).to eq(true)
      expect(described_class.fetch(:project, 2) { true }).to eq(false)
    end
  end

  describe '.delete' do
    it 'clears the cached value' do
      expect(described_class.fetch(:project, 1) { true }).to eq(true)
      expect(described_class.fetch(:project, 2) { false }).to eq(false)

      described_class.delete(:project)

      expect(described_class.fetch(:project, 1) { false }).to eq(false)
      expect(described_class.fetch(:project, 2) { true }).to eq(true)
    end

    it 'does not clear the cache for another type' do
      expect(described_class.fetch(:project, 1) { true }).to eq(true)
      expect(described_class.fetch(:namespace, 1) { false }).to eq(false)

      described_class.delete(:project)

      expect(described_class.fetch(:project, 1) { false }).to eq(false)
      expect(described_class.fetch(:namespace, 1) { true }).to eq(false)
    end
  end
end
......@@ -307,7 +307,33 @@ describe ApplicationSetting do
        expect(setting.elasticsearch_limited_projects).to match_array(
          [projects.last, project_indexed_through_namespace])
      end

      it 'uses the ElasticsearchEnabledCache cache' do
        expect(::Gitlab::Elastic::ElasticsearchEnabledCache).to receive(:fetch).and_return(true)

        expect(setting.elasticsearch_indexes_project?(projects.first)).to be(true)
      end

      context 'when elasticsearch_indexes_project_cache feature flag is disabled' do
        before do
          stub_feature_flags(elasticsearch_indexes_project_cache: false)
        end

        it 'does not use the cache' do
          expect(::Gitlab::Elastic::ElasticsearchEnabledCache).not_to receive(:fetch)

          expect(setting.elasticsearch_indexes_project?(projects.first)).to be(false)
        end
      end
    end
  end

  describe '#invalidate_elasticsearch_indexes_project_cache!' do
    it 'deletes the ElasticsearchEnabledCache for projects' do
      expect(::Gitlab::Elastic::ElasticsearchEnabledCache).to receive(:delete).with(:project)

      setting.invalidate_elasticsearch_indexes_project_cache!
    end
  end
......
......@@ -55,6 +55,8 @@ describe ElasticsearchIndexedNamespace do
  describe '.index_first_n_namespaces_of_plan' do
    it 'creates records, scoped by plan and ordered by namespace id' do
      expect(::Gitlab::CurrentSettings).to receive(:invalidate_elasticsearch_indexes_project_cache!).and_call_original.exactly(3).times

      ids = namespaces.map(&:id)

      described_class.index_first_n_namespaces_of_plan('gold', 1)
......@@ -81,6 +83,8 @@ describe ElasticsearchIndexedNamespace do
    end

    it 'creates records, scoped by plan and ordered by namespace id' do
      expect(::Gitlab::CurrentSettings).to receive(:invalidate_elasticsearch_indexes_project_cache!).and_call_original.exactly(3).times

      ids = namespaces.map(&:id)

      expect(get_indexed_namespaces).to contain_exactly(ids[0], ids[2], ids[1])
......