Commit f0133b68 authored by 🤖 GitLab Bot 🤖

Merge branch 'ce-to-ee-2018-11-26' into 'master'

CE upstream - 2018-11-26 13:21 UTC

See merge request gitlab-org/gitlab-ee!8586
parents b3bb56d4 be9c3d98
@@ -11,10 +11,11 @@ module Resolvers
end
def model_by_full_path(model, full_path)
BatchLoader.for(full_path).batch(key: "#{model.model_name.param_key}:full_path") do |full_paths, loader|
BatchLoader.for(full_path).batch(key: model) do |full_paths, loader, args|
# `with_route` avoids an N+1 calculating full_path
results = model.where_full_path_in(full_paths).with_route
results.each { |project| loader.call(project.full_path, project) }
args[:key].where_full_path_in(full_paths).with_route.each do |project|
loader.call(project.full_path, project)
end
end
end
end
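The hunk above is the core pattern of this merge: `BatchLoader#batch` takes a `key:` option, pending loads are grouped per distinct key, and the block receives the key back through `args[:key]`, so one batched query is issued per model class (or, in the later hunks, per repository). A minimal sketch of the same idea, assuming the batch-loader gem's public API and an illustrative `User` model that is not part of this change:

def lazy_user(username)
  BatchLoader.for(username).batch(key: User) do |usernames, loader, args|
    # args[:key] is the object passed as `key:`; the block runs once per distinct key.
    args[:key].where(username: usernames).each do |user|
      loader.call(user.username, user)
    end
  end
end

# The return value is a lazy proxy; touching any proxy resolves every pending
# username registered under the same key with a single query.
alice = lazy_user('alice')
bob   = lazy_user('bob')
alice.id # one User.where(username: %w[alice bob]) call fills both proxies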
@@ -14,9 +14,10 @@ module Resolvers
def resolve(iid:)
return unless project.present?
BatchLoader.for(iid.to_s).batch(key: project.id) do |iids, loader|
results = project.merge_requests.where(iid: iids)
results.each { |mr| loader.call(mr.iid.to_s, mr) }
BatchLoader.for(iid.to_s).batch(key: project) do |iids, loader, args|
args[:key].merge_requests.where(iid: iids).each do |mr|
loader.call(mr.iid.to_s, mr)
end
end
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -80,15 +80,9 @@ class Blob < SimpleDelegator
end
def self.lazy(project, commit_id, path)
BatchLoader.for({ project: project, commit_id: commit_id, path: path }).batch do |items, loader|
items_by_project = items.group_by { |i| i[:project] }
items_by_project.each do |project, items|
items = items.map { |i| i.values_at(:commit_id, :path) }
project.repository.blobs_at(items).each do |blob|
loader.call({ project: blob.project, commit_id: blob.commit_id, path: blob.path }, blob) if blob
end
BatchLoader.for([commit_id, path]).batch(key: project.repository) do |items, loader, args|
args[:key].blobs_at(items).each do |blob|
loader.call([blob.commit_id, blob.path], blob) if blob
end
end
end
@@ -73,7 +73,13 @@ class Repository
end
def ==(other)
@disk_path == other.disk_path
other.is_a?(self.class) && @disk_path == other.disk_path
end
alias_method :eql?, :==
def hash
[self.class, @disk_path].hash
end
def raw_repository
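Repository instances are now used as BatchLoader keys (see the `Blob.lazy` hunk above), and the gem groups pending items under the `key:` object using Hash semantics, i.e. `eql?`/`hash` rather than `==`. The added methods make two Repository objects for the same disk path fall into one batch. A minimal sketch of the contract, using an illustrative class rather than the real Repository:

class DiskThing
  attr_reader :disk_path

  def initialize(disk_path)
    @disk_path = disk_path
  end

  def ==(other)
    other.is_a?(self.class) && disk_path == other.disk_path
  end
  alias_method :eql?, :==

  def hash
    [self.class, disk_path].hash
  end
end

a = DiskThing.new('group/project.git')
b = DiskThing.new('group/project.git')
{ a => :batch }.key?(b) # => true, because eql? and hash agree with ==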
---
title: Reduce Gitaly calls in projects dashboard
merge_request: 23307
author:
type: performance
---
title: Batch load only data from same repository when lazy object is accessed
merge_request: 23309
author:
type: performance
@@ -7,9 +7,9 @@ module Gitlab
module Cache
module Ci
class ProjectPipelineStatus
attr_accessor :sha, :status, :ref, :project, :loaded
include Gitlab::Utils::StrongMemoize
delegate :commit, to: :project
attr_accessor :sha, :status, :ref, :project, :loaded
def self.load_for_project(project)
new(project).tap do |status|
@@ -18,33 +18,12 @@ module Gitlab
end
def self.load_in_batch_for_projects(projects)
cached_results_for_projects(projects).zip(projects).each do |result, project|
project.pipeline_status = new(project, result)
projects.each do |project|
project.pipeline_status = new(project)
project.pipeline_status.load_status
end
end
def self.cached_results_for_projects(projects)
result = Gitlab::Redis::Cache.with do |redis|
redis.multi do
projects.each do |project|
cache_key = cache_key_for_project(project)
redis.exists(cache_key)
redis.hmget(cache_key, :sha, :status, :ref)
end
end
end
result.each_slice(2).map do |(cache_key_exists, (sha, status, ref))|
pipeline_info = { sha: sha, status: status, ref: ref }
{ loaded_from_cache: cache_key_exists, pipeline_info: pipeline_info }
end
end
def self.cache_key_for_project(project)
"#{Gitlab::Redis::Cache::CACHE_NAMESPACE}:project:#{project.id}:pipeline_status:#{project.commit&.sha}"
end
def self.update_for_pipeline(pipeline)
pipeline_info = {
sha: pipeline.sha,
@@ -70,6 +49,7 @@ module Gitlab
def load_status
return if loaded?
return unless commit
if has_cache?
load_from_cache
@@ -82,8 +62,6 @@ module Gitlab
end
def load_from_project
return unless commit
self.sha, self.status, self.ref = commit.sha, commit.status, project.default_branch
end
@@ -132,7 +110,13 @@ module Gitlab
end
def cache_key
self.class.cache_key_for_project(project)
"#{Gitlab::Redis::Cache::CACHE_NAMESPACE}:project:#{project.id}:pipeline_status:#{commit&.sha}"
end
def commit
strong_memoize(:commit) do
project.commit
end
end
end
end
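`strong_memoize` is preferred here over a plain `@commit ||= project.commit` because `project.commit` is nil for an empty repository; `||=` would repeat the Gitaly-backed lookup on every call, while `Gitlab::Utils::StrongMemoize` also caches a nil result. A simplified sketch of the behaviour it relies on (not the actual GitLab implementation):

module StrongMemoizeSketch
  def strong_memoize(name)
    ivar = "@#{name}"
    if instance_variable_defined?(ivar)
      instance_variable_get(ivar)
    else
      instance_variable_set(ivar, yield)
    end
  end
end

class CommitHolder
  include StrongMemoizeSketch

  def initialize(project)
    @project = project
  end

  def commit
    # Evaluated once, even when project.commit returns nil.
    strong_memoize(:commit) { @project.commit }
  end
end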
@@ -6,8 +6,8 @@ module Gitlab
class Collection
class Item
def initialize(key:, value:, public: true, file: false)
raise ArgumentError, "`#{key}` must be of type String, while it was: #{value.class}" unless
value.is_a?(String)
raise ArgumentError, "`#{key}` must be of type String or nil value, while it was: #{value.class}" unless
value.is_a?(String) || value.nil?
@variable = {
key: key, value: value, public: public, file: file
@@ -23,7 +23,7 @@ module Gitlab
alias_method :eql?, :==
def hash
[base_sha, start_sha, head_sha].hash
[self.class, base_sha, start_sha, head_sha].hash
end
# There is only one case in which we will have `start_sha` and `head_sha`,
@@ -155,17 +155,9 @@ module Gitlab
end
def extract_signature_lazily(repository, commit_id)
BatchLoader.for({ repository: repository, commit_id: commit_id }).batch do |items, loader|
items_by_repo = items.group_by { |i| i[:repository] }
items_by_repo.each do |repo, items|
commit_ids = items.map { |i| i[:commit_id] }
signatures = batch_signature_extraction(repository, commit_ids)
signatures.each do |commit_sha, signature_data|
loader.call({ repository: repository, commit_id: commit_sha }, signature_data)
end
BatchLoader.for(commit_id).batch(key: repository) do |commit_ids, loader, args|
batch_signature_extraction(args[:key], commit_ids).each do |commit_id, signature_data|
loader.call(commit_id, signature_data)
end
end
end
@@ -175,17 +167,9 @@ module Gitlab
end
def get_message(repository, commit_id)
BatchLoader.for({ repository: repository, commit_id: commit_id }).batch do |items, loader|
items_by_repo = items.group_by { |i| i[:repository] }
items_by_repo.each do |repo, items|
commit_ids = items.map { |i| i[:commit_id] }
messages = get_messages(repository, commit_ids)
messages.each do |commit_sha, message|
loader.call({ repository: repository, commit_id: commit_sha }, message)
end
BatchLoader.for(commit_id).batch(key: repository) do |commit_ids, loader, args|
get_messages(args[:key], commit_ids).each do |commit_id, message|
loader.call(commit_id, message)
end
end
end
@@ -80,7 +80,13 @@ module Gitlab
end
def ==(other)
[storage, relative_path] == [other.storage, other.relative_path]
other.is_a?(self.class) && [storage, relative_path] == [other.storage, other.relative_path]
end
alias_method :eql?, :==
def hash
[self.class, storage, relative_path].hash
end
# This method will be removed when Gitaly reaches v1.1.
@@ -14,17 +14,9 @@ module Gitlab
class << self
def get_message(repository, tag_id)
BatchLoader.for({ repository: repository, tag_id: tag_id }).batch do |items, loader|
items_by_repo = items.group_by { |i| i[:repository] }
items_by_repo.each do |repo, items|
tag_ids = items.map { |i| i[:tag_id] }
messages = get_messages(repository, tag_ids)
messages.each do |id, message|
loader.call({ repository: repository, tag_id: id }, message)
end
BatchLoader.for(tag_id).batch(key: repository) do |tag_ids, loader, args|
get_messages(args[:key], tag_ids).each do |tag_id, message|
loader.call(tag_id, message)
end
end
end
@@ -85,11 +85,8 @@ module Gitlab
end
def common_query_context(environment, timeframe_start:, timeframe_end:)
base_query_context(timeframe_start, timeframe_end).merge({
ci_environment_slug: environment.slug,
kube_namespace: environment.deployment_platform&.actual_namespace || '',
environment_filter: %{container_name!="POD",environment="#{environment.slug}"}
})
base_query_context(timeframe_start, timeframe_end)
.merge(QueryVariables.call(environment))
end
def base_query_context(timeframe_start, timeframe_end)
# frozen_string_literal: true
module Gitlab
module Prometheus
module QueryVariables
def self.call(environment)
{
ci_environment_slug: environment.slug,
kube_namespace: environment.deployment_platform&.actual_namespace || '',
environment_filter: %{container_name!="POD",environment="#{environment.slug}"}
}
end
end
end
end
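With the variables extracted into `QueryVariables`, `common_query_context` only merges the result into the base context. A hypothetical call, with example values made up purely for illustration:

variables = Gitlab::Prometheus::QueryVariables.call(environment)
# variables now holds something like:
#   { ci_environment_slug: 'production',
#     kube_namespace: 'my-project-42-production',
#     environment_filter: %{container_name!="POD",environment="production"} }
base_query_context(timeframe_start, timeframe_end).merge(variables)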
@@ -3,7 +3,7 @@ require 'spec_helper'
describe Gitlab::Cache::Ci::ProjectPipelineStatus, :clean_gitlab_redis_cache do
let!(:project) { create(:project, :repository) }
let(:pipeline_status) { described_class.new(project) }
let(:cache_key) { described_class.cache_key_for_project(project) }
let(:cache_key) { pipeline_status.cache_key }
describe '.load_for_project' do
it "loads the status" do
@@ -14,94 +14,24 @@ describe Gitlab::Cache::Ci::ProjectPipelineStatus, :clean_gitlab_redis_cache do
end
describe 'loading in batches' do
let(:status) { 'success' }
let(:sha) { '424d1b73bc0d3cb726eb7dc4ce17a4d48552f8c6' }
let(:ref) { 'master' }
let(:pipeline_info) { { sha: sha, status: status, ref: ref } }
let!(:project_without_status) { create(:project, :repository) }
describe '.load_in_batch_for_projects' do
it 'preloads pipeline_status on projects' do
it 'loads pipeline_status on projects' do
described_class.load_in_batch_for_projects([project])
# Don't call the accessor that would lazy load the variable
expect(project.instance_variable_get('@pipeline_status')).to be_a(described_class)
end
describe 'without a status in redis_cache' do
it 'loads the status from a commit when it was not in redis_cache' do
empty_status = { sha: nil, status: nil, ref: nil }
fake_pipeline = described_class.new(
project_without_status,
pipeline_info: empty_status,
loaded_from_cache: false
)
expect(described_class).to receive(:new)
.with(project_without_status,
pipeline_info: empty_status,
loaded_from_cache: false)
.and_return(fake_pipeline)
expect(fake_pipeline).to receive(:load_from_project)
expect(fake_pipeline).to receive(:store_in_cache)
described_class.load_in_batch_for_projects([project_without_status])
end
it 'only connects to redis twice' do
expect(Gitlab::Redis::Cache).to receive(:with).exactly(2).and_call_original
described_class.load_in_batch_for_projects([project_without_status])
expect(project_without_status.pipeline_status).not_to be_nil
end
end
describe 'when a status was cached in redis_cache' do
before do
Gitlab::Redis::Cache.with do |redis|
redis.mapped_hmset(cache_key,
{ sha: sha, status: status, ref: ref })
end
end
it 'loads the correct status' do
described_class.load_in_batch_for_projects([project])
pipeline_status = project.instance_variable_get('@pipeline_status')
expect(pipeline_status.sha).to eq(sha)
expect(pipeline_status.status).to eq(status)
expect(pipeline_status.ref).to eq(ref)
end
it 'only connects to redis_cache once' do
expect(Gitlab::Redis::Cache).to receive(:with).exactly(1).and_call_original
project_pipeline_status = project.instance_variable_get('@pipeline_status')
described_class.load_in_batch_for_projects([project])
expect(project.pipeline_status).not_to be_nil
end
it "doesn't load the status separatly" do
expect_any_instance_of(described_class).not_to receive(:load_from_project)
expect_any_instance_of(described_class).not_to receive(:load_from_cache)
described_class.load_in_batch_for_projects([project])
end
expect(project_pipeline_status).to be_a(described_class)
expect(project_pipeline_status).to be_loaded
end
end
describe '.cached_results_for_projects' do
it 'loads a status from caching for all projects' do
Gitlab::Redis::Cache.with do |redis|
redis.mapped_hmset(cache_key, { sha: sha, status: status, ref: ref })
it 'loads 10 projects without hitting Gitaly call limit', :request_store do
projects = Gitlab::GitalyClient.allow_n_plus_1_calls do
(1..10).map { create(:project, :repository) }
end
Gitlab::GitalyClient.reset_counts
result = [{ loaded_from_cache: false, pipeline_info: { sha: nil, status: nil, ref: nil } },
{ loaded_from_cache: true, pipeline_info: pipeline_info }]
expect(described_class.cached_results_for_projects([project_without_status, project])).to eq(result)
expect { described_class.load_in_batch_for_projects(projects) }.not_to raise_error
end
end
end
@@ -198,7 +128,9 @@ describe Gitlab::Cache::Ci::ProjectPipelineStatus, :clean_gitlab_redis_cache do
status_for_empty_commit.load_status
expect(status_for_empty_commit).to be_loaded
expect(status_for_empty_commit.sha).to be_nil
expect(status_for_empty_commit.status).to be_nil
expect(status_for_empty_commit.ref).to be_nil
end
end
@@ -36,7 +36,7 @@ describe Gitlab::Ci::Variables::Collection::Item do
shared_examples 'raises error for invalid type' do
it do
expect { described_class.new(key: variable_key, value: variable_value) }
.to raise_error ArgumentError, /`#{variable_key}` must be of type String, while it was:/
.to raise_error ArgumentError, /`#{variable_key}` must be of type String or nil value, while it was:/
end
end
@@ -46,7 +46,7 @@ describe Gitlab::Ci::Variables::Collection::Item do
let(:variable_value) { nil }
let(:expected_value) { nil }
it_behaves_like 'raises error for invalid type'
it_behaves_like 'creates variable'
end
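With the relaxed check in `Collection::Item`, a nil value is now accepted and only non-String, non-nil values raise. Roughly, with illustrative variable names:

Gitlab::Ci::Variables::Collection::Item.new(key: 'EMPTY_VAR', value: nil) # builds the item
Gitlab::Ci::Variables::Collection::Item.new(key: 'BAD_VAR', value: 1)
# ArgumentError: `BAD_VAR` must be of type String or nil value, while it was: Integer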
context "when it's an empty string" do
@@ -450,11 +450,17 @@ describe Gitlab::Git::Commit, :seed_helper do
described_class.extract_signature_lazily(repository, commit_id)
end
other_repository = double(:repository)
described_class.extract_signature_lazily(other_repository, commit_ids.first)
expect(described_class).to receive(:batch_signature_extraction)
.with(repository, commit_ids)
.once
.and_return({})
expect(described_class).not_to receive(:batch_signature_extraction)
.with(other_repository, commit_ids.first)
2.times { signatures.each(&:itself) }
end
end
@@ -38,6 +38,9 @@ describe Gitlab::Git::Tag, :seed_helper do
end
it 'gets messages in one batch', :request_store do
other_repository = double(:repository)
described_class.get_message(other_repository, tag_ids.first)
expect { subject.map(&:itself) }.to change { Gitlab::GitalyClient.get_request_count }.by(1)
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Prometheus::QueryVariables do
describe '.call' do
set(:environment) { create(:environment) }
let(:slug) { environment.slug }
subject { described_class.call(environment) }
it { is_expected.to include(ci_environment_slug: slug) }
it do
is_expected.to include(environment_filter:
%{container_name!="POD",environment="#{slug}"})
end
context 'without deployment platform' do
it { is_expected.to include(kube_namespace: '') }
end
context 'with deployment platform' do
let(:kube_namespace) { environment.deployment_platform.actual_namespace }
before do
create(:cluster, :provided_by_user, projects: [environment.project])
end
it { is_expected.to include(kube_namespace: kube_namespace) }
end
end
end
@@ -18,14 +18,23 @@ describe Blob do
describe '.lazy' do
let(:project) { create(:project, :repository) }
let(:commit) { project.commit_by(oid: 'e63f41fe459e62e1228fcef60d7189127aeba95a') }
let(:other_project) { create(:project, :repository) }
let(:commit_id) { 'e63f41fe459e62e1228fcef60d7189127aeba95a' }
it 'fetches all blobs when the first is accessed' do
changelog = described_class.lazy(project, commit.id, 'CHANGELOG')
contributing = described_class.lazy(project, commit.id, 'CONTRIBUTING.md')
it 'does not fetch blobs when none are accessed' do
expect(project.repository).not_to receive(:blobs_at)
expect(Gitlab::Git::Blob).to receive(:batch).once.and_call_original
expect(Gitlab::Git::Blob).not_to receive(:find)
described_class.lazy(project, commit_id, 'CHANGELOG')
end
it 'fetches all blobs for the same repository when one is accessed' do
expect(project.repository).to receive(:blobs_at).with([[commit_id, 'CHANGELOG'], [commit_id, 'CONTRIBUTING.md']]).once.and_call_original
expect(other_project.repository).not_to receive(:blobs_at)
changelog = described_class.lazy(project, commit_id, 'CHANGELOG')
contributing = described_class.lazy(project, commit_id, 'CONTRIBUTING.md')
described_class.lazy(other_project, commit_id, 'CHANGELOG')
# Access property so the values are loaded
changelog.id
@@ -6,7 +6,10 @@ describe 'clearing redis cache' do
end
describe 'clearing pipeline status cache' do
let(:pipeline_status) { create(:ci_pipeline).project.pipeline_status }
let(:pipeline_status) do
project = create(:project, :repository)
create(:ci_pipeline, project: project).project.pipeline_status
end
before do
allow(pipeline_status).to receive(:loaded).and_return(nil)
@@ -713,6 +713,13 @@
source-map "^0.5.6"
vue-template-es2015-compiler "^1.6.0"
"@vue/test-utils@^1.0.0-beta.25":
version "1.0.0-beta.25"
resolved "https://registry.yarnpkg.com/@vue/test-utils/-/test-utils-1.0.0-beta.25.tgz#4703076de3076bac42cdd242cd53e6fb8752ed8c"
integrity sha512-mfvguEmEpAn0BuT4u+qm+0J1NTKgQS+ffUyWHY1QeSovIkJcy98fj1rO+PJgiZSEvGjjnDNX+qmofYFPLrofbA==
dependencies:
lodash "^4.17.4"
"@webassemblyjs/ast@1.7.6":
version "1.7.6"
resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.7.6.tgz#3ef8c45b3e5e943a153a05281317474fef63e21e"