Commit 606584c1 authored by James Lopez

bulk insert FTW - This would introduce more complexity, but should be faster

parent 9ec39568
@@ -24,20 +24,23 @@ module Projects
       loop do
         batch = project_ids_batch

-        bulk_create_from_template(batch)
+        bulk_create_from_template(batch) unless batch.empty?
         break if batch.size < BATCH_SIZE
       end
     end

     def bulk_create_from_template(batch)
-      service_hash_list = batch.map do |project_id|
-        service_hash.merge('project_id' => project_id)
+      service_list = batch.map do |project_id|
+        service_hash.merge('project_id' => project_id).values
       end

-      Project.transaction do
-        Service.create!(service_hash_list)
-      end
+      # Project.transaction do
+      #   Service.create!(service_hash_list)
+      # end
+      Gitlab::SQL::BulkInsert.new(service_hash.keys + ['project_id'],
+                                  service_list,
+                                  'services').execute
     end

     def project_ids_batch
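
For context, a rough sketch of one loop iteration under the new code, using made-up project ids and a single made-up template attribute (keys arrive pre-quoted and values pre-sanitized from service_hash, see the second hunk below):

# Illustration only; the batch and the attribute are hypothetical.
batch        = [1, 2]
service_hash = { '"category"' => "'issue_tracker'" }

service_list = batch.map do |project_id|
  service_hash.merge('project_id' => project_id).values
end
# => [["'issue_tracker'", 1], ["'issue_tracker'", 2]]

Gitlab::SQL::BulkInsert.new(service_hash.keys + ['project_id'],
                            service_list,
                            'services').execute
# builds and runs a single statement for the whole batch,
# instead of one INSERT per project:
#   INSERT INTO services ("category", project_id)
#   VALUES ('issue_tracker', 1), ('issue_tracker', 2)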
@@ -57,7 +60,17 @@ module Projects
     end

     def service_hash
-      @service_hash ||= @template.as_json(methods: :type).except('id', 'template')
+      @service_hash ||=
+        begin
+          template_hash = @template.as_json(methods: :type).except('id', 'template', 'project_id')
+
+          template_hash.each_with_object({}) do |(key, value), service_hash|
+            value = value.is_a?(Hash) ? value.to_json : value
+            key = Gitlab::Database.postgresql? ? "\"#{key}\"" : "`#{key}`"
+
+            service_hash[key] = ActiveRecord::Base.sanitize(value)
+          end
+        end
     end
   end
 end
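
The keys are quoted and the values sanitized up front because the new Gitlab::SQL::BulkInsert class below splices them into the SQL string verbatim. A minimal sketch of the transformation, assuming PostgreSQL and two hypothetical template attributes (one plain string, one serialized properties hash):

# Illustration only; attribute names and values are made up.
template_hash = { 'category' => 'issue_tracker', 'properties' => {} }

template_hash.each_with_object({}) do |(key, value), service_hash|
  value = value.is_a?(Hash) ? value.to_json : value        # Hash columns become JSON strings
  key   = "\"#{key}\""                                     # double-quoted on PostgreSQL, backticked on MySQL
  service_hash[key] = ActiveRecord::Base.sanitize(value)   # quotes the value as a SQL literal
end
# => { '"category"' => "'issue_tracker'", '"properties"' => "'{}'" }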
module Gitlab
  module SQL
    # Class for building SQL bulk inserts
    class BulkInsert
      def initialize(columns, values_array, table)
        @columns = columns
        @values_array = values_array
        @table = table
      end

      def execute
        ActiveRecord::Base.connection.execute(
          <<-SQL.strip_heredoc
            INSERT INTO #{@table} (#{@columns.join(', ')})
            VALUES #{@values_array.map { |tuple| "(#{tuple.join(', ')})" }.join(', ')}
          SQL
        )
      end
    end
  end
end
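
A standalone usage sketch of the class, with made-up data; the caller is responsible for quoting, since columns and values are interpolated into the statement as-is:

columns = ['"title"', 'project_id']
rows    = [["'JIRA'", 1], ["'JIRA'", 2]]

Gitlab::SQL::BulkInsert.new(columns, rows, 'services').execute
# executes:
#   INSERT INTO services ("title", project_id)
#   VALUES ('JIRA', 1), ('JIRA', 2)

Going through connection.execute keeps each batch to a single round-trip, but it also bypasses ActiveRecord validations and callbacks, which fits the commit message's note about trading complexity for speed.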