Commit e80a893f authored by Sean McGivern

Merge branch 'split-events-into-push-events' into 'master'

Use a separate table for storing push events

See merge request !12463
parents 9c11894a aac1de46
@@ -52,8 +52,10 @@ class Dashboard::ProjectsController < Dashboard::ApplicationController
   end
 
   def load_events
-    @events = Event.in_projects(load_projects(params.merge(non_public: true)))
-    @events = event_filter.apply_filter(@events).with_associations
-    @events = @events.limit(20).offset(params[:offset] || 0)
+    projects = load_projects(params.merge(non_public: true))
+
+    @events = EventCollection
+      .new(projects, offset: params[:offset].to_i, filter: event_filter)
+      .to_a
   end
 end
@@ -29,9 +29,9 @@ class DashboardController < Dashboard::ApplicationController
         current_user.authorized_projects
       end
 
-    @events = Event.in_projects(projects)
-    @events = @event_filter.apply_filter(@events).with_associations
-    @events = @events.limit(20).offset(params[:offset] || 0)
+    @events = EventCollection
+      .new(projects, offset: params[:offset].to_i, filter: @event_filter)
+      .to_a
   end
 
   def set_show_full_reference
...
@@ -160,9 +160,9 @@ class GroupsController < Groups::ApplicationController
   end
 
   def load_events
-    @events = Event.in_projects(@projects)
-    @events = event_filter.apply_filter(@events).with_associations
-    @events = @events.limit(20).offset(params[:offset] || 0)
+    @events = EventCollection
+      .new(@projects, offset: params[:offset].to_i, filter: event_filter)
+      .to_a
   end
 
   def user_actions
...
@@ -301,10 +301,11 @@ class ProjectsController < Projects::ApplicationController
   end
 
   def load_events
-    @events = @project.events.recent
-    @events = event_filter.apply_filter(@events).with_associations
-    limit = (params[:limit] || 20).to_i
-    @events = @events.limit(limit).offset(params[:offset] || 0)
+    projects = Project.where(id: @project.id)
+
+    @events = EventCollection
+      .new(projects, offset: params[:offset].to_i, filter: event_filter)
+      .to_a
   end
 
   def project_params
...
@@ -48,6 +48,7 @@ class Event < ActiveRecord::Base
   belongs_to :author, class_name: "User"
   belongs_to :project
   belongs_to :target, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
+  has_one :push_event_payload, foreign_key: :event_id
 
   # For Hash only
   serialize :data # rubocop:disable Cop/ActiveRecordSerialize
@@ -55,19 +56,51 @@ class Event < ActiveRecord::Base
   # Callbacks
   after_create :reset_project_activity
   after_create :set_last_repository_updated_at, if: :push?
+  after_create :replicate_event_for_push_events_migration
 
   # Scopes
   scope :recent, -> { reorder(id: :desc) }
   scope :code_push, -> { where(action: PUSHED) }
-  scope :in_projects, ->(projects) do
-    where(project_id: projects.pluck(:id)).recent
+
+  scope :in_projects, -> (projects) do
+    sub_query = projects
+      .except(:order)
+      .select(1)
+      .where('projects.id = events.project_id')
+
+    where('EXISTS (?)', sub_query).recent
   end
-  scope :with_associations, -> { includes(:author, :project, project: :namespace).preload(:target) }
+
+  scope :with_associations, -> do
+    # We're using preload for "push_event_payload" as otherwise the association
+    # is not always available (depending on the query being built).
+    includes(:author, :project, project: :namespace)
+      .preload(:target, :push_event_payload)
+  end
+
   scope :for_milestone_id, ->(milestone_id) { where(target_type: "Milestone", target_id: milestone_id) }
 
+  self.inheritance_column = 'action'
+
   class << self
+    def find_sti_class(action)
+      if action.to_i == PUSHED
+        PushEvent
+      else
+        Event
+      end
+    end
+
+    def subclass_from_attributes(attrs)
+      # Without this Rails will keep calling this method on the returned class,
+      # resulting in an infinite loop.
+      return unless self == Event
+
+      action = attrs.with_indifferent_access[inheritance_column].to_i
+
+      PushEvent if action == PUSHED
+    end
+
     # Update Gitlab::ContributionsCalendar#activity_dates if this changes
     def contributions
       where("action = ? OR (target_type IN (?) AND action IN (?)) OR (target_type = ? AND action = ?)",
@@ -290,6 +323,16 @@ class Event < ActiveRecord::Base
     @commits ||= (data[:commits] || []).reverse
   end
 
+  def commit_title
+    commit = commits.last
+
+    commit[:message] if commit
+  end
+
+  def commit_id
+    commit_to || commit_from
+  end
+
   def commits_count
     data[:total_commits_count] || commits.count || 0
   end
@@ -385,6 +428,16 @@ class Event < ActiveRecord::Base
     user ? author_id == user.id : false
   end
 
+  # We're manually replicating data into the new table since database triggers
+  # are not dumped to db/schema.rb. This could mean that a new installation
+  # would not have the triggers in place, thus losing events data in GitLab
+  # 10.0.
+  def replicate_event_for_push_events_migration
+    new_attributes = attributes.with_indifferent_access.except(:title, :data)
+
+    EventForMigration.create!(new_attributes)
+  end
+
   private
 
   def recent_update?
...
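
For context on the reworked `in_projects` scope above, a minimal usage sketch (assuming `current_user` is available, as in the controllers earlier in this diff); the SQL in the comment is only an approximation of what the `EXISTS` sub-query produces:

projects = current_user.authorized_projects

Event.in_projects(projects)
# Roughly:
#   SELECT "events".* FROM "events"
#   WHERE EXISTS (
#     SELECT 1 FROM "projects"
#     WHERE "projects"."id" = "events"."project_id"
#     AND <whatever conditions the projects relation carries>
#   )
#   ORDER BY "events"."id" DESC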
# A collection of events to display in an event list.
#
# An EventCollection is meant to be used for displaying events to a user (e.g.
# in a controller), it's not suitable for building queries that are used for
# building other queries.
class EventCollection
# To prevent users from putting too much pressure on the database by cycling
# through thousands of events we put a limit on the number of pages.
MAX_PAGE = 10
# projects - An ActiveRecord::Relation object that returns the projects for
# which to retrieve events.
# filter - An EventFilter instance to use for filtering events.
def initialize(projects, limit: 20, offset: 0, filter: nil)
@projects = projects
@limit = limit
@offset = offset
@filter = filter
end
# Returns an Array containing the events.
def to_a
return [] if current_page > MAX_PAGE
relation = if Gitlab::Database.join_lateral_supported?
relation_with_join_lateral
else
relation_without_join_lateral
end
relation.with_associations.to_a
end
private
# Returns the events relation to use when JOIN LATERAL is not supported.
#
# This relation simply gets all the events for all authorized projects, then
# limits that set.
def relation_without_join_lateral
events = filtered_events.in_projects(projects)
paginate_events(events)
end
# Returns the events relation to use when JOIN LATERAL is supported.
#
# This relation is built using JOIN LATERAL, producing faster queries than a
# regular LIMIT + OFFSET approach.
def relation_with_join_lateral
projects_for_lateral = projects.select(:id).to_sql
lateral = filtered_events
.limit(limit_for_join_lateral)
.where('events.project_id = projects_for_lateral.id')
.to_sql
# The outer query does not need to re-apply the filters since the JOIN
# LATERAL body already takes care of this.
outer = base_relation
.from("(#{projects_for_lateral}) projects_for_lateral")
.joins("JOIN LATERAL (#{lateral}) AS #{Event.table_name} ON true")
paginate_events(outer)
end
def filtered_events
@filter ? @filter.apply_filter(base_relation) : base_relation
end
def paginate_events(events)
events.limit(@limit).offset(@offset)
end
def base_relation
# We want to have absolute control over the event queries being built, thus
# we're explicitly opting out of any default scopes that may be set.
Event.unscoped.recent
end
def limit_for_join_lateral
# Applying the OFFSET on the inside of a JOIN LATERAL leads to incorrect
# results. To work around this we need to increase the inner limit for every
# page.
#
# This means that on page 1 we use LIMIT 20, and an outer OFFSET of 0. On
# page 2 we use LIMIT 40 and an outer OFFSET of 20.
@limit + @offset
end
def current_page
(@offset / @limit) + 1
end
def projects
@projects.except(:order)
end
end
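
A usage sketch for EventCollection, tying together the page limit and the limit_for_join_lateral arithmetic (`current_user` is assumed; the SQL in the comment is approximate, not lifted from this merge request):

projects = current_user.authorized_projects

# Page 2 of an activity feed: offset 20 with the default limit of 20.
events = EventCollection.new(projects, limit: 20, offset: 20).to_a

# current_page == 2, so the collection still runs a query. On PostgreSQL 9.3+
# the inner LATERAL query uses LIMIT 40 (limit + offset) per project and the
# outer query applies LIMIT 20 OFFSET 20, roughly:
#
#   SELECT events.*
#   FROM (SELECT id FROM projects ...) projects_for_lateral
#   JOIN LATERAL (
#     SELECT * FROM events
#     WHERE events.project_id = projects_for_lateral.id
#     ORDER BY events.id DESC
#     LIMIT 40
#   ) AS events ON true
#   ORDER BY events.id DESC
#   LIMIT 20 OFFSET 20
#
# On MySQL the same call falls back to the EXISTS-based in_projects scope with
# a plain LIMIT/OFFSET (relation_without_join_lateral).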
# This model is used to replicate events between the old "events" table and the
# new "events_for_migration" table that will replace "events" in GitLab 10.0.
class EventForMigration < ActiveRecord::Base
self.table_name = 'events_for_migration'
end
class PushEvent < Event
# This validation exists so we can't accidentally use PushEvent with a
# different "action" value.
validate :validate_push_action
# Authors are required as they're used to display who pushed data.
#
# We're just validating the presence of the ID here as foreign key constraints
# should ensure the ID points to a valid user.
validates :author_id, presence: true
# The project is required to build links to commits, commit ranges, etc.
#
# We're just validating the presence of the ID here as foreign key constraints
# should ensure the ID points to a valid project.
validates :project_id, presence: true
# The "data" field must not be set for push events since it's not used and a
# waste of space.
validates :data, absence: true
# These fields are also not used for push events, thus storing them would be a
# waste.
validates :target_id, absence: true
validates :target_type, absence: true
def self.sti_name
PUSHED
end
def push?
true
end
def push_with_commits?
!!(commit_from && commit_to)
end
def tag?
return super unless push_event_payload
push_event_payload.tag?
end
def branch?
return super unless push_event_payload
push_event_payload.branch?
end
def valid_push?
return super unless push_event_payload
push_event_payload.ref.present?
end
def new_ref?
return super unless push_event_payload
push_event_payload.created?
end
def rm_ref?
return super unless push_event_payload
push_event_payload.removed?
end
def commit_from
return super unless push_event_payload
push_event_payload.commit_from
end
def commit_to
return super unless push_event_payload
push_event_payload.commit_to
end
def ref_name
return super unless push_event_payload
push_event_payload.ref
end
def ref_type
return super unless push_event_payload
push_event_payload.ref_type
end
def branch_name
return super unless push_event_payload
ref_name
end
def tag_name
return super unless push_event_payload
ref_name
end
def commit_title
return super unless push_event_payload
push_event_payload.commit_title
end
def commit_id
commit_to || commit_from
end
def commits_count
return super unless push_event_payload
push_event_payload.commit_count
end
def validate_push_action
return if action == PUSHED
errors.add(:action, "the action #{action.inspect} is not valid")
end
end
class PushEventPayload < ActiveRecord::Base
include ShaAttribute
belongs_to :event, inverse_of: :push_event_payload
validates :event_id, :commit_count, :action, :ref_type, presence: true
validates :commit_title, length: { maximum: 70 }
sha_attribute :commit_from
sha_attribute :commit_to
enum action: {
created: 0,
removed: 1,
pushed: 2
}
enum ref_type: {
branch: 0,
tag: 1
}
end
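
A small sketch of how PushEvent and PushEventPayload fit together; `project` and `user` are assumed to be existing records, and in production these rows are created by EventCreateService/PushEventPayloadService rather than by hand:

event = PushEvent.create!(project: project, author: user, action: Event::PUSHED)

# `has_one :push_event_payload` on Event provides the usual association builder.
event.create_push_event_payload!(
  commit_count: 1,
  action: :pushed,
  ref_type: :branch,
  ref: 'master',
  commit_to: '0220c11b9a3e6c69dc8fd35321254ca9a7b98f7e'
)

event.branch?     # => true, delegated to the payload's ref_type enum
event.branch_name # => "master"
event.commit_id   # => "0220c11b9a3e6c69dc8fd35321254ca9a7b98f7e" (commit_to, else commit_from)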
@@ -71,7 +71,14 @@ class EventCreateService
   end
 
   def push(project, current_user, push_data)
-    create_event(project, current_user, Event::PUSHED, data: push_data)
+    # We're using an explicit transaction here so that any errors that may occur
+    # when creating push payload data will result in the event creation being
+    # rolled back as well.
+    Event.transaction do
+      event = create_event(project, current_user, Event::PUSHED)
+
+      PushEventPayloadService.new(event, push_data).execute
+    end
 
     Users::ActivityService.new(current_user, 'push').execute
   end
...
# Service class for creating push event payloads as stored in the
# "push_event_payloads" table.
#
# Example:
#
# data = Gitlab::DataBuilder::Push.build(...)
# event = Event.create(...)
#
# PushEventPayloadService.new(event, data).execute
class PushEventPayloadService
# event - The event this push payload belongs to.
# push_data - A Hash produced by `Gitlab::DataBuilder::Push.build` to use for
# building the push payload.
def initialize(event, push_data)
@event = event
@push_data = push_data
end
# Creates and returns a new PushEventPayload row.
#
# This method will raise upon encountering validation errors.
#
# Returns an instance of PushEventPayload.
def execute
@event.build_push_event_payload(
commit_count: commit_count,
action: action,
ref_type: ref_type,
commit_from: commit_from_id,
commit_to: commit_to_id,
ref: trimmed_ref,
commit_title: commit_title,
event_id: @event.id
)
@event.push_event_payload.save!
@event.push_event_payload
end
# Returns the commit title to use.
#
# The commit title is limited to the first line and a maximum of 70
# characters.
def commit_title
commit = @push_data.fetch(:commits).last
return nil unless commit && commit[:message]
raw_msg = commit[:message]
# Find where the first line ends, without turning the entire message into an
# Array of lines (this is a waste of memory for large commit messages).
index = raw_msg.index("\n")
message = index ? raw_msg[0..index] : raw_msg
message.strip.truncate(70)
end
def commit_from_id
if create?
nil
else
revision_before
end
end
def commit_to_id
if remove?
nil
else
revision_after
end
end
def commit_count
@push_data.fetch(:total_commits_count)
end
def ref
@push_data.fetch(:ref)
end
def revision_before
@push_data.fetch(:before)
end
def revision_after
@push_data.fetch(:after)
end
def trimmed_ref
Gitlab::Git.ref_name(ref)
end
def create?
Gitlab::Git.blank_ref?(revision_before)
end
def remove?
Gitlab::Git.blank_ref?(revision_after)
end
def action
if create?
:created
elsif remove?
:removed
else
:pushed
end
end
def ref_type
if Gitlab::Git.tag_ref?(ref)
:tag
else
:branch
end
end
end
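
A worked example of how the service derives its attributes; `event` is assumed to be a persisted push event, and the hash below only mimics the keys the service reads from Gitlab::DataBuilder::Push.build:

push_data = {
  before: '0' * 40, # blank ref, so this push created the branch
  after: '0220c11b9a3e6c69dc8fd35321254ca9a7b98f7e',
  ref: 'refs/heads/new_design',
  commits: [{ message: "Add new design\n\nLonger description" }],
  total_commits_count: 1
}

payload = PushEventPayloadService.new(event, push_data).execute

payload.action       # => "created" (before is the blank ref)
payload.ref_type     # => "branch"  (ref does not start with refs/tags/)
payload.ref          # => "new_design"
payload.commit_from  # => nil, nothing to diff against on a newly created ref
payload.commit_title # => "Add new design" (first line, truncated to 70 characters)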
 %li.commit
   .commit-row-title
-    = link_to truncate_sha(commit[:id]), project_commit_path(project, commit[:id]), class: "commit-sha", alt: '', title: truncate_sha(commit[:id])
+    = link_to truncate_sha(event.commit_id), project_commit_path(project, event.commit_id), class: "commit-sha", alt: '', title: truncate_sha(event.commit_id)
     &middot;
-    = markdown event_commit_title(commit[:message]), project: project, pipeline: :single_line, author: event.author
+    = markdown event_commit_title(event.commit_title), project: project, pipeline: :single_line, author: event.author
...
 %div{ xmlns: "http://www.w3.org/1999/xhtml" }
-  - event.commits.first(15).each do |commit|
-    %p
-      %strong= commit[:author][:name]
-      = link_to "(##{truncate_sha(commit[:id])})", project_commit_path(event.project, id: commit[:id])
-      %i
-        at
-        = commit[:timestamp].to_time.to_s(:short)
-      %blockquote= markdown(escape_once(commit[:message]), pipeline: :atom, project: event.project, author: event.author)
-  - if event.commits_count > 15
+  %p
+    %strong= event.author_name
+    = link_to "(#{truncate_sha(event.commit_id)})", project_commit_path(event.project, event.commit_id)
+    %i
+      at
+      = event.created_at.to_s(:short)
+    %blockquote= markdown(escape_once(event.commit_title), pipeline: :atom, project: event.project, author: event.author)
+  - if event.commits_count > 1
     %p
       %i
         \... and
-        = pluralize(event.commits_count - 15, "more commit")
+        = pluralize(event.commits_count - 1, "more commit")
@@ -14,9 +14,7 @@
 - if event.push_with_commits?
   .event-body
     %ul.well-list.event_commits
-      - few_commits = event.commits[0...2]
-      - few_commits.each do |commit|
-        = render "events/commit", commit: commit, project: project, event: event
+      = render "events/commit", project: project, event: event
 
 - create_mr = event.new_ref? && create_mr_button?(project.default_branch, event.ref_name, project) && event.authored_by?(current_user)
 - if event.commits_count > 1
@@ -44,9 +42,6 @@
       = link_to create_mr_path(project.default_branch, event.ref_name, project) do
         Create Merge Request
 - elsif event.rm_ref?
-  - repository = project.repository
-  - last_commit = repository.commit(event.commit_from)
-  - if last_commit
-    .event-body
-      %ul.well-list.event_commits
-        = render "events/commit", commit: last_commit, project: project, event: event
+  .event-body
+    %ul.well-list.event_commits
+      = render "events/commit", project: project, event: event
---
title: Migrate events into a new format to reduce the storage necessary and improve performance
merge_request:
author:
---
title: Use a specialized class for querying events to improve performance
merge_request:
author:
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class PrepareEventsTableForPushEventsMigration < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
disable_ddl_transaction!
def up
# The order of these columns is deliberate and results in the following
# columns and sizes:
#
# * id (4 bytes)
# * project_id (4 bytes)
# * author_id (4 bytes)
# * target_id (4 bytes)
# * created_at (8 bytes)
# * updated_at (8 bytes)
# * action (2 bytes)
# * target_type (variable)
#
# Unfortunately we can't make the "id" column a bigint/bigserial as Rails 4
# does not support this properly.
create_table :events_for_migration do |t|
t.references :project,
index: true,
foreign_key: { on_delete: :cascade }
t.integer :author_id, index: true, null: false
t.integer :target_id
t.timestamps_with_timezone null: false
t.integer :action, null: false, limit: 2, index: true
t.string :target_type
t.index %i[target_type target_id]
end
# t.references doesn't like it when the column name doesn't match the table
# name so we have to add the foreign key separately.
add_concurrent_foreign_key(:events_for_migration, :users, column: :author_id)
end
def down
drop_table :events_for_migration
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class CreatePushEventPayloadsTables < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
disable_ddl_transaction!
def up
create_table :push_event_payloads, id: false do |t|
t.bigint :commit_count, null: false
t.integer :event_id, null: false
t.integer :action, null: false, limit: 2
t.integer :ref_type, null: false, limit: 2
t.binary :commit_from
t.binary :commit_to
t.text :ref
t.string :commit_title, limit: 70
t.index :event_id, unique: true
end
# We're adding a foreign key to the _shadow_ table, and this is deliberate.
# By using the shadow table we don't have to recreate/revalidate this
# foreign key after swapping the "events_for_migration" and "events" tables.
#
# The "events_for_migration" table has a foreign key to "projects.id"
# ensuring that project removals also remove events from the shadow table
# (and thus also from this table).
add_concurrent_foreign_key(
:push_event_payloads,
:events_for_migration,
column: :event_id
)
end
def down
drop_table :push_event_payloads
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddIndexOnEventsProjectIdId < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
COLUMNS = %i[project_id id].freeze
TABLES = %i[events events_for_migration].freeze
disable_ddl_transaction!
def up
TABLES.each do |table|
add_concurrent_index(table, COLUMNS) unless index_exists?(table, COLUMNS)
# We remove the index _after_ adding the new one since MySQL doesn't let
# you remove an index when a foreign key exists for the same column.
if index_exists?(table, :project_id)
remove_concurrent_index(table, :project_id)
end
end
end
def down
TABLES.each do |table|
unless index_exists?(table, :project_id)
add_concurrent_index(table, :project_id)
end
unless index_exists?(table, COLUMNS)
remove_concurrent_index(table, COLUMNS)
end
end
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class ScheduleEventMigrations < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
BUFFER_SIZE = 1000
disable_ddl_transaction!
class Event < ActiveRecord::Base
include EachBatch
self.table_name = 'events'
end
def up
jobs = []
Event.each_batch(of: 1000) do |relation|
min, max = relation.pluck('MIN(id), MAX(id)').first
if jobs.length == BUFFER_SIZE
# We push multiple jobs at a time to reduce the time spent in
# Sidekiq/Redis operations. We're using this buffer based approach so we
# don't need to run additional queries for every range.
BackgroundMigrationWorker.perform_bulk(jobs)
jobs.clear
end
jobs << ['MigrateEventsToPushEventPayloads', [min, max]]
end
BackgroundMigrationWorker.perform_bulk(jobs) unless jobs.empty?
end
def down
end
end
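
As a rough worked example of what the buffering above amounts to (illustrative numbers, using the constants in this migration):

# Each buffered job covers one each_batch slice of up to 1,000 events, and the
# buffer is flushed once BUFFER_SIZE (1,000) jobs have accumulated:
#
#   1_000 events per job * 1_000 jobs per flush
#   #=> up to 1_000_000 events scheduled per BackgroundMigrationWorker.perform_bulk call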
@@ -530,10 +530,25 @@ ActiveRecord::Schema.define(version: 20170809142252) do
   add_index "events", ["action"], name: "index_events_on_action", using: :btree
   add_index "events", ["author_id"], name: "index_events_on_author_id", using: :btree
   add_index "events", ["created_at"], name: "index_events_on_created_at", using: :btree
-  add_index "events", ["project_id"], name: "index_events_on_project_id", using: :btree
+  add_index "events", ["project_id", "id"], name: "index_events_on_project_id_and_id", using: :btree
   add_index "events", ["target_id"], name: "index_events_on_target_id", using: :btree
   add_index "events", ["target_type"], name: "index_events_on_target_type", using: :btree
 
+  create_table "events_for_migration", force: :cascade do |t|
+    t.integer "project_id"
+    t.integer "author_id", null: false
+    t.integer "target_id"
+    t.datetime "created_at", null: false
+    t.datetime "updated_at", null: false
+    t.integer "action", limit: 2, null: false
+    t.string "target_type"
+  end
+
+  add_index "events_for_migration", ["action"], name: "index_events_for_migration_on_action", using: :btree
+  add_index "events_for_migration", ["author_id"], name: "index_events_for_migration_on_author_id", using: :btree
+  add_index "events_for_migration", ["project_id", "id"], name: "index_events_for_migration_on_project_id_and_id", using: :btree
+  add_index "events_for_migration", ["target_type", "target_id"], name: "index_events_for_migration_on_target_type_and_target_id", using: :btree
+
   create_table "feature_gates", force: :cascade do |t|
     t.string "feature_key", null: false
     t.string "key", null: false
@@ -1254,6 +1269,19 @@ ActiveRecord::Schema.define(version: 20170809142252) do
   add_index "protected_tags", ["project_id"], name: "index_protected_tags_on_project_id", using: :btree
 
+  create_table "push_event_payloads", id: false, force: :cascade do |t|
+    t.integer "commit_count", limit: 8, null: false
+    t.integer "event_id", null: false
+    t.integer "action", limit: 2, null: false
+    t.integer "ref_type", limit: 2, null: false
+    t.binary "commit_from"
+    t.binary "commit_to"
+    t.text "ref"
+    t.string "commit_title", limit: 70
+  end
+
+  add_index "push_event_payloads", ["event_id"], name: "index_push_event_payloads_on_event_id", unique: true, using: :btree
+
   create_table "redirect_routes", force: :cascade do |t|
     t.integer "source_id", null: false
     t.string "source_type", null: false
@@ -1654,6 +1682,8 @@ ActiveRecord::Schema.define(version: 20170809142252) do
   add_foreign_key "deployments", "projects", name: "fk_b9a3851b82", on_delete: :cascade
   add_foreign_key "environments", "projects", name: "fk_d1c8c1da6a", on_delete: :cascade
   add_foreign_key "events", "projects", name: "fk_0434b48643", on_delete: :cascade
+  add_foreign_key "events_for_migration", "projects", on_delete: :cascade
+  add_foreign_key "events_for_migration", "users", column: "author_id", name: "fk_edfd187b6f", on_delete: :cascade
   add_foreign_key "forked_project_links", "projects", column: "forked_to_project_id", name: "fk_434510edb0", on_delete: :cascade
   add_foreign_key "gpg_keys", "users", on_delete: :cascade
   add_foreign_key "gpg_signatures", "gpg_keys", on_delete: :nullify
@@ -1696,6 +1726,7 @@ ActiveRecord::Schema.define(version: 20170809142252) do
   add_foreign_key "protected_tag_create_access_levels", "protected_tags"
   add_foreign_key "protected_tag_create_access_levels", "users"
   add_foreign_key "protected_tags", "projects", name: "fk_8e4af87648", on_delete: :cascade
+  add_foreign_key "push_event_payloads", "events_for_migration", column: "event_id", name: "fk_36c74129da", on_delete: :cascade
   add_foreign_key "releases", "projects", name: "fk_47fe2a0596", on_delete: :cascade
   add_foreign_key "services", "projects", name: "fk_71cce407f9", on_delete: :cascade
   add_foreign_key "snippets", "projects", name: "fk_be41fd4bb7", on_delete: :cascade
...
@@ -79,7 +79,6 @@ Example response:
     "target_id":160,
     "target_type":"Issue",
     "author_id":25,
-    "data":null,
     "target_title":"Qui natus eos odio tempore et quaerat consequuntur ducimus cupiditate quis.",
     "created_at":"2017-02-09T10:43:19.667Z",
     "author":{
@@ -99,7 +98,6 @@ Example response:
     "target_id":159,
     "target_type":"Issue",
     "author_id":21,
-    "data":null,
     "target_title":"Nostrum enim non et sed optio illo deleniti non.",
     "created_at":"2017-02-09T10:43:19.426Z",
     "author":{
@@ -151,7 +149,6 @@ Example response:
     "target_id": 830,
     "target_type": "Issue",
     "author_id": 1,
-    "data": null,
     "target_title": "Public project search field",
     "author": {
       "name": "Dmitriy Zaporozhets",
@@ -166,7 +163,7 @@ Example response:
   {
     "title": null,
     "project_id": 15,
-    "action_name": "opened",
+    "action_name": "pushed",
     "target_id": null,
     "target_type": null,
     "author_id": 1,
@@ -179,31 +176,14 @@ Example response:
       "web_url": "http://localhost:3000/root"
     },
     "author_username": "john",
-    "data": {
-      "before": "50d4420237a9de7be1304607147aec22e4a14af7",
-      "after": "c5feabde2d8cd023215af4d2ceeb7a64839fc428",
-      "ref": "refs/heads/master",
-      "user_id": 1,
-      "user_name": "Dmitriy Zaporozhets",
-      "repository": {
-        "name": "gitlabhq",
-        "url": "git@dev.gitlab.org:gitlab/gitlabhq.git",
-        "description": "GitLab: self hosted Git management software. \r\nDistributed under the MIT License.",
-        "homepage": "https://dev.gitlab.org/gitlab/gitlabhq"
-      },
-      "commits": [
-        {
-          "id": "c5feabde2d8cd023215af4d2ceeb7a64839fc428",
-          "message": "Add simple search to projects in public area",
-          "timestamp": "2013-05-13T18:18:08+00:00",
-          "url": "https://dev.gitlab.org/gitlab/gitlabhq/commit/c5feabde2d8cd023215af4d2ceeb7a64839fc428",
-          "author": {
-            "name": "Dmitriy Zaporozhets",
-            "email": "dmitriy.zaporozhets@gmail.com"
-          }
-        }
-      ],
-      "total_commits_count": 1
+    "push_data": {
+      "commit_count": 1,
+      "action": "pushed",
+      "ref_type": "branch",
+      "commit_from": "50d4420237a9de7be1304607147aec22e4a14af7",
+      "commit_to": "c5feabde2d8cd023215af4d2ceeb7a64839fc428",
+      "ref": "master",
+      "commit_title": "Add simple search to projects in public area"
     },
     "target_title": null
   },
@@ -214,7 +194,6 @@ Example response:
     "target_id": 840,
     "target_type": "Issue",
     "author_id": 1,
-    "data": null,
     "target_title": "Finish & merge Code search PR",
     "author": {
       "name": "Dmitriy Zaporozhets",
@@ -233,7 +212,6 @@ Example response:
     "target_id": 1312,
     "target_type": "Note",
     "author_id": 1,
-    "data": null,
     "target_title": null,
     "created_at": "2015-12-04T10:33:58.089Z",
     "note": {
@@ -305,7 +283,6 @@ Example response:
     "target_iid":160,
     "target_type":"Issue",
    "author_id":25,
-    "data":null,
     "target_title":"Qui natus eos odio tempore et quaerat consequuntur ducimus cupiditate quis.",
     "created_at":"2017-02-09T10:43:19.667Z",
     "author":{
@@ -326,7 +303,6 @@ Example response:
     "target_iid":159,
     "target_type":"Issue",
     "author_id":21,
-    "data":null,
     "target_title":"Nostrum enim non et sed optio illo deleniti non.",
     "created_at":"2017-02-09T10:43:19.426Z",
     "author":{
...
@@ -71,28 +71,14 @@ module SharedProject
   step 'project "Shop" has push event' do
     @project = Project.find_by(name: "Shop")
 
-    data = {
-      before: Gitlab::Git::BLANK_SHA,
-      after: "6d394385cf567f80a8fd85055db1ab4c5295806f",
-      ref: "refs/heads/fix",
-      user_id: @user.id,
-      user_name: @user.name,
-      repository: {
-        name: @project.name,
-        url: "localhost/rubinius",
-        description: "",
-        homepage: "localhost/rubinius",
-        private: true
-      }
-    }
-
-    @event = Event.create(
-      project: @project,
-      action: Event::PUSHED,
-      data: data,
-      author_id: @user.id
-    )
+    @event = create(:push_event, project: @project, author: @user)
+
+    create(:push_event_payload,
+           event: @event,
+           action: :created,
+           commit_to: '6d394385cf567f80a8fd85055db1ab4c5295806f',
+           ref: 'fix',
+           commit_count: 1)
   end
 
   step 'I should see project "Shop" activity feed' do
...
@@ -497,14 +497,24 @@ module API
       expose :author, using: Entities::UserBasic
     end
 
+    class PushEventPayload < Grape::Entity
+      expose :commit_count, :action, :ref_type, :commit_from, :commit_to
+      expose :ref, :commit_title
+    end
+
     class Event < Grape::Entity
       expose :title, :project_id, :action_name
       expose :target_id, :target_iid, :target_type, :author_id
-      expose :data, :target_title
+      expose :target_title
       expose :created_at
       expose :note, using: Entities::Note, if: ->(event, options) { event.note? }
       expose :author, using: Entities::UserBasic, if: ->(event, options) { event.author }
 
+      expose :push_event_payload,
+        as: :push_data,
+        using: PushEventPayload,
+        if: -> (event, _) { event.push? }
+
       expose :author_username do |event, options|
         event.author&.username
       end
...
@@ -25,14 +25,24 @@ module API
       expose(:downvote?) { |note| false }
     end
 
+    class PushEventPayload < Grape::Entity
+      expose :commit_count, :action, :ref_type, :commit_from, :commit_to
+      expose :ref, :commit_title
+    end
+
     class Event < Grape::Entity
      expose :title, :project_id, :action_name
       expose :target_id, :target_type, :author_id
-      expose :data, :target_title
+      expose :target_title
       expose :created_at
       expose :note, using: Entities::Note, if: ->(event, options) { event.note? }
       expose :author, using: ::API::Entities::UserBasic, if: ->(event, options) { event.author }
 
+      expose :push_event_payload,
+        as: :push_data,
+        using: PushEventPayload,
+        if: -> (event, _) { event.push? }
+
       expose :author_username do |event, options|
         event.author&.username
       end
...
module Gitlab
module BackgroundMigration
# Class that migrates events for the new push event payloads setup. All
# events are copied to a shadow table, and push events will also have a row
# created in the push_event_payloads table.
class MigrateEventsToPushEventPayloads
class Event < ActiveRecord::Base
self.table_name = 'events'
serialize :data
BLANK_REF = ('0' * 40).freeze
TAG_REF_PREFIX = 'refs/tags/'.freeze
MAX_INDEX = 69
PUSHED = 5
def push_event?
action == PUSHED && data.present?
end
def commit_title
commit = commits.last
return nil unless commit && commit[:message]
index = commit[:message].index("\n")
message = index ? commit[:message][0..index] : commit[:message]
message.strip.truncate(70)
end
def commit_from_sha
if create?
nil
else
data[:before]
end
end
def commit_to_sha
if remove?
nil
else
data[:after]
end
end
def data
super || {}
end
def commits
data[:commits] || []
end
def commit_count
data[:total_commits_count] || 0
end
def ref
data[:ref]
end
def trimmed_ref_name
if ref_type == :tag
ref[10..-1]
else
ref[11..-1]
end
end
def create?
data[:before] == BLANK_REF
end
def remove?
data[:after] == BLANK_REF
end
def push_action
if create?
:created
elsif remove?
:removed
else
:pushed
end
end
def ref_type
if ref.start_with?(TAG_REF_PREFIX)
:tag
else
:branch
end
end
end
class EventForMigration < ActiveRecord::Base
self.table_name = 'events_for_migration'
end
class PushEventPayload < ActiveRecord::Base
self.table_name = 'push_event_payloads'
enum action: {
created: 0,
removed: 1,
pushed: 2
}
enum ref_type: {
branch: 0,
tag: 1
}
end
# start_id - The start ID of the range of events to process
# end_id - The end ID of the range to process.
def perform(start_id, end_id)
return unless migrate?
find_events(start_id, end_id).each { |event| process_event(event) }
end
def process_event(event)
replicate_event(event)
create_push_event_payload(event) if event.push_event?
end
def replicate_event(event)
new_attributes = event.attributes
.with_indifferent_access.except(:title, :data)
EventForMigration.create!(new_attributes)
rescue ActiveRecord::InvalidForeignKey
# A foreign key error means the associated event was removed. In this
# case we'll just skip migrating the event.
end
def create_push_event_payload(event)
commit_from = pack(event.commit_from_sha)
commit_to = pack(event.commit_to_sha)
PushEventPayload.create!(
event_id: event.id,
commit_count: event.commit_count,
ref_type: event.ref_type,
action: event.push_action,
commit_from: commit_from,
commit_to: commit_to,
ref: event.trimmed_ref_name,
commit_title: event.commit_title
)
rescue ActiveRecord::InvalidForeignKey
# A foreign key error means the associated event was removed. In this
# case we'll just skip migrating the event.
end
def find_events(start_id, end_id)
Event
.where('NOT EXISTS (SELECT true FROM events_for_migration WHERE events_for_migration.id = events.id)')
.where(id: start_id..end_id)
end
def migrate?
Event.table_exists? && PushEventPayload.table_exists? &&
EventForMigration.table_exists?
end
def pack(value)
value ? [value].pack('H*') : nil
end
end
end
end
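
A quick illustrative check of what the `pack` helper produces; this is plain Ruby and mirrors the sha_attribute handling on PushEventPayload:

sha    = '156e0e9adc587a383a7eeb5b21ddecb9044768a8' # 40 hex characters
packed = [sha].pack('H*')                           # same transformation as #pack above

packed.bytesize           # => 20, matching the binary commit_from/commit_to columns
packed.unpack('H*').first # => the original 40-character SHA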
@@ -25,6 +25,10 @@ module Gitlab
       database_version.match(/\A(?:PostgreSQL |)([^\s]+).*\z/)[1]
     end
 
+    def self.join_lateral_supported?
+      postgresql? && version.to_f >= 9.3
+    end
+
     def self.nulls_last_order(field, direction = 'ASC')
       order = "#{field} #{direction}"
...
@@ -3,18 +3,22 @@ project_tree:
   - labels:
     :priorities
   - milestones:
-    - :events
+    - events:
+      - :push_event_payload
   - issues:
-    - :events
+    - events:
+      - :push_event_payload
     - :timelogs
     - notes:
       - :author
-      - :events
+      - events:
+        - :push_event_payload
     - label_links:
       - label:
         :priorities
     - milestone:
-      - :events
+      - events:
+        - :push_event_payload
   - snippets:
     - :award_emoji
     - notes:
@@ -25,21 +29,25 @@ project_tree:
   - merge_requests:
     - notes:
       - :author
-      - :events
+      - events:
+        - :push_event_payload
     - merge_request_diff:
       - :merge_request_diff_commits
      - :merge_request_diff_files
-    - :events
+    - events:
+      - :push_event_payload
     - :timelogs
     - label_links:
       - label:
         :priorities
     - milestone:
-      - :events
+      - events:
+        - :push_event_payload
   - pipelines:
     - notes:
       - :author
-      - :events
+      - events:
+        - :push_event_payload
     - :stages
     - :statuses
     - :triggers
@@ -107,6 +115,8 @@ excluded_attributes:
   statuses:
     - :trace
     - :token
+  push_event_payload:
+    - :event_id
 
 methods:
   labels:
...
@@ -92,8 +92,14 @@ describe UsersController do
     before do
       sign_in(user)
       project.team << [user, :developer]
-      EventCreateService.new.push(project, user, [])
-      EventCreateService.new.push(forked_project, user, [])
+
+      push_data = Gitlab::DataBuilder::Push.build_sample(project, user)
+
+      fork_push_data = Gitlab::DataBuilder::Push
+        .build_sample(forked_project, user)
+
+      EventCreateService.new.push(project, user, push_data)
+      EventCreateService.new.push(forked_project, user, fork_push_data)
     end
 
     it 'includes forked projects' do
...
@@ -2,6 +2,7 @@ FactoryGirl.define do
   factory :event do
     project
     author factory: :user
+    action Event::JOINED
 
     trait(:created) { action Event::CREATED }
     trait(:updated) { action Event::UPDATED }
@@ -20,4 +21,19 @@ FactoryGirl.define do
       target factory: :closed_issue
     end
   end
+
+  factory :push_event, class: PushEvent do
+    project factory: :project_empty_repo
+    author factory: :user
+    action Event::PUSHED
+  end
+
+  factory :push_event_payload do
+    event
+    commit_count 1
+    action :pushed
+    ref_type :branch
+    ref 'master'
+    commit_to '3cdce97ed87c91368561584e7358f4d46e3e173c'
+  end
 end
@@ -42,14 +42,14 @@ feature 'Contributions Calendar', :js do
   end
 
   def push_code_contribution
-    push_params = {
-      project: contributed_project,
-      action: Event::PUSHED,
-      author_id: user.id,
-      data: { commit_count: 3 }
-    }
-
-    Event.create(push_params)
+    event = create(:push_event, project: contributed_project, author: user)
+
+    create(:push_event_payload,
+           event: event,
+           commit_from: '11f9ac0a48b62cef25eedede4c1819964f08d5ce',
+           commit_to: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
+           commit_count: 3,
+           ref: 'master')
   end
 
   def note_comment_contribution
...
@@ -23,27 +23,19 @@ feature 'Dashboard > Activity' do
     create(:merge_request, author: user, source_project: project, target_project: project)
   end
 
-  let(:push_event_data) do
-    {
-      before: Gitlab::Git::BLANK_SHA,
-      after: '0220c11b9a3e6c69dc8fd35321254ca9a7b98f7e',
-      ref: 'refs/heads/new_design',
-      user_id: user.id,
-      user_name: user.name,
-      repository: {
-        name: project.name,
-        url: 'localhost/rubinius',
-        description: '',
-        homepage: 'localhost/rubinius',
-        private: true
-      }
-    }
-  end
-
   let(:note) { create(:note, project: project, noteable: merge_request) }
 
   let!(:push_event) do
-    create(:event, :pushed, data: push_event_data, project: project, author: user)
+    event = create(:push_event, project: project, author: user)
+
+    create(:push_event_payload,
+           event: event,
+           action: :created,
+           commit_to: '0220c11b9a3e6c69dc8fd35321254ca9a7b98f7e',
+           ref: 'new_design',
+           commit_count: 1)
+
+    event
   end
 
   let!(:merged_event) do
...
@@ -14,8 +14,8 @@ describe ContributedProjectsFinder do
     private_project.add_developer(current_user)
     public_project.add_master(source_user)
 
-    create(:event, :pushed, project: public_project, target: public_project, author: source_user)
-    create(:event, :pushed, project: private_project, target: private_project, author: source_user)
+    create(:push_event, project: public_project, author: source_user)
+    create(:push_event, project: private_project, author: source_user)
   end
 
   describe 'without a current user' do
...
@@ -5,7 +5,7 @@ describe EventFilter do
   let(:source_user) { create(:user) }
   let!(:public_project) { create(:project, :public) }
 
-  let!(:push_event) { create(:event, :pushed, project: public_project, target: public_project, author: source_user) }
+  let!(:push_event) { create(:push_event, project: public_project, author: source_user) }
   let!(:merged_event) { create(:event, :merged, project: public_project, target: public_project, author: source_user) }
   let!(:created_event) { create(:event, :created, project: public_project, target: public_project, author: source_user) }
   let!(:updated_event) { create(:event, :updated, project: public_project, target: public_project, author: source_user) }
...
require 'spec_helper'
describe Gitlab::BackgroundMigration::MigrateEventsToPushEventPayloads::Event do
describe '#commit_title' do
it 'returns nil when there are no commits' do
expect(described_class.new.commit_title).to be_nil
end
it 'returns nil when there are commits without commit messages' do
event = described_class.new
allow(event).to receive(:commits).and_return([{ id: '123' }])
expect(event.commit_title).to be_nil
end
it 'returns the commit message when it is less than 70 characters long' do
event = described_class.new
allow(event).to receive(:commits).and_return([{ message: 'Hello world' }])
expect(event.commit_title).to eq('Hello world')
end
it 'returns the first line of a commit message if multiple lines are present' do
event = described_class.new
allow(event).to receive(:commits).and_return([{ message: "Hello\n\nworld" }])
expect(event.commit_title).to eq('Hello')
end
it 'truncates the commit to 70 characters when it is too long' do
event = described_class.new
allow(event).to receive(:commits).and_return([{ message: 'a' * 100 }])
expect(event.commit_title).to eq(('a' * 67) + '...')
end
end
describe '#commit_from_sha' do
it 'returns nil when pushing to a new ref' do
event = described_class.new
allow(event).to receive(:create?).and_return(true)
expect(event.commit_from_sha).to be_nil
end
it 'returns the ID of the first commit when pushing to an existing ref' do
event = described_class.new
allow(event).to receive(:create?).and_return(false)
allow(event).to receive(:data).and_return(before: '123')
expect(event.commit_from_sha).to eq('123')
end
end
describe '#commit_to_sha' do
it 'returns nil when removing an existing ref' do
event = described_class.new
allow(event).to receive(:remove?).and_return(true)
expect(event.commit_to_sha).to be_nil
end
it 'returns the ID of the last commit when pushing to an existing ref' do
event = described_class.new
allow(event).to receive(:remove?).and_return(false)
allow(event).to receive(:data).and_return(after: '123')
expect(event.commit_to_sha).to eq('123')
end
end
describe '#data' do
it 'returns the deserialized data' do
event = described_class.new(data: { before: '123' })
expect(event.data).to eq(before: '123')
end
it 'returns an empty hash when no data is present' do
event = described_class.new
expect(event.data).to eq({})
end
end
describe '#commits' do
it 'returns an Array of commits' do
event = described_class.new(data: { commits: [{ id: '123' }] })
expect(event.commits).to eq([{ id: '123' }])
end
it 'returns an empty array when no data is present' do
event = described_class.new
expect(event.commits).to eq([])
end
end
describe '#commit_count' do
it 'returns the number of commits' do
event = described_class.new(data: { total_commits_count: 2 })
expect(event.commit_count).to eq(2)
end
it 'returns 0 when no data is present' do
event = described_class.new
expect(event.commit_count).to eq(0)
end
end
describe '#ref' do
it 'returns the name of the ref' do
event = described_class.new(data: { ref: 'refs/heads/master' })
expect(event.ref).to eq('refs/heads/master')
end
end
describe '#trimmed_ref_name' do
it 'returns the trimmed ref name for a branch' do
event = described_class.new(data: { ref: 'refs/heads/master' })
expect(event.trimmed_ref_name).to eq('master')
end
it 'returns the trimmed ref name for a tag' do
event = described_class.new(data: { ref: 'refs/tags/v1.2' })
expect(event.trimmed_ref_name).to eq('v1.2')
end
end
describe '#create?' do
it 'returns true when creating a new ref' do
event = described_class.new(data: { before: described_class::BLANK_REF })
expect(event.create?).to eq(true)
end
it 'returns false when pushing to an existing ref' do
event = described_class.new(data: { before: '123' })
expect(event.create?).to eq(false)
end
end
describe '#remove?' do
it 'returns true when removing an existing ref' do
event = described_class.new(data: { after: described_class::BLANK_REF })
expect(event.remove?).to eq(true)
end
it 'returns false when pushing to an existing ref' do
event = described_class.new(data: { after: '123' })
expect(event.remove?).to eq(false)
end
end
describe '#push_action' do
let(:event) { described_class.new }
it 'returns :created when creating a new ref' do
allow(event).to receive(:create?).and_return(true)
expect(event.push_action).to eq(:created)
end
it 'returns :removed when removing an existing ref' do
allow(event).to receive(:create?).and_return(false)
allow(event).to receive(:remove?).and_return(true)
expect(event.push_action).to eq(:removed)
end
it 'returns :pushed when pushing to an existing ref' do
allow(event).to receive(:create?).and_return(false)
allow(event).to receive(:remove?).and_return(false)
expect(event.push_action).to eq(:pushed)
end
end
describe '#ref_type' do
let(:event) { described_class.new }
it 'returns :tag for a tag' do
allow(event).to receive(:ref).and_return('refs/tags/1.2')
expect(event.ref_type).to eq(:tag)
end
it 'returns :branch for a branch' do
allow(event).to receive(:ref).and_return('refs/heads/1.2')
expect(event.ref_type).to eq(:branch)
end
end
end
describe Gitlab::BackgroundMigration::MigrateEventsToPushEventPayloads do
let(:migration) { described_class.new }
let(:project) { create(:project_empty_repo) }
let(:author) { create(:user) }
# We can not rely on FactoryGirl as the state of Event may change in ways that
# the background migration does not expect, hence we use the Event class of
# the migration itself.
def create_push_event(project, author, data = nil)
klass = Gitlab::BackgroundMigration::MigrateEventsToPushEventPayloads::Event
klass.create!(
action: klass::PUSHED,
project_id: project.id,
author_id: author.id,
data: data
)
end
# The background migration relies on a temporary table, hence we're migrating
# to a specific version of the database where said table is still present.
before :all do
ActiveRecord::Migration.verbose = false
ActiveRecord::Migrator
.migrate(ActiveRecord::Migrator.migrations_paths, 20170608152748)
end
after :all do
ActiveRecord::Migrator.migrate(ActiveRecord::Migrator.migrations_paths)
ActiveRecord::Migration.verbose = true
end
describe '#perform' do
it 'returns if data should not be migrated' do
allow(migration).to receive(:migrate?).and_return(false)
expect(migration).not_to receive(:find_events)
migration.perform(1, 10)
end
it 'migrates the range of events if data is to be migrated' do
event1 = create_push_event(project, author, { commits: [] })
event2 = create_push_event(project, author, { commits: [] })
allow(migration).to receive(:migrate?).and_return(true)
expect(migration).to receive(:process_event).twice
migration.perform(event1.id, event2.id)
end
end
describe '#process_event' do
it 'processes a regular event' do
event = double(:event, push_event?: false)
expect(migration).to receive(:replicate_event)
expect(migration).not_to receive(:create_push_event_payload)
migration.process_event(event)
end
it 'processes a push event' do
event = double(:event, push_event?: true)
expect(migration).to receive(:replicate_event)
expect(migration).to receive(:create_push_event_payload)
migration.process_event(event)
end
end
describe '#replicate_event' do
it 'replicates the event to the "events_for_migration" table' do
event = create_push_event(
project,
author,
data: { commits: [] },
title: 'bla'
)
attributes = event
.attributes.with_indifferent_access.except(:title, :data)
expect(described_class::EventForMigration)
.to receive(:create!)
.with(attributes)
migration.replicate_event(event)
end
end
describe '#create_push_event_payload' do
let(:push_data) do
{
commits: [],
ref: 'refs/heads/master',
before: '156e0e9adc587a383a7eeb5b21ddecb9044768a8',
after: '0' * 40,
total_commits_count: 1
}
end
let(:event) do
create_push_event(project, author, push_data)
end
before do
# The foreign key in push_event_payloads at this point points to the
# "events_for_migration" table so we need to make sure a row exists in
# said table.
migration.replicate_event(event)
end
it 'creates a push event payload for an event' do
payload = migration.create_push_event_payload(event)
expect(PushEventPayload.count).to eq(1)
expect(payload.valid?).to eq(true)
end
it 'does not create push event payloads for removed events' do
allow(event).to receive(:id).and_return(-1)
payload = migration.create_push_event_payload(event)
expect(payload).to be_nil
expect(PushEventPayload.count).to eq(0)
end
it 'encodes and decodes the commit IDs from and to binary data' do
payload = migration.create_push_event_payload(event)
packed = migration.pack(push_data[:before])
expect(payload.commit_from).to eq(packed)
expect(payload.commit_to).to be_nil
end
end
describe '#find_events' do
it 'returns the events for the given ID range' do
event1 = create_push_event(project, author, { commits: [] })
event2 = create_push_event(project, author, { commits: [] })
event3 = create_push_event(project, author, { commits: [] })
events = migration.find_events(event1.id, event2.id)
expect(events.length).to eq(2)
expect(events.pluck(:id)).not_to include(event3.id)
end
end
describe '#migrate?' do
it 'returns true when data should be migrated' do
allow(described_class::Event)
.to receive(:table_exists?).and_return(true)
allow(described_class::PushEventPayload)
.to receive(:table_exists?).and_return(true)
allow(described_class::EventForMigration)
.to receive(:table_exists?).and_return(true)
expect(migration.migrate?).to eq(true)
end
it 'returns false if the "events" table does not exist' do
allow(described_class::Event)
.to receive(:table_exists?).and_return(false)
expect(migration.migrate?).to eq(false)
end
it 'returns false if the "push_event_payloads" table does not exist' do
allow(described_class::Event)
.to receive(:table_exists?).and_return(true)
allow(described_class::PushEventPayload)
.to receive(:table_exists?).and_return(false)
expect(migration.migrate?).to eq(false)
end
it 'returns false when the "events_for_migration" table does not exist' do
allow(described_class::Event)
.to receive(:table_exists?).and_return(true)
allow(described_class::PushEventPayload)
.to receive(:table_exists?).and_return(true)
allow(described_class::EventForMigration)
.to receive(:table_exists?).and_return(false)
expect(migration.migrate?).to eq(false)
end
end
describe '#pack' do
it 'packs a SHA1 into a 20 byte binary string' do
packed = migration.pack('156e0e9adc587a383a7eeb5b21ddecb9044768a8')
expect(packed.bytesize).to eq(20)
end
it 'returns nil if the input value is nil' do
expect(migration.pack(nil)).to be_nil
end
end
end
...@@ -51,6 +51,28 @@ describe Gitlab::Database do ...@@ -51,6 +51,28 @@ describe Gitlab::Database do
end end
end end
describe '.join_lateral_supported?' do
it 'returns false when using MySQL' do
allow(described_class).to receive(:postgresql?).and_return(false)
expect(described_class.join_lateral_supported?).to eq(false)
end
it 'returns false when using PostgreSQL 9.2' do
allow(described_class).to receive(:postgresql?).and_return(true)
allow(described_class).to receive(:version).and_return('9.2.1')
expect(described_class.join_lateral_supported?).to eq(false)
end
it 'returns true when using PostgreSQL 9.3.0 or newer' do
allow(described_class).to receive(:postgresql?).and_return(true)
allow(described_class).to receive(:version).and_return('9.3.0')
expect(described_class.join_lateral_supported?).to eq(true)
end
end
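A sketch of the capability check specified here, assuming it only needs the postgresql? and version helpers stubbed above; the exact version comparison is illustrative:

# Sketch only: LATERAL joins require PostgreSQL 9.3 or newer.
def self.join_lateral_supported?
  postgresql? && Gem::Version.new(version) >= Gem::Version.new('9.3')
end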
describe '.nulls_last_order' do describe '.nulls_last_order' do
context 'when using PostgreSQL' do context 'when using PostgreSQL' do
before do before do
......
...@@ -22,6 +22,7 @@ events: ...@@ -22,6 +22,7 @@ events:
- author - author
- project - project
- target - target
- push_event_payload
notes: notes:
- award_emoji - award_emoji
- project - project
...@@ -272,3 +273,5 @@ timelogs: ...@@ -272,3 +273,5 @@ timelogs:
- issue - issue
- merge_request - merge_request
- user - user
push_event_payload:
- event
...@@ -36,6 +36,14 @@ Event: ...@@ -36,6 +36,14 @@ Event:
- updated_at - updated_at
- action - action
- author_id - author_id
PushEventPayload:
- commit_count
- action
- ref_type
- commit_from
- commit_to
- ref
- commit_title
Note: Note:
- id - id
- note - note
......
require 'spec_helper'
describe EventCollection do
describe '#to_a' do
let(:project) { create(:project_empty_repo) }
let(:projects) { Project.where(id: project.id) }
let(:user) { create(:user) }
before do
20.times do
event = create(:push_event, project: project, author: user)
create(:push_event_payload, event: event)
end
create(:closed_issue_event, project: project, author: user)
end
it 'returns an Array of events' do
events = described_class.new(projects).to_a
expect(events).to be_an_instance_of(Array)
end
it 'applies a limit to the number of events' do
events = described_class.new(projects).to_a
expect(events.length).to eq(20)
end
it 'can paginate through events' do
events = described_class.new(projects, offset: 20).to_a
expect(events.length).to eq(1)
end
it 'returns an empty Array when crossing the maximum page number' do
events = described_class.new(projects, limit: 1, offset: 15).to_a
expect(events).to be_empty
end
it 'allows filtering of events using an EventFilter' do
filter = EventFilter.new(EventFilter.issue)
events = described_class.new(projects, filter: filter).to_a
expect(events.length).to eq(1)
expect(events[0].action).to eq(Event::CLOSED)
end
end
end
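These examples pin down a small interface: a default limit of 20, an offset, an optional EventFilter, and an upper bound on how far back pagination may go. The sketch below is one shape that satisfies them, reusing the Event.in_projects, recent and apply_filter helpers seen elsewhere in this commit; the page cap value is an assumption:

# Sketch only: paginated, optionally filtered events for a set of projects.
class EventCollection
  MAX_PAGES = 10 # assumed cap; crossing it returns an empty Array

  def initialize(projects, limit: 20, offset: 0, filter: nil)
    @projects = projects
    @limit = limit
    @offset = offset
    @filter = filter
  end

  def to_a
    return [] if @offset >= MAX_PAGES * @limit

    events = Event.in_projects(@projects)
    events = @filter.apply_filter(events) if @filter
    events.recent.offset(@offset).limit(@limit).to_a
  end
end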
...@@ -304,27 +304,15 @@ describe Event do ...@@ -304,27 +304,15 @@ describe Event do
end end
end end
def create_push_event(project, user, attrs = {}) def create_push_event(project, user)
data = { event = create(:push_event, project: project, author: user)
before: Gitlab::Git::BLANK_SHA,
after: "0220c11b9a3e6c69dc8fd35321254ca9a7b98f7e", create(:push_event_payload,
ref: "refs/heads/master", event: event,
user_id: user.id, commit_to: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
user_name: user.name, commit_count: 0,
repository: { ref: 'master')
name: project.name,
url: "localhost/rubinius", event
description: "",
homepage: "localhost/rubinius",
private: true
}
}
described_class.create({
project: project,
action: described_class::PUSHED,
data: data,
author_id: user.id
}.merge!(attrs))
end end
end end
...@@ -149,7 +149,7 @@ describe ProjectMember do ...@@ -149,7 +149,7 @@ describe ProjectMember do
describe 'notifications' do describe 'notifications' do
describe '#after_accept_request' do describe '#after_accept_request' do
it 'calls NotificationService.new_project_member' do it 'calls NotificationService.new_project_member' do
member = create(:project_member, user: build_stubbed(:user), requested_at: Time.now) member = create(:project_member, user: create(:user), requested_at: Time.now)
expect_any_instance_of(NotificationService).to receive(:new_project_member) expect_any_instance_of(NotificationService).to receive(:new_project_member)
......
...@@ -485,7 +485,7 @@ describe Project do ...@@ -485,7 +485,7 @@ describe Project do
describe 'last_activity' do describe 'last_activity' do
it 'alias last_activity to last_event' do it 'alias last_activity to last_event' do
last_event = create(:event, project: project) last_event = create(:event, :closed, project: project)
expect(project.last_activity).to eq(last_event) expect(project.last_activity).to eq(last_event)
end end
...@@ -493,7 +493,7 @@ describe Project do ...@@ -493,7 +493,7 @@ describe Project do
describe 'last_activity_date' do describe 'last_activity_date' do
it 'returns the creation date of the project\'s last event if present' do it 'returns the creation date of the project\'s last event if present' do
new_event = create(:event, project: project, created_at: Time.now) new_event = create(:event, :closed, project: project, created_at: Time.now)
project.reload project.reload
expect(project.last_activity_at.to_i).to eq(new_event.created_at.to_i) expect(project.last_activity_at.to_i).to eq(new_event.created_at.to_i)
......
require 'spec_helper'
describe PushEventPayload do
describe 'saving payloads' do
it 'does not allow commit messages longer than 70 characters' do
event = create(:push_event)
payload = build(:push_event_payload, event: event)
expect(payload).to be_valid
payload.commit_title = 'a' * 100
expect(payload).not_to be_valid
end
end
end
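The single example here reads like a plain length validation on the payload model; a sketch under that assumption:

# Sketch only: cap the stored commit title at 70 characters.
class PushEventPayload < ActiveRecord::Base
  belongs_to :event

  validates :commit_title, length: { maximum: 70 }
end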
require 'spec_helper'
describe PushEvent do
let(:payload) { PushEventPayload.new }
let(:event) do
event = described_class.new
allow(event).to receive(:push_event_payload).and_return(payload)
event
end
describe '.sti_name' do
it 'returns Event::PUSHED' do
expect(described_class.sti_name).to eq(Event::PUSHED)
end
end
describe '#push?' do
it 'returns true' do
expect(event).to be_push
end
end
describe '#push_with_commits?' do
it 'returns true when both the first and last commit are present' do
allow(event).to receive(:commit_from).and_return('123')
allow(event).to receive(:commit_to).and_return('456')
expect(event).to be_push_with_commits
end
it 'returns false when the first commit is missing' do
allow(event).to receive(:commit_to).and_return('456')
expect(event).not_to be_push_with_commits
end
it 'returns false when the last commit is missing' do
allow(event).to receive(:commit_from).and_return('123')
expect(event).not_to be_push_with_commits
end
end
describe '#tag?' do
it 'returns true when pushing to a tag' do
allow(payload).to receive(:tag?).and_return(true)
expect(event).to be_tag
end
it 'returns false when pushing to a branch' do
allow(payload).to receive(:tag?).and_return(false)
expect(event).not_to be_tag
end
end
describe '#branch?' do
it 'returns true when pushing to a branch' do
allow(payload).to receive(:branch?).and_return(true)
expect(event).to be_branch
end
it 'returns false when pushing to a tag' do
allow(payload).to receive(:branch?).and_return(false)
expect(event).not_to be_branch
end
end
describe '#valid_push?' do
it 'returns true if a ref exists' do
allow(payload).to receive(:ref).and_return('master')
expect(event).to be_valid_push
end
it 'returns false when no ref is present' do
expect(event).not_to be_valid_push
end
end
describe '#new_ref?' do
it 'returns true when pushing a new ref' do
allow(payload).to receive(:created?).and_return(true)
expect(event).to be_new_ref
end
it 'returns false when pushing to an existing ref' do
allow(payload).to receive(:created?).and_return(false)
expect(event).not_to be_new_ref
end
end
describe '#rm_ref?' do
it 'returns true when removing an existing ref' do
allow(payload).to receive(:removed?).and_return(true)
expect(event).to be_rm_ref
end
it 'returns false when pushing to an existing ref' do
allow(payload).to receive(:removed?).and_return(false)
expect(event).not_to be_rm_ref
end
end
describe '#commit_from' do
it 'returns the first commit SHA' do
allow(payload).to receive(:commit_from).and_return('123')
expect(event.commit_from).to eq('123')
end
end
describe '#commit_to' do
it 'returns the last commit SHA' do
allow(payload).to receive(:commit_to).and_return('123')
expect(event.commit_to).to eq('123')
end
end
describe '#ref_name' do
it 'returns the name of the ref' do
allow(payload).to receive(:ref).and_return('master')
expect(event.ref_name).to eq('master')
end
end
describe '#ref_type' do
it 'returns the type of the ref' do
allow(payload).to receive(:ref_type).and_return('branch')
expect(event.ref_type).to eq('branch')
end
end
describe '#branch_name' do
it 'returns the name of the branch' do
allow(payload).to receive(:ref).and_return('master')
expect(event.branch_name).to eq('master')
end
end
describe '#tag_name' do
it 'returns the name of the tag' do
allow(payload).to receive(:ref).and_return('1.2')
expect(event.tag_name).to eq('1.2')
end
end
describe '#commit_title' do
it 'returns the commit message' do
allow(payload).to receive(:commit_title).and_return('foo')
expect(event.commit_title).to eq('foo')
end
end
describe '#commit_id' do
it 'returns the SHA of the last commit if present' do
allow(event).to receive(:commit_to).and_return('123')
expect(event.commit_id).to eq('123')
end
it 'returns the SHA of the first commit if the last commit is not present' do
allow(event).to receive(:commit_to).and_return(nil)
allow(event).to receive(:commit_from).and_return('123')
expect(event.commit_id).to eq('123')
end
end
describe '#commits_count' do
it 'returns the number of commits' do
allow(payload).to receive(:commit_count).and_return(1)
expect(event.commits_count).to eq(1)
end
end
describe '#validate_push_action' do
it 'adds an error when the action is not PUSHED' do
event.action = Event::CREATED
event.validate_push_action
expect(event.errors.count).to eq(1)
end
end
end
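Almost every example above stubs the payload, which fits a PushEvent model that delegates its push details to the associated PushEventPayload row. A sketch of that shape, showing only a few of the delegated readers plus the action validation exercised at the end; the method bodies are assumptions, not the commit's code:

# Sketch only: a push event reads its details from its payload row.
class PushEvent < Event
  def self.sti_name
    PUSHED
  end

  def push?
    true
  end

  def commit_from
    push_event_payload.commit_from
  end

  def commit_to
    push_event_payload.commit_to
  end

  def ref_name
    push_event_payload.ref
  end

  def validate_push_action
    return if action == PUSHED

    errors.add(:action, 'must be PUSHED for push events')
  end
end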
...@@ -1291,7 +1291,7 @@ describe User do ...@@ -1291,7 +1291,7 @@ describe User do
let!(:project2) { create(:project, forked_from_project: project3) } let!(:project2) { create(:project, forked_from_project: project3) }
let!(:project3) { create(:project) } let!(:project3) { create(:project) }
let!(:merge_request) { create(:merge_request, source_project: project2, target_project: project3, author: subject) } let!(:merge_request) { create(:merge_request, source_project: project2, target_project: project3, author: subject) }
let!(:push_event) { create(:event, :pushed, project: project1, target: project1, author: subject) } let!(:push_event) { create(:push_event, project: project1, author: subject) }
let!(:merge_event) { create(:event, :created, project: project3, target: merge_request, author: subject) } let!(:merge_event) { create(:event, :created, project: project3, target: merge_request, author: subject) }
before do before do
...@@ -1333,10 +1333,18 @@ describe User do ...@@ -1333,10 +1333,18 @@ describe User do
subject { create(:user) } subject { create(:user) }
let!(:project1) { create(:project, :repository) } let!(:project1) { create(:project, :repository) }
let!(:project2) { create(:project, :repository, forked_from_project: project1) } let!(:project2) { create(:project, :repository, forked_from_project: project1) }
let!(:push_data) do
Gitlab::DataBuilder::Push.build_sample(project2, subject) let!(:push_event) do
event = create(:push_event, project: project2, author: subject)
create(:push_event_payload,
event: event,
commit_to: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
commit_count: 0,
ref: 'master')
event
end end
let!(:push_event) { create(:event, :pushed, project: project2, target: project1, author: subject, data: push_data) }
before do before do
project1.team << [subject, :master] project1.team << [subject, :master]
...@@ -1363,8 +1371,13 @@ describe User do ...@@ -1363,8 +1371,13 @@ describe User do
expect(subject.recent_push(project1)).to eq(nil) expect(subject.recent_push(project1)).to eq(nil)
expect(subject.recent_push(project2)).to eq(push_event) expect(subject.recent_push(project2)).to eq(push_event)
push_data1 = Gitlab::DataBuilder::Push.build_sample(project1, subject) push_event1 = create(:push_event, project: project1, author: subject)
push_event1 = create(:event, :pushed, project: project1, target: project1, author: subject, data: push_data1)
create(:push_event_payload,
event: push_event1,
commit_to: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
commit_count: 0,
ref: 'master')
expect(subject.recent_push([project1, project2])).to eq(push_event1) # Newest expect(subject.recent_push([project1, project2])).to eq(push_event1) # Newest
end end
......
...@@ -59,6 +59,34 @@ describe API::Events do ...@@ -59,6 +59,34 @@ describe API::Events do
expect(json_response.size).to eq(1) expect(json_response.size).to eq(1)
end end
context 'when the list of events includes push events' do
let(:event) do
create(:push_event, author: user, project: private_project)
end
let!(:payload) { create(:push_event_payload, event: event) }
let(:payload_hash) { json_response[0]['push_data'] }
before do
get api("/users/#{user.id}/events?action=pushed", user)
end
it 'responds with HTTP 200 OK' do
expect(response).to have_http_status(200)
end
it 'includes the push payload as a Hash' do
expect(payload_hash).to be_an_instance_of(Hash)
end
it 'includes the push payload details' do
expect(payload_hash['commit_count']).to eq(payload.commit_count)
expect(payload_hash['action']).to eq(payload.action)
expect(payload_hash['ref_type']).to eq(payload.ref_type)
expect(payload_hash['commit_to']).to eq(payload.commit_to)
end
end
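One way these expectations could be met is by exposing the payload under a push_data key on the event entity. The sketch below uses standard Grape::Entity options; the entity class names are placeholders, not necessarily those used by the commit:

# Sketch only: expose push details as "push_data" for push events.
class PushEventPayloadEntity < Grape::Entity
  expose :commit_count, :action, :ref_type, :commit_from, :commit_to, :ref, :commit_title
end

class EventEntity < Grape::Entity
  expose :push_event_payload,
    as: :push_data,
    using: PushEventPayloadEntity,
    if: ->(event, _options) { event.push? }
end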
context 'when there are multiple events from different projects' do context 'when there are multiple events from different projects' do
let(:second_note) { create(:note_on_issue, project: create(:project)) } let(:second_note) { create(:note_on_issue, project: create(:project)) }
......
...@@ -252,6 +252,31 @@ describe API::V3::Users do ...@@ -252,6 +252,31 @@ describe API::V3::Users do
end end
context "as a user than can see the event's project" do context "as a user than can see the event's project" do
context 'when the list of events includes push events' do
let(:event) { create(:push_event, author: user, project: project) }
let!(:payload) { create(:push_event_payload, event: event) }
let(:payload_hash) { json_response[0]['push_data'] }
before do
get api("/users/#{user.id}/events?action=pushed", user)
end
it 'responds with HTTP 200 OK' do
expect(response).to have_http_status(200)
end
it 'includes the push payload as a Hash' do
expect(payload_hash).to be_an_instance_of(Hash)
end
it 'includes the push payload details' do
expect(payload_hash['commit_count']).to eq(payload.commit_count)
expect(payload_hash['action']).to eq(payload.action)
expect(payload_hash['ref_type']).to eq(payload.ref_type)
expect(payload_hash['commit_to']).to eq(payload.commit_to)
end
end
context 'joined event' do context 'joined event' do
it 'returns the "joined" event' do it 'returns the "joined" event' do
get v3_api("/users/#{user.id}/events", user) get v3_api("/users/#{user.id}/events", user)
......
...@@ -117,12 +117,52 @@ describe EventCreateService do ...@@ -117,12 +117,52 @@ describe EventCreateService do
let(:project) { create(:project) } let(:project) { create(:project) }
let(:user) { create(:user) } let(:user) { create(:user) }
let(:push_data) do
{
commits: [
{
id: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
message: 'This is a commit'
}
],
before: '0000000000000000000000000000000000000000',
after: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
total_commits_count: 1,
ref: 'refs/heads/my-branch'
}
end
it 'creates a new event' do it 'creates a new event' do
expect { service.push(project, user, {}) }.to change { Event.count } expect { service.push(project, user, push_data) }.to change { Event.count }
end
it 'creates the push event payload' do
expect(PushEventPayloadService).to receive(:new)
.with(an_instance_of(PushEvent), push_data)
.and_call_original
service.push(project, user, push_data)
end end
it 'updates user last activity' do it 'updates user last activity' do
expect { service.push(project, user, {}) }.to change { user_activity(user) } expect { service.push(project, user, push_data) }
.to change { user_activity(user) }
end
it 'does not create any event data when an error is raised' do
payload_service = double(:service)
allow(payload_service).to receive(:execute)
.and_raise(RuntimeError)
allow(PushEventPayloadService).to receive(:new)
.and_return(payload_service)
expect { service.push(project, user, push_data) }
.to raise_error(RuntimeError)
expect(Event.count).to eq(0)
expect(PushEventPayload.count).to eq(0)
end end
end end
......
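The rollback example only passes if the event row and its payload are created atomically; a sketch of the push method under that assumption (helper names other than PushEventPayloadService are hypothetical, and the user-activity update checked above is omitted):

# Sketch only: create the push event and its payload in one transaction so a
# payload failure rolls back the event as well.
def push(project, current_user, push_data)
  ActiveRecord::Base.transaction do
    event = PushEvent.create!(project: project,
                              author_id: current_user.id,
                              action: Event::PUSHED)

    PushEventPayloadService.new(event, push_data).execute
    event
  end
end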
...@@ -141,10 +141,13 @@ describe GitPushService, services: true do ...@@ -141,10 +141,13 @@ describe GitPushService, services: true do
let!(:push_data) { push_data_from_service(project, user, oldrev, newrev, ref) } let!(:push_data) { push_data_from_service(project, user, oldrev, newrev, ref) }
let(:event) { Event.find_by_action(Event::PUSHED) } let(:event) { Event.find_by_action(Event::PUSHED) }
it { expect(event).not_to be_nil } it { expect(event).to be_an_instance_of(PushEvent) }
it { expect(event.project).to eq(project) } it { expect(event.project).to eq(project) }
it { expect(event.action).to eq(Event::PUSHED) } it { expect(event.action).to eq(Event::PUSHED) }
it { expect(event.data).to eq(push_data) } it { expect(event.push_event_payload).to be_an_instance_of(PushEventPayload) }
it { expect(event.push_event_payload.commit_from).to eq(oldrev) }
it { expect(event.push_event_payload.commit_to).to eq(newrev) }
it { expect(event.push_event_payload.ref).to eq('master') }
context "Updates merge requests" do context "Updates merge requests" do
it "when pushing a new branch for the first time" do it "when pushing a new branch for the first time" do
......
require 'spec_helper'
describe PushEventPayloadService do
let(:event) { create(:push_event) }
describe '#execute' do
let(:push_data) do
{
commits: [
{
id: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
message: 'This is a commit'
}
],
before: '0000000000000000000000000000000000000000',
after: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
total_commits_count: 1,
ref: 'refs/heads/my-branch'
}
end
it 'creates a new PushEventPayload row' do
payload = described_class.new(event, push_data).execute
expect(payload.commit_count).to eq(1)
expect(payload.action).to eq('created')
expect(payload.ref_type).to eq('branch')
expect(payload.commit_from).to be_nil
expect(payload.commit_to).to eq(push_data[:after])
expect(payload.ref).to eq('my-branch')
expect(payload.commit_title).to eq('This is a commit')
expect(payload.event_id).to eq(event.id)
end
it 'sets the push_event_payload association of the used event' do
payload = described_class.new(event, push_data).execute
expect(event.push_event_payload).to eq(payload)
end
end
describe '#commit_title' do
it 'returns nil if no commits were pushed' do
service = described_class.new(event, commits: [])
expect(service.commit_title).to be_nil
end
it 'returns a String limited to 70 characters' do
service = described_class.new(event, commits: [{ message: 'a' * 100 }])
expect(service.commit_title).to eq(('a' * 67) + '...')
end
it 'does not truncate the commit message if it is shorter than 70 characters' do
service = described_class.new(event, commits: [{ message: 'Hello' }])
expect(service.commit_title).to eq('Hello')
end
it 'includes the first line of a commit message if the message spans multiple lines' do
service = described_class
.new(event, commits: [{ message: "Hello\n\nworld" }])
expect(service.commit_title).to eq('Hello')
end
end
describe '#commit_from_id' do
it 'returns nil when creating a new ref' do
service = described_class.new(event, before: Gitlab::Git::BLANK_SHA)
expect(service.commit_from_id).to be_nil
end
it 'returns the ID of the first commit when pushing to an existing ref' do
service = described_class.new(event, before: '123')
expect(service.commit_from_id).to eq('123')
end
end
describe '#commit_to_id' do
it 'returns nil when removing an existing ref' do
service = described_class.new(event, after: Gitlab::Git::BLANK_SHA)
expect(service.commit_to_id).to be_nil
end
end
describe '#commit_count' do
it 'returns the number of commits' do
service = described_class.new(event, total_commits_count: 1)
expect(service.commit_count).to eq(1)
end
it 'raises when the push data does not contain the commits count' do
service = described_class.new(event, {})
expect { service.commit_count }.to raise_error(KeyError)
end
end
describe '#ref' do
it 'returns the name of the ref' do
service = described_class.new(event, ref: 'refs/heads/foo')
expect(service.ref).to eq('refs/heads/foo')
end
it 'raises when the push data does not contain the ref name' do
service = described_class.new(event, {})
expect { service.ref }.to raise_error(KeyError)
end
end
describe '#revision_before' do
it 'returns the revision from before the push' do
service = described_class.new(event, before: 'foo')
expect(service.revision_before).to eq('foo')
end
it 'raises when the push data does not contain the before revision' do
service = described_class.new(event, {})
expect { service.revision_before }.to raise_error(KeyError)
end
end
describe '#revision_after' do
it 'returns the revision from after the push' do
service = described_class.new(event, after: 'foo')
expect(service.revision_after).to eq('foo')
end
it 'raises when the push data does not contain the after revision' do
service = described_class.new(event, {})
expect { service.revision_after }.to raise_error(KeyError)
end
end
describe '#trimmed_ref' do
it 'returns the ref name without its prefix' do
service = described_class.new(event, ref: 'refs/heads/foo')
expect(service.trimmed_ref).to eq('foo')
end
end
describe '#create?' do
it 'returns true when creating a new ref' do
service = described_class.new(event, before: Gitlab::Git::BLANK_SHA)
expect(service.create?).to eq(true)
end
it 'returns false when pushing to an existing ref' do
service = described_class.new(event, before: 'foo')
expect(service.create?).to eq(false)
end
end
describe '#remove?' do
it 'returns true when removing an existing ref' do
service = described_class.new(event, after: Gitlab::Git::BLANK_SHA)
expect(service.remove?).to eq(true)
end
it 'returns false when pushing to an existing ref' do
service = described_class.new(event, after: 'foo')
expect(service.remove?).to eq(false)
end
end
describe '#action' do
it 'returns :created when creating a ref' do
service = described_class.new(event, before: Gitlab::Git::BLANK_SHA)
expect(service.action).to eq(:created)
end
it 'returns :removed when removing an existing ref' do
service = described_class.new(event,
before: '123',
after: Gitlab::Git::BLANK_SHA)
expect(service.action).to eq(:removed)
end
it 'returns :pushed when pushing to an existing ref' do
service = described_class.new(event, before: '123', after: '456')
expect(service.action).to eq(:pushed)
end
end
describe '#ref_type' do
it 'returns :tag for a tag' do
service = described_class.new(event, ref: 'refs/tags/1.2')
expect(service.ref_type).to eq(:tag)
end
it 'returns :branch for a branch' do
service = described_class.new(event, ref: 'refs/heads/master')
expect(service.ref_type).to eq(:branch)
end
end
end
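The action and ref-type groups pin down behaviour that the following sketch would satisfy, assuming the service keeps the raw push data and compares revisions against Gitlab::Git::BLANK_SHA (instance-method sketch only; the surrounding class is omitted):

# Sketch only: derive the payload's action and ref type from the push data.
def create?
  revision_before == Gitlab::Git::BLANK_SHA
end

def remove?
  revision_after == Gitlab::Git::BLANK_SHA
end

def action
  if create?
    :created
  elsif remove?
    :removed
  else
    :pushed
  end
end

def ref_type
  ref.start_with?('refs/tags/') ? :tag : :branch
end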
...@@ -2,9 +2,11 @@ require 'spec_helper' ...@@ -2,9 +2,11 @@ require 'spec_helper'
describe PruneOldEventsWorker do describe PruneOldEventsWorker do
describe '#perform' do describe '#perform' do
let!(:expired_event) { create(:event, author_id: 0, created_at: 13.months.ago) } let(:user) { create(:user) }
let!(:not_expired_event) { create(:event, author_id: 0, created_at: 1.day.ago) }
let!(:exactly_12_months_event) { create(:event, author_id: 0, created_at: 12.months.ago) } let!(:expired_event) { create(:event, :closed, author: user, created_at: 13.months.ago) }
let!(:not_expired_event) { create(:event, :closed, author: user, created_at: 1.day.ago) }
let!(:exactly_12_months_event) { create(:event, :closed, author: user, created_at: 12.months.ago) }
it 'prunes events older than 12 months' do it 'prunes events older than 12 months' do
expect { subject.perform }.to change { Event.count }.by(-1) expect { subject.perform }.to change { Event.count }.by(-1)
......