Commit 9ccfcf55 authored by Shinya Maeda

Merge branch 'master' into per-project-pipeline-iid

parents 6a108b8f 35816eb7
......@@ -257,7 +257,7 @@ gem 'addressable', '~> 2.5.2'
gem 'bootstrap-sass', '~> 3.3.0'
gem 'font-awesome-rails', '~> 4.7'
gem 'gemojione', '~> 3.3'
gem 'gon', '~> 6.1.0'
gem 'gon', '~> 6.2'
gem 'jquery-atwho-rails', '~> 1.3.2'
gem 'request_store', '~> 1.3'
gem 'select2-rails', '~> 3.5.9'
......@@ -294,7 +294,7 @@ group :metrics do
gem 'influxdb', '~> 0.2', require: false
# Prometheus
gem 'prometheus-client-mmap', '~> 0.9.1'
gem 'prometheus-client-mmap', '~> 0.9.2'
gem 'raindrops', '~> 0.18'
end
......
......@@ -332,9 +332,8 @@ GEM
activesupport (>= 4.2.0)
gollum-grit_adapter (1.0.1)
gitlab-grit (~> 2.7, >= 2.7.1)
gon (6.1.0)
gon (6.2.0)
actionpack (>= 3.0)
json
multi_json
request_store (>= 1.0)
google-api-client (0.19.8)
......@@ -632,7 +631,7 @@ GEM
parser
unparser
procto (0.0.3)
prometheus-client-mmap (0.9.1)
prometheus-client-mmap (0.9.2)
pry (0.10.4)
coderay (~> 1.1.0)
method_source (~> 0.8.1)
......@@ -1065,7 +1064,7 @@ DEPENDENCIES
gitlab-markup (~> 1.6.2)
gitlab-styles (~> 2.3)
gitlab_omniauth-ldap (~> 2.0.4)
gon (~> 6.1.0)
gon (~> 6.2)
google-api-client (~> 0.19.8)
google-protobuf (= 3.5.1)
gpgme
......@@ -1130,7 +1129,7 @@ DEPENDENCIES
peek-sidekiq (~> 1.0.3)
pg (~> 0.18.2)
premailer-rails (~> 1.9.7)
prometheus-client-mmap (~> 0.9.1)
prometheus-client-mmap (~> 0.9.2)
pry-byebug (~> 3.4.1)
pry-rails (~> 0.3.4)
rack-attack (~> 4.4.1)
......
<script>
import { mapActions, mapState } from 'vuex';
import { mapActions, mapState, mapGetters } from 'vuex';
import { sprintf, __ } from '~/locale';
import * as consts from '../../stores/modules/commit/constants';
import RadioGroup from './radio_group.vue';
......@@ -10,6 +10,7 @@ export default {
},
computed: {
...mapState(['currentBranchId', 'changedFiles', 'stagedFiles']),
...mapGetters(['currentProject']),
commitToCurrentBranchText() {
return sprintf(
__('Commit to %{branchName} branch'),
......@@ -52,6 +53,7 @@ export default {
:show-input="true"
/>
<radio-group
v-if="currentProject.merge_requests_enabled"
:value="$options.commitToNewBranchMR"
:label="__('Create a new branch and merge request')"
:show-input="true"
......
......@@ -110,8 +110,8 @@ export default {
Welcome to the GitLab IDE
</h4>
<p>
You can select a file in the left sidebar to begin
editing and use the right sidebar to commit your changes.
Select a file from the left sidebar to begin editing.
Afterwards, you'll be able to commit your changes.
</p>
</div>
</div>
......
......@@ -17,6 +17,16 @@
*/
@mixin markdown-table {
width: auto;
display: inline-block;
overflow-x: auto;
border-left: 0;
border-right: 0;
border-bottom: 0;
@supports (width: fit-content) {
display: block;
width: fit-content;
}
}
/*
......
......@@ -180,11 +180,6 @@ ul.wiki-pages-list.content-list {
}
}
.wiki-holder {
overflow-x: auto;
overflow-y: hidden;
}
.wiki {
table {
@include markdown-table;
......
......@@ -22,7 +22,8 @@ module ShaAttribute
column = columns.find { |c| c.name == name.to_s }
unless column
raise ArgumentError.new("sha_attribute #{name.inspect} is invalid since the column doesn't exist")
warn "WARNING: sha_attribute #{name.inspect} is invalid since the column doesn't exist - you may need to run database migrations"
return
end
unless column.type == :binary
......
......@@ -30,6 +30,8 @@ module TimeTrackable
return if @time_spent == 0
touch if touchable?
if @time_spent == :reset
reset_spent_time
else
......@@ -53,6 +55,10 @@ module TimeTrackable
private
def touchable?
valid? && persisted?
end
def reset_spent_time
timelogs.new(time_spent: total_time_spent * -1, user: @time_spent_user) # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
......
......@@ -87,7 +87,7 @@ module Projects
new_path = removal_path(path)
if mv_repository(path, new_path)
log_info("Repository \"#{path}\" moved to \"#{new_path}\"")
log_info(%Q{Repository "#{path}" moved to "#{new_path}" for project "#{project.full_path}"})
project.run_after_commit do
# self is now project
......
- if @wiki_home.present?
%div{ class: container_class }
.wiki-holder.prepend-top-default.append-bottom-default
.prepend-top-default.append-bottom-default
.wiki
= render_wiki_content(@wiki_home)
- else
......
......@@ -24,7 +24,7 @@
- history_link = link_to s_("WikiHistoricalPage|history"), project_wiki_history_path(@project, @page)
= (s_("WikiHistoricalPage|You can view the %{most_recent_link} or browse the %{history_link}.") % { most_recent_link: most_recent_link, history_link: history_link }).html_safe
.wiki-holder.prepend-top-default.append-bottom-default
.prepend-top-default.append-bottom-default
.wiki
= render_wiki_content(@page)
......
......@@ -9,85 +9,6 @@ module ObjectStorage
SanityCheckError = Class.new(StandardError)
class Upload < ActiveRecord::Base
# Upper limit for foreground checksum processing
CHECKSUM_THRESHOLD = 100.megabytes
belongs_to :model, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
validates :size, presence: true
validates :path, presence: true
validates :model, presence: true
validates :uploader, presence: true
before_save :calculate_checksum!, if: :foreground_checksummable?
after_commit :schedule_checksum, if: :checksummable?
scope :stored_locally, -> { where(store: [nil, ObjectStorage::Store::LOCAL]) }
scope :stored_remotely, -> { where(store: ObjectStorage::Store::REMOTE) }
def self.hexdigest(path)
Digest::SHA256.file(path).hexdigest
end
def absolute_path
raise ObjectStorage::RemoteStoreError, "Remote object has no absolute path." unless local?
return path unless relative_path?
uploader_class.absolute_path(self)
end
def calculate_checksum!
self.checksum = nil
return unless checksummable?
self.checksum = self.class.hexdigest(absolute_path)
end
def build_uploader(mounted_as = nil)
uploader_class.new(model, mounted_as).tap do |uploader|
uploader.upload = self
uploader.retrieve_from_store!(identifier)
end
end
def exist?
File.exist?(absolute_path)
end
def local?
return true if store.nil?
store == ObjectStorage::Store::LOCAL
end
private
def checksummable?
checksum.nil? && local? && exist?
end
def foreground_checksummable?
checksummable? && size <= CHECKSUM_THRESHOLD
end
def schedule_checksum
UploadChecksumWorker.perform_async(id)
end
def relative_path?
!path.start_with?('/')
end
def identifier
File.basename(path)
end
def uploader_class
Object.const_get(uploader)
end
end
class MigrationResult
attr_reader :upload
attr_accessor :error
......
---
title: Updates updated_at on issuable when setting time spent
merge_request: 18757
author: Jacopo Beschi @jacopo-beschi
type: added
---
title: Fix finding wiki pages when they have invalidly-encoded content
merge_request: 18856
author:
type: fixed
---
title: Hide merge request option in IDE when disabled
merge_request:
author:
type: changed
---
title: Fix outdated Web IDE welcome copy
merge_request: 18861
author:
type: fixed
......@@ -7,6 +7,20 @@ module Gollum
end
require "gollum-lib"
module Gollum
class Page
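# Gollum returns page content as raw bytes; force the requested encoding
# (UTF-8 by default) and clean it up via EncodingHelper so pages with
# invalidly-encoded content can still be found and rendered.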
def text_data(encoding = nil)
data = if raw_data.respond_to?(:encoding)
raw_data.force_encoding(encoding || Encoding::UTF_8)
else
raw_data
end
Gitlab::EncodingHelper.encode!(data)
end
end
end
Rails.application.configure do
config.after_initialize do
Gollum::Page.per_page = Kaminari.config.default_per_page
......
......@@ -230,6 +230,11 @@ describe "#==" do
end
```
### Prometheus tests
Prometheus metrics may be preserved from one test run to another. To ensure that metrics are
reset before each example, add the `:prometheus` tag to the RSpec test.
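For example, a minimal sketch of a spec tagged with `:prometheus` (the metric name and the
`describe` target here are illustrative):
```ruby
describe Gitlab::Metrics, :prometheus do
  it 'starts the example with an empty registry' do
    # Because of the :prometheus tag, the registry was reset before this example.
    Gitlab::Metrics.registry.counter(:example_counter, 'illustrative counter')

    expect(Gitlab::Metrics.registry.metrics.count).to eq(1)
  end
end
```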
### Matchers
Custom matchers should be created to clarify the intent and/or hide the
......
module EE
module Ldap
module OmniauthCallbacksController
extend ::Gitlab::Utils::Override
override :sign_in_and_redirect
def sign_in_and_redirect(user)
# The counter gets incremented in `sign_in_and_redirect`
show_ldap_sync_flash if user.sign_in_count == 0
super
end
private
def show_ldap_sync_flash
flash[:notice] = 'LDAP sync in progress. This could take a few minutes. '\
'Refresh the page to see the changes.'
end
end
end
end
require 'spec_helper'
describe Ldap::OmniauthCallbacksController do
include_context 'Ldap::OmniauthCallbacksController'
it "displays LDAP sync flash on first sign in" do
post provider
expect(flash[:notice]).to match(/LDAP sync in progress*/)
end
it "skips LDAP sync flash on subsequent sign ins" do
user.update!(sign_in_count: 1)
post provider
expect(flash[:notice]).to eq nil
end
context 'access denied' do
let(:valid_login?) { false }
it 'logs a failure event' do
stub_licensed_features(extended_audit_events: true)
expect { post provider }.to change(SecurityEvent, :count).by(1)
end
end
end
......@@ -62,6 +62,12 @@ module Gitlab
end
end
# Returns an array of Blob instances with only the metadata, meaning
# the data attribute has no content.
def batch_metadata(repository, blob_references)
batch(repository, blob_references, blob_size_limit: 0)
end
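# Illustrative usage (the references below are hypothetical):
#
#   blobs = Gitlab::Git::Blob.batch_metadata(repository, [[sha, 'README.md']])
#   blobs.first.data # => blank; only metadata such as the path is loaded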
# Find LFS blobs given an array of sha ids
# Returns array of Gitlab::Git::Blob
# Does not guarantee blob data will be set
......
......@@ -28,13 +28,14 @@ module Gitlab
# 85bc2f9753afd5f4fc5d7c75f74f8d526f26b4f3 107 R060\tfiles/js/commit.js.coffee\tfiles/js/commit.coffee
def parse(raw_change)
@blob_id, @blob_size, @raw_operation, raw_paths = raw_change.split(' ', 4)
@blob_size = @blob_size.to_i
@operation = extract_operation
@old_path, @new_path = extract_paths(raw_paths)
end
def extract_paths(file_path)
case operation
when :renamed
when :copied, :renamed
file_path.split(/\t/)
when :deleted
[file_path, nil]
......
......@@ -579,29 +579,40 @@ module Gitlab
count_commits(from: from, to: to, **options)
end
# Counts the number of commits between `from` and `to`.
def count_commits_between(from, to, options = {})
count_commits(from: from, to: to, **options)
end
# old_rev and new_rev are commit ID's
# the result of this method is an array of Gitlab::Git::RawDiffChange
def raw_changes_between(old_rev, new_rev)
gitaly_migrate(:raw_changes_between) do |is_enabled|
if is_enabled
gitaly_repository_client.raw_changes_between(old_rev, new_rev)
.each_with_object([]) do |msg, arr|
msg.raw_changes.each { |change| arr << ::Gitlab::Git::RawDiffChange.new(change) }
end
else
result = []
@raw_changes_between ||= {}
circuit_breaker.perform do
Open3.pipeline_r(git_diff_cmd(old_rev, new_rev), format_git_cat_file_script, git_cat_file_cmd) do |last_stdout, wait_threads|
last_stdout.each_line { |line| result << ::Gitlab::Git::RawDiffChange.new(line.chomp!) }
@raw_changes_between[[old_rev, new_rev]] ||= begin
return [] if new_rev.blank? || new_rev == Gitlab::Git::BLANK_SHA
if wait_threads.any? { |waiter| !waiter.value&.success? }
raise ::Gitlab::Git::Repository::GitError, "Unable to obtain changes between #{old_rev} and #{new_rev}"
gitaly_migrate(:raw_changes_between) do |is_enabled|
if is_enabled
gitaly_repository_client.raw_changes_between(old_rev, new_rev)
.each_with_object([]) do |msg, arr|
msg.raw_changes.each { |change| arr << ::Gitlab::Git::RawDiffChange.new(change) }
end
else
result = []
circuit_breaker.perform do
Open3.pipeline_r(git_diff_cmd(old_rev, new_rev), format_git_cat_file_script, git_cat_file_cmd) do |last_stdout, wait_threads|
last_stdout.each_line { |line| result << ::Gitlab::Git::RawDiffChange.new(line.chomp!) }
if wait_threads.any? { |waiter| !waiter.value&.success? }
raise ::Gitlab::Git::Repository::GitError, "Unable to obtain changes between #{old_rev} and #{new_rev}"
end
end
end
end
result
result
end
end
end
rescue ArgumentError => e
......
......@@ -25,6 +25,14 @@ module Gitlab
end
end
def reset_registry!
clear_memoization(:registry)
REGISTRY_MUTEX.synchronize do
::Prometheus::Client.reset!
end
end
def registry
strong_memoize(:registry) do
REGISTRY_MUTEX.synchronize do
......
module QA::Page
module Project::Job
class Show < QA::Page::Base
COMPLETED_STATUSES = %w[passed failed canceled blocked skipped manual].freeze # excludes created, pending, running
PASSED_STATUS = 'passed'.freeze
view 'app/views/projects/jobs/show.html.haml' do
element :build_output, '.js-build-output'
end
def output
css = '.js-build-output'
view 'app/assets/javascripts/vue_shared/components/ci_badge_link.vue' do
element :status_badge, 'ci-status'
end
wait(reload: false) do
has_css?(css)
end
def completed?
COMPLETED_STATUSES.include? find('.ci-status').text
end
find(css).text
def passed?
find('.ci-status').text == PASSED_STATUS
end
# Reminder: You may wish to wait for a particular job status before checking output
def output
find('.js-build-output').text
end
end
end
......
......@@ -87,16 +87,12 @@ module QA
Page::Project::Show.act { wait_for_push }
Page::Menu::Side.act { click_ci_cd_pipelines }
Page::Project::Pipeline::Index.act { go_to_latest_pipeline }
Page::Project::Pipeline::Show.act do
go_to_first_job
wait do
!has_content?('running')
end
end
Page::Project::Pipeline::Show.act { go_to_first_job }
Page::Project::Job::Show.perform do |job|
job.wait(reload: false) { job.completed? }
expect(job.passed?).to be_truthy, "Job status did not become \"passed\"."
expect(job.output).to include(sha1sum)
end
end
......
......@@ -3,6 +3,7 @@ import store from '~/ide/stores';
import commitActions from '~/ide/components/commit_sidebar/actions.vue';
import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
import { resetStore } from 'spec/ide/helpers';
import { projectData } from 'spec/ide/mock_data';
describe('IDE commit sidebar actions', () => {
let vm;
......@@ -13,6 +14,8 @@ describe('IDE commit sidebar actions', () => {
vm = createComponentWithStore(Component, store);
vm.$store.state.currentBranchId = 'master';
vm.$store.state.currentProjectId = 'abcproject';
Vue.set(vm.$store.state.projects, 'abcproject', { ...projectData });
vm.$mount();
......@@ -32,4 +35,15 @@ describe('IDE commit sidebar actions', () => {
it('renders current branch text', () => {
expect(vm.$el.textContent).toContain('Commit to master branch');
});
it('hides merge request option when project merge requests are disabled', done => {
vm.$store.state.projects.abcproject.merge_requests_enabled = false;
vm.$nextTick(() => {
expect(vm.$el.querySelectorAll('input[type="radio"]').length).toBe(2);
expect(vm.$el.textContent).not.toContain('Create a new branch and merge request');
done();
});
});
});
......@@ -4,6 +4,7 @@ import CommitForm from '~/ide/components/commit_sidebar/form.vue';
import { activityBarViews } from '~/ide/constants';
import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
import getSetTimeoutPromise from 'spec/helpers/set_timeout_promise_helper';
import { projectData } from 'spec/ide/mock_data';
import { resetStore } from '../../helpers';
describe('IDE commit form', () => {
......@@ -14,6 +15,8 @@ describe('IDE commit form', () => {
spyOnProperty(window, 'innerHeight').and.returnValue(800);
store.state.changedFiles.push('test');
store.state.currentProjectId = 'abcproject';
Vue.set(store.state.projects, 'abcproject', { ...projectData });
vm = createComponentWithStore(Component, store).$mount();
});
......
......@@ -12,4 +12,5 @@ export const projectData = {
},
},
mergeRequests: {},
merge_requests_enabled: true,
};
......@@ -251,6 +251,26 @@ describe Gitlab::Git::Blob, seed_helper: true do
end
end
describe '.batch_metadata' do
let(:blob_references) do
[
[SeedRepo::Commit::ID, "files/ruby/popen.rb"],
[SeedRepo::Commit::ID, 'six']
]
end
subject { described_class.batch_metadata(repository, blob_references) }
it 'returns an empty data attribute' do
first_blob, last_blob = subject
expect(first_blob.data).to be_blank
expect(first_blob.path).to eq("files/ruby/popen.rb")
expect(last_blob.data).to be_blank
expect(last_blob.path).to eq("six")
end
end
describe '.batch_lfs_pointers' do
let(:tree_object) { repository.rugged.rev_parse('master^{tree}') }
......
......@@ -12,7 +12,7 @@ describe Gitlab::Git::RawDiffChange do
expect(change.operation).to eq(:unknown)
expect(change.old_path).to be_blank
expect(change.new_path).to be_blank
expect(change.blob_size).to be_blank
expect(change.blob_size).to eq(0)
end
end
......
require 'spec_helper'
describe Gitlab::Metrics::Prometheus, :prometheus do
let(:all_metrics) { Gitlab::Metrics }
let(:registry) { all_metrics.registry }
describe '#reset_registry!' do
it 'clears existing metrics' do
registry.counter(:test, 'test metric')
expect(registry.metrics.count).to eq(1)
all_metrics.reset_registry!
expect(all_metrics.registry.metrics.count).to eq(0)
end
end
end
......@@ -495,6 +495,14 @@ describe Issuable do
expect(issue.total_time_spent).to eq(1800)
end
it 'updates issues updated_at' do
issue
Timecop.travel(1.minute.from_now) do
expect { spend_time(1800) }.to change { issue.updated_at }
end
end
end
context 'subtracting time' do
......@@ -510,9 +518,13 @@ describe Issuable do
context 'when time to subtract exceeds the total time spent' do
it 'raise a validation error' do
expect do
spend_time(-3600)
end.to raise_error(ActiveRecord::RecordInvalid)
Timecop.travel(1.minute.from_now) do
expect do
expect do
spend_time(-3600)
end.to raise_error(ActiveRecord::RecordInvalid)
end.not_to change { issue.updated_at }
end
end
end
end
......
......@@ -36,24 +36,26 @@ describe ShaAttribute do
end
context 'when the table does not exist' do
it 'allows the attribute to be added' do
it 'allows the attribute to be added and issues a warning' do
allow(model).to receive(:table_exists?).and_return(false)
expect(model).not_to receive(:columns)
expect(model).to receive(:attribute)
expect(model).to receive(:warn)
model.sha_attribute(:name)
end
end
context 'when the column does not exist' do
it 'raises ArgumentError' do
it 'allows the attribute to be added and issues a warning' do
allow(model).to receive(:table_exists?).and_return(true)
expect(model).to receive(:columns)
expect(model).not_to receive(:attribute)
expect(model).to receive(:attribute)
expect(model).to receive(:warn)
expect { model.sha_attribute(:no_name) }.to raise_error(ArgumentError)
model.sha_attribute(:no_name)
end
end
......
......@@ -159,6 +159,17 @@ describe ProjectWiki do
expect(page.title).to eq("autre pagé")
end
end
context 'pages with invalidly-encoded content' do
before do
create_page("encoding is fun", "f\xFCr".b)
end
it "can find the page" do
page = subject.find_page("encoding is fun")
expect(page.content).to eq("fr")
end
end
end
context 'when Gitaly wiki_find_page is enabled' do
......
......@@ -137,6 +137,13 @@ RSpec.configure do |config|
reset_delivered_emails!
end
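# Multiprocess Prometheus metrics are persisted in files on disk; remove them
# and reset the registry so examples tagged :prometheus start from a clean state.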
config.before(:example, :prometheus) do
matching_files = File.join(::Prometheus::Client.configuration.multiprocess_files_dir, "*.db")
Dir[matching_files].map { |filename| File.delete(filename) if File.file?(filename) }
Gitlab::Metrics.reset_registry!
end
config.around(:each, :use_clean_rails_memory_store_caching) do |example|
caching_store = Rails.cache
Rails.cache = ActiveSupport::Cache::MemoryStore.new
......
......@@ -70,8 +70,12 @@ shared_examples 'time tracking endpoints' do |issuable_name|
end
it "add spent time for #{issuable_name}" do
post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/add_spent_time", user),
duration: '2h'
Timecop.travel(1.minute.from_now) do
expect do
post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/add_spent_time", user),
duration: '2h'
end.to change { issuable.reload.updated_at }
end
expect(response).to have_gitlab_http_status(201)
expect(json_response['human_total_time_spent']).to eq('2h')
......@@ -79,7 +83,11 @@ shared_examples 'time tracking endpoints' do |issuable_name|
context 'when subtracting time' do
it 'subtracts time of the total spent time' do
issuable.update_attributes!(spend_time: { duration: 7200, user_id: user.id })
Timecop.travel(1.minute.from_now) do
expect do
issuable.update_attributes!(spend_time: { duration: 7200, user_id: user.id })
end.to change { issuable.reload.updated_at }
end
post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/add_spent_time", user),
duration: '-1h'
......@@ -93,8 +101,12 @@ shared_examples 'time tracking endpoints' do |issuable_name|
it 'does not modify the total time spent' do
issuable.update_attributes!(spend_time: { duration: 7200, user_id: user.id })
post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/add_spent_time", user),
duration: '-1w'
Timecop.travel(1.minute.from_now) do
expect do
post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/add_spent_time", user),
duration: '-1w'
end.not_to change { issuable.reload.updated_at }
end
expect(response).to have_gitlab_http_status(400)
expect(json_response['message']['time_spent'].first).to match(/exceeds the total time spent/)
......@@ -110,7 +122,11 @@ shared_examples 'time tracking endpoints' do |issuable_name|
end
it "resets spent time for #{issuable_name}" do
post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/reset_spent_time", user)
Timecop.travel(1.minute.from_now) do
expect do
post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/reset_spent_time", user)
end.to change { issuable.reload.updated_at }
end
expect(response).to have_gitlab_http_status(200)
expect(json_response['total_time_spent']).to eq(0)
......
module ForgeryProtection
def with_forgery_protection
ActionController::Base.allow_forgery_protection = true
yield
ensure
ActionController::Base.allow_forgery_protection = false
end
module_function :with_forgery_protection
end
RSpec.configure do |config|
config.around(:each, :allow_forgery_protection) do |example|
begin
ActionController::Base.allow_forgery_protection = true
ForgeryProtection.with_forgery_protection do
example.call
ensure
ActionController::Base.allow_forgery_protection = false
end
end
end
......@@ -7,113 +7,138 @@ describe ObjectStorage::MigrateUploadsWorker, :sidekiq do
end
end
let!(:projects) { create_list(:project, 10, :with_avatar) }
let(:uploads) { Upload.all }
let(:model_class) { Project }
let(:mounted_as) { :avatar }
let(:uploads) { Upload.all }
let(:to_store) { ObjectStorage::Store::REMOTE }
before do
stub_uploads_object_storage(AvatarUploader)
end
describe '.enqueue!' do
def enqueue!
described_class.enqueue!(uploads, Project, mounted_as, to_store)
end
shared_examples "uploads migration worker" do
describe '.enqueue!' do
def enqueue!
described_class.enqueue!(uploads, Project, mounted_as, to_store)
end
it 'is guarded by .sanity_check!' do
expect(described_class).to receive(:perform_async)
expect(described_class).to receive(:sanity_check!)
it 'is guarded by .sanity_check!' do
expect(described_class).to receive(:perform_async)
expect(described_class).to receive(:sanity_check!)
enqueue!
end
enqueue!
end
context 'sanity_check! fails' do
include_context 'sanity_check! fails'
context 'sanity_check! fails' do
include_context 'sanity_check! fails'
it 'does not enqueue a job' do
expect(described_class).not_to receive(:perform_async)
it 'does not enqueue a job' do
expect(described_class).not_to receive(:perform_async)
expect { enqueue! }.to raise_error(described_class::SanityCheckError)
expect { enqueue! }.to raise_error(described_class::SanityCheckError)
end
end
end
end
describe '.sanity_check!' do
shared_examples 'raises a SanityCheckError' do
let(:mount_point) { nil }
describe '.sanity_check!' do
shared_examples 'raises a SanityCheckError' do
let(:mount_point) { nil }
it do
expect { described_class.sanity_check!(uploads, model_class, mount_point) }
.to raise_error(described_class::SanityCheckError)
it do
expect { described_class.sanity_check!(uploads, model_class, mount_point) }
.to raise_error(described_class::SanityCheckError)
end
end
end
context 'uploader types mismatch' do
let!(:outlier) { create(:upload, uploader: 'FileUploader') }
before do
stub_const("WrongModel", Class.new)
end
include_examples 'raises a SanityCheckError'
end
context 'uploader types mismatch' do
let!(:outlier) { create(:upload, uploader: 'GitlabUploader') }
context 'model types mismatch' do
let!(:outlier) { create(:upload, model_type: 'Potato') }
include_examples 'raises a SanityCheckError'
end
include_examples 'raises a SanityCheckError'
end
context 'model types mismatch' do
let!(:outlier) { create(:upload, model_type: 'WrongModel') }
context 'mount point not found' do
include_examples 'raises a SanityCheckError' do
let(:mount_point) { :potato }
include_examples 'raises a SanityCheckError'
end
end
end
describe '#perform' do
def perform
described_class.new.perform(uploads.ids, model_class.to_s, mounted_as, to_store)
rescue ObjectStorage::MigrateUploadsWorker::Report::MigrationFailures
# swallow
context 'mount point not found' do
include_examples 'raises a SanityCheckError' do
let(:mount_point) { :potato }
end
end
end
shared_examples 'outputs correctly' do |success: 0, failures: 0|
total = success + failures
describe '#perform' do
def perform
described_class.new.perform(uploads.ids, model_class.to_s, mounted_as, to_store)
rescue ObjectStorage::MigrateUploadsWorker::Report::MigrationFailures
# swallow
end
shared_examples 'outputs correctly' do |success: 0, failures: 0|
total = success + failures
if success > 0
it 'outputs the reports' do
expect(Rails.logger).to receive(:info).with(%r{Migrated #{success}/#{total} files})
if success > 0
it 'outputs the reports' do
expect(Rails.logger).to receive(:info).with(%r{Migrated #{success}/#{total} files})
perform
perform
end
end
end
if failures > 0
it 'outputs upload failures' do
expect(Rails.logger).to receive(:warn).with(/Error .* I am a teapot/)
if failures > 0
it 'outputs upload failures' do
expect(Rails.logger).to receive(:warn).with(/Error .* I am a teapot/)
perform
perform
end
end
end
end
it_behaves_like 'outputs correctly', success: 10
it_behaves_like 'outputs correctly', success: 10
it 'migrates files' do
perform
it 'migrates files' do
perform
expect(Upload.where(store: ObjectStorage::Store::LOCAL).count).to eq(0)
end
aggregate_failures do
projects.each do |project|
expect(project.reload.avatar.upload.local?).to be_falsey
context 'migration is unsuccessful' do
before do
allow_any_instance_of(ObjectStorage::Concern)
.to receive(:migrate!).and_raise(CarrierWave::UploadError, "I am a teapot.")
end
it_behaves_like 'outputs correctly', failures: 10
end
end
end
context 'migration is unsuccessful' do
before do
allow_any_instance_of(ObjectStorage::Concern).to receive(:migrate!).and_raise(CarrierWave::UploadError, "I am a teapot.")
end
context "for AvatarUploader" do
let!(:projects) { create_list(:project, 10, :with_avatar) }
let(:mounted_as) { :avatar }
it_behaves_like 'outputs correctly', failures: 10
before do
stub_uploads_object_storage(AvatarUploader)
end
it_behaves_like "uploads migration worker"
end
context "for FileUploader" do
let!(:projects) { create_list(:project, 10) }
let(:secret) { SecureRandom.hex }
let(:mounted_as) { nil }
before do
stub_uploads_object_storage(FileUploader)
projects.map do |project|
uploader = FileUploader.new(project)
uploader.store!(fixture_file_upload('spec/fixtures/doc_sample.txt'))
end
end
it_behaves_like "uploads migration worker"
end
end