Commit e0a135dc authored by Rémy Coutable

Merge branch 'ce-to-ee-2017-11-25' into 'master'

CE upstream - Saturday

Closes #4139 and gitlab-ce#38916

See merge request gitlab-org/gitlab-ee!3549
parents 09bdd7d5 fad32bb1
......@@ -175,8 +175,10 @@ stages:
- master@gitlab/gitlabhq
- master@gitlab/gitlab-ee
##
# Trigger a package build in omnibus-gitlab repository
build-package:
#
package-qa:
image: ruby:2.4-alpine
before_script: []
stage: build
......
......@@ -2,6 +2,28 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.
## 10.2.2 (2017-11-23)
### Fixed (5 changes)
- Label addition/removal are not going to be redacted wrongfully in the API. !15080
- Fix bitbucket wiki import with hashed storage enabled. !15490
- Impersonation no longer gets stuck on password change. !15497
- Fix blank states using old css.
- Fix promoting milestone updating all issuables without milestone.
### Performance (3 changes)
- Update Issue Boards to fetch the notification subscription status asynchronously.
- Update composite pipelines index to include "id".
- Use arrays in Pipeline#latest_builds_with_artifacts.
### Other (2 changes)
- Don't move repositories and attachments for projects using hashed storage. !15479
- Add logs for monitoring the merge process.
## 10.2.1 (2017-11-22)
### Fixed (1 change)
......
......@@ -295,7 +295,7 @@ group :metrics do
gem 'influxdb', '~> 0.2', require: false
# Prometheus
gem 'prometheus-client-mmap', '~>0.7.0.beta18'
gem 'prometheus-client-mmap', '~> 0.7.0.beta36'
gem 'raindrops', '~> 0.18'
end
......
......@@ -516,7 +516,7 @@ GEM
mini_mime (0.1.4)
mini_portile2 (2.3.0)
minitest (5.7.0)
mmap2 (2.2.7)
mmap2 (2.2.9)
mousetrap-rails (1.4.6)
multi_json (1.12.2)
multi_xml (0.6.0)
......@@ -654,8 +654,8 @@ GEM
parser
unparser
procto (0.0.3)
prometheus-client-mmap (0.7.0.beta18)
mmap2 (~> 2.2, >= 2.2.7)
prometheus-client-mmap (0.7.0.beta36)
mmap2 (~> 2.2, >= 2.2.9)
pry (0.10.4)
coderay (~> 1.1.0)
method_source (~> 0.8.1)
......@@ -1149,7 +1149,7 @@ DEPENDENCIES
peek-sidekiq (~> 1.0.3)
pg (~> 0.18.2)
premailer-rails (~> 1.9.7)
prometheus-client-mmap (~> 0.7.0.beta18)
prometheus-client-mmap (~> 0.7.0.beta36)
pry-byebug (~> 3.4.1)
pry-rails (~> 0.3.4)
rack-attack (~> 4.4.1)
......
......@@ -355,4 +355,11 @@ module Issuable
def first_contribution?
false
end
##
# Overridden in MergeRequest
#
def wipless_title_changed(old_title)
old_title != title
end
end
......@@ -223,6 +223,12 @@ class MergeRequest < ActiveRecord::Base
work_in_progress?(title) ? title : "WIP: #{title}"
end
# Verifies whether the title has changed, ignoring the WIP prefix
# for merge requests.
def wipless_title_changed(old_title)
self.class.wipless_title(old_title) != self.wipless_title
end
def hook_attrs
Gitlab::HookData::MergeRequestBuilder.new(self).build
end
......
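For illustration, the new helper compares titles with the WIP prefix stripped, so toggling WIP alone does not count as a title change. The values below are made up, and wipless_title is assumed to strip the prefix, as the comment above describes:

  merge_request.title = "WIP: Add project search"
  merge_request.wipless_title_changed("Add project search")   # => false, only the WIP prefix differs
  merge_request.wipless_title_changed("WIP: Old title")        # => true, the underlying title changed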
......@@ -18,6 +18,7 @@ class Project < ActiveRecord::Base
include SelectForProjectAuthorization
include Routable
include GroupDescendant
include Gitlab::SQL::Pattern
# EE specific modules
prepend EE::Project
......@@ -423,32 +424,17 @@ class Project < ActiveRecord::Base
#
# query - The search query as a String.
def search(query)
ptable = arel_table
ntable = Namespace.arel_table
pattern = "%#{query}%"
# unscoping unnecessary conditions that'll be applied
# when executing `where("projects.id IN (#{union.to_sql})")`
projects = unscoped.select(:id).where(
ptable[:path].matches(pattern)
.or(ptable[:name].matches(pattern))
.or(ptable[:description].matches(pattern))
)
namespaces = unscoped.select(:id)
.joins(:namespace)
.where(ntable[:name].matches(pattern))
union = Gitlab::SQL::Union.new([projects, namespaces])
pattern = to_pattern(query)
where("projects.id IN (#{union.to_sql})") # rubocop:disable GitlabSecurity/SqlInjection
where(
arel_table[:path].matches(pattern)
.or(arel_table[:name].matches(pattern))
.or(arel_table[:description].matches(pattern))
)
end
def search_by_title(query)
pattern = "%#{query}%"
table = Project.arel_table
non_archived.where(table[:name].matches(pattern))
non_archived.where(arel_table[:name].matches(to_pattern(query)))
end
def visibility_levels
......
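For context, Project.search now delegates pattern building to the Gitlab::SQL::Pattern concern included above. A minimal sketch of what such a to_pattern helper presumably does, inferred from its usage here rather than from the actual implementation:

  module Gitlab
    module SQL
      module Pattern
        extend ActiveSupport::Concern

        class_methods do
          # Escape LIKE wildcards in user input and wrap it for a
          # substring match via Arel's #matches (sketch, assumed behaviour).
          def to_pattern(query)
            "%#{sanitize_sql_like(query)}%"
          end
        end
      end
    end
  end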
......@@ -41,6 +41,14 @@ module Issuable
end
end
def create_wip_note(old_title)
return unless issuable.is_a?(MergeRequest)
if MergeRequest.work_in_progress?(old_title) != issuable.work_in_progress?
SystemNoteService.handle_merge_request_wip(issuable, issuable.project, current_user)
end
end
def create_labels_note(old_labels)
added_labels = issuable.labels - old_labels
removed_labels = old_labels - issuable.labels
......@@ -49,7 +57,11 @@ module Issuable
end
def create_title_change_note(old_title)
SystemNoteService.change_title(issuable, issuable.project, current_user, old_title)
create_wip_note(old_title)
if issuable.wipless_title_changed(old_title)
SystemNoteService.change_title(issuable, issuable.project, current_user, old_title)
end
end
def create_description_change_note
......
......@@ -6,20 +6,6 @@ module MergeRequests
SystemNoteService.change_status(merge_request, merge_request.target_project, current_user, state, nil)
end
def create_title_change_note(issuable, old_title)
removed_wip = MergeRequest.work_in_progress?(old_title) && !issuable.work_in_progress?
added_wip = !MergeRequest.work_in_progress?(old_title) && issuable.work_in_progress?
changed_title = MergeRequest.wipless_title(old_title) != issuable.wipless_title
if removed_wip
SystemNoteService.remove_merge_request_wip(issuable, issuable.project, current_user)
elsif added_wip
SystemNoteService.add_merge_request_wip(issuable, issuable.project, current_user)
end
super if changed_title
end
def hook_data(merge_request, action, old_rev: nil, old_labels: [], old_assignees: [], old_total_time_spent: nil)
hook_data = merge_request.to_hook_data(current_user, old_labels: old_labels, old_assignees: old_assignees, old_total_time_spent: old_total_time_spent)
hook_data[:object_attributes][:action] = action
......
......@@ -241,14 +241,10 @@ module SystemNoteService
create_note(NoteSummary.new(noteable, project, author, body, action: 'merge'))
end
def remove_merge_request_wip(noteable, project, author)
body = 'unmarked as a **Work In Progress**'
def handle_merge_request_wip(noteable, project, author)
prefix = noteable.work_in_progress? ? "marked" : "unmarked"
create_note(NoteSummary.new(noteable, project, author, body, action: 'title'))
end
def add_merge_request_wip(noteable, project, author)
body = 'marked as a **Work In Progress**'
body = "#{prefix} as a **Work In Progress**"
create_note(NoteSummary.new(noteable, project, author, body, action: 'title'))
end
......
......@@ -44,9 +44,10 @@
%h4.title
Trigger
%p
%span.build-light-text Token:
#{@build.trigger_request.trigger.short_token}
- if @build.trigger_request&.trigger&.short_token
%p
%span.build-light-text Token:
#{@build.trigger_request.trigger.short_token}
- if @build.trigger_variables.any?
%p
......
---
title: Impersonation no longer gets stuck on password change.
merge_request: 15497
author:
type: fixed
---
title: Fix promoting milestone updating all issuables without milestone
title: Prevent 500 error when inspecting job after trigger was removed
merge_request:
author:
type: fixed
---
title: Update Issue Boards to fetch the notification subscription status asynchronously
merge_request:
author:
type: performance
---
title: Label addition/removal are not going to be redacted wrongfully in the API.
merge_request: 15080
author:
type: fixed
---
title: Fix bitbucket wiki import with hashed storage enabled
merge_request: 15490
author:
type: fixed
---
title: Don't move repositories and attachments for projects using hashed storage
merge_request: 15479
author:
type: other
---
title: Fix pulling and pushing using a personal access token with the sudo scope
merge_request:
author:
type: fixed
---
title: Use arrays in Pipeline#latest_builds_with_artifacts
title: Drastically improve project search performance by no longer searching namespace name
merge_request:
author:
type: performance
---
title: Update composite pipelines index to include "id"
merge_request:
author:
type: performance
---
title: Fix hashed storage for Import/Export uploads
merge_request: 15482
author:
type: fixed
---
title: Fix blank states using old css
title: Fix WIP system note not being created
merge_request:
author:
type: fixed
---
title: Add logs for monitoring the merge process
merge_request:
author:
type: other
---
title: Reenable Prometheus metrics, add more control over Prometheus method instrumentation
merge_request: 15558
author:
type: fixed
......@@ -12,16 +12,20 @@ Prometheus::Client.configure do |config|
end
config.pid_provider = -> do
wid = Prometheus::Client::Support::Unicorn.worker_id
wid = Process.pid if wid.nil?
if wid.nil?
worker_id = Prometheus::Client::Support::Unicorn.worker_id
if worker_id.nil?
"process_pid_#{Process.pid}"
else
"worker_id_#{wid}"
"worker_id_#{worker_id}"
end
end
end
Gitlab::Application.configure do |config|
# 0 should be Sentry to catch errors in this middleware
config.middleware.insert(1, Gitlab::Metrics::RequestsRackMiddleware)
end
Sidekiq.configure_server do |config|
config.on(:startup) do
Gitlab::Metrics::SidekiqMetricsExporter.instance.start
......
......@@ -145,11 +145,6 @@ def instrument_classes(instrumentation)
end
# rubocop:enable Metrics/AbcSize
Gitlab::Application.configure do |config|
# 0 should be Sentry to catch errors in this middleware
config.middleware.insert(1, Gitlab::Metrics::RequestsRackMiddleware)
end
if Gitlab::Metrics.enabled?
require 'pathname'
require 'influxdb'
......
......@@ -135,7 +135,7 @@ module Gitlab
token = PersonalAccessTokensFinder.new(state: 'active').find_by(token: password)
if token && valid_scoped_token?(token, available_scopes)
Gitlab::Auth::Result.new(token.user, nil, :personal_access_token, abilities_for_scope(token.scopes))
Gitlab::Auth::Result.new(token.user, nil, :personal_access_token, abilities_for_scopes(token.scopes))
end
end
......@@ -147,10 +147,15 @@ module Gitlab
AccessTokenValidationService.new(token).include_any_scope?(scopes)
end
def abilities_for_scope(scopes)
scopes.map do |scope|
self.public_send(:"#{scope}_scope_authentication_abilities") # rubocop:disable GitlabSecurity/PublicSend
end.flatten.uniq
def abilities_for_scopes(scopes)
abilities_by_scope = {
api: full_authentication_abilities,
read_registry: [:read_container_image]
}
scopes.flat_map do |scope|
abilities_by_scope.fetch(scope.to_sym, [])
end.uniq
end
def lfs_token_check(login, password, project)
......@@ -229,16 +234,6 @@ module Gitlab
:admin_container_image
]
end
alias_method :api_scope_authentication_abilities, :full_authentication_abilities
def read_registry_scope_authentication_abilities
[:read_container_image]
end
# The currently used auth method doesn't allow any actions for this scope
def read_user_scope_authentication_abilities
[]
end
def available_scopes(current_user = nil)
scopes = API_SCOPES + registry_scopes
......
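To illustrate the new mapping, scopes not present in the hash simply contribute no abilities, which is what the updated spec further down relies on (return values assumed from the hash above):

  abilities_for_scopes([:api, :read_registry])
  # => full_authentication_abilities plus :read_container_image, de-duplicated
  abilities_for_scopes([:read_user, :sudo])
  # => []  (neither scope grants git or registry abilities)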
......@@ -24,8 +24,7 @@ module Gitlab
end
def uploads_path
# TODO: decide what to do with uploads. We will use UUIDs here too?
File.join(Rails.root.join('public/uploads'), @project.path_with_namespace)
FileUploader.dynamic_path_segment(@project)
end
end
end
......
......@@ -6,29 +6,15 @@ module Gitlab
BASE_LABELS = { module: nil, method: nil }.freeze
attr_reader :real_time, :cpu_time, :call_count, :labels
def self.call_real_duration_histogram
return @call_real_duration_histogram if @call_real_duration_histogram
MUTEX.synchronize do
@call_real_duration_histogram ||= Gitlab::Metrics.histogram(
:gitlab_method_call_real_duration_seconds,
'Method calls real duration',
Transaction::BASE_LABELS.merge(BASE_LABELS),
[0.1, 0.2, 0.5, 1, 2, 5, 10]
)
end
end
def self.call_cpu_duration_histogram
return @call_cpu_duration_histogram if @call_cpu_duration_histogram
def self.call_duration_histogram
return @call_duration_histogram if @call_duration_histogram
MUTEX.synchronize do
@call_duration_histogram ||= Gitlab::Metrics.histogram(
:gitlab_method_call_cpu_duration_seconds,
'Method calls cpu duration',
:gitlab_method_call_duration_seconds,
'Method calls real duration',
Transaction::BASE_LABELS.merge(BASE_LABELS),
[0.1, 0.2, 0.5, 1, 2, 5, 10]
)
[0.01, 0.05, 0.1, 0.5, 1])
end
end
......@@ -59,8 +45,9 @@ module Gitlab
@cpu_time += cpu_time
@call_count += 1
self.class.call_real_duration_histogram.observe(@transaction.labels.merge(labels), real_time / 1000.0)
self.class.call_cpu_duration_histogram.observe(@transaction.labels.merge(labels), cpu_time / 1000.0)
if call_measurement_enabled? && above_threshold?
self.class.call_duration_histogram.observe(@transaction.labels.merge(labels), real_time / 1000.0)
end
retval
end
......@@ -83,6 +70,10 @@ module Gitlab
def above_threshold?
real_time >= Metrics.method_call_threshold
end
def call_measurement_enabled?
Feature.get(:prometheus_metrics_method_instrumentation).enabled?
end
end
end
end
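For reference, the histogram observation is now gated by a feature flag, so it can be toggled at runtime the same way the updated specs below do:

  # In a Rails console (flag name taken from the diff above):
  Feature.get(:prometheus_metrics_method_instrumentation).enable    # record call durations above the threshold
  Feature.get(:prometheus_metrics_method_instrumentation).disable   # skip histogram observation entirely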
......@@ -17,9 +17,9 @@ module Gitlab
end
def prometheus_metrics_enabled?
# force disable prometheus_metrics until
# https://gitlab.com/gitlab-org/prometheus-client-mmap/merge_requests/11 is ready
false
return @prometheus_metrics_enabled if defined?(@prometheus_metrics_enabled)
@prometheus_metrics_enabled = prometheus_metrics_enabled_unmemoized
end
def registry
......
......@@ -30,7 +30,7 @@ module Gitlab
def initialize(current_user, limit_projects, query)
@current_user = current_user
@limit_projects = limit_projects || Project.all
@query = Shellwords.shellescape(query) if query.present?
@query = query
end
def objects(scope, page = nil)
......
......@@ -2,33 +2,95 @@
require 'net/http'
require 'json'
require 'cgi'
#
# Dummy way to find out in which repo we are, CE or EE
#
def ee?
File.exist?('CHANGELOG-EE.md')
end
module Omnibus
PROJECT_PATH = 'gitlab-org/omnibus-gitlab'.freeze
uri = URI('https://gitlab.com/api/v4/projects/20699/trigger/pipeline')
params = {
"ref" => ENV["OMNIBUS_BRANCH"] || "master",
"token" => ENV["BUILD_TRIGGER_TOKEN"],
"variables[GITLAB_VERSION]" => ENV["CI_COMMIT_SHA"],
"variables[ALTERNATIVE_SOURCES]" => true,
"variables[ee]" => ee? ? 'true' : 'false'
}
Dir.glob("*_VERSION").each do |version_file|
params["variables[#{version_file}]"] = File.read(version_file).strip
end
class Trigger
TOKEN = ENV['BUILD_TRIGGER_TOKEN']
def initialize
@uri = URI("https://gitlab.com/api/v4/projects/#{CGI.escape(Omnibus::PROJECT_PATH)}/trigger/pipeline")
@params = env_params.merge(file_params).merge(token: TOKEN)
end
def invoke!
res = Net::HTTP.post_form(@uri, @params)
id = JSON.parse(res.body)['id']
if id
puts "Triggered https://gitlab.com/#{Omnibus::PROJECT_PATH}/pipelines/#{id}"
else
raise "Trigger failed! The response from the trigger is: #{res.body}"
end
Omnibus::Pipeline.new(id)
end
private
def env_params
{
"ref" => ENV["OMNIBUS_BRANCH"] || "master",
"variables[GITLAB_VERSION]" => ENV["CI_COMMIT_SHA"],
"variables[ALTERNATIVE_SOURCES]" => true,
"variables[ee]" => ENV["EE_PACKAGE"] || "false"
}
end
def file_params
Hash.new.tap do |params|
Dir.glob("*_VERSION").each do |version_file|
params["variables[#{version_file}]"] = File.read(version_file).strip
end
end
end
end
res = Net::HTTP.post_form(uri, params)
pipeline_id = JSON.parse(res.body)['id']
class Pipeline
INTERVAL = 60 # seconds
MAX_DURATION = 3600 * 3 # 3 hours
unless pipeline_id.nil?
puts "Triggered pipeline can be found at https://gitlab.com/gitlab-org/omnibus-gitlab/pipelines/#{pipeline_id}"
else
puts "Trigger failed. The response from trigger is: "
puts res.body
def initialize(id)
@start = Time.now.to_i
@uri = URI("https://gitlab.com/api/v4/projects/#{CGI.escape(Omnibus::PROJECT_PATH)}/pipelines/#{id}")
end
def wait!
loop do
raise 'Pipeline timeout!' if timeout?
case status
when :pending, :running
puts "Waiting another #{INTERVAL} seconds ..."
sleep INTERVAL
when :success
puts "Omnibus pipeline succeeded!"
break
else
raise "Omnibus pipeline did not succeed!"
end
STDOUT.flush
end
end
def timeout?
Time.now.to_i > (@start + MAX_DURATION)
end
def status
req = Net::HTTP::Get.new(@uri)
req['PRIVATE-TOKEN'] = ENV['GITLAB_QA_ACCESS_TOKEN']
res = Net::HTTP.start(@uri.hostname, @uri.port, use_ssl: true) do |http|
http.request(req)
end
JSON.parse(res.body)['status'].to_s.to_sym
end
end
end
Omnibus::Trigger.new.invoke!.wait!
......@@ -136,7 +136,7 @@ describe Admin::ProjectsFinder do
context 'filter by name' do
let(:params) { { name: 'C' } }
it { is_expected.to match_array([shared_project, public_project, private_project]) }
it { is_expected.to match_array([public_project]) }
end
context 'sorting' do
......
......@@ -207,7 +207,7 @@ describe Gitlab::Auth do
end
it 'limits abilities based on scope' do
personal_access_token = create(:personal_access_token, scopes: ['read_user'])
personal_access_token = create(:personal_access_token, scopes: %w[read_user sudo])
expect(gl_auth).to receive(:rate_limit!).with('ip', success: true, login: '')
expect(gl_auth.find_for_git_client('', personal_access_token.token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(personal_access_token.user, nil, :personal_access_token, []))
......
require 'spec_helper'
describe Gitlab::ImportExport::UploadsRestorer do
describe 'bundle a project Git repo' do
let(:export_path) { "#{Dir.tmpdir}/uploads_saver_spec" }
let(:shared) { Gitlab::ImportExport::Shared.new(relative_path: project.full_path) }
let(:uploads_path) { FileUploader.dynamic_path_segment(project) }
before do
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
FileUtils.mkdir_p(File.join(shared.export_path, 'uploads/random'))
FileUtils.touch(File.join(shared.export_path, 'uploads/random', "dummy.txt"))
end
after do
FileUtils.rm_rf(export_path)
end
describe 'legacy storage' do
let(:project) { create(:project) }
subject(:restorer) { described_class.new(project: project, shared: shared) }
it 'saves the uploads successfully' do
expect(restorer.restore).to be true
end
it 'copies the uploads to the project path' do
restorer.restore
uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) }
expect(uploads).to include('dummy.txt')
end
end
describe 'hashed storage' do
let(:project) { create(:project, :hashed) }
subject(:restorer) { described_class.new(project: project, shared: shared) }
it 'saves the uploads successfully' do
expect(restorer.restore).to be true
end
it 'copies the uploads to the project path' do
restorer.restore
uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) }
expect(uploads).to include('dummy.txt')
end
end
end
end
require 'spec_helper'
describe Gitlab::ImportExport::UploadsSaver do
describe 'bundle a project Git repo' do
let(:export_path) { "#{Dir.tmpdir}/uploads_saver_spec" }
let(:file) { fixture_file_upload(Rails.root + 'spec/fixtures/banana_sample.gif', 'image/gif') }
let(:shared) { Gitlab::ImportExport::Shared.new(relative_path: project.full_path) }
before do
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
end
after do
FileUtils.rm_rf(export_path)
end
describe 'legacy storage' do
let(:project) { create(:project) }
subject(:saver) { described_class.new(shared: shared, project: project) }
before do
UploadService.new(project, file, FileUploader).execute
end
it 'saves the uploads successfully' do
expect(saver.save).to be true
end
it 'copies the uploads to the export path' do
saver.save
uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) }
expect(uploads).to include('banana_sample.gif')
end
end
describe 'hashed storage' do
let(:project) { create(:project, :hashed) }
subject(:saver) { described_class.new(shared: shared, project: project) }
before do
UploadService.new(project, file, FileUploader).execute
end
it 'saves the uploads successfully' do
expect(saver.save).to be true
end
it 'copies the uploads to the export path' do
saver.save
uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) }
expect(uploads).to include('banana_sample.gif')
end
end
end
end
......@@ -13,16 +13,52 @@ describe Gitlab::Metrics::MethodCall do
expect(method_call.call_count).to eq(1)
end
it 'observes the performance of the supplied block' do
expect(described_class.call_real_duration_histogram)
.to receive(:observe)
.with({ module: :Foo, method: '#bar' }, be_a_kind_of(Numeric))
context 'when measurement is above threshold' do
before do
allow(method_call).to receive(:above_threshold?).and_return(true)
end
expect(described_class.call_cpu_duration_histogram)
.to receive(:observe)
.with({ module: :Foo, method: '#bar' }, be_a_kind_of(Numeric))
context 'prometheus instrumentation is enabled' do
before do
Feature.get(:prometheus_metrics_method_instrumentation).enable
end
method_call.measure { 'foo' }
it 'observes the performance of the supplied block' do
expect(described_class.call_duration_histogram)
.to receive(:observe)
.with({ module: :Foo, method: '#bar' }, be_a_kind_of(Numeric))
method_call.measure { 'foo' }
end
end
context 'prometheus instrumentation is disabled' do
before do
Feature.get(:prometheus_metrics_method_instrumentation).disable
end
it 'does not observe the performance' do
expect(described_class.call_duration_histogram)
.not_to receive(:observe)
method_call.measure { 'foo' }
end
end
end
context 'when measurement is below threshold' do
before do
allow(method_call).to receive(:above_threshold?).and_return(false)
Feature.get(:prometheus_metrics_method_instrumentation).enable
end
it 'does not observe the performance' do
expect(described_class.call_duration_histogram)
.not_to receive(:observe)
method_call.measure { 'foo' }
end
end
end
......@@ -43,7 +79,13 @@ describe Gitlab::Metrics::MethodCall do
end
describe '#above_threshold?' do
before do
allow(Gitlab::Metrics).to receive(:method_call_threshold).and_return(100)
end
it 'returns false when the total call time is not above the threshold' do
expect(method_call).to receive(:real_time).and_return(9)
expect(method_call.above_threshold?).to eq(false)
end
......
......@@ -1398,24 +1398,6 @@ describe Project do
expect(described_class.search(project.path.upcase)).to eq([project])
end
it 'returns projects with a matching namespace name' do
expect(described_class.search(project.namespace.name)).to eq([project])
end
it 'returns projects with a partially matching namespace name' do
expect(described_class.search(project.namespace.name[0..2])).to eq([project])
end
it 'returns projects with a matching namespace name regardless of the casing' do
expect(described_class.search(project.namespace.name.upcase)).to eq([project])
end
it 'returns projects when eager loading namespaces' do
relation = described_class.all.includes(:namespace)
expect(relation.search(project.namespace.name)).to eq([project])
end
describe 'with pending_delete project' do
let(:pending_delete_project) { create(:project, pending_delete: true) }
......
......@@ -18,7 +18,18 @@ describe Issuable::CommonSystemNotesService do
note = Note.last
expect(note.note).to match(note_text)
expect(note.noteable_type).to eq('Issue')
expect(note.noteable_type).to eq(issuable.class.name)
end
end
shared_examples 'WIP notes creation' do |wip_action|
subject { described_class.new(project, user).execute(issuable, []) }
it 'creates WIP toggle and title change notes' do
expect { subject }.to change { Note.count }.from(0).to(2)
expect(Note.first.note).to match("#{wip_action} as a **Work In Progress**")
expect(Note.second.note).to match('changed title')
end
end
......@@ -45,5 +56,35 @@ describe Issuable::CommonSystemNotesService do
it_behaves_like 'system note creation', {}, 'changed milestone'
end
context 'with merge requests WIP note' do
context 'adding WIP note' do
let(:issuable) { create(:merge_request, title: "merge request") }
it_behaves_like 'system note creation', { title: "WIP merge request" }, 'marked as a **Work In Progress**'
context 'and changing title' do
before do
issuable.update_attribute(:title, "WIP changed title")
end
it_behaves_like 'WIP notes creation', 'marked'
end
end
context 'removing WIP note' do
let(:issuable) { create(:merge_request, title: "WIP merge request") }
it_behaves_like 'system note creation', { title: "merge request" }, 'unmarked as a **Work In Progress**'
context 'and changing title' do
before do
issuable.update_attribute(:title, "changed title")
end
it_behaves_like 'WIP notes creation', 'unmarked'
end
end
end
end
end
......@@ -35,8 +35,8 @@ describe Search::GlobalService do
expect(results.objects('projects')).to match_array [internal_project, public_project]
end
it 'namespace name is searchable' do
results = described_class.new(user, search: found_project.namespace.path).execute
it 'project name is searchable' do
results = described_class.new(user, search: found_project.name).execute
expect(results.objects('projects')).to match_array [found_project]
end
......
......@@ -1102,31 +1102,33 @@ describe SystemNoteService do
end
end
describe '.remove_merge_request_wip' do
let(:noteable) { create(:issue, project: project, title: 'WIP: Lorem ipsum') }
describe '.handle_merge_request_wip' do
context 'adding wip note' do
let(:noteable) { create(:merge_request, source_project: project, title: 'WIP Lorem ipsum') }
subject { described_class.remove_merge_request_wip(noteable, project, author) }
subject { described_class.handle_merge_request_wip(noteable, project, author) }
it_behaves_like 'a system note' do
let(:action) { 'title' }
end
it_behaves_like 'a system note' do
let(:action) { 'title' }
end
it 'sets the note text' do
expect(subject.note).to eq 'unmarked as a **Work In Progress**'
it 'sets the note text' do
expect(subject.note).to eq 'marked as a **Work In Progress**'
end
end
end
describe '.add_merge_request_wip' do
let(:noteable) { create(:issue, project: project, title: 'Lorem ipsum') }
context 'removing wip note' do
let(:noteable) { create(:merge_request, source_project: project, title: 'Lorem ipsum') }
subject { described_class.add_merge_request_wip(noteable, project, author) }
subject { described_class.handle_merge_request_wip(noteable, project, author) }
it_behaves_like 'a system note' do
let(:action) { 'title' }
end
it_behaves_like 'a system note' do
let(:action) { 'title' }
end
it 'sets the note text' do
expect(subject.note).to eq 'marked as a **Work In Progress**'
it 'sets the note text' do
expect(subject.note).to eq 'unmarked as a **Work In Progress**'
end
end
end
......
......@@ -185,6 +185,31 @@ describe 'projects/jobs/show' do
end
end
context 'when incomplete trigger_request is used' do
before do
build.trigger_request = FactoryGirl.build(:ci_trigger_request, trigger: nil)
end
it 'does not render the token block' do
render
expect(rendered).not_to have_content('Token')
end
end
context 'when complete trigger_request is used' do
before do
build.trigger_request = FactoryGirl.build(:ci_trigger_request)
end
it 'renders the token' do
render
expect(rendered).to have_content('Token')
expect(rendered).to have_content(build.trigger_request.trigger.short_token)
end
end
describe 'commit title in sidebar' do
let(:commit_title) { project.commit.title }
......
......@@ -58,6 +58,10 @@
version "1.0.2"
resolved "https://registry.yarnpkg.com/@gitlab-org/gitlab-svgs/-/gitlab-svgs-1.0.2.tgz#e4d29058e2bb438ba71ac525c6397ef15ae2877b"
"@types/jquery@^2.0.40":
version "2.0.48"
resolved "https://registry.yarnpkg.com/@types/jquery/-/jquery-2.0.48.tgz#3e90d8cde2d29015e5583017f7830cb3975b2eef"
abbrev@1, abbrev@1.0.x:
version "1.0.9"
resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.0.9.tgz#91b4792588a7738c25f35dd6f63752a2f8776135"
......@@ -4031,14 +4035,7 @@ lru-cache@2.2.x:
version "2.2.4"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-2.2.4.tgz#6c658619becf14031d0d0b594b16042ce4dc063d"
lru-cache@^4.0.1:
version "4.0.2"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.0.2.tgz#1d17679c069cda5d040991a09dbc2c0db377e55e"
dependencies:
pseudomap "^1.0.1"
yallist "^2.0.0"
lru-cache@^4.1.1:
lru-cache@^4.0.1, lru-cache@^4.1.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.1.tgz#622e32e82488b49279114a4f9ecf45e7cd6bba55"
dependencies:
......@@ -4196,11 +4193,7 @@ mkdirp@0.5.x, "mkdirp@>=0.5 0", mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@~0.5.0, mkd
dependencies:
minimist "0.0.8"
moment@2.x:
version "2.17.1"
resolved "https://registry.yarnpkg.com/moment/-/moment-2.17.1.tgz#fed9506063f36b10f066c8b59a144d7faebe1d82"
moment@^2.18.1:
moment@2.x, moment@^2.18.1:
version "2.19.2"
resolved "https://registry.yarnpkg.com/moment/-/moment-2.19.2.tgz#8a7f774c95a64550b4c7ebd496683908f9419dbe"
......@@ -5094,7 +5087,7 @@ ps-tree@^1.0.1:
dependencies:
event-stream "~3.3.0"
pseudomap@^1.0.1, pseudomap@^1.0.2:
pseudomap@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3"
......@@ -5478,11 +5471,7 @@ resolve@1.1.x:
version "1.1.7"
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b"
resolve@^1.1.6, resolve@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.2.0.tgz#9589c3f2f6149d1417a40becc1663db6ec6bc26c"
resolve@^1.4.0:
resolve@^1.1.6, resolve@^1.2.0, resolve@^1.4.0:
version "1.5.0"
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.5.0.tgz#1f09acce796c9a762579f31b2c1cc4c3cddf9f36"
dependencies:
......@@ -6100,9 +6089,11 @@ thunky@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/thunky/-/thunky-0.1.0.tgz#bf30146824e2b6e67b0f2d7a4ac8beb26908684e"
timeago.js@^2.0.5:
version "2.0.5"
resolved "https://registry.yarnpkg.com/timeago.js/-/timeago.js-2.0.5.tgz#730c74fbdb0b0917a553675a4460e3a7f80db86c"
timeago.js@^3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/timeago.js/-/timeago.js-3.0.2.tgz#32a67e7c0d887ea42ca588d3aae26f77de5e76cc"
dependencies:
"@types/jquery" "^2.0.40"
timed-out@^2.0.0:
version "2.0.0"
......@@ -6671,7 +6662,7 @@ y18n@^3.2.1:
version "3.2.1"
resolved "https://registry.yarnpkg.com/y18n/-/y18n-3.2.1.tgz#6d15fba884c08679c0d77e88e7759e811e07fa41"
yallist@^2.0.0, yallist@^2.1.2:
yallist@^2.1.2:
version "2.1.2"
resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52"
......