Commit da246582 authored by Grzegorz Bizon

Merge branch 'master' into 'backstage/gb/use-persisted-stages-to-improve-pipelines-table'

# Conflicts:
#   db/schema.rb
parents 809a50fc 99218302
@@ -182,7 +182,7 @@ Assigning a team label makes sure issues get the attention of the appropriate
 people.
 The current team labels are ~Distribution, ~"CI/CD", ~Discussion, ~Documentation, ~Quality,
-~Geo, ~Gitaly, ~Monitoring, ~Platform, ~Release, ~"Security Products" and ~"UX".
+~Geo, ~Gitaly, ~Monitoring, ~Platform, ~Release, ~"Security Products", ~"Configuration", and ~"UX".
 The descriptions on the [labels page][labels-page] explain what falls under the
 responsibility of each team.
......
@@ -72,8 +72,6 @@ GEM
     attr_encrypted (3.1.0)
       encryptor (~> 3.0.0)
     attr_required (1.0.1)
-    autoprefixer-rails (8.1.0.1)
-      execjs
     awesome_print (1.2.0)
     axiom-types (0.1.1)
       descendants_tracker (~> 0.0.4)
@@ -93,9 +91,6 @@ GEM
     binding_of_caller (0.7.3)
      debug_inspector (>= 0.0.1)
     blankslate (2.1.2.4)
-    bootstrap-sass (3.3.7)
-      autoprefixer-rails (>= 5.2.1)
-      sass (>= 3.3.4)
     bootstrap_form (2.7.0)
     brakeman (4.2.1)
     browser (2.5.3)
@@ -175,7 +170,7 @@ GEM
     diff-lcs (1.3)
     diffy (3.1.0)
     docile (1.1.5)
-    domain_name (0.5.20170404)
+    domain_name (0.5.20180417)
       unf (>= 0.0.5, < 1.0.0)
     doorkeeper (4.3.1)
       railties (>= 4.2)
@@ -185,9 +180,10 @@ GEM
     dropzonejs-rails (0.7.4)
       rails (> 3.1)
     email_reply_trimmer (0.1.10)
-    email_spec (1.6.0)
+    email_spec (2.2.0)
+      htmlentities (~> 4.3.3)
       launchy (~> 2.1)
-      mail (~> 2.2)
+      mail (~> 2.7)
     encryptor (3.0.0)
     equalizer (0.0.11)
     erubis (2.7.0)
@@ -288,7 +284,7 @@ GEM
       gettext_i18n_rails (>= 0.7.1)
       po_to_json (>= 1.0.0)
       rails (>= 3.2.0)
-    gitaly-proto (0.99.0)
+    gitaly-proto (0.100.0)
       google-protobuf (~> 3.1)
       grpc (~> 1.10)
     github-linguist (5.3.3)
@@ -365,9 +361,9 @@ GEM
     grape-entity (0.7.1)
       activesupport (>= 4.0)
       multi_json (>= 1.3.2)
-    grape-route-helpers (2.1.0)
+    grape-path-helpers (1.0.0)
       activesupport
-      grape (>= 0.16.0)
+      grape (~> 1.0)
       rake
     grape_logging (1.7.0)
       grape
@@ -417,6 +413,7 @@ GEM
     httpclient (2.8.3)
     i18n (1.0.1)
       concurrent-ruby (~> 1.0)
+    icalendar (2.4.1)
     ice_nine (0.11.2)
     influxdb (0.5.3)
     ipaddress (0.8.3)
@@ -450,9 +447,9 @@ GEM
     kgio (2.11.2)
     knapsack (1.16.0)
       rake
-    kubeclient (3.0.0)
+    kubeclient (3.1.1)
       http (~> 2.2.2)
-      recursive-open-struct (~> 1.0.4)
+      recursive-open-struct (~> 1.0, >= 1.0.4)
       rest-client (~> 2.0)
     launchy (2.4.3)
       addressable (~> 2.3)
@@ -521,15 +518,16 @@ GEM
       multi_json (~> 1.3)
       multi_xml (~> 0.5)
       rack (>= 1.2, < 3)
-    octokit (4.8.0)
+    octokit (4.9.0)
       sawyer (~> 0.8.0, >= 0.5.3)
     omniauth (1.8.1)
       hashie (>= 3.4.6, < 3.6.0)
       rack (>= 1.6.2, < 3)
     omniauth-auth0 (2.0.0)
       omniauth-oauth2 (~> 1.4)
-    omniauth-authentiq (0.3.1)
-      omniauth-oauth2 (~> 1.3, >= 1.3.1)
+    omniauth-authentiq (0.3.3)
+      jwt (>= 1.5)
+      omniauth-oauth2 (>= 1.5)
     omniauth-azure-oauth2 (0.0.9)
       jwt (~> 1.0)
       omniauth (~> 1.0)
@@ -628,7 +626,7 @@ GEM
       parser
       unparser
     procto (0.0.3)
-    prometheus-client-mmap (0.9.2)
+    prometheus-client-mmap (0.9.3)
     pry (0.11.3)
       coderay (~> 1.1.0)
       method_source (~> 0.9.0)
@@ -702,11 +700,11 @@ GEM
       ffi
     rbnacl-libsodium (1.0.16)
       rbnacl (>= 3.0.1)
-    rdoc (4.3.0)
+    rdoc (6.0.4)
     re2 (1.1.1)
     recaptcha (3.4.0)
       json
-    recursive-open-struct (1.0.5)
+    recursive-open-struct (1.1.0)
     redcarpet (3.4.0)
     redis (3.3.5)
     redis-actionpack (5.0.2)
@@ -716,8 +714,8 @@ GEM
     redis-activesupport (5.0.4)
       activesupport (>= 3, < 6)
       redis-store (>= 1.3, < 2)
-    redis-namespace (1.5.3)
-      redis (~> 3.0, >= 3.0.4)
+    redis-namespace (1.6.0)
+      redis (>= 3.0.4)
     redis-rack (2.0.4)
       rack (>= 1.5, < 3)
       redis-store (>= 1.2, < 2)
@@ -836,7 +834,7 @@ GEM
       activesupport (>= 3.1)
     select2-rails (3.5.10)
       thor (~> 0.14)
-    selenium-webdriver (3.11.0)
+    selenium-webdriver (3.12.0)
       childprocess (~> 0.5)
       rubyzip (~> 1.2)
     sentry-raven (2.7.2)
@@ -986,7 +984,7 @@ DEPENDENCIES
   asciidoctor-plantuml (= 0.0.8)
   asset_sync (~> 2.4)
   attr_encrypted (~> 3.1.0)
-  awesome_print (~> 1.2.0)
+  awesome_print
   babosa (~> 1.0.2)
   base32 (~> 0.3.0)
   batch-loader (~> 1.2.1)
@@ -994,7 +992,6 @@ DEPENDENCIES
   benchmark-ips (~> 2.3.0)
   better_errors (~> 2.1.0)
   binding_of_caller (~> 0.7.2)
-  bootstrap-sass (~> 3.3.0)
   bootstrap_form (~> 2.7.0)
   brakeman (~> 4.2)
   browser (~> 2.2)
@@ -1021,7 +1018,7 @@ DEPENDENCIES
   doorkeeper-openid_connect (~> 1.3)
   dropzonejs-rails (~> 0.7.1)
   email_reply_trimmer (~> 0.1)
-  email_spec (~> 1.6.0)
+  email_spec (~> 2.2.0)
   factory_bot_rails (~> 4.8.2)
   faraday (~> 0.12)
   fast_blank
@@ -1045,7 +1042,7 @@ DEPENDENCIES
   gettext (~> 3.2.2)
   gettext_i18n_rails (~> 1.8.0)
   gettext_i18n_rails_js (~> 1.3)
-  gitaly-proto (~> 0.99.0)
+  gitaly-proto (~> 0.100.0)
   github-linguist (~> 5.3.3)
   gitlab-flowdock-git-hook (~> 1.0.1)
   gitlab-gollum-lib (~> 4.2)
@@ -1059,7 +1056,7 @@ DEPENDENCIES
   gpgme
   grape (~> 1.0)
   grape-entity (~> 0.7.1)
-  grape-route-helpers (~> 2.1.0)
+  grape-path-helpers (~> 1.0)
   grape_logging (~> 1.7)
   grpc (~> 1.11.0)
   haml_lint (~> 0.26.0)
@@ -1070,6 +1067,7 @@ DEPENDENCIES
   html-pipeline (~> 2.7.1)
   html2text
   httparty (~> 0.13.3)
+  icalendar
   influxdb (~> 0.2)
   jira-ruby (~> 1.4)
   jquery-atwho-rails (~> 1.3.2)
@@ -1077,7 +1075,7 @@ DEPENDENCIES
   jwt (~> 1.5.6)
   kaminari (~> 1.0)
   knapsack (~> 1.16)
-  kubeclient (~> 3.0)
+  kubeclient (~> 3.1.0)
   letter_opener_web (~> 1.3.0)
   license_finder (~> 3.1)
   licensee (~> 8.9)
@@ -1092,10 +1090,10 @@ DEPENDENCIES
   net-ssh (~> 4.2.0)
   nokogiri (~> 1.8.2)
   oauth2 (~> 1.4)
-  octokit (~> 4.8)
+  octokit (~> 4.9)
   omniauth (~> 1.8)
   omniauth-auth0 (~> 2.0.0)
-  omniauth-authentiq (~> 0.3.1)
+  omniauth-authentiq (~> 0.3.3)
   omniauth-azure-oauth2 (~> 0.0.9)
   omniauth-cas3 (~> 1.1.4)
   omniauth-facebook (~> 4.0.0)
@@ -1118,7 +1116,7 @@ DEPENDENCIES
   peek-sidekiq (~> 1.0.3)
   pg (~> 0.18.2)
   premailer-rails (~> 1.9.7)
-  prometheus-client-mmap (~> 0.9.2)
+  prometheus-client-mmap (~> 0.9.3)
   pry-byebug (~> 3.4.1)
   pry-rails (~> 0.3.4)
   rack-attack (~> 4.4.1)
@@ -1134,12 +1132,12 @@ DEPENDENCIES
   rblineprof (~> 0.3.6)
   rbnacl (~> 4.0)
   rbnacl-libsodium
-  rdoc (~> 4.2)
+  rdoc (~> 6.0)
   re2 (~> 1.1.1)
   recaptcha (~> 3.0)
   redcarpet (~> 3.4)
   redis (~> 3.2)
-  redis-namespace (~> 1.5.2)
+  redis-namespace (~> 1.6.0)
   redis-rails (~> 5.0.2)
   request_store (~> 1.3)
   responders (~> 2.0)
@@ -1154,6 +1152,7 @@ DEPENDENCIES
   rubocop-rspec (~> 1.22.1)
   ruby-fogbugz (~> 0.2.1)
   ruby-prof (~> 0.17.0)
+  ruby-progressbar
   ruby_parser (~> 3.8)
   rufus-scheduler (~> 3.4)
   rugged (~> 0.27)
@@ -1162,12 +1161,12 @@ DEPENDENCIES
   scss_lint (~> 0.56.0)
   seed-fu (~> 2.3.7)
   select2-rails (~> 3.5.9)
-  selenium-webdriver (~> 3.5)
+  selenium-webdriver (~> 3.12)
   sentry-raven (~> 2.7)
   settingslogic (~> 2.0.9)
   sham_rack (~> 1.3.6)
   shoulda-matchers (~> 3.1.2)
-  sidekiq (~> 5.0)
+  sidekiq (~> 5.1)
   sidekiq-cron (~> 0.6.0)
   sidekiq-limit_fetch (~> 3.4)
   simple_po_parser (~> 1.1.2)
@@ -1199,4 +1198,4 @@ DEPENDENCIES
   wikicloth (= 0.8.1)

 BUNDLED WITH
-   1.16.1
+   1.16.2
 <script>
+import $ from 'jquery';
 import { mapActions, mapGetters, mapState } from 'vuex';
 import Icon from '~/vue_shared/components/icon.vue';
 import tooltip from '~/vue_shared/directives/tooltip';
@@ -20,6 +21,13 @@ export default {
   },
   methods: {
     ...mapActions(['updateActivityBarView']),
+    changedActivityView(e, view) {
+      e.currentTarget.blur();
+      this.updateActivityBarView(view);
+      $(e.currentTarget).tooltip('hide');
+    },
   },
   activityBarViews,
 };
@@ -54,7 +62,7 @@ export default {
       :class="{
         active: currentActivityView === $options.activityBarViews.edit
       }"
-      @click.prevent="updateActivityBarView($options.activityBarViews.edit)"
+      @click.prevent="changedActivityView($event, $options.activityBarViews.edit)"
       :title="s__('IDE|Edit')"
       :aria-label="s__('IDE|Edit')"
     >
@@ -73,7 +81,7 @@ export default {
       :class="{
         active: currentActivityView === $options.activityBarViews.review
       }"
-      @click.prevent="updateActivityBarView($options.activityBarViews.review)"
+      @click.prevent="changedActivityView($event, $options.activityBarViews.review)"
       :title="s__('IDE|Review')"
       :aria-label="s__('IDE|Review')"
     >
@@ -92,7 +100,7 @@ export default {
       :class="{
         active: currentActivityView === $options.activityBarViews.commit
       }"
-      @click.prevent="updateActivityBarView($options.activityBarViews.commit)"
+      @click.prevent="changedActivityView($event, $options.activityBarViews.commit)"
      :title="s__('IDE|Commit')"
      :aria-label="s__('IDE|Commit')"
     >
......
@@ -14,6 +14,7 @@ export const EPIC_NOTEABLE_TYPE = 'epic';
 export const MERGE_REQUEST_NOTEABLE_TYPE = 'merge_request';
 export const UNRESOLVE_NOTE_METHOD_NAME = 'delete';
 export const RESOLVE_NOTE_METHOD_NAME = 'post';
+export const DESCRIPTION_TYPE = 'changed the description';

 export const NOTEABLE_TYPE_MAPPING = {
   Issue: ISSUE_NOTEABLE_TYPE,
......
import { n__, s__, sprintf } from '~/locale';
import { DESCRIPTION_TYPE } from '../constants';
/**
* Builds a copy of a description system note whose text reads 'changed the description n times within m minutes'
*/
export const changeDescriptionNote = (note, descriptionChangedTimes, timeDifferenceMinutes) => {
const descriptionNote = Object.assign({}, note);
descriptionNote.note_html = sprintf(
s__(`MergeRequest|
%{paragraphStart}changed the description %{descriptionChangedTimes} times %{timeDifferenceMinutes}%{paragraphEnd}`),
{
paragraphStart: '<p dir="auto">',
paragraphEnd: '</p>',
descriptionChangedTimes,
timeDifferenceMinutes: n__('within %d minute ', 'within %d minutes ', timeDifferenceMinutes),
},
false,
);
descriptionNote.times_updated = descriptionChangedTimes;
return descriptionNote;
};
/**
* Computes the time difference between two notes from their 'created_at' dates,
* returning the number of minutes rounded up to an integer
*/
export const getTimeDifferenceMinutes = (noteBeginning, noteEnd) => {
const descriptionNoteBegin = new Date(noteBeginning.created_at);
const descriptionNoteEnd = new Date(noteEnd.created_at);
const timeDifferenceMinutes = (descriptionNoteEnd - descriptionNoteBegin) / 1000 / 60;
return Math.ceil(timeDifferenceMinutes);
};
/**
* Checks if a note is a system note and if the content is description
*
* @param {Object} note
* @returns {Boolean}
*/
export const isDescriptionSystemNote = note => note.system && note.note === DESCRIPTION_TYPE;
/**
* Collapses description-type system notes, e.g. 'Changed the description, n minutes ago'.
* Notes collapse as long as they occur no more than 10 minutes apart from each other;
* anything can sit in between them, such as another type of system note
* (e.g. 'changed the weight') or a comment.
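*
* For example (hypothetical timestamps): description notes created at t = 0 min
* and t = 4 min collapse into one note reading 'changed the description 2 times
* within 4 minutes', while a third description note at t = 20 min starts a new
* group, being more than 10 minutes after the previous one.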
*
* @param {Array} notes
* @returns {Array}
*/
export const collapseSystemNotes = notes => {
let lastDescriptionSystemNote = null;
let lastDescriptionSystemNoteIndex = -1;
let descriptionChangedTimes = 1;
return notes.slice(0).reduce((acc, currentNote) => {
const note = currentNote.notes[0];
if (isDescriptionSystemNote(note)) {
// is it the first one?
if (!lastDescriptionSystemNote) {
lastDescriptionSystemNote = note;
lastDescriptionSystemNoteIndex = acc.length;
} else if (lastDescriptionSystemNote) {
const timeDifferenceMinutes = getTimeDifferenceMinutes(
lastDescriptionSystemNote,
note,
);
// are they less than 10 minutes apart?
if (timeDifferenceMinutes > 10) {
// reset counter
descriptionChangedTimes = 1;
// update the previous system note
lastDescriptionSystemNote = note;
lastDescriptionSystemNoteIndex = acc.length;
} else {
// increase counter
descriptionChangedTimes += 1;
// delete the previous one
acc.splice(lastDescriptionSystemNoteIndex, 1);
// replace the text of the current system note with the collapsed note.
currentNote.notes.splice(
0,
1,
changeDescriptionNote(note, descriptionChangedTimes, timeDifferenceMinutes),
);
// update the previous system note index
lastDescriptionSystemNoteIndex = acc.length;
}
}
}
acc.push(currentNote);
return acc;
}, []);
};
// for babel-rewire
export default {};
 import _ from 'underscore';
+import { collapseSystemNotes } from './collapse_utils';

-export const notes = state => state.notes;
+export const notes = state => collapseSystemNotes(state.notes);

 export const targetNoteHash = state => state.targetNoteHash;
 export const getNotesData = state => state.notesData;
......
@@ -69,6 +69,11 @@ code {
     background-color: inherit;
     padding: unset;
   }
+
+  .build-trace & {
+    background-color: inherit;
+    padding: inherit;
+  }
 }

 .code {
......
@@ -183,7 +183,7 @@
   svg {
     position: relative;
-    top: -1px;
+    top: -2px;
   }

 .ide-file-changed-icon {
@@ -458,6 +458,10 @@
   width: auto;
   margin-right: 0;

+  a {
+    height: 60px;
+  }
+
   a:hover,
   a:focus {
     text-decoration: none;
@@ -718,9 +722,17 @@
 }

 .ide-new-btn {
+  .btn {
+    padding-top: 3px;
+    padding-bottom: 3px;
+  }
+
+  .dropdown {
+    display: flex;
+  }
+
   .dropdown-toggle svg {
-    margin-top: -2px;
-    margin-bottom: 2px;
+    top: 0;
   }

   .dropdown-menu {
@@ -877,6 +889,7 @@
   border-top: 1px solid transparent;
   border-bottom: 1px solid transparent;
   outline: 0;
+  cursor: pointer;

   svg {
     margin: 0 auto;
......
@@ -22,9 +22,9 @@
 header,
 nav,
-nav.main-nav,
 nav.navbar-collapse,
 nav.navbar-collapse.collapse,
+.nav-sidebar,
 .profiler-results,
 .tree-ref-holder,
 .tree-holder .breadcrumb,
@@ -38,7 +38,8 @@ ul.notes-form,
 .edit-link,
 .note-action-button,
 .right-sidebar,
-.flash-container {
+.flash-container,
+#js-peek {
   display: none !important;
 }
......
@@ -18,7 +18,7 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
   def upload_authorize
     set_workhorse_internal_api_content_type

-    authorized = LfsObjectUploader.workhorse_authorize
+    authorized = LfsObjectUploader.workhorse_authorize(has_length: true)
     authorized.merge!(LfsOid: oid, LfsSize: size)

     render json: authorized
......
@@ -238,6 +238,14 @@ module ProjectsHelper
     "git push --set-upstream #{repository_url}/$(git rev-parse --show-toplevel | xargs basename).git $(git rev-parse --abbrev-ref HEAD)"
   end

+  def show_xcode_link?(project = @project)
+    browser.platform.mac? && project.repository.xcode_project?
+  end
+
+  def xcode_uri_to_repo(project = @project)
+    "xcode://clone?repo=#{CGI.escape(default_url_to_repo(project))}"
+  end
+
   private

   def get_project_nav_tabs(project, current_user)
......
@@ -55,6 +55,11 @@ module Ci
       where('(artifacts_file IS NOT NULL AND artifacts_file <> ?) OR EXISTS (?)',
         '', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').archive)
     end

+    scope :without_archived_trace, ->() do
+      where('NOT EXISTS (?)', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').trace)
+    end
+
     scope :with_artifacts_stored_locally, -> { with_artifacts_archive.where(artifacts_file_store: [nil, LegacyArtifactUploader::Store::LOCAL]) }
     scope :with_artifacts_not_expired, ->() { with_artifacts_archive.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
     scope :with_expired_artifacts, ->() { with_artifacts_archive.where('artifacts_expire_at < ?', Time.now) }
@@ -144,6 +149,7 @@ module Ci
     after_transition any => [:success] do |build|
       build.run_after_commit do
         BuildSuccessWorker.perform_async(id)
+        PagesWorker.perform_async(:deploy, id) if build.pages_generator?
       end
     end
@@ -183,6 +189,11 @@ module Ci
       pipeline.manual_actions.where.not(name: name)
     end

+    def pages_generator?
+      Gitlab.config.pages.enabled &&
+        self.name == 'pages'
+    end
+
     def playable?
       action? && (manual? || retryable?)
     end
@@ -402,8 +413,6 @@ module Ci
       build_data = Gitlab::DataBuilder::Build.build(self)
       project.execute_hooks(build_data.dup, :job_hooks)
       project.execute_services(build_data.dup, :job_hooks)
-      PagesService.new(build_data).execute
-      project.running_or_pending_build_count(force: true)
     end

     def browsable_artifacts?
......
@@ -1656,12 +1656,6 @@ class Project < ActiveRecord::Base
     import_state.update_column(:jid, nil)
   end

-  def running_or_pending_build_count(force: false)
-    Rails.cache.fetch(['projects', id, 'running_or_pending_build_count'], force: force) do
-      builds.running_or_pending.count(:all)
-    end
-  end
-
   # Lazy loading of the `pipeline_status` attribute
   def pipeline_status
     @pipeline_status ||= Gitlab::Cache::Ci::ProjectPipelineStatus.load_for_project(self)
......
class PagesService
attr_reader :data
def initialize(data)
@data = data
end
def execute
return unless Settings.pages.enabled
return unless data[:build_name] == 'pages'
return unless data[:build_status] == 'success'
PagesWorker.perform_async(:deploy, data[:build_id])
end
end
@@ -17,6 +17,8 @@ module Projects
       ensure_wiki_exists if enabling_wiki?

+      yield if block_given?
+
       if project.update_attributes(params.except(:default_branch))
         if project.previous_changes.include?('path')
           project.rename_repo
@@ -36,7 +38,7 @@ module Projects
     end

     def run_auto_devops_pipeline?
-      return false if project.repository.gitlab_ci_yml || !project.auto_devops.previous_changes.include?('enabled')
+      return false if project.repository.gitlab_ci_yml || !project.auto_devops&.previous_changes&.include?('enabled')

       project.auto_devops.enabled? || (project.auto_devops.enabled.nil? && Gitlab::CurrentSettings.auto_devops_enabled?)
     end
@@ -53,8 +55,8 @@ module Projects
     def changing_default_branch?
       new_branch = params[:default_branch]

-      project.repository.exists? && new_branch &&
-        new_branch != project.default_branch
+      project.repository.exists? &&
+        new_branch && new_branch != project.default_branch
     end

     def enabling_wiki?
......
@@ -10,8 +10,6 @@ module ObjectStorage
   UnknownStoreError = Class.new(StandardError)
   ObjectStorageUnavailable = Class.new(StandardError)

-  DIRECT_UPLOAD_TIMEOUT = 4.hours
-  DIRECT_UPLOAD_EXPIRE_OFFSET = 15.minutes
   TMP_UPLOAD_PATH = 'tmp/uploads'.freeze

   module Store
@@ -157,9 +155,9 @@ module ObjectStorage
       model_class.uploader_options.dig(mount_point, :mount_on) || mount_point
     end

-    def workhorse_authorize
+    def workhorse_authorize(has_length:, maximum_size: nil)
       {
-        RemoteObject: workhorse_remote_upload_options,
+        RemoteObject: workhorse_remote_upload_options(has_length: has_length, maximum_size: maximum_size),
         TempPath: workhorse_local_upload_path
       }.compact
     end
@@ -168,23 +166,16 @@ module ObjectStorage
       File.join(self.root, TMP_UPLOAD_PATH)
     end

-    def workhorse_remote_upload_options
+    def workhorse_remote_upload_options(has_length:, maximum_size: nil)
       return unless self.object_store_enabled?
       return unless self.direct_upload_enabled?

       id = [CarrierWave.generate_cache_id, SecureRandom.hex].join('-')
       upload_path = File.join(TMP_UPLOAD_PATH, id)

-      connection = ::Fog::Storage.new(self.object_store_credentials)
-      expire_at = Time.now + DIRECT_UPLOAD_TIMEOUT + DIRECT_UPLOAD_EXPIRE_OFFSET
-      options = { 'Content-Type' => 'application/octet-stream' }
-
-      {
-        ID: id,
-        Timeout: DIRECT_UPLOAD_TIMEOUT,
-        GetURL: connection.get_object_url(remote_store_path, upload_path, expire_at),
-        DeleteURL: connection.delete_object_url(remote_store_path, upload_path, expire_at),
-        StoreURL: connection.put_object_url(remote_store_path, upload_path, expire_at, options)
-      }
+      direct_upload = ObjectStorage::DirectUpload.new(self.object_store_credentials, remote_store_path, upload_path,
+        has_length: has_length, maximum_size: maximum_size)
+
+      direct_upload.to_hash.merge(ID: id)
     end
   end
......
@@ -42,6 +42,10 @@
       .project-clone-holder
         = render "shared/clone_panel"

+    - if show_xcode_link?(@project)
+      .project-action-button.project-xcode.inline
+        = render "projects/buttons/xcode_link"
+
     - if current_user
       - if can?(current_user, :download_code, @project)
         = render 'projects/buttons/download', project: @project, ref: @ref
......
%a.btn.btn-default{ href: xcode_uri_to_repo(@project) }
= _("Open in Xcode")
---
title: Add Open in Xcode link for Xcode repositories
merge_request:
author:
type: added
---
title: Check for nil AutoDevOps when saving project CI/CD settings.
merge_request: 19190
author:
type: fixed
---
title: Add background migrations for archiving legacy job traces
merge_request: 19194
author:
type: performance
---
title: Move PR IO operations out of a transaction
merge_request:
author:
type: performance
---
title: Add support for smarter system notes
merge_request: 17164
author:
type: changed
---
title: Optimise PagesWorker usage
merge_request:
author:
type: performance
---
title: Support direct_upload with S3 Multipart uploads
merge_request:
author:
type: added
---
title: Support rails5 in postgres indexes function and fix some migrations
merge_request: 19400
author: Jasper Maes
type: fixed
---
title: Remove unused running_or_pending_build_count
merge_request:
author:
type: performance
---
title: Remove N+1 query for author in issues API
merge_request:
author:
type: performance
---
title: Eliminate N+1 queries for CI job artifacts in /api/projects/:id/pipelines/:pipeline_id/jobs
merge_request:
author:
type: performance
artifacts_object_store = Gitlab.config.artifacts.object_store
if artifacts_object_store.enabled &&
artifacts_object_store.direct_upload &&
artifacts_object_store.connection&.provider.to_s != 'Google'
raise "Only 'Google' is supported as a object storage provider when 'direct_upload' of artifacts is used"
end
class DirectUploadsValidator
SUPPORTED_DIRECT_UPLOAD_PROVIDERS = %w(Google AWS).freeze
ValidationError = Class.new(StandardError)
def verify!(object_store)
return unless object_store.enabled
return unless object_store.direct_upload
return if SUPPORTED_DIRECT_UPLOAD_PROVIDERS.include?(object_store.connection&.provider.to_s)
raise ValidationError, "Only #{SUPPORTED_DIRECT_UPLOAD_PROVIDERS.join(',')} are supported as a object storage provider when 'direct_upload' is used"
end
end
DirectUploadsValidator.new.tap do |validator|
[Gitlab.config.artifacts, Gitlab.config.uploads, Gitlab.config.lfs].each do |uploader|
validator.verify!(uploader.object_store)
end
end
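# As an illustration (values are hypothetical), an object store configuration
# like the following passes the validator above, because its provider is in
# SUPPORTED_DIRECT_UPLOAD_PROVIDERS:
#
#   object_store:
#     enabled: true
#     direct_upload: true
#     connection:
#       provider: AWS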
@@ -107,8 +107,15 @@ module ActiveRecord
         result.map do |row|
           index_name = row[0]
-          unique = row[1] == 't'
+          unique = if Gitlab.rails5?
+                     row[1]
+                   else
+                     row[1] == 't'
+                   end
           indkey = row[2].split(" ")
+          if Gitlab.rails5?
+            indkey = indkey.map(&:to_i)
+          end
           inddef = row[3]
           oid = row[4]
......
@@ -37,7 +37,12 @@ class AddTrigramIndexesForSearching < ActiveRecord::Migration
     res = execute("SELECT true AS enabled FROM pg_available_extensions WHERE name = 'pg_trgm' AND installed_version IS NOT NULL;")
     row = res.first

-    row && row['enabled'] == 't' ? true : false
+    check = if Gitlab.rails5?
+              true
+            else
+              't'
+            end
+    row && row['enabled'] == check ? true : false
   end

   def create_trigrams_extension
......
@@ -2,12 +2,13 @@ class AddUniqueConstraintToCiVariables < ActiveRecord::Migration
   include Gitlab::Database::MigrationHelpers

   DOWNTIME = false
+  INDEX_NAME = 'index_ci_variables_on_project_id_and_key_and_environment_scope'

   disable_ddl_transaction!

   def up
     unless this_index_exists?
-      add_concurrent_index(:ci_variables, columns, name: index_name, unique: true)
+      add_concurrent_index(:ci_variables, columns, name: INDEX_NAME, unique: true)
     end
   end
@@ -18,21 +19,17 @@ class AddUniqueConstraintToCiVariables < ActiveRecord::Migration
       add_concurrent_index(:ci_variables, :project_id)
     end

-    remove_concurrent_index(:ci_variables, columns, name: index_name)
+    remove_concurrent_index(:ci_variables, columns, name: INDEX_NAME)
   end

   private

   def this_index_exists?
-    index_exists?(:ci_variables, columns, name: index_name)
+    index_exists?(:ci_variables, columns, name: INDEX_NAME)
   end

   def columns
     @columns ||= [:project_id, :key, :environment_scope]
   end
-
-  def index_name
-    'index_ci_variables_on_project_id_and_key_and_environment_scope'
-  end
 end
@@ -20,9 +20,7 @@ class TurnIssuesDueDateIndexToPartialIndex < ActiveRecord::Migration
       name: NEW_INDEX_NAME
     )

-    # We set the column name to nil as otherwise Rails will ignore the custom
-    # index name and remove the wrong index.
-    remove_concurrent_index(:issues, nil, name: OLD_INDEX_NAME)
+    remove_concurrent_index_by_name(:issues, OLD_INDEX_NAME)
   end

   def down
@@ -32,6 +30,6 @@ class TurnIssuesDueDateIndexToPartialIndex < ActiveRecord::Migration
       name: OLD_INDEX_NAME
     )

-    remove_concurrent_index(:issues, nil, name: NEW_INDEX_NAME)
+    remove_concurrent_index_by_name(:issues, NEW_INDEX_NAME)
   end
 end
@@ -31,7 +31,7 @@ class AddForeignKeysToTodos < ActiveRecord::Migration
   end

   def down
-    remove_foreign_key :todos, :users
+    remove_foreign_key :todos, column: :user_id
     remove_foreign_key :todos, column: :author_id
     remove_foreign_key :todos, :notes
   end
......
class ScheduleToArchiveLegacyTraces < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
BATCH_SIZE = 5000
BACKGROUND_MIGRATION_CLASS = 'ArchiveLegacyTraces'
disable_ddl_transaction!
class Build < ActiveRecord::Base
include EachBatch
self.table_name = 'ci_builds'
self.inheritance_column = :_type_disabled # Disable STI
scope :type_build, -> { where(type: 'Ci::Build') }
scope :finished, -> { where(status: [:success, :failed, :canceled]) }
scope :without_archived_trace, -> do
where('NOT EXISTS (SELECT 1 FROM ci_job_artifacts WHERE ci_builds.id = ci_job_artifacts.job_id AND ci_job_artifacts.file_type = 3)')
end
end
def up
queue_background_migration_jobs_by_range_at_intervals(
::ScheduleToArchiveLegacyTraces::Build.type_build.finished.without_archived_trace,
BACKGROUND_MIGRATION_CLASS,
5.minutes,
batch_size: BATCH_SIZE)
end
def down
# noop
end
end
@@ -94,6 +94,7 @@ _The artifacts are stored by default in
 > Available in [GitLab Premium](https://about.gitlab.com/products/) and
 [GitLab.com Silver](https://about.gitlab.com/gitlab-com/).
 > Since version 10.6, available in [GitLab CE](https://about.gitlab.com/products/)
+> Since version 11.0, we support `direct_upload` to S3.

 If you don't want to use the local disk where GitLab is installed to store the
 artifacts, you can use an object storage like AWS S3 instead.
@@ -108,7 +109,7 @@ For source installations the following settings are nested under `artifacts:` an
 |---------|-------------|---------|
 | `enabled` | Enable/disable object storage | `false` |
 | `remote_directory` | The bucket name where Artifacts will be stored| |
-| `direct_upload` | Set to true to enable direct upload of Artifacts without the need of local shared storage. Option may be removed once we decide to support only single storage for all files. Currently only `Google` provider is supported | `false` |
+| `direct_upload` | Set to true to enable direct upload of Artifacts without the need of local shared storage. Option may be removed once we decide to support only single storage for all files. | `false` |
 | `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 | `true` |
 | `proxy_download` | Set to true to enable proxying all files served. Option allows to reduce egress traffic as this allows clients to download directly from remote storage instead of proxying all data | `false` |
 | `connection` | Various connection options described below | |
......
@@ -22,6 +22,19 @@ As an example you might create 5 issues in between counts, which would cause the
 > **Note:** In some cases the query count might change slightly between runs for unrelated reasons. In this case you might need to test `exceed_query_limit(control_count + acceptable_change)`, but this should be avoided if possible.

+## Cached queries
+
+By default, QueryRecorder will ignore cached queries in the count. However, it may be better to count
+all queries to avoid introducing an N+1 query that may be masked by the statement cache. To do this,
+pass `skip_cached: false` to `QueryRecorder` and use the `exceed_all_query_limit` matcher:
+
+```ruby
+it "avoids N+1 database queries" do
+  control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { visit_some_page }.count
+  create_list(:issue, 5)
+  expect { visit_some_page }.not_to exceed_all_query_limit(control_count)
+end
+```
+
 ## Finding the source of the query

 It may be useful to identify the source of the queries by looking at the call backtrace.
......
@@ -497,10 +497,10 @@ also be customized, and you can easily use a [custom buildpack](#custom-buildpac
 | `CANARY_ENABLED` | From GitLab 11.0, this variable can be used to define a [deploy policy for canary environments](#deploy-policy-for-canary-environments). |
 | `INCREMENTAL_ROLLOUT_ENABLED`| From GitLab 10.8, this variable can be used to enable an [incremental rollout](#incremental-rollout-to-production) of your application for the production environment. |
 | `TEST_DISABLED` | From GitLab 11.0, this variable can be used to disable the `test` job. If the variable is present, the job will not be created. |
-| `CODEQUALITY_DISABLED` | From GitLab 11.0, this variable can be used to disable the `codequality` job. If the variable is present, the job will not be created. |
+| `CODE_QUALITY_DISABLED` | From GitLab 11.0, this variable can be used to disable the `code_quality` job. If the variable is present, the job will not be created. |
 | `SAST_DISABLED` | From GitLab 11.0, this variable can be used to disable the `sast` job. If the variable is present, the job will not be created. |
 | `DEPENDENCY_SCANNING_DISABLED` | From GitLab 11.0, this variable can be used to disable the `dependency_scanning` job. If the variable is present, the job will not be created. |
-| `CONTAINER_SCANNING_DISABLED` | From GitLab 11.0, this variable can be used to disable the `sast:container` job. If the variable is present, the job will not be created. |
+| `CONTAINER_SCANNING_DISABLED` | From GitLab 11.0, this variable can be used to disable the `container_scanning` job. If the variable is present, the job will not be created. |
 | `REVIEW_DISABLED` | From GitLab 11.0, this variable can be used to disable the `review` and the manual `review:stop` job. If the variable is present, these jobs will not be created. |
 | `DAST_DISABLED` | From GitLab 11.0, this variable can be used to disable the `dast` job. If the variable is present, the job will not be created. |
 | `PERFORMANCE_DISABLED` | From GitLab 11.0, this variable can be used to disable the `performance` job. If the variable is present, the job will not be created. |
......
@@ -16,7 +16,7 @@ module API
         args[:scope] = args[:scope].underscore if args[:scope]

         issues = IssuesFinder.new(current_user, args).execute
-          .preload(:assignees, :labels, :notes, :timelogs, :project)
+          .preload(:assignees, :labels, :notes, :timelogs, :project, :author)
         issues.reorder(args[:order_by] => args[:sort])
       end
......
@@ -54,6 +54,7 @@ module API
         pipeline = user_project.pipelines.find(params[:pipeline_id])
         builds = pipeline.builds
         builds = filter_builds(builds, params[:scope])
+        builds = builds.preload(:job_artifacts_archive)

         present paginate(builds), with: Entities::Job
       end
......
@@ -205,7 +205,7 @@ module API
           status 200
           content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE

-          JobArtifactUploader.workhorse_authorize
+          JobArtifactUploader.workhorse_authorize(has_length: false, maximum_size: max_artifacts_size)
         end

         desc 'Upload artifacts for job' do
......
@@ -240,7 +240,7 @@ module Gitlab
       return unless login == 'gitlab-ci-token'
       return unless password

-      build = ::Ci::Build.running.find_by_token(password)
+      build = find_build_by_token(password)
       return unless build
       return unless build.project.builds_enabled?
@@ -301,6 +301,12 @@ module Gitlab
         REGISTRY_SCOPES
       end

+      private
+
+      def find_build_by_token(token)
+        ::Ci::Build.running.find_by_token(token)
+      end
     end
   end
 end
# frozen_string_literal: true
# rubocop:disable Metrics/AbcSize
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class ArchiveLegacyTraces
def perform(start_id, stop_id)
# This background migration directly refers to the ::Ci::Build model, which is defined in application code.
# In general, migration code should be isolated as much as possible in order to be idempotent.
# However, the `archive!` method is too complicated to replicate by copying its underlying code.
# So we chose to use ::Ci::Build directly, and we won't change the `archive!` method until 11.1
::Ci::Build.finished.without_archived_trace
.where(id: start_id..stop_id).find_each do |build|
begin
build.trace.archive!
rescue => e
Rails.logger.error "Failed to archive live trace. id: #{build.id} message: #{e.message}"
end
end
end
end
end
end
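# A minimal usage sketch (hypothetical IDs): the jobs queued by the
# ScheduleToArchiveLegacyTraces migration above eventually invoke this class as
#
#   Gitlab::BackgroundMigration::ArchiveLegacyTraces.new.perform(1, 5000)
#
# archiving the live trace of every finished build in that ID range.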
@@ -1397,6 +1397,11 @@ module Gitlab
       def write_config(full_path:)
         return unless full_path.present?

+        # This guard avoids Gitaly log/error spam
+        unless exists?
+          raise NoRepository, 'repository does not exist'
+        end
+
         gitaly_migrate(:write_config) do |is_enabled|
           if is_enabled
             gitaly_repository_client.write_config(full_path: full_path)
......
@@ -22,15 +22,22 @@ module Gitlab
         end

         def execute
-          if (mr_id = create_merge_request)
-            issuable_finder.cache_database_id(mr_id)
+          mr, already_exists = create_merge_request
+
+          if mr
+            insert_git_data(mr, already_exists)
+            issuable_finder.cache_database_id(mr.id)
           end
         end

         # Creates the merge request and returns its ID.
         #
         # This method will return `nil` if the merge request could not be
-        # created.
+        # created, otherwise it will return an Array containing the following
+        # values:
+        #
+        # 1. A MergeRequest instance.
+        # 2. A boolean indicating if the MR already exists.
         def create_merge_request
           author_id, author_found = user_finder.author_id_for(pull_request)
@@ -69,21 +76,42 @@ module Gitlab
             merge_request_id = GithubImport
               .insert_and_return_id(attributes, project.merge_requests)

-            merge_request = project.merge_requests.find(merge_request_id)
-
-            # These fields are set so we can create the correct merge request
-            # diffs.
-            merge_request.source_branch_sha = pull_request.source_branch_sha
-            merge_request.target_branch_sha = pull_request.target_branch_sha
-
-            merge_request.keep_around_commit
-            merge_request.merge_request_diffs.create
-
-            merge_request.id
+            [project.merge_requests.find(merge_request_id), false]
           end
         rescue ActiveRecord::InvalidForeignKey
           # It's possible the project has been deleted since scheduling this
           # job. In this case we'll just skip creating the merge request.
+          []
+        rescue ActiveRecord::RecordNotUnique
+          # It's possible we previously created the MR, but failed when updating
+          # the Git data. In this case we'll just continue working on the
+          # existing row.
+          [project.merge_requests.find_by(iid: pull_request.iid), true]
+        end
+
+        def insert_git_data(merge_request, already_exists = false)
+          # These fields are set so we can create the correct merge request
+          # diffs.
+          merge_request.source_branch_sha = pull_request.source_branch_sha
+          merge_request.target_branch_sha = pull_request.target_branch_sha
+
+          merge_request.keep_around_commit
+
+          # MR diffs normally use an "after_save" hook to pull data from Git.
+          # All of this happens in the transaction started by calling
+          # create/save/etc. This in turn can lead to these transactions being
+          # held open for much longer than necessary. To work around this we
+          # first save the diff, then populate it.
+          diff =
+            if already_exists
+              merge_request.merge_request_diffs.take
+            else
+              merge_request.merge_request_diffs.build
+            end
+
+          diff.importing = true
+          diff.save
+          diff.save_git_content
         end
       end
     end
......
@@ -87,18 +87,28 @@ module Gitlab
       end

       def included(base = nil)
-        return super if base.nil? # Rails concern, ignoring it
+        super

+        queue_verification(base)
+      end
+
+      alias_method :prepended, :included
+
+      def extended(mod)
         super

+        queue_verification(mod.singleton_class)
+      end
+
+      def queue_verification(base)
+        return unless ENV['STATIC_VERIFICATION']
+
         if base.is_a?(Class) # We could check for Class in `override`
           # This could be `nil` if `override` was never called
           Override.extensions[self]&.add_class(base)
         end
       end

-      alias_method :prepended, :included
-
       def self.extensions
         @extensions ||= {}
       end
......
module ObjectStorage
  #
  # The DirectUpload class generates a set of presigned URLs
  # that can be used to upload data to object storage from untrusted
  # components (e.g. Workhorse or the Runner).
  #
  # For Google it assumes that the platform supports variable Content-Length.
  #
  # For AWS it initiates a Multipart Upload and presigns a set of part uploads.
  # The class calculates the best part size so that uploads up to the requested
  # maximum size are possible. The number of generated parts will never exceed 100,
  # and we always try to keep the number of parts as small as possible.
  # The part size is rounded up to a multiple of 5MB.
  #
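  # As an illustration (hypothetical numbers): with maximum_size = 600MB,
  # number_of_multipart_parts = min(ceil(600MB / 5MB), 100) = 100, so
  # multipart_part_size = 600MB / 100 = 6MB, which is then rounded up to the
  # next multiple of 5MB, giving a PartSize of 10MB (100 parts x 10MB covers
  # the requested 600MB with room to spare).
  #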
class DirectUpload
include Gitlab::Utils::StrongMemoize
TIMEOUT = 4.hours
EXPIRE_OFFSET = 15.minutes
MAXIMUM_MULTIPART_PARTS = 100
MINIMUM_MULTIPART_SIZE = 5.megabytes
attr_reader :credentials, :bucket_name, :object_name
attr_reader :has_length, :maximum_size
def initialize(credentials, bucket_name, object_name, has_length:, maximum_size: nil)
unless has_length
raise ArgumentError, 'maximum_size has to be specified if length is unknown' unless maximum_size
end
@credentials = credentials
@bucket_name = bucket_name
@object_name = object_name
@has_length = has_length
@maximum_size = maximum_size
end
def to_hash
{
Timeout: TIMEOUT,
GetURL: get_url,
StoreURL: store_url,
DeleteURL: delete_url,
MultipartUpload: multipart_upload_hash
}.compact
end
def multipart_upload_hash
return unless requires_multipart_upload?
{
PartSize: rounded_multipart_part_size,
PartURLs: multipart_part_urls,
CompleteURL: multipart_complete_url,
AbortURL: multipart_abort_url
}
end
def provider
credentials[:provider].to_s
end
# Implements https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectGET.html
def get_url
connection.get_object_url(bucket_name, object_name, expire_at)
end
# Implements https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectDELETE.html
def delete_url
connection.delete_object_url(bucket_name, object_name, expire_at)
end
# Implements https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html
def store_url
connection.put_object_url(bucket_name, object_name, expire_at, upload_options)
end
def multipart_part_urls
Array.new(number_of_multipart_parts) do |part_index|
multipart_part_upload_url(part_index + 1)
end
end
# Implements https://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadUploadPart.html
def multipart_part_upload_url(part_number)
connection.signed_url({
method: 'PUT',
bucket_name: bucket_name,
object_name: object_name,
query: { uploadId: upload_id, partNumber: part_number },
headers: upload_options
}, expire_at)
end
# Implements https://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
def multipart_complete_url
connection.signed_url({
method: 'POST',
bucket_name: bucket_name,
object_name: object_name,
query: { uploadId: upload_id },
headers: { 'Content-Type' => 'application/xml' }
}, expire_at)
end
# Implements https://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadAbort.html
def multipart_abort_url
connection.signed_url({
method: 'DELETE',
bucket_name: bucket_name,
object_name: object_name,
query: { uploadId: upload_id }
}, expire_at)
end
private
def rounded_multipart_part_size
# round multipart_part_size up to the next multiple of MINIMUM_MULTIPART_SIZE
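# e.g. a computed part size of 7MB is rounded up to 10MB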
(multipart_part_size + MINIMUM_MULTIPART_SIZE - 1) / MINIMUM_MULTIPART_SIZE * MINIMUM_MULTIPART_SIZE
end
def multipart_part_size
maximum_size / number_of_multipart_parts
end
def number_of_multipart_parts
[
# number of parts needed at the minimum part size: ceil(maximum_size / MINIMUM_MULTIPART_SIZE)
(maximum_size + MINIMUM_MULTIPART_SIZE - 1) / MINIMUM_MULTIPART_SIZE,
MAXIMUM_MULTIPART_PARTS
].min
end
def aws?
provider == 'AWS'
end
def requires_multipart_upload?
aws? && !has_length
end
def upload_id
return unless requires_multipart_upload?
strong_memoize(:upload_id) do
new_upload = connection.initiate_multipart_upload(bucket_name, object_name)
new_upload.body["UploadId"]
end
end
def expire_at
strong_memoize(:expire_at) do
Time.now + TIMEOUT + EXPIRE_OFFSET
end
end
def upload_options
{ 'Content-Type' => 'application/octet-stream' }
end
def connection
@connection ||= ::Fog::Storage.new(credentials)
end
end
end
@@ -8,9 +8,7 @@ namespace :gitlab do
       logger = Logger.new(STDOUT)
       logger.info('Archiving legacy traces')

-      Ci::Build.finished
-        .where('NOT EXISTS (?)',
-          Ci::JobArtifact.select(1).trace.where('ci_builds.id = ci_job_artifacts.job_id'))
+      Ci::Build.finished.without_archived_trace
         .order(id: :asc)
         .find_in_batches(batch_size: 1000) do |jobs|
         job_ids = jobs.map { |job| [job.id] }
......
@@ -32,8 +32,6 @@ describe 'User browses a job', :js do
     page.within('.erased') do
       expect(page).to have_content('Job has been erased')
     end
-
-    expect(build.project.running_or_pending_build_count).to eq(build.project.builds.running_or_pending.count(:all))
   end

   context 'with a failed job' do
......
...@@ -435,4 +435,46 @@ describe ProjectsHelper do ...@@ -435,4 +435,46 @@ describe ProjectsHelper do
expect(helper.send(:git_user_name)).to eq('John \"A\" Doe53') expect(helper.send(:git_user_name)).to eq('John \"A\" Doe53')
end end
end end
describe 'show_xcode_link' do
let!(:project) { create(:project) }
let(:mac_ua) { 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.139 Safari/537.36' }
let(:ios_ua) { 'Mozilla/5.0 (iPad; CPU OS 5_1_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9B206 Safari/7534.48.3' }
context 'when the repository is xcode compatible' do
before do
allow(project.repository).to receive(:xcode_project?).and_return(true)
end
it 'returns false if the visitor is not using macos' do
allow(helper).to receive(:browser).and_return(Browser.new(ios_ua))
expect(helper.show_xcode_link?(project)).to eq(false)
end
it 'returns true if the visitor is using macos' do
allow(helper).to receive(:browser).and_return(Browser.new(mac_ua))
expect(helper.show_xcode_link?(project)).to eq(true)
end
end
context 'when the repository is not xcode compatible' do
before do
allow(project.repository).to receive(:xcode_project?).and_return(false)
end
it 'returns false if the visitor is not using macos' do
allow(helper).to receive(:browser).and_return(Browser.new(ios_ua))
expect(helper.show_xcode_link?(project)).to eq(false)
end
it 'returns false if the visitor is using macos' do
allow(helper).to receive(:browser).and_return(Browser.new(mac_ua))
expect(helper.show_xcode_link?(project)).to eq(false)
end
end
end
end
require 'spec_helper'
describe 'Artifacts direct upload support' do
subject do
load Rails.root.join('config/initializers/artifacts_direct_upload_support.rb')
end
let(:connection) do
{ provider: provider }
end
before do
stub_artifacts_setting(
object_store: {
enabled: enabled,
direct_upload: direct_upload,
connection: connection
})
end
context 'when object storage is enabled' do
let(:enabled) { true }
context 'when direct upload is enabled' do
let(:direct_upload) { true }
context 'when provider is Google' do
let(:provider) { 'Google' }
it 'succeeds' do
expect { subject }.not_to raise_error
end
end
context 'when connection is empty' do
let(:connection) { nil }
it 'raises an error' do
expect { subject }.to raise_error /object storage provider when 'direct_upload' of artifacts is used/
end
end
context 'when other provider is used' do
let(:provider) { 'AWS' }
it 'raises an error' do
expect { subject }.to raise_error /object storage provider when 'direct_upload' of artifacts is used/
end
end
end
context 'when direct upload is disabled' do
let(:direct_upload) { false }
let(:provider) { 'AWS' }
it 'succeeds' do
expect { subject }.not_to raise_error
end
end
end
context 'when object storage is disabled' do
let(:enabled) { false }
let(:direct_upload) { false }
let(:provider) { 'AWS' }
it 'succeeds' do
expect { subject }.not_to raise_error
end
end
end
require 'spec_helper'
describe 'Direct upload support' do
subject do
load Rails.root.join('config/initializers/direct_upload_support.rb')
end
where(:config_name) do
%w(lfs artifacts uploads)
end
with_them do
let(:connection) do
{ provider: provider }
end
let(:object_store) do
{
enabled: enabled,
direct_upload: direct_upload,
connection: connection
}
end
before do
allow(Gitlab.config).to receive_messages(to_settings(config_name => {
object_store: object_store
}))
end
context 'when object storage is enabled' do
let(:enabled) { true }
context 'when direct upload is enabled' do
let(:direct_upload) { true }
context 'when provider is AWS' do
let(:provider) { 'AWS' }
it 'succeeds' do
expect { subject }.not_to raise_error
end
end
context 'when provider is Google' do
let(:provider) { 'Google' }
it 'succeeds' do
expect { subject }.not_to raise_error
end
end
context 'when connection is empty' do
let(:connection) { nil }
it 'raises an error' do
expect { subject }.to raise_error /are supported as a object storage provider when 'direct_upload' is used/
end
end
context 'when other provider is used' do
let(:provider) { 'Rackspace' }
it 'raises an error' do
expect { subject }.to raise_error /are supported as a object storage provider when 'direct_upload' is used/
end
end
end
context 'when direct upload is disabled' do
let(:direct_upload) { false }
let(:provider) { 'AWS' }
it 'succeeds' do
expect { subject }.not_to raise_error
end
end
end
context 'when object storage is disabled' do
let(:enabled) { false }
let(:direct_upload) { false }
let(:provider) { 'Rackspace' }
it 'succeeds' do
expect { subject }.not_to raise_error
end
end
end
end
@@ -340,6 +340,79 @@ export const loggedOutnoteableData = {
    '/gitlab-org/gitlab-ce/preview_markdown?quick_actions_target_id=98&quick_actions_target_type=Issue',
};
export const collapseNotesMock = [
{
expanded: true,
id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd',
individual_note: true,
notes: [
{
id: 1390,
attachment: null,
author: {
id: 1,
name: 'Root',
username: 'root',
state: 'active',
avatar_url: 'test',
path: '/root',
},
created_at: '2018-02-26T18:07:41.071Z',
updated_at: '2018-02-26T18:07:41.071Z',
system: true,
system_note_icon_name: 'pencil',
noteable_id: 98,
noteable_type: 'Issue',
type: null,
human_access: 'Owner',
note: 'changed the description',
note_html: '<p dir="auto">changed the description</p>',
current_user: { can_edit: false },
discussion_id: 'b97fb7bda470a65b3e009377a9032edec0a4dd05',
emoji_awardable: false,
path: '/h5bp/html5-boilerplate/notes/1057',
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fh5bp%2Fhtml5-boilerplate%2Fissues%2F10%23note_1057&user_id=1',
},
],
},
{
expanded: true,
id: 'ffde43f25984ad7f2b4275135e0e2846875336c0',
individual_note: true,
notes: [
{
id: 1391,
attachment: null,
author: {
id: 1,
name: 'Root',
username: 'root',
state: 'active',
avatar_url: 'test',
path: '/root',
},
created_at: '2018-02-26T18:13:24.071Z',
updated_at: '2018-02-26T18:13:24.071Z',
system: true,
system_note_icon_name: 'pencil',
noteable_id: 99,
noteable_type: 'Issue',
type: null,
human_access: 'Owner',
note: 'changed the description',
note_html: '<p dir="auto">changed the description</p>',
current_user: { can_edit: false },
discussion_id: '3eb958b4d81dec207ec3537a2f3bd8b9f271bb34',
emoji_awardable: false,
path: '/h5bp/html5-boilerplate/notes/1057',
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fh5bp%2Fhtml5-boilerplate%2Fissues%2F10%23note_1057&user_id=1',
},
],
},
];
export const INDIVIDUAL_NOTE_RESPONSE_MAP = {
  GET: {
    '/gitlab-org/gitlab-ce/issues/26/discussions.json': [
@@ -575,3 +648,508 @@ export function discussionNoteInterceptor(request, next) {
    }),
  );
}
export const notesWithDescriptionChanges = [
{
id: '39b271c2033e9ed43d8edb393702f65f7a830459',
reply_id: '39b271c2033e9ed43d8edb393702f65f7a830459',
expanded: true,
notes: [
{
id: 901,
type: null,
attachment: null,
author: {
id: 1,
name: 'Administrator',
username: 'root',
state: 'active',
avatar_url:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
path: '/root',
},
created_at: '2018-05-29T12:05:36.117Z',
updated_at: '2018-05-29T12:05:36.117Z',
system: false,
noteable_id: 182,
noteable_type: 'Issue',
resolvable: false,
noteable_iid: 12,
note:
'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.',
note_html:
'<p dir="auto">Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.</p>',
current_user: { can_edit: true, can_award_emoji: true },
resolved: false,
resolved_by: null,
discussion_id: '39b271c2033e9ed43d8edb393702f65f7a830459',
emoji_awardable: true,
award_emoji: [],
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_901&user_id=1',
human_access: 'Owner',
toggle_award_path: '/gitlab-org/gitlab-shell/notes/901/toggle_award_emoji',
path: '/gitlab-org/gitlab-shell/notes/901',
},
],
individual_note: true,
resolvable: false,
resolved: false,
diff_discussion: false,
},
{
id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
reply_id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
expanded: true,
notes: [
{
id: 902,
type: null,
attachment: null,
author: {
id: 1,
name: 'Administrator',
username: 'root',
state: 'active',
avatar_url:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
path: '/root',
},
created_at: '2018-05-29T12:05:58.694Z',
updated_at: '2018-05-29T12:05:58.694Z',
system: false,
noteable_id: 182,
noteable_type: 'Issue',
resolvable: false,
noteable_iid: 12,
note:
'Varius vel pharetra vel turpis nunc eget lorem. Ipsum dolor sit amet consectetur adipiscing.',
note_html:
'<p dir="auto">Varius vel pharetra vel turpis nunc eget lorem. Ipsum dolor sit amet consectetur adipiscing.</p>',
current_user: { can_edit: true, can_award_emoji: true },
resolved: false,
resolved_by: null,
discussion_id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
emoji_awardable: true,
award_emoji: [],
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_902&user_id=1',
human_access: 'Owner',
toggle_award_path: '/gitlab-org/gitlab-shell/notes/902/toggle_award_emoji',
path: '/gitlab-org/gitlab-shell/notes/902',
},
],
individual_note: true,
resolvable: false,
resolved: false,
diff_discussion: false,
},
{
id: '7f1feda384083eb31763366e6392399fde6f3f31',
reply_id: '7f1feda384083eb31763366e6392399fde6f3f31',
expanded: true,
notes: [
{
id: 903,
type: null,
attachment: null,
author: {
id: 1,
name: 'Administrator',
username: 'root',
state: 'active',
avatar_url:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
path: '/root',
},
created_at: '2018-05-29T12:06:05.772Z',
updated_at: '2018-05-29T12:06:05.772Z',
system: true,
noteable_id: 182,
noteable_type: 'Issue',
resolvable: false,
noteable_iid: 12,
note: 'changed the description',
note_html: '<p dir="auto">changed the description</p>',
current_user: { can_edit: false, can_award_emoji: true },
resolved: false,
resolved_by: null,
system_note_icon_name: 'pencil-square',
discussion_id: '7f1feda384083eb31763366e6392399fde6f3f31',
emoji_awardable: false,
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_903&user_id=1',
human_access: 'Owner',
path: '/gitlab-org/gitlab-shell/notes/903',
},
],
individual_note: true,
resolvable: false,
resolved: false,
diff_discussion: false,
},
{
id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
reply_id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
expanded: true,
notes: [
{
id: 904,
type: null,
attachment: null,
author: {
id: 1,
name: 'Administrator',
username: 'root',
state: 'active',
avatar_url:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
path: '/root',
},
created_at: '2018-05-29T12:06:16.112Z',
updated_at: '2018-05-29T12:06:16.112Z',
system: false,
noteable_id: 182,
noteable_type: 'Issue',
resolvable: false,
noteable_iid: 12,
note: 'Ullamcorper eget nulla facilisi etiam',
note_html: '<p dir="auto">Ullamcorper eget nulla facilisi etiam</p>',
current_user: { can_edit: true, can_award_emoji: true },
resolved: false,
resolved_by: null,
discussion_id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
emoji_awardable: true,
award_emoji: [],
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_904&user_id=1',
human_access: 'Owner',
toggle_award_path: '/gitlab-org/gitlab-shell/notes/904/toggle_award_emoji',
path: '/gitlab-org/gitlab-shell/notes/904',
},
],
individual_note: true,
resolvable: false,
resolved: false,
diff_discussion: false,
},
{
id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
reply_id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
expanded: true,
notes: [
{
id: 905,
type: null,
attachment: null,
author: {
id: 1,
name: 'Administrator',
username: 'root',
state: 'active',
avatar_url:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
path: '/root',
},
created_at: '2018-05-29T12:06:28.851Z',
updated_at: '2018-05-29T12:06:28.851Z',
system: true,
noteable_id: 182,
noteable_type: 'Issue',
resolvable: false,
noteable_iid: 12,
note: 'changed the description',
note_html: '<p dir="auto">changed the description</p>',
current_user: { can_edit: false, can_award_emoji: true },
resolved: false,
resolved_by: null,
system_note_icon_name: 'pencil-square',
discussion_id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
emoji_awardable: false,
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_905&user_id=1',
human_access: 'Owner',
path: '/gitlab-org/gitlab-shell/notes/905',
},
],
individual_note: true,
resolvable: false,
resolved: false,
diff_discussion: false,
},
{
id: '70411b08cdfc01f24187a06d77daa33464cb2620',
reply_id: '70411b08cdfc01f24187a06d77daa33464cb2620',
expanded: true,
notes: [
{
id: 906,
type: null,
attachment: null,
author: {
id: 1,
name: 'Administrator',
username: 'root',
state: 'active',
avatar_url:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
path: '/root',
},
created_at: '2018-05-29T12:20:02.925Z',
updated_at: '2018-05-29T12:20:02.925Z',
system: true,
noteable_id: 182,
noteable_type: 'Issue',
resolvable: false,
noteable_iid: 12,
note: 'changed the description',
note_html: '<p dir="auto">changed the description</p>',
current_user: { can_edit: false, can_award_emoji: true },
resolved: false,
resolved_by: null,
system_note_icon_name: 'pencil-square',
discussion_id: '70411b08cdfc01f24187a06d77daa33464cb2620',
emoji_awardable: false,
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_906&user_id=1',
human_access: 'Owner',
path: '/gitlab-org/gitlab-shell/notes/906',
},
],
individual_note: true,
resolvable: false,
resolved: false,
diff_discussion: false,
},
];
export const collapsedSystemNotes = [
{
id: '39b271c2033e9ed43d8edb393702f65f7a830459',
reply_id: '39b271c2033e9ed43d8edb393702f65f7a830459',
expanded: true,
notes: [
{
id: 901,
type: null,
attachment: null,
author: {
id: 1,
name: 'Administrator',
username: 'root',
state: 'active',
avatar_url:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
path: '/root',
},
created_at: '2018-05-29T12:05:36.117Z',
updated_at: '2018-05-29T12:05:36.117Z',
system: false,
noteable_id: 182,
noteable_type: 'Issue',
resolvable: false,
noteable_iid: 12,
note:
'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.',
note_html:
'<p dir="auto">Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.</p>',
current_user: { can_edit: true, can_award_emoji: true },
resolved: false,
resolved_by: null,
discussion_id: '39b271c2033e9ed43d8edb393702f65f7a830459',
emoji_awardable: true,
award_emoji: [],
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_901&user_id=1',
human_access: 'Owner',
toggle_award_path: '/gitlab-org/gitlab-shell/notes/901/toggle_award_emoji',
path: '/gitlab-org/gitlab-shell/notes/901',
},
],
individual_note: true,
resolvable: false,
resolved: false,
diff_discussion: false,
},
{
id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
reply_id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
expanded: true,
notes: [
{
id: 902,
type: null,
attachment: null,
author: {
id: 1,
name: 'Administrator',
username: 'root',
state: 'active',
avatar_url:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
path: '/root',
},
created_at: '2018-05-29T12:05:58.694Z',
updated_at: '2018-05-29T12:05:58.694Z',
system: false,
noteable_id: 182,
noteable_type: 'Issue',
resolvable: false,
noteable_iid: 12,
note:
'Varius vel pharetra vel turpis nunc eget lorem. Ipsum dolor sit amet consectetur adipiscing.',
note_html:
'<p dir="auto">Varius vel pharetra vel turpis nunc eget lorem. Ipsum dolor sit amet consectetur adipiscing.</p>',
current_user: { can_edit: true, can_award_emoji: true },
resolved: false,
resolved_by: null,
discussion_id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
emoji_awardable: true,
award_emoji: [],
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_902&user_id=1',
human_access: 'Owner',
toggle_award_path: '/gitlab-org/gitlab-shell/notes/902/toggle_award_emoji',
path: '/gitlab-org/gitlab-shell/notes/902',
},
],
individual_note: true,
resolvable: false,
resolved: false,
diff_discussion: false,
},
{
id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
reply_id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
expanded: true,
notes: [
{
id: 904,
type: null,
attachment: null,
author: {
id: 1,
name: 'Administrator',
username: 'root',
state: 'active',
avatar_url:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
path: '/root',
},
created_at: '2018-05-29T12:06:16.112Z',
updated_at: '2018-05-29T12:06:16.112Z',
system: false,
noteable_id: 182,
noteable_type: 'Issue',
resolvable: false,
noteable_iid: 12,
note: 'Ullamcorper eget nulla facilisi etiam',
note_html: '<p dir="auto">Ullamcorper eget nulla facilisi etiam</p>',
current_user: { can_edit: true, can_award_emoji: true },
resolved: false,
resolved_by: null,
discussion_id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
emoji_awardable: true,
award_emoji: [],
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_904&user_id=1',
human_access: 'Owner',
toggle_award_path: '/gitlab-org/gitlab-shell/notes/904/toggle_award_emoji',
path: '/gitlab-org/gitlab-shell/notes/904',
},
],
individual_note: true,
resolvable: false,
resolved: false,
diff_discussion: false,
},
{
id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
reply_id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
expanded: true,
notes: [
{
id: 905,
type: null,
attachment: null,
author: {
id: 1,
name: 'Administrator',
username: 'root',
state: 'active',
avatar_url:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
path: '/root',
},
created_at: '2018-05-29T12:06:28.851Z',
updated_at: '2018-05-29T12:06:28.851Z',
system: true,
noteable_id: 182,
noteable_type: 'Issue',
resolvable: false,
noteable_iid: 12,
note: 'changed the description',
note_html: '\n <p dir="auto">changed the description 2 times within 1 minute </p>',
current_user: { can_edit: false, can_award_emoji: true },
resolved: false,
resolved_by: null,
system_note_icon_name: 'pencil-square',
discussion_id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
emoji_awardable: false,
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_905&user_id=1',
human_access: 'Owner',
path: '/gitlab-org/gitlab-shell/notes/905',
times_updated: 2,
},
],
individual_note: true,
resolvable: false,
resolved: false,
diff_discussion: false,
},
{
id: '70411b08cdfc01f24187a06d77daa33464cb2620',
reply_id: '70411b08cdfc01f24187a06d77daa33464cb2620',
expanded: true,
notes: [
{
id: 906,
type: null,
attachment: null,
author: {
id: 1,
name: 'Administrator',
username: 'root',
state: 'active',
avatar_url:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
path: '/root',
},
created_at: '2018-05-29T12:20:02.925Z',
updated_at: '2018-05-29T12:20:02.925Z',
system: true,
noteable_id: 182,
noteable_type: 'Issue',
resolvable: false,
noteable_iid: 12,
note: 'changed the description',
note_html: '<p dir="auto">changed the description</p>',
current_user: { can_edit: false, can_award_emoji: true },
resolved: false,
resolved_by: null,
system_note_icon_name: 'pencil-square',
discussion_id: '70411b08cdfc01f24187a06d77daa33464cb2620',
emoji_awardable: false,
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_906&user_id=1',
human_access: 'Owner',
path: '/gitlab-org/gitlab-shell/notes/906',
},
],
individual_note: true,
resolvable: false,
resolved: false,
diff_discussion: false,
},
];
import {
isDescriptionSystemNote,
changeDescriptionNote,
getTimeDifferenceMinutes,
collapseSystemNotes,
} from '~/notes/stores/collapse_utils';
import {
notesWithDescriptionChanges,
collapsedSystemNotes,
} from '../mock_data';
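// collapseSystemNotes is expected to fold consecutive "changed the
// description" system notes made within 10 minutes of each other into a
// single note carrying times_updated (the collapsedSystemNotes fixture
// above encodes exactly that transformation of notesWithDescriptionChanges).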
describe('Collapse utils', () => {
const mockSystemNote = {
note: 'changed the description',
note_html: '<p dir="auto">changed the description</p>',
system: true,
created_at: '2018-05-14T21:28:00.000Z',
};
it('checks if a system note is of a description type', () => {
expect(isDescriptionSystemNote(mockSystemNote)).toEqual(true);
});
it('returns false when a system note is not a description type', () => {
expect(isDescriptionSystemNote(Object.assign({}, mockSystemNote, { note: 'foo' }))).toEqual(false);
});
it('changes the description to contain the number of changed times', () => {
const changedNote = changeDescriptionNote(mockSystemNote, 3, 5);
expect(changedNote.times_updated).toEqual(3);
expect(changedNote.note_html.trim()).toContain('<p dir="auto">changed the description 3 times within 5 minutes </p>');
});
it('gets the time difference between two notes', () => {
const anotherSystemNote = {
created_at: '2018-05-14T21:33:00.000Z',
};
expect(getTimeDifferenceMinutes(mockSystemNote, anotherSystemNote)).toEqual(5);
});
it('collapses all description system notes made within 10 minutes or less from each other', () => {
expect(collapseSystemNotes(notesWithDescriptionChanges)).toEqual(collapsedSystemNotes);
});
});
import * as getters from '~/notes/stores/getters';
-import { notesDataMock, userDataMock, noteableDataMock, individualNote } from '../mock_data';
+import { notesDataMock, userDataMock, noteableDataMock, individualNote, collapseNotesMock } from '../mock_data';

describe('Getters Notes Store', () => {
  let state;

  beforeEach(() => {
    state = {
      notes: [individualNote],
@@ -20,6 +21,22 @@ describe('Getters Notes Store', () => {
    });
  });
describe('Collapsed notes', () => {
const stateCollapsedNotes = {
notes: collapseNotesMock,
targetNoteHash: 'hash',
lastFetchedAt: 'timestamp',
notesData: notesDataMock,
userData: userDataMock,
noteableData: noteableDataMock,
};
it('should return a single system note when a description was updated multiple times', () => {
expect(getters.notes(stateCollapsedNotes).length).toEqual(1);
});
});
  describe('targetNoteHash', () => {
    it('should return `targetNoteHash`', () => {
      expect(getters.targetNoteHash(state)).toEqual('hash');
......
require 'spec_helper'
describe Gitlab::BackgroundMigration::ArchiveLegacyTraces, :migration, schema: 20180529152628 do
include TraceHelpers
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:builds) { table(:ci_builds) }
let(:job_artifacts) { table(:ci_job_artifacts) }
before do
namespaces.create!(id: 123, name: 'gitlab1', path: 'gitlab1')
projects.create!(id: 123, name: 'gitlab1', path: 'gitlab1', namespace_id: 123)
@build = builds.create!(id: 1, project_id: 123, status: 'success', type: 'Ci::Build')
end
context 'when trace file exists at the right place' do
before do
create_legacy_trace(@build, 'trace in file')
end
it 'correctly archives legacy traces' do
expect(job_artifacts.count).to eq(0)
expect(File.exist?(legacy_trace_path(@build))).to be_truthy
described_class.new.perform(1, 1)
expect(job_artifacts.count).to eq(1)
expect(File.exist?(legacy_trace_path(@build))).to be_falsy
expect(File.read(archived_trace_path(job_artifacts.first))).to eq('trace in file')
end
end
context 'when trace file does not exist at the right place' do
it 'does not raise errors nor create job artifact' do
expect { described_class.new.perform(1, 1) }.not_to raise_error
expect(job_artifacts.count).to eq(0)
end
end
context 'when trace data exists in the database' do
before do
create_legacy_trace_in_db(@build, 'trace in db')
end
it 'correctly archives legacy traces' do
expect(job_artifacts.count).to eq(0)
expect(@build.read_attribute(:trace)).not_to be_empty
described_class.new.perform(1, 1)
@build.reload
expect(job_artifacts.count).to eq(1)
expect(@build.read_attribute(:trace)).to be_nil
expect(File.read(archived_trace_path(job_artifacts.first))).to eq('trace in db')
end
end
end
@@ -2002,6 +2002,18 @@ describe Gitlab::Git::Repository, seed_helper: true do
      expect(config).to include("fullpath = #{repository_path}")
    end
  end
context 'repository does not exist' do
it 'raises NoRepository and does not call Gitaly WriteConfig' do
repository = Gitlab::Git::Repository.new('default', 'does/not/exist.git', '')
expect(repository.gitaly_repository_client).not_to receive(:write_config)
expect do
repository.write_config(full_path: 'foo/bar.git')
end.to raise_error(Gitlab::Git::Repository::NoRepository)
end
end
  end

  context "when gitaly_write_config is enabled" do
......
@@ -180,12 +180,12 @@ describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redis_cach
        allow(importer.user_finder)
          .to receive(:user_id_for)
-          .ordered.with(issue.assignees[0])
+          .with(issue.assignees[0])
          .and_return(4)

        allow(importer.user_finder)
          .to receive(:user_id_for)
-          .ordered.with(issue.assignees[1])
+          .with(issue.assignees[1])
          .and_return(5)

        expect(Gitlab::Database)
......
@@ -40,13 +40,19 @@ describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redi
  describe '#execute' do
    it 'imports the pull request' do
+      mr = double(:merge_request, id: 10)
+
      expect(importer)
        .to receive(:create_merge_request)
-        .and_return(10)
+        .and_return([mr, false])
+
+      expect(importer)
+        .to receive(:insert_git_data)
+        .with(mr, false)

      expect_any_instance_of(Gitlab::GithubImport::IssuableFinder)
        .to receive(:cache_database_id)
-        .with(10)
+        .with(mr.id)

      importer.execute
    end
@@ -99,18 +105,11 @@ describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redi
      importer.create_merge_request
    end

-    it 'returns the ID of the created merge request' do
-      id = importer.create_merge_request
-
-      expect(id).to be_a_kind_of(Numeric)
-    end
-
-    it 'creates the merge request diffs' do
-      importer.create_merge_request
-
-      mr = project.merge_requests.take
-
-      expect(mr.merge_request_diffs.exists?).to eq(true)
+    it 'returns the created merge request' do
+      mr, exists = importer.create_merge_request
+
+      expect(mr).to be_instance_of(MergeRequest)
+      expect(exists).to eq(false)
    end
  end
@@ -217,5 +216,65 @@ describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redi
        expect { importer.create_merge_request }.not_to raise_error
      end
    end
context 'when the merge request already exists' do
before do
allow(importer.user_finder)
.to receive(:author_id_for)
.with(pull_request)
.and_return([user.id, true])
allow(importer.user_finder)
.to receive(:assignee_id_for)
.with(pull_request)
.and_return(user.id)
end
it 'returns the existing merge request' do
mr1, exists1 = importer.create_merge_request
mr2, exists2 = importer.create_merge_request
expect(mr2).to eq(mr1)
expect(exists1).to eq(false)
expect(exists2).to eq(true)
end
end
end
describe '#insert_git_data' do
before do
allow(importer.milestone_finder)
.to receive(:id_for)
.with(pull_request)
.and_return(milestone.id)
allow(importer.user_finder)
.to receive(:author_id_for)
.with(pull_request)
.and_return([user.id, true])
allow(importer.user_finder)
.to receive(:assignee_id_for)
.with(pull_request)
.and_return(user.id)
end
it 'creates the merge request diffs' do
mr, exists = importer.create_merge_request
importer.insert_git_data(mr, exists)
expect(mr.merge_request_diffs.exists?).to eq(true)
end
it 'creates the merge request diff commits' do
mr, exists = importer.create_merge_request
importer.insert_git_data(mr, exists)
diff = mr.merge_request_diffs.take
expect(diff.merge_request_diff_commits.exists?).to eq(true)
end
  end
end
-require 'spec_helper'
+require 'fast_spec_helper'

describe Gitlab::Utils::Override do
-  let(:base) { Struct.new(:good) }
+  let(:base) do
+    Struct.new(:good) do
+      def self.good
+        0
+      end
+    end
+  end

  let(:derived) { Class.new(base).tap { |m| m.extend described_class } }
  let(:extension) { Module.new.tap { |m| m.extend described_class } }
@@ -9,6 +15,14 @@ describe Gitlab::Utils::Override do
  let(:prepending_class) { base.tap { |m| m.prepend extension } }
  let(:including_class) { base.tap { |m| m.include extension } }

+  let(:prepending_class_methods) do
+    base.tap { |m| m.singleton_class.prepend extension }
+  end
+
+  let(:extending_class_methods) do
+    base.tap { |m| m.extend extension }
+  end

  let(:klass) { subject }

  def good(mod)
@@ -36,7 +50,7 @@ describe Gitlab::Utils::Override do
  shared_examples 'checking as intended' do
    it 'checks ok for overriding method' do
      good(subject)
-      result = klass.new(0).good
+      result = instance.good

      expect(result).to eq(1)
      described_class.verify!
@@ -45,7 +59,25 @@ describe Gitlab::Utils::Override do
    it 'raises NotImplementedError when it is not overriding anything' do
      expect do
        bad(subject)
-        klass.new(0).bad
+        instance.bad
described_class.verify!
end.to raise_error(NotImplementedError)
end
end
shared_examples 'checking as intended, nothing was overridden' do
it 'raises NotImplementedError because it is not overriding it' do
expect do
good(subject)
instance.good
described_class.verify!
end.to raise_error(NotImplementedError)
end
it 'raises NotImplementedError when it is not overriding anything' do
expect do
bad(subject)
instance.bad
        described_class.verify!
      end.to raise_error(NotImplementedError)
    end
@@ -54,7 +86,7 @@ describe Gitlab::Utils::Override do
  shared_examples 'nothing happened' do
    it 'does not complain when it is overriding something' do
      good(subject)
-      result = klass.new(0).good
+      result = instance.good

      expect(result).to eq(1)
      described_class.verify!
@@ -62,7 +94,7 @@ describe Gitlab::Utils::Override do
    it 'does not complain when it is not overriding anything' do
      bad(subject)
-      result = klass.new(0).bad
+      result = instance.bad

      expect(result).to eq(true)
      described_class.verify!
@@ -75,83 +107,97 @@ describe Gitlab::Utils::Override do
  end

  describe '#override' do
-    context 'when STATIC_VERIFICATION is set' do
-      before do
-        stub_env('STATIC_VERIFICATION', 'true')
-      end
-
-      context 'when subject is a class' do
-        subject { derived }
-
-        it_behaves_like 'checking as intended'
-      end
-
-      context 'when subject is a module, and class is prepending it' do
-        subject { extension }
-        let(:klass) { prepending_class }
-
-        it_behaves_like 'checking as intended'
-      end
-
-      context 'when subject is a module, and class is including it' do
-        subject { extension }
-        let(:klass) { including_class }
-
-        it 'raises NotImplementedError because it is not overriding it' do
-          expect do
-            good(subject)
-            klass.new(0).good
-            described_class.verify!
-          end.to raise_error(NotImplementedError)
-        end
-
-        it 'raises NotImplementedError when it is not overriding anything' do
-          expect do
-            bad(subject)
-            klass.new(0).bad
-            described_class.verify!
-          end.to raise_error(NotImplementedError)
-        end
-      end
-    end
-
-    context 'when STATIC_VERIFICATION is not set' do
-      before do
-        stub_env('STATIC_VERIFICATION', nil)
-      end
-
-      context 'when subject is a class' do
-        subject { derived }
-
-        it_behaves_like 'nothing happened'
-      end
-
-      context 'when subject is a module, and class is prepending it' do
-        subject { extension }
-        let(:klass) { prepending_class }
-
-        it_behaves_like 'nothing happened'
-      end
-
-      context 'when subject is a module, and class is including it' do
-        subject { extension }
-        let(:klass) { including_class }
-
-        it 'does not complain when it is overriding something' do
-          good(subject)
-          result = klass.new(0).good
-
-          expect(result).to eq(0)
-          described_class.verify!
-        end
-
-        it 'does not complain when it is not overriding anything' do
-          bad(subject)
-          result = klass.new(0).bad
-
-          expect(result).to eq(true)
-          described_class.verify!
-        end
-      end
-    end
+    context 'when instance is klass.new(0)' do
+      let(:instance) { klass.new(0) }
+
+      context 'when STATIC_VERIFICATION is set' do
+        before do
+          stub_env('STATIC_VERIFICATION', 'true')
+        end
+
+        context 'when subject is a class' do
+          subject { derived }
+
+          it_behaves_like 'checking as intended'
+        end
+
+        context 'when subject is a module, and class is prepending it' do
+          subject { extension }
+          let(:klass) { prepending_class }
+
+          it_behaves_like 'checking as intended'
+        end
+
+        context 'when subject is a module, and class is including it' do
+          subject { extension }
+          let(:klass) { including_class }
+
+          it_behaves_like 'checking as intended, nothing was overridden'
+        end
+      end
+
+      context 'when STATIC_VERIFICATION is not set' do
+        before do
+          stub_env('STATIC_VERIFICATION', nil)
+        end
+
+        context 'when subject is a class' do
+          subject { derived }
+
+          it_behaves_like 'nothing happened'
+        end
+
+        context 'when subject is a module, and class is prepending it' do
+          subject { extension }
+          let(:klass) { prepending_class }
+
+          it_behaves_like 'nothing happened'
+        end
+
+        context 'when subject is a module, and class is including it' do
+          subject { extension }
+          let(:klass) { including_class }
+
+          it 'does not complain when it is overriding something' do
+            good(subject)
+            result = instance.good
+
+            expect(result).to eq(0)
+            described_class.verify!
+          end
+
+          it 'does not complain when it is not overriding anything' do
+            bad(subject)
+            result = instance.bad
+
+            expect(result).to eq(true)
+            described_class.verify!
+          end
+        end
+      end
+    end
+
+    context 'when instance is klass' do
+      let(:instance) { klass }
+
+      context 'when STATIC_VERIFICATION is set' do
+        before do
+          stub_env('STATIC_VERIFICATION', 'true')
+        end
+
+        context 'when subject is a module, and class is prepending it' do
+          subject { extension }
+          let(:klass) { prepending_class_methods }
+
+          it_behaves_like 'checking as intended'
+        end
+
+        context 'when subject is a module, and class is extending it' do
+          subject { extension }
+          let(:klass) { extending_class_methods }
+
+          it_behaves_like 'checking as intended, nothing was overridden'
+        end
+      end
+    end
  end
end
......
require 'spec_helper'
describe ObjectStorage::DirectUpload do
let(:credentials) do
{
provider: 'AWS',
aws_access_key_id: 'AWS_ACCESS_KEY_ID',
aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY'
}
end
let(:storage_url) { 'https://uploads.s3.amazonaws.com/' }
let(:bucket_name) { 'uploads' }
let(:object_name) { 'tmp/uploads/my-file' }
let(:maximum_size) { 1.gigabyte }
let(:direct_upload) { described_class.new(credentials, bucket_name, object_name, has_length: has_length, maximum_size: maximum_size) }
describe '#has_length' do
context 'is known' do
let(:has_length) { true }
let(:maximum_size) { nil }
it "maximum size is not required" do
expect { direct_upload }.not_to raise_error
end
end
context 'is unknown' do
let(:has_length) { false }
context 'and maximum size is specified' do
let(:maximum_size) { 1.gigabyte }
it "does not raise an error" do
expect { direct_upload }.not_to raise_error
end
end
context 'and maximum size is not specified' do
let(:maximum_size) { nil }
it "raises an error" do
expect { direct_upload }.to raise_error /maximum_size has to be specified if length is unknown/
end
end
end
end
describe '#to_hash' do
subject { direct_upload.to_hash }
shared_examples 'a valid upload' do
it "returns valid structure" do
expect(subject).to have_key(:Timeout)
expect(subject[:GetURL]).to start_with(storage_url)
expect(subject[:StoreURL]).to start_with(storage_url)
expect(subject[:DeleteURL]).to start_with(storage_url)
end
end
shared_examples 'a valid upload with multipart data' do
before do
stub_object_storage_multipart_init(storage_url, "myUpload")
end
it_behaves_like 'a valid upload'
it "returns valid structure" do
expect(subject).to have_key(:MultipartUpload)
expect(subject[:MultipartUpload]).to have_key(:PartSize)
expect(subject[:MultipartUpload][:PartURLs]).to all(start_with(storage_url))
expect(subject[:MultipartUpload][:PartURLs]).to all(include('uploadId=myUpload'))
expect(subject[:MultipartUpload][:CompleteURL]).to start_with(storage_url)
expect(subject[:MultipartUpload][:CompleteURL]).to include('uploadId=myUpload')
expect(subject[:MultipartUpload][:AbortURL]).to start_with(storage_url)
expect(subject[:MultipartUpload][:AbortURL]).to include('uploadId=myUpload')
end
end
shared_examples 'a valid upload without multipart data' do
it_behaves_like 'a valid upload'
it "returns valid structure" do
expect(subject).not_to have_key(:MultipartUpload)
end
end
context 'when AWS is used' do
context 'when length is known' do
let(:has_length) { true }
it_behaves_like 'a valid upload without multipart data'
end
context 'when length is unknown' do
let(:has_length) { false }
it_behaves_like 'a valid upload with multipart data' do
context 'when maximum upload size is 10MB' do
let(:maximum_size) { 10.megabyte }
it 'returns only 2 parts' do
expect(subject[:MultipartUpload][:PartURLs].length).to eq(2)
end
it 'part size is the minimum, 5MB' do
expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
end
end
context 'when maximum upload size is 12MB' do
let(:maximum_size) { 12.megabyte }
it 'returns only 3 parts' do
expect(subject[:MultipartUpload][:PartURLs].length).to eq(3)
end
it 'part size is rounded-up to 5MB' do
expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
end
end
context 'when maximum upload size is 49GB' do
let(:maximum_size) { 49.gigabyte }
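        # 49.gigabyte split across the 100-part maximum is ~501.76MB per
        # part, which ceiling-rounds to the next 5MB multiple: 101 * 5MB = 505MB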
it 'returns maximum, 100 parts' do
expect(subject[:MultipartUpload][:PartURLs].length).to eq(100)
end
it 'part size is rounded-up to 505MB' do
expect(subject[:MultipartUpload][:PartSize]).to eq(505.megabyte)
end
end
end
end
end
context 'when Google is used' do
let(:credentials) do
{
provider: 'Google',
google_storage_access_key_id: 'GOOGLE_ACCESS_KEY_ID',
google_storage_secret_access_key: 'GOOGLE_SECRET_ACCESS_KEY'
}
end
let(:storage_url) { 'https://storage.googleapis.com/uploads/' }
context 'when length is known' do
let(:has_length) { true }
it_behaves_like 'a valid upload without multipart data'
end
context 'when length is unknown' do
let(:has_length) { false }
it_behaves_like 'a valid upload without multipart data'
end
end
end
end
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180529152628_schedule_to_archive_legacy_traces')
describe ScheduleToArchiveLegacyTraces, :migration do
include TraceHelpers
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:builds) { table(:ci_builds) }
let(:job_artifacts) { table(:ci_job_artifacts) }
before do
namespaces.create!(id: 123, name: 'gitlab1', path: 'gitlab1')
projects.create!(id: 123, name: 'gitlab1', path: 'gitlab1', namespace_id: 123)
@build_success = builds.create!(id: 1, project_id: 123, status: 'success', type: 'Ci::Build')
@build_failed = builds.create!(id: 2, project_id: 123, status: 'failed', type: 'Ci::Build')
@builds_canceled = builds.create!(id: 3, project_id: 123, status: 'canceled', type: 'Ci::Build')
@build_running = builds.create!(id: 4, project_id: 123, status: 'running', type: 'Ci::Build')
create_legacy_trace(@build_success, 'This job is done')
create_legacy_trace(@build_failed, 'This job is done')
create_legacy_trace(@builds_canceled, 'This job is done')
create_legacy_trace(@build_running, 'This job is not done yet')
end
it 'correctly archives legacy traces' do
expect(job_artifacts.count).to eq(0)
expect(File.exist?(legacy_trace_path(@build_success))).to be_truthy
expect(File.exist?(legacy_trace_path(@build_failed))).to be_truthy
expect(File.exist?(legacy_trace_path(@builds_canceled))).to be_truthy
expect(File.exist?(legacy_trace_path(@build_running))).to be_truthy
migrate!
expect(job_artifacts.count).to eq(3)
expect(File.exist?(legacy_trace_path(@build_success))).to be_falsy
expect(File.exist?(legacy_trace_path(@build_failed))).to be_falsy
expect(File.exist?(legacy_trace_path(@builds_canceled))).to be_falsy
expect(File.exist?(legacy_trace_path(@build_running))).to be_truthy
expect(File.exist?(archived_trace_path(job_artifacts.where(job_id: @build_success.id).first))).to be_truthy
expect(File.exist?(archived_trace_path(job_artifacts.where(job_id: @build_failed.id).first))).to be_truthy
expect(File.exist?(archived_trace_path(job_artifacts.where(job_id: @builds_canceled.id).first))).to be_truthy
expect(job_artifacts.where(job_id: @build_running.id)).not_to be_exist
end
end
@@ -2506,4 +2506,76 @@ describe Ci::Build do
      end
    end
  end
describe 'pages deployments' do
set(:build) { create(:ci_build, project: project, user: user) }
context 'when job is "pages"' do
before do
build.name = 'pages'
end
context 'when pages are enabled' do
before do
allow(Gitlab.config.pages).to receive_messages(enabled: true)
end
it 'is marked as pages generator' do
expect(build).to be_pages_generator
end
context 'job succeeds' do
it "calls pages worker" do
expect(PagesWorker).to receive(:perform_async).with(:deploy, build.id)
build.success!
end
end
context 'job fails' do
it "does not call pages worker" do
expect(PagesWorker).not_to receive(:perform_async)
build.drop!
end
end
end
context 'when pages are disabled' do
before do
allow(Gitlab.config.pages).to receive_messages(enabled: false)
end
it 'is not marked as pages generator' do
expect(build).not_to be_pages_generator
end
context 'job succeeds' do
it "does not call pages worker" do
expect(PagesWorker).not_to receive(:perform_async)
build.success!
end
end
end
end
context 'when job is not "pages"' do
before do
build.name = 'other-job'
end
it 'is not marked as pages generator' do
expect(build).not_to be_pages_generator
end
context 'job succeeds' do
it "does not call pages worker" do
expect(PagesWorker).not_to receive(:perform_async)
build.success
end
end
end
end
end
@@ -630,15 +630,17 @@ describe API::Issues do
    end

    it 'avoids N+1 queries' do
-      control_count = ActiveRecord::QueryRecorder.new do
+      get api("/projects/#{project.id}/issues", user)
+
+      control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
        get api("/projects/#{project.id}/issues", user)
      end.count

-      create(:issue, author: user, project: project)
+      create_list(:issue, 3, project: project)

      expect do
        get api("/projects/#{project.id}/issues", user)
-      end.not_to exceed_query_limit(control_count)
+      end.not_to exceed_all_query_limit(control_count)
    end

    it 'returns 404 when project does not exist' do
......
@@ -177,6 +177,18 @@ describe API::Jobs do
        json_response.each { |job| expect(job['pipeline']['id']).to eq(pipeline.id) }
      end
    end
it 'avoids N+1 queries' do
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), query
end.count
3.times { create(:ci_build, :artifacts, pipeline: pipeline) }
expect do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), query
end.not_to exceed_all_query_limit(control_count)
end
  end

  context 'unauthorized user' do
......
@@ -1101,6 +1101,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
            expect(json_response['RemoteObject']).to have_key('GetURL')
            expect(json_response['RemoteObject']).to have_key('StoreURL')
            expect(json_response['RemoteObject']).to have_key('DeleteURL')
+            expect(json_response['RemoteObject']).to have_key('MultipartUpload')
          end
        end
......
@@ -1021,6 +1021,7 @@ describe 'Git LFS API and storage' do
        expect(json_response['RemoteObject']).to have_key('GetURL')
        expect(json_response['RemoteObject']).to have_key('StoreURL')
        expect(json_response['RemoteObject']).to have_key('DeleteURL')
+        expect(json_response['RemoteObject']).not_to have_key('MultipartUpload')
        expect(json_response['LfsOid']).to eq(sample_oid)
        expect(json_response['LfsSize']).to eq(sample_size)
      end
......
require 'spec_helper'
describe PagesService do
let(:build) { create(:ci_build) }
let(:data) { Gitlab::DataBuilder::Build.build(build) }
let(:service) { described_class.new(data) }
before do
allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
end
context 'execute asynchronously for pages job' do
before do
build.name = 'pages'
end
context 'on success' do
before do
build.success
end
it 'executes worker' do
expect(PagesWorker).to receive(:perform_async)
service.execute
end
end
%w(pending running failed canceled).each do |status|
context "on #{status}" do
before do
build.status = status
end
it 'does not execute worker' do
expect(PagesWorker).not_to receive(:perform_async)
service.execute
end
end
end
end
context 'for other jobs' do
before do
build.name = 'other job'
build.success
end
it 'does not execute worker' do
expect(PagesWorker).not_to receive(:perform_async)
service.execute
end
end
end
@@ -275,6 +275,10 @@ describe Projects::UpdateService do
      it { is_expected.to eq(false) }
    end
context 'when auto devops is nil' do
it { is_expected.to eq(false) }
end
    context 'when auto devops is explicitly enabled' do
      before do
        project.create_auto_devops!(enabled: true)
......
module ActiveRecord
  class QueryRecorder
-    attr_reader :log, :cached
+    attr_reader :log, :skip_cached, :cached

-    def initialize(&block)
+    def initialize(skip_cached: true, &block)
      @log = []
      @cached = []
+      @skip_cached = skip_cached
      ActiveSupport::Notifications.subscribed(method(:callback), 'sql.active_record', &block)
    end
@@ -16,7 +17,7 @@ module ActiveRecord
    def callback(name, start, finish, message_id, values)
      show_backtrace(values) if ENV['QUERY_RECORDER_DEBUG']

-      if values[:name]&.include?("CACHE")
+      if values[:name]&.include?("CACHE") && skip_cached
        @cached << values[:sql]
      elsif !values[:name]&.include?("SCHEMA")
        @log << values[:sql]
......
@@ -45,4 +45,16 @@ module StubObjectStorage
                          remote_directory: 'uploads',
                          **params)
  end
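  # Stubs S3's InitiateMultipartUpload endpoint (POST <object>?uploads) with
  # a canned XML response so that direct-upload specs receive a
  # deterministic UploadId; Bucket and Key below are placeholder values.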
def stub_object_storage_multipart_init(endpoint, upload_id = "upload_id")
stub_request(:post, %r{\A#{endpoint}tmp/uploads/[a-z0-9-]*\?uploads\z})
.to_return status: 200, body: <<-EOS.strip_heredoc
<?xml version="1.0" encoding="UTF-8"?>
<InitiateMultipartUploadResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Bucket>example-bucket</Bucket>
<Key>example-object</Key>
<UploadId>#{upload_id}</UploadId>
</InitiateMultipartUploadResult>
EOS
end
end
-RSpec::Matchers.define :exceed_query_limit do |expected|
-  supports_block_expectations
-
-  match do |block|
-    @subject_block = block
-    actual_count > expected_count + threshold
-  end
-
-  failure_message_when_negated do |actual|
-    threshold_message = threshold > 0 ? " (+#{@threshold})" : ''
-    counts = "#{expected_count}#{threshold_message}"
-    "Expected a maximum of #{counts} queries, got #{actual_count}:\n\n#{log_message}"
-  end
+module ExceedQueryLimitHelpers
  def with_threshold(threshold)
    @threshold = threshold
    self
@@ -43,7 +30,7 @@ RSpec::Matchers.define :exceed_query_limit do |expected|
  end

  def recorder
-    @recorder ||= ActiveRecord::QueryRecorder.new(&@subject_block)
+    @recorder ||= ActiveRecord::QueryRecorder.new(skip_cached: skip_cached, &@subject_block)
  end

  def count_queries(queries)
@@ -61,4 +48,52 @@ RSpec::Matchers.define :exceed_query_limit do |expected|
    @recorder.log_message
  end
-end
def skip_cached
true
end
def verify_count(&block)
@subject_block = block
actual_count > expected_count + threshold
end
def failure_message
threshold_message = threshold > 0 ? " (+#{@threshold})" : ''
counts = "#{expected_count}#{threshold_message}"
"Expected a maximum of #{counts} queries, got #{actual_count}:\n\n#{log_message}"
end
end
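# Counts every query, including ones answered from the SQL statement cache;
# use :exceed_query_limit below when cached queries should not count.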
RSpec::Matchers.define :exceed_all_query_limit do |expected|
supports_block_expectations
include ExceedQueryLimitHelpers
match do |block|
verify_count(&block)
end
failure_message_when_negated do |actual|
failure_message
end
def skip_cached
false
end
end
# Excludes cached queries from the query count
RSpec::Matchers.define :exceed_query_limit do |expected|
supports_block_expectations
include ExceedQueryLimitHelpers
match do |block|
verify_count(&block)
end
failure_message_when_negated do |actual|
failure_message
end
end
module TraceHelpers
def create_legacy_trace(build, content)
File.open(legacy_trace_path(build), 'wb') { |stream| stream.write(content) }
end
def create_legacy_trace_in_db(build, content)
build.update_column(:trace, content)
end
def legacy_trace_path(build)
legacy_trace_dir = File.join(Settings.gitlab_ci.builds_path,
build.created_at.utc.strftime("%Y_%m"),
build.project_id.to_s)
FileUtils.mkdir_p(legacy_trace_dir)
File.join(legacy_trace_dir, "#{build.id}.log")
end
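  # Archived traces fan out on disk by the SHA-256 of the project ID, e.g.
  # (illustrative layout derived from the path built below):
  #   <artifacts_path>/ab/cd/abcd.../<YYYY_MM_DD>/<job_id>/<artifact_id>/job.log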
def archived_trace_path(job_artifact)
disk_hash = Digest::SHA2.hexdigest(job_artifact.project_id.to_s)
creation_date = job_artifact.created_at.utc.strftime('%Y_%m_%d')
File.join(Gitlab.config.artifacts.path, disk_hash[0..1], disk_hash[2..3], disk_hash,
creation_date, job_artifact.job_id.to_s, job_artifact.id.to_s, 'job.log')
end
end
@@ -355,7 +355,10 @@ describe ObjectStorage do
  end

  describe '.workhorse_authorize' do
-    subject { uploader_class.workhorse_authorize }
+    let(:has_length) { true }
+    let(:maximum_size) { nil }
+
+    subject { uploader_class.workhorse_authorize(has_length: has_length, maximum_size: maximum_size) }

    before do
      # ensure that we use regular Fog libraries
@@ -371,10 +374,6 @@ describe ObjectStorage do
        expect(subject[:TempPath]).to start_with(uploader_class.root)
        expect(subject[:TempPath]).to include(described_class::TMP_UPLOAD_PATH)
      end
-
-      it "does not return remote store" do
-        is_expected.not_to have_key('RemoteObject')
-      end
    end

    shared_examples 'uses remote storage' do
@@ -383,7 +382,7 @@ describe ObjectStorage do
        expect(subject[:RemoteObject]).to have_key(:ID)
        expect(subject[:RemoteObject]).to include(Timeout: a_kind_of(Integer))
-        expect(subject[:RemoteObject][:Timeout]).to be(ObjectStorage::DIRECT_UPLOAD_TIMEOUT)
+        expect(subject[:RemoteObject][:Timeout]).to be(ObjectStorage::DirectUpload::TIMEOUT)
        expect(subject[:RemoteObject]).to have_key(:GetURL)
        expect(subject[:RemoteObject]).to have_key(:DeleteURL)
        expect(subject[:RemoteObject]).to have_key(:StoreURL)
@@ -391,9 +390,31 @@ describe ObjectStorage do
        expect(subject[:RemoteObject][:DeleteURL]).to include(described_class::TMP_UPLOAD_PATH)
        expect(subject[:RemoteObject][:StoreURL]).to include(described_class::TMP_UPLOAD_PATH)
      end
-
-      it "does not return local store" do
-        is_expected.not_to have_key('TempPath')
-      end
    end

+    shared_examples 'uses remote storage with multipart uploads' do
+      it_behaves_like 'uses remote storage' do
+        it "returns multipart upload" do
+          is_expected.to have_key(:RemoteObject)
+
+          expect(subject[:RemoteObject]).to have_key(:MultipartUpload)
+          expect(subject[:RemoteObject][:MultipartUpload]).to have_key(:PartSize)
+          expect(subject[:RemoteObject][:MultipartUpload]).to have_key(:PartURLs)
+          expect(subject[:RemoteObject][:MultipartUpload]).to have_key(:CompleteURL)
+          expect(subject[:RemoteObject][:MultipartUpload]).to have_key(:AbortURL)
+          expect(subject[:RemoteObject][:MultipartUpload][:PartURLs]).to all(include(described_class::TMP_UPLOAD_PATH))
+          expect(subject[:RemoteObject][:MultipartUpload][:CompleteURL]).to include(described_class::TMP_UPLOAD_PATH)
+          expect(subject[:RemoteObject][:MultipartUpload][:AbortURL]).to include(described_class::TMP_UPLOAD_PATH)
+        end
+      end
+    end
+
+    shared_examples 'uses remote storage without multipart uploads' do
+      it_behaves_like 'uses remote storage' do
+        it "does not return multipart upload" do
+          is_expected.to have_key(:RemoteObject)
+          expect(subject[:RemoteObject]).not_to have_key(:MultipartUpload)
+        end
+      end
+    end
...@@ -416,6 +437,8 @@ describe ObjectStorage do ...@@ -416,6 +437,8 @@ describe ObjectStorage do
end end
context 'uses AWS' do context 'uses AWS' do
let(:storage_url) { "https://uploads.s3-eu-central-1.amazonaws.com/" }
before do before do
expect(uploader_class).to receive(:object_store_credentials) do expect(uploader_class).to receive(:object_store_credentials) do
{ provider: "AWS", { provider: "AWS",
@@ -425,18 +448,40 @@ describe ObjectStorage do
           end
         end

-      it_behaves_like 'uses remote storage' do
-        let(:storage_url) { "https://uploads.s3-eu-central-1.amazonaws.com/" }
-
-        it 'returns links for S3' do
-          expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
-          expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
-          expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+      context 'for known length' do
+        it_behaves_like 'uses remote storage without multipart uploads' do
+          it 'returns links for S3' do
+            expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+          end
+        end
+      end
+
+      context 'for unknown length' do
+        let(:has_length) { false }
+        let(:maximum_size) { 1.gigabyte }
+
+        before do
+          stub_object_storage_multipart_init(storage_url)
+        end
+
+        it_behaves_like 'uses remote storage with multipart uploads' do
+          it 'returns links for S3' do
+            expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:MultipartUpload][:PartURLs]).to all(start_with(storage_url))
+            expect(subject[:RemoteObject][:MultipartUpload][:CompleteURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:MultipartUpload][:AbortURL]).to start_with(storage_url)
+          end
         end
       end
     end
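Aside: the `for unknown length` branch leans on `stub_object_storage_multipart_init`, a helper defined elsewhere in the suite (spec/support). A minimal sketch of what such a helper plausibly does, assuming WebMock is available — S3 starts a multipart upload with `POST <object>?uploads` and replies with an `UploadId`, around which the pre-signed part/complete/abort URLs are built; the exact URL pattern and body of the real helper may differ:

    # A sketch under the stated assumptions, not the real spec/support helper.
    def stub_object_storage_multipart_init(endpoint, upload_id = "upload_id")
      body = <<~XML
        <?xml version="1.0" encoding="UTF-8"?>
        <InitiateMultipartUploadResult>
          <Bucket>uploads</Bucket>
          <Key>tmp/uploads/example</Key>
          <UploadId>#{upload_id}</UploadId>
        </InitiateMultipartUploadResult>
      XML

      # Any multipart-initiation POST against the stubbed endpoint succeeds.
      stub_request(:post, %r{\A#{Regexp.escape(endpoint)}.*\?uploads\z})
        .to_return(status: 200, body: body)
    end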
     context 'uses Google' do
+      let(:storage_url) { "https://storage.googleapis.com/uploads/" }
+
       before do
         expect(uploader_class).to receive(:object_store_credentials) do
           { provider: "Google",
@@ -445,36 +490,71 @@ describe ObjectStorage do
           end
         end

-      it_behaves_like 'uses remote storage' do
-        let(:storage_url) { "https://storage.googleapis.com/uploads/" }
-
-        it 'returns links for Google Cloud' do
-          expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
-          expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
-          expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+      context 'for known length' do
+        it_behaves_like 'uses remote storage without multipart uploads' do
+          it 'returns links for Google Cloud' do
+            expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+          end
+        end
+      end
+
+      context 'for unknown length' do
+        let(:has_length) { false }
+        let(:maximum_size) { 1.gigabyte }
+
+        it_behaves_like 'uses remote storage without multipart uploads' do
+          it 'returns links for Google Cloud' do
+            expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+          end
         end
       end
     end
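Aside: unlike the AWS contexts, Google's `for unknown length` branch reuses 'uses remote storage without multipart uploads' and sets up no multipart stub — the signed URLs issued for GCS here do not use S3's multipart protocol, so no `MultipartUpload` section can be offered regardless of length. A hypothetical sketch of the provider gate this behaviour implies (the real check lives inside ObjectStorage::DirectUpload and may be shaped differently):

    # Hypothetical; names and structure are assumptions, not the real code.
    def multipart_supported?(credentials)
      credentials[:provider] == "AWS" # S3 and S3-compatible stores such as minio
    end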
     context 'uses GDK/minio' do
+      let(:storage_url) { "http://minio:9000/uploads/" }
+
       before do
         expect(uploader_class).to receive(:object_store_credentials) do
           { provider: "AWS",
             aws_access_key_id: "AWS_ACCESS_KEY_ID",
             aws_secret_access_key: "AWS_SECRET_ACCESS_KEY",
-            endpoint: 'http://127.0.0.1:9000',
+            endpoint: 'http://minio:9000',
             path_style: true,
             region: "gdk" }
         end
       end

-      it_behaves_like 'uses remote storage' do
-        let(:storage_url) { "http://127.0.0.1:9000/uploads/" }
-
-        it 'returns links for S3' do
-          expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
-          expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
-          expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+      context 'for known length' do
+        it_behaves_like 'uses remote storage without multipart uploads' do
+          it 'returns links for S3' do
+            expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+          end
+        end
+      end
+
+      context 'for unknown length' do
+        let(:has_length) { false }
+        let(:maximum_size) { 1.gigabyte }
+
+        before do
+          stub_object_storage_multipart_init(storage_url)
+        end
+
+        it_behaves_like 'uses remote storage with multipart uploads' do
+          it 'returns links for S3' do
+            expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:MultipartUpload][:PartURLs]).to all(start_with(storage_url))
+            expect(subject[:RemoteObject][:MultipartUpload][:CompleteURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:MultipartUpload][:AbortURL]).to start_with(storage_url)
+          end
        end
      end
    end
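Aside on `maximum_size: 1.gigabyte`: when the length is unknown, `PartSize` has to be chosen so that the allowed maximum still fits in the available parts — S3 caps a multipart upload at 10,000 parts and requires at least 5 MiB for every part but the last. A hypothetical derivation with assumed constants (not ObjectStorage's actual ones):

    # Hypothetical illustration; MAX_PARTS is an assumed policy value.
    MAX_PARTS = 100
    MIN_PART_SIZE = 5 * 1024 * 1024 # S3 minimum for all but the last part

    def part_size(maximum_size)
      # Round up so MAX_PARTS parts always cover maximum_size bytes.
      [(maximum_size.to_f / MAX_PARTS).ceil, MIN_PART_SIZE].max
    end

    part_size(1024**3) # => 10_737_419 bytes, so 100 parts span the full 1 GiB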
...