Commit c1c2d4cd authored by Rémy Coutable

Merge remote-tracking branch 'ce/master' into ce-to-ee-2018-07-30

Signed-off-by: Rémy Coutable <remy@rymai.me>
parents 9981781b 1a956035
...@@ -2,6 +2,10 @@ ...@@ -2,6 +2,10 @@
documentation](doc/development/changelog.md) for instructions on adding your own documentation](doc/development/changelog.md) for instructions on adding your own
entry. entry.
## 11.1.4 (2018-07-30)
- No changes.
## 11.1.3 (2018-07-27) ## 11.1.3 (2018-07-27)
### Fixed (8 changes, 1 of them is from the community) ### Fixed (8 changes, 1 of them is from the community)
......
...@@ -50,7 +50,7 @@ export default { ...@@ -50,7 +50,7 @@ export default {
}; };
}, },
computed: { computed: {
...mapGetters('diffs', ['diffHasExpandedDiscussions']), ...mapGetters('diffs', ['diffHasExpandedDiscussions', 'diffHasDiscussions']),
hasExpandedDiscussions() { hasExpandedDiscussions() {
return this.diffHasExpandedDiscussions(this.diffFile); return this.diffHasExpandedDiscussions(this.diffFile);
}, },
...@@ -221,6 +221,7 @@ export default { ...@@ -221,6 +221,7 @@ export default {
v-if="diffFile.blob && diffFile.blob.readableText" v-if="diffFile.blob && diffFile.blob.readableText"
> >
<button <button
:disabled="!diffHasDiscussions(diffFile)"
:class="{ active: hasExpandedDiscussions }" :class="{ active: hasExpandedDiscussions }"
:title="s__('MergeRequests|Toggle comments for this file')" :title="s__('MergeRequests|Toggle comments for this file')"
class="js-btn-vue-toggle-comments btn" class="js-btn-vue-toggle-comments btn"
......
...@@ -47,6 +47,14 @@ export const diffHasExpandedDiscussions = (state, getters) => diff => { ...@@ -47,6 +47,14 @@ export const diffHasExpandedDiscussions = (state, getters) => diff => {
); );
}; };
/**
 * Checks if the diff has any discussions
 * @param {Object} diff
* @returns {Boolean}
*/
export const diffHasDiscussions = (state, getters) => diff =>
getters.getDiffFileDiscussions(diff).length > 0;
/** /**
* Returns an array with the discussions of the given diff * Returns an array with the discussions of the given diff
* @param {Object} diff * @param {Object} diff
......
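Taken together, the new `diffHasDiscussions` getter and the `:disabled` binding above make the toggle-comments button inert for files without discussions. Below is a minimal, hypothetical component sketch of the same pattern; it assumes the namespaced `diffs` Vuex store and a Vue build that compiles inline templates, and is not the actual `diff_file_header.vue` component.

```javascript
// Hypothetical sketch only — names mirror the getters added in this MR.
import { mapGetters } from 'vuex';

export default {
  props: {
    diffFile: { type: Object, required: true },
  },
  computed: {
    ...mapGetters('diffs', ['diffHasDiscussions', 'diffHasExpandedDiscussions']),
    hasExpandedDiscussions() {
      return this.diffHasExpandedDiscussions(this.diffFile);
    },
  },
  template: `
    <button
      type="button"
      :disabled="!diffHasDiscussions(diffFile)"
      :class="{ active: hasExpandedDiscussions }"
    >
      Toggle comments
    </button>
  `,
};
```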
<script> <script>
import Mousetrap from 'mousetrap'; import Mousetrap from 'mousetrap';
import { mapActions, mapState, mapGetters } from 'vuex'; import { mapActions, mapState, mapGetters } from 'vuex';
import { __ } from '~/locale';
import NewModal from './new_dropdown/modal.vue'; import NewModal from './new_dropdown/modal.vue';
import IdeSidebar from './ide_side_bar.vue'; import IdeSidebar from './ide_side_bar.vue';
import RepoTabs from './repo_tabs.vue'; import RepoTabs from './repo_tabs.vue';
...@@ -25,7 +26,6 @@ export default { ...@@ -25,7 +26,6 @@ export default {
}, },
computed: { computed: {
...mapState([ ...mapState([
'changedFiles',
'openFiles', 'openFiles',
'viewer', 'viewer',
'currentMergeRequestId', 'currentMergeRequestId',
...@@ -34,18 +34,10 @@ export default { ...@@ -34,18 +34,10 @@ export default {
'currentProjectId', 'currentProjectId',
'errorMessage', 'errorMessage',
]), ]),
...mapGetters(['activeFile', 'hasChanges']), ...mapGetters(['activeFile', 'hasChanges', 'someUncommitedChanges']),
}, },
mounted() { mounted() {
const returnValue = 'Are you sure you want to lose unsaved changes?'; window.onbeforeunload = e => this.onBeforeUnload(e);
window.onbeforeunload = e => {
if (!this.changedFiles.length) return undefined;
Object.assign(e, {
returnValue,
});
return returnValue;
};
Mousetrap.bind(['t', 'command+p', 'ctrl+p'], e => { Mousetrap.bind(['t', 'command+p', 'ctrl+p'], e => {
if (e.preventDefault) { if (e.preventDefault) {
...@@ -59,6 +51,16 @@ export default { ...@@ -59,6 +51,16 @@ export default {
}, },
methods: { methods: {
...mapActions(['toggleFileFinder']), ...mapActions(['toggleFileFinder']),
onBeforeUnload(e = {}) {
const returnValue = __('Are you sure you want to lose unsaved changes?');
if (!this.someUncommitedChanges) return undefined;
Object.assign(e, {
returnValue,
});
return returnValue;
},
mousetrapStopCallback(e, el, combo) { mousetrapStopCallback(e, el, combo) {
if ( if (
(combo === 't' && el.classList.contains('dropdown-input-field')) || (combo === 't' && el.classList.contains('dropdown-input-field')) ||
......
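For reference, a standalone sketch of the `beforeunload` contract that the extracted `onBeforeUnload` method relies on; this is plain browser behavior, independent of the IDE store, and the dirty-state flag below is a placeholder.

```javascript
// Returning a string from a beforeunload handler (and mirroring it onto
// event.returnValue) asks the browser to show a confirmation dialog before
// the page unloads.
window.onbeforeunload = event => {
  const hasUncommittedChanges = true; // placeholder for a real dirty-state check

  if (!hasUncommittedChanges) return undefined;

  const message = 'Are you sure you want to lose unsaved changes?';
  Object.assign(event, { returnValue: message });
  return message;
};
```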
...@@ -2,11 +2,34 @@ ...@@ -2,11 +2,34 @@
* exports HTTP status codes * exports HTTP status codes
*/ */
export default { const httpStatusCodes = {
ABORTED: 0, ABORTED: 0,
NO_CONTENT: 204,
OK: 200, OK: 200,
CREATED: 201,
ACCEPTED: 202,
NON_AUTHORITATIVE_INFORMATION: 203,
NO_CONTENT: 204,
RESET_CONTENT: 205,
PARTIAL_CONTENT: 206,
MULTI_STATUS: 207,
ALREADY_REPORTED: 208,
IM_USED: 226,
MULTIPLE_CHOICES: 300, MULTIPLE_CHOICES: 300,
BAD_REQUEST: 400, BAD_REQUEST: 400,
NOT_FOUND: 404, NOT_FOUND: 404,
}; };
export const successCodes = [
httpStatusCodes.OK,
httpStatusCodes.CREATED,
httpStatusCodes.ACCEPTED,
httpStatusCodes.NON_AUTHORITATIVE_INFORMATION,
httpStatusCodes.NO_CONTENT,
httpStatusCodes.RESET_CONTENT,
httpStatusCodes.PARTIAL_CONTENT,
httpStatusCodes.MULTI_STATUS,
httpStatusCodes.ALREADY_REPORTED,
httpStatusCodes.IM_USED,
];
export default httpStatusCodes;
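A short usage sketch of the new exports; the `~/lib/utils/http_status` import path is an assumption based on the webpack alias used elsewhere in this diff and on `poll.js` importing `./http_status` below.

```javascript
import httpStatusCodes, { successCodes } from '~/lib/utils/http_status';

// Any 2xx status listed in successCodes counts as a successful response.
const isSuccessful = status => successCodes.indexOf(status) !== -1;

isSuccessful(httpStatusCodes.NO_CONTENT); // true
isSuccessful(httpStatusCodes.BAD_REQUEST); // false
```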
import httpStatusCodes from './http_status'; import httpStatusCodes, { successCodes } from './http_status';
import { normalizeHeaders } from './common_utils'; import { normalizeHeaders } from './common_utils';
/** /**
...@@ -62,7 +62,7 @@ export default class Poll { ...@@ -62,7 +62,7 @@ export default class Poll {
checkConditions(response) { checkConditions(response) {
const headers = normalizeHeaders(response.headers); const headers = normalizeHeaders(response.headers);
const pollInterval = parseInt(headers[this.intervalHeader], 10); const pollInterval = parseInt(headers[this.intervalHeader], 10);
if (pollInterval > 0 && response.status === httpStatusCodes.OK && this.canPoll) { if (pollInterval > 0 && successCodes.indexOf(response.status) !== -1 && this.canPoll) {
clearTimeout(this.timeoutID); clearTimeout(this.timeoutID);
this.timeoutID = setTimeout(() => { this.timeoutID = setTimeout(() => {
this.makeRequest(); this.makeRequest();
......
...@@ -405,7 +405,7 @@ class ApplicationController < ActionController::Base ...@@ -405,7 +405,7 @@ class ApplicationController < ActionController::Base
# actually stored in the session and a token is needed # actually stored in the session and a token is needed
# for every request. If you want the token to work as a # for every request. If you want the token to work as a
# sign in token, you can simply remove store: false. # sign in token, you can simply remove store: false.
sign_in user, store: false sign_in(user, store: false, message: :sessionless_sign_in)
end end
end end
......
...@@ -60,7 +60,7 @@ module AuthenticatesWithTwoFactor ...@@ -60,7 +60,7 @@ module AuthenticatesWithTwoFactor
remember_me(user) if user_params[:remember_me] == '1' remember_me(user) if user_params[:remember_me] == '1'
user.save! user.save!
sign_in(user) sign_in(user, message: :two_factor_authenticated)
else else
user.increment_failed_attempts! user.increment_failed_attempts!
Gitlab::AppLogger.info("Failed Login: user=#{user.username} ip=#{request.remote_ip} method=OTP") Gitlab::AppLogger.info("Failed Login: user=#{user.username} ip=#{request.remote_ip} method=OTP")
...@@ -77,7 +77,7 @@ module AuthenticatesWithTwoFactor ...@@ -77,7 +77,7 @@ module AuthenticatesWithTwoFactor
session.delete(:challenge) session.delete(:challenge)
remember_me(user) if user_params[:remember_me] == '1' remember_me(user) if user_params[:remember_me] == '1'
sign_in(user) sign_in(user, message: :two_factor_authenticated)
else else
user.increment_failed_attempts! user.increment_failed_attempts!
Gitlab::AppLogger.info("Failed Login: user=#{user.username} ip=#{request.remote_ip} method=U2F") Gitlab::AppLogger.info("Failed Login: user=#{user.username} ip=#{request.remote_ip} method=U2F")
......
class Projects::WikisController < Projects::ApplicationController class Projects::WikisController < Projects::ApplicationController
include PreviewMarkdown include PreviewMarkdown
include Gitlab::Utils::StrongMemoize
before_action :authorize_read_wiki! before_action :authorize_read_wiki!
before_action :authorize_create_wiki!, only: [:edit, :create, :history] before_action :authorize_create_wiki!, only: [:edit, :create, :history]
before_action :authorize_admin_wiki!, only: :destroy before_action :authorize_admin_wiki!, only: :destroy
before_action :load_project_wiki before_action :load_project_wiki
before_action :load_page, only: [:show, :edit, :update, :history, :destroy]
before_action :valid_encoding?, only: [:show, :edit, :update], if: :load_page
before_action only: [:edit, :update], unless: :valid_encoding? do
redirect_to(project_wiki_path(@project, @page))
end
def pages def pages
@wiki_pages = Kaminari.paginate_array(@project_wiki.pages).page(params[:page]) @wiki_pages = Kaminari.paginate_array(@project_wiki.pages).page(params[:page])
...@@ -12,11 +18,11 @@ class Projects::WikisController < Projects::ApplicationController ...@@ -12,11 +18,11 @@ class Projects::WikisController < Projects::ApplicationController
end end
def show def show
@page = @project_wiki.find_page(params[:id], params[:version_id])
view_param = @project_wiki.empty? ? params[:view] : 'create' view_param = @project_wiki.empty? ? params[:view] : 'create'
if @page if @page
set_encoding_error unless valid_encoding?
render 'show' render 'show'
elsif file = @project_wiki.find_file(params[:id], params[:version_id]) elsif file = @project_wiki.find_file(params[:id], params[:version_id])
response.headers['Content-Security-Policy'] = "default-src 'none'" response.headers['Content-Security-Policy'] = "default-src 'none'"
...@@ -38,13 +44,11 @@ class Projects::WikisController < Projects::ApplicationController ...@@ -38,13 +44,11 @@ class Projects::WikisController < Projects::ApplicationController
end end
def edit def edit
@page = @project_wiki.find_page(params[:id])
end end
def update def update
return render('empty') unless can?(current_user, :create_wiki, @project) return render('empty') unless can?(current_user, :create_wiki, @project)
@page = @project_wiki.find_page(params[:id])
@page = WikiPages::UpdateService.new(@project, current_user, wiki_params).execute(@page) @page = WikiPages::UpdateService.new(@project, current_user, wiki_params).execute(@page)
if @page.valid? if @page.valid?
...@@ -79,8 +83,6 @@ class Projects::WikisController < Projects::ApplicationController ...@@ -79,8 +83,6 @@ class Projects::WikisController < Projects::ApplicationController
end end
def history def history
@page = @project_wiki.find_page(params[:id])
if @page if @page
@page_versions = Kaminari.paginate_array(@page.versions(page: params[:page].to_i), @page_versions = Kaminari.paginate_array(@page.versions(page: params[:page].to_i),
total_count: @page.count_versions) total_count: @page.count_versions)
...@@ -94,8 +96,6 @@ class Projects::WikisController < Projects::ApplicationController ...@@ -94,8 +96,6 @@ class Projects::WikisController < Projects::ApplicationController
end end
def destroy def destroy
@page = @project_wiki.find_page(params[:id])
WikiPages::DestroyService.new(@project, current_user).execute(@page) WikiPages::DestroyService.new(@project, current_user).execute(@page)
redirect_to project_wiki_path(@project, :home), redirect_to project_wiki_path(@project, :home),
...@@ -141,4 +141,25 @@ class Projects::WikisController < Projects::ApplicationController ...@@ -141,4 +141,25 @@ class Projects::WikisController < Projects::ApplicationController
page.update_attributes(args) # rubocop:disable Rails/ActiveRecordAliases page.update_attributes(args) # rubocop:disable Rails/ActiveRecordAliases
end end
end end
def load_page
@page ||= @project_wiki.find_page(*page_params)
end
def page_params
keys = [:id]
keys << :version_id if params[:action] == 'show'
params.values_at(*keys)
end
def valid_encoding?
strong_memoize(:valid_encoding) do
@page.content.encoding == Encoding::UTF_8
end
end
def set_encoding_error
flash.now[:notice] = "The content of this page is not encoded in UTF-8. Edits can only be made via the Git repository."
end
end end
...@@ -90,6 +90,14 @@ class SessionsController < Devise::SessionsController ...@@ -90,6 +90,14 @@ class SessionsController < Devise::SessionsController
).increment ).increment
end end
##
  # We have some duplication with lib/gitlab/auth/activity.rb here, but we are
  # keeping this method in place for backwards compatibility.
#
def login_counter
@login_counter ||= Gitlab::Metrics.counter(:user_session_logins_total, 'User sign in count')
end
def log_failed_login def log_failed_login
Gitlab::AppLogger.info("Failed Login: username=#{user_params[:login]} ip=#{request.remote_ip}") Gitlab::AppLogger.info("Failed Login: username=#{user_params[:login]} ip=#{request.remote_ip}")
end end
...@@ -98,10 +106,6 @@ class SessionsController < Devise::SessionsController ...@@ -98,10 +106,6 @@ class SessionsController < Devise::SessionsController
(options = env["warden.options"]) && options[:action] == "unauthenticated" (options = env["warden.options"]) && options[:action] == "unauthenticated"
end end
def login_counter
@login_counter ||= Gitlab::Metrics.counter(:user_session_logins_total, 'User sign in count')
end
# Handle an "initial setup" state, where there's only one user, it's an admin, # Handle an "initial setup" state, where there's only one user, it's an admin,
# and they require a password change. # and they require a password change.
def check_initial_setup def check_initial_setup
......
...@@ -149,6 +149,7 @@ module ApplicationSettingsHelper ...@@ -149,6 +149,7 @@ module ApplicationSettingsHelper
:after_sign_up_text, :after_sign_up_text,
:akismet_api_key, :akismet_api_key,
:akismet_enabled, :akismet_enabled,
:allow_local_requests_from_hooks_and_services,
:authorized_keys_enabled, :authorized_keys_enabled,
:auto_devops_enabled, :auto_devops_enabled,
:auto_devops_domain, :auto_devops_domain,
...@@ -175,6 +176,7 @@ module ApplicationSettingsHelper ...@@ -175,6 +176,7 @@ module ApplicationSettingsHelper
:ed25519_key_restriction, :ed25519_key_restriction,
:email_author_in_body, :email_author_in_body,
:enabled_git_access_protocol, :enabled_git_access_protocol,
:enforce_terms,
:gitaly_timeout_default, :gitaly_timeout_default,
:gitaly_timeout_medium, :gitaly_timeout_medium,
:gitaly_timeout_fast, :gitaly_timeout_fast,
...@@ -183,6 +185,7 @@ module ApplicationSettingsHelper ...@@ -183,6 +185,7 @@ module ApplicationSettingsHelper
:help_page_hide_commercial_content, :help_page_hide_commercial_content,
:help_page_support_url, :help_page_support_url,
:help_page_text, :help_page_text,
:hide_third_party_offers,
:home_page_url, :home_page_url,
:housekeeping_bitmaps_enabled, :housekeeping_bitmaps_enabled,
:housekeeping_enabled, :housekeeping_enabled,
...@@ -204,6 +207,7 @@ module ApplicationSettingsHelper ...@@ -204,6 +207,7 @@ module ApplicationSettingsHelper
:metrics_port, :metrics_port,
:metrics_sample_interval, :metrics_sample_interval,
:metrics_timeout, :metrics_timeout,
:mirror_available,
:pages_domain_verification_enabled, :pages_domain_verification_enabled,
:password_authentication_enabled_for_web, :password_authentication_enabled_for_web,
:password_authentication_enabled_for_git, :password_authentication_enabled_for_git,
...@@ -234,6 +238,7 @@ module ApplicationSettingsHelper ...@@ -234,6 +238,7 @@ module ApplicationSettingsHelper
:sign_in_text, :sign_in_text,
:signup_enabled, :signup_enabled,
:terminal_max_session_time, :terminal_max_session_time,
:terms,
:throttle_unauthenticated_enabled, :throttle_unauthenticated_enabled,
:throttle_unauthenticated_requests_per_period, :throttle_unauthenticated_requests_per_period,
:throttle_unauthenticated_period_in_seconds, :throttle_unauthenticated_period_in_seconds,
...@@ -250,12 +255,7 @@ module ApplicationSettingsHelper ...@@ -250,12 +255,7 @@ module ApplicationSettingsHelper
:usage_ping_enabled, :usage_ping_enabled,
:user_default_external, :user_default_external,
:user_oauth_applications, :user_oauth_applications,
:version_check_enabled, :version_check_enabled
:allow_local_requests_from_hooks_and_services,
:hide_third_party_offers,
:enforce_terms,
:terms,
:mirror_available
] ]
end end
end end
...@@ -228,26 +228,28 @@ class ApplicationSetting < ActiveRecord::Base ...@@ -228,26 +228,28 @@ class ApplicationSetting < ActiveRecord::Base
def self.defaults def self.defaults
{ {
after_sign_up_text: nil, after_sign_up_text: nil,
allow_local_requests_from_hooks_and_services: false,
akismet_enabled: false, akismet_enabled: false,
authorized_keys_enabled: true, # TODO default to false if the instance is configured to use AuthorizedKeysCommand authorized_keys_enabled: true, # TODO default to false if the instance is configured to use AuthorizedKeysCommand
container_registry_token_expire_delay: 5, container_registry_token_expire_delay: 5,
default_artifacts_expire_in: '30 days', default_artifacts_expire_in: '30 days',
default_branch_protection: Settings.gitlab['default_branch_protection'], default_branch_protection: Settings.gitlab['default_branch_protection'],
default_group_visibility: Settings.gitlab.default_projects_features['visibility_level'],
default_project_visibility: Settings.gitlab.default_projects_features['visibility_level'], default_project_visibility: Settings.gitlab.default_projects_features['visibility_level'],
default_projects_limit: Settings.gitlab['default_projects_limit'], default_projects_limit: Settings.gitlab['default_projects_limit'],
default_snippet_visibility: Settings.gitlab.default_projects_features['visibility_level'], default_snippet_visibility: Settings.gitlab.default_projects_features['visibility_level'],
default_group_visibility: Settings.gitlab.default_projects_features['visibility_level'],
disabled_oauth_sign_in_sources: [], disabled_oauth_sign_in_sources: [],
domain_whitelist: Settings.gitlab['domain_whitelist'], domain_whitelist: Settings.gitlab['domain_whitelist'],
dsa_key_restriction: 0, dsa_key_restriction: 0,
ecdsa_key_restriction: 0, ecdsa_key_restriction: 0,
ed25519_key_restriction: 0, ed25519_key_restriction: 0,
gitaly_timeout_default: 55,
gitaly_timeout_fast: 10,
gitaly_timeout_medium: 30,
gravatar_enabled: Settings.gravatar['enabled'], gravatar_enabled: Settings.gravatar['enabled'],
help_page_text: nil,
help_page_hide_commercial_content: false, help_page_hide_commercial_content: false,
unique_ips_limit_per_user: 10, help_page_text: nil,
unique_ips_limit_time_window: 3600, hide_third_party_offers: false,
unique_ips_limit_enabled: false,
housekeeping_bitmaps_enabled: true, housekeeping_bitmaps_enabled: true,
housekeeping_enabled: true, housekeeping_enabled: true,
housekeeping_full_repack_period: 50, housekeeping_full_repack_period: 50,
...@@ -258,12 +260,14 @@ class ApplicationSetting < ActiveRecord::Base ...@@ -258,12 +260,14 @@ class ApplicationSetting < ActiveRecord::Base
koding_url: nil, koding_url: nil,
max_artifacts_size: Settings.artifacts['max_size'], max_artifacts_size: Settings.artifacts['max_size'],
max_attachment_size: Settings.gitlab['max_attachment_size'], max_attachment_size: Settings.gitlab['max_attachment_size'],
password_authentication_enabled_for_web: Settings.gitlab['signin_enabled'], mirror_available: true,
password_authentication_enabled_for_git: true, password_authentication_enabled_for_git: true,
password_authentication_enabled_for_web: Settings.gitlab['signin_enabled'],
performance_bar_allowed_group_id: nil, performance_bar_allowed_group_id: nil,
rsa_key_restriction: 0, rsa_key_restriction: 0,
plantuml_enabled: false, plantuml_enabled: false,
plantuml_url: nil, plantuml_url: nil,
polling_interval_multiplier: 1,
project_export_enabled: true, project_export_enabled: true,
recaptcha_enabled: false, recaptcha_enabled: false,
repository_checks_enabled: true, repository_checks_enabled: true,
...@@ -278,25 +282,21 @@ class ApplicationSetting < ActiveRecord::Base ...@@ -278,25 +282,21 @@ class ApplicationSetting < ActiveRecord::Base
sign_in_text: nil, sign_in_text: nil,
signup_enabled: Settings.gitlab['signup_enabled'], signup_enabled: Settings.gitlab['signup_enabled'],
terminal_max_session_time: 0, terminal_max_session_time: 0,
throttle_unauthenticated_enabled: false,
throttle_unauthenticated_requests_per_period: 3600,
throttle_unauthenticated_period_in_seconds: 3600,
throttle_authenticated_web_enabled: false,
throttle_authenticated_web_requests_per_period: 7200,
throttle_authenticated_web_period_in_seconds: 3600,
throttle_authenticated_api_enabled: false, throttle_authenticated_api_enabled: false,
throttle_authenticated_api_requests_per_period: 7200,
throttle_authenticated_api_period_in_seconds: 3600, throttle_authenticated_api_period_in_seconds: 3600,
throttle_authenticated_api_requests_per_period: 7200,
throttle_authenticated_web_enabled: false,
throttle_authenticated_web_period_in_seconds: 3600,
throttle_authenticated_web_requests_per_period: 7200,
throttle_unauthenticated_enabled: false,
throttle_unauthenticated_period_in_seconds: 3600,
throttle_unauthenticated_requests_per_period: 3600,
two_factor_grace_period: 48, two_factor_grace_period: 48,
user_default_external: false, unique_ips_limit_enabled: false,
polling_interval_multiplier: 1, unique_ips_limit_per_user: 10,
unique_ips_limit_time_window: 3600,
usage_ping_enabled: Settings.gitlab['usage_ping_enabled'], usage_ping_enabled: Settings.gitlab['usage_ping_enabled'],
gitaly_timeout_fast: 10, user_default_external: false
gitaly_timeout_medium: 30,
gitaly_timeout_default: 55,
allow_local_requests_from_hooks_and_services: false,
hide_third_party_offers: false,
mirror_available: true
} }
end end
......
...@@ -17,7 +17,7 @@ module Ci ...@@ -17,7 +17,7 @@ module Ci
{ {
subprotocols: ['terminal.gitlab.com'].freeze, subprotocols: ['terminal.gitlab.com'].freeze,
url: "#{url}/exec".sub("https://", "wss://"), url: "#{url}/exec".sub("https://", "wss://"),
headers: { Authorization: authorization.presence }.compact, headers: { Authorization: [authorization.presence] }.compact,
ca_pem: certificate.presence ca_pem: certificate.presence
} }
end end
......
require 'openssl'
module Clusters module Clusters
module Applications module Applications
class Helm < ActiveRecord::Base class Helm < ActiveRecord::Base
self.table_name = 'clusters_applications_helm' self.table_name = 'clusters_applications_helm'
attr_encrypted :ca_key,
mode: :per_attribute_iv,
key: Settings.attr_encrypted_db_key_base_truncated,
algorithm: 'aes-256-cbc'
include ::Clusters::Concerns::ApplicationCore include ::Clusters::Concerns::ApplicationCore
include ::Clusters::Concerns::ApplicationStatus include ::Clusters::Concerns::ApplicationStatus
default_value_for :version, Gitlab::Kubernetes::Helm::HELM_VERSION default_value_for :version, Gitlab::Kubernetes::Helm::HELM_VERSION
before_create :create_keys_and_certs
def issue_client_cert
ca_cert_obj.issue
end
def set_initial_status def set_initial_status
return unless not_installable? return unless not_installable?
...@@ -28,41 +15,7 @@ module Clusters ...@@ -28,41 +15,7 @@ module Clusters
end end
def install_command def install_command
Gitlab::Kubernetes::Helm::InitCommand.new( Gitlab::Kubernetes::Helm::InitCommand.new(name)
name: name,
files: files
)
end
def has_ssl?
ca_key.present? && ca_cert.present?
end
private
def files
{
'ca.pem': ca_cert,
'cert.pem': tiller_cert.cert_string,
'key.pem': tiller_cert.key_string
}
end
def create_keys_and_certs
ca_cert = Gitlab::Kubernetes::Helm::Certificate.generate_root
self.ca_key = ca_cert.key_string
self.ca_cert = ca_cert.cert_string
end
def tiller_cert
@tiller_cert ||= ca_cert_obj.issue(expires_in: Gitlab::Kubernetes::Helm::Certificate::INFINITE_EXPIRY)
end
def ca_cert_obj
return unless has_ssl?
Gitlab::Kubernetes::Helm::Certificate
.from_strings(ca_key, ca_cert)
end end
end end
end end
......
...@@ -32,9 +32,9 @@ module Clusters ...@@ -32,9 +32,9 @@ module Clusters
def install_command def install_command
Gitlab::Kubernetes::Helm::InstallCommand.new( Gitlab::Kubernetes::Helm::InstallCommand.new(
name: name, name,
chart: chart, chart: chart,
files: files values: values
) )
end end
......
...@@ -35,9 +35,9 @@ module Clusters ...@@ -35,9 +35,9 @@ module Clusters
def install_command def install_command
Gitlab::Kubernetes::Helm::InstallCommand.new( Gitlab::Kubernetes::Helm::InstallCommand.new(
name: name, name,
chart: chart, chart: chart,
files: files, values: values,
repository: repository repository: repository
) )
end end
......
...@@ -45,10 +45,10 @@ module Clusters ...@@ -45,10 +45,10 @@ module Clusters
def install_command def install_command
Gitlab::Kubernetes::Helm::InstallCommand.new( Gitlab::Kubernetes::Helm::InstallCommand.new(
name: name, name,
chart: chart, chart: chart,
version: version, version: version,
files: files values: values
) )
end end
......
...@@ -28,9 +28,9 @@ module Clusters ...@@ -28,9 +28,9 @@ module Clusters
def install_command def install_command
Gitlab::Kubernetes::Helm::InstallCommand.new( Gitlab::Kubernetes::Helm::InstallCommand.new(
name: name, name,
chart: chart, chart: chart,
files: files, values: values,
repository: repository repository: repository
) )
end end
......
...@@ -12,34 +12,8 @@ module Clusters ...@@ -12,34 +12,8 @@ module Clusters
File.read(chart_values_file) File.read(chart_values_file)
end end
def files
@files ||= begin
files = { 'values.yaml': values }
files.merge!(certificate_files) if cluster.application_helm.has_ssl?
files
end
end
private private
def certificate_files
{
'ca.pem': ca_cert,
'cert.pem': helm_cert.cert_string,
'key.pem': helm_cert.key_string
}
end
def ca_cert
cluster.application_helm.ca_cert
end
def helm_cert
@helm_cert ||= cluster.application_helm.issue_client_cert
end
def chart_values_file def chart_values_file
"#{Rails.root}/vendor/#{name}/values.yaml" "#{Rails.root}/vendor/#{name}/values.yaml"
end end
......
...@@ -5,7 +5,7 @@ class ProjectStatistics < ActiveRecord::Base ...@@ -5,7 +5,7 @@ class ProjectStatistics < ActiveRecord::Base
before_save :update_storage_size before_save :update_storage_size
COLUMNS_TO_REFRESH = [:repository_size, :lfs_objects_size, :commit_count].freeze COLUMNS_TO_REFRESH = [:repository_size, :lfs_objects_size, :commit_count].freeze
INCREMENTABLE_COLUMNS = [:build_artifacts_size].freeze INCREMENTABLE_COLUMNS = { build_artifacts_size: %i[storage_size] }.freeze
def shared_runners_minutes def shared_runners_minutes
shared_runners_seconds.to_i / 60 shared_runners_seconds.to_i / 60
...@@ -42,11 +42,28 @@ class ProjectStatistics < ActiveRecord::Base ...@@ -42,11 +42,28 @@ class ProjectStatistics < ActiveRecord::Base
self.storage_size = repository_size + lfs_objects_size + build_artifacts_size self.storage_size = repository_size + lfs_objects_size + build_artifacts_size
end end
  # Since this incremental update method does not call update_storage_size above,
  # we have to update storage_size here as an additional column.
  # Additional columns are updated according to the key => [columns] mapping, which
  # allows updating both statistics that feed into storage_size (or any other
  # summary column added in the future) and those that do not.
def self.increment_statistic(project_id, key, amount) def self.increment_statistic(project_id, key, amount)
raise ArgumentError, "Cannot increment attribute: #{key}" unless key.in?(INCREMENTABLE_COLUMNS) raise ArgumentError, "Cannot increment attribute: #{key}" unless INCREMENTABLE_COLUMNS.key?(key)
return if amount == 0 return if amount == 0
where(project_id: project_id) where(project_id: project_id)
.update_all(["#{key} = COALESCE(#{key}, 0) + (?)", amount]) .columns_to_increment(key, amount)
end
def self.columns_to_increment(key, amount)
updates = ["#{key} = COALESCE(#{key}, 0) + (#{amount})"]
if (additional = INCREMENTABLE_COLUMNS[key])
additional.each do |column|
updates << "#{column} = COALESCE(#{column}, 0) + (#{amount})"
end
end
update_all(updates.join(', '))
end end
end end
...@@ -317,6 +317,8 @@ class Repository ...@@ -317,6 +317,8 @@ class Repository
# types - An Array of file types (e.g. `:readme`) used to refresh extra # types - An Array of file types (e.g. `:readme`) used to refresh extra
# caches. # caches.
def refresh_method_caches(types) def refresh_method_caches(types)
return if types.empty?
to_refresh = [] to_refresh = []
types.each do |type| types.each do |type|
......
...@@ -134,6 +134,10 @@ class MergeRequestWidgetEntity < IssuableEntity ...@@ -134,6 +134,10 @@ class MergeRequestWidgetEntity < IssuableEntity
can?(request.current_user, :create_note, merge_request) can?(request.current_user, :create_note, merge_request)
end end
expose :can_create_issue do |merge_request|
can?(current_user, :create_issue, merge_request.project)
end
expose :can_update do |merge_request| expose :can_update do |merge_request|
can?(request.current_user, :update_merge_request, merge_request) can?(request.current_user, :update_merge_request, merge_request)
end end
......
...@@ -3,6 +3,7 @@ ...@@ -3,6 +3,7 @@
class GitPushService < BaseService class GitPushService < BaseService
attr_accessor :push_data, :push_commits attr_accessor :push_data, :push_commits
include Gitlab::Access include Gitlab::Access
include Gitlab::Utils::StrongMemoize
# The N most recent commits to process in a single push payload. # The N most recent commits to process in a single push payload.
PROCESS_COMMIT_LIMIT = 100 PROCESS_COMMIT_LIMIT = 100
...@@ -21,14 +22,14 @@ class GitPushService < BaseService ...@@ -21,14 +22,14 @@ class GitPushService < BaseService
# 6. Checks if the project's main language has changed # 6. Checks if the project's main language has changed
# #
def execute def execute
@project.repository.after_create if @project.empty_repo? project.repository.after_create if project.empty_repo?
@project.repository.after_push_commit(branch_name) project.repository.after_push_commit(branch_name)
if push_remove_branch? if push_remove_branch?
@project.repository.after_remove_branch project.repository.after_remove_branch
@push_commits = [] @push_commits = []
elsif push_to_new_branch? elsif push_to_new_branch?
@project.repository.after_create_branch project.repository.after_create_branch
# Re-find the pushed commits. # Re-find the pushed commits.
if default_branch? if default_branch?
...@@ -38,14 +39,14 @@ class GitPushService < BaseService ...@@ -38,14 +39,14 @@ class GitPushService < BaseService
# Use the pushed commits that aren't reachable by the default branch # Use the pushed commits that aren't reachable by the default branch
# as a heuristic. This may include more commits than are actually pushed, but # as a heuristic. This may include more commits than are actually pushed, but
# that shouldn't matter because we check for existing cross-references later. # that shouldn't matter because we check for existing cross-references later.
@push_commits = @project.repository.commits_between(@project.default_branch, params[:newrev]) @push_commits = project.repository.commits_between(project.default_branch, params[:newrev])
# don't process commits for the initial push to the default branch # don't process commits for the initial push to the default branch
process_commit_messages process_commit_messages
end end
elsif push_to_existing_branch? elsif push_to_existing_branch?
# Collect data for this git push # Collect data for this git push
@push_commits = @project.repository.commits_between(params[:oldrev], params[:newrev]) @push_commits = project.repository.commits_between(params[:oldrev], params[:newrev])
process_commit_messages process_commit_messages
...@@ -68,7 +69,7 @@ class GitPushService < BaseService ...@@ -68,7 +69,7 @@ class GitPushService < BaseService
end end
def update_gitattributes def update_gitattributes
@project.repository.copy_gitattributes(params[:ref]) project.repository.copy_gitattributes(params[:ref])
end end
def update_caches def update_caches
...@@ -92,7 +93,7 @@ class GitPushService < BaseService ...@@ -92,7 +93,7 @@ class GitPushService < BaseService
types = [] types = []
end end
ProjectCacheWorker.perform_async(@project.id, types, [:commit_count, :repository_size]) ProjectCacheWorker.perform_async(project.id, types, [:commit_count, :repository_size])
end end
def update_signatures def update_signatures
...@@ -125,10 +126,10 @@ class GitPushService < BaseService ...@@ -125,10 +126,10 @@ class GitPushService < BaseService
protected protected
def update_remote_mirrors def update_remote_mirrors
return unless @project.has_remote_mirror? return unless project.has_remote_mirror?
@project.mark_stuck_remote_mirrors_as_failed! project.mark_stuck_remote_mirrors_as_failed!
@project.update_remote_mirrors project.update_remote_mirrors
end end
def execute_related_hooks def execute_related_hooks
...@@ -136,16 +137,21 @@ class GitPushService < BaseService ...@@ -136,16 +137,21 @@ class GitPushService < BaseService
# could cause the last commit of a merge request to change. # could cause the last commit of a merge request to change.
# #
UpdateMergeRequestsWorker UpdateMergeRequestsWorker
.perform_async(@project.id, current_user.id, params[:oldrev], params[:newrev], params[:ref]) .perform_async(project.id, current_user.id, params[:oldrev], params[:newrev], params[:ref])
<<<<<<< HEAD
mirror_update = @project.mirror? && @project.repository.up_to_date_with_upstream?(branch_name) mirror_update = @project.mirror? && @project.repository.up_to_date_with_upstream?(branch_name)
EventCreateService.new.push(@project, current_user, build_push_data) EventCreateService.new.push(@project, current_user, build_push_data)
Ci::CreatePipelineService.new(@project, current_user, build_push_data).execute(:push, mirror_update: mirror_update) Ci::CreatePipelineService.new(@project, current_user, build_push_data).execute(:push, mirror_update: mirror_update)
=======
EventCreateService.new.push(project, current_user, build_push_data)
Ci::CreatePipelineService.new(project, current_user, build_push_data).execute(:push)
>>>>>>> ce/master
SystemHookPushWorker.perform_async(build_push_data.dup, :push_hooks) SystemHookPushWorker.perform_async(build_push_data.dup, :push_hooks)
@project.execute_hooks(build_push_data.dup, :push_hooks) project.execute_hooks(build_push_data.dup, :push_hooks)
@project.execute_services(build_push_data.dup, :push_hooks) project.execute_services(build_push_data.dup, :push_hooks)
if push_remove_branch? if push_remove_branch?
AfterBranchDeleteService AfterBranchDeleteService
...@@ -155,52 +161,50 @@ class GitPushService < BaseService ...@@ -155,52 +161,50 @@ class GitPushService < BaseService
end end
def perform_housekeeping def perform_housekeeping
housekeeping = Projects::HousekeepingService.new(@project) housekeeping = Projects::HousekeepingService.new(project)
housekeeping.increment! housekeeping.increment!
housekeeping.execute if housekeeping.needed? housekeeping.execute if housekeeping.needed?
rescue Projects::HousekeepingService::LeaseTaken rescue Projects::HousekeepingService::LeaseTaken
end end
def process_default_branch def process_default_branch
@push_commits_count = project.repository.commit_count_for_ref(params[:ref]) offset = [push_commits_count - PROCESS_COMMIT_LIMIT, 0].max
offset = [@push_commits_count - PROCESS_COMMIT_LIMIT, 0].max
@push_commits = project.repository.commits(params[:newrev], offset: offset, limit: PROCESS_COMMIT_LIMIT) @push_commits = project.repository.commits(params[:newrev], offset: offset, limit: PROCESS_COMMIT_LIMIT)
@project.after_create_default_branch project.after_create_default_branch
end end
def build_push_data def build_push_data
@push_data ||= Gitlab::DataBuilder::Push.build( @push_data ||= Gitlab::DataBuilder::Push.build(
@project, project,
current_user, current_user,
params[:oldrev], params[:oldrev],
params[:newrev], params[:newrev],
params[:ref], params[:ref],
@push_commits, @push_commits,
commits_count: @push_commits_count) commits_count: push_commits_count)
end end
def push_to_existing_branch? def push_to_existing_branch?
# Return if this is not a push to a branch (e.g. new commits) # Return if this is not a push to a branch (e.g. new commits)
Gitlab::Git.branch_ref?(params[:ref]) && !Gitlab::Git.blank_ref?(params[:oldrev]) branch_ref? && !Gitlab::Git.blank_ref?(params[:oldrev])
end end
def push_to_new_branch? def push_to_new_branch?
Gitlab::Git.branch_ref?(params[:ref]) && Gitlab::Git.blank_ref?(params[:oldrev]) strong_memoize(:push_to_new_branch) do
branch_ref? && Gitlab::Git.blank_ref?(params[:oldrev])
end
end end
def push_remove_branch? def push_remove_branch?
Gitlab::Git.branch_ref?(params[:ref]) && Gitlab::Git.blank_ref?(params[:newrev]) strong_memoize(:push_remove_branch) do
end branch_ref? && Gitlab::Git.blank_ref?(params[:newrev])
end
def push_to_branch?
Gitlab::Git.branch_ref?(params[:ref])
end end
def default_branch? def default_branch?
Gitlab::Git.branch_ref?(params[:ref]) && branch_ref? &&
(Gitlab::Git.ref_name(params[:ref]) == project.default_branch || project.default_branch.nil?) (branch_name == project.default_branch || project.default_branch.nil?)
end end
def commit_user(commit) def commit_user(commit)
...@@ -208,7 +212,21 @@ class GitPushService < BaseService ...@@ -208,7 +212,21 @@ class GitPushService < BaseService
end end
def branch_name def branch_name
@branch_name ||= Gitlab::Git.ref_name(params[:ref]) strong_memoize(:branch_name) do
Gitlab::Git.ref_name(params[:ref])
end
end
def branch_ref?
strong_memoize(:branch_ref) do
Gitlab::Git.branch_ref?(params[:ref])
end
end
def push_commits_count
strong_memoize(:push_commits_count) do
project.repository.commit_count_for_ref(params[:ref])
end
end end
def last_pushed_commits def last_pushed_commits
......
...@@ -9,12 +9,17 @@ class GitTagPushService < BaseService ...@@ -9,12 +9,17 @@ class GitTagPushService < BaseService
@push_data = build_push_data @push_data = build_push_data
<<<<<<< HEAD
EventCreateService.new.push(project, current_user, @push_data) EventCreateService.new.push(project, current_user, @push_data)
Ci::CreatePipelineService.new(project, current_user, @push_data).execute(:push, mirror_update: params[:mirror_update]) Ci::CreatePipelineService.new(project, current_user, @push_data).execute(:push, mirror_update: params[:mirror_update])
=======
EventCreateService.new.push(project, current_user, push_data)
Ci::CreatePipelineService.new(project, current_user, push_data).execute(:push)
>>>>>>> ce/master
SystemHooksService.new.execute_hooks(build_system_push_data.dup, :tag_push_hooks) SystemHooksService.new.execute_hooks(build_system_push_data, :tag_push_hooks)
project.execute_hooks(@push_data.dup, :tag_push_hooks) project.execute_hooks(push_data.dup, :tag_push_hooks)
project.execute_services(@push_data.dup, :tag_push_hooks) project.execute_services(push_data.dup, :tag_push_hooks)
ProjectCacheWorker.perform_async(project.id, [], [:commit_count, :repository_size]) ProjectCacheWorker.perform_async(project.id, [], [:commit_count, :repository_size])
......
...@@ -37,6 +37,8 @@ module Issues ...@@ -37,6 +37,8 @@ module Issues
end end
if issue.previous_changes.include?('confidential') if issue.previous_changes.include?('confidential')
# don't enqueue immediately to prevent todos removal in case of a mistake
TodosDestroyer::ConfidentialIssueWorker.perform_in(1.hour, issue.id) if issue.confidential?
create_confidentiality_note(issue) create_confidentiality_note(issue)
end end
......
...@@ -17,6 +17,8 @@ module Members ...@@ -17,6 +17,8 @@ module Members
notification_service.decline_access_request(member) notification_service.decline_access_request(member)
end end
enqeue_delete_todos(member)
after_execute(member: member) after_execute(member: member)
member member
...@@ -24,6 +26,12 @@ module Members ...@@ -24,6 +26,12 @@ module Members
private private
def enqeue_delete_todos(member)
type = member.is_a?(GroupMember) ? 'Group' : 'Project'
# don't enqueue immediately to prevent todos removal in case of a mistake
TodosDestroyer::EntityLeaveWorker.perform_in(1.hour, member.user_id, member.source_id, type)
end
def can_destroy_member?(member) def can_destroy_member?(member)
can?(current_user, destroy_member_permission(member), member) can?(current_user, destroy_member_permission(member), member)
end end
......
...@@ -27,13 +27,7 @@ module Projects ...@@ -27,13 +27,7 @@ module Projects
return validation_failed! if project.errors.any? return validation_failed! if project.errors.any?
if project.update(params.except(:default_branch)) if project.update(params.except(:default_branch))
if project.previous_changes.include?('path') after_update
project.rename_repo
else
system_hook_service.execute_hooks_for(project, :update)
end
update_pages_config if changing_pages_https_only?
success success
else else
...@@ -49,6 +43,30 @@ module Projects ...@@ -49,6 +43,30 @@ module Projects
private private
def after_update
todos_features_changes = %w(
issues_access_level
merge_requests_access_level
repository_access_level
)
project_changed_feature_keys = project.project_feature.previous_changes.keys
if project.previous_changes.include?(:visibility_level) && project.private?
# don't enqueue immediately to prevent todos removal in case of a mistake
TodosDestroyer::ProjectPrivateWorker.perform_in(1.hour, project.id)
elsif (project_changed_feature_keys & todos_features_changes).present?
TodosDestroyer::PrivateFeaturesWorker.perform_in(1.hour, project.id)
end
if project.previous_changes.include?('path')
project.rename_repo
else
system_hook_service.execute_hooks_for(project, :update)
end
update_pages_config if changing_pages_https_only?
end
def validation_failed! def validation_failed!
model_errors = project.errors.full_messages.to_sentence model_errors = project.errors.full_messages.to_sentence
error_message = model_errors.presence || 'Project could not be updated!' error_message = model_errors.presence || 'Project could not be updated!'
......
module Todos
module Destroy
class BaseService
def execute
return unless todos_to_remove?
without_authorized(todos).delete_all
end
private
def without_authorized(items)
items.where('user_id NOT IN (?)', authorized_users)
end
def authorized_users
ProjectAuthorization.select(:user_id).where(project_id: project_ids)
end
def todos
raise NotImplementedError
end
def project_ids
raise NotImplementedError
end
def todos_to_remove?
raise NotImplementedError
end
end
end
end
module Todos
module Destroy
class ConfidentialIssueService < ::Todos::Destroy::BaseService
extend ::Gitlab::Utils::Override
attr_reader :issue
def initialize(issue_id)
@issue = Issue.find_by(id: issue_id)
end
private
override :todos
def todos
Todo.where(target: issue)
.where('user_id != ?', issue.author_id)
.where('user_id NOT IN (?)', issue.assignees.select(:id))
end
override :todos_to_remove?
def todos_to_remove?
issue&.confidential?
end
override :project_ids
def project_ids
issue.project_id
end
override :authorized_users
def authorized_users
ProjectAuthorization.select(:user_id)
.where(project_id: project_ids)
.where('access_level >= ?', Gitlab::Access::REPORTER)
end
end
end
end
module Todos
module Destroy
class EntityLeaveService < ::Todos::Destroy::BaseService
extend ::Gitlab::Utils::Override
attr_reader :user_id, :entity
def initialize(user_id, entity_id, entity_type)
unless %w(Group Project).include?(entity_type)
raise ArgumentError.new("#{entity_type} is not an entity user can leave")
end
@user_id = user_id
@entity = entity_type.constantize.find_by(id: entity_id)
end
private
override :todos
def todos
if entity.private?
Todo.where(project_id: project_ids, user_id: user_id)
else
project_ids.each do |project_id|
TodosDestroyer::PrivateFeaturesWorker.perform_async(project_id, user_id)
end
Todo.where(
target_id: confidential_issues.select(:id), target_type: Issue, user_id: user_id
)
end
end
override :project_ids
def project_ids
case entity
when Project
[entity.id]
when Namespace
Project.select(:id).where(namespace_id: entity.self_and_descendants.select(:id))
end
end
override :todos_to_remove?
def todos_to_remove?
        # if an entity is provided we always want to check at least the private features
!!entity
end
def confidential_issues
assigned_ids = IssueAssignee.select(:issue_id).where(user_id: user_id)
Issue.where(project_id: project_ids, confidential: true)
.where('author_id != ?', user_id)
.where('id NOT IN (?)', assigned_ids)
end
end
end
end
module Todos
module Destroy
class PrivateFeaturesService < ::Todos::Destroy::BaseService
attr_reader :project_ids, :user_id
def initialize(project_ids, user_id = nil)
@project_ids = project_ids
@user_id = user_id
end
def execute
ProjectFeature.where(project_id: project_ids).each do |project_features|
target_types = []
target_types << Issue if private?(project_features.issues_access_level)
target_types << MergeRequest if private?(project_features.merge_requests_access_level)
target_types << Commit if private?(project_features.repository_access_level)
next if target_types.empty?
remove_todos(project_features.project_id, target_types)
end
end
private
def private?(feature_level)
feature_level == ProjectFeature::PRIVATE
end
def remove_todos(project_id, target_types)
items = Todo.where(project_id: project_id)
items = items.where(user_id: user_id) if user_id
items.where('user_id NOT IN (?)', authorized_users)
.where(target_type: target_types)
.delete_all
end
end
end
end
module Todos
module Destroy
class ProjectPrivateService < ::Todos::Destroy::BaseService
extend ::Gitlab::Utils::Override
attr_reader :project
def initialize(project_id)
@project = Project.find_by(id: project_id)
end
private
override :todos
def todos
Todo.where(project_id: project_ids)
end
override :project_ids
def project_ids
project.id
end
override :todos_to_remove?
def todos_to_remove?
project&.private?
end
end
end
end
...@@ -4,6 +4,6 @@ ...@@ -4,6 +4,6 @@
= s_("Wiki|New page") = s_("Wiki|New page")
= link_to project_wiki_history_path(@project, @page), class: "btn" do = link_to project_wiki_history_path(@project, @page), class: "btn" do
= s_("Wiki|Page history") = s_("Wiki|Page history")
- if can?(current_user, :create_wiki, @project) && @page.latest? - if can?(current_user, :create_wiki, @project) && @page.latest? && @valid_encoding
= link_to project_wiki_edit_path(@project, @page), class: "btn js-wiki-edit" do = link_to project_wiki_edit_path(@project, @page), class: "btn js-wiki-edit" do
= _("Edit") = _("Edit")
...@@ -75,6 +75,11 @@ ...@@ -75,6 +75,11 @@
- repository_check:repository_check_batch - repository_check:repository_check_batch
- repository_check:repository_check_single_repository - repository_check:repository_check_single_repository
- todos_destroyer:todos_destroyer_confidential_issue
- todos_destroyer:todos_destroyer_entity_leave
- todos_destroyer:todos_destroyer_project_private
- todos_destroyer:todos_destroyer_private_features
- default - default
- mailers # ActionMailer::DeliveryJob.queue_name - mailers # ActionMailer::DeliveryJob.queue_name
......
# frozen_string_literal: true
##
# Concern for setting Sidekiq settings for the various Todos Destroyers.
#
module TodosDestroyerQueue
extend ActiveSupport::Concern
included do
queue_namespace :todos_destroyer
end
end
module TodosDestroyer
class ConfidentialIssueWorker
include ApplicationWorker
include TodosDestroyerQueue
def perform(issue_id)
::Todos::Destroy::ConfidentialIssueService.new(issue_id).execute
end
end
end
module TodosDestroyer
class EntityLeaveWorker
include ApplicationWorker
include TodosDestroyerQueue
def perform(user_id, entity_id, entity_type)
::Todos::Destroy::EntityLeaveService.new(user_id, entity_id, entity_type).execute
end
end
end
module TodosDestroyer
class PrivateFeaturesWorker
include ApplicationWorker
include TodosDestroyerQueue
def perform(project_id, user_id = nil)
::Todos::Destroy::PrivateFeaturesService.new(project_id, user_id).execute
end
end
end
module TodosDestroyer
class ProjectPrivateWorker
include ApplicationWorker
include TodosDestroyerQueue
def perform(project_id)
::Todos::Destroy::ProjectPrivateService.new(project_id).execute
end
end
end
---
title: Fix authorization for interactive web terminals
merge_request: 20811
author:
type: fixed
---
title: Ensure installed Helm Tiller for GitLab Managed Apps is protected by mutual
auth
merge_request: 20801
author:
type: changed
---
title: Disables toggle comments button if diff has no discussions
merge_request:
author:
type: other
---
title: Changes poll.js to keep polling on any 2xx http status code
merge_request: 20904
author:
type: other
---
title: Add more comprehensive metrics tracking authentication activity
merge_request: 20668
author:
type: added
---
title: Update total storage size when changing size of artifacts
merge_request: 20697
author: Peter Marko
type: fixed
---
title: Prevent editing and updating wiki pages with non-UTF-8 encoding via the web interface
merge_request: 20906
author:
type: fixed
---
title: Warn user when reloading the IDE with staged changes
merge_request: 20857
author:
type: added
---
title: Delete todos when user loses access to read the target
merge_request: 20665
author:
type: other
---
title: Remove gitlab:user:check_repos, gitlab:check_repo, gitlab:git:prune, gitlab:git:gc, and gitlab:git:repack
merge_request: 20806
author:
type: removed
Rails.application.configure do |config| Rails.application.configure do |config|
Warden::Manager.after_set_user(scope: :user) do |user, auth, opts| Warden::Manager.after_set_user(scope: :user) do |user, auth, opts|
Gitlab::Auth::UniqueIpsLimiter.limit_user!(user) Gitlab::Auth::UniqueIpsLimiter.limit_user!(user)
end
Warden::Manager.before_failure(scope: :user) do |env, opts| activity = Gitlab::Auth::Activity.new(user, opts)
Gitlab::Auth::BlockedUserTracker.log_if_user_blocked(env)
case opts[:event]
when :authentication
activity.user_authenticated!
when :set_user
activity.user_authenticated!
activity.user_session_override!
when :fetch # rubocop:disable Lint/EmptyWhen
# We ignore session fetch events
else
activity.user_session_override!
end
end end
Warden::Manager.after_authentication(scope: :user) do |user, auth, opts| Warden::Manager.after_authentication(scope: :user) do |user, auth, opts|
...@@ -15,7 +25,17 @@ Rails.application.configure do |config| ...@@ -15,7 +25,17 @@ Rails.application.configure do |config|
ActiveSession.set(user, auth.request) ActiveSession.set(user, auth.request)
end end
Warden::Manager.before_logout(scope: :user) do |user, auth, opts| Warden::Manager.before_failure(scope: :user) do |env, opts|
ActiveSession.destroy(user || auth.user, auth.request.session.id) tracker = Gitlab::Auth::BlockedUserTracker.new(env)
tracker.log_blocked_user_activity! if tracker.user_blocked?
Gitlab::Auth::Activity.new(tracker.user, opts).user_authentication_failed!
end
Warden::Manager.before_logout(scope: :user) do |user_warden, auth, opts|
user = user_warden || auth.user
ActiveSession.destroy(user, auth.request.session.id)
Gitlab::Auth::Activity.new(user, opts).user_session_destroyed!
end end
end end
...@@ -45,6 +45,7 @@ ...@@ -45,6 +45,7 @@
- [github_import_advance_stage, 1] - [github_import_advance_stage, 1]
- [project_service, 1] - [project_service, 1]
- [delete_user, 1] - [delete_user, 1]
- [todos_destroyer, 1]
- [delete_merged_branches, 1] - [delete_merged_branches, 1]
- [authorized_projects, 1] - [authorized_projects, 1]
- [expire_build_instance_artifacts, 1] - [expire_build_instance_artifacts, 1]
......
class AddColumnsForHelmTillerCertificates < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
add_column :clusters_applications_helm, :encrypted_ca_key, :text
add_column :clusters_applications_helm, :encrypted_ca_key_iv, :text
add_column :clusters_applications_helm, :ca_cert, :text
end
end
...@@ -747,9 +747,6 @@ ActiveRecord::Schema.define(version: 20180722103201) do ...@@ -747,9 +747,6 @@ ActiveRecord::Schema.define(version: 20180722103201) do
t.integer "status", null: false t.integer "status", null: false
t.string "version", null: false t.string "version", null: false
t.text "status_reason" t.text "status_reason"
t.text "encrypted_ca_key"
t.text "encrypted_ca_key_iv"
t.text "ca_cert"
end end
create_table "clusters_applications_ingress", force: :cascade do |t| create_table "clusters_applications_ingress", force: :cascade do |t|
......
# Check Rake Tasks # Integrity Check Rake Task
## Repository Integrity ## Repository Integrity
...@@ -28,14 +28,8 @@ exactly which repositories are causing the trouble. ...@@ -28,14 +28,8 @@ exactly which repositories are causing the trouble.
### Check all GitLab repositories ### Check all GitLab repositories
>**Note:**
>
> - `gitlab:repo:check` has been deprecated in favor of `gitlab:git:fsck`
> - [Deprecated][ce-15931] in GitLab 10.4.
> - `gitlab:repo:check` will be removed in the future. [Removal issue][ce-41699]
This task loops through all repositories on the GitLab server and runs the This task loops through all repositories on the GitLab server and runs the
3 integrity checks described previously. integrity check described previously.
**Omnibus Installation** **Omnibus Installation**
...@@ -49,33 +43,6 @@ sudo gitlab-rake gitlab:git:fsck ...@@ -49,33 +43,6 @@ sudo gitlab-rake gitlab:git:fsck
sudo -u git -H bundle exec rake gitlab:git:fsck RAILS_ENV=production sudo -u git -H bundle exec rake gitlab:git:fsck RAILS_ENV=production
``` ```
### Check repositories for a specific user
This task checks all repositories that a specific user has access to. This is important
because sometimes you know which user is experiencing trouble but you don't know
which project might be the cause.
If the rake task is executed without brackets at the end, you will be prompted
to enter a username.
**Omnibus Installation**
```bash
sudo gitlab-rake gitlab:user:check_repos
sudo gitlab-rake gitlab:user:check_repos[<username>]
```
**Source Installation**
```bash
sudo -u git -H bundle exec rake gitlab:user:check_repos RAILS_ENV=production
sudo -u git -H bundle exec rake gitlab:user:check_repos[<username>] RAILS_ENV=production
```
Example output:
![gitlab:user:check_repos output](../img/raketasks/check_repos_output.png)
## Uploaded Files Integrity ## Uploaded Files Integrity
Various types of files can be uploaded to a GitLab installation by users. Various types of files can be uploaded to a GitLab installation by users.
...@@ -167,5 +134,4 @@ The LDAP check Rake task will test the bind_dn and password credentials ...@@ -167,5 +134,4 @@ The LDAP check Rake task will test the bind_dn and password credentials
executed as part of the `gitlab:check` task, but can run independently. executed as part of the `gitlab:check` task, but can run independently.
See [LDAP Rake Tasks - LDAP Check](ldap.md#check) for details. See [LDAP Rake Tasks - LDAP Check](ldap.md#check) for details.
[ce-15931]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/15931 [git-fsck]: https://git-scm.com/docs/git-fsck
[ce-41699]: https://gitlab.com/gitlab-org/gitlab-ce/issues/41699
...@@ -15,7 +15,7 @@ Use the following rules when creating realtime solutions. ...@@ -15,7 +15,7 @@ Use the following rules when creating realtime solutions.
Use that as your polling interval. This way it is [easy for system administrators to change the Use that as your polling interval. This way it is [easy for system administrators to change the
polling rate](../../administration/polling.md). polling rate](../../administration/polling.md).
A `Poll-Interval: -1` means you should disable polling, and this must be implemented. A `Poll-Interval: -1` means you should disable polling, and this must be implemented.
1. A response with HTTP status `4XX` or `5XX` should disable polling as well. 1. A response with HTTP status different from 2XX should disable polling as well.
1. Use a common library for polling. 1. Use a common library for polling.
1. Poll on active tabs only. Please use [Visibility](https://github.com/ai/visibilityjs). 1. Poll on active tabs only. Please use [Visibility](https://github.com/ai/visibilityjs).
1. Use regular polling intervals, do not use backoff polling, or jitter, as the interval will be 1. Use regular polling intervals, do not use backoff polling, or jitter, as the interval will be
...@@ -25,15 +25,15 @@ controlled by the server. ...@@ -25,15 +25,15 @@ controlled by the server.
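As an illustration of the polling rules above, here is a hedged sketch of polling with the shared `Poll` utility touched in this merge request. The constructor options (`resource`, `method`, `successCallback`, `errorCallback`) and the `axios_utils` import are assumptions about the utility's conventional usage, and the endpoint is a placeholder.

```javascript
import Poll from '~/lib/utils/poll';
import axios from '~/lib/utils/axios_utils';

// Placeholder resource; the server controls the cadence via the
// Poll-Interval response header.
const resource = {
  fetchStatus: () => axios.get('/api/v4/projects/1/pipelines/1'),
};

const poll = new Poll({
  resource,
  method: 'fetchStatus',
  successCallback: ({ data }) => {
    // Update the UI; polling continues while Poll-Interval > 0 and the
    // response status is 2xx.
  },
  errorCallback: () => {
    // Non-2xx responses land here and polling stops.
  },
});

poll.makeRequest();
```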
### Lazy Loading Images ### Lazy Loading Images
To improve the time to first render we are using lazy loading for images. This works by setting To improve the time to first render we are using lazy loading for images. This works by setting
the actual image source on the `data-src` attribute. After the HTML is rendered and JavaScript is loaded, the actual image source on the `data-src` attribute. After the HTML is rendered and JavaScript is loaded,
the value of `data-src` will be moved to `src` automatically if the image is in the current viewport. the value of `data-src` will be moved to `src` automatically if the image is in the current viewport.
* Prepare images in HTML for lazy loading by renaming the `src` attribute to `data-src` AND adding the class `lazy` * Prepare images in HTML for lazy loading by renaming the `src` attribute to `data-src` AND adding the class `lazy`
* If you are using the Rails `image_tag` helper, all images will be lazy-loaded by default unless `lazy: false` is provided. * If you are using the Rails `image_tag` helper, all images will be lazy-loaded by default unless `lazy: false` is provided.
If you are asynchronously adding content which contains lazy images then you need to call the function If you are asynchronously adding content which contains lazy images then you need to call the function
`gl.lazyLoader.searchLazyImages()` which will search for lazy images and load them if needed. `gl.lazyLoader.searchLazyImages()` which will search for lazy images and load them if needed.
But in general it should be handled automatically through a `MutationObserver` in the lazy loading function. But in general it should be handled automatically through a `MutationObserver` in the lazy loading function.
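As a small sketch of the steps above (the image URL and container selector are invented), asynchronously injecting a lazy image and nudging the loader could look like this:

```javascript
// The real source lives in `data-src`; the `lazy` class marks the image for the
// lazy loader, which copies the value to `src` once the image reaches the viewport.
const img = document.createElement('img');
img.classList.add('lazy');
img.dataset.src = '/uploads/example/screenshot.png'; // hypothetical URL

// Hypothetical container that is being filled in asynchronously.
document.querySelector('.js-async-container').appendChild(img);

// Normally the MutationObserver inside the lazy loader picks up the new node,
// but the search can also be triggered explicitly after injecting content.
if (window.gl && window.gl.lazyLoader) {
  window.gl.lazyLoader.searchLazyImages();
}
```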
### Animations ### Animations
...@@ -97,19 +97,19 @@ bundle and included on the page. ...@@ -97,19 +97,19 @@ bundle and included on the page.
```javascript ```javascript
import initMyWidget from './my_widget'; import initMyWidget from './my_widget';
document.addEventListener('DOMContentLoaded', () => { document.addEventListener('DOMContentLoaded', () => {
initMyWidget(); initMyWidget();
}); });
``` ```
- **Supporting Module Placement:** - **Supporting Module Placement:**
- If a class or a module is _specific to a particular route_, try to locate - If a class or a module is _specific to a particular route_, try to locate
it close to the entry point it will be used. For instance, if it close to the entry point it will be used. For instance, if
`my_widget.js` is only imported within `pages/widget/show/index.js`, you `my_widget.js` is only imported within `pages/widget/show/index.js`, you
should place the module at `pages/widget/show/my_widget.js` and import it should place the module at `pages/widget/show/my_widget.js` and import it
with a relative path (e.g. `import initMyWidget from './my_widget';`). with a relative path (e.g. `import initMyWidget from './my_widget';`).
- If a class or module is _used by multiple routes_, place it within a - If a class or module is _used by multiple routes_, place it within a
shared directory at the closest common parent directory for the entry shared directory at the closest common parent directory for the entry
points that import it. For example, if `my_widget.js` is imported within points that import it. For example, if `my_widget.js` is imported within
......
...@@ -30,7 +30,7 @@ With default whitelist settings, the probes can be accessed from localhost: ...@@ -30,7 +30,7 @@ With default whitelist settings, the probes can be accessed from localhost:
The first endpoint, `/-/health/`, only checks whether the application server is running. It does The first endpoint, `/-/health/`, only checks whether the application server is running. It does
-not verify the database or other services are running. A successful response with return -not verify the database or other services are running. A successful response will return
a 200 status code with the following message: a 200 status code with the following message:
``` ```
......
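Purely to illustrate that contract (the endpoint path and the 200 status come from the text above; the host is hypothetical, and a plain `curl` against a whitelisted address works just as well), a liveness probe might look like:

```javascript
// Minimal liveness probe; assumes a fetch-capable runtime and that the caller's
// IP is on the probe whitelist (localhost by default).
async function appServerIsUp(baseUrl) {
  try {
    const response = await fetch(`${baseUrl}/-/health/`);
    // A 200 only means the application server answers; it says nothing about
    // the database or other backing services.
    return response.status === 200;
  } catch (error) {
    return false;
  }
}

appServerIsUp('http://localhost').then(up => {
  console.log(up ? 'application server is up' : 'application server is not responding');
});
```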
module Gitlab
module Auth
##
# Metrics and logging for user authentication activity.
#
class Activity
extend Gitlab::Utils::StrongMemoize
COUNTERS = {
user_authenticated: 'Counter of successful authentication events',
user_unauthenticated: 'Counter of authentication failures',
user_not_found: 'Counter of failed log-ins when user is unknown',
user_password_invalid: 'Counter of failed log-ins with invalid password',
user_session_override: 'Counter of manual log-ins and sessions overrides',
user_session_destroyed: 'Counter of user sessions being destroyed',
user_two_factor_authenticated: 'Counter of two factor authentications',
user_sessionless_authentication: 'Counter of sessionless authentications',
user_blocked: 'Counter of sign in attempts when user is blocked'
}.freeze
def initialize(user, opts)
@user = user
@opts = opts
end
def user_authentication_failed!
self.class.user_unauthenticated_counter_increment!
case @opts[:message]
when :not_found_in_database
self.class.user_not_found_counter_increment!
when :invalid
self.class.user_password_invalid_counter_increment!
end
self.class.user_blocked_counter_increment! if @user&.blocked?
end
def user_authenticated!
self.class.user_authenticated_counter_increment!
end
def user_session_override!
self.class.user_session_override_counter_increment!
case @opts[:message]
when :two_factor_authenticated
self.class.user_two_factor_authenticated_counter_increment!
when :sessionless_sign_in
self.class.user_sessionless_authentication_counter_increment!
end
end
def user_session_destroyed!
self.class.user_session_destroyed_counter_increment!
end
def self.each_counter
COUNTERS.each_pair do |metric, description|
yield "#{metric}_counter", metric, description
end
end
each_counter do |counter, metric, description|
define_singleton_method(counter) do
strong_memoize(counter) do
Gitlab::Metrics.counter("gitlab_auth_#{metric}_total".to_sym, description)
end
end
define_singleton_method("#{counter}_increment!") do
public_send(counter).increment # rubocop:disable GitlabSecurity/PublicSend
end
end
end
end
end
...@@ -2,37 +2,58 @@ ...@@ -2,37 +2,58 @@
module Gitlab module Gitlab
module Auth module Auth
class BlockedUserTracker class BlockedUserTracker
include Gitlab::Utils::StrongMemoize
ACTIVE_RECORD_REQUEST_PARAMS = 'action_dispatch.request.request_parameters' ACTIVE_RECORD_REQUEST_PARAMS = 'action_dispatch.request.request_parameters'
def self.log_if_user_blocked(env) def initialize(env)
message = env.dig('warden.options', :message) @env = env
end
# Devise calls User#active_for_authentication? on the User model and then def user_blocked?
# throws an exception to Warden with User#inactive_message: user&.blocked?
# https://github.com/plataformatec/devise/blob/v4.2.1/lib/devise/hooks/activatable.rb#L8 end
#
# Since Warden doesn't pass the user record to the failure handler, we
# need to do a database lookup with the username. We can limit the
# lookups to happen when the user was blocked by checking the inactive
# message passed along by Warden.
return unless message == User::BLOCKED_MESSAGE
# Check for either LDAP or regular GitLab account logins def user
login = env.dig(ACTIVE_RECORD_REQUEST_PARAMS, 'username') || return unless has_user_blocked_message?
env.dig(ACTIVE_RECORD_REQUEST_PARAMS, 'user', 'login')
return unless login.present? strong_memoize(:user) do
# Check for either LDAP or regular GitLab account logins
login = @env.dig(ACTIVE_RECORD_REQUEST_PARAMS, 'username') ||
@env.dig(ACTIVE_RECORD_REQUEST_PARAMS, 'user', 'login')
user = User.by_login(login) User.by_login(login) if login.present?
end
rescue TypeError
end
return unless user&.blocked? def log_blocked_user_activity!
return unless user_blocked?
Gitlab::AppLogger.info("Failed login for blocked user: user=#{user.username} ip=#{env['REMOTE_ADDR']}") Gitlab::AppLogger.info("Failed login for blocked user: user=#{user.username} ip=#{@env['REMOTE_ADDR']}")
SystemHooksService.new.execute_hooks_for(user, :failed_login) SystemHooksService.new.execute_hooks_for(user, :failed_login)
true true
rescue TypeError rescue TypeError
end end
private
##
# Devise calls User#active_for_authentication? on the User model and then
# throws an exception to Warden with User#inactive_message:
# https://github.com/plataformatec/devise/blob/v4.2.1/lib/devise/hooks/activatable.rb#L8
#
# Since Warden doesn't pass the user record to the failure handler, we
# need to do a database lookup with the username. We can limit the
# lookups to happen when the user was blocked by checking the inactive
# message passed along by Warden.
#
def has_user_blocked_message?
strong_memoize(:user_blocked_message) do
message = @env.dig('warden.options', :message)
message == User::BLOCKED_MESSAGE
end
end
end end
end end
end end
# frozen_string_literal: true
module Gitlab module Gitlab
class GitPostReceive class GitPostReceive
include Gitlab::Identifier include Gitlab::Identifier
...@@ -14,10 +16,11 @@ module Gitlab ...@@ -14,10 +16,11 @@ module Gitlab
end end
def changes_refs def changes_refs
return enum_for(:changes_refs) unless block_given? return changes unless block_given?
changes.each do |change| changes.each do |change|
oldrev, newrev, ref = change.strip.split(' ') change.strip!
oldrev, newrev, ref = change.split(' ')
yield oldrev, newrev, ref yield oldrev, newrev, ref
end end
...@@ -26,13 +29,10 @@ module Gitlab ...@@ -26,13 +29,10 @@ module Gitlab
private private
def deserialize_changes(changes) def deserialize_changes(changes)
changes = utf8_encode_changes(changes) utf8_encode_changes(changes).each_line
changes.lines
end end
def utf8_encode_changes(changes) def utf8_encode_changes(changes)
changes = changes.dup
changes.force_encoding('UTF-8') changes.force_encoding('UTF-8')
return changes if changes.valid_encoding? return changes if changes.valid_encoding?
......
module Gitlab module Gitlab
module Kubernetes module Kubernetes
class ConfigMap class ConfigMap
def initialize(name, files) def initialize(name, values = "")
@name = name @name = name
@files = files @values = values
end end
def generate def generate
resource = ::Kubeclient::Resource.new resource = ::Kubeclient::Resource.new
resource.metadata = metadata resource.metadata = metadata
resource.data = files resource.data = { values: values }
resource resource
end end
...@@ -19,7 +19,7 @@ module Gitlab ...@@ -19,7 +19,7 @@ module Gitlab
private private
attr_reader :name, :files attr_reader :name, :values
def metadata def metadata
{ {
......
...@@ -11,7 +11,7 @@ module Gitlab ...@@ -11,7 +11,7 @@ module Gitlab
def install(command) def install(command)
namespace.ensure_exists! namespace.ensure_exists!
create_config_map(command) create_config_map(command) if command.config_map?
kubeclient.create_pod(command.pod_resource) kubeclient.create_pod(command.pod_resource)
end end
......
module Gitlab module Gitlab
module Kubernetes module Kubernetes
module Helm module Helm
module BaseCommand class BaseCommand
attr_reader :name
def initialize(name)
@name = name
end
def pod_resource def pod_resource
Gitlab::Kubernetes::Helm::Pod.new(self, namespace).generate Gitlab::Kubernetes::Helm::Pod.new(self, namespace).generate
end end
...@@ -18,32 +24,16 @@ module Gitlab ...@@ -18,32 +24,16 @@ module Gitlab
HEREDOC HEREDOC
end end
def pod_name def config_map?
"install-#{name}" false
end
def config_map_resource
Gitlab::Kubernetes::ConfigMap.new(name, files).generate
end end
def file_names def pod_name
files.keys "install-#{name}"
end
def name
raise "Not implemented"
end
def files
raise "Not implemented"
end end
private private
def files_dir
"/data/helm/#{name}/config"
end
def namespace def namespace
Gitlab::Kubernetes::Helm::NAMESPACE Gitlab::Kubernetes::Helm::NAMESPACE
end end
......
module Gitlab
module Kubernetes
module Helm
class Certificate
INFINITE_EXPIRY = 1000.years
SHORT_EXPIRY = 30.minutes
attr_reader :key, :cert
def key_string
@key.to_s
end
def cert_string
@cert.to_pem
end
def self.from_strings(key_string, cert_string)
key = OpenSSL::PKey::RSA.new(key_string)
cert = OpenSSL::X509::Certificate.new(cert_string)
new(key, cert)
end
def self.generate_root
_issue(signed_by: nil, expires_in: INFINITE_EXPIRY, certificate_authority: true)
end
def issue(expires_in: SHORT_EXPIRY)
self.class._issue(signed_by: self, expires_in: expires_in, certificate_authority: false)
end
private
def self._issue(signed_by:, expires_in:, certificate_authority:)
key = OpenSSL::PKey::RSA.new(4096)
public_key = key.public_key
subject = OpenSSL::X509::Name.parse("/C=US")
cert = OpenSSL::X509::Certificate.new
cert.subject = subject
cert.issuer = signed_by&.cert&.subject || subject
cert.not_before = Time.now
cert.not_after = expires_in.from_now
cert.public_key = public_key
cert.serial = 0x0
cert.version = 2
if certificate_authority
extension_factory = OpenSSL::X509::ExtensionFactory.new
extension_factory.subject_certificate = cert
extension_factory.issuer_certificate = cert
cert.add_extension(extension_factory.create_extension('subjectKeyIdentifier', 'hash'))
cert.add_extension(extension_factory.create_extension('basicConstraints', 'CA:TRUE', true))
cert.add_extension(extension_factory.create_extension('keyUsage', 'cRLSign,keyCertSign', true))
end
cert.sign(signed_by&.key || key, OpenSSL::Digest::SHA256.new)
new(key, cert)
end
def initialize(key, cert)
@key = key
@cert = cert
end
end
end
end
end
module Gitlab module Gitlab
module Kubernetes module Kubernetes
module Helm module Helm
class InitCommand class InitCommand < BaseCommand
include BaseCommand
attr_reader :name, :files
def initialize(name:, files:)
@name = name
@files = files
end
def generate_script def generate_script
super + [ super + [
init_helm_command init_helm_command
...@@ -20,12 +11,7 @@ module Gitlab ...@@ -20,12 +11,7 @@ module Gitlab
private private
def init_helm_command def init_helm_command
tls_flags = "--tiller-tls" \ "helm init >/dev/null"
" --tiller-tls-verify --tls-ca-cert #{files_dir}/ca.pem" \
" --tiller-tls-cert #{files_dir}/cert.pem" \
" --tiller-tls-key #{files_dir}/key.pem"
"helm init #{tls_flags} >/dev/null"
end end
end end
end end
......
module Gitlab module Gitlab
module Kubernetes module Kubernetes
module Helm module Helm
class InstallCommand class InstallCommand < BaseCommand
include BaseCommand attr_reader :name, :chart, :version, :repository, :values
attr_reader :name, :files, :chart, :version, :repository def initialize(name, chart:, values:, version: nil, repository: nil)
def initialize(name:, chart:, files:, version: nil, repository: nil)
@name = name @name = name
@chart = chart @chart = chart
@version = version @version = version
@files = files @values = values
@repository = repository @repository = repository
end end
...@@ -22,6 +20,14 @@ module Gitlab ...@@ -22,6 +20,14 @@ module Gitlab
].compact.join("\n") ].compact.join("\n")
end end
def config_map?
true
end
def config_map_resource
Gitlab::Kubernetes::ConfigMap.new(name, values).generate
end
private private
def init_command def init_command
...@@ -33,27 +39,14 @@ module Gitlab ...@@ -33,27 +39,14 @@ module Gitlab
end end
def script_command def script_command
"helm install" \ <<~HEREDOC
"#{optional_tls_flags} " \ helm install #{chart} --name #{name}#{optional_version_flag} --namespace #{Gitlab::Kubernetes::Helm::NAMESPACE} -f /data/helm/#{name}/config/values.yaml >/dev/null
"#{chart} " \ HEREDOC
"--name #{name}" \
"#{optional_version_flag} " \
"--namespace #{Gitlab::Kubernetes::Helm::NAMESPACE} " \
"-f /data/helm/#{name}/config/values.yaml >/dev/null\n"
end end
def optional_version_flag def optional_version_flag
" --version #{version}" if version " --version #{version}" if version
end end
def optional_tls_flags
return unless files.key?(:'ca.pem')
" --tls" \
" --tls-ca-cert #{files_dir}/ca.pem" \
" --tls-cert #{files_dir}/cert.pem" \
" --tls-key #{files_dir}/key.pem"
end
end end
end end
end end
......
...@@ -10,8 +10,10 @@ module Gitlab ...@@ -10,8 +10,10 @@ module Gitlab
def generate def generate
spec = { containers: [container_specification], restartPolicy: 'Never' } spec = { containers: [container_specification], restartPolicy: 'Never' }
spec[:volumes] = volumes_specification if command.config_map?
spec[:containers][0][:volumeMounts] = volume_mounts_specification spec[:volumes] = volumes_specification
spec[:containers][0][:volumeMounts] = volume_mounts_specification
end
::Kubeclient::Resource.new(metadata: metadata, spec: spec) ::Kubeclient::Resource.new(metadata: metadata, spec: spec)
end end
...@@ -59,7 +61,7 @@ module Gitlab ...@@ -59,7 +61,7 @@ module Gitlab
name: 'configuration-volume', name: 'configuration-volume',
configMap: { configMap: {
name: "values-content-configuration-#{command.name}", name: "values-content-configuration-#{command.name}",
items: command.file_names.map { |name| { key: name, path: name } } items: [{ key: 'values', path: 'values.yaml' }]
} }
} }
] ]
......
...@@ -388,14 +388,6 @@ namespace :gitlab do ...@@ -388,14 +388,6 @@ namespace :gitlab do
end end
end end
namespace :repo do
desc "GitLab | Check the integrity of the repositories managed by GitLab"
task check: :gitlab_environment do
puts "This task is deprecated. Please use gitlab:git:fsck instead".color(:red)
Rake::Task["gitlab:git:fsck"].execute
end
end
namespace :orphans do namespace :orphans do
desc 'Gitlab | Check for orphaned namespaces and repositories' desc 'Gitlab | Check for orphaned namespaces and repositories'
task check: :gitlab_environment do task check: :gitlab_environment do
...@@ -425,6 +417,7 @@ namespace :gitlab do ...@@ -425,6 +417,7 @@ namespace :gitlab do
end end
end end
<<<<<<< HEAD
namespace :user do namespace :user do
desc "GitLab | Check the integrity of a specific user's repositories" desc "GitLab | Check the integrity of a specific user's repositories"
task :check_repos, [:username] => :gitlab_environment do |t, args| task :check_repos, [:username] => :gitlab_environment do |t, args|
...@@ -466,6 +459,8 @@ namespace :gitlab do ...@@ -466,6 +459,8 @@ namespace :gitlab do
end end
end end
=======
>>>>>>> ce/master
# Helper methods # Helper methods
########################## ##########################
......
namespace :gitlab do namespace :gitlab do
namespace :git do namespace :git do
desc "GitLab | Git | Repack"
task repack: :gitlab_environment do
failures = perform_git_cmd(%W(#{Gitlab.config.git.bin_path} repack -a --quiet), "Repacking repo")
if failures.empty?
puts "Done".color(:green)
else
output_failures(failures)
end
end
desc "GitLab | Git | Run garbage collection on all repos"
task gc: :gitlab_environment do
failures = perform_git_cmd(%W(#{Gitlab.config.git.bin_path} gc --auto --quiet), "Garbage Collecting")
if failures.empty?
puts "Done".color(:green)
else
output_failures(failures)
end
end
desc "GitLab | Git | Prune all repos"
task prune: :gitlab_environment do
failures = perform_git_cmd(%W(#{Gitlab.config.git.bin_path} prune), "Git Prune")
if failures.empty?
puts "Done".color(:green)
else
output_failures(failures)
end
end
desc 'GitLab | Git | Check all repos integrity' desc 'GitLab | Git | Check all repos integrity'
task fsck: :gitlab_environment do task fsck: :gitlab_environment do
failures = perform_git_cmd(%W(#{Gitlab.config.git.bin_path} fsck --name-objects --no-progress), "Checking integrity") do |repo|
check_config_lock(repo)
check_ref_locks(repo)
end
if failures.empty?
puts "Done".color(:green)
else
output_failures(failures)
end
end
def perform_git_cmd(cmd, message)
puts "Starting #{message} on all repositories"
failures = [] failures = []
all_repos do |repo| Project.find_each(batch_size: 100) do |project|
if system(*cmd, chdir: repo) begin
puts "Performed #{message} at #{repo}" project.repository.fsck
else
failures << repo rescue => e
failures << "#{project.full_path} on #{project.repository_storage}: #{e}"
end end
yield(repo) if block_given? puts "Performed integrity check for #{project.repository.full_path}"
end end
failures if failures.empty?
end puts "Done".color(:green)
def output_failures(failures)
puts "The following repositories reported errors:".color(:red)
failures.each { |f| puts "- #{f}" }
end
def check_config_lock(repo_dir)
config_exists = File.exist?(File.join(repo_dir, 'config.lock'))
config_output = config_exists ? 'yes'.color(:red) : 'no'.color(:green)
puts "'config.lock' file exists?".color(:yellow) + " ... #{config_output}"
end
def check_ref_locks(repo_dir)
lock_files = Dir.glob(File.join(repo_dir, 'refs/heads/*.lock'))
if lock_files.present?
puts "Ref lock files exist:".color(:red)
lock_files.each { |lock_file| puts " #{lock_file}" }
else else
puts "No ref lock files exist".color(:green) puts "The following repositories reported errors:".color(:red)
failures.each { |f| puts "- #{f}" }
end end
end end
end end
......
...@@ -652,6 +652,9 @@ msgstr "" ...@@ -652,6 +652,9 @@ msgstr ""
msgid "Are you sure you want to delete this pipeline schedule?" msgid "Are you sure you want to delete this pipeline schedule?"
msgstr "" msgstr ""
msgid "Are you sure you want to lose unsaved changes?"
msgstr ""
msgid "Are you sure you want to remove %{group_name}?" msgid "Are you sure you want to remove %{group_name}?"
msgstr "" msgstr ""
......
...@@ -44,11 +44,10 @@ module QA ...@@ -44,11 +44,10 @@ module QA
page.await_installed(:helm) page.await_installed(:helm)
page.install!(:ingress) if @install_ingress page.install!(:ingress) if @install_ingress
page.install!(:prometheus) if @install_prometheus
page.install!(:runner) if @install_runner
page.await_installed(:ingress) if @install_ingress page.await_installed(:ingress) if @install_ingress
page.install!(:prometheus) if @install_prometheus
page.await_installed(:prometheus) if @install_prometheus page.await_installed(:prometheus) if @install_prometheus
page.install!(:runner) if @install_runner
page.await_installed(:runner) if @install_runner page.await_installed(:runner) if @install_runner
end end
end end
......
...@@ -16,7 +16,6 @@ module QA ...@@ -16,7 +16,6 @@ module QA
def install!(application_name) def install!(application_name)
within(".js-cluster-application-row-#{application_name}") do within(".js-cluster-application-row-#{application_name}") do
page.has_button?('Install', wait: 30)
click_on 'Install' click_on 'Install'
end end
end end
......
...@@ -57,6 +57,10 @@ describe ApplicationController do ...@@ -57,6 +57,10 @@ describe ApplicationController do
end end
describe "#authenticate_user_from_personal_access_token!" do describe "#authenticate_user_from_personal_access_token!" do
before do
stub_authentication_activity_metrics(debug: false)
end
controller(described_class) do controller(described_class) do
def index def index
render text: 'authenticated' render text: 'authenticated'
...@@ -67,7 +71,13 @@ describe ApplicationController do ...@@ -67,7 +71,13 @@ describe ApplicationController do
context "when the 'personal_access_token' param is populated with the personal access token" do context "when the 'personal_access_token' param is populated with the personal access token" do
it "logs the user in" do it "logs the user in" do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_session_override_counter)
.and increment(:user_sessionless_authentication_counter)
get :index, private_token: personal_access_token.token get :index, private_token: personal_access_token.token
expect(response).to have_gitlab_http_status(200) expect(response).to have_gitlab_http_status(200)
expect(response.body).to eq('authenticated') expect(response.body).to eq('authenticated')
end end
...@@ -75,15 +85,25 @@ describe ApplicationController do ...@@ -75,15 +85,25 @@ describe ApplicationController do
context "when the 'PERSONAL_ACCESS_TOKEN' header is populated with the personal access token" do context "when the 'PERSONAL_ACCESS_TOKEN' header is populated with the personal access token" do
it "logs the user in" do it "logs the user in" do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_session_override_counter)
.and increment(:user_sessionless_authentication_counter)
@request.headers["PRIVATE-TOKEN"] = personal_access_token.token @request.headers["PRIVATE-TOKEN"] = personal_access_token.token
get :index get :index
expect(response).to have_gitlab_http_status(200) expect(response).to have_gitlab_http_status(200)
expect(response.body).to eq('authenticated') expect(response.body).to eq('authenticated')
end end
end end
it "doesn't log the user in otherwise" do it "doesn't log the user in otherwise" do
expect(authentication_metrics)
.to increment(:user_unauthenticated_counter)
get :index, private_token: "token" get :index, private_token: "token"
expect(response.status).not_to eq(200) expect(response.status).not_to eq(200)
expect(response.body).not_to eq('authenticated') expect(response.body).not_to eq('authenticated')
end end
...@@ -174,6 +194,10 @@ describe ApplicationController do ...@@ -174,6 +194,10 @@ describe ApplicationController do
end end
describe '#authenticate_sessionless_user!' do describe '#authenticate_sessionless_user!' do
before do
stub_authentication_activity_metrics(debug: false)
end
describe 'authenticating a user from a feed token' do describe 'authenticating a user from a feed token' do
controller(described_class) do controller(described_class) do
def index def index
...@@ -184,7 +208,13 @@ describe ApplicationController do ...@@ -184,7 +208,13 @@ describe ApplicationController do
context "when the 'feed_token' param is populated with the feed token" do context "when the 'feed_token' param is populated with the feed token" do
context 'when the request format is atom' do context 'when the request format is atom' do
it "logs the user in" do it "logs the user in" do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_session_override_counter)
.and increment(:user_sessionless_authentication_counter)
get :index, feed_token: user.feed_token, format: :atom get :index, feed_token: user.feed_token, format: :atom
expect(response).to have_gitlab_http_status 200 expect(response).to have_gitlab_http_status 200
expect(response.body).to eq 'authenticated' expect(response.body).to eq 'authenticated'
end end
...@@ -192,7 +222,13 @@ describe ApplicationController do ...@@ -192,7 +222,13 @@ describe ApplicationController do
context 'when the request format is ics' do context 'when the request format is ics' do
it "logs the user in" do it "logs the user in" do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_session_override_counter)
.and increment(:user_sessionless_authentication_counter)
get :index, feed_token: user.feed_token, format: :ics get :index, feed_token: user.feed_token, format: :ics
expect(response).to have_gitlab_http_status 200 expect(response).to have_gitlab_http_status 200
expect(response.body).to eq 'authenticated' expect(response.body).to eq 'authenticated'
end end
...@@ -200,7 +236,11 @@ describe ApplicationController do ...@@ -200,7 +236,11 @@ describe ApplicationController do
context 'when the request format is neither atom nor ics' do context 'when the request format is neither atom nor ics' do
it "doesn't log the user in" do it "doesn't log the user in" do
expect(authentication_metrics)
.to increment(:user_unauthenticated_counter)
get :index, feed_token: user.feed_token get :index, feed_token: user.feed_token
expect(response.status).not_to have_gitlab_http_status 200 expect(response.status).not_to have_gitlab_http_status 200
expect(response.body).not_to eq 'authenticated' expect(response.body).not_to eq 'authenticated'
end end
...@@ -209,7 +249,11 @@ describe ApplicationController do ...@@ -209,7 +249,11 @@ describe ApplicationController do
context "when the 'feed_token' param is populated with an invalid feed token" do context "when the 'feed_token' param is populated with an invalid feed token" do
it "doesn't log the user" do it "doesn't log the user" do
expect(authentication_metrics)
.to increment(:user_unauthenticated_counter)
get :index, feed_token: 'token', format: :atom get :index, feed_token: 'token', format: :atom
expect(response.status).not_to eq 200 expect(response.status).not_to eq 200
expect(response.body).not_to eq 'authenticated' expect(response.body).not_to eq 'authenticated'
end end
......
...@@ -2,50 +2,131 @@ require 'spec_helper' ...@@ -2,50 +2,131 @@ require 'spec_helper'
describe Projects::WikisController do describe Projects::WikisController do
let(:project) { create(:project, :public, :repository) } let(:project) { create(:project, :public, :repository) }
let(:user) { create(:user) } let(:user) { project.owner }
let(:wiki) { ProjectWiki.new(project, user) } let(:project_wiki) { ProjectWiki.new(project, user) }
let(:wiki) { project_wiki.wiki }
let(:wiki_title) { 'page-title-test' }
describe 'GET #show' do before do
let(:wiki_title) { 'page-title-test' } create_page(wiki_title, 'hello world')
sign_in(user)
end
after do
destroy_page(wiki_title)
end
describe 'GET #show' do
render_views render_views
before do subject { get :show, namespace_id: project.namespace, project_id: project, id: wiki_title }
create_page(wiki_title, 'hello world')
end
it 'limits the retrieved pages for the sidebar' do context 'when page content encoding is invalid' do
sign_in(user) it 'limits the retrieved pages for the sidebar' do
expect(controller).to receive(:load_wiki).and_return(project_wiki)
expect(controller).to receive(:load_wiki).and_return(wiki) # empty? call
expect(project_wiki).to receive(:pages).with(limit: 1).and_call_original
# Sidebar entries
expect(project_wiki).to receive(:pages).with(limit: 15).and_call_original
# empty? call subject
expect(wiki).to receive(:pages).with(limit: 1).and_call_original
# Sidebar entries expect(response).to have_http_status(:ok)
expect(wiki).to receive(:pages).with(limit: 15).and_call_original expect(response.body).to include(wiki_title)
end
end
get :show, namespace_id: project.namespace, project_id: project, id: wiki_title context 'when page content encoding is invalid' do
it 'sets flash error' do
allow(controller).to receive(:valid_encoding?).and_return(false)
expect(response).to have_http_status(:ok) subject
expect(response.body).to include(wiki_title)
expect(response).to have_http_status(:ok)
expect(flash[:notice]).to eq 'The content of this page is not encoded in UTF-8. Edits can only be made via the Git repository.'
end
end end
end end
describe 'POST #preview_markdown' do describe 'POST #preview_markdown' do
it 'renders json in a correct format' do it 'renders json in a correct format' do
sign_in(user)
post :preview_markdown, namespace_id: project.namespace, project_id: project, id: 'page/path', text: '*Markdown* text' post :preview_markdown, namespace_id: project.namespace, project_id: project, id: 'page/path', text: '*Markdown* text'
expect(JSON.parse(response.body).keys).to match_array(%w(body references)) expect(JSON.parse(response.body).keys).to match_array(%w(body references))
end end
end end
describe 'GET #edit' do
subject { get(:edit, namespace_id: project.namespace, project_id: project, id: wiki_title) }
context 'when page content encoding is invalid' do
it 'redirects to show' do
allow(controller).to receive(:valid_encoding?).and_return(false)
subject
expect(response).to redirect_to(project_wiki_path(project, project_wiki.pages.first))
end
end
context 'when page content encoding is valid' do
render_views
it 'shows the edit page' do
subject
expect(response).to have_http_status(:ok)
expect(response.body).to include('Edit Page')
end
end
end
describe 'PATCH #update' do
let(:new_title) { 'New title' }
let(:new_content) { 'New content' }
subject do
patch(:update,
namespace_id: project.namespace,
project_id: project,
id: wiki_title,
wiki: { title: new_title, content: new_content })
end
context 'when page content encoding is invalid' do
it 'redirects to show' do
allow(controller).to receive(:valid_encoding?).and_return(false)
subject
expect(response).to redirect_to(project_wiki_path(project, project_wiki.pages.first))
end
end
context 'when page content encoding is valid' do
render_views
it 'updates the page' do
subject
wiki_page = project_wiki.pages.first
expect(wiki_page.title).to eq new_title
expect(wiki_page.content).to eq new_content
end
end
end
def create_page(name, content) def create_page(name, content)
project.wiki.wiki.write_page(name, :markdown, content, commit_details(name)) wiki.write_page(name, :markdown, content, commit_details(name))
end end
def commit_details(name) def commit_details(name)
Gitlab::Git::Wiki::CommitDetails.new(user.id, user.username, user.name, user.email, "created page #{name}") Gitlab::Git::Wiki::CommitDetails.new(user.id, user.username, user.name, user.email, "created page #{name}")
end end
def destroy_page(title, dir = '')
page = wiki.page(title: title, dir: dir)
project_wiki.delete_page(page, "test commit")
end
end end
...@@ -45,21 +45,11 @@ FactoryBot.define do ...@@ -45,21 +45,11 @@ FactoryBot.define do
updated_at ClusterWaitForAppInstallationWorker::TIMEOUT.ago updated_at ClusterWaitForAppInstallationWorker::TIMEOUT.ago
end end
factory :clusters_applications_ingress, class: Clusters::Applications::Ingress do factory :clusters_applications_ingress, class: Clusters::Applications::Ingress
cluster factory: %i(cluster with_installed_helm provided_by_gcp) factory :clusters_applications_prometheus, class: Clusters::Applications::Prometheus
end factory :clusters_applications_runner, class: Clusters::Applications::Runner
factory :clusters_applications_prometheus, class: Clusters::Applications::Prometheus do
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
end
factory :clusters_applications_runner, class: Clusters::Applications::Runner do
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
end
factory :clusters_applications_jupyter, class: Clusters::Applications::Jupyter do factory :clusters_applications_jupyter, class: Clusters::Applications::Jupyter do
oauth_application factory: :oauth_application oauth_application factory: :oauth_application
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
end end
end end
end end
...@@ -36,9 +36,5 @@ FactoryBot.define do ...@@ -36,9 +36,5 @@ FactoryBot.define do
trait :production_environment do trait :production_environment do
sequence(:environment_scope) { |n| "production#{n}/*" } sequence(:environment_scope) { |n| "production#{n}/*" }
end end
trait :with_installed_helm do
application_helm factory: %i(clusters_applications_helm installed)
end
end end
end end
...@@ -46,14 +46,12 @@ describe 'Clusters Applications', :js do ...@@ -46,14 +46,12 @@ describe 'Clusters Applications', :js do
end end
end end
it 'they see status transition' do it 'he sees status transition' do
page.within('.js-cluster-application-row-helm') do page.within('.js-cluster-application-row-helm') do
# FE sends request and gets the response, then the button is "Install" # FE sends request and gets the response, then the button is "Install"
expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true') expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true')
expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Install') expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Install')
wait_until_helm_created!
Clusters::Cluster.last.application_helm.make_installing! Clusters::Cluster.last.application_helm.make_installing!
# FE starts polling and update the buttons to "Installing" # FE starts polling and update the buttons to "Installing"
...@@ -85,7 +83,7 @@ describe 'Clusters Applications', :js do ...@@ -85,7 +83,7 @@ describe 'Clusters Applications', :js do
end end
end end
it 'they see status transition' do it 'he sees status transition' do
page.within('.js-cluster-application-row-ingress') do page.within('.js-cluster-application-row-ingress') do
# FE sends request and gets the response, then the button is "Install" # FE sends request and gets the response, then the button is "Install"
expect(page).to have_css('.js-cluster-application-install-button[disabled]') expect(page).to have_css('.js-cluster-application-install-button[disabled]')
...@@ -118,14 +116,4 @@ describe 'Clusters Applications', :js do ...@@ -118,14 +116,4 @@ describe 'Clusters Applications', :js do
end end
end end
end end
def wait_until_helm_created!
retries = 0
while Clusters::Cluster.last.application_helm.nil?
raise "Timed out waiting for helm application to be created in DB" if (retries += 1) > 3
sleep(1)
end
end
end end
...@@ -137,6 +137,26 @@ describe 'User views a wiki page' do ...@@ -137,6 +137,26 @@ describe 'User views a wiki page' do
end end
end end
context 'when page has invalid content encoding' do
let(:content) { 'whatever'.force_encoding('ISO-8859-1') }
before do
allow(Gitlab::EncodingHelper).to receive(:encode!).and_return(content)
visit(project_wiki_path(project, wiki_page))
end
it 'does not show "Edit" button' do
expect(page).not_to have_selector('a.btn', text: 'Edit')
end
it 'shows error' do
page.within(:css, '.flash-notice') do
expect(page).to have_content('The content of this page is not encoded in UTF-8. Edits can only be made via the Git repository.')
end
end
end
it 'opens a default wiki page', :js do it 'opens a default wiki page', :js do
visit(project_path(project)) visit(project_path(project))
......
...@@ -82,6 +82,7 @@ ...@@ -82,6 +82,7 @@
"can_revert_on_current_merge_request": { "type": ["boolean", "null"] }, "can_revert_on_current_merge_request": { "type": ["boolean", "null"] },
"can_cherry_pick_on_current_merge_request": { "type": ["boolean", "null"] }, "can_cherry_pick_on_current_merge_request": { "type": ["boolean", "null"] },
"can_create_note": { "type": "boolean" }, "can_create_note": { "type": "boolean" },
"can_create_issue": { "type": "boolean" },
"can_update": { "type": "boolean" } "can_update": { "type": "boolean" }
}, },
"additionalProperties": false "additionalProperties": false
......
...@@ -11,7 +11,9 @@ const discussionFixture = 'merge_requests/diff_discussion.json'; ...@@ -11,7 +11,9 @@ const discussionFixture = 'merge_requests/diff_discussion.json';
describe('diff_file_header', () => { describe('diff_file_header', () => {
let vm; let vm;
let props; let props;
const diffDiscussionMock = getJSONFixture(discussionFixture)[0];
const Component = Vue.extend(DiffFileHeader); const Component = Vue.extend(DiffFileHeader);
const store = new Vuex.Store({ const store = new Vuex.Store({
modules: { modules: {
diffs: diffsModule, diffs: diffsModule,
...@@ -20,7 +22,6 @@ describe('diff_file_header', () => { ...@@ -20,7 +22,6 @@ describe('diff_file_header', () => {
}); });
beforeEach(() => { beforeEach(() => {
const diffDiscussionMock = getJSONFixture(discussionFixture)[0];
const diffFile = convertObjectPropsToCamelCase(diffDiscussionMock.diff_file, { deep: true }); const diffFile = convertObjectPropsToCamelCase(diffDiscussionMock.diff_file, { deep: true });
props = { props = {
diffFile, diffFile,
...@@ -409,7 +410,7 @@ describe('diff_file_header', () => { ...@@ -409,7 +410,7 @@ describe('diff_file_header', () => {
}); });
describe('handles toggle discussions', () => { describe('handles toggle discussions', () => {
it('dispatches toggleFileDiscussions when user clicks on toggle discussions button', () => { it('renders a disabled button when diff has no discussions', () => {
const propsCopy = Object.assign({}, props); const propsCopy = Object.assign({}, props);
propsCopy.diffFile.submodule = false; propsCopy.diffFile.submodule = false;
propsCopy.diffFile.blob = { propsCopy.diffFile.blob = {
...@@ -428,11 +429,44 @@ describe('diff_file_header', () => { ...@@ -428,11 +429,44 @@ describe('diff_file_header', () => {
store, store,
}); });
spyOn(vm, 'toggleFileDiscussions'); expect(
vm.$el.querySelector('.js-btn-vue-toggle-comments').getAttribute('disabled'),
vm.$el.querySelector('.js-btn-vue-toggle-comments').click(); ).toEqual('disabled');
});
expect(vm.toggleFileDiscussions).toHaveBeenCalled();
describe('with discussions', () => {
it('dispatches toggleFileDiscussions when user clicks on toggle discussions button', () => {
const propsCopy = Object.assign({}, props);
propsCopy.diffFile.submodule = false;
propsCopy.diffFile.blob = {
id: '848ed9407c6730ff16edb3dd24485a0eea24292a',
path: 'lib/base.js',
name: 'base.js',
mode: '100644',
readableText: true,
icon: 'file-text-o',
};
propsCopy.addMergeRequestButtons = true;
propsCopy.diffFile.deletedFile = true;
const discussionGetter = () => [diffDiscussionMock];
notesModule.getters.discussions = discussionGetter;
vm = mountComponentWithStore(Component, {
props: propsCopy,
store: new Vuex.Store({
modules: {
diffs: diffsModule,
notes: notesModule,
},
}),
});
spyOn(vm, 'toggleFileDiscussions');
vm.$el.querySelector('.js-btn-vue-toggle-comments').click();
expect(vm.toggleFileDiscussions).toHaveBeenCalled();
});
}); });
}); });
}); });
......
...@@ -167,6 +167,24 @@ describe('Diffs Module Getters', () => { ...@@ -167,6 +167,24 @@ describe('Diffs Module Getters', () => {
}); });
}); });
describe('diffHasDiscussions', () => {
it('returns true when getDiffFileDiscussions returns discussions', () => {
expect(
getters.diffHasDiscussions(localState, {
getDiffFileDiscussions: () => [discussionMock],
})(diffFileMock),
).toEqual(true);
});
it('returns false when getDiffFileDiscussions returns no discussions', () => {
expect(
getters.diffHasDiscussions(localState, {
getDiffFileDiscussions: () => [],
})(diffFileMock),
).toEqual(false);
});
});
describe('getDiffFileDiscussions', () => { describe('getDiffFileDiscussions', () => {
it('returns an array with discussions when fileHash matches and the discussion belongs to a diff', () => { it('returns an array with discussions when fileHash matches and the discussion belongs to a diff', () => {
discussionMock.diff_file.file_hash = diffFileMock.fileHash; discussionMock.diff_file.file_hash = diffFileMock.fileHash;
......
...@@ -45,6 +45,33 @@ describe('ide component', () => { ...@@ -45,6 +45,33 @@ describe('ide component', () => {
}); });
}); });
describe('onBeforeUnload', () => {
it('returns undefined when no staged files or changed files', () => {
expect(vm.onBeforeUnload()).toBe(undefined);
});
it('returns warning text when there are changed files', () => {
vm.$store.state.changedFiles.push(file());
expect(vm.onBeforeUnload()).toBe('Are you sure you want to lose unsaved changes?');
});
it('returns warning text when there are staged files', () => {
vm.$store.state.stagedFiles.push(file());
expect(vm.onBeforeUnload()).toBe('Are you sure you want to lose unsaved changes?');
});
it('updates event object', () => {
const event = {};
vm.$store.state.stagedFiles.push(file());
vm.onBeforeUnload(event);
expect(event.returnValue).toBe('Are you sure you want to lose unsaved changes?');
});
});
describe('file finder', () => { describe('file finder', () => {
beforeEach(done => { beforeEach(done => {
spyOn(vm, 'toggleFileFinder'); spyOn(vm, 'toggleFileFinder');
......
import Poll from '~/lib/utils/poll'; import Poll from '~/lib/utils/poll';
import { successCodes } from '~/lib/utils/http_status';
const waitForAllCallsToFinish = (service, waitForCount, successCallback) => { const waitForAllCallsToFinish = (service, waitForCount, successCallback) => {
const timer = () => { const timer = () => {
...@@ -91,28 +92,32 @@ describe('Poll', () => { ...@@ -91,28 +92,32 @@ describe('Poll', () => {
}).catch(done.fail); }).catch(done.fail);
}); });
it('starts polling when http status is 200 and interval header is provided', (done) => { describe('for 2xx status code', () => {
mockServiceCall(service, { status: 200, headers: { 'poll-interval': 1 } }); successCodes.forEach(httpCode => {
it(`starts polling when http status is ${httpCode} and interval header is provided`, (done) => {
mockServiceCall(service, { status: httpCode, headers: { 'poll-interval': 1 } });
const Polling = new Poll({ const Polling = new Poll({
resource: service, resource: service,
method: 'fetch', method: 'fetch',
data: { page: 1 }, data: { page: 1 },
successCallback: callbacks.success, successCallback: callbacks.success,
errorCallback: callbacks.error, errorCallback: callbacks.error,
}); });
Polling.makeRequest(); Polling.makeRequest();
waitForAllCallsToFinish(service, 2, () => { waitForAllCallsToFinish(service, 2, () => {
Polling.stop(); Polling.stop();
expect(service.fetch.calls.count()).toEqual(2); expect(service.fetch.calls.count()).toEqual(2);
expect(service.fetch).toHaveBeenCalledWith({ page: 1 }); expect(service.fetch).toHaveBeenCalledWith({ page: 1 });
expect(callbacks.success).toHaveBeenCalled(); expect(callbacks.success).toHaveBeenCalled();
expect(callbacks.error).not.toHaveBeenCalled(); expect(callbacks.error).not.toHaveBeenCalled();
done(); done();
});
});
}); });
}); });
......
require 'fast_spec_helper'
describe Gitlab::Auth::Activity do
describe '.each_counter' do
it 'has all static counters defined' do
described_class.each_counter do |counter|
expect(described_class).to respond_to(counter)
end
end
it 'has all static incrementers defined' do
described_class.each_counter do |counter|
expect(described_class).to respond_to("#{counter}_increment!")
end
end
it 'has all counters starting with `user_`' do
described_class.each_counter do |counter|
expect(counter).to start_with('user_')
end
end
it 'yields counter method, name and description' do
described_class.each_counter do |method, name, description|
expect(method).to eq "#{name}_counter"
expect(description).to start_with('Counter of')
end
end
end
end
...@@ -3,24 +3,30 @@ require 'spec_helper' ...@@ -3,24 +3,30 @@ require 'spec_helper'
describe Gitlab::Auth::BlockedUserTracker do describe Gitlab::Auth::BlockedUserTracker do
set(:user) { create(:user) } set(:user) { create(:user) }
describe '.log_if_user_blocked' do describe '#log_blocked_user_activity!' do
it 'does not log if user failed to login due to undefined reason' do it 'does not log if user failed to login due to undefined reason' do
expect_any_instance_of(SystemHooksService).not_to receive(:execute_hooks_for) expect_any_instance_of(SystemHooksService).not_to receive(:execute_hooks_for)
expect(described_class.log_if_user_blocked({})).to be_nil tracker = described_class.new({})
expect(tracker.user).to be_nil
expect(tracker.user_blocked?).to be_falsey
expect(tracker.log_blocked_user_activity!).to be_nil
end end
it 'gracefully handles malformed environment variables' do it 'gracefully handles malformed environment variables' do
env = { 'warden.options' => 'test' } tracker = described_class.new({ 'warden.options' => 'test' })
expect(described_class.log_if_user_blocked(env)).to be_nil expect(tracker.user).to be_nil
expect(tracker.user_blocked?).to be_falsey
expect(tracker.log_blocked_user_activity!).to be_nil
end end
context 'failed login due to blocked user' do context 'failed login due to blocked user' do
let(:base_env) { { 'warden.options' => { message: User::BLOCKED_MESSAGE } } } let(:base_env) { { 'warden.options' => { message: User::BLOCKED_MESSAGE } } }
let(:env) { base_env.merge(request_env) } let(:env) { base_env.merge(request_env) }
subject { described_class.log_if_user_blocked(env) } subject { described_class.new(env) }
before do before do
expect_any_instance_of(SystemHooksService).to receive(:execute_hooks_for).with(user, :failed_login) expect_any_instance_of(SystemHooksService).to receive(:execute_hooks_for).with(user, :failed_login)
...@@ -32,14 +38,17 @@ describe Gitlab::Auth::BlockedUserTracker do ...@@ -32,14 +38,17 @@ describe Gitlab::Auth::BlockedUserTracker do
it 'logs a blocked user' do it 'logs a blocked user' do
user.block! user.block!
expect(subject).to be_truthy expect(subject.user).to be_blocked
expect(subject.user_blocked?).to be true
expect(subject.log_blocked_user_activity!).to be_truthy
end end
it 'logs a blocked user by e-mail' do it 'logs a blocked user by e-mail' do
user.block! user.block!
env[described_class::ACTIVE_RECORD_REQUEST_PARAMS]['user']['login'] = user.email env[described_class::ACTIVE_RECORD_REQUEST_PARAMS]['user']['login'] = user.email
expect(subject).to be_truthy expect(subject.user).to be_blocked
expect(subject.log_blocked_user_activity!).to be_truthy
end end
end end
...@@ -49,13 +58,17 @@ describe Gitlab::Auth::BlockedUserTracker do ...@@ -49,13 +58,17 @@ describe Gitlab::Auth::BlockedUserTracker do
it 'logs a blocked user' do it 'logs a blocked user' do
user.block! user.block!
expect(subject).to be_truthy expect(subject.user).to be_blocked
expect(subject.user_blocked?).to be true
expect(subject.log_blocked_user_activity!).to be_truthy
end end
it 'logs a LDAP blocked user' do it 'logs a LDAP blocked user' do
user.ldap_block! user.ldap_block!
expect(subject).to be_truthy expect(subject.user).to be_blocked
expect(subject.user_blocked?).to be true
expect(subject.log_blocked_user_activity!).to be_truthy
end end
end end
end end
......
...@@ -3,7 +3,7 @@ require 'spec_helper' ...@@ -3,7 +3,7 @@ require 'spec_helper'
describe Gitlab::Kubernetes::ConfigMap do describe Gitlab::Kubernetes::ConfigMap do
let(:kubeclient) { double('kubernetes client') } let(:kubeclient) { double('kubernetes client') }
let(:application) { create(:clusters_applications_prometheus) } let(:application) { create(:clusters_applications_prometheus) }
let(:config_map) { described_class.new(application.name, application.files) } let(:config_map) { described_class.new(application.name, application.values) }
let(:namespace) { Gitlab::Kubernetes::Helm::NAMESPACE } let(:namespace) { Gitlab::Kubernetes::Helm::NAMESPACE }
let(:metadata) do let(:metadata) do
...@@ -15,7 +15,7 @@ describe Gitlab::Kubernetes::ConfigMap do ...@@ -15,7 +15,7 @@ describe Gitlab::Kubernetes::ConfigMap do
end end
describe '#generate' do describe '#generate' do
let(:resource) { ::Kubeclient::Resource.new(metadata: metadata, data: application.files) } let(:resource) { ::Kubeclient::Resource.new(metadata: metadata, data: { values: application.values }) }
subject { config_map.generate } subject { config_map.generate }
it 'should build a Kubeclient Resource' do it 'should build a Kubeclient Resource' do
......
...@@ -39,7 +39,7 @@ describe Gitlab::Kubernetes::Helm::Api do ...@@ -39,7 +39,7 @@ describe Gitlab::Kubernetes::Helm::Api do
end end
context 'with a ConfigMap' do context 'with a ConfigMap' do
let(:resource) { Gitlab::Kubernetes::ConfigMap.new(application.name, application.files).generate } let(:resource) { Gitlab::Kubernetes::ConfigMap.new(application.name, application.values).generate }
it 'creates a ConfigMap on kubeclient' do it 'creates a ConfigMap on kubeclient' do
expect(client).to receive(:create_config_map).with(resource).once expect(client).to receive(:create_config_map).with(resource).once
......
...@@ -2,25 +2,7 @@ require 'spec_helper' ...@@ -2,25 +2,7 @@ require 'spec_helper'
describe Gitlab::Kubernetes::Helm::BaseCommand do describe Gitlab::Kubernetes::Helm::BaseCommand do
let(:application) { create(:clusters_applications_helm) } let(:application) { create(:clusters_applications_helm) }
let(:test_class) do let(:base_command) { described_class.new(application.name) }
Class.new do
include Gitlab::Kubernetes::Helm::BaseCommand
def name
"test-class-name"
end
def files
{
some: 'value'
}
end
end
end
let(:base_command) do
test_class.new
end
subject { base_command } subject { base_command }
...@@ -36,9 +18,15 @@ describe Gitlab::Kubernetes::Helm::BaseCommand do ...@@ -36,9 +18,15 @@ describe Gitlab::Kubernetes::Helm::BaseCommand do
end end
end end
describe '#config_map?' do
subject { base_command.config_map? }
it { is_expected.to be_falsy }
end
describe '#pod_name' do describe '#pod_name' do
subject { base_command.pod_name } subject { base_command.pod_name }
it { is_expected.to eq('install-test-class-name') } it { is_expected.to eq('install-helm') }
end end
end end
require 'spec_helper'
describe Gitlab::Kubernetes::Helm::Certificate do
describe '.generate_root' do
subject { described_class.generate_root }
it 'should generate a root CA that expires a long way in the future' do
expect(subject.cert.not_after).to be > 999.years.from_now
end
end
describe '#issue' do
subject { described_class.generate_root.issue }
it 'should generate a cert that expires soon' do
expect(subject.cert.not_after).to be < 60.minutes.from_now
end
context 'passing in INFINITE_EXPIRY' do
subject { described_class.generate_root.issue(expires_in: described_class::INFINITE_EXPIRY) }
it 'should generate a cert that expires a long way in the future' do
expect(subject.cert.not_after).to be > 999.years.from_now
end
end
end
end
...@@ -2,9 +2,9 @@ require 'spec_helper' ...@@ -2,9 +2,9 @@ require 'spec_helper'
describe Gitlab::Kubernetes::Helm::InitCommand do describe Gitlab::Kubernetes::Helm::InitCommand do
let(:application) { create(:clusters_applications_helm) } let(:application) { create(:clusters_applications_helm) }
let(:commands) { 'helm init --tiller-tls --tiller-tls-verify --tls-ca-cert /data/helm/helm/config/ca.pem --tiller-tls-cert /data/helm/helm/config/cert.pem --tiller-tls-key /data/helm/helm/config/key.pem >/dev/null' } let(:commands) { 'helm init >/dev/null' }
subject { described_class.new(name: application.name, files: {}) } subject { described_class.new(application.name) }
it_behaves_like 'helm commands' it_behaves_like 'helm commands'
end end
require 'rails_helper' require 'rails_helper'
describe Gitlab::Kubernetes::Helm::InstallCommand do describe Gitlab::Kubernetes::Helm::InstallCommand do
let(:files) { { 'ca.pem': 'some file content' } } let(:application) { create(:clusters_applications_prometheus) }
let(:repository) { 'https://repository.example.com' } let(:namespace) { Gitlab::Kubernetes::Helm::NAMESPACE }
let(:version) { '1.2.3' } let(:install_command) { application.install_command }
let(:install_command) do
described_class.new(
name: 'app-name',
chart: 'chart-name',
files: files,
version: version, repository: repository
)
end
subject { install_command } subject { install_command }
it_behaves_like 'helm commands' do context 'for ingress' do
let(:commands) do let(:application) { create(:clusters_applications_ingress) }
<<~EOS
helm init --client-only >/dev/null it_behaves_like 'helm commands' do
helm repo add app-name https://repository.example.com let(:commands) do
helm install --tls --tls-ca-cert /data/helm/app-name/config/ca.pem --tls-cert /data/helm/app-name/config/cert.pem --tls-key /data/helm/app-name/config/key.pem chart-name --name app-name --version 1.2.3 --namespace gitlab-managed-apps -f /data/helm/app-name/config/values.yaml >/dev/null <<~EOS
EOS helm init --client-only >/dev/null
helm install #{application.chart} --name #{application.name} --namespace #{namespace} -f /data/helm/#{application.name}/config/values.yaml >/dev/null
EOS
end
end end
end end
context 'when there is no repository' do context 'for prometheus' do
let(:repository) { nil } let(:application) { create(:clusters_applications_prometheus) }
it_behaves_like 'helm commands' do it_behaves_like 'helm commands' do
let(:commands) do let(:commands) do
<<~EOS <<~EOS
helm init --client-only >/dev/null helm init --client-only >/dev/null
helm install --tls --tls-ca-cert /data/helm/app-name/config/ca.pem --tls-cert /data/helm/app-name/config/cert.pem --tls-key /data/helm/app-name/config/key.pem chart-name --name app-name --version 1.2.3 --namespace gitlab-managed-apps -f /data/helm/app-name/config/values.yaml >/dev/null helm install #{application.chart} --name #{application.name} --version #{application.version} --namespace #{namespace} -f /data/helm/#{application.name}/config/values.yaml >/dev/null
EOS EOS
end end
end end
end end
context 'when there is no ca.pem file' do context 'for runner' do
let(:files) { { 'file.txt': 'some content' } } let(:ci_runner) { create(:ci_runner) }
let(:application) { create(:clusters_applications_runner, runner: ci_runner) }
it_behaves_like 'helm commands' do it_behaves_like 'helm commands' do
let(:commands) do let(:commands) do
<<~EOS <<~EOS
helm init --client-only >/dev/null helm init --client-only >/dev/null
helm repo add app-name https://repository.example.com helm repo add #{application.name} #{application.repository}
helm install chart-name --name app-name --version 1.2.3 --namespace gitlab-managed-apps -f /data/helm/app-name/config/values.yaml >/dev/null helm install #{application.chart} --name #{application.name} --namespace #{namespace} -f /data/helm/#{application.name}/config/values.yaml >/dev/null
EOS EOS
end end
end end
end end
context 'when there is no version' do context 'for jupyter' do
let(:version) { nil } let(:application) { create(:clusters_applications_jupyter) }
it_behaves_like 'helm commands' do it_behaves_like 'helm commands' do
let(:commands) do let(:commands) do
<<~EOS <<~EOS
helm init --client-only >/dev/null helm init --client-only >/dev/null
helm repo add app-name https://repository.example.com helm repo add #{application.name} #{application.repository}
helm install --tls --tls-ca-cert /data/helm/app-name/config/ca.pem --tls-cert /data/helm/app-name/config/cert.pem --tls-key /data/helm/app-name/config/key.pem chart-name --name app-name --namespace gitlab-managed-apps -f /data/helm/app-name/config/values.yaml >/dev/null helm install #{application.chart} --name #{application.name} --namespace #{namespace} -f /data/helm/#{application.name}/config/values.yaml >/dev/null
EOS EOS
end end
end end
end end
describe '#config_map?' do
subject { install_command.config_map? }
it { is_expected.to be_truthy }
end
   describe '#config_map_resource' do
     let(:metadata) do
       {
-        name: "values-content-configuration-app-name",
-        namespace: 'gitlab-managed-apps',
-        labels: { name: "values-content-configuration-app-name" }
+        name: "values-content-configuration-#{application.name}",
+        namespace: namespace,
+        labels: { name: "values-content-configuration-#{application.name}" }
       }
     end

-    let(:resource) { ::Kubeclient::Resource.new(metadata: metadata, data: files) }
+    let(:resource) { ::Kubeclient::Resource.new(metadata: metadata, data: { values: application.values }) }

     subject { install_command.config_map_resource }
...
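Read together, the rewritten shared examples above expect the install step to emit a script like the one below, presumably assembled by GitLab's Helm install command object from the application's attributes. This is a minimal sketch only: the name, chart, and repository values are placeholders, not taken from the diff.

```ruby
# Placeholder values for illustration; the real ones come from the
# clusters application record interpolated in the specs above.
name       = 'runner'
chart      = 'gitlab/gitlab-runner'
repository = 'https://charts.gitlab.io'
namespace  = 'gitlab-managed-apps'

# Shape of the script the shared examples assert for an application
# that defines a repository but no explicit version.
expected_script = <<~EOS
  helm init --client-only >/dev/null
  helm repo add #{name} #{repository}
  helm install #{chart} --name #{name} --namespace #{namespace} -f /data/helm/#{name}/config/values.yaml >/dev/null
EOS

puts expected_script
```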
@@ -2,13 +2,14 @@ require 'rails_helper'

 describe Gitlab::Kubernetes::Helm::Pod do
   describe '#generate' do
-    let(:app) { create(:clusters_applications_prometheus) }
+    let(:cluster) { create(:cluster) }
+    let(:app) { create(:clusters_applications_prometheus, cluster: cluster) }
     let(:command) { app.install_command }
     let(:namespace) { Gitlab::Kubernetes::Helm::NAMESPACE }

     subject { described_class.new(command, namespace) }

-    context 'with a command' do
+    shared_examples 'helm pod' do
       it 'should generate a Kubeclient::Resource' do
         expect(subject.generate).to be_a_kind_of(Kubeclient::Resource)
       end
@@ -40,6 +41,10 @@ describe Gitlab::Kubernetes::Helm::Pod do
         spec = subject.generate.spec
         expect(spec.restartPolicy).to eq('Never')
       end
+    end
+
+    context 'with a install command' do
+      it_behaves_like 'helm pod'

       it 'should include volumes for the container' do
         container = subject.generate.spec.containers.first
@@ -55,8 +60,24 @@ describe Gitlab::Kubernetes::Helm::Pod do
       it 'should mount configMap specification in the volume' do
         volume = subject.generate.spec.volumes.first
         expect(volume.configMap['name']).to eq("values-content-configuration-#{app.name}")
-        expect(volume.configMap['items'].first['key']).to eq(:'values.yaml')
-        expect(volume.configMap['items'].first['path']).to eq(:'values.yaml')
+        expect(volume.configMap['items'].first['key']).to eq('values')
+        expect(volume.configMap['items'].first['path']).to eq('values.yaml')
+      end
+    end
+
+    context 'with a init command' do
+      let(:app) { create(:clusters_applications_helm, cluster: cluster) }
+
+      it_behaves_like 'helm pod'
+
+      it 'should not include volumeMounts inside the container' do
+        container = subject.generate.spec.containers.first
+        expect(container.volumeMounts).to be_nil
+      end
+
+      it 'should not a volume inside the specification' do
+        spec = subject.generate.spec
+        expect(spec.volumes).to be_nil
       end
     end
   end
...
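The key/path assertions above mean the Helm values now sit under a single `values` entry in the ConfigMap and are mounted into the pod as `values.yaml`, while init commands mount nothing. A rough sketch of the volume shape those assertions describe; only the `configMap` name and `items` fields come from the spec, the rest is illustrative.

```ruby
# Illustrative only: the volume section implied by the pod spec assertions.
app_name = 'prometheus' # placeholder application name

volume = {
  name: 'configuration-volume',                        # illustrative volume name
  configMap: {
    name: "values-content-configuration-#{app_name}",  # asserted in the spec
    items: [{ key: 'values', path: 'values.yaml' }]    # asserted in the spec
  }
}

puts volume.inspect
```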
@@ -29,7 +29,7 @@ describe Ci::BuildRunnerSession, model: true do
     it 'adds Authorization header if authorization is present' do
       subject.authorization = 'whatever'

-      expect(terminal_specification[:headers]).to include(Authorization: 'whatever')
+      expect(terminal_specification[:headers]).to include(Authorization: ['whatever'])
     end
   end
 end
...
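The only change in this spec is that the expected `Authorization` header value is now wrapped in an array rather than passed as a bare string. A trivial illustration of the expected shape; the hash literal itself is assumed, only the shape is asserted above.

```ruby
# Before the change the spec expected: { Authorization: 'whatever' }
headers = { Authorization: ['whatever'] }

puts headers[:Authorization].first # => "whatever"
```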
@@ -6,24 +6,13 @@ describe Clusters::Applications::Helm do
   describe '.installed' do
     subject { described_class.installed }

-    let!(:installed_cluster) { create(:clusters_applications_helm, :installed) }
+    let!(:cluster) { create(:clusters_applications_helm, :installed) }

     before do
       create(:clusters_applications_helm, :errored)
     end

-    it { is_expected.to contain_exactly(installed_cluster) }
+    it { is_expected.to contain_exactly(cluster) }
   end

-  describe '#issue_client_cert' do
-    let(:application) { create(:clusters_applications_helm) }
-
-    subject { application.issue_client_cert }
-
-    it 'returns a new cert' do
-      is_expected.to be_kind_of(Gitlab::Kubernetes::Helm::Certificate)
-      expect(subject.cert_string).not_to eq(application.ca_cert)
-      expect(subject.key_string).not_to eq(application.ca_key)
-    end
-  end

   describe '#install_command' do
@@ -36,16 +25,5 @@ describe Clusters::Applications::Helm do
     it 'should be initialized with 1 arguments' do
       expect(subject.name).to eq('helm')
     end
-
-    it 'should have cert files' do
-      expect(subject.files[:'ca.pem']).to be_present
-      expect(subject.files[:'ca.pem']).to eq(helm.ca_cert)
-
-      expect(subject.files[:'cert.pem']).to be_present
-      expect(subject.files[:'key.pem']).to be_present
-
-      cert = OpenSSL::X509::Certificate.new(subject.files[:'cert.pem'])
-      expect(cert.not_after).to be > 999.years.from_now
-    end
   end
 end
...
@@ -74,43 +74,18 @@ describe Clusters::Applications::Ingress do
       expect(subject.name).to eq('ingress')
       expect(subject.chart).to eq('stable/nginx-ingress')
       expect(subject.version).to be_nil
-      expect(subject.files).to eq(ingress.files)
+      expect(subject.values).to eq(ingress.values)
     end
   end

-  describe '#files' do
-    let(:application) { ingress }
-    subject { application.files }
-
-    let(:values) { subject[:'values.yaml'] }
-
-    it 'should include ingress valid keys in values' do
-      expect(values).to include('image')
-      expect(values).to include('repository')
-      expect(values).to include('stats')
-      expect(values).to include('podAnnotations')
-    end
-
-    context 'when the helm application does not have a ca_cert' do
-      before do
-        application.cluster.application_helm.ca_cert = nil
-      end
-
-      it 'should not include cert files' do
-        expect(subject[:'ca.pem']).not_to be_present
-        expect(subject[:'cert.pem']).not_to be_present
-        expect(subject[:'key.pem']).not_to be_present
-      end
-    end
-
-    it 'should include cert files' do
-      expect(subject[:'ca.pem']).to be_present
-      expect(subject[:'ca.pem']).to eq(application.cluster.application_helm.ca_cert)
-
-      expect(subject[:'cert.pem']).to be_present
-      expect(subject[:'key.pem']).to be_present
-
-      cert = OpenSSL::X509::Certificate.new(subject[:'cert.pem'])
-      expect(cert.not_after).to be < 60.minutes.from_now
+  describe '#values' do
+    subject { ingress.values }
+
+    it 'should include ingress valid keys' do
+      is_expected.to include('image')
+      is_expected.to include('repository')
+      is_expected.to include('stats')
+      is_expected.to include('podAnnotations')
     end
   end
 end
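Because `#values` now returns the values document itself (the old spec read it out of a `files` hash under `:'values.yaml'`), the matchers reduce to substring checks against that document. A hedged sketch with a made-up values snippet; the real ingress values file is not part of this diff.

```ruby
require 'yaml'

# Hypothetical values document, for illustration only.
values = <<~YAML
  controller:
    image:
      repository: quay.io/kubernetes-ingress-controller/nginx-ingress-controller
    stats:
      enabled: true
    podAnnotations:
      prometheus.io/scrape: "true"
YAML

# The spec-style checks above amount to plain substring matching:
puts %w[image repository stats podAnnotations].all? { |key| values.include?(key) } # => true
puts YAML.safe_load(values).keys.inspect # => ["controller"]
```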
...
@@ -38,46 +38,23 @@ describe Clusters::Applications::Jupyter do
       expect(subject.chart).to eq('jupyter/jupyterhub')
       expect(subject.version).to be_nil
       expect(subject.repository).to eq('https://jupyterhub.github.io/helm-chart/')
-      expect(subject.files).to eq(jupyter.files)
+      expect(subject.values).to eq(jupyter.values)
     end
   end

-  describe '#files' do
-    let(:application) { create(:clusters_applications_jupyter) }
-    subject { application.files }
-
-    let(:values) { subject[:'values.yaml'] }
-
-    it 'should include cert files' do
-      expect(subject[:'ca.pem']).to be_present
-      expect(subject[:'ca.pem']).to eq(application.cluster.application_helm.ca_cert)
-
-      expect(subject[:'cert.pem']).to be_present
-      expect(subject[:'key.pem']).to be_present
-
-      cert = OpenSSL::X509::Certificate.new(subject[:'cert.pem'])
-      expect(cert.not_after).to be < 60.minutes.from_now
-    end
-
-    context 'when the helm application does not have a ca_cert' do
-      before do
-        application.cluster.application_helm.ca_cert = nil
-      end
-
-      it 'should not include cert files' do
-        expect(subject[:'ca.pem']).not_to be_present
-        expect(subject[:'cert.pem']).not_to be_present
-        expect(subject[:'key.pem']).not_to be_present
-      end
-    end
+  describe '#values' do
+    let(:jupyter) { create(:clusters_applications_jupyter) }
+
+    subject { jupyter.values }

     it 'should include valid values' do
-      expect(values).to include('ingress')
-      expect(values).to include('hub')
-      expect(values).to include('rbac')
-      expect(values).to include('proxy')
-      expect(values).to include('auth')
-      expect(values).to match(/clientId: '?#{application.oauth_application.uid}/)
-      expect(values).to match(/callbackUrl: '?#{application.callback_url}/)
+      is_expected.to include('ingress')
+      is_expected.to include('hub')
+      is_expected.to include('rbac')
+      is_expected.to include('proxy')
+      is_expected.to include('auth')
+      is_expected.to include("clientId: #{jupyter.oauth_application.uid}")
+      is_expected.to include("callbackUrl: #{jupyter.callback_url}")
     end
   end
 end
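For Jupyter the values document also interpolates the OAuth client ID and callback URL, which is why the spec now checks the rendered string with a plain `include` rather than a regex `match`. A sketch with placeholder credentials; the real uid and callback URL come from the application's OAuth record and cluster configuration.

```ruby
oauth_uid    = 'abc123'                                         # placeholder
callback_url = 'https://jupyter.example.com/hub/oauth_callback' # placeholder

# Hypothetical fragment of the rendered values document.
values = <<~YAML
  auth:
    gitlab:
      clientId: #{oauth_uid}
      callbackUrl: #{callback_url}
YAML

puts values.include?("clientId: #{oauth_uid}")       # => true
puts values.include?("callbackUrl: #{callback_url}") # => true
```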
...
@@ -153,44 +153,21 @@ describe Clusters::Applications::Prometheus do
       expect(command.name).to eq('prometheus')
       expect(command.chart).to eq('stable/prometheus')
       expect(command.version).to eq('6.7.3')
-      expect(command.files).to eq(prometheus.files)
+      expect(command.values).to eq(prometheus.values)
     end
   end

-  describe '#files' do
-    let(:application) { create(:clusters_applications_prometheus) }
-    subject { application.files }
-
-    let(:values) { subject[:'values.yaml'] }
-
-    it 'should include cert files' do
-      expect(subject[:'ca.pem']).to be_present
-      expect(subject[:'ca.pem']).to eq(application.cluster.application_helm.ca_cert)
-
-      expect(subject[:'cert.pem']).to be_present
-      expect(subject[:'key.pem']).to be_present
-
-      cert = OpenSSL::X509::Certificate.new(subject[:'cert.pem'])
-      expect(cert.not_after).to be < 60.minutes.from_now
-    end
-
-    context 'when the helm application does not have a ca_cert' do
-      before do
-        application.cluster.application_helm.ca_cert = nil
-      end
-
-      it 'should not include cert files' do
-        expect(subject[:'ca.pem']).not_to be_present
-        expect(subject[:'cert.pem']).not_to be_present
-        expect(subject[:'key.pem']).not_to be_present
-      end
-    end
+  describe '#values' do
+    let(:prometheus) { create(:clusters_applications_prometheus) }
+
+    subject { prometheus.values }

     it 'should include prometheus valid values' do
-      expect(values).to include('alertmanager')
-      expect(values).to include('kubeStateMetrics')
-      expect(values).to include('nodeExporter')
-      expect(values).to include('pushgateway')
-      expect(values).to include('serverFiles')
+      is_expected.to include('alertmanager')
+      is_expected.to include('kubeStateMetrics')
+      is_expected.to include('nodeExporter')
+      is_expected.to include('pushgateway')
+      is_expected.to include('serverFiles')
     end
   end
 end