Commit b9cc1188 authored by Shinya Maeda

Merge branch 'live-trace-v2' into live-trace-v2-efficient-destroy-all

parents ddd6b21b 54695563
......@@ -51,7 +51,6 @@ gem 'omniauth-shibboleth', '~> 1.2.0'
gem 'omniauth-twitter', '~> 1.4'
gem 'omniauth_crowd', '~> 2.2.0'
gem 'omniauth-authentiq', '~> 0.3.1'
gem 'omniauth-jwt', '~> 0.0.2'
gem 'rack-oauth2', '~> 1.2.1'
gem 'jwt', '~> 1.5.6'
......@@ -415,7 +414,7 @@ end
# Gitaly GRPC client
gem 'gitaly-proto', '~> 0.97.0', require: 'gitaly'
gem 'grpc', '~> 1.10.0'
gem 'grpc', '~> 1.11.0'
# Locked until https://github.com/google/protobuf/issues/4210 is closed
gem 'google-protobuf', '= 3.5.1'
......
......@@ -374,7 +374,7 @@ GEM
rake
grape_logging (1.7.0)
grape
grpc (1.10.0)
grpc (1.11.0)
google-protobuf (~> 3.1)
googleapis-common-protos-types (~> 1.0.0)
googleauth (>= 0.5.1, < 0.7)
......@@ -555,9 +555,6 @@ GEM
jwt (>= 1.5)
omniauth (>= 1.1.1)
omniauth-oauth2 (>= 1.5)
omniauth-jwt (0.0.2)
jwt
omniauth (~> 1.1)
omniauth-kerberos (0.3.0)
omniauth-multipassword
timfel-krb5-auth (~> 0.8)
......@@ -1076,7 +1073,7 @@ DEPENDENCIES
grape-entity (~> 0.6.0)
grape-route-helpers (~> 2.1.0)
grape_logging (~> 1.7)
grpc (~> 1.10.0)
grpc (~> 1.11.0)
haml_lint (~> 0.26.0)
hamlit (~> 2.6.1)
hashie-forbidden_attributes
......@@ -1117,7 +1114,6 @@ DEPENDENCIES
omniauth-github (~> 1.1.1)
omniauth-gitlab (~> 1.0.2)
omniauth-google-oauth2 (~> 0.5.3)
omniauth-jwt (~> 0.0.2)
omniauth-kerberos (~> 0.3.0)
omniauth-oauth2-generic (~> 0.2.2)
omniauth-saml (~> 1.10)
......
......@@ -86,7 +86,7 @@ export default {
v-html="resolveSvg"
></span>
</span>
<span class=".line-resolve-text">
<span class="line-resolve-text">
{{ resolvedDiscussionCount }}/{{ discussionCount }} {{ countText }} resolved
</span>
</div>
......
import $ from 'jquery';
import _ from 'underscore';
function isValidProjectId(id) {
return id > 0;
......@@ -43,7 +44,7 @@ class SidebarMoveIssue {
renderRow: project => `
<li>
<a href="#" class="js-move-issue-dropdown-item">
${project.name_with_namespace}
${_.escape(project.name_with_namespace)}
</a>
</li>
`,
......
......@@ -772,7 +772,3 @@ ul.notes {
height: auto;
}
}
.line-resolve-text {
vertical-align: middle;
}
......@@ -41,7 +41,7 @@ module DropdownsHelper
def dropdown_toggle(toggle_text, data_attr, options = {})
default_label = data_attr[:default_label]
content_tag(:button, class: "dropdown-menu-toggle #{options[:toggle_class] if options.key?(:toggle_class)}", id: (options[:id] if options.key?(:id)), type: "button", data: data_attr) do
content_tag(:button, disabled: options[:disabled], class: "dropdown-menu-toggle #{options[:toggle_class] if options.key?(:toggle_class)}", id: (options[:id] if options.key?(:id)), type: "button", data: data_attr) do
output = content_tag(:span, toggle_text, class: "dropdown-toggle-text #{'is-default' if toggle_text == default_label}")
output << icon('chevron-down')
output.html_safe
......
......@@ -37,20 +37,20 @@ class GroupMember < Member
private
def send_invite
notification_service.invite_group_member(self, @raw_invite_token)
run_after_commit_or_now { notification_service.invite_group_member(self, @raw_invite_token) }
super
end
def post_create_hook
notification_service.new_group_member(self)
run_after_commit_or_now { notification_service.new_group_member(self) }
super
end
def post_update_hook
if access_level_changed?
notification_service.update_group_member(self)
run_after_commit { notification_service.update_group_member(self) }
end
super
......
......@@ -92,7 +92,7 @@ class ProjectMember < Member
private
def send_invite
notification_service.invite_project_member(self, @raw_invite_token)
run_after_commit_or_now { notification_service.invite_project_member(self, @raw_invite_token) }
super
end
......@@ -100,7 +100,7 @@ class ProjectMember < Member
def post_create_hook
unless owner?
event_service.join_project(self.project, self.user)
notification_service.new_project_member(self)
run_after_commit_or_now { notification_service.new_project_member(self) }
end
super
......@@ -108,7 +108,7 @@ class ProjectMember < Member
def post_update_hook
if access_level_changed?
notification_service.update_project_member(self)
run_after_commit { notification_service.update_project_member(self) }
end
super
......
......@@ -26,7 +26,7 @@ module Issues
issue.update(closed_by: current_user)
event_service.close_issue(issue, current_user)
create_note(issue, commit) if system_note
notification_service.close_issue(issue, current_user) if notifications
notification_service.async.close_issue(issue, current_user) if notifications
todo_service.close_issue(issue, current_user)
execute_hooks(issue, 'close')
invalidate_cache_counts(issue, users: issue.assignees)
......
......@@ -139,7 +139,7 @@ module Issues
end
def notify_participants
notification_service.issue_moved(@old_issue, @new_issue, @current_user)
notification_service.async.issue_moved(@old_issue, @new_issue, @current_user)
end
end
end
......@@ -6,7 +6,7 @@ module Issues
if issue.reopen
event_service.reopen_issue(issue, current_user)
create_note(issue, 'reopened')
notification_service.reopen_issue(issue, current_user)
notification_service.async.reopen_issue(issue, current_user)
execute_hooks(issue, 'reopen')
invalidate_cache_counts(issue, users: issue.assignees)
issue.update_project_counter_caches
......
......@@ -30,7 +30,7 @@ module Issues
if issue.assignees != old_assignees
create_assignee_note(issue, old_assignees)
notification_service.reassigned_issue(issue, current_user, old_assignees)
notification_service.async.reassigned_issue(issue, current_user, old_assignees)
todo_service.reassigned_issue(issue, current_user, old_assignees)
end
......@@ -41,13 +41,13 @@ module Issues
added_labels = issue.labels - old_labels
if added_labels.present?
notification_service.relabeled_issue(issue, added_labels, current_user)
notification_service.async.relabeled_issue(issue, added_labels, current_user)
end
added_mentions = issue.mentioned_users - old_mentioned_users
if added_mentions.present?
notification_service.new_mentions_in_issue(issue, added_mentions, current_user)
notification_service.async.new_mentions_in_issue(issue, added_mentions, current_user)
end
end
......
......@@ -10,7 +10,7 @@ module MergeRequests
if merge_request.close
create_event(merge_request)
create_note(merge_request)
notification_service.close_mr(merge_request, current_user)
notification_service.async.close_mr(merge_request, current_user)
todo_service.close_merge_request(merge_request, current_user)
execute_hooks(merge_request, 'close')
invalidate_cache_counts(merge_request, users: merge_request.assignees)
......
......@@ -6,7 +6,7 @@ module MergeRequests
if merge_request.reopen
create_event(merge_request)
create_note(merge_request, 'reopened')
notification_service.reopen_mr(merge_request, current_user)
notification_service.async.reopen_mr(merge_request, current_user)
execute_hooks(merge_request, 'reopen')
merge_request.reload_diff(current_user)
merge_request.mark_as_unchecked
......
......@@ -4,7 +4,7 @@ module MergeRequests
return unless merge_request.discussions_resolved?
SystemNoteService.resolve_all_discussions(merge_request, project, current_user)
notification_service.resolve_all_discussions(merge_request, current_user)
notification_service.async.resolve_all_discussions(merge_request, current_user)
end
end
end
......@@ -21,6 +21,7 @@ module MergeRequests
update(merge_request)
end
# rubocop:disable Metrics/AbcSize
def handle_changes(merge_request, options)
old_associations = options.fetch(:old_associations, {})
old_labels = old_associations.fetch(:labels, [])
......@@ -42,8 +43,11 @@ module MergeRequests
end
if merge_request.previous_changes.include?('assignee_id')
old_assignee_id = merge_request.previous_changes['assignee_id'].first
old_assignee = User.find(old_assignee_id) if old_assignee_id
create_assignee_note(merge_request)
notification_service.reassigned_merge_request(merge_request, current_user)
notification_service.async.reassigned_merge_request(merge_request, current_user, old_assignee)
todo_service.reassigned_merge_request(merge_request, current_user)
end
......@@ -54,7 +58,7 @@ module MergeRequests
added_labels = merge_request.labels - old_labels
if added_labels.present?
notification_service.relabeled_merge_request(
notification_service.async.relabeled_merge_request(
merge_request,
added_labels,
current_user
......@@ -63,13 +67,14 @@ module MergeRequests
added_mentions = merge_request.mentioned_users - old_mentioned_users
if added_mentions.present?
notification_service.new_mentions_in_merge_request(
notification_service.async.new_mentions_in_merge_request(
merge_request,
added_mentions,
current_user
)
end
end
# rubocop:enable Metrics/AbcSize
def merge_from_quick_action(merge_request)
last_diff_sha = params.delete(:merge)
......
......@@ -7,7 +7,32 @@
# Ex.
# NotificationService.new.new_issue(issue, current_user)
#
# When calculating the recipients of a notification is expensive (for instance,
# in the new issue case), `#async` will make that calculation happen in Sidekiq
# instead:
#
# NotificationService.new.async.new_issue(issue, current_user)
#
class NotificationService
class Async
attr_reader :parent
delegate :respond_to_missing?, to: :parent
def initialize(parent)
@parent = parent
end
def method_missing(meth, *args)
return super unless parent.respond_to?(meth)
MailScheduler::NotificationServiceWorker.perform_async(meth.to_s, *args)
end
end
def async
@async ||= Async.new(self)
end
# Always notify user about ssh key added
# only if ssh key is not deploy key
#
......@@ -142,8 +167,23 @@ class NotificationService
# * merge_request assignee if their notification level is not Disabled
# * users with custom level checked with "reassign merge request"
#
def reassigned_merge_request(merge_request, current_user)
reassign_resource_email(merge_request, current_user, :reassigned_merge_request_email)
def reassigned_merge_request(merge_request, current_user, previous_assignee)
recipients = NotificationRecipientService.build_recipients(
merge_request,
current_user,
action: "reassign",
previous_assignee: previous_assignee
)
recipients.each do |recipient|
mailer.reassigned_merge_request_email(
recipient.user.id,
merge_request.id,
previous_assignee&.id,
current_user.id,
recipient.reason
).deliver_later
end
end
# When we add labels to a merge request we should send an email to:
......@@ -421,29 +461,6 @@ class NotificationService
end
end
def reassign_resource_email(target, current_user, method)
previous_assignee_id = previous_record(target, 'assignee_id')
previous_assignee = User.find_by(id: previous_assignee_id) if previous_assignee_id
recipients = NotificationRecipientService.build_recipients(
target,
current_user,
action: "reassign",
previous_assignee: previous_assignee
)
recipients.each do |recipient|
mailer.send(
method,
recipient.user.id,
target.id,
previous_assignee_id,
current_user.id,
recipient.reason
).deliver_later
end
end
def relabeled_resource_email(target, labels, current_user, method)
recipients = labels.flat_map { |l| l.subscribers(target.project) }.uniq
recipients = notifiable_users(
......@@ -471,14 +488,6 @@ class NotificationService
Notify
end
def previous_record(object, attribute)
return unless object && attribute
if object.previous_changes.include?(attribute)
object.previous_changes[attribute].first
end
end
private
def recipients_for_pages_domain(domain)
......
module Projects
class UpdatePagesService < BaseService
InvaildStateError = Class.new(StandardError)
InvalidStateError = Class.new(StandardError)
FailedToExtractError = Class.new(StandardError)
BLOCK_SIZE = 32.kilobytes
......@@ -21,8 +21,8 @@ module Projects
@status.enqueue!
@status.run!
raise InvaildStateError, 'missing pages artifacts' unless build.artifacts?
raise InvaildStateError, 'pages are outdated' unless latest?
raise InvalidStateError, 'missing pages artifacts' unless build.artifacts?
raise InvalidStateError, 'pages are outdated' unless latest?
# Create temporary directory in which we will extract the artifacts
FileUtils.mkdir_p(tmp_path)
......@@ -31,16 +31,16 @@ module Projects
# Check if we did extract public directory
archive_public_path = File.join(archive_path, 'public')
raise InvaildStateError, 'pages miss the public folder' unless Dir.exist?(archive_public_path)
raise InvaildStateError, 'pages are outdated' unless latest?
raise InvalidStateError, 'pages miss the public folder' unless Dir.exist?(archive_public_path)
raise InvalidStateError, 'pages are outdated' unless latest?
deploy_page!(archive_public_path)
success
end
rescue InvaildStateError => e
rescue InvalidStateError => e
error(e.message)
rescue => e
error(e.message, false)
error(e.message)
raise e
end
......@@ -48,17 +48,15 @@ module Projects
def success
@status.success
delete_artifact!
super
end
def error(message, allow_delete_artifact = true)
def error(message)
register_failure
log_error("Projects::UpdatePagesService: #{message}")
@status.allow_failure = !latest?
@status.description = message
@status.drop(:script_failure)
delete_artifact! if allow_delete_artifact
super
end
......@@ -77,18 +75,18 @@ module Projects
if artifacts.ends_with?('.zip')
extract_zip_archive!(temp_path)
else
raise InvaildStateError, 'unsupported artifacts format'
raise InvalidStateError, 'unsupported artifacts format'
end
end
def extract_zip_archive!(temp_path)
raise InvaildStateError, 'missing artifacts metadata' unless build.artifacts_metadata?
raise InvalidStateError, 'missing artifacts metadata' unless build.artifacts_metadata?
# Calculate page size after extract
public_entry = build.artifacts_metadata_entry(SITE_PATH, recursive: true)
if public_entry.total_size > max_size
raise InvaildStateError, "artifacts for pages are too large: #{public_entry.total_size}"
raise InvalidStateError, "artifacts for pages are too large: #{public_entry.total_size}"
end
# Requires UnZip 6.00 or later from Info-ZIP.
......@@ -162,11 +160,6 @@ module Projects
build.artifacts_file.path
end
def delete_artifact!
build.reload # Reload the object to avoid erasing artifacts based on stale state
build.erase_artifacts! unless build.has_expiring_artifacts?
end
def latest_sha
project.commit(build.ref).try(:sha).to_s
ensure
......
......@@ -20,11 +20,12 @@ class RepositoryArchiveCleanUpService
private
def clean_up_old_archives
run(%W(find #{path} -not -path #{path} -type f \( -name \*.tar -o -name \*.bz2 -o -name \*.tar.gz -o -name \*.zip \) -maxdepth 2 -mmin +#{mmin} -delete))
run(%W(find #{path} -mindepth 1 -maxdepth 3 -type f \( -name \*.tar -o -name \*.bz2 -o -name \*.tar.gz -o -name \*.zip \) -mmin +#{mmin} -delete))
end
def clean_up_empty_directories
run(%W(find #{path} -not -path #{path} -type d -empty -name \*.git -maxdepth 1 -delete))
run(%W(find #{path} -mindepth 2 -maxdepth 2 -type d -empty -delete))
run(%W(find #{path} -mindepth 1 -maxdepth 1 -type d -empty -delete))
end
def run(cmd)
......
......@@ -8,18 +8,17 @@
%li{ class: "branch-item js-branch-#{branch.name}" }
.branch-info
.branch-title
= link_to project_tree_path(@project, branch.name), class: 'item-title str-truncated-100 ref-name' do
= sprite_icon('fork', size: 12)
= sprite_icon('fork', size: 12)
= link_to project_tree_path(@project, branch.name), class: 'item-title str-truncated-100 ref-name prepend-left-8' do
= branch.name
&nbsp;
- if branch.name == @repository.root_ref
%span.label.label-primary default
%span.label.label-primary.prepend-left-5 default
- elsif merged
%span.label.label-info.has-tooltip{ title: s_('Branches|Merged into %{default_branch}') % { default_branch: @repository.root_ref } }
%span.label.label-info.has-tooltip.prepend-left-5{ title: s_('Branches|Merged into %{default_branch}') % { default_branch: @repository.root_ref } }
= s_('Branches|merged')
- if protected_branch?(@project, branch)
%span.label.label-success
%span.label.label-success.prepend-left-5
= s_('Branches|protected')
.block-truncated
......
- can_admin_project = can?(current_user, :admin_project, @project)
= render layout: 'projects/protected_branches/shared/branches_list', locals: { can_admin_project: can_admin_project } do
= render partial: 'projects/protected_branches/protected_branch', collection: @protected_branches, locals: { can_admin_project: can_admin_project}
= render partial: 'projects/protected_branches/protected_branch', collection: @protected_branches
......@@ -21,4 +21,4 @@
- if can_admin_project
%td
= link_to 'Unprotect', [@project.namespace.becomes(Namespace), @project, protected_branch], data: { confirm: 'Branch will be writable for developers. Are you sure?' }, method: :delete, class: 'btn btn-warning'
= link_to 'Unprotect', [@project.namespace.becomes(Namespace), @project, protected_branch], disabled: local_assigns[:disabled], data: { confirm: 'Branch will be writable for developers. Are you sure?' }, method: :delete, class: "btn btn-warning"
......@@ -41,6 +41,7 @@
- github_importer:github_import_stage_import_repository
- mail_scheduler:mail_scheduler_issue_due
- mail_scheduler:mail_scheduler_notification_service
- object_storage_upload
- object_storage:object_storage_background_move
......
......@@ -4,4 +4,8 @@ module MailSchedulerQueue
included do
queue_namespace :mail_scheduler
end
def notification_service
@notification_service ||= NotificationService.new
end
end
......@@ -4,8 +4,6 @@ module MailScheduler
include MailSchedulerQueue
def perform(project_id)
notification_service = NotificationService.new
Issue.opened.due_tomorrow.in_projects(project_id).preload(:project).find_each do |issue|
notification_service.issue_due(issue)
end
......
require 'active_job/arguments'
module MailScheduler
class NotificationServiceWorker
include ApplicationWorker
include MailSchedulerQueue
def perform(meth, *args)
deserialized_args = ActiveJob::Arguments.deserialize(args)
notification_service.public_send(meth, *deserialized_args) # rubocop:disable GitlabSecurity/PublicSend
rescue ActiveJob::DeserializationError
# Swallow the error: a record referenced in the arguments no longer exists,
# so there is nothing left to notify about.
end
def self.perform_async(*args)
super(*ActiveJob::Arguments.serialize(args))
end
end
end
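The worker round-trips its arguments through `ActiveJob::Arguments`, so ActiveRecord objects are replaced by GlobalID references on enqueue and looked up again on perform. A sketch of what that looks like (the `Key` record is illustrative):

```ruby
key = Key.find(1)

args = ActiveJob::Arguments.serialize(['new_key', key])
# => ["new_key", { "_aj_globalid" => "gid://gitlab/Key/1" }]

ActiveJob::Arguments.deserialize(args)
# => ["new_key", #<Key id: 1, ...>]
# Raises ActiveJob::DeserializationError when the record has since been
# deleted, which is why the worker rescues and drops such jobs.
```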
---
title: Don't automatically remove artifacts for pages jobs after pages:deploy has
run
merge_request: 18628
author:
type: fixed
---
title: Ensure member notifications are sent after the member's actual creation/update in the DB
merge_request: 18538
author:
type: fixed
---
title: Ports omniauth-jwt gem onto GitLab OmniAuth Strategies suite
merge_request: 18580
author:
type: fixed
---
title: Fix redirection error for applications using OpenID
merge_request: 18599
author:
type: fixed
---
title: Fixed inconsistent protected branch pill baseline
merge_request:
author:
type: fixed
---
title: Compute notification recipients in background jobs
merge_request:
author:
type: performance
---
title: Revert discussion counter height
merge_request: 18656
author: George Tsiolis
type: changed
---
title: Serve archive requests with the correct file in all cases
merge_request:
author:
type: security
---
title: Sanitizes user name to avoid XSS attacks
merge_request:
author:
type: security
......@@ -534,7 +534,7 @@ production: &base
# required_claims: ["name", "email"],
# info_map: { name: "name", email: "email" },
# auth_url: 'https://example.com/',
# valid_within: nil,
# valid_within: null,
# }
# }
# - { name: 'saml',
......@@ -825,7 +825,7 @@ test:
required_claims: ["name", "email"],
info_map: { name: "name", email: "email" },
auth_url: 'https://example.com/',
valid_within: nil,
valid_within: null,
}
}
- { name: 'auth0',
......
......@@ -104,5 +104,5 @@ Doorkeeper.configure do
# set to true if you want this to be allowed
# wildcard_redirect_uri false
base_controller 'ApplicationController'
base_controller '::Gitlab::BaseDoorkeeperController'
end
......@@ -25,5 +25,6 @@ end
module OmniAuth
module Strategies
autoload :Bitbucket, Rails.root.join('lib', 'omni_auth', 'strategies', 'bitbucket')
autoload :Jwt, Rails.root.join('lib', 'omni_auth', 'strategies', 'jwt')
end
end
......@@ -15,8 +15,8 @@ To understand what features you have access to, check the [GitLab subscriptions]
| General documentation | GitLab CI/CD docs |
| :----- | :----- |
| [User documentation](user/index.md) | [GitLab CI/CD](ci/README.md) |
| [Administrator documentation](administration/index.md) | [GitLab CI/CD quick start guide](ci/quick_start/README.md) |
| [User documentation](user/index.md) | [GitLab CI/CD quick start guide](ci/quick_start/README.md) |
| [Administrator documentation](administration/index.md) | [GitLab CI/CD examples](ci/examples/README.md) |
| [Contributor documentation](#contributor-documentation) | [Configuring `.gitlab-ci.yml`](ci/yaml/README.md) |
| [Getting started with GitLab](#getting-started-with-gitlab) | [Using Docker images](ci/docker/using_docker_images.md) |
| [API](api/README.md) | [Auto DevOps](topics/autodevops/index.md) |
......@@ -90,6 +90,7 @@ Manage your [repositories](user/project/repository/index.md) from the UI (user i
- [Create a file](user/project/repository/web_editor.md#create-a-file)
- [Upload a file](user/project/repository/web_editor.md#upload-a-file)
- [File templates](user/project/repository/web_editor.md#template-dropdowns)
- [Jupyter Notebook files](user/project/repository/index.md#jupyter-notebook-files)
- [Create a directory](user/project/repository/web_editor.md#create-a-directory)
- [Start a merge request](user/project/repository/web_editor.md#tips) (when committing via UI)
- [Branches](user/project/repository/branches/index.md)
......@@ -100,6 +101,14 @@ Manage your [repositories](user/project/repository/index.md) from the UI (user i
- [Commits](user/project/repository/index.md#commits)
- [Signing commits](user/project/repository/gpg_signed_commits/index.md): use GPG to sign your commits.
#### Merge Requests
- [Merge Requests](user/project/merge_requests/index.md)
- [Work In Progress "WIP" Merge Requests](user/project/merge_requests/work_in_progress_merge_requests.md)
- [Merge Request discussion resolution](user/discussions/index.md#moving-a-single-discussion-to-a-new-issue): Resolve discussions, move discussions in a merge request to an issue, only allow merge requests to be merged if all discussions are resolved.
- [Checkout merge requests locally](user/project/merge_requests/index.md#checkout-merge-requests-locally)
- [Cherry-pick](user/project/merge_requests/cherry_pick_changes.md)
#### Integrations
- [Project Services](user/project/integrations/project_services.md): Integrate a project with external services, such as CI and chat.
......@@ -113,18 +122,16 @@ Manage your [repositories](user/project/repository/index.md) from the UI (user i
### Verify
Spot errors sooner and shorten feedback cycles with built-in code review, code testing,
Code Quality, and Review Apps. Customize your approval workflow controls, automatically
test the quality of your code, and spin up a staging environment for every code change.
GitLab Continuous Integration is the most popular next generation testing system that
auto scales to run your tests faster.
Spot errors sooner, improve security and shorten feedback cycles with built-in
static code analysis, code testing, code quality, dependency checking and review
apps. Customize your approval workflow controls, automatically test the quality
of your code, and spin up a staging environment for every code change. GitLab
Continuous Integration is the most popular next generation testing system that
scales to run your tests faster.
- [Merge Requests](user/project/merge_requests/index.md)
- [Work In Progress Merge Requests](user/project/merge_requests/work_in_progress_merge_requests.md)
- [Merge Request discussion resolution](user/discussions/index.md#moving-a-single-discussion-to-a-new-issue): Resolve discussions, move discussions in a merge request to an issue, only allow merge requests to be merged if all discussions are resolved.
- [Checkout merge requests locally](user/project/merge_requests/index.md#checkout-merge-requests-locally)
- [Cherry-pick](user/project/merge_requests/cherry_pick_changes.md)
- [GitLab CI/CD](ci/README.md): Explore the features and capabilities of Continuous Integration, Continuous Delivery, and Continuous Deployment with GitLab.
- [Review Apps](ci/review_apps/index.md): Preview changes to your app right from a merge request.
- [Pipeline Graphs](ci/pipelines.md#pipeline-graphs)
### Package
......@@ -132,7 +139,6 @@ GitLab Container Registry gives you the enhanced security and access controls of
custom Docker images without 3rd party add-ons. Easily upload and download images
from GitLab CI/CD with full Git repository management integration.
- [GitLab CI/CD](ci/README.md): Explore the features and capabilities of Continuous Integration, Continuous Delivery, and Continuous Deployment with GitLab.
- [GitLab Container Registry](user/project/container_registry.md): Learn how to use GitLab's built-in Container Registry.
### Release
......@@ -141,9 +147,11 @@ Spend less time configuring your tools, and more time creating. Whether you’re
deploying to one server or thousands, build, test, and release your code
confidently and securely with GitLab’s built-in Continuous Delivery and Deployment.
- [GitLab Pages](user/project/pages/index.md): Build, test, and deploy a static site directly from GitLab.
- [Auto Deploy](topics/autodevops/index.md#auto-deploy): Configure GitLab CI for the deployment of your application.
- [Environments and deployments](ci/environments.md): With environments, you can control the continuous deployment of your software within GitLab.
- [GitLab Pages](user/project/pages/index.md): Build, test, and deploy a static site directly from GitLab.
- [Scheduled Pipelines](user/project/pipelines/schedules.md)
- [Protected Runners](ci/runners/README.md#protected-runners)
### Configure
......@@ -152,6 +160,9 @@ Auto Devops. Best practice templates get you started with minimal to zero
configuration. Then customize everything from buildpacks to CI/CD.
- [Auto DevOps](topics/autodevops/index.md)
- [Deployment of Helm, Ingress, and Prometheus on Kubernetes](user/project/clusters/index.md#installing-applications)
- [Protected secret variables](ci/variables/README.md#protected-secret-variables)
- [Easy creation of Kubernetes clusters on GKE](user/project/clusters/index.md#adding-and-creating-a-new-gke-cluster-via-gitlab)
### Monitor
......
......@@ -50,7 +50,7 @@ JWT will provide you with a secret key for you to use.
required_claims: ["name", "email"],
info_map: { name: "name", email: "email" },
auth_url: 'https://example.com/',
valid_within: nil,
valid_within: null,
}
}
```
......
......@@ -41,6 +41,7 @@ comments: false
- [Avoid modules with instance variables](module_with_instance_variables.md) if possible
- [How to dump production data to staging](db_dump.md)
- [Working with the GitHub importer](github_importer.md)
- [Working with Merge Request diffs](diffs.md)
## Performance guides
......
# Working with Merge Request diffs
Currently, we rely on several different sources to present merge request diffs. These include:
- Rugged gem
- Gitaly service
- Database (through `merge_request_diff_files`)
- Redis (cached highlighted diffs)
We're constantly moving Rugged calls to Gitaly, and the progress can be followed in the [Gitaly repo](https://gitlab.com/gitlab-org/gitaly).
## Architecture overview
When refreshing a Merge Request (pushing to a source branch, force-pushing to the target branch, or when the target branch now contains any commits from the MR),
we fetch the comparison information using `Gitlab::Git::Compare`, which fetches `base` and `head` data using Gitaly and diffs between them through
`Gitlab::Git::Diff.between` (which uses _Gitaly_ if it's enabled, otherwise _Rugged_).
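A minimal sketch of that flow, assuming the classes named above (method signatures are indicative and may differ between versions):

```ruby
# Compare base and head (served by Gitaly when enabled, Rugged otherwise),
# applying the collection limits described below.
compare = Gitlab::Git::Compare.new(repository, base_branch, head_sha)
diffs = compare.diffs(max_files: 100, max_lines: 5000)

diffs.each do |diff|
  persist_diff_file(diff) # hypothetical helper writing to merge_request_diff_files
end
```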
The diff fetching process _limits_ single file diff sizes and the overall size of the whole diff through a series of constant values. Raw diff files are
then persisted in the `merge_request_diff_files` table.
Even though diffs larger than 10 KB are collapsed (`Gitlab::Git::Diff::COLLAPSE_LIMIT`), we still keep them in Postgres. However, diff files over the _safety limits_
(see the [Diff limits section](#diff-limits)) are _not_ persisted.
To present diff information on the Merge Request diffs page, we:
1. Fetch all diff files from the `merge_request_diff_files` database table
2. Fetch the _old_ and _new_ file blobs in batch to:
1. Highlight old and new file content
2. Know which viewer it should use for each file (text, image, deleted, etc)
3. Know if the file content changed
4. Know if it was stored externally
5. Know if it had storage errors
3. If the diff file is cacheable (text-based), it's cached in Redis
using `Gitlab::Diff::FileCollection::MergeRequestDiff`
## Diff limits
As explained above, we limit single diff files and the size of the whole diff. There are scenarios where we collapse the diff file,
and cases where the diff file is not presented at all and the user is guided to the Blob view instead. Here we'll go into detail about
these limits.
### Diff collection limits
Limits that apply to the whole collection of diff files. The number of files, the number of lines, and the total file size are considered.
```ruby
Gitlab::Git::DiffCollection.collection_limits[:safe_max_files] = Gitlab::Git::DiffCollection::DEFAULT_LIMITS[:max_files] = 100
```
File diffs will be collapsed (but remain expandable) if 100 files have already been rendered.
```ruby
Gitlab::Git::DiffCollection.collection_limits[:safe_max_lines] = Gitlab::Git::DiffCollection::DEFAULT_LIMITS[:max_lines] = 5000
```
File diffs will be collapsed (but remain expandable) if 5000 lines have already been rendered.
```ruby
Gitlab::Git::DiffCollection.collection_limits[:safe_max_bytes] = Gitlab::Git::DiffCollection.collection_limits[:safe_max_files] * 5.kilobytes = 500.kilobytes
```
File diffs will be collapsed (but remain expandable) if 500 kilobytes have already been rendered.
```ruby
Gitlab::Git::DiffCollection.collection_limits[:max_files] = Commit::DIFF_HARD_LIMIT_FILES = 1000
```
No more files will be rendered at all if 1000 files have already been rendered.
```ruby
Gitlab::Git::DiffCollection.collection_limits[:max_lines] = Commit::DIFF_HARD_LIMIT_LINES = 50000
```
No more files will be rendered at all if 50,000 lines have already been rendered.
```ruby
Gitlab::Git::DiffCollection.collection_limits[:max_bytes] = Gitlab::Git::DiffCollection.collection_limits[:max_files] * 5.kilobytes = 5000.kilobytes
```
No more files will be rendered at all if 5 megabytes have already been rendered.
### Individual diff file limits
Limits that apply to each individual diff file in a collection. The file size and the number of lines are considered.
```ruby
Gitlab::Git::Diff::COLLAPSE_LIMIT = 10.kilobytes
```
A file diff will be collapsed (but remain expandable) if it is larger than 10 kilobytes.
```ruby
Gitlab::Git::Diff::SIZE_LIMIT = 100.kilobytes
```
A file diff will not be rendered if it's larger than 100 kilobytes.
```ruby
Commit::DIFF_SAFE_LINES = Gitlab::Git::DiffCollection::DEFAULT_LIMITS[:max_lines] = 5000
```
A file diff will be suppressed (technically different from collapsed, but it behaves the same and is expandable) if it has more than 5000 lines.
## Viewers
Diff viewers, which can be found in `models/diff_viewer/*`, are classes used to map metadata about each type of diff file. They hold information
such as whether the file is binary, which partial should be used to render it, and which file extensions the class accounts for.
`DiffViewer::Base` validates the _blobs_ (old and new versions) content, extension, and file type in order to check whether the file can be rendered.
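As an illustration, a viewer following this pattern might look roughly like the one below. The class attributes mirror the metadata described above; the concrete classes under `models/diff_viewer/*` are the authoritative reference.

```ruby
module DiffViewer
  # A hypothetical viewer for image diffs: binary content, rendered with a
  # dedicated partial, matched by common image extensions.
  class Image < Base
    include Rich

    self.partial_name = 'image'
    self.extensions = %w(png jpg jpeg gif)
    self.binary = true
  end
end
```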
......@@ -238,6 +238,7 @@ work.
The default environment scope is `*`, which means all jobs, regardless of their
environment, will use that cluster. Each scope can only be used by a single
cluster in a project, and a validation error will occur if otherwise.
Also, jobs that don't have an `environment` keyword set will not be able to access any cluster.
---
......
# GitLab Pages
With GitLab Pages you can host your website at no cost.
Your files live in a GitLab project's [repository](../repository/index.md),
from which you can deploy [static websites](#explore-gitlab-pages).
GitLab Pages supports all static site generators (SSGs).
With GitLab Pages it's easy to publish your project website. GitLab Pages is a hosting service for static websites, at no additional cost.
## Getting Started
Follow the steps below to get your website live. They shouldn't take more than
5 minutes to complete:
[Create a project from scratch](getting_started_part_two.md#create-a-project-from-scratch)
to get you started quickly, or,
alternatively, start from an existing project as follows:
- 1. [Fork](../../../gitlab-basics/fork-project.md#how-to-fork-a-project) an [example project](https://gitlab.com/pages)
- 2. Change a file to trigger a GitLab CI/CD pipeline
- 3. Visit your project's **Settings > Pages** to see your **website link**, and click on it. Bam! Your website is live.
- 1. [Fork](../../../gitlab-basics/fork-project.md#how-to-fork-a-project) an [example project](https://gitlab.com/pages):
by forking a project, you create a copy of the codebase you're forking from to start from a template instead of starting from scratch.
- 2. Change a file to trigger a GitLab CI/CD pipeline: GitLab CI/CD will build and deploy your site to GitLab Pages.
- 3. Visit your project's **Settings > Pages** to see your **website link**, and click on it. Bam! Your website is live! :)
_Further steps (optional):_
- 4. Remove the [fork relationship](getting_started_part_two.md#fork-a-project-to-get-started-from) (_You don't need the relationship unless you intend to contribute back to the example project you forked from_).
- 4. Remove the [fork relationship](getting_started_part_two.md#fork-a-project-to-get-started-from)
(_You don't need the relationship unless you intend to contribute back to the example project you forked from_).
- 5. Make it a [user/group website](getting_started_part_one.md#user-and-group-websites)
**Watch a video with the steps above: https://www.youtube.com/watch?v=TWqh9MtT4Bg**
......@@ -27,14 +26,23 @@ _Advanced options:_
- [Use a custom domain](getting_started_part_three.md#adding-your-custom-domain-to-gitlab-pages)
- Apply [SSL/TLS certification](getting_started_part_three.md#ssl-tls-certificates) to your custom domain
## Explore GitLab Pages
## How Does It Work?
With GitLab Pages you can create [static websites](getting_started_part_one.md#what-you-need-to-know-before-getting-started)
for your GitLab projects, groups, or user accounts. You can use any static
website generator: Jekyll, Middleman, Hexo, Hugo, Pelican, you name it!
for your GitLab projects, groups, or user accounts.
It supports plain static content, such as HTML, and **all** [static site generators (SSGs)](https://about.gitlab.com/2016/06/03/ssg-overview-gitlab-pages-part-1-dynamic-x-static/), such as Jekyll, Middleman, Hexo, Hugo, and Pelican.
Connect as many custom domains as you like and bring your own TLS certificate
to secure them.
Your files live in a project [repository](../repository/index.md) on GitLab.
[GitLab CI](../../../ci/README.md) picks up those files and makes them available at, typically,
`http://<username>.gitlab.io/<projectname>`. Please read through the docs on
[GitLab Pages domains](getting_started_part_one.md#gitlab-pages-domain) for more info.
## Explore GitLab Pages
Read the following tutorials to know more about:
- [Static websites and GitLab Pages domains](getting_started_part_one.md): Understand what is a static website, and how GitLab Pages default domains work
......
# This is a base controller for doorkeeper.
# It adds the `can?` helper used in the views.
module Gitlab
class BaseDoorkeeperController < ActionController::Base
include Gitlab::Allowable
helper_method :can?
end
end
......@@ -12,7 +12,7 @@ module Gitlab
# class.
#
class RemoteRepository
attr_reader :path, :relative_path, :gitaly_repository
attr_reader :relative_path, :gitaly_repository
def initialize(repository)
@relative_path = repository.relative_path
......@@ -21,7 +21,6 @@ module Gitlab
# These instance variables will not be available in gitaly-ruby, where
# we have no disk access to this repository.
@repository = repository
@path = repository.path
end
def empty?
......@@ -69,6 +68,10 @@ module Gitlab
env
end
def path
@repository.path
end
private
# Must return an object that responds to 'address' and 'storage'.
......
......@@ -391,18 +391,6 @@ module Gitlab
nil
end
def archive_prefix(ref, sha, append_sha:)
append_sha = (ref != sha) if append_sha.nil?
project_name = self.name.chomp('.git')
formatted_ref = ref.tr('/', '-')
prefix_segments = [project_name, formatted_ref]
prefix_segments << sha if append_sha
prefix_segments.join('-')
end
def archive_metadata(ref, storage_path, format = "tar.gz", append_sha:)
ref ||= root_ref
commit = Gitlab::Git::Commit.find(self, ref)
......@@ -413,12 +401,44 @@ module Gitlab
{
'RepoPath' => path,
'ArchivePrefix' => prefix,
'ArchivePath' => archive_file_path(prefix, storage_path, format),
'ArchivePath' => archive_file_path(storage_path, commit.id, prefix, format),
'CommitId' => commit.id
}
end
def archive_file_path(name, storage_path, format = "tar.gz")
# This is both the filename of the archive (missing the extension) and the
# name of the top-level member of the archive under which all files go
#
# FIXME: The generated prefix is incorrect for projects with hashed
# storage enabled
def archive_prefix(ref, sha, append_sha:)
append_sha = (ref != sha) if append_sha.nil?
project_name = self.name.chomp('.git')
formatted_ref = ref.tr('/', '-')
prefix_segments = [project_name, formatted_ref]
prefix_segments << sha if append_sha
prefix_segments.join('-')
end
private :archive_prefix
# The full path on disk where the archive should be stored. This is used
# to cache the archive between requests.
#
# The path is a global namespace, so needs to be globally unique. This is
# achieved by including `gl_repository` in the path.
#
# Archives relating to a particular ref when the SHA is not present in the
# filename must be invalidated when the ref is updated to point to a new
# SHA. This is achieved by including the SHA in the path.
#
# As this is a full path on disk, it is not "cloud native". This should
# be resolved by either removing the cache, or moving the implementation
# into Gitaly and removing the ArchivePath parameter from the git-archive
# senddata response.
def archive_file_path(storage_path, sha, name, format = "tar.gz")
# Build file path
return nil unless name
......@@ -436,8 +456,9 @@ module Gitlab
end
file_name = "#{name}.#{extension}"
File.join(storage_path, self.name, file_name)
File.join(storage_path, self.gl_repository, sha, file_name)
end
private :archive_file_path
# Return repo size in megabytes
def size
......@@ -1179,6 +1200,8 @@ module Gitlab
if is_enabled
gitaly_fetch_ref(source_repository, source_ref: source_ref, target_ref: target_ref)
else
# When removing this code, also remove source_repository#path
# to remove deprecated method calls
local_fetch_ref(source_repository.path, source_ref: source_ref, target_ref: target_ref)
end
end
......
......@@ -142,7 +142,7 @@ module Gitlab
:repository_service,
:is_rebase_in_progress,
request,
timeout: GitalyClient.default_timeout
timeout: GitalyClient.fast_timeout
)
response.in_progress
......@@ -159,7 +159,7 @@ module Gitlab
:repository_service,
:is_squash_in_progress,
request,
timeout: GitalyClient.default_timeout
timeout: GitalyClient.fast_timeout
)
response.in_progress
......
require 'omniauth'
require 'jwt'
module OmniAuth
module Strategies
class JWT
ClaimInvalid = Class.new(StandardError)
include OmniAuth::Strategy
args [:secret]
option :secret, nil
option :algorithm, 'HS256'
option :uid_claim, 'email'
option :required_claims, %w(name email)
option :info_map, { name: "name", email: "email" }
option :auth_url, nil
option :valid_within, nil
uid { decoded[options.uid_claim] }
extra do
{ raw_info: decoded }
end
info do
options.info_map.each_with_object({}) do |(k, v), h|
h[k.to_s] = decoded[v.to_s]
end
end
def request_phase
redirect options.auth_url
end
def decoded
@decoded ||= ::JWT.decode(request.params['jwt'], options.secret, options.algorithm).first
(options.required_claims || []).each do |field|
raise ClaimInvalid, "Missing required '#{field}' claim" unless @decoded.key?(field.to_s)
end
raise ClaimInvalid, "Missing required 'iat' claim" if options.valid_within && !@decoded["iat"]
if options.valid_within && (Time.now.to_i - @decoded["iat"]).abs > options.valid_within
raise ClaimInvalid, "'iat' timestamp claim is too skewed from present"
end
@decoded
end
def callback_phase
super
rescue ClaimInvalid => e
fail! :claim_invalid, e
end
end
class Jwt < JWT; end
end
end
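For reference, the strategy expects a token like the one below in `request.params['jwt']` (a sketch using the `jwt` gem; the secret and claims are illustrative):

```ruby
require 'jwt'

payload = { name: 'user_example', email: 'user@example.com', iat: Time.now.to_i }
token = JWT.encode(payload, 'shared-secret', 'HS256')

# This is what the strategy's #decoded method does with the incoming token:
JWT.decode(token, 'shared-secret', true, algorithm: 'HS256').first
# => the original payload as a string-keyed hash
```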
require 'spec_helper'
feature 'Groups > Members > Manage access requests' do
let(:user) { create(:user) }
let(:owner) { create(:user) }
let(:group) { create(:group, :public, :access_requestable) }
background do
group.request_access(user)
group.add_owner(owner)
sign_in(owner)
end
scenario 'owner can see access requests' do
visit group_group_members_path(group)
expect_visible_access_request(group, user)
end
scenario 'owner can grant access' do
visit group_group_members_path(group)
expect_visible_access_request(group, user)
perform_enqueued_jobs { click_on 'Grant access' }
expect(ActionMailer::Base.deliveries.last.to).to eq [user.notification_email]
expect(ActionMailer::Base.deliveries.last.subject).to match "Access to the #{group.name} group was granted"
end
scenario 'owner can deny access' do
visit group_group_members_path(group)
expect_visible_access_request(group, user)
perform_enqueued_jobs { click_on 'Deny access' }
expect(ActionMailer::Base.deliveries.last.to).to eq [user.notification_email]
expect(ActionMailer::Base.deliveries.last.subject).to match "Access to the #{group.name} group was denied"
end
def expect_visible_access_request(group, user)
expect(group.requesters.exists?(user_id: user)).to be_truthy
expect(page).to have_content "Users requesting access to #{group.name} 1"
expect(page).to have_content user.name
end
end
require 'spec_helper'
feature 'Groups > Members > Master manages access requests' do
it_behaves_like 'Master manages access requests' do
let(:entity) { create(:group, :public, :access_requestable) }
let(:members_page_path) { group_group_members_path(entity) }
end
end
require 'spec_helper'
feature 'Projects > Members > Master manages access requests' do
let(:user) { create(:user) }
let(:master) { create(:user) }
let(:project) { create(:project, :public, :access_requestable) }
background do
project.request_access(user)
project.add_master(master)
sign_in(master)
end
scenario 'master can see access requests' do
visit project_project_members_path(project)
expect_visible_access_request(project, user)
end
scenario 'master can grant access' do
visit project_project_members_path(project)
expect_visible_access_request(project, user)
perform_enqueued_jobs { click_on 'Grant access' }
expect(ActionMailer::Base.deliveries.last.to).to eq [user.notification_email]
expect(ActionMailer::Base.deliveries.last.subject).to match "Access to the #{project.full_name} project was granted"
end
scenario 'master can deny access' do
visit project_project_members_path(project)
expect_visible_access_request(project, user)
perform_enqueued_jobs { click_on 'Deny access' }
expect(ActionMailer::Base.deliveries.last.to).to eq [user.notification_email]
expect(ActionMailer::Base.deliveries.last.subject).to match "Access to the #{project.full_name} project was denied"
end
def expect_visible_access_request(project, user)
expect(project.requesters.exists?(user_id: user)).to be_truthy
expect(page).to have_content "Users requesting access to #{project.name} 1"
expect(page).to have_content user.name
it_behaves_like 'Master manages access requests' do
let(:entity) { create(:project, :public, :access_requestable) }
let(:members_page_path) { project_project_members_path(entity) }
end
end
......@@ -138,7 +138,7 @@ const RESPONSE_MAP = {
},
{
id: 20,
name_with_namespace: 'foo / bar',
name_with_namespace: '<img src=x onerror=alert(document.domain)> foo / bar',
},
],
},
......
......@@ -69,6 +69,15 @@ describe('SidebarMoveIssue', function () {
expect($.fn.glDropdown).toHaveBeenCalled();
});
it('escapes html from project name', (done) => {
this.$toggleButton.dropdown('toggle');
setTimeout(() => {
expect(this.$content.find('.js-move-issue-dropdown-item')[1].innerHTML.trim()).toEqual('&lt;img src=x onerror=alert(document.domain)&gt; foo / bar');
done();
});
});
});
describe('onConfirmClicked', () => {
......
......@@ -234,59 +234,72 @@ describe Gitlab::Git::Repository, seed_helper: true do
it_behaves_like 'wrapping gRPC errors', Gitlab::GitalyClient::RefService, :tag_names
end
shared_examples 'archive check' do |extension|
it { expect(metadata['ArchivePath']).to match(%r{tmp/gitlab-git-test.git/gitlab-git-test-master-#{SeedRepo::LastCommit::ID}}) }
it { expect(metadata['ArchivePath']).to end_with extension }
end
describe '#archive_metadata' do
let(:storage_path) { '/tmp' }
let(:cache_key) { File.join(repository.gl_repository, SeedRepo::LastCommit::ID) }
describe '#archive_prefix' do
let(:project_name) { 'project-name'}
let(:append_sha) { true }
let(:ref) { 'master' }
let(:format) { nil }
before do
expect(repository).to receive(:name).once.and_return(project_name)
end
let(:expected_extension) { 'tar.gz' }
let(:expected_filename) { "#{expected_prefix}.#{expected_extension}" }
let(:expected_path) { File.join(storage_path, cache_key, expected_filename) }
let(:expected_prefix) { "gitlab-git-test-#{ref}-#{SeedRepo::LastCommit::ID}" }
it 'returns parameterised string for a ref containing slashes' do
prefix = repository.archive_prefix('test/branch', 'SHA', append_sha: nil)
subject(:metadata) { repository.archive_metadata(ref, storage_path, format, append_sha: append_sha) }
expect(prefix).to eq("#{project_name}-test-branch-SHA")
it 'sets RepoPath to the repository path' do
expect(metadata['RepoPath']).to eq(repository.path)
end
it 'returns correct string for a ref containing dots' do
prefix = repository.archive_prefix('test.branch', 'SHA', append_sha: nil)
expect(prefix).to eq("#{project_name}-test.branch-SHA")
it 'sets CommitId to the commit SHA' do
expect(metadata['CommitId']).to eq(SeedRepo::LastCommit::ID)
end
it 'returns string with sha when append_sha is false' do
prefix = repository.archive_prefix('test.branch', 'SHA', append_sha: false)
expect(prefix).to eq("#{project_name}-test.branch")
it 'sets ArchivePrefix to the expected prefix' do
expect(metadata['ArchivePrefix']).to eq(expected_prefix)
end
end
describe '#archive' do
let(:metadata) { repository.archive_metadata('master', '/tmp', append_sha: true) }
it 'sets ArchivePath to the expected globally-unique path' do
# This is really important from a security perspective. Think carefully
# before changing it: https://gitlab.com/gitlab-org/gitlab-ce/issues/45689
expect(expected_path).to include(File.join(repository.gl_repository, SeedRepo::LastCommit::ID))
it_should_behave_like 'archive check', '.tar.gz'
end
describe '#archive_zip' do
let(:metadata) { repository.archive_metadata('master', '/tmp', 'zip', append_sha: true) }
expect(metadata['ArchivePath']).to eq(expected_path)
end
it_should_behave_like 'archive check', '.zip'
end
context 'append_sha varies archive path and filename' do
where(:append_sha, :ref, :expected_prefix) do
sha = SeedRepo::LastCommit::ID
describe '#archive_bz2' do
let(:metadata) { repository.archive_metadata('master', '/tmp', 'tbz2', append_sha: true) }
true | 'master' | "gitlab-git-test-master-#{sha}"
true | sha | "gitlab-git-test-#{sha}-#{sha}"
false | 'master' | "gitlab-git-test-master"
false | sha | "gitlab-git-test-#{sha}"
nil | 'master' | "gitlab-git-test-master-#{sha}"
nil | sha | "gitlab-git-test-#{sha}"
end
it_should_behave_like 'archive check', '.tar.bz2'
end
with_them do
it { expect(metadata['ArchivePrefix']).to eq(expected_prefix) }
it { expect(metadata['ArchivePath']).to eq(expected_path) }
end
end
describe '#archive_fallback' do
let(:metadata) { repository.archive_metadata('master', '/tmp', 'madeup', append_sha: true) }
context 'format varies archive path and filename' do
where(:format, :expected_extension) do
nil | 'tar.gz'
'madeup' | 'tar.gz'
'tbz2' | 'tar.bz2'
'zip' | 'zip'
end
it_should_behave_like 'archive check', '.tar.gz'
with_them do
it { expect(metadata['ArchivePrefix']).to eq(expected_prefix) }
it { expect(metadata['ArchivePath']).to eq(expected_path) }
end
end
end
describe '#size' do
......
require 'spec_helper'
describe OmniAuth::Strategies::Jwt do
include Rack::Test::Methods
include DeviseHelpers
context '.decoded' do
let(:strategy) { described_class.new({}) }
let(:timestamp) { Time.now.to_i }
let(:jwt_config) { Devise.omniauth_configs[:jwt] }
let(:key) { JWT.encode(claims, jwt_config.strategy.secret) }
let(:claims) do
{
id: 123,
name: "user_example",
email: "user@example.com",
iat: timestamp
}
end
before do
allow_any_instance_of(OmniAuth::Strategy).to receive(:options).and_return(jwt_config.strategy)
allow_any_instance_of(Rack::Request).to receive(:params).and_return({ 'jwt' => key })
end
it 'decodes the user information' do
result = strategy.decoded
expect(result["id"]).to eq(123)
expect(result["name"]).to eq("user_example")
expect(result["email"]).to eq("user@example.com")
expect(result["iat"]).to eq(timestamp)
end
context 'required claims is missing' do
let(:claims) do
{
id: 123,
email: "user@example.com",
iat: timestamp
}
end
it 'raises error' do
expect { strategy.decoded }.to raise_error(OmniAuth::Strategies::JWT::ClaimInvalid)
end
end
context 'when valid_within is specified but iat attribute is missing in response' do
let(:claims) do
{
id: 123,
name: "user_example",
email: "user@example.com"
}
end
before do
jwt_config.strategy.valid_within = Time.now.to_i
end
it 'raises error' do
expect { strategy.decoded }.to raise_error(OmniAuth::Strategies::JWT::ClaimInvalid)
end
end
context 'when timestamp claim is too skewed from present' do
let(:claims) do
{
id: 123,
name: "user_example",
email: "user@example.com",
iat: timestamp - 10.minutes.to_i
}
end
before do
jwt_config.strategy.valid_within = 2.seconds
end
it 'raises error' do
expect { strategy.decoded }.to raise_error(OmniAuth::Strategies::JWT::ClaimInvalid)
end
end
end
end
......@@ -28,52 +28,12 @@ describe GroupMember do
end
end
describe 'notifications' do
describe "#after_create" do
it "sends email to user" do
membership = build(:group_member)
it_behaves_like 'members notifications', :group
allow(membership).to receive(:notification_service)
.and_return(double('NotificationService').as_null_object)
expect(membership).to receive(:notification_service)
describe '#real_source_type' do
subject { create(:group_member).real_source_type }
membership.save
end
end
describe "#after_update" do
before do
@group_member = create :group_member
allow(@group_member).to receive(:notification_service)
.and_return(double('NotificationService').as_null_object)
end
it "sends email to user" do
expect(@group_member).to receive(:notification_service)
@group_member.update_attribute(:access_level, GroupMember::MASTER)
end
it "does not send an email when the access level has not changed" do
expect(@group_member).not_to receive(:notification_service)
@group_member.update_attribute(:access_level, GroupMember::OWNER)
end
end
describe '#after_accept_request' do
it 'calls NotificationService.accept_group_access_request' do
member = create(:group_member, user: build(:user), requested_at: Time.now)
expect_any_instance_of(NotificationService).to receive(:new_group_member)
member.__send__(:after_accept_request)
end
end
describe '#real_source_type' do
subject { create(:group_member).real_source_type }
it { is_expected.to eq 'Group' }
end
it { is_expected.to eq 'Group' }
end
describe '#update_two_factor_requirement' do
......
......@@ -123,15 +123,5 @@ describe ProjectMember do
it { expect(@project_2.users).to be_empty }
end
describe 'notifications' do
describe '#after_accept_request' do
it 'calls NotificationService.new_project_member' do
member = create(:project_member, user: create(:user), requested_at: Time.now)
expect_any_instance_of(NotificationService).to receive(:new_project_member)
member.__send__(:after_accept_request)
end
end
end
it_behaves_like 'members notifications', :project
end
......@@ -153,4 +153,13 @@ describe 'OpenID Connect requests' do
end
end
end
context 'OpenID configuration information' do
it 'correctly returns the configuration' do
get '/.well-known/openid-configuration'
expect(response).to have_gitlab_http_status(200)
expect(json_response).to have_key('issuer')
end
end
end
......@@ -96,6 +96,37 @@ describe NotificationService, :mailer do
it_should_behave_like 'participating by assignee notification'
end
describe '#async' do
let(:async) { notification.async }
set(:key) { create(:personal_key) }
it 'returns an Async object with the correct parent' do
expect(async).to be_a(described_class::Async)
expect(async.parent).to eq(notification)
end
context 'when receiving a public method' do
it 'schedules a MailScheduler::NotificationServiceWorker' do
expect(MailScheduler::NotificationServiceWorker)
.to receive(:perform_async).with('new_key', key)
async.new_key(key)
end
end
context 'when receiving a private method' do
it 'raises NoMethodError' do
expect { async.notifiable?(key) }.to raise_error(NoMethodError)
end
end
context 'when receiving a non-existent method' do
it 'raises NoMethodError' do
expect { async.foo(key) }.to raise_error(NoMethodError)
end
end
end
describe 'Keys' do
describe '#new_key' do
let(:key_options) { {} }
......@@ -982,6 +1013,8 @@ describe NotificationService, :mailer do
let(:merge_request) { create :merge_request, source_project: project, assignee: create(:user), description: 'cc @participant' }
before do
project.add_master(merge_request.author)
project.add_master(merge_request.assignee)
build_team(merge_request.target_project)
add_users_with_subscription(merge_request.target_project, merge_request)
update_custom_notification(:new_merge_request, @u_guest_custom, resource: project)
......@@ -1093,15 +1126,18 @@ describe NotificationService, :mailer do
end
describe '#reassigned_merge_request' do
let(:current_user) { create(:user) }
before do
update_custom_notification(:reassign_merge_request, @u_guest_custom, resource: project)
update_custom_notification(:reassign_merge_request, @u_custom_global)
end
it do
notification.reassigned_merge_request(merge_request, merge_request.author)
notification.reassigned_merge_request(merge_request, current_user, merge_request.author)
should_email(merge_request.assignee)
should_email(merge_request.author)
should_email(@u_watcher)
should_email(@u_participant_mentioned)
should_email(@subscriber)
......@@ -1116,7 +1152,7 @@ describe NotificationService, :mailer do
end
it 'adds "assigned" reason for new assignee' do
notification.reassigned_merge_request(merge_request, merge_request.author)
notification.reassigned_merge_request(merge_request, current_user, merge_request.author)
email = find_email_for(merge_request.assignee)
......@@ -1126,7 +1162,7 @@ describe NotificationService, :mailer do
it_behaves_like 'participating notifications' do
let(:participant) { create(:user, username: 'user-participant') }
let(:issuable) { merge_request }
let(:notification_trigger) { notification.reassigned_merge_request(merge_request, @u_disabled) }
let(:notification_trigger) { notification.reassigned_merge_request(merge_request, current_user, merge_request.author) }
end
end
......
......@@ -29,25 +29,10 @@ describe Projects::UpdatePagesService do
end
describe 'pages artifacts' do
context 'with expiry date' do
before do
build.artifacts_expire_in = "2 days"
build.save!
end
it "doesn't delete artifacts" do
expect(execute).to eq(:success)
expect(build.reload.artifacts?).to eq(true)
end
end
context 'without expiry date' do
it "does delete artifacts" do
expect(execute).to eq(:success)
it "doesn't delete artifacts after deploying" do
expect(execute).to eq(:success)
expect(build.reload.artifacts?).to eq(false)
end
expect(build.reload.artifacts?).to eq(true)
end
end
......@@ -100,25 +85,10 @@ describe Projects::UpdatePagesService do
end
describe 'pages artifacts' do
context 'with expiry date' do
before do
build.artifacts_expire_in = "2 days"
build.save!
end
it "doesn't delete artifacts" do
expect(execute).to eq(:success)
expect(build.artifacts?).to eq(true)
end
end
context 'without expiry date' do
it "does delete artifacts" do
expect(execute).to eq(:success)
it "doesn't delete artifacts after deploying" do
expect(execute).to eq(:success)
expect(build.reload.artifacts?).to eq(false)
end
expect(build.artifacts?).to eq(true)
end
end
......@@ -171,13 +141,12 @@ describe Projects::UpdatePagesService do
build.reload
expect(deploy_status).to be_failed
expect(build.artifacts?).to be_truthy
end
end
context 'when failed to extract zip artifacts' do
before do
allow_any_instance_of(described_class)
expect_any_instance_of(described_class)
.to receive(:extract_zip_archive!)
.and_raise(Projects::UpdatePagesService::FailedToExtractError)
end
......@@ -188,21 +157,19 @@ describe Projects::UpdatePagesService do
build.reload
expect(deploy_status).to be_failed
expect(build.artifacts?).to be_truthy
end
end
context 'when missing artifacts metadata' do
before do
allow(build).to receive(:artifacts_metadata?).and_return(false)
expect(build).to receive(:artifacts_metadata?).and_return(false)
end
it 'does not raise an error and remove artifacts as failed job' do
it 'does not raise an error as failed job' do
execute
build.reload
expect(deploy_status).to be_failed
expect(build.artifacts?).to be_falsey
end
end
end
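The hunks above also swap `allow`/`allow_any_instance_of` stubs for `expect` message expectations. A minimal illustration of the difference in rspec-mocks terms:
# Stub only: the example still passes if the method is never called.
allow(build).to receive(:artifacts_metadata?).and_return(false)

# Stub plus verification: the example fails unless the method is called.
expect(build).to receive(:artifacts_metadata?).and_return(false)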
......
require 'spec_helper'
describe RepositoryArchiveCleanUpService do
describe '#execute' do
subject(:service) { described_class.new }
describe '#execute (new archive locations)' do
let(:sha) { "0" * 40 }
it 'removes outdated archives and directories in a new-style path' do
in_directory_with_files("project-999/#{sha}", %w[tar tar.bz2 tar.gz zip], 3.hours) do |dirname, files|
service.execute
files.each { |filename| expect(File.exist?(filename)).to be_falsy }
expect(File.directory?(dirname)).to be_falsy
expect(File.directory?(File.dirname(dirname))).to be_falsy
end
end
it 'does not remove directories when they contain outdated non-archives' do
in_directory_with_files("project-999/#{sha}", %w[tar conf rb], 2.hours) do |dirname, files|
service.execute
expect(File.directory?(dirname)).to be_truthy
end
end
it 'does not remove in-date archives in a new-style path' do
in_directory_with_files("project-999/#{sha}", %w[tar tar.bz2 tar.gz zip], 1.hour) do |dirname, files|
service.execute
files.each { |filename| expect(File.exist?(filename)).to be_truthy }
end
end
end
describe '#execute (legacy archive locations)' do
context 'when the downloads directory does not exist' do
it 'does not remove any archives' do
path = '/invalid/path/'
stub_repository_downloads_path(path)
allow(File).to receive(:directory?).and_call_original
expect(File).to receive(:directory?).with(path).and_return(false)
expect(service).not_to receive(:clean_up_old_archives)
expect(service).not_to receive(:clean_up_empty_directories)
......@@ -19,7 +51,7 @@ describe RepositoryArchiveCleanUpService do
context 'when the downloads directory exists' do
shared_examples 'invalid archive files' do |dirname, extensions, mtime|
it 'does not remove files and directoy' do
it 'does not remove files and directory' do
in_directory_with_files(dirname, extensions, mtime) do |dir, files|
service.execute
......@@ -43,7 +75,7 @@ describe RepositoryArchiveCleanUpService do
end
context 'with files older than 2 hours inside invalid directories' do
it_behaves_like 'invalid archive files', 'john_doe/sample.git', %w[conf rb tar tar.gz], 2.hours
it_behaves_like 'invalid archive files', 'john/doe/sample.git', %w[conf rb tar tar.gz], 2.hours
end
context 'with files newer than 2 hours that matches valid archive extensions' do
......@@ -58,24 +90,24 @@ describe RepositoryArchiveCleanUpService do
it_behaves_like 'invalid archive files', 'sample.git', %w[conf rb tar tar.gz], 1.hour
end
end
end
def in_directory_with_files(dirname, extensions, mtime)
Dir.mktmpdir do |tmpdir|
stub_repository_downloads_path(tmpdir)
dir = File.join(tmpdir, dirname)
files = create_temporary_files(dir, extensions, mtime)
yield(dir, files)
end
end
def stub_repository_downloads_path(path)
allow(Gitlab.config.gitlab).to receive(:repository_downloads_path).and_return(path)
end
def create_temporary_files(dir, extensions, mtime)
FileUtils.mkdir_p(dir)
FileUtils.touch(extensions.map { |ext| File.join(dir, "sample.#{ext}") }, mtime: Time.now - mtime)
end
end
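As a rough guide to what these examples pin down, here is a hedged standalone sketch of the service's contract (class name and bodies assumed, not the real implementation): archives older than two hours under the configured downloads path are deleted and emptied directories pruned, while non-archive files and fresh archives are left alone.
class ArchiveCleanUpSketch
  ARCHIVE_PATTERN = /\.(tar\.bz2|tar\.gz|tar|zip)\z/

  def initialize(root, max_age: 2 * 60 * 60)
    @root = root
    @max_age = max_age
  end

  def execute
    return unless File.directory?(@root)

    # Delete stale archive files only; leave non-archives untouched.
    Dir.glob(File.join(@root, '**', '*')).each do |path|
      next unless File.file?(path) && path =~ ARCHIVE_PATTERN

      File.delete(path) if Time.now - File.mtime(path) > @max_age
    end

    # Prune directories left empty, deepest first.
    Dir.glob(File.join(@root, '**', '*/')).sort.reverse_each do |dir|
      Dir.rmdir(dir) if Dir.empty?(dir)
    end
  end
end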
RSpec.shared_examples 'Master manages access requests' do
let(:user) { create(:user) }
let(:master) { create(:user) }
before do
entity.request_access(user)
entity.respond_to?(:add_owner) ? entity.add_owner(master) : entity.add_master(master)
sign_in(master)
end
it 'master can see access requests' do
visit members_page_path
expect_visible_access_request(entity, user)
end
it 'master can grant access', :js do
visit members_page_path
expect_visible_access_request(entity, user)
accept_confirm { click_on 'Grant access' }
expect_no_visible_access_request(entity, user)
page.within('.members-list') do
expect(page).to have_content user.name
end
end
it 'master can deny access', :js do
visit members_page_path
expect_visible_access_request(entity, user)
accept_confirm { click_on 'Deny access' }
expect_no_visible_access_request(entity, user)
expect(page).not_to have_content user.name
end
def expect_visible_access_request(entity, user)
expect(entity.requesters.exists?(user_id: user)).to be_truthy
expect(page).to have_content "Users requesting access to #{entity.name} 1"
expect(page).to have_content user.name
end
def expect_no_visible_access_request(entity, user)
expect(entity.requesters.exists?(user_id: user)).to be_falsy
expect(page).not_to have_content "Users requesting access to #{entity.name}"
end
end
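This shared group assumes its host spec supplies `entity` and `members_page_path`. The presumed wiring (factory traits and route helper assumed) looks something like:
describe 'Groups > Members > Master manages access requests' do
  it_behaves_like 'Master manages access requests' do
    let(:entity) { create(:group, :public) }
    let(:members_page_path) { group_group_members_path(entity) }
  end
end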
RSpec.shared_examples 'members notifications' do |entity_type|
let(:notification_service) { double('NotificationService').as_null_object }
before do
allow(member).to receive(:notification_service).and_return(notification_service)
end
describe "#after_create" do
let(:member) { build(:"#{entity_type}_member") }
it "sends email to user" do
expect(notification_service).to receive(:"new_#{entity_type}_member").with(member)
member.save
end
end
describe "#after_update" do
let(:member) { create(:"#{entity_type}_member", :developer) }
it "calls NotificationService.update_#{entity_type}_member" do
expect(notification_service).to receive(:"update_#{entity_type}_member").with(member)
member.update_attribute(:access_level, Member::MASTER)
end
it "does not send an email when the access level has not changed" do
expect(notification_service).not_to receive(:"update_#{entity_type}_member")
member.touch
end
end
describe '#accept_request' do
let(:member) { create(:"#{entity_type}_member", :access_request) }
it "calls NotificationService.new_#{entity_type}_member" do
expect(notification_service).to receive(:"new_#{entity_type}_member").with(member)
member.accept_request
end
end
describe "#accept_invite!" do
let(:member) { create(:"#{entity_type}_member", :invited) }
it "calls NotificationService.accept_#{entity_type}_invite" do
expect(notification_service).to receive(:"accept_#{entity_type}_invite").with(member)
member.accept_invite!(build(:user))
end
end
describe "#decline_invite!" do
let(:member) { create(:"#{entity_type}_member", :invited) }
it "calls NotificationService.decline_#{entity_type}_invite" do
expect(notification_service).to receive(:"decline_#{entity_type}_invite").with(member)
member.decline_invite!
end
end
end
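This parameterized group derives factory and notification-method names from `entity_type`, so a model spec presumably includes it as (usage assumed):
describe GroupMember do
  it_behaves_like 'members notifications', :group
end

describe ProjectMember do
  it_behaves_like 'members notifications', :project
end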
......@@ -12,8 +12,8 @@ describe MailScheduler::IssueDueWorker do
create(:issue, :opened, project: project, due_date: 2.days.from_now) # due on another day
create(:issue, :opened, due_date: Date.tomorrow) # different project
expect_any_instance_of(NotificationService).to receive(:issue_due).with(issue1)
expect_any_instance_of(NotificationService).to receive(:issue_due).with(issue2)
expect(worker.notification_service).to receive(:issue_due).with(issue1)
expect(worker.notification_service).to receive(:issue_due).with(issue2)
worker.perform(project.id)
end
......
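The switch from `expect_any_instance_of(NotificationService)` to `worker.notification_service` implies the worker now exposes a memoized service object that specs can stub directly. A hedged sketch of that shape (scope names assumed, not the actual class):
module MailScheduler
  class IssueDueWorker
    # Public and memoized, so a spec can stub it without any_instance_of.
    def notification_service
      @notification_service ||= NotificationService.new
    end

    def perform(project_id)
      # Assumed scopes: open issues in this project that are due tomorrow.
      Issue.opened.due_tomorrow.in_projects(project_id).find_each do |issue|
        notification_service.issue_due(issue)
      end
    end
  end
end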
require 'spec_helper'
describe MailScheduler::NotificationServiceWorker do
let(:worker) { described_class.new }
let(:method) { 'new_key' }
set(:key) { create(:personal_key) }
def serialize(*args)
ActiveJob::Arguments.serialize(args)
end
describe '#perform' do
it 'deserializes arguments from global IDs' do
expect(worker.notification_service).to receive(method).with(key)
worker.perform(method, *serialize(key))
end
context 'when the arguments cannot be deserialized' do
it 'does nothing' do
expect(worker.notification_service).not_to receive(method)
worker.perform(method, key.to_global_id.to_s.succ)
end
end
context 'when the method is not a public method' do
it 'raises NoMethodError' do
expect { worker.perform('notifiable?', *serialize(key)) }.to raise_error(NoMethodError)
end
end
end
describe '.perform_async' do
it 'serializes arguments as global IDs when scheduling' do
Sidekiq::Testing.fake! do
described_class.perform_async(method, key)
expect(described_class.jobs.count).to eq(1)
expect(described_class.jobs.first).to include('args' => [method, *serialize(key)])
end
end
end
end
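Taken together, these examples describe a worker that serializes arguments to GlobalID strings on enqueue, deserializes them in `perform`, forwards to a public `NotificationService` method, and silently drops jobs whose records have since disappeared. A hedged sketch of an implementation satisfying them (assumed shape, not the actual class):
require 'active_job'
require 'sidekiq'

module MailScheduler
  class NotificationServiceWorker
    include Sidekiq::Worker

    def notification_service
      @notification_service ||= NotificationService.new
    end

    def self.perform_async(*args)
      # Records travel as GlobalID strings so they survive the queue.
      super(*ActiveJob::Arguments.serialize(args))
    end

    def perform(meth, *args)
      deserialized = ActiveJob::Arguments.deserialize(args)

      # public_send raises NoMethodError for private methods such as
      # NotificationService#notifiable?, matching the example above.
      notification_service.public_send(meth, *deserialized)
    rescue ActiveJob::DeserializationError
      # The record was deleted before the job ran; nothing to notify.
    end
  end
end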
......@@ -22,13 +22,11 @@ describe NamespacelessProjectDestroyWorker do
end
end
# Only possible with schema 20180222043024 and lower.
# Project#namespace_id has not null constraint since then
context 'project has no namespace', :migration, schema: 20180222043024 do
let!(:project) do
project = build(:project, namespace_id: nil)
project.save(validate: false)
project
context 'project has no namespace' do
let!(:project) { create(:project) }
before do
allow_any_instance_of(Project).to receive(:namespace).and_return(nil)
end
context 'project not a fork of another project' do
......@@ -61,8 +59,7 @@ describe NamespacelessProjectDestroyWorker do
let!(:parent_project) { create(:project) }
let(:project) do
namespaceless_project = fork_project(parent_project)
namespaceless_project.namespace_id = nil
namespaceless_project.save(validate: false)
namespaceless_project.save
namespaceless_project
end
......
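The reworked examples stub `Project#namespace` to nil rather than persisting an invalid record, which suggests the worker's core guard is simply the namespace check. A hedged sketch (assumed structure; the destroy details are elided in the hunk above):
require 'sidekiq'

class NamespacelessProjectDestroyWorker
  include Sidekiq::Worker

  def perform(project_id)
    project = Project.unscoped.find_by(id: project_id)
    return unless project
    return if project.namespace # healthy projects are handled elsewhere

    project.destroy
  end
end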