Commit 38ba246b authored by Stan Hu

Merge branch 'ce-to-ee-2018-11-28' into 'master'

CE upstream - 2018-11-28 18:21 UTC

See merge request gitlab-org/gitlab-ee!8628
parents ded6c265 ec1982c5
@@ -10,10 +10,10 @@ const Api = {
   projectsPath: '/api/:version/projects.json',
   projectPath: '/api/:version/projects/:id',
   projectLabelsPath: '/:namespace_path/:project_path/labels',
-  mergeRequestPath: '/api/:version/projects/:id/merge_requests/:mrid',
+  projectMergeRequestPath: '/api/:version/projects/:id/merge_requests/:mrid',
+  projectMergeRequestChangesPath: '/api/:version/projects/:id/merge_requests/:mrid/changes',
+  projectMergeRequestVersionsPath: '/api/:version/projects/:id/merge_requests/:mrid/versions',
   mergeRequestsPath: '/api/:version/merge_requests',
-  mergeRequestChangesPath: '/api/:version/projects/:id/merge_requests/:mrid/changes',
-  mergeRequestVersionsPath: '/api/:version/projects/:id/merge_requests/:mrid/versions',
   groupLabelsPath: '/groups/:namespace_path/-/labels',
   ldapGroupsPath: '/api/:version/ldap/:provider/groups.json',
   issuableTemplatePath: '/:namespace_path/:project_path/templates/:type/:key',
@@ -101,36 +101,36 @@ const Api = {
   },

   // Return Merge Request for project
-  mergeRequest(projectPath, mergeRequestId, params = {}) {
-    const url = Api.buildUrl(Api.mergeRequestPath)
+  projectMergeRequest(projectPath, mergeRequestId, params = {}) {
+    const url = Api.buildUrl(Api.projectMergeRequestPath)
       .replace(':id', encodeURIComponent(projectPath))
       .replace(':mrid', mergeRequestId);

     return axios.get(url, { params });
   },

-  mergeRequests(params = {}) {
-    const url = Api.buildUrl(Api.mergeRequestsPath);
-
-    return axios.get(url, { params });
-  },
-
-  mergeRequestChanges(projectPath, mergeRequestId) {
-    const url = Api.buildUrl(Api.mergeRequestChangesPath)
+  projectMergeRequestChanges(projectPath, mergeRequestId) {
+    const url = Api.buildUrl(Api.projectMergeRequestChangesPath)
       .replace(':id', encodeURIComponent(projectPath))
       .replace(':mrid', mergeRequestId);

     return axios.get(url);
   },

-  mergeRequestVersions(projectPath, mergeRequestId) {
-    const url = Api.buildUrl(Api.mergeRequestVersionsPath)
+  projectMergeRequestVersions(projectPath, mergeRequestId) {
+    const url = Api.buildUrl(Api.projectMergeRequestVersionsPath)
       .replace(':id', encodeURIComponent(projectPath))
       .replace(':mrid', mergeRequestId);

     return axios.get(url);
   },

+  mergeRequests(params = {}) {
+    const url = Api.buildUrl(Api.mergeRequestsPath);
+
+    return axios.get(url, { params });
+  },
+
   newLabel(namespacePath, projectPath, data, callback) {
     let url;
...
@@ -41,13 +41,13 @@ export default {
     return Api.project(`${namespace}/${project}`);
   },
   getProjectMergeRequestData(projectId, mergeRequestId, params = {}) {
-    return Api.mergeRequest(projectId, mergeRequestId, params);
+    return Api.projectMergeRequest(projectId, mergeRequestId, params);
   },
   getProjectMergeRequestChanges(projectId, mergeRequestId) {
-    return Api.mergeRequestChanges(projectId, mergeRequestId);
+    return Api.projectMergeRequestChanges(projectId, mergeRequestId);
   },
   getProjectMergeRequestVersions(projectId, mergeRequestId) {
-    return Api.mergeRequestVersions(projectId, mergeRequestId);
+    return Api.projectMergeRequestVersions(projectId, mergeRequestId);
   },
   getBranchData(projectId, currentBranchId) {
     return Api.branchSingle(projectId, currentBranchId);
...
@@ -23,13 +23,19 @@ export const receiveMergeRequestsError = ({ commit, dispatch }, { type, search }
 export const receiveMergeRequestsSuccess = ({ commit }, data) =>
   commit(types.RECEIVE_MERGE_REQUESTS_SUCCESS, data);

-export const fetchMergeRequests = ({ dispatch, state: { state } }, { type, search = '' }) => {
+export const fetchMergeRequests = (
+  { dispatch, state: { state }, rootState: { currentProjectId } },
+  { type, search = '' },
+) => {
   dispatch('requestMergeRequests');
   dispatch('resetMergeRequests');

-  const scope = type ? scopes[type] : 'all';
+  const scope = type && scopes[type];
+  const request = scope
+    ? Api.mergeRequests({ scope, state, search })
+    : Api.projectMergeRequest(currentProjectId, '', { state, search });

-  return Api.mergeRequests({ scope, state, search })
+  return request
     .then(({ data }) => dispatch('receiveMergeRequestsSuccess', data))
     .catch(() => dispatch('receiveMergeRequestsError', { type, search }));
 };
...
@@ -128,7 +128,7 @@ export default {
 };
 </script>
 <template>
-  <div class="prepend-top-default js-environment-container">
+  <div class="prepend-top-default append-bottom-default js-environment-container">
     <div class="environment-information">
       <ci-icon :status="iconStatus" />
       <p class="inline append-bottom-0" v-html="environment"></p>
...
@@ -28,20 +28,22 @@ export default {
   <div class="bs-callout bs-callout-warning">
     <p v-if="tags.length" class="js-stuck-with-tags append-bottom-0">
       {{
-        s__(`This job is stuck, because you don't have
+        s__(`This job is stuck because you don't have
           any active runners online with any of these tags assigned to them:`)
       }}
-      <span v-for="(tag, index) in tags" :key="index" class="badge badge-primary"> {{ tag }} </span>
+      <span v-for="(tag, index) in tags" :key="index" class="badge badge-primary append-right-4">
+        {{ tag }}
+      </span>
     </p>
     <p v-else-if="hasNoRunnersForProject" class="js-stuck-no-runners append-bottom-0">
       {{
-        s__(`Job|This job is stuck, because the project
+        s__(`Job|This job is stuck because the project
           doesn't have any runners online assigned to it.`)
       }}
     </p>
     <p v-else class="js-stuck-no-active-runner append-bottom-0">
       {{
-        s__(`This job is stuck, because you don't
+        s__(`This job is stuck because you don't
           have any active runners that can run this job.`)
       }}
     </p>
...
@@ -15,6 +15,8 @@ module Ci
     WRITE_LOCK_SLEEP = 0.01.seconds
     WRITE_LOCK_TTL = 1.minute

+    FailedToPersistDataError = Class.new(StandardError)
+
     # Note: The ordering of this enum is related to the precedence of persist store.
     # The bottom item takes the highest precedence, and the top item takes the lowest precedence.
     enum data_store: {
@@ -109,16 +111,19 @@ module Ci
     def unsafe_persist_to!(new_store)
       return if data_store == new_store.to_s
-      raise ArgumentError, 'Can not persist empty data' unless size > 0

-      old_store_class = self.class.get_store_class(data_store)
+      current_data = get_data

-      get_data.tap do |the_data|
-        self.raw_data = nil
-        self.data_store = new_store
-        unsafe_set_data!(the_data)
+      unless current_data&.bytesize.to_i == CHUNK_SIZE
+        raise FailedToPersistDataError, 'Data is not fulfilled in a bucket'
       end

+      old_store_class = self.class.get_store_class(data_store)
+
+      self.raw_data = nil
+      self.data_store = new_store
+      unsafe_set_data!(current_data)
+
       old_store_class.delete_data(self)
     end
...
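In effect, a chunk may now only migrate to a slower store once it is full. A minimal usage sketch of the new failure mode (the chunk id is hypothetical, and the method is private, so it is invoked via `send`, as the specs below do):

```ruby
# Sketch: partial chunks now raise instead of being silently migrated.
chunk = Ci::BuildTraceChunk.find(42) # hypothetical id
begin
  chunk.send(:unsafe_persist_to!, :fog) # private method, as exercised in specs
rescue Ci::BuildTraceChunk::FailedToPersistDataError
  # the chunk keeps its current store and its data stays intact
end
```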
@@ -8,6 +8,7 @@ class EnvironmentStatus
   delegate :id, to: :environment
   delegate :name, to: :environment
   delegate :project, to: :environment
+  delegate :status, to: :deployment, allow_nil: true
   delegate :deployed_at, to: :deployment, allow_nil: true

   def self.for_merge_request(mr, user)
@@ -43,22 +44,6 @@ class EnvironmentStatus
       .merge_request_diff_files.where(deleted_file: false)
   end

-  ##
-  # Since frontend has not supported all statuses yet, BE has to
-  # proxy some status to a supported status.
-  def status
-    return unless deployment
-
-    case deployment.status
-    when 'created'
-      'running'
-    when 'canceled'
-      'failed'
-    else
-      deployment.status
-    end
-  end
-
   private

   PAGE_EXTENSIONS = /\A\.(s?html?|php|asp|cgi|pl)\z/i.freeze
...
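The removed proxy is replaced by the plain delegation added above, which roughly expands to the following (a sketch, not Rails' exact generated code):

```ruby
# What `delegate :status, to: :deployment, allow_nil: true` provides:
def status
  deployment&.status
end
```

With the delegation in place, `created` and `canceled` reach the frontend verbatim, which is what the "Return real deployment status to frontend" changelog entry below refers to.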
# frozen_string_literal: true

 class PoolRepository < ActiveRecord::Base
-  POOL_PREFIX = '@pools'
-
   belongs_to :shard
   validates :shard, presence: true

-  # For now, only pool repositories are tracked in the database. However, we may
-  # want to add other repository types in the future
-  self.table_name = 'repositories'
+  has_many :member_projects, class_name: 'Project'

-  has_many :pool_member_projects, class_name: 'Project', foreign_key: :pool_repository_id
+  after_create :correct_disk_path

   def shard_name
     shard&.name
@@ -19,4 +15,15 @@ class PoolRepository < ActiveRecord::Base
   def shard_name=(name)
     self.shard = Shard.by_name(name)
   end
+
+  private
+
+  def correct_disk_path
+    update!(disk_path: storage.disk_path)
+  end
+
+  def storage
+    Storage::HashedProject
+      .new(self, prefix: Storage::HashedProject::POOL_PATH_PREFIX)
+  end
 end
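A hedged sketch of what the new callback produces (the hash value is illustrative; hashed storage derives it from the record id):

```ruby
# Sketch: on create, the pool is assigned a '@pools'-prefixed hashed disk path.
pool = PoolRepository.create!(shard: Shard.by_name('default'))
pool.disk_path # => "@pools/6b/86/6b86b273..." (hash value illustrative)
```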
@@ -879,9 +879,9 @@ class Project < ActiveRecord::Base
   end

   def readme_url
-    readme = repository.readme
-    if readme
-      Gitlab::Routing.url_helpers.project_blob_url(self, File.join(default_branch, readme.path))
+    readme_path = repository.readme_path
+    if readme_path
+      Gitlab::Routing.url_helpers.project_blob_url(self, File.join(default_branch, readme_path))
     end
   end
...
@@ -39,7 +39,7 @@ class Repository
   #
   # For example, for entry `:commit_count` there's a method called `commit_count` which
   # stores its data in the `commit_count` cache key.
-  CACHED_METHODS = %i(size commit_count rendered_readme contribution_guide
+  CACHED_METHODS = %i(size commit_count rendered_readme readme_path contribution_guide
                       changelog license_blob license_key gitignore
                       gitlab_ci_yml branch_names tag_names branch_count
                       tag_count avatar exists? root_ref has_visible_content?
@@ -52,7 +52,7 @@ class Repository
   # changed. This Hash maps file types (as returned by Gitlab::FileDetector) to
   # the corresponding methods to call for refreshing caches.
   METHOD_CACHES_FOR_FILE_TYPES = {
-    readme: :rendered_readme,
+    readme: %i(rendered_readme readme_path),
     changelog: :changelog,
     license: %i(license_blob license_key license),
     contributing: :contribution_guide,
@@ -595,6 +595,11 @@ class Repository
     head_tree&.readme
   end

+  def readme_path
+    readme&.path
+  end
+  cache_method :readme_path
+
   def rendered_readme
     return unless readme
...
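For context, `cache_method` memoizes the new `readme_path` into the repository cache; a simplified sketch of the behaviour it layers on (not the macro's actual implementation):

```ruby
# Sketch: the first call computes readme&.path and stores it under the
# 'readme_path' cache key; later calls hit the cache until a README change
# expires it via METHOD_CACHES_FOR_FILE_TYPES above.
def readme_path
  cache.fetch(:readme_path) { readme&.path }
end
```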
@@ -5,17 +5,19 @@ module Storage
     attr_accessor :project
     delegate :gitlab_shell, :repository_storage, to: :project

-    ROOT_PATH_PREFIX = '@hashed'.freeze
+    REPOSITORY_PATH_PREFIX = '@hashed'
+    POOL_PATH_PREFIX = '@pools'

-    def initialize(project)
+    def initialize(project, prefix: REPOSITORY_PATH_PREFIX)
       @project = project
+      @prefix = prefix
     end

     # Base directory
     #
     # @return [String] directory where repository is stored
     def base_dir
-      "#{ROOT_PATH_PREFIX}/#{disk_hash[0..1]}/#{disk_hash[2..3]}" if disk_hash
+      "#{@prefix}/#{disk_hash[0..1]}/#{disk_hash[2..3]}" if disk_hash
     end

     # Disk path is used to build repository and project's wiki path on disk
...
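To make the prefix parameter concrete, here is a sketch of the two layouts, assuming `disk_hash` is the SHA256 of the stringified record id, which is how hashed storage computes it:

```ruby
require 'digest'

# Illustrative only: both layouts bucket by the first two byte pairs of the hash.
disk_hash = Digest::SHA2.hexdigest('1') # a record id, stringified
puts "@hashed/#{disk_hash[0..1]}/#{disk_hash[2..3]}" # => @hashed/6b/86
puts "@pools/#{disk_hash[0..1]}/#{disk_hash[2..3]}"  # => @pools/6b/86
```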
@@ -7,7 +7,7 @@
 %li
   %span.light
     %i.fa.fa-clock-o
-    = event.created_at.strftime('%-I:%M%P')
+    = event.created_at.to_time.in_time_zone.strftime('%-I:%M%P')
 - if event.visible_to_user?(current_user)
   - if event.push?
     #{event.action_name} #{event.ref_type}
...
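Why the `to_time.in_time_zone` conversion matters, in a self-contained sketch (the zone and timestamp are invented for illustration):

```ruby
require 'active_support/all'

# A bare strftime renders the stored UTC value; in_time_zone converts it
# to Rails' configured Time.zone (the user's timezone) first.
Time.zone = 'US/Pacific' # assumed user timezone
created_at = Time.utc(2018, 11, 28, 18, 21)

created_at.strftime('%-I:%M%P')                      # => "6:21pm" (UTC)
created_at.to_time.in_time_zone.strftime('%-I:%M%P') # => "10:21am" (user zone)
```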
---
title: Show user contributions in correct timezone within user profile
merge_request: 23419
author:
type: changed
---
title: Scope default MR search in WebIDE dropdown to current project
merge_request: 23400
author:
type: changed
---
title: Improves performance of Project#readme_url by caching the README path
merge_request: 23357
author:
type: performance
---
title: Adds margins between tags when a job is stuck
merge_request:
author:
type: fixed
---
title: Validate chunk size when persisting
merge_request: 23341
author:
type: fixed
---
title: Return real deployment status to frontend
merge_request: 23270
author:
type: fixed
class RenameRepositoriesPoolRepositories < ActiveRecord::Migration[5.0]
include Gitlab::Database::MigrationHelpers
# This change doesn't require downtime as the table is not in use, so we're
# free to change an empty table
DOWNTIME = false
def change
rename_table :repositories, :pool_repositories
end
end
# frozen_string_literal: true
class DropNotNullConstraintPoolRepositoryDiskPath < ActiveRecord::Migration[5.0]
DOWNTIME = false
def change
change_column_null :pool_repositories, :disk_path, true
end
end
@@ -2041,6 +2041,13 @@ ActiveRecord::Schema.define(version: 20181126153547) do
     t.index ["name"], name: "index_plans_on_name", using: :btree
   end

+  create_table "pool_repositories", id: :bigserial, force: :cascade do |t|
+    t.integer "shard_id", null: false
+    t.string "disk_path"
+    t.index ["disk_path"], name: "index_pool_repositories_on_disk_path", unique: true, using: :btree
+    t.index ["shard_id"], name: "index_pool_repositories_on_shard_id", using: :btree
+  end
+
   create_table "programming_languages", force: :cascade do |t|
     t.string "name", null: false
     t.string "color", null: false
@@ -2480,13 +2487,6 @@ ActiveRecord::Schema.define(version: 20181126153547) do
     t.index ["project_id"], name: "index_remote_mirrors_on_project_id", using: :btree
   end

-  create_table "repositories", id: :bigserial, force: :cascade do |t|
-    t.integer "shard_id", null: false
-    t.string "disk_path", null: false
-    t.index ["disk_path"], name: "index_repositories_on_disk_path", unique: true, using: :btree
-    t.index ["shard_id"], name: "index_repositories_on_shard_id", using: :btree
-  end
-
   create_table "repository_languages", id: false, force: :cascade do |t|
     t.integer "project_id", null: false
     t.integer "programming_language_id", null: false
@@ -3257,6 +3257,7 @@ ActiveRecord::Schema.define(version: 20181126153547) do
   add_foreign_key "path_locks", "projects", name: "fk_5265c98f24", on_delete: :cascade
   add_foreign_key "path_locks", "users"
   add_foreign_key "personal_access_tokens", "users"
+  add_foreign_key "pool_repositories", "shards", on_delete: :restrict
   add_foreign_key "project_authorizations", "projects", on_delete: :cascade
   add_foreign_key "project_authorizations", "users", on_delete: :cascade
   add_foreign_key "project_auto_devops", "projects", on_delete: :cascade
@@ -3271,7 +3272,7 @@ ActiveRecord::Schema.define(version: 20181126153547) do
   add_foreign_key "project_repository_states", "projects", on_delete: :cascade
   add_foreign_key "project_statistics", "projects", on_delete: :cascade
   add_foreign_key "project_tracing_settings", "projects", on_delete: :cascade
-  add_foreign_key "projects", "repositories", column: "pool_repository_id", name: "fk_6e5c14658a", on_delete: :nullify
+  add_foreign_key "projects", "pool_repositories", name: "fk_6e5c14658a", on_delete: :nullify
   add_foreign_key "prometheus_alert_events", "projects", on_delete: :cascade
   add_foreign_key "prometheus_alert_events", "prometheus_alerts", on_delete: :cascade
   add_foreign_key "prometheus_alerts", "environments", on_delete: :cascade
@@ -3300,7 +3301,6 @@ ActiveRecord::Schema.define(version: 20181126153547) do
   add_foreign_key "push_rules", "projects", name: "fk_83b29894de", on_delete: :cascade
   add_foreign_key "releases", "projects", name: "fk_47fe2a0596", on_delete: :cascade
   add_foreign_key "remote_mirrors", "projects", name: "fk_43a9aa4ca8", on_delete: :cascade
-  add_foreign_key "repositories", "shards", on_delete: :restrict
   add_foreign_key "repository_languages", "projects", on_delete: :cascade
   add_foreign_key "resource_label_events", "epics", on_delete: :cascade
   add_foreign_key "resource_label_events", "issues", on_delete: :cascade
...
@@ -6,7 +6,7 @@ namespace :gitlab do
     desc "GitLab | Cleanup | Clean namespaces"
     task dirs: :gitlab_environment do
       namespaces = Set.new(Namespace.pluck(:path))
-      namespaces << Storage::HashedProject::ROOT_PATH_PREFIX
+      namespaces << Storage::HashedProject::REPOSITORY_PATH_PREFIX

       Gitaly::Server.all.each do |server|
         all_dirs = Gitlab::GitalyClient::StorageService
@@ -79,7 +79,7 @@ namespace :gitlab do
         # TODO ignoring hashed repositories for now. But revisit to fully support
         # possible orphaned hashed repos
-        next if repo_with_namespace.start_with?(Storage::HashedProject::ROOT_PATH_PREFIX)
+        next if repo_with_namespace.start_with?(Storage::HashedProject::REPOSITORY_PATH_PREFIX)
         next if Project.find_by_full_path(repo_with_namespace)

         new_path = path + move_suffix
...
@@ -4739,7 +4739,7 @@ msgstr ""
 msgid "Job|The artifacts will be removed in"
 msgstr ""

-msgid "Job|This job is stuck, because the project doesn't have any runners online assigned to it."
+msgid "Job|This job is stuck because the project doesn't have any runners online assigned to it."
 msgstr ""

 msgid "Jul"
@@ -8427,10 +8427,10 @@ msgstr ""
 msgid "This job is in pending state and is waiting to be picked by a runner"
 msgstr ""

-msgid "This job is stuck, because you don't have any active runners online with any of these tags assigned to them:"
+msgid "This job is stuck because you don't have any active runners online with any of these tags assigned to them:"
 msgstr ""

-msgid "This job is stuck, because you don't have any active runners that can run this job."
+msgid "This job is stuck because you don't have any active runners that can run this job."
 msgstr ""

 msgid "This job is the most recent deployment to %{link}."
...
@@ -80,6 +80,15 @@ GITLAB_USERNAME=jsmith GITLAB_PASSWORD=password GITLAB_SANDBOX_NAME=jsmith-qa-sa
 All [supported environment variables are here](https://gitlab.com/gitlab-org/gitlab-qa/blob/master/docs/what_tests_can_be_run.md#supported-environment-variables).

+### Sending additional cookies
+
+The environment variable `QA_COOKIES` can be set to send additional cookies
+on every request. This is necessary on gitlab.com to direct traffic to the
+canary fleet. To do this set `QA_COOKIES="gitlab_canary=true"`.
+
+To set multiple cookies, separate them with the `;` character, for example: `QA_COOKIES="cookie1=value;cookie2=value2"`
+
 ### Building a Docker image to test

 Once you have made changes to the CE/EE repositories, you may want to build a
...
@@ -118,6 +118,15 @@ module QA
       def perform(&block)
         visit(url)

+        if QA::Runtime::Env.qa_cookies
+          browser = Capybara.current_session.driver.browser
+          QA::Runtime::Env.qa_cookies.each do |cookie|
+            name, value = cookie.split("=")
+            value ||= ""
+            browser.manage.add_cookie name: name, value: value
+          end
+        end
+
         yield.tap { clear! } if block_given?
       end
...
@@ -40,6 +40,10 @@ module QA
         ENV['CI'] || ENV['CI_SERVER']
       end

+      def qa_cookies
+        ENV['QA_COOKIES'] && ENV['QA_COOKIES'].split(';')
+      end
+
       def signup_disabled?
         enabled?(ENV['SIGNUP_DISABLED'], default: false)
       end
...
FactoryBot.define do
  factory :pool_repository do
    shard
  end
end

FactoryBot.define do
  factory :shard do
    name "default"
  end
end
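A minimal usage sketch for the new factories (assumes FactoryBot's standard `create` helper is available in specs):

```ruby
# The :pool_repository factory builds its shard association automatically,
# so a bare create is enough; the :shard factory defaults to 'default'.
pool = create(:pool_repository)
pool.shard.name # => "default"
```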
@@ -65,7 +65,20 @@ describe 'Merge request > User sees deployment widget', :js do
       visit project_merge_request_path(project, merge_request)
       wait_for_requests

-      expect(page).to have_content("Deploying to #{environment.name}")
+      expect(page).to have_content("Will deploy to #{environment.name}")
+      expect(page).not_to have_css('.js-deploy-time')
+    end
+  end
+
+  context 'when deployment was cancelled' do
+    let(:build) { create(:ci_build, :canceled, pipeline: pipeline) }
+    let!(:deployment) { create(:deployment, :canceled, environment: environment, sha: sha, ref: ref, deployable: build) }
+
+    it 'displays the environment name' do
+      visit project_merge_request_path(project, merge_request)
+      wait_for_requests
+
+      expect(page).to have_content("Failed to deploy to #{environment.name}")
       expect(page).not_to have_css('.js-deploy-time')
     end
   end
...
@@ -754,7 +754,7 @@ describe 'Jobs', :clean_gitlab_redis_shared_state do
       it 'renders message about job being stuck because no runners are active' do
         expect(page).to have_css('.js-stuck-no-active-runner')
-        expect(page).to have_content("This job is stuck, because you don't have any active runners that can run this job.")
+        expect(page).to have_content("This job is stuck because you don't have any active runners that can run this job.")
       end
     end
@@ -764,7 +764,7 @@ describe 'Jobs', :clean_gitlab_redis_shared_state do
       it 'renders message about job being stuck because of no runners with the specified tags' do
         expect(page).to have_css('.js-stuck-with-tags')
-        expect(page).to have_content("This job is stuck, because you don't have any active runners online with any of these tags assigned to them:")
+        expect(page).to have_content("This job is stuck because you don't have any active runners online with any of these tags assigned to them:")
       end
     end
@@ -774,7 +774,7 @@ describe 'Jobs', :clean_gitlab_redis_shared_state do
       it 'renders message about job being stuck because of no runners with the specified tags' do
         expect(page).to have_css('.js-stuck-with-tags')
-        expect(page).to have_content("This job is stuck, because you don't have any active runners online with any of these tags assigned to them:")
+        expect(page).to have_content("This job is stuck because you don't have any active runners online with any of these tags assigned to them:")
       end
     end
@@ -783,7 +783,7 @@ describe 'Jobs', :clean_gitlab_redis_shared_state do
       it 'renders message about job being stuck because no runners are available' do
         expect(page).to have_css('.js-stuck-no-active-runner')
-        expect(page).to have_content("This job is stuck, because you don't have any active runners that can run this job.")
+        expect(page).to have_content("This job is stuck because you don't have any active runners that can run this job.")
       end
     end
@@ -793,7 +793,7 @@ describe 'Jobs', :clean_gitlab_redis_shared_state do
       it 'renders message about job being stuck because runners are offline' do
         expect(page).to have_css('.js-stuck-no-runners')
-        expect(page).to have_content("This job is stuck, because the project doesn't have any runners online assigned to it.")
+        expect(page).to have_content("This job is stuck because the project doesn't have any runners online assigned to it.")
       end
     end
   end
...
@@ -123,7 +123,7 @@ describe('Api', () => {
     });
   });

-  describe('mergerequest', () => {
+  describe('projectMergeRequest', () => {
     it('fetches a merge request', done => {
       const projectPath = 'abc';
       const mergeRequestId = '123456';
@@ -132,7 +132,7 @@ describe('Api', () => {
         title: 'test',
       });

-      Api.mergeRequest(projectPath, mergeRequestId)
+      Api.projectMergeRequest(projectPath, mergeRequestId)
         .then(({ data }) => {
           expect(data.title).toBe('test');
         })
@@ -141,7 +141,7 @@ describe('Api', () => {
     });
   });

-  describe('mergerequest changes', () => {
+  describe('projectMergeRequestChanges', () => {
     it('fetches the changes of a merge request', done => {
       const projectPath = 'abc';
       const mergeRequestId = '123456';
@@ -150,7 +150,7 @@ describe('Api', () => {
         title: 'test',
       });

-      Api.mergeRequestChanges(projectPath, mergeRequestId)
+      Api.projectMergeRequestChanges(projectPath, mergeRequestId)
         .then(({ data }) => {
           expect(data.title).toBe('test');
         })
@@ -159,7 +159,7 @@ describe('Api', () => {
     });
   });

-  describe('mergerequest versions', () => {
+  describe('projectMergeRequestVersions', () => {
     it('fetches the versions of a merge request', done => {
       const projectPath = 'abc';
       const mergeRequestId = '123456';
@@ -170,7 +170,7 @@ describe('Api', () => {
         },
       ]);

-      Api.mergeRequestVersions(projectPath, mergeRequestId)
+      Api.projectMergeRequestVersions(projectPath, mergeRequestId)
         .then(({ data }) => {
           expect(data.length).toBe(1);
           expect(data[0].id).toBe(123);
...
@@ -14,10 +14,14 @@ import testAction from '../../../../helpers/vuex_action_helper';
 describe('IDE merge requests actions', () => {
   let mockedState;
+  let mockedRootState;
   let mock;

   beforeEach(() => {
     mockedState = state();
+    mockedRootState = {
+      currentProjectId: 7,
+    };
     mock = new MockAdapter(axios);
   });

@@ -86,13 +90,16 @@ describe('IDE merge requests actions', () => {
     describe('success', () => {
       beforeEach(() => {
-        mock.onGet(/\/api\/v4\/merge_requests(.*)$/).replyOnce(200, mergeRequests);
+        mock.onGet(/\/api\/v4\/merge_requests\/?/).replyOnce(200, mergeRequests);
       });

       it('calls API with params', () => {
         const apiSpy = spyOn(axios, 'get').and.callThrough();

-        fetchMergeRequests({ dispatch() {}, state: mockedState }, { type: 'created' });
+        fetchMergeRequests(
+          { dispatch() {}, state: mockedState, rootState: mockedRootState },
+          { type: 'created' },
+        );

         expect(apiSpy).toHaveBeenCalledWith(jasmine.anything(), {
           params: {
@@ -107,7 +114,7 @@ describe('IDE merge requests actions', () => {
         const apiSpy = spyOn(axios, 'get').and.callThrough();

         fetchMergeRequests(
-          { dispatch() {}, state: mockedState },
+          { dispatch() {}, state: mockedState, rootState: mockedRootState },
           { type: 'created', search: 'testing search' },
         );

@@ -139,6 +146,49 @@ describe('IDE merge requests actions', () => {
       });
     });

+    describe('success without type', () => {
+      beforeEach(() => {
+        mock.onGet(/\/api\/v4\/projects\/.+\/merge_requests\/?$/).replyOnce(200, mergeRequests);
+      });
+
+      it('calls API with project', () => {
+        const apiSpy = spyOn(axios, 'get').and.callThrough();
+
+        fetchMergeRequests(
+          { dispatch() {}, state: mockedState, rootState: mockedRootState },
+          { type: null, search: 'testing search' },
+        );
+
+        expect(apiSpy).toHaveBeenCalledWith(
+          jasmine.stringMatching(`projects/${mockedRootState.currentProjectId}/merge_requests`),
+          {
+            params: {
+              state: 'opened',
+              search: 'testing search',
+            },
+          },
+        );
+      });
+
+      it('dispatches success with received data', done => {
+        testAction(
+          fetchMergeRequests,
+          { type: null },
+          { ...mockedState, ...mockedRootState },
+          [],
+          [
+            { type: 'requestMergeRequests' },
+            { type: 'resetMergeRequests' },
+            {
+              type: 'receiveMergeRequestsSuccess',
+              payload: mergeRequests,
+            },
+          ],
+          done,
+        );
+      });
+    });
+
     describe('error', () => {
       beforeEach(() => {
         mock.onGet(/\/api\/v4\/merge_requests(.*)$/).replyOnce(500);
...
@@ -160,9 +160,7 @@ describe('Job App ', () => {
         setTimeout(() => {
           expect(vm.$el.querySelector('.js-job-stuck')).not.toBeNull();
-          expect(vm.$el.querySelector('.js-job-stuck').textContent).toContain(
-            "This job is stuck, because you don't have any active runners that can run this job.",
-          );
+          expect(vm.$el.querySelector('.js-job-stuck .js-stuck-no-active-runner')).not.toBeNull();

           done();
         }, 0);
       });
@@ -195,9 +193,7 @@ describe('Job App ', () => {
         setTimeout(() => {
           expect(vm.$el.querySelector('.js-job-stuck').textContent).toContain(job.tags[0]);
-          expect(vm.$el.querySelector('.js-job-stuck').textContent).toContain(
-            "This job is stuck, because you don't have any active runners online with any of these tags assigned to them:",
-          );
+          expect(vm.$el.querySelector('.js-job-stuck .js-stuck-with-tags')).not.toBeNull();

           done();
         }, 0);
       });
@@ -230,9 +226,7 @@ describe('Job App ', () => {
         setTimeout(() => {
           expect(vm.$el.querySelector('.js-job-stuck').textContent).toContain(job.tags[0]);
-          expect(vm.$el.querySelector('.js-job-stuck').textContent).toContain(
-            "This job is stuck, because you don't have any active runners online with any of these tags assigned to them:",
-          );
+          expect(vm.$el.querySelector('.js-job-stuck .js-stuck-with-tags')).not.toBeNull();

           done();
         }, 0);
       });
...
@@ -436,32 +436,47 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
       let(:data_store) { :redis }

       context 'when data exists' do
-        let(:data) { 'Sample data in redis' }
-
         before do
           build_trace_chunk.send(:unsafe_set_data!, data)
         end

-        it 'persists the data' do
-          expect(build_trace_chunk.redis?).to be_truthy
-          expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to eq(data)
-          expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
-          expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)
+        context 'when data size reached CHUNK_SIZE' do
+          let(:data) { 'a' * described_class::CHUNK_SIZE }

-          subject
+          it 'persists the data' do
+            expect(build_trace_chunk.redis?).to be_truthy
+            expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to eq(data)
+            expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
+            expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)

-          expect(build_trace_chunk.fog?).to be_truthy
-          expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
-          expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
-          expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
+            subject
+
+            expect(build_trace_chunk.fog?).to be_truthy
+            expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
+            expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
+            expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
+          end
+
+          it_behaves_like 'Atomic operation'
         end

-        it_behaves_like 'Atomic operation'
+        context 'when data size has not reached CHUNK_SIZE' do
+          let(:data) { 'Sample data in redis' }
+
+          it 'does not persist the data and the original data is intact' do
+            expect { subject }.to raise_error(described_class::FailedToPersistDataError)
+            expect(build_trace_chunk.redis?).to be_truthy
+            expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to eq(data)
+            expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
+            expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)
+          end
+        end
       end

       context 'when data does not exist' do
         it 'does not persist' do
-          expect { subject }.to raise_error('Can not persist empty data')
+          expect { subject }.to raise_error(described_class::FailedToPersistDataError)
         end
       end
     end
@@ -470,32 +485,47 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
       let(:data_store) { :database }

       context 'when data exists' do
-        let(:data) { 'Sample data in database' }
-
         before do
           build_trace_chunk.send(:unsafe_set_data!, data)
         end

-        it 'persists the data' do
-          expect(build_trace_chunk.database?).to be_truthy
-          expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
-          expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to eq(data)
-          expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)
+        context 'when data size reached CHUNK_SIZE' do
+          let(:data) { 'a' * described_class::CHUNK_SIZE }

-          subject
+          it 'persists the data' do
+            expect(build_trace_chunk.database?).to be_truthy
+            expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
+            expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to eq(data)
+            expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)

-          expect(build_trace_chunk.fog?).to be_truthy
-          expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
-          expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
-          expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
+            subject
+
+            expect(build_trace_chunk.fog?).to be_truthy
+            expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
+            expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
+            expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
+          end
+
+          it_behaves_like 'Atomic operation'
         end

-        it_behaves_like 'Atomic operation'
+        context 'when data size has not reached CHUNK_SIZE' do
+          let(:data) { 'Sample data in database' }
+
+          it 'does not persist the data and the original data is intact' do
+            expect { subject }.to raise_error(described_class::FailedToPersistDataError)
+            expect(build_trace_chunk.database?).to be_truthy
+            expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
+            expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to eq(data)
+            expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)
+          end
+        end
       end

       context 'when data does not exist' do
         it 'does not persist' do
-          expect { subject }.to raise_error('Can not persist empty data')
+          expect { subject }.to raise_error(described_class::FailedToPersistDataError)
         end
       end
     end
@@ -504,27 +534,37 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
       let(:data_store) { :fog }

       context 'when data exists' do
-        let(:data) { 'Sample data in fog' }
-
         before do
           build_trace_chunk.send(:unsafe_set_data!, data)
         end

-        it 'does not change data store' do
-          expect(build_trace_chunk.fog?).to be_truthy
-          expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
-          expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
-          expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
+        context 'when data size reached CHUNK_SIZE' do
+          let(:data) { 'a' * described_class::CHUNK_SIZE }

-          subject
+          it 'does not change data store' do
+            expect(build_trace_chunk.fog?).to be_truthy
+            expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
+            expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
+            expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)

-          expect(build_trace_chunk.fog?).to be_truthy
-          expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
-          expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
-          expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
+            subject
+
+            expect(build_trace_chunk.fog?).to be_truthy
+            expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
+            expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
+            expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
+          end
+
+          it_behaves_like 'Atomic operation'
         end

-        it_behaves_like 'Atomic operation'
+        context 'when data size has not reached CHUNK_SIZE' do
+          let(:data) { 'Sample data in fog' }
+
+          it 'does not raise error' do
+            expect { subject }.not_to raise_error
+          end
+        end
       end
     end
   end
...
require 'spec_helper'

describe PoolRepository do
  describe 'associations' do
    it { is_expected.to belong_to(:shard) }
    it { is_expected.to have_many(:member_projects) }
  end

  describe 'validations' do
    let!(:pool_repository) { create(:pool_repository) }

    it { is_expected.to validate_presence_of(:shard) }
  end

  describe '#disk_path' do
    it 'sets the hashed disk_path' do
      pool = create(:pool_repository)

      elements = File.split(pool.disk_path)

      expect(elements).to all(match(/\d{2,}/))
    end
  end
end
@@ -1488,6 +1488,7 @@ describe Repository do
         :size,
         :commit_count,
         :rendered_readme,
+        :readme_path,
         :contribution_guide,
         :changelog,
         :license_blob,
@@ -1874,6 +1875,42 @@ describe Repository do
     end
   end

+  describe '#readme_path', :use_clean_rails_memory_store_caching do
+    context 'with a non-existing repository' do
+      let(:project) { create(:project) }
+
+      it 'returns nil' do
+        expect(repository.readme_path).to be_nil
+      end
+    end
+
+    context 'with an existing repository' do
+      context 'when no README exists' do
+        let(:project) { create(:project, :empty_repo) }
+
+        it 'returns nil' do
+          expect(repository.readme_path).to be_nil
+        end
+      end
+
+      context 'when a README exists' do
+        let(:project) { create(:project, :repository) }
+
+        it 'returns the README' do
+          expect(repository.readme_path).to eq("README.md")
+        end
+
+        it 'caches the response' do
+          expect(repository).to receive(:readme).and_call_original.once
+
+          2.times do
+            expect(repository.readme_path).to eq("README.md")
+          end
+        end
+      end
+    end
+  end
+
   describe '#expire_statistics_caches' do
     it 'expires the caches' do
       expect(repository).to receive(:expire_method_caches)
@@ -2042,9 +2079,10 @@ describe Repository do
   describe '#refresh_method_caches' do
     it 'refreshes the caches of the given types' do
       expect(repository).to receive(:expire_method_caches)
-        .with(%i(rendered_readme license_blob license_key license))
+        .with(%i(rendered_readme readme_path license_blob license_key license))

       expect(repository).to receive(:rendered_readme)
+      expect(repository).to receive(:readme_path)
       expect(repository).to receive(:license_blob)
       expect(repository).to receive(:license_key)
       expect(repository).to receive(:license)
...