Commit 9413d1c2 authored by Douwe Maan's avatar Douwe Maan

Merge branch 'master' into 13948-access-request-to-projects-and-groups

# Conflicts:
#	app/views/layouts/nav/_project.html.haml
parents 515205d3 d4cd6dca
......@@ -77,6 +77,7 @@ v 8.9.0 (unreleased)
- All classes in the Banzai::ReferenceParser namespace are now instrumented
- Remove deprecated issues_tracker and issues_tracker_id from project model
- Allow users to create confidential issues in private projects
- Measure CPU time for instrumented methods
v 8.8.5 (unreleased)
- Ensure branch cleanup regardless of whether the GitHub import process succeeds
......
......@@ -210,6 +210,9 @@ gem 'mousetrap-rails', '~> 1.4.6'
# Detect and convert string character encoding
gem 'charlock_holmes', '~> 0.7.3'
# Parse duration
gem 'chronic_duration', '~> 0.10.6'
gem "sass-rails", '~> 5.0.0'
gem "coffee-rails", '~> 4.1.0'
gem "uglifier", '~> 2.7.2'
......
......@@ -124,6 +124,8 @@ GEM
mime-types (>= 1.16)
cause (0.1)
charlock_holmes (0.7.3)
chronic_duration (0.10.6)
numerizer (~> 0.1.1)
chunky_png (1.3.5)
cliver (0.3.2)
coderay (1.1.0)
......@@ -414,6 +416,7 @@ GEM
nokogiri (1.6.8)
mini_portile2 (~> 2.1.0)
pkg-config (~> 1.1.7)
numerizer (0.1.1)
oauth (0.4.7)
oauth2 (1.0.0)
faraday (>= 0.8, < 0.10)
......@@ -839,6 +842,7 @@ DEPENDENCIES
capybara-screenshot (~> 1.0.0)
carrierwave (~> 0.10.0)
charlock_holmes (~> 0.7.3)
chronic_duration (~> 0.10.6)
coffee-rails (~> 4.1.0)
connection_pool (~> 2.0)
coveralls (~> 0.8.2)
......
......@@ -17,6 +17,8 @@ class @CiBuild
.off 'resize.build'
.on 'resize.build', @hideSidebar
@updateArtifactRemoveDate()
if $('#build-trace').length
@getInitialBuildTrace()
@initScrollButtonAffix()
......@@ -103,3 +105,10 @@ class @CiBuild
$('.js-build-sidebar')
.removeClass 'right-sidebar-collapsed'
.addClass 'right-sidebar-expanded'
updateArtifactRemoveDate: ->
$date = $('.js-artifacts-remove')
if $date.length
date = $date.text()
$date.text $.timefor(new Date(date), ' ')
class Projects::ArtifactsController < Projects::ApplicationController
layout 'project'
before_action :authorize_read_build!
before_action :authorize_update_build!, only: [:keep]
before_action :validate_artifacts!
def download
unless artifacts_file.file_storage?
return redirect_to artifacts_file.url
end
unless artifacts_file.exists?
return render_404
end
send_file artifacts_file.path, disposition: 'attachment'
end
def browse
return render_404 unless build.artifacts?
directory = params[:path] ? "#{params[:path]}/" : ''
@entry = build.artifacts_metadata_entry(directory)
......@@ -34,8 +30,17 @@ class Projects::ArtifactsController < Projects::ApplicationController
end
end
def keep
build.keep_artifacts!
redirect_to namespace_project_build_path(project.namespace, project, build)
end
private
def validate_artifacts!
render_404 unless build.artifacts?
end
def build
@build ||= project.builds.find_by!(id: params[:build_id])
end
......
......@@ -20,7 +20,6 @@ module TimeHelper
end
end
def date_from_to(from, to)
"#{from.to_s(:short)} - #{to.to_s(:short)}"
end
......
......@@ -11,6 +11,8 @@ module Ci
scope :unstarted, ->() { where(runner_id: nil) }
scope :ignore_failures, ->() { where(allow_failure: false) }
scope :with_artifacts, ->() { where.not(artifacts_file: nil) }
scope :with_expired_artifacts, ->() { with_artifacts.where('artifacts_expire_at < ?', Time.now) }
mount_uploader :artifacts_file, ArtifactUploader
mount_uploader :artifacts_metadata, ArtifactUploader
......@@ -317,7 +319,7 @@ module Ci
end
def artifacts?
artifacts_file.exists?
!artifacts_expired? && artifacts_file.exists?
end
def artifacts_metadata?
......@@ -328,11 +330,15 @@ module Ci
Gitlab::Ci::Build::Artifacts::Metadata.new(artifacts_metadata.path, path, **options).to_entry
end
def erase_artifacts!
remove_artifacts_file!
remove_artifacts_metadata!
end
def erase(opts = {})
return false unless erasable?
remove_artifacts_file!
remove_artifacts_metadata!
erase_artifacts!
erase_trace!
update_erased!(opts[:erased_by])
end
......@@ -345,6 +351,25 @@ module Ci
!self.erased_at.nil?
end
def artifacts_expired?
artifacts_expire_at && artifacts_expire_at < Time.now
end
def artifacts_expire_in
artifacts_expire_at - Time.now if artifacts_expire_at
end
def artifacts_expire_in=(value)
self.artifacts_expire_at =
if value
Time.now + ChronicDuration.parse(value)
end
end
def keep_artifacts!
self.update(artifacts_expire_at: nil)
end
private
def erase_trace!
......@@ -352,7 +377,7 @@ module Ci
end
def update_erased!(user = nil)
self.update(erased_by: user, erased_at: Time.now)
self.update(erased_by: user, erased_at: Time.now, artifacts_expire_at: nil)
end
def yaml_variables
......
......@@ -5,10 +5,11 @@
= icon('cog')
= icon('caret-down')
%ul.dropdown-menu.dropdown-menu-align-right
= render 'layouts/nav/project_settings'
- access = @project.team.max_member_access(current_user.id)
- can_edit = can?(current_user, :admin_project, @project)
= render 'layouts/nav/project_settings', access: access, can_edit: can_edit
- if can_edit || access
%li.divider
- if can_edit
......@@ -18,7 +19,7 @@
- if access
%li
= link_to polymorphic_path([:leave, @project, :members]),
data: { confirm: leave_confirmation_message(@project) }, method: :delete do
data: { confirm: leave_confirmation_message(@project) }, method: :delete, title: 'Leave project' do
Leave Project
%div{ class: nav_control_class }
......
......@@ -3,43 +3,43 @@
= link_to namespace_project_project_members_path(@project.namespace, @project), title: 'Members', class: 'team-tab tab' do
%span
Members
- if @project.allowed_to_share_with_group?
= nav_link(controller: :group_links) do
= link_to namespace_project_group_links_path(@project.namespace, @project), title: "Groups" do
%span
Groups
= nav_link(controller: :deploy_keys) do
= link_to namespace_project_deploy_keys_path(@project.namespace, @project), title: 'Deploy Keys' do
%span
Deploy Keys
= nav_link(controller: :hooks) do
= link_to namespace_project_hooks_path(@project.namespace, @project), title: 'Webhooks' do
%span
Webhooks
= nav_link(controller: :services) do
= link_to namespace_project_services_path(@project.namespace, @project), title: 'Services' do
%span
Services
= nav_link(controller: :protected_branches) do
= link_to namespace_project_protected_branches_path(@project.namespace, @project), title: 'Protected Branches' do
%span
Protected Branches
- if @project.builds_enabled?
= nav_link(controller: :runners) do
= link_to namespace_project_runners_path(@project.namespace, @project), title: 'Runners' do
- if access && can_edit
- if @project.allowed_to_share_with_group?
= nav_link(controller: :group_links) do
= link_to namespace_project_group_links_path(@project.namespace, @project), title: "Groups" do
%span
Groups
= nav_link(controller: :deploy_keys) do
= link_to namespace_project_deploy_keys_path(@project.namespace, @project), title: 'Deploy Keys' do
%span
Runners
= nav_link(controller: :variables) do
= link_to namespace_project_variables_path(@project.namespace, @project), title: 'Variables' do
Deploy Keys
= nav_link(controller: :hooks) do
= link_to namespace_project_hooks_path(@project.namespace, @project), title: 'Webhooks' do
%span
Variables
= nav_link(controller: :triggers) do
= link_to namespace_project_triggers_path(@project.namespace, @project), title: 'Triggers' do
Webhooks
= nav_link(controller: :services) do
= link_to namespace_project_services_path(@project.namespace, @project), title: 'Services' do
%span
Triggers
= nav_link(controller: :badges) do
= link_to namespace_project_badges_path(@project.namespace, @project), title: 'Badges' do
Services
= nav_link(controller: :protected_branches) do
= link_to namespace_project_protected_branches_path(@project.namespace, @project), title: 'Protected Branches' do
%span
Badges
Protected Branches
- if @project.builds_enabled?
= nav_link(controller: :runners) do
= link_to namespace_project_runners_path(@project.namespace, @project), title: 'Runners' do
%span
Runners
= nav_link(controller: :variables) do
= link_to namespace_project_variables_path(@project.namespace, @project), title: 'Variables' do
%span
Variables
= nav_link(controller: :triggers) do
= link_to namespace_project_triggers_path(@project.namespace, @project), title: 'Triggers' do
%span
Triggers
= nav_link(controller: :badges) do
= link_to namespace_project_badges_path(@project.namespace, @project), title: 'Badges' do
%span
Badges
......@@ -11,19 +11,33 @@
%p.build-detail-row
#{@build.coverage}%
- if can?(current_user, :read_build, @project) && @build.artifacts?
- if can?(current_user, :read_build, @project) && (@build.artifacts? || @build.artifacts_expired?)
.block{ class: ("block-first" if !@build.coverage) }
.title
Build artifacts
.btn-group.btn-group-justified{ role: :group }
= link_to download_namespace_project_build_artifacts_path(@project.namespace, @project, @build), class: 'btn btn-sm btn-default' do
Download
- if @build.artifacts_expired?
%p.build-detail-row
The artifacts were removed
#{time_ago_with_tooltip(@build.artifacts_expire_at)}
- elsif @build.artifacts_expire_at
%p.build-detail-row
The artifacts will be removed in
%span.js-artifacts-remove= @build.artifacts_expire_at
- if @build.artifacts_metadata?
= link_to browse_namespace_project_build_artifacts_path(@project.namespace, @project, @build), class: 'btn btn-sm btn-default' do
Browse
- if @build.artifacts?
.btn-group.btn-group-justified{ role: :group }
- if @build.artifacts_expire_at
= link_to keep_namespace_project_build_artifacts_path(@project.namespace, @project, @build), class: 'btn btn-sm btn-default', method: :post do
Keep
.block{ class: ("block-first" if !@build.coverage && !(can?(current_user, :read_build, @project) && @build.artifacts?)) }
= link_to download_namespace_project_build_artifacts_path(@project.namespace, @project, @build), class: 'btn btn-sm btn-default' do
Download
- if @build.artifacts_metadata?
= link_to browse_namespace_project_build_artifacts_path(@project.namespace, @project, @build), class: 'btn btn-sm btn-default' do
Browse
.block{ class: ("block-first" if !@build.coverage && !(can?(current_user, :read_build, @project) && (@build.artifacts? || @build.artifacts_expired?))) }
.title
Build details
- if @build.retryable?
......
class ExpireBuildArtifactsWorker
include Sidekiq::Worker
def perform
Rails.logger.info 'Cleaning old build artifacts'
builds = Ci::Build.with_expired_artifacts
builds.find_each(batch_size: 50).each do |build|
Rails.logger.debug "Removing artifacts build #{build.id}..."
build.erase_artifacts!
end
end
end
......@@ -164,6 +164,9 @@ production: &base
# Flag stuck CI builds as failed
stuck_ci_builds_worker:
cron: "0 0 * * *"
# Remove expired build artifacts
expire_build_artifacts_worker:
cron: "50 * * * *"
# Periodically run 'git fsck' on all repositories. If started more than
# once per hour you will have concurrent 'git fsck' jobs.
repository_check_worker:
......
......@@ -279,6 +279,9 @@ Settings['cron_jobs'] ||= Settingslogic.new({})
Settings.cron_jobs['stuck_ci_builds_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['stuck_ci_builds_worker']['cron'] ||= '0 0 * * *'
Settings.cron_jobs['stuck_ci_builds_worker']['job_class'] = 'StuckCiBuildsWorker'
Settings.cron_jobs['expire_build_artifacts_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['expire_build_artifacts_worker']['cron'] ||= '50 * * * *'
Settings.cron_jobs['expire_build_artifacts_worker']['job_class'] = 'ExpireBuildArtifactsWorker'
Settings.cron_jobs['repository_check_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['repository_check_worker']['cron'] ||= '20 * * * *'
Settings.cron_jobs['repository_check_worker']['job_class'] = 'RepositoryCheck::BatchWorker'
......
ChronicDuration.raise_exceptions = true
......@@ -727,6 +727,7 @@ Rails.application.routes.draw do
get :download
get :browse, path: 'browse(/*path)', format: false
get :file, path: 'file/*path', format: false
post :keep
end
end
......
# rubocop:disable all
class ConvertAwardNoteToEmojiAward < ActiveRecord::Migration
def change
def up
execute "INSERT INTO award_emoji (awardable_type, awardable_id, user_id, name, created_at, updated_at) (SELECT noteable_type, noteable_id, author_id, note, created_at, updated_at FROM notes WHERE is_award = true)"
disable_ddl_transaction!
def up
if Gitlab::Database.postgresql?
migrate_postgresql
else
migrate_mysql
end
end
def down
add_column :notes, :is_award, :boolean
# This migration does NOT move the awards on notes, if the table is dropped in another migration, these notes will be lost.
execute "INSERT INTO notes (noteable_type, noteable_id, author_id, note, created_at, updated_at, is_award) (SELECT awardable_type, awardable_id, user_id, name, created_at, updated_at, TRUE FROM award_emoji)"
end
def migrate_postgresql
connection.transaction do
execute 'LOCK notes IN EXCLUSIVE MODE'
execute "INSERT INTO award_emoji (awardable_type, awardable_id, user_id, name, created_at, updated_at) (SELECT noteable_type, noteable_id, author_id, note, created_at, updated_at FROM notes WHERE is_award = true)"
execute "DELETE FROM notes WHERE is_award = true"
remove_column :notes, :is_award, :boolean
end
end
def migrate_mysql
execute 'LOCK TABLES notes WRITE, award_emoji WRITE;'
execute 'INSERT INTO award_emoji (awardable_type, awardable_id, user_id, name, created_at, updated_at) (SELECT noteable_type, noteable_id, author_id, note, created_at, updated_at FROM notes WHERE is_award = true);'
execute "DELETE FROM notes WHERE is_award = true"
remove_column :notes, :is_award, :boolean
ensure
execute 'UNLOCK TABLES'
end
end
# rubocop:disable all
class RemoveNoteIsAward < ActiveRecord::Migration
def change
remove_column :notes, :is_award, :boolean
end
end
class AddArtifactsExpireDateToCiBuilds < ActiveRecord::Migration
def change
add_column :ci_builds, :artifacts_expire_at, :timestamp
end
end
......@@ -144,9 +144,9 @@ ActiveRecord::Schema.define(version: 20160610301627) do
t.text "commands"
t.integer "job_id"
t.string "name"
t.boolean "deploy", default: false
t.boolean "deploy", default: false
t.text "options"
t.boolean "allow_failure", default: false, null: false
t.boolean "allow_failure", default: false, null: false
t.string "stage"
t.integer "trigger_request_id"
t.integer "stage_idx"
......@@ -161,6 +161,7 @@ ActiveRecord::Schema.define(version: 20160610301627) do
t.text "artifacts_metadata"
t.integer "erased_by_id"
t.datetime "erased_at"
t.datetime "artifacts_expire_at"
end
add_index "ci_builds", ["commit_id", "stage_idx", "created_at"], name: "index_ci_builds_on_commit_id_and_stage_idx_and_created_at", using: :btree
......
......@@ -8,32 +8,39 @@ under [`/lib/api`](https://gitlab.com/gitlab-org/gitlab-ce/tree/master/lib/api).
Documentation for various API resources can be found separately in the
following locations:
- [Users](users.md)
- [Session](session.md)
- [Projects](projects.md) including setting Webhooks
- [Project Snippets](project_snippets.md)
- [Services](services.md)
- [Repositories](repositories.md)
- [Repository Files](repository_files.md)
- [Commits](commits.md)
- [Tags](tags.md)
- [Branches](branches.md)
- [Merge Requests](merge_requests.md)
- [Builds](builds.md)
- [Build triggers](build_triggers.md)
- [Build Variables](build_variables.md)
- [Commits](commits.md)
- [Deploy Keys](deploy_keys.md)
- [Groups](groups.md)
- [Issues](issues.md)
- [Keys](keys.md)
- [Labels](labels.md)
- [Merge Requests](merge_requests.md)
- [Milestones](milestones.md)
- [Notes](notes.md) (comments)
- [Deploy Keys](deploy_keys.md)
- [System Hooks](system_hooks.md)
- [Groups](groups.md)
- [Open source license templates](licenses.md)
- [Namespaces](namespaces.md)
- [Settings](settings.md)
- [Keys](keys.md)
- [Builds](builds.md)
- [Build triggers](build_triggers.md)
- [Build Variables](build_variables.md)
- [Notes](notes.md) (comments)
- [Projects](projects.md) including setting Webhooks
- [Project Snippets](project_snippets.md)
- [Repositories](repositories.md)
- [Repository Files](repository_files.md)
- [Runners](runners.md)
- [Open source license templates](licenses.md)
- [Services](services.md)
- [Session](session.md)
- [Settings](settings.md)
- [System Hooks](system_hooks.md)
- [Tags](tags.md)
- [Users](users.md)
### Internal CI API
The following documentation is for the [internal CI API](ci/README.md):
- [Builds](ci/builds.md)
- [Runners](ci/runners.md)
## Authentication
......
......@@ -21,85 +21,85 @@ Example of response
```json
[
{
"commit": {
"author_email": "admin@example.com",
"author_name": "Administrator",
"created_at": "2015-12-24T16:51:14.000+01:00",
"id": "0ff3ae198f8601a285adcf5c0fff204ee6fba5fd",
"message": "Test the CI integration.",
"short_id": "0ff3ae19",
"title": "Test the CI integration."
},
"coverage": null,
"created_at": "2015-12-24T15:51:21.802Z",
"artifacts_file": {
"filename": "artifacts.zip",
"size": 1000
},
"finished_at": "2015-12-24T17:54:27.895Z",
"id": 7,
"name": "teaspoon",
"ref": "master",
"runner": null,
"stage": "test",
"started_at": "2015-12-24T17:54:27.722Z",
"status": "failed",
"tag": false,
"user": {
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"bio": null,
"created_at": "2015-12-21T13:14:24.077Z",
"id": 1,
"is_admin": true,
"linkedin": "",
"name": "Administrator",
"skype": "",
"state": "active",
"twitter": "",
"username": "root",
"web_url": "http://gitlab.dev/u/root",
"website_url": ""
}
{
"commit": {
"author_email": "admin@example.com",
"author_name": "Administrator",
"created_at": "2015-12-24T16:51:14.000+01:00",
"id": "0ff3ae198f8601a285adcf5c0fff204ee6fba5fd",
"message": "Test the CI integration.",
"short_id": "0ff3ae19",
"title": "Test the CI integration."
},
"coverage": null,
"created_at": "2015-12-24T15:51:21.802Z",
"artifacts_file": {
"filename": "artifacts.zip",
"size": 1000
},
{
"commit": {
"author_email": "admin@example.com",
"author_name": "Administrator",
"created_at": "2015-12-24T16:51:14.000+01:00",
"id": "0ff3ae198f8601a285adcf5c0fff204ee6fba5fd",
"message": "Test the CI integration.",
"short_id": "0ff3ae19",
"title": "Test the CI integration."
},
"coverage": null,
"created_at": "2015-12-24T15:51:21.727Z",
"artifacts_file": null,
"finished_at": "2015-12-24T17:54:24.921Z",
"id": 6,
"name": "spinach:other",
"ref": "master",
"runner": null,
"stage": "test",
"started_at": "2015-12-24T17:54:24.729Z",
"status": "failed",
"tag": false,
"user": {
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"bio": null,
"created_at": "2015-12-21T13:14:24.077Z",
"id": 1,
"is_admin": true,
"linkedin": "",
"name": "Administrator",
"skype": "",
"state": "active",
"twitter": "",
"username": "root",
"web_url": "http://gitlab.dev/u/root",
"website_url": ""
}
"finished_at": "2015-12-24T17:54:27.895Z",
"id": 7,
"name": "teaspoon",
"ref": "master",
"runner": null,
"stage": "test",
"started_at": "2015-12-24T17:54:27.722Z",
"status": "failed",
"tag": false,
"user": {
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"bio": null,
"created_at": "2015-12-21T13:14:24.077Z",
"id": 1,
"is_admin": true,
"linkedin": "",
"name": "Administrator",
"skype": "",
"state": "active",
"twitter": "",
"username": "root",
"web_url": "http://gitlab.dev/u/root",
"website_url": ""
}
},
{
"commit": {
"author_email": "admin@example.com",
"author_name": "Administrator",
"created_at": "2015-12-24T16:51:14.000+01:00",
"id": "0ff3ae198f8601a285adcf5c0fff204ee6fba5fd",
"message": "Test the CI integration.",
"short_id": "0ff3ae19",
"title": "Test the CI integration."
},
"coverage": null,
"created_at": "2015-12-24T15:51:21.727Z",
"artifacts_file": null,
"finished_at": "2015-12-24T17:54:24.921Z",
"id": 6,
"name": "spinach:other",
"ref": "master",
"runner": null,
"stage": "test",
"started_at": "2015-12-24T17:54:24.729Z",
"status": "failed",
"tag": false,
"user": {
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"bio": null,
"created_at": "2015-12-21T13:14:24.077Z",
"id": 1,
"is_admin": true,
"linkedin": "",
"name": "Administrator",
"skype": "",
"state": "active",
"twitter": "",
"username": "root",
"web_url": "http://gitlab.dev/u/root",
"website_url": ""
}
}
]
```
......@@ -125,68 +125,68 @@ Example of response
```json
[
{
"commit": {
"author_email": "admin@example.com",
"author_name": "Administrator",
"created_at": "2015-12-24T16:51:14.000+01:00",
"id": "0ff3ae198f8601a285adcf5c0fff204ee6fba5fd",
"message": "Test the CI integration.",
"short_id": "0ff3ae19",
"title": "Test the CI integration."
},
"coverage": null,
"created_at": "2016-01-11T10:13:33.506Z",
"artifacts_file": null,
"finished_at": "2016-01-11T10:14:09.526Z",
"id": 69,
"name": "rubocop",
"ref": "master",
"runner": null,
"stage": "test",
"started_at": null,
"status": "canceled",
"tag": false,
"user": null
{
"commit": {
"author_email": "admin@example.com",
"author_name": "Administrator",
"created_at": "2015-12-24T16:51:14.000+01:00",
"id": "0ff3ae198f8601a285adcf5c0fff204ee6fba5fd",
"message": "Test the CI integration.",
"short_id": "0ff3ae19",
"title": "Test the CI integration."
},
{
"commit": {
"author_email": "admin@example.com",
"author_name": "Administrator",
"created_at": "2015-12-24T16:51:14.000+01:00",
"id": "0ff3ae198f8601a285adcf5c0fff204ee6fba5fd",
"message": "Test the CI integration.",
"short_id": "0ff3ae19",
"title": "Test the CI integration."
},
"coverage": null,
"created_at": "2015-12-24T15:51:21.957Z",
"artifacts_file": null,
"finished_at": "2015-12-24T17:54:33.913Z",
"id": 9,
"name": "brakeman",
"ref": "master",
"runner": null,
"stage": "test",
"started_at": "2015-12-24T17:54:33.727Z",
"status": "failed",
"tag": false,
"user": {
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"bio": null,
"created_at": "2015-12-21T13:14:24.077Z",
"id": 1,
"is_admin": true,
"linkedin": "",
"name": "Administrator",
"skype": "",
"state": "active",
"twitter": "",
"username": "root",
"web_url": "http://gitlab.dev/u/root",
"website_url": ""
}
"coverage": null,
"created_at": "2016-01-11T10:13:33.506Z",
"artifacts_file": null,
"finished_at": "2016-01-11T10:14:09.526Z",
"id": 69,
"name": "rubocop",
"ref": "master",
"runner": null,
"stage": "test",
"started_at": null,
"status": "canceled",
"tag": false,
"user": null
},
{
"commit": {
"author_email": "admin@example.com",
"author_name": "Administrator",
"created_at": "2015-12-24T16:51:14.000+01:00",
"id": "0ff3ae198f8601a285adcf5c0fff204ee6fba5fd",
"message": "Test the CI integration.",
"short_id": "0ff3ae19",
"title": "Test the CI integration."
},
"coverage": null,
"created_at": "2015-12-24T15:51:21.957Z",
"artifacts_file": null,
"finished_at": "2015-12-24T17:54:33.913Z",
"id": 9,
"name": "brakeman",
"ref": "master",
"runner": null,
"stage": "test",
"started_at": "2015-12-24T17:54:33.727Z",
"status": "failed",
"tag": false,
"user": {
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"bio": null,
"created_at": "2015-12-21T13:14:24.077Z",
"id": 1,
"is_admin": true,
"linkedin": "",
"name": "Administrator",
"skype": "",
"state": "active",
"twitter": "",
"username": "root",
"web_url": "http://gitlab.dev/u/root",
"website_url": ""
}
}
]
```
......@@ -211,42 +211,42 @@ Example of response
```json
{
"commit": {
"author_email": "admin@example.com",
"author_name": "Administrator",
"created_at": "2015-12-24T16:51:14.000+01:00",
"id": "0ff3ae198f8601a285adcf5c0fff204ee6fba5fd",
"message": "Test the CI integration.",
"short_id": "0ff3ae19",
"title": "Test the CI integration."
},
"coverage": null,
"created_at": "2015-12-24T15:51:21.880Z",
"artifacts_file": null,
"finished_at": "2015-12-24T17:54:31.198Z",
"id": 8,
"name": "rubocop",
"ref": "master",
"runner": null,
"stage": "test",
"started_at": "2015-12-24T17:54:30.733Z",
"status": "failed",
"tag": false,
"user": {
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"bio": null,
"created_at": "2015-12-21T13:14:24.077Z",
"id": 1,
"is_admin": true,
"linkedin": "",
"name": "Administrator",
"skype": "",
"state": "active",
"twitter": "",
"username": "root",
"web_url": "http://gitlab.dev/u/root",
"website_url": ""
}
"commit": {
"author_email": "admin@example.com",
"author_name": "Administrator",
"created_at": "2015-12-24T16:51:14.000+01:00",
"id": "0ff3ae198f8601a285adcf5c0fff204ee6fba5fd",
"message": "Test the CI integration.",
"short_id": "0ff3ae19",
"title": "Test the CI integration."
},
"coverage": null,
"created_at": "2015-12-24T15:51:21.880Z",
"artifacts_file": null,
"finished_at": "2015-12-24T17:54:31.198Z",
"id": 8,
"name": "rubocop",
"ref": "master",
"runner": null,
"stage": "test",
"started_at": "2015-12-24T17:54:30.733Z",
"status": "failed",
"tag": false,
"user": {
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"bio": null,
"created_at": "2015-12-21T13:14:24.077Z",
"id": 1,
"is_admin": true,
"linkedin": "",
"name": "Administrator",
"skype": "",
"state": "active",
"twitter": "",
"username": "root",
"web_url": "http://gitlab.dev/u/root",
"website_url": ""
}
}
```
......@@ -323,28 +323,28 @@ Example of response
```json
{
"commit": {
"author_email": "admin@example.com",
"author_name": "Administrator",
"created_at": "2015-12-24T16:51:14.000+01:00",
"id": "0ff3ae198f8601a285adcf5c0fff204ee6fba5fd",
"message": "Test the CI integration.",
"short_id": "0ff3ae19",
"title": "Test the CI integration."
},
"coverage": null,
"created_at": "2016-01-11T10:13:33.506Z",
"artifacts_file": null,
"finished_at": "2016-01-11T10:14:09.526Z",
"id": 69,
"name": "rubocop",
"ref": "master",
"runner": null,
"stage": "test",
"started_at": null,
"status": "canceled",
"tag": false,
"user": null
"commit": {
"author_email": "admin@example.com",
"author_name": "Administrator",
"created_at": "2015-12-24T16:51:14.000+01:00",
"id": "0ff3ae198f8601a285adcf5c0fff204ee6fba5fd",
"message": "Test the CI integration.",
"short_id": "0ff3ae19",
"title": "Test the CI integration."
},
"coverage": null,
"created_at": "2016-01-11T10:13:33.506Z",
"artifacts_file": null,
"finished_at": "2016-01-11T10:14:09.526Z",
"id": 69,
"name": "rubocop",
"ref": "master",
"runner": null,
"stage": "test",
"started_at": null,
"status": "canceled",
"tag": false,
"user": null
}
```
......@@ -369,28 +369,28 @@ Example of response
```json
{
"commit": {
"author_email": "admin@example.com",
"author_name": "Administrator",
"created_at": "2015-12-24T16:51:14.000+01:00",
"id": "0ff3ae198f8601a285adcf5c0fff204ee6fba5fd",
"message": "Test the CI integration.",
"short_id": "0ff3ae19",
"title": "Test the CI integration."
},
"coverage": null,
"created_at": "2016-01-11T10:13:33.506Z",
"artifacts_file": null,
"finished_at": null,
"id": 69,
"name": "rubocop",
"ref": "master",
"runner": null,
"stage": "test",
"started_at": null,
"status": "pending",
"tag": false,
"user": null
"commit": {
"author_email": "admin@example.com",
"author_name": "Administrator",
"created_at": "2015-12-24T16:51:14.000+01:00",
"id": "0ff3ae198f8601a285adcf5c0fff204ee6fba5fd",
"message": "Test the CI integration.",
"short_id": "0ff3ae19",
"title": "Test the CI integration."
},
"coverage": null,
"created_at": "2016-01-11T10:13:33.506Z",
"artifacts_file": null,
"finished_at": null,
"id": 69,
"name": "rubocop",
"ref": "master",
"runner": null,
"stage": "test",
"started_at": null,
"status": "pending",
"tag": false,
"user": null
}
```
......@@ -419,27 +419,77 @@ Example of response
```json
{
"commit": {
"author_email": "admin@example.com",
"author_name": "Administrator",
"created_at": "2015-12-24T16:51:14.000+01:00",
"id": "0ff3ae198f8601a285adcf5c0fff204ee6fba5fd",
"message": "Test the CI integration.",
"short_id": "0ff3ae19",
"title": "Test the CI integration."
},
"coverage": null,
"download_url": null,
"id": 69,
"name": "rubocop",
"ref": "master",
"runner": null,
"stage": "test",
"created_at": "2016-01-11T10:13:33.506Z",
"started_at": "2016-01-11T10:13:33.506Z",
"finished_at": "2016-01-11T10:15:10.506Z",
"status": "failed",
"tag": false,
"user": null
"commit": {
"author_email": "admin@example.com",
"author_name": "Administrator",
"created_at": "2015-12-24T16:51:14.000+01:00",
"id": "0ff3ae198f8601a285adcf5c0fff204ee6fba5fd",
"message": "Test the CI integration.",
"short_id": "0ff3ae19",
"title": "Test the CI integration."
},
"coverage": null,
"download_url": null,
"id": 69,
"name": "rubocop",
"ref": "master",
"runner": null,
"stage": "test",
"created_at": "2016-01-11T10:13:33.506Z",
"started_at": "2016-01-11T10:13:33.506Z",
"finished_at": "2016-01-11T10:15:10.506Z",
"status": "failed",
"tag": false,
"user": null
}
```
## Keep artifacts
Prevents artifacts from being deleted when expiration is set.
```
POST /projects/:id/builds/:build_id/artifacts/keep
```
Parameters
| Attribute | Type | Required | Description |
|-------------|---------|----------|---------------------|
| `id` | integer | yes | The ID of a project |
| `build_id` | integer | yes | The ID of a build |
Example request:
```
curl -X POST -H "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" "https://gitlab.example.com/api/v3/projects/1/builds/1/artifacts/keep"
```
Example response:
```json
{
"commit": {
"author_email": "admin@example.com",
"author_name": "Administrator",
"created_at": "2015-12-24T16:51:14.000+01:00",
"id": "0ff3ae198f8601a285adcf5c0fff204ee6fba5fd",
"message": "Test the CI integration.",
"short_id": "0ff3ae19",
"title": "Test the CI integration."
},
"coverage": null,
"download_url": null,
"id": 69,
"name": "rubocop",
"ref": "master",
"runner": null,
"stage": "test",
"created_at": "2016-01-11T10:13:33.506Z",
"started_at": "2016-01-11T10:13:33.506Z",
"finished_at": "2016-01-11T10:15:10.506Z",
"status": "failed",
"tag": false,
"user": null
}
```
# GitLab CI API
## Purpose
The main purpose of GitLab CI API is to provide the necessary data and context
for GitLab CI Runners.
All relevant information about the consumer API can be found in a
[separate document](../../api/README.md).
## API Prefix
The current CI API prefix is `/ci/api/v1`.
You need to prepend this prefix to all examples in this documentation, like:
```bash
GET /ci/api/v1/builds/:id/artifacts
```
## Resources
- [Builds](builds.md)
- [Runners](runners.md)
# Builds API
API used by runners to receive and update builds.
>**Note:**
This API is intended to be used only by Runners as their own
communication channel. For the consumer API see the
[Builds API](../builds.md).
## Authentication
This API uses two types of authentication:
1. Unique Runner's token, which is the token assigned to the Runner after it
has been registered.
2. Using the build authorization token.
This is the project's CI token, which can be found under the **Builds** section of
a project's settings. The build authorization token can be passed as a
parameter or as the value of the `BUILD-TOKEN` header.
These two methods of authentication are interchangeable.
## Builds
### Runs oldest pending build by runner
```
POST /ci/api/v1/builds/register
```
| Attribute | Type | Required | Description |
|-----------|---------|----------|---------------------|
| `token` | string | yes | Unique runner token |
```
curl -X POST "https://gitlab.example.com/ci/api/v1/builds/register" -F "token=t0k3n"
```
### Update details of an existing build
```
PUT /ci/api/v1/builds/:id
```
| Attribute | Type | Required | Description |
|-----------|---------|----------|----------------------|
| `id` | integer | yes | The ID of a build |
| `token` | string | yes | Unique runner token |
| `state` | string | no | The state of a build |
| `trace` | string | no | The trace of a build |
```
curl -X PUT "https://gitlab.example.com/ci/api/v1/builds/1234" -F "token=t0k3n" -F "state=running" -F "trace=Running git clone...\n"
```
### Incremental build trace update
Using this method you need to send the trace content as the request body. You also need to provide the `Content-Range` header
with the range of the sent trace part. Note that you need to send the parts in the proper order, so the beginning of each part
must start just after the end of the previous part. If you provide the wrong part, the GitLab CI API will return a `416
Range Not Satisfiable` response with a `Range: 0-X` header, where `X` is the current trace length.
For example, if you receive `Range: 0-11` in the response, then your next part must contain a `Content-Range: 11-...`
header and a trace part covered by this range.
For a valid update the API will return a `202` response with:
* `Build-Status: {status}` header containing current status of the build,
* `Range: 0-{length}` header with the current trace length.
```
PATCH /ci/api/v1/builds/:id/trace.txt
```
Parameters:
| Attribute | Type | Required | Description |
|-----------|---------|----------|----------------------|
| `id` | integer | yes | The ID of a build |
Headers:
| Attribute | Type | Required | Description |
|-----------------|---------|----------|-----------------------------------|
| `BUILD-TOKEN` | string | yes | The build authorization token |
| `Content-Range` | string | yes | Bytes range of trace that is sent |
```
curl -X PATCH "https://gitlab.example.com/ci/api/v1/builds/1234/trace.txt" -H "BUILD-TOKEN=build_t0k3n" -H "Content-Range=0-21" -d "Running git clone...\n"
```
### Upload artifacts to build
```
POST /ci/api/v1/builds/:id/artifacts
```
| Attribute | Type | Required | Description |
|-----------|---------|----------|-------------------------------|
| `id` | integer | yes | The ID of a build |
| `token` | string | yes | The build authorization token |
| `file` | mixed | yes | Artifacts file |
```
curl -X POST "https://gitlab.example.com/ci/api/v1/builds/1234/artifacts" -F "token=build_t0k3n" -F "file=@/path/to/file"
```
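This release also adds an optional `expire_in` form field to this endpoint (see the `lib/ci/api/builds.rb` change later in this commit), mirroring `artifacts:expire_in` in `.gitlab-ci.yml`. A minimal sketch, reusing the placeholder token above and the `7d` format from the code comment:
```
curl -X POST "https://gitlab.example.com/ci/api/v1/builds/1234/artifacts" -F "token=build_t0k3n" -F "file=@/path/to/file" -F "expire_in=7d"
```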
### Download the artifacts file from build
```
GET /ci/api/v1/builds/:id/artifacts
```
| Attribute | Type | Required | Description |
|-----------|---------|----------|-------------------------------|
| `id` | integer | yes | The ID of a build |
| `token` | string | yes | The build authorization token |
```
curl "https://gitlab.example.com/ci/api/v1/builds/1234/artifacts" -F "token=build_t0k3n"
```
### Remove the artifacts file from build
```
DELETE /ci/api/v1/builds/:id/artifacts
```
| Attribute | Type | Required | Description |
|-----------|---------|----------|-------------------------------|
| `id` | integer | yes | The ID of a build |
| `token` | string | yes | The build authorization token |
```
curl -X DELETE "https://gitlab.example.com/ci/api/v1/builds/1234/artifacts" -F "token=build_t0k3n"
```
# Runners API
API used by Runners to register and delete themselves.
>**Note:**
This API is intended to be used only by Runners as their own
communication channel. For the consumer API see the
[new Runners API](../runners.md).
## Authentication
This API uses two types of authentication:
1. Unique Runner's token, which is the token assigned to the Runner after it
has been registered.
2. Using Runners' registration token.
This is a token that can be found in the project's settings.
It can also be found in the **Admin > Runners** settings area.
There are two types of tokens you can pass: a shared Runner registration
token or a project-specific registration token.
## Register a new runner
Used to make GitLab CI aware of available runners.
```sh
POST /ci/api/v1/runners/register
```
| Attribute | Type | Required | Description |
| --------- | ------- | --------- | ----------- |
| `token` | string | yes | Runner's registration token |
Example request:
```sh
curl -X POST "https://gitlab.example.com/ci/api/v1/runners/register" -F "token=t0k3n"
```
## Delete a Runner
Used to remove a Runner.
```sh
DELETE /ci/api/v1/runners/delete
```
| Attribute | Type | Required | Description |
| --------- | ------- | --------- | ----------- |
| `token` | string | yes | Runner's registration token |
Example request:
```sh
curl -X DELETE "https://gitlab.example.com/ci/api/v1/runners/delete" -F "token=t0k3n"
```
......@@ -14,5 +14,5 @@
- [Trigger builds through the API](triggers/README.md)
- [Build artifacts](build_artifacts/README.md)
- [User permissions](permissions/README.md)
- [API](api/README.md)
- [API](../../api/ci/README.md)
- [CI services (linked docker containers)](services/README.md)
# GitLab CI API
## Purpose
Main purpose of GitLab CI API is to provide necessary data and context for
GitLab CI Runners.
For consumer API take a look at this [documentation](../../api/README.md) where
you will find all relevant information.
## API Prefix
Current CI API prefix is `/ci/api/v1`.
You need to prepend this prefix to all examples in this documentation, like:
GET /ci/api/v1/builds/:id/artifacts
## Resources
- [Builds](builds.md)
- [Runners](runners.md)
This document was moved to a [new location](../../api/ci/README.md).
# Builds API
API used by runners to receive and update builds.
_**Note:** This API is intended to be used only by Runners as their own
communication channel. For the consumer API see the
[Builds API](../../api/builds.md)._
## Authentication
This API uses two types of authentication:
1. Unique runner's token
Token assigned to runner after it has been registered.
2. Using build authorization token
This is project's CI token that can be found in Continuous Integration
project settings.
Build authorization token can be passed as a parameter or a value of
`BUILD-TOKEN` header. This method are interchangeable.
## Builds
### Runs oldest pending build by runner
```
POST /ci/api/v1/builds/register
```
| Attribute | Type | Required | Description |
|-----------|---------|----------|---------------------|
| `token` | string | yes | Unique runner token |
```
curl -X POST "https://gitlab.example.com/ci/api/v1/builds/register" -F "token=t0k3n"
```
### Update details of an existing build
```
PUT /ci/api/v1/builds/:id
```
| Attribute | Type | Required | Description |
|-----------|---------|----------|----------------------|
| `id` | integer | yes | The ID of a project |
| `token` | string | yes | Unique runner token |
| `state` | string | no | The state of a build |
| `trace` | string | no | The trace of a build |
```
curl -X PUT "https://gitlab.example.com/ci/api/v1/builds/1234" -F "token=t0k3n" -F "state=running" -F "trace=Running git clone...\n"
```
### Incremental build trace update
Using this method you need to send trace content as a request body. You also need to provide the `Content-Range` header
with a range of sent trace part. Note that you need to send parts in the proper order, so the begining of the part
must start just after the end of the previous part. If you provide the wrong part, then GitLab CI API will return `416
Range Not Satisfiable` response with a header `Range: 0-X`, where `X` is the current trace length.
For example, if you receive `Range: 0-11` in the response, then your next part must contain a `Content-Range: 11-...`
header and a trace part covered by this range.
For a valid update API will return `202` response with:
* `Build-Status: {status}` header containing current status of the build,
* `Range: 0-{length}` header with the current trace length.
```
PATCH /ci/api/v1/builds/:id/trace.txt
```
Parameters:
| Attribute | Type | Required | Description |
|-----------|---------|----------|----------------------|
| `id` | integer | yes | The ID of a build |
Headers:
| Attribute | Type | Required | Description |
|-----------------|---------|----------|-----------------------------------|
| `BUILD-TOKEN` | string | yes | The build authorization token |
| `Content-Range` | string | yes | Bytes range of trace that is sent |
```
curl -X PATCH "https://gitlab.example.com/ci/api/v1/builds/1234/trace.txt" -H "BUILD-TOKEN=build_t0k3n" -H "Content-Range=0-21" -d "Running git clone...\n"
```
### Upload artifacts to build
```
POST /ci/api/v1/builds/:id/artifacts
```
| Attribute | Type | Required | Description |
|-----------|---------|----------|-------------------------------|
| `id` | integer | yes | The ID of a build |
| `token` | string | yes | The build authorization token |
| `file` | mixed | yes | Artifacts file |
```
curl -X POST "https://gitlab.example.com/ci/api/v1/builds/1234/artifacts" -F "token=build_t0k3n" -F "file=@/path/to/file"
```
### Download the artifacts file from build
```
GET /ci/api/v1/builds/:id/artifacts
```
| Attribute | Type | Required | Description |
|-----------|---------|----------|-------------------------------|
| `id` | integer | yes | The ID of a build |
| `token` | string | yes | The build authorization token |
```
curl "https://gitlab.example.com/ci/api/v1/builds/1234/artifacts" -F "token=build_t0k3n"
```
### Remove the artifacts file from build
```
DELETE /ci/api/v1/builds/:id/artifacts
```
| Attribute | Type | Required | Description |
|-----------|---------|----------|-------------------------------|
| ` id` | integer | yes | The ID of a build |
| `token` | string | yes | The build authorization token |
```
curl -X DELETE "https://gitlab.example.com/ci/api/v1/builds/1234/artifacts" -F "token=build_t0k3n"
```
This document was moved to a [new location](../../api/ci/builds.md).
# Runners API
API used by runners to register and delete themselves.
_**Note:** This API is intended to be used only by Runners as their own
communication channel. For the consumer API see the
[new Runners API](../../api/runners.md)._
## Authentication
This API uses two types of authentication:
1. Unique runner's token
Token assigned to runner after it has been registered.
2. Using runners' registration token
This is a token that can be found in project's settings.
It can be also found in Admin area &raquo; Runners settings.
There are two types of tokens you can pass - shared runner registration
token or project specific registration token.
## Runners
### Register a new runner
Used to make GitLab CI aware of available runners.
POST /ci/api/v1/runners/register
Parameters:
* `token` (required) - Registration token
### Delete a runner
Used to remove runner.
DELETE /ci/api/v1/runners/delete
Parameters:
* `token` (required) - Unique runner token
This document was moved to a [new location](../../api/ci/runners.md).
......@@ -4,14 +4,14 @@ GitLab CI allows you to use Docker Engine to build and test docker-based project
**This also allows you to use `docker-compose` and other docker-enabled tools.**
This is one of new trends in Continuous Integration/Deployment to:
One of the new trends in Continuous Integration/Deployment is to:
1. create application image,
1. run test against created image,
1. push image to remote registry,
1. deploy server from pushed image
1. create an application image,
1. run tests against the created image,
1. push image to a remote registry, and
1. deploy to a server from the pushed image.
It's also useful in case when your application already has the `Dockerfile` that can be used to create and test image:
It's also useful when your application already has the `Dockerfile` that can be used to create and test an image:
```bash
$ docker build -t my-image dockerfiles/
$ docker run my-docker-image /script/to/run/tests
......@@ -19,24 +19,25 @@ $ docker tag my-image my-registry:5000/my-image
$ docker push my-registry:5000/my-image
```
However, this requires special configuration of GitLab Runner to enable `docker` support during build.
**This requires running GitLab Runner in privileged mode which can be harmful when untrusted code is run.**
This requires special configuration of GitLab Runner to enable `docker` support during builds.
There are two methods to enable the use of `docker build` and `docker run` during build.
## Runner Configuration
## 1. Use shell executor
There are three methods to enable the use of `docker build` and `docker run` during builds, each with its own tradeoffs.
### Use shell executor
The simplest approach is to install GitLab Runner in `shell` execution mode.
GitLab Runner then executes build scripts as `gitlab-runner` user.
GitLab Runner then executes build scripts as the `gitlab-runner` user.
1. Install [GitLab Runner](https://gitlab.com/gitlab-org/gitlab-ci-multi-runner/#installation).
1. During GitLab Runner installation, select `shell` as the method of executing build scripts, or use the command:
```bash
$ sudo gitlab-runner register -n \
$ sudo gitlab-ci-multi-runner register -n \
--url https://gitlab.com/ci \
--token RUNNER_TOKEN \
--registration-token REGISTRATION_TOKEN \
--executor shell
--description "My Runner"
```
......@@ -70,16 +71,18 @@ GitLab Runner then executes build scripts as `gitlab-runner` user.
5. You can now use the `docker` command and install `docker-compose` if needed.
6. However, by adding `gitlab-runner` to `docker` group you are effectively granting `gitlab-runner` full root permissions.
For more information please checkout [On Docker security: `docker` group considered harmful](https://www.andreas-jung.com/contents/on-docker-security-docker-group-considered-harmful).
> **Note:**
* By adding `gitlab-runner` to the `docker` group you are effectively granting `gitlab-runner` full root permissions.
For more information please read [On Docker security: `docker` group considered harmful](https://www.andreas-jung.com/contents/on-docker-security-docker-group-considered-harmful).
## 2. Use docker-in-docker executor
### Use docker-in-docker executor
The second approach is to use the special Docker image with all tools installed
The second approach is to use the special docker-in-docker (dind)
[Docker image](https://hub.docker.com/_/docker/) with all tools installed
(`docker` and `docker-compose`) and run the build script in context of that
image in privileged mode.
In order to do that follow the steps:
In order to do that, follow the steps:
1. Install [GitLab Runner](https://gitlab.com/gitlab-org/gitlab-ci-multi-runner/#installation).
......@@ -87,9 +90,9 @@ In order to do that follow the steps:
mode:
```bash
sudo gitlab-runner register -n \
sudo gitlab-ci-multi-runner register -n \
--url https://gitlab.com/ci \
--token RUNNER_TOKEN \
--registration-token REGISTRATION_TOKEN \
--executor docker \
--description "My Docker Runner" \
--docker-image "docker:latest" \
......@@ -119,11 +122,7 @@ In order to do that follow the steps:
Insecure = false
```
If you want to use the Shared Runners available on your GitLab CE/EE
installation in order to build Docker images, then make sure that your
Shared Runners configuration has the `privileged` mode set to `true`.
1. You can now use `docker` from build script:
1. You can now use `docker` in the build script (note the inclusion of the `docker:dind` service):
```yaml
image: docker:latest
......@@ -141,14 +140,177 @@ In order to do that follow the steps:
- docker run my-docker-image /script/to/run/tests
```
1. However, by enabling `--docker-privileged` you are effectively disabling all
the security mechanisms of containers and exposing your host to privilege
escalation which can lead to container breakout.
For more information, check out the official Docker documentation on
[Runtime privilege and Linux capabilities][docker-cap].
Docker-in-Docker works well, and is the recommended configuration, but it is not without its own challenges:
* By enabling `--docker-privileged`, you are effectively disabling all of
the security mechanisms of containers and exposing your host to privilege
escalation which can lead to container breakout. For more information, check out the official Docker documentation on
[Runtime privilege and Linux capabilities][docker-cap].
* Using docker-in-docker, each build is in a clean environment without the past
history. Concurrent builds work fine because every build gets its own instance of the Docker engine, so they won't conflict with each other. But this also means builds can be slower because there's no caching of layers.
* By default, `docker:dind` uses `--storage-driver vfs`, which is the slowest form
offered.
An example project using this approach can be found here: https://gitlab.com/gitlab-examples/docker.
### Use Docker socket binding
The third approach is to bind-mount `/var/run/docker.sock` into the container so that docker is available in the context of that image.
In order to do that, follow the steps:
1. Install [GitLab Runner](https://gitlab.com/gitlab-org/gitlab-ci-multi-runner/#installation).
1. Register GitLab Runner from the command line to use `docker` and share `/var/run/docker.sock`:
```bash
sudo gitlab-ci-multi-runner register -n \
--url https://gitlab.com/ci \
--registration-token REGISTRATION_TOKEN \
--executor docker \
--description "My Docker Runner" \
--docker-image "docker:latest" \
--docker-volumes /var/run/docker.sock:/var/run/docker.sock
```
The above command will register a new Runner to use the special
`docker:latest` image which is provided by Docker. **Notice that it's using
the Docker daemon of the Runner itself, and any containers spawned by docker commands will be siblings of the Runner rather than children of the runner.** This may have complications and limitations that are unsuitable for your workflow.
The above command will create a `config.toml` entry similar to this:
```
[[runners]]
url = "https://gitlab.com/ci"
token = REGISTRATION_TOKEN
executor = "docker"
[runners.docker]
tls_verify = false
image = "docker:latest"
privileged = false
disable_cache = false
volumes = ["/var/run/docker.sock", "/cache"]
[runners.cache]
Insecure = false
```
1. You can now use `docker` in the build script (note that you don't need to include the `docker:dind` service as when using the Docker in Docker executor):
```yaml
image: docker:latest
before_script:
- docker info
build:
stage: build
script:
- docker build -t my-docker-image .
- docker run my-docker-image /script/to/run/tests
```
While the above method avoids using Docker in privileged mode, you should be aware of the following implications:
* By sharing the docker daemon, you are effectively disabling all
the security mechanisms of containers and exposing your host to privilege
escalation which can lead to container breakout. For example, if a project
ran `docker rm -f $(docker ps -a -q)` it would remove the GitLab Runner
containers.
* Concurrent builds may not work; if your tests
create containers with specific names, they may conflict with each other.
* Sharing files and directories from the source repo into containers may not
work as expected since volume mounting is done in the context of the host
machine, not the build container.
e.g. `docker run --rm -t -i -v $(pwd)/src:/home/app/src test-image:latest run_app_tests`
## Using the GitLab Container Registry
> **Note:**
This feature requires GitLab 8.8 and GitLab Runner 1.2.
Once you've built a Docker image, you can push it up to the built-in [GitLab Container Registry](../../container_registry/README.md). For example, if you're using
docker-in-docker on your runners, this is how your `.gitlab-ci.yml` could look:
```yaml
build:
image: docker:latest
services:
- docker:dind
stage: build
script:
- docker login -u gitlab-ci-token -p $CI_BUILD_TOKEN registry.example.com
- docker build -t registry.example.com/group/project:latest .
- docker push registry.example.com/group/project:latest
```
You have to use the credentials of the special `gitlab-ci-token` user with its
password stored in `$CI_BUILD_TOKEN` in order to push to the Registry connected
to your project. This allows you to automate building and deployment of your
Docker images.
Here's a more elaborate example that splits up the tasks into 4 pipeline stages,
including two tests that run in parallel. The build is stored in the container
registry and used by subsequent stages, downloading the image
when needed. Changes to `master` also get tagged as `latest` and deployed using
an application-specific deploy script:
```yaml
image: docker:latest
services:
- docker:dind
stages:
- build
- test
- release
- deploy
variables:
CONTAINER_TEST_IMAGE: registry.example.com/my-group/my-project:$CI_BUILD_REF_NAME
CONTAINER_RELEASE_IMAGE: registry.example.com/my-group/my-project:latest
before_script:
- docker login -u gitlab-ci-token -p $CI_BUILD_TOKEN registry.example.com
build:
stage: build
script:
- docker build --pull -t $CONTAINER_TEST_IMAGE .
- docker push $CONTAINER_TEST_IMAGE
test1:
stage: test
script:
- docker pull $CONTAINER_TEST_IMAGE
- docker run $CONTAINER_TEST_IMAGE /script/to/run/tests
test2:
stage: test
script:
- docker pull $CONTAINER_TEST_IMAGE
- docker run $CONTAINER_TEST_IMAGE /script/to/run/another/test
release-image:
stage: release
script:
- docker pull $CONTAINER_TEST_IMAGE
- docker tag $CONTAINER_TEST_IMAGE $CONTAINER_RELEASE_IMAGE
- docker push $CONTAINER_RELEASE_IMAGE
only:
- master
deploy:
stage: deploy
script:
- ./deploy.sh
only:
- master
```
Some things you should be aware of when using the Container Registry:
* You must log in to the container registry before running commands. Putting this in `before_script` will run it before each build job.
* Using `docker build --pull` makes sure that Docker fetches any changes to base images before building just in case your cache is stale. It takes slightly longer, but means you don’t get stuck without security patches to base images.
* Doing an explicit `docker pull` before each `docker run` makes sure to fetch the latest image that was just built. This is especially important if you are using multiple runners that cache images locally. Using the git SHA in your image tag makes this less necessary since each build will be unique and you shouldn't ever have a stale image, but it's still possible if you re-build a given commit after a dependency has changed (a sketch of SHA-based tagging follows this list).
* You don't want to build directly to `latest` in case there are multiple builds happening simultaneously.
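A minimal sketch of the SHA-based tagging mentioned above, assuming the predefined `$CI_BUILD_REF` variable (the commit SHA in GitLab 8.x) and the same placeholder registry used in the earlier examples:
```yaml
build:
  image: docker:latest
  services:
    - docker:dind
  stage: build
  script:
    - docker login -u gitlab-ci-token -p $CI_BUILD_TOKEN registry.example.com
    # Tag with the commit SHA so each pipeline produces a unique, traceable image
    - docker build --pull -t registry.example.com/group/project:$CI_BUILD_REF .
    - docker push registry.example.com/group/project:$CI_BUILD_REF
```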
[docker-in-docker]: https://blog.docker.com/2013/09/docker-can-now-run-within-docker/
[docker-cap]: https://docs.docker.com/engine/reference/run/#runtime-privilege-and-linux-capabilities
......@@ -23,7 +23,7 @@ To use GitLab Runner with docker you need to register a new runner to use the
`docker` executor:
```bash
gitlab-runner register \
gitlab-ci-multi-runner register \
--url "https://gitlab.com/" \
--registration-token "PROJECT_REGISTRATION_TOKEN" \
--description "docker-ruby-2.1" \
......
......@@ -263,10 +263,10 @@ terminal execute:
```bash
# Check using docker executor
gitlab-runner exec docker test:app
gitlab-ci-multi-runner exec docker test:app
# Check using shell executor
gitlab-runner exec shell test:app
gitlab-ci-multi-runner exec shell test:app
```
## Example project
......
......@@ -63,10 +63,10 @@ instance.
Now simply register the runner as any runner:
```
sudo gitlab-runner register
sudo gitlab-ci-multi-runner register
```
Shared runners are enabled by default as of GitLab 8.2, but can be disabled with the
Shared runners are enabled by default as of GitLab 8.2, but can be disabled with the
`DISABLE SHARED RUNNERS` button. Previous versions of GitLab defaulted shared runners to
disabled.
......@@ -93,7 +93,7 @@ setup a specific runner for this project.
To register the runner, run the command below and follow instructions:
```
sudo gitlab-runner register
sudo gitlab-ci-multi-runner register
```
### Making an existing Shared Runner Specific
......
......@@ -31,6 +31,7 @@ If you want a quick introduction to GitLab CI, follow our
- [artifacts](#artifacts)
- [artifacts:name](#artifacts-name)
- [artifacts:when](#artifacts-when)
- [artifacts:expire_in](#artifacts-expire_in)
- [dependencies](#dependencies)
- [before_script and after_script](#before_script-and-after_script)
- [Hidden jobs](#hidden-jobs)
......@@ -678,6 +679,40 @@ job:
when: on_failure
```
#### artifacts:expire_in
>**Note:**
Introduced in GitLab 8.9 and GitLab Runner v1.3.0.
`artifacts:expire_in` is used to remove uploaded artifacts after the specified time.
By default, artifacts are stored on GitLab forever.
`expire_in` allows you to specify how long artifacts should live before they are removed,
counting from the moment they are uploaded and stored on GitLab.
After the artifacts are uploaded, you can use the **Keep** button on the build page to keep them forever.
Expired artifacts are removed by a background job that runs every hour, and they are no longer accessible once the expiry date has passed.
The value of `expire_in` is an elapsed time. Examples of parsable values:
- '3 mins 4 sec'
- '2 hrs 20 min'
- '2h20min'
- '6 mos 1 day'
- '47 yrs 6 mos and 4d'
- '3 weeks and 2 days'
---
**Example configurations**
To expire artifacts 1 week after they are uploaded:
```yaml
job:
artifacts:
expire_in: 1 week
```
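Artifacts can also be kept programmatically through the keep endpoint added to the Builds API in this release; a hedged example, reusing the placeholder token, project ID, and build ID from the API documentation:
```bash
curl -X POST -H "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" "https://gitlab.example.com/api/v3/projects/1/builds/1/artifacts/keep"
```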
### dependencies
>**Note:**
......
......@@ -79,27 +79,8 @@ delete them.
This feature requires GitLab 8.8 and GitLab Runner 1.2.
Make sure that your GitLab Runner is configured to allow building docker images.
You have to check the [Using Docker Build documentation](../../ci/docker/using_docker_build.md).
You can use [docker:dind](https://hub.docker.com/_/docker/) to build your images,
and this is how `.gitlab-ci.yml` should look like:
```
build_image:
image: docker:git
services:
- docker:dind
stage: build
script:
- docker login -u gitlab-ci-token -p $CI_BUILD_TOKEN registry.example.com
- docker build -t registry.example.com/group/project:latest .
- docker push registry.example.com/group/project:latest
```
You have to use the credentials of the special `gitlab-ci-token` user with its
password stored in `$CI_BUILD_TOKEN` in order to push to the Registry connected
to your project. This allows you to automate building and deployment of your
Docker images.
You have to check the [Using Docker Build documentation](../ci/docker/using_docker_build.md).
Then see the CI documentation on [Using the GitLab Container Registry](../ci/docker/using_docker_build.md#using-the-gitlab-container-registry).
## Limitations
......
......@@ -97,15 +97,16 @@ def #{name}(#{args_signature})
trans = Gitlab::Metrics::Instrumentation.transaction
if trans
start = Time.now
retval = super
duration = (Time.now - start) * 1000.0
start = Time.now
cpu_start = Gitlab::Metrics::System.cpu_time
retval = super
duration = (Time.now - start) * 1000.0
if duration >= Gitlab::Metrics.method_call_threshold
trans.increment(:method_duration, duration)
cpu_duration = Gitlab::Metrics::System.cpu_time - cpu_start
trans.add_metric(Gitlab::Metrics::Instrumentation::SERIES,
{ duration: duration },
{ duration: duration, cpu_duration: cpu_duration },
method: #{label.inspect})
end
......
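The gist of the change above is that each instrumented call now records CPU time next to wall-clock time. A standalone sketch of that measurement pattern using only the Ruby standard library (this is not GitLab's actual `Gitlab::Metrics::System.cpu_time` implementation, just an illustration of the idea):

```ruby
# Measure wall-clock and CPU time around a block, both in milliseconds.
def timed
  wall_start = Time.now
  cpu_start  = Process.clock_gettime(Process::CLOCK_PROCESS_CPUTIME_ID, :float_millisecond)

  result = yield

  wall_ms = (Time.now - wall_start) * 1000.0
  cpu_ms  = Process.clock_gettime(Process::CLOCK_PROCESS_CPUTIME_ID, :float_millisecond) - cpu_start

  [result, wall_ms, cpu_ms]
end

_, wall, cpu = timed { 100_000.times { Math.sqrt(2) } }
puts format('wall: %.2fms, cpu: %.2fms', wall, cpu)
```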
......@@ -166,6 +166,26 @@ module API
present build, with: Entities::Build,
user_can_download_artifacts: can?(current_user, :download_build_artifacts, user_project)
end
# Keep the artifacts to prevent them from being deleted
#
# Parameters:
# id (required) - the id of a project
# build_id (required) - The ID of a build
# Example Request:
# POST /projects/:id/builds/:build_id/artifacts/keep
post ':id/builds/:build_id/artifacts/keep' do
authorize_update_builds!
build = get_build(params[:build_id])
return not_found!(build) unless build && build.artifacts?
build.keep_artifacts!
status 200
present build, with: Entities::Build,
user_can_download_artifacts: can?(current_user, :read_build, user_project)
end
end
helpers do
......
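A hedged usage sketch for the new endpoint above, using only the Ruby standard library. The host, project ID, build ID, token, and the `/api/v3` prefix are placeholders assumed for illustration; the request must be authorized (`authorize_update_builds!`):

```ruby
require 'net/http'
require 'uri'

# POST /projects/:id/builds/:build_id/artifacts/keep
# Placeholder host, IDs and token -- adjust for your instance.
uri = URI('https://gitlab.example.com/api/v3/projects/42/builds/1337/artifacts/keep')

request = Net::HTTP::Post.new(uri)
request['PRIVATE-TOKEN'] = 'YOUR_PRIVATE_TOKEN'  # assumed standard token auth

response = Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https') do |http|
  http.request(request)
end

# 200 when the build's artifacts are kept, 404 when the build has no artifacts.
puts response.code
```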
module Banzai
module Pipeline
class DescriptionPipeline < FullPipeline
WHITELIST = Banzai::Filter::SanitizationFilter::LIMITED.deep_dup.merge(
elements: Banzai::Filter::SanitizationFilter::LIMITED[:elements] - %w(pre code img ol ul li)
)
def self.transform_context(context)
super(context).merge(
# SanitizationFilter
whitelist: whitelist
whitelist: WHITELIST
)
end
private
def self.whitelist
# Descriptions are more heavily sanitized, allowing only a few elements.
# See http://git.io/vkuAN
whitelist = Banzai::Filter::SanitizationFilter::LIMITED
whitelist[:elements] -= %w(pre code img ol ul li)
whitelist
end
end
end
end
......@@ -114,6 +114,7 @@ module Ci
# id (required) - The ID of a build
# token (required) - The build authorization token
# file (required) - Artifacts file
# expire_in (optional) - Specify when artifacts should expire (ex. 7d)
# Parameters (accelerated by GitLab Workhorse):
# file.path - path to locally stored body (generated by Workhorse)
# file.name - real filename as sent in Content-Disposition
......@@ -145,6 +146,7 @@ module Ci
build.artifacts_file = artifacts
build.artifacts_metadata = metadata
build.artifacts_expire_in = params['expire_in']
if build.save
present(build, with: Entities::BuildDetails)
......
......@@ -20,7 +20,7 @@ module Ci
expose :name, :token, :stage
expose :project_id
expose :project_name
expose :artifacts_file, using: ArtifactFile, if: lambda { |build, opts| build.artifacts? }
expose :artifacts_file, using: ArtifactFile, if: ->(build, _) { build.artifacts? }
end
class BuildDetails < Build
......@@ -29,6 +29,7 @@ module Ci
expose :before_sha
expose :allow_git_fetch
expose :token
expose :artifacts_expire_at, if: ->(build, _) { build.artifacts? }
expose :options do |model|
model.options
......
......@@ -2,6 +2,8 @@ module Ci
class GitlabCiYamlProcessor
class ValidationError < StandardError; end
include Gitlab::Ci::Config::Node::ValidationHelpers
DEFAULT_STAGES = %w(build test deploy)
DEFAULT_STAGE = 'test'
ALLOWED_YAML_KEYS = [:before_script, :after_script, :image, :services, :types, :stages, :variables, :cache]
......@@ -9,12 +11,14 @@ module Ci
:allow_failure, :type, :stage, :when, :artifacts, :cache,
:dependencies, :before_script, :after_script, :variables]
ALLOWED_CACHE_KEYS = [:key, :untracked, :paths]
ALLOWED_ARTIFACTS_KEYS = [:name, :untracked, :paths, :when]
ALLOWED_ARTIFACTS_KEYS = [:name, :untracked, :paths, :when, :expire_in]
attr_reader :before_script, :after_script, :image, :services, :path, :cache
attr_reader :after_script, :image, :services, :path, :cache
def initialize(config, path = nil)
@config = Gitlab::Ci::Config.new(config).to_hash
@ci_config = Gitlab::Ci::Config.new(config)
@config = @ci_config.to_hash
@path = path
initial_parsing
......@@ -52,7 +56,6 @@ module Ci
private
def initial_parsing
@before_script = @config[:before_script] || []
@after_script = @config[:after_script]
@image = @config[:image]
@services = @config[:services]
......@@ -80,7 +83,7 @@ module Ci
{
stage_idx: stages.index(job[:stage]),
stage: job[:stage],
commands: [job[:before_script] || @before_script, job[:script]].flatten.join("\n"),
commands: [job[:before_script] || [@ci_config.before_script], job[:script]].flatten.compact.join("\n"),
tag_list: job[:tags] || [],
name: name,
only: job[:only],
......@@ -99,6 +102,10 @@ module Ci
end
def validate!
unless @ci_config.valid?
raise ValidationError, @ci_config.errors.first
end
validate_global!
@jobs.each do |name, job|
......@@ -109,10 +116,6 @@ module Ci
end
def validate_global!
unless validate_array_of_strings(@before_script)
raise ValidationError, "before_script should be an array of strings"
end
unless @after_script.nil? || validate_array_of_strings(@after_script)
raise ValidationError, "after_script should be an array of strings"
end
......@@ -282,6 +285,10 @@ module Ci
if job[:artifacts][:when] && !job[:artifacts][:when].in?(%w[on_success on_failure always])
raise ValidationError, "#{name} job: artifacts:when parameter should be on_success, on_failure or always"
end
if job[:artifacts][:expire_in] && !validate_duration(job[:artifacts][:expire_in])
raise ValidationError, "#{name} job: artifacts:expire_in parameter should be a duration"
end
end
def validate_job_dependencies!(name, job)
......@@ -300,22 +307,6 @@ module Ci
end
end
def validate_array_of_strings(values)
values.is_a?(Array) && values.all? { |value| validate_string(value) }
end
def validate_variables(variables)
variables.is_a?(Hash) && variables.all? { |key, value| validate_string(key) && validate_string(value) }
end
def validate_string(value)
value.is_a?(String) || value.is_a?(Symbol)
end
def validate_boolean(value)
value.in?([true, false])
end
def process?(only_params, except_params, ref, tag, trigger_request)
if only_params.present?
return false unless matching?(only_params, ref, tag, trigger_request)
......
module Gitlab
module Ci
##
# Base GitLab CI Configuration facade
#
class Config
class LoaderError < StandardError; end
delegate :valid?, :errors, to: :@global
##
# Temporary delegations that should be removed after refactoring
#
delegate :before_script, to: :@global
def initialize(config)
loader = Loader.new(config)
@config = loader.load!
@config = Loader.new(config).load!
@global = Node::Global.new(@config)
@global.process!
end
def to_hash
......
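A short usage sketch of the facade above, assuming the GitLab application code is loaded (for example in a Rails console); the YAML content is made up for illustration:

```ruby
yaml = <<-YAML
before_script:
  - ls
  - pwd
YAML

config = Gitlab::Ci::Config.new(yaml)

config.valid?         # => true   (delegated to the Global node)
config.errors         # => []
config.before_script  # => "ls\npwd" (temporary delegation noted above)
config.to_hash        # parsed configuration hash
```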
module Gitlab
module Ci
class Config
module Node
##
# This mixin is responsible for adding a DSL whose purpose is to
# simplify the process of adding child nodes.
#
# It can be used only if the parent node is a configuration entry that
# holds a hash as its configuration value, for example:
#
# job:
# script: ...
# artifacts: ...
#
module Configurable
extend ActiveSupport::Concern
def allowed_nodes
self.class.allowed_nodes || {}
end
private
def prevalidate!
unless @value.is_a?(Hash)
@errors << 'should be a configuration entry with hash value'
end
end
def create_node(key, factory)
factory.with(value: @value[key])
factory.nullify! unless @value.has_key?(key)
factory.create!
end
class_methods do
def allowed_nodes
Hash[@allowed_nodes.map { |key, factory| [key, factory.dup] }]
end
private
def allow_node(symbol, entry_class, metadata)
factory = Node::Factory.new(entry_class)
.with(description: metadata[:description])
define_method(symbol) do
raise Entry::InvalidError unless valid?
@nodes[symbol].try(:value)
end
(@allowed_nodes ||= {}).merge!(symbol => factory)
end
end
end
end
end
end
end
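A hypothetical node class sketching how the DSL above is meant to be used; `HypotheticalJob` is made up for illustration, while `Script` and the generated accessor behaviour come from the entries defined in this change:

```ruby
module Gitlab
  module Ci
    class Config
      module Node
        # Hypothetical entry that holds a hash value and declares one child node.
        class HypotheticalJob < Entry
          include Configurable

          # allow_node stores a Factory for the child entry class and defines a
          # `before_script` accessor that raises Entry::InvalidError when the
          # node is not valid.
          allow_node :before_script, Script,
            description: 'Script that will be executed before each job.'
        end
      end
    end
  end
end
```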
module Gitlab
module Ci
class Config
module Node
##
# Base abstract class for each configuration entry node.
#
class Entry
class InvalidError < StandardError; end
attr_accessor :description
def initialize(value)
@value = value
@nodes = {}
@errors = []
prevalidate!
end
def process!
return if leaf?
return unless valid?
compose!
nodes.each(&:process!)
nodes.each(&:validate!)
end
def nodes
@nodes.values
end
def valid?
errors.none?
end
def leaf?
allowed_nodes.none?
end
def errors
@errors + nodes.map(&:errors).flatten
end
def allowed_nodes
{}
end
def validate!
raise NotImplementedError
end
def value
raise NotImplementedError
end
private
def prevalidate!
end
def compose!
allowed_nodes.each do |key, essence|
@nodes[key] = create_node(key, essence)
end
end
def create_node(key, essence)
raise NotImplementedError
end
end
end
end
end
end
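The abstract contract above leaves `value`, `validate!` and (for non-leaf nodes) `create_node` to subclasses. A minimal hypothetical leaf entry, modelled on the `Script` and `Null` entries that follow in this diff:

```ruby
module Gitlab
  module Ci
    class Config
      module Node
        # Hypothetical leaf entry wrapping a single boolean flag.
        class HypotheticalFlag < Entry
          def value
            !!@value
          end

          def validate!
            unless @value.in?([true, false])
              @errors << 'flag should be a boolean value'
            end
          end
        end
      end
    end
  end
end
```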
module Gitlab
module Ci
class Config
module Node
##
# Factory class responsible for fabricating node entry objects.
#
# It uses the Fluent Interface pattern to set all necessary attributes.
#
class Factory
class InvalidFactory < StandardError; end
def initialize(entry_class)
@entry_class = entry_class
@attributes = {}
end
def with(attributes)
@attributes.merge!(attributes)
self
end
def nullify!
@entry_class = Node::Null
self
end
def create!
raise InvalidFactory unless @attributes.has_key?(:value)
@entry_class.new(@attributes[:value]).tap do |entry|
entry.description = @attributes[:description]
end
end
end
end
end
end
end
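A console sketch of the fluent interface, matching the factory spec further down; the values are illustrative and assume the GitLab code is loaded:

```ruby
factory = Gitlab::Ci::Config::Node::Factory.new(Gitlab::Ci::Config::Node::Script)

entry = factory
  .with(value: ['ls', 'pwd'])
  .with(description: 'test description')
  .create!

entry.value        # => "ls\npwd"
entry.description  # => "test description"

# Without a value the factory refuses to build an entry:
# Gitlab::Ci::Config::Node::Factory
#   .new(Gitlab::Ci::Config::Node::Script)
#   .create!  # raises Factory::InvalidFactory
```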
module Gitlab
module Ci
class Config
module Node
##
# This class represents a global entry - the root node of the entire
# GitLab CI configuration file.
#
class Global < Entry
include Configurable
allow_node :before_script, Script,
description: 'Script that will be executed before each job.'
end
end
end
end
end
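A console sketch of the global node in isolation, mirroring the specs near the end of this diff; the hash values are illustrative:

```ruby
global = Gitlab::Ci::Config::Node::Global.new(before_script: ['ls', 'pwd'])
global.process!          # composes child nodes from the declared factories
global.nodes.count       # => 1
global.before_script     # => "ls\npwd"

invalid = Gitlab::Ci::Config::Node::Global.new(before_script: 'ls')
invalid.process!
invalid.valid?           # => false
invalid.errors           # => ["before_script should be an array of strings"]
invalid.before_script    # raises Entry::InvalidError
```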
module Gitlab
module Ci
class Config
module Node
##
# This class represents a configuration entry that is not used
# in the configuration file.
#
# This implements Null Object pattern.
#
class Null < Entry
def value
nil
end
def validate!
nil
end
def method_missing(*)
nil
end
end
end
end
end
end
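A quick sketch of the null object in a console, matching the spec further down:

```ruby
entry = Gitlab::Ci::Config::Node::Null.new(nil)

entry.value        # => nil
entry.leaf?        # => true -- it declares no child nodes
entry.any_method   # => nil  -- method_missing swallows unknown calls
```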
module Gitlab
module Ci
class Config
module Node
##
# Entry that represents a script.
#
# Each element in the value array is a command that will be executed
# by GitLab Runner. Currently we concatenate these commands with a
# newline character as a separator, which is compatible with the
# implementation in Runner.
#
class Script < Entry
include ValidationHelpers
def value
@value.join("\n")
end
def validate!
unless validate_array_of_strings(@value)
@errors << 'before_script should be an array of strings'
end
end
end
end
end
end
end
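And the script entry itself in isolation, a console sketch mirroring its spec:

```ruby
script = Gitlab::Ci::Config::Node::Script.new(['ls', 'pwd'])
script.validate!
script.valid?   # => true
script.value    # => "ls\npwd" -- commands joined with a newline

broken = Gitlab::Ci::Config::Node::Script.new('ls')
broken.validate!
broken.valid?   # => false
broken.errors   # => ["before_script should be an array of strings"]
```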
module Gitlab
module Ci
class Config
module Node
module ValidationHelpers
private
def validate_duration(value)
value.is_a?(String) && ChronicDuration.parse(value)
rescue ChronicDuration::DurationParseError
false
end
def validate_array_of_strings(values)
values.is_a?(Array) && values.all? { |value| validate_string(value) }
end
def validate_variables(variables)
variables.is_a?(Hash) &&
variables.all? { |key, value| validate_string(key) && validate_string(value) }
end
def validate_string(value)
value.is_a?(String) || value.is_a?(Symbol)
end
def validate_boolean(value)
value.in?([true, false])
end
end
end
end
end
end
......@@ -149,13 +149,16 @@ module Gitlab
trans = Gitlab::Metrics::Instrumentation.transaction
if trans
start = Time.now
retval = super
duration = (Time.now - start) * 1000.0
start = Time.now
cpu_start = Gitlab::Metrics::System.cpu_time
retval = super
duration = (Time.now - start) * 1000.0
if duration >= Gitlab::Metrics.method_call_threshold
cpu_duration = Gitlab::Metrics::System.cpu_time - cpu_start
trans.add_metric(Gitlab::Metrics::Instrumentation::SERIES,
{ duration: duration },
{ duration: duration, cpu_duration: cpu_duration },
method: #{label.inspect})
end
......
......@@ -97,6 +97,42 @@ describe "Builds" do
end
end
context 'Artifacts expire date' do
before do
@build.update_attributes(artifacts_file: artifacts_file, artifacts_expire_at: expire_at)
visit namespace_project_build_path(@project.namespace, @project, @build)
end
context 'no expire date defined' do
let(:expire_at) { nil }
it 'does not have the Keep button' do
expect(page).not_to have_content 'Keep'
end
end
context 'when expire date is defined' do
let(:expire_at) { Time.now + 7.days }
it 'keeps artifacts when Keep button is clicked' do
expect(page).to have_content 'The artifacts will be removed'
click_link 'Keep'
expect(page).not_to have_link 'Keep'
expect(page).not_to have_content 'The artifacts will be removed'
end
end
context 'when artifacts expired' do
let(:expire_at) { Time.now - 7.days }
it 'does not have the Keep button' do
expect(page).to have_content 'The artifacts were removed'
expect(page).not_to have_link 'Keep'
end
end
end
context 'Build raw trace' do
before do
@build.run!
......
......@@ -573,7 +573,12 @@ module Ci
services: ["mysql"],
before_script: ["pwd"],
rspec: {
artifacts: { paths: ["logs/", "binaries/"], untracked: true, name: "custom_name" },
artifacts: {
paths: ["logs/", "binaries/"],
untracked: true,
name: "custom_name",
expire_in: "7d"
},
script: "rspec"
}
})
......@@ -595,7 +600,8 @@ module Ci
artifacts: {
name: "custom_name",
paths: ["logs/", "binaries/"],
untracked: true
untracked: true,
expire_in: "7d"
}
},
when: "on_success",
......@@ -992,6 +998,20 @@ EOT
end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: artifacts:when parameter should be on_success, on_failure or always")
end
it "returns errors if job artifacts:expire_in is not an a string" do
config = YAML.dump({ types: ["build", "test"], rspec: { script: "test", artifacts: { expire_in: 1 } } })
expect do
GitlabCiYamlProcessor.new(config)
end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: artifacts:expire_in parameter should be a duration")
end
it "returns errors if job artifacts:expire_in is not an a valid duration" do
config = YAML.dump({ types: ["build", "test"], rspec: { script: "test", artifacts: { expire_in: "7 elephants" } } })
expect do
GitlabCiYamlProcessor.new(config)
end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: artifacts:expire_in parameter should be a duration")
end
it "returns errors if job artifacts:untracked is not an array of strings" do
config = YAML.dump({ types: ["build", "test"], rspec: { script: "test", artifacts: { untracked: "string" } } })
expect do
......
require 'spec_helper'
describe Gitlab::Ci::Config::Node::Configurable do
let(:node) { Class.new }
before do
node.include(described_class)
end
describe 'allowed nodes' do
before do
node.class_eval do
allow_node :object, Object, description: 'test object'
end
end
describe '#allowed_nodes' do
it 'has valid allowed nodes' do
expect(node.allowed_nodes).to include :object
end
it 'creates a node factory' do
expect(node.allowed_nodes[:object])
.to be_an_instance_of Gitlab::Ci::Config::Node::Factory
end
it 'returns a duplicated factory object' do
first_factory = node.allowed_nodes[:object]
second_factory = node.allowed_nodes[:object]
expect(first_factory).not_to be_equal(second_factory)
end
end
end
end
require 'spec_helper'
describe Gitlab::Ci::Config::Node::Factory do
describe '#create!' do
let(:factory) { described_class.new(entry_class) }
let(:entry_class) { Gitlab::Ci::Config::Node::Script }
context 'when setting value' do
it 'creates entry with valid value' do
entry = factory
.with(value: ['ls', 'pwd'])
.create!
expect(entry.value).to eq "ls\npwd"
end
context 'when setting description' do
it 'creates entry with description' do
entry = factory
.with(value: ['ls', 'pwd'])
.with(description: 'test description')
.create!
expect(entry.value).to eq "ls\npwd"
expect(entry.description).to eq 'test description'
end
end
end
context 'when not setting value' do
it 'raises error' do
expect { factory.create! }.to raise_error(
Gitlab::Ci::Config::Node::Factory::InvalidFactory
)
end
end
context 'when creating a null entry' do
it 'creates a null entry' do
entry = factory
.with(value: nil)
.nullify!
.create!
expect(entry).to be_an_instance_of Gitlab::Ci::Config::Node::Null
end
end
end
end
require 'spec_helper'
describe Gitlab::Ci::Config::Node::Global do
let(:global) { described_class.new(hash) }
describe '#allowed_nodes' do
it 'can contain global config keys' do
expect(global.allowed_nodes).to include :before_script
end
it 'returns a hash' do
expect(global.allowed_nodes).to be_a Hash
end
end
context 'when hash is valid' do
let(:hash) do
{ before_script: ['ls', 'pwd'] }
end
describe '#process!' do
before { global.process! }
it 'creates nodes hash' do
expect(global.nodes).to be_an Array
end
it 'creates node object for each entry' do
expect(global.nodes.count).to eq 1
end
it 'creates node object using valid class' do
expect(global.nodes.first)
.to be_an_instance_of Gitlab::Ci::Config::Node::Script
end
it 'sets correct description for nodes' do
expect(global.nodes.first.description)
.to eq 'Script that will be executed before each job.'
end
end
describe '#leaf?' do
it 'is not leaf' do
expect(global).not_to be_leaf
end
end
describe '#before_script' do
context 'when processed' do
before { global.process! }
it 'returns correct script' do
expect(global.before_script).to eq "ls\npwd"
end
end
context 'when not processed' do
it 'returns nil' do
expect(global.before_script).to be nil
end
end
end
end
context 'when hash is not valid' do
before { global.process! }
let(:hash) do
{ before_script: 'ls' }
end
describe '#valid?' do
it 'is not valid' do
expect(global).not_to be_valid
end
end
describe '#errors' do
it 'reports errors from child nodes' do
expect(global.errors)
.to include 'before_script should be an array of strings'
end
end
describe '#before_script' do
it 'raises error' do
expect { global.before_script }.to raise_error(
Gitlab::Ci::Config::Node::Entry::InvalidError
)
end
end
end
context 'when value is not a hash' do
let(:hash) { [] }
describe '#valid?' do
it 'is not valid' do
expect(global).not_to be_valid
end
end
end
end
require 'spec_helper'
describe Gitlab::Ci::Config::Node::Null do
let(:entry) { described_class.new(nil) }
describe '#leaf?' do
it 'is leaf node' do
expect(entry).to be_leaf
end
end
describe '#any_method' do
it 'responds with nil' do
expect(entry.any_method).to be nil
end
end
describe '#value' do
it 'returns nil' do
expect(entry.value).to be nil
end
end
end
require 'spec_helper'
describe Gitlab::Ci::Config::Node::Script do
let(:entry) { described_class.new(value) }
describe '#validate!' do
before { entry.validate! }
context 'when entry value is correct' do
let(:value) { ['ls', 'pwd'] }
describe '#value' do
it 'returns concatenated command' do
expect(entry.value).to eq "ls\npwd"
end
end
describe '#errors' do
it 'does not append errors' do
expect(entry.errors).to be_empty
end
end
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
end
context 'when entry value is not correct' do
let(:value) { 'ls' }
describe '#errors' do
it 'saves errors' do
expect(entry.errors)
.to include /should be an array of strings/
end
end
describe '#valid?' do
it 'is not valid' do
expect(entry).not_to be_valid
end
end
end
end
end
......@@ -29,17 +29,43 @@ describe Gitlab::Ci::Config do
expect(config.to_hash).to eq hash
end
describe '#valid?' do
it 'is valid' do
expect(config).to be_valid
end
it 'has no errors' do
expect(config.errors).to be_empty
end
end
end
context 'when config is invalid' do
let(:yml) { '// invalid' }
describe '.new' do
it 'raises error' do
expect { config }.to raise_error(
Gitlab::Ci::Config::Loader::FormatError,
/Invalid configuration format/
)
context 'when yml is incorrect' do
let(:yml) { '// invalid' }
describe '.new' do
it 'raises error' do
expect { config }.to raise_error(
Gitlab::Ci::Config::Loader::FormatError,
/Invalid configuration format/
)
end
end
end
context 'when config logic is incorrect' do
let(:yml) { 'before_script: "ls"' }
describe '#valid?' do
it 'is not valid' do
expect(config).not_to be_valid
end
it 'has errors' do
expect(config.errors).not_to be_empty
end
end
end
end
......
......@@ -57,7 +57,7 @@ describe Gitlab::Metrics::Instrumentation do
and_return(transaction)
expect(transaction).to receive(:add_metric).
with(described_class::SERIES, an_instance_of(Hash),
with(described_class::SERIES, hash_including(:duration, :cpu_duration),
method: 'Dummy.foo')
@dummy.foo
......@@ -137,7 +137,7 @@ describe Gitlab::Metrics::Instrumentation do
and_return(transaction)
expect(transaction).to receive(:add_metric).
with(described_class::SERIES, an_instance_of(Hash),
with(described_class::SERIES, hash_including(:duration, :cpu_duration),
method: 'Dummy#bar')
@dummy.new.bar
......
......@@ -397,9 +397,34 @@ describe Ci::Build, models: true do
context 'artifacts archive exists' do
let(:build) { create(:ci_build, :artifacts) }
it { is_expected.to be_truthy }
context 'is expired' do
before { build.update(artifacts_expire_at: Time.now - 7.days) }
it { is_expected.to be_falsy }
end
context 'is not expired' do
before { build.update(artifacts_expire_at: Time.now + 7.days) }
it { is_expected.to be_truthy }
end
end
end
describe '#artifacts_expired?' do
subject { build.artifacts_expired? }
context 'is expired' do
before { build.update(artifacts_expire_at: Time.now - 7.days) }
it { is_expected.to be_truthy }
end
context 'is not expired' do
before { build.update(artifacts_expire_at: Time.now + 7.days) }
it { is_expected.to be_falsey }
end
end
describe '#artifacts_metadata?' do
subject { build.artifacts_metadata? }
......@@ -412,7 +437,6 @@ describe Ci::Build, models: true do
it { is_expected.to be_truthy }
end
end
describe '#repo_url' do
let(:build) { create(:ci_build) }
let(:project) { build.project }
......@@ -427,6 +451,50 @@ describe Ci::Build, models: true do
it { is_expected.to include(project.web_url[7..-1]) }
end
describe '#artifacts_expire_in' do
subject { build.artifacts_expire_in }
it { is_expected.to be_nil }
context 'when artifacts_expire_at is specified' do
let(:expire_at) { Time.now + 7.days }
before { build.artifacts_expire_at = expire_at }
it { is_expected.to be_within(5).of(expire_at - Time.now) }
end
end
describe '#artifacts_expire_in=' do
subject { build.artifacts_expire_in }
it 'when assigning valid duration' do
build.artifacts_expire_in = '7 days'
is_expected.to be_within(10).of(7.days.to_i)
end
it 'when assigning invalid duration' do
expect { build.artifacts_expire_in = '7 elephants' }.to raise_error(ChronicDuration::DurationParseError)
is_expected.to be_nil
end
it 'when resetting value' do
build.artifacts_expire_in = nil
is_expected.to be_nil
end
end
describe '#keep_artifacts!' do
let(:build) { create(:ci_build, artifacts_expire_at: Time.now + 7.days) }
it 'resets expire_at' do
build.keep_artifacts!
expect(build.artifacts_expire_at).to be_nil
end
end
describe '#depends_on_builds' do
let!(:build) { create(:ci_build, pipeline: pipeline, name: 'build', stage_idx: 0, stage: 'build') }
let!(:rspec_test) { create(:ci_build, pipeline: pipeline, name: 'rspec', stage_idx: 1, stage: 'test') }
......
......@@ -241,4 +241,30 @@ describe API::API, api: true do
end
end
end
describe 'POST /projects/:id/builds/:build_id/artifacts/keep' do
before do
post api("/projects/#{project.id}/builds/#{build.id}/artifacts/keep", user)
end
context 'artifacts did not expire' do
let(:build) do
create(:ci_build, :trace, :artifacts, :success,
project: project, pipeline: pipeline, artifacts_expire_at: Time.now + 7.days)
end
it 'keeps artifacts' do
expect(response.status).to eq 200
expect(build.reload.artifacts_expire_at).to be_nil
end
end
context 'no artifacts' do
let(:build) { create(:ci_build, project: project, pipeline: pipeline) }
it 'responds with not found' do
expect(response.status).to eq 404
end
end
end
end
......@@ -364,6 +364,42 @@ describe Ci::API::API do
end
end
context 'with an expire date' do
let!(:artifacts) { file_upload }
let(:post_data) do
{ 'file.path' => artifacts.path,
'file.name' => artifacts.original_filename,
'expire_in' => expire_in }
end
before do
post(post_url, post_data, headers_with_token)
end
context 'with an expire_in given' do
let(:expire_in) { '7 days' }
it 'updates when specified' do
build.reload
expect(response.status).to eq(201)
expect(json_response['artifacts_expire_at']).not_to be_empty
expect(build.artifacts_expire_at).to be_within(5.minutes).of(Time.now + 7.days)
end
end
context 'with no expire_in given' do
let(:expire_in) { nil }
it 'ignores if not specified' do
build.reload
expect(response.status).to eq(201)
expect(json_response['artifacts_expire_at']).to be_nil
expect(build.artifacts_expire_at).to be_nil
end
end
end
context "artifacts file is too large" do
it "should fail to post too large artifact" do
stub_application_setting(max_artifacts_size: 0)
......
require 'spec_helper'
describe ExpireBuildArtifactsWorker do
include RepoHelpers
let(:worker) { described_class.new }
describe '#perform' do
before { build }
subject! { worker.perform }
context 'with expired artifacts' do
let(:build) { create(:ci_build, :artifacts, artifacts_expire_at: Time.now - 7.days) }
it 'does expire' do
expect(build.reload.artifacts_expired?).to be_truthy
end
it 'does remove files' do
expect(build.reload.artifacts_file.exists?).to be_falsey
end
end
context 'with not yet expired artifacts' do
let(:build) { create(:ci_build, :artifacts, artifacts_expire_at: Time.now + 7.days) }
it 'does not expire' do
expect(build.reload.artifacts_expired?).to be_falsey
end
it 'does not remove files' do
expect(build.reload.artifacts_file.exists?).to be_truthy
end
end
context 'without expire date' do
let(:build) { create(:ci_build, :artifacts) }
it 'does not expire' do
expect(build.reload.artifacts_expired?).to be_falsey
end
it 'does not remove files' do
expect(build.reload.artifacts_file.exists?).to be_truthy
end
end
context 'for expired artifacts' do
let(:build) { create(:ci_build, artifacts_expire_at: Time.now - 7.days) }
it 'is still expired' do
expect(build.reload.artifacts_expired?).to be_truthy
end
end
end
end