Commit 999118f0 authored by Kamil Trzciński's avatar Kamil Trzciński

Merge branch 'feature/sm/artifacts-trace-ee' into 'master'

EE: Trace as artifacts (FileStorage and ObjectStorage)

Closes #4171

See merge request gitlab-org/gitlab-ee!4258
parent b14c484b
class Projects::JobsController < Projects::ApplicationController
prepend EE::Projects::JobsController
before_action :build, except: [:index, :cancel_all]
before_action :authorize_read_build!,
......
......@@ -20,6 +20,7 @@ module Ci
has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_one :job_artifacts_archive, -> { where(file_type: Ci::JobArtifact.file_types[:archive]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
has_one :job_artifacts_metadata, -> { where(file_type: Ci::JobArtifact.file_types[:metadata]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
has_one :job_artifacts_trace, -> { where(file_type: Ci::JobArtifact.file_types[:trace]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
# The "environment" field for builds is a String, and is the unexpanded name
def persisted_environment
......
......@@ -16,9 +16,12 @@ module Ci
end
end
delegate :open, :exists?, to: :file
enum file_type: {
archive: 1,
metadata: 2
metadata: 2,
trace: 3
}
def self.artifacts_size_for(project)
......
......@@ -39,7 +39,6 @@ module ArtifactMigratable
end
def artifacts_size
read_attribute(:artifacts_size).to_i +
job_artifacts_archive&.size.to_i + job_artifacts_metadata&.size.to_i
read_attribute(:artifacts_size).to_i + job_artifacts.sum(:size).to_i
end
end
module Ci
class CreateTraceArtifactService < BaseService
def execute(job)
return if job.job_artifacts_trace
job.trace.read do |stream|
if stream.file?
job.create_job_artifacts_trace!(
project: job.project,
file_type: :trace,
file: stream)
end
end
end
end
end
class JobArtifactUploader < GitlabUploader
prepend EE::JobArtifactUploader
extend Workhorse::UploadPath
include ObjectStorage::Concern
......@@ -14,6 +15,12 @@ class JobArtifactUploader < GitlabUploader
dynamic_segment
end
def open
raise 'Only File System is supported' unless file_storage?
File.open(path, "rb") if path
end
private
def dynamic_segment
......
......@@ -43,6 +43,7 @@
- pipeline_creation:run_pipeline_schedule
- pipeline_default:build_coverage
- pipeline_default:build_trace_sections
- pipeline_default:create_trace_artifact
- pipeline_default:pipeline_metrics
- pipeline_default:pipeline_notification
- pipeline_default:update_head_pipeline_for_merge_request
......
......@@ -6,9 +6,13 @@ class BuildFinishedWorker
def perform(build_id)
Ci::Build.find_by(id: build_id).try do |build|
BuildTraceSectionsWorker.perform_async(build.id)
# We execute these in sync because they access local trace files; running them here reduces IO
BuildTraceSectionsWorker.new.perform(build.id)
BuildCoverageWorker.new.perform(build.id)
BuildHooksWorker.new.perform(build.id)
# We execute these async because they are two independent operations that can run after TraceSections and Coverage
BuildHooksWorker.perform_async(build.id)
CreateTraceArtifactWorker.perform_async(build.id)
end
end
end
class CreateTraceArtifactWorker
include ApplicationWorker
include PipelineQueue
def perform(job_id)
Ci::Build.preload(:project, :user).find_by(id: job_id).try do |job|
Ci::CreateTraceArtifactService.new(job.project, job.user).execute(job)
end
end
end
---
title: Save traces as artifacts
merge_request: 16702
author:
type: changed
......@@ -16,7 +16,7 @@ There are many places where file uploading is used, according to contexts:
- Project avatars
- Issues/MR/Notes Markdown attachments
- Issues/MR/Notes Legacy Markdown attachments
- CI Build Artifacts
- CI Artifacts (archive, metadata, trace)
- LFS Objects
......@@ -35,7 +35,7 @@ they are still not 100% standardized. You can see them below:
| Project avatars | yes | uploads/-/system/project/avatar/:id/:filename | `AvatarUploader` | Project |
| Issues/MR/Notes Markdown attachments | yes | uploads/:project_path_with_namespace/:random_hex/:filename | `FileUploader` | Project |
| Issues/MR/Notes Legacy Markdown attachments | no | uploads/-/system/note/attachment/:id/:filename | `AttachmentUploader` | Note |
| CI Artifacts (CE) | yes | shared/artifacts/:year_:month/:project_id/:id | `ArtifactUploader` | Ci::Build |
| CI Artifacts (CE) | yes | shared/artifacts/:disk_hash[0..1]/:disk_hash[2..3]/:disk_hash/:year_:month_:date/:job_id/:job_artifact_id (:disk_hash is SHA256 digest of project_id) | `JobArtifactUploader` | Ci::JobArtifact |
| LFS Objects (CE) | yes | shared/lfs-objects/:hex/:hex/:object_hash | `LfsObjectUploader` | LfsObject |
CI Artifacts and LFS Objects behave differently in CE and EE. In CE they inherit the `GitlabUploader`
......
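For illustration, a minimal Ruby sketch of how the `JobArtifactUploader` disk path shown in the table above could be composed. The method name and arguments are hypothetical; only the layout (SHA256 digest of `project_id`, date segment, job id, artifact id, relative to `shared/artifacts/`) comes from the table.

require 'digest'
# Hypothetical helper illustrating the documented path layout; not part of the codebase.
def job_artifact_disk_path(project_id, job_id, job_artifact_id, created_at)
  disk_hash = Digest::SHA2.hexdigest(project_id.to_s) # :disk_hash is the SHA256 digest of project_id
  date_segment = created_at.strftime('%Y_%-m_%-d')    # :year_:month_:date
  File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
            date_segment, job_id.to_s, job_artifact_id.to_s)
end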
module EE
module Projects
module JobsController
extend ActiveSupport::Concern
include SendFileUpload
def raw
if trace_artifact_file
send_upload(trace_artifact_file,
send_params: raw_send_params,
redirect_params: raw_redirect_params)
else
super
end
end
private
def raw_send_params
{ type: 'text/plain; charset=utf-8', disposition: 'inline' }
end
def raw_redirect_params
{ query: { 'response-content-type' => 'text/plain; charset=utf-8', 'response-content-disposition' => 'inline' } }
end
def trace_artifact_file
@trace_artifact_file ||= build.job_artifacts_trace&.file
end
end
end
end
......@@ -9,11 +9,11 @@ module EE
prepended do
after_destroy :log_geo_event
scope :with_files_stored_locally, -> { where(file_store: [nil, JobArtifactUploader::Store::LOCAL]) }
scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
end
def local_store?
[nil, JobArtifactUploader::Store::LOCAL].include?(self.file_store)
[nil, ::JobArtifactUploader::Store::LOCAL].include?(self.file_store)
end
private
......
module EE
module JobArtifactUploader
extend ActiveSupport::Concern
def open
if file_storage?
super
else
::Gitlab::Ci::Trace::HttpIO.new(url, size) if url
end
end
end
end
##
# This class is compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html)
# source: https://gitlab.com/snippets/1685610
module Gitlab
module Ci
class Trace
class HttpIO
BUFFER_SIZE = 128.kilobytes
InvalidURLError = Class.new(StandardError)
FailedToGetChunkError = Class.new(StandardError)
attr_reader :uri, :size
attr_reader :tell
attr_reader :chunk, :chunk_range
alias_method :pos, :tell
def initialize(url, size)
raise InvalidURLError unless ::Gitlab::UrlSanitizer.valid?(url)
@uri = URI(url)
@size = size
@tell = 0
end
def close
# no-op
end
def binmode
# no-op
end
def binmode?
true
end
def path
@uri.to_s
end
def seek(pos, where = IO::SEEK_SET)
new_pos =
case where
when IO::SEEK_END
size + pos
when IO::SEEK_SET
pos
when IO::SEEK_CUR
tell + pos
else
-1
end
raise 'new position is outside of file' if new_pos < 0 || new_pos > size
@tell = new_pos
end
def eof?
tell == size
end
def each_line
until eof?
line = readline
break if line.nil?
yield(line)
end
end
def read(length = nil)
out = ""
until eof? || (length && out.length >= length)
data = get_chunk
break if data.empty?
out << data
@tell += data.bytesize
end
out = out[0, length] if length && out.length > length
out
end
def readline
out = ""
until eof?
data = get_chunk
new_line = data.index("\n")
if !new_line.nil?
out << data[0..new_line]
@tell += new_line + 1
break
else
out << data
@tell += data.bytesize
end
end
out
end
def write(data)
raise NotImplementedError
end
def truncate(offset)
raise NotImplementedError
end
def flush
raise NotImplementedError
end
def present?
true
end
private
##
# The below methods are not implemented in IO class
#
def in_range?
@chunk_range&.include?(tell)
end
def get_chunk
unless in_range?
response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|
http.request(request)
end
raise FailedToGetChunkError unless response.code == '200' || response.code == '206'
@chunk = response.body.force_encoding(Encoding::BINARY)
@chunk_range = response.content_range
##
# Note: If the provider does not return content_range, then we set it to the range we requested
# Provider: minio
# - When the file size is larger than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
# - When the file size is smaller than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
# Provider: AWS
# - When the file size is larger than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
# - When the file size is smaller than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
# Provider: GCS
# - When the file size is larger than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
# - When the file size is smaller than the requested Content-Range, the Content-Range is included in responses with Net::HTTPOK 200
@chunk_range ||= (chunk_start...(chunk_start + @chunk.length))
end
@chunk[chunk_offset..BUFFER_SIZE]
end
def request
Net::HTTP::Get.new(uri).tap do |request|
request.set_range(chunk_start, BUFFER_SIZE)
end
end
def chunk_offset
tell % BUFFER_SIZE
end
def chunk_start
(tell / BUFFER_SIZE) * BUFFER_SIZE
end
def chunk_end
[chunk_start + BUFFER_SIZE, size].min
end
end
end
end
end
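For illustration, a minimal usage sketch of `Gitlab::Ci::Trace::HttpIO` with an assumed remote trace URL and size; it only exercises the IO-like interface defined above.

# Hypothetical example values; in the EE uploader the URL and size come from object storage.
io = Gitlab::Ci::Trace::HttpIO.new('https://object-storage.example.com/trace.log', 2048)
io.each_line { |line| puts line } # fetched lazily in BUFFER_SIZE chunks via HTTP Range requests
io.seek(0, IO::SEEK_SET)          # rewind, just like a regular IO
head = io.read(100)               # read only the first 100 bytes
io.close                          # no-op, kept for IO compatibility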
......@@ -52,12 +52,14 @@ module Gitlab
end
def exist?
current_path.present? || old_trace.present?
trace_artifact&.exists? || current_path.present? || old_trace.present?
end
def read
stream = Gitlab::Ci::Trace::Stream.new do
if current_path
if trace_artifact
trace_artifact.open
elsif current_path
File.open(current_path, "rb")
elsif old_trace
StringIO.new(old_trace)
......@@ -82,6 +84,8 @@ module Gitlab
end
def erase!
trace_artifact&.destroy
paths.each do |trace_path|
FileUtils.rm(trace_path, force: true)
end
......@@ -137,6 +141,10 @@ module Gitlab
"#{job.id}.log"
) if job.project&.ci_id
end
def trace_artifact
job.job_artifacts_trace
end
end
end
end
......@@ -159,8 +159,19 @@ describe Projects::JobsController do
get_trace
end
context 'when job has a trace artifact' do
let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
it 'returns a trace' do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq job.id
expect(json_response['status']).to eq job.status
expect(json_response['html']).to eq(job.trace.html)
end
end
context 'when job has a trace' do
let(:job) { create(:ci_build, :trace, pipeline: pipeline) }
let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }
it 'returns a trace' do
expect(response).to have_gitlab_http_status(:ok)
......@@ -182,7 +193,7 @@ describe Projects::JobsController do
end
context 'when job has a trace with ANSI sequence and Unicode' do
let(:job) { create(:ci_build, :unicode_trace, pipeline: pipeline) }
let(:job) { create(:ci_build, :unicode_trace_live, pipeline: pipeline) }
it 'returns a trace with Unicode' do
expect(response).to have_gitlab_http_status(:ok)
......@@ -381,7 +392,7 @@ describe Projects::JobsController do
end
context 'when job is erasable' do
let(:job) { create(:ci_build, :erasable, :trace, pipeline: pipeline) }
let(:job) { create(:ci_build, :erasable, :trace_artifact, pipeline: pipeline) }
it 'redirects to the erased job page' do
expect(response).to have_gitlab_http_status(:found)
......@@ -408,7 +419,7 @@ describe Projects::JobsController do
context 'when user is developer' do
let(:role) { :developer }
let(:job) { create(:ci_build, :erasable, :trace, pipeline: pipeline, user: triggered_by) }
let(:job) { create(:ci_build, :erasable, :trace_artifact, pipeline: pipeline, user: triggered_by) }
context 'when triggered by same user' do
let(:triggered_by) { user }
......@@ -439,8 +450,18 @@ describe Projects::JobsController do
get_raw
end
context 'when job has a trace artifact' do
let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
it 'returns a trace' do
expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'text/plain; charset=utf-8'
expect(response.body).to eq job.job_artifacts_trace.open.read
end
end
context 'when job has a trace file' do
let(:job) { create(:ci_build, :trace, pipeline: pipeline) }
let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }
it 'sends a trace file' do
expect(response).to have_gitlab_http_status(:ok)
......
require 'spec_helper'
describe Projects::JobsController do
include ApiHelpers
include HttpIOHelpers
let(:project) { create(:project, :public) }
let(:pipeline) { create(:ci_pipeline, project: project) }
describe 'GET trace.json' do
context 'when trace artifact is in ObjectStorage' do
let!(:job) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) }
before do
allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url }
allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size }
end
context 'when there are no network issues' do
before do
stub_remote_trace_206
get_trace
end
it 'returns a trace' do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq job.id
expect(json_response['status']).to eq job.status
expect(json_response['html']).to eq(job.trace.html)
end
end
context 'when there is a network issue' do
before do
stub_remote_trace_500
end
it 'raises an error' do
expect { get_trace }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
end
end
end
def get_trace
get :trace, namespace_id: project.namespace,
project_id: project,
id: job.id,
format: :json
end
end
describe 'GET raw' do
subject do
post :raw, namespace_id: project.namespace,
project_id: project,
id: job.id
end
context 'when the trace artifact is in ObjectStorage' do
let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
before do
allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
end
it 'redirects to the trace file url' do
expect(subject).to redirect_to(job.job_artifacts_trace.file.url)
end
end
end
end
require 'spec_helper'
describe Gitlab::Ci::Trace::HttpIO do
include HttpIOHelpers
let(:http_io) { described_class.new(url, size) }
let(:url) { remote_trace_url }
let(:size) { remote_trace_size }
describe 'Interchangeability between IO and HttpIO' do
EXCEPT_METHODS = %i[read_nonblock raw raw! cooked cooked! getch echo= echo?
winsize winsize= iflush oflush ioflush beep goto cursor cursor= pressed?
getpass write_nonblock stat pathconf wait_readable wait_writable getbyte <<
wait lines bytes chars codepoints getc readpartial set_encoding printf print
putc puts readlines gets each each_byte each_char each_codepoint to_io reopen
syswrite to_i fileno sysread fdatasync fsync sync= sync lineno= lineno readchar
ungetbyte readbyte ungetc nonblock= nread rewind pos= eof close_on_exec?
close_on_exec= closed? close_read close_write isatty tty? binmode? sysseek
advise ioctl fcntl pid external_encoding internal_encoding autoclose? autoclose=
posix_fileno nonblock? ready? noecho nonblock].freeze
it 'HttpIO covers core interfaces in IO' do
expected_interfaces = ::IO.instance_methods(false)
expected_interfaces -= EXCEPT_METHODS
expect(expected_interfaces - described_class.instance_methods).to be_empty
end
end
describe '#close' do
subject { http_io.close }
it { is_expected.to be_nil }
end
describe '#binmode' do
subject { http_io.binmode }
it { is_expected.to be_nil }
end
describe '#binmode?' do
subject { http_io.binmode? }
it { is_expected.to be_truthy }
end
describe '#path' do
subject { http_io.path }
it { is_expected.to eq(url) }
end
describe '#seek' do
subject { http_io.seek(pos, where) }
context 'when moves pos to end of the file' do
let(:pos) { 0 }
let(:where) { IO::SEEK_END }
it { is_expected.to eq(size) }
end
context 'when moves pos to middle of the file' do
let(:pos) { size / 2 }
let(:where) { IO::SEEK_SET }
it { is_expected.to eq(size / 2) }
end
context 'when moves pos around' do
it 'matches the result' do
expect(http_io.seek(0)).to eq(0)
expect(http_io.seek(100, IO::SEEK_CUR)).to eq(100)
expect { http_io.seek(size + 1, IO::SEEK_CUR) }.to raise_error('new position is outside of file')
end
end
end
describe '#eof?' do
subject { http_io.eof? }
context 'when current pos is at end of the file' do
before do
http_io.seek(size, IO::SEEK_SET)
end
it { is_expected.to be_truthy }
end
context 'when current pos is not at end of the file' do
before do
http_io.seek(0, IO::SEEK_SET)
end
it { is_expected.to be_falsey }
end
end
describe '#each_line' do
subject { http_io.each_line }
let(:string_io) { StringIO.new(remote_trace_body) }
before do
stub_remote_trace_206
end
it 'yields lines' do
expect { |b| http_io.each_line(&b) }.to yield_successive_args(*string_io.each_line.to_a)
end
context 'when buckets on GCS' do
context 'when BUFFER_SIZE is larger than file size' do
before do
stub_remote_trace_200
set_larger_buffer_size_than(size)
end
it 'calls get_chunk only once' do
expect_any_instance_of(Net::HTTP).to receive(:request).once.and_call_original
http_io.each_line { |line| }
end
end
end
end
describe '#read' do
subject { http_io.read(length) }
context 'when there are no network issues' do
before do
stub_remote_trace_206
end
context 'when read whole size' do
let(:length) { nil }
context 'when BUFFER_SIZE is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body)
end
end
context 'when BUFFER_SIZE is larger than file size' do
before do
set_larger_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body)
end
end
end
context 'when read only first 100 bytes' do
let(:length) { 100 }
context 'when BUFFER_SIZE is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body[0, length])
end
end
context 'when BUFFER_SIZE is larger than file size' do
before do
set_larger_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body[0, length])
end
end
end
context 'when tries to read oversize' do
let(:length) { size + 1000 }
context 'when BUFFER_SIZE is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body)
end
end
context 'when BUFFER_SIZE is larger than file size' do
before do
set_larger_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body)
end
end
end
context 'when tries to read 0 bytes' do
let(:length) { 0 }
context 'when BUFFER_SIZE is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to be_empty
end
end
context 'when BUFFER_SIZE is larger than file size' do
before do
set_larger_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to be_empty
end
end
end
end
context 'when there is a network issue' do
let(:length) { nil }
before do
stub_remote_trace_500
end
it 'raises an error' do
expect { subject }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
end
end
end
describe '#readline' do
subject { http_io.readline }
let(:string_io) { StringIO.new(remote_trace_body) }
before do
stub_remote_trace_206
end
shared_examples 'all line matching' do
it 'reads a line' do
(0...remote_trace_body.lines.count).each do
expect(http_io.readline).to eq(string_io.readline)
end
end
end
context 'when there is a network issue' do
let(:length) { nil }
before do
stub_remote_trace_500
end
it 'raises an error' do
expect { subject }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
end
end
context 'when BUFFER_SIZE is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
it_behaves_like 'all line matching'
end
context 'when BUFFER_SIZE is larger than file size' do
before do
set_larger_buffer_size_than(size)
end
it_behaves_like 'all line matching'
end
context 'when pos is at middle of the file' do
before do
set_smaller_buffer_size_than(size)
http_io.seek(size / 2)
string_io.seek(size / 2)
end
it 'reads from pos' do
expect(http_io.readline).to eq(string_io.readline)
end
end
end
describe '#write' do
subject { http_io.write(nil) }
it { expect { subject }.to raise_error(NotImplementedError) }
end
describe '#truncate' do
subject { http_io.truncate(nil) }
it { expect { subject }.to raise_error(NotImplementedError) }
end
describe '#flush' do
subject { http_io.flush }
it { expect { subject }.to raise_error(NotImplementedError) }
end
describe '#present?' do
subject { http_io.present? }
it { is_expected.to be_truthy }
end
end
require 'spec_helper'
describe JobArtifactUploader do
let(:store) { ObjectStorage::Store::LOCAL }
let(:job_artifact) { create(:ci_job_artifact, file_store: store) }
let(:uploader) { described_class.new(job_artifact, :file) }
describe '#open' do
subject { uploader.open }
context 'when trace is stored in Object storage' do
before do
allow(uploader).to receive(:file_storage?) { false }
allow(uploader).to receive(:url) { 'http://object_storage.com/trace' }
end
it 'returns http io stream' do
is_expected.to be_a(Gitlab::Ci::Trace::HttpIO)
end
end
end
end
......@@ -135,13 +135,19 @@ FactoryBot.define do
coverage_regex '/(\d+)/'
end
trait :trace do
trait :trace_live do
after(:create) do |build, evaluator|
build.trace.set('BUILD TRACE')
end
end
trait :unicode_trace do
trait :trace_artifact do
after(:create) do |build, evaluator|
create(:ci_job_artifact, :trace, job: build)
end
end
trait :unicode_trace_live do
after(:create) do |build, evaluator|
trace = File.binread(
File.expand_path(
......
......@@ -30,5 +30,14 @@ FactoryBot.define do
Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'), 'application/x-gzip')
end
end
trait :trace do
file_type :trace
after(:build) do |artifact, evaluator|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/trace/sample_trace'), 'text/plain')
end
end
end
end
......@@ -7,7 +7,7 @@ feature 'Jobs' do
let(:project) { create(:project, :repository) }
let(:pipeline) { create(:ci_pipeline, project: project) }
let(:job) { create(:ci_build, :trace, pipeline: pipeline) }
let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }
let(:job2) { create(:ci_build) }
let(:artifacts_file) do
......@@ -468,6 +468,7 @@ feature 'Jobs' do
describe 'GET /:project/jobs/:id/raw', :js do
context 'access source' do
context 'job from project' do
context 'when job is running' do
before do
job.run!
end
......@@ -483,6 +484,21 @@ feature 'Jobs' do
end
end
context 'when job is complete' do
let(:job) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) }
it 'sends the right headers' do
requests = inspect_requests(inject_headers: { 'X-Sendfile-Type' => 'X-Sendfile' }) do
visit raw_project_job_path(project, job)
end
expect(requests.first.status_code).to eq(200)
expect(requests.first.response_headers['Content-Type']).to eq('text/plain; charset=utf-8')
expect(requests.first.response_headers['X-Sendfile']).to eq(job.job_artifacts_trace.file.path)
end
end
end
context 'job from other project' do
before do
job2.run!
......
This diff is collapsed.
......@@ -7,7 +7,7 @@ describe Projects::JobsController, '(JavaScript fixtures)', type: :controller do
let(:namespace) { create(:namespace, name: 'frontend-fixtures' )}
let(:project) { create(:project_empty_repo, namespace: namespace, path: 'builds-project') }
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
let!(:build_with_artifacts) { create(:ci_build, :success, :artifacts, :trace, pipeline: pipeline, stage: 'test', artifacts_expire_at: Time.now + 18.months) }
let!(:build_with_artifacts) { create(:ci_build, :success, :artifacts, :trace_artifact, pipeline: pipeline, stage: 'test', artifacts_expire_at: Time.now + 18.months) }
let!(:failed_build) { create(:ci_build, :failed, pipeline: pipeline, stage: 'build') }
let!(:pending_build) { create(:ci_build, :pending, pipeline: pipeline, stage: 'deploy') }
......
......@@ -238,11 +238,98 @@ describe Gitlab::Ci::Trace do
end
end
describe '#read' do
shared_examples 'read successfully with IO' do
it 'yields with source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_a(IO)
end
end
end
shared_examples 'read successfully with StringIO' do
it 'yields with source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_a(StringIO)
end
end
end
shared_examples 'failed to read' do
it 'yields without source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_nil
end
end
end
context 'when trace artifact exists' do
before do
create(:ci_job_artifact, :trace, job: build)
end
it_behaves_like 'read successfully with IO'
end
context 'when current_path (with project_id) exists' do
before do
expect(trace).to receive(:default_path) { expand_fixture_path('trace/sample_trace') }
end
it_behaves_like 'read successfully with IO'
end
context 'when current_path (with project_ci_id) exists' do
before do
expect(trace).to receive(:deprecated_path) { expand_fixture_path('trace/sample_trace') }
end
it_behaves_like 'read successfully with IO'
end
context 'when db trace exists' do
before do
build.send(:write_attribute, :trace, "data")
end
it_behaves_like 'read successfully with StringIO'
end
context 'when no sources exist' do
it_behaves_like 'failed to read'
end
end
describe 'trace handling' do
subject { trace.exist? }
context 'trace does not exist' do
it { expect(trace.exist?).to be(false) }
end
context 'when trace artifact exists' do
before do
create(:ci_job_artifact, :trace, job: build)
end
it { is_expected.to be_truthy }
context 'when the trace artifact has been erased' do
before do
trace.erase!
end
it { is_expected.to be_falsy }
it 'removes associations' do
expect(Ci::JobArtifact.exists?(job_id: build.id, file_type: :trace)).to be_falsy
end
end
end
context 'new trace path is used' do
before do
trace.send(:ensure_directory)
......
......@@ -726,7 +726,7 @@ describe Ci::Build do
context 'build is erasable' do
context 'new artifacts' do
let!(:build) { create(:ci_build, :trace, :success, :artifacts) }
let!(:build) { create(:ci_build, :trace_artifact, :success, :artifacts) }
describe '#erase' do
before do
......@@ -760,7 +760,7 @@ describe Ci::Build do
end
describe '#erased?' do
let!(:build) { create(:ci_build, :trace, :success, :artifacts) }
let!(:build) { create(:ci_build, :trace_artifact, :success, :artifacts) }
subject { build.erased? }
context 'job has not been erased' do
......@@ -795,7 +795,7 @@ describe Ci::Build do
context 'old artifacts' do
context 'build is erasable' do
context 'new artifacts' do
let!(:build) { create(:ci_build, :trace, :success, :legacy_artifacts) }
let!(:build) { create(:ci_build, :trace_artifact, :success, :legacy_artifacts) }
describe '#erase' do
before do
......@@ -829,7 +829,7 @@ describe Ci::Build do
end
describe '#erased?' do
let!(:build) { create(:ci_build, :trace, :success, :legacy_artifacts) }
let!(:build) { create(:ci_build, :trace_artifact, :success, :legacy_artifacts) }
subject { build.erased? }
context 'job has not been erased' do
......
......@@ -12,6 +12,9 @@ describe Ci::JobArtifact do
it { is_expected.to respond_to(:created_at) }
it { is_expected.to respond_to(:updated_at) }
it { is_expected.to delegate_method(:open).to(:file) }
it { is_expected.to delegate_method(:exists?).to(:file) }
describe 'callbacks' do
subject { create(:ci_job_artifact, :archive) }
......
require 'spec_helper'
describe API::Jobs do
include HttpIOHelpers
set(:project) do
create(:project, :repository, public_builds: false)
end
......@@ -480,19 +482,46 @@ describe API::Jobs do
end
describe 'GET /projects/:id/jobs/:job_id/trace' do
let(:job) { create(:ci_build, :trace, pipeline: pipeline) }
before do
get api("/projects/#{project.id}/jobs/#{job.id}/trace", api_user)
end
context 'authorized user' do
context 'when trace is in ObjectStorage' do
let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
before do
stub_remote_trace_206
allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url }
allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size }
end
it 'returns specific job trace' do
expect(response).to have_gitlab_http_status(200)
expect(response.body).to eq(job.trace.raw)
end
end
context 'when trace is artifact' do
let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
it 'returns specific job trace' do
expect(response).to have_gitlab_http_status(200)
expect(response.body).to eq(job.trace.raw)
end
end
context 'when trace is file' do
let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }
it 'returns specific job trace' do
expect(response).to have_gitlab_http_status(200)
expect(response.body).to eq(job.trace.raw)
end
end
end
context 'unauthorized user' do
let(:api_user) { nil }
......@@ -577,11 +606,11 @@ describe API::Jobs do
end
context 'job is erasable' do
let(:job) { create(:ci_build, :trace, :artifacts, :success, project: project, pipeline: pipeline) }
let(:job) { create(:ci_build, :trace_artifact, :artifacts, :success, project: project, pipeline: pipeline) }
it 'erases job content' do
expect(response).to have_gitlab_http_status(201)
expect(job).not_to have_trace
expect(job.trace.exist?).to be_falsy
expect(job.artifacts_file.exists?).to be_falsy
expect(job.artifacts_metadata.exists?).to be_falsy
end
......@@ -595,7 +624,7 @@ describe API::Jobs do
end
context 'job is not erasable' do
let(:job) { create(:ci_build, :trace, project: project, pipeline: pipeline) }
let(:job) { create(:ci_build, :trace_live, project: project, pipeline: pipeline) }
it 'responds with forbidden' do
expect(response).to have_gitlab_http_status(403)
......@@ -604,7 +633,7 @@ describe API::Jobs do
context 'when a developer erases a build' do
let(:role) { :developer }
let(:job) { create(:ci_build, :trace, :artifacts, :success, project: project, pipeline: pipeline, user: owner) }
let(:job) { create(:ci_build, :trace_artifact, :artifacts, :success, project: project, pipeline: pipeline, user: owner) }
context 'when the build was created by the developer' do
let(:owner) { user }
......@@ -627,7 +656,7 @@ describe API::Jobs do
context 'artifacts did not expire' do
let(:job) do
create(:ci_build, :trace, :artifacts, :success,
create(:ci_build, :trace_artifact, :artifacts, :success,
project: project, pipeline: pipeline, artifacts_expire_at: Time.now + 7.days)
end
......
......@@ -639,7 +639,7 @@ describe API::Runner do
end
describe 'PUT /api/v4/jobs/:id' do
let(:job) { create(:ci_build, :pending, :trace, pipeline: pipeline, runner_id: runner.id) }
let(:job) { create(:ci_build, :pending, :trace_live, pipeline: pipeline, runner_id: runner.id) }
before do
job.run!
......@@ -681,11 +681,17 @@ describe API::Runner do
end
context 'when trace is given' do
it 'updates a running build' do
update_job(trace: 'BUILD TRACE UPDATED')
it 'creates a trace artifact' do
allow_any_instance_of(BuildFinishedWorker).to receive(:perform).with(job.id) do
CreateTraceArtifactWorker.new.perform(job.id)
end
update_job(state: 'success', trace: 'BUILD TRACE UPDATED')
job.reload
expect(response).to have_gitlab_http_status(200)
expect(job.reload.trace.raw).to eq 'BUILD TRACE UPDATED'
expect(job.trace.raw).to eq 'BUILD TRACE UPDATED'
expect(job.job_artifacts_trace.open.read).to eq 'BUILD TRACE UPDATED'
end
end
......@@ -714,7 +720,7 @@ describe API::Runner do
end
describe 'PATCH /api/v4/jobs/:id/trace' do
let(:job) { create(:ci_build, :running, :trace, runner_id: runner.id, pipeline: pipeline) }
let(:job) { create(:ci_build, :running, :trace_live, runner_id: runner.id, pipeline: pipeline) }
let(:headers) { { API::Helpers::Runner::JOB_TOKEN_HEADER => job.token, 'Content-Type' => 'text/plain' } }
let(:headers_with_range) { headers.merge({ 'Content-Range' => '11-20' }) }
let(:update_interval) { 10.seconds.to_i }
......@@ -775,7 +781,7 @@ describe API::Runner do
context 'when project for the build has been deleted' do
let(:job) do
create(:ci_build, :running, :trace, runner_id: runner.id, pipeline: pipeline) do |job|
create(:ci_build, :running, :trace_live, runner_id: runner.id, pipeline: pipeline) do |job|
job.project.update(pending_delete: true)
end
end
......
......@@ -356,7 +356,7 @@ describe API::V3::Builds do
end
describe 'GET /projects/:id/builds/:build_id/trace' do
let(:build) { create(:ci_build, :trace, pipeline: pipeline) }
let(:build) { create(:ci_build, :trace_live, pipeline: pipeline) }
before do
get v3_api("/projects/#{project.id}/builds/#{build.id}/trace", api_user)
......@@ -451,7 +451,7 @@ describe API::V3::Builds do
end
context 'job is erasable' do
let(:build) { create(:ci_build, :trace, :artifacts, :success, project: project, pipeline: pipeline) }
let(:build) { create(:ci_build, :trace_artifact, :artifacts, :success, project: project, pipeline: pipeline) }
it 'erases job content' do
expect(response.status).to eq 201
......@@ -467,7 +467,7 @@ describe API::V3::Builds do
end
context 'job is not erasable' do
let(:build) { create(:ci_build, :trace, project: project, pipeline: pipeline) }
let(:build) { create(:ci_build, :trace_live, project: project, pipeline: pipeline) }
it 'responds with forbidden' do
expect(response.status).to eq 403
......@@ -482,7 +482,7 @@ describe API::V3::Builds do
context 'artifacts did not expire' do
let(:build) do
create(:ci_build, :trace, :artifacts, :success,
create(:ci_build, :trace_artifact, :artifacts, :success,
project: project, pipeline: pipeline, artifacts_expire_at: Time.now + 7.days)
end
......
require 'spec_helper'
describe Ci::CreateTraceArtifactService do
describe '#execute' do
subject { described_class.new(nil, nil).execute(job) }
let(:job) { create(:ci_build) }
context 'when the job does not have a trace artifact' do
context 'when the job has a trace file' do
before do
allow_any_instance_of(Gitlab::Ci::Trace)
.to receive(:default_path) { expand_fixture_path('trace/sample_trace') }
allow_any_instance_of(JobArtifactUploader).to receive(:move_to_cache) { false }
allow_any_instance_of(JobArtifactUploader).to receive(:move_to_store) { false }
end
it 'creates trace artifact' do
expect { subject }.to change { Ci::JobArtifact.count }.by(1)
expect(job.job_artifacts_trace.read_attribute(:file)).to eq('sample_trace')
end
context 'when the job already has a trace artifact' do
before do
create(:ci_job_artifact, :trace, job: job)
end
it 'does not create trace artifact' do
expect { subject }.not_to change { Ci::JobArtifact.count }
end
end
end
context 'when the job does not have a trace file' do
it 'does not create trace artifact' do
expect { subject }.not_to change { Ci::JobArtifact.count }
end
end
end
end
end
......@@ -17,7 +17,8 @@ describe Ci::RetryBuildService do
%i[id status user token coverage trace runner artifacts_expire_at
artifacts_file artifacts_metadata artifacts_size created_at
updated_at started_at finished_at queued_at erased_by
erased_at auto_canceled_by job_artifacts job_artifacts_archive job_artifacts_metadata].freeze
erased_at auto_canceled_by job_artifacts job_artifacts_archive
job_artifacts_metadata job_artifacts_trace].freeze
IGNORE_ACCESSORS =
%i[type lock_version target_url base_tags trace_sections
......@@ -37,7 +38,7 @@ describe Ci::RetryBuildService do
let(:build) do
create(:ci_build, :failed, :artifacts, :expired, :erased,
:queued, :coverage, :tags, :allowed_to_fail, :on_tag,
:triggered, :trace, :teardown_environment,
:triggered, :trace_artifact, :teardown_environment,
description: 'my-job', stage: 'test', pipeline: pipeline,
auto_canceled_by: create(:ci_empty_pipeline, project: project)) do |build|
##
......
module HttpIOHelpers
def stub_remote_trace_206
WebMock.stub_request(:get, remote_trace_url)
.to_return { |request| remote_trace_response(request, 206) }
end
def stub_remote_trace_200
WebMock.stub_request(:get, remote_trace_url)
.to_return { |request| remote_trace_response(request, 200) }
end
def stub_remote_trace_500
WebMock.stub_request(:get, remote_trace_url)
.to_return(status: [500, "Internal Server Error"])
end
def remote_trace_url
"http://trace.com/trace"
end
def remote_trace_response(request, response_status)
range = request.headers['Range'].match(/bytes=(\d+)-(\d+)/)
{
status: response_status,
headers: remote_trace_response_headers(response_status, range[1].to_i, range[2].to_i),
body: range_trace_body(range[1].to_i, range[2].to_i)
}
end
def remote_trace_response_headers(response_status, from, to)
headers = { 'Content-Type' => 'text/plain' }
if response_status == 206
headers.merge!('Content-Range' => "bytes #{from}-#{to}/#{remote_trace_size}")
end
headers
end
def range_trace_body(from, to)
remote_trace_body[from..to]
end
def remote_trace_body
@remote_trace_body ||= File.read(expand_fixture_path('trace/sample_trace'))
end
def remote_trace_size
remote_trace_body.length
end
def set_smaller_buffer_size_than(file_size)
blocks = (file_size / 128)
new_size = (blocks / 2) * 128
stub_const("Gitlab::Ci::Trace::HttpIO::BUFFER_SIZE", new_size)
end
def set_larger_buffer_size_than(file_size)
blocks = (file_size / 128)
new_size = (blocks * 2) * 128
stub_const("Gitlab::Ci::Trace::HttpIO::BUFFER_SIZE", new_size)
end
end
......@@ -23,6 +23,33 @@ describe JobArtifactUploader do
store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z]
end
describe '#open' do
subject { uploader.open }
context 'when trace is stored in File storage' do
context 'when file exists' do
let(:file) do
fixture_file_upload(
Rails.root.join('spec/fixtures/trace/sample_trace'), 'text/plain')
end
before do
uploader.store!(file)
end
it 'returns io stream' do
is_expected.to be_a(IO)
end
end
context 'when file does not exist' do
it 'returns nil' do
is_expected.to be_nil
end
end
end
end
context 'file is stored in valid local_path' do
let(:file) do
fixture_file_upload(
......
......@@ -6,17 +6,15 @@ describe BuildFinishedWorker do
let!(:build) { create(:ci_build) }
it 'calculates coverage and calls hooks' do
expect(BuildCoverageWorker)
expect(BuildTraceSectionsWorker)
.to receive(:new).ordered.and_call_original
expect(BuildHooksWorker)
expect(BuildCoverageWorker)
.to receive(:new).ordered.and_call_original
expect(BuildTraceSectionsWorker)
.to receive(:perform_async)
expect_any_instance_of(BuildCoverageWorker)
.to receive(:perform)
expect_any_instance_of(BuildHooksWorker)
.to receive(:perform)
expect_any_instance_of(BuildTraceSectionsWorker).to receive(:perform)
expect_any_instance_of(BuildCoverageWorker).to receive(:perform)
expect(BuildHooksWorker).to receive(:perform_async)
expect(CreateTraceArtifactWorker).to receive(:perform_async)
described_class.new.perform(build.id)
end
......
require 'spec_helper'
describe CreateTraceArtifactWorker do
describe '#perform' do
subject { described_class.new.perform(job&.id) }
context 'when job is found' do
let(:job) { create(:ci_build) }
it 'executes service' do
expect_any_instance_of(Ci::CreateTraceArtifactService)
.to receive(:execute).with(job)
subject
end
end
context 'when job is not found' do
let(:job) { nil }
it 'does not execute service' do
expect_any_instance_of(Ci::CreateTraceArtifactService)
.not_to receive(:execute)
subject
end
end
end
end