Commit 5e56168c authored by John Jarvis

Merge branch 'security/fix-issue-213139' into 'master'

Prevent filename bypass on artifact upload

Closes #107

See merge request gitlab-org/security/gitlab!403
parents 90297a44 96857bf2
---
title: Prevent filename bypass on artifact upload
merge_request:
author:
type: security
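
The vulnerability fixed here is a path bypass: the `/jobs/:id/artifacts` endpoint previously accepted Workhorse-style `file.path`/`file.name` fields as plain request params, so a client could point `file.path` at an arbitrary file on the server. The fix makes the Multipart middleware the only producer of these params: a rewritten path is accepted only if it resolves into a known upload root, otherwise the request is answered with 400. A minimal sketch of that allowlist idea, with hypothetical names (`ALLOWED_PATHS`, `find_allowed_path`) rather than the shipped code, assuming the candidate path exists on disk:

```ruby
# Illustrative sketch only -- not the shipped GitLab implementation.
class InvalidPathError < StandardError; end

# Example roots; GitLab derives the real list from its uploaders.
ALLOWED_PATHS = %w[/var/opt/gitlab/artifacts /var/opt/gitlab/uploads/tmp].freeze

def find_allowed_path(path)
  real = File.realpath(path) # resolve symlinks so a crafted link cannot escape a root
  ALLOWED_PATHS.find { |allowed| real == allowed || real.start_with?("#{allowed}/") }
end

def accept_upload!(rewritten_path)
  # Mirrors the observable behaviour: reject with a message starting
  # with 'insecure path used' (see the specs below).
  raise InvalidPathError, "insecure path used '#{rewritten_path}'" unless find_allowed_path(rewritten_path)

  rewritten_path
end
```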
@@ -27,12 +27,18 @@ describe Gitlab::Middleware::Multipart do
     end
   end
 
-RSpec.shared_examples 'not allowing the multipart upload' do
+RSpec.shared_examples 'not allowing the multipart upload when package upload path is used' do
   it 'does not allow files to be uploaded' do
     with_tmp_dir('tmp/uploads', storage_path) do |dir, env|
       allow(Packages::PackageFileUploader).to receive(:root).and_return(File.join(dir, storage_path))
+      # with_tmp_dir sets the same workhorse_upload_path for all Uploaders,
+      # so we have to prevent JobArtifactUploader from allowing the tested path
+      allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return(Dir.tmpdir)
 
-      expect { middleware.call(env) }.to raise_error(UploadedFile::InvalidPathError)
+      status, headers, body = middleware.call(env)
+
+      expect(status).to eq(400)
+      expect(headers).to eq({ 'Content-Type' => 'text/plain' })
+      expect(body).to start_with('insecure path used')
     end
   end
 end
@@ -50,7 +56,7 @@ describe Gitlab::Middleware::Multipart do
     expect(::Packages::PackageFileUploader).not_to receive(:workhorse_upload_path)
   end
 
-  it_behaves_like 'not allowing the multipart upload'
+  it_behaves_like 'not allowing the multipart upload when package upload path is used'
 end
 
 where(:object_storage_enabled, :direct_upload_enabled, :example_name) do
......
@@ -251,21 +251,14 @@ module API
       end
 
       params do
         requires :id, type: Integer, desc: %q(Job's ID)
+        requires :file, type: ::API::Validations::Types::WorkhorseFile, desc: %(The artifact file to store (generated by Multipart middleware))
         optional :token, type: String, desc: %q(Job's authentication token)
         optional :expire_in, type: String, desc: %q(Specify when artifacts should expire)
         optional :artifact_type, type: String, desc: %q(The type of artifact),
                                  default: 'archive', values: Ci::JobArtifact.file_types.keys
         optional :artifact_format, type: String, desc: %q(The format of artifact),
                                    default: 'zip', values: Ci::JobArtifact.file_formats.keys
-        optional 'file.path', type: String, desc: %q(path to locally stored body (generated by Workhorse))
-        optional 'file.name', type: String, desc: %q(real filename as sent in Content-Disposition (generated by Workhorse))
-        optional 'file.type', type: String, desc: %q(real content type as sent in Content-Type (generated by Workhorse))
-        optional 'file.size', type: Integer, desc: %q(real size of file (generated by Workhorse))
-        optional 'file.sha256', type: String, desc: %q(sha256 checksum of the file (generated by Workhorse))
-        optional 'metadata.path', type: String, desc: %q(path to locally stored body (generated by Workhorse))
-        optional 'metadata.name', type: String, desc: %q(filename (generated by Workhorse))
-        optional 'metadata.size', type: Integer, desc: %q(real size of metadata (generated by Workhorse))
-        optional 'metadata.sha256', type: String, desc: %q(sha256 checksum of metadata (generated by Workhorse))
+        optional :metadata, type: ::API::Validations::Types::WorkhorseFile, desc: %(The artifact metadata to store (generated by Multipart middleware))
       end
 
       post '/:id/artifacts' do
         not_allowed! unless Gitlab.config.artifacts.enabled
@@ -274,10 +267,9 @@ module API
         job = authenticate_job!
         forbidden!('Job is not running!') unless job.running?
 
-        artifacts = UploadedFile.from_params(params, :file, JobArtifactUploader.workhorse_local_upload_path)
-        metadata = UploadedFile.from_params(params, :metadata, JobArtifactUploader.workhorse_local_upload_path)
+        artifacts = params[:file]
+        metadata = params[:metadata]
 
         bad_request!('Missing artifacts file!') unless artifacts
         file_too_large! unless artifacts.size < max_artifacts_size(job)
 
         result = Ci::CreateJobArtifactsService.new(job.project).execute(job, artifacts, params, metadata_file: metadata)
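
With both params typed as `::API::Validations::Types::WorkhorseFile`, the endpoint can simply read `params[:file]`: Grape rejects any value that is not an `::UploadedFile` built by the Multipart middleware, so a raw string such as `?file.path=/etc/passwd` no longer type-checks. A sketch of the shape such a custom type can take under Grape's Virtus-based coercion of that era (an assumed class body, not necessarily the shipped one):

```ruby
require 'virtus'

module API
  module Validations
    module Types
      class WorkhorseFile < Virtus::Attribute
        def coerce(input)
          # No coercion needed: the Multipart middleware already replaced
          # the rewritten `file.path`/`file.name` fields with an ::UploadedFile.
          input
        end

        def value_coerced?(value)
          # Anything that bypassed the middleware (e.g. a plain String
          # path supplied by the client) fails validation here.
          value.is_a?(::UploadedFile)
        end
      end
    end
  end
end
```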
......
@@ -107,6 +107,7 @@ module Gitlab
       [
         ::FileUploader.root,
         Gitlab.config.uploads.storage_path,
+        JobArtifactUploader.workhorse_upload_path,
         File.join(Rails.root, 'public/uploads/tmp')
       ]
     end
@@ -125,6 +126,8 @@ module Gitlab
         Handler.new(env, message).with_open_files do
           @app.call(env)
         end
+      rescue UploadedFile::InvalidPathError => e
+        [400, { 'Content-Type' => 'text/plain' }, e.message]
       end
     end
   end
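
The rescue above turns a crafted path into a client error instead of an unhandled exception (previously a 500). A self-contained Rack sketch of the same pattern; `UploadedFile::InvalidPathError` is stubbed here so the snippet runs standalone:

```ruby
class UploadedFile
  InvalidPathError = Class.new(StandardError)
end

# Rack middleware mapping the domain exception to a 400 response.
class RejectInsecurePaths
  def initialize(app)
    @app = app
  end

  def call(env)
    @app.call(env)
  rescue UploadedFile::InvalidPathError => e
    # Rack bodies must respond to #each, hence the array wrapper in this
    # sketch (the middleware in this commit returns the bare string,
    # which its specs assert on directly).
    [400, { 'Content-Type' => 'text/plain' }, [e.message]]
  end
end
```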
......
@@ -89,6 +89,17 @@ describe Gitlab::Middleware::Multipart do
     end
   end
 
+  it 'allows files in the job artifact upload path' do
+    with_tmp_dir('artifacts') do |dir, env|
+      expect(JobArtifactUploader).to receive(:workhorse_upload_path).and_return(File.join(dir, 'artifacts'))
+      expect(app).to receive(:call) do |env|
+        expect(get_params(env)['file']).to be_a(::UploadedFile)
+      end
+
+      middleware.call(env)
+    end
+  end
+
   it 'allows symlinks for uploads dir' do
     Tempfile.open('two-levels') do |tempfile|
       symlinked_dir = '/some/dir/uploads'
......
@@ -1422,8 +1422,8 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
   describe 'artifacts' do
     let(:job) { create(:ci_build, :pending, user: user, project: project, pipeline: pipeline, runner_id: runner.id) }
-    let(:jwt_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
-    let(:headers) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => jwt_token } }
+    let(:jwt) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
+    let(:headers) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => jwt } }
     let(:headers_with_token) { headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.token) }
     let(:file_upload) { fixture_file_upload('spec/fixtures/banana_sample.gif', 'image/gif') }
     let(:file_upload2) { fixture_file_upload('spec/fixtures/dk.png', 'image/gif') }
@@ -1703,12 +1703,12 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
       it 'fails to post artifacts without GitLab-Workhorse' do
         post api("/jobs/#{job.id}/artifacts"), params: { token: job.token }, headers: {}
 
-        expect(response).to have_gitlab_http_status(:forbidden)
+        expect(response).to have_gitlab_http_status(:bad_request)
       end
     end
 
     context 'Is missing GitLab Workhorse token headers' do
-      let(:jwt_token) { JWT.encode({ 'iss' => 'invalid-header' }, Gitlab::Workhorse.secret, 'HS256') }
+      let(:jwt) { JWT.encode({ 'iss' => 'invalid-header' }, Gitlab::Workhorse.secret, 'HS256') }
 
       it 'fails to post artifacts without GitLab-Workhorse' do
         expect(Gitlab::ErrorTracking).to receive(:track_exception).once
@@ -1722,15 +1722,14 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
       context 'when setting an expire date' do
         let(:default_artifacts_expire_in) {}
         let(:post_data) do
-          { 'file.path' => file_upload.path,
-            'file.name' => file_upload.original_filename,
-            'expire_in' => expire_in }
+          { file: file_upload,
+            expire_in: expire_in }
         end
 
         before do
           stub_application_setting(default_artifacts_expire_in: default_artifacts_expire_in)
 
-          post(api("/jobs/#{job.id}/artifacts"), params: post_data, headers: headers_with_token)
+          upload_artifacts(file_upload, headers_with_token, post_data)
         end
 
         context 'when an expire_in is given' do
@@ -1783,20 +1782,22 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
         let(:stored_artifacts_size) { job.reload.artifacts_size }
         let(:stored_artifacts_sha256) { job.reload.job_artifacts_archive.file_sha256 }
         let(:stored_metadata_sha256) { job.reload.job_artifacts_metadata.file_sha256 }
+        let(:file_keys) { post_data.keys }
+        let(:send_rewritten_field) { true }
 
         before do
-          post(api("/jobs/#{job.id}/artifacts"), params: post_data, headers: headers_with_token)
+          workhorse_finalize_with_multiple_files(
+            api("/jobs/#{job.id}/artifacts"),
+            method: :post,
+            file_keys: file_keys,
+            params: post_data,
+            headers: headers_with_token,
+            send_rewritten_field: send_rewritten_field
+          )
         end
 
         context 'when posts data accelerated by workhorse is correct' do
-          let(:post_data) do
-            { 'file.path' => artifacts.path,
-              'file.name' => artifacts.original_filename,
-              'file.sha256' => artifacts_sha256,
-              'metadata.path' => metadata.path,
-              'metadata.name' => metadata.original_filename,
-              'metadata.sha256' => metadata_sha256 }
-          end
+          let(:post_data) { { file: artifacts, metadata: metadata } }
 
           it 'stores artifacts and artifacts metadata' do
             expect(response).to have_gitlab_http_status(:created)
@@ -1808,9 +1809,30 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
           end
         end
 
+        context 'with a malicious file.path param' do
+          let(:post_data) { {} }
+          let(:tmp_file) { Tempfile.new('crafted.file.path') }
+          let(:url) { "/jobs/#{job.id}/artifacts?file.path=#{tmp_file.path}" }
+
+          it 'rejects the request' do
+            expect(response).to have_gitlab_http_status(:bad_request)
+            expect(stored_artifacts_size).to be_nil
+          end
+        end
+
+        context 'when workhorse header is missing' do
+          let(:post_data) { { file: artifacts, metadata: metadata } }
+          let(:send_rewritten_field) { false }
+
+          it 'rejects the request' do
+            expect(response).to have_gitlab_http_status(:bad_request)
+            expect(stored_artifacts_size).to be_nil
+          end
+        end
+
         context 'when there is no artifacts file in post data' do
           let(:post_data) do
-            { 'metadata' => metadata }
+            { metadata: metadata }
           end
 
           it 'is expected to respond with bad request' do
@@ -2053,7 +2075,8 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
           method: :post,
           file_key: :file,
           params: params.merge(file: file),
-          headers: headers
+          headers: headers,
+          send_rewritten_field: true
         )
       end
     end
......
@@ -33,22 +33,36 @@ module WorkhorseHelpers
   # workhorse_finalize will transform file_key inside params as if it were the finalize call of an inline object storage upload.
   # note that based on the content of the params it can simulate a disk acceleration or an object storage upload
   def workhorse_finalize(url, method: :post, file_key:, params:, headers: {}, send_rewritten_field: false)
-    workhorse_request_with_file(method, url,
-                                file_key: file_key,
-                                params: params,
-                                extra_headers: headers,
-                                send_rewritten_field: send_rewritten_field
+    workhorse_finalize_with_multiple_files(url, method: method, file_keys: file_key, params: params, headers: headers, send_rewritten_field: send_rewritten_field)
+  end
+
+  def workhorse_finalize_with_multiple_files(url, method: :post, file_keys:, params:, headers: {}, send_rewritten_field: false)
+    workhorse_request_with_multiple_files(method, url,
+                                          file_keys: file_keys,
+                                          params: params,
+                                          extra_headers: headers,
+                                          send_rewritten_field: send_rewritten_field
     )
   end
 
   def workhorse_request_with_file(method, url, file_key:, params:, env: {}, extra_headers: {}, send_rewritten_field:)
+    workhorse_request_with_multiple_files(method, url, file_keys: file_key, params: params, env: env, extra_headers: extra_headers, send_rewritten_field: send_rewritten_field)
+  end
+
+  def workhorse_request_with_multiple_files(method, url, file_keys:, params:, env: {}, extra_headers: {}, send_rewritten_field:)
     workhorse_params = params.dup
-    file = workhorse_params.delete(file_key)
-    workhorse_params = workhorse_disk_accelerated_file_params(file_key, file).merge(workhorse_params)
+
+    file_keys = Array(file_keys)
+    rewritten_fields = {}
+
+    file_keys.each do |key|
+      file = workhorse_params.delete(key)
+      rewritten_fields[key] = file.path if file
+      workhorse_params = workhorse_disk_accelerated_file_params(key, file).merge(workhorse_params)
+    end
 
     headers = if send_rewritten_field
-                workhorse_rewritten_fields_header(file_key => file.path)
+                workhorse_rewritten_fields_header(rewritten_fields)
               else
                 {}
               end
@@ -75,7 +89,11 @@ module WorkhorseHelpers
       "#{key}.name" => file.original_filename,
       "#{key}.size" => file.size
     }.tap do |params|
-      params["#{key}.path"] = file.path if file.path
+      if file.path
+        params["#{key}.path"] = file.path
+        params["#{key}.sha256"] = Digest::SHA256.file(file.path).hexdigest
+      end
+
       params["#{key}.remote_id"] = file.remote_id if file.respond_to?(:remote_id) && file.remote_id.present?
     end
   end
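
Taken together, the helper now mirrors what Workhorse does per file key: it swaps each upload for `key.path`/`key.name`/`key.size` fields (plus `key.sha256` for locally stored files) and advertises the rewritten keys in the signed header. Example spec usage, matching the call the runner spec above makes and relying on that spec's `let` definitions (`job`, `artifacts`, `metadata`, `headers_with_token`):

```ruby
workhorse_finalize_with_multiple_files(
  api("/jobs/#{job.id}/artifacts"),
  method: :post,
  file_keys: [:file, :metadata],
  params: { file: artifacts, metadata: metadata },
  headers: headers_with_token,
  send_rewritten_field: true
)
```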
......