Commit 8066d3d2 authored by Mayra Cabrera

Merge branch 'sh-fix-lfs-aws-specific-storage' into 'master'

Fix LFS not working with S3 specific-storage settings

See merge request gitlab-org/gitlab!52296
parents 4c9338ed da419833
@@ -21,7 +21,17 @@ module Repositories
     def upload_authorize
       set_workhorse_internal_api_content_type

-      authorized = LfsObjectUploader.workhorse_authorize(has_length: true)
+      # We don't actually know whether Workhorse received an LFS upload
+      # request with a Content-Length header or `Transfer-Encoding:
+      # chunked`. Since we don't know, we need to be pessimistic and
+      # set `has_length` to `false` so that multipart uploads will be
+      # used for AWS. Otherwise, AWS will respond with a `501 NOT IMPLEMENTED`
+      # error because a PutObject request with `Transfer-Encoding: chunked`
+      # is not supported.
+      #
+      # This is only an issue with object storage-specific settings, not
+      # with consolidated object storage settings.
+      authorized = LfsObjectUploader.workhorse_authorize(has_length: false, maximum_size: size)
       authorized.merge!(LfsOid: oid, LfsSize: size)

       render json: authorized
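With `has_length: false` plus a `maximum_size`, the payload rendered to Workhorse gains a `MultipartUpload` section under `RemoteObject`, next to the presigned URLs. A minimal sketch of that shape, assuming an S3-style remote (the top-level keys mirror the specs below in this MR; the `MultipartUpload` sub-keys and URLs are illustrative assumptions):

# Sketch only: LfsOid/LfsSize and the RemoteObject keys mirror the specs in
# this MR; the MultipartUpload sub-keys and URL values are assumed for shape.
authorized = {
  'RemoteObject' => {
    'GetURL'    => 'https://s3.example.com/bucket/tmp/upload/abc?X-Amz-Signature=example',
    'StoreURL'  => 'https://s3.example.com/bucket/tmp/upload/abc?X-Amz-Signature=example',
    'DeleteURL' => 'https://s3.example.com/bucket/tmp/upload/abc?X-Amz-Signature=example',
    # Present because has_length is false: Workhorse sends the body in parts
    # via the multipart API instead of one PutObject with chunked encoding.
    'MultipartUpload' => {
      'PartSize'    => 10 * 1024 * 1024,
      'PartURLs'    => ['https://s3.example.com/bucket/tmp/upload/abc?partNumber=1'],
      'CompleteURL' => 'https://s3.example.com/bucket/tmp/upload/abc?uploadId=example',
      'AbortURL'    => 'https://s3.example.com/bucket/tmp/upload/abc?uploadId=example'
    }
  },
  'LfsOid'  => '6b9765d3888aaec789e8c309eb05b05c3a87895d6ad70d2264bd7270fff665ac',
  'LfsSize' => 6725030
}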
---
title: Fix LFS not working with S3 specific-storage settings
merge_request: 52296
author:
type: fixed
@@ -11,24 +11,76 @@ RSpec.describe Repositories::LfsStorageController do
   let_it_be(:pat) { create(:personal_access_token, user: user, scopes: ['write_repository']) }

   let(:lfs_enabled) { true }
-  let(:params) do
-    {
-      repository_path: "#{project.full_path}.git",
-      oid: '6b9765d3888aaec789e8c309eb05b05c3a87895d6ad70d2264bd7270fff665ac',
-      size: '6725030'
-    }
-  end

   before do
     stub_config(lfs: { enabled: lfs_enabled })
   end

-  describe 'PUT #upload_finalize' do
+  describe 'PUT #upload_authorize' do
     let(:headers) { workhorse_internal_api_request_header }
     let(:extra_headers) { {} }
-    let(:uploaded_file) { temp_file }
+
+    let(:params) do
+      {
+        repository_path: "#{project.full_path}.git",
+        oid: '6b9765d3888aaec789e8c309eb05b05c3a87895d6ad70d2264bd7270fff665ac',
+        size: '6725030'
+      }
+    end
+
+    before do
+      request.headers.merge!(extra_headers)
+      request.headers.merge!(headers)
+    end
+
+    subject do
+      put :upload_authorize, params: params
+    end
+
+    context 'with unauthorized roles' do
+      where(:user_role, :expected_status) do
+        :guest     | :forbidden
+        :anonymous | :unauthorized
+      end
+
+      with_them do
+        let(:extra_headers) do
+          if user_role == :anonymous
+            {}
+          else
+            { 'HTTP_AUTHORIZATION' => ActionController::HttpAuthentication::Basic.encode_credentials(user.username, pat.token) }
+          end
+        end
+
+        before do
+          project.send("add_#{user_role}", user) unless user_role == :anonymous
+        end
+
+        it_behaves_like 'returning response status', params[:expected_status]
+      end
+    end
+
+    context 'with at least developer role' do
+      let(:extra_headers) { { 'HTTP_AUTHORIZATION' => ActionController::HttpAuthentication::Basic.encode_credentials(user.username, pat.token) } }
+
+      before do
+        project.add_developer(user)
+      end
+
+      it 'sets Workhorse with a max limit' do
+        expect(LfsObjectUploader).to receive(:workhorse_authorize).with(has_length: false, maximum_size: params[:size].to_i).and_call_original
+
+        subject
+
+        expect(response).to have_gitlab_http_status(:ok)
+      end
+    end
+  end
+
+  describe 'PUT #upload_finalize' do
+    let(:headers) { workhorse_internal_api_request_header }
+    let(:extra_headers) { {} }
+    let(:uploaded_file) { temp_file }

     before do
       request.headers.merge!(extra_headers)
       request.headers.merge!(headers)
@@ -750,7 +750,7 @@ RSpec.describe 'Git LFS API and storage' do
         expect(json_response['RemoteObject']).to have_key('GetURL')
         expect(json_response['RemoteObject']).to have_key('StoreURL')
         expect(json_response['RemoteObject']).to have_key('DeleteURL')
-        expect(json_response['RemoteObject']).not_to have_key('MultipartUpload')
+        expect(json_response['RemoteObject']).to have_key('MultipartUpload')
         expect(json_response['LfsOid']).to eq(sample_oid)
         expect(json_response['LfsSize']).to eq(sample_size)
       end
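The flipped expectation above is the externally visible half of the controller change: with `has_length: false`, the uploader advertises a multipart upload instead of a single presigned PUT. A rough sketch of that dispatch, with hypothetical helper names (not GitLab's actual implementation; only the `has_length` branch mirrors this MR):

# Hypothetical sketch of the direct-upload decision; helper names and the
# fixed part size are illustrative, not GitLab's API.
def presigned_url(verb)
  "https://s3.example.com/bucket/object?verb=#{verb}" # stand-in for a real presigned URL
end

def multipart_upload_hash(maximum_size)
  part_size = 10 * 1024 * 1024 # assumed fixed part size for the sketch
  {
    'PartSize' => part_size,
    'PartURLs' => (1..(maximum_size.to_f / part_size).ceil).map { |n| presigned_url("part#{n}") }
  }
end

def remote_object(has_length:, maximum_size: nil)
  object = {
    'GetURL'    => presigned_url(:get),
    'StoreURL'  => presigned_url(:put),
    'DeleteURL' => presigned_url(:delete)
  }
  # Without a known Content-Length, a single PutObject would need
  # Transfer-Encoding: chunked, which S3 answers with 501 NOT IMPLEMENTED,
  # so advertise the multipart API sized from maximum_size instead.
  object['MultipartUpload'] = multipart_upload_hash(maximum_size) unless has_length
  object
end

remote_object(has_length: false, maximum_size: 6_725_030).key?('MultipartUpload') # => true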