Commit a9abe0ab authored by Stan Hu's avatar Stan Hu

Fix uploader not returning 413 when artifact too large

When an upload exceeds the maximum limit, `ErrEntityTooLarge` gets
returned but is wrapped in multiple layers of errors when it is
checked. As a result, when Google Cloud Storage were used to upload
files, artifacts exceeding the maximum size would report a "500 Internal
Server Error" instead of the correct "413 Request Entity Too Large"
error message.

To fix this, we check the state of `hardLimitReader` and set the error
to `ErrEntityTooLarge` at the end of the `SaveFileFromReader` to ensure
that this error will be returned.

Closes https://gitlab.com/gitlab-org/gitlab-workhorse/-/issues/328
parent 824f842f
---
title: Fix uploader not returning 413 when artifact too large
merge_request: 663
author:
type: fixed
...@@ -166,7 +166,13 @@ func SaveFileFromReader(ctx context.Context, reader io.Reader, size int64, opts ...@@ -166,7 +166,13 @@ func SaveFileFromReader(ctx context.Context, reader io.Reader, size int64, opts
return nil, SizeError(fmt.Errorf("the upload size %d is over maximum of %d bytes", size, opts.MaximumSize)) return nil, SizeError(fmt.Errorf("the upload size %d is over maximum of %d bytes", size, opts.MaximumSize))
} }
reader = &hardLimitReader{r: reader, n: opts.MaximumSize} hlr := &hardLimitReader{r: reader, n: opts.MaximumSize}
reader = hlr
defer func() {
if hlr.n < 0 {
err = ErrEntityTooLarge
}
}()
} }
fh.Size, err = uploadDestination.Consume(ctx, reader, opts.Deadline) fh.Size, err = uploadDestination.Consume(ctx, reader, opts.Deadline)
......
...@@ -2,6 +2,7 @@ package filestore_test ...@@ -2,6 +2,7 @@ package filestore_test
import ( import (
"context" "context"
"errors"
"fmt" "fmt"
"io/ioutil" "io/ioutil"
"os" "os"
...@@ -428,6 +429,107 @@ func TestSaveMultipartInBodyFailure(t *testing.T) { ...@@ -428,6 +429,107 @@ func TestSaveMultipartInBodyFailure(t *testing.T) {
require.EqualError(t, err, test.MultipartUploadInternalError().Error()) require.EqualError(t, err, test.MultipartUploadInternalError().Error())
} }
// TestSaveRemoteFileWithLimit verifies that SaveFileFromReader enforces
// opts.MaximumSize for remote object-storage destinations — both the
// single presigned-PUT flow and the multipart flow — and that an
// oversized payload surfaces filestore.ErrEntityTooLarge (checked with
// errors.Is, since the sentinel may be wrapped).
func TestSaveRemoteFileWithLimit(t *testing.T) {
	testhelper.ConfigureSecret()

	type remote int
	const (
		notRemote remote = iota
		remoteSingle
		remoteMultipart
	)

	remoteTypes := []remote{remoteSingle, remoteMultipart}

	tests := []struct {
		name        string
		objectSize  int64 // declared upload size; -1 means "unknown"
		maxSize     int64 // opts.MaximumSize; 0 means "no limit"
		expectedErr error
		testData    string
	}{
		{
			name:       "known size with no limit",
			testData:   test.ObjectContent,
			objectSize: test.ObjectSize,
		},
		{
			name:       "unknown size with no limit",
			testData:   test.ObjectContent,
			objectSize: -1,
		},
		{
			name:        "unknown object size with limit",
			testData:    test.ObjectContent,
			objectSize:  -1,
			maxSize:     test.ObjectSize - 1,
			expectedErr: filestore.ErrEntityTooLarge,
		},
		{
			name:        "large object with unknown size with limit",
			testData:    string(make([]byte, 20000)),
			objectSize:  -1,
			maxSize:     19000,
			expectedErr: filestore.ErrEntityTooLarge,
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			for _, remoteType := range remoteTypes {
				// Build opts fresh for every remote type. Declaring it
				// outside this loop would let fields set by the
				// single-PUT case (e.g. PresignedPut) leak into the
				// multipart case and contaminate that iteration.
				var opts filestore.SaveFileOpts

				tmpFolder, err := ioutil.TempDir("", "workhorse-test-tmp")
				require.NoError(t, err)
				defer os.RemoveAll(tmpFolder)

				osStub, ts := test.StartObjectStore()
				defer ts.Close()

				switch remoteType {
				case remoteSingle:
					objectURL := ts.URL + test.ObjectPath

					opts.RemoteID = "test-file"
					opts.RemoteURL = objectURL
					opts.PresignedPut = objectURL + "?Signature=ASignature"
					opts.PresignedDelete = objectURL + "?Signature=AnotherSignature"
					opts.Deadline = testDeadline()
					opts.MaximumSize = tc.maxSize
				case remoteMultipart:
					objectURL := ts.URL + test.ObjectPath

					opts.RemoteID = "test-file"
					opts.RemoteURL = objectURL
					opts.PresignedDelete = objectURL + "?Signature=AnotherSignature"
					// Split the payload across two parts so the multipart
					// path is genuinely exercised.
					opts.PartSize = int64(len(tc.testData)/2) + 1
					opts.PresignedParts = []string{objectURL + "?partNumber=1", objectURL + "?partNumber=2"}
					opts.PresignedCompleteMultipart = objectURL + "?Signature=CompleteSignature"
					opts.Deadline = testDeadline()
					opts.MaximumSize = tc.maxSize

					require.Less(t, int64(len(tc.testData)), int64(len(opts.PresignedParts))*opts.PartSize, "check part size calculation")

					osStub.InitiateMultipartUpload(test.ObjectPath)
				}

				ctx, cancel := context.WithCancel(context.Background())
				defer cancel()

				fh, err := filestore.SaveFileFromReader(ctx, strings.NewReader(tc.testData), tc.objectSize, &opts)

				if tc.expectedErr == nil {
					require.NoError(t, err)
					require.NotNil(t, fh)
				} else {
					// errors.Is, not equality: the sentinel may be wrapped.
					require.True(t, errors.Is(err, tc.expectedErr))
					require.Nil(t, fh)
				}
			}
		})
	}
}
func checkFileHandlerWithFields(t *testing.T, fh *filestore.FileHandler, fields map[string]string, prefix string) { func checkFileHandlerWithFields(t *testing.T, fh *filestore.FileHandler, fields map[string]string, prefix string) {
key := func(field string) string { key := func(field string) string {
if prefix == "" { if prefix == "" {
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment