Commit 9d3a182a authored by allison.browne

Add specs and address CR feedback

Add upload finder specs, multipart upload specs and
specs for the publish details service
parent c285500d
# frozen_string_literal: true
class UploadFinder
class UploaderFinder
def initialize(project, secret, file_path)
@project = project
@secret = secret
......
@@ -45,8 +45,8 @@ module StatusPage
# Publish Images
def publish_images(issue, user_notes)
existing_image_keys = storage_client.list_object_keys(StatusPage::Storage.uploads_path(issue.iid))
# Send all description images to s3
publish_markdown_uploads(
markdown_field: issue.description,
issue_iid: issue.iid,
@@ -64,11 +64,12 @@ module StatusPage
end
def publish_markdown_uploads(markdown_field:, issue_iid:, existing_image_keys:)
markdown_field.scan(FileUploader::MARKDOWN_PATTERN).map do |md|
key = StatusPage::Storage.upload_path(issue_iid, $~[:secret], $~[:file])
markdown_field.scan(FileUploader::MARKDOWN_PATTERN).map do |secret, file_name|
key = StatusPage::Storage.upload_path(issue_iid, secret, file_name)
next if existing_image_keys.include? key
uploader = UploadFinder.new(@project, $~[:secret], $~[:file]).execute
uploader = UploaderFinder.new(@project, secret, file_name).execute
uploader.open do |open_file|
storage_client.multipart_upload(key, open_file)
end
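The switch above from $~ captures to block parameters leans on standard Ruby behaviour: when the pattern contains capture groups, String#scan returns one array of captures per match, while $~ only holds the MatchData of the final match once scan has finished, so indexing into it inside the map block would reuse the last upload's secret and file name for every key. A minimal standalone illustration follows; the pattern is an illustrative stand-in, not the real FileUploader::MARKDOWN_PATTERN.

# Stand-in pattern for illustration; the real FileUploader::MARKDOWN_PATTERN
# is defined elsewhere in the codebase and may differ.
pattern = %r{!\[.*?\]\(/uploads/(?<secret>\h{32})/(?<file>[^)]+)\)}
markdown = "![a](/uploads/#{'1' * 32}/a.png) and ![b](/uploads/#{'2' * 32}/b.png)"

# scan returns the captures of every match, in group order, so the block can
# destructure them directly into secret and file name.
pairs = markdown.scan(pattern).map do |secret, file_name|
  [secret, file_name]
end
pairs.last.last # => "b.png"

# By contrast, $~ now holds only the final MatchData, which is why building
# keys from $~[:secret] and $~[:file] inside the block was replaced.
$~[:file] # => "b.png"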
......
@@ -4,6 +4,9 @@ module StatusPage
module Storage
# Implements a minimal AWS S3 client.
class S3Client
# 5 megabytes is the minimum part size specified in the Amazon SDK
MULTIPART_UPLOAD_PART_SIZE = 5.megabytes
def initialize(region:, bucket_name:, access_key_id:, secret_access_key:)
@bucket_name = bucket_name
@client = Aws::S3::Client.new(
@@ -69,17 +72,20 @@ module StatusPage
# file: An open file or file-like io object
def multipart_upload(key, file)
# AWS sdk v2 has upload_file which supports multipart
# However Gitlab::HttpIO used when objectStorage is enabled
# However Gitlab::HttpIO used when object storage is enabled
# cannot be used with upload_file
wrap_errors(key: key) do
upload_id = client.create_multipart_upload({ bucket: bucket_name, key: key }).to_h[:upload_id]
parts = upload_in_parts(key, file, upload_id)
complete_multipart_upload(key, upload_id, parts)
begin
parts = upload_in_parts(key, file, upload_id)
complete_multipart_upload(key, upload_id, parts)
# Rescue on Exception since even on keyboard interrupt we want to abort the upload and re-raise
rescue
# Abort clears the already uploaded parts so that they do not cost the bucket owner
abort_multipart_upload(key, upload_id)
raise
end
end
# Rescue on Exception since even on keyboard inturrupt we want to abor the upload and re-raise
rescue Exception => e # rubocop:disable Lint/RescueException
abort_multipart_upload(key, upload_id)
raise e
end
private
@@ -89,21 +95,20 @@ module StatusPage
def upload_in_parts(key, file, upload_id)
parts = []
part_number = 1
part_size = 5.megabytes
file.seek(0)
until file.eof?
part = client.upload_part({
body: file.read(part_size),
body: file.read(MULTIPART_UPLOAD_PART_SIZE),
bucket: bucket_name,
key: key,
part_number: part_number, # required
upload_id: upload_id
})
parts << part.to_h.merge(part_number: part_number)
part_number += 1
end
file.seek(0)
parts
end
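For reference, the wrapped calls above map directly onto the low-level aws-sdk-s3 multipart operations. The following is a standalone, simplified sketch of the same create, upload parts, then complete-or-abort sequence; the client, bucket, key and IO are placeholders and this is not the service code itself. Like the change above, it rewinds the IO before the read loop and aborts on failure so that orphaned parts stop accruing storage costs.

# Standalone sketch using the plain aws-sdk-s3 API; names are placeholders.
require 'aws-sdk-s3'

PART_SIZE = 5 * 1024 * 1024 # S3 rejects parts smaller than 5 MB, except the last

def multipart_upload_sketch(s3, bucket, key, io)
  upload_id = s3.create_multipart_upload(bucket: bucket, key: key).upload_id

  begin
    io.seek(0) # rewind first, otherwise an already-read IO reports eof? immediately
    parts = []
    part_number = 1

    until io.eof?
      resp = s3.upload_part(
        bucket: bucket, key: key, upload_id: upload_id,
        part_number: part_number, body: io.read(PART_SIZE)
      )
      parts << { etag: resp.etag, part_number: part_number }
      part_number += 1
    end

    s3.complete_multipart_upload(
      bucket: bucket, key: key, upload_id: upload_id,
      multipart_upload: { parts: parts }
    )
  rescue StandardError
    # Uploaded but uncompleted parts are billed until the upload is aborted.
    s3.abort_multipart_upload(bucket: bucket, key: key, upload_id: upload_id)
    raise
  end
end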
......
@@ -3,12 +3,12 @@
require 'spec_helper'
describe StatusPage::Storage::S3Client, :aws_s3 do
let(:region) { 'eu-west-1' }
let(:bucket_name) { 'bucket_name' }
let(:access_key_id) { 'key_id' }
let(:secret_access_key) { 'secret' }
let!(:region) { 'eu-west-1' }
let!(:bucket_name) { 'bucket_name' }
let!(:access_key_id) { 'key_id' }
let!(:secret_access_key) { 'secret' }
let(:client) do
let!(:client) do
described_class.new(
region: region, bucket_name: bucket_name, access_key_id: access_key_id,
secret_access_key: secret_access_key
@@ -160,6 +160,72 @@ describe StatusPage::Storage::S3Client, :aws_s3 do
end
end
describe 'multipart_upload' do
let(:key) { '123' }
let(:file) { Tempfile.new('foo') }
let(:upload_id) { '123456789' }
let(:s3_client) { client.instance_variable_get(:@client) }
subject(:result) { client.multipart_upload(key, file) }
before do
file.open
file.write('hello world')
file.rewind
allow(s3_client).to receive(:create_multipart_upload).and_return(
instance_double(Aws::S3::Types::CompleteMultipartUploadOutput, { to_h: { upload_id: upload_id } })
)
end
after do
file.close
end
context 'when successful' do
before do
stub_responses(
:upload_part,
instance_double(Aws::S3::Types::UploadPartOutput, to_h: {})
)
end
it 'completes' do
expect(s3_client).to receive(:complete_multipart_upload)
result
end
context 'with more than one part' do
before do
stub_const("#{described_class}::MULTIPART_UPLOAD_PART_SIZE", 1.byte)
end
it 'completes' do
# Ensure size limit triggers more than one part upload
expect(s3_client).to receive(:upload_part).at_least(:twice)
expect(s3_client).to receive(:complete_multipart_upload)
result
end
end
end
context 'when failed' do
let(:aws_error) { 'SomeError' }
before do
stub_responses(:upload_part, aws_error)
end
it 'raises an error' do
expect(s3_client).to receive(:abort_multipart_upload)
msg = error_message(aws_error, key: key)
expect { result }.to raise_error(StatusPage::Storage::Error, msg)
end
end
end
private
def stub_responses(*args)
......
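The body of the stub_responses helper is elided above; together with the :aws_s3 tag it presumably builds on the response stubbing that ships with aws-sdk-ruby. Outside the spec helper, that mechanism looks roughly like this (the values are examples only):

# Illustration of aws-sdk-ruby's built-in client stubbing, independent of the
# spec helper above.
require 'aws-sdk-s3'

s3 = Aws::S3::Client.new(stub_responses: true, region: 'eu-west-1')

# A hash stubs the response data returned by an operation...
s3.stub_responses(:create_multipart_upload, upload_id: '123456789')
s3.create_multipart_upload(bucket: 'bucket_name', key: '123').upload_id # => "123456789"

# ...while a string is treated as an error code and raised on the next call,
# which is what the 'when failed' context relies on.
s3.stub_responses(:upload_part, 'SomeError')
begin
  s3.upload_part(bucket: 'bucket_name', key: '123', upload_id: '123456789',
                 part_number: 1, body: '')
rescue Aws::S3::Errors::ServiceError => e
  e.class # => Aws::S3::Errors::SomeError
end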
@@ -22,13 +22,13 @@ RSpec.shared_examples 'publish incidents' do
context 'when json upload succeeds' do
before do
allow(storage_client).to receive(:upload_object).with(key, content_json)
allow(storage_client).to receive(:list_object_keys).and_return(Set.new)
end
it 'publishes details as JSON' do
expect(result).to be_success
expect(storage_client).to receive(:upload_object).with(key, content_json)
expect(result).to be_success
end
end
@@ -84,43 +84,50 @@ RSpec.shared_examples 'publish incidents' do
allow(storage_client).to receive(:list_object_keys).and_return(Set.new)
end
context 'no upload in markdown' do
context 'when not in markdown' do
it 'publishes no images' do
expect(result).to be_success
expect(result.payload[:image_object_keys]).to eq([])
end
end
context 'upload in markdown' do
context 'when in markdown' do
let(:upload_secret) { '734b8524a16d44eb0ff28a2c2e4ff3c0' }
let(:image_file_name) { 'tanuki.png' }
let(:upload_path) { "/uploads/#{upload_secret}/#{image_file_name}" }
let(:markdown_field) { "![tanuki](#{upload_path})" }
let(:status_page_upload_path) { StatusPage::Storage.upload_path(issue.iid, upload_secret, image_file_name) }
let(:user_notes) { [] }
let(:open_file) { instance_double(File) }
let(:upload) { double(file: double(:file, file: upload_path)) }
let(:open_file) { instance_double(File, read: 'stubbed read') }
let(:uploader) { instance_double(FileUploader) }
before do
allow_next_instance_of(FileUploader) do |uploader|
allow(uploader).to receive(:retrieve_from_store!).and_return(upload)
allow(uploader).to receive(:open).and_yield(open_file).twice
allow_next_instance_of(UploaderFinder) do |finder|
allow(finder).to receive(:execute).and_return(uploader)
end
allow(File).to receive(:open).and_return(open_file)
allow(storage_client).to receive(:upload_object).with(upload_path, open_file)
allow(storage_client).to receive(:list_object_keys).and_return(Set[])
allow(storage_client).to receive(:upload_object)
end
it 'publishes description images' do
expect(storage_client).to receive(:multipart_upload).with(status_page_upload_path, open_file).once
expect(result).to be_success
expect(result.payload[:image_object_keys]).to eq([status_page_upload_path])
end
context 'user notes uploads' do
let(:user_note) { instance_double(Note, note: markdown_field) }
let(:user_notes) { [user_note] }
let(:issue) { instance_double(Issue, notes: user_notes, description: '', iid: incident_id) }
it 'publishes images' do
expect(storage_client).to receive(:multipart_upload).with(status_page_upload_path, open_file).once
expect(result).to be_success
expect(result.payload[:image_object_keys]).to eq([status_page_upload_path])
end
end
@@ -130,13 +137,14 @@ RSpec.shared_examples 'publish incidents' do
end
it 'publishes no images' do
expect(storage_client).not_to receive(:multipart_upload)
expect(result).to be_success
expect(result.payload[:image_object_keys]).to eq([])
end
end
context 'when images are already in s3' do
let(:upload_secret_2) { '9cb61a79ce884d5b6c1dd42728d3c159' }
let(:upload_secret_2) { '9cb61a79ce884d5b681dd42728d3c159' }
let(:image_file_name_2) { 'tanuki_2.png' }
let(:upload_path_2) { "/uploads/#{upload_secret_2}/#{image_file_name_2}" }
let(:markdown_field) { "![tanuki](#{upload_path}) and ![tanuki_2](#{upload_path_2})" }
@@ -146,9 +154,11 @@ RSpec.shared_examples 'publish incidents' do
allow(storage_client).to receive(:list_object_keys).and_return(Set[status_page_upload_path])
end
it 'publishes new images' do
it 'publishes only new images' do
expect(storage_client).to receive(:multipart_upload).with(status_page_upload_path_2, open_file).once
expect(storage_client).not_to receive(:multipart_upload).with(status_page_upload_path, open_file)
expect(result).to be_success
expect(result.payload[:image_object_keys]).to eq([status_page_upload_path_2, status_page_upload_path_2])
end
end
end
......
@@ -22,7 +22,7 @@ module Gitlab
return @text unless needs_rewrite?
@text.gsub(@pattern) do |markdown|
file = UploadFinder.new(@source_project, $~[:secret], $~[:file]).execute
file = UploaderFinder.new(@source_project, $~[:secret], $~[:file]).execute
break markdown unless file.try(:exists?)
@@ -46,7 +46,7 @@ module Gitlab
def files
referenced_files = @text.scan(@pattern).map do
find_file(@source_project, $~[:secret], $~[:file])
UploaderFinder.new(@source_project, $~[:secret], $~[:file]).execute
end
referenced_files.compact.select(&:exists?)
......
# frozen_string_literal: true
require 'spec_helper'
describe UploaderFinder do
describe '#execute' do
let(:project) { build(:project) }
let(:upload) { create(:upload, :issuable_upload, :with_file) }
let(:secret) { upload.secret }
let(:file_name) { upload.path }
subject { described_class.new(project, secret, file_name).execute }
before do
upload.save
end
it 'gets the uploader' do
allow_next_instance_of(FileUploader) do |uploader|
expect(uploader).to receive(:retrieve_from_store!).with(upload.path).and_return(uploader)
end
expect(subject).to be_an_instance_of(FileUploader)
expect(subject.model).to eq(project)
expect(subject.secret).to eq(secret)
end
context 'path traversal in file name' do
before do
upload.path = '/uploads/11111111111111111111111111111111/../../../../../../../../../../../../../../etc/passwd)'
upload.save
end
it 'throws an error' do
expect { subject }.to raise_error(an_instance_of(StandardError).and(having_attributes(message: "Invalid path")))
end
end
end
end
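Read together with the path traversal examples removed from the UploadsRewriter spec below, this spec pins down the finder's behaviour: build a FileUploader for the project and secret, retrieve the upload by its stored path, and refuse traversal attempts with an "Invalid path" error. A hypothetical sketch consistent with those expectations, not necessarily the actual implementation:

# Hypothetical sketch reconstructed from the spec above; the real class may
# differ, in particular in how it validates the path.
class UploaderFinder
  def initialize(project, secret, file_path)
    @project = project
    @secret = secret
    @file_path = file_path
  end

  def execute
    # Simplified stand-in check that raises the error the spec expects.
    raise StandardError, 'Invalid path' if @file_path.include?('..')

    uploader = FileUploader.new(@project, secret: @secret)
    uploader.retrieve_from_store!(@file_path)
    uploader
  end
end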
@@ -68,16 +68,6 @@ describe Gitlab::Gfm::UploadsRewriter do
expect(moved_text.scan(/\A\[.*?\]/).count).to eq(1)
end
context 'path traversal in file name' do
let(:text) do
"![a](/uploads/11111111111111111111111111111111/../../../../../../../../../../../../../../etc/passwd)"
end
it 'throw an error' do
expect { rewriter.rewrite(new_project) }.to raise_error(an_instance_of(StandardError).and(having_attributes(message: "Invalid path")))
end
end
context "file are stored locally" do
include_examples "files are accessible"
end
......