2
0
Fork 0
mirror of https://github.com/discourse/discourse.git synced 2025-09-06 10:50:21 +08:00

DEV: Run some specs with fake S3 implementation instead of stubs

This commit is contained in:
Gerhard Schlager 2022-06-20 18:36:05 +02:00 committed by Gerhard Schlager
parent 235f172b9c
commit 0bcc478635
4 changed files with 312 additions and 222 deletions

View file

@ -10,6 +10,7 @@ describe FileStore::S3Store do
let(:resource) { Aws::S3::Resource.new(client: client) } let(:resource) { Aws::S3::Resource.new(client: client) }
let(:s3_bucket) { resource.bucket("s3-upload-bucket") } let(:s3_bucket) { resource.bucket("s3-upload-bucket") }
let(:s3_object) { stub } let(:s3_object) { stub }
let(:upload_path) { Discourse.store.upload_path }
fab!(:optimized_image) { Fabricate(:optimized_image) } fab!(:optimized_image) { Fabricate(:optimized_image) }
let(:optimized_image_file) { file_from_fixtures("logo.png") } let(:optimized_image_file) { file_from_fixtures("logo.png") }
@ -191,22 +192,21 @@ describe FileStore::S3Store do
context 'copying files in S3' do context 'copying files in S3' do
describe '#copy_file' do describe '#copy_file' do
it "copies the from in S3 with the right paths" do it "copies the from in S3 with the right paths" do
s3_helper.expects(:s3_bucket).returns(s3_bucket)
upload.update!( upload.update!(
url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/original/1X/#{upload.sha1}.png" url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/original/1X/#{upload.sha1}.png"
) )
source = Discourse.store.get_path_for_upload(upload) source = "#{upload_path}/#{Discourse.store.get_path_for_upload(upload)}"
destination = Discourse.store.get_path_for_upload(upload).sub('.png', '.jpg') destination = source.sub('.png', '.jpg')
bucket = prepare_fake_s3(source, upload)
s3_object = stub expect(bucket.find_object(source)).to be_present
expect(bucket.find_object(destination)).to be_nil
s3_bucket.expects(:object).with(destination).returns(s3_object)
expect_copy_from(s3_object, "s3-upload-bucket/#{source}")
store.copy_file(upload.url, source, destination) store.copy_file(upload.url, source, destination)
expect(bucket.find_object(source)).to be_present
expect(bucket.find_object(destination)).to be_present
end end
end end
end end
@ -214,33 +214,19 @@ describe FileStore::S3Store do
context 'removal from s3' do context 'removal from s3' do
describe "#remove_upload" do describe "#remove_upload" do
it "removes the file from s3 with the right paths" do it "removes the file from s3 with the right paths" do
s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once upload_key = Discourse.store.get_path_for_upload(upload)
upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/original/1X/#{upload.sha1}.png") tombstone_key = "tombstone/#{upload_key}"
s3_object = stub bucket = prepare_fake_s3(upload_key, upload)
s3_bucket.expects(:object).with("tombstone/original/1X/#{upload.sha1}.png").returns(s3_object) upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_key}")
expect_copy_from(s3_object, "s3-upload-bucket/original/1X/#{upload.sha1}.png")
s3_bucket.expects(:object).with("original/1X/#{upload.sha1}.png").returns(s3_object) expect(bucket.find_object(upload_key)).to be_present
s3_object.expects(:delete) expect(bucket.find_object(tombstone_key)).to be_nil
store.remove_upload(upload) store.remove_upload(upload)
end
it "removes the optimized image from s3 with the right paths" do expect(bucket.find_object(upload_key)).to be_nil
optimized = Fabricate(:optimized_image, version: 1) expect(bucket.find_object(tombstone_key)).to be_present
upload = optimized.upload
path = "optimized/1X/#{upload.sha1}_#{optimized.version}_#{optimized.width}x#{optimized.height}.png"
s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
optimized.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{path}")
s3_object = stub
s3_bucket.expects(:object).with("tombstone/#{path}").returns(s3_object)
expect_copy_from(s3_object, "s3-upload-bucket/#{path}")
s3_bucket.expects(:object).with(path).returns(s3_object)
s3_object.expects(:delete)
store.remove_optimized_image(optimized)
end end
describe "when s3_upload_bucket includes folders path" do describe "when s3_upload_bucket includes folders path" do
@ -249,41 +235,47 @@ describe FileStore::S3Store do
end end
it "removes the file from s3 with the right paths" do it "removes the file from s3 with the right paths" do
s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once upload_key = "discourse-uploads/#{Discourse.store.get_path_for_upload(upload)}"
upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/discourse-uploads/original/1X/#{upload.sha1}.png") tombstone_key = "discourse-uploads/tombstone/#{Discourse.store.get_path_for_upload(upload)}"
s3_object = stub bucket = prepare_fake_s3(upload_key, upload)
s3_bucket.expects(:object).with("discourse-uploads/tombstone/original/1X/#{upload.sha1}.png").returns(s3_object) upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_key}")
expect_copy_from(s3_object, "s3-upload-bucket/discourse-uploads/original/1X/#{upload.sha1}.png")
s3_bucket.expects(:object).with("discourse-uploads/original/1X/#{upload.sha1}.png").returns(s3_object) expect(bucket.find_object(upload_key)).to be_present
s3_object.expects(:delete) expect(bucket.find_object(tombstone_key)).to be_nil
store.remove_upload(upload) store.remove_upload(upload)
expect(bucket.find_object(upload_key)).to be_nil
expect(bucket.find_object(tombstone_key)).to be_present
end end
end end
end end
describe "#remove_optimized_image" do describe "#remove_optimized_image" do
let(:image_path) do let(:optimized_key) { FileStore::BaseStore.new.get_path_for_optimized_image(optimized_image) }
FileStore::BaseStore.new.get_path_for_optimized_image(optimized_image) let(:tombstone_key) { "tombstone/#{optimized_key}" }
end let(:upload) { optimized_image.upload }
let(:upload_key) { Discourse.store.get_path_for_upload(upload) }
before do before do
optimized_image.update!( optimized_image.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{optimized_key}")
url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{image_path}" upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_key}")
)
end end
it "removes the file from s3 with the right paths" do it "removes the optimized image from s3 with the right paths" do
s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once bucket = prepare_fake_s3(upload_key, upload)
s3_object = stub store_fake_s3_object(optimized_key, optimized_image)
s3_bucket.expects(:object).with("tombstone/#{image_path}").returns(s3_object) expect(bucket.find_object(upload_key)).to be_present
expect_copy_from(s3_object, "s3-upload-bucket/#{image_path}") expect(bucket.find_object(optimized_key)).to be_present
s3_bucket.expects(:object).with("#{image_path}").returns(s3_object) expect(bucket.find_object(tombstone_key)).to be_nil
s3_object.expects(:delete)
store.remove_optimized_image(optimized_image) store.remove_optimized_image(optimized_image)
expect(bucket.find_object(upload_key)).to be_present
expect(bucket.find_object(optimized_key)).to be_nil
expect(bucket.find_object(tombstone_key)).to be_present
end end
describe "when s3_upload_bucket includes folders path" do describe "when s3_upload_bucket includes folders path" do
@ -291,29 +283,24 @@ describe FileStore::S3Store do
SiteSetting.s3_upload_bucket = "s3-upload-bucket/discourse-uploads" SiteSetting.s3_upload_bucket = "s3-upload-bucket/discourse-uploads"
end end
before do let(:image_path) { FileStore::BaseStore.new.get_path_for_optimized_image(optimized_image) }
optimized_image.update!( let(:optimized_key) { "discourse-uploads/#{image_path}" }
url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/discourse-uploads/#{image_path}" let(:tombstone_key) { "discourse-uploads/tombstone/#{image_path}" }
) let(:upload_key) { "discourse-uploads/#{Discourse.store.get_path_for_upload(upload)}" }
end
it "removes the file from s3 with the right paths" do it "removes the file from s3 with the right paths" do
s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once bucket = prepare_fake_s3(upload_key, upload)
s3_object = stub store_fake_s3_object(optimized_key, optimized_image)
s3_bucket.expects(:object) expect(bucket.find_object(upload_key)).to be_present
.with("discourse-uploads/tombstone/#{image_path}") expect(bucket.find_object(optimized_key)).to be_present
.returns(s3_object) expect(bucket.find_object(tombstone_key)).to be_nil
expect_copy_from(s3_object, "s3-upload-bucket/discourse-uploads/#{image_path}")
s3_bucket.expects(:object).with(
"discourse-uploads/#{image_path}"
).returns(s3_object)
s3_object.expects(:delete)
store.remove_optimized_image(optimized_image) store.remove_optimized_image(optimized_image)
expect(bucket.find_object(upload_key)).to be_present
expect(bucket.find_object(optimized_key)).to be_nil
expect(bucket.find_object(tombstone_key)).to be_present
end end
end end
end end
@ -484,11 +471,18 @@ describe FileStore::S3Store do
end end
end end
def expect_copy_from(s3_object, source) def prepare_fake_s3(upload_key, upload)
s3_object.expects(:copy_from).with( @fake_s3 = FakeS3.create
copy_source: source @fake_s3_bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
).returns( store_fake_s3_object(upload_key, upload)
stub(copy_object_result: stub(etag: '"etagtest"')) @fake_s3_bucket
end
def store_fake_s3_object(upload_key, upload)
@fake_s3_bucket.put_object(
key: upload_key,
size: upload.filesize,
last_modified: upload.created_at
) )
end end
end end

View file

@ -99,61 +99,73 @@ RSpec.describe 'Multisite s3 uploads', type: :multisite do
describe "#remove_upload" do describe "#remove_upload" do
let(:store) { FileStore::S3Store.new } let(:store) { FileStore::S3Store.new }
let(:client) { Aws::S3::Client.new(stub_responses: true) }
let(:resource) { Aws::S3::Resource.new(client: client) } let(:upload) { build_upload }
let(:s3_bucket) { resource.bucket(SiteSetting.s3_upload_bucket) } let(:upload_key) { "#{upload_path}/original/1X/#{upload.sha1}.png" }
let(:s3_helper) { store.s3_helper }
def prepare_fake_s3
@fake_s3 = FakeS3.create
bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
bucket.put_object(
key: upload_key,
size: upload.filesize,
last_modified: upload.created_at
)
bucket
end
it "removes the file from s3 on multisite" do it "removes the file from s3 on multisite" do
test_multisite_connection('default') do test_multisite_connection('default') do
upload = build_upload
s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/original/1X/#{upload.sha1}.png") upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/original/1X/#{upload.sha1}.png")
s3_object = stub tombstone_key = "uploads/tombstone/default/original/1X/#{upload.sha1}.png"
bucket = prepare_fake_s3
s3_bucket.expects(:object).with("uploads/tombstone/default/original/1X/#{upload.sha1}.png").returns(s3_object) expect(bucket.find_object(upload_key)).to be_present
expect_copy_from(s3_object, "s3-upload-bucket/#{upload_path}/original/1X/#{upload.sha1}.png") expect(bucket.find_object(tombstone_key)).to be_nil
s3_bucket.expects(:object).with("#{upload_path}/original/1X/#{upload.sha1}.png").returns(s3_object)
s3_object.expects(:delete)
store.remove_upload(upload) store.remove_upload(upload)
expect(bucket.find_object(upload_key)).to be_nil
expect(bucket.find_object(tombstone_key)).to be_present
end end
end end
it "removes the file from s3 on another multisite db" do it "removes the file from s3 on another multisite db" do
test_multisite_connection('second') do test_multisite_connection('second') do
upload = build_upload
s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/original/1X/#{upload.sha1}.png") upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/original/1X/#{upload.sha1}.png")
s3_object = stub tombstone_key = "uploads/tombstone/second/original/1X/#{upload.sha1}.png"
bucket = prepare_fake_s3
s3_bucket.expects(:object).with("uploads/tombstone/second/original/1X/#{upload.sha1}.png").returns(s3_object) expect(bucket.find_object(upload_key)).to be_present
expect_copy_from(s3_object, "s3-upload-bucket/#{upload_path}/original/1X/#{upload.sha1}.png") expect(bucket.find_object(tombstone_key)).to be_nil
s3_bucket.expects(:object).with("#{upload_path}/original/1X/#{upload.sha1}.png").returns(s3_object)
s3_object.expects(:delete)
store.remove_upload(upload) store.remove_upload(upload)
expect(bucket.find_object(upload_key)).to be_nil
expect(bucket.find_object(tombstone_key)).to be_present
end end
end end
describe "when s3_upload_bucket includes folders path" do describe "when s3_upload_bucket includes folders path" do
let(:upload_key) { "discourse-uploads/#{upload_path}/original/1X/#{upload.sha1}.png" }
before do before do
SiteSetting.s3_upload_bucket = "s3-upload-bucket/discourse-uploads" SiteSetting.s3_upload_bucket = "s3-upload-bucket/discourse-uploads"
end end
it "removes the file from s3 on multisite" do it "removes the file from s3 on multisite" do
test_multisite_connection('default') do test_multisite_connection('default') do
upload = build_upload
s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/discourse-uploads/#{upload_path}/original/1X/#{upload.sha1}.png") upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/discourse-uploads/#{upload_path}/original/1X/#{upload.sha1}.png")
s3_object = stub tombstone_key = "discourse-uploads/uploads/tombstone/default/original/1X/#{upload.sha1}.png"
bucket = prepare_fake_s3
s3_bucket.expects(:object).with("discourse-uploads/uploads/tombstone/default/original/1X/#{upload.sha1}.png").returns(s3_object) expect(bucket.find_object(upload_key)).to be_present
expect_copy_from(s3_object, "s3-upload-bucket/discourse-uploads/#{upload_path}/original/1X/#{upload.sha1}.png") expect(bucket.find_object(tombstone_key)).to be_nil
s3_bucket.expects(:object).with("discourse-uploads/#{upload_path}/original/1X/#{upload.sha1}.png").returns(s3_object)
s3_object.expects(:delete)
store.remove_upload(upload) store.remove_upload(upload)
expect(bucket.find_object(upload_key)).to be_nil
expect(bucket.find_object(tombstone_key)).to be_present
end end
end end
end end
@ -345,12 +357,4 @@ RSpec.describe 'Multisite s3 uploads', type: :multisite do
end end
end end
end end
def expect_copy_from(s3_object, source)
s3_object.expects(:copy_from).with(
copy_source: source
).returns(
stub(copy_object_result: stub(etag: '"etagtest"'))
)
end
end end

View file

@ -2,7 +2,6 @@
RSpec.describe ExternalUploadManager do RSpec.describe ExternalUploadManager do
fab!(:user) { Fabricate(:user) } fab!(:user) { Fabricate(:user) }
let(:type) { "card_background" }
let!(:logo_file) { file_from_fixtures("logo.png") } let!(:logo_file) { file_from_fixtures("logo.png") }
let!(:pdf_file) { file_from_fixtures("large.pdf", "pdf") } let!(:pdf_file) { file_from_fixtures("large.pdf", "pdf") }
let(:object_size) { 1.megabyte } let(:object_size) { 1.megabyte }
@ -10,9 +9,9 @@ RSpec.describe ExternalUploadManager do
let(:client_sha1) { Upload.generate_digest(object_file) } let(:client_sha1) { Upload.generate_digest(object_file) }
let(:sha1) { Upload.generate_digest(object_file) } let(:sha1) { Upload.generate_digest(object_file) }
let(:object_file) { logo_file } let(:object_file) { logo_file }
let(:metadata_headers) { {} } let(:external_upload_stub_metadata) { {} }
let!(:external_upload_stub) { Fabricate(:image_external_upload_stub, created_by: user) } let!(:external_upload_stub) { Fabricate(:image_external_upload_stub, created_by: user) }
let(:upload_base_url) { "https://#{SiteSetting.s3_upload_bucket}.s3.#{SiteSetting.s3_region}.amazonaws.com" } let(:s3_bucket_name) { SiteSetting.s3_upload_bucket }
subject do subject do
ExternalUploadManager.new(external_upload_stub) ExternalUploadManager.new(external_upload_stub)
@ -27,10 +26,8 @@ RSpec.describe ExternalUploadManager do
SiteSetting.s3_backup_bucket = "s3-backup-bucket" SiteSetting.s3_backup_bucket = "s3-backup-bucket"
SiteSetting.backup_location = BackupLocationSiteSetting::S3 SiteSetting.backup_location = BackupLocationSiteSetting::S3
stub_head_object prepare_fake_s3
stub_download_object_filehelper stub_download_object_filehelper
stub_copy_object
stub_delete_object
end end
describe "#ban_user_from_external_uploads!" do describe "#ban_user_from_external_uploads!" do
@ -69,6 +66,7 @@ RSpec.describe ExternalUploadManager do
before do before do
external_upload_stub.update!(status: ExternalUploadStub.statuses[:uploaded]) external_upload_stub.update!(status: ExternalUploadStub.statuses[:uploaded])
end end
it "raises an error" do it "raises an error" do
expect { subject.transform! }.to raise_error(ExternalUploadManager::CannotPromoteError) expect { subject.transform! }.to raise_error(ExternalUploadManager::CannotPromoteError)
end end
@ -77,14 +75,11 @@ RSpec.describe ExternalUploadManager do
context "when the upload does not get changed in UploadCreator (resized etc.)" do context "when the upload does not get changed in UploadCreator (resized etc.)" do
it "copies the stubbed upload on S3 to its new destination and deletes it" do it "copies the stubbed upload on S3 to its new destination and deletes it" do
upload = subject.transform! upload = subject.transform!
expect(WebMock).to have_requested(
:put, bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
"#{upload_base_url}/#{Discourse.store.get_path_for_upload(upload)}", expect(@fake_s3.operation_called?(:copy_object)).to eq(true)
).with(headers: { 'X-Amz-Copy-Source' => "#{SiteSetting.s3_upload_bucket}/#{external_upload_stub.key}" }) expect(bucket.find_object(Discourse.store.get_path_for_upload(upload))).to be_present
expect(WebMock).to have_requested( expect(bucket.find_object(external_upload_stub.key)).to be_nil
:delete,
"#{upload_base_url}/#{external_upload_stub.key}"
)
end end
it "errors if the image upload is too big" do it "errors if the image upload is too big" do
@ -105,22 +100,22 @@ RSpec.describe ExternalUploadManager do
end end
context "when the upload does get changed by the UploadCreator" do context "when the upload does get changed by the UploadCreator" do
let(:file) { file_from_fixtures("should_be_jpeg.heic", "images") } let(:object_file) { file_from_fixtures("should_be_jpeg.heic", "images") }
let(:object_size) { 1.megabyte }
let(:external_upload_stub) { Fabricate(:image_external_upload_stub, original_filename: "should_be_jpeg.heic", filesize: object_size) }
it "creates a new upload in s3 (not copy) and deletes the original stubbed upload" do it "creates a new upload in s3 (not copy) and deletes the original stubbed upload" do
upload = subject.transform! upload = subject.transform!
expect(WebMock).to have_requested(
:put, bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
"#{upload_base_url}/#{Discourse.store.get_path_for_upload(upload)}", expect(@fake_s3.operation_called?(:copy_object)).to eq(false)
) expect(bucket.find_object(Discourse.store.get_path_for_upload(upload))).to be_present
expect(WebMock).to have_requested( expect(bucket.find_object(external_upload_stub.key)).to be_nil
:delete, "#{upload_base_url}/#{external_upload_stub.key}"
)
end end
end end
context "when the sha has been set on the s3 object metadata by the clientside JS" do context "when the sha has been set on the s3 object metadata by the clientside JS" do
let(:metadata_headers) { { "x-amz-meta-sha1-checksum" => client_sha1 } } let(:external_upload_stub_metadata) { { "sha1-checksum" => client_sha1 } }
context "when the downloaded file sha1 does not match the client sha1" do context "when the downloaded file sha1 does not match the client sha1" do
let(:client_sha1) { "blahblah" } let(:client_sha1) { "blahblah" }
@ -128,6 +123,9 @@ RSpec.describe ExternalUploadManager do
it "raises an error, deletes the stub" do it "raises an error, deletes the stub" do
expect { subject.transform! }.to raise_error(ExternalUploadManager::ChecksumMismatchError) expect { subject.transform! }.to raise_error(ExternalUploadManager::ChecksumMismatchError)
expect(ExternalUploadStub.exists?(id: external_upload_stub.id)).to eq(false) expect(ExternalUploadStub.exists?(id: external_upload_stub.id)).to eq(false)
bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
expect(bucket.find_object(external_upload_stub.key)).to be_nil
end end
it "does not delete the stub if enable_upload_debug_mode" do it "does not delete the stub if enable_upload_debug_mode" do
@ -135,6 +133,9 @@ RSpec.describe ExternalUploadManager do
expect { subject.transform! }.to raise_error(ExternalUploadManager::ChecksumMismatchError) expect { subject.transform! }.to raise_error(ExternalUploadManager::ChecksumMismatchError)
external_stub = ExternalUploadStub.find(external_upload_stub.id) external_stub = ExternalUploadStub.find(external_upload_stub.id)
expect(external_stub.status).to eq(ExternalUploadStub.statuses[:failed]) expect(external_stub.status).to eq(ExternalUploadStub.statuses[:failed])
bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
expect(bucket.find_object(external_upload_stub.key)).to be_present
end end
end end
end end
@ -150,10 +151,9 @@ RSpec.describe ExternalUploadManager do
expect { subject.transform! }.to raise_error(ExternalUploadManager::SizeMismatchError) expect { subject.transform! }.to raise_error(ExternalUploadManager::SizeMismatchError)
expect(ExternalUploadStub.exists?(id: external_upload_stub.id)).to eq(false) expect(ExternalUploadStub.exists?(id: external_upload_stub.id)).to eq(false)
expect(Discourse.redis.get("#{ExternalUploadManager::BAN_USER_REDIS_PREFIX}#{external_upload_stub.created_by_id}")).to eq("1") expect(Discourse.redis.get("#{ExternalUploadManager::BAN_USER_REDIS_PREFIX}#{external_upload_stub.created_by_id}")).to eq("1")
expect(WebMock).to have_requested(
:delete, bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
"#{upload_base_url}/#{external_upload_stub.key}" expect(bucket.find_object(external_upload_stub.key)).to be_nil
)
end end
it "does not delete the stub if enable_upload_debug_mode" do it "does not delete the stub if enable_upload_debug_mode" do
@ -161,6 +161,9 @@ RSpec.describe ExternalUploadManager do
expect { subject.transform! }.to raise_error(ExternalUploadManager::SizeMismatchError) expect { subject.transform! }.to raise_error(ExternalUploadManager::SizeMismatchError)
external_stub = ExternalUploadStub.find(external_upload_stub.id) external_stub = ExternalUploadStub.find(external_upload_stub.id)
expect(external_stub.status).to eq(ExternalUploadStub.statuses[:failed]) expect(external_stub.status).to eq(ExternalUploadStub.statuses[:failed])
bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
expect(bucket.find_object(external_upload_stub.key)).to be_present
end end
end end
end end
@ -193,18 +196,14 @@ RSpec.describe ExternalUploadManager do
it "copies the stubbed upload on S3 to its new destination and deletes it" do it "copies the stubbed upload on S3 to its new destination and deletes it" do
upload = subject.transform! upload = subject.transform!
expect(WebMock).to have_requested(
:put, bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
"#{upload_base_url}/#{Discourse.store.get_path_for_upload(upload)}", expect(bucket.find_object(Discourse.store.get_path_for_upload(upload))).to be_present
).with(headers: { 'X-Amz-Copy-Source' => "#{SiteSetting.s3_upload_bucket}/#{external_upload_stub.key}" }) expect(bucket.find_object(external_upload_stub.key)).to be_nil
expect(WebMock).to have_requested(
:delete, "#{upload_base_url}/#{external_upload_stub.key}"
)
end end
end end
context "when the upload type is backup" do context "when the upload type is backup" do
let(:upload_base_url) { "https://#{SiteSetting.s3_backup_bucket}.s3.#{SiteSetting.s3_region}.amazonaws.com" }
let(:object_size) { 200.megabytes } let(:object_size) { 200.megabytes }
let(:object_file) { file_from_fixtures("backup_since_v1.6.tar.gz", "backups") } let(:object_file) { file_from_fixtures("backup_since_v1.6.tar.gz", "backups") }
let!(:external_upload_stub) do let!(:external_upload_stub) do
@ -217,21 +216,7 @@ RSpec.describe ExternalUploadManager do
folder_prefix: RailsMultisite::ConnectionManagement.current_db folder_prefix: RailsMultisite::ConnectionManagement.current_db
) )
end end
let(:s3_bucket_name) { SiteSetting.s3_backup_bucket }
before do
stub_request(:head, "https://#{SiteSetting.s3_backup_bucket}.s3.#{SiteSetting.s3_region}.amazonaws.com/")
# stub copy and delete object for backup, which copies the original filename to the root,
# and also uses current_db in the bucket name always
stub_request(
:put,
"#{upload_base_url}/#{RailsMultisite::ConnectionManagement.current_db}/backup_since_v1.6.tar.gz"
).to_return(
status: 200,
headers: { "ETag" => etag },
body: copy_object_result
)
end
it "does not try and download the file" do it "does not try and download the file" do
FileHelper.expects(:download).never FileHelper.expects(:download).never
@ -253,32 +238,17 @@ RSpec.describe ExternalUploadManager do
end end
it "copies the stubbed upload on S3 to its new destination and deletes it" do it "copies the stubbed upload on S3 to its new destination and deletes it" do
upload = subject.transform! bucket = @fake_s3.bucket(SiteSetting.s3_backup_bucket)
expect(WebMock).to have_requested( expect(bucket.find_object(external_upload_stub.key)).to be_present
:put,
"#{upload_base_url}/#{RailsMultisite::ConnectionManagement.current_db}/backup_since_v1.6.tar.gz", subject.transform!
).with(headers: { 'X-Amz-Copy-Source' => "#{SiteSetting.s3_backup_bucket}/#{external_upload_stub.key}" })
expect(WebMock).to have_requested( expect(bucket.find_object("#{RailsMultisite::ConnectionManagement.current_db}/backup_since_v1.6.tar.gz")).to be_present
:delete, "#{upload_base_url}/#{external_upload_stub.key}" expect(bucket.find_object(external_upload_stub.key)).to be_nil
)
end end
end end
end end
def stub_head_object
stub_request(
:head,
"#{upload_base_url}/#{external_upload_stub.key}"
).to_return(
status: 200,
headers: {
ETag: etag,
"Content-Length" => object_size,
"Content-Type" => "image/png",
}.merge(metadata_headers)
)
end
def stub_download_object_filehelper def stub_download_object_filehelper
signed_url = Discourse.store.signed_url_for_path(external_upload_stub.key) signed_url = Discourse.store.signed_url_for_path(external_upload_stub.key)
uri = URI.parse(signed_url) uri = URI.parse(signed_url)
@ -289,49 +259,14 @@ RSpec.describe ExternalUploadManager do
) )
end end
def copy_object_result def prepare_fake_s3
<<~XML @fake_s3 = FakeS3.create
<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n
<CopyObjectResult
xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">
<LastModified>2021-07-19T04:10:41.000Z</LastModified>
<ETag>&quot;#{etag}&quot;</ETag>
</CopyObjectResult>
XML
end
def stub_copy_object @fake_s3.bucket(s3_bucket_name).put_object(
upload_pdf = Fabricate(:upload, sha1: "testbc60eb18e8f974cbfae8bb0f069c3a311024", original_filename: "test.pdf", extension: "pdf") key: external_upload_stub.key,
upload_path = Discourse.store.get_path_for_upload(upload_pdf) size: object_size,
upload_pdf.destroy! last_modified: Time.zone.now,
metadata: external_upload_stub_metadata
stub_request(
:put,
"#{upload_base_url}/#{upload_path}"
).to_return(
status: 200,
headers: { "ETag" => etag },
body: copy_object_result
)
upload_png = Fabricate(:upload, sha1: "bc975735dfc6409c1c2aa5ebf2239949bcbdbd65", original_filename: "test.png", extension: "png")
upload_path = Discourse.store.get_path_for_upload(upload_png)
upload_png.destroy!
stub_request(
:put,
"#{upload_base_url}/#{upload_path}"
).to_return(
status: 200,
headers: { "ETag" => etag },
body: copy_object_result
)
end
def stub_delete_object
stub_request(
:delete, "#{upload_base_url}/#{external_upload_stub.key}"
).to_return(
status: 200
) )
end end
end end

157
spec/support/fake_s3.rb Normal file
View file

@ -0,0 +1,157 @@
# frozen_string_literal: true
# In-memory fake of S3 for specs. Wraps an AWS SDK client in stub_responses
# mode and backs each stubbed operation with FakeS3Bucket objects, so tests
# can assert on stored keys instead of setting up per-call mocha expectations.
class FakeS3
  attr_reader :s3_client

  # Builds a FakeS3 with buckets for the site's upload bucket and
  # (db-prefixed) backup bucket, and stubs S3Helper.new to hand out
  # helpers wired to the fake client.
  def self.create
    s3 = self.new
    s3.stub_bucket(SiteSetting.s3_upload_bucket) if SiteSetting.s3_upload_bucket.present?
    s3.stub_bucket(File.join(SiteSetting.s3_backup_bucket, RailsMultisite::ConnectionManagement.current_db)) if SiteSetting.s3_backup_bucket.present?
    s3.stub_s3_helper
    s3
  end

  def initialize
    @buckets = {}      # base bucket name => FakeS3Bucket
    @operations = []   # log of every stubbed API call (name + params)
    @s3_client = Aws::S3::Client.new(stub_responses: true, region: SiteSetting.s3_region)
    stub_methods
  end

  # Looks up a bucket by name; any "/folder" suffix after the base bucket
  # name is ignored (buckets are keyed by their base name only).
  def bucket(bucket_name)
    bucket_name, _prefix = bucket_name.split("/", 2)
    @buckets[bucket_name]
  end

  # Registers a FakeS3Bucket for `full_bucket_name` (which may include a
  # folder prefix) together with an S3Helper that talks to the fake client.
  def stub_bucket(full_bucket_name)
    bucket_name, _prefix = full_bucket_name.split("/", 2)
    s3_helper = S3Helper.new(
      full_bucket_name,
      Rails.configuration.multisite ? FileStore::S3Store.new.multisite_tombstone_prefix : FileStore::S3Store::TOMBSTONE_PREFIX,
      client: @s3_client
    )
    @buckets[bucket_name] = FakeS3Bucket.new(full_bucket_name, s3_helper)
  end

  # Makes S3Helper.new return our pre-built helpers whenever it is called
  # with either the base bucket name or the full (prefixed) bucket name.
  def stub_s3_helper
    @buckets.each do |bucket_name, bucket|
      S3Helper.stubs(:new)
        .with { |b| b == bucket_name || b == bucket.name }
        .returns(bucket.s3_helper)
    end
  end

  # True if the named S3 operation (e.g. :copy_object) was called at least
  # once. An optional block can further filter on the logged operation hash
  # ({ name:, params: }).
  def operation_called?(name)
    @operations.any? do |operation|
      operation[:name] == name && (block_given? ? yield(operation) : true)
    end
  end

  private

  def find_bucket(params)
    bucket(params[:bucket])
  end

  def find_object(params)
    bucket = find_bucket(params)
    bucket&.find_object(params[:key])
  end

  # Records the API call so operation_called? can inspect it later.
  def log_operation(context)
    @operations << {
      name: context.operation_name,
      params: context.params.dup
    }
  end

  def calculate_etag(context)
    # simple, reproducible ETag calculation
    Digest::MD5.hexdigest(context.params.to_json)
  end

  # Wires the stubbed client so each S3 operation reads/writes the in-memory
  # buckets. Unknown keys yield a 404-shaped response where applicable.
  def stub_methods
    @s3_client.stub_responses(:head_object, -> (context) do
      log_operation(context)
      if object = find_object(context.params)
        { content_length: object[:size], last_modified: object[:last_modified], metadata: object[:metadata] }
      else
        { status_code: 404, headers: {}, body: "" }
      end
    end)
    @s3_client.stub_responses(:get_object, -> (context) do
      log_operation(context)
      if object = find_object(context.params)
        { content_length: object[:size], body: "" }
      else
        { status_code: 404, headers: {}, body: "" }
      end
    end)
    @s3_client.stub_responses(:delete_object, -> (context) do
      log_operation(context)
      find_bucket(context.params)&.delete_object(context.params[:key])
      nil
    end)
    @s3_client.stub_responses(:copy_object, -> (context) do
      log_operation(context)
      # copy_source is "bucket/key"; split once to recover both parts.
      source_bucket_name, source_key = context.params[:copy_source].split("/", 2)
      copy_source = { bucket: source_bucket_name, key: source_key }
      if context.params[:metadata_directive] == "REPLACE"
        # REPLACE: the copy takes all attributes from the request params.
        attribute_overrides = context.params.except(:copy_source, :metadata_directive)
      else
        # COPY (default): keep the source object's attributes, only the
        # destination bucket/key change.
        attribute_overrides = context.params.slice(:key, :bucket)
      end
      # NOTE(review): assumes the copy source exists — a missing source
      # object raises here rather than returning an S3-style error.
      new_object = find_object(copy_source).dup.merge(attribute_overrides)
      find_bucket(new_object).put_object(new_object)
      { copy_object_result: { etag: calculate_etag(context) } }
    end)
    @s3_client.stub_responses(:create_multipart_upload, -> (context) do
      log_operation(context)
      find_bucket(context.params).put_object(context.params)
      { upload_id: SecureRandom.hex }
    end)
    @s3_client.stub_responses(:put_object, -> (context) do
      log_operation(context)
      find_bucket(context.params).put_object(context.params)
      { etag: calculate_etag(context) }
    end)
  end
end
# Minimal in-memory stand-in for a single S3 bucket, used by FakeS3.
# Objects are plain hashes stored under their :key entry.
class FakeS3Bucket
  attr_reader :name, :s3_helper

  # full_name:  the bucket name as configured (may include a folder prefix)
  # helper:     the S3Helper instance wired to the fake client for this bucket
  def initialize(full_name, helper)
    @name = full_name
    @s3_helper = helper
    @objects = {}
  end

  # Stores (or overwrites) an object hash under its :key and returns it.
  def put_object(obj)
    @objects.store(obj[:key], obj)
  end

  # Removes the object stored under `key`; returns it, or nil if absent.
  def delete_object(key)
    @objects.delete(key)
  end

  # Returns the object hash stored under `key`, or nil if absent.
  def find_object(key)
    @objects.fetch(key, nil)
  end
end