# frozen_string_literal: true

require 'spec_helper'

# Specs for the Uploads::Fog data store: availability checks, key listing,
# and key deletion against Fog object storage, exercised with AWS, Google,
# and AzureRM provider credentials.
RSpec.describe Uploads::Fog do
  let(:credentials) do
    {
      provider: "AWS",
      aws_access_key_id: "AWS_ACCESS_KEY_ID",
      aws_secret_access_key: "AWS_SECRET_ACCESS_KEY",
      region: "eu-central-1"
    }
  end

  let(:bucket_prefix) { nil }
  let(:data_store) { described_class.new }
  let(:config) { { connection: credentials, bucket_prefix: bucket_prefix, remote_directory: 'uploads' } }

  before do
    stub_uploads_object_storage(FileUploader, config: config)
  end

  describe '#available?' do
    subject { data_store.available? }

    context 'when object storage is enabled' do
      it { is_expected.to be_truthy }
    end

    context 'when object storage is disabled' do
      before do
        stub_uploads_object_storage(FileUploader, config: config, enabled: false)
      end

      it { is_expected.to be_falsy }
    end
  end

  context 'model with uploads' do
    let(:project) { create(:project) }
    let(:relation) { project.uploads }
    let(:connection) { ::Fog::Storage.new(credentials) }
    let(:paths) { relation.pluck(:path) }

    # Only fog-aws simulates mocking of deleting an object properly.
    # We'll just test that the various providers implement the required methods.
    describe 'Fog provider acceptance tests' do
      let!(:uploads) { create_list(:upload, 2, :with_file, :issuable_upload, model: project) }

      shared_examples 'Fog provider' do
        describe '#get_object' do
          it 'returns a Hash with a body' do
            expect(connection.get_object('uploads', paths.first)[:body]).not_to be_nil
          end
        end

        describe '#delete_object' do
          it 'returns true' do
            expect(connection.delete_object('uploads', paths.first)).to be_truthy
          end
        end
      end

      before do
        # Move each upload to remote (object) storage so the Fog connection
        # can see it.
        uploads.each { |upload| upload.retrieve_uploader.migrate!(2) }
      end

      context 'with AWS provider' do
        it_behaves_like 'Fog provider'
      end

      context 'with Google provider' do
        let(:credentials) do
          {
            provider: "Google",
            google_storage_access_key_id: 'ACCESS_KEY_ID',
            google_storage_secret_access_key: 'SECRET_ACCESS_KEY'
          }
        end

        it_behaves_like 'Fog provider'
      end

      context 'with AzureRM provider' do
        let(:credentials) do
          {
            provider: 'AzureRM',
            azure_storage_account_name: 'test-access-id',
            azure_storage_access_key: 'secret'
          }
        end

        it_behaves_like 'Fog provider'
      end
    end

    describe '#keys' do
      let!(:uploads) { create_list(:upload, 2, :object_storage, uploader: FileUploader, model: project) }

      subject { data_store.keys(relation) }

      it 'returns keys' do
        is_expected.to match_array(relation.pluck(:path))
      end
    end

    describe '#delete_keys' do
      let(:connection) { ::Fog::Storage.new(credentials) }
      let(:keys) { data_store.keys(relation) }
      let(:paths) { relation.pluck(:path) }
      let!(:uploads) { create_list(:upload, 2, :with_file, :issuable_upload, model: project) }

      subject { data_store.delete_keys(keys) }

      before do
        # Move each upload to remote (object) storage so deletion goes
        # through the Fog connection.
        uploads.each { |upload| upload.retrieve_uploader.migrate!(2) }
      end

      it 'deletes multiple data' do
        paths.each do |path|
          expect(connection.get_object('uploads', path)[:body]).not_to be_nil
        end

        subject

        paths.each do |path|
          expect { connection.get_object('uploads', path)[:body] }.to raise_error(Excon::Error::NotFound)
        end
      end

      context 'with bucket prefix' do
        let(:bucket_prefix) { 'test-prefix' }

        it 'deletes multiple data' do
          paths.each do |path|
            expect(connection.get_object('uploads', File.join(bucket_prefix, path))[:body]).not_to be_nil
          end

          subject

          paths.each do |path|
            expect { connection.get_object('uploads', File.join(bucket_prefix, path))[:body] }.to raise_error(Excon::Error::NotFound)
          end
        end
      end

      context 'when one of keys is missing' do
        let(:keys) { ['unknown'] + super() }

        it 'deletes only existing keys' do
          paths.each do |path|
            expect(connection.get_object('uploads', path)[:body]).not_to be_nil
          end

          # The missing key raises on delete_object; delete_keys must swallow
          # it and continue deleting the remaining existing keys.
          expect_next_instance_of(::Fog::Storage) do |storage|
            allow(storage).to receive(:delete_object).and_call_original
            expect(storage).to receive(:delete_object).with('uploads', keys.first).and_raise(::Google::Apis::ClientError, 'NotFound')
          end

          subject

          paths.each do |path|
            expect { connection.get_object('uploads', path)[:body] }.to raise_error(Excon::Error::NotFound)
          end
        end
      end
    end
  end
end