# debian-mirror-gitlab/spec/requests/api/project_import_spec.rb

# frozen_string_literal: true
require 'spec_helper'
RSpec.describe API::ProjectImport, :aggregate_failures do
include WorkhorseHelpers
include AfterNextHelpers
include_context 'workhorse headers'
let(:user) { create(:user) }
let(:file) { File.join('spec', 'features', 'projects', 'import_export', 'test_project_export.tar.gz') }
let(:namespace) { create(:group) }
before do
namespace.add_owner(user)
end
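# Shared example: with no authenticated user, each endpoint below is expected to
# respond with 401 Unauthorized.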
shared_examples 'requires authentication' do
let(:user) { nil }
it 'returns 401' do
subject
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
describe 'POST /projects/import' do
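# The subject drives a Workhorse-style finalized upload of the export archive to
# POST /projects/import (see the upload_archive helper at the end of this block).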
subject { upload_archive(file_upload, workhorse_headers, params) }
let(:file_upload) { fixture_file_upload(file) }
let(:params) do
{
path: 'test-import',
'file.size' => file_upload.size
}
end
before do
allow(ImportExportUploader).to receive(:workhorse_upload_path).and_return('/')
end
it_behaves_like 'requires authentication'
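# Guard against query-count regressions when scheduling an import.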
it 'executes a limited number of queries' do
control_count = ActiveRecord::QueryRecorder.new { subject }.count
expect(control_count).to be <= 105
end
it 'schedules an import using a namespace' do
stub_import(namespace)
params[:namespace] = namespace.id
subject
expect(response).to have_gitlab_http_status(:created)
end
it 'schedules an import using the namespace path' do
stub_import(namespace)
params[:namespace] = namespace.full_path
subject
expect(response).to have_gitlab_http_status(:created)
end
context 'when a name is explicitly set' do
let(:expected_name) { 'test project import' }
it 'schedules an import using a namespace and a different name' do
stub_import(namespace)
params[:name] = expected_name
params[:namespace] = namespace.id
subject
expect(response).to have_gitlab_http_status(:created)
end
it 'schedules an import using the namespace path and a different name' do
stub_import(namespace)
params[:name] = expected_name
params[:namespace] = namespace.full_path
subject
expect(response).to have_gitlab_http_status(:created)
end
it 'sets name correctly' do
stub_import(namespace)
params[:name] = expected_name
params[:namespace] = namespace.full_path
subject
project = Project.find(json_response['id'])
expect(project.name).to eq(expected_name)
end
it 'sets name correctly with an overwrite' do
stub_import(namespace)
params[:name] = 'new project name'
params[:namespace] = namespace.full_path
params[:overwrite] = true
subject
project = Project.find(json_response['id'])
expect(project.name).to eq('new project name')
end
it 'schedules an import using the path and name explicitly set to nil' do
stub_import(namespace)
params[:name] = nil
params[:namespace] = namespace.full_path
subject
project = Project.find(json_response['id'])
expect(project.name).to eq('test-import')
end
end
it 'schedules an import at the user namespace level' do
stub_import(user.namespace)
params[:path] = 'test-import2'
subject
expect(response).to have_gitlab_http_status(:created)
end
it 'does not schedule an import for a namespace that does not exist' do
expect_any_instance_of(ProjectImportState).not_to receive(:schedule)
expect(::Projects::CreateService).not_to receive(:new)
params[:namespace] = 'nonexistent'
params[:path] = 'test-import2'
subject
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Namespace Not Found')
end
it 'does not schedule an import if the user has no permission to the namespace' do
expect_any_instance_of(ProjectImportState).not_to receive(:schedule)
new_namespace = create(:group)
params[:path] = 'test-import3'
params[:namespace] = new_namespace.full_path
subject
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Namespace Not Found')
end
context 'if user uploads no valid file' do
let(:file) { 'README.md' }
it 'does not schedule an import if the user uploads no valid file' do
expect_any_instance_of(ProjectImportState).not_to receive(:schedule)
params[:path] = 'test-import3'
subject
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response['message']['error']).to eq('You need to upload a GitLab project export archive (ending in .gz).')
end
end
it 'stores params that can be overridden' do
stub_import(namespace)
override_params = { 'description' => 'Hello world' }
params[:namespace] = namespace.id
params[:override_params] = override_params
subject
import_project = Project.find(json_response['id'])
expect(import_project.import_data.data['override_params']).to eq(override_params)
end
it 'does not store params that are not allowed' do
stub_import(namespace)
override_params = { 'not_allowed' => 'Hello world' }
params[:namespace] = namespace.id
params[:override_params] = override_params
subject
import_project = Project.find(json_response['id'])
expect(import_project.import_data.data['override_params']).to be_empty
end
context 'when target path already exists in namespace' do
let(:existing_project) { create(:project, namespace: user.namespace) }
it 'does not schedule an import' do
expect_any_instance_of(ProjectImportState).not_to receive(:schedule)
params[:path] = existing_project.path
subject
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('Name has already been taken')
end
context 'when param overwrite is true' do
it 'schedules an import' do
stub_import(user.namespace)
params[:path] = existing_project.path
params[:overwrite] = true
subject
expect(response).to have_gitlab_http_status(:created)
end
end
end
context 'when request exceeds the rate limit' do
before do
allow(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true)
end
it 'prevents users from importing projects' do
params[:namespace] = namespace.id
subject
expect(response).to have_gitlab_http_status(:too_many_requests)
expect(json_response['message']['error']).to eq('This endpoint has been requested too many times. Try again later.')
end
end
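# Direct-upload scenario: the export archive is stubbed as an object under
# tmp/uploads/ in the 'uploads' bucket and passed along as the uploaded file.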
context 'when using remote storage' do
let(:file_name) { 'project_export.tar.gz' }
let!(:fog_connection) do
stub_uploads_object_storage(ImportExportUploader, direct_upload: true)
end
# rubocop:disable Rails/SaveBang
let(:tmp_object) do
fog_connection.directories.new(key: 'uploads').files.create(
key: "tmp/uploads/#{file_name}",
body: fixture_file_upload(file)
)
end
# rubocop:enable Rails/SaveBang
let(:file_upload) { fog_to_uploaded_file(tmp_object) }
it 'schedules an import' do
stub_import(namespace)
params[:namespace] = namespace.id
subject
expect(response).to have_gitlab_http_status(:created)
end
end
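# Simulates the request gitlab-workhorse makes once an upload has been accepted,
# posting the archive (under the :file key) to the import endpoint.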
def upload_archive(file, headers = {}, params = {})
workhorse_finalize(
api("/projects/import", user),
method: :post,
file_key: :file,
params: params.merge(file: file),
headers: headers,
send_rewritten_field: true
)
end
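# Asserts that an import is scheduled for the given namespace: the project import
# state must be scheduled and Projects::CreateService must be called with the
# expected namespace_id (while still calling through to create the project).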
def stub_import(namespace)
expect_any_instance_of(ProjectImportState).to receive(:schedule)
expect(::Projects::CreateService).to receive(:new).with(user, hash_including(namespace_id: namespace.id)).and_call_original
end
end
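# Schedules an import from a remote URL; gated by the import_project_from_remote_file
# feature flag.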
describe 'POST /projects/remote-import' do
subject do
post api('/projects/remote-import', user), params: params
end
let(:params) do
{
path: 'test-import',
url: 'http://some.s3.url/file'
}
end
it_behaves_like 'requires authentication'
it 'returns NOT FOUND when the feature is disabled' do
stub_feature_flags(import_project_from_remote_file: false)
subject
expect(response).to have_gitlab_http_status(:not_found)
end
context 'when the feature flag is enabled' do
before do
stub_feature_flags(import_project_from_remote_file: true)
end
context 'when the response is successful' do
it 'schedules the import successfully' do
project = create(
:project,
namespace: user.namespace,
name: 'test-import',
path: 'test-import'
)
service_response = ServiceResponse.success(payload: project)
expect_next(::Import::GitlabProjects::CreateProjectService)
.to receive(:execute)
.and_return(service_response)
subject
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to include({
'id' => project.id,
'name' => 'test-import',
'name_with_namespace' => "#{user.namespace.name} / test-import",
'path' => 'test-import',
'path_with_namespace' => "#{user.namespace.path}/test-import"
})
end
end
context 'when the service returns an error' do
it 'fails to schedule the import' do
service_response = ServiceResponse.error(
message: 'Failed to import',
http_status: :bad_request
)
expect_next(::Import::GitlabProjects::CreateProjectService)
.to receive(:execute)
.and_return(service_response)
subject
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to eq({
'message' => 'Failed to import'
})
end
end
end
end
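# Schedules an import directly from an S3 bucket; gated by the
# import_project_from_remote_file_s3 feature flag.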
describe 'POST /projects/remote-import-s3' do
subject do
post api('/projects/remote-import-s3', user), params: params
end
let(:params) do
{
path: 'test-import',
region: 'region_name',
bucket_name: 'bucket_name',
file_key: 'file_key',
access_key_id: 'access_key_id',
secret_access_key: 'secret_access_key'
}
end
it_behaves_like 'requires authentication'
it 'returns NOT FOUND when the feature is disabled' do
stub_feature_flags(import_project_from_remote_file_s3: false)
subject
expect(response).to have_gitlab_http_status(:not_found)
end
context 'when the feature flag is enabled' do
before do
stub_feature_flags(import_project_from_remote_file_s3: true)
end
context 'when the response is successful' do
it 'schedules the import successfully' do
project = create(
:project,
namespace: user.namespace,
name: 'test-import',
path: 'test-import'
)
service_response = ServiceResponse.success(payload: project)
expect_next(::Import::GitlabProjects::CreateProjectService)
.to receive(:execute)
.and_return(service_response)
subject
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to include({
'id' => project.id,
'name' => 'test-import',
'name_with_namespace' => "#{user.namespace.name} / test-import",
'path' => 'test-import',
'path_with_namespace' => "#{user.namespace.path}/test-import"
})
end
end
context 'when the service returns an error' do
it 'fails to schedule the import' do
service_response = ServiceResponse.error(
message: 'Failed to import',
http_status: :bad_request
)
expect_next(::Import::GitlabProjects::CreateProjectService)
.to receive(:execute)
.and_return(service_response)
subject
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to eq({
'message' => 'Failed to import'
})
end
end
end
end
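# Exposes the current import status of a project (and the last error when the
# import failed).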
describe 'GET /projects/:id/import' do
it 'public project accessible for an unauthenticated user' do
project = create(:project, :public)
get api("/projects/#{project.id}/import", nil)
expect(response).to have_gitlab_http_status(:ok)
end
it 'returns the import status' do
project = create(:project, :import_started)
project.add_maintainer(user)
get api("/projects/#{project.id}/import", user)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to include('import_status' => 'started')
end
it 'returns the import status and the error if failed' do
project = create(:project, :import_failed)
project.add_maintainer(user)
project.import_state.update!(last_error: 'error')
get api("/projects/#{project.id}/import", user)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to include('import_status' => 'failed',
'import_error' => 'error')
end
end
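# Workhorse pre-upload authorization. Depending on storage configuration the response
# carries either a local TempPath or RemoteObject details, roughly (illustrative shape,
# field values assumed):
#   { "TempPath" => ImportExportUploader.workhorse_local_upload_path }
#   { "RemoteObject" => { "ID" => ..., "GetURL" => ..., "StoreURL" => ..., ... } }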
describe 'POST /projects/import/authorize' do
subject { post api('/projects/import/authorize', user), headers: workhorse_headers }
it_behaves_like 'requires authentication'
it 'authorizes importing project with workhorse header' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(response.media_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response['TempPath']).to eq(ImportExportUploader.workhorse_local_upload_path)
end
it 'rejects requests that bypassed gitlab-workhorse' do
workhorse_headers.delete(Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER)
subject
expect(response).to have_gitlab_http_status(:forbidden)
end
context 'when using remote storage' do
context 'when direct upload is enabled' do
before do
stub_uploads_object_storage(ImportExportUploader, enabled: true, direct_upload: true)
end
it 'responds with status 200, location of file remote store and object details' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(response.media_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response).not_to have_key('TempPath')
expect(json_response['RemoteObject']).to have_key('ID')
expect(json_response['RemoteObject']).to have_key('GetURL')
expect(json_response['RemoteObject']).to have_key('StoreURL')
expect(json_response['RemoteObject']).to have_key('DeleteURL')
expect(json_response['RemoteObject']).to have_key('MultipartUpload')
end
end
context 'when direct upload is disabled' do
before do
stub_uploads_object_storage(ImportExportUploader, enabled: true, direct_upload: false)
end
it 'handles as a local file' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(response.media_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response['TempPath']).to eq(ImportExportUploader.workhorse_local_upload_path)
expect(json_response['RemoteObject']).to be_nil
end
end
end
end
end