# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::MergeRequestsController do
include ProjectForksHelper
include Gitlab::Routing
let_it_be_with_refind(:project) { create(:project, :repository) }
let_it_be_with_reload(:project_public_with_private_builds) { create(:project, :repository, :public, :builds_private) }
2021-04-29 21:17:54 +05:30
2021-01-29 00:20:46 +05:30
let(:user) { project.owner }
2015-09-11 14:41:01 +05:30
let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
before do
sign_in(user)
2017-08-17 22:00:37 +05:30
end
describe 'GET commit_change_content' do
it 'renders commit_change_content template' do
get :commit_change_content,
2019-02-15 15:39:39 +05:30
params: {
namespace_id: project.namespace.to_param,
project_id: project,
id: merge_request.iid
},
2017-08-17 22:00:37 +05:30
format: 'html'
expect(response).to render_template('_commit_change_content')
end
2015-12-23 02:04:40 +05:30
end
describe "GET show" do
2017-08-17 22:00:37 +05:30
def go(extra_params = {})
params = {
namespace_id: project.namespace.to_param,
project_id: project,
id: merge_request.iid
}
2019-02-15 15:39:39 +05:30
get :show, params: params.merge(extra_params)
2017-08-17 22:00:37 +05:30
end
2021-04-29 21:17:54 +05:30
context 'with the invite_members_in_comment experiment', :experiment do
context 'when user can invite' do
before do
stub_experiments(invite_members_in_comment: :invite_member_link)
project.add_maintainer(user)
end
it 'assigns the candidate experience and tracks the event' do
expect(experiment(:invite_members_in_comment)).to track(:view, property: project.root_ancestor.id.to_s)
.for(:invite_member_link)
.with_context(namespace: project.root_ancestor)
.on_next_instance
go
end
end
context 'when user can not invite' do
it 'does not track the event' do
expect(experiment(:invite_members_in_comment)).not_to track(:view)
go
end
end
end
2020-11-24 15:15:51 +05:30
context 'with view param' do
before do
go(view: 'parallel')
end
it 'saves the preferred diff view in a cookie' do
expect(response.cookies['diff_view']).to eq('parallel')
end
end
2020-03-13 15:44:24 +05:30
context 'when merge request is unchecked' do
before do
merge_request.mark_as_unchecked!
end
2021-04-29 21:17:54 +05:30
context 'check_mergeability_async_in_widget feature flag is disabled' do
before do
stub_feature_flags(check_mergeability_async_in_widget: false)
2020-03-13 15:44:24 +05:30
end
2021-04-29 21:17:54 +05:30
it 'checks mergeability asynchronously' do
expect_next_instance_of(MergeRequests::MergeabilityCheckService) do |service|
expect(service).not_to receive(:execute)
expect(service).to receive(:async_execute)
end
go
end
2020-03-13 15:44:24 +05:30
end
end
2017-08-17 22:00:37 +05:30
describe 'as html' do
2018-11-08 19:23:39 +05:30
context 'when diff files were cleaned' do
render_views
it 'renders page when diff size is not persisted and diff_refs does not exist' do
diff = merge_request.merge_request_diff
diff.clean!
diff.update!(real_size: nil,
start_commit_sha: nil,
base_commit_sha: nil)
go(format: :html)
2019-12-04 20:38:33 +05:30
expect(response).to be_successful
2018-11-08 19:23:39 +05:30
end
end
2020-11-24 15:15:51 +05:30
context 'with `default_merge_ref_for_diffs` feature flag enabled' do
before do
stub_feature_flags(default_merge_ref_for_diffs: true)
go
end
it 'adds the diff_head parameter' do
expect(assigns["endpoint_metadata_url"]).to eq(
diffs_metadata_project_json_merge_request_path(
project,
merge_request,
'json',
2021-01-29 00:20:46 +05:30
diff_head: true,
view: 'inline'))
2020-11-24 15:15:51 +05:30
end
end
2020-04-08 14:13:33 +05:30
context 'when diff is missing' do
render_views
it 'renders merge request page' do
2021-01-29 00:20:46 +05:30
merge_request.merge_request_diff.destroy!
2020-04-08 14:13:33 +05:30
go(format: :html)
expect(response).to be_successful
end
end
2017-08-17 22:00:37 +05:30
it "renders merge request page" do
2019-07-07 11:18:12 +05:30
expect(::Gitlab::GitalyClient).to receive(:allow_ref_name_caching).and_call_original
2017-08-17 22:00:37 +05:30
go(format: :html)
2015-09-11 14:41:01 +05:30
2019-12-04 20:38:33 +05:30
expect(response).to be_successful
2015-09-11 14:41:01 +05:30
end
2018-03-17 18:26:18 +05:30
2020-11-24 15:15:51 +05:30
it 'logs the view with Gitlab::Search::RecentMergeRequests' do
recent_merge_requests_double = instance_double(::Gitlab::Search::RecentMergeRequests, log_view: nil)
expect(::Gitlab::Search::RecentMergeRequests).to receive(:new).with(user: user).and_return(recent_merge_requests_double)
go(format: :html)
expect(response).to be_successful
expect(recent_merge_requests_double).to have_received(:log_view).with(merge_request)
end
2018-11-08 19:23:39 +05:30
context "that is invalid" do
let(:merge_request) { create(:invalid_merge_request, target_project: project, source_project: project) }
it "renders merge request page" do
go(format: :html)
2019-12-04 20:38:33 +05:30
expect(response).to be_successful
end
end
context 'when project has moved' do
let(:new_project) { create(:project) }
before do
2021-01-29 00:20:46 +05:30
project.route.destroy!
2019-12-04 20:38:33 +05:30
new_project.redirect_routes.create!(path: project.full_path)
new_project.add_developer(user)
end
it 'redirects from an old merge request correctly' do
get :show,
params: {
namespace_id: project.namespace,
project_id: project,
id: merge_request
}
expect(response).to redirect_to(project_merge_request_path(new_project, merge_request))
2021-01-29 00:20:46 +05:30
expect(response).to have_gitlab_http_status(:moved_permanently)
2019-12-04 20:38:33 +05:30
end
it 'redirects from an old merge request commits correctly' do
get :commits,
params: {
namespace_id: project.namespace,
project_id: project,
id: merge_request
}
expect(response).to redirect_to(commits_project_merge_request_path(new_project, merge_request))
2021-01-29 00:20:46 +05:30
expect(response).to have_gitlab_http_status(:moved_permanently)
2018-11-08 19:23:39 +05:30
end
end
2017-08-17 22:00:37 +05:30
end
2015-09-11 14:41:01 +05:30
2018-12-13 13:39:08 +05:30
context 'when user is setting notes filters' do
let(:issuable) { merge_request }
2019-03-02 22:35:43 +05:30
let(:issuable_parent) { project }
2018-12-13 13:39:08 +05:30
let!(:discussion_note) { create(:discussion_note_on_merge_request, :system, noteable: issuable, project: project) }
let!(:discussion_comment) { create(:discussion_note_on_merge_request, noteable: issuable, project: project) }
it_behaves_like 'issuable notes filter'
end
2017-08-17 22:00:37 +05:30
describe 'as json' do
2019-07-07 11:18:12 +05:30
before do
expect(::Gitlab::GitalyClient).to receive(:allow_ref_name_caching).and_call_original
end
2018-03-17 18:26:18 +05:30
context 'with basic serializer param' do
2017-08-17 22:00:37 +05:30
it 'renders basic MR entity as json' do
2018-03-17 18:26:18 +05:30
go(serializer: 'basic', format: :json)
2015-09-11 14:41:01 +05:30
2017-08-17 22:00:37 +05:30
expect(response).to match_response_schema('entities/merge_request_basic')
end
2015-09-11 14:41:01 +05:30
end
2018-03-17 18:26:18 +05:30
context 'with widget serializer param' do
it 'renders widget MR entity as json' do
go(serializer: 'widget', format: :json)
2015-09-11 14:41:01 +05:30
2018-03-17 18:26:18 +05:30
expect(response).to match_response_schema('entities/merge_request_widget')
2017-08-17 22:00:37 +05:30
end
2015-09-11 14:41:01 +05:30
end
2018-03-17 18:26:18 +05:30
context 'when no serialiser was passed' do
it 'renders widget MR entity as json' do
go(serializer: nil, format: :json)
2015-09-11 14:41:01 +05:30
2018-03-17 18:26:18 +05:30
expect(response).to match_response_schema('entities/merge_request_widget')
2017-08-17 22:00:37 +05:30
end
2015-09-11 14:41:01 +05:30
end
2018-11-08 19:23:39 +05:30
context "that is invalid" do
let(:merge_request) { create(:invalid_merge_request, target_project: project, source_project: project) }
it "renders merge request page" do
go(format: :json)
2019-12-04 20:38:33 +05:30
expect(response).to be_successful
2018-11-08 19:23:39 +05:30
end
end
2015-09-11 14:41:01 +05:30
end
describe "as diff" do
it "triggers workhorse to serve the request" do
2017-08-17 22:00:37 +05:30
go(format: :diff)
2015-09-11 14:41:01 +05:30
expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-diff:")
2015-09-11 14:41:01 +05:30
end
end
describe "as patch" do
2016-08-24 12:49:21 +05:30
it 'triggers workhorse to serve the request' do
2017-08-17 22:00:37 +05:30
go(format: :patch)
2015-09-11 14:41:01 +05:30
2016-08-24 12:49:21 +05:30
expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-format-patch:")
2015-09-11 14:41:01 +05:30
end
end
end
describe 'GET index' do
2017-09-10 17:25:29 +05:30
let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
2017-08-17 22:00:37 +05:30
def get_merge_requests(page = nil)
2016-04-02 18:10:28 +05:30
get :index,
2019-02-15 15:39:39 +05:30
params: {
namespace_id: project.namespace.to_param,
project_id: project,
state: 'opened',
page: page.to_param
}
2017-08-17 22:00:37 +05:30
end
it_behaves_like "issuables list meta-data", :merge_request
2019-03-02 22:35:43 +05:30
it_behaves_like 'set sort order from user preference' do
let(:sorting_param) { 'updated_asc' }
end
2019-02-15 15:39:39 +05:30
2017-08-17 22:00:37 +05:30
context 'when page param' do
2019-03-02 22:35:43 +05:30
let(:last_page) { project.merge_requests.page.total_pages }
2017-08-17 22:00:37 +05:30
let!(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
it 'redirects to last_page if page number is larger than number of pages' do
get_merge_requests(last_page + 1)
2019-12-26 22:10:19 +05:30
expect(response).to redirect_to(project_merge_requests_path(project, page: last_page, state: controller.params[:state], scope: controller.params[:scope]))
2017-08-17 22:00:37 +05:30
end
it 'redirects to specified page' do
get_merge_requests(last_page)
expect(assigns(:merge_requests).current_page).to eq(last_page)
2020-03-13 15:44:24 +05:30
expect(response).to have_gitlab_http_status(:ok)
2017-08-17 22:00:37 +05:30
end
it 'does not redirect to external sites when provided a host field' do
external_host = "www.example.com"
get :index,
2019-02-15 15:39:39 +05:30
params: {
namespace_id: project.namespace.to_param,
project_id: project,
state: 'opened',
page: (last_page + 1).to_param,
host: external_host
}
2017-08-17 22:00:37 +05:30
2019-12-26 22:10:19 +05:30
expect(response).to redirect_to(project_merge_requests_path(project, page: last_page, state: controller.params[:state], scope: controller.params[:scope]))
2017-08-17 22:00:37 +05:30
end
2016-04-02 18:10:28 +05:30
end
context 'when filtering by opened state' do
context 'with opened merge requests' do
2016-09-13 17:45:13 +05:30
it 'lists those merge requests' do
2017-09-10 17:25:29 +05:30
expect(merge_request).to be_persisted
2016-04-02 18:10:28 +05:30
get_merge_requests
expect(assigns(:merge_requests)).to include(merge_request)
end
end
context 'with reopened merge requests' do
before do
merge_request.close!
merge_request.reopen!
end
2016-09-13 17:45:13 +05:30
it 'lists those merge requests' do
2016-04-02 18:10:28 +05:30
get_merge_requests
expect(assigns(:merge_requests)).to include(merge_request)
end
end
end
end
describe 'PUT update' do
2018-03-17 18:26:18 +05:30
def update_merge_request(mr_params, additional_params = {})
params = {
namespace_id: project.namespace,
project_id: project,
id: merge_request.iid,
merge_request: mr_params
}.merge(additional_params)
2019-02-15 15:39:39 +05:30
put :update, params: params
2018-03-17 18:26:18 +05:30
end
2017-08-17 22:00:37 +05:30
context 'changing the assignee' do
it 'limits the attributes exposed on the assignee' do
assignee = create(:user)
project.add_developer(assignee)
2019-07-31 22:56:46 +05:30
update_merge_request({ assignee_ids: [assignee.id] }, format: :json)
2019-09-30 21:07:59 +05:30
expect(json_response['assignees']).to all(include(*%w(name username avatar_url id state web_url)))
2017-08-17 22:00:37 +05:30
end
end
2018-03-17 18:26:18 +05:30
context 'when user does not have access to update issue' do
before do
reporter = create(:user)
project.add_reporter(reporter)
sign_in(reporter)
end
it 'responds with 404' do
update_merge_request(title: 'New title')
2020-03-13 15:44:24 +05:30
expect(response).to have_gitlab_http_status(:not_found)
2018-03-17 18:26:18 +05:30
end
end
2016-06-02 11:05:42 +05:30
context 'there is no source project' do
2019-03-02 22:35:43 +05:30
let(:forked_project) { fork_project_with_submodules(project) }
2018-03-17 18:26:18 +05:30
let!(:merge_request) { create(:merge_request, source_project: forked_project, source_branch: 'add-submodule-version-bump', target_branch: 'master', target_project: project) }
2016-06-02 11:05:42 +05:30
before do
2021-01-29 00:20:46 +05:30
forked_project.destroy!
2016-06-02 11:05:42 +05:30
end
it 'closes MR without errors' do
2018-03-17 18:26:18 +05:30
update_merge_request(state_event: 'close')
2016-06-02 11:05:42 +05:30
2020-10-24 23:57:45 +05:30
expect(response).to redirect_to([merge_request.target_project, merge_request])
2016-06-02 11:05:42 +05:30
expect(merge_request.reload.closed?).to be_truthy
end
2016-09-29 09:46:39 +05:30
it 'allows editing of a closed merge request' do
merge_request.close!
2018-03-17 18:26:18 +05:30
update_merge_request(title: 'New title')
2016-09-29 09:46:39 +05:30
2020-10-24 23:57:45 +05:30
expect(response).to redirect_to([merge_request.target_project, merge_request])
2016-09-29 09:46:39 +05:30
expect(merge_request.reload.title).to eq 'New title'
end
it 'does not allow to update target branch closed merge request' do
merge_request.close!
2018-03-17 18:26:18 +05:30
update_merge_request(target_branch: 'new_branch')
2016-09-29 09:46:39 +05:30
expect { merge_request.reload.target_branch }.not_to change { merge_request.target_branch }
end
2017-08-17 22:00:37 +05:30
it_behaves_like 'update invalid issuable', MergeRequest
2016-06-02 11:05:42 +05:30
end
2019-02-15 15:39:39 +05:30
context 'two merge requests with the same source branch' do
it 'does not allow a closed merge request to be reopened if another one is open' do
merge_request.close!
create(:merge_request, source_project: merge_request.source_project, source_branch: merge_request.source_branch)
update_merge_request(state_event: 'reopen')
errors = assigns[:merge_request].errors
expect(errors[:validate_branches]).to include(/Another open merge request already exists for this source branch/)
expect(merge_request.reload).to be_closed
end
end
2016-06-02 11:05:42 +05:30
end
describe 'POST merge' do
let(:base_params) do
{
2017-08-17 22:00:37 +05:30
namespace_id: project.namespace,
project_id: project,
id: merge_request.iid,
2018-11-08 19:23:39 +05:30
squash: false,
2017-08-17 22:00:37 +05:30
format: 'json'
}
end
2017-08-17 22:00:37 +05:30
context 'when user cannot access' do
2017-09-10 17:25:29 +05:30
let(:user) { create(:user) }
before do
2017-08-17 22:00:37 +05:30
project.add_reporter(user)
2019-02-15 15:39:39 +05:30
post :merge, params: base_params, xhr: true
end
2017-08-17 22:00:37 +05:30
it 'returns 404' do
2020-03-13 15:44:24 +05:30
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'when the merge request is not mergeable' do
before do
2021-01-29 00:20:46 +05:30
merge_request.update!(title: "WIP: #{merge_request.title}")
2019-02-15 15:39:39 +05:30
post :merge, params: base_params
end
it 'returns :failed' do
2017-08-17 22:00:37 +05:30
expect(json_response).to eq('status' => 'failed')
end
end
context 'when the sha parameter does not match the source SHA' do
2017-09-10 17:25:29 +05:30
before do
2019-02-15 15:39:39 +05:30
post :merge, params: base_params.merge(sha: 'foo')
2017-09-10 17:25:29 +05:30
end
it 'returns :sha_mismatch' do
2017-08-17 22:00:37 +05:30
expect(json_response).to eq('status' => 'sha_mismatch')
end
end
context 'when the sha parameter matches the source SHA' do
2018-11-08 19:23:39 +05:30
def merge_with_sha(params = {})
post_params = base_params.merge(sha: merge_request.diff_head_sha).merge(params)
2019-02-15 15:39:39 +05:30
post :merge, params: post_params, as: :json
end
it 'returns :success' do
merge_with_sha
2017-08-17 22:00:37 +05:30
expect(json_response).to eq('status' => 'success')
end
2018-11-08 19:23:39 +05:30
it 'starts the merge immediately with permitted params' do
2019-12-26 22:10:19 +05:30
expect(MergeWorker).to receive(:perform_async).with(merge_request.id, anything, { 'sha' => merge_request.diff_head_sha })
merge_with_sha
end
2018-11-08 19:23:39 +05:30
context 'when squash is passed as 1' do
it 'updates the squash attribute on the MR to true' do
2021-01-29 00:20:46 +05:30
merge_request.update!(squash: false)
2018-11-08 19:23:39 +05:30
merge_with_sha(squash: '1')
2020-07-28 23:09:34 +05:30
expect(merge_request.reload.squash_on_merge?).to be_truthy
2018-11-08 19:23:39 +05:30
end
end
context 'when squash is passed as 0' do
it 'updates the squash attribute on the MR to false' do
2021-01-29 00:20:46 +05:30
merge_request.update!(squash: true)
2018-11-08 19:23:39 +05:30
merge_with_sha(squash: '0')
2020-07-28 23:09:34 +05:30
expect(merge_request.reload.squash_on_merge?).to be_falsey
2018-11-08 19:23:39 +05:30
end
end
2019-03-02 22:35:43 +05:30
context 'when a squash commit message is passed' do
let(:message) { 'My custom squash commit message' }
2019-12-26 22:10:19 +05:30
it 'passes the same message to SquashService', :sidekiq_might_not_need_inline do
params = { squash: '1',
squash_commit_message: message,
sha: merge_request.diff_head_sha }
expected_squash_params = { squash_commit_message: message,
sha: merge_request.diff_head_sha,
merge_request: merge_request }
2019-03-02 22:35:43 +05:30
2019-12-26 22:10:19 +05:30
expect_next_instance_of(MergeRequests::SquashService, project, user, expected_squash_params) do |squash_service|
2019-03-02 22:35:43 +05:30
expect(squash_service).to receive(:execute).and_return({
status: :success,
squash_sha: SecureRandom.hex(20)
})
end
merge_with_sha(params)
end
end
2019-09-04 21:01:54 +05:30
context 'when merge when pipeline succeeds option is passed' do
2018-03-17 18:26:18 +05:30
let!(:head_pipeline) do
create(:ci_empty_pipeline, project: project, sha: merge_request.diff_head_sha, ref: merge_request.source_branch, head_pipeline_of: merge_request)
end
2018-03-17 18:26:18 +05:30
def merge_when_pipeline_succeeds
2019-02-15 15:39:39 +05:30
post :merge, params: base_params.merge(sha: merge_request.diff_head_sha, merge_when_pipeline_succeeds: '1')
end
2017-08-17 22:00:37 +05:30
it 'returns :merge_when_pipeline_succeeds' do
merge_when_pipeline_succeeds
2017-08-17 22:00:37 +05:30
expect(json_response).to eq('status' => 'merge_when_pipeline_succeeds')
end
2017-08-17 22:00:37 +05:30
it 'sets the MR to merge when the pipeline succeeds' do
service = double(:merge_when_pipeline_succeeds_service)
2019-09-04 21:01:54 +05:30
allow(service).to receive(:available_for?) { true }
2019-09-04 21:01:54 +05:30
expect(AutoMerge::MergeWhenPipelineSucceedsService)
2017-08-17 22:00:37 +05:30
.to receive(:new).with(project, anything, anything)
.and_return(service)
expect(service).to receive(:execute).with(merge_request)
2017-08-17 22:00:37 +05:30
merge_when_pipeline_succeeds
end
context 'when project.only_allow_merge_if_pipeline_succeeds? is true' do
before do
project.update_column(:only_allow_merge_if_pipeline_succeeds, true)
end
2018-03-17 18:26:18 +05:30
context 'and head pipeline is not the current one' do
before do
2021-01-29 00:20:46 +05:30
head_pipeline.update!(sha: 'not_current_sha')
2018-03-17 18:26:18 +05:30
end
it 'returns :failed' do
merge_when_pipeline_succeeds
expect(json_response).to eq('status' => 'failed')
end
end
2017-08-17 22:00:37 +05:30
it 'returns :merge_when_pipeline_succeeds' do
merge_when_pipeline_succeeds
expect(json_response).to eq('status' => 'merge_when_pipeline_succeeds')
end
end
2019-09-04 21:01:54 +05:30
context 'when auto merge has not been enabled yet' do
it 'calls AutoMergeService#execute' do
expect_next_instance_of(AutoMergeService) do |service|
expect(service).to receive(:execute).with(merge_request, 'merge_when_pipeline_succeeds')
end
merge_when_pipeline_succeeds
end
end
context 'when auto merge has already been enabled' do
before do
merge_request.update!(auto_merge_enabled: true, merge_user: user)
end
it 'calls AutoMergeService#update' do
expect_next_instance_of(AutoMergeService) do |service|
expect(service).to receive(:update).with(merge_request)
end
merge_when_pipeline_succeeds
end
end
2017-08-17 22:00:37 +05:30
end
describe 'only_allow_merge_if_all_discussions_are_resolved? setting' do
let(:merge_request) { create(:merge_request_with_diff_notes, source_project: project, author: user) }
context 'when enabled' do
before do
project.update_column(:only_allow_merge_if_all_discussions_are_resolved, true)
end
context 'with unresolved discussion' do
before do
expect(merge_request).not_to be_discussions_resolved
end
it 'returns :failed' do
merge_with_sha
expect(json_response).to eq('status' => 'failed')
end
end
context 'with all discussions resolved' do
before do
merge_request.discussions.each { |d| d.resolve!(user) }
expect(merge_request).to be_discussions_resolved
end
it 'returns :success' do
merge_with_sha
expect(json_response).to eq('status' => 'success')
end
end
end
2016-08-24 12:49:21 +05:30
2017-08-17 22:00:37 +05:30
context 'when disabled' do
2016-08-24 12:49:21 +05:30
before do
2017-08-17 22:00:37 +05:30
project.update_column(:only_allow_merge_if_all_discussions_are_resolved, false)
2016-08-24 12:49:21 +05:30
end
2017-08-17 22:00:37 +05:30
context 'with unresolved discussion' do
before do
expect(merge_request).not_to be_discussions_resolved
end
2016-08-24 12:49:21 +05:30
2017-08-17 22:00:37 +05:30
it 'returns :success' do
merge_with_sha
expect(json_response).to eq('status' => 'success')
end
end
context 'with all discussions resolved' do
before do
merge_request.discussions.each { |d| d.resolve!(user) }
expect(merge_request).to be_discussions_resolved
end
it 'returns :success' do
merge_with_sha
expect(json_response).to eq('status' => 'success')
end
2016-08-24 12:49:21 +05:30
end
end
end
end
end
describe "DELETE destroy" do
2017-09-10 17:25:29 +05:30
let(:user) { create(:user) }
2016-06-02 11:05:42 +05:30
it "denies access to users unless they're admin or project owner" do
2019-02-15 15:39:39 +05:30
delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid }
2016-06-02 11:05:42 +05:30
2020-03-13 15:44:24 +05:30
expect(response).to have_gitlab_http_status(:not_found)
2016-06-02 11:05:42 +05:30
end
context "when the user is owner" do
2021-01-29 00:20:46 +05:30
let_it_be(:owner) { create(:user) }
let_it_be(:namespace) { create(:namespace, owner: owner) }
let_it_be(:project) { create(:project, :repository, namespace: namespace) }
2016-06-02 11:05:42 +05:30
2017-09-10 17:25:29 +05:30
before do
sign_in owner
end
2016-06-02 11:05:42 +05:30
it "deletes the merge request" do
2019-12-04 20:38:33 +05:30
delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid, destroy_confirm: true }
2016-06-02 11:05:42 +05:30
2020-03-13 15:44:24 +05:30
expect(response).to have_gitlab_http_status(:found)
2017-09-10 17:25:29 +05:30
expect(controller).to set_flash[:notice].to(/The merge request was successfully deleted\./)
2016-06-02 11:05:42 +05:30
end
2016-09-29 09:46:39 +05:30
2019-12-04 20:38:33 +05:30
it "prevents deletion if destroy_confirm is not set" do
2020-01-01 13:55:28 +05:30
expect(Gitlab::ErrorTracking).to receive(:track_exception).and_call_original
2019-12-04 20:38:33 +05:30
delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid }
2020-03-13 15:44:24 +05:30
expect(response).to have_gitlab_http_status(:found)
2019-12-04 20:38:33 +05:30
expect(controller).to set_flash[:notice].to('Destroy confirmation not provided for merge request')
end
it "prevents deletion in JSON format if destroy_confirm is not set" do
2020-01-01 13:55:28 +05:30
expect(Gitlab::ErrorTracking).to receive(:track_exception).and_call_original
2019-12-04 20:38:33 +05:30
delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid, format: 'json' }
2020-03-13 15:44:24 +05:30
expect(response).to have_gitlab_http_status(:unprocessable_entity)
2019-12-04 20:38:33 +05:30
expect(json_response).to eq({ 'errors' => 'Destroy confirmation not provided for merge request' })
end
2016-06-02 11:05:42 +05:30
end
end
describe 'GET commits' do
def go(format: 'html')
get :commits,
2019-02-15 15:39:39 +05:30
params: {
namespace_id: project.namespace.to_param,
project_id: project,
id: merge_request.iid
},
2015-09-11 14:41:01 +05:30
format: format
end
2017-09-10 17:25:29 +05:30
it 'renders the commits template to a string' do
go format: 'json'
2015-09-11 14:41:01 +05:30
2017-09-10 17:25:29 +05:30
expect(response).to render_template('projects/merge_requests/_commits')
expect(json_response).to have_key('html')
2015-09-11 14:41:01 +05:30
end
end
describe 'GET pipelines' do
2017-08-17 22:00:37 +05:30
before do
create(:ci_pipeline, project: merge_request.source_project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha)
2017-09-10 17:25:29 +05:30
get :pipelines,
2019-02-15 15:39:39 +05:30
params: {
namespace_id: project.namespace.to_param,
project_id: project,
id: merge_request.iid
},
2017-09-10 17:25:29 +05:30
format: :json
2016-09-13 17:45:13 +05:30
end
2019-07-31 22:56:46 +05:30
context 'with "enabled" builds on a public project' do
let(:project) { create(:project, :repository, :public) }
context 'for a project owner' do
it 'responds with serialized pipelines' do
expect(json_response['pipelines']).to be_present
expect(json_response['count']['all']).to eq(1)
expect(response).to include_pagination_headers
end
end
context 'for an unassociated user' do
let(:user) { create :user }
it 'responds with no pipelines' do
expect(json_response['pipelines']).to be_present
expect(json_response['count']['all']).to eq(1)
expect(response).to include_pagination_headers
end
end
end
context 'with private builds on a public project' do
2021-01-29 00:20:46 +05:30
let(:project) { project_public_with_private_builds }
2019-07-31 22:56:46 +05:30
context 'for a project owner' do
it 'responds with serialized pipelines' do
expect(json_response['pipelines']).to be_present
expect(json_response['count']['all']).to eq(1)
expect(response).to include_pagination_headers
end
end
context 'for an unassociated user' do
let(:user) { create :user }
it 'responds with no pipelines' do
expect(json_response['pipelines']).to be_empty
expect(json_response['count']['all']).to eq(0)
expect(response).to include_pagination_headers
end
end
context 'from a project fork' do
let(:fork_user) { create :user }
let(:forked_project) { fork_project(project, fork_user, repository: true) } # Forked project carries over :builds_private
let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: forked_project) }
context 'with private builds' do
context 'for the target project member' do
2019-12-26 22:10:19 +05:30
it 'does not respond with serialized pipelines', :sidekiq_might_not_need_inline do
2019-07-31 22:56:46 +05:30
expect(json_response['pipelines']).to be_empty
expect(json_response['count']['all']).to eq(0)
expect(response).to include_pagination_headers
end
end
context 'for the source project member' do
let(:user) { fork_user }
2019-12-26 22:10:19 +05:30
it 'responds with serialized pipelines', :sidekiq_might_not_need_inline do
2019-07-31 22:56:46 +05:30
expect(json_response['pipelines']).to be_present
expect(json_response['count']['all']).to eq(1)
expect(response).to include_pagination_headers
end
end
end
context 'with public builds' do
let(:forked_project) do
fork_project(project, fork_user, repository: true).tap do |new_project|
2021-01-29 00:20:46 +05:30
new_project.project_feature.update!(builds_access_level: ProjectFeature::ENABLED)
2019-07-31 22:56:46 +05:30
end
end
context 'for the target project member' do
2019-12-26 22:10:19 +05:30
it 'does not respond with serialized pipelines', :sidekiq_might_not_need_inline do
2019-07-31 22:56:46 +05:30
expect(json_response['pipelines']).to be_present
expect(json_response['count']['all']).to eq(1)
expect(response).to include_pagination_headers
end
end
context 'for the source project member' do
let(:user) { fork_user }
2019-12-26 22:10:19 +05:30
it 'responds with serialized pipelines', :sidekiq_might_not_need_inline do
2019-07-31 22:56:46 +05:30
expect(json_response['pipelines']).to be_present
expect(json_response['count']['all']).to eq(1)
expect(response).to include_pagination_headers
end
end
end
end
2016-09-13 17:45:13 +05:30
end
end
describe 'GET context commits' do
it 'returns the commits for context commits' do
get :context_commits,
params: {
namespace_id: project.namespace.to_param,
project_id: project,
id: merge_request.iid
},
format: 'json'
expect(response).to have_gitlab_http_status(:success)
expect(json_response).to be_an Array
end
end
describe 'GET exposed_artifacts' do
2021-01-29 00:20:46 +05:30
let_it_be(:merge_request) do
2019-12-26 22:10:19 +05:30
create(:merge_request,
:with_merge_request_pipeline,
target_project: project,
source_project: project)
end
let(:pipeline) do
create(:ci_pipeline,
:success,
project: merge_request.source_project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha)
end
let!(:job) { create(:ci_build, pipeline: pipeline, options: job_options) }
let!(:job_metadata) { create(:ci_job_artifact, :metadata, job: job) }
before do
allow_any_instance_of(MergeRequest)
.to receive(:find_exposed_artifacts)
.and_return(report)
allow_any_instance_of(MergeRequest)
.to receive(:actual_head_pipeline)
.and_return(pipeline)
end
subject do
get :exposed_artifacts, params: {
namespace_id: project.namespace.to_param,
project_id: project,
id: merge_request.iid
},
format: :json
end
describe 'permissions on a public project with private CI/CD' do
let(:project) { create :project, :repository, :public, :builds_private }
let(:report) { { status: :parsed, data: [] } }
let(:job_options) { {} }
context 'while signed out' do
before do
sign_out(user)
end
it 'responds with a 404' do
subject
2020-03-13 15:44:24 +05:30
expect(response).to have_gitlab_http_status(:not_found)
2019-12-26 22:10:19 +05:30
expect(response.body).to be_blank
end
end
context 'while signed in as an unrelated user' do
before do
sign_in(create(:user))
end
it 'responds with a 404' do
subject
2020-03-13 15:44:24 +05:30
expect(response).to have_gitlab_http_status(:not_found)
2019-12-26 22:10:19 +05:30
expect(response.body).to be_blank
end
end
end
context 'when pipeline has jobs with exposed artifacts' do
let(:job_options) do
{
artifacts: {
paths: ['ci_artifacts.txt'],
expose_as: 'Exposed artifact'
}
}
end
context 'when fetching exposed artifacts is in progress' do
let(:report) { { status: :parsing } }
it 'sends polling interval' do
expect(Gitlab::PollingInterval).to receive(:set_header)
subject
end
it 'returns 204 HTTP status' do
subject
expect(response).to have_gitlab_http_status(:no_content)
end
end
context 'when fetching exposed artifacts is completed' do
let(:data) do
Ci::GenerateExposedArtifactsReportService.new(project, user)
.execute(nil, pipeline)
end
let(:report) { { status: :parsed, data: data } }
it 'returns exposed artifacts' do
subject
2020-03-13 15:44:24 +05:30
expect(response).to have_gitlab_http_status(:ok)
2019-12-26 22:10:19 +05:30
expect(json_response['status']).to eq('parsed')
expect(json_response['data']).to eq([{
'job_name' => 'test',
'job_path' => project_job_path(project, job),
'url' => file_project_job_artifacts_path(project, job, 'ci_artifacts.txt'),
'text' => 'Exposed artifact'
}])
end
end
end
context 'when pipeline does not have jobs with exposed artifacts' do
let(:report) { double }
let(:job_options) do
{
artifacts: {
paths: ['ci_artifacts.txt']
}
}
end
it 'returns no content' do
subject
2020-03-13 15:44:24 +05:30
expect(response).to have_gitlab_http_status(:no_content)
2019-12-26 22:10:19 +05:30
expect(response.body).to be_empty
end
end
end
2020-04-08 14:13:33 +05:30
describe 'GET coverage_reports' do
2021-01-29 00:20:46 +05:30
let_it_be(:merge_request) do
2020-04-08 14:13:33 +05:30
create(:merge_request,
:with_merge_request_pipeline,
target_project: project,
source_project: project)
end
let(:pipeline) do
create(:ci_pipeline,
:success,
project: merge_request.source_project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha)
end
before do
allow_any_instance_of(MergeRequest)
.to receive(:find_coverage_reports)
.and_return(report)
allow_any_instance_of(MergeRequest)
.to receive(:actual_head_pipeline)
.and_return(pipeline)
end
subject do
get :coverage_reports, params: {
namespace_id: project.namespace.to_param,
project_id: project,
id: merge_request.iid
},
format: :json
end
describe 'permissions on a public project with private CI/CD' do
let(:project) { create :project, :repository, :public, :builds_private }
let(:report) { { status: :parsed, data: [] } }
context 'while signed out' do
before do
sign_out(user)
end
it 'responds with a 404' do
subject
expect(response).to have_gitlab_http_status(:not_found)
expect(response.body).to be_blank
end
end
context 'while signed in as an unrelated user' do
before do
sign_in(create(:user))
end
it 'responds with a 404' do
subject
expect(response).to have_gitlab_http_status(:not_found)
expect(response.body).to be_blank
end
end
end
context 'when pipeline has jobs with coverage reports' do
before do
allow_any_instance_of(MergeRequest)
.to receive(:has_coverage_reports?)
.and_return(true)
end
context 'when processing coverage reports is in progress' do
let(:report) { { status: :parsing } }
it 'sends polling interval' do
expect(Gitlab::PollingInterval).to receive(:set_header)
subject
end
it 'returns 204 HTTP status' do
subject
expect(response).to have_gitlab_http_status(:no_content)
end
end
context 'when processing coverage reports is completed' do
let(:report) { { status: :parsed, data: pipeline.coverage_reports } }
it 'returns coverage reports' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq({ 'files' => {} })
end
end
context 'when user created corrupted coverage reports' do
let(:report) { { status: :error, status_reason: 'Failed to parse coverage reports' } }
it 'does not send polling interval' do
expect(Gitlab::PollingInterval).not_to receive(:set_header)
subject
end
it 'returns 400 HTTP status' do
subject
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to eq({ 'status_reason' => 'Failed to parse coverage reports' })
end
end
end
context 'when pipeline does not have jobs with coverage reports' do
let(:report) { double }
it 'returns no content' do
subject
expect(response).to have_gitlab_http_status(:no_content)
expect(response.body).to be_empty
end
end
end
2021-03-11 19:13:27 +05:30
describe 'GET codequality_mr_diff_reports' do
let_it_be(:merge_request) do
create(:merge_request,
:with_merge_request_pipeline,
target_project: project,
source_project: project)
end
let(:pipeline) do
create(:ci_pipeline,
:success,
project: merge_request.source_project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha)
end
before do
allow_any_instance_of(MergeRequest)
.to receive(:find_codequality_mr_diff_reports)
.and_return(report)
allow_any_instance_of(MergeRequest)
.to receive(:actual_head_pipeline)
.and_return(pipeline)
end
subject(:get_codequality_mr_diff_reports) do
get :codequality_mr_diff_reports, params: {
namespace_id: project.namespace.to_param,
project_id: project,
id: merge_request.iid
},
format: :json
end
context 'permissions on a public project with private CI/CD' do
let(:project) { create :project, :repository, :public, :builds_private }
let(:report) { { status: :parsed, data: { 'files' => {} } } }
context 'while signed out' do
before do
sign_out(user)
end
it 'responds with a 404' do
get_codequality_mr_diff_reports
expect(response).to have_gitlab_http_status(:not_found)
expect(response.body).to be_blank
end
end
context 'while signed in as an unrelated user' do
before do
sign_in(create(:user))
end
it 'responds with a 404' do
get_codequality_mr_diff_reports
expect(response).to have_gitlab_http_status(:not_found)
expect(response.body).to be_blank
end
end
end
context 'when pipeline has jobs with codequality mr diff report' do
before do
allow_any_instance_of(MergeRequest)
.to receive(:has_codequality_mr_diff_report?)
.and_return(true)
end
context 'when processing codequality mr diff report is in progress' do
let(:report) { { status: :parsing } }
it 'sends polling interval' do
expect(Gitlab::PollingInterval).to receive(:set_header)
get_codequality_mr_diff_reports
end
it 'returns 204 HTTP status' do
get_codequality_mr_diff_reports
expect(response).to have_gitlab_http_status(:no_content)
end
end
context 'when processing codequality mr diff report is completed' do
let(:report) { { status: :parsed, data: { 'files' => {} } } }
it 'returns codequality mr diff report' do
get_codequality_mr_diff_reports
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq({ 'files' => {} })
end
end
end
end
2020-05-24 23:13:21 +05:30
describe 'GET terraform_reports' do
2021-01-29 00:20:46 +05:30
let_it_be(:merge_request) do
2020-05-24 23:13:21 +05:30
create(:merge_request,
:with_merge_request_pipeline,
target_project: project,
source_project: project)
end
let(:pipeline) do
create(:ci_pipeline,
:success,
:with_terraform_reports,
project: merge_request.source_project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha)
end
before do
allow_any_instance_of(MergeRequest)
.to receive(:find_terraform_reports)
.and_return(report)
allow_any_instance_of(MergeRequest)
.to receive(:actual_head_pipeline)
.and_return(pipeline)
end
subject do
get :terraform_reports, params: {
namespace_id: project.namespace.to_param,
project_id: project,
id: merge_request.iid
},
format: :json
end
describe 'permissions on a public project with private CI/CD' do
let(:project) { create :project, :repository, :public, :builds_private }
let(:report) { { status: :parsed, data: [] } }
context 'while signed out' do
before do
sign_out(user)
end
it 'responds with a 404' do
subject
expect(response).to have_gitlab_http_status(:not_found)
expect(response.body).to be_blank
end
end
context 'while signed in as an unrelated user' do
before do
sign_in(create(:user))
end
it 'responds with a 404' do
subject
expect(response).to have_gitlab_http_status(:not_found)
expect(response.body).to be_blank
end
end
end
context 'when pipeline has jobs with terraform reports' do
before do
allow_next_instance_of(MergeRequest) do |merge_request|
allow(merge_request).to receive(:has_terraform_reports?).and_return(true)
end
end
context 'when processing terraform reports is in progress' do
let(:report) { { status: :parsing } }
it 'sends polling interval' do
expect(Gitlab::PollingInterval).to receive(:set_header)
subject
end
it 'returns 204 HTTP status' do
subject
expect(response).to have_gitlab_http_status(:no_content)
end
end
context 'when processing terraform reports is completed' do
let(:report) { { status: :parsed, data: pipeline.terraform_reports.plans } }
it 'returns terraform reports' do
subject
expect(response).to have_gitlab_http_status(:ok)
2020-06-23 00:09:42 +05:30
pipeline.builds.each do |build|
expect(json_response).to match(
a_hash_including(
build.id.to_s => hash_including(
'create' => 0,
'delete' => 0,
'update' => 1,
'job_name' => build.options.dig(:artifacts, :name).to_s
)
2020-05-24 23:13:21 +05:30
)
)
2020-06-23 00:09:42 +05:30
end
2020-05-24 23:13:21 +05:30
end
end
context 'when user created corrupted terraform reports' do
let(:report) { { status: :error, status_reason: 'Failed to parse terraform reports' } }
it 'does not send polling interval' do
expect(Gitlab::PollingInterval).not_to receive(:set_header)
subject
end
it 'returns 400 HTTP status' do
subject
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to eq({ 'status_reason' => 'Failed to parse terraform reports' })
end
end
end
context 'when pipeline does not have jobs with terraform reports' do
before do
allow_next_instance_of(MergeRequest) do |merge_request|
allow(merge_request).to receive(:has_terraform_reports?).and_return(false)
end
end
let(:report) { { status: :error } }
it 'returns error' do
subject
expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
2018-11-18 11:00:15 +05:30
describe 'GET test_reports' do
2021-01-29 00:20:46 +05:30
let_it_be(:merge_request) do
2019-09-04 21:01:54 +05:30
create(:merge_request,
:with_merge_request_pipeline,
target_project: project,
source_project: project
)
end
2018-11-18 11:00:15 +05:30
subject do
2019-09-04 21:01:54 +05:30
get :test_reports, params: {
namespace_id: project.namespace.to_param,
project_id: project,
id: merge_request.iid
},
format: :json
2018-11-18 11:00:15 +05:30
end
before do
allow_any_instance_of(MergeRequest)
2019-09-04 21:01:54 +05:30
.to receive(:compare_test_reports)
.and_return(comparison_status)
allow_any_instance_of(MergeRequest)
.to receive(:actual_head_pipeline)
.and_return(merge_request.all_pipelines.take)
end
describe 'permissions on a public project with private CI/CD' do
let(:project) { create :project, :repository, :public, :builds_private }
let(:comparison_status) { { status: :parsed, data: { summary: 1 } } }
context 'while signed out' do
before do
sign_out(user)
end
it 'responds with a 404' do
subject
2020-03-13 15:44:24 +05:30
expect(response).to have_gitlab_http_status(:not_found)
2019-09-04 21:01:54 +05:30
expect(response.body).to be_blank
end
end
context 'while signed in as an unrelated user' do
before do
sign_in(create(:user))
end
it 'responds with a 404' do
subject
2020-03-13 15:44:24 +05:30
expect(response).to have_gitlab_http_status(:not_found)
2019-09-04 21:01:54 +05:30
expect(response.body).to be_blank
end
end
2018-11-18 11:00:15 +05:30
end
context 'when comparison is being processed' do
let(:comparison_status) { { status: :parsing } }
it 'sends polling interval' do
expect(Gitlab::PollingInterval).to receive(:set_header)
subject
end
it 'returns 204 HTTP status' do
subject
expect(response).to have_gitlab_http_status(:no_content)
end
end
context 'when comparison is done' do
let(:comparison_status) { { status: :parsed, data: { summary: 1 } } }
it 'does not send polling interval' do
expect(Gitlab::PollingInterval).not_to receive(:set_header)
subject
end
it 'returns 200 HTTP status' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq({ 'summary' => 1 })
end
end
context 'when user created corrupted test reports' do
let(:comparison_status) { { status: :error, status_reason: 'Failed to parse test reports' } }
it 'does not send polling interval' do
expect(Gitlab::PollingInterval).not_to receive(:set_header)
subject
end
it 'returns 400 HTTP status' do
subject
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to eq({ 'status_reason' => 'Failed to parse test reports' })
end
end
end
2020-05-24 23:13:21 +05:30
describe 'GET accessibility_reports' do
2021-01-29 00:20:46 +05:30
let_it_be(:merge_request) do
2020-05-24 23:13:21 +05:30
create(:merge_request,
:with_merge_request_pipeline,
target_project: project,
source_project: project
)
end
let(:pipeline) do
create(:ci_pipeline,
:success,
project: merge_request.source_project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha)
end
before do
allow_any_instance_of(MergeRequest)
.to receive(:compare_accessibility_reports)
.and_return(accessibility_comparison)
allow_any_instance_of(MergeRequest)
.to receive(:actual_head_pipeline)
.and_return(pipeline)
end
subject do
get :accessibility_reports, params: {
namespace_id: project.namespace.to_param,
project_id: project,
id: merge_request.iid
},
format: :json
end
context 'permissions on a public project with private CI/CD' do
2021-01-29 00:20:46 +05:30
let(:project) { project_public_with_private_builds }
2020-05-24 23:13:21 +05:30
let(:accessibility_comparison) { { status: :parsed, data: { summary: 1 } } }
context 'while signed out' do
before do
sign_out(user)
end
it 'responds with a 404' do
subject
expect(response).to have_gitlab_http_status(:not_found)
expect(response.body).to be_blank
end
end
context 'while signed in as an unrelated user' do
before do
sign_in(create(:user))
end
it 'responds with a 404' do
subject
expect(response).to have_gitlab_http_status(:not_found)
expect(response.body).to be_blank
end
end
end
context 'when pipeline has jobs with accessibility reports' do
before do
allow_any_instance_of(MergeRequest)
.to receive(:has_accessibility_reports?)
.and_return(true)
end
context 'when processing accessibility reports is in progress' do
let(:accessibility_comparison) { { status: :parsing } }
it 'sends polling interval' do
expect(Gitlab::PollingInterval).to receive(:set_header)
subject
end
it 'returns 204 HTTP status' do
subject
expect(response).to have_gitlab_http_status(:no_content)
end
end
context 'when processing accessibility reports is completed' do
let(:accessibility_comparison) { { status: :parsed, data: { summary: 1 } } }
it 'returns accessibility reports' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq({ 'summary' => 1 })
end
end
context 'when user created corrupted accessibility reports' do
let(:accessibility_comparison) { { status: :error, status_reason: 'This merge request does not have accessibility reports' } }
it 'does not send polling interval' do
expect(Gitlab::PollingInterval).not_to receive(:set_header)
subject
end
it 'returns 400 HTTP status' do
subject
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to eq({ 'status_reason' => 'This merge request does not have accessibility reports' })
end
end
end
end
2021-02-22 17:27:13 +05:30
describe 'GET codequality_reports' do
let_it_be(:merge_request) do
create(:merge_request,
:with_merge_request_pipeline,
target_project: project,
source_project: project
)
end
let(:pipeline) do
create(:ci_pipeline,
:success,
project: merge_request.source_project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha)
end
before do
allow_any_instance_of(MergeRequest)
.to receive(:compare_codequality_reports)
.and_return(codequality_comparison)
allow_any_instance_of(MergeRequest)
.to receive(:actual_head_pipeline)
.and_return(pipeline)
end
subject do
get :codequality_reports, params: {
namespace_id: project.namespace.to_param,
project_id: project,
id: merge_request.iid
},
format: :json
end
context 'permissions on a public project with private CI/CD' do
let(:project) { project_public_with_private_builds }
let(:codequality_comparison) { { status: :parsed, data: { summary: 1 } } }
context 'while signed out' do
before do
sign_out(user)
end
it 'responds with a 404' do
subject
expect(response).to have_gitlab_http_status(:not_found)
expect(response.body).to be_blank
end
end
context 'while signed in as an unrelated user' do
before do
sign_in(create(:user))
end
it 'responds with a 404' do
subject
expect(response).to have_gitlab_http_status(:not_found)
expect(response.body).to be_blank
end
end
end
context 'when pipeline has jobs with codequality reports' do
before do
allow_any_instance_of(MergeRequest)
.to receive(:has_codequality_reports?)
.and_return(true)
end
context 'when processing codequality reports is in progress' do
let(:codequality_comparison) { { status: :parsing } }
it 'sends polling interval' do
expect(Gitlab::PollingInterval).to receive(:set_header)
subject
end
it 'returns 204 HTTP status' do
subject
expect(response).to have_gitlab_http_status(:no_content)
end
end
context 'when processing codequality reports is completed' do
let(:codequality_comparison) { { status: :parsed, data: { summary: 1 } } }
it 'returns codequality reports' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq({ 'summary' => 1 })
end
end
end
context 'when pipeline has job without a codequality report' do
let(:codequality_comparison) { { status: :error, status_reason: 'no codequality report' } }
it 'returns a 400' do
subject
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to eq({ 'status_reason' => 'no codequality report' })
end
end
end
2017-08-17 22:00:37 +05:30
describe 'POST remove_wip' do
before do
2016-11-03 12:29:30 +05:30
merge_request.title = merge_request.wip_title
2021-01-29 00:20:46 +05:30
merge_request.save!
2016-11-03 12:29:30 +05:30
2019-02-15 15:39:39 +05:30
post :remove_wip,
params: {
format: :json,
namespace_id: merge_request.project.namespace.to_param,
project_id: merge_request.project,
id: merge_request.iid
},
xhr: true
2017-08-17 22:00:37 +05:30
end
2016-11-03 12:29:30 +05:30
2017-08-17 22:00:37 +05:30
it 'removes the wip status' do
2016-11-03 12:29:30 +05:30
expect(merge_request.reload.title).to eq(merge_request.wipless_title)
end
2017-08-17 22:00:37 +05:30
it 'renders MergeRequest as JSON' do
2020-04-22 19:07:51 +05:30
expect(json_response.keys).to include('id', 'iid', 'title', 'has_ci', 'merge_status', 'can_be_merged', 'current_user')
2017-08-17 22:00:37 +05:30
end
2016-11-03 12:29:30 +05:30
end
2019-09-04 21:01:54 +05:30
describe 'POST cancel_auto_merge' do
2017-08-17 22:00:37 +05:30
subject do
2019-09-04 21:01:54 +05:30
post :cancel_auto_merge,
2019-02-15 15:39:39 +05:30
params: {
format: :json,
namespace_id: merge_request.project.namespace.to_param,
project_id: merge_request.project,
id: merge_request.iid
},
xhr: true
2017-08-17 22:00:37 +05:30
end
2019-09-04 21:01:54 +05:30
it 'calls AutoMergeService' do
auto_merge_service = double
2017-08-17 22:00:37 +05:30
2019-09-04 21:01:54 +05:30
allow(AutoMergeService)
2017-08-17 22:00:37 +05:30
.to receive(:new)
2019-09-04 21:01:54 +05:30
.and_return(auto_merge_service)
2017-08-17 22:00:37 +05:30
2019-09-04 21:01:54 +05:30
allow(auto_merge_service).to receive(:available_strategies).with(merge_request)
expect(auto_merge_service).to receive(:cancel).with(merge_request)
2017-08-17 22:00:37 +05:30
subject
end
2018-03-17 18:26:18 +05:30
it { is_expected.to have_gitlab_http_status(:success) }
2017-08-17 22:00:37 +05:30
it 'renders MergeRequest as JSON' do
subject
2016-11-03 12:29:30 +05:30
2020-04-22 19:07:51 +05:30
expect(json_response.keys).to include('id', 'iid', 'title', 'has_ci', 'merge_status', 'can_be_merged', 'current_user')
2017-08-17 22:00:37 +05:30
end
end
2016-11-03 12:29:30 +05:30
describe 'POST assign_related_issues' do
let(:issue1) { create(:issue, project: project) }
let(:issue2) { create(:issue, project: project) }
def post_assign_issues
merge_request.update!(description: "Closes #{issue1.to_reference} and #{issue2.to_reference}",
author: user,
source_branch: 'feature',
target_branch: 'master')
post :assign_related_issues,
2019-02-15 15:39:39 +05:30
params: {
namespace_id: project.namespace.to_param,
project_id: project,
id: merge_request.iid
}
2016-11-03 12:29:30 +05:30
end
it 'shows a flash message on success' do
post_assign_issues
expect(flash[:notice]).to eq '2 issues have been assigned to you'
end
it 'correctly pluralizes flash message on success' do
2017-08-17 22:00:37 +05:30
issue2.assignees = [user]
2016-11-03 12:29:30 +05:30
post_assign_issues
expect(flash[:notice]).to eq '1 issue has been assigned to you'
end
it 'calls MergeRequests::AssignIssuesService' do
2017-09-10 17:25:29 +05:30
expect(MergeRequests::AssignIssuesService).to receive(:new)
.with(project, user, merge_request: merge_request)
.and_return(double(execute: { count: 1 }))
2016-11-03 12:29:30 +05:30
post_assign_issues
end
it 'is skipped when not signed in' do
project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
sign_out(:user)
expect(MergeRequests::AssignIssuesService).not_to receive(:new)
post_assign_issues
end
end
describe 'GET ci_environments_status' do
context 'the environment is from a forked project' do
2018-12-13 13:39:08 +05:30
let(:forked) { fork_project(project, user, repository: true) }
let(:sha) { forked.commit.sha }
let(:environment) { create(:environment, project: forked) }
let(:pipeline) { create(:ci_pipeline, sha: sha, project: forked) }
let(:build) { create(:ci_build, pipeline: pipeline) }
let!(:deployment) { create(:deployment, :succeed, environment: environment, sha: sha, ref: 'master', deployable: build) }
2016-11-03 12:29:30 +05:30
let(:merge_request) do
2018-12-13 13:39:08 +05:30
create(:merge_request, source_project: forked, target_project: project, target_branch: 'master', head_pipeline: pipeline)
2016-11-03 12:29:30 +05:30
end
2019-12-26 22:10:19 +05:30
it 'links to the environment on that project', :sidekiq_might_not_need_inline do
2018-12-05 23:21:45 +05:30
get_ci_environments_status
2021-01-29 00:20:46 +05:30
expect(json_response.first['url']).to match(/#{forked.full_path}/)
2018-12-05 23:21:45 +05:30
end
2019-12-26 22:10:19 +05:30
context "when environment_target is 'merge_commit'", :sidekiq_might_not_need_inline do
2018-12-13 13:39:08 +05:30
it 'returns nothing' do
get_ci_environments_status(environment_target: 'merge_commit')
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_empty
end
context 'when is merged' do
let(:source_environment) { create(:environment, project: project) }
let(:merge_commit_sha) { project.repository.merge(user, forked.commit.id, merge_request, "merged in test") }
let(:post_merge_pipeline) { create(:ci_pipeline, sha: merge_commit_sha, project: project) }
let(:post_merge_build) { create(:ci_build, pipeline: post_merge_pipeline) }
let!(:source_deployment) { create(:deployment, :succeed, environment: source_environment, sha: merge_commit_sha, ref: 'master', deployable: post_merge_build) }
before do
merge_request.update!(merge_commit_sha: merge_commit_sha)
merge_request.mark_as_merged!
end
it 'returns the environment on the source project' do
get_ci_environments_status(environment_target: 'merge_commit')
expect(response).to have_gitlab_http_status(:ok)
2021-01-29 00:20:46 +05:30
expect(json_response.first['url']).to match(/#{project.full_path}/)
2018-12-13 13:39:08 +05:30
end
end
end
2018-12-05 23:21:45 +05:30
# we're trying to reduce the overall number of queries for this method.
2019-12-04 20:38:33 +05:30
# set a hard limit for now. https://gitlab.com/gitlab-org/gitlab-foss/issues/52287
2019-12-26 22:10:19 +05:30
it 'keeps queries in check', :sidekiq_might_not_need_inline do
2018-12-05 23:21:45 +05:30
control_count = ActiveRecord::QueryRecorder.new { get_ci_environments_status }.count
expect(control_count).to be <= 137
end
2019-12-26 22:10:19 +05:30
it 'has no N+1 SQL issues for environments', :request_store, :sidekiq_might_not_need_inline, retry: 0 do
2019-09-30 21:07:59 +05:30
# First run to insert test data from lets, which does take up some 30 queries
get_ci_environments_status
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { get_ci_environments_status }.count
2018-12-13 13:39:08 +05:30
2019-09-30 21:07:59 +05:30
environment2 = create(:environment, project: forked)
create(:deployment, :succeed, environment: environment2, sha: sha, ref: 'master', deployable: build)
2020-01-01 13:55:28 +05:30
# TODO address the last 3 queries
# See https://gitlab.com/gitlab-org/gitlab-foss/issues/63952 (3 queries)
leeway = 3
2019-09-30 21:07:59 +05:30
expect { get_ci_environments_status }.not_to exceed_all_query_limit(control_count + leeway)
2016-11-03 12:29:30 +05:30
end
end
2019-09-30 21:07:59 +05:30
context 'when a merge request has multiple environments with deployments' do
let(:sha) { merge_request.diff_head_sha }
let(:ref) { merge_request.source_branch }
let!(:build) { create(:ci_build, pipeline: pipeline) }
let!(:pipeline) { create(:ci_pipeline, sha: sha, project: project) }
let!(:environment) { create(:environment, name: 'env_a', project: project) }
let!(:another_environment) { create(:environment, name: 'env_b', project: project) }
before do
merge_request.update_head_pipeline
create(:deployment, :succeed, environment: environment, sha: sha, ref: ref, deployable: build)
create(:deployment, :succeed, environment: another_environment, sha: sha, ref: ref, deployable: build)
end
it 'exposes multiple environment statuses' do
get_ci_environments_status
expect(json_response.count).to eq 2
end
context 'when route map is not present in the project' do
it 'does not have N+1 Gitaly requests for environments', :request_store do
expect(merge_request).to be_present
expect { get_ci_environments_status }
.to change { Gitlab::GitalyClient.get_request_count }.by_at_most(1)
end
end
context 'when there is route map present in a project' do
before do
allow_any_instance_of(EnvironmentStatus)
.to receive(:has_route_map?)
.and_return(true)
end
it 'does not have N+1 Gitaly requests for diff files', :request_store do
expect(merge_request.merge_request_diff.merge_request_diff_files).to be_many
expect { get_ci_environments_status }
.to change { Gitlab::GitalyClient.get_request_count }.by_at_most(1)
end
end
end
2020-01-01 13:55:28 +05:30
it 'uses the explicitly linked deployments' do
expect(EnvironmentStatus)
.to receive(:for_deployed_merge_request)
.with(merge_request, user)
.and_call_original
get_ci_environments_status(environment_target: 'merge_commit')
end
2019-09-30 21:07:59 +05:30
def get_ci_environments_status(extra_params = {})
params = {
namespace_id: merge_request.project.namespace.to_param,
project_id: merge_request.project,
id: merge_request.iid,
format: 'json'
}
get :ci_environments_status, params: params.merge(extra_params)
end
2016-11-03 12:29:30 +05:30
end
2017-08-17 22:00:37 +05:30
describe 'GET pipeline_status.json' do
context 'when head_pipeline exists' do
let!(:pipeline) do
create(:ci_pipeline, project: merge_request.source_project,
ref: merge_request.source_branch,
2017-09-10 17:25:29 +05:30
sha: merge_request.diff_head_sha,
head_pipeline_of: merge_request)
2017-08-17 22:00:37 +05:30
end
let(:status) { pipeline.detailed_status(double('user')) }
2019-09-04 21:01:54 +05:30
it 'returns a detailed head_pipeline status in json' do
2017-09-10 17:25:29 +05:30
get_pipeline_status
2017-08-17 22:00:37 +05:30
2018-03-17 18:26:18 +05:30
expect(response).to have_gitlab_http_status(:ok)
2017-08-17 22:00:37 +05:30
expect(json_response['text']).to eq status.text
expect(json_response['label']).to eq status.label
expect(json_response['icon']).to eq status.icon
2018-11-08 19:23:39 +05:30
expect(json_response['favicon']).to match_asset_path "/assets/ci_favicons/#{status.favicon}.png"
2017-08-17 22:00:37 +05:30
end
2019-09-04 21:01:54 +05:30
context 'with project member visibility on a public project' do
let(:user) { create(:user) }
2021-01-29 00:20:46 +05:30
let(:project) { project_public_with_private_builds }
2019-09-04 21:01:54 +05:30
it 'returns pipeline data to project members' do
project.add_developer(user)
get_pipeline_status
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['text']).to eq status.text
expect(json_response['label']).to eq status.label
expect(json_response['icon']).to eq status.icon
expect(json_response['favicon']).to match_asset_path "/assets/ci_favicons/#{status.favicon}.png"
end
it 'returns blank OK response to non-project-members' do
get_pipeline_status
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_empty
end
end
2017-08-17 22:00:37 +05:30
end
context 'when head_pipeline does not exist' do
2017-09-10 17:25:29 +05:30
before do
get_pipeline_status
end
2017-08-17 22:00:37 +05:30
2019-09-04 21:01:54 +05:30
it 'returns blank OK response' do
2018-03-17 18:26:18 +05:30
expect(response).to have_gitlab_http_status(:ok)
2017-08-17 22:00:37 +05:30
expect(json_response).to be_empty
end
end
def get_pipeline_status
2019-02-15 15:39:39 +05:30
get :pipeline_status, params: {
namespace_id: project.namespace,
project_id: project,
id: merge_request.iid
},
2017-08-17 22:00:37 +05:30
format: :json
end
end
2018-03-17 18:26:18 +05:30
describe 'POST #rebase' do
  let(:viewer) { user }

  def post_rebase
    post :rebase, params: { namespace_id: project.namespace, project_id: project, id: merge_request }
  end

  # Expects the async rebase job to be enqueued for +user+; the trailing
  # `false` is the third perform_async argument (skip_ci flag, per the
  # worker call signature used here).
  def expect_rebase_worker_for(user)
    expect(RebaseWorker).to receive(:perform_async).with(merge_request.id, user.id, false)
  end

  context 'successfully' do
    it 'enqueues a RebaseWorker' do
      expect_rebase_worker_for(viewer)

      post_rebase

      expect(response).to have_gitlab_http_status(:ok)
    end
  end

  # A row lock held elsewhere (FOR UPDATE NOWAIT timing out) must surface
  # as a 409 conflict instead of enqueuing the worker.
  context 'with NOWAIT lock' do
    it 'returns a 409' do
      allow_any_instance_of(MergeRequest).to receive(:with_lock).with('FOR UPDATE NOWAIT').and_raise(ActiveRecord::LockWaitTimeout)
      expect(RebaseWorker).not_to receive(:perform_async)

      post_rebase

      expect(response).to have_gitlab_http_status(:conflict)
      expect(json_response['merge_error']).to eq('Failed to enqueue the rebase operation, possibly due to a long-lived transaction. Try again later.')
    end
  end

  context 'with a forked project' do
    let(:forked_project) { fork_project(project, fork_owner, repository: true) }
    let(:fork_owner) { create(:user) }

    before do
      project.add_developer(fork_owner)
      merge_request.update!(source_project: forked_project)
      forked_project.add_reporter(user)
    end

    context 'user cannot push to source branch' do
      it 'returns 404' do
        expect_rebase_worker_for(viewer).never

        post_rebase

        expect(response).to have_gitlab_http_status(:not_found)
      end
    end

    context 'user can push to source branch' do
      before do
        project.add_reporter(fork_owner)
        sign_in(fork_owner)
      end

      it 'returns 200', :sidekiq_might_not_need_inline do
        expect_rebase_worker_for(fork_owner)

        post_rebase

        expect(response).to have_gitlab_http_status(:ok)
      end
    end
  end
end
describe 'GET discussions' do
  context 'when authenticated' do
    before do
      project.add_developer(user)
      sign_in(user)

      expect(::Gitlab::GitalyClient).to receive(:allow_ref_name_caching).and_call_original
    end

    it 'returns 200' do
      get :discussions, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid }

      expect(response).to have_gitlab_http_status(:ok)
    end

    context 'highlight preloading' do
      context 'with commit diff notes' do
        let!(:commit_diff_note) do
          create(:diff_note_on_commit, project: merge_request.project)
        end

        it 'preloads notes diffs highlights' do
          expect_next_instance_of(Gitlab::DiscussionsDiff::FileCollection) do |collection|
            note_diff_file = commit_diff_note.note_diff_file

            expect(collection).to receive(:load_highlight).and_call_original
            expect(collection).to receive(:find_by_id).with(note_diff_file.id).and_call_original
          end

          get :discussions, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid }
        end
      end

      context 'with diff notes' do
        let!(:diff_note) do
          create(:diff_note_on_merge_request, noteable: merge_request, project: merge_request.project)
        end

        it 'preloads notes diffs highlights' do
          expect_next_instance_of(Gitlab::DiscussionsDiff::FileCollection) do |collection|
            note_diff_file = diff_note.note_diff_file

            expect(collection).to receive(:load_highlight).and_call_original
            expect(collection).to receive(:find_by_id).with(note_diff_file.id).and_call_original
          end

          get :discussions, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid }
        end

        # NOTE(review): the description says highlights are NOT preloaded for
        # resolved notes, yet the expectations below still expect
        # :load_highlight to be called — confirm the intended behavior; left
        # unchanged here.
        it 'does not preload highlights when diff note is resolved' do
          Notes::ResolveService.new(diff_note.project, user).execute(diff_note)

          expect_next_instance_of(Gitlab::DiscussionsDiff::FileCollection) do |collection|
            note_diff_file = diff_note.note_diff_file

            expect(collection).to receive(:load_highlight).and_call_original
            expect(collection).to receive(:find_by_id).with(note_diff_file.id).and_call_original
          end

          get :discussions, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid }
        end
      end
    end
  end

  context do
    it_behaves_like 'discussions provider' do
      let!(:author) { create(:user) }
      let!(:project) { create(:project) }

      let!(:merge_request) { create(:merge_request, source_project: project) }

      let!(:mr_note1) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project) }
      let!(:mr_note2) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project) }

      let(:requested_iid) { merge_request.iid }
      let(:expected_discussion_count) { 2 }
      let(:expected_discussion_ids) { [mr_note1.discussion_id, mr_note2.discussion_id] }
    end
  end
end
describe 'GET edit' do
  it 'responds successfully' do
    get :edit, params: { namespace_id: project.namespace, project_id: project, id: merge_request }

    expect(response).to have_gitlab_http_status(:success)
  end

  # The edit view relies on @noteable being assigned for autocomplete sources.
  it 'assigns the noteable to make sure autocompletes work' do
    get :edit, params: { namespace_id: project.namespace, project_id: project, id: merge_request }

    expect(assigns(:noteable)).not_to be_nil
  end
end
describe 'POST export_csv' do
  subject { post :export_csv, params: { namespace_id: project.namespace, project_id: project } }

  it 'redirects to the merge request index' do
    subject

    expect(response).to redirect_to(project_merge_requests_path(project))
    expect(response.flash[:notice]).to match(/\AYour CSV export has started/i)
  end

  it 'enqueues an IssuableExportCsvWorker worker' do
    expect(IssuableExportCsvWorker).to receive(:perform_async).with(:merge_request, user.id, project.id, anything)

    subject
  end
end
end