# frozen_string_literal: true

require 'spec_helper'

RSpec.describe MergeRequest, factory_default: :keep do
  include RepoHelpers
  include ProjectForksHelper
  include ReactiveCachingHelpers

  using RSpec::Parameterized::TableSyntax

  let_it_be(:namespace) { create_default(:namespace).freeze }
  let_it_be(:project, refind: true) { create_default(:project, :repository).freeze }

  subject { create(:merge_request) }

  describe 'associations' do
    subject { build_stubbed(:merge_request) }

    it { is_expected.to belong_to(:target_project).class_name('Project') }
    it { is_expected.to belong_to(:source_project).class_name('Project') }
    it { is_expected.to belong_to(:merge_user).class_name("User") }
    it { is_expected.to have_many(:assignees).through(:merge_request_assignees) }
    it { is_expected.to have_many(:reviewers).through(:merge_request_reviewers) }
    it { is_expected.to have_many(:merge_request_diffs) }
    it { is_expected.to have_many(:user_mentions).class_name("MergeRequestUserMention") }
    it { is_expected.to belong_to(:milestone) }
    it { is_expected.to belong_to(:iteration) }
    it { is_expected.to have_many(:resource_milestone_events) }
    it { is_expected.to have_many(:resource_state_events) }
    it { is_expected.to have_many(:draft_notes) }
    it { is_expected.to have_many(:reviews).inverse_of(:merge_request) }
    it { is_expected.to have_many(:reviewed_by_users).through(:reviews).source(:author) }
    it { is_expected.to have_one(:cleanup_schedule).inverse_of(:merge_request) }
    it { is_expected.to have_many(:created_environments).class_name('Environment').inverse_of(:merge_request) }

    context 'for forks' do
      let!(:project) { create(:project) }
      let!(:fork) { fork_project(project) }
      let!(:merge_request) { create(:merge_request, target_project: project, source_project: fork) }

      it 'does not load another project due to inverse relationship' do
        expect(project.merge_requests.first.target_project.object_id).to eq(project.object_id)
      end

      it 'finds the associated merge request' do
        expect(project.merge_requests.find(merge_request.id)).to eq(merge_request)
      end
    end

    describe '#reviewed_by_users' do
      let!(:merge_request) { create(:merge_request) }

      context 'when the same user has several reviews' do
        before do
          2.times { create(:review, merge_request: merge_request, project: merge_request.project, author: merge_request.author) }
        end

        it 'returns distinct users' do
          expect(merge_request.reviewed_by_users).to match_array([merge_request.author])
        end
      end
    end
  end

  describe '.from_and_to_forks' do
    it 'returns only MRs from and to forks (with no internal MRs)' do
      project = create(:project)
      fork = fork_project(project)
      fork_2 = fork_project(project)
      mr_from_fork = create(:merge_request, source_project: fork, target_project: project)
      mr_to_fork = create(:merge_request, source_project: project, target_project: fork)

      create(:merge_request, source_project: fork, target_project: fork_2)
      create(:merge_request, source_project: project, target_project: project)

      expect(described_class.from_and_to_forks(project)).to contain_exactly(mr_from_fork, mr_to_fork)
    end
  end

  describe '.order_merged_at_asc' do
    let_it_be(:older_mr) { create(:merge_request, :with_merged_metrics) }
    let_it_be(:newer_mr) { create(:merge_request, :with_merged_metrics) }

    it 'returns MRs ordered by merged_at ascending' do
      expect(described_class.order_merged_at_asc).to eq([older_mr, newer_mr])
    end
  end

  describe '.order_merged_at_desc' do
    let_it_be(:older_mr) { create(:merge_request, :with_merged_metrics) }
    let_it_be(:newer_mr) { create(:merge_request, :with_merged_metrics) }

    it 'returns MRs ordered by merged_at descending' do
      expect(described_class.order_merged_at_desc).to eq([newer_mr, older_mr])
    end
  end

  describe '.order_closed_at_asc' do
    let_it_be(:older_mr) { create(:merge_request, :closed_last_month) }
    let_it_be(:newer_mr) { create(:merge_request, :closed_last_month) }

    it 'returns MRs ordered by closed_at ascending' do
      expect(described_class.order_closed_at_asc).to eq([older_mr, newer_mr])
    end
  end

  describe '.order_closed_at_desc' do
    let_it_be(:older_mr) { create(:merge_request, :closed_last_month) }
    let_it_be(:newer_mr) { create(:merge_request, :closed_last_month) }

    it 'returns MRs ordered by closed_at descending' do
      expect(described_class.order_closed_at_desc).to eq([newer_mr, older_mr])
    end
  end

  describe '.with_jira_issue_keys' do
    let_it_be(:mr_with_jira_title) { create(:merge_request, :unique_branches, title: 'Fix TEST-123') }
    let_it_be(:mr_with_jira_description) { create(:merge_request, :unique_branches, description: 'this closes TEST-321') }
    let_it_be(:mr_without_jira_reference) { create(:merge_request, :unique_branches) }

    subject { described_class.with_jira_issue_keys }

    it { is_expected.to contain_exactly(mr_with_jira_title, mr_with_jira_description) }

    it { is_expected.not_to include(mr_without_jira_reference) }
  end

  context 'scopes' do
    let_it_be(:user1) { create(:user) }
    let_it_be(:user2) { create(:user) }

    let_it_be(:merge_request1) { create(:merge_request, :unique_branches, reviewers: [user1]) }
    let_it_be(:merge_request2) { create(:merge_request, :unique_branches, reviewers: [user2]) }
    let_it_be(:merge_request3) { create(:merge_request, :unique_branches, reviewers: []) }
    let_it_be(:merge_request4) { create(:merge_request, :draft_merge_request) }

    describe '.review_requested' do
      it 'returns MRs that have any review requests' do
        expect(described_class.review_requested).to eq([merge_request1, merge_request2])
      end
    end

    describe '.no_review_requested' do
      it 'returns MRs that have no review requests' do
        expect(described_class.no_review_requested).to eq([merge_request3, merge_request4])
      end
    end

    describe '.review_requested_to' do
      it 'returns MRs that the user has been requested to review' do
        expect(described_class.review_requested_to(user1)).to eq([merge_request1])
      end
    end

    describe '.no_review_requested_to' do
      it 'returns MRs that the user has not been requested to review' do
        expect(described_class.no_review_requested_to(user1))
          .to eq([merge_request2, merge_request3, merge_request4])
      end
    end

    describe '.drafts' do
      it 'returns MRs where draft == true' do
        expect(described_class.drafts).to eq([merge_request4])
      end
    end
  end

  describe '#squash?' do
    let(:merge_request) { build(:merge_request, squash: squash) }

    subject { merge_request.squash? }

    context 'disabled in database' do
      let(:squash) { false }

      it { is_expected.to be_falsy }
    end

    context 'enabled in database' do
      let(:squash) { true }

      it { is_expected.to be_truthy }
    end
  end

  describe '#default_squash_commit_message' do
    let(:project) { subject.project }
    let(:is_multiline) { -> (c) { c.description.present? } }
    let(:multiline_commits) { subject.commits.select(&is_multiline) }
    let(:singleline_commits) { subject.commits.reject(&is_multiline) }

    it 'returns the merge request title' do
      expect(subject.default_squash_commit_message).to eq(subject.title)
    end

    it 'uses template from target project' do
      subject.target_project.squash_commit_template = 'Squashed branch %{source_branch} into %{target_branch}'

      expect(subject.default_squash_commit_message)
        .to eq('Squashed branch master into feature')
    end
  end

  describe 'modules' do
    subject { described_class }

    it { is_expected.to include_module(Issuable) }
    it { is_expected.to include_module(Referable) }
    it { is_expected.to include_module(Sortable) }
    it { is_expected.to include_module(Taskable) }
    it { is_expected.to include_module(MilestoneEventable) }
    it { is_expected.to include_module(StateEventable) }

    it_behaves_like 'AtomicInternalId' do
      let(:internal_id_attribute) { :iid }
      let(:instance) { build(:merge_request) }
      let(:scope) { :target_project }
      let(:scope_attrs) { { project: instance.target_project } }
      let(:usage) { :merge_requests }
    end
  end

  describe 'validation' do
    subject { build_stubbed(:merge_request) }

    it { is_expected.to validate_presence_of(:target_branch) }
    it { is_expected.to validate_presence_of(:source_branch) }

    context "Validation of merge user with Merge When Pipeline Succeeds" do
      it "allows user to be nil when the feature is disabled" do
        expect(subject).to be_valid
      end

      it "is invalid without merge user" do
        subject.merge_when_pipeline_succeeds = true
        expect(subject).not_to be_valid
      end

      it "is valid with merge user" do
        subject.merge_when_pipeline_succeeds = true
        subject.merge_user = build(:user)

        expect(subject).to be_valid
      end
    end

    context 'for branch' do
      where(:branch_name, :valid) do
        'foo'               | true
        'foo:bar'           | false
        '+foo:bar'          | false
        'foo bar'           | false
        '-foo'              | false
        'HEAD'              | true
        'refs/heads/master' | true
      end

      with_them do
        it "validates source_branch" do
          subject = build(:merge_request, source_branch: branch_name, target_branch: 'master')
          subject.valid?

          expect(subject.errors.added?(:source_branch)).to eq(!valid)
        end

        it "validates target_branch" do
          subject = build(:merge_request, source_branch: 'master', target_branch: branch_name)
          subject.valid?

          expect(subject.errors.added?(:target_branch)).to eq(!valid)
        end
      end
    end

    context 'for forks' do
      let(:project) { create(:project) }
      let(:fork1) { fork_project(project) }
      let(:fork2) { fork_project(project) }

      it 'allows merge requests for sibling-forks' do
        subject.source_project = fork1
        subject.target_project = fork2

        expect(subject).to be_valid
      end
    end
  end

  describe 'callbacks' do
    describe '#ensure_merge_request_diff' do
      let(:merge_request) { build(:merge_request) }

      context 'when skip_ensure_merge_request_diff is true' do
        before do
          merge_request.skip_ensure_merge_request_diff = true
        end

        it 'does not create a merge_request_diff after create' do
          merge_request.save!

          expect(merge_request.merge_request_diff).to be_empty
        end
      end

      context 'when skip_ensure_merge_request_diff is false' do
        before do
          merge_request.skip_ensure_merge_request_diff = false
        end

        it 'creates merge_request_diff after create' do
          merge_request.save!

          expect(merge_request.merge_request_diff).not_to be_empty
        end
      end
    end

    describe '#ensure_merge_request_metrics' do
      let(:merge_request) { create(:merge_request) }

      it 'creates metrics after saving' do
        expect(merge_request.metrics).to be_persisted
        expect(MergeRequest::Metrics.count).to eq(1)
      end

      it 'does not duplicate metrics for a merge request' do
        merge_request.mark_as_merged!

        expect(MergeRequest::Metrics.count).to eq(1)
      end

      it 'does not create duplicated metrics records when MR is concurrently updated' do
        merge_request.metrics.destroy!

        instance1 = MergeRequest.find(merge_request.id)
        instance2 = MergeRequest.find(merge_request.id)

        instance1.ensure_metrics
        instance2.ensure_metrics

        metrics_records = MergeRequest::Metrics.where(merge_request_id: merge_request.id)
        expect(metrics_records.size).to eq(1)
      end

      it 'syncs the `target_project_id` to the metrics record' do
        project = create(:project)

        merge_request.update!(target_project: project, state: :closed)

        expect(merge_request.target_project_id).to eq(project.id)
        expect(merge_request.target_project_id).to eq(merge_request.metrics.target_project_id)
      end
    end

    describe '#set_draft_status' do
      let(:merge_request) { create(:merge_request) }

      context 'MR is a draft' do
        before do
          expect(merge_request.draft).to be_falsy

          merge_request.title = "Draft: #{merge_request.title}"
        end

        it 'sets draft to true' do
          merge_request.save!

          expect(merge_request.draft).to be_truthy
        end
      end

      context 'MR is not a draft' do
        before do
          expect(merge_request.draft).to be_falsey

          merge_request.title = "This is not a draft"
        end

        it 'does not set draft to true' do
          merge_request.save!

          expect(merge_request.draft).to be_falsey
        end
      end
    end
  end

  describe 'respond to' do
    subject { build(:merge_request) }

    it { is_expected.to respond_to(:unchecked?) }
    it { is_expected.to respond_to(:checking?) }
    it { is_expected.to respond_to(:can_be_merged?) }
    it { is_expected.to respond_to(:cannot_be_merged?) }
    it { is_expected.to respond_to(:merge_params) }
    it { is_expected.to respond_to(:merge_when_pipeline_succeeds) }
  end

  describe '.by_commit_sha' do
    subject(:by_commit_sha) { described_class.by_commit_sha(sha) }

    let!(:merge_request) { create(:merge_request) }

    context 'with sha contained in latest merge request diff' do
      let(:sha) { 'b83d6e391c22777fca1ed3012fce84f633d7fed0' }

      it 'returns merge requests' do
        expect(by_commit_sha).to eq([merge_request])
      end
    end

    context 'with sha not contained in the latest merge request diff' do
      let(:sha) { 'b83d6e391c22777fca1ed3012fce84f633d7fed0' }

      it 'returns empty requests' do
        latest_merge_request_diff = merge_request.merge_request_diffs.create!

        MergeRequestDiffCommit.where(
          merge_request_diff_id: latest_merge_request_diff,
          sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0'
        ).delete_all

        expect(by_commit_sha).to be_empty
      end
    end

    context 'with sha not contained in any merge request diff' do
      let(:sha) { 'b83d6e3' }

      it 'returns empty result' do
        expect(by_commit_sha).to be_empty
      end
    end
  end

  describe '.by_merge_commit_sha' do
    it 'returns merge requests that match the given merge commit' do
      mr = create(:merge_request, :merged, merge_commit_sha: '123abc')

      create(:merge_request, :merged, merge_commit_sha: '123def')

      expect(described_class.by_merge_commit_sha('123abc')).to eq([mr])
    end
  end

  describe '.by_squash_commit_sha' do
    subject { described_class.by_squash_commit_sha(sha) }

    let(:sha) { '123abc' }
    let(:merge_request) { create(:merge_request, :merged, squash_commit_sha: sha) }

    it 'returns merge requests that match the given squash commit' do
      is_expected.to eq([merge_request])
    end
  end

  describe '.by_merge_or_squash_commit_sha' do
    subject { described_class.by_merge_or_squash_commit_sha([sha1, sha2]) }

    let(:sha1) { '123abc' }
    let(:sha2) { '456abc' }
    let(:mr1) { create(:merge_request, :merged, squash_commit_sha: sha1) }
    let(:mr2) { create(:merge_request, :merged, merge_commit_sha: sha2) }

    it 'returns merge requests that match the given squash and merge commits' do
      is_expected.to include(mr1, mr2)
    end
  end

  describe '.join_metrics' do
    let_it_be(:join_condition) { '"merge_request_metrics"."target_project_id" = 1' }

    context 'when no target_project_id is available' do
      it 'moves target_project_id condition to the merge request metrics' do
        expect(described_class.join_metrics(1).to_sql).to include(join_condition)
      end
    end

    context 'when a target_project_id is present in the where conditions' do
      it 'moves target_project_id condition to the merge request metrics' do
        expect(described_class.where(target_project_id: 1).join_metrics.to_sql).to include(join_condition)
      end
    end
  end

  describe '.by_related_commit_sha' do
    subject { described_class.by_related_commit_sha(sha) }

    context 'when commit is a squash commit' do
      let!(:merge_request) { create(:merge_request, :merged, squash_commit_sha: sha) }
      let(:sha) { '123abc' }

      it { is_expected.to eq([merge_request]) }
    end

    context 'when commit is a part of the merge request' do
      let!(:merge_request) { create(:merge_request) }
      let(:sha) { 'b83d6e391c22777fca1ed3012fce84f633d7fed0' }

      it { is_expected.to eq([merge_request]) }
    end

    context 'when commit is a merge commit' do
      let!(:merge_request) { create(:merge_request, :merged, merge_commit_sha: sha) }
      let(:sha) { '123abc' }

      it { is_expected.to eq([merge_request]) }
    end

    context 'when commit is not found' do
      let(:sha) { '0000' }

      it { is_expected.to be_empty }
    end

    context 'when commit is part of the merge request and a squash commit at the same time' do
      let!(:merge_request) { create(:merge_request) }
      let(:sha) { merge_request.commits.first.id }

      before do
        merge_request.update!(squash_commit_sha: sha)
      end

      it { is_expected.to eq([merge_request]) }
    end
  end

  describe '.in_projects' do
    it 'returns the merge requests for a set of projects' do
      expect(described_class.in_projects(Project.all)).to eq([subject])
    end
  end

  describe '.set_latest_merge_request_diff_ids!' do
    def create_merge_request_with_diffs(source_branch, diffs: 2)
      params = {
        target_project: project,
        target_branch: 'master',
        source_project: project,
        source_branch: source_branch
      }

      create(:merge_request, params).tap do |mr|
        diffs.times { mr.merge_request_diffs.create! }
        mr.create_merge_head_diff
      end
    end

    let(:project) { create(:project) }

    it 'sets IDs for merge requests, whether they are already set or not' do
      merge_requests = [
        create_merge_request_with_diffs('feature'),
        create_merge_request_with_diffs('feature-conflict'),
        create_merge_request_with_diffs('wip', diffs: 0),
        create_merge_request_with_diffs('csv')
      ]

      merge_requests.take(2).each do |merge_request|
        merge_request.update_column(:latest_merge_request_diff_id, nil)
      end

      expected = merge_requests.map do |merge_request|
        merge_request.merge_request_diffs.maximum(:id)
      end

      expect { project.merge_requests.set_latest_merge_request_diff_ids! }
        .to change { merge_requests.map { |mr| mr.reload.latest_merge_request_diff_id } }.to(expected)
    end
  end

  describe '.recent_target_branches' do
    let(:project) { create(:project) }
    let!(:merge_request1) { create(:merge_request, :opened, source_project: project, target_branch: 'feature') }
    let!(:merge_request2) { create(:merge_request, :closed, source_project: project, target_branch: 'merge-test') }
    let!(:merge_request3) { create(:merge_request, :opened, source_project: project, target_branch: 'fix') }
    let!(:merge_request4) { create(:merge_request, :closed, source_project: project, target_branch: 'feature') }

    before do
      merge_request1.update_columns(updated_at: 1.day.since)
      merge_request2.update_columns(updated_at: 2.days.since)
      merge_request3.update_columns(updated_at: 3.days.since)
      merge_request4.update_columns(updated_at: 4.days.since)
    end

    it 'returns target branches sorted by updated_at descending' do
      expect(described_class.recent_target_branches).to match_array(%w[feature merge-test fix])
    end
  end

  describe '.sort_by_attribute' do
    context 'merged_at' do
      let_it_be(:older_mr) { create(:merge_request, :with_merged_metrics) }
      let_it_be(:newer_mr) { create(:merge_request, :with_merged_metrics) }

      it 'sorts asc' do
        merge_requests = described_class.sort_by_attribute(:merged_at_asc)
        expect(merge_requests).to eq([older_mr, newer_mr])
      end

      it 'sorts desc' do
        merge_requests = described_class.sort_by_attribute(:merged_at_desc)
        expect(merge_requests).to eq([newer_mr, older_mr])
      end
    end

    context 'closed_at' do
      let_it_be(:older_mr) { create(:merge_request, :closed_last_month) }
      let_it_be(:newer_mr) { create(:merge_request, :closed_last_month) }

      it 'sorts asc' do
        merge_requests = described_class.sort_by_attribute(:closed_at_asc)
        expect(merge_requests).to eq([older_mr, newer_mr])
      end

      it 'sorts desc' do
        merge_requests = described_class.sort_by_attribute(:closed_at_desc)
        expect(merge_requests).to eq([newer_mr, older_mr])
      end

      it "sorts asc when it's closed_at" do
        merge_requests = described_class.sort_by_attribute(:closed_at)
        expect(merge_requests).to eq([older_mr, newer_mr])
      end
    end

    context 'title' do
      let_it_be(:first_mr) { create(:merge_request, :closed, title: 'One') }
      let_it_be(:second_mr) { create(:merge_request, :closed, title: 'Two') }

      it 'sorts asc' do
        merge_requests = described_class.sort_by_attribute(:title_asc)
        expect(merge_requests).to eq([first_mr, second_mr])
      end

      it 'sorts desc' do
        merge_requests = described_class.sort_by_attribute(:title_desc)
        expect(merge_requests).to eq([second_mr, first_mr])
      end
    end
  end

  describe 'time to merge calculations' do
    let_it_be(:user) { create(:user) }
    let_it_be(:project) { create(:project) }

    let!(:mr1) do
      create(
        :merge_request,
        :with_merged_metrics,
        source_project: project,
        target_project: project
      )
    end

    let!(:mr2) do
      create(
        :merge_request,
        :with_merged_metrics,
        source_project: project,
        target_project: project
      )
    end

    let!(:mr3) do
      create(
        :merge_request,
        :with_merged_metrics,
        source_project: project,
        target_project: project
      )
    end

    let!(:unmerged_mr) do
      create(
        :merge_request,
        source_project: project,
        target_project: project
      )
    end

    before do
      project.add_member(user, :developer)
    end

    describe '.total_time_to_merge' do
      it 'returns the sum of the time to merge for all merged MRs' do
        mrs = project.merge_requests

        expect(mrs.total_time_to_merge).to be_within(1).of(expected_total_time(mrs))
      end

      context 'when merged_at is earlier than created_at' do
        before do
          mr1.metrics.update!(merged_at: mr1.metrics.created_at - 1.week)
        end

        it 'returns nil' do
          mrs = project.merge_requests.where(id: mr1.id)

          expect(mrs.total_time_to_merge).to be_nil
        end
      end

      def expected_total_time(mrs)
        mrs = mrs.reject { |mr| mr.merged_at.nil? }
        mrs.reduce(0.0) do |sum, mr|
          (mr.merged_at - mr.created_at) + sum
        end
      end
    end
  end

  describe '#target_branch_sha' do
    let(:project) { create(:project, :repository) }

    subject { create(:merge_request, source_project: project, target_project: project) }

    context 'when the target branch does not exist' do
      before do
        project.repository.rm_branch(subject.author, subject.target_branch)
        subject.clear_memoized_shas
      end

      it 'returns nil' do
        expect(subject.target_branch_sha).to be_nil
      end
    end

    it 'returns memoized value' do
      subject.target_branch_sha = '8ffb3c15a5475e59ae909384297fede4badcb4c7'

      expect(subject.target_branch_sha).to eq '8ffb3c15a5475e59ae909384297fede4badcb4c7'
    end
  end

  describe '#card_attributes' do
    it 'includes the author name' do
      allow(subject).to receive(:author).and_return(double(name: 'Robert'))
      allow(subject).to receive(:assignees).and_return([])

      expect(subject.card_attributes)
        .to eq({ 'Author' => 'Robert', 'Assignee' => "" })
    end

    it 'includes the assignees name' do
      allow(subject).to receive(:author).and_return(double(name: 'Robert'))
      allow(subject).to receive(:assignees).and_return([double(name: 'Douwe'), double(name: 'Robert')])

      expect(subject.card_attributes)
        .to eq({ 'Author' => 'Robert', 'Assignee' => 'Douwe and Robert' })
    end
  end

  describe '#assignee_or_author?' do
    let(:user) { create(:user) }

    it 'returns true for a user that is assigned to a merge request' do
      subject.assignees = [user]

      expect(subject.assignee_or_author?(user)).to eq(true)
    end

    it 'returns true for a user that is the author of a merge request' do
      subject.author = user

      expect(subject.assignee_or_author?(user)).to eq(true)
    end

    it 'returns false for a user that is not the assignee or author' do
      expect(subject.assignee_or_author?(user)).to eq(false)
    end
  end

  describe '#visible_closing_issues_for' do
    let(:guest) { create(:user) }
    let(:developer) { create(:user) }
    let(:issue_1) { create(:issue, project: subject.source_project) }
    let(:issue_2) { create(:issue, project: subject.source_project) }
    let(:confidential_issue) { create(:issue, :confidential, project: subject.source_project) }

    before do
      subject.project.add_developer(subject.author)
      subject.target_branch = subject.project.default_branch
      commit = double('commit1', safe_message: "Fixes #{issue_1.to_reference} #{issue_2.to_reference} #{confidential_issue.to_reference}")
      allow(subject).to receive(:commits).and_return([commit])
    end

    it 'shows only allowed issues to guest' do
      subject.project.add_guest(guest)

      subject.cache_merge_request_closes_issues!

      expect(subject.visible_closing_issues_for(guest)).to match_array([issue_1, issue_2])
    end

    it 'shows only allowed issues to developer' do
      subject.project.add_developer(developer)

      subject.cache_merge_request_closes_issues!

      expect(subject.visible_closing_issues_for(developer)).to match_array([issue_1, confidential_issue, issue_2])
    end

    context 'when external issue tracker is enabled' do
      let(:project) { create(:project, :repository) }

      subject { create(:merge_request, source_project: project) }

      before do
        subject.project.has_external_issue_tracker = true
        subject.project.save!
      end

      it 'calls #closes_issues, not the cached data, to retrieve issues' do
        expect(subject).to receive(:closes_issues)
        expect(subject).not_to receive(:cached_closes_issues)

        subject.visible_closing_issues_for
      end
    end
  end

  describe '#cache_merge_request_closes_issues!' do
    before do
      subject.project.add_developer(subject.author)
      subject.target_branch = subject.project.default_branch
    end

    it 'caches closed issues' do
      issue = create :issue, project: subject.project
      commit = double('commit1', safe_message: "Fixes #{issue.to_reference}")
      allow(subject).to receive(:commits).and_return([commit])

      expect { subject.cache_merge_request_closes_issues!(subject.author) }.to change(subject.merge_requests_closing_issues, :count).by(1)
    end

    it 'does not cache closed issues when merge request is closed' do
      issue = create :issue, project: subject.project
      commit = double('commit1', safe_message: "Fixes #{issue.to_reference}")

      allow(subject).to receive(:commits).and_return([commit])
      allow(subject).to receive(:state_id).and_return(described_class.available_states[:closed])

      expect { subject.cache_merge_request_closes_issues!(subject.author) }.not_to change(subject.merge_requests_closing_issues, :count)
    end

    it 'does not cache closed issues when merge request is merged' do
      issue = create :issue, project: subject.project
      commit = double('commit1', safe_message: "Fixes #{issue.to_reference}")
      allow(subject).to receive(:commits).and_return([commit])
      allow(subject).to receive(:state_id).and_return(described_class.available_states[:merged])

      expect { subject.cache_merge_request_closes_issues!(subject.author) }.not_to change(subject.merge_requests_closing_issues, :count)
    end

    context 'when both internal and external issue trackers are enabled' do
      before do
        create(:jira_integration, project: subject.project)
        subject.project.reload
      end

      it 'does not cache issues from external trackers' do
        issue = ExternalIssue.new('JIRA-123', subject.project)
        commit = double('commit1', safe_message: "Fixes #{issue.to_reference}")
        allow(subject).to receive(:commits).and_return([commit])

        expect { subject.cache_merge_request_closes_issues!(subject.author) }.not_to raise_error
        expect { subject.cache_merge_request_closes_issues!(subject.author) }.not_to change(subject.merge_requests_closing_issues, :count)
      end

      it 'caches an internal issue' do
        issue = create(:issue, project: subject.project)
        commit = double('commit1', safe_message: "Fixes #{issue.to_reference}")
        allow(subject).to receive(:commits).and_return([commit])

        expect { subject.cache_merge_request_closes_issues!(subject.author) }
          .to change(subject.merge_requests_closing_issues, :count).by(1)
      end
    end

    context 'when only external issue tracker enabled' do
      let(:project) { create(:project, :repository) }

      subject { create(:merge_request, source_project: project) }

      before do
        subject.project.has_external_issue_tracker = true
        subject.project.issues_enabled = false
        subject.project.save!
      end

      it 'does not cache issues from external trackers' do
        issue = ExternalIssue.new('JIRA-123', subject.project)
        commit = double('commit1', safe_message: "Fixes #{issue.to_reference}")
        allow(subject).to receive(:commits).and_return([commit])

        expect { subject.cache_merge_request_closes_issues!(subject.author) }.not_to change(subject.merge_requests_closing_issues, :count)
      end

      it 'does not cache an internal issue' do
        issue = create(:issue, project: subject.project)
        commit = double('commit1', safe_message: "Fixes #{issue.to_reference}")
        allow(subject).to receive(:commits).and_return([commit])

        expect { subject.cache_merge_request_closes_issues!(subject.author) }
          .not_to change(subject.merge_requests_closing_issues, :count)
      end

      it 'caches issues from another project with issues enabled' do
        project = create(:project, :public, issues_enabled: true)
        issue = create(:issue, project: project)
        commit = double('commit1', safe_message: "Fixes #{issue.to_reference(full: true)}")
        allow(subject).to receive(:commits).and_return([commit])

        expect { subject.cache_merge_request_closes_issues!(subject.author) }
          .to change(subject.merge_requests_closing_issues, :count).by(1)
      end
    end
  end

  describe '#source_branch_sha' do
    let(:last_branch_commit) { subject.source_project.repository.commit(Gitlab::Git::BRANCH_REF_PREFIX + subject.source_branch) }

    context 'with diffs' do
      subject { create(:merge_request) }

      it 'returns the sha of the source branch last commit' do
        expect(subject.source_branch_sha).to eq(last_branch_commit.sha)
      end
    end

    context 'without diffs' do
      subject { create(:merge_request, :without_diffs) }

      it 'returns the sha of the source branch last commit' do
        expect(subject.source_branch_sha).to eq(last_branch_commit.sha)
      end

      context 'when there is a tag name matching the branch name' do
        let(:tag_name) { subject.source_branch }

        it 'returns the sha of the source branch last commit' do
          subject.source_project.repository.add_tag(subject.author,
                                                    tag_name,
                                                    subject.target_branch_sha,
                                                    'Add a tag')

          expect(subject.source_branch_sha).to eq(last_branch_commit.sha)

          subject.source_project.repository.rm_tag(subject.author, tag_name)
        end
      end
    end

    context 'when the merge request is being created' do
      subject { build(:merge_request, source_branch: nil, compare_commits: []) }

      it 'returns nil' do
        expect(subject.source_branch_sha).to be_nil
      end
    end

    it 'returns memoized value' do
      subject.source_branch_sha = '2e5d3239642f9161dcbbc4b70a211a68e5e45e2b'

      expect(subject.source_branch_sha).to eq '2e5d3239642f9161dcbbc4b70a211a68e5e45e2b'
    end
  end

  describe '#to_reference' do
    let(:project) { build(:project, name: 'sample-project') }
    let(:merge_request) { build(:merge_request, target_project: project, iid: 1) }

    it 'returns a String reference to the object' do
      expect(merge_request.to_reference).to eq "!1"
    end

    it 'supports a cross-project reference' do
      another_project = build(:project, name: 'another-project', namespace: project.namespace)
      expect(merge_request.to_reference(another_project)).to eq "sample-project!1"
    end

    it 'returns a String reference with the full path' do
      expect(merge_request.to_reference(full: true)).to eq(project.full_path + '!1')
    end
  end

  describe '#raw_diffs' do
    let(:options) { { paths: ['a/b', 'b/a', 'c/*'] } }

    context 'when there are MR diffs' do
      let(:merge_request) { create(:merge_request) }

      it 'delegates to the MR diffs' do
        expect(merge_request.merge_request_diff).to receive(:raw_diffs).with(options)

        merge_request.raw_diffs(options)
      end
    end

    context 'when there are no MR diffs' do
      let(:merge_request) { build(:merge_request) }

      it 'delegates to the compare object' do
        merge_request.compare = double(:compare)

        expect(merge_request.compare).to receive(:raw_diffs).with(options)

        merge_request.raw_diffs(options)
      end
    end
  end

  describe '#diffs' do
    let(:merge_request) { build(:merge_request) }
    let(:options) { { paths: ['a/b', 'b/a', 'c/*'] } }

    context 'when there are MR diffs' do
      it 'delegates to the MR diffs' do
        merge_request.save!

        expect(merge_request.merge_request_diff).to receive(:raw_diffs).with(hash_including(options)).and_call_original

        merge_request.diffs(options).diff_files
      end
    end

    context 'when there are no MR diffs' do
      it 'delegates to the compare object, setting expanded: true' do
        merge_request.compare = double(:compare)

        expect(merge_request.compare).to receive(:diffs).with(options.merge(expanded: true))

        merge_request.diffs(options)
      end
    end
  end

  describe '#note_positions_for_paths' do
    let(:user) { create(:user) }
    let(:merge_request) { create(:merge_request) }
    let(:project) { merge_request.project }
    let!(:diff_note) do
      create(:diff_note_on_merge_request, project: project, noteable: merge_request)
    end

    let!(:draft_note) do
      create(:draft_note_on_text_diff, author: user, merge_request: merge_request)
    end

    let(:file_paths) { merge_request.diffs.diff_files.map(&:file_path) }

    subject do
      merge_request.note_positions_for_paths(file_paths)
    end

    it 'returns a Gitlab::Diff::PositionCollection' do
      expect(subject).to be_a(Gitlab::Diff::PositionCollection)
    end

    context 'within all diff files' do
      it 'returns correct positions' do
        expect(subject).to match_array([diff_note.position])
      end
    end

    context 'within specific diff file' do
      let(:file_paths) { [diff_note.position.file_path] }

      it 'returns correct positions' do
        expect(subject).to match_array([diff_note.position])
      end
    end

    context 'within no diff files' do
      let(:file_paths) { [] }

      it 'returns no positions' do
        expect(subject.to_a).to be_empty
      end
    end

    context 'when user is given' do
      subject do
        merge_request.note_positions_for_paths(file_paths, user)
      end

      it 'returns notes and draft notes positions' do
        expect(subject).to match_array([draft_note.position, diff_note.position])
      end
    end

    context 'when user is not given' do
      subject do
        merge_request.note_positions_for_paths(file_paths)
      end

      it 'returns notes positions' do
        expect(subject).to match_array([diff_note.position])
      end
    end
  end

  describe '#discussions_diffs' do
    let(:merge_request) { create(:merge_request) }

    shared_examples 'discussions diffs collection' do
      it 'initializes Gitlab::DiscussionsDiff::FileCollection with correct data' do
        note_diff_file = diff_note.note_diff_file

        expect(Gitlab::DiscussionsDiff::FileCollection)
          .to receive(:new)
          .with([note_diff_file])
          .and_call_original

        result = merge_request.discussions_diffs

        expect(result).to be_a(Gitlab::DiscussionsDiff::FileCollection)
      end

      it 'eager loads relations' do
        result = merge_request.discussions_diffs

        recorder = ActiveRecord::QueryRecorder.new do
          result.first.diff_note
          result.first.diff_note.project
        end

        expect(recorder.count).to be_zero
      end
    end

    context 'with commit diff note' do
      let(:other_merge_request) { create(:merge_request, source_project: create(:project, :repository)) }

      let!(:diff_note) do
        create(:diff_note_on_commit, project: merge_request.project)
      end

      let!(:other_mr_diff_note) do
        create(:diff_note_on_commit, project: other_merge_request.project)
      end

      it_behaves_like 'discussions diffs collection'
    end

    context 'with merge request diff note' do
      let!(:diff_note) do
        create(:diff_note_on_merge_request, project: merge_request.project, noteable: merge_request)
      end

      it_behaves_like 'discussions diffs collection'
    end
  end

  describe '#diff_size' do
    let_it_be(:project) { create(:project, :repository) }

    let(:merge_request) do
      build(:merge_request, source_project: project, source_branch: 'expand-collapse-files', target_branch: 'master')
    end

    context 'when there are MR diffs' do
      it 'returns the correct count' do
        merge_request.save!

        expect(merge_request.diff_size).to eq('105')
      end

      it 'returns the correct overflow count' do
        allow(Commit).to receive(:max_diff_options).and_return(max_files: 2)
        merge_request.save!

        expect(merge_request.diff_size).to eq('2+')
      end

      it 'does not perform highlighting' do
        merge_request.save!

        expect(Gitlab::Diff::Highlight).not_to receive(:new)

        merge_request.diff_size
      end
    end

    context 'when there are no MR diffs' do
      def set_compare(merge_request)
        merge_request.compare = CompareService.new(
          merge_request.source_project,
          merge_request.source_branch
        ).execute(
          merge_request.target_project,
          merge_request.target_branch
        )
      end

      it 'returns the correct count' do
        set_compare(merge_request)

        expect(merge_request.diff_size).to eq('105')
      end

      it 'returns the correct overflow count' do
        allow(Commit).to receive(:diff_max_files).and_return(2)
        set_compare(merge_request)

        expect(merge_request.diff_size).to eq('2+')
      end

      it 'does not perform highlighting' do
        set_compare(merge_request)

        expect(Gitlab::Diff::Highlight).not_to receive(:new)

        merge_request.diff_size
      end
    end
  end

  describe '#modified_paths' do
    let(:paths) { double(:paths) }

    subject(:merge_request) { build(:merge_request) }

    before do
      allow(diff).to receive(:modified_paths).and_return(paths)
    end

    context 'when past_merge_request_diff is specified' do
      let(:another_diff) { double(:merge_request_diff) }
      let(:diff) { another_diff }

      it 'returns affected file paths from specified past_merge_request_diff' do
        expect(merge_request.modified_paths(past_merge_request_diff: another_diff)).to eq(paths)
      end
    end

    context 'when compare is present' do
      let(:compare) { double(:compare) }
      let(:diff) { compare }

      before do
        merge_request.compare = compare

        expect(merge_request).to receive(:diff_stats).and_return(diff_stats)
      end

      context 'and diff_stats are not present' do
        let(:diff_stats) { nil }

        it 'returns affected file paths from compare' do
          expect(merge_request.modified_paths).to eq(paths)
        end
      end

      context 'and diff_stats are present' do
        let(:diff_stats) { double(:diff_stats) }

        it 'returns affected file paths from diff_stats' do
          diff_stats_path = double(:diff_stats_paths)
          expect(diff_stats).to receive(:paths).and_return(diff_stats_path)

          expect(merge_request.modified_paths).to eq(diff_stats_path)
        end
      end
    end

    context 'when no arguments provided' do
      let(:diff) { merge_request.merge_request_diff }

      subject(:merge_request) { create(:merge_request, source_branch: 'feature', target_branch: 'master') }

      it 'returns affected file paths for merge_request_diff' do
        expect(merge_request.modified_paths).to eq(paths)
      end
    end
  end

  describe '#new_paths' do
    let(:merge_request) do
      create(:merge_request, source_branch: 'expand-collapse-files', target_branch: 'master')
    end

    it 'returns new path of changed files' do
      expect(merge_request.new_paths.count).to eq(105)
    end
  end

describe "#related_notes" do
|
2014-09-02 18:07:02 +05:30
|
|
|
let!(:merge_request) { create(:merge_request) }
|
|
|
|
|
|
|
|
before do
|
2015-04-26 12:48:37 +05:30
|
|
|
allow(merge_request).to receive(:commits) { [merge_request.source_project.repository.commit] }
|
2016-06-16 23:09:34 +05:30
|
|
|
create(:note_on_commit, commit_id: merge_request.commits.first.id,
|
|
|
|
project: merge_request.project)
|
2014-09-02 18:07:02 +05:30
|
|
|
create(:note, noteable: merge_request, project: merge_request.project)
|
|
|
|
end
|
|
|
|
|
2016-09-13 17:45:13 +05:30
|
|
|
it "includes notes for commits" do
|
2015-04-26 12:48:37 +05:30
|
|
|
expect(merge_request.commits).not_to be_empty
|
2017-01-15 13:20:01 +05:30
|
|
|
expect(merge_request.related_notes.count).to eq(2)
|
2014-09-02 18:07:02 +05:30
|
|
|
end
|
2015-11-26 14:37:03 +05:30
|
|
|
|
2016-09-13 17:45:13 +05:30
|
|
|
it "includes notes for commits from target project as well" do
|
2016-06-16 23:09:34 +05:30
|
|
|
create(:note_on_commit, commit_id: merge_request.commits.first.id,
|
|
|
|
project: merge_request.target_project)
|
|
|
|
|
2015-11-26 14:37:03 +05:30
|
|
|
expect(merge_request.commits).not_to be_empty
|
2017-01-15 13:20:01 +05:30
|
|
|
expect(merge_request.related_notes.count).to eq(3)
|
2015-11-26 14:37:03 +05:30
|
|
|
end
|
2019-07-07 11:18:12 +05:30
|
|
|
|
|
|
|
it "excludes system notes for commits" do
|
|
|
|
system_note = create(:note_on_commit, :system, commit_id: merge_request.commits.first.id,
|
|
|
|
project: merge_request.project)
|
|
|
|
|
|
|
|
expect(merge_request.related_notes.count).to eq(2)
|
|
|
|
expect(merge_request.related_notes).not_to include(system_note)
|
|
|
|
end
|
2014-09-02 18:07:02 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
describe '#for_fork?' do
|
|
|
|
it 'returns true if the merge request is for a fork' do
|
2017-09-10 17:25:29 +05:30
|
|
|
subject.source_project = build_stubbed(:project, namespace: create(:group))
|
|
|
|
subject.target_project = build_stubbed(:project, namespace: create(:group))
|
2014-09-02 18:07:02 +05:30
|
|
|
|
2015-04-26 12:48:37 +05:30
|
|
|
expect(subject.for_fork?).to be_truthy
|
2014-09-02 18:07:02 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns false if is not for a fork' do
|
2015-04-26 12:48:37 +05:30
|
|
|
expect(subject.for_fork?).to be_falsey
|
2014-09-02 18:07:02 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
describe '#closes_issues' do
|
2020-11-24 15:15:51 +05:30
|
|
|
let(:project) { create(:project) }
|
|
|
|
|
2014-09-02 18:07:02 +05:30
|
|
|
let(:issue0) { create :issue, project: subject.project }
|
|
|
|
let(:issue1) { create :issue, project: subject.project }
|
2016-04-02 18:10:28 +05:30
|
|
|
|
|
|
|
let(:commit0) { double('commit0', safe_message: "Fixes #{issue0.to_reference}") }
|
|
|
|
let(:commit1) { double('commit1', safe_message: "Fixes #{issue0.to_reference}") }
|
|
|
|
let(:commit2) { double('commit2', safe_message: "Fixes #{issue1.to_reference}") }
|
2014-09-02 18:07:02 +05:30
|
|
|
|
2020-11-24 15:15:51 +05:30
|
|
|
subject { create(:merge_request, source_project: project) }
|
|
|
|
|
2014-09-02 18:07:02 +05:30
|
|
|
before do
|
2018-03-17 18:26:18 +05:30
|
|
|
subject.project.add_developer(subject.author)
|
2015-09-11 14:41:01 +05:30
|
|
|
allow(subject).to receive(:commits).and_return([commit0, commit1, commit2])
|
2014-09-02 18:07:02 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'accesses the set of issues that will be closed on acceptance' do
|
2017-09-10 17:25:29 +05:30
|
|
|
allow(subject.project).to receive(:default_branch)
|
|
|
|
.and_return(subject.target_branch)
|
2014-09-02 18:07:02 +05:30
|
|
|
|
2016-04-02 18:10:28 +05:30
|
|
|
closed = subject.closes_issues
|
|
|
|
|
|
|
|
expect(closed).to include(issue0, issue1)
|
2014-09-02 18:07:02 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'only lists issues to be closed if it targets the default branch' do
|
2015-09-11 14:41:01 +05:30
|
|
|
allow(subject.project).to receive(:default_branch).and_return('master')
|
2014-09-02 18:07:02 +05:30
|
|
|
subject.target_branch = 'something-else'
|
|
|
|
|
2015-04-26 12:48:37 +05:30
|
|
|
expect(subject.closes_issues).to be_empty
|
2014-09-02 18:07:02 +05:30
|
|
|
end
|
2020-03-13 15:44:24 +05:30
|
|
|
|
|
|
|
it 'ignores referenced issues when auto-close is disabled' do
|
|
|
|
subject.project.update!(autoclose_referenced_issues: false)
|
|
|
|
|
|
|
|
allow(subject.project).to receive(:default_branch)
|
|
|
|
.and_return(subject.target_branch)
|
|
|
|
|
|
|
|
expect(subject.closes_issues).to be_empty
|
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
describe '#issues_mentioned_but_not_closing' do
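# Issues referenced in the MR description without being closed by it are returned,
# for both GitLab issues and external (Jira) issue references.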
|
|
|
|
let(:closing_issue) { create :issue, project: subject.project }
|
|
|
|
let(:mentioned_issue) { create :issue, project: subject.project }
|
|
|
|
let(:commit) { double('commit', safe_message: "Fixes #{closing_issue.to_reference}") }
|
2014-09-02 18:07:02 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
it 'detects issues mentioned in description but not closed' do
|
2018-03-17 18:26:18 +05:30
|
|
|
subject.project.add_developer(subject.author)
|
2017-08-17 22:00:37 +05:30
|
|
|
subject.description = "Is related to #{mentioned_issue.to_reference} and #{closing_issue.to_reference}"
|
|
|
|
|
|
|
|
allow(subject).to receive(:commits).and_return([commit])
|
2017-09-10 17:25:29 +05:30
|
|
|
allow(subject.project).to receive(:default_branch)
|
|
|
|
.and_return(subject.target_branch)
|
2018-11-18 11:00:15 +05:30
|
|
|
subject.cache_merge_request_closes_issues!
|
2014-09-02 18:07:02 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(subject.issues_mentioned_but_not_closing(subject.author)).to match_array([mentioned_issue])
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when the project has an external issue tracker' do
|
|
|
|
before do
|
2018-03-17 18:26:18 +05:30
|
|
|
subject.project.add_developer(subject.author)
|
2017-08-17 22:00:37 +05:30
|
|
|
commit = double(:commit, safe_message: 'Fixes TEST-3')
|
|
|
|
|
2021-09-30 23:02:18 +05:30
|
|
|
create(:jira_integration, project: subject.project)
|
2021-03-11 19:13:27 +05:30
|
|
|
subject.project.reload
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
allow(subject).to receive(:commits).and_return([commit])
|
|
|
|
allow(subject).to receive(:description).and_return('Is related to TEST-2 and TEST-3')
|
|
|
|
allow(subject.project).to receive(:default_branch).and_return(subject.target_branch)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'detects issues mentioned in description but not closed' do
|
2018-11-18 11:00:15 +05:30
|
|
|
subject.cache_merge_request_closes_issues!
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(subject.issues_mentioned_but_not_closing(subject.author).map(&:to_s)).to match_array(['TEST-2'])
|
|
|
|
end
|
2014-09-02 18:07:02 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
describe "#draft?" do
|
2020-07-28 23:09:34 +05:30
|
|
|
subject { build_stubbed(:merge_request) }
|
|
|
|
|
|
|
|
[
|
2021-11-18 22:05:49 +05:30
|
|
|
'draft:', 'Draft: ', '[Draft]', '[DRAFT] '
|
2022-04-04 11:22:00 +05:30
|
|
|
].each do |draft_prefix|
|
|
|
|
it "detects the '#{draft_prefix}' prefix" do
|
|
|
|
subject.title = "#{draft_prefix}#{subject.title}"
|
2020-07-28 23:09:34 +05:30
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
expect(subject.draft?).to eq true
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
2016-04-02 18:10:28 +05:30
|
|
|
end
|
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
context "returns false" do
|
|
|
|
# We have removed support for variations of "WIP", and additionally need
|
|
|
|
# to test unsupported variations of "Draft" that we have seen users
|
|
|
|
# attempt.
|
|
|
|
#
|
|
|
|
[
|
|
|
|
'WIP:', 'WIP: ', '[WIP]', '[WIP] ', ' [WIP] WIP: [WIP] WIP:',
|
|
|
|
"WIP ", "(WIP)",
|
|
|
|
"draft", "Draft", "Draft -", "draft - ", "Draft ", "draft "
|
|
|
|
].each do |trigger|
|
|
|
|
it "when '#{trigger}' prefixes the title" do
|
|
|
|
subject.title = "#{trigger}#{subject.title}"
|
2021-11-18 22:05:49 +05:30
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
expect(subject.draft?).to eq false
|
|
|
|
end
|
2021-11-18 22:05:49 +05:30
|
|
|
end
|
2020-07-28 23:09:34 +05:30
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
["WIP", "Draft"].each do |trigger| # rubocop:disable Style/WordArray
|
|
|
|
it "when merge request title is simply '#{trigger}'" do
|
|
|
|
subject.title = trigger
|
2020-07-28 23:09:34 +05:30
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
expect(subject.draft?).to eq false
|
|
|
|
end
|
2020-07-28 23:09:34 +05:30
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
it "when #{trigger} is in the middle of the title" do
|
|
|
|
subject.title = "Something with #{trigger} in the middle"
|
2020-07-28 23:09:34 +05:30
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
expect(subject.draft?).to eq false
|
|
|
|
end
|
2021-04-29 21:17:54 +05:30
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
it "when #{trigger} is at the end of the title" do
|
|
|
|
subject.title = "Something ends with #{trigger}"
|
2021-04-29 21:17:54 +05:30
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
expect(subject.draft?).to eq false
|
|
|
|
end
|
2021-04-29 21:17:54 +05:30
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
it "when title contains words starting with #{trigger}" do
|
|
|
|
subject.title = "#{trigger}foo #{subject.title}"
|
2021-04-29 21:17:54 +05:30
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
expect(subject.draft?).to eq false
|
|
|
|
end
|
2021-04-29 21:17:54 +05:30
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
it "when title contains words containing with #{trigger}" do
|
|
|
|
subject.title = "Foo#{trigger}Bar #{subject.title}"
|
2021-04-29 21:17:54 +05:30
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
expect(subject.draft?).to eq false
|
|
|
|
end
|
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
it 'when Draft: is in the middle of the title' do
|
|
|
|
subject.title = 'Something with Draft: in the middle'
|
2015-09-11 14:41:01 +05:30
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
expect(subject.draft?).to eq false
|
|
|
|
end
|
2021-04-29 21:17:54 +05:30
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
it "when the title does not contain draft" do
|
|
|
|
expect(subject.draft?).to eq false
|
|
|
|
end
|
2021-04-17 20:07:23 +05:30
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
it "is aliased to #draft?" do
|
|
|
|
expect(subject.method(:work_in_progress?)).to eq(subject.method(:draft?))
|
|
|
|
end
|
2021-04-17 20:07:23 +05:30
|
|
|
end
|
2015-09-11 14:41:01 +05:30
|
|
|
end
|
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
describe "#draftless_title" do
|
2020-07-28 23:09:34 +05:30
|
|
|
subject { build_stubbed(:merge_request) }
|
|
|
|
|
2022-04-04 11:22:00 +05:30
|
|
|
['draft:', 'Draft: ', '[Draft]', '[DRAFT] '].each do |draft_prefix|
|
|
|
|
it "removes a '#{draft_prefix}' prefix" do
|
2022-07-23 23:45:48 +05:30
|
|
|
draftless_title = subject.title
|
2022-04-04 11:22:00 +05:30
|
|
|
subject.title = "#{draft_prefix}#{subject.title}"
|
2016-11-03 12:29:30 +05:30
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
expect(subject.draftless_title).to eq draftless_title
|
2016-11-03 12:29:30 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it "is satisfies the #work_in_progress? method" do
|
2022-04-04 11:22:00 +05:30
|
|
|
subject.title = "#{draft_prefix}#{subject.title}"
|
2022-07-23 23:45:48 +05:30
|
|
|
subject.title = subject.draftless_title
|
2016-11-03 12:29:30 +05:30
|
|
|
|
|
|
|
expect(subject.work_in_progress?).to eq false
|
|
|
|
end
|
|
|
|
end
|
2021-04-29 21:17:54 +05:30
|
|
|
|
2022-04-04 11:22:00 +05:30
|
|
|
[
|
|
|
|
'WIP:', 'WIP: ', '[WIP]', '[WIP] ', '[WIP] WIP: [WIP] WIP:'
|
|
|
|
].each do |wip_prefix|
|
|
|
|
it "doesn't remove a '#{wip_prefix}' prefix" do
|
|
|
|
subject.title = "#{wip_prefix}#{subject.title}"
|
2021-04-29 21:17:54 +05:30
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
expect(subject.draftless_title).to eq subject.title
|
2022-04-04 11:22:00 +05:30
|
|
|
end
|
2021-04-29 21:17:54 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'removes only the draft prefix from the MR title' do
|
|
|
|
subject.title = 'Draft: Implement feature called draft'
|
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
expect(subject.draftless_title).to eq 'Implement feature called draft'
|
2021-04-29 21:17:54 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'does not remove WIP in the middle of the title' do
|
|
|
|
subject.title = 'Something with WIP in the middle'
|
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
expect(subject.draftless_title).to eq subject.title
|
2021-04-29 21:17:54 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'does not remove Draft in the middle of the title' do
|
|
|
|
subject.title = 'Something with Draft in the middle'
|
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
expect(subject.draftless_title).to eq subject.title
|
2021-04-29 21:17:54 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'does not remove WIP at the end of the title' do
|
|
|
|
subject.title = 'Something ends with WIP'
|
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
expect(subject.draftless_title).to eq subject.title
|
2021-04-29 21:17:54 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'does not remove Draft at the end of the title' do
|
|
|
|
subject.title = 'Something ends with Draft'
|
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
expect(subject.draftless_title).to eq subject.title
|
2021-04-29 21:17:54 +05:30
|
|
|
end
|
2016-11-03 12:29:30 +05:30
|
|
|
end
|
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
describe "#draft_title" do
|
2020-07-28 23:09:34 +05:30
|
|
|
it "adds the Draft: prefix to the title" do
|
2022-07-23 23:45:48 +05:30
|
|
|
draft_title = "Draft: #{subject.title}"
|
2016-11-03 12:29:30 +05:30
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
expect(subject.draft_title).to eq draft_title
|
2016-11-03 12:29:30 +05:30
|
|
|
end
|
|
|
|
|
2020-07-28 23:09:34 +05:30
|
|
|
it "does not add the Draft: prefix multiple times" do
|
2022-07-23 23:45:48 +05:30
|
|
|
draft_title = "Draft: #{subject.title}"
|
|
|
|
subject.title = subject.draft_title
|
|
|
|
subject.title = subject.draft_title
|
2016-11-03 12:29:30 +05:30
|
|
|
|
2022-07-23 23:45:48 +05:30
|
|
|
expect(subject.draft_title).to eq draft_title
|
2016-11-03 12:29:30 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it "is satisfies the #work_in_progress? method" do
|
2022-07-23 23:45:48 +05:30
|
|
|
subject.title = subject.draft_title
|
2016-11-03 12:29:30 +05:30
|
|
|
|
|
|
|
expect(subject.work_in_progress?).to eq true
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2022-04-04 11:22:00 +05:30
|
|
|
describe '#permits_force_push?' do
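# Force pushing the source branch is permitted unless the branch is protected and
# force pushes are disallowed for it (ProtectedBranch is stubbed in these examples).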
|
|
|
|
let_it_be(:merge_request) { build_stubbed(:merge_request) }
|
|
|
|
|
|
|
|
subject { merge_request.permits_force_push? }
|
|
|
|
|
|
|
|
context 'when source branch is not protected' do
|
|
|
|
before do
|
|
|
|
allow(ProtectedBranch).to receive(:protected?).and_return(false)
|
|
|
|
end
|
|
|
|
|
|
|
|
it { is_expected.to be_truthy }
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when source branch is protected' do
|
|
|
|
before do
|
|
|
|
allow(ProtectedBranch).to receive(:protected?).and_return(true)
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when force push is not allowed' do
|
|
|
|
before do
|
|
|
|
allow(ProtectedBranch).to receive(:allow_force_push?) { false }
|
|
|
|
end
|
|
|
|
|
|
|
|
it { is_expected.to be_falsey }
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when force push is allowed' do
|
|
|
|
before do
|
|
|
|
allow(ProtectedBranch).to receive(:allow_force_push?) { true }
|
|
|
|
end
|
|
|
|
|
|
|
|
it { is_expected.to be_truthy }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2015-12-23 02:04:40 +05:30
|
|
|
describe '#can_remove_source_branch?' do
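# The source branch can only be removed when it is not protected, not a root ref,
# the source project still exists, the user can push to it, and the MR diff head
# matches the branch head.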
|
2020-03-13 15:44:24 +05:30
|
|
|
let_it_be(:user) { create(:user) }
|
|
|
|
let_it_be(:merge_request, reload: true) { create(:merge_request, :simple) }
|
2015-12-23 02:04:40 +05:30
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
subject { merge_request }
|
2015-12-23 02:04:40 +05:30
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
before do
|
2018-11-18 11:00:15 +05:30
|
|
|
subject.source_project.add_maintainer(user)
|
2015-12-23 02:04:40 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it "can't be removed when its a protected branch" do
|
2017-08-17 22:00:37 +05:30
|
|
|
allow(ProtectedBranch).to receive(:protected?).and_return(true)
|
2018-03-17 18:26:18 +05:30
|
|
|
|
2015-12-23 02:04:40 +05:30
|
|
|
expect(subject.can_remove_source_branch?(user)).to be_falsey
|
|
|
|
end
|
|
|
|
|
2020-07-28 23:09:34 +05:30
|
|
|
it "can't be removed because source project has been deleted" do
|
|
|
|
subject.source_project = nil
|
|
|
|
|
|
|
|
expect(subject.can_remove_source_branch?(user)).to be_falsey
|
|
|
|
end
|
|
|
|
|
2016-09-13 17:45:13 +05:30
|
|
|
it "can't remove a root ref" do
|
2021-09-04 01:27:46 +05:30
|
|
|
subject.update!(source_branch: 'master', target_branch: 'feature')
|
2015-12-23 02:04:40 +05:30
|
|
|
|
|
|
|
expect(subject.can_remove_source_branch?(user)).to be_falsey
|
|
|
|
end
|
|
|
|
|
|
|
|
it "is unable to remove the source branch for a project the user cannot push to" do
|
2018-03-17 18:26:18 +05:30
|
|
|
user2 = create(:user)
|
|
|
|
|
2015-12-23 02:04:40 +05:30
|
|
|
expect(subject.can_remove_source_branch?(user2)).to be_falsey
|
|
|
|
end
|
|
|
|
|
2016-04-02 18:10:28 +05:30
|
|
|
it "can be removed if the last commit is the head of the source branch" do
|
2016-09-29 09:46:39 +05:30
|
|
|
allow(subject).to receive(:source_branch_head).and_return(subject.diff_head_commit)
|
2016-04-02 18:10:28 +05:30
|
|
|
|
2015-12-23 02:04:40 +05:30
|
|
|
expect(subject.can_remove_source_branch?(user)).to be_truthy
|
|
|
|
end
|
2016-04-02 18:10:28 +05:30
|
|
|
|
|
|
|
it "cannot be removed if the last commit is not also the head of the source branch" do
|
2018-03-17 18:26:18 +05:30
|
|
|
subject.clear_memoized_shas
|
2016-08-24 12:49:21 +05:30
|
|
|
subject.source_branch = "lfs"
|
|
|
|
|
2016-04-02 18:10:28 +05:30
|
|
|
expect(subject.can_remove_source_branch?(user)).to be_falsey
|
|
|
|
end
|
2015-12-23 02:04:40 +05:30
|
|
|
end
|
|
|
|
|
2020-07-28 23:09:34 +05:30
|
|
|
describe "#source_branch_exists?" do
|
2020-11-24 15:15:51 +05:30
|
|
|
let(:project) { create(:project, :repository) }
|
|
|
|
let(:merge_request) { create(:merge_request, source_project: project) }
|
2020-07-28 23:09:34 +05:30
|
|
|
let(:repository) { merge_request.source_project.repository }
|
|
|
|
|
|
|
|
context 'when the source project is set' do
|
2020-11-24 15:15:51 +05:30
|
|
|
it 'returns true when the branch exists' do
|
|
|
|
expect(merge_request.source_branch_exists?).to eq(true)
|
2020-07-28 23:09:34 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when the source project is not set' do
|
|
|
|
before do
|
|
|
|
merge_request.source_project = nil
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns false' do
|
|
|
|
expect(merge_request.source_branch_exists?).to eq(false)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-03-02 22:35:43 +05:30
|
|
|
describe '#default_merge_commit_message' do
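# The default message is built from the merge branch line, the MR title, an optional
# description, cached "Closes" references, the full MR reference, and any
# merge_commit_template configured on the target project.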
|
2016-11-03 12:29:30 +05:30
|
|
|
it 'includes merge information as the title' do
|
|
|
|
request = build(:merge_request, source_branch: 'source', target_branch: 'target')
|
|
|
|
|
2019-03-02 22:35:43 +05:30
|
|
|
expect(request.default_merge_commit_message)
|
2016-11-03 12:29:30 +05:30
|
|
|
.to match("Merge branch 'source' into 'target'\n\n")
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'includes its title in the body' do
|
|
|
|
request = build(:merge_request, title: 'Remove all technical debt')
|
|
|
|
|
2019-03-02 22:35:43 +05:30
|
|
|
expect(request.default_merge_commit_message)
|
2016-11-03 12:29:30 +05:30
|
|
|
.to match("Remove all technical debt\n\n")
|
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
it 'includes its closed issues in the body' do
|
|
|
|
issue = create(:issue, project: subject.project)
|
2016-11-03 12:29:30 +05:30
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
subject.project.add_developer(subject.author)
|
2017-08-17 22:00:37 +05:30
|
|
|
subject.description = "This issue Closes #{issue.to_reference}"
|
2018-11-18 11:00:15 +05:30
|
|
|
allow(subject.project).to receive(:default_branch).and_return(subject.target_branch)
|
|
|
|
subject.cache_merge_request_closes_issues!
|
2017-08-17 22:00:37 +05:30
|
|
|
|
2019-03-02 22:35:43 +05:30
|
|
|
expect(subject.default_merge_commit_message)
|
2017-08-17 22:00:37 +05:30
|
|
|
.to match("Closes #{issue.to_reference}")
|
2016-11-03 12:29:30 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'includes its reference in the body' do
|
|
|
|
request = build_stubbed(:merge_request)
|
|
|
|
|
2019-03-02 22:35:43 +05:30
|
|
|
expect(request.default_merge_commit_message)
|
2018-03-17 18:26:18 +05:30
|
|
|
.to match("See merge request #{request.to_reference(full: true)}")
|
2016-11-03 12:29:30 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'excludes multiple linebreak runs when description is blank' do
|
|
|
|
request = build(:merge_request, title: 'Title', description: nil)
|
|
|
|
|
2019-03-02 22:35:43 +05:30
|
|
|
expect(request.default_merge_commit_message).not_to match("Title\n\n\n\n")
|
2016-11-03 12:29:30 +05:30
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
it 'includes its description in the body' do
|
|
|
|
request = build(:merge_request, description: 'By removing all code')
|
|
|
|
|
2019-03-02 22:35:43 +05:30
|
|
|
expect(request.default_merge_commit_message(include_description: true))
|
2017-08-17 22:00:37 +05:30
|
|
|
.to match("By removing all code\n\n")
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'does not include its description in the body' do
|
|
|
|
request = build(:merge_request, description: 'By removing all code')
|
|
|
|
|
2019-03-02 22:35:43 +05:30
|
|
|
expect(request.default_merge_commit_message)
|
2017-08-17 22:00:37 +05:30
|
|
|
.not_to match("By removing all code\n\n")
|
|
|
|
end
|
2021-12-11 22:18:48 +05:30
|
|
|
|
|
|
|
it 'uses template from target project' do
|
|
|
|
request = build(:merge_request, title: 'Fix everything')
|
2022-03-02 08:16:31 +05:30
|
|
|
request.target_project.merge_commit_template = '%{title}'
|
2021-12-11 22:18:48 +05:30
|
|
|
|
|
|
|
expect(request.default_merge_commit_message)
|
|
|
|
.to eq('Fix everything')
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'ignores template when include_description is true' do
|
|
|
|
request = build(:merge_request, title: 'Fix everything')
|
|
|
|
request.target_project.merge_commit_template = '%{title}'
|
|
|
|
|
|
|
|
expect(request.default_merge_commit_message(include_description: true))
|
|
|
|
.to match("See merge request #{request.to_reference(full: true)}")
|
|
|
|
end
|
2016-11-03 12:29:30 +05:30
|
|
|
end
|
|
|
|
|
2019-09-04 21:01:54 +05:30
|
|
|
describe "#auto_merge_strategy" do
|
|
|
|
subject { merge_request.auto_merge_strategy }
|
|
|
|
|
|
|
|
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
|
|
|
|
|
|
|
|
it { is_expected.to eq('merge_when_pipeline_succeeds') }
|
2015-12-23 02:04:40 +05:30
|
|
|
|
2019-09-04 21:01:54 +05:30
|
|
|
context 'when auto merge is disabled' do
|
|
|
|
let(:merge_request) { create(:merge_request) }
|
2015-12-23 02:04:40 +05:30
|
|
|
|
2019-09-04 21:01:54 +05:30
|
|
|
it { is_expected.to be_nil }
|
2015-12-23 02:04:40 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
describe '#committers' do
|
|
|
|
it 'returns all the committers of every commit in the merge request' do
|
|
|
|
users = subject.commits.without_merge_commits.map(&:committer_email).uniq.map do |email|
|
2019-03-02 22:35:43 +05:30
|
|
|
create(:user, email: email)
|
|
|
|
end
|
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
expect(subject.committers).to match_array(users)
|
2019-03-02 22:35:43 +05:30
|
|
|
end
|
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
it 'returns an empty array if no committer is associated with a user' do
|
|
|
|
expect(subject.committers).to be_empty
|
2019-03-02 22:35:43 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2016-06-02 11:05:42 +05:30
|
|
|
describe '#diverged_commits_count' do
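# Counts commits on the target branch that are missing from the source branch; the
# result is cached and invalidated whenever the source or target branch SHA changes.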
|
2019-03-02 22:35:43 +05:30
|
|
|
let(:project) { create(:project, :repository) }
|
2018-03-17 18:26:18 +05:30
|
|
|
let(:forked_project) { fork_project(project, nil, repository: true) }
|
2016-06-02 11:05:42 +05:30
|
|
|
|
|
|
|
context 'when the target branch does not exist anymore' do
|
|
|
|
subject { create(:merge_request, source_project: project, target_project: project) }
|
|
|
|
|
|
|
|
before do
|
|
|
|
project.repository.raw_repository.delete_branch(subject.target_branch)
|
2018-03-17 18:26:18 +05:30
|
|
|
subject.clear_memoized_shas
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'does not crash' do
|
2018-03-17 18:26:18 +05:30
|
|
|
expect { subject.diverged_commits_count }.not_to raise_error
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns 0' do
|
|
|
|
expect(subject.diverged_commits_count).to eq(0)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'diverged on same repository' do
|
|
|
|
subject(:merge_request_with_divergence) { create(:merge_request, :diverged, source_project: project, target_project: project) }
|
|
|
|
|
|
|
|
it 'counts commits that are on target branch but not on source branch' do
|
2016-11-03 12:29:30 +05:30
|
|
|
expect(subject.diverged_commits_count).to eq(29)
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'diverged on fork' do
|
2018-03-17 18:26:18 +05:30
|
|
|
subject(:merge_request_fork_with_divergence) { create(:merge_request, :diverged, source_project: forked_project, target_project: project) }
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2019-12-26 22:10:19 +05:30
|
|
|
it 'counts commits that are on target branch but not on source branch', :sidekiq_might_not_need_inline do
|
2016-11-03 12:29:30 +05:30
|
|
|
expect(subject.diverged_commits_count).to eq(29)
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'rebased on fork' do
|
2018-03-17 18:26:18 +05:30
|
|
|
subject(:merge_request_rebased) { create(:merge_request, :rebased, source_project: forked_project, target_project: project) }
|
2016-06-02 11:05:42 +05:30
|
|
|
|
|
|
|
it 'counts commits that are on target branch but not on source branch' do
|
|
|
|
expect(subject.diverged_commits_count).to eq(0)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe 'caching' do
|
2018-03-17 18:26:18 +05:30
|
|
|
before do
|
2016-06-02 11:05:42 +05:30
|
|
|
allow(Rails).to receive(:cache).and_return(ActiveSupport::Cache::MemoryStore.new)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'caches the output' do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(subject).to receive(:compute_diverged_commits_count)
|
|
|
|
.once
|
|
|
|
.and_return(2)
|
2016-06-02 11:05:42 +05:30
|
|
|
|
|
|
|
subject.diverged_commits_count
|
|
|
|
subject.diverged_commits_count
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'invalidates the cache when the source sha changes' do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(subject).to receive(:compute_diverged_commits_count)
|
|
|
|
.twice
|
|
|
|
.and_return(2)
|
2016-06-02 11:05:42 +05:30
|
|
|
|
|
|
|
subject.diverged_commits_count
|
2016-08-24 12:49:21 +05:30
|
|
|
allow(subject).to receive(:source_branch_sha).and_return('123abc')
|
2016-06-02 11:05:42 +05:30
|
|
|
subject.diverged_commits_count
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'invalidates the cache when the target sha changes' do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(subject).to receive(:compute_diverged_commits_count)
|
|
|
|
.twice
|
|
|
|
.and_return(2)
|
2016-06-02 11:05:42 +05:30
|
|
|
|
|
|
|
subject.diverged_commits_count
|
2016-08-24 12:49:21 +05:30
|
|
|
allow(subject).to receive(:target_branch_sha).and_return('123abc')
|
2016-06-02 11:05:42 +05:30
|
|
|
subject.diverged_commits_count
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2014-09-02 18:07:02 +05:30
|
|
|
it_behaves_like 'an editable mentionable' do
|
2021-03-11 19:13:27 +05:30
|
|
|
subject { create(:merge_request, :simple, source_project: create(:project, :repository)) }
|
2015-09-11 14:41:01 +05:30
|
|
|
|
|
|
|
let(:backref_text) { "merge request #{subject.to_reference}" }
|
2018-03-17 18:26:18 +05:30
|
|
|
let(:set_mentionable_text) { ->(txt) { subject.description = txt } }
|
2014-09-02 18:07:02 +05:30
|
|
|
end
|
2015-04-26 12:48:37 +05:30
|
|
|
|
|
|
|
it_behaves_like 'a Taskable' do
|
2015-09-11 14:41:01 +05:30
|
|
|
subject { create :merge_request, :simple }
|
2015-04-26 12:48:37 +05:30
|
|
|
end
|
2015-11-26 14:37:03 +05:30
|
|
|
|
2017-09-10 17:25:29 +05:30
|
|
|
describe '#commit_shas' do
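# Persisted MRs read SHAs from the merge request diff; new MRs use compare_commits
# or fall back to the diff head SHA. An optional limit caps the number returned.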
|
2019-12-26 22:10:19 +05:30
|
|
|
context 'persisted merge request' do
|
|
|
|
context 'with a limit' do
|
|
|
|
it 'returns a limited number of commit shas' do
|
2022-11-25 23:54:43 +05:30
|
|
|
expect(subject.commit_shas(limit: 2)).to eq(
|
|
|
|
%w[b83d6e391c22777fca1ed3012fce84f633d7fed0 498214de67004b1da3d820901307bed2a68a8ef6])
|
2019-12-26 22:10:19 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'without a limit' do
|
|
|
|
it 'returns all commit shas of the merge request diff' do
|
|
|
|
expect(subject.commit_shas.size).to eq(29)
|
|
|
|
end
|
|
|
|
end
|
2016-09-13 17:45:13 +05:30
|
|
|
end
|
|
|
|
|
2019-12-26 22:10:19 +05:30
|
|
|
context 'new merge request' do
|
2020-11-24 15:15:51 +05:30
|
|
|
let_it_be(:project) { create(:project, :repository) }
|
|
|
|
|
|
|
|
subject { build(:merge_request, source_project: project) }
|
2019-12-26 22:10:19 +05:30
|
|
|
|
|
|
|
context 'compare commits' do
|
|
|
|
before do
|
|
|
|
subject.compare_commits = [
|
|
|
|
double(sha: 'sha1'), double(sha: 'sha2')
|
|
|
|
]
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'without a limit' do
|
|
|
|
it 'returns all shas of compare commits' do
|
|
|
|
expect(subject.commit_shas).to eq(%w[sha2 sha1])
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'with a limit' do
|
|
|
|
it 'returns a limited number of shas' do
|
|
|
|
expect(subject.commit_shas(limit: 1)).to eq(['sha2'])
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns diff_head_sha as an array' do
|
|
|
|
expect(subject.commit_shas).to eq([subject.diff_head_sha])
|
|
|
|
expect(subject.commit_shas(limit: 2)).to eq([subject.diff_head_sha])
|
|
|
|
end
|
2016-09-13 17:45:13 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
context 'head pipeline' do
|
2019-07-07 11:18:12 +05:30
|
|
|
let(:diff_head_sha) { Digest::SHA1.hexdigest(SecureRandom.hex) }
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
before do
|
2019-07-07 11:18:12 +05:30
|
|
|
allow(subject).to receive(:diff_head_sha).and_return(diff_head_sha)
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
2015-11-26 14:37:03 +05:30
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
describe '#head_pipeline' do
|
|
|
|
it 'returns nil for MR without head_pipeline_id' do
|
|
|
|
subject.update_attribute(:head_pipeline_id, nil)
|
|
|
|
|
|
|
|
expect(subject.head_pipeline).to be_nil
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when the source project does not exist' do
|
|
|
|
it 'returns nil' do
|
|
|
|
allow(subject).to receive(:source_project).and_return(nil)
|
|
|
|
|
|
|
|
expect(subject.head_pipeline).to be_nil
|
|
|
|
end
|
2015-11-26 14:37:03 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
describe '#actual_head_pipeline' do
|
|
|
|
it 'returns nil for MR with old pipeline' do
|
|
|
|
pipeline = create(:ci_empty_pipeline, sha: 'notlatestsha')
|
|
|
|
subject.update_attribute(:head_pipeline_id, pipeline.id)
|
|
|
|
|
|
|
|
expect(subject.actual_head_pipeline).to be_nil
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns the pipeline for MR with recent pipeline' do
|
2019-07-07 11:18:12 +05:30
|
|
|
pipeline = create(:ci_empty_pipeline, sha: diff_head_sha)
|
|
|
|
subject.update_attribute(:head_pipeline_id, pipeline.id)
|
|
|
|
|
|
|
|
expect(subject.actual_head_pipeline).to eq(subject.head_pipeline)
|
|
|
|
expect(subject.actual_head_pipeline).to eq(pipeline)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns the pipeline for MR with recent merge request pipeline' do
|
|
|
|
pipeline = create(:ci_empty_pipeline, sha: 'merge-sha', source_sha: diff_head_sha)
|
2018-03-17 18:26:18 +05:30
|
|
|
subject.update_attribute(:head_pipeline_id, pipeline.id)
|
|
|
|
|
|
|
|
expect(subject.actual_head_pipeline).to eq(subject.head_pipeline)
|
|
|
|
expect(subject.actual_head_pipeline).to eq(pipeline)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns nil when source project does not exist' do
|
2015-11-26 14:37:03 +05:30
|
|
|
allow(subject).to receive(:source_project).and_return(nil)
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(subject.actual_head_pipeline).to be_nil
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-12-13 13:39:08 +05:30
|
|
|
describe '#merge_pipeline' do
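# Once merged, the merge pipeline is looked up on the target project by the merge
# commit SHA, the squash commit SHA, or the diff head SHA.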
|
|
|
|
it 'returns nil when not merged' do
|
|
|
|
expect(subject.merge_pipeline).to be_nil
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when the MR is merged' do
|
|
|
|
let(:sha) { subject.target_project.commit.id }
|
|
|
|
let(:pipeline) { create(:ci_empty_pipeline, sha: sha, ref: subject.target_branch, project: subject.target_project) }
|
|
|
|
|
|
|
|
before do
|
|
|
|
subject.mark_as_merged!
|
|
|
|
end
|
|
|
|
|
2020-11-24 15:15:51 +05:30
|
|
|
context 'and there is a merge commit' do
|
|
|
|
before do
|
|
|
|
subject.update_attribute(:merge_commit_sha, pipeline.sha)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns the pipeline associated with that merge request' do
|
|
|
|
expect(subject.merge_pipeline).to eq(pipeline)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'and there is no merge commit, but there is a diff head' do
|
|
|
|
before do
|
|
|
|
allow(subject).to receive(:diff_head_sha).and_return(pipeline.sha)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns the pipeline associated with that merge request' do
|
|
|
|
expect(subject.merge_pipeline).to eq(pipeline)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'and there is no merge commit, but there is a squash commit' do
|
|
|
|
before do
|
|
|
|
subject.update_attribute(:squash_commit_sha, pipeline.sha)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns the pipeline associated with that merge request' do
|
|
|
|
expect(subject.merge_pipeline).to eq(pipeline)
|
|
|
|
end
|
2018-12-13 13:39:08 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
describe '#has_ci?' do
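# An MR "has CI" when a head pipeline, any previous pipeline, or a project CI
# integration exists alongside at least one commit.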
|
|
|
|
let(:merge_request) { build_stubbed(:merge_request) }
|
|
|
|
|
|
|
|
context 'has ci' do
|
|
|
|
it 'returns true if MR has head_pipeline_id and commits' do
|
2021-09-30 23:02:18 +05:30
|
|
|
allow(merge_request).to receive_message_chain(:source_project, :ci_integration) { nil }
|
2018-03-17 18:26:18 +05:30
|
|
|
allow(merge_request).to receive(:head_pipeline_id) { double }
|
|
|
|
allow(merge_request).to receive(:has_no_commits?) { false }
|
|
|
|
|
|
|
|
expect(merge_request.has_ci?).to be(true)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns true if MR has any pipeline and commits' do
|
2021-09-30 23:02:18 +05:30
|
|
|
allow(merge_request).to receive_message_chain(:source_project, :ci_integration) { nil }
|
2018-03-17 18:26:18 +05:30
|
|
|
allow(merge_request).to receive(:head_pipeline_id) { nil }
|
|
|
|
allow(merge_request).to receive(:has_no_commits?) { false }
|
|
|
|
allow(merge_request).to receive(:all_pipelines) { [double] }
|
|
|
|
|
|
|
|
expect(merge_request.has_ci?).to be(true)
|
|
|
|
end
|
|
|
|
|
2021-09-30 23:02:18 +05:30
|
|
|
it 'returns true if MR has CI integration and commits' do
|
|
|
|
allow(merge_request).to receive_message_chain(:source_project, :ci_integration) { double }
|
2018-03-17 18:26:18 +05:30
|
|
|
allow(merge_request).to receive(:head_pipeline_id) { nil }
|
|
|
|
allow(merge_request).to receive(:has_no_commits?) { false }
|
|
|
|
allow(merge_request).to receive(:all_pipelines) { [] }
|
|
|
|
|
|
|
|
expect(merge_request.has_ci?).to be(true)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'has no ci' do
|
2021-09-30 23:02:18 +05:30
|
|
|
it 'returns false if MR has no CI integration, no pipeline, and no commits' do
|
|
|
|
allow(merge_request).to receive_message_chain(:source_project, :ci_integration) { nil }
|
2018-03-17 18:26:18 +05:30
|
|
|
allow(merge_request).to receive(:head_pipeline_id) { nil }
|
|
|
|
allow(merge_request).to receive(:all_pipelines) { [] }
|
|
|
|
allow(merge_request).to receive(:has_no_commits?) { true }
|
|
|
|
|
|
|
|
expect(merge_request.has_ci?).to be(false)
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
describe '#update_head_pipeline' do
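# Stores the pipeline matching the MR's diff head SHA (including detached merge
# request pipelines) as the head pipeline; nothing changes when no such pipeline exists.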
|
|
|
|
subject { merge_request.update_head_pipeline }
|
|
|
|
|
|
|
|
let(:merge_request) { create(:merge_request) }
|
|
|
|
|
|
|
|
context 'when there is a pipeline with the diff head sha' do
|
|
|
|
let!(:pipeline) do
|
|
|
|
create(:ci_empty_pipeline,
|
|
|
|
project: merge_request.project,
|
|
|
|
sha: merge_request.diff_head_sha,
|
|
|
|
ref: merge_request.source_branch)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'updates the head pipeline' do
|
|
|
|
expect { subject }
|
|
|
|
.to change { merge_request.reload.head_pipeline }
|
|
|
|
.from(nil).to(pipeline)
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when merge request has already had head pipeline' do
|
|
|
|
before do
|
|
|
|
merge_request.update!(head_pipeline: pipeline)
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when failed to find an actual head pipeline' do
|
|
|
|
before do
|
2022-08-27 11:52:29 +05:30
|
|
|
allow(merge_request).to receive(:find_actual_head_pipeline) {}
|
2019-02-15 15:39:39 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'does not update the current head pipeline' do
|
|
|
|
expect { subject }
|
|
|
|
.not_to change { merge_request.reload.head_pipeline }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
context 'when detached merge request pipeline is run on head ref of the merge request' do
|
|
|
|
let!(:pipeline) do
|
|
|
|
create(:ci_pipeline,
|
|
|
|
source: :merge_request_event,
|
|
|
|
project: merge_request.source_project,
|
|
|
|
ref: merge_request.ref_path,
|
|
|
|
sha: sha,
|
|
|
|
merge_request: merge_request)
|
|
|
|
end
|
|
|
|
|
|
|
|
let(:sha) { merge_request.diff_head_sha }
|
|
|
|
|
|
|
|
it 'sets the head ref of the merge request to the pipeline ref' do
|
|
|
|
expect(pipeline.ref).to match(%r{refs/merge-requests/\d+/head})
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'updates correctly even though the target branch name of the merge request is different from the pipeline ref' do
|
|
|
|
expect { subject }
|
|
|
|
.to change { merge_request.reload.head_pipeline }
|
|
|
|
.from(nil).to(pipeline)
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when sha is not HEAD of the source branch' do
|
|
|
|
let(:sha) { merge_request.diff_base_sha }
|
|
|
|
|
|
|
|
it 'does not update head pipeline' do
|
|
|
|
expect { subject }.not_to change { merge_request.reload.head_pipeline }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
context 'when there are no pipelines with the diff head sha' do
|
|
|
|
it 'does not update the head pipeline' do
|
|
|
|
expect { subject }
|
|
|
|
.not_to change { merge_request.reload.head_pipeline }
|
|
|
|
end
|
|
|
|
end
|
2016-09-29 09:46:39 +05:30
|
|
|
end
|
|
|
|
|
2018-11-18 11:00:15 +05:30
|
|
|
describe '#has_test_reports?' do
|
|
|
|
subject { merge_request.has_test_reports? }
|
|
|
|
|
|
|
|
context 'when head pipeline has test reports' do
|
2020-11-24 15:15:51 +05:30
|
|
|
let(:merge_request) { create(:merge_request, :with_test_reports) }
|
2018-11-18 11:00:15 +05:30
|
|
|
|
|
|
|
it { is_expected.to be_truthy }
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when head pipeline does not have test reports' do
|
2020-11-24 15:15:51 +05:30
|
|
|
let(:merge_request) { create(:merge_request) }
|
2018-11-18 11:00:15 +05:30
|
|
|
|
|
|
|
it { is_expected.to be_falsey }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2020-05-24 23:13:21 +05:30
|
|
|
describe '#has_accessibility_reports?' do
|
|
|
|
subject { merge_request.has_accessibility_reports? }
|
|
|
|
|
|
|
|
context 'when head pipeline has accessibility reports' do
|
2020-11-24 15:15:51 +05:30
|
|
|
let(:merge_request) { create(:merge_request, :with_accessibility_reports) }
|
2020-05-24 23:13:21 +05:30
|
|
|
|
|
|
|
it { is_expected.to be_truthy }
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when head pipeline does not have accessibility reports' do
|
2020-11-24 15:15:51 +05:30
|
|
|
let(:merge_request) { create(:merge_request) }
|
2020-05-24 23:13:21 +05:30
|
|
|
|
|
|
|
it { is_expected.to be_falsey }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2020-04-08 14:13:33 +05:30
|
|
|
describe '#has_coverage_reports?' do
|
|
|
|
subject { merge_request.has_coverage_reports? }
|
|
|
|
|
|
|
|
context 'when head pipeline has coverage reports' do
|
2020-11-24 15:15:51 +05:30
|
|
|
let(:merge_request) { create(:merge_request, :with_coverage_reports) }
|
2020-04-08 14:13:33 +05:30
|
|
|
|
|
|
|
it { is_expected.to be_truthy }
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when head pipeline does not have coverage reports' do
|
2020-11-24 15:15:51 +05:30
|
|
|
let(:merge_request) { create(:merge_request) }
|
2020-04-08 14:13:33 +05:30
|
|
|
|
|
|
|
it { is_expected.to be_falsey }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2021-03-11 19:13:27 +05:30
|
|
|
describe '#has_codequality_mr_diff_report?' do
|
|
|
|
subject { merge_request.has_codequality_mr_diff_report? }
|
|
|
|
|
|
|
|
context 'when head pipeline has codequality mr diff report' do
|
|
|
|
let(:merge_request) { create(:merge_request, :with_codequality_mr_diff_reports) }
|
|
|
|
|
|
|
|
it { is_expected.to be_truthy }
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when head pipeline does not have codequality mr diff report' do
|
|
|
|
let(:merge_request) { create(:merge_request) }
|
|
|
|
|
|
|
|
it { is_expected.to be_falsey }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2021-02-22 17:27:13 +05:30
|
|
|
describe '#has_codequality_reports?' do
|
|
|
|
subject { merge_request.has_codequality_reports? }
|
|
|
|
|
|
|
|
let(:project) { create(:project, :repository) }
|
|
|
|
|
|
|
|
context 'when head pipeline has a codequality report' do
|
|
|
|
let(:merge_request) { create(:merge_request, :with_codequality_reports, source_project: project) }
|
|
|
|
|
|
|
|
it { is_expected.to be_truthy }
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when head pipeline does not have a codequality report' do
|
|
|
|
let(:merge_request) { create(:merge_request, source_project: project) }
|
|
|
|
|
|
|
|
it { is_expected.to be_falsey }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2020-05-24 23:13:21 +05:30
|
|
|
describe '#has_terraform_reports?' do
|
|
|
|
context 'when head pipeline has terraform reports' do
|
|
|
|
it 'returns true' do
|
2020-11-24 15:15:51 +05:30
|
|
|
merge_request = create(:merge_request, :with_terraform_reports)
|
2020-05-24 23:13:21 +05:30
|
|
|
|
|
|
|
expect(merge_request.has_terraform_reports?).to be_truthy
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when head pipeline does not have terraform reports' do
|
|
|
|
it 'returns false' do
|
2020-11-24 15:15:51 +05:30
|
|
|
merge_request = create(:merge_request)
|
2020-05-24 23:13:21 +05:30
|
|
|
|
|
|
|
expect(merge_request.has_terraform_reports?).to be_falsey
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2021-03-11 19:13:27 +05:30
|
|
|
describe '#has_sast_reports?' do
|
|
|
|
subject { merge_request.has_sast_reports? }
|
|
|
|
|
|
|
|
let(:project) { create(:project, :repository) }
|
|
|
|
|
|
|
|
before do
|
|
|
|
stub_licensed_features(sast: true)
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when head pipeline has sast reports' do
|
|
|
|
let(:merge_request) { create(:merge_request, :with_sast_reports, source_project: project) }
|
|
|
|
|
|
|
|
it { is_expected.to be_truthy }
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when head pipeline does not have sast reports' do
|
|
|
|
let(:merge_request) { create(:merge_request, source_project: project) }
|
|
|
|
|
|
|
|
it { is_expected.to be_falsey }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#has_secret_detection_reports?' do
|
|
|
|
subject { merge_request.has_secret_detection_reports? }
|
|
|
|
|
|
|
|
let(:project) { create(:project, :repository) }
|
|
|
|
|
|
|
|
before do
|
|
|
|
stub_licensed_features(secret_detection: true)
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when head pipeline has secret detection reports' do
|
|
|
|
let(:merge_request) { create(:merge_request, :with_secret_detection_reports, source_project: project) }
|
|
|
|
|
|
|
|
it { is_expected.to be_truthy }
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when head pipeline does not have secret detection reports' do
|
|
|
|
let(:merge_request) { create(:merge_request, source_project: project) }
|
|
|
|
|
|
|
|
it { is_expected.to be_falsey }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
describe '#calculate_reactive_cache' do
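# Instantiates the comparison service named by the given class name; unknown class
# names raise NameError.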
|
2020-11-24 15:15:51 +05:30
|
|
|
let(:merge_request) { create(:merge_request) }
|
2020-03-13 15:44:24 +05:30
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
subject { merge_request.calculate_reactive_cache(service_class_name) }
|
|
|
|
|
|
|
|
context 'when given an unknown service class name' do
|
|
|
|
let(:service_class_name) { 'Integer' }
|
|
|
|
|
|
|
|
it 'raises a NameError exception' do
|
|
|
|
expect { subject }.to raise_error(NameError, service_class_name)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when given a known service class name' do
|
|
|
|
let(:service_class_name) { 'Ci::CompareTestReportsService' }
|
|
|
|
|
|
|
|
it 'does not raise a NameError exception' do
|
|
|
|
allow_any_instance_of(service_class_name.constantize).to receive(:execute).and_return(nil)
|
|
|
|
|
|
|
|
expect { subject }.not_to raise_error
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-12-26 22:10:19 +05:30
|
|
|
describe '#find_exposed_artifacts' do
|
|
|
|
let(:project) { create(:project, :repository) }
|
|
|
|
let(:merge_request) { create(:merge_request, :with_test_reports, source_project: project) }
|
|
|
|
let(:pipeline) { merge_request.head_pipeline }
|
|
|
|
|
|
|
|
subject { merge_request.find_exposed_artifacts }
|
|
|
|
|
|
|
|
context 'when head pipeline has exposed artifacts' do
|
|
|
|
let!(:job) do
|
|
|
|
create(:ci_build, options: { artifacts: { expose_as: 'artifact', paths: ['ci_artifacts.txt'] } }, pipeline: pipeline)
|
|
|
|
end
|
|
|
|
|
|
|
|
let!(:artifacts_metadata) { create(:ci_job_artifact, :metadata, job: job) }
|
|
|
|
|
|
|
|
context 'when reactive cache worker is parsing results asynchronously' do
|
|
|
|
it 'returns status' do
|
|
|
|
expect(subject[:status]).to eq(:parsing)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when reactive cache worker is inline' do
|
|
|
|
before do
|
|
|
|
synchronous_reactive_cache(merge_request)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns status and data' do
|
|
|
|
expect(subject[:status]).to eq(:parsed)
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when an error occurs' do
|
|
|
|
before do
|
|
|
|
expect_next_instance_of(Ci::FindExposedArtifactsService) do |service|
|
|
|
|
expect(service).to receive(:for_pipeline)
|
|
|
|
.and_raise(StandardError.new)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns an error message' do
|
|
|
|
expect(subject[:status]).to eq(:error)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when cached results is not latest' do
|
|
|
|
before do
|
|
|
|
allow_next_instance_of(Ci::GenerateExposedArtifactsReportService) do |service|
|
|
|
|
allow(service).to receive(:latest?).and_return(false)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2020-04-08 14:13:33 +05:30
|
|
|
it 'raises an InvalidateReactiveCache error' do
|
|
|
|
expect { subject }.to raise_error(ReactiveCaching::InvalidateReactiveCache)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#find_coverage_reports' do
|
|
|
|
let(:project) { create(:project, :repository) }
|
|
|
|
let(:merge_request) { create(:merge_request, :with_coverage_reports, source_project: project) }
|
|
|
|
let(:pipeline) { merge_request.head_pipeline }
|
|
|
|
|
|
|
|
subject { merge_request.find_coverage_reports }
|
|
|
|
|
|
|
|
context 'when head pipeline has coverage reports' do
|
|
|
|
context 'when reactive cache worker is parsing results asynchronously' do
|
|
|
|
it 'returns status' do
|
|
|
|
expect(subject[:status]).to eq(:parsing)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when reactive cache worker is inline' do
|
|
|
|
before do
|
|
|
|
synchronous_reactive_cache(merge_request)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns status and data' do
|
|
|
|
expect(subject[:status]).to eq(:parsed)
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when an error occurs' do
|
|
|
|
before do
|
|
|
|
merge_request.update!(head_pipeline: nil)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns an error message' do
|
|
|
|
expect(subject[:status]).to eq(:error)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when cached results is not latest' do
|
|
|
|
before do
|
|
|
|
allow_next_instance_of(Ci::GenerateCoverageReportsService) do |service|
|
|
|
|
allow(service).to receive(:latest?).and_return(false)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-12-26 22:10:19 +05:30
|
|
|
it 'raises an InvalidateReactiveCache error' do
|
|
|
|
expect { subject }.to raise_error(ReactiveCaching::InvalidateReactiveCache)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2021-03-11 19:13:27 +05:30
|
|
|
describe '#find_codequality_mr_diff_reports' do
|
|
|
|
let(:project) { create(:project, :repository) }
|
2021-06-08 01:23:25 +05:30
|
|
|
let(:merge_request) { create(:merge_request, :with_codequality_mr_diff_reports, source_project: project, id: 123456789) }
|
2021-03-11 19:13:27 +05:30
|
|
|
let(:pipeline) { merge_request.head_pipeline }
|
|
|
|
|
|
|
|
subject(:mr_diff_report) { merge_request.find_codequality_mr_diff_reports }
|
|
|
|
|
|
|
|
context 'when head pipeline has codequality mr diff reports' do
|
|
|
|
context 'when reactive cache worker is parsing results asynchronously' do
|
|
|
|
it 'returns status' do
|
|
|
|
expect(mr_diff_report[:status]).to eq(:parsing)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when reactive cache worker is inline' do
|
|
|
|
before do
|
|
|
|
synchronous_reactive_cache(merge_request)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns status and data' do
|
|
|
|
expect(mr_diff_report[:status]).to eq(:parsed)
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when an error occurs' do
|
|
|
|
before do
|
|
|
|
merge_request.update!(head_pipeline: nil)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns an error message' do
|
|
|
|
expect(mr_diff_report[:status]).to eq(:error)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when cached results is not latest' do
|
|
|
|
before do
|
|
|
|
allow_next_instance_of(Ci::GenerateCodequalityMrDiffReportService) do |service|
|
|
|
|
allow(service).to receive(:latest?).and_return(false)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'raises an InvalidateReactiveCache error' do
|
|
|
|
expect { mr_diff_report }.to raise_error(ReactiveCaching::InvalidateReactiveCache)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-11-18 11:00:15 +05:30
|
|
|
describe '#compare_test_reports' do
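# Compares test reports between the base pipeline (target branch, diff base SHA) and
# the head pipeline via Ci::CompareTestReportsService, going through the reactive cache.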
|
|
|
|
subject { merge_request.compare_test_reports }
|
|
|
|
|
|
|
|
let(:project) { create(:project, :repository) }
|
|
|
|
let(:merge_request) { create(:merge_request, source_project: project) }
|
|
|
|
|
|
|
|
let!(:base_pipeline) do
|
|
|
|
create(:ci_pipeline,
|
|
|
|
:with_test_reports,
|
|
|
|
project: project,
|
|
|
|
ref: merge_request.target_branch,
|
|
|
|
sha: merge_request.diff_base_sha)
|
|
|
|
end
|
|
|
|
|
|
|
|
before do
|
|
|
|
merge_request.update!(head_pipeline_id: head_pipeline.id)
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when head pipeline has test reports' do
|
|
|
|
let!(:head_pipeline) do
|
|
|
|
create(:ci_pipeline,
|
|
|
|
:with_test_reports,
|
|
|
|
project: project,
|
|
|
|
ref: merge_request.source_branch,
|
|
|
|
sha: merge_request.diff_head_sha)
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when reactive cache worker is parsing asynchronously' do
|
|
|
|
it 'returns status' do
|
|
|
|
expect(subject[:status]).to eq(:parsing)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when reactive cache worker is inline' do
|
|
|
|
before do
|
|
|
|
synchronous_reactive_cache(merge_request)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns status and data' do
|
|
|
|
expect_any_instance_of(Ci::CompareTestReportsService)
|
|
|
|
.to receive(:execute).with(base_pipeline, head_pipeline).and_call_original
|
|
|
|
|
|
|
|
subject
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when cached results is not latest' do
|
|
|
|
before do
|
|
|
|
allow_any_instance_of(Ci::CompareTestReportsService)
|
|
|
|
.to receive(:latest?).and_return(false)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'raises an InvalidateReactiveCache error' do
|
|
|
|
expect { subject }.to raise_error(ReactiveCaching::InvalidateReactiveCache)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when head pipeline does not have test reports' do
|
|
|
|
let!(:head_pipeline) do
|
|
|
|
create(:ci_pipeline,
|
|
|
|
project: project,
|
|
|
|
ref: merge_request.source_branch,
|
|
|
|
sha: merge_request.diff_head_sha)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns status and error message' do
|
|
|
|
expect(subject[:status]).to eq(:error)
|
|
|
|
expect(subject[:status_reason]).to eq('This merge request does not have test reports')
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2020-05-24 23:13:21 +05:30
|
|
|
describe '#compare_accessibility_reports' do
|
|
|
|
let_it_be(:project) { create(:project, :repository) }
|
|
|
|
let_it_be(:merge_request, reload: true) { create(:merge_request, :with_accessibility_reports, source_project: project) }
|
|
|
|
let_it_be(:pipeline) { merge_request.head_pipeline }
|
|
|
|
|
|
|
|
subject { merge_request.compare_accessibility_reports }
|
|
|
|
|
|
|
|
context 'when head pipeline has accessibility reports' do
|
|
|
|
let(:job) do
|
|
|
|
create(:ci_build, options: { artifacts: { reports: { pa11y: ['accessibility.json'] } } }, pipeline: pipeline)
|
|
|
|
end
|
|
|
|
|
|
|
|
let(:artifacts_metadata) { create(:ci_job_artifact, :metadata, job: job) }
|
|
|
|
|
|
|
|
context 'when reactive cache worker is parsing results asynchronously' do
|
|
|
|
it 'returns parsing status' do
|
|
|
|
expect(subject[:status]).to eq(:parsing)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when reactive cache worker is inline' do
|
|
|
|
before do
|
|
|
|
synchronous_reactive_cache(merge_request)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns parsed status' do
|
|
|
|
expect(subject[:status]).to eq(:parsed)
|
|
|
|
expect(subject[:data]).to be_present
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when an error occurs' do
|
|
|
|
before do
|
|
|
|
merge_request.update!(head_pipeline: nil)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns an error status' do
|
|
|
|
expect(subject[:status]).to eq(:error)
|
|
|
|
expect(subject[:status_reason]).to eq("This merge request does not have accessibility reports")
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when cached result is not latest' do
|
|
|
|
before do
|
|
|
|
allow_next_instance_of(Ci::CompareAccessibilityReportsService) do |service|
|
|
|
|
allow(service).to receive(:latest?).and_return(false)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'raises an InvalidateReactiveCache error' do
|
|
|
|
expect { subject }.to raise_error(ReactiveCaching::InvalidateReactiveCache)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2021-02-22 17:27:13 +05:30
|
|
|
describe '#compare_codequality_reports' do
|
|
|
|
let_it_be(:project) { create(:project, :repository) }
|
|
|
|
let_it_be(:merge_request, reload: true) { create(:merge_request, :with_codequality_reports, source_project: project) }
|
|
|
|
let_it_be(:pipeline) { merge_request.head_pipeline }
|
|
|
|
|
|
|
|
subject { merge_request.compare_codequality_reports }
|
|
|
|
|
|
|
|
context 'when head pipeline has codequality report' do
|
|
|
|
let(:job) do
|
|
|
|
create(:ci_build, options: { artifacts: { reports: { codeclimate: ['codequality.json'] } } }, pipeline: pipeline)
|
|
|
|
end
|
|
|
|
|
|
|
|
let(:artifacts_metadata) { create(:ci_job_artifact, :metadata, job: job) }
|
|
|
|
|
|
|
|
context 'when reactive cache worker is parsing results asynchronously' do
|
|
|
|
it 'returns parsing status' do
|
|
|
|
expect(subject[:status]).to eq(:parsing)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when reactive cache worker is inline' do
|
|
|
|
before do
|
|
|
|
synchronous_reactive_cache(merge_request)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns parsed status' do
|
|
|
|
expect(subject[:status]).to eq(:parsed)
|
|
|
|
expect(subject[:data]).to be_present
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when an error occurs' do
|
|
|
|
before do
|
|
|
|
merge_request.update!(head_pipeline: nil)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns an error status' do
|
|
|
|
expect(subject[:status]).to eq(:error)
|
|
|
|
expect(subject[:status_reason]).to eq("This merge request does not have codequality reports")
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when cached result is not latest' do
|
|
|
|
before do
|
|
|
|
allow_next_instance_of(Ci::CompareCodequalityReportsService) do |service|
|
|
|
|
allow(service).to receive(:latest?).and_return(false)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'raises an InvalidateReactiveCache error' do
|
|
|
|
expect { subject }.to raise_error(ReactiveCaching::InvalidateReactiveCache)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-09-10 17:25:29 +05:30
|
|
|
describe '#all_commit_shas' do
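# Persisted MRs return the SHAs from every merge request diff; unpersisted MRs use
# compare_commits or the diff head SHA.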
|
2016-11-03 12:29:30 +05:30
|
|
|
context 'when merge request is persisted' do
|
2017-09-10 17:25:29 +05:30
|
|
|
let(:all_commit_shas) do
|
2016-11-03 12:29:30 +05:30
|
|
|
subject.merge_request_diffs.flat_map(&:commits).map(&:sha).uniq
|
|
|
|
end
|
2016-09-29 09:46:39 +05:30
|
|
|
|
2016-11-03 12:29:30 +05:30
|
|
|
shared_examples 'returning all SHAs' do
|
2018-03-17 18:26:18 +05:30
|
|
|
it 'returns all SHAs from all merge_request_diffs' do
|
2016-11-03 12:29:30 +05:30
|
|
|
expect(subject.merge_request_diffs.size).to eq(2)
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(subject.all_commit_shas).to match_array(all_commit_shas)
|
2016-11-03 12:29:30 +05:30
|
|
|
end
|
2016-09-29 09:46:39 +05:30
|
|
|
end
|
|
|
|
|
2016-11-03 12:29:30 +05:30
|
|
|
context 'with a completely different branch' do
|
|
|
|
before do
|
2021-09-04 01:27:46 +05:30
|
|
|
subject.update!(target_branch: 'csv')
|
2016-11-03 12:29:30 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it_behaves_like 'returning all SHAs'
|
2016-09-29 09:46:39 +05:30
|
|
|
end
|
|
|
|
|
2016-11-03 12:29:30 +05:30
|
|
|
context 'with a branch having no difference' do
|
|
|
|
before do
|
2021-09-04 01:27:46 +05:30
|
|
|
subject.update!(target_branch: 'branch-merged')
|
2016-11-03 12:29:30 +05:30
|
|
|
subject.reload # make sure commits were not cached
|
|
|
|
end
|
|
|
|
|
|
|
|
it_behaves_like 'returning all SHAs'
|
|
|
|
end
|
2016-09-29 09:46:39 +05:30
|
|
|
end
|
|
|
|
|
2016-11-03 12:29:30 +05:30
|
|
|
context 'when merge request is not persisted' do
|
2020-11-24 15:15:51 +05:30
|
|
|
let_it_be(:project) { create(:project, :repository) }
|
|
|
|
|
2016-11-03 12:29:30 +05:30
|
|
|
context 'when compare commits are set in the service' do
|
|
|
|
let(:commit) { spy('commit') }
|
|
|
|
|
|
|
|
subject do
|
2020-11-24 15:15:51 +05:30
|
|
|
build(:merge_request, source_project: project, compare_commits: [commit, commit])
|
2016-11-03 12:29:30 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns commits from compare commits temporary data' do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(subject.all_commit_shas).to eq [commit, commit]
|
2016-11-03 12:29:30 +05:30
|
|
|
end
|
2016-09-29 09:46:39 +05:30
|
|
|
end
|
|
|
|
|
2016-11-03 12:29:30 +05:30
|
|
|
context 'when compare commits are not set in the service' do
|
2020-11-24 15:15:51 +05:30
|
|
|
subject { build(:merge_request, source_project: project) }
|
2016-11-03 12:29:30 +05:30
|
|
|
|
|
|
|
it 'returns array with diff head sha element only' do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(subject.all_commit_shas).to eq [subject.diff_head_sha]
|
2016-11-03 12:29:30 +05:30
|
|
|
end
|
|
|
|
end
|
2016-09-13 17:45:13 +05:30
|
|
|
end
|
|
|
|
end

  describe '#short_merge_commit_sha' do
    let(:merge_request) { build_stubbed(:merge_request) }

    it 'returns short id when there is a merge_commit_sha' do
      merge_request.merge_commit_sha = 'f7ce827c314c9340b075657fd61c789fb01cf74d'

      expect(merge_request.short_merge_commit_sha).to eq('f7ce827c')
    end

    it 'returns nil when there is no merge_commit_sha' do
      merge_request.merge_commit_sha = nil

      expect(merge_request.short_merge_commit_sha).to be_nil
    end
  end

  describe '#merged_commit_sha' do
    it 'returns nil when not merged' do
      expect(subject.merged_commit_sha).to be_nil
    end

    context 'when the MR is merged' do
      let(:sha) { 'f7ce827c314c9340b075657fd61c789fb01cf74d' }

      before do
        subject.mark_as_merged!
      end

      it 'returns merge_commit_sha when there is a merge_commit_sha' do
        subject.update_attribute(:merge_commit_sha, sha)

        expect(subject.merged_commit_sha).to eq(sha)
      end

      it 'returns squash_commit_sha when there is a squash_commit_sha' do
        subject.update_attribute(:squash_commit_sha, sha)

        expect(subject.merged_commit_sha).to eq(sha)
      end

      it 'returns diff_head_sha when there are no merge_commit_sha and squash_commit_sha' do
        allow(subject).to receive(:diff_head_sha).and_return(sha)

        expect(subject.merged_commit_sha).to eq(sha)
      end
    end
  end

  describe '#short_merged_commit_sha' do
    context 'when merged_commit_sha is nil' do
      before do
        allow(subject).to receive(:merged_commit_sha).and_return(nil)
      end

      it 'returns nil' do
        expect(subject.short_merged_commit_sha).to be_nil
      end
    end

    context 'when merged_commit_sha is present' do
      before do
        allow(subject).to receive(:merged_commit_sha).and_return('f7ce827c314c9340b075657fd61c789fb01cf74d')
      end

      it 'returns shortened merged_commit_sha' do
        expect(subject.short_merged_commit_sha).to eq('f7ce827c')
      end
    end
  end

  describe '#can_be_reverted?' do
    subject { create(:merge_request, source_project: create(:project, :repository)) }

    context 'when there is no merge_commit for the MR' do
      before do
        subject.metrics.update!(merged_at: Time.current.utc)
      end

      it 'returns false' do
        expect(subject.can_be_reverted?(nil)).to be_falsey
      end
    end

    context 'when the MR has been merged' do
      before do
        MergeRequests::MergeService
          .new(project: subject.target_project, current_user: subject.author, params: { sha: subject.diff_head_sha })
          .execute(subject)
      end

      context 'when there is no revert commit' do
        it 'returns true' do
          expect(subject.can_be_reverted?(nil)).to be_truthy
        end
      end

      context 'when there is no merged_at for the MR' do
        before do
          subject.metrics.update!(merged_at: nil)
        end

        it 'returns true' do
          expect(subject.can_be_reverted?(nil)).to be_truthy
        end
      end

      context 'when there is a revert commit' do
        let(:current_user) { subject.author }
        let(:branch) { subject.target_branch }
        let(:project) { subject.target_project }

        let(:revert_commit_id) do
          params = {
            commit: subject.merge_commit,
            branch_name: branch,
            start_branch: branch
          }

          Commits::RevertService.new(project, current_user, params).execute[:result]
        end

        before do
          project.add_maintainer(current_user)
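
          # Process the revert commit as the post-receive hooks would; the examples
          # below check how the timing of the resulting note affects #can_be_reverted?.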
          ProcessCommitWorker.new.perform(project.id,
                                          current_user.id,
                                          project.commit(revert_commit_id).to_hash,
                                          project.default_branch == branch)
        end

        context 'but merged at timestamp cannot be found' do
          before do
            allow(subject).to receive(:merged_at) { nil }
          end

          it 'returns false' do
            expect(subject.can_be_reverted?(current_user)).to be_falsey
          end
        end

        context 'when the revert commit is mentioned in a note after the MR was merged' do
          it 'returns false' do
            expect(subject.can_be_reverted?(current_user)).to be_falsey
          end
        end

        context 'when there is no merged_at for the MR' do
          before do
            subject.metrics.update!(merged_at: nil)
          end

          it 'returns false' do
            expect(subject.can_be_reverted?(current_user)).to be_falsey
          end
        end

        context 'when the revert commit is mentioned in a note just before the MR was merged' do
          before do
            subject.notes.last.update!(created_at: subject.metrics.merged_at - 30.seconds)
          end

          it 'returns false' do
            expect(subject.can_be_reverted?(current_user)).to be_falsey
          end
        end

        context 'when the revert commit is mentioned in a note long before the MR was merged' do
          before do
            subject.notes.last.update!(created_at: subject.metrics.merged_at - 2.minutes)
          end

          it 'returns true' do
            expect(subject.can_be_reverted?(current_user)).to be_truthy
          end
        end
      end
    end
  end

  describe '#merged_at' do
    context 'when MR is not merged' do
      let(:merge_request) { create(:merge_request, :closed) }

      it 'returns nil' do
        expect(merge_request.merged_at).to be_nil
      end
    end

    context 'when metrics has merged_at data' do
      let(:merge_request) { create(:merge_request, :merged) }

      before do
        merge_request.metrics.update!(merged_at: 1.day.ago)
      end

      it 'returns metrics merged_at' do
        expect(merge_request.merged_at).to eq(merge_request.metrics.merged_at)
      end
    end

    context 'when merged event is persisted, but no metrics merged_at is persisted' do
      let(:user) { create(:user) }
      let(:merge_request) { create(:merge_request, :merged) }

      before do
        EventCreateService.new.merge_mr(merge_request, user)
      end

      it 'returns merged event creation date' do
        expect(merge_request.merge_event).to be_persisted
        expect(merge_request.merged_at).to eq(merge_request.merge_event.created_at)
      end
    end

    context 'when no metrics or merge event exists' do
      let(:user) { create(:user) }
      let(:merge_request) { create(:merge_request, :merged) }

      before do
        merge_request.metrics.destroy!
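        # With the metrics record removed, #merged_at has to fall back to the
        # resource state event or, for older MRs, the legacy 'merged' system note.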
      end

      context 'when resource event for the merge exists' do
        before do
          SystemNoteService.change_status(merge_request,
                                          merge_request.target_project,
                                          user,
                                          merge_request.state, nil)
        end

        it 'returns the resource event creation date' do
          expect(merge_request.reload.metrics).to be_nil
          expect(merge_request.merge_event).to be_nil
          expect(merge_request.resource_state_events.count).to eq(1)
          expect(merge_request.merged_at).to eq(merge_request.resource_state_events.first.created_at)
        end
      end

      context 'when system note for the merge exists' do
        before do
          # We do not create these system notes anymore but we need this to work for existing MRs
          # that used system notes instead of resource state events
          create(:note, :system, noteable: merge_request, note: 'merged')
        end

        it 'returns the merging note creation date' do
          expect(merge_request.reload.metrics).to be_nil
          expect(merge_request.merge_event).to be_nil
          expect(merge_request.notes.count).to eq(1)
          expect(merge_request.merged_at).to eq(merge_request.notes.first.created_at)
        end
      end
    end
  end

  describe '#participants' do
    let(:mr) do
      create(:merge_request, source_project: project, target_project: project)
    end

    let!(:note1) do
      create(:note_on_merge_request, noteable: mr, project: project, note: 'a')
    end

    let!(:note2) do
      create(:note_on_merge_request, noteable: mr, project: project, note: 'b')
    end

    it 'includes the merge request author' do
      expect(mr.participants).to include(mr.author)
    end

    it 'includes the authors of the notes' do
      expect(mr.participants).to include(note1.author, note2.author)
    end
  end

  describe 'cached counts' do
    it 'updates when assignees change' do
      user1 = create(:user)
      user2 = create(:user)
      mr = create(:merge_request, assignees: [user1])
      mr.project.add_developer(user1)
      mr.project.add_developer(user2)

      expect(user1.assigned_open_merge_requests_count).to eq(1)
      expect(user2.assigned_open_merge_requests_count).to eq(0)

      mr.assignees = [user2]

      expect(user1.assigned_open_merge_requests_count).to eq(0)
      expect(user2.assigned_open_merge_requests_count).to eq(1)
    end
  end

  describe '#merge_async' do
    it 'enqueues MergeWorker job and updates merge_jid' do
      merge_request = create(:merge_request)
      user_id = double(:user_id)
      params = {}
      merge_jid = 'hash-123'

      allow(MergeWorker).to receive(:with_status).and_return(MergeWorker)
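
      # with_status is stubbed to return MergeWorker itself so the perform_async
      # expectation below can be set directly on the worker class.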
      expect(merge_request).to receive(:expire_etag_cache)
      expect(MergeWorker).to receive(:perform_async).with(merge_request.id, user_id, params) do
        merge_jid
      end

      merge_request.merge_async(user_id, params)

      expect(merge_request.reload.merge_jid).to eq(merge_jid)
    end
  end

  describe '#rebase_async' do
    let(:merge_request) { create(:merge_request) }
    let(:user_id) { double(:user_id) }
    let(:rebase_jid) { 'rebase-jid' }

    subject(:execute) { merge_request.rebase_async(user_id) }

    before do
      allow(RebaseWorker).to receive(:with_status).and_return(RebaseWorker)
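      # As with MergeWorker above, with_status is stubbed so expectations can be
      # placed directly on RebaseWorker.perform_async.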
    end

    it 'atomically enqueues a RebaseWorker job and updates rebase_jid' do
      expect(RebaseWorker)
        .to receive(:perform_async)
        .with(merge_request.id, user_id, false)
        .and_return(rebase_jid)

      expect(merge_request).to receive(:expire_etag_cache)
      expect(merge_request).to receive(:lock!).and_call_original

      execute

      expect(merge_request.rebase_jid).to eq(rebase_jid)
    end

    it 'refuses to enqueue a job if a rebase is in progress' do
      merge_request.update_column(:rebase_jid, rebase_jid)

      expect(RebaseWorker).not_to receive(:perform_async)
      expect(Gitlab::SidekiqStatus)
        .to receive(:running?)
        .with(rebase_jid)
        .and_return(true)

      expect { execute }.to raise_error(ActiveRecord::StaleObjectError)
    end

    it 'refuses to enqueue a job if the MR is not open' do
      merge_request.update_column(:state_id, 5)

      expect(RebaseWorker).not_to receive(:perform_async)

      expect { execute }.to raise_error(ActiveRecord::StaleObjectError)
    end

    it "raises ActiveRecord::LockWaitTimeout after 6 tries" do
      expect(merge_request).to receive(:with_lock).exactly(6).times.and_raise(ActiveRecord::LockWaitTimeout)
      expect(RebaseWorker).not_to receive(:perform_async)

      expect { execute }.to raise_error(MergeRequest::RebaseLockTimeout)
    end
  end

  describe '#mergeable?' do
    subject { build_stubbed(:merge_request) }

    it 'returns false if #mergeable_state? is false' do
      expect(subject).to receive(:mergeable_state?) { false }

      expect(subject.mergeable?).to be_falsey
    end

    it 'returns true if #mergeable_state? is true and the MR #can_be_merged? is true' do
      allow(subject).to receive(:mergeable_state?) { true }
      expect(subject).to receive(:check_mergeability)
      expect(subject).to receive(:can_be_merged?) { true }

      expect(subject.mergeable?).to be_truthy
    end

    it 'returns false if #mergeable_state? is true and the MR #can_be_merged? is false' do
      allow(subject).to receive(:mergeable_state?) { true }
      expect(subject).to receive(:check_mergeability)
      expect(subject).to receive(:can_be_merged?) { false }

      expect(subject.mergeable?).to be_falsey
    end

    context 'with skip_ci_check option' do
      before do
        allow(subject).to receive_messages(check_mergeability: nil,
                                           can_be_merged?: true,
                                           broken?: false)
      end

      where(:mergeable_ci_state, :skip_ci_check, :expected_mergeable) do
        false | false | false
        false | true  | true
        true  | false | true
        true  | true  | true
      end
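
      # The table above shows that a failing CI state only blocks mergeability
      # when skip_ci_check is false.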

      with_them do
        it 'overrides mergeable_ci_state?' do
          allow(subject).to receive(:mergeable_ci_state?) { mergeable_ci_state }

          expect(subject.mergeable?(skip_ci_check: skip_ci_check)).to eq(expected_mergeable)
        end
      end
    end

    context 'with skip_discussions_check option' do
      before do
        allow(subject).to receive_messages(mergeable_ci_state?: true,
                                           check_mergeability: nil,
                                           can_be_merged?: true,
                                           broken?: false)
      end

      where(:mergeable_discussions_state, :skip_discussions_check, :expected_mergeable) do
        false | false | false
        false | true  | true
        true  | false | true
        true  | true  | true
      end

      with_them do
        it 'overrides mergeable_discussions_state?' do
          allow(subject).to receive(:mergeable_discussions_state?) { mergeable_discussions_state }

          expect(subject.mergeable?(skip_discussions_check: skip_discussions_check)).to eq(expected_mergeable)
        end
      end
    end
  end

  describe '#check_mergeability' do
    let(:mergeability_service) { double }

    subject { create(:merge_request, merge_status: 'unchecked') }

    before do
      allow(MergeRequests::MergeabilityCheckService).to receive(:new) do
        mergeability_service
      end
    end

    shared_examples_for 'method that executes MergeabilityCheckService' do
      it 'executes MergeabilityCheckService' do
        expect(mergeability_service).to receive(:execute)

        subject.check_mergeability
      end

      context 'when async is true' do
        it 'executes MergeabilityCheckService asynchronously' do
          expect(mergeability_service).to receive(:async_execute)

          subject.check_mergeability(async: true)
        end
      end
    end

    context 'if the merge status is unchecked' do
      it_behaves_like 'method that executes MergeabilityCheckService'
    end

    context 'if the merge status is checking' do
      before do
        subject.mark_as_checking!
      end

      it_behaves_like 'method that executes MergeabilityCheckService'
    end

    context 'if the merge status is checked' do
      before do
        subject.mark_as_mergeable!
      end

      it 'does not call MergeabilityCheckService' do
        expect(MergeRequests::MergeabilityCheckService).not_to receive(:new)

        subject.check_mergeability
      end
    end
  end
|
|
|
|
|
2021-11-18 22:05:49 +05:30
|
|
|
shared_examples 'for mergeable_state' do
|
2020-11-24 15:15:51 +05:30
|
|
|
subject { create(:merge_request) }
|
2016-06-16 23:09:34 +05:30
|
|
|
|
|
|
|
it 'checks if merge request can be merged' do
|
|
|
|
allow(subject).to receive(:mergeable_ci_state?) { true }
|
2019-09-30 21:07:59 +05:30
|
|
|
expect(subject).to receive(:check_mergeability)
|
2016-06-16 23:09:34 +05:30
|
|
|
|
|
|
|
subject.mergeable?
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when not open' do
|
2017-09-10 17:25:29 +05:30
|
|
|
before do
|
|
|
|
subject.close
|
|
|
|
end
|
2016-06-16 23:09:34 +05:30
|
|
|
|
|
|
|
it 'returns false' do
|
|
|
|
expect(subject.mergeable_state?).to be_falsey
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when work in progress' do
|
2017-09-10 17:25:29 +05:30
|
|
|
before do
|
2020-07-28 23:09:34 +05:30
|
|
|
subject.title = '[Draft] MR'
|
2017-09-10 17:25:29 +05:30
|
|
|
end
|
2016-06-16 23:09:34 +05:30
|
|
|
|
|
|
|
it 'returns false' do
|
|
|
|
expect(subject.mergeable_state?).to be_falsey
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when broken' do
|
2017-09-10 17:25:29 +05:30
|
|
|
before do
|
|
|
|
allow(subject).to receive(:broken?) { true }
|
|
|
|
end
|
2016-06-16 23:09:34 +05:30
|
|
|
|
|
|
|
it 'returns false' do
|
|
|
|
expect(subject.mergeable_state?).to be_falsey
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when failed' do
|
2022-05-07 20:08:51 +05:30
|
|
|
context 'when #mergeable_ci_state? is false' do
|
|
|
|
before do
|
|
|
|
allow(subject).to receive(:mergeable_ci_state?) { false }
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|
2020-11-24 15:15:51 +05:30
|
|
|
|
2022-05-07 20:08:51 +05:30
|
|
|
it 'returns false' do
|
|
|
|
expect(subject.mergeable_state?).to be_falsey
|
2020-11-24 15:15:51 +05:30
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
|
2022-05-07 20:08:51 +05:30
|
|
|
it 'returns true when skipping ci check' do
|
|
|
|
expect(subject.mergeable_state?(skip_ci_check: true)).to be(true)
|
|
|
|
end
|
2021-11-18 22:05:49 +05:30
|
|
|
end
|
|
|
|
|
2022-05-07 20:08:51 +05:30
|
|
|
context 'when #mergeable_discussions_state? is false' do
|
2017-08-17 22:00:37 +05:30
|
|
|
before do
|
2022-05-07 20:08:51 +05:30
|
|
|
allow(subject).to receive(:mergeable_discussions_state?) { false }
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
2022-05-07 20:08:51 +05:30
|
|
|
it 'returns false' do
|
|
|
|
expect(subject.mergeable_state?).to be_falsey
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns true when skipping discussions check' do
|
|
|
|
expect(subject.mergeable_state?(skip_discussions_check: true)).to be(true)
|
|
|
|
end
|
2021-11-18 22:05:49 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
2018-03-17 18:26:18 +05:30
|
|
|
|
2021-11-18 22:05:49 +05:30
|
|
|
describe '#mergeable_state?' do
|
2022-05-07 20:08:51 +05:30
|
|
|
it_behaves_like 'for mergeable_state'
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|
|
|
|
|
2020-04-22 19:07:51 +05:30
|
|
|
describe "#public_merge_status" do
|
|
|
|
using RSpec::Parameterized::TableSyntax
|
|
|
|
subject { build(:merge_request, merge_status: status) }
|
|
|
|
|
|
|
|
where(:status, :public_status) do
|
|
|
|
'cannot_be_merged_rechecking' | 'checking'
|
2021-04-17 20:07:23 +05:30
|
|
|
'preparing' | 'checking'
|
2020-04-22 19:07:51 +05:30
|
|
|
'checking' | 'checking'
|
|
|
|
'cannot_be_merged' | 'cannot_be_merged'
|
|
|
|
end
|
|
|
|
|
|
|
|
with_them do
|
|
|
|
it { expect(subject.public_merge_status).to eq(public_status) }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2020-01-01 13:55:28 +05:30
|
|
|
describe "#head_pipeline_active? " do
|
2021-04-17 20:07:23 +05:30
|
|
|
context 'when project lacks a head_pipeline relation' do
|
|
|
|
before do
|
|
|
|
subject.head_pipeline = nil
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns false' do
|
|
|
|
expect(subject.head_pipeline_active?).to be false
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when project has a head_pipeline relation' do
|
|
|
|
let(:pipeline) { create(:ci_empty_pipeline) }
|
|
|
|
|
|
|
|
before do
|
|
|
|
allow(subject).to receive(:head_pipeline) { pipeline }
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'accesses the value from the head_pipeline' do
|
|
|
|
expect(subject.head_pipeline)
|
|
|
|
.to receive(:active?)
|
|
|
|
|
|
|
|
subject.head_pipeline_active?
|
|
|
|
end
|
2020-01-01 13:55:28 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe "#actual_head_pipeline_success? " do
|
2021-04-17 20:07:23 +05:30
|
|
|
context 'when project lacks an actual_head_pipeline relation' do
|
|
|
|
before do
|
|
|
|
allow(subject).to receive(:actual_head_pipeline) { nil }
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns false' do
|
|
|
|
expect(subject.actual_head_pipeline_success?).to be false
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when project has a actual_head_pipeline relation' do
|
|
|
|
let(:pipeline) { create(:ci_empty_pipeline) }
|
|
|
|
|
|
|
|
before do
|
|
|
|
allow(subject).to receive(:actual_head_pipeline) { pipeline }
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'accesses the value from the actual_head_pipeline' do
|
|
|
|
expect(subject.actual_head_pipeline)
|
|
|
|
.to receive(:success?)
|
|
|
|
|
|
|
|
subject.actual_head_pipeline_success?
|
|
|
|
end
|
2020-01-01 13:55:28 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2020-03-13 15:44:24 +05:30
|
|
|
describe "#actual_head_pipeline_active? " do
|
2021-04-17 20:07:23 +05:30
|
|
|
context 'when project lacks an actual_head_pipeline relation' do
|
|
|
|
before do
|
|
|
|
allow(subject).to receive(:actual_head_pipeline) { nil }
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns false' do
|
|
|
|
expect(subject.actual_head_pipeline_active?).to be false
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when project has a actual_head_pipeline relation' do
|
|
|
|
let(:pipeline) { create(:ci_empty_pipeline) }
|
|
|
|
|
|
|
|
before do
|
|
|
|
allow(subject).to receive(:actual_head_pipeline) { pipeline }
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'accesses the value from the actual_head_pipeline' do
|
|
|
|
expect(subject.actual_head_pipeline)
|
|
|
|
.to receive(:active?)
|
|
|
|
|
|
|
|
subject.actual_head_pipeline_active?
|
|
|
|
end
|
2020-03-13 15:44:24 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2016-06-16 23:09:34 +05:30
|
|
|
describe '#mergeable_ci_state?' do
|
|
|
|
let(:pipeline) { create(:ci_empty_pipeline) }
|
|
|
|
|
|
|
|
context 'when it is only allowed to merge when build is green' do
|
2020-11-24 15:15:51 +05:30
|
|
|
let_it_be(:project) { create(:project, :repository, only_allow_merge_if_pipeline_succeeds: true) }
|
2020-06-23 00:09:42 +05:30
|
|
|
|
2020-11-24 15:15:51 +05:30
|
|
|
subject { build(:merge_request, source_project: project) }
|
2020-06-23 00:09:42 +05:30
|
|
|
|
2016-06-16 23:09:34 +05:30
|
|
|
context 'and a failed pipeline is associated' do
|
|
|
|
before do
|
2021-09-04 01:27:46 +05:30
|
|
|
pipeline.update!(status: 'failed', sha: subject.diff_head_sha)
|
2017-08-17 22:00:37 +05:30
|
|
|
allow(subject).to receive(:head_pipeline) { pipeline }
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it { expect(subject.mergeable_ci_state?).to be_falsey }
|
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
context 'and a successful pipeline is associated' do
|
|
|
|
before do
|
2021-09-04 01:27:46 +05:30
|
|
|
pipeline.update!(status: 'success', sha: subject.diff_head_sha)
|
2017-08-17 22:00:37 +05:30
|
|
|
allow(subject).to receive(:head_pipeline) { pipeline }
|
|
|
|
end
|
|
|
|
|
|
|
|
it { expect(subject.mergeable_ci_state?).to be_truthy }
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'and a skipped pipeline is associated' do
|
|
|
|
before do
|
2021-09-04 01:27:46 +05:30
|
|
|
pipeline.update!(status: 'skipped', sha: subject.diff_head_sha)
|
2020-06-23 00:09:42 +05:30
|
|
|
allow(subject).to receive(:head_pipeline).and_return(pipeline)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
2019-12-26 22:10:19 +05:30
|
|
|
it { expect(subject.mergeable_ci_state?).to be_falsey }
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
2016-06-16 23:09:34 +05:30
|
|
|
context 'when no pipeline is associated' do
|
|
|
|
before do
|
2020-06-23 00:09:42 +05:30
|
|
|
allow(subject).to receive(:head_pipeline).and_return(nil)
|
|
|
|
end
|
|
|
|
|
|
|
|
it { expect(subject.mergeable_ci_state?).to be_falsey }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when it is only allowed to merge when build is green or skipped' do
|
2020-11-24 15:15:51 +05:30
|
|
|
let_it_be(:project) { create(:project, :repository, only_allow_merge_if_pipeline_succeeds: true, allow_merge_on_skipped_pipeline: true) }
|
2020-06-23 00:09:42 +05:30
|
|
|
|
2020-11-24 15:15:51 +05:30
|
|
|
subject { build(:merge_request, source_project: project) }
|
2020-06-23 00:09:42 +05:30
|
|
|
|
|
|
|
context 'and a failed pipeline is associated' do
|
|
|
|
before do
|
|
|
|
pipeline.update!(status: 'failed', sha: subject.diff_head_sha)
|
|
|
|
allow(subject).to receive(:head_pipeline).and_return(pipeline)
|
|
|
|
end
|
|
|
|
|
|
|
|
it { expect(subject.mergeable_ci_state?).to be_falsey }
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'and a successful pipeline is associated' do
|
|
|
|
before do
|
|
|
|
pipeline.update!(status: 'success', sha: subject.diff_head_sha)
|
|
|
|
allow(subject).to receive(:head_pipeline).and_return(pipeline)
|
|
|
|
end
|
|
|
|
|
|
|
|
it { expect(subject.mergeable_ci_state?).to be_truthy }
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'and a skipped pipeline is associated' do
|
|
|
|
before do
|
|
|
|
pipeline.update!(status: 'skipped', sha: subject.diff_head_sha)
|
|
|
|
allow(subject).to receive(:head_pipeline).and_return(pipeline)
|
|
|
|
end
|
|
|
|
|
|
|
|
it { expect(subject.mergeable_ci_state?).to be_truthy }
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when no pipeline is associated' do
|
|
|
|
before do
|
|
|
|
allow(subject).to receive(:head_pipeline).and_return(nil)
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|
|
|
|
|
2019-09-30 21:07:59 +05:30
|
|
|
it { expect(subject.mergeable_ci_state?).to be_falsey }
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when merges are not restricted to green builds' do
|
2020-11-24 15:15:51 +05:30
|
|
|
let_it_be(:project) { create(:project, :repository, only_allow_merge_if_pipeline_succeeds: false) }
|
2020-06-23 00:09:42 +05:30
|
|
|
|
2020-11-24 15:15:51 +05:30
|
|
|
subject { build(:merge_request, source_project: project) }
|
2016-06-16 23:09:34 +05:30
|
|
|
|
|
|
|
context 'and a failed pipeline is associated' do
|
|
|
|
before do
|
|
|
|
pipeline.statuses << create(:commit_status, status: 'failed', project: project)
|
2017-08-17 22:00:37 +05:30
|
|
|
allow(subject).to receive(:head_pipeline) { pipeline }
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it { expect(subject.mergeable_ci_state?).to be_truthy }
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when no pipeline is associated' do
|
|
|
|
before do
|
2017-08-17 22:00:37 +05:30
|
|
|
allow(subject).to receive(:head_pipeline) { nil }
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it { expect(subject.mergeable_ci_state?).to be_truthy }
|
2015-11-26 14:37:03 +05:30
|
|
|
end
|
2020-06-23 00:09:42 +05:30
|
|
|
|
|
|
|
context 'and a skipped pipeline is associated' do
|
|
|
|
before do
|
|
|
|
pipeline.update!(status: 'skipped', sha: subject.diff_head_sha)
|
|
|
|
allow(subject).to receive(:head_pipeline).and_return(pipeline)
|
|
|
|
end
|
|
|
|
|
|
|
|
it { expect(subject.mergeable_ci_state?).to be_truthy }
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when no pipeline is associated' do
|
|
|
|
before do
|
|
|
|
allow(subject).to receive(:head_pipeline).and_return(nil)
|
|
|
|
end
|
|
|
|
|
|
|
|
it { expect(subject.mergeable_ci_state?).to be_truthy }
|
|
|
|
end
|
2015-11-26 14:37:03 +05:30
|
|
|
end
|
|
|
|
end
|
2016-08-24 12:49:21 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
describe '#mergeable_discussions_state?' do
|
|
|
|
let(:merge_request) { create(:merge_request_with_diff_notes, source_project: project) }
|
|
|
|
|
|
|
|
context 'when project.only_allow_merge_if_all_discussions_are_resolved == true' do
|
2020-11-24 15:15:51 +05:30
|
|
|
let_it_be(:project) { create(:project, :repository, only_allow_merge_if_all_discussions_are_resolved: true) }
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
context 'with all discussions resolved' do
|
|
|
|
before do
|
|
|
|
merge_request.discussions.each { |d| d.resolve!(merge_request.author) }
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns true' do
|
|
|
|
expect(merge_request.mergeable_discussions_state?).to be_truthy
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'with unresolved discussions' do
|
|
|
|
before do
|
|
|
|
merge_request.discussions.each(&:unresolve!)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns false' do
|
|
|
|
expect(merge_request.mergeable_discussions_state?).to be_falsey
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'with no discussions' do
|
|
|
|
before do
|
2020-06-23 00:09:42 +05:30
|
|
|
merge_request.notes.destroy_all # rubocop: disable Cop/DestroyAll
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns true' do
|
|
|
|
expect(merge_request.mergeable_discussions_state?).to be_truthy
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when project.only_allow_merge_if_all_discussions_are_resolved == false' do
|
|
|
|
let(:project) { create(:project, :repository, only_allow_merge_if_all_discussions_are_resolved: false) }
|
|
|
|
|
|
|
|
context 'with unresolved discussions' do
|
|
|
|
before do
|
|
|
|
merge_request.discussions.each(&:unresolve!)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns true' do
|
|
|
|
expect(merge_request.mergeable_discussions_state?).to be_truthy
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2016-08-24 12:49:21 +05:30
|
|
|
describe "#reload_diff" do
|
2018-11-08 19:23:39 +05:30
|
|
|
it 'calls MergeRequests::ReloadDiffsService#execute with correct params' do
|
|
|
|
user = create(:user)
|
|
|
|
service = instance_double(MergeRequests::ReloadDiffsService, execute: nil)
|
2016-09-13 17:45:13 +05:30
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
expect(MergeRequests::ReloadDiffsService)
|
|
|
|
.to receive(:new).with(subject, user)
|
|
|
|
.and_return(service)
|
2016-09-13 17:45:13 +05:30
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
subject.reload_diff(user)
|
2016-08-24 12:49:21 +05:30
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
expect(service).to have_received(:execute)
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
context 'when using the after_update hook to update' do
|
|
|
|
context 'when the branches are updated' do
|
|
|
|
it 'uses the new heads to generate the diff' do
|
|
|
|
expect { subject.update!(source_branch: subject.target_branch, target_branch: subject.source_branch) }
|
|
|
|
.to change { subject.merge_request_diff.start_commit_sha }
|
|
|
|
.and change { subject.merge_request_diff.head_commit_sha }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#update_diff_discussion_positions' do
|
2022-04-04 11:22:00 +05:30
|
|
|
subject { create(:merge_request, source_project: project) }
|
2018-03-17 18:26:18 +05:30
|
|
|
|
2022-04-04 11:22:00 +05:30
|
|
|
let(:project) { create(:project, :repository) }
|
|
|
|
let(:create_commit) { project.commit("913c66a37b4a45b9769037c55c2d238bd0942d2e") }
|
|
|
|
let(:modify_commit) { project.commit("874797c3a73b60d2187ed6e2fcabd289ff75171e") }
|
|
|
|
let(:edit_commit) { project.commit("570e7b2abdd848b95f2f578043fc23bd6f6fd24d") }
|
|
|
|
let(:discussion) { create(:diff_note_on_merge_request, noteable: subject, project: project, position: old_position).to_discussion }
|
|
|
|
let(:path) { "files/ruby/popen.rb" }
|
|
|
|
let(:new_line) { 9 }
|
|
|
|
|
|
|
|
let(:old_diff_refs) do
|
|
|
|
Gitlab::Diff::DiffRefs.new(
|
|
|
|
base_sha: create_commit.parent_id,
|
|
|
|
head_sha: modify_commit.sha
|
|
|
|
)
|
|
|
|
end
|
2016-09-29 09:46:39 +05:30
|
|
|
|
2022-04-04 11:22:00 +05:30
|
|
|
let(:new_diff_refs) do
|
|
|
|
Gitlab::Diff::DiffRefs.new(
|
|
|
|
base_sha: create_commit.parent_id,
|
|
|
|
head_sha: edit_commit.sha
|
|
|
|
)
|
|
|
|
end
|
|
|
|
|
|
|
|
let(:old_position) do
|
|
|
|
Gitlab::Diff::Position.new(
|
|
|
|
old_path: path,
|
|
|
|
new_path: path,
|
|
|
|
old_line: nil,
|
|
|
|
new_line: new_line,
|
|
|
|
diff_refs: old_diff_refs
|
|
|
|
)
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
2016-08-24 12:49:21 +05:30
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
it "updates diff discussion positions" do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(Discussions::UpdateDiffPositionService).to receive(:new).with(
|
2016-08-24 12:49:21 +05:30
|
|
|
subject.project,
|
2017-09-10 17:25:29 +05:30
|
|
|
subject.author,
|
2016-08-24 12:49:21 +05:30
|
|
|
old_diff_refs: old_diff_refs,
|
2022-04-04 11:22:00 +05:30
|
|
|
new_diff_refs: new_diff_refs,
|
2017-09-10 17:25:29 +05:30
|
|
|
paths: discussion.position.paths
|
2016-08-24 12:49:21 +05:30
|
|
|
).and_call_original
|
|
|
|
|
2017-09-10 17:25:29 +05:30
|
|
|
expect_any_instance_of(Discussions::UpdateDiffPositionService).to receive(:execute).with(discussion).and_call_original
|
2016-08-24 12:49:21 +05:30
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
subject.update_diff_discussion_positions(old_diff_refs: old_diff_refs,
|
2022-04-04 11:22:00 +05:30
|
|
|
new_diff_refs: new_diff_refs,
|
2018-03-17 18:26:18 +05:30
|
|
|
current_user: subject.author)
|
|
|
|
end
|
|
|
|
|
2022-04-04 11:22:00 +05:30
|
|
|
it 'does not call the resolve method' do
|
|
|
|
expect(MergeRequests::ResolvedDiscussionNotificationService).not_to receive(:new)
|
2021-03-11 19:13:27 +05:30
|
|
|
|
2022-04-04 11:22:00 +05:30
|
|
|
subject.update_diff_discussion_positions(old_diff_refs: old_diff_refs,
|
|
|
|
new_diff_refs: new_diff_refs,
|
|
|
|
current_user: subject.author)
|
|
|
|
end
|
2021-03-11 19:13:27 +05:30
|
|
|
|
2022-04-04 11:22:00 +05:30
|
|
|
context 'when resolve_outdated_diff_discussions is set' do
|
2018-03-17 18:26:18 +05:30
|
|
|
before do
|
|
|
|
discussion
|
|
|
|
|
|
|
|
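# Opt the project in to resolving outdated diff discussions automatically.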
subject.project.update!(resolve_outdated_diff_discussions: true)
|
|
|
|
end
|
|
|
|
|
2022-04-04 11:22:00 +05:30
|
|
|
context 'when the active discussion is resolved in the update' do
|
|
|
|
it 'calls MergeRequests::ResolvedDiscussionNotificationService' do
|
|
|
|
expect_any_instance_of(MergeRequests::ResolvedDiscussionNotificationService)
|
|
|
|
.to receive(:execute).with(subject)
|
2018-03-17 18:26:18 +05:30
|
|
|
|
2022-04-04 11:22:00 +05:30
|
|
|
subject.update_diff_discussion_positions(old_diff_refs: old_diff_refs,
|
|
|
|
new_diff_refs: new_diff_refs,
|
|
|
|
current_user: subject.author)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when the active discussion does not have resolved in the update' do
|
|
|
|
let(:new_line) { 16 }
|
|
|
|
|
|
|
|
it 'does not call the resolve method' do
|
|
|
|
expect(MergeRequests::ResolvedDiscussionNotificationService).not_to receive(:new)
|
|
|
|
|
|
|
|
subject.update_diff_discussion_positions(old_diff_refs: old_diff_refs,
|
|
|
|
new_diff_refs: new_diff_refs,
|
|
|
|
current_user: subject.author)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when the active discussion was already resolved' do
|
|
|
|
before do
|
|
|
|
discussion.resolve!(subject.author)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'does not call the resolve method' do
|
|
|
|
expect(MergeRequests::ResolvedDiscussionNotificationService).not_to receive(:new)
|
|
|
|
|
|
|
|
subject.update_diff_discussion_positions(old_diff_refs: old_diff_refs,
|
|
|
|
new_diff_refs: new_diff_refs,
|
|
|
|
current_user: subject.author)
|
|
|
|
end
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
2016-08-24 12:49:21 +05:30
|
|
|
end
|
|
|
|
end
|
2016-09-13 17:45:13 +05:30
|
|
|
|
2016-09-29 09:46:39 +05:30
|
|
|
describe '#branch_merge_base_commit' do
|
2020-11-24 15:15:51 +05:30
|
|
|
let(:project) { create(:project, :repository) }
|
|
|
|
|
2021-03-11 19:13:27 +05:30
|
|
|
subject { create(:merge_request, source_project: project) }
|
2020-11-24 15:15:51 +05:30
|
|
|
|
2016-09-29 09:46:39 +05:30
|
|
|
context 'source and target branch exist' do
|
|
|
|
it { expect(subject.branch_merge_base_commit.sha).to eq('ae73cb07c9eeaf35924a10f713b364d32b2dd34f') }
|
|
|
|
it { expect(subject.branch_merge_base_commit).to be_a(Commit) }
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when the target branch does not exist' do
|
|
|
|
before do
|
2018-03-17 18:26:18 +05:30
|
|
|
subject.project.repository.rm_branch(subject.author, subject.target_branch)
|
|
|
|
subject.clear_memoized_shas
|
2016-09-29 09:46:39 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns nil' do
|
|
|
|
expect(subject.branch_merge_base_commit).to be_nil
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-09-10 17:25:29 +05:30
|
|
|
describe "#diff_refs" do
|
2016-09-13 17:45:13 +05:30
|
|
|
context "with diffs" do
|
2020-11-24 15:15:51 +05:30
|
|
|
let(:project) { create(:project, :repository) }
|
|
|
|
|
2021-03-11 19:13:27 +05:30
|
|
|
subject { create(:merge_request, source_project: project) }
|
2019-12-21 20:55:43 +05:30
|
|
|
|
2019-09-30 21:07:59 +05:30
|
|
|
let(:expected_diff_refs) do
|
|
|
|
Gitlab::Diff::DiffRefs.new(
|
2022-10-11 01:57:18 +05:30
|
|
|
base_sha: subject.merge_request_diff.base_commit_sha,
|
2019-09-30 21:07:59 +05:30
|
|
|
start_sha: subject.merge_request_diff.start_commit_sha,
|
2022-10-11 01:57:18 +05:30
|
|
|
head_sha: subject.merge_request_diff.head_commit_sha
|
2019-09-30 21:07:59 +05:30
|
|
|
)
|
|
|
|
end
|
2016-09-13 17:45:13 +05:30
|
|
|
|
|
|
|
it "does not touch the repository" do
|
|
|
|
subject # Instantiate the object
|
|
|
|
|
|
|
|
expect_any_instance_of(Repository).not_to receive(:commit)
|
|
|
|
|
2017-09-10 17:25:29 +05:30
|
|
|
subject.diff_refs
|
2016-09-13 17:45:13 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it "returns expected diff_refs" do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(subject.diff_refs).to eq(expected_diff_refs)
|
2016-09-13 17:45:13 +05:30
|
|
|
end
|
2019-09-30 21:07:59 +05:30
|
|
|
|
|
|
|
context 'when importing' do
|
|
|
|
before do
|
|
|
|
subject.importing = true
|
|
|
|
end
|
|
|
|
|
|
|
|
it "returns MR diff_refs" do
|
|
|
|
expect(subject.diff_refs).to eq(expected_diff_refs)
|
|
|
|
end
|
|
|
|
end
|
2016-09-13 17:45:13 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2016-11-03 12:29:30 +05:30
|
|
|
describe "#source_project_missing?" do
|
2019-03-02 22:35:43 +05:30
|
|
|
let(:project) { create(:project) }
|
2018-03-17 18:26:18 +05:30
|
|
|
let(:forked_project) { fork_project(project) }
|
2019-03-02 22:35:43 +05:30
|
|
|
let(:user) { create(:user) }
|
2018-03-17 18:26:18 +05:30
|
|
|
let(:unlink_project) { Projects::UnlinkForkService.new(forked_project, user) }
|
2016-09-29 09:46:39 +05:30
|
|
|
|
|
|
|
context "when the fork exists" do
|
|
|
|
let(:merge_request) do
|
|
|
|
create(:merge_request,
|
2018-03-17 18:26:18 +05:30
|
|
|
source_project: forked_project,
|
2016-09-29 09:46:39 +05:30
|
|
|
target_project: project)
|
|
|
|
end
|
|
|
|
|
2016-11-03 12:29:30 +05:30
|
|
|
it { expect(merge_request.source_project_missing?).to be_falsey }
|
2016-09-29 09:46:39 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
context "when the source project is the same as the target project" do
|
|
|
|
let(:merge_request) { create(:merge_request, source_project: project) }
|
|
|
|
|
2016-11-03 12:29:30 +05:30
|
|
|
it { expect(merge_request.source_project_missing?).to be_falsey }
|
2016-09-29 09:46:39 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
context "when the fork does not exist" do
|
2018-03-17 18:26:18 +05:30
|
|
|
let!(:merge_request) do
|
2016-09-29 09:46:39 +05:30
|
|
|
create(:merge_request,
|
2018-03-17 18:26:18 +05:30
|
|
|
source_project: forked_project,
|
2016-09-29 09:46:39 +05:30
|
|
|
target_project: project)
|
|
|
|
end
|
|
|
|
|
|
|
|
it "returns true" do
|
|
|
|
unlink_project.execute
|
|
|
|
merge_request.reload
|
|
|
|
|
2016-11-03 12:29:30 +05:30
|
|
|
expect(merge_request.source_project_missing?).to be_truthy
|
2016-09-29 09:46:39 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-09-10 17:25:29 +05:30
|
|
|
describe '#merge_ongoing?' do
|
2018-03-17 18:26:18 +05:30
|
|
|
it 'returns true when the merge request is locked' do
|
2019-12-21 20:55:43 +05:30
|
|
|
merge_request = build_stubbed(:merge_request, state_id: described_class.available_states[:locked])
|
2017-09-10 17:25:29 +05:30
|
|
|
|
|
|
|
expect(merge_request.merge_ongoing?).to be(true)
|
|
|
|
end
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
it 'returns true when merge_id, MR is not merged and it has no running job' do
|
2019-12-21 20:55:43 +05:30
|
|
|
merge_request = build_stubbed(:merge_request, state_id: described_class.available_states[:opened], merge_jid: 'foo')
|
2018-03-17 18:26:18 +05:30
|
|
|
allow(Gitlab::SidekiqStatus).to receive(:running?).with('foo') { true }
|
2017-09-10 17:25:29 +05:30
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(merge_request.merge_ongoing?).to be(true)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns false when merge_jid is nil' do
|
2019-12-21 20:55:43 +05:30
|
|
|
merge_request = build_stubbed(:merge_request, state_id: described_class.available_states[:opened], merge_jid: nil)
|
2017-09-10 17:25:29 +05:30
|
|
|
|
|
|
|
expect(merge_request.merge_ongoing?).to be(false)
|
|
|
|
end
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
it 'returns false if MR is merged' do
|
2019-12-21 20:55:43 +05:30
|
|
|
merge_request = build_stubbed(:merge_request, state_id: described_class.available_states[:merged], merge_jid: 'foo')
|
2017-09-10 17:25:29 +05:30
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(merge_request.merge_ongoing?).to be(false)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns false if there is no merge job running' do
|
2019-12-21 20:55:43 +05:30
|
|
|
merge_request = build_stubbed(:merge_request, state_id: described_class.available_states[:opened], merge_jid: 'foo')
|
2018-03-17 18:26:18 +05:30
|
|
|
allow(Gitlab::SidekiqStatus).to receive(:running?).with('foo') { false }
|
2017-09-10 17:25:29 +05:30
|
|
|
|
|
|
|
expect(merge_request.merge_ongoing?).to be(false)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2021-02-22 17:27:13 +05:30
|
|
|
describe "#closed_or_merged_without_fork?" do
|
2019-03-02 22:35:43 +05:30
|
|
|
let(:project) { create(:project) }
|
2018-03-17 18:26:18 +05:30
|
|
|
let(:forked_project) { fork_project(project) }
|
2019-03-02 22:35:43 +05:30
|
|
|
let(:user) { create(:user) }
|
2018-03-17 18:26:18 +05:30
|
|
|
let(:unlink_project) { Projects::UnlinkForkService.new(forked_project, user) }
|
2016-09-29 09:46:39 +05:30
|
|
|
|
|
|
|
context "when the merge request is closed" do
|
|
|
|
let(:closed_merge_request) do
|
|
|
|
create(:closed_merge_request,
|
2018-03-17 18:26:18 +05:30
|
|
|
source_project: forked_project,
|
2016-09-29 09:46:39 +05:30
|
|
|
target_project: project)
|
|
|
|
end
|
|
|
|
|
|
|
|
it "returns false if the fork exist" do
|
2021-02-22 17:27:13 +05:30
|
|
|
expect(closed_merge_request.closed_or_merged_without_fork?).to be_falsey
|
2016-09-29 09:46:39 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it "returns true if the fork does not exist" do
|
|
|
|
unlink_project.execute
|
|
|
|
closed_merge_request.reload
|
|
|
|
|
2021-02-22 17:27:13 +05:30
|
|
|
expect(closed_merge_request.closed_or_merged_without_fork?).to be_truthy
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context "when the merge request was merged" do
|
|
|
|
let(:merged_merge_request) do
|
|
|
|
create(:merged_merge_request,
|
|
|
|
source_project: forked_project,
|
|
|
|
target_project: project)
|
|
|
|
end
|
|
|
|
|
|
|
|
it "returns false if the fork exist" do
|
|
|
|
expect(merged_merge_request.closed_or_merged_without_fork?).to be_falsey
|
|
|
|
end
|
|
|
|
|
|
|
|
it "returns true if the fork does not exist" do
|
|
|
|
unlink_project.execute
|
|
|
|
merged_merge_request.reload
|
|
|
|
|
|
|
|
expect(merged_merge_request.closed_or_merged_without_fork?).to be_truthy
|
2016-09-29 09:46:39 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context "when the merge request is open" do
|
|
|
|
let(:open_merge_request) do
|
|
|
|
create(:merge_request,
|
2018-03-17 18:26:18 +05:30
|
|
|
source_project: forked_project,
|
2016-09-29 09:46:39 +05:30
|
|
|
target_project: project)
|
|
|
|
end
|
|
|
|
|
|
|
|
it "returns false" do
|
2021-02-22 17:27:13 +05:30
|
|
|
expect(open_merge_request.closed_or_merged_without_fork?).to be_falsey
|
2016-09-29 09:46:39 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#reopenable?' do
|
|
|
|
context 'when the merge request is closed' do
|
|
|
|
it 'returns true' do
|
|
|
|
subject.close
|
|
|
|
|
|
|
|
expect(subject.reopenable?).to be_truthy
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'forked project' do
|
2018-03-17 18:26:18 +05:30
|
|
|
let(:project) { create(:project, :public) }
|
2016-09-29 09:46:39 +05:30
|
|
|
let(:user) { create(:user) }
|
2018-03-17 18:26:18 +05:30
|
|
|
let(:forked_project) { fork_project(project, user) }
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
let!(:merge_request) do
|
2016-09-29 09:46:39 +05:30
|
|
|
create(:closed_merge_request,
|
2018-03-17 18:26:18 +05:30
|
|
|
source_project: forked_project,
|
2016-09-29 09:46:39 +05:30
|
|
|
target_project: project)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns false if unforked' do
|
2018-03-17 18:26:18 +05:30
|
|
|
Projects::UnlinkForkService.new(forked_project, user).execute
|
2016-09-29 09:46:39 +05:30
|
|
|
|
|
|
|
expect(merge_request.reload.reopenable?).to be_falsey
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns false if the source project is deleted' do
|
2018-03-17 18:26:18 +05:30
|
|
|
Projects::DestroyService.new(forked_project, user).execute
|
2016-09-29 09:46:39 +05:30
|
|
|
|
|
|
|
expect(merge_request.reload.reopenable?).to be_falsey
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns false if the merge request is merged' do
|
2021-09-04 01:27:46 +05:30
|
|
|
merge_request.update!(state: 'merged')
|
2016-09-29 09:46:39 +05:30
|
|
|
|
|
|
|
expect(merge_request.reload.reopenable?).to be_falsey
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when the merge request is opened' do
|
|
|
|
it 'returns false' do
|
|
|
|
expect(subject.reopenable?).to be_falsey
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
|
2020-01-01 13:55:28 +05:30
|
|
|
describe '#pipeline_coverage_delta' do
|
2020-11-24 15:15:51 +05:30
|
|
|
let!(:merge_request) { create(:merge_request) }
|
2020-01-01 13:55:28 +05:30
|
|
|
|
|
|
|
let!(:source_pipeline) do
|
|
|
|
create(:ci_pipeline,
|
|
|
|
project: project,
|
|
|
|
ref: merge_request.source_branch,
|
|
|
|
sha: merge_request.diff_head_sha
|
|
|
|
)
|
|
|
|
end
|
|
|
|
|
|
|
|
let!(:target_pipeline) do
|
|
|
|
create(:ci_pipeline,
|
|
|
|
project: project,
|
|
|
|
ref: merge_request.target_branch,
|
|
|
|
sha: merge_request.diff_base_sha
|
|
|
|
)
|
|
|
|
end
|
|
|
|
|
|
|
|
def create_build(pipeline, coverage, name)
|
|
|
|
create(:ci_build, :success, pipeline: pipeline, coverage: coverage, name: name)
|
|
|
|
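# Refresh head_pipeline so the coverage comparison picks up the new build.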
merge_request.update_head_pipeline
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when both source and target branches have coverage information' do
|
|
|
|
it 'returns the appropriate coverage delta' do
|
|
|
|
create_build(source_pipeline, 60.2, 'test:1')
|
|
|
|
create_build(target_pipeline, 50, 'test:2')
|
|
|
|
|
2022-01-26 12:08:38 +05:30
|
|
|
expect(merge_request.pipeline_coverage_delta).to be_within(0.001).of(10.2)
|
2020-01-01 13:55:28 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when target branch does not have coverage information' do
|
|
|
|
it 'returns nil' do
|
|
|
|
create_build(source_pipeline, 50, 'test:1')
|
|
|
|
|
|
|
|
expect(merge_request.pipeline_coverage_delta).to be_nil
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when source branch does not have coverage information' do
|
|
|
|
it 'returns nil for coverage_delta' do
|
|
|
|
create_build(target_pipeline, 50, 'test:1')
|
|
|
|
|
|
|
|
expect(merge_request.pipeline_coverage_delta).to be_nil
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'neither source nor target branch has coverage information' do
|
|
|
|
it 'returns nil for coverage_delta' do
|
|
|
|
expect(merge_request.pipeline_coverage_delta).to be_nil
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2021-04-17 20:07:23 +05:30
|
|
|
describe '#use_merge_base_pipeline_for_comparison?' do
|
|
|
|
let(:project) { create(:project, :public, :repository) }
|
|
|
|
let(:merge_request) { create(:merge_request, :with_codequality_reports, source_project: project) }
|
|
|
|
|
|
|
|
subject { merge_request.use_merge_base_pipeline_for_comparison?(service_class) }
|
|
|
|
|
2021-06-08 01:23:25 +05:30
|
|
|
context 'when service class is Ci::CompareMetricsReportsService' do
|
|
|
|
let(:service_class) { 'Ci::CompareMetricsReportsService' }
|
|
|
|
|
|
|
|
it { is_expected.to be_truthy }
|
|
|
|
end
|
|
|
|
|
2021-04-17 20:07:23 +05:30
|
|
|
context 'when service class is Ci::CompareCodequalityReportsService' do
|
|
|
|
let(:service_class) { 'Ci::CompareCodequalityReportsService' }
|
|
|
|
|
2021-04-29 21:17:54 +05:30
|
|
|
it { is_expected.to be_truthy }
|
2021-04-17 20:07:23 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
context 'when service class is different' do
|
|
|
|
let(:service_class) { 'Ci::GenerateCoverageReportsService' }
|
|
|
|
|
|
|
|
it { is_expected.to be_falsey }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#comparison_base_pipeline' do
|
|
|
|
subject(:pipeline) { merge_request.comparison_base_pipeline(service_class) }
|
|
|
|
|
|
|
|
let(:project) { create(:project, :public, :repository) }
|
|
|
|
let(:merge_request) { create(:merge_request, :with_codequality_reports, source_project: project) }
|
|
|
|
let!(:base_pipeline) do
|
|
|
|
create(:ci_pipeline,
|
|
|
|
:with_test_reports,
|
|
|
|
project: project,
|
|
|
|
ref: merge_request.target_branch,
|
|
|
|
sha: merge_request.diff_base_sha
|
|
|
|
)
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when service class is Ci::CompareCodequalityReportsService' do
|
|
|
|
let(:service_class) { 'Ci::CompareCodequalityReportsService' }
|
|
|
|
|
|
|
|
context 'when merge request has a merge request pipeline' do
|
|
|
|
let(:merge_request) do
|
|
|
|
create(:merge_request, :with_merge_request_pipeline)
|
|
|
|
end
|
|
|
|
|
|
|
|
let(:merge_base_pipeline) do
|
|
|
|
create(:ci_pipeline, ref: merge_request.target_branch, sha: merge_request.target_branch_sha)
|
|
|
|
end
|
|
|
|
|
|
|
|
before do
|
|
|
|
merge_base_pipeline
|
|
|
|
merge_request.update_head_pipeline
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns the merge_base_pipeline' do
|
|
|
|
expect(pipeline).to eq(merge_base_pipeline)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when the merge request does not have a merge request pipeline' do
|
|
|
|
it 'returns the base_pipeline' do
|
|
|
|
expect(pipeline).to eq(base_pipeline)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when service_class is different' do
|
|
|
|
let(:service_class) { 'Ci::GenerateCoverageReportsService' }
|
|
|
|
|
|
|
|
it 'returns the base_pipeline' do
|
|
|
|
expect(pipeline).to eq(base_pipeline)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-11-18 11:00:15 +05:30
|
|
|
describe '#base_pipeline' do
|
|
|
|
let(:pipeline_arguments) do
|
|
|
|
{
|
|
|
|
project: project,
|
|
|
|
ref: merge_request.target_branch,
|
|
|
|
sha: merge_request.diff_base_sha
|
|
|
|
}
|
|
|
|
end
|
|
|
|
|
|
|
|
let(:project) { create(:project, :public, :repository) }
|
|
|
|
let(:merge_request) { create(:merge_request, source_project: project) }
|
|
|
|
|
2019-12-26 22:10:19 +05:30
|
|
|
let!(:first_pipeline) { create(:ci_pipeline, pipeline_arguments) }
|
|
|
|
let!(:last_pipeline) { create(:ci_pipeline, pipeline_arguments) }
|
|
|
|
let!(:last_pipeline_with_other_ref) { create(:ci_pipeline, pipeline_arguments.merge(ref: 'other')) }
|
2018-11-18 11:00:15 +05:30
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
it 'returns latest pipeline for the target branch' do
|
2018-11-18 11:00:15 +05:30
|
|
|
expect(merge_request.base_pipeline).to eq(last_pipeline)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2021-01-03 14:25:43 +05:30
|
|
|
describe '#merge_base_pipeline' do
|
|
|
|
let(:merge_request) do
|
|
|
|
create(:merge_request, :with_merge_request_pipeline)
|
|
|
|
end
|
|
|
|
|
|
|
|
let(:merge_base_pipeline) do
|
|
|
|
create(:ci_pipeline, ref: merge_request.target_branch, sha: merge_request.target_branch_sha)
|
|
|
|
end
|
|
|
|
|
|
|
|
before do
|
|
|
|
merge_base_pipeline
|
|
|
|
merge_request.update_head_pipeline
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns a pipeline pointing to a commit on the target ref' do
|
|
|
|
expect(merge_request.merge_base_pipeline).to eq(merge_base_pipeline)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
  describe '#has_commits?' do
    it 'returns true when merge request diff has commits' do
      allow(subject.merge_request_diff).to receive(:commits_count)
        .and_return(2)

      expect(subject.has_commits?).to be_truthy
    end

    context 'when commits_count is nil' do
      it 'returns false' do
        allow(subject.merge_request_diff).to receive(:commits_count)
          .and_return(nil)

        expect(subject.has_commits?).to be_falsey
      end
    end
  end

  describe '#has_no_commits?' do
    before do
      allow(subject.merge_request_diff).to receive(:commits_count)
        .and_return(0)
    end

    it 'returns true when merge request diff has 0 commits' do
      expect(subject.has_no_commits?).to be_truthy
    end
  end

  describe '#merge_request_diff_for' do
    let(:project) { create(:project, :repository) }

    subject { create(:merge_request, importing: true, source_project: project) }

    let!(:merge_request_diff1) { subject.merge_request_diffs.create!(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') }
    let!(:merge_request_diff2) { subject.merge_request_diffs.create!(head_commit_sha: nil) }
    let!(:merge_request_diff3) { subject.merge_request_diffs.create!(head_commit_sha: '5937ac0a7beb003549fc5fd26fc247adbce4a52e') }

    context 'with diff refs' do
      it 'returns the diffs' do
        expect(subject.merge_request_diff_for(merge_request_diff1.diff_refs)).to eq(merge_request_diff1)
      end
    end

    context 'with a commit SHA' do
      it 'returns the diffs' do
        expect(subject.merge_request_diff_for(merge_request_diff3.head_commit_sha)).to eq(merge_request_diff3)
      end
    end

    it 'runs a single query on the initial call, and none afterwards' do
      expect { subject.merge_request_diff_for(merge_request_diff1.diff_refs) }
        .not_to exceed_query_limit(1)

      expect { subject.merge_request_diff_for(merge_request_diff2.diff_refs) }
        .not_to exceed_query_limit(0)

      expect { subject.merge_request_diff_for(merge_request_diff3.head_commit_sha) }
        .not_to exceed_query_limit(0)
    end
  end

  describe '#version_params_for' do
    let(:project) { create(:project, :repository) }

    subject { create(:merge_request, importing: true, source_project: project) }

    let!(:merge_request_diff1) { subject.merge_request_diffs.create!(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') }
    let!(:merge_request_diff2) { subject.merge_request_diffs.create!(head_commit_sha: nil) }
    let!(:merge_request_diff3) { subject.merge_request_diffs.create!(head_commit_sha: '5937ac0a7beb003549fc5fd26fc247adbce4a52e') }

    context 'when the diff refs are for an older merge request version' do
      let(:diff_refs) { merge_request_diff1.diff_refs }

      it 'returns the diff ID for the version to show' do
        expect(subject.version_params_for(diff_refs)).to eq(diff_id: merge_request_diff1.id)
      end
    end

    context 'when the diff refs are for a comparison between merge request versions' do
      let(:diff_refs) { merge_request_diff3.compare_with(merge_request_diff1.head_commit_sha).diff_refs }

      it 'returns the diff ID and start sha of the versions to compare' do
        expect(subject.version_params_for(diff_refs)).to eq(diff_id: merge_request_diff3.id, start_sha: merge_request_diff1.head_commit_sha)
      end
    end

    context 'when the diff refs are not for a merge request version' do
      let(:diff_refs) { project.commit(sample_commit.id).diff_refs }

      it 'returns nil' do
        expect(subject.version_params_for(diff_refs)).to be_nil
      end
    end
  end

  describe '#fetch_ref!' do
    let(:project) { create(:project, :repository) }

    subject { create(:merge_request, source_project: project) }

    it 'fetches the ref correctly' do
      expect { subject.target_project.repository.delete_refs(subject.ref_path) }.not_to raise_error

      subject.fetch_ref!
      expect(subject.target_project.repository.ref_exists?(subject.ref_path)).to be_truthy
    end
  end

  describe '#eager_fetch_ref!' do
    let(:project) { create(:project, :repository) }

    # We use build instead of create to test that an IID is allocated
    subject { build(:merge_request, source_project: project) }

    it 'fetches the ref correctly' do
      expect(subject.iid).to be_nil

      expect { subject.eager_fetch_ref! }.to change { subject.iid.to_i }.by(1)

      expect(subject.target_project.repository.ref_exists?(subject.ref_path)).to be_truthy
    end

    it 'only fetches the ref once after saved' do
      expect(subject.target_project.repository).to receive(:fetch_source_branch!).once.and_call_original

      subject.save!

      expect(subject.target_project.repository.ref_exists?(subject.ref_path)).to be_truthy
    end
  end

  describe 'removing a merge request' do
    it 'refreshes the number of open merge requests of the target project' do
      project = subject.target_project

      expect { subject.destroy! }
        .to change { project.open_merge_requests_count }.from(1).to(0)
    end
  end

  it_behaves_like 'throttled touch' do
    subject { create(:merge_request, updated_at: 1.hour.ago) }
  end

  context 'state machine transitions' do
    let(:project) { create(:project, :repository) }

    shared_examples_for 'transition not triggering mergeRequestMergeStatusUpdated GraphQL subscription' do
      specify do
        expect(GraphqlTriggers).not_to receive(:merge_request_merge_status_updated)

        transition!
      end
    end

    shared_examples_for 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription' do
      specify do
        expect(GraphqlTriggers).to receive(:merge_request_merge_status_updated).with(subject).and_call_original

        transition!
      end

      context 'when transaction is not committed' do
        it_behaves_like 'transition not triggering mergeRequestMergeStatusUpdated GraphQL subscription' do
          def transition!
            MergeRequest.transaction do
              super

              raise ActiveRecord::Rollback
            end
          end
        end
      end
    end

    shared_examples 'for an invalid state transition' do
      specify 'is not a valid state transition' do
        expect { transition! }.to raise_error(StateMachines::InvalidTransition)
      end
    end

    shared_examples 'for a valid state transition' do
      it 'is a valid state transition' do
        expect { transition! }
          .to change { subject.merge_status }
          .from(merge_status.to_s)
          .to(expected_merge_status)
      end
    end

    describe '#unlock_mr' do
      subject { create(:merge_request, state: 'locked', source_project: project, merge_jid: 123) }

      it 'updates merge request head pipeline and sets merge_jid to nil', :sidekiq_might_not_need_inline do
        pipeline = create(:ci_empty_pipeline, project: subject.project, ref: subject.source_branch, sha: subject.source_branch_sha)

        subject.unlock_mr

        subject.reload
        expect(subject.head_pipeline).to eq(pipeline)
        expect(subject.merge_jid).to be_nil
      end
    end

    describe '#mark_as_preparing' do
      subject { create(:merge_request, source_project: project, merge_status: merge_status) }

      let(:expected_merge_status) { 'preparing' }

      def transition!
        subject.mark_as_preparing!
      end

      context 'when the status is unchecked' do
        let(:merge_status) { :unchecked }

        include_examples 'for a valid state transition'
        it_behaves_like 'transition not triggering mergeRequestMergeStatusUpdated GraphQL subscription'
      end

      context 'when the status is checking' do
        let(:merge_status) { :checking }

        include_examples 'for an invalid state transition'
      end

      context 'when the status is can_be_merged' do
        let(:merge_status) { :can_be_merged }

        include_examples 'for an invalid state transition'
      end

      context 'when the status is cannot_be_merged_recheck' do
        let(:merge_status) { :cannot_be_merged_recheck }

        include_examples 'for an invalid state transition'
      end

      context 'when the status is cannot_be_merged' do
        let(:merge_status) { :cannot_be_merged }

        include_examples 'for an invalid state transition'
      end

      context 'when the status is cannot_be_merged_rechecking' do
        let(:merge_status) { :cannot_be_merged_rechecking }

        include_examples 'for an invalid state transition'
      end
    end

    describe '#mark_as_unchecked' do
      subject { create(:merge_request, source_project: project, merge_status: merge_status) }

      def transition!
        subject.mark_as_unchecked!
      end

      context 'when the status is unchecked' do
        let(:merge_status) { :unchecked }

        include_examples 'for an invalid state transition'
      end

      context 'when the status is checking' do
        let(:merge_status) { :checking }
        let(:expected_merge_status) { 'unchecked' }

        include_examples 'for a valid state transition'
        it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
      end

      context 'when the status is can_be_merged' do
        let(:merge_status) { :can_be_merged }
        let(:expected_merge_status) { 'unchecked' }

        include_examples 'for a valid state transition'
        it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
      end

      context 'when the status is cannot_be_merged_recheck' do
        let(:merge_status) { :cannot_be_merged_recheck }

        include_examples 'for an invalid state transition'
      end

      context 'when the status is cannot_be_merged' do
        let(:merge_status) { :cannot_be_merged }
        let(:expected_merge_status) { 'cannot_be_merged_recheck' }

        include_examples 'for a valid state transition'
        it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
      end

      context 'when the status is cannot_be_merged_rechecking' do
        let(:merge_status) { :cannot_be_merged_rechecking }
        let(:expected_merge_status) { 'cannot_be_merged_recheck' }

        include_examples 'for a valid state transition'
        it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
      end
    end

    describe '#mark_as_checking' do
      subject { create(:merge_request, source_project: project, merge_status: merge_status) }

      def transition!
        subject.mark_as_checking!
      end

      context 'when the status is unchecked' do
        let(:merge_status) { :unchecked }
        let(:expected_merge_status) { 'checking' }

        include_examples 'for a valid state transition'
        it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
      end

      context 'when the status is checking' do
        let(:merge_status) { :checking }

        include_examples 'for an invalid state transition'
      end

      context 'when the status is can_be_merged' do
        let(:merge_status) { :can_be_merged }

        include_examples 'for an invalid state transition'
      end

      context 'when the status is cannot_be_merged_recheck' do
        let(:merge_status) { :cannot_be_merged_recheck }
        let(:expected_merge_status) { 'cannot_be_merged_rechecking' }

        include_examples 'for a valid state transition'
        it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
      end

      context 'when the status is cannot_be_merged' do
        let(:merge_status) { :cannot_be_merged }

        include_examples 'for an invalid state transition'
      end

      context 'when the status is cannot_be_merged_rechecking' do
        let(:merge_status) { :cannot_be_merged_rechecking }

        include_examples 'for an invalid state transition'
      end
    end

    describe '#mark_as_mergeable' do
      subject { create(:merge_request, source_project: project, merge_status: merge_status) }

      let(:expected_merge_status) { 'can_be_merged' }

      def transition!
        subject.mark_as_mergeable!
      end

      context 'when the status is unchecked' do
        let(:merge_status) { :unchecked }

        include_examples 'for a valid state transition'
        it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
      end

      context 'when the status is checking' do
        let(:merge_status) { :checking }

        include_examples 'for a valid state transition'
        it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
      end

      context 'when the status is can_be_merged' do
        let(:merge_status) { :can_be_merged }

        include_examples 'for an invalid state transition'
      end

      context 'when the status is cannot_be_merged_recheck' do
        let(:merge_status) { :cannot_be_merged_recheck }

        include_examples 'for a valid state transition'
        it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
      end

      context 'when the status is cannot_be_merged' do
        let(:merge_status) { :cannot_be_merged }

        include_examples 'for an invalid state transition'
      end

      context 'when the status is cannot_be_merged_rechecking' do
        let(:merge_status) { :cannot_be_merged_rechecking }

        include_examples 'for a valid state transition'
        it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
      end
    end

    describe '#mark_as_unmergeable' do
      subject { create(:merge_request, source_project: project, merge_status: merge_status) }

      let(:expected_merge_status) { 'cannot_be_merged' }

      def transition!
        subject.mark_as_unmergeable!
      end

      context 'when the status is unchecked' do
        let(:merge_status) { :unchecked }

        include_examples 'for a valid state transition'
        it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
      end

      context 'when the status is checking' do
        let(:merge_status) { :checking }

        include_examples 'for a valid state transition'
        it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
      end

      context 'when the status is can_be_merged' do
        let(:merge_status) { :can_be_merged }

        include_examples 'for an invalid state transition'
      end

      context 'when the status is cannot_be_merged_recheck' do
        let(:merge_status) { :cannot_be_merged_recheck }

        include_examples 'for a valid state transition'
        it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
      end

      context 'when the status is cannot_be_merged' do
        let(:merge_status) { :cannot_be_merged }

        include_examples 'for an invalid state transition'
      end

      context 'when the status is cannot_be_merged_rechecking' do
        let(:merge_status) { :cannot_be_merged_rechecking }

        include_examples 'for a valid state transition'
        it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
      end
    end

    describe 'transition to closed' do
      context 'with merge error' do
        subject { create(:merge_request, merge_error: 'merge error') }

        it 'clears merge error' do
          subject.close!

          expect(subject.reload.merge_error).to eq(nil)
        end
      end
    end

    describe 'transition to cannot_be_merged' do
      let(:notification_service) { double(:notification_service) }
      let(:todo_service) { double(:todo_service) }

      subject { create(:merge_request, state, source_project: project, merge_status: :unchecked) }

      before do
        allow(NotificationService).to receive(:new).and_return(notification_service)
        allow(TodoService).to receive(:new).and_return(todo_service)

        allow(subject.project.repository).to receive(:can_be_merged?).and_return(false)
      end

      [:opened, :locked].each do |state|
        context state do
          let(:state) { state }

          it 'notifies conflict, but does not notify again if rechecking still results in cannot_be_merged' do
            expect(notification_service).to receive(:merge_request_unmergeable).with(subject).once
            expect(todo_service).to receive(:merge_request_became_unmergeable).with(subject).once

            subject.mark_as_unmergeable!

            subject.mark_as_unchecked!
            subject.mark_as_unmergeable!
          end

          it 'notifies conflict, but does not notify again if rechecking still results in cannot_be_merged with async mergeability check' do
            expect(notification_service).to receive(:merge_request_unmergeable).with(subject).once
            expect(todo_service).to receive(:merge_request_became_unmergeable).with(subject).once

            subject.mark_as_checking!
            subject.mark_as_unmergeable!

            subject.mark_as_unchecked!
            subject.mark_as_checking!
            subject.mark_as_unmergeable!
          end

          it 'notifies conflict, whenever newly unmergeable' do
            expect(notification_service).to receive(:merge_request_unmergeable).with(subject).twice
            expect(todo_service).to receive(:merge_request_became_unmergeable).with(subject).twice

            subject.mark_as_unmergeable!

            subject.mark_as_unchecked!
            subject.mark_as_mergeable!

            subject.mark_as_unchecked!
            subject.mark_as_unmergeable!
          end

          it 'notifies conflict, whenever newly unmergeable with async mergeability check' do
            expect(notification_service).to receive(:merge_request_unmergeable).with(subject).twice
            expect(todo_service).to receive(:merge_request_became_unmergeable).with(subject).twice

            subject.mark_as_checking!
            subject.mark_as_unmergeable!

            subject.mark_as_unchecked!
            subject.mark_as_checking!
            subject.mark_as_mergeable!

            subject.mark_as_unchecked!
            subject.mark_as_checking!
            subject.mark_as_unmergeable!
          end

          it 'does not notify whenever merge request is newly unmergeable due to other reasons' do
            allow(subject.project.repository).to receive(:can_be_merged?).and_return(true)

            expect(notification_service).not_to receive(:merge_request_unmergeable)
            expect(todo_service).not_to receive(:merge_request_became_unmergeable)

            subject.mark_as_unmergeable!
          end
        end
      end

      [:closed, :merged].each do |state|
        let(:state) { state }

        context state do
          it 'does not notify' do
            expect(notification_service).not_to receive(:merge_request_unmergeable)
            expect(todo_service).not_to receive(:merge_request_became_unmergeable)

            subject.mark_as_unmergeable!
          end
        end
      end

      context 'source branch is missing' do
        subject { create(:merge_request, :invalid, :opened, source_project: project, merge_status: :unchecked, target_branch: 'master') }

        before do
          allow(subject.project.repository).to receive(:can_be_merged?).and_call_original
        end

        it 'does not raise error' do
          expect(notification_service).not_to receive(:merge_request_unmergeable)
          expect(todo_service).not_to receive(:merge_request_became_unmergeable)

          expect { subject.mark_as_unmergeable }.not_to raise_error
          expect(subject.cannot_be_merged?).to eq(true)
        end
      end
    end

    describe 'check_state?' do
      it 'indicates whether MR is still checking for mergeability' do
        state_machine = described_class.state_machines[:merge_status]
        check_states = [:unchecked, :cannot_be_merged_recheck, :cannot_be_merged_rechecking, :checking]

        check_states.each do |merge_status|
          expect(state_machine.check_state?(merge_status)).to be true
        end

        (state_machine.states.map(&:name) - check_states).each do |merge_status|
          expect(state_machine.check_state?(merge_status)).to be false
        end
      end
    end
  end

  describe '#should_be_rebased?' do
    it 'returns false for the same source and target branches' do
      merge_request = build_stubbed(:merge_request, source_project: project, target_project: project)

      expect(merge_request.should_be_rebased?).to be_falsey
    end
  end

  describe '#rebase_in_progress?' do
    where(:rebase_jid, :jid_valid, :result) do
      'foo' | true  | true
      'foo' | false | false
      ''    | true  | false
      nil   | true  | false
    end

    with_them do
      let(:merge_request) { build_stubbed(:merge_request) }

      subject { merge_request.rebase_in_progress? }

      it do
        allow(Gitlab::SidekiqStatus).to receive(:running?).with(rebase_jid) { jid_valid }

        merge_request.rebase_jid = rebase_jid

        is_expected.to eq(result)
      end
    end
  end

  describe '#allow_collaboration' do
    let(:merge_request) do
      build(:merge_request, source_branch: 'fixes', allow_collaboration: true)
    end

    it 'is false when pushing by a maintainer is not possible' do
      expect(merge_request).to receive(:collaborative_push_possible?) { false }

      expect(merge_request.allow_collaboration).to be_falsy
    end

    it 'is true when pushing by a maintainer is possible' do
      expect(merge_request).to receive(:collaborative_push_possible?) { true }

      expect(merge_request.allow_collaboration).to be_truthy
    end
  end

  describe '#collaborative_push_possible?' do
    let(:merge_request) do
      build(:merge_request, source_branch: 'fixes')
    end

    before do
      allow(ProtectedBranch).to receive(:protected?) { false }
    end

    it 'does not allow maintainer to push if the source project is the same as the target' do
      merge_request.target_project = merge_request.source_project = create(:project, :public)

      expect(merge_request.collaborative_push_possible?).to be_falsy
    end

    it 'allows maintainer to push when both source and target are public' do
      merge_request.target_project = build(:project, :public)
      merge_request.source_project = build(:project, :public)

      expect(merge_request.collaborative_push_possible?).to be_truthy
    end

    it 'is not available for protected branches' do
      merge_request.target_project = build(:project, :public)
      merge_request.source_project = build(:project, :public)

      expect(ProtectedBranch).to receive(:protected?)
        .with(merge_request.source_project, 'fixes')
        .and_return(true)

      expect(merge_request.collaborative_push_possible?).to be_falsy
    end
  end

  describe '#can_allow_collaboration?' do
    let(:target_project) { create(:project, :public) }
    let(:source_project) { fork_project(target_project) }
    let(:merge_request) do
      create(:merge_request,
             source_project: source_project,
             source_branch: 'fixes',
             target_project: target_project)
    end

    let(:user) { create(:user) }

    before do
      allow(merge_request).to receive(:collaborative_push_possible?) { true }
    end

    it 'is false if the user does not have push access to the source project' do
      expect(merge_request.can_allow_collaboration?(user)).to be_falsy
    end

    it 'is true when the user has push access to the source project' do
      source_project.add_developer(user)

      expect(merge_request.can_allow_collaboration?(user)).to be_truthy
    end
  end

  describe '#merge_participants' do
    it 'contains author' do
      expect(subject.merge_participants).to contain_exactly(subject.author)
    end

    describe 'when merge_when_pipeline_succeeds? is true' do
      describe 'when merge user is author' do
        let(:user) { create(:user) }

        subject do
          create(:merge_request,
                 merge_when_pipeline_succeeds: true,
                 merge_user: user,
                 author: user)
        end

        context 'author is not a project member' do
          it 'is empty' do
            expect(subject.merge_participants).to be_empty
          end
        end

        context 'author is a project member' do
          before do
            subject.project.team.add_reporter(user)
          end

          it 'contains author only' do
            expect(subject.merge_participants).to contain_exactly(subject.author)
          end
        end
      end

      describe 'when merge user and author are different users' do
        let(:merge_user) { create(:user) }

        subject do
          create(:merge_request,
                 merge_when_pipeline_succeeds: true,
                 merge_user: merge_user)
        end

        before do
          subject.project.team.add_reporter(subject.author)
        end

        context 'merge user is not a member' do
          it 'contains author only' do
            expect(subject.merge_participants).to contain_exactly(subject.author)
          end
        end

        context 'both author and merge users are project members' do
          before do
            subject.project.team.add_reporter(merge_user)
          end

          it 'contains author and merge user' do
            expect(subject.merge_participants).to contain_exactly(subject.author, merge_user)
          end
        end
      end
    end
  end

  describe '.merge_request_ref?' do
    subject { described_class.merge_request_ref?(ref) }

    context 'when ref is ref name of a branch' do
      let(:ref) { 'feature' }

      it { is_expected.to be_falsey }
    end

    context 'when ref is HEAD ref path of a branch' do
      let(:ref) { 'refs/heads/feature' }

      it { is_expected.to be_falsey }
    end

    context 'when ref is HEAD ref path of a merge request' do
      let(:ref) { 'refs/merge-requests/1/head' }

      it { is_expected.to be_truthy }
    end

    context 'when ref is merge ref path of a merge request' do
      let(:ref) { 'refs/merge-requests/1/merge' }

      it { is_expected.to be_truthy }
    end
  end

  describe '.merge_train_ref?' do
    subject { described_class.merge_train_ref?(ref) }

    context 'when ref is ref name of a branch' do
      let(:ref) { 'feature' }

      it { is_expected.to be_falsey }
    end

    context 'when ref is HEAD ref path of a branch' do
      let(:ref) { 'refs/heads/feature' }

      it { is_expected.to be_falsey }
    end

    context 'when ref is HEAD ref path of a merge request' do
      let(:ref) { 'refs/merge-requests/1/head' }

      it { is_expected.to be_falsey }
    end

    context 'when ref is merge ref path of a merge request' do
      let(:ref) { 'refs/merge-requests/1/merge' }

      it { is_expected.to be_falsey }
    end

    context 'when ref is train ref path of a merge request' do
      let(:ref) { 'refs/merge-requests/1/train' }

      it { is_expected.to be_truthy }
    end
  end

  describe '#in_locked_state' do
    let(:merge_request) { create(:merge_request, :opened) }

    context 'when the merge request does not change state' do
      it 'returns to previous state and has no errors on the object' do
        expect(merge_request.opened?).to eq(true)

        merge_request.in_locked_state do
          expect(merge_request.locked?).to eq(true)
        end

        expect(merge_request.opened?).to eq(true)
        expect(merge_request.errors).to be_empty
      end
    end

    context 'when the merge request is merged while locked' do
      it 'becomes merged and has no errors on the object' do
        expect(merge_request.opened?).to eq(true)

        merge_request.in_locked_state do
          expect(merge_request.locked?).to eq(true)
          merge_request.mark_as_merged!
        end

        expect(merge_request.merged?).to eq(true)
        expect(merge_request.errors).to be_empty
      end
    end
  end

  describe '#cleanup_refs' do
    subject { merge_request.cleanup_refs(only: only) }

    let(:merge_request) { build(:merge_request, source_project: create(:project, :repository)) }

    context 'when removing all refs' do
      let(:only) { :all }

      it 'deletes all refs from the target project' do
        expect(merge_request.target_project.repository)
          .to receive(:delete_refs)
          .with(merge_request.ref_path, merge_request.merge_ref_path, merge_request.train_ref_path)

        subject
      end
    end

    context 'when removing only train ref' do
      let(:only) { :train }

      it 'deletes train ref from the target project' do
        expect(merge_request.target_project.repository)
          .to receive(:delete_refs)
          .with(merge_request.train_ref_path)

        subject
      end
    end
  end

  describe '.with_auto_merge_enabled' do
    let!(:project) { create(:project) }
    let!(:fork) { fork_project(project) }
    let!(:merge_request1) do
      create(:merge_request,
             :merge_when_pipeline_succeeds,
             target_project: project,
             target_branch: 'master',
             source_project: project,
             source_branch: 'feature-1')
    end

    let!(:merge_request4) do
      create(:merge_request,
             target_project: project,
             target_branch: 'master',
             source_project: fork,
             source_branch: 'fork-feature-2')
    end

    let(:query) { described_class.with_auto_merge_enabled }

    it { expect(query).to contain_exactly(merge_request1) }
  end

  it_behaves_like 'versioned description'

  describe '#commits' do
    context 'persisted merge request' do
      context 'with a limit' do
        it 'returns a limited number of commits' do
          expect(subject.commits(limit: 2).map(&:sha)).to eq(
            %w[
              b83d6e391c22777fca1ed3012fce84f633d7fed0
              498214de67004b1da3d820901307bed2a68a8ef6
            ])
          expect(subject.commits(limit: 3).map(&:sha)).to eq(
            %w[
              b83d6e391c22777fca1ed3012fce84f633d7fed0
              498214de67004b1da3d820901307bed2a68a8ef6
              1b12f15a11fc6e62177bef08f47bc7b5ce50b141
            ])
        end
      end

      context 'without a limit' do
        it 'returns all commits of the merge request diff' do
          expect(subject.commits.size).to eq(29)
        end
      end

      context 'with a page' do
        it 'returns a limited number of commits for page' do
          expect(subject.commits(limit: 1, page: 1).map(&:sha)).to eq(
            %w[
              b83d6e391c22777fca1ed3012fce84f633d7fed0
            ])
          expect(subject.commits(limit: 1, page: 2).map(&:sha)).to eq(
            %w[
              498214de67004b1da3d820901307bed2a68a8ef6
            ])
        end
      end
    end

    context 'new merge request' do
      subject { build(:merge_request) }

      context 'compare commits' do
        let(:first_commit) { double }
        let(:second_commit) { double }

        before do
          subject.compare_commits = [
            first_commit, second_commit
          ]
        end

        context 'without a limit' do
          it 'returns all the compare commits' do
            expect(subject.commits.to_a).to eq([second_commit, first_commit])
          end
        end

        context 'with a limit' do
          it 'returns a limited number of commits' do
            expect(subject.commits(limit: 1).to_a).to eq([second_commit])
          end
        end
      end
    end
  end

  describe '#recent_commits' do
    before do
      stub_const("#{MergeRequestDiff}::COMMITS_SAFE_SIZE", 2)
    end

    it 'returns the safe number of commits' do
      expect(subject.recent_commits.map(&:sha)).to eq(
        %w[
          b83d6e391c22777fca1ed3012fce84f633d7fed0 498214de67004b1da3d820901307bed2a68a8ef6
        ])
    end
  end

  describe '#recent_visible_deployments' do
    let(:merge_request) { create(:merge_request) }

    it 'returns visible deployments' do
      envs = create_list(:environment, 3, project: merge_request.target_project)

      created = create(
        :deployment,
        :created,
        project: merge_request.target_project,
        environment: envs[0]
      )

      success = create(
        :deployment,
        :success,
        project: merge_request.target_project,
        environment: envs[1]
      )

      failed = create(
        :deployment,
        :failed,
        project: merge_request.target_project,
        environment: envs[2]
      )

      merge_request_relation = MergeRequest.where(id: merge_request.id)
      created.link_merge_requests(merge_request_relation)
      success.link_merge_requests(merge_request_relation)
      failed.link_merge_requests(merge_request_relation)

      expect(merge_request.recent_visible_deployments).to eq([failed, success])
    end

    it 'only returns a limited number of deployments' do
      20.times do
        environment = create(:environment, project: merge_request.target_project)
        deploy = create(
          :deployment,
          :success,
          project: merge_request.target_project,
          environment: environment
        )

        deploy.link_merge_requests(MergeRequest.where(id: merge_request.id))
      end

      expect(merge_request.recent_visible_deployments.count).to eq(10)
    end
  end

  describe '#diffable_merge_ref?' do
    let(:merge_request) { create(:merge_request) }

    context 'merge request can be merged' do
      context 'merge_head diff is not created' do
        it 'returns false' do
          expect(merge_request.diffable_merge_ref?).to eq(false)
        end
      end

      context 'merge_head diff is created' do
        before do
          create(:merge_request_diff, :merge_head, merge_request: merge_request)
        end

        it 'returns true' do
          expect(merge_request.diffable_merge_ref?).to eq(true)
        end

        context 'merge request is merged' do
          before do
            merge_request.mark_as_merged!
          end

          it 'returns false' do
            expect(merge_request.diffable_merge_ref?).to eq(false)
          end
        end

        context 'merge request cannot be merged' do
          before do
            merge_request.mark_as_unchecked!
          end

          it 'returns false' do
            expect(merge_request.diffable_merge_ref?).to eq(false)
          end
        end
      end
    end
  end

  describe '#predefined_variables' do
    let(:merge_request) { create(:merge_request) }

    it 'caches all SQL-sourced data on the first call' do
      control = ActiveRecord::QueryRecorder.new { merge_request.predefined_variables }.count

      expect(control).to be > 0

      count = ActiveRecord::QueryRecorder.new { merge_request.predefined_variables }.count

      expect(count).to eq(0)
    end
  end

  describe 'banzai_render_context' do
    let(:project) { build(:project_empty_repo) }
    let(:merge_request) { build :merge_request, target_project: project, source_project: project }

    subject(:context) { merge_request.banzai_render_context(:title) }

    it 'sets the label_url_method in the context' do
      expect(context[:label_url_method]).to eq(:project_merge_requests_url)
    end
  end

  describe '#head_pipeline_builds_with_coverage' do
    it 'delegates to head_pipeline' do
      expect(subject)
        .to delegate_method(:builds_with_coverage)
        .to(:head_pipeline)
        .with_prefix
        .allow_nil
    end
  end

  describe '#merge_ref_head' do
    let(:merge_request) { create(:merge_request) }

    context 'when merge_ref_sha is not present' do
      let!(:result) do
        MergeRequests::MergeToRefService
          .new(project: merge_request.project, current_user: merge_request.author)
          .execute(merge_request)
      end

      it 'returns the commit based on merge ref path' do
        expect(merge_request.merge_ref_head.id).to eq(result[:commit_id])
      end
    end

    context 'when merge_ref_sha is present' do
      before do
        merge_request.update!(merge_ref_sha: merge_request.project.repository.commit.id)
      end

      it 'returns the commit based on cached merge_ref_sha' do
        expect(merge_request.merge_ref_head.id).to eq(merge_request.merge_ref_sha)
      end
    end
  end

  describe '#allows_reviewers?' do
    it 'returns true' do
      merge_request = build_stubbed(:merge_request)

      expect(merge_request.allows_reviewers?).to be(true)
    end
  end

  describe '#update_and_mark_in_progress_merge_commit_sha' do
    let(:ref) { subject.target_project.repository.commit.id }

    before do
      expect(subject.target_project).to receive(:mark_primary_write_location)
    end

    it 'updates commit ID' do
      expect { subject.update_and_mark_in_progress_merge_commit_sha(ref) }
        .to change { subject.in_progress_merge_commit_sha }
        .from(nil).to(ref)
    end
  end

  describe '#enabled_reports' do
    let(:project) { create(:project, :repository) }

    where(:report_type, :with_reports, :feature) do
      :sast             | :with_sast_reports             | :sast
      :secret_detection | :with_secret_detection_reports | :secret_detection
    end

    with_them do
      subject { merge_request.enabled_reports[report_type] }

      before do
        stub_licensed_features({ feature => true })
      end

      context "when head pipeline has reports" do
        let(:merge_request) { create(:merge_request, with_reports, source_project: project) }

        it { is_expected.to be_truthy }
      end

      context "when head pipeline does not have reports" do
        let(:merge_request) { create(:merge_request, source_project: project) }

        it { is_expected.to be_falsy }
      end
    end
  end

  describe '#includes_ci_config?' do
    let(:merge_request) { build(:merge_request) }
    let(:project) { merge_request.project }

    subject(:result) { merge_request.includes_ci_config? }

    before do
      allow(merge_request).to receive(:diff_stats).and_return(diff_stats)
    end

    context 'when diff_stats is nil' do
      let(:diff_stats) {}

      it { is_expected.to eq(false) }
    end

    context 'when diff_stats does not include the ci config path of the project' do
      let(:diff_stats) { [double(path: 'abc.txt')] }

      it { is_expected.to eq(false) }
    end

    context 'when diff_stats includes the ci config path of the project' do
      let(:diff_stats) { [double(path: '.gitlab-ci.yml')] }

      it { is_expected.to eq(true) }
    end
  end

  describe '.from_fork' do
    let!(:project) { create(:project, :repository) }
    let!(:forked_project) { fork_project(project) }
    let!(:fork_mr) { create(:merge_request, source_project: forked_project, target_project: project) }
    let!(:regular_mr) { create(:merge_request, source_project: project) }

    it 'returns merge requests from forks only' do
      expect(described_class.from_fork).to eq([fork_mr])
    end
  end

  it_behaves_like 'it has loose foreign keys' do
    let(:factory_name) { :merge_request }
  end

  context 'loose foreign key on merge_requests.head_pipeline_id' do
    it_behaves_like 'cleanup by a loose foreign key' do
      let!(:parent) { create(:ci_pipeline) }
      let!(:model) { create(:merge_request, head_pipeline: parent) }
    end
  end

  describe '#merge_blocked_by_other_mrs?' do
    it 'returns false when there are no blocking merge requests' do
      expect(subject.merge_blocked_by_other_mrs?).to be_falsy
    end
  end

  describe '#merge_request_reviewers_with' do
    let_it_be(:reviewer1) { create(:user) }
    let_it_be(:reviewer2) { create(:user) }

    before do
      subject.update!(reviewers: [reviewer1, reviewer2])
    end

    it 'returns reviewers' do
      reviewers = subject.merge_request_reviewers_with([reviewer1.id])

      expect(reviewers).to match_array([subject.merge_request_reviewers[0]])
    end
  end

  describe '#merge_request_assignees_with' do
    let_it_be(:assignee1) { create(:user) }
    let_it_be(:assignee2) { create(:user) }

    before do
      subject.update!(assignees: [assignee1, assignee2])
    end

    it 'returns assignees' do
      assignees = subject.merge_request_assignees_with([assignee1.id])

      expect(assignees).to match_array([subject.merge_request_assignees[0]])
    end
  end

  describe '#recent_diff_head_shas' do
    let_it_be(:merge_request_with_diffs) do
      params = {
        target_project: project,
        source_project: project,
        target_branch: 'master',
        source_branch: 'feature'
      }

      create(:merge_request, params).tap do |mr|
        4.times { mr.merge_request_diffs.create! }
        mr.create_merge_head_diff
      end
    end

    let(:shas) do
      # re-find to avoid caching the association
      described_class.find(merge_request_with_diffs.id).merge_request_diffs.order(id: :desc).pluck(:head_commit_sha)
    end

    shared_examples 'correctly sorted and limited diff_head_shas' do
      it 'has up to MAX_RECENT_DIFF_HEAD_SHAS, ordered most recent first' do
        stub_const('MergeRequest::MAX_RECENT_DIFF_HEAD_SHAS', 3)

        expect(subject.recent_diff_head_shas).to eq(shas.first(3))
      end

      it 'supports limits' do
        expect(subject.recent_diff_head_shas(2)).to eq(shas.first(2))
      end
    end

    context 'when the association is not loaded' do
      subject(:mr) { merge_request_with_diffs }

      include_examples 'correctly sorted and limited diff_head_shas'
    end

    context 'when the association is loaded' do
      subject(:mr) do
        described_class.where(id: merge_request_with_diffs.id).preload(:merge_request_diffs).first
      end

      include_examples 'correctly sorted and limited diff_head_shas'

      it 'does not issue any queries' do
        expect(subject).to be_a(described_class) # preload here

        expect { subject.recent_diff_head_shas }.not_to exceed_query_limit(0)
      end
    end
  end

  describe '#target_default_branch?' do
    let_it_be(:merge_request) { build(:merge_request, project: project) }

    it 'returns false' do
      expect(merge_request.target_default_branch?).to be false
    end

    context 'with target_branch equal to the project default branch' do
      before do
        merge_request.target_branch = "master"
      end

      it 'returns true' do
        expect(merge_request.target_default_branch?).to be true
      end
    end
  end

  describe '#can_suggest_reviewers?' do
    let_it_be(:merge_request) { build(:merge_request, :opened, project: project) }

    subject(:can_suggest_reviewers) { merge_request.can_suggest_reviewers? }

    it 'returns false' do
      expect(can_suggest_reviewers).to be(false)
    end
  end

  describe '#suggested_reviewer_users' do
    let_it_be(:merge_request) { build(:merge_request, project: project) }

    subject(:suggested_reviewer_users) { merge_request.suggested_reviewer_users }

    it { is_expected.to be_empty }
  end
end