# frozen_string_literal: true

require 'spec_helper'
describe Ci::Pipeline, :mailer do
|
2019-07-07 11:18:12 +05:30
|
|
|
include ProjectForksHelper
|
2019-09-04 21:01:54 +05:30
|
|
|
include StubRequests
|
2019-07-07 11:18:12 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
let(:user) { create(:user) }
|
2020-03-13 15:44:24 +05:30
|
|
|
let_it_be(:project) { create(:project) }
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
let(:pipeline) do
|
|
|
|
create(:ci_empty_pipeline, status: :created, project: project)
|
|
|
|
end
|
2016-06-16 23:09:34 +05:30
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
it_behaves_like 'having unique enum values'
|
|
|
|
|
2016-06-16 23:09:34 +05:30
|
|
|
it { is_expected.to belong_to(:project) }
|
2016-08-24 12:49:21 +05:30
|
|
|
it { is_expected.to belong_to(:user) }
|
2017-08-17 22:00:37 +05:30
|
|
|
it { is_expected.to belong_to(:auto_canceled_by) }
|
|
|
|
it { is_expected.to belong_to(:pipeline_schedule) }
|
2019-02-15 15:39:39 +05:30
|
|
|
it { is_expected.to belong_to(:merge_request) }
|
2019-12-04 20:38:33 +05:30
|
|
|
it { is_expected.to belong_to(:external_pull_request) }
|
2016-08-24 12:49:21 +05:30
|
|
|
|
2016-06-16 23:09:34 +05:30
|
|
|
it { is_expected.to have_many(:statuses) }
|
|
|
|
it { is_expected.to have_many(:trigger_requests) }
|
2017-09-10 17:25:29 +05:30
|
|
|
it { is_expected.to have_many(:variables) }
|
2016-06-16 23:09:34 +05:30
|
|
|
it { is_expected.to have_many(:builds) }
|
2017-08-17 22:00:37 +05:30
|
|
|
it { is_expected.to have_many(:auto_canceled_pipelines) }
|
|
|
|
it { is_expected.to have_many(:auto_canceled_jobs) }
|
2019-12-21 20:55:43 +05:30
|
|
|
it { is_expected.to have_many(:sourced_pipelines) }
|
|
|
|
it { is_expected.to have_many(:triggered_pipelines) }
|
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
it { is_expected.to have_one(:chat_data) }
|
2019-12-21 20:55:43 +05:30
|
|
|
it { is_expected.to have_one(:source_pipeline) }
|
|
|
|
it { is_expected.to have_one(:triggered_by_pipeline) }
|
|
|
|
it { is_expected.to have_one(:source_job) }
|
2020-03-13 15:44:24 +05:30
|
|
|
it { is_expected.to have_one(:pipeline_config) }
|
2016-08-24 12:49:21 +05:30
|
|
|
|
2017-09-10 17:25:29 +05:30
|
|
|
it { is_expected.to validate_presence_of(:sha) }
|
|
|
|
it { is_expected.to validate_presence_of(:status) }
|
2016-06-16 23:09:34 +05:30
|
|
|
|
|
|
|
it { is_expected.to respond_to :git_author_name }
|
|
|
|
it { is_expected.to respond_to :git_author_email }
|
|
|
|
it { is_expected.to respond_to :short_sha }
|
2018-03-17 18:26:18 +05:30
|
|
|
it { is_expected.to delegate_method(:full_path).to(:project).with_prefix }
|
|
|
|
|
|
|
|
describe 'associations' do
|
|
|
|
it 'has a bidirectional relationship with projects' do
|
2019-02-15 15:39:39 +05:30
|
|
|
expect(described_class.reflect_on_association(:project).has_inverse?).to eq(:all_pipelines)
|
|
|
|
expect(Project.reflect_on_association(:all_pipelines).has_inverse?).to eq(:project)
|
|
|
|
expect(Project.reflect_on_association(:ci_pipelines).has_inverse?).to eq(:project)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-03-02 22:35:43 +05:30
|
|
|
describe '.processables' do
|
|
|
|
before do
|
|
|
|
create(:ci_build, name: 'build', pipeline: pipeline)
|
|
|
|
create(:ci_bridge, name: 'bridge', pipeline: pipeline)
|
|
|
|
create(:commit_status, name: 'commit status', pipeline: pipeline)
|
|
|
|
create(:generic_commit_status, name: 'generic status', pipeline: pipeline)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'has an association with processable CI/CD entities' do
|
|
|
|
pipeline.processables.pluck('name').yield_self do |processables|
|
|
|
|
expect(processables).to match_array %w[build bridge]
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'makes it possible to append a new processable' do
|
|
|
|
pipeline.processables << build(:ci_bridge)
|
|
|
|
|
|
|
|
pipeline.save!
|
|
|
|
|
|
|
|
expect(pipeline.processables.reload.count).to eq 3
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
describe '.for_sha' do
|
|
|
|
subject { described_class.for_sha(sha) }
|
|
|
|
|
|
|
|
let(:sha) { 'abc' }
|
|
|
|
let!(:pipeline) { create(:ci_pipeline, sha: 'abc') }
|
|
|
|
|
|
|
|
it 'returns the pipeline' do
|
|
|
|
is_expected.to contain_exactly(pipeline)
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when argument is array' do
|
|
|
|
let(:sha) { %w[abc def] }
|
|
|
|
let!(:pipeline_2) { create(:ci_pipeline, sha: 'def') }
|
|
|
|
|
|
|
|
it 'returns the pipelines' do
|
|
|
|
is_expected.to contain_exactly(pipeline, pipeline_2)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when sha is empty' do
|
|
|
|
let(:sha) { nil }
|
|
|
|
|
|
|
|
it 'does not return anything' do
|
|
|
|
is_expected.to be_empty
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '.for_source_sha' do
|
|
|
|
subject { described_class.for_source_sha(source_sha) }
|
|
|
|
|
|
|
|
let(:source_sha) { 'abc' }
|
|
|
|
let!(:pipeline) { create(:ci_pipeline, source_sha: 'abc') }
|
|
|
|
|
|
|
|
it 'returns the pipeline' do
|
|
|
|
is_expected.to contain_exactly(pipeline)
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when argument is array' do
|
|
|
|
let(:source_sha) { %w[abc def] }
|
|
|
|
let!(:pipeline_2) { create(:ci_pipeline, source_sha: 'def') }
|
|
|
|
|
|
|
|
it 'returns the pipelines' do
|
|
|
|
is_expected.to contain_exactly(pipeline, pipeline_2)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when source_sha is empty' do
|
|
|
|
let(:source_sha) { nil }
|
|
|
|
|
|
|
|
it 'does not return anything' do
|
|
|
|
is_expected.to be_empty
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '.for_sha_or_source_sha' do
|
|
|
|
subject { described_class.for_sha_or_source_sha(sha) }
|
|
|
|
|
|
|
|
let(:sha) { 'abc' }
|
|
|
|
|
|
|
|
context 'when sha is matched' do
|
|
|
|
let!(:pipeline) { create(:ci_pipeline, sha: sha) }
|
|
|
|
|
|
|
|
it 'returns the pipeline' do
|
|
|
|
is_expected.to contain_exactly(pipeline)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when source sha is matched' do
|
|
|
|
let!(:pipeline) { create(:ci_pipeline, source_sha: sha) }
|
|
|
|
|
|
|
|
it 'returns the pipeline' do
|
|
|
|
is_expected.to contain_exactly(pipeline)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when both sha and source sha are not matched' do
|
|
|
|
let!(:pipeline) { create(:ci_pipeline, sha: 'bcd', source_sha: 'bcd') }
|
|
|
|
|
|
|
|
it 'does not return anything' do
|
|
|
|
is_expected.to be_empty
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2020-03-13 15:44:24 +05:30
|
|
|
describe '#merge_request?' do
|
|
|
|
let(:pipeline) { create(:ci_pipeline, merge_request: merge_request) }
|
|
|
|
let(:merge_request) { create(:merge_request) }
|
|
|
|
|
|
|
|
it 'returns true' do
|
|
|
|
expect(pipeline).to be_merge_request
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when merge request is nil' do
|
|
|
|
let(:merge_request) { nil }
|
|
|
|
|
|
|
|
it 'returns false' do
|
|
|
|
expect(pipeline).not_to be_merge_request
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
describe '#detached_merge_request_pipeline?' do
|
|
|
|
subject { pipeline.detached_merge_request_pipeline? }
|
|
|
|
|
|
|
|
let!(:pipeline) do
|
|
|
|
create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request, target_sha: target_sha)
|
|
|
|
end
|
|
|
|
|
|
|
|
let(:merge_request) { create(:merge_request) }
|
|
|
|
let(:target_sha) { nil }
|
|
|
|
|
|
|
|
it { is_expected.to be_truthy }
|
|
|
|
|
|
|
|
context 'when target sha exists' do
|
|
|
|
let(:target_sha) { merge_request.target_branch_sha }
|
|
|
|
|
|
|
|
it { is_expected.to be_falsy }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#merge_request_pipeline?' do
|
|
|
|
subject { pipeline.merge_request_pipeline? }
|
|
|
|
|
|
|
|
let!(:pipeline) do
|
|
|
|
create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request, target_sha: target_sha)
|
|
|
|
end
|
|
|
|
|
|
|
|
let(:merge_request) { create(:merge_request) }
|
|
|
|
let(:target_sha) { merge_request.target_branch_sha }
|
|
|
|
|
|
|
|
it { is_expected.to be_truthy }
|
|
|
|
|
|
|
|
context 'when target sha is empty' do
|
|
|
|
let(:target_sha) { nil }
|
|
|
|
|
|
|
|
it { is_expected.to be_falsy }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#merge_request_ref?' do
|
|
|
|
subject { pipeline.merge_request_ref? }
|
|
|
|
|
|
|
|
it 'calls MergeRequest#merge_request_ref?' do
|
|
|
|
expect(MergeRequest).to receive(:merge_request_ref?).with(pipeline.ref)
|
|
|
|
|
|
|
|
subject
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-12-04 20:38:33 +05:30
|
|
|
describe '#merge_request_event_type' do
|
|
|
|
subject { pipeline.merge_request_event_type }
|
|
|
|
|
2020-01-01 13:55:28 +05:30
|
|
|
let(:pipeline) { merge_request.all_pipelines.last }
|
2019-12-04 20:38:33 +05:30
|
|
|
|
|
|
|
context 'when pipeline is merge request pipeline' do
|
2020-01-01 13:55:28 +05:30
|
|
|
let(:merge_request) { create(:merge_request, :with_merge_request_pipeline) }
|
2019-12-04 20:38:33 +05:30
|
|
|
|
|
|
|
it { is_expected.to eq(:merged_result) }
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when pipeline is detached merge request pipeline' do
|
2020-01-01 13:55:28 +05:30
|
|
|
let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
|
2019-12-04 20:38:33 +05:30
|
|
|
|
|
|
|
it { is_expected.to eq(:detached) }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
describe '#legacy_detached_merge_request_pipeline?' do
|
|
|
|
subject { pipeline.legacy_detached_merge_request_pipeline? }
|
|
|
|
|
2020-03-13 15:44:24 +05:30
|
|
|
let_it_be(:merge_request) { create(:merge_request) }
|
2019-07-07 11:18:12 +05:30
|
|
|
let(:ref) { 'feature' }
|
|
|
|
let(:target_sha) { nil }
|
|
|
|
|
|
|
|
let(:pipeline) do
|
|
|
|
build(:ci_pipeline, source: :merge_request_event, merge_request: merge_request, ref: ref, target_sha: target_sha)
|
|
|
|
end
|
|
|
|
|
|
|
|
it { is_expected.to be_truthy }
|
|
|
|
|
|
|
|
context 'when pipeline ref is a merge request ref' do
|
|
|
|
let(:ref) { 'refs/merge-requests/1/head' }
|
|
|
|
|
|
|
|
it { is_expected.to be_falsy }
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when target sha is set' do
|
|
|
|
let(:target_sha) { 'target-sha' }
|
|
|
|
|
|
|
|
it { is_expected.to be_falsy }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#matches_sha_or_source_sha?' do
|
|
|
|
subject { pipeline.matches_sha_or_source_sha?(sample_sha) }
|
|
|
|
|
|
|
|
let(:sample_sha) { Digest::SHA1.hexdigest(SecureRandom.hex) }
|
|
|
|
|
|
|
|
context 'when sha matches' do
|
|
|
|
let(:pipeline) { build(:ci_pipeline, sha: sample_sha) }
|
|
|
|
|
|
|
|
it { is_expected.to be_truthy }
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when source_sha matches' do
|
|
|
|
let(:pipeline) { build(:ci_pipeline, source_sha: sample_sha) }
|
|
|
|
|
|
|
|
it { is_expected.to be_truthy }
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when both sha and source_sha do not matche' do
|
|
|
|
let(:pipeline) { build(:ci_pipeline, sha: 'test', source_sha: 'test') }
|
|
|
|
|
|
|
|
it { is_expected.to be_falsy }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#source_ref' do
|
|
|
|
subject { pipeline.source_ref }
|
|
|
|
|
|
|
|
let(:pipeline) { create(:ci_pipeline, ref: 'feature') }
|
|
|
|
|
|
|
|
it 'returns source ref' do
|
|
|
|
is_expected.to eq('feature')
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when the pipeline is a detached merge request pipeline' do
|
|
|
|
let(:merge_request) { create(:merge_request) }
|
|
|
|
|
|
|
|
let(:pipeline) do
|
|
|
|
create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request, ref: merge_request.ref_path)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns source ref' do
|
|
|
|
is_expected.to eq(merge_request.source_branch)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#source_ref_slug' do
|
|
|
|
subject { pipeline.source_ref_slug }
|
|
|
|
|
|
|
|
let(:pipeline) { create(:ci_pipeline, ref: 'feature') }
|
|
|
|
|
|
|
|
it 'slugifies with the source ref' do
|
|
|
|
expect(Gitlab::Utils).to receive(:slugify).with('feature')
|
|
|
|
|
|
|
|
subject
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when the pipeline is a detached merge request pipeline' do
|
|
|
|
let(:merge_request) { create(:merge_request) }
|
|
|
|
|
|
|
|
let(:pipeline) do
|
|
|
|
create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request, ref: merge_request.ref_path)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'slugifies with the source ref of the merge request' do
|
|
|
|
expect(Gitlab::Utils).to receive(:slugify).with(merge_request.source_branch)
|
|
|
|
|
|
|
|
subject
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '.with_reports' do
|
|
|
|
context 'when pipeline has a test report' do
|
2020-04-08 14:13:33 +05:30
|
|
|
subject { described_class.with_reports(Ci::JobArtifact.test_reports) }
|
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
let!(:pipeline_with_report) { create(:ci_pipeline, :with_test_reports) }
|
|
|
|
|
|
|
|
it 'selects the pipeline' do
|
|
|
|
is_expected.to eq([pipeline_with_report])
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2020-04-08 14:13:33 +05:30
|
|
|
context 'when pipeline has a coverage report' do
|
|
|
|
subject { described_class.with_reports(Ci::JobArtifact.coverage_reports) }
|
|
|
|
|
|
|
|
let!(:pipeline_with_report) { create(:ci_pipeline, :with_coverage_reports) }
|
|
|
|
|
|
|
|
it 'selects the pipeline' do
|
|
|
|
is_expected.to eq([pipeline_with_report])
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
context 'when pipeline does not have metrics reports' do
|
2020-04-08 14:13:33 +05:30
|
|
|
subject { described_class.with_reports(Ci::JobArtifact.test_reports) }
|
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
let!(:pipeline_without_report) { create(:ci_empty_pipeline) }
|
|
|
|
|
|
|
|
it 'does not select the pipeline' do
|
|
|
|
is_expected.to be_empty
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '.merge_request_event' do
|
|
|
|
subject { described_class.merge_request_event }
|
2019-02-15 15:39:39 +05:30
|
|
|
|
|
|
|
context 'when there is a merge request pipeline' do
|
2019-07-07 11:18:12 +05:30
|
|
|
let!(:pipeline) { create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request) }
|
2019-02-15 15:39:39 +05:30
|
|
|
let(:merge_request) { create(:merge_request) }
|
|
|
|
|
|
|
|
it 'returns merge request pipeline first' do
|
|
|
|
expect(subject).to eq([pipeline])
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when there are no merge request pipelines' do
|
|
|
|
let!(:pipeline) { create(:ci_pipeline, source: :push) }
|
|
|
|
|
|
|
|
it 'returns empty array' do
|
|
|
|
expect(subject).to be_empty
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
describe 'modules' do
|
|
|
|
it_behaves_like 'AtomicInternalId', validate_presence: false do
|
|
|
|
let(:internal_id_attribute) { :iid }
|
|
|
|
let(:instance) { build(:ci_pipeline) }
|
|
|
|
let(:scope) { :project }
|
|
|
|
let(:scope_attrs) { { project: instance.project } }
|
|
|
|
let(:usage) { :ci_pipelines }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-09-10 17:25:29 +05:30
|
|
|
describe '#source' do
|
|
|
|
context 'when creating new pipeline' do
|
|
|
|
let(:pipeline) do
|
|
|
|
build(:ci_empty_pipeline, status: :created, project: project, source: nil)
|
|
|
|
end
|
|
|
|
|
|
|
|
it "prevents from creating an object" do
|
|
|
|
expect(pipeline).not_to be_valid
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when updating existing pipeline' do
|
|
|
|
before do
|
|
|
|
pipeline.update_attribute(:source, nil)
|
|
|
|
end
|
|
|
|
|
|
|
|
it "object is valid" do
|
|
|
|
expect(pipeline).to be_valid
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
describe '#block' do
|
|
|
|
it 'changes pipeline status to manual' do
|
|
|
|
expect(pipeline.block).to be true
|
|
|
|
expect(pipeline.reload).to be_manual
|
|
|
|
expect(pipeline.reload).to be_blocked
|
|
|
|
end
|
|
|
|
end
|
2016-09-13 17:45:13 +05:30
|
|
|
|
2018-12-05 23:21:45 +05:30
|
|
|
describe '#delay' do
|
|
|
|
subject { pipeline.delay }
|
|
|
|
|
|
|
|
let(:pipeline) { build(:ci_pipeline, status: :created) }
|
|
|
|
|
|
|
|
it 'changes pipeline status to schedule' do
|
|
|
|
subject
|
|
|
|
|
|
|
|
expect(pipeline).to be_scheduled
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2016-08-24 12:49:21 +05:30
|
|
|
describe '#valid_commit_sha' do
|
2016-06-16 23:09:34 +05:30
|
|
|
context 'commit.sha can not start with 00000000' do
|
|
|
|
before do
|
|
|
|
pipeline.sha = '0' * 40
|
|
|
|
pipeline.valid_commit_sha
|
|
|
|
end
|
|
|
|
|
|
|
|
it('commit errors should not be empty') { expect(pipeline.errors).not_to be_empty }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2016-08-24 12:49:21 +05:30
|
|
|
describe '#short_sha' do
|
2016-06-16 23:09:34 +05:30
|
|
|
subject { pipeline.short_sha }
|
|
|
|
|
|
|
|
it 'has 8 items' do
|
|
|
|
expect(subject.size).to eq(8)
|
|
|
|
end
|
|
|
|
it { expect(pipeline.sha).to start_with(subject) }
|
|
|
|
end
|
|
|
|
|
2016-08-24 12:49:21 +05:30
|
|
|
describe '#retried' do
|
2016-06-16 23:09:34 +05:30
|
|
|
subject { pipeline.retried }
|
|
|
|
|
|
|
|
before do
|
2017-08-17 22:00:37 +05:30
|
|
|
@build1 = create(:ci_build, pipeline: pipeline, name: 'deploy', retried: true)
|
|
|
|
@build2 = create(:ci_build, pipeline: pipeline, name: 'deploy')
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns old builds' do
|
|
|
|
is_expected.to contain_exactly(@build1)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe "coverage" do
|
2017-09-10 17:25:29 +05:30
|
|
|
let(:project) { create(:project, build_coverage_regex: "/.*/") }
|
2017-08-17 22:00:37 +05:30
|
|
|
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
|
2016-06-16 23:09:34 +05:30
|
|
|
|
|
|
|
it "calculates average when there are two builds with coverage" do
|
2017-08-17 22:00:37 +05:30
|
|
|
create(:ci_build, name: "rspec", coverage: 30, pipeline: pipeline)
|
|
|
|
create(:ci_build, name: "rubocop", coverage: 40, pipeline: pipeline)
|
2016-06-16 23:09:34 +05:30
|
|
|
expect(pipeline.coverage).to eq("35.00")
|
|
|
|
end
|
|
|
|
|
|
|
|
it "calculates average when there are two builds with coverage and one with nil" do
|
2017-08-17 22:00:37 +05:30
|
|
|
create(:ci_build, name: "rspec", coverage: 30, pipeline: pipeline)
|
|
|
|
create(:ci_build, name: "rubocop", coverage: 40, pipeline: pipeline)
|
|
|
|
create(:ci_build, pipeline: pipeline)
|
2016-06-16 23:09:34 +05:30
|
|
|
expect(pipeline.coverage).to eq("35.00")
|
|
|
|
end
|
|
|
|
|
|
|
|
it "calculates average when there are two builds with coverage and one is retried" do
|
2017-08-17 22:00:37 +05:30
|
|
|
create(:ci_build, name: "rspec", coverage: 30, pipeline: pipeline)
|
|
|
|
create(:ci_build, name: "rubocop", coverage: 30, pipeline: pipeline, retried: true)
|
|
|
|
create(:ci_build, name: "rubocop", coverage: 40, pipeline: pipeline)
|
2016-06-16 23:09:34 +05:30
|
|
|
expect(pipeline.coverage).to eq("35.00")
|
|
|
|
end
|
|
|
|
|
|
|
|
it "calculates average when there is one build without coverage" do
|
2018-03-17 18:26:18 +05:30
|
|
|
FactoryBot.create(:ci_build, pipeline: pipeline)
|
2016-06-16 23:09:34 +05:30
|
|
|
expect(pipeline.coverage).to be_nil
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#retryable?' do
|
|
|
|
subject { pipeline.retryable? }
|
|
|
|
|
|
|
|
context 'no failed builds' do
|
|
|
|
before do
|
2016-11-03 12:29:30 +05:30
|
|
|
create_build('rspec', 'success')
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|
|
|
|
|
2016-11-03 12:29:30 +05:30
|
|
|
it 'is not retryable' do
|
2016-06-16 23:09:34 +05:30
|
|
|
is_expected.to be_falsey
|
|
|
|
end
|
2016-11-03 12:29:30 +05:30
|
|
|
|
|
|
|
context 'one canceled job' do
|
|
|
|
before do
|
|
|
|
create_build('rubocop', 'canceled')
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'is retryable' do
|
|
|
|
is_expected.to be_truthy
|
|
|
|
end
|
|
|
|
end
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
context 'with failed builds' do
|
|
|
|
before do
|
2016-11-03 12:29:30 +05:30
|
|
|
create_build('rspec', 'running')
|
|
|
|
create_build('rubocop', 'failed')
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|
|
|
|
|
2016-11-03 12:29:30 +05:30
|
|
|
it 'is retryable' do
|
2016-06-16 23:09:34 +05:30
|
|
|
is_expected.to be_truthy
|
|
|
|
end
|
|
|
|
end
|
2016-11-03 12:29:30 +05:30
|
|
|
|
|
|
|
def create_build(name, status)
|
|
|
|
create(:ci_build, name: name, status: status, pipeline: pipeline)
|
|
|
|
end
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
describe '#persisted_variables' do
|
|
|
|
context 'when pipeline is not persisted yet' do
|
|
|
|
subject { build(:ci_pipeline).persisted_variables }
|
|
|
|
|
|
|
|
it 'does not contain some variables' do
|
|
|
|
keys = subject.map { |variable| variable[:key] }
|
|
|
|
|
|
|
|
expect(keys).not_to include 'CI_PIPELINE_ID'
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when pipeline is persisted' do
|
|
|
|
subject { build_stubbed(:ci_pipeline).persisted_variables }
|
|
|
|
|
|
|
|
it 'does contains persisted variables' do
|
|
|
|
keys = subject.map { |variable| variable[:key] }
|
|
|
|
|
|
|
|
expect(keys).to eq %w[CI_PIPELINE_ID CI_PIPELINE_URL]
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
describe '#predefined_variables' do
|
|
|
|
subject { pipeline.predefined_variables }
|
|
|
|
|
2018-05-09 12:01:36 +05:30
|
|
|
it 'includes all predefined variables in a valid order' do
|
|
|
|
keys = subject.map { |variable| variable[:key] }
|
2018-03-17 18:26:18 +05:30
|
|
|
|
2020-01-01 13:55:28 +05:30
|
|
|
expect(keys).to eq %w[
|
|
|
|
CI_PIPELINE_IID
|
|
|
|
CI_PIPELINE_SOURCE
|
|
|
|
CI_CONFIG_PATH
|
|
|
|
CI_COMMIT_SHA
|
|
|
|
CI_COMMIT_SHORT_SHA
|
|
|
|
CI_COMMIT_BEFORE_SHA
|
|
|
|
CI_COMMIT_REF_NAME
|
|
|
|
CI_COMMIT_REF_SLUG
|
|
|
|
CI_COMMIT_BRANCH
|
|
|
|
CI_COMMIT_MESSAGE
|
|
|
|
CI_COMMIT_TITLE
|
|
|
|
CI_COMMIT_DESCRIPTION
|
|
|
|
CI_COMMIT_REF_PROTECTED
|
|
|
|
CI_BUILD_REF
|
|
|
|
CI_BUILD_BEFORE_SHA
|
|
|
|
CI_BUILD_REF_NAME
|
|
|
|
CI_BUILD_REF_SLUG
|
|
|
|
]
|
2018-05-09 12:01:36 +05:30
|
|
|
end
|
2019-02-15 15:39:39 +05:30
|
|
|
|
2020-03-13 15:44:24 +05:30
|
|
|
context 'when pipeline is merge request' do
|
2019-02-15 15:39:39 +05:30
|
|
|
let(:pipeline) do
|
2020-03-13 15:44:24 +05:30
|
|
|
create(:ci_pipeline, merge_request: merge_request)
|
2019-02-15 15:39:39 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
let(:merge_request) do
|
|
|
|
create(:merge_request,
|
|
|
|
source_project: project,
|
|
|
|
source_branch: 'feature',
|
|
|
|
target_project: project,
|
2019-07-07 11:18:12 +05:30
|
|
|
target_branch: 'master',
|
2019-07-31 22:56:46 +05:30
|
|
|
assignees: assignees,
|
2019-07-07 11:18:12 +05:30
|
|
|
milestone: milestone,
|
|
|
|
labels: labels)
|
2019-02-15 15:39:39 +05:30
|
|
|
end
|
|
|
|
|
2019-07-31 22:56:46 +05:30
|
|
|
let(:assignees) { create_list(:user, 2) }
|
2019-07-07 11:18:12 +05:30
|
|
|
let(:milestone) { create(:milestone, project: project) }
|
|
|
|
let(:labels) { create_list(:label, 2) }
|
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
it 'exposes merge request pipeline variables' do
|
|
|
|
expect(subject.to_hash)
|
|
|
|
.to include(
|
|
|
|
'CI_MERGE_REQUEST_ID' => merge_request.id.to_s,
|
|
|
|
'CI_MERGE_REQUEST_IID' => merge_request.iid.to_s,
|
|
|
|
'CI_MERGE_REQUEST_REF_PATH' => merge_request.ref_path.to_s,
|
|
|
|
'CI_MERGE_REQUEST_PROJECT_ID' => merge_request.project.id.to_s,
|
|
|
|
'CI_MERGE_REQUEST_PROJECT_PATH' => merge_request.project.full_path,
|
|
|
|
'CI_MERGE_REQUEST_PROJECT_URL' => merge_request.project.web_url,
|
|
|
|
'CI_MERGE_REQUEST_TARGET_BRANCH_NAME' => merge_request.target_branch.to_s,
|
2019-07-07 11:18:12 +05:30
|
|
|
'CI_MERGE_REQUEST_TARGET_BRANCH_SHA' => pipeline.target_sha.to_s,
|
2019-02-15 15:39:39 +05:30
|
|
|
'CI_MERGE_REQUEST_SOURCE_PROJECT_ID' => merge_request.source_project.id.to_s,
|
|
|
|
'CI_MERGE_REQUEST_SOURCE_PROJECT_PATH' => merge_request.source_project.full_path,
|
|
|
|
'CI_MERGE_REQUEST_SOURCE_PROJECT_URL' => merge_request.source_project.web_url,
|
2019-07-07 11:18:12 +05:30
|
|
|
'CI_MERGE_REQUEST_SOURCE_BRANCH_NAME' => merge_request.source_branch.to_s,
|
|
|
|
'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA' => pipeline.source_sha.to_s,
|
|
|
|
'CI_MERGE_REQUEST_TITLE' => merge_request.title,
|
2019-07-31 22:56:46 +05:30
|
|
|
'CI_MERGE_REQUEST_ASSIGNEES' => merge_request.assignee_username_list,
|
2019-07-07 11:18:12 +05:30
|
|
|
'CI_MERGE_REQUEST_MILESTONE' => milestone.title,
|
2020-03-13 15:44:24 +05:30
|
|
|
'CI_MERGE_REQUEST_LABELS' => labels.map(&:title).sort.join(','),
|
2019-12-04 20:38:33 +05:30
|
|
|
'CI_MERGE_REQUEST_EVENT_TYPE' => pipeline.merge_request_event_type.to_s)
|
2019-02-15 15:39:39 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
context 'when source project does not exist' do
|
|
|
|
before do
|
|
|
|
merge_request.update_column(:source_project_id, nil)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'does not expose source project related variables' do
|
|
|
|
expect(subject.to_hash.keys).not_to include(
|
|
|
|
%w[CI_MERGE_REQUEST_SOURCE_PROJECT_ID
|
|
|
|
CI_MERGE_REQUEST_SOURCE_PROJECT_PATH
|
|
|
|
CI_MERGE_REQUEST_SOURCE_PROJECT_URL
|
|
|
|
CI_MERGE_REQUEST_SOURCE_BRANCH_NAME])
|
|
|
|
end
|
|
|
|
end
|
2019-07-07 11:18:12 +05:30
|
|
|
|
|
|
|
context 'without assignee' do
|
2019-07-31 22:56:46 +05:30
|
|
|
let(:assignees) { [] }
|
2019-07-07 11:18:12 +05:30
|
|
|
|
|
|
|
it 'does not expose assignee variable' do
|
|
|
|
expect(subject.to_hash.keys).not_to include('CI_MERGE_REQUEST_ASSIGNEES')
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'without milestone' do
|
|
|
|
let(:milestone) { nil }
|
|
|
|
|
|
|
|
it 'does not expose milestone variable' do
|
|
|
|
expect(subject.to_hash.keys).not_to include('CI_MERGE_REQUEST_MILESTONE')
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'without labels' do
|
|
|
|
let(:labels) { [] }
|
|
|
|
|
|
|
|
it 'does not expose labels variable' do
|
|
|
|
expect(subject.to_hash.keys).not_to include('CI_MERGE_REQUEST_LABELS')
|
|
|
|
end
|
|
|
|
end
|
2019-02-15 15:39:39 +05:30
|
|
|
end
|
2019-12-04 20:38:33 +05:30
|
|
|
|
|
|
|
context 'when source is external pull request' do
|
|
|
|
let(:pipeline) do
|
|
|
|
create(:ci_pipeline, source: :external_pull_request_event, external_pull_request: pull_request)
|
|
|
|
end
|
|
|
|
|
|
|
|
let(:pull_request) { create(:external_pull_request, project: project) }
|
|
|
|
|
|
|
|
it 'exposes external pull request pipeline variables' do
|
|
|
|
expect(subject.to_hash)
|
|
|
|
.to include(
|
|
|
|
'CI_EXTERNAL_PULL_REQUEST_IID' => pull_request.pull_request_iid.to_s,
|
|
|
|
'CI_EXTERNAL_PULL_REQUEST_SOURCE_BRANCH_SHA' => pull_request.source_sha,
|
|
|
|
'CI_EXTERNAL_PULL_REQUEST_TARGET_BRANCH_SHA' => pull_request.target_sha,
|
|
|
|
'CI_EXTERNAL_PULL_REQUEST_SOURCE_BRANCH_NAME' => pull_request.source_branch,
|
|
|
|
'CI_EXTERNAL_PULL_REQUEST_TARGET_BRANCH_NAME' => pull_request.target_branch
|
|
|
|
)
|
|
|
|
end
|
|
|
|
end
|
2018-05-09 12:01:36 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
describe '#protected_ref?' do
|
2019-01-03 12:48:30 +05:30
|
|
|
before do
|
|
|
|
pipeline.project = create(:project, :repository)
|
|
|
|
end
|
|
|
|
|
2018-05-09 12:01:36 +05:30
|
|
|
it 'delegates method to project' do
|
|
|
|
expect(pipeline).not_to be_protected_ref
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#legacy_trigger' do
|
|
|
|
let(:trigger_request) { create(:ci_trigger_request) }
|
2018-03-17 18:26:18 +05:30
|
|
|
|
2018-05-09 12:01:36 +05:30
|
|
|
before do
|
|
|
|
pipeline.trigger_requests << trigger_request
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns first trigger request' do
|
|
|
|
expect(pipeline.legacy_trigger).to eq trigger_request
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
describe '#auto_canceled?' do
|
|
|
|
subject { pipeline.auto_canceled? }
|
|
|
|
|
|
|
|
context 'when it is canceled' do
|
|
|
|
before do
|
|
|
|
pipeline.cancel
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when there is auto_canceled_by' do
|
|
|
|
before do
|
|
|
|
pipeline.update(auto_canceled_by: create(:ci_empty_pipeline))
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'is auto canceled' do
|
|
|
|
is_expected.to be_truthy
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when there is no auto_canceled_by' do
|
|
|
|
it 'is not auto canceled' do
|
|
|
|
is_expected.to be_falsey
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when it is retried and canceled manually' do
|
|
|
|
before do
|
|
|
|
pipeline.enqueue
|
|
|
|
pipeline.cancel
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'is not auto canceled' do
|
|
|
|
is_expected.to be_falsey
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
2016-06-16 23:09:34 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
describe 'pipeline stages' do
|
2018-05-09 12:01:36 +05:30
|
|
|
describe 'legacy stages' do
|
2017-08-17 22:00:37 +05:30
|
|
|
before do
|
2018-05-09 12:01:36 +05:30
|
|
|
create(:commit_status, pipeline: pipeline,
|
|
|
|
stage: 'build',
|
|
|
|
name: 'linux',
|
|
|
|
stage_idx: 0,
|
|
|
|
status: 'success')
|
|
|
|
|
|
|
|
create(:commit_status, pipeline: pipeline,
|
|
|
|
stage: 'build',
|
|
|
|
name: 'mac',
|
|
|
|
stage_idx: 0,
|
|
|
|
status: 'failed')
|
|
|
|
|
|
|
|
create(:commit_status, pipeline: pipeline,
|
|
|
|
stage: 'deploy',
|
|
|
|
name: 'staging',
|
|
|
|
stage_idx: 2,
|
|
|
|
status: 'running')
|
|
|
|
|
|
|
|
create(:commit_status, pipeline: pipeline,
|
|
|
|
stage: 'test',
|
|
|
|
name: 'rspec',
|
|
|
|
stage_idx: 1,
|
|
|
|
status: 'success')
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#legacy_stages' do
|
2019-12-21 20:55:43 +05:30
|
|
|
using RSpec::Parameterized::TableSyntax
|
|
|
|
|
2018-05-09 12:01:36 +05:30
|
|
|
subject { pipeline.legacy_stages }
|
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
where(:ci_composite_status) do
|
|
|
|
[false, true]
|
2018-05-09 12:01:36 +05:30
|
|
|
end
|
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
with_them do
|
|
|
|
before do
|
|
|
|
stub_feature_flags(ci_composite_status: ci_composite_status)
|
2018-05-09 12:01:36 +05:30
|
|
|
end
|
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
context 'stages list' do
|
|
|
|
it 'returns ordered list of stages' do
|
|
|
|
expect(subject.map(&:name)).to eq(%w[build test deploy])
|
|
|
|
end
|
2018-05-09 12:01:36 +05:30
|
|
|
end
|
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
context 'stages with statuses' do
|
|
|
|
let(:statuses) do
|
|
|
|
subject.map { |stage| [stage.name, stage.status] }
|
2018-05-09 12:01:36 +05:30
|
|
|
end
|
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
it 'returns list of stages with correct statuses' do
|
|
|
|
expect(statuses).to eq([%w(build failed),
|
2018-05-09 12:01:36 +05:30
|
|
|
%w(test success),
|
|
|
|
%w(deploy running)])
|
|
|
|
end
|
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
context 'when commit status is retried' do
|
|
|
|
before do
|
|
|
|
create(:commit_status, pipeline: pipeline,
|
|
|
|
stage: 'build',
|
|
|
|
name: 'mac',
|
|
|
|
stage_idx: 0,
|
|
|
|
status: 'success')
|
|
|
|
|
2020-01-01 13:55:28 +05:30
|
|
|
Ci::ProcessPipelineService
|
|
|
|
.new(pipeline)
|
|
|
|
.execute
|
2019-12-21 20:55:43 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'ignores the previous state' do
|
|
|
|
expect(statuses).to eq([%w(build success),
|
|
|
|
%w(test success),
|
|
|
|
%w(deploy running)])
|
|
|
|
end
|
|
|
|
end
|
2018-05-09 12:01:36 +05:30
|
|
|
end
|
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
context 'when there is a stage with warnings' do
|
|
|
|
before do
|
|
|
|
create(:commit_status, pipeline: pipeline,
|
|
|
|
stage: 'deploy',
|
|
|
|
name: 'prod:2',
|
|
|
|
stage_idx: 2,
|
|
|
|
status: 'failed',
|
|
|
|
allow_failure: true)
|
|
|
|
end
|
2018-05-09 12:01:36 +05:30
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
it 'populates stage with correct number of warnings' do
|
|
|
|
deploy_stage = pipeline.legacy_stages.third
|
|
|
|
|
|
|
|
expect(deploy_stage).not_to receive(:statuses)
|
|
|
|
expect(deploy_stage).to have_warnings
|
|
|
|
end
|
2018-05-09 12:01:36 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#stages_count' do
|
|
|
|
it 'returns a valid number of stages' do
|
|
|
|
expect(pipeline.stages_count).to eq(3)
|
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
2018-05-09 12:01:36 +05:30
|
|
|
describe '#stages_names' do
|
|
|
|
it 'returns a valid names of stages' do
|
|
|
|
expect(pipeline.stages_names).to eq(%w(build test deploy))
|
|
|
|
end
|
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
2018-05-09 12:01:36 +05:30
|
|
|
describe '#legacy_stage' do
|
|
|
|
subject { pipeline.legacy_stage('test') }
|
|
|
|
|
|
|
|
context 'with status in stage' do
|
|
|
|
before do
|
|
|
|
create(:commit_status, pipeline: pipeline, stage: 'test')
|
|
|
|
end
|
|
|
|
|
|
|
|
it { expect(subject).to be_a Ci::LegacyStage }
|
|
|
|
it { expect(subject.name).to eq 'test' }
|
|
|
|
it { expect(subject.statuses).not_to be_empty }
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
2016-06-16 23:09:34 +05:30
|
|
|
|
2018-05-09 12:01:36 +05:30
|
|
|
context 'without status in stage' do
|
|
|
|
before do
|
|
|
|
create(:commit_status, pipeline: pipeline, stage: 'build')
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'return stage object' do
|
|
|
|
is_expected.to be_nil
|
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|
2018-11-08 19:23:39 +05:30
|
|
|
|
|
|
|
describe '#stages' do
|
|
|
|
before do
|
|
|
|
create(:ci_stage_entity, project: project,
|
|
|
|
pipeline: pipeline,
|
|
|
|
name: 'build')
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns persisted stages' do
|
|
|
|
expect(pipeline.stages).not_to be_empty
|
|
|
|
expect(pipeline.stages).to all(be_persisted)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#ordered_stages' do
|
|
|
|
before do
|
|
|
|
create(:ci_stage_entity, project: project,
|
|
|
|
pipeline: pipeline,
|
|
|
|
position: 4,
|
|
|
|
name: 'deploy')
|
|
|
|
|
|
|
|
create(:ci_build, project: project,
|
|
|
|
pipeline: pipeline,
|
|
|
|
stage: 'test',
|
|
|
|
stage_idx: 3,
|
|
|
|
name: 'test')
|
|
|
|
|
|
|
|
create(:ci_build, project: project,
|
|
|
|
pipeline: pipeline,
|
|
|
|
stage: 'build',
|
|
|
|
stage_idx: 2,
|
|
|
|
name: 'build')
|
|
|
|
|
|
|
|
create(:ci_stage_entity, project: project,
|
|
|
|
pipeline: pipeline,
|
|
|
|
position: 1,
|
|
|
|
name: 'sanity')
|
|
|
|
|
|
|
|
create(:ci_stage_entity, project: project,
|
|
|
|
pipeline: pipeline,
|
|
|
|
position: 5,
|
|
|
|
name: 'cleanup')
|
|
|
|
end
|
|
|
|
|
|
|
|
subject { pipeline.ordered_stages }
|
|
|
|
|
|
|
|
context 'when using legacy stages' do
|
|
|
|
before do
|
|
|
|
stub_feature_flags(ci_pipeline_persisted_stages: false)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns legacy stages in valid order' do
|
|
|
|
expect(subject.map(&:name)).to eq %w[build test]
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when using persisted stages' do
|
|
|
|
before do
|
|
|
|
stub_feature_flags(ci_pipeline_persisted_stages: true)
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when pipelines is not complete' do
|
|
|
|
it 'still returns legacy stages' do
|
|
|
|
expect(subject).to all(be_a Ci::LegacyStage)
|
|
|
|
expect(subject.map(&:name)).to eq %w[build test]
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when pipeline is complete' do
|
|
|
|
before do
|
|
|
|
pipeline.succeed!
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns stages in valid order' do
|
|
|
|
expect(subject).to all(be_a Ci::Stage)
|
|
|
|
expect(subject.map(&:name))
|
|
|
|
.to eq %w[sanity build test deploy cleanup]
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|
|
|
|
|
2016-09-13 17:45:13 +05:30
|
|
|
describe 'state machine' do
|
|
|
|
let(:current) { Time.now.change(usec: 0) }
|
2017-08-17 22:00:37 +05:30
|
|
|
let(:build) { create_build('build1', queued_at: 0) }
|
|
|
|
let(:build_b) { create_build('build2', queued_at: 0) }
|
|
|
|
let(:build_c) { create_build('build3', queued_at: 0) }
|
2016-09-13 17:45:13 +05:30
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
%w[succeed! drop! cancel! skip!].each do |action|
|
|
|
|
context "when the pipeline recieved #{action} event" do
|
|
|
|
it 'deletes a persistent ref' do
|
|
|
|
expect(pipeline.persistent_ref).to receive(:delete).once
|
|
|
|
|
|
|
|
pipeline.public_send(action)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2020-03-13 15:44:24 +05:30
|
|
|
describe '#duration', :sidekiq_inline do
|
2017-08-17 22:00:37 +05:30
|
|
|
context 'when multiple builds are finished' do
|
|
|
|
before do
|
|
|
|
travel_to(current + 30) do
|
|
|
|
build.run!
|
2020-03-13 15:44:24 +05:30
|
|
|
build.reload.success!
|
2017-08-17 22:00:37 +05:30
|
|
|
build_b.run!
|
|
|
|
build_c.run!
|
|
|
|
end
|
2016-09-29 09:46:39 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
travel_to(current + 40) do
|
2020-03-13 15:44:24 +05:30
|
|
|
build_b.reload.drop!
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
2016-09-29 09:46:39 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
travel_to(current + 70) do
|
2020-03-13 15:44:24 +05:30
|
|
|
build_c.reload.success!
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
2016-09-13 17:45:13 +05:30
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
it 'matches sum of builds duration' do
|
|
|
|
pipeline.reload
|
2016-09-29 09:46:39 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(pipeline.duration).to eq(40)
|
2016-09-29 09:46:39 +05:30
|
|
|
end
|
2016-09-13 17:45:13 +05:30
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
context 'when pipeline becomes blocked' do
|
|
|
|
let!(:build) { create_build('build:1') }
|
|
|
|
let!(:action) { create_build('manual:action', :manual) }
|
|
|
|
|
|
|
|
before do
|
|
|
|
travel_to(current + 1.minute) do
|
|
|
|
build.run!
|
|
|
|
end
|
|
|
|
|
|
|
|
travel_to(current + 5.minutes) do
|
2020-03-13 15:44:24 +05:30
|
|
|
build.reload.success!
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'recalculates pipeline duration' do
|
|
|
|
pipeline.reload
|
2016-09-29 09:46:39 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(pipeline).to be_manual
|
|
|
|
expect(pipeline.duration).to eq 4.minutes
|
|
|
|
end
|
2016-09-13 17:45:13 +05:30
|
|
|
end
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|
|
|
|
|
2016-09-13 17:45:13 +05:30
|
|
|
describe '#started_at' do
|
2019-07-07 11:18:12 +05:30
|
|
|
let(:pipeline) { create(:ci_empty_pipeline, status: from_status) }
|
|
|
|
|
|
|
|
%i[created preparing pending].each do |status|
|
|
|
|
context "from #{status}" do
|
|
|
|
let(:from_status) { status }
|
|
|
|
|
|
|
|
it 'updates on transitioning to running' do
|
|
|
|
pipeline.run
|
2016-06-16 23:09:34 +05:30
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
expect(pipeline.started_at).not_to be_nil
|
|
|
|
end
|
|
|
|
end
|
2016-09-13 17:45:13 +05:30
|
|
|
end
|
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
context 'from created' do
|
|
|
|
let(:from_status) { :created }
|
|
|
|
|
|
|
|
it 'does not update on transitioning to success' do
|
|
|
|
pipeline.succeed
|
2016-09-13 17:45:13 +05:30
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
expect(pipeline.started_at).to be_nil
|
|
|
|
end
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2016-09-13 17:45:13 +05:30
|
|
|
describe '#finished_at' do
|
2019-12-26 22:10:19 +05:30
|
|
|
it 'updates on transitioning to success', :sidekiq_might_not_need_inline do
|
2016-09-13 17:45:13 +05:30
|
|
|
build.success
|
2016-06-16 23:09:34 +05:30
|
|
|
|
2016-09-13 17:45:13 +05:30
|
|
|
expect(pipeline.reload.finished_at).not_to be_nil
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|
|
|
|
|
2016-09-13 17:45:13 +05:30
|
|
|
it 'does not update on transitioning to running' do
|
|
|
|
build.run
|
|
|
|
|
|
|
|
expect(pipeline.reload.finished_at).to be_nil
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|
|
|
|
end
|
2016-09-29 09:46:39 +05:30
|
|
|
|
2016-11-03 12:29:30 +05:30
|
|
|
describe 'merge request metrics' do
|
2019-07-07 11:18:12 +05:30
|
|
|
let(:pipeline) { create(:ci_empty_pipeline, status: from_status) }
|
2016-09-29 09:46:39 +05:30
|
|
|
|
2016-11-03 12:29:30 +05:30
|
|
|
before do
|
|
|
|
expect(PipelineMetricsWorker).to receive(:perform_async).with(pipeline.id)
|
|
|
|
end
|
2016-09-29 09:46:39 +05:30
|
|
|
|
2016-11-03 12:29:30 +05:30
|
|
|
context 'when transitioning to running' do
|
2019-07-07 11:18:12 +05:30
|
|
|
%i[created preparing pending].each do |status|
|
|
|
|
context "from #{status}" do
|
|
|
|
let(:from_status) { status }
|
|
|
|
|
|
|
|
it 'schedules metrics workers' do
|
|
|
|
pipeline.run
|
|
|
|
end
|
|
|
|
end
|
2016-09-29 09:46:39 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when transitioning to success' do
|
2019-07-07 11:18:12 +05:30
|
|
|
let(:from_status) { 'created' }
|
|
|
|
|
2016-11-03 12:29:30 +05:30
|
|
|
it 'schedules metrics workers' do
|
|
|
|
pipeline.succeed
|
2016-09-29 09:46:39 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
describe 'merge on success' do
|
|
|
|
let(:pipeline) { create(:ci_empty_pipeline, status: from_status) }
|
|
|
|
|
|
|
|
%i[created preparing pending running].each do |status|
|
|
|
|
context "from #{status}" do
|
|
|
|
let(:from_status) { status }
|
|
|
|
|
|
|
|
it 'schedules pipeline success worker' do
|
|
|
|
expect(PipelineSuccessWorker).to receive(:perform_async).with(pipeline.id)
|
|
|
|
|
|
|
|
pipeline.succeed
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
describe 'pipeline caching' do
|
2020-03-13 15:44:24 +05:30
|
|
|
before do
|
|
|
|
pipeline.config_source = 'repository_source'
|
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
it 'performs ExpirePipelinesCacheWorker' do
|
|
|
|
expect(ExpirePipelineCacheWorker).to receive(:perform_async).with(pipeline.id)
|
|
|
|
|
|
|
|
pipeline.cancel
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-09-04 21:01:54 +05:30
|
|
|
describe 'auto merge' do
|
|
|
|
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
|
|
|
|
|
|
|
|
let(:pipeline) do
|
|
|
|
create(:ci_pipeline, :running, project: merge_request.source_project,
|
|
|
|
ref: merge_request.source_branch,
|
|
|
|
sha: merge_request.diff_head_sha)
|
|
|
|
end
|
|
|
|
|
|
|
|
before do
|
|
|
|
merge_request.update_head_pipeline
|
|
|
|
end
|
|
|
|
|
|
|
|
%w[succeed! drop! cancel! skip!].each do |action|
|
|
|
|
context "when the pipeline recieved #{action} event" do
|
|
|
|
it 'performs AutoMergeProcessWorker' do
|
|
|
|
expect(AutoMergeProcessWorker).to receive(:perform_async).with(merge_request.id)
|
|
|
|
|
|
|
|
pipeline.public_send(action)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when auto merge is not enabled in the merge request' do
|
|
|
|
let(:merge_request) { create(:merge_request) }
|
|
|
|
|
|
|
|
it 'performs AutoMergeProcessWorker' do
|
|
|
|
expect(AutoMergeProcessWorker).not_to receive(:perform_async)
|
|
|
|
|
|
|
|
pipeline.succeed!
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2020-03-13 15:44:24 +05:30
|
|
|
describe 'auto devops pipeline metrics' do
|
|
|
|
using RSpec::Parameterized::TableSyntax
|
|
|
|
|
|
|
|
let(:pipeline) { create(:ci_empty_pipeline, config_source: config_source) }
|
|
|
|
let(:config_source) { :auto_devops_source }
|
|
|
|
|
|
|
|
where(:action, :status) do
|
|
|
|
:succeed | 'success'
|
|
|
|
:drop | 'failed'
|
|
|
|
:skip | 'skipped'
|
|
|
|
:cancel | 'canceled'
|
|
|
|
end
|
|
|
|
|
|
|
|
with_them do
|
|
|
|
context "when pipeline receives action '#{params[:action]}'" do
|
|
|
|
subject { pipeline.public_send(action) }
|
|
|
|
|
|
|
|
it { expect { subject }.to change { auto_devops_pipelines_completed_total(status) }.by(1) }
|
|
|
|
|
|
|
|
context 'when not auto_devops_source?' do
|
|
|
|
let(:config_source) { :repository_source }
|
|
|
|
|
|
|
|
it { expect { subject }.not_to change { auto_devops_pipelines_completed_total(status) } }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when pipeline is bridge triggered' do
|
|
|
|
before do
|
|
|
|
pipeline.source_bridge = create(:ci_bridge)
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when source bridge is dependent on pipeline status' do
|
|
|
|
before do
|
|
|
|
allow(pipeline.source_bridge).to receive(:dependent?).and_return(true)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'schedules the pipeline bridge worker' do
|
|
|
|
expect(::Ci::PipelineBridgeStatusWorker).to receive(:perform_async)
|
|
|
|
|
|
|
|
pipeline.succeed!
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when source bridge is not dependent on pipeline status' do
|
|
|
|
it 'does not schedule the pipeline bridge worker' do
|
|
|
|
expect(::Ci::PipelineBridgeStatusWorker).not_to receive(:perform_async)
|
|
|
|
|
|
|
|
pipeline.succeed!
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
def auto_devops_pipelines_completed_total(status)
|
|
|
|
Gitlab::Metrics.counter(:auto_devops_pipelines_completed_total, 'Number of completed auto devops pipelines').get(status: status)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
def create_build(name, *traits, queued_at: current, started_from: 0, **opts)
|
|
|
|
create(:ci_build, *traits,
|
2016-09-29 09:46:39 +05:30
|
|
|
name: name,
|
|
|
|
pipeline: pipeline,
|
|
|
|
queued_at: queued_at,
|
2017-08-17 22:00:37 +05:30
|
|
|
started_at: queued_at + started_from,
|
|
|
|
**opts)
|
2016-09-29 09:46:39 +05:30
|
|
|
end
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
describe '#branch?' do
|
|
|
|
subject { pipeline.branch? }
|
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
context 'when ref is not a tag' do
|
2016-06-16 23:09:34 +05:30
|
|
|
before do
|
|
|
|
pipeline.tag = false
|
|
|
|
end
|
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
it 'return true' do
|
2016-06-16 23:09:34 +05:30
|
|
|
is_expected.to be_truthy
|
|
|
|
end
|
2019-02-15 15:39:39 +05:30
|
|
|
|
2020-03-13 15:44:24 +05:30
|
|
|
context 'when pipeline is merge request' do
|
2019-02-15 15:39:39 +05:30
|
|
|
let(:pipeline) do
|
2020-03-13 15:44:24 +05:30
|
|
|
create(:ci_pipeline, merge_request: merge_request)
|
2019-02-15 15:39:39 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
let(:merge_request) do
|
|
|
|
create(:merge_request,
|
|
|
|
source_project: project,
|
|
|
|
source_branch: 'feature',
|
|
|
|
target_project: project,
|
|
|
|
target_branch: 'master')
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns false' do
|
|
|
|
is_expected.to be_falsey
|
|
|
|
end
|
|
|
|
end
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
context 'when ref is a tag' do
|
2016-06-16 23:09:34 +05:30
|
|
|
before do
|
|
|
|
pipeline.tag = true
|
|
|
|
end
|
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
it 'return false' do
|
2016-06-16 23:09:34 +05:30
|
|
|
is_expected.to be_falsey
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
2016-08-24 12:49:21 +05:30
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
describe '#git_ref' do
|
|
|
|
subject { pipeline.send(:git_ref) }
|
|
|
|
|
|
|
|
context 'when ref is branch' do
|
|
|
|
let(:pipeline) { create(:ci_pipeline, tag: false) }
|
|
|
|
|
|
|
|
it 'returns branch ref' do
|
|
|
|
is_expected.to eq(Gitlab::Git::BRANCH_REF_PREFIX + pipeline.ref.to_s)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when ref is tag' do
|
|
|
|
let(:pipeline) { create(:ci_pipeline, tag: true) }
|
|
|
|
|
|
|
|
it 'returns branch ref' do
|
|
|
|
is_expected.to eq(Gitlab::Git::TAG_REF_PREFIX + pipeline.ref.to_s)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when ref is merge request' do
|
|
|
|
let(:pipeline) do
|
|
|
|
create(:ci_pipeline,
|
2019-07-07 11:18:12 +05:30
|
|
|
source: :merge_request_event,
|
2019-02-15 15:39:39 +05:30
|
|
|
merge_request: merge_request)
|
|
|
|
end
|
|
|
|
|
|
|
|
let(:merge_request) do
|
|
|
|
create(:merge_request,
|
|
|
|
source_project: project,
|
|
|
|
source_branch: 'feature',
|
|
|
|
target_project: project,
|
|
|
|
target_branch: 'master')
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns branch ref' do
|
|
|
|
is_expected.to eq(Gitlab::Git::BRANCH_REF_PREFIX + pipeline.ref.to_s)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-12-13 13:39:08 +05:30
|
|
|
describe 'ref_exists?' do
|
|
|
|
context 'when repository exists' do
|
|
|
|
using RSpec::Parameterized::TableSyntax
|
|
|
|
|
|
|
|
let(:project) { create(:project, :repository) }
|
|
|
|
|
|
|
|
where(:tag, :ref, :result) do
|
|
|
|
false | 'master' | true
|
|
|
|
false | 'non-existent-branch' | false
|
|
|
|
true | 'v1.1.0' | true
|
|
|
|
true | 'non-existent-tag' | false
|
|
|
|
end
|
|
|
|
|
|
|
|
with_them do
|
|
|
|
let(:pipeline) do
|
|
|
|
create(:ci_empty_pipeline, project: project, tag: tag, ref: ref)
|
|
|
|
end
|
|
|
|
|
|
|
|
it "correctly detects ref" do
|
|
|
|
expect(pipeline.ref_exists?).to be result
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when repository does not exist' do
|
|
|
|
let(:pipeline) do
|
|
|
|
create(:ci_empty_pipeline, project: project, ref: 'master')
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'always returns false' do
|
|
|
|
expect(pipeline.ref_exists?).to eq false
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2016-09-29 09:46:39 +05:30
|
|
|
  # Examples that need a real repository rather than an empty project.
  context 'with non-empty project' do
    let(:project) { create(:project, :repository) }

    let(:pipeline) do
      create(:ci_pipeline,
             project: project,
             ref: project.default_branch,
             sha: project.commit.sha)
    end

    # #latest? — whether this pipeline's SHA is the head of its ref.
    describe '#latest?' do
      context 'with latest sha' do
        it 'returns true' do
          expect(pipeline).to be_latest
        end
      end

      context 'with a branch name as the ref' do
        it 'looks up commit with the full ref name' do
          # Guards against ambiguity between branch and tag names.
          expect(pipeline.project).to receive(:commit).with('refs/heads/master').and_call_original

          expect(pipeline).to be_latest
        end
      end

      context 'with not latest sha' do
        before do
          # Rewind the pipeline one commit behind the branch head.
          pipeline.update(
            sha: project.commit("#{project.default_branch}~1").sha)
        end

        it 'returns false' do
          expect(pipeline).not_to be_latest
        end
      end
    end
  end
|
|
|
|
|
2016-08-24 12:49:21 +05:30
|
|
|
describe '#manual_actions' do
|
|
|
|
subject { pipeline.manual_actions }
|
|
|
|
|
|
|
|
it 'when none defined' do
|
|
|
|
is_expected.to be_empty
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when action defined' do
|
|
|
|
let!(:manual) { create(:ci_build, :manual, pipeline: pipeline, name: 'deploy') }
|
|
|
|
|
|
|
|
it 'returns one action' do
|
|
|
|
is_expected.to contain_exactly(manual)
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'there are multiple of the same name' do
|
|
|
|
let!(:manual2) { create(:ci_build, :manual, pipeline: pipeline, name: 'deploy') }
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
before do
|
|
|
|
manual.update(retried: true)
|
|
|
|
end
|
|
|
|
|
2016-08-24 12:49:21 +05:30
|
|
|
it 'returns latest one' do
|
|
|
|
is_expected.to contain_exactly(manual2)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-12-05 23:21:45 +05:30
|
|
|
  # A branch-update push carries a real before_sha; a branch-creation push
  # carries the blank (all-zero) SHA.
  describe '#branch_updated?' do
    context 'when pipeline has before SHA' do
      before do
        pipeline.update_column(:before_sha, 'a1b2c3d4')
      end

      it 'runs on a branch update push' do
        expect(pipeline.before_sha).not_to be Gitlab::Git::BLANK_SHA
        expect(pipeline.branch_updated?).to be true
      end
    end

    context 'when pipeline does not have before SHA' do
      before do
        pipeline.update_column(:before_sha, Gitlab::Git::BLANK_SHA)
      end

      it 'does not run on a branch updating push' do
        expect(pipeline.branch_updated?).to be false
      end
    end
  end
|
|
|
|
|
|
|
|
  # Paths touched between before_sha and sha (or by the merge request for
  # MR pipelines); nil when the diff cannot be computed.
  describe '#modified_paths' do
    context 'when old and new revisions are set' do
      let(:project) { create(:project, :repository) }

      before do
        pipeline.update(before_sha: '1234abcd', sha: '2345bcde')
      end

      it 'fetches stats for changes between commits' do
        expect(project.repository)
          .to receive(:diff_stats).with('1234abcd', '2345bcde')
          .and_call_original

        pipeline.modified_paths
      end
    end

    context 'when either old or new revision is missing' do
      before do
        # Blank before_sha means there is no base to diff against.
        pipeline.update_column(:before_sha, Gitlab::Git::BLANK_SHA)
      end

      it 'returns nil' do
        expect(pipeline.modified_paths).to be_nil
      end
    end

    context 'when source is merge request' do
      let(:pipeline) do
        create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request)
      end

      let(:merge_request) do
        create(:merge_request,
               source_project: project,
               source_branch: 'feature',
               target_project: project,
               target_branch: 'master')
      end

      it 'returns merge request modified paths' do
        expect(pipeline.modified_paths).to match(merge_request.modified_paths)
      end
    end
  end
|
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
  # Full recursive file listing at the pipeline's SHA, memoized so the
  # repository is consulted only once.
  describe '#all_worktree_paths' do
    let(:files) { { 'main.go' => '', 'mocks/mocks.go' => '' } }
    let(:project) { create(:project, :custom_repo, files: files) }
    let(:pipeline) { build(:ci_pipeline, project: project, sha: project.repository.head_commit.sha) }

    it 'returns all file paths cached' do
      # `.once` proves the second call hits the cache, not the repository.
      expect(project.repository).to receive(:ls_files).with(pipeline.sha).once.and_call_original
      expect(pipeline.all_worktree_paths).to eq(files.keys)
      expect(pipeline.all_worktree_paths).to eq(files.keys)
    end
  end
|
|
|
|
|
|
|
|
  # Like #all_worktree_paths, but only the repository root (no nested
  # directories), also memoized.
  describe '#top_level_worktree_paths' do
    let(:files) { { 'main.go' => '', 'mocks/mocks.go' => '' } }
    let(:project) { create(:project, :custom_repo, files: files) }
    let(:pipeline) { build(:ci_pipeline, project: project, sha: project.repository.head_commit.sha) }

    it 'returns top-level file paths cached' do
      # `.once` proves the second call hits the cache, not the repository.
      expect(project.repository).to receive(:tree).with(pipeline.sha).once.and_call_original
      expect(pipeline.top_level_worktree_paths).to eq(['main.go'])
      expect(pipeline.top_level_worktree_paths).to eq(['main.go'])
    end
  end
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
  # True only when the project has a usable Kubernetes cluster attached.
  describe '#has_kubernetes_active?' do
    context 'when kubernetes is active' do
      context 'when user configured kubernetes from CI/CD > Clusters' do
        let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
        let(:project) { cluster.project }

        it 'returns true' do
          expect(pipeline).to have_kubernetes_active
        end
      end
    end

    context 'when kubernetes is not active' do
      it 'returns false' do
        expect(pipeline).not_to have_kubernetes_active
      end
    end
  end
|
|
|
|
|
2016-08-24 12:49:21 +05:30
|
|
|
  # A "warning" is a failed build that was allowed to fail; a hard failure
  # or a retried-then-passing build is not a warning.
  describe '#has_warnings?' do
    subject { pipeline.has_warnings? }

    context 'build which is allowed to fail fails' do
      before do
        create :ci_build, :success, pipeline: pipeline, name: 'rspec'
        create :ci_build, :allowed_to_fail, :failed, pipeline: pipeline, name: 'rubocop'
      end

      it 'returns true' do
        is_expected.to be_truthy
      end
    end

    context 'build which is allowed to fail succeeds' do
      before do
        create :ci_build, :success, pipeline: pipeline, name: 'rspec'
        create :ci_build, :allowed_to_fail, :success, pipeline: pipeline, name: 'rubocop'
      end

      it 'returns false' do
        is_expected.to be_falsey
      end
    end

    context 'build is retried and succeeds' do
      before do
        create :ci_build, :success, pipeline: pipeline, name: 'rubocop'
        create :ci_build, :failed, pipeline: pipeline, name: 'rspec'
        create :ci_build, :success, pipeline: pipeline, name: 'rspec'
      end

      it 'returns false' do
        is_expected.to be_falsey
      end
    end
  end
|
2016-09-13 17:45:13 +05:30
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
  # Count of allowed-to-fail failures; must be batch-loadable so listing
  # many pipelines does not issue one query per pipeline.
  describe '#number_of_warnings' do
    it 'returns the number of warnings' do
      create(:ci_build, :allowed_to_fail, :failed, pipeline: pipeline, name: 'rubocop')

      expect(pipeline.number_of_warnings).to eq(1)
    end

    it 'supports eager loading of the number of warnings' do
      pipeline2 = create(:ci_empty_pipeline, status: :created, project: project)

      create(:ci_build, :allowed_to_fail, :failed, pipeline: pipeline, name: 'rubocop')
      create(:ci_build, :allowed_to_fail, :failed, pipeline: pipeline2, name: 'rubocop')

      pipelines = project.ci_pipelines.to_a

      pipelines.each(&:number_of_warnings)

      # To run the queries we need to actually use the lazy objects, which we do
      # by just sending "to_i" to them.
      amount = ActiveRecord::QueryRecorder
        .new { pipelines.each { |p| p.number_of_warnings.to_i } }
        .count

      # One query total for all pipelines, not one per pipeline.
      expect(amount).to eq(1)
    end
  end
|
|
|
|
|
2020-03-13 15:44:24 +05:30
|
|
|
  # A pipeline needs processing while any of its builds has not been
  # processed yet (processed is nil or false).
  describe '#needs_processing?' do
    using RSpec::Parameterized::TableSyntax

    subject { pipeline.needs_processing? }

    where(:processed, :result) do
      nil   | true
      false | true
      true  | false
    end

    with_them do
      let(:build) do
        create(:ci_build, :success, pipeline: pipeline, name: 'rubocop')
      end

      before do
        # update_column skips callbacks so `processed` keeps the raw value.
        build.update_column(:processed, processed)
      end

      it { is_expected.to eq(result) }
    end
  end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
  # Seeds four pipelines (creation order: canceled, success, failed, skipped)
  # across two refs, used by the ordering/status scope specs below.
  shared_context 'with some outdated pipelines' do
    before do
      create_pipeline(:canceled, 'ref', 'A', project)
      create_pipeline(:success, 'ref', 'A', project)
      create_pipeline(:failed, 'ref', 'B', project)
      create_pipeline(:skipped, 'feature', 'C', project)
    end

    # Convenience wrapper around the empty-pipeline factory.
    def create_pipeline(status, ref, sha, project)
      create(
        :ci_empty_pipeline,
        status: status,
        ref: ref,
        sha: sha,
        project: project
      )
    end
  end
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
describe '.newest_first' do
  include_context 'with some outdated pipelines'

  it 'returns the pipelines from new to old' do
    statuses = described_class.newest_first.pluck(:status)

    expect(statuses).to eq(%w[skipped failed success canceled])
  end

  it 'searches limited backlog' do
    statuses = described_class.newest_first(limit: 1).pluck(:status)

    expect(statuses).to eq(%w[skipped])
  end
end
|
|
|
|
|
|
|
|
describe '.latest_status' do
  include_context 'with some outdated pipelines'

  context 'when no ref is specified' do
    it 'returns the status of the latest pipeline' do
      expect(described_class.latest_status).to eq('skipped')
    end
  end

  context 'when ref is specified' do
    it 'returns the status of the latest pipeline for the given ref' do
      expect(described_class.latest_status('ref')).to eq('failed')
    end
  end
end
|
|
|
|
|
2019-10-12 21:52:04 +05:30
|
|
|
describe '.latest_successful_for_ref' do
  include_context 'with some outdated pipelines'

  # Newer than the successful pipeline created by the shared context.
  let!(:latest_successful_pipeline) { create_pipeline(:success, 'ref', 'D', project) }

  it 'returns the latest successful pipeline' do
    expect(described_class.latest_successful_for_ref('ref'))
      .to eq(latest_successful_pipeline)
  end
end
|
|
|
|
|
|
|
|
describe '.latest_successful_for_sha' do
  include_context 'with some outdated pipelines'

  let!(:latest_successful_pipeline) do
    create_pipeline(:success, 'ref', 'awesomesha', project)
  end

  it 'returns the latest successful pipeline' do
    result = described_class.latest_successful_for_sha('awesomesha')

    expect(result).to eq(latest_successful_pipeline)
  end
end
|
|
|
|
|
|
|
|
describe '.latest_successful_for_refs' do
  include_context 'with some outdated pipelines'

  let!(:latest_successful_pipeline1) { create_pipeline(:success, 'ref1', 'D', project) }
  let!(:latest_successful_pipeline2) { create_pipeline(:success, 'ref2', 'D', project) }

  it 'returns the latest successful pipeline for both refs' do
    # ref3 has no pipelines, so it must simply be absent from the result.
    refs = %w(ref1 ref2 ref3)

    result = described_class.latest_successful_for_refs(refs)

    expect(result).to eq({ 'ref1' => latest_successful_pipeline1, 'ref2' => latest_successful_pipeline2 })
  end
end
|
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
describe '.latest_pipeline_per_commit' do
  let(:project) { create(:project) }

  # Two refs share SHA '123'; a third ref has its own SHA '456'.
  let!(:commit_123_ref_master) do
    create(:ci_empty_pipeline, status: 'success', ref: 'master', sha: '123', project: project)
  end

  let!(:commit_123_ref_develop) do
    create(:ci_empty_pipeline, status: 'success', ref: 'develop', sha: '123', project: project)
  end

  let!(:commit_456_ref_test) do
    create(:ci_empty_pipeline, status: 'success', ref: 'test', sha: '456', project: project)
  end

  context 'without a ref' do
    it 'returns a Hash containing the latest pipeline per commit for all refs' do
      result = described_class.latest_pipeline_per_commit(%w[123 456])

      expect(result).to match(
        '123' => commit_123_ref_develop,
        '456' => commit_456_ref_test
      )
    end

    it 'only includes the latest pipeline of the given commit SHAs' do
      result = described_class.latest_pipeline_per_commit(%w[123])

      expect(result).to match('123' => commit_123_ref_develop)
    end

    context 'when there are two pipelines for a ref and SHA' do
      let!(:commit_123_ref_master_latest) do
        create(:ci_empty_pipeline, status: 'failed', ref: 'master', sha: '123', project: project)
      end

      it 'returns the latest pipeline' do
        result = described_class.latest_pipeline_per_commit(%w[123])

        expect(result).to match('123' => commit_123_ref_master_latest)
      end
    end
  end

  context 'with a ref' do
    it 'only includes the pipelines for the given ref' do
      result = described_class.latest_pipeline_per_commit(%w[123 456], 'master')

      expect(result).to match('123' => commit_123_ref_master)
    end
  end
end
|
|
|
|
|
2018-12-13 13:39:08 +05:30
|
|
|
describe '.latest_successful_ids_per_project' do
  let(:projects) { create_list(:project, 2) }

  # Two successes and a failure in the first project; one success in the second.
  let!(:pipeline1) { create(:ci_pipeline, :success, project: projects[0]) }
  let!(:pipeline2) { create(:ci_pipeline, :success, project: projects[0]) }
  let!(:pipeline3) { create(:ci_pipeline, :failed, project: projects[0]) }
  let!(:pipeline4) { create(:ci_pipeline, :success, project: projects[1]) }

  it 'returns expected pipeline ids' do
    result = described_class.latest_successful_ids_per_project

    expect(result).to contain_exactly(pipeline2, pipeline4)
  end
end
|
|
|
|
|
2017-09-10 17:25:29 +05:30
|
|
|
describe '.internal_sources' do
  subject { described_class.internal_sources }

  it { is_expected.to be_an(Array) }
end
|
|
|
|
|
2019-10-12 21:52:04 +05:30
|
|
|
describe '.bridgeable_statuses' do
  subject { described_class.bridgeable_statuses }

  it { is_expected.to be_an(Array) }

  # Pre-run statuses must never be considered bridgeable.
  it { is_expected.not_to include('created', 'waiting_for_resource', 'preparing', 'pending') }
end
|
|
|
|
|
2020-03-13 15:44:24 +05:30
|
|
|
describe '#status', :sidekiq_inline do
  let(:build) { create(:ci_build, :created, pipeline: pipeline, name: 'test') }

  subject { pipeline.reload.status }

  context 'on waiting for resource' do
    before do
      allow(build).to receive(:requires_resource?) { true }
      allow(Ci::ResourceGroups::AssignResourceFromResourceGroupWorker).to receive(:perform_async)

      build.enqueue
    end

    it { is_expected.to eq('waiting_for_resource') }
  end

  context 'on prepare' do
    before do
      # Prevent skipping directly to 'pending'
      allow(build).to receive(:prerequisites).and_return([double])
      allow(Ci::BuildPrepareWorker).to receive(:perform_async)

      build.enqueue
    end

    it { is_expected.to eq('preparing') }
  end

  context 'on queuing' do
    before { build.enqueue }

    it { is_expected.to eq('pending') }
  end

  context 'on run' do
    before do
      build.enqueue
      build.reload.run
    end

    it { is_expected.to eq('running') }
  end

  context 'on drop' do
    before { build.drop }

    it { is_expected.to eq('failed') }
  end

  context 'on success' do
    before { build.success }

    it { is_expected.to eq('success') }
  end

  context 'on cancel' do
    before { build.cancel }

    context 'when build is pending' do
      let(:build) { create(:ci_build, :pending, pipeline: pipeline) }

      it { is_expected.to eq('canceled') }
    end
  end

  context 'on failure and build retry' do
    before do
      stub_not_protect_default_branch

      build.drop
      project.add_developer(user)

      Ci::Build.retry(build, user)
    end

    # We are changing a state: created > failed > running
    # Instead of: created > failed > pending
    # Since the pipeline already run, so it should not be pending anymore
    it { is_expected.to eq('running') }
  end
end
|
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
describe '#update_status' do
  context 'when pipeline is empty' do
    # FIX: the description used to read "updates does not change pipeline
    # status", which contradicted the assertion below — an empty pipeline's
    # status IS changed, from 'created' to 'skipped'.
    it 'updates pipeline status to skipped' do
      expect(pipeline.statuses.latest.slow_composite_status).to be_nil

      expect { pipeline.update_legacy_status }
        .to change { pipeline.reload.status }
        .from('created')
        .to('skipped')
    end
  end

  context 'when updating status to pending' do
    before do
      create(:ci_build, pipeline: pipeline, status: :running)
    end

    it 'updates pipeline status to running' do
      expect { pipeline.update_legacy_status }
        .to change { pipeline.reload.status }
        .from('created')
        .to('running')
    end
  end

  context 'when updating status to scheduled' do
    before do
      create(:ci_build, pipeline: pipeline, status: :scheduled)
    end

    it 'updates pipeline status to scheduled' do
      expect { pipeline.update_legacy_status }
        .to change { pipeline.reload.status }
        .from('created')
        .to('scheduled')
    end
  end

  context 'when statuses status was not recognized' do
    before do
      allow(pipeline)
        .to receive(:latest_builds_status)
        .and_return(:unknown)
    end

    it 'raises an exception' do
      expect { pipeline.update_legacy_status }
        .to raise_error(HasStatus::UnknownStatusError)
    end
  end
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
describe '#detailed_status' do
  subject { pipeline.detailed_status(user) }

  context 'when pipeline is created' do
    let(:pipeline) { create(:ci_pipeline, status: :created) }

    it 'returns detailed status for created pipeline' do
      expect(subject.text).to eq s_('CiStatusText|created')
    end
  end

  context 'when pipeline is pending' do
    let(:pipeline) { create(:ci_pipeline, status: :pending) }

    it 'returns detailed status for pending pipeline' do
      expect(subject.text).to eq s_('CiStatusText|pending')
    end
  end

  context 'when pipeline is running' do
    let(:pipeline) { create(:ci_pipeline, status: :running) }

    it 'returns detailed status for running pipeline' do
      expect(subject.text).to eq s_('CiStatus|running')
    end
  end

  context 'when pipeline is successful' do
    let(:pipeline) { create(:ci_pipeline, status: :success) }

    it 'returns detailed status for successful pipeline' do
      expect(subject.text).to eq s_('CiStatusText|passed')
    end
  end

  context 'when pipeline is failed' do
    let(:pipeline) { create(:ci_pipeline, status: :failed) }

    it 'returns detailed status for failed pipeline' do
      expect(subject.text).to eq s_('CiStatusText|failed')
    end
  end

  context 'when pipeline is canceled' do
    let(:pipeline) { create(:ci_pipeline, status: :canceled) }

    it 'returns detailed status for canceled pipeline' do
      expect(subject.text).to eq s_('CiStatusText|canceled')
    end
  end

  context 'when pipeline is skipped' do
    let(:pipeline) { create(:ci_pipeline, status: :skipped) }

    it 'returns detailed status for skipped pipeline' do
      expect(subject.text).to eq s_('CiStatusText|skipped')
    end
  end

  context 'when pipeline is blocked' do
    let(:pipeline) { create(:ci_pipeline, status: :manual) }

    it 'returns detailed status for blocked pipeline' do
      expect(subject.text).to eq s_('CiStatusText|blocked')
    end
  end

  context 'when pipeline is successful but with warnings' do
    let(:pipeline) { create(:ci_pipeline, status: :success) }

    before do
      create(:ci_build, :allowed_to_fail, :failed, pipeline: pipeline)
    end

    # FIX: example description typo — "retruns" corrected to "returns".
    it 'returns detailed status for successful pipeline with warnings' do
      expect(subject.label).to eq(s_('CiStatusLabel|passed with warnings'))
    end
  end
end
|
|
|
|
|
|
|
|
describe '#cancelable?' do
  # Any job in a pre-terminal state makes the pipeline cancelable, regardless
  # of what other (terminal) jobs exist alongside it.
  %i[created running pending].each do |status0|
    context "when there is a build #{status0}" do
      before { create(:ci_build, status0, pipeline: pipeline) }

      it 'is cancelable' do
        expect(pipeline.cancelable?).to be_truthy
      end
    end

    context "when there is an external job #{status0}" do
      before { create(:generic_commit_status, status0, pipeline: pipeline) }

      it 'is cancelable' do
        expect(pipeline.cancelable?).to be_truthy
      end
    end

    %i[success failed canceled].each do |status1|
      context "when there are generic_commit_status jobs for #{status0} and #{status1}" do
        before do
          create(:generic_commit_status, status0, pipeline: pipeline)
          create(:generic_commit_status, status1, pipeline: pipeline)
        end

        it 'is cancelable' do
          expect(pipeline.cancelable?).to be_truthy
        end
      end

      context "when there are generic_commit_status and ci_build jobs for #{status0} and #{status1}" do
        before do
          create(:generic_commit_status, status0, pipeline: pipeline)
          create(:ci_build, status1, pipeline: pipeline)
        end

        it 'is cancelable' do
          expect(pipeline.cancelable?).to be_truthy
        end
      end

      context "when there are ci_build jobs for #{status0} and #{status1}" do
        before do
          create(:ci_build, status0, pipeline: pipeline)
          create(:ci_build, status1, pipeline: pipeline)
        end

        it 'is cancelable' do
          expect(pipeline.cancelable?).to be_truthy
        end
      end
    end
  end

  # Pipelines whose jobs are all in a terminal state are not cancelable.
  %i[success failed canceled].each do |status|
    context "when there is a build #{status}" do
      before { create(:ci_build, status, pipeline: pipeline) }

      it 'is not cancelable' do
        expect(pipeline.cancelable?).to be_falsey
      end
    end

    context "when there is an external job #{status}" do
      before { create(:generic_commit_status, status, pipeline: pipeline) }

      it 'is not cancelable' do
        expect(pipeline.cancelable?).to be_falsey
      end
    end
  end

  context 'when there is a manual action present in the pipeline' do
    before { create(:ci_build, :manual, pipeline: pipeline) }

    it 'is not cancelable' do
      expect(pipeline).not_to be_cancelable
    end
  end
end
|
|
|
|
|
|
|
|
describe '#cancel_running' do
  let(:latest_status) { pipeline.statuses.pluck(:status) }

  context 'when there is a running external job and a regular job' do
    before do
      create(:ci_build, :running, pipeline: pipeline)
      create(:generic_commit_status, :running, pipeline: pipeline)

      pipeline.cancel_running
    end

    it 'cancels both jobs' do
      expect(latest_status).to contain_exactly('canceled', 'canceled')
    end
  end

  context 'when jobs are in different stages' do
    before do
      create(:ci_build, :running, stage_idx: 0, pipeline: pipeline)
      create(:ci_build, :running, stage_idx: 1, pipeline: pipeline)

      pipeline.cancel_running
    end

    it 'cancels both jobs' do
      expect(latest_status).to contain_exactly('canceled', 'canceled')
    end
  end

  context 'when there are created builds present in the pipeline' do
    before do
      create(:ci_build, :running, stage_idx: 0, pipeline: pipeline)
      create(:ci_build, :created, stage_idx: 1, pipeline: pipeline)

      pipeline.cancel_running
    end

    it 'cancels created builds' do
      expect(latest_status).to eq %w(canceled canceled)
    end
  end
end
|
|
|
|
|
|
|
|
describe '#retry_failed' do
  let(:latest_status) { pipeline.statuses.latest.pluck(:status) }

  before do
    stub_not_protect_default_branch

    # The retrying user needs developer access to the project.
    project.add_developer(user)
  end

  context 'when there is a failed build and failed external status' do
    before do
      create(:ci_build, :failed, name: 'build', pipeline: pipeline)
      create(:generic_commit_status, :failed, name: 'jenkins', pipeline: pipeline)

      pipeline.retry_failed(user)
    end

    it 'retries only build' do
      expect(latest_status).to contain_exactly('pending', 'failed')
    end
  end

  context 'when builds are in different stages' do
    before do
      create(:ci_build, :failed, name: 'build', stage_idx: 0, pipeline: pipeline)
      create(:ci_build, :failed, name: 'jenkins', stage_idx: 1, pipeline: pipeline)

      pipeline.retry_failed(user)
    end

    it 'retries both builds' do
      expect(latest_status).to contain_exactly('pending', 'created')
    end
  end

  context 'when there are canceled and failed' do
    before do
      create(:ci_build, :failed, name: 'build', stage_idx: 0, pipeline: pipeline)
      create(:ci_build, :canceled, name: 'jenkins', stage_idx: 1, pipeline: pipeline)

      pipeline.retry_failed(user)
    end

    it 'retries both builds' do
      expect(latest_status).to contain_exactly('pending', 'created')
    end
  end
end
|
|
|
|
|
2016-09-13 17:45:13 +05:30
|
|
|
describe '#execute_hooks' do
|
2016-09-29 09:46:39 +05:30
|
|
|
let!(:build_a) { create_build('a', 0) }
|
2018-03-17 18:26:18 +05:30
|
|
|
let!(:build_b) { create_build('b', 0) }
|
2016-09-13 17:45:13 +05:30
|
|
|
|
|
|
|
let!(:hook) do
|
|
|
|
create(:project_hook, project: project, pipeline_events: enabled)
|
|
|
|
end
|
|
|
|
|
|
|
|
before do
|
2017-09-10 17:25:29 +05:30
|
|
|
WebHookWorker.drain
|
2016-09-13 17:45:13 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
context 'with pipeline hooks enabled' do
|
|
|
|
let(:enabled) { true }
|
|
|
|
|
|
|
|
before do
|
2019-09-04 21:01:54 +05:30
|
|
|
stub_full_request(hook.url, method: :post)
|
2016-09-13 17:45:13 +05:30
|
|
|
end
|
|
|
|
|
2020-03-13 15:44:24 +05:30
|
|
|
context 'with multiple builds', :sidekiq_inline do
|
2016-09-13 17:45:13 +05:30
|
|
|
context 'when build is queued' do
|
|
|
|
before do
|
2020-03-13 15:44:24 +05:30
|
|
|
build_a.reload.enqueue
|
|
|
|
build_b.reload.enqueue
|
2016-09-13 17:45:13 +05:30
|
|
|
end
|
|
|
|
|
2016-09-29 09:46:39 +05:30
|
|
|
it 'receives a pending event once' do
|
2016-09-13 17:45:13 +05:30
|
|
|
expect(WebMock).to have_requested_pipeline_hook('pending').once
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when build is run' do
|
|
|
|
before do
|
2020-03-13 15:44:24 +05:30
|
|
|
build_a.reload.enqueue
|
|
|
|
build_a.reload.run!
|
|
|
|
build_b.reload.enqueue
|
|
|
|
build_b.reload.run!
|
2016-09-13 17:45:13 +05:30
|
|
|
end
|
|
|
|
|
2016-09-29 09:46:39 +05:30
|
|
|
it 'receives a running event once' do
|
2016-09-13 17:45:13 +05:30
|
|
|
expect(WebMock).to have_requested_pipeline_hook('running').once
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when all builds succeed' do
|
|
|
|
before do
|
|
|
|
build_a.success
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
# We have to reload build_b as this is in next stage and it gets triggered by PipelineProcessWorker
|
|
|
|
build_b.reload.success
|
2016-09-13 17:45:13 +05:30
|
|
|
end
|
|
|
|
|
2016-09-29 09:46:39 +05:30
|
|
|
it 'receives a success event once' do
|
2016-09-13 17:45:13 +05:30
|
|
|
expect(WebMock).to have_requested_pipeline_hook('success').once
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2016-09-29 09:46:39 +05:30
|
|
|
context 'when stage one failed' do
|
2018-03-17 18:26:18 +05:30
|
|
|
let!(:build_b) { create_build('b', 1) }
|
|
|
|
|
2016-09-29 09:46:39 +05:30
|
|
|
before do
|
|
|
|
build_a.drop
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'receives a failed event once' do
|
|
|
|
expect(WebMock).to have_requested_pipeline_hook('failed').once
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2016-09-13 17:45:13 +05:30
|
|
|
def have_requested_pipeline_hook(status)
|
2019-09-04 21:01:54 +05:30
|
|
|
have_requested(:post, stubbed_hostname(hook.url)).with do |req|
|
2016-09-13 17:45:13 +05:30
|
|
|
json_body = JSON.parse(req.body)
|
|
|
|
json_body['object_attributes']['status'] == status &&
|
|
|
|
json_body['builds'].length == 2
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'with pipeline hooks disabled' do
|
|
|
|
let(:enabled) { false }
|
|
|
|
|
|
|
|
before do
|
|
|
|
build_a.enqueue
|
|
|
|
build_b.enqueue
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'did not execute pipeline_hook after touched' do
|
|
|
|
expect(WebMock).not_to have_requested(:post, hook.url)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2016-09-29 09:46:39 +05:30
|
|
|
# Builds a :created CI build in the given stage of the shared pipeline.
def create_build(name, stage_idx)
  create(:ci_build, :created,
         pipeline: pipeline,
         name: name,
         stage: "stage:#{stage_idx}",
         stage_idx: stage_idx)
end
|
|
|
|
end
|
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
describe "#merge_requests_as_head_pipeline" do
  let(:project) { create(:project) }
  let(:pipeline) { create(:ci_empty_pipeline, status: 'created', project: project, ref: 'master', sha: 'a288a022a53a5a944fae87bcec6efc87b7061808') }

  it "returns merge requests whose `diff_head_sha` matches the pipeline's SHA" do
    allow_next_instance_of(MergeRequest) do |instance|
      allow(instance).to receive(:diff_head_sha) { 'a288a022a53a5a944fae87bcec6efc87b7061808' }
    end

    merge_request = create(:merge_request, source_project: project, head_pipeline: pipeline, source_branch: pipeline.ref)

    expect(pipeline.merge_requests_as_head_pipeline).to eq([merge_request])
  end

  it "doesn't return merge requests whose source branch doesn't match the pipeline's ref" do
    create(:merge_request, source_project: project, source_branch: 'feature', target_branch: 'master')

    expect(pipeline.merge_requests_as_head_pipeline).to be_empty
  end

  it "doesn't return merge requests whose `diff_head_sha` doesn't match the pipeline's SHA" do
    create(:merge_request, source_project: project, source_branch: pipeline.ref)

    allow_next_instance_of(MergeRequest) do |instance|
      allow(instance).to receive(:diff_head_sha) { '97de212e80737a608d939f648d959671fb0a0142b' }
    end

    expect(pipeline.merge_requests_as_head_pipeline).to be_empty
  end
end
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
describe "#all_merge_requests" do
  let(:project) { create(:project) }

  # Shared behaviour exercised both for same-project pipelines and for
  # pipelines running in a fork (see `it_behaves_like` calls below).
  shared_examples 'a method that returns all merge requests for a given pipeline' do
    let(:pipeline) { create(:ci_empty_pipeline, status: 'created', project: pipeline_project, ref: 'master') }

    it "returns all merge requests having the same source branch" do
      merge_request = create(:merge_request, source_project: pipeline_project, target_project: project, source_branch: pipeline.ref)

      expect(pipeline.all_merge_requests).to eq([merge_request])
    end

    it "doesn't return merge requests having a different source branch" do
      create(:merge_request, source_project: pipeline_project, target_project: project, source_branch: 'feature', target_branch: 'master')

      expect(pipeline.all_merge_requests).to be_empty
    end

    context 'when there is a merge request pipeline' do
      let(:source_branch) { 'feature' }
      let(:target_branch) { 'master' }

      let!(:pipeline) do
        create(:ci_pipeline,
               source: :merge_request_event,
               project: pipeline_project,
               ref: source_branch,
               merge_request: merge_request)
      end

      let(:merge_request) do
        create(:merge_request,
               source_project: pipeline_project,
               source_branch: source_branch,
               target_project: project,
               target_branch: target_branch)
      end

      it 'returns an associated merge request' do
        expect(pipeline.all_merge_requests).to eq([merge_request])
      end

      context 'when there is another merge request pipeline that targets a different branch' do
        let(:target_branch_2) { 'merge-test' }

        let!(:pipeline_2) do
          create(:ci_pipeline,
                 source: :merge_request_event,
                 project: pipeline_project,
                 ref: source_branch,
                 merge_request: merge_request_2)
        end

        let(:merge_request_2) do
          create(:merge_request,
                 source_project: pipeline_project,
                 source_branch: source_branch,
                 target_project: project,
                 target_branch: target_branch_2)
        end

        it 'does not return an associated merge request' do
          expect(pipeline.all_merge_requests).not_to include(merge_request_2)
        end
      end
    end
  end

  it_behaves_like 'a method that returns all merge requests for a given pipeline' do
    let(:pipeline_project) { project }
  end

  context 'for a fork' do
    let(:fork) { fork_project(project) }

    it_behaves_like 'a method that returns all merge requests for a given pipeline' do
      let(:pipeline_project) { fork }
    end
  end
end
|
|
|
|
|
|
|
|
describe '#stuck?' do
  before do
    create(:ci_build, :pending, pipeline: pipeline)
  end

  context 'when pipeline is stuck' do
    it 'is stuck' do
      expect(pipeline).to be_stuck
    end
  end

  context 'when pipeline is not stuck' do
    before do
      # An online instance runner means pending builds can be picked up.
      create(:ci_runner, :instance, :online)
    end

    it 'is not stuck' do
      expect(pipeline).not_to be_stuck
    end
  end
end
|
|
|
|
|
|
|
|
describe '#has_yaml_errors?' do
  context 'when yaml_errors is set' do
    before do
      pipeline.yaml_errors = 'File not found'
    end

    it 'returns true if yaml_errors is set' do
      expect(pipeline).to have_yaml_errors
      # Assert the full message; the previous truncated substring
      # ('File not foun') was a typo that weakened this expectation.
      expect(pipeline.yaml_errors).to include('File not found')
    end
  end

  it 'returns false if yaml_errors is not set' do
    expect(pipeline).not_to have_yaml_errors
  end
end
|
|
|
|
|
|
|
|
describe 'notifications when pipeline success or failed' do
  let(:project) { create(:project, :repository) }

  let(:pipeline) do
    create(:ci_pipeline,
           project: project,
           sha: project.commit('master').sha,
           user: project.owner)
  end

  before do
    project.add_developer(pipeline.user)

    # Opt the user into custom notifications for both pipeline outcomes.
    pipeline.user.global_notification_setting
      .update(level: 'custom', failed_pipeline: true, success_pipeline: true)

    perform_enqueued_jobs do
      pipeline.enqueue
      pipeline.run
    end
  end

  shared_examples 'sending a notification' do
    it 'sends an email', :sidekiq_might_not_need_inline do
      should_only_email(pipeline.user, kind: :bcc)
    end
  end

  shared_examples 'not sending any notification' do
    it 'does not send any email' do
      should_not_email_anyone
    end
  end

  shared_examples 'enqueues the notification worker' do
    it 'enqueues PipelineUpdateCiRefStatusWorker' do
      expect(PipelineUpdateCiRefStatusWorker).to receive(:perform_async).with(pipeline.id)
      expect(PipelineNotificationWorker).not_to receive(:perform_async).with(pipeline.id)

      pipeline.succeed
    end

    context 'when ci_pipeline_fixed_notifications is disabled' do
      before do
        stub_feature_flags(ci_pipeline_fixed_notifications: false)
      end

      it 'enqueues PipelineNotificationWorker' do
        expect(PipelineUpdateCiRefStatusWorker).not_to receive(:perform_async).with(pipeline.id)
        expect(PipelineNotificationWorker).to receive(:perform_async).with(pipeline.id)

        pipeline.succeed
      end
    end
  end

  context 'with success pipeline' do
    it_behaves_like 'sending a notification' do
      before do
        perform_enqueued_jobs do
          pipeline.succeed
        end
      end
    end

    it_behaves_like 'enqueues the notification worker'
  end

  context 'with failed pipeline' do
    it_behaves_like 'sending a notification' do
      before do
        perform_enqueued_jobs do
          create(:ci_build, :failed, pipeline: pipeline)
          create(:generic_commit_status, :failed, pipeline: pipeline)

          pipeline.drop
        end
      end
    end

    it_behaves_like 'enqueues the notification worker'
  end

  context 'with skipped pipeline' do
    before do
      perform_enqueued_jobs do
        pipeline.skip
      end
    end

    it_behaves_like 'not sending any notification'
  end

  context 'with cancelled pipeline' do
    before do
      perform_enqueued_jobs do
        pipeline.cancel
      end
    end

    it_behaves_like 'not sending any notification'
  end
end
|
2018-03-17 18:26:18 +05:30
|
|
|
|
2020-04-08 14:13:33 +05:30
|
|
|
describe '#find_job_with_archive_artifacts' do
  # Only the latest, non-retried 'rspec' build carrying artifacts should match.
  let!(:old_job) { create(:ci_build, name: 'rspec', retried: true, pipeline: pipeline) }
  let!(:job_without_artifacts) { create(:ci_build, name: 'rspec', pipeline: pipeline) }
  let!(:expected_job) { create(:ci_build, :artifacts, name: 'rspec', pipeline: pipeline) }
  let!(:different_job) { create(:ci_build, name: 'deploy', pipeline: pipeline) }

  subject { pipeline.find_job_with_archive_artifacts('rspec') }

  it 'finds the expected job' do
    expect(subject).to eq(expected_job)
  end
end
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
describe '#latest_builds_with_artifacts' do
  let!(:fresh_build) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
  let!(:stale_build) { create(:ci_build, :success, :expired, :artifacts, pipeline: pipeline) }

  it 'returns an Array' do
    expect(pipeline.latest_builds_with_artifacts).to be_an_instance_of(Array)
  end

  it 'returns the latest builds with non-expired artifacts' do
    expect(pipeline.latest_builds_with_artifacts).to contain_exactly(fresh_build)
  end

  it 'does not return builds with expired artifacts' do
    expect(pipeline.latest_builds_with_artifacts).not_to include(stale_build)
  end

  it 'memoizes the returned relation' do
    # A second call must not issue another query.
    query_count = ActiveRecord::QueryRecorder
      .new { 2.times { pipeline.latest_builds_with_artifacts.to_a } }
      .count

    expect(query_count).to eq(1)
  end
end
|
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
describe '#has_reports?' do
  subject { pipeline.has_reports?(Ci::JobArtifact.test_reports) }

  context 'when pipeline has builds with test reports' do
    before do
      create(:ci_build, :test_reports, pipeline: pipeline, project: project)
    end

    context 'when pipeline status is running' do
      let(:pipeline) { create(:ci_pipeline, :running, project: project) }

      it { is_expected.to be_falsey }
    end

    context 'when pipeline status is success' do
      let(:pipeline) { create(:ci_pipeline, :success, project: project) }

      it { is_expected.to be_truthy }
    end
  end

  context 'when pipeline does not have builds with test reports' do
    before do
      create(:ci_build, :artifacts, pipeline: pipeline, project: project)
    end

    let(:pipeline) { create(:ci_pipeline, :success, project: project) }

    it { is_expected.to be_falsey }
  end

  context 'when retried build has test reports' do
    before do
      create(:ci_build, :retried, :test_reports, pipeline: pipeline, project: project)
    end

    let(:pipeline) { create(:ci_pipeline, :success, project: project) }

    it { is_expected.to be_falsey }
  end
end
|
|
|
|
|
|
|
|
describe '#test_reports' do
  subject { pipeline.test_reports }

  context 'when pipeline has multiple builds with test reports' do
    let!(:build_rspec) { create(:ci_build, :success, name: 'rspec', pipeline: pipeline, project: project) }
    let!(:build_java) { create(:ci_build, :success, name: 'java', pipeline: pipeline, project: project) }

    before do
      create(:ci_job_artifact, :junit, job: build_rspec, project: project)
      create(:ci_job_artifact, :junit_with_ant, job: build_java, project: project)
    end

    it 'returns test reports with collected data' do
      expect(subject.total_count).to be(7)
      expect(subject.success_count).to be(5)
      expect(subject.failed_count).to be(2)
    end

    context 'when builds are retried' do
      let!(:build_rspec) { create(:ci_build, :retried, :success, name: 'rspec', pipeline: pipeline, project: project) }
      let!(:build_java) { create(:ci_build, :retried, :success, name: 'java', pipeline: pipeline, project: project) }

      it 'does not take retried builds into account' do
        expect(subject.total_count).to be(0)
        expect(subject.success_count).to be(0)
        expect(subject.failed_count).to be(0)
      end
    end
  end

  context 'when pipeline does not have any builds with test reports' do
    it 'returns empty test reports' do
      expect(subject.total_count).to be(0)
    end
  end
end
|
|
|
|
|
2020-03-13 15:44:24 +05:30
|
|
|
describe '#test_reports_count', :use_clean_rails_memory_store_caching do
  subject { pipeline.test_reports }

  context 'when pipeline has multiple builds with test reports' do
    let!(:build_rspec) { create(:ci_build, :success, name: 'rspec', pipeline: pipeline, project: project) }
    let!(:build_java) { create(:ci_build, :success, name: 'java', pipeline: pipeline, project: project) }

    before do
      create(:ci_job_artifact, :junit, job: build_rspec, project: project)
      create(:ci_job_artifact, :junit_with_ant, job: build_java, project: project)
    end

    it 'returns test report count equal to test reports total_count' do
      expect(subject.total_count).to eq(7)
      expect(subject.total_count).to eq(pipeline.test_reports_count)
    end

    it 'reads from cache when records are cached' do
      expect(Rails.cache.fetch(['project', project.id, 'pipeline', pipeline.id, 'test_reports_count'], force: false)).to be_nil

      pipeline.test_reports_count

      # A warm cache means no further DB queries.
      expect(ActiveRecord::QueryRecorder.new { pipeline.test_reports_count }.count).to eq(0)
    end
  end

  context 'when pipeline does not have any builds with test reports' do
    it 'returns empty test report count' do
      expect(subject.total_count).to eq(0)
      expect(subject.total_count).to eq(pipeline.test_reports_count)
    end
  end
end
|
|
|
|
|
2020-04-08 14:13:33 +05:30
|
|
|
describe '#coverage_reports' do
  subject { pipeline.coverage_reports }

  context 'when pipeline has multiple builds with coverage reports' do
    let!(:build_rspec) { create(:ci_build, :success, name: 'rspec', pipeline: pipeline, project: project) }
    let!(:build_golang) { create(:ci_build, :success, name: 'golang', pipeline: pipeline, project: project) }

    before do
      create(:ci_job_artifact, :cobertura, job: build_rspec, project: project)
      create(:ci_job_artifact, :coverage_gocov_xml, job: build_golang, project: project)
    end

    it 'returns coverage reports with collected data' do
      expect(subject.files.keys).to match_array([
        "auth/token.go",
        "auth/rpccredentials.go",
        "app/controllers/abuse_reports_controller.rb"
      ])
    end

    context 'when builds are retried' do
      let!(:build_rspec) { create(:ci_build, :retried, :success, name: 'rspec', pipeline: pipeline, project: project) }
      let!(:build_golang) { create(:ci_build, :retried, :success, name: 'golang', pipeline: pipeline, project: project) }

      it 'does not take retried builds into account' do
        expect(subject.files).to eql({})
      end
    end
  end

  context 'when pipeline does not have any builds with coverage reports' do
    it 'returns empty coverage reports' do
      expect(subject.files).to eql({})
    end
  end
end
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
describe '#total_size' do
  let!(:build_job1) { create(:ci_build, pipeline: pipeline, stage_idx: 0) }
  let!(:build_job2) { create(:ci_build, pipeline: pipeline, stage_idx: 0) }
  let!(:test_job_failed_and_retried) { create(:ci_build, :failed, :retried, pipeline: pipeline, stage_idx: 1) }
  let!(:second_test_job) { create(:ci_build, pipeline: pipeline, stage_idx: 1) }
  let!(:deploy_job) { create(:ci_build, pipeline: pipeline, stage_idx: 2) }

  it 'returns all jobs (including failed and retried)' do
    expect(pipeline.total_size).to eq(5)
  end
end
|
2018-11-20 20:47:30 +05:30
|
|
|
|
|
|
|
describe '#status' do
  context 'when transitioning to failed' do
    context 'when pipeline has autodevops as source' do
      let(:pipeline) { create(:ci_pipeline, :running, :auto_devops_source) }

      it 'calls autodevops disable service' do
        expect(AutoDevops::DisableWorker).to receive(:perform_async).with(pipeline.id)

        pipeline.drop
      end
    end

    context 'when pipeline has other source' do
      let(:pipeline) { create(:ci_pipeline, :running, :repository_source) }

      it 'does not call auto devops disable service' do
        expect(AutoDevops::DisableWorker).not_to receive(:perform_async)

        pipeline.drop
      end
    end
  end
end
|
2018-12-05 23:21:45 +05:30
|
|
|
|
|
|
|
describe '#default_branch?' do
  let(:default_branch) { 'master' }

  subject { pipeline.default_branch? }

  before do
    allow(project).to receive(:default_branch).and_return(default_branch)
  end

  context 'when pipeline ref is the default branch of the project' do
    let(:pipeline) do
      build(:ci_empty_pipeline, status: :created, project: project, ref: default_branch)
    end

    it "returns true" do
      expect(subject).to be_truthy
    end
  end

  context 'when pipeline ref is not the default branch of the project' do
    let(:pipeline) do
      build(:ci_empty_pipeline, status: :created, project: project, ref: 'another_branch')
    end

    it "returns false" do
      expect(subject).to be_falsey
    end
  end
end
|
2019-07-31 22:56:46 +05:30
|
|
|
|
|
|
|
describe '#find_stage_by_name' do
  let(:pipeline) { create(:ci_pipeline) }
  let(:stage_name) { 'test' }

  let(:stage) do
    create(:ci_stage_entity,
           pipeline: pipeline,
           project: pipeline.project,
           name: 'test')
  end

  before do
    create_list(:ci_build, 2, pipeline: pipeline, stage: stage.name)
  end

  subject { pipeline.find_stage_by_name!(stage_name) }

  context 'when stage exists' do
    it { is_expected.to eq(stage) }
  end

  context 'when stage does not exist' do
    let(:stage_name) { 'build' }

    it 'raises an ActiveRecord exception' do
      expect do
        subject
      end.to raise_exception(ActiveRecord::RecordNotFound)
    end
  end
end
|
2019-09-30 21:07:59 +05:30
|
|
|
|
|
|
|
describe '#error_messages' do
  subject { pipeline.error_messages }

  before do
    # Trigger validations so error messages are populated.
    pipeline.valid?
  end

  context 'when pipeline has errors' do
    let(:pipeline) { build(:ci_pipeline, sha: nil, ref: nil) }

    it 'returns the full error messages' do
      is_expected.to eq("Sha can't be blank and Ref can't be blank")
    end
  end

  context 'when pipeline does not have errors' do
    let(:pipeline) { build(:ci_pipeline) }

    it 'returns empty string' do
      is_expected.to be_empty
    end
  end
end
|
2020-03-13 15:44:24 +05:30
|
|
|
|
2020-04-08 14:13:33 +05:30
|
|
|
describe '#created_successfully?' do
  subject { pipeline.created_successfully? }

  context 'when pipeline is not persisted' do
    let(:pipeline) { build(:ci_pipeline) }

    it { is_expected.to be_falsey }
  end

  context 'when pipeline is persisted' do
    context 'when pipeline has failure reasons' do
      let(:pipeline) { create(:ci_pipeline, failure_reason: :config_error) }

      it { is_expected.to be_falsey }
    end

    context 'when pipeline has no failure reasons' do
      let(:pipeline) { create(:ci_pipeline, failure_reason: nil) }

      it { is_expected.to be_truthy }
    end
  end
end
|
|
|
|
|
2020-03-13 15:44:24 +05:30
|
|
|
describe '#parent_pipeline' do
  let(:project) { create(:project) }
  let(:pipeline) { create(:ci_pipeline, project: project) }

  context 'when pipeline is triggered by a pipeline from the same project' do
    let(:upstream_pipeline) { create(:ci_pipeline, project: pipeline.project) }

    before do
      create(:ci_sources_pipeline,
             source_pipeline: upstream_pipeline,
             source_project: project,
             pipeline: pipeline,
             project: project)
    end

    it 'returns the parent pipeline' do
      expect(pipeline.parent_pipeline).to eq(upstream_pipeline)
    end

    it 'is child' do
      expect(pipeline).to be_child
    end
  end

  context 'when pipeline is triggered by a pipeline from another project' do
    let(:upstream_pipeline) { create(:ci_pipeline) }

    before do
      create(:ci_sources_pipeline,
             source_pipeline: upstream_pipeline,
             source_project: upstream_pipeline.project,
             pipeline: pipeline,
             project: project)
    end

    it 'returns nil' do
      expect(pipeline.parent_pipeline).to be_nil
    end

    it 'is not child' do
      expect(pipeline).not_to be_child
    end
  end

  context 'when pipeline is not triggered by a pipeline' do
    it 'returns nil' do
      expect(pipeline.parent_pipeline).to be_nil
    end

    it 'is not child' do
      expect(pipeline).not_to be_child
    end
  end
end
|
|
|
|
|
|
|
|
describe '#child_pipelines' do
  let(:project) { create(:project) }
  let(:pipeline) { create(:ci_pipeline, project: project) }

  context 'when pipeline triggered other pipelines on same project' do
    let(:downstream_pipeline) { create(:ci_pipeline, project: pipeline.project) }

    before do
      create(:ci_sources_pipeline,
             source_pipeline: pipeline,
             source_project: pipeline.project,
             pipeline: downstream_pipeline,
             project: pipeline.project)
    end

    it 'returns the child pipelines' do
      expect(pipeline.child_pipelines).to eq [downstream_pipeline]
    end

    it 'is parent' do
      expect(pipeline).to be_parent
    end
  end

  context 'when pipeline triggered other pipelines on another project' do
    let(:downstream_pipeline) { create(:ci_pipeline) }

    before do
      create(:ci_sources_pipeline,
             source_pipeline: pipeline,
             source_project: pipeline.project,
             pipeline: downstream_pipeline,
             project: downstream_pipeline.project)
    end

    it 'returns empty array' do
      expect(pipeline.child_pipelines).to be_empty
    end

    it 'is not parent' do
      expect(pipeline).not_to be_parent
    end
  end

  context 'when pipeline did not trigger any pipelines' do
    it 'returns empty array' do
      expect(pipeline.child_pipelines).to be_empty
    end

    it 'is not parent' do
      expect(pipeline).not_to be_parent
    end
  end
end
|
|
|
|
|
|
|
|
describe 'upstream status interactions' do
  context 'when a pipeline has an upstream status' do
    context 'when an upstream status is a bridge' do
      let(:bridge) { create(:ci_bridge, status: :pending) }

      before do
        create(:ci_sources_pipeline, pipeline: pipeline, source_job: bridge)
      end

      describe '#bridge_triggered?' do
        it 'is a pipeline triggered by a bridge' do
          expect(pipeline).to be_bridge_triggered
        end
      end

      describe '#source_job' do
        it 'has a correct source job' do
          expect(pipeline.source_job).to eq bridge
        end
      end

      describe '#source_bridge' do
        it 'has a correct bridge source' do
          expect(pipeline.source_bridge).to eq bridge
        end
      end

      describe '#update_bridge_status!' do
        it 'can update bridge status if it is running' do
          pipeline.update_bridge_status!

          expect(bridge.reload).to be_success
        end

        it 'can not update bridge status if is not active' do
          bridge.success!

          expect { pipeline.update_bridge_status! }.not_to change { bridge.status }
        end
      end
    end

    context 'when an upstream status is a build' do
      let(:build) { create(:ci_build) }

      before do
        create(:ci_sources_pipeline, pipeline: pipeline, source_job: build)
      end

      describe '#bridge_triggered?' do
        it 'is a pipeline that has not been triggered by a bridge' do
          expect(pipeline).not_to be_bridge_triggered
        end
      end

      describe '#source_job' do
        it 'has a correct source job' do
          expect(pipeline.source_job).to eq build
        end
      end

      describe '#source_bridge' do
        it 'does not have a bridge source' do
          expect(pipeline.source_bridge).to be_nil
        end
      end

      describe '#update_bridge_status!' do
        it 'tracks an ArgumentError and does not update upstream job status' do
          expect(Gitlab::ErrorTracking)
            .to receive(:track_exception)
            .with(instance_of(ArgumentError), pipeline_id: pipeline.id)

          pipeline.update_bridge_status!
        end
      end
    end
  end
end
|
2016-06-16 23:09:34 +05:30
|
|
|
end
|