debian-mirror-gitlab/spec/services/ci/create_pipeline_service_spec.rb

# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness, :clean_gitlab_redis_cache, feature_category: :continuous_integration do
include ProjectForksHelper
let_it_be_with_refind(:project) { create(:project, :repository) }
let_it_be_with_reload(:user) { project.first_owner }
let(:ref_name) { 'refs/heads/master' }
before do
stub_ci_pipeline_to_return_yaml_file
end
describe '#execute' do
# rubocop:disable Metrics/ParameterLists
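# Builds the service params from the given keyword arguments, runs
# Ci::CreatePipelineService#execute for the given source, and yields the
# resulting pipeline to an optional block.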
def execute_service(
source: :push,
before: '00000000',
after: project.commit.id,
ref: ref_name,
trigger_request: nil,
variables_attributes: nil,
merge_request: nil,
external_pull_request: nil,
push_options: nil,
source_sha: nil,
target_sha: nil,
save_on_errors: true)
params = { ref: ref,
before: before,
after: after,
variables_attributes: variables_attributes,
push_options: push_options,
source_sha: source_sha,
target_sha: target_sha }
described_class.new(project, user, params).execute(source,
save_on_errors: save_on_errors,
trigger_request: trigger_request,
merge_request: merge_request,
external_pull_request: external_pull_request) do |pipeline|
yield(pipeline) if block_given?
end
end
# rubocop:enable Metrics/ParameterLists
context 'performance' do
it_behaves_like 'pipelines are created without N+1 SQL queries' do
let(:config1) do
<<~YAML
job1:
stage: build
script: exit 0
job2:
stage: test
script: exit 0
YAML
end
let(:config2) do
<<~YAML
job1:
stage: build
script: exit 0
job2:
stage: test
script: exit 0
job3:
stage: deploy
script: exit 0
YAML
end
let(:accepted_n_plus_ones) do
1 + # SELECT "ci_instance_variables"
1 + # INSERT INTO "ci_stages"
1 + # SELECT "ci_builds".* FROM "ci_builds"
1 + # INSERT INTO "ci_builds"
1 + # INSERT INTO "ci_builds_metadata"
1 # SELECT "taggings".* FROM "taggings"
end
end
end
context 'valid params' do
let(:pipeline) { execute_service.payload }
let(:pipeline_on_previous_commit) do
execute_service(
after: previous_commit_sha_from_ref('master')
).payload
end
it 'responds with success' do
expect(execute_service).to be_success
end
it 'creates a pipeline' do
expect(pipeline).to be_kind_of(Ci::Pipeline)
expect(pipeline).to be_valid
expect(pipeline).to be_persisted
expect(pipeline).to be_push
expect(pipeline).to eq(project.ci_pipelines.last)
expect(pipeline).to have_attributes(user: user)
expect(pipeline).to have_attributes(status: 'created')
expect(pipeline.iid).not_to be_nil
expect(pipeline.repository_source?).to be true
expect(pipeline.builds.first).to be_kind_of(Ci::Build)
expect(pipeline.yaml_errors).not_to be_present
end
it 'increments the prometheus counter' do
counter = spy('pipeline created counter')
allow(Gitlab::Ci::Pipeline::Metrics)
.to receive(:pipelines_created_counter).and_return(counter)
pipeline
expect(counter).to have_received(:increment)
end
it 'records pipeline size in a prometheus histogram' do
histogram = spy('pipeline size histogram')
allow(Gitlab::Ci::Pipeline::Metrics)
.to receive(:pipeline_size_histogram).and_return(histogram)
execute_service
expect(histogram).to have_received(:observe)
.with({ source: 'push', plan: project.actual_plan_name }, 5)
end
it 'tracks included template usage' do
expect_next_instance_of(Gitlab::Ci::Pipeline::Chain::TemplateUsage) do |instance|
expect(instance).to receive(:perform!)
end
execute_service
end
context 'when merge requests already exist for this source branch' do
let!(:merge_request_1) do
create(:merge_request, source_branch: 'feature', target_branch: "master", source_project: project)
end
let!(:merge_request_2) do
create(:merge_request, source_branch: 'feature', target_branch: "v1.1.0", source_project: project)
end
context 'when the head pipeline sha equals merge request sha' do
it 'updates head pipeline of each merge request', :sidekiq_might_not_need_inline do
head_pipeline = execute_service(ref: 'feature', after: nil).payload
expect(merge_request_1.reload.head_pipeline).to eq(head_pipeline)
expect(merge_request_2.reload.head_pipeline).to eq(head_pipeline)
end
end
end
context 'auto-cancel enabled' do
before do
project.update!(auto_cancel_pending_pipelines: 'enabled')
end
it 'does not cancel HEAD pipeline' do
pipeline
pipeline_on_previous_commit
expect(pipeline.reload).to have_attributes(status: 'created', auto_canceled_by_id: nil)
end
it 'auto cancels pending non-HEAD pipelines', :sidekiq_might_not_need_inline do
pipeline_on_previous_commit
pipeline
expect(pipeline_on_previous_commit.reload).to have_attributes(status: 'canceled', auto_canceled_by_id: pipeline.id)
end
it 'cancels running outdated pipelines', :sidekiq_inline do
pipeline_on_previous_commit.reload.run
head_pipeline = execute_service.payload
expect(pipeline_on_previous_commit.reload).to have_attributes(status: 'canceled', auto_canceled_by_id: head_pipeline.id)
end
it 'cancels created outdated pipelines', :sidekiq_might_not_need_inline do
pipeline_on_previous_commit.update!(status: 'created')
pipeline
expect(pipeline_on_previous_commit.reload).to have_attributes(status: 'canceled', auto_canceled_by_id: pipeline.id)
end
it 'does not cancel pipelines from the other branches' do
new_pipeline = execute_service(
ref: 'refs/heads/feature',
after: previous_commit_sha_from_ref('feature')
).payload
pipeline
expect(new_pipeline.reload).to have_attributes(status: 'created', auto_canceled_by_id: nil)
end
context 'when the interruptible attribute is' do
context 'not defined' do
before do
config = YAML.dump(rspec: { script: 'echo' })
stub_ci_pipeline_yaml_file(config)
end
it 'is cancelable' do
pipeline = execute_service.payload
expect(pipeline.builds.find_by(name: 'rspec').interruptible).to be_nil
end
end
context 'set to true' do
before do
config = YAML.dump(rspec: { script: 'echo', interruptible: true })
stub_ci_pipeline_yaml_file(config)
end
it 'is cancelable' do
pipeline = execute_service.payload
expect(pipeline.builds.find_by(name: 'rspec').interruptible).to be_truthy
end
end
context 'set to false' do
before do
config = YAML.dump(rspec: { script: 'echo', interruptible: false })
stub_ci_pipeline_yaml_file(config)
end
it 'is not cancelable' do
pipeline = execute_service.payload
expect(pipeline.builds.find_by(name: 'rspec').interruptible).to be_falsy
end
end
end
context 'interruptible builds' do
before do
stub_ci_pipeline_yaml_file(YAML.dump(config))
end
let(:config) do
{
stages: %w[stage1 stage2 stage3 stage4],
build_1_1: {
stage: 'stage1',
script: 'echo',
interruptible: true
},
build_1_2: {
stage: 'stage1',
script: 'echo',
interruptible: true
},
build_2_1: {
stage: 'stage2',
script: 'echo',
when: 'delayed',
start_in: '10 minutes',
interruptible: true
},
build_3_1: {
stage: 'stage3',
script: 'echo',
interruptible: false
},
build_4_1: {
stage: 'stage4',
script: 'echo'
}
}
end
it 'properly configures interruptible status' do
interruptible_status =
pipeline_on_previous_commit
.builds
.joins(:metadata)
.pluck(:name, "#{Ci::BuildMetadata.quoted_table_name}.interruptible")
expect(interruptible_status).to contain_exactly(
['build_1_1', true],
['build_1_2', true],
['build_2_1', true],
['build_3_1', false],
['build_4_1', nil]
)
end
context 'when only interruptible builds are running' do
context 'when build marked explicitly by interruptible is running' do
it 'cancels running outdated pipelines', :sidekiq_might_not_need_inline do
pipeline_on_previous_commit
.builds
.find_by_name('build_1_2')
.run!
pipeline
expect(pipeline_on_previous_commit.reload).to have_attributes(
status: 'canceled', auto_canceled_by_id: pipeline.id)
end
end
context 'when build that is not marked as interruptible is running' do
it 'cancels running outdated pipelines', :sidekiq_might_not_need_inline do
build_2_1 = pipeline_on_previous_commit
.builds.find_by_name('build_2_1')
build_2_1.enqueue!
build_2_1.reset.run!
pipeline
expect(pipeline_on_previous_commit.reload).to have_attributes(
status: 'canceled', auto_canceled_by_id: pipeline.id)
end
end
end
context 'when an uninterruptible build is running' do
it 'does not cancel running outdated pipelines', :sidekiq_inline do
build_3_1 = pipeline_on_previous_commit
.builds.find_by_name('build_3_1')
build_3_1.enqueue!
build_3_1.reset.run!
pipeline
expect(pipeline_on_previous_commit.reload).to have_attributes(
status: 'running', auto_canceled_by_id: nil)
end
end
context 'when a build is waiting on an interruptible scheduled task' do
it 'cancels running outdated pipelines', :sidekiq_might_not_need_inline do
allow(Ci::BuildScheduleWorker).to receive(:perform_at)
pipeline_on_previous_commit
.builds
.find_by_name('build_2_1')
.schedule!
pipeline
expect(pipeline_on_previous_commit.reload).to have_attributes(
status: 'canceled', auto_canceled_by_id: pipeline.id)
end
end
context 'when an uninterruptible build has finished' do
it 'does not cancel running outdated pipelines', :sidekiq_might_not_need_inline do
pipeline_on_previous_commit
.builds
.find_by_name('build_3_1')
.success!
pipeline
expect(pipeline_on_previous_commit.reload).to have_attributes(
status: 'running', auto_canceled_by_id: nil)
end
end
end
end
context 'auto-cancel disabled' do
before do
project.update!(auto_cancel_pending_pipelines: 'disabled')
end
it 'does not auto cancel created non-HEAD pipelines' do
pipeline_on_previous_commit
pipeline
expect(pipeline_on_previous_commit.reload)
.to have_attributes(status: 'created', auto_canceled_by_id: nil)
end
end
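# Returns the SHA of the parent of the commit at the given ref, so examples
# can create a pipeline for the previous commit on that branch.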
def previous_commit_sha_from_ref(ref)
project.commit(ref).parent.sha
end
end
context "skip tag if there is no build for it" do
it "creates commit if there is appropriate job" do
expect(execute_service.payload).to be_persisted
end
it "creates commit if there is no appropriate job but deploy job has right ref setting" do
config = YAML.dump({ deploy: { script: "ls", only: ["master"] } })
stub_ci_pipeline_yaml_file(config)
expect(execute_service.payload).to be_persisted
end
end
it 'skips creating pipeline for refs without .gitlab-ci.yml', :aggregate_failures do
stub_ci_pipeline_yaml_file(nil)
response = execute_service
expect(response).to be_error
expect(response.message).to eq('Missing CI config file')
expect(response.payload).not_to be_persisted
expect(Ci::Pipeline.count).to eq(0)
expect(Onboarding::PipelineCreatedWorker).not_to receive(:perform_async)
end
shared_examples 'a failed pipeline' do
it 'creates failed pipeline' do
stub_ci_pipeline_yaml_file(ci_yaml)
pipeline = execute_service.payload
expect(pipeline).to be_persisted
expect(pipeline.builds.any?).to be false
expect(pipeline.status).to eq('failed')
expect(pipeline.yaml_errors).not_to be_nil
end
end
context 'config evaluation' do
context 'when config is in a file in repository' do
before do
content = YAML.dump(rspec: { script: 'echo' })
stub_ci_pipeline_yaml_file(content)
end
it 'pulls it from the repository' do
pipeline = execute_service.payload
expect(pipeline).to be_repository_source
expect(pipeline.builds.map(&:name)).to eq ['rspec']
end
end
context 'when config is from Auto-DevOps' do
before do
stub_ci_pipeline_yaml_file(nil)
allow_any_instance_of(Project).to receive(:auto_devops_enabled?).and_return(true)
create(:project_auto_devops, project: project)
end
it 'pulls it from Auto-DevOps' do
pipeline = execute_service.payload
expect(pipeline).to be_auto_devops_source
expect(pipeline.builds.map(&:name)).to match_array(%w[brakeman-sast build code_quality container_scanning secret_detection semgrep-sast test])
end
end
context 'when config is not found' do
before do
stub_ci_pipeline_yaml_file(nil)
end
it 'responds with error message', :aggregate_failures do
response = execute_service
2019-12-26 22:10:19 +05:30
expect(response).to be_error
expect(response.message).to eq('Missing CI config file')
expect(response.payload).not_to be_persisted
end
end
context 'when an unexpected error is raised' do
before do
expect(Gitlab::Ci::YamlProcessor).to receive(:new)
.and_raise(RuntimeError, 'undefined failure')
end
it 'saves error in pipeline' do
pipeline = execute_service.payload
expect(pipeline.yaml_errors).to include('Undefined error')
end
it 'logs error' do
expect(Gitlab::ErrorTracking).to receive(:track_exception).and_call_original
execute_service
end
end
end
context 'when yaml is invalid' do
let(:ci_yaml) { 'invalid: file: fiile' }
let(:message) { 'Message' }
it_behaves_like 'a failed pipeline'
it 'increments the error metric' do
stub_ci_pipeline_yaml_file(ci_yaml)
counter = Gitlab::Metrics.counter(:gitlab_ci_pipeline_failure_reasons, 'desc')
expect { execute_service }.to change { counter.get(reason: 'config_error') }.by(1)
end
context 'when receive git commit' do
before do
allow_any_instance_of(Ci::Pipeline).to receive(:git_commit_message) { message }
end
it_behaves_like 'a failed pipeline'
end
context 'when config has ports' do
context 'in the main image' do
let(:ci_yaml) do
<<-EOS
image:
name: image:1.0
ports:
- 80
EOS
end
it_behaves_like 'a failed pipeline'
end
context 'in the job image' do
let(:ci_yaml) do
<<-EOS
image: image:1.0
test:
script: rspec
image:
name: image:1.0
ports:
- 80
EOS
end
it_behaves_like 'a failed pipeline'
end
context 'in the service' do
let(:ci_yaml) do
<<-EOS
image: image:1.0
test:
script: rspec
image: image:1.0
services:
- name: test
ports:
- 80
EOS
end
it_behaves_like 'a failed pipeline'
end
end
end
context 'when an unexpected error is raised' do
before do
expect(Gitlab::Ci::YamlProcessor).to receive(:new)
.and_raise(RuntimeError, 'undefined failure')
end
it 'saves error in pipeline' do
pipeline = execute_service.payload
expect(pipeline.yaml_errors).to include('Undefined error')
end
it 'logs error' do
expect(Gitlab::ErrorTracking).to receive(:track_exception).and_call_original
execute_service
end
end
context 'when commit contains a [ci skip] directive' do
shared_examples 'creating a pipeline' do
it 'does not skip pipeline creation' do
pipeline = execute_service.payload
2016-09-13 17:45:13 +05:30
expect(pipeline).to be_persisted
expect(pipeline.builds.first.name).to eq("rspec")
end
2016-09-13 17:45:13 +05:30
end
shared_examples 'skipping a pipeline' do
it 'skips pipeline creation' do
pipeline = execute_service.payload
2016-09-13 17:45:13 +05:30
expect(pipeline).to be_persisted
expect(pipeline.builds.any?).to be false
expect(pipeline.status).to eq("skipped")
end
end
before do
allow_any_instance_of(Ci::Pipeline).to receive(:git_commit_message) { commit_message }
end
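# Commit messages that are expected to skip pipeline creation.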
skip_commit_messages = [
"some message[ci skip]",
"some message[skip ci]",
"some message[CI SKIP]",
"some message[SKIP CI]",
"some message[ci_skip]",
"some message[skip_ci]",
"some message[ci-skip]",
"some message[skip-ci]"
]
skip_commit_messages.each do |skip_commit_message|
context "when the commit message is #{skip_commit_message}" do
let(:commit_message) { skip_commit_message }
it_behaves_like 'skipping a pipeline'
end
end
context 'when commit message does not contain [ci skip] nor [skip ci]' do
let(:commit_message) { 'some message' }
it_behaves_like 'creating a pipeline'
end
context 'when commit message is nil' do
let(:commit_message) { nil }
it_behaves_like 'creating a pipeline'
end
context 'when there is [ci skip] tag in commit message and yaml is invalid' do
let(:commit_message) { 'some message [ci skip]' }
let(:ci_yaml) { 'invalid: file: fiile' }
before do
stub_ci_pipeline_yaml_file(ci_yaml)
end
it_behaves_like 'skipping a pipeline'
end
end
context 'when push options contain ci.skip' do
let(:push_options) do
{ 'ci' => { 'skip' => true } }
end
it 'creates a pipeline in the skipped state' do
pipeline = execute_service(push_options: push_options).payload
# TODO: DRY these up with "skips builds creation if the commit message"
expect(pipeline).to be_persisted
expect(pipeline.builds.any?).to be false
expect(pipeline.status).to eq("skipped")
end
end
context 'when there are no jobs for this pipeline' do
before do
config = YAML.dump({ test: { script: 'ls', only: ['feature'] } })
stub_ci_pipeline_yaml_file(config)
end
it 'does not create a new pipeline', :aggregate_failures do
result = execute_service
expect(result).to be_error
expect(result.message).to eq('Pipeline will not run for the selected trigger. ' \
'The rules configuration prevented any jobs from being added to the pipeline.')
expect(result.payload).not_to be_persisted
expect(Ci::Build.all).to be_empty
expect(Ci::Pipeline.count).to eq(0)
end
describe '#iid' do
let(:internal_id) do
InternalId.find_by(project_id: project.id, usage: :ci_pipelines)
end
before do
expect_any_instance_of(Ci::Pipeline).to receive(:ensure_project_iid!)
.and_call_original
end
it 'rewinds iid', :aggregate_failures do
result = execute_service
expect(result).to be_error
expect(result.payload).not_to be_persisted
expect(internal_id.last_value).to eq(0)
end
end
end
context 'when the configuration includes ID tokens' do
it 'creates variables for the ID tokens' do
config = YAML.dump({
job_with_id_tokens: {
script: 'ls',
id_tokens: {
'TEST_ID_TOKEN' => {
aud: 'https://gitlab.com'
}
}
}
})
stub_ci_pipeline_yaml_file(config)
result = execute_service.payload
expect(result).to be_persisted
expect(result.builds.first.id_tokens).to eq({
'TEST_ID_TOKEN' => { 'aud' => 'https://gitlab.com' }
})
end
end
context 'with manual actions' do
before do
config = YAML.dump({ deploy: { script: 'ls', when: 'manual' } })
stub_ci_pipeline_yaml_file(config)
end
it 'creates a pipeline with manual actions', :sidekiq_inline do
result = execute_service.payload
expect(result).to be_persisted
expect(result.manual_actions).not_to be_empty
end
end
context 'with environment' do
before do
config = YAML.dump(
deploy: {
environment: { name: "review/$CI_COMMIT_REF_NAME" },
script: 'ls',
tags: ['hello']
})
stub_ci_pipeline_yaml_file(config)
end
it 'creates the environment with tags', :sidekiq_inline do
result = execute_service.payload
expect(result).to be_persisted
expect(Environment.find_by(name: "review/master")).to be_present
expect(result.builds.first.tag_list).to contain_exactly('hello')
expect(result.builds.first.deployment).to be_persisted
expect(result.builds.first.deployment.deployable).to be_a(Ci::Build)
end
end
context 'with environment with auto_stop_in' do
before do
config = YAML.dump(
deploy: {
environment: { name: "review/$CI_COMMIT_REF_NAME", auto_stop_in: '1 day' },
script: 'ls'
})
stub_ci_pipeline_yaml_file(config)
end
it 'creates the environment with auto stop in' do
result = execute_service.payload
expect(result).to be_persisted
expect(result.builds.first.options[:environment][:auto_stop_in]).to eq('1 day')
end
end
context 'with environment name including persisted variables' do
before do
config = YAML.dump(
deploy: {
environment: { name: "review/id1$CI_PIPELINE_ID/id2$CI_JOB_ID" },
script: 'ls'
}
)
stub_ci_pipeline_yaml_file(config)
end
it 'skips persisted variables in environment name' do
result = execute_service.payload
expect(result).to be_persisted
expect(Environment.find_by(name: "review/id1/id2")).to be_present
end
end
context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
before do
stub_feature_flags(ci_remove_legacy_predefined_variables: false)
end
context 'with environment name including persisted variables' do
before do
config = YAML.dump(
deploy: {
environment: { name: "review/id1$CI_PIPELINE_ID/id2$CI_BUILD_ID" },
script: 'ls'
}
)
stub_ci_pipeline_yaml_file(config)
end
it 'skips persisted variables in environment name' do
result = execute_service.payload
expect(result).to be_persisted
expect(Environment.find_by(name: "review/id1/id2")).to be_present
end
end
end
context 'environment with Kubernetes configuration' do
let(:kubernetes_namespace) { 'custom-namespace' }
before do
config = YAML.dump(
deploy: {
environment: {
name: "environment-name",
kubernetes: { namespace: kubernetes_namespace }
},
script: 'ls'
}
)
stub_ci_pipeline_yaml_file(config)
end
it 'stores the requested namespace' do
result = execute_service.payload
build = result.builds.first
expect(result).to be_persisted
expect(build.options.dig(:environment, :kubernetes, :namespace)).to eq(kubernetes_namespace)
end
end
context 'when environment with invalid name' do
before do
config = YAML.dump(deploy: { environment: { name: 'name,with,commas' }, script: 'ls' })
stub_ci_pipeline_yaml_file(config)
end
it 'does not create an environment' do
expect do
result = execute_service.payload
expect(result).to be_persisted
end.not_to change { Environment.count }
end
end
context 'when environment with duplicate names' do
let(:ci_yaml) do
{
deploy: { environment: { name: 'production' }, script: 'ls' },
deploy_2: { environment: { name: 'production' }, script: 'ls' }
}
end
before do
stub_ci_pipeline_yaml_file(YAML.dump(ci_yaml))
end
it 'creates a pipeline with the environment', :sidekiq_inline do
result = execute_service.payload
expect(result).to be_persisted
expect(Environment.find_by(name: 'production')).to be_present
expect(result.builds.first.deployment).to be_persisted
expect(result.builds.first.deployment.deployable).to be_a(Ci::Build)
end
end
context 'when builds with auto-retries are configured' do
let(:pipeline) { execute_service.payload }
let(:rspec_job) { pipeline.builds.find_by(name: 'rspec') }
before do
stub_ci_pipeline_yaml_file(YAML.dump({
rspec: { script: 'rspec', retry: retry_value }
}))
rspec_job.update!(options: { retry: retry_value })
end
context 'as an integer' do
let(:retry_value) { 2 }
it 'correctly creates builds with auto-retry value configured' do
expect(pipeline).to be_persisted
end
end
context 'as hash' do
let(:retry_value) { { max: 2, when: 'runner_system_failure' } }
it 'correctly creates builds with auto-retry value configured' do
expect(pipeline).to be_persisted
end
end
end
context 'with resource group' do
context 'when resource group is defined' do
before do
config = YAML.dump(
test: { stage: 'test', script: 'ls', resource_group: resource_group_key }
)
stub_ci_pipeline_yaml_file(config)
end
let(:resource_group_key) { 'iOS' }
it 'persists the association correctly' do
result = execute_service.payload
deploy_job = result.builds.find_by_name!(:test)
resource_group = project.resource_groups.find_by_key!(resource_group_key)
expect(result).to be_persisted
expect(deploy_job.resource_group.key).to eq(resource_group_key)
expect(project.resource_groups.count).to eq(1)
expect(resource_group.processables.count).to eq(1)
expect(resource_group.resources.count).to eq(1)
expect(resource_group.resources.first.processable).to eq(nil)
end
context 'when resource group key includes predefined variables' do
let(:resource_group_key) { '$CI_COMMIT_REF_NAME-$CI_JOB_NAME' }
it 'interpolates the variables into the key correctly' do
result = execute_service.payload
expect(result).to be_persisted
expect(project.resource_groups.exists?(key: 'master-test')).to eq(true)
end
end
end
end
context 'when resource group is defined for review app deployment' do
before do
config = YAML.dump(
review_app: {
stage: 'test',
script: 'deploy',
environment: {
name: 'review/$CI_COMMIT_REF_SLUG',
on_stop: 'stop_review_app'
},
resource_group: '$CI_ENVIRONMENT_NAME'
},
stop_review_app: {
stage: 'test',
script: 'stop',
when: 'manual',
environment: {
name: 'review/$CI_COMMIT_REF_SLUG',
action: 'stop'
},
resource_group: '$CI_ENVIRONMENT_NAME'
}
)
stub_ci_pipeline_yaml_file(config)
end
it 'persists the association correctly' do
result = execute_service.payload
deploy_job = result.builds.find_by_name!(:review_app)
stop_job = result.builds.find_by_name!(:stop_review_app)
expect(result).to be_persisted
expect(deploy_job.resource_group.key).to eq('review/master')
expect(stop_job.resource_group.key).to eq('review/master')
expect(project.resource_groups.count).to eq(1)
end
it 'initializes scoped variables only once for each build' do
# Bypassing `stub_build` hack because it disturbs the expectations below.
allow_next_instances_of(Gitlab::Ci::Build::Context::Build, 2) do |build_context|
allow(build_context).to receive(:variables) { Gitlab::Ci::Variables::Collection.new }
end
expect_next_instances_of(::Ci::Build, 2) do |ci_build|
expect(ci_build).to receive(:scoped_variables).once.and_call_original
end
expect(execute_service.payload).to be_created_successfully
end
end
context 'with timeout' do
context 'when builds with custom timeouts are configured' do
before do
config = YAML.dump(rspec: { script: 'rspec', timeout: '2m 3s' })
stub_ci_pipeline_yaml_file(config)
end
it 'correctly creates builds with custom timeout value configured' do
pipeline = execute_service.payload
expect(pipeline).to be_persisted
expect(pipeline.builds.find_by(name: 'rspec').options[:job_timeout]).to eq 123
end
end
end
context 'with release' do
shared_examples_for 'a successful release pipeline' do
before do
stub_ci_pipeline_yaml_file(YAML.dump(config))
end
it 'is valid config' do
pipeline = execute_service.payload
build = pipeline.builds.first
expect(pipeline).to be_kind_of(Ci::Pipeline)
expect(pipeline).to be_valid
expect(pipeline.yaml_errors).not_to be_present
expect(pipeline).to be_persisted
expect(build).to be_kind_of(Ci::Build)
expect(build.options).to eq(config[:release].except(:stage, :only))
expect(build).to be_persisted
end
end
context 'simple example' do
it_behaves_like 'a successful release pipeline' do
let(:config) do
{
release: {
script: ["make changelog | tee release_changelog.txt"],
release: {
tag_name: "v0.06",
description: "./release_changelog.txt"
}
}
}
end
end
end
context 'example with all release metadata' do
it_behaves_like 'a successful release pipeline' do
let(:config) do
{
release: {
script: ["make changelog | tee release_changelog.txt"],
release: {
name: "Release $CI_TAG_NAME",
tag_name: "v0.06",
description: "./release_changelog.txt",
assets: {
links: [
{
name: "cool-app.zip",
url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip"
},
{
url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.exe"
}
]
}
}
}
}
end
end
end
end
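# Shared examples reused below for both protected branches and protected tags.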
shared_examples 'when ref is protected' do
let(:user) { create(:user) }
context 'when user is developer' do
before do
project.add_developer(user)
end
it 'does not create a pipeline', :aggregate_failures do
expect(execute_service.payload).not_to be_persisted
expect(Ci::Pipeline.count).to eq(0)
end
end
context 'when user is maintainer' do
let(:pipeline) { execute_service.payload }
before do
project.add_maintainer(user)
end
it 'creates a protected pipeline' do
expect(pipeline).to be_persisted
expect(pipeline).to be_protected
expect(Ci::Pipeline.count).to eq(1)
end
end
context 'when trigger belongs to no one' do
let(:user) {}
let(:trigger_request) { create(:ci_trigger_request) }
it 'does not create a pipeline', :aggregate_failures do
response = execute_service(trigger_request: trigger_request)
expect(response).to be_error
expect(response.payload).not_to be_persisted
expect(Ci::Pipeline.count).to eq(0)
end
end
context 'when trigger belongs to a developer' do
let(:user) { create(:user) }
let(:trigger) { create(:ci_trigger, owner: user) }
let(:trigger_request) { create(:ci_trigger_request, trigger: trigger) }
before do
project.add_developer(user)
end
it 'does not create a pipeline', :aggregate_failures do
response = execute_service(trigger_request: trigger_request)
expect(response).to be_error
expect(response.payload).not_to be_persisted
expect(Ci::Pipeline.count).to eq(0)
end
end
context 'when trigger belongs to a maintainer' do
let(:user) { create(:user) }
let(:trigger) { create(:ci_trigger, owner: user) }
let(:trigger_request) { create(:ci_trigger_request, trigger: trigger) }
before do
project.add_maintainer(user)
end
it 'creates a pipeline' do
expect(execute_service(trigger_request: trigger_request).payload)
.to be_persisted
expect(Ci::Pipeline.count).to eq(1)
end
end
end
context 'when ref is a protected branch' do
before do
create(:protected_branch, project: project, name: 'master')
end
it_behaves_like 'when ref is protected'
end
context 'when ref is a protected tag' do
let(:ref_name) { 'refs/tags/v1.0.0' }
before do
create(:protected_tag, project: project, name: '*')
end
it_behaves_like 'when ref is protected'
end
context 'when pipeline is running for a tag' do
before do
config = YAML.dump(
test: { script: 'test', only: ['branches'] },
deploy: { script: 'deploy', only: ['tags'] }
)
stub_ci_pipeline_yaml_file(config)
end
it 'creates a tagged pipeline' do
pipeline = execute_service(ref: 'v1.0.0').payload
expect(pipeline.tag?).to be true
end
end
context 'when pipeline is running for a nonexistant-branch' do
let(:gitlab_ci_yaml) { YAML.dump(test: { script: 'test' }) }
let(:ref_name) { 'refs/heads/nonexistant-branch' }
let(:pipeline) { execute_service.payload }
it 'does not create the pipeline' do
expect(pipeline).not_to be_created_successfully
expect(pipeline.errors[:base]).to eq(['Reference not found'])
end
context 'when there is a tag with that nonexistant-branch' do
# v1.0.0 is on the test repo as a tag
let(:ref_name) { 'refs/heads/v1.0.0' }
it 'does not create the pipeline' do
expect(pipeline).not_to be_created_successfully
expect(pipeline.errors[:base]).to eq(['Reference not found'])
end
end
end
context 'when pipeline is running for a branch with the name of both a branch and a tag' do
let(:gitlab_ci_yaml) { YAML.dump(test: { script: 'test' }) }
# v1.1.0 is on the test repo as branch and tag
let(:ref_name) { 'refs/heads/v1.1.0' }
let(:pipeline) { execute_service.payload }
it 'creates the pipeline for the branch' do
expect(pipeline).to be_created_successfully
expect(pipeline.branch?).to be true
expect(pipeline.tag?).to be false
end
end
context 'when pipeline is running for a tag with the name of both a branch and a tag' do
let(:gitlab_ci_yaml) { YAML.dump(test: { script: 'test' }) }
# v1.1.0 is on the test repo as branch and tag
let(:ref_name) { 'refs/tags/v1.1.0' }
let(:pipeline) { execute_service.payload }
it 'creates the pipeline for the tag' do
expect(pipeline).to be_created_successfully
expect(pipeline.branch?).to be false
expect(pipeline.tag?).to be true
end
end
context 'when pipeline is running for an ambiguous ref' do
let(:gitlab_ci_yaml) { YAML.dump(test: { script: 'test' }) }
# v1.1.0 is on the test repo as branch and tag
let(:ref_name) { 'v1.1.0' }
let(:pipeline) { execute_service.payload }
it 'does not create the pipeline' do
expect(pipeline).not_to be_created_successfully
expect(pipeline.errors[:base]).to eq(['Ref is ambiguous'])
end
end
context 'when pipeline variables are specified' do
subject(:pipeline) { execute_service(variables_attributes: variables_attributes).payload }
context 'with valid pipeline variables' do
let(:variables_attributes) do
[{ key: 'first', secret_value: 'world' },
{ key: 'second', secret_value: 'second_world' }]
end
it 'creates a pipeline with specified variables' do
expect(pipeline.variables.map { |var| var.slice(:key, :secret_value) })
.to eq variables_attributes.map(&:with_indifferent_access)
end
end
context 'with duplicate pipeline variables' do
let(:variables_attributes) do
[{ key: 'hello', secret_value: 'world' },
{ key: 'hello', secret_value: 'second_world' }]
end
it 'fails to create the pipeline' do
expect(pipeline).to be_failed
expect(pipeline.variables).to be_empty
expect(pipeline.errors[:base]).to eq(['Duplicate variable name: hello'])
end
end
context 'with more than one duplicate pipeline variable' do
let(:variables_attributes) do
[{ key: 'hello', secret_value: 'world' },
{ key: 'hello', secret_value: 'second_world' },
{ key: 'single', secret_value: 'variable' },
{ key: 'other', secret_value: 'value' },
{ key: 'other', secret_value: 'other value' }]
end
it 'fails to create the pipeline' do
expect(pipeline).to be_failed
expect(pipeline.variables).to be_empty
expect(pipeline.errors[:base]).to eq(['Duplicate variable names: hello, other'])
end
end
end
context 'when pipeline has a job with environment' do
let(:pipeline) { execute_service.payload }
before do
stub_ci_pipeline_yaml_file(YAML.dump(config))
end
context 'when environment name is valid' do
let(:config) do
{
review_app: {
script: 'deploy',
environment: {
name: 'review/${CI_COMMIT_REF_NAME}',
url: 'http://${CI_COMMIT_REF_SLUG}-staging.example.com'
}
}
}
end
it 'has a job with environment', :sidekiq_inline do
expect(pipeline.builds.count).to eq(1)
expect(pipeline.builds.first.persisted_environment.name).to eq('review/master')
expect(pipeline.builds.first.deployment).to be_created
end
end
context 'when environment name is invalid' do
let(:config) do
{
'job:deploy-to-test-site': {
script: 'deploy',
environment: {
name: '${CI_JOB_NAME}',
url: 'https://$APP_URL'
}
}
}
end
it 'has a job without environment' do
expect(pipeline.builds.count).to eq(1)
expect(pipeline.builds.first.persisted_environment).to be_nil
expect(pipeline.builds.first.deployment).to be_nil
end
end
end
describe 'Pipeline for external pull requests' do
let(:response) do
execute_service(
source: source,
external_pull_request: pull_request,
ref: ref_name,
source_sha: source_sha,
target_sha: target_sha
)
end
let(:pipeline) { response.payload }
before do
stub_ci_pipeline_yaml_file(YAML.dump(config))
end
let(:ref_name) { 'refs/heads/feature' }
let(:source_sha) { project.commit(ref_name).id }
let(:target_sha) { nil }
context 'when source is external pull request' do
let(:source) { :external_pull_request_event }
context 'when config has external_pull_requests keywords' do
let(:config) do
{
build: {
stage: 'build',
script: 'echo'
},
test: {
stage: 'test',
script: 'echo',
only: ['external_pull_requests']
},
pages: {
stage: 'deploy',
script: 'echo',
except: ['external_pull_requests']
}
}
end
context 'when external pull request is specified' do
let(:pull_request) { create(:external_pull_request, project: project, source_branch: 'feature', target_branch: 'master') }
let(:ref_name) { pull_request.source_ref }
it 'creates an external pull request pipeline' do
expect(pipeline).to be_persisted
expect(pipeline).to be_external_pull_request_event
expect(pipeline.external_pull_request).to eq(pull_request)
expect(pipeline.source_sha).to eq(source_sha)
expect(pipeline.builds.order(:stage_id)
.map(&:name))
.to eq(%w[build test])
end
context 'when ref is tag' do
let(:ref_name) { 'refs/tags/v1.1.0' }
it 'does not create an external pull request pipeline', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq('Tag is not included in the list and Failed to build the pipeline!')
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:tag]).to eq(['is not included in the list'])
end
end
context 'when pull request is created from fork' do
it 'does not create an external pull request pipeline'
end
context "when there are no matched jobs" do
let(:config) do
{
test: {
stage: 'test',
script: 'echo',
except: ['external_pull_requests']
}
}
end
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq('Pipeline will not run for the selected trigger. ' \
'The rules configuration prevented any jobs from being added to the pipeline.')
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:base]).to eq(['Pipeline will not run for the selected trigger. ' \
'The rules configuration prevented any jobs from being added to the pipeline.'])
end
end
end
context 'when external pull request is not specified' do
let(:pull_request) { nil }
it 'does not create an external pull request pipeline', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq("External pull request can't be blank and Failed to build the pipeline!")
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:external_pull_request]).to eq(["can't be blank"])
end
end
end
context "when config does not have external_pull_requests keywords" do
let(:config) do
{
build: {
stage: 'build',
script: 'echo'
},
test: {
stage: 'test',
script: 'echo'
},
pages: {
stage: 'deploy',
script: 'echo'
}
}
end
context 'when external pull request is specified' do
let(:pull_request) do
create(:external_pull_request,
project: project,
source_branch: Gitlab::Git.ref_name(ref_name),
target_branch: 'master')
end
it 'creates an external pull request pipeline' do
expect(pipeline).to be_persisted
expect(pipeline).to be_external_pull_request_event
expect(pipeline.external_pull_request).to eq(pull_request)
expect(pipeline.source_sha).to eq(source_sha)
expect(pipeline.builds.order(:stage_id)
.map(&:name))
.to eq(%w[build test pages])
end
end
context 'when external pull request is not specified' do
let(:pull_request) { nil }
it 'does not create an external pull request pipeline', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq("External pull request can't be blank and Failed to build the pipeline!")
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:base]).to eq(['Failed to build the pipeline!'])
end
end
end
end
end
describe 'Pipelines for merge requests' do
let(:response) do
execute_service(
source: source,
merge_request: merge_request,
ref: ref_name,
source_sha: source_sha,
target_sha: target_sha
)
end
let(:pipeline) { response.payload }
before do
stub_ci_pipeline_yaml_file(YAML.dump(config))
end
let(:ref_name) { 'refs/heads/feature' }
let(:source_sha) { project.commit(ref_name).id }
let(:target_sha) { nil }
context 'when source is merge request' do
let(:source) { :merge_request_event }
context "when config has merge_requests keywords" do
let(:config) do
{
build: {
stage: 'build',
script: 'echo'
},
test: {
stage: 'test',
script: 'echo',
only: ['merge_requests']
},
pages: {
stage: 'deploy',
script: 'echo',
except: ['merge_requests']
}
}
end
context 'when merge request is specified' do
let(:merge_request) do
create(:merge_request,
source_project: project,
source_branch: 'feature',
target_project: project,
target_branch: 'master')
end
let(:ref_name) { merge_request.ref_path }
it 'creates a detached merge request pipeline' do
expect(pipeline).to be_persisted
expect(pipeline).to be_merge_request_event
expect(pipeline.merge_request).to eq(merge_request)
expect(pipeline.builds.order(:stage_id).pluck(:name)).to eq(%w[test])
end
it 'persists the specified source sha' do
expect(pipeline.source_sha).to eq(source_sha)
end
it 'does not persist target sha for detached merge request pipeline' do
expect(pipeline.target_sha).to be_nil
end
it 'schedules update for the head pipeline of the merge request' do
allow(MergeRequests::UpdateHeadPipelineWorker).to receive(:perform_async)
pipeline
expect(MergeRequests::UpdateHeadPipelineWorker).to have_received(:perform_async).with('Ci::PipelineCreatedEvent', { 'pipeline_id' => pipeline.id })
end
it 'schedules a namespace onboarding create action worker' do
expect(Onboarding::PipelineCreatedWorker)
.to receive(:perform_async).with(project.namespace_id)
pipeline
end
context 'when target sha is specified' do
let(:target_sha) { merge_request.target_branch_sha }
it 'persists the target sha' do
expect(pipeline.target_sha).to eq(target_sha)
end
end
context 'when ref is tag' do
let(:ref_name) { 'refs/tags/v1.1.0' }
it 'does not create a merge request pipeline', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq('Tag is not included in the list and Failed to build the pipeline!')
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:tag]).to eq(['is not included in the list'])
end
end
context 'when merge request is created from a forked project' do
let(:merge_request) do
create(:merge_request,
source_project: project,
source_branch: 'feature',
target_project: target_project,
target_branch: 'master')
end
let(:ref_name) { 'refs/heads/feature' }
let!(:project) { fork_project(target_project, nil, repository: true) }
let!(:target_project) { create(:project, :repository) }
let!(:user) { create(:user) }
before do
project.add_developer(user)
end
it 'creates a legacy detached merge request pipeline in the forked project', :sidekiq_might_not_need_inline do
2019-02-15 15:39:39 +05:30
expect(pipeline).to be_persisted
expect(project.ci_pipelines).to eq([pipeline])
expect(target_project.ci_pipelines).to be_empty
end
end
context "when there are no matched jobs" do
let(:config) do
{
test: {
stage: 'test',
script: 'echo',
except: ['merge_requests']
}
}
end
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq('Pipeline will not run for the selected trigger. ' \
'The rules configuration prevented any jobs from being added to the pipeline.')
expect(pipeline).not_to be_persisted
end
end
end
end
context "when config does not have merge_requests keywords" do
let(:config) do
{
build: {
stage: 'build',
script: 'echo'
},
test: {
stage: 'test',
script: 'echo'
},
pages: {
stage: 'deploy',
script: 'echo'
}
}
end
context 'when merge request is specified' do
let(:merge_request) do
create(:merge_request,
source_project: project,
source_branch: Gitlab::Git.ref_name(ref_name),
target_project: project,
target_branch: 'master')
end
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq('Pipeline will not run for the selected trigger. ' \
'The rules configuration prevented any jobs from being added to the pipeline.')
expect(pipeline).not_to be_persisted
end
end
end
context "when config uses regular expression for only keyword" do
let(:config) do
{
build: {
stage: 'build',
script: 'echo',
only: ["/^#{ref_name}$/"]
}
}
end
context 'when merge request is specified' do
let(:merge_request) do
create(:merge_request,
source_project: project,
source_branch: Gitlab::Git.ref_name(ref_name),
target_project: project,
target_branch: 'master')
end
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq('Pipeline will not run for the selected trigger. ' \
'The rules configuration prevented any jobs from being added to the pipeline.')
expect(pipeline).not_to be_persisted
end
end
end
context "when config uses variables for only keyword" do
let(:config) do
{
build: {
stage: 'build',
script: 'echo',
only: {
variables: %w($CI)
}
}
}
end
context 'when merge request is specified' do
let(:merge_request) do
create(:merge_request,
source_project: project,
source_branch: Gitlab::Git.ref_name(ref_name),
target_project: project,
target_branch: 'master')
end
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq('Pipeline will not run for the selected trigger. ' \
'The rules configuration prevented any jobs from being added to the pipeline.')
expect(pipeline).not_to be_persisted
end
end
end
context "when config has 'except: [tags]'" do
let(:config) do
{
build: {
stage: 'build',
script: 'echo',
except: ['tags']
}
}
end
context 'when merge request is specified' do
let(:merge_request) do
create(:merge_request,
source_project: project,
source_branch: Gitlab::Git.ref_name(ref_name),
target_project: project,
target_branch: 'master')
end
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq('Pipeline will not run for the selected trigger. ' \
'The rules configuration prevented any jobs from being added to the pipeline.')
expect(pipeline).not_to be_persisted
end
end
end
end
context 'when source is web' do
let(:source) { :web }
let(:merge_request) { nil }
context "when config has merge_requests keywords" do
let(:config) do
{
build: {
stage: 'build',
script: 'echo'
},
test: {
stage: 'test',
script: 'echo',
only: ['merge_requests']
},
pages: {
stage: 'deploy',
script: 'echo',
except: ['merge_requests']
}
}
end
it 'creates a branch pipeline' do
expect(pipeline).to be_persisted
expect(pipeline).to be_web
expect(pipeline.merge_request).to be_nil
expect(pipeline.builds.order(:stage_id).pluck(:name)).to eq(%w[build pages])
end
end
end
end
context 'when needs is used' do
let(:response) { execute_service }
let(:pipeline) { response.payload }
let(:config) do
{
build_a: {
stage: "build",
script: "ls",
only: %w[master]
},
test_a: {
stage: "test",
script: "ls",
only: %w[master feature],
needs: %w[build_a]
},
deploy: {
stage: "deploy",
script: "ls",
only: %w[tags]
}
}
end
before do
stub_ci_pipeline_yaml_file(YAML.dump(config))
end
context 'when pipeline on master is created' do
let(:ref_name) { 'refs/heads/master' }
it 'creates a pipeline with build_a and test_a' do
expect(pipeline).to be_persisted
expect(pipeline.builds.pluck(:name)).to contain_exactly("build_a", "test_a")
end
it 'bulk inserts all needs' do
expect(Ci::BuildNeed).to receive(:bulk_insert!).and_call_original
expect(pipeline).to be_persisted
end
end
context 'when pipeline on feature is created' do
let(:ref_name) { 'refs/heads/feature' }
shared_examples 'has errors' do
it 'contains the expected errors', :aggregate_failures do
expect(pipeline.builds).to be_empty
error_message = "'test_a' job needs 'build_a' job, but 'build_a' is not in any previous stage"
expect(pipeline.yaml_errors).to eq(error_message)
expect(pipeline.error_messages.map(&:content)).to contain_exactly(error_message)
expect(pipeline.errors[:base]).to contain_exactly(error_message)
end
end
context 'when save_on_errors is enabled' do
let(:response) { execute_service(save_on_errors: true) }
let(:pipeline) { response.payload }
it 'does create a pipeline as test_a depends on build_a', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq("'test_a' job needs 'build_a' job, but 'build_a' is not in any previous stage")
expect(pipeline).to be_persisted
end
it_behaves_like 'has errors'
end
context 'when save_on_errors is disabled' do
let(:response) { execute_service(save_on_errors: false) }
let(:pipeline) { response.payload }
it 'does not create a pipeline as test_a depends on build_a', :aggregate_failures do
expect(response).to be_error
expect(pipeline).not_to be_persisted
end
it_behaves_like 'has errors'
end
end
context 'when pipeline on v1.0.0 is created' do
let(:ref_name) { 'refs/tags/v1.0.0' }
it 'does create a pipeline only with deploy' do
expect(pipeline).to be_persisted
expect(pipeline.builds.pluck(:name)).to contain_exactly("deploy")
end
end
end
describe 'pipeline components' do
let(:components_project) do
create(:project, :repository, creator: user, namespace: user.namespace)
end
let(:component_path) do
"#{Gitlab.config.gitlab.host}/#{components_project.full_path}/my-component@v0.1"
end
let(:template) do
<<~YAML
spec:
inputs:
stage:
suffix:
default: my-job
---
test-$[[ inputs.suffix ]]:
stage: $[[ inputs.stage ]]
script: run tests
YAML
end
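# Commits the component template into the components project and returns the
# created commit SHA, which the examples below tag as a component version.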
let(:sha) do
components_project.repository.create_file(
user,
'my-component/template.yml',
template,
message: 'Add my first CI component',
branch_name: 'master'
)
end
let(:config) do
<<~YAML
include:
- component: #{component_path}
inputs:
stage: my-stage
stages:
- my-stage
test-1:
stage: my-stage
script: run test-1
YAML
end
before do
stub_ci_pipeline_yaml_file(config)
end
context 'when there is no version with specified tag' do
before do
components_project.repository.add_tag(user, 'v0.01', sha)
end
it 'does not create a pipeline' do
response = execute_service(save_on_errors: true)
pipeline = response.payload
expect(pipeline).to be_persisted
expect(pipeline.yaml_errors)
.to include "my-component@v0.1' - content not found"
end
end
context 'when there is a proper revision available' do
before do
components_project.repository.add_tag(user, 'v0.1', sha)
end
context 'when component is valid' do
it 'creates a pipeline using a pipeline component' do
response = execute_service(save_on_errors: true)
pipeline = response.payload
expect(pipeline).to be_persisted
expect(pipeline.yaml_errors).to be_blank
expect(pipeline.statuses.count).to eq 2
expect(pipeline.statuses.map(&:name)).to match_array %w[test-1 test-my-job]
end
end
context 'when interpolation is invalid' do
let(:template) do
<<~YAML
spec:
inputs:
stage:
---
test:
stage: $[[ inputs.stage ]]
script: rspec --suite $[[ inputs.suite ]]
YAML
end
it 'does not create a pipeline' do
response = execute_service(save_on_errors: true)
pipeline = response.payload
expect(pipeline).to be_persisted
expect(pipeline.yaml_errors)
.to include 'interpolation interrupted by errors, unknown interpolation key: `suite`'
end
end
context 'when there is a syntax error in the template' do
let(:template) do
<<~YAML
spec:
inputs:
stage:
---
:test
stage: $[[ inputs.stage ]]
YAML
end
it 'does not create a pipeline' do
response = execute_service(save_on_errors: true)
pipeline = response.payload
expect(pipeline).to be_persisted
expect(pipeline.yaml_errors)
.to include 'content does not have a valid YAML syntax'
end
end
end
end
# TODO: Remove this test section when include:with is removed as part of https://gitlab.com/gitlab-org/gitlab/-/issues/408369
describe 'pipeline components using include:with instead of include:inputs' do
let(:components_project) do
create(:project, :repository, creator: user, namespace: user.namespace)
end
let(:component_path) do
"#{Gitlab.config.gitlab.host}/#{components_project.full_path}/my-component@v0.1"
end
let(:template) do
<<~YAML
spec:
inputs:
stage:
suffix:
default: my-job
---
test-$[[ inputs.suffix ]]:
stage: $[[ inputs.stage ]]
script: run tests
YAML
end
let(:sha) do
components_project.repository.create_file(
user,
'my-component/template.yml',
template,
message: 'Add my first CI component',
branch_name: 'master'
)
end
let(:config) do
<<~YAML
include:
- component: #{component_path}
with:
stage: my-stage
stages:
- my-stage
test-1:
stage: my-stage
script: run test-1
YAML
end
before do
stub_ci_pipeline_yaml_file(config)
end
context 'when there is no version with specified tag' do
before do
components_project.repository.add_tag(user, 'v0.01', sha)
end
it 'does not create a pipeline' do
response = execute_service(save_on_errors: true)
pipeline = response.payload
expect(pipeline).to be_persisted
expect(pipeline.yaml_errors)
.to include "my-component@v0.1' - content not found"
end
end
context 'when there is a proper revision available' do
before do
components_project.repository.add_tag(user, 'v0.1', sha)
end
context 'when component is valid' do
it 'creates a pipeline using a pipeline component' do
response = execute_service(save_on_errors: true)
pipeline = response.payload
expect(pipeline).to be_persisted
expect(pipeline.yaml_errors).to be_blank
expect(pipeline.statuses.count).to eq 2
expect(pipeline.statuses.map(&:name)).to match_array %w[test-1 test-my-job]
end
end
context 'when interpolation is invalid' do
let(:template) do
<<~YAML
spec:
inputs:
stage:
---
test:
stage: $[[ inputs.stage ]]
script: rspec --suite $[[ inputs.suite ]]
YAML
end
it 'does not create a pipeline' do
response = execute_service(save_on_errors: true)
pipeline = response.payload
expect(pipeline).to be_persisted
expect(pipeline.yaml_errors)
.to include 'interpolation interrupted by errors, unknown interpolation key: `suite`'
end
end
context 'when there is a syntax error in the template' do
let(:template) do
<<~YAML
spec:
inputs:
stage:
---
:test
stage: $[[ inputs.stage ]]
YAML
end
it 'does not create a pipeline' do
response = execute_service(save_on_errors: true)
pipeline = response.payload
expect(pipeline).to be_persisted
expect(pipeline.yaml_errors)
.to include 'content does not have a valid YAML syntax'
end
end
end
end
end
end