2019-07-31 22:56:46 +05:30
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
require 'spec_helper'
|
|
|
|
|
2020-06-23 00:09:42 +05:30
|
|
|
RSpec.describe Projects::PipelinesController do
|
2017-08-17 22:00:37 +05:30
|
|
|
include ApiHelpers
|
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
let_it_be(:user) { create(:user) }
|
2020-01-01 13:55:28 +05:30
|
|
|
let_it_be(:project) { create(:project, :public, :repository) }
|
2021-04-29 21:17:54 +05:30
|
|
|
|
2019-02-02 18:00:53 +05:30
|
|
|
let(:feature) { ProjectFeature::ENABLED }
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
before do
|
2020-04-08 14:13:33 +05:30
|
|
|
allow(Sidekiq.logger).to receive(:info)
|
2017-09-10 17:25:29 +05:30
|
|
|
stub_not_protect_default_branch
|
2017-08-17 22:00:37 +05:30
|
|
|
project.add_developer(user)
|
2021-04-29 21:17:54 +05:30
|
|
|
project.project_feature.update!(builds_access_level: feature)
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
sign_in(user)
|
|
|
|
end
|
|
|
|
|
|
|
|
describe 'GET index.json' do
|
|
|
|
before do
|
2020-01-01 13:55:28 +05:30
|
|
|
create_all_pipeline_types
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
context 'when using persisted stages', :request_store do
|
2020-01-01 13:55:28 +05:30
|
|
|
render_views
|
|
|
|
|
2020-03-13 15:44:24 +05:30
|
|
|
it 'returns serialized pipelines' do
|
2019-07-07 11:18:12 +05:30
|
|
|
expect(::Gitlab::GitalyClient).to receive(:allow_ref_name_caching).and_call_original
|
|
|
|
|
2020-01-01 13:55:28 +05:30
|
|
|
get_pipelines_index_json
|
2018-11-08 19:23:39 +05:30
|
|
|
|
|
|
|
expect(response).to have_gitlab_http_status(:ok)
|
|
|
|
expect(response).to match_response_schema('pipeline')
|
|
|
|
|
|
|
|
expect(json_response).to include('pipelines')
|
2020-04-08 14:13:33 +05:30
|
|
|
expect(json_response['pipelines'].count).to eq 6
|
|
|
|
expect(json_response['count']['all']).to eq '6'
|
2018-11-08 19:23:39 +05:30
|
|
|
|
|
|
|
json_response.dig('pipelines', 0, 'details', 'stages').tap do |stages|
|
|
|
|
expect(stages.count).to eq 3
|
|
|
|
end
|
2020-01-01 13:55:28 +05:30
|
|
|
end
|
|
|
|
|
2021-12-11 22:18:48 +05:30
|
|
|
it 'does not execute N+1 queries', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/345470' do
|
2020-01-01 13:55:28 +05:30
|
|
|
get_pipelines_index_json
|
|
|
|
|
|
|
|
control_count = ActiveRecord::QueryRecorder.new do
|
|
|
|
get_pipelines_index_json
|
|
|
|
end.count
|
2018-11-08 19:23:39 +05:30
|
|
|
|
2020-01-01 13:55:28 +05:30
|
|
|
create_all_pipeline_types
|
|
|
|
|
|
|
|
# There appears to be one extra query for Pipelines#has_warnings? for some reason
|
2020-11-24 15:15:51 +05:30
|
|
|
expect { get_pipelines_index_json }.not_to exceed_query_limit(control_count + 1)
|
2020-01-01 13:55:28 +05:30
|
|
|
expect(response).to have_gitlab_http_status(:ok)
|
2020-04-08 14:13:33 +05:30
|
|
|
expect(json_response['pipelines'].count).to eq 12
|
2018-11-08 19:23:39 +05:30
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
it 'does not include coverage data for the pipelines' do
|
|
|
|
get_pipelines_index_json
|
|
|
|
|
|
|
|
expect(json_response['pipelines'][0]).not_to include('coverage')
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
2018-03-17 18:26:18 +05:30
|
|
|
|
2021-09-30 23:02:18 +05:30
|
|
|
it 'paginates the result' do
|
|
|
|
allow(Ci::Pipeline).to receive(:default_per_page).and_return(2)
|
|
|
|
|
|
|
|
get_pipelines_index_json
|
|
|
|
|
|
|
|
check_pipeline_response(returned: 2, all: 6)
|
|
|
|
end
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
context 'when performing gitaly calls', :request_store do
|
2020-01-01 13:55:28 +05:30
|
|
|
it 'limits the Gitaly requests' do
|
2018-12-05 23:21:45 +05:30
|
|
|
# Isolate from test preparation (Repository#exists? is also cached in RequestStore)
|
|
|
|
RequestStore.end!
|
|
|
|
RequestStore.clear!
|
|
|
|
RequestStore.begin!
|
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
expect(::Gitlab::GitalyClient).to receive(:allow_ref_name_caching).and_call_original
|
|
|
|
|
2020-07-28 23:09:34 +05:30
|
|
|
# ListCommitsByOid, RepositoryExists, HasLocalBranches, ListCommitsByRefNames
|
2018-11-08 19:23:39 +05:30
|
|
|
expect { get_pipelines_index_json }
|
2020-07-28 23:09:34 +05:30
|
|
|
.to change { Gitlab::GitalyClient.get_request_count }.by(4)
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
|
|
|
end
|
2018-11-08 19:23:39 +05:30
|
|
|
|
|
|
|
context 'when the project is private' do
|
|
|
|
let(:project) { create(:project, :private, :repository) }
|
|
|
|
|
|
|
|
it 'returns `not_found` when the user does not have access' do
|
|
|
|
sign_in(create(:user))
|
|
|
|
|
|
|
|
get_pipelines_index_json
|
|
|
|
|
|
|
|
expect(response).to have_gitlab_http_status(:not_found)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns the pipelines when the user has access' do
|
|
|
|
get_pipelines_index_json
|
|
|
|
|
2020-04-08 14:13:33 +05:30
|
|
|
expect(json_response['pipelines'].size).to eq(6)
|
2018-11-08 19:23:39 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2020-07-28 23:09:34 +05:30
|
|
|
context 'when user tries to access legacy scope via URL' do
|
|
|
|
it 'redirects to all pipelines with that status instead' do
|
|
|
|
get_pipelines_index_html(scope: 'running')
|
2020-05-24 23:13:21 +05:30
|
|
|
|
2020-07-28 23:09:34 +05:30
|
|
|
expect(response).to redirect_to(project_pipelines_path(project, status: 'running', format: :html))
|
2020-05-24 23:13:21 +05:30
|
|
|
end
|
2020-07-28 23:09:34 +05:30
|
|
|
end
|
2020-05-24 23:13:21 +05:30
|
|
|
|
2020-07-28 23:09:34 +05:30
|
|
|
context 'filter by scope' do
|
2020-05-24 23:13:21 +05:30
|
|
|
context 'scope is branches or tags' do
|
|
|
|
before do
|
|
|
|
create(:ci_pipeline, :failed, project: project, ref: 'v1.0.0', tag: true)
|
2020-07-28 23:09:34 +05:30
|
|
|
create(:ci_pipeline, :failed, project: project, ref: 'master', tag: false)
|
|
|
|
create(:ci_pipeline, :failed, project: project, ref: 'feature', tag: false)
|
2020-05-24 23:13:21 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
context 'when scope is branches' do
|
|
|
|
it 'returns matched pipelines' do
|
|
|
|
get_pipelines_index_json(scope: 'branches')
|
|
|
|
|
2020-07-28 23:09:34 +05:30
|
|
|
check_pipeline_response(returned: 2, all: 9)
|
2020-05-24 23:13:21 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when scope is tags' do
|
|
|
|
it 'returns matched pipelines' do
|
|
|
|
get_pipelines_index_json(scope: 'tags')
|
|
|
|
|
2020-07-28 23:09:34 +05:30
|
|
|
check_pipeline_response(returned: 1, all: 9)
|
2020-05-24 23:13:21 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'filter by username' do
|
|
|
|
let!(:pipeline) { create(:ci_pipeline, :running, project: project, user: user) }
|
|
|
|
|
|
|
|
context 'when username exists' do
|
|
|
|
it 'returns matched pipelines' do
|
|
|
|
get_pipelines_index_json(username: user.username)
|
|
|
|
|
2020-07-28 23:09:34 +05:30
|
|
|
check_pipeline_response(returned: 1, all: 1)
|
2020-05-24 23:13:21 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when username does not exist' do
|
|
|
|
it 'returns empty' do
|
|
|
|
get_pipelines_index_json(username: 'invalid-username')
|
|
|
|
|
2020-07-28 23:09:34 +05:30
|
|
|
check_pipeline_response(returned: 0, all: 0)
|
2020-05-24 23:13:21 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'filter by ref' do
|
|
|
|
let!(:pipeline) { create(:ci_pipeline, :running, project: project, ref: 'branch-1') }
|
|
|
|
|
|
|
|
context 'when pipelines with the ref exists' do
|
|
|
|
it 'returns matched pipelines' do
|
|
|
|
get_pipelines_index_json(ref: 'branch-1')
|
|
|
|
|
2020-07-28 23:09:34 +05:30
|
|
|
check_pipeline_response(returned: 1, all: 1)
|
2020-05-24 23:13:21 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when no pipeline with the ref exists' do
|
|
|
|
it 'returns empty list' do
|
|
|
|
get_pipelines_index_json(ref: 'invalid-ref')
|
|
|
|
|
2020-07-28 23:09:34 +05:30
|
|
|
check_pipeline_response(returned: 0, all: 0)
|
2020-05-24 23:13:21 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2020-06-23 00:09:42 +05:30
|
|
|
context 'filter by status' do
|
|
|
|
context 'when pipelines with the status exists' do
|
|
|
|
it 'returns matched pipelines' do
|
|
|
|
get_pipelines_index_json(status: 'success')
|
|
|
|
|
2020-07-28 23:09:34 +05:30
|
|
|
check_pipeline_response(returned: 1, all: 1)
|
2020-06-23 00:09:42 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when no pipeline with the status exists' do
|
|
|
|
it 'returns empty list' do
|
|
|
|
get_pipelines_index_json(status: 'manual')
|
|
|
|
|
2020-07-28 23:09:34 +05:30
|
|
|
check_pipeline_response(returned: 0, all: 0)
|
2020-06-23 00:09:42 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when invalid status' do
|
|
|
|
it 'returns all list' do
|
|
|
|
get_pipelines_index_json(status: 'invalid-status')
|
|
|
|
|
2020-07-28 23:09:34 +05:30
|
|
|
check_pipeline_response(returned: 6, all: 6)
|
2020-06-23 00:09:42 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2020-07-28 23:09:34 +05:30
|
|
|
# Issues a GET to the pipelines index action, rendered as HTML.
# Extra query params (e.g. scope) may be supplied via `params`.
def get_pipelines_index_html(params = {})
  base_params = { namespace_id: project.namespace, project_id: project }

  get :index, params: base_params.merge(params), format: :html
end
|
|
|
|
|
2020-05-24 23:13:21 +05:30
|
|
|
# Issues a GET to the pipelines index action, rendered as JSON.
# Extra query params (e.g. scope, status, ref, username) may be supplied
# via `params`.
def get_pipelines_index_json(params = {})
  base_params = { namespace_id: project.namespace, project_id: project }

  get :index, params: base_params.merge(params), format: :json
end
|
|
|
|
|
2020-01-01 13:55:28 +05:30
|
|
|
# Seeds one pipeline per basic CI status — each on a distinct commit so
# the pipelines stay distinguishable — plus one merge-request pipeline:
# 6 pipelines in total, which is the count asserted by the examples above.
def create_all_pipeline_types
  # %w[] (brackets) matches the word-array style used elsewhere in this
  # file (see the status list in create_build).
  %w[pending running success failed canceled].each_with_index do |status, index|
    create_pipeline(status, project.commit("HEAD~#{index}"))
  end

  create_pipeline_with_merge_request
end
|
|
|
|
|
|
|
|
# Creates a running pipeline attached to a freshly-created merge request.
def create_pipeline_with_merge_request
  # Merge requests must each use a distinct source branch, so a random
  # branch name is generated on every invocation.
  branch_name = "test-#{SecureRandom.hex}"
  project.repository.create_branch(branch_name, project.repository.root_ref)

  merge_request = create(:merge_request,
                         source_project: project,
                         target_project: project,
                         source_branch: branch_name)

  create_pipeline(:running, project.commit('HEAD'), merge_request: merge_request)
end
|
|
|
|
|
2020-04-08 14:13:33 +05:30
|
|
|
# Creates an empty pipeline in `status` for the commit `sha`, with three
# builds (build/test/deploy stages). Returns the created pipeline.
def create_pipeline(status, sha, merge_request: nil)
  # Deliberately a brand-new user, not the spec-level `user` let.
  pipeline_owner = create(:user)

  attributes = {
    status: status,
    project: project,
    sha: sha.id,
    ref: sha.id.first(8),
    user: pipeline_owner,
    merge_request: merge_request
  }

  create(:ci_empty_pipeline, **attributes).tap do |pipeline|
    %w[build test deploy].each_with_index do |stage, idx|
      create_build(pipeline, stage, idx + 1, stage, pipeline_owner)
    end
  end
end
|
|
|
|
|
2020-01-01 13:55:28 +05:30
|
|
|
# Creates a build with artifacts in the given stage of `pipeline`.
# The build's status is picked at random from the common CI statuses.
def create_build(pipeline, stage, stage_idx, name, user = nil)
  random_status = %w[created running pending success failed canceled].sample

  create(:ci_build, :artifacts,
         artifacts_expire_at: 2.days.from_now,
         pipeline: pipeline,
         stage: stage,
         stage_idx: stage_idx,
         name: name,
         status: random_status,
         user: user)
end
|
2020-05-24 23:13:21 +05:30
|
|
|
|
2020-07-28 23:09:34 +05:30
|
|
|
# Asserts the pipelines JSON payload matches the schema and reports
# `returned` pipelines out of `all` total.
def check_pipeline_response(returned:, all:)
  pipelines = json_response['pipelines']
  total = json_response['count']['all'].to_i

  aggregate_failures do
    expect(response).to match_response_schema('pipeline')
    expect(pipelines.count).to eq(returned)
    expect(total).to eq(all)
  end
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
2021-04-29 21:17:54 +05:30
|
|
|
describe 'GET #index' do
|
2021-09-04 01:27:46 +05:30
|
|
|
before do
|
|
|
|
stub_application_setting(auto_devops_enabled: false)
|
|
|
|
end
|
2021-04-29 21:17:54 +05:30
|
|
|
|
2021-09-04 01:27:46 +05:30
|
|
|
# Performs the GET #index request exercised by the shared examples.
def action
  index_params = { namespace_id: project.namespace, project_id: project }

  get :index, params: index_params
end
|
2021-04-29 21:17:54 +05:30
|
|
|
|
2021-09-04 01:27:46 +05:30
|
|
|
subject { project.namespace }
|
|
|
|
|
2022-05-07 20:08:51 +05:30
|
|
|
context 'runners_availability_section experiment' do
|
|
|
|
it_behaves_like 'tracks assignment and records the subject', :runners_availability_section, :namespace
|
2021-06-08 01:23:25 +05:30
|
|
|
end
|
2021-04-29 21:17:54 +05:30
|
|
|
end
|
|
|
|
|
2021-06-02 17:11:27 +05:30
|
|
|
describe 'GET #show' do
|
|
|
|
# Requests the pipeline show page as HTML for the `pipeline` let.
def get_pipeline_html
  show_params = {
    namespace_id: project.namespace,
    project_id: project,
    id: pipeline
  }

  get :show, params: show_params, format: :html
end
|
|
|
|
|
2021-08-04 16:29:09 +05:30
|
|
|
context 'when the project is public' do
|
|
|
|
render_views
|
2021-06-02 17:11:27 +05:30
|
|
|
|
2021-08-04 16:29:09 +05:30
|
|
|
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
|
|
|
|
|
2021-11-11 11:23:49 +05:30
|
|
|
# Creates a build in `status` with artifacts and tags on the `pipeline` let.
def create_build_with_artifacts(stage, stage_idx, name, status)
  create(:ci_build, :artifacts, :tags, status,
         user: user,
         pipeline: pipeline,
         stage: stage,
         stage_idx: stage_idx,
         name: name)
end
|
|
|
|
|
|
|
|
# Creates a bridge (downstream-trigger) job in `status` on the `pipeline` let.
def create_bridge(stage, stage_idx, name, status)
  create(:ci_bridge, status,
         pipeline: pipeline,
         stage: stage,
         stage_idx: stage_idx,
         name: name)
end
|
|
|
|
|
|
|
|
before do
|
2021-11-11 11:23:49 +05:30
|
|
|
create_build_with_artifacts('build', 0, 'job1', :failed)
|
|
|
|
create_build_with_artifacts('build', 0, 'job2', :running)
|
|
|
|
create_build_with_artifacts('build', 0, 'job3', :pending)
|
|
|
|
create_bridge('deploy', 1, 'deploy-a', :failed)
|
|
|
|
create_bridge('deploy', 1, 'deploy-b', :created)
|
2021-08-04 16:29:09 +05:30
|
|
|
end
|
|
|
|
|
2021-11-11 11:23:49 +05:30
|
|
|
it 'avoids N+1 database queries', :request_store, :use_sql_query_cache do
|
|
|
|
# warm up
|
|
|
|
get_pipeline_html
|
2021-08-04 16:29:09 +05:30
|
|
|
expect(response).to have_gitlab_http_status(:ok)
|
|
|
|
|
2021-11-11 11:23:49 +05:30
|
|
|
control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
|
|
|
|
get_pipeline_html
|
|
|
|
expect(response).to have_gitlab_http_status(:ok)
|
|
|
|
end
|
2021-08-04 16:29:09 +05:30
|
|
|
|
2021-11-11 11:23:49 +05:30
|
|
|
create_build_with_artifacts('build', 0, 'job4', :failed)
|
|
|
|
create_build_with_artifacts('build', 0, 'job5', :running)
|
|
|
|
create_build_with_artifacts('build', 0, 'job6', :pending)
|
|
|
|
create_bridge('deploy', 1, 'deploy-c', :failed)
|
|
|
|
create_bridge('deploy', 1, 'deploy-d', :created)
|
|
|
|
|
|
|
|
expect do
|
|
|
|
get_pipeline_html
|
|
|
|
expect(response).to have_gitlab_http_status(:ok)
|
|
|
|
end.not_to exceed_all_query_limit(control)
|
2021-08-04 16:29:09 +05:30
|
|
|
end
|
2021-06-02 17:11:27 +05:30
|
|
|
end
|
|
|
|
|
2021-08-04 16:29:09 +05:30
|
|
|
context 'when the project is private' do
|
|
|
|
let(:project) { create(:project, :private, :repository) }
|
|
|
|
let(:pipeline) { create(:ci_pipeline, project: project) }
|
2021-06-02 17:11:27 +05:30
|
|
|
|
2021-08-04 16:29:09 +05:30
|
|
|
it 'returns `not_found` when the user does not have access' do
|
|
|
|
sign_in(create(:user))
|
2021-06-02 17:11:27 +05:30
|
|
|
|
2021-08-04 16:29:09 +05:30
|
|
|
get_pipeline_html
|
2021-06-02 17:11:27 +05:30
|
|
|
|
2021-08-04 16:29:09 +05:30
|
|
|
expect(response).to have_gitlab_http_status(:not_found)
|
|
|
|
end
|
2021-06-02 17:11:27 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
describe 'GET show.json' do
|
2019-12-26 22:10:19 +05:30
|
|
|
let(:pipeline) { create(:ci_pipeline, project: project) }
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
it 'returns the pipeline' do
|
|
|
|
get_pipeline_json
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(response).to have_gitlab_http_status(:ok)
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(json_response).not_to be_an(Array)
|
|
|
|
expect(json_response['id']).to be(pipeline.id)
|
|
|
|
expect(json_response['details']).to have_key 'stages'
|
|
|
|
end
|
|
|
|
|
2017-09-10 17:25:29 +05:30
|
|
|
context 'when the pipeline has multiple stages and groups', :request_store do
|
2018-11-08 19:23:39 +05:30
|
|
|
let(:project) { create(:project, :repository) }
|
|
|
|
|
|
|
|
let(:pipeline) do
|
|
|
|
create(:ci_empty_pipeline, project: project,
|
|
|
|
user: user,
|
|
|
|
sha: project.commit.id)
|
|
|
|
end
|
|
|
|
|
2017-09-10 17:25:29 +05:30
|
|
|
before do
|
|
|
|
create_build('build', 0, 'build')
|
|
|
|
create_build('test', 1, 'rspec 0')
|
|
|
|
create_build('deploy', 2, 'production')
|
|
|
|
create_build('post deploy', 3, 'pages 0')
|
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
it 'does not perform N + 1 queries' do
|
2019-12-04 20:38:33 +05:30
|
|
|
# Set up all required variables
|
|
|
|
get_pipeline_json
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
control_count = ActiveRecord::QueryRecorder.new { get_pipeline_json }.count
|
|
|
|
|
2019-12-04 20:38:33 +05:30
|
|
|
first_build = pipeline.builds.first
|
|
|
|
first_build.tag_list << [:hello, :world]
|
|
|
|
create(:deployment, deployable: first_build)
|
|
|
|
|
|
|
|
second_build = pipeline.builds.second
|
|
|
|
second_build.tag_list << [:docker, :ruby]
|
|
|
|
create(:deployment, deployable: second_build)
|
2017-08-17 22:00:37 +05:30
|
|
|
|
2017-09-10 17:25:29 +05:30
|
|
|
new_count = ActiveRecord::QueryRecorder.new { get_pipeline_json }.count
|
2018-11-08 19:23:39 +05:30
|
|
|
|
2019-12-04 20:38:33 +05:30
|
|
|
expect(new_count).to be_within(1).of(control_count)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-02-02 18:00:53 +05:30
|
|
|
context 'when builds are disabled' do
|
|
|
|
let(:feature) { ProjectFeature::DISABLED }
|
|
|
|
|
|
|
|
it 'users can not see internal pipelines' do
|
|
|
|
get_pipeline_json
|
|
|
|
|
|
|
|
expect(response).to have_gitlab_http_status(:not_found)
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when pipeline is external' do
|
|
|
|
let(:pipeline) { create(:ci_pipeline, source: :external, project: project) }
|
|
|
|
|
|
|
|
it 'users can see the external pipeline' do
|
|
|
|
get_pipeline_json
|
|
|
|
|
|
|
|
expect(response).to have_gitlab_http_status(:ok)
|
|
|
|
expect(json_response['id']).to be(pipeline.id)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
context 'with triggered pipelines' do
|
|
|
|
let_it_be(:project) { create(:project, :repository) }
|
|
|
|
let_it_be(:source_project) { create(:project, :repository) }
|
|
|
|
let_it_be(:target_project) { create(:project, :repository) }
|
|
|
|
let_it_be(:root_pipeline) { create_pipeline(project) }
|
|
|
|
let_it_be(:source_pipeline) { create_pipeline(source_project) }
|
|
|
|
let_it_be(:source_of_source_pipeline) { create_pipeline(source_project) }
|
|
|
|
let_it_be(:target_pipeline) { create_pipeline(target_project) }
|
|
|
|
let_it_be(:target_of_target_pipeline) { create_pipeline(target_project) }
|
|
|
|
|
|
|
|
before do
|
|
|
|
create_link(source_of_source_pipeline, source_pipeline)
|
|
|
|
create_link(source_pipeline, root_pipeline)
|
|
|
|
create_link(root_pipeline, target_pipeline)
|
|
|
|
create_link(target_pipeline, target_of_target_pipeline)
|
|
|
|
end
|
|
|
|
|
|
|
|
shared_examples 'not expanded' do
|
|
|
|
let(:expected_stages) { be_nil }
|
|
|
|
|
|
|
|
it 'does return base details' do
|
|
|
|
get_pipeline_json(root_pipeline)
|
|
|
|
|
|
|
|
expect(json_response['triggered_by']).to include('id' => source_pipeline.id)
|
|
|
|
expect(json_response['triggered']).to contain_exactly(
|
|
|
|
include('id' => target_pipeline.id))
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'does not expand triggered_by pipeline' do
|
|
|
|
get_pipeline_json(root_pipeline)
|
|
|
|
|
|
|
|
triggered_by = json_response['triggered_by']
|
|
|
|
expect(triggered_by['triggered_by']).to be_nil
|
|
|
|
expect(triggered_by['triggered']).to be_nil
|
|
|
|
expect(triggered_by['details']['stages']).to expected_stages
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'does not expand triggered pipelines' do
|
|
|
|
get_pipeline_json(root_pipeline)
|
|
|
|
|
|
|
|
first_triggered = json_response['triggered'].first
|
|
|
|
expect(first_triggered['triggered_by']).to be_nil
|
|
|
|
expect(first_triggered['triggered']).to be_nil
|
|
|
|
expect(first_triggered['details']['stages']).to expected_stages
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
shared_examples 'expanded' do
|
|
|
|
it 'does return base details' do
|
|
|
|
get_pipeline_json(root_pipeline)
|
|
|
|
|
|
|
|
expect(json_response['triggered_by']).to include('id' => source_pipeline.id)
|
|
|
|
expect(json_response['triggered']).to contain_exactly(
|
|
|
|
include('id' => target_pipeline.id))
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'does expand triggered_by pipeline' do
|
|
|
|
get_pipeline_json(root_pipeline)
|
|
|
|
|
|
|
|
triggered_by = json_response['triggered_by']
|
|
|
|
expect(triggered_by['triggered_by']).to include(
|
|
|
|
'id' => source_of_source_pipeline.id)
|
|
|
|
expect(triggered_by['details']['stages']).not_to be_nil
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'does not recursively expand triggered_by' do
|
|
|
|
get_pipeline_json(root_pipeline)
|
|
|
|
|
|
|
|
triggered_by = json_response['triggered_by']
|
|
|
|
expect(triggered_by['triggered']).to be_nil
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'does expand triggered pipelines' do
|
|
|
|
get_pipeline_json(root_pipeline)
|
|
|
|
|
|
|
|
first_triggered = json_response['triggered'].first
|
|
|
|
expect(first_triggered['triggered']).to contain_exactly(
|
|
|
|
include('id' => target_of_target_pipeline.id))
|
|
|
|
expect(first_triggered['details']['stages']).not_to be_nil
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'does not recursively expand triggered' do
|
|
|
|
get_pipeline_json(root_pipeline)
|
|
|
|
|
|
|
|
first_triggered = json_response['triggered'].first
|
|
|
|
expect(first_triggered['triggered_by']).to be_nil
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when it does have permission to read other projects' do
|
|
|
|
before do
|
|
|
|
source_project.add_developer(user)
|
|
|
|
target_project.add_developer(user)
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when not-expanding any pipelines' do
|
|
|
|
let(:expanded) { nil }
|
|
|
|
|
|
|
|
it_behaves_like 'not expanded'
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when expanding non-existing pipeline' do
|
|
|
|
let(:expanded) { [-1] }
|
|
|
|
|
|
|
|
it_behaves_like 'not expanded'
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when expanding pipeline that is not directly expandable' do
|
|
|
|
let(:expanded) { [source_of_source_pipeline.id, target_of_target_pipeline.id] }
|
|
|
|
|
|
|
|
it_behaves_like 'not expanded'
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when expanding self' do
|
|
|
|
let(:expanded) { [root_pipeline.id] }
|
|
|
|
|
|
|
|
context 'it does not recursively expand pipelines' do
|
|
|
|
it_behaves_like 'not expanded'
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when expanding source and target pipeline' do
|
|
|
|
let(:expanded) { [source_pipeline.id, target_pipeline.id] }
|
|
|
|
|
|
|
|
it_behaves_like 'expanded'
|
|
|
|
|
|
|
|
context 'when expand depth is limited to 1' do
|
|
|
|
before do
|
|
|
|
stub_const('TriggeredPipelineEntity::MAX_EXPAND_DEPTH', 1)
|
|
|
|
end
|
|
|
|
|
|
|
|
it_behaves_like 'not expanded' do
|
|
|
|
# We expect that triggered/triggered_by is not expanded,
|
|
|
|
# but we still return details.stages for that pipeline
|
|
|
|
let(:expected_stages) { be_a(Array) }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when expanding all' do
|
|
|
|
let(:expanded) do
|
|
|
|
[
|
|
|
|
source_of_source_pipeline.id,
|
|
|
|
source_pipeline.id,
|
|
|
|
root_pipeline.id,
|
|
|
|
target_pipeline.id,
|
|
|
|
target_of_target_pipeline.id
|
|
|
|
]
|
|
|
|
end
|
|
|
|
|
|
|
|
it_behaves_like 'expanded'
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when does not have permission to read other projects' do
|
|
|
|
let(:expanded) { [source_pipeline.id, target_pipeline.id] }
|
|
|
|
|
|
|
|
it_behaves_like 'not expanded'
|
|
|
|
end
|
|
|
|
|
|
|
|
# Creates an empty pipeline for `project` containing a single rspec build,
# and returns the pipeline.
def create_pipeline(project)
  pipeline = create(:ci_empty_pipeline, project: project)
  create(:ci_build, pipeline: pipeline, stage: 'test', name: 'rspec')

  pipeline
end
|
|
|
|
|
|
|
|
# Links `pipeline` as a downstream pipeline triggered by a randomly chosen
# build of `source_pipeline`.
def create_link(source_pipeline, pipeline)
  link_attributes = {
    source_job: source_pipeline.builds.all.sample,
    source_project: source_pipeline.project,
    project: pipeline.project,
    pipeline: pipeline
  }

  source_pipeline.sourced_pipelines.create!(link_attributes)
end
|
|
|
|
|
|
|
|
# Fetches show.json for `pipeline`, forwarding the `expanded` let.
# `compact` drops the :expanded key entirely when the let is nil.
def get_pipeline_json(pipeline)
  request_params = {
    namespace_id: pipeline.project.namespace,
    project_id: pipeline.project,
    id: pipeline,
    expanded: expanded
  }.compact

  get :show, params: request_params, format: :json
end
|
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
# Requests show.json for the `pipeline` let of the enclosing context.
def get_pipeline_json
  show_params = {
    namespace_id: project.namespace,
    project_id: project,
    id: pipeline
  }

  get :show, params: show_params, format: :json
end
|
2017-09-10 17:25:29 +05:30
|
|
|
|
|
|
|
# Creates a plain build in the given stage of the `pipeline` let.
def create_build(stage, stage_idx, name)
  create(:ci_build,
         pipeline: pipeline,
         stage: stage,
         stage_idx: stage_idx,
         name: name)
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
2020-06-23 00:09:42 +05:30
|
|
|
describe 'GET dag.json' do
|
|
|
|
let(:pipeline) { create(:ci_pipeline, project: project) }
|
|
|
|
|
|
|
|
before do
|
|
|
|
create_build('build', 1, 'build')
|
|
|
|
create_build('test', 2, 'test', scheduling_type: 'dag').tap do |job|
|
|
|
|
create(:ci_build_need, build: job, name: 'build')
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns the pipeline with DAG serialization' do
|
|
|
|
get :dag, params: { namespace_id: project.namespace, project_id: project, id: pipeline }, format: :json
|
|
|
|
|
|
|
|
expect(response).to have_gitlab_http_status(:ok)
|
|
|
|
|
|
|
|
expect(json_response.fetch('stages')).not_to be_empty
|
|
|
|
|
|
|
|
build_stage = json_response['stages'].first
|
|
|
|
expect(build_stage.fetch('name')).to eq 'build'
|
|
|
|
expect(build_stage.fetch('groups').first.fetch('jobs'))
|
|
|
|
.to eq [{ 'name' => 'build', 'scheduling_type' => 'stage' }]
|
|
|
|
|
|
|
|
test_stage = json_response['stages'].last
|
|
|
|
expect(test_stage.fetch('name')).to eq 'test'
|
|
|
|
expect(test_stage.fetch('groups').first.fetch('jobs'))
|
|
|
|
.to eq [{ 'name' => 'test', 'scheduling_type' => 'dag', 'needs' => ['build'] }]
|
|
|
|
end
|
|
|
|
|
|
|
|
# Creates a build in the given stage of the `pipeline` let; any extra
# factory attributes (e.g. scheduling_type) can be passed via `params`.
def create_build(stage, stage_idx, name, params = {})
  create(:ci_build,
         pipeline: pipeline,
         stage: stage,
         stage_idx: stage_idx,
         name: name,
         **params)
end
|
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
describe 'GET stages.json' do
|
|
|
|
let(:pipeline) { create(:ci_pipeline, project: project) }
|
|
|
|
|
|
|
|
context 'when accessing existing stage' do
|
|
|
|
before do
|
2018-12-05 23:21:45 +05:30
|
|
|
create(:ci_build, :retried, :failed, pipeline: pipeline, stage: 'build')
|
2017-08-17 22:00:37 +05:30
|
|
|
create(:ci_build, pipeline: pipeline, stage: 'build')
|
2018-12-05 23:21:45 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
context 'without retried' do
|
|
|
|
before do
|
|
|
|
get_stage('build')
|
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
|
2018-12-05 23:21:45 +05:30
|
|
|
it 'returns pipeline jobs without the retried builds' do
|
|
|
|
expect(response).to have_gitlab_http_status(:ok)
|
|
|
|
expect(response).to match_response_schema('pipeline_stage')
|
|
|
|
expect(json_response['latest_statuses'].length).to eq 1
|
|
|
|
expect(json_response).not_to have_key('retried')
|
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
2018-12-05 23:21:45 +05:30
|
|
|
context 'with retried' do
|
|
|
|
before do
|
|
|
|
get_stage('build', retried: true)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns pipelines jobs with the retried builds' do
|
|
|
|
expect(response).to have_gitlab_http_status(:ok)
|
|
|
|
expect(response).to match_response_schema('pipeline_stage')
|
|
|
|
expect(json_response['latest_statuses'].length).to eq 1
|
|
|
|
expect(json_response['retried'].length).to eq 1
|
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when accessing unknown stage' do
|
|
|
|
before do
|
|
|
|
get_stage('test')
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'responds with not found' do
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(response).to have_gitlab_http_status(:not_found)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-12-05 23:21:45 +05:30
|
|
|
# Requests the JSON representation of a single stage of the `pipeline` let.
# NOTE(review): `format: :json` is sent inside `params:` (as a query
# parameter) rather than as the request format argument — preserved as-is
# since the stage action evidently accepts it this way.
def get_stage(name, params = {})
  stage_params = params.merge(
    namespace_id: project.namespace,
    project_id: project,
    id: pipeline.id,
    stage: name,
    format: :json
  )

  get :stage, params: stage_params
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe 'GET status.json' do
|
|
|
|
let(:pipeline) { create(:ci_pipeline, project: project) }
|
|
|
|
let(:status) { pipeline.detailed_status(double('user')) }
|
|
|
|
|
|
|
|
before do
|
2019-02-15 15:39:39 +05:30
|
|
|
get :status, params: {
|
|
|
|
namespace_id: project.namespace,
|
|
|
|
project_id: project,
|
|
|
|
id: pipeline.id
|
|
|
|
},
|
2017-08-17 22:00:37 +05:30
|
|
|
format: :json
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'return a detailed pipeline status in json' do
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(response).to have_gitlab_http_status(:ok)
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(json_response['text']).to eq status.text
|
|
|
|
expect(json_response['label']).to eq status.label
|
|
|
|
expect(json_response['icon']).to eq status.icon
|
2018-11-08 19:23:39 +05:30
|
|
|
expect(json_response['favicon']).to match_asset_path("/assets/ci_favicons/#{status.favicon}.png")
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2020-07-28 23:09:34 +05:30
|
|
|
describe 'GET #charts' do
|
|
|
|
let(:pipeline) { create(:ci_pipeline, project: project) }
|
|
|
|
|
2022-01-26 12:08:38 +05:30
|
|
|
[
|
|
|
|
{
|
|
|
|
chart_param: '',
|
|
|
|
event: 'p_analytics_ci_cd_pipelines'
|
|
|
|
},
|
|
|
|
{
|
|
|
|
chart_param: 'pipelines',
|
|
|
|
event: 'p_analytics_ci_cd_pipelines'
|
|
|
|
},
|
|
|
|
{
|
|
|
|
chart_param: 'deployment-frequency',
|
|
|
|
event: 'p_analytics_ci_cd_deployment_frequency'
|
|
|
|
},
|
|
|
|
{
|
|
|
|
chart_param: 'lead-time',
|
|
|
|
event: 'p_analytics_ci_cd_lead_time'
|
|
|
|
}
|
|
|
|
].each do |tab|
|
|
|
|
it_behaves_like 'tracking unique visits', :charts do
|
|
|
|
let(:request_params) { { namespace_id: project.namespace, project_id: project, id: pipeline.id, chart: tab[:chart_param] } }
|
|
|
|
let(:target_id) { ['p_analytics_pipelines', tab[:event]] }
|
|
|
|
end
|
2020-07-28 23:09:34 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2020-04-08 14:13:33 +05:30
|
|
|
# HTML-format pipeline creation: POSTing a ref creates a pipeline and
# redirects to the new pipeline's show page.
describe 'POST create' do
  # A dedicated project (shadows the outer let) so pipeline counts start clean.
  let(:project) { create(:project, :public, :repository) }

  before do
    project.add_developer(user)
    project.project_feature.update!(builds_access_level: feature)
  end

  context 'with a valid .gitlab-ci.yml file' do
    before do
      # Minimal valid config: a single job in the 'test' stage.
      stub_ci_pipeline_yaml_file(YAML.dump({
        test: {
          stage: 'test',
          script: 'echo'
        }
      }))
    end

    shared_examples 'creates a pipeline' do
      specify do
        expect { post_request }.to change { project.ci_pipelines.count }.by(1)

        pipeline = project.ci_pipelines.last
        expected_redirect_path = Gitlab::Routing.url_helpers.project_pipeline_path(project, pipeline)
        # Manually-created pipelines start in the 'created' state.
        expect(pipeline).to be_created
        expect(response).to redirect_to(expected_redirect_path)
      end
    end

    it_behaves_like 'creates a pipeline'

    # A manual (web-triggered) pipeline is still created even when the HEAD
    # commit message contains a skip token — skip only applies to push pipelines.
    # NOTE(review): the context title says '[ci skip]' but the commit message
    # below uses '[skip ci]'; GitLab honors both tokens, so the behavior under
    # test is the same — consider aligning the wording.
    context 'when latest commit contains [ci skip]' do
      before do
        project.repository.create_file(user, 'new-file.txt', 'A new file',
                                       message: '[skip ci] This is a test',
                                       branch_name: 'master')
      end

      it_behaves_like 'creates a pipeline'
    end
  end

  context 'with an invalid .gitlab-ci.yml file' do
    before do
      stub_ci_pipeline_yaml_file('invalid yaml file')
    end

    it 'does not persist a pipeline' do
      expect { post_request }.not_to change { project.ci_pipelines.count }

      # Invalid config re-renders the 'new' form with a 400.
      expect(response).to have_gitlab_http_status(:bad_request)
      expect(response).to render_template('new')
    end
  end

  # Helper: POST the create action for 'master' in HTML format.
  def post_request
    post :create, params: {
      namespace_id: project.namespace,
      project_id: project,
      pipeline: {
        ref: 'master'
      }
    }
  end
end
|
|
|
|
|
|
|
|
# JSON-format pipeline creation: returns the new pipeline id on success,
# and structured errors/warnings on config failure.
describe 'POST create.json' do
  # A dedicated project (shadows the outer let) so pipeline counts start clean.
  let(:project) { create(:project, :public, :repository) }

  subject do
    post :create, params: {
      namespace_id: project.namespace,
      project_id: project,
      pipeline: { ref: 'master' }
    },
    format: :json
  end

  before do
    project.add_developer(user)
    project.project_feature.update!(builds_access_level: feature)
  end

  context 'with a valid .gitlab-ci.yml file' do
    before do
      # Minimal valid config: a single job in the 'test' stage.
      stub_ci_pipeline_yaml_file(YAML.dump({
        test: {
          stage: 'test',
          script: 'echo'
        }
      }))
    end

    it 'creates a pipeline' do
      expect { subject }.to change { project.ci_pipelines.count }.by(1)

      expect(response).to have_gitlab_http_status(:created)
      expect(json_response['id']).to eq(project.ci_pipelines.last.id)
    end
  end

  context 'with an invalid .gitlab-ci.yml file' do
    before do
      # Config crafted to produce BOTH an error (unknown stage on 'test')
      # and a lint warning ('rules:when' without 'workflow:rules' on 'build').
      stub_ci_pipeline_yaml_file(YAML.dump({
        build: {
          stage: 'build',
          script: 'echo',
          rules: [{ when: 'always' }]
        },
        test: {
          stage: 'invalid',
          script: 'echo'
        }
      }))
    end

    it 'does not create a pipeline' do
      expect { subject }.not_to change { project.ci_pipelines.count }

      expect(response).to have_gitlab_http_status(:bad_request)
      expect(json_response['errors']).to eq([
        'test job: chosen stage does not exist; available stages are .pre, build, test, deploy, .post'
      ])
      expect(json_response['warnings'][0]).to include(
        'jobs:build may allow multiple pipelines to run for a single action due to `rules:when`'
      )
      expect(json_response['total_warnings']).to eq(1)
    end
  end
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
# Retrying a failed pipeline: the controller enqueues RetryPipelineWorker
# asynchronously (spied via stub_const) and returns 204 with no body.
describe 'POST retry.json' do
  subject(:post_retry) do
    post :retry, params: {
      namespace_id: project.namespace,
      project_id: project,
      id: pipeline.id
    },
    format: :json
  end

  # Eagerly created so the records exist before the request in `subject` runs.
  let!(:pipeline) { create(:ci_pipeline, :failed, project: project) }
  let!(:build) { create(:ci_build, :failed, pipeline: pipeline) }

  # Replace the worker class with a spy so no background job actually runs.
  let(:worker_spy) { class_spy(::Ci::RetryPipelineWorker) }

  before do
    stub_const('::Ci::RetryPipelineWorker', worker_spy)
  end

  it 'retries a pipeline in the background without returning any content' do
    post_retry

    expect(response).to have_gitlab_http_status(:no_content)
    expect(::Ci::RetryPipelineWorker).to have_received(:perform_async).with(pipeline.id, user.id)
  end

  context 'when builds are disabled' do
    # Overrides the outer `feature` let consumed by the top-level before block.
    let(:feature) { ProjectFeature::DISABLED }

    it 'fails to retry pipeline' do
      post_retry

      expect(response).to have_gitlab_http_status(:not_found)
    end
  end

  context 'when access denied' do
    it 'returns an error' do
      # A fresh user with no project membership.
      sign_in(create(:user))

      post_retry

      expect(response).to have_gitlab_http_status(:not_found)
    end
  end

  context 'when service returns an error' do
    before do
      service_response = ServiceResponse.error(message: 'some error', http_status: 404)
      allow_next_instance_of(::Ci::RetryPipelineService) do |service|
        allow(service).to receive(:check_access).and_return(service_response)
      end
    end

    it 'does not retry' do
      post_retry

      expect(response).to have_gitlab_http_status(:not_found)
      expect(response.body).to include('some error')
      # The worker must NOT have been enqueued when access is denied.
      expect(::Ci::RetryPipelineWorker).not_to have_received(:perform_async).with(pipeline.id, user.id)
    end
  end
end
|
|
|
|
|
|
|
|
# Cancelling a running pipeline: the POST happens in the before block, so
# each example only asserts on the resulting response/state.
describe 'POST cancel.json' do
  # Eagerly created so the records exist before the request below runs.
  let!(:pipeline) { create(:ci_pipeline, project: project) }
  let!(:build) { create(:ci_build, :running, pipeline: pipeline) }

  before do
    post :cancel, params: {
      namespace_id: project.namespace,
      project_id: project,
      id: pipeline.id
    },
    format: :json
  end

  it 'cancels a pipeline without returning any content', :sidekiq_might_not_need_inline do
    expect(response).to have_gitlab_http_status(:no_content)
    expect(pipeline.reload).to be_canceled
  end

  context 'when builds are disabled' do
    # Overrides the outer `feature` let consumed by the top-level before block.
    let(:feature) { ProjectFeature::DISABLED }

    # Fixed copy-paste from the retry spec: this endpoint cancels, not retries.
    it 'fails to cancel pipeline' do
      expect(response).to have_gitlab_http_status(:not_found)
    end
  end
end
|
2019-12-04 20:38:33 +05:30
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
# Test report endpoint: serializes JUnit results attached to the pipeline,
# including attachment URLs, empty reports, and corrupt-XML handling.
describe 'GET test_report.json' do
  let(:pipeline) { create(:ci_pipeline, project: project) }

  context 'with attachments' do
    # JUnit blob with an [[ATTACHMENT|...]] marker in system-out; yielded in
    # place of real artifact content via the each_blob stub below.
    let(:blob) do
      <<~EOF
        <testsuites>
        <testsuite>
        <testcase classname='Calculator' name='sumTest1' time='0.01'>
        <failure>Some failure</failure>
        <system-out>[[ATTACHMENT|some/path.png]]</system-out>
        </testcase>
        </testsuite>
        </testsuites>
      EOF
    end

    before do
      allow_any_instance_of(Ci::JobArtifact).to receive(:each_blob).and_yield(blob)
    end

    it 'does not have N+1 problem with attachments' do
      # Warm-up request so lazy-loaded baseline queries don't skew the count.
      get_test_report_json

      create(:ci_build, name: 'rspec', pipeline: pipeline).tap do |build|
        create(:ci_job_artifact, :junit, job: build)
      end

      clear_controller_memoization

      control_count = ActiveRecord::QueryRecorder.new { get_test_report_json }.count

      # Add a second build+artifact; query count must not grow with it.
      create(:ci_build, name: 'karma', pipeline: pipeline).tap do |build|
        create(:ci_job_artifact, :junit, job: build)
      end

      clear_controller_memoization

      expect { get_test_report_json }.not_to exceed_query_limit(control_count)
    end
  end

  context 'when pipeline does not have a test report' do
    it 'renders an empty test report' do
      get_test_report_json

      expect(response).to have_gitlab_http_status(:ok)
      expect(json_response['total_count']).to eq(0)
    end
  end

  context 'when pipeline has a test report' do
    before do
      create(:ci_build, :test_reports, name: 'rspec', pipeline: pipeline)
    end

    it 'renders the test report' do
      get_test_report_json

      expect(response).to have_gitlab_http_status(:ok)
      # 4 is the test-case count baked into the :test_reports factory trait.
      expect(json_response['total_count']).to eq(4)
    end
  end

  context 'when pipeline has a corrupt test report artifact' do
    before do
      create(:ci_build, :broken_test_reports, name: 'rspec', pipeline: pipeline)

      get_test_report_json
    end

    it 'renders the test reports' do
      expect(response).to have_gitlab_http_status(:ok)
      expect(json_response['test_suites'].count).to eq(1)
    end

    it 'returns a suite_error on the suite with corrupted XML' do
      expect(json_response['test_suites'].first['suite_error']).to eq('JUnit XML parsing failed: 1:1: FATAL: Document is empty')
    end
  end

  context 'when test_report contains attachment and scope is with_attachment as a URL param' do
    let(:pipeline) { create(:ci_pipeline, :with_test_reports_attachment, project: project) }

    it 'returns a test reports with attachment' do
      get_test_report_json(scope: 'with_attachment')

      expect(response).to have_gitlab_http_status(:ok)
      expect(json_response["test_suites"]).to be_present
      expect(json_response["test_suites"].first["test_cases"].first).to include("attachment_url")
    end
  end

  context 'when test_report does not contain attachment and scope is with_attachment as a URL param' do
    let(:pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }

    it 'returns a test reports with empty values' do
      get_test_report_json(scope: 'with_attachment')

      expect(response).to have_gitlab_http_status(:ok)
      expect(json_response["test_suites"]).to be_empty
    end
  end

  # Helper: GET the test_report action; extra URL params (e.g. scope:) can be
  # passed as keyword arguments.
  def get_test_report_json(**args)
    params = {
      namespace_id: project.namespace,
      project_id: project,
      id: pipeline.id
    }

    # `**args` always binds a Hash (possibly empty), never nil, so the former
    # `if args` guard was dead code and has been removed.
    params.merge!(args)

    get :test_report,
        params: params,
        format: :json
  end

  # Reset controller-level memoization so repeated requests in one example
  # re-run their queries (required for accurate QueryRecorder counts).
  def clear_controller_memoization
    controller.clear_memoization(:pipeline_test_report)
    controller.instance_variable_set(:@pipeline, nil)
  end
end
|
|
|
|
|
2019-12-04 20:38:33 +05:30
|
|
|
# 'latest' pipeline lookup via GET show with latest: true — resolves the most
# recent pipeline for the default branch or an explicitly provided ref.
describe 'GET latest' do
  # First two branches of the test repository; which branches these are
  # depends on the :repository fixture's branch ordering.
  let(:branch_main) { project.repository.branches[0] }
  let(:branch_secondary) { project.repository.branches[1] }

  let!(:pipeline_master) do
    create(:ci_pipeline,
           ref: branch_main.name,
           sha: branch_main.target,
           project: project)
  end

  let!(:pipeline_secondary) do
    create(:ci_pipeline,
           ref: branch_secondary.name,
           sha: branch_secondary.target,
           project: project)
  end

  before do
    # Make branch_main the default branch so the no-ref case has a target.
    project.change_head(branch_main.name)
    project.reload_default_branch
  end

  context 'no ref provided' do
    it 'shows latest pipeline for the default project branch' do
      get :show, params: { namespace_id: project.namespace, project_id: project, latest: true, ref: nil }

      expect(response).to have_gitlab_http_status(:ok)
      expect(assigns(:pipeline)).to have_attributes(id: pipeline_master.id)
    end
  end

  context 'ref provided' do
    render_views

    before do
      create(:ci_pipeline, ref: 'master', project: project)
    end

    it 'shows a 404 if no pipeline exists' do
      get :show, params: { namespace_id: project.namespace, project_id: project, latest: true, ref: 'non-existence' }

      expect(response).to have_gitlab_http_status(:not_found)
    end

    it 'shows the latest pipeline for the provided ref' do
      get :show, params: { namespace_id: project.namespace, project_id: project, latest: true, ref: branch_secondary.name }

      expect(response).to have_gitlab_http_status(:ok)
      expect(assigns(:pipeline)).to have_attributes(id: pipeline_secondary.id)
    end

    context 'newer pipeline exists for older sha' do
      before do
        # Pipeline created later but pointing at the PARENT commit — the
        # lookup must still prefer the pipeline for the branch head sha.
        create(:ci_pipeline, ref: branch_secondary.name, sha: project.commit(branch_secondary.name).parent, project: project)
      end

      it 'shows the provided ref with the last sha/pipeline combo' do
        get :show, params: { namespace_id: project.namespace, project_id: project, latest: true, ref: branch_secondary.name }

        expect(response).to have_gitlab_http_status(:ok)
        expect(assigns(:pipeline)).to have_attributes(id: pipeline_secondary.id)
      end
    end
  end

  it 'renders a 404 if no pipeline is found for the ref' do
    get :show, params: { namespace_id: project.namespace, project_id: project, ref: 'no-branch' }

    expect(response).to have_gitlab_http_status(:not_found)
  end
end
|
|
|
|
|
|
|
|
# Pipeline deletion: owners may destroy a pipeline (cascading to its builds);
# non-members are forbidden; disabled builds feature hides the endpoint.
describe 'DELETE #destroy' do
  # Private project (shadows the outer public project) so permission
  # differences between owner and stranger are observable.
  let!(:project) { create(:project, :private, :repository) }
  let!(:pipeline) { create(:ci_pipeline, :failed, project: project) }
  let!(:build) { create(:ci_build, :failed, pipeline: pipeline) }

  context 'when user has ability to delete pipeline' do
    before do
      sign_in(project.first_owner)
    end

    it 'deletes pipeline and redirects' do
      delete_pipeline

      # 303 See Other — the controller redirects after destroying.
      expect(response).to have_gitlab_http_status(:see_other)

      # Both the pipeline and its builds are gone (cascade delete).
      expect(Ci::Build.exists?(build.id)).to be_falsy
      expect(Ci::Pipeline.exists?(pipeline.id)).to be_falsy
    end

    context 'and builds are disabled' do
      # Overrides the outer `feature` let consumed by the top-level before block.
      let(:feature) { ProjectFeature::DISABLED }

      it 'fails to delete pipeline' do
        delete_pipeline

        expect(response).to have_gitlab_http_status(:not_found)
      end
    end
  end

  context 'when user has no privileges' do
    it 'fails to delete pipeline' do
      delete_pipeline

      # Developer (outer sign_in) can see the pipeline but may not destroy it.
      expect(response).to have_gitlab_http_status(:forbidden)
    end
  end

  # Helper: issue the destroy request for the pipeline under test.
  def delete_pipeline
    delete :destroy, params: {
      namespace_id: project.namespace,
      project_id: project,
      id: pipeline.id
    }
  end
end
|
2021-01-03 14:25:43 +05:30
|
|
|
|
2021-02-22 17:27:13 +05:30
|
|
|
# Config-variables endpoint: extracts the `variables:` block from the
# project's CI config for a given sha, via ListConfigVariablesService,
# which is backed by reactive caching (hence the helpers/stubs below).
describe 'GET config_variables.json', :use_clean_rails_memory_store_caching do
  include ReactiveCachingHelpers

  # The raw YAML the repository stub returns for the requested sha.
  let(:result) { YAML.dump(ci_config) }
  let(:service) { Ci::ListConfigVariablesService.new(project, user) }

  before do
    stub_gitlab_ci_yml_for_sha(sha, result)
    # Pin the service instance so synchronous_reactive_cache can target it.
    allow(Ci::ListConfigVariablesService)
      .to receive(:new)
      .and_return(service)
  end

  context 'when sending a valid sha' do
    let(:sha) { 'master' }
    let(:ci_config) do
      {
        variables: {
          KEY1: { value: 'val 1', description: 'description 1' }
        },
        test: {
          stage: 'test',
          script: 'echo'
        }
      }
    end

    before do
      # Compute the reactive cache inline instead of via a background worker.
      synchronous_reactive_cache(service)
    end

    it 'returns variable list' do
      get_config_variables

      expect(response).to have_gitlab_http_status(:ok)
      expect(json_response['KEY1']).to eq({ 'value' => 'val 1', 'description' => 'description 1' })
    end
  end

  context 'when sending an invalid sha' do
    let(:sha) { 'invalid-sha' }
    # nil config → YAML.dump(nil) → no variables resolvable.
    let(:ci_config) { nil }

    before do
      synchronous_reactive_cache(service)
    end

    it 'returns empty json' do
      get_config_variables

      expect(response).to have_gitlab_http_status(:ok)
      expect(json_response).to eq({})
    end
  end

  context 'when sending an invalid config' do
    let(:sha) { 'master' }
    let(:ci_config) do
      {
        variables: {
          KEY1: { value: 'val 1', description: 'description 1' }
        },
        test: {
          # Unknown stage makes the whole config invalid, so no variables
          # are returned even though `variables:` itself is well-formed.
          stage: 'invalid',
          script: 'echo'
        }
      }
    end

    before do
      synchronous_reactive_cache(service)
    end

    it 'returns empty result' do
      get_config_variables

      expect(response).to have_gitlab_http_status(:ok)
      expect(json_response).to eq({})
    end
  end

  context 'when the cache is empty' do
    let(:sha) { 'master' }
    let(:ci_config) do
      {
        variables: {
          KEY1: { value: 'val 1', description: 'description 1' }
        },
        test: {
          stage: 'test',
          script: 'echo'
        }
      }
    end

    # No synchronous_reactive_cache here: the cache miss means the
    # controller responds 204 while the value is computed asynchronously.
    it 'returns no content' do
      get_config_variables

      expect(response).to have_gitlab_http_status(:no_content)
    end
  end

  context 'when project uses external project ci config' do
    let(:other_project) { create(:project) }
    let(:sha) { 'master' }
    # Service scoped to the OTHER project, which hosts the CI config.
    let(:service) { ::Ci::ListConfigVariablesService.new(other_project, user) }

    let(:ci_config) do
      {
        variables: {
          KEY1: { value: 'val 1', description: 'description 1' }
        },
        test: {
          stage: 'test',
          script: 'echo'
        }
      }
    end

    before do
      # Point this project's CI config path at the other project's file.
      project.update!(ci_config_path: ".gitlab-ci.yml@#{other_project.full_path}")
      synchronous_reactive_cache(service)
    end

    it 'returns other project config variables' do
      # The controller must instantiate the service with the config-hosting
      # project, not the requested project.
      expect(::Ci::ListConfigVariablesService).to receive(:new).with(other_project, anything).and_return(service)

      get_config_variables

      expect(response).to have_gitlab_http_status(:ok)
      expect(json_response['KEY1']).to eq({ 'value' => 'val 1', 'description' => 'description 1' })
    end
  end

  private

  # Stub the repository so any sha/'.gitlab-ci.yml' lookup yields `result`.
  def stub_gitlab_ci_yml_for_sha(sha, result)
    allow_any_instance_of(Repository)
      .to receive(:gitlab_ci_yml_for)
      .with(sha, '.gitlab-ci.yml')
      .and_return(result)
  end

  # Helper: GET config_variables for the sha defined by the enclosing context.
  def get_config_variables
    get :config_variables, params: { namespace_id: project.namespace,
                                     project_id: project,
                                     sha: sha },
                           format: :json
  end
end
|
2021-06-08 01:23:25 +05:30
|
|
|
|
|
|
|
# Downloadable-artifacts endpoint: lists artifacts a user can download for
# a pipeline, 404ing when the pipeline belongs to no project in scope.
describe 'GET downloadable_artifacts.json' do
  context 'when pipeline is empty' do
    # :ci_empty_pipeline with no project association — the controller cannot
    # resolve it under this project, hence 404.
    let(:pipeline) { create(:ci_empty_pipeline) }

    it 'returns status not_found' do
      get_downloadable_artifacts_json

      expect(response).to have_gitlab_http_status(:not_found)
    end
  end

  context 'when pipeline exists' do
    context 'when pipeline does not have any downloadable artifacts' do
      let(:pipeline) { create(:ci_pipeline, project: project) }

      it 'returns an empty array' do
        get_downloadable_artifacts_json

        expect(response).to have_gitlab_http_status(:ok)
        expect(json_response['artifacts']).to be_empty
      end
    end

    context 'when pipeline has downloadable artifacts' do
      let(:pipeline) { create(:ci_pipeline, :with_codequality_reports, project: project) }

      before do
        # Second artifact source: a junit report on an extra build.
        create(:ci_build, name: 'rspec', pipeline: pipeline).tap do |build|
          create(:ci_job_artifact, :junit, job: build)
        end
      end

      it 'returns an array of artifacts' do
        get_downloadable_artifacts_json

        expect(response).to have_gitlab_http_status(:ok)
        expect(json_response['artifacts']).to be_kind_of(Array)
        # One from the codequality trait, one from the junit artifact above.
        expect(json_response['artifacts'].size).to eq(2)
      end
    end
  end

  private

  # Helper: GET downloadable_artifacts for the context's pipeline.
  def get_downloadable_artifacts_json
    get :downloadable_artifacts,
        params: {
          namespace_id: project.namespace,
          project_id: project,
          id: pipeline.id
        },
        format: :json
  end
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|