# frozen_string_literal: true

require 'spec_helper'
RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
2017-08-17 22:00:37 +05:30
include StubGitlabCalls
2018-10-15 14:42:47 +05:30
include RedisHelpers
2019-12-21 20:55:43 +05:30
include WorkhorseHelpers
2017-08-17 22:00:37 +05:30
let(:registration_token) { 'abcdefg123456' }
before do
2018-10-15 14:42:47 +05:30
stub_feature_flags(ci_enable_live_trace: true)
2017-08-17 22:00:37 +05:30
stub_gitlab_calls
stub_application_setting(runners_registration_token: registration_token)
2020-07-28 23:09:34 +05:30
allow_any_instance_of(::Ci::Runner).to receive(:cache_attributes)
2017-08-17 22:00:37 +05:30
end
describe '/api/v4/runners' do
describe 'POST /api/v4/runners' do
context 'when no token is provided' do
it 'returns 400 error' do
post api('/runners')
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:bad_request)
2017-08-17 22:00:37 +05:30
end
end
context 'when invalid token is provided' do
it 'returns 403 error' do
2019-02-15 15:39:39 +05:30
post api('/runners'), params: { token: 'invalid' }
2017-08-17 22:00:37 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:forbidden)
2017-08-17 22:00:37 +05:30
end
end
context 'when valid token is provided' do
it 'creates runner with default values' do
2019-02-15 15:39:39 +05:30
post api('/runners'), params: { token: registration_token }
2017-08-17 22:00:37 +05:30
2020-07-28 23:09:34 +05:30
runner = ::Ci::Runner.first
2017-08-17 22:00:37 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2017-08-17 22:00:37 +05:30
expect(json_response['id']).to eq(runner.id)
expect(json_response['token']).to eq(runner.token)
expect(runner.run_untagged).to be true
2018-11-08 19:23:39 +05:30
expect(runner.active).to be true
2017-08-17 22:00:37 +05:30
expect(runner.token).not_to eq(registration_token)
2018-10-15 14:42:47 +05:30
expect(runner).to be_instance_type
2017-08-17 22:00:37 +05:30
end
context 'when project token is used' do
2017-09-10 17:25:29 +05:30
let(:project) { create(:project) }
2017-08-17 22:00:37 +05:30
2018-10-15 14:42:47 +05:30
it 'creates project runner' do
2019-02-15 15:39:39 +05:30
post api('/runners'), params: { token: project.runners_token }
2017-08-17 22:00:37 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2017-08-17 22:00:37 +05:30
expect(project.runners.size).to eq(1)
2020-07-28 23:09:34 +05:30
runner = ::Ci::Runner.first
2018-10-15 14:42:47 +05:30
expect(runner.token).not_to eq(registration_token)
expect(runner.token).not_to eq(project.runners_token)
expect(runner).to be_project_type
end
end
context 'when group token is used' do
let(:group) { create(:group) }
it 'creates a group runner' do
2019-02-15 15:39:39 +05:30
post api('/runners'), params: { token: group.runners_token }
2018-10-15 14:42:47 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2019-07-31 22:56:46 +05:30
expect(group.runners.reload.size).to eq(1)
2020-07-28 23:09:34 +05:30
runner = ::Ci::Runner.first
2018-10-15 14:42:47 +05:30
expect(runner.token).not_to eq(registration_token)
expect(runner.token).not_to eq(group.runners_token)
expect(runner).to be_group_type
2017-08-17 22:00:37 +05:30
end
end
end
context 'when runner description is provided' do
it 'creates runner' do
2019-02-15 15:39:39 +05:30
post api('/runners'), params: {
token: registration_token,
description: 'server.hostname'
}
2017-08-17 22:00:37 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2020-07-28 23:09:34 +05:30
expect(::Ci::Runner.first.description).to eq('server.hostname')
2017-08-17 22:00:37 +05:30
end
end
context 'when runner tags are provided' do
it 'creates runner' do
2019-02-15 15:39:39 +05:30
post api('/runners'), params: {
token: registration_token,
tag_list: 'tag1, tag2'
}
2017-08-17 22:00:37 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2020-07-28 23:09:34 +05:30
expect(::Ci::Runner.first.tag_list.sort).to eq(%w(tag1 tag2))
2017-08-17 22:00:37 +05:30
end
end
context 'when option for running untagged jobs is provided' do
context 'when tags are provided' do
it 'creates runner' do
2019-02-15 15:39:39 +05:30
post api('/runners'), params: {
token: registration_token,
run_untagged: false,
tag_list: ['tag']
}
2017-08-17 22:00:37 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2020-07-28 23:09:34 +05:30
expect(::Ci::Runner.first.run_untagged).to be false
expect(::Ci::Runner.first.tag_list.sort).to eq(['tag'])
2017-08-17 22:00:37 +05:30
end
end
context 'when tags are not provided' do
2018-11-08 19:23:39 +05:30
it 'returns 400 error' do
2019-02-15 15:39:39 +05:30
post api('/runners'), params: {
token: registration_token,
run_untagged: false
}
2017-08-17 22:00:37 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:bad_request)
2018-11-08 19:23:39 +05:30
expect(json_response['message']).to include(
'tags_list' => ['can not be empty when runner is not allowed to pick untagged jobs'])
2017-08-17 22:00:37 +05:30
end
end
end
context 'when option for locking Runner is provided' do
it 'creates runner' do
2019-02-15 15:39:39 +05:30
post api('/runners'), params: {
token: registration_token,
locked: true
}
2017-08-17 22:00:37 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2020-07-28 23:09:34 +05:30
expect(::Ci::Runner.first.locked).to be true
2017-08-17 22:00:37 +05:30
end
end
2018-11-08 19:23:39 +05:30
context 'when option for activating a Runner is provided' do
context 'when active is set to true' do
it 'creates runner' do
2019-02-15 15:39:39 +05:30
post api('/runners'), params: {
token: registration_token,
active: true
}
2018-11-08 19:23:39 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2020-07-28 23:09:34 +05:30
expect(::Ci::Runner.first.active).to be true
2018-11-08 19:23:39 +05:30
end
end
context 'when active is set to false' do
it 'creates runner' do
2019-02-15 15:39:39 +05:30
post api('/runners'), params: {
token: registration_token,
active: false
}
2018-11-08 19:23:39 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2020-07-28 23:09:34 +05:30
expect(::Ci::Runner.first.active).to be false
2018-11-08 19:23:39 +05:30
end
end
end
2019-07-31 22:56:46 +05:30
context 'when access_level is provided for Runner' do
context 'when access_level is set to ref_protected' do
it 'creates runner' do
post api('/runners'), params: {
token: registration_token,
access_level: 'ref_protected'
}
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2020-07-28 23:09:34 +05:30
expect(::Ci::Runner.first.ref_protected?).to be true
2019-07-31 22:56:46 +05:30
end
end
context 'when access_level is set to not_protected' do
it 'creates runner' do
post api('/runners'), params: {
token: registration_token,
access_level: 'not_protected'
}
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2020-07-28 23:09:34 +05:30
expect(::Ci::Runner.first.ref_protected?).to be false
2019-07-31 22:56:46 +05:30
end
end
end
2018-05-09 12:01:36 +05:30
context 'when maximum job timeout is specified' do
it 'creates runner' do
2019-02-15 15:39:39 +05:30
post api('/runners'), params: {
token: registration_token,
maximum_timeout: 9000
}
2018-05-09 12:01:36 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2020-07-28 23:09:34 +05:30
expect(::Ci::Runner.first.maximum_timeout).to eq(9000)
2018-05-09 12:01:36 +05:30
end
context 'when maximum job timeout is empty' do
it 'creates runner' do
2019-02-15 15:39:39 +05:30
post api('/runners'), params: {
token: registration_token,
maximum_timeout: ''
}
2018-05-09 12:01:36 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2020-07-28 23:09:34 +05:30
expect(::Ci::Runner.first.maximum_timeout).to be_nil
2018-05-09 12:01:36 +05:30
end
end
end
2017-08-17 22:00:37 +05:30
%w(name version revision platform architecture).each do |param|
context "when info parameter '#{param}' info is present" do
let(:value) { "#{param}_value" }
it "updates provided Runner's parameter" do
2019-02-15 15:39:39 +05:30
post api('/runners'), params: {
token: registration_token,
info: { param => value }
}
2017-08-17 22:00:37 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2020-07-28 23:09:34 +05:30
expect(::Ci::Runner.first.read_attribute(param.to_sym)).to eq(value)
2017-08-17 22:00:37 +05:30
end
end
end
2018-03-27 19:54:05 +05:30
it "sets the runner's ip_address" do
post api('/runners'),
2019-03-02 22:35:43 +05:30
params: { token: registration_token },
headers: { 'X-Forwarded-For' => '123.111.123.111' }
2018-03-27 19:54:05 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2020-07-28 23:09:34 +05:30
expect(::Ci::Runner.first.ip_address).to eq('123.111.123.111')
2018-03-27 19:54:05 +05:30
end
2017-08-17 22:00:37 +05:30
end
describe 'DELETE /api/v4/runners' do
context 'when no token is provided' do
it 'returns 400 error' do
delete api('/runners')
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:bad_request)
2017-08-17 22:00:37 +05:30
end
end
context 'when invalid token is provided' do
it 'returns 403 error' do
2019-02-15 15:39:39 +05:30
delete api('/runners'), params: { token: 'invalid' }
2017-08-17 22:00:37 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:forbidden)
2017-08-17 22:00:37 +05:30
end
end
context 'when valid token is provided' do
let(:runner) { create(:ci_runner) }
it 'deletes Runner' do
2019-02-15 15:39:39 +05:30
delete api('/runners'), params: { token: runner.token }
2017-08-17 22:00:37 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:no_content)
2020-07-28 23:09:34 +05:30
expect(::Ci::Runner.count).to eq(0)
2017-08-17 22:00:37 +05:30
end
2018-03-17 18:26:18 +05:30
it_behaves_like '412 response' do
let(:request) { api('/runners') }
let(:params) { { token: runner.token } }
end
2017-08-17 22:00:37 +05:30
end
end
describe 'POST /api/v4/runners/verify' do
let(:runner) { create(:ci_runner) }
context 'when no token is provided' do
it 'returns 400 error' do
post api('/runners/verify')
2018-03-17 18:26:18 +05:30
expect(response).to have_gitlab_http_status :bad_request
2017-08-17 22:00:37 +05:30
end
end
context 'when invalid token is provided' do
it 'returns 403 error' do
2019-02-15 15:39:39 +05:30
post api('/runners/verify'), params: { token: 'invalid-token' }
2017-08-17 22:00:37 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:forbidden)
2017-08-17 22:00:37 +05:30
end
end
context 'when valid token is provided' do
it 'verifies Runner credentials' do
2019-02-15 15:39:39 +05:30
post api('/runners/verify'), params: { token: runner.token }
2017-08-17 22:00:37 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:ok)
2017-08-17 22:00:37 +05:30
end
end
end
end
describe '/api/v4/jobs' do
2020-03-13 15:44:24 +05:30
shared_examples 'application context metadata' do |api_route|
it 'contains correct context metadata' do
# Avoids popping the context from the thread so we can
# check its content after the request.
allow(Labkit::Context).to receive(:pop)
send_request
Labkit::Context.with_context do |context|
expected_context = {
'meta.caller_id' => api_route,
'meta.user' => job.user.username,
'meta.project' => job.project.full_path,
'meta.root_namespace' => job.project.full_path_components.first
}
expect(context.to_h).to include(expected_context)
end
end
end
2019-12-21 20:55:43 +05:30
let(:root_namespace) { create(:namespace) }
let(:namespace) { create(:namespace, parent: root_namespace) }
let(:project) { create(:project, namespace: namespace, shared_runners_enabled: false) }
2019-12-26 22:10:19 +05:30
let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master') }
2018-11-08 19:23:39 +05:30
let(:runner) { create(:ci_runner, :project, projects: [project]) }
2020-03-13 15:44:24 +05:30
let(:user) { create(:user) }
2018-05-09 12:01:36 +05:30
let(:job) do
2017-08-17 22:00:37 +05:30
create(:ci_build, :artifacts, :extended_options,
2019-02-15 15:39:39 +05:30
pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0)
2017-08-17 22:00:37 +05:30
end
describe 'POST /api/v4/jobs/request' do
let!(:last_update) {}
let!(:new_update) { }
let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' }
2017-09-10 17:25:29 +05:30
before do
2018-05-09 12:01:36 +05:30
job
2017-09-10 17:25:29 +05:30
stub_container_registry_config(enabled: false)
end
2017-08-17 22:00:37 +05:30
shared_examples 'no jobs available' do
2017-09-10 17:25:29 +05:30
before do
request_job
end
2017-08-17 22:00:37 +05:30
context 'when runner sends version in User-Agent' do
context 'for stable version' do
it 'gives 204 and set X-GitLab-Last-Update' do
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:no_content)
2017-08-17 22:00:37 +05:30
expect(response.header).to have_key('X-GitLab-Last-Update')
end
end
context 'when last_update is up-to-date' do
let(:last_update) { runner.ensure_runner_queue_value }
it 'gives 204 and set the same X-GitLab-Last-Update' do
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:no_content)
2017-08-17 22:00:37 +05:30
expect(response.header['X-GitLab-Last-Update']).to eq(last_update)
end
end
context 'when last_update is outdated' do
let(:last_update) { runner.ensure_runner_queue_value }
let(:new_update) { runner.tick_runner_queue }
it 'gives 204 and set a new X-GitLab-Last-Update' do
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:no_content)
2017-08-17 22:00:37 +05:30
expect(response.header['X-GitLab-Last-Update']).to eq(new_update)
end
end
context 'when beta version is sent' do
let(:user_agent) { 'gitlab-runner 9.0.0~beta.167.g2b2bacc (master; go1.7.4; linux/amd64)' }
2020-04-08 14:13:33 +05:30
it { expect(response).to have_gitlab_http_status(:no_content) }
2017-08-17 22:00:37 +05:30
end
context 'when pre-9-0 version is sent' do
let(:user_agent) { 'gitlab-ci-multi-runner 1.6.0 (1-6-stable; go1.6.3; linux/amd64)' }
2020-04-08 14:13:33 +05:30
it { expect(response).to have_gitlab_http_status(:no_content) }
2017-08-17 22:00:37 +05:30
end
context 'when pre-9-0 beta version is sent' do
let(:user_agent) { 'gitlab-ci-multi-runner 1.6.0~beta.167.g2b2bacc (master; go1.6.3; linux/amd64)' }
2020-04-08 14:13:33 +05:30
it { expect(response).to have_gitlab_http_status(:no_content) }
2017-08-17 22:00:37 +05:30
end
end
end
context 'when no token is provided' do
it 'returns 400 error' do
post api('/jobs/request')
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:bad_request)
2017-08-17 22:00:37 +05:30
end
end
context 'when invalid token is provided' do
it 'returns 403 error' do
2019-02-15 15:39:39 +05:30
post api('/jobs/request'), params: { token: 'invalid' }
2017-08-17 22:00:37 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:forbidden)
2017-08-17 22:00:37 +05:30
end
end
context 'when valid token is provided' do
context 'when Runner is not active' do
let(:runner) { create(:ci_runner, :inactive) }
2018-11-08 19:23:39 +05:30
let(:update_value) { runner.ensure_runner_queue_value }
2017-08-17 22:00:37 +05:30
it 'returns 204 error' do
request_job
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:no_content)
2018-11-08 19:23:39 +05:30
expect(response.header['X-GitLab-Last-Update']).to eq(update_value)
2017-08-17 22:00:37 +05:30
end
end
context 'when jobs are finished' do
2017-09-10 17:25:29 +05:30
before do
job.success
end
2017-08-17 22:00:37 +05:30
it_behaves_like 'no jobs available'
end
context 'when other projects have pending jobs' do
before do
job.success
create(:ci_build, :pending)
end
it_behaves_like 'no jobs available'
end
context 'when shared runner requests job for project without shared_runners_enabled' do
2018-11-08 19:23:39 +05:30
let(:runner) { create(:ci_runner, :instance) }
2017-08-17 22:00:37 +05:30
it_behaves_like 'no jobs available'
end
context 'when there is a pending job' do
let(:expected_job_info) do
{ 'name' => job.name,
'stage' => job.stage,
'project_id' => job.project.id,
'project_name' => job.project.name }
end
let(:expected_git_info) do
{ 'repo_url' => job.repo_url,
'ref' => job.ref,
'sha' => job.sha,
'before_sha' => job.before_sha,
2019-07-07 11:18:12 +05:30
'ref_type' => 'branch',
2020-05-24 23:13:21 +05:30
'refspecs' => ["+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
"+refs/heads/#{job.ref}:refs/remotes/origin/#{job.ref}"],
2019-09-04 21:01:54 +05:30
'depth' => project.ci_default_git_depth }
2017-08-17 22:00:37 +05:30
end
let(:expected_steps) do
[{ 'name' => 'script',
2019-02-15 15:39:39 +05:30
'script' => %w(echo),
2018-05-09 12:01:36 +05:30
'timeout' => job.metadata_timeout,
2017-08-17 22:00:37 +05:30
'when' => 'on_success',
'allow_failure' => false },
{ 'name' => 'after_script',
'script' => %w(ls date),
2018-05-09 12:01:36 +05:30
'timeout' => job.metadata_timeout,
2017-08-17 22:00:37 +05:30
'when' => 'always',
'allow_failure' => true }]
end
let(:expected_variables) do
2019-07-07 11:18:12 +05:30
[{ 'key' => 'CI_JOB_NAME', 'value' => 'spinach', 'public' => true, 'masked' => false },
{ 'key' => 'CI_JOB_STAGE', 'value' => 'test', 'public' => true, 'masked' => false },
{ 'key' => 'DB_NAME', 'value' => 'postgres', 'public' => true, 'masked' => false }]
2017-08-17 22:00:37 +05:30
end
let(:expected_artifacts) do
[{ 'name' => 'artifacts_file',
'untracked' => false,
'paths' => %w(out/),
'when' => 'always',
2018-11-18 11:00:15 +05:30
'expire_in' => '7d',
"artifact_type" => "archive",
"artifact_format" => "zip" }]
2017-08-17 22:00:37 +05:30
end
let(:expected_cache) do
[{ 'key' => 'cache_key',
'untracked' => false,
2017-09-10 17:25:29 +05:30
'paths' => ['vendor/*'],
'policy' => 'pull-push' }]
2017-08-17 22:00:37 +05:30
end
2018-03-17 18:26:18 +05:30
let(:expected_features) { { 'trace_sections' => true } }
2017-08-17 22:00:37 +05:30
it 'picks a job' do
request_job info: { platform: :darwin }
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2020-07-28 23:09:34 +05:30
expect(response.headers['Content-Type']).to eq('application/json')
2017-08-17 22:00:37 +05:30
expect(response.headers).not_to have_key('X-GitLab-Last-Update')
2019-02-15 15:39:39 +05:30
expect(runner.reload.platform).to eq('darwin')
2017-08-17 22:00:37 +05:30
expect(json_response['id']).to eq(job.id)
expect(json_response['token']).to eq(job.token)
expect(json_response['job_info']).to eq(expected_job_info)
expect(json_response['git_info']).to eq(expected_git_info)
2020-04-22 19:07:51 +05:30
expect(json_response['image']).to eq({ 'name' => 'ruby:2.7', 'entrypoint' => '/bin/sh', 'ports' => [] })
2017-09-10 17:25:29 +05:30
expect(json_response['services']).to eq([{ 'name' => 'postgres', 'entrypoint' => nil,
2019-07-07 11:18:12 +05:30
'alias' => nil, 'command' => nil, 'ports' => [] },
2018-10-15 14:42:47 +05:30
{ 'name' => 'docker:stable-dind', 'entrypoint' => '/bin/sh',
2019-07-07 11:18:12 +05:30
'alias' => 'docker', 'command' => 'sleep 30', 'ports' => [] }])
2017-08-17 22:00:37 +05:30
expect(json_response['steps']).to eq(expected_steps)
expect(json_response['artifacts']).to eq(expected_artifacts)
expect(json_response['cache']).to eq(expected_cache)
expect(json_response['variables']).to include(*expected_variables)
2018-03-17 18:26:18 +05:30
expect(json_response['features']).to eq(expected_features)
2017-08-17 22:00:37 +05:30
end
2020-01-01 13:55:28 +05:30
it 'creates persistent ref' do
2020-07-28 23:09:34 +05:30
expect_any_instance_of(::Ci::PersistentRef).to receive(:create_ref)
2020-01-01 13:55:28 +05:30
.with(job.sha, "refs/#{Repository::REF_PIPELINES}/#{job.commit_id}")
request_job info: { platform: :darwin }
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2020-01-01 13:55:28 +05:30
expect(json_response['id']).to eq(job.id)
end
2017-08-17 22:00:37 +05:30
context 'when job is made for tag' do
2018-03-17 18:26:18 +05:30
let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
2017-08-17 22:00:37 +05:30
it 'sets branch as ref_type' do
request_job
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2017-08-17 22:00:37 +05:30
expect(json_response['git_info']['ref_type']).to eq('tag')
end
2019-07-07 11:18:12 +05:30
context 'when GIT_DEPTH is specified' do
before do
create(:ci_pipeline_variable, key: 'GIT_DEPTH', value: 1, pipeline: pipeline)
end
it 'specifies refspecs' do
request_job
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2019-07-07 11:18:12 +05:30
expect(json_response['git_info']['refspecs']).to include("+refs/tags/#{job.ref}:refs/tags/#{job.ref}")
end
end
2020-07-28 23:09:34 +05:30
context 'when a Gitaly exception is thrown during response' do
before do
allow_next_instance_of(Ci::BuildRunnerPresenter) do |instance|
allow(instance).to receive(:artifacts).and_raise(GRPC::DeadlineExceeded)
end
end
it 'fails the job as a scheduler failure' do
request_job
expect(response).to have_gitlab_http_status(:no_content)
expect(job.reload.failed?).to be_truthy
expect(job.failure_reason).to eq('scheduler_failure')
expect(job.runner_id).to eq(runner.id)
expect(job.runner_session).to be_nil
end
end
2019-09-04 21:01:54 +05:30
context 'when GIT_DEPTH is not specified and there is no default git depth for the project' do
before do
project.update!(ci_default_git_depth: nil)
end
2019-07-07 11:18:12 +05:30
it 'specifies refspecs' do
request_job
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2019-07-07 11:18:12 +05:30
expect(json_response['git_info']['refspecs'])
2020-05-24 23:13:21 +05:30
.to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
'+refs/tags/*:refs/tags/*',
'+refs/heads/*:refs/remotes/origin/*')
2019-07-07 11:18:12 +05:30
end
end
2017-08-17 22:00:37 +05:30
end
2019-09-04 21:01:54 +05:30
context 'when job filtered by job_age' do
let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0, queued_at: 60.seconds.ago) }
context 'job is queued less than job_age parameter' do
let(:job_age) { 120 }
it 'gives 204' do
request_job(job_age: job_age)
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:no_content)
2019-09-04 21:01:54 +05:30
end
end
context 'job is queued more than job_age parameter' do
let(:job_age) { 30 }
it 'picks a job' do
request_job(job_age: job_age)
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2019-09-04 21:01:54 +05:30
end
end
end
2017-08-17 22:00:37 +05:30
context 'when job is made for branch' do
it 'sets tag as ref_type' do
request_job
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2017-08-17 22:00:37 +05:30
expect(json_response['git_info']['ref_type']).to eq('branch')
end
2019-07-07 11:18:12 +05:30
context 'when GIT_DEPTH is specified' do
before do
create(:ci_pipeline_variable, key: 'GIT_DEPTH', value: 1, pipeline: pipeline)
end
it 'specifies refspecs' do
request_job
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2019-07-07 11:18:12 +05:30
expect(json_response['git_info']['refspecs']).to include("+refs/heads/#{job.ref}:refs/remotes/origin/#{job.ref}")
end
end
2019-09-04 21:01:54 +05:30
context 'when GIT_DEPTH is not specified and there is no default git depth for the project' do
before do
project.update!(ci_default_git_depth: nil)
end
2019-07-07 11:18:12 +05:30
it 'specifies refspecs' do
request_job
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2019-07-07 11:18:12 +05:30
expect(json_response['git_info']['refspecs'])
2020-05-24 23:13:21 +05:30
.to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
'+refs/tags/*:refs/tags/*',
'+refs/heads/*:refs/remotes/origin/*')
2019-07-07 11:18:12 +05:30
end
end
end
2020-06-23 00:09:42 +05:30
context 'when job is for a release' do
let!(:job) { create(:ci_build, :release_options, pipeline: pipeline) }
2020-07-28 23:09:34 +05:30
context 'when `multi_build_steps` is passed by the runner' do
2020-06-23 00:09:42 +05:30
it 'exposes release info' do
2020-07-28 23:09:34 +05:30
request_job info: { features: { multi_build_steps: true } }
2020-06-23 00:09:42 +05:30
expect(response).to have_gitlab_http_status(:created)
expect(response.headers).not_to have_key('X-GitLab-Last-Update')
expect(json_response['steps']).to eq([
{
"name" => "script",
"script" => ["make changelog | tee release_changelog.txt"],
"timeout" => 3600,
"when" => "on_success",
"allow_failure" => false
},
{
"name" => "release",
"script" =>
2020-07-28 23:09:34 +05:30
["release-cli create --name \"Release $CI_COMMIT_SHA\" --description \"Created using the release-cli $EXTRA_DESCRIPTION\" --tag-name \"release-$CI_COMMIT_SHA\" --ref \"$CI_COMMIT_SHA\""],
2020-06-23 00:09:42 +05:30
"timeout" => 3600,
"when" => "on_success",
"allow_failure" => false
}
])
end
end
2020-07-28 23:09:34 +05:30
context 'when `multi_build_steps` is not passed by the runner' do
2020-06-23 00:09:42 +05:30
it 'drops the job' do
request_job
expect(response).to have_gitlab_http_status(:no_content)
end
end
end
2019-07-07 11:18:12 +05:30
context 'when job is made for merge request' do
2019-12-26 22:10:19 +05:30
let(:pipeline) { create(:ci_pipeline, source: :merge_request_event, project: project, ref: 'feature', merge_request: merge_request) }
2019-07-07 11:18:12 +05:30
let!(:job) { create(:ci_build, pipeline: pipeline, name: 'spinach', ref: 'feature', stage: 'test', stage_idx: 0) }
let(:merge_request) { create(:merge_request) }
it 'sets branch as ref_type' do
request_job
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2019-07-07 11:18:12 +05:30
expect(json_response['git_info']['ref_type']).to eq('branch')
end
context 'when GIT_DEPTH is specified' do
before do
create(:ci_pipeline_variable, key: 'GIT_DEPTH', value: 1, pipeline: pipeline)
end
it 'returns the overwritten git depth for merge request refspecs' do
request_job
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2019-07-07 11:18:12 +05:30
expect(json_response['git_info']['depth']).to eq(1)
end
end
2017-08-17 22:00:37 +05:30
end
it 'updates runner info' do
expect { request_job }.to change { runner.reload.contacted_at }
end
2018-03-17 18:26:18 +05:30
%w(version revision platform architecture).each do |param|
2017-08-17 22:00:37 +05:30
context "when info parameter '#{param}' is present" do
let(:value) { "#{param}_value" }
it "updates provided Runner's parameter" do
request_job info: { param => value }
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2017-08-17 22:00:37 +05:30
expect(runner.reload.read_attribute(param.to_sym)).to eq(value)
end
end
end
2018-03-27 19:54:05 +05:30
it "sets the runner's ip_address" do
post api('/jobs/request'),
2019-02-15 15:39:39 +05:30
params: { token: runner.token },
2019-03-02 22:35:43 +05:30
headers: { 'User-Agent' => user_agent, 'X-Forwarded-For' => '123.222.123.222' }
2018-03-27 19:54:05 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2018-03-27 19:54:05 +05:30
expect(runner.reload.ip_address).to eq('123.222.123.222')
end
2019-03-13 22:55:13 +05:30
it "handles multiple X-Forwarded-For addresses" do
post api('/jobs/request'),
params: { token: runner.token },
headers: { 'User-Agent' => user_agent, 'X-Forwarded-For' => '123.222.123.222, 127.0.0.1' }
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2019-03-13 22:55:13 +05:30
expect(runner.reload.ip_address).to eq('123.222.123.222')
end
2017-08-17 22:00:37 +05:30
context 'when concurrently updating a job' do
before do
2020-07-28 23:09:34 +05:30
expect_any_instance_of(::Ci::Build).to receive(:run!)
2017-09-10 17:25:29 +05:30
.and_raise(ActiveRecord::StaleObjectError.new(nil, nil))
2017-08-17 22:00:37 +05:30
end
it 'returns a conflict' do
request_job
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:conflict)
2017-08-17 22:00:37 +05:30
expect(response.headers).not_to have_key('X-GitLab-Last-Update')
end
end
context 'when project and pipeline have multiple jobs' do
2018-03-17 18:26:18 +05:30
let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
2017-08-17 22:00:37 +05:30
let!(:test_job) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) }
before do
job.success
job2.success
end
it 'returns dependent jobs' do
request_job
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2017-08-17 22:00:37 +05:30
expect(json_response['id']).to eq(test_job.id)
expect(json_response['dependencies'].count).to eq(2)
2017-09-10 17:25:29 +05:30
expect(json_response['dependencies']).to include(
2019-02-15 15:39:39 +05:30
{ 'id' => job.id, 'name' => job.name, 'token' => job.token },
{ 'id' => job2.id, 'name' => job2.name, 'token' => job2.token })
2017-09-10 17:25:29 +05:30
end
end
context 'when pipeline have jobs with artifacts' do
2018-03-17 18:26:18 +05:30
let!(:job) { create(:ci_build, :tag, :artifacts, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
2017-09-10 17:25:29 +05:30
let!(:test_job) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) }
before do
job.success
end
it 'returns dependent jobs' do
request_job
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2017-09-10 17:25:29 +05:30
expect(json_response['id']).to eq(test_job.id)
expect(json_response['dependencies'].count).to eq(1)
expect(json_response['dependencies']).to include(
2019-02-15 15:39:39 +05:30
{ 'id' => job.id, 'name' => job.name, 'token' => job.token,
2020-04-08 14:13:33 +05:30
'artifacts_file' => { 'filename' => 'ci_build_artifacts.zip', 'size' => 107464 } })
2017-08-17 22:00:37 +05:30
end
end
context 'when explicit dependencies are defined' do
2018-03-17 18:26:18 +05:30
let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
2017-08-17 22:00:37 +05:30
let!(:test_job) do
create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'deploy',
stage: 'deploy', stage_idx: 1,
2019-02-15 15:39:39 +05:30
options: { script: ['bash'], dependencies: [job2.name] })
2017-08-17 22:00:37 +05:30
end
before do
job.success
job2.success
end
it 'returns dependent jobs' do
request_job
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2017-08-17 22:00:37 +05:30
expect(json_response['id']).to eq(test_job.id)
expect(json_response['dependencies'].count).to eq(1)
2019-02-15 15:39:39 +05:30
expect(json_response['dependencies'][0]).to include('id' => job2.id, 'name' => job2.name, 'token' => job2.token)
2017-08-17 22:00:37 +05:30
end
end
context 'when dependencies is an empty array' do
2018-03-17 18:26:18 +05:30
let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
2017-08-17 22:00:37 +05:30
let!(:empty_dependencies_job) do
create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'empty_dependencies_job',
stage: 'deploy', stage_idx: 1,
2019-02-15 15:39:39 +05:30
options: { script: ['bash'], dependencies: [] })
2017-08-17 22:00:37 +05:30
end
before do
job.success
job2.success
end
it 'returns an empty array' do
request_job
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2017-08-17 22:00:37 +05:30
expect(json_response['id']).to eq(empty_dependencies_job.id)
expect(json_response['dependencies'].count).to eq(0)
end
end
context 'when job has no tags' do
  before do
    # Use the bang variant so a failed update raises here instead of
    # silently leaving the job tagged, which would invalidate both
    # examples below.
    job.update!(tags: [])
  end

  context 'when runner is allowed to pick untagged jobs' do
    before do
      runner.update_column(:run_untagged, true)
    end

    it 'picks job' do
      request_job

      expect(response).to have_gitlab_http_status(:created)
    end
  end

  context 'when runner is not allowed to pick untagged jobs' do
    before do
      runner.update_column(:run_untagged, false)
    end

    it_behaves_like 'no jobs available'
  end
end
context 'when triggered job is available' do
let(:expected_variables) do
2019-07-07 11:18:12 +05:30
[{ 'key' => 'CI_JOB_NAME', 'value' => 'spinach', 'public' => true, 'masked' => false },
{ 'key' => 'CI_JOB_STAGE', 'value' => 'test', 'public' => true, 'masked' => false },
{ 'key' => 'CI_PIPELINE_TRIGGERED', 'value' => 'true', 'public' => true, 'masked' => false },
{ 'key' => 'DB_NAME', 'value' => 'postgres', 'public' => true, 'masked' => false },
{ 'key' => 'SECRET_KEY', 'value' => 'secret_value', 'public' => false, 'masked' => false },
{ 'key' => 'TRIGGER_KEY_1', 'value' => 'TRIGGER_VALUE_1', 'public' => false, 'masked' => false }]
2017-08-17 22:00:37 +05:30
end
2018-03-17 18:26:18 +05:30
let(:trigger) { create(:ci_trigger, project: project) }
let!(:trigger_request) { create(:ci_trigger_request, pipeline: pipeline, builds: [job], trigger: trigger) }
2017-08-17 22:00:37 +05:30
before do
2020-07-28 23:09:34 +05:30
project.variables << ::Ci::Variable.new(key: 'SECRET_KEY', value: 'secret_value')
2017-08-17 22:00:37 +05:30
end
2018-03-17 18:26:18 +05:30
shared_examples 'expected variables behavior' do
it 'returns variables for triggers' do
request_job
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2018-03-17 18:26:18 +05:30
expect(json_response['variables']).to include(*expected_variables)
end
end
context 'when variables are stored in trigger_request' do
before do
trigger_request.update_attribute(:variables, { TRIGGER_KEY_1: 'TRIGGER_VALUE_1' } )
end
it_behaves_like 'expected variables behavior'
end
context 'when variables are stored in pipeline_variables' do
before do
create(:ci_pipeline_variable, pipeline: pipeline, key: :TRIGGER_KEY_1, value: 'TRIGGER_VALUE_1')
end
2017-08-17 22:00:37 +05:30
2018-03-17 18:26:18 +05:30
it_behaves_like 'expected variables behavior'
2017-08-17 22:00:37 +05:30
end
end
describe 'registry credentials support' do
let(:registry_url) { 'registry.example.com:5005' }
let(:registry_credentials) do
{ 'type' => 'registry',
'url' => registry_url,
'username' => 'gitlab-ci-token',
'password' => job.token }
end
context 'when registry is enabled' do
2017-09-10 17:25:29 +05:30
before do
stub_container_registry_config(enabled: true, host_port: registry_url)
end
2017-08-17 22:00:37 +05:30
it 'sends registry credentials key' do
request_job
expect(json_response).to have_key('credentials')
expect(json_response['credentials']).to include(registry_credentials)
end
end
context 'when registry is disabled' do
2017-09-10 17:25:29 +05:30
before do
stub_container_registry_config(enabled: false, host_port: registry_url)
end
2017-08-17 22:00:37 +05:30
it 'does not send registry credentials' do
request_job
expect(json_response).to have_key('credentials')
expect(json_response['credentials']).not_to include(registry_credentials)
end
end
end
2018-05-09 12:01:36 +05:30
describe 'timeout support' do
context 'when project specifies job timeout' do
let(:project) { create(:project, shared_runners_enabled: false, build_timeout: 1234) }
it 'contains info about timeout taken from project' do
request_job
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2018-05-09 12:01:36 +05:30
expect(json_response['runner_info']).to include({ 'timeout' => 1234 })
end
context 'when runner specifies lower timeout' do
2018-11-08 19:23:39 +05:30
let(:runner) { create(:ci_runner, :project, maximum_timeout: 1000, projects: [project]) }
2018-05-09 12:01:36 +05:30
it 'contains info about timeout overridden by runner' do
request_job
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2018-05-09 12:01:36 +05:30
expect(json_response['runner_info']).to include({ 'timeout' => 1000 })
end
end
context 'when runner specifies bigger timeout' do
2018-11-08 19:23:39 +05:30
let(:runner) { create(:ci_runner, :project, maximum_timeout: 2000, projects: [project]) }
2018-05-09 12:01:36 +05:30
it 'contains info about timeout not overridden by runner' do
request_job
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2018-05-09 12:01:36 +05:30
expect(json_response['runner_info']).to include({ 'timeout' => 1234 })
end
end
end
end
2017-08-17 22:00:37 +05:30
end
2019-07-07 11:18:12 +05:30
describe 'port support' do
let(:job) { create(:ci_build, pipeline: pipeline, options: options) }
context 'when job image has ports' do
let(:options) do
{
image: {
name: 'ruby',
ports: [80]
},
services: ['mysql']
}
end
it 'returns the image ports' do
request_job
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2019-07-07 11:18:12 +05:30
expect(json_response).to include(
'id' => job.id,
'image' => a_hash_including('name' => 'ruby', 'ports' => [{ 'number' => 80, 'protocol' => 'http', 'name' => 'default_port' }]),
'services' => all(a_hash_including('name' => 'mysql')))
end
end
context 'when job services settings has ports' do
let(:options) do
{
image: 'ruby',
services: [
{
name: 'tomcat',
ports: [{ number: 8081, protocol: 'http', name: 'custom_port' }]
}
]
}
end
it 'returns the service ports' do
request_job
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2019-07-07 11:18:12 +05:30
expect(json_response).to include(
'id' => job.id,
'image' => a_hash_including('name' => 'ruby'),
'services' => all(a_hash_including('name' => 'tomcat', 'ports' => [{ 'number' => 8081, 'protocol' => 'http', 'name' => 'custom_port' }])))
end
end
end
2020-05-24 23:13:21 +05:30
describe 'a job with excluded artifacts' do
context 'when excluded paths are defined' do
let(:job) do
create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'test',
stage: 'deploy', stage_idx: 1,
options: { artifacts: { paths: ['abc'], exclude: ['cde'] } })
end
context 'when a runner supports this feature' do
it 'exposes excluded paths when the feature is enabled' do
stub_feature_flags(ci_artifacts_exclude: true)
request_job info: { features: { artifacts_exclude: true } }
expect(response).to have_gitlab_http_status(:created)
expect(json_response.dig('artifacts').first).to include('exclude' => ['cde'])
end
it 'does not expose excluded paths when the feature is disabled' do
stub_feature_flags(ci_artifacts_exclude: false)
request_job info: { features: { artifacts_exclude: true } }
expect(response).to have_gitlab_http_status(:created)
expect(json_response.dig('artifacts').first).not_to have_key('exclude')
end
end
context 'when a runner does not support this feature' do
it 'does not expose the build at all' do
stub_feature_flags(ci_artifacts_exclude: true)
request_job
expect(response).to have_gitlab_http_status(:no_content)
end
end
end
it 'does not expose excluded paths when these are empty' do
request_job
expect(response).to have_gitlab_http_status(:created)
expect(json_response.dig('artifacts').first).not_to have_key('exclude')
end
end
2017-08-17 22:00:37 +05:30
# Issues a job request to the runner API, emulating what gitlab-runner
# sends: a JSON body containing the runner token and the last_update
# marker, plus the configured User-Agent header.
def request_job(token = runner.token, **params)
  payload = params.merge(token: token, last_update: last_update)

  post api('/jobs/request'),
       params: payload.to_json,
       headers: { 'User-Agent' => user_agent, 'Content-Type': 'application/json' }
end
end
2020-06-23 00:09:42 +05:30
context 'for web-ide job' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
let(:runner) { create(:ci_runner, :project, projects: [project]) }
2020-07-28 23:09:34 +05:30
let(:service) { ::Ci::CreateWebIdeTerminalService.new(project, user, ref: 'master').execute }
2020-06-23 00:09:42 +05:30
let(:pipeline) { service[:pipeline] }
let(:build) { pipeline.builds.first }
let(:job) { {} }
let(:config_content) do
'terminal: { image: ruby, services: [mysql], before_script: [ls], tags: [tag-1], variables: { KEY: value } }'
end
before do
stub_webide_config_file(config_content)
project.add_maintainer(user)
pipeline
end
context 'when runner has matching tag' do
before do
runner.update!(tag_list: ['tag-1'])
end
it 'successfully picks job' do
request_job
build.reload
expect(build).to be_running
expect(build.runner).to eq(runner)
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to include(
"id" => build.id,
"variables" => include("key" => 'KEY', "value" => 'value', "public" => true, "masked" => false),
"image" => a_hash_including("name" => 'ruby'),
"services" => all(a_hash_including("name" => 'mysql')),
"job_info" => a_hash_including("name" => 'terminal', "stage" => 'terminal'))
end
end
context 'when runner does not have matching tags' do
it 'does not pick a job' do
request_job
build.reload
expect(build).to be_pending
expect(response).to have_gitlab_http_status(:no_content)
end
end
# Minimal job-request helper for the web-IDE examples: only the runner
# token is required, with no last_update tracking or custom headers.
def request_job(token = runner.token, **params)
  request_params = params.merge(token: token)

  post api('/jobs/request'), params: request_params
end
end
2017-08-17 22:00:37 +05:30
end
describe 'PUT /api/v4/jobs/:id' do
2020-03-13 15:44:24 +05:30
let(:job) do
create(:ci_build, :pending, :trace_live, pipeline: pipeline, project: project, user: user, runner_id: runner.id)
end
2017-08-17 22:00:37 +05:30
2017-09-10 17:25:29 +05:30
before do
job.run!
end
2017-08-17 22:00:37 +05:30
2020-03-13 15:44:24 +05:30
it_behaves_like 'application context metadata', '/api/:version/jobs/:id' do
let(:send_request) { update_job(state: 'success') }
end
2020-06-23 00:09:42 +05:30
it 'updates runner info' do
expect { update_job(state: 'success') }.to change { runner.reload.contacted_at }
end
2017-08-17 22:00:37 +05:30
context 'when status is given' do
it 'mark job as succeeded' do
update_job(state: 'success')
2018-03-17 18:26:18 +05:30
job.reload
expect(job).to be_success
2017-08-17 22:00:37 +05:30
end
it 'mark job as failed' do
update_job(state: 'failed')
2018-03-17 18:26:18 +05:30
job.reload
expect(job).to be_failed
expect(job).to be_unknown_failure
end
context 'when failure_reason is script_failure' do
before do
update_job(state: 'failed', failure_reason: 'script_failure')
job.reload
end
it { expect(job).to be_script_failure }
end
context 'when failure_reason is runner_system_failure' do
before do
update_job(state: 'failed', failure_reason: 'runner_system_failure')
job.reload
end
it { expect(job).to be_runner_system_failure }
2017-08-17 22:00:37 +05:30
end
2018-12-13 13:39:08 +05:30
context 'when failure_reason is unrecognized value' do
before do
update_job(state: 'failed', failure_reason: 'what_is_this')
job.reload
end
it { expect(job).to be_unknown_failure }
end
context 'when failure_reason is job_execution_timeout' do
before do
update_job(state: 'failed', failure_reason: 'job_execution_timeout')
job.reload
end
it { expect(job).to be_job_execution_timeout }
end
2019-07-07 11:18:12 +05:30
context 'when failure_reason is unmet_prerequisites' do
before do
update_job(state: 'failed', failure_reason: 'unmet_prerequisites')
job.reload
end
it { expect(job).to be_unmet_prerequisites }
end
2017-08-17 22:00:37 +05:30
end
2018-03-27 19:54:05 +05:30
context 'when trace is given' do
2018-03-17 18:26:18 +05:30
it 'creates a trace artifact' do
2018-03-27 19:54:05 +05:30
allow(BuildFinishedWorker).to receive(:perform_async).with(job.id) do
ArchiveTraceWorker.new.perform(job.id)
2018-03-17 18:26:18 +05:30
end
2017-08-17 22:00:37 +05:30
2018-03-17 18:26:18 +05:30
update_job(state: 'success', trace: 'BUILD TRACE UPDATED')
job.reload
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:ok)
2018-03-17 18:26:18 +05:30
expect(job.trace.raw).to eq 'BUILD TRACE UPDATED'
expect(job.job_artifacts_trace.open.read).to eq 'BUILD TRACE UPDATED'
2017-08-17 22:00:37 +05:30
end
2019-02-15 15:39:39 +05:30
context 'when concurrent update of trace is happening' do
before do
job.trace.write('wb') do
update_job(state: 'success', trace: 'BUILD TRACE UPDATED')
end
end
it 'returns that operation conflicts' do
2020-04-22 19:07:51 +05:30
expect(response).to have_gitlab_http_status(:conflict)
2019-02-15 15:39:39 +05:30
end
end
2017-08-17 22:00:37 +05:30
end
context 'when no trace is given' do
it 'does not override trace information' do
update_job
expect(job.reload.trace.raw).to eq 'BUILD TRACE'
end
2018-11-08 19:23:39 +05:30
context 'when running state is sent' do
it 'updates update_at value' do
expect { update_job_after_time }.to change { job.reload.updated_at }
end
end
context 'when other state is sent' do
it "doesn't update update_at value" do
expect { update_job_after_time(20.minutes, state: 'success') }.not_to change { job.reload.updated_at }
end
end
2017-08-17 22:00:37 +05:30
end
context 'when job has been erased' do
let(:job) { create(:ci_build, runner_id: runner.id, erased_at: Time.now) }
it 'responds with forbidden' do
update_job
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:forbidden)
2017-08-17 22:00:37 +05:30
end
end
2018-11-08 19:23:39 +05:30
context 'when job has already been finished' do
before do
job.trace.set('Job failed')
job.drop!(:script_failure)
end
it 'does not update job status and job trace' do
update_job(state: 'success', trace: 'BUILD TRACE UPDATED')
job.reload
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:forbidden)
2018-11-08 19:23:39 +05:30
expect(response.header['Job-Status']).to eq 'failed'
expect(job.trace.raw).to eq 'Job failed'
expect(job).to be_failed
end
end
2017-08-17 22:00:37 +05:30
# PUTs a job state update (state, trace, failure_reason, ...) to the
# jobs endpoint, authenticating with the job token as the runner would.
def update_job(token = job.token, **params)
  put api("/jobs/#{job.id}"), params: params.merge(token: token)
end
2018-11-08 19:23:39 +05:30
# Replays a job update after `update_interval` has elapsed (via Timecop).
#
# `state` is a keyword argument: the caller above invokes
# `update_job_after_time(20.minutes, state: 'success')`, which with the
# previous positional-with-default signature silently packed the Hash
# `{ state: 'success' }` into `state` instead of the intended String.
def update_job_after_time(update_interval = 20.minutes, state: 'running')
  Timecop.travel(job.updated_at + update_interval) do
    update_job(job.token, state: state)
  end
end
2017-08-17 22:00:37 +05:30
end
describe 'PATCH /api/v4/jobs/:id/trace' do
2020-03-13 15:44:24 +05:30
let(:job) do
create(:ci_build, :running, :trace_live,
project: project, user: user, runner_id: runner.id, pipeline: pipeline)
end
2017-08-17 22:00:37 +05:30
let(:headers) { { API::Helpers::Runner::JOB_TOKEN_HEADER => job.token, 'Content-Type' => 'text/plain' } }
let(:headers_with_range) { headers.merge({ 'Content-Range' => '11-20' }) }
let(:update_interval) { 10.seconds.to_i }
2017-09-10 17:25:29 +05:30
before do
initial_patch_the_trace
end
2017-08-17 22:00:37 +05:30
2020-03-13 15:44:24 +05:30
it_behaves_like 'application context metadata', '/api/:version/jobs/:id/trace' do
let(:send_request) { patch_the_trace }
end
2020-06-23 00:09:42 +05:30
it 'updates runner info' do
runner.update!(contacted_at: 1.year.ago)
expect { patch_the_trace }.to change { runner.reload.contacted_at }
end
2017-08-17 22:00:37 +05:30
context 'when request is valid' do
it 'gets correct response' do
2020-04-22 19:07:51 +05:30
expect(response).to have_gitlab_http_status(:accepted)
2017-08-17 22:00:37 +05:30
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
expect(response.header).to have_key 'Range'
expect(response.header).to have_key 'Job-Status'
2020-03-13 15:44:24 +05:30
expect(response.header).to have_key 'X-GitLab-Trace-Update-Interval'
2017-08-17 22:00:37 +05:30
end
context 'when job has been updated recently' do
2018-03-17 18:26:18 +05:30
it { expect { patch_the_trace }.not_to change { job.updated_at }}
2017-08-17 22:00:37 +05:30
it "changes the job's trace" do
patch_the_trace
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
end
context 'when Runner makes a force-patch' do
2018-03-17 18:26:18 +05:30
it { expect { force_patch_the_trace }.not_to change { job.updated_at }}
2017-08-17 22:00:37 +05:30
it "doesn't change the build.trace" do
force_patch_the_trace
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
end
end
end
context 'when job was not updated recently' do
let(:update_interval) { 15.minutes.to_i }
it { expect { patch_the_trace }.to change { job.updated_at } }
it 'changes the job.trace' do
patch_the_trace
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
end
context 'when Runner makes a force-patch' do
it { expect { force_patch_the_trace }.to change { job.updated_at } }
it "doesn't change the job.trace" do
force_patch_the_trace
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
end
end
end
context 'when project for the build has been deleted' do
  let(:job) do
    create(:ci_build, :running, :trace_live, runner_id: runner.id, pipeline: pipeline) do |job|
      # `update!` raises if the flag cannot be persisted; the plain
      # `update` could fail silently and leave the project undeleted,
      # making the expectation below pass/fail for the wrong reason.
      job.project.update!(pending_delete: true)
    end
  end

  it 'responds with forbidden' do
    expect(response).to have_gitlab_http_status(:forbidden)
  end
end
2018-10-15 14:42:47 +05:30
context 'when trace is patched' do
before do
patch_the_trace
end
it 'has valid trace' do
2020-04-22 19:07:51 +05:30
expect(response).to have_gitlab_http_status(:accepted)
2018-10-15 14:42:47 +05:30
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
end
2018-11-08 19:23:39 +05:30
context 'when job is cancelled' do
before do
job.cancel
end
context 'when trace is patched' do
before do
patch_the_trace
end
2019-02-15 15:39:39 +05:30
it 'returns Forbidden ' do
2020-04-22 19:07:51 +05:30
expect(response).to have_gitlab_http_status(:forbidden)
2018-11-08 19:23:39 +05:30
end
end
end
2018-10-15 14:42:47 +05:30
context 'when redis data are flushed' do
before do
redis_shared_state_cleanup!
end
it 'has empty trace' do
expect(job.reload.trace.raw).to eq ''
end
context 'when we perform partial patch' do
before do
patch_the_trace('hello', headers.merge({ 'Content-Range' => "28-32/5" }))
end
it 'returns an error' do
2020-04-22 19:07:51 +05:30
expect(response).to have_gitlab_http_status(:range_not_satisfiable)
2018-10-15 14:42:47 +05:30
expect(response.header['Range']).to eq('0-0')
end
end
context 'when we resend full trace' do
before do
patch_the_trace('BUILD TRACE appended appended hello', headers.merge({ 'Content-Range' => "0-34/35" }))
end
it 'succeeds with updating trace' do
2020-04-22 19:07:51 +05:30
expect(response).to have_gitlab_http_status(:accepted)
2018-10-15 14:42:47 +05:30
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended hello'
end
end
end
end
2018-11-08 19:23:39 +05:30
2019-02-15 15:39:39 +05:30
context 'when concurrent update of trace is happening' do
before do
job.trace.write('wb') do
patch_the_trace
end
end
it 'returns that operation conflicts' do
2020-04-22 19:07:51 +05:30
expect(response).to have_gitlab_http_status(:conflict)
2019-02-15 15:39:39 +05:30
end
end
2018-11-08 19:23:39 +05:30
context 'when the job is canceled' do
before do
2019-02-15 15:39:39 +05:30
job.cancel
2018-11-08 19:23:39 +05:30
patch_the_trace
end
2019-02-15 15:39:39 +05:30
it 'receives status in header' do
2018-11-08 19:23:39 +05:30
expect(response.header['Job-Status']).to eq 'canceled'
end
end
2020-03-13 15:44:24 +05:30
context 'when build trace is being watched' do
before do
job.trace.being_watched!
end
it 'returns X-GitLab-Trace-Update-Interval as 3' do
patch_the_trace
2020-04-22 19:07:51 +05:30
expect(response).to have_gitlab_http_status(:accepted)
2020-03-13 15:44:24 +05:30
expect(response.header['X-GitLab-Trace-Update-Interval']).to eq('3')
end
end
context 'when build trace is not being watched' do
it 'returns X-GitLab-Trace-Update-Interval as 30' do
patch_the_trace
2020-04-22 19:07:51 +05:30
expect(response).to have_gitlab_http_status(:accepted)
2020-03-13 15:44:24 +05:30
expect(response.header['X-GitLab-Trace-Update-Interval']).to eq('30')
end
end
2017-08-17 22:00:37 +05:30
end
context 'when Runner makes a force-patch' do
before do
force_patch_the_trace
end
it 'gets correct response' do
2020-04-22 19:07:51 +05:30
expect(response).to have_gitlab_http_status(:accepted)
2017-08-17 22:00:37 +05:30
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
expect(response.header).to have_key 'Range'
expect(response.header).to have_key 'Job-Status'
end
end
context 'when content-range start is too big' do
2018-10-15 14:42:47 +05:30
let(:headers_with_range) { headers.merge({ 'Content-Range' => '15-20/6' }) }
2017-08-17 22:00:37 +05:30
it 'gets 416 error response with range headers' do
2020-04-22 19:07:51 +05:30
expect(response).to have_gitlab_http_status(:range_not_satisfiable)
2017-08-17 22:00:37 +05:30
expect(response.header).to have_key 'Range'
expect(response.header['Range']).to eq '0-11'
end
end
context 'when content-range start is too small' do
2018-10-15 14:42:47 +05:30
let(:headers_with_range) { headers.merge({ 'Content-Range' => '8-20/13' }) }
2017-08-17 22:00:37 +05:30
it 'gets 416 error response with range headers' do
2020-04-22 19:07:51 +05:30
expect(response).to have_gitlab_http_status(:range_not_satisfiable)
2017-08-17 22:00:37 +05:30
expect(response.header).to have_key 'Range'
expect(response.header['Range']).to eq '0-11'
end
end
context 'when Content-Range header is missing' do
let(:headers_with_range) { headers }
2020-04-22 19:07:51 +05:30
it { expect(response).to have_gitlab_http_status(:bad_request) }
2017-08-17 22:00:37 +05:30
end
# NOTE: description typo fixed ("errased" -> "erased"); descriptions are
# documentation output only, so this does not change spec behavior.
context 'when job has been erased' do
  let(:job) { create(:ci_build, runner_id: runner.id, erased_at: Time.now) }

  it { expect(response).to have_gitlab_http_status(:forbidden) }
end
# PATCHes `content` onto the job trace after `update_interval` has
# elapsed. When no explicit headers are given, a Content-Range matching
# the current trace size is computed so the patch appends at the correct
# offset.
def patch_the_trace(content = ' appended', request_headers = nil)
  if request_headers.nil?
    job.trace.read do |stream|
      range_start = stream.size
      range_end = range_start + content.length - 1
      request_headers = headers.merge({ 'Content-Range' => "#{range_start}-#{range_end}" })
    end
  end

  Timecop.travel(job.updated_at + update_interval) do
    patch api("/jobs/#{job.id}/trace"), params: content, headers: request_headers
    job.reload
  end
end
# Seeds the trace with the first append, using the ranged headers so the
# server accepts the initial offset.
def initial_patch_the_trace
  patch_the_trace(' appended', headers_with_range)
end
# Patching empty content twice exercises the runner's "force patch" path
# (the second patch carries no new bytes).
def force_patch_the_trace
  2.times { patch_the_trace('') }
end
end
describe 'artifacts' do
2020-03-13 15:44:24 +05:30
let(:job) { create(:ci_build, :pending, user: user, project: project, pipeline: pipeline, runner_id: runner.id) }
2020-04-15 14:45:12 +05:30
let(:jwt) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
let(:headers) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => jwt } }
2017-08-17 22:00:37 +05:30
let(:headers_with_token) { headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.token) }
2018-11-08 19:23:39 +05:30
let(:file_upload) { fixture_file_upload('spec/fixtures/banana_sample.gif', 'image/gif') }
let(:file_upload2) { fixture_file_upload('spec/fixtures/dk.png', 'image/gif') }
2017-08-17 22:00:37 +05:30
2017-09-10 17:25:29 +05:30
before do
2018-05-09 12:01:36 +05:30
stub_artifacts_object_storage
2017-09-10 17:25:29 +05:30
job.run!
end
2017-08-17 22:00:37 +05:30
2020-07-28 23:09:34 +05:30
shared_examples_for 'rejecting artifacts that are too large' do
let(:filesize) { 100.megabytes.to_i }
let(:sample_max_size) { (filesize / 1.megabyte) - 10 } # Set max size to be smaller than file size to trigger error
shared_examples_for 'failed request' do
it 'responds with payload too large error' do
send_request
expect(response).to have_gitlab_http_status(:payload_too_large)
end
end
context 'based on plan limit setting' do
let(:application_max_size) { sample_max_size + 100 }
let(:limit_name) { "#{Ci::JobArtifact::PLAN_LIMIT_PREFIX}archive" }
before do
create(:plan_limits, :default_plan, limit_name => sample_max_size)
stub_application_setting(max_artifacts_size: application_max_size)
end
context 'and feature flag ci_max_artifact_size_per_type is enabled' do
before do
stub_feature_flags(ci_max_artifact_size_per_type: true)
end
it_behaves_like 'failed request'
end
context 'and feature flag ci_max_artifact_size_per_type is disabled' do
before do
stub_feature_flags(ci_max_artifact_size_per_type: false)
end
it 'bases of project closest setting' do
send_request
expect(response).to have_gitlab_http_status(success_code)
end
end
end
context 'based on application setting' do
before do
stub_application_setting(max_artifacts_size: sample_max_size)
end
it_behaves_like 'failed request'
end
context 'based on root namespace setting' do
let(:application_max_size) { sample_max_size + 10 }
before do
stub_application_setting(max_artifacts_size: application_max_size)
root_namespace.update!(max_artifacts_size: sample_max_size)
end
it_behaves_like 'failed request'
end
context 'based on child namespace setting' do
let(:application_max_size) { sample_max_size + 10 }
let(:root_namespace_max_size) { sample_max_size + 10 }
before do
stub_application_setting(max_artifacts_size: application_max_size)
root_namespace.update!(max_artifacts_size: root_namespace_max_size)
namespace.update!(max_artifacts_size: sample_max_size)
end
it_behaves_like 'failed request'
end
context 'based on project setting' do
let(:application_max_size) { sample_max_size + 10 }
let(:root_namespace_max_size) { sample_max_size + 10 }
let(:child_namespace_max_size) { sample_max_size + 10 }
before do
stub_application_setting(max_artifacts_size: application_max_size)
root_namespace.update!(max_artifacts_size: root_namespace_max_size)
namespace.update!(max_artifacts_size: child_namespace_max_size)
project.update!(max_artifacts_size: sample_max_size)
end
it_behaves_like 'failed request'
end
end
2017-08-17 22:00:37 +05:30
describe 'POST /api/v4/jobs/:id/artifacts/authorize' do
context 'when using token as parameter' do
2020-07-28 23:09:34 +05:30
context 'and the artifact is too large' do
it_behaves_like 'rejecting artifacts that are too large' do
let(:success_code) { :ok }
let(:send_request) { authorize_artifacts_with_token_in_params(filesize: filesize) }
end
end
2018-05-09 12:01:36 +05:30
context 'posting artifacts to running job' do
subject do
authorize_artifacts_with_token_in_params
end
2017-08-17 22:00:37 +05:30
2020-03-13 15:44:24 +05:30
it_behaves_like 'application context metadata', '/api/:version/jobs/:id/artifacts/authorize' do
let(:send_request) { subject }
end
2020-06-23 00:09:42 +05:30
it 'updates runner info' do
expect { subject }.to change { runner.reload.contacted_at }
end
2018-05-09 12:01:36 +05:30
shared_examples 'authorizes local file' do
it 'succeeds' do
subject
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:ok)
2020-03-13 15:44:24 +05:30
expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
2018-05-09 12:01:36 +05:30
expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path)
expect(json_response['RemoteObject']).to be_nil
end
end
context 'when using local storage' do
it_behaves_like 'authorizes local file'
end
context 'when using remote storage' do
context 'when direct upload is enabled' do
before do
stub_artifacts_object_storage(enabled: true, direct_upload: true)
end
it 'succeeds' do
subject
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:ok)
2020-03-13 15:44:24 +05:30
expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
2019-12-21 20:55:43 +05:30
expect(json_response).not_to have_key('TempPath')
2018-05-09 12:01:36 +05:30
expect(json_response['RemoteObject']).to have_key('ID')
expect(json_response['RemoteObject']).to have_key('GetURL')
expect(json_response['RemoteObject']).to have_key('StoreURL')
expect(json_response['RemoteObject']).to have_key('DeleteURL')
2018-11-08 19:23:39 +05:30
expect(json_response['RemoteObject']).to have_key('MultipartUpload')
2018-05-09 12:01:36 +05:30
end
end
context 'when direct upload is disabled' do
before do
stub_artifacts_object_storage(enabled: true, direct_upload: false)
end
it_behaves_like 'authorizes local file'
end
end
2017-08-17 22:00:37 +05:30
end
end
context 'when using token as header' do
it 'authorizes posting artifacts to running job' do
authorize_artifacts_with_token_in_headers
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:ok)
2020-03-13 15:44:24 +05:30
expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
2017-08-17 22:00:37 +05:30
expect(json_response['TempPath']).not_to be_nil
end
it 'fails to post too large artifact' do
stub_application_setting(max_artifacts_size: 0)
authorize_artifacts_with_token_in_headers(filesize: 100)
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:payload_too_large)
2017-08-17 22:00:37 +05:30
end
end
context 'when using runners token' do
it 'fails to authorize artifacts posting' do
authorize_artifacts(token: job.project.runners_token)
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:forbidden)
2017-08-17 22:00:37 +05:30
end
end
it 'reject requests that did not go through gitlab-workhorse' do
headers.delete(Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER)
authorize_artifacts
2020-02-01 01:16:34 +05:30
expect(response).to have_gitlab_http_status(:forbidden)
2017-08-17 22:00:37 +05:30
end
context 'authorization token is invalid' do
2019-02-15 15:39:39 +05:30
it 'responds with forbidden' do
2017-08-17 22:00:37 +05:30
authorize_artifacts(token: 'invalid', filesize: 100 )
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:forbidden)
2017-08-17 22:00:37 +05:30
end
end
2020-06-23 00:09:42 +05:30
context 'authorize uploading of an lsif artifact' do
before do
stub_feature_flags(code_navigation: job.project)
end
it 'adds ProcessLsif header' do
authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['ProcessLsif']).to be_truthy
end
2020-07-28 23:09:34 +05:30
it 'adds ProcessLsifReferences header' do
authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
2020-06-23 00:09:42 +05:30
2020-07-28 23:09:34 +05:30
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['ProcessLsifReferences']).to be_truthy
2020-06-23 00:09:42 +05:30
end
context 'code_navigation feature flag is disabled' do
2020-07-28 23:09:34 +05:30
it 'responds with a forbidden error' do
2020-06-23 00:09:42 +05:30
stub_feature_flags(code_navigation: false)
2020-07-28 23:09:34 +05:30
authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
aggregate_failures do
expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['ProcessLsif']).to be_falsy
expect(json_response['ProcessLsifReferences']).to be_falsy
end
end
end
2020-06-23 00:09:42 +05:30
2020-07-28 23:09:34 +05:30
context 'code_navigation_references feature flag is disabled' do
it 'sets ProcessLsifReferences header to false' do
stub_feature_flags(code_navigation_references: false)
2020-06-23 00:09:42 +05:30
authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
2020-07-28 23:09:34 +05:30
aggregate_failures do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['ProcessLsif']).to be_truthy
expect(json_response['ProcessLsifReferences']).to be_falsy
end
2020-06-23 00:09:42 +05:30
end
end
end
2017-08-17 22:00:37 +05:30
# POSTs to the artifacts authorize endpoint with the given params and
# headers; authentication details are supplied by the caller.
def authorize_artifacts(params = {}, request_headers = headers)
  authorize_url = api("/jobs/#{job.id}/artifacts/authorize")

  post authorize_url, params: params, headers: request_headers
end
def authorize_artifacts_with_token_in_params(params = {}, request_headers = headers)
params = params.merge(token: job.token)
authorize_artifacts(params, request_headers)
end
def authorize_artifacts_with_token_in_headers(params = {}, request_headers = headers_with_token)
authorize_artifacts(params, request_headers)
end
end
describe 'POST /api/v4/jobs/:id/artifacts' do
it_behaves_like 'application context metadata', '/api/:version/jobs/:id/artifacts' do
let(:send_request) do
upload_artifacts(file_upload, headers_with_token)
end
end
2020-06-23 00:09:42 +05:30
it 'updates runner info' do
expect { upload_artifacts(file_upload, headers_with_token) }.to change { runner.reload.contacted_at }
end
context 'when the artifact is too large' do
it_behaves_like 'rejecting artifacts that are too large' do
# This filesize validation also happens in non remote stored files,
# it's just that it's hard to stub the filesize in other cases to be
# more than a megabyte.
let!(:fog_connection) do
stub_artifacts_object_storage(direct_upload: true)
end
let(:object) do
fog_connection.directories.new(key: 'artifacts').files.create(
key: 'tmp/uploads/12312300',
body: 'content'
)
end
let(:file_upload) { fog_to_uploaded_file(object) }
let(:send_request) do
upload_artifacts(file_upload, headers_with_token, 'file.remote_id' => '12312300')
end
let(:success_code) { :created }
before do
allow(object).to receive(:content_length).and_return(filesize)
end
end
end
context 'when artifacts are being stored inside of tmp path' do
before do
# by configuring this path we allow to pass temp file from any path
allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return('/')
end
context 'when job has been erased' do
let(:job) { create(:ci_build, erased_at: Time.now) }
before do
upload_artifacts(file_upload, headers_with_token)
end
it 'responds with forbidden' do
upload_artifacts(file_upload, headers_with_token)
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:forbidden)
2017-08-17 22:00:37 +05:30
end
end
context 'when job is running' do
shared_examples 'successful artifacts upload' do
it 'updates successfully' do
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2017-08-17 22:00:37 +05:30
end
end
2018-05-09 12:01:36 +05:30
context 'when uses accelerated file post' do
context 'for file stored locally' do
before do
upload_artifacts(file_upload, headers_with_token)
end
it_behaves_like 'successful artifacts upload'
2017-09-10 17:25:29 +05:30
end
2017-08-17 22:00:37 +05:30
2020-03-13 15:44:24 +05:30
context 'for file stored remotely' do
2018-05-09 12:01:36 +05:30
let!(:fog_connection) do
stub_artifacts_object_storage(direct_upload: true)
end
2019-12-21 20:55:43 +05:30
let(:object) do
2019-02-15 15:39:39 +05:30
fog_connection.directories.new(key: 'artifacts').files.create(
2018-11-08 19:23:39 +05:30
key: 'tmp/uploads/12312300',
2018-05-09 12:01:36 +05:30
body: 'content'
)
2019-12-21 20:55:43 +05:30
end
let(:file_upload) { fog_to_uploaded_file(object) }
2018-05-09 12:01:36 +05:30
2019-12-21 20:55:43 +05:30
before do
upload_artifacts(file_upload, headers_with_token, 'file.remote_id' => remote_id)
2018-05-09 12:01:36 +05:30
end
2017-08-17 22:00:37 +05:30
2018-05-09 12:01:36 +05:30
context 'when valid remote_id is used' do
let(:remote_id) { '12312300' }
it_behaves_like 'successful artifacts upload'
end
context 'when invalid remote_id is used' do
let(:remote_id) { 'invalid id' }
it 'responds with bad request' do
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:internal_server_error)
2018-05-09 12:01:36 +05:30
expect(json_response['message']).to eq("Missing file")
end
end
end
2017-08-17 22:00:37 +05:30
end
context 'when using runners token' do
2019-02-15 15:39:39 +05:30
it 'responds with forbidden' do
2017-08-17 22:00:37 +05:30
upload_artifacts(file_upload, headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.project.runners_token))
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:forbidden)
2017-08-17 22:00:37 +05:30
end
end
end
context 'when artifacts post request does not contain file' do
it 'fails to post artifacts without file' do
2019-02-15 15:39:39 +05:30
post api("/jobs/#{job.id}/artifacts"), params: {}, headers: headers_with_token
2017-08-17 22:00:37 +05:30
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:bad_request)
2017-08-17 22:00:37 +05:30
end
end
context 'GitLab Workhorse is not configured' do
it 'fails to post artifacts without GitLab-Workhorse' do
2019-02-15 15:39:39 +05:30
post api("/jobs/#{job.id}/artifacts"), params: { token: job.token }, headers: {}
2017-08-17 22:00:37 +05:30
2020-04-15 14:45:12 +05:30
expect(response).to have_gitlab_http_status(:bad_request)
2017-08-17 22:00:37 +05:30
end
end
context 'Is missing GitLab Workhorse token headers' do
2020-04-15 14:45:12 +05:30
let(:jwt) { JWT.encode({ 'iss' => 'invalid-header' }, Gitlab::Workhorse.secret, 'HS256') }
2020-02-01 01:16:34 +05:30
it 'fails to post artifacts without GitLab-Workhorse' do
expect(Gitlab::ErrorTracking).to receive(:track_exception).once
upload_artifacts(file_upload, headers_with_token)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when setting an expire date' do
let(:default_artifacts_expire_in) {}
let(:post_data) do
2020-04-15 14:45:12 +05:30
{ file: file_upload,
expire_in: expire_in }
2017-08-17 22:00:37 +05:30
end
before do
stub_application_setting(default_artifacts_expire_in: default_artifacts_expire_in)
2020-04-15 14:45:12 +05:30
upload_artifacts(file_upload, headers_with_token, post_data)
2017-08-17 22:00:37 +05:30
end
context 'when an expire_in is given' do
let(:expire_in) { '7 days' }
it 'updates when specified' do
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2017-08-17 22:00:37 +05:30
expect(job.reload.artifacts_expire_at).to be_within(5.minutes).of(7.days.from_now)
end
end
context 'when no expire_in is given' do
let(:expire_in) { nil }
it 'ignores if not specified' do
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2017-08-17 22:00:37 +05:30
expect(job.reload.artifacts_expire_at).to be_nil
end
context 'with application default' do
context 'when default is 5 days' do
let(:default_artifacts_expire_in) { '5 days' }
it 'sets to application default' do
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2017-08-17 22:00:37 +05:30
expect(job.reload.artifacts_expire_at).to be_within(5.minutes).of(5.days.from_now)
end
end
context 'when default is 0' do
let(:default_artifacts_expire_in) { '0' }
it 'does not set expire_in' do
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2017-08-17 22:00:37 +05:30
expect(job.reload.artifacts_expire_at).to be_nil
end
end
end
end
end
context 'posts artifacts file and metadata file' do
let!(:artifacts) { file_upload }
2018-03-27 19:54:05 +05:30
let!(:artifacts_sha256) { Digest::SHA256.file(artifacts.path).hexdigest }
2017-08-17 22:00:37 +05:30
let!(:metadata) { file_upload2 }
2018-05-09 12:01:36 +05:30
let!(:metadata_sha256) { Digest::SHA256.file(metadata.path).hexdigest }
2017-08-17 22:00:37 +05:30
2019-09-04 21:01:54 +05:30
let(:stored_artifacts_file) { job.reload.artifacts_file }
let(:stored_metadata_file) { job.reload.artifacts_metadata }
2017-08-17 22:00:37 +05:30
let(:stored_artifacts_size) { job.reload.artifacts_size }
2018-03-27 19:54:05 +05:30
let(:stored_artifacts_sha256) { job.reload.job_artifacts_archive.file_sha256 }
2018-05-09 12:01:36 +05:30
let(:stored_metadata_sha256) { job.reload.job_artifacts_metadata.file_sha256 }
2020-04-15 14:45:12 +05:30
let(:file_keys) { post_data.keys }
let(:send_rewritten_field) { true }
2017-08-17 22:00:37 +05:30
before do
2020-04-15 14:45:12 +05:30
workhorse_finalize_with_multiple_files(
api("/jobs/#{job.id}/artifacts"),
method: :post,
file_keys: file_keys,
params: post_data,
headers: headers_with_token,
send_rewritten_field: send_rewritten_field
)
2017-08-17 22:00:37 +05:30
end
context 'when posts data accelerated by workhorse is correct' do
2020-04-15 14:45:12 +05:30
let(:post_data) { { file: artifacts, metadata: metadata } }
2017-08-17 22:00:37 +05:30
it 'stores artifacts and artifacts metadata' do
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2019-09-04 21:01:54 +05:30
expect(stored_artifacts_file.filename).to eq(artifacts.original_filename)
expect(stored_metadata_file.filename).to eq(metadata.original_filename)
expect(stored_artifacts_size).to eq(artifacts.size)
2018-03-27 19:54:05 +05:30
expect(stored_artifacts_sha256).to eq(artifacts_sha256)
2018-05-09 12:01:36 +05:30
expect(stored_metadata_sha256).to eq(metadata_sha256)
2017-08-17 22:00:37 +05:30
end
end
2020-04-15 14:45:12 +05:30
context 'with a malicious file.path param' do
let(:post_data) { {} }
let(:tmp_file) { Tempfile.new('crafted.file.path') }
let(:url) { "/jobs/#{job.id}/artifacts?file.path=#{tmp_file.path}" }
it 'rejects the request' do
expect(response).to have_gitlab_http_status(:bad_request)
expect(stored_artifacts_size).to be_nil
end
end
context 'when workhorse header is missing' do
let(:post_data) { { file: artifacts, metadata: metadata } }
let(:send_rewritten_field) { false }
it 'rejects the request' do
expect(response).to have_gitlab_http_status(:bad_request)
expect(stored_artifacts_size).to be_nil
end
end
2017-08-17 22:00:37 +05:30
context 'when there is no artifacts file in post data' do
let(:post_data) do
2020-04-15 14:45:12 +05:30
{ metadata: metadata }
2017-08-17 22:00:37 +05:30
end
it 'is expected to respond with bad request' do
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:bad_request)
2017-08-17 22:00:37 +05:30
end
it 'does not store metadata' do
expect(stored_metadata_file).to be_nil
end
end
end
context 'when artifact_type is archive' do
context 'when artifact_format is zip' do
let(:params) { { artifact_type: :archive, artifact_format: :zip } }
it 'stores junit test report' do
upload_artifacts(file_upload, headers_with_token, params)
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2018-11-18 11:00:15 +05:30
expect(job.reload.job_artifacts_archive).not_to be_nil
end
end
context 'when artifact_format is gzip' do
let(:params) { { artifact_type: :archive, artifact_format: :gzip } }
it 'returns an error' do
upload_artifacts(file_upload, headers_with_token, params)
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:bad_request)
2018-11-18 11:00:15 +05:30
expect(job.reload.job_artifacts_archive).to be_nil
end
end
end
context 'when artifact_type is junit' do
context 'when artifact_format is gzip' do
let(:file_upload) { fixture_file_upload('spec/fixtures/junit/junit.xml.gz') }
let(:params) { { artifact_type: :junit, artifact_format: :gzip } }
it 'stores junit test report' do
upload_artifacts(file_upload, headers_with_token, params)
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2018-11-18 11:00:15 +05:30
expect(job.reload.job_artifacts_junit).not_to be_nil
end
end
context 'when artifact_format is raw' do
let(:file_upload) { fixture_file_upload('spec/fixtures/junit/junit.xml.gz') }
let(:params) { { artifact_type: :junit, artifact_format: :raw } }
it 'returns an error' do
upload_artifacts(file_upload, headers_with_token, params)
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:bad_request)
2018-11-18 11:00:15 +05:30
expect(job.reload.job_artifacts_junit).to be_nil
end
end
end
context 'when artifact_type is metrics_referee' do
context 'when artifact_format is gzip' do
let(:file_upload) { fixture_file_upload('spec/fixtures/referees/metrics_referee.json.gz') }
let(:params) { { artifact_type: :metrics_referee, artifact_format: :gzip } }
it 'stores metrics_referee data' do
upload_artifacts(file_upload, headers_with_token, params)
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2020-03-13 15:44:24 +05:30
expect(job.reload.job_artifacts_metrics_referee).not_to be_nil
end
end
context 'when artifact_format is raw' do
let(:file_upload) { fixture_file_upload('spec/fixtures/referees/metrics_referee.json.gz') }
let(:params) { { artifact_type: :metrics_referee, artifact_format: :raw } }
it 'returns an error' do
upload_artifacts(file_upload, headers_with_token, params)
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:bad_request)
2020-03-13 15:44:24 +05:30
expect(job.reload.job_artifacts_metrics_referee).to be_nil
end
end
end
context 'when artifact_type is network_referee' do
context 'when artifact_format is gzip' do
let(:file_upload) { fixture_file_upload('spec/fixtures/referees/network_referee.json.gz') }
let(:params) { { artifact_type: :network_referee, artifact_format: :gzip } }
it 'stores network_referee data' do
upload_artifacts(file_upload, headers_with_token, params)
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:created)
2020-03-13 15:44:24 +05:30
expect(job.reload.job_artifacts_network_referee).not_to be_nil
end
end
context 'when artifact_format is raw' do
let(:file_upload) { fixture_file_upload('spec/fixtures/referees/network_referee.json.gz') }
let(:params) { { artifact_type: :network_referee, artifact_format: :raw } }
it 'returns an error' do
upload_artifacts(file_upload, headers_with_token, params)
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:bad_request)
2020-03-13 15:44:24 +05:30
expect(job.reload.job_artifacts_network_referee).to be_nil
end
end
end
context 'when artifact_type is dotenv' do
context 'when artifact_format is gzip' do
let(:file_upload) { fixture_file_upload('spec/fixtures/build.env.gz') }
let(:params) { { artifact_type: :dotenv, artifact_format: :gzip } }
it 'stores dotenv file' do
upload_artifacts(file_upload, headers_with_token, params)
expect(response).to have_gitlab_http_status(:created)
expect(job.reload.job_artifacts_dotenv).not_to be_nil
end
it 'parses dotenv file' do
expect do
upload_artifacts(file_upload, headers_with_token, params)
end.to change { job.job_variables.count }.from(0).to(2)
end
context 'when parse error happens' do
let(:file_upload) { fixture_file_upload('spec/fixtures/ci_build_artifacts_metadata.gz') }
it 'returns an error' do
upload_artifacts(file_upload, headers_with_token, params)
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('Invalid Format')
end
end
end
context 'when artifact_format is raw' do
let(:file_upload) { fixture_file_upload('spec/fixtures/build.env.gz') }
let(:params) { { artifact_type: :dotenv, artifact_format: :raw } }
it 'returns an error' do
upload_artifacts(file_upload, headers_with_token, params)
expect(response).to have_gitlab_http_status(:bad_request)
expect(job.reload.job_artifacts_dotenv).to be_nil
end
end
end
end
context 'when artifacts already exist for the job' do
let(:params) do
{
artifact_type: :archive,
artifact_format: :zip,
'file.sha256' => uploaded_sha256
}
end
let(:existing_sha256) { '0' * 64 }
let!(:existing_artifact) do
create(:ci_job_artifact, :archive, file_sha256: existing_sha256, job: job)
end
context 'when sha256 is the same of the existing artifact' do
let(:uploaded_sha256) { existing_sha256 }
it 'ignores the new artifact' do
upload_artifacts(file_upload, headers_with_token, params)
expect(response).to have_gitlab_http_status(:created)
expect(job.reload.job_artifacts_archive).to eq(existing_artifact)
end
end
context 'when sha256 is different than the existing artifact' do
let(:uploaded_sha256) { '1' * 64 }
it 'logs and returns an error' do
expect(Gitlab::ErrorTracking).to receive(:track_exception)
upload_artifacts(file_upload, headers_with_token, params)
expect(response).to have_gitlab_http_status(:bad_request)
expect(job.reload.job_artifacts_archive).to eq(existing_artifact)
end
end
2017-08-17 22:00:37 +05:30
end
context 'when object storage throws errors' do
let(:params) { { artifact_type: :archive, artifact_format: :zip } }
it 'does not store artifacts' do
allow_next_instance_of(JobArtifactUploader) do |uploader|
allow(uploader).to receive(:store!).and_raise(Errno::EIO)
end
upload_artifacts(file_upload, headers_with_token, params)
expect(response).to have_gitlab_http_status(:service_unavailable)
expect(job.reload.job_artifacts_archive).to be_nil
end
end
context 'when artifacts are being stored outside of tmp path' do
2018-05-09 12:01:36 +05:30
let(:new_tmpdir) { Dir.mktmpdir }
2017-08-17 22:00:37 +05:30
before do
2018-05-09 12:01:36 +05:30
# init before overwriting tmp dir
file_upload
2017-08-17 22:00:37 +05:30
# by configuring this path we allow to pass file from @tmpdir only
# but all temporary files are stored in system tmp directory
2018-05-09 12:01:36 +05:30
allow(Dir).to receive(:tmpdir).and_return(new_tmpdir)
2017-08-17 22:00:37 +05:30
end
2017-09-10 17:25:29 +05:30
after do
2018-05-09 12:01:36 +05:30
FileUtils.remove_entry(new_tmpdir)
2017-09-10 17:25:29 +05:30
end
2017-08-17 22:00:37 +05:30
2020-07-28 23:09:34 +05:30
it 'fails to post artifacts for outside of tmp path' do
2017-08-17 22:00:37 +05:30
upload_artifacts(file_upload, headers_with_token)
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:bad_request)
2017-08-17 22:00:37 +05:30
end
end
def upload_artifacts(file, headers = {}, params = {})
2019-12-21 20:55:43 +05:30
workhorse_finalize(
api("/jobs/#{job.id}/artifacts"),
method: :post,
file_key: :file,
params: params.merge(file: file),
2020-04-15 14:45:12 +05:30
headers: headers,
send_rewritten_field: true
2019-12-21 20:55:43 +05:30
)
2017-08-17 22:00:37 +05:30
end
end
describe 'GET /api/v4/jobs/:id/artifacts' do
let(:token) { job.token }
2017-08-17 22:00:37 +05:30
2020-03-13 15:44:24 +05:30
it_behaves_like 'application context metadata', '/api/:version/jobs/:id/artifacts' do
let(:send_request) { download_artifact }
end
2020-06-23 00:09:42 +05:30
it 'updates runner info' do
expect { download_artifact }.to change { runner.reload.contacted_at }
end
context 'when job has artifacts' do
2019-02-15 15:39:39 +05:30
let(:job) { create(:ci_build) }
2018-05-09 12:01:36 +05:30
let(:store) { JobArtifactUploader::Store::LOCAL }
before do
create(:ci_job_artifact, :archive, file_store: store, job: job)
2017-08-17 22:00:37 +05:30
end
context 'when using job token' do
2018-05-09 12:01:36 +05:30
context 'when artifacts are stored locally' do
let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary',
2019-03-02 22:35:43 +05:30
'Content-Disposition' => %q(attachment; filename="ci_build_artifacts.zip"; filename*=UTF-8''ci_build_artifacts.zip) }
2018-05-09 12:01:36 +05:30
end
before do
download_artifact
end
it 'download artifacts' do
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:ok)
2018-10-15 14:42:47 +05:30
expect(response.headers.to_h).to include download_headers
2018-05-09 12:01:36 +05:30
end
end
context 'when artifacts are stored remotely' do
let(:store) { JobArtifactUploader::Store::REMOTE }
2019-02-15 15:39:39 +05:30
let!(:job) { create(:ci_build) }
2018-05-09 12:01:36 +05:30
context 'when proxy download is being used' do
before do
download_artifact(direct_download: false)
end
it 'uses workhorse send-url' do
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:ok)
2018-10-15 14:42:47 +05:30
expect(response.headers.to_h).to include(
2018-05-09 12:01:36 +05:30
'Gitlab-Workhorse-Send-Data' => /send-url:/)
end
end
context 'when direct download is being used' do
before do
download_artifact(direct_download: true)
end
it 'receive redirect for downloading artifacts' do
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:found)
2018-05-09 12:01:36 +05:30
expect(response.headers).to include('Location')
end
end
2017-08-17 22:00:37 +05:30
end
end
context 'when using runnners token' do
let(:token) { job.project.runners_token }
2018-05-09 12:01:36 +05:30
before do
download_artifact
end
2019-02-15 15:39:39 +05:30
it 'responds with forbidden' do
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:forbidden)
2017-08-17 22:00:37 +05:30
end
end
end
context 'when job does not have artifacts' do
2017-08-17 22:00:37 +05:30
it 'responds with not found' do
2018-05-09 12:01:36 +05:30
download_artifact
2020-04-08 14:13:33 +05:30
expect(response).to have_gitlab_http_status(:not_found)
2017-08-17 22:00:37 +05:30
end
end
def download_artifact(params = {}, request_headers = headers)
params = params.merge(token: token)
2018-05-09 12:01:36 +05:30
job.reload
2019-02-15 15:39:39 +05:30
get api("/jobs/#{job.id}/artifacts"), params: params, headers: request_headers
2017-08-17 22:00:37 +05:30
end
end
end
end
end