require 'spec_helper'

describe API::Runner, :clean_gitlab_redis_shared_state do
  include StubGitlabCalls
  include RedisHelpers

  let(:registration_token) { 'abcdefg123456' }

  before do
    stub_feature_flags(ci_enable_live_trace: true)
    stub_gitlab_calls
    stub_application_setting(runners_registration_token: registration_token)
    allow_any_instance_of(Ci::Runner).to receive(:cache_attributes)
  end
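
  # Registration, deletion and verification of runners using registration and runner tokens.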
  describe '/api/v4/runners' do
    describe 'POST /api/v4/runners' do
      context 'when no token is provided' do
        it 'returns 400 error' do
          post api('/runners')

          expect(response).to have_gitlab_http_status 400
        end
      end

      context 'when invalid token is provided' do
        it 'returns 403 error' do
          post api('/runners'), params: { token: 'invalid' }

          expect(response).to have_gitlab_http_status 403
        end
      end

      context 'when valid token is provided' do
        it 'creates runner with default values' do
          post api('/runners'), params: { token: registration_token }

          runner = Ci::Runner.first

          expect(response).to have_gitlab_http_status 201
          expect(json_response['id']).to eq(runner.id)
          expect(json_response['token']).to eq(runner.token)
          expect(runner.run_untagged).to be true
          expect(runner.active).to be true
          expect(runner.token).not_to eq(registration_token)
          expect(runner).to be_instance_type
        end

        context 'when project token is used' do
          let(:project) { create(:project) }

          it 'creates project runner' do
            post api('/runners'), params: { token: project.runners_token }

            expect(response).to have_gitlab_http_status 201
            expect(project.runners.size).to eq(1)
            runner = Ci::Runner.first
            expect(runner.token).not_to eq(registration_token)
            expect(runner.token).not_to eq(project.runners_token)
            expect(runner).to be_project_type
          end
        end

        context 'when group token is used' do
          let(:group) { create(:group) }

          it 'creates a group runner' do
            post api('/runners'), params: { token: group.runners_token }

            expect(response).to have_http_status 201
            expect(group.runners.size).to eq(1)
            runner = Ci::Runner.first
            expect(runner.token).not_to eq(registration_token)
            expect(runner.token).not_to eq(group.runners_token)
            expect(runner).to be_group_type
          end
        end
      end

      context 'when runner description is provided' do
        it 'creates runner' do
          post api('/runners'), params: {
            token: registration_token,
            description: 'server.hostname'
          }

          expect(response).to have_gitlab_http_status 201
          expect(Ci::Runner.first.description).to eq('server.hostname')
        end
      end

      context 'when runner tags are provided' do
        it 'creates runner' do
          post api('/runners'), params: {
            token: registration_token,
            tag_list: 'tag1, tag2'
          }

          expect(response).to have_gitlab_http_status 201
          expect(Ci::Runner.first.tag_list.sort).to eq(%w(tag1 tag2))
        end
      end

      context 'when option for running untagged jobs is provided' do
        context 'when tags are provided' do
          it 'creates runner' do
            post api('/runners'), params: {
              token: registration_token,
              run_untagged: false,
              tag_list: ['tag']
            }

            expect(response).to have_gitlab_http_status 201
            expect(Ci::Runner.first.run_untagged).to be false
            expect(Ci::Runner.first.tag_list.sort).to eq(['tag'])
          end
        end

        context 'when tags are not provided' do
          it 'returns 400 error' do
            post api('/runners'), params: {
              token: registration_token,
              run_untagged: false
            }

            expect(response).to have_gitlab_http_status 400
            expect(json_response['message']).to include(
              'tags_list' => ['can not be empty when runner is not allowed to pick untagged jobs'])
          end
        end
      end

      context 'when option for locking Runner is provided' do
        it 'creates runner' do
          post api('/runners'), params: {
            token: registration_token,
            locked: true
          }

          expect(response).to have_gitlab_http_status 201
          expect(Ci::Runner.first.locked).to be true
        end
      end

      context 'when option for activating a Runner is provided' do
        context 'when active is set to true' do
          it 'creates runner' do
            post api('/runners'), params: {
              token: registration_token,
              active: true
            }

            expect(response).to have_gitlab_http_status 201
            expect(Ci::Runner.first.active).to be true
          end
        end

        context 'when active is set to false' do
          it 'creates runner' do
            post api('/runners'), params: {
              token: registration_token,
              active: false
            }

            expect(response).to have_gitlab_http_status 201
            expect(Ci::Runner.first.active).to be false
          end
        end
      end

      context 'when maximum job timeout is specified' do
        it 'creates runner' do
          post api('/runners'), params: {
            token: registration_token,
            maximum_timeout: 9000
          }

          expect(response).to have_gitlab_http_status 201
          expect(Ci::Runner.first.maximum_timeout).to eq(9000)
        end

        context 'when maximum job timeout is empty' do
          it 'creates runner' do
            post api('/runners'), params: {
              token: registration_token,
              maximum_timeout: ''
            }

            expect(response).to have_gitlab_http_status 201
            expect(Ci::Runner.first.maximum_timeout).to be_nil
          end
        end
      end

      %w(name version revision platform architecture).each do |param|
        context "when info parameter '#{param}' is present" do
          let(:value) { "#{param}_value" }

          it "updates provided Runner's parameter" do
            post api('/runners'), params: {
              token: registration_token,
              info: { param => value }
            }

            expect(response).to have_gitlab_http_status 201
            expect(Ci::Runner.first.read_attribute(param.to_sym)).to eq(value)
          end
        end
      end

      it "sets the runner's ip_address" do
        post api('/runners'),
             params: { token: registration_token },
             headers: { 'REMOTE_ADDR' => '123.111.123.111' }

        expect(response).to have_gitlab_http_status 201
        expect(Ci::Runner.first.ip_address).to eq('123.111.123.111')
      end
    end

    describe 'DELETE /api/v4/runners' do
      context 'when no token is provided' do
        it 'returns 400 error' do
          delete api('/runners')

          expect(response).to have_gitlab_http_status 400
        end
      end

      context 'when invalid token is provided' do
        it 'returns 403 error' do
          delete api('/runners'), params: { token: 'invalid' }

          expect(response).to have_gitlab_http_status 403
        end
      end

      context 'when valid token is provided' do
        let(:runner) { create(:ci_runner) }

        it 'deletes Runner' do
          delete api('/runners'), params: { token: runner.token }

          expect(response).to have_gitlab_http_status 204
          expect(Ci::Runner.count).to eq(0)
        end

        it_behaves_like '412 response' do
          let(:request) { api('/runners') }
          let(:params) { { token: runner.token } }
        end
      end
    end

    describe 'POST /api/v4/runners/verify' do
      let(:runner) { create(:ci_runner) }

      context 'when no token is provided' do
        it 'returns 400 error' do
          post api('/runners/verify')

          expect(response).to have_gitlab_http_status :bad_request
        end
      end

      context 'when invalid token is provided' do
        it 'returns 403 error' do
          post api('/runners/verify'), params: { token: 'invalid-token' }

          expect(response).to have_gitlab_http_status 403
        end
      end

      context 'when valid token is provided' do
        it 'verifies Runner credentials' do
          post api('/runners/verify'), params: { token: runner.token }

          expect(response).to have_gitlab_http_status 200
        end
      end
    end
  end
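
  # Job lifecycle endpoints used by runners: requesting work, updating status,
  # streaming the trace and uploading artifacts.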
  describe '/api/v4/jobs' do
    let(:project) { create(:project, shared_runners_enabled: false) }
    let(:pipeline) { create(:ci_pipeline_without_jobs, project: project, ref: 'master') }
    let(:runner) { create(:ci_runner, :project, projects: [project]) }
    let(:job) do
      create(:ci_build, :artifacts, :extended_options,
             pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0)
    end
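
    # A runner polls this endpoint to pick up a pending build assigned to it.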
    describe 'POST /api/v4/jobs/request' do
      let!(:last_update) { }
      let!(:new_update) { }
      let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' }

      before do
        job
        stub_container_registry_config(enabled: false)
      end

      shared_examples 'no jobs available' do
        before do
          request_job
        end

        context 'when runner sends version in User-Agent' do
          context 'for stable version' do
            it 'gives 204 and sets X-GitLab-Last-Update' do
              expect(response).to have_gitlab_http_status(204)
              expect(response.header).to have_key('X-GitLab-Last-Update')
            end
          end

          context 'when last_update is up-to-date' do
            let(:last_update) { runner.ensure_runner_queue_value }

            it 'gives 204 and sets the same X-GitLab-Last-Update' do
              expect(response).to have_gitlab_http_status(204)
              expect(response.header['X-GitLab-Last-Update']).to eq(last_update)
            end
          end

          context 'when last_update is outdated' do
            let(:last_update) { runner.ensure_runner_queue_value }
            let(:new_update) { runner.tick_runner_queue }

            it 'gives 204 and sets a new X-GitLab-Last-Update' do
              expect(response).to have_gitlab_http_status(204)
              expect(response.header['X-GitLab-Last-Update']).to eq(new_update)
            end
          end

          context 'when beta version is sent' do
            let(:user_agent) { 'gitlab-runner 9.0.0~beta.167.g2b2bacc (master; go1.7.4; linux/amd64)' }

            it { expect(response).to have_gitlab_http_status(204) }
          end

          context 'when pre-9-0 version is sent' do
            let(:user_agent) { 'gitlab-ci-multi-runner 1.6.0 (1-6-stable; go1.6.3; linux/amd64)' }

            it { expect(response).to have_gitlab_http_status(204) }
          end

          context 'when pre-9-0 beta version is sent' do
            let(:user_agent) { 'gitlab-ci-multi-runner 1.6.0~beta.167.g2b2bacc (master; go1.6.3; linux/amd64)' }

            it { expect(response).to have_gitlab_http_status(204) }
          end
        end
      end

      context 'when no token is provided' do
        it 'returns 400 error' do
          post api('/jobs/request')

          expect(response).to have_gitlab_http_status 400
        end
      end

      context 'when invalid token is provided' do
        it 'returns 403 error' do
          post api('/jobs/request'), params: { token: 'invalid' }

          expect(response).to have_gitlab_http_status 403
        end
      end

      context 'when valid token is provided' do
        context 'when Runner is not active' do
          let(:runner) { create(:ci_runner, :inactive) }
          let(:update_value) { runner.ensure_runner_queue_value }

          it 'returns 204' do
            request_job

            expect(response).to have_gitlab_http_status(204)
            expect(response.header['X-GitLab-Last-Update']).to eq(update_value)
          end
        end

        context 'when jobs are finished' do
          before do
            job.success
          end

          it_behaves_like 'no jobs available'
        end

        context 'when other projects have pending jobs' do
          before do
            job.success
            create(:ci_build, :pending)
          end

          it_behaves_like 'no jobs available'
        end

        context 'when shared runner requests job for project without shared_runners_enabled' do
          let(:runner) { create(:ci_runner, :instance) }

          it_behaves_like 'no jobs available'
        end

        context 'when there is a pending job' do
          let(:expected_job_info) do
            { 'name' => job.name,
              'stage' => job.stage,
              'project_id' => job.project.id,
              'project_name' => job.project.name }
          end

          let(:expected_git_info) do
            { 'repo_url' => job.repo_url,
              'ref' => job.ref,
              'sha' => job.sha,
              'before_sha' => job.before_sha,
              'ref_type' => 'branch' }
          end

          let(:expected_steps) do
            [{ 'name' => 'script',
               'script' => %w(echo),
               'timeout' => job.metadata_timeout,
               'when' => 'on_success',
               'allow_failure' => false },
             { 'name' => 'after_script',
               'script' => %w(ls date),
               'timeout' => job.metadata_timeout,
               'when' => 'always',
               'allow_failure' => true }]
          end

          let(:expected_variables) do
            [{ 'key' => 'CI_JOB_NAME', 'value' => 'spinach', 'public' => true },
             { 'key' => 'CI_JOB_STAGE', 'value' => 'test', 'public' => true },
             { 'key' => 'DB_NAME', 'value' => 'postgres', 'public' => true }]
          end

          let(:expected_artifacts) do
            [{ 'name' => 'artifacts_file',
               'untracked' => false,
               'paths' => %w(out/),
               'when' => 'always',
               'expire_in' => '7d',
               'artifact_type' => 'archive',
               'artifact_format' => 'zip' }]
          end

          let(:expected_cache) do
            [{ 'key' => 'cache_key',
               'untracked' => false,
               'paths' => ['vendor/*'],
               'policy' => 'pull-push' }]
          end

          let(:expected_features) { { 'trace_sections' => true } }

          it 'picks a job' do
            request_job info: { platform: :darwin }

            expect(response).to have_gitlab_http_status(201)
            expect(response.headers).not_to have_key('X-GitLab-Last-Update')
            expect(runner.reload.platform).to eq('darwin')
            expect(json_response['id']).to eq(job.id)
            expect(json_response['token']).to eq(job.token)
            expect(json_response['job_info']).to eq(expected_job_info)
            expect(json_response['git_info']).to eq(expected_git_info)
            expect(json_response['image']).to eq({ 'name' => 'ruby:2.1', 'entrypoint' => '/bin/sh' })
            expect(json_response['services']).to eq([{ 'name' => 'postgres', 'entrypoint' => nil,
                                                       'alias' => nil, 'command' => nil },
                                                     { 'name' => 'docker:stable-dind', 'entrypoint' => '/bin/sh',
                                                       'alias' => 'docker', 'command' => 'sleep 30' }])
            expect(json_response['steps']).to eq(expected_steps)
            expect(json_response['artifacts']).to eq(expected_artifacts)
            expect(json_response['cache']).to eq(expected_cache)
            expect(json_response['variables']).to include(*expected_variables)
            expect(json_response['features']).to eq(expected_features)
          end

          context 'when job is made for tag' do
            let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }

            it 'sets tag as ref_type' do
              request_job

              expect(response).to have_gitlab_http_status(201)
              expect(json_response['git_info']['ref_type']).to eq('tag')
            end
          end

          context 'when job is made for branch' do
            it 'sets branch as ref_type' do
              request_job

              expect(response).to have_gitlab_http_status(201)
              expect(json_response['git_info']['ref_type']).to eq('branch')
            end
          end

          it 'updates runner info' do
            expect { request_job }.to change { runner.reload.contacted_at }
          end

          %w(version revision platform architecture).each do |param|
            context "when info parameter '#{param}' is present" do
              let(:value) { "#{param}_value" }

              it "updates provided Runner's parameter" do
                request_job info: { param => value }

                expect(response).to have_gitlab_http_status(201)
                expect(runner.reload.read_attribute(param.to_sym)).to eq(value)
              end
            end
          end

          it "sets the runner's ip_address" do
            post api('/jobs/request'),
                 params: { token: runner.token },
                 headers: { 'User-Agent' => user_agent, 'REMOTE_ADDR' => '123.222.123.222' }

            expect(response).to have_gitlab_http_status 201
            expect(runner.reload.ip_address).to eq('123.222.123.222')
          end

          context 'when concurrently updating a job' do
            before do
              expect_any_instance_of(Ci::Build).to receive(:run!)
                .and_raise(ActiveRecord::StaleObjectError.new(nil, nil))
            end

            it 'returns a conflict' do
              request_job

              expect(response).to have_gitlab_http_status(409)
              expect(response.headers).not_to have_key('X-GitLab-Last-Update')
            end
          end

          context 'when project and pipeline have multiple jobs' do
            let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
            let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
            let!(:test_job) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) }

            before do
              job.success
              job2.success
            end

            it 'returns dependent jobs' do
              request_job

              expect(response).to have_gitlab_http_status(201)
              expect(json_response['id']).to eq(test_job.id)
              expect(json_response['dependencies'].count).to eq(2)
              expect(json_response['dependencies']).to include(
                { 'id' => job.id, 'name' => job.name, 'token' => job.token },
                { 'id' => job2.id, 'name' => job2.name, 'token' => job2.token })
            end
          end

          context 'when pipeline has jobs with artifacts' do
            let!(:job) { create(:ci_build, :tag, :artifacts, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
            let!(:test_job) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) }

            before do
              job.success
            end

            it 'returns dependent jobs' do
              request_job

              expect(response).to have_gitlab_http_status(201)
              expect(json_response['id']).to eq(test_job.id)
              expect(json_response['dependencies'].count).to eq(1)
              expect(json_response['dependencies']).to include(
                { 'id' => job.id, 'name' => job.name, 'token' => job.token,
                  'artifacts_file' => { 'filename' => 'ci_build_artifacts.zip', 'size' => 106365 } })
            end
          end

          context 'when explicit dependencies are defined' do
            let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
            let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
            let!(:test_job) do
              create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'deploy',
                     stage: 'deploy', stage_idx: 1,
                     options: { script: ['bash'], dependencies: [job2.name] })
            end

            before do
              job.success
              job2.success
            end

            it 'returns dependent jobs' do
              request_job

              expect(response).to have_gitlab_http_status(201)
              expect(json_response['id']).to eq(test_job.id)
              expect(json_response['dependencies'].count).to eq(1)
              expect(json_response['dependencies'][0]).to include('id' => job2.id, 'name' => job2.name, 'token' => job2.token)
            end
          end

          context 'when dependencies is an empty array' do
            let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
            let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
            let!(:empty_dependencies_job) do
              create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'empty_dependencies_job',
                     stage: 'deploy', stage_idx: 1,
                     options: { script: ['bash'], dependencies: [] })
            end

            before do
              job.success
              job2.success
            end

            it 'returns an empty array' do
              request_job

              expect(response).to have_gitlab_http_status(201)
              expect(json_response['id']).to eq(empty_dependencies_job.id)
              expect(json_response['dependencies'].count).to eq(0)
            end
          end

          context 'when job has no tags' do
            before do
              job.update(tags: [])
            end

            context 'when runner is allowed to pick untagged jobs' do
              before do
                runner.update_column(:run_untagged, true)
              end

              it 'picks job' do
                request_job

                expect(response).to have_gitlab_http_status 201
              end
            end

            context 'when runner is not allowed to pick untagged jobs' do
              before do
                runner.update_column(:run_untagged, false)
              end

              it_behaves_like 'no jobs available'
            end
          end

          context 'when triggered job is available' do
            let(:expected_variables) do
              [{ 'key' => 'CI_JOB_NAME', 'value' => 'spinach', 'public' => true },
               { 'key' => 'CI_JOB_STAGE', 'value' => 'test', 'public' => true },
               { 'key' => 'CI_PIPELINE_TRIGGERED', 'value' => 'true', 'public' => true },
               { 'key' => 'DB_NAME', 'value' => 'postgres', 'public' => true },
               { 'key' => 'SECRET_KEY', 'value' => 'secret_value', 'public' => false },
               { 'key' => 'TRIGGER_KEY_1', 'value' => 'TRIGGER_VALUE_1', 'public' => false }]
            end

            let(:trigger) { create(:ci_trigger, project: project) }
            let!(:trigger_request) { create(:ci_trigger_request, pipeline: pipeline, builds: [job], trigger: trigger) }

            before do
              project.variables << Ci::Variable.new(key: 'SECRET_KEY', value: 'secret_value')
            end

            shared_examples 'expected variables behavior' do
              it 'returns variables for triggers' do
                request_job

                expect(response).to have_gitlab_http_status(201)
                expect(json_response['variables']).to include(*expected_variables)
              end
            end

            context 'when variables are stored in trigger_request' do
              before do
                trigger_request.update_attribute(:variables, { TRIGGER_KEY_1: 'TRIGGER_VALUE_1' })
              end

              it_behaves_like 'expected variables behavior'
            end

            context 'when variables are stored in pipeline_variables' do
              before do
                create(:ci_pipeline_variable, pipeline: pipeline, key: :TRIGGER_KEY_1, value: 'TRIGGER_VALUE_1')
              end

              it_behaves_like 'expected variables behavior'
            end
          end

          describe 'registry credentials support' do
            let(:registry_url) { 'registry.example.com:5005' }
            let(:registry_credentials) do
              { 'type' => 'registry',
                'url' => registry_url,
                'username' => 'gitlab-ci-token',
                'password' => job.token }
            end

            context 'when registry is enabled' do
              before do
                stub_container_registry_config(enabled: true, host_port: registry_url)
              end

              it 'sends registry credentials key' do
                request_job

                expect(json_response).to have_key('credentials')
                expect(json_response['credentials']).to include(registry_credentials)
              end
            end

            context 'when registry is disabled' do
              before do
                stub_container_registry_config(enabled: false, host_port: registry_url)
              end

              it 'does not send registry credentials' do
                request_job

                expect(json_response).to have_key('credentials')
                expect(json_response['credentials']).not_to include(registry_credentials)
              end
            end
          end

          describe 'timeout support' do
            context 'when project specifies job timeout' do
              let(:project) { create(:project, shared_runners_enabled: false, build_timeout: 1234) }

              it 'contains info about timeout taken from project' do
                request_job

                expect(response).to have_gitlab_http_status(201)
                expect(json_response['runner_info']).to include({ 'timeout' => 1234 })
              end

              context 'when runner specifies lower timeout' do
                let(:runner) { create(:ci_runner, :project, maximum_timeout: 1000, projects: [project]) }

                it 'contains info about timeout overridden by runner' do
                  request_job

                  expect(response).to have_gitlab_http_status(201)
                  expect(json_response['runner_info']).to include({ 'timeout' => 1000 })
                end
              end

              context 'when runner specifies bigger timeout' do
                let(:runner) { create(:ci_runner, :project, maximum_timeout: 2000, projects: [project]) }

                it 'contains info about timeout not overridden by runner' do
                  request_job

                  expect(response).to have_gitlab_http_status(201)
                  expect(json_response['runner_info']).to include({ 'timeout' => 1234 })
                end
              end
            end
          end
        end
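
        # Issues a job request as the configured runner, sending its User-Agent and last_update value.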
        def request_job(token = runner.token, **params)
          new_params = params.merge(token: token, last_update: last_update)

          post api('/jobs/request'), params: new_params, headers: { 'User-Agent' => user_agent }
        end
      end
    end
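
    # Runners report job state transitions (success, failure and failure reason) through this endpoint.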
    describe 'PUT /api/v4/jobs/:id' do
      let(:job) { create(:ci_build, :pending, :trace_live, pipeline: pipeline, runner_id: runner.id) }

      before do
        job.run!
      end

      context 'when status is given' do
        it 'marks job as succeeded' do
          update_job(state: 'success')

          job.reload
          expect(job).to be_success
        end

        it 'marks job as failed' do
          update_job(state: 'failed')

          job.reload
          expect(job).to be_failed
          expect(job).to be_unknown_failure
        end

        context 'when failure_reason is script_failure' do
          before do
            update_job(state: 'failed', failure_reason: 'script_failure')
            job.reload
          end

          it { expect(job).to be_script_failure }
        end

        context 'when failure_reason is runner_system_failure' do
          before do
            update_job(state: 'failed', failure_reason: 'runner_system_failure')
            job.reload
          end

          it { expect(job).to be_runner_system_failure }
        end

        context 'when failure_reason is unrecognized value' do
          before do
            update_job(state: 'failed', failure_reason: 'what_is_this')
            job.reload
          end

          it { expect(job).to be_unknown_failure }
        end

        context 'when failure_reason is job_execution_timeout' do
          before do
            update_job(state: 'failed', failure_reason: 'job_execution_timeout')
            job.reload
          end

          it { expect(job).to be_job_execution_timeout }
        end
      end

      context 'when trace is given' do
        it 'creates a trace artifact' do
          allow(BuildFinishedWorker).to receive(:perform_async).with(job.id) do
            ArchiveTraceWorker.new.perform(job.id)
          end

          update_job(state: 'success', trace: 'BUILD TRACE UPDATED')

          job.reload
          expect(response).to have_gitlab_http_status(200)
          expect(job.trace.raw).to eq 'BUILD TRACE UPDATED'
          expect(job.job_artifacts_trace.open.read).to eq 'BUILD TRACE UPDATED'
        end

        context 'when concurrent update of trace is happening' do
          before do
            job.trace.write('wb') do
              update_job(state: 'success', trace: 'BUILD TRACE UPDATED')
            end
          end

          it 'returns that operation conflicts' do
            expect(response.status).to eq(409)
          end
        end
      end

      context 'when no trace is given' do
        it 'does not override trace information' do
          update_job

          expect(job.reload.trace.raw).to eq 'BUILD TRACE'
        end

        context 'when running state is sent' do
          it 'updates updated_at value' do
            expect { update_job_after_time }.to change { job.reload.updated_at }
          end
        end

        context 'when other state is sent' do
          it "doesn't update updated_at value" do
            expect { update_job_after_time(20.minutes, state: 'success') }.not_to change { job.reload.updated_at }
          end
        end
      end

      context 'when job has been erased' do
        let(:job) { create(:ci_build, runner_id: runner.id, erased_at: Time.now) }

        it 'responds with forbidden' do
          update_job

          expect(response).to have_gitlab_http_status(403)
        end
      end

      context 'when job has already been finished' do
        before do
          job.trace.set('Job failed')
          job.drop!(:script_failure)
        end

        it 'does not update job status and job trace' do
          update_job(state: 'success', trace: 'BUILD TRACE UPDATED')

          job.reload
          expect(response).to have_gitlab_http_status(403)
          expect(response.header['Job-Status']).to eq 'failed'
          expect(job.trace.raw).to eq 'Job failed'
          expect(job).to be_failed
        end
      end

      def update_job(token = job.token, **params)
        new_params = params.merge(token: token)
        put api("/jobs/#{job.id}"), params: new_params
      end

      def update_job_after_time(update_interval = 20.minutes, state = 'running')
        Timecop.travel(job.updated_at + update_interval) do
          update_job(job.token, state: state)
        end
      end
    end
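
    # Trace chunks are appended incrementally; the Content-Range header describes where a chunk fits.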
    describe 'PATCH /api/v4/jobs/:id/trace' do
      let(:job) { create(:ci_build, :running, :trace_live, runner_id: runner.id, pipeline: pipeline) }
      let(:headers) { { API::Helpers::Runner::JOB_TOKEN_HEADER => job.token, 'Content-Type' => 'text/plain' } }
      let(:headers_with_range) { headers.merge({ 'Content-Range' => '11-20' }) }
      let(:update_interval) { 10.seconds.to_i }

      before do
        initial_patch_the_trace
      end

      context 'when request is valid' do
        it 'gets correct response' do
          expect(response.status).to eq 202
          expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
          expect(response.header).to have_key 'Range'
          expect(response.header).to have_key 'Job-Status'
        end

        context 'when job has been updated recently' do
          it { expect { patch_the_trace }.not_to change { job.updated_at } }

          it "changes the job's trace" do
            patch_the_trace

            expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
          end

          context 'when Runner makes a force-patch' do
            it { expect { force_patch_the_trace }.not_to change { job.updated_at } }

            it "doesn't change the build.trace" do
              force_patch_the_trace

              expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
            end
          end
        end

        context 'when job was not updated recently' do
          let(:update_interval) { 15.minutes.to_i }

          it { expect { patch_the_trace }.to change { job.updated_at } }

          it 'changes the job.trace' do
            patch_the_trace

            expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
          end

          context 'when Runner makes a force-patch' do
            it { expect { force_patch_the_trace }.to change { job.updated_at } }

            it "doesn't change the job.trace" do
              force_patch_the_trace

              expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
            end
          end
        end

        context 'when project for the build has been deleted' do
          let(:job) do
            create(:ci_build, :running, :trace_live, runner_id: runner.id, pipeline: pipeline) do |job|
              job.project.update(pending_delete: true)
            end
          end

          it 'responds with forbidden' do
            expect(response.status).to eq(403)
          end
        end

        context 'when trace is patched' do
          before do
            patch_the_trace
          end

          it 'has valid trace' do
            expect(response.status).to eq(202)
            expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
          end

          context 'when job is cancelled' do
            before do
              job.cancel
            end

            context 'when trace is patched' do
              before do
                patch_the_trace
              end

              it 'returns Forbidden' do
                expect(response.status).to eq(403)
              end
            end
          end

          context 'when redis data are flushed' do
            before do
              redis_shared_state_cleanup!
            end

            it 'has empty trace' do
              expect(job.reload.trace.raw).to eq ''
            end

            context 'when we perform partial patch' do
              before do
                patch_the_trace('hello', headers.merge({ 'Content-Range' => "28-32/5" }))
              end

              it 'returns an error' do
                expect(response.status).to eq(416)
                expect(response.header['Range']).to eq('0-0')
              end
            end

            context 'when we resend full trace' do
              before do
                patch_the_trace('BUILD TRACE appended appended hello', headers.merge({ 'Content-Range' => "0-34/35" }))
              end

              it 'succeeds with updating trace' do
                expect(response.status).to eq(202)
                expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended hello'
              end
            end
          end
        end

        context 'when concurrent update of trace is happening' do
          before do
            job.trace.write('wb') do
              patch_the_trace
            end
          end

          it 'returns that operation conflicts' do
            expect(response.status).to eq(409)
          end
        end

        context 'when the job is canceled' do
          before do
            job.cancel
            patch_the_trace
          end

          it 'receives status in header' do
            expect(response.header['Job-Status']).to eq 'canceled'
          end
        end
      end

      context 'when Runner makes a force-patch' do
        before do
          force_patch_the_trace
        end

        it 'gets correct response' do
          expect(response.status).to eq 202
          expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
          expect(response.header).to have_key 'Range'
          expect(response.header).to have_key 'Job-Status'
        end
      end

      context 'when content-range start is too big' do
        let(:headers_with_range) { headers.merge({ 'Content-Range' => '15-20/6' }) }

        it 'gets 416 error response with range headers' do
          expect(response.status).to eq 416
          expect(response.header).to have_key 'Range'
          expect(response.header['Range']).to eq '0-11'
        end
      end

      context 'when content-range start is too small' do
        let(:headers_with_range) { headers.merge({ 'Content-Range' => '8-20/13' }) }

        it 'gets 416 error response with range headers' do
          expect(response.status).to eq 416
          expect(response.header).to have_key 'Range'
          expect(response.header['Range']).to eq '0-11'
        end
      end

      context 'when Content-Range header is missing' do
        let(:headers_with_range) { headers }

        it { expect(response.status).to eq 400 }
      end

      context 'when job has been erased' do
        let(:job) { create(:ci_build, runner_id: runner.id, erased_at: Time.now) }

        it { expect(response.status).to eq 403 }
      end

      def patch_the_trace(content = ' appended', request_headers = nil)
        unless request_headers
          job.trace.read do |stream|
            offset = stream.size
            limit = offset + content.length - 1
            request_headers = headers.merge({ 'Content-Range' => "#{offset}-#{limit}" })
          end
        end

        Timecop.travel(job.updated_at + update_interval) do
          patch api("/jobs/#{job.id}/trace"), params: content, headers: request_headers
          job.reload
        end
      end

      def initial_patch_the_trace
        patch_the_trace(' appended', headers_with_range)
      end

      def force_patch_the_trace
        2.times { patch_the_trace('') }
      end
    end
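
    # Artifact handling is a two-step flow: authorize the upload through Workhorse, then post the file.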
    describe 'artifacts' do
      let(:job) { create(:ci_build, :pending, pipeline: pipeline, runner_id: runner.id) }
      let(:jwt_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
      let(:headers) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => jwt_token } }
      let(:headers_with_token) { headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.token) }
      let(:file_upload) { fixture_file_upload('spec/fixtures/banana_sample.gif', 'image/gif') }
      let(:file_upload2) { fixture_file_upload('spec/fixtures/dk.png', 'image/gif') }

      before do
        stub_artifacts_object_storage
        job.run!
      end

      describe 'POST /api/v4/jobs/:id/artifacts/authorize' do
        context 'when using token as parameter' do
          context 'posting artifacts to running job' do
            subject do
              authorize_artifacts_with_token_in_params
            end

            shared_examples 'authorizes local file' do
              it 'succeeds' do
                subject

                expect(response).to have_gitlab_http_status(200)
                expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
                expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path)
                expect(json_response['RemoteObject']).to be_nil
              end
            end

            context 'when using local storage' do
              it_behaves_like 'authorizes local file'
            end

            context 'when using remote storage' do
              context 'when direct upload is enabled' do
                before do
                  stub_artifacts_object_storage(enabled: true, direct_upload: true)
                end

                it 'succeeds' do
                  subject

                  expect(response).to have_gitlab_http_status(200)
                  expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
                  expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path)
                  expect(json_response['RemoteObject']).to have_key('ID')
                  expect(json_response['RemoteObject']).to have_key('GetURL')
                  expect(json_response['RemoteObject']).to have_key('StoreURL')
                  expect(json_response['RemoteObject']).to have_key('DeleteURL')
                  expect(json_response['RemoteObject']).to have_key('MultipartUpload')
                end
              end

              context 'when direct upload is disabled' do
                before do
                  stub_artifacts_object_storage(enabled: true, direct_upload: false)
                end

                it_behaves_like 'authorizes local file'
              end
            end
          end

          it 'fails to post too large artifact' do
            stub_application_setting(max_artifacts_size: 0)

            authorize_artifacts_with_token_in_params(filesize: 100)

            expect(response).to have_gitlab_http_status(413)
          end
        end

        context 'when using token as header' do
          it 'authorizes posting artifacts to running job' do
            authorize_artifacts_with_token_in_headers

            expect(response).to have_gitlab_http_status(200)
            expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
            expect(json_response['TempPath']).not_to be_nil
          end

          it 'fails to post too large artifact' do
            stub_application_setting(max_artifacts_size: 0)

            authorize_artifacts_with_token_in_headers(filesize: 100)

            expect(response).to have_gitlab_http_status(413)
          end
        end

        context 'when using runners token' do
          it 'fails to authorize artifacts posting' do
            authorize_artifacts(token: job.project.runners_token)

            expect(response).to have_gitlab_http_status(403)
          end
        end

        it 'rejects requests that did not go through gitlab-workhorse' do
          headers.delete(Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER)

          authorize_artifacts

          expect(response).to have_gitlab_http_status(500)
        end

        context 'authorization token is invalid' do
          it 'responds with forbidden' do
            authorize_artifacts(token: 'invalid', filesize: 100)

            expect(response).to have_gitlab_http_status(403)
          end
        end

        def authorize_artifacts(params = {}, request_headers = headers)
          post api("/jobs/#{job.id}/artifacts/authorize"), params: params, headers: request_headers
        end

        def authorize_artifacts_with_token_in_params(params = {}, request_headers = headers)
          params = params.merge(token: job.token)
          authorize_artifacts(params, request_headers)
        end

        def authorize_artifacts_with_token_in_headers(params = {}, request_headers = headers_with_token)
          authorize_artifacts(params, request_headers)
        end
      end
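
      # Upload of the artifacts archive (and optional metadata file) accelerated by Workhorse.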
describe 'POST /api/v4/jobs/:id/artifacts' do
|
|
|
|
context 'when artifacts are being stored inside of tmp path' do
|
|
|
|
before do
|
|
|
|
# by configuring this path we allow to pass temp file from any path
|
2018-03-17 18:26:18 +05:30
|
|
|
allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return('/')
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
context 'when job has been erased' do
|
|
|
|
let(:job) { create(:ci_build, erased_at: Time.now) }
|
|
|
|
|
|
|
|
before do
|
|
|
|
upload_artifacts(file_upload, headers_with_token)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'responds with forbidden' do
|
|
|
|
upload_artifacts(file_upload, headers_with_token)
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(response).to have_gitlab_http_status(403)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when job is running' do
|
|
|
|
shared_examples 'successful artifacts upload' do
|
|
|
|
it 'updates successfully' do
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(response).to have_gitlab_http_status(201)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-05-09 12:01:36 +05:30
|
|
|
context 'when uses accelerated file post' do
|
|
|
|
context 'for file stored locally' do
|
|
|
|
before do
|
|
|
|
upload_artifacts(file_upload, headers_with_token)
|
|
|
|
end
|
|
|
|
|
|
|
|
it_behaves_like 'successful artifacts upload'
|
2017-09-10 17:25:29 +05:30
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
|
2018-05-09 12:01:36 +05:30
|
|
|
context 'for file stored remotelly' do
|
|
|
|
let!(:fog_connection) do
|
|
|
|
stub_artifacts_object_storage(direct_upload: true)
|
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
|
2018-05-09 12:01:36 +05:30
|
|
|
before do
|
2019-02-13 22:33:31 +05:30
|
|
|
fog_connection.directories.new(key: 'artifacts').files.create(
|
2018-11-08 19:23:39 +05:30
|
|
|
key: 'tmp/uploads/12312300',
|
2018-05-09 12:01:36 +05:30
|
|
|
body: 'content'
|
|
|
|
)
|
|
|
|
|
|
|
|
upload_artifacts(file_upload, headers_with_token,
|
|
|
|
{ 'file.remote_id' => remote_id })
|
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
|
2018-05-09 12:01:36 +05:30
|
|
|
context 'when valid remote_id is used' do
|
|
|
|
let(:remote_id) { '12312300' }
|
|
|
|
|
|
|
|
it_behaves_like 'successful artifacts upload'
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when invalid remote_id is used' do
|
|
|
|
let(:remote_id) { 'invalid id' }
|
|
|
|
|
|
|
|
it 'responds with bad request' do
|
|
|
|
expect(response).to have_gitlab_http_status(500)
|
|
|
|
expect(json_response['message']).to eq("Missing file")
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
context 'when using runners token' do
|
2019-02-13 22:33:31 +05:30
|
|
|
it 'responds with forbidden' do
|
2017-08-17 22:00:37 +05:30
|
|
|
upload_artifacts(file_upload, headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.project.runners_token))
|
|
|
|
|
2019-02-13 22:33:31 +05:30
|
|
|
expect(response).to have_gitlab_http_status(403)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when artifacts file is too large' do
|
|
|
|
it 'fails to post too large artifact' do
|
|
|
|
stub_application_setting(max_artifacts_size: 0)
|
|
|
|
|
|
|
|
upload_artifacts(file_upload, headers_with_token)
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(response).to have_gitlab_http_status(413)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when artifacts post request does not contain file' do
|
|
|
|
it 'fails to post artifacts without file' do
|
2019-02-13 22:33:31 +05:30
|
|
|
post api("/jobs/#{job.id}/artifacts"), params: {}, headers: headers_with_token
|
2017-08-17 22:00:37 +05:30
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(response).to have_gitlab_http_status(400)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'GitLab Workhorse is not configured' do
|
|
|
|
it 'fails to post artifacts without GitLab-Workhorse' do
|
2019-02-13 22:33:31 +05:30
|
|
|
post api("/jobs/#{job.id}/artifacts"), params: { token: job.token }, headers: {}
|
2017-08-17 22:00:37 +05:30
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(response).to have_gitlab_http_status(403)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when setting an expire date' do
|
|
|
|
let(:default_artifacts_expire_in) {}
|
|
|
|
let(:post_data) do
|
|
|
|
{ 'file.path' => file_upload.path,
|
|
|
|
'file.name' => file_upload.original_filename,
|
|
|
|
'expire_in' => expire_in }
|
|
|
|
end
|
|
|
|
|
|
|
|
before do
|
|
|
|
stub_application_setting(default_artifacts_expire_in: default_artifacts_expire_in)
|
|
|
|
|
2019-02-13 22:33:31 +05:30
|
|
|
post(api("/jobs/#{job.id}/artifacts"), params: post_data, headers: headers_with_token)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
context 'when an expire_in is given' do
|
|
|
|
let(:expire_in) { '7 days' }
|
|
|
|
|
|
|
|
it 'updates when specified' do
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(response).to have_gitlab_http_status(201)
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(job.reload.artifacts_expire_at).to be_within(5.minutes).of(7.days.from_now)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when no expire_in is given' do
|
|
|
|
let(:expire_in) { nil }
|
|
|
|
|
|
|
|
it 'leaves the expire date unset when not specified' do
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(response).to have_gitlab_http_status(201)
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(job.reload.artifacts_expire_at).to be_nil
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'with application default' do
|
|
|
|
context 'when default is 5 days' do
|
|
|
|
let(:default_artifacts_expire_in) { '5 days' }
|
|
|
|
|
|
|
|
it 'sets the expire date to the application default' do
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(response).to have_gitlab_http_status(201)
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(job.reload.artifacts_expire_at).to be_within(5.minutes).of(5.days.from_now)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when default is 0' do
|
|
|
|
let(:default_artifacts_expire_in) { '0' }
|
|
|
|
|
|
|
|
it 'does not set expire_in' do
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(response).to have_gitlab_http_status(201)
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(job.reload.artifacts_expire_at).to be_nil
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when posting artifacts file and metadata file' do
|
|
|
|
let!(:artifacts) { file_upload }
|
2018-03-27 19:54:05 +05:30
|
|
|
let!(:artifacts_sha256) { Digest::SHA256.file(artifacts.path).hexdigest }
|
2017-08-17 22:00:37 +05:30
|
|
|
let!(:metadata) { file_upload2 }
|
2018-05-09 12:01:36 +05:30
|
|
|
let!(:metadata_sha256) { Digest::SHA256.file(metadata.path).hexdigest }
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
let(:stored_artifacts_file) { job.reload.artifacts_file.file }
|
|
|
|
let(:stored_metadata_file) { job.reload.artifacts_metadata.file }
|
|
|
|
let(:stored_artifacts_size) { job.reload.artifacts_size }
|
2018-03-27 19:54:05 +05:30
|
|
|
let(:stored_artifacts_sha256) { job.reload.job_artifacts_archive.file_sha256 }
|
2018-05-09 12:01:36 +05:30
|
|
|
let(:stored_metadata_sha256) { job.reload.job_artifacts_metadata.file_sha256 }
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
before do
|
2019-02-13 22:33:31 +05:30
|
|
|
post(api("/jobs/#{job.id}/artifacts"), params: post_data, headers: headers_with_token)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
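# Workhorse-accelerated uploads do not carry the raw file; they pass 'file.path',
# 'file.name' and 'file.sha256' (plus the metadata.* counterparts) for files already
# written to disk, which is the shape the post_data below mimics.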
context 'when post data accelerated by workhorse is correct' do
|
|
|
|
let(:post_data) do
|
|
|
|
{ 'file.path' => artifacts.path,
|
|
|
|
'file.name' => artifacts.original_filename,
|
2018-03-27 19:54:05 +05:30
|
|
|
'file.sha256' => artifacts_sha256,
|
2017-08-17 22:00:37 +05:30
|
|
|
'metadata.path' => metadata.path,
|
2018-05-09 12:01:36 +05:30
|
|
|
'metadata.name' => metadata.original_filename,
|
|
|
|
'metadata.sha256' => metadata_sha256 }
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'stores artifacts and artifacts metadata' do
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(response).to have_gitlab_http_status(201)
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(stored_artifacts_file.original_filename).to eq(artifacts.original_filename)
|
|
|
|
expect(stored_metadata_file.original_filename).to eq(metadata.original_filename)
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(stored_artifacts_size).to eq(72821)
|
2018-03-27 19:54:05 +05:30
|
|
|
expect(stored_artifacts_sha256).to eq(artifacts_sha256)
|
2018-05-09 12:01:36 +05:30
|
|
|
expect(stored_metadata_sha256).to eq(metadata_sha256)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when there is no artifacts file in post data' do
|
|
|
|
let(:post_data) do
|
|
|
|
{ 'metadata' => metadata }
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'responds with bad request' do
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(response).to have_gitlab_http_status(400)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'does not store metadata' do
|
|
|
|
expect(stored_metadata_file).to be_nil
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
2018-11-18 11:00:15 +05:30
|
|
|
|
|
|
|
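# artifact_type and artifact_format are validated as a pair: the contexts below assume
# an archive must be uploaded as zip, while gzip is reserved for report types like junit.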
context 'when artifact_type is archive' do
|
|
|
|
context 'when artifact_format is zip' do
|
|
|
|
let(:params) { { artifact_type: :archive, artifact_format: :zip } }
|
|
|
|
|
|
|
|
it 'stores the archive artifact' do
|
|
|
|
upload_artifacts(file_upload, headers_with_token, params)
|
|
|
|
|
|
|
|
expect(response).to have_gitlab_http_status(201)
|
|
|
|
expect(job.reload.job_artifacts_archive).not_to be_nil
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when artifact_format is gzip' do
|
|
|
|
let(:params) { { artifact_type: :archive, artifact_format: :gzip } }
|
|
|
|
|
|
|
|
it 'returns an error' do
|
|
|
|
upload_artifacts(file_upload, headers_with_token, params)
|
|
|
|
|
|
|
|
expect(response).to have_gitlab_http_status(400)
|
|
|
|
expect(job.reload.job_artifacts_archive).to be_nil
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when artifact_type is junit' do
|
|
|
|
context 'when artifact_format is gzip' do
|
|
|
|
let(:file_upload) { fixture_file_upload('spec/fixtures/junit/junit.xml.gz') }
|
|
|
|
let(:params) { { artifact_type: :junit, artifact_format: :gzip } }
|
|
|
|
|
|
|
|
it 'stores junit test report' do
|
|
|
|
upload_artifacts(file_upload, headers_with_token, params)
|
|
|
|
|
|
|
|
expect(response).to have_gitlab_http_status(201)
|
|
|
|
expect(job.reload.job_artifacts_junit).not_to be_nil
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when artifact_format is raw' do
|
|
|
|
let(:file_upload) { fixture_file_upload('spec/fixtures/junit/junit.xml.gz') }
|
|
|
|
let(:params) { { artifact_type: :junit, artifact_format: :raw } }
|
|
|
|
|
|
|
|
it 'returns an error' do
|
|
|
|
upload_artifacts(file_upload, headers_with_token, params)
|
|
|
|
|
|
|
|
expect(response).to have_gitlab_http_status(400)
|
|
|
|
expect(job.reload.job_artifacts_junit).to be_nil
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
context 'when artifacts are being stored outside of the tmp path' do
|
2018-05-09 12:01:36 +05:30
|
|
|
let(:new_tmpdir) { Dir.mktmpdir }
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
before do
|
2018-05-09 12:01:36 +05:30
|
|
|
# initialize the upload fixture before overriding the tmp dir
|
|
|
|
file_upload
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
# by stubbing this path we only allow files from the new tmp dir to be passed,
|
|
|
|
# but all temporary files are still stored in the system tmp directory
|
2018-05-09 12:01:36 +05:30
|
|
|
allow(Dir).to receive(:tmpdir).and_return(new_tmpdir)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
2017-09-10 17:25:29 +05:30
|
|
|
after do
|
2018-05-09 12:01:36 +05:30
|
|
|
FileUtils.remove_entry(new_tmpdir)
|
2017-09-10 17:25:29 +05:30
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
it' "fails to post artifacts for outside of tmp path"' do
|
|
|
|
upload_artifacts(file_upload, headers_with_token)
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(response).to have_gitlab_http_status(400)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-05-09 12:01:36 +05:30
|
|
|
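# Helper that mimics a Workhorse-accelerated upload: the file is referenced through
# 'file.path'/'file.name' params rather than sent as raw multipart data, then posted
# to the job artifacts endpoint with the given headers.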
def upload_artifacts(file, headers = {}, params = {})
|
|
|
|
params = params.merge({
|
|
|
|
'file.path' => file.path,
|
|
|
|
'file.name' => file.original_filename
|
|
|
|
})
|
2018-03-17 18:26:18 +05:30
|
|
|
|
2019-02-13 22:33:31 +05:30
|
|
|
post api("/jobs/#{job.id}/artifacts"), params: params, headers: headers
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe 'GET /api/v4/jobs/:id/artifacts' do
|
2019-02-13 22:33:31 +05:30
|
|
|
let(:token) { job.token }
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
context 'when job has artifacts' do
|
2019-02-13 22:33:31 +05:30
|
|
|
let(:job) { create(:ci_build) }
|
2018-05-09 12:01:36 +05:30
|
|
|
let(:store) { JobArtifactUploader::Store::LOCAL }
|
|
|
|
|
|
|
|
before do
|
|
|
|
create(:ci_job_artifact, :archive, file_store: store, job: job)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
context 'when using job token' do
|
2018-05-09 12:01:36 +05:30
|
|
|
context 'when artifacts are stored locally' do
|
|
|
|
let(:download_headers) do
|
|
|
|
{ 'Content-Transfer-Encoding' => 'binary',
|
|
|
|
'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
|
|
|
|
end
|
|
|
|
|
|
|
|
before do
|
|
|
|
download_artifact
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'downloads artifacts' do
|
|
|
|
expect(response).to have_gitlab_http_status(200)
|
2018-10-15 14:42:47 +05:30
|
|
|
expect(response.headers.to_h).to include download_headers
|
2018-05-09 12:01:36 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
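# With remote object storage the API is not expected to stream the artifact itself:
# proxy download hands the URL to Workhorse via send-url, while direct download
# redirects the runner straight to the object store.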
context 'when artifacts are stored remotely' do
|
|
|
|
let(:store) { JobArtifactUploader::Store::REMOTE }
|
2019-02-13 22:33:31 +05:30
|
|
|
let!(:job) { create(:ci_build) }
|
2018-05-09 12:01:36 +05:30
|
|
|
|
|
|
|
context 'when proxy download is being used' do
|
|
|
|
before do
|
|
|
|
download_artifact(direct_download: false)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'uses workhorse send-url' do
|
|
|
|
expect(response).to have_gitlab_http_status(200)
|
2018-10-15 14:42:47 +05:30
|
|
|
expect(response.headers.to_h).to include(
|
2018-05-09 12:01:36 +05:30
|
|
|
'Gitlab-Workhorse-Send-Data' => /send-url:/)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when direct download is being used' do
|
|
|
|
before do
|
|
|
|
download_artifact(direct_download: true)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'receives a redirect for downloading artifacts' do
|
|
|
|
expect(response).to have_gitlab_http_status(302)
|
|
|
|
expect(response.headers).to include('Location')
|
|
|
|
end
|
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when using runners token' do
|
|
|
|
let(:token) { job.project.runners_token }
|
|
|
|
|
2018-05-09 12:01:36 +05:30
|
|
|
before do
|
|
|
|
download_artifact
|
|
|
|
end
|
|
|
|
|
2019-02-13 22:33:31 +05:30
|
|
|
it 'responds with forbidden' do
|
|
|
|
expect(response).to have_gitlab_http_status(403)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when job does not have artifacts' do
|
|
|
|
it 'responds with not found' do
|
2018-05-09 12:01:36 +05:30
|
|
|
download_artifact
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(response).to have_gitlab_http_status(404)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
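# Helper that reloads the job (presumably so artifacts created in before blocks are
# picked up) and issues the GET with the token merged into the params and the given
# request headers.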
def download_artifact(params = {}, request_headers = headers)
|
|
|
|
params = params.merge(token: token)
|
2018-05-09 12:01:36 +05:30
|
|
|
job.reload
|
|
|
|
|
2019-02-13 22:33:31 +05:30
|
|
|
get api("/jobs/#{job.id}/artifacts"), params: params, headers: request_headers
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|