# frozen_string_literal: true
require "spec_helper"
# Specs for Projects::UpdatePagesService: deploying a GitLab Pages site from a
# CI build's artifacts archive. Covers successful deploys, deployment metadata,
# cleanup of older deployments, size/file-count limits, failure modes (missing
# metadata, DNS timeout, outdated SHA, missing sha256), and job retries.
RSpec.describe Projects::UpdatePagesService do
  let_it_be(:project, refind: true) { create(:project, :repository) }
  let_it_be(:old_pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
  let_it_be(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }

  let(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') }
  let(:invalid_file) { fixture_file_upload('spec/fixtures/dk.png') }

  let(:file) { fixture_file_upload("spec/fixtures/pages.zip") }
  let(:empty_file) { fixture_file_upload("spec/fixtures/pages_empty.zip") }
  let(:metadata_filename) { "spec/fixtures/pages.zip.meta" }
  # Metadata fixture may be absent locally; nil keeps artifact creation lazy-safe.
  let(:metadata) { fixture_file_upload(metadata_filename) if File.exist?(metadata_filename) }

  subject { described_class.new(project, build) }

  context 'for new artifacts' do
    context "for a valid job" do
      let!(:artifacts_archive) { create(:ci_job_artifact, :correct_checksum, file: file, job: build) }

      before do
        create(:ci_job_artifact, file_type: :metadata, file_format: :gzip, file: metadata, job: build)

        build.reload
      end

      it "doesn't delete artifacts after deploying" do
        expect(execute).to eq(:success)

        expect(project.pages_metadatum).to be_deployed
        expect(build.artifacts?).to eq(true)
      end

      it 'succeeds' do
        expect(project.pages_deployed?).to be_falsey
        expect(execute).to eq(:success)
        expect(project.pages_metadatum).to be_deployed
        expect(project.pages_deployed?).to be_truthy
      end

      it 'creates pages_deployment and saves it in the metadata' do
        expect do
          expect(execute).to eq(:success)
        end.to change { project.pages_deployments.count }.by(1)

        deployment = project.pages_deployments.last

        expect(deployment.size).to eq(file.size)
        expect(deployment.file).to be
        expect(deployment.file_count).to eq(3)
        expect(deployment.file_sha256).to eq(artifacts_archive.file_sha256)
        expect(project.pages_metadatum.reload.pages_deployment_id).to eq(deployment.id)
        expect(deployment.ci_build_id).to eq(build.id)
      end

      it 'does not fail if pages_metadata is absent' do
        project.pages_metadatum.destroy!
        project.reload

        expect do
          expect(execute).to eq(:success)
        end.to change { project.pages_deployments.count }.by(1)

        expect(project.pages_metadatum.reload.pages_deployment).to eq(project.pages_deployments.last)
      end

      context 'when there is an old pages deployment' do
        # Deployment in another project must NOT be touched by the cleanup.
        let!(:old_deployment_from_another_project) { create(:pages_deployment) }
        let!(:old_deployment) { create(:pages_deployment, project: project) }

        it 'schedules a destruction of older deployments' do
          expect(DestroyPagesDeploymentsWorker).to(
            receive(:perform_in).with(described_class::OLD_DEPLOYMENTS_DESTRUCTION_DELAY,
                                      project.id,
                                      instance_of(Integer))
          )

          execute
        end

        it 'removes older deployments', :sidekiq_inline do
          expect do
            execute
          end.not_to change { PagesDeployment.count } # it creates one and deletes one

          expect(PagesDeployment.find_by_id(old_deployment.id)).to be_nil
        end
      end

      it 'limits pages size' do
        stub_application_setting(max_pages_size: 1)
        expect(execute).not_to eq(:success)
      end

      it 'limits pages file count' do
        create(:plan_limits, :default_plan, pages_file_entries: 2)

        expect(execute).not_to eq(:success)
        expect(GenericCommitStatus.last.description).to eq("pages site contains 3 file entries, while limit is set to 2")
      end

      context 'when timeout happens by DNS error' do
        before do
          allow_next_instance_of(described_class) do |instance|
            allow(instance).to receive(:create_pages_deployment).and_raise(SocketError)
          end
        end

        it 'raises an error' do
          expect { execute }.to raise_error(SocketError)

          build.reload

          expect(deploy_status).to be_failed
          expect(project.pages_metadatum).not_to be_deployed
        end
      end

      context 'when missing artifacts metadata' do
        before do
          expect(build).to receive(:artifacts_metadata?).and_return(false)
        end

        it 'does not raise an error as failed job' do
          execute

          build.reload

          expect(deploy_status).to be_failed
          expect(project.pages_metadatum).not_to be_deployed
        end
      end

      context 'with background jobs running', :sidekiq_inline do
        it 'succeeds' do
          expect(project.pages_deployed?).to be_falsey
          expect(execute).to eq(:success)
        end
      end

      context "when sha on branch was updated before deployment was uploaded" do
        before do
          # Simulate the ref moving mid-deploy: flip the build's ref right
          # before create_pages_deployment runs, then proceed normally.
          expect(subject).to receive(:create_pages_deployment).and_wrap_original do |m, *args|
            build.update!(ref: 'feature')
            m.call(*args)
          end
        end

        shared_examples 'fails with outdated reference message' do
          it 'fails' do
            expect(execute).not_to eq(:success)
            expect(project.reload.pages_metadatum).not_to be_deployed
            expect(deploy_status).to be_failed
            expect(deploy_status.description).to eq('build SHA is outdated for this ref')
          end
        end

        shared_examples 'successfully deploys' do
          it 'succeeds' do
            expect do
              expect(execute).to eq(:success)
            end.to change { project.pages_deployments.count }.by(1)

            deployment = project.pages_deployments.last
            expect(deployment.ci_build_id).to eq(build.id)
          end
        end

        include_examples 'successfully deploys'

        context 'when old deployment present' do
          before do
            old_build = create(:ci_build, pipeline: old_pipeline, ref: 'HEAD')
            old_deployment = create(:pages_deployment, ci_build: old_build, project: project)
            project.update_pages_deployment!(old_deployment)
          end

          include_examples 'successfully deploys'
        end

        context 'when newer deployment present' do
          before do
            new_pipeline = create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha)
            new_build = create(:ci_build, pipeline: new_pipeline, ref: 'HEAD')
            new_deployment = create(:pages_deployment, ci_build: new_build, project: project)
            project.update_pages_deployment!(new_deployment)
          end

          include_examples 'fails with outdated reference message'
        end
      end
    end
  end

  # this situation should never happen in real life because all new archives have sha256
  # and we only use new archives
  # this test is here just to clarify that this behavior is intentional
  context 'when artifacts archive does not have sha256' do
    let!(:artifacts_archive) { create(:ci_job_artifact, file: file, job: build) }

    before do
      create(:ci_job_artifact, file_type: :metadata, file_format: :gzip, file: metadata, job: build)

      build.reload
    end

    it 'fails with exception raised' do
      expect do
        execute
      end.to raise_error("Validation failed: File sha256 can't be blank")
    end
  end

  it 'fails if no artifacts' do
    expect(execute).not_to eq(:success)
  end

  it 'fails for invalid archive' do
    create(:ci_job_artifact, :archive, file: invalid_file, job: build)

    expect(execute).not_to eq(:success)
  end

  describe 'maximum pages artifacts size' do
    let(:metadata) { spy('metadata') }

    before do
      file = fixture_file_upload('spec/fixtures/pages.zip')
      metafile = fixture_file_upload('spec/fixtures/pages.zip.meta')

      create(:ci_job_artifact, :archive, :correct_checksum, file: file, job: build)
      create(:ci_job_artifact, :metadata, file: metafile, job: build)

      allow(build).to receive(:artifacts_metadata_entry)
        .and_return(metadata)
    end

    context 'when maximum pages size is set to zero' do
      before do
        stub_application_setting(max_pages_size: 0)
      end

      # 0 means "unlimited" at the application level; the hard cap applies.
      it_behaves_like 'pages size limit is', ::Gitlab::Pages::MAX_SIZE
    end

    context 'when size is limited on the instance level' do
      before do
        stub_application_setting(max_pages_size: 100)
      end

      it_behaves_like 'pages size limit is', 100.megabytes
    end
  end

  context 'when retrying the job' do
    let!(:older_deploy_job) do
      create(:generic_commit_status, :failed, pipeline: pipeline,
                                              ref: build.ref,
                                              stage: 'deploy',
                                              name: 'pages:deploy')
    end

    before do
      create(:ci_job_artifact, :correct_checksum, file: file, job: build)
      create(:ci_job_artifact, file_type: :metadata, file_format: :gzip, file: metadata, job: build)
      build.reload
    end

    it 'marks older pages:deploy jobs retried' do
      expect(execute).to eq(:success)
      expect(older_deploy_job.reload).to be_retried
    end
  end

  private

  # Finds the pages:deploy commit status the service creates for the pipeline.
  def deploy_status
    GenericCommitStatus.find_by(name: 'pages:deploy')
  end

  # Runs the service under test and returns only the :status symbol.
  def execute
    subject.execute[:status]
  end
end