debian-mirror-gitlab/spec/services/ci/expire_pipeline_cache_service_spec.rb

# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Ci::ExpirePipelineCacheService do
  let_it_be(:user) { create(:user) }
  let_it_be(:project) { create(:project) }
  let_it_be(:pipeline) { create(:ci_pipeline, project: project) }

  subject { described_class.new }

  describe '#execute' do
    it 'invalidates Etag caching for project pipelines path' do
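      # Every Etag-cached endpoint that renders this pipeline's state should be
      # invalidated: the project pipeline list, the new-MR page, the pipeline
      # detail page, and the GraphQL/on-demand-scan caches.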
      pipelines_path = "/#{project.full_path}/-/pipelines.json"
      new_mr_pipelines_path = "/#{project.full_path}/-/merge_requests/new.json"
      pipeline_path = "/#{project.full_path}/-/pipelines/#{pipeline.id}.json"
      graphql_pipeline_path = "/api/graphql:pipelines/id/#{pipeline.id}"
      graphql_pipeline_sha_path = "/api/graphql:pipelines/sha/#{pipeline.sha}"
      graphql_project_on_demand_scan_counts_path = "/api/graphql:on_demand_scan/counts/#{project.full_path}"

      expect_touched_etag_caching_paths(
        pipelines_path,
        new_mr_pipelines_path,
        pipeline_path,
        graphql_pipeline_path,
        graphql_pipeline_sha_path,
        graphql_project_on_demand_scan_counts_path
      )

      subject.execute(pipeline)
    end

    it 'invalidates Etag caching for merge request pipelines if pipeline runs on any commit of that source branch' do
      merge_request = create(:merge_request, :with_detached_merge_request_pipeline)
      project = merge_request.target_project

      merge_request_pipelines_path = "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/pipelines.json"
      merge_request_widget_path = "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/cached_widget.json"

      expect_touched_etag_caching_paths(
        merge_request_pipelines_path,
        merge_request_widget_path
      )

      subject.execute(merge_request.all_pipelines.last)
    end

    it 'updates the cached status for a project' do
      expect(Gitlab::Cache::Ci::ProjectPipelineStatus).to receive(:update_for_pipeline).with(pipeline)

      subject.execute(pipeline)
    end

    context 'destroyed pipeline' do
      let(:project_with_repo) { create(:project, :repository) }
      let!(:pipeline_with_commit) { create(:ci_pipeline, :success, project: project_with_repo, sha: project_with_repo.commit.id) }

      it 'clears the cache', :use_clean_rails_redis_caching do
        create(:commit_status, :success, pipeline: pipeline_with_commit, ref: pipeline_with_commit.ref)

        # Sanity check
        expect(project_with_repo.pipeline_status.has_status?).to be_truthy

        subject.execute(pipeline_with_commit, delete: true)

        pipeline_with_commit.destroy!

        # We need to reset lazy_latest_pipeline cache to simulate a new request
        BatchLoader::Executor.clear_current

        # Need to use find to avoid memoization
        expect(Project.find(project_with_repo.id).pipeline_status.has_status?).to be_falsey
      end
    end

    context 'when the pipeline is triggered by another pipeline' do
      let(:source) { create(:ci_sources_pipeline, pipeline: pipeline) }

      it 'updates the cache of dependent pipeline' do
        dependent_pipeline_path = "/#{source.source_project.full_path}/-/pipelines/#{source.source_pipeline.id}.json"

        expect_touched_etag_caching_paths(dependent_pipeline_path)

        subject.execute(pipeline)
      end
    end

    context 'when the pipeline triggered another pipeline' do
      let(:build) { create(:ci_build, pipeline: pipeline) }
      let(:source) { create(:ci_sources_pipeline, source_job: build) }

      it 'updates the cache of dependent pipeline' do
        dependent_pipeline_path = "/#{source.project.full_path}/-/pipelines/#{source.pipeline.id}.json"

        expect_touched_etag_caching_paths(dependent_pipeline_path)

        subject.execute(pipeline)
      end
    end
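
    # Regression guard for N+1 queries: record a baseline query count, add
    # upstream and downstream pipeline links, and assert the next run stays
    # within that baseline.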
    it 'does not do N+1 queries' do
      subject.execute(pipeline)

      control = ActiveRecord::QueryRecorder.new { subject.execute(pipeline) }

      create(:ci_sources_pipeline, pipeline: pipeline)
      create(:ci_sources_pipeline, source_job: create(:ci_build, pipeline: pipeline))

      expect { subject.execute(pipeline) }.not_to exceed_query_limit(control.count)
    end
  end
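
  # Asserts that Gitlab::EtagCaching::Store#touch receives (at least) every
  # given path, while still delegating to the real implementation.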
  def expect_touched_etag_caching_paths(*paths)
    expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
      expect(store).to receive(:touch).and_wrap_original do |m, *args|
        expect(args).to include(*paths)

        m.call(*args)
      end
    end
  end
end