debian-mirror-gitlab/spec/models/ci/build_trace_chunk_spec.rb

# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state, :clean_gitlab_redis_trace_chunks do
include ExclusiveLeaseHelpers
let_it_be(:build) { create(:ci_build, :running) }
let(:chunk_index) { 0 }
let(:data_store) { :redis_trace_chunks }
let(:raw_data) { nil }
let(:build_trace_chunk) do
described_class.new(build: build, chunk_index: chunk_index, data_store: data_store, raw_data: raw_data)
end
it_behaves_like 'having unique enum values'
before do
stub_feature_flags(ci_enable_live_trace: true)
stub_artifacts_object_storage
end
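# Maps the data_store under test to the Redis wrapper that backs it, so the
# examples below can inspect keys in the correct Redis instance.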
def redis_instance
{
redis: Gitlab::Redis::SharedState,
redis_trace_chunks: Gitlab::Redis::TraceChunks
}[data_store]
end
describe 'chunk creation' do
let(:metrics) { spy('metrics') }
before do
allow(::Gitlab::Ci::Trace::Metrics)
.to receive(:new)
.and_return(metrics)
end
it 'increments trace operation chunked metric' do
build_trace_chunk.save!
expect(metrics)
.to have_received(:increment_trace_operation)
.with(operation: :chunked)
.once
end
end
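# FastDestroyAll behaviour: plain #destroy/#destroy_all are forbidden so that
# external chunk data is only removed through .fast_destroy_all (or a cascading
# fast destroy from the pipeline), as the examples below verify.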
context 'FastDestroyAll' do
let(:pipeline) { create(:ci_pipeline) }
let!(:build) { create(:ci_build, :running, :trace_live, pipeline: pipeline) }
let(:subjects) { build.trace_chunks }
describe 'Forbid #destroy and #destroy_all' do
it 'does not delete database rows and associated external data' do
expect(external_data_counter).to be > 0
expect(subjects.count).to be > 0
expect { subjects.first.destroy! }.to raise_error('`destroy` and `destroy_all` are forbidden. Please use `fast_destroy_all`')
expect { subjects.destroy_all }.to raise_error('`destroy` and `destroy_all` are forbidden. Please use `fast_destroy_all`') # rubocop: disable Cop/DestroyAll
expect(subjects.count).to be > 0
expect(external_data_counter).to be > 0
end
end
describe '.fast_destroy_all' do
it 'deletes database rows and associated external data' do
expect(external_data_counter).to be > 0
expect(subjects.count).to be > 0
expect { subjects.fast_destroy_all }.not_to raise_error
expect(subjects.count).to eq(0)
expect(external_data_counter).to eq(0)
end
end
describe '.use_fast_destroy' do
it 'performs cascading delete with fast_destroy_all' do
expect(external_data_counter).to be > 0
expect(subjects.count).to be > 0
expect { pipeline.destroy! }.not_to raise_error
expect(subjects.count).to eq(0)
expect(external_data_counter).to eq(0)
end
end
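# Counts the chunk keys left in Redis, used to assert that external data is
# removed (or preserved) together with the database rows.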
def external_data_counter
redis_instance.with do |redis|
redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size
end
end
end
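# Guard against accidental changes: CHUNK_SIZE is expected to stay at 128 KB.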
describe 'CHUNK_SIZE' do
it 'chunk size cannot be changed without special care' do
expect(described_class::CHUNK_SIZE).to eq(128.kilobytes)
end
end
describe '.all_stores' do
subject { described_class.all_stores }
it 'returns a correctly ordered array' do
is_expected.to eq(%i[redis database fog redis_trace_chunks])
end
end
describe '#data' do
subject { build_trace_chunk.data }
where(:data_store) { %i[redis redis_trace_chunks] }
with_them do
before do
build_trace_chunk.send(:unsafe_set_data!, +'Sample data in redis')
end
it { is_expected.to eq('Sample data in redis') }
end
context 'when data_store is database' do
let(:data_store) { :database }
let(:raw_data) { +'Sample data in database' }
it { is_expected.to eq('Sample data in database') }
end
context 'when data_store is fog' do
let(:data_store) { :fog }
before do
build_trace_chunk.send(:unsafe_set_data!, +'Sample data in fog')
end
2021-01-03 14:25:43 +05:30
2021-01-29 00:20:46 +05:30
it { is_expected.to eq('Sample data in fog') }
2021-01-03 14:25:43 +05:30
2021-01-29 00:20:46 +05:30
it 'returns a new Fog store' do
expect(described_class.get_store_class(data_store)).to be_a(Ci::BuildTraceChunks::Fog)
end
end
end
describe '#data_store' do
subject { described_class.new.data_store }
context 'default value' do
it { expect(subject).to eq('redis_trace_chunks') }
end
end
describe '#get_store_class' do
using RSpec::Parameterized::TableSyntax
where(:data_store, :expected_store) do
:redis | Ci::BuildTraceChunks::Redis
:database | Ci::BuildTraceChunks::Database
:fog | Ci::BuildTraceChunks::Fog
:redis_trace_chunks | Ci::BuildTraceChunks::RedisTraceChunks
end
with_them do
context "with store" do
it 'returns an instance of the right class' do
expect(expected_store).to receive(:new).twice.and_call_original
expect(described_class.get_store_class(data_store.to_s)).to be_a(expected_store)
expect(described_class.get_store_class(data_store.to_sym)).to be_a(expected_store)
end
end
end
it 'raises an error' do
expect { described_class.get_store_class('unknown') }.to raise_error('Unknown store type: unknown')
end
end
describe '#append' do
subject { build_trace_chunk.append(new_data, offset) }
let(:new_data) { +'Sample new data' }
let(:offset) { 0 }
let(:merged_data) { data + new_data.to_s }
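# Shared examples for #append covering offset validation, chunk-size overflow,
# lock contention, and the nil/empty new_data edge cases.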
shared_examples_for 'Appending correctly' do
context 'when offset is negative' do
let(:offset) { -1 }
it { expect { subject }.to raise_error('Offset is out of range') }
end
context 'when offset is bigger than data size' do
let(:offset) { data.bytesize + 1 }
it { expect { subject }.to raise_error('Offset is out of range') }
end
context 'when new data overflows chunk size' do
let(:new_data) { 'a' * (described_class::CHUNK_SIZE + 1) }
it { expect { subject }.to raise_error('Chunk size overflow') }
end
context 'when offset is EOF' do
let(:offset) { data.bytesize }
it 'appends' do
subject
expect(build_trace_chunk.data).to eq(merged_data)
end
context 'when the other process is appending' do
let(:lease_key) { "trace_write:#{build_trace_chunk.build.id}:chunks:#{build_trace_chunk.chunk_index}" }
before do
stub_exclusive_lease_taken(lease_key)
end
it 'raises an error' do
expect { subject }.to raise_error('Failed to obtain a lock')
end
end
context 'when new_data is nil' do
let(:new_data) { nil }
it 'raises an error' do
expect { subject }.to raise_error('New data is missing')
end
end
context 'when new_data is empty' do
let(:new_data) { +'' }
it 'does not append' do
subject
expect(build_trace_chunk.data).to eq(data)
end
it 'does not execute UPDATE' do
ActiveRecord::QueryRecorder.new { subject }.log.map do |query|
expect(query).not_to include('UPDATE')
end
end
end
end
context 'when offset is in the middle of the data' do
let(:offset) { data.bytesize / 2 }
it 'appends' do
2018-10-15 14:42:47 +05:30
subject
expect(build_trace_chunk.data).to eq(data.byteslice(0, offset) + new_data)
end
end
end
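# Once a live chunk is filled to CHUNK_SIZE, appending should schedule
# Ci::BuildTraceChunkFlushWorker to migrate the data to object storage.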
shared_examples_for 'Scheduling sidekiq worker to flush data to persist store' do
context 'when new data fills the chunk' do
let(:new_data) { +'a' * described_class::CHUNK_SIZE }
it 'schedules trace chunk flush worker' do
expect(Ci::BuildTraceChunkFlushWorker).to receive(:perform_async).once
subject
end
it 'migrates data to object storage', :sidekiq_might_not_need_inline do
perform_enqueued_jobs do
subject
build_trace_chunk.reload
expect(build_trace_chunk.checksum).to be_present
expect(build_trace_chunk.fog?).to be_truthy
expect(build_trace_chunk.data).to eq(new_data)
end
end
end
end
shared_examples_for 'Scheduling no sidekiq worker' do
context 'when new data fills the chunk' do
let(:new_data) { +'a' * described_class::CHUNK_SIZE }
it 'does not schedule trace chunk flush worker' do
expect(Ci::BuildTraceChunkFlushWorker).not_to receive(:perform_async)
subject
end
it 'does not migrate data to object storage' do
perform_enqueued_jobs do
data_store = build_trace_chunk.data_store
subject
build_trace_chunk.reload
expect(build_trace_chunk.data_store).to eq(data_store)
end
end
end
end
where(:data_store) { %i[redis redis_trace_chunks] }
with_them do
context 'when there is no data' do
let(:data) { +'' }
it 'has no data' do
expect(build_trace_chunk.data).to be_empty
end
it 'does not read data when appending' do
expect(build_trace_chunk).not_to receive(:data)
build_trace_chunk.append(new_data, offset)
end
it_behaves_like 'Appending correctly'
it_behaves_like 'Scheduling sidekiq worker to flush data to persist store'
end
context 'when there is some data' do
let(:data) { +'Sample data in redis' }
before do
build_trace_chunk.send(:unsafe_set_data!, data)
end
it 'has data' do
expect(build_trace_chunk.data).to eq(data)
end
it_behaves_like 'Appending correctly'
it_behaves_like 'Scheduling sidekiq worker to flush data to persist store'
end
end
context 'when data_store is database' do
let(:data_store) { :database }
context 'when there is no data' do
let(:data) { +'' }
it 'has no data' do
expect(build_trace_chunk.data).to be_empty
end
it_behaves_like 'Appending correctly'
it_behaves_like 'Scheduling no sidekiq worker'
end
context 'when there is some data' do
let(:raw_data) { +'Sample data in database' }
let(:data) { raw_data }
it 'has data' do
expect(build_trace_chunk.data).to eq(data)
end
it_behaves_like 'Appending correctly'
it_behaves_like 'Scheduling no sidekiq worker'
end
end
context 'when data_store is fog' do
let(:data_store) { :fog }
context 'when there is no data' do
let(:data) { +'' }
it 'has no data' do
expect(build_trace_chunk.data).to be_empty
end
it_behaves_like 'Appending correctly'
it_behaves_like 'Scheduling no sidekiq worker'
end
context 'when there is some data' do
let(:data) { +'Sample data in fog' }
before do
build_trace_chunk.send(:unsafe_set_data!, data)
end
it 'has data' do
expect(build_trace_chunk.data).to eq(data)
end
it_behaves_like 'Appending correctly'
it_behaves_like 'Scheduling no sidekiq worker'
end
end
describe 'append metrics' do
let(:metrics) { spy('metrics') }
before do
allow(::Gitlab::Ci::Trace::Metrics)
.to receive(:new)
.and_return(metrics)
end
it 'increments trace operation appended metric' do
build_trace_chunk.append('123456', 0)
expect(metrics)
.to have_received(:increment_trace_operation)
.with(operation: :appended)
end
end
end
describe '#truncate' do
subject { build_trace_chunk.truncate(offset) }
shared_examples_for 'truncates' do
context 'when offset is negative' do
let(:offset) { -1 }
it { expect { subject }.to raise_error('Offset is out of range') }
end
context 'when offset is bigger than data size' do
let(:offset) { data.bytesize + 1 }
it { expect { subject }.to raise_error('Offset is out of range') }
end
context 'when offset is 10' do
let(:offset) { 10 }
it 'truncates' do
subject
expect(build_trace_chunk.data).to eq(data.byteslice(0, offset))
end
end
end
where(:data_store) { %i[redis redis_trace_chunks] }
with_them do
let(:data) { +'Sample data in redis' }
before do
build_trace_chunk.send(:unsafe_set_data!, data)
end
it_behaves_like 'truncates'
end
context 'when data_store is database' do
let(:data_store) { :database }
let(:raw_data) { +'Sample data in database' }
let(:data) { raw_data }
it_behaves_like 'truncates'
end
context 'when data_store is fog' do
let(:data_store) { :fog }
let(:data) { +'Sample data in fog' }
before do
build_trace_chunk.send(:unsafe_set_data!, data)
end
it_behaves_like 'truncates'
end
end
describe '#size' do
subject { build_trace_chunk.size }
where(:data_store) { %i[redis redis_trace_chunks] }
with_them do
context 'when data exists' do
let(:data) { +'Sample data in redis' }
before do
build_trace_chunk.send(:unsafe_set_data!, data)
end
it { is_expected.to eq(data.bytesize) }
end
context 'when data does not exist' do
it { is_expected.to eq(0) }
end
end
context 'when data_store is database' do
let(:data_store) { :database }
context 'when data exists' do
let(:raw_data) { +'Sample data in database' }
let(:data) { raw_data }
it { is_expected.to eq(data.bytesize) }
end
context 'when data does not exist' do
it { is_expected.to eq(0) }
end
end
context 'when data_store is fog' do
let(:data_store) { :fog }
context 'when data exists' do
let(:data) { +'Sample data in fog' }
let(:key) { "tmp/builds/#{build.id}/chunks/#{chunk_index}.log" }
before do
build_trace_chunk.send(:unsafe_set_data!, data)
end
it { is_expected.to eq(data.bytesize) }
end
context 'when data does not exist' do
it { is_expected.to eq(0) }
end
end
end
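# #persist_data! flushes a chunk to the fog store. The examples verify that
# partial chunks are only flushed when the chunk is final, and that concurrent
# modification is detected via optimistic locking.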
describe '#persist_data!' do
let(:build) { create(:ci_build, :running) }
before do
build_trace_chunk.save!
end
subject { build_trace_chunk.persist_data! }
where(:data_store, :redis_class) do
[
[:redis, Ci::BuildTraceChunks::Redis],
[:redis_trace_chunks, Ci::BuildTraceChunks::RedisTraceChunks]
]
end
with_them do
context 'when data exists' do
before do
build_trace_chunk.send(:unsafe_set_data!, data)
end
context 'when data size reached CHUNK_SIZE' do
let(:data) { +'a' * described_class::CHUNK_SIZE }
it 'persists the data' do
expect(build_trace_chunk.data_store).to eq(data_store.to_s)
expect(redis_class.new.data(build_trace_chunk)).to eq(data)
expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to be_nil
subject
expect(build_trace_chunk.fog?).to be_truthy
expect(redis_class.new.data(build_trace_chunk)).to be_nil
expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
end
it 'calculates CRC32 checksum' do
subject
expect(build_trace_chunk.reload.checksum).to eq '3398914352'
end
end
context 'when data size has not reached CHUNK_SIZE' do
let(:data) { +'Sample data in redis' }
it 'does not persist the data and the original data is intact' do
expect { subject }.to raise_error(described_class::FailedToPersistDataError)
expect(build_trace_chunk.data_store).to eq(data_store.to_s)
expect(redis_class.new.data(build_trace_chunk)).to eq(data)
expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to be_nil
end
context 'when chunk is a final one' do
before do
create(:ci_build_pending_state, build: build)
end
it 'persists the data' do
subject
expect(build_trace_chunk.fog?).to be_truthy
end
end
context 'when the chunk has been modified by a different worker' do
it 'reloads the chunk before migration' do
described_class
.find(build_trace_chunk.id)
.update!(data_store: :fog)
build_trace_chunk.persist_data!
end
it 'verifies the operation using optimistic locking' do
allow(build_trace_chunk)
.to receive(:save!)
.and_raise(ActiveRecord::StaleObjectError)
expect { build_trace_chunk.persist_data! }
.to raise_error(described_class::FailedToPersistDataError)
end
it 'does not allow flushing unpersisted chunk' do
build_trace_chunk.checksum = '12345'
expect { build_trace_chunk.persist_data! }
.to raise_error(described_class::FailedToPersistDataError,
/Modifed build trace chunk detected/)
end
end
context 'when the chunk is being locked by a different worker' do
let(:metrics) { spy('metrics') }
it 'increments stalled chunk trace metric' do
allow(build_trace_chunk)
.to receive(:metrics)
.and_return(metrics)
expect do
subject
expect(metrics)
.to have_received(:increment_trace_operation)
.with(operation: :stalled)
.once
end.to raise_error(described_class::FailedToPersistDataError)
end
def lock_chunk(&block)
"trace_write:#{build.id}:chunks:#{chunk_index}".then do |key|
build_trace_chunk.in_lock(key, &block)
end
end
end
end
end
context 'when data does not exist' do
it 'does not persist' do
expect { subject }.to raise_error(described_class::FailedToPersistDataError)
end
end
end
context 'when data_store is database' do
let(:data_store) { :database }
context 'when data exists' do
before do
build_trace_chunk.send(:unsafe_set_data!, data)
end
context 'when data size reached CHUNK_SIZE' do
let(:data) { +'a' * described_class::CHUNK_SIZE }
it 'persists the data' do
expect(build_trace_chunk.database?).to be_truthy
expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to eq(data)
expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to be_nil
subject
expect(build_trace_chunk.fog?).to be_truthy
expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
end
end
context 'when data size has not reached CHUNK_SIZE' do
let(:data) { +'Sample data in database' }
it 'does not persist the data and the original data is intact' do
expect { subject }.to raise_error(described_class::FailedToPersistDataError)
expect(build_trace_chunk.database?).to be_truthy
expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to eq(data)
expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to be_nil
end
context 'when chunk is a final one' do
before do
create(:ci_build_pending_state, build: build)
end
it 'persists the data' do
subject
expect(build_trace_chunk.fog?).to be_truthy
end
end
end
end
context 'when data does not exist' do
it 'does not persist' do
expect { subject }.to raise_error(described_class::FailedToPersistDataError)
end
end
end
context 'when data_store is fog' do
let(:data_store) { :fog }
context 'when data exists' do
before do
build_trace_chunk.send(:unsafe_set_data!, data)
end
context 'when data size reached CHUNK_SIZE' do
let(:data) { 'a' * described_class::CHUNK_SIZE }
it 'does not change data store' do
expect(build_trace_chunk.fog?).to be_truthy
expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
subject
expect(build_trace_chunk.fog?).to be_truthy
expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
end
end
context 'when data size has not reached CHUNK_SIZE' do
let(:data) { +'Sample data in fog' }
it 'does not raise error' do
expect { subject }.not_to raise_error
end
end
end
end
end
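# A chunk counts as final only when the build has a pending state and the
# chunk is the last one for that build.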
describe 'final?' do
let(:build) { create(:ci_build, :running) }
context 'when build pending state exists' do
before do
create(:ci_build_pending_state, build: build)
end
context 'when the chunk is not the last one' do
before do
create(:ci_build_trace_chunk, chunk_index: 1, build: build)
end
it 'is not a final chunk' do
expect(build.reload.pending_state).to be_present
expect(build_trace_chunk).not_to be_final
end
end
context 'when the chunk is the last one' do
it 'is a final chunk' do
expect(build.reload.pending_state).to be_present
expect(build_trace_chunk).to be_final
end
end
end
context 'when build pending state does not exist' do
context 'when the chunk is not the last one' do
before do
create(:ci_build_trace_chunk, chunk_index: 1, build: build)
end
it 'is not a final chunk' do
expect(build.reload.pending_state).not_to be_present
expect(build_trace_chunk).not_to be_final
end
end
context 'when the chunk is the last one' do
it 'is not a final chunk' do
expect(build.reload.pending_state).not_to be_present
expect(build_trace_chunk).not_to be_final
end
end
end
end
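# Archiving the traces or destroying the project should remove both the
# build_trace_chunk rows and the chunk data stored in Redis.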
describe 'deletes data in redis after a parent record is destroyed' do
let(:project) { create(:project) }
before do
pipeline = create(:ci_pipeline, project: project)
create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project)
create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project)
create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project)
end
shared_examples_for 'deletes all build_trace_chunk and data in redis' do
it 'deletes all build_trace_chunk and data in redis', :sidekiq_might_not_need_inline do
redis_instance.with do |redis|
expect(redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size).to eq(3)
end
expect(described_class.count).to eq(3)
expect(subject).to be_truthy
expect(described_class.count).to eq(0)
redis_instance.with do |redis|
expect(redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size).to eq(0)
end
end
end
context 'when traces are archived' do
let(:subject) do
project.builds.each do |build|
build.reset.success!
end
end
it_behaves_like 'deletes all build_trace_chunk and data in redis'
end
context 'when project is destroyed' do
let(:subject) do
Projects::DestroyService.new(project, project.first_owner).execute
end
it_behaves_like 'deletes all build_trace_chunk and data in redis'
end
end
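# Chunks are ordered by chunk_index within the same build; chunks from
# different builds are not comparable.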
describe 'comparable build trace chunks' do
describe '#<=>' do
context 'when chunks are associated with different builds' do
let(:first) { create(:ci_build_trace_chunk, build: build, chunk_index: 1) }
let(:second) { create(:ci_build_trace_chunk, chunk_index: 1) }
it 'returns nil' do
expect(first <=> second).to be_nil
end
end
context 'when there are two chunks with different indexes' do
let(:first) { create(:ci_build_trace_chunk, build: build, chunk_index: 1) }
let(:second) { create(:ci_build_trace_chunk, build: build, chunk_index: 0) }
it 'indicates that the first one is greater than the second' do
expect(first <=> second).to eq 1
end
end
context 'when there are two chunks with the same index within the same build' do
let(:chunk) { create(:ci_build_trace_chunk) }
it 'indicates that these are equal' do
expect(chunk <=> chunk).to be_zero # rubocop:disable Lint/UselessComparison
end
end
end
describe '#==' do
context 'when chunks have the same index' do
let(:chunk) { create(:ci_build_trace_chunk) }
it 'indicates that the chunks are equal' do
expect(chunk).to eq chunk
end
end
context 'when chunks have different indexes' do
let(:first) { create(:ci_build_trace_chunk, build: build, chunk_index: 1) }
let(:second) { create(:ci_build_trace_chunk, build: build, chunk_index: 0) }
it 'indicates that the chunks are not equal' do
expect(first).not_to eq second
end
end
context 'when chunks are associated with different builds' do
let(:first) { create(:ci_build_trace_chunk, build: build, chunk_index: 1) }
let(:second) { create(:ci_build_trace_chunk, chunk_index: 1) }
it 'indicates that the chunks are not equal' do
expect(first).not_to eq second
end
end
end
end
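# Redis-backed stores count as live; database and fog count as flushed.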
describe '#live?' do
subject { build_trace_chunk.live? }
where(:data_store, :value) do
[
[:redis, true],
[:redis_trace_chunks, true],
[:database, false],
[:fog, false]
]
end
with_them do
it { is_expected.to eq(value) }
end
end
describe '#flushed?' do
subject { build_trace_chunk.flushed? }
where(:data_store, :value) do
[
[:redis, false],
[:redis_trace_chunks, false],
[:database, true],
[:fog, true]
]
end
with_them do
it { is_expected.to eq(value) }
end
end
end