# frozen_string_literal: true

require 'spec_helper'

describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
  include ExclusiveLeaseHelpers

  let_it_be(:build) { create(:ci_build, :running) }

  let(:chunk_index) { 0 }
  let(:data_store) { :redis }
  let(:raw_data) { nil }

  let(:build_trace_chunk) do
    described_class.new(build: build, chunk_index: chunk_index, data_store: data_store, raw_data: raw_data)
  end

  it_behaves_like 'having unique enum values'

  before do
    stub_feature_flags(ci_enable_live_trace: true)
    stub_artifacts_object_storage
  end

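  # FastDestroyAll: plain #destroy and #destroy_all are forbidden for trace chunks
  # so that database rows cannot be removed while their external (Redis) data is
  # left behind; cleanup must go through fast_destroy_all, which removes both.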
  context 'FastDestroyAll' do
    let(:parent) { create(:project) }
    let(:pipeline) { create(:ci_pipeline, project: parent) }
    let!(:build) { create(:ci_build, :running, :trace_live, pipeline: pipeline, project: parent) }
    let(:subjects) { build.trace_chunks }

    describe 'Forbid #destroy and #destroy_all' do
      it 'does not delete database rows and associated external data' do
        expect(external_data_counter).to be > 0
        expect(subjects.count).to be > 0

        expect { subjects.first.destroy }.to raise_error('`destroy` and `destroy_all` are forbidden. Please use `fast_destroy_all`')
        expect { subjects.destroy_all }.to raise_error('`destroy` and `destroy_all` are forbidden. Please use `fast_destroy_all`') # rubocop: disable Cop/DestroyAll

        expect(subjects.count).to be > 0
        expect(external_data_counter).to be > 0
      end
    end

    describe '.fast_destroy_all' do
      it 'deletes database rows and associated external data' do
        expect(external_data_counter).to be > 0
        expect(subjects.count).to be > 0

        expect { subjects.fast_destroy_all }.not_to raise_error

        expect(subjects.count).to eq(0)
        expect(external_data_counter).to eq(0)
      end
    end

    describe '.use_fast_destroy' do
      it 'performs cascading delete with fast_destroy_all' do
        expect(external_data_counter).to be > 0
        expect(subjects.count).to be > 0

        expect { parent.destroy }.not_to raise_error

        expect(subjects.count).to eq(0)
        expect(external_data_counter).to eq(0)
      end
    end

    # Counts the Redis keys that hold live trace chunk data for any build.
    def external_data_counter
      Gitlab::Redis::SharedState.with do |redis|
        redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size
      end
    end
  end

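  # Guard spec: a single trace chunk is fixed at 128 kilobytes. Per the example
  # below, CHUNK_SIZE must not be changed without special care for existing chunks.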
  describe 'CHUNK_SIZE' do
    it 'cannot be changed without special care' do
      expect(described_class::CHUNK_SIZE).to eq(128.kilobytes)
    end
  end

  describe '.all_stores' do
    subject { described_class.all_stores }

    it 'returns a correctly ordered array' do
      is_expected.to eq(%w[redis database fog])
    end

    it 'returns redis store as the lowest precedence' do
      expect(subject.first).to eq('redis')
    end

    it 'returns fog store as the highest precedence' do
      expect(subject.last).to eq('fog')
    end
  end

  describe '#data' do
    subject { build_trace_chunk.data }

    context 'when data_store is redis' do
      let(:data_store) { :redis }

      before do
        build_trace_chunk.send(:unsafe_set_data!, +'Sample data in redis')
      end

      it { is_expected.to eq('Sample data in redis') }
    end

    context 'when data_store is database' do
      let(:data_store) { :database }
      let(:raw_data) { +'Sample data in database' }

      it { is_expected.to eq('Sample data in database') }
    end

    context 'when data_store is fog' do
      let(:data_store) { :fog }

      before do
        build_trace_chunk.send(:unsafe_set_data!, +'Sample data in fog')
      end

      it { is_expected.to eq('Sample data in fog') }
    end
  end

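  # #append writes new_data into the chunk at the given byte offset. The
  # 'Appending correctly' shared examples below expect the including context to
  # define `data` (the chunk's current content).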
  describe '#append' do
    subject { build_trace_chunk.append(new_data, offset) }

    let(:new_data) { +'Sample new data' }
    let(:offset) { 0 }
    let(:merged_data) { data + new_data.to_s }

    shared_examples_for 'Appending correctly' do
      context 'when offset is negative' do
        let(:offset) { -1 }

        it { expect { subject }.to raise_error('Offset is out of range') }
      end

      context 'when offset is bigger than data size' do
        let(:offset) { data.bytesize + 1 }

        it { expect { subject }.to raise_error('Offset is out of range') }
      end

      context 'when new data overflows chunk size' do
        let(:new_data) { 'a' * (described_class::CHUNK_SIZE + 1) }

        it { expect { subject }.to raise_error('Chunk size overflow') }
      end

      context 'when offset is EOF' do
        let(:offset) { data.bytesize }

        it 'appends' do
          subject

          expect(build_trace_chunk.data).to eq(merged_data)
        end

        context 'when the other process is appending' do
          let(:lease_key) { "trace_write:#{build_trace_chunk.build.id}:chunks:#{build_trace_chunk.chunk_index}" }

          before do
            stub_exclusive_lease_taken(lease_key)
          end

          it 'raises an error' do
            expect { subject }.to raise_error('Failed to obtain a lock')
          end
        end

        context 'when new_data is nil' do
          let(:new_data) { nil }

          it 'raises an error' do
            expect { subject }.to raise_error('New data is missing')
          end
        end

        context 'when new_data is empty' do
          let(:new_data) { +'' }

          it 'does not append' do
            subject

            expect(build_trace_chunk.data).to eq(data)
          end

          it 'does not execute UPDATE' do
            ActiveRecord::QueryRecorder.new { subject }.log.map do |query|
              expect(query).not_to include('UPDATE')
            end
          end
        end
      end

      context 'when offset is middle of datasize' do
        let(:offset) { data.bytesize / 2 }

        it 'appends' do
          subject

          expect(build_trace_chunk.data).to eq(data.byteslice(0, offset) + new_data)
        end
      end
    end

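    # Appending enough data to fill a chunk (CHUNK_SIZE bytes) is expected to
    # schedule Ci::BuildTraceChunkFlushWorker, which migrates the chunk to object
    # storage; for the database and fog stores no worker should be scheduled.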
    shared_examples_for 'Scheduling sidekiq worker to flush data to persist store' do
      context 'when new data fulfilled chunk size' do
        let(:new_data) { +'a' * described_class::CHUNK_SIZE }

        it 'schedules trace chunk flush worker' do
          expect(Ci::BuildTraceChunkFlushWorker).to receive(:perform_async).once

          subject
        end

        it 'migrates data to object storage', :sidekiq_might_not_need_inline do
          perform_enqueued_jobs do
            subject

            build_trace_chunk.reload
            expect(build_trace_chunk.fog?).to be_truthy
            expect(build_trace_chunk.data).to eq(new_data)
          end
        end
      end
    end

    shared_examples_for 'Scheduling no sidekiq worker' do
      context 'when new data fulfilled chunk size' do
        let(:new_data) { +'a' * described_class::CHUNK_SIZE }

        it 'does not schedule trace chunk flush worker' do
          expect(Ci::BuildTraceChunkFlushWorker).not_to receive(:perform_async)

          subject
        end

        it 'does not migrate data to object storage' do
          perform_enqueued_jobs do
            data_store = build_trace_chunk.data_store

            subject

            build_trace_chunk.reload
            expect(build_trace_chunk.data_store).to eq(data_store)
          end
        end
      end
    end

    context 'when data_store is redis' do
      let(:data_store) { :redis }

      context 'when there are no data' do
        let(:data) { +'' }

        it 'has no data' do
          expect(build_trace_chunk.data).to be_empty
        end

        it_behaves_like 'Appending correctly'
        it_behaves_like 'Scheduling sidekiq worker to flush data to persist store'
      end

      context 'when there are some data' do
        let(:data) { +'Sample data in redis' }

        before do
          build_trace_chunk.send(:unsafe_set_data!, data)
        end

        it 'has data' do
          expect(build_trace_chunk.data).to eq(data)
        end

        it_behaves_like 'Appending correctly'
        it_behaves_like 'Scheduling sidekiq worker to flush data to persist store'
      end
    end

    context 'when data_store is database' do
      let(:data_store) { :database }

      context 'when there are no data' do
        let(:data) { +'' }

        it 'has no data' do
          expect(build_trace_chunk.data).to be_empty
        end

        it_behaves_like 'Appending correctly'
        it_behaves_like 'Scheduling no sidekiq worker'
      end

      context 'when there are some data' do
        let(:raw_data) { +'Sample data in database' }
        let(:data) { raw_data }

        it 'has data' do
          expect(build_trace_chunk.data).to eq(data)
        end

        it_behaves_like 'Appending correctly'
        it_behaves_like 'Scheduling no sidekiq worker'
      end
    end

    context 'when data_store is fog' do
      let(:data_store) { :fog }

      context 'when there are no data' do
        let(:data) { +'' }

        it 'has no data' do
          expect(build_trace_chunk.data).to be_empty
        end

        it_behaves_like 'Appending correctly'
        it_behaves_like 'Scheduling no sidekiq worker'
      end

      context 'when there are some data' do
        let(:data) { +'Sample data in fog' }

        before do
          build_trace_chunk.send(:unsafe_set_data!, data)
        end

        it 'has data' do
          expect(build_trace_chunk.data).to eq(data)
        end

        it_behaves_like 'Appending correctly'
        it_behaves_like 'Scheduling no sidekiq worker'
      end
    end
  end

  describe '#truncate' do
    subject { build_trace_chunk.truncate(offset) }

    shared_examples_for 'truncates' do
      context 'when offset is negative' do
        let(:offset) { -1 }

        it { expect { subject }.to raise_error('Offset is out of range') }
      end

      context 'when offset is bigger than data size' do
        let(:offset) { data.bytesize + 1 }

        it { expect { subject }.to raise_error('Offset is out of range') }
      end

      context 'when offset is 10' do
        let(:offset) { 10 }

        it 'truncates' do
          subject

          expect(build_trace_chunk.data).to eq(data.byteslice(0, offset))
        end
      end
    end

    context 'when data_store is redis' do
      let(:data_store) { :redis }
      let(:data) { +'Sample data in redis' }

      before do
        build_trace_chunk.send(:unsafe_set_data!, data)
      end

      it_behaves_like 'truncates'
    end

    context 'when data_store is database' do
      let(:data_store) { :database }
      let(:raw_data) { +'Sample data in database' }
      let(:data) { raw_data }

      it_behaves_like 'truncates'
    end

    context 'when data_store is fog' do
      let(:data_store) { :fog }
      let(:data) { +'Sample data in fog' }

      before do
        build_trace_chunk.send(:unsafe_set_data!, data)
      end

      it_behaves_like 'truncates'
    end
  end

  describe '#size' do
    subject { build_trace_chunk.size }

    context 'when data_store is redis' do
      let(:data_store) { :redis }

      context 'when data exists' do
        let(:data) { +'Sample data in redis' }

        before do
          build_trace_chunk.send(:unsafe_set_data!, data)
        end

        it { is_expected.to eq(data.bytesize) }
      end

      context 'when data does not exist' do
        it { is_expected.to eq(0) }
      end
    end

    context 'when data_store is database' do
      let(:data_store) { :database }

      context 'when data exists' do
        let(:raw_data) { +'Sample data in database' }
        let(:data) { raw_data }

        it { is_expected.to eq(data.bytesize) }
      end

      context 'when data does not exist' do
        it { is_expected.to eq(0) }
      end
    end

    context 'when data_store is fog' do
      let(:data_store) { :fog }

      context 'when data exists' do
        let(:data) { +'Sample data in fog' }
        let(:key) { "tmp/builds/#{build.id}/chunks/#{chunk_index}.log" }

        before do
          build_trace_chunk.send(:unsafe_set_data!, data)
        end

        it { is_expected.to eq(data.bytesize) }
      end

      context 'when data does not exist' do
        it { is_expected.to eq(0) }
      end
    end
  end

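  # #persist_data! moves a full chunk into object storage (fog). Redis- and
  # database-backed chunks that have not reached CHUNK_SIZE raise
  # FailedToPersistDataError and keep their original data.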
  describe '#persist_data!' do
    subject { build_trace_chunk.persist_data! }

    shared_examples_for 'Atomic operation' do
      context 'when the other process is persisting' do
        let(:lease_key) { "trace_write:#{build_trace_chunk.build.id}:chunks:#{build_trace_chunk.chunk_index}" }

        before do
          stub_exclusive_lease_taken(lease_key)
        end

        it 'raises an error' do
          expect { subject }.to raise_error('Failed to obtain a lock')
        end
      end
    end

    context 'when data_store is redis' do
      let(:data_store) { :redis }

      context 'when data exists' do
        before do
          build_trace_chunk.send(:unsafe_set_data!, data)
        end

        context 'when data size reached CHUNK_SIZE' do
          let(:data) { +'a' * described_class::CHUNK_SIZE }

          it 'persists the data' do
            expect(build_trace_chunk.redis?).to be_truthy
            expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to eq(data)
            expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
            expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)

            subject

            expect(build_trace_chunk.fog?).to be_truthy
            expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
            expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
            expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
          end

          it_behaves_like 'Atomic operation'
        end

        context 'when data size has not reached CHUNK_SIZE' do
          let(:data) { +'Sample data in redis' }

          it 'does not persist the data and the original data is intact' do
            expect { subject }.to raise_error(described_class::FailedToPersistDataError)

            expect(build_trace_chunk.redis?).to be_truthy
            expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to eq(data)
            expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
            expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)
          end
        end
      end

      context 'when data does not exist' do
        it 'does not persist' do
          expect { subject }.to raise_error(described_class::FailedToPersistDataError)
        end
      end
    end

    context 'when data_store is database' do
      let(:data_store) { :database }

      context 'when data exists' do
        before do
          build_trace_chunk.send(:unsafe_set_data!, data)
        end

        context 'when data size reached CHUNK_SIZE' do
          let(:data) { +'a' * described_class::CHUNK_SIZE }

          it 'persists the data' do
            expect(build_trace_chunk.database?).to be_truthy
            expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
            expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to eq(data)
            expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)

            subject

            expect(build_trace_chunk.fog?).to be_truthy
            expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
            expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
            expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
          end

          it_behaves_like 'Atomic operation'
        end

        context 'when data size has not reached CHUNK_SIZE' do
          let(:data) { +'Sample data in database' }

          it 'does not persist the data and the original data is intact' do
            expect { subject }.to raise_error(described_class::FailedToPersistDataError)

            expect(build_trace_chunk.database?).to be_truthy
            expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
            expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to eq(data)
            expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)
          end
        end
      end

      context 'when data does not exist' do
        it 'does not persist' do
          expect { subject }.to raise_error(described_class::FailedToPersistDataError)
        end
      end
    end

    context 'when data_store is fog' do
      let(:data_store) { :fog }

      context 'when data exists' do
        before do
          build_trace_chunk.send(:unsafe_set_data!, data)
        end

        context 'when data size reached CHUNK_SIZE' do
          let(:data) { 'a' * described_class::CHUNK_SIZE }

          it 'does not change data store' do
            expect(build_trace_chunk.fog?).to be_truthy
            expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
            expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
            expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)

            subject

            expect(build_trace_chunk.fog?).to be_truthy
            expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
            expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
            expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
          end

          it_behaves_like 'Atomic operation'
        end

        context 'when data size has not reached CHUNK_SIZE' do
          let(:data) { +'Sample data in fog' }

          it 'does not raise error' do
            expect { subject }.not_to raise_error
          end
        end
      end
    end
  end

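  # Live trace data is kept in Redis under keys matching "gitlab:ci:trace:*:chunks:*".
  # Archiving the builds' traces or destroying the project should remove both the
  # build_trace_chunk rows and those Redis keys.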
  describe 'deletes data in redis after a parent record is destroyed' do
    let(:project) { create(:project) }

    before do
      pipeline = create(:ci_pipeline, project: project)
      create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project)
      create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project)
      create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project)
    end

    shared_examples_for 'deletes all build_trace_chunk and data in redis' do
      it 'deletes all build_trace_chunk and data in redis', :sidekiq_might_not_need_inline do
        Gitlab::Redis::SharedState.with do |redis|
          expect(redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size).to eq(3)
        end

        expect(described_class.count).to eq(3)

        subject

        expect(described_class.count).to eq(0)

        Gitlab::Redis::SharedState.with do |redis|
          expect(redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size).to eq(0)
        end
      end
    end

    context 'when traces are archived' do
      let(:subject) do
        project.builds.each do |build|
          build.reset.success!
        end
      end

      it_behaves_like 'deletes all build_trace_chunk and data in redis'
    end

    context 'when project is destroyed' do
      let(:subject) do
        project.destroy!
      end

      it_behaves_like 'deletes all build_trace_chunk and data in redis'
    end
  end
end