# frozen_string_literal: true

require 'spec_helper'

# Specs for the structured Sidekiq job logger: verifies the JSON payloads
# emitted at job start/done/fail, argument scrubbing/truncation, scheduling
# latency, Gitaly/Rugged timing passthrough, and DB/CPU time accounting.
describe Gitlab::SidekiqLogging::StructuredLogger do
  describe '#call' do
    let(:timestamp) { Time.iso8601('2018-01-01T12:00:00.000Z') }
    let(:created_at) { timestamp - 1.second }
    let(:scheduling_latency_s) { 1.0 }

    # A representative Sidekiq job hash, including error fields that the
    # start payload is expected to strip out.
    let(:job) do
      {
        "class" => "TestWorker",
        "args" => [1234, 'hello', { 'key' => 'value' }],
        "retry" => false,
        "queue" => "cronjob:test_queue",
        "queue_namespace" => "cronjob",
        "jid" => "da883554ee4fe414012f5f42",
        "created_at" => created_at.to_f,
        "enqueued_at" => created_at.to_f,
        "correlation_id" => 'cid',
        "error_message" => "wrong number of arguments (2 for 3)",
        "error_class" => "ArgumentError",
        "error_backtrace" => []
      }
    end

    let(:logger) { double }
    # Fixed CPU-clock readings so cpu_s in the end payload is deterministic.
    let(:clock_thread_cputime_start) { 0.222222299 }
    let(:clock_thread_cputime_end) { 1.333333799 }

    let(:start_payload) do
      job.except('error_backtrace', 'error_class', 'error_message').merge(
        'args' => %w(1234 hello {"key"=>"value"}),
        'message' => 'TestWorker JID-da883554ee4fe414012f5f42: start',
        'job_status' => 'start',
        'pid' => Process.pid,
        'created_at' => created_at.to_f,
        'enqueued_at' => created_at.to_f,
        'scheduling_latency_s' => scheduling_latency_s
      )
    end

    let(:end_payload) do
      start_payload.merge(
        'message' => 'TestWorker JID-da883554ee4fe414012f5f42: done: 0.0 sec',
        'job_status' => 'done',
        'duration' => 0.0,
        'completed_at' => timestamp.to_f,
        'cpu_s' => 1.111112,
        'db_duration' => 0,
        'db_duration_s' => 0
      )
    end

    let(:exception_payload) do
      end_payload.merge(
        'message' => 'TestWorker JID-da883554ee4fe414012f5f42: fail: 0.0 sec',
        'job_status' => 'fail',
        'error_class' => 'ArgumentError',
        'error_message' => 'some exception'
      )
    end

    before do
      allow(Sidekiq).to receive(:logger).and_return(logger)

      allow(subject).to receive(:current_time).and_return(timestamp.to_f)

      # Two stubbed readings: one for job start, one for job end, yielding
      # the fixed cpu_s value asserted in end_payload.
      allow(Process).to receive(:clock_gettime).with(Process::CLOCK_THREAD_CPUTIME_ID).and_return(clock_thread_cputime_start, clock_thread_cputime_end)
    end

    subject { described_class.new }

    context 'with SIDEKIQ_LOG_ARGUMENTS enabled' do
      before do
        stub_env('SIDEKIQ_LOG_ARGUMENTS', '1')
      end

      it 'logs start and end of job' do
        Timecop.freeze(timestamp) do
          expect(logger).to receive(:info).with(start_payload).ordered
          expect(logger).to receive(:info).with(end_payload).ordered
          expect(subject).to receive(:log_job_start).and_call_original
          expect(subject).to receive(:log_job_done).and_call_original

          subject.call(job, 'test_queue') { }
        end
      end

      it 'logs an exception in job' do
        Timecop.freeze(timestamp) do
          expect(logger).to receive(:info).with(start_payload)
          expect(logger).to receive(:warn).with(hash_including(exception_payload))
          expect(subject).to receive(:log_job_start).and_call_original
          expect(subject).to receive(:log_job_done).and_call_original

          expect do
            subject.call(job, 'test_queue') do
              raise ArgumentError, 'some exception'
            end
          end.to raise_error(ArgumentError)
        end
      end

      it 'does not modify the job' do
        Timecop.freeze(timestamp) do
          job_copy = job.deep_dup

          allow(logger).to receive(:info)
          allow(subject).to receive(:log_job_start).and_call_original
          allow(subject).to receive(:log_job_done).and_call_original

          subject.call(job, 'test_queue') do
            expect(job).to eq(job_copy)
          end
        end
      end

      context 'when the job args are bigger than the maximum allowed' do
        it 'keeps args from the front until they exceed the limit' do
          Timecop.freeze(timestamp) do
            half_limit = Gitlab::Utils::LogLimitedArray::MAXIMUM_ARRAY_LENGTH / 2
            job['args'] = [1, 2, 'a' * half_limit, 'b' * half_limit, 3]

            # The first three args fit; the rest are replaced by '...'.
            expected_args = job['args'].take(3).map(&:to_s) + ['...']

            expect(logger).to receive(:info).with(start_payload.merge('args' => expected_args)).ordered
            expect(logger).to receive(:info).with(end_payload.merge('args' => expected_args)).ordered
            expect(subject).to receive(:log_job_start).and_call_original
            expect(subject).to receive(:log_job_done).and_call_original

            subject.call(job, 'test_queue') { }
          end
        end
      end
    end

    context 'with SIDEKIQ_LOG_ARGUMENTS disabled' do
      it 'logs start and end of job without args' do
        Timecop.freeze(timestamp) do
          expect(logger).to receive(:info).with(start_payload.except('args')).ordered
          expect(logger).to receive(:info).with(end_payload.except('args')).ordered
          expect(subject).to receive(:log_job_start).and_call_original
          expect(subject).to receive(:log_job_done).and_call_original

          subject.call(job, 'test_queue') { }
        end
      end

      it 'logs without created_at and enqueued_at fields' do
        Timecop.freeze(timestamp) do
          excluded_fields = %w(created_at enqueued_at args scheduling_latency_s)

          expect(logger).to receive(:info).with(start_payload.except(*excluded_fields)).ordered
          expect(logger).to receive(:info).with(end_payload.except(*excluded_fields)).ordered
          expect(subject).to receive(:log_job_start).and_call_original
          expect(subject).to receive(:log_job_done).and_call_original

          subject.call(job.except("created_at", "enqueued_at"), 'test_queue') { }
        end
      end
    end

    context 'with latency' do
      # Enqueued two hours before execution, so scheduling latency is 7200s.
      let(:created_at) { Time.iso8601('2018-01-01T10:00:00.000Z') }
      let(:scheduling_latency_s) { 7200.0 }

      it 'logs with scheduling latency' do
        Timecop.freeze(timestamp) do
          expect(logger).to receive(:info).with(start_payload.except('args')).ordered
          expect(logger).to receive(:info).with(end_payload.except('args')).ordered
          expect(subject).to receive(:log_job_start).and_call_original
          expect(subject).to receive(:log_job_done).and_call_original

          subject.call(job, 'test_queue') { }
        end
      end
    end

    context 'with Gitaly and Rugged calls' do
      let(:timing_data) do
        {
          gitaly_calls: 10,
          gitaly_duration: 10000,
          rugged_calls: 1,
          rugged_duration_ms: 5000
        }
      end

      before do
        job.merge!(timing_data)
      end

      it 'logs with Gitaly and Rugged timing data' do
        Timecop.freeze(timestamp) do
          expect(logger).to receive(:info).with(start_payload.except('args')).ordered
          expect(logger).to receive(:info).with(end_payload.except('args')).ordered

          subject.call(job, 'test_queue') { }
        end
      end
    end

    context 'when the job performs database queries' do
      before do
        allow(Time).to receive(:now).and_return(timestamp)
        # Use the real clock here so cpu_s reflects actual work.
        allow(Process).to receive(:clock_gettime).and_call_original
      end

      let(:expected_start_payload) { start_payload.except('args') }

      let(:expected_end_payload) do
        end_payload.except('args').merge('cpu_s' => a_value > 0)
      end

      let(:expected_end_payload_with_db) do
        expected_end_payload.merge(
          'db_duration' => a_value >= 100,
          'db_duration_s' => a_value >= 0.1
        )
      end

      it 'logs the database time' do
        expect(logger).to receive(:info).with(expected_start_payload).ordered
        expect(logger).to receive(:info).with(expected_end_payload_with_db).ordered

        subject.call(job, 'test_queue') { ActiveRecord::Base.connection.execute('SELECT pg_sleep(0.1);') }
      end

      it 'prevents database time from leaking to the next job' do
        expect(logger).to receive(:info).with(expected_start_payload).ordered
        expect(logger).to receive(:info).with(expected_end_payload_with_db).ordered
        expect(logger).to receive(:info).with(expected_start_payload).ordered
        expect(logger).to receive(:info).with(expected_end_payload).ordered

        subject.call(job, 'test_queue') { ActiveRecord::Base.connection.execute('SELECT pg_sleep(0.1);') }
        subject.call(job, 'test_queue') { }
      end
    end
  end

  describe '#add_time_keys!' do
    let(:time) { { duration: 0.1231234, cputime: 1.2342345 } }
    let(:payload) { { 'class' => 'my-class', 'message' => 'my-message', 'job_status' => 'my-job-status' } }
    let(:current_utc_time) { Time.now.utc }
    # Durations are expected to be rounded to 6 decimal places.
    let(:payload_with_time_keys) { { 'class' => 'my-class', 'message' => 'my-message', 'job_status' => 'my-job-status', 'duration' => 0.123123, 'cpu_s' => 1.234235, 'completed_at' => current_utc_time.to_f } }

    subject { described_class.new }

    it 'update payload correctly' do
      Timecop.freeze(current_utc_time) do
        subject.send(:add_time_keys!, time, payload)

        expect(payload).to eq(payload_with_time_keys)
      end
    end
  end
end
|