# frozen_string_literal: true
#
# debian-mirror-gitlab/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb

require 'spec_helper'
# Specs for the Sidekiq structured JSON logger: verifies the start/done/fail
# log payloads emitted around each job, plus the private #add_time_keys! helper.
RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
  describe '#call' do
    let(:timestamp) { Time.iso8601('2018-01-01T12:00:00.000Z') }
    let(:created_at) { timestamp - 1.second }
    let(:scheduling_latency_s) { 1.0 }

    # A representative Sidekiq job hash, including error fields that the
    # start payload is expected to strip out.
    let(:job) do
      {
        "class" => "TestWorker",
        "args" => [1234, 'hello', { 'key' => 'value' }],
        "retry" => false,
        "queue" => "cronjob:test_queue",
        "queue_namespace" => "cronjob",
        "jid" => "da883554ee4fe414012f5f42",
        "created_at" => created_at.to_f,
        "enqueued_at" => created_at.to_f,
        "correlation_id" => 'cid',
        "error_message" => "wrong number of arguments (2 for 3)",
        "error_class" => "ArgumentError",
        "error_backtrace" => [],
        "db_count" => 1,
        "db_write_count" => 0,
        "db_cached_count" => 0
      }
    end

    let(:logger) { double }

    # Stubbed thread-CPU clock readings; their difference (1.1111115, rounded
    # to 1.111112) is the 'cpu_s' value expected in the done payload below.
    let(:clock_thread_cputime_start) { 0.222222299 }
    let(:clock_thread_cputime_end) { 1.333333799 }

    let(:start_payload) do
      job.except('error_backtrace', 'error_class', 'error_message').merge(
        'message' => 'TestWorker JID-da883554ee4fe414012f5f42: start',
        'job_status' => 'start',
        'pid' => Process.pid,
        'created_at' => created_at.to_f,
        'enqueued_at' => created_at.to_f,
        'scheduling_latency_s' => scheduling_latency_s
      )
    end

    let(:end_payload) do
      start_payload.merge(
        'message' => 'TestWorker JID-da883554ee4fe414012f5f42: done: 0.0 sec',
        'job_status' => 'done',
        'duration_s' => 0.0,
        'completed_at' => timestamp.to_f,
        'cpu_s' => 1.111112,
        'db_duration_s' => 0.0
      )
    end

    let(:exception_payload) do
      end_payload.merge(
        'message' => 'TestWorker JID-da883554ee4fe414012f5f42: fail: 0.0 sec',
        'job_status' => 'fail',
        'error_class' => 'ArgumentError',
        'error_message' => 'some exception'
      )
    end

    before do
      allow(Sidekiq).to receive(:logger).and_return(logger)
      # Pin wall-clock and thread-CPU time so duration_s/cpu_s are deterministic.
      allow(subject).to receive(:current_time).and_return(timestamp.to_f)
      allow(Process).to receive(:clock_gettime).with(Process::CLOCK_THREAD_CPUTIME_ID).and_return(clock_thread_cputime_start, clock_thread_cputime_end)
    end

    subject { described_class.new }

    context 'with SIDEKIQ_LOG_ARGUMENTS enabled' do
      before do
        stub_env('SIDEKIQ_LOG_ARGUMENTS', '1')
      end

      it 'logs start and end of job' do
        Timecop.freeze(timestamp) do
          expect(logger).to receive(:info).with(start_payload).ordered
          expect(logger).to receive(:info).with(end_payload).ordered
          expect(subject).to receive(:log_job_start).and_call_original
          expect(subject).to receive(:log_job_done).and_call_original

          subject.call(job, 'test_queue') { }
        end
      end

      it 'logs an exception in job' do
        Timecop.freeze(timestamp) do
          expect(logger).to receive(:info).with(start_payload)
          # Failures are logged at :warn with the error fields merged back in.
          expect(logger).to receive(:warn).with(hash_including(exception_payload))
          expect(subject).to receive(:log_job_start).and_call_original
          expect(subject).to receive(:log_job_done).and_call_original

          expect do
            subject.call(job, 'test_queue') do
              raise ArgumentError, 'some exception'
            end
          end.to raise_error(ArgumentError)
        end
      end

      it 'does not modify the job' do
        Timecop.freeze(timestamp) do
          job_copy = job.deep_dup

          allow(logger).to receive(:info)
          allow(subject).to receive(:log_job_start).and_call_original
          allow(subject).to receive(:log_job_done).and_call_original

          subject.call(job, 'test_queue') do
            expect(job).to eq(job_copy)
          end
        end
      end
    end

    context 'with SIDEKIQ_LOG_ARGUMENTS disabled' do
      it 'logs start and end of job without args' do
        Timecop.freeze(timestamp) do
          expect(logger).to receive(:info).with(start_payload.except('args')).ordered
          expect(logger).to receive(:info).with(end_payload.except('args')).ordered
          expect(subject).to receive(:log_job_start).and_call_original
          expect(subject).to receive(:log_job_done).and_call_original

          subject.call(job, 'test_queue') { }
        end
      end

      it 'logs without created_at and enqueued_at fields' do
        Timecop.freeze(timestamp) do
          # scheduling_latency_s is derived from enqueued_at, so it must also
          # disappear when the timestamps are absent from the job hash.
          excluded_fields = %w(created_at enqueued_at args scheduling_latency_s)

          expect(logger).to receive(:info).with(start_payload.except(*excluded_fields)).ordered
          expect(logger).to receive(:info).with(end_payload.except(*excluded_fields)).ordered
          expect(subject).to receive(:log_job_start).and_call_original
          expect(subject).to receive(:log_job_done).and_call_original

          subject.call(job.except("created_at", "enqueued_at"), 'test_queue') { }
        end
      end
    end

    context 'with latency' do
      # Enqueued two hours before execution => 7200 s scheduling latency.
      let(:created_at) { Time.iso8601('2018-01-01T10:00:00.000Z') }
      let(:scheduling_latency_s) { 7200.0 }

      it 'logs with scheduling latency' do
        Timecop.freeze(timestamp) do
          expect(logger).to receive(:info).with(start_payload.except('args')).ordered
          expect(logger).to receive(:info).with(end_payload.except('args')).ordered
          expect(subject).to receive(:log_job_start).and_call_original
          expect(subject).to receive(:log_job_done).and_call_original

          subject.call(job, 'test_queue') { }
        end
      end
    end

    context 'with Gitaly, Rugged, and Redis calls' do
      let(:timing_data) do
        {
          gitaly_calls: 10,
          gitaly_duration_s: 10000,
          rugged_calls: 1,
          rugged_duration_s: 5000,
          redis_calls: 3,
          redis_duration_s: 1234
        }
      end

      let(:expected_end_payload) do
        end_payload.except('args').merge(timing_data)
      end

      it 'logs with Gitaly and Rugged timing data' do
        Timecop.freeze(timestamp) do
          expect(logger).to receive(:info).with(start_payload.except('args')).ordered
          expect(logger).to receive(:info).with(expected_end_payload).ordered

          # Instrumentation middleware merges timing keys into the job hash
          # while it runs; the done log must pick them up.
          subject.call(job, 'test_queue') do
            job.merge!(timing_data)
          end
        end
      end
    end

    context 'when the job performs database queries' do
      before do
        allow(Time).to receive(:now).and_return(timestamp)
        # Use the real clock here so cpu_s / db_duration_s are actually measured.
        allow(Process).to receive(:clock_gettime).and_call_original
      end

      let(:expected_start_payload) { start_payload.except('args') }

      let(:expected_end_payload) do
        end_payload.except('args').merge('cpu_s' => a_value >= 0)
      end

      let(:expected_end_payload_with_db) do
        expected_end_payload.merge(
          'db_duration_s' => a_value >= 0.1,
          'db_count' => 1,
          'db_cached_count' => 0,
          'db_write_count' => 0
        )
      end

      it 'logs the database time' do
        expect(logger).to receive(:info).with(expected_start_payload).ordered
        expect(logger).to receive(:info).with(expected_end_payload_with_db).ordered

        subject.call(job, 'test_queue') { ActiveRecord::Base.connection.execute('SELECT pg_sleep(0.1);') }
      end

      it 'prevents database time from leaking to the next job' do
        expect(logger).to receive(:info).with(expected_start_payload).ordered
        expect(logger).to receive(:info).with(expected_end_payload_with_db).ordered
        expect(logger).to receive(:info).with(expected_start_payload).ordered
        expect(logger).to receive(:info).with(expected_end_payload).ordered

        subject.call(job, 'test_queue') { ActiveRecord::Base.connection.execute('SELECT pg_sleep(0.1);') }
        subject.call(job, 'test_queue') { }
      end
    end

    context 'when there is extra metadata set for the done log' do
      let(:expected_start_payload) { start_payload.except('args') }

      let(:expected_end_payload) do
        end_payload.except('args').merge("#{ApplicationWorker::LOGGING_EXTRA_KEY}.key1" => 15, "#{ApplicationWorker::LOGGING_EXTRA_KEY}.key2" => 16)
      end

      it 'logs it in the done log' do
        Timecop.freeze(timestamp) do
          expect(logger).to receive(:info).with(expected_start_payload).ordered
          expect(logger).to receive(:info).with(expected_end_payload).ordered

          subject.call(job, 'test_queue') do
            job["#{ApplicationWorker::LOGGING_EXTRA_KEY}.key1"] = 15
            job["#{ApplicationWorker::LOGGING_EXTRA_KEY}.key2"] = 16
            # Only keys under the LOGGING_EXTRA_KEY prefix are forwarded.
            job['key that will be ignored because it does not start with extra.'] = 17
          end
        end
      end
    end
  end

  describe '#add_time_keys!' do
    let(:time) { { duration: 0.1231234, cputime: 1.2342345 } }
    let(:payload) { { 'class' => 'my-class', 'message' => 'my-message', 'job_status' => 'my-job-status' } }
    let(:current_utc_time) { Time.now.utc }

    # Durations are expected rounded to 6 decimal places, plus a completed_at
    # epoch timestamp taken from the (frozen) current time.
    let(:payload_with_time_keys) { { 'class' => 'my-class', 'message' => 'my-message', 'job_status' => 'my-job-status', 'duration_s' => 0.123123, 'cpu_s' => 1.234235, 'completed_at' => current_utc_time.to_f } }

    subject { described_class.new }

    it 'update payload correctly' do
      Timecop.freeze(current_utc_time) do
        # Private helper; invoked via #send on purpose.
        subject.send(:add_time_keys!, time, payload)

        expect(payload).to eq(payload_with_time_keys)
      end
    end
  end
end