debian-mirror-gitlab/spec/initializers/lograge_spec.rb

# frozen_string_literal: true

require 'spec_helper'

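# This request spec exercises the Lograge integration: it checks that the
# correlation ID from the X-Request-ID header reaches both the api_json and
# production_json logs, that oversized params are truncated, and that CPU,
# memory, exception, and database metadata end up in the log lines.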
RSpec.describe 'lograge', type: :request do
  let(:headers) { { 'X-Request-ID' => 'new-correlation-id' } }

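  # Params beyond Gitlab::Utils::LogLimitedArray::MAXIMUM_ARRAY_LENGTH are
  # expected to be dropped from the logged payload and replaced with a trailing
  # `truncated` placeholder entry, as asserted by the 'limits param size' examples.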
  let(:large_params) do
    half_limit = Gitlab::Utils::LogLimitedArray::MAXIMUM_ARRAY_LENGTH / 2

    {
      a: 'a',
      b: 'b' * half_limit,
      c: 'c' * half_limit,
      d: 'd'
    }
  end

  let(:limited_params) do
    large_params.slice(:a, :b).map { |k, v| { key: k.to_s, value: v } } + [{ key: 'truncated', value: '...' }]
  end

  context 'for API requests' do
    it 'logs to api_json log' do
      # we assert receiving parameters by grape logger
      expect_any_instance_of(Gitlab::GrapeLogging::Formatters::LogrageWithTimestamp).to receive(:call)
        .with(anything, anything, anything, a_hash_including("correlation_id" => "new-correlation-id"))
        .and_call_original

      get("/api/v4/endpoint", params: {}, headers: headers)
    end

    it 'limits param size' do
      expect(Lograge.formatter).to receive(:call)
        .with(a_hash_including(params: limited_params))
        .and_call_original

      get("/api/v4/endpoint", params: large_params, headers: headers)
    end
  end

  context 'for Controller requests' do
    subject { get("/", params: {}, headers: headers) }

    it 'logs to production_json log' do
      # formatter receives a hash with correlation id
      expect(Lograge.formatter).to receive(:call)
        .with(a_hash_including("correlation_id" => "new-correlation-id"))
        .and_call_original

      # a log file receives a line with correlation id
      expect(Lograge.logger).to receive(:send)
        .with(anything, include('"correlation_id":"new-correlation-id"'))
        .and_call_original

      subject
    end

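    # cpu_s is derived from two consecutive thread CPU time samples; the stubbed
    # values below differ by 0.1111115 s, which is expected in the log as
    # cpu_s: 0.111112 (rounded to six decimal places).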
    it 'logs cpu_s on supported platform' do
      allow(Gitlab::Metrics::System).to receive(:thread_cpu_time)
        .and_return(
          0.111222333,
          0.222333833
        )

      expect(Lograge.formatter).to receive(:call)
        .with(a_hash_including(cpu_s: 0.111112))
        .and_call_original

      expect(Lograge.logger).to receive(:send)
        .with(anything, include('"cpu_s":0.111112'))
        .and_call_original

      subject
    end

    it 'does not log cpu_s on unsupported platform' do
      allow(Gitlab::Metrics::System).to receive(:thread_cpu_time)
        .and_return(nil)

      expect(Lograge.formatter).to receive(:call)
        .with(hash_not_including(:cpu_s))
        .and_call_original

      expect(Lograge.logger).not_to receive(:send)
        .with(anything, include('"cpu_s":'))
        .and_call_original

      subject
    end

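    # Allocation counters (e.g. mem_objects) are expected both in the formatter
    # payload and in the emitted JSON log line; skip_memory_instrumentation!
    # comes from MemoryInstrumentationHelper and presumably guards platforms
    # where allocation tracking is unavailable.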
    context 'when logging memory allocations' do
      include MemoryInstrumentationHelper

      before do
        skip_memory_instrumentation!
      end

      it 'logs memory usage metrics' do
        expect(Lograge.formatter).to receive(:call)
          .with(a_hash_including(:mem_objects))
          .and_call_original

        expect(Lograge.logger).to receive(:send)
          .with(anything, include('"mem_objects":'))
          .and_call_original

        subject
      end
    end

    it 'limits param size' do
      expect(Lograge.formatter).to receive(:call)
        .with(a_hash_including(params: limited_params))
        .and_call_original

      get("/", params: large_params, headers: headers)
    end
  end

  context 'with a log subscriber' do
    include_context 'parsed logs'

    let(:subscriber) { Lograge::LogSubscribers::ActionController.new }

    let(:event) do
      ActiveSupport::Notifications::Event.new(
        'process_action.action_controller',
        Time.now,
        Time.now,
        2,
        status: 200,
        controller: 'HomeController',
        action: 'index',
        format: 'application/json',
        method: 'GET',
        path: '/home?foo=bar',
        params: {},
        db_runtime: 0.02,
        view_runtime: 0.01
      )
    end

    describe 'with an exception' do
      let(:exception) { RuntimeError.new('bad request') }
      let(:backtrace) { caller }

      before do
        allow(exception).to receive(:backtrace).and_return(backtrace)
        event.payload[:exception_object] = exception
      end

      it 'adds exception data to log' do
        subscriber.process_action(event)

        expect(log_data['exception.class']).to eq('RuntimeError')
        expect(log_data['exception.message']).to eq('bad request')
        expect(log_data['exception.backtrace']).to eq(Gitlab::BacktraceCleaner.clean_backtrace(backtrace))
      end

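      # The raw query carried by the exception is expected to be normalized
      # before logging: the literal `1` is replaced with a bind placeholder so
      # that query values do not end up in the log.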
      context 'with an ActiveRecord::StatementInvalid' do
        let(:exception) { ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = $1') }

        it 'adds the SQL query to the log' do
          subscriber.process_action(event)

          expect(log_data['exception.sql']).to eq('SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1')
        end
      end
    end

    describe 'with etag_route' do
      let(:etag_route) { 'etag route' }

      before do
        event.payload[:etag_route] = etag_route
      end

      it 'adds etag_route to log' do
        subscriber.process_action(event)

        expect(log_data['etag_route']).to eq(etag_route)
      end
    end

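    # Redirect locations are expected to be filtered before logging: the
    # access_token value in the URL fragment must not appear in the log, while
    # non-sensitive parts such as token_type are kept.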
    describe 'with access token in url' do
      before do
        event.payload[:location] = 'http://example.com/auth.html#access_token=secret_token&token_type=Bearer'
      end

      it 'strips location from sensitive information' do
        subscriber.redirect_to(event)
        subscriber.process_action(event)

        expect(log_data['location']).not_to include('secret_token')
        expect(log_data['location']).to include('filtered')
      end

      it 'leaves non-sensitive information from location' do
        subscriber.redirect_to(event)
        subscriber.process_action(event)

        expect(log_data['location']).to include('&token_type=Bearer')
      end
    end

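    # The pg_sleep query in the before block ensures each example issues at
    # least one real database query, which is why db_count is expected to be
    # >= 1 with RequestStore enabled; the db counters and load-balancing keys
    # are only expected when RequestStore is enabled.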
    context 'with db payload' do
      let(:db_load_balancing_logging_keys) do
        %w[
          db_primary_wal_count
          db_replica_wal_count
          db_primary_wal_cached_count
          db_replica_wal_cached_count
          db_replica_count
          db_replica_cached_count
          db_primary_count
          db_primary_cached_count
          db_primary_duration_s
          db_replica_duration_s
        ]
      end

      before do
        ApplicationRecord.connection.execute('SELECT pg_sleep(0.1);')
      end

      context 'when RequestStore is enabled', :request_store do
        it 'includes db counters' do
          subscriber.process_action(event)

          expect(log_data).to include("db_count" => a_value >= 1, "db_write_count" => 0, "db_cached_count" => 0)
        end
      end

      context 'when RequestStore is disabled' do
        it 'does not include db counters' do
          subscriber.process_action(event)

          expect(log_data).not_to include("db_count", "db_write_count", "db_cached_count")
        end
      end

      context 'with db payload' do
        context 'when RequestStore is enabled', :request_store do
          it 'includes db counters for load balancing' do
            subscriber.process_action(event)

            expect(log_data).to include(*db_load_balancing_logging_keys)
          end
        end

        context 'when RequestStore is disabled' do
          it 'does not include db counters for load balancing' do
            subscriber.process_action(event)

            expect(log_data).not_to include(*db_load_balancing_logging_keys)
          end
        end
      end
    end
  end
end