# frozen_string_literal: true

require 'spec_helper'

# rubocop: disable RSpec/MultipleMemoizedHelpers
RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
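  # Shared examples for the middleware's metrics behaviour, exercised against
  # both a mocked Prometheus client and the integrated one below.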
  shared_examples "a metrics middleware" do
    context "with mocked prometheus" do
      include_context 'server metrics with mocked prometheus'
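
      # The examples below verify that .initialize_process_metrics pre-populates the
      # completion histogram for every worker/queue mapping in this process, for both
      # the "done" and "fail" job_status label sets, so the series exist up front.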
      describe '.initialize_process_metrics' do
        it 'sets concurrency metrics' do
          expect(concurrency_metric).to receive(:set).with({}, Sidekiq[:concurrency].to_i)

          described_class.initialize_process_metrics
        end

        it 'initializes sidekiq_jobs_completion_seconds for the workers in the current Sidekiq process' do
          allow(Gitlab::SidekiqConfig)
            .to receive(:current_worker_queue_mappings)
            .and_return('MergeWorker' => 'merge', 'Ci::BuildFinishedWorker' => 'default')

          expect(completion_seconds_metric)
            .to receive(:get).with({ queue: 'merge',
                                     worker: 'MergeWorker',
                                     urgency: 'high',
                                     external_dependencies: 'no',
                                     feature_category: 'source_code_management',
                                     boundary: '',
                                     job_status: 'done' })

          expect(completion_seconds_metric)
            .to receive(:get).with({ queue: 'merge',
                                     worker: 'MergeWorker',
                                     urgency: 'high',
                                     external_dependencies: 'no',
                                     feature_category: 'source_code_management',
                                     boundary: '',
                                     job_status: 'fail' })

          expect(completion_seconds_metric)
            .to receive(:get).with({ queue: 'default',
                                     worker: 'Ci::BuildFinishedWorker',
                                     urgency: 'high',
                                     external_dependencies: 'no',
                                     feature_category: 'continuous_integration',
                                     boundary: 'cpu',
                                     job_status: 'done' })

          expect(completion_seconds_metric)
            .to receive(:get).with({ queue: 'default',
                                     worker: 'Ci::BuildFinishedWorker',
                                     urgency: 'high',
                                     external_dependencies: 'no',
                                     feature_category: 'continuous_integration',
                                     boundary: 'cpu',
                                     job_status: 'fail' })

          described_class.initialize_process_metrics
        end

        context 'when the sidekiq_job_completion_metric_initialize feature flag is disabled' do
          before do
            stub_feature_flags(sidekiq_job_completion_metric_initialize: false)
          end

          it 'sets the concurrency metric' do
            expect(concurrency_metric).to receive(:set).with({}, Sidekiq[:concurrency].to_i)

            described_class.initialize_process_metrics
          end

          it 'does not initialize sidekiq_jobs_completion_seconds' do
            allow(Gitlab::SidekiqConfig)
              .to receive(:current_worker_queue_mappings)
              .and_return('MergeWorker' => 'merge', 'Ci::BuildFinishedWorker' => 'default')

            expect(completion_seconds_metric).not_to receive(:get)

            described_class.initialize_process_metrics
          end
        end
      end

      describe '#call' do
        include_context 'server metrics call'

        it 'yields block' do
          expect { |b| subject.call(worker, job, :test, &b) }.to yield_control.once
        end

        it 'calls BackgroundTransaction' do
          expect_next_instance_of(Gitlab::Metrics::BackgroundTransaction) do |instance|
            expect(instance).to receive(:run)
          end

          subject.call(worker, job, :test) {}
        end

        it 'sets queue specific metrics' do
          expect(running_jobs_metric).to receive(:increment).with(labels, -1)
          expect(running_jobs_metric).to receive(:increment).with(labels, 1)
          expect(queue_duration_seconds).to receive(:observe).with(labels, queue_duration_for_job) if queue_duration_for_job
          expect(user_execution_seconds_metric).to receive(:observe).with(labels_with_job_status, thread_cputime_duration)
          expect(db_seconds_metric).to receive(:observe).with(labels_with_job_status, db_duration)
          expect(gitaly_seconds_metric).to receive(:observe).with(labels_with_job_status, gitaly_duration)
          expect(completion_seconds_metric).to receive(:observe).with(labels_with_job_status, monotonic_time_duration)
          expect(redis_seconds_metric).to receive(:observe).with(labels_with_job_status, redis_duration)
          expect(elasticsearch_seconds_metric).to receive(:observe).with(labels_with_job_status, elasticsearch_duration)
          expect(redis_requests_total).to receive(:increment).with(labels_with_job_status, redis_calls)
          expect(elasticsearch_requests_total).to receive(:increment).with(labels_with_job_status, elasticsearch_calls)
          expect(sidekiq_mem_total_bytes).to receive(:set).with(labels_with_job_status, mem_total_bytes)

          subject.call(worker, job, :test) { nil }
        end

        it 'sets sidekiq_jobs_completion_seconds values that are compatible with those from .initialize_process_metrics' do
          label_validator = Prometheus::Client::LabelSetValidator.new([:le])

          allow(Gitlab::SidekiqConfig)
            .to receive(:current_worker_queue_mappings)
            .and_return('MergeWorker' => 'merge', 'Ci::BuildFinishedWorker' => 'default')

          allow(completion_seconds_metric).to receive(:get) do |labels|
            expect { label_validator.validate(labels) }.not_to raise_error
          end

          allow(completion_seconds_metric).to receive(:observe) do |labels, _duration|
            expect { label_validator.validate(labels) }.not_to raise_error
          end

          described_class.initialize_process_metrics

          subject.call(worker, job, :test) { nil }
        end

        it 'sets the thread name if it was nil' do
          allow(Thread.current).to receive(:name).and_return(nil)
          expect(Thread.current).to receive(:name=).with(Gitlab::Metrics::Samplers::ThreadsSampler::SIDEKIQ_WORKER_THREAD_NAME)

          subject.call(worker, job, :test) { nil }
        end

        context 'when job_duration is not available' do
          let(:queue_duration_for_job) { nil }

          it 'does not set the queue_duration_seconds histogram' do
            expect(queue_duration_seconds).not_to receive(:observe)

            subject.call(worker, job, :test) { nil }
          end
        end

        context 'when error is raised' do
          let(:job_status) { :fail }

          it 'sets sidekiq_jobs_failed_total and reraises' do
            expect(failed_total_metric).to receive(:increment).with(labels, 1)

            expect { subject.call(worker, job, :test) { raise StandardError, "Failed" } }.to raise_error(StandardError, "Failed")
          end
        end

        context 'when job is retried' do
          let(:job) { { 'retry_count' => 1 } }

          it 'sets sidekiq_jobs_retried_total metric' do
            expect(retried_total_metric).to receive(:increment)

            subject.call(worker, job, :test) { nil }
          end
        end

        context 'when job is interrupted' do
          let(:job) { { 'interrupted_count' => 1 } }

          it 'sets sidekiq_jobs_interrupted_total metric' do
            expect(interrupted_total_metric).to receive(:increment)

            subject.call(worker, job, :test) { nil }
          end
        end
      end
    end

    context "with prometheus integrated" do
      describe '#call' do
        it 'yields block' do
          expect { |b| subject.call(worker, job, :test, &b) }.to yield_control.once
        end

        context 'when error is raised' do
          let(:job_status) { :fail }

          it 'sets sidekiq_jobs_failed_total and reraises' do
            expect { subject.call(worker, job, :test) { raise StandardError, "Failed" } }.to raise_error(StandardError, "Failed")
          end
        end
      end
    end
  end

  it_behaves_like 'metrics middleware with worker attribution' do
    let(:job_status) { :done }
    let(:labels_with_job_status) { labels.merge(job_status: job_status.to_s) }
  end
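
  # Verifies that the sidekiq_load_balancing_count counter is labelled with the
  # worker's data consistency and the load balancing strategy that was used.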
  context 'DB load balancing' do
    subject { described_class.new }

    let(:queue) { :test }
    let(:worker_class) { worker.class }
    let(:worker) { TestWorker.new }
    let(:client_middleware) { Gitlab::Database::LoadBalancing::SidekiqClientMiddleware.new }
    let(:load_balancer) { double.as_null_object }
    let(:load_balancing_metric) { double('load balancing metric') }
    let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e" } }

    def process_job
      client_middleware.call(worker_class, job, queue, double) do
        worker_class.process_job(job)
      end
    end

    include_context 'server metrics with mocked prometheus'
    include_context 'server metrics call'

    before do
      stub_const('TestWorker', Class.new)
      TestWorker.class_eval do
        include Sidekiq::Worker
        include WorkerAttributes

        def perform(*args)
        end
      end

      allow(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer).and_return(load_balancer)
      allow(load_balancing_metric).to receive(:increment)
      allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_load_balancing_count, anything).and_return(load_balancing_metric)
    end
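
    # The load balancing server middleware is added ahead of the metrics
    # middleware, so it runs first for each test job in this chain.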
    around do |example|
      with_sidekiq_server_middleware do |chain|
        chain.add Gitlab::Database::LoadBalancing::SidekiqServerMiddleware
        chain.add described_class
        Sidekiq::Testing.inline! { example.run }
      end
    end

    shared_context 'worker declaring data consistency' do
      let(:worker_class) { LBTestWorker }
      let(:wal_locations) { { Gitlab::Database::MAIN_DATABASE_NAME.to_sym => 'AB/12345' } }
      let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", "wal_locations" => wal_locations } }

      before do
        stub_const('LBTestWorker', Class.new(TestWorker))
        LBTestWorker.class_eval do
          include ApplicationWorker

          data_consistency :delayed
        end
      end
    end

    describe '#call' do
      context 'when worker declares data consistency' do
        include_context 'worker declaring data consistency'

        it 'increments load balancing counter with defined data consistency' do
          process_job

          expect(load_balancing_metric).to have_received(:increment).with(
            a_hash_including(
              data_consistency: :delayed,
              load_balancing_strategy: 'replica'
            ), 1)
        end
      end

      context 'when worker does not declare data consistency' do
        it 'increments load balancing counter with default data consistency' do
          process_job

          expect(load_balancing_metric).to have_received(:increment).with(
            a_hash_including(
              data_consistency: :always,
              load_balancing_strategy: 'primary'
            ), 1)
        end
      end
    end
  end
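
  # Verifies that metrics use the worker's declared feature category, falling back
  # to the category from the job's application context when none is declared.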
  context 'feature attribution' do
    let(:test_worker) do
      category = worker_category

      Class.new do
        include Sidekiq::Worker
        include WorkerAttributes

        if category
          feature_category category
        else
          feature_category :not_owned
        end

        def perform
        end
      end
    end

    let(:context_category) { 'continuous_integration' }
    let(:job) { { 'meta.feature_category' => 'continuous_integration' } }

    before do
      stub_const('TestWorker', test_worker)
    end

    around do |example|
      with_sidekiq_server_middleware do |chain|
        Gitlab::SidekiqMiddleware.server_configurator(
          metrics: true,
          arguments_logger: false
        ).call(chain)

        Sidekiq::Testing.inline! { example.run }
      end
    end

    include_context 'server metrics with mocked prometheus'
    include_context 'server metrics call'

    context 'when a worker has a feature category' do
      let(:worker_category) { 'authentication_and_authorization' }

      it 'uses that category for metrics' do
        expect(completion_seconds_metric).to receive(:observe).with(a_hash_including(feature_category: worker_category), anything)

        TestWorker.process_job(job)
      end
    end

    context 'when a worker does not have a feature category' do
      let(:worker_category) { nil }

      it 'uses the category from the context for metrics' do
        expect(completion_seconds_metric).to receive(:observe).with(a_hash_including(feature_category: context_category), anything)

        TestWorker.process_job(job)
      end
    end
  end
end
|
2021-04-17 20:07:23 +05:30
|
|
|
# rubocop: enable RSpec/MultipleMemoizedHelpers