# frozen_string_literal: true

require 'spec_helper'

require_relative '../../support/stub_settings_source'
require_relative '../../../sidekiq_cluster/cli'
require_relative '../../support/helpers/next_instance_of'

RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_settings_source: true do # rubocop:disable RSpec/FilePath
  include NextInstanceOf

  let(:cli) { described_class.new('/dev/null') }
  let(:timeout) { Gitlab::SidekiqCluster::DEFAULT_SOFT_TIMEOUT_SECONDS }
  let(:default_options) do
    { env: 'test', directory: Dir.pwd, max_concurrency: 20, min_concurrency: 0, dryrun: false, timeout: timeout }
  end

  let(:sidekiq_exporter_enabled) { false }
  let(:sidekiq_exporter_port) { '3807' }

  let(:config) do
    {
      'sidekiq_exporter' => {
        'address' => 'localhost',
        'enabled' => sidekiq_exporter_enabled,
        'port' => sidekiq_exporter_port
      }
    }
  end

  let(:supervisor) { instance_double(Gitlab::SidekiqCluster::SidekiqProcessSupervisor) }
  let(:metrics_cleanup_service) { instance_double(Prometheus::CleanupMultiprocDirService, execute: nil) }

  before do
    allow(Gitlab::ProcessManagement).to receive(:write_pid)
    allow(Gitlab::SidekiqCluster::SidekiqProcessSupervisor).to receive(:instance).and_return(supervisor)
    allow(supervisor).to receive(:supervise)

    allow(Prometheus::CleanupMultiprocDirService).to receive(:new).and_return(metrics_cleanup_service)

    stub_config(sidekiq: { routing_rules: [] })
  end
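
  # Swap the real monitoring settings for the test `config` around each example and
  # restore the original value afterwards, so other specs see unmodified settings.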
  around do |example|
    original = Settings['monitoring']
    Settings['monitoring'] = config

    example.run

    Settings['monitoring'] = original
  end

  describe '#run' do
    context 'without any arguments' do
      it 'raises CommandError' do
        expect { cli.run([]) }.to raise_error(described_class::CommandError)
      end
    end

    context 'with arguments' do
      it 'starts the Sidekiq workers' do
        expect(Gitlab::SidekiqCluster).to receive(:start)
          .with([['foo'] + described_class::DEFAULT_QUEUES], default_options)
          .and_return([])

        cli.run(%w(foo))
      end

      it 'allows the special * selector' do
        worker_queues = %w(foo bar baz)

        expect(Gitlab::SidekiqConfig::CliMethods)
          .to receive(:worker_queues).and_return(worker_queues)

        expect(Gitlab::SidekiqCluster)
          .to receive(:start).with([worker_queues], default_options).and_return([])

        cli.run(%w(*))
      end

      it 'raises an error when the arguments contain newlines' do
        invalid_arguments = [
          ["foo\n"],
          ["foo\r"],
          %W[foo b\nar]
        ]

        invalid_arguments.each do |arguments|
          expect { cli.run(arguments) }.to raise_error(described_class::CommandError)
        end
      end

      context 'with --negate flag' do
        it 'starts Sidekiq workers for all queues in all_queues.yml except the ones in argv' do
          expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(['baz'])
          expect(Gitlab::SidekiqCluster).to receive(:start)
            .with([['baz'] + described_class::DEFAULT_QUEUES], default_options)
            .and_return([])

          cli.run(%w(foo -n))
        end
      end

      context 'with --max-concurrency flag' do
        it 'starts Sidekiq workers for specified queues with a max concurrency' do
          expected_queues = [%w(foo bar baz), %w(solo)].each { |queues| queues.concat(described_class::DEFAULT_QUEUES) }
          expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(%w(foo bar baz))
          expect(Gitlab::SidekiqCluster).to receive(:start)
            .with(expected_queues, default_options.merge(max_concurrency: 2))
            .and_return([])

          cli.run(%w(foo,bar,baz solo -m 2))
        end
      end

      context 'with --min-concurrency flag' do
        it 'starts Sidekiq workers for specified queues with a min concurrency' do
          expected_queues = [%w(foo bar baz), %w(solo)].each { |queues| queues.concat(described_class::DEFAULT_QUEUES) }
          expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(%w(foo bar baz))
          expect(Gitlab::SidekiqCluster).to receive(:start)
            .with(expected_queues, default_options.merge(min_concurrency: 2))
            .and_return([])

          cli.run(%w(foo,bar,baz solo --min-concurrency 2))
        end
      end

      context 'with --timeout flag' do
        it 'when given', 'starts Sidekiq workers with given timeout' do
          expect(Gitlab::SidekiqCluster).to receive(:start)
            .with([['foo'] + described_class::DEFAULT_QUEUES], default_options.merge(timeout: 10))
            .and_return([])

          cli.run(%w(foo --timeout 10))
        end

        it 'when not given', 'starts Sidekiq workers with default timeout' do
          expect(Gitlab::SidekiqCluster).to receive(:start)
            .with([['foo'] + described_class::DEFAULT_QUEUES], default_options.merge(timeout:
              Gitlab::SidekiqCluster::DEFAULT_SOFT_TIMEOUT_SECONDS))
            .and_return([])

          cli.run(%w(foo))
        end
      end

      context 'with --list-queues flag' do
        it 'errors when given --list-queues and --dryrun' do
          expect { cli.run(%w(foo --list-queues --dryrun)) }.to raise_error(described_class::CommandError)
        end

        it 'prints out a list of queues in alphabetical order' do
          expected_queues = [
            'default',
            'epics:epics_update_epics_dates',
            'epics_new_epic_issue',
            'mailers',
            'new_epic',
            'todos_destroyer:todos_destroyer_confidential_epic'
          ]

          allow(Gitlab::SidekiqConfig::CliMethods).to receive(:query_queues).and_return(expected_queues.shuffle)

          expect(cli).to receive(:puts).with([expected_queues])

          cli.run(%w(--queue-selector feature_category=epics --list-queues))
        end
      end

      context 'queue namespace expansion' do
        it 'starts Sidekiq workers for all queues in all_queues.yml with a namespace in argv' do
          expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(['cronjob:foo', 'cronjob:bar'])
          expect(Gitlab::SidekiqCluster).to receive(:start)
            .with([['cronjob', 'cronjob:foo', 'cronjob:bar'] +
              described_class::DEFAULT_QUEUES], default_options)
            .and_return([])

          cli.run(%w(cronjob))
        end
      end

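      # Each entry below pairs a --queue-selector query with queues expected to be included
      # in or excluded from the resulting queue groups; `with_them` runs the examples once
      # per entry.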
context "with --queue-selector" do
|
|
|
|
where do
|
|
|
|
{
|
|
|
|
'memory-bound queues' => {
|
|
|
|
query: 'resource_boundary=memory',
|
|
|
|
included_queues: %w(project_export),
|
|
|
|
excluded_queues: %w(merge)
|
|
|
|
},
|
|
|
|
'memory- or CPU-bound queues' => {
|
|
|
|
query: 'resource_boundary=memory,cpu',
|
|
|
|
included_queues: %w(auto_merge:auto_merge_process project_export),
|
|
|
|
excluded_queues: %w(merge)
|
|
|
|
},
|
|
|
|
'high urgency CI queues' => {
|
|
|
|
query: 'feature_category=continuous_integration&urgency=high',
|
2022-07-23 23:45:48 +05:30
|
|
|
included_queues: %w(pipeline_default:ci_drop_pipeline),
|
2021-09-04 01:27:46 +05:30
|
|
|
excluded_queues: %w(merge)
|
|
|
|
},
|
|
|
|
'CPU-bound high urgency CI queues' => {
|
|
|
|
query: 'feature_category=continuous_integration&urgency=high&resource_boundary=cpu',
|
2022-07-23 23:45:48 +05:30
|
|
|
included_queues: %w(pipeline_default:ci_create_downstream_pipeline),
|
|
|
|
excluded_queues: %w(pipeline_default:ci_drop_pipeline merge)
|
2021-09-04 01:27:46 +05:30
|
|
|
},
|
|
|
|
'CPU-bound high urgency non-CI queues' => {
|
|
|
|
query: 'feature_category!=continuous_integration&urgency=high&resource_boundary=cpu',
|
|
|
|
included_queues: %w(new_issue),
|
2022-07-23 23:45:48 +05:30
|
|
|
excluded_queues: %w(pipeline_default:ci_create_downstream_pipeline)
|
2021-09-04 01:27:46 +05:30
|
|
|
},
|
|
|
|
'CI and SCM queues' => {
|
|
|
|
query: 'feature_category=continuous_integration|feature_category=source_code_management',
|
2022-07-23 23:45:48 +05:30
|
|
|
included_queues: %w(pipeline_default:ci_drop_pipeline merge),
|
2023-07-09 08:55:56 +05:30
|
|
|
excluded_queues: %w()
|
2021-01-29 00:20:46 +05:30
|
|
|
}
|
2021-09-04 01:27:46 +05:30
|
|
|
}
|
|
|
|
end
|
2021-01-29 00:20:46 +05:30
|
|
|
|
2021-09-04 01:27:46 +05:30
|
|
|
        with_them do
          it 'expands queues by attributes' do
            expect(Gitlab::SidekiqCluster).to receive(:start) do |queues, opts|
              expect(opts).to eq(default_options)
              expect(queues.first).to include(*included_queues)
              expect(queues.first).not_to include(*excluded_queues)
              expect(queues.first).to include(*described_class::DEFAULT_QUEUES)

              []
            end

            cli.run(%W(--queue-selector #{query}))
          end

          it 'works when negated' do
            expect(Gitlab::SidekiqCluster).to receive(:start) do |queues, opts|
              expect(opts).to eq(default_options)
              expect(queues.first).not_to include(*included_queues)
              expect(queues.first).to include(*excluded_queues)
              expect(queues.first).to include(*described_class::DEFAULT_QUEUES)

              []
            end

            cli.run(%W(--negate --queue-selector #{query}))
          end
        end

        it 'expands multiple queue groups correctly' do
          expected_workers =
            if Gitlab.ee?
              [
                %w[cronjob:clusters_integrations_check_prometheus_health incident_management_close_incident status_page_publish] + described_class::DEFAULT_QUEUES,
                %w[bulk_imports_pipeline bulk_imports_relation_export project_export projects_import_export_parallel_project_export projects_import_export_relation_export repository_import project_template_export] +
                  described_class::DEFAULT_QUEUES
              ]
            else
              [
                %w[cronjob:clusters_integrations_check_prometheus_health incident_management_close_incident] + described_class::DEFAULT_QUEUES,
                %w[bulk_imports_pipeline bulk_imports_relation_export project_export projects_import_export_parallel_project_export projects_import_export_relation_export repository_import] +
                  described_class::DEFAULT_QUEUES
              ]
            end

          expect(Gitlab::SidekiqCluster)
            .to receive(:start)
            .with(expected_workers, default_options)
            .and_return([])

          cli.run(%w(--queue-selector feature_category=incident_management&has_external_dependencies=true resource_boundary=memory&feature_category=importers))
        end

        it 'allows the special * selector' do
          worker_queues = %w(foo bar baz)

          expect(Gitlab::SidekiqConfig::CliMethods)
            .to receive(:worker_queues).and_return(worker_queues)

          expect(Gitlab::SidekiqCluster)
            .to receive(:start).with([worker_queues], default_options).and_return([])

          cli.run(%w(--queue-selector *))
        end

        it 'errors when the selector matches no queues' do
          expect(Gitlab::SidekiqCluster).not_to receive(:start)

          expect { cli.run(%w(--queue-selector has_external_dependencies=true&has_external_dependencies=false)) }
            .to raise_error(described_class::CommandError)
        end

        it 'errors on an invalid query' do
          expect(Gitlab::SidekiqCluster).not_to receive(:start)

          expect { cli.run(%w(--queue-selector unknown_field=chatops)) }
            .to raise_error(Gitlab::SidekiqConfig::WorkerMatcher::QueryError)
        end
      end

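      # When explicit routing rules are configured, the CLI starts only the requested
      # queues and does not append DEFAULT_QUEUES.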
context "with routing rules specified" do
|
|
|
|
before do
|
|
|
|
stub_config(sidekiq: { routing_rules: [['resource_boundary=cpu', 'foo']] })
|
|
|
|
end
|
|
|
|
|
|
|
|
it "starts Sidekiq workers only for given queues without any additional DEFAULT_QUEUES" do
|
|
|
|
expect(Gitlab::SidekiqCluster).to receive(:start)
|
|
|
|
.with([['foo']], default_options)
|
|
|
|
.and_return([])
|
|
|
|
|
|
|
|
cli.run(%w(foo))
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context "with sidekiq settings not specified" do
|
|
|
|
before do
|
|
|
|
stub_config(sidekiq: nil)
|
|
|
|
end
|
|
|
|
|
|
|
|
it "does not throw an error" do
|
|
|
|
allow(Gitlab::SidekiqCluster).to receive(:start).and_return([])
|
|
|
|
|
|
|
|
expect { cli.run(%w(foo)) }.not_to raise_error
|
|
|
|
end
|
|
|
|
|
|
|
|
it "starts Sidekiq workers with given queues, and additional default and mailers queues (DEFAULT_QUEUES)" do
|
|
|
|
expect(Gitlab::SidekiqCluster).to receive(:start)
|
|
|
|
.with([['foo'] + described_class::DEFAULT_QUEUES], default_options)
|
|
|
|
.and_return([])
|
|
|
|
|
|
|
|
cli.run(%w(foo))
|
|
|
|
end
|
|
|
|
end
|
2020-04-08 14:13:33 +05:30
|
|
|
end
|
2022-01-26 12:08:38 +05:30
|
|
|
|
|
|
|
    context 'metrics server' do
      let(:trapped_signals) { described_class::TERMINATE_SIGNALS + described_class::FORWARD_SIGNALS }
      let(:metrics_dir) { Dir.mktmpdir }

      before do
        stub_env('prometheus_multiproc_dir', metrics_dir)
      end

      after do
        FileUtils.rm_rf(metrics_dir, secure: true)
      end

      context 'starting the server' do
        before do
          allow(Gitlab::SidekiqCluster).to receive(:start).and_return([])
        end

        context 'without --dryrun' do
          it 'wipes the metrics directory before starting workers' do
            expect(metrics_cleanup_service).to receive(:execute).ordered
            expect(Gitlab::SidekiqCluster).to receive(:start).ordered.and_return([])

            cli.run(%w(foo))
          end

          context 'when sidekiq_exporter is not set up' do
            let(:config) do
              { 'sidekiq_exporter' => {} }
            end

            it 'does not start a sidekiq metrics server' do
              expect(MetricsServer).not_to receive(:start_for_sidekiq)

              cli.run(%w(foo))
            end
          end

          context 'with missing sidekiq_exporter setting' do
            let(:config) do
              { 'sidekiq_exporter' => nil }
            end

            it 'does not start a sidekiq metrics server' do
              expect(MetricsServer).not_to receive(:start_for_sidekiq)

              cli.run(%w(foo))
            end

            it 'does not throw an error' do
              expect { cli.run(%w(foo)) }.not_to raise_error
            end
          end

          context 'when sidekiq_exporter is disabled' do
            it 'does not start a sidekiq metrics server' do
              expect(MetricsServer).not_to receive(:start_for_sidekiq)

              cli.run(%w(foo))
            end
          end

          context 'when sidekiq_exporter is enabled' do
            let(:sidekiq_exporter_enabled) { true }

            it 'starts the metrics server' do
              expect(MetricsServer).to receive(:start_for_sidekiq).with(metrics_dir: metrics_dir, reset_signals: trapped_signals)

              cli.run(%w(foo))
            end
          end

          context 'when a PID is specified' do
            it 'writes the PID to a file' do
              expect(Gitlab::ProcessManagement).to receive(:write_pid).with('/dev/null')

              cli.option_parser.parse!(%w(-P /dev/null))
              cli.run(%w(foo))
            end
          end

          context 'when no PID is specified' do
            it 'does not write a PID' do
              expect(Gitlab::ProcessManagement).not_to receive(:write_pid)

              cli.run(%w(foo))
            end
          end
        end

        context 'with --dryrun set' do
          let(:sidekiq_exporter_enabled) { true }

          it 'does not start the server' do
            expect(MetricsServer).not_to receive(:start_for_sidekiq)

            cli.run(%w(foo --dryrun))
          end
        end
      end
    end

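    # The waiter thread doubles below stand in for the per-worker threads returned by the
    # stubbed Gitlab::SidekiqCluster.start, so supervision can be exercised without
    # spawning real Sidekiq processes.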
    context 'supervising the cluster' do
      let(:sidekiq_exporter_enabled) { true }
      let(:metrics_server_pid) { 99 }
      let(:sidekiq_worker_pids) { [2, 42] }
      let(:waiter_threads) { [instance_double('Process::Waiter'), instance_double('Process::Waiter')] }
      let(:process_status) { instance_double('Process::Status') }

      before do
        allow(Gitlab::SidekiqCluster).to receive(:start).and_return(waiter_threads)
        allow(process_status).to receive(:success?).and_return(true)
        allow(cli).to receive(:exit)

        waiter_threads.each.with_index do |thread, i|
          allow(thread).to receive(:join)
          allow(thread).to receive(:pid).and_return(sidekiq_worker_pids[i])
          allow(thread).to receive(:value).and_return(process_status)
        end
      end

      context 'when one of the workers has been terminated gracefully' do
        it 'stops the entire process cluster' do
          expect(MetricsServer).to receive(:start_for_sidekiq).once.and_return(metrics_server_pid)
          expect(supervisor).to receive(:supervise).and_yield([2, 99])
          expect(supervisor).to receive(:shutdown)
          expect(cli).not_to receive(:exit).with(1)

          cli.run(%w(foo))
        end
      end

      context 'when one of the workers has failed' do
        it 'stops the entire process cluster and exits with a non-zero code' do
          expect(MetricsServer).to receive(:start_for_sidekiq).once.and_return(metrics_server_pid)
          expect(supervisor).to receive(:supervise).and_yield([2, 99])
          expect(supervisor).to receive(:shutdown)
          expect(process_status).to receive(:success?).and_return(false)
          expect(cli).to receive(:exit).with(1)

          cli.run(%w(foo))
        end
      end

      it 'stops the entire process cluster if one of the workers has been terminated' do
        expect(MetricsServer).to receive(:start_for_sidekiq).once.and_return(metrics_server_pid)
        expect(supervisor).to receive(:supervise).and_yield([2, 99])
        expect(supervisor).to receive(:shutdown)

        cli.run(%w(foo))
      end

      it 'restarts the metrics server when it is down' do
        expect(supervisor).to receive(:supervise).and_yield([metrics_server_pid])
        expect(MetricsServer).to receive(:start_for_sidekiq).twice.and_return(metrics_server_pid)

        cli.run(%w(foo))
      end
    end
  end
end