#!/usr/bin/env ruby
# frozen_string_literal: true

# We don't have auto-loading here
require_relative '../lib/gitlab'
require_relative '../lib/gitlab/popen'
require_relative '../lib/gitlab/popen/runner'
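
# Runs the project's static-analysis jobs (RuboCop, ESLint, HAML lint, gettext
# checks, etc.) and spreads them across parallel CI nodes based on their
# expected durations.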

class StaticAnalysis
  ALLOWED_WARNINGS = [
    # https://github.com/browserslist/browserslist/blob/d0ec62eb48c41c218478cd3ac28684df051cc865/node.js#L329
    # warns if caniuse-lite package is older than 6 months. Ignore this
    # warning message so that GitLab backports don't fail.
    "Browserslist: caniuse-lite is outdated. Please run next command `yarn upgrade`"
  ].freeze
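
  # Task pairs a lint command (as an argv array) with its expected duration in
  # seconds; NodeAssignment collects the tasks given to one CI node. The
  # durations are rough estimates used only to balance the nodes.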
  Task = Struct.new(:command, :duration) do
    def cmd
      command.join(' ')
    end
  end

  NodeAssignment = Struct.new(:index, :tasks) do
    def total_duration
      return 0 if tasks.empty?

      tasks.sum(&:duration)
    end
  end
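
  # Path handed to the license_finder task below; JiHu (JH) installations keep
  # their code under the jh/ subdirectory of the project root.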
  def self.project_path
    project_root = File.expand_path('..', __dir__)

    if Gitlab.jh?
      "#{project_root}/jh"
    else
      project_root
    end
  end

  # `gettext:updated_check` and `gitlab:sidekiq:sidekiq_queues_yml:check` will fail on FOSS installations
  # (e.g. gitlab-org/gitlab-foss) since they test against a single
  # file that is generated by an EE installation, which can
  # contain values that a FOSS installation won't find. To work
  # around this we only enable these tasks on EE installations.
  TASKS_WITH_DURATIONS_SECONDS = [
    Task.new(%w[bin/rake lint:haml], 562),
    # We need to disable the cache for this cop since it creates files under tmp/feature_flags/*.used,
    # and the cache would prevent these files from being created.
    Task.new(%w[bundle exec rubocop --only Gitlab/MarkUsedFeatureFlags --cache false], 400),
    (Gitlab.ee? ? Task.new(%w[bin/rake gettext:updated_check], 360) : nil),
    Task.new(%w[yarn run lint:eslint:all], 312),
    Task.new(%w[bundle exec rubocop --parallel], 60),
    Task.new(%w[yarn run lint:prettier], 160),
    Task.new(%w[bin/rake gettext:lint], 85),
    Task.new(%W[bundle exec license_finder --decisions-file config/dependency_decisions.yml --project-path #{project_path}], 20),
    Task.new(%w[bin/rake lint:static_verification], 35),
    Task.new(%w[bin/rake config_lint], 10),
    Task.new(%w[bin/rake gitlab:sidekiq:all_queues_yml:check], 15),
    (Gitlab.ee? ? Task.new(%w[bin/rake gitlab:sidekiq:sidekiq_queues_yml:check], 11) : nil),
    Task.new(%w[yarn run internal:stylelint], 8),
    Task.new(%w[scripts/lint-conflicts.sh], 1),
    Task.new(%w[yarn run block-dependencies], 1),
    Task.new(%w[scripts/lint-rugged], 1),
    Task.new(%w[scripts/gemfile_lock_changed.sh], 1)
  ].compact.freeze
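
  # Entry point for a single CI node: picks this node's share of the tasks
  # (based on CI_NODE_INDEX / CI_NODE_TOTAL), runs them through
  # Gitlab::Popen::Runner, and exits non-zero on failures (1) or on
  # unexpected warnings (2).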
  def run_tasks!(options = {})
    node_assignment = tasks_to_run((ENV['CI_NODE_TOTAL'] || 1).to_i)[(ENV['CI_NODE_INDEX'] || 1).to_i - 1]

    if options[:dry_run]
      puts "Dry-run mode!"
      return
    end

    static_analysis = Gitlab::Popen::Runner.new

    start_time = Time.now
    static_analysis.run(node_assignment.tasks.map(&:command)) do |command, &run|
      task = node_assignment.tasks.find { |task| task.command == command }

      puts
      puts "$ #{task.cmd}"

      result = run.call

      puts "==> Finished in #{result.duration} seconds (expected #{task.duration} seconds)"
      puts
    end

    puts
    puts '==================================================='
    puts "Node finished running all tasks in #{Time.now - start_time} seconds (expected #{node_assignment.total_duration} seconds)"
    puts
    puts

    if static_analysis.all_success_and_clean?
      puts 'All static analyses passed successfully.'
    elsif static_analysis.all_success?
      puts 'All static analyses passed successfully, but we have warnings:'
      puts

      emit_warnings(static_analysis)

      exit 2 if warning_count(static_analysis).nonzero?
    else
      puts 'Some static analyses failed:'

      emit_warnings(static_analysis)
      emit_errors(static_analysis)

      exit 1
    end
  end

  def emit_warnings(static_analysis)
    static_analysis.warned_results.each do |result|
      puts
      puts "**** #{result.cmd.join(' ')} had the following warning(s):"
      puts
      puts result.stderr
      puts
    end
  end

  def emit_errors(static_analysis)
    static_analysis.failed_results.each do |result|
      puts
      puts "**** #{result.cmd.join(' ')} failed with the following error(s):"
      puts
      puts result.stdout
      puts result.stderr
      puts
    end
  end
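
  # Warnings whose stripped output matches ALLOWED_WARNINGS exactly are
  # tolerated; anything else makes the job exit with status 2.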
  def warning_count(static_analysis)
    static_analysis.warned_results
      .count { |result| !ALLOWED_WARNINGS.include?(result.stderr.strip) }
  end
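
  # Splits TASKS_WITH_DURATIONS_SECONDS across `node_total` CI nodes using the
  # estimated durations: tasks are sorted longest-first and packed greedily so
  # that each node stays close to total_time / node_total. Returns the array
  # of NodeAssignments.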
  def tasks_to_run(node_total)
    total_time = TASKS_WITH_DURATIONS_SECONDS.sum(&:duration).to_f
    ideal_time_per_node = total_time / node_total
    tasks_by_duration_desc = TASKS_WITH_DURATIONS_SECONDS.sort_by { |a| -a.duration }
    nodes = Array.new(node_total) { |i| NodeAssignment.new(i + 1, []) }

    puts "Total expected time: #{total_time}; ideal time per job: #{ideal_time_per_node}.\n\n"
    puts "Tasks to distribute:"
    tasks_by_duration_desc.each { |task| puts "* #{task.cmd} (#{task.duration}s)" }

    # First pass: greedily assign tasks (longest first) without letting any
    # node exceed the ideal time per node
    puts "\nAssigning tasks optimally."
    distribute_tasks(tasks_by_duration_desc, nodes, ideal_time_per_node: ideal_time_per_node)

    # Second pass: assign whatever didn't fit in the first pass, starting with
    # the nodes that have the lowest total duration
    leftover_tasks = tasks_by_duration_desc - nodes.flat_map(&:tasks)

    if leftover_tasks.any?
      puts "\n\nAssigning remaining tasks: #{leftover_tasks.flat_map(&:cmd)}"
      distribute_tasks(leftover_tasks, nodes.sort_by { |node| node.total_duration })
    end

    nodes.each do |node|
      puts "\nExpected duration for node #{node.index}: #{node.total_duration} seconds"
      node.tasks.each { |task| puts "* #{task.cmd} (#{task.duration}s)" }
    end

    nodes
  end
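
  # First-fit assignment: each task goes to the first node that can take it.
  # When ideal_time_per_node is given, a node only "fits" if adding the task
  # keeps it within that budget; otherwise every node fits and the task lands
  # on the first node in the given order.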
  def distribute_tasks(tasks, nodes, ideal_time_per_node: nil)
    condition =
      if ideal_time_per_node
        ->(task, node, ideal_time_per_node) { (task.duration + node.total_duration) <= ideal_time_per_node }
      else
        ->(*) { true }
      end

    tasks.each do |task|
      nodes.each do |node|
        if condition.call(task, node, ideal_time_per_node)
          assign_task_to_node(tasks, node, task)
          break
        end
      end
    end
  end

  def assign_task_to_node(remaining_tasks, node, task)
    node.tasks << task
    puts "Assigning #{task.command} (#{task.duration}s) to node ##{node.index}. Node total duration: #{node.total_duration}s."
  end
end
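
# When executed directly (e.g. in CI), run this node's share of the tasks.
# Pass --dry-run to only print the computed task distribution without running
# anything.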
if $0 == __FILE__
  options = {}

  if ARGV.include?('--dry-run')
    options[:dry_run] = true
  end

  StaticAnalysis.new.run_tasks!(options)
end