# debian-mirror-gitlab/app/workers/bulk_import_worker.rb
# frozen_string_literal: true
class BulkImportWorker # rubocop:disable Scalability/IdempotentWorker
  include ApplicationWorker

  feature_category :importers

  tags :exclude_from_kubernetes

  sidekiq_options retry: false, dead: false

  # Delay before a follow-up BulkImportWorker job is scheduled.
  PERFORM_DELAY = 5.seconds
  # Maximum number of entities allowed to be in the :started state at once.
  DEFAULT_BATCH_SIZE = 5

  # Drives a BulkImport through its lifecycle: starts up to
  # DEFAULT_BATCH_SIZE created entities, re-enqueues itself while work
  # remains, and finishes (or fails) the import once every entity has
  # reached a terminal state.
  #
  # @param bulk_import_id [Integer] primary key of the BulkImport to process
  def perform(bulk_import_id)
    @bulk_import = BulkImport.find_by_id(bulk_import_id)

    return unless @bulk_import
    return if @bulk_import.finished? || @bulk_import.failed?
    return @bulk_import.fail_op! if all_entities_failed?
    return @bulk_import.finish! if all_entities_processed? && @bulk_import.started?
    # Do not start more jobs if max allowed are already running
    return re_enqueue if max_batch_size_exceeded?

    @bulk_import.start! if @bulk_import.created?

    created_entities.first(next_batch_size).each { |entity| start_entity(entity) }

    re_enqueue
  rescue StandardError => e
    Gitlab::ErrorTracking.track_exception(e, bulk_import_id: @bulk_import&.id)

    @bulk_import&.fail_op
  end

  private

  # All entities belonging to the import, memoized for the job's lifetime.
  def entities
    @entities ||= @bulk_import.entities
  end

  def started_entities
    entities.with_status(:started)
  end

  def created_entities
    entities.with_status(:created)
  end

  # True once every entity is in a terminal state (finished or failed).
  def all_entities_processed?
    entities.all? { |e| e.finished? || e.failed? }
  end

  def all_entities_failed?
    entities.all?(&:failed?)
  end

  def max_batch_size_exceeded?
    DEFAULT_BATCH_SIZE <= started_entities.count
  end

  # How many more entities may be started without exceeding the batch limit.
  def next_batch_size
    remaining = DEFAULT_BATCH_SIZE - started_entities.count
    remaining.negative? ? 0 : remaining
  end

  # Kicks off processing for a single entity: builds its pipeline
  # trackers, requests the source-side export, schedules the entity
  # worker, then marks the entity as started.
  def start_entity(entity)
    create_pipeline_tracker_for(entity)

    BulkImports::ExportRequestWorker.perform_async(entity.id)
    BulkImports::EntityWorker.perform_async(entity.id)

    entity.start!
  end

  # A new BulkImportWorker job is enqueued to either
  # - Process the new BulkImports::Entity created during import (e.g. for the subgroups)
  # - Or to mark the `bulk_import` as finished
  def re_enqueue
    BulkImportWorker.perform_in(PERFORM_DELAY, @bulk_import.id)
  end

  # Creates one tracker row per pipeline stage for the given entity.
  def create_pipeline_tracker_for(entity)
    BulkImports::Stage.pipelines.each do |stage, pipeline|
      entity.trackers.create!(stage: stage, pipeline_name: pipeline)
    end
  end
end