# frozen_string_literal: true

module Ci
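  # Archives a job's live (incremental) trace into a permanent trace artifact,
  # either for a single job (#execute) or in batches of builds with stale live
  # traces (#batch_execute).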
  class ArchiveTraceService
    include ::Gitlab::ExclusiveLeaseHelpers

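    # EXCLUSIVE_LOCK_KEY - lease key ensuring only one #batch_execute runs at a time.
    # LOCK_TIMEOUT       - TTL of that exclusive lease.
    # LOOP_TIMEOUT       - wall-clock budget for one batch run, kept just below
    #                      LOCK_TIMEOUT so the loop stops before the lease expires.
    # LOOP_LIMIT         - maximum number of builds processed in one batch run.
    # BATCH_SIZE         - number of builds loaded per database batch.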
    EXCLUSIVE_LOCK_KEY = 'archive_trace_service:batch_execute:lock'
    LOCK_TIMEOUT = 56.minutes
    LOOP_TIMEOUT = 55.minutes
    LOOP_LIMIT = 2000
    BATCH_SIZE = 100

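    # Archives all builds with stale live traces, under an exclusive lease so
    # that concurrent batch runs are prevented. The loop bails out once
    # LOOP_TIMEOUT or LOOP_LIMIT is reached.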
    # rubocop: disable CodeReuse/ActiveRecord
    def batch_execute(worker_name:)
      start_time = Time.current
      in_lock(EXCLUSIVE_LOCK_KEY, ttl: LOCK_TIMEOUT, retries: 1) do
        Ci::Build.with_stale_live_trace.find_each(batch_size: BATCH_SIZE).with_index do |build, index|
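          # Stop before the exclusive lease (LOCK_TIMEOUT) would expire.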
          break if Time.current - start_time > LOOP_TIMEOUT

          if index > LOOP_LIMIT
            Sidekiq.logger.warn(class: worker_name, message: 'Loop limit reached.', job_id: build.id)
            break
          end

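          # Failures for a single build are logged and tracked inside #execute;
          # skip that build and continue with the rest of the batch.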
          begin
            execute(build, worker_name: worker_name)
          rescue StandardError
            next
          end
        end
      end
    end
    # rubocop: enable CodeReuse/ActiveRecord
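
    # Archives the live trace of a single job into a trace artifact. On success
    # the job's pending trace state is removed and, once the trace artifact is
    # present, the project's `archive_trace_hooks` integrations are executed.
    #
    # Typically invoked from a Sidekiq worker, for example (worker name shown
    # here is illustrative):
    #
    #   Ci::ArchiveTraceService.new.execute(job, worker_name: 'ArchiveTraceWorker')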
    def execute(job, worker_name:)
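      # Give up once every archival attempt has been used: log the failure and
      # try to clean up the stale live trace instead.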
      unless job.trace.archival_attempts_available?
        Sidekiq.logger.warn(class: worker_name, message: 'The job is out of archival attempts.', job_id: job.id)

        job.trace.attempt_archive_cleanup!
        return
      end

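      # Defer jobs that cannot be archived right now (for example, while a
      # previous attempt is still backing off).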
      unless job.trace.can_attempt_archival_now?
        Sidekiq.logger.warn(class: worker_name, message: 'The job can not be archived right now.', job_id: job.id)
        return
      end

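      # Archive the live trace into a persisted trace artifact and drop the
      # job's pending trace state.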
      job.trace.archive!
      job.remove_pending_state!

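      # Once the trace artifact is present, notify the project's integrations
      # subscribed to archive_trace_hooks.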
      if job.job_artifacts_trace.present?
        job.project.execute_integrations(Gitlab::DataBuilder::ArchiveTrace.build(job), :archive_trace_hooks)
      end
    rescue ::Gitlab::Ci::Trace::AlreadyArchivedError
      # It's already archived, thus we can safely ignore this exception.
    rescue StandardError => e
      job.trace.increment_archival_attempts!

      # Tracks this error with application logs, Sentry, and Prometheus.
      # If `archive!` keeps failing for over a week, that could incur data loss.
      # (See https://docs.gitlab.com/ee/administration/job_logs.html#new-incremental-logging-architecture for more details.)
      # In order to avoid interrupting the system, we do not raise an exception here.
      archive_error(e, job, worker_name)
    end

    private

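    # Prometheus counter of failed trace-archival attempts.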
    def failed_archive_counter
      @failed_archive_counter ||=
        Gitlab::Metrics.counter(:job_trace_archive_failed_total, "Counter of failed attempts of trace archiving")
    end

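    # Records a failed archival attempt in Prometheus, the Sidekiq log, and
    # error tracking; in production no exception is raised.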
    def archive_error(error, job, worker_name)
      failed_archive_counter.increment

      Sidekiq.logger.warn(
        class: worker_name,
        message: "Failed to archive trace. message: #{error.message}.",
        job_id: job.id
      )

      Gitlab::ErrorTracking.track_and_raise_for_dev_exception(
        error,
        issue_url: 'https://gitlab.com/gitlab-org/gitlab-foss/issues/51502',
        job_id: job.id
      )
    end
  end
end