# frozen_string_literal: true

module Ci
  class Pipeline < ApplicationRecord
    extend Gitlab::Ci::Model
    include Ci::HasStatus
    include Importable
    include AfterCommitQueue
    include Presentable
    include Gitlab::Allowable
    include Gitlab::OptimisticLocking
    include Gitlab::Utils::StrongMemoize
    include AtomicInternalId
    include EnumWithNil
    include Ci::HasRef
    include ShaAttribute
    include FromUnion
    include UpdatedAtFilterable
    include EachBatch

    MAX_OPEN_MERGE_REQUESTS_REFS = 4

    PROJECT_ROUTE_AND_NAMESPACE_ROUTE = {
      project: [:project_feature, :route, { namespace: :route }]
    }.freeze

    CONFIG_EXTENSION = '.gitlab-ci.yml'
    DEFAULT_CONFIG_PATH = CONFIG_EXTENSION

    BridgeStatusError = Class.new(StandardError)

    sha_attribute :source_sha
    sha_attribute :target_sha

    # Ci::CreatePipelineService returns Ci::Pipeline so this is the only place
    # where we can pass additional information from the service. This accessor
    # is used for storing the processed CI YAML contents for linting purposes.
    # There is an open issue to address this:
    # https://gitlab.com/gitlab-org/gitlab/-/issues/259010
    attr_accessor :merged_yaml

    belongs_to :project, inverse_of: :all_pipelines
    belongs_to :user
    belongs_to :auto_canceled_by, class_name: 'Ci::Pipeline'
    belongs_to :pipeline_schedule, class_name: 'Ci::PipelineSchedule'
    belongs_to :merge_request, class_name: 'MergeRequest'
    belongs_to :external_pull_request
    belongs_to :ci_ref, class_name: 'Ci::Ref', foreign_key: :ci_ref_id, inverse_of: :pipelines

    has_internal_id :iid, scope: :project, presence: false,
      track_if: -> { !importing? },
      ensure_if: -> { !importing? },
      init: ->(pipeline, scope) do
        if pipeline
          pipeline.project&.all_pipelines&.maximum(:iid) || pipeline.project&.all_pipelines&.count
        elsif scope
          ::Ci::Pipeline.where(**scope).maximum(:iid)
        end
      end
    has_many :stages, -> { order(position: :asc) }, inverse_of: :pipeline
    has_many :statuses, class_name: 'CommitStatus', foreign_key: :commit_id, inverse_of: :pipeline
    has_many :latest_statuses_ordered_by_stage, -> { latest.order(:stage_idx, :stage) }, class_name: 'CommitStatus', foreign_key: :commit_id, inverse_of: :pipeline
    has_many :latest_statuses, -> { latest }, class_name: 'CommitStatus', foreign_key: :commit_id, inverse_of: :pipeline
    has_many :processables, class_name: 'Ci::Processable', foreign_key: :commit_id, inverse_of: :pipeline
    has_many :bridges, class_name: 'Ci::Bridge', foreign_key: :commit_id, inverse_of: :pipeline
    has_many :builds, foreign_key: :commit_id, inverse_of: :pipeline
    has_many :job_artifacts, through: :builds
    has_many :trigger_requests, dependent: :destroy, foreign_key: :commit_id # rubocop:disable Cop/ActiveRecordDependent
    has_many :variables, class_name: 'Ci::PipelineVariable'
    has_many :deployments, through: :builds
    has_many :environments, -> { distinct }, through: :deployments
    has_many :latest_builds, -> { latest }, foreign_key: :commit_id, inverse_of: :pipeline, class_name: 'Ci::Build'
    has_many :downloadable_artifacts, -> { not_expired.downloadable }, through: :latest_builds, source: :job_artifacts

    has_many :messages, class_name: 'Ci::PipelineMessage', inverse_of: :pipeline

    # Merge requests for which the current pipeline runs against
    # the merge request's latest commit.
    has_many :merge_requests_as_head_pipeline, foreign_key: "head_pipeline_id", class_name: 'MergeRequest'

    has_many :pending_builds, -> { pending }, foreign_key: :commit_id, class_name: 'Ci::Build', inverse_of: :pipeline
    has_many :failed_builds, -> { latest.failed }, foreign_key: :commit_id, class_name: 'Ci::Build', inverse_of: :pipeline
    has_many :retryable_builds, -> { latest.failed_or_canceled.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build', inverse_of: :pipeline
    has_many :cancelable_statuses, -> { cancelable }, foreign_key: :commit_id, class_name: 'CommitStatus'
    has_many :manual_actions, -> { latest.manual_actions.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build', inverse_of: :pipeline
    has_many :scheduled_actions, -> { latest.scheduled_actions.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build', inverse_of: :pipeline

    has_many :auto_canceled_pipelines, class_name: 'Ci::Pipeline', foreign_key: 'auto_canceled_by_id'
    has_many :auto_canceled_jobs, class_name: 'CommitStatus', foreign_key: 'auto_canceled_by_id'
    has_many :sourced_pipelines, class_name: 'Ci::Sources::Pipeline', foreign_key: :source_pipeline_id

    has_one :source_pipeline, class_name: 'Ci::Sources::Pipeline', inverse_of: :pipeline

    has_one :chat_data, class_name: 'Ci::PipelineChatData'

    has_many :triggered_pipelines, through: :sourced_pipelines, source: :pipeline
    has_many :child_pipelines, -> { merge(Ci::Sources::Pipeline.same_project) }, through: :sourced_pipelines, source: :pipeline
    has_one :triggered_by_pipeline, through: :source_pipeline, source: :source_pipeline
    has_one :parent_pipeline, -> { merge(Ci::Sources::Pipeline.same_project) }, through: :source_pipeline, source: :source_pipeline
    has_one :source_job, through: :source_pipeline, source: :source_job
    has_one :source_bridge, through: :source_pipeline, source: :source_bridge
    has_one :pipeline_config, class_name: 'Ci::PipelineConfig', inverse_of: :pipeline

    has_many :daily_build_group_report_results, class_name: 'Ci::DailyBuildGroupReportResult', foreign_key: :last_pipeline_id
    has_many :latest_builds_report_results, through: :latest_builds, source: :report_results
    has_many :pipeline_artifacts, class_name: 'Ci::PipelineArtifact', inverse_of: :pipeline, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent

    accepts_nested_attributes_for :variables, reject_if: :persisted?

    delegate :full_path, to: :project, prefix: true
    validates :sha, presence: { unless: :importing? }
    validates :ref, presence: { unless: :importing? }
    validates :tag, inclusion: { in: [false], if: :merge_request? }

    validates :external_pull_request, presence: { if: :external_pull_request_event? }
    validates :external_pull_request, absence: { unless: :external_pull_request_event? }
    validates :tag, inclusion: { in: [false], if: :external_pull_request_event? }

    validates :status, presence: { unless: :importing? }
    validate :valid_commit_sha, unless: :importing?
    validates :source, exclusion: { in: %w(unknown), unless: :importing? }, on: :create

    after_create :keep_around_commits, unless: :importing?

    # We use `Enums::Ci::Pipeline.sources` here so that EE can more easily extend
    # this `Hash` with new values.
    enum_with_nil source: Enums::Ci::Pipeline.sources

    enum_with_nil config_source: Enums::Ci::Pipeline.config_sources

    # We use `Enums::Ci::Pipeline.failure_reasons` here so that EE can more easily
    # extend this `Hash` with new values.
    enum failure_reason: Enums::Ci::Pipeline.failure_reasons

    enum locked: { unlocked: 0, artifacts_locked: 1 }
    state_machine :status, initial: :created do
      event :enqueue do
        transition [:created, :manual, :waiting_for_resource, :preparing, :skipped, :scheduled] => :pending
        transition [:success, :failed, :canceled] => :running

        # this is needed to ensure tests are covered
        transition [:running] => :running
      end

      event :request_resource do
        transition any - [:waiting_for_resource] => :waiting_for_resource
      end

      event :prepare do
        transition any - [:preparing] => :preparing
      end

      event :run do
        transition any - [:running] => :running
      end

      event :skip do
        transition any - [:skipped] => :skipped
      end

      event :drop do
        transition any - [:failed] => :failed
      end

      event :succeed do
        transition any - [:success] => :success
      end

      event :cancel do
        transition any - [:canceled] => :canceled
      end

      event :block do
        transition any - [:manual] => :manual
      end

      event :delay do
        transition any - [:scheduled] => :scheduled
      end

      # IMPORTANT
      # Do not add any operations to this state_machine
      # Create a separate worker for each new operation

      before_transition [:created, :waiting_for_resource, :preparing, :pending] => :running do |pipeline|
        pipeline.started_at = Time.current
      end

      before_transition any => [:success, :failed, :canceled] do |pipeline|
        pipeline.finished_at = Time.current
        pipeline.update_duration
      end

      before_transition any => [:manual] do |pipeline|
        pipeline.update_duration
      end

      before_transition canceled: any - [:canceled] do |pipeline|
        pipeline.auto_canceled_by = nil
      end

      before_transition any => :failed do |pipeline, transition|
        transition.args.first.try do |reason|
          pipeline.failure_reason = reason
        end
      end
      after_transition [:created, :waiting_for_resource, :preparing, :pending] => :running do |pipeline|
        pipeline.run_after_commit { PipelineMetricsWorker.perform_async(pipeline.id) }
      end

      after_transition any => [:success] do |pipeline|
        pipeline.run_after_commit { PipelineMetricsWorker.perform_async(pipeline.id) }
      end

      after_transition [:created, :waiting_for_resource, :preparing, :pending, :running] => :success do |pipeline|
        # We wait a little bit to ensure that all BuildFinishedWorkers finish first
        # because this is where some metrics like code coverage are parsed and stored
        # in CI build records, which the daily build metrics worker relies on.
        pipeline.run_after_commit { Ci::DailyBuildGroupReportResultsWorker.perform_in(10.minutes, pipeline.id) }
      end

      after_transition do |pipeline, transition|
        next if transition.loopback?

        pipeline.run_after_commit do
          PipelineHooksWorker.perform_async(pipeline.id)
          ExpirePipelineCacheWorker.perform_async(pipeline.id) if pipeline.cacheable?
        end
      end

      after_transition any => ::Ci::Pipeline.completed_statuses do |pipeline|
        pipeline.run_after_commit do
          pipeline.persistent_ref.delete

          pipeline.all_merge_requests.each do |merge_request|
            next unless merge_request.auto_merge_enabled?

            AutoMergeProcessWorker.perform_async(merge_request.id)
          end

          if pipeline.auto_devops_source?
            self.class.auto_devops_pipelines_completed_total.increment(status: pipeline.status)
          end
        end
      end

      after_transition any => ::Ci::Pipeline.completed_statuses do |pipeline|
        pipeline.run_after_commit do
          ::Ci::Pipelines::CreateArtifactWorker.perform_async(pipeline.id)
        end
      end

      after_transition any => ::Ci::Pipeline.completed_statuses do |pipeline|
        next unless pipeline.bridge_waiting?

        pipeline.run_after_commit do
          ::Ci::PipelineBridgeStatusWorker.perform_async(pipeline.id)
        end
      end

      after_transition any => any do |pipeline|
        next unless Feature.enabled?(:jira_sync_builds, pipeline.project)

        pipeline.run_after_commit do
          # Passing the seq-id ensures this is idempotent
          seq_id = ::Atlassian::JiraConnect::Client.generate_update_sequence_id
          ::JiraConnect::SyncBuildsWorker.perform_async(pipeline.id, seq_id)
        end
      end

      after_transition any => ::Ci::Pipeline.completed_statuses do |pipeline|
        pipeline.run_after_commit do
          ::Ci::TestFailureHistoryService.new(pipeline).async.perform_if_needed # rubocop: disable CodeReuse/ServiceClass
        end
      end

      after_transition any => [:success, :failed] do |pipeline|
        ref_status = pipeline.ci_ref&.update_status_by!(pipeline)

        pipeline.run_after_commit do
          PipelineNotificationWorker.perform_async(pipeline.id, ref_status: ref_status)
        end
      end

      after_transition any => [:failed] do |pipeline|
        next unless pipeline.auto_devops_source?

        pipeline.run_after_commit { AutoDevops::DisableWorker.perform_async(pipeline.id) }
      end
    end
    scope :internal, -> { where(source: internal_sources) }
    scope :no_child, -> { where.not(source: :parent_pipeline) }
    scope :ci_sources, -> { where(source: Enums::Ci::Pipeline.ci_sources.values) }
    scope :ci_branch_sources, -> { where(source: Enums::Ci::Pipeline.ci_branch_sources.values) }
    scope :ci_and_parent_sources, -> { where(source: Enums::Ci::Pipeline.ci_and_parent_sources.values) }
    scope :for_user, -> (user) { where(user: user) }
    scope :for_sha, -> (sha) { where(sha: sha) }
    scope :for_source_sha, -> (source_sha) { where(source_sha: source_sha) }
    scope :for_sha_or_source_sha, -> (sha) { for_sha(sha).or(for_source_sha(sha)) }
    scope :for_ref, -> (ref) { where(ref: ref) }
    scope :for_branch, -> (branch) { for_ref(branch).where(tag: false) }
    scope :for_id, -> (id) { where(id: id) }
    scope :for_iid, -> (iid) { where(iid: iid) }
    scope :for_project, -> (project_id) { where(project_id: project_id) }
    scope :created_after, -> (time) { where('ci_pipelines.created_at > ?', time) }
    scope :created_before_id, -> (id) { where('ci_pipelines.id < ?', id) }
    scope :before_pipeline, -> (pipeline) { created_before_id(pipeline.id).outside_pipeline_family(pipeline) }

    scope :outside_pipeline_family, ->(pipeline) do
      where.not(id: pipeline.same_family_pipeline_ids)
    end

    scope :with_reports, -> (reports_scope) do
      where('EXISTS (?)', ::Ci::Build.latest.with_reports(reports_scope).where('ci_pipelines.id=ci_builds.commit_id').select(1))
    end

    scope :with_only_interruptible_builds, -> do
      where('NOT EXISTS (?)',
        Ci::Build.where('ci_builds.commit_id = ci_pipelines.id')
                 .with_status(:running, :success, :failed)
                 .not_interruptible
      )
    end

    # Returns the pipelines associated with the given merge request.
    # In general, please use `Ci::PipelinesForMergeRequestFinder` instead,
    # which checks the permissions of the actor.
    scope :triggered_by_merge_request, -> (merge_request) do
      where(source: :merge_request_event,
            merge_request: merge_request,
            project: [merge_request.source_project, merge_request.target_project])
    end
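
    # Illustrative usage of the scope above (not part of the original source);
    # `merge_request` is assumed to be a MergeRequest record visible to the caller:
    #
    #   Ci::Pipeline.triggered_by_merge_request(merge_request).order(id: :desc)
    #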
    # Returns the pipelines in descending order (= newest first), optionally
    # limited to a number of references.
    #
    # ref - The name (or names) of the branch(es)/tag(s) to limit the list of
    #       pipelines to.
    # sha - The commit SHA (or multiple SHAs) to limit the list of pipelines to.
    # limit - This limits a backlog search, defaults to 100.
    def self.newest_first(ref: nil, sha: nil, limit: 100)
      relation = order(id: :desc)
      relation = relation.where(ref: ref) if ref
      relation = relation.where(sha: sha) if sha

      if limit
        ids = relation.limit(limit).select(:id)
        relation = relation.where(id: ids)
      end

      relation
    end
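
    # Illustrative usage (not part of the original source). Assuming a `project`
    # with CI pipelines, this fetches the newest pipelines for a branch, capped
    # by the backlog limit:
    #
    #   project.all_pipelines.newest_first(ref: 'master', limit: 10)
    #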
    def self.latest_status(ref = nil)
      newest_first(ref: ref).pluck(:status).first
    end

    def self.latest_successful_for_ref(ref)
      newest_first(ref: ref).success.take
    end

    def self.latest_successful_for_sha(sha)
      newest_first(sha: sha).success.take
    end

    def self.latest_successful_for_refs(refs)
      relation = newest_first(ref: refs).success

      relation.each_with_object({}) do |pipeline, hash|
        hash[pipeline.ref] ||= pipeline
      end
    end

    def self.latest_running_for_ref(ref)
      newest_first(ref: ref).running.take
    end

    def self.latest_failed_for_ref(ref)
      newest_first(ref: ref).failed.take
    end

    # Returns a Hash containing the latest pipeline for every given
    # commit.
    #
    # The keys of this Hash are the commit SHAs, the values the pipelines.
    #
    # commits - The list of commit SHAs to get the pipelines for.
    # ref - The ref to scope the data to (e.g. "master"). If the ref is not
    #       given we simply get the latest pipelines for the commits, regardless
    #       of what refs the pipelines belong to.
    def self.latest_pipeline_per_commit(commits, ref = nil)
      p1 = arel_table
      p2 = arel_table.alias

      # This LEFT JOIN will filter out all but the newest row for every
      # combination of (project_id, sha) or (project_id, sha, ref) if a ref is
      # given.
      cond = p1[:sha].eq(p2[:sha])
        .and(p1[:project_id].eq(p2[:project_id]))
        .and(p1[:id].lt(p2[:id]))

      cond = cond.and(p1[:ref].eq(p2[:ref])) if ref
      join = p1.join(p2, Arel::Nodes::OuterJoin).on(cond)

      relation = where(sha: commits)
        .where(p2[:id].eq(nil))
        .joins(join.join_sources)

      relation = relation.where(ref: ref) if ref

      relation.each_with_object({}) do |pipeline, hash|
        hash[pipeline.sha] = pipeline
      end
    end
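
    # Illustrative usage (not part of the original source); `shas` is assumed
    # to be an Array of commit SHA strings from the project's default branch:
    #
    #   pipelines_by_sha = project.all_pipelines.latest_pipeline_per_commit(shas, 'master')
    #   pipelines_by_sha[shas.first] # => the latest Ci::Pipeline for that commit, or nil
    #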
    def self.latest_successful_ids_per_project
      success.group(:project_id).select('max(id) as id')
    end

    def self.last_finished_for_ref_id(ci_ref_id)
      where(ci_ref_id: ci_ref_id).ci_sources.finished.order(id: :desc).select(:id).take
    end

    def self.truncate_sha(sha)
      sha[0...8]
    end

    def self.total_duration
      where.not(duration: nil).sum(:duration)
    end

    def self.internal_sources
      sources.reject { |source| source == "external" }.values
    end

    def self.bridgeable_statuses
      ::Ci::Pipeline::AVAILABLE_STATUSES - %w[created waiting_for_resource preparing pending]
    end

    def self.auto_devops_pipelines_completed_total
      @auto_devops_pipelines_completed_total ||= Gitlab::Metrics.counter(:auto_devops_pipelines_completed_total, 'Number of completed auto devops pipelines')
    end

    def stages_count
      statuses.select(:stage).distinct.count
    end

    def total_size
      statuses.count(:id)
    end

    def stages_names
      statuses.order(:stage_idx).distinct
        .pluck(:stage, :stage_idx).map(&:first)
    end

    def legacy_stage(name)
      stage = Ci::LegacyStage.new(self, name: name)
      stage unless stage.statuses_count == 0
    end

    def ref_exists?
      project.repository.ref_exists?(git_ref)
    rescue Gitlab::Git::Repository::NoRepository
      false
    end

    def legacy_stages_using_composite_status
      stages = latest_statuses_ordered_by_stage.group_by(&:stage)

      stages.map do |stage_name, jobs|
        composite_status = Gitlab::Ci::Status::Composite
          .new(jobs)

        Ci::LegacyStage.new(self,
                            name: stage_name,
                            status: composite_status.status,
                            warnings: composite_status.warnings?)
      end
    end

    def triggered_pipelines_with_preloads
      triggered_pipelines.preload(:source_job)
    end

    # TODO: Remove usage of this method in templates
    def legacy_stages
      legacy_stages_using_composite_status
    end
    def valid_commit_sha
      if self.sha == Gitlab::Git::BLANK_SHA
        self.errors.add(:sha, " cant be 00000000 (branch removal)")
      end
    end

    def git_author_name
      strong_memoize(:git_author_name) do
        commit.try(:author_name)
      end
    end

    def git_author_email
      strong_memoize(:git_author_email) do
        commit.try(:author_email)
      end
    end

    def git_commit_message
      strong_memoize(:git_commit_message) do
        commit.try(:message)
      end
    end

    def git_commit_title
      strong_memoize(:git_commit_title) do
        commit.try(:title)
      end
    end

    def git_commit_full_title
      strong_memoize(:git_commit_full_title) do
        commit.try(:full_title)
      end
    end

    def git_commit_description
      strong_memoize(:git_commit_description) do
        commit.try(:description)
      end
    end

    def git_commit_timestamp
      strong_memoize(:git_commit_timestamp) do
        commit.try(:timestamp)
      end
    end

    def before_sha
      super || Gitlab::Git::BLANK_SHA
    end

    def short_sha
      Ci::Pipeline.truncate_sha(sha)
    end

    # NOTE: This is loaded lazily and will never be nil, even if the commit
    # cannot be found.
    #
    # Use constructs like: `pipeline.commit.present?`
    def commit
      @commit ||= Commit.lazy(project, sha)
    end
    def stuck?
      pending_builds.any?(&:stuck?)
    end

    def retryable?
      retryable_builds.any?
    end

    def cancelable?
      cancelable_statuses.any?
    end

    def auto_canceled?
      canceled? && auto_canceled_by_id?
    end

    def cancel_running(retries: nil)
      retry_optimistic_lock(cancelable_statuses, retries) do |cancelable|
        cancelable.find_each do |job|
          yield(job) if block_given?
          job.cancel
        end
      end
    end

    def auto_cancel_running(pipeline, retries: nil)
      update(auto_canceled_by: pipeline)

      cancel_running(retries: retries) do |job|
        job.auto_canceled_by = pipeline
      end
    end

    # rubocop: disable CodeReuse/ServiceClass
    def retry_failed(current_user)
      Ci::RetryPipelineService.new(project, current_user)
        .execute(self)
    end
    # rubocop: enable CodeReuse/ServiceClass

    def lazy_ref_commit
      return unless ::Gitlab::Ci::Features.pipeline_latest?

      BatchLoader.for(ref).batch do |refs, loader|
        next unless project.repository_exists?

        project.repository.list_commits_by_ref_name(refs).then do |commits|
          commits.each { |key, commit| loader.call(key, commits[key]) }
        end
      end
    end
    def latest?
      return false unless git_ref && commit.present?

      unless ::Gitlab::Ci::Features.pipeline_latest?
        return project.commit(git_ref) == commit
      end

      return false if lazy_ref_commit.nil?

      lazy_ref_commit.id == commit.id
    end

    def retried
      @retried ||= (statuses.order(id: :desc) - latest_statuses)
    end

    def coverage
      coverage_array = latest_statuses.map(&:coverage).compact
      if coverage_array.size >= 1
        '%.2f' % (coverage_array.reduce(:+) / coverage_array.size)
      end
    end

    def batch_lookup_report_artifact_for_file_type(file_type)
      latest_report_artifacts
        .values_at(*::Ci::JobArtifact.associated_file_types_for(file_type.to_s))
        .flatten
        .compact
        .last
    end

    # This batch loads the latest reports for each CI job artifact
    # type (e.g. sast, dast, etc.) in a single SQL query to eliminate
    # the need to do N different `job_artifacts.where(file_type:
    # X).last` calls.
    #
    # Returns a hash of file type => array of 1 job artifact
    def latest_report_artifacts
      ::Gitlab::SafeRequestStore.fetch("pipeline:#{self.id}:latest_report_artifacts") do
        # Note we use read_attribute(:project_id) to read the project
        # ID instead of self.project_id. The latter appears to load
        # the Project model. This extra filter doesn't appear to
        # affect the query plan but is included to ensure we don't
        # leak the wrong information.
        ::Ci::JobArtifact.where(
          id: job_artifacts.with_reports
            .select('max(ci_job_artifacts.id) as id')
            .where(project_id: self.read_attribute(:project_id))
            .group(:file_type)
        )
        .preload(:job)
        .group_by(&:file_type)
      end
    end
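
    # Illustrative usage (not part of the original source); assumes the
    # pipeline has at least one build that produced a codequality report:
    #
    #   artifact = pipeline.batch_lookup_report_artifact_for_file_type(:codequality)
    #   artifact&.file_type # => "codequality"
    #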
    def has_kubernetes_active?
      project.deployment_platform&.active?
    end

    def freeze_period?
      Ci::FreezePeriodStatus.new(project: project).execute
    end

    def has_warnings?
      number_of_warnings > 0
    end

    def number_of_warnings
      BatchLoader.for(id).batch(default_value: 0) do |pipeline_ids, loader|
        ::Ci::Build.where(commit_id: pipeline_ids)
          .latest
          .failed_but_allowed
          .group(:commit_id)
          .count
          .each { |id, amount| loader.call(id, amount) }
      end
    end

    def needs_processing?
      statuses
        .where(processed: [false, nil])
        .latest
        .exists?
    end

    # TODO: this logic is duplicated in Pipeline::Chain::Config::Content
    # we should persist this in `ci_pipelines.config_path`
    def config_path
      return unless repository_source? || unknown_source?

      project.ci_config_path_or_default
    end

    def has_yaml_errors?
      yaml_errors.present?
    end

    def add_error_message(content)
      add_message(:error, content)
    end

    def add_warning_message(content)
      add_message(:warning, content)
    end

    # We can't use `messages.error` scope here because messages should also be
    # read when the pipeline is not persisted. Using the scope will return no
    # results as it would query persisted data.
    def error_messages
      messages.select(&:error?)
    end

    def warning_messages(limit: nil)
      messages.select(&:warning?).tap do |warnings|
        break warnings.take(limit) if limit
      end
    end
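
    # Illustrative usage (not part of the original source); assumes the
    # `store_pipeline_messages` feature is enabled for the project. Messages
    # added this way are readable even before the pipeline is persisted:
    #
    #   pipeline.add_error_message('Invalid configuration')
    #   pipeline.error_messages.map(&:content) # => ["Invalid configuration"]
    #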
    # Manually set the notes for a Ci::Pipeline
    # There is no ActiveRecord relation between Ci::Pipeline and notes
    # as they are related to a commit sha. This method helps to import
    # them using the +Gitlab::ImportExport::Project::RelationFactory+ class.
    def notes=(notes)
      notes.each do |note|
        note[:id] = nil
        note[:commit_id] = sha
        note[:noteable_id] = self['id']
        note.save!
      end
    end

    def notes
      project.notes.for_commit_id(sha)
    end

    def set_status(new_status)
      retry_optimistic_lock(self) do
        case new_status
        when 'created' then nil
        when 'waiting_for_resource' then request_resource
        when 'preparing' then prepare
        when 'pending' then enqueue
        when 'running' then run
        when 'success' then succeed
        when 'failed' then drop
        when 'canceled' then cancel
        when 'skipped' then skip
        when 'manual' then block
        when 'scheduled' then delay
        else
          raise Ci::HasStatus::UnknownStatusError,
                "Unknown status `#{new_status}`"
        end
      end
    end

    def protected_ref?
      strong_memoize(:protected_ref) { project.protected_for?(git_ref) }
    end

    def legacy_trigger
      strong_memoize(:legacy_trigger) { trigger_requests.first }
    end

    def persisted_variables
      Gitlab::Ci::Variables::Collection.new.tap do |variables|
        break variables unless persisted?

        variables.append(key: 'CI_PIPELINE_ID', value: id.to_s)
        variables.append(key: 'CI_PIPELINE_URL', value: Gitlab::Routing.url_helpers.project_pipeline_url(project, self))
      end
    end
    def predefined_variables
      Gitlab::Ci::Variables::Collection.new.tap do |variables|
        variables.append(key: 'CI_PIPELINE_IID', value: iid.to_s)
        variables.append(key: 'CI_PIPELINE_SOURCE', value: source.to_s)
        variables.append(key: 'CI_CONFIG_PATH', value: config_path)
        variables.concat(predefined_commit_variables)

        if merge_request?
          variables.append(key: 'CI_MERGE_REQUEST_EVENT_TYPE', value: merge_request_event_type.to_s)
          variables.append(key: 'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA', value: source_sha.to_s)
          variables.append(key: 'CI_MERGE_REQUEST_TARGET_BRANCH_SHA', value: target_sha.to_s)

          diff = self.merge_request_diff
          if diff.present?
            variables.append(key: 'CI_MERGE_REQUEST_DIFF_ID', value: diff.id.to_s)
            variables.append(key: 'CI_MERGE_REQUEST_DIFF_BASE_SHA', value: diff.base_commit_sha)
          end

          variables.concat(merge_request.predefined_variables)
        end

        if Gitlab::Ci::Features.pipeline_open_merge_requests?(project) && open_merge_requests_refs.any?
          variables.append(key: 'CI_OPEN_MERGE_REQUESTS', value: open_merge_requests_refs.join(','))
        end

        variables.append(key: 'CI_KUBERNETES_ACTIVE', value: 'true') if has_kubernetes_active?
        variables.append(key: 'CI_DEPLOY_FREEZE', value: 'true') if freeze_period?

        if external_pull_request_event? && external_pull_request
          variables.concat(external_pull_request.predefined_variables)
        end
      end
    end

    def predefined_commit_variables
      Gitlab::Ci::Variables::Collection.new.tap do |variables|
        variables.append(key: 'CI_COMMIT_SHA', value: sha)
        variables.append(key: 'CI_COMMIT_SHORT_SHA', value: short_sha)
        variables.append(key: 'CI_COMMIT_BEFORE_SHA', value: before_sha)
        variables.append(key: 'CI_COMMIT_REF_NAME', value: source_ref)
        variables.append(key: 'CI_COMMIT_REF_SLUG', value: source_ref_slug)
        variables.append(key: 'CI_COMMIT_BRANCH', value: ref) if branch?
        variables.append(key: 'CI_COMMIT_TAG', value: ref) if tag?
        variables.append(key: 'CI_COMMIT_MESSAGE', value: git_commit_message.to_s)
        variables.append(key: 'CI_COMMIT_TITLE', value: git_commit_full_title.to_s)
        variables.append(key: 'CI_COMMIT_DESCRIPTION', value: git_commit_description.to_s)
        variables.append(key: 'CI_COMMIT_REF_PROTECTED', value: (!!protected_ref?).to_s)
        variables.append(key: 'CI_COMMIT_TIMESTAMP', value: git_commit_timestamp.to_s)

        # legacy variables
        variables.append(key: 'CI_BUILD_REF', value: sha)
        variables.append(key: 'CI_BUILD_BEFORE_SHA', value: before_sha)
        variables.append(key: 'CI_BUILD_REF_NAME', value: source_ref)
        variables.append(key: 'CI_BUILD_REF_SLUG', value: source_ref_slug)
        variables.append(key: 'CI_BUILD_TAG', value: ref) if tag?
      end
    end
    def queued_duration
      return unless started_at

      seconds = (started_at - created_at).to_i
      seconds unless seconds == 0
    end

    def update_duration
      return unless started_at

      self.duration = Gitlab::Ci::Pipeline::Duration.from_pipeline(self)
    end

    def execute_hooks
      project.execute_hooks(pipeline_data, :pipeline_hooks) if project.has_active_hooks?(:pipeline_hooks)
      project.execute_services(pipeline_data, :pipeline_hooks) if project.has_active_services?(:pipeline_hooks)
    end

    # All the merge requests that the current pipeline runs or ran against
    def all_merge_requests
      @all_merge_requests ||=
        if merge_request?
          MergeRequest.where(id: merge_request_id)
        else
          MergeRequest.where(source_project_id: project_id, source_branch: ref)
            .by_commit_sha(sha)
        end
    end

    def all_merge_requests_by_recency
      all_merge_requests.order(id: :desc)
    end

    # This returns a list of MRs that point
    # to the same source project/branch
    def related_merge_requests
      if merge_request?
        # We look for all other MRs that this branch might be pointing to
        MergeRequest.where(
          source_project_id: merge_request.source_project_id,
          source_branch: merge_request.source_branch)
      else
        MergeRequest.where(
          source_project_id: project_id,
          source_branch: ref)
      end
    end

    # We cannot use `all_merge_requests` due to a race condition.
    # This returns a list of at most 4 open MRs.
    def open_merge_requests_refs
      strong_memoize(:open_merge_requests_refs) do
        # We ensure that the triggering user can actually read each merge request
        related_merge_requests
          .opened
          .limit(MAX_OPEN_MERGE_REQUESTS_REFS)
          .order(id: :desc)
          .preload(:target_project)
          .select { |mr| can?(user, :read_merge_request, mr) }
          .map { |mr| mr.to_reference(project, full: true) }
      end
    end
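
    # Illustrative result shape (not part of the original source); the refs are
    # full merge request references, as exposed via CI_OPEN_MERGE_REQUESTS:
    #
    #   pipeline.open_merge_requests_refs
    #   # => ["group/project!12", "group/other-project!7"]
    #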
    def same_family_pipeline_ids
      ::Gitlab::Ci::PipelineObjectHierarchy.new(
        self.class.where(id: root_ancestor), options: { same_project: true }
      ).base_and_descendants.select(:id)
    end

    def build_with_artifacts_in_self_and_descendants(name)
      builds_in_self_and_descendants
        .ordered_by_pipeline # find job in hierarchical order
        .with_downloadable_artifacts
        .find_by_name(name)
    end

    def builds_in_self_and_descendants
      Ci::Build.latest.where(pipeline: self_and_descendants)
    end

    # Without using `unscoped`, the caller scope is also included in the query.
    # Using `unscoped` here will be redundant after Rails 6.1
    def self_and_descendants
      ::Gitlab::Ci::PipelineObjectHierarchy
        .new(self.class.unscoped.where(id: id), options: { same_project: true })
        .base_and_descendants
    end

    def root_ancestor
      return self unless child?

      Gitlab::Ci::PipelineObjectHierarchy
        .new(self.class.unscoped.where(id: id), options: { same_project: true })
        .base_and_ancestors(hierarchy_order: :desc)
        .first
    end

    def bridge_triggered?
      source_bridge.present?
    end

    def bridge_waiting?
      source_bridge&.dependent?
    end

    def child?
      parent_pipeline? && # child pipelines have `parent_pipeline` source
        parent_pipeline.present?
    end

    def parent?
      child_pipelines.exists?
    end

    def created_successfully?
      persisted? && failure_reason.blank?
    end
    def detailed_status(current_user)
      Gitlab::Ci::Status::Pipeline::Factory
        .new(self, current_user)
        .fabricate!
    end

    def find_job_with_archive_artifacts(name)
      builds.latest.with_downloadable_artifacts.find_by_name(name)
    end

    def latest_builds_with_artifacts
      # We purposely cast the builds to an Array here. Because we always use the
      # rows if there are more than 0, this prevents us from having to run two
      # queries: one to get the count and one to get the rows.
      @latest_builds_with_artifacts ||= builds.latest.with_artifacts_not_expired.to_a
    end

    def latest_report_builds(reports_scope = ::Ci::JobArtifact.with_reports)
      builds.latest.with_reports(reports_scope)
    end

    def latest_test_report_builds
      latest_report_builds(Ci::JobArtifact.test_reports).preload(:project)
    end

    def builds_with_coverage
      builds.latest.with_coverage
    end

    def builds_with_failed_tests(limit: nil)
      latest_test_report_builds.failed.limit(limit)
    end

    def has_reports?(reports_scope)
      complete? && latest_report_builds(reports_scope).exists?
    end

    def has_coverage_reports?
      pipeline_artifacts&.has_code_coverage?
    end

    def can_generate_coverage_reports?
      has_reports?(Ci::JobArtifact.coverage_reports)
    end

    def test_report_summary
      strong_memoize(:test_report_summary) do
        Gitlab::Ci::Reports::TestReportSummary.new(latest_builds_report_results)
      end
    end

    def test_reports
      Gitlab::Ci::Reports::TestReports.new.tap do |test_reports|
        latest_test_report_builds.find_each do |build|
          build.collect_test_reports!(test_reports)
        end
      end
    end
    def accessibility_reports
      Gitlab::Ci::Reports::AccessibilityReports.new.tap do |accessibility_reports|
        latest_report_builds(Ci::JobArtifact.accessibility_reports).each do |build|
          build.collect_accessibility_reports!(accessibility_reports)
        end
      end
    end

    def coverage_reports
      Gitlab::Ci::Reports::CoverageReports.new.tap do |coverage_reports|
        latest_report_builds(Ci::JobArtifact.coverage_reports).includes(:project).find_each do |build|
          build.collect_coverage_reports!(coverage_reports)
        end
      end
    end

    def codequality_reports
      Gitlab::Ci::Reports::CodequalityReports.new.tap do |codequality_reports|
        latest_report_builds(Ci::JobArtifact.codequality_reports).each do |build|
          build.collect_codequality_reports!(codequality_reports)
        end
      end
    end

    def terraform_reports
      ::Gitlab::Ci::Reports::TerraformReports.new.tap do |terraform_reports|
        latest_report_builds(::Ci::JobArtifact.terraform_reports).each do |build|
          build.collect_terraform_reports!(terraform_reports)
        end
      end
    end

    def has_archive_artifacts?
      complete? && builds.latest.with_existing_job_artifacts(Ci::JobArtifact.archive.or(Ci::JobArtifact.metadata)).exists?
    end

    def has_exposed_artifacts?
      complete? && builds.latest.with_exposed_artifacts.exists?
    end

    def branch_updated?
      strong_memoize(:branch_updated) do
        push_details.branch_updated?
      end
    end

    # Returns the modified paths.
    #
    # The returned value is
    # * Array: List of modified paths that should be evaluated
    # * nil: Modified paths cannot be evaluated
    def modified_paths
      strong_memoize(:modified_paths) do
        if merge_request?
          merge_request.modified_paths
        elsif branch_updated?
          push_details.modified_paths
        end
      end
    end
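
    # Illustrative usage (not part of the original source); for a merge request
    # pipeline this delegates to the merge request, otherwise to the push details:
    #
    #   pipeline.modified_paths # => ["app/models/ci/pipeline.rb", "README.md"] or nil
    #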
    def all_worktree_paths
      strong_memoize(:all_worktree_paths) do
        project.repository.ls_files(sha)
      end
    end

    def top_level_worktree_paths
      strong_memoize(:top_level_worktree_paths) do
        project.repository.tree(sha).blobs.map(&:path)
      end
    end

    def default_branch?
      ref == project.default_branch
    end

    def merge_request?
      merge_request_id.present?
    end

    def detached_merge_request_pipeline?
      merge_request? && target_sha.nil?
    end

    def legacy_detached_merge_request_pipeline?
      detached_merge_request_pipeline? && !merge_request_ref?
    end

    def merge_request_pipeline?
      merge_request? && target_sha.present?
    end

    def merge_request_ref?
      MergeRequest.merge_request_ref?(ref)
    end

    def matches_sha_or_source_sha?(sha)
      self.sha == sha || self.source_sha == sha
    end

    def triggered_by?(current_user)
      user == current_user
    end

    def source_ref
      if merge_request?
        merge_request.source_branch
      else
        ref
      end
    end

    def source_ref_slug
      Gitlab::Utils.slugify(source_ref.to_s)
    end

    def find_stage_by_name!(name)
      stages.find_by!(name: name)
    end

    def full_error_messages
      errors ? errors.full_messages.to_sentence : ""
    end

    def merge_request_event_type
      return unless merge_request?

      strong_memoize(:merge_request_event_type) do
        if merge_request_pipeline?
          :merged_result
        elsif detached_merge_request_pipeline?
          :detached
        end
      end
    end

    def persistent_ref
      @persistent_ref ||= PersistentRef.new(pipeline: self)
    end

    def cacheable?
      !dangling?
    end

    def dangling?
      Enums::Ci::Pipeline.dangling_sources.key?(source.to_sym)
    end
    def source_ref_path
      if branch? || merge_request?
        Gitlab::Git::BRANCH_REF_PREFIX + source_ref.to_s
      elsif tag?
        Gitlab::Git::TAG_REF_PREFIX + source_ref.to_s
      end
    end

    # Set scheduling type of processables if they were created before scheduling_type
    # data was deployed (https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22246).
    def ensure_scheduling_type!
      processables.populate_scheduling_type!
    end

    def ensure_ci_ref!
      self.ci_ref = Ci::Ref.ensure_for(self)
    end

    def base_and_ancestors(same_project: false)
      # Without using `unscoped`, the caller scope is also included in the query.
      # Using `unscoped` here will be redundant after Rails 6.1
      ::Gitlab::Ci::PipelineObjectHierarchy
        .new(self.class.unscoped.where(id: id), options: { same_project: same_project })
        .base_and_ancestors
    end

    # We need `base_and_ancestors` in a specific order to "break" when needed.
    # If we use `find_each`, then the order is broken.
    # rubocop:disable Rails/FindEach
    def reset_ancestor_bridges!
      base_and_ancestors.includes(:source_bridge).each do |pipeline|
        break unless pipeline.bridge_waiting?

        pipeline.source_bridge.pending!
      end
    end
    # rubocop:enable Rails/FindEach

    private

    def add_message(severity, content)
      return unless Gitlab::Ci::Features.store_pipeline_messages?(project)

      messages.build(severity: severity, content: content)
    end

    def pipeline_data
      strong_memoize(:pipeline_data) do
        Gitlab::DataBuilder::Pipeline.build(self)
      end
    end

    def merge_request_diff_sha
      return unless merge_request?

      if merge_request_pipeline?
        source_sha
      else
        sha
      end
    end

    def merge_request_diff
      return unless merge_request?

      merge_request.merge_request_diff_for(merge_request_diff_sha)
    end

    def push_details
      strong_memoize(:push_details) do
        Gitlab::Git::Push.new(project, before_sha, sha, git_ref)
      end
    end

    def git_ref
      strong_memoize(:git_ref) do
        if merge_request?
          ##
          # In the future, we're going to change this ref to
          # merge request's merged reference, such as "refs/merge-requests/:iid/merge".
          # In order to do that, we have to update GitLab-Runner's source pulling
          # logic.
          # See https://gitlab.com/gitlab-org/gitlab-runner/merge_requests/1092
          Gitlab::Git::BRANCH_REF_PREFIX + ref.to_s
        else
          super
        end
      end
    end

    def keep_around_commits
      return unless project

      project.repository.keep_around(self.sha, self.before_sha)
    end
  end
end

Ci::Pipeline.prepend_if_ee('EE::Ci::Pipeline')