# frozen_string_literal: true

module Ci
  class Pipeline < Ci::ApplicationRecord
    include Ci::Partitionable
    include Ci::HasStatus
    include Importable
    include AfterCommitQueue
    include Presentable
    include Gitlab::Allowable
    include Gitlab::OptimisticLocking
    include Gitlab::Utils::StrongMemoize
    include AtomicInternalId
    include EnumWithNil
    include Ci::HasRef
    include ShaAttribute
    include FromUnion
    include UpdatedAtFilterable
    include EachBatch
    include FastDestroyAll::Helpers

    MAX_OPEN_MERGE_REQUESTS_REFS = 4

    PROJECT_ROUTE_AND_NAMESPACE_ROUTE = {
      project: [:project_feature, :route, { namespace: :route }]
    }.freeze

    CONFIG_EXTENSION = '.gitlab-ci.yml'
    DEFAULT_CONFIG_PATH = CONFIG_EXTENSION

    CANCELABLE_STATUSES = (Ci::HasStatus::CANCELABLE_STATUSES + ['manual']).freeze

    paginates_per 15

    sha_attribute :source_sha
    sha_attribute :target_sha

    partitionable scope: ->(_) { Ci::Pipeline.current_partition_value }

    # Ci::CreatePipelineService returns Ci::Pipeline so this is the only place
    # where we can pass additional information from the service. This accessor
    # is used for storing the processed metadata for linting purposes.
    # There is an open issue to address this:
    # https://gitlab.com/gitlab-org/gitlab/-/issues/259010
    attr_accessor :config_metadata

    # This is used to retain access to the method defined by `Ci::HasRef`
    # before being overridden in this class.
    alias_method :jobs_git_ref, :git_ref

    belongs_to :project, inverse_of: :all_pipelines
    belongs_to :user
    belongs_to :auto_canceled_by, class_name: 'Ci::Pipeline'
    belongs_to :pipeline_schedule, class_name: 'Ci::PipelineSchedule'
    belongs_to :merge_request, class_name: 'MergeRequest'
    belongs_to :external_pull_request
    belongs_to :ci_ref, class_name: 'Ci::Ref', foreign_key: :ci_ref_id, inverse_of: :pipelines

    has_internal_id :iid, scope: :project, presence: false,
      track_if: -> { !importing? },
      ensure_if: -> { !importing? },
      init: ->(pipeline, scope) do
        if pipeline
          pipeline.project&.all_pipelines&.maximum(:iid) || pipeline.project&.all_pipelines&.count
        elsif scope
          ::Ci::Pipeline.where(**scope).maximum(:iid)
        end
      end

    has_many :stages, -> { order(position: :asc) }, inverse_of: :pipeline
    has_many :statuses, class_name: 'CommitStatus', foreign_key: :commit_id, inverse_of: :pipeline
    has_many :latest_statuses_ordered_by_stage, -> { latest.order(:stage_idx, :stage) },
      class_name: 'CommitStatus', foreign_key: :commit_id, inverse_of: :pipeline
    has_many :latest_statuses, -> { latest },
      class_name: 'CommitStatus', foreign_key: :commit_id, inverse_of: :pipeline
    has_many :statuses_order_id_desc, -> { order_id_desc }, class_name: 'CommitStatus', foreign_key: :commit_id
    has_many :processables, class_name: 'Ci::Processable', foreign_key: :commit_id, inverse_of: :pipeline
    has_many :bridges, class_name: 'Ci::Bridge', foreign_key: :commit_id, inverse_of: :pipeline
    has_many :builds, foreign_key: :commit_id, inverse_of: :pipeline
    has_many :generic_commit_statuses, foreign_key: :commit_id, inverse_of: :pipeline, class_name: 'GenericCommitStatus'
    has_many :job_artifacts, through: :builds
    has_many :build_trace_chunks, class_name: 'Ci::BuildTraceChunk', through: :builds, source: :trace_chunks
    has_many :trigger_requests, dependent: :destroy, foreign_key: :commit_id # rubocop:disable Cop/ActiveRecordDependent
    has_many :variables, class_name: 'Ci::PipelineVariable'
    has_many :latest_builds, -> { latest.with_project_and_metadata },
      foreign_key: :commit_id, inverse_of: :pipeline, class_name: 'Ci::Build'
    has_many :downloadable_artifacts, -> do
      not_expired.or(where_exists(::Ci::Pipeline.artifacts_locked.where('ci_pipelines.id = ci_builds.commit_id'))).downloadable.with_job
    end, through: :latest_builds, source: :job_artifacts
    has_many :latest_successful_builds, -> { latest.success.with_project_and_metadata },
      foreign_key: :commit_id, inverse_of: :pipeline, class_name: 'Ci::Build'

    has_many :messages, class_name: 'Ci::PipelineMessage', inverse_of: :pipeline

    # Merge requests for which the current pipeline is running against
    # the merge request's latest commit.
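    # (In other words, merge requests whose `head_pipeline_id` points at this pipeline.)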
    has_many :merge_requests_as_head_pipeline, foreign_key: "head_pipeline_id", class_name: 'MergeRequest'

    has_many :pending_builds, -> { pending }, foreign_key: :commit_id, class_name: 'Ci::Build', inverse_of: :pipeline
    has_many :failed_builds, -> { latest.failed }, foreign_key: :commit_id, class_name: 'Ci::Build', inverse_of: :pipeline
    has_many :retryable_builds, -> { latest.failed_or_canceled.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build', inverse_of: :pipeline
    has_many :cancelable_statuses, -> { cancelable }, foreign_key: :commit_id, class_name: 'CommitStatus'
    has_many :manual_actions, -> { latest.manual_actions.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build', inverse_of: :pipeline
    has_many :scheduled_actions, -> { latest.scheduled_actions.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build', inverse_of: :pipeline

    has_many :auto_canceled_pipelines, class_name: 'Ci::Pipeline', foreign_key: 'auto_canceled_by_id'
    has_many :auto_canceled_jobs, class_name: 'CommitStatus', foreign_key: 'auto_canceled_by_id'
    has_many :sourced_pipelines, class_name: 'Ci::Sources::Pipeline', foreign_key: :source_pipeline_id

    has_one :source_pipeline, class_name: 'Ci::Sources::Pipeline', inverse_of: :pipeline

    has_one :chat_data, class_name: 'Ci::PipelineChatData'

    has_many :triggered_pipelines, through: :sourced_pipelines, source: :pipeline

    # Only includes direct and not nested children
    has_many :child_pipelines, -> { merge(Ci::Sources::Pipeline.same_project) }, through: :sourced_pipelines, source: :pipeline

    has_one :triggered_by_pipeline, through: :source_pipeline, source: :source_pipeline
    has_one :parent_pipeline, -> { merge(Ci::Sources::Pipeline.same_project) }, through: :source_pipeline, source: :source_pipeline
    has_one :source_job, through: :source_pipeline, source: :source_job
    has_one :source_bridge, through: :source_pipeline, source: :source_bridge

    has_one :pipeline_config, class_name: 'Ci::PipelineConfig', inverse_of: :pipeline

    has_one :pipeline_metadata, class_name: 'Ci::PipelineMetadata', inverse_of: :pipeline

    has_many :daily_build_group_report_results, class_name: 'Ci::DailyBuildGroupReportResult', foreign_key: :last_pipeline_id
    has_many :latest_builds_report_results, through: :latest_builds, source: :report_results
    has_many :pipeline_artifacts, class_name: 'Ci::PipelineArtifact', inverse_of: :pipeline, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent

    accepts_nested_attributes_for :variables, reject_if: :persisted?

    delegate :full_path, to: :project, prefix: true
    delegate :name, to: :pipeline_metadata, allow_nil: true

    validates :sha, presence: { unless: :importing? }
    validates :ref, presence: { unless: :importing? }
    validates :tag, inclusion: { in: [false], if: :merge_request? }

    validates :external_pull_request, presence: { if: :external_pull_request_event? }
    validates :external_pull_request, absence: { unless: :external_pull_request_event? }
    validates :tag, inclusion: { in: [false], if: :external_pull_request_event? }

    validates :status, presence: { unless: :importing? }
    validate :valid_commit_sha, unless: :importing?
    validates :source, exclusion: { in: %w(unknown), unless: :importing? }, on: :create

    after_create :keep_around_commits, unless: :importing?
    after_find :observe_age_in_minutes, unless: :importing?

    use_fast_destroy :job_artifacts
    use_fast_destroy :build_trace_chunks

    # We use `Enums::Ci::Pipeline.sources` here so that EE can more easily extend
    # this `Hash` with new values.
    enum_with_nil source: Enums::Ci::Pipeline.sources

    enum_with_nil config_source: Enums::Ci::Pipeline.config_sources

    # We use `Enums::Ci::Pipeline.failure_reasons` here so that EE can more easily
    # extend this `Hash` with new values.
    enum failure_reason: Enums::Ci::Pipeline.failure_reasons

    enum locked: { unlocked: 0, artifacts_locked: 1 }

    state_machine :status, initial: :created do
      event :enqueue do
        transition [:created, :manual, :waiting_for_resource, :preparing, :skipped, :scheduled] => :pending
        transition [:success, :failed, :canceled] => :running

        # this is needed to ensure tests are covered
        transition [:running] => :running
      end

      event :request_resource do
        transition any - [:waiting_for_resource] => :waiting_for_resource
      end

      event :prepare do
        transition any - [:preparing] => :preparing
      end

      event :run do
        transition any - [:running] => :running
      end

      event :skip do
        transition any - [:skipped] => :skipped
      end

      event :drop do
        transition any - [:failed] => :failed
      end

      event :succeed do
        # A successful pipeline can also be retried, for example a pipeline with a failed manual job.
        # When retrying the pipeline, the status of the pipeline is not changed because the failed
        # manual job transitions to the `manual` status.
        # More info: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/98967#note_1144718316
        transition any => :success
      end

      event :cancel do
        transition any - [:canceled] => :canceled
      end

      event :block do
        transition any - [:manual] => :manual
      end

      event :delay do
        transition any - [:scheduled] => :scheduled
      end

      # IMPORTANT
      # Do not add any operations to this state_machine
      # Create a separate worker for each new operation

      before_transition [:created, :waiting_for_resource, :preparing, :pending] => :running do |pipeline|
        pipeline.started_at ||= Time.current
      end

      before_transition any => [:success, :failed, :canceled] do |pipeline|
        pipeline.finished_at = Time.current
        pipeline.update_duration
      end

      before_transition any => [:manual] do |pipeline|
        pipeline.update_duration
      end

      before_transition canceled: any - [:canceled] do |pipeline|
        pipeline.auto_canceled_by = nil
      end

      before_transition any => :failed do |pipeline, transition|
        transition.args.first.try do |reason|
          pipeline.failure_reason = reason
        end
      end

      after_transition [:created, :waiting_for_resource, :preparing, :pending] => :running do |pipeline|
        pipeline.run_after_commit { PipelineMetricsWorker.perform_async(pipeline.id) }
      end

      after_transition any => [:success] do |pipeline|
        pipeline.run_after_commit { PipelineMetricsWorker.perform_async(pipeline.id) }
      end

      after_transition [:created, :waiting_for_resource, :preparing, :pending, :running] => :success do |pipeline|
        # We wait a little bit to ensure that all Ci::BuildFinishedWorkers finish first,
        # because this is where some metrics like code coverage are parsed and stored
        # in CI build records, which the daily build metrics worker relies on.
        pipeline.run_after_commit { Ci::DailyBuildGroupReportResultsWorker.perform_in(10.minutes, pipeline.id) }
      end

      after_transition do |pipeline, transition|
        next if transition.loopback?

        pipeline.run_after_commit do
          unless pipeline.user&.blocked?
            Gitlab::AppLogger.info(
              message: "Enqueuing hooks for Pipeline #{pipeline.id}: #{pipeline.status}",
              class: self.class.name,
              pipeline_id: pipeline.id,
              project_id: pipeline.project_id,
              pipeline_status: pipeline.status)

            PipelineHooksWorker.perform_async(pipeline.id)
          end

          if pipeline.project.jira_subscription_exists?
            # Passing the seq-id ensures this is idempotent
            seq_id = ::Atlassian::JiraConnect::Client.generate_update_sequence_id
            ::JiraConnect::SyncBuildsWorker.perform_async(pipeline.id, seq_id)
          end

          Ci::ExpirePipelineCacheService.new.execute(pipeline) # rubocop: disable CodeReuse/ServiceClass
        end
      end

      after_transition any => ::Ci::Pipeline.completed_statuses do |pipeline|
        pipeline.run_after_commit do
          pipeline.all_merge_requests.each do |merge_request|
            next unless merge_request.auto_merge_enabled?

            AutoMergeProcessWorker.perform_async(merge_request.id)
          end

          if pipeline.auto_devops_source?
            self.class.auto_devops_pipelines_completed_total.increment(status: pipeline.status)
          end
        end
      end

      after_transition any => ::Ci::Pipeline.completed_statuses do |pipeline|
        pipeline.run_after_commit do
          ::Ci::PipelineArtifacts::CoverageReportWorker.perform_async(pipeline.id)
          ::Ci::PipelineArtifacts::CreateQualityReportWorker.perform_async(pipeline.id)
        end
      end

      after_transition any => ::Ci::Pipeline.completed_statuses do |pipeline|
        next unless pipeline.bridge_waiting?

        pipeline.run_after_commit do
          ::Ci::PipelineBridgeStatusWorker.perform_async(pipeline.id)
        end
      end

      after_transition any => ::Ci::Pipeline.completed_statuses do |pipeline|
        pipeline.run_after_commit do
          ::Ci::TestFailureHistoryService.new(pipeline).async.perform_if_needed # rubocop: disable CodeReuse/ServiceClass
        end
      end

      after_transition any => ::Ci::Pipeline.completed_statuses do |pipeline|
        pipeline.run_after_commit do
          ::Ci::JobArtifacts::TrackArtifactReportWorker.perform_async(pipeline.id)
        end
      end

      after_transition any => ::Ci::Pipeline.stopped_statuses do |pipeline|
        pipeline.run_after_commit do
          pipeline.persistent_ref.delete
        end
      end

      after_transition any => [:success, :failed] do |pipeline|
        ref_status = pipeline.ci_ref&.update_status_by!(pipeline)

        pipeline.run_after_commit do
          # We don't send notifications for a pipeline dropped because the
          # user was blocked.
          unless pipeline.user&.blocked?
            PipelineNotificationWorker
              .perform_async(pipeline.id, ref_status: ref_status)
          end
        end
      end

      after_transition any => [:failed] do |pipeline|
        pipeline.run_after_commit do
          ::Gitlab::Ci::Pipeline::Metrics.pipeline_failure_reason_counter.increment(reason: pipeline.failure_reason)

          AutoDevops::DisableWorker.perform_async(pipeline.id) if pipeline.auto_devops_source?
        end
      end
    end

    scope :internal, -> { where(source: internal_sources) }
    scope :no_child, -> { where.not(source: :parent_pipeline) }
    scope :ci_sources, -> { where(source: Enums::Ci::Pipeline.ci_sources.values) }
    scope :ci_branch_sources, -> { where(source: Enums::Ci::Pipeline.ci_branch_sources.values) }
    scope :ci_and_parent_sources, -> { where(source: Enums::Ci::Pipeline.ci_and_parent_sources.values) }
    scope :for_user, -> (user) { where(user: user) }
    scope :for_sha, -> (sha) { where(sha: sha) }
    scope :where_not_sha, -> (sha) { where.not(sha: sha) }
    scope :for_source_sha, -> (source_sha) { where(source_sha: source_sha) }
    scope :for_sha_or_source_sha, -> (sha) { for_sha(sha).or(for_source_sha(sha)) }
    scope :for_ref, -> (ref) { where(ref: ref) }
    scope :for_branch, -> (branch) { for_ref(branch).where(tag: false) }
    scope :for_iid, -> (iid) { where(iid: iid) }
    scope :for_project, -> (project_id) { where(project_id: project_id) }
    scope :for_name, -> (name) do
      name_column = Ci::PipelineMetadata.arel_table[:name]

      joins(:pipeline_metadata).where(name_column.lower.eq(name.downcase))
    end
    scope :created_after, -> (time) { where(arel_table[:created_at].gt(time)) }
    scope :created_before_id, -> (id) { where(arel_table[:id].lt(id)) }
    scope :before_pipeline, -> (pipeline) { created_before_id(pipeline.id).outside_pipeline_family(pipeline) }
    scope :with_pipeline_source, -> (source) { where(source: source) }

    scope :outside_pipeline_family, ->(pipeline) do
      where.not(id: pipeline.same_family_pipeline_ids)
    end

    scope :with_reports, -> (reports_scope) do
      where('EXISTS (?)', ::Ci::Build.latest.with_artifacts(reports_scope).where('ci_pipelines.id=ci_builds.commit_id').select(1))
    end

    scope :with_only_interruptible_builds, -> do
      where('NOT EXISTS (?)',
        Ci::Build.where('ci_builds.commit_id = ci_pipelines.id')
          .with_status(STARTED_STATUSES)
          .not_interruptible
      )
    end

    # Returns the pipelines that are associated with the given merge request.
    # In general, please use `Ci::PipelinesForMergeRequestFinder` instead,
    # which checks the permissions of the actor.
    scope :triggered_by_merge_request, -> (merge_request) do
      where(source: :merge_request_event,
            merge_request: merge_request,
            project: [merge_request.source_project, merge_request.target_project])
    end

    # Returns the pipelines in descending order (= newest first), optionally
    # limited to a number of references.
    #
    # ref - The name (or names) of the branch(es)/tag(s) to limit the list of
    #       pipelines to.
    # sha - The commit SHA (or multiple SHAs) to limit the list of pipelines to.
    # limit - This limits the backlog search, defaults to 100.
    def self.newest_first(ref: nil, sha: nil, limit: 100)
      relation = order(id: :desc)
      relation = relation.where(ref: ref) if ref
      relation = relation.where(sha: sha) if sha

      if limit
        ids = relation.limit(limit).select(:id)
        relation = relation.where(id: ids)
      end

      relation
    end

    def self.latest_status(ref = nil)
      newest_first(ref: ref).pick(:status)
    end

    def self.latest_successful_for_ref(ref)
      newest_first(ref: ref).success.take
    end

    def self.latest_successful_for_sha(sha)
      newest_first(sha: sha).success.take
    end

    def self.latest_successful_for_refs(refs)
      return Ci::Pipeline.none if refs.empty?
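
      # Builds a VALUES list of the given refs and uses an INNER JOIN LATERAL to
      # pick the newest successful pipeline per ref in a single query. The
      # generated SQL roughly has this shape (illustrative only):
      #
      #   SELECT ci_pipelines.*
      #   FROM (VALUES ('main'), ('stable')) refs_values (ref)
      #   INNER JOIN LATERAL (
      #     SELECT * FROM ci_pipelines
      #     WHERE refs_values.ref = ci_pipelines.ref AND status = 'success'
      #     ORDER BY id DESC LIMIT 1
      #   ) ci_pipelines ON TRUE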
      refs_values = refs.map { |ref| "(#{connection.quote(ref)})" }.join(",")
      join_query = success.where("refs_values.ref = ci_pipelines.ref").order(id: :desc).limit(1)

      Ci::Pipeline
        .from("(VALUES #{refs_values}) refs_values (ref)")
        .joins("INNER JOIN LATERAL (#{join_query.to_sql}) #{Ci::Pipeline.table_name} ON TRUE")
        .index_by(&:ref)
    end

    def self.latest_running_for_ref(ref)
      newest_first(ref: ref).running.take
    end

    def self.latest_failed_for_ref(ref)
      newest_first(ref: ref).failed.take
    end

    def self.jobs_count_in_alive_pipelines
      created_after(24.hours.ago).alive.joins(:statuses).count
    end

    def self.builds_count_in_alive_pipelines
      created_after(24.hours.ago).alive.joins(:builds).count
    end

    # Returns a Hash containing the latest pipeline for every given
    # commit.
    #
    # The keys of this Hash are the commit SHAs, the values the pipelines.
    #
    # commits - The list of commit SHAs to get the pipelines for.
    # ref - The ref to scope the data to (e.g. "master"). If the ref is not
    #       given we simply get the latest pipelines for the commits, regardless
    #       of what refs the pipelines belong to.
    def self.latest_pipeline_per_commit(commits, ref = nil)
      sql = select('DISTINCT ON (sha) *')
              .where(sha: commits)
              .order(:sha, id: :desc)

      sql = sql.where(ref: ref) if ref

      sql.index_by(&:sha)
    end

    def self.latest_successful_ids_per_project
      success.group(:project_id).select('max(id) as id')
    end

    def self.last_finished_for_ref_id(ci_ref_id)
      where(ci_ref_id: ci_ref_id).ci_sources.finished.order(id: :desc).select(:id).take
    end

    def self.truncate_sha(sha)
      sha[0...8]
    end

    def self.total_duration
      where.not(duration: nil).sum(:duration)
    end

    def self.internal_sources
      sources.reject { |source| source == "external" }.values
    end

    def self.bridgeable_statuses
      ::Ci::Pipeline::AVAILABLE_STATUSES - %w[created waiting_for_resource preparing pending]
    end

    def self.auto_devops_pipelines_completed_total
      @auto_devops_pipelines_completed_total ||= Gitlab::Metrics.counter(:auto_devops_pipelines_completed_total, 'Number of completed auto devops pipelines')
    end

    def self.current_partition_value
      100
    end

    def uses_needs?
      processables.where(scheduling_type: :dag).any?
    end

    def stages_count
      statuses.select(:stage).distinct.count
    end

    def total_size
      statuses.count(:id)
    end

    def tags_count
      ActsAsTaggableOn::Tagging.where(taggable: builds).count
    end

    def distinct_tags_count
      ActsAsTaggableOn::Tagging.where(taggable: builds).count('distinct(tag_id)')
    end

    def stages_names
      statuses.order(:stage_idx).distinct
        .pluck(:stage, :stage_idx).map(&:first)
    end

    def ref_exists?
      project.repository.ref_exists?(git_ref)
    rescue Gitlab::Git::Repository::NoRepository
      false
    end

    def triggered_pipelines_with_preloads
      triggered_pipelines.preload(:source_job)
    end

    def valid_commit_sha
      if self.sha == Gitlab::Git::BLANK_SHA
        self.errors.add(:sha, " can't be 00000000 (branch removal)")
      end
    end

    def git_author_name
      strong_memoize(:git_author_name) do
        commit.try(:author_name)
      end
    end

    def git_author_email
      strong_memoize(:git_author_email) do
        commit.try(:author_email)
      end
    end

    def git_author_full_text
      strong_memoize(:git_author_full_text) do
        commit.try(:author_full_text)
      end
    end

    def git_commit_message
      strong_memoize(:git_commit_message) do
        commit.try(:message)
      end
    end

    def git_commit_title
      strong_memoize(:git_commit_title) do
        commit.try(:title)
      end
    end

    def git_commit_full_title
      strong_memoize(:git_commit_full_title) do
        commit.try(:full_title)
      end
    end

    def git_commit_description
      strong_memoize(:git_commit_description) do
        commit.try(:description)
      end
    end

    def git_commit_timestamp
      strong_memoize(:git_commit_timestamp) do
        commit.try(:timestamp)
      end
    end

    def before_sha
      super || Gitlab::Git::BLANK_SHA
    end

    def short_sha
      Ci::Pipeline.truncate_sha(sha)
    end

    # NOTE: This is loaded lazily and will never be nil, even if the commit
    # cannot be found.
    #
    # Use constructs like: `pipeline.commit.present?`
    def commit
      @commit ||= Commit.lazy(project, sha)
    end

    def stuck?
      pending_builds.any?(&:stuck?)
    end

    def retryable?
      retryable_builds.any?
    end

    def cancelable?
      cancelable_statuses.any?
    end

    def auto_canceled?
      canceled? && auto_canceled_by_id?
    end

    # Cancel a pipeline's cancelable jobs and, optionally, the cancelable jobs of its child pipelines.
    # retries - the number of times to retry if errors occur
    # cascade_to_children - if true, cancels all related child pipelines for parent-child pipelines
    # auto_canceled_by_pipeline_id - stores the pipeline_id of the pipeline that triggered cancellation
    # execute_async - if true, cancels the children asynchronously
    def cancel_running(retries: 1, cascade_to_children: true, auto_canceled_by_pipeline_id: nil, execute_async: true)
      Gitlab::AppJsonLogger.info(
        event: 'pipeline_cancel_running',
        pipeline_id: id,
        auto_canceled_by_pipeline_id: auto_canceled_by_pipeline_id,
        cascade_to_children: cascade_to_children,
        execute_async: execute_async,
        **Gitlab::ApplicationContext.current
      )

      update(auto_canceled_by_id: auto_canceled_by_pipeline_id) if auto_canceled_by_pipeline_id

      cancel_jobs(cancelable_statuses, retries: retries, auto_canceled_by_pipeline_id: auto_canceled_by_pipeline_id)

      if cascade_to_children
        # cancel any bridges that could spin up new child pipelines
        cancel_jobs(bridges_in_self_and_project_descendants.cancelable, retries: retries, auto_canceled_by_pipeline_id: auto_canceled_by_pipeline_id)
        cancel_children(auto_canceled_by_pipeline_id: auto_canceled_by_pipeline_id, execute_async: execute_async)
      end
    end

    # rubocop: disable CodeReuse/ServiceClass
    def retry_failed(current_user)
      Ci::RetryPipelineService.new(project, current_user)
        .execute(self)
    end
    # rubocop: enable CodeReuse/ServiceClass

    def lazy_ref_commit
      BatchLoader.for(ref).batch do |refs, loader|
        next unless project.repository_exists?

        project.repository.list_commits_by_ref_name(refs).then do |commits|
          commits.each { |key, commit| loader.call(key, commits[key]) }
        end
      end
    end

    def latest?
      return false unless git_ref && commit.present?
      return false if lazy_ref_commit.nil?
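
      # The pipeline is the latest for its ref only when its commit is still the
      # current tip of that ref.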
      lazy_ref_commit.id == commit.id
    end

    def retried
      @retried ||= (statuses.order(id: :desc) - latest_statuses)
    end

    def coverage
      coverage_array = latest_statuses.map(&:coverage).compact
      if coverage_array.size >= 1
        coverage_array.sum / coverage_array.size
      end
    end

    def update_builds_coverage
      builds.with_coverage_regex.without_coverage.each(&:update_coverage)
    end

    def batch_lookup_report_artifact_for_file_type(file_type)
      batch_lookup_report_artifact_for_file_types([file_type])
    end

    def batch_lookup_report_artifact_for_file_types(file_types)
      file_types_to_search = []
      file_types.each { |file_type| file_types_to_search.append(*::Ci::JobArtifact.associated_file_types_for(file_type.to_s)) }

      latest_report_artifacts
        .values_at(*file_types_to_search.uniq)
        .flatten
        .compact
        .last
    end

    # This batch loads the latest reports for each CI job artifact
    # type (e.g. sast, dast, etc.) in a single SQL query to eliminate
    # the need to do N different `job_artifacts.where(file_type:
    # X).last` calls.
    #
    # Return a hash of file type => array of 1 job artifact
    def latest_report_artifacts
      ::Gitlab::SafeRequestStore.fetch("pipeline:#{self.id}:latest_report_artifacts") do
        ::Ci::JobArtifact.where(
          id: job_artifacts.all_reports
            .select('max(ci_job_artifacts.id) as id')
            .group(:file_type)
        )
          .preload(:job)
          .group_by(&:file_type)
      end
    end

    def has_kubernetes_active?
      strong_memoize(:has_kubernetes_active) do
        project.deployment_platform&.active?
      end
    end

    def freeze_period?
      strong_memoize(:freeze_period) do
        project.freeze_periods.any?(&:active?)
      end
    end

    def has_warnings?
      number_of_warnings > 0
    end

    def number_of_warnings
      BatchLoader.for(id).batch(default_value: 0) do |pipeline_ids, loader|
        ::CommitStatus.where(commit_id: pipeline_ids)
          .latest
          .failed_but_allowed
          .group(:commit_id)
          .count
          .each { |id, amount| loader.call(id, amount) }
      end
    end

    def needs_processing?
      statuses
        .where(processed: [false, nil])
        .latest
        .exists?
    end

    def has_yaml_errors?
      yaml_errors.present?
    end

    def add_error_message(content)
      add_message(:error, content)
    end

    def add_warning_message(content)
      add_message(:warning, content)
    end

    # We can't use the `messages.error` scope here because messages should also be
    # read when the pipeline is not persisted. Using the scope would return no
    # results, as it would query persisted data.
    def error_messages
      messages.select(&:error?)
    end

    def warning_messages(limit: nil)
      messages.select(&:warning?).tap do |warnings|
        break warnings.take(limit) if limit
      end
    end

    # Manually set the notes for a Ci::Pipeline.
    # There is no ActiveRecord relation between Ci::Pipeline and notes
    # as they are related to a commit sha. This method helps importing
    # them using the +Gitlab::ImportExport::Project::RelationFactory+ class.
    def notes=(notes_to_save)
      notes_to_save.reject! do |note_to_save|
        notes.any? do |note|
          [note_to_save.note, note_to_save.created_at.to_i] == [note.note, note.created_at.to_i]
        end
      end

      notes_to_save.each do |note|
        note[:id] = nil
        note[:commit_id] = sha
        note[:noteable_id] = self['id']
        note.save!
      end
    end

    def notes
      project.notes.for_commit_id(sha)
    end

    def set_status(new_status)
      retry_optimistic_lock(self, name: 'ci_pipeline_set_status') do
        case new_status
        when 'created' then nil
        when 'waiting_for_resource' then request_resource
        when 'preparing' then prepare
        when 'pending' then enqueue
        when 'running' then run
        when 'success' then succeed
        when 'failed' then drop
        when 'canceled' then cancel
        when 'skipped' then skip
        when 'manual' then block
        when 'scheduled' then delay
        else
          raise Ci::HasStatus::UnknownStatusError, "Unknown status `#{new_status}`"
        end
      end
    end

    def protected_ref?
      strong_memoize(:protected_ref) { project.protected_for?(git_ref) }
    end

    def legacy_trigger
      strong_memoize(:legacy_trigger) { trigger_requests.first }
    end

    def variables_builder
      @variables_builder ||= ::Gitlab::Ci::Variables::Builder.new(self)
    end

    def persisted_variables
      Gitlab::Ci::Variables::Collection.new.tap do |variables|
        break variables unless persisted?

        variables.append(key: 'CI_PIPELINE_ID', value: id.to_s)
        variables.append(key: 'CI_PIPELINE_URL', value: Gitlab::Routing.url_helpers.project_pipeline_url(project, self))
      end
    end

    def predefined_variables
      Gitlab::Ci::Variables::Collection.new.tap do |variables|
        variables.append(key: 'CI_PIPELINE_IID', value: iid.to_s)
        variables.append(key: 'CI_PIPELINE_SOURCE', value: source.to_s)
        variables.append(key: 'CI_PIPELINE_CREATED_AT', value: created_at&.iso8601)

        variables.concat(predefined_commit_variables)
        variables.concat(predefined_merge_request_variables)

        if open_merge_requests_refs.any?
          variables.append(key: 'CI_OPEN_MERGE_REQUESTS', value: open_merge_requests_refs.join(','))
        end

        variables.append(key: 'CI_GITLAB_FIPS_MODE', value: 'true') if Gitlab::FIPS.enabled?

        variables.append(key: 'CI_KUBERNETES_ACTIVE', value: 'true') if has_kubernetes_active?
        variables.append(key: 'CI_DEPLOY_FREEZE', value: 'true') if freeze_period?

        if external_pull_request_event? && external_pull_request
          variables.concat(external_pull_request.predefined_variables)
        end
      end
    end

    def predefined_commit_variables
      strong_memoize(:predefined_commit_variables) do
        Gitlab::Ci::Variables::Collection.new.tap do |variables|
          next variables unless sha.present?

          variables.append(key: 'CI_COMMIT_SHA', value: sha)
          variables.append(key: 'CI_COMMIT_SHORT_SHA', value: short_sha)
          variables.append(key: 'CI_COMMIT_BEFORE_SHA', value: before_sha)
          variables.append(key: 'CI_COMMIT_REF_NAME', value: source_ref)
          variables.append(key: 'CI_COMMIT_REF_SLUG', value: source_ref_slug)
          variables.append(key: 'CI_COMMIT_BRANCH', value: ref) if branch?
          variables.append(key: 'CI_COMMIT_MESSAGE', value: git_commit_message.to_s)
          variables.append(key: 'CI_COMMIT_TITLE', value: git_commit_full_title.to_s)
          variables.append(key: 'CI_COMMIT_DESCRIPTION', value: git_commit_description.to_s)
          variables.append(key: 'CI_COMMIT_REF_PROTECTED', value: (!!protected_ref?).to_s)
          variables.append(key: 'CI_COMMIT_TIMESTAMP', value: git_commit_timestamp.to_s)
          variables.append(key: 'CI_COMMIT_AUTHOR', value: git_author_full_text.to_s)

          # legacy variables
          variables.append(key: 'CI_BUILD_REF', value: sha)
          variables.append(key: 'CI_BUILD_BEFORE_SHA', value: before_sha)
          variables.append(key: 'CI_BUILD_REF_NAME', value: source_ref)
          variables.append(key: 'CI_BUILD_REF_SLUG', value: source_ref_slug)

          variables.concat(predefined_commit_tag_variables)
        end
      end
    end

    def predefined_merge_request_variables
      strong_memoize(:predefined_merge_request_variables) do
        Gitlab::Ci::Variables::Collection.new.tap do |variables|
          next variables unless merge_request?
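
          # CI_MERGE_REQUEST_* variables are only exposed on merge request pipelines.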
          variables.append(key: 'CI_MERGE_REQUEST_EVENT_TYPE', value: merge_request_event_type.to_s)
          variables.append(key: 'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA', value: source_sha.to_s)
          variables.append(key: 'CI_MERGE_REQUEST_TARGET_BRANCH_SHA', value: target_sha.to_s)

          diff = self.merge_request_diff
          if diff.present?
            variables.append(key: 'CI_MERGE_REQUEST_DIFF_ID', value: diff.id.to_s)
            variables.append(key: 'CI_MERGE_REQUEST_DIFF_BASE_SHA', value: diff.base_commit_sha)
          end

          variables.concat(merge_request.predefined_variables)
        end
      end
    end

    def predefined_commit_tag_variables
      strong_memoize(:predefined_commit_ref_variables) do
        Gitlab::Ci::Variables::Collection.new.tap do |variables|
          next variables unless tag?

          git_tag = project.repository.find_tag(ref)

          next variables unless git_tag

          variables.append(key: 'CI_COMMIT_TAG', value: ref)
          variables.append(key: 'CI_COMMIT_TAG_MESSAGE', value: git_tag.message)

          # legacy variable
          variables.append(key: 'CI_BUILD_TAG', value: ref)
        end
      end
    end

    def queued_duration
      return unless started_at

      seconds = (started_at - created_at).to_i
      seconds unless seconds == 0
    end

    def update_duration
      return unless started_at

      self.duration = Gitlab::Ci::Pipeline::Duration.from_pipeline(self)
    end

    # All the merge requests that the current pipeline runs/ran against
    def all_merge_requests
      @all_merge_requests ||=
        if merge_request?
          MergeRequest.where(id: merge_request_id)
        else
          MergeRequest.where(source_project_id: project_id, source_branch: ref)
            .by_commit_sha(sha)
        end
    end

    def all_merge_requests_by_recency
      all_merge_requests.order(id: :desc)
    end

    # This returns a list of MRs that point
    # to the same source project/branch
    def related_merge_requests
      if merge_request?
        # We look for all other MRs that this branch might be pointing to
        MergeRequest.where(
          source_project_id: merge_request.source_project_id,
          source_branch: merge_request.source_branch)
      else
        MergeRequest.where(
          source_project_id: project_id,
          source_branch: ref)
      end
    end

    # We cannot use `all_merge_requests` due to a race condition.
    # This returns a list of at most 4 open MRs.
    def open_merge_requests_refs
      strong_memoize(:open_merge_requests_refs) do
        # We ensure that the triggering user can actually read the pipeline
        related_merge_requests
          .opened
          .limit(MAX_OPEN_MERGE_REQUESTS_REFS)
          .order(id: :desc)
          .preload(:target_project)
          .select { |mr| can?(user, :read_merge_request, mr) }
          .map { |mr| mr.to_reference(project, full: true) }
      end
    end

    def same_family_pipeline_ids
      ::Gitlab::Ci::PipelineObjectHierarchy.new(
        self.class.default_scoped.where(id: root_ancestor),
        options: { project_condition: :same }
      ).base_and_descendants.select(:id)
    end

    def build_with_artifacts_in_self_and_project_descendants(name)
      builds_in_self_and_project_descendants
        .ordered_by_pipeline # find job in hierarchical order
        .with_downloadable_artifacts
        .find_by_name(name)
    end

    def builds_in_self_and_project_descendants
      Ci::Build.latest.where(pipeline: self_and_project_descendants)
    end

    def bridges_in_self_and_project_descendants
      Ci::Bridge.latest.where(pipeline: self_and_project_descendants)
    end

    def environments_in_self_and_project_descendants(deployment_status: nil)
      # We limit to 100 unique environments for application safety.
      # See: https://gitlab.com/gitlab-org/gitlab/-/issues/340781#note_699114700
      expanded_environment_names =
        builds_in_self_and_project_descendants.joins(:metadata)
          .where.not(Ci::BuildMetadata.table_name => { expanded_environment_name: nil })
          .distinct("#{Ci::BuildMetadata.quoted_table_name}.expanded_environment_name")
          .limit(100)
          .pluck(:expanded_environment_name)

      Environment.where(project: project, name: expanded_environment_names).with_deployment(sha, status: deployment_status)
    end

    # With multi-project and parent-child pipelines
    def self_and_upstreams
      object_hierarchy.base_and_ancestors
    end

    # With multi-project and parent-child pipelines
    def self_and_downstreams
      object_hierarchy.base_and_descendants
    end

    # With multi-project and parent-child pipelines
    def upstream_and_all_downstreams
      object_hierarchy.all_objects
    end

    # With only parent-child pipelines
    def self_and_project_ancestors
      object_hierarchy(project_condition: :same).base_and_ancestors
    end

    # With only parent-child pipelines
    def self_and_project_descendants
      object_hierarchy(project_condition: :same).base_and_descendants
    end

    # With only parent-child pipelines
    def all_child_pipelines
      object_hierarchy(project_condition: :same).descendants
    end

    def self_and_project_descendants_complete?
      self_and_project_descendants.all?(&:complete?)
    end

    # Follow the parent-child relationships and return the top-level parent
    def root_ancestor
      return self unless child?

      object_hierarchy(project_condition: :same)
        .base_and_ancestors(hierarchy_order: :desc)
        .first
    end

    # Follow the upstream pipeline relationships, regardless of multi-project or
    # parent-child, and return the top-level ancestor.
    def upstream_root
      @upstream_root ||= object_hierarchy.base_and_ancestors(hierarchy_order: :desc).first
    end

    # Applies to all parent-child and multi-project pipelines
    def complete_hierarchy_count
      upstream_root.self_and_downstreams.count
    end

    def bridge_triggered?
      source_bridge.present?
    end

    def bridge_waiting?
      source_bridge&.dependent?
    end

    def child?
      parent_pipeline? && # child pipelines have the `parent_pipeline` source
        parent_pipeline.present?
    end

    def parent?
      child_pipelines.exists?
    end

    def created_successfully?
      persisted? && failure_reason.blank?
    end

    def detailed_status(current_user)
      Gitlab::Ci::Status::Pipeline::Factory
        .new(self.present, current_user)
        .fabricate!
    end

    def find_job_with_archive_artifacts(name)
      builds.latest.with_downloadable_artifacts.find_by_name(name)
    end

    def latest_builds_with_artifacts
      # We purposely cast the builds to an Array here. Because we always use the
      # rows if there are more than 0 this prevents us from having to run two
      # queries: one to get the count and one to get the rows.
      @latest_builds_with_artifacts ||= builds.latest.with_artifacts_not_expired.to_a
    end

    def latest_report_builds(reports_scope = ::Ci::JobArtifact.all_reports)
      builds.latest.with_artifacts(reports_scope)
    end

    def latest_test_report_builds
      latest_report_builds(Ci::JobArtifact.of_report_type(:test)).preload(:project, :metadata)
    end

    def latest_report_builds_in_self_and_project_descendants(reports_scope = ::Ci::JobArtifact.all_reports)
      builds_in_self_and_project_descendants.with_artifacts(reports_scope)
    end

    def builds_with_coverage
      builds.latest.with_coverage
    end

    def builds_with_failed_tests(limit: nil)
      latest_test_report_builds.failed.limit(limit)
    end

    def has_reports?(reports_scope)
      latest_report_builds(reports_scope).exists?
    end

    def complete_and_has_reports?(reports_scope)
      if Feature.enabled?(:mr_show_reports_immediately, project, type: :development)
        latest_report_builds(reports_scope).exists?
      else
        complete? && has_reports?(reports_scope)
      end
    end

    def has_coverage_reports?
      pipeline_artifacts&.report_exists?(:code_coverage)
    end

    def has_codequality_mr_diff_report?
      pipeline_artifacts&.report_exists?(:code_quality_mr_diff)
    end

    def can_generate_codequality_reports?
      complete_and_has_reports?(Ci::JobArtifact.of_report_type(:codequality))
    end

    def test_report_summary
      strong_memoize(:test_report_summary) do
        Gitlab::Ci::Reports::TestReportSummary.new(latest_builds_report_results)
      end
    end

    def test_reports
      Gitlab::Ci::Reports::TestReport.new.tap do |test_reports|
        latest_test_report_builds.find_each do |build|
          build.collect_test_reports!(test_reports)
        end
      end
    end

    def accessibility_reports
      Gitlab::Ci::Reports::AccessibilityReports.new.tap do |accessibility_reports|
        latest_report_builds(Ci::JobArtifact.of_report_type(:accessibility)).each do |build|
          build.collect_accessibility_reports!(accessibility_reports)
        end
      end
    end

    def codequality_reports
      Gitlab::Ci::Reports::CodequalityReports.new.tap do |codequality_reports|
        latest_report_builds(Ci::JobArtifact.of_report_type(:codequality)).each do |build|
          build.collect_codequality_reports!(codequality_reports)
        end
      end
    end

    def terraform_reports
      ::Gitlab::Ci::Reports::TerraformReports.new.tap do |terraform_reports|
        latest_report_builds(::Ci::JobArtifact.of_report_type(:terraform)).each do |build|
          build.collect_terraform_reports!(terraform_reports)
        end
      end
    end

    def has_archive_artifacts?
      complete? && builds.latest.with_existing_job_artifacts(Ci::JobArtifact.archive.or(Ci::JobArtifact.metadata)).exists?
    end

    def has_exposed_artifacts?
      complete? && builds.latest.with_exposed_artifacts.exists?
    end

    def has_erasable_artifacts?
      complete? && builds.latest.with_erasable_artifacts.exists?
    end

    def branch_updated?
      strong_memoize(:branch_updated) do
        push_details.branch_updated?
      end
    end

    # Returns the modified paths.
    #
    # The returned value is
    # * Array: list of modified paths that should be evaluated
    # * nil: modified paths cannot be evaluated
    def modified_paths
      strong_memoize(:modified_paths) do
        if merge_request?
          merge_request.modified_paths
        elsif branch_updated?
          push_details.modified_paths
        elsif external_pull_request?
          external_pull_request.modified_paths
        end
      end
    end

    def modified_paths_since(compare_to_sha)
      project.repository.diff_stats(project.repository.merge_base(compare_to_sha, sha), sha).paths
    end

    def all_worktree_paths
      strong_memoize(:all_worktree_paths) do
        project.repository.ls_files(sha)
      end
    end

    def top_level_worktree_paths
      strong_memoize(:top_level_worktree_paths) do
        project.repository.tree(sha).blobs.map(&:path)
      end
    end

    def default_branch?
      ref == project.default_branch
    end

    def merge_request?
      merge_request_id.present? && merge_request.present?
    end

    def external_pull_request?
      external_pull_request_id.present?
    end

    def detached_merge_request_pipeline?
      merge_request? && target_sha.nil?
    end

    def legacy_detached_merge_request_pipeline?
      detached_merge_request_pipeline? && !merge_request_ref?
    end

    def merged_result_pipeline?
      merge_request? && target_sha.present?
    end

    def merge_request_ref?
      MergeRequest.merge_request_ref?(ref)
    end

    def matches_sha_or_source_sha?(sha)
      self.sha == sha || self.source_sha == sha
    end

    def triggered_by?(current_user)
      user == current_user
    end

    def source_ref
      if merge_request?
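        # For merge request pipelines the pipeline `ref` is the merge request ref,
        # so expose the source branch name instead.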
        merge_request.source_branch
      else
        ref
      end
    end

    def source_ref_slug
      Gitlab::Utils.slugify(source_ref.to_s)
    end

    def stage(name)
      stages.find_by(name: name)
    end

    def full_error_messages
      errors ? errors.full_messages.to_sentence : ""
    end

    def merge_request_event_type
      return unless merge_request?

      strong_memoize(:merge_request_event_type) do
        if merged_result_pipeline?
          :merged_result
        elsif detached_merge_request_pipeline?
          :detached
        end
      end
    end

    def persistent_ref
      @persistent_ref ||= PersistentRef.new(pipeline: self)
    end

    def dangling?
      Enums::Ci::Pipeline.dangling_sources.key?(source.to_sym)
    end

    def source_ref_path
      if branch? || merge_request?
        Gitlab::Git::BRANCH_REF_PREFIX + source_ref.to_s
      elsif tag?
        Gitlab::Git::TAG_REF_PREFIX + source_ref.to_s
      end
    end

    # Set the scheduling type of processables if they were created before the scheduling_type
    # data was deployed (https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22246).
    def ensure_scheduling_type!
      processables.populate_scheduling_type!
    end

    def ensure_ci_ref!
      self.ci_ref = Ci::Ref.ensure_for(self)
    end

    def ensure_persistent_ref
      return if persistent_ref.exist?

      persistent_ref.create
    end

    # For dependent bridge jobs we reset the upstream bridge recursively
    # to reflect that a downstream pipeline is running again
    def reset_source_bridge!(current_user)
      # break recursion when there is no source_pipeline bridge (first upstream pipeline)
      return unless bridge_waiting?
      return unless current_user.can?(:update_pipeline, source_bridge.pipeline)

      Ci::EnqueueJobService.new(source_bridge, current_user: current_user).execute(&:pending!) # rubocop:disable CodeReuse/ServiceClass
    end

    # EE-only
    def merge_train_pipeline?
      false
    end

    def security_reports(report_types: [])
      reports_scope = report_types.empty? ? ::Ci::JobArtifact.security_reports : ::Ci::JobArtifact.security_reports(file_types: report_types)
      types_to_collect = report_types.empty? ? ::Ci::JobArtifact::SECURITY_REPORT_FILE_TYPES : report_types

      ::Gitlab::Ci::Reports::Security::Reports.new(self).tap do |security_reports|
        latest_report_builds(reports_scope).each do |build|
          build.collect_security_reports!(security_reports, report_types: types_to_collect)
        end
      end
    end

    def build_matchers
      self.builds.latest.build_matchers(project)
    end

    def cluster_agent_authorizations
      strong_memoize(:cluster_agent_authorizations) do
        ::Clusters::AgentAuthorizationsFinder.new(project).execute
      end
    end

    def has_test_reports?
      strong_memoize(:has_test_reports) do
        has_reports?(::Ci::JobArtifact.of_report_type(:test))
      end
    end

    def age_in_minutes
      return 0 unless persisted?
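
      # Guard against partially-loaded records: `created_at` must have been
      # selected for the age calculation below to be meaningful.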
      unless has_attribute?(:created_at)
        raise ArgumentError, 'pipeline not fully loaded'
      end

      return 0 unless created_at

      (Time.current - created_at).ceil / 60
    end

    private

    def cancel_jobs(jobs, retries: 1, auto_canceled_by_pipeline_id: nil)
      retry_lock(jobs, retries, name: 'ci_pipeline_cancel_running') do |statuses|
        preloaded_relations = [:project, :pipeline, :deployment, :taggings]

        statuses.find_in_batches do |status_batch|
          relation = CommitStatus.where(id: status_batch)
          Preloaders::CommitStatusPreloader.new(relation).execute(preloaded_relations)

          relation.each do |job|
            job.auto_canceled_by_id = auto_canceled_by_pipeline_id if auto_canceled_by_pipeline_id
            job.cancel
          end
        end
      end
    end

    # For parent-child pipelines only (not multi-project)
    def cancel_children(auto_canceled_by_pipeline_id: nil, execute_async: true)
      all_child_pipelines.each do |child_pipeline|
        if execute_async
          ::Ci::CancelPipelineWorker.perform_async(
            child_pipeline.id,
            auto_canceled_by_pipeline_id
          )
        else
          child_pipeline.cancel_running(
            # cascade_to_children is false because we iterate through the children ourselves;
            # we also cancel bridges beforehand to prevent new children from being created
            cascade_to_children: false,
            execute_async: execute_async,
            auto_canceled_by_pipeline_id: auto_canceled_by_pipeline_id
          )
        end
      end
    end

    def add_message(severity, content)
      messages.build(severity: severity, content: content)
    end

    def merge_request_diff_sha
      return unless merge_request?

      if merged_result_pipeline?
        source_sha
      else
        sha
      end
    end

    def merge_request_diff
      return unless merge_request?

      merge_request.merge_request_diff_for(merge_request_diff_sha)
    end

    def push_details
      strong_memoize(:push_details) do
        Gitlab::Git::Push.new(project, before_sha, sha, git_ref)
      end
    end

    def git_ref
      strong_memoize(:git_ref) do
        if merge_request?
          ##
          # In the future, we're going to change this ref to
          # the merge request's merged reference, such as "refs/merge-requests/:iid/merge".
          # In order to do that, we have to update GitLab Runner's source pulling
          # logic.
          # See https://gitlab.com/gitlab-org/gitlab-runner/merge_requests/1092
          Gitlab::Git::BRANCH_REF_PREFIX + ref.to_s
        else
          super
        end
      end
    end

    def keep_around_commits
      return unless project

      project.repository.keep_around(self.sha, self.before_sha)
    end

    def observe_age_in_minutes
      return unless age_metric_enabled?
      return unless persisted? && has_attribute?(:created_at)

      ::Gitlab::Ci::Pipeline::Metrics
        .pipeline_age_histogram
        .observe({}, age_in_minutes)
    end

    def age_metric_enabled?
      ::Gitlab::SafeRequestStore.fetch(:age_metric_enabled) do
        ::Feature.enabled?(:ci_pipeline_age_histogram, type: :ops)
      end
    end

    # Without using `unscoped`, the caller's scope is also included in the query.
    # Using `unscoped` here will be redundant after Rails 6.1.
    def object_hierarchy(options = {})
      ::Gitlab::Ci::PipelineObjectHierarchy
        .new(self.class.unscoped.where(id: id), options: options)
    end
  end
end

Ci::Pipeline.prepend_mod_with('Ci::Pipeline')