debian-mirror-gitlab/app/models/deployment.rb

# frozen_string_literal: true

class Deployment < ApplicationRecord
  include AtomicInternalId
  include IidRoutes
  include AfterCommitQueue
  include UpdatedAtFilterable
  include Importable
  include Gitlab::Utils::StrongMemoize
  include FastDestroyAll

  StatusUpdateError = Class.new(StandardError)
  StatusSyncError = Class.new(StandardError)

  ARCHIVABLE_OFFSET = 50_000

  belongs_to :project, optional: false
  belongs_to :environment, optional: false
  belongs_to :cluster, class_name: 'Clusters::Cluster', optional: true
  belongs_to :user
  belongs_to :deployable, polymorphic: true, optional: true, inverse_of: :deployment # rubocop:disable Cop/PolymorphicAssociations

  has_many :deployment_merge_requests
  has_many :merge_requests,
    through: :deployment_merge_requests

  has_one :deployment_cluster

  has_internal_id :iid, scope: :project, track_if: -> { !importing? }

  validates :sha, presence: true
  validates :ref, presence: true
  validate :valid_sha, on: :create
  validate :valid_ref, on: :create

  delegate :name, to: :environment, prefix: true
  delegate :kubernetes_namespace, to: :deployment_cluster, allow_nil: true

  scope :for_iid, -> (project, iid) { where(project: project, iid: iid) }
  scope :for_environment, -> (environment) { where(environment_id: environment) }
  scope :for_environment_name, -> (project, name) do
    where('deployments.environment_id = (?)',
      Environment.select(:id).where(project: project, name: name).limit(1))
  end
  scope :for_status, -> (status) { where(status: status) }
  scope :for_project, -> (project_id) { where(project_id: project_id) }
  scope :for_projects, -> (projects) { where(project: projects) }

  scope :visible, -> { where(status: VISIBLE_STATUSES) }
  scope :stoppable, -> { where.not(on_stop: nil).where.not(deployable_id: nil).success }
  scope :active, -> { where(status: %i[created running]) }
  scope :upcoming, -> { where(status: %i[blocked running]) }
  scope :older_than, -> (deployment) { where('deployments.id < ?', deployment.id) }
  scope :with_api_entity_associations, -> { preload({ deployable: { runner: [], tags: [], user: [], job_artifacts_archive: [] } }) }
  scope :with_environment_page_associations, -> { preload(project: [], environment: [], deployable: [:user, :metadata, :project, pipeline: [:manual_actions]]) }

  scope :finished_after, ->(date) { where('finished_at >= ?', date) }
  scope :finished_before, ->(date) { where('finished_at < ?', date) }

  scope :ordered, -> { order(finished_at: :desc) }
  scope :ordered_as_upcoming, -> { order(id: :desc) }

  VISIBLE_STATUSES = %i[running success failed canceled blocked].freeze
  FINISHED_STATUSES = %i[success failed canceled].freeze
  UPCOMING_STATUSES = %i[created blocked running].freeze

  state_machine :status, initial: :created do
    event :run do
      transition created: :running
    end

    event :block do
      transition created: :blocked
    end

    # This transition is possible when we have manual jobs.
    event :create do
      transition skipped: :created
    end

    event :unblock do
      transition blocked: :created
    end

    event :succeed do
      transition any - [:success] => :success
    end

    event :drop do
      transition any - [:failed] => :failed
    end

    event :cancel do
      transition any - [:canceled] => :canceled
    end

    event :skip do
      transition any - [:skipped] => :skipped
    end

    before_transition any => FINISHED_STATUSES do |deployment|
      deployment.finished_at = Time.current
    end

    after_transition any => :running do |deployment, transition|
      deployment.run_after_commit do
        perform_params = { deployment_id: id, status: transition.to, status_changed_at: Time.current }
        serialize_params_for_sidekiq!(perform_params)

        Deployments::HooksWorker.perform_async(perform_params)
      end
    end

    after_transition any => :success do |deployment|
      deployment.run_after_commit do
        Deployments::UpdateEnvironmentWorker.perform_async(id)
        Deployments::LinkMergeRequestWorker.perform_async(id)
        Deployments::ArchiveInProjectWorker.perform_async(deployment.project_id)
      end
    end

    after_transition any => FINISHED_STATUSES do |deployment, transition|
      deployment.run_after_commit do
        perform_params = { deployment_id: id, status: transition.to, status_changed_at: Time.current }
        serialize_params_for_sidekiq!(perform_params)

        Deployments::HooksWorker.perform_async(perform_params)
      end
    end

    after_transition any => any - [:skipped] do |deployment, transition|
      next if transition.loopback?

      deployment.run_after_commit do
        next unless deployment.project.jira_subscription_exists?

        ::JiraConnect::SyncDeploymentsWorker.perform_async(id)
      end
    end
  end

  after_create unless: :importing? do |deployment|
    run_after_commit do
      next unless deployment.project.jira_subscription_exists?

      ::JiraConnect::SyncDeploymentsWorker.perform_async(deployment.id)
    end
  end

  enum status: {
    created: 0,
    running: 1,
    success: 2,
    failed: 3,
    canceled: 4,
    skipped: 5,
    blocked: 6
  }

  def self.archivables_in(project, limit:)
    start_iid = project.deployments.order(iid: :desc).limit(1)
      .select("(iid - #{ARCHIVABLE_OFFSET}) AS start_iid")

    project.deployments.preload(:environment).where('iid <= (?)', start_iid)
      .where(archived: false).limit(limit)
  end
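
  # Illustrative usage sketch (hypothetical numbers): with ARCHIVABLE_OFFSET at
  # 50_000, a project whose newest deployment has iid 60_000 would yield
  # unarchived deployments with iid <= 10_000, capped at `limit`:
  #
  #   Deployment.archivables_in(project, limit: 100)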

  def self.last_for_environment(environment)
    ids = self
      .for_environment(environment)
      .select('MAX(id) AS id')
      .group(:environment_id)
      .map(&:id)
    find(ids)
  end

  # Returns the deployment records of the last deployment pipeline that
  # executed successfully for the given environment.
  # e.g.
  # A pipeline contains
  #   - deploy job A => production environment
  #   - deploy job B => production environment
  # In this case, `last_deployment_group` returns both deployments.
  #
  # NOTE: Preload environment.last_deployment and pipeline.latest_successful_builds beforehand to avoid N+1 queries.
  def self.last_deployment_group_for_environment(env)
    return self.none unless env.last_deployment_pipeline&.latest_successful_builds&.present?

    BatchLoader.for(env).batch(default_value: self.none) do |environments, loader|
      latest_successful_build_ids = []
      environments_hash = {}

      environments.each do |environment|
        environments_hash[environment.id] = environment

        # See the NOTE above: without preloading this can lead to N+1 queries.
        latest_successful_build_ids << environment.last_deployment_pipeline.latest_successful_builds.map(&:id)
      end

      Deployment
        .where(deployable_type: 'CommitStatus', deployable_id: latest_successful_build_ids.flatten)
        .preload(last_deployment_group_associations)
        .group_by { |deployment| deployment.environment_id }
        .each do |env_id, deployment_group|
          loader.call(environments_hash[env_id], deployment_group)
        end
    end
  end
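
  # Illustrative usage sketch: resolve the latest successful deployment group
  # for several environments without one query per environment (BatchLoader
  # collects the environments and resolves them in a single batch):
  #
  #   project.environments.each do |env|
  #     Deployment.last_deployment_group_for_environment(env) # lazy BatchLoader
  #   end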

  def self.find_successful_deployment!(iid)
    success.find_by!(iid: iid)
  end

  # Use with caution, especially when chaining: fetching any unbounded or large
  # intermediate dataset could load too many IDs into memory.
  # See: https://docs.gitlab.com/ee/development/database/multiple_databases.html#use-disable_joins-for-has_one-or-has_many-through-relations
  # For safety, the limit defaults to fetching no more than 1000 records.
  def self.builds(limit = 1000)
    deployable_ids = where.not(deployable_id: nil).limit(limit).pluck(:deployable_id)

    Ci::Build.where(id: deployable_ids)
  end
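
  # Illustrative usage sketch: scope the relation *before* calling `builds` so
  # the plucked ID set stays bounded (the environment scope here is an
  # assumption about the caller, not part of this file's API):
  #
  #   Deployment.for_environment(environment).success.builds(100)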

  def build
    deployable if deployable.is_a?(::Ci::Build)
  end

  class << self
    ##
    # FastDestroyAll concerns
    def begin_fast_destroy
      preload(:project).find_each.map do |deployment|
        [deployment.project, deployment.ref_path]
      end
    end

    ##
    # FastDestroyAll concerns
    def finalize_fast_destroy(params)
      by_project = params.group_by(&:shift)

      by_project.each do |project, ref_paths|
        project.repository.delete_refs(*ref_paths.flatten)
      end
    end

    def latest_for_sha(sha)
      where(sha: sha).order(id: :desc).take
    end
  end

  def commit
    @commit ||= project.commit(sha)
  end

  def commit_title
    commit.try(:title)
  end

  def short_sha
    Commit.truncate_sha(sha)
  end

  def execute_hooks(status, status_changed_at)
    deployment_data = Gitlab::DataBuilder::Deployment.build(self, status, status_changed_at)
    project.execute_hooks(deployment_data, :deployment_hooks)
    project.execute_integrations(deployment_data, :deployment_hooks)
  end

  def last?
    self == environment.last_deployment
  end

  def create_ref
    project.repository.create_ref(sha, ref_path)
  end

  def invalidate_cache
    environment.expire_etag_cache
  end

  def manual_actions
    @manual_actions ||= deployable.try(:other_manual_actions)
  end

  def scheduled_actions
    @scheduled_actions ||= deployable.try(:other_scheduled_actions)
  end

  def playable_build
    strong_memoize(:playable_build) do
      deployable.try(:playable?) ? deployable : nil
    end
  end

  def includes_commit?(ancestor_sha)
    return false unless sha

    project.repository.ancestor?(ancestor_sha, sha)
  end

  def older_than_last_successful_deployment?
    last_deployment_id = environment&.last_deployment&.id

    return false unless last_deployment_id.present?
    return false if self.id == last_deployment_id
    return false if self.sha == environment.last_deployment&.sha

    self.id < last_deployment_id
  end

  def update_merge_request_metrics!
    return unless environment.production? && success?

    merge_requests = project.merge_requests
                     .joins(:metrics)
                     .where(target_branch: self.ref, merge_request_metrics: { first_deployed_to_production_at: nil })
                     .where("merge_request_metrics.merged_at <= ?", finished_at)

    if previous_deployment
      merge_requests = merge_requests.where("merge_request_metrics.merged_at >= ?", previous_deployment.finished_at)
    end

    MergeRequest::Metrics
      .where(merge_request_id: merge_requests.select(:id), first_deployed_to_production_at: nil)
      .update_all(first_deployed_to_production_at: finished_at)
  end

  def previous_deployment
    @previous_deployment ||=
      self.class.for_environment(environment_id)
        .success
        .where('id < ?', id)
        .order(id: :desc)
        .take
  end

  def stop_action
    return unless on_stop.present?
    return unless manual_actions

    @stop_action ||= manual_actions.find { |action| action.name == self.on_stop }
  end

  def deployed_at
    return unless success?

    finished_at
  end

  def formatted_deployment_time
    deployed_at&.to_time&.in_time_zone&.to_s(:medium)
  end

  def deployed_by
    # We use the deployable's user if available, because Ci::PlayBuildService
    # does not update the deployment's user, only the deployable's.
    # TODO: use the deployment's user once https://gitlab.com/gitlab-org/gitlab-foss/issues/66442
    # is completed.
    deployable&.user || user
  end

  def triggered_by?(user)
    deployed_by == user
  end

  def link_merge_requests(relation)
    # NOTE: relation.select performs column deduplication:
    # when id == environment_id it will output 2 columns instead of 3,
    # i.e.:
    # MergeRequest.select(1, 2).to_sql #=> SELECT 1, 2 FROM "merge_requests"
    # MergeRequest.select(1, 1).to_sql #=> SELECT 1 FROM "merge_requests"
    select = relation.select(
      'merge_requests.id',
      "#{id} as deployment_id",
      "#{environment_id} as environment_id"
    ).to_sql

    # We don't use `ApplicationRecord.legacy_bulk_insert` here so that we don't
    # need to first pluck lots of IDs into memory.
    #
    # We also ignore any duplicates, so this method can be called multiple times
    # for the same deployment, only inserting any missing merge requests.
    DeploymentMergeRequest.connection.execute(<<~SQL)
      INSERT INTO #{DeploymentMergeRequest.table_name}
      (merge_request_id, deployment_id, environment_id)
      #{select}
      ON CONFLICT DO NOTHING
    SQL
  end
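
  # Illustrative sketch of the SQL produced above, assuming hypothetical ids
  # (deployment 5, environment 2); the subselect comes from the passed-in
  # merge request relation:
  #
  #   INSERT INTO deployment_merge_requests (merge_request_id, deployment_id, environment_id)
  #   SELECT "merge_requests"."id", 5 as deployment_id, 2 as environment_id FROM "merge_requests" ...
  #   ON CONFLICT DO NOTHING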

  # Changes the status of a deployment and triggers the corresponding state
  # machine events.
  def update_status(status)
    update_status!(status)
  rescue StandardError => e
    Gitlab::ErrorTracking.track_exception(
      StatusUpdateError.new(e.message), deployment_id: self.id)

    false
  end

  def sync_status_with(build)
    return false unless ::Deployment.statuses.include?(build.status)
    return false if build.status == self.status

    update_status!(build.status)
  rescue StandardError => e
    Gitlab::ErrorTracking.track_exception(
      StatusSyncError.new(e.message), deployment_id: self.id, build_id: build.id)

    false
  end
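
  # Illustrative usage sketch (`build` is an assumption about the caller, e.g.
  # a Ci::Build whose status just changed): mirror the job status onto the
  # deployment, returning false instead of raising when the transition fails:
  #
  #   deployment.sync_status_with(build)   # e.g. 'running' -> run!, 'success' -> succeed!
  #   deployment.update_status('canceled') # explicit status change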

  def valid_sha
    return if project&.commit(sha)

    errors.add(:sha, _('The commit does not exist'))
  end

  def valid_ref
    return if project&.commit(ref)

    errors.add(:ref, _('The branch or tag does not exist'))
  end

  def ref_path
    File.join(environment.ref_path, 'deployments', iid.to_s)
  end

  def equal_to?(params)
    ref == params[:ref] &&
      tag == params[:tag] &&
      sha == params[:sha] &&
      status == params[:status]
  end

  def tier_in_yaml
    return unless deployable

    deployable.environment_tier_from_options
  end

  # The default tag limit is 100; 0 means no limit.
  # When refs_by_oid is passed a SHA, it returns the refs for that commit.
  def tags(limit: 100)
    strong_memoize_with(:tag, limit) do
      project.repository.refs_by_oid(oid: sha, limit: limit, ref_patterns: [Gitlab::Git::TAG_REF_PREFIX]) || []
    end
  end
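
  # Illustrative usage sketch: list up to 20 tag refs pointing at the deployed
  # commit; results are memoized per limit value by strong_memoize_with:
  #
  #   deployment.tags(limit: 20) # => tag refs for the commit, e.g. ["refs/tags/v1.2.3"]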

  private

  def update_status!(status)
    case status
    when 'running'
      run!
    when 'success'
      succeed!
    when 'failed'
      drop!
    when 'canceled'
      cancel!
    when 'skipped'
      skip!
    when 'blocked'
      block!
    when 'created'
      create!
    else
      raise ArgumentError, "The status #{status.inspect} is invalid"
    end
  end

  def serialize_params_for_sidekiq!(perform_params)
    perform_params[:status_changed_at] = perform_params[:status_changed_at].to_s
    perform_params.stringify_keys!
  end

  def self.last_deployment_group_associations
    {
      deployable: {
        pipeline: {
          manual_actions: []
        }
      }
    }
  end

  private_class_method :last_deployment_group_associations
end

Deployment.prepend_mod_with('Deployment')