# frozen_string_literal: true

module Ci
  module JobArtifacts
    # Service responsible for the CI job-artifact upload flow:
    # - `authorize` pre-authorizes a Workhorse upload (size limits, LSIF handling)
    # - `execute` builds, parses, and persists the uploaded artifact (plus its
    #   optional metadata artifact) for a given job.
    class CreateService < ::BaseService
      include Gitlab::Utils::UsageData

      # Artifact type that receives special code-intelligence processing.
      LSIF_ARTIFACT_TYPE = 'lsif'

      # Transient object-storage failures that should surface as
      # :service_unavailable rather than :bad_request (see #persist_artifact).
      OBJECT_STORAGE_ERRORS = [
        Errno::EIO,
        Google::Apis::ServerError,
        Signet::RemoteServerError
      ].freeze

      # @param job [Ci::Build] the job the artifact belongs to; project and
      #   pipeline are derived from it.
      def initialize(job)
        @job = job
        @project = job.project
        @pipeline = job.pipeline
      end

      # Pre-authorizes an artifact upload through Workhorse.
      #
      # @param artifact_type [String] e.g. 'archive', 'lsif'
      # @param filesize [Integer, nil] declared size; validated against the
      #   per-type maximum when present
      # @return [Hash] service result; on success includes Workhorse headers
      def authorize(artifact_type:, filesize: nil)
        result = validate_requirements(artifact_type: artifact_type, filesize: filesize)
        return result unless result[:status] == :success

        headers = JobArtifactUploader.workhorse_authorize(
          has_length: false,
          maximum_size: max_size(artifact_type),
          use_final_store_path: Feature.enabled?(:ci_artifacts_upload_to_final_location, project)
        )

        if lsif?(artifact_type)
          # LSIF artifacts are processed by Workhorse for code intelligence.
          headers[:ProcessLsif] = true
          track_usage_event('i_source_code_code_intelligence', project.id)
        end

        success(headers: headers)
      end

      # Creates and persists the uploaded artifact (and optional metadata file).
      #
      # Skips persistence when an artifact of the same type with an identical
      # SHA256 already exists (idempotent re-upload).
      #
      # @param artifacts_file [UploadedFile] the artifact payload
      # @param params [Hash] includes :artifact_type, :artifact_format,
      #   'expire_in', :accessibility
      # @param metadata_file [UploadedFile, nil] optional gzipped metadata
      # @return [Hash] service result
      def execute(artifacts_file, params, metadata_file: nil)
        result = validate_requirements(artifact_type: params[:artifact_type], filesize: artifacts_file.size)
        return result unless result[:status] == :success

        return success if sha256_matches_existing_artifact?(params[:artifact_type], artifacts_file)

        build_result = build_artifact(artifacts_file, params, metadata_file)
        return build_result unless build_result[:status] == :success

        artifact = build_result[:artifact]
        artifact_metadata = build_result[:artifact_metadata]

        track_artifact_uploader(artifact)

        parse_result = parse_artifact(artifact)
        return parse_result unless parse_result[:status] == :success

        persist_artifact(artifact, artifact_metadata)
      end

      private

      attr_reader :job, :project, :pipeline

      # Returns an error result when the declared size exceeds the per-type
      # maximum; success otherwise.
      def validate_requirements(artifact_type:, filesize:)
        return too_large_error if too_large?(artifact_type, filesize)

        success
      end

      # nil when size is not given (size check is skipped in that case).
      def too_large?(type, size)
        size > max_size(type) if size
      end

      def lsif?(type)
        type == LSIF_ARTIFACT_TYPE
      end

      # Maximum allowed artifact size for this type/project, in bytes.
      def max_size(type)
        Ci::JobArtifact.max_artifact_size(type: type, project: project)
      end

      def too_large_error
        error('file size has reached maximum size limit', :payload_too_large)
      end

      # Builds (without saving) the Ci::JobArtifact record and, when a metadata
      # file is supplied, its companion metadata artifact.
      def build_artifact(artifacts_file, params, metadata_file)
        artifact_attributes = {
          job: job,
          project: project,
          expire_in: expire_in(params),
          accessibility: accessibility(params),
          locked: pipeline.locked
        }

        file_attributes = {
          file_type: params[:artifact_type],
          file_format: params[:artifact_format],
          file_sha256: artifacts_file.sha256,
          file: artifacts_file
        }

        artifact = Ci::JobArtifact.new(artifact_attributes.merge(file_attributes))

        artifact_metadata = build_metadata_artifact(artifact, metadata_file) if metadata_file

        success(artifact: artifact, artifact_metadata: artifact_metadata)
      end

      # The metadata artifact mirrors the primary artifact's ownership, expiry,
      # lock state, and accessibility; it is always a gzip of type :metadata.
      def build_metadata_artifact(job_artifact, metadata_file)
        Ci::JobArtifact.new(
          job: job_artifact.job,
          project: job_artifact.project,
          expire_at: job_artifact.expire_at,
          locked: job_artifact.locked,
          file: metadata_file,
          file_type: :metadata,
          file_format: :gzip,
          file_sha256: metadata_file.sha256,
          accessibility: job_artifact.accessibility
        )
      end

      # NOTE: reads the string key 'expire_in' (not a symbol) — callers pass
      # API params; falls back to the instance-wide default.
      def expire_in(params)
        params['expire_in'] || Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
      end

      # Resolves artifact visibility: forced :public when the
      # non_public_artifacts feature flag is disabled; otherwise the explicit
      # param wins, then the job's own artifacts_public? setting.
      def accessibility(params)
        accessibility = params[:accessibility]

        return :public if Feature.disabled?(:non_public_artifacts, type: :development)

        return accessibility if accessibility.present?

        job.artifacts_public? ? :public : :private
      end

      # Post-processing hook per file type; only dotenv artifacts are parsed.
      def parse_artifact(artifact)
        case artifact.file_type
        when 'dotenv' then parse_dotenv_artifact(artifact)
        else success
        end
      end

      # Saves the artifact records atomically and maps failure classes to
      # service-level error results.
      def persist_artifact(artifact, artifact_metadata)
        job.transaction do
          # NOTE: The `artifacts_expire_at` column is already deprecated and to be removed in the near future.
          # Running it first because in migrations we lock the `ci_builds` table
          # first and then the others. This reduces the chances of deadlocks.
          job.update_column(:artifacts_expire_at, artifact.expire_at)

          artifact.save!
          artifact_metadata&.save!
        end

        success(artifact: artifact)
      rescue ActiveRecord::RecordNotUnique => error
        track_exception(error, artifact.file_type)
        error('another artifact of the same type already exists', :bad_request)
      rescue *OBJECT_STORAGE_ERRORS => error
        # Transient storage failures: tell the client to retry later.
        track_exception(error, artifact.file_type)
        error(error.message, :service_unavailable)
      rescue StandardError => error
        track_exception(error, artifact.file_type)
        error(error.message, :bad_request)
      end

      # True when an artifact of this type already exists with the same
      # SHA256 — the upload is then treated as a no-op success.
      def sha256_matches_existing_artifact?(artifact_type, artifacts_file)
        existing_artifact = job.job_artifacts.find_by_file_type(artifact_type)
        return false unless existing_artifact

        existing_artifact.file_sha256 == artifacts_file.sha256
      end

      def track_exception(error, artifact_type)
        Gitlab::ErrorTracking.track_exception(
          error,
          job_id: job.id,
          project_id: job.project_id,
          uploading_type: artifact_type
        )
      end

      def track_artifact_uploader(_artifact)
        # Overridden in EE
      end

      def parse_dotenv_artifact(artifact)
        Ci::ParseDotenvArtifactService.new(project, current_user).execute(artifact)
      end
    end
  end
end

Ci::JobArtifacts::CreateService.prepend_mod
|