# frozen_string_literal: true

module Ci
  module JobArtifacts
    class CreateService < ::BaseService
      include Gitlab::Utils::UsageData

      LSIF_ARTIFACT_TYPE = 'lsif'

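      # Object storage failures that can occur while persisting the uploaded file;
      # these are rescued in #persist_artifact and reported as :service_unavailable
      # rather than :bad_request.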
      OBJECT_STORAGE_ERRORS = [
        Errno::EIO,
        Google::Apis::ServerError,
        Signet::RemoteServerError
      ].freeze

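      # NOTE: @pipeline is only assigned while the `ci_update_unlocked_job_artifacts`
      # feature flag is enabled; #build_artifact guards its use of `pipeline.locked`
      # behind the same flag.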
      def initialize(job)
        @job = job
        @project = job.project
        @pipeline = job.pipeline if ::Feature.enabled?(:ci_update_unlocked_job_artifacts, @project)
      end

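      # Pre-upload authorization: validates the size limit for the artifact type and
      # returns Workhorse upload headers. LSIF artifacts additionally get the
      # `ProcessLsif` header and increment the code intelligence usage counter.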
      def authorize(artifact_type:, filesize: nil)
        result = validate_requirements(artifact_type: artifact_type, filesize: filesize)
        return result unless result[:status] == :success

        headers = JobArtifactUploader.workhorse_authorize(has_length: false, maximum_size: max_size(artifact_type))

        if lsif?(artifact_type)
          headers[:ProcessLsif] = true
          track_usage_event('i_source_code_code_intelligence', project.id)
        end

        success(headers: headers)
      end

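      # Entry point for a completed upload. Validates the size limit, short-circuits
      # when an artifact with the same type and SHA256 already exists, builds the
      # artifact (and optional metadata) records, parses supported file types
      # (currently dotenv), and persists everything.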
      def execute(artifacts_file, params, metadata_file: nil)
        result = validate_requirements(artifact_type: params[:artifact_type], filesize: artifacts_file.size)
        return result unless result[:status] == :success

        return success if sha256_matches_existing_artifact?(params[:artifact_type], artifacts_file)

        artifact, artifact_metadata = build_artifact(artifacts_file, params, metadata_file)
        result = parse_artifact(artifact)

        track_artifact_uploader(artifact)

        return result unless result[:status] == :success

        persist_artifact(artifact, artifact_metadata, params)
      end

      private

      attr_reader :job, :project, :pipeline

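      # Size validation helpers: the limit is resolved per artifact type and project
      # via Ci::JobArtifact.max_artifact_size; a missing filesize skips the check.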
      def validate_requirements(artifact_type:, filesize:)
        return too_large_error if too_large?(artifact_type, filesize)

        success
      end

      def too_large?(type, size)
        size > max_size(type) if size
      end

      def lsif?(type)
        type == LSIF_ARTIFACT_TYPE
      end

      def max_size(type)
        Ci::JobArtifact.max_artifact_size(type: type, project: project)
      end

      def too_large_error
        error('file size has reached maximum size limit', :payload_too_large)
      end

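      # Builds the Ci::JobArtifact record (plus an optional :metadata record) without
      # saving it. `expire_in` falls back to the instance-wide
      # default_artifacts_expire_in setting when the upload params don't provide one.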
      def build_artifact(artifacts_file, params, metadata_file)
        expire_in = params['expire_in'] ||
          Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in

        artifact_attributes = {
          job: job,
          project: project,
          expire_in: expire_in
        }

        artifact_attributes[:locked] = pipeline.locked if ::Feature.enabled?(:ci_update_unlocked_job_artifacts, project)

        artifact = Ci::JobArtifact.new(
          artifact_attributes.merge(
            file: artifacts_file,
            file_type: params[:artifact_type],
            file_format: params[:artifact_format],
            file_sha256: artifacts_file.sha256
          )
        )

        artifact_metadata = if metadata_file
                              Ci::JobArtifact.new(
                                artifact_attributes.merge(
                                  file: metadata_file,
                                  file_type: :metadata,
                                  file_format: :gzip,
                                  file_sha256: metadata_file.sha256
                                )
                              )
                            end

        [artifact, artifact_metadata]
      end

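      # Some artifact types carry data that has to be ingested at upload time;
      # currently only dotenv reports are parsed here.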
      def parse_artifact(artifact)
        case artifact.file_type
        when 'dotenv' then parse_dotenv_artifact(artifact)
        else success
        end
      end

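      # Saves the artifact (and its metadata, if any) in a single transaction and
      # maps failures to error responses: duplicates become :bad_request, object
      # storage errors become :service_unavailable, anything else :bad_request.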
      def persist_artifact(artifact, artifact_metadata, params)
        Ci::JobArtifact.transaction do
          artifact.save!
          artifact_metadata&.save!

          # NOTE: The `artifacts_expire_at` column is already deprecated and to be removed in the near future.
          job.update_column(:artifacts_expire_at, artifact.expire_at)
        end

        Gitlab::Ci::Artifacts::Logger.log_created(artifact)

        success(artifact: artifact)
      rescue ActiveRecord::RecordNotUnique => error
        track_exception(error, params)
        error('another artifact of the same type already exists', :bad_request)
      rescue *OBJECT_STORAGE_ERRORS => error
        track_exception(error, params)
        error(error.message, :service_unavailable)
      rescue StandardError => error
        track_exception(error, params)
        error(error.message, :bad_request)
      end

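      # Deduplication check used by #execute: the upload is treated as a no-op when
      # an artifact of the same file type already exists with an identical SHA256.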
      def sha256_matches_existing_artifact?(artifact_type, artifacts_file)
        existing_artifact = job.job_artifacts.find_by_file_type(artifact_type)
        return false unless existing_artifact

        existing_artifact.file_sha256 == artifacts_file.sha256
      end

      def track_exception(error, params)
        Gitlab::ErrorTracking.track_exception(error,
          job_id: job.id,
          project_id: job.project_id,
          uploading_type: params[:artifact_type]
        )
      end

      def track_artifact_uploader(_artifact)
        # Overridden in EE
      end

      def parse_dotenv_artifact(artifact)
        Ci::ParseDotenvArtifactService.new(project, current_user).execute(artifact)
      end
    end
  end
end

Ci::JobArtifacts::CreateService.prepend_mod
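
# A minimal usage sketch, for illustration only. The `build`, `artifacts_file` and
# `metadata_file` objects are assumed to come from the Runner upload flow (e.g.
# UploadedFile instances handed over by Workhorse); they are not defined here.
#
#   service = Ci::JobArtifacts::CreateService.new(build)
#
#   # Pre-upload check: returns success(headers: ...) or an error such as :payload_too_large
#   service.authorize(artifact_type: 'archive', filesize: artifacts_file.size)
#
#   # After the upload: returns success(artifact: ...) or an error response
#   service.execute(artifacts_file,
#                   { artifact_type: 'archive', artifact_format: 'zip' },
#                   metadata_file: metadata_file)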