# frozen_string_literal: true
module WorkhorseHelpers
extend self
UPLOAD_PARAM_NAMES = %w[name size path remote_id sha256 type].freeze
def workhorse_send_data
|
|
|
|
@_workhorse_send_data ||= begin
|
|
|
|
header = response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]
|
|
|
|
split_header = header.split(':')
|
|
|
|
type = split_header.shift
|
|
|
|
header = split_header.join(':')
|
|
|
|
[
|
|
|
|
type,
|
2020-05-24 23:13:21 +05:30
|
|
|
Gitlab::Json.parse(Base64.urlsafe_decode64(header))
|
2016-06-02 11:05:42 +05:30
|
|
|
]
|
|
|
|
end
|
|
|
|
end
def workhorse_internal_api_request_header
|
|
|
|
{ 'HTTP_' + Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER.upcase.tr('-', '_') => jwt_token }
|
|
|
|
end
# workhorse_post_with_file will transform file_key inside params as if it was disk accelerated by workhorse
|
|
|
|
def workhorse_post_with_file(url, file_key:, params:)
|
2019-12-21 20:55:43 +05:30
|
|
|
workhorse_request_with_file(:post, url,
|
|
|
|
file_key: file_key,
|
|
|
|
params: params,
|
|
|
|
env: { 'CONTENT_TYPE' => 'multipart/form-data' },
|
|
|
|
send_rewritten_field: true
|
|
|
|
)
|
|
|
|
end
# workhorse_finalize will transform file_key inside params as if it was the finalize call of an inline object storage upload.
|
|
|
|
# note that based on the content of the params it can simulate a disc acceleration or an object storage upload
|
2020-04-08 14:13:33 +05:30
|
|
|
def workhorse_finalize(url, method: :post, file_key:, params:, headers: {}, send_rewritten_field: false)
|
2020-04-15 14:45:12 +05:30
|
|
|
workhorse_finalize_with_multiple_files(url, method: method, file_keys: file_key, params: params, headers: headers, send_rewritten_field: send_rewritten_field)
|
|
|
|
end
def workhorse_finalize_with_multiple_files(url, method: :post, file_keys:, params:, headers: {}, send_rewritten_field: false)
|
|
|
|
workhorse_request_with_multiple_files(method, url,
|
|
|
|
file_keys: file_keys,
|
|
|
|
params: params,
|
|
|
|
extra_headers: headers,
|
|
|
|
send_rewritten_field: send_rewritten_field
|
2019-12-21 20:55:43 +05:30
|
|
|
)
|
|
|
|
end
def workhorse_request_with_file(method, url, file_key:, params:, env: {}, extra_headers: {}, send_rewritten_field:)
|
2020-04-15 14:45:12 +05:30
|
|
|
workhorse_request_with_multiple_files(method, url, file_keys: file_key, params: params, env: env, extra_headers: extra_headers, send_rewritten_field: send_rewritten_field)
|
|
|
|
end
def workhorse_request_with_multiple_files(method, url, file_keys:, params:, env: {}, extra_headers: {}, send_rewritten_field:)
|
2019-12-04 20:38:33 +05:30
|
|
|
workhorse_params = params.dup
|
|
|
|
|
2020-04-15 14:45:12 +05:30
|
|
|
file_keys = Array(file_keys)
|
|
|
|
rewritten_fields = {}
|
|
|
|
|
|
|
|
file_keys.each do |key|
|
|
|
|
file = workhorse_params.delete(key)
|
|
|
|
rewritten_fields[key] = file.path if file
|
|
|
|
workhorse_params = workhorse_disk_accelerated_file_params(key, file).merge(workhorse_params)
|
2020-11-24 15:15:51 +05:30
|
|
|
workhorse_params = workhorse_params.merge(jwt_file_upload_param(key: key, params: workhorse_params))
|
2020-04-15 14:45:12 +05:30
|
|
|
end
|
2019-12-21 20:55:43 +05:30
|
|
|
|
|
|
|
headers = if send_rewritten_field
|
2020-04-15 14:45:12 +05:30
|
|
|
workhorse_rewritten_fields_header(rewritten_fields)
|
2019-12-21 20:55:43 +05:30
|
|
|
else
|
|
|
|
{}
|
|
|
|
end
|
|
|
|
|
|
|
|
headers.merge!(extra_headers)
|
2019-12-04 20:38:33 +05:30
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
process(method, url, params: workhorse_params, headers: headers, env: env)
|
2019-12-04 20:38:33 +05:30
|
|
|
end
private
def jwt_file_upload_param(key:, params:)
|
|
|
|
upload_params = UPLOAD_PARAM_NAMES.map do |file_upload_param|
|
|
|
|
[file_upload_param, params["#{key}.#{file_upload_param}"]]
|
|
|
|
end
|
|
|
|
upload_params = upload_params.to_h.compact
|
|
|
|
|
|
|
|
return {} if upload_params.empty?
|
|
|
|
|
|
|
|
{ "#{key}.gitlab-workhorse-upload" => jwt_token('upload' => upload_params) }
|
|
|
|
end
def jwt_token(data = {}, issuer: 'gitlab-workhorse', secret: Gitlab::Workhorse.secret, algorithm: 'HS256')
|
|
|
|
JWT.encode({ 'iss' => issuer }.merge(data), secret, algorithm)
|
2019-12-04 20:38:33 +05:30
|
|
|
end
def workhorse_rewritten_fields_header(fields)
|
|
|
|
{ Gitlab::Middleware::Multipart::RACK_ENV_KEY => jwt_token('rewritten_fields' => fields) }
|
|
|
|
end
def workhorse_disk_accelerated_file_params(key, file)
|
2019-12-21 20:55:43 +05:30
|
|
|
return {} unless file
|
|
|
|
|
2019-12-04 20:38:33 +05:30
|
|
|
{
|
|
|
|
"#{key}.name" => file.original_filename,
|
2019-12-21 20:55:43 +05:30
|
|
|
"#{key}.size" => file.size
|
|
|
|
}.tap do |params|
|
2020-04-15 14:45:12 +05:30
|
|
|
if file.path
|
|
|
|
params["#{key}.path"] = file.path
|
|
|
|
params["#{key}.sha256"] = Digest::SHA256.file(file.path).hexdigest
|
|
|
|
end
|
|
|
|
|
2020-03-28 13:19:24 +05:30
|
|
|
params["#{key}.remote_id"] = file.remote_id if file.respond_to?(:remote_id) && file.remote_id.present?
|
2019-12-21 20:55:43 +05:30
|
|
|
end
|
|
|
|
end
def fog_to_uploaded_file(file)
|
|
|
|
filename = File.basename(file.key)
|
|
|
|
|
|
|
|
UploadedFile.new(nil,
|
|
|
|
filename: filename,
|
|
|
|
remote_id: filename,
|
|
|
|
size: file.content_length
|
|
|
|
)
|
2019-12-04 20:38:33 +05:30
|
|
|
end
end