debian-mirror-gitlab/app/uploaders/file_uploader.rb

# This class breaks the actual CarrierWave concept.
# Every uploader should use a base_dir that is model agnostic so we can build
# back URLs from base_dir-relative paths saved in the `Upload` model.
#
# As the `.base_dir` is model-dependent and **not** saved in the upload model (see #upload_path),
# there is no way to build back the correct file path without the model, which defies
# the CarrierWave way of storing files.
#
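# Illustrative example (assuming a legacy-storage project at `group/project`
# and an uploaded file `image.png`): the file lives on disk under
#   <storage_path>/uploads/group/project/<32-char-hex-secret>/image.png
# while the `Upload` record only keeps the base_dir-relative part,
#   <32-char-hex-secret>/image.png
# so the model is needed again to resolve the absolute path.
#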
class FileUploader < GitlabUploader
  include UploaderHelper
  include RecordsUploads::Concern
  include ObjectStorage::Concern
  prepend ObjectStorage::Extension::RecordsUploads

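  # Matches markdown references to uploaded files, e.g.
  # `![alt](/uploads/<32-char-hex-secret>/file.png)`; the leading `!` is optional.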
  MARKDOWN_PATTERN = %r{\!?\[.*?\]\(/uploads/(?<secret>[0-9a-f]{32})/(?<file>.*?)\)}
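  # Splits a stored upload path such as `<32-char-hex-secret>/file.png` into
  # its `secret` and `identifier` parts (used as a fallback in #upload=).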
  DYNAMIC_PATH_PATTERN = %r{(?<secret>\h{32})/(?<identifier>.*)}

  after :remove, :prune_store_dir

  # FileUploader does not run in a model transaction, so we can simply
  # enqueue a job after the :store hook.
  after :store, :schedule_background_upload

  def self.root
    File.join(options.storage_path, 'uploads')
  end

  def self.absolute_path(upload)
    File.join(
      absolute_base_dir(upload.model),
      upload.path # already contains the dynamic_segment, see #upload_path
    )
  end

  def self.base_dir(model, store = Store::LOCAL)
    decorated_model = model
    decorated_model = Storage::HashedProject.new(model) if store == Store::REMOTE
    model_path_segment(decorated_model)
  end

  # used in migrations and import/exports
  def self.absolute_base_dir(model)
    File.join(root, base_dir(model))
  end

  # Returns the part of `store_dir` that can change based on the model's current
  # path
  #
  # This is used to build Upload paths dynamically based on the model's current
  # namespace and path, allowing us to ignore renames or transfers.
  #
  # model - Object that responds to `full_path` and `disk_path`
  #
  # Returns a String without a trailing slash
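  # Illustrative values (hypothetical project): a hashed-storage project
  # yields its `disk_path` (e.g. `@hashed/ab/cd/abcd...`), while a
  # legacy-storage project yields its `full_path` (e.g. `group/project`).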
  def self.model_path_segment(model)
    case model
    when Storage::HashedProject then model.disk_path
    else
      model.hashed_storage?(:attachments) ? model.disk_path : model.full_path
    end
  end

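  # Generates the 32-character hex secret used as the dynamic path segment
  # (SecureRandom.hex returns 16 random bytes, hex-encoded).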
  def self.generate_secret
    SecureRandom.hex
  end

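  # Both candidate base_dir-relative paths for a filename: the local layout
  # (`<secret>/<filename>`) and the remote, hashed-storage layout
  # (`#{base_dir(Store::REMOTE)}/<secret>/<filename>`).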
  def upload_paths(filename)
    [
      File.join(secret, filename),
      File.join(base_dir(Store::REMOTE), secret, filename)
    ]
  end

  attr_accessor :model

  def initialize(model, mounted_as = nil, **uploader_context)
    super(model, nil, **uploader_context)

    @model = model
    apply_context!(uploader_context)
  end

  # enforce the usage of Hashed storage when storing to the
  # remote store, as the FileMover doesn't support object storage
  def base_dir(store = nil)
    self.class.base_dir(@model, store || object_store)
  end

  # we don't need to know the actual path; an uploader instance should be
  # able to yield the file content on demand, so we should build the digest
  def absolute_path
    self.class.absolute_path(@upload)
  end

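  # The base_dir-relative path that is persisted in the `Upload` model
  # (see .absolute_path and the class-level comment above).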
  def upload_path
    if file_storage?
      # Legacy path relative to project.full_path
      File.join(dynamic_segment, identifier)
    else
      File.join(store_dir, identifier)
    end
  end

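  # Maps each store to its directory; the remote entry always uses the
  # hashed-storage layout (see #base_dir), while the local entry depends on
  # the model's own storage type.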
  def store_dirs
    {
      Store::LOCAL => File.join(base_dir, dynamic_segment),
      Store::REMOTE => File.join(base_dir(ObjectStorage::Store::REMOTE), dynamic_segment)
    }
  end

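  # Builds the markdown reference for the uploaded file, e.g.
  # `[doc.pdf](/uploads/<secret>/doc.pdf)`, prefixed with `!` for images,
  # videos and files flagged as dangerous.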
  def markdown_link
    markdown = "[#{markdown_name}](#{secure_url})"
    markdown.prepend("!") if image_or_video? || dangerous?
    markdown
  end

  def to_h
    {
      alt: markdown_name,
      url: secure_url,
      markdown: markdown_link
    }
  end

  def filename
    self.file.filename
  end

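  # When an existing Upload is assigned, restore the secret and identifier
  # from its uploader_context, or, failing that, parse them out of the
  # recorded path via DYNAMIC_PATH_PATTERN.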
  def upload=(value)
    super

    return unless value
    return if apply_context!(value.uploader_context)

    # fall back to the regex-based extraction
    if matches = DYNAMIC_PATH_PATTERN.match(value.path)
      @secret = matches[:secret]
      @identifier = matches[:identifier]
    end
  end

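  # The 32-character hex secret for this uploader, generated lazily.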
  def secret
    @secret ||= self.class.generate_secret
  end

  private

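  # Returns true only when both a secret and an identifier were present in
  # the given context.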
  def apply_context!(uploader_context)
    @secret, @identifier = uploader_context.values_at(:secret, :identifier)

    !!(@secret && @identifier)
  end

  def build_upload
    super.tap do |upload|
      upload.secret = secret
    end
  end

  def prune_store_dir
    storage.delete_dir!(store_dir) # only remove when empty
  end

  def markdown_name
    (image_or_video? ? File.basename(filename, File.extname(filename)) : filename).gsub("]", "\\]")
  end

  def identifier
    @identifier ||= filename
  end

  def dynamic_segment
    secret
  end

  def secure_url
    File.join('/uploads', @secret, file.filename)
  end
end